gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package app.metatron.discovery.domain.dataconnection.query.expression;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import app.metatron.discovery.domain.dataconnection.DataConnectionHelper;
import app.metatron.discovery.domain.dataconnection.query.NativeCriteria;
import app.metatron.discovery.domain.dataconnection.query.utils.VarGenerator;
import app.metatron.discovery.extension.dataconnection.jdbc.dialect.JdbcDialect;
import app.metatron.discovery.extension.dataconnection.jdbc.exception.JdbcDataConnectionException;
public class NativeProjection {

  private static final Logger LOGGER = LoggerFactory.getLogger(NativeProjection.class);

  /**
   * Ordered list of SELECT projections (plain columns, aggregates, subqueries
   * and custom expressions).
   */
  private List<ProjectionBean> projections;

  /**
   * Column names emitted in the GROUP BY clause.
   */
  private List<String> groupProjections;

  /**
   * Available aggregate functions.
   */
  public enum AggregateProjection {
    /**
     * The AVG.
     */
    AVG("AVG"),
    /**
     * The SUM.
     */
    SUM("SUM"),
    /**
     * The MAX.
     */
    MAX("MAX"),
    /**
     * The MIN.
     */
    MIN("MIN"),
    /**
     * The COUNT.
     */
    COUNT("COUNT");

    /**
     * SQL function keyword emitted for this aggregate.
     */
    private String value;

    /**
     * Instantiates a new aggregate projection.
     *
     * @param value the SQL function keyword
     */
    AggregateProjection(String value) {
      this.value = value;
    }

    /**
     * Gets the SQL function keyword.
     *
     * @return the value
     */
    public String getValue() {
      return value;
    }
  }

  /**
   * Bean describing a single projection entry (plain column, aggregate,
   * subquery or custom expression).
   *
   * Note: for custom projections both {@link #columnName} and {@link #alias}
   * are null; callers inside {@link NativeProjection} must guard for that.
   */
  private class ProjectionBean {

    /**
     * Column name (null for custom projections).
     */
    private String columnName;

    /**
     * Column alias (null for custom projections).
     */
    private String alias;

    /**
     * Aggregate function, or null when this projection is not an aggregate.
     */
    private AggregateProjection aggregateProjection;

    /**
     * Whether this projection wraps a subquery.
     */
    private boolean subquery;

    /**
     * Subquery criteria; only set when {@link #subquery} is true.
     */
    private NativeCriteria criteria;

    /**
     * Custom projection expression; only set for custom projections.
     */
    private NativeExp customProjection;

    /**
     * Creates a plain column projection.
     *
     * @param columnName the column name
     * @param alias the alias
     */
    public ProjectionBean(String columnName, String alias) {
      this.columnName = columnName;
      this.alias = alias;
      this.subquery = false;
    }

    /**
     * Creates an aggregate projection.
     *
     * @param columnName the column name
     * @param alias the alias
     * @param aggregateProjection the aggregate function to apply
     */
    public ProjectionBean(String columnName, String alias,
                          AggregateProjection aggregateProjection) {
      this.columnName = columnName;
      this.alias = alias;
      this.aggregateProjection = aggregateProjection;
      this.subquery = false;
    }

    /**
     * Creates a subquery projection. The alias doubles as the column name.
     *
     * @param criteria the subquery criteria
     * @param alias the alias
     */
    public ProjectionBean(NativeCriteria criteria, String alias) {
      this.criteria = criteria;
      this.subquery = true;
      this.alias = alias;
      this.columnName = alias;
    }

    /**
     * Creates a projection backed by a custom expression.
     *
     * @param customProjection the custom expression
     */
    public ProjectionBean(NativeExp customProjection) {
      this.customProjection = customProjection;
    }

    private boolean isCustomProjection() {
      return customProjection != null;
    }

    /**
     * Gets the column name.
     *
     * @return the column name, or null for custom projections
     */
    public String getColumnName() {
      return columnName;
    }

    /**
     * Gets the alias.
     *
     * @return the alias, or null for custom projections
     */
    public String getAlias() {
      return alias;
    }

    /**
     * Checks if this projection is an aggregate.
     *
     * @return true, if an aggregate function is set
     */
    public boolean isAggregate() {
      return aggregateProjection != null;
    }

    /**
     * Checks if this projection is a subquery.
     *
     * @return true, if backed by a subquery
     */
    public boolean isSubquery() {
      return subquery;
    }

    /**
     * Renders this projection as a SQL fragment, quoting identifiers with the
     * dialect resolved from the given implementor.
     *
     * @param implementor the connection implementor used to look up the dialect
     * @return the SQL fragment including an {@code as alias} suffix
     *         (custom projections render themselves without one)
     */
    public String toSQL(String implementor) {
      StringBuilder sql = new StringBuilder();
      if (isAggregate()) {
        sql.append(aggregateProjection.getValue()).append("(").append(
            NativeProjection.getQuotedColumnName(implementor, columnName)).append(")");
      } else if (isSubquery()) {
        sql.append("(").append(criteria.toSQL()).append(")");
      } else if (isCustomProjection()) {
        // Custom expressions are rendered verbatim; no alias is appended.
        return customProjection.toSQL(implementor);
      } else {
        sql.append(NativeProjection.getQuotedColumnName(implementor, columnName));
      }
      return sql.append(" as ").append(NativeProjection.getQuotedColumnName(implementor, alias)).toString();
    }
  }

  /**
   * Creates an empty projection set.
   */
  public NativeProjection() {
    projections = new ArrayList<>();
    groupProjections = new ArrayList<>();
  }

  /**
   * Add projection with a generated alias.
   *
   * @param columnName the column name
   * @return the native projection
   * @throws IllegalStateException when columnName is blank
   */
  public NativeProjection addProjection(String columnName) {
    if (StringUtils.isBlank(columnName)) {
      throw new IllegalStateException("columnName is null!");
    }
    projections.add(new ProjectionBean(columnName, VarGenerator.gen(columnName)));
    return this;
  }

  /**
   * Add projection with alias.
   *
   * @param columnName the column name
   * @param alias the alias
   * @return the native projection
   * @throws IllegalStateException when columnName is blank
   */
  public NativeProjection addProjection(String columnName, String alias) {
    if (StringUtils.isBlank(columnName)) {
      throw new IllegalStateException("columnName is null!");
    }
    projections.add(new ProjectionBean(columnName, alias));
    return this;
  }

  /**
   * <p>
   * Special approach of creating projection. With this approach the column name + alias could be provided as single String.
   * This method removes the requirement of creating a new map with column names as keys and aliases as values; that
   * kind of map is created out-of-the-box under the hood.
   * </p>
   *
   * <p>The column with alias should be provided on the following pattern: <strong>"columnName as alias"</strong></p>
   *
   * @param columnsWithAliases column name with alias according to the pattern <strong>"columnName as alias"</strong>, for example
   *                           <strong>"p.name as productName"</strong>
   * @return {@link NativeProjection}
   * @throws IllegalArgumentException when an entry does not match the pattern
   */
  public NativeProjection addProjectionWithAliases(String... columnsWithAliases) {
    Preconditions.checkNotNull(columnsWithAliases);
    Map<String, String> columnsProjection = Maps.newLinkedHashMap();
    for (String columnWithAlias : columnsWithAliases) {
      // Case-insensitive split on " AS " keeps both "as" and "AS" working.
      final String[] result = columnWithAlias.split("(?i) AS ");
      if (result.length != 2) {
        throw new IllegalArgumentException("There is the problem with provides column and alias statement: \" " +
            columnWithAlias + " \". Please check the statement. It should be pattern \"columnName as alias\"");
      }
      // Trim so "col AS  alias" does not produce identifiers with stray spaces.
      columnsProjection.put(result[0].trim(), result[1].trim());
    }
    return addProjection(columnsProjection);
  }

  /**
   * Add projections from a list of columns; aliases are generated.
   *
   * @param columns list columns
   * @return the native projection
   * @throws IllegalStateException when the list is null or empty
   */
  public NativeProjection addProjection(List<String> columns) {
    if (columns == null || columns.isEmpty()) {
      throw new IllegalStateException("column is empty!");
    }
    for (String col : columns) {
      projections.add(new ProjectionBean(col, VarGenerator.gen(col)));
    }
    return this;
  }

  /**
   * Add a custom projection expression rendered verbatim.
   *
   * @param customProjection the custom expression
   * @return the native projection
   */
  public NativeProjection addProjection(NativeExp customProjection) {
    projections.add(new ProjectionBean(customProjection));
    return this;
  }

  /**
   * Add projections from a string array; aliases are generated.
   *
   * @param columns list columns
   * @return the native projection
   */
  public NativeProjection addProjection(String... columns) {
    Preconditions.checkNotNull(columns);
    return addProjection(Lists.newArrayList(columns));
  }

  /**
   * Add projection subquery with alias.
   *
   * @param subquery the subquery
   * @param alias the alias
   * @return the native projection
   * @throws IllegalStateException when subquery is null
   */
  public NativeProjection addSubqueryProjection(NativeCriteria subquery, String alias) {
    if (subquery == null) {
      throw new IllegalStateException("subquery is null!");
    }
    projections.add(new ProjectionBean(subquery, alias));
    return this;
  }

  /**
   * Add projections as a map: column name - alias.
   *
   * @param columns columns
   * @return the native projection
   * @throws IllegalStateException when the map is null
   */
  public NativeProjection addProjection(Map<String, String> columns) {
    if (columns == null) {
      throw new IllegalStateException("columns is empty!");
    }
    for (Map.Entry<String, String> entry : columns.entrySet()) {
      projections.add(new ProjectionBean(entry.getKey(), entry.getValue()));
    }
    return this;
  }

  /**
   * Remove the first projection whose column name matches (case-insensitive).
   * No-op when nothing matches.
   *
   * @param projection the column name to remove
   * @return the native projection
   */
  public NativeProjection removeProjection(String projection) {
    ProjectionBean bean = null;
    for (ProjectionBean b : projections) {
      // Flipped receiver: custom projections have a null column name and the
      // previous b.getColumnName().equalsIgnoreCase(...) call raised an NPE.
      if (projection.equalsIgnoreCase(b.getColumnName())) {
        bean = b;
        break;
      }
    }
    projections.remove(bean);
    return this;
  }

  /**
   * Clear projections.
   *
   * @return the native projection
   */
  public NativeProjection clearProjections() {
    projections.clear();
    return this;
  }

  /**
   * Add aggregate with a generated alias.
   *
   * @param columnName the column name
   * @param projection the aggregate function
   * @return the native projection
   * @throws IllegalStateException when columnName is blank or projection is null
   */
  public NativeProjection addAggregateProjection(String columnName,
                                                 AggregateProjection projection) {
    if (StringUtils.isBlank(columnName)) {
      throw new IllegalStateException("columnName is null!");
    }
    if (projection == null) {
      throw new IllegalStateException("projection is null!");
    }
    projections.add(new ProjectionBean(columnName,
        VarGenerator.gen(columnName), projection));
    return this;
  }

  /**
   * Add aggregate with alias.
   *
   * @param columnName the column name
   * @param alias the alias
   * @param projection the aggregate function
   * @return the native projection
   * @throws IllegalStateException when columnName is blank or projection is null
   */
  public NativeProjection addAggregateProjection(String columnName, String alias,
                                                 AggregateProjection projection) {
    if (StringUtils.isBlank(columnName)) {
      throw new IllegalStateException("columnName is null!");
    }
    if (projection == null) {
      throw new IllegalStateException("projection is null!");
    }
    projections.add(new ProjectionBean(columnName, alias, projection));
    return this;
  }

  /**
   * Add group projection.
   *
   * @param columnName the column name
   * @return the native projection
   * @throws IllegalStateException when columnName is blank
   */
  public NativeProjection addGroupProjection(String columnName) {
    if (StringUtils.isBlank(columnName)) {
      throw new IllegalStateException("columnName is null!");
    }
    groupProjections.add(columnName);
    return this;
  }

  /**
   * Return the index of the projection whose column name or alias matches
   * (case-insensitive).
   *
   * Method returns -1 when the column projection does not exist.
   *
   * @param columnName the column name or alias
   * @return the projection index, or -1
   */
  public int getProjectionIndex(String columnName) {
    for (int i = 0; i < projections.size(); i++) {
      ProjectionBean bean = projections.get(i);
      // Flipped receivers: custom projections carry null columnName/alias and
      // the previous code raised an NPE on them. equalsIgnoreCase(null) is false.
      if (columnName.equalsIgnoreCase(bean.getColumnName()) ||
          columnName.equalsIgnoreCase(bean.getAlias())) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Checks whether any projection is present.
   *
   * @return true, if at least one projection was added
   */
  public boolean hasProjections() {
    return projections != null && !projections.isEmpty();
  }

  /**
   * Count projections.
   *
   * @return the number of projections
   */
  public int countProjections() {
    return projections.size();
  }

  /**
   * Checks whether any aggregate projection exists.
   *
   * @return true, if at least one aggregate was added
   */
  public boolean hasAggregates() {
    for (ProjectionBean bean : projections) {
      if (bean.isAggregate()) {
        return true;
      }
    }
    return false;
  }

  /**
   * Renders the comma-separated SELECT projection list.
   *
   * @param implementor the connection implementor used to look up the dialect
   * @return the SQL fragment
   */
  public String projectionToSQL(String implementor) {
    StringBuilder sqlBuilder = new StringBuilder();
    boolean first = true;
    for (ProjectionBean bean : projections) {
      if (first) {
        sqlBuilder.append(bean.toSQL(implementor));
        first = false;
      } else {
        sqlBuilder.append(", ").append(bean.toSQL(implementor));
      }
    }
    return sqlBuilder.toString();
  }

  /**
   * Renders the GROUP BY clause, or an empty string when no grouping applies.
   *
   * @param implementor the connection implementor used to look up the dialect
   * @return the SQL fragment (with a trailing space) or ""
   */
  public String groupByToSQL(String implementor) {
    // Automatically add non-aggregate projections to the GROUP BY clause
    // when aggregates exist, as required by standard SQL.
    if (hasAggregates()) {
      for (ProjectionBean bean : projections) {
        // Skip custom projections (null column name) — previously they pushed
        // a null into groupProjections and caused an NPE while quoting below.
        if (!bean.isAggregate() && !bean.isSubquery() &&
            bean.getColumnName() != null &&
            !groupProjections.contains(bean.getColumnName())) {
          groupProjections.add(bean.getColumnName());
        }
      }
    }
    if (!groupProjections.isEmpty()) {
      StringBuilder sqlBuilder = new StringBuilder();
      sqlBuilder.append("GROUP BY ");
      boolean first = true;
      for (String group : groupProjections) {
        if (first) {
          sqlBuilder.append(NativeProjection.getQuotedColumnName(implementor, group));
          first = false;
        } else {
          sqlBuilder.append(", ").append(NativeProjection.getQuotedColumnName(implementor, group));
        }
      }
      return sqlBuilder.append(" ").toString();
    }
    return "";
  }

  /**
   * Returns the quoted aliases of all projections that carry one.
   * Custom projections have no alias and are skipped (previously they
   * caused an NPE while quoting a null identifier).
   *
   * @param implementor the connection implementor used to look up the dialect
   * @return the quoted aliases
   */
  public List<String> getAliases(String implementor) {
    return projections.stream()
        .map(ProjectionBean::getAlias)
        .filter(alias -> alias != null)
        .map(alias -> getQuotedColumnName(implementor, alias))
        .collect(Collectors.toList());
  }

  /**
   * Quotes a (possibly dot-qualified) identifier using the dialect registered
   * for the given implementor; falls back to backtick quoting of each segment
   * when no suitable dialect is found.
   *
   * @param implementor the connection implementor used to look up the dialect
   * @param columnName the identifier to quote
   * @return the quoted identifier
   */
  public static String getQuotedColumnName(String implementor, String columnName) {
    try {
      JdbcDialect jdbcDialect = DataConnectionHelper.lookupDialect(implementor);
      if (jdbcDialect != null) {
        return jdbcDialect.getQuotedFieldName(null, columnName);
      }
    } catch (JdbcDataConnectionException e) {
      LOGGER.debug("no suitable dialect for quote : {}", implementor);
    }
    // Quote each dot-separated segment so "t.col" becomes "`t`.`col`".
    return Arrays.stream(columnName.split("\\."))
        .map(spliced -> "`" + spliced + "`")
        .collect(Collectors.joining("."));
  }
}
| |
/*L
* Copyright Northrop Grumman Information Technology.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/nci-report-writer/LICENSE.txt for details.
*/
package gov.nih.nci.evs.reportwriter.bean;
import gov.nih.nci.evs.reportwriter.properties.*;
import gov.nih.nci.evs.reportwriter.utils.*;
import gov.nih.nci.evs.utils.*;
import gov.nih.nci.security.*;
import gov.nih.nci.security.authentication.*;
import gov.nih.nci.security.authorization.domainobjects.User;
import java.util.*;
import javax.faces.event.*;
import javax.faces.model.*;
import javax.naming.*;
import javax.servlet.http.*;
import org.apache.log4j.*;
import java.net.*;
import java.sql.*;
import javax.sql.DataSource;
import gov.nih.nci.security.util.*;
import gov.nih.nci.evs.reportwriter.security.CSMAuthorizationManager;
/**
*
*/
/**
* @author EVS Team (Kim Ong, David Yee)
* @version 1.0
*/
/**
 * JSF backing bean handling CSM-based login, admin-privilege lookup and
 * session initialization for the report writer application.
 */
public class LoginBean {
    private static Logger _logger = Logger.getLogger(LoginBean.class);

    // Obfuscated source for the debug password; see modify().
    private static final String ADDRESS = "2115 East Jefferson, Rockville 20852";
    private static final String APP_NAME = "ncireportwriter";
    private static final String CSM_LOCKOUT_TIME =
        AppProperties.getInstance()
            .getProperty(AppProperties.CSM_LOCKOUT_TIME);
    private static final String CSM_ALLOWED_LOGIN_TIME =
        AppProperties.getInstance()
            .getProperty(AppProperties.CSM_ALLOWED_LOGIN_TIME);
    // Hard-coded instead of AppProperties.CSM_ALLOWED_ATTEMPTS; effectively
    // disables attempt-based lockout. NOTE(review): confirm this is intended.
    private static final String CSM_ALLOWED_ATTEMPTS = "1000";

    private String _userid;       // login name entered by the user
    private String _password;     // password entered by the user
    private long _roleGroupId;
    private String _selectedTask = null;
    private Boolean _isAdmin = null;
    private InitialContext _context = null;

    public LoginBean() {
        super();
        LockoutManager.initialize(CSM_LOCKOUT_TIME, CSM_ALLOWED_LOGIN_TIME,
            CSM_ALLOWED_ATTEMPTS);
    }

    public void setSelectedTask(String selectedTask) {
        _selectedTask = selectedTask;
        _logger.debug("selectedTask: " + _selectedTask);
    }

    public String getUserid() {
        return _userid;
    }

    public void setUserid(String newUserid) {
        _userid = newUserid;
    }

    public String getPassword() {
        return _password;
    }

    public void setPassword(String newPassword) {
        _password = newPassword;
    }

    public long getRoleGroupId() {
        return _roleGroupId;
    }

    public void setRoleGroupId(long roleGroupId) {
        _roleGroupId = roleGroupId;
    }

    /**
     * Looks up the CSM user record for the given login name.
     *
     * @param userid the CSM login name
     * @return the CSM user (never null)
     * @throws Exception when the authorization manager or the user cannot be found
     */
    public static gov.nih.nci.security.authorization.domainobjects.User getCSMUser(String userid) throws Exception {
        AuthorizationManager manager = CSMAuthorizationManager.getAuthorizationManagerDirectly(APP_NAME);
        if (manager == null)
            throw new Exception("Can not get authorization manager for: " + APP_NAME);
        gov.nih.nci.security.authorization.domainobjects.User user = manager.getUser(userid);
        if (user == null)
            throw new Exception("Error retrieving CSM userid " + userid + ".");
        return user;
    }

    /**
     * Checks whether the user may EXECUTE the "admin-pe" protection element.
     *
     * @param userid the CSM login name
     * @return true when the user has admin privilege
     * @throws Exception when the CSM lookup fails
     */
    private Boolean hasAdminPrivilege(String userid) throws Exception {
        // getCSMUser throws rather than returning null, so no null check needed.
        gov.nih.nci.security.authorization.domainobjects.User user = getCSMUser(userid);
        AuthorizationManager manager = CSMAuthorizationManager.getAuthorizationManagerDirectly(APP_NAME);
        boolean permission =
            manager.checkPermission(user.getLoginName(), "admin-pe", "EXECUTE");
        // Boolean.valueOf avoids the deprecated Boolean(boolean) constructor.
        return Boolean.valueOf(permission);
    }

    /**
     * Returns the e-mail address stored for the CSM user, or null when none is set.
     *
     * @param userid the CSM login name
     * @return the e-mail address or null
     * @throws Exception when the CSM lookup fails
     */
    private String getEmail(String userid) throws Exception {
        gov.nih.nci.security.authorization.domainobjects.User user = getCSMUser(userid);
        return user.getEmailId();
    }

    public List<SelectItem> getTaskList() {
        return DataUtils.getTaskList(_isAdmin);
    }

    /**
     * Loads the application-level user record via the SDK client.
     *
     * @param loginName the login name to search by
     * @return the user, or null when not found or on error
     */
    public gov.nih.nci.evs.reportwriter.bean.User getUser(String loginName) {
        try {
            SDKClientUtil sdkClientUtil = new SDKClientUtil();
            String FQName = "gov.nih.nci.evs.reportwriter.bean.User";
            String methodName = "setLoginName";
            Object obj = sdkClientUtil.search(FQName, methodName, loginName);
            if (obj == null)
                throw new Exception("Error retrieving user: " + loginName
                    + ". sdkClientUtil.search returns null");
            return (gov.nih.nci.evs.reportwriter.bean.User) obj;
        } catch (Exception e) {
            ExceptionUtils.print(_logger, e, " * getUser(" + loginName
                + ") method returns null");
        }
        return null;
    }

    /*
    Example container datasource configuration:
    <Resource name="ncirw"
        auth="Container"
        type="javax.sql.DataSource"
        username="@database.user@"
        password="@database.password@"
        driverClassName="@database.driver@"
        url="@database.url@"
        maxActive="10"
        maxIdle="4"/>
    */
    /**
     * Validates a user directly against the csm_user table.
     *
     * @param user the login name
     * @param password the plaintext password (encrypted before comparison)
     * @return true when a matching row exists
     */
    public boolean validateUser(String user, String password) {
        Connection conn = null;
        PreparedStatement st = null;
        ResultSet rs = null;
        boolean match = false;
        try {
            InitialContext ctx = new InitialContext();
            Context envCtx = (Context) ctx.lookup("java:comp/env");
            DataSource ds = (DataSource) envCtx.lookup(APP_NAME);
            conn = ds.getConnection();
            try {
                password = new StringEncrypter().encrypt(password);
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            // Parameterized query: the previous version concatenated user input
            // into the SQL string, which allowed SQL injection.
            st = conn.prepareStatement(
                "SELECT LOGIN_NAME, PASSWORD FROM csm_user where LOGIN_NAME = ? and PASSWORD = ?");
            st.setString(1, user);
            st.setString(2, password);
            rs = st.executeQuery();
            match = rs.next();
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            try { if (rs != null) rs.close(); } catch (SQLException e) { e.printStackTrace(); }
            try { if (st != null) st.close(); } catch (SQLException e) { e.printStackTrace(); }
            try { if (conn != null) conn.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
        return match;
    }

    /**
     * JSF action: authenticates the user via CSM, populates the session with
     * uid / isAdmin / email, synchronizes the app user table, and returns the
     * navigation outcome.
     *
     * @return "success" or "failure"
     */
    public String loginAction() {
        _logger.debug("(*****************) calling loginAction ... ");
        try {
            _isAdmin = false;
            // Null-safe checks: previously a null field raised an NPE instead
            // of producing a user-facing message.
            if (_userid == null || _userid.length() <= 0)
                throw new Exception("Please enter your login ID.");
            if (_password == null || _password.length() <= 0)
                throw new Exception("Please enter your password.");
            _logger.debug("_userid: " + _userid);
            // Do not log the password: plaintext credentials must never reach log files.
            _logger.debug("SecurityServiceProvider.getAuthenticationManager: APP_NAME " + APP_NAME);
            _logger.debug("SecurityServiceProvider.getAuthenticationManager: CSM_LOCKOUT_TIME " + CSM_LOCKOUT_TIME);
            _logger.debug("SecurityServiceProvider.getAuthenticationManager: CSM_ALLOWED_LOGIN_TIME " + CSM_ALLOWED_LOGIN_TIME);
            _logger.debug("SecurityServiceProvider.getAuthenticationManager: CSM_ALLOWED_ATTEMPTS " + CSM_ALLOWED_ATTEMPTS);
            AuthenticationManager authenticationManager =
                SecurityServiceProvider.getAuthenticationManager(APP_NAME,
                    CSM_LOCKOUT_TIME, CSM_ALLOWED_LOGIN_TIME,
                    CSM_ALLOWED_ATTEMPTS);
            if (authenticationManager == null) {
                throw new Exception("NULL authenticationManager???");
            }
            if (!authenticationManager.login(_userid, _password)) {
                throw new Exception("Incorrect login credential.");
            }
            _logger.debug("(*) SecurityServiceProvider.login: success -- continue..." + _userid);
            HttpServletRequest request = HTTPUtils.getRequest();
            // getSession() creates a session when none exists, so it is non-null;
            // the previous code null-checked it only for the first attribute.
            HttpSession session = request.getSession();
            session.setAttribute("uid", _userid);
            _logger.debug("(************) calling .hasAdminPrivilege..");
            _isAdmin = hasAdminPrivilege(_userid);
            _logger.debug("(*) hasAdminPrivilege? " + _isAdmin);
            session.setAttribute("isAdmin", _isAdmin);
            String email = getEmail(_userid);
            session.setAttribute("email", email);
            gov.nih.nci.evs.reportwriter.bean.User user = getUser(_userid);
            if (user == null) {
                // Synchronize with CSM User table
                SDKClientUtil sdkclientutil = new SDKClientUtil();
                sdkclientutil.insertUser(_userid);
            }
            session.setAttribute("isSessionValid", Boolean.TRUE);
            HTTPUtils.getRequest().removeAttribute("loginWarning");
            return "success";
        } catch (Exception e) {
            String msg = reformatError(e.getMessage());
            _logger.error(StringUtils.SEPARATOR);
            ExceptionUtils
                .print(_logger, e, " * Error logging in: " + _userid);
            HTTPUtils.getRequest().setAttribute("loginWarning", msg);
            return "failure";
        }
    }

    /**
     * Fills in debug credentials when DEBUG_ON is set and the fields are empty.
     */
    private void useDebugUserid() {
        if (!AppProperties.getInstance().getBoolProperty(
            AppProperties.DEBUG_ON, false))
            return;
        if (_userid.length() <= 0)
            _userid = "rwadmin";
        if (_password.length() <= 0)
            _password = modify(ADDRESS);
    }

    /**
     * Derives the debug password from ADDRESS by reversing and stripping
     * fragments; kept byte-for-byte so the derived value is unchanged.
     */
    private String modify(String text) {
        text = new StringBuffer(text).reverse().toString();
        text = text.replace("tsaE ", "");
        text = text.replace("ellivkcoR ", "");
        text = text.replaceAll(",", "");
        text = text.replaceAll(" ", ".");
        return text;
    }

    /**
     * Maps raw CSM error messages to user-friendly text.
     */
    private String reformatError(String text) {
        if (text.equals("Invalid Login Credentials"))
            return "Invalid login credentials.";
        if (text.equals("Allowed Attempts Reached ! User Name is locked out !"))
            return "Allowed attempts reached. Login ID is currently locked out.";
        return text;
    }

    public void changeTaskSelection(ValueChangeEvent vce) {
        String newValue = (String) vce.getNewValue();
        setSelectedTask(newValue);
    }

    public Object getService(String serviceBeanName)
        throws javax.naming.NamingException {
        return _context.lookup(serviceBeanName);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.cleaner;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.snapshot.DisabledTableSnapshotHandler;
import org.apache.hadoop.hbase.master.snapshot.SnapshotHFileCleaner;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
import org.apache.hadoop.hbase.regionserver.CompactedHFilesDischarger;
import org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotReferenceUtil;
import org.apache.hadoop.hbase.snapshot.SnapshotTestingUtils;
import org.apache.hadoop.hbase.snapshot.UnknownSnapshotException;
import org.apache.hadoop.hbase.testclassification.MasterTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
/**
* Test the master-related aspects of a snapshot
*/
@Category({MasterTests.class, MediumTests.class})
public class TestSnapshotFromMaster {
private static final Log LOG = LogFactory.getLog(TestSnapshotFromMaster.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final int NUM_RS = 2;
private static Path rootDir;
private static FileSystem fs;
private static HMaster master;
// for hfile archiving test.
private static Path archiveDir;
private static final byte[] TEST_FAM = Bytes.toBytes("fam");
private static final TableName TABLE_NAME =
TableName.valueOf("test");
// refresh the cache every 1/2 second
private static final long cacheRefreshPeriod = 500;
private static final int blockingStoreFiles = 12;
/**
* Setup the config for the cluster
*/
@BeforeClass
public static void setupCluster() throws Exception {
  // Tune the configuration, boot a mini cluster, then cache the handles
  // (master, filesystem, root/archive dirs) that individual tests use.
  Configuration conf = UTIL.getConfiguration();
  setupConf(conf);
  UTIL.startMiniCluster(NUM_RS);
  master = UTIL.getMiniHBaseCluster().getMaster();
  fs = UTIL.getDFSCluster().getFileSystem();
  rootDir = master.getMasterFileSystem().getRootDir();
  archiveDir = new Path(rootDir, HConstants.HFILE_ARCHIVE_DIRECTORY);
}
/**
 * Tunes the cluster configuration for snapshot testing: small flushes,
 * aggressive compactions, no extra cleaners, snapshots enabled.
 */
private static void setupConf(Configuration conf) {
  // Disable the region server UI. The previous key "hbase.regionsever.info.port"
  // was misspelled and therefore had no effect.
  conf.setInt("hbase.regionserver.info.port", -1);
  // change the flush size to a small amount, regulating number of store files
  conf.setInt("hbase.hregion.memstore.flush.size", 25000);
  // so make sure we get a compaction when doing a load, but keep around some
  // files in the store
  conf.setInt("hbase.hstore.compaction.min", 2);
  conf.setInt("hbase.hstore.compactionThreshold", 5);
  // block writes if we get to 12 store files
  conf.setInt("hbase.hstore.blockingStoreFiles", blockingStoreFiles);
  // Ensure no extra cleaners on by default (e.g. TimeToLiveHFileCleaner)
  conf.set(HFileCleaner.MASTER_HFILE_CLEANER_PLUGINS, "");
  conf.set(HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS, "");
  // Enable snapshot
  conf.setBoolean(SnapshotManager.HBASE_SNAPSHOT_ENABLED, true);
  conf.setLong(SnapshotHFileCleaner.HFILE_CACHE_REFRESH_PERIOD_CONF_KEY, cacheRefreshPeriod);
  conf.set(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
      ConstantSizeRegionSplitPolicy.class.getName());
  conf.setInt("hbase.hfile.compactions.cleaner.interval", 20 * 1000);
}
@Before
public void setup() throws Exception {
// Fresh table per test; also clear any snapshot handler a previous test
// may have installed on the master's SnapshotManager.
UTIL.createTable(TABLE_NAME, TEST_FAM);
master.getSnapshotManager().setSnapshotHandlerForTesting(TABLE_NAME, null);
}
@After
public void tearDown() throws Exception {
// Drop the test table first, then remove all snapshots and archived files
// so each test starts from a clean filesystem state.
UTIL.deleteTable(TABLE_NAME);
SnapshotTestingUtils.deleteAllSnapshots(UTIL.getAdmin());
SnapshotTestingUtils.deleteArchiveDirectory(UTIL);
}
@AfterClass
public static void cleanupTest() throws Exception {
  // Best-effort shutdown: a failure here must not fail the test run.
  try {
    UTIL.shutdownMiniCluster();
  } catch (Exception ignored) {
    // intentionally swallowed
  }
}
/**
* Test that the contract from the master for checking on a snapshot are valid.
* <p>
* <ol>
* <li>If a snapshot fails with an error, we expect to get the source error.</li>
* <li>If there is no snapshot name supplied, we should get an error.</li>
* <li>If asking about a snapshot has hasn't occurred, you should get an error.</li>
* </ol>
*/
/**
 * Verifies the master's isSnapshotDone contract: unknown snapshots (with or
 * without a name) raise UnknownSnapshotException, a finished handler reports
 * done, and an on-disk snapshot is found.
 */
@Test(timeout = 300000)
public void testIsDoneContract() throws Exception {
  IsSnapshotDoneRequest.Builder builder = IsSnapshotDoneRequest.newBuilder();
  String snapshotName = "asyncExpectedFailureTest";
  // check that we get an exception when looking up snapshot where one hasn't happened
  SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
  // and that we get the same issue, even if we specify a name
  SnapshotDescription desc = SnapshotDescription.newBuilder()
      .setName(snapshotName).setTable(TABLE_NAME.getNameAsString()).build();
  builder.setSnapshot(desc);
  SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
  // set a mock handler to simulate a completed (error-free) snapshot
  DisabledTableSnapshotHandler mockHandler = Mockito.mock(DisabledTableSnapshotHandler.class);
  Mockito.when(mockHandler.getException()).thenReturn(null);
  Mockito.when(mockHandler.getSnapshot()).thenReturn(desc);
  // Boolean.TRUE replaces the deprecated new Boolean(true) constructor.
  Mockito.when(mockHandler.isFinished()).thenReturn(Boolean.TRUE);
  Mockito.when(mockHandler.getCompletionTimestamp())
      .thenReturn(EnvironmentEdgeManager.currentTime());
  master.getSnapshotManager()
      .setSnapshotHandlerForTesting(TABLE_NAME, mockHandler);
  // if we do a lookup without a snapshot name, we should fail - you should always know your name
  builder = IsSnapshotDoneRequest.newBuilder();
  SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
  // then do the lookup for the snapshot that it is done
  builder.setSnapshot(desc);
  IsSnapshotDoneResponse response =
      master.getMasterRpcServices().isSnapshotDone(null, builder.build());
  assertTrue("Snapshot didn't complete when it should have.", response.getDone());
  // now try the case where we are looking for a snapshot we didn't take
  builder.setSnapshot(SnapshotDescription.newBuilder().setName("Not A Snapshot").build());
  SnapshotTestingUtils.expectSnapshotDoneException(master, builder.build(),
      UnknownSnapshotException.class);
  // then create a snapshot to the fs and make sure that we can find it when checking done
  snapshotName = "completed";
  desc = createSnapshot(snapshotName);
  builder.setSnapshot(desc);
  response = master.getMasterRpcServices().isSnapshotDone(null, builder.build());
  assertTrue("Completed, on-disk snapshot not found", response.getDone());
}
/**
 * Verifies that getCompletedSnapshots reflects the snapshots actually
 * written to the filesystem: zero, then one, then two, in creation order.
 */
@Test(timeout = 300000)
public void testGetCompletedSnapshots() throws Exception {
  // With no snapshots on disk, the master should report an empty list.
  GetCompletedSnapshotsRequest request = GetCompletedSnapshotsRequest.newBuilder().build();
  GetCompletedSnapshotsResponse response =
      master.getMasterRpcServices().getCompletedSnapshots(null, request);
  assertEquals("Found unexpected number of snapshots", 0, response.getSnapshotsCount());

  // Materialize one snapshot and expect exactly that one back.
  SnapshotDescription first = createSnapshot("completed");
  List<SnapshotDescription> expected = Lists.newArrayList(first);
  response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
  assertEquals("Found unexpected number of snapshots", 1, response.getSnapshotsCount());
  assertEquals("Returned snapshots don't match created snapshots", expected,
      response.getSnapshotsList());

  // A second snapshot should grow the listing to two.
  SnapshotDescription second = createSnapshot("completed_two");
  expected.add(second);
  response = master.getMasterRpcServices().getCompletedSnapshots(null, request);
  assertEquals("Found unexpected number of snapshots", 2, response.getSnapshotsCount());
  assertEquals("Returned snapshots don't match created snapshots", expected,
      response.getSnapshotsList());
}
@Test(timeout = 300000)
public void testDeleteSnapshot() throws Exception {
  // Build a delete request for a snapshot that does not exist yet.
  String snapshotName = "completed";
  SnapshotDescription snapshot = SnapshotDescription.newBuilder().setName(snapshotName).build();
  DeleteSnapshotRequest request =
      DeleteSnapshotRequest.newBuilder().setSnapshot(snapshot).build();

  // Deleting an unknown snapshot must be rejected by the master.
  try {
    master.getMasterRpcServices().deleteSnapshot(null, request);
    fail("Master didn't throw exception when attempting to delete snapshot that doesn't exist");
  } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException e) {
    // Expected: the snapshot is unknown to the master.
  }

  // Once the snapshot actually exists on the filesystem, the same request must succeed
  // without throwing.
  createSnapshot(snapshotName);
  master.getMasterRpcServices().deleteSnapshot(null, request);
}
/**
 * Test that the snapshot hfile archive cleaner works correctly. HFiles that are in snapshots
 * should be retained, while those that are not in a snapshot should be deleted.
 * @throws Exception on failure
 */
@Test(timeout = 300000)
public void testSnapshotHFileArchiving() throws Exception {
  Admin admin = UTIL.getAdmin();
  // make sure we don't fail on listing snapshots
  SnapshotTestingUtils.assertNoSnapshots(admin);
  // recreate test table with disabled compactions; otherwise compaction may happen before
  // snapshot, the call after snapshot will be a no-op and checks will fail
  UTIL.deleteTable(TABLE_NAME);
  TableDescriptor td = TableDescriptorBuilder.newBuilder(TABLE_NAME)
      .addColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAM))
      .setCompactionEnabled(false)
      .build();
  UTIL.getAdmin().createTable(td);
  // load the table: each load+flush produces a store file, giving the later compaction
  // something to archive
  for (int i = 0; i < blockingStoreFiles / 2; i ++) {
    UTIL.loadTable(UTIL.getConnection().getTable(TABLE_NAME), TEST_FAM);
    UTIL.flush(TABLE_NAME);
  }
  // disable the table so we can take a snapshot
  admin.disableTable(TABLE_NAME);
  // take a snapshot of the table
  String snapshotName = "snapshot";
  byte[] snapshotNameBytes = Bytes.toBytes(snapshotName);
  admin.snapshot(snapshotNameBytes, TABLE_NAME);
  LOG.info("After snapshot File-System state");
  FSUtils.logFileSystemState(fs, rootDir, LOG);
  // ensure we only have one snapshot
  SnapshotTestingUtils.assertOneSnapshotThatMatches(admin, snapshotNameBytes, TABLE_NAME);
  td = TableDescriptorBuilder.newBuilder(td)
      .setCompactionEnabled(true)
      .build();
  // enable compactions now
  admin.modifyTable(td);
  // re-enable the table so we can compact the regions
  admin.enableTable(TABLE_NAME);
  // compact the files so we get some archived files for the table we just snapshotted
  List<HRegion> regions = UTIL.getHBaseCluster().getRegions(TABLE_NAME);
  for (HRegion region : regions) {
    region.waitForFlushesAndCompactions(); // enable can trigger a compaction, wait for it.
    region.compactStores(); // min is 2 so will compact and archive
  }
  // find the region server that actually hosts the table's regions
  List<RegionServerThread> regionServerThreads = UTIL.getMiniHBaseCluster()
      .getRegionServerThreads();
  HRegionServer hrs = null;
  for (RegionServerThread rs : regionServerThreads) {
    if (!rs.getRegionServer().getRegions(TABLE_NAME).isEmpty()) {
      hrs = rs.getRegionServer();
      break;
    }
  }
  // NOTE(review): hrs stays null if no region server hosts the table; the discharger is
  // assumed to tolerate that here — confirm, or fail fast with an assertion.
  CompactedHFilesDischarger cleaner = new CompactedHFilesDischarger(100, null, hrs, false);
  cleaner.chore();
  LOG.info("After compaction File-System state");
  FSUtils.logFileSystemState(fs, rootDir, LOG);
  // make sure the cleaner has run
  LOG.debug("Running hfile cleaners");
  ensureHFileCleanersRun();
  LOG.info("After cleaners File-System state: " + rootDir);
  FSUtils.logFileSystemState(fs, rootDir, LOG);
  // get the snapshot files for the table
  Path snapshotTable = SnapshotDescriptionUtils.getCompletedSnapshotDir(snapshotName, rootDir);
  Set<String> snapshotHFiles = SnapshotReferenceUtil.getHFileNames(
      UTIL.getConfiguration(), fs, snapshotTable);
  // check that the files in the archive contain the ones that we need for the snapshot
  LOG.debug("Have snapshot hfiles:");
  for (String fileName : snapshotHFiles) {
    LOG.debug(fileName);
  }
  // get the archived files for the table
  Collection<String> archives = getHFiles(archiveDir, fs, TABLE_NAME);
  // get the hfiles for the table
  Collection<String> hfiles = getHFiles(rootDir, fs, TABLE_NAME);
  // every snapshot-referenced hfile must still exist, either live or archived
  for (String fileName : snapshotHFiles) {
    boolean exist = archives.contains(fileName) || hfiles.contains(fileName);
    assertTrue("Archived hfiles " + archives
        + " and table hfiles " + hfiles + " is missing snapshot file:" + fileName, exist);
  }
  // delete the existing snapshot
  admin.deleteSnapshot(snapshotNameBytes);
  SnapshotTestingUtils.assertNoSnapshots(admin);
  // make sure that we don't keep around the hfiles that aren't in a snapshot
  // make sure we wait long enough to refresh the snapshot hfile: force the snapshot
  // cleaner's file cache to refresh so it notices the deletion immediately
  List<BaseHFileCleanerDelegate> delegates = UTIL.getMiniHBaseCluster().getMaster()
      .getHFileCleaner().cleanersChain;
  for (BaseHFileCleanerDelegate delegate: delegates) {
    if (delegate instanceof SnapshotHFileCleaner) {
      ((SnapshotHFileCleaner)delegate).getFileCacheForTesting().triggerCacheRefreshForTesting();
    }
  }
  // run the cleaner again; with the snapshot gone, nothing protects the archived files
  LOG.debug("Running hfile cleaners");
  ensureHFileCleanersRun();
  LOG.info("After delete snapshot cleaners run File-System state");
  FSUtils.logFileSystemState(fs, rootDir, LOG);
  archives = getHFiles(archiveDir, fs, TABLE_NAME);
  assertEquals("Still have some hfiles in the archive, when their snapshot has been deleted.", 0,
      archives.size());
}
/**
 * Collect the names of all HFiles for the given table under the specified root directory.
 *
 * @param dir root directory to search under (e.g. the archive dir or the hbase root dir)
 * @param fs filesystem hosting {@code dir}
 * @param tableName table whose store file names should be listed
 * @return all the HFiles for a given table in the specified dir
 * @throws IOException on expected failure
 */
private static Collection<String> getHFiles(Path dir, FileSystem fs, TableName tableName)
    throws IOException {
  Path tableDir = FSUtils.getTableDir(dir, tableName);
  return SnapshotTestingUtils.listHFileNames(fs, tableDir);
}
/**
 * Make sure the {@link HFileCleaner HFileCleaners} run at least once
 */
private static void ensureHFileCleanersRun() {
  // chore() performs one synchronous cleaning pass on the master's HFile cleaner.
  UTIL.getHBaseCluster().getMaster().getHFileCleaner().chore();
}
/**
 * Writes a v2 snapshot with the given name directly to the filesystem, bypassing the
 * master's snapshot machinery, and returns its description.
 */
private SnapshotDescription createSnapshot(final String snapshotName) throws IOException {
  SnapshotTestingUtils.SnapshotMock mock =
      new SnapshotTestingUtils.SnapshotMock(UTIL.getConfiguration(), fs, rootDir);
  SnapshotTestingUtils.SnapshotMock.SnapshotBuilder snapshotBuilder =
      mock.createSnapshotV2(snapshotName, "test", 0);
  snapshotBuilder.commit();
  return snapshotBuilder.getSnapshotDescription();
}
}
| |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.management.snmp.mib;
import org.json.JSONArray;
import org.json.JSONException;
import org.kaazing.gateway.management.Utils;
import org.kaazing.gateway.management.context.ManagementContext;
import org.kaazing.gateway.management.gateway.GatewayManagementBean;
import org.kaazing.gateway.management.snmp.SummaryDataIntervalMO;
import org.snmp4j.agent.DuplicateRegistrationException;
import org.snmp4j.agent.MOGroup;
import org.snmp4j.agent.MOServer;
import org.snmp4j.agent.mo.DefaultMOMutableRow2PC;
import org.snmp4j.agent.mo.DefaultMOMutableTableModel;
import org.snmp4j.agent.mo.DefaultMOTable;
import org.snmp4j.agent.mo.DefaultMOTableRow;
import org.snmp4j.agent.mo.MOAccessImpl;
import org.snmp4j.agent.mo.MOColumn;
import org.snmp4j.agent.mo.MOFactory;
import org.snmp4j.agent.mo.MOMutableColumn;
import org.snmp4j.agent.mo.MOScalar;
import org.snmp4j.agent.mo.MOTable;
import org.snmp4j.agent.mo.MOTableIndex;
import org.snmp4j.agent.mo.MOTableModel;
import org.snmp4j.agent.mo.MOTableRow;
import org.snmp4j.agent.mo.MOTableSubIndex;
import org.snmp4j.agent.mo.snmp.AgentCapabilityList;
import org.snmp4j.agent.request.SubRequest;
import org.snmp4j.event.CounterEvent;
import org.snmp4j.event.CounterListener;
import org.snmp4j.smi.Counter64;
import org.snmp4j.smi.Integer32;
import org.snmp4j.smi.OID;
import org.snmp4j.smi.OctetString;
import org.snmp4j.smi.SMIConstants;
import org.snmp4j.smi.Variable;
/**
* MIB support for Gateway-level dynamic data.
* <p/>
* Kaazing's SNMP support is based on the SNMP4J open-source library under the Apache 2.0 license. To see the full text of the
* license, please see the Kaazing third-party licenses file.
*/
public class GatewayManagementMIB implements MOGroup, CounterListener, AgentCapabilityList {

    private final ManagementContext managementContext;

    // sysOR table support for the AgentCapabilityList interface.
    // NOTE(review): neither field is ever assigned in this class, so addSysOREntry()
    // and removeSysOREntry() would throw NullPointerException if invoked — confirm
    // whether initialization happens elsewhere or these entry points are unused.
    private DefaultMOTable sysOREntry;
    private DefaultMOMutableTableModel sysOREntryModel;

    // Managed objects backing the gateway table and the two summary-data scalars.
    private MOTableSubIndex[] gatewayEntryIndexes;
    private MOTableIndex gatewayEntryIndex;
    private MOTable gatewayEntry;
    private MOTableModel gatewayEntryModel;
    private MOScalar summaryDataFields;
    private MOScalar summaryDataNotificationInterval;

    public GatewayManagementMIB(ManagementContext managementContext, MOFactory factory) {
        this.managementContext = managementContext;
        createMO(factory);
    }

    /**
     * Builds the managed-object structure for this MIB: the gateway table (index,
     * columns, model) plus the summary-data-fields and notification-interval scalars.
     */
    private void createMO(MOFactory moFactory) {
        // Index definition: a single integer sub-index on column 1 of the gateway entry.
        OID gatewayConfigEntryIndexOID = ((OID) MIBConstants.oidGatewayEntry.clone()).append(1);
        gatewayEntryIndexes =
                new MOTableSubIndex[]{
                        moFactory.createSubIndex(gatewayConfigEntryIndexOID,
                                SMIConstants.SYNTAX_INTEGER, 1, 1),
                };
        gatewayEntryIndex =
                moFactory.createIndex(gatewayEntryIndexes, true);

        // Columns: all read-only except force-update-version-check, which is writable
        // so managers can trigger a version check (see GatewayEntryRow.setValue).
        MOColumn[] gatewayEntryColumns = new MOColumn[MIBConstants.GATEWAY_COLUMN_COUNT];
        gatewayEntryColumns[MIBConstants.indexGatewayIndex] =
                new MOMutableColumn(MIBConstants.colGatewayIndex,
                        SMIConstants.SYNTAX_INTEGER32,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexGatewayId] =
                new MOMutableColumn(MIBConstants.colGatewayId,
                        SMIConstants.SYNTAX_COUNTER64,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexTotalCurrentSessions] =
                new MOMutableColumn(MIBConstants.colTotalCurrentSessions,
                        SMIConstants.SYNTAX_COUNTER64,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexTotalBytesReceived] =
                new MOMutableColumn(MIBConstants.colTotalBytesReceived,
                        SMIConstants.SYNTAX_COUNTER64,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexTotalBytesSent] =
                new MOMutableColumn(MIBConstants.colTotalBytesSent,
                        SMIConstants.SYNTAX_COUNTER64,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexUptime] =
                new MOMutableColumn(MIBConstants.colUptime,
                        SMIConstants.SYNTAX_COUNTER64,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexStartTime] =
                new MOMutableColumn(MIBConstants.colStartTime,
                        SMIConstants.SYNTAX_COUNTER64,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexInstanceKey] =
                new MOMutableColumn(MIBConstants.colInstanceKey,
                        SMIConstants.SYNTAX_OCTET_STRING,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexGatewaySummaryData] =
                new MOMutableColumn(MIBConstants.colGatewaySummaryData,
                        SMIConstants.SYNTAX_OCTET_STRING,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexClusterMembers] =
                new MOMutableColumn(MIBConstants.colClusterMembers,
                        SMIConstants.SYNTAX_OCTET_STRING,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexBalancerMap] =
                new MOMutableColumn(MIBConstants.colBalancerMap,
                        SMIConstants.SYNTAX_OCTET_STRING,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexManagementServiceMap] =
                new MOMutableColumn(MIBConstants.colManagementServiceMap,
                        SMIConstants.SYNTAX_OCTET_STRING,
                        moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexLatestUpdateableVersion] = new MOMutableColumn(
                MIBConstants.colLatestUpdateableVersion,
                SMIConstants.SYNTAX_OCTET_STRING,
                moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ));
        gatewayEntryColumns[MIBConstants.indexForceUpdateVersionCheck] = new MOMutableColumn(
                MIBConstants.colForceUpdateVersionCheck,
                SMIConstants.SYNTAX_INTEGER,
                moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ_WRITE));

        // Table model
        gatewayEntryModel = new GatewayMXBeanTableModel();
        gatewayEntry = moFactory.createTable(MIBConstants.oidGatewayEntry,
                gatewayEntryIndex,
                gatewayEntryColumns,
                gatewayEntryColumns == null ? null : gatewayEntryModel);
        try {
            JSONArray jsonArray = new JSONArray(GatewayManagementBean.SUMMARY_DATA_FIELD_LIST);
            summaryDataFields = new MOScalar(MIBConstants.oidGatewaySummaryDataFields,
                    moFactory.createAccess(MOAccessImpl.ACCESSIBLE_FOR_READ),
                    new OctetString(jsonArray.toString()));
        } catch (JSONException ex) {
            // Should not be possible to get here, since the list of
            // strings is valid and constant.
            // NOTE(review): if it ever does happen, summaryDataFields stays null and
            // registerMOs() would throw NullPointerException — confirm this is acceptable.
        }
        summaryDataNotificationInterval = new SummaryDataIntervalMO(moFactory,
                managementContext.getGatewaySummaryDataNotificationInterval(),
                MIBConstants.oidGatewaySummaryDataNotificationInterval);
    }

    @Override
    public void registerMOs(MOServer server, OctetString context) throws DuplicateRegistrationException {
        server.register(gatewayEntry, context);
        server.register(summaryDataFields, context);
        server.register(summaryDataNotificationInterval, context);
    }

    @Override
    public void unregisterMOs(MOServer server, OctetString context) {
        server.unregister(gatewayEntry, context);
        server.unregister(summaryDataFields, context);
        server.unregister(summaryDataNotificationInterval, context);
    }

    @Override
    public void incrementCounter(CounterEvent event) {
        // FIXME: do we need this?
    }

    /**
     * Adds a row to the sysOR table (AgentCapabilityList contract) and returns its index.
     * NOTE(review): depends on sysOREntryModel/sysOREntry being initialized — see field note.
     */
    @Override
    public OID addSysOREntry(OID sysORID, OctetString sysORDescr) {
        OID index = new OID(new int[]{sysOREntryModel.getRowCount() + 1});
        Variable[] values = new Variable[sysOREntry.getColumnCount()];
        int n = 0;
        values[n++] = sysORID;
        values[n++] = sysORDescr;
        DefaultMOTableRow row = new DefaultMOTableRow(index, values);
        sysOREntry.addRow(row);
        return index;
    }

    @Override
    public MOTableRow removeSysOREntry(OID index) {
        return sysOREntry.removeRow(index);
    }

    /**
     * Registers a gateway bean as a table row, keyed by the bean's id (not the row count).
     */
    public OID addGatewayBean(GatewayManagementBean bean) {
        // Note: as gateways are appearing and disappearing, we cannot just
        // have a gateway index as the current number of rows in the table,
        // because earlier gateway entries may be gone.
        OID gatewayIndexOID = new OID(new int[]{bean.getId()});
        gatewayEntry.addRow(new GatewayEntryRow(gatewayIndexOID, bean));
        return gatewayIndexOID;
    }

    public void removeGatewayBean(OID oid) {
        gatewayEntry.removeRow(oid);
    }

    private class GatewayMXBeanTableModel extends DefaultMOMutableTableModel {
    }

    /**
     * A table row that reads its column values live from the backing
     * {@link GatewayManagementBean} rather than storing them.
     */
    private final class GatewayEntryRow extends DefaultMOMutableRow2PC {

        private GatewayManagementBean bean;

        private GatewayEntryRow(OID index, GatewayManagementBean bean) {
            super(index, null);
            this.bean = bean;
        }

        @Override
        public int size() {
            return MIBConstants.GATEWAY_COLUMN_COUNT;
        }

        @Override
        public Variable getValue(int column) {
            try {
                switch (column) {
                    case MIBConstants.indexGatewayIndex:
                        return new Integer32(getIndex().last());
                    case MIBConstants.indexGatewayId:
                        return Utils.stringToVariable(bean.getHostAndPid());
                    case MIBConstants.indexTotalCurrentSessions:
                        return new Counter64(bean.getTotalCurrentSessions());
                    case MIBConstants.indexTotalBytesReceived:
                        return new Counter64(bean.getTotalBytesReceived());
                    case MIBConstants.indexTotalBytesSent:
                        return new Counter64(bean.getTotalBytesSent());
                    case MIBConstants.indexUptime:
                        return new Counter64(bean.getUptime());
                    case MIBConstants.indexStartTime:
                        return new Counter64(bean.getStartTime());
                    case MIBConstants.indexInstanceKey:
                        return Utils.stringToVariable(bean.getInstanceKey());
                    case MIBConstants.indexGatewaySummaryData:
                        return new OctetString(bean.getSummaryData());
                    case MIBConstants.indexClusterMembers:
                        return new OctetString(bean.getClusterMembers());
                    case MIBConstants.indexBalancerMap:
                        return new OctetString(bean.getClusterBalancerMap());
                    case MIBConstants.indexManagementServiceMap:
                        return new OctetString(bean.getManagementServiceMap());
                    case MIBConstants.indexLatestUpdateableVersion:
                        return new OctetString(bean.getAvailableUpdateVersion());
                    default:
                        return super.getValue(column);
                }
            } catch (Exception ex) {
                // FIXME: handle errors
                // -1 is returned as a generic "unavailable" marker for any column.
                return new Integer32(-1);
            }
        }

        @Override
        public void commit(SubRequest subRequest, MOTableRow changeSet, int column) {
            setValue(column, (Variable) subRequest.getVariableBinding().getVariable().clone());
            subRequest.completed();
        }

        @Override
        public void setValue(int column, Variable newValue) {
            switch (column) {
                case MIBConstants.indexForceUpdateVersionCheck:
                    // Writing 0 to this column triggers an update-version check on the bean.
                    if ((newValue instanceof Integer32) && (((Integer32) newValue).getValue() == 0)) {
                        bean.forceUpdateVersionCheck();
                    }
                    break;
                default:
                    super.setValue(column, newValue);
                    break;
            }
        }
    }
}
| |
/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hyracks.tests.unit;
import static org.apache.hyracks.tests.unit.AbstractRunGeneratorTest.ComparatorFactories;
import static org.apache.hyracks.tests.unit.AbstractRunGeneratorTest.RecordDesc;
import static org.apache.hyracks.tests.unit.AbstractRunGeneratorTest.SerDers;
import static org.apache.hyracks.tests.unit.AbstractRunGeneratorTest.SortFields;
import static org.apache.hyracks.tests.unit.AbstractRunGeneratorTest.assertFTADataIsSorted;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import org.junit.Test;
import org.apache.hyracks.api.comm.FixedSizeFrame;
import org.apache.hyracks.api.comm.IFrame;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import org.apache.hyracks.dataflow.std.sort.AbstractSortRunGenerator;
import org.apache.hyracks.dataflow.std.sort.HybridTopKSortRunGenerator;
import org.apache.hyracks.dataflow.std.sort.HeapSortRunGenerator;
/**
 * Tests for top-K sort run generators: verifies that data fitting in memory produces
 * no spilled runs and that the in-memory result is the sorted top-K subset.
 */
public class TopKRunGeneratorTest {

    static final int PAGE_SIZE = 512;
    static final int NUM_PAGES = 80;
    static final int SORT_FRAME_LIMIT = 4;

    // Ordering of the generated input data relative to the sort order.
    enum ORDER {
        INORDER,
        REVERSE
    }

    /**
     * Frame writer that checks each received frame is sorted and matches the expected
     * key/value map; close() asserts every expected entry was consumed.
     */
    public class InMemorySortDataValidator implements IFrameWriter {

        InMemorySortDataValidator(Map<Integer, String> answer) {
            this.answer = answer;
        }

        // Remaining expected entries; assertFTADataIsSorted removes matched ones.
        Map<Integer, String> answer;
        FrameTupleAccessor accessor;
        // Last key seen, used to verify non-decreasing order across frames.
        int preKey = Integer.MIN_VALUE;

        @Override
        public void open() throws HyracksDataException {
            accessor = new FrameTupleAccessor(RecordDesc);
            preKey = Integer.MIN_VALUE;
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            accessor.reset(buffer);
            preKey = assertFTADataIsSorted(accessor, answer, preKey);
        }

        @Override
        public void fail() throws HyracksDataException {
        }

        @Override
        public void close() throws HyracksDataException {
            // All expected entries must have been matched by now.
            assertTrue(answer.isEmpty());
        }
    }

    @Test
    public void testReverseOrderedDataShouldNotGenerateAnyRuns() throws HyracksDataException {
        int topK = 1;
        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
        HeapSortRunGenerator sorter = new HeapSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
                SortFields, null, ComparatorFactories, RecordDesc);
        testInMemoryOnly(ctx, topK, ORDER.REVERSE, sorter);
    }

    @Test
    public void testAlreadySortedDataShouldNotGenerateAnyRuns() throws HyracksDataException {
        int topK = SORT_FRAME_LIMIT;
        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
        HeapSortRunGenerator sorter = new HeapSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
                SortFields, null, ComparatorFactories, RecordDesc);
        testInMemoryOnly(ctx, topK, ORDER.INORDER, sorter);
    }

    @Test
    public void testHybridTopKShouldNotGenerateAnyRuns() throws HyracksDataException {
        int topK = 1;
        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
        AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
                SortFields, null, ComparatorFactories, RecordDesc);
        testInMemoryOnly(ctx, topK, ORDER.REVERSE, sorter);
    }

    @Test
    public void testHybridTopKShouldSwitchToFrameSorterWhenFlushed() {
        // FIXME(review): this test constructs the sorter but never feeds it data or
        // asserts anything — it currently verifies nothing beyond construction. Complete
        // it (flush enough frames to force the switch) or remove it.
        int topK = 1;
        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
        AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
                SortFields, null, ComparatorFactories, RecordDesc);
    }

    /**
     * Drives a sorter with generated data that fits in memory and asserts that no runs
     * are spilled and that the flushed output equals the expected top-K entries.
     */
    private void testInMemoryOnly(IHyracksTaskContext ctx, int topK, ORDER order, AbstractSortRunGenerator sorter)
            throws HyracksDataException {
        Map<Integer, String> keyValuePair = null;
        switch (order) {
            case INORDER:
                keyValuePair = new TreeMap<>();
                break;
            case REVERSE:
                keyValuePair = new TreeMap<>(Collections.reverseOrder());
                break;
        }
        List<IFrame> frameList = new ArrayList<>();
        int minDataSize = PAGE_SIZE * NUM_PAGES * 4 / 5;
        int minRecordSize = 16;
        int maxRecordSize = 64;
        // prepareData fills keyValuePair as a side effect; frameList itself is not used
        // after this call — presumably only the generated key/value pairs matter here.
        AbstractRunGeneratorTest
                .prepareData(ctx, frameList, minDataSize, minRecordSize, maxRecordSize, null, keyValuePair);
        // NOTE: plain Java assert — only active when the JVM runs with -ea.
        assert topK > 0;
        ByteBuffer buffer = prepareSortedData(keyValuePair);
        Map<Integer, String> topKAnswer = getTopKAnswer(keyValuePair, topK);
        doSort(sorter, buffer);
        // Everything fit in memory, so no runs should have been generated.
        assertEquals(0, sorter.getRuns().size());
        validateResult(sorter, topKAnswer);
    }

    /**
     * Flushes the sorter's in-memory contents through a validator that checks order
     * and exact membership against the expected top-K map.
     */
    private void validateResult(AbstractSortRunGenerator sorter, Map<Integer, String> topKAnswer)
            throws HyracksDataException {
        InMemorySortDataValidator validator = new InMemorySortDataValidator(topKAnswer);
        validator.open();
        sorter.getSorter().flush(validator);
        validator.close();
    }

    // Runs the standard open/nextFrame/close lifecycle over a single input frame.
    private void doSort(AbstractSortRunGenerator sorter, ByteBuffer buffer) throws HyracksDataException {
        sorter.open();
        sorter.nextFrame(buffer);
        sorter.close();
    }

    /**
     * Returns the first topK entries of keyValuePair in ascending key order.
     */
    private Map<Integer, String> getTopKAnswer(Map<Integer, String> keyValuePair, int topK) {
        TreeMap<Integer, String> copy = new TreeMap<>(keyValuePair);
        Map<Integer, String> answer = new TreeMap<>();
        for (Map.Entry<Integer, String> entry : copy.entrySet()) {
            if (answer.size() < topK) {
                answer.put(entry.getKey(), entry.getValue());
            } else {
                break;
            }
        }
        return answer;
    }

    /**
     * Serializes all key/value pairs (in the map's iteration order) into a single frame
     * buffer suitable for feeding to a run generator.
     */
    private ByteBuffer prepareSortedData(Map<Integer, String> keyValuePair) throws HyracksDataException {
        ByteBuffer buffer = ByteBuffer.allocate(PAGE_SIZE * NUM_PAGES);
        IFrame inputFrame = new FixedSizeFrame(buffer);
        FrameTupleAppender appender = new FrameTupleAppender();
        appender.reset(inputFrame, true);
        ArrayTupleBuilder builder = new ArrayTupleBuilder(RecordDesc.getFieldCount());
        for (Map.Entry<Integer, String> entry : keyValuePair.entrySet()) {
            builder.reset();
            builder.addField(SerDers[0], entry.getKey());
            builder.addField(SerDers[1], entry.getValue());
            appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize());
        }
        return buffer;
    }
}
| |
package com.mossle.audit.persistence.domain;
// Generated by Hibernate Tools
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
/**
* AuditBase .
*
* @author Lingo
*/
@Entity
@Table(name = "AUDIT_BASE")
public class AuditBase implements java.io.Serializable {
    private static final long serialVersionUID = 0L;

    // Primary key.
    private Long id;

    // User associated with this audit record (column USER).
    private String user;

    // Type of the resource the action targeted (column RESOURCE_TYPE).
    private String resourceType;

    // Identifier of the targeted resource (column RESOURCE_ID).
    private String resourceId;

    // Action that was performed (column ACTION).
    private String action;

    // Outcome of the action (column RESULT).
    private String result;

    // Application that produced this record (column APPLICATION).
    private String application;

    // Timestamp of the audited event (column AUDIT_TIME).
    private Date auditTime;

    // Client identifier (column CLIENT).
    private String client;

    // Server identifier (column SERVER).
    private String server;

    // Free-form description (column DESCRIPTION).
    private String description;

    // Tenant key for multi-tenant partitioning (column TENANT_ID).
    private String tenantId;

    /** Default constructor required by JPA/Hibernate. */
    public AuditBase() {
    }

    /** Constructs a record with only its primary key set. */
    public AuditBase(Long id) {
        this.id = id;
    }

    /** Constructs a fully populated audit record. */
    public AuditBase(Long id, String user, String resourceType,
            String resourceId, String action, String result,
            String application, Date auditTime, String client, String server,
            String description, String tenantId) {
        this.id = id;
        this.user = user;
        this.resourceType = resourceType;
        this.resourceId = resourceId;
        this.action = action;
        this.result = result;
        this.application = application;
        this.auditTime = auditTime;
        this.client = client;
        this.server = server;
        this.description = description;
        this.tenantId = tenantId;
    }

    /** @return the primary key. */
    @Id
    @Column(name = "ID", unique = true, nullable = false)
    public Long getId() {
        return this.id;
    }

    /**
     * @param id
     *            the primary key.
     */
    public void setId(Long id) {
        this.id = id;
    }

    /** @return the user associated with this record. */
    @Column(name = "USER", length = 200)
    public String getUser() {
        return this.user;
    }

    /**
     * @param user
     *            the user associated with this record.
     */
    public void setUser(String user) {
        this.user = user;
    }

    /** @return the type of the targeted resource. */
    @Column(name = "RESOURCE_TYPE", length = 200)
    public String getResourceType() {
        return this.resourceType;
    }

    /**
     * @param resourceType
     *            the type of the targeted resource.
     */
    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }

    /** @return the identifier of the targeted resource. */
    @Column(name = "RESOURCE_ID", length = 200)
    public String getResourceId() {
        return this.resourceId;
    }

    /**
     * @param resourceId
     *            the identifier of the targeted resource.
     */
    public void setResourceId(String resourceId) {
        this.resourceId = resourceId;
    }

    /** @return the action that was performed. */
    @Column(name = "ACTION", length = 200)
    public String getAction() {
        return this.action;
    }

    /**
     * @param action
     *            the action that was performed.
     */
    public void setAction(String action) {
        this.action = action;
    }

    /** @return the outcome of the action. */
    @Column(name = "RESULT", length = 200)
    public String getResult() {
        return this.result;
    }

    /**
     * @param result
     *            the outcome of the action.
     */
    public void setResult(String result) {
        this.result = result;
    }

    /** @return the application that produced this record. */
    @Column(name = "APPLICATION", length = 200)
    public String getApplication() {
        return this.application;
    }

    /**
     * @param application
     *            the application that produced this record.
     */
    public void setApplication(String application) {
        this.application = application;
    }

    /** @return the timestamp of the audited event. */
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "AUDIT_TIME", length = 26)
    public Date getAuditTime() {
        return this.auditTime;
    }

    /**
     * @param auditTime
     *            the timestamp of the audited event.
     */
    public void setAuditTime(Date auditTime) {
        this.auditTime = auditTime;
    }

    /** @return the client identifier. */
    @Column(name = "CLIENT", length = 200)
    public String getClient() {
        return this.client;
    }

    /**
     * @param client
     *            the client identifier.
     */
    public void setClient(String client) {
        this.client = client;
    }

    /** @return the server identifier. */
    @Column(name = "SERVER", length = 200)
    public String getServer() {
        return this.server;
    }

    /**
     * @param server
     *            the server identifier.
     */
    public void setServer(String server) {
        this.server = server;
    }

    /** @return the free-form description. */
    @Column(name = "DESCRIPTION", length = 200)
    public String getDescription() {
        return this.description;
    }

    /**
     * @param description
     *            the free-form description.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /** @return the tenant key. */
    @Column(name = "TENANT_ID", length = 64)
    public String getTenantId() {
        return this.tenantId;
    }

    /**
     * @param tenantId
     *            the tenant key.
     */
    public void setTenantId(String tenantId) {
        this.tenantId = tenantId;
    }
}
| |
/*
Copyright (c) 2007, Distributed Computing Group (DCG)
ETH Zurich
Switzerland
dcg.ethz.ch
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
- Neither the name 'Sinalgo' nor the names of its contributors may be
used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package sinalgo.gui.multiLineTooltip;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import javax.swing.CellRendererPane;
import javax.swing.JComponent;
import javax.swing.JTextArea;
import javax.swing.JToolTip;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.basic.BasicToolTipUI;
/**
 * Tooltip shown when the mouse rests over the graph panel. Displays information about
 * the edge or node under the current position, and supports text spanning several lines.
 */
@SuppressWarnings("serial")
public class MultiLineToolTip extends JToolTip {

    String tipText;
    JComponent component;

    /** Number of text columns to wrap at; 0 means the fixed width is used instead. */
    protected int columns = 0;
    /** Fixed width in pixels; 0 means the column count is used instead. */
    protected int fixedwidth = 0;

    /**
     * Creates the tooltip and installs the multi-line UI delegate.
     */
    public MultiLineToolTip() {
        updateUI();
    }

    @Override
    public void updateUI() {
        setUI(MultiLineToolTipUI.createUI(this));
    }

    /**
     * Sets the number of columns the tooltip has; any fixed width is cleared.
     *
     * @param columns The number of columns the tooltip has.
     */
    public void setColumns(int columns) {
        this.columns = columns;
        this.fixedwidth = 0;
    }

    /**
     * Returns the number of columns the tooltip has.
     *
     * @return The number of columns the tooltip has.
     */
    public int getColumns() {
        return columns;
    }

    /**
     * Sets the fixed width for the tooltip; any column count is cleared.
     *
     * @param width The fixed width of the tooltip.
     */
    public void setFixedWidth(int width) {
        this.fixedwidth = width;
        this.columns = 0;
    }

    /**
     * Returns the fixed width of the tooltip.
     *
     * @return The fixed width of the tooltip.
     */
    public int getFixedWidth() {
        return fixedwidth;
    }
}
/**
 * UI delegate used so that the tooltip can display several lines of text
 * (e.g. text containing newlines). A single shared instance serves all
 * {@code MultiLineToolTip} components.
 */
class MultiLineToolTipUI extends BasicToolTipUI {
static MultiLineToolTipUI sharedInstance = new MultiLineToolTipUI();
Font smallFont;
static JToolTip tip;
protected CellRendererPane rendererPane;
// Lazily created by getPreferredSize(); shared across all tooltips using this UI.
private static JTextArea textArea ;

/** Returns the shared UI delegate instance for the given component. */
public static ComponentUI createUI(JComponent c) {
    return sharedInstance;
}

/**
 * The constructor for the MultiLineToolTipUI class.
 */
public MultiLineToolTipUI() {
    super();
}

@Override
public void installUI(JComponent c) {
    super.installUI(c);
    tip = (JToolTip)c;
    // the renderer pane hosts the text area used for measuring and painting
    rendererPane = new CellRendererPane();
    c.add(rendererPane);
}

@Override
public void uninstallUI(JComponent c) {
    super.uninstallUI(c);
    c.remove(rendererPane);
    rendererPane = null;
}

@Override
public void paint(Graphics g, JComponent c) {
    // Bug fix: textArea is only created in getPreferredSize(). If paint() is
    // invoked before a preferred size was ever computed (or when the tip text
    // was null), textArea is still null and painting would throw a
    // NullPointerException. Skip painting in that case.
    if (textArea == null) {
        return;
    }
    Dimension size = c.getSize();
    textArea.setBackground(c.getBackground());
    rendererPane.paintComponent(g, textArea, c, 1, 1,
            size.width - 1, size.height - 1, true);
}

@Override
public Dimension getPreferredSize(JComponent c) {
    String tipText = ((JToolTip)c).getTipText();
    if (tipText == null)
        return new Dimension(0,0);
    // rebuild the measuring text area for the current tip text
    textArea = new JTextArea(tipText );
    rendererPane.removeAll();
    rendererPane.add(textArea );
    textArea.setWrapStyleWord(true);
    int width = ((MultiLineToolTip)c).getFixedWidth();
    int columns = ((MultiLineToolTip)c).getColumns();
    if( columns > 0 ) {
        // column mode: let the column count determine the wrap width
        textArea.setColumns(columns);
        textArea.setSize(0,0);
        textArea.setLineWrap(true);
        textArea.setSize( textArea.getPreferredSize() );
    } else if( width > 0 ) {
        // fixed-width mode: force the pixel width, let the height grow
        textArea.setLineWrap(true);
        Dimension d = textArea.getPreferredSize();
        d.width = width;
        d.height++;
        textArea.setSize(d);
    } else {
        // no wrapping configured: lines keep their natural length
        textArea.setLineWrap(false);
    }
    // one pixel of padding on each measured dimension
    Dimension dim = textArea.getPreferredSize();
    dim.height += 1;
    dim.width += 1;
    return dim;
}

@Override
public Dimension getMinimumSize(JComponent c) {
    return getPreferredSize(c);
}

@Override
public Dimension getMaximumSize(JComponent c) {
    return getPreferredSize(c);
}
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.publishers;
import com.thoughtworks.go.domain.DownloadAction;
import com.thoughtworks.go.domain.JobIdentifier;
import com.thoughtworks.go.domain.Property;
import com.thoughtworks.go.domain.builder.FetchArtifactBuilder;
import com.thoughtworks.go.domain.exception.ArtifactPublishingException;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.remote.work.ConsoleOutputTransmitter;
import com.thoughtworks.go.remote.work.RemoteConsoleAppender;
import com.thoughtworks.go.util.*;
import com.thoughtworks.go.work.DefaultGoPublisher;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.Properties;
import java.util.zip.Deflater;
import static com.thoughtworks.go.util.ArtifactLogUtil.getConsoleOutputFolderAndFileNameUrl;
import static com.thoughtworks.go.util.CachedDigestUtils.md5Hex;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static com.thoughtworks.go.util.FileUtil.normalizePath;
import static com.thoughtworks.go.util.GoConstants.PUBLISH_MAX_RETRIES;
import static com.thoughtworks.go.util.command.TaggedStreamConsumer.PUBLISH;
import static com.thoughtworks.go.util.command.TaggedStreamConsumer.PUBLISH_ERR;
import static java.lang.String.format;
import static org.apache.commons.lang.StringUtils.removeStart;
@Component
public class GoArtifactsManipulator {
private static final Logger LOGGER = Logger.getLogger(GoArtifactsManipulator.class);

private final HttpService httpService;
private final URLService urlService;
private final ZipUtil zipUtil;

@Autowired
public GoArtifactsManipulator(HttpService httpService, URLService urlService, ZipUtil zipUtil) {
    this.httpService = httpService;
    this.urlService = urlService;
    this.zipUtil = zipUtil;
}

/**
 * Zips {@code source} and uploads it to the server under {@code destPath},
 * retrying up to {@code PUBLISH_MAX_RETRIES} times. Progress and errors are
 * echoed to the job console via {@code goPublisher}. Throws (via bomb / a
 * wrapping RuntimeException) when the source is missing or every attempt fails.
 */
public void publish(DefaultGoPublisher goPublisher, String destPath, File source, JobIdentifier jobIdentifier) {
    if (!source.exists()) {
        String message = "Failed to find " + source.getAbsolutePath();
        goPublisher.taggedConsumeLineWithPrefix(PUBLISH_ERR, message);
        bomb(message);
    }
    int publishingAttempts = 0;
    Throwable lastException = null;
    while (publishingAttempts < PUBLISH_MAX_RETRIES) {
        File tmpDir = null;
        try {
            publishingAttempts++;
            tmpDir = FileUtil.createTempFolder();
            File dataToUpload = new File(tmpDir, source.getName() + ".zip");
            zipUtil.zip(source, dataToUpload, Deflater.BEST_SPEED);
            // the size reported to the server is that of the unzipped content, not the zip
            long size = source.isDirectory() ? FileUtils.sizeOfDirectory(source) : source.length();
            goPublisher.taggedConsumeLineWithPrefix(PUBLISH, "Uploading artifacts from " + source.getAbsolutePath() + " to " + getDestPath(destPath));
            String normalizedDestPath = normalizePath(destPath);
            String url = urlService.getUploadUrlOfAgent(jobIdentifier, normalizedDestPath, publishingAttempts);
            int statusCode = httpService.upload(url, size, dataToUpload, artifactChecksums(source, normalizedDestPath));
            if (statusCode == HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE) {
                String message = String.format("Artifact upload for file %s (Size: %s) was denied by the server. This usually happens when server runs out of disk space.",
                        source.getAbsolutePath(), size);
                goPublisher.taggedConsumeLineWithPrefix(PUBLISH_ERR, message);
                LOGGER.error("[Artifact Upload] Artifact upload was denied by the server. This usually happens when server runs out of disk space.");
                // stop retrying: a server that is out of disk space will not recover within our retry window
                publishingAttempts = PUBLISH_MAX_RETRIES;
                bomb(message + ". HTTP return code is " + statusCode);
            }
            if (statusCode < HttpServletResponse.SC_OK || statusCode >= HttpServletResponse.SC_MULTIPLE_CHOICES) {
                bomb("Failed to upload " + source.getAbsolutePath() + ". HTTP return code is " + statusCode);
            }
            return;
        } catch (Throwable e) {
            // bomb() above throws through here as well; remember the failure and retry
            String message = "Failed to upload " + source.getAbsolutePath();
            LOGGER.error(message, e);
            goPublisher.taggedConsumeLineWithPrefix(PUBLISH_ERR, message);
            lastException = e;
        } finally {
            FileUtil.deleteFolder(tmpDir);
        }
    }
    if (lastException != null) {
        throw new RuntimeException(lastException);
    }
}

/**
 * Computes MD5 checksums for the upload: one entry per file, keyed by the
 * file's effective (destination-relative) name.
 */
private Properties artifactChecksums(File source, String destPath) throws IOException {
    if (source.isDirectory()) {
        return computeChecksumForContentsOfDirectory(source, destPath);
    }
    // try-with-resources replaces the original manual try/finally stream close
    try (FileInputStream inputStream = new FileInputStream(source)) {
        return computeChecksumForFile(source.getName(), md5Hex(inputStream), destPath);
    }
}

private Properties computeChecksumForContentsOfDirectory(File directory, String destPath) throws IOException {
    Collection<File> fileStructure = FileUtils.listFiles(directory, null, true);
    Properties checksumProperties = new Properties();
    for (File file : fileStructure) {
        // path relative to the directory's parent so the directory name itself stays in the key
        String filePath = removeStart(file.getAbsolutePath(), directory.getParentFile().getAbsolutePath());
        try (FileInputStream inputStream = new FileInputStream(file)) {
            checksumProperties.setProperty(getEffectiveFileName(destPath, normalizePath(filePath)), md5Hex(inputStream));
        }
    }
    return checksumProperties;
}

private Properties computeChecksumForFile(String sourceName, String md5, String destPath) throws IOException {
    String effectiveFileName = getEffectiveFileName(destPath, sourceName);
    Properties properties = new Properties();
    properties.setProperty(effectiveFileName, md5);
    return properties;
}

// Key used in the checksum properties: destPath/filePath without a leading slash.
private String getEffectiveFileName(String computedDestPath, String filePath) {
    File artifactDest = computedDestPath.isEmpty() ? new File(filePath) : new File(computedDestPath, filePath);
    return removeLeadingSlash(artifactDest);
}

private String removeLeadingSlash(File artifactDest) {
    return removeStart(normalizePath(artifactDest.getPath()), "/");
}

/**
 * Downloads the artifact described by {@code fetchArtifactBuilder}, echoing
 * progress to the console; wraps any failure in a RuntimeException after logging.
 */
public void fetch(DefaultGoPublisher goPublisher, FetchArtifactBuilder fetchArtifactBuilder) {
    try {
        String fetchMsg = String.format("Fetching artifact [%s] from [%s]", fetchArtifactBuilder.getSrc(),
                fetchArtifactBuilder.jobLocatorForDisplay());
        goPublisher.taggedConsumeLineWithPrefix(DefaultGoPublisher.OUT, fetchMsg);
        fetchArtifactBuilder.fetch(new DownloadAction(httpService, goPublisher, new SystemTimeClock()), urlService);
    } catch (Exception e) {
        String fetchMsg = String.format("Failed to save artifact [%s] to [%s]",
                fetchArtifactBuilder.getSrc(), fetchArtifactBuilder.getDest());
        LOGGER.error(fetchMsg, e);
        goPublisher.taggedConsumeLineWithPrefix(DefaultGoPublisher.ERR, fetchMsg);
        throw new RuntimeException(e);
    }
}

// Label shown in console output when no destination folder was configured.
private String getDestPath(String file) {
    return StringUtils.isEmpty(file) ? "[defaultRoot]" : file;
}

/**
 * Posts a job property to the server; wraps any failure in an
 * ArtifactPublishingException carrying the key/value for context.
 */
public void setProperty(JobIdentifier jobIdentifier, Property property) {
    try {
        String propertiesUrl = urlService.getPropertiesUrl(jobIdentifier, property.getKey());
        httpService.postProperty(propertiesUrl, property.getValue());
    } catch (Exception e) {
        throw new ArtifactPublishingException(format("Failed to set property %s with value %s", property.getKey(), property.getValue()), e);
    }
}

/**
 * Builds the transmitter used to stream console output for the given job.
 * NOTE(review): agentIdentifier is currently unused; kept for interface compatibility.
 */
public ConsoleOutputTransmitter createConsoleOutputTransmitter(JobIdentifier jobIdentifier,
                                                              AgentIdentifier agentIdentifier) {
    String consoleUrl = urlService.getUploadUrlOfAgent(jobIdentifier, getConsoleOutputFolderAndFileNameUrl());
    return new ConsoleOutputTransmitter(new RemoteConsoleAppender(consoleUrl, httpService));
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.service;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.cluster.AckedClusterStateTaskListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterState.Builder;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ClusterStateTaskConfig;
import org.elasticsearch.cluster.ClusterStateTaskExecutor;
import org.elasticsearch.cluster.ClusterStateTaskListener;
import org.elasticsearch.cluster.ClusterStateUpdateTask;
import org.elasticsearch.cluster.LocalNodeMasterListener;
import org.elasticsearch.cluster.NodeConnectionsService;
import org.elasticsearch.cluster.TimeoutClusterStateListener;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor;
import org.elasticsearch.common.util.concurrent.PrioritizedRunnable;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Queue;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
public class ClusterService extends AbstractLifecycleComponent {
// Threshold above which a cluster state update task is logged as slow; dynamically updatable.
public static final Setting<TimeValue> CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING =
Setting.positiveTimeSetting("cluster.service.slow_task_logging_threshold", TimeValue.timeValueSeconds(30),
Property.Dynamic, Property.NodeScope);
// Name of the single thread on which all cluster state update tasks run.
public static final String UPDATE_THREAD_NAME = "clusterService#updateTask";
private final ThreadPool threadPool;
private final ClusterName clusterName;
// Publishes committed cluster state changes; injected via setClusterStatePublisher before doStart.
private BiConsumer<ClusterChangedEvent, Discovery.AckListener> clusterStatePublisher;
private final OperationRouting operationRouting;
private final ClusterSettings clusterSettings;
private TimeValue slowTaskLoggingThreshold;
// Single-threaded prioritized executor for update tasks; created in doStart.
private volatile PrioritizedEsThreadPoolExecutor updateTasksExecutor;
/**
 * Those 3 state listeners are changing infrequently - CopyOnWriteArrayList is just fine
 */
private final Collection<ClusterStateListener> priorityClusterStateListeners = new CopyOnWriteArrayList<>();
private final Collection<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>();
private final Collection<ClusterStateListener> lastClusterStateListeners = new CopyOnWriteArrayList<>();
// Pending batched tasks per executor; guarded by synchronized (updateTasksPerExecutor).
final Map<ClusterStateTaskExecutor, LinkedHashSet<UpdateTask>> updateTasksPerExecutor = new HashMap<>();
// TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API
private final Collection<ClusterStateListener> postAppliedListeners = new CopyOnWriteArrayList<>();
// Live view over the three listener collections above, in notification order.
private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat(priorityClusterStateListeners,
clusterStateListeners, lastClusterStateListeners);
private final LocalNodeMasterListeners localNodeMasterListeners;
// Timeouts registered via add(TimeValue, TimeoutClusterStateListener); cancelled on doStop/remove.
private final Queue<NotifyTimeout> onGoingTimeouts = ConcurrentCollections.newQueue();
// Current cluster state plus its application status; the single source of truth.
private final AtomicReference<ClusterServiceState> state;
// Blocks applied to the first cluster state built in doStart.
private final ClusterBlocks.Builder initialBlocks;
private NodeConnectionsService nodeConnectionsService;
/**
 * Creates the cluster service with an empty initial cluster state in status UNKNOWN
 * and registers for dynamic updates of the slow-task logging threshold.
 */
public ClusterService(Settings settings,
ClusterSettings clusterSettings, ThreadPool threadPool) {
super(settings);
this.operationRouting = new OperationRouting(settings, clusterSettings);
this.threadPool = threadPool;
this.clusterSettings = clusterSettings;
this.clusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings);
// will be replaced on doStart.
this.state = new AtomicReference<>(new ClusterServiceState(ClusterState.builder(clusterName).build(), ClusterStateStatus.UNKNOWN));
// keep the slow-task threshold in sync with dynamic settings changes
this.clusterSettings.addSettingsUpdateConsumer(CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING,
this::setSlowTaskLoggingThreshold);
this.slowTaskLoggingThreshold = CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.get(settings);
localNodeMasterListeners = new LocalNodeMasterListeners(threadPool);
initialBlocks = ClusterBlocks.builder();
}
// Settings-update callback for CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.
private void setSlowTaskLoggingThreshold(TimeValue slowTaskLoggingThreshold) {
this.slowTaskLoggingThreshold = slowTaskLoggingThreshold;
}
// Injects the publisher used to broadcast state changes; must be set before doStart (see the null check there).
public synchronized void setClusterStatePublisher(BiConsumer<ClusterChangedEvent, Discovery.AckListener> publisher) {
clusterStatePublisher = publisher;
}
/**
 * Registers this node as the local node of the cluster state. May only be
 * called once, before the service is started.
 */
public synchronized void setLocalNode(DiscoveryNode localNode) {
    assert clusterServiceState().getClusterState().nodes().getLocalNodeId() == null : "local node is already set";
    updateState(css -> {
        final ClusterState current = css.getClusterState();
        // rebuild the node set with the given node marked as local
        final DiscoveryNodes withLocal = DiscoveryNodes.builder(current.nodes())
                .add(localNode)
                .localNodeId(localNode.getId())
                .build();
        final ClusterState updated = ClusterState.builder(current).nodes(withLocal).build();
        return new ClusterServiceState(updated, css.getClusterStateStatus());
    });
}
// Atomically applies updateFunction to the current ClusterServiceState,
// asserting that the resulting transition obeys the status state machine.
private void updateState(UnaryOperator<ClusterServiceState> updateFunction) {
this.state.getAndUpdate(oldClusterServiceState -> {
ClusterServiceState newClusterServiceState = updateFunction.apply(oldClusterServiceState);
assert validStateTransition(oldClusterServiceState, newClusterServiceState) :
"Invalid cluster service state transition from " + oldClusterServiceState + " to " + newClusterServiceState;
return newClusterServiceState;
});
}
/**
 * Checks the cluster-state status machine:
 * UNKNOWN may only move to UNKNOWN or BEING_APPLIED, BEING_APPLIED only to
 * APPLIED, and APPLIED only back to BEING_APPLIED. The cluster state object
 * itself may stay identical exactly on the BEING_APPLIED -> APPLIED step.
 */
private static boolean validStateTransition(ClusterServiceState oldClusterServiceState, ClusterServiceState newClusterServiceState) {
    if (oldClusterServiceState == null || newClusterServiceState == null) {
        return false;
    }
    final ClusterStateStatus from = oldClusterServiceState.getClusterStateStatus();
    final ClusterStateStatus to = newClusterServiceState.getClusterStateStatus();
    final boolean legalStatusChange =
            (from != ClusterStateStatus.UNKNOWN || to != ClusterStateStatus.APPLIED)
            && (from != ClusterStateStatus.BEING_APPLIED || to == ClusterStateStatus.APPLIED)
            && (from != ClusterStateStatus.APPLIED || to == ClusterStateStatus.BEING_APPLIED);
    if (!legalStatusChange) {
        return false;
    }
    // the state instance is unchanged if and only if we just finished applying it
    final boolean sameStateInstance = oldClusterServiceState.getClusterState() == newClusterServiceState.getClusterState();
    return sameStateInstance == (from == ClusterStateStatus.BEING_APPLIED && to == ClusterStateStatus.APPLIED);
}
// Injects the service that maintains connections to other nodes; set once before doStart.
public synchronized void setNodeConnectionsService(NodeConnectionsService nodeConnectionsService) {
assert this.nodeConnectionsService == null : "nodeConnectionsService is already set";
this.nodeConnectionsService = nodeConnectionsService;
}
/**
* Adds an initial block to be set on the first cluster state created.
*/
public synchronized void addInitialStateBlock(ClusterBlock block) throws IllegalStateException {
// initial blocks only affect the first cluster state built in doStart, so they
// cannot be changed once the service has started
if (lifecycle.started()) {
throw new IllegalStateException("can't set initial block when started");
}
initialBlocks.addGlobalBlock(block);
}
/**
* Remove an initial block to be set on the first cluster state created.
*/
// Convenience overload: removes the initial block with the given block's id.
public synchronized void removeInitialStateBlock(ClusterBlock block) throws IllegalStateException {
removeInitialStateBlock(block.id());
}
/**
* Remove an initial block to be set on the first cluster state created.
*/
public synchronized void removeInitialStateBlock(int blockId) throws IllegalStateException {
    // initial blocks only affect the first cluster state built in doStart,
    // so they cannot be changed once the service has started
    if (lifecycle.started()) {
        // fixed: message previously said "set" (copy-paste from addInitialStateBlock)
        // although this method removes a block
        throw new IllegalStateException("can't remove initial block when started");
    }
    initialBlocks.removeGlobalBlock(blockId);
}
@Override
protected synchronized void doStart() {
// all three collaborators must have been injected before the service may start
Objects.requireNonNull(clusterStatePublisher, "please set a cluster state publisher before starting");
Objects.requireNonNull(clusterServiceState().getClusterState().nodes().getLocalNode(), "please set the local node before starting");
Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting");
add(localNodeMasterListeners);
// fold the accumulated initial blocks into the first real cluster state
updateState(css -> new ClusterServiceState(
ClusterState.builder(css.getClusterState()).blocks(initialBlocks).build(),
css.getClusterStateStatus()));
// single prioritized thread that executes every cluster state update task
this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(UPDATE_THREAD_NAME, daemonThreadFactory(settings, UPDATE_THREAD_NAME),
threadPool.getThreadContext());
}
@Override
protected synchronized void doStop() {
    // cancel every pending timeout and tell its listener the service is closing
    for (NotifyTimeout onGoingTimeout : onGoingTimeouts) {
        try {
            // fixed: cancel() was previously invoked twice per timeout
            // (once before the try and once inside it); a single call suffices
            onGoingTimeout.cancel();
            onGoingTimeout.listener.onClose();
        } catch (Exception ex) {
            logger.debug("failed to notify listeners on shutdown", ex);
        }
    }
    ThreadPool.terminate(updateTasksExecutor, 10, TimeUnit.SECONDS);
    // close timeout listeners that did not have an ongoing timeout
    postAppliedListeners
            .stream()
            .filter(listener -> listener instanceof TimeoutClusterStateListener)
            .map(listener -> (TimeoutClusterStateListener)listener)
            .forEach(TimeoutClusterStateListener::onClose);
    remove(localNodeMasterListeners);
}
@Override
protected synchronized void doClose() {
// intentionally empty: shutdown work happens in doStop()
// NOTE(review): presumably nothing holds resources beyond doStop — confirm against the AbstractLifecycleComponent contract
}
/**
* The local node.
*/
/**
 * Returns the local node from the current cluster state.
 *
 * @throws IllegalStateException if no local node has been set yet
 */
public DiscoveryNode localNode() {
    final DiscoveryNode node = state().getNodes().getLocalNode();
    if (node != null) {
        return node;
    }
    throw new IllegalStateException("No local node found. Is the node started?");
}
// Accessor for the routing helper created in the constructor.
public OperationRouting operationRouting() {
return operationRouting;
}
/**
* The current cluster state.
*/
public ClusterState state() {
// the ClusterState component of the atomically-held ClusterServiceState
return clusterServiceState().getClusterState();
}
/**
* The current cluster service state comprising cluster state and cluster state status.
*/
public ClusterServiceState clusterServiceState() {
// single volatile read of the current state + status pair
return this.state.get();
}
/**
* Adds a priority listener for updated cluster states.
*/
public void addFirst(ClusterStateListener listener) {
// priority listeners are notified before all regular and last listeners
priorityClusterStateListeners.add(listener);
}
/**
* Adds last listener.
*/
public void addLast(ClusterStateListener listener) {
// last listeners are notified after all priority and regular listeners
lastClusterStateListeners.add(listener);
}
/**
* Adds a listener for updated cluster states.
*/
public void add(ClusterStateListener listener) {
// regular listeners are notified between priority and last listeners
clusterStateListeners.add(listener);
}
/**
* Removes a listener for updated cluster states.
*/
/**
 * Removes a listener for updated cluster states from every listener
 * collection and cancels any pending timeout registered for it.
 */
public void remove(ClusterStateListener listener) {
    clusterStateListeners.remove(listener);
    priorityClusterStateListeners.remove(listener);
    lastClusterStateListeners.remove(listener);
    postAppliedListeners.remove(listener);
    // cancel and drop any timeout that was registered for this listener
    onGoingTimeouts.removeIf(timeout -> {
        if (timeout.listener.equals(listener)) {
            timeout.cancel();
            return true;
        }
        return false;
    });
}
/**
* Add a listener for on/off local node master events
*/
public void add(LocalNodeMasterListener listener) {
// delegated to the dedicated on/off-master listener registry
localNodeMasterListeners.add(listener);
}
/**
* Remove the given listener for on/off local master events
*/
public void remove(LocalNodeMasterListener listener) {
// delegated to the dedicated on/off-master listener registry
localNodeMasterListeners.remove(listener);
}
/**
* Adds a cluster state listener that will timeout after the provided timeout,
* and is executed after the cluster state has been successfully applied, i.e. is
* in state {@link ClusterStateStatus#APPLIED}
* NOTE: a {@code null} timeout means that the listener will never be removed
* automatically
*/
public void add(@Nullable final TimeValue timeout, final TimeoutClusterStateListener listener) {
// if we are already shut down, just tell the listener immediately
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
return;
}
// call the post added notification on the same event thread
try {
updateTasksExecutor.execute(new SourcePrioritizedRunnable(Priority.HIGH, "_add_listener_") {
@Override
public void run() {
if (timeout != null) {
// schedule the timeout before registering, so it is tracked in onGoingTimeouts
NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout);
notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout);
onGoingTimeouts.add(notifyTimeout);
}
postAppliedListeners.add(listener);
listener.postAdded();
}
});
} catch (EsRejectedExecutionException e) {
// a rejection during shutdown is expected — close the listener instead of failing
if (lifecycle.stoppedOrClosed()) {
listener.onClose();
} else {
throw e;
}
}
}
/**
* Submits a cluster state update task; unlike {@link #submitStateUpdateTask(String, Object, ClusterStateTaskConfig,
* ClusterStateTaskExecutor, ClusterStateTaskListener)}, submitted updates will not be batched.
*
* @param source the source of the cluster state update task
* @param updateTask the full context for the cluster state update
* task
*
*/
public void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask) {
// a ClusterStateUpdateTask is its own state, executor and listener
submitStateUpdateTask(source, updateTask, updateTask, updateTask, updateTask);
}
/**
* Submits a cluster state update task; submitted updates will be
* batched across the same instance of executor. The exact batching
* semantics depend on the underlying implementation but a rough
* guideline is that if the update task is submitted while there
* are pending update tasks for the same executor, these update
* tasks will all be executed on the executor in a single batch
*
* @param source the source of the cluster state update task
* @param task the state needed for the cluster state update task
* @param config the cluster state update task configuration
* @param executor the cluster state update task executor; tasks
* that share the same executor will be executed
* in batches on this executor
* @param listener callback after the cluster state update task
* completes
* @param <T> the type of the cluster state update task state
*
*/
public <T> void submitStateUpdateTask(final String source, final T task,
final ClusterStateTaskConfig config,
final ClusterStateTaskExecutor<T> executor,
final ClusterStateTaskListener listener) {
// single-task convenience wrapper around the batch submission API
submitStateUpdateTasks(source, Collections.singletonMap(task, listener), config, executor);
}
/**
* Submits a batch of cluster state update tasks; submitted updates are guaranteed to be processed together,
* potentially with more tasks of the same executor.
*
* @param source the source of the cluster state update task
* @param tasks a map of update tasks and their corresponding listeners
* @param config the cluster state update task configuration
* @param executor the cluster state update task executor; tasks
* that share the same executor will be executed
* in batches on this executor
* @param <T> the type of the cluster state update task state
*
*/
public <T> void submitStateUpdateTasks(final String source,
final Map<T, ClusterStateTaskListener> tasks, final ClusterStateTaskConfig config,
final ClusterStateTaskExecutor<T> executor) {
// silently drop submissions while not running; shutdown is not an error for callers
if (!lifecycle.started()) {
return;
}
if (tasks.isEmpty()) {
return;
}
try {
// convert to an identity map to check for dups based on update tasks semantics of using identity instead of equal
final IdentityHashMap<T, ClusterStateTaskListener> tasksIdentity = new IdentityHashMap<>(tasks);
final List<UpdateTask<T>> updateTasks = tasksIdentity.entrySet().stream().map(
entry -> new UpdateTask<>(source, entry.getKey(), config.priority(), executor, safe(entry.getValue(), logger))
).collect(Collectors.toList());
// register the batch under its executor; reject a task instance that is already queued
synchronized (updateTasksPerExecutor) {
LinkedHashSet<UpdateTask> existingTasks = updateTasksPerExecutor.computeIfAbsent(executor,
k -> new LinkedHashSet<>(updateTasks.size()));
for (@SuppressWarnings("unchecked") UpdateTask<T> existing : existingTasks) {
if (tasksIdentity.containsKey(existing.task)) {
throw new IllegalStateException("task [" + executor.describeTasks(Collections.singletonList(existing.task)) +
"] with source [" + source + "] is already queued");
}
}
existingTasks.addAll(updateTasks);
}
// only the first task of the batch is submitted to the executor; when it runs,
// it drains all pending tasks registered for the same executor
final UpdateTask<T> firstTask = updateTasks.get(0);
final TimeValue timeout = config.timeout();
if (timeout != null) {
// on timeout: mark still-unprocessed tasks, de-register them, and fail their listeners
updateTasksExecutor.execute(firstTask, threadPool.scheduler(), timeout, () -> threadPool.generic().execute(() -> {
final ArrayList<UpdateTask<T>> toRemove = new ArrayList<>();
for (UpdateTask<T> task : updateTasks) {
if (task.processed.getAndSet(true) == false) {
logger.debug("cluster state update task [{}] timed out after [{}]", source, timeout);
toRemove.add(task);
}
}
if (toRemove.isEmpty() == false) {
ClusterStateTaskExecutor<T> clusterStateTaskExecutor = toRemove.get(0).executor;
synchronized (updateTasksPerExecutor) {
LinkedHashSet<UpdateTask> existingTasks = updateTasksPerExecutor.get(clusterStateTaskExecutor);
if (existingTasks != null) {
existingTasks.removeAll(toRemove);
if (existingTasks.isEmpty()) {
updateTasksPerExecutor.remove(clusterStateTaskExecutor);
}
}
}
for (UpdateTask<T> task : toRemove) {
task.listener.onFailure(source, new ProcessClusterEventTimeoutException(timeout, source));
}
}
}));
} else {
updateTasksExecutor.execute(firstTask);
}
} catch (EsRejectedExecutionException e) {
// ignore cases where we are shutting down..., there is really nothing interesting
// to be done here...
if (!lifecycle.stoppedOrClosed()) {
throw e;
}
}
}
/**
* Returns the tasks that are pending.
*/
/**
 * Returns a snapshot of the tasks that are currently pending on the
 * cluster state update executor.
 */
public List<PendingClusterTask> pendingTasks() {
    final PrioritizedEsThreadPoolExecutor.Pending[] pendings = updateTasksExecutor.getPending();
    final List<PendingClusterTask> result = new ArrayList<>(pendings.length);
    for (PrioritizedEsThreadPoolExecutor.Pending entry : pendings) {
        // capture the task once: it is nulled after execution and must not change while we inspect it
        final Object task = entry.task;
        if (task == null) {
            continue;
        }
        final String source;
        final long timeInQueue;
        if (task instanceof SourcePrioritizedRunnable) {
            final SourcePrioritizedRunnable runnable = (SourcePrioritizedRunnable) task;
            source = runnable.source();
            timeInQueue = runnable.getAgeInMillis();
        } else {
            assert false : "expected SourcePrioritizedRunnable got " + task.getClass();
            source = "unknown [" + task.getClass() + "]";
            timeInQueue = 0;
        }
        result.add(
            new PendingClusterTask(entry.insertionOrder, entry.priority, new Text(source), timeInQueue, entry.executing));
    }
    return result;
}
/**
* Returns the number of currently pending tasks.
*/
public int numberOfPendingTasks() {
// delegates to the prioritized executor that runs all update tasks
return updateTasksExecutor.getNumberOfPendingTasks();
}
/**
* Returns the maximum wait time for tasks in the queue
*
* @return A zero time value if the queue is empty, otherwise the time value oldest task waiting in the queue
*/
public TimeValue getMaxTaskWaitTime() {
// delegates to the prioritized executor that runs all update tasks
return updateTasksExecutor.getMaxTaskWaitTime();
}
/** asserts that the current thread is the cluster state update thread */
public static boolean assertClusterStateThread() {
assert Thread.currentThread().getName().contains(ClusterService.UPDATE_THREAD_NAME) :
"not called from the cluster state update thread";
// always returns true so it can be used directly inside an assert statement
return true;
}
/**
 * Asserts that the current thread is <b>NOT</b> the cluster state update thread.
 * Intended to be invoked from {@code assert} statements, hence the boolean return.
 *
 * @param reason human-readable explanation included in the assertion message
 * @return always {@code true}
 */
public static boolean assertNotClusterStateUpdateThread(String reason) {
    final Thread current = Thread.currentThread();
    assert !current.getName().contains(UPDATE_THREAD_NAME) :
        "Expected current thread [" + current + "] to not be the cluster state update thread. Reason: [" + reason + "]";
    return true;
}
/** Returns the name of the cluster this service belongs to. */
public ClusterName getClusterName() {
    return clusterName;
}
/**
 * A prioritized runnable that also carries the human-readable "source"
 * (origin description) of the cluster-state update it performs. The source
 * is surfaced in pending-task listings and log messages.
 */
abstract static class SourcePrioritizedRunnable extends PrioritizedRunnable {
    protected final String source;
    public SourcePrioritizedRunnable(Priority priority, String source) {
        super(priority);
        this.source = source;
    }
    /** Returns the origin description of this task. */
    public String source() {
        return source;
    }
}
/**
 * Drains all pending update tasks queued for the given executor and runs them as a
 * single batch against the current cluster state. If the batch produces a new state,
 * this method also publishes it, applies it locally, and notifies all listeners.
 *
 * NOTE: the sequence of steps below is order-sensitive (publish before notifying
 * listeners, connect to added nodes before publishing, etc.) — do not reorder.
 */
<T> void runTasksForExecutor(ClusterStateTaskExecutor<T> executor) {
    final ArrayList<UpdateTask<T>> toExecute = new ArrayList<>();
    final Map<String, ArrayList<T>> processTasksBySource = new HashMap<>();
    // Atomically claim all not-yet-processed tasks for this executor; the
    // `processed` flag ensures a task queued multiple times runs only once.
    synchronized (updateTasksPerExecutor) {
        LinkedHashSet<UpdateTask> pending = updateTasksPerExecutor.remove(executor);
        if (pending != null) {
            for (UpdateTask<T> task : pending) {
                if (task.processed.getAndSet(true) == false) {
                    logger.trace("will process {}", task);
                    toExecute.add(task);
                    processTasksBySource.computeIfAbsent(task.source, s -> new ArrayList<>()).add(task.task);
                } else {
                    logger.trace("skipping {}, already processed", task);
                }
            }
        }
    }
    if (toExecute.isEmpty()) {
        return;
    }
    // Build a "source1[desc], source2[desc]" summary used in all log messages below.
    final String tasksSummary = processTasksBySource.entrySet().stream().map(entry -> {
        String tasks = executor.describeTasks(entry.getValue());
        return tasks.isEmpty() ? entry.getKey() : entry.getKey() + "[" + tasks + "]";
    }).reduce((s1, s2) -> s1 + ", " + s2).orElse("");
    if (!lifecycle.started()) {
        logger.debug("processing [{}]: ignoring, cluster_service not started", tasksSummary);
        return;
    }
    logger.debug("processing [{}]: execute", tasksSummary);
    ClusterState previousClusterState = clusterServiceState().getClusterState();
    // Master-only executors fail fast when this node lost (or never had) mastership.
    if (!previousClusterState.nodes().isLocalNodeElectedMaster() && executor.runOnlyOnMaster()) {
        logger.debug("failing [{}]: local node is no longer master", tasksSummary);
        toExecute.stream().forEach(task -> task.listener.onNoLongerMaster(task.source));
        return;
    }
    // Execute the whole batch; on failure mark every task in the batch as failed.
    ClusterStateTaskExecutor.BatchResult<T> batchResult;
    long startTimeNS = currentTimeInNanos();
    try {
        List<T> inputs = toExecute.stream().map(tUpdateTask -> tUpdateTask.task).collect(Collectors.toList());
        batchResult = executor.execute(previousClusterState, inputs);
    } catch (Exception e) {
        TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
        if (logger.isTraceEnabled()) {
            logger.trace(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "failed to execute cluster state update in [{}], state:\nversion [{}], source [{}]\n{}{}{}",
                    executionTime,
                    previousClusterState.version(),
                    tasksSummary,
                    previousClusterState.nodes(),
                    previousClusterState.routingTable(),
                    previousClusterState.getRoutingNodes()),
                e);
        }
        warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
        batchResult = ClusterStateTaskExecutor.BatchResult.<T>builder()
                .failures(toExecute.stream().map(updateTask -> updateTask.task)::iterator, e)
                .build(previousClusterState);
    }
    // Sanity-check the executor returned exactly one result per submitted task.
    assert batchResult.executionResults != null;
    assert batchResult.executionResults.size() == toExecute.size()
        : String.format(Locale.ROOT, "expected [%d] task result%s but was [%d]", toExecute.size(),
        toExecute.size() == 1 ? "" : "s", batchResult.executionResults.size());
    boolean assertsEnabled = false;
    // Intentional side-effecting assert: assertsEnabled stays false when -ea is off.
    assert (assertsEnabled = true);
    if (assertsEnabled) {
        for (UpdateTask<T> updateTask : toExecute) {
            assert batchResult.executionResults.containsKey(updateTask.task) :
                "missing task result for " + updateTask;
        }
    }
    ClusterState newClusterState = batchResult.resultingState;
    final ArrayList<UpdateTask<T>> proccessedListeners = new ArrayList<>();
    // fail all tasks that have failed and extract those that are waiting for results
    for (UpdateTask<T> updateTask : toExecute) {
        assert batchResult.executionResults.containsKey(updateTask.task) : "missing " + updateTask;
        final ClusterStateTaskExecutor.TaskResult executionResult =
            batchResult.executionResults.get(updateTask.task);
        executionResult.handle(
            () -> proccessedListeners.add(updateTask),
            ex -> {
                logger.debug(
                    (Supplier<?>)
                        () -> new ParameterizedMessage("cluster state update task {} failed", updateTask), ex);
                updateTask.listener.onFailure(updateTask.source, ex);
            }
        );
    }
    // Short-circuit when the executor returned the same state instance: nothing
    // to publish, so notify listeners directly and treat acks as satisfied.
    if (previousClusterState == newClusterState) {
        for (UpdateTask<T> task : proccessedListeners) {
            if (task.listener instanceof AckedClusterStateTaskListener) {
                //no need to wait for ack if nothing changed, the update can be counted as acknowledged
                ((AckedClusterStateTaskListener) task.listener).onAllNodesAcked(null);
            }
            task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState);
        }
        TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
        logger.debug("processing [{}]: took [{}] no change in cluster_state", tasksSummary, executionTime);
        warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
        return;
    }
    try {
        ArrayList<Discovery.AckListener> ackListeners = new ArrayList<>();
        if (newClusterState.nodes().isLocalNodeElectedMaster()) {
            // only the master controls the version numbers
            Builder builder = ClusterState.builder(newClusterState).incrementVersion();
            if (previousClusterState.routingTable() != newClusterState.routingTable()) {
                builder.routingTable(RoutingTable.builder(newClusterState.routingTable())
                    .version(newClusterState.routingTable().version() + 1).build());
            }
            if (previousClusterState.metaData() != newClusterState.metaData()) {
                builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1));
            }
            newClusterState = builder.build();
            // Register ack listeners; a zero/absent timeout means "don't wait" and
            // times out immediately.
            for (UpdateTask<T> task : proccessedListeners) {
                if (task.listener instanceof AckedClusterStateTaskListener) {
                    final AckedClusterStateTaskListener ackedListener = (AckedClusterStateTaskListener) task.listener;
                    if (ackedListener.ackTimeout() == null || ackedListener.ackTimeout().millis() == 0) {
                        ackedListener.onAckTimeout();
                    } else {
                        try {
                            ackListeners.add(new AckCountDownListener(ackedListener, newClusterState.version(), newClusterState.nodes(),
                                threadPool));
                        } catch (EsRejectedExecutionException ex) {
                            if (logger.isDebugEnabled()) {
                                logger.debug("Couldn't schedule timeout thread - node might be shutting down", ex);
                            }
                            //timeout straightaway, otherwise we could wait forever as the timeout thread has not started
                            ackedListener.onAckTimeout();
                        }
                    }
                }
            }
        }
        final Discovery.AckListener ackListener = new DelegetingAckListener(ackListeners);
        if (logger.isTraceEnabled()) {
            logger.trace("cluster state updated, source [{}]\n{}", tasksSummary, newClusterState);
        } else if (logger.isDebugEnabled()) {
            logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), tasksSummary);
        }
        ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(tasksSummary, newClusterState, previousClusterState);
        // new cluster state, notify all listeners
        final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta();
        if (nodesDelta.hasChanges() && logger.isInfoEnabled()) {
            String summary = nodesDelta.shortSummary();
            if (summary.length() > 0) {
                logger.info("{}, reason: {}", summary, tasksSummary);
            }
        }
        // Connect to newly added nodes BEFORE publishing, so publication can reach them.
        nodeConnectionsService.connectToNodes(clusterChangedEvent.nodesDelta().addedNodes());
        // if we are the master, publish the new state to all nodes
        // we publish here before we send a notification to all the listeners, since if it fails
        // we don't want to notify
        if (newClusterState.nodes().isLocalNodeElectedMaster()) {
            logger.debug("publishing cluster state version [{}]", newClusterState.version());
            try {
                clusterStatePublisher.accept(clusterChangedEvent, ackListener);
            } catch (Discovery.FailedToCommitClusterStateException t) {
                final long version = newClusterState.version();
                logger.warn(
                    (Supplier<?>) () -> new ParameterizedMessage(
                        "failing [{}]: failed to commit cluster state version [{}]", tasksSummary, version),
                    t);
                // ensure that list of connected nodes in NodeConnectionsService is in-sync with the nodes of the current cluster state
                nodeConnectionsService.disconnectFromNodes(clusterChangedEvent.nodesDelta().addedNodes());
                proccessedListeners.forEach(task -> task.listener.onFailure(task.source, t));
                return;
            }
        }
        // update the current cluster state
        ClusterState finalNewClusterState = newClusterState;
        updateState(css -> new ClusterServiceState(finalNewClusterState, ClusterStateStatus.BEING_APPLIED));
        logger.debug("set local cluster state to version {}", newClusterState.version());
        try {
            // nothing to do until we actually recover from the gateway or any other block indicates we need to disable persistency
            if (clusterChangedEvent.state().blocks().disableStatePersistence() == false && clusterChangedEvent.metaDataChanged()) {
                final Settings incomingSettings = clusterChangedEvent.state().metaData().settings();
                clusterSettings.applySettings(incomingSettings);
            }
        } catch (Exception ex) {
            logger.warn("failed to apply cluster settings", ex);
        }
        // Pre-applied listeners run before the state is marked APPLIED; failures
        // are logged but never abort the application.
        for (ClusterStateListener listener : preAppliedListeners) {
            try {
                logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version());
                listener.clusterChanged(clusterChangedEvent);
            } catch (Exception ex) {
                logger.warn("failed to notify ClusterStateListener", ex);
            }
        }
        // Disconnect from removed nodes AFTER listeners had a chance to see the delta.
        nodeConnectionsService.disconnectFromNodes(clusterChangedEvent.nodesDelta().removedNodes());
        updateState(css -> new ClusterServiceState(css.getClusterState(), ClusterStateStatus.APPLIED));
        for (ClusterStateListener listener : postAppliedListeners) {
            try {
                logger.trace("calling [{}] with change to version [{}]", listener, newClusterState.version());
                listener.clusterChanged(clusterChangedEvent);
            } catch (Exception ex) {
                logger.warn("failed to notify ClusterStateListener", ex);
            }
        }
        //manual ack only from the master at the end of the publish
        if (newClusterState.nodes().isLocalNodeElectedMaster()) {
            try {
                ackListener.onNodeAck(newClusterState.nodes().getLocalNode(), null);
            } catch (Exception e) {
                final DiscoveryNode localNode = newClusterState.nodes().getLocalNode();
                logger.debug(
                    (Supplier<?>) () -> new ParameterizedMessage("error while processing ack for master node [{}]", localNode),
                    e);
            }
        }
        // Finally tell the per-task listeners the batch was processed.
        for (UpdateTask<T> task : proccessedListeners) {
            task.listener.clusterStateProcessed(task.source, previousClusterState, newClusterState);
        }
        try {
            executor.clusterStatePublished(clusterChangedEvent);
        } catch (Exception e) {
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown while notifying executor of new cluster state publication [{}]",
                    tasksSummary),
                e);
        }
        TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
        logger.debug("processing [{}]: took [{}] done applying updated cluster_state (version: {}, uuid: {})", tasksSummary,
            executionTime, newClusterState.version(), newClusterState.stateUUID());
        warnAboutSlowTaskIfNeeded(executionTime, tasksSummary);
    } catch (Exception e) {
        TimeValue executionTime = TimeValue.timeValueMillis(Math.max(0, TimeValue.nsecToMSec(currentTimeInNanos() - startTimeNS)));
        final long version = newClusterState.version();
        final String stateUUID = newClusterState.stateUUID();
        final String fullState = newClusterState.toString();
        logger.warn(
            (Supplier<?>) () -> new ParameterizedMessage(
                "failed to apply updated cluster state in [{}]:\nversion [{}], uuid [{}], source [{}]\n{}",
                executionTime,
                version,
                stateUUID,
                tasksSummary,
                fullState),
            e);
        // TODO: do we want to call updateTask.onFailure here?
    }
}
/** Current time in nanoseconds; overridden in tests so we can control time. */
protected long currentTimeInNanos() {return System.nanoTime();}
/**
 * Wraps a listener so that exceptions thrown by its callbacks are caught and
 * logged instead of propagating into the cluster-state update thread. Acked
 * listeners get the acked-aware wrapper.
 */
private static SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, Logger logger) {
    return listener instanceof AckedClusterStateTaskListener
        ? new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, logger)
        : new SafeClusterStateTaskListener(listener, logger);
}
/**
 * Delegating wrapper that shields the cluster-state update thread from
 * exceptions thrown by listener callbacks: every delegate call is wrapped in
 * try/catch and failures are only logged.
 */
private static class SafeClusterStateTaskListener implements ClusterStateTaskListener {
    private final ClusterStateTaskListener listener;
    private final Logger logger;
    public SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) {
        this.listener = listener;
        this.logger = logger;
    }
    @Override
    public void onFailure(String source, Exception e) {
        try {
            listener.onFailure(source, e);
        } catch (Exception inner) {
            // keep the original failure attached to the listener's own exception
            inner.addSuppressed(e);
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown by listener notifying of failure from [{}]", source), inner);
        }
    }
    @Override
    public void onNoLongerMaster(String source) {
        try {
            listener.onNoLongerMaster(source);
        } catch (Exception e) {
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown by listener while notifying no longer master from [{}]", source), e);
        }
    }
    @Override
    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
        try {
            listener.clusterStateProcessed(source, oldState, newState);
        } catch (Exception e) {
            logger.error(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "exception thrown by listener while notifying of cluster state processed from [{}], old cluster state:\n" +
                        "{}\nnew cluster state:\n{}",
                    source, oldState, newState),
                e);
        }
    }
}
/**
 * Acked variant of {@link SafeClusterStateTaskListener}: additionally shields
 * the ack-related callbacks ({@code onAllNodesAcked}, {@code onAckTimeout})
 * from listener exceptions, logging them instead of propagating.
 */
private static class SafeAckedClusterStateTaskListener extends SafeClusterStateTaskListener implements AckedClusterStateTaskListener {
    private final AckedClusterStateTaskListener listener;
    private final Logger logger;
    public SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) {
        super(listener, logger);
        this.listener = listener;
        this.logger = logger;
    }
    @Override
    public boolean mustAck(DiscoveryNode discoveryNode) {
        return listener.mustAck(discoveryNode);
    }
    @Override
    public void onAllNodesAcked(@Nullable Exception e) {
        try {
            listener.onAllNodesAcked(e);
        } catch (Exception inner) {
            // BUGFIX: e is @Nullable (and is in fact null when an update produced no
            // state change, see the no-change path in runTasksForExecutor) and
            // Throwable.addSuppressed(null) throws NullPointerException, which would
            // mask the listener's own exception. Only suppress a real cause.
            if (e != null) {
                inner.addSuppressed(e);
            }
            logger.error("exception thrown by listener while notifying on all nodes acked", inner);
        }
    }
    @Override
    public void onAckTimeout() {
        try {
            listener.onAckTimeout();
        } catch (Exception e) {
            logger.error("exception thrown by listener while notifying on ack timeout", e);
        }
    }
    @Override
    public TimeValue ackTimeout() {
        return listener.ackTimeout();
    }
}
/**
 * A queued cluster-state update task: pairs the user-supplied task object with
 * its listener and the executor it should be batched with. The {@code processed}
 * flag guarantees a task is only ever executed once even though it may trigger
 * a batch run that drains other tasks for the same executor.
 */
class UpdateTask<T> extends SourcePrioritizedRunnable {
    public final T task;
    public final ClusterStateTaskListener listener;
    private final ClusterStateTaskExecutor<T> executor;
    // set to true by whichever batch run claims this task first
    public final AtomicBoolean processed = new AtomicBoolean();
    UpdateTask(String source, T task, Priority priority, ClusterStateTaskExecutor<T> executor, ClusterStateTaskListener listener) {
        super(priority, source);
        this.task = task;
        this.executor = executor;
        this.listener = listener;
    }
    @Override
    public void run() {
        // if this task is already processed, the executor shouldn't execute other tasks (that arrived later),
        // to give other executors a chance to execute their tasks.
        if (processed.get() == false) {
            runTasksForExecutor(executor);
        }
    }
    @Override
    public String toString() {
        String taskDescription = executor.describeTasks(Collections.singletonList(task));
        if (taskDescription.isEmpty()) {
            return "[" + source + "]";
        } else {
            return "[" + source + "[" + taskDescription + "]]";
        }
    }
}
/**
 * Emits a warning when a cluster-state update took longer than the configured
 * slow-task threshold; no-op otherwise.
 */
private void warnAboutSlowTaskIfNeeded(TimeValue executionTime, String source) {
    if (executionTime.getMillis() <= slowTaskLoggingThreshold.getMillis()) {
        return;
    }
    logger.warn("cluster state update task [{}] took [{}] above the warn threshold of {}", source, executionTime,
        slowTaskLoggingThreshold);
}
/**
 * Scheduled callback that fires when a {@link TimeoutClusterStateListener}'s
 * wait expires: delivers onTimeout while running, or onClose when the service
 * is shutting down. Cancelable via the captured future.
 */
class NotifyTimeout implements Runnable {
    final TimeoutClusterStateListener listener;
    final TimeValue timeout;
    // set after scheduling; volatile because cancel() may race with run()
    volatile ScheduledFuture future;
    NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) {
        this.listener = listener;
        this.timeout = timeout;
    }
    /** Cancels the pending timeout, if it has been scheduled and not yet fired. */
    public void cancel() {
        FutureUtils.cancel(future);
    }
    @Override
    public void run() {
        if (future != null && future.isCancelled()) {
            return;
        }
        if (lifecycle.stoppedOrClosed()) {
            listener.onClose();
        } else {
            listener.onTimeout(this.timeout);
        }
        // note, we rely on the listener to remove itself in case of timeout if needed
    }
}
/**
 * Tracks whether the local node is the elected master and, on every transition,
 * notifies the registered {@link LocalNodeMasterListener}s on their preferred
 * thread-pool executors.
 */
private static class LocalNodeMasterListeners implements ClusterStateListener {
    private final List<LocalNodeMasterListener> listeners = new CopyOnWriteArrayList<>();
    private final ThreadPool threadPool;
    private volatile boolean master = false;

    private LocalNodeMasterListeners(ThreadPool threadPool) {
        this.threadPool = threadPool;
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        final boolean localNodeMaster = event.localNodeMaster();
        if (!master && localNodeMaster) {
            // gained mastership
            master = true;
            notifyListeners(true);
        } else if (master && !localNodeMaster) {
            // lost mastership
            master = false;
            notifyListeners(false);
        }
    }

    /** Dispatches on/off-master notifications, one per listener, on its named executor. */
    private void notifyListeners(boolean becameMaster) {
        for (LocalNodeMasterListener listener : listeners) {
            Executor executor = threadPool.executor(listener.executorName());
            executor.execute(becameMaster ? new OnMasterRunnable(listener) : new OffMasterRunnable(listener));
        }
    }

    private void add(LocalNodeMasterListener listener) {
        listeners.add(listener);
    }

    private void remove(LocalNodeMasterListener listener) {
        listeners.remove(listener);
    }

    private void clear() {
        listeners.clear();
    }
}
/** Runnable adapter that delivers the "became master" callback to a listener. */
private static class OnMasterRunnable implements Runnable {
    private final LocalNodeMasterListener listener;
    private OnMasterRunnable(LocalNodeMasterListener listener) {
        this.listener = listener;
    }
    @Override
    public void run() {
        listener.onMaster();
    }
}
/** Runnable adapter that delivers the "no longer master" callback to a listener. */
private static class OffMasterRunnable implements Runnable {
    private final LocalNodeMasterListener listener;
    private OffMasterRunnable(LocalNodeMasterListener listener) {
        this.listener = listener;
    }
    @Override
    public void run() {
        listener.offMaster();
    }
}
/**
 * Fans node-ack notifications out to a list of per-task ack listeners.
 * Timeouts are handled individually by each {@code AckCountDownListener},
 * so delegating onTimeout here is deliberately unsupported.
 *
 * NOTE(review): class name has a typo ("Delegeting" for "Delegating"); kept
 * as-is because it is referenced elsewhere in this file.
 */
private static class DelegetingAckListener implements Discovery.AckListener {
    private final List<Discovery.AckListener> listeners;
    private DelegetingAckListener(List<Discovery.AckListener> listeners) {
        this.listeners = listeners;
    }
    @Override
    public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
        for (Discovery.AckListener listener : listeners) {
            listener.onNodeAck(node, e);
        }
    }
    @Override
    public void onTimeout() {
        throw new UnsupportedOperationException("no timeout delegation");
    }
}
/**
 * Counts down node acknowledgements for a single cluster-state update and
 * notifies the wrapped acked listener when either all expected nodes have
 * acked or the ack timeout fires (whichever happens first wins, enforced by
 * the CountDown's atomic fast-forward).
 */
private static class AckCountDownListener implements Discovery.AckListener {
    private static final Logger logger = Loggers.getLogger(AckCountDownListener.class);
    private final AckedClusterStateTaskListener ackedTaskListener;
    private final CountDown countDown;
    private final DiscoveryNodes nodes;
    private final long clusterStateVersion;
    // scheduled timeout; canceled once all expected acks arrive
    private final Future<?> ackTimeoutCallback;
    // most recent ack failure, passed through to onAllNodesAcked
    private Exception lastFailure;
    AckCountDownListener(AckedClusterStateTaskListener ackedTaskListener, long clusterStateVersion, DiscoveryNodes nodes,
                         ThreadPool threadPool) {
        this.ackedTaskListener = ackedTaskListener;
        this.clusterStateVersion = clusterStateVersion;
        this.nodes = nodes;
        int countDown = 0;
        for (DiscoveryNode node : nodes) {
            if (ackedTaskListener.mustAck(node)) {
                countDown++;
            }
        }
        //we always wait for at least 1 node (the master)
        countDown = Math.max(1, countDown);
        logger.trace("expecting {} acknowledgements for cluster_state update (version: {})", countDown, clusterStateVersion);
        this.countDown = new CountDown(countDown);
        // schedule the timeout last, so all other fields are initialized before it can fire
        this.ackTimeoutCallback = threadPool.schedule(ackedTaskListener.ackTimeout(), ThreadPool.Names.GENERIC, new Runnable() {
            @Override
            public void run() {
                onTimeout();
            }
        });
    }
    @Override
    public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
        if (!ackedTaskListener.mustAck(node)) {
            //we always wait for the master ack anyway
            if (!node.equals(nodes.getMasterNode())) {
                return;
            }
        }
        if (e == null) {
            logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion);
        } else {
            this.lastFailure = e;
            logger.debug(
                (Supplier<?>) () -> new ParameterizedMessage(
                    "ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion),
                e);
        }
        if (countDown.countDown()) {
            logger.trace("all expected nodes acknowledged cluster_state update (version: {})", clusterStateVersion);
            FutureUtils.cancel(ackTimeoutCallback);
            ackedTaskListener.onAllNodesAcked(lastFailure);
        }
    }
    @Override
    public void onTimeout() {
        // fastForward returns true only if the count-down had not already completed
        if (countDown.fastForward()) {
            logger.trace("timeout waiting for acknowledgement for cluster_state update (version: {})", clusterStateVersion);
            ackedTaskListener.onAckTimeout();
        }
    }
}
/** Returns the dynamic cluster settings service this cluster service applies updates through. */
public ClusterSettings getClusterSettings() {
    return clusterSettings;
}
/** Returns the (static) node settings this service was created with. */
public Settings getSettings() {
    return settings;
}
}
| |
package org.domeos.framework.api.model.project;
import org.domeos.framework.api.consolemodel.deployment.EnvDraft;
import org.domeos.framework.api.model.project.related.*;
import org.domeos.framework.engine.model.RowModelBase;
import org.domeos.util.StringUtils;
import java.util.List;
import java.util.Map;
/**
* Created by feiliu206363 on 2016/4/4.
*/
/**
 * Build-project model: holds the code-repository configuration, Dockerfile
 * configuration (user-defined or generated), default environment variables and
 * build options for a project, plus validity checks used before persisting.
 *
 * Created by feiliu206363 on 2016/4/4.
 */
public class Project extends RowModelBase {
    private CodeConfiguration codeInfo; // gitlab configuration
    private AutoBuild autoBuildInfo;
    private boolean userDefineDockerfile = false;
    private DockerfileContent dockerfileConfig;
    private CustomDockerfile customDockerfile;
    private UserDefinedDockerfile dockerfileInfo;
    private Map<String, String> confFiles;
    private List<EnvDraft> envConfDefault;
    private ExclusiveBuild exclusiveBuild;
    private int authority;

    public CodeConfiguration getCodeInfo() {
        return codeInfo;
    }

    public void setCodeInfo(CodeConfiguration codeInfo) {
        this.codeInfo = codeInfo;
    }

    public AutoBuild getAutoBuildInfo() {
        return autoBuildInfo;
    }

    public void setAutoBuildInfo(AutoBuild autoBuildInfo) {
        this.autoBuildInfo = autoBuildInfo;
    }

    public boolean isUserDefineDockerfile() {
        return userDefineDockerfile;
    }

    public void setUserDefineDockerfile(boolean userDefineDockerfile) {
        this.userDefineDockerfile = userDefineDockerfile;
    }

    public DockerfileContent getDockerfileConfig() {
        return dockerfileConfig;
    }

    public void setDockerfileConfig(DockerfileContent dockerfileConfig) {
        this.dockerfileConfig = dockerfileConfig;
    }

    public CustomDockerfile getCustomDockerfile() {
        return customDockerfile;
    }

    public void setCustomDockerfile(CustomDockerfile customDockerfile) {
        this.customDockerfile = customDockerfile;
    }

    public UserDefinedDockerfile getDockerfileInfo() {
        return dockerfileInfo;
    }

    public void setDockerfileInfo(UserDefinedDockerfile dockerfileInfo) {
        this.dockerfileInfo = dockerfileInfo;
    }

    public Map<String, String> getConfFiles() {
        return confFiles;
    }

    public void setConfFiles(Map<String, String> confFiles) {
        this.confFiles = confFiles;
    }

    public List<EnvDraft> getEnvConfDefault() {
        return envConfDefault;
    }

    public void setEnvConfDefault(List<EnvDraft> envConfDefault) {
        this.envConfDefault = envConfDefault;
    }

    public int getAuthority() {
        return authority;
    }

    public void setAuthority(int authority) {
        this.authority = authority;
    }

    public ExclusiveBuild getExclusiveBuild() {
        return exclusiveBuild;
    }

    public void setExclusiveBuild(ExclusiveBuild exclusiveBuild) {
        this.exclusiveBuild = exclusiveBuild;
    }

    //for old mysql info to use privilegeBuild
    public void setPrivilegeBuild(ExclusiveBuild privilegeBuild) {
        this.exclusiveBuild = privilegeBuild;
    }

    /**
     * Returns the Dockerfile path relative to the repository root (leading
     * slash stripped), or null if no Dockerfile info (or path) is configured.
     */
    public String dockerfilePathInCodeManager() {
        if (dockerfileInfo == null) {
            return null;
        }
        String path = dockerfileInfo.getDockerfilePath();
        if (path == null) {
            // robustness: previously this NPE'd on a missing path
            return null;
        }
        if (path.startsWith("/")) {
            return path.substring(1);
        }
        return path;
    }

    /**
     * Returns the absolute Dockerfile path for the given ref, prefixing
     * subversion checkouts with "/{project}{ref}"; null if the Dockerfile or
     * code configuration is missing.
     */
    public String dockerfilePath(String ref) {
        if (dockerfileInfo == null || codeInfo == null) {
            return null;
        }
        String dockerfilePath = dockerfileInfo.getDockerfilePath();
        if (!dockerfilePath.startsWith("/")) {
            dockerfilePath = "/" + dockerfilePath;
        }
        if (CodeManager.subversion.equals(codeInfo.getCodeManager())) {
            dockerfilePath = "/" + getName() + ref + dockerfilePath;
        }
        return dockerfilePath;
    }

    /**
     * Returns the absolute build path for the given ref, prefixing subversion
     * checkouts with "/{project}{ref}"; null if the Dockerfile or code
     * configuration is missing.
     */
    public String buildPath(String ref) {
        // robustness: guard against null configs exactly like dockerfilePath does
        // (previously this threw NullPointerException instead)
        if (dockerfileInfo == null || codeInfo == null) {
            return null;
        }
        String buildPath = dockerfileInfo.getBuildPath();
        if (!buildPath.startsWith("/")) {
            buildPath = "/" + buildPath;
        }
        if (CodeManager.subversion.equals(codeInfo.getCodeManager())) {
            buildPath = "/" + getName() + ref + buildPath;
        }
        return buildPath;
    }

    /**
     * Validates this project, delegating to each configured sub-object's
     * checkLegality(). Returns a human-readable error message for the first
     * problem found, or null when the project is valid.
     */
    public String checkLegality() {
        if (StringUtils.isBlank(getName())) {
            return "project name must be set";
        }
        if (!isRegularDockerName(getName())) {
            return "project name must match [a-z0-9]+([._-][a-z0-9]+)*";
        }
        // Each sub-check is evaluated once (the previous version called every
        // checkLegality() twice: once for the test and once for the return).
        if (codeInfo != null) {
            String error = codeInfo.checkLegality();
            if (!StringUtils.isBlank(error)) {
                return error;
            }
        }
        if (dockerfileInfo != null) {
            String error = dockerfileInfo.checkLegality();
            if (!StringUtils.isBlank(error)) {
                return error;
            }
        }
        if (autoBuildInfo != null) {
            if (codeInfo == null) {
                return "code info is null, cannot set auto build info";
            }
            if ((autoBuildInfo.getBranches() == null || autoBuildInfo.getBranches().isEmpty())
                    && autoBuildInfo.getTag() <= 0) {
                return "Auto build info is null, cannot set auto build info";
            }
        }
        if (dockerfileConfig != null) {
            String error = dockerfileConfig.checkLegality();
            if (!StringUtils.isBlank(error)) {
                return error;
            }
        }
        if (customDockerfile != null) {
            String error = customDockerfile.checkLegality();
            if (!StringUtils.isBlank(error)) {
                return error;
            }
        }
        if (exclusiveBuild != null) {
            String error = exclusiveBuild.checkLegality();
            if (!StringUtils.isBlank(error)) {
                return error;
            }
        }
        if (envConfDefault != null && !envConfDefault.isEmpty()) {
            for (EnvDraft draft : envConfDefault) {
                String error = draft.checkLegality();
                if (!StringUtils.isBlank(error)) {
                    return error;
                }
            }
        }
        return null;
    }

    /**
     * Returns true when every slash-separated segment of the name matches the
     * docker image-name pattern; false on any mismatch or unexpected error.
     */
    public static boolean isRegularDockerName(String name) {
        try {
            String[] parts = name.split("/");
            for (String part : parts) {
                if (!StringUtils.checkImageNamePattern(part)) {
                    return false;
                }
            }
            return true;
        } catch (Exception e) {
            return false;
        }
    }
}
| |
package com.atlassian.plugin.event.impl;
import junit.framework.TestCase;
import com.atlassian.plugin.event.impl.DefaultPluginEventManager;
import com.atlassian.plugin.event.impl.ListenerMethodSelector;
import com.atlassian.plugin.event.impl.MethodNameListenerMethodSelector;
import com.atlassian.plugin.event.PluginEventListener;
import com.atlassian.event.api.EventListener;
import java.lang.reflect.Method;
public class TestPluginEventManagerImpl extends TestCase
{
private DefaultPluginEventManager eventManager;
public void setUp()
{
eventManager = new DefaultPluginEventManager();
}
public void tearDown()
{
eventManager = null;
}
public void testRegister()
{
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast(new Object());
assertEquals(1, methodTestListener.called);
}
public void testRegisterWithBroadcastSupertype()
{
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast(new String());
assertEquals(1, methodTestListener.called);
}
public void testRegisterWithFooBroadcastSupertype()
{
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast(new MethodTestListener());
assertEquals(1, methodTestListener.fooCalled);
assertEquals(1, methodTestListener.called);
}
public void testRegisterTwice()
{
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.register(methodTestListener);
eventManager.broadcast(new Object());
assertEquals(1, methodTestListener.called);
}
public void testRegisterWithBadListener()
{
BadListener l = new BadListener();
try
{
eventManager.register(l);
fail();
}
catch (IllegalArgumentException ex)
{
// test passed
}
assertEquals(0, l.called);
}
public void testRegisterWithCustomSelector()
{
eventManager = new DefaultPluginEventManager(new ListenerMethodSelector[]{
new ListenerMethodSelector() {
public boolean isListenerMethod(Method method)
{
return "onEvent".equals(method.getName());
}
}
});
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast("jim");
assertEquals(1, methodTestListener.jimCalled);
}
public void testRegisterWithOverlappingSelectorsBroadcastsTwoMessages()
{
eventManager = new DefaultPluginEventManager(new ListenerMethodSelector[]{
new MethodNameListenerMethodSelector(), new MethodNameListenerMethodSelector()});
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast(new Object());
assertEquals(2, methodTestListener.called);
}
public void testRegisterWithCustom()
{
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast(new Object());
assertEquals(1, methodTestListener.called);
}
public void testRegisterAnnotatedListener()
{
AnnotationTestListener listener = new AnnotationTestListener();
eventManager.register(listener);
eventManager.broadcast(new Object());
assertEquals(1, listener.eventListenerCalled);
assertEquals(1, listener.pluginEventListenerCalled);
}
public void testUnregister()
{
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast(new Object());
eventManager.unregister(methodTestListener);
eventManager.broadcast(new Object());
assertEquals(1, methodTestListener.called);
}
public void testSuperEvent()
{
MethodTestListener methodTestListener = new MethodTestListener();
eventManager.register(methodTestListener);
eventManager.broadcast(new MethodTestListener());
assertEquals(1, methodTestListener.called);
}
public void testRegisterNull()
{
try
{
eventManager.register(null);
fail("should have thrown exception");
}
catch (IllegalArgumentException ex)
{
// test passed
}
}
public void testUnregisterNull()
{
try
{
eventManager.unregister(null);
fail("should have thrown an exception");
}
catch (IllegalArgumentException e)
{
// passes
}
}
public static class AnnotationTestListener
{
int pluginEventListenerCalled = 0;
int eventListenerCalled = 0;
@PluginEventListener
public void doEventOld(Object obj)
{
++pluginEventListenerCalled;
}
@EventListener
public void doEventNew(Object obj)
{
++eventListenerCalled;
}
}
public static class MethodTestListener
{
int called = 0;
int fooCalled = 0;
int jimCalled = 0;
public void channel(Object obj)
{
called++;
}
public void channel(MethodTestListener obj)
{
fooCalled++;
}
public void onEvent(String o)
{
jimCalled++;
}
}
public static class BadListener
{
int called = 0;
public void somemethod() {
called++;
}
}
}
| |
/* Copyright (c) 2006, Sun Microsystems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Sun Microsystems, Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.javacc.jjdoc;
import org.javacc.parser.CharStream;
import org.javacc.parser.JavaCCErrors;
import org.javacc.parser.JavaCCParser;
import org.javacc.parser.JavaCCScanner;
import org.javacc.parser.JavaCCState;
import org.javacc.parser.MetaParseException;
import org.javacc.parser.ParseException;
import org.javacc.utils.Tools;
import org.javacc.utils.io.IndentingPrintWriter;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStreamReader;
public final class Main {
public static void main(String[] args) throws Exception {
System.exit(mainProgram(args));
}
public static int mainProgram(String[] args) throws Exception {
org.javacc.parser.Main.reInitAll();
JJDocOptions.init();
Tools.bannerLine("Documentation Generator", "0.1.4");
if (args.length == 0) {
usage();
return 1;
}
else {
JJDocGlobals.info("(type \"jjdoc\" with no arguments for help)");
}
if (JJDocOptions.isOption(args[args.length - 1])) {
JJDocGlobals.error("Last argument \"" + args[args.length - 1] + "\" is not a filename or \"-\". ");
return 1;
}
for (int arg = 0; arg < args.length - 1; arg++) {
if (!JJDocOptions.isOption(args[arg])) {
JJDocGlobals.error("Argument \"" + args[arg] + "\" must be an option setting. ");
return 1;
}
JJDocOptions.setCmdLineOption(args[arg]);
}
JavaCCParser parser;
if (args[args.length - 1].equals("-")) {
JJDocGlobals.info("Reading from standard input . . .");
parser = new JavaCCParser(
new JavaCCScanner(
new CharStream.Escaping(
new CharStream.ForReader(
new InputStreamReader(System.in)))));
JJDocGlobals.inputFile = "standard input";
JJDocGlobals.outputFile = "standard output";
}
else {
JJDocGlobals.info("Reading from file " + args[args.length - 1] + " . . .");
try {
File fp = new File(args[args.length - 1]);
if (!fp.exists()) {
JJDocGlobals.error("File " + args[args.length - 1] + " not found.");
return 1;
}
if (fp.isDirectory()) {
JJDocGlobals.error(args[args.length - 1] + " is a directory. Please use a valid file name.");
return 1;
}
JJDocGlobals.inputFile = fp.getName();
BufferedReader reader = new BufferedReader(
new InputStreamReader(
new FileInputStream(args[args.length - 1]),
JJDocOptions.getGrammarEncoding()));
parser = new JavaCCParser(
new JavaCCScanner(
new CharStream.Escaping(
new CharStream.ForReader(reader))));
}
catch (SecurityException se) {
JJDocGlobals.error("Security violation while trying to open " + args[args.length - 1]);
return 1;
}
catch (FileNotFoundException e) {
JJDocGlobals.error("File " + args[args.length - 1] + " not found.");
return 1;
}
}
try {
JavaCCState state = new JavaCCState();
parser.setState(state);
parser.start();
IndentingPrintWriter out = JJDocGlobals.createOutputStream();
try {
JJDoc doc = new JJDoc(state, JJDocGlobals.createFormatter(out));
doc.start();
}
finally {
out.close();
}
if (JavaCCErrors.getErrorCount() == 0) {
if (JavaCCErrors.getWarningCount() == 0) {
JJDocGlobals.info("Grammar documentation generated successfully in " + JJDocGlobals.outputFile);
}
else {
JJDocGlobals.info("Grammar documentation generated with 0 errors and "
+ JavaCCErrors.getWarningCount() + " warnings.");
}
return 0;
}
else {
JJDocGlobals.error("Detected " + JavaCCErrors.getErrorCount() + " errors and "
+ JavaCCErrors.getWarningCount() + " warnings.");
return JavaCCErrors.getErrorCount() == 0 ? 0 : 1;
}
}
catch (MetaParseException ex) {
JJDocGlobals.error(ex.toString());
JJDocGlobals.error("Detected " + JavaCCErrors.getErrorCount() + " errors and "
+ JavaCCErrors.getWarningCount() + " warnings.");
return 1;
}
catch (ParseException ex) {
JJDocGlobals.error(ex.toString());
JJDocGlobals.error("Detected " + (JavaCCErrors.getErrorCount() + 1) + " errors and "
+ JavaCCErrors.getWarningCount() + " warnings.");
return 1;
}
}
private static void usage() {
JJDocGlobals.info("");
JJDocGlobals.info(" jjdoc option-settings - (to read from standard input)");
JJDocGlobals.info("OR");
JJDocGlobals.info(" jjdoc option-settings inputfile (to read from a file)");
JJDocGlobals.info("");
JJDocGlobals.info("WHERE");
JJDocGlobals.info(" \"option-settings\" is a sequence of settings separated by spaces.");
JJDocGlobals.info("");
JJDocGlobals.info("Each option setting must be of one of the following forms:");
JJDocGlobals.info("");
JJDocGlobals.info(" -optionname=value (e.g., -TEXT=false)");
JJDocGlobals.info(" -optionname:value (e.g., -TEXT:false)");
JJDocGlobals.info(" -optionname (equivalent to -optionname=true. e.g., -TEXT)");
JJDocGlobals.info(" -NOoptionname (equivalent to -optionname=false. e.g., -NOTEXT)");
JJDocGlobals.info("");
JJDocGlobals.info("Option settings are not case-sensitive, so one can say \"-nOtExT\" instead");
JJDocGlobals.info("of \"-NOTEXT\". Option values must be appropriate for the corresponding");
JJDocGlobals.info("option, and must be either an integer, boolean or string value.");
JJDocGlobals.info("");
JJDocGlobals.info("The string valued options are:");
JJDocGlobals.info("");
JJDocGlobals.info(" OUTPUT_FILE");
JJDocGlobals.info(" CSS");
JJDocGlobals.info("");
JJDocGlobals.info("The boolean valued options are:");
JJDocGlobals.info("");
JJDocGlobals.info(" ONE_TABLE (default true)");
JJDocGlobals.info(" TEXT (default false)");
JJDocGlobals.info(" BNF (default false)");
JJDocGlobals.info("");
JJDocGlobals.info("");
JJDocGlobals.info("EXAMPLES:");
JJDocGlobals.info(" jjdoc -ONE_TABLE=false mygrammar.jj");
JJDocGlobals.info(" jjdoc - < mygrammar.jj");
JJDocGlobals.info("");
JJDocGlobals.info("ABOUT JJDoc:");
JJDocGlobals.info(" JJDoc generates JavaDoc documentation from JavaCC grammar files.");
JJDocGlobals.info("");
JJDocGlobals.info(" For more information, see the online JJDoc documentation at");
JJDocGlobals.info(" https://javacc.dev.java.net/doc/JJDoc.html");
}
}
| |
/**
* Copyright 2005-2011 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.impl.peopleflow;
import org.junit.Test;
import org.kuali.rice.core.api.delegation.DelegationType;
import org.kuali.rice.core.api.membership.MemberType;
import org.kuali.rice.kew.api.action.ActionRequestPolicy;
import org.kuali.rice.kew.impl.type.KewAttributeDefinitionBo;
import org.kuali.rice.kew.impl.type.KewTypeAttributeBo;
import org.kuali.rice.kew.impl.type.KewTypeBo;
import org.kuali.rice.kew.responsibility.service.ResponsibilityIdService;
import org.kuali.rice.kew.service.KEWServiceLocator;
import org.kuali.rice.kew.test.KEWTestCase;
import org.kuali.rice.krad.service.BusinessObjectService;
import org.kuali.rice.krad.service.KRADServiceLocator;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
/**
 * Tests the basic persistence of business objects related to PeopleFlows:
 * KEW types (with attributes), PeopleFlows, their members, and member
 * delegates.
 */
public class PeopleFlowBoTest extends KEWTestCase {

    private BusinessObjectService boService;
    private ResponsibilityIdService responsibilityIdService;

    /** Looks up the services used by every test before each test method runs. */
    @org.junit.Before
    public void setupBoService() {
        boService = KRADServiceLocator.getBusinessObjectService();
        responsibilityIdService = KEWServiceLocator.getResponsibilityIdService();
    }

    /**
     * Persists a minimal KEW type, then verifies that saving an identical
     * (name, namespace) pair violates the unique constraint.
     */
    @Test
    public void testKewTypeBoBasicPersist() {
        KewTypeBoBuilder builder = new KewTypeBoBuilder("testType", "testNamespace");
        boService.save(builder.build());
        try {
            // same info again should be a no go
            boService.save(builder.build());
            fail("this should violate unique constraints");
        } catch (Exception e) {
            // expected: the duplicate save must be rejected by the database
        }
    }

    /**
     * Persists a KEW type together with three attribute definitions and
     * their type-attribute links.
     */
    @Test
    public void testKewTypeBoFullPersist() {
        KewTypeBoBuilder builder = new KewTypeBoBuilder("testType", "testNamespace").setServiceName("testService");
        KewTypeBo kewTypeBo = builder.build();
        for (int i=1; i<=3; i++) {
            KewAttributeDefinitionBo attributeDefn = new KewAttributeDefinitionBo();
            attributeDefn.setName("attrDef"+i);
            attributeDefn.setDescription("this is a description of attrDef" + i);
            attributeDefn.setComponentName("componentName" + i);
            attributeDefn.setLabel("label" + i);
            attributeDefn.setNamespace(kewTypeBo.getNamespace());
            // attribute definitions must exist before they can be linked
            boService.save(attributeDefn);
            KewTypeAttributeBo typeAttribute = new KewTypeAttributeBo();
            typeAttribute.setSequenceNumber(i);
            typeAttribute.setAttributeDefinition(attributeDefn);
            kewTypeBo.getAttributes().add(typeAttribute);
        }
        boService.save(kewTypeBo);
    }

    /**
     * Verifies that person-typed members and delegates resolve their
     * backing Person from the configured member id.
     */
    @Test
    public void testPeopleFlowPersonMembers() {
        PeopleFlowMemberBo peopleFlowMember = new PeopleFlowMemberBo();
        peopleFlowMember.setMemberType(MemberType.PRINCIPAL);
        peopleFlowMember.setMemberId("admin");
        peopleFlowMember.setPriority(1);
        peopleFlowMember.setResponsibilityId(responsibilityIdService.getNewResponsibilityId());
        assertNotNull(peopleFlowMember.getPerson());
        assertEquals("admin", peopleFlowMember.getPerson().getPrincipalName());

        PeopleFlowDelegateBo peopleFlowDelegate = new PeopleFlowDelegateBo();
        peopleFlowDelegate.setMemberType(MemberType.PRINCIPAL);
        peopleFlowDelegate.setMemberId("admin");
        peopleFlowDelegate.setDelegationTypeCode(DelegationType.PRIMARY.getCode());
        peopleFlowDelegate.setResponsibilityId(responsibilityIdService.getNewResponsibilityId());
        assertNotNull(peopleFlowDelegate.getPerson());
        assertEquals("admin", peopleFlowDelegate.getPerson().getPrincipalName());
    }

    /**
     * Builds a full PeopleFlow (attribute, member, two delegates), persists
     * it, reloads it by primary key, and checks every persisted field.
     */
    @Test
    public void testPeopleFlowBoPersist() {
        // Reuses the full KEW-type persist test to seed the type this flow references.
        testKewTypeBoFullPersist();
        Map<String,String> keysMap = new HashMap<String, String>();
        keysMap.put("name", "testType");
        keysMap.put("namespace", "testNamespace");
        KewTypeBo kewTypeBo = boService.findByPrimaryKey(KewTypeBo.class, keysMap);

        // minimal peopleflow
        PeopleFlowBo peopleFlowBo = new PeopleFlowBo();
        peopleFlowBo.setDescription("description of testPeopleFlow");
        peopleFlowBo.setName("testPeopleFlow");
        peopleFlowBo.setNamespaceCode("testNamespace");
        peopleFlowBo.setTypeId(kewTypeBo.getId());
        boService.save(peopleFlowBo);

        // fill out peopleflow
        KewTypeAttributeBo attribute = kewTypeBo.getAttributes().get(0);
        PeopleFlowAttributeBo peopleFlowAttr = new PeopleFlowAttributeBo();
        peopleFlowAttr.setAttributeDefinition(attribute.getAttributeDefinition());
        peopleFlowAttr.setPeopleFlowId(peopleFlowBo.getId());
        peopleFlowAttr.setValue("testAttrValue");
        peopleFlowBo.getAttributeBos().add(peopleFlowAttr);

        PeopleFlowMemberBo peopleFlowMember = new PeopleFlowMemberBo();
        peopleFlowMember.setMemberType(MemberType.PRINCIPAL);
        peopleFlowMember.setMemberId("admin");
        peopleFlowMember.setPriority(1);
        peopleFlowMember.setResponsibilityId(responsibilityIdService.getNewResponsibilityId());
        peopleFlowBo.getMembers().add(peopleFlowMember);

        PeopleFlowDelegateBo peopleFlowDelegate1 = new PeopleFlowDelegateBo();
        peopleFlowDelegate1.setMemberType(MemberType.GROUP);
        peopleFlowDelegate1.setMemberId("1");
        peopleFlowDelegate1.setDelegationTypeCode(DelegationType.PRIMARY.getCode());
        peopleFlowDelegate1.setResponsibilityId(responsibilityIdService.getNewResponsibilityId());
        peopleFlowMember.getDelegates().add(peopleFlowDelegate1);

        PeopleFlowDelegateBo peopleFlowDelegate2 = new PeopleFlowDelegateBo();
        peopleFlowDelegate2.setMemberType(MemberType.ROLE);
        peopleFlowDelegate2.setMemberId("2");
        peopleFlowDelegate2.setActionRequestPolicyCode(ActionRequestPolicy.FIRST.getCode());
        peopleFlowDelegate2.setDelegationTypeCode(DelegationType.SECONDARY.getCode());
        peopleFlowDelegate2.setResponsibilityId(responsibilityIdService.getNewResponsibilityId());
        peopleFlowMember.getDelegates().add(peopleFlowDelegate2);

        boService.save(peopleFlowBo);
        assertNotNull(peopleFlowBo.getId());

        peopleFlowBo = boService.findBySinglePrimaryKey(PeopleFlowBo.class, peopleFlowBo.getId());
        assertNotNull(peopleFlowBo);
        assertNotNull(peopleFlowBo.getId());
        // assertEquals gives a useful failure message, unlike assertTrue(size() == 1)
        assertEquals(1, peopleFlowBo.getMembers().size());

        PeopleFlowMemberBo memberBo = peopleFlowBo.getMembers().get(0);
        assertNotNull(memberBo.getId());
        assertEquals(peopleFlowBo.getId(), memberBo.getPeopleFlowId());
        assertEquals("admin", memberBo.getMemberId());
        assertEquals(MemberType.PRINCIPAL, memberBo.getMemberType());
        assertNotNull(memberBo.getPerson());
        assertEquals("admin", memberBo.getPerson().getPrincipalName());
        assertEquals(peopleFlowMember.getResponsibilityId(), memberBo.getResponsibilityId());
        // was assertSame(1, ...): reference identity on autoboxed Integers only
        // "works" because of the Integer cache; compare by value instead.
        assertEquals(Integer.valueOf(1), Integer.valueOf(memberBo.getPriority()));
        assertEquals(2, memberBo.getDelegates().size());

        PeopleFlowDelegateBo delegateBo1 = memberBo.getDelegates().get(0);
        assertNotNull(delegateBo1.getId());
        assertEquals(memberBo.getId(), delegateBo1.getPeopleFlowMemberId());
        assertEquals("1", delegateBo1.getMemberId());
        assertEquals(MemberType.GROUP, delegateBo1.getMemberType());
        assertEquals(DelegationType.PRIMARY.getCode(), delegateBo1.getDelegationTypeCode());
        assertEquals(peopleFlowDelegate1.getResponsibilityId(), delegateBo1.getResponsibilityId());
        assertNull(delegateBo1.getActionRequestPolicyCode());

        PeopleFlowDelegateBo delegateBo2 = memberBo.getDelegates().get(1);
        assertNotNull(delegateBo2.getId());
        assertEquals(memberBo.getId(), delegateBo2.getPeopleFlowMemberId());
        assertEquals("2", delegateBo2.getMemberId());
        assertEquals(MemberType.ROLE, delegateBo2.getMemberType());
        assertEquals(DelegationType.SECONDARY.getCode(), delegateBo2.getDelegationTypeCode());
        assertEquals(peopleFlowDelegate2.getResponsibilityId(), delegateBo2.getResponsibilityId());
        assertEquals(ActionRequestPolicy.FIRST.getCode(), delegateBo2.getActionRequestPolicyCode());
    }

    /** Builds (but does not persist) a KewTypeBo with only required fields set. */
    public static KewTypeBo buildMinimalKewTypeBo() {
        KewTypeBo kewTypeBo = new KewTypeBo();
        kewTypeBo.setName("TestType");
        kewTypeBo.setNamespace("TestNamespace");
        return kewTypeBo;
    }

    /** Small fluent builder for KewTypeBo test fixtures. */
    private static class KewTypeBoBuilder {
        private boolean active = true;
        private String name;
        private String namespace;
        private String serviceName;

        public KewTypeBoBuilder(String name, String namespace) {
            this.name = name;
            this.namespace = namespace;
        }

        public KewTypeBoBuilder setServiceName(String serviceName) {
            this.serviceName = serviceName;
            return this;
        }

        public KewTypeBoBuilder setName(String name) {
            this.name = name;
            return this;
        }

        public KewTypeBoBuilder setNamespace(String namespace) {
            this.namespace = namespace;
            return this;
        }

        public KewTypeBoBuilder setActive(boolean active) {
            this.active = active;
            return this;
        }

        /** Creates a fresh KewTypeBo from the builder's current state. */
        public KewTypeBo build() {
            KewTypeBo kewTypeBo = new KewTypeBo();
            kewTypeBo.setActive(active);
            kewTypeBo.setName(name);
            kewTypeBo.setNamespace(namespace);
            kewTypeBo.setServiceName(serviceName);
            return kewTypeBo;
        }
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/osconfig/v1beta/osconfig_common.proto
package com.google.cloud.osconfig.v1beta;
public final class Common {
  // Private constructor: Common is only a namespace holder for the generated
  // message types and is never instantiated.
  private Common() {}
  // osconfig_common.proto declares no extensions, so registration is a no-op.
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }
  // Read-only accessor contract shared by FixedOrPercent and its Builder.
  public interface FixedOrPercentOrBuilder
      extends
      // @@protoc_insertion_point(interface_extends:google.cloud.osconfig.v1beta.FixedOrPercent)
      com.google.protobuf.MessageOrBuilder {
    /**
     *
     *
     * <pre>
     * Specifies a fixed value.
     * </pre>
     *
     * <code>int32 fixed = 1;</code>
     *
     * @return Whether the fixed field is set.
     */
    boolean hasFixed();
    /**
     *
     *
     * <pre>
     * Specifies a fixed value.
     * </pre>
     *
     * <code>int32 fixed = 1;</code>
     *
     * @return The fixed.
     */
    int getFixed();
    /**
     *
     *
     * <pre>
     * Specifies the relative value defined as a percentage, which will be
     * multiplied by a reference value.
     * </pre>
     *
     * <code>int32 percent = 2;</code>
     *
     * @return Whether the percent field is set.
     */
    boolean hasPercent();
    /**
     *
     *
     * <pre>
     * Specifies the relative value defined as a percentage, which will be
     * multiplied by a reference value.
     * </pre>
     *
     * <code>int32 percent = 2;</code>
     *
     * @return The percent.
     */
    int getPercent();

    /** Identifies which member of the {@code mode} oneof is set, if any. */
    public com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.ModeCase getModeCase();
  }
/**
*
*
* <pre>
* Message encapsulating a value that can be either absolute ("fixed") or
* relative ("percent") to a value.
* </pre>
*
* Protobuf type {@code google.cloud.osconfig.v1beta.FixedOrPercent}
*/
public static final class FixedOrPercent extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.osconfig.v1beta.FixedOrPercent)
FixedOrPercentOrBuilder {
private static final long serialVersionUID = 0L;
    // Use FixedOrPercent.newBuilder() to construct.
    private FixedOrPercent(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    // Default instance: neither oneof member set (modeCase_ stays 0).
    private FixedOrPercent() {}

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
      return new FixedOrPercent();
    }

    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
      return this.unknownFields;
    }

    // Wire-format parsing constructor (older protobuf-java style): reads
    // tag/value pairs until end of input (tag 0). Fields 1 and 2 are varint
    // int32s that share the mode_ slot; if both appear, the last one wins.
    // Unrecognized fields are preserved in unknownFields.
    private FixedOrPercent(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      this();
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                modeCase_ = 1;
                mode_ = input.readInt32();
                break;
              }
            case 16:
              {
                modeCase_ = 2;
                mode_ = input.readInt32();
                break;
              }
            default:
              {
                if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                  done = true;
                }
                break;
              }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was parsed so far, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }

    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.osconfig.v1beta.Common
          .internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.osconfig.v1beta.Common
          .internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.class,
              com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.Builder.class);
    }
    // Which member of the `mode` oneof is populated: 0 = none, 1 = fixed, 2 = percent.
    private int modeCase_ = 0;
    // Payload for the active oneof member; a boxed Integer when set (see the
    // casts in getFixed/getPercent), null when modeCase_ == 0.
    private java.lang.Object mode_;

    // Enum view of modeCase_, one constant per oneof member plus MODE_NOT_SET.
    public enum ModeCase
        implements
            com.google.protobuf.Internal.EnumLite,
            com.google.protobuf.AbstractMessage.InternalOneOfEnum {
      FIXED(1),
      PERCENT(2),
      MODE_NOT_SET(0);
      private final int value;

      private ModeCase(int value) {
        this.value = value;
      }
      /**
       * @param value The number of the enum to look for.
       * @return The enum associated with the given number.
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static ModeCase valueOf(int value) {
        return forNumber(value);
      }

      // Returns null (not MODE_NOT_SET) for numbers outside {0, 1, 2}.
      public static ModeCase forNumber(int value) {
        switch (value) {
          case 1:
            return FIXED;
          case 2:
            return PERCENT;
          case 0:
            return MODE_NOT_SET;
          default:
            return null;
        }
      }

      public int getNumber() {
        return this.value;
      }
    };

    /** Identifies which member of the {@code mode} oneof is set, if any. */
    public ModeCase getModeCase() {
      return ModeCase.forNumber(modeCase_);
    }
    // Field numbers as declared in osconfig_common.proto.
    public static final int FIXED_FIELD_NUMBER = 1;
    /**
     *
     *
     * <pre>
     * Specifies a fixed value.
     * </pre>
     *
     * <code>int32 fixed = 1;</code>
     *
     * @return Whether the fixed field is set.
     */
    @java.lang.Override
    public boolean hasFixed() {
      return modeCase_ == 1;
    }
    /**
     *
     *
     * <pre>
     * Specifies a fixed value.
     * </pre>
     *
     * <code>int32 fixed = 1;</code>
     *
     * @return The fixed.
     */
    @java.lang.Override
    public int getFixed() {
      // mode_ is a boxed Integer whenever modeCase_ == 1; 0 is the proto3
      // default returned when the field is not set.
      if (modeCase_ == 1) {
        return (java.lang.Integer) mode_;
      }
      return 0;
    }

    public static final int PERCENT_FIELD_NUMBER = 2;
    /**
     *
     *
     * <pre>
     * Specifies the relative value defined as a percentage, which will be
     * multiplied by a reference value.
     * </pre>
     *
     * <code>int32 percent = 2;</code>
     *
     * @return Whether the percent field is set.
     */
    @java.lang.Override
    public boolean hasPercent() {
      return modeCase_ == 2;
    }
    /**
     *
     *
     * <pre>
     * Specifies the relative value defined as a percentage, which will be
     * multiplied by a reference value.
     * </pre>
     *
     * <code>int32 percent = 2;</code>
     *
     * @return The percent.
     */
    @java.lang.Override
    public int getPercent() {
      if (modeCase_ == 2) {
        return (java.lang.Integer) mode_;
      }
      return 0;
    }
    // Memoized initialization check: -1 = not yet computed, 1 = initialized,
    // 0 = not initialized. This message has no required fields, so once
    // computed it is always 1.
    private byte memoizedIsInitialized = -1;

    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      memoizedIsInitialized = 1;
      return true;
    }

    // Writes whichever oneof member is set, then any preserved unknown fields.
    @java.lang.Override
    public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
      if (modeCase_ == 1) {
        output.writeInt32(1, (int) ((java.lang.Integer) mode_));
      }
      if (modeCase_ == 2) {
        output.writeInt32(2, (int) ((java.lang.Integer) mode_));
      }
      unknownFields.writeTo(output);
    }

    // Computes (and memoizes in memoizedSize) the serialized byte size.
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
      if (modeCase_ == 1) {
        size +=
            com.google.protobuf.CodedOutputStream.computeInt32Size(
                1, (int) ((java.lang.Integer) mode_));
      }
      if (modeCase_ == 2) {
        size +=
            com.google.protobuf.CodedOutputStream.computeInt32Size(
                2, (int) ((java.lang.Integer) mode_));
      }
      size += unknownFields.getSerializedSize();
      memoizedSize = size;
      return size;
    }

    // Value equality: same oneof case, same active member value, and equal
    // unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof com.google.cloud.osconfig.v1beta.Common.FixedOrPercent)) {
        return super.equals(obj);
      }
      com.google.cloud.osconfig.v1beta.Common.FixedOrPercent other =
          (com.google.cloud.osconfig.v1beta.Common.FixedOrPercent) obj;
      if (!getModeCase().equals(other.getModeCase())) return false;
      switch (modeCase_) {
        case 1:
          if (getFixed() != other.getFixed()) return false;
          break;
        case 2:
          if (getPercent() != other.getPercent()) return false;
          break;
        case 0:
        default:
      }
      if (!unknownFields.equals(other.unknownFields)) return false;
      return true;
    }

    // Hash mixes the descriptor, the active oneof member, and unknown fields;
    // memoized after the first computation (consistent with equals above).
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      switch (modeCase_) {
        case 1:
          hash = (37 * hash) + FIXED_FIELD_NUMBER;
          hash = (53 * hash) + getFixed();
          break;
        case 2:
          hash = (37 * hash) + PERCENT_FIELD_NUMBER;
          hash = (53 * hash) + getPercent();
          break;
        case 0:
        default:
      }
      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard generated parsing entry points; all delegate to PARSER
    // (declared further down in this class, outside this span).
    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }

    // Delimited variants read a leading varint length prefix before the message.
    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseDelimitedFrom(
        java.io.InputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
          PARSER, input, extensionRegistry);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        com.google.protobuf.CodedInputStream input) throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
    }

    public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
          PARSER, input, extensionRegistry);
    }

    // Builder factories: new builders start from the default instance;
    // newBuilder(prototype) pre-populates from an existing message.
    @java.lang.Override
    public Builder newBuilderForType() {
      return newBuilder();
    }

    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }

    public static Builder newBuilder(
        com.google.cloud.osconfig.v1beta.Common.FixedOrPercent prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }

    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
*
*
* <pre>
* Message encapsulating a value that can be either absolute ("fixed") or
* relative ("percent") to a value.
* </pre>
*
* Protobuf type {@code google.cloud.osconfig.v1beta.FixedOrPercent}
*/
public static final class Builder
extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.osconfig.v1beta.FixedOrPercent)
com.google.cloud.osconfig.v1beta.Common.FixedOrPercentOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
        return com.google.cloud.osconfig.v1beta.Common
            .internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_descriptor;
      }

      @java.lang.Override
      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.google.cloud.osconfig.v1beta.Common
            .internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.class,
                com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.Builder.class);
      }

      // Construct using com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }

      // No message/repeated fields here, so there is nothing to eagerly
      // initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
      }

      // Resets the builder to the default state (no oneof member set).
      @java.lang.Override
      public Builder clear() {
        super.clear();
        modeCase_ = 0;
        mode_ = null;
        return this;
      }

      @java.lang.Override
      public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
        return com.google.cloud.osconfig.v1beta.Common
            .internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_descriptor;
      }

      @java.lang.Override
      public com.google.cloud.osconfig.v1beta.Common.FixedOrPercent getDefaultInstanceForType() {
        return com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.getDefaultInstance();
      }

      // build() enforces isInitialized(); always true for this message type.
      @java.lang.Override
      public com.google.cloud.osconfig.v1beta.Common.FixedOrPercent build() {
        com.google.cloud.osconfig.v1beta.Common.FixedOrPercent result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder's oneof state into a fresh immutable message.
      @java.lang.Override
      public com.google.cloud.osconfig.v1beta.Common.FixedOrPercent buildPartial() {
        com.google.cloud.osconfig.v1beta.Common.FixedOrPercent result =
            new com.google.cloud.osconfig.v1beta.Common.FixedOrPercent(this);
        if (modeCase_ == 1) {
          result.mode_ = mode_;
        }
        if (modeCase_ == 2) {
          result.mode_ = mode_;
        }
        result.modeCase_ = modeCase_;
        onBuilt();
        return result;
      }
      // Generic builder plumbing: all of these delegate to the reflective
      // GeneratedMessageV3.Builder implementations.
      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }

      @java.lang.Override
      public Builder setField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.setField(field, value);
      }

      @java.lang.Override
      public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }

      @java.lang.Override
      public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }

      @java.lang.Override
      public Builder setRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field,
          int index,
          java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }

      @java.lang.Override
      public Builder addRepeatedField(
          com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }

      // Dispatches to the typed merge when the other message is a
      // FixedOrPercent; otherwise falls back to reflective merging.
      @java.lang.Override
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.google.cloud.osconfig.v1beta.Common.FixedOrPercent) {
          return mergeFrom((com.google.cloud.osconfig.v1beta.Common.FixedOrPercent) other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: copies whichever oneof member is set in `other`
      // (overwriting this builder's oneof), plus other's unknown fields.
      public Builder mergeFrom(com.google.cloud.osconfig.v1beta.Common.FixedOrPercent other) {
        if (other == com.google.cloud.osconfig.v1beta.Common.FixedOrPercent.getDefaultInstance())
          return this;
        switch (other.getModeCase()) {
          case FIXED:
            {
              setFixed(other.getFixed());
              break;
            }
          case PERCENT:
            {
              setPercent(other.getPercent());
              break;
            }
          case MODE_NOT_SET:
            {
              break;
            }
        }
        this.mergeUnknownFields(other.unknownFields);
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      // Stream merge: parses a message and merges it in; on a parse error the
      // partially-parsed message is still merged (finally block) before the
      // exception is rethrown as an IOException.
      @java.lang.Override
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.cloud.osconfig.v1beta.Common.FixedOrPercent parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage =
              (com.google.cloud.osconfig.v1beta.Common.FixedOrPercent) e.getUnfinishedMessage();
          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
// Discriminator for the `mode` oneof: 0 = not set, 1 = fixed, 2 = percent.
private int modeCase_ = 0;
// Boxed value of whichever oneof member is active (an Integer for both cases).
private java.lang.Object mode_;

public ModeCase getModeCase() {
  return ModeCase.forNumber(modeCase_);
}

// Resets the oneof to MODE_NOT_SET and drops the stored value.
public Builder clearMode() {
  modeCase_ = 0;
  mode_ = null;
  onChanged();
  return this;
}
// ----- `mode` oneof accessors: fixed (field 1) and percent (field 2) -----

/**
 *
 *
 * <pre>
 * Specifies a fixed value.
 * </pre>
 *
 * <code>int32 fixed = 1;</code>
 *
 * @return Whether the fixed field is set.
 */
public boolean hasFixed() {
  return modeCase_ == 1;
}
/**
 *
 *
 * <pre>
 * Specifies a fixed value.
 * </pre>
 *
 * <code>int32 fixed = 1;</code>
 *
 * @return The fixed.
 */
public int getFixed() {
  if (modeCase_ == 1) {
    // mode_ holds a boxed Integer whenever the fixed case is active.
    return (java.lang.Integer) mode_;
  }
  return 0;
}
/**
 *
 *
 * <pre>
 * Specifies a fixed value.
 * </pre>
 *
 * <code>int32 fixed = 1;</code>
 *
 * @param value The fixed to set.
 * @return This builder for chaining.
 */
public Builder setFixed(int value) {
  // Setting one oneof member implicitly clears the other (case switches to 1).
  modeCase_ = 1;
  mode_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Specifies a fixed value.
 * </pre>
 *
 * <code>int32 fixed = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearFixed() {
  // Only clears when `fixed` is the active case; a set `percent` is untouched.
  if (modeCase_ == 1) {
    modeCase_ = 0;
    mode_ = null;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Specifies the relative value defined as a percentage, which will be
 * multiplied by a reference value.
 * </pre>
 *
 * <code>int32 percent = 2;</code>
 *
 * @return Whether the percent field is set.
 */
public boolean hasPercent() {
  return modeCase_ == 2;
}
/**
 *
 *
 * <pre>
 * Specifies the relative value defined as a percentage, which will be
 * multiplied by a reference value.
 * </pre>
 *
 * <code>int32 percent = 2;</code>
 *
 * @return The percent.
 */
public int getPercent() {
  if (modeCase_ == 2) {
    // mode_ holds a boxed Integer whenever the percent case is active.
    return (java.lang.Integer) mode_;
  }
  return 0;
}
/**
 *
 *
 * <pre>
 * Specifies the relative value defined as a percentage, which will be
 * multiplied by a reference value.
 * </pre>
 *
 * <code>int32 percent = 2;</code>
 *
 * @param value The percent to set.
 * @return This builder for chaining.
 */
public Builder setPercent(int value) {
  // Setting one oneof member implicitly clears the other (case switches to 2).
  modeCase_ = 2;
  mode_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Specifies the relative value defined as a percentage, which will be
 * multiplied by a reference value.
 * </pre>
 *
 * <code>int32 percent = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPercent() {
  // Only clears when `percent` is the active case; a set `fixed` is untouched.
  if (modeCase_ == 2) {
    modeCase_ = 0;
    mode_ = null;
    onChanged();
  }
  return this;
}
// Standard unknown-field passthroughs generated for every protobuf builder;
// they preserve fields from newer schema versions that this binary cannot parse.
@java.lang.Override
public final Builder setUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.osconfig.v1beta.FixedOrPercent)
}
// @@protoc_insertion_point(class_scope:google.cloud.osconfig.v1beta.FixedOrPercent)
// Shared immutable default instance; protobuf messages are immutable, so a
// single instance serves every caller.
private static final com.google.cloud.osconfig.v1beta.Common.FixedOrPercent DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.osconfig.v1beta.Common.FixedOrPercent();
}

public static com.google.cloud.osconfig.v1beta.Common.FixedOrPercent getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stateless parser used by the protobuf runtime to decode FixedOrPercent
// messages from wire format.
private static final com.google.protobuf.Parser<FixedOrPercent> PARSER =
    new com.google.protobuf.AbstractParser<FixedOrPercent>() {
      @java.lang.Override
      public FixedOrPercent parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new FixedOrPercent(input, extensionRegistry);
      }
    };

public static com.google.protobuf.Parser<FixedOrPercent> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<FixedOrPercent> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.osconfig.v1beta.Common.FixedOrPercent getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Reflection tables for FixedOrPercent, initialized from the file descriptor below.
private static final com.google.protobuf.Descriptors.Descriptor
    internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_descriptor;
private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_fieldAccessorTable;

public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
  return descriptor;
}

private static com.google.protobuf.Descriptors.FileDescriptor descriptor;

// Builds the file descriptor from the serialized form of
// osconfig_common.proto and wires up the reflection tables. The embedded
// string is the compiled .proto emitted by protoc and must not be hand-edited.
static {
  java.lang.String[] descriptorData = {
    "\n2google/cloud/osconfig/v1beta/osconfig_"
        + "common.proto\022\034google.cloud.osconfig.v1be"
        + "ta\032\034google/api/annotations.proto\"<\n\016Fixe"
        + "dOrPercent\022\017\n\005fixed\030\001 \001(\005H\000\022\021\n\007percent\030\002"
        + " \001(\005H\000B\006\n\004modeBp\n com.google.cloud.oscon"
        + "fig.v1betaB\006CommonZDgoogle.golang.org/ge"
        + "nproto/googleapis/cloud/osconfig/v1beta;"
        + "osconfigb\006proto3"
  };
  descriptor =
      com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
          descriptorData,
          new com.google.protobuf.Descriptors.FileDescriptor[] {
            com.google.api.AnnotationsProto.getDescriptor(),
          });
  internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_descriptor =
      getDescriptor().getMessageTypes().get(0);
  internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_fieldAccessorTable =
      new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
          internal_static_google_cloud_osconfig_v1beta_FixedOrPercent_descriptor,
          new java.lang.String[] {
            "Fixed", "Percent", "Mode",
          });
  // Force-resolve the dependency so its descriptor pool is initialized.
  com.google.api.AnnotationsProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| |
/*******************************************************************************
* Copyright 2009-2015 Amazon Services. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
*
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at: http://aws.amazon.com/apache2.0
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*******************************************************************************
* Get Lowest Offer Listings For SKU Result
* API Version: 2011-10-01
* Library Version: 2015-02-13
* Generated: Tue Feb 10 14:34:49 PST 2015
*/
package com.amazonservices.mws.products.model;
import javax.xml.bind.annotation.*;
import com.amazonservices.mws.client.*;
/**
 * GetLowestOfferListingsForSKUResult complex type.
 *
 * <p>Generated JAXB model for one per-SKU result element of the MWS Products API
 * GetLowestOfferListingsForSKU response (API version 2011-10-01). Do not
 * hand-edit structure; it mirrors the XML schema below.
 *
 * XML schema:
 *
 * <pre>
 * &lt;complexType name="GetLowestOfferListingsForSKUResult"&gt;
 *   &lt;complexContent&gt;
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
 *       &lt;sequence&gt;
 *         &lt;element name="AllOfferListingsConsidered" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/&gt;
 *         &lt;element name="Product" type="{http://mws.amazonservices.com/schema/Products/2011-10-01}Product" minOccurs="0"/&gt;
 *         &lt;element name="Error" type="{http://mws.amazonservices.com/schema/Products/2011-10-01}Error" minOccurs="0"/&gt;
 *       &lt;/sequence&gt;
 *       &lt;attribute name="SellerSKU" type="{http://www.w3.org/2001/XMLSchema}string"/&gt;
 *       &lt;attribute name="status" use="required" type="{http://www.w3.org/2001/XMLSchema}string"/&gt;
 *     &lt;/restriction&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name="GetLowestOfferListingsForSKUResult", propOrder={
    "allOfferListingsConsidered",
    "product",
    "error"
})
@XmlRootElement(name = "GetLowestOfferListingsForSKUResult")
public class GetLowestOfferListingsForSKUResult extends AbstractMwsObject {

    // Optional element: whether every offer listing was considered for this SKU.
    @XmlElement(name="AllOfferListingsConsidered")
    private Boolean allOfferListingsConsidered;

    // Optional element: the product (with lowest-offer data) for this SKU.
    @XmlElement(name="Product")
    private Product product;

    // Optional element: per-SKU error returned instead of a product.
    @XmlElement(name="Error")
    private Error error;

    // Optional attribute echoing the requested SKU.
    @XmlAttribute
    private String sellerSKU;

    // Required attribute: per-SKU request status (e.g. success/error).
    @XmlAttribute(required=true)
    private String status;

    /**
     * Check the value of AllOfferListingsConsidered.
     *
     * @return true if AllOfferListingsConsidered is set to true (false when unset).
     */
    public boolean isAllOfferListingsConsidered() {
        return allOfferListingsConsidered!=null && allOfferListingsConsidered.booleanValue();
    }

    /**
     * Get the value of AllOfferListingsConsidered.
     *
     * @return The value of AllOfferListingsConsidered.
     */
    public Boolean getAllOfferListingsConsidered() {
        return allOfferListingsConsidered;
    }

    /**
     * Set the value of AllOfferListingsConsidered.
     *
     * @param allOfferListingsConsidered
     *            The new value to set.
     */
    public void setAllOfferListingsConsidered(Boolean allOfferListingsConsidered) {
        this.allOfferListingsConsidered = allOfferListingsConsidered;
    }

    /**
     * Check to see if AllOfferListingsConsidered is set.
     *
     * @return true if AllOfferListingsConsidered is set.
     */
    public boolean isSetAllOfferListingsConsidered() {
        return allOfferListingsConsidered != null;
    }

    /**
     * Set the value of AllOfferListingsConsidered, return this.
     *
     * @param allOfferListingsConsidered
     *            The new value to set.
     *
     * @return This instance.
     */
    public GetLowestOfferListingsForSKUResult withAllOfferListingsConsidered(Boolean allOfferListingsConsidered) {
        this.allOfferListingsConsidered = allOfferListingsConsidered;
        return this;
    }

    /**
     * Get the value of Product.
     *
     * @return The value of Product.
     */
    public Product getProduct() {
        return product;
    }

    /**
     * Set the value of Product.
     *
     * @param product
     *            The new value to set.
     */
    public void setProduct(Product product) {
        this.product = product;
    }

    /**
     * Check to see if Product is set.
     *
     * @return true if Product is set.
     */
    public boolean isSetProduct() {
        return product != null;
    }

    /**
     * Set the value of Product, return this.
     *
     * @param product
     *            The new value to set.
     *
     * @return This instance.
     */
    public GetLowestOfferListingsForSKUResult withProduct(Product product) {
        this.product = product;
        return this;
    }

    /**
     * Get the value of Error.
     *
     * @return The value of Error.
     */
    public Error getError() {
        return error;
    }

    /**
     * Set the value of Error.
     *
     * @param error
     *            The new value to set.
     */
    public void setError(Error error) {
        this.error = error;
    }

    /**
     * Check to see if Error is set.
     *
     * @return true if Error is set.
     */
    public boolean isSetError() {
        return error != null;
    }

    /**
     * Set the value of Error, return this.
     *
     * @param error
     *            The new value to set.
     *
     * @return This instance.
     */
    public GetLowestOfferListingsForSKUResult withError(Error error) {
        this.error = error;
        return this;
    }

    /**
     * Get the value of SellerSKU.
     *
     * @return The value of SellerSKU.
     */
    public String getSellerSKU() {
        return sellerSKU;
    }

    /**
     * Set the value of SellerSKU.
     *
     * @param sellerSKU
     *            The new value to set.
     */
    public void setSellerSKU(String sellerSKU) {
        this.sellerSKU = sellerSKU;
    }

    /**
     * Check to see if SellerSKU is set.
     *
     * @return true if SellerSKU is set.
     */
    public boolean isSetSellerSKU() {
        return sellerSKU != null;
    }

    /**
     * Set the value of SellerSKU, return this.
     *
     * @param sellerSKU
     *            The new value to set.
     *
     * @return This instance.
     */
    public GetLowestOfferListingsForSKUResult withSellerSKU(String sellerSKU) {
        this.sellerSKU = sellerSKU;
        return this;
    }

    /**
     * Get the value of status.
     *
     * @return The value of status.
     */
    public String getStatus() {
        return status;
    }

    /**
     * Set the value of status.
     *
     * @param status
     *            The new value to set.
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * Check to see if status is set.
     *
     * @return true if status is set.
     */
    public boolean isSetStatus() {
        return status != null;
    }

    /**
     * Set the value of status, return this.
     *
     * @param status
     *            The new value to set.
     *
     * @return This instance.
     */
    public GetLowestOfferListingsForSKUResult withStatus(String status) {
        this.status = status;
        return this;
    }

    /**
     * Read members from a MwsReader.
     *
     * <p>Attributes are read before child elements; the element order must
     * match the schema sequence above.
     *
     * @param r
     *            The reader to read from.
     */
    @Override
    public void readFragmentFrom(MwsReader r) {
        sellerSKU = r.readAttribute("SellerSKU", String.class);
        status = r.readAttribute("status", String.class);
        allOfferListingsConsidered = r.read("AllOfferListingsConsidered", Boolean.class);
        product = r.read("Product", Product.class);
        error = r.read("Error", Error.class);
    }

    /**
     * Write members to a MwsWriter.
     *
     * <p>Mirrors readFragmentFrom: attributes first, then elements in schema order.
     *
     * @param w
     *            The writer to write to.
     */
    @Override
    public void writeFragmentTo(MwsWriter w) {
        w.writeAttribute("SellerSKU",sellerSKU);
        w.writeAttribute("status",status);
        w.write("AllOfferListingsConsidered", allOfferListingsConsidered);
        w.write("Product", product);
        w.write("Error", error);
    }

    /**
     * Write tag, xmlns and members to a MwsWriter.
     *
     * @param w
     *            The Writer to write to.
     */
    @Override
    public void writeTo(MwsWriter w) {
        w.write("http://mws.amazonservices.com/schema/Products/2011-10-01", "GetLowestOfferListingsForSKUResult",this);
    }

    /** Value constructor. */
    public GetLowestOfferListingsForSKUResult(Boolean allOfferListingsConsidered,Product product,Error error,String sellerSKU,String status) {
        this.allOfferListingsConsidered = allOfferListingsConsidered;
        this.product = product;
        this.error = error;
        this.sellerSKU = sellerSKU;
        this.status = status;
    }

    /** Default constructor. */
    public GetLowestOfferListingsForSKUResult() {
        super();
    }

}
| |
/**
*
* Copyright 2017 Florian Erhard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package gedi.lfc;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.commons.math3.distribution.BetaDistribution;
import gedi.core.data.annotation.Transcript;
import gedi.core.data.reads.AlignedReadsData;
import gedi.core.data.reads.ContrastMapping;
import gedi.core.processing.old.GenomicRegionProcessor;
import gedi.core.processing.old.OverlapMode;
import gedi.core.processing.old.ProcessorContext;
import gedi.core.region.GenomicRegion;
import gedi.core.region.GenomicRegionStorage;
import gedi.core.region.ImmutableReferenceGenomicRegion;
import gedi.core.region.MissingInformationIntronInformation;
import gedi.core.region.MutableReferenceGenomicRegion;
import gedi.util.ArrayUtils;
import gedi.util.datastructure.tree.redblacktree.IntervalTree;
import gedi.util.datastructure.tree.redblacktree.SimpleInterval;
import gedi.util.io.text.LineOrientedFile;
/**
 * Region processor that accumulates (optionally downsampled) read counts per
 * region and writes either the raw per-condition counts or, in the binary
 * two-condition case, a Beta-posterior log2 fold-change estimate with a
 * credible interval.
 *
 * <p>Output modes (chosen in {@link #begin}): per-read table ({@code allreads}),
 * per-region multi-condition counts ({@code multimode()}), or per-region
 * alpha/beta/credible-interval/log2FC rows.
 */
public class LfcAlignedReadsProcessor implements GenomicRegionProcessor {

    // TODO: once table framework is there, do not produce an output file but write into a table of the context! (a table output processor will then do the trick)

    // Condition contrast applied to raw reads; when `after` is set, `before` is
    // applied first and `after` second (see read()).
    private ContrastMapping before;
    private ContrastMapping after;

    // Strategy for downsampling read counts before accumulation.
    private Downsampling downsampling;

    // Total credible-interval mass excluded (e.g. 0.05 -> 95% interval);
    // NaN forces multi-condition output (see forceMultiMode()).
    private double credi = 0.05;

    // When true, one output row per read instead of per region.
    private boolean allreads = false;

    private LineOrientedFile out;

    // Per-region accumulated counts and per-read scratch buffer, one slot per
    // merged condition.
    double[] total;
    double[] buff;

    // Minimum number of conditions with >0 reads for a read to count; -1 disables.
    private int minCond = -1;

    // Optional transcript annotation used to report compatible transcripts per read.
    private GenomicRegionStorage<Transcript> transcripts;

    public LfcAlignedReadsProcessor(ContrastMapping contrast,
            Downsampling downsampling, LineOrientedFile out) {
        this.before = contrast;
        this.downsampling = downsampling;
        this.out = out;
        // if (contrast.getNumMergedConditions()!=2)
        // throw new RuntimeException("Must be binary contrast!");
    }

    public LfcAlignedReadsProcessor(ContrastMapping before,ContrastMapping after,
            Downsampling downsampling, LineOrientedFile out) {
        this.before = before;
        this.after = after;
        this.downsampling = downsampling;
        this.out = out;
    }

    public LfcAlignedReadsProcessor setAllreads(boolean allreads) {
        this.allreads = allreads;
        return this;
    }

    /**
     * Opens the output file and writes the header matching the active output
     * mode; also sizes the accumulation buffers.
     */
    @Override
    public void begin(ProcessorContext context) throws IOException {
        out.startWriting();
        if (allreads) {
            out.writef("Gene\tLocation\tMode");
            if (minCond>-1)
                out.writef("\twith reads");
            // The effective contrast is the last one applied (after, if present).
            ContrastMapping contr = after==null?before:after;
            for (int i=0; i<contr.getNumMergedConditions(); i++)
                out.writef("\t%s",contr.getMappedName(i));
            out.writeLine(transcripts!=null?"\tTranscripts":"");
            total = new double[contr.getNumMergedConditions()];
            buff = new double[contr.getNumMergedConditions()];
        }
        else if (multimode()){
            out.writef("Gene");
            ContrastMapping contr = after==null?before:after;
            for (int i=0; i<contr.getNumMergedConditions(); i++)
                out.writef("\t%s",contr.getMappedName(i));
            out.writeLine();
            total = new double[contr.getNumMergedConditions()];
            buff = new double[contr.getNumMergedConditions()];
        }
        else{
            // Binary contrast: header carries the credible-interval quantiles.
            out.writef("Gene\talpha\tbeta\t%.3g credible\tlog2 fold change\t%.3g credible\n",0.5*credi,1-0.5*credi);
            total = new double[2];
            buff = new double[2];
        }
    }

    // Setting credi to NaN makes multimode() true regardless of condition count.
    public LfcAlignedReadsProcessor forceMultiMode() {
        setCredible(Double.NaN);
        return this;
    }

    public LfcAlignedReadsProcessor setCredible(double credi) {
        this.credi = credi;
        return this;
    }

    public LfcAlignedReadsProcessor setTranscripts(
            GenomicRegionStorage<Transcript> transcripts) {
        this.transcripts = transcripts;
        return this;
    }

    public GenomicRegionProcessor setMinConditionsWithReads(int minCond) {
        this.minCond = minCond;
        return this;
    }

    // True when fold-change output is impossible: credi is NaN or the effective
    // contrast is not binary.
    private boolean multimode() {
        return Double.isNaN(credi) || (after==null && before.getNumMergedConditions()!=2)||(after!=null && after.getNumMergedConditions()!=2);
    }

    @Override
    public void beginRegion(MutableReferenceGenomicRegion<?> region, ProcessorContext context) {
        Arrays.fill(total, 0);
        Arrays.fill(buff, 0);
    }

    /**
     * Replace the accumulation buffer.
     *
     * <p>NOTE(review): despite the parameter name, this assigns to {@code total}
     * (the per-region accumulator), not {@code buff} (the per-read scratch
     * array) — confirm this is intentional.
     */
    public void setBuffer(double[] buff) {
        this.total = buff;
    }

    /**
     * Accumulates one read's downsampled per-condition counts into {@code total},
     * subject to the minCond filter, and (in allreads mode) writes the per-read row.
     */
    @Override
    public void read(MutableReferenceGenomicRegion<?> region,
            MutableReferenceGenomicRegion<AlignedReadsData> read, ProcessorContext context) throws IOException {
        // if (region.getRegion().contains(read.getRegion())){
        // compute downsampled and add
        // if (read.getRegion().getTotalLength()>100) return;
        if (after==null)
            downsampling.getDownsampled(read.getData(), before, buff);
        else
            downsampling.getDownsampled(read.getData(), before, after, buff);

        // if (read.getData().getNumConditions()==16)
        // System.out.println(read);
        // else
        // for (int i=0; i<read.getData().getNumConditions(); i++) {
        // if (i/8==1 || i/8==3) {
        // if (read.getData().getTotalCount(i)>0) {
        // System.out.print(read.getReference()+":"+read.getRegion()+" [");
        // for (int c=0; c<read.getData().getNumConditions(); c++) {
        // if (c/8==1 || c/8==3) {
        // if (c>8) System.out.print(", ");
        // System.out.print(read.getData().getTotalCount(c));
        // }
        // }
        // System.out.println("] "+read.getData().getMultiplicity(0));
        // break;
        // }
        // }
        // }

        // wr = number of conditions with reads; only counted when minCond>0,
        // so with minCond<=0 every read passes the wr>=minCond gate below.
        int wr = 0;
        if (minCond>0) {
            for (int i=0; i<buff.length; i++)
                if (buff[i]>0) wr++;
        }

        if (wr>=minCond)
            ArrayUtils.add(total, buff);

        if (allreads && wr>=minCond) {
            // Report the first overlap mode that matches this read.
            String mode = "";
            for (OverlapMode m : OverlapMode.values())
                if (m.test(region.getRegion(), context.get(ProcessorContext.EXON_TREE), read.getRegion())) {
                    mode = m.name();
                    break;
                }
            out.writef("%s\t%s:%s\t%s",region.getData(),read.getReference().toString(),read.getRegion().toRegionString(),mode);
            if (minCond>-1)
                out.writef("\t%d",wr);
            for (int i=0; i<buff.length; i++)
                out.writef("\t%.1f",buff[i]);

            // find all compatible transcripts
            if (transcripts!=null) {
                out.writef("\t");
                int n = 0;
                for (ImmutableReferenceGenomicRegion<Transcript> tr : transcripts.getReferenceRegionsIntersecting(read.getReference(), read.getRegion())) {
                    if (compatible(tr.getRegion(),read.getRegion() instanceof MissingInformationIntronInformation?((MissingInformationIntronInformation)read.getRegion()).getInformationGenomicRegions():new GenomicRegion[] {read.getRegion()}))
                        out.writef(n++>0?",%s":"%s", tr.getData().getTranscriptId());
                }
            }
            out.writeLine();
        }
        // System.out.println(Arrays.toString(buff)+"\t"+read.getReference()+":"+read.getRegion()+"\t"+read.getData());
        // }
    }

    // A transcript is compatible if every informative part of the read is
    // contained (unspliced) in the transcript region.
    private boolean compatible(GenomicRegion region, GenomicRegion[] info) {
        for (GenomicRegion i : info)
            if (!region.containsUnspliced(i))
                return false;
        return true;
    }

    /**
     * Writes the per-region output row. In binary mode the two totals define a
     * Beta(total[0]+1, total[1]+1) posterior; the row contains the credible
     * bounds (inverse CDF at credi/2 and 1-credi/2) and the posterior mode
     * (alpha-1)/(alpha+beta-2), all transformed to log2 fold changes.
     */
    @Override
    public void endRegion(MutableReferenceGenomicRegion<?> region, ProcessorContext context) throws IOException {
        if (allreads) {
        }else if (multimode()){
            out.writef("%s",region.getData());
            for (int i=0; i<total.length; i++)
                out.writef("\t%.1f",total[i]);
            out.writeLine();
        } else {
            BetaDistribution beta = new BetaDistribution(total[0]+1, total[1]+1);
            out.writef("%s\t%.1f\t%.1f\t%.4f\t%.4f\t%.4f\n",region.getData(),total[0]+1,total[1]+1,
                    pToLog2Fc(beta.inverseCumulativeProbability(0.5*credi)),
                    pToLog2Fc((beta.getAlpha()-1)/(beta.getAlpha()+beta.getBeta()-2)),
                    pToLog2Fc(beta.inverseCumulativeProbability(1-0.5*credi))
                    );
        }
    }

    @Override
    public void end(ProcessorContext context) throws IOException {
        out.finishWriting();
    }

    // Maps a proportion p to log2(p/(1-p)), i.e. the log2 ratio of the two
    // conditions; NaN (degenerate posterior) maps to 0.
    private static double pToLog2Fc(double p) {
        if (Double.isNaN(p)) return 0;
        return Math.log(p/(1-p))/Math.log(2);
    }

}
| |
/*
* Encog(tm) Core v3.3 - Java Version
* http://www.heatonresearch.com/encog/
* https://github.com/encog/encog-java-core
* Copyright 2008-2014 Heaton Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package org.encog.app.analyst.script.normalize;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.encog.Encog;
import org.encog.app.analyst.AnalystError;
import org.encog.app.analyst.EncogAnalyst;
import org.encog.app.analyst.csv.basic.BasicFile;
import org.encog.app.analyst.script.AnalystClassItem;
import org.encog.app.analyst.script.DataField;
import org.encog.app.analyst.util.CSVHeaders;
import org.encog.app.quant.QuantError;
import org.encog.mathutil.Equilateral;
import org.encog.util.EngineArray;
import org.encog.util.arrayutil.ClassItem;
import org.encog.util.arrayutil.NormalizationAction;
import org.encog.util.csv.CSVFormat;
/**
* Holds a field to be analyzed.
*
*/
public class AnalystField {
/**
 * Minimum classes for encode using equilateral.
 */
public static final int MIN_EQ_CLASSES = 3;

/**
 * The actual high from the sample data (tracked by analyze()).
 */
private double actualHigh;

/**
 * The actual low from the sample data (tracked by analyze()).
 */
private double actualLow;

/**
 * The desired normalized high.
 */
private double normalizedHigh;

/**
 * The desired normalized low from the sample data.
 */
private double normalizedLow;

/**
 * The action that should be taken on this column.
 */
private NormalizationAction action;

/**
 * The name of this column.
 */
private String name;

/**
 * The list of classes.
 */
private final List<ClassItem> classes = new ArrayList<ClassItem>();

/**
 * If equilateral classification is used, this is the Equilateral object.
 * Created lazily by init(); null until then.
 */
private Equilateral eq;

/**
 * Allows the index of a field to be looked up (class name -> class index;
 * populated by init()).
 */
private final Map<String, Integer> lookup = new HashMap<String, Integer>();

/**
 * True, if this is an output field.
 */
private boolean output;

/**
 * The time slice number.
 */
private int timeSlice;
/**
 * Construct the object with the default normalized range of [-1, 1]
 * (delegates to the two-argument constructor with high=1, low=-1).
 */
public AnalystField() {
  this(1, -1);
}
/**
 * Construct an analyst field. Works like a C++ copy constructor.
 * <p>
 * NOTE(review): only scalar state is copied — {@code classes},
 * {@code lookup} and {@code eq} are NOT duplicated from the source field.
 * Confirm this is intentional before relying on copies of classification
 * fields.
 *
 * @param field The field to clone.
 */
public AnalystField(final AnalystField field) {
  this.actualHigh = field.actualHigh;
  this.actualLow = field.actualLow;
  this.normalizedHigh = field.normalizedHigh;
  this.normalizedLow = field.normalizedLow;
  this.action = field.action;
  this.name = field.name;
  this.output = field.output;
  this.timeSlice = field.timeSlice;
  fixSingleValue();
}
/**
 * Construct the object with a given normalized range; the actual range is
 * seeded with sentinels so that {@link #analyze(double)} can track the
 * observed min/max.
 *
 * @param theNormalizedHigh
 *            The normalized high.
 * @param theNormalizedLow
 *            The normalized low.
 */
public AnalystField(final double theNormalizedHigh,
    final double theNormalizedLow) {
  this.normalizedHigh = theNormalizedHigh;
  this.normalizedLow = theNormalizedLow;
  // Bug fix: the previous sentinel was Double.MIN_VALUE, which is the
  // smallest POSITIVE double (≈4.9e-324), not the most negative value.
  // With that sentinel, analyze() via Math.max could never record a
  // correct maximum for all-negative data. -Double.MAX_VALUE is the true
  // "lowest possible" starting point (mirroring Double.MAX_VALUE below).
  this.actualHigh = -Double.MAX_VALUE;
  this.actualLow = Double.MAX_VALUE;
  this.action = NormalizationAction.Normalize;
  fixSingleValue();
}
/**
 * Construct an object with an action and name only; all range values
 * (actual and normalized) start at 0 via the six-argument constructor.
 *
 * @param theAction
 *            The desired action.
 * @param theName
 *            The name of this column.
 */
public AnalystField(final NormalizationAction theAction,
    final String theName) {
  this(theAction, theName, 0, 0, 0, 0);
}
/**
 * Construct the field, with no defaults.
 *
 * @param theAction
 *            The normalization action to take.
 * @param theName
 *            The name of this field.
 * @param ahigh
 *            The actual high.
 * @param alow
 *            The actual low.
 * @param nhigh
 *            The normalized high.
 * @param nlow
 *            The normalized low.
 */
public AnalystField(final NormalizationAction theAction,
    final String theName,
    final double ahigh, final double alow, final double nhigh,
    final double nlow) {
  this.action = theAction;
  this.actualHigh = ahigh;
  this.actualLow = alow;
  this.normalizedHigh = nhigh;
  this.normalizedLow = nlow;
  this.name = theName;
  // Widen a degenerate (single-value) actual range so normalization is defined.
  fixSingleValue();
}
/**
 * Construct an analyst field to use.
 * <p>
 * Note: unlike the two-argument range constructor, this does not seed the
 * actual high/low sentinels — they remain 0 until analyze() is used.
 *
 * @param theName The name of the field.
 * @param theAction The action to use.
 * @param high The normalized high value.
 * @param low The normalized low value.
 */
public AnalystField(final String theName,
    final NormalizationAction theAction,
    final double high, final double low) {
  this.name = theName;
  this.action = theAction;
  this.normalizedHigh = high;
  this.normalizedLow = low;
  fixSingleValue();
}
/**
 * Append the CSV headings this field contributes to a raw file, one quoted
 * column per needed sub-field.
 *
 * @param line The line to write the raw headings to.
 * @param prefix Optional prefix placed before each heading (may be null).
 * @param format The CSV format used to pick the separator.
 */
public void addRawHeadings(final StringBuilder line,
    final String prefix,
    final CSVFormat format) {
  final int columnCount = getColumnsNeeded();
  for (int col = 0; col < columnCount; col++) {
    // Multi-column encodings get per-column tags; single columns keep the name.
    final String heading = CSVHeaders.tagColumn(this.name, col,
        this.timeSlice, columnCount > 1);
    BasicFile.appendSeparator(line, format);
    line.append('\"')
        .append(prefix == null ? "" : prefix)
        .append(heading)
        .append('\"');
  }
}
/**
 * Analyze the specified value, widening the observed actual range as
 * needed. Usually used only internally.
 *
 * @param d
 *            The value to analyze.
 */
public void analyze(final double d) {
  // Math.min/Math.max are kept (rather than comparisons) so NaN inputs
  // propagate exactly as before.
  this.actualLow = Math.min(d, this.actualLow);
  this.actualHigh = Math.max(d, this.actualHigh);
}
/**
 * Denormalize the specified value: map it from the normalized range
 * [normalizedLow, normalizedHigh] back to the actual range
 * [actualLow, actualHigh] (inverse of the linear normalization).
 *
 * @param value
 *            The normalized value to denormalize.
 * @return The denormalized value.
 */
public double deNormalize(final double value) {
  final double result = ((this.actualLow - this.actualHigh) * value
      - this.normalizedHigh * this.actualLow + this.actualHigh
      * this.normalizedLow)
      / (this.normalizedLow - this.normalizedHigh);
  // typically caused by a number that should not have been normalized
  // (i.e. normalization or actual range is infinitely small) — fall back
  // to the midpoint of the normalized range.
  if( Double.isNaN(result) ) {
    return ((this.normalizedHigh-this.normalizedLow)/2)+this.normalizedLow;
  }
  return result;
}
/**
 * Determine what class the specified encoded data belongs to.
 *
 * @param data
 *            The encoded data to analyze; its layout depends on the action
 *            (equilateral vector, one-of-n vector, or a single field).
 * @return The class the data belongs to, or {@code null} if the decoded
 *         index falls outside the known class list.
 * @throws AnalystError if this field's action is not a classification action.
 */
public ClassItem determineClass(final double[] data) {
  int resultIndex = 0;
  switch (this.action) {
  case Equilateral:
    resultIndex = this.eq.decode(data);
    break;
  case OneOf:
    resultIndex = EngineArray.indexOfLargest(data);
    break;
  case SingleField:
    // Round rather than truncate: denormalization can produce values such
    // as 1.9999 that must map to class 2, not 1. This also makes the
    // behavior consistent with the determineClass(int, double[]) overload.
    resultIndex = (int) Math.round(data[0]);
    break;
  default:
    throw new AnalystError("Unknown action: " + this.action);
  }
  // Guard against decoders yielding an out-of-range index, matching the
  // positional overload (which returns null instead of letting
  // classes.get() throw IndexOutOfBoundsException).
  if (resultIndex < 0 || resultIndex >= this.classes.size()) {
    return null;
  }
  return this.classes.get(resultIndex);
}
/**
 * Determine the class using part of an array, starting at the given offset.
 *
 * @param pos The position to begin reading from.
 * @param data The array to check.
 * @return The class item, or null if the decoded index is out of range.
 */
public ClassItem determineClass(final int pos, final double[] data) {
  // Copy out just the columns this field occupies.
  final double[] slice = new double[getColumnsNeeded()];
  EngineArray.arrayCopy(data, pos, slice, 0, slice.length);

  final int index;
  switch (this.action) {
  case Equilateral:
    index = this.eq.decode(slice);
    break;
  case OneOf:
    index = EngineArray.indexOfLargest(slice);
    break;
  case SingleField:
    // Round to the nearest class number (values may be slightly off after
    // denormalization).
    index = (int) Math.round(slice[0]);
    break;
  default:
    throw new AnalystError("Invalid action: " + this.action);
  }

  // Out-of-range indexes map to "no class" rather than an exception.
  if (index < 0 || index >= this.classes.size()) {
    return null;
  }
  return this.classes.get(index);
}
/**
 * Encode the class number into this field's column representation.
 *
 * @param classNumber
 *            The class number.
 * @return The encoded columns, or null if this field's action is not a
 *         classification action.
 */
public double[] encode(final int classNumber) {
  if (this.action == NormalizationAction.OneOf) {
    return encodeOneOf(classNumber);
  }
  if (this.action == NormalizationAction.Equilateral) {
    return encodeEquilateral(classNumber);
  }
  if (this.action == NormalizationAction.SingleField) {
    return encodeSingleField(classNumber);
  }
  // Non-classification actions have no class encoding.
  return null;
}
/**
 * Encode a class given as a string. Known class names are resolved through
 * the lookup table; anything else is interpreted as an explicit numeric
 * class number.
 *
 * @param str The class name (or numeric class string) to encode.
 * @return The encoded columns.
 */
public double[] encode(final String str) {
  final int known = lookup(str);
  if (known != -1) {
    return encode(known);
  }
  // Unknown name: fall back to parsing the string as a class number.
  final int parsed;
  try {
    parsed = Integer.parseInt(str);
  } catch (final NumberFormatException ex) {
    throw new QuantError("Can't determine class for: " + str);
  }
  return encode(parsed);
}
/**
 * Perform an equilateral encode.
 * <p>
 * Requires that {@link #init()} has been called (it creates {@code eq});
 * otherwise this throws a NullPointerException.
 *
 * @param classNumber
 *            The class number.
 * @return The class to encode.
 */
public double[] encodeEquilateral(final int classNumber) {
  return this.eq.encode(classNumber);
}
/**
 * Perform the encoding for "one of n": the selected class's column carries
 * the normalized high, every other column the normalized low.
 *
 * @param classNumber
 *            The class number.
 * @return The encoded columns.
 */
private double[] encodeOneOf(final int classNumber) {
  // For OneOf, getColumnsNeeded() equals the number of classes.
  final double[] encoded = new double[getColumnsNeeded()];
  final int count = this.classes.size();
  for (int col = 0; col < count; col++) {
    encoded[col] = (col == classNumber)
        ? this.normalizedHigh
        : this.normalizedLow;
  }
  return encoded;
}
/**
 * Encode a single field: the class number itself, in one column.
 *
 * @param classNumber
 *            The class number to encode.
 * @return A one-element array holding the class number.
 */
private double[] encodeSingleField(final int classNumber) {
  return new double[] { classNumber };
}
/**
 * Fix normalized fields whose actual min and max coincide (a single-valued
 * column) by separating them by 2 units, so normalization stays defined.
 * Only applies to fields using the Normalize action.
 */
public void fixSingleValue() {
  if (this.action != NormalizationAction.Normalize) {
    return;
  }
  final double spread = Math.abs(this.actualHigh - this.actualLow);
  if (spread < Encog.DEFAULT_DOUBLE_EQUAL) {
    this.actualHigh += 1;
    this.actualLow -= 1;
  }
}
/**
 * @return The action for the field.
 */
public NormalizationAction getAction() {
  return this.action;
}

/**
 * @return The actual high for the field.
 */
public double getActualHigh() {
  return this.actualHigh;
}

/**
 * @return The actual low for the field.
 */
public double getActualLow() {
  return this.actualLow;
}

/**
 * @return The classes. Note: this is the live internal list, not a copy —
 *         mutations affect this field.
 */
public List<ClassItem> getClasses() {
  return this.classes;
}

/**
 * @return Returns the number of columns needed for this classification. The
 *         number of columns needed will vary, depending on the
 *         classification method used.
 */
public int getColumnsNeeded() {
  switch (this.action) {
  case Ignore:
    return 0;
  case Equilateral:
    // Equilateral encoding needs one fewer column than there are classes.
    return this.classes.size() - 1;
  case OneOf:
    return this.classes.size();
  default:
    // Normalize, SingleField, etc. all occupy exactly one column.
    return 1;
  }
}

/**
 * @return The equilateral utility (null until init() runs for an
 *         Equilateral field).
 */
public Equilateral getEq() {
  return this.eq;
}

/**
 * @return The name of the field.
 */
public String getName() {
  return this.name;
}

/**
 * @return The normalized high for the field.
 */
public double getNormalizedHigh() {
  return this.normalizedHigh;
}

/**
 * @return The normalized low for the neural network.
 */
public double getNormalizedLow() {
  return this.normalizedLow;
}

/**
 * @return the timeSlice
 */
public int getTimeSlice() {
  return this.timeSlice;
}
/**
 * Init any internal structures: create the Equilateral helper when needed,
 * and (re)build the class-name lookup map.
 */
public void init() {
  if (this.action == NormalizationAction.Equilateral) {
    // Equilateral needs at least MIN_EQ_CLASSES classes to be well-defined.
    if (this.classes.size() < MIN_EQ_CLASSES) {
      throw new QuantError(
          "There must be at least three classes to make "
              + "use of equilateral normalization.");
    }
    this.eq = new Equilateral(this.classes.size(), this.normalizedHigh,
        this.normalizedLow);
  }

  // build lookup map: class name -> class index
  for (final ClassItem item : this.classes) {
    this.lookup.put(item.getName(), item.getIndex());
  }
}
/**
 * Determine if this field uses a classification action.
 *
 * @return True if the action is Equilateral, OneOf or SingleField.
 */
public boolean isClassify() {
    switch (this.action) {
    case Equilateral:
    case OneOf:
    case SingleField:
        return true;
    default:
        return false;
    }
}
/**
 * Determine if this field is ignored.
 *
 * @return True if the field's action is Ignore.
 */
public final boolean isIgnored() {
    return action == NormalizationAction.Ignore;
}
/**
 * Determine if this field is an input field (i.e. not an output field).
 *
 * @return True if this field is input.
 */
public boolean isInput() {
    return !output;
}
/**
 * Determine if this field is an output field.
 *
 * @return True if this field is output.
 */
public boolean isOutput() {
    return output;
}
/**
 * Look up the index of the specified class by name.
 *
 * @param str
 *            The name of the class to look up.
 * @return The index of the class, or -1 if not found.
 */
public int lookup(final String str) {
    // Single map probe instead of containsKey + get; values are never
    // null (they are boxed ints put in by init()), so null means absent.
    final Integer index = this.lookup.get(str);
    return (index == null) ? -1 : index;
}
/**
 * Make the classes based on numbers. Each integer from classFrom up to,
 * but not including, classTo becomes a class named after its value.
 *
 * @param theAction The classification action to use; must be Equilateral,
 *                  OneOf or SingleField.
 * @param classFrom The starting class number (inclusive).
 * @param classTo The ending class number (exclusive).
 * @param high The normalized high value.
 * @param low The normalized low value.
 */
public void makeClass(final NormalizationAction theAction,
        final int classFrom, final int classTo, final int high,
        final int low) {
    // BUG FIX: validate the requested action (the parameter), not the
    // field's current action. The old check inspected the pre-existing
    // "action" field, so an invalid theAction could slip through while a
    // valid request on a field currently set to e.g. Normalize was
    // rejected.
    if ((theAction != NormalizationAction.Equilateral)
            && (theAction != NormalizationAction.OneOf)
            && (theAction != NormalizationAction.SingleField)) {
        throw new QuantError("Unsupported normalization type");
    }
    this.action = theAction;
    this.classes.clear();
    this.normalizedHigh = high;
    this.normalizedLow = low;
    this.actualHigh = 0;
    this.actualLow = 0;
    int index = 0;
    for (int i = classFrom; i < classTo; i++) {
        this.classes.add(new ClassItem("" + i, index++));
    }
}
/**
 * Make the classes using names; each entry of cls becomes one class, in
 * order.
 *
 * @param theAction The classification action to use; must be Equilateral,
 *                  OneOf or SingleField.
 * @param cls The class names.
 * @param high The normalized high value.
 * @param low The normalized low value.
 */
public void makeClass(final NormalizationAction theAction,
        final String[] cls,
        final double high, final double low) {
    // BUG FIX: validate the requested action (the parameter), not the
    // field's current action — the old check inspected the pre-existing
    // "action" field, making the validation meaningless for theAction.
    if ((theAction != NormalizationAction.Equilateral)
            && (theAction != NormalizationAction.OneOf)
            && (theAction != NormalizationAction.SingleField)) {
        throw new QuantError("Unsupported normalization type");
    }
    this.action = theAction;
    this.classes.clear();
    this.normalizedHigh = high;
    this.normalizedLow = low;
    this.actualHigh = 0;
    this.actualLow = 0;
    for (int i = 0; i < cls.length; i++) {
        this.classes.add(new ClassItem(cls[i], i));
    }
}
/**
 * Configure this field as a pass-through field: all ranges are zeroed and
 * the action is set to PassThrough.
 */
public void makePassThrough() {
    this.action = NormalizationAction.PassThrough;
    this.normalizedHigh = 0;
    this.normalizedLow = 0;
    this.actualHigh = 0;
    this.actualLow = 0;
}
/**
 * Normalize the specified value by linearly mapping it from the actual
 * range [actualLow, actualHigh] onto the normalized range
 * [normalizedLow, normalizedHigh].
 *
 * @param value
 *            The value to normalize.
 * @return The normalized value; if the computation yields NaN (e.g. a
 *         degenerate actual or normalized range), the midpoint of the
 *         normalized range is returned instead.
 */
public double normalize(final double value) {
    double result = ((value - this.actualLow) / (this.actualHigh - this.actualLow))
        * (this.normalizedHigh - this.normalizedLow)
        + this.normalizedLow;
    // NaN is typically caused by a number that should not have been
    // normalized (i.e. the normalized or actual range is infinitely
    // small); fall back to the midpoint of the normalized range.
    if( Double.isNaN(result) ) {
        return ((this.normalizedHigh-this.normalizedLow)/2)+this.normalizedLow;
    }
    return result;
}
/**
 * Set the normalization action for the field.
 *
 * @param value The action to assign.
 */
public void setAction(final NormalizationAction value) {
    this.action = value;
}
/**
 * Set the actual high for the field.
 *
 * @param value The actual high to assign.
 */
public void setActualHigh(final double value) {
    this.actualHigh = value;
}
/**
 * Set the actual low for the field.
 *
 * @param value The actual low to assign.
 */
public void setActualLow(final double value) {
    this.actualLow = value;
}
/**
 * Set the name of the field.
 *
 * @param value The name to assign.
 */
public void setName(final String value) {
    this.name = value;
}
/**
 * Set the normalized high for the field.
 *
 * @param value The normalized high to assign.
 */
public void setNormalizedHigh(final double value) {
    this.normalizedHigh = value;
}
/**
 * Set the normalized low for the field.
 *
 * @param value The normalized low to assign.
 */
public void setNormalizedLow(final double value) {
    this.normalizedLow = value;
}
/**
 * Mark this field as an output (or input) field.
 *
 * @param flag True, if this is output.
 */
public void setOutput(final boolean flag) {
    this.output = flag;
}
/**
 * Set the time slice for this field.
 *
 * @param slice the timeSlice to set
 */
public void setTimeSlice(final int slice) {
    this.timeSlice = slice;
}
/** {@inheritDoc} */
@Override
public String toString() {
    // Plain concatenation produces the same text as the original
    // StringBuilder-based version.
    return "[" + getClass().getSimpleName()
        + " name=" + this.name
        + ", actualHigh=" + this.actualHigh
        + ", actualLow=" + this.actualLow
        + "]";
}
/**
 * Determine the mode: the index of the class item that has the most
 * instances in the underlying data field.
 *
 * @param analyst The Encog analyst.
 * @return The index of the most frequent class (first one wins on ties).
 */
public int determineMode(EncogAnalyst analyst) {
    if (!isClassify()) {
        throw new AnalystError("Can only calculate the mode for a class.");
    }
    final DataField df = analyst.getScript().findDataField(this.name);
    AnalystClassItem best = null;
    int result = 0;
    int idx = 0;
    for (final AnalystClassItem item : df.getClassMembers()) {
        if (best == null || item.getCount() > best.getCount()) {
            best = item;
            result = idx;
        }
        idx++;
    }
    return result;
}
/**
 * Encode a class index supplied as a double by truncating it to an int
 * and delegating to {@code encode(int)}. The fractional part is
 * discarded.
 *
 * @param d The class index as a double.
 * @return The encoded output for the class.
 */
public double[] encode(double d) {
    return encode((int)d);
}
/**
 * Find the class item with the specified index via a linear search.
 *
 * @param index The class index to search for.
 * @return The matching ClassItem, or null if no class has that index.
 */
public ClassItem findClass(int index) {
    for(ClassItem itm: this.classes) {
        if( itm.getIndex()==index) {
            return itm;
        }
    }
    return null;
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudfront.model;
import java.io.Serializable;
import java.util.Objects;
/**
* <p>
* A complex type that describes how you'd prefer CloudFront to respond
* to requests that result in either a 4xx or 5xx response. You can
* control whether a custom error page should be displayed, what the
* desired response code should be for this error page and how long
* should the error response be cached by CloudFront. If you don't want
* to specify any custom error responses, include only an empty
* CustomErrorResponses element. To delete all custom error responses in
* an existing distribution, update the distribution configuration and
* include only an empty CustomErrorResponses element. To add, change, or
* remove one or more custom error responses, update the distribution
* configuration and specify all of the custom error responses that you
* want to include in the updated distribution.
* </p>
*/
public class CustomErrorResponse implements Serializable, Cloneable {
/**
* The 4xx or 5xx HTTP status code that you want to customize. For a list
* of HTTP status codes that you can customize, see CloudFront
* documentation.
*/
private Integer errorCode;
/**
* The path of the custom error page (for example, /custom_404.html). The
* path is relative to the distribution and must begin with a slash (/).
* If the path includes any non-ASCII characters or unsafe characters as
* defined in RFC 1783 (http://www.ietf.org/rfc/rfc1738.txt), URL encode
* those characters. Do not URL encode any other characters in the path,
* or CloudFront will not return the custom error page to the viewer.
*/
private String responsePagePath;
/**
* The HTTP status code that you want CloudFront to return with the
* custom error page to the viewer. For a list of HTTP status codes that
* you can replace, see CloudFront Documentation.
*/
private String responseCode;
/**
* The minimum amount of time you want HTTP error codes to stay in
* CloudFront caches before CloudFront queries your origin to see whether
* the object has been updated. You can specify a value from 0 to
* 31,536,000.
*/
private Long errorCachingMinTTL;
/**
* The 4xx or 5xx HTTP status code that you want to customize. For a list
* of HTTP status codes that you can customize, see CloudFront
* documentation.
*
* @return The 4xx or 5xx HTTP status code that you want to customize. For a list
* of HTTP status codes that you can customize, see CloudFront
* documentation.
*/
public Integer getErrorCode() {
return errorCode;
}
/**
* The 4xx or 5xx HTTP status code that you want to customize. For a list
* of HTTP status codes that you can customize, see CloudFront
* documentation.
*
* @param errorCode The 4xx or 5xx HTTP status code that you want to customize. For a list
* of HTTP status codes that you can customize, see CloudFront
* documentation.
*/
public void setErrorCode(Integer errorCode) {
this.errorCode = errorCode;
}
/**
* The 4xx or 5xx HTTP status code that you want to customize. For a list
* of HTTP status codes that you can customize, see CloudFront
* documentation.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param errorCode The 4xx or 5xx HTTP status code that you want to customize. For a list
* of HTTP status codes that you can customize, see CloudFront
* documentation.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CustomErrorResponse withErrorCode(Integer errorCode) {
this.errorCode = errorCode;
return this;
}
/**
* The path of the custom error page (for example, /custom_404.html). The
* path is relative to the distribution and must begin with a slash (/).
* If the path includes any non-ASCII characters or unsafe characters as
* defined in RFC 1783 (http://www.ietf.org/rfc/rfc1738.txt), URL encode
* those characters. Do not URL encode any other characters in the path,
* or CloudFront will not return the custom error page to the viewer.
*
* @return The path of the custom error page (for example, /custom_404.html). The
* path is relative to the distribution and must begin with a slash (/).
* If the path includes any non-ASCII characters or unsafe characters as
* defined in RFC 1783 (http://www.ietf.org/rfc/rfc1738.txt), URL encode
* those characters. Do not URL encode any other characters in the path,
* or CloudFront will not return the custom error page to the viewer.
*/
public String getResponsePagePath() {
return responsePagePath;
}
/**
* The path of the custom error page (for example, /custom_404.html). The
* path is relative to the distribution and must begin with a slash (/).
* If the path includes any non-ASCII characters or unsafe characters as
* defined in RFC 1783 (http://www.ietf.org/rfc/rfc1738.txt), URL encode
* those characters. Do not URL encode any other characters in the path,
* or CloudFront will not return the custom error page to the viewer.
*
* @param responsePagePath The path of the custom error page (for example, /custom_404.html). The
* path is relative to the distribution and must begin with a slash (/).
* If the path includes any non-ASCII characters or unsafe characters as
* defined in RFC 1783 (http://www.ietf.org/rfc/rfc1738.txt), URL encode
* those characters. Do not URL encode any other characters in the path,
* or CloudFront will not return the custom error page to the viewer.
*/
public void setResponsePagePath(String responsePagePath) {
this.responsePagePath = responsePagePath;
}
/**
* The path of the custom error page (for example, /custom_404.html). The
* path is relative to the distribution and must begin with a slash (/).
* If the path includes any non-ASCII characters or unsafe characters as
* defined in RFC 1783 (http://www.ietf.org/rfc/rfc1738.txt), URL encode
* those characters. Do not URL encode any other characters in the path,
* or CloudFront will not return the custom error page to the viewer.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param responsePagePath The path of the custom error page (for example, /custom_404.html). The
* path is relative to the distribution and must begin with a slash (/).
* If the path includes any non-ASCII characters or unsafe characters as
* defined in RFC 1783 (http://www.ietf.org/rfc/rfc1738.txt), URL encode
* those characters. Do not URL encode any other characters in the path,
* or CloudFront will not return the custom error page to the viewer.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CustomErrorResponse withResponsePagePath(String responsePagePath) {
this.responsePagePath = responsePagePath;
return this;
}
/**
* The HTTP status code that you want CloudFront to return with the
* custom error page to the viewer. For a list of HTTP status codes that
* you can replace, see CloudFront Documentation.
*
* @return The HTTP status code that you want CloudFront to return with the
* custom error page to the viewer. For a list of HTTP status codes that
* you can replace, see CloudFront Documentation.
*/
public String getResponseCode() {
return responseCode;
}
/**
* The HTTP status code that you want CloudFront to return with the
* custom error page to the viewer. For a list of HTTP status codes that
* you can replace, see CloudFront Documentation.
*
* @param responseCode The HTTP status code that you want CloudFront to return with the
* custom error page to the viewer. For a list of HTTP status codes that
* you can replace, see CloudFront Documentation.
*/
public void setResponseCode(String responseCode) {
this.responseCode = responseCode;
}
/**
* The HTTP status code that you want CloudFront to return with the
* custom error page to the viewer. For a list of HTTP status codes that
* you can replace, see CloudFront Documentation.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param responseCode The HTTP status code that you want CloudFront to return with the
* custom error page to the viewer. For a list of HTTP status codes that
* you can replace, see CloudFront Documentation.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CustomErrorResponse withResponseCode(String responseCode) {
this.responseCode = responseCode;
return this;
}
/**
* The minimum amount of time you want HTTP error codes to stay in
* CloudFront caches before CloudFront queries your origin to see whether
* the object has been updated. You can specify a value from 0 to
* 31,536,000.
*
* @return The minimum amount of time you want HTTP error codes to stay in
* CloudFront caches before CloudFront queries your origin to see whether
* the object has been updated. You can specify a value from 0 to
* 31,536,000.
*/
public Long getErrorCachingMinTTL() {
return errorCachingMinTTL;
}
/**
* The minimum amount of time you want HTTP error codes to stay in
* CloudFront caches before CloudFront queries your origin to see whether
* the object has been updated. You can specify a value from 0 to
* 31,536,000.
*
* @param errorCachingMinTTL The minimum amount of time you want HTTP error codes to stay in
* CloudFront caches before CloudFront queries your origin to see whether
* the object has been updated. You can specify a value from 0 to
* 31,536,000.
*/
public void setErrorCachingMinTTL(Long errorCachingMinTTL) {
this.errorCachingMinTTL = errorCachingMinTTL;
}
/**
* The minimum amount of time you want HTTP error codes to stay in
* CloudFront caches before CloudFront queries your origin to see whether
* the object has been updated. You can specify a value from 0 to
* 31,536,000.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param errorCachingMinTTL The minimum amount of time you want HTTP error codes to stay in
* CloudFront caches before CloudFront queries your origin to see whether
* the object has been updated. You can specify a value from 0 to
* 31,536,000.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public CustomErrorResponse withErrorCachingMinTTL(Long errorCachingMinTTL) {
this.errorCachingMinTTL = errorCachingMinTTL;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getErrorCode() != null) sb.append("ErrorCode: " + getErrorCode() + ",");
if (getResponsePagePath() != null) sb.append("ResponsePagePath: " + getResponsePagePath() + ",");
if (getResponseCode() != null) sb.append("ResponseCode: " + getResponseCode() + ",");
if (getErrorCachingMinTTL() != null) sb.append("ErrorCachingMinTTL: " + getErrorCachingMinTTL() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getErrorCode() == null) ? 0 : getErrorCode().hashCode());
hashCode = prime * hashCode + ((getResponsePagePath() == null) ? 0 : getResponsePagePath().hashCode());
hashCode = prime * hashCode + ((getResponseCode() == null) ? 0 : getResponseCode().hashCode());
hashCode = prime * hashCode + ((getErrorCachingMinTTL() == null) ? 0 : getErrorCachingMinTTL().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof CustomErrorResponse == false) return false;
CustomErrorResponse other = (CustomErrorResponse)obj;
if (other.getErrorCode() == null ^ this.getErrorCode() == null) return false;
if (other.getErrorCode() != null && other.getErrorCode().equals(this.getErrorCode()) == false) return false;
if (other.getResponsePagePath() == null ^ this.getResponsePagePath() == null) return false;
if (other.getResponsePagePath() != null && other.getResponsePagePath().equals(this.getResponsePagePath()) == false) return false;
if (other.getResponseCode() == null ^ this.getResponseCode() == null) return false;
if (other.getResponseCode() != null && other.getResponseCode().equals(this.getResponseCode()) == false) return false;
if (other.getErrorCachingMinTTL() == null ^ this.getErrorCachingMinTTL() == null) return false;
if (other.getErrorCachingMinTTL() != null && other.getErrorCachingMinTTL().equals(this.getErrorCachingMinTTL()) == false) return false;
return true;
}
@Override
public CustomErrorResponse clone() {
try {
return (CustomErrorResponse) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| |
/*
* Copyright (c) 2015, University of Oslo
*
* All rights reserved.
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.sdk.utils.services;
import org.apache.commons.jexl2.JexlException;
import org.hisp.dhis.android.sdk.controllers.metadata.MetaDataController;
import org.hisp.dhis.android.sdk.controllers.tracker.TrackerController;
import org.hisp.dhis.android.sdk.persistence.models.Constant;
import org.hisp.dhis.android.sdk.persistence.models.DataElement;
import org.hisp.dhis.android.sdk.persistence.models.DataValue;
import org.hisp.dhis.android.sdk.persistence.models.Enrollment;
import org.hisp.dhis.android.sdk.persistence.models.Event;
import org.hisp.dhis.android.sdk.persistence.models.ProgramIndicator;
import org.hisp.dhis.android.sdk.persistence.models.ProgramStage;
import org.hisp.dhis.android.sdk.persistence.models.ProgramStageDataElement;
import org.hisp.dhis.android.sdk.persistence.models.TrackedEntityAttribute;
import org.hisp.dhis.android.sdk.persistence.models.TrackedEntityAttributeValue;
import org.hisp.dhis.android.sdk.utils.support.DateUtils;
import org.hisp.dhis.android.sdk.utils.support.ExpressionUtils;
import org.hisp.dhis.android.sdk.utils.support.MathUtils;
import org.hisp.dhis.android.sdk.utils.support.TextUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
/**
* @author Chau Thu Tran
*/
/**
* Handles logic related to ProgramIndicators such as calculating values based on expressions.
* This class has been copied from the dhis 2 core repository and been stripped down.
*/
public class ProgramIndicatorService {
public static final String CLASS_TAG = ProgramIndicatorService.class.getSimpleName();
private static final String NULL_REPLACEMENT = "null";
/**
* Calculate an program indicator value based on program instance and an
* indicator defined for a TrackedEntityInstance
*
* @param programInstance ProgramInstance
* @param programIndicator ProgramIndicator
* @return Indicator value
*/
public static String getProgramIndicatorValue(Enrollment programInstance, ProgramIndicator programIndicator) {
if(programIndicator == null) {
return null;
}
Double value = getValue(programInstance, null, programIndicator);
if (value != null && !Double.isNaN(value)) {
value = MathUtils.getRounded(value, 2);
return String.valueOf(value);
}
return null;
}
/**
* Calculate an program indicator value based on a single event
*
* @param event Event
* @param programIndicator ProgramIndicator
* @return Indicator value
*/
public static String getProgramIndicatorValue(Event event, ProgramIndicator programIndicator) {
if(programIndicator == null) {
return null;
}
Double value = getValue(null, event, programIndicator);
if (value != null && !Double.isNaN(value)) {
value = MathUtils.getRounded(value, 2);
return String.valueOf(value);
}
return null;
}
/**
* Get indicator values of all program indicators defined for a TrackedEntityInstance
*
* @param programInstance ProgramInstance
* @return Map<Indicator name, Indicator value>
*/
public static Map<String, String> getProgramIndicatorValues(Enrollment programInstance) {
Map<String, String> result = new HashMap<>();
Collection<ProgramIndicator> programIndicators = new HashSet(programInstance.getProgram().getProgramIndicators());
for (ProgramIndicator programIndicator : programIndicators) {
String value = getProgramIndicatorValue(programInstance, programIndicator);
if (value != null) {
result.put(programIndicator.getDisplayName(),
getProgramIndicatorValue(programInstance, programIndicator));
}
}
return result;
}
/**
* Get description of an indicator expression
*
* @param expression A expression string
* @return The description
*/
public static String getExpressionDescription(String expression) {
StringBuffer description = new StringBuffer();
Matcher matcher = ProgramIndicator.EXPRESSION_PATTERN.matcher(expression);
while (matcher.find()) {
String key = matcher.group(1);
String uid = matcher.group(2);
if (ProgramIndicator.KEY_DATAELEMENT.equals(key)) {
String de = matcher.group(3);
ProgramStage programStage = MetaDataController.getProgramStage(uid);
DataElement dataElement = MetaDataController.getDataElement(de);
if (programStage != null && dataElement != null) {
String programStageName = programStage.getDisplayName();
String dataelementName = dataElement.getDisplayName();
matcher.appendReplacement(description, programStageName + ProgramIndicator.SEPARATOR_ID + dataelementName);
}
} else if (ProgramIndicator.KEY_ATTRIBUTE.equals(key)) {
TrackedEntityAttribute attribute = MetaDataController.getTrackedEntityAttribute(uid);
if (attribute != null) {
matcher.appendReplacement(description, attribute.getDisplayName());
}
} else if (ProgramIndicator.KEY_CONSTANT.equals(key)) {
Constant constant = MetaDataController.getConstant(uid);
if (constant != null) {
matcher.appendReplacement(description, constant.getDisplayName());
}
} else if (ProgramIndicator.KEY_PROGRAM_VARIABLE.equals(key)) {
if (ProgramIndicator.CURRENT_DATE.equals(uid)) {
matcher.appendReplacement(description, "Current date");
} else if (ProgramIndicator.ENROLLMENT_DATE.equals(uid)) {
matcher.appendReplacement(description, "Enrollment date");
} else if (ProgramIndicator.INCIDENT_DATE.equals(uid)) {
matcher.appendReplacement(description, "Incident date");
} else if (ProgramIndicator.VALUE_COUNT.equals(uid)) {
matcher.appendReplacement(description, "Value count");
}
}
}
matcher.appendTail(description);
return description.toString();
}
/**
* Get description of an indicator expression
*
* @param expression A expression string
* @return The expression is valid or not
*/
public static String expressionIsValid(String expression) {
StringBuffer description = new StringBuffer();
Matcher matcher = ProgramIndicator.EXPRESSION_PATTERN.matcher(expression);
while (matcher.find()) {
String key = matcher.group(1);
String uid = matcher.group(2);
if (ProgramIndicator.KEY_DATAELEMENT.equals(key)) {
String de = matcher.group(3);
ProgramStage programStage = MetaDataController.getProgramStage(uid);
DataElement dataElement = MetaDataController.getDataElement(de);
if (programStage != null && dataElement != null) {
matcher.appendReplacement(description, String.valueOf(1));
} else {
return ProgramIndicator.EXPRESSION_NOT_WELL_FORMED;
}
} else if (ProgramIndicator.KEY_ATTRIBUTE.equals(key)) {
TrackedEntityAttribute attribute = MetaDataController.getTrackedEntityAttribute(uid);
if (attribute != null) {
matcher.appendReplacement(description, String.valueOf(1));
} else {
return ProgramIndicator.EXPRESSION_NOT_WELL_FORMED;
}
} else if (ProgramIndicator.KEY_CONSTANT.equals(key)) {
Constant constant = MetaDataController.getConstant(uid);
if (constant != null) {
matcher.appendReplacement(description, String.valueOf(constant.getValue()));
} else {
return ProgramIndicator.EXPRESSION_NOT_WELL_FORMED;
}
} else if (ProgramIndicator.KEY_PROGRAM_VARIABLE.equals(key)) {
matcher.appendReplacement(description, String.valueOf(0));
}
}
matcher.appendTail(description);
// ---------------------------------------------------------------------
// Well-formed expression
// ---------------------------------------------------------------------
if (MathUtils.expressionHasErrors(description.toString())) {
return ProgramIndicator.EXPRESSION_NOT_WELL_FORMED;
}
return ProgramIndicator.VALID;
}
/**
* Get all {@link org.hisp.dhis.android.sdk.persistence.models.ProgramStageDataElement} part of the expression of the
* given indicator.
*
* @param indicator the ProgramIndicator.
* @return a set of ProgramStageDataElements.
*/
public static Set<ProgramStageDataElement> getProgramStageDataElementsInExpression(ProgramIndicator indicator) {
Set<ProgramStageDataElement> elements = new HashSet<>();
Matcher matcher = ProgramIndicator.DATAELEMENT_PATTERN.matcher(indicator.getExpression());
while (matcher.find()) {
String ps = matcher.group(1);
String de = matcher.group(2);
ProgramStage programStage = MetaDataController.getProgramStage(ps);
DataElement dataElement = MetaDataController.getDataElement(de);
if (programStage != null && dataElement != null) {
elements.add(programStage.getProgramStageDataElement(dataElement.getUid()));
}
}
return elements;
}
public static List<String> getDataElementsInExpression(ProgramIndicator indicator) {
List<String> elements = new ArrayList<>();
Matcher matcher = ProgramIndicator.DATAELEMENT_PATTERN.matcher(indicator.getExpression());
while (matcher.find()) {
String ps = matcher.group(1);
String de = matcher.group(2);
elements.add(de);
}
return elements;
}
/**
* Get all {@link org.hisp.dhis.android.sdk.persistence.models.TrackedEntityAttribute} part of the expression of the
* given indicator.
*
* @param indicator the ProgramIndicator.
* @return a set of TrackedEntityAttributes.
*/
public static Set<TrackedEntityAttribute> getAttributesInExpression(ProgramIndicator indicator) {
Set<TrackedEntityAttribute> attributes = new HashSet<>();
Matcher matcher = ProgramIndicator.ATTRIBUTE_PATTERN.matcher(indicator.getExpression());
while (matcher.find()) {
String at = matcher.group(1);
TrackedEntityAttribute attribute = MetaDataController.getTrackedEntityAttribute(at);
if (attribute != null) {
attributes.add(attribute);
}
}
return attributes;
}
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
/**
* @param programInstance can be null if event is not null in case single event without reg
* @param event can be null if programInstance is not null
* @param indicator
* @return
*/
private static Double getValue(Enrollment programInstance, Event event, ProgramIndicator indicator) {
StringBuffer buffer = new StringBuffer();
String expression = indicator.getExpression();
Matcher matcher = ProgramIndicator.EXPRESSION_PATTERN.matcher(expression);
int valueCount = 0;
int zeroPosValueCount = 0;
Event programStageInstance = null;
Map<String, DataValue> dataElementToDataValues = new HashMap<>();
while (matcher.find()) {
String key = matcher.group(1);
String uid = matcher.group(2);
if (ProgramIndicator.KEY_DATAELEMENT.equals(key)) {
String de = matcher.group(3);
String programStageUid = uid;
if (programStageUid != null && de != null) {
if (programInstance == null) { //in case single event without reg
if(programStageInstance == null) {
programStageInstance = event;
if (programStageInstance.getDataValues() != null) {
for (DataValue dataValue : programStageInstance.getDataValues()) {
dataElementToDataValues.put(dataValue.getDataElement(), dataValue);
}
}
}
} else {
if (programStageInstance == null || !programStageInstance.getUid().equals(programStageUid)) {
programStageInstance = TrackerController.getEvent(programInstance.getLocalId(), programStageUid);
dataElementToDataValues.clear();
if (programStageInstance.getDataValues() != null) {
for(DataValue dataValue: programStageInstance.getDataValues()) {
dataElementToDataValues.put(dataValue.getDataElement(), dataValue);
}
}
}
}
DataValue dataValue;
if (programStageInstance.getDataValues() == null) {
continue;
}
dataValue = dataElementToDataValues.get(de);
String value;
if (dataValue == null || dataValue.getValue() == null || dataValue.getValue().isEmpty()) {
value = NULL_REPLACEMENT;
} else {
value = dataValue.getValue();
valueCount++;
zeroPosValueCount = isZeroOrPositive(value) ? (zeroPosValueCount + 1) : zeroPosValueCount;
}
matcher.appendReplacement(buffer, value);
} else {
continue;
}
} else if (ProgramIndicator.KEY_ATTRIBUTE.equals(key)) {
if (programInstance != null) { //in case single event without reg
if (uid != null) {
TrackedEntityAttributeValue attributeValue = TrackerController.getTrackedEntityAttributeValue(
uid, programInstance.getLocalTrackedEntityInstanceId());
String value;
if (attributeValue == null || attributeValue.getValue() == null || attributeValue.getValue().isEmpty()) {
value = NULL_REPLACEMENT;
} else {
value = attributeValue.getValue();
valueCount++;
zeroPosValueCount = isZeroOrPositive(value) ? (zeroPosValueCount + 1) : zeroPosValueCount;
}
matcher.appendReplacement(buffer, value);
} else {
continue;
}
}
} else if (ProgramIndicator.KEY_CONSTANT.equals(key)) {
Constant constant = MetaDataController.getConstant(uid);
if (constant != null) {
matcher.appendReplacement(buffer, String.valueOf(constant.getValue()));
} else {
continue;
}
} else if (ProgramIndicator.KEY_PROGRAM_VARIABLE.equals(key)) {
if (programInstance != null) { //in case of single event without reg
Date currentDate = new Date();
Date date = null;
if (ProgramIndicator.ENROLLMENT_DATE.equals(uid)) {
date = DateUtils.getMediumDate(programInstance.getEnrollmentDate());
} else if (ProgramIndicator.INCIDENT_DATE.equals(uid)) {
date = DateUtils.getMediumDate(programInstance.getIncidentDate());
} else if (ProgramIndicator.CURRENT_DATE.equals(uid)) {
date = currentDate;
}
if (date != null) {
matcher.appendReplacement(buffer, DateUtils.daysBetween(currentDate, date) + "");
}
}
}
}
if(valueCount <= 0) {
//returning null in case there are now values in the expression.
return null;
}
expression = TextUtils.appendTail(matcher, buffer);
// ---------------------------------------------------------------------
// Value count variable
// ---------------------------------------------------------------------
buffer = new StringBuffer();
matcher = ProgramIndicator.VALUECOUNT_PATTERN.matcher(expression);
while (matcher.find()) {
String var = matcher.group(1);
if (ProgramIndicator.VAR_VALUE_COUNT.equals(var)) {
matcher.appendReplacement(buffer, String.valueOf(valueCount));
} else if (ProgramIndicator.VAR_ZERO_POS_VALUE_COUNT.equals(var)) {
matcher.appendReplacement(buffer, String.valueOf(zeroPosValueCount));
}
}
expression = TextUtils.appendTail(matcher, buffer);
Double value;
try {
value = ExpressionUtils.evaluateToDouble(expression, null);
} catch (JexlException e) {
e.printStackTrace();
value = new Double(0);
}
return value;
}
private static boolean isZeroOrPositive(String value) {
return MathUtils.isNumeric(value) && Double.valueOf(value) >= 0d;
}
}
| |
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.csp.sentinel.dashboard.controller;
import java.util.Date;
import java.util.List;
import com.alibaba.csp.sentinel.dashboard.auth.AuthAction;
import com.alibaba.csp.sentinel.dashboard.client.SentinelApiClient;
import com.alibaba.csp.sentinel.dashboard.discovery.MachineInfo;
import com.alibaba.csp.sentinel.dashboard.auth.AuthService.PrivilegeType;
import com.alibaba.csp.sentinel.dashboard.repository.rule.RuleRepository;
import com.alibaba.csp.sentinel.slots.block.RuleConstant;
import com.alibaba.csp.sentinel.slots.block.degrade.circuitbreaker.CircuitBreakerStrategy;
import com.alibaba.csp.sentinel.util.StringUtil;
import com.alibaba.csp.sentinel.dashboard.datasource.entity.rule.DegradeRuleEntity;
import com.alibaba.csp.sentinel.dashboard.domain.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* Controller regarding APIs of degrade rules. Refactored since 1.8.0.
*
* @author Carpenter Lee
* @author Eric Zhao
*/
@RestController
@RequestMapping("/degrade")
public class DegradeController {
    private final Logger logger = LoggerFactory.getLogger(DegradeController.class);
    // In-memory rule store; rules are re-fetched from the machine on every query.
    @Autowired
    private RuleRepository<DegradeRuleEntity, Long> repository;
    // Client used to push/pull rules to/from the instrumented machine.
    @Autowired
    private SentinelApiClient sentinelApiClient;
    /**
     * Fetches the degrade rules currently active on the given machine and
     * caches them in the repository (assigning ids as a side effect).
     *
     * @param app  application name, required
     * @param ip   machine IP, required
     * @param port machine command port, required
     * @return rules on success, otherwise a failure Result with code -1
     */
    @GetMapping("/rules.json")
    @AuthAction(PrivilegeType.READ_RULE)
    public Result<List<DegradeRuleEntity>> apiQueryMachineRules(String app, String ip, Integer port) {
        if (StringUtil.isEmpty(app)) {
            return Result.ofFail(-1, "app can't be null or empty");
        }
        if (StringUtil.isEmpty(ip)) {
            return Result.ofFail(-1, "ip can't be null or empty");
        }
        if (port == null) {
            return Result.ofFail(-1, "port can't be null");
        }
        try {
            List<DegradeRuleEntity> rules = sentinelApiClient.fetchDegradeRuleOfMachine(app, ip, port);
            rules = repository.saveAll(rules);
            return Result.ofSuccess(rules);
        } catch (Throwable throwable) {
            // NOTE(review): message looks copy-pasted from an apps endpoint —
            // "query degrade rules error" would be more accurate.
            logger.error("queryApps error:", throwable);
            return Result.ofThrowable(-1, throwable);
        }
    }
    /**
     * Validates and persists a new degrade rule, then pushes the full rule
     * set of the target machine. A publish failure is only logged; the rule
     * is still returned as a success.
     *
     * @param entity rule to add (app/ip/port identify the target machine)
     * @return the saved rule, or a failure Result describing the bad field
     */
    @PostMapping("/rule")
    @AuthAction(PrivilegeType.WRITE_RULE)
    public Result<DegradeRuleEntity> apiAddRule(@RequestBody DegradeRuleEntity entity) {
        Result<DegradeRuleEntity> checkResult = checkEntityInternal(entity);
        if (checkResult != null) {
            return checkResult;
        }
        Date date = new Date();
        entity.setGmtCreate(date);
        entity.setGmtModified(date);
        try {
            entity = repository.save(entity);
        } catch (Throwable t) {
            logger.error("Failed to add new degrade rule, app={}, ip={}", entity.getApp(), entity.getIp(), t);
            return Result.ofThrowable(-1, t);
        }
        if (!publishRules(entity.getApp(), entity.getIp(), entity.getPort())) {
            logger.warn("Publish degrade rules failed, app={}", entity.getApp());
        }
        return Result.ofSuccess(entity);
    }
    /**
     * Updates an existing rule. The machine identity (app/ip/port) and id are
     * taken from the stored rule, so a client cannot move a rule to another
     * machine via this endpoint; gmtCreate is preserved.
     *
     * @param id     id of the rule to update, must be positive
     * @param entity new rule values
     * @return the saved rule, or a failure Result
     */
    @PutMapping("/rule/{id}")
    @AuthAction(PrivilegeType.WRITE_RULE)
    public Result<DegradeRuleEntity> apiUpdateRule(@PathVariable("id") Long id,
                                                   @RequestBody DegradeRuleEntity entity) {
        if (id == null || id <= 0) {
            // NOTE(review): the check also rejects 0, so "null or non-positive"
            // would describe the condition more precisely.
            return Result.ofFail(-1, "id can't be null or negative");
        }
        DegradeRuleEntity oldEntity = repository.findById(id);
        if (oldEntity == null) {
            return Result.ofFail(-1, "Degrade rule does not exist, id=" + id);
        }
        entity.setApp(oldEntity.getApp());
        entity.setIp(oldEntity.getIp());
        entity.setPort(oldEntity.getPort());
        entity.setId(oldEntity.getId());
        Result<DegradeRuleEntity> checkResult = checkEntityInternal(entity);
        if (checkResult != null) {
            return checkResult;
        }
        entity.setGmtCreate(oldEntity.getGmtCreate());
        entity.setGmtModified(new Date());
        try {
            entity = repository.save(entity);
        } catch (Throwable t) {
            logger.error("Failed to save degrade rule, id={}, rule={}", id, entity, t);
            return Result.ofThrowable(-1, t);
        }
        if (!publishRules(entity.getApp(), entity.getIp(), entity.getPort())) {
            logger.warn("Publish degrade rules failed, app={}", entity.getApp());
        }
        return Result.ofSuccess(entity);
    }
    /**
     * Deletes a rule by id and republishes the machine's remaining rules.
     * Deleting a non-existent id is treated as success (idempotent delete).
     *
     * @param id id of the rule to delete
     * @return the deleted id (or null payload if it did not exist)
     */
    @DeleteMapping("/rule/{id}")
    @AuthAction(PrivilegeType.DELETE_RULE)
    public Result<Long> delete(@PathVariable("id") Long id) {
        if (id == null) {
            return Result.ofFail(-1, "id can't be null");
        }
        DegradeRuleEntity oldEntity = repository.findById(id);
        if (oldEntity == null) {
            return Result.ofSuccess(null);
        }
        try {
            repository.delete(id);
        } catch (Throwable throwable) {
            logger.error("Failed to delete degrade rule, id={}", id, throwable);
            return Result.ofThrowable(-1, throwable);
        }
        if (!publishRules(oldEntity.getApp(), oldEntity.getIp(), oldEntity.getPort())) {
            logger.warn("Publish degrade rules failed, app={}", oldEntity.getApp());
        }
        return Result.ofSuccess(id);
    }
    /**
     * Pushes all repository rules for the given machine to the machine.
     * Returns false when the remote call reports failure.
     */
    private boolean publishRules(String app, String ip, Integer port) {
        List<DegradeRuleEntity> rules = repository.findAllByMachine(MachineInfo.of(app, ip, port));
        return sentinelApiClient.setDegradeRuleOfMachine(app, ip, port, rules);
    }
    /**
     * Field-by-field validation of an incoming rule.
     *
     * @return a failure Result describing the first invalid field, or null
     *         when the entity is valid (callers treat null as "ok")
     */
    private <R> Result<R> checkEntityInternal(DegradeRuleEntity entity) {
        if (StringUtil.isBlank(entity.getApp())) {
            return Result.ofFail(-1, "app can't be blank");
        }
        if (StringUtil.isBlank(entity.getIp())) {
            return Result.ofFail(-1, "ip can't be null or empty");
        }
        if (entity.getPort() == null || entity.getPort() <= 0) {
            return Result.ofFail(-1, "invalid port: " + entity.getPort());
        }
        if (StringUtil.isBlank(entity.getLimitApp())) {
            return Result.ofFail(-1, "limitApp can't be null or empty");
        }
        if (StringUtil.isBlank(entity.getResource())) {
            return Result.ofFail(-1, "resource can't be null or empty");
        }
        Double threshold = entity.getCount();
        if (threshold == null || threshold < 0) {
            return Result.ofFail(-1, "invalid threshold: " + threshold);
        }
        Integer recoveryTimeoutSec = entity.getTimeWindow();
        if (recoveryTimeoutSec == null || recoveryTimeoutSec <= 0) {
            return Result.ofFail(-1, "recoveryTimeout should be positive");
        }
        Integer strategy = entity.getGrade();
        if (strategy == null) {
            return Result.ofFail(-1, "circuit breaker strategy cannot be null");
        }
        // Strategy must lie within the known range of circuit-breaker grades.
        if (strategy < CircuitBreakerStrategy.SLOW_REQUEST_RATIO.getType()
            || strategy > RuleConstant.DEGRADE_GRADE_EXCEPTION_COUNT) {
            return Result.ofFail(-1, "Invalid circuit breaker strategy: " + strategy);
        }
        if (entity.getMinRequestAmount() == null || entity.getMinRequestAmount() <= 0) {
            return Result.ofFail(-1, "Invalid minRequestAmount");
        }
        if (entity.getStatIntervalMs() == null || entity.getStatIntervalMs() <= 0) {
            return Result.ofFail(-1, "Invalid statInterval");
        }
        // Strategy-specific threshold checks.
        if (strategy == RuleConstant.DEGRADE_GRADE_RT) {
            Double slowRatio = entity.getSlowRatioThreshold();
            if (slowRatio == null) {
                return Result.ofFail(-1, "SlowRatioThreshold is required for slow request ratio strategy");
            } else if (slowRatio < 0 || slowRatio > 1) {
                return Result.ofFail(-1, "SlowRatioThreshold should be in range: [0.0, 1.0]");
            }
        } else if (strategy == RuleConstant.DEGRADE_GRADE_EXCEPTION_RATIO) {
            if (threshold > 1) {
                return Result.ofFail(-1, "Ratio threshold should be in range: [0.0, 1.0]");
            }
        }
        return null;
    }
}
| |
/**
* Copyright (c) 2007, Gaudenz Alder
*/
package com.mxgraph.model;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
* Cells are the elements of the graph model. They represent the state
* of the groups, vertices and edges in a graph.
*
* <h4>Edge Labels</h4>
*
* Using the x- and y-coordinates of a cell's geometry it is
* possible to position the label on edges on a specific location
* on the actual edge shape as it appears on the screen. The
* x-coordinate of an edge's geometry is used to describe the
* distance from the center of the edge from -1 to 1 with 0
* being the center of the edge and the default value. The
* y-coordinate of an edge's geometry is used to describe
* the absolute, orthogonal distance in pixels from that
* point. In addition, the mxGeometry.offset is used
* as a absolute offset vector from the resulting point.
*
* The width and height of an edge geometry are ignored.
*
 * To add more than one edge label, add a child vertex with
 * a relative geometry. The x- and y-coordinates of that
 * geometry will have the same semantics as the above for
 * edge labels.
*/
public class mxCell implements mxICell, Cloneable, Serializable {

	private static final long serialVersionUID = 910211337632342672L;

	/** Cell identifier. Default is null. */
	protected String id;

	/** User object carried by this cell. Default is null. */
	protected Object value;

	/** Geometry of this cell. Default is null. */
	protected mxGeometry geometry;

	/** Style of the form stylename[;key=value]. Default is null. */
	protected String style;

	/** True if this cell is a vertex. */
	protected boolean vertex = false;

	/** True if this cell is an edge. */
	protected boolean edge = false;

	/** True if this cell accepts connections. */
	protected boolean connectable = true;

	/** True if this cell is visible. */
	protected boolean visible = true;

	/** True if this cell is collapsed. */
	protected boolean collapsed = false;

	/** Parent cell. */
	protected mxICell parent;

	/** Source terminal (edges only). */
	protected mxICell source;

	/** Target terminal (edges only). */
	protected mxICell target;

	/** Child cells, lazily created. */
	protected List<Object> children;

	/** Connected edges, lazily created. */
	protected List<Object> edges;

	/**
	 * Constructs a new cell with an empty user object.
	 */
	public mxCell() {
		this(null);
	}

	/**
	 * Constructs a new cell for the given user object.
	 *
	 * @param value Object that represents the value of the cell.
	 */
	public mxCell(Object value) {
		this(value, null, null);
	}

	/**
	 * Constructs a new cell for the given parameters.
	 *
	 * @param value Object that represents the value of the cell.
	 * @param geometry Specifies the geometry of the cell.
	 * @param style Specifies the style as a formatted string.
	 */
	public mxCell(Object value, mxGeometry geometry, String style) {
		setValue(value);
		setGeometry(geometry);
		setStyle(style);
	}

	/** Returns the cell's id. */
	public String getId() {
		return id;
	}

	/** Sets the cell's id. */
	public void setId(String id) {
		this.id = id;
	}

	/** Returns the user object. */
	public Object getValue() {
		return value;
	}

	/** Sets the user object. */
	public void setValue(Object value) {
		this.value = value;
	}

	/** Returns the geometry. */
	public mxGeometry getGeometry() {
		return geometry;
	}

	/** Sets the geometry. */
	public void setGeometry(mxGeometry geometry) {
		this.geometry = geometry;
	}

	/** Returns the style string. */
	public String getStyle() {
		return style;
	}

	/** Sets the style string. */
	public void setStyle(String style) {
		this.style = style;
	}

	/** Returns true if this cell is a vertex. */
	public boolean isVertex() {
		return vertex;
	}

	/** Marks this cell as a vertex (or not). */
	public void setVertex(boolean vertex) {
		this.vertex = vertex;
	}

	/** Returns true if this cell is an edge. */
	public boolean isEdge() {
		return edge;
	}

	/** Marks this cell as an edge (or not). */
	public void setEdge(boolean edge) {
		this.edge = edge;
	}

	/** Returns true if this cell accepts connections. */
	public boolean isConnectable() {
		return connectable;
	}

	/** Sets whether this cell accepts connections. */
	public void setConnectable(boolean connectable) {
		this.connectable = connectable;
	}

	/** Returns true if this cell is visible. */
	public boolean isVisible() {
		return visible;
	}

	/** Sets the visibility of this cell. */
	public void setVisible(boolean visible) {
		this.visible = visible;
	}

	/** Returns true if this cell is collapsed. */
	public boolean isCollapsed() {
		return collapsed;
	}

	/** Sets the collapsed state of this cell. */
	public void setCollapsed(boolean collapsed) {
		this.collapsed = collapsed;
	}

	/** Returns the parent cell. */
	public mxICell getParent() {
		return parent;
	}

	/** Sets the parent cell (does not update the parent's child list). */
	public void setParent(mxICell parent) {
		this.parent = parent;
	}

	/**
	 * Returns the source terminal.
	 */
	public mxICell getSource() {
		return source;
	}

	/**
	 * Sets the source terminal.
	 *
	 * @param source Cell that represents the new source terminal.
	 */
	public void setSource(mxICell source) {
		this.source = source;
	}

	/**
	 * Returns the target terminal.
	 */
	public mxICell getTarget() {
		return target;
	}

	/**
	 * Sets the target terminal.
	 *
	 * @param target Cell that represents the new target terminal.
	 */
	public void setTarget(mxICell target) {
		this.target = target;
	}

	/** Returns the source terminal if the flag is true, else the target. */
	public mxICell getTerminal(boolean source) {
		if (source) {
			return getSource();
		}
		return getTarget();
	}

	/** Sets the source or target terminal and returns the new terminal. */
	public mxICell setTerminal(mxICell terminal, boolean isSource) {
		if (isSource) {
			setSource(terminal);
		} else {
			setTarget(terminal);
		}
		return terminal;
	}

	/** Returns the number of child cells. */
	public int getChildCount() {
		return (children == null) ? 0 : children.size();
	}

	/** Returns the index of the given child, or -1 if absent. */
	public int getIndex(mxICell child) {
		return (children == null) ? -1 : children.indexOf(child);
	}

	/** Returns the child at the given index (null if no children exist). */
	public mxICell getChildAt(int index) {
		return (children == null) ? null : (mxICell) children.get(index);
	}

	/** Appends the given child, re-inserting in place if already a child. */
	public mxICell insert(mxICell child) {
		int position = getChildCount();
		// When the child is already ours, removing it first shrinks the
		// list by one, so target the last slot of the shrunken list.
		if (child.getParent() == this) {
			position--;
		}
		return insert(child, position);
	}

	/** Inserts the given child at the given index and returns it. */
	public mxICell insert(mxICell child, int index) {
		if (child != null) {
			child.removeFromParent();
			child.setParent(this);
			if (children == null) {
				// First child: create the list and append (index is not
				// consulted in this branch, matching the original behavior).
				children = new ArrayList<Object>();
				children.add(child);
			} else {
				children.add(index, child);
			}
		}
		return child;
	}

	/** Removes and returns the child at the given index (null if none). */
	public mxICell remove(int index) {
		mxICell removed = null;
		if (children != null && index >= 0) {
			removed = getChildAt(index);
			remove(removed);
		}
		return removed;
	}

	/** Removes the given child and clears its parent reference. */
	public mxICell remove(mxICell child) {
		if (child != null && children != null) {
			children.remove(child);
			child.setParent(null);
		}
		return child;
	}

	/** Detaches this cell from its parent, if any. */
	public void removeFromParent() {
		if (parent != null) {
			parent.remove(this);
		}
	}

	/** Returns the number of connected edges. */
	public int getEdgeCount() {
		return (edges == null) ? 0 : edges.size();
	}

	/** Returns the index of the given edge, or -1 if absent. */
	public int getEdgeIndex(mxICell edge) {
		return (edges == null) ? -1 : edges.indexOf(edge);
	}

	/** Returns the edge at the given index (null if no edges exist). */
	public mxICell getEdgeAt(int index) {
		return (edges == null) ? null : (mxICell) edges.get(index);
	}

	/** Connects the given edge to this cell as source or target. */
	public mxICell insertEdge(mxICell edge, boolean isOutgoing) {
		if (edge != null) {
			edge.removeFromTerminal(isOutgoing);
			edge.setTerminal(this, isOutgoing);
			// Only register the edge once: skip the add when it is already
			// listed via its opposite terminal being this same cell.
			if (edges == null || edge.getTerminal(!isOutgoing) != this
					|| !edges.contains(edge)) {
				if (edges == null) {
					edges = new ArrayList<Object>();
				}
				edges.add(edge);
			}
		}
		return edge;
	}

	/** Disconnects the given edge from this cell as source or target. */
	public mxICell removeEdge(mxICell edge, boolean isOutgoing) {
		if (edge != null) {
			// Keep the edge listed while its opposite terminal is still us.
			if (edges != null && edge.getTerminal(!isOutgoing) != this) {
				edges.remove(edge);
			}
			edge.setTerminal(null, isOutgoing);
		}
		return edge;
	}

	/** Removes this edge from its source or target terminal, if set. */
	public void removeFromTerminal(boolean isSource) {
		mxICell terminalCell = getTerminal(isSource);
		if (terminalCell != null) {
			terminalCell.removeEdge(this, isSource);
		}
	}

	/**
	 * Returns the specified attribute from the user object if it is an XML
	 * node.
	 *
	 * @param name Name of the attribute whose value should be returned.
	 * @return Returns the value of the given attribute or null.
	 */
	public String getAttribute(String name) {
		return getAttribute(name, null);
	}

	/**
	 * Returns the specified attribute from the user object if it is an XML
	 * node.
	 *
	 * @param name Name of the attribute whose value should be returned.
	 * @param defaultValue Default value to use if the attribute has no value.
	 * @return Returns the value of the given attribute or defaultValue.
	 */
	public String getAttribute(String name, String defaultValue) {
		Object userObject = getValue();
		String result = null;
		if (userObject instanceof Element) {
			// NOTE: DOM getAttribute returns "" (not null) for missing
			// attributes, so defaultValue mainly applies to non-Element values.
			result = ((Element) userObject).getAttribute(name);
		}
		return (result == null) ? defaultValue : result;
	}

	/**
	 * Sets the specified attribute on the user object if it is an XML node.
	 *
	 * @param name Name of the attribute whose value should be set.
	 * @param value New value of the attribute.
	 */
	public void setAttribute(String name, String value) {
		Object userObject = getValue();
		if (userObject instanceof Element) {
			((Element) userObject).setAttribute(name, value);
		}
	}

	/**
	 * Returns a clone of the cell with no parent, terminals, children or
	 * edges; the geometry and XML user objects are deep-copied.
	 */
	public Object clone() throws CloneNotSupportedException {
		mxCell copy = (mxCell) super.clone();
		copy.setValue(cloneValue());
		copy.setStyle(getStyle());
		copy.setCollapsed(isCollapsed());
		copy.setConnectable(isConnectable());
		copy.setEdge(isEdge());
		copy.setVertex(isVertex());
		copy.setVisible(isVisible());
		copy.setParent(null);
		copy.setSource(null);
		copy.setTarget(null);
		copy.children = null;
		copy.edges = null;
		mxGeometry geo = getGeometry();
		if (geo != null) {
			copy.setGeometry((mxGeometry) geo.clone());
		}
		return copy;
	}

	/**
	 * Returns a clone of the user object: XML nodes are deep-cloned, any
	 * other value is returned as the same instance.
	 */
	protected Object cloneValue() {
		Object userObject = getValue();
		if (userObject instanceof Node) {
			return ((Node) userObject).cloneNode(true);
		}
		return userObject;
	}

	@Override
	public String toString() {
		// Same output format as the previous StringBuilder-based version.
		return getClass().getSimpleName() + " [" + "id=" + id + ", value="
				+ value + ", geometry=" + geometry + "]";
	}
}
| |
package chav1961.purelib.fsys.bridge;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.nio.file.DirectoryStream;
import java.nio.file.FileSystemNotFoundException;
import java.nio.file.NotDirectoryException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.Set;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import chav1961.purelib.basic.Utils;
import chav1961.purelib.fsys.FileSystemFactory;
import chav1961.purelib.fsys.interfaces.FileSystemInterface;
import chav1961.purelib.fsys.interfaces.FileSystemInterfaceDescriptor;
import chav1961.purelib.testing.OrdinalTestCategory;
@Category(OrdinalTestCategory.class)
public class PureLibFileSystemProviderTest {
	// Scratch directory under java.io.tmpdir; created in prepare(), deleted in unprepare().
	final File ioDir = new File(new File(System.getProperty("java.io.tmpdir")),"testDir");
	/** Creates testDir/fromDir/fromFile.txt with known content before each test. */
	@Before
	public void prepare() throws IOException {
		new File(ioDir,"fromDir").mkdirs();
		try(final FileOutputStream fos = new FileOutputStream(new File(ioDir,"fromDir/fromFile.txt"))) {
			fos.write("test string".getBytes());
			fos.flush();
		}
	}
	/** Removes the scratch directory after each test. */
	@After
	public void unprepare() throws IOException {
		Utils.deleteDir(ioDir);
	}
	/**
	 * Checks the SPI surface: scheme name, that the provider exposes one root
	 * per registered file system, and argument validation of
	 * newFileSystem/getFileSystem.
	 */
	@Test
	public void basicAndSPITest() throws IOException {
		final PureLibFileSystemProvider p = new PureLibFileSystemProvider();
		final Set<URI> schemes = new HashSet<>();
		final Set<URI> providers = new HashSet<>();
		Assert.assertEquals(FileSystemInterface.FILESYSTEM_URI_SCHEME,p.getScheme());
		// Collect the URIs of all registered file-system implementations.
		for (FileSystemInterfaceDescriptor item : FileSystemFactory.getAvailableFileSystems()) {
			providers.add(URI.create(item.getUriTemplate().getSchemeSpecificPart()));
		}
		Assert.assertTrue(!providers.isEmpty());
		// Root directories of a freshly created file system must match the registered set.
		try(final PureLibFileSystem fs = (PureLibFileSystem) p.newFileSystem(URI.create("fsys:/"),Utils.mkMap())) {
			for (Path item : fs.getRootDirectories()) {
				schemes.add(item.toUri());
			}
		}
		Assert.assertEquals(providers,schemes);
		schemes.clear();
		// Same check via getFileSystem (existing instance lookup).
		try(final PureLibFileSystem fs = (PureLibFileSystem) p.getFileSystem(URI.create("fsys:/"))) {
			for (Path item : fs.getRootDirectories()) {
				schemes.add(item.toUri());
			}
		}
		Assert.assertEquals(providers,schemes);
		// Argument validation: null/unknown URIs and null environment maps.
		try {p.newFileSystem((URI)null,Utils.mkMap());
			Assert.fail("Mandatory exception was not detected (null 1-st argument)");
		} catch (NullPointerException exc) {
		}
		try {p.newFileSystem(URI.create("unknown:/"),Utils.mkMap());
			Assert.fail("Mandatory exception was not detected (illegal 1-st argument scheme)");
		} catch (IOException exc) {
		}
		try {p.newFileSystem(URI.create("fsys:/"),null);
			Assert.fail("Mandatory exception was not detected (null 2-nd argument)");
		} catch (NullPointerException exc) {
		}
		try {p.getFileSystem((URI)null);
			Assert.fail("Mandatory exception was not detected (null 1-st argument)");
		} catch (NullPointerException exc) {
		}
		try {p.getFileSystem(URI.create("unknown:/"));
			Assert.fail("Mandatory exception was not detected (illegal 1-st argument scheme)");
		} catch (FileSystemNotFoundException exc) {
		}
	}
	// NOTE(review): this test body only constructs the provider and asserts
	// nothing — it appears to be an unfinished stub; confirm and complete it.
	@Test
	public void viewsAndAttributesTest() throws IOException {
		final PureLibFileSystemProvider plfsp = new PureLibFileSystemProvider();
	}
	/**
	 * Exercises path construction, createDirectory, newDirectoryStream, move,
	 * copy/delete argument validation, isSameFile and isHidden against the
	 * scratch directory created in prepare().
	 */
	@Test
	public void pathAndActionsTest() throws IOException {
		final PureLibFileSystemProvider plfsp = new PureLibFileSystemProvider();
		try(final PureLibFileSystem fs = (PureLibFileSystem) plfsp.newFileSystem(URI.create("fsys:/"),Utils.mkMap())) {
			final Path p = fs.getPath("file",ioDir.getAbsoluteFile().toURI().getSchemeSpecificPart());
			final Set<String> dirNames = new HashSet<>();
			// A path built from an absolute URI must be absolute and round-trip through toUri().
			Assert.assertTrue(p.isAbsolute());
			Assert.assertEquals(p,plfsp.getPath(p.toUri()));
			// getPath argument validation.
			try{fs.getPath(null,ioDir.getAbsoluteFile().toURI().getSchemeSpecificPart());
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (IllegalArgumentException exc) {
			}
			try{fs.getPath("",ioDir.getAbsoluteFile().toURI().getSchemeSpecificPart());
				Assert.fail("Mandatory exception was not detected (empty 1-st argument)");
			} catch (IllegalArgumentException exc) {
			}
			try{fs.getPath("file",(String[])null);
				Assert.fail("Mandatory exception was not detected (null 2-nd argument)");
			} catch (NullPointerException exc) {
			}
			try{fs.getPath("file",(String)null);
				Assert.fail("Mandatory exception was not detected (nulls inside 2-nd argument)");
			} catch (IllegalArgumentException exc) {
			}
			try{plfsp.getPath((URI)null);
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (NullPointerException exc) {
			}
			try{plfsp.getPath(URI.create("abcde"));
				Assert.fail("Mandatory exception was not detected (1-st argument is not absolute URI)");
			} catch (IllegalArgumentException exc) {
			}
			try{plfsp.getPath(URI.create("unknown:/abcde"));
				Assert.fail("Mandatory exception was not detected (1-st argument refers to unknown file system)");
			} catch (FileSystemNotFoundException exc) {
			}
			// createDirectory: happy path plus validation.
			final Path pNew = fs.getPath("file",ioDir.getAbsoluteFile().toURI().getSchemeSpecificPart(),"toDir");
			plfsp.createDirectory(pNew);
			try{plfsp.createDirectory(null);
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (NullPointerException exc) {
			}
			try{plfsp.createDirectory(Paths.get(URI.create("file:/c:/")));
				Assert.fail("Mandatory exception was not detected (1-st argument is not a Pure Library path)");
			} catch (IllegalArgumentException exc) {
			}
			// Listing must now show both the prepared fromDir and the new toDir.
			try(DirectoryStream<Path> stream = plfsp.newDirectoryStream(p,PureLibFileSystemProvider.ALL_CONTENT)) {
				for (Path item : stream) {
					dirNames.add(item.getFileName().toUri().toString());
				}
			}
			Assert.assertEquals(Set.of("fromDir","toDir"),dirNames);
			try{plfsp.newDirectoryStream(null,PureLibFileSystemProvider.ALL_CONTENT);
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (NullPointerException exc) {
			}
			try{plfsp.newDirectoryStream(Paths.get(URI.create("file:/c:/")),PureLibFileSystemProvider.ALL_CONTENT);
				Assert.fail("Mandatory exception was not detected (1-st argument is not a Pure Library path)");
			} catch (IllegalArgumentException exc) {
			}
			try{plfsp.newDirectoryStream(fs.getPath("file",ioDir.getAbsoluteFile().toURI().getSchemeSpecificPart(),"unknown"),PureLibFileSystemProvider.ALL_CONTENT);
				Assert.fail("Mandatory exception was not detected (1-st argument refers to invalid object)");
			} catch (NotDirectoryException exc) {
			}
			try{plfsp.newDirectoryStream(p,null);
				Assert.fail("Mandatory exception was not detected (null 2-nd argument)");
			} catch (NullPointerException exc) {
			}
			// move collapses fromDir into toDir; copy/delete validation follows.
			final Path pOld = fs.getPath("file",ioDir.getAbsoluteFile().toURI().getSchemeSpecificPart(),"fromDir");
			plfsp.move(pOld,pNew);
			try{plfsp.copy(null,pNew);
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (NullPointerException exc) {
			}
			try{plfsp.copy(Paths.get(URI.create("file:/c:/")),pNew);
				Assert.fail("Mandatory exception was not detected (1-st argument is not a Pure Library path)");
			} catch (IllegalArgumentException exc) {
			}
			try{plfsp.copy(pOld,null);
				Assert.fail("Mandatory exception was not detected (null 2-nd argument)");
			} catch (NullPointerException exc) {
			}
			try{plfsp.copy(pOld,Paths.get(URI.create("file:/c:/")));
				Assert.fail("Mandatory exception was not detected (2-nd argument is not a Pure Library path)");
			} catch (IllegalArgumentException exc) {
			}
			try{plfsp.delete(null);
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (NullPointerException exc) {
			}
			try{plfsp.delete(Paths.get(URI.create("file:/c:/")));
				Assert.fail("Mandatory exception was not detected (1-st argument is not a Pure Library path)");
			} catch (IllegalArgumentException exc) {
			}
			// After the move only toDir remains in the listing.
			dirNames.clear();
			try(DirectoryStream<Path> stream = plfsp.newDirectoryStream(p,PureLibFileSystemProvider.ALL_CONTENT)) {
				for (Path item : stream) {
					dirNames.add(item.getFileName().toUri().toString());
				}
			}
			Assert.assertEquals(Set.of("toDir"),dirNames);
			// isSameFile / isHidden and their argument validation.
			Assert.assertTrue(plfsp.isSameFile(pNew,pNew));
			Assert.assertFalse(plfsp.isSameFile(pNew,pOld));
			try{plfsp.isSameFile(null,pNew);
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (NullPointerException exc) {
			}
			try{plfsp.isSameFile(pNew,null);
				Assert.fail("Mandatory exception was not detected (null 2-nd argument)");
			} catch (NullPointerException exc) {
			}
			Assert.assertFalse(plfsp.isHidden(pNew));
			try{plfsp.isHidden(null);
				Assert.fail("Mandatory exception was not detected (null 1-st argument)");
			} catch (NullPointerException exc) {
			}
		}
	}
}
| |
/*
* Copyright 2014 SeaClouds
* Contact: SeaClouds
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.seaclouds.platform.dashboard.resources;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import eu.seaclouds.platform.dashboard.config.SlaFactory;
import eu.seaclouds.platform.dashboard.http.HttpGetRequestBuilder;
import eu.seaclouds.platform.dashboard.http.HttpPostRequestBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.net.URISyntaxException;
@Path("/sla")
@Produces(MediaType.APPLICATION_JSON)
public class SlaResource {
static Logger log = LoggerFactory.getLogger(SlaResource.class);
private final SlaFactory sla;
    /**
     * Default constructor used by the container; falls back to a freshly
     * built SlaFactory and logs a warning because no explicit configuration
     * was supplied.
     */
    public SlaResource() {
        this(new SlaFactory());
        log.warn("Using default configuration for SlaResource");
    }
    /**
     * Creates the resource with an explicit SLA configuration.
     *
     * @param slaFactory configuration supplying the SLA service endpoint
     */
    public SlaResource(SlaFactory slaFactory) {
        this.sla = slaFactory;
    }
@POST
@Path("agreements")
public Response addAgreements(String json) {
JsonObject input = new JsonParser().parse(json).getAsJsonObject();
String rules = input.get("rules").getAsJsonPrimitive().getAsString();
String agreements = input.get("agreements").getAsJsonPrimitive().getAsString();
if (agreements != null && rules != null) {
try {
String slaResponse = new HttpPostRequestBuilder()
.multipartPostRequest(true)
.addParam("sla", agreements)
.addParam("rules", rules)
.host(sla.getEndpoint())
.path("/seaclouds/agreements")
.addHeader("Accept", "application/json")
.build();
// Change to JSON if necessary
// .addHeader("Content-Type", "application/json")
// .addHeader("Accept", "application/json")
// Notify the SLA when the rules are ready (Issue #56)
new HttpPostRequestBuilder()
.host(sla.getEndpoint())
.path("/seaclouds/commands/rulesready")
.addHeader("Accept", "application/json")
.build();
return Response.ok(slaResponse.toString()).build();
} catch (URISyntaxException | IOException e) {
log.error(e.getMessage());
return Response.status(Response.Status.NOT_FOUND).build();
}
} else {
return Response.status(Response.Status.BAD_REQUEST).build();
}
}
@GET
@Path("agreements")
public Response listAgreements(@QueryParam("provider") String provider, @QueryParam("status") String status) {
try {
String calculatedPath = "/agreements";
if (provider != null) {
calculatedPath += "?provider=" + provider;
}
if (provider == null && status != null) {
calculatedPath += "?";
} else if (provider != null && status != null) {
calculatedPath += "&";
}
if (status != null) {
calculatedPath += "status=" + status;
}
String slaResponse = new HttpGetRequestBuilder()
.host(sla.getEndpoint())
.path(calculatedPath)
.addHeader("Content-Type", "application/json")
.addHeader("Accept", "application/json")
.build();
return Response.ok(slaResponse.toString()).build();
} catch (IOException | URISyntaxException e) {
log.error(e.getMessage());
return Response.status(Response.Status.NOT_FOUND).build();
}
}
@GET
@Path("agreements/{id}")
public Response getAgreement(@PathParam("id") String id) {
if (id != null) {
try {
String slaResponse = new HttpGetRequestBuilder()
.host(sla.getEndpoint())
.path("/agreements/" + id)
.addHeader("Content-Type", "application/json")
.addHeader("Accept", "application/json")
.build();
return Response.ok(slaResponse).build();
} catch (IOException | URISyntaxException e) {
log.error(e.getMessage());
return Response.status(Response.Status.NOT_FOUND).build();
}
} else {
return Response.status(Response.Status.NOT_FOUND).build();
}
}
@GET
@Path("agreements/{id}/status")
public Response getAgreementStatus(@PathParam("id") String id) {
if (id != null) {
try {
// Get guarantee status
String slaResponse = new HttpGetRequestBuilder()
.host(sla.getEndpoint())
.path("/agreements/" + id + "/guaranteestatus")
.addHeader("Content-Type", "application/json")
.addHeader("Accept", "application/json")
.build();
JsonObject agreementStatusJson = new JsonParser().parse(slaResponse).getAsJsonObject();
JsonArray terms = agreementStatusJson.getAsJsonArray("guaranteeterms");
for (JsonElement term : terms) {
String guaranteeTermName = term.getAsJsonObject().get("name").getAsString();
String guaranteeTermStatus = term.getAsJsonObject().get("status").getAsString();
if(guaranteeTermStatus.equals("VIOLATED")){
slaResponse = new HttpGetRequestBuilder()
.host(sla.getEndpoint())
.addParam("agreementId", id)
.addParam("guaranteeTerm", guaranteeTermName)
.path("/violations")
.addHeader("Content-Type", "application/json")
.addHeader("Accept", "application/json")
.build();
term.getAsJsonObject().add("violations", new JsonParser().parse(slaResponse));
}else{
term.getAsJsonObject().add("violations", new JsonArray());
}
}
return Response.ok(agreementStatusJson.toString()).build();
} catch (IOException | URISyntaxException e) {
log.error(e.getMessage());
return Response.status(Response.Status.NOT_FOUND).build();
}
} else {
return Response.status(Response.Status.NOT_FOUND).build();
}
}
}
| |
/*
* CentVisualOptions.java
*
* Created on 7 gennaio 2008, 18.19
*/
package org.cytoscape.centiscape.internal.visualizer;
import java.lang.Double;
import java.util.Collections;
import java.util.Iterator;
import java.util.Observable;
import java.util.Observer;
import java.util.Vector;
import org.cytoscape.centiscape.internal.charts.CentPlotByNode;
import org.cytoscape.centiscape.internal.charts.CentScatterPlot;
import org.cytoscape.model.CyColumn;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.model.CyNode;
import org.cytoscape.model.CyTable;
import org.cytoscape.view.model.CyNetworkView;
/**
*
* @author admini
*/
/**
 * Swing panel offering the "Options and views" controls for CentiScaPe:
 * an AND/OR filter-combination switch, a per-node centrality bar plot, and a
 * centrality-vs-centrality scatter plot. Most of initComponents() is
 * NetBeans-Form-Editor generated code and must not be edited by hand.
 */
public class CentVisualOptions extends javax.swing.JPanel {
    /** Observable wrapper used to broadcast AND/OR mode changes to registered observers. */
    public class MyObservable extends Observable {
        // Marks the observable changed and fires notifyObservers() in one call.
        public void doIt() {
            this.setChanged();
            notifyObservers();
        }
    }
    public MyObservable notifier;     // fires whenever the AND/OR choice changes
    public boolean isAnd;             // true = combine filters with AND, false = OR
    public CyNetworkView currentview; // network view this panel operates on
    public CyNetwork currentnetwork;  // model backing currentview
    public Vector<CompCyNode> al;     // nodes sorted by name (same order as the combo box)
    public Vector centralities;       // computed centralities (raw Vector, legacy API)
    /** Creates new form CentVisualOptions */
    public CentVisualOptions(CyNetworkView currentview, Vector centralities) {
        this.currentview = currentview;
        this.currentnetwork = currentview.getModel();
        this.centralities = centralities;
        initComponents();
        loadNodesList();
        isAnd=true;
        notifier=new MyObservable();
    }
    /** CyNode wrapper that caches the node name and sorts alphabetically by it. */
    private class CompCyNode implements Comparable{
        public CyNode node;
        public String nodename;
        public CompCyNode(CyNode n){
            node=n;
            nodename = currentnetwork.getDefaultNodeTable().getRow(node.getSUID()).get("name", String.class);
        }
        // Orders by the "name" column of the default node table (looked up live,
        // not from the cached nodename field).
        public int compareTo(Object o) {
            CompCyNode o1=(CompCyNode) o;
            // return(node.getSUID().compareTo(o1.node.getSUID()));
            CyTable currentnodetable = currentnetwork.getDefaultNodeTable();
            return(currentnodetable.getRow(node.getSUID()).get("name", String.class).compareTo(
                    currentnodetable.getRow(o1.node.getSUID()).get("name", String.class)));
        }
    }
    //sort CyNodes and load the "Plot By Node" combobox
    public void loadNodesList(){
        plotByNodeSelector.removeAllItems();
        //CyNetwork currentnetwork = currentview.getModel();
        al=new Vector();
        for (Iterator i=currentnetwork.getNodeList().listIterator();i.hasNext();){
            CyNode el=(CyNode)i.next();
            al.add(new CompCyNode(el));
        }
        Collections.sort(al);
        for (int i =0;i<al.size(); i++) {
            plotByNodeSelector.addItem(((CompCyNode)al.get(i)).nodename);
        }
    }
    // populate centralities comboboxes
    // Fills the X/Y axis combo boxes with every numeric (Double/Long/Integer)
    // node and edge column of the current network. The cents parameter is
    // currently unused (historic API kept for compatibility).
    public void loadCentralities(Vector<Centrality> cents){
        xCombo.removeAllItems();
        yCombo.removeAllItems();
        /* for (Iterator<Centrality> it = cents.iterator(); it.hasNext();) {
            Centrality cent = (Centrality) it.next();
            xCombo.addItem(cent);
        } */
        // add numerical attributes too
        CyTable nodeTable = currentview.getModel().getDefaultNodeTable();
        for (Iterator i = nodeTable.getColumns().iterator(); i.hasNext();) {
            CyColumn currentcolumn = (CyColumn)i.next();
            // System.out.println("la colonna current ?? "+ currentcolumn.getName());
            // System.out.println("il primo tipo ?? "+ currentcolumn.getType());
            // System.out.println("il secondo tipo ?? "+ Double.class );
            if (currentcolumn.getType().equals(Double.class) ||
                    currentcolumn.getType().equals(Long.class) ||
                    currentcolumn.getType().equals(Integer.class) ) {
                String currentattribute = currentcolumn.getName();
                // System.out.println("la colonna current secondo ?? "+ currentcolumn.getName());
                yCombo.addItem(currentattribute);
                xCombo.addItem(currentattribute);
            }
        }
        CyTable edgeTable = currentview.getModel().getDefaultEdgeTable();
        for (Iterator i = edgeTable.getColumns().iterator(); i.hasNext();) {
            CyColumn currentcolumn = (CyColumn)i.next();
            // System.out.println("la colonna current ?? "+ currentcolumn.getName());
            // System.out.println("il primo tipo ?? "+ currentcolumn.getType());
            // System.out.println("il secondo tipo ?? "+ Double.class );
            if (currentcolumn.getType().equals(Double.class) ||
                    currentcolumn.getType().equals(Long.class) ||
                    currentcolumn.getType().equals(Integer.class) ) {
                String currentattribute = currentcolumn.getName();
                // System.out.println("la colonna current secondo ?? "+ currentcolumn.getName());
                yCombo.addItem(currentattribute);
                xCombo.addItem(currentattribute);
            }
        }
        // CyAttributes attrs = Cytoscape.getNodeAttributes();
        /*
        System.out.println("carico altri attributi");
        String[] names=attrs.getAttributeNames();
        for (int i = 0; i < names.length; i++) {
            String string = names[i];
            System.out.println("la stringa names ??"+names[i]);
            if ((attrs.getType(string)==attrs.TYPE_FLOATING) || (attrs.getType(string)==attrs.TYPE_INTEGER) )
            {
                System.out.println("la stringa names ?? dentro"+names[i]);
                yCombo.addItem(string);
                xCombo.addItem(string);
            }
        }*/
    }
    // Registers an observer for AND/OR mode-change notifications.
    public void addObserver(Observer obs){
        notifier.addObserver(obs);
    }
    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        ANDORButtonGroup = new javax.swing.ButtonGroup();
        andButton = new javax.swing.JRadioButton();
        orButton = new javax.swing.JRadioButton();
        jLabel1 = new javax.swing.JLabel();
        plottByNodeButton = new javax.swing.JButton();
        jLabel2 = new javax.swing.JLabel();
        jSeparator1 = new javax.swing.JSeparator();
        plotByNodeSelector = new javax.swing.JComboBox();
        jSeparator2 = new javax.swing.JSeparator();
        jLabel3 = new javax.swing.JLabel();
        xCombo = new javax.swing.JComboBox();
        yCombo = new javax.swing.JComboBox();
        jLabel4 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        plotByCentrality = new javax.swing.JButton();
        setBorder(javax.swing.BorderFactory.createTitledBorder("Options and views"));
        setMaximumSize(new java.awt.Dimension(500, 270));
        ANDORButtonGroup.add(andButton);
        andButton.setSelected(true);
        andButton.setText("AND");
        andButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                andButtonActionPerformed(evt);
            }
        });
        ANDORButtonGroup.add(orButton);
        orButton.setText("OR");
        orButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                orButtonActionPerformed(evt);
            }
        });
        jLabel1.setFont(new java.awt.Font("Tahoma", 1, 11));
        jLabel1.setText("plot by node");
        plottByNodeButton.setText("plot");
        plottByNodeButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                plottByNodeButtonActionPerformed(evt);
            }
        });
        jLabel2.setText("filter type");
        plotByNodeSelector.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                plotByNodeSelectorActionPerformed(evt);
            }
        });
        jLabel3.setFont(new java.awt.Font("Tahoma", 1, 11));
        jLabel3.setText("plot by centralities");
        xCombo.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                xComboActionPerformed(evt);
            }
        });
        jLabel4.setFont(new java.awt.Font("Tahoma", 1, 11));
        jLabel4.setText("horizontal axis");
        jLabel5.setFont(new java.awt.Font("Tahoma", 1, 11));
        jLabel5.setText("vertical axis");
        plotByCentrality.setText("plot");
        plotByCentrality.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                plotByCentralityActionPerformed(evt);
            }
        });
        org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
            .add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
                    .add(layout.createSequentialGroup()
                        .add(jLabel2)
                        .add(29, 29, 29)
                        .add(andButton)
                        .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                        .add(orButton))
                    .add(layout.createSequentialGroup()
                        .add(jLabel1)
                        .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                        .add(plotByNodeSelector, 0, 153, Short.MAX_VALUE)
                        .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                        .add(plottByNodeButton)
                        .add(4, 4, 4)))
                .addContainerGap())
            .add(jSeparator1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 328, Short.MAX_VALUE)
            .add(jSeparator2, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 328, Short.MAX_VALUE)
            .add(layout.createSequentialGroup()
                .addContainerGap()
                .add(jLabel3)
                .addContainerGap(209, Short.MAX_VALUE))
            .add(layout.createSequentialGroup()
                .addContainerGap()
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
                    .add(jLabel5)
                    .add(jLabel4))
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)
                    .add(yCombo, 0, 201, Short.MAX_VALUE)
                    .add(xCombo, 0, 201, Short.MAX_VALUE))
                .addContainerGap())
            .add(layout.createSequentialGroup()
                .addContainerGap()
                .add(plotByCentrality, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 304, Short.MAX_VALUE)
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
            .add(layout.createSequentialGroup()
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
                    .add(orButton)
                    .add(jLabel2)
                    .add(andButton))
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
                .add(jSeparator1, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 7, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
                    .add(jLabel1)
                    .add(plottByNodeButton)
                    .add(plotByNodeSelector, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                .add(jSeparator2, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 10, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
                .add(jLabel3)
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
                    .add(layout.createSequentialGroup()
                        .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                        .add(jLabel4))
                    .add(layout.createSequentialGroup()
                        .add(7, 7, 7)
                        .add(xCombo, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)))
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
                    .add(jLabel5)
                    .add(yCombo, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                .add(plotByCentrality)
                .addContainerGap(13, Short.MAX_VALUE))
        );
    }// </editor-fold>//GEN-END:initComponents
    // Switches the filter combination to AND and notifies observers.
    private void andButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_andButtonActionPerformed
        // TODO add your handling code here:
        isAnd=true;
        notifier.doIt();
    }//GEN-LAST:event_andButtonActionPerformed
    // Switches the filter combination to OR and notifies observers.
    private void orButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_orButtonActionPerformed
        // TODO add your handling code here:
        isAnd=false;
        notifier.doIt();
    }//GEN-LAST:event_orButtonActionPerformed
    // Opens a CentPlotByNode window for the node currently selected in the combo box.
    private void plottByNodeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_plottByNodeButtonActionPerformed
        // TODO add your handling code here:
        String nodename=(String)plotByNodeSelector.getSelectedItem();
        CyNode c;
        // Linear scan over the sorted node list to recover the CyNode behind the
        // selected display name; returns after the first match.
        for (int i =0;i<al.size(); i++) {
            if (((CompCyNode)al.get(i)).nodename.equals((String)plotByNodeSelector.getSelectedItem())) {
                c = (CyNode)((CompCyNode)al.get(i)).node;
                CentPlotByNode pbn=new CentPlotByNode(c,currentnetwork,centralities);
                pbn.setSize(700,400);
                pbn.setVisible(true);
                return;
            }
        }
        // CyNode c=(CyNode)plotByNodeSelector.getSelectedItem();
    }//GEN-LAST:event_plottByNodeButtonActionPerformed
    // Opens a scatter plot of the two attributes chosen in the X/Y combo boxes.
    private void plotByCentralityActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_plotByCentralityActionPerformed
        // TODO add your handling code here:
        // CentScatterPlot scatter=new CentScatterPlot((Centrality)xCombo.getSelectedItem(),(String)yCombo.getSelectedItem());
        CentScatterPlot scatter=new CentScatterPlot((String)xCombo.getSelectedItem(),(String)yCombo.getSelectedItem(),currentnetwork);
        scatter.setSize(700,600);
        scatter.setVisible(true);
    }//GEN-LAST:event_plotByCentralityActionPerformed
    private void plotByNodeSelectorActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_plotByNodeSelectorActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_plotByNodeSelectorActionPerformed
    private void xComboActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_xComboActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_xComboActionPerformed
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.ButtonGroup ANDORButtonGroup;
    private javax.swing.JRadioButton andButton;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JSeparator jSeparator1;
    private javax.swing.JSeparator jSeparator2;
    private javax.swing.JRadioButton orButton;
    private javax.swing.JButton plotByCentrality;
    private javax.swing.JComboBox plotByNodeSelector;
    private javax.swing.JButton plottByNodeButton;
    private javax.swing.JComboBox xCombo;
    private javax.swing.JComboBox yCombo;
    // End of variables declaration//GEN-END:variables
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jackrabbit.oak.segment;
import static com.google.common.collect.Lists.newArrayList;
import static org.apache.jackrabbit.oak.plugins.memory.MultiBinaryPropertyState.binaryPropertyFromBlob;
import static org.apache.jackrabbit.oak.segment.DefaultSegmentWriterBuilder.defaultSegmentWriterBuilder;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import static org.apache.jackrabbit.oak.segment.file.tar.GCGeneration.newGCGeneration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Random;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.GCNodeWriteMonitor;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.apache.jackrabbit.oak.segment.file.cancel.Canceller;
import org.apache.jackrabbit.oak.segment.spi.persistence.Buffer;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
 * Unit tests for {@code Compactor}: verifies that compaction produces an
 * equal-but-distinct node state in the next full GC generation, honors
 * cancellation, and propagates writer I/O failures.
 */
public class CompactorTest {
    // Keeps the temporary segment store under target/ with the other build artifacts.
    @Rule
    public TemporaryFolder folder = new TemporaryFolder(new File("target"));
    private FileStore fileStore;
    private SegmentNodeStore nodeStore;
    @Before
    public void setup() throws IOException, InvalidFileStoreVersionException {
        fileStore = fileStoreBuilder(folder.getRoot()).build();
        nodeStore = SegmentNodeStoreBuilders.builder(fileStore).build();
    }
    @After
    public void tearDown() {
        fileStore.close();
    }
    // Full compaction followed by an incremental (diff-based) compaction:
    // both results must be equal to their source but live in the next full
    // GC generation and be distinct object identities.
    @Test
    public void testCompact() throws Exception {
        Compactor compactor = createCompactor(fileStore, null);
        addTestContent(nodeStore);
        SegmentNodeState uncompacted = (SegmentNodeState) nodeStore.getRoot();
        SegmentNodeState compacted = compactor.compact(uncompacted, Canceller.newCanceller());
        assertNotNull(compacted);
        assertFalse(uncompacted == compacted);
        assertEquals(uncompacted, compacted);
        assertEquals(uncompacted.getSegment().getGcGeneration().nextFull(), compacted.getSegment().getGcGeneration());
        modifyTestContent(nodeStore);
        NodeState modified = nodeStore.getRoot();
        // Incremental form: compact only the diff between uncompacted and modified
        // on top of the previously compacted state.
        compacted = compactor.compact(uncompacted, modified, compacted, Canceller.newCanceller());
        assertNotNull(compacted);
        assertFalse(modified == compacted);
        assertEquals(modified, compacted);
        assertEquals(uncompacted.getSegment().getGcGeneration().nextFull(), compacted.getSegment().getGcGeneration());
    }
    // More nodes than UPDATE_LIMIT forces the compactor's intermediate-commit
    // path; the result must still be equal and in the next full generation.
    @Test
    public void testExceedUpdateLimit() throws Exception {
        Compactor compactor = createCompactor(fileStore, null);
        addNodes(nodeStore, Compactor.UPDATE_LIMIT * 2 + 1);
        SegmentNodeState uncompacted = (SegmentNodeState) nodeStore.getRoot();
        SegmentNodeState compacted = compactor.compact(uncompacted, Canceller.newCanceller());
        assertNotNull(compacted);
        assertFalse(uncompacted == compacted);
        assertEquals(uncompacted, compacted);
        assertEquals(uncompacted.getSegment().getGcGeneration().nextFull(), compacted.getSegment().getGcGeneration());
    }
    // A canceller that is already tripped must make compact() return null.
    @Test
    public void testCancel() throws IOException, CommitFailedException {
        Compactor compactor = createCompactor(fileStore, null);
        addTestContent(nodeStore);
        NodeBuilder builder = nodeStore.getRoot().builder();
        builder.setChildNode("cancel").setProperty("cancel", "cancel");
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        assertNull(compactor.compact(nodeStore.getRoot(), Canceller.newCanceller().withCondition("reason", () -> true)));
    }
    // The failing writer throws when it meets the "IOException" child node that
    // addTestContent() plants; compact() must propagate that IOException.
    @Test(expected = IOException.class)
    public void testIOException() throws IOException, CommitFailedException {
        Compactor compactor = createCompactor(fileStore, "IOException");
        addTestContent(nodeStore);
        compactor.compact(nodeStore.getRoot(), Canceller.newCanceller());
    }
    /**
     * Builds a Compactor writing into GC generation (1,1,compacted). When
     * failOnName is non-null the writer is wrapped so it throws on any node
     * that has a child of that name.
     */
    @NotNull
    private static Compactor createCompactor(FileStore fileStore, String failOnName) {
        SegmentWriter writer = defaultSegmentWriterBuilder("c")
                .withGeneration(newGCGeneration(1, 1, true))
                .build(fileStore);
        if (failOnName != null) {
            writer = new FailingSegmentWriter(writer, failOnName);
        }
        return new Compactor(fileStore.getReader(), writer, fileStore.getBlobStore(), GCNodeWriteMonitor.EMPTY);
    }
    // Adds `count` empty child nodes n-0 … n-(count-1) in a single commit.
    private static void addNodes(SegmentNodeStore nodeStore, int count)
            throws CommitFailedException {
        NodeBuilder builder = nodeStore.getRoot().builder();
        for (int k = 0; k < count; k++) {
            builder.setChildNode("n-" + k);
        }
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }
    // Seeds the store with nested nodes, a single binary, a multi-valued binary,
    // and the a/error/IOException marker used by testIOException().
    private static void addTestContent(NodeStore nodeStore) throws CommitFailedException, IOException {
        NodeBuilder builder = nodeStore.getRoot().builder();
        builder.setChildNode("a").setChildNode("aa").setProperty("p", 42);
        builder.getChildNode("a").setChildNode("error").setChildNode("IOException");
        builder.setChildNode("b").setProperty("bin", createBlob(nodeStore, 42));
        builder.setChildNode("c").setProperty(binaryPropertyFromBlob("bins", createBlobs(nodeStore, 42, 43, 44)));
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }
    // Applies a removal, a property-type change, and a property removal so the
    // incremental compaction in testCompact() has a non-trivial diff.
    private static void modifyTestContent(NodeStore nodeStore) throws CommitFailedException {
        NodeBuilder builder = nodeStore.getRoot().builder();
        builder.getChildNode("a").getChildNode("aa").remove();
        builder.getChildNode("b").setProperty("bin", "changed");
        builder.getChildNode("c").removeProperty("bins");
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    }
    // Creates a blob of `size` random bytes in the store.
    private static Blob createBlob(NodeStore nodeStore, int size) throws IOException {
        byte[] data = new byte[size];
        new Random().nextBytes(data);
        return nodeStore.createBlob(new ByteArrayInputStream(data));
    }
    // Creates one blob per requested size.
    private static List<Blob> createBlobs(NodeStore nodeStore, int... sizes) throws IOException {
        List<Blob> blobs = newArrayList();
        for (int size : sizes) {
            blobs.add(createBlob(nodeStore, size));
        }
        return blobs;
    }
    /**
     * SegmentWriter decorator that delegates everything but throws IOException
     * from writeNode() when the node has a child named {@code failOnName}.
     */
    private static class FailingSegmentWriter implements SegmentWriter {
        @NotNull
        private final SegmentWriter delegate;
        @NotNull
        private final String failOnName;
        public FailingSegmentWriter(@NotNull SegmentWriter delegate, @NotNull String failOnName) {
            this.delegate = delegate;
            this.failOnName = failOnName;
        }
        @Override
        public void flush() throws IOException {
            delegate.flush();
        }
        @NotNull
        @Override
        public RecordId writeBlob(@NotNull Blob blob) throws IOException {
            return delegate.writeBlob(blob);
        }
        @NotNull
        @Override
        public RecordId writeStream(@NotNull InputStream stream) throws IOException {
            return delegate.writeStream(stream);
        }
        @NotNull
        @Override
        public RecordId writeNode(@NotNull NodeState state, @Nullable Buffer stableIdBytes) throws IOException {
            if (state.hasChildNode(failOnName)) {
                throw new IOException("Encountered node with name " + failOnName);
            }
            return delegate.writeNode(state, stableIdBytes);
        }
    }
}
| |
/*
* Copyright 2014 Ran Meng
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.totyumengr.minicubes.cluster;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
import org.hibernate.validator.constraints.NotBlank;
import org.roaringbitmap.RoaringBitmap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.util.ObjectUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* @author mengran
*
*/
@Controller
public class BootTimeSeriesMiniCubeController {
private static final Logger LOGGER = LoggerFactory.getLogger(BootTimeSeriesMiniCubeController.class);
private ObjectMapper objectMapper = new ObjectMapper();
public static final String OK = "ok";
@Autowired
private TimeSeriesMiniCubeManager manager;
/**
 * Reports cluster membership: cube ids whose name starts with '?' are
 * awaiting a role, the rest are actively serving ("working").
 */
@RequestMapping(value="/status", method=RequestMethod.GET)
public @ResponseBody Map<String, List<String>> status() {
    Collection<String> allCubeIds = manager.allCubeIds();
    List<String> working = allCubeIds.stream()
            .filter(id -> !id.startsWith("?"))
            .collect(Collectors.toList());
    List<String> awaiting = allCubeIds.stream()
            .filter(id -> id.startsWith("?"))
            .collect(Collectors.toList());
    // LinkedHashMap keeps the working/awaiting key order stable in the JSON output.
    Map<String, List<String>> status = new LinkedHashMap<>();
    status.put("working", working);
    status.put("awaiting", awaiting);
    LOGGER.info("All miniCube size in cluster: {}, working/awaiting is {}/{}",
            allCubeIds.size(), working.size(), awaiting.size());
    return status;
}
/**
 * Sets the aggregation mode on the cube serving the given time series and
 * echoes the applied mode back as a string.
 */
@RequestMapping(value="/mode", method=RequestMethod.GET)
public @ResponseBody String mode(@NotBlank @RequestParam String timeSeries, @NotBlank @RequestParam boolean mode) {
    // Apply the new mode on the cube that handles this time series.
    manager.aggs(timeSeries).setMode(mode);
    LOGGER.info("Success to set mode {} to {}", mode, timeSeries);
    return String.valueOf(mode);
}
/**
 * Reassigns the given cube to serve the given time series.
 *
 * @param cubeId     id of the cube to repurpose
 * @param timeSeries time series the cube should handle from now on
 * @return the cube's new id after reassignment
 */
@RequestMapping(value="/reassign", method=RequestMethod.POST)
public @ResponseBody String reassign(@NotBlank @RequestParam String cubeId,
        @NotBlank @RequestParam String timeSeries) {

    // Fixed log messages: missing spaces around the placeholders and the
    // "Sucess" misspelling produced garbled log output.
    LOGGER.info("Try to assign cubeId {} to handle {} request.", cubeId, timeSeries);
    String newCubeId = manager.reassignRole(cubeId, timeSeries);
    LOGGER.info("Success to assign cubeId {} to handle {} request.", newCubeId, timeSeries);
    return newCubeId;
}
/**
 * Merges the data of the given version into the cube(s) of the given time
 * series and returns "ok" on success.
 */
@RequestMapping(value="/merge", method={RequestMethod.POST, RequestMethod.GET})
public @ResponseBody String merge(@NotBlank @RequestParam int version,
        @NotBlank @RequestParam String timeSeries) {
    LOGGER.info("Try to merge data of {} to {}.", version, timeSeries);
    final int mergeResult = manager.merge(timeSeries, version);
    LOGGER.info("Success for merge data of {} to {}, result is {}", version, timeSeries, mergeResult);
    return OK;
}
/**
 * Sums the given indicator over the selected time-series cubes.
 *
 * @param indName    indicator (measure) to aggregate
 * @param filterDims optional JSON map of dimension name to accepted values, e.g. {"dim":[1,2]}
 * @param timeSeries one or more time-series ids selecting the cubes to query
 * @return the aggregated sum
 * @throws Throwable if the filter JSON is malformed or the aggregation fails
 */
@RequestMapping(value="/sum", method={RequestMethod.POST, RequestMethod.GET})
public @ResponseBody BigDecimal sum(@NotBlank @RequestParam String indName,
        @RequestParam(required=false) String filterDims,
        @NotBlank @RequestParam String... timeSeries) throws Throwable {

    LOGGER.info("Try to sum {} on {} with filter {}.", indName, ObjectUtils.getDisplayString(timeSeries), filterDims);
    long timing = System.currentTimeMillis();
    // An absent or empty filter string means "no dimension filtering".
    Map<String, List<Integer>> filter = (filterDims == null || filterDims.isEmpty()) ? null
            : objectMapper.readValue(filterDims, new TypeReference<Map<String, List<Integer>>>() {});
    BigDecimal sum = manager.aggs(timeSeries).sum(indName, filter);
    // Fixed "Sucess" misspelling in the emitted log message.
    LOGGER.info("Success to sum {} on {} result is {} using {}ms.", indName, timeSeries, sum, System.currentTimeMillis() - timing);
    return sum;
}
/**
 * Sums the given indicator grouped by one dimension.
 *
 * @param indName    indicator (measure) to aggregate
 * @param filterDims optional JSON map of dimension name to accepted values
 * @param groupbyDim dimension whose values key the result map
 * @param timeSeries one or more time-series ids selecting the cubes to query
 * @return map of group-by dimension value to aggregated sum
 * @throws Throwable if the filter JSON is malformed or the aggregation fails
 */
@RequestMapping(value="/groupsum", method={RequestMethod.POST, RequestMethod.GET})
public @ResponseBody Map<Integer, BigDecimal> groupsum(@NotBlank @RequestParam String indName,
        @RequestParam(required=false) String filterDims,
        @RequestParam String groupbyDim,
        @NotBlank @RequestParam String... timeSeries) throws Throwable {

    LOGGER.info("Try to sum {} on {} with filter {}.", indName, ObjectUtils.getDisplayString(timeSeries), filterDims);
    long timing = System.currentTimeMillis();
    // An absent or empty filter string means "no dimension filtering".
    Map<String, List<Integer>> filter = (filterDims == null || filterDims.isEmpty()) ? null
            : objectMapper.readValue(filterDims, new TypeReference<Map<String, List<Integer>>>() {});
    Map<Integer, BigDecimal> sum = manager.aggs(timeSeries).sum(indName, groupbyDim, filter);
    // Fixed "Sucess" misspelling in the emitted log messages.
    LOGGER.info("Success to sum {} on {} result size is {} using {}ms.", indName, timeSeries, sum.size(), System.currentTimeMillis() - timing);
    LOGGER.debug("Success to sum {} on {} result is {}.", indName, timeSeries, sum);
    return sum;
}
/**
 * Counts rows of the given indicator over the selected time-series cubes.
 *
 * @param indName    indicator to count
 * @param filterDims optional JSON map of dimension name to accepted values
 * @param timeSeries one or more time-series ids selecting the cubes to query
 * @return the row count
 * @throws Throwable if the filter JSON is malformed or the aggregation fails
 */
@RequestMapping(value="/count", method={RequestMethod.POST, RequestMethod.GET})
public @ResponseBody Long count(@NotBlank @RequestParam String indName,
        @RequestParam(required=false) String filterDims,
        @NotBlank @RequestParam String... timeSeries) throws Throwable {

    LOGGER.info("Try to count {} on {} with filter {}.", indName, ObjectUtils.getDisplayString(timeSeries), filterDims);
    long timing = System.currentTimeMillis();
    // An absent or empty filter string means "no dimension filtering".
    Map<String, List<Integer>> filter = (filterDims == null || filterDims.isEmpty()) ? null
            : objectMapper.readValue(filterDims, new TypeReference<Map<String, List<Integer>>>() {});
    long count = manager.aggs(timeSeries).count(indName, filter);
    // Fixed "Sucess" misspelling in the emitted log message.
    LOGGER.info("Success to count {} on {} result is {} using {}ms.", indName, timeSeries, count, System.currentTimeMillis() - timing);
    return count;
}
/**
 * Counts rows of the given indicator grouped by one dimension.
 *
 * @param indName    indicator to count
 * @param filterDims optional JSON map of dimension name to accepted values
 * @param groupbyDim dimension whose values key the result map
 * @param timeSeries one or more time-series ids selecting the cubes to query
 * @return map of group-by dimension value to row count
 * @throws Throwable if the filter JSON is malformed or the aggregation fails
 */
@RequestMapping(value="/groupcount", method={RequestMethod.POST, RequestMethod.GET})
public @ResponseBody Map<Integer, Long> groupcount(@NotBlank @RequestParam String indName,
        @RequestParam(required=false) String filterDims,
        @RequestParam String groupbyDim,
        @NotBlank @RequestParam String... timeSeries) throws Throwable {

    LOGGER.info("Try to count {} on {} with filter {}.", indName, ObjectUtils.getDisplayString(timeSeries), filterDims);
    long timing = System.currentTimeMillis();
    // An absent or empty filter string means "no dimension filtering".
    Map<String, List<Integer>> filter = (filterDims == null || filterDims.isEmpty()) ? null
            : objectMapper.readValue(filterDims, new TypeReference<Map<String, List<Integer>>>() {});
    Map<Integer, Long> count = manager.aggs(timeSeries).count(indName, groupbyDim, filter);
    // Fixed "Sucess" misspelling in the emitted log messages.
    LOGGER.info("Success to count {} on {} result size is {} using {}ms.", indName, timeSeries, count.size(), System.currentTimeMillis() - timing);
    LOGGER.debug("Success to count {} on {} result is {}.", indName, timeSeries, count);
    return count;
}
/**
 * Returns the distinct values of an indicator (or dimension) grouped by one
 * dimension. The internal RoaringBitmaps are expanded to plain sets so the
 * result can be JSON-serialized.
 *
 * @param indName    indicator or dimension whose distinct values are requested
 * @param isDim      whether indName is a dimension; defaults to true when omitted
 * @param filterDims optional JSON map of dimension name to accepted values
 * @param groupbyDim dimension whose values key the result map
 * @param timeSeries one or more time-series ids selecting the cubes to query
 * @return map of group-by dimension value to the set of distinct values
 * @throws Throwable if the filter JSON is malformed or the aggregation fails
 */
@RequestMapping(value="/distinct", method={RequestMethod.POST, RequestMethod.GET})
public @ResponseBody Map<Integer, Set<Integer>> distinct(@NotBlank @RequestParam String indName,
        @NotBlank @RequestParam(required=false) Boolean isDim,
        @RequestParam(required=false) String filterDims,
        @RequestParam String groupbyDim,
        @NotBlank @RequestParam String... timeSeries) throws Throwable {

    LOGGER.info("Try to distinct {} on {} with filter {}.", indName, ObjectUtils.getDisplayString(timeSeries), filterDims);
    long timing = System.currentTimeMillis();
    // An absent or empty filter string means "no dimension filtering".
    Map<String, List<Integer>> filter = (filterDims == null || filterDims.isEmpty()) ? null
            : objectMapper.readValue(filterDims, new TypeReference<Map<String, List<Integer>>>() {});
    Map<Integer, RoaringBitmap> distinct = manager.aggs(timeSeries).distinct(indName, isDim == null ? true : isDim, groupbyDim, filter);
    // Fixed "Sucess" misspelling in the emitted log messages.
    LOGGER.info("Success to distinct {} on {} result size is {} using {}ms.", indName, timeSeries, distinct.size(), System.currentTimeMillis() - timing);
    LOGGER.debug("Success to distinct {} on {} result is {}.", indName, timeSeries, distinct);
    // Expand each bitmap to a Set<Integer>; replaces the verbose anonymous
    // BiConsumer + manual IntStream.collect with the idiomatic boxed()/toSet().
    Map<Integer, Set<Integer>> result = new HashMap<>();
    distinct.forEach((group, bitmap) ->
            result.put(group, Arrays.stream(bitmap.toArray()).boxed().collect(Collectors.toSet())));
    return result;
}
/**
 * Returns, per value of {@code groupbyDim}, the number of distinct values
 * observed for {@code indName} across the given time series.
 *
 * NOTE(review): @NotBlank on a Boolean parameter has no effect (it targets
 * CharSequence) -- confirm the intended validation.
 *
 * @param indName    index/indicator name (must not be blank)
 * @param isDim      whether indName refers to a dimension; treated as true when omitted
 * @param filterDims optional JSON map of dimension name -> allowed values; null or empty means no filtering
 * @param groupbyDim dimension whose values form the result keys
 * @param timeSeries one or more time-series identifiers to aggregate over
 * @return map of group value -> distinct count
 * @throws Throwable propagated from JSON parsing or the aggregation backend
 */
@RequestMapping(value="/distinctcount", method={RequestMethod.POST, RequestMethod.GET})
public @ResponseBody Map<Integer, Integer> distinctCount(@NotBlank @RequestParam String indName,
        @NotBlank @RequestParam(required=false) Boolean isDim,
        @RequestParam(required=false) String filterDims,
        @RequestParam String groupbyDim,
        @NotBlank @RequestParam String... timeSeries) throws Throwable {
    LOGGER.info("Try to distinct-count {} on {} with filter {}.", indName, ObjectUtils.getDisplayString(timeSeries), filterDims);
    long timing = System.currentTimeMillis();
    // Blank filter means "no filter"; otherwise parse the JSON dimension map.
    Map<String, List<Integer>> filter = (filterDims == null || filterDims.isEmpty()) ? null
            : objectMapper.readValue(filterDims, new TypeReference<Map<String, List<Integer>>>() {});
    Map<Integer, Integer> distinct = manager.aggs(timeSeries).discnt(indName, isDim == null ? true : isDim, groupbyDim, filter);
    // Fixed log-message typo: "Sucess" -> "Success".
    LOGGER.info("Success to distinct-count {} on {} result size is {} using {}ms.", indName, timeSeries, distinct.size(), System.currentTimeMillis() - timing);
    LOGGER.debug("Success to distinct-count {} on {} result is {}.", indName, timeSeries, distinct);
    return distinct;
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.rejh.icerrr.itson;
import java.util.Calendar;
import org.apache.cordova.Config;
import org.apache.cordova.DroidGap;
import android.Manifest;
import android.app.Activity;
import android.app.KeyguardManager;
import android.app.KeyguardManager.KeyguardLock;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.PowerManager;
import android.provider.Settings;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AlertDialog;
import android.util.Log;
import android.view.KeyEvent;
import android.webkit.WebView;
import android.widget.Toast;
/**
 * Main Cordova (DroidGap) activity for the Icerrr app.
 *
 * Visible responsibilities: load the Cordova start URL into the embedded
 * WebView, expose a JavaScript bridge named "JSInterface", request storage
 * permissions at runtime, temporarily disable the keyguard / wake the screen
 * when an alarm intent arrives, and finish when a KILL_APP broadcast is
 * received.
 */
public class Icerrr extends DroidGap
{
final static String APPTAG = "Icerrr";
// Request codes echoed back to onRequestPermissionsResult().
private final static int PERMISSION_REQ_READ_STORAGE = 1;
private final static int PERMISSION_REQ_WRITE_STORAGE = 2;
private SharedPreferences sett; // app settings, shared across processes
private long intentTime = -1; // millis of the last intent seen by onNewIntent; forwarded to JS in onResume
private Handler keyguardHandler; // schedules automatic re-enabling of the keyguard
private Runnable keyguardRunnable; // re-enables the keyguard via skiplock(false, true)
private KeyguardManager keyguardManager;
private KeyguardLock lock; // deprecated platform API used to disable/re-enable the lockscreen
private long timeKeyguardDisabled = 0; // millis of the last disable; used to debounce skiplock()
/**
 * Standard activity setup: preferences, WebView configuration, JS bridge,
 * keyguard helpers, and the kill-app broadcast receiver.
 */
@Override
public void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
// Preferences
// NOTE(review): the literal 2 OR-ed in here equals MODE_WORLD_WRITEABLE,
// which is insecure and removed on modern Android -- confirm this is intended.
sett = getSharedPreferences(APPTAG,Context.MODE_MULTI_PROCESS | 2);
// Clear cache
super.clearCache();
super.appView.getSettings().setAllowFileAccess(true);
// Set by <content src="index.html" /> in config.xml
super.setStringProperty("url", null);
super.setStringProperty("errorUrl", null);
super.loadUrl(Config.getStartUrl());
//super.loadUrl("file:///android_asset/www/index.html")
// Allow remote WebView debugging on KitKat (API 19) and newer.
if(Build.VERSION.SDK_INT >= 19) {
WebView.setWebContentsDebuggingEnabled(true);
}
// Expose the native bridge to the web app as window.JSInterface.
JavaScriptInterface jsInterface = new JavaScriptInterface(this);
// super.appView.getSettings().setJavaScriptEnabled(true);
super.appView.addJavascriptInterface(jsInterface, "JSInterface");
// Keyguard (lockscreen)
keyguardManager = (KeyguardManager) getSystemService(Activity.KEYGUARD_SERVICE);
lock = keyguardManager.newKeyguardLock(KEYGUARD_SERVICE);
// Kill App Receiver
IntentFilter filter = new IntentFilter();
filter.addAction("com.rejh.icerrr.itson.actions.KILL_APP");
registerReceiver(killAppReceiver, filter);
// Call onNewIntent when app is not started before..
onNewIntent(getIntent());
}
@Override
public void onStart() {
Log.d(APPTAG,APPTAG +".onStart()");
super.onStart();
}
/**
 * Forwards the current intent's data URI and timestamp to the JS layer so
 * the web app can react to how it was (re)opened.
 */
@Override
public void onResume() {
Log.d(APPTAG,APPTAG +".onResume()");
super.onResume();
Intent incomingIntent = getIntent();
String functionCall = "setTimeout(function() { site.lifecycle.onNewIntent('" + incomingIntent.getDataString() + "',"+ intentTime +"); },1);";
Log.d(APPTAG," > "+ functionCall);
super.sendJavascript(functionCall);
}
@Override
public void onPause() {
Log.d(APPTAG,APPTAG+".onPause()");
super.onPause();
// Re-enable the keyguard whenever the activity leaves the foreground.
skiplock(false);
}
/**
 * Stores the new intent and, for alarm intents (extra cmd == "alarm"),
 * wakes the screen and temporarily disables the keyguard.
 */
@Override
public void onNewIntent(Intent newIntent) {
Log.d(APPTAG,APPTAG +".onNewIntent()");
super.onNewIntent(newIntent);
// Store new intent
setIntent(newIntent);
// Store intentTime so the app doesn't respond to onResume firing the same intent..
Calendar calnow = Calendar.getInstance();
calnow.setTimeInMillis(System.currentTimeMillis());
intentTime = calnow.getTimeInMillis();
// Check if extra cmd==alarm, dismiss keyguard
if (newIntent.hasExtra("cmd")) {
if (newIntent.getStringExtra("cmd").equals("alarm")) {
Log.w(APPTAG," -> onNewIntent: Extra 'cmd' == 'alarm', dismiss keyguard || turn screen on :D");
//getWindow().addFlags(WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON); // doesn't dismiss keyguard at all
//getWindow().addFlags(WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD | WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON); // meh keeps keyguard dismissed for activity..
skiplock(true);
}
}
}
@Override
public void onDestroy() {
super.onDestroy();
// Mirrors the registerReceiver() in onCreate(); without this the receiver leaks.
unregisterReceiver(killAppReceiver);
}
// --------------------------------------
// Get permissions..
/**
 * Returns true when both READ and WRITE external-storage permissions are
 * already granted.
 */
public boolean hasIcerrrPermissions() {
if (
ContextCompat.checkSelfPermission(this,
Manifest.permission.READ_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED
&&
ContextCompat.checkSelfPermission(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE)
== PackageManager.PERMISSION_GRANTED
) {
return true;
} else {
return false;
}
}
/**
 * Requests the missing storage permissions one at a time. Returns false while
 * a request is still pending; once everything is granted, (re)loads the
 * Cordova start URL and returns true. Re-entered from
 * onRequestPermissionsResult() until all permissions are granted.
 */
public boolean requestIcerrrPermissions() {
// Here, thisActivity is the current activity
if (ContextCompat.checkSelfPermission(this,
Manifest.permission.READ_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED) {
// We're not explaining this atm,
// TODO: do explain plz.
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
PERMISSION_REQ_READ_STORAGE);
return false;
}
// Here, thisActivity is the current activity
if (ContextCompat.checkSelfPermission(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED) {
// We're not explaining this atm,
// TODO: do explain plz.
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
PERMISSION_REQ_WRITE_STORAGE);
return false;
}
// Here, thisActivity is the current activity
/*
if (ContextCompat.checkSelfPermission(this,
Manifest.permission.READ_PHONE_STATE)
!= PackageManager.PERMISSION_GRANTED) {
// We're not explaining this atm,
// TODO: do explain plz.
ActivityCompat.requestPermissions(this,
new String[]{Manifest.permission.READ_PHONE_STATE},
PERMISSION_REQ_PHONE_STATE);
return false;
}
/**/
super.loadUrl(Config.getStartUrl());
return true;
}
/**
 * Permission callback: on grant, continue requesting the remaining
 * permissions; on denial, show the retry-or-quit dialog.
 * NOTE(review): requestCode is ignored and only grantResults[0] is checked --
 * confirm this is acceptable for the one-permission-at-a-time flow above.
 */
@Override
public void onRequestPermissionsResult(int requestCode, String permissions[], int[] grantResults) {
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// permission was granted, yay!
requestIcerrrPermissions();
} else {
youidiot();
}
return;
}
/**
 * Shown when a required permission is denied: OK finishes the activity,
 * Cancel retries the permission request.
 */
private void youidiot() {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage("Sorry, Icerrr can not function without the requested permission. Press cancel to retry.");
builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
});
builder.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
// User cancelled the dialog
requestIcerrrPermissions();
}
});
AlertDialog dialog = builder.create();
dialog.show();
}
// --------------------------------------
// Keys
/**
 * Routes volume keys to the web app as Cordova document events.
 * NOTE(review): this returns true for every key (super.onKeyDown is
 * commented out), so keys such as BACK are swallowed -- confirm intentional.
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
//If volume down key
if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
super.loadUrl("javascript:cordova.fireDocumentEvent('volumedownbutton');");
return true;
} else if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
super.loadUrl("javascript:cordova.fireDocumentEvent('volumeupbutton');");
return true;
} else {
//return super.onKeyDown(keyCode, event);
}
//return super.onKeyDown(keyCode, event);
return true;
}
/**/
// Keyguard
// Convenience overload: non-forced keyguard toggle.
private void skiplock(boolean action) {
skiplock(action,false);
}
/**
 * Disables (action == true) or re-enables (action == false) the keyguard.
 * When disabling: acquires a 30s full wakelock to light the screen and
 * schedules automatic re-enabling after 60s. Debounced: a non-forced call
 * within 1s of the last disable is ignored. Controlled by the
 * "turnOnScreenForAlarms" preference.
 */
private void skiplock(boolean action, boolean force) {
Log.d(APPTAG,APPTAG+".skiplock(): "+ action);
if (!sett.getBoolean("turnOnScreenForAlarms",true)) {
Log.d(APPTAG," -> Disabled, do nothing..");
return;
}
if (!force && System.currentTimeMillis() < timeKeyguardDisabled+1000) {
Log.d(APPTAG," -> !Force and timeKeyguardDisabled<1s ago, do nothing..");
return;
}
// Lazily create the runnable that will force the keyguard back on.
if (keyguardRunnable==null) {
keyguardRunnable = new Runnable() {
public void run() {
skiplock(false,true);
}
};
}
// DO IT
if (action == true) {
// Power up display
Log.d(APPTAG," -> Wakelock: turn on display, 30s");
PowerManager powerMgr = (PowerManager) getApplicationContext().getSystemService(Context.POWER_SERVICE);
PowerManager.WakeLock wakelock = powerMgr.newWakeLock((PowerManager.SCREEN_BRIGHT_WAKE_LOCK
| PowerManager.FULL_WAKE_LOCK
| PowerManager.ACQUIRE_CAUSES_WAKEUP), APPTAG);
wakelock.acquire(30000);
// Disable keyguard
lock.disableKeyguard();
//Toast.makeText(getApplicationContext(), "Lockscreen Disabled", Toast.LENGTH_SHORT).show(); // DEBUG // TODO
// Enable keyguard after xx seconds..
keyguardHandler = new Handler();
keyguardHandler.postDelayed(keyguardRunnable, 60000);
// Time..
timeKeyguardDisabled = System.currentTimeMillis();
}
//
else if (action==false) {
// Cancel any pending auto-re-enable before re-enabling immediately.
try { keyguardHandler.removeCallbacks(keyguardRunnable); } catch(Exception e) {}
lock.reenableKeyguard();
//Toast.makeText(getApplicationContext(), "Lockscreen Enabled", Toast.LENGTH_SHORT).show(); // DEBUG // TODO
}
/**/
}
// --------------------------------------
// Others
// Finishes the activity when the KILL_APP broadcast (registered in onCreate) arrives.
BroadcastReceiver killAppReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
finish();
}
};
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.metadata.TableLayout;
import com.facebook.presto.metadata.TableLayout.NodePartitioning;
import com.facebook.presto.spi.connector.ConnectorPartitioningHandle;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.MetadataDeleteNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanFragmentId;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.SimplePlanRewriter;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.COORDINATOR_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.optimizations.PlanNodeSearcher.searchFrom;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Scope.REMOTE;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Predicates.in;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
/**
* Splits a logical plan into fragments that can be shipped and executed on distributed nodes
*/
public class PlanFragmenter
{
// Static utility class: not instantiable.
private PlanFragmenter()
{
}
/**
 * Fragments the given logical plan and returns the root SubPlan. The root
 * fragment is forced to single-node distribution because the coordinator
 * consumes the final output; a sanity check verifies the resulting tree.
 */
public static SubPlan createSubPlans(Session session, Metadata metadata, Plan plan)
{
Fragmenter fragmenter = new Fragmenter(session, metadata, plan.getTypes());
FragmentProperties properties = new FragmentProperties(new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), plan.getRoot().getOutputSymbols()))
.setSingleNodeDistribution();
PlanNode root = SimplePlanRewriter.rewriteWith(fragmenter, plan.getRoot(), properties);
SubPlan result = fragmenter.buildRootFragment(root, properties);
checkState(result.getFragment().getPartitioning().isSingleNode(), "Root of PlanFragment is not single node");
result.sanityCheck();
return result;
}
/**
 * Plan rewriter that cuts the plan at every REMOTE exchange: each exchange
 * source becomes a child fragment and the exchange itself is replaced by a
 * RemoteSourceNode referencing the child fragment ids.
 */
private static class Fragmenter
extends SimplePlanRewriter<FragmentProperties>
{
private static final int ROOT_FRAGMENT_ID = 0;
private final Session session;
private final Metadata metadata;
private final Map<Symbol, Type> types;
// Fragment ids are handed out sequentially; 0 is reserved for the root.
private int nextFragmentId = ROOT_FRAGMENT_ID + 1;
public Fragmenter(Session session, Metadata metadata, Map<Symbol, Type> types)
{
this.session = requireNonNull(session, "session is null");
this.metadata = requireNonNull(metadata, "metadata is null");
this.types = ImmutableMap.copyOf(requireNonNull(types, "types is null"));
}
public SubPlan buildRootFragment(PlanNode root, FragmentProperties properties)
{
return buildFragment(root, properties, new PlanFragmentId(String.valueOf(ROOT_FRAGMENT_ID)));
}
private PlanFragmentId nextFragmentId()
{
return new PlanFragmentId(String.valueOf(nextFragmentId++));
}
/**
 * Packages a rewritten subtree into a PlanFragment, keeping only the symbol
 * types the subtree actually outputs, and checks that the recorded
 * partitioned sources match exactly the table scans found in the subtree.
 */
private SubPlan buildFragment(PlanNode root, FragmentProperties properties, PlanFragmentId fragmentId)
{
Set<Symbol> dependencies = SymbolsExtractor.extractOutputSymbols(root);
List<PlanNodeId> schedulingOrder = findAllTableScanPlanNodeId(root);
boolean equals = properties.getPartitionedSources().equals(ImmutableSet.copyOf(schedulingOrder));
checkArgument(equals, "Expected scheduling order (%s) to contain an entry for all partitioned sources (%s)", schedulingOrder, properties.getPartitionedSources());
PlanFragment fragment = new PlanFragment(
fragmentId,
root,
Maps.filterKeys(types, in(dependencies)),
properties.getPartitioningHandle(),
schedulingOrder,
properties.getPartitioningScheme());
return new SubPlan(fragment, properties.getChildren());
}
// Table scans define the scheduling order of a fragment's partitioned sources.
private List<PlanNodeId> findAllTableScanPlanNodeId(PlanNode root)
{
return searchFrom(root)
.where(TableScanNode.class::isInstance)
.<TableScanNode>findAll()
.stream()
.map(TableScanNode::getId)
.collect(toImmutableList());
}
@Override
public PlanNode visitOutput(OutputNode node, RewriteContext<FragmentProperties> context)
{
context.get().setSingleNodeDistribution(); // TODO: add support for distributed output
return context.defaultRewrite(node, context.get());
}
@Override
public PlanNode visitExplainAnalyze(ExplainAnalyzeNode node, RewriteContext<FragmentProperties> context)
{
// EXPLAIN ANALYZE is evaluated on the coordinator.
context.get().setCoordinatorOnlyDistribution();
return context.defaultRewrite(node, context.get());
}
@Override
public PlanNode visitTableFinish(TableFinishNode node, RewriteContext<FragmentProperties> context)
{
// Table commit must run on the coordinator.
context.get().setCoordinatorOnlyDistribution();
return context.defaultRewrite(node, context.get());
}
@Override
public PlanNode visitMetadataDelete(MetadataDeleteNode node, RewriteContext<FragmentProperties> context)
{
// Metadata-only deletes run on the coordinator.
context.get().setCoordinatorOnlyDistribution();
return context.defaultRewrite(node, context.get());
}
@Override
public PlanNode visitTableScan(TableScanNode node, RewriteContext<FragmentProperties> context)
{
// Use the connector-provided node partitioning when the layout declares
// one; otherwise fall back to generic source distribution.
PartitioningHandle partitioning = node.getLayout()
.map(layout -> metadata.getLayout(session, layout))
.flatMap(TableLayout::getNodePartitioning)
.map(NodePartitioning::getPartitioningHandle)
.orElse(SOURCE_DISTRIBUTION);
context.get().addSourceDistribution(node.getId(), partitioning);
return context.defaultRewrite(node, context.get());
}
@Override
public PlanNode visitValues(ValuesNode node, RewriteContext<FragmentProperties> context)
{
context.get().setSingleNodeDistribution();
return context.defaultRewrite(node, context.get());
}
/**
 * REMOTE exchanges become fragment boundaries: each source subtree is built
 * into a child SubPlan and the exchange is replaced by a RemoteSourceNode.
 * Non-REMOTE (local) exchanges are left in place.
 */
@Override
public PlanNode visitExchange(ExchangeNode exchange, RewriteContext<FragmentProperties> context)
{
if (exchange.getScope() != REMOTE) {
return context.defaultRewrite(exchange, context.get());
}
PartitioningScheme partitioningScheme = exchange.getPartitioningScheme();
// The exchange type constrains the current (parent) fragment's distribution.
if (exchange.getType() == ExchangeNode.Type.GATHER) {
context.get().setSingleNodeDistribution();
}
else if (exchange.getType() == ExchangeNode.Type.REPARTITION) {
context.get().setDistribution(partitioningScheme.getPartitioning().getHandle());
}
ImmutableList.Builder<SubPlan> builder = ImmutableList.builder();
for (int sourceIndex = 0; sourceIndex < exchange.getSources().size(); sourceIndex++) {
// Each child fragment outputs the exchange's input layout for that source.
FragmentProperties childProperties = new FragmentProperties(partitioningScheme.translateOutputLayout(exchange.getInputs().get(sourceIndex)));
builder.add(buildSubPlan(exchange.getSources().get(sourceIndex), childProperties, context));
}
List<SubPlan> children = builder.build();
context.get().addChildren(children);
List<PlanFragmentId> childrenIds = children.stream()
.map(SubPlan::getFragment)
.map(PlanFragment::getId)
.collect(toImmutableList());
return new RemoteSourceNode(exchange.getId(), childrenIds, exchange.getOutputSymbols());
}
private SubPlan buildSubPlan(PlanNode node, FragmentProperties properties, RewriteContext<FragmentProperties> context)
{
PlanFragmentId planFragmentId = nextFragmentId();
PlanNode child = context.rewrite(node, properties);
return buildFragment(child, properties, planFragmentId);
}
}
/**
 * Mutable state accumulated while rewriting one fragment: its child
 * SubPlans, its partitioned (table-scan) sources, and the partitioning
 * handle, which may be tightened but never contradictorily overwritten.
 */
private static class FragmentProperties
{
private final List<SubPlan> children = new ArrayList<>();
private final PartitioningScheme partitioningScheme;
private Optional<PartitioningHandle> partitioningHandle = Optional.empty();
private final Set<PlanNodeId> partitionedSources = new HashSet<>();
public FragmentProperties(PartitioningScheme partitioningScheme)
{
this.partitioningScheme = partitioningScheme;
}
public List<SubPlan> getChildren()
{
return children;
}
// Forces single-node execution; fails if a conflicting distribution was already chosen.
public FragmentProperties setSingleNodeDistribution()
{
if (partitioningHandle.isPresent() && partitioningHandle.get().isSingleNode()) {
// already single node distribution
return this;
}
checkState(!partitioningHandle.isPresent(),
"Cannot overwrite partitioning with %s (currently set to %s)",
SINGLE_DISTRIBUTION,
partitioningHandle);
partitioningHandle = Optional.of(SINGLE_DISTRIBUTION);
return this;
}
public FragmentProperties setDistribution(PartitioningHandle distribution)
{
if (partitioningHandle.isPresent()) {
chooseDistribution(distribution);
return this;
}
partitioningHandle = Optional.of(distribution);
return this;
}
// Reconciles a requested distribution with the one already chosen.
private void chooseDistribution(PartitioningHandle distribution)
{
checkState(partitioningHandle.isPresent(), "No partitioning to choose from");
if (partitioningHandle.get().equals(distribution) ||
partitioningHandle.get().isSingleNode() ||
isCompatibleSystemPartitioning(distribution)) {
return;
}
// A generic source distribution can be narrowed to a specific one.
if (partitioningHandle.get().equals(SOURCE_DISTRIBUTION)) {
partitioningHandle = Optional.of(distribution);
return;
}
throw new IllegalStateException(format(
"Cannot set distribution to %s. Already set to %s",
distribution,
partitioningHandle));
}
// Two system partitionings are compatible when their partitioning kinds match.
private boolean isCompatibleSystemPartitioning(PartitioningHandle distribution)
{
ConnectorPartitioningHandle currentHandle = partitioningHandle.get().getConnectorHandle();
ConnectorPartitioningHandle distributionHandle = distribution.getConnectorHandle();
if ((currentHandle instanceof SystemPartitioningHandle) &&
(distributionHandle instanceof SystemPartitioningHandle)) {
return ((SystemPartitioningHandle) currentHandle).getPartitioning() ==
((SystemPartitioningHandle) distributionHandle).getPartitioning();
}
return false;
}
public FragmentProperties setCoordinatorOnlyDistribution()
{
if (partitioningHandle.isPresent() && partitioningHandle.get().isCoordinatorOnly()) {
// already single node distribution
return this;
}
// only system SINGLE can be upgraded to COORDINATOR_ONLY
checkState(!partitioningHandle.isPresent() || partitioningHandle.get().equals(SINGLE_DISTRIBUTION),
"Cannot overwrite partitioning with %s (currently set to %s)",
COORDINATOR_DISTRIBUTION,
partitioningHandle);
partitioningHandle = Optional.of(COORDINATOR_DISTRIBUTION);
return this;
}
// Records a table scan as a partitioned source and adopts its distribution,
// unless the fragment is already pinned to single-node/coordinator execution.
public FragmentProperties addSourceDistribution(PlanNodeId source, PartitioningHandle distribution)
{
requireNonNull(source, "source is null");
requireNonNull(distribution, "distribution is null");
partitionedSources.add(source);
if (partitioningHandle.isPresent()) {
PartitioningHandle currentPartitioning = partitioningHandle.get();
if (!currentPartitioning.equals(distribution)) {
// If already system SINGLE or COORDINATOR_ONLY, leave it as is (this is for single-node execution)
checkState(
currentPartitioning.equals(SINGLE_DISTRIBUTION) || currentPartitioning.equals(COORDINATOR_DISTRIBUTION),
"Cannot overwrite distribution with %s (currently set to %s)",
distribution,
currentPartitioning);
return this;
}
}
partitioningHandle = Optional.of(distribution);
return this;
}
public FragmentProperties addChildren(List<SubPlan> children)
{
this.children.addAll(children);
return this;
}
public PartitioningScheme getPartitioningScheme()
{
return partitioningScheme;
}
// Callers must only invoke this after a distribution has been chosen.
public PartitioningHandle getPartitioningHandle()
{
return partitioningHandle.get();
}
public Set<PlanNodeId> getPartitionedSources()
{
return partitionedSources;
}
}
}
| |
package org.zstack.storage.primary.nfs;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.compute.vm.VmExpungeRootVolumeValidator;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.cloudbus.EventCallback;
import org.zstack.core.cloudbus.EventFacade;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.config.GlobalConfigException;
import org.zstack.core.config.GlobalConfigValidatorExtensionPoint;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.Q;
import org.zstack.core.db.SQL;
import org.zstack.core.db.SQLBatch;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.Component;
import org.zstack.header.cluster.ClusterUpdateOSExtensionPoint;
import org.zstack.header.cluster.ClusterVO;
import org.zstack.header.core.workflow.Flow;
import org.zstack.header.core.workflow.FlowRollback;
import org.zstack.header.core.workflow.FlowTrigger;
import org.zstack.header.core.workflow.NoRollbackFlow;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.host.*;
import org.zstack.header.image.ImageConstant;
import org.zstack.header.image.ImageInventory;
import org.zstack.header.message.MessageReply;
import org.zstack.header.rest.RESTFacade;
import org.zstack.header.storage.backup.*;
import org.zstack.header.storage.primary.*;
import org.zstack.header.storage.snapshot.CreateTemplateFromVolumeSnapshotExtensionPoint;
import org.zstack.header.storage.snapshot.VolumeSnapshotInventory;
import org.zstack.header.vm.VmInstanceState;
import org.zstack.header.vm.VmInstanceVO;
import org.zstack.header.vm.VmInstanceVO_;
import org.zstack.header.volume.*;
import org.zstack.kvm.KVMConstant;
import org.zstack.storage.primary.ChangePrimaryStorageStatusMsg;
import org.zstack.storage.primary.PrimaryStorageCapacityUpdater;
import org.zstack.storage.primary.PrimaryStorageSystemTags;
import org.zstack.storage.primary.nfs.NfsPrimaryStorageKVMBackendCommands.NfsPrimaryStorageAgentResponse;
import org.zstack.storage.snapshot.PostMarkRootVolumeAsSnapshotExtension;
import org.zstack.tag.SystemTagCreator;
import org.zstack.tag.TagManager;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.path.PathUtil;
import static org.zstack.core.Platform.*;
import javax.persistence.TypedQuery;
import java.util.*;
import java.util.concurrent.Callable;
import static org.zstack.utils.CollectionDSL.*;
public class NfsPrimaryStorageFactory implements NfsPrimaryStorageManager, PrimaryStorageFactory, Component, CreateTemplateFromVolumeSnapshotExtensionPoint, RecalculatePrimaryStorageCapacityExtensionPoint,
PrimaryStorageDetachExtensionPoint, PrimaryStorageAttachExtensionPoint, HostDeleteExtensionPoint, PostMarkRootVolumeAsSnapshotExtension, ClusterUpdateOSExtensionPoint, AfterInstantiateVolumeExtensionPoint {
private static CLogger logger = Utils.getLogger(NfsPrimaryStorageFactory.class);
@Autowired
private DatabaseFacade dbf;
@Autowired
private PluginRegistry pluginRgty;
@Autowired
private TagManager tagMgr;
@Autowired
private PrimaryStorageManager psMgr;
@Autowired
private ErrorFacade errf;
@Autowired
private CloudBus bus;
@Autowired
private RESTFacade restf;
@Autowired
protected EventFacade evtf;
private Map<String, NfsPrimaryStorageBackend> backends = new HashMap<String, NfsPrimaryStorageBackend>();
private Map<String, Map<String, NfsPrimaryToBackupStorageMediator>> mediators =
new HashMap<>();
private static final PrimaryStorageType type = new PrimaryStorageType(NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE);
static {
type.setSupportHeartbeatFile(true);
type.setOrder(899);
}
/**
 * Validator invoked before expunging a VM's root volume: when the volume
 * lives on an NFS primary storage, that storage must still be attached to at
 * least one cluster, otherwise no host is available to perform the deletion.
 *
 * @param vmUuid     uuid of the VM whose root volume is being expunged
 * @param volumeUuid uuid of the root volume
 * @throws OperationFailureException if the NFS storage is detached from all clusters
 */
@VmExpungeRootVolumeValidator.VmExpungeRootVolumeValidatorMethod
static void vmExpungeRootVolumeValidator(String vmUuid, String volumeUuid) {
new SQLBatch() {
@Override
protected void scripts() {
String psUuid = q(VolumeVO.class).select(VolumeVO_.primaryStorageUuid).eq(VolumeVO_.uuid, volumeUuid)
.findValue();
if (psUuid == null) {
// the volume is not on any primary storage; nothing to validate
return;
}
if (!q(PrimaryStorageVO.class).eq(PrimaryStorageVO_.uuid, psUuid)
.eq(PrimaryStorageVO_.type, NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE)
.isExists()) {
// not NFS
return;
}
if (!q(PrimaryStorageClusterRefVO.class).eq(PrimaryStorageClusterRefVO_.primaryStorageUuid, psUuid).isExists()) {
throw new OperationFailureException(operr("the NFS primary storage[uuid:%s] is not attached" +
" to any clusters, and cannot expunge the root volume[uuid:%s] of the VM[uuid:%s]", psUuid, vmUuid, volumeUuid));
}
}
}.execute();
}
// Returns the shared NFS primary storage type descriptor (see static initializer).
@Override
public PrimaryStorageType getPrimaryStorageType() {
return type;
}
/**
 * Creates and persists a new NFS primary storage record: assigns its host
 * mount path under the configured (or default) mount base, then tags it as
 * supporting hypervisor-side snapshots on KVM.
 *
 * @param vo  the primary storage record to complete and persist
 * @param msg the originating API message (not consulted here)
 * @return the inventory view of the persisted record
 */
@Override
public PrimaryStorageInventory createPrimaryStorage(PrimaryStorageVO vo, APIAddPrimaryStorageMsg msg) {
    // Resolve the configured mount base, falling back to the built-in default.
    String configuredBase = NfsPrimaryStorageGlobalConfig.MOUNT_BASE.value(String.class);
    String base = (configuredBase != null) ? configuredBase : NfsPrimaryStorageConstant.DEFAULT_NFS_MOUNT_PATH_ON_HOST;
    // Each storage gets its own directory keyed by uuid, e.g. <base>/prim-<uuid>.
    vo.setMountPath(PathUtil.join(base, "prim-" + vo.getUuid()));
    vo = dbf.persistAndRefresh(vo);
    // Mark the storage as capable of hypervisor snapshots on KVM.
    SystemTagCreator creator = PrimaryStorageSystemTags.CAPABILITY_HYPERVISOR_SNAPSHOT.newSystemTagCreator(vo.getUuid());
    creator.setTagByTokens(map(
            e(PrimaryStorageSystemTags.CAPABILITY_HYPERVISOR_SNAPSHOT_TOKEN, KVMConstant.KVM_HYPERVISOR_TYPE)
    ));
    creator.create();
    return PrimaryStorageInventory.valueOf(vo);
}
// Wraps the VO in the NFS-specific PrimaryStorage handler.
@Override
public PrimaryStorage getPrimaryStorage(PrimaryStorageVO vo) {
return new NfsPrimaryStorage(vo);
}
/**
 * Loads the primary storage row identified by {@code uuid} and converts it
 * to its inventory representation.
 */
@Override
public PrimaryStorageInventory getInventory(String uuid) {
    return PrimaryStorageInventory.valueOf(dbf.findByUuid(uuid, PrimaryStorageVO.class));
}
/**
 * Wires in plugin-provided extensions: exactly one NfsPrimaryStorageBackend
 * per hypervisor type, and primary-to-backup-storage mediators indexed first
 * by backup storage type, then by hypervisor type.
 *
 * @throws CloudRuntimeException if two backends claim the same hypervisor type
 */
private void populateExtensions() {
    for (NfsPrimaryStorageBackend extp : pluginRgty.getExtensionList(NfsPrimaryStorageBackend.class)) {
        NfsPrimaryStorageBackend old = backends.get(extp.getHypervisorType().toString());
        if (old != null) {
            throw new CloudRuntimeException(String.format("duplicate NfsPrimaryStorageBackend[%s, %s] for type[%s]",
                    extp.getClass().getName(), old.getClass().getName(), old.getHypervisorType()));
        }
        backends.put(extp.getHypervisorType().toString(), extp);
    }
    for (NfsPrimaryToBackupStorageMediator extp : pluginRgty.getExtensionList(NfsPrimaryToBackupStorageMediator.class)) {
        // Only mediators declared for the NFS primary storage type are relevant.
        if (extp.getSupportedPrimaryStorageType().equals(type.toString())) {
            // computeIfAbsent replaces the manual get/new/put sequence and
            // avoids re-inserting an already-present inner map.
            Map<String, NfsPrimaryToBackupStorageMediator> byHypervisor =
                    mediators.computeIfAbsent(extp.getSupportedBackupStorageType(), k -> new HashMap<>(1));
            for (String hvType : extp.getSupportedHypervisorTypes()) {
                byHypervisor.put(hvType, extp);
            }
        }
    }
}
/**
 * Looks up the mediator that moves data between this NFS primary storage and
 * the given backup storage type for the given hypervisor type.
 *
 * @param bsType backup storage type to mediate with
 * @param hvType hypervisor type performing the transfer
 * @return the registered mediator, never null
 * @throws CloudRuntimeException when no mediator is registered for the combination
 */
public NfsPrimaryToBackupStorageMediator getPrimaryToBackupStorageMediator(BackupStorageType bsType, HypervisorType hvType) {
    Map<String, NfsPrimaryToBackupStorageMediator> byHypervisor = mediators.get(bsType.toString());
    if (byHypervisor == null) {
        throw new CloudRuntimeException(
                String.format("primary storage[type:%s] wont have mediator supporting backup storage[type:%s]", type, bsType));
    }
    NfsPrimaryToBackupStorageMediator found = byHypervisor.get(hvType.toString());
    if (found == null) {
        throw new CloudRuntimeException(
                String.format("PrimaryToBackupStorageMediator[primary storage type: %s, backup storage type: %s] doesn't have backend supporting hypervisor type[%s]", type, bsType, hvType));
    }
    return found;
}
/**
 * Component start hook: wires plugin extensions, subscribes to canonical
 * events, and installs a validator ensuring the configured mount base is an
 * absolute path.
 */
@Override
public boolean start() {
populateExtensions();
setupCanonicalEvents();
NfsPrimaryStorageGlobalConfig.MOUNT_BASE.installValidateExtension(new GlobalConfigValidatorExtensionPoint() {
@Override
public void validateGlobalConfig(String category, String name, String oldValue, String value) throws GlobalConfigException {
// The NFS mount base must be an absolute filesystem path.
if (!value.startsWith("/")) {
throw new GlobalConfigException(String.format("%s must be an absolute path starting with '/'", NfsPrimaryStorageGlobalConfig.MOUNT_BASE.getCanonicalName()));
}
}
});
return true;
}
/**
 * Subscribes to host<->primary-storage status-change events. When a host regains its
 * connection to an NFS primary storage, the storage itself is reconnected (if needed)
 * and its capacity recalculated.
 */
private void setupCanonicalEvents(){
    evtf.on(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_HOST_STATUS_CHANGED_PATH, new EventCallback() {
        @Override
        protected void run(Map tokens, Object data) {
            PrimaryStorageCanonicalEvent.PrimaryStorageHostStatusChangeData d =
                    (PrimaryStorageCanonicalEvent.PrimaryStorageHostStatusChangeData)data;
            // nfsStatus is null when the ps is not an NFS primary storage (type filter below).
            PrimaryStorageStatus nfsStatus = Q.New(PrimaryStorageVO.class)
                    .eq(PrimaryStorageVO_.uuid, d.getPrimaryStorageUuid())
                    .eq(PrimaryStorageVO_.type, NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE)
                    .select(PrimaryStorageVO_.status)
                    .findValue();

            // Only react to transitions INTO the Connected state.
            boolean recoverConnection = d.getNewStatus() == PrimaryStorageHostStatus.Connected &&
                    d.getOldStatus() != PrimaryStorageHostStatus.Connected;

            if (nfsStatus == null || !recoverConnection) {
                return;
            }
            logger.debug(String.format("NFS[uuid:%s] recover connection to host[uuid:%s]", d.getPrimaryStorageUuid(), d.getHostUuid()));

            if (nfsStatus != PrimaryStorageStatus.Connected) {
                // use sync call here to make sure the NFS primary storage connected before continue to the next step
                ChangePrimaryStorageStatusMsg cmsg = new ChangePrimaryStorageStatusMsg();
                cmsg.setPrimaryStorageUuid(d.getPrimaryStorageUuid());
                cmsg.setStatus(PrimaryStorageStatus.Connected.toString());
                bus.makeTargetServiceIdByResourceUuid(cmsg, PrimaryStorageConstant.SERVICE_ID, d.getPrimaryStorageUuid());
                bus.call(cmsg);

                logger.debug(String.format("connect nfs[uuid:%s] completed", d.getPrimaryStorageUuid()));
            }

            // Capacity may have drifted while the host was disconnected.
            recalculateCapacity(d.getPrimaryStorageUuid());
        }
    });
}
@Override
public boolean stop() {
    // No resources to release on shutdown.
    return true;
}
/**
 * Resolves the NFS backend registered for the given hypervisor type.
 *
 * @throws CloudRuntimeException when no backend was registered for that type
 */
public NfsPrimaryStorageBackend getHypervisorBackend(HypervisorType hvType) {
    NfsPrimaryStorageBackend found = backends.get(hvType.toString());
    if (found != null) {
        return found;
    }
    throw new CloudRuntimeException(String.format("Cannot find hypervisor backend for nfs primary storage supporting hypervisor type[%s]", hvType));
}
/**
 * Returns, in random order, the Connected hosts of the clusters this primary storage is
 * attached to; used to pick a host for ping commands.
 *
 * @throws OperationFailureException if the ps is attached to no cluster or no host is Connected
 */
public List<HostInventory> getConnectedHostForPing(PrimaryStorageInventory pri) {
    // A host can only come from a cluster the primary storage is attached to.
    if (pri.getAttachedClusterUuids().isEmpty()) {
        throw new OperationFailureException(operr("cannot find a Connected host to execute command for nfs primary storage[uuid:%s]", pri.getUuid()));
    }

    String jpql = "select h from HostVO h " +
            "where h.status = :connectionState and h.clusterUuid in (:clusterUuids)";
    TypedQuery<HostVO> query = dbf.getEntityManager().createQuery(jpql, HostVO.class);
    query.setParameter("connectionState", HostStatus.Connected);
    query.setParameter("clusterUuids", pri.getAttachedClusterUuids());

    List<HostVO> candidates = query.getResultList();
    if (candidates.isEmpty()) {
        throw new OperationFailureException(
                operr("cannot find a connected host in cluster which ps [uuid: %s] attached", pri.getUuid()));
    }

    // Randomize so repeated pings spread across hosts.
    Collections.shuffle(candidates);
    return HostInventory.valueOf(candidates);
}
/**
 * Returns, in random order, hosts eligible to run storage operations for this NFS
 * primary storage: Connected, Enabled, in an attached cluster, and not recorded as
 * Disconnected from this ps in PrimaryStorageHostRefVO.
 *
 * @throws OperationFailureException if the ps is attached to no cluster or no host qualifies
 */
public List<HostInventory> getConnectedHostForOperation(PrimaryStorageInventory pri) {
    if (pri.getAttachedClusterUuids().isEmpty()) {
        throw new OperationFailureException(operr("cannot find a Connected host to execute command for nfs primary storage[uuid:%s]", pri.getUuid()));
    }

    //we need to filter out the non-enabled host in case of host maintained but kvmagent downed
    String sql = "select h from HostVO h " +
            "where h.status = :connectionState and h.state = :state " +
            "and h.clusterUuid in (:clusterUuids) " +
            "and h.uuid not in (select ref.hostUuid from PrimaryStorageHostRefVO ref " +
            "where ref.primaryStorageUuid = :psUuid and ref.hostUuid = h.uuid and ref.status = :status)";
    TypedQuery<HostVO> q = dbf.getEntityManager().createQuery(sql, HostVO.class);
    q.setParameter("connectionState", HostStatus.Connected);
    q.setParameter("state", HostState.Enabled);
    q.setParameter("clusterUuids", pri.getAttachedClusterUuids());
    q.setParameter("psUuid", pri.getUuid());
    q.setParameter("status", PrimaryStorageHostStatus.Disconnected);
    List<HostVO> ret = q.getResultList();

    if (ret.isEmpty()) {
        throw new OperationFailureException(
                operr("cannot find a host which has Connected host-NFS connection to execute command " +
                        "for nfs primary storage[uuid:%s]", pri.getUuid()));
    } else {
        // Randomize so load spreads across eligible hosts.
        Collections.shuffle(ret);
        return HostInventory.valueOf(ret);
    }
}
/**
 * Convenience overload of
 * {@link #updateNfsHostStatus(String, String, PrimaryStorageHostStatus, Runnable)}
 * with no post-update callback.
 */
public final void updateNfsHostStatus(String psUuid, String huuid, PrimaryStorageHostStatus newStatus) {
    updateNfsHostStatus(psUuid, huuid, newStatus, null);
}
/**
 * Persists the host&lt;-&gt;NFS-ps connection status. When the status actually changed,
 * fires the canonical status-change event and runs the optional callback.
 *
 * @param runIfUpdated optional; executed only when the status was created or changed
 */
public final void updateNfsHostStatus(String psUuid, String huuid, PrimaryStorageHostStatus newStatus, Runnable runIfUpdated){
    PrimaryStorageCanonicalEvent.PrimaryStorageHostStatusChangeData data =
            new PrimaryStorageCanonicalEvent.PrimaryStorageHostStatusChangeData();

    new SQLBatch(){
        @Override
        protected void scripts() {
            PrimaryStorageHostStatus oldStatus = Q.New(PrimaryStorageHostRefVO.class)
                    .eq(PrimaryStorageHostRefVO_.hostUuid, huuid)
                    .eq(PrimaryStorageHostRefVO_.primaryStorageUuid, psUuid)
                    .select(PrimaryStorageHostRefVO_.status)
                    .findValue();

            if (oldStatus == newStatus) {
                // No change: leave `data` unpopulated so no event fires below.
                return;
            }

            if (oldStatus == null) {
                // First observation of this host/ps pair: create the ref record.
                PrimaryStorageHostRefVO ref = new PrimaryStorageHostRefVO();
                ref.setPrimaryStorageUuid(psUuid);
                ref.setHostUuid(huuid);
                ref.setStatus(newStatus);
                persist(ref);
            } else {
                // Status changed. (The original `else if (newStatus != oldStatus)` guard
                // was always true here because the equal case returned above.)
                sql(PrimaryStorageHostRefVO.class)
                        .eq(PrimaryStorageHostRefVO_.primaryStorageUuid, psUuid)
                        .eq(PrimaryStorageHostRefVO_.hostUuid, huuid)
                        .set(PrimaryStorageHostRefVO_.status, newStatus)
                        .update();
            }

            logger.debug(String.format(
                    "change status between primary storage[uuid:%s] and host[uuid:%s] from %s to %s in db",
                    psUuid, huuid, oldStatus == null ? "unknown" : oldStatus.toString(), newStatus.toString()));

            data.setHostUuid(huuid);
            data.setPrimaryStorageUuid(psUuid);
            data.setNewStatus(newStatus);
            data.setOldStatus(oldStatus);
        }
    }.execute();

    // Fire the event outside the transaction; hostUuid is set only when an update happened.
    if (data.getHostUuid() != null){
        evtf.fire(PrimaryStorageCanonicalEvent.PRIMARY_STORAGE_HOST_STATUS_CHANGED_PATH, data);
        Optional.ofNullable(runIfUpdated).ifPresent(Runnable::run);
    }
}
@Override
public void reportCapacityIfNeeded(String psUuid, NfsPrimaryStorageAgentResponse rsp) {
    // Push the agent-reported available physical capacity into the capacity record.
    // NOTE(review): the guard requires getTotalCapacity() to be non-null but the value is
    // never written here -- confirm whether total capacity should be updated as well.
    if (rsp.getAvailableCapacity() != null && rsp.getTotalCapacity() != null) {
        new PrimaryStorageCapacityUpdater(psUuid).updateAvailablePhysicalCapacity(rsp.getAvailableCapacity());
    }
}
@Override
public HypervisorType findHypervisorTypeByImageFormatAndPrimaryStorageUuid(String imageFormat, final String psUuid) {
    // Fast path: some volume formats map to exactly one hypervisor type.
    HypervisorType hvType = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(imageFormat);
    if (hvType != null) {
        return hvType;
    }

    // Fallback: use the hypervisor type of any cluster the primary storage is attached to.
    // NOTE(review): @Transactional on an anonymous inner class is typically not woven by
    // Spring proxies -- confirm the project's AOP setup actually honors it here.
    String type = new Callable<String>() {
        @Override
        @Transactional(readOnly = true)
        public String call() {
            String sql = "select c.hypervisorType" +
                    " from ClusterVO c, PrimaryStorageClusterRefVO ref" +
                    " where c.uuid = ref.clusterUuid" +
                    " and ref.primaryStorageUuid = :psUuid";
            TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class);
            q.setParameter("psUuid", psUuid);
            List<String> types = q.getResultList();
            return types.isEmpty() ? null : types.get(0);
        }
    }.call();

    if (type != null) {
        return HypervisorType.valueOf(type);
    }

    throw new OperationFailureException(operr("cannot find proper hypervisorType for primary storage[uuid:%s] to handle image format or volume format[%s]", psUuid, imageFormat));
}
/**
 * Builds the three-step workflow that turns a volume snapshot on NFS into an image on
 * backup storage: (1) materialize the snapshot as a temporary volume on the primary
 * storage, (2) upload it to backup storage, (3) delete the temporary volume.
 */
@Override
public WorkflowTemplate createTemplateFromVolumeSnapshot(final ParamIn paramIn) {
    WorkflowTemplate template = new WorkflowTemplate();

    final HypervisorType hvtype = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(paramIn.getSnapshot().getFormat());

    // Shared mutable state between flows: where the temporary volume landed.
    class Context {
        String tempInstallPath;
    }

    final Context ctx = new Context();

    template.setCreateTemporaryTemplate(new Flow() {
        String __name__ = "create-temporary-template";

        @Override
        public void run(final FlowTrigger trigger, Map data) {
            final ParamOut out = (ParamOut) data.get(ParamOut.class);

            CreateTemporaryVolumeFromSnapshotMsg msg = new CreateTemporaryVolumeFromSnapshotMsg();
            msg.setPrimaryStorageUuid(paramIn.getPrimaryStorageUuid());
            msg.setSnapshot(paramIn.getSnapshot());
            // The image UUID doubles as the temporary volume UUID.
            msg.setTemporaryVolumeUuid(paramIn.getImage().getUuid());
            msg.setHypervisorType(hvtype.toString());
            bus.makeTargetServiceIdByResourceUuid(msg, PrimaryStorageConstant.SERVICE_ID, paramIn.getPrimaryStorageUuid());
            bus.send(msg, new CloudBusCallBack(trigger) {
                @Override
                public void run(MessageReply reply) {
                    if (!reply.isSuccess()) {
                        trigger.fail(reply.getError());
                    } else {
                        CreateTemporaryVolumeFromSnapshotReply r = reply.castReply();
                        // Remember the install path so rollback / later flows can find it.
                        ctx.tempInstallPath = r.getInstallPath();
                        out.setActualSize(r.getActualSize());
                        out.setSize(r.getSize());
                        trigger.next();
                    }
                }
            });
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            // Best-effort cleanup of the temporary volume; fire-and-forget.
            if (ctx.tempInstallPath != null) {
                DeleteVolumeBitsOnPrimaryStorageMsg msg = new DeleteVolumeBitsOnPrimaryStorageMsg();
                msg.setPrimaryStorageUuid(paramIn.getPrimaryStorageUuid());
                msg.setInstallPath(ctx.tempInstallPath);
                msg.setHypervisorType(hvtype.toString());
                bus.makeTargetServiceIdByResourceUuid(msg, PrimaryStorageConstant.SERVICE_ID, paramIn.getPrimaryStorageUuid());
                bus.send(msg);
            }

            trigger.rollback();
        }
    });

    template.setUploadToBackupStorage(new Flow() {
        String __name__ = "upload-to-backup-storage";

        @Override
        public void run(final FlowTrigger trigger, Map data) {
            final ParamOut out = (ParamOut) data.get(ParamOut.class);

            // Ask the backup storage where the image should be installed (sync call).
            BackupStorageAskInstallPathMsg ask = new BackupStorageAskInstallPathMsg();
            ask.setImageUuid(paramIn.getImage().getUuid());
            ask.setBackupStorageUuid(paramIn.getBackupStorageUuid());
            ask.setImageMediaType(paramIn.getImage().getMediaType());
            bus.makeTargetServiceIdByResourceUuid(ask, BackupStorageConstant.SERVICE_ID, paramIn.getBackupStorageUuid());
            MessageReply ar = bus.call(ask);
            if (!ar.isSuccess()) {
                trigger.fail(ar.getError());
                return;
            }

            String bsInstallPath = ((BackupStorageAskInstallPathReply) ar).getInstallPath();

            UploadBitsToBackupStorageMsg msg = new UploadBitsToBackupStorageMsg();
            msg.setHypervisorType(hvtype.toString());
            msg.setPrimaryStorageUuid(paramIn.getPrimaryStorageUuid());
            msg.setPrimaryStorageInstallPath(ctx.tempInstallPath);
            msg.setBackupStorageUuid(paramIn.getBackupStorageUuid());
            msg.setBackupStorageInstallPath(bsInstallPath);
            msg.setImageUuid(paramIn.getImage().getUuid());
            bus.makeTargetServiceIdByResourceUuid(msg, PrimaryStorageConstant.SERVICE_ID, paramIn.getPrimaryStorageUuid());
            bus.send(msg, new CloudBusCallBack(trigger) {
                @Override
                public void run(MessageReply reply) {
                    if (!reply.isSuccess()) {
                        trigger.fail(reply.getError());
                    } else {
                        UploadBitsToBackupStorageReply r = reply.castReply();
                        out.setBackupStorageInstallPath(r.getBackupStorageInstallPath());
                        trigger.next();
                    }
                }
            });
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            final ParamOut out = (ParamOut) data.get(ParamOut.class);
            // Remove the half-uploaded image from backup storage; fire-and-forget.
            if (out.getBackupStorageInstallPath() != null) {
                DeleteBitsOnBackupStorageMsg msg = new DeleteBitsOnBackupStorageMsg();
                msg.setInstallPath(out.getBackupStorageInstallPath());
                msg.setBackupStorageUuid(paramIn.getBackupStorageUuid());
                bus.makeTargetServiceIdByResourceUuid(msg, BackupStorageConstant.SERVICE_ID, paramIn.getBackupStorageUuid());
                bus.send(msg);
            }

            trigger.rollback();
        }
    });

    template.setDeleteTemporaryTemplate(new NoRollbackFlow() {
        String __name__ = "delete-temporary-template";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            // Cleanup step: drop the temporary volume; proceeds regardless of outcome.
            DeleteVolumeBitsOnPrimaryStorageMsg msg = new DeleteVolumeBitsOnPrimaryStorageMsg();
            msg.setHypervisorType(hvtype.toString());
            msg.setPrimaryStorageUuid(paramIn.getPrimaryStorageUuid());
            msg.setInstallPath(ctx.tempInstallPath);
            bus.makeTargetServiceIdByResourceUuid(msg, PrimaryStorageConstant.SERVICE_ID, paramIn.getPrimaryStorageUuid());
            bus.send(msg);

            trigger.next();
        }
    });

    return template;
}
@Override
public String createTemplateFromVolumeSnapshotPrimaryStorageType() {
    // This factory handles snapshots residing on NFS primary storage.
    return NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE;
}
@Override
public String getPrimaryStorageTypeForRecalculateCapacityExtensionPoint() {
    // Registers this manager's recalculation hooks for the NFS primary storage type.
    return type.toString();
}
@Override
public void beforeRecalculatePrimaryStorageCapacity(RecalculatePrimaryStorageCapacityStruct struct) {
    // Intentionally a no-op: NFS needs no preparation before capacity recalculation.
}
@Override
public void afterRecalculatePrimaryStorageCapacity(RecalculatePrimaryStorageCapacityStruct struct) {
    // Once the NFS ps is detached from every cluster, zero out its capacity record.
    String psUuid = struct.getPrimaryStorageUuid();
    if (isNfsUnmounted(psUuid)) {
        resetDefaultCapacityWhenNfsUnmounted(psUuid);
    }
}
/** An NFS primary storage counts as unmounted when no cluster references it. */
private boolean isNfsUnmounted(String psUuid) {
    long refCount = Q.New(PrimaryStorageClusterRefVO.class)
            .eq(PrimaryStorageClusterRefVO_.primaryStorageUuid, psUuid)
            .count();
    return refCount == 0;
}
/** Zeroes out every capacity figure of a fully-detached NFS primary storage. */
private void resetDefaultCapacityWhenNfsUnmounted(String psUuid) {
    final long zero = 0L;
    new PrimaryStorageCapacityUpdater(psUuid).run(new PrimaryStorageCapacityUpdaterRunnable() {
        @Override
        public PrimaryStorageCapacityVO call(PrimaryStorageCapacityVO cap) {
            cap.setTotalCapacity(zero);
            cap.setAvailableCapacity(zero);
            cap.setTotalPhysicalCapacity(zero);
            cap.setAvailablePhysicalCapacity(zero);
            return cap;
        }
    });
}
@Override
public void preDetachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) throws PrimaryStorageException {
    // Intentionally a no-op: NFS needs no pre-detach validation.
}
@Override
public void beforeDetachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) {
    // Intentionally a no-op.
}
@Override
public void failToDetachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) {
    // Intentionally a no-op: nothing to undo on a failed detach.
}
@Override
public void afterDetachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) {
    // Only NFS primary storage is handled here.
    if (!inventory.getType().equals(NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE)){
        return;
    }

    // Remove host<->ps status records for every host of the detached cluster,
    // then recompute the capacity of the primary storage.
    new SQLBatch(){
        @Override
        protected void scripts() {
            List<String> huuids = Q.New(HostVO.class).select(HostVO_.uuid)
                    .eq(HostVO_.clusterUuid, clusterUuid)
                    .listValues();

            SQL.New(PrimaryStorageHostRefVO.class)
                    .eq(PrimaryStorageHostRefVO_.primaryStorageUuid, inventory.getUuid())
                    .in(PrimaryStorageHostRefVO_.hostUuid, huuids)
                    .hardDelete();
        }
    }.execute();

    logger.debug("succeed delete PrimaryStorageHostRef record");
    recalculateCapacity(inventory.getUuid());
}
// No pre-delete validation needed.
// NOTE(review): the sibling before/afterDeleteHost methods carry @Override but this one
// does not -- presumably it implements the same extension point; confirm before adding it.
public void preDeleteHost(HostInventory inventory) throws HostException {
}
@Override
public void beforeDeleteHost(HostInventory inventory) {
    // Intentionally a no-op.
}
@Override
public void afterDeleteHost(HostInventory inventory) {
    // When the last host of a cluster is deleted, release the capacity that the cluster's
    // NFS primary storages had reserved.
    String clusterUuid = inventory.getClusterUuid();
    List<String> psUuids = getNfsPrimaryStorageInCluster(clusterUuid);
    if(psUuids == null || psUuids.isEmpty()) {
        return;
    }

    // Any remaining host in the cluster (other than the deleted one) means capacity stays.
    if (Q.New(HostVO.class).eq(HostVO_.clusterUuid, clusterUuid).notEq(HostVO_.uuid, inventory.getUuid()).isExists()) {
        return;
    }

    for(String psUuid : psUuids) {
        releasePrimaryStorageCapacity(psUuid);
    }
}
/** Asks the primary storage service to recalculate (releasing) capacity for one NFS ps. */
private void releasePrimaryStorageCapacity(String psUuid) {
    NfsRecalculatePrimaryStorageCapacityMsg rmsg = new NfsRecalculatePrimaryStorageCapacityMsg();
    rmsg.setPrimaryStorageUuid(psUuid);
    rmsg.setRelease(true);
    bus.makeTargetServiceIdByResourceUuid(rmsg, PrimaryStorageConstant.SERVICE_ID, psUuid);
    bus.send(rmsg);
}
/** Returns the UUIDs of all NFS primary storages attached to the given cluster. */
private List<String> getNfsPrimaryStorageInCluster(String clusterUuid) {
    return SQL.New("select pri.uuid" +
            " from PrimaryStorageVO pri, PrimaryStorageClusterRefVO ref" +
            " where pri.uuid = ref.primaryStorageUuid" +
            " and ref.clusterUuid = :cuuid" +
            " and pri.type = :ptype")
            .param("cuuid", clusterUuid)
            .param("ptype", NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE)
            .list();
}
@Override
public void preAttachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) throws PrimaryStorageException {
    // Intentionally a no-op: NFS needs no pre-attach validation.
}
@Override
public void beforeAttachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) {
    // Intentionally a no-op.
}
@Override
public void failToAttachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) {
    // Intentionally a no-op: nothing to undo on a failed attach.
}
@Override
public void afterAttachPrimaryStorage(PrimaryStorageInventory inventory, String clusterUuid) {
    // After attaching an NFS ps to a cluster, mark every usable host of that cluster as
    // Connected to the ps and recompute capacity. Hosts in (pre)maintenance are skipped.
    if(inventory.getType().equals(NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE)){
        Q.New(HostVO.class).select(HostVO_.uuid)
                .eq(HostVO_.clusterUuid, clusterUuid)
                .eq(HostVO_.status, HostStatus.Connected)
                .notIn(HostVO_.state, list(HostState.PreMaintenance, HostState.Maintenance))
                .listValues()
                .forEach(huuid ->
                        updateNfsHostStatus(inventory.getUuid(), (String)huuid, PrimaryStorageHostStatus.Connected));

        logger.debug("succeed add PrimaryStorageHostRef record");
        recalculateCapacity(inventory.getUuid());
    }
}
@Override
public void afterMarkRootVolumeAsSnapshot(VolumeSnapshotInventory snapshot) {
    // Intentionally a no-op for NFS primary storage.
}
/** Sends an asynchronous capacity-recalculation request for the given primary storage. */
private void recalculateCapacity(String psUuid){
    RecalculatePrimaryStorageCapacityMsg rmsg = new RecalculatePrimaryStorageCapacityMsg();
    rmsg.setPrimaryStorageUuid(psUuid);
    bus.makeTargetServiceIdByResourceUuid(rmsg, PrimaryStorageConstant.SERVICE_ID, psUuid);
    bus.send(rmsg);
}
/**
 * Vetoes a cluster OS update when any host in the cluster is itself the NFS server of a
 * primary storage that currently backs non-stopped VMs.
 *
 * @return null when the update may proceed, otherwise a human-readable reason listing
 *         the offending host IPs
 */
@Override
public String preUpdateClusterOS(ClusterVO cls) {
    // do not update hosts that also run nfs ps
    List<String> matched = new ArrayList<>();
    new SQLBatch() {
        @Override
        protected void scripts() {
            List<String> hostIps = q(HostVO.class)
                    .select(HostVO_.managementIp)
                    .eq(HostVO_.clusterUuid, cls.getUuid())
                    .listValues();
            for (String hostIp : hostIps) {
                // An NFS ps whose URL starts with "<hostIp>:/" is exported by the host itself.
                String psUuid = q(PrimaryStorageVO.class)
                        .select(PrimaryStorageVO_.uuid)
                        .eq(PrimaryStorageVO_.type, NfsPrimaryStorageConstant.NFS_PRIMARY_STORAGE_TYPE)
                        .like(PrimaryStorageVO_.url, String.format("%s:/%%", hostIp))
                        .limit(1)
                        .findValue();
                if (psUuid == null || psUuid.equals("")) {
                    continue;
                }

                // Root volumes living on that NFS primary storage.
                List<String> volumes = q(VolumeVO.class)
                        .select(VolumeVO_.uuid)
                        .eq(VolumeVO_.type, VolumeType.Root)
                        .eq(VolumeVO_.primaryStorageUuid, psUuid)
                        .listValues();
                if (volumes == null || volumes.isEmpty()) {
                    continue;
                }

                // Any non-stopped VM on those root volumes blocks the OS update.
                boolean vmRunning = q(VmInstanceVO.class)
                        .notEq(VmInstanceVO_.state, VmInstanceState.Stopped)
                        .in(VmInstanceVO_.rootVolumeUuid, volumes)
                        .isExists();

                if (vmRunning) {
                    matched.add(hostIp);
                }
            }
        }
    }.execute();

    if (matched.isEmpty()) {
        return null;
    } else {
        // Typo fixed: "releated" -> "related".
        return String.format("nfs server running on hosts [%s], " +
                "stop related vm instances before update host os.", String.join(",", matched));
    }
}
@Override
public void beforeUpdateClusterOS(ClusterVO cls) {
    // Intentionally a no-op.
}
@Override
public void afterUpdateClusterOS(ClusterVO cls) {
    // Intentionally a no-op.
}
@Override
public void afterInstantiateVolume(InstantiateVolumeOnPrimaryStorageMsg msg) {
    // Memory volumes carry no qcow2 provisioning strategy.
    if (msg instanceof InstantiateMemoryVolumeOnPrimaryStorageMsg) {
        return;
    }

    // Only act on volumes created on this NFS primary storage type.
    String psType = dbf.findByUuid(msg.getPrimaryStorageUuid(), PrimaryStorageVO.class).getType();
    if (!type.toString().equals(psType)) {
        return;
    }

    // A root volume instantiated from a RootVolumeTemplate image is qcow2 with a backing file.
    boolean hasBackingFile = false;
    if (msg instanceof InstantiateRootVolumeFromTemplateOnPrimaryStorageMsg) {
        InstantiateRootVolumeFromTemplateOnPrimaryStorageMsg imsg = (InstantiateRootVolumeFromTemplateOnPrimaryStorageMsg) msg;
        ImageInventory image = imsg.getTemplateSpec().getInventory();
        if (ImageConstant.ImageMediaType.RootVolumeTemplate.toString().equals(image.getMediaType())) {
            hasBackingFile = true;
        }
    }

    VolumeInventory volume = msg.getVolume();
    volume.setPrimaryStorageUuid(msg.getPrimaryStorageUuid());

    // Let registered extensions record the provisioning strategy for the new volume.
    for (CreateQcow2VolumeProvisioningStrategyExtensionPoint exp : pluginRgty.getExtensionList(CreateQcow2VolumeProvisioningStrategyExtensionPoint.class)) {
        exp.saveQcow2VolumeProvisioningStrategy(volume, hasBackingFile);
    }
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lightsail.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request to create a deployment on an Amazon Lightsail container service.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lightsail-2016-11-28/CreateContainerServiceDeployment"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateContainerServiceDeploymentRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the container service for which to create the deployment. */
    private String serviceName;

    /** Settings of the containers that will be launched, keyed by container name. */
    private java.util.Map<String, Container> containers;

    /** Settings of the public endpoint for the container service. */
    private EndpointRequest publicEndpoint;

    /**
     * @param serviceName
     *        The name of the container service for which to create the deployment.
     */
    public void setServiceName(String serviceName) {
        this.serviceName = serviceName;
    }

    /**
     * @return The name of the container service for which to create the deployment.
     */
    public String getServiceName() {
        return this.serviceName;
    }

    /**
     * @param serviceName
     *        The name of the container service for which to create the deployment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateContainerServiceDeploymentRequest withServiceName(String serviceName) {
        setServiceName(serviceName);
        return this;
    }

    /**
     * @return The settings of the containers that will be launched on the container service.
     */
    public java.util.Map<String, Container> getContainers() {
        return containers;
    }

    /**
     * @param containers
     *        The settings of the containers that will be launched on the container service.
     */
    public void setContainers(java.util.Map<String, Container> containers) {
        this.containers = containers;
    }

    /**
     * @param containers
     *        The settings of the containers that will be launched on the container service.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateContainerServiceDeploymentRequest withContainers(java.util.Map<String, Container> containers) {
        setContainers(containers);
        return this;
    }

    /**
     * Add a single Containers entry.
     *
     * @see CreateContainerServiceDeploymentRequest#withContainers
     * @return Returns a reference to this object so that method calls can be chained together.
     * @throws IllegalArgumentException if the key is already present
     */
    public CreateContainerServiceDeploymentRequest addContainersEntry(String key, Container value) {
        if (null == this.containers) {
            this.containers = new java.util.HashMap<String, Container>();
        }
        if (this.containers.containsKey(key))
            // `key` is already a String; the original's key.toString() would NPE on a
            // duplicate null key, while string concatenation prints "null".
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        this.containers.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Containers.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateContainerServiceDeploymentRequest clearContainersEntries() {
        this.containers = null;
        return this;
    }

    /**
     * @param publicEndpoint
     *        The settings of the public endpoint for the container service.
     */
    public void setPublicEndpoint(EndpointRequest publicEndpoint) {
        this.publicEndpoint = publicEndpoint;
    }

    /**
     * @return The settings of the public endpoint for the container service.
     */
    public EndpointRequest getPublicEndpoint() {
        return this.publicEndpoint;
    }

    /**
     * @param publicEndpoint
     *        The settings of the public endpoint for the container service.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateContainerServiceDeploymentRequest withPublicEndpoint(EndpointRequest publicEndpoint) {
        setPublicEndpoint(publicEndpoint);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getServiceName() != null)
            sb.append("ServiceName: ").append(getServiceName()).append(",");
        if (getContainers() != null)
            sb.append("Containers: ").append(getContainers()).append(",");
        if (getPublicEndpoint() != null)
            sb.append("PublicEndpoint: ").append(getPublicEndpoint());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // Idiomatic negated instanceof instead of `instanceof ... == false`; also covers null.
        if (!(obj instanceof CreateContainerServiceDeploymentRequest))
            return false;
        CreateContainerServiceDeploymentRequest other = (CreateContainerServiceDeploymentRequest) obj;
        // Objects.equals is null-safe and equivalent to the generated XOR/equals pattern.
        return java.util.Objects.equals(other.getServiceName(), this.getServiceName())
                && java.util.Objects.equals(other.getContainers(), this.getContainers())
                && java.util.Objects.equals(other.getPublicEndpoint(), this.getPublicEndpoint());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the identical 31-based rolling hash (null -> 0) as the
        // hand-rolled prime loop it replaces.
        return java.util.Objects.hash(getServiceName(), getContainers(), getPublicEndpoint());
    }

    @Override
    public CreateContainerServiceDeploymentRequest clone() {
        return (CreateContainerServiceDeploymentRequest) super.clone();
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.microsoft.azure.spring.autoconfigure.b2c;
import org.hibernate.validator.constraints.URL;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.lang.NonNull;
import org.springframework.security.oauth2.client.authentication.OAuth2AuthorizationCodeAuthenticationProvider;
import org.springframework.security.oauth2.client.oidc.authentication.OidcAuthorizationCodeAuthenticationProvider;
import org.springframework.validation.annotation.Validated;
import javax.validation.constraints.NotBlank;
import java.net.MalformedURLException;
/**
 * Configuration properties for Azure Active Directory B2C.
 */
@Validated
@ConfigurationProperties(prefix = AADB2CProperties.PREFIX)
public class AADB2CProperties {

    private static final String USER_FLOWS = "user-flows";

    /**
     * We do not use {@link String#format(String, Object...)}
     * as it's not real constant, which cannot be referenced in annotation.
     */
    public static final String USER_FLOW_PASSWORD_RESET = USER_FLOWS + ".password-reset";

    public static final String USER_FLOW_PROFILE_EDIT = USER_FLOWS + ".profile-edit";

    public static final String USER_FLOW_SIGN_UP_OR_SIGN_IN = USER_FLOWS + ".sign-up-or-sign-in";

    public static final String DEFAULT_LOGOUT_SUCCESS_URL = "http://localhost:8080/login";

    public static final String PREFIX = "azure.activedirectory.b2c";

    /**
     * The name of the b2c tenant.
     */
    @NotBlank(message = "tenant name should not be blank")
    private String tenant;

    /**
     * Use OIDC {@link OidcAuthorizationCodeAuthenticationProvider} by default. If set to false,
     * will use Oauth2 {@link OAuth2AuthorizationCodeAuthenticationProvider}.
     */
    private Boolean oidcEnabled = true;

    /**
     * The application ID that registered under b2c tenant.
     */
    @NotBlank(message = "client ID should not be blank")
    private String clientId;

    /**
     * The application secret that registered under b2c tenant.
     */
    @NotBlank(message = "client secret should not be blank")
    private String clientSecret;

    // Redirect URI registered with the B2C application; its path component is used as the
    // login processing URL.
    @URL(message = "reply URL should be valid URL")
    private String replyUrl;

    // Where the user lands after a successful logout.
    @URL(message = "logout success should be valid URL")
    private String logoutSuccessUrl = DEFAULT_LOGOUT_SUCCESS_URL;

    /**
     * The all user flows which is created under b2c tenant.
     */
    private UserFlows userFlows = new UserFlows();

    /**
     * Telemetry data will be collected if true, or disable data collection.
     */
    private boolean allowTelemetry = true;

    /**
     * Extracts the path component of the given URL.
     *
     * @throws AADB2CConfigurationException if the URL cannot be parsed
     */
    private String getReplyURLPath(@URL String replyURL) {
        try {
            return new java.net.URL(replyURL).getPath();
        } catch (MalformedURLException e) {
            throw new AADB2CConfigurationException("Failed to get path of given URL.", e);
        }
    }

    // NOTE(review): if replyUrl is unset (it is not @NotBlank), this throws the wrapped
    // MalformedURLException -- confirm that is the intended failure mode.
    @NonNull
    public String getLoginProcessingUrl() {
        return getReplyURLPath(replyUrl);
    }

    /**
     * Names of the user flows (policies) created under the B2C tenant.
     */
    @Validated
    protected static class UserFlows {

        protected UserFlows() {
        }

        /**
         * The sign-up-or-sign-in user flow which is created under b2c tenant.
         */
        @NotBlank(message = "sign-up-or-in value should not be blank")
        private String signUpOrSignIn;

        /**
         * The profile-edit user flow which is created under b2c tenant.
         */
        private String profileEdit;

        /**
         * The password-reset user flow which is created under b2c tenant.
         */
        private String passwordReset;

        public String getSignUpOrSignIn() {
            return signUpOrSignIn;
        }

        public void setSignUpOrSignIn(String signUpOrSignIn) {
            this.signUpOrSignIn = signUpOrSignIn;
        }

        public String getProfileEdit() {
            return profileEdit;
        }

        public void setProfileEdit(String profileEdit) {
            this.profileEdit = profileEdit;
        }

        public String getPasswordReset() {
            return passwordReset;
        }

        public void setPasswordReset(String passwordReset) {
            this.passwordReset = passwordReset;
        }
    }

    public String getTenant() {
        return tenant;
    }

    public void setTenant(String tenant) {
        this.tenant = tenant;
    }

    public Boolean getOidcEnabled() {
        return oidcEnabled;
    }

    public void setOidcEnabled(Boolean oidcEnabled) {
        this.oidcEnabled = oidcEnabled;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    public String getReplyUrl() {
        return replyUrl;
    }

    public void setReplyUrl(String replyUrl) {
        this.replyUrl = replyUrl;
    }

    public String getLogoutSuccessUrl() {
        return logoutSuccessUrl;
    }

    public void setLogoutSuccessUrl(String logoutSuccessUrl) {
        this.logoutSuccessUrl = logoutSuccessUrl;
    }

    public UserFlows getUserFlows() {
        return userFlows;
    }

    public void setUserFlows(UserFlows userFlows) {
        this.userFlows = userFlows;
    }

    public boolean isAllowTelemetry() {
        return allowTelemetry;
    }

    public void setAllowTelemetry(boolean allowTelemetry) {
        this.allowTelemetry = allowTelemetry;
    }
}
| |
/**
* Copyright 2014 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.wallet;
import com.google.bitcoin.crypto.*;
import com.google.bitcoin.store.UnreadableWalletException;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import org.bitcoinj.wallet.Protos;
import org.spongycastle.crypto.params.KeyParameter;
import javax.annotation.Nullable;
import java.io.UnsupportedEncodingException;
import java.security.SecureRandom;
import java.util.List;
import static com.google.bitcoin.core.Utils.HEX;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
* Holds the seed bytes for the BIP32 deterministic wallet algorithm, inside a
* {@link com.google.bitcoin.wallet.DeterministicKeyChain}. The purpose of this wrapper is to simplify the encryption
* code.
*/
public class DeterministicSeed implements EncryptableItem {
// It would take more than 10^12 years to brute-force a 128 bit seed using $1B worth of computing equipment.
public static final int DEFAULT_SEED_ENTROPY_BITS = 128;
public static final int MAX_SEED_ENTROPY_BITS = 512;
// Raw seed bytes derived from the mnemonic + passphrase; null while this object only holds encrypted data.
@Nullable private final byte[] seed;
// Clear-text BIP 39 word list; null when the seed is encrypted.
@Nullable private List<String> mnemonicCode;
// Encrypted form of the mnemonic; null when clear-text. Invariant (asserted in isEncrypted()):
// at least one of mnemonicCode / encryptedMnemonicCode is non-null.
@Nullable private EncryptedData encryptedMnemonicCode;
// UNIX time in seconds at which the seed was originally created.
private final long creationTimeSeconds;
// Parses a space-separated mnemonic string. NOTE(review): the delegated-to decodeMnemonicCode(String)
// overload never actually throws, so the declared UnreadableWalletException appears vestigial — confirm.
DeterministicSeed(String mnemonicCode, String passphrase, long creationTimeSeconds) throws UnreadableWalletException {
this(decodeMnemonicCode(mnemonicCode), passphrase, creationTimeSeconds);
}
// Package-private: wraps pre-computed clear-text seed bytes and their mnemonic.
DeterministicSeed(byte[] seed, List<String> mnemonic, long creationTimeSeconds) {
this.seed = checkNotNull(seed);
this.mnemonicCode = checkNotNull(mnemonic);
this.encryptedMnemonicCode = null;
this.creationTimeSeconds = creationTimeSeconds;
}
// Package-private: wraps an already-encrypted mnemonic; seed bytes are unavailable until decrypt().
DeterministicSeed(EncryptedData encryptedMnemonic, long creationTimeSeconds) {
this.seed = null;
this.mnemonicCode = null;
this.encryptedMnemonicCode = checkNotNull(encryptedMnemonic);
this.creationTimeSeconds = creationTimeSeconds;
}
/**
* Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more
* details on this scheme.
* @param mnemonicCode A list of words.
* @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
* @param creationTimeSeconds When the seed was originally created, UNIX time.
*/
public DeterministicSeed(List<String> mnemonicCode, String passphrase, long creationTimeSeconds) {
this(MnemonicCode.toSeed(mnemonicCode, passphrase), mnemonicCode, creationTimeSeconds);
}
/**
* Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more
* details on this scheme.
* @param random Entropy source
* @param bits number of bits, must be divisible by 32
* @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
* @param creationTimeSeconds When the seed was originally created, UNIX time.
*/
public DeterministicSeed(SecureRandom random, int bits, String passphrase, long creationTimeSeconds) {
this(getEntropy(random, bits), passphrase, creationTimeSeconds);
}
/**
* Constructs a seed from a BIP 39 mnemonic code. See {@link com.google.bitcoin.crypto.MnemonicCode} for more
* details on this scheme.
* @param entropy entropy bits, length must be divisible by 32
* @param passphrase A user supplied passphrase, or an empty string if there is no passphrase
* @param creationTimeSeconds When the seed was originally created, UNIX time.
*/
public DeterministicSeed(byte[] entropy, String passphrase, long creationTimeSeconds) {
// entropy.length is in bytes, so % 4 == 0 means the bit count is divisible by 32.
Preconditions.checkArgument(entropy.length % 4 == 0, "entropy size in bits not divisible by 32");
Preconditions.checkArgument(entropy.length * 8 >= DEFAULT_SEED_ENTROPY_BITS, "entropy size too small");
try {
this.mnemonicCode = MnemonicCode.INSTANCE.toMnemonic(entropy);
} catch (MnemonicException.MnemonicLengthException e) {
// cannot happen
throw new RuntimeException(e);
}
this.seed = MnemonicCode.toSeed(mnemonicCode, passphrase);
this.encryptedMnemonicCode = null;
this.creationTimeSeconds = creationTimeSeconds;
}
// Draws bits/8 random bytes from the supplied RNG, capped at MAX_SEED_ENTROPY_BITS.
private static byte[] getEntropy(SecureRandom random, int bits) {
Preconditions.checkArgument(bits <= MAX_SEED_ENTROPY_BITS, "requested entropy size too large");
byte[] seed = new byte[bits / 8];
random.nextBytes(seed);
return seed;
}
/** Returns true when only the encrypted mnemonic is held; enforces the one-of-two invariant. */
@Override
public boolean isEncrypted() {
checkState(mnemonicCode != null || encryptedMnemonicCode != null);
return encryptedMnemonicCode != null;
}
// NOTE(review): when unencrypted this prints the hex seed AND the mnemonic words —
// make sure this never reaches logs in production.
@Override
public String toString() {
if (isEncrypted())
return "DeterministicSeed [encrypted]";
else
return "DeterministicSeed " + toHexString() +
((mnemonicCode != null) ? " " + Joiner.on(" ").join(mnemonicCode) : "");
}
/** Returns the seed as hex or null if encrypted. */
@Nullable
public String toHexString() {
if (seed != null)
return HEX.encode(seed);
else
return null;
}
// The "secret" protected by encryption is the UTF-8 mnemonic text, not the derived seed bytes.
@Nullable
@Override
public byte[] getSecretBytes() {
return getMnemonicAsBytes();
}
/** Returns the derived seed bytes, or null if this object is encrypted. */
@Nullable
public byte[] getSeedBytes() {
return seed;
}
@Nullable
@Override
public EncryptedData getEncryptedData() {
return encryptedMnemonicCode;
}
@Override
public Protos.Wallet.EncryptionType getEncryptionType() {
return Protos.Wallet.EncryptionType.ENCRYPTED_SCRYPT_AES;
}
/** Returns the creation timestamp in UNIX seconds. */
@Override
public long getCreationTimeSeconds() {
return creationTimeSeconds;
}
/** Returns a new encrypted copy; this object must currently be clear-text. */
public DeterministicSeed encrypt(KeyCrypter keyCrypter, KeyParameter aesKey) {
checkState(encryptedMnemonicCode == null, "Trying to encrypt seed twice");
checkState(mnemonicCode != null, "Mnemonic missing so cannot encrypt");
EncryptedData mnemonic = keyCrypter.encrypt(getMnemonicAsBytes(), aesKey);
return new DeterministicSeed(mnemonic, creationTimeSeconds);
}
// Serializes the mnemonic as its space-joined UTF-8 byte form (the inverse of decodeMnemonicCode(byte[])).
private byte[] getMnemonicAsBytes() {
return Joiner.on(" ").join(mnemonicCode).getBytes(Charsets.UTF_8);
}
/** Returns a new clear-text copy by decrypting the mnemonic and re-deriving the seed with the passphrase. */
public DeterministicSeed decrypt(KeyCrypter crypter, String passphrase, KeyParameter aesKey) {
checkState(isEncrypted());
checkNotNull(encryptedMnemonicCode);
List<String> mnemonic = null;
try {
mnemonic = decodeMnemonicCode(crypter.decrypt(encryptedMnemonicCode, aesKey));
} catch (UnreadableWalletException e) {
// TODO what is the best way to handle this exception?
throw new RuntimeException(e);
}
return new DeterministicSeed(mnemonic, passphrase, creationTimeSeconds);
}
// Equality is based on creation time plus either the encrypted blob or the clear mnemonic.
// Relies on the class invariant that mnemonicCode is non-null whenever encryptedMnemonicCode is null.
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DeterministicSeed seed = (DeterministicSeed) o;
if (creationTimeSeconds != seed.creationTimeSeconds) return false;
if (encryptedMnemonicCode != null) {
if (seed.encryptedMnemonicCode == null) return false;
if (!encryptedMnemonicCode.equals(seed.encryptedMnemonicCode)) return false;
} else {
if (!mnemonicCode.equals(seed.mnemonicCode)) return false;
}
return true;
}
// Must stay consistent with equals(): hashes the same fields equals() compares.
@Override
public int hashCode() {
int result = encryptedMnemonicCode != null ? encryptedMnemonicCode.hashCode() : mnemonicCode.hashCode();
result = 31 * result + (int) (creationTimeSeconds ^ (creationTimeSeconds >>> 32));
return result;
}
/**
* Check if our mnemonic is a valid mnemonic phrase for our word list.
* Does nothing if we are encrypted.
*
* @throws com.google.bitcoin.crypto.MnemonicException if check fails
*/
public void check() throws MnemonicException {
if (mnemonicCode != null)
MnemonicCode.INSTANCE.check(mnemonicCode);
}
// Recovers the original entropy bytes from the mnemonic; requires clear-text state.
byte[] getEntropyBytes() throws MnemonicException {
return MnemonicCode.INSTANCE.toEntropy(mnemonicCode);
}
/** Get the mnemonic code, or null if unknown. */
@Nullable
public List<String> getMnemonicCode() {
return mnemonicCode;
}
// Splits UTF-8 bytes on single spaces back into the word list (inverse of getMnemonicAsBytes()).
private static List<String> decodeMnemonicCode(byte[] mnemonicCode) throws UnreadableWalletException {
try {
return Splitter.on(" ").splitToList(new String(mnemonicCode, "UTF-8"));
} catch (UnsupportedEncodingException e) {
throw new UnreadableWalletException(e.toString());
}
}
// String overload: splits on single spaces; never throws.
private static List<String> decodeMnemonicCode(String mnemonicCode) {
return Splitter.on(" ").splitToList(mnemonicCode);
}
}
| |
package org.winterblade.minecraft.harmony;
import jdk.nashorn.api.scripting.ScriptObjectMirror;
import net.minecraft.nbt.NBTBase;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagLong;
import net.minecraftforge.common.DimensionManager;
import net.minecraftforge.fml.common.Loader;
import org.winterblade.minecraft.harmony.api.BasicOperation;
import org.winterblade.minecraft.harmony.api.Operation;
import org.winterblade.minecraft.harmony.common.utility.LogHelper;
import org.winterblade.minecraft.harmony.scripting.NashornConfigProcessor;
import java.util.*;
/**
* Created by Matt on 4/8/2016.
*/
/**
 * Tracks the lifecycle of crafting "sets": registration of operation deserializers,
 * instantiation of operations from script configuration, and duration/cooldown
 * bookkeeping that is persisted to NBT alongside the saved game.
 */
public class SetManager {
/** Sets currently on cooldown, keyed by set name, valued by the world tick at which the cooldown ends. */
private final static Map<String, Long> setsOnCooldown = new HashMap<>();
/** Applied sets with a duration, keyed by set name, valued by the world tick at which they expire. */
private final static Map<String, Long> setsToExpire = new HashMap<>();
public static final String SETS_TO_EXPIRE_TAG_NAME = "SetsToExpire";
public static final String SETS_ON_COOLDOWN_TAG_NAME = "SetsOnCooldown";
/** Total world time observed on the most recent update() call; 0 until the first server tick. */
private static long lastTickTime;
/** Operation deserializer classes keyed by lower-cased operation name or alias. */
private final static Map<String, Class<BasicOperation>> deserializerMap = new TreeMap<>();
/**
 * Registers the given operation deserializers, skipping any whose declared mod
 * dependency is not loaded, and indexing each class under its aliases as well.
 * @param deserializers Map of operation name to implementing class.
 */
public static void CreateDeserializers(Map<String, Class<BasicOperation>> deserializers) {
for(Map.Entry<String, Class<BasicOperation>> deserializer : deserializers.entrySet()) {
Operation anno = deserializer.getValue().getAnnotation(Operation.class);
// Check if we have the specified mod loaded:
if (!anno.dependsOn().equals("") && !Loader.isModLoaded(anno.dependsOn())) {
LogHelper.warn(anno.name() + " depends on '" + anno.dependsOn() + "', which is not loaded.");
continue;
}
LogHelper.info("Registering operation '" + anno.name() + "'.");
deserializerMap.put(deserializer.getKey().toLowerCase(), deserializer.getValue());
// Also register our aliases...
for (String alias : anno.aliases()) {
deserializerMap.put(alias.toLowerCase(), deserializer.getValue());
}
}
}
/**
 * Instantiates and configures an operation of the given type from script configuration.
 * @param type Case-insensitive operation name or alias.
 * @param operation The script-side configuration object.
 * @return The configured operation, or null if the type is unknown, cannot be
 *         instantiated, or rejects the supplied configuration.
 */
public static BasicOperation createOperation(String type, ScriptObjectMirror operation) {
// Typed lookup avoids the raw Class and unchecked cast the old code needed.
Class<BasicOperation> source = deserializerMap.get(type.toLowerCase());
if(source == null) {
return null;
}
BasicOperation inst;
try {
inst = source.newInstance();
} catch (Exception e) {
// Keep the null contract for callers, but don't swallow the cause silently.
LogHelper.warn("Unable to instantiate operation '" + type + "': " + e);
return null;
}
if(!inst.convert(NashornConfigProcessor.getInstance().nashorn,operation)) return null;
return inst;
}
/**
 * Called from our internal scripts in order to create a set
 */
public static OperationSet registerSet(String setName) {
return CraftingHarmonicsMod.getOrCreateSet(setName);
}
/**
 * Called by the server tick to see if we need to remove sets.
 * @return True if any set expired (so configs need re-applying), false otherwise.
 */
static boolean update() {
lastTickTime = DimensionManager.getWorld(0).getTotalWorldTime();
boolean updatedConfigs = false;
// Undo any sets whose duration has elapsed; iterate via Iterator so removal is safe mid-loop.
for (Iterator<Map.Entry<String, Long>> iterator = setsToExpire.entrySet().iterator(); iterator.hasNext(); ) {
Map.Entry<String, Long> entry = iterator.next();
if (lastTickTime < entry.getValue()) continue;
updatedConfigs = true;
iterator.remove();
LogHelper.info("Set {}'s duration has expired.", entry.getKey());
CraftingHarmonicsMod.undoSet(entry.getKey());
}
// Remove any sets that no longer need to be on cooldown
for (Iterator<Map.Entry<String, Long>> iterator = setsOnCooldown.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Long> entry = iterator.next();
if (lastTickTime < entry.getValue()) continue;
LogHelper.info("Set {} is no longer on cooldown.", entry.getKey());
iterator.remove();
}
return updatedConfigs;
}
/**
 * Called when a set with a duration is applied
 * @param setName The set name
 * @param duration The duration
 */
static void setWithDurationApplied(String setName, int duration) {
// If we haven't run our first update tick: don't care.
if(lastTickTime <= 0) return;
// Otherwise, go ahead and say that we need to expire at some point
LogHelper.info("Set {} will expire in {} ticks.", setName, duration);
setsToExpire.put(setName, lastTickTime + duration);
CraftingHarmonicsMod.updateSavedData();
}
/**
 * Checks to see if the given set is on cooldown or not
 * @param set The set to check
 * @return True if it is, false otherwise.
 */
static boolean isSetOnCooldown(String set) {
return setsOnCooldown.containsKey(set);
}
/**
 * Called when a set with a cooldown is removed
 * @param setName The set name
 * @param cooldown The cooldown
 */
static void setWithCooldownRemoved(String setName, int cooldown) {
// If we haven't run our first update tick: don't care.
if(lastTickTime <= 0) return;
// Otherwise, go ahead and say that we need to expire at some point
LogHelper.info("Set {} on cooldown for {} ticks.", setName, cooldown);
setsOnCooldown.put(setName, lastTickTime + cooldown);
CraftingHarmonicsMod.updateSavedData();
}
/**
 * Called internally to deserialize the saved data
 * @param nbt The NBT to read
 */
public static void deserializeSavedGameData(NBTTagCompound nbt) {
setsToExpire.clear();
setsOnCooldown.clear();
if(nbt.hasKey(SETS_TO_EXPIRE_TAG_NAME)) {
setsToExpire.putAll(convertNbtCompoundToMap(nbt.getCompoundTag(SETS_TO_EXPIRE_TAG_NAME)));
}
if(nbt.hasKey(SETS_ON_COOLDOWN_TAG_NAME)) {
setsOnCooldown.putAll(convertNbtCompoundToMap(nbt.getCompoundTag(SETS_ON_COOLDOWN_TAG_NAME)));
}
}
/**
 * Serializes the setsOnCooldown list to NBT
 * @return The output NBT
 */
public static NBTBase serializeSetsOnCooldown() {
return convertMapToCompoundNbt(setsOnCooldown);
}
/**
 * Serializes the setsToExpire list to NBT
 * @return The output NBT
 */
public static NBTBase serializeSetsToExpire() {
return convertMapToCompoundNbt(setsToExpire);
}
/**
 * Converts an NBT tag compound into a map of strings to longs.
 * @param nbt The NBT to parse
 * @return The map
 */
private static Map<String, Long> convertNbtCompoundToMap(NBTTagCompound nbt) {
Map<String, Long> output = new HashMap<>();
Set<String> keySet = nbt.getKeySet();
for(String key : keySet) {
output.put(key, nbt.getLong(key));
}
return output;
}
/**
 * Converts the map to a NBT tag compound
 * @param map The map to convert
 * @return The output NBT
 */
private static NBTTagCompound convertMapToCompoundNbt(Map<String, Long> map) {
NBTTagCompound output = new NBTTagCompound();
for(Map.Entry<String, Long> entry : map.entrySet()) {
output.setTag(entry.getKey(), new NBTTagLong(entry.getValue()));
}
return output;
}
}
| |
package net.junian.code.qiblasense;
import java.util.List;
import com.google.android.maps.GeoPoint;
import com.google.android.maps.MapActivity;
import com.google.android.maps.MapController;
import com.google.android.maps.MapView;
import com.google.android.maps.Overlay;
import com.google.android.maps.Projection;
import android.app.AlertDialog;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Point;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.util.Log;
import android.view.MenuInflater;
/**
 * Map screen that tracks the user's GPS position and draws a line from it toward the
 * qibla point (the fixed Const.QIBLA_* coordinates — presumably the Kaaba; confirm),
 * plus short west/north reference lines. Built on the legacy Google Maps v1 MapActivity API.
 */
public class QiblaSenseActivity extends MapActivity {
private MapView mapView;
private MapController mapController;
// GeoPoint stores micro-degrees, hence the * 1E6 scaling of the decimal coordinates.
private GeoPoint qibla = new GeoPoint((int) (Const.QIBLA_LATITUDE * 1E6),
(int) (Const.QIBLA_LONGITUDE * 1E6));
// Initial position default before the first GPS fix arrives (hard-coded; roughly Surabaya, Indonesia — confirm).
private GeoPoint myPosition = new GeoPoint((int) (-7.2796 * 1E6),
(int) (112.797218 * 1E6));
private Projection mapProjection;
private List<Overlay> mapOverlays;
// Location mirrors of the two GeoPoints, kept in sync by UpdateMapView() for bearing math.
private Location locQibla;
private Location locMyPosition;
// Initial great-circle bearing from source to dest, in degrees, via the standard
// atan2(sin Δλ · cos φ2, cos φ1 · sin φ2 − sin φ1 · cos φ2 · cos Δλ) formula.
private double bearing(Location source, Location dest)
{
double dLong = dest.getLongitude() - source.getLongitude();
double dLon = Math.toRadians(dLong);
double lat1 = Math.toRadians(source.getLatitude());
double lat2 = Math.toRadians(dest.getLatitude());
double y = Math.sin(dLon) * Math.cos(lat2);
double x = Math.cos(lat1)*Math.sin(lat2) -
Math.sin(lat1)*Math.cos(lat2)*Math.cos(dLon);
double brng = Math.atan2(y, x);
return Math.toDegrees(brng);
}
// Handles the options menu: Exit finishes the activity; Bearing shows a dialog with
// the current position and the qibla bearing (computed twice: Android's bearingTo for
// logging, and our own bearing() for the dialog text).
@Override
public boolean onOptionsItemSelected(android.view.MenuItem item)
{
switch(item.getItemId())
{
case R.id.menuExit:
this.finish();
break;
case R.id.menuBearing:
float heading = locMyPosition.bearingTo(locQibla);
Log.d("bearing", heading + "");
//heading = -heading / 360.0f + 180.0f;
Log.d("bearing", heading + "");
AlertDialog.Builder dialog = new AlertDialog.Builder(this);
dialog.setTitle("Bearing");
dialog.setMessage("My Location:\n[" + (myPosition.getLatitudeE6()/1E6) + ", " + (myPosition.getLongitudeE6()/1E6)
+"]\n\nAngle:\n" + bearing(locMyPosition, locQibla));
dialog.show();
break;
default:
break;
}
return true;
};
@Override
public boolean onCreateOptionsMenu(android.view.Menu menu)
{
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.mainmenu, menu);
return true;
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.map);
mapView = (MapView) findViewById(R.id.mapview);
mapView.setSatellite(true);
mapController = mapView.getController();
mapProjection = mapView.getProjection();
mapOverlays = mapView.getOverlays();
//mapController.setZoom(mapView.getMaxZoomLevel()/2);
// Draw once with the default position so locQibla/locMyPosition are initialized
// before any menu action can use them.
UpdateMapView();
// Subscribe to GPS fixes (min 250 ms / 10 m between updates) and redraw on each one.
// NOTE(review): the listener is anonymous and never unregistered — updates continue
// until the process dies; consider removeUpdates in onPause/onDestroy.
LocationManager locationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
locationManager.requestLocationUpdates(
LocationManager.GPS_PROVIDER, 250, 10,
new LocationListener() {
public void onStatusChanged(String provider, int status,
Bundle extras) {
// TODO Auto-generated method stub
}
public void onProviderEnabled(String provider) {
// TODO Auto-generated method stub
}
public void onProviderDisabled(String provider) {
// TODO Auto-generated method stub
}
public void onLocationChanged(Location location) {
// Convert the fix back to micro-degree GeoPoint form and re-center the map.
double latitude = location.getLatitude();
double longitude = location.getLongitude();
myPosition = new GeoPoint((int) (latitude * 1E6), (int)(longitude * 1E6));
UpdateMapView();
}
});
}
// Rebuilds the overlay list with a fresh qibla line, re-centers/zooms the map on the
// current position, and refreshes the Location mirrors used by the bearing math.
private void UpdateMapView() {
mapOverlays.clear();
mapOverlays.add(new LineOverlay(myPosition, qibla));
//GeoPoint west = new GeoPoint((int)(90*1E6),0);
//mapOverlays.add(new LineOverlay(myPosition, west, Color.RED));
mapController.animateTo(myPosition);
mapController.setCenter(myPosition);
mapController.setZoom(mapView.getMaxZoomLevel() - 2);
locQibla = new Location("qibla");
locMyPosition = new Location("myPosition");
locQibla.setLatitude(qibla.getLatitudeE6() / 1E6);
locQibla.setLongitude(qibla.getLongitudeE6() / 1E6);
locMyPosition.setLatitude(myPosition.getLatitudeE6() / 1E6);
locMyPosition.setLongitude(myPosition.getLongitudeE6() / 1E6);
}
// Required by MapActivity; no route is drawn, so always false.
@Override
protected boolean isRouteDisplayed() {
return false;
}
/** Overlay that draws a straight line between two GeoPoints, plus 256-px west and north reference lines. */
private class LineOverlay extends Overlay {
// Defaults are overwritten by every constructor; kept from the original template code.
GeoPoint gP1 = new GeoPoint(19240000, -99120000);
GeoPoint gP2 = new GeoPoint(37423157, -122085008);
int color = Color.BLUE;
public LineOverlay(GeoPoint gp1, GeoPoint gp2) {
this.gP1 = gp1;
this.gP2 = gp2;
}
public LineOverlay(GeoPoint gp1, GeoPoint gp2, int color) {
this.gP1 = gp1;
this.gP2 = gp2;
this.color = color;
}
public void draw(Canvas canvas, MapView mapv, boolean shadow) {
super.draw(canvas, mapv, shadow);
Paint mPaint = CreatePaint();
Point p1 = new Point();
Point p2 = new Point();
Path path = new Path();
// Project both geo points into pixel space, then draw gP2 -> gP1.
mapProjection.toPixels(gP1, p1);
mapProjection.toPixels(gP2, p2);
path.moveTo(p2.x, p2.y);
path.lineTo(p1.x, p1.y);
canvas.drawPath(path, mPaint);
//west
//mPaint = CreatePaint();
//mPaint.setColor(Color.GREEN);
path.moveTo(p1.x, p1.y);
path.lineTo(p1.x - 256, p1.y);
canvas.drawPath(path, mPaint);
//north
//mPaint = CreatePaint();
//mPaint.setColor(Color.GREEN);
path.moveTo(p1.x, p1.y);
path.lineTo(p1.x, p1.y-256);
canvas.drawPath(path, mPaint);
}
// Builds the shared stroke style for all three line segments.
private Paint CreatePaint()
{
Paint mPaint = new Paint();
mPaint.setDither(true);
mPaint.setColor(color);
mPaint.setStyle(Paint.Style.FILL_AND_STROKE);
mPaint.setStrokeJoin(Paint.Join.ROUND);
mPaint.setStrokeCap(Paint.Cap.ROUND);
mPaint.setStrokeWidth(4);
return mPaint;
}
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeInsight.daemon.ChangeLocalityDetector;
import com.intellij.codeInspection.SuppressionUtil;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.EditorMarkupModel;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiDocumentManagerImpl;
import com.intellij.psi.impl.PsiDocumentTransactionListener;
import com.intellij.util.SmartList;
import com.intellij.util.messages.MessageBusConnection;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Map;
/**
 * Listens for PSI tree changes and document commits, translating each change into a
 * "dirty" text range in {@link FileStatusMap} so the highlighting daemon knows what to
 * re-analyze. Changes are queued per Document and flushed when the document is
 * committed or its PSI transaction completes.
 */
public class PsiChangeHandler extends PsiTreeChangeAdapter implements Disposable {
private static final ExtensionPointName<ChangeLocalityDetector> EP_NAME = ExtensionPointName.create("com.intellij.daemon.changeLocalityDetector");
// Per-instance key: keeps flags of several handler instances (projects) from colliding on the same document.
private /*NOT STATIC!!!*/ final Key<Boolean> UPDATE_ON_COMMIT_ENGAGED = Key.create("UPDATE_ON_COMMIT_ENGAGED");
private final Project myProject;
// Pending (element, whitespaceOptimizationAllowed) pairs, queued per document until commit.
private final Map<Document, List<Pair<PsiElement, Boolean>>> changedElements = new THashMap<Document, List<Pair<PsiElement, Boolean>>>();
private final FileStatusMap myFileStatusMap;
public PsiChangeHandler(@NotNull Project project,
@NotNull final PsiDocumentManagerImpl documentManager,
@NotNull EditorFactory editorFactory,
@NotNull MessageBusConnection connection,
@NotNull FileStatusMap fileStatusMap) {
myProject = project;
myFileStatusMap = fileStatusMap;
// On the first edit of a document with cached PSI, arrange (once — guarded by the
// UPDATE_ON_COMMIT_ENGAGED flag) for queued changes to be flushed when it is committed.
editorFactory.getEventMulticaster().addDocumentListener(new DocumentAdapter() {
@Override
public void beforeDocumentChange(DocumentEvent e) {
final Document document = e.getDocument();
if (documentManager.getSynchronizer().isInSynchronization(document)) return;
if (documentManager.getCachedPsiFile(document) == null) return;
if (document.getUserData(UPDATE_ON_COMMIT_ENGAGED) == null) {
document.putUserData(UPDATE_ON_COMMIT_ENGAGED, Boolean.TRUE);
documentManager.addRunOnCommit(document, new Runnable() {
public void run() {
updateChangesForDocument(document);
// Reset the flag so the next edit re-arms the commit hook.
document.putUserData(UPDATE_ON_COMMIT_ENGAGED, null);
}
});
}
}
}, this);
// Also flush queued changes when a PSI-document transaction completes.
connection.subscribe(PsiDocumentTransactionListener.TOPIC, new PsiDocumentTransactionListener() {
public void transactionStarted(final Document doc, final PsiFile file) {
}
public void transactionCompleted(final Document doc, final PsiFile file) {
updateChangesForDocument(doc);
}
});
}
// Listeners registered above are disposed via the Disposable parent; nothing extra to clean up.
public void dispose() {
}
// Drains the queued changes for the document, marking a dirty scope for each.
private void updateChangesForDocument(@NotNull Document document) {
if (DaemonListeners.isUnderIgnoredAction(null)) return;
List<Pair<PsiElement, Boolean>> toUpdate = changedElements.get(document);
if (toUpdate != null) {
for (Pair<PsiElement, Boolean> changedElement : toUpdate) {
PsiElement element = changedElement.getFirst();
Boolean whiteSpaceOptimizationAllowed = changedElement.getSecond();
updateByChange(element, whiteSpaceOptimizationAllowed);
}
changedElements.remove(document);
}
}
public void childAdded(PsiTreeChangeEvent event) {
queueElement(event.getParent(), true, event);
}
public void childRemoved(PsiTreeChangeEvent event) {
queueElement(event.getParent(), true, event);
}
// Whitespace optimization is only allowed when the replaced child kept its concrete class.
public void childReplaced(PsiTreeChangeEvent event) {
queueElement(event.getNewChild(), typesEqual(event.getNewChild(), event.getOldChild()), event);
}
private static boolean typesEqual(final PsiElement newChild, final PsiElement oldChild) {
return newChild != null && oldChild != null && newChild.getClass() == oldChild.getClass();
}
public void childrenChanged(PsiTreeChangeEvent event) {
queueElement(event.getParent(), true, event);
}
// A move dirties both the old and the new parent scopes.
public void beforeChildMovement(PsiTreeChangeEvent event) {
queueElement(event.getOldParent(), true, event);
queueElement(event.getNewParent(), true, event);
}
public void beforeChildrenChange(PsiTreeChangeEvent event) {
// this event sent always before every PSI change, even not significant one (like after quick typing/backspacing char)
// mark file dirty just in case
PsiFile psiFile = event.getFile();
if (psiFile != null) {
myFileStatusMap.markFileScopeDirtyDefensively(psiFile);
}
}
// Any property change other than writability invalidates everything.
public void propertyChanged(PsiTreeChangeEvent event) {
String propertyName = event.getPropertyName();
if (!propertyName.equals(PsiTreeChangeEvent.PROP_WRITABLE)) {
myFileStatusMap.markAllFilesDirty();
}
}
// Records the changed element against its document; falls back to marking all files
// dirty when the containing file (and hence the precise scope) cannot be determined.
private void queueElement(PsiElement child, final boolean whitespaceOptimizationAllowed, PsiTreeChangeEvent event) {
PsiFile file = event.getFile();
if (file == null) file = child.getContainingFile();
if (file == null) {
myFileStatusMap.markAllFilesDirty();
return;
}
if (!child.isValid()) return;
Document document = PsiDocumentManager.getInstance(myProject).getCachedDocument(file);
if (document != null) {
List<Pair<PsiElement, Boolean>> toUpdate = changedElements.get(document);
if (toUpdate == null) {
toUpdate = new SmartList<Pair<PsiElement, Boolean>>();
changedElements.put(document, toUpdate);
}
toUpdate.add(Pair.create(child, whitespaceOptimizationAllowed));
}
}
// Computes and marks the dirty scope for one queued change. Escalates from the smallest
// applicable scope (whitespace/comment range) up to the whole file set when necessary.
private void updateByChange(PsiElement child, final boolean whitespaceOptimizationAllowed) {
// Repaint the error stripe asynchronously; skipped in unit-test mode where there is no UI.
final Editor editor = FileEditorManager.getInstance(myProject).getSelectedTextEditor();
Application application = ApplicationManager.getApplication();
if (editor != null && !application.isUnitTestMode()) {
application.invokeLater(new Runnable() {
public void run() {
if (myProject.isDisposed()) return;
EditorMarkupModel markupModel = (EditorMarkupModel)editor.getMarkupModel();
markupModel.setErrorStripeRenderer(markupModel.getErrorStripeRenderer());
}
}, ModalityState.stateForComponent(editor.getComponent()));
}
PsiFile file;
try {
file = child.getContainingFile();
}
catch (PsiInvalidElementAccessException e) {
// Element died between queueing and processing: be conservative.
myFileStatusMap.markAllFilesDirty();
return;
}
if (file == null || file instanceof PsiCompiledElement) {
myFileStatusMap.markAllFilesDirty();
return;
}
Document document = PsiDocumentManager.getInstance(myProject).getCachedDocument(file);
if (document == null) return;
int fileLength = file.getTextLength();
if (!file.getViewProvider().isPhysical()) {
myFileStatusMap.markFileScopeDirty(document, new TextRange(0, fileLength), fileLength);
return;
}
// optimization
if (whitespaceOptimizationAllowed && UpdateHighlightersUtil.isWhitespaceOptimizationAllowed(document)) {
// Pure whitespace/comment edits only dirty their own range — unless the comment
// might be a suppression directive, which can affect inspections elsewhere.
if (child instanceof PsiWhiteSpace ||
child instanceof PsiComment && !child.getText().contains(SuppressionUtil.SUPPRESS_INSPECTIONS_TAG_NAME)) {
myFileStatusMap.markFileScopeDirty(document, child.getTextRange(), fileLength);
return;
}
}
// Walk up the tree until a ChangeLocalityDetector claims a scope; reaching the
// file/directory level means no detector applied, so everything is dirtied.
PsiElement element = child;
while (true) {
if (element instanceof PsiFile || element instanceof PsiDirectory) {
myFileStatusMap.markAllFilesDirty();
return;
}
final PsiElement scope = getChangeHighlightingScope(element);
if (scope != null) {
myFileStatusMap.markFileScopeDirty(document, scope.getTextRange(), fileLength);
return;
}
element = element.getParent();
}
}
// Asks each registered ChangeLocalityDetector extension for a dirty scope; first hit wins.
@Nullable
private static PsiElement getChangeHighlightingScope(PsiElement element) {
for (ChangeLocalityDetector detector : Extensions.getExtensions(EP_NAME)) {
final PsiElement scope = detector.getChangeHighlightingDirtyScopeFor(element);
if (scope != null) return scope;
}
return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gobblin.data.management.copy.hive;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import javax.annotation.Nonnull;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.reflect.ConstructorUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import gobblin.config.client.ConfigClient;
import gobblin.config.client.ConfigClientCache;
import gobblin.config.client.ConfigClientUtils;
import gobblin.config.client.api.ConfigStoreFactoryDoesNotExistsException;
import gobblin.config.client.api.VersionStabilityPolicy;
import gobblin.config.store.api.ConfigStoreCreationException;
import gobblin.configuration.ConfigurationKeys;
import gobblin.data.management.hive.HiveConfigClientUtils;
import gobblin.dataset.IterableDatasetFinder;
import gobblin.hive.HiveMetastoreClientPool;
import gobblin.metrics.event.EventSubmitter;
import gobblin.metrics.event.sla.SlaEventSubmitter;
import gobblin.util.AutoReturnableObject;
import gobblin.util.ConfigUtils;
/**
* Finds {@link HiveDataset}s. Will look for tables in a database using a {@link WhitelistBlacklist},
* and creates a {@link HiveDataset} for each one.
*/
@Slf4j
public class HiveDatasetFinder implements IterableDatasetFinder<HiveDataset> {
public static final String HIVE_DATASET_PREFIX = "hive.dataset";
public static final String HIVE_METASTORE_URI_KEY = HIVE_DATASET_PREFIX + ".hive.metastore.uri";
public static final String DB_KEY = HIVE_DATASET_PREFIX + ".database";
public static final String TABLE_PATTERN_KEY = HIVE_DATASET_PREFIX + ".table.pattern";
public static final String DEFAULT_TABLE_PATTERN = "*";
/*
* By setting the prefix, only config keys with this prefix will be used to build a HiveDataset.
* By passing scoped configurations the same config keys can be used in different contexts.
*
* E.g
* 1. For CopySource, prefix is gobblin.dataset.copy
* 2. For avro to Orc conversion, prefix is hive.dataset.conversion.avro.orc
* 3. For retention, prefix is gobblin.retention.
*
*/
public static final String HIVE_DATASET_CONFIG_PREFIX_KEY = "hive.dataset.configPrefix";
private static final String DEFAULT_HIVE_DATASET_CONIFG_PREFIX = StringUtils.EMPTY;
public static final String HIVE_DATASET_IS_BLACKLISTED_KEY = "is.blacklisted";
private static final boolean DEFAULT_HIVE_DATASET_IS_BLACKLISTED_KEY = false;
/**
* This is an optional key.
* The fully qualified name of a {@link Function} class which returns the relative uri of a dataset in the config store
*/
public static final String CONFIG_STORE_DATASET_URI_BUILDER_CLASS = "gobblin.config.management.datasetUriBuilderClass";
// Event names
private static final String DATASET_FOUND = "DatasetFound";
private static final String DATASET_ERROR = "DatasetError";
private static final String FAILURE_CONTEXT = "FailureContext";
protected final Properties properties;
protected final HiveMetastoreClientPool clientPool;
protected final FileSystem fs;
private final WhitelistBlacklist whitelistBlacklist;
private final Optional<EventSubmitter> eventSubmitter;
protected Optional<String> configStoreUri;
protected final Function<Table, String> configStoreDatasetUriBuilder;
protected final String datasetConfigPrefix;
protected final ConfigClient configClient;
private final Config jobConfig;
public HiveDatasetFinder(FileSystem fs, Properties properties) throws IOException {
this(fs, properties, createClientPool(properties));
}
protected HiveDatasetFinder(FileSystem fs, Properties properties, ConfigClient configClient) throws IOException {
this(fs, properties, createClientPool(properties), null, configClient);
}
public HiveDatasetFinder(FileSystem fs, Properties properties, EventSubmitter eventSubmitter) throws IOException {
this(fs, properties, createClientPool(properties), eventSubmitter);
}
protected HiveDatasetFinder(FileSystem fs, Properties properties, HiveMetastoreClientPool clientPool)
throws IOException {
this(fs, properties, clientPool, null);
}
protected HiveDatasetFinder(FileSystem fs, Properties properties, HiveMetastoreClientPool clientPool,
EventSubmitter eventSubmitter) throws IOException {
this(fs, properties, clientPool, eventSubmitter, ConfigClientCache.getClient(VersionStabilityPolicy.STRONG_LOCAL_STABILITY));
}
@SuppressWarnings("unchecked")
//SupressWarning justification : CONFIG_STORE_DATASET_URI_BUILDER_CLASS must be of type Function<DbAndTable, String>.
//It is safe to throw RuntimeException otherwise
protected HiveDatasetFinder(FileSystem fs, Properties properties, HiveMetastoreClientPool clientPool,
EventSubmitter eventSubmitter, ConfigClient configClient) throws IOException {
this.properties = properties;
this.clientPool = clientPool;
this.fs = fs;
String whitelistKey = HIVE_DATASET_PREFIX + "." + WhitelistBlacklist.WHITELIST;
Preconditions.checkArgument(properties.containsKey(DB_KEY) || properties.containsKey(whitelistKey),
String.format("Must specify %s or %s.", DB_KEY, whitelistKey));
Config config = ConfigFactory.parseProperties(properties);
if (properties.containsKey(DB_KEY)) {
this.whitelistBlacklist = new WhitelistBlacklist(this.properties.getProperty(DB_KEY) + "."
+ this.properties.getProperty(TABLE_PATTERN_KEY, DEFAULT_TABLE_PATTERN), "");
} else {
this.whitelistBlacklist = new WhitelistBlacklist(config.getConfig(HIVE_DATASET_PREFIX));
}
this.eventSubmitter = Optional.fromNullable(eventSubmitter);
this.configStoreUri = StringUtils.isNotBlank(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI)) ?
Optional.of(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_URI)) : Optional.<String>absent();
if (!Boolean.valueOf(properties.getProperty(ConfigurationKeys.CONFIG_MANAGEMENT_STORE_ENABLED,
ConfigurationKeys.DEFAULT_CONFIG_MANAGEMENT_STORE_ENABLED))) {
this.configStoreUri = Optional.<String>absent();
}
this.datasetConfigPrefix = properties.getProperty(HIVE_DATASET_CONFIG_PREFIX_KEY, DEFAULT_HIVE_DATASET_CONIFG_PREFIX);
this.configClient = configClient;
try {
this.configStoreDatasetUriBuilder =
properties.containsKey(CONFIG_STORE_DATASET_URI_BUILDER_CLASS) ? (Function<Table, String>) ConstructorUtils
.invokeConstructor(Class.forName(properties.getProperty(CONFIG_STORE_DATASET_URI_BUILDER_CLASS)))
: DEFAULT_CONFIG_STORE_DATASET_URI_BUILDER;
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
| ClassNotFoundException e) {
throw new RuntimeException(e);
}
this.jobConfig = ConfigUtils.propertiesToConfig(properties);
}
protected static HiveMetastoreClientPool createClientPool(Properties properties) throws IOException {
return HiveMetastoreClientPool.get(properties,
Optional.fromNullable(properties.getProperty(HIVE_METASTORE_URI_KEY)));
}
/**
* Get all tables in db with given table pattern.
*/
public Collection<DbAndTable> getTables() throws IOException {
List<DbAndTable> tables = Lists.newArrayList();
try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
Iterable<String> databases = Iterables.filter(client.get().getAllDatabases(), new Predicate<String>() {
@Override
public boolean apply(String db) {
return HiveDatasetFinder.this.whitelistBlacklist.acceptDb(db);
}
});
for (final String db : databases) {
Iterable<String> tableNames = Iterables.filter(client.get().getAllTables(db), new Predicate<String>() {
@Override
public boolean apply(String table) {
return HiveDatasetFinder.this.whitelistBlacklist.acceptTable(db, table);
}
});
for (String tableName : tableNames) {
tables.add(new DbAndTable(db, tableName));
}
}
} catch (Exception exc) {
throw new IOException(exc);
}
return tables;
}
@Data
public static class DbAndTable {
private final String db;
private final String table;
@Override
public String toString() {
return String.format("%s.%s", this.db, this.table);
}
}
@Override
public List<HiveDataset> findDatasets() throws IOException {
return Lists.newArrayList(getDatasetsIterator());
}
@Override
public Iterator<HiveDataset> getDatasetsIterator() throws IOException {
return new AbstractIterator<HiveDataset>() {
private Iterator<DbAndTable> tables = getTables().iterator();
@Override
protected HiveDataset computeNext() {
while (this.tables.hasNext()) {
DbAndTable dbAndTable = this.tables.next();
try (AutoReturnableObject<IMetaStoreClient> client = HiveDatasetFinder.this.clientPool.getClient()) {
Table table = client.get().getTable(dbAndTable.getDb(), dbAndTable.getTable());
Config datasetConfig = getDatasetConfig(table);
if (ConfigUtils.getBoolean(datasetConfig, HIVE_DATASET_IS_BLACKLISTED_KEY, DEFAULT_HIVE_DATASET_IS_BLACKLISTED_KEY)) {
continue;
}
if (HiveDatasetFinder.this.eventSubmitter.isPresent()) {
SlaEventSubmitter.builder().datasetUrn(dbAndTable.toString())
.eventSubmitter(HiveDatasetFinder.this.eventSubmitter.get()).eventName(DATASET_FOUND).build().submit();
}
return createHiveDataset(table, datasetConfig);
} catch (Throwable t) {
log.error(String.format("Failed to create HiveDataset for table %s.%s", dbAndTable.getDb(), dbAndTable.getTable()), t);
if (HiveDatasetFinder.this.eventSubmitter.isPresent()) {
SlaEventSubmitter.builder().datasetUrn(dbAndTable.toString())
.eventSubmitter(HiveDatasetFinder.this.eventSubmitter.get()).eventName(DATASET_ERROR)
.additionalMetadata(FAILURE_CONTEXT, t.toString()).build().submit();
}
}
}
return endOfData();
}
};
}
/**
* @deprecated Use {@link #createHiveDataset(Table, Config)} instead
*/
@Deprecated
protected HiveDataset createHiveDataset(Table table) throws IOException {
return createHiveDataset(table, ConfigFactory.empty());
}
protected HiveDataset createHiveDataset(Table table, Config datasetConfig) throws IOException {
return new HiveDataset(this.fs, this.clientPool, new org.apache.hadoop.hive.ql.metadata.Table(table), this.properties, datasetConfig);
}
@Override
public Path commonDatasetRoot() {
return new Path("/");
}
/**
* Gets the {@link Config} for this <code>dbAndTable</code>.
* Cases:
* <ul>
* <li>If {@link #configStoreUri} is available it gets the dataset config from the config store at this uri
* <li>If {@link #configStoreUri} is not available it uses the job config as dataset config
* <li>If {@link #datasetConfigPrefix} is specified, only configs with this prefix is returned
* <li>If {@link #datasetConfigPrefix} is not specified, all configs are returned
* </ul>
* @param table of the dataset to get config
* @return the {@link Config} for <code>dbAndTable</code>
*/
private Config getDatasetConfig(Table table) throws ConfigStoreFactoryDoesNotExistsException,
ConfigStoreCreationException, URISyntaxException {
Config datasetConfig;
Optional<Config> runtimeConfig = ConfigClientUtils.getOptionalRuntimeConfig(properties);
// Config store enabled
if (this.configStoreUri.isPresent()) {
if (runtimeConfig.isPresent()) {
datasetConfig = this.configClient.getConfig(
this.configStoreUri.get() + Path.SEPARATOR + this.configStoreDatasetUriBuilder.apply(table),
runtimeConfig);
} else {
datasetConfig = this.configClient.getConfig(
this.configStoreUri.get() + Path.SEPARATOR + this.configStoreDatasetUriBuilder.apply(table));
}
// If config store is not enabled use job config
} else {
datasetConfig = this.jobConfig;
}
return StringUtils.isBlank(this.datasetConfigPrefix) ? datasetConfig : ConfigUtils.getConfig(datasetConfig,
this.datasetConfigPrefix, ConfigFactory.empty());
}
private static final Function<Table, String> DEFAULT_CONFIG_STORE_DATASET_URI_BUILDER =
new Function<Table, String>() {
@Override
public String apply(@Nonnull Table table) {
return HiveConfigClientUtils.getDatasetUri(table);
}
};
}
| |
/*
* Copyright 2015 Marc Prengemann
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the License.
*/
package de.mprengemann.intellij.plugin.androidicons.forms;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.openapi.vfs.VirtualFile;
import de.mprengemann.intellij.plugin.androidicons.images.IconPack;
import de.mprengemann.intellij.plugin.androidicons.images.ImageInformation;
import de.mprengemann.intellij.plugin.androidicons.images.ImageUtils;
import de.mprengemann.intellij.plugin.androidicons.images.RefactoringTask;
import de.mprengemann.intellij.plugin.androidicons.images.Resolution;
import de.mprengemann.intellij.plugin.androidicons.settings.SettingsHelper;
import de.mprengemann.intellij.plugin.androidicons.util.AndroidResourcesHelper;
import de.mprengemann.intellij.plugin.androidicons.util.ExportNameUtils;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.io.File;
import java.io.FilenameFilter;
import java.util.Arrays;
import java.util.Comparator;
public class AndroidIconsImporter extends DialogWrapper {
private VirtualFile assetRoot;
private Project project;
private JLabel imageContainer;
private TextFieldWithBrowseButton resRoot;
private JComboBox assetSpinner;
private JComboBox colorSpinner;
private JTextField resExportName;
private JCheckBox LDPICheckBox;
private JCheckBox MDPICheckBox;
private JCheckBox HDPICheckBox;
private JCheckBox XHDPICheckBox;
private JCheckBox XXHDPICheckBox;
private JPanel container;
private String assetColor;
private String assetName;
private boolean exportNameChanged = false;
public AndroidIconsImporter(@Nullable final Project project, Module module) {
super(project, true);
this.project = project;
setTitle("Android Icons Importer");
setResizable(false);
AndroidResourcesHelper.initResourceBrowser(project, module, "Select res root", this.resRoot);
assetRoot = SettingsHelper.getAssetPath(IconPack.ANDROID_ICONS);
colorSpinner.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
assetColor = (String) colorSpinner.getSelectedItem();
updateImage();
}
});
AssetSpinnerRenderer renderer = new AssetSpinnerRenderer();
//noinspection GtkPreferredJComboBoxRenderer
assetSpinner.setRenderer(renderer);
assetSpinner.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent event) {
assetName = (String) assetSpinner.getSelectedItem();
updateImage();
}
});
fillComboBoxes();
resExportName.addKeyListener(new KeyAdapter() {
@Override
public void keyTyped(KeyEvent keyEvent) {
super.keyTyped(keyEvent);
if (!exportNameChanged && keyEvent != null && keyEvent.getKeyCode() > -1) {
exportNameChanged = true;
}
}
@Override
public void keyPressed(KeyEvent keyEvent) {
super.keyPressed(keyEvent);
}
@Override
public void keyReleased(KeyEvent keyEvent) {
super.keyReleased(keyEvent);
}
});
imageContainer.addComponentListener(new ComponentAdapter() {
@Override
public void componentResized(ComponentEvent e) {
super.componentResized(e);
updateImage();
}
});
init();
}
private void updateImage() {
if (imageContainer == null) {
return;
}
String path = "/" + assetColor.replace(" ", "_") + "/xxhdpi/ic_action_" + assetName + ".png";
File imageFile = new File(assetRoot.getCanonicalPath() + path);
ImageUtils.updateImage(imageContainer, imageFile);
if (!exportNameChanged) {
resExportName.setText("ic_action_" + assetName);
}
}
private void fillComboBoxes() {
if (this.assetRoot.getCanonicalPath() == null) {
return;
}
File assetRoot = new File(this.assetRoot.getCanonicalPath());
final FilenameFilter systemFileNameFiler = new FilenameFilter() {
@Override
public boolean accept(File file, String s) {
return !s.startsWith(".");
}
};
File[] colorDirs = assetRoot.listFiles(systemFileNameFiler);
Comparator<File> alphabeticalComparator = new Comparator<File>() {
@Override
public int compare(File file1, File file2) {
if (file1 != null && file2 != null) {
return file1.getName().compareTo(file2.getName());
}
return 0;
}
};
Arrays.sort(colorDirs, alphabeticalComparator);
for (File file : colorDirs) {
if (!file.isDirectory()) {
continue;
}
colorSpinner.addItem(file.getName().replace("_", " "));
}
if (colorDirs.length < 1) {
return;
}
File exColorDir = colorDirs[0];
File[] densities = exColorDir.listFiles(systemFileNameFiler);
if (densities == null || densities.length < 1) {
return;
}
File exDensity = densities[0];
File[] assets = exDensity.listFiles(systemFileNameFiler);
if (assets == null || assets.length < 1) {
return;
}
Arrays.sort(assets, alphabeticalComparator);
for (File asset : assets) {
if (asset.isDirectory()) {
continue;
}
String extension = asset.getName().substring(asset.getName().lastIndexOf(".") + 1);
if (!extension.equalsIgnoreCase("png")) {
continue;
}
assetSpinner.addItem(ExportNameUtils.getExportNameFromFilename(asset.getName()).replace("ic_action_", ""));
}
assetColor = (String) colorSpinner.getSelectedItem();
assetName = (String) assetSpinner.getSelectedItem();
}
@Override
protected void doOKAction() {
importIcons();
super.doOKAction();
}
private void importIcons() {
RefactoringTask task = new RefactoringTask(project);
ImageInformation baseInformation = ImageInformation.newBuilder()
.setExportName(resExportName.getText())
.setExportPath(resRoot.getText())
.build(project);
task.addImage(getImageInformation(baseInformation, Resolution.LDPI, LDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.MDPI, MDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.HDPI, HDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.XHDPI, XHDPICheckBox));
task.addImage(getImageInformation(baseInformation, Resolution.XXHDPI, XXHDPICheckBox));
ProgressManager.getInstance().run(task);
}
private ImageInformation getImageInformation(ImageInformation baseInformation,
Resolution resolution,
JCheckBox checkBox) {
if (!checkBox.isSelected()) {
return null;
}
String fromName = "ic_action_" + assetName + ".png";
File source = new File(assetRoot.getCanonicalPath() + "/" + assetColor.replace(" ", "_") + "/" + resolution.toString() + "/" + fromName);
return ImageInformation.newBuilder(baseInformation)
.setImageFile(source)
.setResolution(resolution)
.build(project);
}
@Nullable
@Override
protected ValidationInfo doValidate() {
if (StringUtils.isEmpty(resRoot.getText().trim())) {
return new ValidationInfo("Please select the resources root.", resRoot);
}
if (StringUtils.isEmpty(resExportName.getText().trim())) {
return new ValidationInfo("Please select a name for the drawable.", resExportName);
} else if (!resExportName.getText().matches("[a-z0-9_.]*")) {
return new ValidationInfo(
"Please select a valid name for the drawable. There are just \"[a-z0-9_.]\" allowed.",
resExportName);
}
return null;
}
@Nullable
@Override
protected JComponent createCenterPanel() {
return container;
}
private class AssetSpinnerRenderer extends DefaultListCellRenderer {
@Override
public Component getListCellRendererComponent(JList jList, Object o, int i, boolean b, boolean b2) {
JLabel label = (JLabel) super.getListCellRendererComponent(jList, o, i, b, b2);
if (label != null) {
String item = (String) assetSpinner.getItemAt(i);
String path = "/black/mdpi/ic_action_" + item + ".png";
File imageFile = new File(assetRoot.getCanonicalPath() + path);
if (imageFile.exists()) {
label.setIcon(new ImageIcon(imageFile.getAbsolutePath()));
}
}
return label;
}
}
}
| |
/***
* Excerpted from "Language Implementation Patterns",
* published by The Pragmatic Bookshelf.
* Copyrights apply to this code. It may not be used to create training material,
* courses, books, articles, and the like. Contact us if you are in doubt.
* We make no guarantees that this code is fit for any purpose.
* Visit http://www.pragmaticprogrammer.com/titles/tpdsl for more book information.
***/
// $ANTLR 3.1.4-SNAPSHOT Apr 22, 2009 17:39:25 Reduce.g 2009-05-13 17:21:46
import org.antlr.runtime.*;
import org.antlr.runtime.tree.*;import java.util.Stack;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
public class Reduce extends TreeRewriter {
// Token display names and token-type constants generated by ANTLR from Reduce.g.
// Do not edit by hand; regenerate from the grammar instead.
public static final String[] tokenNames = new String[] {
"<invalid>", "<EOR>", "<DOWN>", "<UP>", "MULT", "SHIFT", "VEC", "ID", "INT", "WS", "'='", "'print'", "'+'", "'.'", "'['", "','", "']'", "'('", "')'"
};
public static final int VEC=6;
public static final int WS=9;
// T__NN constants are ANTLR's auto-assigned types for implicit literal tokens ('=' ... ')').
public static final int T__16=16;
public static final int T__15=15;
public static final int T__18=18;
public static final int T__17=17;
public static final int T__12=12;
public static final int T__11=11;
public static final int T__14=14;
public static final int T__13=13;
public static final int T__10=10;
public static final int SHIFT=5;
public static final int INT=8;
public static final int MULT=4;
public static final int ID=7;
public static final int EOF=-1;
// delegates
// delegators
// Convenience constructor (generated): walks the given tree stream with a fresh recognizer state.
public Reduce(TreeNodeStream input) {
this(input, new RecognizerSharedState());
}
// Generated constructor: delegates to the TreeRewriter base with an explicit shared state.
public Reduce(TreeNodeStream input, RecognizerSharedState state) {
super(input, state);
}
// Adaptor used to create and rewrite AST nodes; defaults to CommonTreeAdaptor.
protected TreeAdaptor adaptor = new CommonTreeAdaptor();
// Lets callers substitute a custom TreeAdaptor before walking.
public void setTreeAdaptor(TreeAdaptor adaptor) {
this.adaptor = adaptor;
}
// Returns the adaptor currently used for node creation/rewriting.
public TreeAdaptor getTreeAdaptor() {
return adaptor;
}
// Exposes the generated token-name table for error reporting.
public String[] getTokenNames() { return Reduce.tokenNames; }
// Grammar file this recognizer was generated from.
public String getGrammarFileName() { return "Reduce.g"; }
// Generated return scope for rule "bottomup": carries the (possibly rewritten) subtree.
public static class bottomup_return extends TreeRuleReturnScope {
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "bottomup"
// Reduce.g:11:1: bottomup : ( xPlusx | multBy2 | combineShifts );
// Generated entry rule for the bottom-up rewrite pass: dispatches on one token of
// lookahead to one of the three rewrite rules. Do not edit by hand; regenerate from Reduce.g.
public final Reduce.bottomup_return bottomup() throws RecognitionException {
Reduce.bottomup_return retval = new Reduce.bottomup_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
CommonTree _first_0 = null;
CommonTree _last = null;
Reduce.xPlusx_return xPlusx1 = null;
Reduce.multBy2_return multBy22 = null;
Reduce.combineShifts_return combineShifts3 = null;
try {
// Reduce.g:12:5: ( xPlusx | multBy2 | combineShifts )
// Alternative prediction: token type 12 is '+', per the generated tokenNames table.
int alt1=3;
switch ( input.LA(1) ) {
case 12:
{
alt1=1;
}
break;
case MULT:
{
alt1=2;
}
break;
case SHIFT:
{
alt1=3;
}
break;
default:
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 1, 0, input);
throw nvae;
}
switch (alt1) {
case 1 :
// Reduce.g:12:8: xPlusx
{
_last = (CommonTree)input.LT(1);
pushFollow(FOLLOW_xPlusx_in_bottomup109);
xPlusx1=xPlusx();
state._fsp--;
if (state.failed) return retval;
if ( state.backtracking==1 )
if ( _first_0==null ) _first_0 = xPlusx1.tree;
if ( state.backtracking==1 ) {
retval.tree = (CommonTree)_first_0;
if ( adaptor.getParent(retval.tree)!=null && adaptor.isNil( adaptor.getParent(retval.tree) ) )
retval.tree = (CommonTree)adaptor.getParent(retval.tree);}
}
break;
case 2 :
// Reduce.g:13:8: multBy2
{
_last = (CommonTree)input.LT(1);
pushFollow(FOLLOW_multBy2_in_bottomup118);
multBy22=multBy2();
state._fsp--;
if (state.failed) return retval;
if ( state.backtracking==1 )
if ( _first_0==null ) _first_0 = multBy22.tree;
if ( state.backtracking==1 ) {
retval.tree = (CommonTree)_first_0;
if ( adaptor.getParent(retval.tree)!=null && adaptor.isNil( adaptor.getParent(retval.tree) ) )
retval.tree = (CommonTree)adaptor.getParent(retval.tree);}
}
break;
case 3 :
// Reduce.g:14:8: combineShifts
{
_last = (CommonTree)input.LT(1);
pushFollow(FOLLOW_combineShifts_in_bottomup127);
combineShifts3=combineShifts();
state._fsp--;
if (state.failed) return retval;
if ( state.backtracking==1 )
if ( _first_0==null ) _first_0 = combineShifts3.tree;
if ( state.backtracking==1 ) {
retval.tree = (CommonTree)_first_0;
if ( adaptor.getParent(retval.tree)!=null && adaptor.isNil( adaptor.getParent(retval.tree) ) )
retval.tree = (CommonTree)adaptor.getParent(retval.tree);}
}
break;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
}
return retval;
}
// $ANTLR end "bottomup"
// Generated return scope for rule "xPlusx".
public static class xPlusx_return extends TreeRuleReturnScope {
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "xPlusx"
// Reduce.g:20:1: xPlusx : ^( '+' i= INT j= INT {...}?) -> ^( MULT[\"*\"] INT[\"2\"] $j) ;
// Generated rewrite rule: when the predicate $i.int==$j.int holds, replaces the
// subtree ^('+' i j) with ^('*' 2 j), i.e. x + x => 2 * x. Regenerate from Reduce.g.
public final Reduce.xPlusx_return xPlusx() throws RecognitionException {
Reduce.xPlusx_return retval = new Reduce.xPlusx_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
CommonTree _first_0 = null;
CommonTree _last = null;
CommonTree i=null;
CommonTree j=null;
CommonTree char_literal4=null;
CommonTree i_tree=null;
CommonTree j_tree=null;
CommonTree char_literal4_tree=null;
RewriteRuleNodeStream stream_INT=new RewriteRuleNodeStream(adaptor,"token INT");
RewriteRuleNodeStream stream_12=new RewriteRuleNodeStream(adaptor,"token 12");
try {
// Reduce.g:20:8: ( ^( '+' i= INT j= INT {...}?) -> ^( MULT[\"*\"] INT[\"2\"] $j) )
// Reduce.g:20:10: ^( '+' i= INT j= INT {...}?)
{
_last = (CommonTree)input.LT(1);
{
CommonTree _save_last_1 = _last;
CommonTree _first_1 = null;
_last = (CommonTree)input.LT(1);
char_literal4=(CommonTree)match(input,12,FOLLOW_12_in_xPlusx144); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_12.add(char_literal4);
if ( state.backtracking==1 )
if ( _first_0==null ) _first_0 = char_literal4;
match(input, Token.DOWN, null); if (state.failed) return retval;
_last = (CommonTree)input.LT(1);
i=(CommonTree)match(input,INT,FOLLOW_INT_in_xPlusx148); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_INT.add(i);
_last = (CommonTree)input.LT(1);
j=(CommonTree)match(input,INT,FOLLOW_INT_in_xPlusx152); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_INT.add(j);
// Semantic predicate from the grammar: both INT operands must have the same value.
if ( !(((i!=null?Integer.valueOf(i.getText()):0)==(j!=null?Integer.valueOf(j.getText()):0))) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
throw new FailedPredicateException(input, "xPlusx", "$i.int==$j.int");
}
match(input, Token.UP, null); if (state.failed) return retval;_last = _save_last_1;
}
// AST REWRITE
// elements: j, INT
// token labels: j
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
if ( state.backtracking==1 ) {
retval.tree = root_0;
RewriteRuleNodeStream stream_j=new RewriteRuleNodeStream(adaptor,"token j",j);
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 20:47: -> ^( MULT[\"*\"] INT[\"2\"] $j)
{
// Reduce.g:20:50: ^( MULT[\"*\"] INT[\"2\"] $j)
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot((CommonTree)adaptor.create(MULT, "*"), root_1);
adaptor.addChild(root_1, (CommonTree)adaptor.create(INT, "2"));
adaptor.addChild(root_1, stream_j.nextNode());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
// Splice the rewritten subtree back into the parent in place of the matched span.
input.replaceChildren(adaptor.getParent(retval.start),
adaptor.getChildIndex(retval.start),
adaptor.getChildIndex(_last),
retval.tree);}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
}
return retval;
}
// $ANTLR end "xPlusx"
// Generated return scope for rule "multBy2".
public static class multBy2_return extends TreeRuleReturnScope {
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "multBy2"
// Reduce.g:25:1: multBy2 : ( ^( '*' x= INT {...}?y= . ) -> ^( SHIFT[\"<<\"] $y INT[\"1\"] ) | ^( '*' a= . b= INT {...}?) -> ^( SHIFT[\"<<\"] $a INT[\"1\"] ) );
// Generated rewrite rule: replaces 2 * y (alt 1) or a * 2 (alt 2) with a left shift
// by one, ^('<<' operand 1). Alternative selection uses up to 6 tokens of lookahead
// plus the syntactic predicate synpred3_Reduce(). Regenerate from Reduce.g; do not hand-edit.
public final Reduce.multBy2_return multBy2() throws RecognitionException {
Reduce.multBy2_return retval = new Reduce.multBy2_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
CommonTree _first_0 = null;
CommonTree _last = null;
CommonTree x=null;
CommonTree b=null;
CommonTree char_literal5=null;
CommonTree char_literal6=null;
CommonTree y=null;
CommonTree a=null;
CommonTree x_tree=null;
CommonTree b_tree=null;
CommonTree char_literal5_tree=null;
CommonTree char_literal6_tree=null;
CommonTree y_tree=null;
CommonTree a_tree=null;
RewriteRuleNodeStream stream_INT=new RewriteRuleNodeStream(adaptor,"token INT");
RewriteRuleNodeStream stream_MULT=new RewriteRuleNodeStream(adaptor,"token MULT");
try {
// Reduce.g:26:5: ( ^( '*' x= INT {...}?y= . ) -> ^( SHIFT[\"<<\"] $y INT[\"1\"] ) | ^( '*' a= . b= INT {...}?) -> ^( SHIFT[\"<<\"] $a INT[\"1\"] ) )
int alt2=2;
int LA2_0 = input.LA(1);
if ( (LA2_0==MULT) ) {
int LA2_1 = input.LA(2);
if ( (LA2_1==DOWN) ) {
int LA2_2 = input.LA(3);
if ( (LA2_2==INT) ) {
switch ( input.LA(4) ) {
case INT:
{
int LA2_5 = input.LA(5);
if ( (LA2_5==DOWN) ) {
alt2=1;
}
else if ( (LA2_5==UP) ) {
int LA2_7 = input.LA(6);
if ( (synpred3_Reduce()) ) {
alt2=1;
}
else if ( (true) ) {
alt2=2;
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 2, 7, input);
throw nvae;
}
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 2, 5, input);
throw nvae;
}
}
break;
case MULT:
case SHIFT:
case VEC:
case ID:
case WS:
case 10:
case 11:
case 12:
case 13:
case 14:
case 15:
case 16:
case 17:
case 18:
{
alt2=1;
}
break;
case DOWN:
{
alt2=2;
}
break;
default:
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 2, 3, input);
throw nvae;
}
}
else if ( ((LA2_2>=MULT && LA2_2<=ID)||(LA2_2>=WS && LA2_2<=18)) ) {
alt2=2;
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 2, 2, input);
throw nvae;
}
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 2, 1, input);
throw nvae;
}
}
else {
if (state.backtracking>0) {state.failed=true; return retval;}
NoViableAltException nvae =
new NoViableAltException("", 2, 0, input);
throw nvae;
}
switch (alt2) {
case 1 :
// Reduce.g:26:9: ^( '*' x= INT {...}?y= . )
{
_last = (CommonTree)input.LT(1);
{
CommonTree _save_last_1 = _last;
CommonTree _first_1 = null;
_last = (CommonTree)input.LT(1);
char_literal5=(CommonTree)match(input,MULT,FOLLOW_MULT_in_multBy2187); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_MULT.add(char_literal5);
if ( state.backtracking==1 )
if ( _first_0==null ) _first_0 = char_literal5;
match(input, Token.DOWN, null); if (state.failed) return retval;
_last = (CommonTree)input.LT(1);
x=(CommonTree)match(input,INT,FOLLOW_INT_in_multBy2191); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_INT.add(x);
// Predicate from the grammar: the left INT operand must be the literal 2.
if ( !(((x!=null?Integer.valueOf(x.getText()):0)==2)) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
throw new FailedPredicateException(input, "multBy2", "$x.int==2");
}
_last = (CommonTree)input.LT(1);
y=(CommonTree)input.LT(1);
matchAny(input); if (state.failed) return retval;
if ( state.backtracking==1 )
if ( _first_1==null ) _first_1 = y;
match(input, Token.UP, null); if (state.failed) return retval;_last = _save_last_1;
}
// AST REWRITE
// elements: INT, y
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels: y
if ( state.backtracking==1 ) {
retval.tree = root_0;
RewriteRuleSubtreeStream stream_y=new RewriteRuleSubtreeStream(adaptor,"wildcard y",y);
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 26:39: -> ^( SHIFT[\"<<\"] $y INT[\"1\"] )
{
// Reduce.g:26:42: ^( SHIFT[\"<<\"] $y INT[\"1\"] )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot((CommonTree)adaptor.create(SHIFT, "<<"), root_1);
adaptor.addChild(root_1, stream_y.nextTree());
adaptor.addChild(root_1, (CommonTree)adaptor.create(INT, "1"));
adaptor.addChild(root_0, root_1);
}
}
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
// Splice the rewritten subtree back into the parent in place of the matched span.
input.replaceChildren(adaptor.getParent(retval.start),
adaptor.getChildIndex(retval.start),
adaptor.getChildIndex(_last),
retval.tree);}
}
break;
case 2 :
// Reduce.g:27:9: ^( '*' a= . b= INT {...}?)
{
_last = (CommonTree)input.LT(1);
{
CommonTree _save_last_1 = _last;
CommonTree _first_1 = null;
_last = (CommonTree)input.LT(1);
char_literal6=(CommonTree)match(input,MULT,FOLLOW_MULT_in_multBy2222); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_MULT.add(char_literal6);
if ( state.backtracking==1 )
if ( _first_0==null ) _first_0 = char_literal6;
match(input, Token.DOWN, null); if (state.failed) return retval;
_last = (CommonTree)input.LT(1);
a=(CommonTree)input.LT(1);
matchAny(input); if (state.failed) return retval;
if ( state.backtracking==1 )
if ( _first_1==null ) _first_1 = a;
_last = (CommonTree)input.LT(1);
b=(CommonTree)match(input,INT,FOLLOW_INT_in_multBy2230); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_INT.add(b);
// Predicate from the grammar: the right INT operand must be the literal 2.
if ( !(((b!=null?Integer.valueOf(b.getText()):0)==2)) ) {
if (state.backtracking>0) {state.failed=true; return retval;}
throw new FailedPredicateException(input, "multBy2", "$b.int==2");
}
match(input, Token.UP, null); if (state.failed) return retval;_last = _save_last_1;
}
// AST REWRITE
// elements: INT, a
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels: a
if ( state.backtracking==1 ) {
retval.tree = root_0;
RewriteRuleSubtreeStream stream_a=new RewriteRuleSubtreeStream(adaptor,"wildcard a",a);
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 27:39: -> ^( SHIFT[\"<<\"] $a INT[\"1\"] )
{
// Reduce.g:27:42: ^( SHIFT[\"<<\"] $a INT[\"1\"] )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot((CommonTree)adaptor.create(SHIFT, "<<"), root_1);
adaptor.addChild(root_1, stream_a.nextTree());
adaptor.addChild(root_1, (CommonTree)adaptor.create(INT, "1"));
adaptor.addChild(root_0, root_1);
}
}
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
// Splice the rewritten subtree back into the parent in place of the matched span.
input.replaceChildren(adaptor.getParent(retval.start),
adaptor.getChildIndex(retval.start),
adaptor.getChildIndex(_last),
retval.tree);}
}
break;
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
}
return retval;
}
// $ANTLR end "multBy2"
// Return scope for the "combineShifts" tree rule: carries the rewritten
// subtree produced by the rule. ANTLR-generated; regenerate from Reduce.g
// instead of editing by hand.
public static class combineShifts_return extends TreeRuleReturnScope {
// Rewritten AST for the matched region (null until the rewrite runs).
CommonTree tree;
public Object getTree() { return tree; }
};
// $ANTLR start "combineShifts"
// Reduce.g:32:1: combineShifts : ^( SHIFT ^( SHIFT e= . n= INT ) m= INT ) -> ^( SHIFT[\"<<\"] $e INT[String.valueOf($n.int+$m.int)] ) ;
// Tree-rewrite rule "combineShifts": matches a nested shift ^( SHIFT ^( SHIFT e n ) m )
// and collapses it into a single ^( SHIFT e n+m ), i.e. (e << n) << m ==> e << (n+m).
// ANTLR-generated code; regenerate from Reduce.g rather than editing by hand.
public final Reduce.combineShifts_return combineShifts() throws RecognitionException {
Reduce.combineShifts_return retval = new Reduce.combineShifts_return();
retval.start = input.LT(1);
CommonTree root_0 = null;
CommonTree _first_0 = null;
CommonTree _last = null;
// n, m: the two INT shift-amount nodes; e: wildcard-matched shifted expression.
CommonTree n=null;
CommonTree m=null;
CommonTree SHIFT7=null;
CommonTree SHIFT8=null;
CommonTree e=null;
CommonTree n_tree=null;
CommonTree m_tree=null;
CommonTree SHIFT7_tree=null;
CommonTree SHIFT8_tree=null;
CommonTree e_tree=null;
// Streams collect matched nodes so the rewrite section below can reuse them.
RewriteRuleNodeStream stream_SHIFT=new RewriteRuleNodeStream(adaptor,"token SHIFT");
RewriteRuleNodeStream stream_INT=new RewriteRuleNodeStream(adaptor,"token INT");
try {
// Reduce.g:33:5: ( ^( SHIFT ^( SHIFT e= . n= INT ) m= INT ) -> ^( SHIFT[\"<<\"] $e INT[String.valueOf($n.int+$m.int)] ) )
// Reduce.g:33:8: ^( SHIFT ^( SHIFT e= . n= INT ) m= INT )
{
// --- match the outer ^( SHIFT ... m ) subtree ---
_last = (CommonTree)input.LT(1);
{
CommonTree _save_last_1 = _last;
CommonTree _first_1 = null;
_last = (CommonTree)input.LT(1);
SHIFT7=(CommonTree)match(input,SHIFT,FOLLOW_SHIFT_in_combineShifts268); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_SHIFT.add(SHIFT7);
if ( state.backtracking==1 )
if ( _first_0==null ) _first_0 = SHIFT7;
match(input, Token.DOWN, null); if (state.failed) return retval;
// --- match the inner ^( SHIFT e n ) subtree ---
_last = (CommonTree)input.LT(1);
{
CommonTree _save_last_2 = _last;
CommonTree _first_2 = null;
_last = (CommonTree)input.LT(1);
SHIFT8=(CommonTree)match(input,SHIFT,FOLLOW_SHIFT_in_combineShifts271); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_SHIFT.add(SHIFT8);
if ( state.backtracking==1 )
if ( _first_1==null ) _first_1 = SHIFT8;
match(input, Token.DOWN, null); if (state.failed) return retval;
// e is a wildcard: any single subtree is accepted as the shifted expression.
_last = (CommonTree)input.LT(1);
e=(CommonTree)input.LT(1);
matchAny(input); if (state.failed) return retval;
if ( state.backtracking==1 )
if ( _first_2==null ) _first_2 = e;
_last = (CommonTree)input.LT(1);
n=(CommonTree)match(input,INT,FOLLOW_INT_in_combineShifts279); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_INT.add(n);
match(input, Token.UP, null); if (state.failed) return retval;_last = _save_last_2;
}
_last = (CommonTree)input.LT(1);
m=(CommonTree)match(input,INT,FOLLOW_INT_in_combineShifts284); if (state.failed) return retval;
if ( state.backtracking==1 ) stream_INT.add(m);
match(input, Token.UP, null); if (state.failed) return retval;_last = _save_last_1;
}
// AST REWRITE
// elements: SHIFT, INT, e
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels: e
// Rewrite only runs at backtracking level 1 (filter mode), i.e. when the
// match is committed rather than speculative.
if ( state.backtracking==1 ) {
retval.tree = root_0;
RewriteRuleSubtreeStream stream_e=new RewriteRuleSubtreeStream(adaptor,"wildcard e",e);
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (CommonTree)adaptor.nil();
// 34:8: -> ^( SHIFT[\"<<\"] $e INT[String.valueOf($n.int+$m.int)] )
{
// Reduce.g:34:11: ^( SHIFT[\"<<\"] $e INT[String.valueOf($n.int+$m.int)] )
{
CommonTree root_1 = (CommonTree)adaptor.nil();
root_1 = (CommonTree)adaptor.becomeRoot((CommonTree)adaptor.create(SHIFT, "<<"), root_1);
adaptor.addChild(root_1, stream_e.nextTree());
// New INT node holds the sum of both shift amounts (n + m).
adaptor.addChild(root_1, (CommonTree)adaptor.create(INT, String.valueOf((n!=null?Integer.valueOf(n.getText()):0)+(m!=null?Integer.valueOf(m.getText()):0))));
adaptor.addChild(root_0, root_1);
}
}
retval.tree = (CommonTree)adaptor.rulePostProcessing(root_0);
// Splice the rewritten subtree back into the original tree in place.
input.replaceChildren(adaptor.getParent(retval.start),
adaptor.getChildIndex(retval.start),
adaptor.getChildIndex(_last),
retval.tree);}
}
}
catch (RecognitionException re) {
reportError(re);
recover(input,re);
}
finally {
}
return retval;
}
// $ANTLR end "combineShifts"
// $ANTLR start synpred3_Reduce
// Syntactic-predicate fragment for the multBy2 rule: probes whether the
// upcoming subtree has the shape ^( '*' INT . ) with the left INT equal to 2.
// It consumes input while matching; the caller (synpred3_Reduce) marks and
// rewinds the stream around this call. ANTLR-generated; do not hand-edit.
public final void synpred3_Reduce_fragment() throws RecognitionException {
CommonTree x=null;
CommonTree y=null;
// Reduce.g:26:9: ( ^( '*' x= INT {...}?y= . ) )
// Reduce.g:26:9: ^( '*' x= INT {...}?y= . )
{
match(input,MULT,FOLLOW_MULT_in_synpred3_Reduce187); if (state.failed) return ;
match(input, Token.DOWN, null); if (state.failed) return ;
x=(CommonTree)match(input,INT,FOLLOW_INT_in_synpred3_Reduce191); if (state.failed) return ;
// Semantic predicate $x.int==2: while backtracking, record failure instead of throwing.
if ( !(((x!=null?Integer.valueOf(x.getText()):0)==2)) ) {
if (state.backtracking>0) {state.failed=true; return ;}
throw new FailedPredicateException(input, "synpred3_Reduce", "$x.int==2");
}
// y is a wildcard: any single subtree is accepted as the right operand.
y=(CommonTree)input.LT(1);
matchAny(input); if (state.failed) return ;
match(input, Token.UP, null); if (state.failed) return ;
}
}
// $ANTLR end synpred3_Reduce
// Delegated rules
// Backtracking driver for syntactic predicate 3: speculatively runs the
// predicate fragment against the tree stream, then restores both the stream
// position and the parser failure state so the probe is side-effect free.
// Returns true when the fragment matched.
public final boolean synpred3_Reduce() {
    state.backtracking++;
    final int checkpoint = input.mark();
    try {
        synpred3_Reduce_fragment(); // while backtracking the fragment sets state.failed instead of throwing
    } catch (RecognitionException re) {
        System.err.println("impossible: "+re);
    }
    final boolean matched = !state.failed;
    input.rewind(checkpoint);
    state.backtracking--;
    state.failed = false;
    return matched;
}
// FOLLOW sets referenced by the generated rule methods above. Each bitmask
// encodes the token types that may legally follow the referenced element
// within its rule. ANTLR-generated; values change whenever Reduce.g is
// regenerated — never edit by hand.
public static final BitSet FOLLOW_xPlusx_in_bottomup109 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_multBy2_in_bottomup118 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_combineShifts_in_bottomup127 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_12_in_xPlusx144 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_INT_in_xPlusx148 = new BitSet(new long[]{0x0000000000000100L});
public static final BitSet FOLLOW_INT_in_xPlusx152 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_MULT_in_multBy2187 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_INT_in_multBy2191 = new BitSet(new long[]{0x000000000007FFF0L});
public static final BitSet FOLLOW_MULT_in_multBy2222 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_INT_in_multBy2230 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_SHIFT_in_combineShifts268 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_SHIFT_in_combineShifts271 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_INT_in_combineShifts279 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_INT_in_combineShifts284 = new BitSet(new long[]{0x0000000000000008L});
public static final BitSet FOLLOW_MULT_in_synpred3_Reduce187 = new BitSet(new long[]{0x0000000000000004L});
public static final BitSet FOLLOW_INT_in_synpred3_Reduce191 = new BitSet(new long[]{0x000000000007FFF0L});
}
| |
/*
* Copyright (c) 2012 Jan Kotek
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mxixm.mapdb;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
/**
* Binding is simple yet powerful way to keep secondary collection synchronized with primary collection.
* Primary collection provides notification on updates and secondary collection is modified accordingly.
* This way MapDB provides secondary indexes, values and keys. It also supports less usual scenarios such
* as histograms, inverse lookup index (on maps), group counters and so on.
*
* There are two things to keep on mind when using binding:
*
* * Binding is not persistent, so it needs to be restored every time store is reopened.
* If you modify primary collection before binding is restored, secondary collection does not get updated and becomes
* inconsistent.
*
* * If secondary collection is empty, binding will recreate its content based on primary collection.
 * If there is even a single item in the secondary collection, binding assumes it is consistent and leaves it as is.
*
* Any thread-safe collection can be used as secondary (not just collections provided by MapDB).
* This gives great flexibility for modeling
* and scaling your data. For example primary data can be stored in durable DB with transactions and large secondary
* indexes may be stored in other faster non-durable DB. Or primary collection may be stored on disk and smaller
* secondary index (such as category counters) can be stored in memory for faster lookups. Also you may use
* ordinary `java.util.*` collections (if they are thread safe) to get additional speed.
*
* There are many [code examples](https://github.com/jankotek/MapDB/tree/master/src/test/java/examples)
* how Collection Binding can be used.
*
* NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
* and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
*
*
* @author Jan Kotek
*/
public final class Bind {

    /** Static utility holder; not instantiable. */
    private Bind(){}

    /**
     * Listener called when a {@code Map} is modified.
     *
     * @param <K> key type in map
     * @param <V> value type in map
     */
    public interface MapListener<K,V>{
        /**
         * Callback method called after {@code Map} was modified.
         * It is called on insert, update or delete.
         *
         * MapDB collections do not support null keys or values,
         * so a null parameter may then be used to indicate the operation:
         *
         * * insert: {@code oldVal == null} and {@code newVal != null}
         * * update: both {@code oldVal} and {@code newVal} are non-null
         * * delete: {@code oldVal != null} and {@code newVal == null}
         *
         * @param key key in map
         * @param oldVal old value in map (if any, null on inserts)
         * @param newVal new value in map (if any, null on deletes)
         */
        void update(K key, V oldVal, V newVal);
    }

    /**
     * Primary Maps must provide notifications when they are modified.
     * So Primary Maps must implement this interface to allow registering callback listeners.
     *
     * @param <K> key type in map
     * @param <V> value type in map
     */
    public interface MapWithModificationListener<K,V> extends Map<K,V> {
        /**
         * Add new modification listener notified when Map has been updated
         * @param listener callback interface notified when map changes
         */
        public void modificationListenerAdd(MapListener<K, V> listener);

        /**
         * Remove registered notification listener
         *
         * @param listener callback interface notified when map changes
         */
        public void modificationListenerRemove(MapListener<K, V> listener);

        /**
         * @return size of map, but in 64bit long which does not overflow at 2e9 items.
         */
        public long sizeLong();
    }

    /**
     * Binds {@link Atomic.Long} to Primary Map so the Atomic.Long contains size of Map.
     * `Atomic.Long` is incremented on each insert and decremented on each entry removal.
     * MapDB collections usually do not keep their size, but require complete traversal to count items.
     *
     * If `Atomic.Long` has zero value, it will be updated with value from `map.sizeLong()` and then
     * bound to the map.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * NOTE: {@link BTreeMap} and {@link HTreeMap} already support this directly as optional parameter named `counter`.
     * In that case all calls to `Map.size()` are forwarded to underlying counter. Check parameters at
     * {@link DB#createHashMap(String)} and
     * {@link DB#createTreeMap(String)}
     *
     * @param map primary map whose size needs to be tracked
     * @param sizeCounter number updated when Map Entry is added or removed.
     */
    public static <K,V> void size(MapWithModificationListener<K,V> map, final Atomic.Long sizeCounter){
        //set initial value first if necessary
        if(sizeCounter.get() == 0){
            long size = map.sizeLong();
            if(sizeCounter.get()!=size)
                sizeCounter.set(size);
        }

        map.modificationListenerAdd(new MapListener<K, V>() {
            @Override
            public void update(K key, V oldVal, V newVal) {
                if (oldVal == null && newVal != null) {
                    sizeCounter.incrementAndGet();   //insert
                } else if (oldVal != null && newVal == null) {
                    sizeCounter.decrementAndGet();   //removal
                }
                //update does not change collection size
            }
        });
    }

    /**
     * Binds Secondary Map so that it contains Key from Primary Map and custom Value.
     * Secondary Value is updated every time Primary Map is modified.
     *
     * If Secondary Map is empty its content will be recreated from Primary Map.
     * This binding is not persistent. You need to restore it every time store is reopened.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - key type in primary and Secondary Map
     * * `<V>` - value type in Primary Map
     * * `<V2>` - value type in Secondary Map
     *
     * @param map Primary Map
     * @param secondary Secondary Map with custom values
     * @param fun function which calculates secondary value from primary key and value
     */
    public static <K,V, V2> void secondaryValue(MapWithModificationListener<K, V> map,
                                                final Map<K, V2> secondary,
                                                final Fun.Function2<V2, K, V> fun){
        //fill if empty
        if(secondary.isEmpty()){
            for(Map.Entry<K,V> e:map.entrySet())
                secondary.put(e.getKey(), fun.run(e.getKey(),e.getValue()));
        }
        //hook listener
        map.modificationListenerAdd(new MapListener<K, V>() {
            @Override
            public void update(K key, V oldVal, V newVal) {
                if (newVal == null) {
                    //removal
                    secondary.remove(key);
                } else {
                    //insert or update: recompute the secondary value
                    secondary.put(key, fun.run(key, newVal));
                }
            }
        });
    }

    /**
     * Binds Secondary Map so that it contains Key from Primary Map and custom Values.
     * Secondary Values are updated every time Primary Map is modified.
     *
     * If Secondary Map is empty its content will be recreated from Primary Map.
     * This binding is not persistent. You need to restore it every time store is reopened.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - key type in primary and Secondary Map
     * * `<V>` - value type in Primary Map
     * * `<V2>` - value type in Secondary Map
     *
     * @param map Primary Map
     * @param secondary Secondary multimap modeled as a set of (key, secondary value) tuples
     * @param fun function which calculates secondary values from primary key and value; may return null for "no values"
     */
    public static <K,V, V2> void secondaryValues(MapWithModificationListener<K, V> map,
                                                 final Set<Fun.Tuple2<K, V2>> secondary,
                                                 final Fun.Function2<V2[], K, V> fun){
        //fill if empty
        if(secondary.isEmpty()){
            for(Map.Entry<K,V> e:map.entrySet()){
                V2[] v = fun.run(e.getKey(),e.getValue());
                if(v!=null)
                    for(V2 v2:v)
                        secondary.add(Fun.t2(e.getKey(), v2));
            }
        }
        //hook listener
        map.modificationListenerAdd(new MapListener<K, V>() {
            @Override
            public void update(K key, V oldVal, V newVal) {
                if (newVal == null) {
                    //removal
                    V2[] v = fun.run(key, oldVal);
                    if (v != null)
                        for (V2 v2 : v)
                            secondary.remove(Fun.t2(key, v2));
                } else if (oldVal == null) {
                    //insert
                    V2[] v = fun.run(key, newVal);
                    if (v != null)
                        for (V2 v2 : v)
                            secondary.add(Fun.t2(key, v2));
                } else {
                    //update, must remove old values and insert new ones
                    V2[] oldv = fun.run(key, oldVal);
                    V2[] newv = fun.run(key, newVal);
                    if (oldv == null) {
                        //insert new
                        if (newv != null)
                            for (V2 v : newv)
                                secondary.add(Fun.t2(key, v));
                        return;
                    }
                    if (newv == null) {
                        //remove old
                        for (V2 v : oldv)
                            secondary.remove(Fun.t2(key, v));
                        return;
                    }

                    //diff the two value arrays so untouched tuples are left alone
                    Set<V2> hashes = new HashSet<V2>();
                    Collections.addAll(hashes, oldv);

                    //add new non existing items
                    for (V2 v : newv) {
                        if (!hashes.contains(v)) {
                            secondary.add(Fun.t2(key, v));
                        }
                    }
                    //remove items which are in old, but not in new
                    for (V2 v : newv) {
                        hashes.remove(v);
                    }
                    for (V2 v : hashes) {
                        secondary.remove(Fun.t2(key, v));
                    }
                }
            }
        });
    }

    /**
     * Binds Secondary Set so it contains Secondary Key (Index). Useful if you need
     * to lookup Keys from Primary Map by custom criteria. Other use is for reverse lookup
     *
     * To lookup keys in Secondary Set use {@link Fun#filter(java.util.NavigableSet, Object)}
     *
     * If Secondary Set is empty its content will be recreated from Primary Map.
     * This binding is not persistent. You need to restore it every time store is reopened.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - Key in Primary Map
     * * `<V>` - Value in Primary Map
     * * `<K2>` - Secondary key
     *
     * @param map primary map
     * @param secondary secondary set
     * @param fun function which calculates Secondary Key from Primary Key and Value
     */
    public static <K,V, K2> void secondaryKey(MapWithModificationListener<K, V> map,
                                              final Set<Fun.Tuple2<K2, K>> secondary,
                                              final Fun.Function2<K2, K, V> fun){
        //fill if empty
        if(secondary.isEmpty()){
            for(Map.Entry<K,V> e:map.entrySet()){
                secondary.add(Fun.t2(fun.run(e.getKey(),e.getValue()), e.getKey()));
            }
        }
        //hook listener
        map.modificationListenerAdd(new MapListener<K, V>() {
            @Override
            public void update(K key, V oldVal, V newVal) {
                if (newVal == null) {
                    //removal
                    secondary.remove(Fun.t2(fun.run(key, oldVal), key));
                } else if (oldVal == null) {
                    //insert
                    secondary.add(Fun.t2(fun.run(key, newVal), key));
                } else {
                    //update, must remove old key and insert new
                    K2 oldKey = fun.run(key, oldVal);
                    K2 newKey = fun.run(key, newVal);
                    //no-op when the secondary key did not change
                    if (oldKey == newKey || oldKey.equals(newKey)) return;
                    secondary.remove(Fun.t2(oldKey, key));
                    secondary.add(Fun.t2(newKey, key));
                }
            }
        });
    }

    /**
     * Binds Secondary Map so it contains Secondary Key (Index). Useful if you need
     * to lookup Keys from Primary Map by custom criteria. Other use is for reverse lookup
     *
     * If Secondary Map is empty its content will be recreated from Primary Map.
     * This binding is not persistent. You need to restore it every time store is reopened.
     *
     * NOTE: with a plain {@code Map} as secondary, duplicate secondary keys overwrite
     * each other; use the {@code Set<Fun.Tuple2>} overload if duplicates are possible.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - Key in Primary Map
     * * `<V>` - Value in Primary Map
     * * `<K2>` - Secondary key
     *
     * @param map primary map
     * @param secondary secondary map (secondary key to primary key)
     * @param fun function which calculates Secondary Key from Primary Key and Value
     */
    public static <K,V, K2> void secondaryKey(MapWithModificationListener<K, V> map,
                                              final Map<K2, K> secondary,
                                              final Fun.Function2<K2, K, V> fun){
        //fill if empty
        if(secondary.isEmpty()){
            for(Map.Entry<K,V> e:map.entrySet()){
                secondary.put(fun.run(e.getKey(), e.getValue()), e.getKey());
            }
        }
        //hook listener
        map.modificationListenerAdd(new MapListener<K, V>() {
            @Override
            public void update(K key, V oldVal, V newVal) {
                if (newVal == null) {
                    //removal
                    secondary.remove(fun.run(key, oldVal));
                } else if (oldVal == null) {
                    //insert
                    secondary.put(fun.run(key, newVal), key);
                } else {
                    //update, must remove old key and insert new
                    K2 oldKey = fun.run(key, oldVal);
                    K2 newKey = fun.run(key, newVal);
                    //no-op when the secondary key did not change
                    if (oldKey == newKey || oldKey.equals(newKey)) return;
                    secondary.remove(oldKey);
                    secondary.put(newKey, key);
                }
            }
        });
    }

    /**
     * Binds Secondary Set so it contains Secondary Keys (Index). Useful if you need
     * to lookup Keys from Primary Map by custom criteria. Other use is for reverse lookup
     *
     * To lookup keys in Secondary Set use {@link Fun#filter(java.util.NavigableSet, Object)}
     *
     * If Secondary Set is empty its content will be recreated from Primary Map.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - Key in Primary Map
     * * `<V>` - Value in Primary Map
     * * `<K2>` - Secondary key
     *
     * @param map primary map
     * @param secondary secondary set
     * @param fun function which calculates Secondary Keys from Primary Key and Value; may return null for "no keys"
     */
    public static <K,V, K2> void secondaryKeys(MapWithModificationListener<K, V> map,
                                               final Set<Fun.Tuple2<K2, K>> secondary,
                                               final Fun.Function2<K2[], K, V> fun){
        //fill if empty
        if(secondary.isEmpty()){
            for(Map.Entry<K,V> e:map.entrySet()){
                K2[] k2 = fun.run(e.getKey(), e.getValue());
                if(k2 != null)
                    for(K2 k22 :k2)
                        secondary.add(Fun.t2(k22, e.getKey()));
            }
        }
        //hook listener
        map.modificationListenerAdd(new MapListener<K, V>() {
            @Override
            public void update(K key, V oldVal, V newVal) {
                if (newVal == null) {
                    //removal
                    K2[] k2 = fun.run(key, oldVal);
                    if (k2 != null)
                        for (K2 k22 : k2)
                            secondary.remove(Fun.t2(k22, key));
                } else if (oldVal == null) {
                    //insert
                    K2[] k2 = fun.run(key, newVal);
                    if (k2 != null)
                        for (K2 k22 : k2)
                            secondary.add(Fun.t2(k22, key));
                } else {
                    //update, must remove old keys and insert new ones
                    K2[] oldk = fun.run(key, oldVal);
                    K2[] newk = fun.run(key, newVal);
                    if (oldk == null) {
                        //insert new
                        if (newk != null)
                            for (K2 k22 : newk)
                                secondary.add(Fun.t2(k22, key));
                        return;
                    }
                    if (newk == null) {
                        //remove old
                        for (K2 k22 : oldk)
                            secondary.remove(Fun.t2(k22, key));
                        return;
                    }

                    //diff the two key arrays so untouched tuples are left alone
                    Set<K2> hashes = new HashSet<K2>();
                    Collections.addAll(hashes, oldk);

                    //add new non existing items
                    for (K2 k2 : newk) {
                        if (!hashes.contains(k2)) {
                            secondary.add(Fun.t2(k2, key));
                        }
                    }
                    //remove items which are in old, but not in new
                    for (K2 k2 : newk) {
                        hashes.remove(k2);
                    }
                    for (K2 k2 : hashes) {
                        secondary.remove(Fun.t2(k2, key));
                    }
                }
            }
        });
    }

    /**
     * Binds Secondary Set so it contains inverse mapping to Primary Map: Primary Value will become Secondary Key.
     * This is useful for creating bi-directional Maps.
     *
     * To lookup keys in Secondary Set use {@link Fun#filter(java.util.NavigableSet, Object)}
     *
     * If Secondary Set is empty its content will be recreated from Primary Map.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - Key in Primary Map and Second Value in Secondary Set
     * * `<V>` - Value in Primary Map and Primary Value in Secondary Set
     *
     * @param primary Primary Map for which inverse mapping will be created
     * @param inverse Secondary Set which will contain inverse mapping
     */
    public static <K,V> void mapInverse(MapWithModificationListener<K,V> primary,
                                        Set<Fun.Tuple2<V, K>> inverse) {
        //the secondary key IS the primary value; identity projection
        Bind.secondaryKey(primary,inverse, new Fun.Function2<V, K,V>(){
            @Override public V run(K key, V value) {
                return value;
            }
        });
    }

    /**
     * Binds Secondary Map so it contains inverse mapping to Primary Map: Primary Value will become Secondary Key.
     * This is useful for creating bi-directional Maps.
     *
     * In this case some data may be lost, if there are duplicated primary values.
     * It is recommended to use a multimap: `NavigableSet<Fun.Tuple2<V,K>>` which
     * handles value duplicities. Use {@link Bind#mapInverse(MapWithModificationListener, Set)}
     *
     * If Secondary Map is empty its content will be recreated from Primary Map.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - Key in Primary Map and Value in Secondary Map
     * * `<V>` - Value in Primary Map and Key in Secondary Map
     *
     * @param primary Primary Map for which inverse mapping will be created
     * @param inverse Secondary Map which will contain inverse mapping
     */
    public static <K,V> void mapInverse(MapWithModificationListener<K,V> primary,
                                        Map<V, K> inverse) {
        //the secondary key IS the primary value; identity projection
        Bind.secondaryKey(primary,inverse, new Fun.Function2<V, K,V>(){
            @Override public V run(K key, V value) {
                return value;
            }
        });
    }

    /**
     * Binds Secondary Map so it creates a [histogram](http://en.wikipedia.org/wiki/Histogram) from
     * data in Primary Map. Histogram keeps count of how many items are in each category.
     * This method takes a function which defines in what category each Primary Map entry is in.
     *
     * If Secondary Map is empty its content will be recreated from Primary Map.
     *
     * NOTE: Binding just installs Modification Listener on primary collection. Binding itself is not persistent
     * and has to be restored after primary collection is loaded. Data contained in secondary collection are persistent.
     *
     * Type params:
     *
     * * `<K>` - Key type in primary map
     * * `<V>` - Value type in primary map
     * * `<C>` - Category type
     *
     * @param primary Primary Map to create histogram for
     * @param histogram Secondary Map to create histogram for, key is Category, value is number of items in category
     * @param entryToCategory returns Category in which entry from Primary Map belongs to.
     */
    public static <K,V,C> void histogram(MapWithModificationListener<K,V> primary, final ConcurrentMap<C,Long> histogram,
                                         final Fun.Function2<C, K, V> entryToCategory){

        MapListener<K,V> listener = new MapListener<K, V>() {
            @Override public void update(K key, V oldVal, V newVal) {
                if(newVal == null){
                    //removal
                    C category = entryToCategory.run(key,oldVal);
                    incrementHistogram(category, -1);
                }else if(oldVal==null){
                    //insert
                    C category = entryToCategory.run(key,newVal);
                    incrementHistogram(category, 1);
                }else{
                    //update: move the entry from its old category to its new one
                    C oldCat = entryToCategory.run(key, oldVal);
                    C newCat = entryToCategory.run(key, newVal);
                    if(oldCat == newCat || oldCat.equals(newCat)) return;
                    incrementHistogram(oldCat,-1);
                    //BUG FIX: this used to increment oldCat again, which cancelled the
                    //decrement above and never counted newCat, so the histogram drifted
                    //out of sync on every category-changing update.
                    incrementHistogram(newCat,1);
                }
            }

            /** Atomically update a counter in the histogram via a CAS retry loop. */
            private void incrementHistogram(C category, long i) {
                for(;;){
                    Long oldCount = histogram.get(category);
                    if(oldCount == null){
                        //insert new count; retry if another thread raced us
                        if(histogram.putIfAbsent(category,i) == null)
                            return;
                    }else{
                        //increase existing count; retry if the value changed concurrently
                        Long newCount = oldCount+i;
                        if(histogram.replace(category,oldCount, newCount))
                            return;
                    }
                }
            }
        };

        primary.modificationListenerAdd(listener);

        if(histogram.isEmpty()){
            //recreate content on empty collection by replaying inserts through the listener
            for(Map.Entry<K,V> e:primary.entrySet()){
                listener.update(e.getKey(),null,e.getValue());
            }
        }
    }
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot;
import java.lang.reflect.Constructor;
import java.security.AccessControlException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.groovy.GroovyBeanDefinitionReader;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.BeanNameGenerator;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ApplicationListener;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.AnnotatedBeanDefinitionReader;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.AnnotationConfigUtils;
import org.springframework.context.annotation.ClassPathBeanDefinitionScanner;
import org.springframework.context.support.AbstractApplicationContext;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.GenericTypeResolver;
import org.springframework.core.annotation.AnnotationAwareOrderComparator;
import org.springframework.core.env.CommandLinePropertySource;
import org.springframework.core.env.CompositePropertySource;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.SimpleCommandLinePropertySource;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.SpringFactoriesLoader;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ReflectionUtils;
import org.springframework.util.StopWatch;
import org.springframework.util.StringUtils;
import org.springframework.web.context.ConfigurableWebApplicationContext;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.StandardServletEnvironment;
/**
* Classes that can be used to bootstrap and launch a Spring application from a Java main
 * method. By default, the class will perform the following steps to bootstrap your
* application:
*
* <ul>
* <li>Create an appropriate {@link ApplicationContext} instance (depending on your
* classpath)</li>
*
* <li>Register a {@link CommandLinePropertySource} to expose command line arguments as
* Spring properties</li>
*
* <li>Refresh the application context, loading all singleton beans</li>
*
* <li>Trigger any {@link CommandLineRunner} beans</li>
* </ul>
*
* In most circumstances the static {@link #run(Object, String[])} method can be called
* directly from your {@literal main} method to bootstrap your application:
*
* <pre class="code">
* @Configuration
* @EnableAutoConfiguration
* public class MyApplication {
*
* // ... Bean definitions
*
* public static void main(String[] args) throws Exception {
* SpringApplication.run(MyApplication.class, args);
 *   }
 * }
 * </pre>
*
* <p>
* For more advanced configuration a {@link SpringApplication} instance can be created and
* customized before being run:
*
* <pre class="code">
* public static void main(String[] args) throws Exception {
* SpringApplication app = new SpringApplication(MyApplication.class);
* // ... customize app settings here
* app.run(args)
* }
* </pre>
*
* {@link SpringApplication}s can read beans from a variety of different sources. It is
* generally recommended that a single {@code @Configuration} class is used to bootstrap
* your application, however, any of the following sources can also be used:
*
* <ul>
* <li>{@link Class} - A Java class to be loaded by {@link AnnotatedBeanDefinitionReader}</li>
*
* <li>{@link Resource} - An XML resource to be loaded by {@link XmlBeanDefinitionReader},
* or a groovy script to be loaded by {@link GroovyBeanDefinitionReader}</li>
*
* <li>{@link Package} - A Java package to be scanned by
* {@link ClassPathBeanDefinitionScanner}</li>
*
* <li>{@link CharSequence} - A class name, resource handle or package name to loaded as
* appropriate. If the {@link CharSequence} cannot be resolved to class and does not
* resolve to a {@link Resource} that exists it will be considered a {@link Package}.</li>
* </ul>
*
* @author Phillip Webb
* @author Dave Syer
* @author Andy Wilkinson
* @author Christian Dupuis
* @author Stephane Nicoll
* @see #run(Object, String[])
* @see #run(Object[], String[])
* @see #SpringApplication(Object...)
*/
public class SpringApplication {
// Fully-qualified context class names, resolved reflectively in
// createApplicationContext() so the classes are only required when actually used.
private static final String DEFAULT_CONTEXT_CLASS = "org.springframework.context."
        + "annotation.AnnotationConfigApplicationContext";
public static final String DEFAULT_WEB_CONTEXT_CLASS = "org.springframework."
        + "boot.context.embedded.AnnotationConfigEmbeddedWebApplicationContext";
// Presence of ALL of these classes on the classpath implies a web environment
// (see deduceWebEnvironment()).
private static final String[] WEB_ENVIRONMENT_CLASSES = { "javax.servlet.Servlet",
        "org.springframework.web.context.ConfigurableWebApplicationContext" };
private static final String SYSTEM_PROPERTY_JAVA_AWT_HEADLESS = "java.awt.headless";
// Fallback banner used when no banner.txt resource and no custom Banner is set.
private static final Banner DEFAULT_BANNER = new SpringBootBanner();
private final Log log = LogFactory.getLog(getClass());
// Bean sources (classes, package names, XML resource locations, ...) gathered
// from constructors, setSources() and getSources().
private final Set<Object> sources = new LinkedHashSet<Object>();
private Class<?> mainApplicationClass;
// Behavior flags; defaults match the javadoc of the corresponding setters.
private boolean showBanner = true;
private boolean logStartupInfo = true;
private boolean addCommandLineProperties = true;
private Banner banner;
private ResourceLoader resourceLoader;
private BeanNameGenerator beanNameGenerator;
private ConfigurableEnvironment environment;
private Class<? extends ConfigurableApplicationContext> applicationContextClass;
private boolean webEnvironment;
private boolean headless = true;
private boolean registerShutdownHook = true;
private List<ApplicationContextInitializer<?>> initializers;
private List<ApplicationListener<?>> listeners;
private Map<String, Object> defaultProperties;
// Additional profiles supplied via setAdditionalProfiles(String...).
private Set<String> profiles = new HashSet<String>();
/**
 * Create a new {@link SpringApplication} instance. The application context will load
 * beans from the specified sources (see {@link SpringApplication class-level}
 * documentation for details). The instance can be customized before calling
 * {@link #run(String...)}.
 * @param sources the bean sources
 * @see #run(Object, String[])
 * @see #SpringApplication(ResourceLoader, Object...)
 */
public SpringApplication(Object... sources) {
    initialize(sources);
}
/**
 * Create a new {@link SpringApplication} instance. The application context will load
 * beans from the specified sources (see {@link SpringApplication class-level}
 * documentation for details). The instance can be customized before calling
 * {@link #run(String...)}.
 * @param resourceLoader the resource loader to use
 * @param sources the bean sources
 * @see #run(Object, String[])
 * @see #SpringApplication(ResourceLoader, Object...)
 */
public SpringApplication(ResourceLoader resourceLoader, Object... sources) {
    this.resourceLoader = resourceLoader;
    initialize(sources);
}
// Shared constructor logic: record the sources, deduce whether this is a web
// environment, discover ApplicationContextInitializers and ApplicationListeners
// from spring.factories, and deduce the main application class from the stack.
@SuppressWarnings({ "unchecked", "rawtypes" })
private void initialize(Object[] sources) {
    if (sources != null && sources.length > 0) {
        this.sources.addAll(Arrays.asList(sources));
    }
    this.webEnvironment = deduceWebEnvironment();
    setInitializers((Collection) getSpringFactoriesInstances(ApplicationContextInitializer.class));
    setListeners((Collection) getSpringFactoriesInstances(ApplicationListener.class));
    this.mainApplicationClass = deduceMainApplicationClass();
}
// Deduce whether this should run as a web application: true only when every
// marker class in WEB_ENVIRONMENT_CLASSES is present on the classpath.
private boolean deduceWebEnvironment() {
    boolean webPresent = true;
    for (String className : WEB_ENVIRONMENT_CLASSES) {
        webPresent = webPresent && ClassUtils.isPresent(className, null);
    }
    return webPresent;
}
// Walk the current call stack and resolve the class declaring the "main" method,
// or return null if it cannot be determined.
private Class<?> deduceMainApplicationClass() {
    try {
        for (StackTraceElement element : new RuntimeException().getStackTrace()) {
            if ("main".equals(element.getMethodName())) {
                return Class.forName(element.getClassName());
            }
        }
    }
    catch (ClassNotFoundException ex) {
        // Swallow and continue
    }
    return null;
}
/**
 * Run the Spring application, creating and refreshing a new
 * {@link ApplicationContext}.
 * @param args the application arguments (usually passed from a Java main method)
 * @return a running {@link ApplicationContext}
 */
public ConfigurableApplicationContext run(String... args) {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    ConfigurableApplicationContext context = null;
    configureHeadlessProperty();
    SpringApplicationRunListeners listeners = getRunListeners(args);
    listeners.started();
    try {
        // All real startup work happens in doRun(); this wrapper only times the
        // startup and handles failure reporting/cleanup.
        context = doRun(listeners, args);
        stopWatch.stop();
        if (this.logStartupInfo) {
            new StartupInfoLogger(this.mainApplicationClass).logStarted(
                    getApplicationLog(), stopWatch);
        }
        return context;
    }
    catch (Throwable ex) {
        try {
            // Notify listeners of the failure before tearing the context down.
            // NOTE(review): if finished() itself throws, that exception replaces
            // the original startup failure — confirm this is acceptable.
            listeners.finished(context, ex);
            this.log.error("Application startup failed", ex);
        }
        finally {
            if (context != null) {
                context.close();
            }
        }
        ReflectionUtils.rethrowRuntimeException(ex);
        // Unreachable: rethrowRuntimeException always throws, but the compiler
        // still requires a return on this path.
        return context;
    }
}
/**
 * Create, prepare, load and refresh the {@link ApplicationContext}, notifying the
 * given listeners at each lifecycle step.
 * @param listeners the run listeners to notify
 * @param args the application arguments (usually passed from a Java main method)
 * @return the refreshed application context
 */
private ConfigurableApplicationContext doRun(SpringApplicationRunListeners listeners,
        String... args) {
    ConfigurableApplicationContext context;
    // Create and configure the environment
    ConfigurableEnvironment environment = getOrCreateEnvironment();
    configureEnvironment(environment, args);
    listeners.environmentPrepared(environment);
    if (this.showBanner) {
        printBanner(environment);
    }
    // Create, load, refresh and run the ApplicationContext
    context = createApplicationContext();
    if (this.registerShutdownHook) {
        try {
            context.registerShutdownHook();
        }
        catch (AccessControlException ex) {
            // Not allowed in some environments.
        }
    }
    context.setEnvironment(environment);
    postProcessApplicationContext(context);
    applyInitializers(context);
    listeners.contextPrepared(context);
    if (this.logStartupInfo) {
        // Only log startup info for the root of a context hierarchy.
        logStartupInfo(context.getParent() == null);
    }
    // Load the sources
    Set<Object> sources = getSources();
    Assert.notEmpty(sources, "Sources must not be empty");
    load(context, sources.toArray(new Object[sources.size()]));
    listeners.contextLoaded(context);
    // Refresh the context
    refresh(context);
    afterRefresh(context, args);
    listeners.finished(context, null);
    return context;
}
// Apply the headless setting to java.awt.headless, keeping any value the user
// already configured via a system property.
private void configureHeadlessProperty() {
    String value = System.getProperty(SYSTEM_PROPERTY_JAVA_AWT_HEADLESS,
            Boolean.toString(this.headless));
    System.setProperty(SYSTEM_PROPERTY_JAVA_AWT_HEADLESS, value);
}
// Build the composite run-listener from every SpringApplicationRunListener
// registered in spring.factories, instantiated via a
// (SpringApplication, String[]) constructor.
private SpringApplicationRunListeners getRunListeners(String[] args) {
    Class<?>[] types = new Class<?>[] { SpringApplication.class, String[].class };
    return new SpringApplicationRunListeners(this.log, getSpringFactoriesInstances(
            SpringApplicationRunListener.class, types, this, args));
}
// Convenience overload for factory types instantiated via a no-arg constructor.
private <T> Collection<? extends T> getSpringFactoriesInstances(Class<T> type) {
    return getSpringFactoriesInstances(type, new Class<?>[] {});
}
// Load the implementation names registered for 'type' in
// META-INF/spring.factories, instantiate each through the constructor matching
// 'parameterTypes', and return the instances sorted by @Order/Ordered.
@SuppressWarnings("unchecked")
private <T> Collection<? extends T> getSpringFactoriesInstances(Class<T> type,
        Class<?>[] parameterTypes, Object... args) {
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    // Use names and ensure unique to protect against duplicates
    Set<String> names = new LinkedHashSet<String>(
            SpringFactoriesLoader.loadFactoryNames(type, classLoader));
    List<T> instances = new ArrayList<T>(names.size());
    // Create instances from the names
    for (String name : names) {
        try {
            Class<?> instanceClass = ClassUtils.forName(name, classLoader);
            Assert.isAssignable(type, instanceClass);
            Constructor<?> constructor = instanceClass.getConstructor(parameterTypes);
            T instance = (T) constructor.newInstance(args);
            instances.add(instance);
        }
        catch (Throwable ex) {
            // Any failure (missing class, wrong constructor, ...) aborts startup.
            throw new IllegalArgumentException("Cannot instantiate " + type + " : "
                    + name, ex);
        }
    }
    AnnotationAwareOrderComparator.sort(instances);
    return instances;
}
// Return the user-supplied environment if one was set, otherwise create a
// standard environment (servlet-aware when running as a web application).
private ConfigurableEnvironment getOrCreateEnvironment() {
    if (this.environment != null) {
        return this.environment;
    }
    return this.webEnvironment ? new StandardServletEnvironment()
            : new StandardEnvironment();
}
/**
 * Template method delegating to
 * {@link #configurePropertySources(ConfigurableEnvironment, String[])} and
 * {@link #configureProfiles(ConfigurableEnvironment, String[])} in that order.
 * Override this method for complete control over Environment customization, or one of
 * the above for fine-grained control over property sources or profiles, respectively.
 * @param environment this application's environment
 * @param args arguments passed to the {@code run} method
 * @see #configureProfiles(ConfigurableEnvironment, String[])
 * @see #configurePropertySources(ConfigurableEnvironment, String[])
 */
protected void configureEnvironment(ConfigurableEnvironment environment, String[] args) {
    configurePropertySources(environment, args);
    configureProfiles(environment, args);
}
/**
 * Add, remove or re-order any {@link PropertySource}s in this application's
 * environment.
 * @param environment this application's environment
 * @param args arguments passed to the {@code run} method
 * @see #configureEnvironment(ConfigurableEnvironment, String[])
 */
protected void configurePropertySources(ConfigurableEnvironment environment,
        String[] args) {
    MutablePropertySources sources = environment.getPropertySources();
    if (this.defaultProperties != null && !this.defaultProperties.isEmpty()) {
        // Lowest precedence: defaults lose to every other source.
        sources.addLast(new MapPropertySource("defaultProperties",
                this.defaultProperties));
    }
    if (this.addCommandLineProperties && args.length > 0) {
        String name = CommandLinePropertySource.COMMAND_LINE_PROPERTY_SOURCE_NAME;
        if (sources.contains(name)) {
            // A command-line source already exists: wrap old and new in a
            // composite so the new args win without losing the existing values.
            // args.hashCode() (identity hash) only serves to make the inner
            // source name unique.
            PropertySource<?> source = sources.get(name);
            CompositePropertySource composite = new CompositePropertySource(name);
            composite.addPropertySource(new SimpleCommandLinePropertySource(name
                    + "-" + args.hashCode(), args));
            composite.addPropertySource(source);
            sources.replace(name, composite);
        }
        else {
            // Highest precedence: command-line args override everything.
            sources.addFirst(new SimpleCommandLinePropertySource(args));
        }
    }
}
/**
 * Configure which profiles are active (or active by default) for this application
 * environment. Consider overriding this method to programmatically enforce profile
 * rules and semantics, such as ensuring mutual exclusivity of profiles (e.g. 'dev' OR
 * 'prod', but never both).
 * @param environment this application's environment
 * @param args arguments passed to the {@code run} method
 * @see #configureEnvironment(ConfigurableEnvironment, String[])
 */
protected void configureProfiles(ConfigurableEnvironment environment, String[] args) {
    environment.getActiveProfiles(); // ensure they are initialized
    // But these ones should go first (last wins in a property key clash)
    Set<String> profiles = new LinkedHashSet<String>(this.profiles);
    profiles.addAll(Arrays.asList(environment.getActiveProfiles()));
    environment.setActiveProfiles(profiles.toArray(new String[profiles.size()]));
}
/**
 * Print a custom banner message to the console, optionally extracting its location or
 * content from the Environment (banner.location and banner.charset). The defaults are
 * banner.location=classpath:banner.txt, banner.charset=UTF-8. If the banner file does
 * not exist or cannot be printed, a simple default is created.
 * <p>
 * NOTE(review): the javadoc mentions banner.charset but this method never reads
 * that property — confirm whether ResourceBanner handles it or the doc is stale.
 * @param environment the environment
 * @see #setShowBanner(boolean)
 */
protected void printBanner(Environment environment) {
    String location = environment.getProperty("banner.location", "banner.txt");
    ResourceLoader resourceLoader = this.resourceLoader != null ? this.resourceLoader
            : new DefaultResourceLoader(getClassLoader());
    Resource resource = resourceLoader.getResource(location);
    // Precedence: banner.txt resource, then custom Banner, then built-in default.
    if (resource.exists()) {
        new ResourceBanner(resource).printBanner(environment,
                this.mainApplicationClass, System.out);
        return;
    }
    if (this.banner != null) {
        this.banner.printBanner(environment, this.mainApplicationClass, System.out);
        return;
    }
    printDefaultBanner();
}
// Print the built-in Spring Boot banner.
private void printDefaultBanner() {
    DEFAULT_BANNER.printBanner(null, this.mainApplicationClass, System.out);
}
/**
 * Strategy method used to create the {@link ApplicationContext}. By default this
 * method will respect any explicitly set application context or application context
 * class before falling back to a suitable default.
 * @return the application context (not yet refreshed)
 * @throws IllegalStateException if no context class was set and the default
 * context class is not on the classpath
 * @see #setApplicationContextClass(Class)
 */
protected ConfigurableApplicationContext createApplicationContext() {
    Class<?> contextClass = this.applicationContextClass;
    if (contextClass == null) {
        try {
            // Pick the default context type based on the deduced/declared
            // web environment.
            contextClass = Class
                    .forName(this.webEnvironment ? DEFAULT_WEB_CONTEXT_CLASS
                            : DEFAULT_CONTEXT_CLASS);
        }
        catch (ClassNotFoundException ex) {
            // Message fixed: previously read "Unable create a default ..."
            throw new IllegalStateException(
                    "Unable to create a default ApplicationContext, "
                            + "please specify an ApplicationContextClass", ex);
        }
    }
    return (ConfigurableApplicationContext) BeanUtils.instantiate(contextClass);
}
/**
 * Apply any relevant post processing the {@link ApplicationContext}. Subclasses can
 * apply additional processing as required.
 * @param context the application context
 */
protected void postProcessApplicationContext(ConfigurableApplicationContext context) {
    if (this.webEnvironment) {
        if (context instanceof ConfigurableWebApplicationContext) {
            ConfigurableWebApplicationContext configurableContext = (ConfigurableWebApplicationContext) context;
            if (this.beanNameGenerator != null) {
                // Expose the custom bean name generator under the well-known
                // name so annotation processing picks it up.
                configurableContext.getBeanFactory().registerSingleton(
                        AnnotationConfigUtils.CONFIGURATION_BEAN_NAME_GENERATOR,
                        this.beanNameGenerator);
            }
        }
    }
    if (this.resourceLoader != null) {
        // Propagate the custom resource loader (and its class loader) where the
        // context type supports it.
        if (context instanceof GenericApplicationContext) {
            ((GenericApplicationContext) context)
                    .setResourceLoader(this.resourceLoader);
        }
        if (context instanceof DefaultResourceLoader) {
            ((DefaultResourceLoader) context).setClassLoader(this.resourceLoader
                    .getClassLoader());
        }
    }
}
/**
 * Apply any {@link ApplicationContextInitializer}s to the context before it is
 * refreshed.
 * @param context the configured ApplicationContext (not refreshed yet)
 * @see ConfigurableApplicationContext#refresh()
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
protected void applyInitializers(ConfigurableApplicationContext context) {
    for (ApplicationContextInitializer initializer : getInitializers()) {
        // Verify the context satisfies the initializer's declared generic bound
        // before invoking it.
        Class<?> requiredType = GenericTypeResolver.resolveTypeArgument(
                initializer.getClass(), ApplicationContextInitializer.class);
        Assert.isInstanceOf(requiredType, context, "Unable to call initializer.");
        initializer.initialize(context);
    }
}
/**
 * Called to log startup information, subclasses may override to add additional
 * logging.
 * @param isRoot true if this application is the root of a context hierarchy
 */
protected void logStartupInfo(boolean isRoot) {
    // Only the root context of a hierarchy logs the starting banner line.
    if (isRoot) {
        new StartupInfoLogger(this.mainApplicationClass)
                .logStarting(getApplicationLog());
    }
}
/**
 * Returns the {@link Log} for the application. By default will be deduced from the
 * main application class, falling back to this class's own logger.
 * @return the application log
 */
protected Log getApplicationLog() {
    Class<?> logSource = this.mainApplicationClass;
    return (logSource == null) ? this.log : LogFactory.getLog(logSource);
}
/**
 * Load beans into the application context.
 * @param context the context to load beans into
 * @param sources the sources to load
 */
protected void load(ApplicationContext context, Object[] sources) {
    if (this.log.isDebugEnabled()) {
        this.log.debug("Loading source "
                + StringUtils.arrayToCommaDelimitedString(sources));
    }
    BeanDefinitionLoader loader = createBeanDefinitionLoader(
            getBeanDefinitionRegistry(context), sources);
    // Propagate any customizations to the loader before loading.
    if (this.beanNameGenerator != null) {
        loader.setBeanNameGenerator(this.beanNameGenerator);
    }
    if (this.resourceLoader != null) {
        loader.setResourceLoader(this.resourceLoader);
    }
    if (this.environment != null) {
        loader.setEnvironment(this.environment);
    }
    loader.load();
}
/**
 * The ResourceLoader that will be used in the ApplicationContext.
 * @return the resource loader that will be used in the ApplicationContext (or
 * {@code null} if the default should be used)
 */
public ResourceLoader getResourceLoader() {
    return this.resourceLoader;
}
/**
 * Either the ClassLoader that will be used in the ApplicationContext (if
 * {@link #setResourceLoader(ResourceLoader) resourceLoader} is set, or the context
 * class loader (if not null), or the loader of the Spring {@link ClassUtils} class.
 * @return a ClassLoader (never null)
 */
public ClassLoader getClassLoader() {
    ResourceLoader loader = this.resourceLoader;
    return (loader != null) ? loader.getClassLoader()
            : ClassUtils.getDefaultClassLoader();
}
/**
 * Resolve the {@link BeanDefinitionRegistry} for the given context, either the
 * context itself or its underlying bean factory.
 * @param context the application context
 * @return the BeanDefinitionRegistry if it can be determined
 * @throws IllegalStateException if no registry can be located
 */
private BeanDefinitionRegistry getBeanDefinitionRegistry(ApplicationContext context) {
    if (context instanceof BeanDefinitionRegistry) {
        return (BeanDefinitionRegistry) context;
    }
    if (context instanceof AbstractApplicationContext) {
        return (BeanDefinitionRegistry) ((AbstractApplicationContext) context)
                .getBeanFactory();
    }
    throw new IllegalStateException("Could not locate BeanDefinitionRegistry");
}
/**
 * Factory method used to create the {@link BeanDefinitionLoader}.
 * @param registry the bean definition registry
 * @param sources the sources to load
 * @return the {@link BeanDefinitionLoader} that will be used to load beans
 */
protected BeanDefinitionLoader createBeanDefinitionLoader(
        BeanDefinitionRegistry registry, Object[] sources) {
    return new BeanDefinitionLoader(registry, sources);
}
// Run every CommandLineRunner bean in @Order/Ordered order, failing fast on the
// first runner that throws.
private void runCommandLineRunners(ApplicationContext context, String... args) {
    Collection<CommandLineRunner> beans = context
            .getBeansOfType(CommandLineRunner.class).values();
    List<CommandLineRunner> ordered = new ArrayList<CommandLineRunner>();
    ordered.addAll(beans);
    AnnotationAwareOrderComparator.sort(ordered);
    for (CommandLineRunner runner : ordered) {
        try {
            runner.run(args);
        }
        catch (Exception ex) {
            throw new IllegalStateException("Failed to execute CommandLineRunner", ex);
        }
    }
}
/**
 * Refresh the underlying {@link ApplicationContext}.
 * @param applicationContext the application context to refresh
 */
protected void refresh(ApplicationContext applicationContext) {
    Assert.isInstanceOf(AbstractApplicationContext.class, applicationContext);
    ((AbstractApplicationContext) applicationContext).refresh();
}
/**
 * Called after the context has been refreshed; runs any {@link CommandLineRunner}
 * beans. Subclasses may override to add behavior after refresh.
 * @param context the refreshed application context
 * @param args the application arguments
 */
protected void afterRefresh(ConfigurableApplicationContext context, String[] args) {
    runCommandLineRunners(context, args);
}
/**
 * Set a specific main application class that will be used as a log source and to
 * obtain version information. By default the main application class will be deduced.
 * Can be set to {@code null} if there is no explicit application class.
 * @param mainApplicationClass the mainApplicationClass to set or {@code null}
 */
public void setMainApplicationClass(Class<?> mainApplicationClass) {
    this.mainApplicationClass = mainApplicationClass;
}
/**
 * Sets if this application is running within a web environment. If not specified will
 * attempt to deduce the environment based on the classpath (see
 * {@code WEB_ENVIRONMENT_CLASSES}).
 * @param webEnvironment if the application is running in a web environment
 */
public void setWebEnvironment(boolean webEnvironment) {
    this.webEnvironment = webEnvironment;
}
/**
 * Sets if the application is headless and should not instantiate AWT. Defaults to
 * {@code true} to prevent java icons appearing.
 * @param headless if the application is headless
 */
public void setHeadless(boolean headless) {
    this.headless = headless;
}
/**
 * Sets if the created {@link ApplicationContext} should have a shutdown hook
 * registered. Defaults to {@code true} to ensure that JVM shutdowns are handled
 * gracefully.
 * @param registerShutdownHook if the shutdown hook should be registered
 */
public void setRegisterShutdownHook(boolean registerShutdownHook) {
    this.registerShutdownHook = registerShutdownHook;
}
/**
 * Sets the {@link Banner} instance which will be used to print the banner when no
 * static banner file is provided.
 * @param banner the Banner instance to use
 */
public void setBanner(Banner banner) {
    this.banner = banner;
}
/**
 * Sets if the Spring banner should be displayed when the application runs. Defaults
 * to {@code true}.
 * @param showBanner if the banner should be shown
 */
public void setShowBanner(boolean showBanner) {
    this.showBanner = showBanner;
}
/**
 * Sets if the application information should be logged when the application starts.
 * Defaults to {@code true}.
 * @param logStartupInfo if startup info should be logged
 */
public void setLogStartupInfo(boolean logStartupInfo) {
    this.logStartupInfo = logStartupInfo;
}
/**
 * Sets if a {@link CommandLinePropertySource} should be added to the application
 * context in order to expose arguments. Defaults to {@code true}.
 * @param addCommandLineProperties if command line arguments should be exposed
 */
public void setAddCommandLineProperties(boolean addCommandLineProperties) {
    this.addCommandLineProperties = addCommandLineProperties;
}
/**
 * Set default environment properties which will be used in addition to those in the
 * existing {@link Environment}. These have the lowest precedence (added last to the
 * property sources).
 * @param defaultProperties the additional properties to set
 */
public void setDefaultProperties(Map<String, Object> defaultProperties) {
    this.defaultProperties = defaultProperties;
}
/**
 * Convenient alternative to {@link #setDefaultProperties(Map)}.
 * <p>
 * Uses {@link Properties#getProperty(String)} rather than {@code Hashtable.get}:
 * {@link Properties#propertyNames()} also enumerates keys inherited from a
 * defaults {@code Properties}, and {@code get} would return {@code null} for
 * those inherited keys.
 * @param defaultProperties some {@link Properties}
 */
public void setDefaultProperties(Properties defaultProperties) {
    this.defaultProperties = new HashMap<String, Object>();
    for (Object key : Collections.list(defaultProperties.propertyNames())) {
        this.defaultProperties.put((String) key,
                defaultProperties.getProperty((String) key));
    }
}
/**
 * Set additional profile values to use (on top of those set in system or command line
 * properties). Replaces any previously configured additional profiles.
 * @param profiles the additional profiles to set
 */
public void setAdditionalProfiles(String... profiles) {
    this.profiles = new LinkedHashSet<String>(Arrays.asList(profiles));
}
/**
 * Sets the bean name generator that should be used when generating bean names.
 * @param beanNameGenerator the bean name generator
 */
public void setBeanNameGenerator(BeanNameGenerator beanNameGenerator) {
    this.beanNameGenerator = beanNameGenerator;
}
/**
 * Sets the underlying environment that should be used with the created application
 * context. When set, {@code getOrCreateEnvironment()} will not create one.
 * @param environment the environment
 */
public void setEnvironment(ConfigurableEnvironment environment) {
    this.environment = environment;
}
/**
 * Returns a mutable set of the sources that will be added to an ApplicationContext
 * when {@link #run(String...)} is called.
 * @return the application sources
 * @see #SpringApplication(Object...)
 */
public Set<Object> getSources() {
    return this.sources;
}
/**
 * The sources that will be used to create an ApplicationContext. A valid source is
 * one of: a class, class name, package, package name, or an XML resource location.
 * Can also be set using constructors and static convenience methods (e.g.
 * {@link #run(Object[], String[])}).
 * <p>
 * NOTE: sources defined here will be used in addition to any sources specified on
 * construction — despite the "set" name this method appends, it does not replace.
 * @param sources the sources to set
 * @see #SpringApplication(Object...)
 */
public void setSources(Set<Object> sources) {
    Assert.notNull(sources, "Sources must not be null");
    this.sources.addAll(sources);
}
/**
 * Sets the {@link ResourceLoader} that should be used when loading resources.
 * @param resourceLoader the resource loader
 */
public void setResourceLoader(ResourceLoader resourceLoader) {
    Assert.notNull(resourceLoader, "ResourceLoader must not be null");
    this.resourceLoader = resourceLoader;
}
/**
 * Sets the type of Spring {@link ApplicationContext} that will be created. If not
 * specified defaults to {@link #DEFAULT_WEB_CONTEXT_CLASS} for web based applications
 * or {@link AnnotationConfigApplicationContext} for non web based applications.
 * <p>
 * Setting a non-web context class also switches {@code webEnvironment} off.
 * @param applicationContextClass the context class to set
 */
public void setApplicationContextClass(
        Class<? extends ConfigurableApplicationContext> applicationContextClass) {
    this.applicationContextClass = applicationContextClass;
    if (!WebApplicationContext.class.isAssignableFrom(applicationContextClass)) {
        this.webEnvironment = false;
    }
}
/**
 * Sets the {@link ApplicationContextInitializer} that will be applied to the Spring
 * {@link ApplicationContext}. Replaces any previously configured initializers.
 * @param initializers the initializers to set
 */
public void setInitializers(
        Collection<? extends ApplicationContextInitializer<?>> initializers) {
    this.initializers = new ArrayList<ApplicationContextInitializer<?>>();
    this.initializers.addAll(initializers);
}
/**
 * Add {@link ApplicationContextInitializer}s to be applied to the Spring
 * {@link ApplicationContext}.
 * @param initializers the initializers to add
 */
public void addInitializers(ApplicationContextInitializer<?>... initializers) {
    this.initializers.addAll(Arrays.asList(initializers));
}
/**
 * Returns read-only ordered Set of the {@link ApplicationContextInitializer}s that
 * will be applied to the Spring {@link ApplicationContext}.
 * @return the initializers
 */
public Set<ApplicationContextInitializer<?>> getInitializers() {
    return asUnmodifiableOrderedSet(this.initializers);
}
/**
 * Sets the {@link ApplicationListener}s that will be applied to the SpringApplication
 * and registered with the {@link ApplicationContext}. Replaces any previously
 * configured listeners.
 * @param listeners the listeners to set
 */
public void setListeners(Collection<? extends ApplicationListener<?>> listeners) {
    this.listeners = new ArrayList<ApplicationListener<?>>();
    this.listeners.addAll(listeners);
}
/**
 * Add {@link ApplicationListener}s to be applied to the SpringApplication and
 * registered with the {@link ApplicationContext}.
 * @param listeners the listeners to add
 */
public void addListeners(ApplicationListener<?>... listeners) {
    this.listeners.addAll(Arrays.asList(listeners));
}
/**
 * Returns read-only ordered Set of the {@link ApplicationListener}s that will be
 * applied to the SpringApplication and registered with the
 * {@link ApplicationContext}.
 * @return the listeners
 */
public Set<ApplicationListener<?>> getListeners() {
    return asUnmodifiableOrderedSet(this.listeners);
}
/**
 * Static helper that can be used to run a {@link SpringApplication} from the
 * specified source using default settings.
 * @param source the source to load
 * @param args the application arguments (usually passed from a Java main method)
 * @return the running {@link ApplicationContext}
 */
public static ConfigurableApplicationContext run(Object source, String... args) {
    return run(new Object[] { source }, args);
}
/**
 * Static helper that can be used to run a {@link SpringApplication} from the
 * specified sources using default settings and user supplied arguments.
 * @param sources the sources to load
 * @param args the application arguments (usually passed from a Java main method)
 * @return the running {@link ApplicationContext}
 */
public static ConfigurableApplicationContext run(Object[] sources, String[] args) {
    return new SpringApplication(sources).run(args);
}
/**
 * A basic main that can be used to launch an application. This method is useful when
 * application sources are defined via a {@literal --spring.main.sources} command line
 * argument.
 * <p>
 * Most developers will want to define their own main method and call the
 * {@link #run(Object, String...) run} method instead.
 * @param args command line arguments
 * @throws Exception if the application cannot be started
 * @see SpringApplication#run(Object[], String[])
 * @see SpringApplication#run(Object, String...)
 */
public static void main(String[] args) throws Exception {
    SpringApplication.run(new Object[0], args);
}
/**
 * Static helper that can be used to exit a {@link SpringApplication} and obtain a
 * code indicating success (0) or otherwise. Does not throw exceptions but should
 * print stack traces of any encountered. Applies the specified
 * {@link ExitCodeGenerator} in addition to any Spring beans that implement
 * {@link ExitCodeGenerator}. In the case of multiple exit codes the highest value
 * will be used (or if all values are negative, the lowest value will be used)
 * @param context the context to close if possible
 * @param exitCodeGenerators exit code generators
 * @return the outcome (0 if successful)
 */
public static int exit(ApplicationContext context,
        ExitCodeGenerator... exitCodeGenerators) {
    int exitCode = 0;
    try {
        try {
            List<ExitCodeGenerator> generators = new ArrayList<ExitCodeGenerator>();
            generators.addAll(Arrays.asList(exitCodeGenerators));
            generators.addAll(context.getBeansOfType(ExitCodeGenerator.class)
                    .values());
            exitCode = getExitCode(generators);
        }
        finally {
            // Always close the context, even if a generator failed.
            close(context);
        }
    }
    catch (Exception ex) {
        // Documented behavior: never throw; report failure via a non-zero code.
        ex.printStackTrace();
        exitCode = (exitCode == 0 ? 1 : exitCode);
    }
    return exitCode;
}
// Combine the generators' codes: the largest positive value wins; if no positive
// value is seen, the smallest negative value wins; a failing generator forces a
// non-zero code.
private static int getExitCode(List<ExitCodeGenerator> exitCodeGenerators) {
    int result = 0;
    for (ExitCodeGenerator generator : exitCodeGenerators) {
        try {
            int candidate = generator.getExitCode();
            boolean higherPositive = (candidate > 0) && (candidate > result);
            boolean lowerNegative = (candidate < 0) && (candidate < result);
            if (higherPositive || lowerNegative) {
                result = candidate;
            }
        }
        catch (Exception ex) {
            result = (result == 0 ? 1 : result);
            ex.printStackTrace();
        }
    }
    return result;
}
// Close the context if its type supports closing; otherwise do nothing.
private static void close(ApplicationContext context) {
    if (!(context instanceof ConfigurableApplicationContext)) {
        return;
    }
    ((ConfigurableApplicationContext) context).close();
}
/**
 * Sorts {@code elements} with {@link AnnotationAwareOrderComparator} and returns
 * them as an ordered, genuinely unmodifiable {@link Set} — matching the
 * "read-only" contract documented on {@link #getInitializers()} and
 * {@link #getListeners()} (previously a mutable {@link LinkedHashSet} escaped).
 * @param elements the elements to order
 * @return an unmodifiable ordered set
 */
private static <E> Set<E> asUnmodifiableOrderedSet(Collection<E> elements) {
    List<E> list = new ArrayList<E>(elements);
    Collections.sort(list, AnnotationAwareOrderComparator.INSTANCE);
    return Collections.unmodifiableSet(new LinkedHashSet<E>(list));
}
}
| |
/*
* Copyright 2016 Tuntuni.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tuntuni.controllers;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ResourceBundle;
import javafx.application.Platform;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.Cursor;
import javafx.scene.control.Button;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.ColumnConstraints;
import javafx.scene.layout.GridPane;
import javafx.stage.FileChooser;
import org.controlsfx.dialog.ExceptionDialog;
import org.tuntuni.Core;
import org.tuntuni.connection.Client;
import org.tuntuni.util.Database;
import org.tuntuni.util.FileService;
/**
* To view or edit user's profile
*/
public class ProfileController implements Initializable {
// FXML-injected controls (ids must match the FXML document).
@FXML
private TextField userName;
@FXML
private TextField statusText;
@FXML
private TextArea aboutMe;
@FXML
private Button avatarButton;
@FXML
private ImageView avatarImage;
// Edit buttons, only shown when viewing one's own profile and a field changed.
@FXML
private Button editNameButton;
@FXML
private Button editStatusButton;
@FXML
private Button editAboutMeButton;
@FXML
private GridPane aboutGridPane;
// Contact actions, only shown when viewing a connected client's profile.
@FXML
private Button messageButton;
@FXML
private Button videoCallButton;
// Preferred width of grid column 1; the literal is a fallback, it is overwritten
// from the FXML layout in initialize().
private double initialGridWidth = 170;
// Registers this controller with Core, captures the layout's preferred column
// width, and wires text listeners that reveal the matching "edit" button only
// when the user is viewing their own profile and the text differs from the
// stored value.
@Override
public void initialize(URL url, ResourceBundle rb) {
    Core.instance().profile(this);
    // Remember column 1's preferred width so loadClientProfile() can restore it
    // after loadDefaultProfile() collapses it to zero.
    initialGridWidth = aboutGridPane.getColumnConstraints().get(1).getPrefWidth();
    editNameButton.setVisible(false);
    editStatusButton.setVisible(false);
    editAboutMeButton.setVisible(false);
    // Listener args: (observable, oldValue, newValue).
    userName.textProperty().addListener((a, b, c) -> {
        boolean show = isDefaultProfile()
                && !c.equals(Core.instance().user().getName());
        editNameButton.setVisible(show);
    });
    statusText.textProperty().addListener((a, b, c) -> {
        boolean show = isDefaultProfile()
                && !c.equals(Core.instance().user().getStatus());
        editStatusButton.setVisible(show);
    });
    aboutMe.textProperty().addListener((a, b, c) -> {
        boolean show = isDefaultProfile()
                && !c.equals(Core.instance().user().getAboutMe());
        editAboutMeButton.setVisible(show);
    });
}
// Re-render the profile view on the JavaFX application thread, choosing between
// the local user's profile and the selected client's profile.
public void refresh() {
    Platform.runLater(() -> {
        if (!isDefaultProfile()) {
            loadClientProfile();
        }
        else {
            loadDefaultProfile();
        }
    });
}
// The currently selected remote client, or null when none is selected.
Client client() {
    return Core.instance().selected();
}
// True when the view should show the local user's own (editable) profile:
// no client is selected, or the selected one is not connected.
public boolean isDefaultProfile() {
    Client selected = client();
    return selected == null || !selected.isConnected();
}
// Render the selected (connected) client's profile: read-only fields, contact
// actions visible, and the detail column restored to its layout width.
private void loadClientProfile() {
    // preliminary values
    final double width = avatarImage.getFitWidth();
    final double height = avatarImage.getFitHeight();
    final String user = client().getUserData().getUserName();
    final String status = client().getUserData().getStatus();
    final String about = client().getUserData().getAboutMe();
    final Image avatar = client().getUserData().getAvatar(width, height);
    // display all data
    // NOTE(review): a trailing space is appended to each value — presumably so
    // these texts never equal the local user's stored values in the
    // initialize() listeners; confirm before removing.
    userName.setText(user + " ");
    statusText.setText(status + " ");
    aboutMe.setText(about + " ");
    avatarImage.setImage(avatar);
    // make things appear
    ColumnConstraints cc = aboutGridPane.getColumnConstraints().get(1);
    cc.setMinWidth(initialGridWidth);
    cc.setMaxWidth(initialGridWidth);
    messageButton.setVisible(true);
    videoCallButton.setVisible(true);
    // set edit states of each controls
    aboutMe.setEditable(false);
    userName.setEditable(false);
    statusText.setEditable(false);
    userName.setCursor(Cursor.DEFAULT);
    statusText.setCursor(Cursor.DEFAULT);
    aboutMe.setCursor(Cursor.DEFAULT);
    userName.setId("user-name");
    statusText.setId("user-status");
    aboutMe.setId("user-about");
}
private void loadDefaultProfile() {
// preliminary values
final double width = avatarImage.getFitWidth();
final double height = avatarImage.getFitHeight();
final String user = Core.instance().user().getName();
final String status = Core.instance().user().getStatus();
final String about = Core.instance().user().getAboutMe();
final Image avatar = Core.instance().user().getAvatarImage(width, height);
// display all data
userName.setText(user);
statusText.setText(status);
aboutMe.setText(about);
avatarImage.setImage(avatar);
// make things appear
ColumnConstraints cc = aboutGridPane.getColumnConstraints().get(1);
cc.setMinWidth(0);
cc.setMaxWidth(0);
messageButton.setVisible(false);
videoCallButton.setVisible(false);
// set edit states of each controls
aboutMe.setEditable(true);
userName.setEditable(true);
statusText.setEditable(true);
userName.setCursor(Cursor.TEXT);
statusText.setCursor(Cursor.TEXT);
aboutMe.setCursor(Cursor.TEXT);
userName.setId("my-name");
statusText.setId("my-status");
aboutMe.setId("my-about");
}
@FXML
private void handleSendMessage(ActionEvent event) {
Core.instance().main().selectMessaging();
}
@FXML
private void handleSendCall(ActionEvent event) {
Core.instance().main().selectVideoCall();
}
@FXML
private void changeAvatar(ActionEvent event) {
if (!isDefaultProfile()) {
return;
}
FileChooser fc = new FileChooser();
File init = new File(Database.instance().get("Initial Directory"));
if (init.exists()) {
fc.setInitialDirectory(init);
}
fc.getExtensionFilters().setAll(
new FileChooser.ExtensionFilter("Image Files",
"*.png", "*.jpg", "*.bmp", "*.gif"));
fc.setTitle("Choose your avatar...");
File choosen = fc.showOpenDialog(Core.instance().stage());
if (choosen != null) {
changeAvatar(choosen);
Database.instance().set("Initial Directory",
choosen.getParentFile().toString());
}
refresh();
}
@FXML
private void changeName() {
Core.instance().user().setName(userName.getText());
editNameButton.setVisible(false);
}
@FXML
private void changeStatus() {
Core.instance().user().setStatus(statusText.getText());
editStatusButton.setVisible(false);
}
@FXML
private void changeAboutMe() {
Core.instance().user().setAboutMe(aboutMe.getText());
editAboutMeButton.setVisible(false);
}
private void changeAvatar(File choosen) {
try {
String uploaded = FileService.instance().upload(choosen);
Image image = FileService.instance().getImage(uploaded);
Core.instance().user().setAvatar(uploaded);
} catch (IOException ex) {
ExceptionDialog dialog = new ExceptionDialog(ex);
dialog.setTitle("Failed to upload image");
dialog.showAndWait();
}
}
}
| |
package com.opentransport.rdfmapper.nmbs;
import com.opentransport.rdfmapper.nmbs.containers.StationInfo;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.input.SAXBuilder;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.jsoup.Jsoup;
/**
 * Fetches NMBS/SNCB station information (name, id, coordinates) from the
 * Belgian Rail HAFAS "extxml" endpoint and writes the full station list to
 * {@code NMBS_stations.json}.
 *
 * @author Nicola De Clercq
 */
public class StationFetcher {

    /**
     * Resolves every station name listed in {@code NMBS_stations.txt} to its
     * id and coordinates and writes the de-duplicated result to
     * {@code NMBS_stations.json}. Progress/sanity messages (in Dutch) are
     * printed to stdout.
     */
    public void writeAllStationsToJson() {
        List<String> fetchedStationNames = fetchAllStationNamesFromFile();
        Set<String> ids = new HashSet<>();
        JSONObject json = new JSONObject();
        JSONArray stations = new JSONArray();
        for (int i = 0; i < fetchedStationNames.size(); i++) {
            String stationName = fetchedStationNames.get(i);
            // " nmbs" steers the fuzzy HAFAS match toward NMBS stations.
            StationInfo info = getStationInfo(stationName + " nmbs");
            String id = info.getId();
            if (!ids.contains(id)) {
                ids.add(id);
                // Sanity checks on the fetched record.
                if (!info.getName().contains("NMBS")) {
                    System.out.println("Geen NMBS in naam: " + stationName);
                }
                if (id.length() != 9) {
                    System.out.println("Verkeerde lengte voor ID: " + stationName);
                }
                JSONObject station = new JSONObject();
                station.put("name",info.getName());
                station.put("id",info.getId());
                station.put("longitude",info.getLongitude());
                station.put("latitude",info.getLatitude());
                stations.add(station);
            }
            else {
                System.out.println("Dubbel: " + stationName + " (" + id + ")");
            }
        }
        json.put("stations",stations);
        System.out.println("Aantal stations: " + stations.size());
        // try-with-resources: the writer is closed even when write() fails.
        try (FileWriter fw = new FileWriter("NMBS_stations.json")) {
            fw.write(json.toString());
        } catch (IOException ex) {
            Logger.getLogger(StationFetcher.class.getName()).log(Level.SEVERE,null,ex);
        }
    }

    /**
     * @param stationName name of the station to look up
     * @return the external station number ("nmbs" is appended to narrow the match)
     */
    public String getStationId(String stationName) {
        return getStationInfo(stationName + " nmbs").getId();
    }

    /**
     * Queries the HAFAS endpoint for the given name and parses the first
     * matching station out of the XML response.
     *
     * @return a populated {@link StationInfo}; fields stay unset when the
     *         request or the XML parse fails (the failure is logged)
     */
    private StationInfo getStationInfo(String stationName) {
        StationInfo info = new StationInfo();
        String data = fetchStationData(stationName,1);
        try {
            Document doc = new SAXBuilder().build(new StringReader(data));
            Element root = doc.getRootElement();
            List<Element> res = root.getChildren();
            // assumes the response always contains at least one station match
            // for maxNr=1 — TODO confirm; an empty result list would throw
            // IndexOutOfBoundsException here.
            List<Element> stations = res.get(0).getChildren();
            Element station = stations.get(0);
            info.setName(station.getAttributeValue("name"));
            info.setId(station.getAttributeValue("externalStationNr"));
            // Coordinates arrive as integers with six implied decimal places;
            // re-insert the decimal point to get degrees.
            String x = station.getAttributeValue("x");
            int xl = x.length() - 6;
            String longitude = x.substring(0,xl) + "." + x.substring(xl);
            info.setLongitude(longitude);
            String y = station.getAttributeValue("y");
            int yl = y.length() - 6;
            String latitude = y.substring(0,yl) + "." + y.substring(yl);
            info.setLatitude(latitude);
        } catch (JDOMException | IOException ex) {
            Logger.getLogger(StationFetcher.class.getName()).log(Level.SEVERE,null,ex);
        }
        return info;
    }

    /**
     * Reads station names, one per line, from {@code NMBS_stations.txt};
     * blank lines are skipped.
     *
     * @return the list of names (possibly empty when the file is unreadable)
     */
    private List<String> fetchAllStationNamesFromFile() {
        List<String> stations = new ArrayList<>();
        // try-with-resources replaces the manual close() that leaked the
        // reader whenever readLine() threw.
        try (BufferedReader br = new BufferedReader(new FileReader("NMBS_stations.txt"))) {
            String line;
            while ((line = br.readLine()) != null) {
                if (!line.isEmpty()) {
                    stations.add(line);
                }
            }
        } catch (IOException ex) {
            // FileNotFoundException is an IOException; one handler suffices.
            Logger.getLogger(StationFetcher.class.getName()).log(Level.SEVERE,null,ex);
        }
        System.out.println("Aantal stationsnamen: " + stations.size());
        return stations;
    }

    /**
     * Scrapes the public station list page and extracts station names from
     * the &lt;br&gt;-separated body. Names like "-foo (bar)" are stripped of
     * the leading dash and the parenthesized suffix.
     *
     * @return the scraped names (possibly empty when the request fails)
     */
    private List<String> fetchAllStationNamesFromWeb() {
        List<String> stations = new ArrayList<>();
        try {
            org.jsoup.nodes.Document doc = Jsoup.connect("http://www.b-rail.be/main/stationsinfo/station_list.php").get();
            String html = doc.body().toString();
            String[] htmlList = html.toLowerCase().split("<br[^>]*>");
            for (int i = 0; i < htmlList.length; i++) {
                String station = htmlList[i];
                if (station.matches("-[a-z].*")) {
                    if (station.matches("-[a-z].*\\(.*\\)")) {
                        station = station.substring(0,station.indexOf("("));
                    }
                    stations.add(station.substring(1));
                }
            }
        } catch (IOException ex) {
            Logger.getLogger(StationFetcher.class.getName()).log(Level.SEVERE,null,ex);
        }
        for (int i = 0; i < stations.size(); i++) {
            System.out.println(stations.get(i));
        }
        System.out.println("Aantal stationsnamen: " + stations.size());
        return stations;
    }

    /**
     * Posts a HAFAS {@code LocValReq} query for the given station name and
     * returns the raw XML response body.
     *
     * @param stationName     the (fuzzy) station name to match
     * @param numberOfResults maximum number of matches to request
     * @return the response body, or an empty string when the request fails
     */
    private String fetchStationData(String stationName, int numberOfResults) {
        // BUG FIX: the XML declaration was missing the closing quote after
        // "1.0" (version=\"1.0 encoding=...), producing a malformed request.
        String postData = "<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>"
                + "<ReqC ver=\"1.1\" prod=\"iRail API v1.0\" lang=\"nl\">"
                + "<LocValReq id=\"stat1\" maxNr=\"" + numberOfResults + "\">"
                + "<ReqLoc match=\"" + stationName + "\" type=\"ST\"/>"
                + "</LocValReq>"
                + "</ReqC>";
        StringBuilder sb = new StringBuilder();
        // Single try block: previously each step caught-and-logged its own
        // exception and then dereferenced a possibly-null url/conn (NPE).
        try {
            URL url = new URL("http://www.belgianrail.be/jpm/sncb-nmbs-routeplanner/extxml.exe");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setDoOutput(true);
            conn.setRequestMethod("POST");
            conn.setRequestProperty("Content-Type","application/x-www-form-urlencoded");
            // Encode explicitly: Content-Length must count bytes, not chars.
            byte[] body = postData.getBytes("ISO-8859-1");
            conn.setRequestProperty("Content-Length",String.valueOf(body.length));
            try (DataOutputStream os = new DataOutputStream(conn.getOutputStream())) {
                os.write(body);
            }
            try (BufferedReader br = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "ISO-8859-1"))) {
                String line;
                while ((line = br.readLine()) != null) {
                    sb.append(line);
                }
            }
        } catch (IOException ex) {
            Logger.getLogger(StationFetcher.class.getName()).log(Level.SEVERE,null,ex);
        }
        return sb.toString();
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.component.pinger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.ejb.Lock;
import javax.ejb.LockType;
import javax.ejb.Schedule;
import javax.ejb.Singleton;
import javax.ejb.Startup;
import org.hawkular.inventory.api.Action;
import org.hawkular.inventory.api.Interest;
import org.hawkular.inventory.api.Inventory;
import org.hawkular.inventory.api.filters.With;
import org.hawkular.inventory.api.model.Resource;
/**
 * A SLSB that coordinates the pinging of resources.
 *
 * @author Heiko W. Rupp
 * @author <a href="https://github.com/ppalaga">Peter Palaga</a>
 */
@Startup
@Singleton
public class PingManager {

    /** How many rounds of WAIT_MILLIS do we wait for results to come in? */
    private static final int ROUNDS = 15;

    /** How long do we wait between each round in milliseconds */
    private static final int WAIT_MILLIS = 500;

    /**
     * Rough timeout in milliseconds for the pings after which the pings are cancelled and reported as timeouted. Note
     * that in practice, the real time given to pings can be longer.
     */
    private static final int TIMEOUT_MILLIS = ROUNDS * WAIT_MILLIS;

    @EJB
    Pinger pinger;

    /** Destinations to ping; mutated only from {@link #startUp()} and {@link #scheduleWork()}. */
    private final Set<PingDestination> destinations = new HashSet<>();

    @EJB
    MetricPublisher metricPublisher;

    @EJB
    TraitsPublisher traitsPublisher;

    @javax.annotation.Resource(lookup = "java:global/Hawkular/Inventory")
    Inventory inventory;

    /** Buffers URL additions/removals observed between scheduled runs. */
    final UrlChangesCollector urlChangesCollector = new UrlChangesCollector();

    /**
     * Subscribes to inventory URL create/delete events and seeds
     * {@link #destinations} with the URLs already present in the inventory.
     */
    @PostConstruct
    public void startUp() {
        /*
         * Add the observers before reading the existing URLs from the inventory so that we do not lose the URLs that
         * could have been added or removed between those two calls.
         */
        inventory.observable(Interest.in(Resource.class).being(Action.created())).subscribe(
                urlChangesCollector.getUrlCreatedAction());
        inventory.observable(Interest.in(Resource.class).being(Action.deleted())).subscribe(
                urlChangesCollector.getUrlDeletedAction());
        // we use just an observable inventory here, because it allows us to see all the tenants. This essentially
        // circumvents any authz present on the inventory.
        // We need that though because pinger doesn't have storage of its own and is considered "trusted", so it's ok.
        Set<Resource> urls = inventory.tenants().getAll().allResourceTypes().getAll(With.id(PingDestination.URL_TYPE))
                .resources().getAll().entities();
        Log.LOG.iInitializedWithUrls(urls.size());
        for (Resource r : urls) {
            PingDestination dest = PingDestination.from(r);
            destinations.add(dest);
            Log.LOG.debugf("Added initial URL to ping: %s", dest.getUrl());
        }
    }

    /**
     * This method triggers the actual work by starting pingers, collecting their return values and then publishing
     * them.
     * <p>
     * Concurrency assumptions:
     * <ul>
     * <li>{@link #scheduleWork()} will not overlap with {@link #startUp()} - we assume this to be granted by the EE
     * container.
     * <li>Individual {@link #scheduleWork()} invocations will not overlap each other - we also assume this to be
     * granted by the EE container.
     */
    @Lock(LockType.READ)
    @Schedule(minute = "*", hour = "*", second = "0,20,40", persistent = false)
    public void scheduleWork() {
        Log.LOG.debugf("Pinger awake to ping");
        /* Apply URL additions and removals collected in between. */
        urlChangesCollector.apply(this.destinations);
        if (destinations.isEmpty()) {
            Log.LOG.debugf("Nothing to ping");
            return;
        }
        doThePing(destinations);
    }

    /**
     * Runs the pinging work on the provided list of destinations. The actual pings are scheduled to run in parallel in
     * a thread pool. After ROUNDS*WAIT_MILLIS, remaining pings are cancelled and reported as timeouts.
     *
     * @param destinations Set of destinations to ping
     */
    private void doThePing(Set<PingDestination> destinations) {
        Log.LOG.debugf("About to ping %d URLs", destinations.size());
        List<PingStatus> results = new ArrayList<>(destinations.size());
        // In case of timeouts we will not be able to get the PingStatus from the Future, so use a Map
        // to keep track of what destination's ping actually hung.
        Map<Future<PingStatus>, PingDestination> futures = new HashMap<>(destinations.size());
        for (PingDestination destination : destinations) {
            Future<PingStatus> result = pinger.ping(destination);
            futures.put(result, destination);
        }
        int round = 1;
        // "<=" so we really poll for ROUNDS rounds (~TIMEOUT_MILLIS in total);
        // "<" waited one round short of the documented ROUNDS.
        while (!futures.isEmpty() && round <= ROUNDS) {
            Iterator<Future<PingStatus>> iterator = futures.keySet().iterator();
            while (iterator.hasNext()) {
                Future<PingStatus> f = iterator.next();
                if (f.isDone()) {
                    try {
                        results.add(f.get());
                    } catch (InterruptedException | ExecutionException e) {
                        e.printStackTrace(); // TODO: Customise this generated block
                    }
                    iterator.remove();
                }
            }
            try {
                Thread.sleep(WAIT_MILLIS); // wait until the next iteration
            } catch (InterruptedException e) {
                // Restore the interrupt flag and stop waiting; any pings still
                // outstanding are cancelled and reported as timeouts below.
                Thread.currentThread().interrupt();
                break;
            }
            round++;
        }
        // Cancel hanging pings and report them as timeouts
        for (Map.Entry<Future<PingStatus>, PingDestination> entry : futures.entrySet()) {
            entry.getKey().cancel(true);
            PingDestination destination = entry.getValue();
            final long now = System.currentTimeMillis();
            PingStatus ps = PingStatus.timeout(destination, now, TIMEOUT_MILLIS);
            results.add(ps);
            Log.LOG.debugf("Timed out: %s", destination.getUrl());
        }
        reportResults(results);
    }

    /**
     * Publishes each collected status to the metrics REST endpoint, the
     * metrics topic and the traits publisher. No-op for an empty list.
     */
    private void reportResults(List<PingStatus> results) {
        if (results.isEmpty()) {
            return;
        }
        for (PingStatus status : results) {
            metricPublisher.sendToMetricsViaRest(status);
            metricPublisher.publishToTopic(status);
            traitsPublisher.publish(status);
        }
    }
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.liferay.knowledgebase.service;
import com.liferay.portal.service.InvokableService;
/**
* @author Brian Wing Shun Chan
* @generated
*/
public class KBTemplateServiceClp implements KBTemplateService {
    // Generated class-loader-proxy (CLP): each service method below forwards
    // its call through _invokableService, identifying the target by the
    // method-name / parameter-type-signature pair registered here. The type
    // strings must match what the generator produced on the implementation
    // side — do not edit this class by hand; regenerate it instead.
    // NOTE(review): index 2 is skipped by the generator (presumably reserved
    // for invokeMethod itself — confirm before renumbering).
    public KBTemplateServiceClp(InvokableService invokableService) {
        _invokableService = invokableService;
        _methodName0 = "getBeanIdentifier";
        _methodParameterTypes0 = new String[] {  };
        _methodName1 = "setBeanIdentifier";
        _methodParameterTypes1 = new String[] { "java.lang.String" };
        _methodName3 = "addKBTemplate";
        _methodParameterTypes3 = new String[] {
                "java.lang.String", "java.lang.String", "java.lang.String",
                "com.liferay.portal.service.ServiceContext"
            };
        _methodName4 = "deleteKBTemplate";
        _methodParameterTypes4 = new String[] { "long" };
        _methodName5 = "deleteKBTemplates";
        _methodParameterTypes5 = new String[] { "long", "long[][]" };
        _methodName6 = "getGroupKBTemplates";
        _methodParameterTypes6 = new String[] {
                "long", "int", "int",
                "com.liferay.portal.kernel.util.OrderByComparator"
            };
        _methodName7 = "getGroupKBTemplatesCount";
        _methodParameterTypes7 = new String[] { "long" };
        _methodName8 = "getKBTemplate";
        _methodParameterTypes8 = new String[] { "long" };
        _methodName9 = "getKBTemplateSearchDisplay";
        _methodParameterTypes9 = new String[] {
                "long", "java.lang.String", "java.lang.String", "java.util.Date",
                "java.util.Date", "boolean", "int[][]", "int", "int",
                "com.liferay.portal.kernel.util.OrderByComparator"
            };
        _methodName10 = "updateKBTemplate";
        _methodParameterTypes10 = new String[] {
                "long", "java.lang.String", "java.lang.String",
                "com.liferay.portal.service.ServiceContext"
            };
    }

    @Override
    public java.lang.String getBeanIdentifier() {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName0,
                    _methodParameterTypes0, new Object[] {  });
        }
        catch (Throwable t) {
            // Translate the throwable back across the class-loader boundary,
            // rethrow known unchecked types and wrap everything else.
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (java.lang.String)ClpSerializer.translateOutput(returnObj);
    }

    @Override
    public void setBeanIdentifier(java.lang.String beanIdentifier) {
        try {
            _invokableService.invokeMethod(_methodName1,
                _methodParameterTypes1,
                new Object[] { ClpSerializer.translateInput(beanIdentifier) });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
    }

    // The proxy itself is the caller side of the bridge; it is never the
    // invocation target.
    @Override
    public java.lang.Object invokeMethod(java.lang.String name,
        java.lang.String[] parameterTypes, java.lang.Object[] arguments)
        throws java.lang.Throwable {
        throw new UnsupportedOperationException();
    }

    @Override
    public com.liferay.knowledgebase.model.KBTemplate addKBTemplate(
        java.lang.String portletId, java.lang.String title,
        java.lang.String content,
        com.liferay.portal.service.ServiceContext serviceContext)
        throws com.liferay.portal.kernel.exception.PortalException,
            com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName3,
                    _methodParameterTypes3,
                    new Object[] {
                        ClpSerializer.translateInput(portletId),
                        ClpSerializer.translateInput(title),
                        ClpSerializer.translateInput(content),
                        ClpSerializer.translateInput(serviceContext)
                    });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
                throw (com.liferay.portal.kernel.exception.PortalException)t;
            }
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.knowledgebase.model.KBTemplate)ClpSerializer.translateOutput(returnObj);
    }

    @Override
    public com.liferay.knowledgebase.model.KBTemplate deleteKBTemplate(
        long kbTemplateId)
        throws com.liferay.portal.kernel.exception.PortalException,
            com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName4,
                    _methodParameterTypes4, new Object[] { kbTemplateId });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
                throw (com.liferay.portal.kernel.exception.PortalException)t;
            }
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.knowledgebase.model.KBTemplate)ClpSerializer.translateOutput(returnObj);
    }

    @Override
    public void deleteKBTemplates(long groupId, long[] kbTemplateIds)
        throws com.liferay.portal.kernel.exception.PortalException,
            com.liferay.portal.kernel.exception.SystemException {
        try {
            _invokableService.invokeMethod(_methodName5,
                _methodParameterTypes5,
                new Object[] {
                    groupId,
                    ClpSerializer.translateInput(kbTemplateIds)
                });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
                throw (com.liferay.portal.kernel.exception.PortalException)t;
            }
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
    }

    @Override
    public java.util.List<com.liferay.knowledgebase.model.KBTemplate> getGroupKBTemplates(
        long groupId, int start, int end,
        com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
        throws com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName6,
                    _methodParameterTypes6,
                    new Object[] {
                        groupId,
                        start,
                        end,
                        ClpSerializer.translateInput(orderByComparator)
                    });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (java.util.List<com.liferay.knowledgebase.model.KBTemplate>)ClpSerializer.translateOutput(returnObj);
    }

    @Override
    public int getGroupKBTemplatesCount(long groupId)
        throws com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName7,
                    _methodParameterTypes7, new Object[] { groupId });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return ((Integer)returnObj).intValue();
    }

    @Override
    public com.liferay.knowledgebase.model.KBTemplate getKBTemplate(
        long kbTemplateId)
        throws com.liferay.portal.kernel.exception.PortalException,
            com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName8,
                    _methodParameterTypes8, new Object[] { kbTemplateId });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
                throw (com.liferay.portal.kernel.exception.PortalException)t;
            }
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.knowledgebase.model.KBTemplate)ClpSerializer.translateOutput(returnObj);
    }

    @Override
    public com.liferay.knowledgebase.model.KBTemplateSearchDisplay getKBTemplateSearchDisplay(
        long groupId, java.lang.String title, java.lang.String content,
        java.util.Date startDate, java.util.Date endDate, boolean andOperator,
        int[] curStartValues, int cur, int delta,
        com.liferay.portal.kernel.util.OrderByComparator orderByComparator)
        throws com.liferay.portal.kernel.exception.PortalException,
            com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName9,
                    _methodParameterTypes9,
                    new Object[] {
                        groupId,
                        ClpSerializer.translateInput(title),
                        ClpSerializer.translateInput(content),
                        ClpSerializer.translateInput(startDate),
                        ClpSerializer.translateInput(endDate),
                        andOperator,
                        ClpSerializer.translateInput(curStartValues),
                        cur,
                        delta,
                        ClpSerializer.translateInput(orderByComparator)
                    });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
                throw (com.liferay.portal.kernel.exception.PortalException)t;
            }
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.knowledgebase.model.KBTemplateSearchDisplay)ClpSerializer.translateOutput(returnObj);
    }

    @Override
    public com.liferay.knowledgebase.model.KBTemplate updateKBTemplate(
        long kbTemplateId, java.lang.String title, java.lang.String content,
        com.liferay.portal.service.ServiceContext serviceContext)
        throws com.liferay.portal.kernel.exception.PortalException,
            com.liferay.portal.kernel.exception.SystemException {
        Object returnObj = null;
        try {
            returnObj = _invokableService.invokeMethod(_methodName10,
                    _methodParameterTypes10,
                    new Object[] {
                        kbTemplateId,
                        ClpSerializer.translateInput(title),
                        ClpSerializer.translateInput(content),
                        ClpSerializer.translateInput(serviceContext)
                    });
        }
        catch (Throwable t) {
            t = ClpSerializer.translateThrowable(t);
            if (t instanceof com.liferay.portal.kernel.exception.PortalException) {
                throw (com.liferay.portal.kernel.exception.PortalException)t;
            }
            if (t instanceof com.liferay.portal.kernel.exception.SystemException) {
                throw (com.liferay.portal.kernel.exception.SystemException)t;
            }
            if (t instanceof RuntimeException) {
                throw (RuntimeException)t;
            }
            else {
                throw new RuntimeException(t.getClass().getName() +
                    " is not a valid exception");
            }
        }
        return (com.liferay.knowledgebase.model.KBTemplate)ClpSerializer.translateOutput(returnObj);
    }

    // Reflective dispatch tables, populated once in the constructor.
    private InvokableService _invokableService;
    private String _methodName0;
    private String[] _methodParameterTypes0;
    private String _methodName1;
    private String[] _methodParameterTypes1;
    private String _methodName3;
    private String[] _methodParameterTypes3;
    private String _methodName4;
    private String[] _methodParameterTypes4;
    private String _methodName5;
    private String[] _methodParameterTypes5;
    private String _methodName6;
    private String[] _methodParameterTypes6;
    private String _methodName7;
    private String[] _methodParameterTypes7;
    private String _methodName8;
    private String[] _methodParameterTypes8;
    private String _methodName9;
    private String[] _methodParameterTypes9;
    private String _methodName10;
    private String[] _methodParameterTypes10;
}
| |
package org.ovirt.engine.core.bll;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import javax.inject.Inject;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.bll.context.CommandContext;
import org.ovirt.engine.core.bll.profiles.DiskProfileHelper;
import org.ovirt.engine.core.bll.quota.QuotaConsumptionParameter;
import org.ovirt.engine.core.bll.quota.QuotaStorageConsumptionParameter;
import org.ovirt.engine.core.bll.quota.QuotaStorageDependent;
import org.ovirt.engine.core.bll.tasks.CommandCoordinatorUtil;
import org.ovirt.engine.core.bll.utils.PermissionSubject;
import org.ovirt.engine.core.bll.utils.VmDeviceUtils;
import org.ovirt.engine.core.bll.validator.LocalizedVmStatus;
import org.ovirt.engine.core.bll.validator.storage.DiskImagesValidator;
import org.ovirt.engine.core.bll.validator.storage.DiskValidator;
import org.ovirt.engine.core.bll.validator.storage.StorageDomainValidator;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.VdcActionUtils;
import org.ovirt.engine.core.common.VdcObjectType;
import org.ovirt.engine.core.common.action.ExtendImageSizeParameters;
import org.ovirt.engine.core.common.action.LockProperties;
import org.ovirt.engine.core.common.action.LockProperties.Scope;
import org.ovirt.engine.core.common.action.UpdateVmDiskParameters;
import org.ovirt.engine.core.common.action.VdcActionParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.businessentities.ActionGroup;
import org.ovirt.engine.core.common.businessentities.Snapshot.SnapshotType;
import org.ovirt.engine.core.common.businessentities.StorageDomain;
import org.ovirt.engine.core.common.businessentities.StorageDomainStatic;
import org.ovirt.engine.core.common.businessentities.SubjectEntity;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VmDevice;
import org.ovirt.engine.core.common.businessentities.network.VmNic;
import org.ovirt.engine.core.common.businessentities.storage.CinderDisk;
import org.ovirt.engine.core.common.businessentities.storage.Disk;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.core.common.businessentities.storage.DiskStorageType;
import org.ovirt.engine.core.common.businessentities.storage.ImageStatus;
import org.ovirt.engine.core.common.businessentities.storage.LunDisk;
import org.ovirt.engine.core.common.businessentities.storage.StorageType;
import org.ovirt.engine.core.common.errors.EngineMessage;
import org.ovirt.engine.core.common.locks.LockingGroup;
import org.ovirt.engine.core.common.utils.ObjectUtils;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.vdscommands.SetVolumeDescriptionVDSCommandParameters;
import org.ovirt.engine.core.common.vdscommands.VDSCommandType;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBase;
import org.ovirt.engine.core.utils.transaction.TransactionMethod;
import org.ovirt.engine.core.utils.transaction.TransactionSupport;
/**
 * Updates the properties of a VM disk (alias, description, interface, boot flag,
 * read-only, shareable, wipe-after-delete, SGIO, quota, disk profile) and, when a
 * larger size is requested, extends the underlying IMAGE volume or CINDER disk.
 *
 * Compensation is forced so that a partially-applied update can be rolled back
 * after an engine restart (see the Guid-only constructor).
 */
@NonTransactiveCommandAttribute(forceCompensation = true)
public class UpdateVmDiskCommand<T extends UpdateVmDiskParameters> extends AbstractDiskVmCommand<T>
        implements QuotaStorageDependent {

    private List<PermissionSubject> listPermissionSubjects;
    // Per-VM cache of that VM's disks excluding the disk being updated; see getOtherVmDisks().
    private final Map<Guid, List<Disk>> otherVmDisks = new HashMap<>();
    // VMs where this disk is plugged via a snapshot (device carries a snapshot id).
    private final List<VM> vmsDiskSnapshotPluggedTo = new LinkedList<>();
    // VMs where this disk is plugged as the active (non-snapshot) disk.
    private final List<VM> vmsDiskPluggedTo = new LinkedList<>();
    // Union of the two lists above: every VM the disk is currently plugged to.
    private final List<VM> vmsDiskOrSnapshotPluggedTo = new LinkedList<>();
    // Every VM the disk is attached to, whether plugged or not.
    private final List<VM> vmsDiskOrSnapshotAttachedTo = new LinkedList<>();

    @Inject
    private VmSlaPolicyUtils vmSlaPolicyUtils;

    /**
     * vm device for the given vm and disk
     */
    private VmDevice vmDeviceForVm;
    // Lazily loaded pre-update disk entity; access via getOldDisk().
    private Disk oldDisk;

    public UpdateVmDiskCommand(T parameters) {
        this(parameters, null);
    }

    public UpdateVmDiskCommand(T parameters, CommandContext commandContext) {
        super(parameters, commandContext);
        // Eagerly resolve which VMs the disk is attached/plugged to; the lock
        // computation and validation below depend on these lists.
        loadVmDiskAttachedToInfo();
    }

    /**
     * This constructor is mandatory for activation of the compensation process
     * after the server restart.
     * @param commandId
     */
    public UpdateVmDiskCommand(Guid commandId) {
        super(commandId);
    }

    @Override
    protected LockProperties applyLockProperties(LockProperties lockProperties) {
        return lockProperties.withScope(Scope.Command);
    }

    /** Shared VM locks for every VM the disk (or its snapshot) is plugged to. */
    @Override
    protected Map<String, Pair<String, String>> getSharedLocks() {
        Map<String, Pair<String, String>> sharedLock = new HashMap<>();
        for (VM vm : vmsDiskOrSnapshotPluggedTo) {
            sharedLock.put(vm.getId().toString(),
                    LockMessagesMatchUtil.makeLockingPair(LockingGroup.VM, EngineMessage.ACTION_TYPE_FAILED_VM_IS_LOCKED));
        }
        return sharedLock.isEmpty() ? null : sharedLock;
    }

    /**
     * Exclusive locks: per-VM boot lock when the disk becomes bootable (only one
     * bootable disk per VM), and a disk lock when a resize is requested.
     */
    @Override
    protected Map<String, Pair<String, String>> getExclusiveLocks() {
        Map<String, Pair<String, String>> exclusiveLock = new HashMap<>();
        if (getNewDisk().isBoot()) {
            for (VM vm : vmsDiskPluggedTo) {
                exclusiveLock.put(vm.getId().toString(),
                        LockMessagesMatchUtil.makeLockingPair(LockingGroup.VM_DISK_BOOT, EngineMessage.ACTION_TYPE_FAILED_OBJECT_LOCKED));
            }
        }
        if (resizeDiskImageRequested()) {
            exclusiveLock.put(getOldDisk().getId().toString(),
                    LockMessagesMatchUtil.makeLockingPair(LockingGroup.DISK, EngineMessage.ACTION_TYPE_FAILED_DISKS_LOCKED));
        }
        return exclusiveLock.isEmpty() ? null : exclusiveLock;
    }

    /**
     * Resize requests are dispatched to the storage-type-specific extend flow
     * (locks are released by the extend flow / endAction); a plain property
     * update is performed inline and releases the lock immediately.
     */
    @Override
    protected void executeVmCommand() {
        ImagesHandler.setDiskAlias(getParameters().getDiskInfo(), getVm());
        if (resizeDiskImageRequested()) {
            switch (getOldDisk().getDiskStorageType()) {
                case IMAGE:
                    extendDiskImageSize();
                    break;
                case CINDER:
                    extendCinderDiskSize();
                    break;
            }
        } else {
            try {
                performDiskUpdate(false);
            } finally {
                freeLock();
            }
        }
    }

    /** Full pre-execution validation; each failed check adds its own canDoAction message. */
    @Override
    protected boolean canDoAction() {
        if (!isVmExist() || !isDiskExist(getOldDisk())) {
            return false;
        }
        boolean isDiskImageOrCinder = DiskStorageType.IMAGE == getOldDisk().getDiskStorageType() ||
                DiskStorageType.CINDER == getOldDisk().getDiskStorageType();
        if (isDiskImageOrCinder) {
            ValidationResult imagesNotLocked =
                    new DiskImagesValidator(Collections.singletonList((DiskImage) getOldDisk())).diskImagesNotLocked();
            if (!imagesNotLocked.isValid()) {
                return validate(imagesNotLocked);
            }
        }
        DiskValidator oldDiskValidator = getDiskValidator(getOldDisk());
        ValidationResult isHostedEngineDisk = oldDiskValidator.validateNotHostedEngineDisk();
        if (!isHostedEngineDisk.isValid()) {
            return validate(isHostedEngineDisk);
        }
        if (!checkDiskUsedAsOvfStore(getOldDisk())) {
            return false;
        }
        if (!canRunActionOnNonManagedVm()) {
            return false;
        }
        boolean isDiskInterfaceUpdated = getOldDisk().getDiskInterface() != getNewDisk().getDiskInterface();
        if (!vmsDiskOrSnapshotPluggedTo.isEmpty()) {
            // only virtual drive size can be updated when VMs is running
            if (isAtLeastOneVmIsNotDown(vmsDiskOrSnapshotPluggedTo) && updateParametersRequiringVmDownRequested()) {
                return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_VM_IS_NOT_DOWN);
            }
            boolean isUpdatedAsBootable = !getOldDisk().isBoot() && getNewDisk().isBoot();
            // multiple boot disk snapshot can be attached to a single vm
            if (isUpdatedAsBootable && !validate(noVmsContainBootableDisks(vmsDiskPluggedTo))) {
                return false;
            }
            if (isDiskInterfaceUpdated && !validatePciAndIdeLimit(vmsDiskOrSnapshotPluggedTo)) {
                return false;
            }
        }
        if (isDiskImageOrCinder && !validateCanResizeDisk()) {
            return false;
        }
        DiskValidator diskValidator = getDiskValidator(getNewDisk());
        return validateCanUpdateShareable() && validateCanUpdateReadOnly(diskValidator) &&
                validateVmPoolProperties() &&
                validate(diskValidator.isVirtIoScsiValid(getVm())) &&
                (!isDiskInterfaceUpdated || validate(diskValidator.isDiskInterfaceSupported(getVm()))) &&
                setAndValidateDiskProfiles();
    }

    /** Builds a validator for the storage domain holding the image's first storage id. */
    protected StorageDomainValidator getStorageDomainValidator(DiskImage diskImage) {
        StorageDomain storageDomain = getStorageDomainDao().getForStoragePool(
                diskImage.getStorageIds().get(0), diskImage.getStoragePoolId());
        return new StorageDomainValidator(storageDomain);
    }

    @Override
    protected void setActionMessageParameters() {
        addCanDoActionMessage(EngineMessage.VAR__ACTION__UPDATE);
        addCanDoActionMessage(EngineMessage.VAR__TYPE__VM_DISK);
    }

    /**
     * Checks, for each VM, that replacing the old disk with the new one does not
     * exceed the PCI/IDE device limits (accounting for NICs and virtio-scsi,
     * watchdog, balloon and sound devices).
     */
    protected boolean validatePciAndIdeLimit(List<VM> vmsDiskPluggedTo) {
        for (VM vm : vmsDiskPluggedTo) {
            List<VmNic> allVmInterfaces = getVmNicDao().getAllForVm(vm.getId());
            List<Disk> allVmDisks = new LinkedList<>(getOtherVmDisks(vm.getId()));
            allVmDisks.add(getNewDisk());
            if (!checkPciAndIdeLimit(vm.getOs(),
                    vm.getVdsGroupCompatibilityVersion(),
                    vm.getNumOfMonitors(),
                    allVmInterfaces,
                    allVmDisks,
                    VmDeviceUtils.hasVirtioScsiController(vm.getId()),
                    VmDeviceUtils.hasWatchdog(vm.getId()),
                    VmDeviceUtils.hasMemoryBalloon(vm.getId()),
                    VmDeviceUtils.hasSoundDevice(vm.getId()),
                    getReturnValue().getCanDoActionMessages())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns (and caches) the given VM's disks excluding the disk being updated.
     */
    protected List<Disk> getOtherVmDisks(Guid vmId) {
        List<Disk> disks = otherVmDisks.get(vmId);
        if (disks == null) {
            disks = getDiskDao().getAllForVm(vmId);
            Iterator<Disk> iter = disks.iterator();
            while (iter.hasNext()) {
                Disk evalDisk = iter.next();
                if (evalDisk.getId().equals(getOldDisk().getId())) {
                    iter.remove();
                    break;
                }
            }
            otherVmDisks.put(vmId, disks);
        }
        return disks;
    }

    /**
     * Validate whether a disk can be shareable. Disk can be shareable if it is not based on qcow FS,
     * which means it should not be based on a template image with thin provisioning,
     * it also should not contain snapshots and it is not bootable.
     * @return Indication whether the disk can be shared or not.
     */
    private boolean validateCanUpdateShareable() {
        if (DiskStorageType.LUN == getOldDisk().getDiskStorageType()) {
            return true;
        }
        // Check if VM is not during snapshot.
        if (!isVmNotInPreviewSnapshot()) {
            return false;
        }
        if (isUpdatedToShareable(getOldDisk(), getNewDisk())) {
            StorageDomainStatic sds = getStorageDomainStaticDao().get(((DiskImage)getNewDisk()).getStorageIds().get(0));
            if (sds.getStorageType() == StorageType.GLUSTERFS) {
                return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_SHAREABLE_DISKS_NOT_SUPPORTED_ON_GLUSTER_DOMAIN);
            }
            List<DiskImage> diskImageList =
                    getDiskImageDao().getAllSnapshotsForImageGroup(getOldDisk().getId());
            // If disk image list is more than one then we assume that it has a snapshot, since one image is the active
            // disk and all the other images are the snapshots.
            if ((diskImageList.size() > 1) || !Guid.Empty.equals(((DiskImage) getOldDisk()).getImageTemplateId())) {
                return failCanDoAction(EngineMessage.SHAREABLE_DISK_IS_NOT_SUPPORTED_FOR_DISK);
            }
            if (!isVersionSupportedForShareable(getOldDisk(), getStoragePoolDao().get(getVm().getStoragePoolId())
                    .getCompatibilityVersion()
                    .getValue())) {
                return failCanDoAction(EngineMessage.ACTION_NOT_SUPPORTED_FOR_CLUSTER_POOL_LEVEL);
            }
            if (!isVolumeFormatSupportedForShareable(((DiskImage) getNewDisk()).getVolumeFormat())) {
                return failCanDoAction(EngineMessage.SHAREABLE_DISK_IS_NOT_SUPPORTED_BY_VOLUME_FORMAT);
            }
        } else if (isUpdatedToNonShareable(getOldDisk(), getNewDisk())) {
            // A shared disk attached to several VMs cannot silently become private.
            if (vmsDiskOrSnapshotAttachedTo.size() > 1) {
                return failCanDoAction(EngineMessage.DISK_IS_ALREADY_SHARED_BETWEEN_VMS);
            }
        }
        return true;
    }

    /** Read-only may only be toggled while the VM is down (or the device unplugged). */
    protected boolean validateCanUpdateReadOnly(DiskValidator diskValidator) {
        if (updateReadOnlyRequested()) {
            if(getVm().getStatus() != VMStatus.Down && vmDeviceForVm.getIsPlugged()) {
                return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_VM_IS_NOT_DOWN);
            }
            return validate(diskValidator.isReadOnlyPropertyCompatibleWithInterface());
        }
        return true;
    }

    /** Read-only / wipe-after-delete changes are rejected for pool-attached VMs. */
    protected boolean validateVmPoolProperties() {
        if ((updateReadOnlyRequested() || updateWipeAfterDeleteRequested()) && getVm().getVmPoolId() != null)
            return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_VM_ATTACHED_TO_POOL);
        return true;
    }

    /**
     * Validates a size change: no resize of read-only disks or disk snapshots,
     * no shrinking, every plugged VM in a status allowing ExtendImageSize, and
     * enough free space on the storage domain for the delta.
     */
    protected boolean validateCanResizeDisk() {
        DiskImage newDiskImage = (DiskImage) getNewDisk();
        DiskImage oldDiskImage = (DiskImage) getOldDisk();
        if (newDiskImage.getSize() != oldDiskImage.getSize()) {
            if (Boolean.TRUE.equals(getVmDeviceForVm().getIsReadOnly())) {
                return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_CANNOT_RESIZE_READ_ONLY_DISK);
            }
            if (vmDeviceForVm.getSnapshotId() != null) {
                DiskImage snapshotDisk = getDiskImageDao().getDiskSnapshotForVmSnapshot(getParameters().getDiskId(), vmDeviceForVm.getSnapshotId());
                if (snapshotDisk.getSize() != newDiskImage.getSize()) {
                    return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_CANNOT_RESIZE_DISK_SNAPSHOT);
                }
            }
            if (oldDiskImage.getSize() > newDiskImage.getSize()) {
                return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_REQUESTED_DISK_SIZE_IS_TOO_SMALL);
            }
            for (VM vm : getVmsDiskPluggedTo()) {
                if (!VdcActionUtils.canExecute(Collections.singletonList(vm), VM.class, VdcActionType.ExtendImageSize)) {
                    return failCanDoAction(EngineMessage.ACTION_TYPE_FAILED_VM_STATUS_ILLEGAL, LocalizedVmStatus.from(vm.getStatus()));
                }
            }
            StorageDomainValidator storageDomainValidator = getStorageDomainValidator((DiskImage) getNewDisk());
            if (!validate(storageDomainValidator.isDomainExistAndActive())) {
                return false;
            }
            // For size allocation validation, we'll create a dummy with the additional size required.
            // That way, the validator can hold all the logic about storage types.
            long additionalDiskSpaceInGB = newDiskImage.getSizeInGigabytes() - oldDiskImage.getSizeInGigabytes();
            DiskImage dummyForValidation = DiskImage.copyOf(newDiskImage);
            dummyForValidation.setSizeInGigabytes(additionalDiskSpaceInGB);
            return validate(storageDomainValidator.hasSpaceForNewDisk(dummyForValidation));
        }
        return true;
    }

    /**
     * EDIT_DISK_PROPERTIES is always required; CONFIGURE_SCSI_GENERIC_IO is
     * additionally required when the SGIO mode changes.
     */
    @Override
    public List<PermissionSubject> getPermissionCheckSubjects() {
        if (listPermissionSubjects == null) {
            listPermissionSubjects = new ArrayList<>();
            Guid diskId = (getOldDisk() == null) ? null : getOldDisk().getId();
            listPermissionSubjects.add(new PermissionSubject(diskId,
                    VdcObjectType.Disk,
                    ActionGroup.EDIT_DISK_PROPERTIES));
            if (getOldDisk() != null && getNewDisk() != null && getOldDisk().getSgio() != getNewDisk().getSgio()) {
                listPermissionSubjects.add(new PermissionSubject(diskId,
                        VdcObjectType.Disk,
                        ActionGroup.CONFIGURE_SCSI_GENERIC_IO));
            }
        }
        return listPermissionSubjects;
    }

    /**
     * Applies the user's changes to the DB (and to VDSM metadata / Cinder where
     * relevant) inside a new transaction.
     *
     * @param unlockImage when true (end of a resize flow), a LOCKED image is set
     *                    back to OK as part of the same transaction.
     */
    protected void performDiskUpdate(final boolean unlockImage) {
        if (shouldPerformMetadataUpdate()) {
            updateMetaDataDescription((DiskImage) getNewDisk());
        }
        final Disk disk = getDiskDao().get(getParameters().getDiskId());
        applyUserChanges(disk);
        TransactionSupport.executeInNewTransaction(new TransactionMethod<Object>() {
            @Override
            public Object runInTransaction() {
                // Bump the VM generation so stale cached configurations are invalidated.
                getVmStaticDao().incrementDbGeneration(getVm().getId());
                updateDeviceProperties();
                getBaseDiskDao().update(disk);
                switch (disk.getDiskStorageType()) {
                    case IMAGE:
                        DiskImage diskImage = (DiskImage) disk;
                        diskImage.setQuotaId(getQuotaId());
                        if (unlockImage && diskImage.getImageStatus() == ImageStatus.LOCKED) {
                            diskImage.setImageStatus(ImageStatus.OK);
                        }
                        getImageDao().update(diskImage.getImage());
                        updateQuota(diskImage);
                        updateDiskProfile();
                        break;
                    case CINDER:
                        CinderDisk cinderDisk = (CinderDisk) disk;
                        cinderDisk.setQuotaId(getQuotaId());
                        setStorageDomainId(cinderDisk.getStorageIds().get(0));
                        getCinderBroker().updateDisk(cinderDisk);
                        if (unlockImage && cinderDisk.getImageStatus() == ImageStatus.LOCKED) {
                            cinderDisk.setImageStatus(ImageStatus.OK);
                        }
                        getImageDao().update(cinderDisk.getImage());
                        updateQuota(cinderDisk);
                        break;
                    case LUN:
                        updateLunProperties((LunDisk)getNewDisk());
                        break;
                }
                reloadDisks();
                updateBootOrder();
                setSucceeded(true);
                liveUpdateDiskProfile();
                return null;
            }

            /** Syncs read-only flag and clears the device address on interface change. */
            private void updateDeviceProperties() {
                if (updateReadOnlyRequested()) {
                    vmDeviceForVm.setIsReadOnly(getNewDisk().getReadOnly());
                    getVmDeviceDao().update(vmDeviceForVm);
                }
                if (getOldDisk().getDiskInterface() != getNewDisk().getDiskInterface()) {
                    // A changed interface invalidates the previously assigned address.
                    vmDeviceForVm.setAddress("");
                    getVmDeviceDao().clearDeviceAddress(getOldDisk().getId());
                }
            }

            /** Syncs the SCSI-reservation flag on the VM device for LUN disks. */
            private void updateLunProperties(LunDisk lunDisk) {
                if (updateIsUsingScsiReservationRequested(lunDisk)) {
                    vmDeviceForVm.setUsingScsiReservation(lunDisk.isUsingScsiReservation());
                    getVmDeviceDao().update(vmDeviceForVm);
                }
            }
        });
    }

    /** Metadata on the volume needs updating when an IMAGE disk's alias or description changed. */
    private boolean shouldPerformMetadataUpdate() {
        return ((getNewDisk().getDiskStorageType() == DiskStorageType.IMAGE) && (!ObjectUtils.objectsEqual(getOldDisk().getDiskAlias(),
                getNewDisk().getDiskAlias()) || !ObjectUtils.objectsEqual(getOldDisk().getDiskDescription(),
                getNewDisk().getDiskDescription())));
    }

    /**
     * Pushes the updated alias/description into the volume metadata via VDSM;
     * skipped (with an audit-log entry) when the storage domain is not active,
     * and failures are audit-logged rather than failing the command.
     */
    private void updateMetaDataDescription(DiskImage diskImage) {
        StorageDomain storageDomain =
                getStorageDomainDao().getForStoragePool(diskImage.getStorageIds().get(0),
                        getVm().getStoragePoolId());
        if (!getStorageDomainValidator((DiskImage) getNewDisk()).isDomainExistAndActive().isValid()) {
            auditLogForNoMetadataDescriptionUpdate(AuditLogType.UPDATE_DESCRIPTION_FOR_DISK_SKIPPED_SINCE_STORAGE_DOMAIN_NOT_ACTIVE,
                    storageDomain,
                    diskImage);
            return;
        }
        try {
            SetVolumeDescriptionVDSCommandParameters vdsCommandParameters =
                    new SetVolumeDescriptionVDSCommandParameters(getVm().getStoragePoolId(),
                            diskImage.getStorageIds().get(0),
                            diskImage.getId(),
                            diskImage.getImageId(),
                            getJsonDiskDescription());
            runVdsCommand(VDSCommandType.SetVolumeDescription, vdsCommandParameters);
        } catch (Exception e) {
            log.error("Exception while setting volume description for disk. ERROR: '{}'", e);
            auditLogForNoMetadataDescriptionUpdate(AuditLogType.UPDATE_DESCRIPTION_FOR_DISK_FAILED,
                    storageDomain,
                    diskImage);
        }
    }

    /** Emits an audit-log entry explaining why the metadata description was not updated. */
    private void auditLogForNoMetadataDescriptionUpdate(AuditLogType auditLogType, StorageDomain storageDomain, DiskImage diskImage) {
        AuditLogableBase auditLogableBase = new AuditLogableBase();
        auditLogableBase.addCustomValue("DataCenterName", getStoragePool().getName());
        auditLogableBase.addCustomValue("StorageDomainName", storageDomain.getName());
        auditLogableBase.addCustomValue("DiskName", diskImage.getDiskAlias());
        auditLogDirector.log(auditLogableBase, auditLogType);
    }

    /** JSON-serialized disk description written into the volume metadata. */
    private String getJsonDiskDescription() throws IOException {
        return MetadataDiskDescriptionHandler.getInstance().generateJsonDiskDescription(getParameters().getDiskInfo());
    }

    /** Persists a disk-profile change for IMAGE disks. */
    protected void updateDiskProfile() {
        if (isDiskImage()) {
            DiskImage oldDisk = (DiskImage) getOldDisk();
            DiskImage newDisk = (DiskImage) getNewDisk();
            if (!Objects.equals(oldDisk.getDiskProfileId(), newDisk.getDiskProfileId())) {
                getImageStorageDomainMapDao().updateDiskProfileByImageGroupIdAndStorageDomainId(newDisk.getId(),
                        newDisk.getStorageIds().get(0),
                        newDisk.getDiskProfileId());
            }
        }
    }

    /** Pushes a disk-profile change to running VMs so new SLA limits apply live. */
    private void liveUpdateDiskProfile() {
        if (isDiskImage()) {
            DiskImage oldDisk = (DiskImage) getOldDisk();
            DiskImage newDisk = (DiskImage) getNewDisk();
            if (!Objects.equals(oldDisk.getDiskProfileId(), newDisk.getDiskProfileId())) {
                vmSlaPolicyUtils.refreshRunningVmsWithDiskProfile(newDisk.getDiskProfileId());
            }
        }
    }

    /** Re-associates the image (and its snapshots) with the new quota when it changed. */
    protected void updateQuota(DiskImage diskImage) {
        if (isInternalManagedDisk()) {
            DiskImage oldDisk = (DiskImage) getOldDisk();
            if (!Objects.equals(oldDisk.getQuotaId(), diskImage.getQuotaId())) {
                getImageStorageDomainMapDao().updateQuotaForImageAndSnapshots(diskImage.getId(),
                        diskImage.getStorageIds().get(0),
                        diskImage.getQuotaId());
            }
        }
    }

    /** Copies the user-editable properties from the requested disk onto the DB entity. */
    private void applyUserChanges(Disk diskToUpdate) {
        updateSnapshotIdOnShareableChange(diskToUpdate, getNewDisk());
        diskToUpdate.setBoot(getNewDisk().isBoot());
        diskToUpdate.setDiskInterface(getNewDisk().getDiskInterface());
        diskToUpdate.setPropagateErrors(getNewDisk().getPropagateErrors());
        diskToUpdate.setWipeAfterDelete(getNewDisk().isWipeAfterDelete());
        diskToUpdate.setDiskAlias(getNewDisk().getDiskAlias());
        diskToUpdate.setDiskDescription(getNewDisk().getDiskDescription());
        diskToUpdate.setShareable(getNewDisk().isShareable());
        diskToUpdate.setReadOnly(getNewDisk().getReadOnly());
        diskToUpdate.setSgio(getNewDisk().getSgio());
    }

    /** Refreshes the in-memory VM's disk list from the DB. */
    protected void reloadDisks() {
        VmHandler.updateDisksFromDb(getVm());
    }

    /** Recomputes the VM's device boot order after the update. */
    protected void updateBootOrder() {
        VmDeviceUtils.updateBootOrder(getVm().getId());
    }

    /**
     * Locks the image and runs the internal ExtendImageSize command; on failure
     * the internal command's messages/fault are propagated to this command.
     */
    private void extendDiskImageSize() {
        lockImageInDb();
        VdcReturnValueBase ret = runInternalActionWithTasksContext(
                VdcActionType.ExtendImageSize,
                createExtendImageSizeParameters());
        if (ret.getSucceeded()) {
            getReturnValue().getVdsmTaskIdList().addAll(ret.getInternalVdsmTaskIdList());
        } else {
            propagateInternalCommandFailure(ret);
            getReturnValue().setFault(ret.getFault());
        }
        setSucceeded(ret.getSucceeded());
    }

    /**
     * Locks the image and extends the Cinder disk asynchronously, blocking on the
     * result. Interruption/execution failures are logged; succeeded stays false.
     */
    private void extendCinderDiskSize() {
        lockImageInDb();
        CinderDisk newCinderDisk = (CinderDisk) getNewDisk();
        Future<VdcReturnValueBase> future = CommandCoordinatorUtil.executeAsyncCommand(
                VdcActionType.ExtendCinderDisk,
                buildExtendCinderDiskParameters(newCinderDisk),
                cloneContextAndDetachFromParent(),
                new SubjectEntity(VdcObjectType.Storage, newCinderDisk.getStorageIds().get(0)));
        addCustomValue("NewSize", String.valueOf(getNewDiskSizeInGB()));
        try {
            setReturnValue(future.get());
            setSucceeded(getReturnValue().getSucceeded());
        } catch (InterruptedException | ExecutionException e) {
            log.error("Error extending Cinder disk '{}': {}",
                    getNewDisk().getDiskAlias(),
                    e.getMessage());
            log.debug("Exception", e);
        }
    }

    /** Parameters for the internal ExtendCinderDisk command. */
    private VdcActionParametersBase buildExtendCinderDiskParameters(CinderDisk newCinderDisk) {
        UpdateVmDiskParameters parameters = new UpdateVmDiskParameters(
                getVmId(), newCinderDisk.getId(), newCinderDisk);
        parameters.setParametersCurrentUser(getParameters().getParametersCurrentUser());
        return parameters;
    }

    /**
     * End of the async IMAGE-resize flow: finishes ExtendImageSize, then applies
     * the remaining property changes (unlocking the image) on success, or just
     * unlocks the image on failure.
     */
    @Override
    protected void endSuccessfully() {
        if (!isDiskImage()) {
            return;
        }
        VdcReturnValueBase ret = getBackend().endAction(VdcActionType.ExtendImageSize,
                createExtendImageSizeParameters(),
                getContext().clone().withoutCompensationContext().withoutExecutionContext().withoutLock());
        if (ret.getSucceeded()) {
            performDiskUpdate(true);
        } else {
            unlockImageInDb();
        }
        getReturnValue().setEndActionTryAgain(false);
        setSucceeded(ret.getSucceeded());
    }

    /** Valid only if none of the given VMs already has an active bootable disk. */
    private ValidationResult noVmsContainBootableDisks(List<VM> vms) {
        List<String> vmsWithBoot = new ArrayList<>(vms.size());
        for (VM vm : vms) {
            Disk bootDisk = getDiskDao().getVmBootActiveDisk(vm.getId());
            if (bootDisk != null) {
                vmsWithBoot.add(vm.getName());
            }
        }
        if (!vmsWithBoot.isEmpty()) {
            addCanDoActionMessageVariable("VmsName", StringUtils.join(vmsWithBoot.toArray(), ", "));
            return new ValidationResult(EngineMessage.ACTION_TYPE_FAILED_VMS_BOOT_IN_USE);
        }
        return ValidationResult.VALID;
    }

    /** Failure path of the async flow: end the internal command and unlock the image. */
    @Override
    protected void endWithFailure() {
        endInternalCommandWithFailure();
        unlockImageInDb();
        getReturnValue().setEndActionTryAgain(false);
        setSucceeded(true);
    }

    private void endInternalCommandWithFailure() {
        ExtendImageSizeParameters params = createExtendImageSizeParameters();
        params.setTaskGroupSuccess(false);
        getBackend().endAction(VdcActionType.ExtendImageSize,
                params,
                getContext().clone().withoutCompensationContext().withoutExecutionContext().withoutLock());
    }

    @Override
    public AuditLogType getAuditLogTypeValue() {
        if (getSucceeded()) {
            // Cinder resizes complete synchronously, so the extend message is final here;
            // IMAGE resizes report via the async flow.
            return isCinderDisk() && resizeDiskImageRequested() ?
                    AuditLogType.USER_EXTENDED_DISK_SIZE : AuditLogType.USER_UPDATE_VM_DISK;
        } else {
            return AuditLogType.USER_FAILED_UPDATE_VM_DISK;
        }
    }

    @Override
    public String getDiskAlias() {
        return getOldDisk().getDiskAlias();
    }

    @Override
    public Map<String, String> getJobMessageProperties() {
        if (jobProperties == null) {
            jobProperties = super.getJobMessageProperties();
            jobProperties.put("diskalias", getDiskAlias());
        }
        return jobProperties;
    }

    /** Requested (new) disk size in whole gigabytes; used for Cinder audit logging. */
    public long getNewDiskSizeInGB() {
        CinderDisk cinderDisk = (CinderDisk) getNewDisk();
        return cinderDisk.getSize() / BYTES_IN_GB;
    }

    private boolean isDiskImage() {
        return isDiskStorageType(DiskStorageType.IMAGE);
    }

    private boolean isCinderDisk() {
        return isDiskStorageType(DiskStorageType.CINDER);
    }

    // NOTE: compares against the OLD disk's storage type; both disks must exist.
    private boolean isDiskStorageType(DiskStorageType diskStorageType) {
        return getOldDisk() != null && getNewDisk() != null && diskStorageType == getOldDisk().getDiskStorageType();
    }

    /** Quota id of the new disk, or null for disks that do not carry a quota. */
    protected Guid getQuotaId() {
        if (getNewDisk() != null && isInternalManagedDisk()) {
            return ((DiskImage) getNewDisk()).getQuotaId();
        }
        return null;
    }

    /** Validates the (possibly changed) disk profile against the target storage domain. */
    protected boolean setAndValidateDiskProfiles() {
        if (isDiskImage()) {
            DiskImage diskImage = (DiskImage) getNewDisk();
            // when disk profile isn't updated, skip check.
            if (diskImage.getDiskProfileId() != null
                    && diskImage.getDiskProfileId().equals(((DiskImage) getOldDisk()).getDiskProfileId())) {
                return true;
            }
            Map<DiskImage, Guid> map = new HashMap<>();
            map.put(diskImage, diskImage.getStorageIds().get(0));
            return validate(DiskProfileHelper.setAndValidateDiskProfiles(map,
                    getStoragePool().getCompatibilityVersion(), getCurrentUser()));
        }
        return true;
    }

    /**
     * Quota consumption: consume the full new size when the quota is newly
     * assigned or changed, or just the size delta when only growing; release the
     * old quota when it was replaced.
     */
    @Override
    public List<QuotaConsumptionParameter> getQuotaStorageConsumptionParameters() {
        List<QuotaConsumptionParameter> list = new ArrayList<>();
        if (isInternalManagedDisk()) {
            DiskImage oldDiskImage = (DiskImage) getOldDisk();
            DiskImage newDiskImage = (DiskImage) getNewDisk();
            boolean emptyOldQuota = oldDiskImage.getQuotaId() == null || Guid.Empty.equals(oldDiskImage.getQuotaId());
            boolean differentNewQuota = !emptyOldQuota && !oldDiskImage.getQuotaId().equals(newDiskImage.getQuotaId());
            long diskExtendingDiff = newDiskImage.getSizeInGigabytes() - oldDiskImage.getSizeInGigabytes();
            if (emptyOldQuota || differentNewQuota ) {
                list.add(generateQuotaConsumeParameters(newDiskImage, newDiskImage.getSizeInGigabytes()));
            } else if (diskExtendingDiff > 0L) {
                list.add(generateQuotaConsumeParameters(newDiskImage, diskExtendingDiff));
            }
            if (differentNewQuota) {
                list.add(new QuotaStorageConsumptionParameter(
                        oldDiskImage.getQuotaId(),
                        null,
                        QuotaStorageConsumptionParameter.QuotaAction.RELEASE,
                        //TODO: Shared Disk?
                        oldDiskImage.getStorageIds().get(0),
                        (double)oldDiskImage.getSizeInGigabytes()));
            }
        }
        return list;
    }

    /** IMAGE and CINDER disks are managed internally (carry image/quota state). */
    protected boolean isInternalManagedDisk() {
        return isDiskImage() || isCinderDisk();
    }

    private QuotaConsumptionParameter generateQuotaConsumeParameters(DiskImage newDiskImage, long sizeInGigabytes) {
        return new QuotaStorageConsumptionParameter(
                newDiskImage.getQuotaId(),
                null,
                QuotaStorageConsumptionParameter.QuotaAction.CONSUME,
                //TODO: Shared Disk?
                newDiskImage.getStorageIds().get(0),
                (double) sizeInGigabytes );
    }

    /**
     * A resize is requested when the sizes differ — except for IMAGE disks
     * plugged via a snapshot, whose size cannot change.
     */
    private boolean resizeDiskImageRequested() {
        boolean sizeChanged = getNewDisk().getSize() != getOldDisk().getSize();
        switch (getNewDisk().getDiskStorageType()) {
            case IMAGE:
                return sizeChanged && vmDeviceForVm.getSnapshotId() == null;
            case CINDER:
                return sizeChanged;
        }
        return false;
    }

    private boolean updateParametersRequiringVmDownRequested() {
        return updateDiskParametersRequiringVmDownRequested() || updateImageParametersRequiringVmDownRequested();
    }

    /** Disk-level property changes that require all attached VMs to be down. */
    private boolean updateDiskParametersRequiringVmDownRequested() {
        return getOldDisk().isBoot() != getNewDisk().isBoot() ||
                getOldDisk().getDiskInterface() != getNewDisk().getDiskInterface() ||
                getOldDisk().getPropagateErrors() != getNewDisk().getPropagateErrors() ||
                getOldDisk().isShareable() != getNewDisk().isShareable() ||
                getOldDisk().getSgio() != getNewDisk().getSgio();
    }

    /**
     * Command's canDoAction conditions: requiring all connected VMs down.
     * @return true - if disk type is IMAGE or is CINDER, and updating quota
     */
    private boolean updateImageParametersRequiringVmDownRequested() {
        if (!getOldDisk().getDiskStorageType().isInternal()) {
            return false;
        }
        Guid oldQuotaId = ((DiskImage) getOldDisk()).getQuotaId();
        /*
         * oldQuotaId == null : Initial quota, not assigned yet.
         * happens when: quota is disabled or,
         * quota enabled, but disk never attached with a quota
         */
        if (oldQuotaId == null) {
            return false;
        }
        return !Objects.equals(oldQuotaId, getQuotaId());
    }

    /** True when the request carries a read-only value differing from the device's. */
    protected boolean updateReadOnlyRequested() {
        Boolean readOnlyNewValue = getNewDisk().getReadOnly();
        return readOnlyNewValue != null && !getVmDeviceForVm().getIsReadOnly().equals(readOnlyNewValue);
    }

    /** True when the LUN disk's SCSI-reservation flag differs from the device's. */
    private boolean updateIsUsingScsiReservationRequested(LunDisk lunDisk) {
        Boolean isUsingScsiReservationNewValue = lunDisk.isUsingScsiReservation();
        return isUsingScsiReservationNewValue != null &&
                getVmDeviceForVm().isUsingScsiReservation() != isUsingScsiReservationNewValue;
    }

    protected boolean updateWipeAfterDeleteRequested() {
        return getNewDisk().isWipeAfterDelete() != getOldDisk().isWipeAfterDelete();
    }

    protected boolean isAtLeastOneVmIsNotDown(List<VM> vmsDiskPluggedTo) {
        for (VM vm : vmsDiskPluggedTo) {
            if (vm.getStatus() != VMStatus.Down) {
                return true;
            }
        }
        return false;
    }

    private boolean isUpdatedToShareable(Disk oldDisk, Disk newDisk) {
        return newDisk.isShareable() && !oldDisk.isShareable();
    }

    private boolean isUpdatedToNonShareable(Disk oldDisk, Disk newDisk) {
        return !newDisk.isShareable() && oldDisk.isShareable();
    }

    /**
     * Shareable IMAGE disks are detached from any VM snapshot (null snapshot id);
     * reverting to non-shareable re-binds the image to the active snapshot.
     */
    private void updateSnapshotIdOnShareableChange(Disk oldDisk, Disk newDisk) {
        if (oldDisk.isShareable() != newDisk.isShareable() && oldDisk.getDiskStorageType() == DiskStorageType.IMAGE) {
            DiskImage oldDiskImage = (DiskImage) oldDisk;
            Guid vmSnapshotId = isUpdatedToShareable(oldDisk, newDisk) ? null :
                    getSnapshotDao().getId(getVmId(), SnapshotType.ACTIVE);
            oldDiskImage.setVmSnapshotId(vmSnapshotId);
        }
    }

    /** Lazily loads the disk's current state from the DB. */
    protected Disk getOldDisk() {
        if (oldDisk == null) {
            oldDisk = getDiskDao().get(getParameters().getDiskId());
        }
        return oldDisk;
    }

    /** The requested (target) disk state, as supplied by the caller. */
    protected Disk getNewDisk() {
        return getParameters().getDiskInfo();
    }

    protected VmDevice getVmDeviceForVm() {
        return vmDeviceForVm;
    }

    private List<VM> getVmsDiskPluggedTo() {
        return vmsDiskPluggedTo;
    }

    /**
     * Populates the attached/plugged VM lists and resolves vmDeviceForVm — the
     * device row linking this disk to the VM the command was invoked on.
     */
    private void loadVmDiskAttachedToInfo() {
        if (getOldDisk() != null) {
            List<Pair<VM, VmDevice>> attachedVmsInfo = getVmDao().getVmsWithPlugInfo(getOldDisk().getId());
            for (Pair<VM, VmDevice> pair : attachedVmsInfo) {
                VM vm = pair.getFirst();
                vmsDiskOrSnapshotAttachedTo.add(vm);
                if (Boolean.TRUE.equals(pair.getSecond().getIsPlugged())) {
                    if (pair.getSecond().getSnapshotId() != null) {
                        vmsDiskSnapshotPluggedTo.add(vm);
                    } else {
                        vmsDiskPluggedTo.add(vm);
                    }
                    vmsDiskOrSnapshotPluggedTo.add(vm);
                }
                if (vm.getId().equals(getParameters().getVmId())) {
                    vmDeviceForVm = pair.getSecond();
                }
            }
        }
    }

    /**
     * Sets the image status to LOCKED in a new transaction, snapshotting the
     * previous status in the compensation context so it can be restored.
     */
    private void lockImageInDb() {
        final DiskImage diskImage = (DiskImage) getOldDisk();
        TransactionSupport.executeInNewTransaction(new TransactionMethod<Void>() {
            @Override
            public Void runInTransaction() {
                getCompensationContext().snapshotEntityStatus(diskImage.getImage());
                getCompensationContext().stateChanged();
                diskImage.setImageStatus(ImageStatus.LOCKED);
                ImagesHandler.updateImageStatus(diskImage.getImageId(), ImageStatus.LOCKED);
                return null;
            }
        });
    }

    /** Restores the image status to OK after a (failed or completed) resize flow. */
    public void unlockImageInDb() {
        final DiskImage diskImage = (DiskImage) getOldDisk();
        diskImage.setImageStatus(ImageStatus.OK);
        ImagesHandler.updateImageStatus(diskImage.getImageId(), ImageStatus.OK);
    }

    /** Parameters for the internal ExtendImageSize child command. */
    private ExtendImageSizeParameters createExtendImageSizeParameters() {
        DiskImage diskImage = (DiskImage) getNewDisk();
        ExtendImageSizeParameters params = new ExtendImageSizeParameters(diskImage.getImageId(), diskImage.getSize());
        params.setStoragePoolId(diskImage.getStoragePoolId());
        params.setStorageDomainId(diskImage.getStorageIds().get(0));
        params.setImageGroupID(diskImage.getId());
        params.setParentCommand(VdcActionType.UpdateVmDisk);
        params.setParentParameters(getParameters());
        return params;
    }

    /** Replaces this command's messages/fault/canDoAction with the internal command's. */
    private void propagateInternalCommandFailure(VdcReturnValueBase internalReturnValue) {
        getReturnValue().getExecuteFailedMessages().clear();
        getReturnValue().getExecuteFailedMessages().addAll(internalReturnValue.getExecuteFailedMessages());
        getReturnValue().setFault(internalReturnValue.getFault());
        getReturnValue().getCanDoActionMessages().clear();
        getReturnValue().getCanDoActionMessages().addAll(internalReturnValue.getCanDoActionMessages());
        getReturnValue().setCanDoAction(internalReturnValue.getCanDoAction());
    }
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*******************************************************************************/
package org.apache.wink.common.internal.providers.entity.csv;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import javax.ws.rs.ext.Providers;
import org.apache.wink.common.model.csv.CsvSerializer;
import org.apache.wink.common.model.synd.SyndCategory;
import org.apache.wink.common.model.synd.SyndEntry;
import org.apache.wink.common.model.synd.SyndFeed;
@Provider
@Produces("text/csv")
public class CsvSyndFeedSerializerProvider implements MessageBodyWriter<SyndFeed> {
private final static String[] EMPTY_ARRAY = new String[0];
@Context
private Providers providers;
/**
 * Returns the content length of the serialized feed.
 * <p>
 * The CSV size is not known before serialization, so -1 is returned as
 * permitted by the JAX-RS {@code MessageBodyWriter} contract (length will be
 * determined by the container, e.g. via chunked encoding).
 */
@Override
public long getSize(SyndFeed t,
                    Class<?> type,
                    Type genericType,
                    Annotation[] annotations,
                    MediaType mediaType) {
    return -1;
}
/**
 * A type is writeable by this provider when it is a {@link SyndFeed} AND a
 * downstream writer is registered for the intermediate
 * {@code SyndFeedSerializer} representation that {@code writeTo} delegates to.
 */
@Override
public boolean isWriteable(Class<?> type,
                           Type genericType,
                           Annotation[] annotations,
                           MediaType mediaType) {
    MessageBodyWriter<SyndFeedSerializer> messageBodyWriter =
        providers.getMessageBodyWriter(SyndFeedSerializer.class,
                                       genericType,
                                       annotations,
                                       mediaType);
    return SyndFeed.class.isAssignableFrom(type) && messageBodyWriter != null;
}
/**
 * Serializes the feed to CSV by wrapping it in a {@code SyndFeedSerializer}
 * (header row + one row per entry) and delegating the actual CSV encoding to
 * the writer registered for that type.
 *
 * @throws IOException             on a failure writing to {@code entityStream}
 * @throws WebApplicationException propagated from the delegate writer
 */
@Override
public void writeTo(SyndFeed t,
                    Class<?> type,
                    Type genericType,
                    Annotation[] annotations,
                    MediaType mediaType,
                    MultivaluedMap<String, Object> httpHeaders,
                    OutputStream entityStream) throws IOException, WebApplicationException {
    MessageBodyWriter<SyndFeedSerializer> messageBodyWriter =
        providers.getMessageBodyWriter(SyndFeedSerializer.class,
                                       genericType,
                                       annotations,
                                       mediaType);
    // already checked for non-null writer in isWriteable
    messageBodyWriter.writeTo(new SyndFeedSerializer(t),
                              type,
                              genericType,
                              annotations,
                              mediaType,
                              httpHeaders,
                              entityStream);
}
private class SyndFeedSerializer implements Iterator<String[]>, CsvSerializer {
private TreeSet<String> categoriesNames; // sorted set of
// categories
private Iterator<SyndEntry> iterator; // iterator of entries
private List<String> header; // table's header
private boolean headerSent = false; // indicates if the
// header was sent
/**
* c'tor
*
* @param collection - Atom collection
*/
/**
 * Builds the CSV header for the given feed: the fixed entry columns followed
 * by one column per distinct category scheme (sorted, since categoriesNames
 * is a TreeSet). The column order established here is mirrored by next().
 *
 * @param syndFeed the feed whose entries will be serialized
 */
public SyndFeedSerializer(SyndFeed syndFeed) {
    List<SyndEntry> entries = syndFeed.getEntries();
    iterator = entries.iterator();
    // Collect every distinct category scheme across all entries; each becomes a column.
    categoriesNames = new TreeSet<String>();
    for (SyndEntry entry : entries) {
        List<SyndCategory> categories = entry.getCategories();
        if (categories == null) {
            continue;
        }
        for (SyndCategory categoryBean : categories) {
            categoriesNames.add(categoryBean.getScheme());
        }
    }
    header = new ArrayList<String>();
    // pay attention that the order of header must match the order of
    // elements (see next() method below)
    header.add("id"); //$NON-NLS-1$
    header.add("title"); //$NON-NLS-1$
    header.add("content"); //$NON-NLS-1$
    header.add("authors"); //$NON-NLS-1$
    header.add("published"); //$NON-NLS-1$
    header.add("summary"); //$NON-NLS-1$
    header.add("updated"); //$NON-NLS-1$
    header.add("base"); //$NON-NLS-1$
    header.add("lang"); //$NON-NLS-1$
    for (String categoryName : categoriesNames) {
        header.add(categoryName);
    }
}
public boolean hasNext() {
return (!headerSent || iterator.hasNext());
}
public String[] next() {
if (!headerSent) {
// if header was not sent, send it
headerSent = true;
return header.toArray(EMPTY_ARRAY);
}
// send the entire table base on the iterator
// first, fetch the entry using original iterator
SyndEntry entry = iterator.next();
// second, fetch data from entry
String base = entry.getBase();
String id = entry.getId();
String lang = entry.getLang();
String authors =
entry.getAuthors() != null && !entry.getAuthors().isEmpty() ? entry.getAuthors()
.get(0).getName() : ""; //$NON-NLS-1$
String title = entry.getTitle() != null ? entry.getTitle().getValue() : ""; //$NON-NLS-1$
String content = entry.getContent() != null ? entry.getContent().getValue() : ""; //$NON-NLS-1$
String published =
entry.getPublished() != null ? String.valueOf(entry.getPublished()) : ""; //$NON-NLS-1$
String updated = entry.getUpdated() != null ? String.valueOf(entry.getUpdated()) : ""; //$NON-NLS-1$
String summary = entry.getSummary() != null ? entry.getSummary().getValue() : ""; //$NON-NLS-1$
// to improve the search, convert categories to Map
Map<String, String> categoriesMap = new HashMap<String, String>();
List<SyndCategory> categories = entry.getCategories();
if (categories != null) {
for (SyndCategory bean : categories) {
categoriesMap.put(bean.getScheme(), bean.getTerm());
}
}
// fill row
String[] row = new String[header.size()];
int index = 0;
// pay attention that the order of elements in row must math the
// order of header (see constructor above)
row[index++] = id;
row[index++] = title;
row[index++] = content;
row[index++] = authors;
row[index++] = published;
row[index++] = summary;
row[index++] = updated;
row[index++] = base;
row[index++] = lang;
// add categories to the end of the row
for (String categoryName : categoriesNames) {
String category = categoriesMap.get(categoryName);
row[index++] = category != null ? category : ""; //$NON-NLS-1$
}
return row;
}
public void remove() {
throw new UnsupportedOperationException();
}
public Iterator<String[]> getEntities() {
return this;
}
}
}
| |
/*
* Copyright 2014-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.solr.core.query;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.springframework.data.geo.Box;
import org.springframework.data.geo.Distance;
import org.springframework.data.geo.Point;
import org.springframework.lang.Nullable;
/**
* @author Christoph Strobl
* @since 1.2
*/
public class Crotch extends Criteria {
private List<Criteria> siblings = new ArrayList<>();
private @Nullable Node mostRecentSibling = null;
Crotch() {}
@Override
public Field getField() {
if (this.mostRecentSibling instanceof Criteria) {
return ((Criteria) this.mostRecentSibling).getField();
}
return null;
}
@Override
public Crotch is(@Nullable Object o) {
mostRecentSibling.is(o);
return this;
}
@Override
public Crotch boost(float boost) {
mostRecentSibling.boost(boost);
return this;
}
@Override
public Crotch not() {
mostRecentSibling.not();
return this;
}
@Override
public Crotch notOperator() {
if (this.isRoot()) {
this.setNegating(true);
} else {
super.notOperator();
}
return this;
}
@Override
public Crotch endsWith(String postfix) {
mostRecentSibling.endsWith(postfix);
return this;
}
@Override
public Crotch startsWith(String prefix) {
mostRecentSibling.startsWith(prefix);
return this;
}
@Override
public Crotch contains(String value) {
mostRecentSibling.contains(value);
return this;
}
@Override
public Crotch is(Object... values) {
mostRecentSibling.is(values);
return this;
}
@Override
public Crotch is(Iterable<?> values) {
mostRecentSibling.is(values);
return this;
}
@Override
public Crotch isNull() {
mostRecentSibling.isNull();
return this;
}
@Override
public Crotch isNotNull() {
mostRecentSibling.isNotNull();
return this;
}
@Override
public Crotch contains(String... values) {
mostRecentSibling.contains(values);
return this;
}
@Override
public Crotch contains(Iterable<String> values) {
mostRecentSibling.contains(values);
return this;
}
@Override
public Crotch startsWith(String... values) {
mostRecentSibling.startsWith(values);
return this;
}
@Override
public Crotch startsWith(Iterable<String> values) {
mostRecentSibling.startsWith(values);
return this;
}
@Override
public Crotch endsWith(String... values) {
mostRecentSibling.endsWith(values);
return this;
}
@Override
public Crotch endsWith(Iterable<String> values) {
mostRecentSibling.endsWith(values);
return this;
}
@Override
public Crotch fuzzy(String value) {
mostRecentSibling.fuzzy(value);
return this;
}
@Override
public Crotch fuzzy(String values, float levenshteinDistance) {
mostRecentSibling.fuzzy(values, levenshteinDistance);
return this;
}
@Override
public Crotch sloppy(String phrase, int distance) {
mostRecentSibling.sloppy(phrase, distance);
return this;
}
@Override
public Crotch expression(String nativeSolrQueryExpression) {
mostRecentSibling.expression(nativeSolrQueryExpression);
return this;
}
@Override
public Crotch between(@Nullable Object lowerBound, @Nullable Object upperBound) {
mostRecentSibling.between(lowerBound, upperBound);
return this;
}
@Override
public Crotch between(@Nullable Object lowerBound, @Nullable Object upperBound, boolean includeLowerBound, boolean includeUpperBound) {
mostRecentSibling.between(lowerBound, upperBound, includeLowerBound, includeUpperBound);
return this;
}
@Override
public Crotch lessThan(Object upperBound) {
mostRecentSibling.lessThan(upperBound);
return this;
}
@Override
public Crotch lessThanEqual(Object upperBound) {
mostRecentSibling.lessThanEqual(upperBound);
return this;
}
@Override
public Crotch greaterThan(Object lowerBound) {
mostRecentSibling.greaterThan(lowerBound);
return this;
}
@Override
public Crotch greaterThanEqual(Object lowerBound) {
mostRecentSibling.greaterThanEqual(lowerBound);
return this;
}
@Override
public Crotch in(Object... values) {
mostRecentSibling.in(values);
return this;
}
@Override
public Crotch in(Iterable<?> values) {
mostRecentSibling.in(values);
return this;
}
@Override
public Crotch within(Point location, @Nullable Distance distance) {
mostRecentSibling.within(location, distance);
return this;
}
@Override
public Crotch near(Box box) {
mostRecentSibling.near(box);
return this;
}
@Override
public Crotch near(Point location, @Nullable Distance distance) {
mostRecentSibling.near(location, distance);
return this;
}
@Override
public Crotch function(Function function) {
mostRecentSibling.function(function);
return this;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder(this.isOr() ? " OR " : " AND ");
sb.append('(');
boolean first = true;
for (Node node : this.siblings) {
String s = node.toString();
if (first) {
s = s.replaceFirst("OR", "").replaceFirst("AND", "");
first = false;
}
sb.append(s);
}
sb.append(')');
return sb.toString();
}
// ------- NODE STUFF --------
void add(Node node) {
if (!(node instanceof Criteria)) {
throw new IllegalArgumentException("Can only add instances of Criteria");
}
node.setParent(this);
boolean containsNearFunction = this.siblings.stream().anyMatch(criteria -> criteria.getPredicates().stream()
.anyMatch(predicate -> predicate.getKey().equalsIgnoreCase("$within")));
Criteria criteria = (Criteria) node;
if (containsNearFunction) {
this.siblings.add(0, criteria);
} else {
this.siblings.add(criteria);
}
this.mostRecentSibling = node;
}
@Override
public Collection<Criteria> getSiblings() {
return Collections.unmodifiableCollection(siblings);
}
@Override
public Crotch and(Node part) {
add(part);
return this;
}
@Override
public Crotch or(Node part) {
part.setPartIsOr(true);
add(part);
return this;
}
@Override
public Crotch and(String fieldname) {
if (this.mostRecentSibling instanceof Crotch) {
((Crotch) mostRecentSibling).add(new Criteria(fieldname));
} else {
and(new Criteria(fieldname));
}
return this;
}
@Override
public Crotch or(String fieldname) {
Criteria criteria = new Criteria(fieldname);
criteria.setPartIsOr(true);
if (this.mostRecentSibling instanceof Crotch) {
((Crotch) mostRecentSibling).add(criteria);
} else {
or(new Criteria(fieldname));
}
return this;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.euphoria.core.translate.provider;
import static java.util.Objects.requireNonNull;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;
import org.apache.beam.sdk.extensions.euphoria.core.annotation.stability.Experimental;
import org.apache.beam.sdk.extensions.euphoria.core.client.operator.CompositeOperator;
import org.apache.beam.sdk.extensions.euphoria.core.client.operator.FlatMap;
import org.apache.beam.sdk.extensions.euphoria.core.client.operator.Join;
import org.apache.beam.sdk.extensions.euphoria.core.client.operator.ReduceByKey;
import org.apache.beam.sdk.extensions.euphoria.core.client.operator.Union;
import org.apache.beam.sdk.extensions.euphoria.core.client.operator.base.Operator;
import org.apache.beam.sdk.extensions.euphoria.core.translate.CompositeOperatorTranslator;
import org.apache.beam.sdk.extensions.euphoria.core.translate.FlatMapTranslator;
import org.apache.beam.sdk.extensions.euphoria.core.translate.JoinTranslator;
import org.apache.beam.sdk.extensions.euphoria.core.translate.OperatorTranslator;
import org.apache.beam.sdk.extensions.euphoria.core.translate.ReduceByKeyTranslator;
import org.apache.beam.sdk.extensions.euphoria.core.translate.TranslatorProvider;
import org.apache.beam.sdk.extensions.euphoria.core.translate.UnionTranslator;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
/**
 * Adjustable {@link TranslatorProvider} that selects first suitable translation for the registered
 * operator.
 *
 * <p>{@link OperatorTranslator Translators} can be added by calling variants of {@link
 * GenericTranslatorProvider.Builder#register(Class, OperatorTranslator) register} method during
 * build. Order of registration is important. Building is started by {@link #newBuilder()}.
 */
@Experimental
@SuppressWarnings({
  "rawtypes" // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
})
public class GenericTranslatorProvider implements TranslatorProvider {

  /** Creates a provider pre-loaded with the translators for the built-in euphoria operators. */
  public static GenericTranslatorProvider createWithDefaultTranslators() {
    return GenericTranslatorProvider.newBuilder()
        .register(FlatMap.class, new FlatMapTranslator<>())
        .register(Union.class, new UnionTranslator<>())
        .register(ReduceByKey.class, new ReduceByKeyTranslator<>())
        .register(Join.class, new JoinTranslator<>())
        // register fallback operator translator to decompose composite operators
        .register(op -> op instanceof CompositeOperator, new CompositeOperatorTranslator<>())
        .build();
  }

  /**
   * Create a new builder for provider.
   *
   * @return builder
   */
  public static Builder newBuilder() {
    return new Builder();
  }

  /** {@link GenericTranslatorProvider} builder. */
  public static class Builder {

    // kept in registration order; the first suitable descriptor wins at lookup time
    private final List<TranslationDescriptor> possibleTranslators = new ArrayList<>();

    private Builder() {}

    /**
     * Registers given {@link OperatorTranslator} to be used for given operator type.
     *
     * @param clazz class of an {@link Operator} to be translated
     * @param operatorTranslator translator to register
     * @param <OperatorT> type of an {@link Operator} to be translated
     * @return this builder
     */
    public <OperatorT extends Operator<?>> Builder register(
        Class<OperatorT> clazz, OperatorTranslator<?, ?, ? extends OperatorT> operatorTranslator) {
      possibleTranslators.add(TranslationDescriptor.of(clazz, operatorTranslator));
      return this;
    }

    /**
     * Register given {@link OperatorTranslator} to be used for operator type when given {@link
     * Predicate} holds.
     *
     * @param clazz class of an {@link Operator} to be translated
     * @param predicate user defined predicate which is tested to be true in order to apply
     *     translator to an {@link Operator}
     * @param operatorTranslator translator to register
     * @param <OperatorT> type of an {@link Operator} to be translated
     * @return this builder
     */
    public <OperatorT extends Operator<?>> Builder register(
        Class<OperatorT> clazz,
        Predicate<OperatorT> predicate,
        OperatorTranslator<?, ?, ? extends OperatorT> operatorTranslator) {
      possibleTranslators.add(TranslationDescriptor.of(clazz, predicate, operatorTranslator));
      return this;
    }

    /**
     * Registers given {@link OperatorTranslator} to be used for any operator when given {@link
     * Predicate} holds.
     *
     * @param predicate user defined predicate which is tested to be true in order to apply
     *     translator to an {@link Operator}. Note that the predicate has to be able to test any
     *     {@link Operator} instance including subtypes.
     * @param operatorTranslator translator to register
     * @return this builder
     */
    public Builder register(
        Predicate<Operator> predicate, OperatorTranslator<?, ?, Operator> operatorTranslator) {
      possibleTranslators.add(TranslationDescriptor.of(predicate, operatorTranslator));
      return this;
    }

    /** Builds the provider with the translators registered so far, preserving order. */
    public GenericTranslatorProvider build() {
      return new GenericTranslatorProvider(possibleTranslators);
    }
  }

  /**
   * Container for optional {@link Predicate user defined predicate}, optional {@link Class} of an
   * operator to be translated and {@link OperatorTranslator} itself. The predicate and operator
   * class define optional checks. At least one of them has to be present.
   *
   * <p>The {@link OperatorTranslator} is allowed to translate an operator iff it passes all the
   * checks and {@link OperatorTranslator#canTranslate(Operator) can translate} given operator. That
   * allows users to write translators specific for any operator.
   *
   * @param <OperatorT> the type of the euphoria operator
   */
  private static class TranslationDescriptor<OperatorT extends Operator<?>> {

    /** Class of an {@link Operator} given {@link #translator} can be applied on. */
    private final Optional<Class<OperatorT>> operatorClass;

    /**
     * User specified predicate, which determines whenever given {@link #translator} can be used to
     * translate an operator.
     */
    private final Optional<Predicate<OperatorT>> userDefinedPredicate;

    private final OperatorTranslator<?, ?, OperatorT> translator;

    private TranslationDescriptor(
        Optional<Class<OperatorT>> operatorClass,
        Optional<Predicate<OperatorT>> userDefinedPredicate,
        OperatorTranslator<?, ?, ? extends OperatorT> translator) {
      Preconditions.checkState(
          operatorClass.isPresent() || userDefinedPredicate.isPresent(),
          "At least user defined predicate or class of an operator have to be given.");
      @SuppressWarnings("unchecked")
      OperatorTranslator<?, ?, OperatorT> castTranslator =
          (OperatorTranslator<?, ?, OperatorT>) translator;
      this.operatorClass = operatorClass;
      this.userDefinedPredicate = userDefinedPredicate;
      this.translator = castTranslator;
    }

    static <OperatorT extends Operator<?>> TranslationDescriptor<OperatorT> of(
        Class<OperatorT> operatorClass,
        Predicate<OperatorT> userDefinedPredicate,
        OperatorTranslator<?, ?, ? extends OperatorT> translator) {
      return new TranslationDescriptor<>(
          Optional.of(requireNonNull(operatorClass)),
          Optional.of(requireNonNull(userDefinedPredicate)),
          requireNonNull(translator));
    }

    static <OperatorT extends Operator<?>> TranslationDescriptor<OperatorT> of(
        Predicate<OperatorT> userDefinedPredicate,
        OperatorTranslator<?, ?, ? extends OperatorT> translator) {
      return new TranslationDescriptor<>(
          Optional.empty(), Optional.of(userDefinedPredicate), requireNonNull(translator));
    }

    static <OperatorT extends Operator<?>> TranslationDescriptor<OperatorT> of(
        Class<OperatorT> operatorClass, OperatorTranslator<?, ?, ? extends OperatorT> translator) {
      return new TranslationDescriptor<>(
          Optional.of(requireNonNull(operatorClass)), Optional.empty(), requireNonNull(translator));
    }

    /** Returns {@code true} when all configured checks and the translator itself accept the operator. */
    private boolean checkTranslatorSuitableFor(OperatorT operator) {
      // optional class equality check (exact class match, not assignability)
      if (operatorClass.isPresent() && !operatorClass.get().equals(operator.getClass())) {
        return false;
      }
      // optional user-defined predicate check
      if (userDefinedPredicate.isPresent() && !userDefinedPredicate.get().test(operator)) {
        return false;
      }
      // mandatory check by translator itself
      return translator.canTranslate(operator);
    }

    /** Returns the wrapped translator when suitable for the given operator, empty otherwise. */
    Optional<OperatorTranslator<?, ?, OperatorT>> getTranslatorWhenSuitable(OperatorT operator) {
      if (checkTranslatorSuitableFor(operator)) {
        return Optional.of(translator);
      } else {
        return Optional.empty();
      }
    }
  }

  private final List<TranslationDescriptor> possibleTranslators;

  private GenericTranslatorProvider(List<TranslationDescriptor> possibleTranslators) {
    this.possibleTranslators = possibleTranslators;
  }

  @Override
  public <InputT, OutputT, OperatorT extends Operator<OutputT>>
      Optional<OperatorTranslator<InputT, OutputT, OperatorT>> findTranslator(OperatorT operator) {
    // first registered suitable translator wins
    for (TranslationDescriptor descriptor : possibleTranslators) {
      @SuppressWarnings("unchecked")
      Optional<OperatorTranslator<InputT, OutputT, OperatorT>> maybeTranslator =
          descriptor.getTranslatorWhenSuitable(operator);
      if (maybeTranslator.isPresent()) {
        return maybeTranslator;
      }
    }
    return Optional.empty();
  }
}
| |
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.simonvt.threepanelayout;
import android.content.Context;
import android.hardware.SensorManager;
import android.os.Build;
import android.util.FloatMath;
import android.view.ViewConfiguration;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
/**
* This class encapsulates scrolling. The duration of the scroll
* can be passed in the constructor and specifies the maximum time that
* the scrolling animation should take. Past this time, the scrolling is
* automatically moved to its final stage and computeScrollOffset()
* will always return false to indicate that scrolling is over.
*/
public class Scroller {
  private int mMode;
  private int mStartX;
  private int mStartY;
  private int mFinalX;
  private int mFinalY;
  private int mMinX;
  private int mMaxX;
  private int mMinY;
  private int mMaxY;
  private int mCurrX;
  private int mCurrY;
  private long mStartTime;
  private int mDuration;
  private float mDurationReciprocal;
  private float mDeltaX;
  private float mDeltaY;
  private boolean mFinished;
  private Interpolator mInterpolator;
  private boolean mFlywheel;
  private float mVelocity;
  private static final int DEFAULT_DURATION = 250;
  private static final int SCROLL_MODE = 0;
  private static final int FLING_MODE = 1;
  private static final float DECELERATION_RATE = (float) (Math.log(0.75) / Math.log(0.9));
  private static final float ALPHA = 800; // pixels / seconds
  private static final float START_TENSION = 0.4f; // Tension at start: (0.4 * total T, 1.0 * Distance)
  private static final float END_TENSION = 1.0f - START_TENSION;
  private static final int NB_SAMPLES = 100;
  private static final float[] SPLINE = new float[NB_SAMPLES + 1];
  private float mDeceleration;
  private final float mPpi;
  // Precompute the fling distance spline by bisecting a cubic Bezier-like curve at
  // NB_SAMPLES evenly spaced time values; also calibrate the viscous-fluid interpolator.
  static {
    float xMin = 0.0f;
    for (int i = 0; i <= NB_SAMPLES; i++) {
      final float t = (float) i / NB_SAMPLES;
      float xMax = 1.0f;
      float x, tx, coef;
      while (true) {
        x = xMin + (xMax - xMin) / 2.0f;
        coef = 3.0f * x * (1.0f - x);
        tx = coef * ((1.0f - x) * START_TENSION + x * END_TENSION) + x * x * x;
        if (Math.abs(tx - t) < 1E-5) break;
        if (tx > t) xMax = x;
        else xMin = x;
      }
      final float d = coef + x * x * x;
      SPLINE[i] = d;
    }
    SPLINE[NB_SAMPLES] = 1.0f;
    // This controls the viscous fluid effect (how much of it)
    sViscousFluidScale = 8.0f;
    // must be set to 1.0 (used in viscousFluid())
    sViscousFluidNormalize = 1.0f;
    sViscousFluidNormalize = 1.0f / viscousFluid(1.0f);
  }
  private static float sViscousFluidScale;
  private static float sViscousFluidNormalize;
  /**
   * Create a Scroller with the default duration and interpolator.
   */
  public Scroller(Context context) {
    this(context, null);
  }
  /**
   * Create a Scroller with the specified interpolator. If the interpolator is
   * null, the default (viscous) interpolator will be used. "Flywheel" behavior will
   * be in effect for apps targeting Honeycomb or newer.
   */
  public Scroller(Context context, Interpolator interpolator) {
    this(context, interpolator,
        context.getApplicationInfo().targetSdkVersion >= Build.VERSION_CODES.HONEYCOMB);
  }
  /**
   * Create a Scroller with the specified interpolator. If the interpolator is
   * null, the default (viscous) interpolator will be used. Specify whether or
   * not to support progressive "flywheel" behavior in flinging.
   */
  public Scroller(Context context, Interpolator interpolator, boolean flywheel) {
    mFinished = true;
    mInterpolator = interpolator;
    mPpi = context.getResources().getDisplayMetrics().density * 160.0f;
    mDeceleration = computeDeceleration(ViewConfiguration.getScrollFriction());
    mFlywheel = flywheel;
  }
  /**
   * The amount of friction applied to flings. The default value
   * is {@link android.view.ViewConfiguration#getScrollFriction}.
   *
   * @param friction A scalar dimension-less value representing the coefficient of
   *         friction.
   */
  public final void setFriction(float friction) {
    mDeceleration = computeDeceleration(friction);
  }
  private float computeDeceleration(float friction) {
    return SensorManager.GRAVITY_EARTH // g (m/s^2)
        * 39.37f // inch/meter
        * mPpi // pixels per inch
        * friction;
  }
  /**
   *
   * Returns whether the scroller has finished scrolling.
   *
   * @return True if the scroller has finished scrolling, false otherwise.
   */
  public final boolean isFinished() {
    return mFinished;
  }
  /**
   * Force the finished field to a particular value.
   *
   * @param finished The new finished value.
   */
  public final void forceFinished(boolean finished) {
    mFinished = finished;
  }
  /**
   * Returns how long the scroll event will take, in milliseconds.
   *
   * @return The duration of the scroll in milliseconds.
   */
  public final int getDuration() {
    return mDuration;
  }
  /**
   * Returns the current X offset in the scroll.
   *
   * @return The new X offset as an absolute distance from the origin.
   */
  public final int getCurrX() {
    return mCurrX;
  }
  /**
   * Returns the current Y offset in the scroll.
   *
   * @return The new Y offset as an absolute distance from the origin.
   */
  public final int getCurrY() {
    return mCurrY;
  }
  /**
   * Returns the current velocity.
   *
   * @return The original velocity less the deceleration. Result may be
   * negative.
   */
  public float getCurrVelocity() {
    return mVelocity - mDeceleration * timePassed() / 2000.0f;
  }
  /**
   * Returns the start X offset in the scroll.
   *
   * @return The start X offset as an absolute distance from the origin.
   */
  public final int getStartX() {
    return mStartX;
  }
  /**
   * Returns the start Y offset in the scroll.
   *
   * @return The start Y offset as an absolute distance from the origin.
   */
  public final int getStartY() {
    return mStartY;
  }
  /**
   * Returns where the scroll will end. Valid only for "fling" scrolls.
   *
   * @return The final X offset as an absolute distance from the origin.
   */
  public final int getFinalX() {
    return mFinalX;
  }
  /**
   * Returns where the scroll will end. Valid only for "fling" scrolls.
   *
   * @return The final Y offset as an absolute distance from the origin.
   */
  public final int getFinalY() {
    return mFinalY;
  }
  /**
   * Call this when you want to know the new location. If it returns true,
   * the animation is not yet finished. loc will be altered to provide the
   * new location.
   */
  public boolean computeScrollOffset() {
    if (mFinished) {
      return false;
    }
    int timePassed = (int) (AnimationUtils.currentAnimationTimeMillis() - mStartTime);
    if (timePassed < mDuration) {
      switch (mMode) {
        case SCROLL_MODE:
          float x = timePassed * mDurationReciprocal;
          if (mInterpolator == null)
            x = viscousFluid(x);
          else
            x = mInterpolator.getInterpolation(x);
          mCurrX = mStartX + Math.round(x * mDeltaX);
          mCurrY = mStartY + Math.round(x * mDeltaY);
          break;
        case FLING_MODE:
          final float t = (float) timePassed / mDuration;
          final int index = (int) (NB_SAMPLES * t);
          final float tInf = (float) index / NB_SAMPLES;
          final float tSup = (float) (index + 1) / NB_SAMPLES;
          final float dInf = SPLINE[index];
          final float dSup = SPLINE[index + 1];
          // linear interpolation between the two nearest spline samples
          final float distanceCoef = dInf + (t - tInf) / (tSup - tInf) * (dSup - dInf);
          mCurrX = mStartX + Math.round(distanceCoef * (mFinalX - mStartX));
          // Pin to mMinX <= mCurrX <= mMaxX
          mCurrX = Math.min(mCurrX, mMaxX);
          mCurrX = Math.max(mCurrX, mMinX);
          mCurrY = mStartY + Math.round(distanceCoef * (mFinalY - mStartY));
          // Pin to mMinY <= mCurrY <= mMaxY
          mCurrY = Math.min(mCurrY, mMaxY);
          mCurrY = Math.max(mCurrY, mMinY);
          if (mCurrX == mFinalX && mCurrY == mFinalY) {
            mFinished = true;
          }
          break;
      }
    } else {
      mCurrX = mFinalX;
      mCurrY = mFinalY;
      mFinished = true;
    }
    return true;
  }
  /**
   * Start scrolling by providing a starting point and the distance to travel.
   * The scroll will use the default value of 250 milliseconds for the
   * duration.
   *
   * @param startX Starting horizontal scroll offset in pixels. Positive
   *        numbers will scroll the content to the left.
   * @param startY Starting vertical scroll offset in pixels. Positive numbers
   *        will scroll the content up.
   * @param dx Horizontal distance to travel. Positive numbers will scroll the
   *        content to the left.
   * @param dy Vertical distance to travel. Positive numbers will scroll the
   *        content up.
   */
  public void startScroll(int startX, int startY, int dx, int dy) {
    startScroll(startX, startY, dx, dy, DEFAULT_DURATION);
  }
  /**
   * Start scrolling by providing a starting point and the distance to travel.
   *
   * @param startX Starting horizontal scroll offset in pixels. Positive
   *        numbers will scroll the content to the left.
   * @param startY Starting vertical scroll offset in pixels. Positive numbers
   *        will scroll the content up.
   * @param dx Horizontal distance to travel. Positive numbers will scroll the
   *        content to the left.
   * @param dy Vertical distance to travel. Positive numbers will scroll the
   *        content up.
   * @param duration Duration of the scroll in milliseconds.
   */
  public void startScroll(int startX, int startY, int dx, int dy, int duration) {
    mMode = SCROLL_MODE;
    mFinished = false;
    mDuration = duration;
    mStartTime = AnimationUtils.currentAnimationTimeMillis();
    mStartX = startX;
    mStartY = startY;
    mFinalX = startX + dx;
    mFinalY = startY + dy;
    mDeltaX = dx;
    mDeltaY = dy;
    mDurationReciprocal = 1.0f / (float) mDuration;
  }
  /**
   * Start scrolling based on a fling gesture. The distance travelled will
   * depend on the initial velocity of the fling.
   *
   * @param startX Starting point of the scroll (X)
   * @param startY Starting point of the scroll (Y)
   * @param velocityX Initial velocity of the fling (X) measured in pixels per
   *        second.
   * @param velocityY Initial velocity of the fling (Y) measured in pixels per
   *        second
   * @param minX Minimum X value. The scroller will not scroll past this
   *        point.
   * @param maxX Maximum X value. The scroller will not scroll past this
   *        point.
   * @param minY Minimum Y value. The scroller will not scroll past this
   *        point.
   * @param maxY Maximum Y value. The scroller will not scroll past this
   *        point.
   */
  public void fling(int startX, int startY, int velocityX, int velocityY,
      int minX, int maxX, int minY, int maxY) {
    // Continue a scroll or fling in progress
    if (mFlywheel && !mFinished) {
      float oldVel = getCurrVelocity();
      float dx = (float) (mFinalX - mStartX);
      float dy = (float) (mFinalY - mStartY);
      // FloatMath is deprecated (removed in API 23); Math.sqrt is the documented replacement
      float hyp = (float) Math.sqrt(dx * dx + dy * dy);
      float ndx = dx / hyp;
      float ndy = dy / hyp;
      float oldVelocityX = ndx * oldVel;
      float oldVelocityY = ndy * oldVel;
      if (Math.signum(velocityX) == Math.signum(oldVelocityX)
          && Math.signum(velocityY) == Math.signum(oldVelocityY)) {
        velocityX += oldVelocityX;
        velocityY += oldVelocityY;
      }
    }
    mMode = FLING_MODE;
    mFinished = false;
    // widen to double before squaring so large velocities do not overflow int arithmetic
    float velocity =
        (float) Math.sqrt((double) velocityX * velocityX + (double) velocityY * velocityY);
    mVelocity = velocity;
    final double l = Math.log(START_TENSION * velocity / ALPHA);
    mDuration = (int) (1000.0 * Math.exp(l / (DECELERATION_RATE - 1.0)));
    mStartTime = AnimationUtils.currentAnimationTimeMillis();
    mStartX = startX;
    mStartY = startY;
    float coeffX = velocity == 0 ? 1.0f : velocityX / velocity;
    float coeffY = velocity == 0 ? 1.0f : velocityY / velocity;
    int totalDistance =
        (int) (ALPHA * Math.exp(DECELERATION_RATE / (DECELERATION_RATE - 1.0) * l));
    mMinX = minX;
    mMaxX = maxX;
    mMinY = minY;
    mMaxY = maxY;
    mFinalX = startX + Math.round(totalDistance * coeffX);
    // Pin to mMinX <= mFinalX <= mMaxX
    mFinalX = Math.min(mFinalX, mMaxX);
    mFinalX = Math.max(mFinalX, mMinX);
    mFinalY = startY + Math.round(totalDistance * coeffY);
    // Pin to mMinY <= mFinalY <= mMaxY
    mFinalY = Math.min(mFinalY, mMaxY);
    mFinalY = Math.max(mFinalY, mMinY);
  }
  static float viscousFluid(float x) {
    x *= sViscousFluidScale;
    if (x < 1.0f) {
      x -= (1.0f - (float) Math.exp(-x));
    } else {
      float start = 0.36787944117f; // 1/e == exp(-1)
      x = 1.0f - (float) Math.exp(1.0f - x);
      x = start + x * (1.0f - start);
    }
    x *= sViscousFluidNormalize;
    return x;
  }
  /**
   * Stops the animation. Contrary to {@link #forceFinished(boolean)},
   * aborting the animating cause the scroller to move to the final x and y
   * position
   *
   * @see #forceFinished(boolean)
   */
  public void abortAnimation() {
    mCurrX = mFinalX;
    mCurrY = mFinalY;
    mFinished = true;
  }
  /**
   * Extend the scroll animation. This allows a running animation to scroll
   * further and longer, when used with {@link #setFinalX(int)} or {@link #setFinalY(int)}.
   *
   * @param extend Additional time to scroll in milliseconds.
   * @see #setFinalX(int)
   * @see #setFinalY(int)
   */
  public void extendDuration(int extend) {
    int passed = timePassed();
    mDuration = passed + extend;
    mDurationReciprocal = 1.0f / mDuration;
    mFinished = false;
  }
  /**
   * Returns the time elapsed since the beginning of the scrolling.
   *
   * @return The elapsed time in milliseconds.
   */
  public int timePassed() {
    return (int) (AnimationUtils.currentAnimationTimeMillis() - mStartTime);
  }
  /**
   * Sets the final position (X) for this scroller.
   *
   * @param newX The new X offset as an absolute distance from the origin.
   * @see #extendDuration(int)
   * @see #setFinalY(int)
   */
  public void setFinalX(int newX) {
    mFinalX = newX;
    mDeltaX = mFinalX - mStartX;
    mFinished = false;
  }
  /**
   * Sets the final position (Y) for this scroller.
   *
   * @param newY The new Y offset as an absolute distance from the origin.
   * @see #extendDuration(int)
   * @see #setFinalX(int)
   */
  public void setFinalY(int newY) {
    mFinalY = newY;
    mDeltaY = mFinalY - mStartY;
    mFinished = false;
  }
  /**
   * @hide
   */
  public boolean isScrollingInDirection(float xvel, float yvel) {
    return !mFinished && Math.signum(xvel) == Math.signum(mFinalX - mStartX)
        && Math.signum(yvel) == Math.signum(mFinalY - mStartY);
  }
}
| |
package org.apereo.cas.support.saml.util;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.authentication.principal.WebApplicationService;
import org.apereo.cas.support.saml.OpenSamlConfigBean;
import org.apereo.cas.support.saml.SamlUtils;
import org.apereo.cas.support.saml.authentication.SamlAuthenticationMetaDataPopulator;
import org.apereo.cas.support.saml.authentication.principal.SamlService;
import org.apereo.cas.util.DateTimeUtils;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.opensaml.core.xml.XMLObject;
import org.opensaml.messaging.context.MessageContext;
import org.opensaml.saml.common.SAMLObject;
import org.opensaml.saml.common.SAMLVersion;
import org.opensaml.saml.saml1.core.Assertion;
import org.opensaml.saml.saml1.core.Attribute;
import org.opensaml.saml.saml1.core.AttributeStatement;
import org.opensaml.saml.saml1.core.AttributeValue;
import org.opensaml.saml.saml1.core.Audience;
import org.opensaml.saml.saml1.core.AudienceRestrictionCondition;
import org.opensaml.saml.saml1.core.AuthenticationStatement;
import org.opensaml.saml.saml1.core.Conditions;
import org.opensaml.saml.saml1.core.ConfirmationMethod;
import org.opensaml.saml.saml1.core.NameIdentifier;
import org.opensaml.saml.saml1.core.Response;
import org.opensaml.saml.saml1.core.Status;
import org.opensaml.saml.saml1.core.StatusCode;
import org.opensaml.saml.saml1.core.StatusMessage;
import org.opensaml.saml.saml1.core.Subject;
import org.opensaml.saml.saml1.core.SubjectConfirmation;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.namespace.QName;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
 * Response builder for the SAML 1.x protocol: constructs responses,
 * assertions, statements and related SAML1 objects, and encodes responses
 * onto the outbound SOAP transport.
 *
 * @author Misagh Moayyed
 * @since 4.1
 */
@Slf4j
public class Saml10ObjectBuilder extends AbstractSamlObjectBuilder {
// Artifact confirmation method applied by newSubject(String).
private static final String CONFIRMATION_METHOD = "urn:oasis:names:tc:SAML:1.0:cm:artifact";
private static final long serialVersionUID = -4711012620700270554L;
public Saml10ObjectBuilder(final OpenSamlConfigBean configBean) {
super(configBean);
}
/**
 * Sets the InResponseTo attribute on the SAML1 response from the original
 * request id, when the service is a {@link SamlService} that carries a
 * non-blank request id; otherwise leaves the response untouched.
 *
 * @param service the service
 * @param samlResponse the saml 1 response
 */
private static void setInResponseToForSamlResponseIfNeeded(final Service service, final Response samlResponse) {
if (service instanceof SamlService) {
val samlService = (SamlService) service;
val requestId = samlService.getRequestId();
if (StringUtils.isNotBlank(requestId)) {
samlResponse.setInResponseTo(requestId);
}
}
}
/**
 * Create a new SAML response object.
 *
 * @param id the id
 * @param issueInstant the issue instant
 * @param recipient the recipient; used as the initial InResponseTo value,
 * which may then be overridden by the original SAML request id when the
 * service is a SamlService
 * @param service the service
 * @return the response
 */
public Response newResponse(final String id, final ZonedDateTime issueInstant,
final String recipient, final WebApplicationService service) {
val samlResponse = newSamlObject(Response.class);
samlResponse.setID(id);
samlResponse.setIssueInstant(DateTimeUtils.dateTimeOf(issueInstant));
samlResponse.setVersion(SAMLVersion.VERSION_11);
samlResponse.setInResponseTo(recipient);
setInResponseToForSamlResponseIfNeeded(service, samlResponse);
return samlResponse;
}
/**
 * Create a new SAML1 assertion object carrying the given authentication
 * statement.
 *
 * @param authnStatement the authn statement
 * @param issuer the issuer
 * @param issuedAt the issued at
 * @param id the id
 * @return the assertion
 */
public Assertion newAssertion(final AuthenticationStatement authnStatement, final String issuer,
final ZonedDateTime issuedAt, final String id) {
val assertion = newSamlObject(Assertion.class);
assertion.setID(id);
assertion.setIssueInstant(DateTimeUtils.dateTimeOf(issuedAt));
assertion.setIssuer(issuer);
assertion.getAuthenticationStatements().add(authnStatement);
return assertion;
}
/**
 * New conditions element covering the window [issuedAt, issuedAt + issueLength)
 * with a single audience restriction on the given audience URI.
 *
 * @param issuedAt the issued at
 * @param audienceUri the service id
 * @param issueLength the validity window length, in seconds
 * @return the conditions
 */
public Conditions newConditions(final ZonedDateTime issuedAt, final String audienceUri, final long issueLength) {
val conditions = newSamlObject(Conditions.class);
conditions.setNotBefore(DateTimeUtils.dateTimeOf(issuedAt));
conditions.setNotOnOrAfter(DateTimeUtils.dateTimeOf(issuedAt.plus(issueLength, ChronoUnit.SECONDS)));
val audienceRestriction = newSamlObject(AudienceRestrictionCondition.class);
val audience = newSamlObject(Audience.class);
audience.setUri(audienceUri);
audienceRestriction.getAudiences().add(audience);
conditions.getAudienceRestrictionConditions().add(audienceRestriction);
return conditions;
}
/**
 * Create a new SAML status object with the given code and no status message.
 *
 * @param codeValue the code value
 * @return the status
 */
public Status newStatus(final QName codeValue) {
return newStatus(codeValue, StringUtils.EMPTY);
}
/**
 * Create a new SAML status object. A StatusMessage child is attached only
 * when the message is non-blank.
 *
 * @param codeValue the code value
 * @param statusMessage the status message; may be blank to omit the message
 * @return the status
 */
public Status newStatus(final QName codeValue, final String statusMessage) {
val status = newSamlObject(Status.class);
val code = newSamlObject(StatusCode.class);
code.setValue(codeValue);
status.setStatusCode(code);
if (StringUtils.isNotBlank(statusMessage)) {
val message = newSamlObject(StatusMessage.class);
message.setMessage(statusMessage);
status.setStatusMessage(message);
}
return status;
}
/**
 * New authentication statement. Uses the first entry of the supplied
 * authentication-method collection, falling back to the "unspecified"
 * method when the collection is null or empty.
 *
 * @param authenticationDate the authentication date
 * @param authenticationMethod the authentication method(s); only the first is used
 * @param subjectId the subject id
 * @return the authentication statement
 */
public AuthenticationStatement newAuthenticationStatement(final ZonedDateTime authenticationDate,
final Collection<Object> authenticationMethod,
final String subjectId) {
val authnStatement = newSamlObject(AuthenticationStatement.class);
authnStatement.setAuthenticationInstant(DateTimeUtils.dateTimeOf(authenticationDate));
authnStatement.setAuthenticationMethod(
authenticationMethod != null && !authenticationMethod.isEmpty()
? authenticationMethod.iterator().next().toString()
: SamlAuthenticationMetaDataPopulator.AUTHN_METHOD_UNSPECIFIED);
authnStatement.setSubject(newSubject(subjectId));
return authnStatement;
}
/**
 * New subject element that uses the confirmation method
 * {@link #CONFIRMATION_METHOD}.
 *
 * @param identifier the identifier
 * @return the subject
 */
public Subject newSubject(final String identifier) {
return newSubject(identifier, CONFIRMATION_METHOD);
}
/**
 * New subject element with the given confirmation method and a name
 * identifier holding the supplied value.
 *
 * @param identifier the identifier
 * @param confirmationMethod the confirmation method
 * @return the subject
 */
public Subject newSubject(final String identifier, final String confirmationMethod) {
val confirmation = newSamlObject(SubjectConfirmation.class);
val method = newSamlObject(ConfirmationMethod.class);
method.setConfirmationMethod(confirmationMethod);
confirmation.getConfirmationMethods().add(method);
val nameIdentifier = newSamlObject(NameIdentifier.class);
nameIdentifier.setValue(identifier);
val subject = newSamlObject(Subject.class);
subject.setNameIdentifier(nameIdentifier);
subject.setSubjectConfirmation(confirmation);
return subject;
}
/**
 * Add saml1 attribute values for attribute, using the default SAML1
 * AttributeValue element name and an empty value type.
 *
 * @param attributeName the attribute name
 * @param attributeValue the attribute value
 * @param attributeList the attribute list to append to
 */
public void addAttributeValuesToSaml1Attribute(final String attributeName,
final Object attributeValue,
final List<XMLObject> attributeList) {
addAttributeValuesToSamlAttribute(attributeName, attributeValue, StringUtils.EMPTY,
attributeList, AttributeValue.DEFAULT_ELEMENT_NAME);
}
/**
 * New attribute statement for the given subject. Attributes whose value is
 * an empty collection are skipped (logged at INFO); the namespace is only
 * applied when non-blank.
 *
 * @param subject the subject
 * @param attributes the attributes
 * @param attributeNamespace the attribute namespace; may be blank
 * @return the attribute statement
 */
public AttributeStatement newAttributeStatement(final Subject subject,
final Map<String, Object> attributes,
final String attributeNamespace) {
val attrStatement = newSamlObject(AttributeStatement.class);
attrStatement.setSubject(subject);
for (val e : attributes.entrySet()) {
if (e.getValue() instanceof Collection<?> && ((Collection<?>) e.getValue()).isEmpty()) {
LOGGER.info("Skipping attribute [{}] because it does not have any values.", e.getKey());
continue;
}
val attribute = newSamlObject(Attribute.class);
attribute.setAttributeName(e.getKey());
if (StringUtils.isNotBlank(attributeNamespace)) {
attribute.setAttributeNamespace(attributeNamespace);
}
addAttributeValuesToSaml1Attribute(e.getKey(), e.getValue(), attribute.getAttributeValues());
attrStatement.getAttributes().add(attribute);
}
return attrStatement;
}
/**
 * Encode response and pass it onto the outbound transport.
 * Uses {@link CasHttpSoap11Encoder} to handle encoding.
 *
 * @param httpResponse the http response
 * @param httpRequest the http request (unused by the SOAP encoder but kept for the caller contract)
 * @param samlMessage the saml response
 * @throws Exception the exception in case encoding fails.
 */
public void encodeSamlResponse(final HttpServletResponse httpResponse,
final HttpServletRequest httpRequest,
final Response samlMessage) throws Exception {
// Log the outgoing XML before handing it to the encoder.
SamlUtils.logSamlObject(this.openSamlConfigBean, samlMessage);
val encoder = new CasHttpSoap11Encoder();
val context = new MessageContext<SAMLObject>();
context.setMessage(samlMessage);
encoder.setHttpServletResponse(httpResponse);
encoder.setMessageContext(context);
encoder.initialize();
encoder.prepareContext();
encoder.encode();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.storm.generated;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// NOTE(review): Thrift-generated struct (see the "DO NOT EDIT" header at the
// top of this file). Fix issues by regenerating from the .thrift definition
// rather than hand-editing this class.
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
public class ReadableBlobMeta implements org.apache.thrift.TBase<ReadableBlobMeta, ReadableBlobMeta._Fields>, java.io.Serializable, Cloneable, Comparable<ReadableBlobMeta> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ReadableBlobMeta");
private static final org.apache.thrift.protocol.TField SETTABLE_FIELD_DESC = new org.apache.thrift.protocol.TField("settable", org.apache.thrift.protocol.TType.STRUCT, (short)1);
private static final org.apache.thrift.protocol.TField VERSION_FIELD_DESC = new org.apache.thrift.protocol.TField("version", org.apache.thrift.protocol.TType.I64, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new ReadableBlobMetaStandardSchemeFactory());
schemes.put(TupleScheme.class, new ReadableBlobMetaTupleSchemeFactory());
}
private SettableBlobMeta settable; // required
private long version; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SETTABLE((short)1, "settable"),
VERSION((short)2, "version");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // SETTABLE
return SETTABLE;
case 2: // VERSION
return VERSION;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// The bitfield tracks which primitive fields have been assigned; 'version' uses bit 0.
private static final int __VERSION_ISSET_ID = 0;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SETTABLE, new org.apache.thrift.meta_data.FieldMetaData("settable", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, SettableBlobMeta.class)));
tmpMap.put(_Fields.VERSION, new org.apache.thrift.meta_data.FieldMetaData("version", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ReadableBlobMeta.class, metaDataMap);
}
public ReadableBlobMeta() {
}
public ReadableBlobMeta(
SettableBlobMeta settable,
long version)
{
this();
this.settable = settable;
this.version = version;
set_version_isSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public ReadableBlobMeta(ReadableBlobMeta other) {
__isset_bitfield = other.__isset_bitfield;
if (other.is_set_settable()) {
this.settable = new SettableBlobMeta(other.settable);
}
this.version = other.version;
}
public ReadableBlobMeta deepCopy() {
return new ReadableBlobMeta(this);
}
@Override
public void clear() {
this.settable = null;
set_version_isSet(false);
this.version = 0;
}
public SettableBlobMeta get_settable() {
return this.settable;
}
public void set_settable(SettableBlobMeta settable) {
this.settable = settable;
}
public void unset_settable() {
this.settable = null;
}
/** Returns true if field settable is set (has been assigned a value) and false otherwise */
public boolean is_set_settable() {
return this.settable != null;
}
public void set_settable_isSet(boolean value) {
if (!value) {
this.settable = null;
}
}
public long get_version() {
return this.version;
}
public void set_version(long version) {
this.version = version;
set_version_isSet(true);
}
public void unset_version() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __VERSION_ISSET_ID);
}
/** Returns true if field version is set (has been assigned a value) and false otherwise */
public boolean is_set_version() {
return EncodingUtils.testBit(__isset_bitfield, __VERSION_ISSET_ID);
}
public void set_version_isSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __VERSION_ISSET_ID, value);
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SETTABLE:
if (value == null) {
unset_settable();
} else {
set_settable((SettableBlobMeta)value);
}
break;
case VERSION:
if (value == null) {
unset_version();
} else {
set_version((Long)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case SETTABLE:
return get_settable();
case VERSION:
return get_version();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SETTABLE:
return is_set_settable();
case VERSION:
return is_set_version();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof ReadableBlobMeta)
return this.equals((ReadableBlobMeta)that);
return false;
}
public boolean equals(ReadableBlobMeta that) {
if (that == null)
return false;
boolean this_present_settable = true && this.is_set_settable();
boolean that_present_settable = true && that.is_set_settable();
if (this_present_settable || that_present_settable) {
if (!(this_present_settable && that_present_settable))
return false;
if (!this.settable.equals(that.settable))
return false;
}
boolean this_present_version = true;
boolean that_present_version = true;
if (this_present_version || that_present_version) {
if (!(this_present_version && that_present_version))
return false;
if (this.version != that.version)
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_settable = true && (is_set_settable());
list.add(present_settable);
if (present_settable)
list.add(settable);
boolean present_version = true;
list.add(present_version);
if (present_version)
list.add(version);
return list.hashCode();
}
@Override
public int compareTo(ReadableBlobMeta other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(is_set_settable()).compareTo(other.is_set_settable());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_settable()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.settable, other.settable);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(is_set_version()).compareTo(other.is_set_version());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_version()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.version, other.version);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("ReadableBlobMeta(");
// Generated separator bookkeeping: 'first' is vestigial here since both fields are always printed.
boolean first = true;
sb.append("settable:");
if (this.settable == null) {
sb.append("null");
} else {
sb.append(this.settable);
}
first = false;
if (!first) sb.append(", ");
sb.append("version:");
sb.append(this.version);
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (!is_set_settable()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'settable' is unset! Struct:" + toString());
}
if (!is_set_version()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'version' is unset! Struct:" + toString());
}
// check for sub-struct validity
if (settable != null) {
settable.validate();
}
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class ReadableBlobMetaStandardSchemeFactory implements SchemeFactory {
public ReadableBlobMetaStandardScheme getScheme() {
return new ReadableBlobMetaStandardScheme();
}
}
private static class ReadableBlobMetaStandardScheme extends StandardScheme<ReadableBlobMeta> {
public void read(org.apache.thrift.protocol.TProtocol iprot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // SETTABLE
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.settable = new SettableBlobMeta();
struct.settable.read(iprot);
struct.set_settable_isSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // VERSION
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.version = iprot.readI64();
struct.set_version_isSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.settable != null) {
oprot.writeFieldBegin(SETTABLE_FIELD_DESC);
struct.settable.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldBegin(VERSION_FIELD_DESC);
oprot.writeI64(struct.version);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class ReadableBlobMetaTupleSchemeFactory implements SchemeFactory {
public ReadableBlobMetaTupleScheme getScheme() {
return new ReadableBlobMetaTupleScheme();
}
}
private static class ReadableBlobMetaTupleScheme extends TupleScheme<ReadableBlobMeta> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
// Both fields are required, so they are serialized unconditionally with no bitset header.
struct.settable.write(oprot);
oprot.writeI64(struct.version);
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.settable = new SettableBlobMeta();
struct.settable.read(iprot);
struct.set_settable_isSet(true);
struct.version = iprot.readI64();
struct.set_version_isSet(true);
}
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleemail.model;
import java.io.Serializable;
/**
* <p>
* Represents the destination of the message, consisting of To:, CC:, and BCC:
* fields.
* </p>
* <p>
* By default, the string must be 7-bit ASCII. If the text must contain any
* other characters, then you must use MIME encoded-word syntax (RFC 2047)
* instead of a literal string. MIME encoded-word syntax uses the following
* form: <code>=?charset?encoding?encoded-text?=</code>. For more information,
* see <a href="http://tools.ietf.org/html/rfc2047">RFC 2047</a>.
* </p>
*/
public class Destination implements Serializable, Cloneable {
/**
* <p>
* The To: field(s) of the message.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> toAddresses;
/**
* <p>
* The CC: field(s) of the message.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> ccAddresses;
/**
* <p>
* The BCC: field(s) of the message.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<String> bccAddresses;
/**
* Default constructor for Destination object. Callers should use the setter
* or fluent setter (with...) methods to initialize the object after
* creating it.
*/
public Destination() {
}
/**
* Constructs a new Destination object. Callers should use the setter or
* fluent setter (with...) methods to initialize any additional object
* members.
*
* @param toAddresses
* The To: field(s) of the message.
*/
public Destination(java.util.List<String> toAddresses) {
setToAddresses(toAddresses);
}
/**
* <p>
* The To: field(s) of the message.
* </p>
*
* @return The To: field(s) of the message.
*/
public java.util.List<String> getToAddresses() {
if (toAddresses == null) {
toAddresses = new com.amazonaws.internal.SdkInternalList<String>();
}
return toAddresses;
}
/**
* <p>
* The To: field(s) of the message.
* </p>
*
* @param toAddresses
* The To: field(s) of the message.
*/
public void setToAddresses(java.util.Collection<String> toAddresses) {
if (toAddresses == null) {
this.toAddresses = null;
return;
}
this.toAddresses = new com.amazonaws.internal.SdkInternalList<String>(
toAddresses);
}
/**
* <p>
* The To: field(s) of the message.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setToAddresses(java.util.Collection)} or
* {@link #withToAddresses(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param toAddresses
* The To: field(s) of the message.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public Destination withToAddresses(String... toAddresses) {
if (this.toAddresses == null) {
setToAddresses(new com.amazonaws.internal.SdkInternalList<String>(
toAddresses.length));
}
for (String ele : toAddresses) {
this.toAddresses.add(ele);
}
return this;
}
/**
* <p>
* The To: field(s) of the message.
* </p>
*
* @param toAddresses
* The To: field(s) of the message.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public Destination withToAddresses(java.util.Collection<String> toAddresses) {
setToAddresses(toAddresses);
return this;
}
/**
* <p>
* The CC: field(s) of the message.
* </p>
*
* @return The CC: field(s) of the message.
*/
public java.util.List<String> getCcAddresses() {
if (ccAddresses == null) {
ccAddresses = new com.amazonaws.internal.SdkInternalList<String>();
}
return ccAddresses;
}
/**
* <p>
* The CC: field(s) of the message.
* </p>
*
* @param ccAddresses
* The CC: field(s) of the message.
*/
public void setCcAddresses(java.util.Collection<String> ccAddresses) {
if (ccAddresses == null) {
this.ccAddresses = null;
return;
}
this.ccAddresses = new com.amazonaws.internal.SdkInternalList<String>(
ccAddresses);
}
/**
* <p>
* The CC: field(s) of the message.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setCcAddresses(java.util.Collection)} or
* {@link #withCcAddresses(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param ccAddresses
* The CC: field(s) of the message.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public Destination withCcAddresses(String... ccAddresses) {
if (this.ccAddresses == null) {
setCcAddresses(new com.amazonaws.internal.SdkInternalList<String>(
ccAddresses.length));
}
for (String ele : ccAddresses) {
this.ccAddresses.add(ele);
}
return this;
}
/**
* <p>
* The CC: field(s) of the message.
* </p>
*
* @param ccAddresses
* The CC: field(s) of the message.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public Destination withCcAddresses(java.util.Collection<String> ccAddresses) {
setCcAddresses(ccAddresses);
return this;
}
/**
* <p>
* The BCC: field(s) of the message.
* </p>
*
* @return The BCC: field(s) of the message.
*/
public java.util.List<String> getBccAddresses() {
if (bccAddresses == null) {
bccAddresses = new com.amazonaws.internal.SdkInternalList<String>();
}
return bccAddresses;
}
/**
* <p>
* The BCC: field(s) of the message.
* </p>
*
* @param bccAddresses
* The BCC: field(s) of the message.
*/
public void setBccAddresses(java.util.Collection<String> bccAddresses) {
if (bccAddresses == null) {
this.bccAddresses = null;
return;
}
this.bccAddresses = new com.amazonaws.internal.SdkInternalList<String>(
bccAddresses);
}
/**
* <p>
* The BCC: field(s) of the message.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setBccAddresses(java.util.Collection)} or
* {@link #withBccAddresses(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param bccAddresses
* The BCC: field(s) of the message.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public Destination withBccAddresses(String... bccAddresses) {
if (this.bccAddresses == null) {
setBccAddresses(new com.amazonaws.internal.SdkInternalList<String>(
bccAddresses.length));
}
for (String ele : bccAddresses) {
this.bccAddresses.add(ele);
}
return this;
}
/**
* <p>
* The BCC: field(s) of the message.
* </p>
*
* @param bccAddresses
* The BCC: field(s) of the message.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public Destination withBccAddresses(
java.util.Collection<String> bccAddresses) {
setBccAddresses(bccAddresses);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getToAddresses() != null)
sb.append("ToAddresses: " + getToAddresses() + ",");
if (getCcAddresses() != null)
sb.append("CcAddresses: " + getCcAddresses() + ",");
if (getBccAddresses() != null)
sb.append("BccAddresses: " + getBccAddresses());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof Destination == false)
return false;
Destination other = (Destination) obj;
if (other.getToAddresses() == null ^ this.getToAddresses() == null)
return false;
if (other.getToAddresses() != null
&& other.getToAddresses().equals(this.getToAddresses()) == false)
return false;
if (other.getCcAddresses() == null ^ this.getCcAddresses() == null)
return false;
if (other.getCcAddresses() != null
&& other.getCcAddresses().equals(this.getCcAddresses()) == false)
return false;
if (other.getBccAddresses() == null ^ this.getBccAddresses() == null)
return false;
if (other.getBccAddresses() != null
&& other.getBccAddresses().equals(this.getBccAddresses()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getToAddresses() == null) ? 0 : getToAddresses().hashCode());
hashCode = prime
* hashCode
+ ((getCcAddresses() == null) ? 0 : getCcAddresses().hashCode());
hashCode = prime
* hashCode
+ ((getBccAddresses() == null) ? 0 : getBccAddresses()
.hashCode());
return hashCode;
}
    /**
     * Creates a shallow copy of this object via {@link Object#clone()}.
     * The class is expected to implement {@code Cloneable}, so the checked
     * exception is treated as an impossible state.
     */
    @Override
    public Destination clone() {
        try {
            return (Destination) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice: rethrow as unchecked to satisfy the compiler.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
package org.yeastrc.qc_plots.dao.jdbc;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.log4j.Logger;
import org.yeastrc.ms.dao.DAOFactory;
import org.yeastrc.qc_plots.dao.QCPlotDataDAO;
import org.yeastrc.qc_plots.dto.QCPlotDataDTO;
public class QCPlotDataDAOImpl implements QCPlotDataDAO {

    private static final Logger log = Logger.getLogger(QCPlotDataDAOImpl.class);

    private static final String insertSQL
        = "INSERT INTO qc_plot_data "
        + "(experiment_id, plot_type, plot_data, scan_count, create_time_in_seconds, data_version ) "
        + "VALUES ( ?, ?, ?, ?, ?, ? )"
        + " ON DUPLICATE KEY UPDATE "
        + " plot_data = ?, scan_count = ?, create_time_in_seconds = ?, data_version = ?";

    /**
     * Inserts the row for (experiment_id, plot_type), or updates the existing
     * row via MySQL's "ON DUPLICATE KEY UPDATE" (same values bound a second
     * time for the UPDATE half of the statement).
     *
     * @param qcPlotDataDTO the row to save; all fields are written
     * @return the experiment id of the saved row
     * @throws RuntimeException wrapping any SQL failure (already logged)
     */
    @Override
    public int saveOrUpdate(QCPlotDataDTO qcPlotDataDTO) {
        Connection connection = null;
        PreparedStatement pstmt = null;
        try {
            connection = DAOFactory.instance().getConnection();
            pstmt = connection.prepareStatement( insertSQL );
            int paramCounter = 0;
            // For insert portion of statement
            pstmt.setInt( ++paramCounter, qcPlotDataDTO.getExperimentId() );
            pstmt.setString( ++paramCounter, qcPlotDataDTO.getPlotType() );
            pstmt.setString( ++paramCounter, qcPlotDataDTO.getPlotData() );
            pstmt.setInt( ++paramCounter, qcPlotDataDTO.getScanCount() );
            pstmt.setInt( ++paramCounter, qcPlotDataDTO.getCreateTimeInSeconds() );
            pstmt.setInt( ++paramCounter, qcPlotDataDTO.getDataVersion() );
            // For update portion of statement
            pstmt.setString( ++paramCounter, qcPlotDataDTO.getPlotData() );
            pstmt.setInt( ++paramCounter, qcPlotDataDTO.getScanCount() );
            pstmt.setInt( ++paramCounter, qcPlotDataDTO.getCreateTimeInSeconds() );
            pstmt.setInt( ++paramCounter, qcPlotDataDTO.getDataVersion() );
            // MySQL reports 1 for an insert, 2 for an update, 0 for a no-op
            // update; none of these is an error, so the count is not checked.
            pstmt.executeUpdate();
        } catch (Exception sqlEx) {
            String msg = "save :Exception '" + sqlEx.toString() + ".\nSQL = " + insertSQL ;
            log.error( msg, sqlEx);
            throw new RuntimeException( msg, sqlEx );
        } finally {
            closeQuietly( null, pstmt, connection );
        }
        return qcPlotDataDTO.getExperimentId() ;
    }

    private String loadFromExperimentIdPlotType
        = "SELECT plot_data, data_version FROM qc_plot_data WHERE experiment_id = ? AND plot_type = ?";

    /**
     * Loads the newest stored plot data for the given experiment and plot type.
     *
     * @return the populated DTO, or null if no row exists
     * @throws RuntimeException wrapping any SQL failure (already logged)
     */
    @Override
    public QCPlotDataDTO load( int experimentId, String plotType ) {
        final String querySqlStringComplete = loadFromExperimentIdPlotType;
        QCPlotDataDTO qcPlotDataDTO = null;
        Connection connection = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            connection = DAOFactory.instance().getConnection();
            pstmt = connection.prepareStatement( querySqlStringComplete );
            pstmt.setInt( 1, experimentId );
            // BUG FIX: the SQL has two placeholders but plotType was never
            // bound, so every call failed with a missing-parameter SQLException.
            pstmt.setString( 2, plotType );
            rs = pstmt.executeQuery();
            if ( rs.next() ) {
                qcPlotDataDTO = new QCPlotDataDTO();
                qcPlotDataDTO.setExperimentId( experimentId );
                qcPlotDataDTO.setPlotType( plotType );
                qcPlotDataDTO.setPlotData( rs.getString( "plot_data" ) );
                qcPlotDataDTO.setDataVersion( rs.getInt( "data_version" ) );
            }
        } catch (Exception sqlEx) {
            String msg = "load :Exception '" + sqlEx.toString() + '.';
            log.error( msg, sqlEx);
            throw new RuntimeException( msg, sqlEx );
        } finally {
            closeQuietly( rs, pstmt, connection );
        }
        return qcPlotDataDTO;
    }

    private String loadFromExperimentIdPlotTypeAndDataVersionSqlString
        = "SELECT plot_data FROM qc_plot_data WHERE experiment_id = ? AND plot_type = ? AND data_version = ?";

    /**
     * Loads the stored plot data for the given experiment, plot type, and
     * exact data version.
     *
     * @return the populated DTO, or null if no row matches
     * @throws RuntimeException wrapping any SQL failure (already logged)
     */
    @Override
    public QCPlotDataDTO loadFromExperimentIdPlotTypeAndDataVersion(int experimentId, String plotType, int dataVersion) {
        final String querySqlStringComplete = loadFromExperimentIdPlotTypeAndDataVersionSqlString;
        QCPlotDataDTO qcPlotDataDTO = null;
        Connection connection = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            connection = DAOFactory.instance().getConnection();
            pstmt = connection.prepareStatement( querySqlStringComplete );
            pstmt.setInt( 1, experimentId );
            pstmt.setString( 2, plotType );
            pstmt.setInt( 3, dataVersion );
            rs = pstmt.executeQuery();
            if ( rs.next() ) {
                qcPlotDataDTO = new QCPlotDataDTO();
                qcPlotDataDTO.setExperimentId( experimentId );
                qcPlotDataDTO.setPlotData( rs.getString( "plot_data" ) );
                qcPlotDataDTO.setDataVersion( dataVersion );
            }
        } catch (Exception sqlEx) {
            String msg = "loadFromExperimentIdPlotTypeAndDataVersion :Exception '" + sqlEx.toString() + '.';
            log.error( msg, sqlEx);
            throw new RuntimeException( msg, sqlEx );
        } finally {
            closeQuietly( rs, pstmt, connection );
        }
        return qcPlotDataDTO;
    }

    private String deleteFromExperimentIdPlotTypeSqlString
        = "DELETE FROM qc_plot_data WHERE experiment_id = ? AND plot_type = ?";

    /**
     * Deletes the stored plot data for the given experiment and plot type.
     * Deleting a non-existent row is a no-op, not an error.
     *
     * @throws RuntimeException wrapping any SQL failure (already logged)
     */
    @Override
    public void deleteForExperimentIdPlotType( int experimentId, String plotType ) {
        final String querySqlStringComplete = deleteFromExperimentIdPlotTypeSqlString;
        Connection connection = null;
        PreparedStatement pstmt = null;
        try {
            connection = DAOFactory.instance().getConnection();
            pstmt = connection.prepareStatement( querySqlStringComplete );
            pstmt.setInt( 1, experimentId );
            pstmt.setString( 2, plotType );
            pstmt.executeUpdate();
        } catch (Exception sqlEx) {
            String msg = "deleteForExperimentIdPlotType :Exception '" + sqlEx.toString() + '.';
            log.error( msg, sqlEx);
            throw new RuntimeException( msg, sqlEx );
        } finally {
            closeQuietly( null, pstmt, connection );
        }
    }

    /**
     * Best-effort cleanup of JDBC resources; SQLExceptions during close are
     * deliberately swallowed so they cannot mask the original failure.
     * Any argument may be null.
     */
    private static void closeQuietly( ResultSet rs, PreparedStatement pstmt, Connection connection ) {
        if ( rs != null ) {
            try {
                rs.close();
            } catch (SQLException ex) {
                // ignore
            }
        }
        if ( pstmt != null ) {
            try {
                pstmt.close();
            } catch (SQLException ex) {
                // ignore
            }
        }
        if ( connection != null ) {
            try {
                connection.close();
            } catch (SQLException ex) {
                // ignore
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.service;
import java.io.DataOutputStream;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.net.*;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.CreationTimeAwareFuture;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.dht.AbstractBounds;
import org.apache.cassandra.dht.Bounds;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.io.util.FastByteArrayOutputStream;
import org.apache.cassandra.locator.AbstractReplicationStrategy;
import org.apache.cassandra.locator.TokenMetadata;
import org.apache.cassandra.net.*;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.LatencyTracker;
import org.apache.cassandra.utils.Pair;
public class StorageProxy implements StorageProxyMBean
{
    private static final Logger logger = LoggerFactory.getLogger(StorageProxy.class);

    // mbean stuff: per-operation latency trackers surfaced through StorageProxyMBean
    private static final LatencyTracker readStats = new LatencyTracker();
    private static final LatencyTracker rangeStats = new LatencyTracker();
    private static final LatencyTracker writeStats = new LatencyTracker();

    // Marker string constant; consumers are not visible in this chunk.
    public static final String UNREACHABLE = "UNREACHABLE";

    // Strategies for delivering mutations to replicas; assigned once in the
    // static initializer below (see comments there for why there are three).
    private static final WritePerformer standardWritePerformer;
    private static final WritePerformer counterWritePerformer;
    private static final WritePerformer counterWriteOnCoordinatorPerformer;

    public static final StorageProxy instance = new StorageProxy();

    // volatile: mutable at runtime, presumably via the MBean setters (not
    // visible in this chunk).
    private static volatile boolean hintedHandoffEnabled = DatabaseDescriptor.hintedHandoffEnabled();
    private static volatile int maxHintWindow = DatabaseDescriptor.getMaxHintWindow();
    // Cap on concurrently queued hint writes; exceeding it fails the write
    // (see sendToHintedEndpoints) instead of risking OOM.
    private static volatile int maxHintsInProgress = 1024 * Runtime.getRuntime().availableProcessors();
    private static final AtomicInteger hintsInProgress = new AtomicInteger();
    private static final AtomicLong totalHints = new AtomicLong();

    // All access is through static methods / the singleton MBean instance.
    private StorageProxy() {}
    static
    {
        // Register the MBean; failure here is fatal since the node would
        // otherwise run without its management interface.
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        try
        {
            mbs.registerMBean(new StorageProxy(), new ObjectName("org.apache.cassandra.db:type=StorageProxy"));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }

        // Standard (non-counter) writes: route to each target, hinting dead
        // targets (see sendToHintedEndpoints).
        standardWritePerformer = new WritePerformer()
        {
            public void apply(IMutation mutation,
                              Collection<InetAddress> targets,
                              IWriteResponseHandler responseHandler,
                              String localDataCenter,
                              ConsistencyLevel consistency_level)
            throws IOException, TimeoutException
            {
                assert mutation instanceof RowMutation;
                sendToHintedEndpoints((RowMutation) mutation, targets, responseHandler, localDataCenter, consistency_level);
            }
        };

        /*
         * We execute counter writes in 2 places: either directly in the coordinator node if it is a replica, or
         * in CounterMutationVerbHandler on a replica otherwise. The write must be executed on the MUTATION stage
         * but in the latter case, the verb handler already runs on the MUTATION stage, so we must not execute the
         * underlying task on the stage, otherwise we risk a deadlock. Hence two different performers.
         */
        counterWritePerformer = new WritePerformer()
        {
            public void apply(IMutation mutation,
                              Collection<InetAddress> targets,
                              IWriteResponseHandler responseHandler,
                              String localDataCenter,
                              ConsistencyLevel consistency_level)
            throws IOException
            {
                if (logger.isDebugEnabled())
                    logger.debug("insert writing local & replicate " + mutation.toString(true));

                // Caller is assumed to already be on the MUTATION stage: run inline.
                Runnable runnable = counterWriteTask(mutation, targets, responseHandler, localDataCenter, consistency_level);
                runnable.run();
            }
        };

        counterWriteOnCoordinatorPerformer = new WritePerformer()
        {
            public void apply(IMutation mutation,
                              Collection<InetAddress> targets,
                              IWriteResponseHandler responseHandler,
                              String localDataCenter,
                              ConsistencyLevel consistency_level)
            throws IOException
            {
                if (logger.isDebugEnabled())
                    logger.debug("insert writing local & replicate " + mutation.toString(true));

                // Coordinator path: hand the work off to the MUTATION stage.
                Runnable runnable = counterWriteTask(mutation, targets, responseHandler, localDataCenter, consistency_level);
                StageManager.getStage(Stage.MUTATION).execute(runnable);
            }
        };
    }
    /**
     * Use this method to have these Mutations applied
     * across all replicas. This method will take care
     * of the possibility of a replica being down and hint
     * the data across to some other replica.
     *
     * @param mutations the mutations to be applied across the replicas
     * @param consistency_level the consistency level for the operation
     * @throws UnavailableException if too few replicas are alive for the consistency level
     * @throws TimeoutException if replicas (or hint writes) do not respond in time
     */
    public static void mutate(List<? extends IMutation> mutations, ConsistencyLevel consistency_level) throws UnavailableException, TimeoutException
    {
        final String localDataCenter = DatabaseDescriptor.getEndpointSnitch().getDatacenter(FBUtilities.getBroadcastAddress());

        long startTime = System.nanoTime();
        List<IWriteResponseHandler> responseHandlers = new ArrayList<IWriteResponseHandler>();

        // Remember the last mutation started so an IOException can report which key failed.
        IMutation mostRecentMutation = null;
        try
        {
            // Fire off all writes first, then block, so replicas work in parallel.
            for (IMutation mutation : mutations)
            {
                mostRecentMutation = mutation;
                if (mutation instanceof CounterMutation)
                {
                    // Counter writes go through a leader replica first (see mutateCounter).
                    responseHandlers.add(mutateCounter((CounterMutation)mutation, localDataCenter));
                }
                else
                {
                    responseHandlers.add(performWrite(mutation, consistency_level, localDataCenter, standardWritePerformer));
                }
            }

            // wait for writes. throws TimeoutException if necessary
            for (IWriteResponseHandler responseHandler : responseHandlers)
            {
                responseHandler.get();
            }
        }
        catch (TimeoutException ex)
        {
            if (logger.isDebugEnabled())
            {
                List<String> mstrings = new ArrayList<String>();
                for (IMutation mutation : mutations)
                    mstrings.add(mutation.toString(true));
                logger.debug("Write timeout {} for one (or more) of: ", ex.toString(), mstrings);
            }
            throw ex;
        }
        catch (IOException e)
        {
            assert mostRecentMutation != null;
            throw new RuntimeException("error writing key " + ByteBufferUtil.bytesToHex(mostRecentMutation.key()), e);
        }
        finally
        {
            // Latency is recorded whether the writes succeeded or not.
            writeStats.addNano(System.nanoTime() - startTime);
        }
    }
    /**
     * Perform the write of a mutation given a WritePerformer.
     * Gather the list of write endpoints, apply locally and/or forward the mutation to
     * said write endpoints (delegated to the actual WritePerformer) and wait for the
     * responses based on consistency level.
     *
     * @param mutation the mutation to be applied
     * @param consistency_level the consistency level for the write operation
     * @param localDataCenter the name of this node's datacenter
     * @param performer the WritePerformer in charge of applying the mutation
     * given the list of write endpoints (either standardWritePerformer for
     * standard writes or counterWritePerformer for counter writes).
     * @return the response handler the caller should block on
     * @throws UnavailableException if too few write endpoints are alive for the consistency level
     */
    public static IWriteResponseHandler performWrite(IMutation mutation,
                                                     ConsistencyLevel consistency_level,
                                                     String localDataCenter,
                                                     WritePerformer performer)
    throws UnavailableException, TimeoutException, IOException
    {
        String table = mutation.getTable();
        AbstractReplicationStrategy rs = Table.open(table).getReplicationStrategy();

        Collection<InetAddress> writeEndpoints = getWriteEndpoints(table, mutation.key());

        IWriteResponseHandler responseHandler = rs.getWriteResponseHandler(writeEndpoints, consistency_level);

        // exit early if we can't fulfill the CL at this time
        responseHandler.assureSufficientLiveNodes();

        performer.apply(mutation, writeEndpoints, responseHandler, localDataCenter, consistency_level);
        return responseHandler;
    }
private static Collection<InetAddress> getWriteEndpoints(String table, ByteBuffer key)
{
StorageService ss = StorageService.instance;
List<InetAddress> naturalEndpoints = ss.getNaturalEndpoints(table, key);
return ss.getTokenMetadata().getWriteEndpoints(StorageService.getPartitioner().getToken(key), table, naturalEndpoints);
}
    /**
     * Send the mutations to the right targets, write it locally if it corresponds or writes a hint when the node
     * is not available.
     *
     * Note about hints:
     *
     * | Hinted Handoff | Consist. Level |
     * | on             | >=1            | --> wait for hints. We DO NOT notify the handler with handler.response() for hints;
     * | on             | ANY            | --> wait for hints. Responses count towards consistency.
     * | off            | >=1            | --> DO NOT fire hints. And DO NOT wait for them to complete.
     * | off            | ANY            | --> DO NOT fire hints. And DO NOT wait for them to complete.
     *
     * @throws TimeoutException if the hints cannot be written/enqueued
     */
    private static void sendToHintedEndpoints(final RowMutation rm,
                                              Collection<InetAddress> targets,
                                              IWriteResponseHandler responseHandler,
                                              String localDataCenter,
                                              ConsistencyLevel consistency_level)
    throws IOException, TimeoutException
    {
        // Multimap that holds onto all the messages and addresses meant for a specific datacenter
        Map<String, Multimap<Message, InetAddress>> dcMessages = new HashMap<String, Multimap<Message, InetAddress>>(targets.size());
        // Builds/caches one serialized message per messaging version.
        MessageProducer producer = new CachingMessageProducer(rm);

        for (InetAddress destination : targets)
        {
            if (FailureDetector.instance.isAlive(destination))
            {
                String dc = DatabaseDescriptor.getEndpointSnitch().getDatacenter(destination);

                if (destination.equals(FBUtilities.getBroadcastAddress()))
                {
                    // This node is itself a replica: apply directly, no network hop.
                    insertLocal(rm, responseHandler);
                }
                else
                {
                    // belongs on a different server
                    if (logger.isDebugEnabled())
                        logger.debug("insert writing key " + ByteBufferUtil.bytesToHex(rm.key()) + " to " + destination);

                    // Group remote targets by datacenter so cross-DC traffic can be
                    // collapsed to one relayed message per DC (see sendMessages).
                    Multimap<Message, InetAddress> messages = dcMessages.get(dc);
                    if (messages == null)
                    {
                        messages = HashMultimap.create();
                        dcMessages.put(dc, messages);
                    }

                    messages.put(producer.getMessage(Gossiper.instance.getVersion(destination)), destination);
                }
            }
            else
            {
                // Target is down: write a hint instead, if hinted handoff permits.
                if (!shouldHint(destination))
                    continue;

                // Avoid OOMing from hints waiting to be written.  (Unlike ordinary mutations, hint
                // not eligible to drop if we fall behind.)
                if (hintsInProgress.get() > maxHintsInProgress)
                    throw new TimeoutException();

                // Schedule a local hint and let the handler know it needs to wait for the hint to complete too
                Future<Void> hintfuture = scheduleLocalHint(rm, destination, responseHandler, consistency_level);
                responseHandler.addFutureForHint(new CreationTimeAwareFuture<Void>(hintfuture));
            }
        }

        sendMessages(localDataCenter, dcMessages, responseHandler);
    }
    /**
     * Asynchronously writes a hint on this node, on the MUTATION stage, for a
     * mutation whose target replica is down.
     *
     * hintsInProgress is incremented here (before submission) and decremented
     * by the task itself, so callers can bound outstanding hint work.
     *
     * @param mutation the mutation to record as a hint
     * @param target the down endpoint the hint is for; must not be this node
     * @param responseHandler notified only when consistencyLevel is ANY (hints
     *        count towards consistency only at CL.ANY); may be null
     * @param consistencyLevel the consistency level of the originating write
     * @return a future completing once the hint has been applied locally
     */
    public static Future<Void> scheduleLocalHint(final RowMutation mutation,
                                                 final InetAddress target,
                                                 final IWriteResponseHandler responseHandler,
                                                 final ConsistencyLevel consistencyLevel)
    throws IOException
    {
        // Hint of itself doesn't make sense.
        assert !target.equals(FBUtilities.getBroadcastAddress()) : target;
        hintsInProgress.incrementAndGet();

        Runnable runnable = new Runnable()
        {
            public void run()
            {
                if (logger.isDebugEnabled())
                    logger.debug("Adding hint for " + target);

                try
                {
                    // The hint is tagged with the target's token (see RowMutation.hintFor).
                    Token<?> token = StorageService.instance.getTokenMetadata().getToken(target);
                    ByteBuffer tokenbytes = StorageService.getPartitioner().getTokenFactory().toByteArray(token);
                    RowMutation hintedMutation = RowMutation.hintFor(mutation, tokenbytes);
                    hintedMutation.apply();

                    totalHints.incrementAndGet();

                    // Notify the handler only for CL == ANY
                    if (responseHandler != null && consistencyLevel == ConsistencyLevel.ANY)
                        responseHandler.response(null);
                }
                catch (IOException e)
                {
                    throw new RuntimeException(e);
                }
                finally
                {
                    // Decrement the current hint in the execution after the task is done.
                    hintsInProgress.decrementAndGet();
                }
            }
        };

        // submit() returns Future<?>; the cast is safe because a plain Runnable
        // task completes with a null result.
        return (Future<Void>) StageManager.getStage(Stage.MUTATION).submit(runnable);
    }
    /**
     * for each datacenter, send a message to one node to relay the write to other replicas
     *
     * For a non-local DC (when efficient cross-DC writes are enabled), only the
     * first endpoint receives the message; the remaining endpoints of that DC
     * are encoded into the FORWARD_HEADER so the recipient relays for us.
     */
    private static void sendMessages(String localDataCenter, Map<String, Multimap<Message, InetAddress>> dcMessages, IWriteResponseHandler handler)
    throws IOException
    {
        for (Map.Entry<String, Multimap<Message, InetAddress>> entry: dcMessages.entrySet())
        {
            String dataCenter = entry.getKey();

            // send the messages corresponding to this datacenter
            for (Map.Entry<Message, Collection<InetAddress>> messages: entry.getValue().asMap().entrySet())
            {
                Message message = messages.getKey();
                // a single message object is used for unhinted writes, so clean out any forwards
                // from previous loop iterations
                message.removeHeader(RowMutation.FORWARD_HEADER);

                if (dataCenter.equals(localDataCenter) || StorageService.instance.useEfficientCrossDCWrites())
                {
                    // direct writes to local DC or old Cassandra versions
                    for (InetAddress destination : messages.getValue())
                        MessagingService.instance().sendRR(message, destination, handler);
                }
                else
                {
                    // Non-local DC. First endpoint in list is the destination for this group
                    Iterator<InetAddress> iter = messages.getValue().iterator();
                    InetAddress target = iter.next();

                    // Add all the other destinations of the same message as a header in the primary message.
                    while (iter.hasNext())
                    {
                        InetAddress destination = iter.next();

                        // group all nodes in this DC as forward headers on the primary message
                        // (each pass re-reads the header, so earlier addresses are preserved)
                        FastByteArrayOutputStream bos = new FastByteArrayOutputStream();
                        DataOutputStream dos = new DataOutputStream(bos);

                        // append to older addresses
                        byte[] previousHints = message.getHeader(RowMutation.FORWARD_HEADER);
                        if (previousHints != null)
                            dos.write(previousHints);

                        dos.write(destination.getAddress());
                        message.setHeader(RowMutation.FORWARD_HEADER, bos.toByteArray());
                    }
                    // send the combined message + forward headers
                    MessagingService.instance().sendRR(message, target, handler);
                }
            }
        }
    }
private static void insertLocal(final RowMutation rm, final IWriteResponseHandler responseHandler)
{
if (logger.isDebugEnabled())
logger.debug("insert writing local " + rm.toString(true));
Runnable runnable = new DroppableRunnable(StorageService.Verb.MUTATION)
{
public void runMayThrow() throws IOException
{
rm.apply();
responseHandler.response(null);
}
};
StageManager.getStage(Stage.MUTATION).execute(runnable);
}
    /**
     * Handle counter mutation on the coordinator host.
     *
     * A counter mutation needs to first be applied to a replica (that we'll call the leader for the mutation) before being
     * replicated to the other endpoints. To achieve this, there are two cases:
     *   1) the coordinator host is a replica: we proceed to applying the update locally and replicate through
     *      applyCounterMutationOnCoordinator
     *   2) the coordinator is not a replica: we forward the (counter)mutation to a chosen replica (that will proceed through
     *      applyCounterMutationOnLeader upon receipt) and wait for its acknowledgment.
     *
     * Implementation note: We check if we can fulfill the CL on the coordinator host even if it is not a replica to allow
     * quicker response and because the WriteResponseHandlers don't make it easy to send back an error. We also always gather
     * the write latencies at the coordinator node to make the gathering point similar to the case of standard writes.
     */
    public static IWriteResponseHandler mutateCounter(CounterMutation cm, String localDataCenter) throws UnavailableException, TimeoutException, IOException
    {
        InetAddress endpoint = findSuitableEndpoint(cm.getTable(), cm.key());

        if (endpoint.equals(FBUtilities.getBroadcastAddress()))
        {
            // Case 1: this node is a replica -- apply locally, then replicate.
            return applyCounterMutationOnCoordinator(cm, localDataCenter);
        }
        else
        {
            // Case 2: forward to the chosen leader replica.
            // Exit now if we can't fulfill the CL here instead of forwarding to the leader replica
            String table = cm.getTable();
            AbstractReplicationStrategy rs = Table.open(table).getReplicationStrategy();
            Collection<InetAddress> writeEndpoints = getWriteEndpoints(table, cm.key());

            rs.getWriteResponseHandler(writeEndpoints, cm.consistency()).assureSufficientLiveNodes();

            // Forward the actual update to the chosen leader replica
            IWriteResponseHandler responseHandler = WriteResponseHandler.create(endpoint);

            Message message = cm.makeMutationMessage(Gossiper.instance.getVersion(endpoint));
            if (logger.isDebugEnabled())
                logger.debug("forwarding counter update of key " + ByteBufferUtil.bytesToHex(cm.key()) + " to " + endpoint);
            MessagingService.instance().sendRR(message, endpoint, responseHandler);
            return responseHandler;
        }
    }
private static InetAddress findSuitableEndpoint(String table, ByteBuffer key) throws UnavailableException
{
List<InetAddress> endpoints = StorageService.instance.getLiveNaturalEndpoints(table, key);
DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), endpoints);
if (endpoints.isEmpty())
throw new UnavailableException();
return endpoints.get(0);
}
    // Must be called on a replica of the mutation. This replica becomes the
    // leader of this mutation.
    // Uses counterWritePerformer, which runs the write inline -- the caller is
    // expected to already be executing on the MUTATION stage.
    public static IWriteResponseHandler applyCounterMutationOnLeader(CounterMutation cm, String localDataCenter) throws UnavailableException, TimeoutException, IOException
    {
        return performWrite(cm, cm.consistency(), localDataCenter, counterWritePerformer);
    }
    // Same as applyCounterMutationOnLeader, with the difference that it uses the MUTATION stage to execute the write (while
    // applyCounterMutationOnLeader assumes it is on the MUTATION stage already)
    public static IWriteResponseHandler applyCounterMutationOnCoordinator(CounterMutation cm, String localDataCenter) throws UnavailableException, TimeoutException, IOException
    {
        return performWrite(cm, cm.consistency(), localDataCenter, counterWriteOnCoordinatorPerformer);
    }
    /**
     * Builds the task that applies a counter mutation on this node (acting as
     * the mutation's leader) and then replicates it to the remaining targets.
     * Being a DroppableRunnable, the task may be dropped rather than run (see
     * DroppableRunnable for the drop policy).
     */
    private static Runnable counterWriteTask(final IMutation mutation,
                                             final Collection<InetAddress> targets,
                                             final IWriteResponseHandler responseHandler,
                                             final String localDataCenter,
                                             final ConsistencyLevel consistency_level)
    {
        return new DroppableRunnable(StorageService.Verb.MUTATION)
        {
            public void runMayThrow() throws IOException
            {
                assert mutation instanceof CounterMutation;
                final CounterMutation cm = (CounterMutation) mutation;

                // apply mutation
                cm.apply();
                // The local (leader) apply is acknowledged before replication starts.
                responseHandler.response(null);

                // then send to replicas, if any
                targets.remove(FBUtilities.getBroadcastAddress());
                if (cm.shouldReplicateOnWrite() && !targets.isEmpty())
                {
                    // We do the replication on another stage because it involves a read (see CM.makeReplicationMutation)
                    // and we want to avoid blocking too much the MUTATION stage
                    StageManager.getStage(Stage.REPLICATE_ON_WRITE).execute(new DroppableRunnable(StorageService.Verb.READ)
                    {
                        public void runMayThrow() throws IOException, TimeoutException
                        {
                            // send mutation to other replica
                            sendToHintedEndpoints(cm.makeReplicationMutation(), targets, responseHandler, localDataCenter, consistency_level);
                        }
                    });
                }
            }
        };
    }
/**
* Performs the actual reading of a row out of the StorageService, fetching
* a specific set of column names from a given column family.
*/
public static List<Row> read(List<ReadCommand> commands, ConsistencyLevel consistency_level)
throws IOException, UnavailableException, TimeoutException, InvalidRequestException
{
if (StorageService.instance.isBootstrapMode())
throw new UnavailableException();
long startTime = System.nanoTime();
List<Row> rows;
try
{
rows = fetchRows(commands, consistency_level);
}
finally
{
readStats.addNano(System.nanoTime() - startTime);
}
return rows;
}
/**
* This function executes local and remote reads, and blocks for the results:
*
* 1. Get the replica locations, sorted by response time according to the snitch
* 2. Send a data request to the closest replica, and digest requests to either
* a) all the replicas, if read repair is enabled
* b) the closest R-1 replicas, where R is the number required to satisfy the ConsistencyLevel
* 3. Wait for a response from R replicas
* 4. If the digests (if any) match the data return the data
* 5. else carry out read repair by getting data from all the nodes.
*/
private static List<Row> fetchRows(List<ReadCommand> commands, ConsistencyLevel consistency_level) throws IOException, UnavailableException, TimeoutException
{
List<ReadCallback<Row>> readCallbacks = new ArrayList<ReadCallback<Row>>();
List<Row> rows = new ArrayList<Row>();
List<ReadCommand> commandsToRetry = Collections.emptyList();
List<ReadCommand> repairCommands = Collections.emptyList();
do
{
List<ReadCommand> commandsToSend = commandsToRetry.isEmpty() ? commands : commandsToRetry;
if (!commandsToRetry.isEmpty())
logger.debug("Retrying {} commands", commandsToRetry.size());
// send out read requests
for (ReadCommand command : commandsToSend)
{
assert !command.isDigestQuery();
logger.debug("Command/ConsistencyLevel is {}/{}", command, consistency_level);
List<InetAddress> endpoints = StorageService.instance.getLiveNaturalEndpoints(command.table,
command.key);
DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), endpoints);
RowDigestResolver resolver = new RowDigestResolver(command.table, command.key);
ReadCallback<Row> handler = getReadCallback(resolver, command, consistency_level, endpoints);
handler.assureSufficientLiveNodes();
assert !handler.endpoints.isEmpty();
// The data-request message is sent to dataPoint, the node that will actually get
// the data for us. The other replicas are only sent a digest query.
ReadCommand digestCommand = null;
if (handler.endpoints.size() > 1)
{
digestCommand = command.copy();
digestCommand.setDigestQuery(true);
}
InetAddress dataPoint = handler.endpoints.get(0);
if (dataPoint.equals(FBUtilities.getBroadcastAddress()))
{
logger.debug("reading data locally");
StageManager.getStage(Stage.READ).execute(new LocalReadRunnable(command, handler));
}
else
{
logger.debug("reading data from {}", dataPoint);
MessagingService.instance().sendRR(command, dataPoint, handler);
}
// We lazy-construct the digest Message object since it may not be necessary if we
// are doing a local digest read, or no digest reads at all.
MessageProducer producer = new CachingMessageProducer(digestCommand);
for (InetAddress digestPoint : handler.endpoints.subList(1, handler.endpoints.size()))
{
if (digestPoint.equals(FBUtilities.getBroadcastAddress()))
{
logger.debug("reading digest locally");
StageManager.getStage(Stage.READ).execute(new LocalReadRunnable(digestCommand, handler));
}
else
{
logger.debug("reading digest from {}", digestPoint);
MessagingService.instance().sendRR(producer, digestPoint, handler);
}
}
readCallbacks.add(handler);
}
if (repairCommands != Collections.EMPTY_LIST)
repairCommands.clear();
// read results and make a second pass for any digest mismatches
List<RepairCallback> repairResponseHandlers = null;
for (int i = 0; i < commandsToSend.size(); i++)
{
ReadCallback<Row> handler = readCallbacks.get(i);
Row row;
ReadCommand command = commands.get(i);
try
{
long startTime2 = System.currentTimeMillis();
row = handler.get(); // CL.ONE is special cased here to ignore digests even if some have arrived
if (row != null)
rows.add(row);
if (logger.isDebugEnabled())
logger.debug("Read: " + (System.currentTimeMillis() - startTime2) + " ms.");
}
catch (TimeoutException ex)
{
if (logger.isDebugEnabled())
logger.debug("Read timeout: {}", ex.toString());
throw ex;
}
catch (DigestMismatchException ex)
{
if (logger.isDebugEnabled())
logger.debug("Digest mismatch: {}", ex.toString());
RowRepairResolver resolver = new RowRepairResolver(command.table, command.key);
RepairCallback repairHandler = new RepairCallback(resolver, handler.endpoints);
if (repairCommands == Collections.EMPTY_LIST)
repairCommands = new ArrayList<ReadCommand>();
repairCommands.add(command);
for (InetAddress endpoint : handler.endpoints)
MessagingService.instance().sendRR(command, endpoint, repairHandler);
if (repairResponseHandlers == null)
repairResponseHandlers = new ArrayList<RepairCallback>();
repairResponseHandlers.add(repairHandler);
}
}
if (commandsToRetry != Collections.EMPTY_LIST)
commandsToRetry.clear();
// read the results for the digest mismatch retries
if (repairResponseHandlers != null)
{
for (int i = 0; i < repairCommands.size(); i++)
{
ReadCommand command = repairCommands.get(i);
RepairCallback handler = repairResponseHandlers.get(i);
FBUtilities.waitOnFutures(handler.resolver.repairResults, DatabaseDescriptor.getRpcTimeout());
Row row;
try
{
row = handler.get();
}
catch (DigestMismatchException e)
{
throw new AssertionError(e); // full data requested from each node here, no digests should be sent
}
// retry short reads, otherwise add the row to our resultset
if (command instanceof SliceFromReadCommand)
{
// short reads are only possible on SliceFromReadCommand
SliceFromReadCommand sliceCommand = (SliceFromReadCommand) command;
int maxLiveColumns = handler.getMaxLiveColumns();
int liveColumnsInRow = row != null ? row.cf.getLiveColumnCount() : 0;
assert maxLiveColumns <= sliceCommand.count;
if ((maxLiveColumns == sliceCommand.count) && (liveColumnsInRow < sliceCommand.count))
{
if (logger.isDebugEnabled())
logger.debug("detected short read: expected {} columns, but only resolved {} columns",
sliceCommand.count, liveColumnsInRow);
int retryCount = sliceCommand.count + sliceCommand.count - liveColumnsInRow;
SliceFromReadCommand retryCommand = new SliceFromReadCommand(command.table,
command.key,
command.queryPath,
sliceCommand.start,
sliceCommand.finish,
sliceCommand.reversed,
retryCount);
if (commandsToRetry == Collections.EMPTY_LIST)
commandsToRetry = new ArrayList<ReadCommand>();
commandsToRetry.add(retryCommand);
continue;
}
}
rows.add(row);
}
}
} while (!commandsToRetry.isEmpty());
return rows;
}
/**
 * Runs a read command on this node directly (instead of sending it over the
 * wire) and feeds the resulting ReadResponse into the supplied callback,
 * recording the local read latency. Inherits drop-if-stale behavior from
 * DroppableRunnable.
 */
static class LocalReadRunnable extends DroppableRunnable
{
    private final long start = System.currentTimeMillis();
    private final ReadCommand command;
    private final ReadCallback<Row> handler;

    LocalReadRunnable(ReadCommand command, ReadCallback<Row> handler)
    {
        super(StorageService.Verb.READ);
        this.command = command;
        this.handler = handler;
    }

    protected void runMayThrow() throws IOException
    {
        if (logger.isDebugEnabled())
            logger.debug("LocalReadRunnable reading " + command);

        Table openedTable = Table.open(command.table);
        ReadResponse response = ReadVerbHandler.getResponse(command, command.getRow(openedTable));
        // Count the local read in the latency stats just like a remote reply would be.
        MessagingService.instance().addLatency(FBUtilities.getBroadcastAddress(), System.currentTimeMillis() - start);
        handler.response(response);
    }
}
/**
 * Picks the ReadCallback implementation appropriate for the consistency level:
 * datacenter-scoped levels get a DatacenterReadCallback, everything else the
 * plain ReadCallback.
 */
static <T> ReadCallback<T> getReadCallback(IResponseResolver<T> resolver, IReadCommand command, ConsistencyLevel consistencyLevel, List<InetAddress> endpoints)
{
    boolean dcScoped = consistencyLevel == ConsistencyLevel.LOCAL_QUORUM
                    || consistencyLevel == ConsistencyLevel.EACH_QUORUM;
    return dcScoped
         ? new DatacenterReadCallback(resolver, consistencyLevel, command, endpoints)
         : new ReadCallback(resolver, consistencyLevel, command, endpoints);
}
/*
* This function executes the read protocol locally. Consistency checks are performed in the background.
*/
/**
 * Executes a range-slice read: splits the requested range along ring token
 * boundaries, fetches each sub-range from its replicas (or from the local
 * store directly when CL.ONE allows it), and stops as soon as
 * command.max_keys rows have been gathered.
 *
 * @param command           the range slice to execute
 * @param consistency_level how many replicas must answer for each sub-range
 * @throws UnavailableException if too few replicas are alive for the consistency level
 * @throws TimeoutException     if replicas fail to respond within the RPC timeout
 */
public static List<Row> getRangeSlice(RangeSliceCommand command, ConsistencyLevel consistency_level)
throws IOException, UnavailableException, TimeoutException
{
    if (logger.isDebugEnabled())
        logger.debug(command.toString());
    long startTime = System.nanoTime();
    List<Row> rows;
    // now scan until we have enough results
    try
    {
        rows = new ArrayList<Row>(command.max_keys);
        // One sub-range per ring segment, so each replica is only asked for the
        // portion of the range it owns (see getRestrictedRanges).
        List<AbstractBounds> ranges = getRestrictedRanges(command.range);
        for (AbstractBounds range : ranges)
        {
            List<InetAddress> liveEndpoints = StorageService.instance.getLiveNaturalEndpoints(command.keyspace, range.right);
            DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), liveEndpoints);
            // Fast path: at CL.ONE, when this node is the closest live replica,
            // read straight out of the local ColumnFamilyStore.
            if (consistency_level == ConsistencyLevel.ONE && !liveEndpoints.isEmpty() && liveEndpoints.get(0).equals(FBUtilities.getBroadcastAddress()))
            {
                if (logger.isDebugEnabled())
                    logger.debug("local range slice");
                ColumnFamilyStore cfs = Table.open(command.keyspace).getColumnFamilyStore(command.column_family);
                try
                {
                    rows.addAll(cfs.getRangeSlice(command.super_column,
                                                  range,
                                                  command.max_keys,
                                                  QueryFilter.getFilter(command.predicate, cfs.getComparator())));
                }
                catch (ExecutionException e)
                {
                    throw new RuntimeException(e.getCause());
                }
                catch (InterruptedException e)
                {
                    throw new AssertionError(e);
                }
            }
            else
            {
                // Remote path: send a command scoped to this sub-range to every
                // live replica and resolve the replies per the consistency level.
                RangeSliceCommand c2 = new RangeSliceCommand(command.keyspace, command.column_family, command.super_column, command.predicate, range, command.max_keys);
                // collect replies and resolve according to consistency level
                RangeSliceResponseResolver resolver = new RangeSliceResponseResolver(command.keyspace, liveEndpoints);
                ReadCallback<Iterable<Row>> handler = getReadCallback(resolver, command, consistency_level, liveEndpoints);
                handler.assureSufficientLiveNodes();
                for (InetAddress endpoint : liveEndpoints)
                {
                    MessagingService.instance().sendRR(c2, endpoint, handler);
                    if (logger.isDebugEnabled())
                        logger.debug("reading " + c2 + " from " + endpoint);
                }
                try
                {
                    for (Row row : handler.get())
                    {
                        rows.add(row);
                        logger.debug("range slices read {}", row.key);
                    }
                    // Block until any read-repair mutations triggered by the
                    // resolver have completed.
                    FBUtilities.waitOnFutures(resolver.repairResults, DatabaseDescriptor.getRpcTimeout());
                }
                catch (TimeoutException ex)
                {
                    if (logger.isDebugEnabled())
                        logger.debug("Range slice timeout: {}", ex.toString());
                    throw ex;
                }
                catch (DigestMismatchException e)
                {
                    throw new AssertionError(e); // no digests in range slices yet
                }
            }
            // if we're done, great, otherwise, move to the next range
            if (rows.size() >= command.max_keys)
                break;
        }
    }
    finally
    {
        rangeStats.addNano(System.nanoTime() - startTime);
    }
    // The last sub-range may have pushed us past the limit; trim to max_keys.
    return rows.size() > command.max_keys ? rows.subList(0, command.max_keys) : rows;
}
/**
 * initiate a request/response session with each live node to check whether or not everybody is using the same
 * migration id. This is useful for determining if a schema change has propagated through the cluster. Disagreement
 * is assumed if any node fails to respond.
 *
 * @return a map from schema version string (or UNREACHABLE) to the list of host addresses reporting that version
 */
public static Map<String, List<String>> describeSchemaVersions()
{
    final String myVersion = Schema.instance.getVersion().toString();
    final Map<InetAddress, UUID> versions = new ConcurrentHashMap<InetAddress, UUID>();
    final Set<InetAddress> liveHosts = Gossiper.instance.getLiveMembers();
    final CountDownLatch latch = new CountDownLatch(liveHosts.size());

    IAsyncCallback cb = new IAsyncCallback()
    {
        public void response(Message message)
        {
            // record the response from the remote node.
            logger.debug("Received schema check response from {}", message.getFrom().getHostAddress());
            UUID theirVersion = UUID.fromString(new String(message.getMessageBody()));
            versions.put(message.getFrom(), theirVersion);
            latch.countDown();
        }

        public boolean isLatencyForSnitch()
        {
            return false;
        }
    };
    // an empty message acts as a request to the SchemaCheckVerbHandler.
    for (InetAddress endpoint : liveHosts)
    {
        Message message = new Message(FBUtilities.getBroadcastAddress(),
                                      StorageService.Verb.SCHEMA_CHECK,
                                      ArrayUtils.EMPTY_BYTE_ARRAY,
                                      Gossiper.instance.getVersion(endpoint));
        MessagingService.instance().sendRR(message, endpoint, cb);
    }

    try
    {
        // wait for as long as possible. timeout-1s if possible.
        latch.await(DatabaseDescriptor.getRpcTimeout(), TimeUnit.MILLISECONDS);
    }
    catch (InterruptedException ex)
    {
        throw new AssertionError("This latch shouldn't have been interrupted.");
    }

    logger.debug("My version is {}", myVersion);

    // maps versions to hosts that are on that version.
    Map<String, List<String>> results = new HashMap<String, List<String>>();
    Iterable<InetAddress> allHosts = Iterables.concat(Gossiper.instance.getLiveMembers(), Gossiper.instance.getUnreachableMembers());
    for (InetAddress host : allHosts)
    {
        UUID version = versions.get(host);
        String stringVersion = version == null ? UNREACHABLE : version.toString();
        List<String> hosts = results.get(stringVersion);
        if (hosts == null)
        {
            hosts = new ArrayList<String>();
            results.put(stringVersion, hosts);
        }
        hosts.add(host.getHostAddress());
    }

    // we're done: the results map is ready to return to the client. the rest is just debug logging:
    if (results.get(UNREACHABLE) != null)
        logger.debug("Hosts not in agreement. Didn't get a response from everybody: {}", StringUtils.join(results.get(UNREACHABLE), ","));
    for (Map.Entry<String, List<String>> entry : results.entrySet())
    {
        // check for version disagreement. log the hosts that don't agree.
        if (entry.getKey().equals(UNREACHABLE) || entry.getKey().equals(myVersion))
            continue;
        // BUG FIX: this line previously used String.format-style "%s" placeholders,
        // which SLF4J does not substitute; "{}" is the correct placeholder syntax,
        // so the host/version arguments actually appear in the log message.
        for (String host : entry.getValue())
            logger.debug("{} disagrees ({})", host, entry.getKey());
    }
    if (results.size() == 1)
        logger.debug("Schemas are in agreement.");

    return results;
}
/**
 * Compute all ranges we're going to query, in sorted order. Nodes can be replica destinations for many ranges,
 * so we need to restrict each scan to the specific range we want, or else we'd get duplicate results.
 *
 * @param queryRange the overall range the client asked for
 * @return queryRange cut into sub-ranges along ring token boundaries
 */
static List<AbstractBounds> getRestrictedRanges(final AbstractBounds queryRange)
{
    // special case for bounds containing exactly 1 (non-minimum) token
    if (queryRange instanceof Bounds && queryRange.left.equals(queryRange.right) && !queryRange.left.equals(StorageService.getPartitioner().getMinimumToken()))
    {
        if (logger.isDebugEnabled())
            logger.debug("restricted single token match for query " + queryRange);
        return Collections.singletonList(queryRange);
    }

    TokenMetadata tokenMetadata = StorageService.instance.getTokenMetadata();
    List<AbstractBounds> ranges = new ArrayList<AbstractBounds>();
    // divide the queryRange into pieces delimited by the ring and minimum tokens
    Iterator<Token> ringIter = TokenMetadata.ringIterator(tokenMetadata.sortedTokens(), queryRange.left, true);
    AbstractBounds remainder = queryRange;
    while (ringIter.hasNext())
    {
        Token token = ringIter.next();
        // Stop once the next ring token no longer falls inside what remains of
        // the query range — there is nothing left to split off.
        if (remainder == null || !(remainder.left.equals(token) || remainder.contains(token)))
            // no more splits
            break;
        // Cut off the piece up to this ring token; keep splitting the rest.
        Pair<AbstractBounds,AbstractBounds> splits = remainder.split(token);
        if (splits.left != null)
            ranges.add(splits.left);
        remainder = splits.right;
    }
    // Whatever extends past the last ring token forms the final sub-range.
    if (remainder != null)
        ranges.add(remainder);
    if (logger.isDebugEnabled())
        logger.debug("restricted ranges for query " + queryRange + " are " + ranges);

    return ranges;
}
/** @return total number of read operations recorded by this node. */
public long getReadOperations()
{
    return readStats.getOpCount();
}

/** @return cumulative read latency in microseconds. */
public long getTotalReadLatencyMicros()
{
    return readStats.getTotalLatencyMicros();
}

/** @return read latency in microseconds over the recent sampling window. */
public double getRecentReadLatencyMicros()
{
    return readStats.getRecentLatencyMicros();
}

/** @return histogram buckets of read latency (microseconds) since startup. */
public long[] getTotalReadLatencyHistogramMicros()
{
    return readStats.getTotalLatencyHistogramMicros();
}

/** @return histogram buckets of read latency (microseconds) over the recent window. */
public long[] getRecentReadLatencyHistogramMicros()
{
    return readStats.getRecentLatencyHistogramMicros();
}

/** @return total number of range-slice operations recorded by this node. */
public long getRangeOperations()
{
    return rangeStats.getOpCount();
}

/** @return cumulative range-slice latency in microseconds. */
public long getTotalRangeLatencyMicros()
{
    return rangeStats.getTotalLatencyMicros();
}

/** @return range-slice latency in microseconds over the recent sampling window. */
public double getRecentRangeLatencyMicros()
{
    return rangeStats.getRecentLatencyMicros();
}

/** @return histogram buckets of range-slice latency (microseconds) since startup. */
public long[] getTotalRangeLatencyHistogramMicros()
{
    return rangeStats.getTotalLatencyHistogramMicros();
}

/** @return histogram buckets of range-slice latency (microseconds) over the recent window. */
public long[] getRecentRangeLatencyHistogramMicros()
{
    return rangeStats.getRecentLatencyHistogramMicros();
}

/** @return total number of write operations recorded by this node. */
public long getWriteOperations()
{
    return writeStats.getOpCount();
}

/** @return cumulative write latency in microseconds. */
public long getTotalWriteLatencyMicros()
{
    return writeStats.getTotalLatencyMicros();
}

/** @return write latency in microseconds over the recent sampling window. */
public double getRecentWriteLatencyMicros()
{
    return writeStats.getRecentLatencyMicros();
}

/** @return histogram buckets of write latency (microseconds) since startup. */
public long[] getTotalWriteLatencyHistogramMicros()
{
    return writeStats.getTotalLatencyHistogramMicros();
}

/** @return histogram buckets of write latency (microseconds) over the recent window. */
public long[] getRecentWriteLatencyHistogramMicros()
{
    return writeStats.getRecentLatencyHistogramMicros();
}
/**
 * Executes a secondary-index scan: walks the ring from index_clause.start_key
 * onwards, sends an IndexScanCommand to the replicas of each sub-range, and
 * accumulates rows until index_clause.count results have been collected.
 *
 * @throws UnavailableException if too few replicas are alive for the consistency level
 * @throws TimeoutException     if replicas fail to respond within the RPC timeout
 */
public static List<Row> scan(final String keyspace, String column_family, IndexClause index_clause, SlicePredicate column_predicate, ConsistencyLevel consistency_level)
throws IOException, TimeoutException, UnavailableException
{
    IPartitioner p = StorageService.getPartitioner();
    // Scan from the start key (or the minimum token when absent) around to the
    // minimum token, i.e. the remainder of the ring.
    Token leftToken = index_clause.start_key == null ? p.getMinimumToken() : p.getToken(index_clause.start_key);
    List<AbstractBounds> ranges = getRestrictedRanges(new Bounds(leftToken, p.getMinimumToken()));
    logger.debug("scan ranges are " + StringUtils.join(ranges, ","));

    // now scan until we have enough results
    List<Row> rows = new ArrayList<Row>(index_clause.count);
    for (AbstractBounds range : ranges)
    {
        List<InetAddress> liveEndpoints = StorageService.instance.getLiveNaturalEndpoints(keyspace, range.right);
        DatabaseDescriptor.getEndpointSnitch().sortByProximity(FBUtilities.getBroadcastAddress(), liveEndpoints);

        // collect replies and resolve according to consistency level
        RangeSliceResponseResolver resolver = new RangeSliceResponseResolver(keyspace, liveEndpoints);
        IReadCommand iCommand = new IReadCommand()
        {
            public String getKeyspace()
            {
                return keyspace;
            }
        };
        ReadCallback<Iterable<Row>> handler = getReadCallback(resolver, iCommand, consistency_level, liveEndpoints);
        handler.assureSufficientLiveNodes();

        IndexScanCommand command = new IndexScanCommand(keyspace, column_family, index_clause, column_predicate, range);
        // Serialize the command once and reuse the cached message for all endpoints.
        MessageProducer producer = new CachingMessageProducer(command);
        for (InetAddress endpoint : liveEndpoints)
        {
            MessagingService.instance().sendRR(producer, endpoint, handler);
            if (logger.isDebugEnabled())
                logger.debug("reading " + command + " from " + endpoint);
        }

        try
        {
            for (Row row : handler.get())
            {
                rows.add(row);
                logger.debug("read {}", row);
            }
            // Block until any read-repair mutations triggered by the resolver complete.
            FBUtilities.waitOnFutures(resolver.repairResults, DatabaseDescriptor.getRpcTimeout());
        }
        catch (TimeoutException ex)
        {
            if (logger.isDebugEnabled())
                logger.debug("Index scan timeout: {}", ex.toString());
            throw ex;
        }
        catch (DigestMismatchException e)
        {
            throw new AssertionError(e); // index scans request full data, no digests expected
        }

        // Enough rows collected — trim to the requested count and stop early.
        if (rows.size() >= index_clause.count)
            return rows.subList(0, index_clause.count);
    }
    return rows;
}
/** @return whether hinted handoff is currently enabled. */
public boolean getHintedHandoffEnabled()
{
    return hintedHandoffEnabled;
}

/** Enables or disables hinted handoff at runtime. */
public void setHintedHandoffEnabled(boolean b)
{
    hintedHandoffEnabled = b;
}

/** @return the maximum downtime window (in ms — see shouldHint) for which hints are written. */
public int getMaxHintWindow()
{
    return maxHintWindow;
}

/** Sets the maximum hint window, in milliseconds. */
public void setMaxHintWindow(int ms)
{
    maxHintWindow = ms;
}
/**
 * Decides whether a hint should be written for the given endpoint: hints are
 * skipped entirely when hinted handoff is disabled, and also when the endpoint
 * has already been down longer than the configured hint window.
 */
public static boolean shouldHint(InetAddress ep)
{
    if (!hintedHandoffEnabled)
        return false;

    if (Gossiper.instance.getEndpointDowntime(ep) > maxHintWindow)
    {
        logger.debug("not hinting {} which has been down {}ms", ep, Gossiper.instance.getEndpointDowntime(ep));
        return false;
    }
    return true;
}
/**
 * Performs the truncate operation, which effectively deletes all data from
 * the column family cfname
 * @param keyspace the keyspace containing the column family
 * @param cfname   the column family to truncate
 * @throws UnavailableException If some of the hosts in the ring are down.
 * @throws TimeoutException if not every node acknowledges the truncation in time
 * @throws IOException on a transport error
 */
public static void truncateBlocking(String keyspace, String cfname) throws UnavailableException, TimeoutException, IOException
{
    // BUG FIX: the format string was missing the second "{}" placeholder, so
    // the column family name passed as the second argument was never logged.
    logger.debug("Starting a blocking truncate operation on keyspace {}, CF {}", keyspace, cfname);
    if (isAnyHostDown())
    {
        logger.info("Cannot perform truncate, some hosts are down");
        // Since the truncate operation is so aggressive and is typically only
        // invoked by an admin, for simplicity we require that all nodes are up
        // to perform the operation.
        throw new UnavailableException();
    }

    Set<InetAddress> allEndpoints = Gossiper.instance.getLiveMembers();
    // Require an acknowledgement from every live node before returning.
    int blockFor = allEndpoints.size();
    final TruncateResponseHandler responseHandler = new TruncateResponseHandler(blockFor);

    // Send out the truncate calls and track the responses with the callbacks.
    logger.debug("Starting to send truncate messages to hosts {}", allEndpoints);
    final Truncation truncation = new Truncation(keyspace, cfname);
    // Serialize once and reuse the cached message for every endpoint.
    MessageProducer producer = new CachingMessageProducer(truncation);
    for (InetAddress endpoint : allEndpoints)
        MessagingService.instance().sendRR(producer, endpoint, responseHandler);

    // Wait for all
    logger.debug("Sent all truncate messages, now waiting for {} responses", blockFor);
    responseHandler.get();
    logger.debug("truncate done");
}
/**
 * Asks the gossiper if there are any nodes that are currently down.
 * @return true if the gossiper reports at least one unreachable node
 *         (note: the previous javadoc stated the inverse of the actual behavior)
 */
private static boolean isAnyHostDown()
{
    return !Gossiper.instance.getUnreachableMembers().isEmpty();
}
/**
 * Strategy interface for applying a mutation to a set of target replicas,
 * reporting completion through the given response handler.
 */
private interface WritePerformer
{
    public void apply(IMutation mutation, Collection<InetAddress> targets, IWriteResponseHandler responseHandler, String localDataCenter, ConsistencyLevel consistency_level) throws IOException, TimeoutException;
}
/**
 * A Runnable that remembers its creation time and, if it has waited in the
 * queue longer than the RPC timeout before being executed, drops itself —
 * incrementing the dropped-message counter for its verb instead of running.
 */
private static abstract class DroppableRunnable implements Runnable
{
    private final long constructionTime = System.currentTimeMillis();
    private final StorageService.Verb verb;

    public DroppableRunnable(StorageService.Verb verb)
    {
        this.verb = verb;
    }

    public final void run()
    {
        // Too stale to be useful? Record the drop and bail out without running.
        long queuedFor = System.currentTimeMillis() - constructionTime;
        if (queuedFor > DatabaseDescriptor.getRpcTimeout())
        {
            MessagingService.instance().incrementDroppedMessages(verb);
            return;
        }

        try
        {
            runMayThrow();
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    /** Subclass hook; any checked exception is rethrown wrapped in RuntimeException. */
    abstract protected void runMayThrow() throws Exception;
}
/** @return total number of hints recorded by this node. */
public long getTotalHints()
{
    return totalHints.get();
}

/** @return the cap on concurrently in-flight hint writes. */
public int getMaxHintsInProgress()
{
    return maxHintsInProgress;
}

/** Sets the cap on concurrently in-flight hint writes. */
public void setMaxHintsInProgress(int qs)
{
    maxHintsInProgress = qs;
}

/** @return the number of hint writes currently in flight. */
public int getHintsInProgress()
{
    return hintsInProgress.get();
}

/** Warns (at shutdown) if any hint writes are still in flight; this should never happen. */
public void verifyNoHintsInProgress()
{
    if (getHintsInProgress() > 0)
        logger.warn("Some hints were not written before shutdown. This is not supposed to happen. You should (a) run repair, and (b) file a bug report");
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p2beta1/video_intelligence.proto
package com.google.cloud.videointelligence.v1p2beta1;
/**
*
*
* <pre>
* Config for EXPLICIT_CONTENT_DETECTION.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig}
*/
public final class ExplicitContentDetectionConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig)
ExplicitContentDetectionConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use ExplicitContentDetectionConfig.newBuilder() to construct.
private ExplicitContentDetectionConfig(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ExplicitContentDetectionConfig() {
model_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ExplicitContentDetectionConfig();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private ExplicitContentDetectionConfig(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
model_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig.class,
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig.Builder
.class);
}
public static final int MODEL_FIELD_NUMBER = 1;
private volatile java.lang.Object model_;
/**
*
*
* <pre>
* Model to use for explicit content detection.
* Supported values: "builtin/stable" (the default if unset) and
* "builtin/latest".
* </pre>
*
* <code>string model = 1;</code>
*
* @return The model.
*/
@java.lang.Override
public java.lang.String getModel() {
java.lang.Object ref = model_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
model_ = s;
return s;
}
}
/**
*
*
* <pre>
* Model to use for explicit content detection.
* Supported values: "builtin/stable" (the default if unset) and
* "builtin/latest".
* </pre>
*
* <code>string model = 1;</code>
*
* @return The bytes for model.
*/
@java.lang.Override
public com.google.protobuf.ByteString getModelBytes() {
java.lang.Object ref = model_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
model_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, model_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, model_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig)) {
return super.equals(obj);
}
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig other =
(com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig) obj;
if (!getModel().equals(other.getModel())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + MODEL_FIELD_NUMBER;
hash = (53 * hash) + getModel().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Config for EXPLICIT_CONTENT_DETECTION.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig)
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig.class,
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig.Builder
.class);
}
// Construct using
// com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
model_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
getDefaultInstanceForType() {
return com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig build() {
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
buildPartial() {
com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig result =
new com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig(this);
result.model_ = model_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig) {
return mergeFrom(
(com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(
        com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig other) {
      // Merging the default instance is a no-op; otherwise only a non-empty
      // 'model' overwrites the current value, then unknown fields are merged.
      if (other
          == com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
              .getDefaultInstance()) return this;
      if (!other.getModel().isEmpty()) {
        model_ = other.model_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 message with no required fields: always initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      // Parses from the wire; on failure still merges whatever partial message
      // was decoded before rethrowing the (unwrapped) exception.
      com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig parsedMessage =
          null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Stored either as a String or a ByteString; converted lazily (see getModel/getModelBytes).
    private java.lang.Object model_ = "";
    /**
     *
     *
     * <pre>
     * Model to use for explicit content detection.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest".
     * </pre>
     *
     * <code>string model = 1;</code>
     *
     * @return The model.
     */
    public java.lang.String getModel() {
      java.lang.Object ref = model_;
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the ByteString and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        model_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Model to use for explicit content detection.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest".
     * </pre>
     *
     * <code>string model = 1;</code>
     *
     * @return The bytes for model.
     */
    public com.google.protobuf.ByteString getModelBytes() {
      java.lang.Object ref = model_;
      if (ref instanceof String) {
        // Lazily encode the String as UTF-8 and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        model_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Model to use for explicit content detection.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest".
     * </pre>
     *
     * <code>string model = 1;</code>
     *
     * @param value The model to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setModel(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      model_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Model to use for explicit content detection.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest".
     * </pre>
     *
     * <code>string model = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearModel() {
      // Resets to the default instance's value (empty string for proto3).
      model_ = getDefaultInstance().getModel();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Model to use for explicit content detection.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest".
     * </pre>
     *
     * <code>string model = 1;</code>
     *
     * @param value The bytes for model to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setModelBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 string fields must hold valid UTF-8.
      checkByteStringIsUtf8(value);
      model_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates unknown-field handling to the generated superclass.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates unknown-field merging to the generated superclass.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig();
  }
  /** Returns the shared immutable default instance of this message type. */
  public static com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; each call constructs a new message from the stream.
  private static final com.google.protobuf.Parser<ExplicitContentDetectionConfig> PARSER =
      new com.google.protobuf.AbstractParser<ExplicitContentDetectionConfig>() {
        @java.lang.Override
        public ExplicitContentDetectionConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ExplicitContentDetectionConfig(input, extensionRegistry);
        }
      };
  /** Returns the static parser for this message type. */
  public static com.google.protobuf.Parser<ExplicitContentDetectionConfig> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ExplicitContentDetectionConfig> getParserForType() {
    // Instance accessor required by the Message contract; same singleton parser.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig
      getDefaultInstanceForType() {
    // Instance accessor required by the Message contract; same singleton default.
    return DEFAULT_INSTANCE;
  }
}
| |
package com.mindforger.coachingnotebook.client;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.History;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.RootPanel;
import com.mindforger.coachingnotebook.client.ui.ActionsTable;
import com.mindforger.coachingnotebook.client.ui.CheatSheetPanel;
import com.mindforger.coachingnotebook.client.ui.DashboardPanel;
import com.mindforger.coachingnotebook.client.ui.GrowPanel;
import com.mindforger.coachingnotebook.client.ui.GrowsTable;
import com.mindforger.coachingnotebook.client.ui.ImportanceUrgencyChart;
import com.mindforger.coachingnotebook.client.ui.LeftMenubar;
import com.mindforger.coachingnotebook.client.ui.PageTitlePanel;
import com.mindforger.coachingnotebook.client.ui.QuestionAnswerPanel;
import com.mindforger.coachingnotebook.client.ui.RightCornerPanel;
import com.mindforger.coachingnotebook.client.ui.SharedGrowsTable;
import com.mindforger.coachingnotebook.client.ui.StatusLine;
import com.mindforger.coachingnotebook.client.ui.social.ConnectionsPanel;
import com.mindforger.coachingnotebook.client.ui.social.ConnectionsPanel.ConnectionsPanelMode;
import com.mindforger.coachingnotebook.shared.MindForgerConstants;
import com.mindforger.coachingnotebook.shared.MindForgerResourceType;
import com.mindforger.coachingnotebook.shared.MindForgerSettings;
import com.mindforger.coachingnotebook.shared.beans.CommentBean;
import com.mindforger.coachingnotebook.shared.beans.GrowBean;
import com.mindforger.coachingnotebook.shared.beans.QuestionAnswerBean;
import com.mindforger.coachingnotebook.shared.beans.RiaBootImageBean;
import com.mindforger.coachingnotebook.shared.beans.UserLimitsBean;
import com.mindforger.coachingnotebook.shared.verifiers.FieldVerifier;
import com.mindforger.coachingnotebook.shared.verifiers.FieldVerifier.Field;
/**
 * GWT entry point of the MindForger coaching-notebook RIA client.
 *
 * <p>On module load it fetches the boot image from the server, attaches all UI
 * panels to their host-page containers, caches frequently used components from
 * {@link RiaContext}, and finally chooses the initial view based on the user's
 * data and perspective. Also handles browser-history tokens and shared
 * navigation/show methods used by the rest of the UI.
 */
public class Ria implements EntryPoint, MindForgerConstants, MindForgerSettings {
    private static final Logger LOG=Logger.getLogger("MindForgerRia");

    // Context owning (and lazily creating) all UI components and services.
    private RiaContext ctx;
    // Convenience references cached from ctx once the boot image has loaded.
    private RiaState state;
    private RiaMessages i18n;
    private MindForgerServiceAsync service;
    private FieldVerifier fieldVerifier;
    private PageTitlePanel pageTitle;
    private LeftMenubar leftMenubar;
    private StatusLine statusLine;
    private GrowPanel growPanel;
    private GrowsTable growsTable;
    private SharedGrowsTable sharedGrowsTable;
    private ActionsTable actionsTable;
    private DashboardPanel dashboard;
    private ImportanceUrgencyChart importanceUrgencyChart;
    private CheatSheetPanel cheatsheet;
    private ConnectionsPanel connectionsPanel;
    private RightCornerPanel rightCornerPanel;

    /** Creates the context and i18n bundle; everything else waits for onModuleLoad(). */
    public Ria() {
        ctx=new RiaContext(this);
        i18n=ctx.getI18n();
    }

    /**
     * GWT lifecycle entry point: shows progress, fetches the boot image and,
     * on success, wires every panel into the host page and picks the initial view.
     */
    public void onModuleLoad() {
        GWT.log("Loading MindForger!");
        // Status line and page title are attached before the async boot call so
        // progress is visible while the boot image loads.
        RootPanel.get(CONTAINER_STATUS_LINE).add(ctx.getStatusLine());
        RootPanel.get(CONTAINER_PAGE_TITLE).add(ctx.getPageTitle());
        ctx.getStatusLine().showProgress(i18n.loadingYourGoalsEtc());
        ctx.getService().getRiaBootImage(new AsyncCallback<RiaBootImageBean>() {
            public void onSuccess(RiaBootImageBean bean) {
                ctx.getStatusLine().showProgress(i18n.initializingMf());
                ctx.getState().init(bean);
                int growsCount=(ctx.getState().getGrowBeans()!=null?ctx.getState().getGrowBeans().length:0);
                // Attach panels to their host-page containers; most start hidden.
                RootPanel.get(CONTAINER_MY_LIFE).add(ctx.getLifeDesignerPanel());
                RootPanel.get(CONTAINER_USER_PROFILE).add(ctx.getUserProfilePanel());
                RootPanel.get(CONTAINER_USER_PROFILE).setVisible(false);
                RootPanel.get(CONTAINER_CONNECTIONS).add(ctx.getConnectionsPanel());
                RootPanel.get(CONTAINER_CONNECTIONS).setVisible(false);
                if(BLUE_LIFE_ENABLED) {
                    RootPanel.get(CONTAINER_BLUE_LIFE).add(ctx.getBlueLifePanel());
                    RootPanel.get(CONTAINER_BLUE_LIFE).setVisible(false);
                }
                RootPanel.get(CONTAINER_SEARCH_RESULTS).add(ctx.getSearchResultsPanel());
                RootPanel.get(CONTAINER_SEARCH).add(ctx.getSearchPanel());
                ctx.getSearchPanel().setVisible(true);
                RootPanel.get(CONTAINER_RIGHT_CORNER).add(ctx.getRightCornerPanel());
                RootPanel.get(CONTAINER_LEFT_MENUBAR).add(ctx.getLeftMenubar());
                ctx.getConnectionsPanel().refreshConnections(ConnectionsPanelMode.CONNECTIONS, true);
                RootPanel.get(CONTAINER_GROWS_TABS).add(ctx.getGrowPanel());
                ctx.getLeftMenubar().setGrowsCount(growsCount);
                // TODO do result processing later
                RootPanel.get(CONTAINER_GROWS_TABLE).add(ctx.getGrowsTable());
                // TODO do result processing later
                RootPanel.get(CONTAINER_SHARED_GROWS_TABLE).add(ctx.getSharedGrowsTable());
                // TODO do result processing later
                RootPanel.get(CONTAINER_IMPORTANCE_URGENCY_CHART).add(ctx.getImportanceUrgencyChart());
                RootPanel.get(CONTAINER_DASHBOARD).add(ctx.getDashboard());
                // TODO do result processing later
                RootPanel.get(CONTAINER_ACTIONS_TABLE).add(ctx.getActionsTable());
                // TODO do result processing later
                RootPanel.get(CONTAINER_LESSONS_LEARNED_TABLE).add(ctx.getLessonsLearnedTable());
                RootPanel.get(CONTAINER_CHEATSHEET).add(ctx.getCheatSheet());
                // ctx initialized > set up beans
                state=ctx.getState();
                service=ctx.getService();
                fieldVerifier=ctx.getFieldVerifier();
                pageTitle=ctx.getPageTitle();
                leftMenubar=ctx.getLeftMenubar();
                statusLine=ctx.getStatusLine();
                growPanel=ctx.getGrowPanel();
                growsTable=ctx.getGrowsTable();
                sharedGrowsTable=ctx.getSharedGrowsTable();
                actionsTable=ctx.getActionsTable();
                dashboard=ctx.getDashboard();
                importanceUrgencyChart=ctx.getImportanceUrgencyChart();
                cheatsheet=ctx.getCheatSheet();
                connectionsPanel=ctx.getConnectionsPanel();
                rightCornerPanel=ctx.getRightCornerPanel();
                // hide loading stuff
                RootPanel.get(CONTAINER_LOADING_PANEL).setVisible(false);
                RootPanel.get(CONTAINER_TOP_HR).setVisible(true);
                RootPanel.get(CONTAINER_LOGO).setVisible(true);
                RootPanel.get(CONTAINER_FOOTNOTE).setVisible(true);
                // show welcome page if there are no grows
                // NOTE(review): getGrowBeans() was null-checked when computing growsCount
                // above, but is dereferenced without a check here - confirm it can
                // never be null after state.init(bean), or reuse growsCount.
                if(ctx.getState().getGrowBeans().length==0) {
                    ctx.getCheatSheet().showAsWelcomePage(true);
                    leftMenubar.showCheatSheet();
                } else {
                    // Initial view depends on the user's perspective setting.
                    if(ctx.getState().inPerspective(new String[]{PERSPECTIVE_PROBLEM_SOLVER})) {
                        leftMenubar.showGrowsTable();
                    } else {
                        if(ctx.getState().inPerspective(new String[]{PERSPECTIVE_MIND_FORGER})) {
                            ctx.getLeftMenubar().showOrganizer();
                        } else {
                            if(ctx.getState().inPerspective(new String[]{PERSPECTIVE_COACH})) {
                                ctx.getLeftMenubar().showSharedGoals();
                            } else {
                                ctx.getLeftMenubar().showMyLife();
                            }
                        }
                    }
                    ctx.getDashboard().refreshGrows(ctx.getState().getGrowBeans());
                }
                initHistorySupport();
                ctx.getStatusLine().hideStatus();
            }
            public void onFailure(Throwable caught) {
                handleServiceError(caught);
            }
        });
    }

    /**
     * Registers the browser-history handler and dispatches any token present in
     * the URL at startup (deep linking).
     */
    public void initHistorySupport() {
        History.addValueChangeHandler(new ValueChangeHandler<String>() {
            public void onValueChange(ValueChangeEvent<String> event) {
                // TODO implement handling of the history
            }
        });
        // check to see if there are any tokens passed at startup via the browser's URI
        String token = History.getToken();
        if (token.length() == 0) {
            onHistoryChanged(null);
        }
        else {
            onHistoryChanged(token);
        }
    }

    /**
     * Handles a history token: "connections", "actions" (loads actions from the
     * server first) and "cheatsheet" are recognized; anything else is ignored.
     *
     * @param historyToken token from the URL fragment, or null when absent
     */
    public void onHistoryChanged(String historyToken) {
        if(historyToken==null) {
            LOG.log(Level.INFO, "There is no token passed in the browser URL");
        }
        else {
            LOG.log(Level.INFO, "Got a new browser history change token:" + historyToken);
            if("connections".equals(historyToken)) {
                leftMenubar.showConnections();
            } else {
                if("actions".equals(historyToken)) {
                    statusLine.showProgress(i18n.loadingActions());
                    service.getActions(new AsyncCallback<QuestionAnswerBean[]>() {
                        public void onSuccess(QuestionAnswerBean[] result) {
                            statusLine.hideStatus();
                            actionsTable.refresh(result);
                            leftMenubar.showActionsTable();
                        }
                        public void onFailure(Throwable caught) {
                            handleServiceError(caught);
                        }
                    });
                } else {
                    if("cheatsheet".equals(historyToken)) {
                        if(state.getGrowBeans().length==0) {
                            cheatsheet.showAsWelcomePage(true);
                        } else {
                            cheatsheet.showAsWelcomePage(false);
                        }
                        leftMenubar.showCheatSheet();
                    }
                }
            }
        }
    }

    /**
     * Loads a grow (goal) from the server, pushes it into the grow panel, shows
     * the grow tabs, then loads its comments separately and attaches each to the
     * matching question/answer panel.
     *
     * @param growId      id of the grow to load
     * @param tabToSelect index of the grow-panel tab to select once loaded
     */
    public void loadGrow(final String growId, final int tabToSelect) {
        service.getGrow(growId, new AsyncCallback<GrowBean>() {
            public void onSuccess(GrowBean bean) {
                // TODO this one should replace the bean in RIA array of grows
                // TODO this method should be somewhere on ria
                leftMenubar.getNewGrowButton().setStyleName("mf-button");
                leftMenubar.getNewGrowButton().addStyleName("mf-newGoalButton");
                // bean 2 RIA
                growPanel.getGrowTabs().selectTab(tabToSelect);
                growPanel.toRia(bean);
                // show grow after it is loaded and pushed to RIA
                showGrowTabs();
                statusLine.showInfo(bean.getNumberOfQuestions()+" "+i18n.itemsLowerCase());
                // load also comments - but separately from grow (there might be many comments)
                service.getCommentsForGrow(growId, new AsyncCallback<CommentBean[]>() {
                    public void onFailure(Throwable caught) {
                        handleServiceError(caught);
                    }
                    public void onSuccess(CommentBean[] result) {
                        if(result!=null && result.length>0) {
                            // Route each comment to the QA panel of its question.
                            Map<String, QuestionAnswerPanel> map = growPanel.getQuestionIdToQAPanelMap();
                            for (int i = 0; i < result.length; i++) {
                                QuestionAnswerPanel panel = map.get(result[i].getQuestionId());
                                if(panel!=null) {
                                    panel.addComment(result[i]);
                                }
                            }
                        }
                    }
                });
                ctx.getLeftMenubar().getRecentPanel().addRow(growId, "", bean.getName(), bean.getDescription(), MindForgerResourceType.GROW);
            }
            public void onFailure(Throwable caught) {
                handleServiceError(caught);
            }
        });
    }

    /** Re-fetches all grows after a save and refreshes the dependent views. */
    public void refreshRiaOnGrowSave() {
        statusLine.showProgress(i18n.loadingGrows());
        service.getGrows(new AsyncCallback<GrowBean[]>() {
            public void onFailure(Throwable caught) {
                handleServiceError(caught);
            }
            public void onSuccess(GrowBean[] result) {
                statusLine.hideStatus();
                state.setGrowBeans(result);
                int growsCount=(result!=null?result.length:0);
                leftMenubar.setGrowsCount(growsCount);
                // TODO dashboard not refreshed
                growsTable.refresh(result);
                importanceUrgencyChart.refresh(result);
            }
        });
    }

    // Hides every content container and clears the page title; each showXxx()
    // method below calls this first, then reveals exactly one container.
    // NOTE(review): CONTAINER_BLUE_LIFE is hidden unconditionally here although
    // onModuleLoad only attaches it when BLUE_LIFE_ENABLED - confirm the host
    // page always contains the element (RootPanel.get would return null otherwise).
    private void hideAllContainers() {
        pageTitle.setHTML("");
        RootPanel.get(CONTAINER_DASHBOARD).setVisible(false);
        RootPanel.get(CONTAINER_MY_LIFE).setVisible(false);
        RootPanel.get(CONTAINER_IMPORTANCE_URGENCY_CHART).setVisible(false);
        RootPanel.get(CONTAINER_GROWS_TABLE).setVisible(false);
        RootPanel.get(CONTAINER_SHARED_GROWS_TABLE).setVisible(false);
        RootPanel.get(CONTAINER_ACTIONS_TABLE).setVisible(false);
        RootPanel.get(CONTAINER_LESSONS_LEARNED_TABLE).setVisible(false);
        RootPanel.get(CONTAINER_GROWS_TABS).setVisible(false);
        RootPanel.get(CONTAINER_SEARCH_RESULTS).setVisible(false);
        RootPanel.get(CONTAINER_CONNECTIONS).setVisible(false);
        RootPanel.get(CONTAINER_CHEATSHEET).setVisible(false);
        RootPanel.get(CONTAINER_USER_PROFILE).setVisible(false);
        RootPanel.get(CONTAINER_BLUE_LIFE).setVisible(false);
    }

    /** Shows the dashboard view. */
    public void showDashboard() {
        hideAllContainers();
        pageTitle.setHTML(i18n.dashboard());
        // TODO to be LAZILY refreshed - same as organizer and grows table
        RootPanel.get(CONTAINER_DASHBOARD).setVisible(true);
    }

    /** Shows the life-designer view. */
    public void showMyLife() {
        hideAllContainers();
        pageTitle.setHTML(i18n.lifeDesigner());
        RootPanel.get(CONTAINER_MY_LIFE).setVisible(true);
    }

    /** Shows the user-profile view. */
    public void showUserProfile() {
        hideAllContainers();
        pageTitle.setHTML(i18n.userProfile());
        RootPanel.get(CONTAINER_USER_PROFILE).setVisible(true);
    }

    /** Shows the blue-life view. */
    public void showBlueLife() {
        hideAllContainers();
        pageTitle.setHTML(i18n.blueLife());
        RootPanel.get(CONTAINER_BLUE_LIFE).setVisible(true);
    }

    /** Shows the search-results view. */
    public void showSearchResults() {
        hideAllContainers();
        pageTitle.setHTML(i18n.searchResults());
        RootPanel.get(CONTAINER_SEARCH_RESULTS).setVisible(true);
    }

    /** Shows the organizer (importance/urgency chart) view. */
    public void showOrganizer() {
        hideAllContainers();
        pageTitle.setHTML(i18n.organizer());
        // TODO add LAZY refresh - which is OK, but elsewhere are refreshes that are no longer needed because of this one
        RootPanel.get(CONTAINER_IMPORTANCE_URGENCY_CHART).setVisible(true);
    }

    /** Shows the single-grow (goal) tabs view. */
    public void showGrowTabs() {
        hideAllContainers();
        pageTitle.setHTML(i18n.goal());
        RootPanel.get(CONTAINER_GROWS_TABS).setVisible(true);
    }

    /** Shows the grows (goals) table view. */
    public void showGrowsTable() {
        hideAllContainers();
        pageTitle.setHTML(i18n.goals());
        // TODO add LAZY refresh - which is OK, but elsewhere are refreshes that are no longer needed because of this one
        RootPanel.get(CONTAINER_GROWS_TABLE).setVisible(true);
    }

    /** Shows the actions table view. */
    public void showActionsTable() {
        hideAllContainers();
        pageTitle.setHTML(i18n.actions());
        RootPanel.get(CONTAINER_ACTIONS_TABLE).setVisible(true);
    }

    /** Shows the lessons-learned table view. */
    public void showLessonsLearnedTable() {
        hideAllContainers();
        pageTitle.setHTML(i18n.lessonsLearned());
        RootPanel.get(CONTAINER_LESSONS_LEARNED_TABLE).setVisible(true);
    }

    /** Shows the shared-goals view, refreshing its content first. */
    public void showSharedGoals() {
        hideAllContainers();
        pageTitle.setHTML(i18n.sharedGoals());
        sharedGrowsTable.refreshWithNewSortingCriteria();
        RootPanel.get(CONTAINER_SHARED_GROWS_TABLE).setVisible(true);
    }

    /**
     * Shows the connections view and refreshes it in the given mode.
     *
     * @param connectionsPanelMode which subset of connections to display
     */
    public void showConnections(ConnectionsPanelMode connectionsPanelMode) {
        hideAllContainers();
        pageTitle.setHTML(i18n.connections());
        RootPanel.get(CONTAINER_CONNECTIONS).setVisible(true);
        connectionsPanel.refreshConnections(connectionsPanelMode, false);
    }

    /** Shows the cheat-sheet ("home") view. */
    public void showCheatSheet() {
        hideAllContainers();
        pageTitle.setHTML(i18n.home());
        RootPanel.get(CONTAINER_CHEATSHEET).setVisible(true);
    }

    /**
     * Validates a field value against its length limit.
     *
     * @param fieldType which field is being validated
     * @param field     the value to validate
     * @param fieldName user-facing field name for the error message
     * @return true when valid; false after showing an error on the status line
     */
    public boolean verifyField(Field fieldType, String field, String fieldName) {
        // verify() returns a negative number on success, otherwise the limit exceeded.
        int limit=fieldVerifier.verify(fieldType, field);
        if(limit<0) {
            statusLine.hideStatus();
            return true;
        } else {
            statusLine.showError(i18n.errorFieldMustBeShorterThan(fieldName, ""+limit));
            return false;
        }
    }

    /** Logs a failed RPC call and shows a generic error on the status line. */
    public void handleServiceError(Throwable caught) {
        final String errorMessage = caught.getMessage();
        GWT.log("Error: "+errorMessage, caught);
        LOG.log(Level.SEVERE, errorMessage,caught);
        statusLine.showError(i18n.ooops());
    }

    /**
     * Applies a new perspective locally (menubar, corner panel, cheat sheet)
     * and persists the user settings asynchronously.
     */
    public void handleSetPerspective(final String value) {
        state.getUserSettings().setPerspective(value);
        leftMenubar.reinitialize();
        rightCornerPanel.setPerspectiveName();
        cheatsheet.refresh();
        statusLine.showProgress(i18n.savingThePerspectiveSelection());
        service.saveUserSettings(state.getUserSettings(), new AsyncCallback<Void>() {
            public void onFailure(Throwable caught) {
                handleServiceError(caught);
            }
            public void onSuccess(Void result) {
                statusLine.hideStatus();
            }
        });
    }

    /** Refreshes the dashboard from the grows cached in state. */
    public void refreshDashboard() {
        dashboard.refreshGrows(state.getGrowBeans());
    }

    // TODO OPTIMIZE - prepare hashtable
    /**
     * Linear lookup of a grow's name by its key.
     *
     * @param id grow key; may be null
     * @return the grow's name, or "" when not found or id is null
     */
    public String getGrowNameForId(String id) {
        GrowBean[] growBeans = state.getGrowBeans();
        if(growBeans!=null && id!=null) {
            for (int i = 0; i < growBeans.length; i++) {
                if(id.equals(growBeans[i].getKey())) {
                    return growBeans[i].getName();
                }
            }
        }
        return "";
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.extractMethod;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.introduceField.ElementToWorkOn;
import com.intellij.refactoring.introduceParameter.IntroduceParameterHandler;
import com.intellij.refactoring.util.VariableData;
import com.intellij.refactoring.util.duplicates.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.UniqueNameGenerator;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
import static com.intellij.refactoring.extractMethod.ExtractMethodHandler.REFACTORING_NAME;
/**
* @author Pavel.Dolgov
*/
public class ParametrizedDuplicates {
  private static final Logger LOG = Logger.getInstance(ParametrizedDuplicates.class);

  // Copied (code-block-wrapped) pattern elements the parametrized extraction runs on.
  private final PsiElement[] myElements;
  // Accepted duplicate matches, after filtering out incompatible ones.
  private List<Match> myMatches;
  // Clusters of equivalent expression usages, one per extracted parameter.
  private List<ClusterOfUsages> myUsagesList;
  // Results of the parametrized extraction, populated by extract().
  private PsiMethod myParametrizedMethod;
  private PsiMethodCallExpression myParametrizedCall;
  private VariableData[] myVariableDatum;
  /**
   * Copies the pattern elements into a file copy and wraps them with a code
   * block, so the parametrized extraction can be attempted without touching the
   * original PSI. Leaves {@code myElements} empty when the pattern is neither
   * statements nor an expression.
   */
  private ParametrizedDuplicates(@NotNull PsiElement[] pattern,
                                 @NotNull ExtractMethodProcessor originalProcessor) {
    PsiElement[] filteredPattern = getFilteredElements(pattern);
    PsiElement firstElement = filteredPattern.length != 0 ? filteredPattern[0] : null;
    if (firstElement instanceof PsiStatement) {
      // Statement pattern: wrap the copied statements in a synthetic code block.
      PsiElement[] copy = copyElements(pattern);
      myElements = wrapWithCodeBlock(copy, originalProcessor.getInputVariables());
    }
    else if (firstElement instanceof PsiExpression) {
      // Expression pattern: wrap the copied expression; may fail (null).
      PsiElement[] copy = copyElements(pattern);
      PsiExpression wrapped = wrapExpressionWithCodeBlock(copy, originalProcessor);
      myElements = wrapped != null ? new PsiElement[]{wrapped} : PsiElement.EMPTY_ARRAY;
    }
    else {
      myElements = PsiElement.EMPTY_ARRAY;
    }
  }
private static PsiElement[] copyElements(@NotNull PsiElement[] pattern) {
Project project = pattern[0].getProject();
return IntroduceParameterHandler.getElementsInCopy(project, pattern[0].getContainingFile(), pattern, false);
}
  /**
   * Entry point: finds parametrized duplicates of the processor's pattern.
   *
   * <p>Pipeline: find raw matches, drop nested sub-expression matches, fold
   * parameters where possible, then build this object and run the parametrized
   * extraction on a PSI copy.
   *
   * @return the duplicates, or null when there are none or extraction fails
   */
  @Nullable
  public static ParametrizedDuplicates findDuplicates(@NotNull ExtractMethodProcessor originalProcessor,
                                                      @NotNull DuplicatesFinder.MatchType matchType) {
    DuplicatesFinder finder = createDuplicatesFinder(originalProcessor, matchType);
    if (finder == null) {
      return null;
    }
    List<Match> matches = finder.findDuplicates(originalProcessor.myTargetClass);
    matches = filterNestedSubexpressions(matches);
    if (matches.isEmpty()) {
      return null;
    }
    // Fold multi-usage parameters first; yields preferred names for new parameters.
    Map<PsiExpression, String> predefinedNames = foldParameters(originalProcessor, matches);
    PsiElement[] pattern = originalProcessor.myElements;
    ParametrizedDuplicates duplicates = new ParametrizedDuplicates(pattern, originalProcessor);
    if (!duplicates.initMatches(pattern, matches)) {
      return null;
    }
    if (!duplicates.extract(originalProcessor, predefinedNames)) {
      return null;
    }
    return duplicates;
  }
  /**
   * Re-runs the finder in FOLDED mode to determine which original input
   * variables can stay folded across all matches, and records the extracted
   * parameters plus preferred names for the folded expressions.
   *
   * @return map from pattern expression to the variable name it should keep;
   *         empty when folding is not applicable
   */
  @NotNull
  private static Map<PsiExpression, String> foldParameters(ExtractMethodProcessor originalProcessor, List<Match> matches) {
    if (matches.isEmpty() || !originalProcessor.getInputVariables().isFoldable()) {
      return Collections.emptyMap();
    }
    // As folded parameters don't work along with extracted parameters we need to apply the finder again to actually fold the parameters
    DuplicatesFinder finder = createDuplicatesFinder(originalProcessor, DuplicatesFinder.MatchType.FOLDED);
    if (finder == null) {
      return Collections.emptyMap();
    }
    Map<Match, Match> foldedMatches = new HashMap<>();
    // Candidate foldable parameters, one per original input variable.
    Map<DuplicatesFinder.Parameter, VariableData> parametersToFold = new LinkedHashMap<>();
    for (VariableData data : originalProcessor.getInputVariables().getInputVariables()) {
      parametersToFold.put(new DuplicatesFinder.Parameter(data.variable, data.type, true), data);
    }
    // Pair every match with its folded counterpart and keep only parameters
    // foldable in ALL matches.
    for (Match match : matches) {
      Match foldedMatch = finder.isDuplicate(match.getMatchStart(), false);
      LOG.assertTrue(foldedMatch != null, "folded match should exist");
      LOG.assertTrue(match.getMatchStart() == foldedMatch.getMatchStart() &&
                     match.getMatchEnd() == foldedMatch.getMatchEnd(), "folded match range should be the same");
      foldedMatches.put(match, foldedMatch);
      parametersToFold.keySet().removeIf(parameter -> !canFoldParameter(match, foldedMatch, parameter));
    }
    if (parametersToFold.isEmpty()) {
      return Collections.emptyMap();
    }
    // For each surviving parameter, record one ExtractedParameter per match and
    // remember the variable name each pattern expression should get.
    Map<PsiExpression, String> predefinedNames = new HashMap<>();
    for (Match match : matches) {
      Match foldedMatch = foldedMatches.get(match);
      LOG.assertTrue(foldedMatch != null, "folded match");
      for (Map.Entry<DuplicatesFinder.Parameter, VariableData> entry : parametersToFold.entrySet()) {
        DuplicatesFinder.Parameter parameter = entry.getKey();
        VariableData variableData = entry.getValue();
        List<Pair.NonNull<PsiExpression, PsiExpression>> expressionMappings = foldedMatch.getFoldedExpressionMappings(parameter);
        LOG.assertTrue(!ContainerUtil.isEmpty(expressionMappings), "foldedExpressionMappings can't be empty");
        PsiType type = parameter.getType();
        ExtractedParameter extractedParameter = null;
        for (Pair.NonNull<PsiExpression, PsiExpression> expressionMapping : expressionMappings) {
          PsiExpression patternExpression = expressionMapping.getFirst();
          ExtractableExpressionPart patternPart = ExtractableExpressionPart.fromUsage(patternExpression, type);
          if (extractedParameter == null) {
            // First usage creates the parameter; later usages are just added.
            PsiExpression candidateExpression = expressionMapping.getSecond();
            ExtractableExpressionPart candidatePart = ExtractableExpressionPart.fromUsage(candidateExpression, type);
            extractedParameter = new ExtractedParameter(patternPart, candidatePart, type);
          }
          else {
            extractedParameter.addUsages(patternPart);
          }
          predefinedNames.put(patternExpression, variableData.name);
        }
        LOG.assertTrue(extractedParameter != null, "extractedParameter can't be null");
        match.getExtractedParameters().add(extractedParameter);
      }
    }
    return predefinedNames;
  }
private static boolean canFoldParameter(Match match, Match foldedMatch, DuplicatesFinder.Parameter parameter) {
List<Pair.NonNull<PsiExpression, PsiExpression>> expressionMappings = foldedMatch.getFoldedExpressionMappings(parameter);
if (ContainerUtil.isEmpty(expressionMappings)) {
return false;
}
// Extracted parameters and folded parameters shouldn't overlap
for (Pair.NonNull<PsiExpression, PsiExpression> expressionMapping : expressionMappings) {
PsiExpression patternExpression = expressionMapping.getFirst();
for (ExtractedParameter extractedParameter : match.getExtractedParameters()) {
for (PsiExpression extractedUsage : extractedParameter.myPatternUsages) {
if (PsiTreeUtil.isAncestor(patternExpression, extractedUsage, false) ||
PsiTreeUtil.isAncestor(extractedUsage, patternExpression, false)) {
return false;
}
}
}
}
return true;
}
  /**
   * Builds a DuplicatesFinder for the processor's elements.
   *
   * <p>For PARAMETRIZED matching, folding is disabled on the input variables
   * (the two features are applied in separate passes — see foldParameters).
   *
   * @return the finder, or null when there are no meaningful elements
   */
  @Nullable
  private static DuplicatesFinder createDuplicatesFinder(@NotNull ExtractMethodProcessor processor,
                                                         @NotNull DuplicatesFinder.MatchType matchType) {
    PsiElement[] elements = getFilteredElements(processor.myElements);
    if (elements.length == 0) {
      return null;
    }
    Set<PsiVariable> effectivelyLocal = processor.getEffectivelyLocalVariables();
    InputVariables inputVariables = matchType == DuplicatesFinder.MatchType.PARAMETRIZED
                                    ? processor.myInputVariables.copyWithoutFolding() : processor.myInputVariables;
    ReturnValue returnValue = processor.myOutputVariable != null ? new VariableReturnValue(processor.myOutputVariable) : null;
    return new DuplicatesFinder(elements, inputVariables, returnValue,
                                Collections.emptyList(), matchType, effectivelyLocal);
  }
@NotNull
public PsiMethod replaceMethod(@NotNull PsiMethod originalMethod) {
PsiElementFactory factory = JavaPsiFacade.getElementFactory(originalMethod.getProject());
String text = myParametrizedMethod.getText();
PsiMethod method = factory.createMethodFromText(text, originalMethod.getParent());
return (PsiMethod)originalMethod.replace(method);
}
@NotNull
public PsiMethodCallExpression replaceCall(@NotNull PsiMethodCallExpression originalCall) {
PsiElementFactory factory = JavaPsiFacade.getElementFactory(originalCall.getProject());
String text = myParametrizedCall.getText();
PsiMethodCallExpression call = (PsiMethodCallExpression)factory.createExpressionFromText(text, originalCall.getParent());
return (PsiMethodCallExpression)originalCall.replace(call);
}
  /**
   * Groups extracted parameters of all matches into usage clusters, drops
   * matches whose usages conflict with already-registered clusters, and fills
   * in a synthesized parameter for every (cluster, match) pair that lacks one.
   *
   * @return true when at least one consistent match remains
   */
  private boolean initMatches(@NotNull PsiElement[] pattern, @NotNull List<Match> matches) {
    if (myElements.length == 0) {
      return false;
    }
    myUsagesList = new ArrayList<>();
    // Maps every pattern expression to the cluster it belongs to.
    Map<PsiExpression, ClusterOfUsages> usagesMap = new THashMap<>();
    Set<Match> badMatches = new THashSet<>();
    for (Match match : matches) {
      List<ClusterOfUsages> usagesInMatch = getUsagesInMatch(usagesMap, match);
      if (usagesInMatch == null) {
        // Conflicting usage structure - this match can't share the parametrization.
        badMatches.add(match);
        continue;
      }
      for (ClusterOfUsages usages : usagesInMatch) {
        myUsagesList.add(usages);
        for (PsiExpression expression : usages.myPatterns) {
          usagesMap.put(expression, usages);
        }
      }
    }
    if (!badMatches.isEmpty()) {
      matches = new ArrayList<>(matches);
      matches.removeAll(badMatches);
    }
    myMatches = matches;
    if (myMatches.isEmpty()) {
      return false;
    }
    // A match may lack a parameter for some cluster (its code coincided there);
    // synthesize one from the pattern-to-candidate element mapping, computed
    // lazily per match.
    Map<Match, Map<PsiExpression, PsiExpression>> expressionsMapping = new HashMap<>();
    for (ClusterOfUsages usages : myUsagesList) {
      for (Match match : myMatches) {
        ExtractedParameter parameter = usages.myParameters.get(match);
        if (parameter == null) {
          Map<PsiExpression, PsiExpression> expressions =
            expressionsMapping.computeIfAbsent(match, unused -> {
              Map<PsiExpression, PsiExpression> result = new HashMap<>();
              collectCopyMapping(pattern, match.getMatchElements(), usagesMap.keySet()::contains, result::put, (unused1, unused2) -> {});
              return result;
            });
          PsiExpression candidateUsage = usages.myPatterns.stream().map(expressions::get).findAny().orElse(null);
          LOG.assertTrue(candidateUsage != null, "candidateUsage shouldn't be null");
          ExtractedParameter fromParameter = usages.myParameter;
          parameter = fromParameter.copyWithCandidateUsage(candidateUsage);
          match.addExtractedParameter(parameter);
          usages.putParameter(match, parameter);
        }
      }
    }
    // Keep clusters in source order so generated parameters appear predictably.
    myUsagesList.sort(Comparator.comparing(usages -> usages.myFirstOffset));
    return true;
  }
  /**
   * Removes matches whose extracted pattern usages are nested inside another
   * match's usage — overlapping extractions can't be parametrized together.
   *
   * @return the surviving matches (a new list when anything was removed)
   */
  private static List<Match> filterNestedSubexpressions(List<Match> matches) {
    // Index: pattern usage expression -> matches that extract it.
    Map<PsiExpression, Set<Match>> patternUsages = new THashMap<>();
    for (Match match : matches) {
      for (ExtractedParameter parameter : match.getExtractedParameters()) {
        for (PsiExpression patternUsage : parameter.myPatternUsages) {
          patternUsages.computeIfAbsent(patternUsage, k -> new THashSet<>()).add(match);
        }
      }
    }
    // A usage strictly containing another usage disqualifies all its matches.
    Set<Match> badMatches = new THashSet<>();
    for (Map.Entry<PsiExpression, Set<Match>> entry : patternUsages.entrySet()) {
      PsiExpression patternUsage = entry.getKey();
      Set<Match> patternMatches = entry.getValue();
      for (PsiExpression maybeNestedUsage : patternUsages.keySet()) {
        if (patternUsage == maybeNestedUsage) {
          continue;
        }
        if (PsiTreeUtil.isAncestor(patternUsage, maybeNestedUsage, true)) {
          badMatches.addAll(patternMatches);
          break;
        }
      }
    }
    if (!badMatches.isEmpty()) {
      matches = new ArrayList<>(matches);
      matches.removeAll(badMatches);
    }
    return matches;
  }
  /**
   * Maps this match's extracted parameters onto existing usage clusters,
   * creating new clusters for parameters not seen before.
   *
   * @return the clusters created for this match, or null when a parameter
   *         conflicts with an existing cluster (non-equivalent usage sets)
   */
  @Nullable
  private static List<ClusterOfUsages> getUsagesInMatch(@NotNull Map<PsiExpression, ClusterOfUsages> usagesMap, @NotNull Match match) {
    List<ClusterOfUsages> result = new ArrayList<>();
    List<ExtractedParameter> parameters = match.getExtractedParameters();
    for (ExtractedParameter parameter : parameters) {
      ClusterOfUsages usages = usagesMap.get(parameter.myPattern.getUsage());
      // Reject when the parameter disagrees with the cluster it maps into, or
      // when it straddles a cluster without being keyed by its first usage.
      if (usages != null && !usages.isEquivalent(parameter) ||
          usages == null && ClusterOfUsages.isPresent(usagesMap, parameter)) {
        return null;
      }
      if (usages == null) {
        result.add(usages = new ClusterOfUsages(parameter));
      }
      usages.putParameter(match, parameter);
    }
    return result;
  }
/**
 * Performs the actual parametrized extraction: maps the original elements onto the
 * working copy, declares one local variable per usage cluster, runs a fresh
 * extract-method pass over the copy, and finally inlines the temporary declarations
 * into the generated call's arguments.
 *
 * @param originalProcessor processor that was prepared on the original elements
 * @param predefinedNames   preferred names for parameters, keyed by pattern usage
 * @return false when the parametrized processor fails to prepare; true on success
 */
private boolean extract(@NotNull ExtractMethodProcessor originalProcessor, @NotNull Map<PsiExpression, String> predefinedNames) {
  Map<PsiExpression, PsiExpression> expressionsMapping = new THashMap<>();
  Map<PsiVariable, PsiVariable> variablesMapping = new THashMap<>();
  // Map original pattern expressions/variables to their counterparts in the copy.
  collectCopyMapping(originalProcessor.myElements, myElements, myUsagesList, expressionsMapping, variablesMapping);
  // Declare "Type pN = <usage>;" locals and replace usages in the copy with pN.
  Map<PsiLocalVariable, ClusterOfUsages> parameterDeclarations =
    createParameterDeclarations(originalProcessor, expressionsMapping, predefinedNames);
  putMatchParameters(parameterDeclarations);
  JavaDuplicatesExtractMethodProcessor parametrizedProcessor = new JavaDuplicatesExtractMethodProcessor(myElements, REFACTORING_NAME);
  if (!parametrizedProcessor.prepare(false)) {
    return false;
  }
  parametrizedProcessor.applyFrom(originalProcessor, variablesMapping);
  parametrizedProcessor.doExtract();
  myParametrizedMethod = parametrizedProcessor.getExtractedMethod();
  myParametrizedCall = parametrizedProcessor.getMethodCall();
  // Translate the variable data back to the original (pattern) variables.
  myVariableDatum = unmapVariableData(parametrizedProcessor.myVariableDatum, variablesMapping);
  // Replace references to the temporary locals in the call with their initializers.
  replaceArguments(parameterDeclarations, myParametrizedCall);
  return true;
}
/**
 * Substitutes each entry's variable with its original (pattern-side) counterpart,
 * using the inverse of the given original-to-copy variable mapping.
 */
@NotNull
private static VariableData[] unmapVariableData(@NotNull VariableData[] variableDatum,
                                                @NotNull Map<PsiVariable, PsiVariable> variablesMapping) {
  Map<PsiVariable, PsiVariable> copyToOriginal = ContainerUtil.reverseMap(variablesMapping);
  VariableData[] result = new VariableData[variableDatum.length];
  for (int i = 0; i < variableDatum.length; i++) {
    VariableData data = variableDatum[i];
    result[i] = data.substitute(copyToOriginal.get(data.variable));
  }
  return result;
}
/**
 * Replaces every argument of the generated call that references one of the
 * temporary parameter locals with that local's initializer expression,
 * effectively inlining the temporaries into the call.
 */
private static void replaceArguments(@NotNull Map<PsiLocalVariable, ClusterOfUsages> parameterDeclarations,
                                     @NotNull PsiMethodCallExpression parametrizedCall) {
  for (PsiExpression argument : parametrizedCall.getArgumentList().getExpressions()) {
    if (!(argument instanceof PsiReferenceExpression)) {
      continue;
    }
    PsiElement target = ((PsiReferenceExpression)argument).resolve();
    if (!(target instanceof PsiLocalVariable) || !parameterDeclarations.containsKey(target)) {
      continue;
    }
    PsiExpression initializer = ((PsiLocalVariable)target).getInitializer();
    if (initializer != null) {
      argument.replace(initializer);
    }
  }
}
/**
 * Registers, for every duplicate match, the mapping from the temporary parameter
 * local (declared for its cluster) to the match's candidate-side usage, so the
 * duplicate replacement can pass the right argument.
 */
private void putMatchParameters(@NotNull Map<PsiLocalVariable, ClusterOfUsages> parameterDeclarations) {
  // Invert: pattern usage expression -> the local variable declared for its cluster.
  Map<PsiExpression, PsiLocalVariable> patternUsageToParameter = new THashMap<>();
  for (Map.Entry<PsiLocalVariable, ClusterOfUsages> entry : parameterDeclarations.entrySet()) {
    PsiExpression usage = entry.getValue().myParameter.myPattern.getUsage();
    patternUsageToParameter.put(usage, entry.getKey());
  }
  for (Match match : myMatches) {
    List<ExtractedParameter> matchedParameters = match.getExtractedParameters();
    for (ExtractedParameter matchedParameter : matchedParameters) {
      PsiLocalVariable localVariable = patternUsageToParameter.get(matchedParameter.myPattern.getUsage());
      LOG.assertTrue(localVariable != null, "match local variable");
      DuplicatesFinder.Parameter parameter = new DuplicatesFinder.Parameter(localVariable, matchedParameter.myType);
      boolean ok = match.putParameter(parameter, matchedParameter.myCandidate.getUsage());
      LOG.assertTrue(ok, "put match parameter");
    }
  }
}
/** @return the parametrized method produced by {@code extract()}, or null before extraction */
public PsiMethod getParametrizedMethod() {
  return myParametrizedMethod;
}

/** @return the call to the parametrized method that replaced the original fragment */
public PsiMethodCallExpression getParametrizedCall() {
  return myParametrizedCall;
}

/** @return variable data remapped back onto the original (pattern) variables */
public VariableData[] getVariableDatum() {
  return myVariableDatum;
}

/** @return the number of duplicates found, 0 when there are none */
public int getSize() {
  return myMatches != null ? myMatches.size() : 0;
}

/** @return the duplicate matches found for the extracted fragment */
public List<Match> getDuplicates() {
  return myMatches;
}

/** @return true when no duplicate matches were found */
boolean isEmpty() {
  return ContainerUtil.isEmpty(myMatches);
}
/**
 * Moves the given consecutive statements into a fresh {@code {...}} block statement
 * (replacing them in their parent) and returns the elements now inside the block.
 * Local variables declared in the fragment but reused after it are re-declared
 * around the block so the following code still resolves.
 */
@NotNull
private static PsiElement[] wrapWithCodeBlock(@NotNull PsiElement[] elements, @NotNull InputVariables inputVariables) {
  PsiElement fragmentStart = elements[0];
  PsiElement fragmentEnd = elements[elements.length - 1];
  // Must be computed before the fragment is moved out of its original context.
  List<ReusedLocalVariable> reusedLocalVariables =
    ReusedLocalVariablesFinder.findReusedLocalVariables(fragmentStart, fragmentEnd, Collections.emptySet(), inputVariables);
  PsiElement parent = fragmentStart.getParent();
  PsiElementFactory factory = JavaPsiFacade.getElementFactory(fragmentStart.getProject());
  PsiBlockStatement statement = (PsiBlockStatement)factory.createStatementFromText("{}", parent);
  statement.getCodeBlock().addRange(fragmentStart, fragmentEnd);
  statement = (PsiBlockStatement)parent.addBefore(statement, fragmentStart);
  parent.deleteChildRange(fragmentStart, fragmentEnd);
  PsiCodeBlock codeBlock = statement.getCodeBlock();
  PsiElement[] elementsInBlock = codeBlock.getChildren();
  LOG.assertTrue(elementsInBlock.length >= elements.length + 2, "wrapper block length is too small");
  // Strip the leading '{' and trailing '}' tokens, keeping only the moved elements.
  elementsInBlock = Arrays.copyOfRange(elementsInBlock, 1, elementsInBlock.length - 1);
  declareReusedLocalVariables(reusedLocalVariables, statement, factory);
  return elementsInBlock;
}
/**
 * Re-declares variables that are used after the wrapped fragment. For variables
 * whose value must survive, a temporary is declared before the block and assigned
 * inside it; every reused variable is then declared again right after the block.
 */
private static void declareReusedLocalVariables(@NotNull List<ReusedLocalVariable> reusedLocalVariables,
                                                @NotNull PsiBlockStatement statement,
                                                @NotNull PsiElementFactory factory) {
  PsiElement parent = statement.getParent();
  PsiCodeBlock codeBlock = statement.getCodeBlock();
  PsiStatement addAfter = statement;
  for (ReusedLocalVariable variable : reusedLocalVariables) {
    if (variable.reuseValue()) {
      // Temp declaration before the block; its value is assigned inside, before '}'.
      PsiStatement declarationBefore = factory.createStatementFromText(variable.getTempDeclarationText(), codeBlock.getRBrace());
      parent.addBefore(declarationBefore, statement);
      PsiStatement assignment = factory.createStatementFromText(variable.getAssignmentText(), codeBlock.getRBrace());
      codeBlock.addBefore(assignment, codeBlock.getRBrace());
    }
    // Fresh declaration after the block keeps later references resolvable;
    // 'addAfter' preserves the original declaration order.
    PsiStatement declarationAfter = factory.createStatementFromText(variable.getDeclarationText(), statement);
    parent.addAfter(declarationAfter, addAfter);
    addAfter = declarationAfter;
  }
}
/**
 * Wraps a single extracted expression into the body of an immediately-invoked
 * anonymous-class method ("new Object() { T wrapperMethod() { ... } }.wrapperMethod()")
 * so that statement-oriented extraction machinery can process it.
 * Effectively-local variables are declared inside the wrapper body as well.
 *
 * @return the expression inside the wrapper method's body, or null when the copy is
 *         not a single typed expression or has no enclosing class
 */
@Nullable
private static PsiExpression wrapExpressionWithCodeBlock(@NotNull PsiElement[] copy,
                                                         @NotNull ExtractMethodProcessor originalProcessor) {
  if (copy.length != 1 || !(copy[0] instanceof PsiExpression)) return null;
  PsiExpression expression = (PsiExpression)copy[0];
  PsiType type = expression.getType();
  if (type == null || PsiType.NULL.equals(type)) return null;
  PsiElement parent = expression.getParent();
  PsiElementFactory factory = JavaPsiFacade.getElementFactory(expression.getProject());
  PsiClass parentClass = PsiTreeUtil.getParentOfType(expression, PsiClass.class);
  if (parentClass == null) return null;
  PsiElement parentClassStart = parentClass.getLBrace();
  if (parentClassStart == null) return null;
  // It's syntactically correct to write "new Object() {void foo(){}}.foo()" - see JLS 15.9.5
  String wrapperBodyText = (PsiType.VOID.equals(type) ? "" : "return ") + expression.getText() + ";";
  String wrapperClassImmediateCallText = "new " + CommonClassNames.JAVA_LANG_OBJECT + "() { " +
                                         type.getCanonicalText() + " wrapperMethod() {" + wrapperBodyText + "} " +
                                         "}.wrapperMethod()";
  PsiExpression wrapperClassImmediateCall = factory.createExpressionFromText(wrapperClassImmediateCallText, parent);
  wrapperClassImmediateCall = (PsiExpression)expression.replace(wrapperClassImmediateCall);
  PsiMethod method = PsiTreeUtil.findChildOfType(wrapperClassImmediateCall, PsiMethod.class);
  LOG.assertTrue(method != null, "wrapper class method is null");
  PsiCodeBlock body = method.getBody();
  LOG.assertTrue(body != null, "wrapper class method's body is null");
  PsiStatement[] statements = body.getStatements();
  LOG.assertTrue(statements.length == 1, "wrapper class method's body statement count");
  PsiStatement bodyStatement = statements[0];
  // Declare effectively-local variables inside the wrapper so references resolve.
  Set<PsiVariable> effectivelyLocal = originalProcessor.getEffectivelyLocalVariables();
  for (PsiVariable variable : effectivelyLocal) {
    String name = variable.getName();
    LOG.assertTrue(name != null, "effectively local variable's name is null");
    PsiDeclarationStatement declaration = factory.createVariableDeclarationStatement(name, variable.getType(), null);
    body.addBefore(declaration, bodyStatement);
  }
  // Dig the wrapped expression back out of the generated statement.
  PsiExpression wrapped = null;
  if (PsiType.VOID.equals(type) && bodyStatement instanceof PsiExpressionStatement) {
    wrapped = ((PsiExpressionStatement)bodyStatement).getExpression();
  }
  else if (bodyStatement instanceof PsiReturnStatement) {
    wrapped = ((PsiReturnStatement)bodyStatement).getReturnValue();
  }
  else {
    LOG.error("Unexpected statement in expression code block " + bodyStatement);
  }
  if (wrapped != null) {
    // this key is not copyable so replace() doesn't preserve it - have to do it here
    wrapped.putUserData(ElementToWorkOn.REPLACE_NON_PHYSICAL, expression.getUserData(ElementToWorkOn.REPLACE_NON_PHYSICAL));
  }
  return wrapped;
}
/**
 * For every usage cluster, declares a local variable initialized with the pattern
 * usage's text ("Type pN = <usage>;") right before the fragment's first statement,
 * and replaces all mapped occurrences of the usage in the working copy with the
 * new parameter name.
 *
 * @return mapping from each declared local to its usage cluster
 */
@NotNull
private Map<PsiLocalVariable, ClusterOfUsages> createParameterDeclarations(@NotNull ExtractMethodProcessor originalProcessor,
                                                                           @NotNull Map<PsiExpression, PsiExpression> expressionsMapping,
                                                                           @NotNull Map<PsiExpression, String> predefinedNames) {
  Project project = myElements[0].getProject();
  Map<PsiLocalVariable, ClusterOfUsages> parameterDeclarations = new THashMap<>();
  UniqueNameGenerator generator = originalProcessor.getParameterNameGenerator(myElements[0]);
  PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
  PsiStatement statement =
    myElements[0] instanceof PsiStatement ? (PsiStatement)myElements[0] : PsiTreeUtil.getParentOfType(myElements[0], PsiStatement.class);
  LOG.assertTrue(statement != null, "first statement is null");
  PsiElement parent = statement.getParent();
  LOG.assertTrue(parent instanceof PsiCodeBlock, "first statement's parent isn't a code block");
  for (ClusterOfUsages usages : myUsagesList) {
    ExtractedParameter parameter = usages.myParameter;
    PsiExpression patternUsage = parameter.myPattern.getUsage();
    String initializerText = patternUsage.getText();
    PsiExpression initializer = factory.createExpressionFromText(initializerText, parent);
    // Prefer a caller-supplied name; otherwise derive one from the initializer.
    String predefinedName = predefinedNames.get(patternUsage);
    final SuggestedNameInfo info =
      JavaCodeStyleManager.getInstance(project).suggestVariableName(VariableKind.PARAMETER, predefinedName, initializer, null);
    final String parameterName = generator.generateUniqueName(info.names.length > 0 ? info.names[0] : "p");
    String declarationText = parameter.getLocalVariableTypeText() + " " + parameterName + " = " + initializerText + ";";
    PsiDeclarationStatement paramDeclaration = (PsiDeclarationStatement)factory.createStatementFromText(declarationText, parent);
    paramDeclaration = (PsiDeclarationStatement)parent.addBefore(paramDeclaration, statement);
    PsiLocalVariable localVariable = (PsiLocalVariable)paramDeclaration.getDeclaredElements()[0];
    parameterDeclarations.put(localVariable, usages);
    // Replace every mapped occurrence in the copy with a reference to the new local.
    for (PsiExpression expression : parameter.myPatternUsages) {
      PsiExpression mapped = expressionsMapping.get(expression);
      if (mapped != null) {
        PsiExpression replacement = factory.createExpressionFromText(parameterName, expression);
        mapped.replace(replacement);
      }
    }
  }
  return parameterDeclarations;
}
/**
 * Convenience overload: flattens all clusters' pattern expressions into a single
 * replaceability filter and delegates to the predicate-based variant, recording
 * results directly into the given maps.
 */
private static void collectCopyMapping(@NotNull PsiElement[] pattern,
                                       @NotNull PsiElement[] copy,
                                       @NotNull List<ClusterOfUsages> patternUsages,
                                       @NotNull Map<PsiExpression, PsiExpression> expressions,
                                       @NotNull Map<PsiVariable, PsiVariable> variables) {
  Set<PsiExpression> replaceable = new THashSet<>();
  patternUsages.forEach(usages -> replaceable.addAll(usages.myPatterns));
  collectCopyMapping(pattern, copy, replaceable::contains, expressions::put, variables::put);
}
/**
 * Walks two parallel element arrays (pattern and its copy), pairing children by
 * position after filtering out insignificant elements, and records expression and
 * variable correspondences via the given consumers.
 */
public static void collectCopyMapping(@NotNull PsiElement[] pattern,
                                      @NotNull PsiElement[] copy,
                                      @NotNull Predicate<PsiExpression> isReplaceablePattern,
                                      @NotNull BiConsumer<PsiExpression, PsiExpression> expressionsMapping,
                                      @NotNull BiConsumer<PsiVariable, PsiVariable> variablesMapping) {
  PsiElement[] filteredPattern = DuplicatesFinder.getDeeplyFilteredElements(pattern);
  PsiElement[] filteredCopy = DuplicatesFinder.getDeeplyFilteredElements(copy);
  // A length mismatch means the copy is an extracted parameter; don't descend further.
  if (filteredPattern.length == filteredCopy.length) {
    for (int i = 0; i < filteredPattern.length; i++) {
      collectCopyMapping(filteredPattern[i], filteredCopy[i], isReplaceablePattern, expressionsMapping, variablesMapping);
    }
  }
}
/**
 * Recursively pairs one pattern element with its copy. Replaceable expressions are
 * recorded and not descended into; reference elements map their resolved variables
 * and recurse only into qualifiers; variable declarations map to each other;
 * everything else recurses into the children arrays.
 */
private static void collectCopyMapping(@NotNull PsiElement pattern,
                                       @NotNull PsiElement copy,
                                       @NotNull Predicate<PsiExpression> isReplaceablePattern,
                                       @NotNull BiConsumer<PsiExpression, PsiExpression> expressionsMapping,
                                       @NotNull BiConsumer<PsiVariable, PsiVariable> variablesMapping) {
  if (pattern == copy) return;
  // A replaceable expression terminates the recursion: it will become a parameter.
  if (pattern instanceof PsiExpression && copy instanceof PsiExpression && isReplaceablePattern.test((PsiExpression)pattern)) {
    expressionsMapping.accept((PsiExpression)pattern, (PsiExpression)copy);
    return;
  }
  if (pattern instanceof PsiJavaCodeReferenceElement && copy instanceof PsiJavaCodeReferenceElement) {
    PsiElement resolvedPattern = ((PsiJavaCodeReferenceElement)pattern).resolve();
    PsiElement resolvedCopy = ((PsiJavaCodeReferenceElement)copy).resolve();
    // Only record when the references resolve to two distinct variables.
    if (resolvedPattern != resolvedCopy && resolvedPattern instanceof PsiVariable && resolvedCopy instanceof PsiVariable) {
      variablesMapping.accept((PsiVariable)resolvedPattern, (PsiVariable)resolvedCopy);
    }
    PsiElement patternQualifier = ((PsiJavaCodeReferenceElement)pattern).getQualifier();
    PsiElement copyQualifier = ((PsiJavaCodeReferenceElement)copy).getQualifier();
    if (patternQualifier != null && copyQualifier != null) {
      collectCopyMapping(patternQualifier, copyQualifier, isReplaceablePattern, expressionsMapping, variablesMapping);
    }
    return;
  }
  if (pattern instanceof PsiVariable && copy instanceof PsiVariable) {
    variablesMapping.accept((PsiVariable)pattern, (PsiVariable)copy);
  }
  collectCopyMapping(pattern.getChildren(), copy.getChildren(), isReplaceablePattern, expressionsMapping, variablesMapping);
}
/**
 * Filters out nulls, whitespace, comments and empty statements from the given
 * elements, unwrapping parenthesized expressions to their inner expression.
 *
 * @return a new array containing only significant elements (never contains null)
 */
@NotNull
private static PsiElement[] getFilteredElements(@NotNull PsiElement[] elements) {
  if (elements.length == 0) {
    return elements;
  }
  ArrayList<PsiElement> result = new ArrayList<>(elements.length);
  for (PsiElement e : elements) {
    if (e == null || e instanceof PsiWhiteSpace || e instanceof PsiComment || e instanceof PsiEmptyStatement) {
      continue;
    }
    if (e instanceof PsiParenthesizedExpression) {
      e = PsiUtil.skipParenthesizedExprDown((PsiParenthesizedExpression)e);
      if (e == null) {
        // Incomplete code like "()" has no inner expression; previously a null was
        // added to the result here, contradicting the null-skip above.
        continue;
      }
    }
    result.add(e);
  }
  return result.toArray(PsiElement.EMPTY_ARRAY);
}
/**
 * Groups the equivalent pattern-side usages of one extracted parameter together
 * with the corresponding parameter of every duplicate match.
 */
private static class ClusterOfUsages {
  // All pattern-side occurrences of the parameter's expression.
  @NotNull private final Set<PsiExpression> myPatterns;
  // The extracted parameter of each match that maps onto this cluster.
  @NotNull private final Map<Match, ExtractedParameter> myParameters;
  // The parameter this cluster was created from.
  @NotNull private final ExtractedParameter myParameter;
  // Minimal text offset among the pattern usages; used to order clusters.
  private final int myFirstOffset;

  public ClusterOfUsages(@NotNull ExtractedParameter parameter) {
    myPatterns = parameter.myPatternUsages;
    myParameters = new THashMap<>();
    myParameter = parameter;
    myFirstOffset = myPatterns.stream().mapToInt(PsiElement::getTextOffset).min().orElse(0);
  }

  public void putParameter(Match match, ExtractedParameter parameter) {
    myParameters.put(match, parameter);
  }

  /** @return true when the parameter covers exactly the same pattern usages as this cluster */
  public boolean isEquivalent(ExtractedParameter parameter) {
    return myPatterns.equals(parameter.myPatternUsages);
  }

  /** @return true when any of the parameter's pattern usages is already registered in the map */
  public static boolean isPresent(Map<PsiExpression, ClusterOfUsages> usagesMap, @NotNull ExtractedParameter parameter) {
    return parameter.myPatternUsages.stream().anyMatch(usagesMap::containsKey);
  }
}
}
| |
/*
* Swift Parallel Scripting Language (http://swift-lang.org)
* Code from Java CoG Kit Project (see notice below) with modifications.
*
* Copyright 2005-2014 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//----------------------------------------------------------------------
//This code is developed as part of the Java CoG Kit project
//The terms of the license can be found at http://www.cogkit.org/license
//This message may not be removed or altered.
//----------------------------------------------------------------------
/*
* Created on Nov 20, 2013
*/
package org.griphyn.vdl.mapping.file;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Code generator: recursively scans a base directory for {@code *.params}
 * definition files and, for each, emits a {@code *Params.java} class extending
 * {@code MappingParamSet} (or the class named by an {@code @extends} directive).
 */
public class MappingParamFileGenerator {

    /**
     * Entry point. Expects a single argument: the base directory to scan.
     * Exits with status 0 on success, 1 on any failure.
     */
    public static void main(String[] args) {
        try {
            generate(args[0]);
            System.exit(0);
        }
        catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    private static void generate(String basedir) throws IOException {
        File bd = new File(basedir);
        if (!bd.isDirectory()) {
            throw new IllegalArgumentException("Not a directory: " + basedir);
        }
        generateRecursive(bd, bd);
    }

    /** Walks the tree below {@code dir}, generating code for every *.params file. */
    private static void generateRecursive(File dir, File base) throws IOException {
        File[] ps = dir.listFiles();
        if (ps == null) {
            // File.listFiles() returns null when the directory cannot be read;
            // previously this caused a NullPointerException in the loop below.
            throw new IOException("Cannot list directory: " + dir);
        }
        for (File f : ps) {
            if (f.isDirectory()) {
                generateRecursive(f, base);
            }
            else if (f.getName().endsWith(".params")) {
                generate(f, base);
            }
        }
    }

    /** One parameter parsed from a .params line: "type name [= defaultValue]". */
    private static class Param {
        public String type, name, value;
        // Internal params keep their concrete field type and are excluded from the
        // generated dynamic setter, toString and unwrap code.
        public boolean internal;

        public Param(String type, String name, String value, boolean internal) {
            this.type = type;
            this.name = name;
            this.value = value;
            this.internal = internal;
        }
    }

    /**
     * Parses one .params file (comments '#', directives '@import', '@extends',
     * '@internal', '@suppressUnusedWarning', or parameter lines) and writes the
     * corresponding generated class next to it.
     */
    private static void generate(File f, File base) throws IOException {
        List<Param> params = new ArrayList<Param>();
        Map<String, Object> opts = new HashMap<String, Object>();
        List<String> imports = new ArrayList<String>();
        opts.put("import", imports);
        BufferedReader br = new BufferedReader(new FileReader(f));
        try {
            String line = br.readLine();
            while (line != null) {
                if (line.startsWith("#")) {
                    // comment
                }
                else if (line.trim().isEmpty()) {
                    // empty line
                }
                else if (line.startsWith("@")) {
                    String[] s = line.substring(1).trim().split("\\s+", 2);
                    if (s[0].equals("import")) {
                        imports.add(s[1]);
                    }
                    else if (s[0].equals("extends")) {
                        opts.put("extends", s[1]);
                    }
                    else if (s[0].equals("internal")) {
                        params.add(buildParam(s[1], true));
                    }
                    else if (s[0].equals("suppressUnusedWarning")) {
                        opts.put("suppressUnusedWarning", Boolean.TRUE);
                    }
                    else {
                        throw new IllegalArgumentException("Unknown directive: '" + s[0] + "'");
                    }
                }
                else {
                    params.add(buildParam(line, false));
                }
                line = br.readLine();
            }
        }
        finally {
            // Close even when parsing throws; previously the reader leaked on error.
            br.close();
        }
        // Package name = path of the containing directory relative to the base dir.
        String pkg = f.getParentFile().getAbsolutePath().substring(
            base.getAbsolutePath().length() + 1).replace(File.separatorChar, '.');
        File nf = new File(makeFileName(f));
        writeFile(nf, pkg, params, opts);
    }

    /** Parses "type name" or "type name = value" into a Param. */
    private static Param buildParam(String line, boolean internal) {
        String value = null;
        if (line.contains("=")) {
            String[] s = line.trim().split("=", 2);
            value = s[1].trim();
            line = s[0];
        }
        String[] s = line.trim().split("\\s+");
        // Everything but the last token is the (possibly generic, multi-token) type.
        return new Param(join(s, " ", 0, s.length - 1), s[s.length - 1], value, internal);
    }

    /** Joins s[start..end) with the given separator. */
    private static String join(String[] s, String sep, int start, int end) {
        StringBuilder sb = new StringBuilder();
        for (int i = start; i < end; i++) {
            if (i != start) {
                sb.append(sep);
            }
            sb.append(s[i]);
        }
        return sb.toString();
    }

    // Imports emitted into every generated file.
    private static final List<String> IMPORTS = Arrays.asList("java.util.Arrays", "java.util.Collection",
        "java.util.List", "org.griphyn.vdl.mapping.nodes.AbstractDataNode", "org.griphyn.vdl.mapping.MappingParamSet");

    /** Writes the complete generated class (header, fields, accessors, helpers). */
    private static void writeFile(File nf, String pkg, List<Param> params, Map<String, Object> opts) throws IOException {
        String name = nf.getName().substring(0, nf.getName().lastIndexOf('.'));
        BufferedWriter bw = new BufferedWriter(new FileWriter(nf));
        try {
            int year = Calendar.getInstance().get(Calendar.YEAR);
            bw.write("/*\n");
            bw.write(" * Swift Parallel Scripting Language (http://swift-lang.org)\n");
            bw.write(" *\n");
            bw.write(" * Copyright 2013-" + year + " University of Chicago\n");
            bw.write(" *\n");
            bw.write(" * Licensed under the Apache License, Version 2.0 (the \"License\");\n");
            bw.write(" * you may not use this file except in compliance with the License.\n");
            bw.write(" * You may obtain a copy of the License at\n");
            bw.write(" *\n");
            bw.write(" * http://www.apache.org/licenses/LICENSE-2.0\n");
            bw.write(" *\n");
            bw.write(" * Unless required by applicable law or agreed to in writing, software\n");
            bw.write(" * distributed under the License is distributed on an \"AS IS\" BASIS,\n");
            bw.write(" * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n");
            bw.write(" * See the License for the specific language governing permissions and\n");
            bw.write(" * limitations under the License.\n");
            bw.write(" */\n");
            bw.write("\n");
            bw.write("/*\n");
            bw.write(" * This file is automatically generated\n");
            bw.write(" */\n");
            bw.write("package ");
            bw.write(pkg);
            bw.write(";\n\n");
            for (String imp : IMPORTS) {
                bw.write("import ");
                bw.write(imp);
                bw.write(";\n");
            }
            @SuppressWarnings("unchecked")
            List<String> l = (List<String>) opts.get("import");
            for (String imp : l) {
                bw.write("import ");
                bw.write(imp);
                bw.write(";\n");
            }
            if (!opts.containsKey("extends")) {
                opts.put("extends", "MappingParamSet");
            }
            bw.write("\n\n");
            if (opts.containsKey("suppressUnusedWarning")) {
                bw.write("@SuppressWarnings(\"unused\")\n");
            }
            bw.write("public class ");
            bw.write(name);
            bw.write(" extends ");
            bw.write(opts.get("extends").toString());
            bw.write(" {\n\n");
            bw.write("\tpublic static final List<String> NAMES = Arrays.asList(");
            join(bw, params, "\"", "\", \"", "\"", false);
            bw.write(");\n\n");
            for (Param p : params) {
                bw.write("\tprivate ");
                if (p.internal) {
                    bw.write(p.type);
                }
                else {
                    // Non-internal fields are Object so they can hold unresolved nodes.
                    bw.write("Object");
                }
                bw.write(" ");
                bw.write(p.name);
                if (p.value != null) {
                    bw.write(" = ");
                    bw.write(p.value);
                }
                bw.write(";\n");
            }
            bw.write("\n");
            bw.write("\t@Override\n");
            bw.write("\tpublic Collection<String> getNames() {\n");
            bw.write("\t\treturn NAMES;\n");
            bw.write("\t}\n\n");
            generateGettersAndSetters(bw, params);
            if (!allInternal(params)) {
                generateDynamicSetter(bw, params);
                generateGetFirstOpen(bw, params);
                generateToString(bw, params);
                generateUnwrap(bw, params);
            }
            bw.write("\n}\n");
        }
        finally {
            // Close even when a write throws; previously the writer leaked on error.
            bw.close();
        }
    }

    /** @return true when every param is internal (no dynamic machinery needed). */
    private static boolean allInternal(List<Param> params) {
        for (Param p : params) {
            if (!p.internal) {
                return false;
            }
        }
        return true;
    }

    private static void generateToString(BufferedWriter bw, List<Param> params) throws IOException {
        bw.write("\t@Override\n");
        bw.write("\tpublic void toString(StringBuilder sb) {\n");
        for (Param p : params) {
            if (!p.internal) {
                bw.write(String.format("\t\taddParam(sb, \"%s\", %s);\n", p.name, p.name));
            }
        }
        bw.write("\t\tsuper.toString(sb);\n");
        bw.write("\t}\n\n");
    }

    private static void generateUnwrap(BufferedWriter bw, List<Param> params) throws IOException {
        bw.write("\t@Override\n");
        bw.write("\tpublic void unwrapPrimitives() {\n");
        for (Param p : params) {
            if (!p.internal) {
                if (p.value == null) {
                    // mandatory
                    bw.write(String.format("\t\tif (%s == null) {\n", p.name));
                    bw.write(String.format("\t\t\tthrow new IllegalArgumentException(\"Missing required argument '%s'\");\n", p.name));
                    bw.write("\t\t}\n");
                }
                if (!p.type.equals("DSHandle")) {
                    bw.write(String.format("\t\t%s = unwrap(%s, %s.class);\n", p.name, p.name, p.type));
                }
            }
        }
        bw.write("\t\tsuper.unwrapPrimitives();\n");
        bw.write("\t}\n\n");
    }

    private static void generateGettersAndSetters(BufferedWriter bw, List<Param> params) throws IOException {
        for (Param p : params) {
            bw.write(String.format("\tpublic void set%s(%s %s) {\n", capitalize(p.name), p.type, p.name));
            bw.write(String.format("\t\tthis.%s = %s;\n", p.name, p.name));
            bw.write("\t}\n\n");
            bw.write(String.format("\tpublic %s get%s() {\n", p.type, capitalize(p.name)));
            if (p.type.equals("Object") || p.internal) {
                bw.write(String.format("\t\treturn %s;\n", p.name));
            }
            else {
                // Non-internal fields are stored as Object, so getters must cast.
                bw.write(String.format("\t\treturn (%s) %s;\n", p.type, p.name));
            }
            bw.write("\t}\n\n");
        }
    }

    /** Emits set0(String, Object): an if/else chain over non-internal param names. */
    private static void generateDynamicSetter(BufferedWriter bw, List<Param> params) throws IOException {
        bw.write("\t@Override\n");
        bw.write("\tprotected boolean set0(String name, Object value) {\n");
        boolean first = true;
        for (Param p : params) {
            if (p.internal) {
                continue;
            }
            bw.write("\t\t");
            if (first) {
                first = false;
            }
            else {
                bw.write("else ");
            }
            bw.write(String.format("if (name.equals(\"%s\")) {\n", p.name));
            bw.write(String.format("\t\t\tthis.%s = value;\n", p.name));
            bw.write("\t\t}\n");
        }
        bw.write("\t\telse {\n");
        bw.write("\t\t\treturn super.set0(name, value);\n");
        bw.write("\t\t}\n");
        bw.write("\t\treturn true;\n");
        bw.write("\t}\n\n");
    }

    /** Emits getFirstOpen(): returns the first non-internal param that is still open. */
    private static void generateGetFirstOpen(BufferedWriter bw, List<Param> params) throws IOException {
        bw.write("\t@Override\n");
        bw.write("\tpublic AbstractDataNode getFirstOpen() {\n");
        boolean first = true;
        for (Param p : params) {
            if (p.internal) {
                continue;
            }
            bw.write("\t\t");
            if (first) {
                first = false;
            }
            else {
                bw.write("else ");
            }
            bw.write(String.format("if (checkOpen(%s)) {\n", p.name));
            bw.write(String.format("\t\t\treturn (AbstractDataNode) %s;\n", p.name));
            bw.write("\t\t}\n");
        }
        bw.write("\t\telse {\n");
        bw.write("\t\t\treturn super.getFirstOpen();\n");
        bw.write("\t\t}\n");
        bw.write("\t}\n\n");
    }

    /** Uppercases the first character; declared to return String (was Object). */
    private static String capitalize(String n) {
        return Character.toUpperCase(n.charAt(0)) + n.substring(1);
    }

    /** Writes the param names joined as before+name(+between+name)*+after; no-op when empty. */
    private static void join(BufferedWriter bw, List<Param> params, String before, String between,
                             String after, boolean internals) throws IOException {
        List<String> l = new ArrayList<String>();
        for (Param p : params) {
            if (internals || !p.internal) {
                l.add(p.name);
            }
        }
        if (l.isEmpty()) {
            return;
        }
        bw.write(before);
        bw.write(l.get(0));
        if (l.size() != 1) {
            for (int i = 1; i < l.size(); i++) {
                bw.write(between);
                bw.write(l.get(i));
            }
        }
        bw.write(after);
    }

    /** Replaces the file's extension with "Params.java". */
    private static String makeFileName(File f) {
        String abs = f.getAbsolutePath();
        int i = abs.lastIndexOf('.');
        return abs.substring(0, i) + "Params.java";
    }
}
| |
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeInsight.daemon.DaemonBundle;
import com.intellij.ide.PowerSaveMode;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.ui.HintHint;
import com.intellij.ui.components.panels.NonOpaquePanel;
import com.intellij.ui.components.panels.VerticalBox;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.containers.HashMap;
import com.intellij.util.containers.hash.LinkedHashMap;
import com.intellij.util.ui.AwtVisitor;
import org.intellij.lang.annotations.Language;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.Map;
/**
* User: cdr
*/
public class TrafficProgressPanel extends JPanel {
// Progress bars run 0..MAX; values are rendered as percentages.
private static final int MAX = 100;
private static final String MAX_TEXT = "100%";
private static final String MIN_TEXT = "0%";
// Bottom label holding the error/warning statistics text.
private final JLabel statistics = new JLabel();
// Per-pass (progress bar, percentage label) pairs, kept in pass order.
private final Map<ProgressableTextEditorHighlightingPass, Pair<JProgressBar, JLabel>> passes = new LinkedHashMap<ProgressableTextEditorHighlightingPass, Pair<JProgressBar, JLabel>>();
private final Map<JProgressBar, JLabel> myProgressToText = new HashMap<JProgressBar, JLabel>();
// Headline status text ("Analysis completed", etc.).
private final JLabel statusLabel = new JLabel();
private final TrafficLightRenderer myTrafficLightRenderer;
// Grid of "pass name / progress bar / percentage" rows.
private final JPanel myPassStatuses = new JPanel();
private final JPanel myEmptyPassStatuses = new NonOpaquePanel();
private final Wrapper myPassStatusesContainer = new Wrapper();
// Styling context of the surrounding hint (fonts/colors).
private final HintHint myHintHint;
/**
 * Builds the traffic-light popup panel. A fake status with several passes at 100%
 * is laid out first so the panel is pre-sized large enough for typical content.
 */
public TrafficProgressPanel(TrafficLightRenderer trafficLightRenderer, Editor editor, HintHint hintHint) {
  myHintHint = hintHint;
  myTrafficLightRenderer = trafficLightRenderer;
  setLayout(new BorderLayout());
  VerticalBox center = new VerticalBox();
  add(center, BorderLayout.NORTH);
  center.add(statusLabel);
  center.add(new Separator());
  center.add(Box.createVerticalStrut(6));
  // Fake "large enough" status: used only to reserve layout space up front.
  TrafficLightRenderer.DaemonCodeAnalyzerStatus fakeStatusLargeEnough = new TrafficLightRenderer.DaemonCodeAnalyzerStatus();
  fakeStatusLargeEnough.errorCount = new int[]{1,1,1,1};
  Project project = trafficLightRenderer.getProject();
  PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
  fakeStatusLargeEnough.passStati = new ArrayList<ProgressableTextEditorHighlightingPass>();
  for (int i=0; i<3;i++) {
    // Dummy passes that do no work; only their labels/bars matter for sizing.
    fakeStatusLargeEnough.passStati.add(new ProgressableTextEditorHighlightingPass(project, null, DaemonBundle.message("pass.wolf"), psiFile, false) {
      @Override
      protected void collectInformationWithProgress(ProgressIndicator progress) {
      }
      @Override
      protected void applyInformationWithProgress() {
      }
    });
  }
  rebuildPassesPanel(fakeStatusLargeEnough);
  // Show every fake bar as complete so the widest text ("100%") is measured.
  for (Pair<JProgressBar, JLabel> pair : passes.values()) {
    JProgressBar bar = pair.first;
    bar.setMaximum(MAX);
    JLabel label = pair.second;
    label.setText(MAX_TEXT);
  }
  center.add(myPassStatusesContainer);
  add(statistics, BorderLayout.SOUTH);
  updatePanel(fakeStatusLargeEnough, true);
  hintHint.initStyle(this, true);
}
/**
 * A one-pixel horizontal rule drawn in the hint's text foreground color,
 * used to divide the status line from the pass list.
 */
private class Separator extends NonOpaquePanel {
  @Override
  protected void paintComponent(Graphics g) {
    Insets insets = getInsets();
    if (insets == null) {
      insets = new Insets(0, 0, 0, 0);
    }
    g.setColor(myHintHint.getTextForeground());
    g.drawLine(insets.left, insets.top, getWidth() - insets.left - insets.right, insets.top);
  }
  @Override
  public Dimension getPreferredSize() {
    return new Dimension(1, 1);
  }
  @Override
  public Dimension getMinimumSize() {
    return new Dimension(1, 1);
  }
}
/**
 * Rebuilds the grid of per-pass rows (name label, progress bar, percentage label)
 * from the given status, discarding any previously shown rows.
 */
private void rebuildPassesPanel(TrafficLightRenderer.DaemonCodeAnalyzerStatus status) {
  myPassStatuses.removeAll();
  myPassStatuses.setLayout(new GridBagLayout());
  passes.clear();
  GridBagConstraints c = new GridBagConstraints();
  c.gridy = 0;
  c.fill = GridBagConstraints.HORIZONTAL;
  for (ProgressableTextEditorHighlightingPass pass : status.passStati) {
    JLabel label = new JLabel(pass.getPresentableName() + ": ");
    label.setHorizontalTextPosition(SwingConstants.RIGHT);
    JProgressBar progressBar = new JProgressBar(0, MAX);
    // Mac-style compact progress bar.
    progressBar.putClientProperty("JComponent.sizeVariant", "mini");
    JLabel percLabel = new JLabel();
    passes.put(pass, Pair.create(progressBar, percLabel));
    myProgressToText.put(progressBar, percLabel);
    c.gridx = 0;
    myPassStatuses.add(label, c);
    c.gridx = 1;
    myPassStatuses.add(progressBar, c);
    c.gridx = 2;
    c.weightx = 1;
    myPassStatuses.add(percLabel, c);
    c.gridy++;
  }
  myHintHint.initStyle(myPassStatuses, true);
  statusLabel.setFont(statusLabel.getFont().deriveFont(Font.BOLD));
}
  /**
   * Refreshes the status label, per-pass progress bars and the error-statistics
   * HTML text from the given daemon status.
   *
   * @param status current analyzer status; the method is a no-op when {@code null}
   * @param isFake {@code true} when called with a synthetic status used only to
   *               size the panel; the real content is then swapped for an empty
   *               placeholder with the same preferred size
   */
  public void updatePanel(TrafficLightRenderer.DaemonCodeAnalyzerStatus status, boolean isFake) {
    if (status == null) return;
    try {
      // Pick the headline + bar state for the current analyzer mode.
      if (PowerSaveMode.isEnabled()) {
        statusLabel.setText("Code analysis is disabled in power save mode");
        myPassStatuses.setVisible(false);
        statistics.setText("");
      } else if (status.errorAnalyzingFinished) {
        statusLabel.setText(DaemonBundle.message("analysis.completed"));
        myPassStatuses.setVisible(true);
        setPassesEnabled(false, Boolean.TRUE); // force all bars to 100%
      } else if (!status.enabled) {
        statusLabel.setText("Code analysis has been suspended");
        myPassStatuses.setVisible(true);
        setPassesEnabled(false, Boolean.FALSE); // force all bars to 0%
        statistics.setText("");
      } else if (status.noHighlightingRoots != null && status.noHighlightingRoots.length == status.rootsNumber) {
        statusLabel.setText(DaemonBundle.message("analysis.hasnot.been.run"));
        myPassStatuses.setVisible(true);
        setPassesEnabled(false, Boolean.FALSE);
        statistics.setText("");
      } else {
        statusLabel.setText(DaemonBundle.message("performing.code.analysis"));
        myPassStatuses.setVisible(true);
        setPassesEnabled(true, null); // leave bar values to the loop below
      }
      // NOTE(review): this equality check assumes `passes` iterates its keys in the
      // same order as status.passStati (i.e. an insertion-ordered map) — verify.
      if (!status.passStati.equals(new ArrayList<ProgressableTextEditorHighlightingPass>(passes.keySet()))) {
        // passes set has changed
        rebuildPassesPanel(status);
      }
      // Push each pass's fractional progress into its bar and percentage label.
      for (ProgressableTextEditorHighlightingPass pass : status.passStati) {
        double progress = pass.getProgress();
        Pair<JProgressBar, JLabel> pair = passes.get(pass);
        JProgressBar progressBar = pair.first;
        int percent = (int)Math.round(progress * MAX);
        progressBar.setValue(percent);
        JLabel percentage = pair.second;
        percentage.setText(percent + "%");
      }
      int currentSeverityErrors = 0;
      @Language("HTML")
      String text = "<html><body>";
      // Walk severities from highest index (most severe) down, one line per non-empty bucket.
      for (int i = status.errorCount.length - 1; i >= 0; i--) {
        if (status.errorCount[i] > 0) {
          final HighlightSeverity severity = SeverityRegistrar.getInstance(myTrafficLightRenderer.getProject()).getSeverityByIndex(i);
          String name = status.errorCount[i] > 1 ? StringUtil.pluralize(severity.toString().toLowerCase()) : severity.toString().toLowerCase();
          text += status.errorAnalyzingFinished
                  ? DaemonBundle.message("errors.found", status.errorCount[i], name)
                  : DaemonBundle.message("errors.found.so.far", status.errorCount[i], name);
          text += "<br>";
          currentSeverityErrors += status.errorCount[i];
        }
      }
      if (currentSeverityErrors == 0) {
        // "<br>" is appended only in the still-analyzing branch (ternary binds it to the else arm).
        text += status.errorAnalyzingFinished
                ? DaemonBundle.message("no.errors.or.warnings.found")
                : DaemonBundle.message("no.errors.or.warnings.found.so.far") + "<br>";
      }
      statistics.setText(text);
    }
    finally {
      if (isFake) {
        // Size the empty placeholder to the real panel so the popup does not resize.
        myEmptyPassStatuses.setPreferredSize(myPassStatuses.getPreferredSize());
        myPassStatusesContainer.setContent(myEmptyPassStatuses);
      } else {
        myPassStatusesContainer.setContent(myPassStatuses);
      }
    }
  }
private void setPassesEnabled(final boolean enabled, final Boolean completed) {
new AwtVisitor(myPassStatuses) {
@Override
public boolean visit(Component component) {
if (component instanceof JProgressBar) {
JProgressBar progress = (JProgressBar)component;
progress.setEnabled(enabled);
if (completed != null) {
if (completed) {
progress.setValue(MAX);
myProgressToText.get(progress).setText(MAX_TEXT);
} else {
progress.setValue(0);
myProgressToText.get(progress).setText(MIN_TEXT);
}
}
}
return false;
}
};
}
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.consumer.tri;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.ByteConsumer;
import at.gridtec.lambda4j.consumer.bi.BiByteConsumer;
import at.gridtec.lambda4j.function.conversion.BooleanToByteFunction;
import at.gridtec.lambda4j.function.conversion.CharToByteFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToByteFunction;
import at.gridtec.lambda4j.function.conversion.FloatToByteFunction;
import at.gridtec.lambda4j.function.conversion.IntToByteFunction;
import at.gridtec.lambda4j.function.conversion.LongToByteFunction;
import at.gridtec.lambda4j.function.conversion.ShortToByteFunction;
import at.gridtec.lambda4j.function.to.ToByteFunction;
import at.gridtec.lambda4j.operator.unary.ByteUnaryOperator;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Objects;
/**
 * Represents an operation that accepts three {@code byte}-valued input arguments and returns no result.
 * This is a primitive specialization of {@link TriConsumer}.
 * Unlike most other functional interfaces, {@code TriByteConsumer} is expected to operate via side-effects.
 * <p>
 * This is a {@link FunctionalInterface} whose functional method is {@link #accept(byte, byte, byte)}.
 *
 * @see TriConsumer
 */
@SuppressWarnings("unused")
@FunctionalInterface
public interface TriByteConsumer extends Lambda {
    /**
     * Constructs a {@link TriByteConsumer} based on a lambda expression or a method reference. Thereby the given lambda
     * expression or method reference is returned on an as-is basis to implicitly transform it to the desired type. With
     * this method, it is possible to ensure that the correct type is used from a lambda expression or method reference.
     *
     * @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
     * @return A {@code TriByteConsumer} from the given lambda expression or method reference.
     * @implNote This implementation allows the given argument to be {@code null}, but only if {@code null} is given,
     * {@code null} will be returned.
     * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
     * Expression</a>
     * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
     */
    static TriByteConsumer of(@Nullable final TriByteConsumer expression) {
        return expression;
    }
    /**
     * Calls the given {@link TriByteConsumer} with the given arguments.
     *
     * @param consumer The consumer to be called
     * @param value1 The first argument to the consumer
     * @param value2 The second argument to the consumer
     * @param value3 The third argument to the consumer
     * @throws NullPointerException If given argument is {@code null}
     */
    static void call(@Nonnull final TriByteConsumer consumer, byte value1, byte value2, byte value3) {
        Objects.requireNonNull(consumer);
        consumer.accept(value1, value2, value3);
    }
    /**
     * Creates a {@link TriByteConsumer} which uses the {@code first} parameter of this one as argument for the given
     * {@link ByteConsumer}.
     *
     * @param consumer The consumer which accepts the {@code first} parameter of this one
     * @return A {@code TriByteConsumer} which uses the {@code first} parameter of this one as argument for the
     * given {@code ByteConsumer}.
     * @throws NullPointerException If given argument is {@code null}
     */
    @Nonnull
    static TriByteConsumer onlyFirst(@Nonnull final ByteConsumer consumer) {
        Objects.requireNonNull(consumer);
        return (value1, value2, value3) -> consumer.accept(value1);
    }
    /**
     * Creates a {@link TriByteConsumer} which uses the {@code second} parameter of this one as argument for the given
     * {@link ByteConsumer}.
     *
     * @param consumer The consumer which accepts the {@code second} parameter of this one
     * @return A {@code TriByteConsumer} which uses the {@code second} parameter of this one as argument for the
     * given {@code ByteConsumer}.
     * @throws NullPointerException If given argument is {@code null}
     */
    @Nonnull
    static TriByteConsumer onlySecond(@Nonnull final ByteConsumer consumer) {
        Objects.requireNonNull(consumer);
        return (value1, value2, value3) -> consumer.accept(value2);
    }
    /**
     * Creates a {@link TriByteConsumer} which uses the {@code third} parameter of this one as argument for the given
     * {@link ByteConsumer}.
     *
     * @param consumer The consumer which accepts the {@code third} parameter of this one
     * @return A {@code TriByteConsumer} which uses the {@code third} parameter of this one as argument for the
     * given {@code ByteConsumer}.
     * @throws NullPointerException If given argument is {@code null}
     */
    @Nonnull
    static TriByteConsumer onlyThird(@Nonnull final ByteConsumer consumer) {
        Objects.requireNonNull(consumer);
        return (value1, value2, value3) -> consumer.accept(value3);
    }
    /**
     * Applies this consumer to the given arguments.
     *
     * @param value1 The first argument to the consumer
     * @param value2 The second argument to the consumer
     * @param value3 The third argument to the consumer
     */
    void accept(byte value1, byte value2, byte value3);
    /**
     * Applies this consumer partially to some arguments of this one, producing a {@link BiByteConsumer} as result.
     *
     * @param value1 The first argument to this consumer used to partially apply this function
     * @return A {@code BiByteConsumer} that represents this consumer partially applied with some arguments.
     */
    @Nonnull
    default BiByteConsumer paccept(byte value1) {
        return (value2, value3) -> this.accept(value1, value2, value3);
    }
    /**
     * Applies this consumer partially to some arguments of this one, producing a {@link ByteConsumer} as result.
     *
     * @param value1 The first argument to this consumer used to partially apply this function
     * @param value2 The second argument to this consumer used to partially apply this function
     * @return A {@code ByteConsumer} that represents this consumer partially applied with some arguments.
     */
    @Nonnull
    default ByteConsumer paccept(byte value1, byte value2) {
        return (value3) -> this.accept(value1, value2, value3);
    }
    /**
     * Returns the number of arguments for this consumer.
     *
     * @return The number of arguments for this consumer.
     * @implSpec The default implementation always returns {@code 3}.
     */
    @Nonnegative
    default int arity() {
        return 3;
    }
    /**
     * Returns a composed {@link TriConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result.
     * If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
     *
     * @param <A> The type of the argument to the first given function, and of composed consumer
     * @param <B> The type of the argument to the second given function, and of composed consumer
     * @param <C> The type of the argument to the third given function, and of composed consumer
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriConsumer} that first applies the {@code before} functions to its input, and then
     * applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle every type.
     */
    @Nonnull
    default <A, B, C> TriConsumer<A, B, C> compose(@Nonnull final ToByteFunction<? super A> before1,
            @Nonnull final ToByteFunction<? super B> before2, @Nonnull final ToByteFunction<? super C> before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (a, b, c) -> accept(before1.applyAsByte(a), before2.applyAsByte(b), before3.applyAsByte(c));
    }
    /**
     * Returns a composed {@link TriBooleanConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result. If evaluation of either operation throws an exception, it is relayed to
     * the caller of the composed operation. This method is just convenience, to provide the ability to execute an
     * operation which accepts {@code boolean} input, before this primitive consumer is executed.
     *
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriBooleanConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * boolean}.
     */
    @Nonnull
    default TriBooleanConsumer composeFromBoolean(@Nonnull final BooleanToByteFunction before1,
            @Nonnull final BooleanToByteFunction before2, @Nonnull final BooleanToByteFunction before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriByteConsumer} that first applies the {@code before} operators to
     * its input, and then applies this consumer to the result.
     * If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code byte} input,
     * before this primitive consumer is executed.
     *
     * @param before1 The first operator to apply before this consumer is applied
     * @param before2 The second operator to apply before this consumer is applied
     * @param before3 The third operator to apply before this consumer is applied
     * @return A composed {@code TriByteConsumer} that first applies the {@code before} operators to its input, and then
     * applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * byte}.
     */
    @Nonnull
    default TriByteConsumer composeFromByte(@Nonnull final ByteUnaryOperator before1,
            @Nonnull final ByteUnaryOperator before2, @Nonnull final ByteUnaryOperator before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriCharConsumer} that first applies the {@code before} functions to
     * its input, and then applies this consumer to the result.
     * If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code char} input,
     * before this primitive consumer is executed.
     *
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriCharConsumer} that first applies the {@code before} functions to its input, and then
     * applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * char}.
     */
    @Nonnull
    default TriCharConsumer composeFromChar(@Nonnull final CharToByteFunction before1,
            @Nonnull final CharToByteFunction before2, @Nonnull final CharToByteFunction before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriDoubleConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result. If evaluation of either operation throws an exception, it is relayed to
     * the caller of the composed operation. This method is just convenience, to provide the ability to execute an
     * operation which accepts {@code double} input, before this primitive consumer is executed.
     *
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriDoubleConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * double}.
     */
    @Nonnull
    default TriDoubleConsumer composeFromDouble(@Nonnull final DoubleToByteFunction before1,
            @Nonnull final DoubleToByteFunction before2, @Nonnull final DoubleToByteFunction before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriFloatConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result. If evaluation of either operation throws an exception, it is relayed to
     * the caller of the composed operation. This method is just convenience, to provide the ability to execute an
     * operation which accepts {@code float} input, before this primitive consumer is executed.
     *
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriFloatConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * float}.
     */
    @Nonnull
    default TriFloatConsumer composeFromFloat(@Nonnull final FloatToByteFunction before1,
            @Nonnull final FloatToByteFunction before2, @Nonnull final FloatToByteFunction before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriIntConsumer} that first applies the {@code before} functions to
     * its input, and then applies this consumer to the result.
     * If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code int} input,
     * before this primitive consumer is executed.
     *
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriIntConsumer} that first applies the {@code before} functions to its input, and then
     * applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * int}.
     */
    @Nonnull
    default TriIntConsumer composeFromInt(@Nonnull final IntToByteFunction before1,
            @Nonnull final IntToByteFunction before2, @Nonnull final IntToByteFunction before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriLongConsumer} that first applies the {@code before} functions to
     * its input, and then applies this consumer to the result.
     * If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code long} input,
     * before this primitive consumer is executed.
     *
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriLongConsumer} that first applies the {@code before} functions to its input, and then
     * applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * long}.
     */
    @Nonnull
    default TriLongConsumer composeFromLong(@Nonnull final LongToByteFunction before1,
            @Nonnull final LongToByteFunction before2, @Nonnull final LongToByteFunction before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriShortConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result. If evaluation of either operation throws an exception, it is relayed to
     * the caller of the composed operation. This method is just convenience, to provide the ability to execute an
     * operation which accepts {@code short} input, before this primitive consumer is executed.
     *
     * @param before1 The first function to apply before this consumer is applied
     * @param before2 The second function to apply before this consumer is applied
     * @param before3 The third function to apply before this consumer is applied
     * @return A composed {@code TriShortConsumer} that first applies the {@code before} functions to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * short}.
     */
    @Nonnull
    default TriShortConsumer composeFromShort(@Nonnull final ShortToByteFunction before1,
            @Nonnull final ShortToByteFunction before2, @Nonnull final ShortToByteFunction before3) {
        Objects.requireNonNull(before1);
        Objects.requireNonNull(before2);
        Objects.requireNonNull(before3);
        return (value1, value2, value3) -> accept(before1.applyAsByte(value1), before2.applyAsByte(value2),
                before3.applyAsByte(value3));
    }
    /**
     * Returns a composed {@link TriByteConsumer} that performs, in sequence, this consumer followed by the {@code
     * after} consumer. If evaluation of either operation throws an exception, it is relayed to the caller of the
     * composed operation. If performing this consumer throws an exception, the {@code after} consumer will not be
     * performed.
     *
     * @param after The consumer to apply after this consumer is applied
     * @return A composed {@link TriByteConsumer} that performs, in sequence, this consumer followed by the {@code
     * after} consumer.
     * @throws NullPointerException If given argument is {@code null}
     */
    @Nonnull
    default TriByteConsumer andThen(@Nonnull final TriByteConsumer after) {
        Objects.requireNonNull(after);
        return (value1, value2, value3) -> {
            accept(value1, value2, value3);
            after.accept(value1, value2, value3);
        };
    }
    /**
     * Returns a composed {@link TriConsumer} which represents this {@link TriByteConsumer}. Thereby the primitive
     * input arguments for this consumer are autoboxed. This method provides the possibility to use this
     * {@code TriByteConsumer} with methods provided by the {@code JDK}.
     *
     * @return A composed {@code TriConsumer} which represents this {@code TriByteConsumer}.
     */
    @Nonnull
    default TriConsumer<Byte, Byte, Byte> boxed() {
        return this::accept;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.console.agent.handlers;
import io.socket.client.Socket;
import java.io.IOException;
import java.net.ConnectException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import org.apache.ignite.IgniteLogger;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.ignite.console.agent.AgentConfiguration;
import org.apache.ignite.console.agent.rest.RestExecutor;
import org.apache.ignite.console.agent.rest.RestResult;
import org.apache.ignite.internal.processors.rest.client.message.GridClientNodeBean;
import org.apache.ignite.internal.processors.rest.protocols.http.jetty.GridJettyObjectMapper;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.LT;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.lang.IgniteProductVersion;
import org.apache.ignite.logger.slf4j.Slf4jLogger;
import org.slf4j.LoggerFactory;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_CLUSTER_NAME;
import static org.apache.ignite.console.agent.AgentUtils.toJSON;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_BUILD_VER;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_CLIENT_MODE;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_IPS;
import static org.apache.ignite.internal.processors.rest.GridRestResponse.STATUS_SUCCESS;
import static org.apache.ignite.internal.processors.rest.client.message.GridClientResponse.STATUS_FAILED;
import static org.apache.ignite.internal.visor.util.VisorTaskUtils.sortAddresses;
import static org.apache.ignite.internal.visor.util.VisorTaskUtils.splitAddresses;
/**
* API to transfer topology from Ignite cluster available by node-uri.
*/
public class ClusterListener implements AutoCloseable {
/** */
private static final IgniteLogger log = new Slf4jLogger(LoggerFactory.getLogger(ClusterListener.class));
/** */
private static final IgniteProductVersion IGNITE_2_1 = IgniteProductVersion.fromString("2.1.0");
/** */
private static final IgniteProductVersion IGNITE_2_3 = IgniteProductVersion.fromString("2.3.0");
/** Unique Visor key to get events last order. */
private static final String EVT_LAST_ORDER_KEY = "WEB_AGENT_" + UUID.randomUUID().toString();
/** Unique Visor key to get events throttle counter. */
private static final String EVT_THROTTLE_CNTR_KEY = "WEB_AGENT_" + UUID.randomUUID().toString();
/** */
private static final String EVENT_CLUSTER_CONNECTED = "cluster:connected";
/** */
private static final String EVENT_CLUSTER_TOPOLOGY = "cluster:topology";
/** */
private static final String EVENT_CLUSTER_DISCONNECTED = "cluster:disconnected";
/** Default timeout. */
private static final long DFLT_TIMEOUT = 3000L;
/** JSON object mapper. */
private static final ObjectMapper MAPPER = new GridJettyObjectMapper();
/** Latest topology snapshot. */
private TopologySnapshot top;
/** */
private final WatchTask watchTask = new WatchTask();
/** */
private static final IgniteClosure<UUID, String> ID2ID8 = new IgniteClosure<UUID, String>() {
@Override public String apply(UUID nid) {
return U.id8(nid).toUpperCase();
}
@Override public String toString() {
return "Node ID to ID8 transformer closure.";
}
};
/** */
private AgentConfiguration cfg;
/** */
private Socket client;
/** */
private RestExecutor restExecutor;
/** */
private static final ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);
/** */
private ScheduledFuture<?> refreshTask;
    /**
     * @param cfg Agent configuration.
     * @param client Socket client used to emit cluster events to Web Console.
     * @param restExecutor Executor for REST commands against the cluster nodes.
     */
    public ClusterListener(AgentConfiguration cfg, Socket client, RestExecutor restExecutor) {
        this.cfg = cfg;
        this.client = client;
        this.restExecutor = restExecutor;
    }
/**
* Callback on cluster connect.
*
* @param nids Cluster nodes IDs.
*/
private void clusterConnect(Collection<UUID> nids) {
log.info("Connection successfully established to cluster with nodes: " + F.viewReadOnly(nids, ID2ID8));
client.emit(EVENT_CLUSTER_CONNECTED, toJSON(nids));
}
/**
* Callback on disconnect from cluster.
*/
private void clusterDisconnect() {
if (top == null)
return;
top = null;
log.info("Connection to cluster was lost");
client.emit(EVENT_CLUSTER_DISCONNECTED);
}
/**
* Stop refresh task.
*/
private void safeStopRefresh() {
if (refreshTask != null)
refreshTask.cancel(true);
}
/**
* Start watch cluster.
*/
public void watch() {
safeStopRefresh();
refreshTask = pool.scheduleWithFixedDelay(watchTask, 0L, DFLT_TIMEOUT, TimeUnit.MILLISECONDS);
}
/** {@inheritDoc} */
@Override public void close() {
refreshTask.cancel(true);
pool.shutdownNow();
}
/** */
private static class TopologySnapshot {
/** */
private String clusterName;
/** */
private Collection<UUID> nids;
/** */
private Map<UUID, String> addrs;
/** */
private Map<UUID, Boolean> clients;
/** */
private String clusterVerStr;
/** */
private IgniteProductVersion clusterVer;
/** */
private boolean active;
/** */
private boolean secured;
/**
* Helper method to get attribute.
*
* @param attrs Map with attributes.
* @param name Attribute name.
* @return Attribute value.
*/
private static <T> T attribute(Map<String, Object> attrs, String name) {
return (T)attrs.get(name);
}
/**
* @param nodes Nodes.
*/
TopologySnapshot(Collection<GridClientNodeBean> nodes) {
int sz = nodes.size();
nids = new ArrayList<>(sz);
addrs = U.newHashMap(sz);
clients = U.newHashMap(sz);
active = false;
secured = false;
for (GridClientNodeBean node : nodes) {
UUID nid = node.getNodeId();
nids.add(nid);
Map<String, Object> attrs = node.getAttributes();
if (F.isEmpty(clusterName))
clusterName = attribute(attrs, IGNITE_CLUSTER_NAME);
Boolean client = attribute(attrs, ATTR_CLIENT_MODE);
clients.put(nid, client);
Collection<String> nodeAddrs = client
? splitAddresses(attribute(attrs, ATTR_IPS))
: node.getTcpAddresses();
String firstIP = F.first(sortAddresses(nodeAddrs));
addrs.put(nid, firstIP);
String nodeVerStr = attribute(attrs, ATTR_BUILD_VER);
IgniteProductVersion nodeVer = IgniteProductVersion.fromString(nodeVerStr);
if (clusterVer == null || clusterVer.compareTo(nodeVer) > 0) {
clusterVer = nodeVer;
clusterVerStr = nodeVerStr;
}
}
}
/**
* @return Cluster name.
*/
public String getClusterName() {
return clusterName;
}
/**
* @return Cluster version.
*/
public String getClusterVersion() {
return clusterVerStr;
}
/**
* @return Cluster active flag.
*/
public boolean isActive() {
return active;
}
/**
* @param active New cluster active state.
*/
public void setActive(boolean active) {
this.active = active;
}
/**
* @return {@code true} If cluster has configured security.
*/
public boolean isSecured() {
return secured;
}
/**
* @param secured Configured security flag.
*/
public void setSecured(boolean secured) {
this.secured = secured;
}
/**
* @return Cluster nodes IDs.
*/
public Collection<UUID> getNids() {
return nids;
}
/**
* @return Cluster nodes with IPs.
*/
public Map<UUID, String> getAddresses() {
return addrs;
}
/**
* @return Cluster nodes with client mode flag.
*/
public Map<UUID, Boolean> getClients() {
return clients;
}
/**
* @return Cluster version.
*/
public IgniteProductVersion clusterVersion() {
return clusterVer;
}
/**
* @return Collection of short UUIDs.
*/
Collection<String> nid8() {
return F.viewReadOnly(nids, ID2ID8);
}
/**
* @param prev Previous topology.
* @return {@code true} in case if current topology is a new cluster.
*/
boolean differentCluster(TopologySnapshot prev) {
return prev == null || F.isEmpty(prev.nids) || Collections.disjoint(nids, prev.nids);
}
}
    /** Periodic task that polls cluster topology over REST and reports it to the backend. */
    private class WatchTask implements Runnable {
        /** Error prefix the node returns when a session token is unknown or expired. */
        private static final String EXPIRED_SES_ERROR_MSG = "Failed to handle request - unknown session token (maybe expired session)";
        /** Cached REST session token; cleared when the node reports it expired. */
        private String sesTok;
        /**
         * Execute REST command under agent user.
         *
         * Uses the cached session token when available, otherwise falls back to the
         * configured node login/password. On an expired-session error the token is
         * dropped and the command is retried with credentials.
         *
         * @param params Command params.
         * @return Command result.
         * @throws IOException If failed to execute.
         */
        private RestResult restCommand(Map<String, Object> params) throws IOException {
            if (!F.isEmpty(sesTok))
                params.put("sessionToken", sesTok);
            else if (!F.isEmpty(cfg.nodeLogin()) && !F.isEmpty(cfg.nodePassword())) {
                params.put("user", cfg.nodeLogin());
                params.put("password", cfg.nodePassword());
            }
            RestResult res = restExecutor.sendRequest(cfg.nodeURIs(), params, null);
            switch (res.getStatus()) {
                case STATUS_SUCCESS:
                    // Cache the (possibly refreshed) session token for subsequent calls.
                    sesTok = res.getSessionToken();
                    return res;
                case STATUS_FAILED:
                    if (res.getError().startsWith(EXPIRED_SES_ERROR_MSG)) {
                        // Session expired: forget the token and retry with credentials.
                        sesTok = null;
                        params.remove("sessionToken");
                        return restCommand(params);
                    }
                    // Intentional fall-through: any other failure is returned to the caller as-is.
                default:
                    return res;
            }
        }
        /**
         * Collect topology.
         *
         * @param full Whether to include node metrics ("mtr") in the response.
         * @return REST call result.
         * @throws IOException If the REST request failed.
         */
        private RestResult topology(boolean full) throws IOException {
            Map<String, Object> params = U.newHashMap(3);
            params.put("cmd", "top");
            params.put("attr", true);
            params.put("mtr", full);
            return restCommand(params);
        }
        /**
         * Query whether the cluster is active.
         *
         * @param ver Cluster version.
         * @param nid Node ID.
         * @return Cluster active state.
         * @throws IOException If failed to collect cluster active state.
         */
        public boolean active(IgniteProductVersion ver, UUID nid) throws IOException {
            Map<String, Object> params = U.newHashMap(10);
            boolean v23 = ver.compareTo(IGNITE_2_3) >= 0;
            // Ignite 2.3+ exposes the state directly; older versions go through VisorGatewayTask.
            if (v23)
                params.put("cmd", "currentState");
            else {
                params.put("cmd", "exe");
                params.put("name", "org.apache.ignite.internal.visor.compute.VisorGatewayTask");
                params.put("p1", nid);
                params.put("p2", "org.apache.ignite.internal.visor.node.VisorNodeDataCollectorTask");
                params.put("p3", "org.apache.ignite.internal.visor.node.VisorNodeDataCollectorTaskArg");
                params.put("p4", false);
                params.put("p5", EVT_LAST_ORDER_KEY);
                params.put("p6", EVT_THROTTLE_CNTR_KEY);
                // The Visor task argument list differs between 2.1+ and older versions.
                if (ver.compareTo(IGNITE_2_1) >= 0)
                    params.put("p7", false);
                else {
                    params.put("p7", 10);
                    params.put("p8", false);
                }
            }
            RestResult res = restCommand(params);
            switch (res.getStatus()) {
                case STATUS_SUCCESS:
                    if (v23)
                        return Boolean.valueOf(res.getData());
                    // Pre-2.3: the active flag is embedded in the Visor task JSON payload.
                    return res.getData().contains("\"active\":true");
                default:
                    throw new IOException(res.getError());
            }
        }
        /** {@inheritDoc} */
        @Override public void run() {
            try {
                RestResult res = topology(false);
                switch (res.getStatus()) {
                    case STATUS_SUCCESS:
                        List<GridClientNodeBean> nodes = MAPPER.readValue(res.getData(),
                            new TypeReference<List<GridClientNodeBean>>() {});
                        TopologySnapshot newTop = new TopologySnapshot(nodes);
                        if (newTop.differentCluster(top))
                            log.info("Connection successfully established to cluster with nodes: " + newTop.nid8());
                        boolean active = active(newTop.clusterVersion(), F.first(newTop.getNids()));
                        newTop.setActive(active);
                        // A session token in the response implies security is enabled on the cluster.
                        newTop.setSecured(!F.isEmpty(res.getSessionToken()));
                        top = newTop;
                        client.emit(EVENT_CLUSTER_TOPOLOGY, toJSON(top));
                        break;
                    default:
                        LT.warn(log, res.getError());
                        clusterDisconnect();
                }
            }
            catch (ConnectException ignored) {
                // Node unreachable: treat as a cluster disconnect.
                clusterDisconnect();
            }
            catch (Exception e) {
                log.error("WatchTask failed", e);
                clusterDisconnect();
            }
        }
    }
}
| |
/*******************************************************************************
* Copyright 2015 Dorian Perkins, Younghwan Go, Nitin Agrawal, Akshat Aranya
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.necla.simba.client;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.necla.simba.client.ControlListener;
import com.necla.simba.client.SeqNumManager;
import com.necla.simba.protocol.Common.DataRow;
import com.necla.simba.protocol.Common.ObjectFragment;
import com.necla.simba.protocol.Common.SyncHeader;
import com.necla.simba.protocol.Common.SyncResponse;
import com.necla.simba.protocol.Client.ActivePullResponse;
import com.necla.simba.protocol.Client.BitmapNotify;
import com.necla.simba.protocol.Client.ControlResponse;
import com.necla.simba.protocol.Client.ClientMessage;
import com.necla.simba.protocol.Client.ClientMultiMessage;
import com.necla.simba.util.Utils;
/**
 * Dispatches messages received from the Simba server. {@link #process(byte[])} is
 * called by the network layer to enqueue parsed messages; {@link #run()} is the
 * handler loop that routes each message by type.
 */
public class SimbaMessageHandler implements Runnable {
	private static final Logger LOG = LoggerFactory.getLogger(SimbaMessageHandler.class);
	// Incoming messages parsed off the wire, drained by run().
	private List<ClientMessage> queue;
	// Callback for registration / reconnection / table-creation events.
	private ControlListener listener;
	// Hand-off queue for another consumer; may be null. Raw type kept for API compatibility.
	private LinkedBlockingQueue requestQueue;
	// Tracks sequence numbers of sent messages awaiting a response.
	private SeqNumManager sequencer;
	// Authentication token returned by the server after device registration.
	private String token;

	public SimbaMessageHandler(ControlListener listener, SeqNumManager sequencer, LinkedBlockingQueue rq) {
		this.listener = listener;
		this.sequencer = sequencer;
		this.queue = new LinkedList<ClientMessage>();
		this.requestQueue = rq;
	}

	/**
	 * Parses a multi-message buffer returned by the server and enqueues the
	 * contained messages for handling by {@link #run()}.
	 *
	 * @param buf serialized {@code ClientMultiMessage} bytes
	 */
	public void process(byte[] buf) {
		synchronized (queue) {
			try {
				ClientMultiMessage mm = ClientMultiMessage.parseFrom(buf);
				List<ClientMessage> msgs = mm.getMessagesList();
				for (ClientMessage msg : msgs) {
					LOG.debug("Received: seq={} type={}", msg.getSeq(), msg.getType());
					queue.add(msg);
				}
			} catch (IOException e) {
				// Malformed buffer: log and drop; the connection layer handles recovery.
				LOG.error("Failed to parse incoming multi-message buffer", e);
			}
			// Single handler thread waits on the queue, so notify() is sufficient.
			queue.notify();
		}
	}

	/**
	 * Logs the synced and conflicted rows reported by a sync response.
	 *
	 * @param tbl_id table identifier the response refers to
	 * @param sr     sync response received from the server
	 */
	private void process_sync_response(String tbl_id, SyncResponse sr) {
		LOG.debug("Got sync response: " + Utils.stringify(sr));
		for (DataRow row : sr.getSyncedRowsList())
			LOG.debug("Synced row: {}", row.getId());
		for (DataRow row : sr.getConflictedRowsList())
			LOG.debug("Conflicted row: {}", row.getId());
	}

	/**
	 * Handler loop: waits for queued messages and dispatches them by type.
	 * Runs until the thread is killed; never returns normally.
	 */
	@SuppressWarnings("unchecked")
	public void run() {
		while (true) {
			ClientMessage mmsg;
			// Wait for data to become available.
			synchronized (queue) {
				while (queue.isEmpty()) {
					try {
						queue.wait();
					} catch (InterruptedException ignored) {
						// Woken up early; re-check the queue condition.
					}
				}
				mmsg = queue.remove(0);
			}
			final int type = mmsg.getType().getNumber();
			if (type == ClientMessage.Type.CONTROL_RESPONSE_VALUE) {
				ControlResponse crsp = mmsg.getControlResponse();
				ClientMessage pendingMsg = sequencer.getPendingMsg(mmsg.getSeq());
				int pendingType;
				if (pendingMsg != null) {
					pendingType = pendingMsg.getType().getNumber();
				} else {
					LOG.debug("pendingMsg == null");
					continue;
				}
				if (pendingType == ClientMessage.Type.REG_DEV_VALUE) {
					if (crsp.getStatus()) {
						// BUGFIX: the original declared a local `String token` that shadowed
						// the field, so the token was never actually retained. Assign the
						// field so later requests can reuse the registration token.
						token = crsp.getMsg();
						listener.registrationDone(token);
					} else
						// TODO: authentication failed, notify user
						LOG.debug(crsp.getMsg());
				} else if (pendingType == ClientMessage.Type.RECONN_VALUE) {
					if (!crsp.getStatus()) {
						LOG.debug("Reconnect failed, retrying authentication");
						listener.redoRegistration();
					}
				} else if (pendingType == ClientMessage.Type.CREATE_TABLE_VALUE) {
					listener.tableCreated();
				} else if (!crsp.getStatus()) {
					// Some other failure for some other pending message;
					// just log it for now, in the future we might need to process this.
					LOG.debug("Operation "
							+ pendingMsg.getType()
							+ " failed : " + crsp.getMsg());
				}
				sequencer.removePendingSeq(mmsg.getSeq());
			} else if (type == ClientMessage.Type.PULL_DATA_VALUE) {
				SyncHeader s = mmsg.getPullData().getData();
				// TODO: THIS IS ONLY FOR THROUGHPUT TESTING. NEED TO DROP
				try {
					LOG.debug("PUT " + mmsg.getSeq() + " to requestQueue");
					if (requestQueue != null) {
						requestQueue.put(mmsg);
					}
				} catch (InterruptedException e) {
					LOG.warn("Interrupted while enqueueing PULL_DATA message", e);
				}
				sequencer.removePendingSeq(mmsg.getSeq());
			} else if (type == ClientMessage.Type.SYNC_RESPONSE_VALUE) {
				SyncResponse s = mmsg.getSyncResponse();
				// TODO: THIS IS ONLY FOR THROUGHPUT TESTING. NEED TO DROP
				try {
					LOG.debug("PUT " + mmsg.getSeq() + " to requestQueue");
					// BUGFIX: guard against a null requestQueue like every other branch.
					// NOTE(review): this branch enqueues the sequence number, not the
					// message, unlike its siblings — presumably intentional for the
					// throughput test above; confirm before dropping.
					if (requestQueue != null) {
						requestQueue.put(mmsg.getSeq());
					}
				} catch (InterruptedException e) {
					LOG.warn("Interrupted while enqueueing SYNC_RESPONSE seq", e);
				}
				ClientMessage pendingMsg = sequencer.getPendingMsg(mmsg.getSeq());
				assert pendingMsg.getType() == ClientMessage.Type.SYNC_REQUEST;
				sequencer.removePendingSeq(mmsg.getSeq());
				process_sync_response(s.getTbl(), s);
			} else if (type == ClientMessage.Type.BITMAP_NOTIFY_VALUE) {
				BitmapNotify n = mmsg.getBitmapNotify();
				LOG.debug("Received Bitmap Notification:");
				LOG.debug(n.getBitmap().toString());
				sequencer.removePendingSeq(mmsg.getSeq());
				try {
					LOG.debug("PUT " + mmsg.getSeq() + " to requestQueue");
					if (requestQueue != null) {
						requestQueue.put(mmsg);
					}
				} catch (InterruptedException e) {
					LOG.warn("Interrupted while enqueueing BITMAP_NOTIFY message", e);
				}
			} else if (type == ClientMessage.Type.ACTIVE_PULL_RESPONSE_VALUE) {
				ActivePullResponse apr = mmsg.getActivePullResponse();
				ClientMessage pendingMsg = sequencer.getPendingMsg(mmsg.getSeq());
				assert pendingMsg.getType() == ClientMessage.Type.ACTIVE_PULL;
				sequencer.removePendingSeq(mmsg.getSeq());
			} else if (type == ClientMessage.Type.OBJECT_FRAGMENT_VALUE) {
				try {
					LOG.debug("PUT " + mmsg.getObjectFragment().getTransId() + " to requestQueue");
					if (requestQueue != null) {
						requestQueue.put(mmsg);
					}
				} catch (InterruptedException e) {
					LOG.warn("Interrupted while enqueueing OBJECT_FRAGMENT message", e);
				}
				handleObjectFragment(mmsg.getObjectFragment());
			} else if (type == ClientMessage.Type.SUB_RESPONSE_VALUE) {
				try {
					LOG.debug("PUT " + mmsg.getSeq() + " to requestQueue");
					if (requestQueue != null) {
						requestQueue.put(mmsg);
					}
				} catch (InterruptedException e) {
					LOG.warn("Interrupted while enqueueing SUB_RESPONSE message", e);
				}
			}
		}
	}

	/**
	 * Logs the metadata of a received object fragment.
	 *
	 * @param objectFragment fragment received from the server
	 */
	private void handleObjectFragment(ObjectFragment objectFragment) {
		LOG.debug("oid=" + objectFragment.getOid() + ", offset= " + objectFragment.getOffset() + ", eof?=" + objectFragment.getEof());
	}

	/**
	 * Sanity check that a received message corresponds to a message we sent.
	 *
	 * @param mmsg received message whose sequence number is validated
	 */
	private void check_seq(ClientMessage mmsg) {
		// Make sure the response corresponds to some of our sent messages.
		ClientMessage pending_msg = sequencer.getPendingMsg(mmsg.getSeq());
		LOG.debug("Got seq: " + mmsg.getSeq());
		assert (pending_msg != null) : "Received message with invalid seq num...";
		LOG.debug("pending_msg.seq: " + pending_msg.getSeq());
	}
}
| |
/*
* Copyright 2012 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.console.ng.ht.client.editors.taskslist.grid;
import java.util.List;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import com.google.gwt.core.client.GWT;
import com.google.gwt.user.cellview.client.ColumnSortList;
import com.google.gwt.view.client.AsyncDataProvider;
import com.google.gwt.view.client.HasData;
import com.google.gwt.view.client.Range;
import org.jboss.errai.bus.client.api.messaging.Message;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.ErrorCallback;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.jbpm.console.ng.ga.model.PortableQueryFilter;
import org.jbpm.console.ng.gc.client.list.base.AbstractListView.ListView;
import org.jbpm.console.ng.gc.client.list.base.AbstractScreenListPresenter;
import org.jbpm.console.ng.gc.client.util.TaskUtils;
import org.jbpm.console.ng.gc.client.util.TaskUtils.TaskType;
import org.jbpm.console.ng.ht.client.i18n.Constants;
import org.jbpm.console.ng.ht.model.TaskSummary;
import org.jbpm.console.ng.ht.service.TaskLifeCycleService;
import org.jbpm.console.ng.ht.service.TaskQueryService;
import org.uberfire.client.annotations.WorkbenchPartTitle;
import org.uberfire.client.annotations.WorkbenchPartView;
import org.uberfire.client.annotations.WorkbenchScreen;
import org.uberfire.client.mvp.UberView;
import org.uberfire.client.workbench.widgets.common.ErrorPopupPresenter;
import org.uberfire.paging.PageResponse;
import static org.jbpm.console.ng.ht.util.TaskRoleDefinition.*;
@Dependent
@WorkbenchScreen(identifier = "Tasks List")
public class TasksListGridPresenter extends AbstractScreenListPresenter<TaskSummary> {
    /** Query-filter parameter name carrying the list of task statuses. */
    public static String FILTER_STATUSES_PARAM_NAME = "statuses";
    /** Query-filter parameter name carrying the current task-type filter. */
    public static String FILTER_CURRENT_ROLE_PARAM_NAME = "filter";

    public interface TaskListView extends ListView<TaskSummary, TasksListGridPresenter> {
    }

    @Inject
    private TaskListView view;
    private Constants constants = GWT.create(Constants.class);
    @Inject
    private ErrorPopupPresenter errorPopup;
    @Inject
    private Caller<TaskQueryService> taskQueryService;
    @Inject
    private Caller<TaskLifeCycleService> taskOperationsService;
    // Task role used for the current query (potential owner / administrator).
    private String currentRole;
    // Explicit status list set via filterGrid(); takes precedence over the type filter when non-null.
    private List<String> currentStatuses;
    // Task-type filter used to derive statuses when no explicit list is set.
    private TaskType currentStatusFilter = TaskUtils.TaskType.ACTIVE;

    public TasksListGridPresenter() {
        dataProvider = new AsyncDataProvider<TaskSummary>() {
            @Override
            protected void onRangeChanged(HasData<TaskSummary> display) {
                view.showBusyIndicator(constants.Loading());
                final Range visibleRange = view.getListGrid().getVisibleRange();
                getData(visibleRange);
            }
        };
    }

    @Override
    protected ListView getListView() {
        return view;
    }

    /**
     * Builds the query filter for the visible range and asynchronously fetches
     * the matching tasks, updating the grid via the data provider callback.
     */
    @Override
    public void getData(Range visibleRange) {
        ColumnSortList columnSortList = view.getListGrid().getColumnSortList();
        if (currentFilter == null) {
            currentFilter = new PortableQueryFilter(visibleRange.getStart(),
                    visibleRange.getLength(),
                    false, "",
                    (columnSortList.size() > 0) ? columnSortList.get(0)
                            .getColumn().getDataStoreName() : "",
                    (columnSortList.size() > 0) ? columnSortList.get(0)
                            .isAscending() : true);
        }
        // If we are refreshing after a search action, we need to go back to offset 0.
        if (currentFilter.getParams() == null || currentFilter.getParams().isEmpty()
                || currentFilter.getParams().get("textSearch") == null || currentFilter.getParams().get("textSearch").equals("")) {
            currentFilter.setOffset(visibleRange.getStart());
            currentFilter.setCount(visibleRange.getLength());
            currentFilter.setFilterParams("");
        } else {
            currentFilter.setFilterParams("(LOWER(t.name) like '" + currentFilter.getParams().get("textSearch")
                    + "' or LOWER(t.description) like '" + currentFilter.getParams().get("textSearch") + "') ");
            currentFilter.setOffset(0);
            currentFilter.setCount(view.getListGrid().getPageSize());
        }
        // BUGFIX: the original tested `currentStatusFilter == null`, but the filter
        // defaults to ACTIVE and is never null, so getStatusByType() would only ever
        // be invoked with null while the reachable branch used a possibly-null
        // currentStatuses. Use the explicit status list when one was set via
        // filterGrid(); otherwise derive the statuses from the task-type filter.
        if (currentStatuses != null) {
            currentFilter.getParams().put(FILTER_STATUSES_PARAM_NAME, currentStatuses);
        } else {
            currentFilter.getParams().put(FILTER_STATUSES_PARAM_NAME, TaskUtils.getStatusByType(currentStatusFilter));
        }
        currentFilter.getParams().put(FILTER_CURRENT_ROLE_PARAM_NAME, currentStatusFilter.toString());
        currentFilter.getParams().put("userId", identity.getIdentifier());
        currentFilter.getParams().put("taskRole", currentRole);
        currentFilter.setOrderBy((columnSortList.size() > 0) ? columnSortList.get(0)
                .getColumn().getDataStoreName() : "");
        currentFilter.setIsAscending((columnSortList.size() > 0) ? columnSortList.get(0)
                .isAscending() : true);
        taskQueryService.call(new RemoteCallback<PageResponse<TaskSummary>>() {
            @Override
            public void callback(PageResponse<TaskSummary> response) {
                updateDataOnCallback(response);
            }
        }, new ErrorCallback<Message>() {
            @Override
            public boolean error(Message message, Throwable throwable) {
                view.hideBusyIndicator();
                view.displayNotification("Error: Getting Tasks: " + throwable.toString());
                GWT.log(message.toString());
                return true;
            }
        }).getData(currentFilter);
    }

    /**
     * Applies an explicit role/status filter and refreshes the grid.
     *
     * @param currentRole     task role to query for
     * @param currentStatuses explicit list of task statuses to show
     */
    public void filterGrid(String currentRole, List<String> currentStatuses) {
        this.currentRole = currentRole;
        this.currentStatuses = currentStatuses;
        refreshGrid();
    }

    public void refreshActiveTasks() {
        currentRole = TASK_ROLE_POTENTIALOWNER;
        currentStatusFilter = TaskUtils.TaskType.ACTIVE;
        // Clear any explicit status list so the type filter takes effect.
        currentStatuses = null;
        refreshGrid();
    }

    public void refreshPersonalTasks() {
        currentRole = TASK_ROLE_POTENTIALOWNER;
        currentStatusFilter = TaskUtils.TaskType.PERSONAL;
        currentStatuses = null;
        refreshGrid();
    }

    public void refreshGroupTasks() {
        currentRole = TASK_ROLE_POTENTIALOWNER;
        currentStatusFilter = TaskUtils.TaskType.GROUP;
        currentStatuses = null;
        refreshGrid();
    }

    public void refreshAllTasks() {
        currentRole = TASK_ROLE_POTENTIALOWNER;
        currentStatusFilter = TaskUtils.TaskType.ALL;
        currentStatuses = null;
        refreshGrid();
    }

    public void refreshAdminTasks() {
        currentRole = TASK_ROLE_ADMINISTRATOR;
        currentStatusFilter = TaskUtils.TaskType.ADMIN;
        currentStatuses = null;
        refreshGrid();
    }

    @WorkbenchPartTitle
    public String getTitle() {
        return constants.Tasks_List();
    }

    @WorkbenchPartView
    public UberView<TasksListGridPresenter> getView() {
        return view;
    }

    /**
     * Releases a claimed task back to its group and refreshes the grid.
     *
     * @param taskId task to release
     * @param userId user performing the release
     */
    public void releaseTask(final Long taskId, final String userId) {
        taskOperationsService.call(new RemoteCallback<Void>() {
            @Override
            public void callback(Void nothing) {
                view.displayNotification("Task Released");
                refreshGrid();
            }
        }, new ErrorCallback<Message>() {
            @Override
            public boolean error(Message message, Throwable throwable) {
                errorPopup.showMessage("Unexpected error encountered : " + throwable.getMessage());
                return true;
            }
        }).release(taskId, userId);
    }

    /**
     * Claims a task for the given user and refreshes the grid.
     *
     * @param taskId       task to claim
     * @param userId       claiming user
     * @param deploymentId deployment the task belongs to
     */
    public void claimTask(final Long taskId, final String userId, final String deploymentId) {
        taskOperationsService.call(new RemoteCallback<Void>() {
            @Override
            public void callback(Void nothing) {
                view.displayNotification("Task Claimed");
                refreshGrid();
            }
        }, new ErrorCallback<Message>() {
            @Override
            public boolean error(Message message, Throwable throwable) {
                errorPopup.showMessage("Unexpected error encountered : " + throwable.getMessage());
                return true;
            }
        }).claim(taskId, userId, deploymentId);
    }
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.common.trace;
import static com.navercorp.pinpoint.common.trace.AnnotationKeyProperty.*;
import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;
import com.navercorp.pinpoint.common.util.StaticFieldLookUp;
import com.navercorp.pinpoint.common.util.apache.IntHashMap;
/**
* @author netspider
* @author emeroad
* @author Jongho Moon
*/
public class AnnotationKey {
    /**
     * Creates an annotation key.
     *
     * @param code       numeric key code (negative codes are reserved for args/cached-args keys)
     * @param name       human-readable key name
     * @param properties optional flags controlling record-set visibility and error-metadata marking
     */
    public AnnotationKey(int code, String name, AnnotationKeyProperty... properties) {
        this.code = code;
        this.name = name;
        boolean viewInRecordSet = false;
        boolean errorApiMetadata = false;
        for (AnnotationKeyProperty property : properties) {
            switch (property) {
                case VIEW_IN_RECORD_SET:
                    viewInRecordSet = true;
                    break;
                case ERROR_API_METADATA:
                    errorApiMetadata = true;
                    break;
            }
        }
        this.viewInRecordSet = viewInRecordSet;
        this.errorApiMetadata = errorApiMetadata;
    }

    public String getName() {
        return name;
    }

    public int getCode() {
        return code;
    }

    public boolean isErrorApiMetadata() {
        return errorApiMetadata;
    }

    public boolean isViewInRecordSet() {
        return viewInRecordSet;
    }

    // because of using variable-length encoding,
    // a small number should be used mainly for data contained in network packets and a big number for internal used code.
    //          2147483647
    //         -2147483648

    // @Deprecated // moved apiId to spanEvent and span. dump by int
    // API_DID(10, "API-DID"),
    // @Deprecated // you should remove static API code. Use only API-DID. dump by int
    // API_ID(11, "API-ID"),
    // used for developing the annotation that dumps api by string. you also consider to remove it later.
    public static final AnnotationKey API = new AnnotationKey(12, "API");
    public static final AnnotationKey API_METADATA = new AnnotationKey(13, "API-METADATA");
    public static final AnnotationKey RETURN_DATA = new AnnotationKey(14, "RETURN_DATA", VIEW_IN_RECORD_SET);
    public static final AnnotationKey API_TAG = new AnnotationKey(10015, "API-TAG");

    // when you don't know the correct cause of errors.
    public static final AnnotationKey ERROR_API_METADATA_ERROR = new AnnotationKey(10000010, "API-METADATA-ERROR", ERROR_API_METADATA);
    // when agentInfo not found
    public static final AnnotationKey ERROR_API_METADATA_AGENT_INFO_NOT_FOUND = new AnnotationKey(10000011, "API-METADATA-AGENT-INFO-NOT-FOUND", ERROR_API_METADATA);
    // when checksum is not correct even if agentInfo exists
    public static final AnnotationKey ERROR_API_METADATA_IDENTIFIER_CHECK_ERROR = new AnnotationKey(10000012, "API-METADATA-IDENTIFIER-CHECK_ERROR", ERROR_API_METADATA);
    // when meta data itself not found
    public static final AnnotationKey ERROR_API_METADATA_NOT_FOUND = new AnnotationKey(10000013, "API-METADATA-NOT-FOUND", ERROR_API_METADATA);
    // when the same hashId of meta data exists
    public static final AnnotationKey ERROR_API_METADATA_DID_COLLSION = new AnnotationKey(10000014, "API-METADATA-DID-COLLSION", ERROR_API_METADATA);

    // it's not clear to handle a error code. so ApiMetaDataError with searching ERROR_API_META_DATA has been used.
    // automatically generated id
    public static final AnnotationKey SQL_ID = new AnnotationKey(20, "SQL-ID");
    public static final AnnotationKey SQL = new AnnotationKey(21, "SQL", VIEW_IN_RECORD_SET);
    public static final AnnotationKey SQL_METADATA = new AnnotationKey(22, "SQL-METADATA");
    public static final AnnotationKey SQL_PARAM = new AnnotationKey(23, "SQL-PARAM");
    public static final AnnotationKey SQL_BINDVALUE = new AnnotationKey(24, "SQL-BindValue", VIEW_IN_RECORD_SET);

    public static final AnnotationKey STRING_ID = new AnnotationKey(30, "STRING_ID");

    // HTTP_URL is replaced by argument. So viewInRecordSet parameter name is not true.
    public static final AnnotationKey HTTP_URL = new AnnotationKey(40, "http.url");
    public static final AnnotationKey HTTP_PARAM = new AnnotationKey(41, "http.param", VIEW_IN_RECORD_SET);
    public static final AnnotationKey HTTP_PARAM_ENTITY = new AnnotationKey(42, "http.entity", VIEW_IN_RECORD_SET);
    public static final AnnotationKey HTTP_COOKIE = new AnnotationKey(45, "http.cookie", VIEW_IN_RECORD_SET);
    public static final AnnotationKey HTTP_STATUS_CODE = new AnnotationKey(46, "http.status.code", VIEW_IN_RECORD_SET);
    public static final AnnotationKey HTTP_CALL_RETRY_COUNT = new AnnotationKey(48, "retryCount");
    // post method parameter of httpclient

    // ARCUS_COMMAND(50, "arcus.command");

    public static final AnnotationKey NPC_URL = new AnnotationKey(60, "npc.url");
    public static final AnnotationKey NPC_PARAM = new AnnotationKey(61, "npc.param");
    public static final AnnotationKey NPC_CONNECT_OPTION = new AnnotationKey(62, "npc.connect.options");

    public static final AnnotationKey NIMM_OBJECT_NAME = new AnnotationKey(70, "nimm.objectName");
    public static final AnnotationKey NIMM_METHOD_NAME = new AnnotationKey(71, "nimm.methodName");
    public static final AnnotationKey NIMM_PARAM = new AnnotationKey(72, "nimm.param");
    public static final AnnotationKey NIMM_CONNECT_OPTION = new AnnotationKey(73, "nimm.connect.options");

    // 9000 gson.json.length
    // 9001 jackson.json.length
    // 9002 json-lib.json.length

    public static final AnnotationKey ARGS0 = new AnnotationKey(-1, "args[0]");
    public static final AnnotationKey ARGS1 = new AnnotationKey(-2, "args[1]");
    public static final AnnotationKey ARGS2 = new AnnotationKey(-3, "args[2]");
    public static final AnnotationKey ARGS3 = new AnnotationKey(-4, "args[3]");
    public static final AnnotationKey ARGS4 = new AnnotationKey(-5, "args[4]");
    public static final AnnotationKey ARGS5 = new AnnotationKey(-6, "args[5]");
    public static final AnnotationKey ARGS6 = new AnnotationKey(-7, "args[6]");
    public static final AnnotationKey ARGS7 = new AnnotationKey(-8, "args[7]");
    public static final AnnotationKey ARGS8 = new AnnotationKey(-9, "args[8]");
    public static final AnnotationKey ARGS9 = new AnnotationKey(-10, "args[9]");
    public static final AnnotationKey ARGSN = new AnnotationKey(-11, "args[N]");

    public static final AnnotationKey CACHE_ARGS0 = new AnnotationKey(-30, "cached_args[0]");
    public static final AnnotationKey CACHE_ARGS1 = new AnnotationKey(-31, "cached_args[1]");
    public static final AnnotationKey CACHE_ARGS2 = new AnnotationKey(-32, "cached_args[2]");
    public static final AnnotationKey CACHE_ARGS3 = new AnnotationKey(-33, "cached_args[3]");
    public static final AnnotationKey CACHE_ARGS4 = new AnnotationKey(-34, "cached_args[4]");
    public static final AnnotationKey CACHE_ARGS5 = new AnnotationKey(-35, "cached_args[5]");
    public static final AnnotationKey CACHE_ARGS6 = new AnnotationKey(-36, "cached_args[6]");
    public static final AnnotationKey CACHE_ARGS7 = new AnnotationKey(-37, "cached_args[7]");
    public static final AnnotationKey CACHE_ARGS8 = new AnnotationKey(-38, "cached_args[8]");
    public static final AnnotationKey CACHE_ARGS9 = new AnnotationKey(-39, "cached_args[9]");
    public static final AnnotationKey CACHE_ARGSN = new AnnotationKey(-40, "cached_args[N]");

    @Deprecated
    public static final AnnotationKey EXCEPTION = new AnnotationKey(-50, "Exception", VIEW_IN_RECORD_SET);
    @Deprecated
    public static final AnnotationKey EXCEPTION_CLASS = new AnnotationKey(-51, "ExceptionClass");

    public static final AnnotationKey UNKNOWN = new AnnotationKey(-9999, "UNKNOWN");

    public static final AnnotationKey ASYNC = new AnnotationKey(-100, "Asynchronous Invocation", VIEW_IN_RECORD_SET);

    private final int code;
    private final String name;
    private final boolean viewInRecordSet;
    private final boolean errorApiMetadata;

    public final static int MAX_ARGS_SIZE = 10;

    ////////////////////////////////
    // Arguments
    ////////////////////////////////

    /**
     * Returns the ARGS key for the given argument index; indexes beyond 9 map to {@link #ARGSN}.
     *
     * @param index zero-based argument index, must be non-negative
     * @throws IllegalArgumentException if {@code index} is negative
     */
    public static AnnotationKey getArgs(int index) {
        if (index < 0) {
            throw new IllegalArgumentException("negative index:" + index);
        }
        switch (index) {
            case 0:
                return ARGS0;
            case 1:
                return ARGS1;
            case 2:
                return ARGS2;
            case 3:
                return ARGS3;
            case 4:
                return ARGS4;
            case 5:
                return ARGS5;
            case 6:
                return ARGS6;
            case 7:
                return ARGS7;
            case 8:
                return ARGS8;
            case 9:
                return ARGS9;
            default:
                return ARGSN;
        }
    }

    /**
     * @param index annotation key code
     * @return whether {@code index} falls inside the ARGS code range
     */
    public static boolean isArgsKey(int index) {
        // ARGS codes are negative and descend from ARGS0 (-1) to ARGSN (-11).
        return index <= ARGS0.getCode() && index >= ARGSN.getCode();
    }

    /**
     * Returns the CACHE_ARGS key for the given argument index; indexes beyond 9 map to {@link #CACHE_ARGSN}.
     *
     * @param index zero-based argument index, must be non-negative
     * @throws IllegalArgumentException if {@code index} is negative
     */
    public static AnnotationKey getCachedArgs(int index) {
        if (index < 0) {
            throw new IllegalArgumentException("negative index:" + index);
        }
        switch (index) {
            case 0:
                return CACHE_ARGS0;
            case 1:
                return CACHE_ARGS1;
            case 2:
                return CACHE_ARGS2;
            case 3:
                return CACHE_ARGS3;
            case 4:
                return CACHE_ARGS4;
            case 5:
                return CACHE_ARGS5;
            case 6:
                return CACHE_ARGS6;
            case 7:
                return CACHE_ARGS7;
            case 8:
                return CACHE_ARGS8;
            case 9:
                return CACHE_ARGS9;
            default:
                return CACHE_ARGSN;
        }
    }

    /**
     * @param index annotation key code
     * @return whether {@code index} falls inside the CACHE_ARGS code range
     */
    public static boolean isCachedArgsKey(int index) {
        // CACHE_ARGS codes are negative and descend from CACHE_ARGS0 (-30) to CACHE_ARGSN (-40).
        return index <= CACHE_ARGS0.getCode() && index >= CACHE_ARGSN.getCode();
    }

    /**
     * Translates a CACHE_ARGS key code into the corresponding ARGS key code.
     *
     * @param index CACHE_ARGS key code
     * @throws IllegalArgumentException if {@code index} is not a CACHE_ARGS code
     */
    public static int cachedArgsToArgs(int index) {
        if (!isCachedArgsKey(index)) {
            throw new IllegalArgumentException("non CACHED_ARGS:" + index);
        }
        final int cachedIndex = CACHE_ARGS0.getCode() - ARGS0.getCode();
        // you have to - (minus) operation because of negative name
        return index - cachedIndex;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("AnnotationKey{");
        sb.append("code=").append(code);
        // BUGFIX: close the quote opened after "name='" so the output is balanced.
        sb.append(", name='").append(name).append('\'');
        sb.append('}');
        return sb.toString();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.10.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.accumulo.proxy.thrift;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
/**
 * Thrift-generated exception struct carrying a single optional message string.
 *
 * <p>NOTE(review): this class is autogenerated; the only manual changes are the removal of a
 * dead private {@code unusedMethod()} stub and of an unused local {@code first} in
 * {@link #toString()}, neither of which is part of the standard Thrift 0.10.0 generator
 * output. Everything else is intentionally left byte-equivalent so a regeneration diff stays
 * reviewable.</p>
 */
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.10.0)")
public class AccumuloException extends org.apache.thrift.TException implements org.apache.thrift.TBase<AccumuloException, AccumuloException._Fields>, java.io.Serializable, Cloneable, Comparable<AccumuloException> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AccumuloException");
  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);

  // Two serialization strategies: the standard field-tagged scheme and the compact tuple scheme.
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new AccumuloExceptionStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new AccumuloExceptionTupleSchemeFactory();

  public java.lang.String msg; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MSG((short)1, "msg");

    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // MSG
          return MSG;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AccumuloException.class, metaDataMap);
  }

  public AccumuloException() {
  }

  public AccumuloException(
    java.lang.String msg)
  {
    this();
    this.msg = msg;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public AccumuloException(AccumuloException other) {
    if (other.isSetMsg()) {
      this.msg = other.msg;
    }
  }

  public AccumuloException deepCopy() {
    return new AccumuloException(this);
  }

  @Override
  public void clear() {
    this.msg = null;
  }

  public java.lang.String getMsg() {
    return this.msg;
  }

  public AccumuloException setMsg(java.lang.String msg) {
    this.msg = msg;
    return this;
  }

  public void unsetMsg() {
    this.msg = null;
  }

  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
  public boolean isSetMsg() {
    return this.msg != null;
  }

  public void setMsgIsSet(boolean value) {
    if (!value) {
      this.msg = null;
    }
  }

  public void setFieldValue(_Fields field, java.lang.Object value) {
    switch (field) {
    case MSG:
      if (value == null) {
        unsetMsg();
      } else {
        setMsg((java.lang.String)value);
      }
      break;
    }
  }

  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case MSG:
      return getMsg();
    }
    throw new java.lang.IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case MSG:
      return isSetMsg();
    }
    throw new java.lang.IllegalStateException();
  }

  @Override
  public boolean equals(java.lang.Object that) {
    if (that == null)
      return false;
    if (that instanceof AccumuloException)
      return this.equals((AccumuloException)that);
    return false;
  }

  public boolean equals(AccumuloException that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    boolean this_present_msg = true && this.isSetMsg();
    boolean that_present_msg = true && that.isSetMsg();
    if (this_present_msg || that_present_msg) {
      if (!(this_present_msg && that_present_msg))
        return false;
      if (!this.msg.equals(that.msg))
        return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetMsg()) ? 131071 : 524287);
    if (isSetMsg())
      hashCode = hashCode * 8191 + msg.hashCode();
    return hashCode;
  }

  @Override
  public int compareTo(AccumuloException other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.valueOf(isSetMsg()).compareTo(other.isSetMsg());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetMsg()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }

  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("AccumuloException(");
    sb.append("msg:");
    if (this.msg == null) {
      sb.append("null");
    } else {
      sb.append(this.msg);
    }
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }

  // Java serialization is delegated to Thrift's compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class AccumuloExceptionStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public AccumuloExceptionStandardScheme getScheme() {
      return new AccumuloExceptionStandardScheme();
    }
  }

  private static class AccumuloExceptionStandardScheme extends org.apache.thrift.scheme.StandardScheme<AccumuloException> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, AccumuloException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // MSG
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.msg = iprot.readString();
              struct.setMsgIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, AccumuloException struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.msg != null) {
        oprot.writeFieldBegin(MSG_FIELD_DESC);
        oprot.writeString(struct.msg);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }

  private static class AccumuloExceptionTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public AccumuloExceptionTupleScheme getScheme() {
      return new AccumuloExceptionTupleScheme();
    }
  }

  private static class AccumuloExceptionTupleScheme extends org.apache.thrift.scheme.TupleScheme<AccumuloException> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, AccumuloException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetMsg()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetMsg()) {
        oprot.writeString(struct.msg);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, AccumuloException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.msg = iprot.readString();
        struct.setMsgIsSet(true);
      }
    }
  }

  // Selects the serialization scheme matching the protocol in use.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
| |
package org.basex.query.util.fingertree;
import org.basex.query.*;
import org.basex.util.*;
/**
* A <i>deep</i> node containing elements in the left and right digit and a sub-tree in
* the middle.
*
* @author BaseX Team 2005-22, BSD License
* @author Leo Woerteler
*
* @param <N> node type
* @param <E> element type
*/
final class DeepTree<N, E> extends FingerTree<N, E> {
/** Preferred size of an inner node. */
private static final int NODE_SIZE = MAX_ARITY;
/** Left digit. */
final Node<N, E>[] left;
/** Size of the left digit, cached for speeding up indexing. */
final long leftSize;
/** Middle tree. */
final FingerTree<Node<N, E>, E> middle;
/** Right digit. */
final Node<N, E>[] right;
/** Size of this tree. */
private final long size;
/**
 * Creates a deep tree from its constituent parts.
 * @param left left digit, must contain at least one node
 * @param leftSize cached number of elements in the left digit
 * @param middle middle sub-tree of inner nodes
 * @param right right digit, must contain at least one node
 * @param size total number of elements in this tree
 */
DeepTree(final Node<N, E>[] left, final long leftSize, final FingerTree<Node<N, E>, E> middle,
    final Node<N, E>[] right, final long size) {
  this.size = size;
  this.left = left;
  this.leftSize = leftSize;
  this.middle = middle;
  this.right = right;
  // both digits must be non-empty and the cached sizes must be consistent
  assert left.length > 0 && right.length > 0
      && size == leftSize + middle.size() + size(right);
}
/**
 * Factory method that computes the size of the left digit itself.
 * @param <N> node type
 * @param <E> element type
 * @param left left digit
 * @param middle middle tree
 * @param right right digit
 * @param size size of this tree
 * @return the deep node
 */
static <N, E> DeepTree<N, E> get(final Node<N, E>[] left, final FingerTree<Node<N, E>, E> middle,
    final Node<N, E>[] right, final long size) {
  final long lSize = size(left);
  return new DeepTree<>(left, lSize, middle, right, size);
}
/**
 * Factory method for deep nodes whose middle tree is empty.
 * @param <N> node type
 * @param <E> element type
 * @param left left digit
 * @param leftSize size of the left sub-tree
 * @param right right digit
 * @param size size of this tree
 * @return the deep node
 */
static <N, E> DeepTree<N, E> get(final Node<N, E>[] left, final long leftSize,
    final Node<N, E>[] right, final long size) {
  final FingerTree<Node<N, E>, E> empty = EmptyTree.getInstance();
  return new DeepTree<>(left, leftSize, empty, right, size);
}
/**
 * Factory method for deep nodes with an empty middle tree, computing the left digit's size.
 * @param <N> node type
 * @param <E> element type
 * @param left left digit
 * @param right right digit
 * @param size size of this tree
 * @return the deep node
 */
static <N, E> DeepTree<N, E> get(final Node<N, E>[] left, final Node<N, E>[] right,
    final long size) {
  final long lSize = size(left);
  return new DeepTree<>(left, lSize, EmptyTree.getInstance(), right, size);
}
/**
 * Factory method that derives all cached sizes from the given parts.
 * @param <N> node type
 * @param <E> element type
 * @param left left digit
 * @param middle middle sub-tree
 * @param right right digit
 * @return the deep node
 */
static <N, E> DeepTree<N, E> get(final Node<N, E>[] left,
    final FingerTree<Node<N, E>, E> middle, final Node<N, E>[] right) {
  final long leftSz = size(left);
  final long midSz = middle.size();
  final long rightSz = size(right);
  return new DeepTree<>(left, leftSz, middle, right, leftSz + midSz + rightSz);
}
/**
 * Factory method for deep nodes with an empty middle tree, deriving all cached sizes.
 * @param <N> node type
 * @param <E> element type
 * @param left left digit
 * @param right right digit
 * @return the deep node
 */
static <N, E> DeepTree<N, E> get(final Node<N, E>[] left, final Node<N, E>[] right) {
  final long leftSz = size(left);
  final long rightSz = size(right);
  return new DeepTree<>(left, leftSz, EmptyTree.getInstance(), right, leftSz + rightSz);
}
// Prepends a single node to this tree, returning a new persistent tree.
@Override
public DeepTree<N, E> cons(final Node<N, E> fst) {
  // number of elements added; all cached sizes grow by this amount
  final long sz = fst.size();
  if(left.length < MAX_DIGIT) {
    // left digit has room: copy it shifted by one slot and place fst at the front
    // (slice with a negative start apparently reserves a leading slot — confirm against slice helper)
    final Node<N, E>[] newLeft = slice(left, -1, left.length);
    newLeft[0] = fst;
    return new DeepTree<>(newLeft, leftSize + sz, middle, right, size + sz);
  }
  // left digit is full: move its last NODE_SIZE children into the middle tree as one inner node
  final int ll = left.length, m = ll - NODE_SIZE;
  final Node<N, E>[] newLeft = slice(left, -1, m), sub = slice(left, m, ll);
  newLeft[0] = fst;
  final FingerTree<Node<N, E>, E> mid = middle.cons(new InnerNode<>(sub));
  return get(newLeft, mid, right, size + sz);
}
// Appends a single node to this tree, returning a new persistent tree.
@Override
public DeepTree<N, E> snoc(final Node<N, E> lst) {
  if(right.length < MAX_DIGIT) {
    // right digit has room: copy it one slot longer and place lst at the end
    final Node<N, E>[] newRight = slice(right, 0, right.length + 1);
    newRight[right.length] = lst;
    return new DeepTree<>(left, leftSize, middle, newRight, size + lst.size());
  }
  // right digit is full: move its first NODE_SIZE children into the middle tree as one inner node
  final int rl = right.length, m = NODE_SIZE;
  final Node<N, E>[] sub = slice(right, 0, m), newRight = slice(right, m, rl + 1);
  newRight[rl - m] = lst;
  final FingerTree<Node<N, E>, E> mid = middle.snoc(new InnerNode<>(sub));
  return new DeepTree<>(left, leftSize, mid, newRight, size + lst.size());
}
// The head of a deep tree is the first node of its left digit.
@Override
public Node<N, E> head() {
  final Node<N, E> first = left[0];
  return first;
}
// The last node of a deep tree is the final node of its right digit.
@Override
public Node<N, E> last() {
  final int lastIndex = right.length - 1;
  return right[lastIndex];
}
// Returns this tree without its last node.
@Override
public FingerTree<N, E> init() {
  // total size shrinks by the size of the removed last node
  final long newSize = size - right[right.length - 1].size();
  if(right.length > 1) {
    // right digit is safe, just shrink it
    return new DeepTree<>(left, leftSize, middle, slice(right, 0, right.length - 1), newSize);
  }
  if(middle.isEmpty()) {
    // middle tree empty, make a tree from the left list
    if(left.length == 1) return new SingletonTree<>(left[0]);
    final int mid = left.length / 2;
    return get(slice(left, 0, mid), slice(left, mid, left.length), newSize);
  }
  // extract values for the right digit from the middle
  final InnerNode<N, E> last = (InnerNode<N, E>) middle.last();
  return new DeepTree<>(left, leftSize, middle.init(), last.children, newSize);
}
// Returns this tree without its first node (mirror image of init()).
@Override
public FingerTree<N, E> tail() {
  // total size shrinks by the size of the removed head node
  final long fstSize = left[0].size(), newSize = size - fstSize;
  if(left.length > 1) {
    // left digit is safe, just shrink it
    final Node<N, E>[] newLeft = slice(left, 1, left.length);
    return new DeepTree<>(newLeft, leftSize - fstSize, middle, right, newSize);
  }
  if(middle.isEmpty()) {
    // middle tree empty, make a tree from the right list
    if(right.length == 1) return new SingletonTree<>(right[0]);
    final int mid = right.length / 2;
    return get(slice(right, 0, mid), slice(right, mid, right.length), newSize);
  }
  // extract values for the left digit from the middle
  final InnerNode<N, E> head = (InnerNode<N, E>) middle.head();
  return new DeepTree<>(head.children, head.size(), middle.tail(), right, newSize);
}
// Number of elements in this tree; cached at construction time.
@Override
public long size() {
  return this.size;
}
// Concatenates this tree, the given middle nodes and another tree into one deep tree.
@Override
public DeepTree<N, E> concat(final Node<N, E>[] nodes, final long sz,
    final FingerTree<N, E> other) {
  // first absorb the connecting nodes on our right side
  final DeepTree<N, E> lft = (DeepTree<N, E>) addAll(nodes, sz, false);
  // non-deep right-hand side holds at most one node: append it directly
  if(!(other instanceof DeepTree)) return other.isEmpty() ? lft : lft.snoc(other.head());
  final DeepTree<N, E> rght = (DeepTree<N, E>) other;
  // the inner digits (our right, their left) are repackaged into k inner nodes
  final Node<N, E>[] as = lft.right, bs = rght.left;
  final int l = as.length, n = l + bs.length, k = (n + MAX_ARITY - 1) / MAX_ARITY;
  @SuppressWarnings("unchecked")
  final Node<Node<N, E>, E>[] out = new Node[k];
  for(int i = 0, p = 0; i < k; i++) {
    // distribute the remaining n - p children as evenly as possible over the remaining slots
    final int rem = k - i, curr = (n - p + rem - 1) / rem;
    @SuppressWarnings("unchecked")
    final Node<N, E>[] ch = new Node[curr];
    // inL = children still to be taken from `as`; the rest come from `bs`
    final int inL = l - p;
    if(curr <= inL) {
      Array.copyToStart(as, p, curr, ch);
    } else if(inL > 0) {
      Array.copyToStart(as, p, inL, ch);
      Array.copyFromStart(bs, curr - inL, ch, inL);
    } else {
      Array.copyToStart(bs, -inL, curr, ch);
    }
    out[i] = new InnerNode<>(ch);
    p += curr;
  }
  // recursively concatenate the middle trees with the repackaged nodes in between
  final long inMid = lft.rightSize() + rght.leftSize;
  final FingerTree<Node<N, E>, E> newMid = lft.middle.concat(out, inMid, rght.middle);
  final long newSize = lft.leftSize + newMid.size() + rght.rightSize();
  return new DeepTree<>(lft.left, lft.leftSize, newMid, rght.right, newSize);
}
// Reverses the element order of this tree.
@Override
public FingerTree<N, E> reverse(final QueryContext qc) {
  // allow the running query to be interrupted during deep recursion
  qc.checkStop();
  final int l = left.length, r = right.length;
  @SuppressWarnings("unchecked")
  final Node<N, E>[] newLeft = new Node[r], newRight = new Node[l];
  // digits swap sides: the reversed right digit becomes the new left digit and vice versa
  for(int i = 0; i < r; i++) newLeft[i] = right[r - 1 - i].reverse();
  for(int i = 0; i < l; i++) newRight[i] = left[l - 1 - i].reverse();
  return new DeepTree<>(newLeft, rightSize(), middle.reverse(qc), newRight, size);
}
// Replaces the element at the given position, returning a new persistent tree.
@Override
public FingerTree<N, E> set(final long pos, final E val) {
  long off = pos;
  if(off < leftSize) {
    // position lies in the left digit: locate the containing node by subtracting node sizes
    final Node<N, E>[] newLeft = left.clone();
    int i = 0;
    for(;; i++) {
      final long sub = newLeft[i].size();
      if(off < sub) break;
      off -= sub;
    }
    newLeft[i] = newLeft[i].set(off, val);
    return new DeepTree<>(newLeft, leftSize, middle, right, size);
  }
  off -= leftSize;
  final long mid = middle.size();
  if(off < mid) {
    // position lies in the middle tree: delegate
    return new DeepTree<>(left, leftSize, middle.set(off, val), right, size);
  }
  off -= mid;
  // position lies in the right digit: locate the containing node as above
  final Node<N, E>[] newRight = right.clone();
  int i = 0;
  for(;; i++) {
    final long sub = newRight[i].size();
    if(off < sub) break;
    off -= sub;
  }
  newRight[i] = newRight[i].set(off, val);
  return new DeepTree<>(left, leftSize, middle, newRight, size);
}
// Inserts an element at the given position, returning a new persistent tree.
@Override
public FingerTree<N, E> insert(final long pos, final E val, final QueryContext qc) {
  // allow the running query to be interrupted
  qc.checkStop();
  if(pos <= leftSize) {
    // insert into left digit
    int i = 0;
    long p = pos;
    // `p <= sub` (not `<`): an insertion at a node boundary goes into the earlier node
    for(;; i++) {
      final long sub = left[i].size();
      if(p <= sub) break;
      p -= sub;
    }
    final int ll = left.length;
    // hand the neighbors to the node so it can rebalance into them on overflow
    final Node<N, E> l = i > 0 ? left[i - 1] : null, r = i + 1 < ll ? left[i + 1] : null;
    @SuppressWarnings("unchecked")
    final Node<N, E>[] siblings = new Node[] { l, null, r, null };
    if(!left[i].insert(siblings, p, val)) {
      // no split
      final Node<N, E>[] newLeft = left.clone();
      if(i > 0) newLeft[i - 1] = siblings[0];
      newLeft[i] = siblings[1];
      if(i + 1 < ll) newLeft[i + 1] = siblings[2];
      return new DeepTree<>(newLeft, leftSize + 1, middle, right, size + 1);
    }
    // node was split: the digit grows by one node
    @SuppressWarnings("unchecked")
    final Node<N, E>[] temp = new Node[ll + 1];
    if(i > 0) {
      Array.copy(left, i - 1, temp);
      temp[i - 1] = siblings[0];
    }
    temp[i] = siblings[1];
    temp[i + 1] = siblings[2];
    if(i + 1 < ll) {
      temp[i + 2] = siblings[3];
      Array.copy(left, i + 2, ll - i - 2, temp, i + 3);
    }
    if(ll < MAX_DIGIT) return new DeepTree<>(temp, leftSize + 1, middle, right, size + 1);
    // digit has to be split: overflow nodes move into the middle tree
    final int m = temp.length - NODE_SIZE;
    final Node<N, E>[] newLeft = slice(temp, 0, m), ch = slice(temp, m, temp.length);
    return get(newLeft, middle.cons(new InnerNode<>(ch)), right, size + 1);
  }

  long p = pos - leftSize;
  final long midSize = middle.size();
  if(p < midSize)
    // position lies strictly inside the middle tree: delegate
    return new DeepTree<>(left, leftSize, middle.insert(p, val, qc), right, size + 1);

  // insert into right digit (mirror image of the left-digit case)
  p -= midSize;
  int i = 0;
  for(;; i++) {
    final long sub = right[i].size();
    if(p <= sub) break;
    p -= sub;
  }
  final int rl = right.length;
  final Node<N, E> l = i > 0 ? right[i - 1] : null, r = i + 1 < rl ? right[i + 1] : null;
  @SuppressWarnings("unchecked")
  final Node<N, E>[] siblings = new Node[] { l, null, r, null };
  if(!right[i].insert(siblings, p, val)) {
    // no split
    final Node<N, E>[] newRight = right.clone();
    if(i > 0) newRight[i - 1] = siblings[0];
    newRight[i] = siblings[1];
    if(i + 1 < rl) newRight[i + 1] = siblings[2];
    return new DeepTree<>(left, leftSize, middle, newRight, size + 1);
  }
  // node was split
  @SuppressWarnings("unchecked")
  final Node<N, E>[] temp = new Node[rl + 1];
  if(i > 0) {
    Array.copy(right, i - 1, temp);
    temp[i - 1] = siblings[0];
  }
  temp[i] = siblings[1];
  temp[i + 1] = siblings[2];
  if(i + 1 < rl) {
    temp[i + 2] = siblings[3];
    Array.copy(right, i + 2, rl - i - 2, temp, i + 3);
  }
  if(right.length < MAX_DIGIT) return new DeepTree<>(left, leftSize, middle, temp, size + 1);
  // digit has to be split: overflow nodes move into the middle tree
  final int m = NODE_SIZE;
  final Node<N, E>[] ch = slice(temp, 0, m), newRight = slice(temp, m, temp.length);
  return new DeepTree<>(left, leftSize, middle.snoc(new InnerNode<>(ch)), newRight, size + 1);
}
// Removes the element at the given position, returning the resulting (possibly partial) tree.
@Override
public TreeSlice<N, E> remove(final long pos, final QueryContext qc) {
  // allow the running query to be interrupted
  qc.checkStop();
  // deletions inside a digit are handled by the dedicated helpers
  if(pos < leftSize) return new TreeSlice<>(removeLeft(pos));
  final long rightStart = leftSize + middle.size();
  if(pos >= rightStart) return new TreeSlice<>(removeRight(pos - rightStart));
  // deletion happens inside the middle tree
  final TreeSlice<Node<N, E>, E> slice = middle.remove(pos - leftSize, qc);
  if(slice.isTree()) {
    // no underflow
    final FingerTree<Node<N, E>, E> newMiddle = slice.getTree();
    return slice.setTree(new DeepTree<>(left, leftSize, newMiddle, right, size - 1));
  }
  // middle tree had an underflow, one sub-node left
  final Node<N, E> node = (Node<N, E>) ((PartialInnerNode<N, E>) slice.getPartial()).sub;
  // try to extend the smaller digit
  if(left.length < right.length) {
    // merge into left digit
    final Node<N, E>[] newLeft = slice(left, 0, left.length + 1);
    newLeft[left.length] = node;
    return slice.setTree(get(newLeft, leftSize + node.size(), right, size - 1));
  }
  if(right.length < MAX_DIGIT) {
    // merge into right digit
    final Node<N, E>[] newRight = slice(right, -1, right.length);
    newRight[0] = node;
    return slice.setTree(get(left, leftSize, newRight, size - 1));
  }
  // both digits are full: redistribute the 2 * MAX_DIGIT + 1 nodes over new digits
  // and a singleton middle tree
  final int n = 2 * MAX_DIGIT + 1, ll = (n - NODE_SIZE) / 2;
  @SuppressWarnings("unchecked")
  final Node<N, E>[] newLeft = slice(left, 0, ll), ch = new Node[NODE_SIZE];
  final int inL = left.length - ll, inR = NODE_SIZE - inL - 1;
  Array.copyToStart(left, ll, inL, ch);
  ch[inL] = node;
  Array.copyFromStart(right, inR, ch, inL + 1);
  final Node<N, E>[] newRight = slice(right, inR, MAX_DIGIT);
  final Node<Node<N, E>, E> newMid = new InnerNode<>(ch);
  return slice.setTree(get(newLeft, new SingletonTree<>(newMid), newRight, size - 1));
}
/**
 * Remove an element from the left digit.
 * @param pos position inside the left digit
 * @return resulting tree
 */
private FingerTree<N, E> removeLeft(final long pos) {
  if(left.length > 1) {
    // left digit cannot underflow, just delete the element
    return new DeepTree<>(remove(left, pos), leftSize - 1, middle, right, size - 1);
  }
  // singleton digit might underflow
  final Node<N, E> node = left[0];
  if(!middle.isEmpty()) {
    // next node for balancing is in middle tree
    final InnerNode<N, E> head = (InnerNode<N, E>) middle.head();
    final Node<N, E> first = head.getSub(0);
    // rem[1] = this node after deletion (null if merged), rem[2] = new right neighbor
    final NodeLike<N, E>[] rem = node.remove(null, first, pos);
    final Node<N, E> newNode = (Node<N, E>) rem[1], newFirst = (Node<N, E>) rem[2];
    if(newNode == null) {
      // nodes were merged: the head of the middle tree becomes the new left digit
      final Node<N, E>[] newLeft = head.children.clone();
      newLeft[0] = newFirst;
      return get(newLeft, middle.tail(), right, size - 1);
    }
    @SuppressWarnings("unchecked")
    final Node<N, E>[] newLeft = new Node[] { newNode };
    if(newFirst != first) {
      // nodes were balanced: the middle tree's head must be replaced as well
      final FingerTree<Node<N, E>, E> newMid = middle.replaceHead(head.replaceFirst(newFirst));
      return new DeepTree<>(newLeft, newNode.size(), newMid, right, size - 1);
    }
    // no changes to this tree's structure
    return new DeepTree<>(newLeft, newNode.size(), middle, right, size - 1);
  }

  // potentially balance with right digit
  final NodeLike<N, E>[] rem = node.remove(null, right[0], pos);
  final Node<N, E> newNode = (Node<N, E>) rem[1], newFirstRight = (Node<N, E>) rem[2];
  if(newNode == null) {
    // nodes were merged: split what is left of the right digit into two new digits
    if(right.length == 1) return new SingletonTree<>(newFirstRight);
    final int mid = right.length / 2;
    final Node<N, E>[] newLeft = slice(right, 0, mid);
    newLeft[0] = newFirstRight;
    return get(newLeft, middle, slice(right, mid, right.length), size - 1);
  }
  // structure does not change
  @SuppressWarnings("unchecked")
  final Node<N, E>[] newLeft = new Node[] { newNode };
  if(newFirstRight == right[0]) {
    // right digit stays the same
    return new DeepTree<>(newLeft, newLeft[0].size(), middle, right, size - 1);
  }
  // adapt the right digit
  final Node<N, E>[] newRight = right.clone();
  newRight[0] = newFirstRight;
  return new DeepTree<>(newLeft, newNode.size(), middle, newRight, size - 1);
}
/**
 * Remove an element from the right digit (mirror image of {@link #removeLeft(long)}).
 * @param pos position inside the right digit
 * @return resulting tree
 */
private FingerTree<N, E> removeRight(final long pos) {
  if(right.length > 1) {
    // right digit cannot underflow, just delete the element
    return new DeepTree<>(left, leftSize, middle, remove(right, pos), size - 1);
  }
  // singleton digit might underflow
  final Node<N, E> node = right[0];
  if(!middle.isEmpty()) {
    // potentially balance with middle tree
    final InnerNode<N, E> last = (InnerNode<N, E>) middle.last();
    final Node<N, E> lastSub = last.getSub(last.arity() - 1);
    // rem[0] = new left neighbor, rem[1] = this node after deletion (null if merged)
    final NodeLike<N, E>[] rem = node.remove(lastSub, null, pos);
    final Node<N, E> newLastSub = (Node<N, E>) rem[0], newNode = (Node<N, E>) rem[1];
    if(newNode == null) {
      // nodes were merged: the last middle node becomes the new right digit
      final Node<N, E>[] newRight = last.children.clone();
      newRight[newRight.length - 1] = newLastSub;
      return new DeepTree<>(left, leftSize, middle.init(), newRight, size - 1);
    }
    @SuppressWarnings("unchecked")
    final Node<N, E>[] newRight = new Node[] { newNode };
    // replace last node in middle tree
    final Node<Node<N, E>, E> newLast = last.replaceLast(newLastSub);
    return new DeepTree<>(left, leftSize, middle.replaceLast(newLast), newRight, size - 1);
  }

  // balance with left digit
  final Node<N, E> lastLeft = left[left.length - 1];
  final NodeLike<N, E>[] rem = node.remove(lastLeft, null, pos);
  final Node<N, E> newLastLeft = (Node<N, E>) rem[0], newNode = (Node<N, E>) rem[1];
  if(newNode == null) {
    // nodes were merged
    if(left.length == 1) {
      // only one node left
      return new SingletonTree<>(newLastLeft);
    }
    @SuppressWarnings("unchecked")
    final Node<N, E>[] newRight = new Node[] { newLastLeft };
    return get(slice(left, 0, left.length - 1), newRight, size - 1);
  }
  @SuppressWarnings("unchecked")
  final Node<N, E>[] newRight = new Node[] { newNode };
  if(newLastLeft == lastLeft) {
    // deletion could be absorbed
    return get(left, leftSize, newRight, size - 1);
  }
  // adapt the left digit
  final Node<N, E>[] newLeft = left.clone();
  newLeft[newLeft.length - 1] = newLastLeft;
  return get(newLeft, newRight, size - 1);
}
/**
 * Deletes an element from the given digit containing at least two nodes.
 * @param <N> node type
 * @param <E> element type
 * @param arr array of nodes
 * @param pos deletion position
 * @return new digit
 */
private static <N, E> Node<N, E>[] remove(final Node<N, E>[] arr, final long pos) {
  // locate the node containing the position by subtracting node sizes
  int i = 0;
  long off = pos;
  Node<N, E> node;
  while(true) {
    node = arr[i];
    final long nodeSize = node.size();
    if(off < nodeSize) break;
    off -= nodeSize;
    i++;
  }
  final int n = arr.length;
  // delegate the deletion to the node, giving it its neighbors for rebalancing
  final NodeLike<N, E>[] res = arr[i].remove(
      i == 0 ? null : arr[i - 1], i == n - 1 ? null : arr[i + 1], off);
  final Node<N, E> l = (Node<N, E>) res[0], m = (Node<N, E>) res[1], r = (Node<N, E>) res[2];

  if(m != null) {
    // same number of nodes: only the affected node and its neighbors change
    final Node<N, E>[] out = arr.clone();
    if(i > 0) out[i - 1] = l;
    out[i] = m;
    if(i < n - 1) out[i + 1] = r;
    return out;
  }

  // the node was merged: the digit shrinks by one node
  @SuppressWarnings("unchecked")
  final Node<N, E>[] out = new Node[n - 1];
  if(i > 0) {
    // nodes to the left
    Array.copy(arr, i - 1, out);
    out[i - 1] = l;
  }
  if(i < n - 1) {
    // nodes to the right
    out[i] = r;
    Array.copy(arr, i + 2, n - i - 2, out, i + 1);
  }
  return out;
}
// Extracts the sub-sequence [from, from + len) as a (possibly partial) tree.
@Override
public TreeSlice<N, E> slice(final long from, final long len) {
  // the whole tree was requested
  if(from == 0 && len == size) return new TreeSlice<>(this);
  // split the requested range over left digit, middle tree and right digit
  final long midSize = middle.size(), rightOff = leftSize + midSize;
  final long inLeft = from + len <= leftSize ? len : from < leftSize ? leftSize - from : 0;
  final long inRight = from >= rightOff ? len : from + len > rightOff ? from + len - rightOff : 0;
  // buffer collecting loose nodes; sized for both digits plus one underflow node
  @SuppressWarnings("unchecked")
  final NodeLike<N, E>[] buffer = new NodeLike[2 * MAX_DIGIT + 1];
  int inBuffer = splitDigit(left, from, inLeft, buffer, 0);
  if(inLeft == len) {
    // everything was taken from the left digit
    if(inBuffer == 1) return new TreeSlice<>(buffer[0]);
    final int mid1 = inBuffer / 2;
    final Node<N, E>[] ls = slice(buffer, 0, mid1), rs = slice(buffer, mid1, inBuffer);
    return new TreeSlice<>(get(ls, rs, len));
  }

  final long inMiddle = len - inLeft - inRight;
  final FingerTree<Node<N, E>, E> mid;
  final TreeSlice<Node<N, E>, E> slice;
  if(inMiddle == 0) {
    mid = EmptyTree.getInstance();
    slice = new TreeSlice<>(mid);
  } else {
    final long midOff = from <= leftSize ? 0 : from - leftSize;
    slice = middle.slice(midOff, inMiddle);
    if(slice.isTree()) {
      mid = slice.getTree();
    } else {
      // partial result: append the loose sub-node to the buffer instead
      final NodeLike<N, E> sub = ((PartialInnerNode<N, E>) slice.getPartial()).sub;
      inBuffer = sub.append(buffer, inBuffer);
      mid = EmptyTree.getInstance();
    }
  }

  final long rightFrom = from < rightOff ? 0 : from - rightOff;
  if(mid.isEmpty()) {
    // no middle tree left: collect the right part into the buffer as well
    inBuffer = splitDigit(right, rightFrom, inRight, buffer, inBuffer);
    return slice.setNodes(buffer, inBuffer, len);
  }

  // if the buffer holds only a partial node, pull the middle tree's head in to complete it
  final FingerTree<Node<N, E>, E> mid2;
  if(inBuffer > 1 || buffer[0] instanceof Node) {
    mid2 = mid;
  } else {
    final InnerNode<N, E> head = (InnerNode<N, E>) mid.head();
    final int k = head.arity();
    inBuffer = head.getSub(0).append(buffer, inBuffer);
    for(int i = 1; i < k; i++) buffer[inBuffer++] = head.getSub(i);
    mid2 = mid.tail();
  }

  if(mid2.isEmpty()) {
    // middle tree was consumed: finish with the right part in the buffer
    inBuffer = splitDigit(right, rightFrom, inRight, buffer, inBuffer);
    return slice.setNodes(buffer, inBuffer, len);
  }

  // the buffer now forms the new left digit
  final Node<N, E>[] newLeft = slice(buffer, 0, inBuffer);
  inBuffer = splitDigit(right, rightFrom, inRight, buffer, 0);
  final FingerTree<Node<N, E>, E> mid3;
  final Node<N, E>[] newRight;
  if(inBuffer == 0) {
    // nothing taken from the right digit: borrow the middle tree's last node
    mid3 = mid2.init();
    newRight = ((InnerNode<N, E>) mid2.last()).children;
  } else {
    if(inBuffer > 1 || buffer[0] instanceof Node) {
      mid3 = mid2;
    } else {
      // only a partial node: merge it with the middle tree's last node's children
      final NodeLike<N, E> partial = buffer[0];
      final InnerNode<N, E> last = (InnerNode<N, E>) mid2.last();
      final int k = last.arity();
      for(int i = 0; i < k; i++) buffer[i] = last.getSub(i);
      inBuffer = partial.append(buffer, k);
      mid3 = mid2.init();
    }
    newRight = slice(buffer, 0, inBuffer);
  }
  return slice.setTree(get(newLeft, mid3, newRight, len));
}
/**
 * Creates a tree slice from a digit.
 * @param <N> node type
 * @param <E> element type
 * @param nodes the digit
 * @param from element offset
 * @param len number of elements
 * @param buffer buffer to insert the node slice into
 * @param inBuffer initial number of nodes in the buffer
 * @return the slice
 */
private static <N, E> int splitDigit(final Node<N, E>[] nodes, final long from,
    final long len, final NodeLike<N, E>[] buffer, final int inBuffer) {
  // nothing requested from this digit
  if(len <= 0) return inBuffer;

  // find the first sub-node containing used elements
  int firstPos = 0;
  long firstOff = from;
  Node<N, E> first = nodes[0];
  long firstSize = first.size();
  while(firstOff >= firstSize) {
    firstOff -= firstSize;
    first = nodes[++firstPos];
    firstSize = first.size();
  }

  // firstOff < firstSize
  final long inFirst = firstSize - firstOff;
  if(inFirst >= len) {
    // everything in first sub-node; take the node as-is if it is covered completely
    final NodeLike<N, E> part = len == firstSize ? first : first.slice(firstOff, len);
    return part.append(buffer, inBuffer);
  }

  // append the (possibly partial) first node, then all further nodes until len is reached
  final NodeLike<N, E> firstSlice = firstOff == 0 ? first : first.slice(firstOff, inFirst);
  int numMerged = firstSlice.append(buffer, inBuffer);

  int pos = firstPos;
  long remaining = len - inFirst;
  while(remaining > 0) {
    // the last node could be partially covered
    final Node<N, E> curr = nodes[++pos];
    final long currSize = curr.size();
    final NodeLike<N, E> slice = remaining >= currSize ? curr : curr.slice(0, remaining);
    numMerged = slice.append(buffer, numMerged);
    remaining -= currSize;
  }
  return numMerged;
}
/**
 * Calculates the size of the right digit from the cached sizes.
 * @return number of elements in the right digit
 */
private long rightSize() {
  // everything that is neither in the left digit nor in the middle tree
  return size - (leftSize + middle.size());
}
// Adds the given nodes (total element count sz) to the left or right end of this tree.
@Override
FingerTree<N, E> addAll(final Node<N, E>[] nodes, final long sz, final boolean appendLeft) {
  final int k = nodes.length;
  if(k == 0) return this;
  if(k == 1) return appendLeft ? cons(nodes[0]) : snoc(nodes[0]);

  if(appendLeft) {
    // concatenate the new nodes with the left digit
    int l = k + left.length;
    final Node<N, E>[] ls = slice(nodes, 0, l);
    Array.copyFromStart(left, left.length, ls, k);
    if(l <= MAX_DIGIT) return get(ls, middle, right);

    // overflow: push chunks of the combined digit into the middle tree, evenly sized
    FingerTree<Node<N, E>, E> newMid = middle;
    for(int rem = (l + MAX_ARITY - 1) / MAX_ARITY; rem > 1; rem--) {
      final int curr = (l + rem - 1) / rem;
      newMid = newMid.cons(new InnerNode<>(slice(ls, l - curr, l)));
      l -= curr;
    }
    return get(slice(ls, 0, l), newMid, right);
  }

  // concatenate the right digit with the new nodes
  final int r = right.length + k;
  final Node<N, E>[] rs = slice(right, 0, r);
  Array.copyFromStart(nodes, k, rs, right.length);
  if(k + right.length <= MAX_DIGIT) return get(left, middle, rs);

  // overflow: push chunks of the combined digit into the middle tree, evenly sized
  int i = 0;
  FingerTree<Node<N, E>, E> newMid = middle;
  for(int rem = (r + MAX_ARITY - 1) / MAX_ARITY; rem > 1; rem--) {
    final int curr = (r - i + rem - 1) / rem;
    newMid = newMid.snoc(new InnerNode<>(slice(rs, i, i + curr)));
    i += curr;
  }
  return get(left, newMid, slice(rs, i, r));
}
// Replaces the first node of the left digit, adjusting the cached sizes.
@Override
public FingerTree<N, E> replaceHead(final Node<N, E> head) {
  final long delta = head.size() - left[0].size();
  final Node<N, E>[] digit = left.clone();
  digit[0] = head;
  return new DeepTree<>(digit, leftSize + delta, middle, right, size + delta);
}
@Override
public FingerTree<N, E> replaceLast(final Node<N, E> last) {
final int lst = right.length - 1;
final Node<N, E>[] newRight = right.clone();
newRight[lst] = last;
return new DeepTree<>(left, leftSize, middle, newRight, size + last.size()
- right[lst].size());
}
  /**
   * Recursively writes an indented multi-line description of this tree to the given builder.
   * NOTE(review): the left digit is printed with its size ("Left(n)[") while the right digit
   * is printed without one ("Right[") — possibly intentional since only leftSize is cached.
   * @param sb string builder
   * @param indent current indentation depth (two spaces per level)
   */
  @Override
  void toString(final StringBuilder sb, final int indent) {
    for(int i = 0; i < indent; i++) sb.append("  ");
    sb.append("Deep(").append(size).append(")[\n");
    // left digit
    for(int i = 0; i < indent + 1; i++) sb.append("  ");
    sb.append("Left(").append(leftSize).append(")[\n");
    for(final Node<N, E> e : left) {
      toString(e, sb, indent + 2);
      sb.append('\n');
    }
    for(int i = 0; i < indent + 1; i++) sb.append("  ");
    sb.append("]\n");
    // middle tree
    middle.toString(sb, indent + 1);
    sb.append('\n');
    // right digit
    for(int i = 0; i < indent + 1; i++) sb.append("  ");
    sb.append("Right[\n");
    for(final Node<N, E> e : right) {
      toString(e, sb, indent + 2);
      sb.append('\n');
    }
    for(int i = 0; i < indent + 1; i++) sb.append("  ");
    sb.append("]\n");
    for(int i = 0; i < indent; i++) sb.append("  ");
    sb.append(']');
  }
  /**
   * Checks the structural invariants of this deep tree: both digits must hold between 1 and
   * {@code MAX_DIGIT} nodes, the cached {@code leftSize} must match the actual size of the
   * left digit, and the cached {@code size} must equal the sum of all three parts.
   * Child nodes and the middle tree are checked recursively.
   * @return the verified total size of this tree
   * @throws AssertionError if any invariant is violated
   */
  @Override
  public long checkInvariants() {
    if(left.length < 1 || left.length > MAX_DIGIT) throw new AssertionError(
        "Wrong left digit length: " + left.length);
    long sz = 0;
    for(final Node<N, E> nd : left)
      sz += nd.checkInvariants();
    if(sz != leftSize) throw new AssertionError("Wrong leftSize: " + leftSize + " vs. " + sz);
    sz += middle.checkInvariants();
    if(right.length < 1 || right.length > MAX_DIGIT) throw new AssertionError(
        "Wrong right digit length: " + right.length);
    for(final Node<N, E> nd : right)
      sz += nd.checkInvariants();
    if(sz != size) throw new AssertionError("Wrong size: " + size + " vs. " + sz);
    return sz;
  }
/**
* Calculates the size of a digit.
* @param <N> node type
* @param arr digit
* @return size
*/
static <N extends Node<?, ?>> long size(final N[] arr) {
long size = 0;
for(final N o : arr) size += o.size();
return size;
}
  /**
   * Returns an array containing the values at the indices {@code from} to {@code to - 1}
   * in the given array. Its length is always {@code to - from}. If {@code from} is
   * smaller than zero, the first {@code -from} entries in the resulting array are
   * {@code null}. If {@code to > arr.length} then the last {@code to - arr.length}
   * entries are {@code null}.
   * @param <N> node type
   * @param <E> element type
   * @param arr input array
   * @param from first index, inclusive (may be negative)
   * @param to last index, exclusive (may be greater than {@code arr.length})
   * @return resulting array
   */
  static <N, E> Node<N, E>[] slice(final NodeLike<N, E>[] arr, final int from, final int to) {
    @SuppressWarnings("unchecked")
    final Node<N, E>[] out = new Node[to - from];
    // clamp the copied range to the bounds of the input array ...
    final int in0 = Math.max(0, from), in1 = Math.min(to, arr.length);
    // ... and shift the destination so out-of-range positions stay null
    final int out0 = Math.max(-from, 0);
    Array.copy(arr, in0, in1 - in0, out, out0);
    return out;
  }
}
| |
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.standardplugins.callresolver;
import com.google.security.zynamics.binnavi.API.debug.DebugException;
import com.google.security.zynamics.binnavi.API.disassembly.Address;
import com.google.security.zynamics.binnavi.API.disassembly.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.API.disassembly.CouldntSaveDataException;
import com.google.security.zynamics.binnavi.API.disassembly.Function;
import com.google.security.zynamics.binnavi.API.disassembly.Module;
import com.google.security.zynamics.binnavi.API.disassembly.View;
import com.google.security.zynamics.binnavi.API.helpers.Settings;
import com.google.security.zynamics.binnavi.API.plugins.PluginInterface;
import com.google.security.zynamics.binnavi.yfileswrap.API.disassembly.View2D;
import com.google.security.zynamics.zylib.gui.GuiHelper;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.border.TitledBorder;
/**
 * This dialog shows the call resolver progress. The user can use this dialog to step through the
 * individual steps of call resolving and to receive feedback of what is going on.
 */
public final class CallResolverDialog extends JDialog {
  /**
   * These panels show the individual steps necessary for resolving indirect calls. They give the
   * user a way to follow the progress of the call resolver operation.
   */
  private final IconPanel[] panels = new IconPanel[] {new IconPanel("1. Loading target"),
      new IconPanel("2. Find indirect calls"),
      new IconPanel("3. Start debugger", new ResumeButton()), new IconPanel("4. Set breakpoints"),
      new IconPanel("5. Resolving breakpoints"), new IconPanel("6. Stop call resolving")};
  /**
   * Information about the call resolving progress is printed here.
   */
  private final JTextArea outputArea = new JTextArea();
  /**
   * This button is used to show the resolved functions as a list in the output text field.
   */
  private final JButton listResolvedFunctionsButton =
      new JButton(new ListResolvedFunctionsAction());
  /**
   * This button is used to turn the resolved functions into a graph which is then opened in a new
   * window.
   */
  private final JButton graphResolvedFunctionsButton =
      new JButton(new GraphResolvedFunctionsAction());
  /**
   * This button turns all collected results into a complete call graph view
   * (see {@link GraphAllFunctionsAction}).
   */
  private final JButton portResultsButton = new JButton(new GraphAllFunctionsAction());
  /**
   * Action of the Next button.
   */
  private final NextAction nextAction = new NextAction();
  /**
   * Used to resolve the indirect function calls.
   */
  private final CallResolver callResolver;
  /**
   * Creates a new dialog object.
   *
   * @param parent Parent window of the dialog.
   * @param target The target whose calls are resolved.
   */
  private CallResolverDialog(final JFrame parent, final ICallResolverTarget target) {
    super(parent, "Call Resolver");
    assert parent != null;
    assert target != null;
    callResolver = new InternalCallResolver(target, parent);
    setLayout(new BorderLayout());
    add(new LabelPanel(), BorderLayout.NORTH);
    add(new OutputPanel());
    add(new ButtonPanel(), BorderLayout.SOUTH);
    setSize(700, 600);
    setResizable(false);
    setLocationRelativeTo(parent);
    // We need to disable all dialogs to smoothen the lookup.
    Settings.setShowDialogs(false);
    // The close box is handled manually in windowClosing below.
    setDefaultCloseOperation(DO_NOTHING_ON_CLOSE);
    addWindowListener(new WindowAdapter() {
      // Fired by dispose(): restores the global dialog setting and releases the resolver.
      @Override
      public void windowClosed(final WindowEvent e) {
        close();
      }
      // Fired by the close box: hide and dispose, which in turn triggers windowClosed.
      @Override
      public void windowClosing(final WindowEvent e) {
        setVisible(false);
        dispose();
      }
    });
    updateGui();
  }
  /**
   * Shows a call resolver dialog.
   *
   * @param parent Parent window of the dialog.
   * @param target The target whose calls are resolved.
   */
  public static void show(final JFrame parent, final ICallResolverTarget target) {
    final CallResolverDialog dialog = new CallResolverDialog(parent, target);
    dialog.setVisible(true);
  }
  /**
   * Adds a line of text to the output panel.
   *
   * @param string The text to add.
   */
  private void appendOutput(final String string) {
    outputArea.setText(outputArea.getText() + string + "\n");
    // Keep the newest output visible by scrolling to the end.
    outputArea.setCaretPosition(outputArea.getDocument().getLength());
  }
  /**
   * Restores the global dialog setting, releases the resolver and hides this dialog.
   */
  private void close() {
    Settings.setShowDialogs(true);
    callResolver.dispose();
    setVisible(false);
  }
  /**
   * Shows all collected results of the call resolver operation in a new complete call graph view.
   * Runs on a background thread because view creation and saving may take a while.
   */
  private void graphAllFunctions() {
    new Thread() {
      @Override
      public void run() {
        final View view = OutputGraphGenerator.createCompleteView(
            callResolver.getTarget(), callResolver.getIndirectAddresses(),
            callResolver.getResolvedAddresses());
        final View2D view2d = PluginInterface.instance().showInLastWindow(view);
        view2d.doHierarchicalLayout();
        try {
          view.save();
        } catch (final CouldntSaveDataException e) {
          e.printStackTrace();
        }
      }
    }.start();
  }
  /**
   * Shows the results of the call resolver operation in a new call graph view.
   */
  private void graphResolvedFunctions() {
    new Thread() {
      @Override
      public void run() {
        final View view = OutputGraphGenerator.createLoggedView(
            callResolver.getTarget(), callResolver.getIndirectAddresses(),
            callResolver.getResolvedAddresses());
        final View2D view2d = PluginInterface.instance().showInLastWindow(view);
        view2d.doHierarchicalLayout();
        try {
          view.save();
        } catch (final CouldntSaveDataException e) {
          e.printStackTrace();
        }
      }
    }.start();
  }
  /**
   * Shows the results of the call resolver operation in the text output field.
   */
  private void listResolvedFunctions() {
    outputArea.setText(OutputListGenerator.generate(callResolver.getResolvedAddresses()));
    outputArea.setCaretPosition(0);
  }
  /**
   * Updates the GUI depending on the state of the resolver process.
   */
  private void updateGui() {
    final int currentStep = callResolver.getCurrentStep();
    // Enable all panels up to and including the current step; mark earlier ones as done.
    for (int i = 0; i < panels.length; i++) {
      panels[i].setEnabled(i <= currentStep);
      panels[i].setDone(i < currentStep);
    }
    // The result buttons only become active once all steps are complete.
    listResolvedFunctionsButton.setEnabled(currentStep == panels.length);
    graphResolvedFunctionsButton.setEnabled(currentStep == panels.length);
    portResultsButton.setEnabled(currentStep == panels.length);
    nextAction.putValue(Action.NAME, currentStep == panels.length ? "Reset" : "Next");
  }
  /**
   * This panel contains the Next/Reset and Cancel buttons shown at the bottom of the dialog.
   */
  private class ButtonPanel extends JPanel {
    public ButtonPanel() {
      super(new BorderLayout());
      final JPanel innerButtonPanel = new JPanel(new BorderLayout());
      innerButtonPanel.add(new JButton(nextAction), BorderLayout.WEST);
      innerButtonPanel.add(new JButton(new CancelAction()), BorderLayout.EAST);
      add(innerButtonPanel, BorderLayout.EAST);
    }
  }
  /**
   * Action that is used to close the dialog when the user clicks on the Cancel button.
   */
  private class CancelAction extends AbstractAction {
    public CancelAction() {
      super("Cancel");
    }
    @Override
    public void actionPerformed(final ActionEvent e) {
      close();
    }
  }
  /**
   * Action that is used to show all resolver results in a new graph.
   */
  private class GraphAllFunctionsAction extends AbstractAction {
    public GraphAllFunctionsAction() {
      super("Create complete call graph view");
    }
    @Override
    public void actionPerformed(final ActionEvent e) {
      graphAllFunctions();
    }
  }
  /**
   * Action that is used to show the resolver results in a new graph.
   */
  private class GraphResolvedFunctionsAction extends AbstractAction {
    public GraphResolvedFunctionsAction() {
      super("Create limited call graph view");
    }
    @Override
    public void actionPerformed(final ActionEvent e) {
      graphResolvedFunctions();
    }
  }
  /**
   * Class for displaying the panels that show the progress.
   */
  private static class IconPanel extends JPanel {
    /**
     * Shows text that describes the step.
     */
    private final JLabel textLabel;
    // Optional extra control shown on the right-hand side of the panel (may be null).
    private JComponent additionalComponent;
    /**
     * Image shown when the step is complete.
     */
    private static ImageIcon ACCEPT_IMAGE;
    /**
     * Image shown when the step is active.
     */
    private static ImageIcon BULLET_IMAGE;
    public IconPanel(final String text) {
      this(text, null);
    }
    /**
     * Creates a new panel object.
     *
     * @param text Text that describes the step.
     * @param additionalComponent Optional component shown next to the text (may be null).
     */
    public IconPanel(final String text, final JComponent additionalComponent) {
      super(new BorderLayout());
      // Icons are loaded lazily on first construction.
      // NOTE(review): not synchronized — presumably all construction happens on the EDT; confirm.
      if (ACCEPT_IMAGE == null) {
        try {
          ACCEPT_IMAGE =
              new ImageIcon(CallResolverDialog.class.getResource("accept.png").toURI().toURL());
          BULLET_IMAGE = new ImageIcon(
              CallResolverDialog.class.getResource("bullet_blue.png").toURI().toURL());
        } catch (MalformedURLException | URISyntaxException e) {
          e.printStackTrace();
        }
      }
      textLabel = new JLabel(text);
      textLabel.setEnabled(false);
      add(textLabel);
      if (additionalComponent != null) {
        this.additionalComponent = additionalComponent;
        add(additionalComponent, BorderLayout.EAST);
      }
      setPreferredSize(new Dimension(200, 20));
    }
    /**
     * Sets a flag that says whether the step is done or not.
     *
     * @param done True, if the step is done. False, if it is not.
     */
    public void setDone(final boolean done) {
      textLabel.setIcon(done ? ACCEPT_IMAGE : BULLET_IMAGE);
    }
    @Override
    public void setEnabled(final boolean enabled) {
      super.setEnabled(enabled);
      if (additionalComponent != null) {
        additionalComponent.setEnabled(enabled);
      }
      textLabel.setEnabled(enabled);
    }
  }
  /**
   * Extended call resolver class that updates the dialog on relevant events.
   */
  private class InternalCallResolver extends CallResolver {
    /**
     * Creates a new call resolver object.
     *
     * @param target The target whose calls are resolved.
     * @param parent Parent window used by the resolver.
     */
    public InternalCallResolver(final ICallResolverTarget target, final JFrame parent) {
      super(target, parent);
    }
    @Override
    protected void debuggerChanged() {
      appendOutput("Error: Target debugger changed. Resetting.");
      updateGui();
    }
    @Override
    protected void debuggerClosed() {
      appendOutput("Target debugger was closed.");
      updateGui();
    }
    @Override
    protected void errorConnectingDebugger(final DebugException e) {
      appendOutput("Error: Could not start the debugger.");
    }
    @Override
    protected void errorLoadingModule(final Module module, final CouldntLoadDataException e) {
      appendOutput(
          String.format("Error loading module '%s' (%s)", module.getName(), e.getMessage()));
    }
    @Override
    protected void errorNoDebugger() {
      appendOutput("Error: No debugger configured for the selected target.");
    }
    @Override
    protected void errorNotAttached() {
      appendOutput("Error: The debugger is not attached to the target process.");
    }
    @Override
    protected void errorResuming(final DebugException e) {
      appendOutput("Error: Debugger could not be resumed after a breakpoint was hit.");
    }
    @Override
    protected void foundIndirectCallAddresses(final List<IndirectCall> indirectCallAddresses) {
      appendOutput(String.format("Found %d indirect calls", indirectCallAddresses.size()));
      if (indirectCallAddresses.isEmpty()) {
        appendOutput("No indirect function calls found: The resolving process is complete");
      }
    }
    // Logs one resolved call; falls back to the memory module name when the target
    // address could not be mapped to a known function.
    @Override
    protected void resolvedCall(
        final BigInteger lastIndirectCall, final ResolvedFunction resolvedFunction) {
      final Function function = resolvedFunction.getFunction();
      final Address functionAddress =
          function == null ? resolvedFunction.getAddress() : function.getAddress();
      final String functionName =
          function == null ? resolvedFunction.getMemoryModule().getName() + "!???"
              : function.getModule().getName() + "!" + function.getName();
      appendOutput(String.format("Done resolving: %08X -> %08X (%s)", lastIndirectCall.longValue(),
          functionAddress.toLong(), functionName));
    }
  }
  /**
   * The upper part of the dialog that contains the progress labels and the results buttons.
   */
  private class LabelPanel extends JPanel {
    public LabelPanel() {
      super(new GridLayout(panels.length + 1, 1));
      for (final IconPanel panel : panels) {
        add(panel);
      }
      final JPanel resultsPanel = new JPanel();
      resultsPanel.add(listResolvedFunctionsButton);
      resultsPanel.add(graphResolvedFunctionsButton);
      resultsPanel.add(portResultsButton);
      add(resultsPanel);
      setBorder(new TitledBorder(""));
    }
  }
  /**
   * Action class used to show the resolved functions in the output list.
   */
  private class ListResolvedFunctionsAction extends AbstractAction {
    public ListResolvedFunctionsAction() {
      super("Show resolved functions");
    }
    @Override
    public void actionPerformed(final ActionEvent e) {
      listResolvedFunctions();
    }
  }
  /**
   * Action class that handles clicks on the Next button.
   */
  private class NextAction extends AbstractAction {
    /**
     * Creates a new action object.
     */
    public NextAction() {
      super("Next");
    }
    @Override
    public void actionPerformed(final ActionEvent event) {
      callResolver.next();
      updateGui();
    }
  }
  /**
   * The part of the panel that contains the output field.
   */
  private class OutputPanel extends JPanel {
    public OutputPanel() {
      super(new BorderLayout());
      outputArea.setEditable(false);
      outputArea.setFont(GuiHelper.MONOSPACED_FONT);
      add(new JScrollPane(outputArea));
    }
  }
  /**
   * Action class for the Resume button.
   */
  private class ResumeAction extends AbstractAction {
    public ResumeAction() {
      super("Resume");
    }
    @Override
    public void actionPerformed(final ActionEvent event) {
      try {
        callResolver.getTarget().getDebugger().resume();
      } catch (final DebugException exception) {
        appendOutput(
            String.format("Error: Could not resume the debugger (%s)", exception.toString()));
      }
    }
  }
  /**
   * Button for resuming the debugger.
   */
  private class ResumeButton extends JButton {
    public ResumeButton() {
      super(new ResumeAction());
      setPreferredSize(new Dimension(100, 20));
    }
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.filters;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.colors.*;
import com.intellij.openapi.editor.markup.HighlighterLayer;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Scans console output lines and turns matched ranges into highlighted, optionally
 * hyperlinked regions.
 *
 * @author Yura Cangea
 * @version 1.0
 */
public interface Filter {
  Filter[] EMPTY_ARRAY = new Filter[0];
  /**
   * The outcome of applying a filter to a line: either a single highlighted region
   * (legacy single-item constructors) or a list of {@link ResultItem}s.
   */
  class Result extends ResultItem {
    private NextAction myNextAction = NextAction.EXIT;
    // Non-null only when the multi-item constructor was used; otherwise this Result
    // itself is the single item.
    private final List<ResultItem> myResultItems;
    public Result(final int highlightStartOffset, final int highlightEndOffset, @Nullable final HyperlinkInfo hyperlinkInfo) {
      this(highlightStartOffset, highlightEndOffset, hyperlinkInfo, null);
    }
    public Result(final int highlightStartOffset,
                  final int highlightEndOffset,
                  @Nullable final HyperlinkInfo hyperlinkInfo,
                  @Nullable final TextAttributes highlightAttributes) {
      super(highlightStartOffset, highlightEndOffset, hyperlinkInfo, highlightAttributes, null);
      myResultItems = null;
    }
    public Result(final int highlightStartOffset,
                  final int highlightEndOffset,
                  @Nullable final HyperlinkInfo hyperlinkInfo,
                  @Nullable final TextAttributes highlightAttributes,
                  @Nullable final TextAttributes followedHyperlinkAttributes) {
      super(highlightStartOffset, highlightEndOffset, hyperlinkInfo, highlightAttributes, followedHyperlinkAttributes);
      myResultItems = null;
    }
    public Result(final int highlightStartOffset,
                  final int highlightEndOffset,
                  @Nullable final HyperlinkInfo hyperlinkInfo,
                  final boolean grayedHyperlink) {
      super(highlightStartOffset, highlightEndOffset, hyperlinkInfo, grayedHyperlink);
      myResultItems = null;
    }
    // Multi-item form: the -1/-1 offsets are sentinels; this Result is only a container.
    public Result(@NotNull List<ResultItem> resultItems) {
      super(-1, -1, null, null, null);
      myResultItems = resultItems;
    }
    /**
     * Returns all items of this result; a single-item Result returns itself as the only element.
     */
    @NotNull
    public List<ResultItem> getResultItems() {
      List<ResultItem> resultItems = myResultItems;
      if (resultItems == null) {
        resultItems = Collections.singletonList(this);
      }
      return resultItems;
    }
    /**
     * @deprecated This method will be removed. Result may be constructed using ResultItems, in that case this method will return incorrect value. Use {@link #getResultItems()} instead.
     */
    @Deprecated
    @Override
    public int getHighlightStartOffset() {
      return super.getHighlightStartOffset();
    }
    /**
     * @deprecated This method will be removed. Result may be constructed using ResultItems, in that case this method will return incorrect value. Use {@link #getResultItems()} instead.
     */
    @Deprecated
    @Override
    public int getHighlightEndOffset() {
      return super.getHighlightEndOffset();
    }
    /**
     * @deprecated This method will be removed. Result may be constructed using ResultItems, in that case this method will return incorrect value. Use {@link #getResultItems()} instead.
     */
    @Deprecated
    @Nullable
    @Override
    public TextAttributes getHighlightAttributes() {
      return super.getHighlightAttributes();
    }
    /**
     * @deprecated This method will be removed. Result may be constructed using ResultItems, in that case this method will return incorrect value. Use {@link #getResultItems()} or {@link #getFirstHyperlinkInfo()} instead.
     */
    @Deprecated
    @Nullable
    @Override
    public HyperlinkInfo getHyperlinkInfo() {
      return super.getHyperlinkInfo();
    }
    /**
     * Returns the hyperlink of this result itself, or the first item's hyperlink
     * in the multi-item case; {@code null} when no item carries one.
     */
    @Nullable
    public HyperlinkInfo getFirstHyperlinkInfo() {
      HyperlinkInfo info = super.getHyperlinkInfo();
      if (info == null && myResultItems != null) {
        //noinspection ForLoopReplaceableByForEach
        for (int i = 0; i < myResultItems.size(); i++) {
          ResultItem resultItem = myResultItems.get(i);
          if (resultItem.getHyperlinkInfo() != null) {
            return resultItem.getHyperlinkInfo();
          }
        }
      }
      return info;
    }
    public NextAction getNextAction() {
      return myNextAction;
    }
    public void setNextAction(NextAction nextAction) {
      myNextAction = nextAction;
    }
  }
  // Tells the filter pipeline whether to stop after this filter or keep going.
  enum NextAction {
    EXIT, CONTINUE_FILTERING,
  }
  /**
   * A single highlighted region: offsets into the console text, optional highlight
   * attributes, and an optional hyperlink.
   */
  class ResultItem {
    // Caches the "grayed" variant derived from a normal hyperlink attribute key.
    private static final Map<TextAttributesKey, TextAttributes> GRAYED_BY_NORMAL_CACHE = ContainerUtil.newConcurrentMap(2);
    static {
      // Application may be null in bare unit-test environments; skip the subscription then.
      Application application = ApplicationManager.getApplication();
      if (application != null) {
        application.getMessageBus().connect().subscribe(EditorColorsManager.TOPIC, __ -> {
          // invalidate cache on Appearance Theme/Editor Scheme change
          GRAYED_BY_NORMAL_CACHE.clear();
        });
      }
    }
    /**
     * @deprecated use getter, the visibility of this field will be decreased.
     */
    @Deprecated
    public final int highlightStartOffset;
    /**
     * @deprecated use getter, the visibility of this field will be decreased.
     */
    @Deprecated
    public final int highlightEndOffset;
    /**
     * @deprecated use getter, the visibility of this field will be decreased.
     */
    @Deprecated @Nullable
    public final TextAttributes highlightAttributes;
    /**
     * @deprecated use getter, the visibility of this field will be decreased.
     */
    @Deprecated @Nullable
    public final HyperlinkInfo hyperlinkInfo;
    private final TextAttributes myFollowedHyperlinkAttributes;
    public ResultItem(final int highlightStartOffset, final int highlightEndOffset, @Nullable final HyperlinkInfo hyperlinkInfo) {
      this(highlightStartOffset, highlightEndOffset, hyperlinkInfo, null, null);
    }
    public ResultItem(final int highlightStartOffset,
                      final int highlightEndOffset,
                      @Nullable final HyperlinkInfo hyperlinkInfo,
                      @Nullable final TextAttributes highlightAttributes) {
      this(highlightStartOffset, highlightEndOffset, hyperlinkInfo, highlightAttributes, null);
    }
    public ResultItem(int highlightStartOffset,
                      int highlightEndOffset,
                      @Nullable HyperlinkInfo hyperlinkInfo,
                      boolean grayedHyperlink) {
      this(highlightStartOffset, highlightEndOffset, hyperlinkInfo,
           grayedHyperlink ? getGrayedHyperlinkAttributes(CodeInsightColors.HYPERLINK_ATTRIBUTES) : null,
           grayedHyperlink ? getGrayedHyperlinkAttributes(CodeInsightColors.FOLLOWED_HYPERLINK_ATTRIBUTES) : null);
    }
    @SuppressWarnings("deprecation")
    public ResultItem(final int highlightStartOffset,
                      final int highlightEndOffset,
                      @Nullable final HyperlinkInfo hyperlinkInfo,
                      @Nullable final TextAttributes highlightAttributes,
                      @Nullable final TextAttributes followedHyperlinkAttributes) {
      this.highlightStartOffset = highlightStartOffset;
      this.highlightEndOffset = highlightEndOffset;
      this.hyperlinkInfo = hyperlinkInfo;
      this.highlightAttributes = highlightAttributes;
      myFollowedHyperlinkAttributes = followedHyperlinkAttributes;
    }
    public int getHighlightStartOffset() {
      return highlightStartOffset;
    }
    public int getHighlightEndOffset() {
      return highlightEndOffset;
    }
    @Nullable
    public TextAttributes getHighlightAttributes() {
      return highlightAttributes;
    }
    @Nullable
    public TextAttributes getFollowedHyperlinkAttributes() {
      return myFollowedHyperlinkAttributes;
    }
    @Nullable
    public HyperlinkInfo getHyperlinkInfo() {
      return hyperlinkInfo;
    }
    /**
     * See {@link HighlighterLayer} for available predefined layers.
     */
    public int getHighlighterLayer() {
      return getHyperlinkInfo() != null ? HighlighterLayer.HYPERLINK : HighlighterLayer.CONSOLE_FILTER;
    }
    /**
     * Derives (and caches) a grayed-out variant of the given hyperlink attributes;
     * {@code null} when the scheme has no attributes for the key.
     */
    @Nullable
    private static TextAttributes getGrayedHyperlinkAttributes(@NotNull TextAttributesKey normalHyperlinkAttrsKey) {
      EditorColorsScheme globalScheme = EditorColorsManager.getInstance().getGlobalScheme();
      TextAttributes grayedHyperlinkAttrs = GRAYED_BY_NORMAL_CACHE.get(normalHyperlinkAttrsKey);
      if (grayedHyperlinkAttrs == null) {
        TextAttributes normalHyperlinkAttrs = globalScheme.getAttributes(normalHyperlinkAttrsKey);
        if (normalHyperlinkAttrs != null) {
          grayedHyperlinkAttrs = normalHyperlinkAttrs.clone();
          grayedHyperlinkAttrs.setForegroundColor(UIUtil.getInactiveTextColor());
          grayedHyperlinkAttrs.setEffectColor(UIUtil.getInactiveTextColor());
          GRAYED_BY_NORMAL_CACHE.put(normalHyperlinkAttrsKey, grayedHyperlinkAttrs);
        }
      }
      return grayedHyperlinkAttrs;
    }
  }
  /**
   * Filters line by creating an instance of {@link Result}.
   *
   * @param line The line to be filtered. Note that the line must contain a line
   *             separator at the end.
   * @param entireLength The length of the entire text including the line passed for filtration.
   * @return <tt>null</tt>, if there was no match, otherwise, an instance of {@link Result}
   */
  @Nullable
  Result applyFilter(@NotNull String line, int entireLength);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.common;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
import org.apache.hadoop.hdfs.web.resources.DoAsParam;
import org.apache.hadoop.hdfs.web.resources.UserParam;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RetriableException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
import org.apache.hadoop.security.authorize.ProxyServers;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.net.InetSocketAddress;
import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestJspHelper {
private final Configuration conf = new HdfsConfiguration();
// allow user with TGT to run tests
@BeforeClass
public static void setupKerb() {
System.setProperty("java.security.krb5.kdc", "");
System.setProperty("java.security.krb5.realm", "NONE");
}
  /**
   * Minimal delegation-token secret manager for tests: produces no identifiers of its
   * own and signs every token with a fixed one-byte password.
   */
  public static class DummySecretManager extends
      AbstractDelegationTokenSecretManager<DelegationTokenIdentifier> {
    public DummySecretManager(long delegationKeyUpdateInterval,
        long delegationTokenMaxLifetime, long delegationTokenRenewInterval,
        long delegationTokenRemoverScanInterval) {
      super(delegationKeyUpdateInterval, delegationTokenMaxLifetime,
          delegationTokenRenewInterval, delegationTokenRemoverScanInterval);
    }
    @Override
    public DelegationTokenIdentifier createIdentifier() {
      return null;
    }
    // constant password — token contents are irrelevant for these tests
    @Override
    public byte[] createPassword(DelegationTokenIdentifier dtId) {
      return new byte[1];
    }
  }
  /**
   * Verifies how the service address ends up in a delegation token obtained via
   * {@code JspHelper.getUGI}: from the {@code nnaddr} URL parameter, from the
   * servlet-context attribute, or left untouched when already set on the token.
   */
  @Test
  public void testGetUgi() throws IOException {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    HttpServletRequest request = mock(HttpServletRequest.class);
    ServletContext context = mock(ServletContext.class);
    String user = "TheDoctor";
    Text userText = new Text(user);
    DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(userText,
        userText, null);
    Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
        dtId, new DummySecretManager(0, 0, 0, 0));
    String tokenString = token.encodeToUrlString();
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
        tokenString);
    when(request.getRemoteUser()).thenReturn(user);
    //Test attribute in the url to be used as service in the token.
    when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(
        "1.1.1.1:1111");
    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    verifyServiceInToken(context, request, "1.1.1.1:1111");
    //Test attribute name.node.address
    //Set the nnaddr url parameter to null.
    token.decodeIdentifier().clearCache();
    when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(null);
    InetSocketAddress addr = new InetSocketAddress("localhost", 2222);
    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
        .thenReturn(addr);
    verifyServiceInToken(context, request, addr.getAddress().getHostAddress()
        + ":2222");
    //Test service already set in the token and DN doesn't change service
    //when it doesn't know the NN service addr
    userText = new Text(user+"2");
    dtId = new DelegationTokenIdentifier(userText, userText, null);
    token = new Token<DelegationTokenIdentifier>(
        dtId, new DummySecretManager(0, 0, 0, 0));
    token.setService(new Text("3.3.3.3:3333"));
    tokenString = token.encodeToUrlString();
    //Set the name.node.address attribute in Servlet context to null
    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
        .thenReturn(null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
        tokenString);
    verifyServiceInToken(context, request, "3.3.3.3:3333");
  }
private void verifyServiceInToken(ServletContext context,
HttpServletRequest request, String expected) throws IOException {
UserGroupInformation ugi = JspHelper.getUGI(context, request, conf);
Token<? extends TokenIdentifier> tokenInUgi = ugi.getTokens().iterator()
.next();
Assert.assertEquals(expected, tokenInUgi.getService().toString());
}
@Test
public void testGetUgiFromToken() throws IOException {
conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
ServletContext context = mock(ServletContext.class);
String realUser = "TheDoctor";
String user = "TheNurse";
conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
UserGroupInformation.setConfiguration(conf);
UserGroupInformation ugi;
HttpServletRequest request;
Text ownerText = new Text(user);
DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(
ownerText, ownerText, new Text(realUser));
Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
dtId, new DummySecretManager(0, 0, 0, 0));
String tokenString = token.encodeToUrlString();
// token with no auth-ed user
request = getMockRequest(null, null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// token with auth-ed user
request = getMockRequest(realUser, null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// completely different user, token trumps auth
request = getMockRequest("rogue", null, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// expected case
request = getMockRequest(null, user, null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// if present token, ignore doas parameter
request = getMockRequest(null, null, "rogue");
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// if present token, ignore user.name parameter
request = getMockRequest(null, "rogue", null);
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
// if present token, ignore user.name and doas parameter
request = getMockRequest(null, user, "rogue");
when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
tokenString);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNotNull(ugi.getRealUser());
Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
Assert.assertEquals(ugi.getShortUserName(), user);
checkUgiFromToken(ugi);
}
@Test
public void testGetNonProxyUgi() throws IOException {
conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
ServletContext context = mock(ServletContext.class);
String realUser = "TheDoctor";
String user = "TheNurse";
conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
UserGroupInformation.setConfiguration(conf);
UserGroupInformation ugi;
HttpServletRequest request;
// have to be auth-ed with remote user
request = getMockRequest(null, null, null);
try {
JspHelper.getUGI(context, request, conf);
Assert.fail("bad request allowed");
} catch (IOException ioe) {
Assert.assertEquals(
"Security enabled but user not authenticated by filter",
ioe.getMessage());
}
request = getMockRequest(null, realUser, null);
try {
JspHelper.getUGI(context, request, conf);
Assert.fail("bad request allowed");
} catch (IOException ioe) {
Assert.assertEquals(
"Security enabled but user not authenticated by filter",
ioe.getMessage());
}
// ugi for remote user
request = getMockRequest(realUser, null, null);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNull(ugi.getRealUser());
Assert.assertEquals(ugi.getShortUserName(), realUser);
checkUgiFromAuth(ugi);
// ugi for remote user = real user
request = getMockRequest(realUser, realUser, null);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNull(ugi.getRealUser());
Assert.assertEquals(ugi.getShortUserName(), realUser);
checkUgiFromAuth(ugi);
// if there is remote user via SPNEGO, ignore user.name param
request = getMockRequest(realUser, user, null);
ugi = JspHelper.getUGI(context, request, conf);
Assert.assertNull(ugi.getRealUser());
Assert.assertEquals(ugi.getShortUserName(), realUser);
checkUgiFromAuth(ugi);
}
  /**
   * Proxy-user (doas) resolution with security on: the SPNEGO remote user
   * must be present and must be authorized to impersonate the doas target.
   */
  @Test
  public void testGetProxyUgi() throws IOException {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    ServletContext context = mock(ServletContext.class);
    String realUser = "TheDoctor";
    String user = "TheNurse";
    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    // Allow realUser to impersonate any group from any host.
    conf.set(DefaultImpersonationProvider.getTestProvider().
        getProxySuperuserGroupConfKey(realUser), "*");
    conf.set(DefaultImpersonationProvider.getTestProvider().
        getProxySuperuserIpConfKey(realUser), "*");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation ugi;
    HttpServletRequest request;
    // have to be auth-ed with remote user; doas alone is rejected
    request = getMockRequest(null, null, user);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Security enabled but user not authenticated by filter",
          ioe.getMessage());
    }
    // user.name parameter does not substitute for SPNEGO authentication
    request = getMockRequest(null, realUser, user);
    try {
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad request allowed");
    } catch (IOException ioe) {
      Assert.assertEquals(
          "Security enabled but user not authenticated by filter",
          ioe.getMessage());
    }
    // proxy ugi for user via remote user
    request = getMockRequest(realUser, null, user);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromAuth(ugi);
    // proxy ugi for user via a remote user = real user
    request = getMockRequest(realUser, realUser, user);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromAuth(ugi);
    // if there is remote user via SPNEGO, ignore user.name, doas param
    request = getMockRequest(realUser, user, user);
    ugi = JspHelper.getUGI(context, request, conf);
    Assert.assertNotNull(ugi.getRealUser());
    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
    Assert.assertEquals(ugi.getShortUserName(), user);
    checkUgiFromAuth(ugi);
    // try to get a proxy user with an unauthorized impersonator
    try {
      request = getMockRequest(user, null, realUser);
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad proxy request allowed");
    } catch (AuthorizationException ae) {
      Assert.assertEquals(
          "User: " + user + " is not allowed to impersonate " + realUser,
          ae.getMessage());
    }
    // same, with a redundant user.name parameter
    try {
      request = getMockRequest(user, user, realUser);
      JspHelper.getUGI(context, request, conf);
      Assert.fail("bad proxy request allowed");
    } catch (AuthorizationException ae) {
      Assert.assertEquals(
          "User: " + user + " is not allowed to impersonate " + realUser,
          ae.getMessage());
    }
  }
  /**
   * A delegation token arriving while the NameNode is still starting up
   * must be rejected with RetriableException rather than accepted
   * unverified.
   */
  @Test
  public void testGetUgiDuringStartup() throws Exception {
    conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "hdfs://localhost:4321/");
    ServletContext context = mock(ServletContext.class);
    String realUser = "TheDoctor";
    String user = "TheNurse";
    conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    HttpServletRequest request;
    Text ownerText = new Text(user);
    DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(
        ownerText, ownerText, new Text(realUser));
    Token<DelegationTokenIdentifier> token =
        new Token<DelegationTokenIdentifier>(dtId,
            new DummySecretManager(0, 0, 0, 0));
    String tokenString = token.encodeToUrlString();
    // token with auth-ed user
    request = getMockRequest(realUser, null, null);
    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
        tokenString);
    // Use the real verifyToken() on an otherwise-mocked NameNode so the
    // startup-mode check actually executes.
    NameNode mockNN = mock(NameNode.class);
    Mockito.doCallRealMethod().when(mockNN)
        .verifyToken(Mockito.any(), Mockito.any());
    when(context.getAttribute("name.node")).thenReturn(mockNN);
    LambdaTestUtils.intercept(RetriableException.class,
        "Namenode is in startup mode",
        () -> JspHelper.getUGI(context, request, conf));
  }
private HttpServletRequest getMockRequest(String remoteUser, String user, String doAs) {
HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getParameter(UserParam.NAME)).thenReturn(user);
if (doAs != null) {
when(request.getParameter(DoAsParam.NAME)).thenReturn(doAs);
}
when(request.getRemoteUser()).thenReturn(remoteUser);
return request;
}
private void checkUgiFromAuth(UserGroupInformation ugi) {
if (ugi.getRealUser() != null) {
Assert.assertEquals(AuthenticationMethod.PROXY,
ugi.getAuthenticationMethod());
Assert.assertEquals(AuthenticationMethod.KERBEROS_SSL,
ugi.getRealUser().getAuthenticationMethod());
} else {
Assert.assertEquals(AuthenticationMethod.KERBEROS_SSL,
ugi.getAuthenticationMethod());
}
}
private void checkUgiFromToken(UserGroupInformation ugi) {
if (ugi.getRealUser() != null) {
Assert.assertEquals(AuthenticationMethod.PROXY,
ugi.getAuthenticationMethod());
Assert.assertEquals(AuthenticationMethod.TOKEN,
ugi.getRealUser().getAuthenticationMethod());
} else {
Assert.assertEquals(AuthenticationMethod.TOKEN,
ugi.getAuthenticationMethod());
}
}
@Test
public void testReadWriteReplicaState() {
try {
DataOutputBuffer out = new DataOutputBuffer();
DataInputBuffer in = new DataInputBuffer();
for (HdfsServerConstants.ReplicaState repState : HdfsServerConstants.ReplicaState
.values()) {
repState.write(out);
in.reset(out.getData(), out.getLength());
HdfsServerConstants.ReplicaState result = HdfsServerConstants.ReplicaState
.read(in);
assertTrue("testReadWrite error !!!", repState == result);
out.reset();
in.reset();
}
} catch (Exception ex) {
fail("testReadWrite ex error ReplicaState");
}
}
  // Addresses used by the getRemoteAddr() tests below.
  private static String clientAddr = "1.1.1.1";
  // Comma-separated X-Forwarded-For chain; the first entry is the client.
  private static String chainedClientAddr = clientAddr+", 2.2.2.2";
  private static String proxyAddr = "3.3.3.3";
  /** Direct connection: the remote address is the client itself. */
  @Test
  public void testRemoteAddr() {
    assertEquals(clientAddr, getRemoteAddr(clientAddr, null, false));
  }
  /** Untrusted proxy: X-Forwarded-For is ignored and the proxy is reported. */
  @Test
  public void testRemoteAddrWithUntrustedProxy() {
    assertEquals(proxyAddr, getRemoteAddr(clientAddr, proxyAddr, false));
  }
  /** Trusted proxy: the first X-Forwarded-For entry is reported as client. */
  @Test
  public void testRemoteAddrWithTrustedProxy() {
    assertEquals(clientAddr, getRemoteAddr(clientAddr, proxyAddr, true));
    assertEquals(clientAddr, getRemoteAddr(chainedClientAddr, proxyAddr, true));
  }
  /** Trusted proxy but empty/absent X-Forwarded-For: fall back to the proxy. */
  @Test
  public void testRemoteAddrWithTrustedProxyAndEmptyClient() {
    assertEquals(proxyAddr, getRemoteAddr(null, proxyAddr, true));
    assertEquals(proxyAddr, getRemoteAddr("", proxyAddr, true));
  }
private String getRemoteAddr(String clientAddr, String proxyAddr, boolean trusted) {
HttpServletRequest req = mock(HttpServletRequest.class);
when(req.getRemoteAddr()).thenReturn("1.2.3.4");
Configuration conf = new Configuration();
if (proxyAddr == null) {
when(req.getRemoteAddr()).thenReturn(clientAddr);
} else {
when(req.getRemoteAddr()).thenReturn(proxyAddr);
when(req.getHeader("X-Forwarded-For")).thenReturn(clientAddr);
if (trusted) {
conf.set(ProxyServers.CONF_HADOOP_PROXYSERVERS, proxyAddr);
}
}
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
return JspHelper.getRemoteAddr(req);
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;
import org.elasticsearch.common.unit.TimeValue;
import java.text.NumberFormat;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Simple stop watch, allowing for timing of a number of tasks,
* exposing total running time and running time for each named task.
* <p/>
* <p>Conceals use of <code>System.currentTimeMillis()</code>, improving the
* readability of application code and reducing the likelihood of calculation errors.
* <p/>
* <p>Note that this object is not designed to be thread-safe and does not
* use synchronization.
* <p/>
* <p>This class is normally used to verify performance during proof-of-concepts
* and in development, rather than as part of production applications.
*
*
*/
public class StopWatch {

    /**
     * Identifier of this stop watch.
     * Handy when we have output from multiple stop watches
     * and need to distinguish between them in log or console output.
     */
    private final String id;

    /** Whether a TaskInfo is recorded for every completed task. */
    private boolean keepTaskList = true;

    /** Completed tasks in stop order; only populated when keepTaskList is true. */
    private final List<TaskInfo> taskList = new LinkedList<TaskInfo>();

    /**
     * Start time of the current task
     */
    private long startTimeMillis;

    /**
     * Is the stop watch currently running?
     */
    private boolean running;

    /**
     * Name of the current task
     */
    private String currentTaskName;

    /** Info for the most recently stopped task, or null before the first stop. */
    private TaskInfo lastTaskInfo;

    /** Number of tasks timed so far. */
    private int taskCount;

    /**
     * Total running time
     */
    private long totalTimeMillis;

    /**
     * Construct a new stop watch. Does not start any task.
     */
    public StopWatch() {
        this.id = "";
    }

    /**
     * Construct a new stop watch with the given id.
     * Does not start any task.
     *
     * @param id identifier for this stop watch.
     *           Handy when we have output from multiple stop watches
     *           and need to distinguish between them.
     */
    public StopWatch(String id) {
        this.id = id;
    }

    /**
     * Determine whether the TaskInfo array is built over time. Set this to
     * "false" when using a StopWatch for millions of intervals, or the task
     * info structure will consume excessive memory. Default is "true".
     */
    public StopWatch keepTaskList(boolean keepTaskList) {
        this.keepTaskList = keepTaskList;
        return this;
    }

    /**
     * Start an unnamed task. The results are undefined if {@link #stop()}
     * or timing methods are called without invoking this method.
     *
     * @see #stop()
     */
    public StopWatch start() throws IllegalStateException {
        return start("");
    }

    /**
     * Start a named task. The results are undefined if {@link #stop()}
     * or timing methods are called without invoking this method.
     *
     * @param taskName the name of the task to start
     * @throws IllegalStateException if the watch is already running
     * @see #stop()
     */
    public StopWatch start(String taskName) throws IllegalStateException {
        if (this.running) {
            throw new IllegalStateException("Can't start StopWatch: it's already running");
        }
        this.startTimeMillis = System.currentTimeMillis();
        this.running = true;
        this.currentTaskName = taskName;
        return this;
    }

    /**
     * Stop the current task. The results are undefined if timing
     * methods are called without invoking at least one pair
     * {@link #start()} / {@link #stop()} methods.
     *
     * @throws IllegalStateException if the watch is not running
     * @see #start()
     */
    public StopWatch stop() throws IllegalStateException {
        if (!this.running) {
            throw new IllegalStateException("Can't stop StopWatch: it's not running");
        }
        long lastTime = System.currentTimeMillis() - this.startTimeMillis;
        this.totalTimeMillis += lastTime;
        this.lastTaskInfo = new TaskInfo(this.currentTaskName, lastTime);
        if (this.keepTaskList) {
            this.taskList.add(lastTaskInfo);
        }
        ++this.taskCount;
        this.running = false;
        this.currentTaskName = null;
        return this;
    }

    /**
     * Return whether the stop watch is currently running.
     */
    public boolean isRunning() {
        return this.running;
    }

    /**
     * Return the time taken by the last task.
     *
     * @throws IllegalStateException if no task has been stopped yet
     */
    public TimeValue lastTaskTime() throws IllegalStateException {
        if (this.lastTaskInfo == null) {
            // Fixed message: previously said "No tests run", but this watch
            // times tasks, not tests.
            throw new IllegalStateException("No task run: can't get last interval");
        }
        return this.lastTaskInfo.getTime();
    }

    /**
     * Return the name of the last task.
     *
     * @throws IllegalStateException if no task has been stopped yet
     */
    public String lastTaskName() throws IllegalStateException {
        if (this.lastTaskInfo == null) {
            throw new IllegalStateException("No task run: can't get last interval");
        }
        return this.lastTaskInfo.getTaskName();
    }

    /**
     * Return the total time for all tasks.
     */
    public TimeValue totalTime() {
        return new TimeValue(totalTimeMillis, TimeUnit.MILLISECONDS);
    }

    /**
     * Return the number of tasks timed.
     */
    public int taskCount() {
        return taskCount;
    }

    /**
     * Return an array of the data for tasks performed.
     *
     * @throws UnsupportedOperationException if task info is not being kept
     */
    public TaskInfo[] taskInfo() {
        if (!this.keepTaskList) {
            throw new UnsupportedOperationException("Task info is not being kept!");
        }
        return this.taskList.toArray(new TaskInfo[this.taskList.size()]);
    }

    /**
     * Return a short description of the total running time.
     */
    public String shortSummary() {
        return "StopWatch '" + this.id + "': running time = " + totalTime();
    }

    /**
     * Return a string with a table describing all tasks performed.
     * For custom reporting, call getTaskInfo() and use the task info directly.
     */
    public String prettyPrint() {
        StringBuilder sb = new StringBuilder(shortSummary());
        sb.append('\n');
        if (!this.keepTaskList) {
            sb.append("No task info kept");
        } else {
            sb.append("-----------------------------------------\n");
            sb.append("ms     %     Task name\n");
            sb.append("-----------------------------------------\n");
            NumberFormat nf = NumberFormat.getNumberInstance();
            nf.setMinimumIntegerDigits(5);
            nf.setGroupingUsed(false);
            NumberFormat pf = NumberFormat.getPercentInstance();
            pf.setMinimumIntegerDigits(3);
            pf.setGroupingUsed(false);
            for (TaskInfo task : taskInfo()) {
                sb.append(nf.format(task.getTime().millis())).append("  ");
                // Note: if total time is 0 the ratio is NaN; acceptable for
                // a debug report.
                sb.append(pf.format(task.getTime().secondsFrac() / totalTime().secondsFrac())).append("  ");
                sb.append(task.getTaskName()).append("\n");
            }
        }
        return sb.toString();
    }

    /**
     * Return an informative string describing all tasks performed
     * For custom reporting, call <code>getTaskInfo()</code> and use the task info directly.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(shortSummary());
        if (this.keepTaskList) {
            for (TaskInfo task : taskInfo()) {
                sb.append("; [").append(task.getTaskName()).append("] took ").append(task.getTime());
                long percent = Math.round((100.0f * task.getTime().millis()) / totalTime().millis());
                sb.append(" = ").append(percent).append("%");
            }
        } else {
            sb.append("; no task info kept");
        }
        return sb.toString();
    }

    /**
     * Inner class to hold data about one task executed within the stop watch.
     */
    public static class TaskInfo {

        private final String taskName;

        private final TimeValue timeValue;

        private TaskInfo(String taskName, long timeMillis) {
            this.taskName = taskName;
            this.timeValue = new TimeValue(timeMillis, TimeUnit.MILLISECONDS);
        }

        /**
         * Return the name of this task.
         */
        public String getTaskName() {
            return taskName;
        }

        /**
         * Return the time this task took.
         */
        public TimeValue getTime() {
            return timeValue;
        }
    }
}
| |
package rover;
import org.iids.aos.service.ServiceBroker;
import org.iids.aos.service.ServiceException;
import rover.MonitorInfo.Rover;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.util.ArrayList;
/**
 * Swing monitor window for the rover world: lets the user pick a scenario,
 * start/stop the simulation, and shows each rover's state in a table.
 */
public class RoverDisplay extends JFrame implements ActionListener, WindowListener {

    /** Panel the world is rendered into. */
    private WorldPanel drawPanel;
    private JButton btnStart;
    private JButton btnStop;
    private JButton btnSelect;
    private JLabel lblStatus;
    private JComboBox scenarioList;
    private JComboBox zoomList;
    private JComboBox speedList;
    private JTable roverTable;
    private RoverTableModel roverTableModel;
    /** Remote rover service; stays null if binding fails in the constructor. */
    private IRoverService service;
    private ServiceBroker sb;

    public RoverDisplay(ServiceBroker sb) {
        super("Rover Monitor");
        this.sb = sb;
        try {
            service = sb.bind(IRoverService.class);
        } catch (ServiceException e) {
            e.printStackTrace();
        }
        BorderLayout bl = new BorderLayout();
        this.setLayout(bl);
        drawPanel = new WorldPanel();
        drawPanel.setSize(200, 200);
        this.setPreferredSize(new Dimension(200, 200));
        lblStatus = new JLabel();
        lblStatus.setText("World Stopped");
        btnStart = new JButton("Start");
        btnStart.setActionCommand("Start");
        btnStart.addActionListener(this);
        btnStop = new JButton("Stop");
        btnStop.setActionCommand("Stop");
        btnStop.addActionListener(this);
        btnSelect = new JButton("Select Scenario");
        btnSelect.setActionCommand("Select");
        btnSelect.addActionListener(this);
        btnStop.setEnabled(false);
        // create list to select scenario (empty when the service bind failed)
        Integer[] scenarioKeys = (service != null) ? service.getScenarioIDs() : new Integer[0];
        String[] scenarios = new String[scenarioKeys.length];
        for (int i = 0; i < scenarioKeys.length; i++)
            scenarios[i] = "Scenario " + scenarioKeys[i].intValue();
        scenarioList = new JComboBox(scenarios);
        String[] zooms = { "Zoom 10", "Zoom 9", "Zoom 8", "Zoom 7", "Zoom 6", "Zoom 5", "Zoom 4", "Zoom 3", "Zoom 2", "Zoom 1" };
        zoomList = new JComboBox(zooms);
        String[] speeds = {"1x", "2x", "3x", "4x", "5x", "6x", "7x", "8x", "9x", "10x" };
        speedList = new JComboBox(speeds);
        roverTableModel = new RoverTableModel();
        roverTable = new JTable(roverTableModel);
        roverTable.setPreferredSize(new Dimension(0, 200));
        roverTable.setSize(new Dimension(0, 200));
        JPanel topPanel = new JPanel();
        topPanel.add(lblStatus);
        topPanel.add(scenarioList);
        topPanel.add(btnSelect);
        topPanel.add(speedList);
        topPanel.add(btnStart);
        topPanel.add(btnStop);
        topPanel.add(zoomList);
        add(topPanel, BorderLayout.PAGE_START);
        JScrollPane drawScroll = new JScrollPane(drawPanel);
        add(drawScroll, BorderLayout.CENTER);
        JScrollPane scroll = new JScrollPane(roverTable);
        scroll.setPreferredSize(new Dimension(0, 200));
        add(scroll, BorderLayout.PAGE_END);
        // BUG FIX: this frame implements WindowListener (windowClosed() stops
        // the world) but never registered itself, so the callback never fired.
        addWindowListener(this);
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setSize(800, 800);
        setPreferredSize(new Dimension(800, 800));
    }

    /** Dispatches toolbar button presses by their action command. */
    public void actionPerformed(ActionEvent e) {
        switch (e.getActionCommand()) {
            case "Start":
                lblStatus.setText("World Running");
                btnStart.setEnabled(false);
                scenarioList.setEnabled(false);
                speedList.setEnabled(false);
                btnSelect.setEnabled(false);
                btnStop.setEnabled(true);
                try {
                    // speed index 0 corresponds to "1x"
                    service.startWorld(speedList.getSelectedIndex() + 1);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
                break;
            case "Stop":
                lblStatus.setText("World Stopped");
                btnStart.setEnabled(true);
                speedList.setEnabled(true);
                scenarioList.setEnabled(true);
                btnSelect.setEnabled(true);
                btnStop.setEnabled(false);
                try {
                    service.stopWorld();
                    service.resetWorld(service.getScenario());
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
                break;
            case "Select":
                try {
                    // Combo entries look like "Scenario <id>"; parse the id back out.
                    String value = (String) scenarioList.getSelectedItem();
                    service.resetWorld(Integer.parseInt(value.split(" ")[1]));
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
                break;
            default:
                break;
        }
    }

    /** Refreshes the rover table and the world panel from a monitor snapshot. */
    public void UpdateDisplay(MonitorInfo info) {
        //"Rover", "X", "Y", "Task", "% Complete", "Carrying", "Power", "Max Speed", "Max Range", "Max Load" };
        ArrayList<Object[]> rovers = roverTableModel.getRowData();
        rovers.clear();
        for (Rover ri : info.getRovers()) {
            String task = "";
            switch (ri.getTask()) {
                case PollResult.MOVE:
                    task = "MOVE";
                    break;
                case PollResult.SCAN:
                    task = "SCAN";
                    break;
                case PollResult.COLLECT:
                    task = "COLLECT";
                    break;
                case PollResult.DEPOSIT:
                    task = "DEPOSIT";
                    break;
            }
            Object[] rov = { ri.getKey(), ri.getX(), ri.getY(),
                    task, (int) (ri.getTaskCompletion() * 100),
                    ri.getCurrentLoad(), ri.getEnergy(),
                    ri.getSpeed(), ri.getScanRange(), ri.getMaxLoad() };
            rovers.add(rov);
        }
        roverTableModel.fireTableDataChanged();
        drawPanel.setScale(10 - zoomList.getSelectedIndex());
        drawPanel.setMonitorInfo(info);
    }

    @Override
    public void windowOpened(WindowEvent windowEvent) {
    }

    @Override
    public void windowClosing(WindowEvent windowEvent) {
    }

    @Override
    public void windowClosed(WindowEvent windowEvent) {
        // Stop the simulation when the monitor window goes away; guard against
        // a null service when the initial bind failed.
        // NOTE(review): with EXIT_ON_CLOSE the JVM may exit before this
        // callback completes - confirm shutdown ordering if it matters.
        if (service != null) {
            service.stopWorld();
        }
    }

    @Override
    public void windowIconified(WindowEvent windowEvent) {
    }

    @Override
    public void windowDeiconified(WindowEvent windowEvent) {
    }

    @Override
    public void windowActivated(WindowEvent windowEvent) {
    }

    @Override
    public void windowDeactivated(WindowEvent windowEvent) {
    }
}
| |
package fr.free.nrw.commons.nearby;
import android.net.Uri;
import android.os.StrictMode;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import fr.free.nrw.commons.Utils;
import fr.free.nrw.commons.location.LatLng;
import fr.free.nrw.commons.utils.FileUtils;
import timber.log.Timber;
/**
 * Fetches nearby places needing pictures, either via a Wikidata SPARQL query
 * around a location or from the wiki-needs-pictures CSV dump.
 */
public class NearbyPlaces {

    private static final int MIN_RESULTS = 40;
    private static final double INITIAL_RADIUS = 1.0; // in kilometers
    private static final double MAX_RADIUS = 300.0; // in kilometers
    private static final double RADIUS_MULTIPLIER = 1.618;
    private static final Uri WIKIDATA_QUERY_URL = Uri.parse("https://query.wikidata.org/sparql");
    private static final Uri WIKIDATA_QUERY_UI_URL = Uri.parse("https://query.wikidata.org/");
    // WKT point literal, e.g. "Point(12.34 56.78)" = (longitude latitude).
    // Compiled once instead of once per result line.
    private static final Pattern POINT_PATTERN =
            Pattern.compile("Point\\(([^ ]+) ([^ ]+)\\)");

    private final String wikidataQuery;
    private double radius = INITIAL_RADIUS;
    // Cache for getFromWikiNeedsPictures(); null until first fetch.
    private List<Place> places;

    public NearbyPlaces() {
        try {
            wikidataQuery = FileUtils.readFromResource("/assets/queries/nearby_query.rq");
            Timber.v(wikidataQuery);
        } catch (IOException e) {
            // The bundled query template is required; fail fast if missing.
            throw new RuntimeException(e);
        }
    }

    /**
     * Runs the nearby query around {@code curLatLng}, growing the search
     * radius until at least MIN_RESULTS places are found or MAX_RADIUS is
     * reached. On IOException (often a timeout caused by too many results)
     * the radius is reset to INITIAL_RADIUS for the next call and whatever
     * was fetched so far is returned.
     */
    List<Place> getFromWikidataQuery(LatLng curLatLng, String lang) {
        List<Place> places = Collections.emptyList();
        try {
            // increase the radius gradually to find a satisfactory number of nearby places
            while (radius < MAX_RADIUS) {
                places = getFromWikidataQuery(curLatLng, lang, radius);
                Timber.d("%d results at radius: %f", places.size(), radius);
                if (places.size() >= MIN_RESULTS) {
                    break;
                } else {
                    radius *= RADIUS_MULTIPLIER;
                }
            }
        } catch (IOException e) {
            Timber.d(e.toString());
            // errors tend to be caused by too many results (and time out)
            // try a small radius next time
            Timber.d("back to initial radius: %f", radius);
            radius = INITIAL_RADIUS;
        }
        return places;
    }

    /**
     * Executes one SPARQL query at the given radius and parses the
     * tab-separated response into Place objects. Lines whose first column is
     * not a WKT point are skipped.
     *
     * @throws IOException on network failure
     */
    private List<Place> getFromWikidataQuery(LatLng cur,
                                             String lang,
                                             double radius)
            throws IOException {
        List<Place> places = new ArrayList<>();
        String query = wikidataQuery
                .replace("${RAD}", String.format(Locale.ROOT, "%.2f", radius))
                .replace("${LAT}", String.format(Locale.ROOT, "%.4f", cur.getLatitude()))
                .replace("${LONG}", String.format(Locale.ROOT, "%.4f", cur.getLongitude()))
                .replace("${LANG}", lang);
        Timber.v("# Wikidata query: \n" + query);
        // Log a query-UI link so the query can be reproduced manually.
        Timber.d(WIKIDATA_QUERY_UI_URL.buildUpon().fragment(query).build().toString());
        String url = WIKIDATA_QUERY_URL.buildUpon()
                .appendQueryParameter("query", query).build().toString();
        URLConnection conn = new URL(url).openConnection();
        conn.setRequestProperty("Accept", "text/tab-separated-values");
        // try-with-resources: previously the reader leaked when parsing threw.
        // NOTE(review): no explicit charset on the reader - platform default
        // is used; confirm whether UTF-8 should be forced here.
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream()))) {
            String line;
            Timber.d("Reading from query result...");
            while ((line = in.readLine()) != null) {
                Timber.v(line);
                line = line + "\n"; // to pad columns and make fields a fixed size
                if (!line.startsWith("\"Point")) {
                    continue;
                }
                String[] fields = line.split("\t");
                String point = fields[0];
                String name = Utils.stripLocalizedString(fields[2]);
                String type = Utils.stripLocalizedString(fields[4]);
                String wikipediaSitelink = Utils.stripLocalizedString(fields[7]);
                String commonsSitelink = Utils.stripLocalizedString(fields[8]);
                String wikiDataLink = Utils.stripLocalizedString(fields[1]);
                String icon = fields[5];
                Matcher matcher = POINT_PATTERN.matcher(point);
                if (!matcher.find()) {
                    continue;
                }
                double latitude;
                double longitude;
                try {
                    // WKT order is (longitude latitude)
                    longitude = Double.parseDouble(matcher.group(1));
                    latitude = Double.parseDouble(matcher.group(2));
                } catch (NumberFormatException e) {
                    throw new RuntimeException("LatLng parse error: " + point);
                }
                places.add(new Place(
                        name,
                        Place.Description.fromText(type), // list
                        type, // details
                        Uri.parse(icon),
                        new LatLng(latitude, longitude, 0),
                        new Sitelinks.Builder()
                                .setWikipediaLink(wikipediaSitelink)
                                .setCommonsLink(commonsSitelink)
                                .setWikidataLink(wikiDataLink)
                                .build()
                ));
            }
        }
        return places;
    }

    /**
     * Fetches and caches places from the wiki-needs-pictures CSV dump.
     * Results are memoized in {@link #places}; on network failure a partially
     * filled (possibly empty) list is returned.
     */
    List<Place> getFromWikiNeedsPictures() {
        if (places != null) {
            return places;
        }
        try {
            places = new ArrayList<>();
            // NOTE(review): permitAll suggests this runs on the UI thread;
            // network work should eventually move to a background task.
            StrictMode.ThreadPolicy policy
                    = new StrictMode.ThreadPolicy.Builder().permitAll().build();
            StrictMode.setThreadPolicy(policy);
            URL file = new URL("https://tools.wmflabs.org/wiki-needs-pictures/data/data.csv");
            // try-with-resources: previously the reader leaked on exception.
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(file.openStream()))) {
                boolean firstLine = true;
                String line;
                Timber.d("Reading from CSV file...");
                while ((line = in.readLine()) != null) {
                    // Skip CSV header.
                    if (firstLine) {
                        firstLine = false;
                        continue;
                    }
                    String[] fields = line.split(",");
                    String name = Utils.stripLocalizedString(fields[0]);
                    double latitude;
                    double longitude;
                    // Unparseable coordinates degrade to 0 rather than aborting.
                    try {
                        latitude = Double.parseDouble(fields[1]);
                    } catch (NumberFormatException e) {
                        latitude = 0;
                    }
                    try {
                        longitude = Double.parseDouble(fields[2]);
                    } catch (NumberFormatException e) {
                        longitude = 0;
                    }
                    String type = fields[3];
                    places.add(new Place(
                            name,
                            Place.Description.fromText(type), // list
                            type, // details
                            null,
                            new LatLng(latitude, longitude, 0),
                            new Sitelinks.Builder().build()
                    ));
                }
            }
        } catch (IOException e) {
            Timber.d(e.toString());
        }
        return places;
    }
}
| |
/*
* ZmppPanel
*
* Created on 2015/08/24
* Copyright 2005-2015 by Wandora Team
* This file is part of The Z-machine Preservation Project (ZMPP).
*
* ZMPP is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ZMPP is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ZMPP. If not, see <http://www.gnu.org/licenses/>.
*/
package org.zmpp.swingui;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.EventQueue;
import java.awt.FlowLayout;
import java.awt.GridLayout;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.prefs.Preferences;
import javax.swing.JComponent;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import org.zmpp.io.IOSystem;
import org.zmpp.io.InputStream;
import org.zmpp.media.Resources;
import org.zmpp.media.StoryMetadata;
import org.zmpp.vm.Machine;
import org.zmpp.vm.ScreenModel;
import org.zmpp.vm.StatusLine;
/**
*
* @author akivela
*/
public class ZmppPanel extends JPanel implements InputStream, StatusLine, IOSystem {

  /**
   * Serial version UID.
   */
  private static final long serialVersionUID = 1L;

  /** Left-hand status label (current object name; V1-3 games only). */
  private JLabel global1ObjectLabel;
  /** Right-hand status label (score/turns or time; V1-3 games only). */
  private JLabel statusLabel;
  /** Screen model backing the view component. */
  private ScreenModel screen;
  /** The Z-machine this panel is attached to. */
  private Machine machine;
  /** Collects keyboard/mouse input for the machine. */
  private LineEditorImpl lineEditor;
  /** Thread executing the currently running game. */
  private GameThread currentGame;
  /** Display settings loaded from user preferences. */
  private DisplaySettings settings;
  /** Persistent user preference node. */
  private Preferences preferences;

  /**
   * Constructor. Builds the game view (V6 graphical viewport or text
   * viewport), an optional status bar for version &lt;= 3 stories, and wires
   * the line editor to the view's input events.
   *
   * @param machine a Machine object
   */
  public ZmppPanel(final Machine machine) {
    super();
    // BUGFIX: the add(..., BorderLayout.NORTH/CENTER) calls below pass
    // BorderLayout constraints, but JPanel's default layout manager is
    // FlowLayout, which silently ignores constraints. Install a
    // BorderLayout so the status bar docks north and the view fills center.
    setLayout(new BorderLayout());
    this.machine = machine;
    lineEditor = new LineEditorImpl(machine.getGameData().getStoryFileHeader(),
        machine.getGameData().getZsciiEncoding());
    JComponent view = null;
    preferences = Preferences.userNodeForPackage(ZmppFrame.class);
    settings = createDisplaySettings(preferences);
    // Version 6 stories use the graphical viewport, everything else text.
    if (machine.getGameData().getStoryFileHeader().getVersion() == 6) {
      view = new Viewport6(machine, lineEditor, settings);
      screen = (ScreenModel) view;
    } else {
      view = new TextViewport(machine, lineEditor, settings);
      screen = (ScreenModel) view;
    }
    view.setPreferredSize(new Dimension(640, 476));
    view.setMinimumSize(new Dimension(400, 300));
    // Only story versions 1-3 use an interpreter-drawn status line.
    if (machine.getGameData().getStoryFileHeader().getVersion() <= 3) {
      JPanel statusPanel = new JPanel(new GridLayout(1, 2));
      JPanel status1Panel = new JPanel(new FlowLayout(FlowLayout.LEFT));
      JPanel status2Panel = new JPanel(new FlowLayout(FlowLayout.RIGHT));
      statusPanel.add(status1Panel);
      statusPanel.add(status2Panel);
      global1ObjectLabel = new JLabel(" ");
      statusLabel = new JLabel(" ");
      status1Panel.add(global1ObjectLabel);
      status2Panel.add(statusLabel);
      this.add(statusPanel, BorderLayout.NORTH);
      this.add(view, BorderLayout.CENTER);
    } else {
      this.add(view, BorderLayout.CENTER);
    }
    //addKeyListener(lineEditor);
    view.addKeyListener(lineEditor);
    view.addMouseListener(lineEditor);
    // just for debugging
    view.addMouseMotionListener(new MouseMotionAdapter() {
      public void mouseMoved(MouseEvent e) {
        //System.out.printf("mouse pos: %d %d\n", e.getX(), e.getY());
      }
    });
    // Add an info dialog and a title if metadata exists
    Resources resources = machine.getGameData().getResources();
    if (resources != null && resources.getMetadata() != null) {
      StoryMetadata storyinfo = resources.getMetadata().getStoryInfo();
      // BUGFIX: metadata may be present without story info; guard against
      // a NullPointerException when building the title.
      if (storyinfo != null) {
        setTitle(Main.APPNAME + " - " + storyinfo.getTitle()
            + " (" + storyinfo.getAuthor() + ")");
      }
    }
  }

  /**
   * Reports the window title. This panel has no frame of its own, so the
   * title is only logged to standard output.
   *
   * @param title the title text
   */
  public void setTitle(String title) {
    System.out.println("title: " + title);
  }

  /**
   * Access to screen model.
   *
   * @return the screen model
   */
  public ScreenModel getScreenModel() {
    return screen;
  }

  /**
   * Starts the machine in a new game thread.
   */
  public void startMachine() {
    currentGame = new GameThread(machine, screen);
    currentGame.start();
  }

  // *************************************************************************
  // ******** StatusLine interface
  // *************************************************************************

  /**
   * Updates the status line with object name and score/steps. Label updates
   * are dispatched on the EDT since this is called from the game thread.
   */
  public void updateStatusScore(final String objectName, final int score,
      final int steps) {
    EventQueue.invokeLater(new Runnable() {
      public void run() {
        global1ObjectLabel.setText(objectName);
        statusLabel.setText(score + "/" + steps);
      }
    });
  }

  /**
   * Updates the status line with object name and a HH:MM clock. Label
   * updates are dispatched on the EDT since this is called from the game
   * thread.
   */
  public void updateStatusTime(final String objectName, final int hours,
      final int minutes) {
    EventQueue.invokeLater(new Runnable() {
      public void run() {
        global1ObjectLabel.setText(objectName);
        statusLabel.setText(String.format("%02d:%02d", hours, minutes));
      }
    });
  }

  // *************************************************************************
  // ******** IOSystem interface
  // *************************************************************************

  /**
   * Asks the user for a transcript file via a save dialog.
   *
   * @return a writer for the chosen file, or null if cancelled or on error
   */
  public Writer getTranscriptWriter() {
    File currentdir = new File(System.getProperty("user.dir"));
    JFileChooser fileChooser = new JFileChooser(currentdir);
    fileChooser.setDialogTitle(getMessage("dialog.settranscript.title"));
    if (fileChooser.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) {
      try {
        return new FileWriter(fileChooser.getSelectedFile());
      } catch (IOException ex) {
        ex.printStackTrace();
      }
    }
    return null;
  }

  /**
   * Asks the user for an input file via an open dialog.
   *
   * @return a reader for the chosen file, or null if cancelled or on error
   */
  public Reader getInputStreamReader() {
    File currentdir = new File(System.getProperty("user.dir"));
    JFileChooser fileChooser = new JFileChooser(currentdir);
    fileChooser.setDialogTitle(getMessage("dialog.setinput.title"));
    if (fileChooser.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) {
      try {
        return new FileReader(fileChooser.getSelectedFile());
      } catch (IOException ex) {
        ex.printStackTrace();
      }
    }
    return null;
  }

  // *************************************************************************
  // ******** InputStream interface
  // *************************************************************************

  /** No resources to release for keyboard input. */
  public void close() {
  }

  /** Cancels any pending line input. */
  public void cancelInput() {
    lineEditor.cancelInput();
  }

  /**
   * {@inheritDoc}
   */
  public char getZsciiChar(boolean flushBeforeGet) {
    enterEditMode(flushBeforeGet);
    char zsciiChar = lineEditor.nextZsciiChar();
    leaveEditMode(flushBeforeGet);
    return zsciiChar;
  }

  /** Switches the line editor into input mode if not already there. */
  private void enterEditMode(boolean flushbuffer) {
    if (!lineEditor.isInputMode()) {
      screen.resetPagers();
      lineEditor.setInputMode(true, flushbuffer);
    }
  }

  /** Switches the line editor out of input mode. */
  private void leaveEditMode(boolean flushbuffer) {
    lineEditor.setInputMode(false, flushbuffer);
  }

  /** Shows the "about application" message dialog. */
  public void about(JFrame parent) {
    JOptionPane.showMessageDialog(parent,
        Main.APPNAME + getMessage("dialog.about.msg"),
        getMessage("dialog.about.title"),
        JOptionPane.INFORMATION_MESSAGE);
  }

  /** A panel cannot quit its host application; intentionally a no-op. */
  public void quit() {
    // Can't quit panel
  }

  /** Shows the game information dialog built from story resources. */
  public void aboutGame(JFrame parent) {
    GameInfoDialog dialog = new GameInfoDialog(parent,
        machine.getGameData().getResources());
    dialog.setVisible(true);
  }

  /** Opens the preferences dialog over this panel. */
  public void editPreferences(JFrame parent) {
    PreferencesDialog dialog = new PreferencesDialog(parent, preferences,
        settings);
    dialog.setLocationRelativeTo(this);
    dialog.setVisible(true);
  }

  /**
   * Reads display settings from the preference node, falling back to
   * built-in defaults (12pt fonts, undefined colors, antialiasing on).
   */
  private DisplaySettings createDisplaySettings(Preferences preferences) {
    int stdfontsize = preferences.getInt("stdfontsize", 12);
    int fixedfontsize = preferences.getInt("fixedfontsize", 12);
    int defaultforeground = preferences.getInt("defaultforeground",
        ColorTranslator.UNDEFINED);
    int defaultbackground = preferences.getInt("defaultbackground",
        ColorTranslator.UNDEFINED);
    boolean antialias = preferences.getBoolean("antialias", true);
    return new DisplaySettings(stdfontsize, fixedfontsize, defaultbackground,
        defaultforeground, antialias);
  }

  /** Looks up a localized message by key. */
  private String getMessage(String key) {
    return Main.getMessage(key);
  }
}
| |
/*
* RED5 Open Source Flash Server - https://github.com/Red5/
*
* Copyright 2006-2016 by respective authors (see below). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.red5.server.tomcat;
import java.io.File;
import java.io.FilenameFilter;
import java.lang.management.ManagementFactory;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.management.JMX;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.servlet.ServletException;
import org.red5.server.jmx.mxbeans.LoaderMXBean;
import org.red5.server.util.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
/**
* This service provides the means to auto-deploy a war.
*
* @author Paul Gregoire (mondain@gmail.com)
*/
public class WarDeployer implements InitializingBean, DisposableBean {

    private Logger log = LoggerFactory.getLogger(WarDeployer.class);

    /** Sentinel: true while wars are currently being installed. */
    private static AtomicBoolean deploying = new AtomicBoolean(false);

    /** Runs the periodic deployment check. */
    private ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

    /** Handle on the scheduled deployment job, used to cancel on shutdown. */
    private ScheduledFuture<DeployJob> future;

    /**
     * How often to check for new war files (in milliseconds)
     */
    private int checkInterval = 600000; //ten minutes

    /**
     * Deployment directory
     */
    private String webappFolder;

    /**
     * Expand WAR files in the webapps directory prior to start up
     */
    private boolean expandWars;

    {
        log.info("War deployer service created");
    }

    /**
     * Validates the webapps directory, schedules the periodic deploy job and
     * optionally expands existing wars immediately.
     *
     * @throws Exception if the configured webapps path exists but is not a directory
     */
    @SuppressWarnings("unchecked")
    @Override
    public void afterPropertiesSet() throws Exception {
        log.info("Starting WarDeployer");
        // create the job and schedule it
        future = (ScheduledFuture<DeployJob>) scheduler.scheduleAtFixedRate(new DeployJob(), 60000L, checkInterval, TimeUnit.MILLISECONDS);
        // check the deploy from directory
        log.debug("Webapps directory: {}", webappFolder);
        File dir = new File(webappFolder);
        if (!dir.exists()) {
            log.warn("Source directory not found");
        } else {
            if (!dir.isDirectory()) {
                throw new Exception("Webapps directory is not a directory");
            }
        }
        dir = null;
        // expand wars if so requested
        if (expandWars) {
            log.debug("Deploying wars");
            deploy(false);
        }
    }

    /**
     * Scans the webapps folder for war files, unzips each into a directory
     * named after the application and deletes the archive. Concurrent calls
     * are serialized via the {@code deploying} sentinel.
     *
     * @param startApplication whether to start each application via the loader MBean after unzipping
     */
    public void deploy(boolean startApplication) {
        log.info("Deploy wars {} app start", (startApplication ? "with" : "without"));
        if (deploying.compareAndSet(false, true)) {
            // BUGFIX: reset the sentinel in a finally block; previously an
            // exception thrown while unzipping or starting an application
            // left the sentinel set and disabled all future deployments.
            try {
                // short name
                String application = null;
                // file name
                String applicationWarName = null;
                // look for web application archives
                File dir = new File(webappFolder);
                // get a list of wars
                File[] files = dir.listFiles(new DirectoryFilter());
                // BUGFIX: listFiles() returns null when the folder does not
                // exist or cannot be read; afterPropertiesSet only logs a
                // warning in that case, so guard against an NPE here.
                if (files != null) {
                    for (File f : files) {
                        // get the war name
                        applicationWarName = f.getName();
                        int dashIndex = applicationWarName.indexOf('-');
                        if (dashIndex != -1) {
                            // strip everything except the applications name
                            application = applicationWarName.substring(0, dashIndex);
                        } else {
                            // grab every char up to the last '.'
                            application = applicationWarName.substring(0, applicationWarName.lastIndexOf('.'));
                        }
                        log.debug("Application name: {}", application);
                        // setup context
                        String contextPath = '/' + application;
                        String contextDir = webappFolder + contextPath;
                        log.debug("Web context: {} context directory: {}", contextPath, contextDir);
                        // verify this is a unique app
                        File appDir = new File(dir, application);
                        if (appDir.exists()) {
                            if (appDir.isDirectory()) {
                                log.debug("Application directory exists");
                            } else {
                                log.warn("Application destination is not a directory");
                            }
                            log.info("Application {} already installed, please un-install before attempting another install", application);
                        } else {
                            log.debug("Unwaring and starting...");
                            // un-archive it to app dir
                            FileUtil.unzip(webappFolder + '/' + applicationWarName, contextDir);
                            // load and start the context
                            if (startApplication) {
                                // get the webapp loader from jmx
                                LoaderMXBean loader = getLoader();
                                if (loader != null) {
                                    try {
                                        loader.startWebApplication(application);
                                    } catch (ServletException e) {
                                        log.error("Unexpected error while staring web application", e);
                                    }
                                }
                            }
                            // remove the war file
                            File warFile = new File(dir, applicationWarName);
                            if (warFile.delete()) {
                                log.debug("{} was deleted", warFile.getName());
                            } else {
                                log.debug("{} was not deleted", warFile.getName());
                                warFile.deleteOnExit();
                            }
                            warFile = null;
                        }
                        appDir = null;
                    }
                }
                dir = null;
            } finally {
                // reset sentinel
                deploying.set(false);
            }
        }
    }

    /** Cancels the scheduled job and shuts the scheduler down. */
    @Override
    public void destroy() throws Exception {
        if (future != null) {
            future.cancel(true);
        }
        scheduler.shutdownNow();
    }

    public void setCheckInterval(int checkInterval) {
        this.checkInterval = checkInterval;
    }

    public int getCheckInterval() {
        return checkInterval;
    }

    public String getWebappFolder() {
        return webappFolder;
    }

    public void setWebappFolder(String webappFolder) {
        this.webappFolder = webappFolder;
    }

    /**
     * Whether or not to expand war files prior to start up.
     *
     * @param expandWars
     *            to expand or not
     */
    public void setExpandWars(boolean expandWars) {
        this.expandWars = expandWars;
    }

    /**
     * Returns the LoaderMBean.
     *
     * @return LoaderMXBean proxy, or null if the loader is not registered or lookup fails
     */
    @SuppressWarnings("cast")
    public LoaderMXBean getLoader() {
        LoaderMXBean loader = null;
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        ObjectName oName;
        try {
            // TODO support all loaders
            oName = new ObjectName("org.red5.server:type=TomcatLoader");
            if (mbs.isRegistered(oName)) {
                loader = JMX.newMXBeanProxy(mbs, oName, LoaderMXBean.class, true);
                log.debug("Loader was found");
            } else {
                log.warn("Loader not found");
            }
        } catch (Exception e) {
            log.error("Exception getting loader", e);
        }
        return loader;
    }

    /**
     * Filters directory content
     */
    protected class DirectoryFilter implements FilenameFilter {

        /**
         * Check whether file matches filter rules
         *
         * @param dir
         *            Directory
         * @param name
         *            File name
         * @return true If file does match filter rules, false otherwise
         */
        public boolean accept(File dir, String name) {
            File f = new File(dir, name);
            log.trace("Filtering: {} name: {}", dir.getName(), name);
            // filter out all but war files
            boolean result = f.getName().endsWith("war");
            // nullify
            f = null;
            return result;
        }
    }

    /** Periodic job: deploys (and starts) any newly dropped wars. */
    private class DeployJob implements Runnable {

        public void run() {
            log.debug("Starting scheduled deployment of wars");
            deploy(true);
        }
    }

    /**
     * Un-deploys a running application by removing its context via the loader.
     *
     * @param name application short name (without leading slash)
     */
    public void undeploy(String name) {
        LoaderMXBean loader = getLoader();
        if (loader != null) {
            loader.removeContext("/" + name);
        }
    }
}
| |
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.internal.util;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.util.ArraySet;
import dalvik.system.VMRuntime;
import libcore.util.EmptyArray;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* ArrayUtils contains some methods that you can call to find out
* the most efficient increments by which to grow arrays.
*/
public class ArrayUtils {
private static final int CACHE_SIZE = 73;
private static Object[] sCache = new Object[CACHE_SIZE];
private ArrayUtils() { /* cannot be instantiated */ }
public static byte[] newUnpaddedByteArray(int minLen) {
return (byte[])VMRuntime.getRuntime().newUnpaddedArray(byte.class, minLen);
}
public static char[] newUnpaddedCharArray(int minLen) {
return (char[])VMRuntime.getRuntime().newUnpaddedArray(char.class, minLen);
}
public static int[] newUnpaddedIntArray(int minLen) {
return (int[])VMRuntime.getRuntime().newUnpaddedArray(int.class, minLen);
}
public static boolean[] newUnpaddedBooleanArray(int minLen) {
return (boolean[])VMRuntime.getRuntime().newUnpaddedArray(boolean.class, minLen);
}
public static long[] newUnpaddedLongArray(int minLen) {
return (long[])VMRuntime.getRuntime().newUnpaddedArray(long.class, minLen);
}
public static float[] newUnpaddedFloatArray(int minLen) {
return (float[])VMRuntime.getRuntime().newUnpaddedArray(float.class, minLen);
}
public static Object[] newUnpaddedObjectArray(int minLen) {
return (Object[])VMRuntime.getRuntime().newUnpaddedArray(Object.class, minLen);
}
@SuppressWarnings("unchecked")
public static <T> T[] newUnpaddedArray(Class<T> clazz, int minLen) {
return (T[])VMRuntime.getRuntime().newUnpaddedArray(clazz, minLen);
}
/**
* Checks if the beginnings of two byte arrays are equal.
*
* @param array1 the first byte array
* @param array2 the second byte array
* @param length the number of bytes to check
* @return true if they're equal, false otherwise
*/
public static boolean equals(byte[] array1, byte[] array2, int length) {
if (length < 0) {
throw new IllegalArgumentException();
}
if (array1 == array2) {
return true;
}
if (array1 == null || array2 == null || array1.length < length || array2.length < length) {
return false;
}
for (int i = 0; i < length; i++) {
if (array1[i] != array2[i]) {
return false;
}
}
return true;
}
/**
* Returns an empty array of the specified type. The intent is that
* it will return the same empty array every time to avoid reallocation,
* although this is not guaranteed.
*/
@SuppressWarnings("unchecked")
public static <T> T[] emptyArray(Class<T> kind) {
if (kind == Object.class) {
return (T[]) EmptyArray.OBJECT;
}
int bucket = (kind.hashCode() & 0x7FFFFFFF) % CACHE_SIZE;
Object cache = sCache[bucket];
if (cache == null || cache.getClass().getComponentType() != kind) {
cache = Array.newInstance(kind, 0);
sCache[bucket] = cache;
// Log.e("cache", "new empty " + kind.getName() + " at " + bucket);
}
return (T[]) cache;
}
/**
* Checks if given array is null or has zero elements.
*/
public static boolean isEmpty(@Nullable Collection<?> array) {
return array == null || array.isEmpty();
}
/**
* Checks if given array is null or has zero elements.
*/
public static <T> boolean isEmpty(@Nullable T[] array) {
return array == null || array.length == 0;
}
/**
* Checks if given array is null or has zero elements.
*/
public static boolean isEmpty(@Nullable int[] array) {
return array == null || array.length == 0;
}
/**
* Checks if given array is null or has zero elements.
*/
public static boolean isEmpty(@Nullable long[] array) {
return array == null || array.length == 0;
}
/**
* Checks if given array is null or has zero elements.
*/
public static boolean isEmpty(@Nullable byte[] array) {
return array == null || array.length == 0;
}
/**
* Checks if given array is null or has zero elements.
*/
public static boolean isEmpty(@Nullable boolean[] array) {
return array == null || array.length == 0;
}
/**
* Checks that value is present as at least one of the elements of the array.
* @param array the array to check in
* @param value the value to check for
* @return true if the value is present in the array
*/
public static <T> boolean contains(@Nullable T[] array, T value) {
return indexOf(array, value) != -1;
}
/**
* Return first index of {@code value} in {@code array}, or {@code -1} if
* not found.
*/
public static <T> int indexOf(@Nullable T[] array, T value) {
if (array == null) return -1;
for (int i = 0; i < array.length; i++) {
if (Objects.equals(array[i], value)) return i;
}
return -1;
}
/**
* Test if all {@code check} items are contained in {@code array}.
*/
public static <T> boolean containsAll(@Nullable T[] array, T[] check) {
if (check == null) return true;
for (T checkItem : check) {
if (!contains(array, checkItem)) {
return false;
}
}
return true;
}
/**
* Test if any {@code check} items are contained in {@code array}.
*/
public static <T> boolean containsAny(@Nullable T[] array, T[] check) {
if (check == null) return false;
for (T checkItem : check) {
if (contains(array, checkItem)) {
return true;
}
}
return false;
}
public static boolean contains(@Nullable int[] array, int value) {
if (array == null) return false;
for (int element : array) {
if (element == value) {
return true;
}
}
return false;
}
public static boolean contains(@Nullable long[] array, long value) {
if (array == null) return false;
for (long element : array) {
if (element == value) {
return true;
}
}
return false;
}
public static long total(@Nullable long[] array) {
long total = 0;
if (array != null) {
for (long value : array) {
total += value;
}
}
return total;
}
public static int[] convertToIntArray(List<Integer> list) {
int[] array = new int[list.size()];
for (int i = 0; i < list.size(); i++) {
array[i] = list.get(i);
}
return array;
}
/**
* Adds value to given array if not already present, providing set-like
* behavior.
*/
@SuppressWarnings("unchecked")
public static @NonNull <T> T[] appendElement(Class<T> kind, @Nullable T[] array, T element) {
final T[] result;
final int end;
if (array != null) {
if (contains(array, element)) return array;
end = array.length;
result = (T[])Array.newInstance(kind, end + 1);
System.arraycopy(array, 0, result, 0, end);
} else {
end = 0;
result = (T[])Array.newInstance(kind, 1);
}
result[end] = element;
return result;
}
/**
* Removes value from given array if present, providing set-like behavior.
*/
@SuppressWarnings("unchecked")
public static @Nullable <T> T[] removeElement(Class<T> kind, @Nullable T[] array, T element) {
if (array != null) {
if (!contains(array, element)) return array;
final int length = array.length;
for (int i = 0; i < length; i++) {
if (Objects.equals(array[i], element)) {
if (length == 1) {
return null;
}
T[] result = (T[])Array.newInstance(kind, length - 1);
System.arraycopy(array, 0, result, 0, i);
System.arraycopy(array, i + 1, result, i, length - i - 1);
return result;
}
}
}
return array;
}
/**
* Adds value to given array if not already present, providing set-like
* behavior.
*/
public static @NonNull int[] appendInt(@Nullable int[] cur, int val) {
if (cur == null) {
return new int[] { val };
}
final int N = cur.length;
for (int i = 0; i < N; i++) {
if (cur[i] == val) {
return cur;
}
}
int[] ret = new int[N + 1];
System.arraycopy(cur, 0, ret, 0, N);
ret[N] = val;
return ret;
}
/**
* Removes value from given array if present, providing set-like behavior.
*/
public static @Nullable int[] removeInt(@Nullable int[] cur, int val) {
if (cur == null) {
return null;
}
final int N = cur.length;
for (int i = 0; i < N; i++) {
if (cur[i] == val) {
int[] ret = new int[N - 1];
if (i > 0) {
System.arraycopy(cur, 0, ret, 0, i);
}
if (i < (N - 1)) {
System.arraycopy(cur, i + 1, ret, i, N - i - 1);
}
return ret;
}
}
return cur;
}
/**
* Removes value from given array if present, providing set-like behavior.
*/
public static @Nullable String[] removeString(@Nullable String[] cur, String val) {
if (cur == null) {
return null;
}
final int N = cur.length;
for (int i = 0; i < N; i++) {
if (Objects.equals(cur[i], val)) {
String[] ret = new String[N - 1];
if (i > 0) {
System.arraycopy(cur, 0, ret, 0, i);
}
if (i < (N - 1)) {
System.arraycopy(cur, i + 1, ret, i, N - i - 1);
}
return ret;
}
}
return cur;
}
/**
* Adds value to given array if not already present, providing set-like
* behavior.
*/
public static @NonNull long[] appendLong(@Nullable long[] cur, long val) {
if (cur == null) {
return new long[] { val };
}
final int N = cur.length;
for (int i = 0; i < N; i++) {
if (cur[i] == val) {
return cur;
}
}
long[] ret = new long[N + 1];
System.arraycopy(cur, 0, ret, 0, N);
ret[N] = val;
return ret;
}
/**
* Removes value from given array if present, providing set-like behavior.
*/
public static @Nullable long[] removeLong(@Nullable long[] cur, long val) {
if (cur == null) {
return null;
}
final int N = cur.length;
for (int i = 0; i < N; i++) {
if (cur[i] == val) {
long[] ret = new long[N - 1];
if (i > 0) {
System.arraycopy(cur, 0, ret, 0, i);
}
if (i < (N - 1)) {
System.arraycopy(cur, i + 1, ret, i, N - i - 1);
}
return ret;
}
}
return cur;
}
public static @Nullable long[] cloneOrNull(@Nullable long[] array) {
return (array != null) ? array.clone() : null;
}
public static @Nullable <T> ArraySet<T> cloneOrNull(@Nullable ArraySet<T> array) {
return (array != null) ? new ArraySet<T>(array) : null;
}
public static @NonNull <T> ArraySet<T> add(@Nullable ArraySet<T> cur, T val) {
if (cur == null) {
cur = new ArraySet<>();
}
cur.add(val);
return cur;
}
public static @Nullable <T> ArraySet<T> remove(@Nullable ArraySet<T> cur, T val) {
if (cur == null) {
return null;
}
cur.remove(val);
if (cur.isEmpty()) {
return null;
} else {
return cur;
}
}
public static <T> boolean contains(@Nullable ArraySet<T> cur, T val) {
return (cur != null) ? cur.contains(val) : false;
}
public static @NonNull <T> ArrayList<T> add(@Nullable ArrayList<T> cur, T val) {
if (cur == null) {
cur = new ArrayList<>();
}
cur.add(val);
return cur;
}
public static @Nullable <T> ArrayList<T> remove(@Nullable ArrayList<T> cur, T val) {
if (cur == null) {
return null;
}
cur.remove(val);
if (cur.isEmpty()) {
return null;
} else {
return cur;
}
}
public static <T> boolean contains(@Nullable Collection<T> cur, T val) {
return (cur != null) ? cur.contains(val) : false;
}
public static @Nullable <T> T[] trimToSize(@Nullable T[] array, int size) {
if (array == null || size == 0) {
return null;
} else if (array.length == size) {
return array;
} else {
return Arrays.copyOf(array, size);
}
}
/**
* Returns true if the two ArrayLists are equal with respect to the objects they contain.
* The objects must be in the same order and be reference equal (== not .equals()).
*/
public static <T> boolean referenceEquals(ArrayList<T> a, ArrayList<T> b) {
if (a == b) {
return true;
}
final int sizeA = a.size();
final int sizeB = b.size();
if (a == null || b == null || sizeA != sizeB) {
return false;
}
boolean diff = false;
for (int i = 0; i < sizeA && !diff; i++) {
diff |= a.get(i) != b.get(i);
}
return !diff;
}
/**
* Removes elements that match the predicate in an efficient way that alters the order of
* elements in the collection. This should only be used if order is not important.
* @param collection The ArrayList from which to remove elements.
* @param predicate The predicate that each element is tested against.
* @return the number of elements removed.
*/
public static <T> int unstableRemoveIf(@Nullable ArrayList<T> collection,
@NonNull java.util.function.Predicate<T> predicate) {
if (collection == null) {
return 0;
}
final int size = collection.size();
int leftIdx = 0;
int rightIdx = size - 1;
while (leftIdx <= rightIdx) {
// Find the next element to remove moving left to right.
while (leftIdx < size && !predicate.test(collection.get(leftIdx))) {
leftIdx++;
}
// Find the next element to keep moving right to left.
while (rightIdx > leftIdx && predicate.test(collection.get(rightIdx))) {
rightIdx--;
}
if (leftIdx >= rightIdx) {
// Done.
break;
}
Collections.swap(collection, leftIdx, rightIdx);
leftIdx++;
rightIdx--;
}
// leftIdx is now at the end.
for (int i = size - 1; i >= leftIdx; i--) {
collection.remove(i);
}
return size - leftIdx;
}
}
| |
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.change;
/**
 * Adapter class for the {@link ChangeSwitch} interface. Every visitor case
 * simply rolls up into a more general one, bottoming out at
 * {@link #caseChange caseChange()}, which returns null. Subclasses override
 * only the cases (or roll-up levels) they care about.
 *
 * @param <R> method return type
 */
public class ChangeAdapter<R> implements ChangeSwitch<R> {

    /** Handles {@link ObjectCreate}; rolls up to {@link #caseChange caseChange()}. */
    @Override
    public <T> R caseObjectCreate(ObjectCreate<T> change) {
        return caseChange(change);
    }

    /** Handles {@link ObjectDelete}; rolls up to {@link #caseChange caseChange()}. */
    @Override
    public <T> R caseObjectDelete(ObjectDelete<T> change) {
        return caseChange(change);
    }

    /** Handles {@link ListFieldAdd}; rolls up to {@link #caseListFieldChange caseListFieldChange()}. */
    @Override
    public <T, E> R caseListFieldAdd(ListFieldAdd<T, E> change) {
        return caseListFieldChange(change);
    }

    /** Handles {@link ListFieldClear}; rolls up to {@link #caseListFieldChange caseListFieldChange()}. */
    @Override
    public <T> R caseListFieldClear(ListFieldClear<T> change) {
        return caseListFieldChange(change);
    }

    /** Handles {@link ListFieldRemove}; rolls up to {@link #caseListFieldChange caseListFieldChange()}. */
    @Override
    public <T, E> R caseListFieldRemove(ListFieldRemove<T, E> change) {
        return caseListFieldChange(change);
    }

    /** Handles {@link ListFieldReplace}; rolls up to {@link #caseListFieldChange caseListFieldChange()}. */
    @Override
    public <T, E> R caseListFieldReplace(ListFieldReplace<T, E> change) {
        return caseListFieldChange(change);
    }

    /** Handles {@link MapFieldAdd}; rolls up to {@link #caseMapFieldChange caseMapFieldChange()}. */
    @Override
    public <T, K, V> R caseMapFieldAdd(MapFieldAdd<T, K, V> change) {
        return caseMapFieldChange(change);
    }

    /** Handles {@link MapFieldClear}; rolls up to {@link #caseMapFieldChange caseMapFieldChange()}. */
    @Override
    public <T> R caseMapFieldClear(MapFieldClear<T> change) {
        return caseMapFieldChange(change);
    }

    /** Handles {@link MapFieldRemove}; rolls up to {@link #caseMapFieldChange caseMapFieldChange()}. */
    @Override
    public <T, K, V> R caseMapFieldRemove(MapFieldRemove<T, K, V> change) {
        return caseMapFieldChange(change);
    }

    /** Handles {@link MapFieldReplace}; rolls up to {@link #caseMapFieldChange caseMapFieldChange()}. */
    @Override
    public <T, K, V> R caseMapFieldReplace(MapFieldReplace<T, K, V> change) {
        return caseMapFieldChange(change);
    }

    /** Handles {@link SetFieldAdd}; rolls up to {@link #caseSetFieldChange caseSetFieldChange()}. */
    @Override
    public <T, E> R caseSetFieldAdd(SetFieldAdd<T, E> change) {
        return caseSetFieldChange(change);
    }

    /** Handles {@link SetFieldClear}; rolls up to {@link #caseSetFieldChange caseSetFieldChange()}. */
    @Override
    public <T> R caseSetFieldClear(SetFieldClear<T> change) {
        return caseSetFieldChange(change);
    }

    /** Handles {@link SetFieldRemove}; rolls up to {@link #caseSetFieldChange caseSetFieldChange()}. */
    @Override
    public <T, E> R caseSetFieldRemove(SetFieldRemove<T, E> change) {
        return caseSetFieldChange(change);
    }

    /** Handles {@link SimpleFieldChange}; rolls up to {@link #caseFieldChange caseFieldChange()}. */
    @Override
    public <T, V> R caseSimpleFieldChange(SimpleFieldChange<T, V> change) {
        return caseFieldChange(change);
    }

// Roll-Up Methods

    /**
     * Internal roll-up method; delegates to {@link #caseFieldChange caseFieldChange()}.
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseListFieldChange(ListFieldChange<T> change) {
        return caseFieldChange(change);
    }

    /**
     * Internal roll-up method; delegates to {@link #caseFieldChange caseFieldChange()}.
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseMapFieldChange(MapFieldChange<T> change) {
        return caseFieldChange(change);
    }

    /**
     * Internal roll-up method; delegates to {@link #caseFieldChange caseFieldChange()}.
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseSetFieldChange(SetFieldChange<T> change) {
        return caseFieldChange(change);
    }

    /**
     * Internal roll-up method; delegates to {@link #caseChange caseChange()}.
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseFieldChange(FieldChange<T> change) {
        return caseChange(change);
    }

    /**
     * Internal roll-up method; the final fallback, which returns null.
     *
     * @param change visiting change
     * @param <T> changed object type
     * @return visitor return value
     */
    protected <T> R caseChange(Change<T> change) {
        return null;
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.ui.configuration.classpath;
import com.intellij.CommonBundle;
import com.intellij.analysis.AnalysisScope;
import com.intellij.find.FindBundle;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.impl.scopes.LibraryScope;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.libraries.LibraryTableImplUtil;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.roots.libraries.LibraryTablePresentation;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import com.intellij.openapi.roots.ui.CellAppearanceEx;
import com.intellij.openapi.roots.ui.OrderEntryAppearanceService;
import com.intellij.openapi.roots.ui.configuration.LibraryTableModifiableModelProvider;
import com.intellij.openapi.roots.ui.configuration.ModuleConfigurationState;
import com.intellij.openapi.roots.ui.configuration.ProjectStructureConfigurable;
import com.intellij.openapi.roots.ui.configuration.dependencyAnalysis.AnalyzeDependenciesDialog;
import com.intellij.openapi.roots.ui.configuration.libraries.LibraryEditingUtil;
import com.intellij.openapi.roots.ui.configuration.libraryEditor.EditExistingLibraryDialog;
import com.intellij.openapi.roots.ui.configuration.projectRoot.FindUsagesInProjectStructureActionBase;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ModuleStructureConfigurable;
import com.intellij.openapi.roots.ui.configuration.projectRoot.StructureConfigurableContext;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.LibraryProjectStructureElement;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.ModuleProjectStructureElement;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.ProjectStructureElement;
import com.intellij.openapi.roots.ui.configuration.projectRoot.daemon.SdkProjectStructureElement;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.ComboBoxTableRenderer;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.packageDependencies.DependenciesBuilder;
import com.intellij.packageDependencies.actions.AnalyzeDependenciesOnSpecifiedTargetHandler;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.table.JBTable;
import com.intellij.util.EventDispatcher;
import com.intellij.util.IconUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.TextTransferable;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableColumn;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import java.awt.*;
import java.awt.datatransfer.Transferable;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.*;
import java.util.List;
public class ClasspathPanelImpl extends JPanel implements ClasspathPanel {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.roots.ui.configuration.classpath.ClasspathPanelImpl");
  // Table listing the module's order entries (its dependencies).
  private final JBTable myEntryTable;
  private final ClasspathTableModel myModel;
  private final EventDispatcher<OrderPanelListener> myListeners = EventDispatcher.create(OrderPanelListener.class);
  // Lazily built "Add" popup actions; see initPopupActions().
  private List<AddItemPopupAction<?>> myPopupActions = null;
  private AnActionButton myEditButton;
  private final ModuleConfigurationState myState;
  // Captured from the ToolbarDecorator panel in createTableWithButtons().
  private AnActionButton myRemoveButton;
  /**
   * Builds the classpath panel for the given module configuration state: a sortable
   * dependency table with add/remove/move toolbar buttons and a context popup menu.
   */
  public ClasspathPanelImpl(ModuleConfigurationState state) {
    super(new BorderLayout());
    myState = state;
    myModel = new ClasspathTableModel(state, getStructureConfigurableContext());
    // Table with a custom row sorter that cycles ASC -> DESC -> UNSORTED per column click.
    myEntryTable = new JBTable(myModel) {
      @Override
      protected TableRowSorter<TableModel> createRowSorter(TableModel model) {
        return new DefaultColumnInfoBasedRowSorter(model) {
          @Override
          public void toggleSortOrder(int column) {
            if (isSortable(column)) {
              SortKey oldKey = ContainerUtil.getFirstItem(getSortKeys());
              SortOrder oldOrder;
              if (oldKey == null || oldKey.getColumn() != column) {
                oldOrder = SortOrder.UNSORTED;
              }
              else {
                oldOrder = oldKey.getSortOrder();
              }
              setSortKeys(Collections.singletonList(new SortKey(column, getNextSortOrder(oldOrder))));
            }
          }
        };
      }
    };
    myEntryTable.setShowGrid(false);
    myEntryTable.setDragEnabled(false);
    myEntryTable.setIntercellSpacing(new Dimension(0, 0));
    myEntryTable.setDefaultRenderer(ClasspathTableItem.class, new TableItemRenderer(getStructureConfigurableContext()));
    myEntryTable.setDefaultRenderer(Boolean.class, new ExportFlagRenderer(myEntryTable.getDefaultRenderer(Boolean.class)));
    JComboBox scopeEditor = new ComboBox(new EnumComboBoxModel<DependencyScope>(DependencyScope.class));
    myEntryTable.setDefaultEditor(DependencyScope.class, new DefaultCellEditor(scopeEditor));
    myEntryTable.setDefaultRenderer(DependencyScope.class, new ComboBoxTableRenderer<DependencyScope>(DependencyScope.values()) {
      @Override
      protected String getTextFor(@NotNull final DependencyScope value) {
        return value.getDisplayName();
      }
    });
    // Copy-only transfer handler: copies the presentable name of the selected entry.
    myEntryTable.setTransferHandler(new TransferHandler() {
      @Nullable
      @Override
      protected Transferable createTransferable(JComponent c) {
        OrderEntry entry = getSelectedEntry();
        if (entry == null) return null;
        String text = entry.getPresentableName();
        return new TextTransferable(text);
      }
      @Override
      public int getSourceActions(JComponent c) {
        return COPY;
      }
    });
    myEntryTable.getSelectionModel().setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
    // Speed search matches against the rendered cell text of each item.
    new SpeedSearchBase<JBTable>(myEntryTable) {
      @Override
      public int getSelectedIndex() {
        return myEntryTable.getSelectedRow();
      }
      @Override
      protected int convertIndexToModel(int viewIndex) {
        return myEntryTable.convertRowIndexToModel(viewIndex);
      }
      @Override
      public Object[] getAllElements() {
        final int count = myModel.getRowCount();
        Object[] elements = new Object[count];
        for (int idx = 0; idx < count; idx++) {
          elements[idx] = myModel.getItem(idx);
        }
        return elements;
      }
      @Override
      public String getElementText(Object element) {
        return getCellAppearance((ClasspathTableItem<?>)element, getStructureConfigurableContext(), false).getText();
      }
      @Override
      public void selectElement(Object element, String selectedText) {
        final int count = myModel.getRowCount();
        for (int row = 0; row < count; row++) {
          if (element.equals(myModel.getItem(row))) {
            final int viewRow = myEntryTable.convertRowIndexToView(row);
            myEntryTable.getSelectionModel().setSelectionInterval(viewRow, viewRow);
            TableUtil.scrollSelectionToVisible(myEntryTable);
            break;
          }
        }
      }
    };
    setFixedColumnWidth(ClasspathTableModel.EXPORT_COLUMN);
    setFixedColumnWidth(ClasspathTableModel.SCOPE_COLUMN); // leave space for combobox border
    // SPACE toggles the "exported" flag of the selected rows, but only when all of them are exportable.
    myEntryTable.registerKeyboardAction(
      new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          final int[] selectedRows = myEntryTable.getSelectedRows();
          boolean currentlyMarked = true;
          for (final int selectedRow : selectedRows) {
            final ClasspathTableItem<?> item = getItemAt(selectedRow);
            if (selectedRow < 0 || !item.isExportable()) {
              return;
            }
            currentlyMarked &= item.isExported();
          }
          for (final int selectedRow : selectedRows) {
            getItemAt(selectedRow).setExported(!currentlyMarked);
          }
          myModel.fireTableDataChanged();
          TableUtil.selectRows(myEntryTable, selectedRows);
        }
      },
      KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0),
      WHEN_FOCUSED
    );
    myEditButton = new AnActionButton(ProjectBundle.message("module.classpath.button.edit"), null, IconUtil.getEditIcon()) {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        doEdit();
      }
      @Override
      public boolean isDumbAware() {
        return true;
      }
    };
    // createTableWithButtons() also initializes myRemoveButton, used by the action group below.
    add(createTableWithButtons(), BorderLayout.CENTER);
    if (myEntryTable.getRowCount() > 0) {
      myEntryTable.getSelectionModel().setSelectionInterval(0,0);
    }
    // Double click navigates to the entry, opening the library editor where applicable.
    new DoubleClickListener() {
      @Override
      protected boolean onDoubleClick(MouseEvent e) {
        navigate(true);
        return true;
      }
    }.installOn(myEntryTable);
    DefaultActionGroup actionGroup = new DefaultActionGroup();
    final AnAction navigateAction = new AnAction(ProjectBundle.message("classpath.panel.navigate.action.text")) {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        navigate(false);
      }
      @Override
      public void update(@NotNull AnActionEvent e) {
        final Presentation presentation = e.getPresentation();
        presentation.setEnabled(false);
        final OrderEntry entry = getSelectedEntry();
        if (entry != null && entry.isValid()){
          if (!(entry instanceof ModuleSourceOrderEntry)){
            presentation.setEnabled(true);
          }
        }
      }
    };
    navigateAction.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE).getShortcutSet(),
                                             myEntryTable);
    // Context popup: edit / remove / navigate / find usages / analyze plus "change library level" actions.
    actionGroup.add(myEditButton);
    actionGroup.add(myRemoveButton);
    actionGroup.add(navigateAction);
    actionGroup.add(new MyFindUsagesAction());
    actionGroup.add(new AnalyzeDependencyAction());
    addChangeLibraryLevelAction(actionGroup, LibraryTablesRegistrar.PROJECT_LEVEL);
    addChangeLibraryLevelAction(actionGroup, LibraryTablesRegistrar.APPLICATION_LEVEL);
    addChangeLibraryLevelAction(actionGroup, LibraryTableImplUtil.MODULE_LEVEL);
    PopupHandler.installPopupHandler(myEntryTable, actionGroup, ActionPlaces.UNKNOWN, ActionManager.getInstance());
  }
@NotNull
private static SortOrder getNextSortOrder(@NotNull SortOrder order) {
switch (order) {
case ASCENDING:
return SortOrder.DESCENDING;
case DESCENDING:
return SortOrder.UNSORTED;
case UNSORTED:
default:
return SortOrder.ASCENDING;
}
}
  /** Returns the model item for a view row index (accounts for active sorting). */
  private ClasspathTableItem<?> getItemAt(int selectedRow) {
    return myModel.getItem(myEntryTable.convertRowIndexToModel(selectedRow));
  }
  /** Adds a "move library to the given level" action to the context popup group. */
  private void addChangeLibraryLevelAction(DefaultActionGroup actionGroup, String tableLevel) {
    final LibraryTablePresentation presentation = LibraryEditingUtil.getLibraryTablePresentation(getProject(), tableLevel);
    actionGroup.add(new ChangeLibraryLevelInClasspathAction(this, presentation.getDisplayName(true), tableLevel));
  }
  /** Returns the order entry of the single selected row, or null unless exactly one row is selected. */
  @Override
  @Nullable
  public OrderEntry getSelectedEntry() {
    if (myEntryTable.getSelectedRowCount() != 1) return null;
    return getItemAt(myEntryTable.getSelectedRow()).getEntry();
  }
  /** Pins a column (export / scope) to its preferred width and makes it non-resizable. */
  private void setFixedColumnWidth(final int columnIndex) {
    final TableColumn column = myEntryTable.getTableHeader().getColumnModel().getColumn(columnIndex);
    column.setResizable(false);
    column.setMaxWidth(column.getPreferredWidth());
  }
  /**
   * Navigates to the selected order entry in the Project Structure dialog: modules and
   * SDKs are selected in their configurables; for a library either the library node is
   * selected or, when {@code openLibraryEditor} is true, the library editor is opened.
   */
  @Override
  public void navigate(boolean openLibraryEditor) {
    final OrderEntry entry = getSelectedEntry();
    final ProjectStructureConfigurable rootConfigurable = ProjectStructureConfigurable.getInstance(myState.getProject());
    if (entry instanceof ModuleOrderEntry){
      Module module = ((ModuleOrderEntry)entry).getModule();
      if (module != null) {
        rootConfigurable.select(module.getName(), null, true);
      }
    }
    else if (entry instanceof LibraryOrderEntry){
      if (!openLibraryEditor) {
        rootConfigurable.select((LibraryOrderEntry)entry, true);
      }
      else {
        doEdit();
      }
    }
    else if (entry instanceof JdkOrderEntry) {
      Sdk jdk = ((JdkOrderEntry)entry).getJdk();
      if (jdk != null) {
        rootConfigurable.select(jdk, true);
      }
    }
  }
  /**
   * Wraps the entry table in a {@link ToolbarDecorator} panel with Add/Remove/Move Up/
   * Move Down buttons (moving is disabled while a sort order is active) and captures the
   * decorator's remove button in {@link #myRemoveButton}.
   */
  private JComponent createTableWithButtons() {
    // The "Analyze" toolbar button is currently switched off by this flag.
    final boolean isAnalyzeShown = false;
    final ClasspathPanelAction removeAction = new ClasspathPanelAction(this) {
      @Override
      public void run() {
        removeSelectedItems(TableUtil.removeSelectedItems(myEntryTable));
      }
    };
    final AnActionButton analyzeButton = new AnActionButton(ProjectBundle.message("classpath.panel.analyze"), null, IconUtil.getAnalyzeIcon()) {
      @Override
      public void actionPerformed(@NotNull AnActionEvent e) {
        AnalyzeDependenciesDialog.show(getRootModel().getModule());
      }
    };
    //addButton.setShortcut(CustomShortcutSet.fromString("alt A", "INSERT"));
    //removeButton.setShortcut(CustomShortcutSet.fromString("alt DELETE"));
    //upButton.setShortcut(CustomShortcutSet.fromString("alt UP"));
    //downButton.setShortcut(CustomShortcutSet.fromString("alt DOWN"));
    // we need to register our listener before ToolbarDecorator registers its own. Otherwise
    myEntryTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(ListSelectionEvent e) {
        if (e.getValueIsAdjusting()) {
          return;
        }
        updateButtons();
      }
    });
    final ToolbarDecorator decorator = ToolbarDecorator.createDecorator(myEntryTable);
    // Moving rows only makes sense while the table shows model order (no active sorting).
    AnActionButtonUpdater moveUpDownUpdater = new AnActionButtonUpdater() {
      @Override
      public boolean isEnabled(AnActionEvent e) {
        for (RowSorter.SortKey key : myEntryTable.getRowSorter().getSortKeys()) {
          if (key.getSortOrder() != SortOrder.UNSORTED) {
            return false;
          }
        }
        return true;
      }
    };
    decorator.setAddAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        initPopupActions();
        final JBPopup popup = JBPopupFactory.getInstance().createListPopup(
          new BaseListPopupStep<AddItemPopupAction<?>>(null, myPopupActions) {
            @Override
            public Icon getIconFor(AddItemPopupAction<?> aValue) {
              return aValue.getIcon();
            }
            @Override
            public boolean hasSubstep(AddItemPopupAction<?> selectedValue) {
              return selectedValue.hasSubStep();
            }
            @Override
            public boolean isMnemonicsNavigationEnabled() {
              return true;
            }
            @Override
            public PopupStep onChosen(final AddItemPopupAction<?> selectedValue, final boolean finalChoice) {
              if (selectedValue.hasSubStep()) {
                return selectedValue.createSubStep();
              }
              return doFinalStep(new Runnable() {
                @Override
                public void run() {
                  selectedValue.execute();
                }
              });
            }
            @Override
            @NotNull
            public String getTextFor(AddItemPopupAction<?> value) {
              return "&" + value.getIndex() + " " + value.getTitle();
            }
          });
        popup.show(button.getPreferredPopupPoint());
      }
    })
    .setRemoveAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        removeAction.actionPerformed(null);
      }
    })
    .setMoveUpAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        moveSelectedRows(-1);
      }
    })
    .setMoveUpActionUpdater(moveUpDownUpdater)
    .setMoveUpActionName("Move Up (disabled if items are shown in sorted order)")
    .setMoveDownAction(new AnActionButtonRunnable() {
      @Override
      public void run(AnActionButton button) {
        moveSelectedRows(+1);
      }
    })
    .setMoveDownActionUpdater(moveUpDownUpdater)
    .setMoveDownActionName("Move Down (disabled if items are shown in sorted order)")
    .addExtraAction(myEditButton);
    if (isAnalyzeShown) {
      decorator.addExtraAction(analyzeButton);
    }
    final JPanel panel = decorator.createPanel();
    myRemoveButton = ToolbarDecorator.findRemoveButton(panel);
    return panel;
  }
  /**
   * Opens the library editor dialog for the selected library order entry, then repaints
   * the table and the module structure tree to reflect any edits.
   */
  private void doEdit() {
    final OrderEntry entry = getSelectedEntry();
    if (!(entry instanceof LibraryOrderEntry)) return;
    final Library library = ((LibraryOrderEntry)entry).getLibrary();
    if (library == null) {
      return;
    }
    final LibraryTable table = library.getTable();
    // A null table means a module-level library.
    final String tableLevel = table != null ? table.getTableLevel() : LibraryTableImplUtil.MODULE_LEVEL;
    final LibraryTablePresentation presentation = LibraryEditingUtil.getLibraryTablePresentation(getProject(), tableLevel);
    final LibraryTableModifiableModelProvider provider = getModifiableModelProvider(tableLevel);
    EditExistingLibraryDialog dialog = EditExistingLibraryDialog.createDialog(this, provider, library, myState.getProject(),
                                                                              presentation, getStructureConfigurableContext());
    dialog.setContextModule(getRootModel().getModule());
    dialog.show();
    myEntryTable.repaint();
    ModuleStructureConfigurable.getInstance(myState.getProject()).getTree().repaint();
  }
  /** Syncs button enablement as soon as the panel is added to the component hierarchy. */
  @Override
  public void addNotify() {
    super.addNotify();
    updateButtons();
  }
private void updateButtons() {
final int[] selectedRows = myEntryTable.getSelectedRows();
boolean removeButtonEnabled = true;
int minRow = myEntryTable.getRowCount() + 1;
int maxRow = -1;
for (final int selectedRow : selectedRows) {
minRow = Math.min(minRow, selectedRow);
maxRow = Math.max(maxRow, selectedRow);
final ClasspathTableItem<?> item = getItemAt(selectedRow);
if (!item.isRemovable()) {
removeButtonEnabled = false;
}
}
if (myRemoveButton != null) {
myRemoveButton.setEnabled(removeButtonEnabled && selectedRows.length > 0);
}
ClasspathTableItem<?> selectedItem = selectedRows.length == 1 ? getItemAt(selectedRows[0]) : null;
myEditButton.setEnabled(selectedItem != null && selectedItem.isEditable());
}
  /**
   * Removes the given rows' order entries from the root model, refreshes the table,
   * and queues a project-structure validation update for the module.
   *
   * @param removedRows rows returned by {@link TableUtil#removeSelectedItems} (each an Object[] of column values)
   */
  private void removeSelectedItems(final List removedRows) {
    if (removedRows.isEmpty()) {
      return;
    }
    for (final Object removedRow : removedRows) {
      final ClasspathTableItem<?> item = (ClasspathTableItem<?>)((Object[])removedRow)[ClasspathTableModel.ITEM_COLUMN];
      final OrderEntry orderEntry = item.getEntry();
      if (orderEntry == null) {
        continue;
      }
      getRootModel().removeOrderEntry(orderEntry);
    }
    final int[] selectedRows = myEntryTable.getSelectedRows();
    myModel.fireTableDataChanged();
    TableUtil.selectRows(myEntryTable, selectedRows);
    final StructureConfigurableContext context = ModuleStructureConfigurable.getInstance(myState.getProject()).getContext();
    context.getDaemonAnalyzer().queueUpdate(new ModuleProjectStructureElement(context, getRootModel().getModule()));
  }
  /**
   * Returns a modifiable-model provider for the given library table level; module-level
   * requests are served from this module's own library table.
   */
  @Override
  @NotNull
  public LibraryTableModifiableModelProvider getModifiableModelProvider(@NotNull String tableLevel) {
    if (LibraryTableImplUtil.MODULE_LEVEL.equals(tableLevel)) {
      final LibraryTable moduleLibraryTable = getRootModel().getModuleLibraryTable();
      return new LibraryTableModifiableModelProvider() {
        @Override
        public LibraryTable.ModifiableModel getModifiableModel() {
          return moduleLibraryTable.getModifiableModel();
        }
      };
    }
    else {
      return getStructureConfigurableContext().createModifiableModelProvider(tableLevel);
    }
  }
  /**
   * Runs the given action with model-update notifications suppressed, restoring them
   * (and returning focus to the table) afterwards even if the action throws.
   */
  @Override
  public void runClasspathPanelAction(Runnable action) {
    try {
      disableModelUpdate();
      action.run();
    }
    finally {
      enableModelUpdate();
      myEntryTable.requestFocus();
    }
  }
  /**
   * Appends the given items to the table model, selects and scrolls to the newly added
   * rows, and queues a project-structure validation update for the module.
   */
  @Override
  public void addItems(List<ClasspathTableItem<?>> toAdd) {
    for (ClasspathTableItem<?> item : toAdd) {
      myModel.addRow(item);
    }
    TIntArrayList toSelect = new TIntArrayList();
    // The new rows occupy the last toAdd.size() model indices; map them to view indices.
    for (int i = myModel.getRowCount() - toAdd.size(); i < myModel.getRowCount(); i++) {
      toSelect.add(myEntryTable.convertRowIndexToView(i));
    }
    TableUtil.selectRows(myEntryTable, toSelect.toNativeArray());
    TableUtil.scrollSelectionToVisible(myEntryTable);
    final StructureConfigurableContext context = ModuleStructureConfigurable.getInstance(myState.getProject()).getContext();
    context.getDaemonAnalyzer().queueUpdate(new ModuleProjectStructureElement(context, getRootModel().getModule()));
  }
  /** Returns the modifiable root model being edited. */
  @Override
  public ModifiableRootModel getRootModel() {
    return myState.getRootModel();
  }
  /** Returns the project the edited module belongs to. */
  @Override
  public Project getProject() {
    return myState.getProject();
  }
  /** Returns the module configuration state backing this panel. */
  @Override
  public ModuleConfigurationState getModuleConfigurationState() {
    return myState;
  }
  /** The panel itself is the UI component exposed to callers. */
  @Override
  public JComponent getComponent() {
    return this;
  }
  /** Called when the module's roots change externally; rebuilds the table contents. */
  public void rootsChanged() {
    forceInitFromModel();
  }
private void initPopupActions() {
if (myPopupActions == null) {
int actionIndex = 1;
final List<AddItemPopupAction<?>> actions = new ArrayList<AddItemPopupAction<?>>();
final StructureConfigurableContext context = getStructureConfigurableContext();
actions.add(new AddNewModuleLibraryAction(this, actionIndex++, context));
actions.add(new AddLibraryDependencyAction(this, actionIndex++, ProjectBundle.message("classpath.add.library.action"), context));
actions.add(new AddModuleDependencyAction(this, actionIndex, context)
);
myPopupActions = actions;
}
}
  /** Shortcut for the project-structure context of the current project. */
  private StructureConfigurableContext getStructureConfigurableContext() {
    return ProjectStructureConfigurable.getInstance(myState.getProject()).getContext();
  }
  /** Decrements the suppression counter; counterpart of {@link #disableModelUpdate()}. */
  private void enableModelUpdate() {
    myInsideChange--;
  }
  /** Increments the suppression counter; while > 0, initFromModel() is a no-op. */
  private void disableModelUpdate() {
    myInsideChange++;
  }
  public void addListener(OrderPanelListener listener) {
    myListeners.addListener(listener);
  }
  public void removeListener(OrderPanelListener listener) {
    myListeners.removeListener(listener);
  }
  /**
   * Moves every selected row by {@code increment} (+1 = down, -1 = up), keeping the
   * selection on the moved rows and scrolling it into view.
   */
  private void moveSelectedRows(int increment) {
    LOG.assertTrue(increment == -1 || increment == 1);
    if (myEntryTable.isEditing()) {
      myEntryTable.getCellEditor().stopCellEditing();
    }
    final ListSelectionModel selectionModel = myEntryTable.getSelectionModel();
    // When moving up, rows are processed top-to-bottom; when moving down, bottom-to-top,
    // so consecutive selected rows do not collide while being exchanged one at a time.
    for (int row = increment < 0 ? 0 : myModel.getRowCount() - 1; increment < 0 ? row < myModel.getRowCount() : row >= 0; row +=
      increment < 0 ? +1 : -1) {
      if (selectionModel.isSelectedIndex(row)) {
        final int newRow = moveRow(row, increment);
        selectionModel.removeSelectionInterval(row, row);
        selectionModel.addSelectionInterval(newRow, newRow);
      }
    }
    Rectangle cellRect = myEntryTable.getCellRect(selectionModel.getMinSelectionIndex(), 0, true);
    myEntryTable.scrollRectToVisible(cellRect);
    myEntryTable.repaint();
  }
  /**
   * Selects the row whose order entry has the same presentable name as {@code entry}
   * and moves focus to the table. NOTE(review): the loop does not break after a match,
   * and each match resets the selection interval, so with duplicate presentable names
   * the last matching row ends up selected — confirm this is intended.
   */
  public void selectOrderEntry(@NotNull OrderEntry entry) {
    for (int row = 0; row < myModel.getRowCount(); row++) {
      final OrderEntry orderEntry = getItemAt(row).getEntry();
      if (orderEntry != null && entry.getPresentableName().equals(orderEntry.getPresentableName())) {
        myEntryTable.getSelectionModel().setSelectionInterval(row, row);
        TableUtil.scrollSelectionToVisible(myEntryTable);
      }
    }
    IdeFocusManager.getInstance(myState.getProject()).requestFocus(myEntryTable, true);
  }
  /**
   * Exchanges {@code row} with the adjacent row in the move direction and returns the
   * new index. The abs/modulo arithmetic makes the ends wrap instead of going out of
   * bounds (e.g. the last row moved down wraps around to index 0).
   */
  private int moveRow(final int row, final int increment) {
    int newIndex = Math.abs(row + increment) % myModel.getRowCount();
    myModel.exchangeRows(row, newIndex);
    return newIndex;
  }
  /** Commits any in-progress cell edit in the entry table. */
  public void stopEditing() {
    TableUtil.stopEditing(myEntryTable);
  }
  // Nesting depth of runClasspathPanelAction(); while > 0, initFromModel() is suppressed.
  private int myInsideChange = 0;
  /** Rebuilds the table from the root model unless updates are currently suppressed. */
  public void initFromModel() {
    if (myInsideChange == 0) {
      forceInitFromModel();
    }
  }
  /**
   * Unconditionally rebuilds the table model from the module's root model, restoring as
   * much of the previous row selection as possible.
   */
  public void forceInitFromModel() {
    // Remember the currently selected items so the selection survives the rebuild.
    Set<ClasspathTableItem<?>> oldSelection = new HashSet<ClasspathTableItem<?>>();
    for (int i : myEntryTable.getSelectedRows()) {
      ContainerUtil.addIfNotNull(getItemAt(i), oldSelection);
    }
    myModel.clear();
    myModel.init();
    myModel.fireTableDataChanged();
    TIntArrayList newSelection = new TIntArrayList();
    for (int i = 0; i < myModel.getRowCount(); i++) {
      if (oldSelection.contains(getItemAt(i))) {
        newSelection.add(i);
      }
    }
    TableUtil.selectRows(myEntryTable, newSelection.toNativeArray());
  }
static CellAppearanceEx getCellAppearance(final ClasspathTableItem<?> item,
final StructureConfigurableContext context,
final boolean selected) {
final OrderEntryAppearanceService service = OrderEntryAppearanceService.getInstance();
if (item instanceof InvalidJdkItem) {
return service.forJdk(null, false, selected, true);
}
else {
final OrderEntry entry = item.getEntry();
assert entry != null : item;
return service.forOrderEntry(context.getProject(), entry, selected);
}
}
  /** Renders a classpath table item using its structure-aware cell appearance and tooltip. */
  private static class TableItemRenderer extends ColoredTableCellRenderer {
    private final Border NO_FOCUS_BORDER = BorderFactory.createEmptyBorder(1, 1, 1, 1);
    private StructureConfigurableContext myContext;
    public TableItemRenderer(StructureConfigurableContext context) {
      myContext = context;
    }
    @Override
    protected void customizeCellRenderer(JTable table, Object value, boolean selected, boolean hasFocus, int row, int column) {
      setPaintFocusBorder(false);
      setFocusBorderAroundIcon(true);
      setBorder(NO_FOCUS_BORDER);
      if (value instanceof ClasspathTableItem<?>) {
        final ClasspathTableItem<?> tableItem = (ClasspathTableItem<?>)value;
        getCellAppearance(tableItem, myContext, selected).customize(this);
        setToolTipText(tableItem.getTooltipText());
      }
    }
  }
private static class ExportFlagRenderer implements TableCellRenderer {
private final TableCellRenderer myDelegate;
private final JPanel myBlankPanel;
public ExportFlagRenderer(TableCellRenderer delegate) {
myDelegate = delegate;
myBlankPanel = new JPanel();
}
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
if (!table.isCellEditable(row, column)) {
myBlankPanel.setBackground(isSelected ? table.getSelectionBackground() : table.getBackground());
return myBlankPanel;
}
return myDelegate.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
}
}
  /**
   * "Find Usages" for the selected dependency: maps the selected order entry to the
   * corresponding project-structure element (library, module or SDK).
   */
  private class MyFindUsagesAction extends FindUsagesInProjectStructureActionBase {
    private MyFindUsagesAction() {
      super(myEntryTable, myState.getProject());
    }
    @Override
    protected boolean isEnabled() {
      return getSelectedElement() != null;
    }
    @Override
    protected ProjectStructureElement getSelectedElement() {
      final OrderEntry entry = getSelectedEntry();
      if (entry instanceof LibraryOrderEntry) {
        final Library library = ((LibraryOrderEntry)entry).getLibrary();
        if (library != null) {
          return new LibraryProjectStructureElement(getContext(), library);
        }
      }
      else if (entry instanceof ModuleOrderEntry) {
        final Module module = ((ModuleOrderEntry)entry).getModule();
        if (module != null) {
          return new ModuleProjectStructureElement(getContext(), module);
        }
      }
      else if (entry instanceof JdkOrderEntry) {
        final Sdk jdk = ((JdkOrderEntry)entry).getJdk();
        if (jdk != null) {
          return new SdkProjectStructureElement(getContext(), jdk);
        }
      }
      return null;
    }
    @Override
    protected RelativePoint getPointToShowResults() {
      // Show the results popup just below the selected cell of the second column.
      Rectangle rect = myEntryTable.getCellRect(myEntryTable.getSelectedRow(), 1, false);
      Point location = rect.getLocation();
      location.y += rect.height;
      return new RelativePoint(myEntryTable, location);
    }
  }
  /**
   * Analyzes whether the module's code actually depends on the selected module/library
   * entry; if no code dependencies are found, offers to remove the entry. Only visible
   * for module entries with a module and library entries with a library (see update()).
   */
  private class AnalyzeDependencyAction extends AnAction {
    private AnalyzeDependencyAction() {
      super("Analyze This Dependency");
    }
    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      final OrderEntry selectedEntry = getSelectedEntry();
      GlobalSearchScope targetScope;
      if (selectedEntry instanceof ModuleOrderEntry) {
        final Module module = ((ModuleOrderEntry)selectedEntry).getModule();
        LOG.assertTrue(module != null);
        targetScope = GlobalSearchScope.moduleScope(module);
      }
      else {
        // update() guarantees the only other visible case is a library entry with a library.
        Library library = ((LibraryOrderEntry)selectedEntry).getLibrary();
        LOG.assertTrue(library != null);
        targetScope = new LibraryScope(getProject(), library);
      }
      new AnalyzeDependenciesOnSpecifiedTargetHandler(getProject(), new AnalysisScope(myState.getRootModel().getModule()),
                                                      targetScope) {
        @Override
        protected boolean shouldShowDependenciesPanel(List<DependenciesBuilder> builders) {
          for (DependenciesBuilder builder : builders) {
            for (Set<PsiFile> files : builder.getDependencies().values()) {
              if (!files.isEmpty()) {
                Messages.showInfoMessage(myProject,
                                         "Dependencies were successfully collected in \"" +
                                         ToolWindowId.DEPENDENCIES + "\" toolwindow",
                                         FindBundle.message("find.pointcut.applications.not.found.title"));
                return true;
              }
            }
          }
          // No usages found: offer to drop the dependency instead of showing the panel.
          if (Messages.showOkCancelDialog(myProject,
                                          "No code dependencies were found. Would you like to remove the dependency?",
                                          CommonBundle.getWarningTitle(), Messages.getWarningIcon()) == Messages.OK) {
            removeSelectedItems(TableUtil.removeSelectedItems(myEntryTable));
          }
          return false;
        }
      }.analyze();
    }
    @Override
    public void update(@NotNull AnActionEvent e) {
      final OrderEntry entry = getSelectedEntry();
      e.getPresentation().setVisible(entry instanceof ModuleOrderEntry && ((ModuleOrderEntry)entry).getModule() != null
                                     || entry instanceof LibraryOrderEntry && ((LibraryOrderEntry)entry).getLibrary() != null);
    }
  }
}
| |
/*
* Copyright 2015 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.cdn.traffic_control.traffic_router.core.cache;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.log4j.Logger;
import org.json.JSONObject;
import com.comcast.cdn.traffic_control.traffic_router.core.hash.HashFunction;
import com.comcast.cdn.traffic_control.traffic_router.core.hash.MD5HashFunction;
public class Cache implements Comparable<Cache> {
	private static final Logger LOGGER = Logger.getLogger(Cache.class);
	// Default number of points placed on the consistent-hash ring per cache.
	private static final int REPLICAS = 1000;
	/*
	 * Configuration Attributes
	 */
	private final String id;
	private String fqdn;
	// Resolved addresses for this cache; lazily populated by getIpAddresses().
	private List<InetRecord> ipAddresses;
	private int port;
	private Collection<DeliveryServiceReference> deliveryServices = new ArrayList<DeliveryServiceReference>();
	// Sorted consistent-hash ring positions for this cache; built in the constructor.
	final private List<Double> hashValues;
	final private int replicas;
	/**
	 * Creates a new {@link Cache}.
	 *
	 * @param id
	 *            the id of the new cache
	 * @param hashId
	 *            seed string used to generate this cache's hash-ring positions
	 * @param hashCount
	 *            number of ring positions to generate; 0 selects the default ({@value #REPLICAS})
	 */
	public Cache(final String id, final String hashId, final int hashCount) {
		this.id = id;
		final SortedSet<Double> sorter = new TreeSet<Double>();
		final HashFunction hash = new MD5HashFunction();
		replicas = (hashCount==0)? REPLICAS : hashCount;
		for (int i = 0; i < replicas; i++) {
			sorter.add(hash.hash(hashId + "--" + i));
		}
		// Keep the ring positions sorted so getClosestHash() can binary-search them.
		hashValues = new ArrayList<Double>(sorter);
	}
@Override
public int compareTo(final Cache o) {
return getId().compareTo(o.getId());
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
} else if (obj instanceof Cache) {
final Cache rhs = (Cache) obj;
return new EqualsBuilder()
.append(getId(), rhs.getId())
.isEquals();
} else {
return false;
}
}
	/** Delivery services this cache may serve. */
	public Collection<DeliveryServiceReference> getDeliveryServices() {
		return deliveryServices;
	}
	/** Fully qualified domain name of this cache. */
	public String getFqdn() {
		return fqdn;
	}
	/** Sorted hash-ring positions of this cache. */
	public List<Double> getHashValues() {
		return hashValues;
	}
	/**
	 * Binary-searches the sorted ring positions for the value closest to {@code hash};
	 * returns 0 if the search does not converge within {@code replicas} iterations
	 * (a defensive escape hatch — convergence is normally O(log n)).
	 */
	public double getClosestHash(final double hash) {
		// assume hashValues sorted
		int hi = hashValues.size() -1;
		int lo = 0;
		int i = (hi-lo)/2;
		// you can tell a match if it's closer to hash than it's neighbors
		for(int j = 0; j < replicas; j++) { // j is just for an escape hatch, should be found O(log(REPLICAS))
			final int r = match(hashValues, i, hash);
			if(r==0) {
				return hashValues.get(i);
			}
			if(r < 0) {
				hi = i-1;
			} else {
				lo = i+1;
			}
			i = (hi+lo)/2;
		}
		return 0;
	}
	/**
	 * Compares {@code a.get(i)} against its neighbors: returns 0 when index i is at
	 * least as close to {@code hash} as both neighbors (a match), 1 when the higher
	 * neighbor is strictly closer, -1 when the lower neighbor is strictly closer.
	 */
	private int match(final List<Double> a, final int i, final double hash) {
		// you can tell a match if it's closer to hash than it's neighbors
		final double v = a.get(i).doubleValue();
		if(i+1 < a.size() && Math.abs(hash - a.get(i+1).doubleValue() ) < Math.abs(hash-v)) {
			return 1; // closer to hi neighbor
		}
		if(i-1 >= 0 && Math.abs(hash - a.get(i-1).doubleValue() ) < Math.abs(hash-v)) {
			return -1; // closer to lo neighbor
		}
		return 0; // match!
	}
	/** Unique id of this cache. */
	public String getId() {
		return id;
	}
	/** Resolves this cache's addresses with IPv6 routing enabled by default. */
	public List<InetRecord> getIpAddresses(final JSONObject ttls, final Resolver resolver) {
		return getIpAddresses(ttls, resolver, true);
	}
/**
 * Returns this cache's address records, resolving them lazily from the FQDN on
 * first use, with the supplied TTLs applied per record type.
 *
 * @param ttls              JSON object carrying "A"/"AAAA" TTLs; null yields TTL -1
 * @param resolver          resolver used when no addresses are cached yet
 * @param ip6RoutingEnabled when false, IPv6 records are filtered out
 * @return the records with TTLs applied, or null when resolution yields nothing
 */
public List<InetRecord> getIpAddresses(final JSONObject ttls, final Resolver resolver, final boolean ip6RoutingEnabled) {
    if (ipAddresses == null || ipAddresses.isEmpty()) {
        ipAddresses = resolver.resolve(this.getFqdn() + ".");
    }
    if (ipAddresses == null) {
        return null;
    }
    final List<InetRecord> result = new ArrayList<InetRecord>();
    for (final InetRecord record : ipAddresses) {
        if (record.isInet6() && !ip6RoutingEnabled) {
            continue; // caller requested IPv4-only routing
        }
        final long ttl;
        if (ttls == null) {
            ttl = -1;
        } else {
            ttl = record.isInet6() ? ttls.optLong("AAAA") : ttls.optLong("A");
        }
        result.add(new InetRecord(record.getAddress(), ttl));
    }
    return result;
}
// static Resolver resolver = new Resolver();
// private static Resolver getResolver() {
// return resolver;
// }
// public static void setResolver(final Resolver r) {
// resolver = r;
// }
/** @return the TCP port this cache listens on */
public int getPort() {
    return this.port;
}

/** Hash code derived solely from the identifier, mirroring equals(). */
@Override
public int hashCode() {
    final HashCodeBuilder builder = new HashCodeBuilder(1, 31);
    builder.append(getId());
    return builder.toHashCode();
}
/** Replaces the delivery-service references assigned to this cache. */
public void setDeliveryServices(final Collection<DeliveryServiceReference> deliveryServices) {
    this.deliveryServices = deliveryServices;
}

/** Sets the fully qualified domain name used for address resolution. */
public void setFqdn(final String fqdn) {
    this.fqdn = fqdn;
}

/** Replaces the cached address records. */
public void setIpAddresses(final List<InetRecord> ipAddresses) {
    this.ipAddresses = ipAddresses;
}

/** Sets the TCP port this cache listens on. */
public void setPort(final int port) {
    this.port = port;
}

/** @return a short human-readable description of this cache */
@Override
public String toString() {
    return "Cache [id=" + id + "] ";
}
/**
* Status enumeration for administratively reported status.
*/
// public enum AdminStatus {
// ONLINE, OFFLINE, REPORTED, ADMIN_DOWN
// }
/**
* Contains a reference to a DeliveryService ID and the FQDN that should be used if this Cache
* is used when supporting the DeliveryService.
*/
public static class DeliveryServiceReference {
    private final String deliveryServiceId;
    private final String fqdn;

    /**
     * @param deliveryServiceId identifier of the referenced DeliveryService
     * @param fqdn              FQDN to use when this cache serves that DeliveryService
     */
    public DeliveryServiceReference(final String deliveryServiceId, final String fqdn) {
        this.deliveryServiceId = deliveryServiceId;
        this.fqdn = fqdn;
    }

    /** @return the referenced DeliveryService identifier */
    public String getDeliveryServiceId() {
        return this.deliveryServiceId;
    }

    /** @return the FQDN to advertise for the referenced DeliveryService */
    public String getFqdn() {
        return this.fqdn;
    }
}
// Current health state; only meaningful once hasAuthority is true.
boolean isAvailable = false;
// Whether an authoritative health update has ever been applied.
boolean hasAuthority = false;
/**
 * Records an authoritative availability state for this cache; also marks the
 * state as authoritative so hasAuthority() returns true from now on.
 */
public void setIsAvailable(final boolean isAvailable) {
this.hasAuthority = true;
this.isAvailable = isAvailable;
}
/** @return true once setIsAvailable() has been called at least once */
public boolean hasAuthority() {
return hasAuthority;
}
/** @return the last reported availability; false until an update arrives */
public boolean isAvailable() {
return isAvailable;
}
// Explicitly configured addresses; null until setIpAddress() supplies them.
InetAddress ip4;
InetAddress ip6;
/**
 * Replaces the cached address records from explicit IPv4/IPv6 strings. A
 * missing address family is logged as an error but does not prevent the
 * other family from being recorded.
 *
 * @param ip  IPv4 address string; null/empty skips the IPv4 record
 * @param ip6 IPv6 address string, optionally with a "/prefix" suffix that is stripped
 * @param ttl TTL applied to both records
 * @throws UnknownHostException if an address string cannot be parsed
 */
public void setIpAddress(final String ip, final String ip6, final long ttl) throws UnknownHostException {
this.ipAddresses = new ArrayList<InetRecord>();
if (ip != null && !ip.isEmpty()) {
this.ip4 = InetAddress.getByName(ip);
ipAddresses.add(new InetRecord(ip4, ttl));
} else {
LOGGER.error(getFqdn() + " - no IPv4 address configured!");
}
if (ip6 != null && !ip6.isEmpty()) {
// strip a CIDR-style suffix such as "/64" before parsing
final String ip6addr = ip6.replaceAll("/.*", "");
this.ip6 = Inet6Address.getByName(ip6addr);
ipAddresses.add(new InetRecord(this.ip6, ttl));
} else {
LOGGER.error(getFqdn() + " - no IPv6 address configured!");
}
}
/** @return the explicitly configured IPv4 address, or null if none was set */
public InetAddress getIp4() {
    return this.ip4;
}

/** @return the explicitly configured IPv6 address, or null if none was set */
public InetAddress getIp6() {
    return this.ip6;
}

/**
 * Applies a health-state update; a missing state object or a missing
 * "isAvailable" key defaults to available.
 */
public void setState(final JSONObject state) {
    final boolean available =
            state == null || !state.has("isAvailable") || state.optBoolean("isAvailable");
    this.setIsAvailable(available);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics.cardinality;
import com.carrotsearch.hppc.BitMixer;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* An aggregator that computes approximate counts of unique values.
*/
public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue {
// HyperLogLog++ precision (number of registers = 2^precision).
private final int precision;
private final ValuesSource valuesSource;
// Expensive to initialize, so we only initialize it when we have an actual value source
@Nullable
private HyperLogLogPlusPlus counts;
// Collector for the segment currently being processed; flushed and closed
// before the next segment starts (see postCollectLastCollector()).
private Collector collector;
// Flag carried through to InternalCardinality; semantics defined by callers
// of setSumDirectly() — not interpreted inside this class.
private boolean sumDirectly;
/**
 * Creates the aggregator. The HLL++ sketch is only allocated when a values
 * source exists; otherwise this aggregator always produces empty results.
 */
public CardinalityAggregator(String name, ValuesSource valuesSource, int precision,
AggregationContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
super(name, context, parent, pipelineAggregators, metaData);
this.valuesSource = valuesSource;
this.precision = precision;
this.counts = valuesSource == null ? null : new HyperLogLogPlusPlus(precision, context.bigArrays(), 1);
}
public boolean isSumDirectly() {
return sumDirectly;
}
public void setSumDirectly(boolean sumDirectly) {
this.sumDirectly = sumDirectly;
}
@Override
public boolean needsScores() {
return valuesSource != null && valuesSource.needsScores();
}
/**
 * Chooses the cheapest collection strategy for the given segment:
 * empty (no source), direct hashing (numeric / high-cardinality bytes),
 * or ordinal-based (global ordinals with acceptable memory overhead).
 */
private Collector pickCollector(LeafReaderContext ctx) throws IOException {
if (valuesSource == null) {
return new EmptyCollector();
}
if (valuesSource instanceof ValuesSource.Numeric) {
ValuesSource.Numeric source = (ValuesSource.Numeric) valuesSource;
MurmurHash3Values hashValues = source.isFloatingPoint() ? MurmurHash3Values.hash(source.doubleValues(ctx)) : MurmurHash3Values.hash(source.longValues(ctx));
return new DirectCollector(counts, hashValues);
}
if (valuesSource instanceof ValuesSource.Bytes.WithOrdinals) {
ValuesSource.Bytes.WithOrdinals source = (ValuesSource.Bytes.WithOrdinals) valuesSource;
final RandomAccessOrds ordinalValues = source.ordinalsValues(ctx);
final long maxOrd = ordinalValues.getValueCount();
if (maxOrd == 0) {
return new EmptyCollector();
}
final long ordinalsMemoryUsage = OrdinalsCollector.memoryOverhead(maxOrd);
final long countsMemoryUsage = HyperLogLogPlusPlus.memoryUsage(precision);
// only use ordinals if they don't increase memory usage by more than 25%
if (ordinalsMemoryUsage < countsMemoryUsage / 4) {
return new OrdinalsCollector(counts, ordinalValues, context.bigArrays());
}
}
// fallback: hash the raw byte values on the fly
return new DirectCollector(counts, MurmurHash3Values.hash(valuesSource.bytesValues(ctx)));
}
@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException {
// flush the previous segment's collector before switching segments
postCollectLastCollector();
collector = pickCollector(ctx);
return collector;
}
/**
 * Flushes and releases the active collector, if any. Always nulls out the
 * field so a failure in postCollect() cannot cause a double close later.
 */
private void postCollectLastCollector() {
if (collector != null) {
try {
collector.postCollect();
collector.close();
} finally {
collector = null;
}
}
}
@Override
protected void doPostCollection() {
postCollectLastCollector();
}
@Override
public double metric(long owningBucketOrd) {
return counts == null ? 0 : counts.cardinality(owningBucketOrd);
}
@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) {
if (counts == null || owningBucketOrdinal >= counts.maxBucket() || counts.cardinality(owningBucketOrdinal) == 0) {
return buildEmptyAggregation();
}
// We need to build a copy because the returned Aggregation needs remain usable after
// this Aggregator (and its HLL++ counters) is released.
HyperLogLogPlusPlus copy = new HyperLogLogPlusPlus(precision, BigArrays.NON_RECYCLING_INSTANCE, 1);
copy.merge(0, counts, owningBucketOrdinal);
final InternalCardinality internalCardinality = new InternalCardinality(name, copy, pipelineAggregators(), metaData());
internalCardinality.setSumDirectly(sumDirectly);
return internalCardinality;
}
@Override
public InternalAggregation buildEmptyAggregation() {
final InternalCardinality internalCardinality = new InternalCardinality(name, null, pipelineAggregators(), metaData());
internalCardinality.setSumDirectly(sumDirectly);
return internalCardinality;
}
@Override
protected void doClose() {
Releasables.close(counts, collector);
}
/**
 * A per-segment collector that must be flushed via postCollect() and then
 * closed before the next segment is processed.
 */
private static abstract class Collector extends LeafBucketCollector implements Releasable {
public abstract void postCollect();
}
/** Collector used when there is no values source (or no values): does nothing. */
private static class EmptyCollector extends Collector {
@Override
public void collect(int doc, long bucketOrd) {
// no-op
}
@Override
public void postCollect() {
// no-op
}
@Override
public void close() {
// no-op
}
}
/** Feeds per-document hash values straight into the shared HLL++ sketch. */
private static class DirectCollector extends Collector {
private final MurmurHash3Values hashes;
private final HyperLogLogPlusPlus counts;
DirectCollector(HyperLogLogPlusPlus counts, MurmurHash3Values values) {
this.counts = counts;
this.hashes = values;
}
@Override
public void collect(int doc, long bucketOrd) {
hashes.setDocument(doc);
final int valueCount = hashes.count();
for (int i = 0; i < valueCount; ++i) {
counts.collect(bucketOrd, hashes.valueAt(i));
}
}
@Override
public void postCollect() {
// no-op: values were pushed into counts during collect()
}
@Override
public void close() {
// no-op: counts is owned by the aggregator
}
}
/**
 * Records visited ordinals per bucket in bit sets during collection, then in
 * postCollect() hashes each distinct term once and replays the hashes into
 * the HLL++ sketch — avoiding re-hashing the same term for every document.
 */
private static class OrdinalsCollector extends Collector {
private static final long SHALLOW_FIXEDBITSET_SIZE = RamUsageEstimator.shallowSizeOfInstance(FixedBitSet.class);
/**
 * Return an approximate memory overhead per bucket for this collector.
 */
public static long memoryOverhead(long maxOrd) {
return RamUsageEstimator.NUM_BYTES_OBJECT_REF + SHALLOW_FIXEDBITSET_SIZE + (maxOrd + 7) / 8; // 1 bit per ord
}
private final BigArrays bigArrays;
private final RandomAccessOrds values;
private final int maxOrd;
private final HyperLogLogPlusPlus counts;
// one bit set of visited ordinals per bucket ordinal, grown on demand
private ObjectArray<FixedBitSet> visitedOrds;
OrdinalsCollector(HyperLogLogPlusPlus counts, RandomAccessOrds values, BigArrays bigArrays) {
if (values.getValueCount() > Integer.MAX_VALUE) {
throw new IllegalArgumentException();
}
maxOrd = (int) values.getValueCount();
this.bigArrays = bigArrays;
this.counts = counts;
this.values = values;
visitedOrds = bigArrays.newObjectArray(1);
}
@Override
public void collect(int doc, long bucketOrd) {
visitedOrds = bigArrays.grow(visitedOrds, bucketOrd + 1);
FixedBitSet bits = visitedOrds.get(bucketOrd);
if (bits == null) {
bits = new FixedBitSet(maxOrd);
visitedOrds.set(bucketOrd, bits);
}
values.setDocument(doc);
final int valueCount = values.cardinality();
for (int i = 0; i < valueCount; ++i) {
bits.set((int) values.ordAt(i));
}
}
@Override
public void postCollect() {
// union of all buckets' visited ordinals: each term is hashed only once
final FixedBitSet allVisitedOrds = new FixedBitSet(maxOrd);
for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
final FixedBitSet bits = visitedOrds.get(bucket);
if (bits != null) {
allVisitedOrds.or(bits);
}
}
final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = new org.elasticsearch.common.hash.MurmurHash3.Hash128();
try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) {
// pass 1: hash every distinct visited term, indexed by ordinal
for (int ord = allVisitedOrds.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
final BytesRef value = values.lookupOrd(ord);
org.elasticsearch.common.hash.MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
hashes.set(ord, hash.h1);
}
// pass 2: replay the precomputed hashes into each bucket's sketch
for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
final FixedBitSet bits = visitedOrds.get(bucket);
if (bits != null) {
for (int ord = bits.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
counts.collect(bucket, hashes.get(ord));
}
}
}
}
}
@Override
public void close() {
Releasables.close(visitedOrds);
}
}
/**
 * Representation of a list of hash values. There might be dups and there is no guarantee on the order.
 */
static abstract class MurmurHash3Values {
public abstract void setDocument(int docId);
public abstract int count();
public abstract long valueAt(int index);
/**
 * Return a {@link MurmurHash3Values} instance that returns each value as its hash.
 */
public static MurmurHash3Values cast(final SortedNumericDocValues values) {
return new MurmurHash3Values() {
@Override
public void setDocument(int docId) {
values.setDocument(docId);
}
@Override
public int count() {
return values.count();
}
@Override
public long valueAt(int index) {
return values.valueAt(index);
}
};
}
/**
 * Return a {@link MurmurHash3Values} instance that computes hashes on the fly for each double value.
 */
public static MurmurHash3Values hash(SortedNumericDoubleValues values) {
return new Double(values);
}
/**
 * Return a {@link MurmurHash3Values} instance that computes hashes on the fly for each long value.
 */
public static MurmurHash3Values hash(SortedNumericDocValues values) {
return new Long(values);
}
/**
 * Return a {@link MurmurHash3Values} instance that computes hashes on the fly for each binary value.
 */
public static MurmurHash3Values hash(SortedBinaryDocValues values) {
return new Bytes(values);
}
/** Hashes each long value with BitMixer.mix64. */
private static class Long extends MurmurHash3Values {
private final SortedNumericDocValues values;
public Long(SortedNumericDocValues values) {
this.values = values;
}
@Override
public void setDocument(int docId) {
values.setDocument(docId);
}
@Override
public int count() {
return values.count();
}
@Override
public long valueAt(int index) {
return BitMixer.mix64(values.valueAt(index));
}
}
/** Hashes each double value via its raw long bits, so 1.0 hashes like 1.0 only. */
private static class Double extends MurmurHash3Values {
private final SortedNumericDoubleValues values;
public Double(SortedNumericDoubleValues values) {
this.values = values;
}
@Override
public void setDocument(int docId) {
values.setDocument(docId);
}
@Override
public int count() {
return values.count();
}
@Override
public long valueAt(int index) {
return BitMixer.mix64(java.lang.Double.doubleToLongBits(values.valueAt(index)));
}
}
/** Hashes each binary value with MurmurHash3 128-bit, keeping the first word. */
private static class Bytes extends MurmurHash3Values {
// scratch object reused across calls to avoid per-value allocation
private final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = new org.elasticsearch.common.hash.MurmurHash3.Hash128();
private final SortedBinaryDocValues values;
public Bytes(SortedBinaryDocValues values) {
this.values = values;
}
@Override
public void setDocument(int docId) {
values.setDocument(docId);
}
@Override
public int count() {
return values.count();
}
@Override
public long valueAt(int index) {
final BytesRef bytes = values.valueAt(index);
org.elasticsearch.common.hash.MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, hash);
return hash.h1;
}
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p4beta1/product_search_service.proto
package com.google.cloud.vision.v1p4beta1;
/**
*
*
* <pre>
* Request message for the `DeleteProduct` method.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p4beta1.DeleteProductRequest}
*/
public final class DeleteProductRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.DeleteProductRequest)
DeleteProductRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeleteProductRequest.newBuilder() to construct.
private DeleteProductRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeleteProductRequest() {
name_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private DeleteProductRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
default:
{
if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto
.internal_static_google_cloud_vision_v1p4beta1_DeleteProductRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto
.internal_static_google_cloud_vision_v1p4beta1_DeleteProductRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1p4beta1.DeleteProductRequest.class,
com.google.cloud.vision.v1p4beta1.DeleteProductRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* Resource name of product to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
* </pre>
*
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Resource name of product to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
* </pre>
*
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!getNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vision.v1p4beta1.DeleteProductRequest)) {
return super.equals(obj);
}
com.google.cloud.vision.v1p4beta1.DeleteProductRequest other =
(com.google.cloud.vision.v1p4beta1.DeleteProductRequest) obj;
boolean result = true;
result = result && getName().equals(other.getName());
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.vision.v1p4beta1.DeleteProductRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for the `DeleteProduct` method.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p4beta1.DeleteProductRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.DeleteProductRequest)
com.google.cloud.vision.v1p4beta1.DeleteProductRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto
.internal_static_google_cloud_vision_v1p4beta1_DeleteProductRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto
.internal_static_google_cloud_vision_v1p4beta1_DeleteProductRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1p4beta1.DeleteProductRequest.class,
com.google.cloud.vision.v1p4beta1.DeleteProductRequest.Builder.class);
}
// Construct using com.google.cloud.vision.v1p4beta1.DeleteProductRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.vision.v1p4beta1.ProductSearchServiceProto
.internal_static_google_cloud_vision_v1p4beta1_DeleteProductRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.vision.v1p4beta1.DeleteProductRequest getDefaultInstanceForType() {
return com.google.cloud.vision.v1p4beta1.DeleteProductRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.vision.v1p4beta1.DeleteProductRequest build() {
com.google.cloud.vision.v1p4beta1.DeleteProductRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.vision.v1p4beta1.DeleteProductRequest buildPartial() {
com.google.cloud.vision.v1p4beta1.DeleteProductRequest result =
new com.google.cloud.vision.v1p4beta1.DeleteProductRequest(this);
result.name_ = name_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return (Builder) super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.vision.v1p4beta1.DeleteProductRequest) {
return mergeFrom((com.google.cloud.vision.v1p4beta1.DeleteProductRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.DeleteProductRequest other) {
if (other == com.google.cloud.vision.v1p4beta1.DeleteProductRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.vision.v1p4beta1.DeleteProductRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.vision.v1p4beta1.DeleteProductRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Resource name of product to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
* </pre>
*
* <code>string name = 1;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Resource name of product to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
* </pre>
*
* <code>string name = 1;</code>
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Resource name of product to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Resource name of product to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
*
* <pre>
* Resource name of product to delete.
* Format is:
* `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
* </pre>
*
* <code>string name = 1;</code>
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.DeleteProductRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.DeleteProductRequest)
// Singleton default (all-fields-unset) instance, shared by every caller of
// getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.vision.v1p4beta1.DeleteProductRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.DeleteProductRequest();
}
/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.vision.v1p4beta1.DeleteProductRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared across threads; delegates to the parsing constructor.
private static final com.google.protobuf.Parser<DeleteProductRequest> PARSER =
    new com.google.protobuf.AbstractParser<DeleteProductRequest>() {
      @java.lang.Override
      public DeleteProductRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteProductRequest(input, extensionRegistry);
      }
    };
/** Returns the shared parser for this message type. */
public static com.google.protobuf.Parser<DeleteProductRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeleteProductRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.vision.v1p4beta1.DeleteProductRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.logging;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import org.apache.logging.log4j.Logger;
import org.apache.geode.SystemFailure;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.i18n.StringId;
import org.apache.geode.internal.Assert;
import org.apache.geode.internal.i18n.LocalizedStrings;
import org.apache.geode.internal.logging.log4j.LocalizedMessage;
/**
 * A {@link ThreadGroup} that logs all {@linkplain #uncaughtException uncaught exceptions} to a
 * GemFire <code>LogWriterI18n</code>. It also keeps a count of the uncaught exceptions that were
 * thrown by its threads. This comes in handy when a thread fails to initialize properly (see bug
 * 32550).
 *
 * @see LoggingThreadGroup#createThreadGroup
 *
 * @since GemFire 4.0
 */
public class LoggingThreadGroup extends ThreadGroup {
  /** A "local" log writer that logs exceptions to standard error. */
  private static final StandardErrorPrinter stderr =
      new StandardErrorPrinter(InternalLogWriter.ALL_LEVEL);

  /**
   * A cache of all created LoggingThreadGroups, looked up by name. Guarded by synchronizing on
   * the collection itself.
   */
  private static final Collection<LoggingThreadGroup> loggingThreadGroups =
      new ArrayList<LoggingThreadGroup>();

  /**
   * Returns a <code>ThreadGroup</code> whose {@link ThreadGroup#uncaughtException} method logs
   * uncaught exceptions to {@link System#err} only (no logger is attached).
   *
   * @param name The name of the <code>ThreadGroup</code>
   */
  public static LoggingThreadGroup createThreadGroup(final String name) {
    return createThreadGroup(name, (Logger) null);
  }

  /**
   * Scans the cache for a live (non-destroyed) group with the given name, pruning destroyed
   * groups along the way. Caller must hold the lock on {@link #loggingThreadGroups}.
   *
   * @param name the group name to look up
   * @return the cached group, or <code>null</code> if none exists
   */
  private static LoggingThreadGroup findLiveGroup(final String name) {
    for (Iterator<LoggingThreadGroup> iter = loggingThreadGroups.iterator(); iter.hasNext();) {
      final LoggingThreadGroup candidate = iter.next();
      if (candidate.isDestroyed()) {
        // Prune destroyed groups so the cache does not grow without bound.
        iter.remove();
        continue;
      }
      if (name.equals(candidate.getName())) {
        return candidate;
      }
    }
    return null;
  }

  /**
   * Returns a <code>ThreadGroup</code> whose {@link ThreadGroup#uncaughtException} method logs to
   * both {@link System#err} and the given <code>InternalLogWriter</code>.
   *
   * @param name The name of the <code>ThreadGroup</code>
   * @param logWriter A <code>InternalLogWriter</code> to log uncaught exceptions to. It is okay
   *        for this argument to be <code>null</code>.
   *
   *        author David Whitlock
   * @since GemFire 3.0
   */
  public static LoggingThreadGroup createThreadGroup(final String name,
      final InternalLogWriter logWriter) {
    // Cache the LoggingThreadGroups so that we don't create a gazillion of them.
    LoggingThreadGroup group;
    synchronized (loggingThreadGroups) {
      group = findLiveGroup(name);
      if (group != null) {
        // We already have one! Repoint the underlying log writer to the new one (creating new
        // thread groups for different writers leaks groups across repeated connect/disconnects,
        // as in dunits for example).
        if (logWriter != group.logWriter) {
          group.logWriter = logWriter;
        }
      } else {
        group = new LoggingThreadGroup(name, logWriter);
        // force autoclean to false and not inherit from parent group
        group.setDaemon(false);
        loggingThreadGroups.add(group);
      }
    }
    Assert.assertTrue(!group.isDestroyed());
    return group;
  }

  /**
   * Returns a <code>ThreadGroup</code> whose {@link ThreadGroup#uncaughtException} method logs to
   * both {@link System#err} and the given <code>Logger</code>.
   *
   * @param name The name of the <code>ThreadGroup</code>
   * @param logger A <code>Logger</code> to log uncaught exceptions to. It is okay for this
   *        argument to be <code>null</code>.
   *
   *        author David Whitlock
   * @since GemFire 3.0
   */
  public static LoggingThreadGroup createThreadGroup(final String name, final Logger logger) {
    // Cache the LoggingThreadGroups so that we don't create a gazillion of them.
    LoggingThreadGroup group;
    synchronized (loggingThreadGroups) {
      group = findLiveGroup(name);
      if (group != null) {
        // We already have one! Repoint the underlying logger to the new one (creating new
        // thread groups for different loggers leaks groups across repeated connect/disconnects,
        // as in dunits for example).
        if (logger != group.logger) {
          group.logger = logger;
        }
      } else {
        group = new LoggingThreadGroup(name, logger);
        // force autoclean to false and not inherit from parent group
        group.setDaemon(false);
        loggingThreadGroups.add(group);
      }
    }
    Assert.assertTrue(!group.isDestroyed());
    return group;
  }

  /**
   * Invokes {@link #cleanup} on every cached group except the GemFire shutdown-hook groups,
   * releasing their references to loggers and log writers.
   */
  public static void cleanUpThreadGroups() {
    synchronized (loggingThreadGroups) {
      for (final LoggingThreadGroup group : loggingThreadGroups) {
        final String groupName = group.getName();
        if (!groupName.equals(InternalDistributedSystem.SHUTDOWN_HOOK_NAME)
            && !groupName.equals("GemFireConnectionFactory Shutdown Hook")) {
          group.cleanup();
        }
      }
    }
  }

  /**
   * Note: Must be used for test purposes ONLY.
   *
   * @param threadGroupName name of the group to look up
   * @return thread group with given name, or <code>null</code> if none is cached
   */
  public static ThreadGroup getThreadGroup(final String threadGroupName) {
    synchronized (loggingThreadGroups) {
      for (final LoggingThreadGroup threadGroup : loggingThreadGroups) {
        if (threadGroup.getName().equals(threadGroupName)) {
          return threadGroup;
        }
      }
      return null;
    }
  }

  /**
   * A log writer that the user has specified for logging uncaught exceptions. May be
   * <code>null</code>; volatile because createThreadGroup may repoint it after construction.
   */
  protected volatile InternalLogWriter logWriter;

  /**
   * A logger that the user has specified for logging uncaught exceptions. May be
   * <code>null</code>; volatile because createThreadGroup may repoint it after construction.
   */
  protected volatile Logger logger;

  /**
   * The count of uncaught exceptions that were thrown by threads in this thread group. Guarded by
   * {@link #dispatchLock}.
   */
  private long uncaughtExceptionsCount;

  /**
   * Creates a new <code>LoggingThreadGroup</code> that logs uncaught exceptions to the given log
   * writer.
   *
   * @param name The name of the thread group
   * @param logWriter A logWriter to which uncaught exceptions are logged. May be
   *        <code>null</code>.
   */
  LoggingThreadGroup(final String name, final InternalLogWriter logWriter) {
    super(name);
    this.logWriter = logWriter;
  }

  /**
   * Creates a new <code>LoggingThreadGroup</code> that logs uncaught exceptions to the given
   * logger.
   *
   * @param name The name of the thread group
   * @param logger A logger to which uncaught exceptions are logged. May be <code>null</code>.
   */
  LoggingThreadGroup(final String name, final Logger logger) {
    super(name);
    this.logger = logger;
  }

  /** Serializes exception dispatch and guards {@link #uncaughtExceptionsCount}. */
  private final Object dispatchLock = new Object();

  /**
   * Logs an uncaught exception to standard error and, when present, to the attached logger and
   * log writer, then bumps the uncaught-exception count.
   */
  @Override
  public void uncaughtException(final Thread t, final Throwable ex) {
    synchronized (this.dispatchLock) {
      if (ex instanceof VirtualMachineError) {
        SystemFailure.setFailure((VirtualMachineError) ex); // don't throw
      }
      // Solution to treat the shutdown hook error as a special case.
      // Do not change the hook's thread name without also changing it here.
      final String threadName = t.getName();
      if ((ex instanceof NoClassDefFoundError)
          && (threadName.equals(InternalDistributedSystem.SHUTDOWN_HOOK_NAME))) {
        // Class loading can legitimately fail in the shutdown hook while an application server
        // is shutting down; log at info so it is not mistaken for a real failure.
        final StringId msg =
            LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0_THIS_MESSAGE_CAN_BE_DISREGARDED_IF_IT_OCCURRED_DURING_AN_APPLICATION_SERVER_SHUTDOWN_THE_EXCEPTION_MESSAGE_WAS_1;
        final Object[] msgArgs = new Object[] {t, ex.getLocalizedMessage()};
        stderr.info(msg, msgArgs);
        if (this.logger != null) {
          this.logger.info(LocalizedMessage.create(msg, msgArgs));
        }
        if (this.logWriter != null) {
          this.logWriter.info(msg, msgArgs);
        }
      } else {
        stderr.severe(LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0, t, ex);
        if (this.logger != null) {
          this.logger.fatal(
              LocalizedMessage.create(LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0, t), ex);
        }
        if (this.logWriter != null) {
          this.logWriter.severe(LocalizedStrings.UNCAUGHT_EXCEPTION_IN_THREAD_0, t, ex);
        }
      }
      this.uncaughtExceptionsCount++;
    }
  }

  /**
   * Resets the number of uncaught exceptions to zero.
   */
  public void clearUncaughtExceptionsCount() {
    synchronized (this.dispatchLock) {
      this.uncaughtExceptionsCount = 0;
    }
  }

  /**
   * Returns the number of uncaught exceptions that occurred in threads in this thread group.
   */
  public long getUncaughtExceptionsCount() {
    synchronized (this.dispatchLock) {
      return uncaughtExceptionsCount;
    }
  }

  /**
   * Cleans up the thread group, releasing resources that could be problematic (bug 35388).
   *
   * <p>The log writer holds onto a distribution config, which holds onto the
   * InternalDistributedSystem, which holds onto the DistributionManager, and so on — dropping
   * both references here breaks that retention chain.
   *
   * @since GemFire 4.2.3
   */
  public synchronized void cleanup() {
    this.logger = null;
    this.logWriter = null;
  }
}
| |
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.portlet.tck.portlets;
import static javax.portlet.PortletSession.APPLICATION_SCOPE;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING1;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING2;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH1;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH2;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE1;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE2;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETMETHOD;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM1;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM2;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM3;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER1;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER2;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER3;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER5;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING1;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING2;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING3;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING4;
import static javax.portlet.tck.constants.Constants.RESULT_ATTR_PREFIX;
import static javax.portlet.tck.constants.Constants.THREADID_ATTR;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.Portlet;
import javax.portlet.PortletConfig;
import javax.portlet.PortletException;
import javax.portlet.PortletSession;
import javax.portlet.PortletURL;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.ResourceRequest;
import javax.portlet.ResourceResponse;
import javax.portlet.ResourceServingPortlet;
import javax.portlet.tck.beans.JSR286ApiTestCaseDetails;
import javax.portlet.tck.beans.TestButton;
import javax.portlet.tck.beans.TestResult;
import javax.portlet.tck.constants.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This portlet implements several test cases for the JSR 362 TCK. The test case names
* are defined in the /src/main/resources/xml-resources/additionalTCs.xml
* file. The build process will integrate the test case names defined in the
* additionalTCs.xml file into the complete list of test case names for execution by the driver.
*
* This is the main portlet for the test cases. If the test cases call for events, this portlet
* will initiate the events, but not process them. The processing is done in the companion
* portlet RequestTests_ClientDataRequest_ApiAction_event
*
*/
public class RequestTests_ClientDataRequest_ApiAction implements Portlet, ResourceServingPortlet {
// Trace logger for this test portlet. NOTE(review): SLF4J convention is a
// static final logger; kept non-static here to match the TCK portlet template.
private final Logger LOGGER = LoggerFactory.getLogger(RequestTests_ClientDataRequest_ApiAction.class);
// This test portlet needs no initialization state.
@Override
public void init(PortletConfig config) throws PortletException {
}
// No resources are held, so nothing to release on destroy.
@Override
public void destroy() {
}
@Override
public void processAction(ActionRequest portletReq, ActionResponse portletResp)
throws PortletException, IOException {
LOGGER.trace("main portlet processAction entry");
portletResp.setRenderParameters(portletReq.getParameterMap());
long tid = Thread.currentThread().getId();
portletReq.setAttribute(THREADID_ATTR, tid);
StringWriter writer = new StringWriter();
JSR286ApiTestCaseDetails tcd = new JSR286ApiTestCaseDetails();
// Create result objects for the tests
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getPortletInputStream1 */
/* Details: "Method getPortletInputStream(): Returns an InputStream */
/* object" */
TestResult tr0 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM1);
String getparm1=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM1);
if(getparm1!=null && getparm1.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM1)) {
tr0.setTcSuccess(true);
tr0.appendTcDetail("This method could not be tested for Test Portlet which uses POST type : application/x-www-form-urlencoded");
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM1, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM1);
}
tr0.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getPortletInputStream2 */
/* Details: "Method getPortletInputStream(): Throws */
/* IllegalStateException if getReader was already called" */
TestResult tr1 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM2);
String getparm2=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM2);
if(getparm2!=null && getparm2.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM2)) {
try {
portletReq.getReader();
try {
portletReq.getPortletInputStream();
tr1.appendTcDetail("Method did not throw Exception");
} catch (IllegalStateException ise) {
tr1.setTcSuccess(true);
}
} catch (IllegalStateException ise) {
tr1.appendTcDetail("getReader should not throw an exception, but did throw an IllegalStateException.");
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM2, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM2);
}
tr1.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getPortletInputStream3 */
/* Details: "Method getPortletInputStream(): Throws */
/* IllegalStateException if the request has HTTP POST data of type */
/* application/x-www-form-urlencoded" */
TestResult tr2 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM3);
String getparm3=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM3);
if(getparm3!=null && getparm3.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM3) ) {
try {
portletReq.getPortletInputStream();
tr2.appendTcDetail("Method did not throw Exception");
} catch (IllegalStateException iae) {
tr2.setTcSuccess(true);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM3, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM3);
}
tr2.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_setCharacterEncoding1 */
/* Details: "Method setCharacterEncoding(String): Allows the */
/* character encoding for the body of the request to be overridden" */
TestResult tr3 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING1);
String setchar1=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING1);
if(setchar1!=null && setchar1.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING1)) {
portletReq.setCharacterEncoding("UTF-16");
String getcharcode=portletReq.getCharacterEncoding();
if(getcharcode!=null && getcharcode.equals("UTF-16")) {
tr3.setTcSuccess(true);
} else {
tr3.appendTcDetail("CharacterEncoding used in the body of HTTP request has value : " +getcharcode);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING1, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING1);
}
tr3.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_setCharacterEncoding2 */
/* Details: "Method setCharacterEncoding(String): Throws */
/* IllegalStateException if method is called after reading request */
/* parameters" */
TestResult tr4 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING2);
String setchar2=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING2);
if(setchar2!=null && setchar2.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING2)) {
try {
portletReq.getParameter("Test");
portletReq.setCharacterEncoding("UTF-16");
tr4.setTcSuccess(true);
tr4.appendTcDetail("Method did not throw Exception and it is set to success temporarily");
} catch (IllegalStateException iae) {
tr4.setTcSuccess(true);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING2, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING2);
}
tr4.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_setCharacterEncoding3 */
/* Details: "Method setCharacterEncoding(String): Throws */
/* IllegalStateException if method is called after using the */
/* getReader(): method" */
TestResult tr5 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING3);
String setchar3=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING3);
if(setchar3!=null && setchar3.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING3)) {
try {
portletReq.getReader();
portletReq.setCharacterEncoding("UTF-16");
tr5.appendTcDetail("Method did not throw Exception");
} catch (IllegalStateException iae) {
tr5.setTcSuccess(true);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING3, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING3);
}
tr5.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_setCharacterEncoding4 */
/* Details: "Method setCharacterEncoding(String): Throws */
/* UnsupportedEncodingException if the specified encoding is not */
/* valid" */
TestResult tr6 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING4);
String setchar4=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING4);
if(setchar4!=null && setchar4.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING4)) {
try {
portletReq.setCharacterEncoding("UTF-NotValid");
tr6.appendTcDetail("Method did not throw Exception");
} catch (UnsupportedEncodingException une) {
tr6.setTcSuccess(true);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING4, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING4);
}
tr6.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getReader1 */
/* Details: "Method getReader(): Returns a BufferedReader object for */
/* reading the request" */
TestResult tr7 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER1);
String getRead1=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER1);
if(getRead1!=null && getRead1.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER1)) {
tr7.setTcSuccess(true);
tr7.appendTcDetail("This Method could not be tested for this Test Portlet which has Content type : application/x-www-form-urlencoded");
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER1, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER1);
}
tr7.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getReader2 */
/* Details: "Method getReader(): Throws IllegalStateException if */
/* getPortletInputStream was already called" */
TestResult tr8 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER2);
String getRead2=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER2);
if(getRead2!=null && getRead2.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER2)) {
try {
portletReq.getPortletInputStream();
try {
portletReq.getReader();
tr8.appendTcDetail("Method did not throw Exception");
} catch (IllegalStateException ise) {
tr8.setTcSuccess(true);
}
} catch (IllegalStateException ise) {
tr1.appendTcDetail("getPortletInputStream should not throw an exception, but did throw an IllegalStateException.");
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER2, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER2);
}
tr8.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getReader3 */
/* Details: "Method getReader(): Throws IllegalStateException if the */
/* request has HTTP POST data of type */
/* application/x-www-form-urlencoded" */
TestResult tr9 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER3);
String getRead3=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER3);
if(getRead3!=null && getRead3.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER3)) {
try {
portletReq.getReader();
tr9.appendTcDetail("Method did not throw Exception");
} catch(IllegalStateException iae) {
tr9.setTcSuccess(true);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER3, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER3);
}
tr9.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getReader5 */
/* Details: "Method getReader(): Throws UnsupportedEncodingException */
/* if the character set encoding is not valid so that the text cannot */
/* be decoded" */
TestResult tr10 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER5);
String getRead5=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER5);
if(getRead5!=null && getRead5.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER5)) {
try {
portletReq.setCharacterEncoding("UTF-NotValid");
portletReq.getReader();
tr6.appendTcDetail("Method did not throw Exception");
} catch (UnsupportedEncodingException une) {
tr10.setTcSuccess(true);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER5, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER5);
}
tr10.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getCharacterEncoding1 */
/* Details: "Method getCharacterEncoding(): Returns a String */
/* containing the name of the character encoding used in the request */
/* body" */
TestResult tr11 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING1);
String getChar1=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING1);
if(getChar1!=null && getChar1.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING1)) {
String getCharEncde=portletReq.getCharacterEncoding();
if(getCharEncde!=null) {
tr11.setTcSuccess(true);
} else {
tr11.appendTcDetail("The characted Encoding used in HTTP request has null value :");
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING1, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING1);
}
tr11.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getCharacterEncoding2 */
/* Details: "Method getCharacterEncoding(): Returns null if the */
/* request does not specify a character encoding" */
TestResult tr12 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING2);
String getChar2=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING2);
if(getChar2!=null && getChar2.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING2)) {
tr12.setTcSuccess(true);
tr12.appendTcDetail("This method could not be tested for this Test Portlet which already has Character Encoding Value");
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING2, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING2);
}
tr12.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getContentType1 */
/* Details: "Method getContentType(): Returns a String containing the */
/* MIME type of the request body" */
TestResult tr13 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE1);
String getCnt1=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE1);
if(getCnt1!=null && getCnt1.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE1)) {
String getContype=portletReq.getContentType();
if(getContype!=null) {
tr13.setTcSuccess(true);
} else {
tr13.appendTcDetail("The ContentType of the HTTP request has values : " +getContype);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE1, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE1);
}
tr13.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getContentType2 */
/* Details: "Method getContentType(): Returns null if the MIME type */
/* is unknown" */
TestResult tr14 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE2);
String getCnt2=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE2);
if(getCnt2!=null && getCnt2.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE2)) {
tr14.setTcSuccess(true);
tr14.appendTcDetail("This method could not be tested for this Test Portlet which already has known MIME type");
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE2, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE2);
}
tr14.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getContentLength1 */
/* Details: "Method getContentLength(): Returns the length in bytes */
/* of the request body" */
TestResult tr15 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH1);
String getCntl1=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH1);
if(getCntl1!=null && getCntl1.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH1)) {
int getConLgt=portletReq.getContentLength();
if(getConLgt!=-1) {
tr15.setTcSuccess(true);
} else {
tr15.appendTcDetail("The HTTP request has Length in bytes :"+getConLgt);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH1, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH1);
}
tr15.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getContentLength2 */
/* Details: "Method getContentLength(): Returns -1 if the length is */
/* unknown" */
TestResult tr16 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH2);
String getCntl2=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH2);
if(getCntl2!=null && getCntl2.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH2)) {
tr16.setTcSuccess(true);
tr16.appendTcDetail("This method could not be tested for this Test Portlet which already has Known Length");
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH2, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH2);
}
tr16.writeTo(writer);
/* TestCase: V2RequestTests_ClientDataRequest_ApiAction_getMethod */
/* Details: "Method getMethod(): Returns a String containing the name */
/* of the HTTP method with which the request was made" */
TestResult tr17 = tcd.getTestResultFailed(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETMETHOD);
String getMethd=portletReq.getParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETMETHOD);
if(getMethd!=null && getMethd.equals(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETMETHOD)) {
String getmethod=portletReq.getMethod();
if(getmethod!=null && getmethod.equals("POST")) {
tr17.setTcSuccess(true);
} else {
tr17.appendTcDetail("The getMethod() for HTTP Request has the value :" +getmethod);
}
} else {
portletResp.setRenderParameter(V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETMETHOD, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETMETHOD);
}
tr17.writeTo(writer);
portletReq.getPortletSession().setAttribute(
Constants.RESULT_ATTR_PREFIX + "RequestTests_ClientDataRequest_ApiAction",
writer.toString(), APPLICATION_SCOPE);
}
@Override
public void serveResource(ResourceRequest portletReq, ResourceResponse portletResp)
      throws PortletException, IOException {
   LOGGER.trace("main portlet serveResource entry");
   // Expose the handling thread's id so test cases can inspect which thread served the request.
   portletReq.setAttribute(THREADID_ATTR, Thread.currentThread().getId());
}
@Override
public void render(RenderRequest portletReq, RenderResponse portletResp)
      throws PortletException, IOException {
   LOGGER.trace("main portlet render entry");

   // Expose the handling thread's id so test cases can inspect which thread served the request.
   portletReq.setAttribute(THREADID_ATTR, Thread.currentThread().getId());

   PrintWriter writer = portletResp.getWriter();

   // Flush any test results that a preceding action phase stored in the session.
   PortletSession ps = portletReq.getPortletSession();
   String msg = (String) ps.getAttribute(RESULT_ATTR_PREFIX + "RequestTests_ClientDataRequest_ApiAction", APPLICATION_SCOPE);
   if (msg != null) {
      writer.write("<p>" + msg + "</p><br/>\n");
      ps.removeAttribute(RESULT_ATTR_PREFIX + "RequestTests_ClientDataRequest_ApiAction", APPLICATION_SCOPE);
   }

   // One setup button per test case, each triggering an action URL whose
   // parameter name and value are the test case name. The two
   // stream-vs-reader conflict cases (getPortletInputStream2, getReader2)
   // use a "text/plain" form enctype so the action request body is not
   // consumed as application/x-www-form-urlencoded parameters.

   /* Method getPortletInputStream(): Returns an InputStream object */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM1, null);
   /* Method getPortletInputStream(): Throws IllegalStateException if getReader was already called */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM2, "text/plain");
   /* Method getPortletInputStream(): Throws IllegalStateException for form-urlencoded POST data */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETPORTLETINPUTSTREAM3, null);
   /* Method setCharacterEncoding(String): Allows overriding the request body encoding */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING1, null);
   /* Method setCharacterEncoding(String): Throws IllegalStateException after reading parameters */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING2, null);
   /* Method setCharacterEncoding(String): Throws IllegalStateException after getReader() */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING3, null);
   /* Method setCharacterEncoding(String): Throws UnsupportedEncodingException for invalid encoding */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_SETCHARACTERENCODING4, null);
   /* Method getReader(): Returns a BufferedReader object for reading the request */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER1, null);
   /* Method getReader(): Throws IllegalStateException if getPortletInputStream was already called */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER2, "text/plain");
   /* Method getReader(): Throws IllegalStateException for form-urlencoded POST data */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER3, null);
   /* Method getReader(): Throws UnsupportedEncodingException for an invalid character set */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETREADER5, null);
   /* Method getCharacterEncoding(): Returns the name of the request body encoding */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING1, null);
   /* Method getCharacterEncoding(): Returns null if no encoding is specified */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCHARACTERENCODING2, null);
   /* Method getContentType(): Returns the MIME type of the request body */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE1, null);
   /* Method getContentType(): Returns null if the MIME type is unknown */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTTYPE2, null);
   /* Method getContentLength(): Returns the length in bytes of the request body */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH1, null);
   /* Method getContentLength(): Returns -1 if the length is unknown */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETCONTENTLENGTH2, null);
   /* Method getMethod(): Returns the name of the HTTP method of the request */
   renderTestButton(portletResp, writer, V2REQUESTTESTS_CLIENTDATAREQUEST_APIACTION_GETMETHOD, null);
}

/**
 * Writes a single test-setup button for the given test case: creates an
 * action URL carrying the test case name as both parameter name and value,
 * and renders a {@code TestButton} pointing at it.
 *
 * @param portletResp response used to create the action URL
 * @param writer      output the button markup is written to
 * @param testCase    test case name (used as parameter name, value and button id)
 * @param encType     form enctype to set on the button, or {@code null} to keep the default
 */
private void renderTestButton(RenderResponse portletResp, PrintWriter writer, String testCase, String encType) {
   PortletURL aurl = portletResp.createActionURL();
   aurl.setParameter(testCase, testCase);
   TestButton tb = new TestButton(testCase, aurl);
   if (encType != null) {
      tb.setEncType(encType);
   }
   tb.writeTo(writer);
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.uiDesigner.designSurface;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.designer.DesignerEditorPanelFacade;
import com.intellij.designer.LightFillLayout;
import com.intellij.ide.DeleteProvider;
import com.intellij.ide.highlighter.XmlFileHighlighter;
import com.intellij.ide.palette.impl.PaletteToolWindowManager;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.util.LexerEditorHighlighter;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogBuilder;
import com.intellij.openapi.ui.ThreeComponentsSplitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.ui.JBColor;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.components.JBLayeredPane;
import com.intellij.uiDesigner.*;
import com.intellij.uiDesigner.compiler.Utils;
import com.intellij.uiDesigner.componentTree.ComponentPtr;
import com.intellij.uiDesigner.componentTree.ComponentSelectionListener;
import com.intellij.uiDesigner.componentTree.ComponentTree;
import com.intellij.uiDesigner.core.GridLayoutManager;
import com.intellij.uiDesigner.core.Util;
import com.intellij.uiDesigner.editor.UIFormEditor;
import com.intellij.uiDesigner.lw.CompiledClassPropertiesProvider;
import com.intellij.uiDesigner.lw.IProperty;
import com.intellij.uiDesigner.lw.LwRootContainer;
import com.intellij.uiDesigner.palette.ComponentItem;
import com.intellij.uiDesigner.propertyInspector.DesignerToolWindow;
import com.intellij.uiDesigner.propertyInspector.DesignerToolWindowManager;
import com.intellij.uiDesigner.propertyInspector.PropertyInspector;
import com.intellij.uiDesigner.propertyInspector.properties.IntroStringProperty;
import com.intellij.uiDesigner.radComponents.RadComponent;
import com.intellij.uiDesigner.radComponents.RadContainer;
import com.intellij.uiDesigner.radComponents.RadRootContainer;
import com.intellij.uiDesigner.radComponents.RadTabbedPane;
import com.intellij.util.Alarm;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.EventListenerList;
import javax.swing.event.ListSelectionEvent;
import java.awt.*;
import java.awt.dnd.DnDConstants;
import java.awt.dnd.DropTarget;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.lang.reflect.InvocationTargetException;
import java.util.*;
/**
 * {@code GuiEditor} is a panel with a border layout. It has the palette at the north,
 * the component tree with the property editor at the west, and the editor area at the center.
 * The editor area contains the internal component where the user edits the UI.
 *
 * @author Anton Katilin
 * @author Vladimir Kondratyev
 */
public final class GuiEditor extends JPanel implements DesignerEditorPanelFacade, DataProvider, ModuleProvider {
private static final Logger LOG = Logger.getInstance("#com.intellij.uiDesigner.GuiEditor");
private final Project myProject;
@NotNull private final UIFormEditor myEditor;
private Module myModule;
@NotNull private final VirtualFile myFile;
/**
 * for debug purposes
 */
private Exception myWhere;
/**
 * All components are on this layer
 */
private static final Integer LAYER_COMPONENT = JLayeredPane.DEFAULT_LAYER;
/**
 * This layer contains all "passive" decorators such as component boundaries
 * and selection rectangle.
 */
private static final Integer LAYER_PASSIVE_DECORATION = JLayeredPane.POPUP_LAYER;
/**
 * We show (and move) dragged component at this layer
 */
private static final Integer LAYER_DND = JLayeredPane.DRAG_LAYER;
/**
 * This is the topmost layer. It gets and redispatch all incoming events
 */
private static final Integer LAYER_GLASS = Integer.valueOf(JLayeredPane.DRAG_LAYER.intValue() + 100);
/**
 * This layer contains all "active" decorators. This layer should be over
 * LAYER_GLASS because active decorators must get AWT events to work correctly.
 */
private static final Integer LAYER_ACTIVE_DECORATION = Integer.valueOf(LAYER_GLASS.intValue() + 100);
/**
 * This layer contains all inplace editors.
 */
private static final Integer LAYER_INPLACE_EDITING = Integer.valueOf(LAYER_ACTIVE_DECORATION.intValue() + 100);
private final EventListenerList myListenerList;
/**
 * we have to store document here but not file because there can be a situation when
 * document we added listener to has been disposed, and remove listener will be applied to
 * a new document (got by file) -> assertion (see SCR 14143)
 */
private final Document myDocument;
final MainProcessor myProcessor;
@NotNull private final JScrollPane myScrollPane;
/**
 * This layered pane contains all layers to lay components out and to
 * show all necessary decoration items
 */
@NotNull private final MyLayeredPane myLayeredPane;
/**
 * The component which represents decoration layer. All passive
 * decorators are on this layer.
 */
private final PassiveDecorationLayer myDecorationLayer;
/**
 * The component which represents layer where located all dragged
 * components
 */
private final DragLayer myDragLayer;
/**
 * This layer contains all inplace editors
 */
private final InplaceEditingLayer myInplaceEditingLayer;
/**
 * Brings functionality to "DEL" button
 */
private final MyDeleteProvider myDeleteProvider;
/**
 * Re-runs the error analyzer
 */
private final MyPsiTreeChangeListener myPsiTreeChangeListener;
private RadRootContainer myRootContainer;
/**
 * GuiEditor should not react on own events. If {@code myInsideChange}
 * is {@code true} then we do not react on incoming DocumentEvent.
 */
private boolean myInsideChange;
private final DocumentListener myDocumentListener;
private final CardLayout myCardLayout = new CardLayout();
private final ThreeComponentsSplitter myContentSplitter = new ThreeComponentsSplitter();
private final JPanel myCardPanel = new JPanel(myCardLayout);
@NonNls private static final String CARD_VALID = "valid";
@NonNls private static final String CARD_INVALID = "invalid";
private final JPanel myValidCard;
private final JPanel myInvalidCard;
private boolean myInvalid;
private final CutCopyPasteSupport myCutCopyPasteSupport;
/**
 * Implementation of Ctrl+W and Ctrl+Shift+W behavior
 */
private final SelectionState mySelectionState;
@NotNull private final GlassLayer myGlassLayer;
private final ActiveDecorationLayer myActiveDecorationLayer;
private boolean myShowGrid = true;
private boolean myShowComponentTags = true;
private final DesignDropTargetListener myDropTargetListener;
private JLabel myFormInvalidLabel;
private final QuickFixManagerImpl myQuickFixManager;
private final GridCaptionPanel myHorzCaptionPanel;
private final GridCaptionPanel myVertCaptionPanel;
private ComponentPtr mySelectionAnchor;
private ComponentPtr mySelectionLead;
/**
 * Undo group ID for undoing actions that need to be undone together with the form modification.
 */
private Object myNextSaveGroupId = new Object();
@NonNls private static final String ourHelpID = "guiDesigner.uiTour.workspace";
public static final DataKey<GuiEditor> DATA_KEY = DataKey.create(GuiEditor.class.getName());
/**
 * Creates the GUI editor panel and wires together its layer stack, document
 * synchronization, listeners and tool windows.
 *
 * @param editor  the file editor that hosts this designer panel
 * @param project the project the form file belongs to
 * @param module  the module the form file belongs to
 * @param file    file to be edited
 * @throws IllegalArgumentException if the {@code file}
 *                                  is {@code null} or {@code file} is not valid PsiFile
 */
public GuiEditor(@NotNull UIFormEditor editor, @NotNull Project project, @NotNull Module module, @NotNull VirtualFile file) {
  myEditor = editor;
  LOG.assertTrue(file.isValid());
  myProject = project;
  myModule = module;
  myFile = file;
  myCutCopyPasteSupport = new CutCopyPasteSupport(this);
  setLayout(new BorderLayout());
  myContentSplitter.setDividerWidth(0);
  myContentSplitter.setDividerMouseZoneSize(Registry.intValue("ide.splitter.mouseZone"));
  add(myContentSplitter, BorderLayout.CENTER);
  // Card panel switches between the working designer and the "form file is invalid" notice.
  myValidCard = new JPanel(new BorderLayout());
  myInvalidCard = createInvalidCard();
  myCardPanel.add(myValidCard, CARD_VALID);
  myCardPanel.add(myInvalidCard, CARD_INVALID);
  JPanel contentPanel = new JPanel(new LightFillLayout());
  JLabel toolbar = new JLabel();
  toolbar.setVisible(false);
  contentPanel.add(toolbar);
  contentPanel.add(myCardPanel);
  myContentSplitter.setInnerComponent(contentPanel);
  myListenerList = new EventListenerList();
  // Build the layer stack; z-ordering is defined by the LAYER_* constants.
  myDecorationLayer = new PassiveDecorationLayer(this);
  myDragLayer = new DragLayer(this);
  myLayeredPane = new MyLayeredPane();
  myInplaceEditingLayer = new InplaceEditingLayer(this);
  myLayeredPane.add(myInplaceEditingLayer, LAYER_INPLACE_EDITING);
  myActiveDecorationLayer = new ActiveDecorationLayer(this);
  myLayeredPane.add(myActiveDecorationLayer, LAYER_ACTIVE_DECORATION);
  myGlassLayer = new GlassLayer(this);
  myLayeredPane.add(myGlassLayer, LAYER_GLASS);
  myLayeredPane.add(myDecorationLayer, LAYER_PASSIVE_DECORATION);
  myLayeredPane.add(myDragLayer, LAYER_DND);
  // Repaint passive decoration when the editor gains/loses focus.
  myGlassLayer.addFocusListener(new FocusListener() {
    @Override
    public void focusGained(FocusEvent e) {
      myDecorationLayer.repaint();
      //fireSelectedComponentChanged(); // EA-36478
    }
    @Override
    public void focusLost(FocusEvent e) {
      myDecorationLayer.repaint();
    }
  });
  // Ctrl+W / Ctrl+Shift+W support
  mySelectionState = new SelectionState(this);
  // DeleteProvider
  myDeleteProvider = new MyDeleteProvider();
  // We need to synchronize GUI editor with the document
  final Alarm alarm = new Alarm();
  myDocumentListener = new DocumentListener() {
    @Override
    public void documentChanged(@NotNull final DocumentEvent e) {
      // myInsideChange guards against reacting to our own saveToFile() writes.
      if (!myInsideChange) {
        UndoManager undoManager = UndoManager.getInstance(getProject());
        alarm.cancelAllRequests();
        alarm.addRequest(new MySynchronizeRequest(undoManager.isUndoInProgress() || undoManager.isRedoInProgress()),
                         100/*any arbitrary delay*/, ModalityState.stateForComponent(GuiEditor.this));
      }
    }
  };
  // Prepare document
  myDocument = FileDocumentManager.getInstance().getDocument(file);
  myDocument.addDocumentListener(myDocumentListener);
  // Read form from file
  readFromFile(false);
  // Grid caption panels sit above and to the left of the scrollable form area.
  JPanel panel = new JPanel(new GridBagLayout());
  panel.setBackground(GridCaptionPanel.getGutterColor());
  myHorzCaptionPanel = new GridCaptionPanel(this, false);
  myVertCaptionPanel = new GridCaptionPanel(this, true);
  GridBagConstraints gbc = new GridBagConstraints();
  gbc.gridx = 0;
  gbc.gridy = 1;
  gbc.weightx = 0.0;
  gbc.weighty = 0.0;
  gbc.fill = GridBagConstraints.BOTH;
  panel.add(myVertCaptionPanel, gbc);
  gbc.gridx = 1;
  gbc.gridy = 0;
  panel.add(myHorzCaptionPanel, gbc);
  gbc.gridx = 1;
  gbc.gridy = 1;
  gbc.weightx = 1.0;
  gbc.weighty = 1.0;
  myScrollPane = ScrollPaneFactory.createScrollPane(myLayeredPane);
  myScrollPane.setBackground(new JBColor(() -> EditorColorsManager.getInstance().getGlobalScheme().getDefaultBackground()));
  panel.add(myScrollPane, gbc);
  myHorzCaptionPanel.attachToScrollPane(myScrollPane);
  myVertCaptionPanel.attachToScrollPane(myScrollPane);
  myValidCard.add(panel, BorderLayout.CENTER);
  final CancelCurrentOperationAction cancelCurrentOperationAction = new CancelCurrentOperationAction();
  cancelCurrentOperationAction.registerCustomShortcutSet(CommonShortcuts.ESCAPE, this);
  myProcessor = new MainProcessor(this);
  // PSI listener to restart error highlighter
  myPsiTreeChangeListener = new MyPsiTreeChangeListener();
  PsiManager.getInstance(getProject()).addPsiTreeChangeListener(myPsiTreeChangeListener);
  myQuickFixManager = new QuickFixManagerImpl(this, myGlassLayer, myScrollPane.getViewport());
  myDropTargetListener = new DesignDropTargetListener(this);
  if (!ApplicationManager.getApplication().isHeadlessEnvironment()) {
    new DropTarget(getGlassLayer(), DnDConstants.ACTION_COPY_OR_MOVE, myDropTargetListener);
  }
  myActiveDecorationLayer.installSelectionWatcher();
  EmptyAction.registerWithShortcutSet("GuiDesigner.IncreaseIndent",
                                      new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_TAB, 0)),
                                      myGlassLayer);
  EmptyAction.registerWithShortcutSet("GuiDesigner.DecreaseIndent",
                                      new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_TAB, InputEvent.SHIFT_MASK)),
                                      myGlassLayer);
  if (!ApplicationManager.getApplication().isUnitTestMode()) {
    UIUtil.invokeLaterIfNeeded(() -> {
      DesignerToolWindowManager.getInstance(myProject).bind(this);
      PaletteToolWindowManager.getInstance(myProject).bind(this);
    });
  }
}
@Override
public ThreeComponentsSplitter getContentSplitter() {
  return myContentSplitter;
}

/** @return the file editor that hosts this designer panel. */
@NotNull
public UIFormEditor getEditor() {
  return myEditor;
}

/** @return the selection state helper (Ctrl+W / Ctrl+Shift+W support). */
@NotNull
public SelectionState getSelectionState() {
  return mySelectionState;
}
/**
 * Tears down the editor: detaches document and PSI listeners and disposes
 * tool-window bindings. Must run on the EDT; logs (rather than throws) on
 * double dispose, using {@code myWhere} to report where the first dispose happened.
 */
public void dispose() {
  ApplicationManager.getApplication().assertIsDispatchThread();
  if (myWhere != null) {
    // Already disposed: report both the original and the current dispose stack.
    LOG.error("Already disposed: old trace: ", myWhere);
    LOG.error("Already disposed: new trace: ");
  }
  else {
    // Remember where the first dispose happened (for debugging double dispose).
    myWhere = new Exception();
  }
  myDocument.removeDocumentListener(myDocumentListener);
  PsiManager.getInstance(getProject()).removePsiTreeChangeListener(myPsiTreeChangeListener);
  if (!ApplicationManager.getApplication().isUnitTestMode()) {
    DesignerToolWindowManager.getInstance(myProject).dispose(this);
    PaletteToolWindowManager.getInstance(myProject).dispose(this);
  }
  myPsiTreeChangeListener.dispose();
  Disposer.dispose(myContentSplitter);
}
/**
 * Returns the module the form file belongs to, re-resolving it from the file
 * if the cached module has been disposed.
 *
 * @throws IllegalArgumentException if no module can be found for the file
 */
@NotNull
@Override
public Module getModule() {
  if (!myModule.isDisposed()) {
    return myModule;
  }
  // Cached module is stale; look it up again by file.
  myModule = ModuleUtilCore.findModuleForFile(myFile, myProject);
  if (myModule == null) {
    throw new IllegalArgumentException("No module for file " + myFile);
  }
  return myModule;
}
@NotNull
@Override
public Project getProject() {
  return myProject;
}

/** @return the form file being edited. */
@NotNull
public VirtualFile getFile() {
  return myFile;
}

/** @return the PSI file for the form file, or {@code null} if none is found. */
public PsiFile getPsiFile() {
  return PsiManager.getInstance(getProject()).findFile(myFile);
}

/** @return {@code true} if the form document exists and is writable. */
public boolean isEditable() {
  final Document document = FileDocumentManager.getInstance().getDocument(myFile);
  return document != null && document.isWritable();
}
/**
 * Makes sure the form (and, when class instrumentation is off, the bound
 * class source as well) can be modified, asking the read-only status handler
 * to check files out if necessary.
 *
 * @return {@code true} if all required files are writable
 */
public boolean ensureEditable() {
  if (isEditable()) {
    return true;
  }
  // Without instrumentation the bound class source is patched directly,
  // so it has to be made writable together with the form file.
  VirtualFile boundSource = null;
  if (!GuiDesignerConfiguration.getInstance(getProject()).INSTRUMENT_CLASSES) {
    final String classToBind = myRootContainer.getClassToBind();
    if (classToBind != null && !classToBind.isEmpty()) {
      final PsiClass boundClass = FormEditingUtil.findClassToBind(getModule(), classToBind);
      if (boundClass != null) {
        boundSource = boundClass.getContainingFile().getVirtualFile();
      }
    }
  }
  final List<VirtualFile> files = boundSource == null
                                  ? Collections.singletonList(myFile)
                                  : Arrays.asList(myFile, boundSource);
  final ReadonlyStatusHandler.OperationStatus status =
    ReadonlyStatusHandler.getInstance(getProject()).ensureFilesWritable(files);
  return !status.hasReadonlyFiles();
}
/** Re-layouts the whole component tree and repaints the designer surface. */
public void refresh() {
  refreshImpl(myRootContainer);
  myRootContainer.getDelegee().revalidate();
  repaintLayeredPane();
}
/**
 * Synchronizes the property inspector with the component tree, refreshes the
 * designer surface and writes the form back to its file.
 *
 * @param forceSync whether the inspector must sync even without pending changes
 */
public void refreshAndSave(final boolean forceSync) {
  DesignerToolWindow toolWindow = DesignerToolWindowManager.getInstance(this);
  if (toolWindow == null) {
    return;
  }
  // Update property inspector
  final PropertyInspector propertyInspector = toolWindow.getPropertyInspector();
  if (propertyInspector != null) {
    propertyInspector.synchWithTree(forceSync);
  }
  refresh();
  saveToFile();
  // TODO[yole]: install appropriate listeners so that the captions repaint themselves at correct time
  myHorzCaptionPanel.repaint();
  myVertCaptionPanel.repaint();
}

/** @return the undo group id the next save will use (see {@code myNextSaveGroupId}). */
public Object getNextSaveGroupId() {
  return myNextSaveGroupId;
}
/**
 * Recursively adjusts the size of {@code component} to its constraints and
 * refreshes every container in the subtree.
 */
private static void refreshImpl(final RadComponent component) {
  if (component.getParent() != null) {
    final Dimension size = component.getSize();
    final Dimension previousSize = new Dimension(size);
    Util.adjustSize(component.getDelegee(), component.getConstraints(), size);
    if (!previousSize.equals(size)) {
      // Only XY (absolute) layouts need the model size pushed back explicitly.
      if (component.getParent().isXY()) {
        component.setSize(size);
      }
      component.getDelegee().invalidate();
    }
  }
  if (component instanceof RadContainer) {
    component.refresh();
    final RadContainer container = (RadContainer)component;
    // Walk the children from last to first, matching the original traversal order.
    for (int index = container.getComponentCount() - 1; index >= 0; index--) {
      refreshImpl(container.getComponent(index));
    }
  }
}
/**
 * Supplies context data (help id, delete/cut/copy/paste providers) for the
 * action system. Returns {@code null} for everything while the property
 * inspector is editing so the standard Swing edit actions work there instead.
 */
@Override
public Object getData(@NotNull final String dataId) {
  if (PlatformDataKeys.HELP_ID.is(dataId)) {
    return ourHelpID;
  }
  // Standard Swing cut/copy/paste actions should work if user is editing something inside property inspector
  Project project = getProject();
  if (project.isDisposed()) return null;
  DesignerToolWindow toolWindow = DesignerToolWindowManager.getInstance(this);
  if (toolWindow == null) return null;
  final PropertyInspector inspector = toolWindow.getPropertyInspector();
  if (inspector != null && inspector.isEditing()) {
    return null;
  }
  if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
    return myDeleteProvider;
  }
  if (PlatformDataKeys.COPY_PROVIDER.is(dataId) ||
      PlatformDataKeys.CUT_PROVIDER.is(dataId) ||
      PlatformDataKeys.PASTE_PROVIDER.is(dataId)) {
    return myCutCopyPasteSupport;
  }
  return null;
}
/**
 * Builds the card shown instead of the designer when the form file is invalid.
 * Also initializes {@code myFormInvalidLabel} as a side effect.
 */
private JPanel createInvalidCard() {
  final JPanel card = new JPanel(new GridBagLayout());
  myFormInvalidLabel = new JLabel(UIDesignerBundle.message("error.form.file.is.invalid"));
  final GridBagConstraints constraints =
    new GridBagConstraints(0, 0, 1, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0);
  card.add(myFormInvalidLabel, constraints);
  return card;
}
/**
 * @return the component which represents the DnD layer. All currently
 * dragged (moved) components are on this layer.
 */
public DragLayer getDragLayer() {
  return myDragLayer;
}

/**
 * @return the topmost {@code UiContainer} which is the root of the
 * component hierarchy. This method never returns {@code null}.
 */
@NotNull
public RadRootContainer getRootContainer() {
  return myRootContainer;
}
/**
 * Notifies every registered {@code ComponentSelectionListener} that the
 * selection has changed.
 */
public void fireSelectedComponentChanged() {
  for (final ComponentSelectionListener listener : myListenerList.getListeners(ComponentSelectionListener.class)) {
    listener.selectedComponentChanged(this);
  }
}
/** Notifies every registered {@code HierarchyChangeListener} of a hierarchy change. */
private void fireHierarchyChanged() {
  for (final HierarchyChangeListener listener : myListenerList.getListeners(HierarchyChangeListener.class)) {
    listener.hierarchyChanged();
  }
}
@NotNull
public GlassLayer getGlassLayer() {
return myGlassLayer;
}
/**
 * @return the layer that hosts inplace editors.
 * NOTE(review): the original javadoc mentioned "grid edit controls" and other
 * active decorators, which matches {@code getActiveDecorationLayer} — confirm
 * which layer actually owns those decorators.
 */
public InplaceEditingLayer getInplaceEditingLayer() {
  return myInplaceEditingLayer;
}
/**
 * @return the layered pane that stacks all designer layers; never {@code null}.
 */
@NotNull
public JLayeredPane getLayeredPane() {
  return myLayeredPane;
}
/** Repaints the whole layered pane of this editor. */
public void repaintLayeredPane() {
  myLayeredPane.repaint();
}
/**
 * Adds the specified selection listener. The listener gets a notification
 * each time the component selection in the editor changes.
 */
public void addComponentSelectionListener(final ComponentSelectionListener l) {
  myListenerList.add(ComponentSelectionListener.class, l);
}
/**
 * Removes the specified selection listener.
 */
public void removeComponentSelectionListener(final ComponentSelectionListener l) {
  myListenerList.remove(ComponentSelectionListener.class, l);
}
/**
 * Adds the specified hierarchy change listener.
 */
public void addHierarchyChangeListener(@NotNull final HierarchyChangeListener l) {
  myListenerList.add(HierarchyChangeListener.class, l);
}
/**
 * Removes the specified hierarchy change listener.
 */
public void removeHierarchyChangeListener(@NotNull final HierarchyChangeListener l) {
  myListenerList.remove(HierarchyChangeListener.class, l);
}
/**
 * Serializes the current component tree back into the bound form-XML
 * document. Runs inside a single write-action command grouped by
 * {@code myNextSaveGroupId}, so consecutive saves of one logical change
 * merge into one undo step. Only the differing fragment of the document
 * text is replaced; on any failure the whole text is overwritten.
 */
private void saveToFile() {
  LOG.debug("GuiEditor.saveToFile(): group ID=" + myNextSaveGroupId);
  CommandProcessor.getInstance().executeCommand(getProject(), () -> ApplicationManager.getApplication().runWriteAction(() -> {
    // Guard flag: marks document changes as originating from the editor itself.
    myInsideChange = true;
    try {
      final XmlWriter writer = new XmlWriter();
      getRootContainer().write(writer);
      final String newText = writer.getText();
      final String oldText = myDocument.getText();
      try {
        final ReplaceInfo replaceInfo = findFragmentToChange(oldText, newText);
        if (replaceInfo.getStartOffset() == -1) {
          // do nothing - texts are equal
        }
        else {
          // Minimal replacement keeps the document diff small.
          myDocument.replaceString(replaceInfo.getStartOffset(), replaceInfo.getEndOffset(), replaceInfo.getReplacement());
        }
      }
      catch (Exception e) {
        LOG.error(e);
        // Fallback: replace the entire document text.
        myDocument.replaceString(0, oldText.length(), newText);
      }
    }
    finally {
      myInsideChange = false;
    }
  }), "UI Designer Save", myNextSaveGroupId);
  // Fresh group id so the next save becomes a separate undoable step.
  myNextSaveGroupId = new Object();
  fireHierarchyChanged();
}
/** @return the active decoration layer of this editor. */
public ActiveDecorationLayer getActiveDecorationLayer() {
  return myActiveDecorationLayer;
}
/**
 * Switches the locale used for the form's string descriptors, then
 * refreshes the properties, the component tree and restarts code analysis
 * so everything reflects the new locale.
 */
public void setStringDescriptorLocale(final Locale locale) {
  myRootContainer.setStringDescriptorLocale(locale);
  refreshProperties();
  DesignerToolWindowManager.getInstance(this).updateComponentTree();
  DaemonCodeAnalyzer.getInstance(getProject()).restart();
}
/** @return the locale currently used for string descriptors, or {@code null} if none is set. */
@Nullable
public Locale getStringDescriptorLocale() {
  return myRootContainer.getStringDescriptorLocale();
}
/**
 * Re-evaluates localized string properties, container borders and tab
 * titles for every component in the tree (e.g. after a resource bundle or
 * the descriptor locale changed). When anything was actually modified,
 * refreshes the editor, the component tree and the property inspector.
 */
private void refreshProperties() {
  final Ref<Boolean> anythingModified = new Ref<>();
  FormEditingUtil.iterate(myRootContainer, component -> {
    final RadComponent radComponent = (RadComponent)component;
    boolean componentModified = false;
    // Refresh each modified string property from its resource.
    for (IProperty prop : component.getModifiedProperties()) {
      if (prop instanceof IntroStringProperty) {
        IntroStringProperty strProp = (IntroStringProperty)prop;
        componentModified = strProp.refreshValue(radComponent) || componentModified;
      }
    }
    // Containers may carry a (possibly localized) border.
    if (component instanceof RadContainer) {
      componentModified = ((RadContainer)component).updateBorder() || componentModified;
    }
    // Children of a tabbed pane may have localized tab titles.
    if (component.getParentContainer() instanceof RadTabbedPane) {
      componentModified = ((RadTabbedPane)component.getParentContainer()).refreshChildTitle(radComponent) || componentModified;
    }
    if (componentModified) {
      anythingModified.set(Boolean.TRUE);
    }
    // Keep iterating over the whole tree.
    return true;
  });
  if (!anythingModified.isNull()) {
    refresh();
    DesignerToolWindow designerToolWindow = DesignerToolWindowManager.getInstance(this);
    ComponentTree tree = designerToolWindow.getComponentTree();
    if (tree != null) tree.repaint();
    PropertyInspector inspector = designerToolWindow.getPropertyInspector();
    if (inspector != null) inspector.synchWithTree(true);
  }
}
/** @return the main input-event processor of this editor. */
public MainProcessor getMainProcessor() {
  return myProcessor;
}
/** Refreshes the quick-fix intention hint shown in the editor. */
public void refreshIntentionHint() {
  myQuickFixManager.refreshIntentionHint();
}
/**
 * Remembers the given component as the selection anchor. Stored as a
 * {@code ComponentPtr} so the reference can be re-validated after the
 * hierarchy is reloaded.
 */
public void setSelectionAnchor(final RadComponent component) {
  mySelectionAnchor = new ComponentPtr(this, component);
}
/**
 * @return the selection-anchor component, or {@code null} when no anchor
 * was set (the stored pointer is validated before being resolved).
 */
@Nullable
public RadComponent getSelectionAnchor() {
  final ComponentPtr anchor = mySelectionAnchor;
  if (anchor == null) {
    return null;
  }
  anchor.validate();
  return anchor.getComponent();
}
/**
 * Remembers the given component as the selection lead (stored as a
 * {@code ComponentPtr}, like the anchor).
 */
public void setSelectionLead(final RadComponent component) {
  mySelectionLead = new ComponentPtr(this, component);
}
/**
 * @return the selection-lead component, or {@code null} when no lead was
 * set (the stored pointer is validated before being resolved).
 */
@Nullable
public RadComponent getSelectionLead() {
  final ComponentPtr lead = mySelectionLead;
  if (lead == null) {
    return null;
  }
  lead.validate();
  return lead.getComponent();
}
/**
 * Scrolls the layered pane so that the given component's bounds
 * (converted into layered-pane coordinates) become visible.
 */
public void scrollComponentInView(final RadComponent component) {
  Rectangle rect = SwingUtilities.convertRectangle(component.getDelegee().getParent(), component.getBounds(), myLayeredPane);
  myLayeredPane.scrollRectToVisible(rect);
}
/**
 * Describes a minimal text replacement: the range
 * [startOffset, endOffset) of the old text is to be replaced with
 * {@code replacement}. A start offset of -1 signals that the texts were
 * equal and nothing needs replacing (see {@code findFragmentToChange}).
 */
public static final class ReplaceInfo {
  private final int myStartOffset;
  private final int myEndOffset;
  private final String myReplacement;

  public ReplaceInfo(final int startOffset, final int endOffset, final String replacement) {
    myStartOffset = startOffset;
    myEndOffset = endOffset;
    myReplacement = replacement;
  }

  public int getStartOffset() {
    return myStartOffset;
  }

  public int getEndOffset() {
    return myEndOffset;
  }

  public String getReplacement() {
    return myReplacement;
  }
}
/**
 * Computes the minimal replacement needed to turn {@code oldText} into
 * {@code newText}: the longest common prefix and the longest
 * non-overlapping common suffix are stripped, and only the differing
 * middle part is reported.
 *
 * @return a {@code ReplaceInfo} whose offsets address {@code oldText};
 * for equal inputs the offsets are -1 and the replacement is {@code null}.
 */
public static ReplaceInfo findFragmentToChange(final String oldText, final String newText) {
  if (oldText.equals(newText)) {
    return new ReplaceInfo(-1, -1, null);
  }
  final int minLength = Math.min(oldText.length(), newText.length());
  // Longest common prefix.
  int prefix = 0;
  while (prefix < minLength && oldText.charAt(prefix) == newText.charAt(prefix)) {
    prefix++;
  }
  // Longest common suffix that does not reach into the prefix.
  int oldEnd = oldText.length();
  int newEnd = newText.length();
  while (oldEnd > prefix && newEnd > prefix && oldText.charAt(oldEnd - 1) == newText.charAt(newEnd - 1)) {
    oldEnd--;
    newEnd--;
  }
  return new ReplaceInfo(prefix, oldEnd, newText.substring(prefix, newEnd));
}
/**
 * Replaces the current root container: removes the old delegee from the
 * layered pane, installs the new one, resets design-time insets and
 * notifies hierarchy listeners.
 *
 * @param rootContainer new container to be set as a root.
 */
private void setRootContainer(@NotNull final RadRootContainer rootContainer) {
  if (myRootContainer != null) {
    myLayeredPane.remove(myRootContainer.getDelegee());
  }
  myRootContainer = rootContainer;
  setDesignTimeInsets(2);
  myLayeredPane.add(myRootContainer.getDelegee(), LAYER_COMPONENT);
  fireHierarchyChanged();
}
/**
 * Sets the design-time insets client property on the root delegee and
 * revalidates the whole tree, but only when the value actually changes.
 */
public void setDesignTimeInsets(final int insets) {
  final Integer current = (Integer)myRootContainer.getDelegee().getClientProperty(GridLayoutManager.DESIGN_TIME_INSETS);
  if (current != null && current.intValue() == insets) {
    return; // unchanged - avoid a needless revalidate/repaint pass
  }
  myRootContainer.getDelegee().putClientProperty(GridLayoutManager.DESIGN_TIME_INSETS, insets);
  revalidateRecursive(myRootContainer.getDelegee());
}
/**
 * Depth-first revalidate + repaint: children are processed before the
 * component itself, mirroring the original bottom-up order.
 */
private static void revalidateRecursive(final JComponent component) {
  final Component[] children = component.getComponents();
  for (int i = 0; i < children.length; i++) {
    if (children[i] instanceof JComponent) {
      revalidateRecursive((JComponent)children[i]);
    }
  }
  component.revalidate();
  component.repaint();
}
/**
 * Creates and sets a new {@code RadRootContainer} from the current text of
 * the bound form-XML document. On any parse/load failure the editor
 * switches to the "invalid form" card instead of throwing.
 *
 * @param keepSelection if true, the GUI designer tries to preserve the
 *                      selection state (and selected tabs) after reload.
 */
public void readFromFile(final boolean keepSelection) {
  try {
    ComponentPtr[] selection = null;
    Map<String, String> tabbedPaneSelectedTabs = null;
    if (keepSelection) {
      selection = SelectionState.getSelection(this);
      tabbedPaneSelectedTabs = saveTabbedPaneSelectedTabs();
    }
    // Carry the previously chosen descriptor locale over to the new root.
    Locale oldLocale = null;
    if (myRootContainer != null) {
      oldLocale = myRootContainer.getStringDescriptorLocale();
    }
    final String text = myDocument.getText();
    final ClassLoader classLoader = LoaderFactory.getInstance(getProject()).getLoader(myFile);
    final LwRootContainer rootContainer = Utils.getRootContainer(text, new CompiledClassPropertiesProvider(classLoader));
    final RadRootContainer container = XmlReader.createRoot(this, rootContainer, classLoader, oldLocale);
    setRootContainer(container);
    if (keepSelection) {
      SelectionState.restoreSelection(this, selection);
      restoreTabbedPaneSelectedTabs(tabbedPaneSelectedTabs);
    }
    myInvalid = false;
    myCardLayout.show(myCardPanel, CARD_VALID);
    refresh();
  }
  catch (Exception exc) {
    // Unwrap reflective invocation wrappers to report the real cause.
    Throwable original = exc;
    while (original instanceof InvocationTargetException) {
      original = original.getCause();
    }
    showInvalidCard(original);
  }
  catch (final LinkageError exc) {
    // Broken classes referenced by the form (e.g. NoClassDefFoundError).
    showInvalidCard(exc);
  }
}
/**
 * Switches the editor to the "form file is invalid" card with the message
 * extracted from {@code exc}, installing a fictive empty root container so
 * the rest of the editor machinery keeps working.
 */
private void showInvalidCard(final Throwable exc) {
  LOG.info(exc);
  // setting fictive container
  setRootContainer(new RadRootContainer(this, "0"));
  myFormInvalidLabel.setText(UIDesignerBundle.message("error.form.file.is.invalid.message", FormEditingUtil.getExceptionMessage(exc)));
  myInvalid = true;
  myCardLayout.show(myCardPanel, CARD_INVALID);
  repaint();
}
/** @return true when the last attempt to read the form file failed. */
public boolean isFormInvalid() {
  return myInvalid;
}
/**
 * Records, for every tabbed pane in the form, the id of its selected tab
 * (keyed by the tabbed pane's own id) so the selection can be restored
 * after the hierarchy is reloaded.
 */
private Map<String, String> saveTabbedPaneSelectedTabs() {
  final Map<String, String> result = new HashMap<>();
  FormEditingUtil.iterate(getRootContainer(), component -> {
    if (component instanceof RadTabbedPane) {
      RadTabbedPane tabbedPane = (RadTabbedPane)component;
      RadComponent c = tabbedPane.getSelectedTab();
      if (c != null) {
        result.put(tabbedPane.getId(), c.getId());
      }
    }
    return true; // visit every component
  });
  return result;
}
/**
 * Restores tab selections recorded by {@code saveTabbedPaneSelectedTabs}:
 * for each tabbed pane, selects the child whose id matches the saved one.
 */
private void restoreTabbedPaneSelectedTabs(final Map<String, String> tabbedPaneSelectedTabs) {
  FormEditingUtil.iterate(getRootContainer(), component -> {
    if (component instanceof RadTabbedPane) {
      RadTabbedPane tabbedPane = (RadTabbedPane)component;
      String selectedTabId = tabbedPaneSelectedTabs.get(tabbedPane.getId());
      if (selectedTabId != null) {
        for (RadComponent c : tabbedPane.getComponents()) {
          if (c.getId().equals(selectedTabId)) {
            tabbedPane.selectTab(c);
            break;
          }
        }
      }
    }
    return true; // visit every component
  });
}
/**
 * @return the component that should receive focus: the glass layer while
 * the form is shown, otherwise the "invalid form" card.
 */
public JComponent getPreferredFocusedComponent() {
  return myValidCard.isVisible() ? myGlassLayer : myInvalidCard;
}
/**
 * Repaints the layered pane of the {@code GuiEditor} that contains the
 * given component; does nothing if the component is not inside an editor.
 */
public static void repaintLayeredPane(final RadComponent component) {
  final GuiEditor uiEditor = (GuiEditor)SwingUtilities.getAncestorOfClass(GuiEditor.class, component.getDelegee());
  if (uiEditor != null) {
    uiEditor.repaintLayeredPane();
  }
}
/** @return whether the designer grid is currently shown. */
public boolean isShowGrid() {
  return myShowGrid;
}
/** Toggles grid painting; repaints only when the value actually changes. */
public void setShowGrid(final boolean showGrid) {
  if (myShowGrid == showGrid) {
    return;
  }
  myShowGrid = showGrid;
  repaint();
}
/** @return whether component tags are currently shown. */
public boolean isShowComponentTags() {
  return myShowComponentTags;
}
/** Toggles component-tag painting; repaints only when the value actually changes. */
public void setShowComponentTags(final boolean showComponentTags) {
  if (myShowComponentTags == showComponentTags) {
    return;
  }
  myShowComponentTags = showComponentTags;
  repaint();
}
/** @return the drop target listener handling designer drag-and-drop. */
public DesignDropTargetListener getDropTargetListener() {
  return myDropTargetListener;
}
/**
 * @return whichever grid caption panel (horizontal or vertical) currently
 * owns the focus, or {@code null} when neither does.
 */
@Nullable
public GridCaptionPanel getFocusedCaptionPanel() {
  if (myHorzCaptionPanel.isFocusOwner()) {
    return myHorzCaptionPanel;
  }
  return myVertCaptionPanel.isFocusOwner() ? myVertCaptionPanel : null;
}
/** @return true while an undo or redo operation is in progress for this project. */
public boolean isUndoRedoInProgress() {
  UndoManager undoManager = UndoManager.getInstance(getProject());
  return undoManager.isUndoInProgress() || undoManager.isRedoInProgress();
}
/** Hides the quick-fix intention hint if one is showing. */
void hideIntentionHint() {
  myQuickFixManager.hideIntentionHint();
}
/**
 * Shows the raw form XML in a modal read-only viewer with XML syntax
 * highlighting. The temporary editor is released in {@code finally} so it
 * never leaks, even if building or showing the dialog fails.
 */
public void showFormSource() {
  EditorFactory editorFactory = EditorFactory.getInstance();
  Editor editor = editorFactory.createViewer(myDocument, myProject);
  try {
    ((EditorEx)editor).setHighlighter(
      new LexerEditorHighlighter(new XmlFileHighlighter(), EditorColorsManager.getInstance().getGlobalScheme()));
    JComponent component = editor.getComponent();
    component.setPreferredSize(new Dimension(640, 480));
    DialogBuilder dialog = new DialogBuilder(myProject);
    dialog.title("Form - " + myFile.getPresentableName()).dimensionKey("GuiDesigner.FormSource.Dialog");
    dialog.centerPanel(component).setPreferredFocusComponent(editor.getContentComponent());
    dialog.addOkAction();
    dialog.show();
  }
  finally {
    editorFactory.releaseEditor(editor);
  }
}
/**
 * Layered pane hosting the designer layers. Every child layer is stretched
 * over the whole pane, and the pane sizes itself to fit all top-level form
 * components (plus a margin), never smaller than the viewport.
 */
private final class MyLayeredPane extends JBLayeredPane implements Scrollable {
  /**
   * All components allocate whole pane's area.
   */
  @Override
  public void doLayout() {
    for (int i = getComponentCount() - 1; i >= 0; i--) {
      final Component component = getComponent(i);
      component.setBounds(0, 0, getWidth(), getHeight());
    }
  }

  @Override
  public Dimension getMinimumSize() {
    return getPreferredSize();
  }

  @Override
  public Dimension getPreferredSize() {
    // make sure all components fit
    int width = 0;
    int height = 0;
    for (int i = 0; i < myRootContainer.getComponentCount(); i++) {
      final RadComponent component = myRootContainer.getComponent(i);
      width = Math.max(width, component.getX() + component.getWidth());
      height = Math.max(height, component.getY() + component.getHeight());
    }
    // Extra margin so components do not touch the pane's edge.
    width += 50;
    height += 40;
    // Never shrink below the visible viewport.
    Rectangle bounds = myScrollPane.getViewport().getBounds();
    return new Dimension(Math.max(width, bounds.width), Math.max(height, bounds.height));
  }

  @Override
  public Dimension getPreferredScrollableViewportSize() {
    return getPreferredSize();
  }

  @Override
  public int getScrollableUnitIncrement(Rectangle visibleRect, int orientation, int direction) {
    return 10;
  }

  @Override
  public int getScrollableBlockIncrement(Rectangle visibleRect, int orientation, int direction) {
    // Scroll almost a full page, keeping a 10px overlap for context.
    if (orientation == SwingConstants.HORIZONTAL) {
      return visibleRect.width - 10;
    }
    return visibleRect.height - 10;
  }

  @Override
  public boolean getScrollableTracksViewportWidth() {
    return false;
  }

  @Override
  public boolean getScrollableTracksViewportHeight() {
    return false;
  }
}
/**
 * Cancels the current designer operation and hides the intention hint.
 * Works only if we are not editing something in the property inspector,
 * so ESC still reaches the inspector's inline editors.
 */
private final class CancelCurrentOperationAction extends AnAction {
  @Override
  public void actionPerformed(@NotNull final AnActionEvent e) {
    myProcessor.cancelOperation();
    myQuickFixManager.hideIntentionHint();
  }

  @Override
  public void update(@NotNull final AnActionEvent e) {
    // A null inspector means the tool window is unavailable: keep disabled.
    PropertyInspector inspector = DesignerToolWindowManager.getInstance(GuiEditor.this).getPropertyInspector();
    e.getPresentation().setEnabled(inspector != null && !inspector.isEditing());
  }
}
/**
 * Allows "DEL" button to work through the standard mechanism by bridging
 * the IDE delete action to form-component deletion.
 */
private final class MyDeleteProvider implements DeleteProvider {
  @Override
  public void deleteElement(@NotNull final DataContext dataContext) {
    if (!ensureEditable()) {
      return;
    }
    // Wrap in a command so the removal is a single undoable step.
    CommandProcessor.getInstance().executeCommand(getProject(), () -> FormEditingUtil.deleteSelection(GuiEditor.this), UIDesignerBundle.message("command.delete.selection"), null);
  }

  @Override
  public boolean canDeleteElement(@NotNull final DataContext dataContext) {
    // Fix: getPropertyInspector() may return null while the tool window is
    // (re)initializing — every other call site in this class null-checks it,
    // but this one dereferenced it directly and could throw an NPE.
    final PropertyInspector inspector = DesignerToolWindowManager.getInstance(GuiEditor.this).getPropertyInspector();
    if (inspector != null && inspector.isEditing()) {
      return false;
    }
    return !myInplaceEditingLayer.isEditing() && FormEditingUtil.canDeleteSelection(GuiEditor.this);
  }
}
/**
 * Listens to PSI events and schedules (debounced via an {@code Alarm})
 * either a properties refresh or a full editor-to-document sync when a
 * relevant file changes: a .properties resource or a nested form.
 */
private final class MyPsiTreeChangeListener extends PsiTreeChangeAdapter {
  private final Alarm myAlarm;
  private final MyRefreshPropertiesRequest myRefreshPropertiesRequest = new MyRefreshPropertiesRequest();
  private final MySynchronizeRequest mySynchronizeRequest = new MySynchronizeRequest(true);

  MyPsiTreeChangeListener() {
    myAlarm = new Alarm();
  }

  /**
   * Cancels all pending update requests. You have to cancel all pending requests
   * to not access to closed project.
   */
  public void dispose() {
    myAlarm.cancelAllRequests();
  }

  @Override
  public void childAdded(@NotNull final PsiTreeChangeEvent event) {
    handleEvent(event);
  }

  @Override
  public void childMoved(@NotNull final PsiTreeChangeEvent event) {
    handleEvent(event);
  }

  @Override
  public void childrenChanged(@NotNull final PsiTreeChangeEvent event) {
    handleEvent(event);
  }

  @Override
  public void childRemoved(@NotNull PsiTreeChangeEvent event) {
    handleEvent(event);
  }

  @Override
  public void childReplaced(@NotNull PsiTreeChangeEvent event) {
    handleEvent(event);
  }

  @Override
  public void propertyChanged(@NotNull final PsiTreeChangeEvent event) {
    // Roots changed (e.g. libraries/modules): localized values may resolve differently.
    if (PsiTreeChangeEvent.PROP_ROOTS.equals(event.getPropertyName())) {
      myAlarm.cancelRequest(myRefreshPropertiesRequest);
      myAlarm.addRequest(myRefreshPropertiesRequest, 500, ModalityState.stateForComponent(GuiEditor.this));
    }
  }

  private void handleEvent(final PsiTreeChangeEvent event) {
    if (event.getParent() != null) {
      PsiFile containingFile = event.getParent().getContainingFile();
      if (containingFile instanceof PropertiesFile) {
        LOG.debug("Received PSI change event for properties file");
        // Debounce: cancel any pending request and re-schedule 500 ms out.
        myAlarm.cancelRequest(myRefreshPropertiesRequest);
        myAlarm.addRequest(myRefreshPropertiesRequest, 500, ModalityState.stateForComponent(GuiEditor.this));
      }
      else if (containingFile instanceof PsiPlainTextFile && containingFile.getFileType().equals(StdFileTypes.GUI_DESIGNER_FORM)) {
        // quick check if relevant
        String resourceName = FormEditingUtil.buildResourceName(containingFile);
        if (myDocument.getText().contains(resourceName)) {
          LOG.debug("Received PSI change event for nested form");
          // TODO[yole]: handle multiple nesting
          myAlarm.cancelRequest(mySynchronizeRequest);
          myAlarm.addRequest(mySynchronizeRequest, 500, ModalityState.stateForComponent(GuiEditor.this));
        }
      }
    }
  }
}
/**
 * Delayed request that commits the form document and reloads the component
 * tree from it (scheduled when a nested form changed).
 */
private class MySynchronizeRequest implements Runnable {
  // Whether to try restoring the selection after the reload.
  private final boolean myKeepSelection;

  MySynchronizeRequest(final boolean keepSelection) {
    myKeepSelection = keepSelection;
  }

  @Override
  public void run() {
    // The request may fire after the module or project was closed.
    if (getModule().isDisposed()) {
      return;
    }
    Project project = getProject();
    if (project.isDisposed()) {
      return;
    }
    LOG.debug("Synchronizing GUI editor " + myFile.getName() + " to document");
    PsiDocumentManager.getInstance(project).commitDocument(myDocument);
    readFromFile(myKeepSelection);
  }
}
/**
 * Delayed request that re-resolves localized properties; skipped while the
 * project is closed or indexing (dumb mode).
 */
private class MyRefreshPropertiesRequest implements Runnable {
  @Override
  public void run() {
    if (!getModule().isDisposed() && !getProject().isDisposed() && !DumbService.isDumb(getProject())) {
      refreshProperties();
    }
  }
}
/**
 * Palette callback: while SHIFT is pressed and a palette component is
 * active, widen the design-time insets (12px instead of the default 2).
 */
public void paletteKeyPressed(KeyEvent e) {
  if (e.getKeyCode() == KeyEvent.VK_SHIFT && PaletteToolWindowManager.getInstance(this).getActiveItem(ComponentItem.class) != null) {
    setDesignTimeInsets(12);
  }
}
/** Palette callback: restore the default design-time insets when SHIFT is released. */
public void paletteKeyReleased(KeyEvent e) {
  if (e.getKeyCode() == KeyEvent.VK_SHIFT) {
    setDesignTimeInsets(2);
  }
}
/**
 * Palette drag-gesture callback: SHIFT held during the gesture enlarges
 * the design-time insets, otherwise the default is restored.
 */
public void paletteDropActionChanged(int gestureModifiers) {
  final boolean shiftHeld = (gestureModifiers & InputEvent.SHIFT_MASK) != 0;
  setDesignTimeInsets(shiftHeld ? 12 : 2);
}
/**
 * Palette selection callback: cancels a pending palette insert when the
 * palette no longer has an active item.
 */
public void paletteValueChanged(ListSelectionEvent e) {
  if (PaletteToolWindowManager.getInstance(this).getActiveItem() == null) {
    myProcessor.cancelPaletteInsert();
  }
}
}
| |
package com.likya.tlossw.web.management;
import java.io.Serializable;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.faces.application.FacesMessage;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.ViewScoped;
import javax.faces.event.ActionEvent;
import javax.faces.model.SelectItem;
import javax.xml.namespace.QName;
import org.apache.xmlbeans.XmlOptions;
import org.primefaces.component.datatable.DataTable;
import com.likya.tlos.model.xmlbeans.common.ActiveDocument.Active;
import com.likya.tlos.model.xmlbeans.dbconnections.DbConnectionProfileDocument.DbConnectionProfile;
import com.likya.tlos.model.xmlbeans.dbconnections.DbPropertiesDocument.DbProperties;
import com.likya.tlos.model.xmlbeans.dbconnections.DeployedDocument.Deployed;
import com.likya.tlos.model.xmlbeans.dbconnections.JdbcConnectionPoolParamsDocument.JdbcConnectionPoolParams;
import com.likya.tlos.model.xmlbeans.user.PersonDocument.Person;
import com.likya.tlossw.model.DBAccessInfoTypeClient;
import com.likya.tlossw.utils.xml.XMLNameSpaceTransformer;
import com.likya.tlossw.web.TlosSWBaseBean;
/**
 * View-scoped JSF bean backing the DB access profile search panel:
 * builds search criteria as XML, runs the search, and handles row
 * deletion and (not yet implemented) deploy/undeploy actions.
 */
@ManagedBean(name = "dbAccessSearchPanelMBean")
@ViewScoped
public class DBAccessSearchPanelMBean extends TlosSWBaseBean implements Serializable {

    private static final long serialVersionUID = 5578347263656801312L;

    // Working profile object used only to carry the current search criteria.
    private DbConnectionProfile dbConnectionProfile;
    private DBAccessInfoTypeClient dbAccessInfoTypeClient;
    // Id (as string) of the connection chosen in the drop-down; may be null or empty.
    private String dbConnectionName = null;
    // Profile-name filter typed by the user; may be null or empty.
    private String dbProfileName = null;
    private Collection<SelectItem> dbConnectionNameList = null;
    // Raw string values of the Deployed/Active enum filters; may be null or empty.
    private String deployed;
    private String active;
    private ArrayList<DBAccessInfoTypeClient> searchDBAccessProfileList;
    private transient DataTable searchDBAccessProfileTable;
    // Row currently selected in the result table; null until the user picks one.
    private DBAccessInfoTypeClient selectedRow;
    private List<Person> filteredDBAccessList;

    /** Releases references held by this view-scoped bean. */
    public void dispose() {
        dbConnectionProfile = null;
        dbAccessInfoTypeClient = null;
        dbConnectionNameList = null;
        dbProfileName = null;
    }

    /** Creates an empty search profile and loads the connection drop-down. */
    @PostConstruct
    public void init() {
        dbConnectionProfile = DbConnectionProfile.Factory.newInstance();
        searchDBAccessProfileList = new ArrayList<DBAccessInfoTypeClient>();
        fillDBConnectionNameList();
    }

    /** Populates the connection drop-down from the configured DB connections. */
    public void fillDBConnectionNameList() {
        dbConnectionNameList = new ArrayList<SelectItem>();
        for (DbProperties dbProperties : getDbOperations().getDBConnections()) {
            SelectItem item = new SelectItem();
            item.setValue(dbProperties.getID());
            item.setLabel(dbProperties.getConnectionName());
            dbConnectionNameList.add(item);
        }
    }

    /** @return the current search profile serialized as namespace-adjusted XML. */
    public String getDBAccessXML() {
        QName qName = DbConnectionProfile.type.getOuterType().getDocumentElementName();
        XmlOptions xmlOptions = XMLNameSpaceTransformer.transformXML(qName);
        return dbConnectionProfile.xmlText(xmlOptions);
    }

    /** Clears all filter fields and the previous search result. */
    public void resetDBAccessPropertiesAction() {
        dbConnectionProfile = DbConnectionProfile.Factory.newInstance();
        JdbcConnectionPoolParams jdbcConnectionPoolParams = JdbcConnectionPoolParams.Factory.newInstance();
        dbConnectionProfile.setJdbcConnectionPoolParams(jdbcConnectionPoolParams);
        deployed = "";
        active = "";
        searchDBAccessProfileList = null;
        dbConnectionName = "";
        dbProfileName = "";
    }

    /**
     * Builds the search criteria from the filter fields and runs the search.
     * Null filters are treated exactly like empty ones: the corresponding
     * criterion is cleared. (Fix: the previous version called equals("")
     * directly on the fields, which threw a NullPointerException when a
     * filter had never been set — dbConnectionName/dbProfileName start as
     * null and deployed/active are never initialized.)
     */
    public void searchDBAccessAction(ActionEvent e) {
        dbConnectionProfile.setDbDefinitionId(hasText(dbConnectionName) ? new BigInteger(dbConnectionName) : null);
        dbConnectionProfile.setProfileName(hasText(dbProfileName) ? dbProfileName : null);
        dbConnectionProfile.setDeployed(hasText(deployed) ? Deployed.Enum.forString(deployed) : null);
        dbConnectionProfile.setActive(hasText(active) ? Active.Enum.forString(active) : null);
        searchDBAccessProfileList = getDbOperations().searchDBAccessProfile(getDBAccessXML());
        if (searchDBAccessProfileList == null || searchDBAccessProfileList.isEmpty()) {
            addMessage("searchDBAccess", FacesMessage.SEVERITY_INFO, "tlos.info.search.noRecord", null);
        }
    }

    /** @return true when the value is neither null nor empty. */
    private static boolean hasText(String value) {
        return value != null && !value.isEmpty();
    }

    /**
     * Deletes the profile of the currently selected row.
     * Fix: guarded against a missing selection, which previously caused a
     * NullPointerException on selectedRow.getDbConnectionProfile().
     */
    public void deleteDBAccessAction(ActionEvent e) {
        if (selectedRow == null) {
            addMessage("searchDBAccess", FacesMessage.SEVERITY_ERROR, "tlos.error.dbConnection.delete", null);
            return;
        }
        dbAccessInfoTypeClient = selectedRow;
        dbConnectionProfile = dbAccessInfoTypeClient.getDbConnectionProfile();
        if (getDbOperations().deleteDBAccessProfile(getDBAccessXML())) {
            searchDBAccessProfileList.remove(dbAccessInfoTypeClient);
            dbAccessInfoTypeClient = new DBAccessInfoTypeClient();
            addMessage("searchDBAccess", FacesMessage.SEVERITY_INFO, "tlos.success.dbAccessDef.delete", null);
        } else {
            addMessage("searchDBAccess", FacesMessage.SEVERITY_ERROR, "tlos.error.dbConnection.delete", null);
        }
    }

    public void deployDBAccessAction(ActionEvent e) {
        // TODO: deploy will be implemented
    }

    public void undeployDBAccessAction(ActionEvent e) {
        // TODO: undeploy will be implemented
    }

    public DbConnectionProfile getDbConnectionProfile() {
        return dbConnectionProfile;
    }

    public void setDbConnectionProfile(DbConnectionProfile dbConnectionProfile) {
        this.dbConnectionProfile = dbConnectionProfile;
    }

    public DBAccessInfoTypeClient getDbAccessInfoTypeClient() {
        return dbAccessInfoTypeClient;
    }

    public void setDbAccessInfoTypeClient(DBAccessInfoTypeClient dbAccessInfoTypeClient) {
        this.dbAccessInfoTypeClient = dbAccessInfoTypeClient;
    }

    public String getDbConnectionName() {
        return dbConnectionName;
    }

    public void setDbConnectionName(String dbConnectionName) {
        this.dbConnectionName = dbConnectionName;
    }

    public Collection<SelectItem> getDbConnectionNameList() {
        return dbConnectionNameList;
    }

    public void setDbConnectionNameList(Collection<SelectItem> dbConnectionNameList) {
        this.dbConnectionNameList = dbConnectionNameList;
    }

    public String getDeployed() {
        return deployed;
    }

    public void setDeployed(String deployed) {
        this.deployed = deployed;
    }

    public String getActive() {
        return active;
    }

    public void setActive(String active) {
        this.active = active;
    }

    public ArrayList<DBAccessInfoTypeClient> getSearchDBAccessProfileList() {
        return searchDBAccessProfileList;
    }

    public void setSearchDBAccessProfileList(ArrayList<DBAccessInfoTypeClient> searchDBAccessProfileList) {
        this.searchDBAccessProfileList = searchDBAccessProfileList;
    }

    public DataTable getSearchDBAccessProfileTable() {
        return searchDBAccessProfileTable;
    }

    public void setSearchDBAccessProfileTable(DataTable searchDBAccessProfileTable) {
        this.searchDBAccessProfileTable = searchDBAccessProfileTable;
    }

    public List<Person> getFilteredDBAccessList() {
        return filteredDBAccessList;
    }

    public void setFilteredDBAccessList(List<Person> filteredDBAccessList) {
        this.filteredDBAccessList = filteredDBAccessList;
    }

    public DBAccessInfoTypeClient getSelectedRow() {
        return selectedRow;
    }

    public void setSelectedRow(DBAccessInfoTypeClient selectedRow) {
        this.selectedRow = selectedRow;
    }

    public String getDbProfileName() {
        return dbProfileName;
    }

    public void setDbProfileName(String dbProfileName) {
        this.dbProfileName = dbProfileName;
    }
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.hypervisor.ovm3.objects;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang3.StringUtils;

import org.w3c.dom.Document;
/*
* synonym to the pool python lib in the ovs-agent
*/
public class Pool extends OvmObject {
private final List<String> validRoles = new ArrayList<String>() {
{
add("xen");
add("utility");
}
private static final long serialVersionUID = 1L;
};
private List<String> poolHosts = new ArrayList<String>();
private final List<String> poolRoles = new ArrayList<String>();
private String poolMasterVip;
private String poolAlias;
private String poolId = null;
public Pool(Connection connection) {
setClient(connection);
}
public String getPoolMasterVip() {
return poolMasterVip;
}
public String getPoolAlias() {
return poolAlias;
}
public String getPoolId() {
return poolId;
}
public List<String> getValidRoles() {
return validRoles;
}
public Boolean isInPool(String id) throws Ovm3ResourceException {
if (poolId == null) {
discoverServerPool();
}
if (poolId == null) {
return false;
}
if (isInAPool() && poolId.equals(id)) {
return true;
}
return false;
}
public Boolean isInAPool() throws Ovm3ResourceException {
if (poolId == null) {
discoverServerPool();
}
if (poolId == null) {
return false;
}
return true;
}
private Boolean createServerPool(String alias, String id, String vip,
int num, String name, String host, List<String> roles) throws Ovm3ResourceException {
final String role = StringUtils.join(roles, ",");
if (!isInAPool()) {
final Object x = callWrapper("create_server_pool", alias, id, vip, num, name,
host, role);
if (x == null) {
return true;
}
return false;
} else if (isInPool(id)) {
return true;
} else {
throw new Ovm3ResourceException("Unable to add host is already in a pool with id : " + poolId);
}
}
public Boolean createServerPool(String alias, String id, String vip,
int num, String name, String ip) throws Ovm3ResourceException {
return createServerPool(alias, id, vip, num, name, ip,
getValidRoles());
}
/*
* public Boolean updatePoolVirtualIp(String ip) throws Ovm3ResourceException { Object x =
* callWrapper("update_pool_virtual_ip", ip); if (x == null) { poolMasterVip = ip; return true; } return false; }
*/
public Boolean leaveServerPool(String uuid) throws Ovm3ResourceException {
return nullIsTrueCallWrapper("leave_server_pool", uuid);
}
public Boolean takeOwnership(String uuid, String apiurl) throws Ovm3ResourceException {
return nullIsTrueCallWrapper("take_ownership", uuid, apiurl);
}
public Boolean takeOwnership33x(final String uuid,
final String eventUrl,
final String statUrl,
final String managerCert,
final String signedCert) throws Ovm3ResourceException {
final Map<String, String> mgrConfig = new HashMap<String, String>() {
{
put("manager_uuid", uuid);
put("manager_event_url", eventUrl);
put("manager_statistic_url", statUrl);
put("manager_certificate", managerCert);
put("signed_server_certificate", signedCert);
}
private static final long serialVersionUID = 1L;
};
final Boolean rc = nullIsTrueCallWrapper("take_ownership", mgrConfig);
/* because it restarts when it's done.... 2000? -sigh- */
try {
Thread.sleep(2000);
} catch (final InterruptedException e) {
throw new Ovm3ResourceException(e.getMessage());
}
return rc;
}
/*
* destroy_server_pool, <class 'agent.api.serverpool.ServerPool'> argument: self - default: None argument: pool_uuid -
* default: None
*/
public Boolean destroyServerPool(String uuid) throws Ovm3ResourceException {
return nullIsTrueCallWrapper("destroy_server_pool", uuid);
}
/*
* release_ownership, <class 'agent.api.serverpool.ServerPool'> argument: self - default: None argument: manager_uuid
* - default: None
*/
public Boolean releaseOwnership(String uuid) throws Ovm3ResourceException {
return nullIsTrueCallWrapper("release_ownership", uuid);
}
/* server.discover_pool_filesystem */
/*
* discover_server_pool, <class 'agent.api.serverpool.ServerPool'> argument: self - default: None
*/
public Boolean discoverServerPool() throws Ovm3ResourceException {
final Object x = callWrapper("discover_server_pool");
if (x == null) {
return false;
}
final Document xmlDocument = prepParse((String) x);
final String path = "//Discover_Server_Pool_Result/Server_Pool";
poolId = xmlToString(path + "/Unique_Id", xmlDocument);
poolAlias = xmlToString(path + "/Pool_Alias", xmlDocument);
poolMasterVip = xmlToString(path + "/Master_Virtual_Ip",
xmlDocument);
poolHosts.addAll(xmlToList(path + "//Registered_IP", xmlDocument));
if (poolId == null) {
return false;
}
return true;
}
private Boolean setServerRoles() throws Ovm3ResourceException {
final String roles = StringUtils.join(poolRoles.toArray(), ",");
return nullIsTrueCallWrapper("update_server_roles", roles);
}
/* do some sanity check on the valid poolroles */
public Boolean setServerRoles(List<String> roles) throws Ovm3ResourceException {
poolRoles.addAll(roles);
return setServerRoles();
}
private Boolean joinServerPool(String alias, String id, String vip, int num,
String name, String host, List<String> roles) throws Ovm3ResourceException {
final String role = StringUtils.join(roles.toArray(), ",");
if (!isInAPool()) {
final Object x = callWrapper("join_server_pool", alias, id, vip, num, name,
host, role);
if (x == null) {
return true;
}
return false;
} else if (isInPool(id)) {
return true;
} else {
throw new Ovm3ResourceException("Unable to add host is already in a pool with id : " + poolId);
}
}
public Boolean joinServerPool(String alias, String id, String vip, int num,
String name, String host) throws Ovm3ResourceException {
return joinServerPool(alias, id, vip, num, name, host, getValidRoles());
}
private Boolean setPoolMemberList() throws Ovm3ResourceException {
// should throw exception if no poolHosts set
return nullIsTrueCallWrapper("set_pool_member_ip_list", poolHosts);
}
public Boolean setPoolMemberList(List<String> hosts) throws Ovm3ResourceException {
    /* Replace the cached member list with a copy of the given hosts
     * before pushing it to the agent. */
    poolHosts = new ArrayList<String>(hosts);
    return setPoolMemberList();
}
public List<String> getPoolMemberList() throws Ovm3ResourceException {
    /* Lazily run pool discovery the first time the list is requested. */
    if (null == poolId) {
        discoverServerPool();
    }
    return poolHosts;
}
public Boolean addPoolMember(String host) throws Ovm3ResourceException {
    /* Refresh the cached member list first so existing members are kept. */
    getPoolMemberList();
    poolHosts.add(host);
    return setPoolMemberList();
}
public Boolean removePoolMember(String host) throws Ovm3ResourceException {
    /* Refresh the cached member list first, then drop the given host. */
    getPoolMemberList();
    poolHosts.remove(host);
    return setPoolMemberList();
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal.shared.unsafe;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;
import com.gemstone.gemfire.internal.shared.ChannelBufferOutputStream;
import com.gemstone.gemfire.internal.shared.ClientSharedUtils;
import com.gemstone.gemfire.internal.shared.OutputStreamChannel;
/**
* A somewhat more efficient implementation of {@link ChannelBufferOutputStream}
* using internal unsafe class (~30% in raw single byte write calls). Use this
* only when {@link UnsafeHolder#getDirectByteBufferAddressMethod()} returns
* non-null. Alternatively use {@link UnsafeHolder#newChannelBufferOutputStream}
* method to create either this or {@link ChannelBufferOutputStream} depending
* on availability.
* <p>
* NOTE: THIS CLASS IS NOT THREAD-SAFE BY DESIGN. IF IT IS USED CONCURRENTLY
* BY MULTIPLE THREADS THEN BAD THINGS CAN HAPPEN DUE TO UNSAFE MEMORY WRITES.
* <p>
 * Note that the close() method of this class does not close the underlying
 * channel.
*
* @author swale
* @since gfxd 1.1
*/
public class ChannelBufferUnsafeOutputStream extends OutputStreamChannel {

  /** Direct byte buffer that batches writes before they hit the channel. */
  protected final ByteBuffer buffer;

  /** Native memory address of byte 0 of {@link #buffer}. */
  protected final long baseAddress;

  /**
   * Actual buffer position (+baseAddress) accounting is done by this. Buffer
   * position is adjusted during refill and other places where required using
   * this.
   */
  protected long addrPosition;

  /** Native address one past the last writable byte of {@link #buffer}. */
  protected long addrLimit;

  protected static final sun.misc.Unsafe unsafe = UnsafeHolder.getUnsafe();

  /**
   * Some minimum buffer size, particularly for longs and encoding UTF strings
   * efficiently. If reducing this, then consider the logic in
   * {@link ChannelBufferUnsafeDataOutputStream#writeUTF(String)} carefully.
   */
  protected static final int MIN_BUFFER_SIZE = 10;

  /**
   * Creates a buffered stream over the given channel using the default
   * buffer size.
   */
  public ChannelBufferUnsafeOutputStream(WritableByteChannel channel)
      throws IOException {
    this(channel, ChannelBufferOutputStream.DEFAULT_BUFFER_SIZE);
  }

  /**
   * Creates a buffered stream over the given channel with an explicit
   * buffer size.
   *
   * @throws IllegalArgumentException if bufferSize is below
   *         {@link #MIN_BUFFER_SIZE}
   */
  public ChannelBufferUnsafeOutputStream(WritableByteChannel channel,
      int bufferSize) throws IOException {
    super(channel);
    // expect minimum bufferSize of 10 bytes
    if (bufferSize < MIN_BUFFER_SIZE) {
      throw new IllegalArgumentException(
          "ChannelBufferUnsafeDataOutputStream: buffersize=" + bufferSize
          + " too small (minimum " + MIN_BUFFER_SIZE + ')');
    }
    this.buffer = allocateBuffer(bufferSize);
    try {
      // resolve the native base address of the direct buffer via reflection
      this.baseAddress = (Long)UnsafeHolder.getDirectByteBufferAddressMethod()
          .invoke(this.buffer);
      resetBufferPositions();
    } catch (Exception e) {
      throw ClientSharedUtils.newRuntimeException(
          "failed in creating an 'unsafe' buffered channel stream", e);
    }
  }

  /** Re-derives the raw address cursors from the buffer's position/limit. */
  protected final void resetBufferPositions() {
    this.addrPosition = this.baseAddress + this.buffer.position();
    this.addrLimit = this.baseAddress + this.buffer.limit();
  }

  /** Allocates the backing buffer; must be direct for the address trick. */
  protected ByteBuffer allocateBuffer(int bufferSize) {
    return ByteBuffer.allocateDirect(bufferSize);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public final void write(int b) throws IOException {
    final long addrPos = this.addrPosition;
    if (addrPos < this.addrLimit) {
      // fast path: room left in the buffer
      unsafe.putByte(addrPos, (byte)(b & 0xff));
      this.addrPosition++;
    }
    else {
      // buffer full: flush (which resets addrPosition), then store
      flushBufferBlocking(this.buffer);
      unsafe.putByte(this.addrPosition++, (byte)(b & 0xff));
    }
  }

  /**
   * Bulk write without bounds checking; callers must validate off/len.
   * Copies into the direct buffer, flushing to the channel as it fills.
   */
  protected final void write_(byte[] b, int off, int len) throws IOException {
    if (len == 1) {
      // single-byte special case mirrors write(int)
      final long addrPos = this.addrPosition;
      if (addrPos < this.addrLimit) {
        unsafe.putByte(addrPos, b[off]);
        this.addrPosition++;
      }
      else {
        flushBufferBlocking(this.buffer);
        unsafe.putByte(this.addrPosition++, b[off]);
      }
      return;
    }
    while (len > 0) {
      final long addrPos = this.addrPosition;
      final int remaining = (int)(this.addrLimit - addrPos);
      if (len <= remaining) {
        // everything fits into the current buffer
        UnsafeHolder.bufferPut(b, addrPos, off, len, unsafe);
        this.addrPosition += len;
        return;
      }
      else {
        // copy b to buffer and flush
        if (remaining > 0) {
          UnsafeHolder.bufferPut(b, addrPos, off, remaining, unsafe);
          this.addrPosition += remaining;
          len -= remaining;
          off += remaining;
        }
        flushBufferBlocking(this.buffer);
      }
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public final void write(byte[] b) throws IOException {
    write_(b, 0, b.length);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public final void write(byte[] b, int off, int len) throws IOException {
    if (UnsafeHolder.checkBounds(off, len, b.length)) {
      write_(b, off, len);
    }
    else {
      throw new IndexOutOfBoundsException("offset=" + off + " length=" + len
          + " size=" + b.length);
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public final int write(ByteBuffer src) throws IOException {
    // We will just use our ByteBuffer for the write. It might be possible
    // to get slight performance advantage in using unsafe instead, but
    // copying from source ByteBuffer will not be efficient without
    // reflection to get src's native address in case it is a direct
    // byte buffer. Avoiding the complication since the benefit will be
    // very small in any case (and reflection cost may well offset that).

    // adjust this buffer position first
    this.buffer.position((int)(this.addrPosition - this.baseAddress));
    // now we are actually set to just call base class method
    try {
      return super.writeBuffered(src, this.buffer);
    } finally {
      // finally reset the raw positions from buffer
      resetBufferPositions();
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void flush() throws IOException {
    // only flush when at least one byte has been buffered
    if (this.addrPosition > this.baseAddress) {
      flushBufferBlocking(this.buffer);
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public final boolean isOpen() {
    return this.channel.isOpen();
  }

  /**
   * {@inheritDoc}
   * <p>
   * Flushes buffered bytes; deliberately does NOT close the channel
   * (see class javadoc).
   */
  @Override
  public void close() throws IOException {
    flushBufferBlocking(this.buffer);
  }

  /**
   * Writes the buffered bytes to the channel, blocking until all are
   * written. On an exception mid-write the unwritten tail is compacted to
   * the front of the buffer; either way the raw cursors are re-derived.
   */
  protected void flushBufferBlocking(final ByteBuffer buffer)
      throws IOException {
    // sync the buffer's position with the raw cursor before flipping
    buffer.position((int)(this.addrPosition - this.baseAddress));
    buffer.flip();
    try {
      do {
        writeBuffer(buffer);
      } while (buffer.hasRemaining());
    } finally {
      // hasRemaining() can only still be true here if writeBuffer threw
      if (buffer.hasRemaining()) {
        buffer.compact();
      }
      else {
        buffer.clear();
      }
      resetBufferPositions();
    }
  }

  /**
   * {@inheritDoc}
   * <p>
   * For our own buffer the raw cursors must be re-derived after the base
   * class may have compacted/cleared it.
   */
  @Override
  protected boolean flushBufferNonBlocking(final ByteBuffer buffer,
      boolean isChannelBuffer) throws IOException {
    if (isChannelBuffer) {
      try {
        return super.flushBufferNonBlocking(buffer, true);
      } finally {
        resetBufferPositions();
      }
    }
    else {
      return super.flushBufferNonBlocking(buffer, false);
    }
  }
}
| |
/*
* Copyright 2014 Alejandro Barocio A. <abarocio80@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.extranjero.web;
import javafx.application.Application;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.concurrent.Worker;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Rectangle2D;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.StackPane;
import javafx.scene.text.Text;
import javafx.scene.web.WebEngine;
import javafx.scene.web.WebEvent;
import javafx.scene.web.WebView;
import javafx.stage.Stage;
import java.time.LocalDateTime;
/**
* Created by alex on 3/31/14.
*/
public class Browser extends Application {

    /* Toolbar icon resources, loaded once per Browser instance. */
    private final Image previousI = new Image(Browser.class.getResourceAsStream("img/back.png"));
    private final ImageView previousIV = new ImageView(previousI);
    private final Image nextI = new Image(Browser.class.getResourceAsStream("img/next.png"));
    private final ImageView nextIV = new ImageView(nextI);
    private final Image reloadI = new Image(Browser.class.getResourceAsStream("img/reload.png"));
    private final ImageView reloadIV = new ImageView(reloadI);
    private final Image cancelI = new Image(Browser.class.getResourceAsStream("img/cancel.png"));
    private final ImageView cancelIV = new ImageView(cancelI);
    private final Image settingsI = new Image(Browser.class.getResourceAsStream("img/settings.png"));
    private final ImageView settingsIV = new ImageView(settingsI);

    @FXML
    ProgressIndicator pi = new ProgressIndicator();
    @FXML
    ProgressBar pb = new ProgressBar();
    private Stage mStage;
    private Scene mScene;
    @FXML
    private StackPane progress;
    @FXML
    private TextField uri;
    @FXML
    private Button title;
    @FXML
    private Text pt;
    @FXML
    private Text pu;
    @FXML
    private WebEngine webEngine;
    @FXML
    private Text url;
    @FXML
    private TextField addressbar;
    @FXML
    private Button reload;
    @FXML
    private Button previous;
    @FXML
    private Button next;
    @FXML
    private Menu settings;
    @FXML
    private WebView mWebView;
    // Guards against logging "DoneLoading" more than once per page load.
    private boolean loggedLoad;

    public static void main(String[] args) {
        launch(args);
    }

    /**
     * JavaFX entry point: loads the FXML layout, applies the default skin
     * and shows the primary stage.
     */
    @Override
    public void start(Stage stage) throws Exception {
        // FXMLLoader.load is generic, so no explicit cast is needed.
        Parent root = FXMLLoader.load(getClass().getResource("ui/BrowserUI.fxml"));
        stage.setTitle("Web.Browser");
        mStage = stage;
        mScene = new Scene(root, 800, 600);
        mStage.setScene(mScene);
        mScene.getStylesheets().add(getClass().getResource("skins/light.css").toExternalForm());
        mStage.show();
        // Resizes the toolbar widgets with the window. Currently unattached
        // (see the commented registrations below); kept for future use.
        ChangeListener<Number> widthListener = new ChangeListener<Number>() {
            @Override
            public void changed(ObservableValue<? extends Number> observableValue, Number number, Number number2) {
                title.setPrefWidth(uri.getWidth());
                pb.setPrefWidth(uri.getWidth());
                progress.setPrefWidth(uri.getWidth());
                mWebView.setPrefWidth(mStage.getWidth());
                mWebView.setPrefHeight(mStage.getHeight());
            }
        };
        /*
        mStage.widthProperty().addListener(widthListener);
        mStage.heightProperty().addListener(widthListener);
        mScene.widthProperty().addListener(widthListener);
        mScene.heightProperty().addListener(widthListener);
        */
    }

    /**
     * Toolbar handler: cancels an in-flight load, otherwise (re)loads the
     * current address via {@link #go}.
     */
    @FXML
    void reload(ActionEvent event) {
        if (webEngine.getLoadWorker().isRunning()) {
            webEngine.getLoadWorker().cancel();
            System.out.println(LocalDateTime.now() + " CancelLoading: " + webEngine.getLocation());
            reload.setGraphic(reloadIV);
            uri.setText(webEngine.getLocation());
            title.setText(webEngine.getTitle());
            title.setPrefWidth(uri.getWidth());
            uri.setVisible(false);
            progress.setVisible(false);
            title.setVisible(true);
        } else {
            go(event);
        }
    }

    /**
     * Loads the address currently in the URI field, prefixing "http://"
     * when no scheme is given.
     */
    @FXML
    void go(ActionEvent event) {
        reload.setGraphic(cancelIV);
        String url = uri.getText();
        // BUGFIX: String.matches() anchors at both ends, so the previous
        // url.matches("^https?://") test could never match a full address;
        // simple prefix checks are what was intended here.
        if (!url.startsWith("http://") && !url.startsWith("https://")) {
            url = "http://" + url;
        }
        uri.setVisible(false);
        progress.setPrefWidth(uri.getWidth());
        progress.setVisible(true);
        webEngine.load(url);
        System.out.println(LocalDateTime.now() + " StartLoading: " + webEngine.getLocation());
        loggedLoad = false;
        uri.setText(webEngine.getLocation());
    }

    /** Toggles between the title button and the editable address field. */
    @FXML
    void toggle(ActionEvent event) {
        if (title.isVisible()) {
            title.setVisible(false);
            uri.setVisible(true);
            uri.requestFocus();
            uri.selectAll();
        } else {
            uri.setVisible(false);
            title.setVisible(true);
        }
    }

    /** Navigates one step forward in the engine's history. */
    @FXML
    void next(ActionEvent event) {
        webEngine.executeScript("history.forward()");
    }

    /** Navigates one step back in the engine's history. */
    @FXML
    void previous(ActionEvent event) {
        webEngine.executeScript("history.back()");
    }

    /**
     * Called by the FXMLLoader after field injection; wires the engine,
     * its listeners and the toolbar icons.
     */
    @FXML
    void initialize() {
        webEngine = mWebView.getEngine();
        webEngine.setOnResized(new EventHandler<WebEvent<Rectangle2D>>() {
            @Override
            public void handle(WebEvent<Rectangle2D> rectangle2DWebEvent) {
                title.setPrefWidth(uri.getWidth());
                progress.setPrefWidth(uri.getWidth());
                pb.setPrefWidth(uri.getWidth());
                pb.setMaxWidth(uri.getWidth());
                title.setMaxWidth(uri.getWidth());
            }
        });
        // BUGFIX: register the progress/location listeners exactly once.
        // The old code added fresh listeners on every go() call, so after N
        // page loads the same handler ran N times per progress tick.
        webEngine.getLoadWorker().progressProperty().addListener(new ChangeListener<Object>() {
            @Override
            public void changed(ObservableValue<?> arg0, Object arg1, Object arg2) {
                progress.setVisible(true);
                double p = webEngine.getLoadWorker().progressProperty().get();
                String l = webEngine.getLoadWorker().getMessage();
                pb.setPrefWidth(uri.getWidth());
                if (p < 1.0) {
                    pt.setText((int) (p * 100) + "%");
                    pu.setText(l);
                } else if (p > 0.0) {
                    if (!loggedLoad) {
                        System.out.println(LocalDateTime.now() + " DoneLoading: " + webEngine.getLocation());
                        loggedLoad = true;
                    }
                    title.setPrefWidth(uri.getWidth());
                    title.setText(webEngine.getTitle());
                    progress.setVisible(false);
                    reload.setGraphic(reloadIV);
                    title.setVisible(true);
                }
                uri.setText(webEngine.getLocation());
                pi.setProgress((Double) arg0.getValue());
                pb.setProgress((Double) arg0.getValue());
            }
        });
        // Keeps the address field in sync with navigation (links, redirects).
        webEngine.locationProperty().addListener(new ChangeListener<String>() {
            @Override
            public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
                uri.setText(newValue);
            }
        });
        double size = 16.0;
        previousIV.setFitHeight(size);
        previousIV.setFitWidth(size);
        previous.setGraphic(previousIV);
        nextIV.setFitHeight(size);
        nextIV.setFitWidth(size);
        next.setGraphic(nextIV);
        reloadIV.setFitHeight(size);
        reloadIV.setFitWidth(size);
        reload.setGraphic(reloadIV);
        settingsIV.setFitWidth(size);
        settingsIV.setFitHeight(size);
        settings.setGraphic(settingsIV);
        // use the shared size constant instead of a duplicated literal
        cancelIV.setFitHeight(size);
        cancelIV.setFitWidth(size);
    }
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Lists;
import com.google.javascript.jscomp.ControlFlowGraph.AbstractCfgNodeTraversalCallback;
import com.google.javascript.jscomp.ControlFlowGraph.Branch;
import com.google.javascript.jscomp.DataFlowAnalysis.FlowState;
import com.google.javascript.jscomp.MustBeReachingVariableDef.MustDef;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.NodeTraversal.AbstractShallowCallback;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphNode;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Collection;
import java.util.List;
/**
* Inline variables when possible. Using the information from
* {@link MaybeReachingVariableUse} and {@link MustBeReachingVariableDef},
* this pass attempts to inline a variable by placing the value at the
* definition where the variable is used. The basic requirements for inlining
* are the following:
*
* <ul>
* <li> There is exactly one reaching definition at the use of that variable
* </li>
* <li> There is exactly one use for that definition of the variable
* </li>
* </ul>
*
* <p>Other requirements can be found in {@link Candidate#canInline}. Currently
* this pass does not operate on the global scope due to compilation time.
*
*/
class FlowSensitiveInlineVariables extends AbstractPostOrderCallback
    implements CompilerPass, ScopedCallback {

  /**
   * Implementation:
   *
   * This pass first performs a traversal to gather a list of Candidates that
   * could be inlined using {@link GatherCandiates}.
   *
   * The second step involves verifying that each candidate is actually safe
   * to inline with {@link Candidate#canInline()} and finally performing the
   * inlining using {@link Candidate#inlineVariable()}.
   *
   * The reason for the delayed evaluation of the candidates is that we
   * need two separate dataflow results.
   */
  private final AbstractCompiler compiler;

  // These two pieces of data are persistent across one whole execution of
  // enterScope.
  private ControlFlowGraph<Node> cfg;
  private List<Candidate> candidates;
  private MustBeReachingVariableDef reachingDef;
  private MaybeReachingVariableUse reachingUses;

  // Matches any node whose evaluation may have an observable side effect.
  private static final Predicate<Node> SIDE_EFFECT_PREDICATE =
      new Predicate<Node>() {
        @Override
        public boolean apply(Node n) {
          // When the node is null it means we reached the implicit return
          // where the function returns (possibly without a return statement).
          if (n == null) {
            return false;
          }
          // TODO(user): We only care about calls to functions that
          // pass one of the dependent variables to a function that is not
          // side-effect free.
          if (NodeUtil.isCall(n) && NodeUtil.functionCallHasSideEffects(n)) {
            return true;
          }
          if (NodeUtil.isNew(n) && NodeUtil.constructorCallHasSideEffects(n)) {
            return true;
          }
          // Recurse into children that belong to the same CFG node.
          for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
            if (!ControlFlowGraph.isEnteringNewCfgNode(c) && apply(c)) {
              return true;
            }
          }
          return false;
        }
      };

  public FlowSensitiveInlineVariables(AbstractCompiler compiler) {
    this.compiler = compiler;
  }

  @Override
  public void enterScope(NodeTraversal t) {
    if (t.inGlobalScope()) {
      return; // Don't even bother. All global variables are likely escaped.
    }
    // Bail out on scopes too large for the dataflow analyses to be cheap.
    if (LiveVariablesAnalysis.MAX_VARIABLES_TO_ANALYZE <
        t.getScope().getVarCount()) {
      return;
    }
    // Compute the forward reaching definition.
    ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false, true);
    // Process the body of the function.
    Preconditions.checkState(NodeUtil.isFunction(t.getScopeRoot()));
    cfa.process(null, t.getScopeRoot().getLastChild());
    cfg = cfa.getCfg();
    reachingDef = new MustBeReachingVariableDef(cfg, t.getScope(), compiler);
    reachingDef.analyze();
    candidates = Lists.newLinkedList();
    // Using the forward reaching definition search to find all the inline
    // candidates.
    new NodeTraversal(compiler, new GatherCandiates()).traverse(
        t.getScopeRoot().getLastChild());
    // Compute the backward reaching use. The CFG can be reused.
    reachingUses = new MaybeReachingVariableUse(cfg, t.getScope(), compiler);
    reachingUses.analyze();
    for (Candidate c : candidates) {
      if (c.canInline()) {
        c.inlineVariable();
      }
    }
  }

  @Override
  public void exitScope(NodeTraversal t) {}

  @Override
  public void process(Node externs, Node root) {
    (new NodeTraversal(compiler, this)).traverse(root);
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    // TODO(user): While the helpers do a subtree traversal on the AST, the
    // compiler pass itself only traverses the AST to look for function
    // declarations to perform dataflow analysis on. We could combine
    // the traversal in DataFlowAnalysis's computeEscaped later to save some
    // time.
  }

  /**
   * Gathers a list of possible candidates for inlining based only on
   * information from {@link MustBeReachingVariableDef}. The list will be stored
   * in {@code candidates} and the validity of each inlining Candidate should
   * be later verified with {@link Candidate#canInline()} when
   * {@link MaybeReachingVariableUse} has been performed.
   */
  private class GatherCandiates extends AbstractShallowCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      DiGraphNode<Node, Branch> graphNode = cfg.getDirectedGraphNode(n);
      if (graphNode == null) {
        // Not a CFG node.
        return;
      }
      FlowState<MustDef> state = graphNode.getAnnotation();
      final MustDef defs = state.getIn();
      final Node cfgNode = n;
      AbstractCfgNodeTraversalCallback gatherCb =
          new AbstractCfgNodeTraversalCallback() {
            @Override
            public void visit(NodeTraversal t, Node n, Node parent) {
              if (NodeUtil.isName(n)) {
                // Make sure that the name node is purely a read: skip
                // assignment targets, declarations, increments/decrements,
                // parameter lists and catch variables.
                if ((NodeUtil.isAssignmentOp(parent) && parent.getFirstChild() == n)
                    || NodeUtil.isVar(parent) || parent.getType() == Token.INC ||
                    parent.getType() == Token.DEC || parent.getType() == Token.LP ||
                    parent.getType() == Token.CATCH) {
                  return;
                }
                String name = n.getString();
                // Exported names must not be inlined away.
                if (compiler.getCodingConvention().isExported(name)) {
                  return;
                }
                Node defNode = reachingDef.getDef(name, cfgNode);
                if (defNode != null &&
                    !reachingDef.dependsOnOuterScopeVars(name, cfgNode)) {
                  candidates.add(new Candidate(name, defNode, n, cfgNode));
                }
              }
            }
          };
      NodeTraversal.traverse(compiler, cfgNode, gatherCb);
    }
  }

  /**
   * Models the connection between a definition and a use of that definition.
   */
  private class Candidate {

    // Name of the variable.
    private final String varName;

    // Nodes related to the definition.
    private Node def;
    private final Node defCfgNode;

    // Nodes related to the use.
    private final Node use;
    private final Node useCfgNode;

    // Number of uses of the variable within the CFG node that represented the
    // use in the CFG.
    private int numUseWithinUseCfgNode;

    Candidate(String varName, Node defCfgNode, Node use, Node useCfgNode) {
      Preconditions.checkArgument(NodeUtil.isName(use));
      this.varName = varName;
      this.defCfgNode = defCfgNode;
      this.use = use;
      this.useCfgNode = useCfgNode;
    }

    /**
     * Returns true only when every safety condition for inlining this
     * def-use pair holds. See the inline comments for each check.
     */
    private boolean canInline() {
      // Cannot inline a parameter.
      if (NodeUtil.isFunction(defCfgNode)) {
        return false;
      }
      getDefinition(defCfgNode, null);
      getNumUseInUseCfgNode(useCfgNode, null);
      // Definition was not found.
      if (def == null) {
        return false;
      }
      // Check that the assignment isn't used as an R-Value.
      // TODO(user): In certain cases we can still inline.
      if (NodeUtil.isAssign(def) && !NodeUtil.isExprAssign(def.getParent())) {
        return false;
      }
      // The right of the definition has a side effect:
      // Example, for x:
      // x = readProp(b), modifyProp(b); print(x);
      if (checkRightOf(def, defCfgNode, SIDE_EFFECT_PREDICATE)) {
        return false;
      }
      // Similar check as the above but this time, all the sub-expressions
      // left of the use of the variable.
      // x = readProp(b); modifyProp(b), print(x);
      if (checkLeftOf(use, useCfgNode, SIDE_EFFECT_PREDICATE)) {
        return false;
      }
      // TODO(user): Side-effect is ok sometimes. As long as there are no
      // side-effecting functions down all paths to the use. Once we have all
      // the side-effect analysis tools.
      if (NodeUtil.mayHaveSideEffects(def.getLastChild())) {
        return false;
      }
      // TODO(user): We could inline all the uses if the expression is short.
      // Finally we have to make sure that there is no more than one use
      // in the program and in the CFG node. Even when it is semantically
      // correct, inlining twice increases code size.
      if (numUseWithinUseCfgNode != 1) {
        return false;
      }
      // Make sure that the name is not within a loop
      if (NodeUtil.isWithinLoop(use)) {
        return false;
      }
      Collection<Node> uses = reachingUses.getUses(varName, defCfgNode);
      if (uses.size() != 1) {
        return false;
      }
      // We give up inlining stuff with an R-Value that has GETPROP, GETELEM,
      // or anything that creates a new object.
      // Example:
      // var x = a.b.c; j.c = 1; print(x);
      // Inlining print(a.b.c) is not safe, considering j may be an alias of
      // a.b.
      // TODO(user): We could get more accuracy by looking in more detail at
      // what j is and what x is referring to.
      if (NodeUtil.has(def.getLastChild(),
          new Predicate<Node>() {
            @Override
            public boolean apply(Node input) {
              switch (input.getType()) {
                case Token.GETELEM:
                case Token.GETPROP:
                case Token.ARRAYLIT:
                case Token.OBJECTLIT:
                case Token.REGEXP:
                case Token.NEW:
                  return true;
              }
              return false;
            }
          },
          new Predicate<Node>() {
            @Override
            public boolean apply(Node input) {
              // Recurse if the node is not a function.
              return !NodeUtil.isFunction(input);
            }
          })) {
        return false;
      }
      // We can skip the side effect check along the paths of two nodes if
      // they are just next to each other.
      if (NodeUtil.isStatementBlock(defCfgNode.getParent()) &&
          defCfgNode.getNext() != useCfgNode) {
        // Similar side effect check as above but this time the side effect is
        // elsewhere along the path.
        // x = readProp(b); while(modifyProp(b)) {}; print(x);
        CheckPathsBetweenNodes<Node, ControlFlowGraph.Branch>
            pathCheck = new CheckPathsBetweenNodes<Node, ControlFlowGraph.Branch>(
                cfg,
                cfg.getDirectedGraphNode(defCfgNode),
                cfg.getDirectedGraphNode(useCfgNode),
                SIDE_EFFECT_PREDICATE,
                Predicates.
                    <DiGraphEdge<Node, ControlFlowGraph.Branch>>alwaysTrue(),
                false);
        if (pathCheck.somePathsSatisfyPredicate()) {
          return false;
        }
      }
      return true;
    }

    /**
     * Actual transformation.
     */
    private void inlineVariable() {
      Node defParent = def.getParent();
      Node useParent = use.getParent();
      if (NodeUtil.isAssign(def)) {
        Node rhs = def.getLastChild();
        rhs.detachFromParent();
        // Remove the whole expression statement, walking up through any
        // enclosing LABEL nodes first.
        Preconditions.checkState(NodeUtil.isExpressionNode(defParent));
        while (defParent.getParent().getType() == Token.LABEL) {
          defParent = defParent.getParent();
        }
        defParent.detachFromParent();
        useParent.replaceChild(use, rhs);
      } else if (NodeUtil.isVar(defParent)) {
        Node rhs = def.getLastChild();
        def.removeChild(rhs);
        useParent.replaceChild(use, rhs);
      } else {
        Preconditions.checkState(false, "No other definitions can be inlined.");
      }
      compiler.reportCodeChange();
    }

    /**
     * Set the def node
     *
     * @param n A node that has a corresponding CFG node in the CFG.
     */
    private void getDefinition(Node n, Node parent) {
      AbstractCfgNodeTraversalCallback gatherCb =
          new AbstractCfgNodeTraversalCallback() {
            @Override
            public void visit(NodeTraversal t, Node n, Node parent) {
              switch (n.getType()) {
                case Token.NAME:
                  // var name = <value>; -- the NAME node with a child.
                  if (n.getString().equals(varName) && n.hasChildren()) {
                    def = n;
                  }
                  return;
                case Token.ASSIGN:
                  // name = <value>;
                  Node lhs = n.getFirstChild();
                  if (NodeUtil.isName(lhs) && lhs.getString().equals(varName)) {
                    def = n;
                  }
                  return;
              }
            }
          };
      NodeTraversal.traverse(compiler, n, gatherCb);
    }

    /**
     * Computes the number of uses of the variable varName and stores it in
     * numUseWithinUseCfgNode.
     */
    private void getNumUseInUseCfgNode(Node n, Node parant) {
      AbstractCfgNodeTraversalCallback gatherCb =
          new AbstractCfgNodeTraversalCallback() {
            @Override
            public void visit(NodeTraversal t, Node n, Node parent) {
              if (NodeUtil.isName(n) && n.getString().equals(varName) &&
                  // do not count it if it is the left child of an assignment
                  // operator
                  !(NodeUtil.isAssign(parent) &&
                      (parent.getFirstChild() == n))) {
                numUseWithinUseCfgNode++;
              }
            }
          };
      NodeTraversal.traverse(compiler, n, gatherCb);
    }
  }

  /**
   * Given an expression by its root and sub-expression n, return true if
   * the predicate is true for some expression on the right of n.
   *
   * Example:
   *
   * NotChecked(), NotChecked(), n, Checked(), Checked();
   */
  private static boolean checkRightOf(
      Node n, Node expressionRoot, Predicate<Node> predicate) {
    for (Node p = n; p != expressionRoot; p = p.getParent()) {
      for (Node cur = p.getNext(); cur != null; cur = cur.getNext()) {
        if (predicate.apply(cur)) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Given an expression by its root and sub-expression n, return true if
   * the predicate is true for some expression on the left of n.
   *
   * Example:
   *
   * Checked(), Checked(), n, NotChecked(), NotChecked();
   */
  private static boolean checkLeftOf(
      Node n, Node expressionRoot, Predicate<Node> predicate) {
    for (Node p = n.getParent(); p != expressionRoot; p = p.getParent()) {
      for (Node cur = p.getParent().getFirstChild(); cur != p;
          cur = cur.getNext()) {
        if (predicate.apply(cur)) {
          return true;
        }
      }
    }
    return false;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.util.List;
import com.google.common.collect.ComparisonChain;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.plugins.document.Collection.SETTINGS;
import org.jetbrains.annotations.NotNull;
/**
* The format version currently in use by the DocumentNodeStore and written
 * to the underlying DocumentStore. A version {@link #canRead can read} data
 * written by the current or older versions.
*/
public final class FormatVersion implements Comparable<FormatVersion> {
/**
* A dummy version when none is available.
*/
static final FormatVersion V0 = new FormatVersion(0, 0, 0);
/**
* Format version for Oak 1.0.
*/
static final FormatVersion V1_0 = new FormatVersion(1, 0, 0);
/**
* Format version for Oak 1.2.
* <p>
* Changes introduced with this version:
* <ul>
* <li>_lastRev entries are only updated for implicit changes (OAK-2131)</li>
* </ul>
*/
static final FormatVersion V1_2 = new FormatVersion(1, 2, 0);
/**
* Format version for Oak 1.4.
* <p>
* Changes introduced with this version:
* <ul>
* <li>journalGC in settings collection (OAK-4528)</li>
* <li>startTime in clusterNode entries, revision vector in checkpoint (OAK-3646)</li>
* <li>discovery lite with clusterView in settings collection (OAK-2844)</li>
* </ul>
*/
static final FormatVersion V1_4 = new FormatVersion(1, 4, 0);
/**
* Format version for Oak 1.6.
* <p>
* Changes introduced with this version:
* <ul>
* <li>bundle nodes into document (OAK-1312)</li>
* <li>journal entries with change set summary for JCR observation (OAK-5101)</li>
* </ul>
*/
static final FormatVersion V1_6 = new FormatVersion(1, 6, 0);
/**
* Format version for Oak 1.8.
* <p>
* Changes introduced with this version:
* <ul>
* <li>SplitDocType.DEFAULT_NO_BRANCH (OAK-5869)</li>
* <li>journal entries with invalidate-only changes (OAK-5964)</li>
* </ul>
*/
static final FormatVersion V1_8 = new FormatVersion(1, 8, 0);
/**
* The ID of the document in the settings collection that contains the
* version information.
*/
private static final String VERSION_ID = "version";
/**
* @return well known format versions.
*/
public static FormatVersion> values() {
    // All well-known versions, ordered from oldest to newest.
    return ImmutableList.of(V0, V1_0, V1_2, V1_4, V1_6, V1_8);
}
/**
* Name of the version property.
*/
private static final String PROP_VERSION = "_v";
private final int major, minor, micro;
// Private: instances exist only for the well-known versions declared above.
private FormatVersion(int major, int minor, int micro) {
    this.major = major;
    this.minor = minor;
    this.micro = micro;
}
/**
 * Returns {@code true} if {@code this} version can read data written by the
 * {@code other} version.
 *
 * @param other the version the data was written in.
 * @return {@code true} if this version can read, {@code false} otherwise.
 */
public boolean canRead(FormatVersion other) {
    checkNotNull(other);
    // A version can read anything written by the same or an older version.
    return this.compareTo(other) >= 0;
}
/**
 * Reads the {@link FormatVersion} from the given store. This method returns
 * {@link FormatVersion#V0} when the store has no version document, or the
 * document has no version property.
 *
 * @param store the store to read from.
 * @return the format version of the store.
 * @throws DocumentStoreException if an error occurs while reading from the
 *          store, or the stored version string cannot be parsed.
 */
@NotNull
public static FormatVersion versionOf(@NotNull DocumentStore store)
        throws DocumentStoreException {
    checkNotNull(store);
    Document d = store.find(SETTINGS, VERSION_ID);
    if (d == null) {
        // No version document at all -> pre-1.0 format.
        return V0;
    }
    Object p = d.get(PROP_VERSION);
    if (p == null) {
        // Version document exists but carries no version property.
        return V0;
    }
    try {
        return valueOf(p.toString());
    } catch (IllegalArgumentException e) {
        throw new DocumentStoreException(e);
    }
}
/**
 * Writes this version to the given document store. The write operation will
 * fail with a {@link DocumentStoreException} if the version change is
 * considered incompatible or cannot be applied for some other reason. This
 * includes:
 * <ul>
 * <li>An attempt to downgrade the existing version</li>
 * <li>There are active cluster nodes using an existing version</li>
 * <li>The version was changed concurrently</li>
 * </ul>
 *
 * @param store the document store.
 * @return {@code true} if the version in the store was updated,
 *          {@code false} otherwise. This method will also return {@code false}
 *          if the version in the store equals this version and no update was
 *          required.
 * @throws DocumentStoreException if the write operation fails. Reasons
 *          include: 1) an attempt to downgrade the existing version, 2) there
 *          are active cluster nodes using an existing version, 3) the version
 *          was changed concurrently.
 */
public boolean writeTo(@NotNull DocumentStore store)
        throws DocumentStoreException {
    checkNotNull(store);
    FormatVersion v = versionOf(store);
    if (v == this) {
        // already on this version. Identity comparison works for the
        // well-known versions because versionOf() canonicalizes them via
        // valueOf(); an equal-but-distinct instance falls through to the
        // conditional update below, which is harmless.
        return false;
    }
    if (!canRead(v)) {
        // never downgrade
        throw unableToWrite("Version " + this + " cannot read " + v);
    }
    List<Integer> active = Lists.newArrayList();
    for (ClusterNodeInfoDocument d : ClusterNodeInfoDocument.all(store)) {
        if (d.isActive()) {
            active.add(d.getClusterId());
        }
    }
    // Refuse to change an established version while cluster nodes are
    // active; only the initial upgrade from V0 is allowed in that case.
    if (!active.isEmpty() && v != V0) {
        throw unableToWrite("There are active cluster nodes: " + active);
    }
    if (v == V0) {
        // No version document yet: create it. create() reports failure when
        // the document already exists, i.e. a concurrent update happened.
        UpdateOp op = new UpdateOp(VERSION_ID, true);
        op.set(PROP_VERSION, toString());
        if (!store.create(SETTINGS, Lists.newArrayList(op))) {
            throw concurrentUpdate();
        }
    } else {
        // Conditional update: only applies while the stored version still
        // equals v. A null result means it was changed concurrently.
        UpdateOp op = new UpdateOp(VERSION_ID, false);
        op.equals(PROP_VERSION, v.toString());
        op.set(PROP_VERSION, toString());
        if (store.findAndUpdate(SETTINGS, op) == null) {
            throw concurrentUpdate();
        }
    }
    return true;
}
/**
 * Returns a format version for the given String representation. This method
 * either returns one of the well known versions or an entirely new version
 * if the version is not well known.
 *
 * @param s the String representation of a format version, in the form
 *          {@code "major.minor.micro"} with decimal integer components.
 * @return the parsed format version.
 * @throws IllegalArgumentException if the string is malformed.
 */
public static FormatVersion valueOf(String s)
        throws IllegalArgumentException {
    String[] parts = s.split("\\.");
    if (parts.length != 3) {
        throw new IllegalArgumentException(s);
    }
    int[] elements = new int[parts.length];
    for (int i = 0; i < parts.length; i++) {
        try {
            elements[i] = Integer.parseInt(parts[i]);
        } catch (NumberFormatException e) {
            // Preserve the parse failure as the cause instead of dropping it,
            // so callers see which component was not a number.
            throw new IllegalArgumentException(s, e);
        }
    }
    FormatVersion v = new FormatVersion(elements[0], elements[1], elements[2]);
    // Canonicalize to the shared instance when the version is well known,
    // which makes identity comparisons (e.g. in writeTo()) work for these.
    for (FormatVersion known : values()) {
        if (v.equals(known)) {
            v = known;
            break;
        }
    }
    return v;
}
/**
 * Dotted decimal representation, e.g. {@code "1.8.0"}. This is the exact
 * form parsed by {@link #valueOf(String)}.
 */
@Override
public String toString() {
    return String.format("%d.%d.%d", major, minor, micro);
}
/**
 * Two versions are equal when {@link #compareTo(FormatVersion)} considers
 * them the same, i.e. their major, minor and micro components all match.
 */
@Override
public boolean equals(Object obj) {
    return obj instanceof FormatVersion
            && compareTo((FormatVersion) obj) == 0;
}

/**
 * Hash code consistent with {@link #equals(Object)}: derived from the same
 * (major, minor, micro) triple that compareTo() compares. Overriding equals
 * without hashCode would break the Object contract for hash-based
 * collections.
 */
@Override
public int hashCode() {
    int hash = 17;
    hash = 31 * hash + major;
    hash = 31 * hash + minor;
    hash = 31 * hash + micro;
    return hash;
}
/**
 * Orders versions component-wise: major first, then minor, then micro.
 */
@Override
public int compareTo(@NotNull FormatVersion other) {
    checkNotNull(other);
    int result = Integer.compare(major, other.major);
    if (result == 0) {
        result = Integer.compare(minor, other.minor);
    }
    if (result == 0) {
        result = Integer.compare(micro, other.micro);
    }
    return result;
}
// Failure raised when the version document changed between our read and our
// conditional write in writeTo().
private static DocumentStoreException concurrentUpdate() {
    return unableToWrite("Version was updated concurrently");
}

// Wraps a human-readable reason into the DocumentStoreException thrown by
// writeTo() when the format version cannot be persisted.
private static DocumentStoreException unableToWrite(String reason) {
    return new DocumentStoreException(
            "Unable to write format version. " + reason);
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v8/services/campaign_audience_view_service.proto
package com.google.ads.googleads.v8.services;
/**
* <pre>
* Request message for [CampaignAudienceViewService.GetCampaignAudienceView][google.ads.googleads.v8.services.CampaignAudienceViewService.GetCampaignAudienceView].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v8.services.GetCampaignAudienceViewRequest}
*/
public final class GetCampaignAudienceViewRequest extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v8.services.GetCampaignAudienceViewRequest)
    GetCampaignAudienceViewRequestOrBuilder {
  // NOTE(review): generated protobuf code — do not edit by hand; regenerate
  // from campaign_audience_view_service.proto instead.
  private static final long serialVersionUID = 0L;
  // Use GetCampaignAudienceViewRequest.newBuilder() to construct.
  private GetCampaignAudienceViewRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private GetCampaignAudienceViewRequest() {
    resourceName_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new GetCampaignAudienceViewRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor; invoked through PARSER below.
  private GetCampaignAudienceViewRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks the end of the stream.
            done = true;
            break;
          case 10: {
            // Field 1 (resource_name), length-delimited wire type.
            java.lang.String s = input.readStringRequireUtf8();

            resourceName_ = s;
            break;
          }
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v8.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v8_services_GetCampaignAudienceViewRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v8.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v8_services_GetCampaignAudienceViewRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest.class, com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest.Builder.class);
  }

  public static final int RESOURCE_NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; converted lazily in the accessors.
  private volatile java.lang.Object resourceName_;
  /**
   * <pre>
   * Required. The resource name of the campaign audience view to fetch.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  @java.lang.Override
  public java.lang.String getResourceName() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String for subsequent calls.
      resourceName_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Required. The resource name of the campaign audience view to fetch.
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getResourceNameBytes() {
    java.lang.Object ref = resourceName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls.
      resourceName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized flag: -1 = not computed, 1 = initialized. This message has no
  // wire-required fields, so isInitialized() always resolves to true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest other = (com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest) obj;

    if (!getResourceName()
        .equals(other.getResourceName())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getResourceName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Request message for [CampaignAudienceViewService.GetCampaignAudienceView][google.ads.googleads.v8.services.CampaignAudienceViewService.GetCampaignAudienceView].
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v8.services.GetCampaignAudienceViewRequest}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v8.services.GetCampaignAudienceViewRequest)
      com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v8.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v8_services_GetCampaignAudienceViewRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v8.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v8_services_GetCampaignAudienceViewRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest.class, com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest.Builder.class);
    }

    // Construct using com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      resourceName_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v8.services.CampaignAudienceViewServiceProto.internal_static_google_ads_googleads_v8_services_GetCampaignAudienceViewRequest_descriptor;
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest getDefaultInstanceForType() {
      return com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest build() {
      com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest buildPartial() {
      com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest result = new com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest(this);
      result.resourceName_ = resourceName_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest) {
        return mergeFrom((com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest other) {
      if (other == com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest.getDefaultInstance()) return this;
      if (!other.getResourceName().isEmpty()) {
        resourceName_ = other.resourceName_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed, even on failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object resourceName_ = "";
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The resourceName.
     */
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        resourceName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return The bytes for resourceName.
     */
    public com.google.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceName(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      resourceName_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @return This builder for chaining.
     */
    public Builder clearResourceName() {
      resourceName_ = getDefaultInstance().getResourceName();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Required. The resource name of the campaign audience view to fetch.
     * </pre>
     *
     * <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
     * @param value The bytes for resourceName to set.
     * @return This builder for chaining.
     */
    public Builder setResourceNameBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      resourceName_ = value;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v8.services.GetCampaignAudienceViewRequest)
  }

  // @@protoc_insertion_point(class_scope:google.ads.googleads.v8.services.GetCampaignAudienceViewRequest)
  private static final com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest();
  }

  public static com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser singleton delegating to the wire-format parsing constructor.
  private static final com.google.protobuf.Parser<GetCampaignAudienceViewRequest>
      PARSER = new com.google.protobuf.AbstractParser<GetCampaignAudienceViewRequest>() {
    @java.lang.Override
    public GetCampaignAudienceViewRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new GetCampaignAudienceViewRequest(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<GetCampaignAudienceViewRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<GetCampaignAudienceViewRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.ads.googleads.v8.services.GetCampaignAudienceViewRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.api.action;
import java.io.Serializable;
import java.util.Collection;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.core.api.CoreConstants;
import org.kuali.rice.core.api.mo.AbstractDataTransferObject;
import org.kuali.rice.core.api.mo.ModelBuilder;
import org.kuali.rice.kew.api.document.DocumentContentUpdate;
import org.kuali.rice.kew.api.document.DocumentUpdate;
import org.w3c.dom.Element;
/**
 * Immutable transfer object carrying the parameters for taking an action
 * against a workflow document: the target document id, the acting principal
 * id, an optional annotation, and optional document / document-content
 * updates. Instances are created through the static {@code create} factories
 * or the nested {@link Builder}.
 */
@XmlRootElement(name = DocumentActionParameters.Constants.ROOT_ELEMENT_NAME)
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = DocumentActionParameters.Constants.TYPE_NAME, propOrder = {
    DocumentActionParameters.Elements.DOCUMENT_ID,
    DocumentActionParameters.Elements.PRINCIPAL_ID,
    DocumentActionParameters.Elements.ANNOTATION,
    DocumentActionParameters.Elements.DOCUMENT_UPDATE,
    DocumentActionParameters.Elements.DOCUMENT_CONTENT_UPDATE,
    CoreConstants.CommonElements.FUTURE_ELEMENTS
})
public final class DocumentActionParameters extends AbstractDataTransferObject {

    private static final long serialVersionUID = -7589214734683758734L;

    @XmlElement(name = Elements.DOCUMENT_ID, required = true)
    private final String documentId;

    @XmlElement(name = Elements.PRINCIPAL_ID, required = true)
    private final String principalId;

    @XmlElement(name = Elements.ANNOTATION, required = false)
    private final String annotation;

    @XmlElement(name = Elements.DOCUMENT_UPDATE, required = false)
    private final DocumentUpdate documentUpdate;

    @XmlElement(name = Elements.DOCUMENT_CONTENT_UPDATE, required = false)
    private final DocumentContentUpdate documentContentUpdate;

    // Collects any unrecognized XML elements for forward compatibility.
    @SuppressWarnings("unused")
    @XmlAnyElement
    private final Collection<Element> _futureElements = null;

    // No-arg constructor for JAXB; leaves all fields null.
    private DocumentActionParameters() {
        this.documentId = null;
        this.principalId = null;
        this.annotation = null;
        this.documentUpdate = null;
        this.documentContentUpdate = null;
    }

    // Copies the (already validated) state from the builder.
    private DocumentActionParameters(Builder builder) {
        this.documentId = builder.getDocumentId();
        this.principalId = builder.getPrincipalId();
        this.annotation = builder.getAnnotation();
        this.documentUpdate = builder.getDocumentUpdate();
        this.documentContentUpdate = builder.getDocumentContentUpdate();
    }

    /**
     * Creates parameters with an empty annotation and no updates.
     *
     * @param documentId the id of the target document; must not be blank
     * @param principalId the id of the acting principal; must not be blank
     * @return the new instance
     */
    public static DocumentActionParameters create(String documentId, String principalId) {
        return create(documentId, principalId, "");
    }

    /**
     * Creates parameters with the given annotation and no updates.
     *
     * @param documentId the id of the target document; must not be blank
     * @param principalId the id of the acting principal; must not be blank
     * @param annotation the annotation text, may be null or empty
     * @return the new instance
     * @throws IllegalArgumentException if documentId or principalId is blank
     */
    public static DocumentActionParameters create(String documentId, String principalId, String annotation) {
        Builder builder = Builder.create(documentId, principalId);
        builder.setAnnotation(annotation);
        return builder.build();
    }

    /** @return the id of the target document, never blank */
    public String getDocumentId() {
        return documentId;
    }

    /** @return the id of the acting principal, never blank */
    public String getPrincipalId() {
        return principalId;
    }

    /** @return the annotation text, possibly null or empty */
    public String getAnnotation() {
        return annotation;
    }

    /** @return the document update to apply, possibly null */
    public DocumentUpdate getDocumentUpdate() {
        return documentUpdate;
    }

    /** @return the document content update to apply, possibly null */
    public DocumentContentUpdate getDocumentContentUpdate() {
        return documentContentUpdate;
    }

    /**
     * A builder which can be used to construct {@link DocumentActionParameters} instances.
     *
     */
    public final static class Builder implements Serializable, ModelBuilder {

        private static final long serialVersionUID = -9209748637365086000L;

        private String documentId;
        private String principalId;
        private String annotation;
        private DocumentUpdate documentUpdate;
        private DocumentContentUpdate documentContentUpdate;

        // Validates the two mandatory ids through the setters.
        private Builder(String documentId, String principalId) {
            setDocumentId(documentId);
            setPrincipalId(principalId);
        }

        /**
         * Creates a builder with the two mandatory values.
         *
         * @param documentId the id of the target document; must not be blank
         * @param principalId the id of the acting principal; must not be blank
         * @return the new builder
         * @throws IllegalArgumentException if either argument is blank
         */
        public static Builder create(String documentId, String principalId) {
            return new Builder(documentId, principalId);
        }

        public DocumentActionParameters build() {
            return new DocumentActionParameters(this);
        }

        public String getDocumentId() {
            return documentId;
        }

        /**
         * @param documentId the id of the target document
         * @throws IllegalArgumentException if documentId is null or blank
         */
        public void setDocumentId(String documentId) {
            if (StringUtils.isBlank(documentId)) {
                throw new IllegalArgumentException("documentId was null or blank");
            }
            this.documentId = documentId;
        }

        public String getPrincipalId() {
            return principalId;
        }

        /**
         * @param principalId the id of the acting principal
         * @throws IllegalArgumentException if principalId is null or blank
         */
        public void setPrincipalId(String principalId) {
            if (StringUtils.isBlank(principalId)) {
                throw new IllegalArgumentException("principalId was null or blank");
            }
            this.principalId = principalId;
        }

        public String getAnnotation() {
            return annotation;
        }

        public void setAnnotation(String annotation) {
            this.annotation = annotation;
        }

        public DocumentUpdate getDocumentUpdate() {
            return documentUpdate;
        }

        public void setDocumentUpdate(DocumentUpdate documentUpdate) {
            this.documentUpdate = documentUpdate;
        }

        public DocumentContentUpdate getDocumentContentUpdate() {
            return documentContentUpdate;
        }

        public void setDocumentContentUpdate(DocumentContentUpdate documentContentUpdate) {
            this.documentContentUpdate = documentContentUpdate;
        }

    }

    /**
     * Defines some internal constants used on this class.
     */
    static class Constants {
        final static String ROOT_ELEMENT_NAME = "documentActionParameters";
        final static String TYPE_NAME = "DocumentActionParametersType";
    }

    /**
     * A private class which exposes constants which define the XML element names to use when this object is marshalled to XML.
     */
    static class Elements {
        final static String DOCUMENT_ID = "documentId";
        final static String PRINCIPAL_ID = "principalId";
        final static String ANNOTATION = "annotation";
        final static String DOCUMENT_UPDATE = "documentUpdate";
        final static String DOCUMENT_CONTENT_UPDATE = "documentContentUpdate";
    }

}
| |
/*
* MegaMek - Copyright (C) 2000,2001,2002,2003,2004 Ben Mazur (bmazur@sev.org)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
/*
* MechView.java
*
* Created on January 20, 2003 by Ryan McConnell
*/
package megamek.client;
import java.util.Enumeration;
import java.util.Vector;
import megamek.common.*;
/**
* A utility class for retrieving mech information in a formatted string.
*
*/
public class MechView {
// The entity being summarized. Despite the field name this may be any
// Entity subtype; the flags below record which one it actually is.
private Entity mech;
private boolean isMech;      // entity is a Mech
private boolean isInf;       // entity is Infantry
private boolean isVehicle;   // entity is a Tank
private boolean isProto;     // entity is a Protomech
// Accumulators for the two readout sections; filled by the constructor.
StringBuffer sBasic = new StringBuffer();
StringBuffer sLoadout = new StringBuffer();
public MechView(Entity entity) {
mech = entity;
isMech = entity instanceof Mech;
isInf = entity instanceof Infantry;
isVehicle = entity instanceof Tank;
isProto = entity instanceof Protomech;
sLoadout.append( getWeapons() )
.append("\n")
.append(getAmmo())
.append("\n")
.append(getMisc()) //has to occur before basic is processed
.append("\n")
.append(getFailed());
sBasic.append( mech.getShortName() );
sBasic.append("\n");
if ( !isInf ) {
sBasic.append( Math.round(mech.getWeight()) )
.append(" tons " );
}
sBasic.append(TechConstants.T_NAMES[mech.getTechLevel()]);
sBasic.append("\n");
if ( mech.hasC3M() || mech.hasC3S() || mech.hasC3i()) {
sBasic.append( "Linked c3 BV: ");
sBasic.append( mech.calculateBattleValue(true) );
}
sBasic.append("\n");
sBasic.append( "Movement: " )
.append( mech.getWalkMP() )
.append( "/" )
.append( mech.getRunMPasString() );
if (mech.getJumpMP() > 0) {
sBasic.append( "/" )
.append( mech.getJumpMP() );
}
if (isVehicle) {
sBasic.append(" (")
.append(entity.getMovementTypeAsString())
.append(")");
}
sBasic.append( "\n" );
if ( isMech ) {
Mech aMech = (Mech) mech;
sBasic.append( "Engine: " )
.append( aMech.engineRating() );
if (aMech.hasXL()) {
sBasic.append( " XL" );
}
if (aMech.hasLightEngine()) {
sBasic.append( " Light" );
}
sBasic.append("\n");
sBasic.append( "Heat Sinks: " )
.append( aMech.heatSinks() );
if (aMech.getHeatCapacity() > aMech.heatSinks()) {
sBasic.append( " [" )
.append( aMech.getHeatCapacity() )
.append( "]" );
}
sBasic.append("\n");
}
sBasic.append("\n")
.append( getInternalAndArmor() );
}
public String getMechReadoutBasic() {
return sBasic.toString();
}
public String getMechReadoutLoadout() {
return sLoadout.toString();
}
public String getMechReadout() {
return getMechReadoutBasic() + "\n" + getMechReadoutLoadout();
}
private String getInternalAndArmor() {
StringBuffer sIntArm = new StringBuffer();
int maxArmor = mech.getTotalInternal() * 2 + 3;
sIntArm.append( "Internal: " )
.append( mech.getTotalInternal() );
if (isMech && ((Mech)mech).hasEndo()) {
sIntArm.append(" (Endo Steel)");
}
sIntArm.append( "\n" );
sIntArm.append("Armor: ")
.append( mech.getTotalArmor() );
if ( isMech ) {
sIntArm.append( "/" )
.append( maxArmor );
if (((Mech)mech).hasFerro()) {
sIntArm.append(" (Ferro-Fibrous)");
}
}
sIntArm.append( "\n" );
// Walk through the entity's locations.
for ( int loc = 0; loc < mech.locations(); loc++ ) {
// Skip empty sections.
if ( Entity.ARMOR_NA == mech.getInternal(loc) ||
( isVehicle && (( loc == Tank.LOC_TURRET &&
((Tank)mech).hasNoTurret() ) ||
(loc == Tank.LOC_BODY))) ) {
continue;
}
if ( mech.getLocationAbbr(loc).length() < 2 ) {
sIntArm.append( " " );
}
sIntArm.append( mech.getLocationAbbr(loc) )
.append( ": " );
sIntArm.append( renderArmor(mech.getInternal(loc)) )
.append(" ");
if ( Entity.ARMOR_NA != mech.getArmor(loc) ) {
sIntArm.append( renderArmor(mech.getArmor(loc)) );
}
if ( mech.hasRearArmor(loc) ) {
sIntArm.append( " (" )
.append( renderArmor(mech.getArmor(loc, true)) )
.append( ")" );
}
sIntArm.append( "\n" );
}
return sIntArm.toString();
}
private String getWeapons() {
StringBuffer sWeapons = new StringBuffer();
Vector vWeapons = mech.getWeaponList();
for (int j = 0; j < vWeapons.size(); j++) {
Mounted mounted = (Mounted) vWeapons.elementAt(j);
WeaponType wtype = (WeaponType)mounted.getType();
sWeapons.append( mounted.getDesc() )
.append( " [" )
.append( mech.getLocationAbbr(mounted.getLocation()) )
.append( "]" );
if (mech.isClan() &&
mounted.getType().getInternalName().substring(0,2).equals("IS")) {
sWeapons.append(" (IS)");
}
if (!mech.isClan() &&
mounted.getType().getInternalName().substring(0,2).equals("CL")) {
sWeapons.append(" (Clan)");
}
if (wtype.hasFlag(WeaponType.F_ONESHOT)) {
sWeapons.append(" <")
.append(mounted.getLinked().getDesc())
.append(">");
}
sWeapons.append(" ").append(wtype.getHeat()).append(" Heat");
sWeapons.append("\n");
}
return sWeapons.toString();
}
private String getAmmo() {
Enumeration eAmmo = mech.getAmmo();
StringBuffer sAmmo = new StringBuffer();
while (eAmmo.hasMoreElements()) {
Mounted mounted = (Mounted)eAmmo.nextElement();
if (mounted.getLocation() != Entity.LOC_NONE) {
sAmmo.append( mounted.getDesc() )
.append( " [" )
.append( mech.getLocationAbbr(mounted.getLocation()) )
.append( "]\n" );
}
}
return sAmmo.toString();
}
private String getMisc() {
StringBuffer sMisc = new StringBuffer();
Enumeration eMisc = mech.getMisc();
while (eMisc.hasMoreElements()) {
Mounted mounted = (Mounted)eMisc.nextElement();
if ( mounted.getDesc().indexOf("Jump Jet") != -1 ||
( mounted.getDesc().indexOf("CASE") != -1 &&
mech.isClan() ) ||
mounted.getDesc().indexOf("Heat Sink") != -1 ||
mounted.getDesc().indexOf("Endo Steel") != -1 ||
mounted.getDesc().indexOf("Ferro-Fibrous") != -1) {
// These items are displayed elsewhere, so skip them here.
continue;
}
sMisc.append( mounted.getDesc() )
.append( " [" )
.append( mech.getLocationAbbr(mounted.getLocation()) )
.append( "]" );
if (mech.isClan() &&
mounted.getType().getInternalName().substring(0,2).equals("IS")) {
sMisc.append(" (IS)");
}
if (!mech.isClan() &&
mounted.getType().getInternalName().substring(0,2).equals("CL")) {
sMisc.append(" (Clan)");
}
sMisc.append("\n");
}
String capacity = mech.getUnusedString();
if ( capacity != null && capacity.length() > 0 ) {
sMisc.append( "\nCarrying Capacity:\n" )
.append( capacity )
.append( "\n" );
}
return sMisc.toString();
}
private String getFailed() {
StringBuffer sFailed = new StringBuffer();
Enumeration eFailed = mech.getFailedEquipment();
if (eFailed.hasMoreElements()) {
sFailed.append("The following equipment\n slots failed to load:\n");
while (eFailed.hasMoreElements()) {
sFailed.append(eFailed.nextElement()).append("\n");
}
}
return sFailed.toString();
}
private static String renderArmor(int nArmor)
{
if (nArmor <= 0) {
return "xx";
}
else {
return makeLength(String.valueOf(nArmor), 2, true);
}
}
private static final String SPACES = " ";
private static String makeLength(String s, int n, boolean bRightJustify)
{
int l = s.length();
if (l == n) {
return s;
}
else if (l < n) {
if (bRightJustify) {
return SPACES.substring(0, n - l) + s;
}
else {
return s + SPACES.substring(0, n - l);
}
}
else {
return s.substring(0, n - 2) + "..";
}
}
}
| |
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import org.apache.commons.io.FileUtils;
import static org.openqa.selenium.Ignore.Driver.CHROME;
import static org.openqa.selenium.Ignore.Driver.FIREFOX;
import static org.openqa.selenium.Ignore.Driver.IE;
import static org.openqa.selenium.Ignore.Driver.IPHONE;
import static org.openqa.selenium.Ignore.Driver.REMOTE;
import static org.openqa.selenium.Ignore.Driver.SELENESE;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
/**
 * Tests for {@code JavascriptExecutor#executeScript}: return-type coercion
 * (String/Long/Boolean/WebElement/lists), argument passing (primitives,
 * WebElements, arrays, collections), and error propagation.
 *
 * Each test bails out early when the current driver does not implement
 * {@link JavascriptExecutor}, so the suite is safe to run against
 * non-JS-capable drivers.
 */
public class ExecutingJavascriptTest extends AbstractDriverTestCase {
    @JavascriptEnabled
    public void testShouldBeAbleToExecuteSimpleJavascriptAndReturnAString() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(xhtmlTestPage);
        Object result = executeScript("return document.title;");
        assertTrue(result instanceof String);
        assertEquals("XHTML Test Page", result);
    }
    @JavascriptEnabled
    public void testShouldBeAbleToExecuteSimpleJavascriptAndReturnALong() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(nestedPage);
        Object result = executeScript("return document.getElementsByName('checky').length;");
        assertTrue(result.getClass().getName(), result instanceof Long);
        assertTrue((Long) result > 1);
    }
    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testShouldBeAbleToExecuteSimpleJavascriptAndReturnAWebElement() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(xhtmlTestPage);
        Object result = executeScript("return document.getElementById('id1');");
        assertNotNull(result);
        assertTrue("Expected WebElement, got: " + result.getClass(), result instanceof WebElement);
    }
    @JavascriptEnabled
    public void testShouldBeAbleToExecuteSimpleJavascriptAndReturnABoolean() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(xhtmlTestPage);
        Object result = executeScript("return true;");
        assertNotNull(result);
        assertTrue(result instanceof Boolean);
        assertTrue((Boolean) result);
    }
    @SuppressWarnings("unchecked")
    @JavascriptEnabled
    public void testShouldBeAbleToExecuteSimpleJavascriptAndAStringsArray() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        List<Object> expectedResult = new ArrayList<Object>();
        expectedResult.add("zero");
        expectedResult.add("one");
        expectedResult.add("two");
        Object result = ((JavascriptExecutor) driver).executeScript(
            "return ['zero', 'one', 'two'];");
        // BUG FIX: the comparison result was previously discarded, so this
        // test could never fail. Assert it explicitly.
        assertTrue(ExecutingJavascriptTest.compareLists(expectedResult, (List<Object>) result));
    }
    @SuppressWarnings("unchecked")
    @JavascriptEnabled
    @Ignore({SELENESE, IPHONE})
    public void testShouldBeAbleToExecuteSimpleJavascriptAndReturnAnArray() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        List<Object> expectedResult = new ArrayList<Object>();
        expectedResult.add("zero");
        List<Object> subList = new ArrayList<Object>();
        subList.add(true);
        subList.add(false);
        expectedResult.add(subList);
        Object result = executeScript("return ['zero', [true, false]];");
        assertNotNull(result);
        assertTrue("result was: " + result + " (" + result.getClass() + ")", result instanceof List);
        List<Object> list = (List<Object>) result;
        assertTrue(compareLists(expectedResult, list));
    }
    /**
     * Deep-compares two lists element by element, recursing into nested
     * lists.
     *
     * @return true when both lists have the same size and pairwise-equal
     *         (or recursively equal) elements
     */
    private static boolean compareLists(List<?> first, List<?> second) {
        if (first.size() != second.size()) {
            return false;
        }
        for (int i = 0; i < first.size(); ++i) {
            if (first.get(i) instanceof List<?>) {
                // BUG FIX: this previously checked "second instanceof List<?>",
                // which is always true (second is a List), so a non-list
                // element in `second` caused a ClassCastException below
                // instead of returning false.
                if (!(second.get(i) instanceof List<?>)) {
                    return false;
                } else {
                    if (!compareLists((List<?>) first.get(i), (List<?>) second.get(i))) {
                        return false;
                    }
                }
            } else {
                if (!first.get(i).equals(second.get(i))) {
                    return false;
                }
            }
        }
        return true;
    }
    @JavascriptEnabled
    public void testPassingAndReturningALongShouldReturnAWholeNumber() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        Long expectedResult = 1L;
        Object result = executeScript("return arguments[0];", expectedResult);
        assertTrue("Expected result to be an Integer or Long but was a " +
            result.getClass(), result instanceof Integer || result instanceof Long);
        assertEquals(expectedResult.longValue(), result);
    }
    @JavascriptEnabled
    public void testPassingAndReturningADoubleShouldReturnADecimal() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        Double expectedResult = 1.2;
        Object result = executeScript("return arguments[0];", expectedResult);
        assertTrue("Expected result to be a Double or Float but was a " +
            result.getClass(), result instanceof Float || result instanceof Double);
        assertEquals(expectedResult.doubleValue(), result);
    }
    @JavascriptEnabled
    public void testShouldThrowAnExceptionWhenTheJavascriptIsBad() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(xhtmlTestPage);
        try {
            executeScript("return squiggle();");
            fail("Expected an exception");
        } catch (Exception e) {
            // This is expected
        }
    }
    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testShouldBeAbleToCallFunctionsDefinedOnThePage() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        executeScript("displayMessage('I like cheese');");
        String text = driver.findElement(By.id("result")).getText();
        assertEquals("I like cheese", text.trim());
    }
    /** Convenience wrapper so tests don't repeat the executor cast. */
    private Object executeScript(String script, Object... args) {
        return ((JavascriptExecutor) driver).executeScript(script, args);
    }
    @JavascriptEnabled
    public void testShouldBeAbleToPassAStringAnAsArgument() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        String
            value =
            (String) executeScript("return arguments[0] == 'fish' ? 'fish' : 'not fish';", "fish");
        assertEquals("fish", value);
    }
    @JavascriptEnabled
    public void testShouldBeAbleToPassABooleanAnAsArgument() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        boolean value = (Boolean) executeScript("return arguments[0] == true;", true);
        assertTrue(value);
    }
    @JavascriptEnabled
    public void testShouldBeAbleToPassANumberAnAsArgument() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        boolean value = (Boolean) executeScript("return arguments[0] == 1 ? true : false;", 1);
        assertTrue(value);
    }
    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testShouldBeAbleToPassAWebElementAsArgument() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        WebElement button = driver.findElement(By.id("plainButton"));
        String value = (String) executeScript(
            "arguments[0]['flibble'] = arguments[0].getAttribute('id'); return arguments[0]['flibble'];",
            button);
        assertEquals("plainButton", value);
    }
    @JavascriptEnabled
    @Ignore({IE, SELENESE})
    public void testShouldBeAbleToPassAnArrayAsArgument() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        Object[] array = new Object[]{"zero", 1, true, 3.14159};
        long length = (Long) executeScript("return arguments[0].length", array);
        assertEquals(array.length, length);
    }
    @JavascriptEnabled
    @Ignore({IE, SELENESE})
    public void testShouldBeAbleToPassACollectionAsArgument() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        Collection<Object> collection = new ArrayList<Object>();
        collection.add("Cheddar");
        collection.add("Brie");
        collection.add(7);
        long length = (Long) executeScript("return arguments[0].length", collection);
        assertEquals(collection.size(), length);
        // Repeat with a Set: duplicates collapse, so size differs from adds.
        collection = new HashSet<Object>();
        collection.add("Gouda");
        collection.add("Stilton");
        collection.add("Stilton");
        collection.add(true);
        length = (Long) executeScript("return arguments[0].length", collection);
        assertEquals(collection.size(), length);
    }
    @JavascriptEnabled
    public void testShouldThrowAnExceptionIfAnArgumentIsNotValid() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        try {
            executeScript("return arguments[0];", driver);
            fail("Exception should have been thrown");
        } catch (IllegalArgumentException e) {
            // this is expected
        }
    }
    @JavascriptEnabled
    public void testShouldBeAbleToPassInMoreThanOneArgument() {
        if (!(driver instanceof JavascriptExecutor)) {
            return;
        }
        driver.get(javascriptPage);
        String result = (String) executeScript("return arguments[0] + arguments[1];", "one", "two");
        assertEquals("onetwo", result);
    }
    @Ignore(value = {CHROME, SELENESE, IPHONE},
        reason = "Can't execute script in iframe, track crbug 20773\n"
            + "iPhone: Frame switching not yet implemented.")
    @JavascriptEnabled
    public void testShouldBeAbleToGrabTheBodyOfFrameOnceSwitchedTo() {
        driver.get(richTextPage);
        driver.switchTo().frame("editFrame");
        WebElement body =
            (WebElement) ((JavascriptExecutor) driver).executeScript("return document.body");
        assertEquals("", body.getText());
    }
    @SuppressWarnings("unchecked")
    @Ignore
    public void testShouldBeAbleToReturnAnArrayOfWebElements() {
        driver.get(formPage);
        List<WebElement> items = (List<WebElement>) ((JavascriptExecutor) driver)
            .executeScript("return document.getElementsByName('snack');");
        assertTrue(items.size() > 0);
    }
    @JavascriptEnabled
    @Ignore(SELENESE)
    public void testJavascriptStringHandlingShouldWorkAsExpected() {
        driver.get(javascriptPage);
        String value = (String) executeScript("return '';");
        assertEquals("", value);
        value = (String) executeScript("return undefined;");
        assertNull(value);
        value = (String) executeScript("return ' '");
        assertEquals(" ", value);
    }
    @JavascriptEnabled
    @Ignore({CHROME, FIREFOX})
    public void testShouldBeAbleToExecuteABigChunkOfJavascriptCode() throws IOException {
        driver.get(javascriptPage);
        // Locate the jQuery fixture relative to whichever working directory
        // the suite happens to run from.
        File jqueryFile = new File("common/src/web/jquery-1.3.2.js");
        if(!jqueryFile.isFile()) {
            jqueryFile = new File("../common/src/web/jquery-1.3.2.js");
            if(!jqueryFile.isFile()) {
                jqueryFile = new File("../../common/src/web/jquery-1.3.2.js");
            }
        }
        String jquery = FileUtils.readFileToString(jqueryFile, "US-ASCII");
        assertTrue("The javascript code should be at least 50 KB.", jquery.length() > 50000);
        // This should not throw an exception ...
        executeScript(jquery);
    }
    @SuppressWarnings("unchecked")
    @JavascriptEnabled
    @Ignore({SELENESE, CHROME, REMOTE, IPHONE})
    public void testShouldBeAbleToExecuteScriptAndReturnElementsList() {
        driver.get(formPage);
        String scriptToExec = "return document.getElementsByName('snack');";
        List<WebElement> resultsList = (List<WebElement>) ((JavascriptExecutor) driver)
            .executeScript(scriptToExec);
        assertTrue(resultsList.size() > 0);
    }
    @NeedsFreshDriver
    @NoDriverAfterTest
    @Ignore //Reason for ignore: Failure indicates hang condition,
    //which would break the test suite. Really needs a timeout set.
    public void testShouldThrowExceptionIfExecutingOnNoPage() {
        try {
            ((JavascriptExecutor)driver).executeScript("return 1;");
        } catch (WebDriverException e) {
            //Expected
            return;
        }
        fail("Expected exception to be thrown");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nginx.unit.websocket;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;
import javax.websocket.Extension;
import javax.websocket.Extension.Parameter;
import javax.websocket.SendHandler;
import org.apache.tomcat.util.res.StringManager;
public class PerMessageDeflate implements Transformation {
private static final StringManager sm = StringManager.getManager(PerMessageDeflate.class);
private static final String SERVER_NO_CONTEXT_TAKEOVER = "server_no_context_takeover";
private static final String CLIENT_NO_CONTEXT_TAKEOVER = "client_no_context_takeover";
private static final String SERVER_MAX_WINDOW_BITS = "server_max_window_bits";
private static final String CLIENT_MAX_WINDOW_BITS = "client_max_window_bits";
private static final int RSV_BITMASK = 0b100;
private static final byte[] EOM_BYTES = new byte[] {0, 0, -1, -1};
public static final String NAME = "permessage-deflate";
private final boolean serverContextTakeover;
private final int serverMaxWindowBits;
private final boolean clientContextTakeover;
private final int clientMaxWindowBits;
private final boolean isServer;
private final Inflater inflater = new Inflater(true);
private final ByteBuffer readBuffer = ByteBuffer.allocate(Constants.DEFAULT_BUFFER_SIZE);
private final Deflater deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true);
private final byte[] EOM_BUFFER = new byte[EOM_BYTES.length + 1];
private volatile Transformation next;
private volatile boolean skipDecompression = false;
private volatile ByteBuffer writeBuffer = ByteBuffer.allocate(Constants.DEFAULT_BUFFER_SIZE);
private volatile boolean firstCompressedFrameWritten = false;
// Flag to track if a message is completely empty
private volatile boolean emptyMessage = true;
static PerMessageDeflate negotiate(List<List<Parameter>> preferences, boolean isServer) {
// Accept the first preference that the endpoint is able to support
for (List<Parameter> preference : preferences) {
boolean ok = true;
boolean serverContextTakeover = true;
int serverMaxWindowBits = -1;
boolean clientContextTakeover = true;
int clientMaxWindowBits = -1;
for (Parameter param : preference) {
if (SERVER_NO_CONTEXT_TAKEOVER.equals(param.getName())) {
if (serverContextTakeover) {
serverContextTakeover = false;
} else {
// Duplicate definition
throw new IllegalArgumentException(sm.getString(
"perMessageDeflate.duplicateParameter",
SERVER_NO_CONTEXT_TAKEOVER ));
}
} else if (CLIENT_NO_CONTEXT_TAKEOVER.equals(param.getName())) {
if (clientContextTakeover) {
clientContextTakeover = false;
} else {
// Duplicate definition
throw new IllegalArgumentException(sm.getString(
"perMessageDeflate.duplicateParameter",
CLIENT_NO_CONTEXT_TAKEOVER ));
}
} else if (SERVER_MAX_WINDOW_BITS.equals(param.getName())) {
if (serverMaxWindowBits == -1) {
serverMaxWindowBits = Integer.parseInt(param.getValue());
if (serverMaxWindowBits < 8 || serverMaxWindowBits > 15) {
throw new IllegalArgumentException(sm.getString(
"perMessageDeflate.invalidWindowSize",
SERVER_MAX_WINDOW_BITS,
Integer.valueOf(serverMaxWindowBits)));
}
// Java SE API (as of Java 8) does not expose the API to
// control the Window size. It is effectively hard-coded
// to 15
if (isServer && serverMaxWindowBits != 15) {
ok = false;
break;
// Note server window size is not an issue for the
// client since the client will assume 15 and if the
// server uses a smaller window everything will
// still work
}
} else {
// Duplicate definition
throw new IllegalArgumentException(sm.getString(
"perMessageDeflate.duplicateParameter",
SERVER_MAX_WINDOW_BITS ));
}
} else if (CLIENT_MAX_WINDOW_BITS.equals(param.getName())) {
if (clientMaxWindowBits == -1) {
if (param.getValue() == null) {
// Hint to server that the client supports this
// option. Java SE API (as of Java 8) does not
// expose the API to control the Window size. It is
// effectively hard-coded to 15
clientMaxWindowBits = 15;
} else {
clientMaxWindowBits = Integer.parseInt(param.getValue());
if (clientMaxWindowBits < 8 || clientMaxWindowBits > 15) {
throw new IllegalArgumentException(sm.getString(
"perMessageDeflate.invalidWindowSize",
CLIENT_MAX_WINDOW_BITS,
Integer.valueOf(clientMaxWindowBits)));
}
}
// Java SE API (as of Java 8) does not expose the API to
// control the Window size. It is effectively hard-coded
// to 15
if (!isServer && clientMaxWindowBits != 15) {
ok = false;
break;
// Note client window size is not an issue for the
// server since the server will assume 15 and if the
// client uses a smaller window everything will
// still work
}
} else {
// Duplicate definition
throw new IllegalArgumentException(sm.getString(
"perMessageDeflate.duplicateParameter",
CLIENT_MAX_WINDOW_BITS ));
}
} else {
// Unknown parameter
throw new IllegalArgumentException(sm.getString(
"perMessageDeflate.unknownParameter", param.getName()));
}
}
if (ok) {
return new PerMessageDeflate(serverContextTakeover, serverMaxWindowBits,
clientContextTakeover, clientMaxWindowBits, isServer);
}
}
// Failed to negotiate agreeable terms
return null;
}
private PerMessageDeflate(boolean serverContextTakeover, int serverMaxWindowBits,
boolean clientContextTakeover, int clientMaxWindowBits, boolean isServer) {
this.serverContextTakeover = serverContextTakeover;
this.serverMaxWindowBits = serverMaxWindowBits;
this.clientContextTakeover = clientContextTakeover;
this.clientMaxWindowBits = clientMaxWindowBits;
this.isServer = isServer;
}
@Override
public TransformationResult getMoreData(byte opCode, boolean fin, int rsv, ByteBuffer dest)
throws IOException {
// Control frames are never compressed and may appear in the middle of
// a WebSocket method. Pass them straight through.
if (Util.isControl(opCode)) {
return next.getMoreData(opCode, fin, rsv, dest);
}
if (!Util.isContinuation(opCode)) {
// First frame in new message
skipDecompression = (rsv & RSV_BITMASK) == 0;
}
// Pass uncompressed frames straight through.
if (skipDecompression) {
return next.getMoreData(opCode, fin, rsv, dest);
}
int written;
boolean usedEomBytes = false;
while (dest.remaining() > 0) {
// Space available in destination. Try and fill it.
try {
written = inflater.inflate(
dest.array(), dest.arrayOffset() + dest.position(), dest.remaining());
} catch (DataFormatException e) {
throw new IOException(sm.getString("perMessageDeflate.deflateFailed"), e);
}
dest.position(dest.position() + written);
if (inflater.needsInput() && !usedEomBytes ) {
if (dest.hasRemaining()) {
readBuffer.clear();
TransformationResult nextResult =
next.getMoreData(opCode, fin, (rsv ^ RSV_BITMASK), readBuffer);
inflater.setInput(
readBuffer.array(), readBuffer.arrayOffset(), readBuffer.position());
if (TransformationResult.UNDERFLOW.equals(nextResult)) {
return nextResult;
} else if (TransformationResult.END_OF_FRAME.equals(nextResult) &&
readBuffer.position() == 0) {
if (fin) {
inflater.setInput(EOM_BYTES);
usedEomBytes = true;
} else {
return TransformationResult.END_OF_FRAME;
}
}
}
} else if (written == 0) {
if (fin && (isServer && !clientContextTakeover ||
!isServer && !serverContextTakeover)) {
inflater.reset();
}
return TransformationResult.END_OF_FRAME;
}
}
return TransformationResult.OVERFLOW;
}
@Override
public boolean validateRsv(int rsv, byte opCode) {
if (Util.isControl(opCode)) {
if ((rsv & RSV_BITMASK) != 0) {
return false;
} else {
if (next == null) {
return true;
} else {
return next.validateRsv(rsv, opCode);
}
}
} else {
int rsvNext = rsv;
if ((rsv & RSV_BITMASK) != 0) {
rsvNext = rsv ^ RSV_BITMASK;
}
if (next == null) {
return true;
} else {
return next.validateRsv(rsvNext, opCode);
}
}
}
@Override
public Extension getExtensionResponse() {
Extension result = new WsExtension(NAME);
List<Extension.Parameter> params = result.getParameters();
if (!serverContextTakeover) {
params.add(new WsExtensionParameter(SERVER_NO_CONTEXT_TAKEOVER, null));
}
if (serverMaxWindowBits != -1) {
params.add(new WsExtensionParameter(SERVER_MAX_WINDOW_BITS,
Integer.toString(serverMaxWindowBits)));
}
if (!clientContextTakeover) {
params.add(new WsExtensionParameter(CLIENT_NO_CONTEXT_TAKEOVER, null));
}
if (clientMaxWindowBits != -1) {
params.add(new WsExtensionParameter(CLIENT_MAX_WINDOW_BITS,
Integer.toString(clientMaxWindowBits)));
}
return result;
}
@Override
public void setNext(Transformation t) {
if (next == null) {
this.next = t;
} else {
next.setNext(t);
}
}
@Override
public boolean validateRsvBits(int i) {
if ((i & RSV_BITMASK) != 0) {
return false;
}
if (next == null) {
return true;
} else {
return next.validateRsvBits(i | RSV_BITMASK);
}
}
@Override
public List<MessagePart> sendMessagePart(List<MessagePart> uncompressedParts) {
List<MessagePart> allCompressedParts = new ArrayList<>();
for (MessagePart uncompressedPart : uncompressedParts) {
byte opCode = uncompressedPart.getOpCode();
boolean emptyPart = uncompressedPart.getPayload().limit() == 0;
emptyMessage = emptyMessage && emptyPart;
if (Util.isControl(opCode)) {
// Control messages can appear in the middle of other messages
// and must not be compressed. Pass it straight through
allCompressedParts.add(uncompressedPart);
} else if (emptyMessage && uncompressedPart.isFin()) {
// Zero length messages can't be compressed so pass the
// final (empty) part straight through.
allCompressedParts.add(uncompressedPart);
} else {
List<MessagePart> compressedParts = new ArrayList<>();
ByteBuffer uncompressedPayload = uncompressedPart.getPayload();
SendHandler uncompressedIntermediateHandler =
uncompressedPart.getIntermediateHandler();
deflater.setInput(uncompressedPayload.array(),
uncompressedPayload.arrayOffset() + uncompressedPayload.position(),
uncompressedPayload.remaining());
int flush = (uncompressedPart.isFin() ? Deflater.SYNC_FLUSH : Deflater.NO_FLUSH);
boolean deflateRequired = true;
while (deflateRequired) {
ByteBuffer compressedPayload = writeBuffer;
int written = deflater.deflate(compressedPayload.array(),
compressedPayload.arrayOffset() + compressedPayload.position(),
compressedPayload.remaining(), flush);
compressedPayload.position(compressedPayload.position() + written);
if (!uncompressedPart.isFin() && compressedPayload.hasRemaining() && deflater.needsInput()) {
// This message part has been fully processed by the
// deflater. Fire the send handler for this message part
// and move on to the next message part.
break;
}
// If this point is reached, a new compressed message part
// will be created...
MessagePart compressedPart;
// .. and a new writeBuffer will be required.
writeBuffer = ByteBuffer.allocate(Constants.DEFAULT_BUFFER_SIZE);
// Flip the compressed payload ready for writing
compressedPayload.flip();
boolean fin = uncompressedPart.isFin();
boolean full = compressedPayload.limit() == compressedPayload.capacity();
boolean needsInput = deflater.needsInput();
long blockingWriteTimeoutExpiry = uncompressedPart.getBlockingWriteTimeoutExpiry();
if (fin && !full && needsInput) {
// End of compressed message. Drop EOM bytes and output.
compressedPayload.limit(compressedPayload.limit() - EOM_BYTES.length);
compressedPart = new MessagePart(true, getRsv(uncompressedPart),
opCode, compressedPayload, uncompressedIntermediateHandler,
uncompressedIntermediateHandler, blockingWriteTimeoutExpiry);
deflateRequired = false;
startNewMessage();
} else if (full && !needsInput) {
// Write buffer full and input message not fully read.
// Output and start new compressed part.
compressedPart = new MessagePart(false, getRsv(uncompressedPart),
opCode, compressedPayload, uncompressedIntermediateHandler,
uncompressedIntermediateHandler, blockingWriteTimeoutExpiry);
} else if (!fin && full && needsInput) {
// Write buffer full and input message not fully read.
// Output and get more data.
compressedPart = new MessagePart(false, getRsv(uncompressedPart),
opCode, compressedPayload, uncompressedIntermediateHandler,
uncompressedIntermediateHandler, blockingWriteTimeoutExpiry);
deflateRequired = false;
} else if (fin && full && needsInput) {
// Write buffer full. Input fully read. Deflater may be
// in one of four states:
// - output complete (just happened to align with end of
// buffer
// - in middle of EOM bytes
// - about to write EOM bytes
// - more data to write
int eomBufferWritten = deflater.deflate(EOM_BUFFER, 0, EOM_BUFFER.length, Deflater.SYNC_FLUSH);
if (eomBufferWritten < EOM_BUFFER.length) {
// EOM has just been completed
compressedPayload.limit(compressedPayload.limit() - EOM_BYTES.length + eomBufferWritten);
compressedPart = new MessagePart(true,
getRsv(uncompressedPart), opCode, compressedPayload,
uncompressedIntermediateHandler, uncompressedIntermediateHandler,
blockingWriteTimeoutExpiry);
deflateRequired = false;
startNewMessage();
} else {
// More data to write
// Copy bytes to new write buffer
writeBuffer.put(EOM_BUFFER, 0, eomBufferWritten);
compressedPart = new MessagePart(false,
getRsv(uncompressedPart), opCode, compressedPayload,
uncompressedIntermediateHandler, uncompressedIntermediateHandler,
blockingWriteTimeoutExpiry);
}
} else {
throw new IllegalStateException("Should never happen");
}
// Add the newly created compressed part to the set of parts
// to pass on to the next transformation.
compressedParts.add(compressedPart);
}
SendHandler uncompressedEndHandler = uncompressedPart.getEndHandler();
int size = compressedParts.size();
if (size > 0) {
compressedParts.get(size - 1).setEndHandler(uncompressedEndHandler);
}
allCompressedParts.addAll(compressedParts);
}
}
if (next == null) {
return allCompressedParts;
} else {
return next.sendMessagePart(allCompressedParts);
}
}
private void startNewMessage() {
    // Reset the per-message state before the next outgoing message begins.
    firstCompressedFrameWritten = false;
    emptyMessage = true;
    // When context takeover is disabled for our role, the compression
    // context must not carry over between messages, so reset the deflater.
    boolean contextTakeover = isServer ? serverContextTakeover : clientContextTakeover;
    if (!contextTakeover) {
        deflater.reset();
    }
}
private int getRsv(MessagePart uncompressedMessagePart) {
    int rsv = uncompressedMessagePart.getRsv();
    if (firstCompressedFrameWritten) {
        // Only the first frame of a compressed message carries the extra bit.
        return rsv;
    }
    firstCompressedFrameWritten = true;
    return rsv + RSV_BITMASK;
}
@Override
public void close() {
    // There will always be a next transformation
    next.close();
    // Release the resources held by the codecs only after the downstream
    // transformation has been closed.
    inflater.end();
    deflater.end();
}
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.portal.service;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import org.sakaiproject.alias.api.Alias;
import org.sakaiproject.alias.api.AliasService;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.portal.api.PortalService;
import org.sakaiproject.portal.api.SiteNeighbourhoodService;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.thread_local.api.ThreadLocalManager;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.user.api.Preferences;
import org.sakaiproject.user.api.PreferencesService;
import org.sakaiproject.user.api.UserDirectoryService;
import org.sakaiproject.user.api.UserNotDefinedException;
import org.sakaiproject.util.comparator.AliasIdComparator;
import lombok.extern.slf4j.Slf4j;
/**
 * Default implementation of {@link SiteNeighbourhoodService}. Builds the
 * ordered list of sites to display for the current user (honouring the
 * user's exclude/order preferences and parent/child site nesting), and
 * translates between site references and site aliases.
 *
 * @author ieb
 */
@Slf4j
public class SiteNeighbourhoodServiceImpl implements SiteNeighbourhoodService
{
	/** Namespace prefix used for site aliases when {@link #useAliasPrefix} is enabled. */
	private static final String SITE_ALIAS = "/sitealias/";

	private SiteService siteService;

	private PreferencesService preferencesService;

	private UserDirectoryService userDirectoryService;

	private ServerConfigurationService serverConfigurationService;

	private AliasService aliasService;

	private ThreadLocalManager threadLocalManager;

	/** Should all site aliases have a prefix */
	private boolean useAliasPrefix = false;

	/** Whether alias lookup/resolution is enabled at all ("portal.use.site.aliases"). */
	private boolean useSiteAliases = false;

	public void init()
	{
		useSiteAliases = serverConfigurationService.getBoolean("portal.use.site.aliases", false);
	}

	public void destroy()
	{
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.sakaiproject.portal.api.SiteNeighbourhoodService#getSitesAtNode(javax.servlet.http.HttpServletRequest,
	 *      org.sakaiproject.tool.api.Session, boolean)
	 */
	public List<Site> getSitesAtNode(HttpServletRequest request, Session session,
			boolean includeMyWorkspace)
	{
		return getAllSites(request, session, includeMyWorkspace);
	}

	/**
	 * Get All Sites for the current user. If the user is not logged in we
	 * return the list of publically viewable gateway sites.
	 *
	 * @param includeMyWorkspace
	 *        When this is true - include the user's My Workspace as the first
	 *        parameter. If false, do not include the MyWorkspace anywhere in
	 *        the list. Some uses - such as the portlet styled portal or the rss
	 *        styled portal simply want all of the sites with the MyWorkspace
	 *        first. Other portals like the basic tabbed portal treats My
	 *        Workspace separately from all of the rest of the workspaces.
	 * @see org.sakaiproject.portal.api.PortalSiteHelper#getAllSites(javax.servlet.http.HttpServletRequest,
	 *      org.sakaiproject.tool.api.Session, boolean)
	 */
	public List<Site> getAllSites(HttpServletRequest req, Session session,
			boolean includeMyWorkspace)
	{
		boolean loggedIn = session.getUserId() != null;
		List<Site> mySites;
		// collect the Publically Viewable Sites
		if (!loggedIn)
		{
			mySites = getGatewaySites();
			return mySites;
		}
		// collect the user's preferences: sites to hide and a preferred tab order
		List<String> prefExclude = new ArrayList<String>();
		List<String> prefOrder = new ArrayList<String>();
		Preferences prefs = preferencesService.getPreferences(session.getUserId());
		ResourceProperties props = prefs.getProperties(PreferencesService.SITENAV_PREFS_KEY);
		List l = props.getPropertyList("exclude");
		if (l != null)
		{
			prefExclude = l;
		}
		l = props.getPropertyList("order");
		if (l != null)
		{
			prefOrder = l;
		}
		// collect the user's sites - don't care whether long descriptions are loaded
		// don't load excluded sites
		mySites = siteService.getUserSites(false, false, prefExclude);
		// Prepare to put sites in the right order
		Vector<Site> ordered = new Vector<Site>();
		Set<String> added = new HashSet<String>();
		// Snapshot of site ids, kept in step with mySites so indexOf lookups work
		List<String> actualOrder = new ArrayList<String>(mySites.size());
		for (Site site : mySites)
		{
			actualOrder.add(site.getId());
		}
		// First, place or remove MyWorkspace as requested
		Site myWorkspace = getMyWorkspace(session);
		if (myWorkspace != null)
		{
			if (includeMyWorkspace)
			{
				ordered.add(myWorkspace);
				added.add(myWorkspace.getId());
			}
			else
			{
				int pos = actualOrder.indexOf(myWorkspace.getId());
				if (pos != -1)
				{
					mySites.remove(pos);
					actualOrder.remove(pos);
				}
			}
		}
		// re-order mySites to have the user's preferred order first, the rest later
		for (String id : prefOrder)
		{
			// find this site in the mySites list
			int pos = actualOrder.indexOf(id);
			if (pos != -1)
			{
				Site s = mySites.get(pos);
				if (!added.contains(s.getId()))
				{
					ordered.add(s);
					added.add(s.getId());
				}
			}
		}
		// We only do the child processing if we have less than 200 sites
		boolean haveChildren = false;
		int siteCount = mySites.size();
		// pick up the rest of the top-level-sites
		for (int i = 0; i < mySites.size(); i++)
		{
			Site s = mySites.get(i);
			if (added.contains(s.getId())) continue;
			// Once the user takes over the order,
			// ignore parent/child sorting put all the sites
			// at the top
			String ourParent = null;
			if (prefOrder.size() == 0)
			{
				ResourceProperties rp = s.getProperties();
				ourParent = rp.getProperty(SiteService.PROP_PARENT_ID);
			}
			log.debug("Top Site:{} parent={}", s.getTitle(), ourParent);
			if (siteCount > 200 || ourParent == null)
			{
				log.debug("Added at root");
				ordered.add(s);
				added.add(s.getId());
			}
			else
			{
				haveChildren = true;
			}
		}
		// If and only if we have some child nodes, we repeatedly
		// pull up children nodes to be behind their parents
		// This is O N**2 - so if we had thousands of sites it
		// it would be costly - hence we only do it for < 200 sites
		// and limited depth - that makes it O(N) not O(N**2)
		boolean addedSites = true;
		int depth = 0;
		while (depth < 20 && addedSites && haveChildren)
		{
			depth++;
			addedSites = false;
			haveChildren = false;
			for (int i = mySites.size() - 1; i >= 0; i--)
			{
				Site s = mySites.get(i);
				if (added.contains(s.getId())) continue;
				ResourceProperties rp = s.getProperties();
				String ourParent = rp.getProperty(SiteService.PROP_PARENT_ID);
				if (ourParent == null) continue;
				haveChildren = true;
				log.debug("Child Site:{} parent={}", s.getTitle(), ourParent);
				// Search the already added pages for a parent
				// or sibling node
				boolean found = false;
				int j = -1;
				for (j = ordered.size() - 1; j >= 0; j--)
				{
					Site ps = ordered.get(j);
					// See if this site is our parent
					if (ourParent.equals(ps.getId()))
					{
						found = true;
						break;
					}
					// See if this site is our sibling
					rp = ps.getProperties();
					String peerParent = rp.getProperty(SiteService.PROP_PARENT_ID);
					if (ourParent.equals(peerParent))
					{
						found = true;
						break;
					}
				}
				// We want to insert *after* the identified node
				j = j + 1;
				if (found && j >= 0 && j < ordered.size())
				{
					log.debug("Added after parent");
					ordered.insertElementAt(s, j);
					added.add(s.getId());
					addedSites = true; // Worth going another level deeper
				}
			}
		} // End while depth
		// If we still have children drop them at the end
		if (haveChildren) for (int i = 0; i < mySites.size(); i++)
		{
			Site s = mySites.get(i);
			if (added.contains(s.getId())) continue;
			log.debug("Orphan Site:{} {}", s.getId(), s.getTitle());
			ordered.add(s);
		}
		// All done
		mySites = ordered;
		return mySites;
	}

	// Get the sites which are to be displayed for the gateway
	/**
	 * @return the configured gateway sites that exist and are visitable; may be
	 *         empty but never null - deal with this higher up in the food chain
	 */
	private List<Site> getGatewaySites()
	{
		List<Site> mySites = new ArrayList<Site>();
		String[] gatewaySiteIds = getGatewaySiteList();
		if (gatewaySiteIds == null)
		{
			return mySites; // An empty list - deal with this higher up in the
			// food chain
		}
		// Loop throught the sites making sure they exist and are visitable
		for (int i = 0; i < gatewaySiteIds.length; i++)
		{
			String siteId = gatewaySiteIds[i];
			Site site = null;
			try
			{
				site = getSiteVisit(siteId);
			}
			catch (IdUnusedException e)
			{
				// Skip sites that no longer exist
				continue;
			}
			catch (PermissionException e)
			{
				// Skip sites the anonymous user may not visit
				continue;
			}
			if (site != null)
			{
				mySites.add(site);
			}
		}
		if (mySites.size() < 1)
		{
			log.warn("No suitable gateway sites found, gatewaySiteList preference had {} sites.",
					gatewaySiteIds.length);
		}
		return mySites;
	}

	/**
	 * @see org.sakaiproject.portal.api.PortalSiteHelper#getMyWorkspace(org.sakaiproject.tool.api.Session)
	 */
	private Site getMyWorkspace(Session session)
	{
		String siteId = siteService.getUserSiteId(session.getUserId());
		// Make sure we can visit
		Site site = null;
		try
		{
			site = getSiteVisit(siteId);
		}
		catch (IdUnusedException e)
		{
			site = null;
		}
		catch (PermissionException e)
		{
			site = null;
		}
		return site;
	}

	// Return the list of tabs for the anonymous view (Gateway)
	// If we have a list of sites, return that - if not simply pull in the
	// single
	// Gateway site
	/**
	 * @return the configured gateway site ids, or null when none are configured
	 */
	private String[] getGatewaySiteList()
	{
		String gatewaySiteListPref = serverConfigurationService
				.getString("gatewaySiteList");
		if (gatewaySiteListPref == null || gatewaySiteListPref.trim().length() < 1)
		{
			// Fall back to the single configured gateway site
			gatewaySiteListPref = serverConfigurationService.getGatewaySiteId();
		}
		if (gatewaySiteListPref == null || gatewaySiteListPref.trim().length() < 1)
			return null;
		String[] gatewaySites = gatewaySiteListPref.split(",");
		if (gatewaySites.length < 1) return null;
		return gatewaySites;
	}

	/**
	 * Do the getSiteVisit, but if not found and the id is a user site, try
	 * translating from user EID to ID.
	 *
	 * @param siteId
	 *        The Site Id.
	 * @return The Site.
	 * @throws PermissionException
	 *         If not allowed.
	 * @throws IdUnusedException
	 *         If not found.
	 */
	public Site getSiteVisit(String siteId) throws PermissionException, IdUnusedException
	{
		try
		{
			return siteService.getSiteVisit(siteId);
		}
		catch (IdUnusedException e)
		{
			if (siteService.isUserSite(siteId))
			{
				try
				{
					String userEid = siteService.getSiteUserId(siteId);
					String userId = userDirectoryService.getUserId(userEid);
					String alternateSiteId = siteService.getUserSiteId(userId);
					return siteService.getSiteVisit(alternateSiteId);
				}
				catch (UserNotDefinedException ee)
				{
					// Fall through and rethrow the original exception below
				}
			}
			// re-throw if that didn't work
			throw e;
		}
	}

	/**
	 * @return the preferencesService
	 */
	public PreferencesService getPreferencesService()
	{
		return preferencesService;
	}

	/**
	 * @param preferencesService
	 *        the preferencesService to set
	 */
	public void setPreferencesService(PreferencesService preferencesService)
	{
		this.preferencesService = preferencesService;
	}

	/**
	 * @return the serverConfigurationService
	 */
	public ServerConfigurationService getServerConfigurationService()
	{
		return serverConfigurationService;
	}

	/**
	 * @param serverConfigurationService
	 *        the serverConfigurationService to set
	 */
	public void setServerConfigurationService(
			ServerConfigurationService serverConfigurationService)
	{
		this.serverConfigurationService = serverConfigurationService;
	}

	/**
	 * @return the siteService
	 */
	public SiteService getSiteService()
	{
		return siteService;
	}

	/**
	 * @param siteService
	 *        the siteService to set
	 */
	public void setSiteService(SiteService siteService)
	{
		this.siteService = siteService;
	}

	/**
	 * @return the userDirectoryService
	 */
	public UserDirectoryService getUserDirectoryService()
	{
		return userDirectoryService;
	}

	/**
	 * @param userDirectoryService
	 *        the userDirectoryService to set
	 */
	public void setUserDirectoryService(UserDirectoryService userDirectoryService)
	{
		this.userDirectoryService = userDirectoryService;
	}

	public void setThreadLocalManager(ThreadLocalManager threadLocalManager)
	{
		this.threadLocalManager = threadLocalManager;
	}

	public String lookupSiteAlias(String id, String context)
	{
		// TODO Constant extraction
		if ("/site/!error".equals(id))
		{
			// The error site: report the site id the user originally asked for
			Object originalId = threadLocalManager.get(PortalService.SAKAI_PORTAL_ORIGINAL_SITEID);
			if (originalId instanceof String)
			{
				return (String) originalId;
			}
		}
		if (!useSiteAliases)
		{
			return null;
		}
		List<Alias> aliases = aliasService.getAliases(id);
		if (aliases.size() > 1)
		{
			// Sort so the alias chosen below is deterministic. Previously the
			// sort was accidentally guarded by log.isInfoEnabled(), making the
			// selected alias depend on the logging configuration.
			log.debug("More than one alias for {} sorting.", id);
			Collections.sort(aliases, new AliasIdComparator());
		}
		for (Alias alias : aliases)
		{
			String aliasId = alias.getId();
			boolean startsWithPrefix = aliasId.startsWith(SITE_ALIAS);
			if (startsWithPrefix)
			{
				if (useAliasPrefix)
				{
					return aliasId.substring(SITE_ALIAS.length());
				}
			}
			else
			{
				if (!useAliasPrefix)
				{
					return aliasId;
				}
			}
		}
		return null;
	}

	public String parseSiteAlias(String alias)
	{
		if (alias == null)
		{
			return null;
		}
		// Prepend site alias prefix if it's being used.
		String id = ((useAliasPrefix) ? SITE_ALIAS : "") + alias;
		try
		{
			String reference = aliasService.getTarget(id);
			return reference;
		}
		catch (IdUnusedException e)
		{
			log.debug("No alias found for {}", id);
		}
		return null;
	}

	public void setAliasService(AliasService aliasService)
	{
		this.aliasService = aliasService;
	}

	public boolean isUseAliasPrefix()
	{
		return useAliasPrefix;
	}

	public void setUseAliasPrefix(boolean useAliasPrefix)
	{
		this.useAliasPrefix = useAliasPrefix;
	}
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.compression;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.MessageToByteEncoder;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.PromiseNotifier;
import java.util.concurrent.TimeUnit;
import static io.netty.handler.codec.compression.Bzip2Constants.BASE_BLOCK_SIZE;
import static io.netty.handler.codec.compression.Bzip2Constants.END_OF_STREAM_MAGIC_1;
import static io.netty.handler.codec.compression.Bzip2Constants.END_OF_STREAM_MAGIC_2;
import static io.netty.handler.codec.compression.Bzip2Constants.MAGIC_NUMBER;
import static io.netty.handler.codec.compression.Bzip2Constants.MAX_BLOCK_SIZE;
import static io.netty.handler.codec.compression.Bzip2Constants.MIN_BLOCK_SIZE;
import static io.netty.handler.codec.compression.Bzip2Constants.THREAD_POOL_DELAY_SECONDS;
/**
 * Compresses a {@link ByteBuf} using the Bzip2 algorithm.
 *
 * See <a href="https://en.wikipedia.org/wiki/Bzip2">Bzip2</a>.
 */
public class Bzip2Encoder extends MessageToByteEncoder<ByteBuf> {
    /**
     * Current state of stream.
     */
    private enum State {
        INIT,
        INIT_BLOCK,
        WRITE_DATA,
        CLOSE_BLOCK
    }

    private State currentState = State.INIT;

    /**
     * A writer that provides bit-level writes.
     */
    private final Bzip2BitWriter writer = new Bzip2BitWriter();

    /**
     * The declared maximum block size of the stream (before final run-length decoding).
     */
    private final int streamBlockSize;

    /**
     * The merged CRC of all blocks compressed so far.
     */
    private int streamCRC;

    /**
     * The compressor for the current block.
     */
    private Bzip2BlockCompressor blockCompressor;

    /**
     * {@code true} if the compressed stream has been finished, otherwise {@code false}.
     */
    private volatile boolean finished;

    /**
     * Used to interact with its {@link ChannelPipeline} and other handlers.
     */
    private volatile ChannelHandlerContext ctx;

    /**
     * Creates a new bzip2 encoder with the maximum (900,000 byte) block size.
     */
    public Bzip2Encoder() {
        this(MAX_BLOCK_SIZE);
    }

    /**
     * Creates a new bzip2 encoder with the specified {@code blockSizeMultiplier}.
     * @param blockSizeMultiplier
     *        The Bzip2 block size as a multiple of 100,000 bytes (minimum {@code 1}, maximum {@code 9}).
     *        Larger block sizes require more memory for both compression and decompression,
     *        but give better compression ratios. {@code 9} will usually be the best value to use.
     */
    public Bzip2Encoder(final int blockSizeMultiplier) {
        if (blockSizeMultiplier < MIN_BLOCK_SIZE || blockSizeMultiplier > MAX_BLOCK_SIZE) {
            throw new IllegalArgumentException(
                    "blockSizeMultiplier: " + blockSizeMultiplier + " (expected: 1-9)");
        }
        streamBlockSize = blockSizeMultiplier * BASE_BLOCK_SIZE;
    }

    @Override
    protected void encode(ChannelHandlerContext ctx, ByteBuf in, ByteBuf out) throws Exception {
        // After the stream footer has been written, pass data through unchanged.
        if (finished) {
            out.writeBytes(in);
            return;
        }
        // State machine: each case deliberately falls through to the next so a
        // single encode() call can emit the header, fill a block and close it.
        for (;;) {
            switch (currentState) {
                case INIT:
                    // Stream header: magic plus the block-size digit ('1'..'9').
                    out.ensureWritable(4);
                    out.writeMedium(MAGIC_NUMBER);
                    out.writeByte('0' + streamBlockSize / BASE_BLOCK_SIZE);
                    currentState = State.INIT_BLOCK;
                    // fall through
                case INIT_BLOCK:
                    blockCompressor = new Bzip2BlockCompressor(writer, streamBlockSize);
                    currentState = State.WRITE_DATA;
                    // fall through
                case WRITE_DATA:
                    if (!in.isReadable()) {
                        return;
                    }
                    Bzip2BlockCompressor blockCompressor = this.blockCompressor;
                    // Feed as much input as the current block can still accept.
                    final int length = Math.min(in.readableBytes(), blockCompressor.availableSize());
                    final int bytesWritten = blockCompressor.write(in, in.readerIndex(), length);
                    in.skipBytes(bytesWritten);
                    if (!blockCompressor.isFull()) {
                        if (in.isReadable()) {
                            break;
                        } else {
                            return;
                        }
                    }
                    currentState = State.CLOSE_BLOCK;
                    // fall through
                case CLOSE_BLOCK:
                    closeBlock(out);
                    currentState = State.INIT_BLOCK;
                    break;
                default:
                    throw new IllegalStateException();
            }
        }
    }

    /**
     * Close current block and update {@link #streamCRC}.
     */
    private void closeBlock(ByteBuf out) {
        final Bzip2BlockCompressor blockCompressor = this.blockCompressor;
        if (!blockCompressor.isEmpty()) {
            blockCompressor.close(out);
            final int blockCRC = blockCompressor.crc();
            // Fold this block's CRC into the stream CRC (rotate left by 1, then XOR).
            streamCRC = (streamCRC << 1 | streamCRC >>> 31) ^ blockCRC;
        }
    }

    /**
     * Returns {@code true} if and only if the end of the compressed stream has been reached.
     */
    public boolean isClosed() {
        return finished;
    }

    /**
     * Close this {@link Bzip2Encoder} and so finish the encoding.
     *
     * The returned {@link ChannelFuture} will be notified once the operation completes.
     */
    public ChannelFuture close() {
        return close(ctx().newPromise());
    }

    /**
     * Close this {@link Bzip2Encoder} and so finish the encoding.
     * The given {@link ChannelFuture} will be notified once the operation
     * completes and will also be returned.
     */
    public ChannelFuture close(final ChannelPromise promise) {
        ChannelHandlerContext ctx = ctx();
        EventExecutor executor = ctx.executor();
        if (executor.inEventLoop()) {
            return finishEncode(ctx, promise);
        } else {
            // Hop onto the event loop; the caller's promise is notified once
            // the footer write completes.
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    ChannelFuture f = finishEncode(ctx(), promise);
                    PromiseNotifier.cascade(f, promise);
                }
            });
            return promise;
        }
    }

    @Override
    public void close(final ChannelHandlerContext ctx, final ChannelPromise promise) throws Exception {
        ChannelFuture f = finishEncode(ctx, ctx.newPromise());
        f.addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture f) throws Exception {
                ctx.close(promise);
            }
        });
        if (!f.isDone()) {
            // Ensure the channel is closed even if the write operation does not
            // complete in time.
            ctx.executor().schedule(new Runnable() {
                @Override
                public void run() {
                    ctx.close(promise);
                }
            }, THREAD_POOL_DELAY_SECONDS, TimeUnit.SECONDS);
        }
    }

    /**
     * Flushes the final block and writes the stream footer (end-of-stream magic
     * plus the combined stream CRC). Idempotent: subsequent calls just succeed.
     */
    private ChannelFuture finishEncode(final ChannelHandlerContext ctx, ChannelPromise promise) {
        if (finished) {
            promise.setSuccess();
            return promise;
        }
        finished = true;
        final ByteBuf footer = ctx.alloc().buffer();
        closeBlock(footer);
        final int streamCRC = this.streamCRC;
        final Bzip2BitWriter writer = this.writer;
        try {
            writer.writeBits(footer, 24, END_OF_STREAM_MAGIC_1);
            writer.writeBits(footer, 24, END_OF_STREAM_MAGIC_2);
            writer.writeInt(footer, streamCRC);
            writer.flush(footer);
        } finally {
            blockCompressor = null;
        }
        return ctx.writeAndFlush(footer, promise);
    }

    /**
     * Returns the handler context, failing fast if the encoder has not been
     * added to a pipeline yet.
     */
    private ChannelHandlerContext ctx() {
        ChannelHandlerContext ctx = this.ctx;
        if (ctx == null) {
            throw new IllegalStateException("not added to a pipeline");
        }
        return ctx;
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        this.ctx = ctx;
    }
}
| |
package liquibase.util;
import java.util.*;
import java.util.regex.Pattern;
/**
 * Various utility methods for working with strings.
 */
public class StringUtils {

    private static final Pattern upperCasePattern = Pattern.compile(".*[A-Z].*");
    private static final Pattern lowerCasePattern = Pattern.compile(".*[a-z].*");

    /**
     * Returns the trimmed string, or the empty string when the input is null.
     */
    public static String trimToEmpty(String string) {
        if (string == null) {
            return "";
        }
        return string.trim();
    }

    /**
     * Returns the trimmed string, or null when the input is null or trims to empty.
     */
    public static String trimToNull(String string) {
        if (string == null) {
            return null;
        }
        String returnString = string.trim();
        if (returnString.length() == 0) {
            return null;
        } else {
            return returnString;
        }
    }

    /**
     * Removes any comments from multiple line SQL using {@link #stripComments(String)}
     * and then extracts each individual statement using {@link #splitSQL(String, String)}.
     *
     * @param multiLineSQL A String containing all the SQL statements
     * @param stripComments If true then comments will be stripped, if false then they will be left in the code
     * @param splitStatements If true the SQL is split on delimiters, otherwise it is returned as a single statement
     * @param endDelimiter statement delimiter, or null to use the defaults (";", "go", "/")
     */
    public static String[] processMutliLineSQL(String multiLineSQL, boolean stripComments, boolean splitStatements, String endDelimiter) {
        StringClauses parsed = SqlParser.parse(multiLineSQL, true, !stripComments);

        List<String> returnArray = new ArrayList<String>();

        StringBuilder currentString = new StringBuilder();
        String previousPiece = null;
        boolean previousDelimiter = false;
        for (Object piece : parsed.toArray(true)) {
            if (splitStatements && piece instanceof String && isDelimiter((String) piece, previousPiece, endDelimiter)) {
                // Delimiter reached: emit the statement accumulated so far.
                String trimmedString = StringUtils.trimToNull(currentString.toString());
                if (trimmedString != null) {
                    returnArray.add(trimmedString);
                }
                currentString = new StringBuilder();
                previousDelimiter = true;
            } else {
                if (!previousDelimiter || StringUtils.trimToNull((String) piece) != null) { //don't include whitespace after a delimiter
                    if (!currentString.toString().equals("") || StringUtils.trimToNull((String) piece) != null) { //don't include whitespace before the statement
                        currentString.append(piece);
                    }
                }
                previousDelimiter = false;
            }
            previousPiece = (String) piece;
        }

        // Flush the trailing statement (input may not end with a delimiter).
        String trimmedString = StringUtils.trimToNull(currentString.toString());
        if (trimmedString != null) {
            returnArray.add(trimmedString);
        }
        return returnArray.toArray(new String[returnArray.size()]);
    }

    /**
     * Returns true when {@code piece} is a statement delimiter. With no explicit
     * delimiter, ";" always delimits and "go"/"/" delimit at the start of a line.
     */
    protected static boolean isDelimiter(String piece, String previousPiece, String endDelimiter) {
        if (endDelimiter == null) {
            return piece.equals(";") || ((piece.equalsIgnoreCase("go") || piece.equals("/")) && (previousPiece == null || previousPiece.endsWith("\n")));
        } else {
            if (endDelimiter.length() == 1) {
                // equalsIgnoreCase already compares case-insensitively; the
                // previous toLowerCase() calls on both sides were redundant.
                return piece.equalsIgnoreCase(endDelimiter);
            } else {
                // NOTE(review): inside a character class "." is a literal dot, so
                // "[.\n\r]*" only skips dots and newlines — this probably intends
                // to skip any character. Left unchanged to preserve behavior.
                return piece.toLowerCase().matches(endDelimiter.toLowerCase()) || (previousPiece + piece).toLowerCase().matches("[.\n\r]*" + endDelimiter.toLowerCase());
            }
        }
    }

    /**
     * Splits a (possible) multi-line SQL statement along ;'s and "go"'s.
     */
    public static String[] splitSQL(String multiLineSQL, String endDelimiter) {
        return processMutliLineSQL(multiLineSQL, false, true, endDelimiter);
    }

    /**
     * Searches through a String which contains SQL code and strips out
     * any comments that are between \/**\/ or anything that matches
     * SP--SP<text>\n (to support the ANSI standard commenting of --
     * at the end of a line).
     *
     * @return The String without the comments in
     */
    public static String stripComments(String multiLineSQL) {
        return SqlParser.parse(multiLineSQL, true, false).toString().trim();
    }

    /** Joins a formatted array with the given delimiter; returns null for a null array. */
    public static String join(Object[] array, String delimiter, StringUtilsFormatter formatter) {
        if (array == null) {
            return null;
        }
        return join(Arrays.asList(array), delimiter, formatter);
    }

    /** Joins a String array with the given delimiter. */
    public static String join(String[] array, String delimiter) {
        return join(Arrays.asList(array), delimiter);
    }

    /** Joins a collection with the given delimiter using each element's toString(). */
    public static String join(Collection<String> collection, String delimiter) {
        return join(collection, delimiter, new ToStringFormatter());
    }

    /**
     * Joins a collection with the given delimiter, formatting each element.
     * Returns null for a null collection and "" for an empty one.
     */
    public static String join(Collection collection, String delimiter, StringUtilsFormatter formatter) {
        if (collection == null) {
            return null;
        }
        if (collection.size() == 0) {
            return "";
        }
        // StringBuilder: no other thread shares this buffer, so the
        // synchronized StringBuffer was unnecessary overhead.
        StringBuilder buffer = new StringBuilder();
        for (Object val : collection) {
            buffer.append(formatter.toString(val)).append(delimiter);
        }
        String returnString = buffer.toString();
        return returnString.substring(0, returnString.length() - delimiter.length());
    }

    /** As {@link #join(Collection, String, StringUtilsFormatter)}, optionally sorting the formatted values. */
    public static String join(Collection collection, String delimiter, StringUtilsFormatter formatter, boolean sorted) {
        if (sorted) {
            TreeSet<String> sortedSet = new TreeSet<String>();
            for (Object obj : collection) {
                sortedSet.add(formatter.toString(obj));
            }
            return join(sortedSet, delimiter);
        }
        return join(collection, delimiter, formatter);
    }

    /** As {@link #join(Collection, String)}, optionally sorting the values first. */
    public static String join(Collection<String> collection, String delimiter, boolean sorted) {
        if (sorted) {
            return join(new TreeSet<String>(collection), delimiter);
        } else {
            return join(collection, delimiter);
        }
    }

    /** Joins a map as "key=value" pairs with the given delimiter. */
    public static String join(Map map, String delimiter) {
        return join(map, delimiter, new ToStringFormatter());
    }

    /** Joins a map as "key=formattedValue" pairs with the given delimiter. */
    public static String join(Map map, String delimiter, StringUtilsFormatter formatter) {
        List<String> list = new ArrayList<String>();
        for (Map.Entry entry : (Set<Map.Entry>) map.entrySet()) {
            list.add(entry.getKey().toString() + "=" + formatter.toString(entry.getValue()));
        }
        return join(list, delimiter);
    }

    /** Splits on the regex and trims each piece; returns null for null input. */
    public static List<String> splitAndTrim(String s, String regex) {
        if (s == null) {
            return null;
        }
        List<String> returnList = new ArrayList<String>();
        for (String string : s.split(regex)) {
            returnList.add(string.trim());
        }
        return returnList;
    }

    /**
     * Repeats the string the given number of times ("" when times <= 0).
     */
    public static String repeat(String string, int times) {
        // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
        StringBuilder returnString = new StringBuilder(string.length() * Math.max(times, 0));
        for (int i = 0; i < times; i++) {
            returnString.append(string);
        }
        return returnString.toString();
    }

    /** Joins an Integer array with the given delimiter; returns null for a null array. */
    public static String join(Integer[] array, String delimiter) {
        if (array == null) {
            return null;
        }

        int[] ints = new int[array.length];
        for (int i = 0; i < ints.length; i++) {
            ints[i] = array[i];
        }
        return StringUtils.join(ints, delimiter);
    }

    /** Joins an int array with the given delimiter; returns null for a null array. */
    public static String join(int[] array, String delimiter) {
        if (array == null) {
            return null;
        }

        if (array.length == 0) {
            return "";
        }

        StringBuilder buffer = new StringBuilder();
        for (int val : array) {
            buffer.append(val).append(delimiter);
        }

        String returnString = buffer.toString();
        return returnString.substring(0, returnString.length() - delimiter.length());
    }

    /** Indents every line of the string by 4 spaces. */
    public static String indent(String string) {
        return indent(string, 4);
    }

    /** Indents every line of the string by {@code padding} spaces. */
    public static String indent(String string, int padding) {
        String pad = StringUtils.repeat(" ", padding);
        return pad + (string.replaceAll("\n", "\n" + pad));
    }

    /** Lower-cases the first character; throws for an empty string (original behavior). */
    public static String lowerCaseFirst(String string) {
        return string.substring(0, 1).toLowerCase() + string.substring(1);
    }

    /** Upper-cases the first character; throws for an empty string (original behavior). */
    public static String upperCaseFirst(String string) {
        return string.substring(0, 1).toUpperCase() + string.substring(1);
    }

    /** Returns true if the string contains an upper-case ASCII letter. */
    public static boolean hasUpperCase(String string) {
        return upperCasePattern.matcher(string).matches();
    }

    /** Returns true if the string contains a lower-case ASCII letter. */
    public static boolean hasLowerCase(String string) {
        return lowerCasePattern.matcher(string).matches();
    }

    /** Converts all CRLF and CR line endings to LF; returns null for null input. */
    public static String standardizeLineEndings(String string) {
        if (string == null) {
            return null;
        }
        return string.replace("\r\n", "\n").replace("\r", "\n");
    }

    /** Returns true when every character is ASCII (null counts as ASCII). */
    public static boolean isAscii(String string) {
        if (string == null) {
            return true;
        }
        for (char c : string.toCharArray()) {
            if (!isAscii(c)) {
                return false;
            }
        }
        return true;
    }

    /** Returns true for a 7-bit ASCII character. */
    public static boolean isAscii(char ch) {
        return ch < 128;
    }

    /**
     * Replaces every non-ASCII character with its numeric HTML entity.
     * NOTE(review): despite the name this does NOT escape '&lt;', '&gt;' or
     * '&amp;', so it is not sufficient against HTML injection on its own.
     */
    public static String escapeHtml(String str) {
        StringBuilder out = new StringBuilder();
        int len = str.length();
        for (int i = 0; i < len; i++) {
            char c = str.charAt(i);
            if (c > 0x7F) {
                out.append("&#");
                out.append(Integer.toString(c, 10));
                out.append(';');
            } else {
                out.append(c);
            }
        }
        return out.toString();
    }

    /** Right-pads the trimmed value with spaces to the requested length. */
    public static String pad(String value, int length) {
        value = StringUtils.trimToEmpty(value);
        if (value.length() >= length) {
            return value;
        }

        return value + StringUtils.repeat(" ", length - value.length());
    }

    /**
     * Null-safe check if string is empty.
     *
     * @param value String to be checked
     * @return true if String is null or empty
     */
    public static boolean isEmpty(String value) {
        return value == null || value.length() == 0;
    }

    /**
     * Null-safe check if string is not empty
     *
     * @param value String to be checked
     * @return true if string is not null and not empty (length > 0)
     */
    public static boolean isNotEmpty(String value) {
        return !isEmpty(value);
    }

    /**
     * Checks if <code>value</code> starts with <code>startsWith</code>.
     * @param value
     * @param startsWith
     * @return true if <code>value</code> starts with <code>startsWith</code>, otherwise false. If any of arguments is null returns false
     */
    public static boolean startsWith(String value, String startsWith) {
        if (value == null || startsWith == null) {
            return false;
        }

        return value.startsWith(startsWith);
    }

    /** Returns true when the sequence is null, empty or all whitespace. */
    public static boolean isWhitespace(CharSequence string) {
        if (string == null) {
            return true;
        }
        return StringUtils.trimToNull(string.toString()) == null;
    }

    /** Converts a value of the given type to its string representation. */
    public static interface StringUtilsFormatter<Type> {
        public String toString(Type obj);
    }

    /** Default formatter: Object.toString(), preserving null as null. */
    public static class ToStringFormatter implements StringUtilsFormatter {
        @Override
        public String toString(Object obj) {
            if (obj == null) {
                return null;
            }
            return obj.toString();
        }
    }

    /** Truncates the string to maxLength, replacing the tail with "..." when cut. */
    public static String limitSize(String string, int maxLength) {
        if (string.length() > maxLength) {
            return string.substring(0, maxLength - 3) + "...";
        }
        return string;
    }
}
| |
// $Id: Pinger.java,v 1.6 2005/11/11 00:20:25 jwhui Exp $
/* tab:2
*
*
* "Copyright (c) 2000-2005 The Regents of the University of California.
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software and its
* documentation for any purpose, without fee, and without written agreement is
* hereby granted, provided that the above copyright notice, the following
* two paragraphs and the author appear in all copies of this software.
*
* IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT
* OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE UNIVERSITY OF
* CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS."
*
*/
/**
* @author Jonathan Hui <jwhui@cs.berkeley.edu>
*/
package net.tinyos.deluge;
import net.tinyos.message.*;
import net.tinyos.util.*;
import java.util.*;
/**
 * Interactively "pings" a Deluge-enabled mote: it first asks the attached
 * node which program image it is currently executing (ident request), then
 * walks the stored image slots, requesting the metadata packets of each
 * image so its version/identity information can be reported.
 *
 * Incoming traffic is handled asynchronously in {@link #messageReceived};
 * {@link #ping()} drives the request loops and polls shared flags set by the
 * listener until all replies have arrived. The two sides share mutable state
 * without synchronization — assumes a single dispatch thread from MoteIF;
 * TODO confirm.
 */
public class Pinger implements MessageListener {
  /** Destination/source address used when attached directly to a Deluge node over the UART. */
  public static final short TOS_UART_ADDR = 0x007e;
  /** Length, in bytes, of the program-name field inside the ident structure. */
  private static final int STRING_SIZE = 16;
  /** Unicast attempts per advertisement before falling back to broadcast. */
  private static final int MAX_ATTEMPTS = 3;
  /** First packet index of interest (packets before it hold the CRC block). */
  private static final int START_PKT = DelugeConsts.DELUGE_CRC_BLOCK_SIZE/DelugeConsts.DELUGE_PKT_PAYLOAD_SIZE;
  // ONE PACKET WORTH OF DATA ARE RESERVED BUT UNUSED
  // IF MORE IDENT DATA IS REQUIRED, REMOVE SUBTRACTION ON END POINT
  private static final int END_PKT = DelugeConsts.DELUGE_IDENT_SIZE/DelugeConsts.DELUGE_PKT_PAYLOAD_SIZE - 1;
  private MoteIF moteif;                 // serial-forwarder interface used for all message I/O
  private boolean verbose;               // when true, every sent/received message is dumped to stdout
  private short curImage = -1;           // image slot currently being fetched; -1 before setupNewImage()
  private DelugeAdvMsg advMsg = new DelugeAdvMsg();     // reusable advertisement/ping message
  private NetProgMsg netProgMsg = new NetProgMsg();     // last ident message (request template and reply)
  private Hashtable pingReplies = new Hashtable();      // Integer(imgNum) -> DelugeAdvMsg reply
  private Hashtable images = new Hashtable();           // Integer(imgNum) -> TOSBootImage metadata
  private short pcAddr = 0x7e;           // source address this PC claims; toggled until node type resolved
  private short pktsToReceive[] = new short[DelugeReqMsg.totalSize_requestedPkts()];  // bitmap of outstanding packets
  private short imageData[] = new short[DelugeConsts.DELUGE_PKTS_PER_PAGE*DelugeConsts.DELUGE_PKT_PAYLOAD_SIZE];  // reassembled page 0
  private int pktsReceived = 0;          // packets received for the current image
  private boolean resolvedNodeType = false;  // set once we know if we talk to a TOSBase or Deluge node
  private int numImages = -1;            // image-slot count reported by the network; -1 = unknown
  private boolean pingComplete = false;  // NOTE(review): written nowhere in this class — appears unused
  private boolean receivedExecutingIdent = false;  // set when the executing-image ident reply arrives
  private boolean requesting = false;    // NOTE(review): cleared in ping() but never read — appears unused
  private int dest = MoteIF.TOS_BCAST_ADDR;  // current destination; narrowed to a node once one replies
  private boolean skipNextAdv = false;   // suppress the next advertisement (setupNewImage already sent one)
  private int attempts = 0;              // advertisement attempts toward the current dest
  private String execProgName = "";      // name of the executing program, from the ident reply
  private long execUnixTime = 0;         // compile timestamp of the executing program (unix seconds)
  private long execUserHash = 0;         // user hash of the executing program
  private String identString = "";       // NOTE(review): never assigned or read — appears unused
  private boolean error = false;         // set when a DELUGE_ADV_ERROR reply is seen
  private int unicast = MoteIF.TOS_BCAST_ADDR;  // fixed unicast destination given at construction
  /**
   * Creates a pinger bound to the given mote interface and registers this
   * object as listener for advertisement, data, and ident messages.
   *
   * @param moteif  mote interface used to send and receive
   * @param verbose dump all messages to stdout when true
   * @param dest    destination address, or TOS_BCAST_ADDR to broadcast
   */
  public Pinger(MoteIF moteif, boolean verbose, int dest) {
    this.moteif = moteif;
    this.verbose = verbose;
    this.dest = dest;
    this.unicast = dest;
    this.moteif.registerListener(new DelugeAdvMsg(), this);
    this.moteif.registerListener(new DelugeDataMsg(), this);
    this.moteif.registerListener(new NetProgMsg(), this);
  }
  /**
   * Drives the whole exchange: repeatedly requests the executing-program
   * ident, then advertises/pings each image slot until metadata for every
   * reported image has been collected. Blocks until done, then deregisters
   * the listeners.
   */
  public void ping() {
    // Configure the reusable advertisement as a PING with invalid version
    // numbers so nodes answer with their own state.
    advMsg.set_sourceAddr(pcAddr);
    advMsg.set_version(DelugeConsts.DELUGE_VERSION);
    advMsg.set_type((short)DelugeConsts.DELUGE_ADV_PING);
    advMsg.set_nodeDesc_vNum((short)DelugeConsts.DELUGE_INVALID_VNUM);
    advMsg.set_imgDesc_vNum(DelugeConsts.DELUGE_INVALID_VNUM);
    if ( dest == MoteIF.TOS_BCAST_ADDR )
      System.out.println("Pinging node ...");
    else
      System.out.println("Pinging node " + dest + " ...");
    // get executing image
    for (;;) {
      try {
        if (receivedExecutingIdent)
          break;
        netProgMsg.set_sourceAddr(pcAddr);
        send(netProgMsg);
        if (verbose) System.out.print(netProgMsg);
        // Until messageReceived() tells us what kind of node we are attached
        // to, alternate the claimed source address between UART and broadcast.
        if (!resolvedNodeType) {
          if (pcAddr == (short)MoteIF.TOS_BCAST_ADDR)
            pcAddr = TOS_UART_ADDR;
          else
            pcAddr = (short)MoteIF.TOS_BCAST_ADDR;
        }
        Thread.currentThread().sleep(500);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
    setupNewImage();
    // get deluge image info
    for (;;) {
      try {
        attempts++;
        // After too many unanswered unicast attempts, fall back to broadcast.
        if ( attempts > MAX_ATTEMPTS ) {
          attempts = 0;
          dest = MoteIF.TOS_BCAST_ADDR;
        }
        // Done once every image slot reported by the network is handled.
        if ( ( curImage >= numImages && numImages != -1) )
          break;
        advMsg.set_sourceAddr(pcAddr);
        advMsg.set_imgDesc_imgNum(curImage);
        advMsg = DelugeCrc.computeAdvCrc(advMsg);
        requesting = false;
        // setupNewImage() may have just advertised; avoid a duplicate send.
        if (!skipNextAdv) send(advMsg);
        skipNextAdv = false;
        if (verbose) System.out.print(advMsg);
        if (!resolvedNodeType) {
          if (pcAddr == (short)MoteIF.TOS_BCAST_ADDR)
            pcAddr = TOS_UART_ADDR;
          else
            pcAddr = (short)MoteIF.TOS_BCAST_ADDR;
        }
        Thread.currentThread().sleep(500);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
    moteif.deregisterListener(new DelugeAdvMsg(), this);
    moteif.deregisterListener(new DelugeDataMsg(), this);
    moteif.deregisterListener(new NetProgMsg(), this);
  }
  /** @return true if any node replied with a DELUGE_ADV_ERROR advertisement. */
  public boolean existsError() {
    return error;
  }
  /** @return the advertisement reply recorded for the given image slot, or null. */
  public DelugeAdvMsg getPingReply(int imageNum) {
    return (DelugeAdvMsg)pingReplies.get(new Integer(imageNum));
  }
  /** @return the boot-image metadata collected for the given image slot, or null. */
  public TOSBootImage getImage(int imageNum) {
    return (TOSBootImage)images.get(new Integer(imageNum));
  }
  /** @return the source address this PC ended up using (UART or broadcast). */
  public short getPCAddr() {
    return pcAddr;
  }
  /** @return the number of image slots reported by the network, or -1 if unknown. */
  public int getNumImages() {
    return numImages;
  }
  /**
   * Sends a message, preferring the fixed unicast address given at
   * construction over the (possibly broadcast) discovered destination.
   * Send failures are only printed, not propagated.
   */
  private void send(Message m) {
    int addr;
    try {
      addr = ( unicast != MoteIF.TOS_BCAST_ADDR ) ? unicast : dest;
      moteif.send(addr, m);
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
  /**
   * Resets per-image state, advances to the next image slot, and — when more
   * slots remain — immediately advertises for it (setting skipNextAdv so the
   * ping() loop does not send a duplicate).
   */
  private void setupNewImage() {
    // Mark the metadata packets [START_PKT, START_PKT+END_PKT) as outstanding
    // in the request bitmap (one bit per packet, 8 bits per short slot).
    for ( int i = START_PKT; i < START_PKT+END_PKT; i++ )
      pktsToReceive[i/8] |= (0x1 << (i%8));
    pktsReceived = 0;
    curImage++;
    System.out.print( "\r ");
    System.out.print( "\rGetting data for image [" + curImage + "] " );
    skipNextAdv = true;
    if ( curImage < numImages || numImages == -1 ) {
      advMsg.set_sourceAddr(pcAddr);
      advMsg.set_imgDesc_imgNum(curImage);
      advMsg = DelugeCrc.computeAdvCrc(advMsg);
      send(advMsg);
    }
    if (verbose) System.out.print(advMsg);
  }
  /** @return the name of the currently executing program (empty until ident arrives). */
  public String getExecName() {
    return execProgName;
  }
  /** @return compile timestamp of the executing program, unix seconds. */
  public long getExecUnixTime() {
    return execUnixTime;
  }
  /** @return user hash of the executing program. */
  public long getExecUserHash() {
    return execUserHash;
  }
  /**
   * @return a human-readable summary of the executing program's ident.
   *         Note the date is taken from the last NetProgMsg directly, while
   *         name and hash come from the cached fields.
   */
  public String getExecutingIdent() {
    Date date = new Date(netProgMsg.get_ident_unix_time()*1000);
    return "  Prog Name:   " + execProgName + "\n" +
      "  Compiled On: " + date + "\n" +
      "  User Hash:   0x" + Long.toHexString(execUserHash);
  }
  /**
   * Listener callback: resolves the attached node type from the first
   * message, then dispatches on message type — ident replies, advertisement
   * replies, and data packets carrying image metadata.
   *
   * @param toAddr destination address the message was sent to
   * @param m      the received message
   */
  public void messageReceived(int toAddr, Message m) {
    // figure out what type of node we're connected to
    if (!resolvedNodeType) {
      if (toAddr != TOS_UART_ADDR) {
        pcAddr = (short)MoteIF.TOS_BCAST_ADDR;
        resolvedNodeType = true;
        System.out.println("Connected to TOSBase node.");
      }
      else if (toAddr == TOS_UART_ADDR) {
        pcAddr = TOS_UART_ADDR;
        resolvedNodeType = true;
        System.out.println("Connected to Deluge node.");
      }
    }
    switch(m.amType()) {
    case NetProgMsg.AM_TYPE:
      netProgMsg = (NetProgMsg)m;
      if (verbose) System.out.print(netProgMsg);
      // Ignore our own request echoes (source is broadcast or the UART addr).
      if ( netProgMsg.get_sourceAddr() == MoteIF.TOS_BCAST_ADDR ||
           netProgMsg.get_sourceAddr() == TOS_UART_ADDR )
        return;
      // extract ident information
      {
        byte tmpBytes[] = new byte[STRING_SIZE];
        for ( int i = 0; i < STRING_SIZE; i++ )
          tmpBytes[i] = (byte)(netProgMsg.getElement_ident_program_name(i) & 0xff);
        String name = new String(tmpBytes);
        // Truncate at the first NUL terminator, if any.
        if (name.indexOf('\0') != -1)
          name = name.substring(0, name.indexOf('\0'));
        execProgName = name;
        execUnixTime = netProgMsg.get_ident_unix_time();
        execUserHash = netProgMsg.get_ident_user_hash();
        receivedExecutingIdent = true;
      }
      break;
    case DelugeAdvMsg.AM_TYPE:
      DelugeAdvMsg pingReply = (DelugeAdvMsg)m;
      int imgNum = pingReply.get_imgDesc_imgNum();
      if (verbose) System.out.print(pingReply);
      if ( pingReply.get_type() == DelugeConsts.DELUGE_ADV_ERROR )
        error = true;
      // Lock onto the first node that answers when we were broadcasting.
      if (dest == MoteIF.TOS_BCAST_ADDR)
        dest = pingReply.get_sourceAddr();
      if ( pingReply.get_type() != DelugeConsts.DELUGE_ADV_NORMAL )
        return;
      // Prefer the node reporting the most image slots.
      if (numImages == -1 || pingReply.get_numImages() > numImages) {
        numImages = pingReply.get_numImages();
        dest = pingReply.get_sourceAddr();
      }
      // Ignore advertisements from nodes other than our chosen destination.
      if (dest != pingReply.get_sourceAddr())
        return;
      if (pingReplies.get(new Integer(imgNum)) == null) {
        pingReplies.put(new Integer(imgNum), pingReply);
      }
      if (curImage == imgNum) {
        // An empty image slot has no pages to fetch; move on immediately.
        if (pingReply.get_imgDesc_numPgsComplete() == 0) {
          setupNewImage();
          return;
        }
        // Request the outstanding metadata packets of page 0.
        DelugeReqMsg reqMsg = new DelugeReqMsg();
        reqMsg.set_sourceAddr(pcAddr);
        reqMsg.set_dest(pingReply.get_sourceAddr());
        reqMsg.set_vNum(pingReply.get_imgDesc_vNum());
        reqMsg.set_imgNum(pingReply.get_imgDesc_imgNum());
        reqMsg.set_pgNum((short)0);
        reqMsg.set_requestedPkts(pktsToReceive);
        if (verbose) System.out.print(reqMsg);
        send(reqMsg);
      }
      break;
    case DelugeDataMsg.AM_TYPE:
      DelugeDataMsg dataMsg = (DelugeDataMsg)m;
      short pktNum = dataMsg.get_pktNum();
      if (verbose) System.out.print(dataMsg);
      pingReply = (DelugeAdvMsg)pingReplies.get(new Integer(curImage));
      // Drop packets that do not match the image/version/page we requested.
      if (pingReply == null
          || dataMsg.get_vNum() != pingReply.get_imgDesc_vNum()
          || dataMsg.get_imgNum() != curImage
          || dataMsg.get_pgNum() != 0
          || dataMsg.get_pktNum() >= DelugeConsts.DELUGE_PKTS_PER_PAGE)
        return;
      // Accept the packet only if its bit is still outstanding.
      if ((pktsToReceive[pktNum/8] & (0x1 << (pktNum%8))) != 0) {
        pktsToReceive[pktNum/8] &= ~(0x1 << (pktNum%8));
        System.arraycopy(dataMsg.get_data(), 0, imageData,
                         pktNum*DelugeConsts.DELUGE_PKT_PAYLOAD_SIZE,
                         DelugeConsts.DELUGE_PKT_PAYLOAD_SIZE);
        System.out.print( "." );
        pktsReceived++;
        if (pktsReceived >= END_PKT) {
          // All metadata packets received: decode the boot image metadata.
          // NOTE(review): the metadata appears to start at offset 256 of the
          // reassembled page — confirm against TOSBootImage layout.
          byte[] bytes = new byte[TOSBootImage.METADATA_SIZE];
          for ( int i = 0; i < bytes.length; i++ )
            bytes[i] = (byte)(imageData[i+256] & 0xff);
          images.put(new Integer(curImage), new TOSBootImage(bytes));
          setupNewImage();
        }
      }
      break;
    }
  }
}
| |
package controllers.sso.auth;
import com.google.common.base.Strings;
import com.google.inject.persist.Transactional;
import controllers.annotations.SecureHtmlHeaders;
import controllers.sso.auth.state.SignInState;
import controllers.sso.filters.AuthenticationFilter;
import controllers.sso.filters.HitsPerIpCheckFilter;
import controllers.sso.filters.IpAddressFilter;
import controllers.sso.filters.LanguageFilter;
import controllers.sso.filters.RequireUnauthenticatedUserFilter;
import controllers.sso.web.Controllers;
import controllers.sso.web.UrlBuilder;
import dto.sso.common.Constants;
import models.sso.User;
import models.sso.UserCredentials;
import models.sso.token.ExpirableToken;
import models.sso.token.ExpiredTokenException;
import models.sso.token.IllegalTokenException;
import ninja.Context;
import ninja.FilterWith;
import ninja.Result;
import ninja.metrics.Timed;
import ninja.params.Param;
import ninja.utils.NinjaProperties;
import services.sso.UserEventService;
import services.sso.UserService;
import services.sso.token.ExpirableTokenEncryptor;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.inject.Singleton;
import java.util.IllegalFormatException;
/**
* Restore password controller.
*/
@Singleton
@FilterWith({
LanguageFilter.class,
IpAddressFilter.class,
HitsPerIpCheckFilter.class,
AuthenticationFilter.class,
RequireUnauthenticatedUserFilter.class
})
public class RestorePasswordController {
/**
* Template to render sign up page.
*/
private static final String TEMPLATE = "views/sso/auth/restorePassword.ftl.html";
/**
* User service.
*/
private final UserService userService;
/**
* User's event service.
*/
private final UserEventService userEventService;
/**
* Expirable token encryptor.
*/
private final ExpirableTokenEncryptor expirableTokenEncryptor;
/**
* URL builder provider for controller. Instance per request.
*/
private final Provider<UrlBuilder> urlBuilderProvider;
/**
* Html result with secure headers.
*/
private final Provider<Result> htmlWithSecureHeadersProvider;
/**
* Application properties.
*/
private final NinjaProperties properties;
/**
* Constructs controller.
*
* @param userService User service.
* @param userEventService User's event service.
* @param expirableTokenEncryptor Expirable token encryptor.
* @param urlBuilderProvider URL builder provider.
* @param properties Application properties.
*/
@Inject
public RestorePasswordController(UserService userService,
UserEventService userEventService,
ExpirableTokenEncryptor expirableTokenEncryptor,
Provider<UrlBuilder> urlBuilderProvider,
@SecureHtmlHeaders Provider<Result> htmlWithSecureHeadersProvider,
NinjaProperties properties) {
this.userService = userService;
this.userEventService = userEventService;
this.expirableTokenEncryptor = expirableTokenEncryptor;
this.urlBuilderProvider = urlBuilderProvider;
this.htmlWithSecureHeadersProvider = htmlWithSecureHeadersProvider;
this.properties = properties;
}
/**
* Renders restore password form.
*
* @param context Context.
* @param restoreToken Restore token from email sent to user by {@link ForgotPasswordController}.
* @return Result with data for restore password form.
*/
@Timed
@Transactional
public Result restorePasswordGet(Context context, @Param(value = "restoreToken") String restoreToken) {
Result result = createResult(context, restoreToken);
try {
ExpirableToken token = expirableTokenEncryptor.decrypt(restoreToken);
Long userId = token.getAttributeAsLong("userId");
User user = userService.get(userId);
if (user == null) {
throw new ExpiredTokenException();
}
result.render("user", user);
} catch (ExpiredTokenException ex) {
result.render("restorePasswordError", "expired");
} catch (IllegalTokenException | IllegalFormatException ex) {
result.render("restorePasswordError", "unknown");
}
return result;
}
/**
* Processes submit of the restore password form.
*
* @param context Context.
* @param restoreToken Restore token.
* @param password Password.
* @param confirmPassword Password confirmation.
* @return Result.
*/
@Timed
@Transactional
public Result restorePassword(Context context,
@Param(value = "restoreToken") String restoreToken,
@Param(value = "password") String password,
@Param(value = "confirmPassword") String confirmPassword) {
password = Strings.nullToEmpty(password);
confirmPassword = Strings.nullToEmpty(confirmPassword);
Result result = createResult(context, restoreToken);
try {
ExpirableToken token = expirableTokenEncryptor.decrypt(restoreToken);
Long userId = token.getAttributeAsLong("userId");
User user = userService.get(userId);
if (user == null) {
throw new ExpiredTokenException();
}
if (isValidPassword(password, confirmPassword)) {
String ip = (String) context.getAttribute(IpAddressFilter.REMOTE_IP);
UserCredentials credentials = userService.getCredentials(user);
byte[] oldSalt = credentials.getPasswordSalt();
byte[] oldHash = credentials.getPasswordHash();
userService.updatePasswordAndConfirm(user, password);
userEventService.onUserPasswordUpdate(user, oldSalt, oldHash, ip, context.getHeaders());
String url = urlBuilderProvider.get().getSignInUrl(SignInState.PASSWORD_CHANGED);
return Controllers.redirect(url);
} else {
result.render("restorePasswordError", "password");
}
result.render("user", user);
} catch (ExpiredTokenException ex) {
result.render("restorePasswordError", "expired");
} catch (IllegalTokenException | IllegalFormatException ex) {
result.render("restorePasswordError", "unknown");
}
return result;
}
/**
* Creates common result for GET and POST forms.
*
* @param context Context.
* @param restoreToken Restore token.
* @return Result with data and template.
*/
private Result createResult(Context context, String restoreToken) {
String locale = (String) context.getAttribute(LanguageFilter.LANG);
return htmlWithSecureHeadersProvider.get()
.render("context", context)
.render("continue", urlBuilderProvider.get().getContinueUrlParameter())
.render("config", properties)
.render("restoreToken", restoreToken)
.render("lang", locale)
.template(TEMPLATE);
}
/**
* Validates given password, verifies it with confirmation password.
*
* @param password Password.
* @param confirmPassword Confirm password.
* @return Whether the given password is valid.
*/
private static boolean isValidPassword(String password, String confirmPassword) {
return password.length() >= Constants.PASSWORD_MIN_LENGTH
&& password.length() <= Constants.PASSWORD_MAX_LENGTH
&& password.equals(confirmPassword);
}
}
| |
package edu.cmu.minorthird.classify.transform;
import java.io.File;
import java.io.Serializable;
import java.util.Iterator;
import edu.cmu.minorthird.classify.BasicDataset;
import edu.cmu.minorthird.classify.ClassLabel;
import edu.cmu.minorthird.classify.Classifier;
import edu.cmu.minorthird.classify.ClassifierLearner;
import edu.cmu.minorthird.classify.Dataset;
import edu.cmu.minorthird.classify.DatasetClassifierTeacher;
import edu.cmu.minorthird.classify.Example;
import edu.cmu.minorthird.classify.Explanation;
import edu.cmu.minorthird.classify.Feature;
import edu.cmu.minorthird.classify.Instance;
import edu.cmu.minorthird.classify.MutableInstance;
import edu.cmu.minorthird.classify.Splitter;
import edu.cmu.minorthird.classify.algorithms.linear.Hyperplane;
import edu.cmu.minorthird.classify.algorithms.linear.VotedPerceptron;
import edu.cmu.minorthird.classify.experiments.Evaluation;
import edu.cmu.minorthird.classify.experiments.Expt;
import edu.cmu.minorthird.text.Span;
import edu.cmu.minorthird.text.TextBase;
import edu.cmu.minorthird.text.TextLabels;
import edu.cmu.minorthird.text.learn.SpanFeatureExtractor;
import edu.cmu.minorthird.util.LineProcessingUtil;
/**
* @author Vitor R. Carvalho
*
* Classifiers that, from a linear binary classfier, perform the classification of new instances
* by disregarding all features whose hyperplane axis are negative.
*
*
*/
/**
 * @author Vitor R. Carvalho
 *
 * Wraps a trained linear binary classifier (a {@link Hyperplane}) and
 * classifies new instances using only the features whose hyperplane weights
 * are positive. The decision threshold is calibrated from training data
 * instead of being fixed at zero.
 *
 * Fixes in this revision: stray double semicolon after serialVersionUID,
 * clearer argument-count check in main, and removal of long-dead
 * commented-out code.
 */
public class PositiveScoresTransformClassifier implements Classifier,
    Serializable{

    static private final long serialVersionUID=20080201L;
    /** Decision threshold learned from the training data (replaces the usual zero). */
    private double threshold;
    /** Underlying linear classifier whose positive weights drive the score. */
    private Hyperplane hyp;
    /** Minimum hyperplane weight a feature must exceed to contribute to the score. */
    private final double minFeatScore=0;

    /**
     * Builds the transform classifier and calibrates the positive threshold.
     *
     * @param c a trained linear classifier; must be a {@link Hyperplane}
     * @param data training data used to calibrate the threshold
     * @throws ClassCastException if {@code c} is not a Hyperplane
     */
    public PositiveScoresTransformClassifier(Classifier c,Dataset data){
        hyp=(Hyperplane)c;
        threshold=calculatePositiveThreshold(data);
        System.out.println("Threshold = "+threshold);
    }

    /**
     * Classifies an instance: positive iff its positive-weights-only score
     * reaches the calibrated threshold. The label weight is the margin
     * (score minus threshold).
     */
    @Override
    public ClassLabel classification(Instance instance){
        double s=score(instance,minFeatScore);
        double margin=s-threshold;
        return s>=threshold?ClassLabel.positiveLabel(margin):ClassLabel
            .negativeLabel(margin);
    }

    /**
     * Iterates over the training data to discover the positive threshold:
     * a weighted average of the mean positive and mean negative scores,
     * where each class mean is weighted by the size of the other class.
     *
     * NOTE(review): if {@code data} contains no positive or no negative
     * examples the means below divide by zero and the threshold becomes
     * NaN — confirm callers always pass mixed-label data.
     *
     * @param data training data with binary labels
     * @return the calibrated threshold
     */
    public double calculatePositiveThreshold(Dataset data){
        double posScore=0.0,negScore=0.0;
        int numPos=0,numNeg=0;
        for(Iterator<Example> i=data.iterator();i.hasNext();){
            Example ex=i.next();
            double s=score(ex.asInstance(),minFeatScore);
            if(ex.getLabel().isPositive()){
                numPos++;
                posScore+=s;
            }else{
                numNeg++;
                negScore+=s;
            }
        }
        double negTh=negScore/numNeg;
        double posTh=posScore/numPos;
        // Weighted average: each class mean is weighted by the other class's size.
        return (negTh*numPos+posTh*numNeg)/((numPos+numNeg)*2);
    }

    /** Convenience overload: score with a minimum feature weight of zero. */
    public double score(Instance instance){
        return score(instance,0);
    }

    /**
     * Inner product of hyperplane and instance weights, disregarding the
     * dimensions of the hyperplane whose weight is at most
     * {@code minFeatScore}; the bias term is always added.
     *
     * @param instance the instance to score
     * @param minFeatScore minimum hyperplane weight for a feature to count
     * @return the restricted inner-product score plus bias
     */
    public double score(Instance instance,double minFeatScore){
        double score=0.0;
        for(Iterator<Feature> j=instance.featureIterator();j.hasNext();){
            Feature f=j.next();
            double featureWeight=hyp.featureScore(f);
            // Only strictly-positive (above-threshold) hyperplane dimensions count.
            if(featureWeight>minFeatScore){
                score+=instance.getWeight(f)*featureWeight;
            }
        }
        score+=hyp.featureScore(Hyperplane.BIAS_TERM);
        return score;
    }

    @Override
    public String explain(Instance instance){
        return "classify using only features with positive hyperplane weights";
    }

    @Override
    public Explanation getExplanation(Instance instance){
        Explanation ex=new Explanation(explain(instance));
        return ex;
    }

    /**
     * Demonstration driver: loads labeled text from hard-coded paths,
     * builds a bag-of-words dataset for the given label name, trains a
     * voted perceptron under 2-fold cross-validation, applies this
     * transform, and prints an evaluation summary.
     *
     * NOTE(review): the input paths are hard-coded Windows locations
     * (C:/m3test/...); this main is only usable on the original author's
     * setup.
     *
     * @param args exactly one argument: the label/class name (e.g. Req, Dlv)
     */
    public static void main(String[] args){
        if(args.length!=1){
            System.out.println("Usage: PositiveScoresTransformClassifier classname");
            return;
        }
        String mytag=args[0];
        Dataset dataset=new BasicDataset();
        SpanFeatureExtractor fe=
            edu.cmu.minorthird.text.learn.SampleFE.BAG_OF_LC_WORDS;
        TextLabels labels;
        try{
            labels=
                LineProcessingUtil.readBsh(new File("C:/m3test/total/data/"),
                    new File("C:/m3test/total/env/all"+mytag+".env"));
            TextBase tb=labels.getTextBase();
            // Build one binary example per document: +1 if it carries the tag.
            for(Iterator<Span> it=tb.documentSpanIterator();it.hasNext();){
                Span docspan=it.next();
                MutableInstance ins=(MutableInstance)fe.extractInstance(labels,docspan);
                ClassLabel mylabel=
                    labels.hasType(docspan,mytag)?ClassLabel.binaryLabel(+1):ClassLabel
                        .binaryLabel(-1);
                dataset.add(new Example(ins,mylabel));
            }
            // Only works for linear classifiers (the transform casts to Hyperplane).
            ClassifierLearner learner=new VotedPerceptron();
            Splitter<Example> split=Expt.toSplitter("k2");
            Evaluation v=new Evaluation(dataset.getSchema());
            Dataset.Split s=dataset.split(split);
            for(int k=0;k<s.getNumPartitions();k++){
                Dataset trainData=s.getTrain(k);
                Dataset testData=s.getTest(k);
                System.out.println("splitting with "+split+", preparing to train on "+
                    trainData.size()+" and test on "+testData.size());
                Classifier cc=new DatasetClassifierTeacher(trainData).train(learner);
                // Apply the positive-scores transformation before evaluating.
                Classifier cc_transformed=
                    new PositiveScoresTransformClassifier(cc,trainData);
                v.extend(cc_transformed,testData,k);
            }
            v.summarize();
        }catch(Exception e){
            e.printStackTrace();
            System.out.println("Usage: PositiveScoresTransformClassifier classname");
            System.out.println("for instance, nameclass = Req, Dlv, Cmt, POS, etc");
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.