gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.antsapps.tennotrumps;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import android.content.Intent;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.Toast;
import com.actionbarsherlock.app.SherlockActivity;
import com.antsapps.tennotrumps.backend.Application;
import com.antsapps.tennotrumps.backend.Match;
import com.antsapps.tennotrumps.backend.Player;
import com.antsapps.tennotrumps.backend.Round;
import com.antsapps.tennotrumps.backend.Team;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
public class AddMatch extends SherlockActivity {
  /** Backend entry point; populated in {@link #onCreate}. */
  public Application application;
  private Collection<Team> allTeams;
  private Collection<Player> allPlayers;
  /**
   * Maps each team-name field to the player-name fields of that team.
   * A LinkedHashMap keeps iteration in insertion order (team 1, then team 2);
   * the previous HashMap iterated in arbitrary order, so the Match built in
   * {@link #createMatch(View)} from teams.get(0)/teams.get(1) could swap
   * team 1 and team 2 nondeterministically.
   */
  private final Map<AutoCompleteTextView, List<AutoCompleteTextView>> playerViews = Maps
      .newLinkedHashMap();
  private List<String> teamNames, playerNames;
  private Button createMatch;

  /** Called when the activity is first created. */
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    application = Application.getInstance(getApplicationContext());
    setContentView(R.layout.add_match);
    initTeamNames();
    initPlayerNames();
    ArrayAdapter<String> teamsAdapter = new ArrayAdapter<String>(this,
        android.R.layout.simple_dropdown_item_1line, teamNames);
    ArrayAdapter<String> playersAdapter = new ArrayAdapter<String>(this,
        android.R.layout.simple_dropdown_item_1line, playerNames);
    // Register team 1 before team 2 — createMatch(View) relies on this order.
    final AutoCompleteTextView team1view = (AutoCompleteTextView) findViewById(R.id.autoCompleteTeam1);
    List<AutoCompleteTextView> team1PlayerViews = Lists.newArrayList();
    team1PlayerViews
        .add((AutoCompleteTextView) findViewById(R.id.autocompletePlayer1i));
    team1PlayerViews
        .add((AutoCompleteTextView) findViewById(R.id.autocompletePlayer1ii));
    playerViews.put(team1view, team1PlayerViews);
    final AutoCompleteTextView team2view = (AutoCompleteTextView) findViewById(R.id.autoCompleteTeam2);
    List<AutoCompleteTextView> team2PlayerViews = Lists.newArrayList();
    team2PlayerViews
        .add((AutoCompleteTextView) findViewById(R.id.autocompletePlayer2i));
    team2PlayerViews
        .add((AutoCompleteTextView) findViewById(R.id.autocompletePlayer2ii));
    playerViews.put(team2view, team2PlayerViews);
    for (final AutoCompleteTextView teamView : playerViews.keySet()) {
      teamView.setAdapter(teamsAdapter);
      for (AutoCompleteTextView playerView : playerViews.get(teamView)) {
        playerView.setAdapter(playersAdapter);
      }
      teamView.addTextChangedListener(new TextWatcher() {
        @Override
        public void onTextChanged(CharSequence s, int start, int before,
            int count) {
        }

        @Override
        public void beforeTextChanged(CharSequence s, int start, int count,
            int after) {
        }

        @Override
        public void afterTextChanged(Editable s) {
          String teamName = s.toString();
          Team team = application.getTeam(teamName);
          if (team != null) {
            // Existing team: show its players and lock the fields so the
            // roster cannot be edited from this screen.
            int i = 0;
            for (AutoCompleteTextView playerView : playerViews.get(teamView)) {
              Player player = team.getPlayer(i);
              if (player != null) {
                playerView.setText(player.getName());
              }
              playerView.setEnabled(false);
              i++;
            }
          } else {
            // Unknown team name: allow free entry of player names.
            for (AutoCompleteTextView playerView : playerViews.get(teamView)) {
              playerView.setEnabled(true);
            }
          }
          checkTeamViews();
        }
      });
    }
    createMatch = (Button) findViewById(R.id.createMatch);
    createMatch.setEnabled(false);
  }

  /** Enables the create button only when both team-name fields are non-empty. */
  private void checkTeamViews() {
    if (createMatch == null) {
      // A text listener could in principle fire before onCreate finishes
      // wiring the button; ignore such early callbacks instead of crashing.
      return;
    }
    boolean enabled = true;
    for (AutoCompleteTextView teamView : playerViews.keySet()) {
      enabled &= (teamView.getText().length() > 0);
    }
    createMatch.setEnabled(enabled);
  }

  /**
   * onClick handler for the "create match" button. Creates any missing teams
   * and players, persists a new match with its first round, and hands off to
   * {@link HandList}.
   */
  public void createMatch(View view) {
    if (!(checkUniqueNames())) {
      Toast.makeText(getApplicationContext(),
          "Team and Player names must be unique", Toast.LENGTH_SHORT).show();
      return;
    }
    List<Team> teams = Lists.newArrayList();
    // playerViews preserves insertion order, so teams.get(0) is always
    // team 1 and teams.get(1) is always team 2.
    for (AutoCompleteTextView teamView : playerViews.keySet()) {
      Team team = application.getTeam(teamView.getText().toString());
      if (team == null) {
        team = new Team(teamView.getText().toString());
        int i = 0;
        for (AutoCompleteTextView playerView : playerViews.get(teamView)) {
          String playerName = (playerView.getText().toString());
          if (playerName.length() > 0) {
            Player player = application.getPlayer(playerName);
            if (player == null) {
              player = new Player(playerName);
              application.addPlayer(player);
            }
            team.setPlayer(i, player);
          }
          i++;
        }
        application.addTeam(team);
      }
      teams.add(team);
    }
    Match match = new Match(teams.get(0), teams.get(1));
    application.addMatch(match);
    Round round = new Round(match);
    application.addRound(round);
    Log.i("AddMatch", match.toString());
    Intent intent = new Intent(this, HandList.class);
    intent.putExtra(Match.ID_TAG, match.getId());
    intent.putExtra(Round.ID_TAG, round.getId());
    startActivity(intent);
    finish();
  }

  /** @return true iff the entered team names and non-empty player names are all distinct. */
  private boolean checkUniqueNames() {
    boolean uniqueTeams = true;
    boolean uniquePlayers = true;
    Set<String> teamSet = Sets.newHashSet();
    Set<String> playerSet = Sets.newHashSet();
    for (AutoCompleteTextView teamView : playerViews.keySet()) {
      // Set.add returns false on duplicates, which flips the flag.
      uniqueTeams &= teamSet.add(teamView.getText().toString());
      for (AutoCompleteTextView playerView : playerViews.get(teamView)) {
        String playerName = (playerView.getText().toString());
        if (playerName.length() > 0) {
          uniquePlayers &= playerSet.add(playerName);
        }
      }
    }
    return uniqueTeams && uniquePlayers;
  }

  /** Loads all known team names, sorted, for the autocomplete adapter. */
  private void initTeamNames() {
    allTeams = application.getTeams();
    teamNames = new ArrayList<String>();
    for (Team team : allTeams) {
      teamNames.add(team.getName());
    }
    Collections.sort(teamNames);
  }

  /** Loads all known player names, sorted, for the autocomplete adapter. */
  private void initPlayerNames() {
    allPlayers = application.getPlayers();
    playerNames = new ArrayList<String>();
    for (Player player : allPlayers) {
      playerNames.add(player.getName());
    }
    Collections.sort(playerNames);
  }
}
| |
package org.ovirt.engine.core.utils.hostinstall;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.InputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
//Doron Fediuck: IPWorksInstallWrapper class is not being used at all. I kept it as a backup until we have full functional alternative by Apache, and I still wait for internal scp & fingerprint implementation.
public class IPWorksInstallWrapper implements IVdsInstallWrapper {

    private static final Log log = LogFactory.getLog(IPWorksInstallWrapper.class);

    private IVdsInstallCallBack callback;
    Session session = null;

    public IPWorksInstallWrapper() {
    }

    /**
     * Opens an SSH session to {@code server} on port 22. Uses public-key
     * authentication when a certificate path is present on the credentials,
     * otherwise whatever the Credentials/UserInfo implementation supplies.
     *
     * @return true on success; false if the connection failed (the failure is
     *         also reported through the callback).
     */
    private boolean _do_connect(String server, Credentials creds) {
        boolean returnValue = true;
        try {
            JSch jsch = new JSch();
            if (creds.getCertPath() != null) {
                log.debug("Using Public Key Authentication.");
                jsch.addIdentity(creds.getCertPath(), creds.getPassphrase());
            }
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            session = jsch.getSession(creds.getUsername(), server, 22);
            session.setOutputStream(output);
            session.setUserInfo(creds);
            session.connect();
            if (log.isDebugEnabled()) {
                log.debug(output.toString());
            }
            callbackConnected();
        } catch (JSchException e) {
            callbackFailed(e.getMessage());
            returnValue = false;
            log.error("Could not connect to server " + server, e);
        }
        return returnValue;
    }

    protected void callbackAddError(String message) {
        if (haveCallback()) {
            callback.AddError(message);
        }
    }

    protected void callbackAddMessage(String message) {
        if (haveCallback()) {
            callback.AddMessage(message);
        }
    }

    protected void callbackConnected() {
        if (haveCallback()) {
            // TODO: Remove comment when Apache MINA support Finger-Print
            // callback.AddMessage(String.format(
            // "<BSTRAP component='RHEV_INSTALL' status='OK' message='Connected to Host %s with SSH Key Fingerprint: %s'/>",
            // session.getHost(),
            // session.getFingerprint()));
            callback.Connected();
        }
    }

    protected void callbackEndTransfer() {
        if (haveCallback()) {
            callback.EndTransfer();
        }
    }

    protected void callbackFailed(String message) {
        if (haveCallback()) {
            callback.Failed(message);
        }
    }

    /** Connects as root using the configured certificate and passphrase. */
    public final boolean ConnectToServer(String server) {
        return ConnectToServer(server, Config.resolveCertificatePath(),
                Config.<String> GetValue(ConfigValues.CertificatePassword));
    }

    /** Connects as root with password authentication. */
    public final boolean ConnectToServer(String server, String rootPassword) {
        Credentials creds = new Credentials();
        creds.setPassword(rootPassword);
        creds.setUsername("root");
        return _do_connect(server, creds);
    }

    /** Connects as root with public-key authentication. */
    public final boolean ConnectToServer(String server, String certPath, String password) {
        Credentials creds = new Credentials();
        creds.setPassphrase(password);
        creds.setCertPath(certPath);
        creds.setUsername("root");
        return _do_connect(server, creds);
    }

    /**
     * Fetches {@code source} from the connected host into local
     * {@code destination} over SFTP. Requires a prior successful
     * ConnectToServer call.
     */
    public final boolean DownloadFile(String source, String destination) {
        log.info(String.format("Downloading file %s to %s on %s", source, destination, session.getHost()));
        boolean returnValue = true;
        ChannelSftp channel = null;
        try {
            channel = (ChannelSftp) session.openChannel("sftp");
            channel.connect();
            channel.get(source, destination);
            // NOTE(review): kept the original exit-status check, although an
            // SFTP channel's exit status is normally only set once the channel
            // is closed — confirm whether this check ever fires.
            int exitStatus = channel.getExitStatus();
            if (exitStatus > 0) {
                returnValue = false;
                callbackAddError("Exit Status from transfer: " + exitStatus);
                log.error("Exit Status from transfer: " + exitStatus);
            } else {
                callbackEndTransfer();
            }
        } catch (Exception e) {
            callbackAddError(e.getMessage());
            log.error("Failed to download file " + source, e);
            returnValue = false;
        } finally {
            // The channel was previously leaked; always release it.
            if (channel != null && channel.isConnected()) {
                channel.disconnect();
            }
        }
        return returnValue;
    }

    protected boolean haveCallback() {
        return callback != null;
    }

    public final void InitCallback(IVdsInstallCallBack callback) {
        this.callback = callback;
    }

    /**
     * Drains whatever is currently available on {@code input} without
     * blocking, decoding with the platform charset (pre-existing behavior).
     */
    protected String readInput(InputStream input) {
        StringBuilder builder = new StringBuilder();
        byte[] tmp = new byte[1024];
        try {
            while (input.available() > 0) {
                int i = input.read(tmp, 0, 1024);
                if (i < 0)
                    break;
                builder.append(new String(tmp, 0, i));
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return builder.toString();
    }

    /**
     * Runs {@code command} on the connected host, streaming stdout/stderr to
     * the callback, and polls once a second until the channel closes.
     *
     * @return true iff the remote command completed with exit status 0.
     */
    public final boolean RunSSHCommand(String command) {
        boolean returnValue = true;
        log.info(String.format("Invoking %s on %s", command, session.getHost()));
        ChannelExec channel = null;
        try {
            channel = (ChannelExec) session.openChannel("exec");
            channel.setCommand(command);
            InputStream result = channel.getInputStream();
            InputStream error = channel.getErrStream();
            channel.connect();
            int exitStatus = 0;
            while (true) {
                String sshMessage = readInput(result);
                if (sshMessage != null && sshMessage.length() > 0) {
                    log.debug(sshMessage);
                    callbackAddMessage(sshMessage);
                }
                String errorSshMessage = readInput(error);
                if (errorSshMessage != null && errorSshMessage.length() > 0) {
                    log.debug(errorSshMessage);
                    callbackAddError(errorSshMessage);
                }
                if (channel.isClosed()) {
                    exitStatus = channel.getExitStatus();
                    log.debug("exit-status: " + exitStatus);
                    break;
                }
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException ie) {
                    // Previously swallowed; restore the interrupt flag and
                    // stop waiting — report the command as failed.
                    Thread.currentThread().interrupt();
                    callbackAddError("Interrupted while waiting for command to complete");
                    returnValue = false;
                    break;
                }
            }
            if (exitStatus != 0) {
                returnValue = false;
                String errorString = readInput(error);
                callbackAddError(errorString);
                log.error(errorString);
            }
        } catch (Exception e) {
            callbackFailed(e.getMessage());
            log.error("Error running command " + command, e);
            returnValue = false;
        } finally {
            // The channel was previously leaked; always release it.
            if (channel != null && channel.isConnected()) {
                channel.disconnect();
            }
        }
        return returnValue;
    }

    /**
     * Uploads local {@code source} to {@code destination} on the connected
     * host over SFTP. Fails fast when the local file does not exist.
     */
    public final boolean UploadFile(String source, String destination) {
        log.info(String.format("Uploading file %s to %s on %s", source, destination, session.getHost()));
        File file = new File(source);
        boolean returnValue = true;
        if (file.exists()) {
            ChannelSftp channel = null;
            try {
                channel = (ChannelSftp) session.openChannel("sftp");
                channel.connect();
                channel.put(source, destination);
                // NOTE(review): same caveat as DownloadFile — the exit status
                // may not be populated before the channel is closed.
                int exitStatus = channel.getExitStatus();
                if (exitStatus > 0) {
                    returnValue = false;
                    callbackAddError("Exit Status from transfer: " + exitStatus);
                    log.error("Exit Status from transfer: " + exitStatus);
                } else {
                    callbackEndTransfer();
                }
            } catch (Exception e) {
                callbackAddError(e.getMessage());
                log.error("Failed to upload file " + source, e);
                returnValue = false;
            } finally {
                // The channel was previously leaked; always release it.
                if (channel != null && channel.isConnected()) {
                    channel.disconnect();
                }
            }
        } else {
            this.callbackFailed(String.format("Upload failed. File: %s not exist", source));
            log.error("File to upload does not exist: " + source);
            returnValue = false;
        }
        return returnValue;
    }

    @Override
    public void wrapperShutdown() {
        throw new UnsupportedOperationException("This class should not be used, and have no support in installation actions.");
    }

    /**
     * The methods does not implement the timeout.
     */
    @Override
    public boolean ConnectToServer(String server, String rootPassword, long timeout) {
        return ConnectToServer(server, rootPassword);
    }
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.modifier;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.navercorp.pinpoint.bootstrap.Agent;
import com.navercorp.pinpoint.bootstrap.config.ProfilerConfig;
import com.navercorp.pinpoint.bootstrap.instrument.ByteCodeInstrumentor;
import com.navercorp.pinpoint.bootstrap.instrument.matcher.ClassNameMatcher;
import com.navercorp.pinpoint.bootstrap.instrument.matcher.Matcher;
import com.navercorp.pinpoint.bootstrap.instrument.matcher.MultiClassNameMatcher;
import com.navercorp.pinpoint.profiler.modifier.connector.asynchttpclient.AsyncHttpClientModifier;
import com.navercorp.pinpoint.profiler.modifier.db.cubrid.CubridConnectionModifier;
import com.navercorp.pinpoint.profiler.modifier.db.cubrid.CubridDriverModifier;
import com.navercorp.pinpoint.profiler.modifier.db.cubrid.CubridPreparedStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.cubrid.CubridResultSetModifier;
import com.navercorp.pinpoint.profiler.modifier.db.cubrid.CubridStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.dbcp.DBCPBasicDataSourceModifier;
import com.navercorp.pinpoint.profiler.modifier.db.dbcp.DBCPPoolGuardConnectionWrapperModifier;
import com.navercorp.pinpoint.profiler.modifier.db.jtds.Jdbc2ConnectionModifier;
import com.navercorp.pinpoint.profiler.modifier.db.jtds.Jdbc4_1ConnectionModifier;
import com.navercorp.pinpoint.profiler.modifier.db.jtds.JtdsDriverModifier;
import com.navercorp.pinpoint.profiler.modifier.db.jtds.JtdsPreparedStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.jtds.JtdsResultSetModifier;
import com.navercorp.pinpoint.profiler.modifier.db.jtds.JtdsStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.mysql.MySQLConnectionImplModifier;
import com.navercorp.pinpoint.profiler.modifier.db.mysql.MySQLConnectionModifier;
import com.navercorp.pinpoint.profiler.modifier.db.mysql.MySQLNonRegisteringDriverModifier;
import com.navercorp.pinpoint.profiler.modifier.db.mysql.MySQLPreparedStatementJDBC4Modifier;
import com.navercorp.pinpoint.profiler.modifier.db.mysql.MySQLPreparedStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.mysql.MySQLStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.oracle.OracleDriverModifier;
import com.navercorp.pinpoint.profiler.modifier.db.oracle.OraclePreparedStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.oracle.OracleStatementModifier;
import com.navercorp.pinpoint.profiler.modifier.db.oracle.PhysicalConnectionModifier;
import com.navercorp.pinpoint.profiler.modifier.log.log4j.LoggingEventOfLog4jModifier;
import com.navercorp.pinpoint.profiler.modifier.log.logback.LoggingEventOfLogbackModifier;
import com.navercorp.pinpoint.profiler.modifier.method.MethodModifier;
import com.navercorp.pinpoint.profiler.modifier.orm.ibatis.SqlMapModifier;
import com.navercorp.pinpoint.profiler.modifier.orm.mybatis.MyBatisModifier;
import com.navercorp.pinpoint.profiler.modifier.servlet.SpringFrameworkServletModifier;
import com.navercorp.pinpoint.profiler.modifier.spring.beans.AbstractAutowireCapableBeanFactoryModifier;
import com.navercorp.pinpoint.profiler.modifier.spring.orm.ibatis.SqlMapClientTemplateModifier;
import com.navercorp.pinpoint.profiler.util.JavaAssistUtils;
/**
* @author emeroad
* @author netspider
* @author hyungil.jeong
* @author Minwoo Jung
* @author jaehong.kim
*/
public class DefaultModifierRegistry implements ModifierRegistry {
    // Populated by a single thread before any lookups begin, so a plain
    // HashMap is safe here. Deliberately oversized to keep collisions low.
    private final Map<String, AbstractModifier> registry = new HashMap<String, AbstractModifier>(512);
    private final ByteCodeInstrumentor byteCodeInstrumentor;
    private final ProfilerConfig profilerConfig;
    private final Agent agent;

    public DefaultModifierRegistry(Agent agent, ByteCodeInstrumentor byteCodeInstrumentor) {
        this.agent = agent;
        this.byteCodeInstrumentor = byteCodeInstrumentor;
        this.profilerConfig = agent.getProfilerConfig();
    }

    /** Looks up the modifier registered under the given JVM-internal class name, or null. */
    @Override
    public AbstractModifier findModifier(String className) {
        return registry.get(className);
    }

    /** Registers a modifier under every class name its matcher reports. */
    public void addModifier(AbstractModifier modifier) {
        final Matcher matcher = modifier.getMatcher();
        // TODO extract matcher process
        if (matcher instanceof ClassNameMatcher) {
            addModifier0(modifier, ((ClassNameMatcher) matcher).getClassName());
            return;
        }
        if (matcher instanceof MultiClassNameMatcher) {
            for (String targetClassName : ((MultiClassNameMatcher) matcher).getClassNames()) {
                addModifier0(modifier, targetClassName);
            }
            return;
        }
        throw new IllegalArgumentException("unsupported matcher :" + matcher);
    }

    /** Stores the modifier keyed by JVM-internal (slash-separated) name; duplicates are fatal. */
    private void addModifier0(AbstractModifier modifier, String className) {
        final String checkJvmClassName = JavaAssistUtils.javaNameToJvmName(className);
        final AbstractModifier old = registry.put(checkJvmClassName, modifier);
        if (old != null) {
            throw new IllegalStateException("Modifier already exist. className:" + checkJvmClassName + " new:" + modifier.getClass() + " old:" + old.getClass());
        }
    }

    public void addMethodModifier() {
        addModifier(new MethodModifier(byteCodeInstrumentor, agent));
    }

    public void addConnectorModifier() {
        // ning async http client
        addModifier(new AsyncHttpClientModifier(byteCodeInstrumentor, agent));
    }

    public void addTomcatModifier() {
        addModifier(new SpringFrameworkServletModifier(byteCodeInstrumentor, agent));
    }

    /** Registers JDBC driver modifiers according to the profiler configuration. */
    public void addJdbcModifier() {
        // TODO Can we check if JDBC driver exists here?
        if (!profilerConfig.isJdbcProfile()) {
            return;
        }
        if (profilerConfig.isJdbcProfileMySql()) {
            addMySqlDriver();
        }
        if (profilerConfig.isJdbcProfileJtds()) {
            addJtdsDriver();
        }
        if (profilerConfig.isJdbcProfileOracle()) {
            addOracleDriver();
        }
        // if (profilerConfig.isJdbcProfileCubrid()) {
        //     addCubridDriver();
        // }
        if (profilerConfig.isJdbcProfileDbcp()) {
            addDbcpDriver();
        }
    }

    private void addMySqlDriver() {
        // TODO In some MySQL drivers Connection is an interface and in the others it's a class. Is this OK?
        addModifier(new MySQLNonRegisteringDriverModifier(byteCodeInstrumentor, agent));
        // From MySQL driver 5.1.x, backward compatibility is broken: the
        // driver hands out com.mysql.jdbc.JDBC4Connection (which extends
        // com.mysql.jdbc.ConnectionImpl) rather than com.mysql.jdbc.Connection.
        addModifier(new MySQLConnectionImplModifier(byteCodeInstrumentor, agent));
        addModifier(new MySQLConnectionModifier(byteCodeInstrumentor, agent));
        addModifier(new MySQLStatementModifier(byteCodeInstrumentor, agent));
        addModifier(new MySQLPreparedStatementModifier(byteCodeInstrumentor, agent));
        addModifier(new MySQLPreparedStatementJDBC4Modifier(byteCodeInstrumentor, agent));
        // TODO Need to create result set fetch counter
        // addModifier(new MySQLResultSetModifier(byteCodeInstrumentor, agent));
    }

    private void addJtdsDriver() {
        addModifier(new JtdsDriverModifier(byteCodeInstrumentor, agent));
        addModifier(new Jdbc2ConnectionModifier(byteCodeInstrumentor, agent));
        addModifier(new Jdbc4_1ConnectionModifier(byteCodeInstrumentor, agent));
        addModifier(new JtdsStatementModifier(byteCodeInstrumentor, agent));
        addModifier(new JtdsPreparedStatementModifier(byteCodeInstrumentor, agent));
        addModifier(new JtdsResultSetModifier(byteCodeInstrumentor, agent));
    }

    private void addOracleDriver() {
        addModifier(new OracleDriverModifier(byteCodeInstrumentor, agent));
        // TODO Intercepting PhysicalConnection makes view ugly.
        // We'd better intercept top-level classes T4C, T2C and OCI each to makes view more readable.
        addModifier(new PhysicalConnectionModifier(byteCodeInstrumentor, agent));
        addModifier(new OraclePreparedStatementModifier(byteCodeInstrumentor, agent));
        addModifier(new OracleStatementModifier(byteCodeInstrumentor, agent));
        // addModifier(new OracleResultSetModifier(byteCodeInstrumentor, agent));
    }

    private void addCubridDriver() {
        // TODO Cubrid doesn't have connection impl too. Check it out.
        addModifier(new CubridConnectionModifier(byteCodeInstrumentor, agent));
        addModifier(new CubridDriverModifier(byteCodeInstrumentor, agent));
        addModifier(new CubridStatementModifier(byteCodeInstrumentor, agent));
        addModifier(new CubridPreparedStatementModifier(byteCodeInstrumentor, agent));
        addModifier(new CubridResultSetModifier(byteCodeInstrumentor, agent));
        // addModifier(new CubridUStatementModifier(byteCodeInstrumentor, agent));
    }

    private void addDbcpDriver() {
        // TODO Cubrid doesn't have connection impl too. Check it out.
        addModifier(new DBCPBasicDataSourceModifier(byteCodeInstrumentor, agent));
        if (profilerConfig.isJdbcProfileDbcpConnectionClose()) {
            addModifier(new DBCPPoolGuardConnectionWrapperModifier(byteCodeInstrumentor, agent));
        }
    }

    /**
     * Support ORM(iBatis, myBatis, etc.)
     */
    public void addOrmModifier() {
        addIBatisSupport();
        addMyBatisSupport();
    }

    private void addIBatisSupport() {
        if (profilerConfig.isIBatisEnabled()) {
            addModifier(new SqlMapModifier(byteCodeInstrumentor, agent));
            addModifier(new SqlMapClientTemplateModifier(byteCodeInstrumentor, agent));
        }
    }

    private void addMyBatisSupport() {
        if (profilerConfig.isMyBatisEnabled()) {
            addModifier(new MyBatisModifier(byteCodeInstrumentor, agent));
        }
    }

    public void addSpringBeansModifier() {
        if (profilerConfig.isSpringBeansEnabled()) {
            addModifier(AbstractAutowireCapableBeanFactoryModifier.of(byteCodeInstrumentor, agent.getProfilerConfig()));
        }
    }

    public void addLog4jModifier() {
        if (profilerConfig.isLog4jLoggingTransactionInfo()) {
            addModifier(new LoggingEventOfLog4jModifier(byteCodeInstrumentor, agent));
            // addModifier(new Nelo2AsyncAppenderModifier(byteCodeInstrumentor, agent));
            // addModifier(new NeloAppenderModifier(byteCodeInstrumentor, agent));
        }
    }

    public void addLogbackModifier() {
        if (profilerConfig.isLogbackLoggingTransactionInfo()) {
            addModifier(new LoggingEventOfLogbackModifier(byteCodeInstrumentor, agent));
        }
    }
}
| |
package org.zstack.network.l3;
import com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.orm.jpa.JpaSystemException;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.MessageSafe;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.*;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.header.AbstractService;
import org.zstack.header.apimediator.ApiMessageInterceptionException;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.identity.AccountResourceRefInventory;
import org.zstack.header.identity.Quota;
import org.zstack.header.identity.Quota.QuotaOperator;
import org.zstack.header.identity.Quota.QuotaPair;
import org.zstack.header.identity.ReportQuotaExtensionPoint;
import org.zstack.header.identity.ResourceOwnerPreChangeExtensionPoint;
import org.zstack.header.managementnode.PrepareDbInitialValueExtensionPoint;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.NeedQuotaCheckMessage;
import org.zstack.header.network.l2.L2NetworkVO;
import org.zstack.header.network.l2.L2NetworkVO_;
import org.zstack.header.network.l3.*;
import org.zstack.header.network.l3.datatypes.IpCapacityData;
import org.zstack.header.vm.VmNicInventory;
import org.zstack.header.vm.VmNicVO;
import org.zstack.header.vm.VmNicVO_;
import org.zstack.header.zone.ZoneVO;
import org.zstack.identity.AccountManager;
import org.zstack.identity.QuotaUtil;
import org.zstack.network.service.MtuGetter;
import org.zstack.network.service.NetworkServiceSystemTag;
import org.zstack.tag.SystemTagCreator;
import org.zstack.tag.TagManager;
import org.zstack.utils.ObjectUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.network.IPv6Constants;
import org.zstack.utils.network.IPv6NetworkUtils;
import org.zstack.utils.network.NetworkUtils;
import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import java.math.BigInteger;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.stream.Collectors;
import static org.zstack.core.Platform.err;
import static org.zstack.utils.CollectionDSL.*;
public class L3NetworkManagerImpl extends AbstractService implements L3NetworkManager, ReportQuotaExtensionPoint,
ResourceOwnerPreChangeExtensionPoint, PrepareDbInitialValueExtensionPoint {
private static final CLogger logger = Utils.getLogger(L3NetworkManagerImpl.class);
@Autowired
private CloudBus bus;
@Autowired
private DatabaseFacade dbf;
@Autowired
private PluginRegistry pluginRgty;
@Autowired
private DbEntityLister dl;
@Autowired
private AccountManager acntMgr;
@Autowired
private ErrorFacade errf;
@Autowired
private TagManager tagMgr;
private Map<String, IpRangeFactory> ipRangeFactories = Collections.synchronizedMap(new HashMap<String, IpRangeFactory>());
private Map<String, L3NetworkFactory> l3NetworkFactories = Collections.synchronizedMap(new HashMap<String, L3NetworkFactory>());
private Map<String, IpAllocatorStrategy> ipAllocatorStrategies = Collections.synchronizedMap(new HashMap<String, IpAllocatorStrategy>());
private Set<String> notAccountMetaDatas = Collections.synchronizedSet(new HashSet<>());
private static final Set<Class> allowedMessageAfterSoftDeletion = new HashSet<Class>();
static {
allowedMessageAfterSoftDeletion.add(L3NetworkDeletionMsg.class);
}
@Override
@MessageSafe
public void handleMessage(Message msg) {
    // API messages take a different dispatch path from internal bus messages.
    if (msg instanceof APIMessage) {
        handleApiMessage((APIMessage) msg);
        return;
    }
    handleLocalMessage(msg);
}
private void handleLocalMessage(Message msg) {
    // Anything that is not an L3 network message is unrecognized here.
    if (!(msg instanceof L3NetworkMessage)) {
        bus.dealWithUnknownMessage(msg);
        return;
    }
    passThrough((L3NetworkMessage) msg);
}
private void handleApiMessage(APIMessage msg) {
    // NOTE: the generic L3NetworkMessage pass-through sits in the middle of
    // this chain on purpose — do not reorder the checks below.
    if (msg instanceof APICreateL3NetworkMsg) {
        handle((APICreateL3NetworkMsg) msg);
        return;
    }
    if (msg instanceof APISetL3NetworkMtuMsg) {
        handle((APISetL3NetworkMtuMsg) msg);
        return;
    }
    if (msg instanceof APIGetL3NetworkMtuMsg) {
        handle((APIGetL3NetworkMtuMsg) msg);
        return;
    }
    if (msg instanceof L3NetworkMessage) {
        passThrough((L3NetworkMessage) msg);
        return;
    }
    if (msg instanceof APIGetL3NetworkTypesMsg) {
        handle((APIGetL3NetworkTypesMsg) msg);
        return;
    }
    if (msg instanceof APIGetIpAddressCapacityMsg) {
        handle((APIGetIpAddressCapacityMsg) msg);
        return;
    }
    bus.dealWithUnknownMessage(msg);
}
private void handle(final APISetL3NetworkMtuMsg msg) {
    final APISetL3NetworkMtuEvent evt = new APISetL3NetworkMtuEvent(msg.getId());
    final String l3Uuid = msg.getL3NetworkUuid();
    // Replace any previously recorded MTU for this L3 network with the new value.
    NetworkServiceSystemTag.L3_MTU.delete(l3Uuid);
    SystemTagCreator tagCreator = NetworkServiceSystemTag.L3_MTU.newSystemTagCreator(l3Uuid);
    tagCreator.ignoreIfExisting = true;
    tagCreator.inherent = false;
    tagCreator.setTagByTokens(
            map(
                    e(NetworkServiceSystemTag.MTU_TOKEN, msg.getMtu()),
                    e(NetworkServiceSystemTag.L3_UUID_TOKEN, l3Uuid)
            )
    );
    tagCreator.create();
    bus.publish(evt);
}
private void handle(final APIGetL3NetworkMtuMsg msg) {
    // Resolve the effective MTU for the network and send it straight back.
    final APIGetL3NetworkMtuReply reply = new APIGetL3NetworkMtuReply();
    reply.setMtu(new MtuGetter().getMtu(msg.getL3NetworkUuid()));
    bus.reply(msg, reply);
}
private void handle(final APIGetIpAddressCapacityMsg msg) {
    // Computes total/available/used ip-address counts for the resources named in the
    // message (ip ranges, L3 networks, or zones -- exactly one list is expected to be
    // non-empty). Counts are split per ip version and per resource uuid.
    APIGetIpAddressCapacityReply reply = new APIGetIpAddressCapacityReply();

    // Accumulator for one resource: aggregate counters plus an optional per-child
    // breakdown keyed by resource uuid. All counters are capped at Integer.MAX_VALUE
    // because IPv6 ranges can be astronomically large.
    class IpCapacity {
        Map<String, IpCapacity> elements;
        long total;
        long avail;
        long ipv4TotalCapacity;
        long ipv4AvailableCapacity;
        long ipv4UsedIpAddressNumber;
        long ipv6TotalCapacity;
        long ipv6AvailableCapacity;
        long ipv6UsedIpAddressNumber;
        long used = 0L;
    }

    IpCapacity ret = new Callable<IpCapacity>() {
        // Sums the sizes of the given (startIp, endIp, ipVersion) tuples.
        // NOTE(review): not referenced from call() below -- appears to be dead code;
        // also only the IPv6 branch clamps at Integer.MAX_VALUE.
        private long calcTotalIp(List<Tuple> ts) {
            long total = 0;
            for (Tuple t : ts) {
                String sip = t.get(0, String.class);
                String eip = t.get(1, String.class);
                int ipVersion = t.get(2, Integer.class);
                if (ipVersion == IPv6Constants.IPv4) {
                    total = total + NetworkUtils.getTotalIpInRange(sip, eip);
                } else {
                    total += IPv6NetworkUtils.getIpv6RangeSize(sip, eip);
                    if (total > Integer.MAX_VALUE) {
                        total = Integer.MAX_VALUE;
                    }
                }
            }
            return total;
        }

        // Fills `capacity` (and its per-uuid children) with total capacities from
        // tuples of (startIp, endIp, ipVersion, elementUuid). Available counters are
        // initialized equal to the totals; calcElementUsedIp subtracts usage later.
        private void calcElementTotalIp(List<Tuple> tuples, IpCapacity capacity) {
            if (capacity.elements == null) {
                capacity.elements = new HashMap<>();
            }
            Map<String, IpCapacity> elements = capacity.elements;
            long total = 0;
            long ipv4TotalCapacity = 0;
            long ipv6TotalCapacity = 0;
            for (Tuple tuple : tuples) {
                String sip = tuple.get(0, String.class);
                String eip = tuple.get(1, String.class);
                int ipVersion = tuple.get(2, Integer.class);
                String elementUuid = tuple.get(3, String.class);
                IpCapacity element = elements.getOrDefault(elementUuid, new IpCapacity());
                elements.put(elementUuid, element);
                if (ipVersion == IPv6Constants.IPv4) {
                    int t = NetworkUtils.getTotalIpInRange(sip, eip);
                    element.total += t;
                    element.total = Math.min(element.total, Integer.MAX_VALUE);
                    element.avail = element.total;
                    element.ipv4TotalCapacity += t;
                    element.ipv4TotalCapacity = Math.min(element.ipv4TotalCapacity, Integer.MAX_VALUE);
                    element.ipv4AvailableCapacity = element.ipv4TotalCapacity;
                    ipv4TotalCapacity += t;
                    total += t;
                    ipv4TotalCapacity = Math.min(ipv4TotalCapacity, (long)Integer.MAX_VALUE);
                    total = Math.min(total, (long)Integer.MAX_VALUE);
                } else {
                    long t = IPv6NetworkUtils.getIpv6RangeSize(sip, eip);
                    element.total += t;
                    element.total = Math.min(element.total, Integer.MAX_VALUE);
                    element.avail = element.total;
                    element.ipv6TotalCapacity += t;
                    element.ipv6TotalCapacity = Math.min(element.ipv6TotalCapacity, Integer.MAX_VALUE);
                    element.ipv6AvailableCapacity = element.ipv6TotalCapacity;
                    ipv6TotalCapacity += t;
                    total += t;
                    ipv6TotalCapacity = Math.min(ipv6TotalCapacity, (long)Integer.MAX_VALUE);
                    total = Math.min(total, (long)Integer.MAX_VALUE);
                }
            }
            capacity.ipv4TotalCapacity = ipv4TotalCapacity;
            capacity.ipv4AvailableCapacity = ipv4TotalCapacity;
            capacity.ipv6AvailableCapacity = ipv6TotalCapacity;
            capacity.ipv6TotalCapacity = ipv6TotalCapacity;
            capacity.total = total;
            capacity.avail = total;
        }

        // Subtracts allocated ip counts from `capacity` using tuples of
        // (usedCount, elementUuid, ipVersion). Must run after calcElementTotalIp so
        // the available counters start from the totals.
        private void calcElementUsedIp(List<Tuple> tuples, IpCapacity capacity) {
            if (capacity == null) {
                return;
            }
            if (capacity.elements == null) {
                capacity.elements = new HashMap<>();
            }
            Map<String, IpCapacity> elements = capacity.elements;
            long total = 0;
            long ipv4UsedIpAddressNumber = 0;
            long ipv6UsedIpAddressNumber = 0;
            for (Tuple tuple : tuples) {
                long used = tuple.get(0, Long.class);
                String elementUuid = tuple.get(1, String.class);
                int ipVersion = tuple.get(2, Integer.class);
                IpCapacity element = elements.getOrDefault(elementUuid, new IpCapacity());
                elements.put(elementUuid, element);
                if (ipVersion == IPv6Constants.IPv4) {
                    element.ipv4UsedIpAddressNumber += used;
                    element.ipv4AvailableCapacity -= used;
                    ipv4UsedIpAddressNumber += used;
                    ipv4UsedIpAddressNumber = Math.min(ipv4UsedIpAddressNumber, Integer.MAX_VALUE);
                } else {
                    element.ipv6UsedIpAddressNumber += used;
                    element.ipv6AvailableCapacity -= used;
                    ipv6UsedIpAddressNumber += used;
                    ipv6UsedIpAddressNumber = Math.min(ipv6UsedIpAddressNumber, Integer.MAX_VALUE);
                }
                element.used += used;
                element.avail -= used;
                total += used;
                total = Math.min(total, Integer.MAX_VALUE);
            }
            capacity.ipv4AvailableCapacity -= ipv4UsedIpAddressNumber;
            capacity.ipv4UsedIpAddressNumber = ipv4UsedIpAddressNumber;
            capacity.ipv6AvailableCapacity -= ipv6UsedIpAddressNumber;
            capacity.ipv6UsedIpAddressNumber = ipv6UsedIpAddressNumber;
            capacity.used = total;
            capacity.avail -= total;
        }

        @Override
        @Transactional(readOnly = true)
        public IpCapacity call() {
            IpCapacity ret = new IpCapacity();
            if (notAccountMetaDatas.isEmpty()) {
                // JPA rejects an empty `IN ()` list; a blank placeholder keeps the
                // queries valid without matching any real metaData value.
                notAccountMetaDatas.add(""); // Avoid NULL
            }
            // The three branches differ only in which resource scopes the queries:
            // ip range uuid, L3 network uuid, or zone uuid.
            if (msg.getIpRangeUuids() != null && !msg.getIpRangeUuids().isEmpty()) {
                reply.setResourceType(IpRangeVO.class.getSimpleName());
                String sql = "select ipr.startIp, ipr.endIp, ipr.ipVersion, ipr.uuid from IpRangeVO ipr where ipr.uuid in (:uuids)";
                TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class);
                q.setParameter("uuids", msg.getIpRangeUuids());
                List<Tuple> ts = q.getResultList();
                calcElementTotalIp(ts, ret);
                sql = "select count(distinct uip.ip), uip.ipRangeUuid, uip.ipVersion from UsedIpVO uip where uip.ipRangeUuid in (:uuids) and (uip.metaData not in (:notAccountMetaData) or uip.metaData IS NULL) group by uip.ipRangeUuid, uip.ipVersion";
                TypedQuery<Tuple> cq = dbf.getEntityManager().createQuery(sql, Tuple.class);
                cq.setParameter("uuids", msg.getIpRangeUuids());
                cq.setParameter("notAccountMetaData", notAccountMetaDatas);
                List<Tuple> uts = cq.getResultList();
                calcElementUsedIp(uts, ret);
                return ret;
            } else if (msg.getL3NetworkUuids() != null && !msg.getL3NetworkUuids().isEmpty()) {
                reply.setResourceType(L3NetworkVO.class.getSimpleName());
                String sql = "select ipr.startIp, ipr.endIp, ipr.ipVersion, l3.uuid from IpRangeVO ipr, L3NetworkVO l3 where ipr.l3NetworkUuid = l3.uuid and l3.uuid in (:uuids)";
                TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class);
                q.setParameter("uuids", msg.getL3NetworkUuids());
                List<Tuple> ts = q.getResultList();
                calcElementTotalIp(ts, ret);
                sql = "select count(distinct uip.ip), uip.l3NetworkUuid, uip.ipVersion from UsedIpVO uip where uip.l3NetworkUuid in (:uuids) and (uip.metaData not in (:notAccountMetaData) or uip.metaData IS NULL) group by uip.l3NetworkUuid, uip.ipVersion";
                TypedQuery<Tuple> cq = dbf.getEntityManager().createQuery(sql, Tuple.class);
                cq.setParameter("uuids", msg.getL3NetworkUuids());
                cq.setParameter("notAccountMetaData", notAccountMetaDatas);
                List<Tuple> uts = cq.getResultList();
                calcElementUsedIp(uts, ret);
                return ret;
            } else if (msg.getZoneUuids() != null && !msg.getZoneUuids().isEmpty()) {
                reply.setResourceType(ZoneVO.class.getSimpleName());
                String sql = "select ipr.startIp, ipr.endIp, ipr.ipVersion, zone.uuid from IpRangeVO ipr, L3NetworkVO l3, ZoneVO zone where ipr.l3NetworkUuid = l3.uuid and l3.zoneUuid = zone.uuid and zone.uuid in (:uuids)";
                TypedQuery<Tuple> q = dbf.getEntityManager().createQuery(sql, Tuple.class);
                q.setParameter("uuids", msg.getZoneUuids());
                List<Tuple> ts = q.getResultList();
                calcElementTotalIp(ts, ret);
                sql = "select count(distinct uip.ip), zone.uuid, uip.ipVersion from UsedIpVO uip, L3NetworkVO l3, ZoneVO zone where uip.l3NetworkUuid = l3.uuid and l3.zoneUuid = zone.uuid and zone.uuid in (:uuids) and (uip.metaData not in (:notAccountMetaData) or uip.metaData IS NULL) group by zone.uuid, uip.ipVersion";
                TypedQuery<Tuple> cq = dbf.getEntityManager().createQuery(sql, Tuple.class);
                cq.setParameter("uuids", msg.getZoneUuids());
                cq.setParameter("notAccountMetaData", notAccountMetaDatas);
                List<Tuple> uts = cq.getResultList();
                calcElementUsedIp(uts, ret);
                return ret;
            }

            // API validation should guarantee one of the uuid lists is non-empty.
            throw new CloudRuntimeException("should not be here");
        }
    }.call();

    // Copy the per-resource breakdown (if any) into the reply's DTO list.
    if (ret.elements != null) {
        List<IpCapacityData> capacityData = new ArrayList<>();
        ret.elements.forEach((uuid, element) -> {
            IpCapacityData data = new IpCapacityData();
            capacityData.add(data);
            data.setResourceUuid(uuid);
            data.setTotalCapacity(element.total);
            data.setAvailableCapacity(element.avail);
            data.setUsedIpAddressNumber(element.used);
            data.setIpv4TotalCapacity(element.ipv4TotalCapacity);
            data.setIpv4AvailableCapacity(element.ipv4AvailableCapacity);
            data.setIpv4UsedIpAddressNumber(element.ipv4UsedIpAddressNumber);
            data.setIpv6TotalCapacity(element.ipv6TotalCapacity);
            data.setIpv6AvailableCapacity(element.ipv6AvailableCapacity);
            data.setIpv6UsedIpAddressNumber(element.ipv6UsedIpAddressNumber);
        });
        reply.setCapacityData(capacityData);
    }

    reply.setIpv4TotalCapacity(ret.ipv4TotalCapacity);
    reply.setIpv4UsedIpAddressNumber(ret.ipv4UsedIpAddressNumber);
    reply.setIpv4AvailableCapacity(ret.ipv4AvailableCapacity);
    reply.setIpv6TotalCapacity(ret.ipv6TotalCapacity);
    reply.setIpv6UsedIpAddressNumber(ret.ipv6UsedIpAddressNumber);
    reply.setIpv6AvailableCapacity(ret.ipv6AvailableCapacity);
    reply.setTotalCapacity(ret.total);
    reply.setAvailableCapacity(ret.avail);
    reply.setUsedIpAddressNumber(ret.used);
    bus.reply(msg, reply);
}
private void handle(APIGetL3NetworkTypesMsg msg) {
    // Report every registered L3 network type name back to the caller.
    List<String> typeNames = new ArrayList<>(L3NetworkType.getAllTypeNames());
    APIGetL3NetworkTypesReply reply = new APIGetL3NetworkTypesReply();
    reply.setL3NetworkTypes(typeNames);
    bus.reply(msg, reply);
}
private void passThrough(String l3NetworkUuid, Message msg) {
    // Route the message to the concrete L3Network object identified by the uuid.
    L3NetworkVO vo = dbf.findByUuid(l3NetworkUuid, L3NetworkVO.class);
    if (vo == null && allowedMessageAfterSoftDeletion.contains(msg.getClass())) {
        // Some message types remain legal after soft deletion; rebuild the VO from the EO.
        L3NetworkEO eo = dbf.findByUuid(l3NetworkUuid, L3NetworkEO.class);
        vo = ObjectUtils.newAndCopy(eo, L3NetworkVO.class);
    }

    if (vo == null) {
        bus.replyErrorByMessageType(msg, err(SysErrors.RESOURCE_NOT_FOUND,
                "Unable to find L3Network[uuid:%s], it may have been deleted", l3NetworkUuid));
        return;
    }

    L3NetworkFactory factory = getL3NetworkFactory(L3NetworkType.valueOf(vo.getType()));
    factory.getL3Network(vo).handleMessage(msg);
}
private void passThrough(L3NetworkMessage msg) {
    // Convenience overload: extract the uuid and forward to the generic router.
    String uuid = msg.getL3NetworkUuid();
    passThrough(uuid, (Message) msg);
}
private void handle(APICreateL3NetworkMsg msg) {
    // An L3 network always lives in the same zone as its parent L2 network.
    SimpleQuery<L2NetworkVO> query = dbf.createQuery(L2NetworkVO.class);
    query.select(L2NetworkVO_.zoneUuid);
    query.add(L2NetworkVO_.uuid, Op.EQ, msg.getL2NetworkUuid());
    String zoneUuid = query.findValue();
    if (zoneUuid == null) {
        // Was a bare `assert`, which is a no-op unless the JVM runs with -ea;
        // fail loudly instead of continuing with a null zone.
        throw new CloudRuntimeException(String.format(
                "cannot find zoneUuid of L2Network[uuid:%s]", msg.getL2NetworkUuid()));
    }

    L3NetworkVO vo = new L3NetworkVO();
    if (msg.getResourceUuid() != null) {
        vo.setUuid(msg.getResourceUuid());
    } else {
        vo.setUuid(Platform.getUuid());
    }
    vo.setDescription(msg.getDescription());
    vo.setDnsDomain(msg.getDnsDomain());
    vo.setL2NetworkUuid(msg.getL2NetworkUuid());
    vo.setName(msg.getName());
    vo.setSystem(msg.isSystem());
    vo.setZoneUuid(zoneUuid);
    vo.setState(L3NetworkState.Enabled);
    vo.setCategory(L3NetworkCategory.valueOf(msg.getCategory()));
    if (msg.getIpVersion() != null) {
        vo.setIpVersion(msg.getIpVersion());
    } else {
        // Default to IPv4 when the API does not specify an ip version.
        vo.setIpVersion(IPv6Constants.IPv4);
    }

    L3NetworkFactory factory = getL3NetworkFactory(L3NetworkType.valueOf(msg.getType()));
    // Create the network and its tags in a single SQL batch/transaction.
    L3NetworkInventory inv = new SQLBatchWithReturn<L3NetworkInventory>() {
        @Override
        protected L3NetworkInventory scripts() {
            vo.setAccountUuid(msg.getSession().getAccountUuid());
            L3NetworkInventory inv = factory.createL3Network(vo, msg);
            tagMgr.createTagsFromAPICreateMessage(msg, vo.getUuid(), L3NetworkVO.class.getSimpleName());
            return inv;
        }
    }.execute();

    APICreateL3NetworkEvent evt = new APICreateL3NetworkEvent(msg.getId());
    evt.setInventory(inv);
    logger.debug(String.format("Successfully created L3Network[name:%s, uuid:%s]", inv.getName(), inv.getUuid()));
    bus.publish(evt);
}
@Override
public String getId() {
    // This manager's service id on the message bus.
    String serviceId = bus.makeLocalServiceId(L3NetworkConstant.SERVICE_ID);
    return serviceId;
}
@Override
public boolean start() {
    // Component start hook: register all plugin-provided factories and strategies
    // before any message is served.
    populateExtensions();
    return true;
}
@Override
public boolean applyNetworkServiceWhenVmStateChange(String type) {
    // Consult the factory registered for this L3 network type; an unknown type
    // indicates a programming error.
    L3NetworkFactory factory = l3NetworkFactories.get(type);
    if (factory != null) {
        return factory.applyNetworkServiceWhenVmStateChange();
    }
    throw new CloudRuntimeException(String.format("Cannot find L3NetworkFactory for type(%s)", type));
}
private void populateExtensions() {
    // Collect plugin-provided extensions, rejecting duplicate registrations per type.
    for (L3NetworkFactory f : pluginRgty.getExtensionList(L3NetworkFactory.class)) {
        L3NetworkFactory old = l3NetworkFactories.get(f.getType().toString());
        if (old != null) {
            throw new CloudRuntimeException(String.format("duplicate L3NetworkFactory[%s, %s] for type[%s]", f.getClass().getName(),
                    old.getClass().getName(), f.getType()));
        }
        l3NetworkFactories.put(f.getType().toString(), f);
    }
    for (IpAllocatorStrategy f : pluginRgty.getExtensionList(IpAllocatorStrategy.class)) {
        IpAllocatorStrategy old = ipAllocatorStrategies.get(f.getType().toString());
        if (old != null) {
            throw new CloudRuntimeException(String.format("duplicate IpAllocatorStrategy[%s, %s] for type[%s]", f.getClass().getName(),
                    old.getClass().getName(), f.getType()));
        }
        ipAllocatorStrategies.put(f.getType().toString(), f);
    }
    // MetaData values that mark "system" ips excluded from capacity accounting.
    for (UsedIpNotAccountMetaDataExtensionPoint f : pluginRgty.getExtensionList(UsedIpNotAccountMetaDataExtensionPoint.class)) {
        notAccountMetaDatas.add(f.usedIpNotAccountMetaData());
    }
    for (IpRangeFactory f : pluginRgty.getExtensionList(IpRangeFactory.class)) {
        IpRangeFactory old = ipRangeFactories.get(f.getType().toString());
        if (old != null) {
            // Fixed copy-paste defect: the message previously named "L2NetworkFactory".
            throw new CloudRuntimeException(String.format("duplicate IpRangeFactory[%s, %s] for type[%s]",
                    f.getClass().getName(), old.getClass().getName(), f.getType()));
        }
        ipRangeFactories.put(f.getType().toString(), f);
    }
}
@Override
public boolean stop() {
    // Nothing to clean up on component shutdown.
    return true;
}
@Override
public IpRangeFactory getIpRangeFactory(IpRangeType type) {
    // Look up the factory registered for this ip-range type at start().
    IpRangeFactory factory = ipRangeFactories.get(type.toString());
    if (factory != null) {
        return factory;
    }
    throw new CloudRuntimeException(String.format("Cannot find IpRangeFactory for type(%s)", type));
}
@Override
public L3NetworkFactory getL3NetworkFactory(L3NetworkType type) {
    // Look up the factory registered for this L3 network type at start().
    L3NetworkFactory factory = l3NetworkFactories.get(type.toString());
    if (factory != null) {
        return factory;
    }
    throw new CloudRuntimeException(String.format("Cannot find L3NetworkFactory for type(%s)", type));
}
@Override
public IpAllocatorStrategy getIpAllocatorStrategy(IpAllocatorType type) {
    // Look up the allocator strategy registered for this type at start().
    IpAllocatorStrategy strategy = ipAllocatorStrategies.get(type.toString());
    if (strategy != null) {
        return strategy;
    }
    throw new CloudRuntimeException(String.format("Cannot find IpAllocatorStrategy for type(%s)", type));
}
/**
 * Persists a UsedIpVO claiming the given IPv6 address in the range.
 * Returns null when the row already exists (concurrent allocation), so callers
 * can retry with another address.
 */
private UsedIpInventory reserveIpv6(IpRangeVO ipRange, String ip, boolean allowDuplicatedAddress) {
    try {
        UsedIpVO vo = new UsedIpVO();
        //vo.setIpInLong(NetworkUtils.ipv4StringToLong(ip));
        String uuid;
        if (allowDuplicatedAddress) {
            // Duplicated addresses need distinct rows, so use a fresh random uuid.
            uuid = Platform.getUuid();
        } else {
            // Deterministic uuid derived from range+ip (same scheme as reserveIpv4):
            // a concurrent reservation of the same ip collides on the primary key
            // instead of silently double-allocating.
            uuid = ipRange.getUuid() + ip;
            uuid = UUID.nameUUIDFromBytes(uuid.getBytes()).toString().replaceAll("-", "");
        }
        vo.setUuid(uuid);
        vo.setIpRangeUuid(ipRange.getUuid());
        // Store the canonical textual form so equality checks are reliable.
        vo.setIp(IPv6NetworkUtils.getIpv6AddressCanonicalString(ip));
        vo.setL3NetworkUuid(ipRange.getL3NetworkUuid());
        vo.setNetmask(ipRange.getNetmask());
        vo.setGateway(ipRange.getGateway());
        vo.setIpVersion(IPv6Constants.IPv6);
        vo = dbf.persistAndRefresh(vo);
        return UsedIpInventory.valueOf(vo);
    } catch (JpaSystemException e) {
        if (e.getRootCause() instanceof MySQLIntegrityConstraintViolationException) {
            logger.debug(String.format("Concurrent ip allocation. " +
                    "Ip[%s] in ip range[uuid:%s] has been allocated, try allocating another one. " +
                    "The error[Duplicate entry] printed by jdbc.spi.SqlExceptionHelper is no harm, " +
                    "we will try finding another ip", ip, ipRange.getUuid()));
            logger.trace("", e);
        } else {
            throw e;
        }
        return null;
    }
}
/**
 * Persists a UsedIpVO claiming the given IPv4 address in the range.
 * Returns null when the row already exists (concurrent allocation), so callers
 * can retry with another address.
 */
private UsedIpInventory reserveIpv4(IpRangeVO ipRange, String ip, boolean allowDuplicatedAddress) {
    try {
        UsedIpVO vo = new UsedIpVO(ipRange.getUuid(), ip);
        vo.setIpInLong(NetworkUtils.ipv4StringToLong(ip));
        String uuid;
        if (allowDuplicatedAddress) {
            // Duplicated addresses need distinct rows, so use a fresh random uuid.
            uuid = Platform.getUuid();
        } else {
            // Deterministic uuid derived from range+ip: concurrent reservations of the
            // same ip collide on the primary key instead of double-allocating.
            uuid = UUID.nameUUIDFromBytes((ipRange.getUuid() + ip).getBytes()).toString().replaceAll("-", "");
        }
        vo.setUuid(uuid);
        vo.setL3NetworkUuid(ipRange.getL3NetworkUuid());
        vo.setNetmask(ipRange.getNetmask());
        vo.setGateway(ipRange.getGateway());
        vo.setIpVersion(IPv6Constants.IPv4);
        vo = dbf.persistAndRefresh(vo);
        return UsedIpInventory.valueOf(vo);
    } catch (JpaSystemException e) {
        if (!(e.getRootCause() instanceof MySQLIntegrityConstraintViolationException)) {
            throw e;
        }
        logger.debug(String.format("Concurrent ip allocation. " +
                "Ip[%s] in ip range[uuid:%s] has been allocated, try allocating another one. " +
                "The error[Duplicate entry] printed by jdbc.spi.SqlExceptionHelper is no harm, " +
                "we will try finding another ip", ip, ipRange.getUuid()));
        logger.trace("", e);
        return null;
    }
}
@Override
public UsedIpInventory reserveIp(IpRangeVO ipRange, String ip) {
    // Default overload: duplicated addresses are not allowed.
    final boolean allowDuplicatedAddress = false;
    return reserveIp(ipRange, ip, allowDuplicatedAddress);
}
@Override
public UsedIpInventory reserveIp(IpRangeVO ipRange, String ip, boolean allowDuplicatedAddress) {
    // Dispatch on the address family of the literal; unrecognized strings reserve nothing.
    if (NetworkUtils.isIpv4Address(ip)) {
        return reserveIpv4(ipRange, ip, allowDuplicatedAddress);
    }
    if (IPv6NetworkUtils.isIpv6Address(ip)) {
        return reserveIpv6(ipRange, ip, allowDuplicatedAddress);
    }
    return null;
}
/**
 * Returns true when every address in the range is already allocated,
 * comparing the count of distinct used ips against the range capacity.
 */
@Override
public boolean isIpRangeFull(IpRangeVO vo) {
    SimpleQuery<UsedIpVO> query = dbf.createQuery(UsedIpVO.class);
    query.add(UsedIpVO_.ipRangeUuid, Op.EQ, vo.getUuid());
    query.select(UsedIpVO_.ip);
    // The selected `ip` column is a String. The previous declaration List<Long>
    // only avoided a ClassCastException through type erasure; fix the element type.
    List<String> used = query.listValue();
    used = used.stream().distinct().collect(Collectors.toList());
    if (vo.getIpVersion() == IPv6Constants.IPv4) {
        int total = NetworkUtils.getTotalIpInRange(vo.getStartIp(), vo.getEndIp());
        return used.size() >= total;
    } else {
        return IPv6NetworkUtils.isIpv6RangeFull(vo.getStartIp(), vo.getEndIp(), used.size());
    }
}
/**
 * Returns the distinct allocated addresses of the range as BigIntegers,
 * sorted in ascending numeric order.
 */
@Override
public List<BigInteger> getUsedIpInRange(IpRangeVO vo) {
    SimpleQuery<UsedIpVO> query = dbf.createQuery(UsedIpVO.class);
    query.add(UsedIpVO_.ipRangeUuid, Op.EQ, vo.getUuid());
    if (vo.getIpVersion() == IPv6Constants.IPv4) {
        // IPv4 addresses are stored numerically in ipInLong.
        query.select(UsedIpVO_.ipInLong);
        List<Long> used = query.listValue();
        return used.stream()
                .distinct()
                .sorted()
                .map(l -> new BigInteger(String.valueOf(l)))
                .collect(Collectors.toList());
    }
    // IPv6 addresses are stored as canonical strings; convert before sorting numerically.
    query.select(UsedIpVO_.ip);
    List<String> used = query.listValue();
    return used.stream()
            .distinct()
            .map(IPv6NetworkUtils::getBigIntegerFromString)
            .sorted()
            .collect(Collectors.toList());
}
@Override
public void updateIpAllocationMsg(AllocateIpMsg msg, String mac) {
    // Pre-computes a required IPv6 address from the MAC for non-stateful address
    // modes. Does nothing when the caller already requested a specific ip.
    if (msg.getRequiredIp() != null) {
        return;
    }
    List<NormalIpRangeVO> iprs = Q.New(NormalIpRangeVO.class).eq(NormalIpRangeVO_.ipVersion, IPv6Constants.IPv6)
            .eq(NormalIpRangeVO_.l3NetworkUuid, msg.getL3NetworkUuid()).list();
    if (iprs.isEmpty()) {
        // No IPv6 range on this L3 network; nothing to derive.
        return;
    }
    if (!iprs.get(0).getAddressMode().equals(IPv6Constants.Stateful_DHCP)) {
        // NOTE(review): only the first range is consulted -- assumes all IPv6 ranges
        // of one network share the same address mode and cidr; confirm against schema.
        msg.setRequiredIp(IPv6NetworkUtils.getIPv6AddresFromMac(iprs.get(0).getNetworkCidr(), mac));
        msg.setIpVersion(IPv6Constants.IPv6);
    }
}
@Override
public List<Quota> reportQuota() {
    // Declares the L3-network-count quota (L3_NUM): admin accounts are exempt;
    // other accounts are validated on every APICreateL3NetworkMsg.
    QuotaOperator checker = new QuotaOperator() {
        @Override
        public void checkQuota(APIMessage msg, Map<String, QuotaPair> pairs) {
            if (!new QuotaUtil().isAdminAccount(msg.getSession().getAccountUuid())) {
                if (msg instanceof APICreateL3NetworkMsg) {
                    check((APICreateL3NetworkMsg) msg, pairs);
                }
            }
        }

        @Override
        public void checkQuota(NeedQuotaCheckMessage msg, Map<String, QuotaPair> pairs) {
            // No quota check needed for internal (non-API) messages.
        }

        @Override
        public List<Quota.QuotaUsage> getQuotaUsageByAccount(String accountUuid) {
            Quota.QuotaUsage usage = new Quota.QuotaUsage();
            usage.setName(L3NetworkQuotaConstant.L3_NUM);
            usage.setUsed(getUsedL3(accountUuid));
            return list(usage);
        }

        // Number of L3 networks currently owned by the account.
        @Transactional(readOnly = true)
        private long getUsedL3(String accountUuid) {
            String sql = "select count(l3) from L3NetworkVO l3, AccountResourceRefVO ref where l3.uuid = ref.resourceUuid and " +
                    "ref.accountUuid = :auuid and ref.resourceType = :rtype";
            TypedQuery<Long> q = dbf.getEntityManager().createQuery(sql, Long.class);
            q.setParameter("auuid", accountUuid);
            q.setParameter("rtype", L3NetworkVO.class.getSimpleName());
            Long l3n = q.getSingleResult();
            l3n = l3n == null ? 0 : l3n;
            return l3n;
        }

        // Rejects the creation when it would push the account past its L3_NUM quota.
        private void check(APICreateL3NetworkMsg msg, Map<String, QuotaPair> pairs) {
            long l3Num = pairs.get(L3NetworkQuotaConstant.L3_NUM).getValue();
            long l3n = getUsedL3(msg.getSession().getAccountUuid());
            if (l3n + 1 > l3Num) {
                throw new ApiMessageInterceptionException(new QuotaUtil().buildQuataExceedError(
                        msg.getSession().getAccountUuid(), L3NetworkQuotaConstant.L3_NUM, l3Num));
            }
        }
    };

    Quota quota = new Quota();
    quota.setOperator(checker);
    quota.addMessageNeedValidation(APICreateL3NetworkMsg.class);

    // Default quota value comes from global configuration.
    QuotaPair p = new QuotaPair();
    p.setName(L3NetworkQuotaConstant.L3_NUM);
    p.setValue(L3NetworkQuotaGlobalConfig.L3_NUM.defaultValue(Long.class));
    quota.addPair(p);

    return list(quota);
}
@Override
@Transactional(readOnly = true)
public void resourceOwnerPreChange(AccountResourceRefInventory ref, String newOwnerUuid) {
    // No validation is required before an L3 network changes owner.
}
@Override
public void prepareDbInitialValue() {
    // Startup data migrations.
    // 1) Backfill IpRangeVO.prefixLen for rows created before the column existed.
    List<IpRangeVO> ipRangeVOS = Q.New(IpRangeVO.class).isNull(IpRangeVO_.prefixLen).list();
    for (IpRangeVO ipr : ipRangeVOS) {
        ipr.setPrefixLen(NetworkUtils.getPrefixLengthFromNetwork(ipr.getNetmask()));
    }
    if (!ipRangeVOS.isEmpty()) {
        dbf.updateCollection(ipRangeVOS);
    }
    // 2) Backfill UsedIpVO.vmNicUuid (the back-reference from ip to nic) for rows
    //    that only have the forward reference VmNicVO.usedIpUuid.
    List<VmNicVO> nics = Q.New(VmNicVO.class).notNull(VmNicVO_.usedIpUuid).list();
    List<UsedIpVO> ips = new ArrayList<>();
    for (VmNicVO nic : nics) {
        UsedIpVO ip = Q.New(UsedIpVO.class).eq(UsedIpVO_.uuid, nic.getUsedIpUuid()).isNull(UsedIpVO_.vmNicUuid).find();
        if (ip != null) {
            ip.setVmNicUuid(nic.getUuid());
            ips.add(ip);
        }
    }
    if (!ips.isEmpty()) {
        dbf.updateCollection(ips);
    }
}
/**
 * Keeps only the nics that can carry an address of the requested ip version.
 * An unrecognized version yields an empty list.
 */
@Override
public List<VmNicInventory> filterVmNicByIpVersion(List<VmNicInventory> vmNics, int ipVersion) {
    List<VmNicInventory> matched = new ArrayList<>();
    for (VmNicInventory nic : vmNics) {
        boolean keep;
        if (ipVersion == IPv6Constants.IPv4) {
            keep = !nic.isIpv6OnlyNic();
        } else if (ipVersion == IPv6Constants.IPv6) {
            keep = !nic.isIpv4OnlyNic();
        } else {
            keep = false;
        }
        if (keep) {
            matched.add(nic);
        }
    }
    return matched;
}
}
| |
package Gamess.gamessGUI.Dialogs;
import java.awt.Color;
import java.awt.Component;
import java.util.ArrayList;
import java.util.EventObject;
import javax.swing.BorderFactory;
import javax.swing.JComboBox;
import javax.swing.JTable;
import javax.swing.border.Border;
import javax.swing.event.CellEditorListener;
import javax.swing.event.ChangeEvent;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import Gamess.gamessGUI.Cosmetics;
import Gamess.gamessGUI.Dictionary;
import Gamess.gamessGUI.IncompatibilityPackage.ExcludeIncompatibility;
import Gamess.gamessGUI.IncompatibilityPackage.RequiresIncompatibility;
import Gamess.gamessGUI.Storage.Repository;
/**
 * Combo-box cell used in the GAMESS keyword menu table. One instance serves as the
 * table's renderer, its editor, and a consistency-test participant
 * ({@link IMenuTableCellTester}): selecting a value stores "Keyword=Value" in the
 * {@link Repository} and triggers incompatibility re-checks on all sibling cells.
 */
public class MenuTableComboBox extends JComboBox implements TableCellRenderer,TableCellEditor,IMenuTableCellTester
{
    private static final long serialVersionUID = 1L;
    // GAMESS input group this keyword belongs to.
    private String Group = null;
    // Display name of the keyword edited by this combo box.
    private String Keyword = null;
    // Full value list parsed from the XML node, before incompatibility filtering.
    private ArrayList<String> OriginalValues = new ArrayList<String>();
    // Sentinel item meaning "use the keyword's default value".
    private String DEFAULT = "<default>";
    private String DefaultValue = null;
    // Tooltip shown when the cell is consistent; built in the constructor.
    private String DefaultToolTip = "<b>DefaultValue :</b> ";
    private ArrayList<CellEditorListener> cellListeners = new ArrayList<CellEditorListener>();
    private boolean isConsistent = true;
    // Guards selectedItemChanged() against firing while the item list is rebuilt.
    private boolean isRecomputingConsistency = false;
    private Border consistentBorder = getBorder();
    private Border inConsistentBorder = BorderFactory.createLineBorder(Color.RED);
    // Peer cells in the same dialog; re-tested whenever this cell's value changes.
    ArrayList<IMenuTableCellTester> cellListenerList = null;
    // Lightweight combo used only for painting the cell when it is not being edited.
    private JComboBox rendererInstance = new JComboBox();
    //default constructor
    public MenuTableComboBox()
    {
        super();
    }
    // Convenience constructor that pre-populates the item list.
    public MenuTableComboBox(Object[] items)
    {
        super(items);
    }
    /**
     * Builds the combo box from an XML menu node. The node must carry
     * MenuType="COMBO"; its child nodes supply the selectable values, with an
     * optional isDefault="True" attribute marking the default value.
     */
    public MenuTableComboBox(String Group, Node currentNode, ArrayList<IMenuTableCellTester> cellListenerList)
    {
        super();
        NamedNodeMap currentAttributes = currentNode.getAttributes();
        //Check if it is combobox node
        if(currentAttributes.getNamedItem("MenuType") == null || !currentAttributes.getNamedItem("MenuType").getNodeValue().equalsIgnoreCase("COMBO"))
        {
            //Error : Not a combobox node; leave the widget empty.
            return;
        }
        this.cellListenerList = cellListenerList;
        // A "Group" attribute on the node overrides the Group passed in by the caller.
        if(currentAttributes.getNamedItem("Group") != null)
            Group = currentAttributes.getNamedItem("Group").getNodeValue();
        //Group
        this.Group = Group;
        //Keyword
        if(currentAttributes.getNamedItem("DisplayName") != null)
            Keyword = currentAttributes.getNamedItem("DisplayName").getNodeValue();
        String userToolTip = "";
        //ToolTip
        if(currentAttributes.getNamedItem("ToolTip") != null)
            userToolTip = currentAttributes.getNamedItem("ToolTip").getNodeValue();
        //Values
        NodeList valueList = currentNode.getChildNodes();
        for(int i = 0 ; valueList.getLength() > i ; i++)
        {
            Node valueNode;
            //if it is a text node skip it
            if((valueNode = valueList.item(i)).getNodeType() == Node.TEXT_NODE)
                continue;
            String _Value = valueNode.getTextContent().trim();
            currentAttributes = valueNode.getAttributes();
            //Check if this value is the default value
            if(currentAttributes.getNamedItem("isDefault") != null && currentAttributes.getNamedItem("isDefault").getNodeValue().equalsIgnoreCase("True"))
                DefaultValue = _Value;
            //Add the value to the list
            OriginalValues.add(_Value);
            //////////////////////////////////////////////////////////////////////
            // Register the entity with the Organized document                  //
            // This is used for the ContentAssist
            Dictionary.Register(Group, Keyword, _Value);
            //                                                                  //
            //////////////////////////////////////////////////////////////////////
        }
        //Add items: the "<default>" sentinel first, then every parsed value.
        addItem(DEFAULT);
        for (int i = 0; i < OriginalValues.size(); i++)
            addItem(OriginalValues.get(i));
        if(DefaultValue != null)
        {
            setSelectedItem(DEFAULT);
            DefaultToolTip += DefaultValue;
        }
        else
        {
            setSelectedIndex(-1);
            DefaultToolTip += "There is no default value set";
        }
        if(userToolTip.length() != 0)
            DefaultToolTip = userToolTip + "<br/>" + DefaultToolTip;
        setToolTip(DefaultToolTip);
    }
    /**
     * Re-synchronizes the selection with the value stored in the Repository,
     * falling back to the default when the document has no value.
     */
    public void resetValues()
    {
        isRecomputingConsistency = true;
        String valueInDB = Repository.getInstance().Retrieve(Group , Keyword);
        //if valueInDB is null then there is no value in the document.
        if(valueInDB == null)
        {
            //reset it to the default value
            if(DefaultValue == null)
                setSelectedIndex(-1);
            else
                setSelectedItem(DefaultValue);
            isRecomputingConsistency = false;
            return;
        }
        //else set the value in the DB to be the current selected
        setSelectedItem(valueInDB);
        isRecomputingConsistency = false;
    }
    // TableCellRenderer: paint via the shared renderer combo showing only the
    // currently selected item.
    public Component getTableCellRendererComponent(JTable arg0, Object arg1, boolean arg2, boolean arg3, int arg4, int arg5) {
        rendererInstance.removeAllItems();
        rendererInstance.addItem((getSelectedItem() == null)?null:getSelectedItem());
        //if(isConsistent == false)
        //rendererInstance.setBorder(inConsistentBorder);
        //else
        //rendererInstance.setBorder(consistentBorder);
        //rendererInstance.setEnabled(super.isEnabled());
        return rendererInstance;
        //return new MenuTableComboBox(new String[]{(this.getSelectedItem() == null)?null:this.getSelectedItem().toString()});
    }
    // TableCellEditor: the combo box itself is the editing component.
    public Component getTableCellEditorComponent(JTable arg0, Object arg1, boolean arg2, int arg3, int arg4) {
        return this;
    }
    public void addCellEditorListener(CellEditorListener listener)
    {
        cellListeners.add(listener);
    }
    // Notify all registered listeners that editing was cancelled.
    public void cancelCellEditing() {
        for (int i = 0; i < cellListeners.size(); i++)
        {
            cellListeners.get(i).editingCanceled(new ChangeEvent(this));
        }
    }
    /**
     * Returns the effective cell value (resolving "<default>" to the default value)
     * and persists it to the Repository as a side effect.
     */
    public Object getCellEditorValue()
    {
        if(getSelectedItem() == null)
            return null;
        String CellValue = null;
        //if the selected value is "<default>" return the default value
        if(DEFAULT.equalsIgnoreCase(getSelectedItem().toString()))
            CellValue = DefaultValue;
        else
            CellValue = getSelectedItem().toString();
        // NOTE(review): stores with a hard-coded "=" separator, unlike
        // selectedItemChanged() which asks the Repository for the separator.
        Repository.getInstance().Store(Group, Keyword + "=" + CellValue);
        return CellValue;
    }
    public boolean isCellEditable(EventObject arg0)
    {
        //if the combobox is empty then this is not editable
        if(super.getItemCount() == 0)
            return false;
        return true;
    }
    public void removeCellEditorListener(CellEditorListener listener)
    {
        cellListeners.remove(listener);
    }
    public boolean shouldSelectCell(EventObject arg0)
    {
        return true;
    }
    // Notify all registered listeners that editing finished; always succeeds.
    public boolean stopCellEditing()
    {
        for (int i = 0; i < cellListeners.size(); i++)
        {
            cellListeners.get(i).editingStopped(new ChangeEvent(this));
        }
        return true;
    }
    /**
     * Rebuilds the item list according to the current exclude/require
     * incompatibility rules, marking the cell inconsistent (red border + tooltip)
     * when the selected value conflicts with them.
     */
    public void recomputeConsistency() {
        isRecomputingConsistency = true;
        //Check if this group/keyword is excluded
        if(ExcludeIncompatibility.getInstance().isLikelyToBecomeIncompatible(Group) || ExcludeIncompatibility.getInstance().isLikelyToBecomeIncompatible(Group + " " + Keyword))
        {
            setComboBoxEnabled(false);
            setToolTip("This is disabled because it is likely to be excluded if selected.");
            isRecomputingConsistency = false;
            return;
        }
        setComboBoxEnabled(true);
        setToolTip(DefaultToolTip);
        String currentValue = (String)getSelectedItem();
        if(DEFAULT.equalsIgnoreCase(currentValue))
            currentValue = DefaultValue;
        //Remove all the items and rebuild from the original list
        removeAllItems();
        addItem(DEFAULT);
        //check all the values in the original list
        for (int i = 0; i < OriginalValues.size(); i++)
        {
            String Value = OriginalValues.get(i);
            //Check if this value is required
            //if required and if is not the current selected value then set the inconsistency to true
            if(RequiresIncompatibility.getInstance().isLikelyToBecomeIncompatible(Group + " " + Keyword + " " + Value) && !Value.equals(currentValue))
            {
                setConsistency(false);
                setToolTip(Value + " is currently required. Please select it");
            }
            //Check if this value is excluded
            //if excluded then remove it from the combobox else add it
            //This makes the combobox consistent
            boolean isIncompatible = ExcludeIncompatibility.getInstance().isLikelyToBecomeIncompatible(Group + " " + Keyword + " " + Value);
            //the value is incompatible
            //check if the item is the current selected value
            if(isIncompatible == true && Value.equals(currentValue))
            {
                //Keep the excluded value in the list so the selection survives,
                //but flag the cell as inconsistent.
                addItem(Value);
                setConsistency(false);
                setToolTip("Current selected value is inconsistent. Please select another value");
            }
            if(!isIncompatible)
                addItem(Value);
            /*if(isIncompatible)
            {
                if(Value.equals(currentValue))
                {
                    //it is the current selected value
                    isConsistent = false;
                    continue;
                }
                for (int j = 0; j < getItemCount(); j++)
                {
                    if(getItemAt(j).equals(Value))
                    {
                        removeItemAt(j);
                        j--;
                    }
                }
            }
            else
            {
                //This value is not incompatible.
                //Check if it is available in the list. if not add it.
                boolean isItemAvail = false;
                for (int j = 0; j < getItemCount(); j++)
                {
                    if(getItemAt(j).equals(Value))
                        isItemAvail = true;
                }
                // find the position to insert
                if(!isItemAvail)
                    addItem(Value);
            }*/
        }
        //Restore the previous selection (mapping the default back to "<default>").
        if(currentValue == null)
            setSelectedIndex(-1);
        else
        {
            if(currentValue.equalsIgnoreCase(DefaultValue))
                setSelectedItem(DEFAULT);
            else
                setSelectedItem(currentValue);
        }
        isRecomputingConsistency = false;
    }
    public boolean isConsistent() {
        return isConsistent;
    }
    public boolean isDefault() {
        //if nothing is selected or if it is the default value or if the value is "<default>" then it is default
        if(getSelectedItem() == null || getSelectedItem().toString().equalsIgnoreCase(DefaultValue) || getSelectedItem().toString().equalsIgnoreCase(DEFAULT))
            return true;
        return false;
    }
    public String getGroup() {
        return Group;
    }
    public String getKeyword() {
        return Keyword;
    }
    public String getValue()
    {
        return (String)getSelectedItem();
    }
    public void setGroup(String Group) {
        this.Group = Group;
    }
    public void setKeyword(String Keyword) {
        this.Keyword = Keyword;
    }
    // Flip the consistency flag and show/hide the red warning border accordingly.
    private void setConsistency(boolean Consistent)
    {
        isConsistent = Consistent;
        if(Consistent == false)
        {
            setBorder(inConsistentBorder);
            rendererInstance.setBorder(inConsistentBorder);
        }
        else
        {
            setBorder(consistentBorder);
            rendererInstance.setBorder(consistentBorder);
        }
    }
    /**
     * Selection-change hook: removes the old value from the Repository, stores the
     * new one (unless it equals the default), then asks every peer cell to re-check
     * its incompatibility state. Suppressed while recomputeConsistency()/resetValues()
     * rebuild the item list.
     */
    @Override
    protected void selectedItemChanged() {
        if(isRecomputingConsistency == false)
        {
            //Propogate the values and retest all the incompatibility
            String value = null;
            //Get the previous item from the combobox
            if(getSelectedItem() != null)
                value = getSelectedItem().toString();
            if(DEFAULT.equalsIgnoreCase(value))
                value = DefaultValue;
            //delete the previous item from the repository
            if(value != null)
            {
                Repository.getInstance().Remove(Group, Keyword);
            }
            //set the new item
            super.selectedItemChanged();
            //some value is changed. Place it in the DB
            //Get the changed value
            if(getSelectedItem() != null)
                value = getSelectedItem().toString();
            if(DEFAULT.equalsIgnoreCase(value))
                value = DefaultValue;
            //Add it to the repository (defaults are implicit, so they are not stored)
            if(value != null && !value.equalsIgnoreCase(DefaultValue))
            {
                String seperator = Repository.getInstance().getKeywordValueSeperator(Group);
                Repository.getInstance().Store(Group, Keyword + seperator + value);
            }
            //fire test in all the peer component in this dialog
            for (int i = 0; i < cellListenerList.size(); i++) {
                cellListenerList.get(i).recomputeConsistency();
            }
        }
    }
    // Apply an HTML-formatted tooltip to both the editor and the renderer combo.
    private void setToolTip(String tooltip)
    {
        tooltip = Cosmetics.getFormattedToolTip(tooltip);
        setToolTipText(tooltip);
        rendererInstance.setToolTipText(tooltip);
    }
    // Enable/disable both the editor and the renderer combo together.
    private void setComboBoxEnabled(boolean enabled)
    {
        setEnabled(enabled);
        rendererInstance.setEnabled(enabled);
    }
}
| |
package net.maizegenetics.phenotype;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import net.maizegenetics.taxa.TaxaList;
import net.maizegenetics.taxa.TaxaListBuilder;
import net.maizegenetics.taxa.TaxaListUtils;
import net.maizegenetics.taxa.Taxon;
public class FilterPhenotype extends Phenotype {
private Phenotype basePhenotype;
private boolean myIsTaxaFilter = false;
private boolean myIsAttributeFilter = false;
private int[] myTaxaRedirect = null;
private int[] myAttributeRedirect = null;
Map<ATTRIBUTE_TYPE, int[]> myAttributeTypeRedirect = null;
private String myTableTitle = "Filtered phenotype";
private FilterPhenotype(Phenotype basePheno, ArrayList<PhenotypeAttribute> retainedAttributes, TaxaList taxaToRetain) {
    // Builds index redirects that map filtered coordinates back into the base
    // phenotype. Either filter argument may be null, meaning no filtering on that axis.
    basePhenotype = basePheno;
    if (taxaToRetain != null) {
        myIsTaxaFilter = true;
        // Only taxa present in both the base phenotype and the retain list survive.
        TaxaList commonTaxa = TaxaListUtils.getCommonTaxa(basePhenotype.taxa(), taxaToRetain);
        int ntaxa = commonTaxa.size();
        myTaxaRedirect = new int[ntaxa];
        int tcount = 0;
        for (Taxon taxon : commonTaxa) {
            myTaxaRedirect[tcount++] = basePhenotype.taxa().indexOf(taxon);
        }
    }
    if (retainedAttributes != null) {
        myIsAttributeFilter = true;
        int nAttr = retainedAttributes.size();
        myAttributeRedirect = new int[nAttr];
        int acount = 0;
        for (PhenotypeAttribute attr : retainedAttributes) {
            // NOTE(review): an attribute absent from the base list maps to -1;
            // assumed not to occur -- callers pass attributes taken from the base.
            myAttributeRedirect[acount++] = basePhenotype.getAttributeList().indexOf(attr);
        }
    }
}
public static Phenotype getInstance(Phenotype basePheno, ArrayList<PhenotypeAttribute> retainedAttributes, TaxaList taxaToRetain) {
return new FilterPhenotype(basePheno, retainedAttributes, taxaToRetain);
}
public static Phenotype getInstance(Phenotype basePheno, ArrayList<PhenotypeAttribute> retainedAttributes) {
return new FilterPhenotype(basePheno, retainedAttributes, null);
}
public static Phenotype getInstance(Phenotype basePheno, TaxaList retainedTaxa) {
return new FilterPhenotype(basePheno, null, retainedTaxa);
}
public static Phenotype getInstanceRemoveTaxa(Phenotype basePheno, TaxaList taxaToRemove) {
ArrayList<Taxon> keepList = new ArrayList<Taxon>(basePheno.taxa());
keepList.removeAll(taxaToRemove);
TaxaListBuilder taxaBuilder = new TaxaListBuilder();
taxaBuilder.addAll(keepList);
return new FilterPhenotype(basePheno, null, taxaBuilder.build());
}
//methods to override ------------------------------------
@Override
public Object getValue(int obs, int attrnum) {
if (myIsTaxaFilter && myIsAttributeFilter) {
return basePhenotype.getValue(myTaxaRedirect[obs], myAttributeRedirect[attrnum]);
} else if (myIsAttributeFilter) {
return basePhenotype.getValue(obs, myAttributeRedirect[attrnum]);
} else {
return basePhenotype.getValue(myTaxaRedirect[obs], attrnum);
}
}
@Override
public PhenotypeAttribute getAttribute(int attrnum) {
if (myIsAttributeFilter) {
return basePhenotype.getAttribute(myAttributeRedirect[attrnum]);
} else return super.getAttribute(attrnum);
}
@Override
public PhenotypeAttribute getAttributeOfType(ATTRIBUTE_TYPE type, int attrnum) {
if (myIsAttributeFilter) {
return basePhenotype.getAttribute(myAttributeTypeRedirect.get(type)[attrnum]);
} else {
return basePhenotype.getAttributeOfType(type, attrnum);
}
}
@Override
public List<PhenotypeAttribute> getAttributeListOfType(ATTRIBUTE_TYPE type) {
if (myIsAttributeFilter) {
ArrayList<PhenotypeAttribute> attrList = new ArrayList<PhenotypeAttribute>();
for (int attrnum : myAttributeTypeRedirect.get(type)) attrList.add(super.getAttribute(attrnum));
return attrList;
} else {
return basePhenotype.getAttributeListOfType(type);
}
}
@Override
public TaxaList taxa() {
if (myIsTaxaFilter) {
TaxaListBuilder taxaBuilder = new TaxaListBuilder();
for (int t : myTaxaRedirect) taxaBuilder.add(basePhenotype.taxa().get(t));
return taxaBuilder.build();
} else {
return basePhenotype.taxa();
}
}
@Override
public int getNumberOfAttributes() {
if (myIsAttributeFilter) {
return myAttributeRedirect.length;
} else {
return basePhenotype.getNumberOfAttributes();
}
}
@Override
public int getNumberOfAttributesOfType(ATTRIBUTE_TYPE type) {
if (myIsAttributeFilter) {
return myAttributeTypeRedirect.get(type).length;
} else return basePhenotype.getNumberOfAttributesOfType(type);
}
@Override
public int getNumberOfObservations() {
if (myIsTaxaFilter) {
return myTaxaRedirect.length;
} else return basePhenotype.getNumberOfObservations();
}
@Override
public Object[] getTableColumnNames() {
if (myIsAttributeFilter) {
int nAttr = myAttributeRedirect.length;
Object[] names = new Object[nAttr];
for (int a = 0; a < nAttr; a++) {
names[a] = basePhenotype.getAttribute(a).getName();
}
return names;
} else return basePhenotype.getTableColumnNames();
}
@Override
public Object[][] getTableData() {
int nrows = myTaxaRedirect.length;
int ncols = myAttributeRedirect.length + 1;
Object[][] resultTable = new Object[nrows][ncols];
for (int r = 0; r < nrows; r++) {
for (int c = 0; c < ncols; c++) {
resultTable[r][c] = getValueAt(r,c);
}
}
return resultTable;
}
@Override
public String getTableTitle() {
return myTableTitle;
}
@Override
public int getColumnCount() {
if (myIsAttributeFilter) return myAttributeRedirect.length + 1;
else return basePhenotype.getColumnCount();
}
@Override
public int getRowCount() {
if (myIsTaxaFilter) return myTaxaRedirect.length;
else return basePhenotype.getRowCount();
}
@Override
public Object[] getRow(int row) {
int ncols = getColumnCount();
Object[] rowData = new Object[ncols];
for (int i = 0; i < ncols; i++) rowData[i] = getValueAt(row, i);
return super.getRow(row);
}
@Override
public Object[][] getTableData(int start, int end) {
int nrows = end - start + 1;
int ncols = myAttributeRedirect.length + 1;
Object[][] resultTable = new Object[nrows][ncols];
for (int r = 0; r < nrows ; r++) {
for (int c = 0; c < ncols; c++) {
resultTable[r][c] = getValueAt(r + start,c);
}
}
return resultTable;
}
@Override
public Object getValueAt(int row, int col) {
if (myIsTaxaFilter && myIsAttributeFilter) {
if (col == 0) return basePhenotype.taxa().get(myTaxaRedirect[row]);
return basePhenotype.getValue(myTaxaRedirect[row], myAttributeRedirect[col - 1]);
} else if (myIsAttributeFilter) {
if (col == 0) return basePhenotype.taxa().get(row);
return basePhenotype.getValue(row, myAttributeRedirect[col - 1]);
} else {
if (col == 0) return basePhenotype.taxa().get(myTaxaRedirect[row]);
return basePhenotype.getValue(myTaxaRedirect[row], col - 1);
}
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.actions;
import org.eclipse.core.expressions.PropertyTester;
import org.eclipse.jface.action.IContributionItem;
import org.eclipse.swt.widgets.Display;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.model.DBPOrderedObject;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.app.DBPResourceHandler;
import org.jkiss.dbeaver.model.edit.*;
import org.jkiss.dbeaver.model.navigator.*;
import org.jkiss.dbeaver.model.struct.*;
import org.jkiss.dbeaver.model.struct.rdb.DBSTableIndex;
import org.jkiss.dbeaver.registry.ObjectManagerRegistry;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.ui.ActionUtils;
import org.jkiss.dbeaver.ui.navigator.actions.NavigatorHandlerObjectCreateNew;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import java.util.List;
/**
* ObjectPropertyTester
*/
/**
 * ObjectPropertyTester
 *
 * Eclipse expression property tester for navigator tree nodes. It backs the
 * "org.jkiss.dbeaver.core.object.*" expressions used to enable or disable UI
 * contributions (open, create, paste, delete, rename, reorder, filter, ...).
 *
 * Checks run during menu/toolbar state evaluation on the UI thread, so they
 * must be fast and side-effect free.
 */
public class ObjectPropertyTester extends PropertyTester
{
//static final Log log = Log.getLog(ObjectPropertyTester.class);
public static final String NAMESPACE = "org.jkiss.dbeaver.core.object";
public static final String PROP_CAN_OPEN = "canOpen";
public static final String PROP_CAN_CREATE_SINGLE = "canCreateSingle";
public static final String PROP_CAN_CREATE_MULTI = "canCreateMulti";
public static final String PROP_CAN_PASTE = "canPaste";
public static final String PROP_CAN_DELETE = "canDelete";
public static final String PROP_CAN_RENAME = "canRename";
public static final String PROP_CAN_MOVE_UP = "canMoveUp";
public static final String PROP_CAN_MOVE_DOWN = "canMoveDown";
public static final String PROP_CAN_FILTER = "canFilter";
public static final String PROP_CAN_FILTER_OBJECT = "canFilterObject";
public static final String PROP_HAS_FILTER = "hasFilter";
public static final String PROP_HAS_TOOLS = "hasTools";
public static final String PROP_SUPPORTS_CREATING_INDEX = "supportsIndexCreate";
public static final String PROP_SUPPORTS_CREATING_CONSTRAINT = "supportsConstraintCreate";
public ObjectPropertyTester() {
super();
}
/**
 * Evaluates {@code property} for the receiver node. Returns false for
 * receivers that do not adapt to {@link DBNNode} and when invoked off the
 * UI thread (no current Display).
 */
@SuppressWarnings("unchecked")
@Override
public boolean test(Object receiver, String property, Object[] args, Object expectedValue) {
DBNNode node = RuntimeUtils.getObjectAdapter(receiver, DBNNode.class);
if (node == null) {
return false;
}
// Property evaluation is only meaningful on the UI thread.
Display display = Display.getCurrent();
if (display == null) {
return false;
}
//System.out.println("TEST " + property + " ON " + node.getName());
switch (property) {
case PROP_CAN_OPEN:
return node.isPersisted();
case PROP_CAN_CREATE_SINGLE: {
return canCreateObject(node, true);
}
case PROP_CAN_CREATE_MULTI: {
return canCreateObject(node, false);
}
case PROP_CAN_PASTE: {
// We cannot interact with clipboard in property testers (#6489).
// It breaks context menu (and maybe something else) omn some OSes.
/*
Clipboard clipboard = new Clipboard(display);
try {
if (clipboard.getContents(TreeNodeTransfer.getInstance()) == null) {
return false;
}
} finally {
clipboard.dispose();
}
*/
if (node instanceof DBNResource) {
// NOTE(review): always true here — we are inside the PROP_CAN_PASTE
// case, so property.equals(PROP_CAN_PASTE) cannot be false.
return property.equals(PROP_CAN_PASTE);
}
// Paste is possible wherever any object can be created (onlySingle == null).
return canCreateObject(node, null);
// Do not check PASTE command state. It requires clipboard contents check
// which means UI interaction which can break menu popup [RedHat]
// and also is a slow operation. So let paste be always enabled.
/*
// Check objects in clipboard
Collection<DBNNode> cbNodes = TreeNodeTransfer.getFromClipboard();
if (cbNodes == null) {
return false;
}
for (DBNNode nodeObject : cbNodes) {
if (nodeObject.isManagable() && nodeObject instanceof DBSWrapper) {
DBSObject pasteObject = ((DBSWrapper)nodeObject).getObject();
if (pasteObject == null || objectType != pasteObject.getClass()) {
return false;
}
} else {
return false;
}
}
*/
}
case PROP_CAN_DELETE: {
// Data sources and local folders can always be removed.
if (node instanceof DBNDataSource || node instanceof DBNLocalFolder) {
return true;
}
if (DBNUtils.isReadOnly(node)) {
return false;
}
if (node instanceof DBSWrapper) {
DBSObject object = ((DBSWrapper) node).getObject();
if (object == null || DBUtils.isReadOnly(object) || !(node.getParentNode() instanceof DBNContainer)) {
return false;
}
// Deletion requires a registered object maker that allows it for this type.
DBEObjectMaker objectMaker = getObjectManager(object.getClass(), DBEObjectMaker.class);
return objectMaker != null && objectMaker.canDeleteObject(object);
} else if (node instanceof DBNResource) {
// Workspace resources advertise deletability through feature flags.
if ((((DBNResource) node).getFeatures() & DBPResourceHandler.FEATURE_DELETE) != 0) {
return true;
}
}
break;
}
case PROP_CAN_RENAME: {
if (node.supportsRename()) {
return true;
}
if (node instanceof DBNDatabaseNode) {
if (DBNUtils.isReadOnly(node)) {
return false;
}
DBSObject object = ((DBNDatabaseNode) node).getObject();
if (object != null) {
DBEObjectRenamer objectRenamer = getObjectManager(object.getClass(), DBEObjectRenamer.class);
return !DBUtils.isReadOnly(object) &&
object.isPersisted() &&
node.getParentNode() instanceof DBNContainer &&
objectRenamer != null && objectRenamer.canRenameObject(object);
}
}
break;
}
case PROP_CAN_MOVE_UP:
case PROP_CAN_MOVE_DOWN: {
if (node instanceof DBNDatabaseNode) {
if (DBNUtils.isReadOnly(node)) {
return false;
}
DBSObject object = ((DBNDatabaseNode) node).getObject();
if (object instanceof DBPOrderedObject) {
DBEObjectReorderer objectReorderer = getObjectManager(object.getClass(), DBEObjectReorderer.class);
if (objectReorderer != null) {
// Movable only within the reorderer's allowed ordinal range.
final int position = ((DBPOrderedObject) object).getOrdinalPosition();
if (property.equals(PROP_CAN_MOVE_UP)) {
return position > objectReorderer.getMinimumOrdinalPosition(object);
}
return position < objectReorderer.getMaximumOrdinalPosition(object);
}
}
}
break;
}
case PROP_CAN_FILTER: {
// Items are filtered through their parent folder.
if (node instanceof DBNDatabaseItem) {
node = node.getParentNode();
}
if (node instanceof DBNDatabaseFolder && ((DBNDatabaseFolder) node).getItemsMeta() != null) {
return true;
}
break;
}
case PROP_CAN_FILTER_OBJECT: {
if (node.getParentNode() instanceof DBNDatabaseFolder && ((DBNDatabaseFolder) node.getParentNode()).getItemsMeta() != null) {
return true;
}
break;
}
case PROP_HAS_FILTER: {
if (node instanceof DBNDatabaseItem) {
node = node.getParentNode();
}
if (node instanceof DBNDatabaseFolder && ((DBNDatabaseFolder) node).getItemsMeta() != null) {
DBSObjectFilter filter = ((DBNDatabaseFolder) node).getNodeFilter(((DBNDatabaseFolder) node).getItemsMeta(), true);
// expectedValue "defined" asks for a non-empty filter; any other value
// accepts any applicable filter.
if ("defined".equals(expectedValue)) {
return filter != null && !filter.isEmpty();
} else {
return filter != null && !filter.isNotApplicable();
}
}
break;
}
case PROP_SUPPORTS_CREATING_INDEX:
return supportsCreatingColumnObject(node, DBSTableIndex.class);
case PROP_SUPPORTS_CREATING_CONSTRAINT:
return supportsCreatingColumnObject(node, DBSEntityConstraint.class);
}
return false;
}
/**
 * Whether a "create" action applies to the node.
 *
 * @param onlySingle true: exactly one creatable child type must exist;
 *                   false: more than one; null: any create handler at all
 *                   (used by the paste check above).
 */
public static boolean canCreateObject(DBNNode node, Boolean onlySingle) {
if (node instanceof DBNDatabaseNode) {
if (((DBNDatabaseNode)node).isVirtual()) {
// Can't create virtual objects
return false;
}
if (!(node instanceof DBNDataSource) && isMetadataChangeDisabled(((DBNDatabaseNode)node))) {
return false;
}
}
if (onlySingle == null) {
// Just try to find first create handler
if (node instanceof DBNDataSource) {
// We always can create datasource
return true;
}
Class objectType;
if (!(node instanceof DBNContainer)) {
// Non-container nodes are evaluated through their container parent.
if (node.getParentNode() instanceof DBNContainer) {
node = node.getParentNode();
}
}
DBNContainer container;
if (node instanceof DBNContainer) {
// Try to detect child type
objectType = ((DBNContainer) node).getChildrenClass();
container = (DBNContainer) node;
} else {
return false;
}
if (DBNUtils.isReadOnly(node)) {
return false;
}
if (node instanceof DBSWrapper && DBUtils.isReadOnly(((DBSWrapper) node).getObject())) {
return false;
}
if (objectType == null) {
return false;
}
DBEObjectMaker objectMaker = getObjectManager(objectType, DBEObjectMaker.class);
if (objectMaker == null) {
return false;
}
return objectMaker.canCreateObject(container.getValueObject());
}
if (DBNUtils.isReadOnly(node)) {
return false;
}
// Check whether only single object type can be created or multiple ones
List<IContributionItem> createItems = NavigatorHandlerObjectCreateNew.fillCreateMenuItems(null, node);
if (onlySingle) {
return createItems.size() == 1;
} else {
return createItems.size() > 1;
}
}
/** True when the connection's navigator settings forbid metadata edits. */
public static boolean isMetadataChangeDisabled(DBNDatabaseNode node) {
DBNBrowseSettings navSettings = node.getDataSourceContainer().getNavigatorSettings();
return navSettings.isHideFolders() || navSettings.isShowOnlyEntities();
}
/** Looks up the object manager (maker/renamer/reorderer/...) registered for a type. */
private static <T extends DBEObjectManager> T getObjectManager(Class<?> objectType, Class<T> managerType)
{
return ObjectManagerRegistry.getInstance().getObjectManager(objectType, managerType);
}
/** Re-evaluates the given property so dependent UI elements refresh their enabled state. */
public static void firePropertyChange(String propName)
{
ActionUtils.evaluatePropertyState(NAMESPACE + "." + propName);
}
/**
 * True when the node is an entity attribute whose parent entity has a
 * creatable child type assignable to {@code supertype} (e.g. index or
 * constraint creation from a column).
 */
private static boolean supportsCreatingColumnObject(@Nullable DBNNode node, @NotNull Class<?> supertype) {
if (!(node instanceof DBNDatabaseItem)) {
return false;
}
DBNDatabaseItem databaseItem = (DBNDatabaseItem) node;
DBSObject attributeObject = databaseItem.getObject();
if (!(attributeObject instanceof DBSEntityAttribute)) {
return false;
}
DBSObject entityObject = attributeObject.getParentObject();
if (!(entityObject instanceof DBSEntity)) {
return false;
}
DBEStructEditor<?> structEditor = DBWorkbench.getPlatform().getEditorsRegistry().getObjectManager(entityObject.getClass(), DBEStructEditor.class);
if (structEditor == null) {
return false;
}
for (Class<?> childType: structEditor.getChildTypes()) {
DBEObjectMaker<?, ?> maker = DBWorkbench.getPlatform().getEditorsRegistry().getObjectManager(childType, DBEObjectMaker.class);
if (maker != null && maker.canCreateObject(entityObject) && supertype.isAssignableFrom(childType)) {
return true;
}
}
return false;
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2013-2016 reark project contributors
*
* https://github.com/reark/reark/graphs/contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.reark.reark.data.stores;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.database.Cursor;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.test.runner.AndroidJUnit4;
import android.test.ProviderTestCase2;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import io.reark.reark.data.stores.SimpleMockContentProvider.DataColumns;
import io.reark.reark.data.stores.cores.ContentProviderStoreCore;
import rx.Observable;
import rx.functions.Action1;
import rx.observers.TestSubscriber;
/**
 * Instrumentation tests for a content-provider-backed store: verifies
 * getOnce / getOnceAndStream semantics against a mock provider pre-populated
 * with a few String values keyed by their hashCode.
 */
@RunWith(AndroidJUnit4.class)
public class ContentProviderStoreTest extends ProviderTestCase2<SimpleMockContentProvider> {
private static final String AUTHORITY = "test.authority";
private static final Uri AUTHORITY_URI = Uri.parse("content://" + AUTHORITY);
private static final Uri CONTENT_URI = Uri.withAppendedPath(AUTHORITY_URI, "veggies");
private static final String[] PROJECTION = { DataColumns.KEY, DataColumns.VALUE };
// Sentinel the store emits when no value exists for a key.
private static final String NONE = "";
private TestStoreCore core;
private TestStore store;
public ContentProviderStoreTest() {
super(SimpleMockContentProvider.class, AUTHORITY);
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
core = new TestStoreCore(getMockContentResolver());
store = new TestStore(core);
// Writes a value straight into the mock provider, bypassing the store API.
Action1<String> insert = value ->
getProvider().insert(
core.getUriForId(TestStore.getIdFor(value)),
core.getContentValuesForItem(value)
);
// Prepare the mock content provider with values
insert.call("parsnip");
insert.call("lettuce");
insert.call("spinach");
}
@Test
public void getOnce_WithData_ReturnsData_AndCompletes() {
// ARRANGE
TestSubscriber<String> testSubscriber = new TestSubscriber<>();
List<String> expected = Collections.singletonList("parsnip");
// ACT
store.getOnce(TestStore.getIdFor("parsnip")).subscribe(testSubscriber);
// ASSERT
testSubscriber.awaitTerminalEvent();
testSubscriber.assertCompleted();
testSubscriber.assertNoErrors();
testSubscriber.assertReceivedOnNext(expected);
}
@Test
public void getOnce_WithNoData_ReturnsNoneValue_AndCompletes() {
// ARRANGE
TestSubscriber<String> testSubscriber = new TestSubscriber<>();
List<String> expected = Collections.singletonList(NONE);
// ACT
store.getOnce(TestStore.getIdFor("bacon")).subscribe(testSubscriber);
// ASSERT
testSubscriber.awaitTerminalEvent();
testSubscriber.assertCompleted();
testSubscriber.assertNoErrors();
testSubscriber.assertReceivedOnNext(expected);
}
@Test
public void getOnceAndStream_WithData_ReturnsData_AndDoesNotComplete() {
// ARRANGE
TestSubscriber<String> testSubscriber = new TestSubscriber<>();
List<String> expected = Collections.singletonList("spinach");
// ACT
store.getOnceAndStream(TestStore.getIdFor("spinach")).subscribe(testSubscriber);
// ASSERT
// Streams stay open, so wait a bounded time instead of for termination.
testSubscriber.awaitTerminalEvent(50, TimeUnit.MILLISECONDS);
testSubscriber.assertNotCompleted();
testSubscriber.assertNoErrors();
testSubscriber.assertReceivedOnNext(expected);
}
@Test
public void getOnceAndStream_WithNoData_ReturnsNoneValue_AndDoesNotComplete() {
// ARRANGE
TestSubscriber<String> testSubscriber = new TestSubscriber<>();
List<String> expected = Collections.singletonList(NONE);
// ACT
store.getOnceAndStream(TestStore.getIdFor("bacon")).subscribe(testSubscriber);
// ASSERT
testSubscriber.awaitTerminalEvent(50, TimeUnit.MILLISECONDS);
testSubscriber.assertNotCompleted();
testSubscriber.assertNoErrors();
testSubscriber.assertReceivedOnNext(expected);
}
// The following tests are not part of the public API, but rather test what
// the content provider core exposes for extending classes.
@Test
public void core_GetAllOnce_WithData_ReturnsData_AndCompletes() {
// ARRANGE
TestSubscriber<List<String>> testSubscriber = new TestSubscriber<>();
List<List<String>> expected = Collections.singletonList(Collections.singletonList("parsnip"));
// ACT
core.getAllCached(TestStore.getIdFor("parsnip")).subscribe(testSubscriber);
// ASSERT
testSubscriber.awaitTerminalEvent();
testSubscriber.assertCompleted();
testSubscriber.assertNoErrors();
testSubscriber.assertReceivedOnNext(expected);
}
@Test
public void core_GetAllOnce_WithWildcardQuery_WithData_ReturnsAllData_AndCompletes() {
// ARRANGE
TestSubscriber<List<String>> testSubscriber = new TestSubscriber<>();
List<List<String>> expected = Collections.singletonList(Arrays.asList("parsnip", "lettuce", "spinach"));
// ACT
// Wildcard depends on content provider. For tests we just use 0 while on SQL backend
// this would be an asterisk. The exact wildcard is not important for the test as we just
// want to make sure the provider stores can return a larger listing of results.
core.getAllCached(0).subscribe(testSubscriber);
// ASSERT
testSubscriber.awaitTerminalEvent();
testSubscriber.assertCompleted();
testSubscriber.assertNoErrors();
testSubscriber.assertReceivedOnNext(expected);
}
/**
 * A simple store containing String values tracked with Integer keys.
 */
public static class TestStore extends ContentProviderStore<Integer, String, String> {
public TestStore(@NonNull final TestStoreCore core) {
super(core,
TestStore::getIdFor,
value -> value != null ? value : NONE,
() -> NONE);
}
// Keys are derived from the value itself; collisions are irrelevant for these fixtures.
@NonNull
private static Integer getIdFor(@NonNull final String item) {
return item.hashCode();
}
}
/**
 * A simple store core implementing the methods content provider requires.
 */
public static class TestStoreCore extends ContentProviderStoreCore<Integer, String> {
protected TestStoreCore(@NonNull final ContentResolver contentResolver) {
super(contentResolver);
}
// Exposes the protected getAllOnce for the core_* tests above.
@NonNull
public Observable<List<String>> getAllCached(@NonNull Integer id) {
return getAllOnce(getUriForId(id));
}
@NonNull
@Override
protected String getAuthority() {
return AUTHORITY;
}
@NonNull
@Override
public Uri getContentUri() {
return CONTENT_URI;
}
@NonNull
@Override
protected String[] getProjection() {
return PROJECTION;
}
@NonNull
@Override
protected String read(@NonNull final Cursor cursor) {
return cursor.getString(cursor.getColumnIndex(DataColumns.VALUE));
}
@NonNull
@Override
protected ContentValues getContentValuesForItem(@NonNull final String item) {
ContentValues contentValues = new ContentValues();
contentValues.put(DataColumns.KEY, item.hashCode());
contentValues.put(DataColumns.VALUE, item);
return contentValues;
}
@NonNull
@Override
public Uri getUriForId(@NonNull final Integer id) {
return Uri.withAppendedPath(getContentUri(), String.valueOf(id));
}
@NonNull
@Override
protected Integer getIdForUri(@NonNull final Uri uri) {
return Integer.valueOf(uri.getLastPathSegment());
}
}
}
| |
package gui;
import java.util.ArrayList;
import java.util.Collection;
import input.*;
import input.Event.EventType;
import input.MouseEvent.MouseEventType;
import input.KeyEvent.KeyEventType;
import util.*;
/**
 * Base class of the widget hierarchy: handles input-event dispatch, rendering
 * and layout for a tree of rectangular UI elements.
 *
 * <p>Children are stored in insertion order; the most recently added child is
 * rendered last (on top) and therefore gets the first chance at input events.
 */
public class Widget {

    /** Per-widget hook: draw this widget. Children are rendered afterwards. */
    protected void onRender(RenderTarget t) {}

    /** Per-widget hook: invoked after this widget's rectangle has been recomputed. */
    protected void onLayout() {}

    /**
     * Dispatches an input event to this widget and its children. Children are
     * visited from last-added to first so the topmost widget sees the event
     * first; a consumed event stops propagation.
     */
    protected final void internalOnEvent(Event e) {
        //not visible, break off
        if (!mVisible)
            return;
        //make up to date
        if (!mRectValid)
            internalOnLayout(false);
        //clear any pending child removals, event handling may remove some children
        removePendingChildren();
        //do children, last added is on top, so it gets a chance
        //at the event first
        mChildLock = true;
        try {
            for (int i = mChildren.size() - 1; i >= 0; --i) {
                mChildren.get(i).internalOnEvent(e);
                if (e.consumed())
                    // FIX: the original returned here with mChildLock still set,
                    // permanently locking the child list after any consumed event
                    // (later removePendingChildren() calls hit the assert and
                    // setParent() removals were deferred forever). try/finally
                    // also releases the lock if a child handler throws.
                    return;
            }
        } finally {
            mChildLock = false;
        }
        //if mouse is over, handle event
        if (contains(e.getInput().getMousePos())) {
            //active? consume the event
            if (mActive)
                e.consume();
            //set the input's MouseTarget to this
            e.getInput().setMouseTarget(this);
            //dispatch
            if (e.getEventType() == EventType.Mouse) {
                MouseEvent me = (MouseEvent) e;
                switch (me.getMouseEventType()) {
                case Press:
                    //clicking on an active widget sets it to the target for keypresses
                    e.getInput().setKeyTarget(this);
                    onMouseDown.fire(me);
                    break;
                case Release:
                    onMouseUp.fire(me);
                    break;
                case Scroll:
                    onMouseWheel.fire(me);
                    break;
                case Move:
                    onMouseMove.fire(me);
                    break;
                default:
                    assert false: "Bad MouseEventType to internalOnEvent";
                }
            } else if (e.getEventType() == EventType.Key) {
                KeyEvent ke = (KeyEvent) e;
                switch (ke.getKeyEventType()) {
                case Press:
                    onKeyDown.fire(ke);
                    break;
                case Release:
                    onKeyUp.fire(ke);
                    break;
                default:
                    assert false: "Bad KeyEventType to onEventInternal";
                }
            }
        }
    }

    /** Renders this widget and then its children (insertion order, so later children draw on top). */
    protected final void internalOnRender(RenderTarget t) {
        //not visible, don't render
        if (!mVisible)
            return;
        //make up to date
        if (!mRectValid)
            internalOnLayout(false);
        //render self first
        onRender(t);
        //children, in order added
        //(don't clear pending child removals, there shouldn't be any from a render)
        mChildLock = true;
        try {
            for (Widget w : mChildren) {
                w.internalOnRender(t);
            }
        } finally {
            // Release even if a child's onRender throws.
            mChildLock = false;
        }
    }

    /**
     * Recomputes this widget's rectangle from the parent rectangle and the
     * offset/scale pairs in mPos/mSize, then lays out all children.
     *
     * @param forceupdate recompute even if the cached rectangle is still valid
     */
    protected final void internalOnLayout(boolean forceupdate) {
        //update self
        if (!mRectValid || forceupdate) {
            if (mParent != null) {
                // Resolve UDim = offset + scale * parent-extent against the parent rect.
                int pw = mParent.getRect().getWidth();
                int ph = mParent.getRect().getHeight();
                mRect = new Rect((int)(mParent.getRect().getX() + mPos.getXOffset() + mPos.getXScale()*pw),
                        (int)(mParent.getRect().getY() + mPos.getYOffset() + mPos.getYScale()*ph),
                        (int)(mSize.getXOffset() + pw*mSize.getXScale()),
                        (int)(mSize.getYOffset() + ph*mSize.getYScale()));
            } else {
                // No parent: offsets are absolute screen coordinates.
                mRect = new Rect(mPos.getOffset().vec(), mSize.getOffset().vec());
            }
            onLayout();
        }
        //update children
        // (don't clear pending child removals, there shouldn't be any from a layout)
        mChildLock = true;
        try {
            for (Widget child : mChildren) {
                child.internalOnLayout(forceupdate);
            }
        } finally {
            // Release even if a child's onLayout throws.
            mChildLock = false;
        }
    }

    /** True when v lies inside this widget's current on-screen rectangle. */
    public boolean contains(Vec v) {
        return mRect.contains(v);
    }

    /**
     * Reparents this widget. Removal from the old parent is deferred when that
     * parent is currently iterating over its children.
     */
    public final void setParent(Widget parent) {
        if (mParent != null) {
            if (mParent.mChildLock) { //child list being iterated? don't mess with it
                mParent.mChildrenToRemove.add(this);
            } else {
                mParent.mChildren.remove(this);
            }
        }
        mParent = parent;
        if (mParent != null) {
            mParent.mChildren.add(this);
        }
    }

    public final Widget getParent() {
        return mParent;
    }

    public final UDim getPos() {
        return mPos;
    }

    /** Sets the position and invalidates the cached rectangle. */
    public final void setPos(UDim p) {
        mPos = p;
        mRectValid = false;
    }

    public final UDim getSize() {
        return mSize;
    }

    /** Sets the size and invalidates the cached rectangle. */
    public final void setSize(UDim s) {
        mSize = s;
        mRectValid = false;
    }

    public final void setLayer(int layer) {
        mLayer = layer;
    }

    public final int getLayer() {
        return mLayer;
    }

    public final boolean getActive() {
        return mActive;
    }

    /** Active widgets consume events dispatched over them. */
    public final void setActive(boolean active) {
        mActive = active;
    }

    public final boolean getVisible() {
        return mVisible;
    }

    /** Invisible widgets are neither rendered nor receive events (including children). */
    public final void setVisible(boolean visible) {
        mVisible = visible;
    }

    public final Rect getRect() {
        return mRect;
    }

    /** Marks the cached rectangle stale; it is recomputed on the next event/render/layout. */
    public final void invalidate() {
        mRectValid = false;
    }

    /** Read-only view of the children, in insertion (render) order. */
    public final Collection<Widget> getChildren() {
        return java.util.Collections.unmodifiableCollection(mChildren);
    }

    //events
    // NOTE(review): onMouseEnter/onMouseLeave are declared but never fired in
    // this class — presumably the Input side tracks the mouse target and fires
    // them; confirm before relying on these signals.
    public final MouseEvent.Signal onMouseEnter = new MouseEvent.Signal();
    public final MouseEvent.Signal onMouseLeave = new MouseEvent.Signal();
    public final MouseEvent.Signal onMouseMove = new MouseEvent.Signal();
    public final MouseEvent.Signal onMouseDown = new MouseEvent.Signal();
    public final MouseEvent.Signal onMouseUp = new MouseEvent.Signal();
    public final MouseEvent.Signal onMouseWheel = new MouseEvent.Signal();
    public final KeyEvent.Signal onKeyDown = new KeyEvent.Signal();
    public final KeyEvent.Signal onKeyUp = new KeyEvent.Signal();

    //hierarchy
    private Widget mParent;
    private ArrayList<Widget> mChildren = new ArrayList<Widget>();
    // Guards mChildren during iteration; removals requested meanwhile are queued.
    private boolean mChildLock = false;
    private ArrayList<Widget> mChildrenToRemove = new ArrayList<Widget>();

    /** Applies removals that were queued while the child list was locked. */
    private void removePendingChildren() {
        assert mChildLock == false;
        if (!mChildrenToRemove.isEmpty()) {
            mChildren.removeAll(mChildrenToRemove);
            mChildrenToRemove.clear();
        }
    }

    //layer
    private int mLayer;

    //layout
    private UDim mSize = new UDim();
    private UDim mPos = new UDim();
    private Rect mRect;
    // NOTE(review): mRectValid is never set to true after a layout pass, so
    // rectangles are recomputed on every event/render. Setting it in
    // internalOnLayout would cache layouts but also change invalidation
    // semantics for children whose parent resizes — confirm before "fixing".
    private boolean mRectValid = false;

    //other state
    private boolean mVisible = true;
    private boolean mActive = true;
}
| |
package org.apache.lucene.codecs.mockrandom;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.lucene.codecs.BlockTreeTermsReader;
import org.apache.lucene.codecs.BlockTreeTermsWriter;
import org.apache.lucene.codecs.FieldsConsumer;
import org.apache.lucene.codecs.FieldsProducer;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.PostingsReaderBase;
import org.apache.lucene.codecs.PostingsWriterBase;
import org.apache.lucene.codecs.TermStats;
import org.apache.lucene.codecs.blockterms.BlockTermsReader;
import org.apache.lucene.codecs.blockterms.BlockTermsWriter;
import org.apache.lucene.codecs.blockterms.FixedGapTermsIndexReader;
import org.apache.lucene.codecs.blockterms.FixedGapTermsIndexWriter;
import org.apache.lucene.codecs.blockterms.TermsIndexReaderBase;
import org.apache.lucene.codecs.blockterms.TermsIndexWriterBase;
import org.apache.lucene.codecs.blockterms.VariableGapTermsIndexReader;
import org.apache.lucene.codecs.blockterms.VariableGapTermsIndexWriter;
import org.apache.lucene.codecs.lucene41.Lucene41PostingsReader;
import org.apache.lucene.codecs.lucene41.Lucene41PostingsWriter;
import org.apache.lucene.codecs.mockintblock.MockFixedIntBlockPostingsFormat;
import org.apache.lucene.codecs.mockintblock.MockVariableIntBlockPostingsFormat;
import org.apache.lucene.codecs.mocksep.MockSingleIntFactory;
import org.apache.lucene.codecs.pulsing.PulsingPostingsReader;
import org.apache.lucene.codecs.pulsing.PulsingPostingsWriter;
import org.apache.lucene.codecs.sep.IntIndexInput;
import org.apache.lucene.codecs.sep.IntIndexOutput;
import org.apache.lucene.codecs.sep.IntStreamFactory;
import org.apache.lucene.codecs.sep.SepPostingsReader;
import org.apache.lucene.codecs.sep.SepPostingsWriter;
import org.apache.lucene.codecs.memory.FSTTermsWriter;
import org.apache.lucene.codecs.memory.FSTTermsReader;
import org.apache.lucene.codecs.memory.FSTOrdTermsWriter;
import org.apache.lucene.codecs.memory.FSTOrdTermsReader;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
/**
 * Randomly combines terms index impl w/ postings impls.
 *
 * <p>All random decisions are driven by a single seed that is written into a
 * per-segment {@code .sd} file at write time and read back at read time, so
 * {@link #fieldsProducer} replays exactly the choices {@link #fieldsConsumer}
 * made for that segment.</p>
 */
public final class MockRandomPostingsFormat extends PostingsFormat {
  private final Random seedRandom;
  // Extension of the tiny per-segment file that stores the random seed.
  private final String SEED_EXT = "sd";

  public MockRandomPostingsFormat() {
    // This ctor should *only* be used at read-time: get NPE if you use it!
    this(null);
  }

  public MockRandomPostingsFormat(Random random) {
    super("MockRandom");
    if (random == null) {
      // Read-time instance: any attempt to draw a random number (i.e. any
      // write) fails loudly instead of silently producing garbage.
      this.seedRandom = new Random(0L) {
        @Override
        protected int next(int arg0) {
          throw new IllegalStateException("Please use MockRandomPostingsFormat(Random)");
        }
      };
    } else {
      this.seedRandom = new Random(random.nextLong());
    }
  }

  // Chooses random IntStreamFactory depending on file's extension
  private static class MockIntStreamFactory extends IntStreamFactory {
    private final int salt;
    private final List<IntStreamFactory> delegates = new ArrayList<IntStreamFactory>();

    public MockIntStreamFactory(Random random) {
      salt = random.nextInt();
      delegates.add(new MockSingleIntFactory());
      final int blockSize = _TestUtil.nextInt(random, 1, 2000);
      delegates.add(new MockFixedIntBlockPostingsFormat.MockIntFactory(blockSize));
      final int baseBlockSize = _TestUtil.nextInt(random, 1, 127);
      delegates.add(new MockVariableIntBlockPostingsFormat.MockIntFactory(baseBlockSize));
      // TODO: others
    }

    private static String getExtension(String fileName) {
      final int idx = fileName.indexOf('.');
      assert idx != -1;
      return fileName.substring(idx);
    }

    /**
     * Deterministically picks a delegate factory from the file's extension and
     * this factory's salt, so the same file name always maps to the same
     * delegate for both reading and writing.
     */
    private IntStreamFactory pickDelegate(String fileName) {
      // Use Math.floorMod rather than Math.abs(...) % n: Math.abs returns a
      // negative value for Integer.MIN_VALUE, which would make the index
      // negative and throw.  floorMod is always in [0, n).
      return delegates.get(Math.floorMod(salt ^ getExtension(fileName).hashCode(), delegates.size()));
    }

    @Override
    public IntIndexInput openInput(Directory dir, String fileName, IOContext context) throws IOException {
      // Must only use extension, because IW.addIndexes can
      // rename segment!
      final IntStreamFactory f = pickDelegate(fileName);
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: read using int factory " + f + " from fileName=" + fileName);
      }
      return f.openInput(dir, fileName, context);
    }

    @Override
    public IntIndexOutput createOutput(Directory dir, String fileName, IOContext context) throws IOException {
      final IntStreamFactory f = pickDelegate(fileName);
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: write using int factory " + f + " to fileName=" + fileName);
      }
      return f.createOutput(dir, fileName, context);
    }
  }

  @Override
  public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
    int minSkipInterval;
    if (state.segmentInfo.getDocCount() > 1000000) {
      // Test2BPostings can OOME otherwise:
      minSkipInterval = 3;
    } else {
      minSkipInterval = 2;
    }

    // we pull this before the seed intentionally: because its not consumed at runtime
    // (the skipInterval is written into postings header)
    int skipInterval = _TestUtil.nextInt(seedRandom, minSkipInterval, 10);

    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: skipInterval=" + skipInterval);
    }

    final long seed = seedRandom.nextLong();

    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: writing to seg=" + state.segmentInfo.name + " formatID=" + state.segmentSuffix + " seed=" + seed);
    }

    // Persist the seed so fieldsProducer can replay the same random choices.
    final String seedFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SEED_EXT);
    final IndexOutput out = state.directory.createOutput(seedFileName, state.context);
    try {
      out.writeLong(seed);
    } finally {
      out.close();
    }

    final Random random = new Random(seed);

    random.nextInt(); // consume a random for buffersize

    PostingsWriterBase postingsWriter;
    if (random.nextBoolean()) {
      postingsWriter = new SepPostingsWriter(state, new MockIntStreamFactory(random), skipInterval);
    } else {
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: writing Standard postings");
      }
      // TODO: randomize variables like acceptibleOverHead?!
      postingsWriter = new Lucene41PostingsWriter(state, skipInterval);
    }

    if (random.nextBoolean()) {
      // Optionally wrap with pulsing (inlines low-freq terms' postings).
      final int totTFCutoff = _TestUtil.nextInt(random, 1, 20);
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: writing pulsing postings with totTFCutoff=" + totTFCutoff);
      }
      postingsWriter = new PulsingPostingsWriter(state, totTFCutoff, postingsWriter);
    }

    // Pick one of four terms dictionary impls; on any failure the postings
    // writer (and, where applicable, the index writer) must be closed so we
    // don't leak file handles.
    final FieldsConsumer fields;
    final int t1 = random.nextInt(4);

    if (t1 == 0) {
      boolean success = false;
      try {
        fields = new FSTTermsWriter(state, postingsWriter);
        success = true;
      } finally {
        if (!success) {
          postingsWriter.close();
        }
      }
    } else if (t1 == 1) {
      boolean success = false;
      try {
        fields = new FSTOrdTermsWriter(state, postingsWriter);
        success = true;
      } finally {
        if (!success) {
          postingsWriter.close();
        }
      }
    } else if (t1 == 2) {
      // Use BlockTree terms dict
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: writing BlockTree terms dict");
      }

      // TODO: would be nice to allow 1 but this is very
      // slow to write
      final int minTermsInBlock = _TestUtil.nextInt(random, 2, 100);
      final int maxTermsInBlock = Math.max(2, (minTermsInBlock-1)*2 + random.nextInt(100));

      boolean success = false;
      try {
        fields = new BlockTreeTermsWriter(state, postingsWriter, minTermsInBlock, maxTermsInBlock);
        success = true;
      } finally {
        if (!success) {
          postingsWriter.close();
        }
      }
    } else {

      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: writing Block terms dict");
      }

      boolean success = false;

      final TermsIndexWriterBase indexWriter;
      try {
        if (random.nextBoolean()) {
          state.termIndexInterval = _TestUtil.nextInt(random, 1, 100);
          if (LuceneTestCase.VERBOSE) {
            System.out.println("MockRandomCodec: fixed-gap terms index (tii=" + state.termIndexInterval + ")");
          }
          indexWriter = new FixedGapTermsIndexWriter(state);
        } else {
          final VariableGapTermsIndexWriter.IndexTermSelector selector;
          final int n2 = random.nextInt(3);
          if (n2 == 0) {
            final int tii = _TestUtil.nextInt(random, 1, 100);
            selector = new VariableGapTermsIndexWriter.EveryNTermSelector(tii);
            if (LuceneTestCase.VERBOSE) {
              System.out.println("MockRandomCodec: variable-gap terms index (tii=" + tii + ")");
            }
          } else if (n2 == 1) {
            final int docFreqThresh = _TestUtil.nextInt(random, 2, 100);
            final int tii = _TestUtil.nextInt(random, 1, 100);
            selector = new VariableGapTermsIndexWriter.EveryNOrDocFreqTermSelector(docFreqThresh, tii);
          } else {
            final long seed2 = random.nextLong();
            final int gap = _TestUtil.nextInt(random, 2, 40);
            if (LuceneTestCase.VERBOSE) {
              System.out.println("MockRandomCodec: random-gap terms index (max gap=" + gap + ")");
            }
            // Anonymous selector that flags index terms pseudo-randomly but
            // deterministically (fixed seed2), so it is reproducible.
            selector = new VariableGapTermsIndexWriter.IndexTermSelector() {
                final Random rand = new Random(seed2);

                @Override
                public boolean isIndexTerm(BytesRef term, TermStats stats) {
                  return rand.nextInt(gap) == gap/2;
                }

                @Override
                public void newField(FieldInfo fieldInfo) {
                }
              };
          }
          indexWriter = new VariableGapTermsIndexWriter(state, selector);
        }
        success = true;
      } finally {
        if (!success) {
          postingsWriter.close();
        }
      }

      success = false;
      try {
        fields = new BlockTermsWriter(indexWriter, state, postingsWriter);
        success = true;
      } finally {
        if (!success) {
          try {
            postingsWriter.close();
          } finally {
            indexWriter.close();
          }
        }
      }
    }

    return fields;
  }

  @Override
  public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException {

    // Recover the seed written by fieldsConsumer; close the input even if the
    // read throws, so we don't leak the file handle.
    final String seedFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, SEED_EXT);
    final IndexInput in = state.directory.openInput(seedFileName, state.context);
    final long seed;
    try {
      seed = in.readLong();
    } finally {
      in.close();
    }

    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: reading from seg=" + state.segmentInfo.name + " formatID=" + state.segmentSuffix + " seed=" + seed);
    }

    final Random random = new Random(seed);

    // NOTE: this must consume the same number of randoms as the
    // buffersize-consume in fieldsConsumer, to keep the sequences aligned.
    int readBufferSize = _TestUtil.nextInt(random, 1, 4096);
    if (LuceneTestCase.VERBOSE) {
      System.out.println("MockRandomCodec: readBufferSize=" + readBufferSize);
    }

    PostingsReaderBase postingsReader;

    if (random.nextBoolean()) {
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: reading Sep postings");
      }
      postingsReader = new SepPostingsReader(state.directory, state.fieldInfos, state.segmentInfo,
                                             state.context, new MockIntStreamFactory(random), state.segmentSuffix);
    } else {
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: reading Standard postings");
      }
      postingsReader = new Lucene41PostingsReader(state.directory, state.fieldInfos, state.segmentInfo, state.context, state.segmentSuffix);
    }

    if (random.nextBoolean()) {
      final int totTFCutoff = _TestUtil.nextInt(random, 1, 20);
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: reading pulsing postings with totTFCutoff=" + totTFCutoff);
      }
      postingsReader = new PulsingPostingsReader(state, postingsReader);
    }

    // Mirror of the four-way terms dict choice made at write time.
    final FieldsProducer fields;
    final int t1 = random.nextInt(4);
    if (t1 == 0) {
      boolean success = false;
      try {
        fields = new FSTTermsReader(state, postingsReader);
        success = true;
      } finally {
        if (!success) {
          postingsReader.close();
        }
      }
    } else if (t1 == 1) {
      boolean success = false;
      try {
        fields = new FSTOrdTermsReader(state, postingsReader);
        success = true;
      } finally {
        if (!success) {
          postingsReader.close();
        }
      }
    } else if (t1 == 2) {
      // Use BlockTree terms dict
      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: reading BlockTree terms dict");
      }
      boolean success = false;
      try {
        fields = new BlockTreeTermsReader(state.directory,
                                          state.fieldInfos,
                                          state.segmentInfo,
                                          postingsReader,
                                          state.context,
                                          state.segmentSuffix,
                                          state.termsIndexDivisor);
        success = true;
      } finally {
        if (!success) {
          postingsReader.close();
        }
      }
    } else {

      if (LuceneTestCase.VERBOSE) {
        System.out.println("MockRandomCodec: reading Block terms dict");
      }
      final TermsIndexReaderBase indexReader;
      boolean success = false;
      try {
        final boolean doFixedGap = random.nextBoolean();

        // randomness diverges from writer, here:
        if (state.termsIndexDivisor != -1) {
          state.termsIndexDivisor = _TestUtil.nextInt(random, 1, 10);
        }

        if (doFixedGap) {
          // if termsIndexDivisor is set to -1, we should not touch it. It means a
          // test explicitly instructed not to load the terms index.
          if (LuceneTestCase.VERBOSE) {
            System.out.println("MockRandomCodec: fixed-gap terms index (divisor=" + state.termsIndexDivisor + ")");
          }
          indexReader = new FixedGapTermsIndexReader(state.directory,
                                                     state.fieldInfos,
                                                     state.segmentInfo.name,
                                                     state.termsIndexDivisor,
                                                     BytesRef.getUTF8SortedAsUnicodeComparator(),
                                                     state.segmentSuffix, state.context);
        } else {
          // Consume the same randoms the writer's variable-gap selector setup
          // consumed, to keep the sequences aligned.
          final int n2 = random.nextInt(3);
          if (n2 == 1) {
            random.nextInt();
          } else if (n2 == 2) {
            random.nextLong();
          }
          if (LuceneTestCase.VERBOSE) {
            System.out.println("MockRandomCodec: variable-gap terms index (divisor=" + state.termsIndexDivisor + ")");
          }
          indexReader = new VariableGapTermsIndexReader(state.directory,
                                                        state.fieldInfos,
                                                        state.segmentInfo.name,
                                                        state.termsIndexDivisor,
                                                        state.segmentSuffix, state.context);
        }

        success = true;
      } finally {
        if (!success) {
          postingsReader.close();
        }
      }

      success = false;
      try {
        fields = new BlockTermsReader(indexReader,
                                      state.directory,
                                      state.fieldInfos,
                                      state.segmentInfo,
                                      postingsReader,
                                      state.context,
                                      state.segmentSuffix);
        success = true;
      } finally {
        if (!success) {
          try {
            postingsReader.close();
          } finally {
            indexReader.close();
          }
        }
      }
    }

    return fields;
  }
}
| |
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.core.view;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Integration tests verifying that the nested-scrolling helper classes correctly bridge
 * between children and parents that implement different versions (V1, V2, V3) of the
 * NestedScrollingChild/Parent interfaces.
 */
@RunWith(AndroidJUnit4.class)
@SmallTest
public class NestedScrollingHelperIntegrationTest {

    // One view per interface version; tests pair them up as child/parent.
    private NestedScrollingImpl3 mNestedScrollingImpl3;
    private NestedScrollingImpl2 mNestedScrollingImpl2;
    private NestedScrollingImpl mNestedScrollingImpl;

    @Before
    public void setup() {
        mNestedScrollingImpl3 = new NestedScrollingImpl3(
                ApplicationProvider.getApplicationContext());
        mNestedScrollingImpl2 = new NestedScrollingImpl2(
                ApplicationProvider.getApplicationContext());
        mNestedScrollingImpl = new NestedScrollingImpl(ApplicationProvider.getApplicationContext());
    }

    // V1 child dispatching to a V3 parent: the dispatch should still report success.
    @Test
    public void dispatchNestedScroll_childIsV1ParentIsV3_returnsTrue() {
        measureAndLayout(mNestedScrollingImpl, mNestedScrollingImpl3);
        mNestedScrollingImpl.setNestedScrollingEnabled(true);
        mNestedScrollingImpl.startNestedScroll(ViewCompat.SCROLL_AXIS_VERTICAL);

        boolean retValue = mNestedScrollingImpl.dispatchNestedScroll(0,
                0, 0, 100, null);

        assertThat(retValue, equalTo(true));
    }

    // V3 child over a V1 parent: the full unconsumed dy (100) is added to the
    // caller-provided consumed array, on top of the pre-existing values.
    @Test
    public void dispatchNestedScroll_childIsV3ParentIsV1_fullScrollDistancesAddedToConsumed() {
        measureAndLayout(mNestedScrollingImpl3, mNestedScrollingImpl);
        mNestedScrollingImpl3.setNestedScrollingEnabled(true);
        mNestedScrollingImpl3.startNestedScroll(ViewCompat.SCROLL_AXIS_VERTICAL,
                ViewCompat.TYPE_TOUCH);
        int[] consumed = new int[]{11, 12};

        mNestedScrollingImpl3.dispatchNestedScroll(0,
                0, 0, 100, null,
                ViewCompat.TYPE_TOUCH, consumed);

        assertThat(consumed, equalTo(new int[]{11, 112}));
    }

    // V2 child dispatching to a V3 parent: the dispatch should report success.
    @Test
    public void dispatchNestedScroll_childIsV2ParentIsV3_returnsTrue() {
        measureAndLayout(mNestedScrollingImpl2, mNestedScrollingImpl3);
        mNestedScrollingImpl2.setNestedScrollingEnabled(true);
        mNestedScrollingImpl2.startNestedScroll(ViewCompat.SCROLL_AXIS_VERTICAL,
                ViewCompat.TYPE_TOUCH);

        boolean retValue = mNestedScrollingImpl2.dispatchNestedScroll(0,
                0, 0, 100, null,
                ViewCompat.TYPE_TOUCH);

        assertThat(retValue, equalTo(true));
    }

    // V3 child over a V2 parent: same expectation as the V3-over-V1 case.
    @Test
    public void dispatchNestedScroll_childIsV3ParentIsV2_fullScrollDistancesAddedToConsumed() {
        measureAndLayout(mNestedScrollingImpl3, mNestedScrollingImpl2);
        mNestedScrollingImpl3.setNestedScrollingEnabled(true);
        mNestedScrollingImpl3.startNestedScroll(ViewCompat.SCROLL_AXIS_VERTICAL,
                ViewCompat.TYPE_TOUCH);
        int[] consumed = new int[]{11, 12};

        mNestedScrollingImpl3.dispatchNestedScroll(
                0, 0,
                0, 100,
                null, ViewCompat.TYPE_TOUCH, consumed);

        assertThat(consumed, equalTo(new int[]{11, 112}));
    }

    // Attaches child to parent and runs a fixed 500x500 measure/layout pass so
    // the views are in a realistic attached state before dispatching scrolls.
    private void measureAndLayout(final View child, final ViewGroup parent) {
        parent.addView(child);
        int measureSpec = View.MeasureSpec.makeMeasureSpec(500, View.MeasureSpec.EXACTLY);
        parent.measure(measureSpec, measureSpec);
        parent.layout(0, 0, 500, 500);
    }

    /**
     * V1 implementation: a FrameLayout that is both a nested scrolling parent and child,
     * delegating everything to NestedScrollingChildHelper/NestedScrollingParentHelper.
     */
    public static class NestedScrollingImpl extends FrameLayout implements NestedScrollingParent,
            NestedScrollingChild {

        public NestedScrollingChildHelper mNestedScrollingChildHelper;
        public NestedScrollingParentHelper mNestedScrollingParentHelper;

        public NestedScrollingImpl(@NonNull Context context) {
            super(context);
            mNestedScrollingChildHelper = new NestedScrollingChildHelper(this);
            mNestedScrollingParentHelper = new NestedScrollingParentHelper(this);
        }

        @Override
        public boolean onStartNestedScroll(@NonNull View child, @NonNull View target, int axes) {
            // Always accept, so dispatch tests never short-circuit here.
            return true;
        }

        @Override
        public void onNestedScrollAccepted(@NonNull View child, @NonNull View target, int axes) {
            mNestedScrollingParentHelper.onNestedScrollAccepted(child, target, axes);
        }

        @Override
        public void onStopNestedScroll(@NonNull View target) {
            mNestedScrollingParentHelper.onStopNestedScroll(target);
        }

        @Override
        public void onNestedScroll(@NonNull View target, int dxConsumed, int dyConsumed,
                int dxUnconsumed, int dyUnconsumed) {
            // Forward the scroll further up the hierarchy.
            dispatchNestedScroll(dxConsumed, dyConsumed, dxUnconsumed, dyUnconsumed,
                    null);
        }

        @Override
        public void onNestedPreScroll(@NonNull View target, int dx, int dy,
                @NonNull int[] consumed) {
            dispatchNestedPreScroll(dx, dy, consumed, null);
        }

        @Override
        public boolean onNestedFling(@NonNull View target, float velocityX, float velocityY,
                boolean consumed) {
            return false;
        }

        @Override
        public boolean onNestedPreFling(@NonNull View target, float velocityX, float velocityY) {
            return false;
        }

        @Override
        public int getNestedScrollAxes() {
            return mNestedScrollingParentHelper.getNestedScrollAxes();
        }

        @Override
        public void setNestedScrollingEnabled(boolean enabled) {
            mNestedScrollingChildHelper.setNestedScrollingEnabled(enabled);
        }

        @Override
        public boolean isNestedScrollingEnabled() {
            return mNestedScrollingChildHelper.isNestedScrollingEnabled();
        }

        @Override
        public boolean startNestedScroll(int axes) {
            return mNestedScrollingChildHelper.startNestedScroll(axes);
        }

        @Override
        public void stopNestedScroll() {
            mNestedScrollingChildHelper.stopNestedScroll();
        }

        @Override
        public boolean hasNestedScrollingParent() {
            return mNestedScrollingChildHelper.hasNestedScrollingParent();
        }

        @Override
        public boolean dispatchNestedScroll(int dxConsumed, int dyConsumed, int dxUnconsumed,
                int dyUnconsumed, @Nullable int[] offsetInWindow) {
            return mNestedScrollingChildHelper.dispatchNestedScroll(dxConsumed, dyConsumed,
                    dxUnconsumed, dyUnconsumed, offsetInWindow);
        }

        @Override
        public boolean dispatchNestedPreScroll(int dx, int dy, @Nullable int[] consumed,
                @Nullable int[] offsetInWindow) {
            return mNestedScrollingChildHelper.dispatchNestedPreScroll(dx, dy, consumed,
                    offsetInWindow);
        }

        @Override
        public boolean dispatchNestedFling(float velocityX, float velocityY, boolean consumed) {
            return mNestedScrollingChildHelper.dispatchNestedFling(velocityX, velocityY, consumed);
        }

        @Override
        public boolean dispatchNestedPreFling(float velocityX, float velocityY) {
            return mNestedScrollingChildHelper.dispatchNestedPreFling(velocityX, velocityY);
        }
    }

    /**
     * V2 implementation: adds the type-aware (TYPE_TOUCH / TYPE_NON_TOUCH) overloads
     * on top of the V1 behavior, again delegating to the helpers.
     */
    public static class NestedScrollingImpl2 extends NestedScrollingImpl
            implements NestedScrollingParent2, NestedScrollingChild2 {

        public NestedScrollingImpl2(Context context) {
            super(context);
        }

        @Override
        public boolean onStartNestedScroll(@NonNull View child, @NonNull View target, int axes,
                int type) {
            return true;
        }

        @Override
        public void onNestedScrollAccepted(@NonNull View child, @NonNull View target, int axes,
                int type) {
            mNestedScrollingParentHelper.onNestedScrollAccepted(child, target, axes, type);
        }

        @Override
        public void onStopNestedScroll(@NonNull View target, int type) {
            mNestedScrollingParentHelper.onStopNestedScroll(target, type);
        }

        @Override
        public void onNestedScroll(@NonNull View target, int dxConsumed, int dyConsumed,
                int dxUnconsumed, int dyUnconsumed, int type) {
            dispatchNestedScroll(dxConsumed, dyConsumed, dxUnconsumed, dyUnconsumed,
                    null, type);
        }

        @Override
        public void onNestedPreScroll(@NonNull View target, int dx, int dy, @NonNull int[] consumed,
                int type) {
            dispatchNestedPreScroll(dx, dy, consumed, null);
        }

        @Override
        public boolean startNestedScroll(int axes, int type) {
            return mNestedScrollingChildHelper.startNestedScroll(axes, type);
        }

        @Override
        public void stopNestedScroll(int type) {
            mNestedScrollingChildHelper.stopNestedScroll(type);
        }

        @Override
        public boolean hasNestedScrollingParent(int type) {
            return mNestedScrollingChildHelper.hasNestedScrollingParent(type);
        }

        @Override
        public boolean dispatchNestedScroll(int dxConsumed, int dyConsumed, int dxUnconsumed,
                int dyUnconsumed, @Nullable int[] offsetInWindow, int type) {
            return mNestedScrollingChildHelper.dispatchNestedScroll(dxConsumed, dyConsumed,
                    dxUnconsumed, dyUnconsumed, offsetInWindow, type);
        }

        @Override
        public boolean dispatchNestedPreScroll(int dx, int dy, @Nullable int[] consumed,
                @Nullable int[] offsetInWindow, int type) {
            return mNestedScrollingChildHelper.dispatchNestedPreScroll(dx, dy, consumed,
                    offsetInWindow, type);
        }
    }

    /**
     * V3 implementation: adds the consumed[]-reporting overloads on top of V2,
     * delegating to the child helper.
     */
    public static class NestedScrollingImpl3 extends NestedScrollingImpl2
            implements NestedScrollingParent3, NestedScrollingChild3 {

        public NestedScrollingImpl3(Context context) {
            super(context);
        }

        @Override
        public void onNestedScroll(@NonNull View target, int dxConsumed, int dyConsumed,
                int dxUnconsumed, int dyUnconsumed, int type, @NonNull int[] consumed) {
            dispatchNestedScroll(dxConsumed, dyConsumed, dxUnconsumed,
                    dyUnconsumed, null, type, consumed);
        }

        @Override
        public void dispatchNestedScroll(int dxConsumed, int dyConsumed, int dxUnconsumed,
                int dyUnconsumed, @Nullable int[] offsetInWindow, int type,
                @NonNull int[] consumed) {
            mNestedScrollingChildHelper.dispatchNestedScroll(dxConsumed, dyConsumed,
                    dxUnconsumed, dyUnconsumed, offsetInWindow, type, consumed);
        }
    }
}
| |
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.factory.fiducial;
import boofcv.alg.fiducial.qrcode.PackedBits32;
import boofcv.misc.BoofMiscOps;
import boofcv.struct.Configuration;
import org.ddogleg.struct.FastArray;
import java.io.*;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
 * Defines the dictionary and how they are encoded in a Hamming distance marker.
 *
 * <p>Values for each pre-defined dictionary comes from ArUco marker 3 source code. [1]</p>
 *
 * <p>[1] ArUco 3 source code</p>
 *
 * @author Peter Abeles
 * @see boofcv.alg.fiducial.square.DetectFiducialSquareHamming
 */
@SuppressWarnings({"NullAway.Init"})
public class ConfigHammingMarker implements Configuration {
	/** How wide the border is relative to the total fiducial width. Typically, the width of one square. */
	public double borderWidthFraction = 0.25;

	/** Number of cells along each side in the binary grid */
	public int gridWidth = -1;

	/** The minimum hamming distance separating two markers */
	public int minimumHamming;

	/** How each marker is encoded */
	public FastArray<Marker> encoding = new FastArray<>(Marker.class);

	/** Which dictionary is this based off of. Typically, this will be pre-defined. */
	public HammingDictionary dictionary;

	/** Length of a targets size in world units. */
	public double targetWidth = 1;

	public ConfigHammingMarker() {}

	@Override public void checkValidity() {
		BoofMiscOps.checkTrue(borderWidthFraction > 0.0);
		BoofMiscOps.checkTrue(gridWidth > 0);
		BoofMiscOps.checkTrue(minimumHamming >= 0);
		BoofMiscOps.checkTrue(targetWidth > 0);
		// every marker must have a non-empty bit pattern
		for (int i = 0; i < encoding.size(); i++) {
			encoding.get(i).checkValidity();
		}
	}

	@Override public void serializeInitialize() {
		// If it's custom then the dictionary was encoded
		if (dictionary == HammingDictionary.CUSTOM)
			return;

		// Otherwise, we need to load a pre-defined dictionary
		this.encoding = loadDictionary(dictionary).encoding;
	}

	// NOTE: field names listed here must match the declared field names above,
	// since they select which fields get serialized.
	@Override public List<String> serializeActiveFields() {
		List<String> active = new ArrayList<>();
		active.add("borderWidthFraction");
		active.add("gridWidth");
		active.add("minimumHamming");
		active.add("dictionary");
		// the raw encoding is only serialized for custom dictionaries;
		// pre-defined ones are reloaded from resources instead
		if (dictionary == HammingDictionary.CUSTOM)
			active.add("encoding");

		return active;
	}

	/** Copies all values from 'src' into this instance. Marker refs are shared, not deep copied. */
	public ConfigHammingMarker setTo( ConfigHammingMarker src ) {
		this.borderWidthFraction = src.borderWidthFraction;
		this.gridWidth = src.gridWidth;
		this.minimumHamming = src.minimumHamming;
		this.dictionary = src.dictionary;
		this.targetWidth = src.targetWidth;
		this.encoding.clear();
		encoding.addAll(src.encoding);
		return this;
	}

	/** Total number of bits in the binary grid */
	public int bitsPerGrid() {
		return gridWidth*gridWidth;
	}

	/**
	 * Adds a new marker with the specified encoding number
	 */
	public void addMarker( long encoding ) {
		var m = new Marker();
		this.encoding.add(m);
		m.pattern.resize(gridWidth*gridWidth);
		// unpack the long, least-significant bit first, into the bit pattern
		for (int bit = 0; bit < m.pattern.size; bit++) {
			m.pattern.set(bit, (int)((encoding >> bit) & 1L));
		}
	}

	/**
	 * Defines a marker
	 */
	public static class Marker {
		/** Expected binary bit pattern */
		public final PackedBits32 pattern = new PackedBits32();

		public void checkValidity() {
			BoofMiscOps.checkTrue(pattern.size > 0);
		}

		public void setTo( Marker src ) {
			this.pattern.setTo(src.pattern);
		}
	}

	/**
	 * Decodes a string that defined a dictionary in standard format.
	 * Expected format: one "key=value" pair per line; '#' starts a comment line.
	 * Recognized keys: grid_width, minimum_hamming, dictionary (comma separated
	 * marker values, decimal or 0x-prefixed hex).
	 */
	public static ConfigHammingMarker decodeDictionaryString( String text ) {
		var config = new ConfigHammingMarker();

		String[] lines = text.split("\n");
		for (int i = 0; i < lines.length; i++) {
			String line = lines[i];
			// skip blank lines and comments
			if (line.isEmpty() || line.charAt(0) == '#')
				continue;

			String[] words = line.split("=");
			if (words.length != 2)
				throw new RuntimeException("Expected 2 words on line " + i);

			switch (words[0]) {
				case "grid_width" -> config.gridWidth = Integer.parseInt(words[1]);
				case "minimum_hamming" -> config.minimumHamming = Integer.parseInt(words[1]);
				case "dictionary" -> {
					String[] ids = words[1].split(",");
					for (int idIdx = 0; idIdx < ids.length; idIdx++) {
						if (ids[idIdx].startsWith("0x"))
							config.addMarker(Long.parseUnsignedLong(ids[idIdx].substring(2), 16));
						else
							config.addMarker(Long.parseUnsignedLong(ids[idIdx]));
					}
				}
				default -> throw new RuntimeException("Unknown key='" + words[0] + "'");
			}
		}

		// Border will be one square wide
		config.borderWidthFraction = 1.0/(config.gridWidth + 2.0);

		return config;
	}

	/**
	 * Loads a predefined dictionary stored in the resources
	 */
	public static ConfigHammingMarker loadPredefined( String name ) {
		// dictionaries are bundled as "<name>.txt" next to this class
		URL path = Objects.requireNonNull(ConfigHammingMarker.class.getResource(name + ".txt"));
		try (InputStream stream = path.openStream()) {
			String text = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8))
					.lines().collect(Collectors.joining("\n"));
			return decodeDictionaryString(text);
		} catch (IOException e) {
			throw new UncheckedIOException(e);
		}
	}

	/**
	 * Creates a predefined dictionary
	 *
	 * @param dictionary Which dictionary it should create
	 * @return The specified dictionary
	 */
	public static ConfigHammingMarker loadDictionary( HammingDictionary dictionary ) {
		ConfigHammingMarker config = switch (dictionary) {
			case CUSTOM -> throw new IllegalArgumentException("Need to manually specify a custom dictionary");
			case ARUCO_ORIGINAL -> loadPredefined("aruco_original");
			case ARUCO_MIP_16h3 -> loadPredefined("aruco_mip_16h3");
			case ARUCO_MIP_25h7 -> loadPredefined("aruco_mip_25h7");
			case ARUCO_MIP_36h12 -> loadPredefined("aruco_mip_36h12");
			case ARUCO_OCV_4x4_1000 -> loadPredefined("aruco_ocv_4x4_1000");
			case ARUCO_OCV_5x5_1000 -> loadPredefined("aruco_ocv_5x5_1000");
			case ARUCO_OCV_6x6_1000 -> loadPredefined("aruco_ocv_6x6_1000");
			case ARUCO_OCV_7x7_1000 -> loadPredefined("aruco_ocv_7x7_1000");
			case APRILTAG_16h5 -> loadPredefined("apriltag_16h5");
			case APRILTAG_25h7 -> loadPredefined("apriltag_25h7");
			case APRILTAG_25h9 -> loadPredefined("apriltag_25h9");
			case APRILTAG_36h10 -> loadPredefined("apriltag_36h10");
			case APRILTAG_36h11 -> loadPredefined("apriltag_36h11");
		};
		config.dictionary = dictionary;
		return config;
	}
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.changes.ui;
import com.intellij.diff.chains.DiffRequestChain;
import com.intellij.diff.util.DiffUserDataKeysEx;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DeleteProvider;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.actionSystem.ex.CustomComponentAction;
import com.intellij.openapi.actionSystem.ex.ThreeStateCheckboxAction;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diff.DiffBundle;
import com.intellij.openapi.fileChooser.actions.VirtualFileDeleteProvider;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.VcsDataKeys;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.actions.RollbackDialogAction;
import com.intellij.openapi.vcs.changes.actions.diff.UnversionedDiffRequestProducer;
import com.intellij.openapi.vcs.changes.actions.diff.lst.LocalChangeListDiffTool;
import com.intellij.openapi.vcs.impl.LineStatusTrackerManager;
import com.intellij.ui.CollectionComboBoxModel;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.ThreeStateCheckBox.State;
import com.intellij.util.ui.update.DisposableUpdate;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.vcs.commit.PartialCommitChangeNodeDecorator;
import com.intellij.vcs.commit.PartialCommitInclusionModel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.tree.DefaultTreeModel;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.Supplier;
import static com.intellij.openapi.vcs.changes.ui.ChangesListView.EXACTLY_SELECTED_FILES_DATA_KEY;
import static com.intellij.openapi.vcs.changes.ui.ChangesListView.UNVERSIONED_FILE_PATHS_DATA_KEY;
import static com.intellij.util.ui.update.MergingUpdateQueue.ANY_COMPONENT;
/**
 * Commit-dialog changes browser that works with several {@link LocalChangeList}s:
 * it shows the contents of one selected changelist at a time, can optionally show
 * unversioned files, and supports partial (per-change-range) commit inclusion.
 */
class MultipleLocalChangeListsBrowser extends CommitDialogChangesBrowser implements Disposable {
  // Coalesces bursts of changelist events into a single UI refresh (300 ms quiet period).
  @NotNull private final MergingUpdateQueue myUpdateQueue =
    new MergingUpdateQueue("MultipleLocalChangeListsBrowser", 300, true, ANY_COMPONENT, this);
  private final boolean myEnableUnversioned;   // whether unversioned files may be shown at all
  private final boolean myEnablePartialCommit; // whether per-hunk inclusion is allowed in the diff
  @Nullable private Supplier<? extends JComponent> myBottomDiffComponent;
  @NotNull private final ChangeListChooser myChangeListChooser;
  @NotNull private final DeleteProvider myDeleteProvider = new VirtualFileDeleteProvider();
  @NotNull private final PartialCommitInclusionModel myInclusionModel;
  // The currently selected changelist; its changes are what the tree displays.
  @NotNull private LocalChangeList myChangeList;
  // Cached contents of the selected changelist, rebuilt by updateDisplayedChanges().
  private final List<Change> myChanges = new ArrayList<>();
  private final List<FilePath> myUnversioned = new ArrayList<>();
  // True when unversioned files exist but are hidden by the "show unversioned" setting.
  private boolean myHasHiddenUnversioned;
  // Invoked after the selected changelist actually changes (i.e. a list with a different id).
  @Nullable private Runnable mySelectedListChangeListener;
  private final RollbackDialogAction myRollbackDialogAction;
  MultipleLocalChangeListsBrowser(@NotNull Project project,
                                  boolean showCheckboxes,
                                  boolean highlightProblems,
                                  boolean enableUnversioned,
                                  boolean enablePartialCommit) {
    super(project, showCheckboxes, highlightProblems);
    myEnableUnversioned = enableUnversioned;
    myEnablePartialCommit = enablePartialCommit;
    ChangeListManager changeListManager = ChangeListManager.getInstance(project);
    // Start on the default changelist; the chooser may switch it later.
    myChangeList = changeListManager.getDefaultChangeList();
    myChangeListChooser = new ChangeListChooser();
    myRollbackDialogAction = new RollbackDialogAction();
    // Make the rollback shortcut available on the whole browser component.
    myRollbackDialogAction.registerCustomShortcutSet(this, null);
    if (!changeListManager.areChangeListsEnabled()) {
      myChangeListChooser.setVisible(false);
    }
    else if (Registry.is("vcs.skip.single.default.changelist")) {
      // Hide the chooser when the only list is the untouched default one.
      List<LocalChangeList> allChangeLists = changeListManager.getChangeLists();
      if (allChangeLists.size() == 1 && allChangeLists.get(0).isBlank()) {
        myChangeListChooser.setVisible(false);
      }
    }
    myInclusionModel = new PartialCommitInclusionModel(myProject);
    Disposer.register(this, myInclusionModel);
    getViewer().setInclusionModel(myInclusionModel);
    changeListManager.addChangeListListener(new MyChangeListListener(), this);
    init();
    updateDisplayedChangeLists();
    updateSelectedChangeList(myChangeList);
    // Repaint when managed-files holders (e.g. unversioned file scanners) update.
    project.getMessageBus().connect(this)
      .subscribe(VcsManagedFilesHolder.TOPIC, () -> {
        ApplicationManager.getApplication().invokeLater(() -> {
          myViewer.repaint();
        });
      });
  }
  /** Header above the tree: the changelist chooser combo box. */
  @Nullable
  @Override
  protected JComponent createHeaderPanel() {
    return JBUI.Panels.simplePanel(myChangeListChooser)
      .withBorder(JBUI.Borders.emptyLeft(6));
  }
  @NotNull
  @Override
  protected List<AnAction> createToolbarActions() {
    AnAction rollbackGroup = createRollbackGroup(true);
    return ContainerUtil.append(
      super.createToolbarActions(),
      rollbackGroup,
      ActionManager.getInstance().getAction("ChangesView.Refresh"),
      ActionManager.getInstance().getAction("Vcs.CheckinProjectToolbar")
    );
  }
  /**
   * Returns the rollback action alone when there are no extras, otherwise a group
   * (popup or inline, per {@code popup}) that borrows the standard rollback presentation.
   */
  private AnAction createRollbackGroup(boolean popup) {
    List<? extends AnAction> rollbackActions = createAdditionalRollbackActions();
    if (rollbackActions.isEmpty()) {
      return myRollbackDialogAction;
    }
    DefaultActionGroup group = new DefaultActionGroup(myRollbackDialogAction);
    group.addAll(rollbackActions);
    ActionUtil.copyFrom(group, IdeActions.CHANGES_VIEW_ROLLBACK);
    group.setPopup(popup);
    return group;
  }
  /** Extension point for subclasses to contribute extra rollback actions. */
  protected List<? extends AnAction> createAdditionalRollbackActions() {
    return Collections.emptyList();
  }
  @NotNull
  @Override
  protected List<AnAction> createPopupMenuActions() {
    List<AnAction> result = new ArrayList<>(super.createPopupMenuActions());
    result.add(ActionManager.getInstance().getAction("ChangesView.Refresh"));
    if (myEnableUnversioned) {
      result.add(new ShowHideUnversionedFilesAction());
      result.add(UnversionedViewDialog.registerUnversionedPopupGroup(myViewer));
    }
    else {
      // avoid duplicated actions on toolbar
      result.add(ActionManager.getInstance().getAction(IdeActions.MOVE_TO_ANOTHER_CHANGE_LIST));
    }
    // Register the "move to another changelist" shortcut directly on the tree.
    EmptyAction.registerWithShortcutSet(IdeActions.MOVE_TO_ANOTHER_CHANGE_LIST, CommonShortcuts.getMove(), myViewer);
    result.add(createRollbackGroup(false));
    EditSourceForDialogAction editSourceAction = new EditSourceForDialogAction(this);
    editSourceAction.registerCustomShortcutSet(CommonShortcuts.getEditSource(), this);
    result.add(editSourceAction);
    result.add(ActionManager.getInstance().getAction("Vcs.CheckinProjectMenu"));
    return result;
  }
  @NotNull
  @Override
  protected List<AnAction> createDiffActions() {
    return ContainerUtil.append(
      super.createDiffActions(),
      new ToggleChangeDiffAction()
    );
  }
  @Override
  protected void updateDiffContext(@NotNull DiffRequestChain chain) {
    super.updateDiffContext(chain);
    if (myBottomDiffComponent != null) {
      chain.putUserData(DiffUserDataKeysEx.BOTTOM_PANEL, myBottomDiffComponent.get());
    }
    // Allow per-range exclusion in diff only when partial commit is enabled for this browser.
    chain.putUserData(LocalChangeListDiffTool.ALLOW_EXCLUDE_FROM_COMMIT, myEnablePartialCommit);
    chain.putUserData(DiffUserDataKeysEx.LAST_REVISION_WITH_LOCAL, true);
  }
  public void setBottomDiffComponent(@Nullable Supplier<? extends JComponent> value) {
    myBottomDiffComponent = value;
  }
  public void setSelectedListChangeListener(@Nullable Runnable runnable) {
    mySelectedListChangeListener = runnable;
  }
  /** Unversioned files are shown only when enabled AND the user setting allows it. */
  private boolean isShowUnversioned() {
    return myEnableUnversioned && VcsConfiguration.getInstance(myProject).SHOW_UNVERSIONED_FILES_WHILE_COMMIT;
  }
  private void setShowUnversioned(boolean value) {
    VcsConfiguration.getInstance(myProject).SHOW_UNVERSIONED_FILES_WHILE_COMMIT = value;
    updateDisplayedChanges();
  }
  @NotNull
  @Override
  public LocalChangeList getSelectedChangeList() {
    return myChangeList;
  }
  public void setSelectedChangeList(@NotNull LocalChangeList list) {
    myChangeListChooser.setSelectedChangeList(list);
  }
  /**
   * Switches the browser to {@code list}: refreshes the displayed changes, notifies the
   * selection listener, and rebinds the inclusion model. When a genuinely different list
   * is selected, per-range "excluded from commit" markers are reset first.
   */
  private void updateSelectedChangeList(@NotNull LocalChangeList list) {
    boolean isListChanged = !myChangeList.getId().equals(list.getId());
    if (isListChanged) {
      LineStatusTrackerManager.getInstanceImpl(myProject).resetExcludedFromCommitMarkers();
    }
    myChangeList = list;
    myChangeListChooser.setToolTipText(list.getName());
    updateDisplayedChanges();
    if (isListChanged && mySelectedListChangeListener != null) mySelectedListChangeListener.run();
    myInclusionModel.setChangeLists(List.of(myChangeList));
  }
  @Override
  public void updateDisplayedChangeLists() {
    List<LocalChangeList> changeLists = ChangeListManager.getInstance(myProject).getChangeLists();
    myChangeListChooser.setAvailableLists(changeLists);
  }
  /** Rebuilds the cached change/unversioned lists from the selected changelist and refreshes the tree. */
  public void updateDisplayedChanges() {
    myChanges.clear();
    myUnversioned.clear();
    myHasHiddenUnversioned = false;
    myChanges.addAll(myChangeList.getChanges());
    if (myEnableUnversioned) {
      List<FilePath> unversioned = ChangeListManager.getInstance(myProject).getUnversionedFilesPaths();
      if (isShowUnversioned()) {
        myUnversioned.addAll(unversioned);
      }
      // Remember that unversioned files exist but are hidden, so the empty text can offer a "show" link.
      if (!isShowUnversioned() && !unversioned.isEmpty()) {
        myHasHiddenUnversioned = true;
      }
    }
    myViewer.rebuildTree();
  }
  @NotNull
  @Override
  protected DefaultTreeModel buildTreeModel() {
    PartialCommitChangeNodeDecorator decorator =
      new PartialCommitChangeNodeDecorator(myProject, RemoteRevisionsCache.getInstance(myProject).getChangesNodeDecorator());
    TreeModelBuilder builder = new TreeModelBuilder(myProject, getGrouping());
    builder.setChanges(myChanges, decorator);
    builder.setUnversioned(myUnversioned);
    if (myHasHiddenUnversioned) {
      // Offer a link that re-enables the "show unversioned" setting.
      myViewer.getEmptyText()
        .setText(VcsBundle.message("status.text.unversioned.files.available"))
        .appendText(VcsBundle.message("plugins.configurable.show"), SimpleTextAttributes.LINK_ATTRIBUTES, e -> setShowUnversioned(true));
    }
    else {
      myViewer.getEmptyText()
        .setText(DiffBundle.message("diff.count.differences.status.text", 0));
    }
    return builder.build();
  }
  @Nullable
  @Override
  protected ChangeDiffRequestChain.Producer getDiffRequestProducer(@NotNull Object entry) {
    // Unversioned entries are FilePaths (no Change exists for them yet).
    if (entry instanceof FilePath) {
      return UnversionedDiffRequestProducer.create(myProject, (FilePath)entry);
    }
    return super.getDiffRequestProducer(entry);
  }
  @Nullable
  @Override
  public Object getData(@NotNull String dataId) {
    if (UNVERSIONED_FILE_PATHS_DATA_KEY.is(dataId)) {
      return ChangesListView.getSelectedUnversionedFiles(myViewer);
    }
    else if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
      return myDeleteProvider;
    }
    else if (VcsDataKeys.CHANGE_LISTS.is(dataId)) {
      return new ChangeList[]{myChangeList};
    }
    else if (EXACTLY_SELECTED_FILES_DATA_KEY.is(dataId)) {
      return ChangesListView.getExactlySelectedVirtualFiles(myViewer);
    }
    return super.getData(dataId);
  }
  @NotNull
  @Override
  public List<Change> getDisplayedChanges() {
    return VcsTreeModelData.all(myViewer).userObjects(Change.class);
  }
  @NotNull
  @Override
  public List<Change> getSelectedChanges() {
    return VcsTreeModelData.selected(myViewer).userObjects(Change.class);
  }
  @NotNull
  @Override
  public List<Change> getIncludedChanges() {
    return VcsTreeModelData.included(myViewer).userObjects(Change.class);
  }
  @NotNull
  @Override
  public List<FilePath> getDisplayedUnversionedFiles() {
    if (!isShowUnversioned()) return Collections.emptyList();
    VcsTreeModelData treeModelData = VcsTreeModelData.allUnderTag(myViewer, ChangesBrowserNode.UNVERSIONED_FILES_TAG);
    // When the "many files" node is collapsed the tree holds no individual paths;
    // fall back to the full cached list.
    if (containsCollapsedUnversionedNode(treeModelData)) {
      return List.copyOf(myUnversioned);
    }
    return treeModelData.userObjects(FilePath.class);
  }
  @NotNull
  @Override
  public List<FilePath> getSelectedUnversionedFiles() {
    if (!isShowUnversioned()) return Collections.emptyList();
    VcsTreeModelData treeModelData = VcsTreeModelData.selectedUnderTag(myViewer, ChangesBrowserNode.UNVERSIONED_FILES_TAG);
    // See getDisplayedUnversionedFiles(): collapsed node means the tree lacks individual paths.
    if (containsCollapsedUnversionedNode(treeModelData)) {
      return List.copyOf(myUnversioned);
    }
    return treeModelData.userObjects(FilePath.class);
  }
  @NotNull
  @Override
  public List<FilePath> getIncludedUnversionedFiles() {
    if (!isShowUnversioned()) return Collections.emptyList();
    VcsTreeModelData treeModelData = VcsTreeModelData.includedUnderTag(myViewer, ChangesBrowserNode.UNVERSIONED_FILES_TAG);
    // See getDisplayedUnversionedFiles(): collapsed node means the tree lacks individual paths.
    if (containsCollapsedUnversionedNode(treeModelData)) {
      return List.copyOf(myUnversioned);
    }
    return treeModelData.userObjects(FilePath.class);
  }
  /** True when the unversioned-files node is in its collapsed "many files" state. */
  private static boolean containsCollapsedUnversionedNode(@NotNull VcsTreeModelData treeModelData) {
    Optional<ChangesBrowserNode<?>> node = treeModelData.nodesStream()
      .filter(it -> it instanceof ChangesBrowserUnversionedFilesNode).findAny();
    if (node.isEmpty()) return false;
    ChangesBrowserUnversionedFilesNode unversionedFilesNode = (ChangesBrowserUnversionedFilesNode)node.get();
    return unversionedFilesNode.isManyFiles();
  }
  /** Combo-box panel for choosing which changelist the browser displays. */
  private class ChangeListChooser extends JPanel {
    // Longer changelist names are shortened with an ellipsis in the dropdown.
    private final static int MAX_NAME_LEN = 35;
    @NotNull private final ComboBox<LocalChangeList> myChooser = new ComboBox<>();
    ChangeListChooser() {
      myChooser.setEditable(false);
      myChooser.setRenderer(new ColoredListCellRenderer<>() {
        @Override
        protected void customizeCellRenderer(@NotNull JList<? extends LocalChangeList> list, LocalChangeList value,
                                             int index, boolean selected, boolean hasFocus) {
          String name = StringUtil.shortenTextWithEllipsis(value.getName().trim(), MAX_NAME_LEN, 0);
          // The default changelist is rendered in bold.
          append(name, value.isDefault() ? SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES : SimpleTextAttributes.REGULAR_ATTRIBUTES);
        }
      });
      myChooser.addItemListener(new ItemListener() {
        @Override
        public void itemStateChanged(ItemEvent e) {
          if (e.getStateChange() == ItemEvent.SELECTED) {
            LocalChangeList changeList = (LocalChangeList)myChooser.getSelectedItem();
            if (changeList != null) {
              updateSelectedChangeList(changeList);
            }
          }
        }
      });
      setLayout(new BorderLayout(4, 2));
      JLabel label = new JLabel(VcsBundle.message("commit.dialog.changelist.label"));
      label.setLabelFor(myChooser);
      add(label, BorderLayout.WEST);
      add(myChooser, BorderLayout.CENTER);
    }
    /** Replaces the model, keeping the current list selected when it still exists. */
    public void setAvailableLists(@NotNull List<LocalChangeList> lists) {
      LocalChangeList currentList = ContainerUtil.find(lists, getSelectedChangeList());
      if (currentList == null) currentList = lists.get(0);
      myChooser.setModel(new CollectionComboBoxModel<>(lists, currentList));
      // With a single list there is nothing to choose from.
      myChooser.setEnabled(lists.size() > 1);
      updateSelectedChangeList(currentList);
    }
    /** Selects by name the model element matching {@code list}; no-op when absent. */
    public void setSelectedChangeList(@NotNull LocalChangeList list) {
      ComboBoxModel<LocalChangeList> model = myChooser.getModel();
      for (int i = 0; i < model.getSize(); i++) {
        LocalChangeList element = model.getElementAt(i);
        if (element.getName().equals(list.getName())) {
          myChooser.setSelectedIndex(i);
          updateSelectedChangeList(element);
          return;
        }
      }
    }
  }
  /** Toggle that flips the "show unversioned files while committing" setting. */
  private final class ShowHideUnversionedFilesAction extends ToggleAction implements DumbAware {
    private ShowHideUnversionedFilesAction() {
      super(VcsBundle.messagePointer("action.ToggleAction.text.show.unversioned.files"), Presentation.NULL_STRING,
            AllIcons.Vcs.ShowUnversionedFiles);
    }
    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      return isShowUnversioned();
    }
    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      setShowUnversioned(state);
    }
  }
  /** Three-state checkbox shown in the diff toolbar to include/exclude the current change. */
  private class ToggleChangeDiffAction extends ThreeStateCheckboxAction implements CustomComponentAction, DumbAware {
    ToggleChangeDiffAction() {
      super(VcsBundle.messagePointer("commit.dialog.include.action.name"));
    }
    @NotNull
    @Override
    public State isSelected(AnActionEvent e) {
      Object object = getUserObject(e);
      if (object == null) return State.NOT_SELECTED;
      return myInclusionModel.getInclusionState(object);
    }
    @Override
    public void setSelected(AnActionEvent e, @NotNull State state) {
      Object object = getUserObject(e);
      if (object == null) return;
      if (state != State.NOT_SELECTED) {
        myViewer.includeChange(object);
      }
      else {
        myViewer.excludeChange(object);
      }
    }
    /** The change or unversioned file the current diff is showing, if any. */
    @Nullable
    private Object getUserObject(@NotNull AnActionEvent e) {
      Object object = e.getData(VcsDataKeys.CURRENT_CHANGE);
      if (object == null) object = e.getData(VcsDataKeys.CURRENT_UNVERSIONED);
      return object;
    }
  }
  /** Refreshes the available changelists (via the merging queue) on any changelist event. */
  private class MyChangeListListener extends ChangeListAdapter {
    @Override
    public void changeListsChanged() {
      myUpdateQueue.queue(DisposableUpdate.createDisposable(myUpdateQueue, "updateChangeLists", () -> {
        updateDisplayedChangeLists();
      }));
    }
  }
}
| |
package bridge1;
import java.awt.*;
import javax.swing.*;
import java.awt.event.*;
/**
 * Swing front end for the Bridge-pattern volume computation demo.
 *
 * The user picks a geometric form (cube, elliptic cylinder, ellipsoid), fills in its
 * dimensions and a unit of measure, and the computed volume is shown in a text area.
 * The form/measure classes (GeoForm, Cube, EllipseCylinder, Ellipsoid, Measure,
 * FootMeasure, MeterMeasure) are the two sides of the bridge and live elsewhere.
 *
 * Fixes over the previous revision:
 *  - Exit button now terminates with status 0 (it used System.exit(1), signalling failure).
 *  - Pressing "Compute Volume" with no form selected no longer throws NullPointerException.
 *  - Non-numeric dimension input is reported to the user instead of crashing the EDT.
 *  - Removed a dead store (gbc.insets.left was assigned 5 and immediately overwritten with 1).
 *  - The copy-pasted GridBag plumbing in getTypePanel is factored into addRow().
 */
public class GeoVolumeGUI extends JPanel{
    private JScrollPane upperLeftPane, upperRightPane;
    private JTextArea txtVolumeDisplay;
    private JSplitPane bigSplitPane, upSplitPane;
    private JPanel downPanel, upperLeftPanel, geoDataPanel;
    private JComboBox cmbGeoForm;   // geometric form selector
    private JComboBox cmbMeasure;   // unit-of-measure selector, rebuilt per form panel
    private JTextField txtCylinderRadius_a;
    private JTextField txtCylinderRadius_b;
    private JTextField txtCylinderHeight;
    private JTextField txtEllipsoidRadius_a;
    private JTextField txtEllipsoidRadius_b;
    private JTextField txtEllipsoidRadius_c;
    private JTextField txtEllipsoidHeight;
    private JTextField txtCubeLength;
    private JTextField txtCubeWidth;
    private JTextField txtCubeHeight;
    private ButtonHandler btnHandler;
    static final Dimension minimumSize = new Dimension(230, 200);
    public static final String COMPUTE= "Compute Volume";
    public static final String EXIT = "Exit";
    // NOTE: the trailing space in "Cube " is part of the published constant; kept for compatibility.
    public static final String CUBE= "Cube ";
    public static final String CYLINDER = "Cylinder";
    public static final String ELLIPSOID = "Ellipsoid";
    public static final String BLANK = "Choose geometric form";
    public static final String FEET = "Feet";
    public static final String METERS = "Meters";

    /** Builds the whole GUI: output area, lower button bar, upper form panel, split panes. */
    public GeoVolumeGUI(){
        super(new GridLayout(1,0));
        txtVolumeDisplay=new JTextArea(6, 30);
        txtVolumeDisplay.setFont(new Font("Arial", Font.BOLD, 12));
        txtVolumeDisplay.setBackground(Color.pink);
        txtVolumeDisplay.setText(" Volume Information will be shown here");
        btnHandler = new ButtonHandler();
        setupLowerPanel();
        setupUpperLeftPanel();
        buildUpScrollGUI();
    }

    /** Creates the bottom panel holding the Compute and Exit buttons. */
    private void setupLowerPanel(){
        downPanel = new JPanel();
        downPanel.setBackground(Color.gray);
        JButton btnSubmit = new JButton(GeoVolumeGUI.COMPUTE);
        btnSubmit.setMnemonic(KeyEvent.VK_G);
        JButton btnExit = new JButton(GeoVolumeGUI.EXIT);
        btnExit.setMnemonic(KeyEvent.VK_X);
        btnSubmit.addActionListener(btnHandler);
        btnExit.addActionListener(btnHandler);
        downPanel.add(btnSubmit);
        downPanel.add(btnExit);
    }

    /** Creates the upper-left panel: the form selector plus the dynamic data panel. */
    private void setupUpperLeftPanel(){
        cmbGeoForm = new JComboBox();
        cmbGeoForm.addItem(BLANK);
        cmbGeoForm.addItem(CUBE);
        cmbGeoForm.addItem(CYLINDER);
        cmbGeoForm.addItem(ELLIPSOID);
        cmbGeoForm.addActionListener(btnHandler);
        //For layout purposes, put the buttons in a separate panel
        upperLeftPanel = new JPanel();
        // geoDataPanel is used for loading another panel dynamically
        geoDataPanel = new JPanel();
        geoDataPanel.setPreferredSize(new Dimension(250, 180));
        GridBagLayout gridbag = new GridBagLayout();
        upperLeftPanel.setLayout(gridbag);
        GridBagConstraints gbc = new GridBagConstraints();
        gbc.insets.top = 5;
        gbc.insets.bottom = 5;
        gbc.insets.left = 1; // previously assigned 5 and immediately overwritten; 1 was the effective value
        gbc.insets.right = 5;
        gbc.anchor = GridBagConstraints.WEST;
        addAt(upperLeftPanel, gridbag, gbc, cmbGeoForm, 0, 0);
        addAt(upperLeftPanel, gridbag, gbc, geoDataPanel, 0, 1);
    }

    /** Adds {@code comp} to {@code parent} at grid cell (x, y) using the shared constraints. */
    private static void addAt(Container parent, GridBagLayout gridbag, GridBagConstraints gbc,
                              Component comp, int x, int y) {
        gbc.gridx = x;
        gbc.gridy = y;
        gridbag.setConstraints(comp, gbc);
        parent.add(comp);
    }

    /** Shows {@code str} in the output text area, replacing any previous content. */
    public void displayVolume(String str){
        txtVolumeDisplay.setText(str);
    }

    /** @return the currently selected geometric form name (may be {@link #BLANK}). */
    public String getGeoType(){
        return (String) cmbGeoForm.getSelectedItem();
    }

    /** @return the currently selected unit of measure ({@link #FEET} or {@link #METERS}). */
    public String getMeasureChoice(){
        return (String) cmbMeasure.getSelectedItem();
    }

    /** @return the form-selector combo box (used to identify event sources). */
    public JComboBox getGeoShapeCombox(){
        return cmbGeoForm;
    }

    /** Swaps the dynamic data panel content for {@code panel} and revalidates. */
    public void displayNewGUI(JPanel panel){
        geoDataPanel.removeAll();
        geoDataPanel.add(panel);
        geoDataPanel.validate();
        validate();
    }

    /** Assembles the nested split panes and scroll panes. */
    private void buildUpScrollGUI(){
        upperLeftPane = new JScrollPane(upperLeftPanel);
        upperLeftPane.setMinimumSize(minimumSize);
        upperRightPane = new JScrollPane(txtVolumeDisplay);
        upSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
        upSplitPane.setDividerLocation(380);
        upSplitPane.setPreferredSize(new Dimension(600, 290));
        upSplitPane.setLeftComponent(upperLeftPane);
        upSplitPane.setRightComponent(upperRightPane);
        bigSplitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, upSplitPane, downPanel);
        bigSplitPane.setDividerLocation(275);
        add(bigSplitPane);
        setSize(new Dimension(600, 300));
        setVisible(true);
    }

    /** Creates and shows the top-level frame; must run on the event dispatch thread. */
    private static void createAndShowGUI(){
        JFrame.setDefaultLookAndFeelDecorated(true);
        JFrame frame = new JFrame("Bridge Pattern- Computation of Geo Form");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        GeoVolumeGUI newContentPane = new GeoVolumeGUI();
        newContentPane.setOpaque(true);
        frame.setContentPane(newContentPane);
        frame.pack();
        frame.setVisible(true);
    }

    public static void main(String[] argv){
        javax.swing.SwingUtilities.invokeLater(new Runnable(){
            public void run() {
                createAndShowGUI();
            } });
    }

    /** Handles button presses and form-selector changes. */
    class ButtonHandler implements ActionListener{
        String selection = null;
        String measure = null;

        public void actionPerformed(ActionEvent e) {
            if (e.getActionCommand() != null && e.getActionCommand().equals(EXIT)) {
                System.exit(0); // normal termination (was exit(1), which signals failure)
            }
            if (e.getActionCommand() != null && e.getActionCommand().equals(COMPUTE)) {
                computeAndDisplayVolume();
            }
            if (e.getSource() == getGeoShapeCombox()) {
                selection = getGeoType();
                if(selection.equals(CUBE) )
                    displayNewGUI( getTypePanel(CUBE));
                else if(selection.equals(CYLINDER) )
                    displayNewGUI( getTypePanel(CYLINDER));
                else if(selection.equals(ELLIPSOID) )
                    displayNewGUI( getTypePanel(ELLIPSOID));
                upperLeftPanel.repaint();
            }
        }

        /** Builds the selected GeoForm from the text fields and displays its volume. */
        private void computeAndDisplayVolume() {
            selection = getGeoType();
            GeoForm form = null;
            try {
                if (selection.equals(CUBE)) {
                    double l = Double.parseDouble(txtCubeLength.getText());
                    double w = Double.parseDouble(txtCubeWidth.getText());
                    double h = Double.parseDouble(txtCubeHeight.getText());
                    form = new Cube(l, w, h);
                }
                else if (selection.equals(CYLINDER)) {
                    double a = Double.parseDouble(txtCylinderRadius_a.getText());
                    double b = Double.parseDouble(txtCylinderRadius_b.getText());
                    double h = Double.parseDouble(txtCylinderHeight.getText());
                    form = new EllipseCylinder(a, b, h);
                }
                else if (selection.equals(ELLIPSOID)) {
                    double a = Double.parseDouble(txtEllipsoidRadius_a.getText());
                    double b = Double.parseDouble(txtEllipsoidRadius_b.getText());
                    double c = Double.parseDouble(txtEllipsoidRadius_c.getText());
                    double h = Double.parseDouble(txtEllipsoidHeight.getText());
                    form = new Ellipsoid(a, b, c, h);
                }
            }
            catch (NumberFormatException nfe) {
                // Previously an uncaught exception killed the event dispatch thread.
                displayVolume("Please enter valid numeric dimensions.");
                return;
            }
            if (form == null) {
                // BLANK (or unknown) selection: previously fell through and threw NPE below.
                displayVolume("Please choose a geometric form first.");
                return;
            }
            measure = getMeasureChoice();
            Measure m;
            String suffix;
            if (measure.equals(FEET)) {
                m = new FootMeasure (form);
                suffix = "Cubic " + FEET;
            }
            else if (measure.equals(METERS)) {
                m = new MeterMeasure (form);
                suffix = "Cubic " + METERS;
            }
            else {
                displayVolume("Please choose a unit of measure.");
                return;
            }
            double v = m.findVolume();
            String description = m.convert();
            displayVolume("Volume = " + v +" " + suffix +" \n"+description);
        }
    }

    /**
     * Builds the input panel for the given form type ({@link #CUBE}, {@link #CYLINDER}
     * or {@link #ELLIPSOID}), creating the matching text fields and the measure combo.
     */
    private JPanel getTypePanel(String type){
        JPanel typePanel = new JPanel();
        cmbMeasure = new JComboBox();
        cmbMeasure.addItem(FEET);
        cmbMeasure.addItem(METERS);
        JLabel lblMeasure = new JLabel("Measured by");
        GridBagLayout gridbag = new GridBagLayout();
        typePanel.setLayout(gridbag);
        GridBagConstraints gbc = new GridBagConstraints();
        gbc.insets.top = 5;
        gbc.insets.bottom = 5;
        gbc.insets.left = 1;
        gbc.insets.right = 8;
        gbc.anchor = GridBagConstraints.WEST;
        if(type.equals(CUBE)){
            txtCubeLength = new JTextField(8);
            txtCubeWidth = new JTextField(8);
            txtCubeHeight = new JTextField(8);
            addRow(typePanel, gridbag, gbc, 0, new JLabel("Input Length"), txtCubeLength);
            addRow(typePanel, gridbag, gbc, 1, new JLabel("Input width"), txtCubeWidth);
            addRow(typePanel, gridbag, gbc, 2, new JLabel("Input Height"), txtCubeHeight);
            addRow(typePanel, gridbag, gbc, 3, lblMeasure, cmbMeasure);
        }
        else if(type.equals(CYLINDER)){
            txtCylinderRadius_a = new JTextField(8);
            txtCylinderRadius_b = new JTextField(8);
            txtCylinderHeight = new JTextField(8);
            addRow(typePanel, gridbag, gbc, 0, new JLabel("Input Radius a"), txtCylinderRadius_a);
            addRow(typePanel, gridbag, gbc, 1, new JLabel("Input Radius b"), txtCylinderRadius_b);
            addRow(typePanel, gridbag, gbc, 2, new JLabel("Input Height"), txtCylinderHeight);
            addRow(typePanel, gridbag, gbc, 3, lblMeasure, cmbMeasure);
        }
        else if(type.equals(ELLIPSOID)){
            txtEllipsoidRadius_a = new JTextField(8);
            txtEllipsoidRadius_b = new JTextField(8);
            txtEllipsoidRadius_c = new JTextField(8);
            txtEllipsoidHeight = new JTextField(8);
            addRow(typePanel, gridbag, gbc, 0, new JLabel("Input Radius a"), txtEllipsoidRadius_a);
            addRow(typePanel, gridbag, gbc, 1, new JLabel("Input Radius b"), txtEllipsoidRadius_b);
            addRow(typePanel, gridbag, gbc, 2, new JLabel("Input Radius c"), txtEllipsoidRadius_c);
            addRow(typePanel, gridbag, gbc, 3, new JLabel("Input Height"), txtEllipsoidHeight);
            addRow(typePanel, gridbag, gbc, 4, lblMeasure, cmbMeasure);
        }
        return typePanel;
    }

    /** Lays out one label/field pair on grid row {@code row} (label in column 0, field in column 1). */
    private static void addRow(JPanel panel, GridBagLayout gridbag, GridBagConstraints gbc,
                               int row, JComponent label, JComponent field) {
        gbc.gridx = 0;
        gbc.gridy = row;
        gridbag.setConstraints(label, gbc);
        panel.add(label);
        gbc.gridx = 1;
        gbc.gridy = row;
        gridbag.setConstraints(field, gbc);
        panel.add(field);
    }
}
| |
/*
* Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.core.publisher;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.util.annotation.Nullable;
/**
* Runs the source in unbounded mode and emits only the latest value
* if the subscriber can't keep up properly.
*
* @param <T> the value type
* @see <a href="https://github.com/reactor/reactive-streams-commons">Reactive-Streams-Commons</a>
*/
/**
 * Runs the source in unbounded mode and emits only the latest value
 * if the subscriber can't keep up properly.
 *
 * <p>Fix: the drain loop previously decremented the requested counter by a constant 1
 * ({@code Operators.produced(REQUESTED, this, 1)}) even when {@code e} items had been
 * emitted in that pass, leaving {@code requested} too high and allowing the operator to
 * emit more {@code onNext} signals than the downstream requested. It now subtracts the
 * actual emission count {@code e}.
 *
 * @param <T> the value type
 * @see <a href="https://github.com/reactor/reactive-streams-commons">Reactive-Streams-Commons</a>
 */
final class FluxOnBackpressureLatest<T> extends FluxOperator<T, T> {
	FluxOnBackpressureLatest(Flux<? extends T> source) {
		super(source);
	}
	@Override
	public void subscribe(CoreSubscriber<? super T> actual) {
		source.subscribe(new LatestSubscriber<>(actual));
	}
	@Override
	public int getPrefetch() {
		// Upstream is consumed without bound; backpressure is absorbed by keeping only the latest value.
		return Integer.MAX_VALUE;
	}
	static final class LatestSubscriber<T>
			implements InnerOperator<T, T> {
		final CoreSubscriber<? super T> actual;
		/** Outstanding downstream demand. */
		volatile long requested;
		@SuppressWarnings("rawtypes")
		static final AtomicLongFieldUpdater<LatestSubscriber> REQUESTED =
				AtomicLongFieldUpdater.newUpdater(LatestSubscriber.class, "requested");
		/** Work-in-progress counter serializing the drain loop. */
		volatile int wip;
		@SuppressWarnings("rawtypes")
		static final AtomicIntegerFieldUpdater<LatestSubscriber> WIP =
				AtomicIntegerFieldUpdater.newUpdater(LatestSubscriber.class, "wip");
		Subscription s;
		Throwable error;
		volatile boolean done;
		volatile boolean cancelled;
		/** Holds the single most recent value; older values are overwritten. */
		volatile T value;
		@SuppressWarnings("rawtypes")
		static final AtomicReferenceFieldUpdater<LatestSubscriber, Object> VALUE =
				AtomicReferenceFieldUpdater.newUpdater(LatestSubscriber.class, Object.class, "value");
		LatestSubscriber(CoreSubscriber<? super T> actual) {
			this.actual = actual;
		}
		@Override
		public void request(long n) {
			if (Operators.validate(n)) {
				Operators.addCap(REQUESTED, this, n);
				drain();
			}
		}
		@Override
		public void cancel() {
			if (!cancelled) {
				cancelled = true;
				s.cancel();
				// If no drain is running, clear the retained value ourselves to avoid a leak;
				// otherwise the active drain will observe 'cancelled' and clear it.
				if (WIP.getAndIncrement(this) == 0) {
					VALUE.lazySet(this, null);
				}
			}
		}
		@Override
		public void onSubscribe(Subscription s) {
			if (Operators.validate(this.s, s)) {
				this.s = s;
				actual.onSubscribe(this);
				// Unbounded upstream request: backpressure is handled by latest-value overwrite.
				s.request(Long.MAX_VALUE);
			}
		}
		@Override
		public void onNext(T t) {
			// Overwrite any undelivered value with the newest one.
			VALUE.lazySet(this, t);
			drain();
		}
		@Override
		public void onError(Throwable t) {
			error = t;
			done = true;
			drain();
		}
		@Override
		public void onComplete() {
			done = true;
			drain();
		}
		/** Serialized emission loop: delivers the latest value while demand is available. */
		void drain() {
			if (WIP.getAndIncrement(this) != 0) {
				return;
			}
			final Subscriber<? super T> a = actual;
			int missed = 1;
			for (; ; ) {
				if (checkTerminated(done, value == null, a)) {
					return;
				}
				long r = requested;
				long e = 0L;
				while (r != e) {
					boolean d = done;
					@SuppressWarnings("unchecked")
					T v = (T) VALUE.getAndSet(this, null);
					boolean empty = v == null;
					if (checkTerminated(d, empty, a)) {
						return;
					}
					if (empty) {
						break;
					}
					a.onNext(v);
					e++;
				}
				if (r == e && checkTerminated(done, value == null, a)) {
					return;
				}
				if (e != 0L && r != Long.MAX_VALUE) {
					// BUGFIX: subtract the actual number of emitted items, not a constant 1.
					Operators.produced(REQUESTED, this, e);
				}
				missed = WIP.addAndGet(this, -missed);
				if (missed == 0) {
					break;
				}
			}
		}
		/**
		 * Terminates the stream when cancelled or when done with no pending value;
		 * clears the retained value on any terminal path. Returns true if terminated.
		 */
		boolean checkTerminated(boolean d, boolean empty, Subscriber<? super T> a) {
			if (cancelled) {
				VALUE.lazySet(this, null);
				return true;
			}
			if (d) {
				Throwable e = error;
				if (e != null) {
					VALUE.lazySet(this, null);
					a.onError(e);
					return true;
				} else if (empty) {
					a.onComplete();
					return true;
				}
			}
			return false;
		}
		@Override
		public CoreSubscriber<? super T> actual() {
			return actual;
		}
		@Override
		@Nullable
		public Object scanUnsafe(Attr key) {
			if (key == Attr.PARENT) return s;
			if (key == Attr.REQUESTED_FROM_DOWNSTREAM) return requested;
			if (key == Attr.TERMINATED) return done;
			if (key == Attr.CANCELLED) return cancelled;
			if (key == Attr.BUFFERED) return value != null ? 1 : 0;
			if (key == Attr.ERROR) return error;
			if (key == Attr.PREFETCH) return Integer.MAX_VALUE;
			return InnerOperator.super.scanUnsafe(key);
		}
	}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.cache.bitset;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static org.hamcrest.Matchers.equalTo;
public class BitSetFilterCacheTests extends ESTestCase {

    /**
     * Sums the cardinalities of the per-segment bit sets produced for every leaf of
     * {@code reader}, i.e. the total number of documents matched by {@code producer}.
     * Segments for which the producer yields no bit set contribute zero.
     */
    private static int matchCount(BitSetProducer producer, IndexReader reader) throws IOException {
        int count = 0;
        for (LeafReaderContext ctx : reader.leaves()) {
            final BitSet bitSet = producer.getBitSet(ctx);
            if (bitSet != null) {
                count += bitSet.cardinality();
            }
        }
        return count;
    }

    /**
     * Verifies that cache entries are keyed per segment: three one-doc segments produce
     * three cached filters, a force-merge down to one segment leaves one entry, and
     * closing all readers/writers evicts everything.
     */
    @Test
    public void testInvalidateEntries() throws Exception {
        IndexWriter writer = new IndexWriter(
                new RAMDirectory(),
                new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy())
        );
        // Commit after each document so every document lands in its own segment.
        Document document = new Document();
        document.add(new StringField("field", "value", Field.Store.NO));
        writer.addDocument(document);
        writer.commit();
        document = new Document();
        document.add(new StringField("field", "value", Field.Store.NO));
        writer.addDocument(document);
        writer.commit();
        document = new Document();
        document.add(new StringField("field", "value", Field.Store.NO));
        writer.addDocument(document);
        writer.commit();
        IndexReader reader = DirectoryReader.open(writer, false);
        IndexSearcher searcher = new IndexSearcher(reader);
        BitsetFilterCache cache = new BitsetFilterCache(new Index("test"), Settings.EMPTY);
        BitSetProducer filter = cache.getBitSetProducer(new QueryWrapperFilter(new TermQuery(new Term("field", "value"))));
        assertThat(matchCount(filter, reader), equalTo(3));
        // now cached
        assertThat(matchCount(filter, reader), equalTo(3));
        // There are 3 segments
        assertThat(cache.getLoadedFilters().size(), equalTo(3L));
        writer.forceMerge(1);
        reader.close();
        reader = DirectoryReader.open(writer, false);
        searcher = new IndexSearcher(reader);
        assertThat(matchCount(filter, reader), equalTo(3));
        // now cached
        assertThat(matchCount(filter, reader), equalTo(3));
        // Only one segment now, so the size must be 1
        assertThat(cache.getLoadedFilters().size(), equalTo(1L));
        reader.close();
        writer.close();
        // There is no reference from readers and writer to any segment in the test index, so the size in the fbs cache must be 0
        assertThat(cache.getLoadedFilters().size(), equalTo(0L));
    }

    /**
     * Verifies that the cache listener observes onCache/onRemoval symmetrically and
     * that the shard id is reported only when the reader is wrapped with
     * {@link ElasticsearchDirectoryReader} (null otherwise).
     */
    public void testListener() throws IOException {
        IndexWriter writer = new IndexWriter(
                new RAMDirectory(),
                new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy())
        );
        Document document = new Document();
        document.add(new StringField("field", "value", Field.Store.NO));
        writer.addDocument(document);
        writer.commit();
        // Randomly exercise both the wrapped and unwrapped reader paths.
        final DirectoryReader writerReader = DirectoryReader.open(writer, false);
        final IndexReader reader = randomBoolean() ? writerReader : ElasticsearchDirectoryReader.wrap(writerReader, new ShardId("test", 0));
        final AtomicLong stats = new AtomicLong();
        final AtomicInteger onCacheCalls = new AtomicInteger();
        final AtomicInteger onRemoveCalls = new AtomicInteger();
        final BitsetFilterCache cache = new BitsetFilterCache(new Index("test"), Settings.EMPTY);
        cache.setListener(new BitsetFilterCache.Listener() {
            @Override
            public void onCache(ShardId shardId, Accountable accountable) {
                onCacheCalls.incrementAndGet();
                stats.addAndGet(accountable.ramBytesUsed());
                if (writerReader != reader) {
                    assertNotNull(shardId);
                    assertEquals("test", shardId.index().name());
                    assertEquals(0, shardId.id());
                } else {
                    assertNull(shardId);
                }
            }

            @Override
            public void onRemoval(ShardId shardId, Accountable accountable) {
                onRemoveCalls.incrementAndGet();
                stats.addAndGet(-accountable.ramBytesUsed());
                if (writerReader != reader) {
                    assertNotNull(shardId);
                    assertEquals("test", shardId.index().name());
                    assertEquals(0, shardId.id());
                } else {
                    assertNull(shardId);
                }
            }
        });
        BitSetProducer filter = cache.getBitSetProducer(new QueryWrapperFilter(new TermQuery(new Term("field", "value"))));
        assertThat(matchCount(filter, reader), equalTo(1));
        assertTrue(stats.get() > 0);
        assertEquals(1, onCacheCalls.get());
        assertEquals(0, onRemoveCalls.get());
        // Closing the reader must trigger eviction, returning the accounted bytes to zero.
        IOUtils.close(reader, writer);
        assertEquals(1, onRemoveCalls.get());
        assertEquals(0, stats.get());
    }

    /**
     * The listener may be installed only once; a second call must fail with
     * {@link IllegalStateException}.
     */
    public void testSetListenerTwice() {
        final BitsetFilterCache cache = new BitsetFilterCache(new Index("test"), Settings.EMPTY);
        cache.setListener(new BitsetFilterCache.Listener() {
            @Override
            public void onCache(ShardId shardId, Accountable accountable) {
            }

            @Override
            public void onRemoval(ShardId shardId, Accountable accountable) {
            }
        });
        try {
            cache.setListener(new BitsetFilterCache.Listener() {
                @Override
                public void onCache(ShardId shardId, Accountable accountable) {
                }

                @Override
                public void onRemoval(ShardId shardId, Accountable accountable) {
                }
            });
            fail("can't set it twice");
        } catch (IllegalStateException ex) {
            // all is well
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.monomorphicprocessing;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteStreams;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.logger.Logger;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.commons.ClassRemapper;
import org.objectweb.asm.commons.SimpleRemapper;
import sun.misc.Unsafe;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
/**
* Manages class specialization during query processing.
* Usage:
*
* String runtimeShape = stringRuntimeShape.of(bufferAggregator);
* SpecializationState<ProcessingAlgorithm> specializationState = SpecializationService.getSpecializationState(
* ProcessingAlgorithmImpl.class,
* runtimeShape
* );
* ProcessingAlgorithm algorithm = specializationState.getSpecializedOrDefault(new ProcessingAlgorithmImpl());
* long loopIterations = new ProcessingAlgorithmImpl().run(bufferAggregator, ...);
* specializationState.accountLoopIterations(loopIterations);
*
* ProcessingAlgorithmImpl.class, passed as prototypeClass to {@link #getSpecializationState} methods must have public
* no-arg constructor and must be stateless (no fields).
*
* @see SpecializationState
*/
public final class SpecializationService
{
  private static final Logger LOG = new Logger(SpecializationService.class);

  // Unsafe is used to define generated classes directly into the prototype class's
  // class loader and protection domain (see PerPrototypeClassState.defineClass).
  private static final Unsafe UNSAFE;
  static {
    try {
      // "theUnsafe" is a private static field; reflection is the conventional way to obtain it.
      Field theUnsafe = Unsafe.class.getDeclaredField("theUnsafe");
      theUnsafe.setAccessible(true);
      UNSAFE = (Unsafe) theUnsafe.get(null);
    }
    catch (Exception e) {
      throw new RuntimeException("Cannot access Unsafe methods", e);
    }
  }
  /**
   * If true, specialization is not actually done, an instance of prototypeClass is used as a "specialized" instance.
   * Useful for analysis of generated assembly with JITWatch (https://github.com/AdoptOpenJDK/jitwatch), because
   * JITWatch shows only classes present in the loaded JAR (prototypeClass should be), not classes generated during
   * runtime.
   */
  private static final boolean fakeSpecialize = Boolean.getBoolean("fakeSpecialize");
  /**
   * Number of loop iterations, accounted via {@link SpecializationState#accountLoopIterations(long)} in
   * {@link WindowedLoopIterationCounter} during the last hour window, after which WindowedLoopIterationCounter decides
   * to specialize class for the specific runtimeShape. The default value is chosen to be so that the specialized
   * class will likely be compiled with C2 HotSpot compiler with the default values of *BackEdgeThreshold options.
   */
  private static final int triggerSpecializationIterationsThreshold =
      Integer.getInteger("triggerSpecializationIterationsThreshold", 10_000);
  /**
   * The maximum number of specializations, that this service is allowed to make. It's not unlimited because each
   * specialization takes some JVM memory (machine code cache, byte code, etc.)
   */
  private static final int maxSpecializations = Integer.getInteger("maxSpecializations", 1000);

  // Ensures the "couldn't make more specializations" warning is logged at most once.
  private static final AtomicBoolean maxSpecializationsWarningEmitted = new AtomicBoolean(false);

  // Single-threaded: PerPrototypeClassState.specialize() and getPrototypeClassBytecode()
  // rely on this executor having exactly one worker thread (no synchronization there).
  private static final ExecutorService classSpecializationExecutor = Execs.singleThreaded("class-specialization-%d");

  // Counts classes defined so far; also used to build unique specialized class names.
  private static final AtomicLong specializedClassCounter = new AtomicLong();

  // One PerPrototypeClassState per prototype class; ClassValue gives lazy, race-free,
  // per-Class caching without leaking class loaders.
  private static final ClassValue<PerPrototypeClassState> perPrototypeClassState =
      new ClassValue<PerPrototypeClassState>()
      {
        @Override
        protected PerPrototypeClassState computeValue(Class<?> type)
        {
          return new PerPrototypeClassState<>(type);
        }
      };
  /**
   * @param <T> type of query processing algorithm
   * @see SpecializationService class-level javadoc for details
   */
  public static <T> SpecializationState<T> getSpecializationState(
      Class<? extends T> prototypeClass,
      String runtimeShape
  )
  {
    return getSpecializationState(prototypeClass, runtimeShape, ImmutableMap.of());
  }
  /**
   * @param classRemapping classes, that should be replaced in the bytecode of the given prototypeClass when specialized
   * @see #getSpecializationState(Class, String)
   */
  @SuppressWarnings("unchecked")
  public static <T> SpecializationState<T> getSpecializationState(
      Class<? extends T> prototypeClass,
      String runtimeShape,
      ImmutableMap<Class<?>, Class<?>> classRemapping
  )
  {
    return perPrototypeClassState.get(prototypeClass).getSpecializationState(runtimeShape, classRemapping);
  }

  /**
   * Per-prototype-class bookkeeping: caches the prototype's bytecode and the
   * specialization state for each (runtimeShape, classRemapping) pair.
   */
  static class PerPrototypeClassState<T>
  {
    private final Class<T> prototypeClass;
    private final ConcurrentMap<SpecializationId, SpecializationState<T>> specializationStates =
        new ConcurrentHashMap<>();
    // Prototype class name in JVM-internal form ("a/b/C"), used to locate the .class resource.
    private final String prototypeClassBytecodeName;
    // Specialized classes are named "<prototype>$Copy<counter>".
    private final String specializedClassNamePrefix;
    // Lazily loaded; accessed only from the single specialization thread (see getPrototypeClassBytecode).
    private byte[] prototypeClassBytecode;
    PerPrototypeClassState(Class<T> prototypeClass)
    {
      this.prototypeClass = prototypeClass;
      String prototypeClassName = prototypeClass.getName();
      prototypeClassBytecodeName = classBytecodeName(prototypeClassName);
      specializedClassNamePrefix = prototypeClassName + "$Copy";
    }

    /**
     * Returns the state for the given shape/remapping, creating a fresh
     * {@link WindowedLoopIterationCounter} on first request.
     */
    SpecializationState<T> getSpecializationState(String runtimeShape, ImmutableMap<Class<?>, Class<?>> classRemapping)
    {
      SpecializationId specializationId = new SpecializationId(runtimeShape, classRemapping);
      // Fast path: plain get() first, presumably to avoid computeIfAbsent's locking
      // on the common hit path.
      SpecializationState<T> alreadyExistingState = specializationStates.get(specializationId);
      if (alreadyExistingState != null) {
        return alreadyExistingState;
      }
      return specializationStates.computeIfAbsent(specializationId, id -> new WindowedLoopIterationCounter<>(this, id));
    }

    /**
     * Copies the prototype class's bytecode under a new name (applying classRemapping),
     * defines it in the prototype's class loader, and returns a new instance.
     * Called only from the single-threaded classSpecializationExecutor.
     */
    T specialize(ImmutableMap<Class<?>, Class<?>> classRemapping)
    {
      String specializedClassName = specializedClassNamePrefix + specializedClassCounter.get();
      ClassWriter specializedClassWriter = new ClassWriter(0);
      SimpleRemapper remapper = new SimpleRemapper(createRemapping(classRemapping, specializedClassName));
      ClassVisitor classTransformer = new ClassRemapper(specializedClassWriter, remapper);
      try {
        ClassReader prototypeClassReader = new ClassReader(getPrototypeClassBytecode());
        prototypeClassReader.accept(classTransformer, 0);
        byte[] specializedClassBytecode = specializedClassWriter.toByteArray();
        Class<T> specializedClass = defineClass(specializedClassName, specializedClassBytecode);
        // Incremented only after a successful defineClass, so a failed attempt doesn't burn a name.
        specializedClassCounter.incrementAndGet();
        return specializedClass.newInstance();
      }
      catch (InstantiationException | IllegalAccessException | IOException e) {
        throw new RuntimeException(e);
      }
    }

    /**
     * Builds the ASM name remapping: the prototype class is renamed to the specialized
     * name, plus any caller-supplied class substitutions.
     */
    private HashMap<String, String> createRemapping(
        ImmutableMap<Class<?>, Class<?>> classRemapping,
        String specializedClassName
    )
    {
      HashMap<String, String> remapping = new HashMap<>();
      remapping.put(prototypeClassBytecodeName, classBytecodeName(specializedClassName));
      for (Map.Entry<Class<?>, Class<?>> classRemappingEntry : classRemapping.entrySet()) {
        Class<?> sourceClass = classRemappingEntry.getKey();
        Class<?> remappingClass = classRemappingEntry.getValue();
        remapping.put(classBytecodeName(sourceClass.getName()), classBytecodeName(remappingClass.getName()));
      }
      return remapping;
    }

    // Defines the class in the prototype's own loader and protection domain so the
    // specialized copy can access everything the prototype can.
    @SuppressWarnings("unchecked")
    private Class<T> defineClass(String specializedClassName, byte[] specializedClassBytecode)
    {
      return (Class<T>) UNSAFE.defineClass(
          specializedClassName,
          specializedClassBytecode,
          0,
          specializedClassBytecode.length,
          prototypeClass.getClassLoader(),
          prototypeClass.getProtectionDomain()
      );
    }
    /**
     * No synchronization, because {@link #specialize} is called only from {@link #classSpecializationExecutor}, i. e.
     * from a single thread.
     */
    byte[] getPrototypeClassBytecode() throws IOException
    {
      if (prototypeClassBytecode == null) {
        ClassLoader cl = prototypeClass.getClassLoader();
        // NOTE(review): getResourceAsStream may return null if the .class resource is
        // missing; that would surface here as a NullPointerException — confirm acceptable.
        try (InputStream prototypeClassBytecodeStream =
                 cl.getResourceAsStream(prototypeClassBytecodeName + ".class")) {
          prototypeClassBytecode = ByteStreams.toByteArray(prototypeClassBytecodeStream);
        }
      }
      return prototypeClassBytecode;
    }

    // Converts "a.b.C" to the JVM-internal form "a/b/C".
    private static String classBytecodeName(String className)
    {
      return className.replace('.', '/');
    }
  }

  /**
   * Map key identifying one specialization request: a runtime shape string plus the
   * class remapping applied to the prototype's bytecode.
   */
  private static class SpecializationId
  {
    private final String runtimeShape;
    private final ImmutableMap<Class<?>, Class<?>> classRemapping;
    // Precomputed because ids are used as hot ConcurrentHashMap keys.
    private final int hashCode;
    private SpecializationId(String runtimeShape, ImmutableMap<Class<?>, Class<?>> classRemapping)
    {
      this.runtimeShape = runtimeShape;
      this.classRemapping = classRemapping;
      // 1000003 is a large prime multiplier combining the two component hashes.
      this.hashCode = runtimeShape.hashCode() * 1000003 + classRemapping.hashCode();
    }
    @Override
    public boolean equals(Object obj)
    {
      if (!(obj instanceof SpecializationId)) {
        return false;
      }
      SpecializationId other = (SpecializationId) obj;
      return runtimeShape.equals(other.runtimeShape) && classRemapping.equals(other.classRemapping);
    }
    @Override
    public int hashCode()
    {
      return hashCode;
    }
  }
  /**
   * Accumulates the number of iterations during the last hour. (Window size = 1 hour)
   */
  static class WindowedLoopIterationCounter<T> extends SpecializationState<T> implements Runnable
  {
    private final PerPrototypeClassState<T> perPrototypeClassState;
    private final SpecializationId specializationId;
    /** A map with the number of iterations per each minute during the last hour */
    private final ConcurrentMap<Long, AtomicLong> perMinuteIterations = new ConcurrentHashMap<>();
    // Guards against submitting the specialization task more than once.
    private final AtomicBoolean specializationScheduled = new AtomicBoolean(false);
    WindowedLoopIterationCounter(
        PerPrototypeClassState<T> perPrototypeClassState,
        SpecializationId specializationId
    )
    {
      this.perPrototypeClassState = perPrototypeClassState;
      this.specializationId = specializationId;
    }
    @Nullable
    @Override
    public T getSpecialized()
    {
      // Returns null because the class is not yet specialized. The purpose of WindowedLoopIterationCounter is to decide
      // whether specialization should be done, or not.
      return null;
    }
    @Override
    public void accountLoopIterations(long loopIterations)
    {
      if (specializationScheduled.get()) {
        return;
      }
      // Trigger either on a single large run, or when the hour-window total crosses the threshold.
      if (loopIterations > triggerSpecializationIterationsThreshold ||
          addAndGetTotalIterationsOverTheLastHour(loopIterations) > triggerSpecializationIterationsThreshold) {
        // compareAndSet ensures only one thread enqueues this task.
        if (specializationScheduled.compareAndSet(false, true)) {
          classSpecializationExecutor.submit(this);
        }
      }
    }

    /**
     * Adds newIterations to the current minute's bucket, prunes buckets older than an
     * hour, and returns the total over the remaining window.
     */
    private long addAndGetTotalIterationsOverTheLastHour(long newIterations)
    {
      long currentMillis = System.currentTimeMillis();
      long currentMinute = TimeUnit.MILLISECONDS.toMinutes(currentMillis);
      long minuteOneHourAgo = currentMinute - TimeUnit.HOURS.toMinutes(1);
      long totalIterations = 0;
      boolean currentMinutePresent = false;
      for (Iterator<Map.Entry<Long, AtomicLong>> it = perMinuteIterations.entrySet().iterator(); it.hasNext(); ) {
        Map.Entry<Long, AtomicLong> minuteStats = it.next();
        long minute = minuteStats.getKey();
        if (minute < minuteOneHourAgo) {
          // Expired bucket; ConcurrentHashMap iterators support safe removal.
          it.remove();
        } else if (minute == currentMinute) {
          totalIterations += minuteStats.getValue().addAndGet(newIterations);
          currentMinutePresent = true;
        } else {
          totalIterations += minuteStats.getValue().get();
        }
      }
      if (!currentMinutePresent) {
        // Another thread may create the bucket concurrently; computeIfAbsent keeps this race-safe.
        perMinuteIterations.computeIfAbsent(currentMinute, m -> new AtomicLong()).addAndGet(newIterations);
        totalIterations += newIterations;
      }
      return totalIterations;
    }

    /**
     * Runs on classSpecializationExecutor: produces the (possibly fake or prototype)
     * "specialized" instance and replaces this counter in the state map with a
     * {@link Specialized} entry.
     */
    @Override
    public void run()
    {
      try {
        T specialized;
        if (specializedClassCounter.get() > maxSpecializations) {
          // Don't specialize, just instantiate the prototype class and emit a warning.
          // The "better" approach is probably to implement some kind of cache eviction from
          // PerPrototypeClassState.specializationStates. But it might be that nobody ever hits even the current
          // maxSpecializations limit, so implementing cache eviction is an unnecessary complexity.
          specialized = perPrototypeClassState.prototypeClass.newInstance();
          if (!maxSpecializationsWarningEmitted.get() && maxSpecializationsWarningEmitted.compareAndSet(false, true)) {
            LOG.warn(
                "SpecializationService couldn't make more than [%d] specializations. " +
                "Not doing specialization for runtime shape[%s] and class remapping[%s], using the prototype class[%s]",
                maxSpecializations,
                specializationId.runtimeShape,
                specializationId.classRemapping,
                perPrototypeClassState.prototypeClass
            );
          }
        } else if (fakeSpecialize) {
          specialized = perPrototypeClassState.prototypeClass.newInstance();
          LOG.info(
              "Not specializing prototype class[%s] for runtime shape[%s] and class remapping[%s] because "
              + "fakeSpecialize=true, using the prototype class instead",
              perPrototypeClassState.prototypeClass,
              specializationId.runtimeShape,
              specializationId.classRemapping
          );
        } else {
          specialized = perPrototypeClassState.specialize(specializationId.classRemapping);
          LOG.info(
              "Specializing prototype class[%s] for runtime shape[%s] and class remapping[%s]",
              perPrototypeClassState.prototypeClass,
              specializationId.runtimeShape,
              specializationId.classRemapping
          );
        }
        perPrototypeClassState.specializationStates.put(specializationId, new Specialized<>(specialized));
      }
      catch (Exception e) {
        // On failure the WindowedLoopIterationCounter stays in the map; note that
        // specializationScheduled remains true, so no retry will be scheduled.
        LOG.error(
            e,
            "Error specializing prototype class[%s] for runtime shape[%s] and class remapping[%s]",
            perPrototypeClassState.prototypeClass,
            specializationId.runtimeShape,
            specializationId.classRemapping
        );
      }
    }
  }

  /**
   * Terminal specialization state: holds the ready-to-use instance and ignores further
   * iteration accounting.
   */
  static class Specialized<T> extends SpecializationState<T>
  {
    private final T specialized;
    Specialized(T specialized)
    {
      this.specialized = specialized;
    }
    @Override
    public T getSpecialized()
    {
      return specialized;
    }
    @Override
    public void accountLoopIterations(long loopIterations)
    {
      // do nothing
    }
  }

  // Static utility class; not instantiable.
  private SpecializationService()
  {
  }
}
| |
package org.cagrid.gaards.ui.gts;
import gov.nih.nci.cagrid.common.FaultUtil;
import gov.nih.nci.cagrid.common.Runner;
import gov.nih.nci.cagrid.gts.bean.TrustLevel;
import gov.nih.nci.cagrid.gts.client.GTSAdminClient;
import gov.nih.nci.cagrid.gts.client.GTSPublicClient;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.cagrid.gaards.ui.common.ProgressPanel;
import org.cagrid.gaards.ui.common.TitlePanel;
import org.cagrid.grape.ApplicationComponent;
import org.cagrid.grape.GridApplication;
import org.cagrid.grape.LookAndFeel;
import org.cagrid.grape.utils.ErrorDialog;
/**
* @author <A HREF="MAILTO:langella@bmi.osu.edu">Stephen Langella </A>
* @author <A HREF="MAILTO:oster@bmi.osu.edu">Scott Oster </A>
* @author <A HREF="MAILTO:hastings@bmi.osu.edu">Shannon Langella </A>
*/
public class LevelOfAssuranceManagerWindow extends ApplicationComponent implements LevelOfAssuranceRefresher {

    private static Log log = LogFactory.getLog(LevelOfAssuranceManagerWindow.class);

    private static final long serialVersionUID = 1L;

    private javax.swing.JPanel jContentPane = null;

    private JPanel mainPanel = null;

    private JPanel contentPanel = null;

    private JPanel buttonPanel = null;

    private LevelOfAssuranceTable trustLevelTable = null;

    private JScrollPane jScrollPane = null;

    private JButton addTrustLevel = null;

    private JPanel queryPanel = null;

    private JButton query = null;

    private JButton removeTrustLevelButton = null;

    private JButton viewModifyButton = null;

    // True once a search has completed; refreshTrustLevels() only re-queries after that.
    private boolean searchDone = false;

    private SessionPanel sessionPanel = null;

    private JPanel titlePanel = null;

    private ProgressPanel progressPanel = null;

    /**
     * This is the default constructor
     */
    public LevelOfAssuranceManagerWindow() {
        super();
        initialize();
        this.setFrameIcon(GTSLookAndFeel.getTrustLevelIcon());
    }

    /**
     * This method initializes this
     */
    private void initialize() {
        this.setSize(700, 500);
        this.setContentPane(getJContentPane());
        this.setTitle("Levels of Assurance");
    }

    /**
     * This method initializes jContentPane
     *
     * @return javax.swing.JPanel
     */
    private javax.swing.JPanel getJContentPane() {
        if (jContentPane == null) {
            jContentPane = new javax.swing.JPanel();
            jContentPane.setLayout(new java.awt.BorderLayout());
            jContentPane.add(getMainPanel(), java.awt.BorderLayout.CENTER);
        }
        return jContentPane;
    }

    /**
     * This method initializes mainPanel, laying out (top to bottom) the title, session,
     * query, content, button, and progress panels.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getMainPanel() {
        if (mainPanel == null) {
            GridBagConstraints gridBagConstraints22 = new GridBagConstraints();
            gridBagConstraints22.gridx = 0;
            gridBagConstraints22.weightx = 1.0D;
            gridBagConstraints22.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints22.gridy = 5;
            GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
            gridBagConstraints1.gridx = 0;
            gridBagConstraints1.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints1.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints1.weightx = 1.0D;
            gridBagConstraints1.gridy = 0;
            GridBagConstraints gridBagConstraints = new GridBagConstraints();
            gridBagConstraints.gridx = 0;
            gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints.weightx = 1.0D;
            gridBagConstraints.gridy = 1;
            GridBagConstraints gridBagConstraints21 = new GridBagConstraints();
            gridBagConstraints21.gridx = 0;
            gridBagConstraints21.fill = java.awt.GridBagConstraints.BOTH;
            gridBagConstraints21.weightx = 1.0D;
            gridBagConstraints21.weighty = 1.0D;
            gridBagConstraints21.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints21.gridy = 3;
            GridBagConstraints gridBagConstraints33 = new GridBagConstraints();
            gridBagConstraints33.gridx = 0;
            gridBagConstraints33.gridy = 2;
            GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
            mainPanel = new JPanel();
            mainPanel.setLayout(new GridBagLayout());
            gridBagConstraints2.gridx = 0;
            gridBagConstraints2.gridy = 4;
            gridBagConstraints2.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints2.anchor = java.awt.GridBagConstraints.SOUTH;
            gridBagConstraints2.fill = java.awt.GridBagConstraints.HORIZONTAL;
            mainPanel.add(getButtonPanel(), gridBagConstraints2);
            mainPanel.add(getQueryPanel(), gridBagConstraints33);
            mainPanel.add(getContentPanel(), gridBagConstraints21);
            mainPanel.add(getSessionPanel(), gridBagConstraints);
            mainPanel.add(getTitlePanel(), gridBagConstraints1);
            mainPanel.add(getProgressPanel(), gridBagConstraints22);
        }
        return mainPanel;
    }

    /**
     * This method initializes contentPanel, which hosts the trust level table inside a
     * titled border.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getContentPanel() {
        if (contentPanel == null) {
            GridBagConstraints gridBagConstraints4 = new GridBagConstraints();
            contentPanel = new JPanel();
            contentPanel.setLayout(new GridBagLayout());
            contentPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Level(s) of Assurance",
                javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
                javax.swing.border.TitledBorder.DEFAULT_POSITION, null, LookAndFeel.getPanelLabelColor()));
            gridBagConstraints4.weightx = 1.0;
            gridBagConstraints4.gridy = 0;
            gridBagConstraints4.gridx = 0;
            gridBagConstraints4.weighty = 1.0;
            gridBagConstraints4.fill = java.awt.GridBagConstraints.BOTH;
            contentPanel.add(getJScrollPane(), gridBagConstraints4);
        }
        return contentPanel;
    }

    /**
     * This method initializes buttonPanel (View / Add / Remove).
     *
     * @return javax.swing.JPanel
     */
    private JPanel getButtonPanel() {
        if (buttonPanel == null) {
            buttonPanel = new JPanel();
            buttonPanel.add(getViewModifyButton(), null);
            buttonPanel.add(getAddTrustLevel(), null);
            buttonPanel.add(getRemoveTrustLevelButton(), null);
        }
        return buttonPanel;
    }

    /**
     * This method initializes trustLevelTable
     *
     * @return LevelOfAssuranceTable
     */
    private LevelOfAssuranceTable getTrustLevelTable() {
        if (trustLevelTable == null) {
            trustLevelTable = new LevelOfAssuranceTable(this);
        }
        return trustLevelTable;
    }

    /**
     * This method initializes jScrollPane
     *
     * @return javax.swing.JScrollPane
     */
    private JScrollPane getJScrollPane() {
        if (jScrollPane == null) {
            jScrollPane = new JScrollPane();
            jScrollPane.setViewportView(getTrustLevelTable());
        }
        return jScrollPane;
    }

    /**
     * This method initializes addTrustLevel, whose action opens the "add level of
     * assurance" window in the background.
     *
     * @return javax.swing.JButton
     */
    private JButton getAddTrustLevel() {
        if (addTrustLevel == null) {
            addTrustLevel = new JButton();
            addTrustLevel.setText("Add");
            addTrustLevel.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    Runner runner = new Runner() {
                        public void execute() {
                            disableAllActions();
                            addTrustLevel();
                            enableAllActions();
                        }
                    };
                    try {
                        GridApplication.getContext().executeInBackground(runner);
                    } catch (Exception t) {
                        FaultUtil.logFault(log, t);
                    }
                }
            });
        }
        return addTrustLevel;
    }

    /**
     * Opens the window for adding a new level of assurance.
     */
    public void addTrustLevel() {
        try {
            GridApplication.getContext().addApplicationComponent(
                new LevelOfAssuranceWindow(getSessionPanel().getSession(), this), 600, 300);
        } catch (Exception e) {
            ErrorDialog.showError(e);
            FaultUtil.logFault(log, e);
        }
    }

    /**
     * Opens the view/modify window for the trust level currently selected in the table.
     */
    public void viewModifyLevel() {
        try {
            GridApplication.getContext().addApplicationComponent(
                new LevelOfAssuranceWindow(getSessionPanel().getSession(),
                    getTrustLevelTable().getSelectedTrustLevel(), this), 700, 500);
        } catch (Exception e) {
            ErrorDialog.showError(e);
            FaultUtil.logFault(log, e);
        }
    }

    /**
     * This method initializes queryPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getQueryPanel() {
        if (queryPanel == null) {
            queryPanel = new JPanel();
            queryPanel.add(getQuery(), null);
        }
        return queryPanel;
    }

    /**
     * This method initializes query (the "Search" button, also the window's default
     * button), whose action runs the trust level search in the background.
     *
     * @return javax.swing.JButton
     */
    private JButton getQuery() {
        if (query == null) {
            query = new JButton();
            query.setText("Search");
            getRootPane().setDefaultButton(query);
            query.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    Runner runner = new Runner() {
                        public void execute() {
                            disableAllActions();
                            getTrustLevels();
                            enableAllActions();
                        }
                    };
                    try {
                        GridApplication.getContext().executeInBackground(runner);
                    } catch (Exception t) {
                        // Log instead of silently swallowing, consistent with the other listeners.
                        FaultUtil.logFault(log, t);
                    }
                }
            });
        }
        return query;
    }

    /**
     * Queries the GTS for all trust levels and repopulates the table, reporting
     * progress and errors via the progress panel.
     */
    private void getTrustLevels() {
        getProgressPanel().showProgress("Searching...");
        this.getTrustLevelTable().clearTable();
        try {
            GTSPublicClient client = getSessionPanel().getSession().getUserClient();
            TrustLevel[] levels = client.getTrustLevels();
            int length = 0;
            if (levels != null) {
                length = levels.length;
                for (int i = 0; i < levels.length; i++) {
                    getTrustLevelTable().addTrustLevel(levels[i]);
                }
            }
            searchDone = true;
            this.getProgressPanel().stopProgress(length + " level(s) of assurance found.");
        } catch (Exception e) {
            ErrorDialog.showError(e);
            this.getProgressPanel().stopProgress("Error");
            FaultUtil.logFault(log, e);
        }
    }

    /**
     * This method initializes removeTrustLevelButton, whose action removes the selected
     * trust level in the background.
     *
     * @return javax.swing.JButton
     */
    private JButton getRemoveTrustLevelButton() {
        if (removeTrustLevelButton == null) {
            removeTrustLevelButton = new JButton();
            removeTrustLevelButton.setText("Remove");
            removeTrustLevelButton.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    Runner runner = new Runner() {
                        public void execute() {
                            disableAllActions();
                            removeTrustLevel();
                            enableAllActions();
                        }
                    };
                    try {
                        GridApplication.getContext().executeInBackground(runner);
                    } catch (Exception t) {
                        FaultUtil.logFault(log, t);
                    }
                }
            });
        }
        return removeTrustLevelButton;
    }

    /**
     * Removes the trust level selected in the table via the GTS admin client, then
     * refreshes the search results.
     */
    private void removeTrustLevel() {
        try {
            getProgressPanel().showProgress("Removing level of assurance...");
            GTSAdminClient client = getSessionPanel().getSession().getAdminClient();
            TrustLevel level = getTrustLevelTable().getSelectedTrustLevel();
            client.removeTrustLevel(level.getName());
            getTrustLevelTable().removeSelectedTrustLevel();
            getProgressPanel().stopProgress("Level of assurance successfully removed.");
            refreshTrustLevels();
        } catch (Exception e) {
            ErrorDialog.showError(e);
            getProgressPanel().stopProgress("Error");
            FaultUtil.logFault(log, e);
        }
    }

    /**
     * This method initializes viewModifyButton, whose action behaves like
     * double-clicking the selected table row.
     *
     * @return javax.swing.JButton
     */
    private JButton getViewModifyButton() {
        if (viewModifyButton == null) {
            viewModifyButton = new JButton();
            viewModifyButton.setText("View");
            viewModifyButton.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    try {
                        disableAllActions();
                        getTrustLevelTable().doubleClick();
                    } catch (Exception ex) {
                        ErrorDialog.showError(ex);
                        log.error(ex, ex);
                    } finally {
                        enableAllActions();
                    }
                }
            });
        }
        return viewModifyButton;
    }

    /** Disables every action button while a background operation is running. */
    private void disableAllActions() {
        getQuery().setEnabled(false);
        getAddTrustLevel().setEnabled(false);
        getViewModifyButton().setEnabled(false);
        getRemoveTrustLevelButton().setEnabled(false);
    }

    /** Re-enables every action button after a background operation completes. */
    private void enableAllActions() {
        getQuery().setEnabled(true);
        getAddTrustLevel().setEnabled(true);
        getViewModifyButton().setEnabled(true);
        getRemoveTrustLevelButton().setEnabled(true);
    }

    /**
     * Re-runs the trust level search, but only if a search was already performed once.
     */
    public void refreshTrustLevels() {
        if (searchDone) {
            disableAllActions();
            this.getTrustLevels();
            enableAllActions();
        }
    }

    /**
     * This method initializes sessionPanel
     *
     * @return SessionPanel
     */
    private SessionPanel getSessionPanel() {
        if (sessionPanel == null) {
            sessionPanel = new SessionPanel();
        }
        return sessionPanel;
    }

    /**
     * This method initializes titlePanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getTitlePanel() {
        if (titlePanel == null) {
            titlePanel = new TitlePanel("Level of Assurance Search",
                "Search for and manage levels of assurance in the trust fabric.");
        }
        return titlePanel;
    }

    /**
     * This method initializes progressPanel
     *
     * @return ProgressPanel
     */
    private ProgressPanel getProgressPanel() {
        if (progressPanel == null) {
            progressPanel = new ProgressPanel();
        }
        return progressPanel;
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.haproxy;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.haproxy.HAProxyProxiedProtocol.AddressFamily;
import io.netty.util.ByteProcessor;
import io.netty.util.CharsetUtil;
import io.netty.util.NetUtil;
/**
* Message container for decoded HAProxy proxy protocol parameters
*/
public final class HAProxyMessage {
/**
* Version 1 proxy protocol message for 'UNKNOWN' proxied protocols. Per spec, when the proxied protocol is
* 'UNKNOWN' we must discard all other header values.
*/
private static final HAProxyMessage V1_UNKNOWN_MSG = new HAProxyMessage(
HAProxyProtocolVersion.V1, HAProxyCommand.PROXY, HAProxyProxiedProtocol.UNKNOWN, null, null, 0, 0);
/**
* Version 2 proxy protocol message for 'UNKNOWN' proxied protocols. Per spec, when the proxied protocol is
* 'UNKNOWN' we must discard all other header values.
*/
private static final HAProxyMessage V2_UNKNOWN_MSG = new HAProxyMessage(
HAProxyProtocolVersion.V2, HAProxyCommand.PROXY, HAProxyProxiedProtocol.UNKNOWN, null, null, 0, 0);
/**
* Version 2 proxy protocol message for local requests. Per spec, we should use an unspecified protocol and family
* for 'LOCAL' commands. Per spec, when the proxied protocol is 'UNKNOWN' we must discard all other header values.
*/
private static final HAProxyMessage V2_LOCAL_MSG = new HAProxyMessage(
HAProxyProtocolVersion.V2, HAProxyCommand.LOCAL, HAProxyProxiedProtocol.UNKNOWN, null, null, 0, 0);
private final HAProxyProtocolVersion protocolVersion;
private final HAProxyCommand command;
private final HAProxyProxiedProtocol proxiedProtocol;
private final String sourceAddress;
private final String destinationAddress;
private final int sourcePort;
private final int destinationPort;
    /**
     * Creates a new instance from string-form port numbers (as carried by the
     * version 1, text proxy protocol header). Delegates to the int-port constructor
     * after converting each port.
     *
     * NOTE(review): portStringToInt is defined elsewhere in this class — presumably it
     * parses and validates the numeric form; confirm its failure behavior.
     */
    private HAProxyMessage(
            HAProxyProtocolVersion protocolVersion, HAProxyCommand command, HAProxyProxiedProtocol proxiedProtocol,
            String sourceAddress, String destinationAddress, String sourcePort, String destinationPort) {
        this(
                protocolVersion, command, proxiedProtocol,
                sourceAddress, destinationAddress, portStringToInt(sourcePort), portStringToInt(destinationPort));
    }
/**
* Creates a new instance
*/
private HAProxyMessage(
HAProxyProtocolVersion protocolVersion, HAProxyCommand command, HAProxyProxiedProtocol proxiedProtocol,
String sourceAddress, String destinationAddress, int sourcePort, int destinationPort) {
if (proxiedProtocol == null) {
throw new NullPointerException("proxiedProtocol");
}
AddressFamily addrFamily = proxiedProtocol.addressFamily();
checkAddress(sourceAddress, addrFamily);
checkAddress(destinationAddress, addrFamily);
checkPort(sourcePort);
checkPort(destinationPort);
this.protocolVersion = protocolVersion;
this.command = command;
this.proxiedProtocol = proxiedProtocol;
this.sourceAddress = sourceAddress;
this.destinationAddress = destinationAddress;
this.sourcePort = sourcePort;
this.destinationPort = destinationPort;
}
/**
* Decodes a version 2, binary proxy protocol header.
*
* @param header a version 2 proxy protocol header
* @return {@link HAProxyMessage} instance
* @throws HAProxyProtocolException if any portion of the header is invalid
*/
static HAProxyMessage decodeHeader(ByteBuf header) {
if (header == null) {
throw new NullPointerException("header");
}
if (header.readableBytes() < 16) {
throw new HAProxyProtocolException(
"incomplete header: " + header.readableBytes() + " bytes (expected: 16+ bytes)");
}
// Per spec, the 13th byte is the protocol version and command byte
header.skipBytes(12);
final byte verCmdByte = header.readByte();
HAProxyProtocolVersion ver;
try {
ver = HAProxyProtocolVersion.valueOf(verCmdByte);
} catch (IllegalArgumentException e) {
throw new HAProxyProtocolException(e);
}
if (ver != HAProxyProtocolVersion.V2) {
throw new HAProxyProtocolException("version 1 unsupported: 0x" + Integer.toHexString(verCmdByte));
}
HAProxyCommand cmd;
try {
cmd = HAProxyCommand.valueOf(verCmdByte);
} catch (IllegalArgumentException e) {
throw new HAProxyProtocolException(e);
}
if (cmd == HAProxyCommand.LOCAL) {
return V2_LOCAL_MSG;
}
// Per spec, the 14th byte is the protocol and address family byte
HAProxyProxiedProtocol protAndFam;
try {
protAndFam = HAProxyProxiedProtocol.valueOf(header.readByte());
} catch (IllegalArgumentException e) {
throw new HAProxyProtocolException(e);
}
if (protAndFam == HAProxyProxiedProtocol.UNKNOWN) {
return V2_UNKNOWN_MSG;
}
int addressInfoLen = header.readUnsignedShort();
String srcAddress;
String dstAddress;
int addressLen;
int srcPort = 0;
int dstPort = 0;
AddressFamily addressFamily = protAndFam.addressFamily();
if (addressFamily == AddressFamily.AF_UNIX) {
// unix sockets require 216 bytes for address information
if (addressInfoLen < 216 || header.readableBytes() < 216) {
throw new HAProxyProtocolException(
"incomplete UNIX socket address information: " +
Math.min(addressInfoLen, header.readableBytes()) + " bytes (expected: 216+ bytes)");
}
int startIdx = header.readerIndex();
int addressEnd = header.forEachByte(startIdx, 108, ByteProcessor.FIND_NUL);
if (addressEnd == -1) {
addressLen = 108;
} else {
addressLen = addressEnd - startIdx;
}
srcAddress = header.toString(startIdx, addressLen, CharsetUtil.US_ASCII);
startIdx += 108;
addressEnd = header.forEachByte(startIdx, 108, ByteProcessor.FIND_NUL);
if (addressEnd == -1) {
addressLen = 108;
} else {
addressLen = addressEnd - startIdx;
}
dstAddress = header.toString(startIdx, addressLen, CharsetUtil.US_ASCII);
// AF_UNIX defines that exactly 108 bytes are reserved for the address. The previous methods
// did not increase the reader index although we already consumed the information.
header.readerIndex(startIdx + 108);
} else {
if (addressFamily == AddressFamily.AF_IPv4) {
// IPv4 requires 12 bytes for address information
if (addressInfoLen < 12 || header.readableBytes() < 12) {
throw new HAProxyProtocolException(
"incomplete IPv4 address information: " +
Math.min(addressInfoLen, header.readableBytes()) + " bytes (expected: 12+ bytes)");
}
addressLen = 4;
} else if (addressFamily == AddressFamily.AF_IPv6) {
// IPv6 requires 36 bytes for address information
if (addressInfoLen < 36 || header.readableBytes() < 36) {
throw new HAProxyProtocolException(
"incomplete IPv6 address information: " +
Math.min(addressInfoLen, header.readableBytes()) + " bytes (expected: 36+ bytes)");
}
addressLen = 16;
} else {
throw new HAProxyProtocolException(
"unable to parse address information (unkown address family: " + addressFamily + ')');
}
// Per spec, the src address begins at the 17th byte
srcAddress = ipBytestoString(header, addressLen);
dstAddress = ipBytestoString(header, addressLen);
srcPort = header.readUnsignedShort();
dstPort = header.readUnsignedShort();
}
return new HAProxyMessage(ver, cmd, protAndFam, srcAddress, dstAddress, srcPort, dstPort);
}
/**
* Decodes a version 1, human-readable proxy protocol header.
*
* @param header a version 1 proxy protocol header
* @return {@link HAProxyMessage} instance
* @throws HAProxyProtocolException if any portion of the header is invalid
*/
static HAProxyMessage decodeHeader(String header) {
if (header == null) {
throw new HAProxyProtocolException("header");
}
String[] parts = header.split(" ");
int numParts = parts.length;
if (numParts < 2) {
throw new HAProxyProtocolException(
"invalid header: " + header + " (expected: 'PROXY' and proxied protocol values)");
}
if (!"PROXY".equals(parts[0])) {
throw new HAProxyProtocolException("unknown identifier: " + parts[0]);
}
HAProxyProxiedProtocol protAndFam;
try {
protAndFam = HAProxyProxiedProtocol.valueOf(parts[1]);
} catch (IllegalArgumentException e) {
throw new HAProxyProtocolException(e);
}
if (protAndFam != HAProxyProxiedProtocol.TCP4 &&
protAndFam != HAProxyProxiedProtocol.TCP6 &&
protAndFam != HAProxyProxiedProtocol.UNKNOWN) {
throw new HAProxyProtocolException("unsupported v1 proxied protocol: " + parts[1]);
}
if (protAndFam == HAProxyProxiedProtocol.UNKNOWN) {
return V1_UNKNOWN_MSG;
}
if (numParts != 6) {
throw new HAProxyProtocolException("invalid TCP4/6 header: " + header + " (expected: 6 parts)");
}
return new HAProxyMessage(
HAProxyProtocolVersion.V1, HAProxyCommand.PROXY,
protAndFam, parts[2], parts[3], parts[4], parts[5]);
}
/**
* Convert ip address bytes to string representation
*
* @param header buffer containing ip address bytes
* @param addressLen number of bytes to read (4 bytes for IPv4, 16 bytes for IPv6)
* @return string representation of the ip address
*/
private static String ipBytestoString(ByteBuf header, int addressLen) {
StringBuilder sb = new StringBuilder();
if (addressLen == 4) {
sb.append(header.readByte() & 0xff);
sb.append('.');
sb.append(header.readByte() & 0xff);
sb.append('.');
sb.append(header.readByte() & 0xff);
sb.append('.');
sb.append(header.readByte() & 0xff);
} else {
sb.append(Integer.toHexString(header.readUnsignedShort()));
sb.append(':');
sb.append(Integer.toHexString(header.readUnsignedShort()));
sb.append(':');
sb.append(Integer.toHexString(header.readUnsignedShort()));
sb.append(':');
sb.append(Integer.toHexString(header.readUnsignedShort()));
sb.append(':');
sb.append(Integer.toHexString(header.readUnsignedShort()));
sb.append(':');
sb.append(Integer.toHexString(header.readUnsignedShort()));
sb.append(':');
sb.append(Integer.toHexString(header.readUnsignedShort()));
sb.append(':');
sb.append(Integer.toHexString(header.readUnsignedShort()));
}
return sb.toString();
}
/**
* Convert port to integer
*
* @param value the port
* @return port as an integer
* @throws HAProxyProtocolException if port is not a valid integer
*/
private static int portStringToInt(String value) {
int port;
try {
port = Integer.parseInt(value);
} catch (NumberFormatException e) {
throw new HAProxyProtocolException("invalid port: " + value, e);
}
if (port <= 0 || port > 65535) {
throw new HAProxyProtocolException("invalid port: " + value + " (expected: 1 ~ 65535)");
}
return port;
}
/**
* Validate an address (IPv4, IPv6, Unix Socket)
*
* @param address human-readable address
* @param addrFamily the {@link AddressFamily} to check the address against
* @throws HAProxyProtocolException if the address is invalid
*/
private static void checkAddress(String address, AddressFamily addrFamily) {
if (addrFamily == null) {
throw new NullPointerException("addrFamily");
}
switch (addrFamily) {
case AF_UNSPEC:
if (address != null) {
throw new HAProxyProtocolException("unable to validate an AF_UNSPEC address: " + address);
}
return;
case AF_UNIX:
return;
}
if (address == null) {
throw new NullPointerException("address");
}
switch (addrFamily) {
case AF_IPv4:
if (!NetUtil.isValidIpV4Address(address)) {
throw new HAProxyProtocolException("invalid IPv4 address: " + address);
}
break;
case AF_IPv6:
if (!NetUtil.isValidIpV6Address(address)) {
throw new HAProxyProtocolException("invalid IPv6 address: " + address);
}
break;
default:
throw new Error();
}
}
/**
* Validate a UDP/TCP port
*
* @param port the UDP/TCP port
* @throws HAProxyProtocolException if the port is out of range (0-65535 inclusive)
*/
private static void checkPort(int port) {
if (port < 0 || port > 65535) {
throw new HAProxyProtocolException("invalid port: " + port + " (expected: 1 ~ 65535)");
}
}
/**
* Returns the {@link HAProxyProtocolVersion} of this {@link HAProxyMessage}.
*/
public HAProxyProtocolVersion protocolVersion() {
return protocolVersion;
}
/**
* Returns the {@link HAProxyCommand} of this {@link HAProxyMessage}.
*/
public HAProxyCommand command() {
return command;
}
/**
* Returns the {@link HAProxyProxiedProtocol} of this {@link HAProxyMessage}.
*/
public HAProxyProxiedProtocol proxiedProtocol() {
return proxiedProtocol;
}
/**
* Returns the human-readable source address of this {@link HAProxyMessage}.
*/
public String sourceAddress() {
return sourceAddress;
}
/**
* Returns the human-readable destination address of this {@link HAProxyMessage}.
*/
public String destinationAddress() {
return destinationAddress;
}
/**
* Returns the UDP/TCP source port of this {@link HAProxyMessage}.
*/
public int sourcePort() {
return sourcePort;
}
/**
* Returns the UDP/TCP destination port of this {@link HAProxyMessage}.
*/
public int destinationPort() {
return destinationPort;
}
}
| |
/*
*
* Copyright 2016 Big Data Curation Lab, University of Toronto,
* Patricia Arocena,
* Boris Glavic,
* Renee J. Miller
*
* This software also contains code derived from STBenchmark as described in
* with the permission of the authors:
*
* Bogdan Alexe, Wang-Chiew Tan, Yannis Velegrakis
*
* This code was originally described in:
*
* STBenchmark: Towards a Benchmark for Mapping Systems
* Alexe, Bogdan and Tan, Wang-Chiew and Velegrakis, Yannis
* PVLDB: Proceedings of the VLDB Endowment archive
* 2008, vol. 1, no. 1, pp. 230-244
*
* The copyright of the ToxGene (included as a jar file: toxgene.jar) belongs to
* Denilson Barbosa. The iBench distribution contains this jar file with the
* permission of the author of ToxGene
* (http://www.cs.toronto.edu/tox/toxgene/index.html)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package tresc.benchmark.schemaGen;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.log4j.Logger;
import org.vagabond.util.CollectionUtils;
import org.vagabond.xmlmodel.MappingType;
import org.vagabond.xmlmodel.RelationType;
import tresc.benchmark.Constants.ScenarioName;
import tresc.benchmark.utils.Utils;
import vtools.dataModel.expression.AND;
import vtools.dataModel.expression.EQ;
import vtools.dataModel.expression.Path;
import vtools.dataModel.expression.Projection;
import vtools.dataModel.expression.Query;
import vtools.dataModel.expression.SPJQuery;
import vtools.dataModel.expression.SelectClauseList;
import vtools.dataModel.expression.Variable;
//MN FIXED "K"; it had not been set correctly - 6 May 2014
//MN Enhanced genTargetRels to pass types of attributes of target relations as argument to addRelation - 6 May 2014
//MN Implemented chooseTargetRels - 17 May 2014
//MN Enhanced genSourceRels to pass types of attributes of source relation as argument to addRelation - 17 May 2014
//MN FIXED chooseTargetRels - 20 May 2014
//MN FIXED target relation names in genTargetRels - 20 May 2014
//MN MODIFIED genSourceRels to be able to evaluate MapMerge (Notice that this modification should be undone later) - 26 May 2014
//MN FIXED chooseTargetRels (F) - 2 June 2014
//MN ToDo FIX chooseSourceRels for circular FK - 2 June 2014
public class SelfJoinScenarioGenerator extends AbstractScenarioGenerator
{
static Logger log = Logger.getLogger(SelfJoinScenarioGenerator.class);
public static final int MAX_NUM_TRIES = 10;
//MN join size - 17 May 2014
private int JN;
//MN primary key - 17 May 2014
private int K;
//MN source rel size - 17 May 2014
private int E;
private int F;
private String[] keys;
private String[] fks;
private int[] keyPos;
private int[] fkPos;
// private int[] normalPos;
//MN added attribute to check whether we are reusing target relations - 17 May 2014
private boolean targetReuse;
public SelfJoinScenarioGenerator()
{
;
}
protected void initPartialMapping () {
super.initPartialMapping();
E = Utils.getRandomNumberAroundSomething(_generator, numOfElements, numOfElementsDeviation);
//MN modified the code so that K works correctly - 6 May 2014
K = Utils.getRandomNumberAroundSomething(_generator, primaryKeySize, primaryKeySizeDeviation);
E = (E < ((2 * K) + 1)) ? ((2 * K) + 1) : E;
//MN join size
JN = Utils.getRandomNumberAroundSomething(_generator, numOfSetElements, numOfSetElementsDeviation);
JN = (JN < 1) ? 1 : JN;
F = E - (2 * K);
//MN BEGIN - 17 May 2014
targetReuse = false;
//MN END
}
//MN - implemented chooseTargetRels - 17 May 2014
@Override
protected boolean chooseTargetRels() throws Exception{
//MN we need two relations with the same size - 17 May 2014
boolean found1 = false;
boolean found2 = false;
RelationType rel1 = null;
RelationType rel2 = null;
if(K==0)
K=1;
//MN 2 June 2014
F =1 ;
int numTries =0;
while((numTries<MAX_NUM_TRIES) && (!found1) && (!found2)){
//find the first one - relation S
int minAttrs1 = K + 1;
rel1 = getRandomRel(false, minAttrs1, minAttrs1);
if(rel1 == null)
found1 = false;
else{
//MN FIXED - 18 May 2014
if ((rel1.sizeOfAttrArray() != K + 1))
found1 = false;
if(found1){
if(rel1.isSetPrimaryKey()){
int[] pkPos = model.getPKPos(rel1.getName(), false);
if(pkPos.length != K)
found1 = false;
else
for(int h=0; h<K; h++)
if(pkPos[h] != h)
found1=false;
}
else{
int [] primaryKeyPos = new int [K];
for(int i=0; i<K; i++)
primaryKeyPos [i] = i;
fac.addPrimaryKey(rel1.getName(), primaryKeyPos, false);
found1=true;
}
}
if(found1)
m.addTargetRel(rel1);
}
//find the second one - relation T
int minAttrs2 = K + K;
rel2 = getRandomRel(false, minAttrs2, minAttrs2);
if(rel2 == null)
found2 = false;
else{
//MN 18 May 2014
if(rel2.sizeOfAttrArray()/K != 2)
found2=false;
if((rel1 != null) && (rel1.getName()==rel2.getName()))
found2 = false;
if(found2){
if(rel2.isSetPrimaryKey()){
int[] pkPos = model.getPKPos(rel2.getName(), false);
if(pkPos.length != K)
found2 = false;
else
for(int h=0; h<K; h++)
if(pkPos[h] != h)
found2=false;
}
else{
int [] primaryKeyPos = new int [K];
for(int i=0; i<K; i++)
primaryKeyPos [i] = i;
fac.addPrimaryKey(rel2.getName(), primaryKeyPos, false);
found2=true;
}
}
}
if(!found1 && !found2)
numTries++;
if(!found1){
//create one
String[] attrs = new String[K + 1];
for(int j = 0; j < K + 1; j++)
attrs[j] = randomAttrName(0, j);
// create the relation
String relName = randomRelName(0);
rel1 = fac.addRelation(getRelHook(0), relName, attrs, false);
//set primary key
int [] primaryKeyPos = new int [K];
for(int i=0; i<K; i++)
primaryKeyPos [i] = i;
fac.addPrimaryKey(relName, primaryKeyPos, false);
found1 = true;
}
if(found2)
m.addTargetRel(rel2);
else{
//create one
String[] attrs = new String[K + K];
for(int j = 0; j < K + K; j++)
attrs[j] = randomAttrName(0, j);
// create the relation
String relName = randomRelName(0);
rel2 = fac.addRelation(getRelHook(0), relName, attrs, false);
//set primary key
int [] primaryKeyPos = new int [K];
for(int i=0; i<K; i++)
primaryKeyPos [i] = i;
fac.addPrimaryKey(relName, primaryKeyPos, false);
found2=true;
}
}
if(numTries>MAX_NUM_TRIES)
return false;
//set source relation parameters
keys = new String[K];
keyPos = new int[K];
fks = new String[K];
fkPos = new int[K];
E = K + K + 1;
//set keys
for(int i=0; i<K; i++){
keys[i] = rel1.getAttrArray(i).getName().toString();
fks[i] = rel2.getAttrArray(i+K).getName().toString();
keyPos[i]=i;
fkPos[i]=i+K;
}
//set FKs
addFK(1, fks, 0, keys, false);
targetReuse = true;
return true;
}
//MN modified chooseSoruceRels - 6 May 2014
//MN Question: I don't get some parts of the code - 6 May 2014
//MN the goal is to preserve value of Key - 6 May 2014
@Override
protected boolean chooseSourceRels() throws Exception {
int numTries = 0;
RelationType rel = null;
String srcName;
// fetch random rel with enough attrs
//MN do we need numTries here? - 17 May 2014
while(numTries < MAX_NUM_TRIES){
//MN two keys (one key set is referring to the other) + 1 (to be reasonable)
rel = getRandomRel(true, K + K + 1);
if(rel == null)
break;
numTries++;
}
//TODO try to reduce number of keys and foreign keys?
keys = new String[K];
keyPos = new int[K];
fks = new String[K];
fkPos = new int[K];
if (rel == null)
return false;
//MN BEGIN
E = rel.sizeOfAttrArray();
//MN END
F = rel.sizeOfAttrArray() - 2 * K;
// normalPos = new int[F];
m.addSourceRel(rel);
srcName = rel.getName();
// already has PK, get positions of PK attrs
if (rel.isSetPrimaryKey()) {
keyPos = model.getPKPos(srcName, true);
keys = model.getPK(srcName, true);
// find attributes to use as fk
int fkDone = 0, pos = 0;
//MN I have trouble in understanding the following piece of code - 6 May 2014
while(fkDone < K) {
// is pk position?
if (Arrays.binarySearch(keyPos, pos) < 0) {
fkPos[fkDone] = pos;
fks[fkDone] = m.getAttrId(0, pos, true);
fkDone++;
}
pos++;
}
//MN addForeignKey or check types of foreign keys - 2 June 2014
}
else {
keyPos = CollectionUtils.createSequence(0, K);
fkPos = CollectionUtils.createSequence(K, K);
for(int i = 0; i < K; i++) {
keys[i] = rel.getAttrArray(i).getName();
fks[i] = rel.getAttrArray(K + i).getName();
}
// normalPos = CollectionUtils.createSequence(2 * K, F);
fac.addPrimaryKey(srcName, CollectionUtils.createSequence(0, K), true);
//MN removed the following line to be able to evaluate MapMerge - 2 June 2014
//fac.addForeignKey(srcName, fks, srcName, keys, true);
}
return true;
}
@Override
protected void genSourceRels() throws Exception {
String srcName = randomRelName(0);
String[] attrs = new String[E];
//MN BEGIN -considered an array to store types of attributes - 17 May 2014
String[] attrsType = new String[E];
//MN END
//MN BEGIN - 17 May 2014
if(!targetReuse){
keys = new String[K];
fks = new String[K];
keyPos = new int[K];
fkPos = new int[K];
}
//MN END
// normalPos = new int[F];
String hook = getRelHook(0);
// create key and foreign key attrs
for(int i = 0; i < K; i++) {
String randAtt = randomAttrName(0, i);
keys[i] = randAtt + "ke";
keyPos[i] = i;
fks[i] = randAtt + "fk";
fkPos[i] = i + K;
attrs[i] = keys[i];
attrs[i + K] = fks[i];
}
// create free attrs
for(int i = 2 * K; i < E; i++)
attrs[i] = randomAttrName(0, i);
// normalPos = CollectionUtils.createSequence(2 * K, F);
//MN BEGIN - 17 May 2014
if(targetReuse){
for(int h=0; h<2*K; h++)
attrsType[h] = m.getTargetRels().get(1).getAttrArray(h).getDataType();
int count =0;
for(int h=2*K; h<E; h++){
attrsType[h] = m.getTargetRels().get(0).getAttrArray(K+count).getDataType();
count++;
}
}
//MN END
fac.addRelation(hook, srcName, attrs, true);
fac.addPrimaryKey(srcName, keys, true);
//MN removed the following line to be able to evaluate MapMerge - it should be undone later - 26 May 2014
fac.addForeignKey(srcName, fks, srcName, keys, true);
//MN BEGIN - 17 May 2014
targetReuse = false;
//MN END
}
@Override
protected void genTargetRels() throws Exception {
//MN modified the way that iBench generates names for target relations by adding curRep
//MN in order to be able to support reusability - 20 May 2014
String bRelName = m.getRelName(0, true) + curRep + "_b";
String fkRelName = m.getRelName(0, true) + curRep + "_fk";
String[] bAttrs = new String[K + F];
String[] fkAttrs = new String[2 * K];
//MN considered arrays to store types of attributes - 4 May 2014
List<String> attrsType1 = new ArrayList<String> ();
List<String> attrsType2 = new ArrayList<String> ();
// add keys to basic table and keys and fks to fk table
for(int i = 0; i < K; i++) {
//MN BEGIN - 6 May 2014
//bAttrs
attrsType1.add(m.getSourceRels().get(0).getAttrArray(i).getDataType());
//fkAttrs
attrsType2.add(m.getSourceRels().get(0).getAttrArray(i).getDataType());
//MN END
bAttrs[i] = m.getAttrId(0, i, true);
fkAttrs[i] = m.getAttrId(0, i, true);
fkAttrs[i + K] = m.getAttrId(0, i + K, true);
}
//MN BEGIN - 6 May 2014
//fkAttrs
for(int i=0; i<K; i++)
attrsType2.add(m.getSourceRels().get(0).getAttrArray(i + K).getDataType());
//MN END
// add free attrs to basic table
for(int i = 2 * K; i < E; i++){
bAttrs[i - K] = m.getAttrId(0, i, true);
//MN BEGIN - 6 May 2014
//bAttrs
attrsType1.add(m.getSourceRels().get(0).getAttrArray(i).getDataType());
//MN END
}
// create relations and foreign keys
//MN - 6 May 2014
fac.addRelation(getRelHook(0), bRelName, bAttrs, attrsType1.toArray(new String[] {}), false);
fac.addRelation(getRelHook(1), fkRelName, fkAttrs, attrsType2.toArray(new String[] {}), false);
fac.addPrimaryKey(bRelName, keys, false);
fac.addPrimaryKey(fkRelName, keys, false);
addFK(1, fks, 0, keys, false);
}
@Override
protected void genMappings() throws Exception {
String[] keyVars = fac.getFreshVars(0, K);
String[] fkVars = fac.getFreshVars(K, K);
String[] fVars = fac.getFreshVars(2 * K, F);
MappingType m1 = fac.addMapping(m.getCorrs(0, false));
fac.addForeachAtom(m1, 0, CollectionUtils.concatArrays(keyVars, fkVars,
fVars));
fac.addExistsAtom(m1, 0, CollectionUtils.concatArrays(keyVars, fVars));
MappingType m2 = fac.addMapping(m.getCorrs(1, false));
fac.addForeachAtom(m2, 0, CollectionUtils.concatArrays(keyVars, fac.getFreshVars(2 * K, E - K)));
fac.addForeachAtom(m2, 0, CollectionUtils.concatArrays(fkVars, keyVars,
fac.getFreshVars(E, F)));
fac.addExistsAtom(m2, 1, CollectionUtils.concatArrays(keyVars, fkVars));
}
@Override
protected void genTransformations() throws Exception {
SPJQuery genQuery = new SPJQuery();
Query q;
String mapId;
genQueries(genQuery);
String creates;
//TODO check
creates = m.getRelName(0, false);
q = (Query) genQuery.getSelect().getTerm(0);
q.storeCode(q.toTrampString(m.getMapIds()));
q = addQueryOrUnion(creates, q);
mapId = m.getMaps().get(0).getId();
fac.addTransformation(q.toTrampStringOneMap(mapId), mapId, creates);
creates = m.getRelName(1, false);
q = (Query) genQuery.getSelect().getTerm(1);
q.storeCode(q.toTrampString(m.getMapIds()));
q = addQueryOrUnion(creates, q);
mapId = m.getMaps().get(1).getId();
fac.addTransformation(q.toTrampStringOneMap(mapId), mapId, creates);
}
private void genQueries(SPJQuery generatedQuery) {
String nameS = m.getRelName(0, true);
String nameTB = m.getRelName(0, false);
String nameTFK = m.getRelName(1, false);
String[] sAttrs = m.getAttrIds(0, true);
// create the first query mapping to the K, F table
SPJQuery query = new SPJQuery();
Variable var = new Variable("X");
query.getFrom().add(var.clone(), new Projection(Path.ROOT, nameS));
// generate the keys in the source and Basic target table
// add the keys constraints to the source and to the target
SelectClauseList select = query.getSelect();
//Variable varKey = new Variable("K");
// the key constraint in the source
for (int i = 0; i < K; i++) {
// add the keys to the select clause of the query
Projection att = new Projection(var.clone(), keys[i]);
select.add(keys[i], att);
}
// generate the free elements in the source table and in the Basic
// target table only
for (int i = 0; i < F; i++) {
// add the free elements to the select clause of the query
String attName = sAttrs[2 * K + i];
Projection att = new Projection(var.clone(), attName);
select.add(attName, att);
}
// add the first query to the final query
query.setSelect(select);
SelectClauseList pselect = pquery.getSelect();
SelectClauseList gselect = generatedQuery.getSelect();
pselect.add(nameTB, query);
gselect.add(nameTB, query);
pquery.setSelect(pselect);
generatedQuery.setSelect(gselect);
generatedQuery.addTarget(nameTB);
// create the second intermediate query
SPJQuery query2 = new SPJQuery();
// create the from clause of the second query
for (int i = 1; i <= JN; i++)
query2.getFrom().add(new Variable("X" + i), new Projection(Path.ROOT, nameS));
// generate the first part of the Select clause of the second query
// add as attr all the keys that belong to the first relation
// that appears in the From clause
SelectClauseList select2 = query2.getSelect();
for (int i = 0; i < K; i++) {
Projection att = new Projection(new Variable("X1"), keys[i]);
select2.add(keys[i], att);
}
// generate in the Join target table the pointers to the keys
// of the source; RE stands for Reference element
// also generate the second part of the Select clause of the second
// query by adding as attr all the keys that
// belong to the last relation that appears in the From clause
for (int i = 0; i < K; i++) {
Projection att = new Projection(new Variable("X" + JN), fks[i]);
select2.add(fks[i], att);
}
// generate the Where clause of the second query; that
// constructs the joining of the source for JN times
AND where = new AND();
for (int j = 1; j < JN; j++)
for (int i = 0; i < K; i++) {
Projection att1 =
new Projection(new Variable("X" + (j + 1)), keys[i]);
Projection att2 =
new Projection(new Variable("X" + j), fks[i]);
where.add(new EQ(att1, att2));
}
// add the second query to the final query
query2.setSelect(select2);
query2.setWhere(where);
pselect = pquery.getSelect();
gselect = generatedQuery.getSelect();
pselect.add(nameTFK, query2);
gselect.add(nameTFK, query2);
pquery.setSelect(pselect);
generatedQuery.setSelect(gselect);
generatedQuery.addTarget(nameTFK);
}
@Override
protected void genCorrespondences() {
// keys from source to both target relations
for(int i = 0; i < K; i++) {
addCorr(0, i, 0, i);
addCorr(0, i, 1, i);
}
// FKs from source to target FK relation
for(int i = 0; i < K; i++)
addCorr(0, i + K, 1, i + K);
// free attrs from source to basic target relation
for(int i = 0; i < F; i++)
addCorr(0, i + (2 * K), 0, i + K);
}
@Override
public ScenarioName getScenType() {
return ScenarioName.SELFJOINS;
}
}
| |
/*
* Copyright 2012 Thomas Bocek
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package net.tomp2p.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* A map with expiration and more or less LRU. Since the maps are separated in segments, the LRU is done for each
 * segment. A segment is chosen based on the hash of the key. If one segment is more loaded than another, then an entry
 * of the loaded segment may get evicted before a less recently used entry from another segment. The expiration is
* done best effort. There is no thread checking for timed out entries since the cache has a fixed size. Once an entry
* times out, it remains in the map until it either is accessed or evicted. A test showed that for the default entry
* size of 1024, this map has a size of 967 if 1024 items are inserted. This is due to the segmentation and hashing.
*
* @author Thomas Bocek
* @param <K>
* the type of the key
* @param <V>
* the type of the value
*/
public class ConcurrentCacheMap<K, V> implements ConcurrentMap<K, V> {
private static final Logger LOGGER = LoggerFactory.getLogger(ConcurrentCacheMap.class);
/**
* Number of segments that can be accessed concurrently.
*/
public static final int SEGMENT_NR = 16;
/**
* Max. number of entries that the map can hold until the least recently used gets replaced
*/
public static final int MAX_ENTRIES = 1024;
/**
* Time to live for a value. The value may stay longer in the map, but it is considered invalid.
*/
public static final int DEFAULT_TIME_TO_LIVE = 60;
private final CacheMap<K, ExpiringObject>[] segments;
private final int timeToLiveSeconds;
private final boolean refreshTimeout;
private final AtomicInteger removedCounter = new AtomicInteger();
    /**
     * Creates a new instance of ConcurrentCacheMap using the default values ({@link #DEFAULT_TIME_TO_LIVE} seconds,
     * {@link #MAX_ENTRIES} entries, timeout refresh on putIfAbsent enabled) and a {@link CacheMap} for the internal
     * data structure.
     */
    public ConcurrentCacheMap() {
        this(DEFAULT_TIME_TO_LIVE, MAX_ENTRIES, true);
    }
    /**
     * Creates a new instance of ConcurrentCacheMap using the supplied values and a {@link CacheMap} for the internal
     * data structure. Timeout refresh on {@link #putIfAbsent(Object, Object)} is enabled.
     *
     * @param timeToLiveSeconds
     *            The time-to-live value (seconds)
     * @param maxEntries
     *            The maximum number of entries until items gets replaced with LRU
     */
    public ConcurrentCacheMap(final int timeToLiveSeconds, final int maxEntries) {
        this(timeToLiveSeconds, maxEntries, true);
    }
/**
* Creates a new instance of ConcurrentCacheMap using the supplied values and a {@link CacheMap} for the internal
* data structure.
*
* @param timeToLiveSeconds
* The time-to-live value (seconds)
* @param maxEntries
* The maximum number of entries until items gets replaced with LRU
* @param refreshTimeout
* If set to true, timeout will be reset in case of {@link #putIfAbsent(Object, Object)}
*/
@SuppressWarnings("unchecked")
public ConcurrentCacheMap(final int timeToLiveSeconds, final int maxEntries, final boolean refreshTimeout) {
this.segments = new CacheMap[SEGMENT_NR];
final int maxEntriesPerSegment = maxEntries / SEGMENT_NR;
for (int i = 0; i < SEGMENT_NR; i++) {
// set updateOnInsert to true, since it should behave as a regular map
segments[i] = new CacheMap<K, ExpiringObject>(maxEntriesPerSegment, true);
}
this.timeToLiveSeconds = timeToLiveSeconds;
this.refreshTimeout = refreshTimeout;
}
/**
* Returns the segment based on the key.
*
* @param key
* The key where the hash code identifies the segment
* @return The cache map that corresponds to this segment
*/
private CacheMap<K, ExpiringObject> segment(final Object key) {
return segments[(key.hashCode() & Integer.MAX_VALUE) % SEGMENT_NR];
}
@Override
public V put(final K key, final V value) {
final ExpiringObject newValue = new ExpiringObject(value, System.currentTimeMillis());
final CacheMap<K, ExpiringObject> segment = segment(key);
ExpiringObject oldValue;
synchronized (segment) {
oldValue = segment.put(key, newValue);
}
if (oldValue == null || oldValue.isExpired()) {
return null;
}
return oldValue.getValue();
}
    /**
     * Stores the value only if the key is absent or the existing entry has expired. If the key is present with a
     * live value, the mapping is kept; when {@code refreshTimeout} was enabled in the constructor, the existing
     * entry's timestamp is renewed in that case (i.e. the timer IS reset), otherwise the timer is left untouched.
     *
     * @return the previous live value for the key, or {@code null} if there was none or it had expired
     */
    @Override
    public V putIfAbsent(final K key, final V value) {
        final CacheMap<K, ExpiringObject> segment = segment(key);
        final ExpiringObject newValue = new ExpiringObject(value, System.currentTimeMillis());
        ExpiringObject oldValue = null;
        synchronized (segment) {
            if (!segment.containsKey(key)) {
                // key absent: insert like a plain put()
                oldValue = segment.put(key, newValue);
            } else {
                oldValue = segment.get(key);
                if (oldValue.isExpired()) {
                    // stale entry: replace it as if the key were absent
                    segment.put(key, newValue);
                } else if (refreshTimeout) {
                    // live entry: keep the stored value but renew its timestamp
                    oldValue = new ExpiringObject(oldValue.getValue(), System.currentTimeMillis());
                    segment.put(key, oldValue);
                }
            }
        }
        if (oldValue == null || oldValue.isExpired()) {
            return null;
        }
        return oldValue.getValue();
    }
/**
 * Looks up the live value for the key, evicting and hiding it if it has expired.
 *
 * @return The value, or null if absent or expired.
 */
@SuppressWarnings("unchecked")
@Override
public V get(final Object key) {
    final CacheMap<K, ExpiringObject> segment = segment(key);
    final ExpiringObject entry;
    synchronized (segment) {
        entry = segment.get(key);
    }
    if (entry == null) {
        LOGGER.debug("Get not found. Key: {}.", key);
        return null;
    }
    if (expire(segment, (K) key, entry)) {
        // Entry was stale; expire() already removed it.
        return null;
    }
    LOGGER.debug("Get found. Key: {}. Value: {}.", key, entry.getValue());
    return entry.getValue();
}
/**
 * Removes the mapping for the key.
 *
 * @return The removed value, or null if there was none or it had already expired.
 */
@Override
public V remove(final Object key) {
    final CacheMap<K, ExpiringObject> segment = segment(key);
    final ExpiringObject removed;
    synchronized (segment) {
        removed = segment.remove(key);
    }
    return (removed != null && !removed.isExpired()) ? removed.getValue() : null;
}
/**
 * Removes the mapping only if the key currently maps (non-expired) to the given value.
 *
 * @return True if the conditional removal took place.
 */
@SuppressWarnings("unchecked")
@Override
public boolean remove(final Object key, final Object value) {
    final CacheMap<K, ExpiringObject> segment = segment(key);
    final ExpiringObject oldValue;
    boolean removed = false;
    synchronized (segment) {
        oldValue = segment.get(key);
        // Compare the *wrapped* user value. The previous code compared the ExpiringObject
        // wrapper itself to the raw value; ExpiringObject.equals() only accepts other
        // ExpiringObject instances, so the conditional removal could never succeed.
        if (oldValue != null && !oldValue.isExpired() && oldValue.getValue().equals(value)) {
            removed = segment.remove(key) != null;
        }
    }
    if (oldValue != null) {
        // Opportunistically evict the entry if it turned out to be stale.
        expire(segment, (K) key, oldValue);
    }
    return removed;
}
/**
 * @return True if the key maps to a live (non-expired) entry; stale entries are evicted.
 */
@SuppressWarnings("unchecked")
@Override
public boolean containsKey(final Object key) {
    final CacheMap<K, ExpiringObject> segment = segment(key);
    final ExpiringObject entry;
    synchronized (segment) {
        entry = segment.get(key);
    }
    return entry != null && !expire(segment, (K) key, entry);
}
/**
 * @return True if any segment holds a live entry whose wrapped value equals the argument.
 */
@Override
public boolean containsValue(final Object value) {
    for (final CacheMap<K, ExpiringObject> segment : segments) {
        synchronized (segment) {
            expireSegment(segment);
            // Compare the *wrapped* user values. segment.containsValue() would test the raw
            // value against ExpiringObject wrappers and therefore never match ordinary values.
            for (final ExpiringObject expiringObject : segment.values()) {
                if (expiringObject.getValue().equals(value)) {
                    return true;
                }
            }
        }
    }
    return false;
}
/**
 * @return The total number of live entries across all segments.
 */
@Override
public int size() {
    int total = 0;
    for (final CacheMap<K, ExpiringObject> segment : segments) {
        synchronized (segment) {
            // Drop stale entries first so they are not counted.
            expireSegment(segment);
            total += segment.size();
        }
    }
    return total;
}
/**
 * @return True if no segment holds a live (non-expired) entry.
 */
@Override
public boolean isEmpty() {
    boolean empty = true;
    for (final CacheMap<K, ExpiringObject> segment : segments) {
        synchronized (segment) {
            expireSegment(segment);
            empty = segment.isEmpty();
        }
        if (!empty) {
            break;
        }
    }
    return empty;
}
/** Removes every entry from all segments. */
@Override
public void clear() {
    for (int i = 0; i < SEGMENT_NR; i++) {
        final CacheMap<K, ExpiringObject> segment = segments[i];
        synchronized (segment) {
            segment.clear();
        }
    }
}
/**
 * Computes the hash code as the sum of the segment hash codes (mirroring AbstractMap),
 * evicting expired entries first so they do not contribute.
 */
@Override
public int hashCode() {
    int sum = 0;
    for (final CacheMap<K, ExpiringObject> segment : segments) {
        synchronized (segment) {
            expireSegment(segment);
            sum += segment.hashCode();
        }
    }
    return sum;
}
/**
 * @return A snapshot set of all keys with live entries; stale entries are evicted first.
 */
@Override
public Set<K> keySet() {
    final Set<K> keys = new HashSet<K>();
    for (final CacheMap<K, ExpiringObject> segment : segments) {
        synchronized (segment) {
            expireSegment(segment);
            keys.addAll(segment.keySet());
        }
    }
    return keys;
}
/** Copies every mapping from the given map into this cache via {@link #put(Object, Object)}. */
@Override
public void putAll(final Map<? extends K, ? extends V> inMap) {
    for (final Map.Entry<? extends K, ? extends V> entry : inMap.entrySet()) {
        put(entry.getKey(), entry.getValue());
    }
}
/**
 * Returns a snapshot of all live values. Expired entries encountered while building the
 * snapshot are evicted from their segments.
 *
 * <p>NOTE(review): only the returned collection's iterator forbids removal; other mutators
 * (add/remove/clear) on the ArrayList remain usable and do not affect the map — confirm
 * whether that asymmetry is intended.
 */
@Override
public Collection<V> values() {
    // Snapshot collection whose iterator disallows remove(), since removal from the
    // snapshot could not be propagated back to the underlying segments.
    final Collection<V> retVal = new ArrayList<V>() {
        private static final long serialVersionUID = 3769009451779243542L;
        @Override
        public Iterator<V> iterator() {
            final Iterator<V> orig = super.iterator();
            return new Iterator<V>() {
                @Override
                public boolean hasNext() {
                    return orig.hasNext();
                }
                @Override
                public V next() {
                    return orig.next();
                }
                @Override
                public void remove() {
                    throw new UnsupportedOperationException("Cannot remove from values.");
                }
            };
        }
    };
    // Collect live values, evicting expired entries in place as they are encountered.
    for (final CacheMap<K, ExpiringObject> segment : segments) {
        synchronized (segment) {
            final Iterator<ExpiringObject> iterator = segment.values().iterator();
            while (iterator.hasNext()) {
                final ExpiringObject expiringObject = iterator.next();
                if (expiringObject.isExpired()) {
                    iterator.remove();
                    LOGGER.debug("Remove in entry set: {}.", expiringObject.getValue());
                    removedCounter.incrementAndGet();
                } else {
                    retVal.add(expiringObject.getValue());
                }
            }
        }
    }
    return retVal;
}
/**
 * Returns a snapshot of all live entries. Expired entries encountered while building the
 * snapshot are evicted from their segments. Removing via the returned set's iterator also
 * removes the entry from this map.
 *
 * <p>NOTE(review): the anonymous Map.Entry below does not override equals()/hashCode(), so
 * the HashSet stores entries by identity — contains()/duplicate semantics differ from a
 * standard Map.entrySet(); confirm no caller relies on entry equality.
 */
@Override
public Set<Map.Entry<K, V>> entrySet() {
    final Set<Map.Entry<K, V>> retVal = new HashSet<Map.Entry<K, V>>() {
        private static final long serialVersionUID = 3769009451779243542L;
        @Override
        public Iterator<java.util.Map.Entry<K, V>> iterator() {
            final Iterator<Map.Entry<K,V>> orig = super.iterator();
            // Wrap the iterator so a remove() on the snapshot is mirrored into the map.
            return new Iterator<Map.Entry<K,V>>() {
                private K currentKey = null;
                @Override
                public boolean hasNext() {
                    return orig.hasNext();
                }
                @Override
                public java.util.Map.Entry<K, V> next() {
                    java.util.Map.Entry<K, V> entry = orig.next();
                    currentKey = entry.getKey();
                    return entry;
                }
                @Override
                public void remove() {
                    orig.remove();
                    if(currentKey != null) {
                        // Propagate the removal to the backing ConcurrentCacheMap.
                        ConcurrentCacheMap.this.remove(currentKey);
                    }
                }
            };
        }
    };
    for (final CacheMap<K, ExpiringObject> segment : segments) {
        synchronized (segment) {
            final Iterator<Map.Entry<K, ExpiringObject>> iterator = segment.entrySet().iterator();
            while (iterator.hasNext()) {
                final Map.Entry<K, ExpiringObject> entry = iterator.next();
                if (entry.getValue().isExpired()) {
                    iterator.remove();
                    LOGGER.debug("Removed in entry set: {}.", entry.getValue().getValue());
                    removedCounter.incrementAndGet();
                } else {
                    // Expose an immutable, unwrapped view of the live entry.
                    retVal.add(new Map.Entry<K, V>() {
                        @Override
                        public K getKey() {
                            return entry.getKey();
                        }
                        @Override
                        public V getValue() {
                            return entry.getValue().getValue();
                        }
                        @Override
                        public V setValue(final V value) {
                            throw new UnsupportedOperationException("not supported");
                        }
                    });
                }
            }
        }
    }
    return retVal;
}
/**
 * Replaces the entry for the key only if it is currently live and equal to {@code oldValue}.
 *
 * @return True if the value was replaced.
 */
@Override
public boolean replace(final K key, final V oldValue, final V newValue) {
    // Wrapping also null-checks oldValue (the ExpiringObject constructor rejects null).
    final ExpiringObject expected = new ExpiringObject(oldValue, 0L);
    final ExpiringObject replacement = new ExpiringObject(newValue, System.currentTimeMillis());
    final CacheMap<K, ExpiringObject> segment = segment(key);
    final ExpiringObject current;
    boolean replaced = false;
    synchronized (segment) {
        current = segment.get(key);
        if (current != null && !current.isExpired() && expected.getValue().equals(current.getValue())) {
            segment.put(key, replacement);
            replaced = true;
        }
    }
    if (current != null) {
        // Evict the old entry if it turned out to be stale.
        expire(segment, key, current);
    }
    return replaced;
}
/**
 * Replaces the entry for the key only if a live mapping exists.
 *
 * @return The previous live value, or null if there was none (or it had expired).
 */
@Override
public V replace(final K key, final V value) {
    final CacheMap<K, ExpiringObject> segment = segment(key);
    final ExpiringObject replacement = new ExpiringObject(value, System.currentTimeMillis());
    final ExpiringObject current;
    synchronized (segment) {
        current = segment.get(key);
        if (current != null && !current.isExpired()) {
            segment.put(key, replacement);
        }
    }
    if (current == null || expire(segment, key, current)) {
        return null;
    }
    return current.getValue();
}
/**
 * Expires a key in a segment. If the key/value pair is expired, it is removed from the
 * segment (but only if the segment still holds that exact value, to avoid racing a
 * concurrent replacement).
 *
 * @param segment
 *            The segment
 * @param key
 *            The key
 * @param value
 *            The value
 * @return True if expired, otherwise false.
 */
private boolean expire(final CacheMap<K, ExpiringObject> segment, final K key, final ExpiringObject value) {
    if (!value.isExpired()) {
        return false;
    }
    synchronized (segment) {
        final ExpiringObject current = segment.get(key);
        if (current != null && current.equals(value)) {
            segment.remove(key);
            LOGGER.debug("Removed in expire: {}.", value.getValue());
            removedCounter.incrementAndGet();
        }
    }
    return true;
}
/**
 * Fast expiration of a whole segment. The values are ordered by access time, so the scan
 * can stop at the first non-expired object.
 *
 * @param segment
 *            The segment
 */
private void expireSegment(final CacheMap<K, ExpiringObject> segment) {
    for (final Iterator<ExpiringObject> it = segment.values().iterator(); it.hasNext();) {
        final ExpiringObject candidate = it.next();
        if (!candidate.isExpired()) {
            // Everything after this point is newer, hence still live.
            break;
        }
        it.remove();
        LOGGER.debug("Remove in expire segment: {}.", candidate.getValue());
        removedCounter.incrementAndGet();
    }
}
/**
 * @return The number of entries removed from the cache because they expired
 */
public int expiredCounter() {
    return removedCounter.get();
}
/**
 * Wrapper coupling a cached value with its last-access timestamp, from which expiration
 * is derived using the enclosing map's time-to-live.
 */
private class ExpiringObject {
    private final V value;
    private final long lastAccessTime;

    /**
     * Creates a new expiring object with the given time of access.
     *
     * @param value
     *            The value that is wrapped in this instance
     * @param lastAccessTimeMillis
     *            The time of access in milliseconds.
     */
    ExpiringObject(final V value, final long lastAccessTimeMillis) {
        if (value == null) {
            throw new IllegalArgumentException("An expiring object cannot be null.");
        }
        this.value = value;
        this.lastAccessTime = lastAccessTimeMillis;
    }

    /**
     * @return Whether the enclosing map's time-to-live has elapsed since last access.
     */
    public boolean isExpired() {
        final long ttlMillis = TimeUnit.MILLISECONDS.convert(timeToLiveSeconds, TimeUnit.SECONDS);
        return System.currentTimeMillis() >= lastAccessTime + ttlMillis;
    }

    /**
     * @return The wrapped value
     */
    public V getValue() {
        return value;
    }

    @Override
    public boolean equals(final Object obj) {
        // Equality intentionally ignores the timestamp; only the wrapped value matters.
        if (!(obj instanceof ConcurrentCacheMap.ExpiringObject)) {
            return false;
        }
        @SuppressWarnings("unchecked")
        final ExpiringObject other = (ExpiringObject) obj;
        return value.equals(other.value);
    }

    @Override
    public int hashCode() {
        return value.hashCode();
    }
}
}
| |
/**
* Copyright 2015-2017 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin.collector.kafka;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicReference;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ZookeeperConsumerConnector;
import zipkin.collector.Collector;
import zipkin.collector.CollectorComponent;
import zipkin.collector.CollectorMetrics;
import zipkin.collector.CollectorSampler;
import zipkin.internal.LazyCloseable;
import zipkin.storage.AsyncSpanConsumer;
import zipkin.storage.StorageComponent;
import static kafka.consumer.Consumer.createJavaConsumerConnector;
import static org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG;
import static zipkin.internal.Util.checkNotNull;
/**
* This collector polls a Kafka topic for messages that contain TBinaryProtocol big-endian encoded
* lists of spans. These spans are pushed to a {@link AsyncSpanConsumer#accept span consumer}.
*
* <p>This collector remains a Kafka 0.8.x consumer, while Zipkin systems update to 0.9+.
*/
public final class KafkaCollector implements CollectorComponent {
  /** Entry point: obtain a builder with sensible Kafka 0.8 consumer defaults. */
  public static Builder builder() {
    return new Builder();
  }
  /** Configuration including defaults needed to consume spans from a Kafka topic. */
  public static final class Builder implements CollectorComponent.Builder {
    final Properties properties = new Properties();
    Collector.Builder delegate = Collector.builder(KafkaCollector.class);
    CollectorMetrics metrics = CollectorMetrics.NOOP_METRICS;
    // Topic spans are consumed from; overridable via topic(String).
    String topic = "zipkin";
    // Number of consumer threads/streams; overridable via streams(int).
    int streams = 1;
    @Override public Builder storage(StorageComponent storage) {
      delegate.storage(storage);
      return this;
    }
    @Override public Builder sampler(CollectorSampler sampler) {
      delegate.sampler(sampler);
      return this;
    }
    @Override public Builder metrics(CollectorMetrics metrics) {
      // Scope the supplied metrics to the "kafka" transport before delegating.
      this.metrics = checkNotNull(metrics, "metrics").forTransport("kafka");
      delegate.metrics(this.metrics);
      return this;
    }
    /** Topic zipkin spans will be consumed from. Defaults to "zipkin" */
    public Builder topic(String topic) {
      this.topic = checkNotNull(topic, "topic");
      return this;
    }
    /** The zookeeper connect string, ex. 127.0.0.1:2181. No default */
    public Builder zookeeper(String zookeeper) {
      properties.put("zookeeper.connect", checkNotNull(zookeeper, "zookeeper"));
      return this;
    }
    /** The consumer group this process is consuming on behalf of. Defaults to "zipkin" */
    public Builder groupId(String groupId) {
      properties.put(GROUP_ID_CONFIG, checkNotNull(groupId, "groupId"));
      return this;
    }
    /** Count of threads/streams consuming the topic. Defaults to 1 */
    public Builder streams(int streams) {
      this.streams = streams;
      return this;
    }
    /** Maximum size of a message containing spans in bytes. Defaults to 1 MiB */
    public Builder maxMessageSize(int bytes) {
      properties.put("fetch.message.max.bytes", String.valueOf(bytes));
      return this;
    }
    /**
     * By default, a consumer will be built from properties derived from builder defaults,
     * as well "auto.offset.reset" -> "smallest". Any properties set here will override the
     * consumer config.
     *
     * <p>For example: Only consume spans since you connected by setting the below.
     * <pre>{@code
     * Map<String, String> overrides = new LinkedHashMap<>();
     * overrides.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "largest");
     * builder.overrides(overrides);
     * }</pre>
     *
     * @see org.apache.kafka.clients.consumer.ConsumerConfig
     */
    public final Builder overrides(Map<String, ?> overrides) {
      properties.putAll(checkNotNull(overrides, "overrides"));
      return this;
    }
    @Override public KafkaCollector build() {
      return new KafkaCollector(this);
    }
    Builder() {
      // Settings below correspond to "Old Consumer Configs"
      // http://kafka.apache.org/documentation.html
      properties.put(GROUP_ID_CONFIG, "zipkin");
      properties.put("fetch.message.max.bytes", String.valueOf(1024 * 1024));
      // Same default as zipkin-scala, and keeps tests from hanging
      properties.put(AUTO_OFFSET_RESET_CONFIG, "smallest");
    }
  }
  // Both members are lazy: nothing connects to Zookeeper/Kafka until start() or check().
  final LazyConnector connector;
  final LazyStreams streams;
  KafkaCollector(Builder builder) {
    connector = new LazyConnector(builder);
    streams = new LazyStreams(builder, connector);
  }
  /** Eagerly materializes the connector and the consumer stream threads. */
  @Override public KafkaCollector start() {
    connector.get();
    streams.get();
    return this;
  }
  /**
   * Health check: surfaces a connector construction failure or the most recent
   * stream-thread failure; otherwise reports OK.
   */
  @Override public CheckResult check() {
    try {
      connector.get(); // make sure the connector didn't throw
      CheckResult failure = streams.failure.get(); // check the streams didn't quit
      if (failure != null) return failure;
      return CheckResult.OK;
    } catch (RuntimeException e) {
      return CheckResult.failed(e);
    }
  }
  @Override
  public void close() throws IOException {
    // Stop accepting work first, then shut the connector down.
    streams.close();
    connector.close();
  }
  /** Lazily creates (and later shuts down) the Zookeeper-based consumer connector. */
  static final class LazyConnector extends LazyCloseable<ZookeeperConsumerConnector> {
    final ConsumerConfig config;
    LazyConnector(Builder builder) {
      this.config = new ConsumerConfig(builder.properties);
    }
    @Override protected ZookeeperConsumerConnector compute() {
      // NOTE(review): cast assumes createJavaConsumerConnector always returns a
      // ZookeeperConsumerConnector — confirm this holds across Kafka 0.8.x versions.
      return (ZookeeperConsumerConnector) createJavaConsumerConnector(config);
    }
    @Override
    public void close() {
      ZookeeperConsumerConnector maybeNull = maybeNull();
      if (maybeNull != null) maybeNull.shutdown();
    }
  }
  /** Lazily builds the thread pool that drains the Kafka streams into the collector. */
  static final class LazyStreams extends LazyCloseable<ExecutorService> {
    final int streams;
    final String topic;
    final Collector collector;
    final CollectorMetrics metrics;
    final LazyCloseable<ZookeeperConsumerConnector> connector;
    // Holds the most recent stream-thread failure, surfaced by KafkaCollector.check().
    final AtomicReference<CheckResult> failure = new AtomicReference<>();
    LazyStreams(Builder builder, LazyCloseable<ZookeeperConsumerConnector> connector) {
      this.streams = builder.streams;
      this.topic = builder.topic;
      this.collector = builder.delegate.build();
      this.metrics = builder.metrics;
      this.connector = connector;
    }
    @Override protected ExecutorService compute() {
      ExecutorService pool = streams == 1
          ? Executors.newSingleThreadExecutor()
          : Executors.newFixedThreadPool(streams);
      // Ask Kafka for `streams` partitions of the single topic; one processor per stream.
      Map<String, Integer> topicCountMap = new LinkedHashMap<>(1);
      topicCountMap.put(topic, streams);
      for (KafkaStream<byte[], byte[]> stream : connector.get().createMessageStreams(topicCountMap)
          .get(topic)) {
        pool.execute(guardFailures(new KafkaStreamProcessor(stream, collector, metrics)));
      }
      return pool;
    }
    /** Wraps a processor so an escaping RuntimeException is recorded instead of lost. */
    Runnable guardFailures(final Runnable delegate) {
      return new Runnable() {
        @Override public void run() {
          try {
            delegate.run();
          } catch (RuntimeException e) {
            failure.set(CheckResult.failed(e));
          }
        }
      };
    }
    @Override
    public void close() {
      // NOTE(review): shutdown() does not interrupt threads blocked on the Kafka stream
      // iterator; presumably the connector shutdown in KafkaCollector.close() unblocks
      // them — confirm ordering is always streams.close() then connector.close().
      ExecutorService maybeNull = maybeNull();
      if (maybeNull != null) maybeNull.shutdown();
    }
  }
}
| |
package eu.ehealth.controllers;
import java.util.ArrayList;
import org.zkoss.zk.ui.Executions;
import org.zkoss.zk.ui.Session;
import org.zkoss.zk.ui.Sessions;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zk.ui.event.EventListener;
import org.zkoss.util.resource.Labels;
import org.zkoss.zul.Button;
import org.zkoss.zul.Grid;
import org.zkoss.zul.Hbox;
import org.zkoss.zul.Label;
import org.zkoss.zul.Row;
import org.zkoss.zul.Rows;
import org.zkoss.zul.Textbox;
import org.zkoss.zul.Window;
import eu.ehealth.ErrorDictionary;
import eu.ehealth.SystemDictionary;
import eu.ehealth.ws_client.StorageComponentImpl;
import eu.ehealth.ws_client.xsd.Carer;
import eu.ehealth.ws_client.xsd.Clinician;
import eu.ehealth.ws_client.xsd.Consulter;
import eu.ehealth.ws_client.xsd.GeneralPractitioner;
import eu.ehealth.ws_client.xsd.OperationResult;
import eu.ehealth.ws_client.xsd.Patient;
import eu.ehealth.ws_client.xsd.PersonData;
import eu.ehealth.ws_client.xsd.SocialWorker;
import eu.ehealth.ws_client.xsd.SocioDemographicData;
import eu.ehealth.ws_client.xsd.User;
/**
* This class handles the creation and the events of the Patient questionnaire.
*
* @author Xavi Sarda (Atos Origin)
*/
// NOTE(review): class name is misspelled ("Pattient") but renaming would break external
// references; likewise "practicioner" and "UNKOW_ERROR" below are external identifiers.
public class PattientControllerWindow extends SDFormControllerWindow
{
	private static final long serialVersionUID = 3014122995824061686L;
	// Modal carer-selection window, created lazily by createDialog().
	private CarerListWindowController clist = null;
	// Fields below hold the existing patient's data; only populated by the update constructor.
	private String currentresp = null;
	private SocialWorker currentsocialworker = null;
	private Consulter currentconsulter = null;
	private GeneralPractitioner currentgeneralpracticioner = null;
	private Carer currentcarers = null;
	/**
	 * Default constructor. Builds an empty "create patient" form.
	 */
	public PattientControllerWindow()
	{
		isNewUser = true;
		this.buildForm(true);
	}
	/**
	 * Constructor to change the create form into an update form.
	 *
	 * @param current Patient to be updated
	 */
	public PattientControllerWindow(Patient current)
	{
		isNewUser = false;
		this.currentid = current.getID();
		this.currentdata = current.getPersonData();
		this.currentsd = current.getSDData();
		this.currentresp = current.getResponsibleClinicianID();
		SystemDictionary.webguiLog("TRACE", "Social Worker: " + current.getSocialWorker().getName());
		this.currentsocialworker = current.getSocialWorker();
		SystemDictionary.webguiLog("TRACE", "Consulter: " + current.getConsulterInCharge().getName());
		this.currentconsulter = current.getConsulterInCharge();
		SystemDictionary.webguiLog("TRACE", "General Practicioner: " + current.getGeneralPractitioner().getName());
		this.currentgeneralpracticioner = current.getGeneralPractitioner();
		this.currentcarers = current.getPatientCarer();
		// Build the empty form first, then pre-fill every section with the patient's data.
		this.buildForm(false);
		this.addPersonFieldsValues();
		this.addAddressFieldsValues();
		this.addCommunicationFieldsValues();
		this.addSocioDemographicDataFieldsValue();
		this.addResponsibleClinicianFieldValues();
		this.addCarerFieldValues();
		this.addSocialWorkerConsulterAndGPFieldsValues();
		this.appendChild(this.createUpdateButton());
		// Username cannot be changed on update; hide its row.
		this.getFellow("pat_uname").getParent().setVisible(false);
	}
	/**
	 * Build form instructions to be executed.
	 *
	 * @param newPatient true when building a creation form (shows username field)
	 */
	public void buildForm(boolean newPatient)
	{
		this.addErrorBox();
		this.addResponsibleClinicianField();
		this.addPersonFields(newPatient);
		this.addAddressFields();
		this.addCommunicationFields();
		this.addSocioDemographicDataFields();
		this.addCarerField();
		this.addSocialWorkerConsulterAndGPFields();
	}
	/**
	 * Submit function: Used on the view layer to create a new Patient (using
	 * the StorageComponentImpl).
	 *
	 * @param newpatient true to create, false to update the current patient
	 */
	public void sendPatient(boolean newpatient)
	{
		if (!validateForm())
		{
			return;
		}
		OperationResult result = null;
		String resClinic = ((Textbox) getFellow("pat_respo")).getValue();
		String carerId = ((Textbox) getFellow("pat_carid")).getValue();
		// Getting information from form fields
		SocioDemographicData sdData = this.getSocioDemographicData();
		PersonData personData = this.getPersonData();
		SocialWorker socialw = this.getSocialWorkerData();
		Consulter consulter = this.getConsulterData();
		GeneralPractitioner gralprac = this.getGeneralPracticionerData();
		try
		{
			StorageComponentImpl proxy = SystemDictionary.getSCProxy();
			Session ses = Sessions.getCurrent();
			String id = (String) ses.getAttribute("userid");
			Carer car2set = proxy.getCarer(carerId, id);
			SystemDictionary.webguiLog("DEBUG", "Social worker: " + socialw.getName());
			SystemDictionary.webguiLog("DEBUG", "Consulter: " + consulter.getName());
			SystemDictionary.webguiLog("DEBUG", "General practicioner: " + gralprac.getName());
			Patient patient = new Patient("", personData, sdData, resClinic, socialw, consulter, gralprac, car2set);
			if (newpatient)
			{
				result = proxy.createPatient(patient, id);
				SystemDictionary.webguiLog("INFO", "Patient ID: " + result.getCode());
				String username = this.getUsername();
				User user = createNewUser(SystemDictionary.USERTYPE_PATIENT, result.getCode(), username);
				result = proxy.createUser(user);
				if (!result.getDescription().equals("ok"))
				{
					// User creation failed: show the error and roll back the patient record.
					SystemDictionary.webguiLog("TRACE", "Error creating user");
					Window win = (Window) getFellow("internalformerror");
					((Label) win.getFellow("errorlbl")).setValue("Username not valid");
					getFellow("internalformerror").setVisible(true);
					SystemDictionary.webguiLog("TRACE", "Deleting Patient...");
					OperationResult newresult = proxy.deletePatient(user.getPersonID(), id);
					SystemDictionary.webguiLog("TRACE", "Delete Patient result: " + newresult.getCode());
					return;
				}
				Executions.getCurrent().sendRedirect("/patients/index.zul");
			}
			else
			{
				patient.setID(this.currentid);
				result = proxy.updatePatient(patient, id);
				Executions.getCurrent().sendRedirect("/patients/index.zul");
			}
		}
		catch (Exception e)
		{
			// NOTE(review): the exception is discarded without logging and the redirect
			// goes to /carers/ from a patient form — confirm both are intended.
			ErrorDictionary.redirectWithError("/carers/?error=" + ErrorDictionary.UNKOW_ERROR);
		}
	}
	/**
	 * Creates a modal window to select a primary carer for the patient.
	 * The window is created once and re-shown on later calls.
	 *
	 * @throws InterruptedException
	 */
	public void createDialog() throws InterruptedException
	{
		if (this.clist == null)
		{
			this.clist = new CarerListWindowController(this);
			this.appendChild(this.clist);
			this.clist.doModal();
		}
		else
		{
			this.clist.doModal();
		}
	}
	/**
	 * This method creates a clinician list for the patient form. It is also in
	 * charge of attaching it to the form and showing it in a modal mode.
	 * Does nothing if the dialog already exists.
	 *
	 * @throws InterruptedException
	 */
	public void createClinicianDialog() throws InterruptedException
	{
		if (this.getFellowIfAny("rclinlistwin") == null)
		{
			ClinicianListForPatients respolist =
				(ClinicianListForPatients) Executions.getCurrent().createComponents("/patients/clinlist.zul", this, null);
			respolist.doModal();
		}
	}
	/**
	 * This method is used to set Carer form fields with information from the
	 * modal dialog fields.
	 *
	 * @param carerID ID of the selected carer (stored in a hidden textbox)
	 * @param carerName display name of the selected carer
	 */
	public void setCarer(String carerID, String carerName)
	{
		((Textbox) this.getFellow("pat_carid")).setValue(carerID);
		((Textbox) this.getFellow("pat_carname")).setValue(carerName);
	}
	/**
	 * This method is used to set responsible clinician form fields with
	 * information from the modal dialog fields.
	 *
	 * @param clinID Id of the responsible clinician
	 * @param clinName Name of the responsible clinician
	 */
	public void setResponsibleClinician(String clinID, String clinName)
	{
		((Textbox) this.getFellow("pat_respo")).setValue(clinID);
		((Textbox) this.getFellow("pat_respo_lbl")).setValue(clinName);
	}
	/**
	 * Protected function to add a responsible clinician field to the window.
	 * Clicking the read-only textbox opens the clinician selection dialog;
	 * the clinician ID itself is kept in a hidden textbox ("pat_respo").
	 */
	protected void addResponsibleClinicianField()
	{
		String respo = Labels.getLabel("patients.form.responsible");
		ArrayList<SimpleFieldData> rowsA = new ArrayList<SimpleFieldData>();
		rowsA.add(new SimpleFieldData(respo, "pat_respo_lbl"));
		Grid pgrid = new Grid();
		pgrid.setSclass("grid");
		this.appendColumns(pgrid);
		Rows rows = new Rows();
		Row rowhidf = new Row();
		Label lbl_ins = new Label(respo);
		rowhidf.appendChild(lbl_ins);
		Hbox hbox1 = new Hbox();
		Textbox tbox1 = new Textbox();
		tbox1.setId("pat_respo_lbl");
		tbox1.setReadonly(true);
		tbox1.addEventListener("onClick", new EventListener() {
			public void onEvent(Event arg0) throws Exception
			{
				createClinicianDialog();
			}
		});
		Textbox tbox2 = new Textbox();
		tbox2.setVisible(false);
		tbox2.setId("pat_respo");
		hbox1.appendChild(tbox1);
		hbox1.appendChild(tbox2);
		rowhidf.appendChild(hbox1);
		rows.appendChild(rowhidf);
		pgrid.appendChild(rows);
		this.appendChild(pgrid);
	}
	/**
	 * This method sets the responsible clinician values when updating a form.
	 * Falls back to "Not available" if the clinician cannot be fetched.
	 */
	protected void addResponsibleClinicianFieldValues()
	{
		String id = (String) Sessions.getCurrent().getAttribute("userid");
		StorageComponentImpl proxy = new StorageComponentImpl();
		try
		{
			Clinician clinician = proxy.getClinician(this.currentresp, id);
			((Textbox) getFellow("pat_respo")).setValue(this.currentresp);
			((Textbox) getFellow("pat_respo_lbl")).setValue(clinician.toString());
		}
		catch (Exception re)
		{
			// Best-effort: leave the hidden ID unset and show a placeholder label.
			((Textbox) getFellow("pat_respo_lbl")).setValue("Not available");
		}
	}
	/**
	 * This method appends a carer field to the window. Button will open a modal
	 * dialog to select a carer.
	 *
	 * @see CarerListWindowController
	 */
	protected void addCarerField()
	{
		String carertitle = Labels.getLabel("patients.carer.title");
		String carerlabel = Labels.getLabel("patients.carer.label");
		Grid pgrid = new Grid();
		pgrid.setSclass("grid");
		this.appendColumns(pgrid);
		Rows rows = new Rows();
		this.appendSubFormTitleRow(carertitle, rows);
		Row row = new Row();
		Label lab = new Label();
		lab.setValue(carerlabel);
		row.appendChild(lab);
		Hbox hbox01 = new Hbox();
		Textbox tbox = new Textbox();
		tbox.setId("pat_carname");
		tbox.setReadonly(true);
		// Hidden textbox carries the selected carer's ID for form submission.
		Textbox tbox2 = new Textbox();
		tbox2.setId("pat_carid");
		tbox2.setVisible(false);
		tbox2.setReadonly(true);
		tbox.addEventListener("onClick", new EventListener() {
			public void onEvent(Event arg0) throws Exception
			{
				createDialog();
			}
		});
		hbox01.appendChild(tbox);
		hbox01.appendChild(tbox2);
		row.appendChild(hbox01);
		rows.appendChild(row);
		pgrid.appendChild(rows);
		this.appendChild(pgrid);
	}
	/**
	 * This method sets the carer values when updating a patient.
	 */
	protected void addCarerFieldValues()
	{
		((Textbox) getFellow("pat_carid")).setValue(this.currentcarers.getID());
		((Textbox) getFellow("pat_carname")).setValue(
			this.currentcarers.getPersonData().getName() + ", " + this.currentcarers.getPersonData().getSurname());
	}
	/**
	 * This method adds the social worker, consulter and general practitioner
	 * fields in the form (three grids, each with name/e-mail/phone textboxes).
	 */
	protected void addSocialWorkerConsulterAndGPFields()
	{
		Grid pgrid1 = new Grid();
		Grid pgrid2 = new Grid();
		Grid pgrid3 = new Grid();
		pgrid1.setSclass("grid");
		pgrid2.setSclass("grid");
		pgrid3.setSclass("grid");
		this.appendColumns(pgrid1);
		this.appendColumns(pgrid2);
		this.appendColumns(pgrid3);
		Rows rows1 = new Rows();
		Rows rows2 = new Rows();
		Rows rows3 = new Rows();
		this.appendSubFormTitleRow("Social Workrer Info", rows1);
		this.appendSubFormTitleRow("Consulter Info", rows2);
		this.appendSubFormTitleRow("General Practicioner Info", rows3);
		ArrayList<SimpleFieldData> rowsA = new ArrayList<SimpleFieldData>();
		rowsA.add(new SimpleFieldData("Name", "pat_swname"));
		rowsA.add(new SimpleFieldData("E-mail", "pat_swmail"));
		rowsA.add(new SimpleFieldData("Phone", "pat_swphone"));
		ArrayList<SimpleFieldData> rowsB = new ArrayList<SimpleFieldData>();
		rowsB.add(new SimpleFieldData("Name", "pat_consname"));
		rowsB.add(new SimpleFieldData("E-mail", "pat_consmail"));
		rowsB.add(new SimpleFieldData("Phone", "pat_consphone"));
		ArrayList<SimpleFieldData> rowsC = new ArrayList<SimpleFieldData>();
		rowsC.add(new SimpleFieldData("Name", "pat_gpname"));
		rowsC.add(new SimpleFieldData("E-mail", "pat_gpmail"));
		rowsC.add(new SimpleFieldData("Phone", "pat_gpphone"));
		this.appendTextboxFields(rowsA, rows1);
		this.appendTextboxFields(rowsB, rows2);
		this.appendTextboxFields(rowsC, rows3);
		pgrid1.appendChild(rows1);
		pgrid2.appendChild(rows2);
		pgrid3.appendChild(rows3);
		this.appendChild(pgrid1);
		this.appendChild(pgrid2);
		this.appendChild(pgrid3);
	}
	/**
	 * This method adds the social worker, consulter and general practitioner
	 * values when updating a patient.
	 */
	protected void addSocialWorkerConsulterAndGPFieldsValues()
	{
		SystemDictionary.webguiLog("DEBUG", "this.currentsocialworker.getName()");
		((Textbox) getFellow("pat_swname")).setValue(this.currentsocialworker.getName());
		((Textbox) getFellow("pat_swmail")).setValue(this.currentsocialworker.getEmail());
		((Textbox) getFellow("pat_swphone")).setValue(this.currentsocialworker.getPhone());
		SystemDictionary.webguiLog("DEBUG", "this.currentconsulter.getName()");
		((Textbox) getFellow("pat_consname")).setValue(this.currentconsulter.getName());
		((Textbox) getFellow("pat_consmail")).setValue(this.currentconsulter.getEmail());
		((Textbox) getFellow("pat_consphone")).setValue(this.currentconsulter.getPhone());
		SystemDictionary.webguiLog("DEBUG", "this.currentgeneralpracticioner.getName()");
		((Textbox) getFellow("pat_gpname")).setValue(this.currentgeneralpracticioner.getName());
		((Textbox) getFellow("pat_gpmail")).setValue(this.currentgeneralpracticioner.getEmail());
		((Textbox) getFellow("pat_gpphone")).setValue(this.currentgeneralpracticioner.getPhone());
	}
	/**
	 * Method to retrieve a SocialWorker from the form.
	 *
	 * @return SocialWorker defined on the patient form
	 */
	protected SocialWorker getSocialWorkerData()
	{
		String name = ((Textbox) getFellow("pat_swname")).getValue();
		String mail = ((Textbox) getFellow("pat_swmail")).getValue();
		String phone = ((Textbox) getFellow("pat_swphone")).getValue();
		return new SocialWorker(name, phone, mail);
	}
	/**
	 * Method to retrieve the consulter from the form.
	 *
	 * @return Consulter defined on the patient form
	 */
	protected Consulter getConsulterData()
	{
		String name = ((Textbox) getFellow("pat_consname")).getValue();
		String mail = ((Textbox) getFellow("pat_consmail")).getValue();
		String phone = ((Textbox) getFellow("pat_consphone")).getValue();
		return new Consulter(name, phone, mail);
	}
	/**
	 * Method to retrieve a GeneralPractitioner from the form.
	 *
	 * @return GeneralPractitioner defined on the patient form
	 */
	protected GeneralPractitioner getGeneralPracticionerData()
	{
		String name = ((Textbox) getFellow("pat_gpname")).getValue();
		String mail = ((Textbox) getFellow("pat_gpmail")).getValue();
		String phone = ((Textbox) getFellow("pat_gpphone")).getValue();
		return new GeneralPractitioner(name, phone, mail);
	}
	/**
	 * Method in charge of creating update button for patients updates
	 * operations.
	 *
	 * @return Button to be added to the form
	 */
	public Button createUpdateButton()
	{
		Button btn = new Button();
		String text = Labels.getLabel("patients.update.title");
		btn.setLabel(text);
		btn.addEventListener("onClick", new EventListener() {
			public void onEvent(Event arg0) throws Exception
			{
				sendPatient(false);
			}
		});
		return btn;
	}
	/**
	 * Validates the form: common fields plus the patient-specific requirements
	 * (a responsible clinician and a carer must be selected). On failure the
	 * internal error window is shown with an explanatory message.
	 *
	 * @return true if the form can be submitted
	 */
	private boolean validateForm() {
		try {
			// check common fields
			if (!validateFormCommonFields())
				return false;
			if (((Textbox) getFellow("pat_respo")).getValue() == null || ((Textbox) getFellow("pat_respo")).getValue().trim().equals(""))
			{
				Window win = (Window) getFellow("internalformerror");
				((Label) win.getFellow("errorlbl")).setValue("You must select some clinician");
				getFellow("internalformerror").setVisible(true);
				return false;
			}
			else if (((Textbox) getFellow("pat_carid")).getValue() == null || ((Textbox) getFellow("pat_carid")).getValue().trim().equals(""))
			{
				Window win = (Window) getFellow("internalformerror");
				((Label) win.getFellow("errorlbl")).setValue("You must select some carer");
				getFellow("internalformerror").setVisible(true);
				return false;
			}
			return true;
		}
		catch (Exception e)
		{
			e.printStackTrace();
			SystemDictionary.webguiLog("ERROR", e.getMessage());
			String errorMsj = e.getMessage();
			try {
				Window win = (Window) getFellow("internalformerror");
				((Label) win.getFellow("errorlbl")).setValue(errorMsj);
				getFellow("internalformerror").setVisible(true);
				((Label) win.getFellow("errorlbl")).setFocus(true);
				return false;
			}
			// Best-effort: the error window itself may be missing; the outer false return applies.
			catch (Exception ex) {}
			return false;
		}
	}
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.core.ssomgt.model;
import com.liferay.portal.kernel.bean.AutoEscapeBeanHandler;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.kernel.util.ProxyUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.impl.BaseModelImpl;
import com.liferay.portal.util.PortalUtil;
import org.oep.core.ssomgt.service.AppRoleLocalServiceUtil;
import org.oep.core.ssomgt.service.ClpSerializer;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* @author trungdk
*/
public class AppRoleClp extends BaseModelImpl<AppRole> implements AppRole {
public AppRoleClp() {
}
@Override
public Class<?> getModelClass() {
return AppRole.class;
}
@Override
public String getModelClassName() {
return AppRole.class.getName();
}
@Override
public long getPrimaryKey() {
return _appRoleId;
}
@Override
public void setPrimaryKey(long primaryKey) {
setAppRoleId(primaryKey);
}
@Override
public Serializable getPrimaryKeyObj() {
return _appRoleId;
}
@Override
public void setPrimaryKeyObj(Serializable primaryKeyObj) {
setPrimaryKey(((Long)primaryKeyObj).longValue());
}
@Override
public Map<String, Object> getModelAttributes() {
Map<String, Object> attributes = new HashMap<String, Object>();
attributes.put("appRoleId", getAppRoleId());
attributes.put("userId", getUserId());
attributes.put("groupId", getGroupId());
attributes.put("companyId", getCompanyId());
attributes.put("createDate", getCreateDate());
attributes.put("applicationId", getApplicationId());
attributes.put("roleCode", getRoleCode());
attributes.put("roleName", getRoleName());
return attributes;
}
@Override
public void setModelAttributes(Map<String, Object> attributes) {
Long appRoleId = (Long)attributes.get("appRoleId");
if (appRoleId != null) {
setAppRoleId(appRoleId);
}
Long userId = (Long)attributes.get("userId");
if (userId != null) {
setUserId(userId);
}
Long groupId = (Long)attributes.get("groupId");
if (groupId != null) {
setGroupId(groupId);
}
Long companyId = (Long)attributes.get("companyId");
if (companyId != null) {
setCompanyId(companyId);
}
Date createDate = (Date)attributes.get("createDate");
if (createDate != null) {
setCreateDate(createDate);
}
Long applicationId = (Long)attributes.get("applicationId");
if (applicationId != null) {
setApplicationId(applicationId);
}
String roleCode = (String)attributes.get("roleCode");
if (roleCode != null) {
setRoleCode(roleCode);
}
String roleName = (String)attributes.get("roleName");
if (roleName != null) {
setRoleName(roleName);
}
}
@Override
public long getAppRoleId() {
return _appRoleId;
}
@Override
public void setAppRoleId(long appRoleId) {
_appRoleId = appRoleId;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setAppRoleId", long.class);
method.invoke(_appRoleRemoteModel, appRoleId);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getUserId() {
return _userId;
}
@Override
public void setUserId(long userId) {
_userId = userId;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setUserId", long.class);
method.invoke(_appRoleRemoteModel, userId);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getUserUuid() throws SystemException {
return PortalUtil.getUserValue(getUserId(), "uuid", _userUuid);
}
@Override
public void setUserUuid(String userUuid) {
_userUuid = userUuid;
}
@Override
public long getGroupId() {
return _groupId;
}
@Override
public void setGroupId(long groupId) {
_groupId = groupId;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setGroupId", long.class);
method.invoke(_appRoleRemoteModel, groupId);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getCompanyId() {
return _companyId;
}
@Override
public void setCompanyId(long companyId) {
_companyId = companyId;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setCompanyId", long.class);
method.invoke(_appRoleRemoteModel, companyId);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public Date getCreateDate() {
return _createDate;
}
@Override
public void setCreateDate(Date createDate) {
_createDate = createDate;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setCreateDate", Date.class);
method.invoke(_appRoleRemoteModel, createDate);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public long getApplicationId() {
return _applicationId;
}
@Override
public void setApplicationId(long applicationId) {
_applicationId = applicationId;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setApplicationId", long.class);
method.invoke(_appRoleRemoteModel, applicationId);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getRoleCode() {
return _roleCode;
}
@Override
public void setRoleCode(String roleCode) {
_roleCode = roleCode;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setRoleCode", String.class);
method.invoke(_appRoleRemoteModel, roleCode);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
@Override
public String getRoleName() {
return _roleName;
}
@Override
public void setRoleName(String roleName) {
_roleName = roleName;
if (_appRoleRemoteModel != null) {
try {
Class<?> clazz = _appRoleRemoteModel.getClass();
Method method = clazz.getMethod("setRoleName", String.class);
method.invoke(_appRoleRemoteModel, roleName);
}
catch (Exception e) {
throw new UnsupportedOperationException(e);
}
}
}
public BaseModel<?> getAppRoleRemoteModel() {
return _appRoleRemoteModel;
}
public void setAppRoleRemoteModel(BaseModel<?> appRoleRemoteModel) {
_appRoleRemoteModel = appRoleRemoteModel;
}
public Object invokeOnRemoteModel(String methodName,
Class<?>[] parameterTypes, Object[] parameterValues)
throws Exception {
Object[] remoteParameterValues = new Object[parameterValues.length];
for (int i = 0; i < parameterValues.length; i++) {
if (parameterValues[i] != null) {
remoteParameterValues[i] = ClpSerializer.translateInput(parameterValues[i]);
}
}
Class<?> remoteModelClass = _appRoleRemoteModel.getClass();
ClassLoader remoteModelClassLoader = remoteModelClass.getClassLoader();
Class<?>[] remoteParameterTypes = new Class[parameterTypes.length];
for (int i = 0; i < parameterTypes.length; i++) {
if (parameterTypes[i].isPrimitive()) {
remoteParameterTypes[i] = parameterTypes[i];
}
else {
String parameterTypeName = parameterTypes[i].getName();
remoteParameterTypes[i] = remoteModelClassLoader.loadClass(parameterTypeName);
}
}
Method method = remoteModelClass.getMethod(methodName,
remoteParameterTypes);
Object returnValue = method.invoke(_appRoleRemoteModel,
remoteParameterValues);
if (returnValue != null) {
returnValue = ClpSerializer.translateOutput(returnValue);
}
return returnValue;
}
@Override
public void persist() throws SystemException {
if (this.isNew()) {
AppRoleLocalServiceUtil.addAppRole(this);
}
else {
AppRoleLocalServiceUtil.updateAppRole(this);
}
}
@Override
public AppRole toEscapedModel() {
return (AppRole)ProxyUtil.newProxyInstance(AppRole.class.getClassLoader(),
new Class[] { AppRole.class }, new AutoEscapeBeanHandler(this));
}
@Override
public Object clone() {
AppRoleClp clone = new AppRoleClp();
clone.setAppRoleId(getAppRoleId());
clone.setUserId(getUserId());
clone.setGroupId(getGroupId());
clone.setCompanyId(getCompanyId());
clone.setCreateDate(getCreateDate());
clone.setApplicationId(getApplicationId());
clone.setRoleCode(getRoleCode());
clone.setRoleName(getRoleName());
return clone;
}
@Override
public int compareTo(AppRole appRole) {
long primaryKey = appRole.getPrimaryKey();
if (getPrimaryKey() < primaryKey) {
return -1;
}
else if (getPrimaryKey() > primaryKey) {
return 1;
}
else {
return 0;
}
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof AppRoleClp)) {
return false;
}
AppRoleClp appRole = (AppRoleClp)obj;
long primaryKey = appRole.getPrimaryKey();
if (getPrimaryKey() == primaryKey) {
return true;
}
else {
return false;
}
}
public Class<?> getClpSerializerClass() {
return _clpSerializerClass;
}
@Override
public int hashCode() {
return (int)getPrimaryKey();
}
@Override
public String toString() {
StringBundler sb = new StringBundler(17);
sb.append("{appRoleId=");
sb.append(getAppRoleId());
sb.append(", userId=");
sb.append(getUserId());
sb.append(", groupId=");
sb.append(getGroupId());
sb.append(", companyId=");
sb.append(getCompanyId());
sb.append(", createDate=");
sb.append(getCreateDate());
sb.append(", applicationId=");
sb.append(getApplicationId());
sb.append(", roleCode=");
sb.append(getRoleCode());
sb.append(", roleName=");
sb.append(getRoleName());
sb.append("}");
return sb.toString();
}
@Override
public String toXmlString() {
StringBundler sb = new StringBundler(28);
sb.append("<model><model-name>");
sb.append("org.oep.core.ssomgt.model.AppRole");
sb.append("</model-name>");
sb.append(
"<column><column-name>appRoleId</column-name><column-value><![CDATA[");
sb.append(getAppRoleId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>userId</column-name><column-value><![CDATA[");
sb.append(getUserId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>groupId</column-name><column-value><![CDATA[");
sb.append(getGroupId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>companyId</column-name><column-value><![CDATA[");
sb.append(getCompanyId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>createDate</column-name><column-value><![CDATA[");
sb.append(getCreateDate());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>applicationId</column-name><column-value><![CDATA[");
sb.append(getApplicationId());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>roleCode</column-name><column-value><![CDATA[");
sb.append(getRoleCode());
sb.append("]]></column-value></column>");
sb.append(
"<column><column-name>roleName</column-name><column-value><![CDATA[");
sb.append(getRoleName());
sb.append("]]></column-value></column>");
sb.append("</model>");
return sb.toString();
}
private long _appRoleId;
private long _userId;
private String _userUuid;
private long _groupId;
private long _companyId;
private Date _createDate;
private long _applicationId;
private String _roleCode;
private String _roleName;
private BaseModel<?> _appRoleRemoteModel;
private Class<?> _clpSerializerClass = org.oep.core.ssomgt.service.ClpSerializer.class;
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.inspector.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
*/
public class RemoveAttributesFromFindingsRequest extends
AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ARNs specifying the findings that you want to remove attributes from.
* </p>
*/
private java.util.List<String> findingArns;
/**
* <p>
* The array of attribute keys that you want to remove from specified
* findings.
* </p>
*/
private java.util.List<String> attributeKeys;
/**
* <p>
* The ARNs specifying the findings that you want to remove attributes from.
* </p>
*
* @return The ARNs specifying the findings that you want to remove
* attributes from.
*/
public java.util.List<String> getFindingArns() {
return findingArns;
}
/**
* <p>
* The ARNs specifying the findings that you want to remove attributes from.
* </p>
*
* @param findingArns
* The ARNs specifying the findings that you want to remove
* attributes from.
*/
public void setFindingArns(java.util.Collection<String> findingArns) {
if (findingArns == null) {
this.findingArns = null;
return;
}
this.findingArns = new java.util.ArrayList<String>(findingArns);
}
/**
* <p>
* The ARNs specifying the findings that you want to remove attributes from.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setFindingArns(java.util.Collection)} or
* {@link #withFindingArns(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param findingArns
* The ARNs specifying the findings that you want to remove
* attributes from.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveAttributesFromFindingsRequest withFindingArns(
String... findingArns) {
if (this.findingArns == null) {
setFindingArns(new java.util.ArrayList<String>(findingArns.length));
}
for (String ele : findingArns) {
this.findingArns.add(ele);
}
return this;
}
/**
* <p>
* The ARNs specifying the findings that you want to remove attributes from.
* </p>
*
* @param findingArns
* The ARNs specifying the findings that you want to remove
* attributes from.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveAttributesFromFindingsRequest withFindingArns(
java.util.Collection<String> findingArns) {
setFindingArns(findingArns);
return this;
}
/**
* <p>
* The array of attribute keys that you want to remove from specified
* findings.
* </p>
*
* @return The array of attribute keys that you want to remove from
* specified findings.
*/
public java.util.List<String> getAttributeKeys() {
return attributeKeys;
}
/**
* <p>
* The array of attribute keys that you want to remove from specified
* findings.
* </p>
*
* @param attributeKeys
* The array of attribute keys that you want to remove from specified
* findings.
*/
public void setAttributeKeys(java.util.Collection<String> attributeKeys) {
if (attributeKeys == null) {
this.attributeKeys = null;
return;
}
this.attributeKeys = new java.util.ArrayList<String>(attributeKeys);
}
/**
* <p>
* The array of attribute keys that you want to remove from specified
* findings.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setAttributeKeys(java.util.Collection)} or
* {@link #withAttributeKeys(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param attributeKeys
* The array of attribute keys that you want to remove from specified
* findings.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveAttributesFromFindingsRequest withAttributeKeys(
String... attributeKeys) {
if (this.attributeKeys == null) {
setAttributeKeys(new java.util.ArrayList<String>(
attributeKeys.length));
}
for (String ele : attributeKeys) {
this.attributeKeys.add(ele);
}
return this;
}
/**
* <p>
* The array of attribute keys that you want to remove from specified
* findings.
* </p>
*
* @param attributeKeys
* The array of attribute keys that you want to remove from specified
* findings.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public RemoveAttributesFromFindingsRequest withAttributeKeys(
java.util.Collection<String> attributeKeys) {
setAttributeKeys(attributeKeys);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getFindingArns() != null)
sb.append("FindingArns: " + getFindingArns() + ",");
if (getAttributeKeys() != null)
sb.append("AttributeKeys: " + getAttributeKeys());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof RemoveAttributesFromFindingsRequest == false)
return false;
RemoveAttributesFromFindingsRequest other = (RemoveAttributesFromFindingsRequest) obj;
if (other.getFindingArns() == null ^ this.getFindingArns() == null)
return false;
if (other.getFindingArns() != null
&& other.getFindingArns().equals(this.getFindingArns()) == false)
return false;
if (other.getAttributeKeys() == null ^ this.getAttributeKeys() == null)
return false;
if (other.getAttributeKeys() != null
&& other.getAttributeKeys().equals(this.getAttributeKeys()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getFindingArns() == null) ? 0 : getFindingArns().hashCode());
hashCode = prime
* hashCode
+ ((getAttributeKeys() == null) ? 0 : getAttributeKeys()
.hashCode());
return hashCode;
}
@Override
public RemoveAttributesFromFindingsRequest clone() {
return (RemoveAttributesFromFindingsRequest) super.clone();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tamaya.core.internal.inject;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.tamaya.ConfigException;
import org.apache.tamaya.Configuration;
import org.apache.tamaya.PropertyAdapter;
import org.apache.tamaya.PropertyAdapters;
import org.apache.tamaya.annot.*;
import org.apache.tamaya.core.internal.Utils;
import java.lang.reflect.Field;
import java.util.*;
import java.util.stream.Collectors;
/**
* Small class that contains and manages all information anc access to a configured field and a concrete instance of
* it (referenced by a weak reference). It also implements all aspects of value filtering, conversiong any applying the
* final value by reflection.
* Created by Anatole on 01.10.2014.
*/
@SuppressWarnings("UnusedDeclaration")
public class ConfiguredField {
private Logger LOG = LogManager.getLogger(ConfiguredField.class);
/**
* The configured field instance.
*/
private Field annotatedField;
/**
* Models a configured field and provides mechanisms for injection.
*
* @param field the field instance.
*/
public ConfiguredField(Field field) {
Objects.requireNonNull(field);
this.annotatedField = field;
}
/**
* Evaluate the initial value from the configuration and apply it to the field.
*
* @param target the target instance.
* @throws ConfigException if evaluation or conversion failed.
*/
public void applyInitialValue(Object target) throws ConfigException {
Collection<ConfiguredProperty> configuredProperties = Utils.getAnnotations(
annotatedField, ConfiguredProperty.class, ConfiguredProperties.class);
DefaultAreas areasAnnot = this.annotatedField.getDeclaringClass().getAnnotation(DefaultAreas.class);
WithLoadPolicy loadPolicy = Utils.getAnnotation(WithLoadPolicy.class, this.annotatedField, this.annotatedField.getDeclaringClass());
DefaultValue defaultValue = this.annotatedField.getAnnotation(DefaultValue.class);
String configValue = getConfigValue(loadPolicy, areasAnnot, configuredProperties, defaultValue);
applyValue(target, configValue, false);
}
/**
* Internally evaluated the current vaslid configuration value based on the given annotations present.
*
* @param loadPolicyAnnot The load policy, determining any explicit listeners to be informed.
* @param areasAnnot Any default areas to be looked up.
* @param propertiesAnnot The configured property keys (qualified or relative).
* @param defaultAnnot any configured default value.
* @return the value to be applied, or null.
*/
private String getConfigValue(WithLoadPolicy loadPolicyAnnot, DefaultAreas areasAnnot, Collection<ConfiguredProperty> propertiesAnnot, DefaultValue defaultAnnot) {
String[] areas = null;
if (areasAnnot != null) {
areas = areasAnnot.value();
}
List<String> keys = evaluateKeys(areasAnnot, propertiesAnnot);
annotatedField.setAccessible(true);
Configuration config = getConfiguration();
String configValue = null;
for (String key : keys) {
if (config.containsKey(key)) {
configValue = config.get(key).orElse(null);
}
if (configValue != null) {
break;
}
}
if (configValue == null && defaultAnnot != null) {
configValue = defaultAnnot.value();
}
if (configValue != null) {
// net step perform expression resolution, if any
return Configuration.evaluateValue(configValue);
}
return null;
}
/**
* This method reapplies a changed configuration value to the field.
*
* @param target the target instance, not null.
* @param configValue the new value to be applied, null will trigger the evaluation of the configured default value.
* @param resolve set to true, if expression resolution should be applied on the value passed.
* @throws ConfigException if the configuration required could not be resolved or converted.
*/
public void applyValue(Object target, String configValue, boolean resolve) throws ConfigException {
Objects.requireNonNull(target);
try {
if (resolve && configValue != null) {
// net step perform exression resolution, if any
configValue = Configuration.evaluateValue(configValue);
}
// Check for adapter/filter
WithPropertyAdapter adapterAnnot = this.annotatedField.getAnnotation(WithPropertyAdapter.class);
Class<? extends PropertyAdapter> propertyAdapterType = null;
if (adapterAnnot != null) {
propertyAdapterType = adapterAnnot.value();
if (!propertyAdapterType.equals(PropertyAdapter.class)) {
// TODO cache here...
PropertyAdapter<String> filter = propertyAdapterType.newInstance();
configValue = filter.adapt(configValue);
}
}
if (configValue == null) {
// TODO Check for optional injection!
// annotatedField.set(target, null);
LOG.info("No config found for " +
this.annotatedField.getDeclaringClass().getName() + '#' +
this.annotatedField.getName());
} else {
Class baseType = annotatedField.getType();
if (String.class.equals(baseType) || baseType.isAssignableFrom(configValue.getClass())) {
annotatedField.set(target, configValue);
} else {
PropertyAdapter<?> adapter = PropertyAdapters.getAdapter(baseType);
annotatedField.set(target, adapter.adapt(configValue));
}
}
} catch (Exception e) {
throw new ConfigException("Failed to inject configured field: " + this.annotatedField.getDeclaringClass()
.getName() + '.' + annotatedField.getName(), e);
}
}
/**
* Evaluates all absolute configuration key based on the annotations found on a class.
*
* @param areasAnnot the (optional) annotation definining areas to be looked up.
* @param propertyAnnotations the annotation on field/method level that may defined the
* exact key to be looked up (in absolute or relative form).
* @return the list of keys in order how they should be processed/looked up.
*/
private List<String> evaluateKeys(DefaultAreas areasAnnot,Collection<ConfiguredProperty> propertyAnnotations) {
Objects.requireNonNull(propertyAnnotations);
List<String> keys = propertyAnnotations.stream().map(s -> s.value()).filter(s -> !s.isEmpty())
.collect(Collectors.toList());
if (keys.isEmpty()) //noinspection UnusedAssignment
keys.add(annotatedField.getName());
ListIterator<String> iterator = keys.listIterator();
while (iterator.hasNext()) {
String next = iterator.next();
if (next.startsWith("[") && next.endsWith("]")) {
// absolute key, strip away brackets, take key as is
iterator.set(next.substring(1, next.length() - 1));
} else {
if (areasAnnot != null) {
// Remove original entry, since it will be replaced with prefixed entries
iterator.remove();
// Add prefixed entries, including absolute (root) entry for "" area value.
for (String area : areasAnnot.value()) {
iterator.add(area.isEmpty() ? next : area + '.' + next);
}
}
}
}
return keys;
}
/**
* This method checks if the given (qualified) configuration key is referenced from this field.
* This is useful to determine, if a key changed in a configuration should trigger any change events
* on the related instances.
*
* @param key the (qualified) configuration key, not null.
* @return true, if the key is referenced.
*/
public boolean matchesKey(String key) {
DefaultAreas areasAnnot = this.annotatedField.getDeclaringClass().getAnnotation(DefaultAreas.class);
Collection<ConfiguredProperty> configuredProperties = Utils.getAnnotations(this.annotatedField, ConfiguredProperty.class,
ConfiguredProperties.class );
List<String> keys = evaluateKeys(areasAnnot, configuredProperties);
return keys.contains(key);
}
/**
* This method evaluates the {@link Configuration} that currently is valid for the given target field/method.
*
* @return the {@link Configuration} instance to be used, never null.
*/
public Configuration getConfiguration() {
WithConfig name = annotatedField.getAnnotation(WithConfig.class);
if(name!=null) {
return Configuration.of(name.value());
}
return Configuration.of();
}
}
| |
/*
* Copyright 2011-2012 MeetMe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.myyearbook.hudson.plugins.confluence;
import com.atlassian.confluence.api.model.content.Content;
import com.atlassian.confluence.api.model.content.ContentBody;
import com.atlassian.confluence.api.model.content.ContentRepresentation;
import com.atlassian.confluence.api.model.content.ContentStatus;
import com.atlassian.confluence.api.model.content.ContentType;
import com.atlassian.confluence.api.model.content.Space;
import com.atlassian.confluence.api.model.pagination.PageResponse;
import com.atlassian.confluence.api.service.exceptions.ServiceException;
import com.atlassian.fugue.Option;
import com.myyearbook.hudson.plugins.confluence.wiki.editors.MarkupEditor;
import com.myyearbook.hudson.plugins.confluence.wiki.editors.MarkupEditor.TokenNotFoundException;
import hudson.EnvVars;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Descriptor;
import hudson.model.EnvironmentContributingAction;
import hudson.model.Result;
import hudson.model.Run;
import hudson.model.Saveable;
import hudson.model.TaskListener;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Notifier;
import hudson.tasks.Publisher;
import hudson.util.DescribableList;
import hudson.util.FormValidation;
import jenkins.tasks.SimpleBuildStep;
import jenkins.util.VirtualFile;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.Symbol;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.io.File;
import java.io.IOException;
import java.net.URLConnection;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Collectors;
public final class ConfluencePublisher extends Notifier implements Saveable, SimpleBuildStep {
private static final String DEFAULT_CONTENT_TYPE = "application/octet-stream";
private @NonNull final String siteName;
private @NonNull final String spaceName;
private @NonNull final String pageName;
private boolean attachArchivedArtifacts;
private boolean buildIfUnstable;
private String fileSet;
private boolean replaceAttachments;
private String labels;
private long parentId;
private DescribableList<MarkupEditor, Descriptor<MarkupEditor>> editors = new DescribableList<>(
this);
@Deprecated
public ConfluencePublisher(String siteName, final boolean buildIfUnstable,
final String spaceName, final String pageName, final String labels, final boolean attachArchivedArtifacts,
final String fileSet, final List<MarkupEditor> editorList, final boolean replaceAttachments, final long parentId) {
this(siteName, spaceName, pageName);
setParentId(parentId);
setLabels(labels);
setBuildIfUnstable(buildIfUnstable);
setAttachArchivedArtifacts(attachArchivedArtifacts);
setFileSet(fileSet);
setReplaceAttachments(replaceAttachments);
setEditorList(editorList);
}
@DataBoundConstructor
public ConfluencePublisher(@NonNull String siteName, final @NonNull String spaceName, final @NonNull String pageName) {
if (siteName == null) {
List<ConfluenceSite> sites = getDescriptor().getSites();
if (sites != null && sites.size() > 0) {
siteName = sites.get(0).getName();
}
}
this.siteName = siteName;
this.spaceName = spaceName;
this.pageName = pageName;
}
    /** @param buildIfUnstable when true, publish even if the build is not SUCCESS. */
    @DataBoundSetter
    public void setBuildIfUnstable(boolean buildIfUnstable) {
        this.buildIfUnstable = buildIfUnstable;
    }
    /** @param attachArchivedArtifacts when true, upload the build's archived artifacts as attachments. */
    @DataBoundSetter
    public void setAttachArchivedArtifacts(boolean attachArchivedArtifacts) {
        this.attachArchivedArtifacts = attachArchivedArtifacts;
    }
@DataBoundSetter
public void setFileSet(final String fileSet) {
this.fileSet = StringUtils.isEmpty(fileSet) ? null : fileSet;
}
    /** @param replaceAttachments when true, delete same-named existing attachments before uploading. */
    @DataBoundSetter
    public void setReplaceAttachments(boolean replaceAttachments) {
        this.replaceAttachments = replaceAttachments;
    }
@DataBoundSetter
public void setLabels(final String labels) {
this.labels = StringUtils.isEmpty(labels) ? null : labels;
}
    /** @param parentId id of the parent page for page creation; 0 means "use the space home page". */
    @DataBoundSetter
    public void setParentId(long parentId) {
        this.parentId = parentId;
    }
@DataBoundSetter
public void setEditorList(final List<MarkupEditor> editorList) {
if (editorList != null) {
this.editors.addAll(editorList);
} else {
this.editors.clear();
}
}
    /** @return a snapshot list of the configured markup editors. */
    public List<MarkupEditor> getEditorList() {
        return this.editors.toList();
    }
    /** @return the plugin descriptor, narrowed to {@link DescriptorImpl}. */
    @Override
    public DescriptorImpl getDescriptor() {
        return (DescriptorImpl) super.getDescriptor();
    }
    /**
     * @return the Ant-style fileset pattern of workspace files to attach, or {@code null} if unset
     */
    public String getFileSet() {
        return fileSet;
    }
    /**
     * @return the configured page title (may contain unexpanded build variables)
     */
    public @NonNull String getPageName() {
        return pageName;
    }
    /**
     * @return the configured parent page id; 0 means no explicit parent
     */
    public long getParentId() {
        return parentId;
    }
    /**
     * @return the labels to add to the page, or {@code null} if unset
     */
    public String getLabels() {
        return labels;
    }
    /** Serializes publisher runs per build so page edits don't interleave. */
    @Override
    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.BUILD;
    }
public ConfluenceSite getSite() {
List<ConfluenceSite> sites = getDescriptor().getSites();
if (sites == null) {
return null;
}
if (siteName == null && sites.size() > 0) {
// default
return sites.get(0);
}
for (ConfluenceSite site : sites) {
if (site.getName().equals(siteName)) {
return site;
}
}
return null;
}
    /**
     * @return the name of the configured Confluence site
     */
    public @NonNull String getSiteName() {
        return siteName;
    }
    /**
     * @return the configured space key (may contain unexpanded build variables)
     */
    public @NonNull String getSpaceName() {
        return spaceName;
    }
protected List<Content> performAttachments(Run build, FilePath ws,
TaskListener listener, ConfluenceSession confluence,
final Content pageContent) throws IOException, InterruptedException {
final long pageId = pageContent.getId().asLong();
final List<Content> remoteAttachments = new ArrayList<>();
if (ws == null) {
// Possibly running on a slave that went down
log(listener, "Workspace is unavailable.");
return remoteAttachments;
}
String attachmentComment = build.getEnvironment(listener).expand(
"Published from Jenkins build: $BUILD_URL");
log(listener, "Uploading attachments to Confluence page: " + pageContent.getTitle());
final List<VirtualFile> files = new ArrayList<>();
if (this.attachArchivedArtifacts) {
final List<Run.Artifact> artifacts = build.getArtifacts();
if (artifacts.isEmpty()) {
log(listener, "Attempting to attach the archived artifacts, but there are no"
+ " archived artifacts from the job! Check job configuration...");
} else {
log(listener, "Found " + artifacts.size()
+ " archived artifact(s) to upload to Confluence...");
for (Run.Artifact artifact : artifacts) {
files.add(build.getArtifactManager().root().child(artifact.relativePath));
}
}
}
final String fileSet = hudson.Util.fixEmptyAndTrim(this.fileSet);
if (!StringUtils.isEmpty(fileSet)) {
log(listener, "Evaluating fileset pattern: " + fileSet);
// Expand environment variables
final String artifacts = build.getEnvironment(listener).expand(fileSet);
// Obtain a list of all files that match the pattern
final FilePath[] workspaceFiles = ws.list(artifacts);
if (workspaceFiles.length > 0) {
log(listener, "Found " + workspaceFiles.length
+ " workspace artifact(s) to upload to Confluence...");
for (FilePath file : workspaceFiles) {
if (!files.contains(file)) {
files.add(file.toVirtualFile());
} else {
// Don't include the file twice if it's already in the
// list
log(listener, " - pattern matched an archived artifact: " + file.getName());
}
}
} else {
log(listener, "No files matched the pattern '" + fileSet + "'.");
String msg = null;
try {
msg = ws.validateAntFileMask(artifacts, FilePath.VALIDATE_ANT_FILE_MASK_BOUND);
} catch (Exception e) {
log(listener, "" + e.getMessage());
}
if (msg != null) {
log(listener, "" + msg);
}
}
}
log(listener, "Uploading " + files.size() + " file(s) to Confluence...");
boolean shouldRemoveExistingAttachments = false;
List<Content> existingAttachments = new ArrayList<>();
if (isReplaceAttachments()) {
List<Content> attachments = confluence.getAttachments(pageId);
if (attachments != null && attachments.size() > 0) {
existingAttachments.addAll(attachments);
shouldRemoveExistingAttachments = true;
}
}
for (VirtualFile file : files) {
final String fileName = file.getName();
if (shouldRemoveExistingAttachments) {
for (Content remoteAttachment : existingAttachments) {
if (remoteAttachment.getTitle().equals(fileName)) {
try {
confluence.removeAttachment(remoteAttachment);
existingAttachments.remove(remoteAttachment);
log(listener, "Deleted existing " + remoteAttachment.getTitle() + " from Confluence before upload new...");
break;
} catch (ServiceException e) {
log(listener, "Deleting error: " + e.toString());
throw e;
}
}
}
}
String contentType = URLConnection.guessContentTypeFromName(fileName);
if (StringUtils.isEmpty(contentType)) {
// Confluence does not allow an empty content type
contentType = DEFAULT_CONTENT_TYPE;
}
log(listener, " - Uploading file: " + fileName + " (" + contentType + ")");
try {
final PageResponse<Content> result = confluence.addAttachment(pageId, file,
contentType, attachmentComment);
remoteAttachments.addAll(result.getResults());
log(listener, " done: " + result.getResults().stream()
.map(Content::getTitle).collect(Collectors.joining(", ")));
} catch (ServiceException se) {
listener.error("Unable to upload file...");
se.printStackTrace(listener.getLogger());
}
}
log(listener, "Done");
return remoteAttachments;
}
    /**
     * Main publish entry point ({@code SimpleBuildStep}).
     *
     * Locates (or creates) the target page, uploads attachments, applies the
     * configured markup editors, adds labels, and finally posts/updates the
     * "published from Jenkins" comment. Failures are logged to the build
     * console rather than failing the build.
     */
    @Override
    public void perform(@NonNull Run<?, ?> build, @NonNull FilePath filePath, @NonNull Launcher launcher,
            @NonNull TaskListener listener) throws InterruptedException, IOException {
        boolean result = true;
        ConfluenceSite site = getSite();
        if (site == null) {
            log(listener, "Not publishing because no Confluence Site could be found. " +
                    "Check your Confluence configuration in system settings.");
            return;
        }
        ConfluenceSession confluence = site.createSession();
        Result buildResult = build.getResult();
        if (!buildIfUnstable && buildResult != null && !Result.SUCCESS.equals(buildResult)) {
            // Don't process for unsuccessful builds
            log(listener, "Build status is not SUCCESS (" + buildResult + ").");
            return;
        }
        // Expose BUILD_RESULT to the markup editors via the build environment.
        EnvVarAction buildResultAction = new EnvVarAction("BUILD_RESULT", String
                .valueOf(buildResult));
        build.addAction(buildResultAction);
        String spaceName = this.spaceName;
        String pageName = this.pageName;
        long parentId = this.parentId;
        log(listener, "ParentId: " + parentId);
        try {
            // Space and page names may contain build variables ($VAR) — expand them.
            spaceName = build.getEnvironment(listener).expand(spaceName);
            pageName = build.getEnvironment(listener).expand(pageName);
        } catch (IOException | InterruptedException e) {
            e.printStackTrace(listener.getLogger());
        }
        Content pageContent;
        try {
            String spaceAndPageNames = String.format("%s/%s", spaceName, pageName);
            pageContent = confluence.getContent(spaceName, pageName, true).orElseThrow(() -> new ServiceException(String.format("Page at \"%s\" not found!", spaceAndPageNames)));
        } catch (ServiceException e) {
            // Still shouldn't fail the job, so just dump this to the console and keep going (true).
            log(listener, e.getMessage());
            log(listener, "Unable to locate page: " + spaceName + "/" + pageName + ". Attempting to create the page now...");
            try {
                // if we haven't specified a parent, assign the Space home page as the parent
                if (parentId == 0L) {
                    Space space = confluence.getSpace(spaceName).getOrNull();
                    if (space != null) {
                        parentId = space.getId();
                    }
                }
                pageContent = this.createPage(confluence, spaceName, pageName, parentId);
            } catch (ServiceException exc) {
                log(listener, "Page could not be created! Aborting edits...");
                exc.printStackTrace(listener.getLogger());
                return;
            }
        }
        // Perform attachment uploads
        List<Content> remoteAttachments = null;
        try {
            remoteAttachments = this.performAttachments(build, filePath, listener, confluence, pageContent);
        } catch (IOException | InterruptedException e) {
            e.printStackTrace(listener.getLogger());
        }
        if (!editors.isEmpty()) {
            // Perform wiki replacements
            result &= this.performWikiReplacements(build, filePath, listener, confluence,
                    pageContent, remoteAttachments);
        }
        // Add the page labels
        String labels = this.labels;
        if (StringUtils.isNotBlank(labels)) {
            try {
                String expandedLabels = build.getEnvironment(listener).expand(labels);
                result &= confluence.addLabels(pageContent.getId().asLong(), expandedLabels);
            } catch (ServiceException se) {
                log(listener, se.getMessage());
            }
        }
        // Only post/update the edit comment if everything so far succeeded.
        if (result) {
            try {
                result &= performEditComment(build, listener, confluence, pageContent);
            } catch (ServiceException se) {
                log(listener, se.getMessage());
            }
        }
        // NOTE(review): 'result' is accumulated but never used to mark the run as
        // failed — all failures above only log. Confirm this best-effort behavior
        // is intended before changing it.
    }
    /**
     * Creates a new page in Confluence as a child of the given parent content.
     *
     * @param confluence active Confluence session
     * @param spaceName  key of the space to create the page in
     * @param pageName   title of the new page
     * @param parentId   id of the parent content; must resolve to existing content
     * @return the created page
     * @throws ServiceException if the parent cannot be found or creation fails
     */
    private Content createPage(ConfluenceSession confluence, String spaceName, String pageName, long parentId)
            throws ServiceException {
        Content parentContent = confluence.getContent(String.valueOf(parentId))
                .orElseThrow(() -> new ServiceException("Can't find parent content with Id:" + parentId));
        Content.ContentBuilder newPage = Content.builder()
                .title(pageName)
                .type(ContentType.PAGE)
                .space(spaceName)
                // Start with an empty body; editors fill it in afterwards.
                .body(ContentBody.contentBodyBuilder().build())
                .parent(parentContent);
        return confluence.createContent(newPage.build());
    }
    /**
     * Applies all configured markup editors to the page's storage-format body
     * and pushes the edited content back to Confluence as a new version.
     *
     * @return true when the remote page's version number matches the version we
     *         just posted (i.e. the update is visible remotely)
     */
    private boolean performWikiReplacements(Run<?, ?> build, FilePath filePath, TaskListener listener,
            ConfluenceSession confluence,
            Content pageContent, List<Content> remoteAttachments) {
        boolean isUpdated = false;
        //Ugly Hack, though required here. DO NOT REMOVE, otherwise Content.ContentBuilder.build() will fail.
        // Strips map entries whose value is null before the builder walks them.
        Consumer<Map<ContentType, PageResponse<Content>>> SANITIZE_NESTED_CONTENT_MAP = (m) ->
                m.entrySet().stream().filter(e -> e.getValue() == null).map(Map.Entry::getKey)
                        .collect(Collectors.toList()).forEach(m::remove);
        SANITIZE_NESTED_CONTENT_MAP.accept(pageContent.getChildren());
        SANITIZE_NESTED_CONTENT_MAP.accept(pageContent.getDescendants());
        // Get current content and edit.
        String originContent = pageContent.getBody().get(ContentRepresentation.STORAGE).getValue();
        String contentEdited = performEdits(build, filePath, listener, originContent, remoteAttachments);
        //XHTML -> HTML self closing tag adjustment
        // (replaceAll's pattern " /" is a literal here: turns "<br />" into "<br/>")
        contentEdited = contentEdited.replaceAll(" /", "/");
        // Now set the replacement contentBody
        ContentBody contentBody = ContentBody.contentBodyBuilder()
                .representation(ContentRepresentation.STORAGE)
                .value(contentEdited)
                .build();
        List<Content> ancestors = pageContent.getAncestors();
        // Bump the version and keep the immediate parent (last ancestor).
        Content updatedContent = Content.builder(pageContent)
                .version(pageContent.getVersion().nextBuilder().build())
                .body(contentBody)
                .parent(ancestors.get(ancestors.size() - 1))
                .build();
        //post updated content.
        Content results = confluence.updateContent(updatedContent);
        //Check if remote content is updated.
        Optional<Content> remoteResults =
                confluence.getContent(pageContent.getSpace().getKey(), pageContent.getTitle(), true);
        if (remoteResults.isPresent()) {
            isUpdated = remoteResults.get().getVersion().getNumber() == results.getVersion().getNumber();
        }
        return isUpdated;
    }
    /**
     * Posts (or updates, if one already exists) a "Published from Jenkins build"
     * footer comment on the page.
     *
     * @return true when the comment text is visible among the page's remote comments
     * @throws ServiceException on Confluence API failure
     */
    private boolean performEditComment(Run<?, ?> build, TaskListener listener,
            ConfluenceSession confluence, Content pageContent)
            throws IOException, InterruptedException, ServiceException {
        boolean isUpdated = false;
        final String editComment = build.getEnvironment(listener).expand(
                "Published from Jenkins build: <a href=\"$BUILD_URL\">$BUILD_URL</a>");
        Optional<Content> previousComment = Optional.empty();
        List<Content> cl = new ArrayList<>();
        // Collect the page's existing COMMENT children, tolerating missing maps.
        Optional.ofNullable(pageContent.getChildren())
                .flatMap(cn -> Optional.ofNullable(cn.get(ContentType.COMMENT)).flatMap(cm -> Optional.ofNullable(cm.getResults())))
                .ifPresent(cl::addAll);
        if (!cl.isEmpty()) {
            // Find the oldest comment whose body starts with our fixed prefix
            // (the text before the ':' in editComment).
            previousComment = cl.stream()
                    .filter(c -> c.getBody().get(ContentRepresentation.STORAGE).getValue().contains(editComment.split(":")[0]))
                    .min(Comparator.comparing(c -> c.getVersion().getNumber()));
        }
        if (previousComment.isPresent()) {
            //Edit comment Content
            Content comment = Content.builder()
                    .type(ContentType.COMMENT)
                    .version(previousComment.get().getVersion().nextBuilder().build())
                    .id(previousComment.get().getId())
                    .container(pageContent)
                    .title("Re: " + pageContent.getTitle())
                    .extension("location", "footer")
                    .status(ContentStatus.CURRENT)
                    .body(ContentBody.contentBodyBuilder()
                            .representation(ContentRepresentation.STORAGE)
                            .value(editComment)
                            .build())
                    .build();
            confluence.updateContent(comment);
        } else {
            //Post new comment.
            createComment(confluence, pageContent, editComment);
        }
        //Check if remote content is updated.
        Optional<Content> remoteResults =
                confluence.getContent(pageContent.getSpace().getKey(), pageContent.getTitle(), true);
        if (remoteResults.isPresent()) {
            isUpdated = remoteResults.get().getChildren().get(ContentType.COMMENT)
                    .getResults().stream().map(r -> r.getBody().get(ContentRepresentation.STORAGE).getValue())
                    .collect(Collectors.toList())
                    .contains(editComment);
        }
        return isUpdated;
    }
    /**
     * Creates a new comment on a Confluence page.
     *
     * @param confluence    active Confluence session
     * @param parentContent page the comment is attached to
     * @param commentText   comment body in storage representation
     * @return the created comment Content
     * @throws ServiceException on Confluence API failure
     */
    private Content createComment(ConfluenceSession confluence, Content parentContent, String commentText)
            throws ServiceException {
        Content.ContentBuilder newComment = Content.builder()
                .title("Re: " + parentContent.getTitle())
                .body(ContentBody.contentBodyBuilder()
                        .representation(ContentRepresentation.STORAGE)
                        .value(commentText)
                        .build())
                .container(parentContent)
                .type(ContentType.COMMENT);
        return confluence.createContent(newComment.build());
    }
private String performEdits(final Run<?, ?> build, FilePath filePath, final TaskListener listener,
String content, List<Content> remoteAttachments) {
for (MarkupEditor editor : this.editors) {
log(listener, "Performing wiki edits: " + editor.getDescriptor().getDisplayName());
try {
content = editor.performReplacement(build, filePath, listener, content, true, remoteAttachments);
} catch (TokenNotFoundException e) {
log(listener, "ERROR while performing replacement: " + e.getMessage());
}
}
return content;
}
/**
* Recursively scan a directory, returning all files encountered
*
* @param artifactsDir
* @return
*/
private List<FilePath> findArtifacts(File artifactsDir) {
ArrayList<FilePath> files = new ArrayList<>();
if (artifactsDir != null && artifactsDir.isDirectory()) {
File[] listed = artifactsDir.listFiles();
if (listed != null) {
for (File f : listed) {
if (f == null) {
continue;
}
if (f.isDirectory()) {
files.addAll(findArtifacts(f));
} else if (f.isFile()) {
files.add(new FilePath(f));
}
}
}
}
return files;
}
    /**
     * Writes a "[confluence]"-prefixed line to the build console.
     *
     * @param listener build listener providing the console stream
     * @param message  text to log
     */
    protected void log(TaskListener listener, String message) {
        listener.getLogger().println("[confluence] " + message);
    }
    /**
     * @return true when the build's archived artifacts are attached to the page
     */
    public boolean isAttachArchivedArtifacts() {
        return attachArchivedArtifacts;
    }
    /**
     * @return true when publishing happens even for non-SUCCESS builds
     */
    public boolean isBuildIfUnstable() {
        return buildIfUnstable;
    }
    /**
     * @return true when same-named existing attachments are deleted before upload
     */
    public boolean isReplaceAttachments() {
        return replaceAttachments;
    }
    /** No-op {@link Saveable} implementation; this publisher has no state file of its own. */
    @Override
    public void save() {
    }
@Extension
@Symbol("publishConfluence")
public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {
private final List<ConfluenceSite> sites = new ArrayList<>();
        /** Loads previously persisted global configuration (the site list). */
        public DescriptorImpl() {
            super(ConfluencePublisher.class);
            load();
        }
public List<Descriptor<MarkupEditor>> getEditors() {
final List<Descriptor<MarkupEditor>> editors = new ArrayList<>(MarkupEditor.all());
return editors;
}
        /** Rebinds the global site list from the submitted system-configuration form and persists it. */
        @Override
        public boolean configure(StaplerRequest req, JSONObject formData) {
            this.setSites(req.bindJSONToList(ConfluenceSite.class, formData.get("sites")));
            save();
            return true;
        }
public FormValidation doParentIdCheck(@QueryParameter final String siteName,
@QueryParameter final String spaceName, @QueryParameter final String parentId) {
ConfluenceSite site = this.getSiteByName(siteName);
if (hudson.Util.fixEmptyAndTrim(spaceName) == null
|| hudson.Util.fixEmptyAndTrim(parentId) == null) {
return FormValidation.ok();
}
Long parentIdL;
try {
parentIdL = Long.valueOf(parentId);
} catch (NumberFormatException nfe) {
return FormValidation.error("The parent page id should be a numeric id.");
}
if (site == null) {
return FormValidation.error("Unknown site:" + siteName);
}
try {
ConfluenceSession confluence = site.createSession();
Content page = confluence.getContent(String.valueOf(parentIdL)).orElseThrow(() -> new ServiceException("Page content is NULL"));
if (page != null) {
return FormValidation.ok("OK: " + page.getTitle());
}
return FormValidation.error("Page not found");
} catch (ServiceException re) {
return FormValidation.warning("Page not found. Check that the page still exists. ");
}
}
        /**
         * Form validation for the page name field: verifies the page exists on
         * the selected site, tolerating build-time variables in the name.
         */
        public FormValidation doPageNameCheck(@QueryParameter final String siteName,
                @QueryParameter final String spaceName, @QueryParameter final String pageName) {
            ConfluenceSite site = this.getSiteByName(siteName);
            if (hudson.Util.fixEmptyAndTrim(spaceName) == null
                    || hudson.Util.fixEmptyAndTrim(pageName) == null) {
                // Nothing entered yet — don't nag the user.
                return FormValidation.ok();
            }
            if (site == null) {
                return FormValidation.error("Unknown site:" + siteName);
            }
            try {
                ConfluenceSession confluence = site.createSession();
                Content page = confluence.getContent(spaceName, pageName, false).orElse(null);
                if (page != null) {
                    return FormValidation.ok("OK: " + page.getTitle());
                }
                return FormValidation.error("Page not found");
            } catch (ServiceException re) {
                // Names containing '$' are expanded at build time, so existence
                // cannot be checked from the configuration form.
                if (StringUtils.contains(pageName, '$') || StringUtils.contains(spaceName, '$')) {
                    return FormValidation
                            .warning("Unable to determine if the page exists because it contains build-time parameters.");
                }
                return FormValidation.warning("Page not found. Check that the page still exists. "
                        + "If you continue, we'll try to create the page at publish-time.");
            }
        }
        /**
         * Form validation for the space name field: verifies the space exists on
         * the selected site, tolerating build-time variables in the name.
         */
        public FormValidation doSpaceNameCheck(@QueryParameter final String siteName,
                @QueryParameter final String spaceName) {
            ConfluenceSite site = this.getSiteByName(siteName);
            if (hudson.Util.fixEmptyAndTrim(spaceName) == null) {
                // Nothing entered yet — don't nag the user.
                return FormValidation.ok();
            }
            if (site == null) {
                return FormValidation.error("Unknown site:" + siteName);
            }
            try {
                ConfluenceSession confluence = site.createSession();
                Option<Space> space = confluence.getSpace(spaceName);
                if (!space.isEmpty()) {
                    return FormValidation.ok("OK: " + space.get().getName());
                }
                return FormValidation.error("Space not found");
            } catch (ServiceException re) {
                // A '$' means the name is expanded at build time; existence
                // cannot be checked from the configuration form.
                if (StringUtils.contains(spaceName, '$')) {
                    return FormValidation
                            .warning("Unable to determine if the space exists because it contains build-time parameters.");
                }
                return FormValidation.error(re, "Space not found");
            }
        }
        /** @return the label shown in the post-build action dropdown. */
        @Override
        public String getDisplayName() {
            return "Publish to Confluence";
        }
public ConfluenceSite getSiteByName(String siteName) {
for (ConfluenceSite site : sites) {
if (site.getName().equals(siteName)) {
return site;
}
}
return null;
}
        /** @return an unmodifiable view of the configured Confluence sites. */
        public List<ConfluenceSite> getSites() {
            return Collections.unmodifiableList(sites);
        }
@Override
public boolean isApplicable(Class<? extends AbstractProject> p) {
return sites != null && sites.size() > 0;
}
        /** Binds a per-job publisher instance from the submitted job-configuration form. */
        @Override
        public Publisher newInstance(StaplerRequest req, JSONObject formData) {
            return req.bindJSON(ConfluencePublisher.class, formData);
        }
        /** Replaces the configured site list with the given one (defensive: keeps the same backing list). */
        public void setSites(List<ConfluenceSite> sites) {
            this.sites.clear();
            this.sites.addAll(sites);
        }
}
/**
* Build action that is capable of inserting arbitrary KVPs into the EnvVars.
*
* @author jhansche
*/
public static class EnvVarAction implements EnvironmentContributingAction {
private final String name;
private final String value;
public EnvVarAction(final String name, final String value) {
this.name = name;
this.value = value;
}
@Override
public String getIconFileName() {
return null;
}
@Override
public String getDisplayName() {
return null;
}
@Override
public String getUrlName() {
return null;
}
@Override
public void buildEnvVars(AbstractBuild<?, ?> build, EnvVars env) {
env.put(name, value);
}
}
}
| |
/*
* Copyright 2013 Proofpoint Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kairosdb.core.aggregator;
import org.joda.time.DateTime;
import org.joda.time.Months;
import org.kairosdb.core.DataPoint;
import org.kairosdb.core.datastore.DataPointGroup;
import org.kairosdb.core.datastore.Sampling;
import org.kairosdb.core.datastore.TimeUnit;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Iterator;
import java.util.TimeZone;
import static com.google.common.base.Preconditions.checkNotNull;
public abstract class RangeAggregator implements Aggregator
{
private long m_startTime = 0L;
private long m_range = 1L;
private long m_currentRange;
private long m_dayOfMonthOffset = 0L; //day of month offset in milliseconds
private boolean m_alignSampling;
@NotNull
@Valid
private Sampling m_sampling;
private boolean m_alignStartTime;
	/**
	 * Wraps the given group in an aggregating iterator. When sampling alignment
	 * is enabled, m_startTime is first snapped back to the start of the
	 * sampling unit (e.g. top of the hour, first day of the month).
	 */
	public DataPointGroup aggregate(DataPointGroup dataPointGroup)
	{
		checkNotNull(dataPointGroup);
		if (m_alignSampling)
		{
			TimeUnit tu = m_sampling.getUnit();
			DateTime dt = new DateTime(m_startTime);
			switch (tu)
			{
				// NOTE: deliberate fall-through — the coarse units (YEARS..DAYS)
				// also zero the time-of-day fields handled by the cases below.
				case YEARS:
				case MONTHS:
				case WEEKS:
				case DAYS:
					if (tu == TimeUnit.WEEKS)
						dt = dt.withDayOfWeek(1);
					else if (tu == TimeUnit.MONTHS)
					{
						dt = dt.withDayOfMonth(1);
						m_dayOfMonthOffset = 0;
					}
					else
						dt = dt.withDayOfYear(1);
				case HOURS:
				case MINUTES:
				case SECONDS:
				case MILLISECONDS:
					dt = dt.withHourOfDay(0);
					dt = dt.withMinuteOfHour(0);
					dt = dt.withSecondOfMinute(0);
					dt = dt.withMillisOfSecond(0);
			}
			m_startTime = dt.getMillis();
		}
		m_currentRange = m_startTime;
		return (new RangeDataPointAggregator(dataPointGroup, getSubAggregator()));
	}
	/** Sets the sampling and caches its span in milliseconds as the range width. */
	public void setSampling(Sampling sampling)
	{
		m_sampling = sampling;
		m_range = sampling.getSampling();
	}
	/**
	 When set to true the time for the aggregated data point for each range will
	 fall on the start of the range instead of being the value for the first
	 data point within that range.

	 @param align true to timestamp results at the start of each range
	 */
	public void setAlignStartTime(boolean align)
	{
		m_alignStartTime = align;
	}
	/**
	 Setting this to true will cause the aggregation range to be aligned based on
	 the sampling size. For example if your sample size is either milliseconds,
	 seconds, minutes or hours then the start of the range will always be at the top
	 of the hour. The effect of setting this to true is that your data will
	 take the same shape when graphed as you refresh the data.

	 @param align Set to true to align the range on fixed points instead of
	 the start of the query.
	 */
	public void setAlignSampling(boolean align)
	{
		m_alignSampling = align;
	}
	/**
	 Start time to calculate the ranges from. Typically this is the start
	 of the query.

	 @param startTime epoch millis marking the first range boundary
	 */
	public void setStartTime(long startTime)
	{
		m_startTime = startTime;
		//Get the day of the month (UTC) for month-based range calculations
		Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
		cal.setTimeInMillis(startTime);
		int dayOfMonth = cal.get(Calendar.DAY_OF_MONTH);
		dayOfMonth -= 1; //offset this value so when we subtract it from the data point time it won't do anything for day 1
		m_dayOfMonthOffset = dayOfMonth * 24L * 60L * 60L * 1000L;
	}
	/**
	 Return a RangeSubAggregator that will be used to aggregate data over a
	 discrete range of data points. This is called once per grouped data series.
	 For example, if one metric is queried and no grouping is done this method is
	 called once and the resulting object is called over and over for each range
	 within the results.

	 If the query were grouping by the host tag and host has values of 'A' and 'B'
	 this method will be called twice, once to aggregate results for 'A' and once
	 to aggregate results for 'B'.

	 @return the per-series sub-aggregator
	 */
	protected abstract RangeSubAggregator getSubAggregator();
//===========================================================================
/**
*/
private class RangeDataPointAggregator extends AggregatedDataPointGroupWrapper
{
private RangeSubAggregator m_subAggregator;
private Calendar m_calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
private Iterator<DataPoint> m_dpIterator;
		/**
		 * @param innerDataPointGroup the raw, time-ordered data points to aggregate
		 * @param subAggregator       per-series aggregator applied to each range
		 */
		public RangeDataPointAggregator(DataPointGroup innerDataPointGroup,
				RangeSubAggregator subAggregator)
		{
			super(innerDataPointGroup);
			m_subAggregator = subAggregator;
			// Start with an empty iterator so the first next() computes a range.
			m_dpIterator = new ArrayList<DataPoint>().iterator();
		}
		/* Historical note: a getRange(timestamp) helper used to map each
		   timestamp to an opaque range bucket here. It was superseded by
		   getStartRange/getEndRange below, which compute explicit range
		   boundaries and handle month-based sampling via Joda-Time. */
		/**
		 * Returns the start (inclusive, epoch millis) of the range containing
		 * the given timestamp. MONTHS sampling uses calendar months via
		 * Joda-Time, because months have variable length; all other units use
		 * fixed-width arithmetic from m_startTime.
		 */
		private long getStartRange(long timestamp)
		{
			if ((m_sampling != null) && (m_sampling.getUnit() == TimeUnit.MONTHS))
			{
				DateTime start = new DateTime(m_startTime);
				DateTime dpTime = new DateTime(timestamp);
				// Whole sampling periods (of m_sampling.getValue() months)
				// elapsed between the query start and this data point.
				Months months = Months.monthsBetween(start, dpTime);
				Months period = months.dividedBy(m_sampling.getValue());
				long startRange = start.plus(period.multipliedBy(m_sampling.getValue())).getMillis();
				return (startRange);
			}
			else
			{
				// Fixed-width range: floor to the nearest multiple of m_range.
				return (((timestamp - m_startTime) / m_range) * m_range + m_startTime);
			}
		}
		/**
		 * Returns the end (exclusive, epoch millis) of the range containing the
		 * given timestamp — i.e. the start of the following range. Mirrors
		 * getStartRange: calendar months for MONTHS sampling, fixed-width
		 * arithmetic otherwise.
		 */
		private long getEndRange(long timestamp)
		{
			if ((m_sampling != null) && (m_sampling.getUnit() == TimeUnit.MONTHS))
			{
				DateTime start = new DateTime(m_startTime);
				DateTime dpTime = new DateTime(timestamp);
				Months months = Months.monthsBetween(start, dpTime);
				// One period past the start of the containing range.
				Months period = months.dividedBy(m_sampling.getValue());
				long endRange = start.plus(period.plus(1).multipliedBy(m_sampling.getValue())).getMillis();
				return (endRange);
			}
			else
			{
				return ((((timestamp - m_startTime) / m_range) +1) * m_range + m_startTime);
			}
		}
		/**
		 * Returns the next aggregated data point. When the buffered iterator
		 * from the previous range is exhausted, computes the boundaries of the
		 * range containing the current raw point and asks the sub-aggregator to
		 * aggregate everything inside it.
		 */
		@Override
		public DataPoint next()
		{
			if (!m_dpIterator.hasNext())
			{
				//We calculate start and end ranges as the ranges may not be
				//consecutive if data does not show up in each range.
				long startRange = getStartRange(currentDataPoint.getTimestamp());
				long endRange = getEndRange(currentDataPoint.getTimestamp());
				SubRangeIterator subIterator = new SubRangeIterator(
						endRange);
				// Result timestamp: first point in range, or range start if aligned.
				long dataPointTime = currentDataPoint.getTimestamp();
				if (m_alignStartTime)
					dataPointTime = startRange;
				m_dpIterator = m_subAggregator.getNextDataPoints(dataPointTime,
						subIterator).iterator();
			}
			return (m_dpIterator.next());
		}
		/** More results exist while either the buffered range or the source has points left. */
		@Override
		public boolean hasNext()
		{
			return (m_dpIterator.hasNext() || super.hasNext());
		}
//========================================================================
		/**
		 This class provides an iterator over a discrete range of data points:
		 it yields points from the enclosing group while their timestamps fall
		 before the supplied (exclusive) end-of-range boundary.
		 */
		private class SubRangeIterator implements Iterator<DataPoint>
		{
			// Exclusive upper bound (epoch millis) of this range.
			private long m_endRange;

			public SubRangeIterator(long endRange)
			{
				m_endRange = endRange;
			}

			@Override
			public boolean hasNext()
			{
				return ((currentDataPoint != null) && (currentDataPoint.getTimestamp() < m_endRange));
			}

			@Override
			public DataPoint next()
			{
				// Hand out the current point and pre-fetch the next one from the
				// underlying group so hasNext() can look ahead.
				DataPoint ret = currentDataPoint;
				if (hasNextInternal())
					currentDataPoint = nextInternal();
				return (ret);
			}

			@Override
			public void remove()
			{
				throw new UnsupportedOperationException();
			}
		}
}
//===========================================================================
	/**
	 Instances of this object are created once per grouped data series.
	 */
	public interface RangeSubAggregator
	{
		/**
		 Returns an aggregated data point from a range that is passed in
		 as dataPointRange.

		 @param returnTime Timestamp to use on the returned data point. This is
		 currently the timestamp of the first data point in the range (or the
		 range start when start-time alignment is enabled).
		 @param dataPointRange Range to aggregate over.
		 @return the aggregated data point(s) for the range
		 */
		public Iterable<DataPoint> getNextDataPoints(long returnTime, Iterator<DataPoint> dataPointRange);
	}
}
| |
package org.leguan.history;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.UUID;
import org.junit.Assert;
import org.junit.Test;
import org.leguan.history.HistoryInfoContainer;
import org.leguan.history.HistoryInfoDetail;
import org.leguan.history.HistoryState;
import org.leguan.history.HistoryTimeConverter;
public class HistoryInfoContainerTest {
HistoryTimeConverter historyTimeConverter = new HistoryTimeConverter();
@Test
public void getContainerUrl () {
HistoryInfoContainer containerAfter = new HistoryInfoContainer();
HistoryInfoDetail historyInfoDetailAfter = new HistoryInfoDetail();
historyInfoDetailAfter.setContainerUrl("containerUrl");
containerAfter.addHistoryDetail(historyInfoDetailAfter);
Assert.assertEquals ("containerUrl", containerAfter.getContainerUrl());
}
    @Test
    public void getContainerUrlEmpty () {
        HistoryInfoContainer containerAfter = new HistoryInfoContainer();
        HistoryInfoDetail historyInfoDetailAfter = new HistoryInfoDetail();
        // The detail is deliberately NOT added to the container, so the
        // container has no detail to take its URL from.
        historyInfoDetailAfter.setContainerUrl("containerUrl");
        Assert.assertNull(containerAfter.getContainerUrl());
    }
    @Test
    public void stateWithNull () {
        HistoryInfoContainer containerAfter = new HistoryInfoContainer();
        // A detail with no state set counts toward none of the buckets.
        HistoryInfoDetail historyInfoDetailAfter = new HistoryInfoDetail();
        containerAfter.addHistoryDetail(historyInfoDetailAfter);
        Assert.assertEquals ("(0 SUCCESS, 0 UNSTABLE, 0 FAILED)", containerAfter.getState());
    }
@Test
public void stateSuccess () {
HistoryInfoContainer containerAfter = new HistoryInfoContainer();
HistoryInfoDetail historyInfoDetailAfter = new HistoryInfoDetail();
historyInfoDetailAfter.setStateAsEnum(HistoryState.SUCCESS);
containerAfter.addHistoryDetail(historyInfoDetailAfter);
Assert.assertEquals ("(1 SUCCESS, 0 UNSTABLE, 0 FAILED)", containerAfter.getState());
}
    @Test
    public void stateAborted () {
        HistoryInfoContainer containerAfter = new HistoryInfoContainer();
        HistoryInfoDetail historyInfoDetailAfter = new HistoryInfoDetail();
        // ABORTED is not counted in any of the three displayed buckets.
        historyInfoDetailAfter.setStateAsEnum(HistoryState.ABORTED);
        containerAfter.addHistoryDetail(historyInfoDetailAfter);
        Assert.assertEquals ("(0 SUCCESS, 0 UNSTABLE, 0 FAILED)", containerAfter.getState());
    }
    @Test
    public void compareToAgainstNotStarted () {
        // A container without a start time compares equal in both directions,
        // and comparing against null also yields 0.
        // NOTE(review): compareTo(null) returning 0 deviates from the
        // Comparable contract (which specifies throwing NPE) — confirm this is
        // the intended behavior of HistoryInfoContainer.
        HistoryInfoContainer containerBefore = new HistoryInfoContainer();
        HistoryInfoContainer containerAfter = new HistoryInfoContainer();
        HistoryInfoDetail historyInfoDetailAfter = new HistoryInfoDetail();
        historyInfoDetailAfter.setStartedAt(historyTimeConverter.getDateAsString(LocalDateTime.now()));
        containerAfter.addHistoryDetail(historyInfoDetailAfter);
        Assert.assertEquals (0, containerAfter.compareTo(containerBefore));
        Assert.assertEquals (0, containerBefore.compareTo(containerAfter));
        Assert.assertEquals (0, containerBefore.compareTo(null));
    }
    @Test
    public void compareTo () {
        // A container started 10 hours ago sorts before one started now.
        HistoryInfoContainer containerBefore = new HistoryInfoContainer();
        HistoryInfoDetail historyInfoDetail = new HistoryInfoDetail();
        historyInfoDetail.setStartedAt(historyTimeConverter.getDateAsString(LocalDateTime.now().minus(10, ChronoUnit.HOURS)));
        containerBefore.addHistoryDetail(historyInfoDetail);
        HistoryInfoContainer containerAfter = new HistoryInfoContainer();
        HistoryInfoDetail historyInfoDetailAfter = new HistoryInfoDetail();
        historyInfoDetailAfter.setStartedAt(historyTimeConverter.getDateAsString(LocalDateTime.now()));
        containerAfter.addHistoryDetail(historyInfoDetailAfter);
        Assert.assertEquals (1, containerAfter.compareTo(containerBefore));
    }
@Test
public void toStringImpl (){
// An empty container still renders a non-null string and a placeholder description.
HistoryInfoContainer emptyContainer = new HistoryInfoContainer();
Assert.assertNotNull(emptyContainer.toString());
Assert.assertEquals("Analysis null with project null started at not yet started - (0 SUCCESS, 0 UNSTABLE, 0 FAILED)", emptyContainer.getDescription());
// A fully populated detail is reflected in the description.
HistoryInfoDetail detail = new HistoryInfoDetail();
detail.setAnalysisID("analysisID");
detail.setAnalysisName("analysisName");
detail.setProjectID("projectID");
detail.setProjectName("projectName");
detail.setBranchID("branchID");
detail.setBuildUUID(UUID.randomUUID());
detail.setContainerUrl("containerUrl");
detail.setDuration("duration");
detail.setStateAsEnum(HistoryState.SUCCESS);
HistoryInfoContainer populatedContainer = new HistoryInfoContainer();
populatedContainer.addHistoryDetail(detail);
Assert.assertNotNull(populatedContainer.toString());
Assert.assertEquals("Analysis analysisName with project projectName started at not yet started - (1 SUCCESS, 0 UNSTABLE, 0 FAILED)", populatedContainer.getDescription());
}
@Test
public void oneDetailNotFinished () {
// If any detail lacks a finish time, the whole container reports "running".
HistoryInfoDetail finishedDetail = new HistoryInfoDetail();
finishedDetail.setFinishedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 10)));
HistoryInfoDetail unfinishedDetail = new HistoryInfoDetail();
unfinishedDetail.setFinishedAt("");
HistoryInfoContainer container = new HistoryInfoContainer();
container.getHistoryDetailCollection().add(finishedDetail);
container.getHistoryDetailCollection().add(unfinishedDetail);
Assert.assertEquals("running", container.getFinishedAt());
}
@Test
public void allFinished () {
// With every detail finished, the container reports the latest finish time.
HistoryInfoDetail laterDetail = new HistoryInfoDetail();
laterDetail.setFinishedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 10)));
HistoryInfoDetail earlierDetail = new HistoryInfoDetail();
earlierDetail.setFinishedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 8, 10, 10)));
HistoryInfoContainer container = new HistoryInfoContainer();
container.getHistoryDetailCollection().add(laterDetail);
container.getHistoryDetailCollection().add(earlierDetail);
Assert.assertEquals("2016-10-10 10:10:00", container.getFinishedAt());
}
@Test
public void status () {
// One detail per state: each bucket should report exactly one entry.
HistoryInfoDetail successDetail = new HistoryInfoDetail();
successDetail.setStateAsEnum(HistoryState.SUCCESS);
HistoryInfoDetail unstableDetail = new HistoryInfoDetail();
unstableDetail.setStateAsEnum(HistoryState.UNSTABLE);
HistoryInfoDetail failureDetail = new HistoryInfoDetail();
failureDetail.setStateAsEnum(HistoryState.FAILURE);
HistoryInfoContainer container = new HistoryInfoContainer();
container.getHistoryDetailCollection().add(successDetail);
container.getHistoryDetailCollection().add(unstableDetail);
container.getHistoryDetailCollection().add(failureDetail);
Assert.assertEquals("(1 SUCCESS, 1 UNSTABLE, 1 FAILED)", container.getState());
}
@Test
public void durationNotStarted () {
// Details with no start time yield the "not yet started" placeholder.
HistoryInfoContainer container = new HistoryInfoContainer();
container.getHistoryDetailCollection().add(new HistoryInfoDetail());
container.getHistoryDetailCollection().add(new HistoryInfoDetail());
Assert.assertEquals("not yet started", container.getDuration());
}
@Test
public void durationNotFinished () {
// Started-but-unfinished details make the container report "running".
HistoryInfoDetail firstDetail = new HistoryInfoDetail();
firstDetail.setStartedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 10)));
HistoryInfoDetail secondDetail = new HistoryInfoDetail();
secondDetail.setStartedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 10)));
HistoryInfoContainer container = new HistoryInfoContainer();
container.getHistoryDetailCollection().add(firstDetail);
container.getHistoryDetailCollection().add(secondDetail);
Assert.assertEquals("running", container.getDuration());
}
@Test
public void durationFinished () {
// Expected duration spans from the shared start (10:10) to the latest finish (10:30).
HistoryInfoDetail shorterRun = new HistoryInfoDetail();
shorterRun.setStartedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 10)));
shorterRun.setFinishedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 20)));
HistoryInfoDetail longerRun = new HistoryInfoDetail();
longerRun.setStartedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 10)));
longerRun.setFinishedAt(historyTimeConverter.getDateAsString(LocalDateTime.of(2016, 10, 10, 10, 30)));
HistoryInfoContainer container = new HistoryInfoContainer();
container.getHistoryDetailCollection().add(shorterRun);
container.getHistoryDetailCollection().add(longerRun);
Assert.assertEquals("20 min 0 sec", container.getDuration());
}
@Test(expected = IllegalStateException.class)
public void addDetailsDifferentAnalysisIDs () {
// Mixing details with different analysis IDs must be rejected.
HistoryInfoDetail first = new HistoryInfoDetail();
first.setAnalysisID("1");
HistoryInfoDetail second = new HistoryInfoDetail();
second.setAnalysisID("2");
HistoryInfoContainer container = new HistoryInfoContainer();
container.addHistoryDetail(first);
container.addHistoryDetail(second);
}
@Test(expected = IllegalStateException.class)
public void addDetailsDifferentAnalysisName () {
// Mixing details with different analysis names must be rejected.
HistoryInfoDetail first = new HistoryInfoDetail();
first.setAnalysisName("1");
HistoryInfoDetail second = new HistoryInfoDetail();
second.setAnalysisName("2");
HistoryInfoContainer container = new HistoryInfoContainer();
container.addHistoryDetail(first);
container.addHistoryDetail(second);
}
@Test(expected = IllegalStateException.class)
public void addDetailsDifferentProjectIDs () {
// Mixing details with different project IDs must be rejected.
HistoryInfoDetail first = new HistoryInfoDetail();
first.setProjectID("1");
HistoryInfoDetail second = new HistoryInfoDetail();
second.setProjectID("2");
HistoryInfoContainer container = new HistoryInfoContainer();
container.addHistoryDetail(first);
container.addHistoryDetail(second);
}
@Test(expected = IllegalStateException.class)
public void addDetailsDifferentProjectName () {
// Mixing details with different project names must be rejected.
HistoryInfoDetail first = new HistoryInfoDetail();
first.setProjectName("1");
HistoryInfoDetail second = new HistoryInfoDetail();
second.setProjectName("2");
HistoryInfoContainer container = new HistoryInfoContainer();
container.addHistoryDetail(first);
container.addHistoryDetail(second);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.wal;
import java.io.Closeable;
import java.io.IOException;
import java.net.ConnectException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.wal.AbstractFSWAL;
import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Runs periodically to determine if the WAL should be rolled.
* <p/>
* NOTE: This class extends Thread rather than Chore because the sleep time can be interrupted when
* there is something to do, rather than the Chore sleep time which is invariant.
* <p/>
* The {@link #scheduleFlush(String, List)} is abstract here,
* as sometimes we hold a region without a region server but we still want to roll its WAL.
* <p/>
* TODO: change to a pool of threads
*/
@InterfaceAudience.Private
public abstract class AbstractWALRoller<T extends Abortable> extends Thread
implements Closeable {
private static final Logger LOG = LoggerFactory.getLogger(AbstractWALRoller.class);
protected static final String WAL_ROLL_PERIOD_KEY = "hbase.regionserver.logroll.period";
// One RollController per registered WAL, so each WAL's roll state is tracked
// independently (see HBASE-24665).
protected final ConcurrentMap<WAL, RollController> wals = new ConcurrentHashMap<>();
protected final T abortable;
// Period to roll log.
private final long rollPeriod;
// Sleep time (ms) between checks in run() when no roll is pending.
private final int threadWakeFrequency;
// The interval to check low replication on hlog's pipeline
private final long checkLowReplicationInterval;
// Flipped to false by close(); checked at the top of each run() iteration.
private volatile boolean running = true;
/**
 * Registers a WAL with this roller and hooks up a listener that wakes the
 * roller thread whenever a roll is requested for that WAL.
 */
public void addWAL(WAL wal) {
// check without lock first
if (wals.containsKey(wal)) {
return;
}
// this is to avoid race between addWAL and requestRollAll.
synchronized (this) {
if (wals.putIfAbsent(wal, new RollController(wal)) == null) {
wal.registerWALActionsListener(new WALActionsListener() {
@Override
public void logRollRequested(WALActionsListener.RollRequestReason reason) {
// TODO logs will contend with each other here, replace with e.g. DelayedQueue
synchronized (AbstractWALRoller.this) {
RollController controller = wals.computeIfAbsent(wal, rc -> new RollController(wal));
controller.requestRoll();
// Wake up run() so the roll happens promptly.
AbstractWALRoller.this.notifyAll();
}
}
@Override
public void postLogArchive(Path oldPath, Path newPath) throws IOException {
afterWALArchive(oldPath, newPath);
}
});
}
}
}
/**
 * Requests a roll of every registered WAL and wakes the roller thread.
 */
public void requestRollAll() {
synchronized (this) {
for (RollController controller : wals.values()) {
controller.requestRoll();
}
// Wake up run() so the rolls happen promptly.
notifyAll();
}
}
protected AbstractWALRoller(String name, Configuration conf, T abortable) {
super(name);
this.abortable = abortable;
// Default roll period is one hour.
this.rollPeriod = conf.getLong(WAL_ROLL_PERIOD_KEY, 3600000);
this.threadWakeFrequency = conf.getInt(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000);
this.checkLowReplicationInterval =
conf.getLong("hbase.regionserver.hlog.check.lowreplication.interval", 30 * 1000);
}
/**
 * we need to check low replication in period, see HBASE-18132
 */
private void checkLowReplication(long now) {
try {
for (Entry<WAL, RollController> entry : wals.entrySet()) {
WAL wal = entry.getKey();
// A WAL that is about to roll anyway will get a fresh pipeline, so the
// low-replication check would be wasted on it.
boolean needRollAlready = entry.getValue().needsRoll(now);
if (needRollAlready || !(wal instanceof AbstractFSWAL)) {
continue;
}
((AbstractFSWAL<?>) wal).checkLogLowReplication(checkLowReplicationInterval);
}
} catch (Throwable e) {
LOG.warn("Failed checking low replication", e);
}
}
private void abort(String reason, Throwable cause) {
// close all WALs before calling abort on RS.
// This is because AsyncFSWAL relies on us for rolling a new writer to make progress, and if we
// failed, AsyncFSWAL may be stuck, so we need to close it to let the upper layer know that it
// is already broken.
for (WAL wal : wals.keySet()) {
// shutdown rather than close here since we are going to abort the RS and the wals need to be
// split when recovery
try {
wal.shutdown();
} catch (IOException e) {
LOG.warn("Failed to shutdown wal", e);
}
}
abortable.abort(reason, cause);
}
@Override
public void run() {
while (running) {
long now = System.currentTimeMillis();
checkLowReplication(now);
synchronized (this) {
if (wals.values().stream().noneMatch(rc -> rc.needsRoll(now))) {
try {
// Woken early by logRollRequested()/requestRollAll() via notifyAll().
wait(this.threadWakeFrequency);
} catch (InterruptedException e) {
// restore the interrupt state
Thread.currentThread().interrupt();
}
// go back to the beginning to check again whether we should fall through to roll
// several WALs, and also check whether we should quit.
continue;
}
}
try {
for (Iterator<Entry<WAL, RollController>> iter = wals.entrySet().iterator();
iter.hasNext();) {
Entry<WAL, RollController> entry = iter.next();
WAL wal = entry.getKey();
RollController controller = entry.getValue();
if (controller.isRollRequested()) {
// WAL roll requested, fall through
LOG.debug("WAL {} roll requested", wal);
} else if (controller.needsPeriodicRoll(now)) {
// Time for periodic roll, fall through
LOG.debug("WAL {} roll period {} ms elapsed", wal, this.rollPeriod);
} else {
continue;
}
// Force the roll if the logroll.period is elapsed or if a roll was requested.
// The returned value is a collection of actual region and family names.
Map<byte[], List<byte[]>> regionsToFlush = controller.rollWal(now);
if (regionsToFlush != null) {
for (Map.Entry<byte[], List<byte[]>> r : regionsToFlush.entrySet()) {
scheduleFlush(Bytes.toString(r.getKey()), r.getValue());
}
}
}
} catch (FailedLogCloseException | ConnectException e) {
abort("Failed log close in log roller", e);
} catch (IOException ex) {
// Abort if we get here. We probably won't recover an IOE. HBASE-1132
abort("IOE in log roller",
ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex);
} catch (Exception ex) {
LOG.error("Log rolling failed", ex);
abort("Log rolling failed", ex);
}
}
LOG.info("LogRoller exiting.");
}
// Hook for subclasses; invoked after a WAL file has been archived.
protected void afterWALArchive(Path oldPath, Path newPath) {
}
/**
 * @param encodedRegionName Encoded name of region to flush.
 * @param families stores of region to flush.
 */
protected abstract void scheduleFlush(String encodedRegionName, List<byte[]> families);
// True when the roller thread is parked in wait(), i.e. no roll is mid-flight.
private boolean isWaiting() {
Thread.State state = getState();
return state == Thread.State.WAITING || state == Thread.State.TIMED_WAITING;
}
/**
 * @return true if all WAL roll finished
 */
public boolean walRollFinished() {
// Both conditions are needed: no WAL currently wants a roll AND the roller
// thread is idle (not in the middle of rolling one).
return wals.values().stream().noneMatch(rc -> rc.needsRoll(System.currentTimeMillis()))
&& isWaiting();
}
/**
 * Wait until all wals have been rolled after calling {@link #requestRollAll()}.
 */
public void waitUntilWalRollFinished() throws InterruptedException {
while (!walRollFinished()) {
Thread.sleep(100);
}
}
@Override
public void close() {
running = false;
// Interrupt any wait() so the thread notices 'running' promptly.
interrupt();
}
/**
 * Independently control the roll of each wal. When use multiwal,
 * can avoid all wal roll together. see HBASE-24665 for detail
 */
protected class RollController {
private final WAL wal;
// Set when a roll has been requested but not yet performed.
private final AtomicBoolean rollRequest;
// Timestamp of the last completed roll; basis for the periodic-roll check.
private long lastRollTime;
RollController(WAL wal) {
this.wal = wal;
this.rollRequest = new AtomicBoolean(false);
this.lastRollTime = System.currentTimeMillis();
}
public void requestRoll() {
this.rollRequest.set(true);
}
public Map<byte[], List<byte[]>> rollWal(long now) throws IOException {
this.lastRollTime = now;
// reset the flag in front to avoid missing roll request before we return from rollWriter.
this.rollRequest.set(false);
return wal.rollWriter(true);
}
public boolean isRollRequested() {
return rollRequest.get();
}
public boolean needsPeriodicRoll(long now) {
return (now - this.lastRollTime) > rollPeriod;
}
public boolean needsRoll(long now) {
return isRollRequested() || needsPeriodicRoll(now);
}
}
}
| |
package com.projectchanged.client.config;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.projectchanged.client.userInterface.LogInFrame;
import com.projectchanged.client.userInterface.MainWindow;
import com.projectchanged.client.userInterface.SetupWizardFrame;
public class ConfigurationManager {
private static final String DEFAULT_GLOBAL_CONFIG_FILE_NAME = "config.xml";
private static final String DEFAULT_CONFIG_LOCATION = "bin";
private static final String XML_FILE_EXTENSION = ".xml";
private static final String DEFAULT_GIT_LOCAL_PATH = "..";
private static ConfigurationManager instance = null;
private static final String STRING_GLOBAL_CONFIG = "Configuration";
private static final String STRING_LOGIN = "LogIn";
private static final String STRING_LOGIN_USERNAME = "Username";
private static final String STRING_LOGIN_PASSWORD = "Password";
private static final String STRING_LOGIN_STORE_USERNAME = "StoreUsername";
private static final String STRING_LOGIN_STORE_PASSWORD = "StorePassword";
private static final String STRING_LOGIN_AUTO_LOG_ON = "Auto";
private static final String STRING_USER_CONFIG = "Configuration";
private static final String STRING_USER_CONFIG_COMPLETED = "Completed";
private static final String STRING_GIT = "Git";
private static final String STRING_GIT_LOCAL_PATH = "Path";
private static final String STRING_GIT_USERNAME = "Username";
private static final String STRING_GIT_PASSWORD = "Password";
private static final String STRING_GIT_REMEMBER_USERNAME = "RememberUsername";
private static final String STRING_GIT_REMEMBER_PASSWORD = "RememberPassword";
private static final String STRING_GIT_AUTO_LOG_ON = "AutoLogOn";
private static final String STRING_USER_LASTUPDATE = "LastUpdateTime";
private static final String STRING_NOTIFICATION = "Notification";
private static final String STRING_NOTIFICATION_ENABLE_EMAIL = "EmailEnabled";
private static final String STRING_NOTIFICATION_ENABLE_TRAY = "TrayEnabled";
private static final String STRING_NOTIFICATION_EMAIL = "EmailAddress";
private static final String STRING_NOTIFICATION_PERIOD = "EmailPeriod";
private static final String STRING_PROJECTS = "Projects";
private static final String STRING_PROJECT = "Project";
private static final String STRING_PROJECT_NAME = "Name";
// Global Configuration
private String loginUsername = "";
private char[] loginPassword = new char[0];
private boolean loginStoreUsername = false;
private boolean loginStorePassword = false;
private boolean loginAutoLogOn = false;
private String serverLocation = null;
// private String databaseURL = null;
// private String databaseUsername = null;
// private String databasePassword = null;
// private String databaseDriver = null;
// User Configuration
private boolean userConfigCompleted = false;
private String gitLocalPath = ConfigurationManager.DEFAULT_GIT_LOCAL_PATH;
private String gitUsername = null;
private char[] gitPassword = new char[0];
private boolean gitStoreUsername = true;
private boolean gitStorePassword = true;
private boolean gitAutoLogOn = true;
private String notificationEmail = "";
private String notificationPeriod = "0";
private boolean notificationEmailEnabled = true;
private boolean notificationTrayEnabled = true;
private Document globalConfigDocument;
private Document userConfigDocument;
// Local variable
private File globalConfigFile;
private File userConfigFile;
private Set<File> projectDirectories;
/**
 * Returns the set of local project directories loaded from the user config.
 * NOTE(review): this exposes the internal mutable set; callers can modify it
 * in place — consider returning an unmodifiable view.
 */
public Set<File> getProjectDirectories() {
return projectDirectories;
}
/**
 * Replaces the set of known project directories.
 *
 * @param projectDirectories the new set of project directories (stored by reference)
 */
public void setProjectDirectories(Set<File> projectDirectories) {
this.projectDirectories = projectDirectories;
}
/**
 * Loads the global configuration from {@code bin/config.xml}, creating the
 * {@code bin} folder and a default (empty) config file when none exists yet.
 * Exits the JVM when the XML parser cannot be created or when the file is
 * corrupted or cannot be created.
 */
private ConfigurationManager() {
projectDirectories = new HashSet<File>();
DocumentBuilderFactory builderFactory = createBuilderFactory();
try {
DocumentBuilder builder = builderFactory.newDocumentBuilder();
globalConfigFile = new File(ConfigurationManager.DEFAULT_CONFIG_LOCATION + File.separatorChar + ConfigurationManager.DEFAULT_GLOBAL_CONFIG_FILE_NAME);
// A zero-length file is treated like a missing one so we can recover from
// an earlier aborted write.
if (globalConfigFile.exists() && globalConfigFile.isFile() && (globalConfigFile.length() > 0)) {
parseGlobalConfigDocument(builder);
}
else {
// Error checking and create the configuration folder if not
// exist
File configFolder = validateGlobalConfigFile();
// Create configFile and populate default parameters.
createDefaultGlobalConfigFile(builder, configFolder);
}
}
catch (ParserConfigurationException e1) {
exitOnXMLParserError();
}
catch (SAXException e) {
System.err.println("Corrupted config.xml, Please ask administrator for help.");
System.exit(-1);
}
catch (IOException e) {
System.err.println("Cannot create config.xml, Please ask administrator for help.");
System.exit(-1);
}
}
/**
 * Parses the already-existing global config file and loads the login section
 * from it.
 *
 * @param builder document builder used to parse the XML file
 * @throws SAXException when the file is not well-formed XML
 * @throws IOException when the file cannot be read
 */
private void parseGlobalConfigDocument(DocumentBuilder builder) throws SAXException, IOException {
globalConfigDocument = builder.parse(globalConfigFile);
// Configure Login settings; exactly one LogIn element is expected.
NodeList loginList = globalConfigDocument.getElementsByTagName(ConfigurationManager.STRING_LOGIN);
if ((loginList != null) && (loginList.getLength() == 1)) {
loadLoginConfiguration(loginList);
}
else {
System.err.println("Your config.xml is outdated and doesn't contain login info.");
}
}
/**
 * Creates a {@link DocumentBuilderFactory} that skips comments and
 * element-content whitespace while parsing.
 * NOTE(review): external entities/DTDs are not disabled here; the config
 * files are local, but consider hardening against XXE.
 *
 * @return the configured factory
 */
private DocumentBuilderFactory createBuilderFactory() {
final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
factory.setIgnoringComments(true);
factory.setIgnoringElementContentWhitespace(true);
return factory;
}
/**
 * Ensures the configuration folder ({@code bin}) exists and is a directory,
 * creating it when missing. Exits the JVM when a regular file occupies the
 * folder name or the folder cannot be created.
 *
 * @return the configuration folder
 */
private File validateGlobalConfigFile() {
File configFolder = new File(ConfigurationManager.DEFAULT_CONFIG_LOCATION);
if (configFolder.exists() && configFolder.isFile()) {
System.err.print("A file with name \"bin\" already exists in the working directory. ");
System.err.println("Please delete the file or move application to a new location, then try again.");
//MainWindow.getInstance().trayIconExitClicked();
System.exit(-1);
}
else if (!configFolder.exists() && !configFolder.mkdir()) {
System.err.print("Cannot make folder: " + configFolder.getAbsolutePath() + ".");
System.err.println("Please check your file system permissions.");
//MainWindow.getInstance().trayIconExitClicked();
System.exit(-1);
}
return configFolder;
}
/**
 * Creates a fresh global config file, initializes an empty document for it,
 * and tells the user to have the file populated.
 *
 * @param builder document builder used to create the empty document
 * @param configFolder folder the config file lives in (used in the message)
 * @throws IOException when the file cannot be created
 */
private void createDefaultGlobalConfigFile(DocumentBuilder builder, File configFolder) throws IOException {
// createNewFile() returning false means a (zero-length) file already
// exists, which is fine: we overwrite it below either way.
globalConfigFile.createNewFile();
globalConfigDocument = builder.newDocument();
System.err.print("Please contact your administrator to setup your server in file: ");
System.err.println(configFolder.getAbsolutePath() + File.separatorChar + ConfigurationManager.DEFAULT_GLOBAL_CONFIG_FILE_NAME);
outputUserConfigToFile();
}
/**
 * Loads the {@code LogIn} element's attributes into the login fields.
 * The flags are hierarchical: auto log-on implies storing both username and
 * password, and storing the password implies storing the username.
 *
 * @param loginList node list containing exactly one LogIn element
 */
private void loadLoginConfiguration(NodeList loginList) {
NamedNodeMap loginAttributes = loginList.item(0).getAttributes();
Node autoLogOn = loginAttributes.getNamedItem(ConfigurationManager.STRING_LOGIN_AUTO_LOG_ON);
if (autoLogOn != null) {
loginAutoLogOn = autoLogOn.getNodeValue().equals("1");
if (loginAutoLogOn) {
// Auto log-on needs both credentials available.
loginStoreUsername = true;
loginStorePassword = true;
}
else {
Node storePassword = loginAttributes.getNamedItem(ConfigurationManager.STRING_LOGIN_STORE_PASSWORD);
if (storePassword != null) {
loginStorePassword = storePassword.getNodeValue().equals("1");
if (loginStorePassword) {
// A stored password is useless without the matching username.
loginStoreUsername = true;
}
else {
Node storeUsername = loginAttributes.getNamedItem(ConfigurationManager.STRING_LOGIN_STORE_USERNAME);
if (storeUsername != null) {
loginStoreUsername = storeUsername.getNodeValue().equals("1");
}
}
}
}
// Must run after the flags above: they gate whether values are read at all.
loadLoginUsername(loginAttributes);
loadLoginPassword(loginAttributes);
}
}
/**
 * Reads the stored login password attribute, but only when the user chose to
 * store passwords.
 *
 * @param loginAttributes attributes of the LogIn element
 */
private void loadLoginPassword(NamedNodeMap loginAttributes) {
if (!loginStorePassword) {
return;
}
Node passwordNode = loginAttributes.getNamedItem(ConfigurationManager.STRING_LOGIN_PASSWORD);
if (passwordNode != null) {
loginPassword = passwordNode.getNodeValue().toCharArray();
}
}
/**
 * Reads the stored login username attribute, but only when the user chose to
 * store usernames.
 *
 * @param loginAttributes attributes of the LogIn element
 */
private void loadLoginUsername(NamedNodeMap loginAttributes) {
if (!loginStoreUsername) {
return;
}
Node usernameNode = loginAttributes.getNamedItem(ConfigurationManager.STRING_LOGIN_USERNAME);
if (usernameNode != null) {
loginUsername = usernameNode.getNodeValue();
}
}
/**
 * Tells whether a DOM node exists and carries a non-empty value.
 *
 * @param node node to inspect, may be null
 * @return true when the node is present and its value is not the empty string
 */
private boolean isNodeValid(Node node) {
if (node == null) {
return false;
}
return !node.getNodeValue().equals("");
}
/**
 * Prints where the global config file lives so an administrator can fix it,
 * then terminates the JVM.
 */
private void exitOnMalformedGlobalConfigFile() {
File configFolder = new File(ConfigurationManager.DEFAULT_CONFIG_LOCATION);
System.err.print("Please contact your administrator to setup your server in file: ");
System.err.println(configFolder.getAbsolutePath() + File.separatorChar + ConfigurationManager.DEFAULT_GLOBAL_CONFIG_FILE_NAME);
//MainWindow.getInstance().trayIconExitClicked();
System.exit(-1);
}
/**
 * Loads (or creates) the per-user configuration file
 * {@code bin/<systemUsername>.xml}. On a successful load the user is either
 * sent to the setup wizard (when the configuration was never completed) or
 * logged straight into the main window.
 *
 * @param systemUsername
 *            - user name entered on log in screen.
 * @return true if no unrecoverable error while loading or creating the
 *         file. false otherwise.
 */
public boolean loadConfigurationForUser(String systemUsername) {
DocumentBuilderFactory builderFactory = createBuilderFactory();
try {
DocumentBuilder builder = builderFactory.newDocumentBuilder();
userConfigFile = new File(ConfigurationManager.DEFAULT_CONFIG_LOCATION + File.separatorChar + systemUsername + ConfigurationManager.XML_FILE_EXTENSION);
if (userConfigFile.exists() && userConfigFile.isFile()) {
// Existing config: parse it and load every section.
userConfigDocument = builder.parse(userConfigFile);
loadGitConfiguration();
loadNotificationConfiguration();
loadProjectsConfiguration();
loadUserProfileConfiguration();
if (!userConfigCompleted) {
// First-run setup was never finished; resume the wizard.
LogInFrame.getInstance().setVisible(false);
SetupWizardFrame.getInstance().setVisible(true);
}
else {
MainWindow.getInstance().logIn();
}
return true;
}
else if (!userConfigFile.exists() && userConfigFile.createNewFile()) {
// No config yet: create the default one and start the wizard.
createDefaultUserConfigFile(builder);
return true;
}
else {
// A directory (or otherwise unusable entry) occupies the file name.
exitOnConflictingUserConfigFile(systemUsername);
return false;
}
}
catch (ParserConfigurationException e) {
exitOnXMLParserError();
return false;
}
catch (IOException e) {
exitOnCreatingUserConfigFile(systemUsername);
return false;
}
catch (SAXException e) {
exitOnCurruptedUserConfigFile(systemUsername);
return false;
}
}
/**
 * Reports a corrupted per-user config file and terminates the JVM.
 * NOTE(review): the method name misspells "Corrupted"; renaming would touch
 * all callers, so it is kept as-is.
 *
 * @param systemUsername user whose config file is corrupted
 */
private void exitOnCurruptedUserConfigFile(String systemUsername) {
System.err.println("Corrupted user config file " + systemUsername + ".xml, Please ask administrator for help.");
//MainWindow.getInstance().trayIconExitClicked();
System.exit(-1);
}
/**
 * Reports that the per-user config file could not be created and terminates
 * the JVM.
 *
 * @param systemUsername user whose config file could not be created
 */
private void exitOnCreatingUserConfigFile(String systemUsername) {
System.err.println("Cannot create " + systemUsername + ".xml, Please ask administrator for help.");
//MainWindow.getInstance().trayIconExitClicked();
System.exit(-1);
}
/**
 * Reports that no XML parser could be configured and terminates the JVM.
 */
private void exitOnXMLParserError() {
System.err.println("Cannot setup xml parser. Now exit.");
//MainWindow.getInstance().trayIconExitClicked();
System.exit(-1);
}
/**
 * Reports that a folder already occupies the per-user config file name and
 * terminates the JVM.
 *
 * @param systemUsername user whose config file name is taken by a folder
 */
private void exitOnConflictingUserConfigFile(String systemUsername) {
// One println produces the same stderr bytes as the old print + println pair.
System.err.println("A folder with name \"" + systemUsername + "\" already exists in the working directory. "
+ "Please delete the folder or move application to a new location, then try again.");
//MainWindow.getInstance().trayIconExitClicked();
System.exit(-1);
}
/**
 * Builds a brand-new user config document with default Git, notification and
 * projects sections, persists it, and hands the user over to the first-run
 * setup wizard.
 *
 * @param builder document builder used to create the new document
 */
private void createDefaultUserConfigFile(DocumentBuilder builder) {
userConfigDocument = builder.newDocument();
Element root = userConfigDocument.createElement(ConfigurationManager.STRING_USER_CONFIG);
// Mark the configuration as not yet completed so the wizard runs.
root.setAttribute(ConfigurationManager.STRING_USER_CONFIG_COMPLETED, "0");
root.appendChild(createDefaultGitConfig());
root.appendChild(createDefaultDotificationConfig());
root.appendChild(createDefaultProjectsConfig());
userConfigDocument.appendChild(root);
outputUserConfigToFile();
// Swap the login frame for the setup wizard.
LogInFrame.getInstance().setVisible(false);
SetupWizardFrame.getInstance().setVisible(true);
}
/**
 * @return an empty default {@code Projects} element
 */
private Element createDefaultProjectsConfig() {
return userConfigDocument.createElement(ConfigurationManager.STRING_PROJECTS);
}
/**
 * Builds the default {@code Notification} element with both e-mail and tray
 * notifications enabled.
 * NOTE(review): the method name misspells "Notification"; renaming would
 * touch all callers, so it is kept as-is.
 *
 * @return the default notification element
 */
private Element createDefaultDotificationConfig() {
final Element notificationElement = userConfigDocument.createElement(ConfigurationManager.STRING_NOTIFICATION);
notificationElement.setAttribute(ConfigurationManager.STRING_NOTIFICATION_EMAIL, notificationEmail);
notificationElement.setAttribute(ConfigurationManager.STRING_NOTIFICATION_PERIOD, notificationPeriod);
notificationElement.setAttribute(ConfigurationManager.STRING_NOTIFICATION_ENABLE_EMAIL, "1");
notificationElement.setAttribute(ConfigurationManager.STRING_NOTIFICATION_ENABLE_TRAY, "1");
return notificationElement;
}
/**
 * Builds the default {@code Git} element: empty credentials, the default
 * relative local path, and remember/auto-log-on flags all enabled.
 *
 * @return the default git element
 */
private Element createDefaultGitConfig() {
final Element gitElement = userConfigDocument.createElement(ConfigurationManager.STRING_GIT);
gitElement.setAttribute(ConfigurationManager.STRING_GIT_LOCAL_PATH, ConfigurationManager.DEFAULT_GIT_LOCAL_PATH);
gitElement.setAttribute(ConfigurationManager.STRING_GIT_USERNAME, "");
gitElement.setAttribute(ConfigurationManager.STRING_GIT_PASSWORD, "");
gitElement.setAttribute(ConfigurationManager.STRING_GIT_REMEMBER_USERNAME, "1");
gitElement.setAttribute(ConfigurationManager.STRING_GIT_REMEMBER_PASSWORD, "1");
gitElement.setAttribute(ConfigurationManager.STRING_GIT_AUTO_LOG_ON, "1");
return gitElement;
}
/**
 * Reads the {@code Completed} flag from the root configuration element; any
 * value other than "0" marks the first-run setup as finished.
 */
private void loadUserProfileConfiguration() {
NodeList configList = userConfigDocument.getElementsByTagName(ConfigurationManager.STRING_USER_CONFIG);
if ((configList == null) || (configList.getLength() != 1)) {
return;
}
Node configComplete = configList.item(0).getAttributes().getNamedItem(ConfigurationManager.STRING_USER_CONFIG_COMPLETED);
if (configComplete != null) {
userConfigCompleted = !configComplete.getNodeValue().equals("0");
}
}
/**
 * Rebuilds {@code projectDirectories} from the {@code Projects} section of
 * the user config and registers the result with the {@code FileManager}.
 * Project entries are resolved relative to the configured git local path;
 * when that path is not an existing directory, no projects are loaded.
 */
private void loadProjectsConfiguration() {
NodeList projectsObjectList = userConfigDocument.getElementsByTagName(ConfigurationManager.STRING_PROJECTS);
if ((projectsObjectList == null) || (projectsObjectList.getLength() != 1)) {
return;
}
projectDirectories = new HashSet<File>();
File localDirectory = new File(gitLocalPath);
if (localDirectory.exists() && localDirectory.isDirectory()) {
String basePath = localDirectory.getAbsolutePath();
NodeList projectsList = projectsObjectList.item(0).getChildNodes();
for (int i = 0; i < projectsList.getLength(); i++) {
loadProjectFromNode(basePath, projectsList.item(i));
}
}
FileManager.getInstance().addRepository(projectDirectories);
}
/**
 * Adds the project referenced by the given config node to
 * {@code projectDirectories} when its directory exists under the git base
 * path; otherwise reports why it was skipped.
 *
 * @param absolutePartialPath absolute path of the local git base directory
 * @param project child node of the Projects element; non-element nodes
 *            (e.g. whitespace text) are ignored
 */
private void loadProjectFromNode(String absolutePartialPath, Node project) {
// Guard against ClassCastException: the child list may contain text or
// comment nodes, not only Project elements.
if (project.getNodeType() != Node.ELEMENT_NODE) {
return;
}
if (((Element) project).getTagName().equalsIgnoreCase(ConfigurationManager.STRING_PROJECT)) {
Node nameAttribute = project.getAttributes().getNamedItem(ConfigurationManager.STRING_PROJECT_NAME);
if (nameAttribute == null) {
// Previously this dereferenced a possibly-missing attribute (NPE).
System.err.println("Project entry without a name will be ignored.");
return;
}
String projectName = nameAttribute.getNodeValue();
File directory = new File(absolutePartialPath + File.separatorChar + projectName);
if (directory.exists() && directory.isDirectory()) {
projectDirectories.add(directory);
}
else if (directory.exists()) {
System.err.println(projectName + " is not a directory and will be ignored.");
}
else {
System.err.println(projectName + " doesn't exist and will be ignored.");
}
}
}
/**
 * Populates the notification settings (email address, period, email/tray
 * enable flags) from the single notification element of the user
 * configuration document. Absent attributes leave the corresponding field
 * untouched; boolean attributes treat any value other than "0" as true.
 */
private void loadNotificationConfiguration() {
NodeList notificationList = userConfigDocument.getElementsByTagName(ConfigurationManager.STRING_NOTIFICATION);
if ((notificationList != null) && (notificationList.getLength() == 1)) {
NamedNodeMap notificationAttributes = notificationList.item(0).getAttributes();
Node emailAddress = notificationAttributes.getNamedItem(ConfigurationManager.STRING_NOTIFICATION_EMAIL);
if (emailAddress != null) {
notificationEmail = emailAddress.getNodeValue();
}
Node period = notificationAttributes.getNamedItem(ConfigurationManager.STRING_NOTIFICATION_PERIOD);
if (period != null) {
notificationPeriod = period.getNodeValue();
}
Node useEmail = notificationAttributes.getNamedItem(ConfigurationManager.STRING_NOTIFICATION_ENABLE_EMAIL);
if (useEmail != null) {
notificationEmailEnabled = !useEmail.getNodeValue().equals("0");
}
Node useTray = notificationAttributes.getNamedItem(ConfigurationManager.STRING_NOTIFICATION_ENABLE_TRAY);
if (useTray != null) {
notificationTrayEnabled = !useTray.getNodeValue().equals("0");
}
}
}
/**
 * Populates the git settings (username, local path, password, remember
 * flags, auto log on) from the single git element of the user configuration
 * document. Absent attributes leave the corresponding field untouched;
 * boolean attributes treat any value other than "0" as true.
 */
private void loadGitConfiguration() {
    NodeList gitNodes = userConfigDocument.getElementsByTagName(ConfigurationManager.STRING_GIT);
    if ((gitNodes == null) || (gitNodes.getLength() != 1)) {
        return;
    }
    NamedNodeMap attrs = gitNodes.item(0).getAttributes();
    Node attr = attrs.getNamedItem(ConfigurationManager.STRING_GIT_USERNAME);
    if (attr != null) {
        gitUsername = attr.getNodeValue();
    }
    attr = attrs.getNamedItem(ConfigurationManager.STRING_GIT_LOCAL_PATH);
    if (attr != null) {
        gitLocalPath = attr.getNodeValue();
    }
    attr = attrs.getNamedItem(ConfigurationManager.STRING_GIT_PASSWORD);
    if (attr != null) {
        gitPassword = attr.getNodeValue().toCharArray();
    }
    attr = attrs.getNamedItem(ConfigurationManager.STRING_GIT_REMEMBER_USERNAME);
    if (attr != null) {
        gitStoreUsername = !attr.getNodeValue().equals("0");
    }
    attr = attrs.getNamedItem(ConfigurationManager.STRING_GIT_REMEMBER_PASSWORD);
    if (attr != null) {
        gitStorePassword = !attr.getNodeValue().equals("0");
    }
    attr = attrs.getNamedItem(ConfigurationManager.STRING_GIT_AUTO_LOG_ON);
    if (attr != null) {
        gitAutoLogOn = !attr.getNodeValue().equals("0");
    }
}
/**
 * Returns the process-wide singleton, creating it lazily on first use.
 * NOTE(review): the lazy initialization is not synchronized, so concurrent
 * first calls could create two instances — confirm this is only ever called
 * from a single thread.
 *
 * @return the shared ConfigurationManager instance
 */
public static ConfigurationManager getInstance() {
if (ConfigurationManager.instance == null) {
ConfigurationManager.instance = new ConfigurationManager();
}
return ConfigurationManager.instance;
}
/**
* @return
*/
// public String getServerLocation() {
// return serverLocation;
// }
/**
 * Returns the set of tracked project directories.
 * NOTE(review): this exposes the internal mutable set; callers can modify
 * it directly and such changes will later be persisted by
 * storeProjectsToUserDocument().
 *
 * @return the live set of project directories
 */
public Set<File> getDirectories() {
return projectDirectories;
}
/**
 * Tells whether a tracked project directory with the given name exists.
 *
 * @param projectName the project (directory) name to look for
 * @return true when some tracked directory has exactly that name
 */
protected boolean isProjectExist(String projectName) {
    // Delegate to the lookup so both methods share one definition of a match.
    return getProject(projectName) != null;
}
/**
 * Looks up a tracked project directory by its name.
 *
 * @param projectName the project (directory) name to look for
 * @return the matching directory, or null when none is tracked under that name
 */
protected File getProject(String projectName) {
    File found = null;
    for (File candidate : projectDirectories) {
        if (candidate.getName().equals(projectName)) {
            found = candidate;
            break;
        }
    }
    return found;
}
/**
 * Adds a directory to the tracked project set.
 *
 * @param directory candidate directory
 * @return true when the directory was accepted; false when it is null,
 *         not a directory, or already tracked
 */
public boolean addDirectory(File directory) {
    boolean accepted = (directory != null) && directory.isDirectory()
            && !projectDirectories.contains(directory);
    if (accepted) {
        projectDirectories.add(directory);
    }
    return accepted;
}
/**
 * Persists the global login configuration: refreshes the login element
 * under the root config element, then writes the global document back to
 * its file. Does nothing when no global configuration document is loaded.
 */
public void outputLoginConfigToFile() {
    if (globalConfigDocument == null) {
        return;
    }
    storeGlobalLoginConfig(getRootConfigElement());
    writeOutLoginConfigToFile();
}
/**
 * Serializes the global configuration document to its backing file.
 * Failures are reported on stderr rather than propagated, so a broken XML
 * transformer setup does not crash the caller.
 *
 * @throws TransformerFactoryConfigurationError when no transformer
 *         implementation is available (unchecked; declared for documentation)
 */
private void writeOutLoginConfigToFile() throws TransformerFactoryConfigurationError {
    try {
        TransformerFactory transformerFactory = TransformerFactory.newInstance();
        Transformer transformer = transformerFactory.newTransformer();
        DOMSource source = new DOMSource(globalConfigDocument);
        StreamResult result = new StreamResult(globalConfigFile);
        transformer.transform(source, result);
    }
    catch (TransformerConfigurationException e) {
        System.err.println("Invalid transformer configuration, failed to store configuration.");
    }
    catch (TransformerException e) {
        // Message typo fixed: "configuraiton" -> "configuration".
        System.err.println("Transformer ran in to problem, failed to store configuration.");
    }
}
/**
 * Returns the root global-config element of the global document, creating
 * and appending one when the document does not contain it yet.
 *
 * @return the (possibly freshly created) root configuration element
 */
private Element getRootConfigElement() {
    NodeList roots = globalConfigDocument.getElementsByTagName(ConfigurationManager.STRING_GLOBAL_CONFIG);
    if (roots.getLength() > 0) {
        return (Element) roots.item(0);
    }
    Element created = globalConfigDocument.createElement(ConfigurationManager.STRING_GLOBAL_CONFIG);
    globalConfigDocument.appendChild(created);
    return created;
}
/**
 * Writes the login settings into the login element under the given root
 * config element. When no login element exists yet, a fresh one is created
 * with empty/disabled defaults; otherwise the store flags are persisted and
 * the username/password values are written only when their corresponding
 * "store" flag is set (cleared to "" otherwise).
 *
 * @param configElement root element of the global configuration document
 */
private void storeGlobalLoginConfig(Element configElement) {
NodeList loginList = globalConfigDocument.getElementsByTagName(ConfigurationManager.STRING_LOGIN);
if (loginList.getLength() == 0) {
// First run: create the element with safe defaults (nothing stored).
Element loginElement = globalConfigDocument.createElement(ConfigurationManager.STRING_LOGIN);
loginElement.setAttribute(ConfigurationManager.STRING_LOGIN_USERNAME, "");
loginElement.setAttribute(ConfigurationManager.STRING_LOGIN_PASSWORD, "");
loginElement.setAttribute(ConfigurationManager.STRING_LOGIN_STORE_USERNAME, "0");
loginElement.setAttribute(ConfigurationManager.STRING_LOGIN_STORE_PASSWORD, "0");
loginElement.setAttribute(ConfigurationManager.STRING_LOGIN_AUTO_LOG_ON, "0");
configElement.appendChild(loginElement);
}
else {
storeAttrValueToGlobalDocument(loginStoreUsername ? "1" : "0", ConfigurationManager.STRING_LOGIN, ConfigurationManager.STRING_LOGIN_STORE_USERNAME);
storeAttrValueToGlobalDocument(loginStorePassword ? "1" : "0", ConfigurationManager.STRING_LOGIN, ConfigurationManager.STRING_LOGIN_STORE_PASSWORD);
storeAttrValueToGlobalDocument(loginAutoLogOn ? "1" : "0", ConfigurationManager.STRING_LOGIN, ConfigurationManager.STRING_LOGIN_AUTO_LOG_ON);
// Only persist the actual username when the user opted in.
if (loginStoreUsername) {
storeAttrValueToGlobalDocument(loginUsername, ConfigurationManager.STRING_LOGIN, ConfigurationManager.STRING_LOGIN_USERNAME);
}
else {
storeAttrValueToGlobalDocument("", ConfigurationManager.STRING_LOGIN, ConfigurationManager.STRING_LOGIN_USERNAME);
}
// NOTE(review): the password is written to the XML file in plain text.
if (loginStorePassword) {
storeAttrValueToGlobalDocument(String.valueOf(loginPassword), ConfigurationManager.STRING_LOGIN, ConfigurationManager.STRING_LOGIN_PASSWORD);
}
else {
storeAttrValueToGlobalDocument("", ConfigurationManager.STRING_LOGIN, ConfigurationManager.STRING_LOGIN_PASSWORD);
}
}
}
/**
 * Persists both configuration documents: refreshes the login section of the
 * global document and the git/notification/projects sections of the user
 * document, then writes both files with one transformer. When the user
 * document is absent, nothing is written to disk (the global document is
 * only updated in memory, matching the original control flow).
 */
public void outputUserConfigToFile() {
    if (globalConfigDocument != null) {
        // Reuse getRootConfigElement() instead of duplicating its
        // lookup-or-create logic inline as before.
        storeGlobalLoginConfig(getRootConfigElement());
    }
    if (userConfigDocument != null) {
        storeUserConfigToFile();
        storeProjectsToUserDocument();
        try {
            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            // Write the global document first, then the user document.
            DOMSource source = new DOMSource(globalConfigDocument);
            StreamResult result = new StreamResult(globalConfigFile);
            transformer.transform(source, result);
            source = new DOMSource(userConfigDocument);
            result = new StreamResult(userConfigFile);
            transformer.transform(source, result);
        }
        catch (TransformerConfigurationException e) {
            System.err.println("Invalid transformer configuration, failed to store configuration.");
        }
        catch (TransformerException e) {
            // Message typo fixed: "configuraiton" -> "configuration".
            System.err.println("Transformer ran in to problem, failed to store configuration.");
        }
    }
}
/**
 * Writes the git, user-config and notification settings into the user
 * configuration DOM. Username/password are persisted only when their
 * "remember" flags are set; otherwise the stored values are cleared and the
 * dependent flags forced off (password storage requires username storage,
 * auto log on requires password storage).
 * NOTE(review): the git password is written to the XML file in plain text.
 */
private void storeUserConfigToFile() {
storeAttrValueToUserDocument(gitLocalPath, ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_LOCAL_PATH);
storeAttrValueToUserDocument(userConfigCompleted ? "1" : "0", ConfigurationManager.STRING_USER_CONFIG, ConfigurationManager.STRING_USER_CONFIG_COMPLETED);
if (gitStoreUsername) {
storeAttrValueToUserDocument("1", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_REMEMBER_USERNAME);
storeAttrValueToUserDocument(gitUsername, ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_USERNAME);
if (gitStorePassword) {
storeAttrValueToUserDocument("1", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_REMEMBER_PASSWORD);
storeAttrValueToUserDocument(String.valueOf(gitPassword), ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_PASSWORD);
String autoLogonString = gitAutoLogOn ? "1" : "0";
storeAttrValueToUserDocument(autoLogonString, ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_AUTO_LOG_ON);
}
else {
// Password not stored: clear the persisted password and disable auto log on.
storeAttrValueToUserDocument("0", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_REMEMBER_PASSWORD);
storeAttrValueToUserDocument("0", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_AUTO_LOG_ON);
storeAttrValueToUserDocument("", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_PASSWORD);
}
}
else {
// Username not stored: clear all stored credentials and dependent flags.
storeAttrValueToUserDocument("0", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_REMEMBER_USERNAME);
storeAttrValueToUserDocument("0", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_REMEMBER_PASSWORD);
storeAttrValueToUserDocument("0", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_AUTO_LOG_ON);
storeAttrValueToUserDocument("", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_USERNAME);
storeAttrValueToUserDocument("", ConfigurationManager.STRING_GIT, ConfigurationManager.STRING_GIT_PASSWORD);
}
storeAttrValueToUserDocument(notificationEmail, ConfigurationManager.STRING_NOTIFICATION, ConfigurationManager.STRING_NOTIFICATION_EMAIL);
storeAttrValueToUserDocument(notificationPeriod, ConfigurationManager.STRING_NOTIFICATION, ConfigurationManager.STRING_NOTIFICATION_PERIOD);
storeAttrValueToUserDocument(notificationEmailEnabled ? "1" : "0", ConfigurationManager.STRING_NOTIFICATION, ConfigurationManager.STRING_NOTIFICATION_ENABLE_EMAIL);
storeAttrValueToUserDocument(notificationTrayEnabled ? "1" : "0", ConfigurationManager.STRING_NOTIFICATION, ConfigurationManager.STRING_NOTIFICATION_ENABLE_TRAY);
}
// public String getDatabaseURL() {
// return databaseURL;
// }
//
// public String getDatabaseUsername() {
// return databaseUsername;
// }
//
// public String getDatabasePassword() {
// return databasePassword;
// }
//
// public String getDatabaseDriver() {
// return databaseDriver;
// }
// --- Git settings accessors -------------------------------------------------
public String getGitUsername() {
return gitUsername;
}
public boolean isGitStoreUsername() {
return gitStoreUsername;
}
public void setGitStoreUsername(boolean gitStoreUsername) {
this.gitStoreUsername = gitStoreUsername;
}
public boolean isGitStorePassword() {
return gitStorePassword;
}
public void setGitStorePassword(boolean gitStorePassword) {
this.gitStorePassword = gitStorePassword;
}
public boolean isGitAutoLogOn() {
return gitAutoLogOn;
}
public void setGitAutoLogOn(boolean gitAutoLogOn) {
this.gitAutoLogOn = gitAutoLogOn;
}
public void setGitUsername(String gitUsername) {
this.gitUsername = gitUsername;
}
// NOTE(review): returns the internal char[] (not a copy); callers must not
// clear or mutate it unless they own its lifecycle.
public char[] getGitPassword() {
return gitPassword;
}
public void setGitPassword(char[] gitPassword) {
this.gitPassword = gitPassword;
}
/**
 * @return the local directory under which git repositories are checked out
 */
public String getGitLocalPath() {
return gitLocalPath;
}
/**
 * @param folderPath absolute path to use as the git local directory
 */
public void setGitLocalPath(String folderPath) {
gitLocalPath = folderPath;
}
// Convenience overload: stores the folder's absolute path.
public void setGitLocalPath(File folder) {
gitLocalPath = folder.getAbsolutePath();
}
// --- Notification settings accessors ----------------------------------------
public String getNotificationEmail() {
return notificationEmail;
}
public void setNotificationEmail(String notificationEmail) {
this.notificationEmail = notificationEmail;
}
public String getNotificationPeriod() {
return notificationPeriod;
}
public void setNotificationPeriod(String notificationPeriod) {
this.notificationPeriod = notificationPeriod;
}
public boolean isNotificationEmailEnabled() {
return notificationEmailEnabled;
}
public void setNotificationEmailEnabled(boolean notificationEmailEnabled) {
this.notificationEmailEnabled = notificationEmailEnabled;
}
public boolean isNotificationTrayEnabled() {
return notificationTrayEnabled;
}
public void setNotificationTrayEnabled(boolean notificationTrayEnabled) {
this.notificationTrayEnabled = notificationTrayEnabled;
}
// Marks the one-time user configuration as completed (or not).
public void setUserConfigCompleted(boolean userConfigCompleted) {
this.userConfigCompleted = userConfigCompleted;
}
// --- Login settings accessors ------------------------------------------------
public String getLoginUsername() {
return loginUsername;
}
public void setLoginUsername(String loginUsername) {
this.loginUsername = loginUsername;
}
// NOTE(review): returns the internal char[] (not a copy).
public char[] getLoginPassword() {
return loginPassword;
}
public void setLoginPassword(char[] loginPassword) {
this.loginPassword = loginPassword;
}
public boolean isLoginStoreUsername() {
return loginStoreUsername;
}
// Disabling "store username" also disables the dependent options
// "store password" and "auto log on".
public void setLoginStoreUsername(boolean loginStoreUsername) {
this.loginStoreUsername = loginStoreUsername;
if (!loginStoreUsername) {
loginStorePassword = false;
loginAutoLogOn = false;
}
}
public boolean isLoginStorePassword() {
return loginStorePassword;
}
// Storing the password implies storing the username; turning it off
// disables auto log on (which needs a stored password).
public void setLoginStorePassword(boolean loginStorePassword) {
this.loginStorePassword = loginStorePassword;
if (loginStorePassword) {
loginStoreUsername = true;
}
else {
loginAutoLogOn = false;
}
}
public boolean isLoginAutoLogOn() {
return loginAutoLogOn;
}
// Auto log on requires both credentials to be stored.
public void setLoginAutoLogOn(boolean loginAutoLogOn) {
this.loginAutoLogOn = loginAutoLogOn;
if (loginAutoLogOn) {
loginStoreUsername = true;
loginStorePassword = true;
}
}
/**
 * Sets attrName=value on the single elementName element of the user
 * configuration DOM, creating the attribute — or the element itself — when
 * it is missing. A null value is stored as the empty string.
 *
 * @param value attribute value (null is treated as "")
 * @param elementName tag name of the target element
 * @param attrName name of the attribute to set
 */
private void storeAttrValueToUserDocument(String value, String elementName, String attrName) {
    String text = (value == null) ? "" : value;
    NodeList matches = userConfigDocument.getElementsByTagName(elementName);
    if ((matches == null) || (matches.getLength() != 1)) {
        // No unique element yet: create one and attach the attribute to it.
        Element created = userConfigDocument.createElement(elementName);
        userConfigDocument.appendChild(created);
        Attr attribute = userConfigDocument.createAttribute(attrName);
        attribute.setNodeValue(text);
        created.getAttributes().setNamedItem(attribute);
        return;
    }
    NamedNodeMap attrs = matches.item(0).getAttributes();
    Node existing = attrs.getNamedItem(attrName);
    if (existing != null) {
        existing.setNodeValue(text);
    }
    else {
        Attr attribute = userConfigDocument.createAttribute(attrName);
        attribute.setNodeValue(text);
        attrs.setNamedItem(attribute);
    }
}
/**
 * Sets attrName=value on the single elementName element of the global
 * configuration DOM, creating the attribute — or the element itself — when
 * it is missing. A null value is stored as the empty string.
 *
 * @param value attribute value (null is treated as "")
 * @param elementName tag name of the target element
 * @param attrName name of the attribute to set
 */
private void storeAttrValueToGlobalDocument(String value, String elementName, String attrName) {
String outputValue = (value == null) ? "" : value;
NodeList nodeList = globalConfigDocument.getElementsByTagName(elementName);
if ((nodeList != null) && (nodeList.getLength() == 1)) {
NamedNodeMap attrs = nodeList.item(0).getAttributes();
Node attrNode = attrs.getNamedItem(attrName);
if (attrNode == null) {
// Attribute does not exist yet: create and attach it.
Attr attribute = globalConfigDocument.createAttribute(attrName);
attribute.setNodeValue(outputValue);
attrs.setNamedItem(attribute);
}
else {
attrNode.setNodeValue(outputValue);
}
}
else {
// No unique element found: create one at document level.
// NOTE(review): appending a second top-level element to a document that
// already has a root throws HIERARCHY_REQUEST_ERR in W3C DOM — confirm
// this branch is only reached on an empty document.
Element element = globalConfigDocument.createElement(elementName);
globalConfigDocument.appendChild(element);
NamedNodeMap attrs = element.getAttributes();
Attr attribute = globalConfigDocument.createAttribute(attrName);
attribute.setNodeValue(outputValue);
attrs.setNamedItem(attribute);
}
}
/**
 * Rewrites the projects element of the user configuration DOM from the
 * in-memory {@code projectDirectories} set: all existing project children
 * are removed, then one project element (carrying the directory name) is
 * appended per tracked directory.
 */
private void storeProjectsToUserDocument() {
NodeList projectsObjectList = userConfigDocument.getElementsByTagName(ConfigurationManager.STRING_PROJECTS);
Element projectsElement;
if ((projectsObjectList == null) || (projectsObjectList.getLength() == 0)) {
projectsElement = userConfigDocument.createElement(ConfigurationManager.STRING_PROJECTS);
userConfigDocument.appendChild(projectsElement);
}
else {
projectsElement = (Element) projectsObjectList.item(0);
}
// Remove all old project entries before re-serializing the current set.
while (projectsElement.getFirstChild() != null) {
projectsElement.removeChild(projectsElement.getFirstChild());
}
for (File directory : projectDirectories) {
Element projectElement = userConfigDocument.createElement(ConfigurationManager.STRING_PROJECT);
projectElement.setAttribute(ConfigurationManager.STRING_PROJECT_NAME, directory.getName());
projectsElement.appendChild(projectElement);
}
}
}
| |
/**
* Copyright (C) [2013] [The FURTHeR Project]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.utah.further.core.math.misc;
import static edu.utah.further.core.api.collections.CollectionUtil.newList;
import static edu.utah.further.core.api.lang.CoreUtil.preventUtilityConstruction;
import static java.util.Arrays.asList;
import static org.slf4j.LoggerFactory.getLogger;
import java.util.List;
import java.util.Random;
import org.slf4j.Logger;
import edu.utah.further.core.api.context.Api;
import edu.utah.further.core.api.context.Utility;
/**
 * Class that contains a selection of numerical routines, integer permutation routines and
 * number-theory-related functions.
 * <p>
 * -----------------------------------------------------------------------------------<br>
 * (c) 2008-2013 FURTHeR Project, Health Sciences IT, University of Utah<br>
 * Contact: {@code <further@utah.edu>}<br>
 * Biomedical Informatics, 26 South 2000 East<br>
 * Room 5775 HSEB, Salt Lake City, UT 84112<br>
 * Day Phone: 1-801-581-4080<br>
 * -----------------------------------------------------------------------------------
 *
 * @author Oren E. Livne {@code <oren.livne@utah.edu>}
 * @version May 29, 2009
 */
@Utility
@Api
@SuppressWarnings("boxing")
public final class NumberUtil
{
	// ========================= CONSTANTS =================================

	/**
	 * A logger that helps identify this class' printouts.
	 */
	@SuppressWarnings("unused")
	private static final Logger log = getLogger(NumberUtil.class);

	/**
	 * The number of witnesses queried in randomized primality test.
	 */
	public static final int TRIALS = 5;

	// ========================= CONSTRUCTORS ==============================

	/**
	 * <p>
	 * Hide constructor in utility class.
	 * </p>
	 */
	private NumberUtil()
	{
		preventUtilityConstruction();
	}

	// ========================= METHODS ===================================

	/**
	 * Return x^n (mod p) by recursive repeated squaring. Assumes x, n >= 0, p > 0,
	 * x < p, 0^0 = 1. Overflow may occur if p > 31 bits.
	 *
	 * @param x base
	 * @param n exponent
	 * @param p modulus
	 * @return x^n (mod p)
	 */
	public static long power(final long x, final long n, final long p)
	{
		if (n == 0)
			return 1;
		long tmp = power((x * x) % p, n / 2, p);
		// Odd exponent: multiply in one extra factor of x.
		if (n % 2 != 0)
			tmp = (tmp * x) % p;
		return tmp;
	}

	/**
	 * Randomized primality test. Adjust TRIALS to increase confidence level.
	 *
	 * @param n
	 *            the number to test.
	 * @return if false, n is definitely not prime. If true, n is probably prime.
	 */
	public static boolean isPrime(final long n)
	{
		// Handle small inputs directly: the witness loop draws a random base from
		// [2, n-2], and Random#nextInt(bound) throws IllegalArgumentException for
		// a non-positive bound — which previously happened for every n <= 3.
		if (n < 2)
			return false;
		if (n <= 3)
			return true; // 2 and 3 are prime
		final Random r = new Random();
		for (int counter = 0; counter < TRIALS; counter++)
			if (witness(r.nextInt((int) n - 3) + 2, n - 1, n) != 1)
				return false;
		return true;
	}

	/**
	 * Return the greatest common divisor (Euclid's algorithm).
	 */
	public static long gcd(final long a, final long b)
	{
		if (b == 0)
			return a;
		return gcd(b, a % b);
	}

	/**
	 * Works back through Euclid's algorithm to find x and y such that if gcd(a,b) = 1, ax
	 * + by = 1.
	 *
	 * @param a
	 * @param b
	 * @param pair
	 *            holds intermediate results as this method is called recursively
	 */
	private static void fullGcd(final long a, final long b,
			final ComparablePair<Long, Long> pair)
	{
		long x1, y1;
		if (b == 0)
		{
			// Base case: gcd(a, 0) = a = a*1 + 0*0.
			pair.setLeft(1l);
			pair.setRight(0l);
		}
		else
		{
			fullGcd(b, a % b, pair);
			x1 = pair.getLeft();
			y1 = pair.getRight();
			pair.setLeft(y1);
			pair.setRight(x1 - (a / b) * y1);
		}
	}

	/**
	 * Solve ax == 1 (mod n), assuming gcd( a, n ) = 1.
	 *
	 * @return x
	 */
	public static long inverse(final long a, final long n)
	{
		final ComparablePair<Long, Long> pair = new ComparablePair<>(0l, 0l);
		fullGcd(a, n, pair);
		final long _x = pair.getLeft();
		// Normalize a negative coefficient into the range [0, n).
		return _x > 0 ? _x : _x + n;
	}

	/**
	 * Return the reverse permutation <code>b</code> of a permutation <code>a</code> of
	 * the numbers <code>[0..n-1]</code>. This means that <code>a o b = b o a = </code>
	 * the identity permutation.
	 * <p>
	 * For instance, if <code>n = 5, a = {4,0,3,1,2}</code> then
	 * <code>b = {1,3,4,2,0}</code>.
	 *
	 * @param permutation
	 *            permutation of the numbers <code>[0..n-1]</code>
	 * @return reverse permutation
	 */
	public static int[] reversePermutation(final int[] permutation)
	{
		final int n = permutation.length;
		final int[] reverse = new int[n];
		for (int i = 0; i < n; i++)
		{
			reverse[permutation[i]] = i;
		}
		return reverse;
	}

	/**
	 * Return the reverse permutation <code>b</code> of a permutation <code>a</code> of
	 * the numbers <code>[0..n-1]</code>. This means that <code>a o b = b o a = </code>
	 * the identity permutation.
	 * <p>
	 * For instance, if <code>n = 5, a = {4,0,3,1,2}</code> then
	 * <code>b = {1,3,4,2,0}</code>.
	 *
	 * @param permutation
	 *            permutation of the numbers <code>[0..n-1]</code>, as list
	 * @return reverse permutation
	 */
	public static int[] reversePermutation(final List<Integer> permutation)
	{
		// Unbox into an int[] and delegate to the array overload.
		final int n = permutation.size();
		final int[] array = new int[n];
		for (int i = 0; i < n; i++)
		{
			array[i] = permutation.get(i);
		}
		return reversePermutation(array);
	}

	/**
	 * Get base-<code>b</code> digits of a number. If <code>x</code> is negative, return
	 * the digits of <code>-x</code>. If <code>x=0</code>, return an array with a single
	 * entry (<code>0</code>).
	 *
	 * @param base
	 *            base to compute digits in
	 * @param x
	 *            number to be analyzed (sign is discarded)
	 * @return list of digits. The first entry is the least significant digit; the last
	 *         one is the most significant digit.
	 */
	public static List<Integer> toDigits(final int base, final int x)
	{
		int y = x;
		final List<Integer> digits = newList();
		if (y == 0)
		{
			digits.add(0);
		}
		else
		{
			if (y < 0)
			{
				y = -y;
			}
			while (y != 0)
			{
				digits.add(y % base);
				y = y / base;
			}
		}
		return digits;
	}

	/**
	 * Convert base-<code>b</code> digits to a composite number. That is, <code>
	 * x = digits(k-1)*b^(k-1) + ... + digits(1)*b + digits(0)
	 * </code> The
	 * implementation uses Horner's rule for an <code>O(k)</code> computation.
	 * The digit list must be non-empty.
	 *
	 * @param base
	 *            base to compute digits in
	 * @param digits
	 *            list of digits. The first entry is the least significant digit; the last
	 *            one is the most significant digit.
	 * @return <code>digits(k-1)*b^(k-1) + ... + digits(1)*b + digits(0)</code>
	 * @see http://www.brpreiss.com/books/opus4/html/page39.html
	 */
	public static int digitIntListToNumber(final int base, final List<Integer> digits)
	{
		final int k = digits.size() - 1;
		int x = digits.get(k);
		for (int i = k - 1; i >= 0; i--)
		{
			x = base * x + digits.get(i);
		}
		return x;
	}

	/**
	 * Convert base-<code>b</code> digits to a composite number (varargs convenience
	 * overload of {@link #digitIntListToNumber(int, List)}).
	 *
	 * @param base
	 *            base to compute digits in
	 * @param digits
	 *            array of digits. The first entry is the least significant digit; the
	 *            last one is the most significant digit.
	 * @return <code>digits(k-1)*b^(k-1) + ... + digits(1)*b + digits(0)</code>
	 * @see http://www.brpreiss.com/books/opus4/html/page39.html
	 */
	public static int toNumber(final int base, final Integer... digits)
	{
		return digitIntListToNumber(base, asList(digits));
	}

	/**
	 * Get base-<code>b</code> digits of a number. If <code>x</code> is negative, return
	 * the digits of <code>-x</code>. If <code>x=0</code>, return an array with a single
	 * entry (<code>0</code>).
	 *
	 * @param base
	 *            base to compute digits in
	 * @param x
	 *            number to be analyzed (sign is discarded)
	 * @return list of digits. The first entry is the least significant digit; the last
	 *         one is the most significant digit.
	 */
	public static List<Long> toDigits(final int base, final long x)
	{
		long y = x;
		final List<Long> digits = newList();
		if (y == 0)
		{
			digits.add(0l);
		}
		else
		{
			if (y < 0)
			{
				y = -y;
			}
			while (y != 0)
			{
				digits.add(y % base);
				y = y / base;
			}
		}
		return digits;
	}

	/**
	 * Convert base-<code>b</code> digits to a composite number. That is, <code>
	 * x = digits(k-1)*b^(k-1) + ... + digits(1)*b + digits(0)
	 * </code> The
	 * implementation uses Horner's rule for an <code>O(k)</code> computation.
	 * The digit list must be non-empty.
	 *
	 * @param base
	 *            base to compute digits in
	 * @param digits
	 *            list of digits. The first entry is the least significant digit; the last
	 *            one is the most significant digit.
	 * @return <code>digits(k-1)*b^(k-1) + ... + digits(1)*b + digits(0)</code>
	 * @see http://www.brpreiss.com/books/opus4/html/page39.html
	 */
	public static long digitLongListToNumber(final int base, final List<Long> digits)
	{
		final int k = digits.size() - 1;
		long x = digits.get(k);
		for (int i = k - 1; i >= 0; i--)
		{
			x = base * x + digits.get(i);
		}
		return x;
	}

	/**
	 * Convert base-<code>b</code> digits to a composite number (varargs convenience
	 * overload of {@link #digitLongListToNumber(int, List)}).
	 *
	 * @param base
	 *            base to compute digits in
	 * @param digits
	 *            array of digits. The first entry is the least significant digit; the
	 *            last one is the most significant digit.
	 * @return <code>digits(k-1)*b^(k-1) + ... + digits(1)*b + digits(0)</code>
	 * @see http://www.brpreiss.com/books/opus4/html/page39.html
	 */
	public static long toNumber(final int base, final Long... digits)
	{
		return digitLongListToNumber(base, asList(digits));
	}

	// ========================= PRIVATE METHODS ===========================

	/**
	 * Private method that implements the basic primality test. If witness does not return
	 * 1, n is definitely composite. Do this by computing a^i (mod n) and looking for
	 * non-trivial square roots of 1 along the way.
	 *
	 * @param a random witness base
	 * @param i exponent (n - 1 at the top level)
	 * @param n number under test
	 * @return 1 when n passes this witness; any other value proves n composite
	 */
	private static long witness(final long a, final long i, final long n)
	{
		if (i == 0)
			return 1;
		final long x = witness(a, i / 2, n);
		if (x == 0) // If n is recursively composite, stop
			return 0;
		// n is not prime if we find a non-trivial square root of 1
		long y = (x * x) % n;
		if (y == 1 && x != 1 && x != n - 1)
			return 0;
		if (i % 2 != 0)
			y = (a * y) % n;
		return y;
	}
}
| |
package com.couchbase.lite;
import com.couchbase.lite.internal.InterfaceAudience;
import com.couchbase.lite.util.Log;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* An unsaved Couchbase Lite Document Revision.
*/
public final class UnsavedRevision extends Revision {
private final long parentSequence;
private Map<String, Object> properties;
/**
* Constructor
*
* @exclude
*/
@InterfaceAudience.Private
protected UnsavedRevision(Document document, SavedRevision parentRevision) {
super(document);
if (parentRevision == null) {
parentRevID = null;
parentSequence = 0L;
} else {
parentRevID = parentRevision.getId();
parentSequence = parentRevision.getSequence();
}
Map<String, Object> parentRevisionProperties;
if (parentRevision == null) {
parentRevisionProperties = null;
} else {
parentRevisionProperties = parentRevision.getProperties();
}
if (parentRevisionProperties == null) {
properties = new HashMap<String, Object>();
properties.put("_id", document.getId());
if (parentRevID != null) {
properties.put("_rev", parentRevID);
}
} else {
properties = new HashMap<String, Object>(parentRevisionProperties);
}
}
/**
* Set whether this revision is a deletion or not (eg, marks doc as deleted)
*/
@InterfaceAudience.Public
public void setIsDeletion(boolean isDeletion) {
if (isDeletion == true) {
properties.put("_deleted", true);
} else {
properties.remove("_deleted");
}
}
@Override
@InterfaceAudience.Public
public String getId() {
return null;
}
@Override
@InterfaceAudience.Private
protected long getSequence() {
return 0L;
}
/**
* Set the properties for this revision
*/
@InterfaceAudience.Public
public void setProperties(Map<String, Object> properties) {
this.properties = properties;
}
/**
* Saves the new revision to the database.
* <p/>
* This will throw an exception with a 412 error if its parent (the revision it was created from)
* is not the current revision of the document.
* <p/>
* Afterwards you should use the returned Revision instead of this object.
*
* @return A new Revision representing the saved form of the revision.
* @throws CouchbaseLiteException
*/
@InterfaceAudience.Public
public SavedRevision save() throws CouchbaseLiteException {
boolean allowConflict = false;
return document.putProperties(properties, parentRevID, allowConflict);
}
/**
* A special variant of -save: that always adds the revision, even if its parent is not the
* current revision of the document.
* <p/>
* This can be used to resolve conflicts, or to create them. If you're not certain that's what you
* want to do, you should use the regular -save: method instead.
*/
@InterfaceAudience.Public
public SavedRevision save(boolean allowConflict) throws CouchbaseLiteException {
return document.putProperties(properties, parentRevID, allowConflict);
}
/**
* Deletes any existing attachment with the given name.
* The attachment will be deleted from the database when the revision is saved.
*
* @param name The attachment name.
*/
@InterfaceAudience.Public
public void removeAttachment(String name) {
addAttachment(null, name);
}
/**
* Sets the userProperties of the Revision.
* Set replaces all properties except for those with keys prefixed with '_'.
*/
@InterfaceAudience.Public
public void setUserProperties(Map<String, Object> userProperties) {
Map<String, Object> newProps = new HashMap<String, Object>();
newProps.putAll(userProperties);
for (String key : properties.keySet()) {
if (key.startsWith("_")) {
newProps.put(key, properties.get(key)); // Preserve metadata properties
}
}
properties = newProps;
}
/**
* Sets the attachment with the given name. The Attachment data will be written
* to the Database when the Revision is saved.
*
* @param name The name of the Attachment to set.
* @param contentType The content-type of the Attachment.
* @param contentStream The Attachment content. The InputStream will be closed
* after it is no longer needed.
*/
@InterfaceAudience.Public
public void setAttachment(String name, String contentType, InputStream contentStream) {
Attachment attachment = new Attachment(contentStream, contentType);
addAttachment(attachment, name);
}
/**
* Sets the attachment with the given name. The Attachment data will be written
* to the Database when the Revision is saved.
*
* @param name The name of the Attachment to set.
* @param contentType The content-type of the Attachment.
* @param contentStreamURL The URL that contains the Attachment content.
*/
@InterfaceAudience.Public
public void setAttachment(String name, String contentType, URL contentStreamURL) {
try {
InputStream inputStream = contentStreamURL.openStream();
setAttachment(name, contentType, inputStream);
} catch (IOException e) {
Log.e(Database.TAG, "Error opening stream for url: %s", contentStreamURL);
throw new RuntimeException(e);
}
}
@Override
@InterfaceAudience.Public
public Map<String, Object> getProperties() {
return properties;
}
@Override
@InterfaceAudience.Public
public SavedRevision getParent() {
if (parentRevID == null || parentRevID.length() == 0) {
return null;
}
return document.getRevision(parentRevID);
}
@Override
@InterfaceAudience.Public
public String getParentId() {
return parentRevID;
}
@Override
@InterfaceAudience.Private
protected long getParentSequence() {
return parentSequence;
}
@Override
@InterfaceAudience.Public
public List<SavedRevision> getRevisionHistory() throws CouchbaseLiteException {
    // (Don't include self in the array, because this revision doesn't really exist yet)
    SavedRevision parent = getParent();
    if (parent == null) {
        // Root revision: history is empty.
        return new ArrayList<SavedRevision>();
    }
    return parent.getRevisionHistory();
}
/**
 * Creates or updates an attachment.
 * The attachment data will be written to the database when the revision is saved.
 *
 * @param attachment A newly-created Attachment (not yet associated with any revision)
 * @param name The attachment name.
 */
@InterfaceAudience.Private
protected void addAttachment(Attachment attachment, String name) {
    // Fetch the per-name attachment sub-map, creating it on first use.
    Map<String, Object> byName = (Map<String, Object>) properties.get("_attachments");
    if (byName == null) {
        byName = new HashMap<String, Object>();
    }
    // Register (or replace) the entry, then make sure the sub-map is installed
    // in the revision properties (it may have just been created).
    byName.put(name, attachment);
    properties.put("_attachments", byName);
    // Tell the attachment its own name, when one was actually supplied.
    if (attachment != null) {
        attachment.setName(name);
    }
}
}
| |
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.fragment.app;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import android.app.Activity;
import android.app.Instrumentation;
import android.content.Intent;
import android.os.Bundle;
import android.os.SystemClock;
import android.support.test.InstrumentationRegistry;
import android.support.test.annotation.UiThreadTest;
import android.support.test.filters.MediumTest;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.Nullable;
import androidx.fragment.app.test.FragmentTestActivity;
import androidx.fragment.app.test.NewIntentActivity;
import androidx.fragment.test.R;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
/**
 * Tests usage of the {@link FragmentTransaction} class.
 */
@MediumTest
@RunWith(AndroidJUnit4.class)
public class FragmentTransactionTest {
    @Rule
    public ActivityTestRule<FragmentTestActivity> mActivityRule =
            new ActivityTestRule<>(FragmentTestActivity.class);

    // Activity under test, refreshed before every test in setUp().
    private FragmentTestActivity mActivity;
    // Number of times the back-stack-changed callback fired during the current test.
    private int mOnBackStackChangedTimes;
    private FragmentManager.OnBackStackChangedListener mOnBackStackChangedListener;

    @Before
    public void setUp() {
        mActivity = mActivityRule.getActivity();
        mOnBackStackChangedTimes = 0;
        // Count back-stack changes so tests can assert how many transactions landed.
        mOnBackStackChangedListener = new FragmentManager.OnBackStackChangedListener() {
            @Override
            public void onBackStackChanged() {
                mOnBackStackChangedTimes++;
            }
        };
        mActivity.getSupportFragmentManager()
                .addOnBackStackChangedListener(mOnBackStackChangedListener);
    }

    @After
    public void tearDown() {
        // Detach the counting listener so it cannot leak into the next test.
        mActivity.getSupportFragmentManager()
                .removeOnBackStackChangedListener(mOnBackStackChangedListener);
        mOnBackStackChangedListener = null;
    }

    // Adding a public static fragment class must succeed and bump the back-stack count.
    @Test
    public void testAddTransactionWithValidFragment() throws Throwable {
        final Fragment fragment = new CorrectFragment();
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mActivity.getSupportFragmentManager().beginTransaction()
                        .add(R.id.content, fragment)
                        .addToBackStack(null)
                        .commit();
                mActivity.getSupportFragmentManager().executePendingTransactions();
                assertEquals(1, mOnBackStackChangedTimes);
            }
        });
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
        assertTrue(fragment.isAdded());
    }

    // A private fragment class is expected to be rejected with an IllegalStateException.
    @Test
    public void testAddTransactionWithPrivateFragment() throws Throwable {
        final Fragment fragment = new PrivateFragment();
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                boolean exceptionThrown = false;
                try {
                    mActivity.getSupportFragmentManager().beginTransaction()
                            .add(R.id.content, fragment)
                            .addToBackStack(null)
                            .commit();
                    mActivity.getSupportFragmentManager().executePendingTransactions();
                    assertEquals(1, mOnBackStackChangedTimes);
                } catch (IllegalStateException e) {
                    exceptionThrown = true;
                } finally {
                    assertTrue("Exception should be thrown", exceptionThrown);
                    assertFalse("Fragment shouldn't be added", fragment.isAdded());
                }
            }
        });
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
    }

    // Same expectation as above for a package-private fragment class.
    @Test
    public void testAddTransactionWithPackagePrivateFragment() throws Throwable {
        final Fragment fragment = new PackagePrivateFragment();
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                boolean exceptionThrown = false;
                try {
                    mActivity.getSupportFragmentManager().beginTransaction()
                            .add(R.id.content, fragment)
                            .addToBackStack(null)
                            .commit();
                    mActivity.getSupportFragmentManager().executePendingTransactions();
                    assertEquals(1, mOnBackStackChangedTimes);
                } catch (IllegalStateException e) {
                    exceptionThrown = true;
                } finally {
                    assertTrue("Exception should be thrown", exceptionThrown);
                    assertFalse("Fragment shouldn't be added", fragment.isAdded());
                }
            }
        });
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
    }

    // Same expectation for an anonymous fragment subclass.
    @Test
    public void testAddTransactionWithAnonymousFragment() throws Throwable {
        final Fragment fragment = new Fragment() {};
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                boolean exceptionThrown = false;
                try {
                    mActivity.getSupportFragmentManager().beginTransaction()
                            .add(R.id.content, fragment)
                            .addToBackStack(null)
                            .commit();
                    mActivity.getSupportFragmentManager().executePendingTransactions();
                    assertEquals(1, mOnBackStackChangedTimes);
                } catch (IllegalStateException e) {
                    exceptionThrown = true;
                } finally {
                    assertTrue("Exception should be thrown", exceptionThrown);
                    assertFalse("Fragment shouldn't be added", fragment.isAdded());
                }
            }
        });
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
    }

    // Verifies onGetLayoutInflater() call counts and LayoutInflater caching across
    // replace/popBackStack transitions.
    @Test
    public void testGetLayoutInflater() throws Throwable {
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                final OnGetLayoutInflaterFragment fragment1 = new OnGetLayoutInflaterFragment();
                assertEquals(0, fragment1.onGetLayoutInflaterCalls);
                mActivity.getSupportFragmentManager().beginTransaction()
                        .add(R.id.content, fragment1)
                        .addToBackStack(null)
                        .commit();
                mActivity.getSupportFragmentManager().executePendingTransactions();
                assertEquals(1, fragment1.onGetLayoutInflaterCalls);
                assertEquals(fragment1.layoutInflater, fragment1.getLayoutInflater());
                // getLayoutInflater() didn't force onGetLayoutInflater()
                assertEquals(1, fragment1.onGetLayoutInflaterCalls);

                LayoutInflater layoutInflater = fragment1.layoutInflater;
                // Replacing fragment1 won't detach it, so the value won't be cleared
                final OnGetLayoutInflaterFragment fragment2 = new OnGetLayoutInflaterFragment();
                mActivity.getSupportFragmentManager().beginTransaction()
                        .replace(R.id.content, fragment2)
                        .addToBackStack(null)
                        .commit();
                mActivity.getSupportFragmentManager().executePendingTransactions();
                assertSame(layoutInflater, fragment1.getLayoutInflater());
                assertEquals(1, fragment1.onGetLayoutInflaterCalls);

                // Popping it should cause onCreateView again, so a new LayoutInflater...
                mActivity.getSupportFragmentManager().popBackStackImmediate();
                assertNotSame(layoutInflater, fragment1.getLayoutInflater());
                assertEquals(2, fragment1.onGetLayoutInflaterCalls);
                layoutInflater = fragment1.layoutInflater;
                assertSame(layoutInflater, fragment1.getLayoutInflater());

                // Popping it should detach it, clearing the cached value again
                mActivity.getSupportFragmentManager().popBackStackImmediate();

                // once it is detached, the getLayoutInflater() will default to throw
                // an exception, but we've made it return null instead.
                assertEquals(2, fragment1.onGetLayoutInflaterCalls);
                assertNull(fragment1.getLayoutInflater());
                assertEquals(3, fragment1.onGetLayoutInflaterCalls);
            }
        });
    }

    // A non-static inner fragment class (holds an implicit outer reference) must be rejected.
    @Test
    public void testAddTransactionWithNonStaticFragment() throws Throwable {
        final Fragment fragment = new NonStaticFragment();
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                boolean exceptionThrown = false;
                try {
                    mActivity.getSupportFragmentManager().beginTransaction()
                            .add(R.id.content, fragment)
                            .addToBackStack(null)
                            .commit();
                    mActivity.getSupportFragmentManager().executePendingTransactions();
                    assertEquals(1, mOnBackStackChangedTimes);
                } catch (IllegalStateException e) {
                    exceptionThrown = true;
                } finally {
                    assertTrue("Exception should be thrown", exceptionThrown);
                    assertFalse("Fragment shouldn't be added", fragment.isAdded());
                }
            }
        });
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
    }

    // runOnCommit() must run for plain transactions and be rejected (IllegalStateException)
    // for transactions that go on the back stack.
    @Test
    public void testPostOnCommit() throws Throwable {
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                final boolean[] ran = new boolean[1];
                FragmentManager fm = mActivityRule.getActivity().getSupportFragmentManager();
                fm.beginTransaction().runOnCommit(new Runnable() {
                    @Override
                    public void run() {
                        ran[0] = true;
                    }
                }).commit();
                fm.executePendingTransactions();

                assertTrue("runOnCommit runnable never ran", ran[0]);

                ran[0] = false;

                boolean threw = false;
                try {
                    fm.beginTransaction().runOnCommit(new Runnable() {
                        @Override
                        public void run() {
                            ran[0] = true;
                        }
                    }).addToBackStack(null).commit();
                } catch (IllegalStateException ise) {
                    threw = true;
                }

                fm.executePendingTransactions();

                assertTrue("runOnCommit was allowed to be called for back stack transaction",
                        threw);
                assertFalse("runOnCommit runnable for back stack transaction was run", ran[0]);
            }
        });
    }

    /**
     * Test to ensure that when onBackPressed() is received that there is no crash.
     */
    @Test
    @UiThreadTest
    public void crashOnBackPressed() throws Throwable {
        Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
        Bundle outState = new Bundle();
        FragmentTestActivity activity = mActivityRule.getActivity();
        // Saving state first reproduces the "back pressed after onSaveInstanceState" case.
        instrumentation.callActivityOnSaveInstanceState(activity, outState);
        activity.onBackPressed();
    }

    // Ensure that getFragments() works during transactions, even if it is run off thread
    @Test
    public void getFragmentsOffThread() throws Throwable {
        final FragmentManager fm = mActivity.getSupportFragmentManager();

        // Make sure that adding a fragment works
        Fragment fragment = new CorrectFragment();
        fm.beginTransaction()
                .add(R.id.content, fragment)
                .addToBackStack(null)
                .commit();
        FragmentTestUtil.executePendingTransactions(mActivityRule);
        Collection<Fragment> fragments = fm.getFragments();
        assertEquals(1, fragments.size());
        assertTrue(fragments.contains(fragment));

        // Removed fragments shouldn't show
        fm.beginTransaction()
                .remove(fragment)
                .addToBackStack(null)
                .commit();
        FragmentTestUtil.executePendingTransactions(mActivityRule);
        assertTrue(fm.getFragments().isEmpty());

        // Now try detached fragments
        FragmentTestUtil.popBackStackImmediate(mActivityRule);
        fm.beginTransaction()
                .detach(fragment)
                .addToBackStack(null)
                .commit();
        FragmentTestUtil.executePendingTransactions(mActivityRule);
        assertTrue(fm.getFragments().isEmpty());

        // Now try hidden fragments
        FragmentTestUtil.popBackStackImmediate(mActivityRule);
        fm.beginTransaction()
                .hide(fragment)
                .addToBackStack(null)
                .commit();
        FragmentTestUtil.executePendingTransactions(mActivityRule);
        fragments = fm.getFragments();
        assertEquals(1, fragments.size());
        assertTrue(fragments.contains(fragment));

        // And showing it again shouldn't change anything:
        FragmentTestUtil.popBackStackImmediate(mActivityRule);
        fragments = fm.getFragments();
        assertEquals(1, fragments.size());
        assertTrue(fragments.contains(fragment));

        // Now pop back to the start state
        FragmentTestUtil.popBackStackImmediate(mActivityRule);

        // We can't force concurrency, but we can do it lots of times and hope that
        // we hit it.
        for (int i = 0; i < 100; i++) {
            Fragment fragment2 = new CorrectFragment();
            fm.beginTransaction()
                    .add(R.id.content, fragment2)
                    .addToBackStack(null)
                    .commit();
            getFragmentsUntilSize(1);

            fm.popBackStack();
            getFragmentsUntilSize(0);
        }
    }

    /**
     * When a FragmentManager is detached, it should allow commitAllowingStateLoss()
     * and commitNowAllowingStateLoss() by just dropping the transaction.
     */
    @Test
    public void commitAllowStateLossDetached() throws Throwable {
        Fragment fragment1 = new CorrectFragment();
        mActivity.getSupportFragmentManager()
                .beginTransaction()
                .add(fragment1, "1")
                .commit();
        FragmentTestUtil.executePendingTransactions(mActivityRule);
        final FragmentManager fm = fragment1.getChildFragmentManager();
        // Removing the parent fragment detaches its child FragmentManager.
        mActivity.getSupportFragmentManager()
                .beginTransaction()
                .remove(fragment1)
                .commit();
        FragmentTestUtil.executePendingTransactions(mActivityRule);
        Assert.assertEquals(0, mActivity.getSupportFragmentManager().getFragments().size());
        assertEquals(0, fm.getFragments().size());

        // Now the fragment1's fragment manager should allow commitAllowingStateLoss
        // by doing nothing since it has been detached.
        Fragment fragment2 = new CorrectFragment();
        fm.beginTransaction()
                .add(fragment2, "2")
                .commitAllowingStateLoss();
        FragmentTestUtil.executePendingTransactions(mActivityRule);
        assertEquals(0, fm.getFragments().size());

        // It should also allow commitNowAllowingStateLoss by doing nothing
        mActivityRule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                Fragment fragment3 = new CorrectFragment();
                fm.beginTransaction()
                        .add(fragment3, "3")
                        .commitNowAllowingStateLoss();
                assertEquals(0, fm.getFragments().size());
            }
        });
    }

    /**
     * onNewIntent() should note that the state is not saved so that child fragment
     * managers can execute transactions.
     */
    @Test
    public void newIntentUnlocks() throws Throwable {
        Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
        Intent intent1 = new Intent(mActivity, NewIntentActivity.class)
                .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        NewIntentActivity newIntentActivity =
                (NewIntentActivity) instrumentation.startActivitySync(intent1);
        FragmentTestUtil.waitForExecution(mActivityRule);

        // Launch a second activity on top so the first one is stopped (its state saved).
        Intent intent2 = new Intent(mActivity, FragmentTestActivity.class);
        intent2.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        Activity coveringActivity = instrumentation.startActivitySync(intent2);
        FragmentTestUtil.waitForExecution(mActivityRule);

        // Re-deliver the first activity via onNewIntent().
        Intent intent3 = new Intent(mActivity, NewIntentActivity.class)
                .setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        mActivity.startActivity(intent3);
        assertTrue(newIntentActivity.newIntent.await(1, TimeUnit.SECONDS));
        FragmentTestUtil.waitForExecution(mActivityRule);

        for (Fragment fragment : newIntentActivity.getSupportFragmentManager().getFragments()) {
            // There really should only be one fragment in newIntentActivity.
            assertEquals(1, fragment.getChildFragmentManager().getFragments().size());
        }
    }

    // Busy-waits (up to ~3 s) until the fragment list reaches the expected size;
    // fails the test if the deadline passes first.
    private void getFragmentsUntilSize(int expectedSize) {
        final long endTime = SystemClock.uptimeMillis() + 3000;

        do {
            assertTrue(SystemClock.uptimeMillis() < endTime);
        } while (mActivity.getSupportFragmentManager().getFragments().size() != expectedSize);
    }

    // Fixture fragments with varying visibility/staticness, used to probe
    // FragmentTransaction's class-validity checks.
    public static class CorrectFragment extends Fragment {}

    private static class PrivateFragment extends Fragment {}

    static class PackagePrivateFragment extends Fragment {}

    private class NonStaticFragment extends Fragment {}

    // Records onGetLayoutInflater() calls and caches the returned inflater;
    // returns null instead of throwing once the fragment is detached.
    public static class OnGetLayoutInflaterFragment extends Fragment {
        public int onGetLayoutInflaterCalls = 0;
        public LayoutInflater layoutInflater;

        @Override
        public LayoutInflater onGetLayoutInflater(Bundle savedInstanceState) {
            onGetLayoutInflaterCalls++;
            try {
                layoutInflater = super.onGetLayoutInflater(savedInstanceState);
            } catch (Exception e) {
                return null;
            }
            return layoutInflater;
        }

        @Nullable
        @Override
        public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
                @Nullable Bundle savedInstanceState) {
            return inflater.inflate(R.layout.fragment_a, container, false);
        }
    }
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.util.resource;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.JarURLConnection;
import java.net.URL;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
import org.eclipse.jetty.util.IO;
import org.eclipse.jetty.util.URIUtil;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
/* ------------------------------------------------------------ */
/**
 * A {@link URLResource} specialization for {@code jar:} URLs. Caches the
 * {@link JarURLConnection} and supports extracting jar contents to disk.
 */
public class JarResource extends URLResource
{
    private static final Logger LOG = Log.getLogger(JarResource.class);
    protected JarURLConnection _jarConnection;

    /* -------------------------------------------------------- */
    protected JarResource(URL url)
    {
        super(url,null);
    }

    /* ------------------------------------------------------------ */
    protected JarResource(URL url, boolean useCaches)
    {
        super(url, null, useCaches);
    }

    /* ------------------------------------------------------------ */
    @Override
    public synchronized void close()
    {
        // Drop the cached jar connection; the superclass releases the rest.
        _jarConnection=null;
        super.close();
    }

    /* ------------------------------------------------------------ */
    @Override
    protected synchronized boolean checkConnection()
    {
        super.checkConnection();
        try
        {
            // Re-wrap whenever the superclass established a new connection.
            if (_jarConnection!=_connection)
                newConnection();
        }
        catch(IOException e)
        {
            // Best-effort: a failed connection just reads as "not connected".
            LOG.ignore(e);
            _jarConnection=null;
        }

        return _jarConnection!=null;
    }

    /* ------------------------------------------------------------ */
    /**
     * @throws IOException Sub-classes of <code>JarResource</code> may throw an IOException (or subclass)
     */
    protected void newConnection() throws IOException
    {
        _jarConnection=(JarURLConnection)_connection;
    }

    /* ------------------------------------------------------------ */
    /**
     * Returns true if the represented resource exists.
     */
    @Override
    public boolean exists()
    {
        // A URL ending in "!/" names the jar root: it exists iff the jar connects.
        if (_urlString.endsWith("!/"))
            return checkConnection();
        else
            return super.exists();
    }

    /* ------------------------------------------------------------ */
    @Override
    public File getFile()
        throws IOException
    {
        // A jar entry is not a plain file on disk.
        return null;
    }

    /* ------------------------------------------------------------ */
    @Override
    public InputStream getInputStream()
        throws java.io.IOException
    {
        checkConnection();
        if (!_urlString.endsWith("!/"))
            // Entry stream: override close() to swap in a closed stream rather
            // than closing the (possibly cached/shared) connection's stream.
            return new FilterInputStream(getInputStream(false))
            {
                @Override
                public void close() throws IOException {this.in=IO.getClosedStream();}
            };

        // Whole-jar resource: strip the leading "jar:" (4 chars) and trailing
        // "!/" and stream the jar file itself.
        URL url = new URL(_urlString.substring(4,_urlString.length()-2));
        InputStream is = url.openStream();
        return is;
    }

    /* ------------------------------------------------------------ */
    /**
     * Extracts the jar's contents (or only the entry/subdirectory named after
     * the "!/" separator, when present) into the given directory. Also writes
     * META-INF/MANIFEST.MF when the whole jar (or the manifest itself) is
     * being extracted.
     *
     * @param directory destination directory
     * @throws IOException if this is not a valid jar URL or extraction fails
     */
    @Override
    public void copyTo(File directory)
        throws IOException
    {
        if (!exists())
            return;

        if(LOG.isDebugEnabled())
            LOG.debug("Extract "+this+" to "+directory);

        String urlString = this.getURL().toExternalForm().trim();
        int endOfJarUrl = urlString.indexOf("!/");
        int startOfJarUrl = (endOfJarUrl >= 0?4:0);

        if (endOfJarUrl < 0)
            throw new IOException("Not a valid jar url: "+urlString);

        URL jarFileURL = new URL(urlString.substring(startOfJarUrl, endOfJarUrl));
        // Anything after "!/" selects a single entry or subdirectory to extract.
        String subEntryName = (endOfJarUrl+2 < urlString.length() ? urlString.substring(endOfJarUrl + 2) : null);
        boolean subEntryIsDir = (subEntryName != null && subEntryName.endsWith("/")?true:false);

        if (LOG.isDebugEnabled())
            LOG.debug("Extracting entry = "+subEntryName+" from jar "+jarFileURL);

        try (InputStream is = jarFileURL.openConnection().getInputStream();
                JarInputStream jin = new JarInputStream(is))
        {
            JarEntry entry;
            boolean shouldExtract;
            while((entry=jin.getNextJarEntry())!=null)
            {
                String entryName = entry.getName();
                if ((subEntryName != null) && (entryName.startsWith(subEntryName)))
                {
                    // is the subentry really a dir?
                    if (!subEntryIsDir && subEntryName.length()+1==entryName.length() && entryName.endsWith("/"))
                        subEntryIsDir=true;

                    //if there is a particular subEntry that we are looking for, only
                    //extract it.
                    if (subEntryIsDir)
                    {
                        //if it is a subdirectory we are looking for, then we
                        //are looking to extract its contents into the target
                        //directory. Remove the name of the subdirectory so
                        //that we don't wind up creating it too.
                        entryName = entryName.substring(subEntryName.length());
                        if (!entryName.equals(""))
                        {
                            //the entry is
                            shouldExtract = true;
                        }
                        else
                            shouldExtract = false;
                    }
                    else
                        shouldExtract = true;
                }
                else if ((subEntryName != null) && (!entryName.startsWith(subEntryName)))
                {
                    //there is a particular entry we are looking for, and this one
                    //isn't it
                    shouldExtract = false;
                }
                else
                {
                    //we are extracting everything
                    shouldExtract = true;
                }

                if (!shouldExtract)
                {
                    if (LOG.isDebugEnabled())
                        LOG.debug("Skipping entry: "+entryName);
                    continue;
                }

                // Guard against path-traversal ("../") entry names: canonicalize
                // and skip entries that escape the target directory.
                String dotCheck = entryName.replace('\\', '/');
                dotCheck = URIUtil.canonicalPath(dotCheck);
                if (dotCheck == null)
                {
                    if (LOG.isDebugEnabled())
                        LOG.debug("Invalid entry: "+entryName);
                    continue;
                }

                File file=new File(directory,entryName);

                if (entry.isDirectory())
                {
                    // Make directory
                    if (!file.exists())
                        file.mkdirs();
                }
                else
                {
                    // make directory (some jars don't list dirs)
                    File dir = new File(file.getParent());
                    if (!dir.exists())
                        dir.mkdirs();

                    // Make file
                    try (OutputStream fout = new FileOutputStream(file))
                    {
                        IO.copy(jin,fout);
                    }

                    // touch the file.
                    if (entry.getTime()>=0)
                        file.setLastModified(entry.getTime());
                }
            }

            if ((subEntryName == null) || (subEntryName != null && subEntryName.equalsIgnoreCase("META-INF/MANIFEST.MF")))
            {
                Manifest manifest = jin.getManifest();
                if (manifest != null)
                {
                    File metaInf = new File (directory, "META-INF");
                    metaInf.mkdir();
                    File f = new File(metaInf, "MANIFEST.MF");
                    try (OutputStream fout = new FileOutputStream(f))
                    {
                        manifest.write(fout);
                    }
                }
            }
        }
    }

    /**
     * Wraps a resource as a jar resource (no-op if it already is one).
     */
    public static Resource newJarResource(Resource resource) throws IOException
    {
        if (resource instanceof JarResource)
            return resource;
        return Resource.newResource("jar:" + resource + "!/");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.factories;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.table.connector.format.DecodingFormat;
import org.apache.flink.table.connector.format.EncodingFormat;
import org.apache.flink.table.connector.sink.DynamicTableSink;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.connector.source.ScanTableSource;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.factories.FactoryUtil.TableFactoryHelper;
import javax.annotation.Nullable;
import java.util.HashSet;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import static org.apache.flink.table.factories.FactoryUtil.FORMAT;
import static org.apache.flink.table.factories.FactoryUtil.FORMAT_SUFFIX;
/**
 * Test implementations for {@link DynamicTableSourceFactory} and {@link DynamicTableSinkFactory}.
 */
public final class TestDynamicTableFactory
        implements DynamicTableSourceFactory, DynamicTableSinkFactory {

    public static final String IDENTIFIER = "test-connector";

    // Required option; also reachable via the deprecated key "deprecated-target".
    public static final ConfigOption<String> TARGET =
            ConfigOptions.key("target")
                    .stringType()
                    .noDefaultValue()
                    .withDeprecatedKeys("deprecated-target");

    // Optional option with a default and a fallback key.
    public static final ConfigOption<Long> BUFFER_SIZE =
            ConfigOptions.key("buffer-size")
                    .longType()
                    .defaultValue(100L)
                    .withFallbackKeys("fallback-buffer-size");

    // Format options: either the generic FORMAT or a value-prefixed format may
    // be used, plus an optional key format.
    public static final ConfigOption<String> KEY_FORMAT =
            ConfigOptions.key("key" + FORMAT_SUFFIX).stringType().noDefaultValue();

    public static final ConfigOption<String> VALUE_FORMAT =
            ConfigOptions.key("value" + FORMAT_SUFFIX).stringType().noDefaultValue();

    @Override
    public DynamicTableSource createDynamicTableSource(Context context) {
        final TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
        final Optional<DecodingFormat<DeserializationSchema<RowData>>> keyFormat =
                helper.discoverOptionalDecodingFormat(
                        DeserializationFormatFactory.class, KEY_FORMAT);
        // Prefer the generic "format" option; fall back to "value.format".
        final DecodingFormat<DeserializationSchema<RowData>> valueFormat =
                helper.discoverOptionalDecodingFormat(DeserializationFormatFactory.class, FORMAT)
                        .orElseGet(
                                () ->
                                        helper.discoverDecodingFormat(
                                                DeserializationFormatFactory.class, VALUE_FORMAT));
        helper.validate();

        return new DynamicTableSourceMock(
                helper.getOptions().get(TARGET), keyFormat.orElse(null), valueFormat);
    }

    @Override
    public DynamicTableSink createDynamicTableSink(Context context) {
        final TableFactoryHelper helper = FactoryUtil.createTableFactoryHelper(this, context);
        final Optional<EncodingFormat<SerializationSchema<RowData>>> keyFormat =
                helper.discoverOptionalEncodingFormat(SerializationFormatFactory.class, KEY_FORMAT);
        // Prefer the generic "format" option; fall back to "value.format".
        final EncodingFormat<SerializationSchema<RowData>> valueFormat =
                helper.discoverOptionalEncodingFormat(SerializationFormatFactory.class, FORMAT)
                        .orElseGet(
                                () ->
                                        helper.discoverEncodingFormat(
                                                SerializationFormatFactory.class, VALUE_FORMAT));
        helper.validate();

        return new DynamicTableSinkMock(
                helper.getOptions().get(TARGET),
                helper.getOptions().get(BUFFER_SIZE),
                keyFormat.orElse(null),
                valueFormat);
    }

    @Override
    public String factoryIdentifier() {
        return IDENTIFIER;
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        final Set<ConfigOption<?>> options = new HashSet<>();
        options.add(TARGET);
        return options;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        final Set<ConfigOption<?>> options = new HashSet<>();
        options.add(BUFFER_SIZE);
        options.add(KEY_FORMAT);
        options.add(FORMAT);
        options.add(VALUE_FORMAT);
        return options;
    }

    // --------------------------------------------------------------------------------------------
    // Table source
    // --------------------------------------------------------------------------------------------

    /** {@link DynamicTableSource} for testing. */
    public static class DynamicTableSourceMock implements ScanTableSource {

        // Captured factory inputs; tests compare these via equals()/hashCode().
        public final String target;
        public final @Nullable DecodingFormat<DeserializationSchema<RowData>> keyFormat;
        public final DecodingFormat<DeserializationSchema<RowData>> valueFormat;

        DynamicTableSourceMock(
                String target,
                @Nullable DecodingFormat<DeserializationSchema<RowData>> keyFormat,
                DecodingFormat<DeserializationSchema<RowData>> valueFormat) {
            this.target = target;
            this.keyFormat = keyFormat;
            this.valueFormat = valueFormat;
        }

        // Mock implementation — no runtime behavior; returns null.
        @Override
        public ChangelogMode getChangelogMode() {
            return null;
        }

        @Override
        public ScanRuntimeProvider getScanRuntimeProvider(ScanContext runtimeProviderContext) {
            return null;
        }

        @Override
        public DynamicTableSource copy() {
            return null;
        }

        @Override
        public String asSummaryString() {
            return null;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            DynamicTableSourceMock that = (DynamicTableSourceMock) o;
            // Objects.equals on keyFormat because it may be null.
            return target.equals(that.target)
                    && Objects.equals(keyFormat, that.keyFormat)
                    && valueFormat.equals(that.valueFormat);
        }

        @Override
        public int hashCode() {
            return Objects.hash(target, keyFormat, valueFormat);
        }
    }

    // --------------------------------------------------------------------------------------------
    // Table sink
    // --------------------------------------------------------------------------------------------

    /** {@link DynamicTableSink} for testing. */
    public static class DynamicTableSinkMock implements DynamicTableSink {

        // Captured factory inputs; tests compare these via equals()/hashCode().
        public final String target;
        public final Long bufferSize;
        public final @Nullable EncodingFormat<SerializationSchema<RowData>> keyFormat;
        public final EncodingFormat<SerializationSchema<RowData>> valueFormat;

        DynamicTableSinkMock(
                String target,
                Long bufferSize,
                @Nullable EncodingFormat<SerializationSchema<RowData>> keyFormat,
                EncodingFormat<SerializationSchema<RowData>> valueFormat) {
            this.target = target;
            this.bufferSize = bufferSize;
            this.keyFormat = keyFormat;
            this.valueFormat = valueFormat;
        }

        // Mock implementation — no runtime behavior; returns null.
        @Override
        public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
            return null;
        }

        @Override
        public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
            return null;
        }

        @Override
        public DynamicTableSink copy() {
            return null;
        }

        @Override
        public String asSummaryString() {
            return null;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            DynamicTableSinkMock that = (DynamicTableSinkMock) o;
            // Objects.equals on keyFormat because it may be null.
            return target.equals(that.target)
                    && bufferSize.equals(that.bufferSize)
                    && Objects.equals(keyFormat, that.keyFormat)
                    && valueFormat.equals(that.valueFormat);
        }

        @Override
        public int hashCode() {
            return Objects.hash(target, bufferSize, keyFormat, valueFormat);
        }
    }
}
package seedu.tasklist.model.task;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import seedu.tasklist.commons.exceptions.IllegalValueException;
import seedu.tasklist.logic.parser.TimePreparser;
import com.joestelmach.natty.*;
//@@author A0146107M
/**
* Represents time.
*
*/
public class Time {
public static final String MESSAGE_TIME_CONSTRAINTS = "Time is invalid!";
public static final Date EPOCH_TIME = new Date(0);
public Calendar time;
/**
 * Constructor using NLP
 *
 * @param input Input string to parse
 * @throws IllegalValueException if given time is invalid.
 */
public Time(String input) throws IllegalValueException {
    //initialize time
    time = Calendar.getInstance();
    // A null or empty input means "no time given": fall back to the epoch
    // sentinel. BUG FIX: the previous check used reference equality
    // (input == ""), which only matches the interned empty-string literal;
    // an empty string built at runtime slipped through to the NLP parser.
    if (input == null || input.isEmpty()) {
        //set to epoch time
        time.setTime(EPOCH_TIME);
    } else {
        parseTimeInNlp(time, input);
    }
    // Sub-minute precision is not meaningful for task times.
    clearTimeSeconds();
}
/**
 * Parse time using Natty Parser NLP
 *
 * @param time Calendar to update
 * @param input String to parse
 * @throws IllegalValueException if given time is invalid.
 */
private void parseTimeInNlp(Calendar time, String input) throws IllegalValueException{
    // Normalize the raw input first, then let Natty extract date groups.
    List<DateGroup> dates = new Parser().parse(TimePreparser.preparse(input));
    // Guard clause: nothing usable was found in the input.
    if (dateNotPresent(dates)) {
        throw new IllegalValueException(getIVEMessage());
    }
    // Use the first match and fill in any inferred fields.
    DateGroup first = dates.get(0);
    time.setTime(first.getDates().get(0));
    setDefaultTime(first);
}
/**
 * Check if Natty Parser finds a date in a string
 *
 * @param dates List of DateGroups generated by Natty Parser after parsing a string
 * @return true if no date is found, false if a date is found
 */
private boolean dateNotPresent(List<DateGroup> dates){
    // No groups at all means no date was recognized.
    if (dates.isEmpty()) {
        return true;
    }
    // A group can still carry an empty date list.
    return dates.get(0).getDates().isEmpty();
}
/**
* Clear the second and millisecond fields from the Time object
*
*/
private void clearTimeSeconds(){
time.clear(Calendar.SECOND);
time.clear(Calendar.MILLISECOND);
}
/**
* Constructor using unix time in millis
*
* @param unixTimeMillis A long indicating the number of milliseconds since Unix epoch
*/
public Time(Long unixTimeMillis) {
time = Calendar.getInstance();
time.setTimeInMillis(unixTimeMillis);
}
/**
* Constructor using a Calendar instance
*
* @param cal Calendar to get time info from
*/
public Time(Calendar cal){
time = (Calendar)cal.clone();
}
/**
* Updates time based on NLP input
*
* @param input String representing time
* @throws IllegalValueException if given time is invalid
*/
public void updateTime(String input) throws IllegalValueException {
//check if it is a new time field
if(time.getTime().equals(EPOCH_TIME)){
Time temp = new Time(input);
this.time = temp.time;
}
//it is an update of a previously declared time
else{
//parse inputs
String preparsedTime = TimePreparser.preparse(input);
List<DateGroup> dates = new Parser().parse(preparsedTime);
//check if Natty finds any dates in input string
if(dateNotPresent(dates)){
throw new IllegalValueException(getIVEMessage());
}
//date present
else{
DateGroup dateGroup = dates.get(0);
//update times
updateAppropriateFields(dateGroup);
}
}
}
/**
* Update time's appropriate fields based on parsed input
*
* @param dateGroup DateGroup returned by Natty Parser
*/
private void updateAppropriateFields(DateGroup dateGroup){
//get time read by Natty Parser
Calendar reference = Calendar.getInstance();
reference.setTime(dateGroup.getDates().get(0));
//if time is explicitly defined, update time
if(!dateGroup.isTimeInferred()){
time.set(Calendar.HOUR_OF_DAY, reference.get(Calendar.HOUR_OF_DAY));
time.set(Calendar.MINUTE, reference.get(Calendar.MINUTE));
}
//if date is explicitly defined, update date
if(!dateGroup.isDateInferred()){
time.set(Calendar.DAY_OF_MONTH, reference.get(Calendar.DAY_OF_MONTH));
time.set(Calendar.MONTH, reference.get(Calendar.MONTH));
time.set(Calendar.YEAR, reference.get(Calendar.YEAR));
}
}
/**
* Set time to default values
*
* @param dateGroup dategroup returned by Natty Parser
*/
private void setDefaultTime(DateGroup dateGroup){
if (dateGroup.isTimeInferred()) {
time.set(Calendar.HOUR_OF_DAY, getDefaultHourVal());
time.set(Calendar.MINUTE, getDefaultMinuteVal());
}
}
/**
* Get hour default
*
* @return the default hour value
*/
protected int getDefaultHourVal(){
return 0;
}
/**
* Get minute default
*
* @return the default minute value
*/
protected int getDefaultMinuteVal(){
return 0;
}
/**
* Get Illegal Value Exception message
*
* @return The message associated with an Illegal Value Exception
*/
protected String getIVEMessage(){
return MESSAGE_TIME_CONSTRAINTS;
}
/**
* Get a string representation of the object
*
* @return the string representation of the Time object
*/
@Override
public String toString() {
if(time.getTime().equals(new Date(0))){
return (new Date(0)).toString();
}
else{
return time.getTime().toString();
}
}
/**
* Get a string representation of the object, with epoch time represented as "-"
*
* @return the string representation of the Time object
*/
public String toCardString() {
if(time.getTime().equals(new Date(0))){
return "-";
}
else{
DateFormat df = new SimpleDateFormat("d MMM yyyy\n h:mm a");
String finalString = df.format(time.getTime());
return finalString;
}
}
/**
* Get a string representation of the object, with epoch time represented as "-"
*
* @return the string representation of the Time object
*/
public String toDateString() {
if(time.getTime().equals(new Date(0))){
return "-";
}
else{
DateFormat df = new SimpleDateFormat("d MMM yyyy h:mm a");
String finalString = df.format(time.getTime());
return finalString;
}
}
/**
* Checks if time is equal to epoch time
*
* @return true if time represented by this object is epoch time
*/
public boolean isMissing(){
return time.getTime().equals(EPOCH_TIME);
}
/**
* Checks if another object is equal to this Time instance
*
* @param other Another object to compare to
* @return true if both are Time objects and the time represented by both objects are equal
*/
@Override
public boolean equals(Object other) {
return other == this // short circuit if same object
|| (other instanceof Time
&& this.time.getTimeInMillis()==((Time) other).time.getTimeInMillis()); // state check
}
/**
* Get a hash representation of this object
*
* @return The hash representation of this object
*/
@Override
public int hashCode() {
return time.hashCode();
}
/**
* Get the Calendar object associated with this Time object
*
* @return The calendar object associated with this Time object
*/
public Calendar getAsCalendar() {
return time;
}
/**
* Compares this Time object with another Time object
*
* @param time The other Time object to compare to
* @return 0 if both times are equal, positive if the time represented by this object is
* after the time of the compared object, negative if the time represented by this object is
* before the time of the compared object
*/
public int compareTo(Time time) {
Long time1 = this.getAsCalendar().getTimeInMillis();
Long time2 = time.getAsCalendar().getTimeInMillis();
return time1.compareTo(time2);
}
}
| |
/*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.schema.util;
import java.util.ArrayList;
import java.util.List;
import javax.xml.namespace.QName;
import com.evolveum.midpoint.prism.PrismPropertyValue;
import com.evolveum.midpoint.prism.PrismReferenceValue;
import com.evolveum.midpoint.prism.PrismValue;
import com.evolveum.midpoint.prism.match.PolyStringOrigMatchingRule;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.query.NaryLogicalFilter;
import com.evolveum.midpoint.schema.ResourceShadowDiscriminator;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang.mutable.MutableBoolean;
import com.evolveum.midpoint.prism.ItemDefinition;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.query.AllFilter;
import com.evolveum.midpoint.prism.query.AndFilter;
import com.evolveum.midpoint.prism.query.EqualFilter;
import com.evolveum.midpoint.prism.query.InOidFilter;
import com.evolveum.midpoint.prism.query.LogicalFilter;
import com.evolveum.midpoint.prism.query.NoneFilter;
import com.evolveum.midpoint.prism.query.NotFilter;
import com.evolveum.midpoint.prism.query.ObjectFilter;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.query.OrFilter;
import com.evolveum.midpoint.prism.query.OrgFilter;
import com.evolveum.midpoint.prism.query.RefFilter;
import com.evolveum.midpoint.prism.query.ValueFilter;
import com.evolveum.midpoint.prism.query.Visitor;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OrgType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.prism.xml.ns._public.query_3.QueryType;
import com.evolveum.prism.xml.ns._public.types_3.PolyStringType;
/**
 * Static utility methods for building and manipulating prism queries
 * ({@link ObjectQuery}) and filters ({@link ObjectFilter}): name /
 * resource / objectclass query factories, filter merging (AND/OR),
 * recursive simplification, and extraction of values from filter trees.
 */
public class ObjectQueryUtil {
// --- name query factories (equality on ObjectType.F_NAME) ---
public static ObjectQuery createNameQuery(String name, PrismContext prismContext) throws SchemaException {
PolyString polyName = new PolyString(name);
return createNameQuery(polyName, prismContext);
}
public static ObjectQuery createOrigNameQuery(String name, PrismContext prismContext) throws SchemaException {
PolyString polyName = new PolyString(name);
return createOrigNameQuery(polyName, prismContext);
}
public static ObjectQuery createNameQuery(PolyStringType name, PrismContext prismContext) throws SchemaException {
return createNameQuery(name.toPolyString(), prismContext);
}
public static ObjectQuery createOrigNameQuery(PolyStringType name, PrismContext prismContext) throws SchemaException {
return createOrigNameQuery(name.toPolyString(), prismContext);
}
// default matching rule (null) compares the whole polystring
public static ObjectQuery createNameQuery(PolyString name, PrismContext prismContext) throws SchemaException {
EqualFilter filter = EqualFilter.createEqual(ObjectType.F_NAME, ObjectType.class, prismContext, null, name);
return ObjectQuery.createObjectQuery(filter);
}
// matches only the "orig" part of the polystring (ignores normalized form)
public static ObjectQuery createOrigNameQuery(PolyString name, PrismContext prismContext) throws SchemaException {
EqualFilter filter = EqualFilter.createEqual(ObjectType.F_NAME, ObjectType.class, prismContext, PolyStringOrigMatchingRule.NAME, name);
return ObjectQuery.createObjectQuery(filter);
}
public static ObjectQuery createNameQuery(ObjectType object) throws SchemaException {
return createNameQuery(object.getName(), object.asPrismObject().getPrismContext());
}
public static <O extends ObjectType> ObjectQuery createNameQuery(PrismObject<O> object) throws SchemaException {
return createNameQuery(object.asObjectable().getName(), object.getPrismContext());
}
// --- shadow (resource object) query factories ---
public static ObjectQuery createResourceAndObjectClassQuery(String resourceOid, QName objectClass, PrismContext prismContext) throws SchemaException {
return ObjectQuery.createObjectQuery(createResourceAndObjectClassFilter(resourceOid, objectClass, prismContext));
}
/** AND of resourceRef equality and objectClass equality; all arguments required. */
public static ObjectFilter createResourceAndObjectClassFilter(String resourceOid, QName objectClass, PrismContext prismContext) throws SchemaException {
Validate.notNull(resourceOid, "Resource where to search must not be null.");
Validate.notNull(objectClass, "Object class to search must not be null.");
Validate.notNull(prismContext, "Prism context must not be null.");
AndFilter and = AndFilter.createAnd(
createResourceFilter(resourceOid, prismContext),
createObjectClassFilter(objectClass, prismContext));
return and;
}
public static ObjectQuery createResourceQuery(String resourceOid, PrismContext prismContext) throws SchemaException {
Validate.notNull(resourceOid, "Resource where to search must not be null.");
Validate.notNull(prismContext, "Prism context must not be null.");
return ObjectQuery.createObjectQuery(createResourceFilter(resourceOid, prismContext));
}
public static ObjectFilter createResourceFilter(String resourceOid, PrismContext prismContext) throws SchemaException {
return RefFilter.createReferenceEqual(ShadowType.F_RESOURCE_REF, ShadowType.class, prismContext, resourceOid);
}
public static ObjectFilter createObjectClassFilter(QName objectClass, PrismContext prismContext) {
return EqualFilter.createEqual(ShadowType.F_OBJECT_CLASS, ShadowType.class, prismContext, null, objectClass);
}
public static <T extends ObjectType> ObjectQuery createNameQuery(Class<T> clazz, PrismContext prismContext, String name) throws SchemaException{
PolyString namePolyString = new PolyString(name);
EqualFilter equal = EqualFilter.createEqual(ObjectType.F_NAME, clazz, prismContext, null, namePolyString);
return ObjectQuery.createObjectQuery(equal);
}
public static ObjectQuery createRootOrgQuery(PrismContext prismContext) throws SchemaException {
ObjectQuery objectQuery = ObjectQuery.createObjectQuery(OrgFilter.createRootOrg());
return objectQuery;
}
public static boolean hasAllDefinitions(ObjectQuery query) {
return hasAllDefinitions(query.getFilter());
}
/**
 * Walks the whole filter tree; returns false if any value filter is
 * missing its item definition (i.e. the filter is "raw"/unresolved).
 */
public static boolean hasAllDefinitions(ObjectFilter filter) {
final MutableBoolean hasAllDefinitions = new MutableBoolean(true);
Visitor visitor = new Visitor() {
@Override
public void visit(ObjectFilter filter) {
if (filter instanceof ValueFilter) {
ItemDefinition definition = ((ValueFilter<?>)filter).getDefinition();
if (definition == null) {
hasAllDefinitions.setValue(false);
}
}
}
};
filter.accept(visitor);
return hasAllDefinitions.booleanValue();
}
/**
 * Throws IllegalArgumentException if the filter tree contains an org filter;
 * message is prefixed when provided.
 */
public static void assertPropertyOnly(ObjectFilter filter, final String message) {
Visitor visitor = new Visitor() {
@Override
public void visit(ObjectFilter filter) {
if (filter instanceof OrgFilter) {
if (message == null) {
throw new IllegalArgumentException(filter.toString());
} else {
throw new IllegalArgumentException(message+": "+filter);
}
}
}
};
filter.accept(visitor);
}
/** Throws IllegalArgumentException if any value filter in the tree is raw (unparsed). */
public static void assertNotRaw(ObjectFilter filter, final String message) {
Visitor visitor = new Visitor() {
@Override
public void visit(ObjectFilter filter) {
if (filter instanceof ValueFilter && ((ValueFilter)filter).isRaw()) {
if (message == null) {
throw new IllegalArgumentException(filter.toString());
} else {
throw new IllegalArgumentException(message+": "+filter);
}
}
}
};
filter.accept(visitor);
}
/** Renders a QueryType for diagnostic output; serializes the filter clause as XML when present. */
public static String dump(QueryType query) throws SchemaException {
if (query == null) {
return "null";
}
StringBuilder sb = new StringBuilder("Query(");
sb.append(query.getDescription()).append("):\n");
if (query.getFilter() != null && query.getFilter().containsFilterClause())
sb.append(DOMUtil.serializeDOMToString(query.getFilter().getFilterClauseAsElement()));
else
sb.append("(no filter)");
return sb.toString();
}
/**
 * Merges the two provided arguments into one AND filter in the most efficient way.
 * NOTE(review): when origFilter is already an AndFilter this mutates it in place
 * (addCondition) instead of creating a new filter — callers sharing the instance
 * should be aware; confirm this is intended before relying on immutability.
 */
public static ObjectFilter filterAnd(ObjectFilter origFilter, ObjectFilter additionalFilter) {
if (origFilter == additionalFilter) {
// AND with itself
return origFilter;
}
if (origFilter == null) {
return additionalFilter;
}
if (additionalFilter == null) {
return origFilter;
}
// NONE absorbs everything under AND
if (origFilter instanceof NoneFilter) {
return origFilter;
}
if (additionalFilter instanceof NoneFilter) {
return additionalFilter;
}
// ALL is the identity element under AND
if (origFilter instanceof AllFilter) {
return additionalFilter;
}
if (additionalFilter instanceof AllFilter) {
return origFilter;
}
if (origFilter instanceof AndFilter) {
if (!((AndFilter)origFilter).contains(additionalFilter)) {
((AndFilter)origFilter).addCondition(additionalFilter);
}
return origFilter;
}
return AndFilter.createAnd(origFilter, additionalFilter);
}
/**
 * Merges the two provided arguments into one OR filter in the most efficient way.
 * NOTE(review): when origFilter is already an OrFilter this mutates it in place
 * (addCondition) — see the analogous note on filterAnd.
 */
public static ObjectFilter filterOr(ObjectFilter origFilter, ObjectFilter additionalFilter) {
if (origFilter == additionalFilter) {
// OR with itself
return origFilter;
}
if (origFilter == null) {
return additionalFilter;
}
if (additionalFilter == null) {
return origFilter;
}
// ALL absorbs everything under OR
if (origFilter instanceof AllFilter) {
return origFilter;
}
if (additionalFilter instanceof AllFilter) {
return additionalFilter;
}
// NONE is the identity element under OR
if (origFilter instanceof NoneFilter) {
return additionalFilter;
}
if (additionalFilter instanceof NoneFilter) {
return origFilter;
}
if (origFilter instanceof OrFilter) {
if (!((OrFilter)origFilter).contains(additionalFilter)) {
((OrFilter)origFilter).addCondition(additionalFilter);
}
return origFilter;
}
return OrFilter.createOr(origFilter, additionalFilter);
}
/** null is treated as "match all". */
public static boolean isAll(ObjectFilter filter) {
return filter == null || filter instanceof AllFilter;
}
public static boolean isNone(ObjectFilter filter) {
return filter != null && filter instanceof NoneFilter;
}
/**
 * Recursively simplifies a filter tree: ALL/NONE constants are folded using
 * boolean-algebra identities, empty AND/OR collapse to ALL, and NOT of a
 * constant is inverted. Returns a new filter (or a clone) — the input is
 * not modified.
 */
public static ObjectFilter simplify(ObjectFilter filter) {
if (filter == null) {
return null;
}
if (filter instanceof AndFilter) {
List<ObjectFilter> conditions = ((AndFilter)filter).getConditions();
AndFilter simplifiedFilter = ((AndFilter)filter).cloneEmpty();
for (ObjectFilter subfilter: conditions) {
if (subfilter instanceof NoneFilter) {
// AND with "false"
return NoneFilter.createNone();
} else if (subfilter instanceof AllFilter) {
// AND with "true", just skip it
} else {
ObjectFilter simplifiedSubfilter = simplify(subfilter);
simplifiedFilter.addCondition(simplifiedSubfilter);
}
}
if (simplifiedFilter.isEmpty()) {
return AllFilter.createAll();
}
return simplifiedFilter;
} else if (filter instanceof OrFilter) {
List<ObjectFilter> conditions = ((OrFilter)filter).getConditions();
OrFilter simplifiedFilter = ((OrFilter)filter).cloneEmpty();
for (ObjectFilter subfilter: conditions) {
if (subfilter instanceof NoneFilter) {
// OR with "false", just skip it
} else if (subfilter instanceof AllFilter) {
// OR with "true"
return AllFilter.createAll();
} else {
ObjectFilter simplifiedSubfilter = simplify(subfilter);
simplifiedFilter.addCondition(simplifiedSubfilter);
}
}
if (simplifiedFilter.isEmpty()) {
return AllFilter.createAll();
}
return simplifiedFilter;
} else if (filter instanceof NotFilter) {
ObjectFilter subfilter = ((NotFilter)filter).getFilter();
ObjectFilter simplifiedSubfilter = simplify(subfilter);
if (subfilter instanceof NoneFilter) {
// NOT(false) = true
return AllFilter.createAll();
} else if (subfilter instanceof AllFilter) {
// NOT(true) = false
return NoneFilter.createNone();
} else {
NotFilter simplifiedFilter = ((NotFilter)filter).cloneEmpty();
simplifiedFilter.setFilter(simplifiedSubfilter);
return simplifiedFilter;
}
} else {
// Cannot simplify
return filter.clone();
}
}
/**
 * Extracts the single value of the given property from a list of filter
 * conditions, descending into n-ary logical filters (AND/OR). Returns null
 * when the property is not present; throws when it is present more than once
 * or has no value.
 */
@SuppressWarnings("rawtypes")
public static <T> T getValueFromFilter(List<? extends ObjectFilter> conditions, QName propertyName)
throws SchemaException {
ItemPath propertyPath = new ItemPath(propertyName);
for (ObjectFilter f : conditions) {
if (f instanceof EqualFilter && propertyPath.equivalent(((EqualFilter) f).getFullPath())) {
List<? extends PrismValue> values = ((EqualFilter) f).getValues();
if (values.size() > 1) {
throw new SchemaException("More than one " + propertyName
+ " defined in the search query.");
}
if (values.size() < 1) {
throw new SchemaException("Search query does not have specified " + propertyName + ".");
}
return (T) ((PrismPropertyValue) values.get(0)).getValue();
}
// recurse into nested AND/OR conditions
if (NaryLogicalFilter.class.isAssignableFrom(f.getClass())) {
T value = getValueFromFilter(((NaryLogicalFilter) f).getConditions(), propertyName);
if (value != null) {
return value;
}
}
}
return null;
}
/**
 * Extracts the single resourceRef OID from a list of filter conditions,
 * descending into n-ary logical filters. Returns null when absent; throws
 * on multiple or empty reference values.
 */
@SuppressWarnings("unchecked")
public static String getResourceOidFromFilter(List<? extends ObjectFilter> conditions)
throws SchemaException {
for (ObjectFilter f : conditions) {
if (f instanceof RefFilter
&& ShadowType.F_RESOURCE_REF.equals(((RefFilter) f).getDefinition().getName())) {
List<PrismReferenceValue> values = (List<PrismReferenceValue>) ((RefFilter) f).getValues();
if (values.size() > 1) {
throw new SchemaException(
"More than one resource references defined in the search query.");
}
if (values.size() < 1) {
throw new SchemaException("Search query does not have specified resource reference.");
}
return values.get(0).getOid();
}
if (NaryLogicalFilter.class.isAssignableFrom(f.getClass())) {
String resourceOid = getResourceOidFromFilter(((NaryLogicalFilter) f).getConditions());
if (resourceOid != null) {
return resourceOid;
}
}
}
return null;
}
/**
 * Derives shadow coordinates (resource OID, object class, kind, intent)
 * from a search filter. Only a top-level AndFilter is inspected; the
 * resource OID and at least one of objectClass/kind are mandatory.
 */
public static ResourceShadowDiscriminator getCoordinates(ObjectFilter filter) throws SchemaException {
String resourceOid = null;
QName objectClass = null;
ShadowKindType kind = null;
String intent = null;
if (filter instanceof AndFilter) {
List<? extends ObjectFilter> conditions = ((AndFilter) filter).getConditions();
resourceOid = getResourceOidFromFilter(conditions);
objectClass = getValueFromFilter(conditions, ShadowType.F_OBJECT_CLASS);
kind = getValueFromFilter(conditions, ShadowType.F_KIND);
intent = getValueFromFilter(conditions, ShadowType.F_INTENT);
}
if (resourceOid == null) {
throw new SchemaException("Resource not defined in a search query");
}
if (objectClass == null && kind == null) {
throw new SchemaException("Neither objectclass not kind is specified in a search query");
}
ResourceShadowDiscriminator coordinates = new ResourceShadowDiscriminator(resourceOid, kind, intent, false);
coordinates.setObjectClass(objectClass);
return coordinates;
}
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.communications.relationships;
import java.awt.event.ActionEvent;
import java.util.logging.Level;
import javax.swing.AbstractAction;
import javax.swing.Action;
import org.apache.commons.lang3.StringUtils;
import org.openide.nodes.Sheet;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_SENT;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL_FROM;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL_TO;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SUBJECT;
import org.sleuthkit.datamodel.TskCoreException;
import static org.sleuthkit.autopsy.communications.relationships.RelationshipsNodeUtilities.getAttributeDisplayString;
import org.sleuthkit.autopsy.datamodel.BlackboardArtifactNode;
import org.sleuthkit.datamodel.Account;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.CommunicationsManager;
import org.sleuthkit.datamodel.blackboardutils.attributes.BlackboardJsonAttrUtil;
import org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments;
/**
* Wraps a BlackboardArtifact as an AbstractNode for use in an OutlookView
*/
/**
 * Wraps a BlackboardArtifact as an AbstractNode for use in an OutlookView.
 */
class MessageNode extends BlackboardArtifactNode {

    public static final String UNTHREADED_ID = "<UNTHREADED>";

    private static final Logger logger = Logger.getLogger(MessageNode.class.getName());

    private final String threadID;
    private final Action preferredAction;
    private final Action defaultNoopAction = new DefaultMessageAction();

    /**
     * @param artifact        message artifact (expected TSK_EMAIL_MSG or TSK_MESSAGE)
     * @param threadID        conversation thread id; may be null
     * @param preferredAction action returned by getPreferredAction(); may be null
     */
    MessageNode(BlackboardArtifact artifact, String threadID, Action preferredAction) {
        super(artifact);
        this.preferredAction = preferredAction;
        // Display name is the artifact type name with trailing "s" and the word
        // "message" removed, e.g. "Messages" -> "Message" -> "".
        final String stripEnd = StringUtils.stripEnd(artifact.getDisplayName(), "s"); // NON-NLS
        String removeEndIgnoreCase = StringUtils.removeEndIgnoreCase(stripEnd, "message"); // NON-NLS
        setDisplayName(removeEndIgnoreCase.isEmpty() ? stripEnd : removeEndIgnoreCase);
        this.threadID = threadID;
    }

    @Messages({
        "MessageNode_Node_Property_Type=Type",
        "MessageNode_Node_Property_From=From",
        "MessageNode_Node_Property_To=To",
        "MessageNode_Node_Property_Date=Date",
        "MessageNode_Node_Property_Subject=Subject",
        "MessageNode_Node_Property_Attms=Attachment Count"
    })
    @Override
    protected Sheet createSheet() {
        Sheet sheet = Sheet.createDefault();
        Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
        if (sheetSet == null) {
            sheetSet = Sheet.createPropertiesSet();
            sheet.put(sheetSet);
        }
        sheetSet.put(new NodeProperty<>("Type", Bundle.MessageNode_Node_Property_Type(), "", getDisplayName())); //NON-NLS
        final BlackboardArtifact artifact = getArtifact();
        BlackboardArtifact.ARTIFACT_TYPE fromID = BlackboardArtifact.ARTIFACT_TYPE.fromID(artifact.getArtifactTypeID());
        // only email and generic message artifacts carry the properties below
        if (fromID == null
                || (fromID != TSK_EMAIL_MSG
                && fromID != TSK_MESSAGE)) {
            return sheet;
        }
        if (threadID != null) {
            sheetSet.put(new NodeProperty<>("ThreadID", "ThreadID", "", threadID)); //NON-NLS
        }
        sheetSet.put(new NodeProperty<>("Subject", Bundle.MessageNode_Node_Property_Subject(), "",
                getAttributeDisplayString(artifact, TSK_SUBJECT))); //NON-NLS
        try {
            sheetSet.put(new NodeProperty<>("Attms", Bundle.MessageNode_Node_Property_Attms(), "", getAttachmentsCount())); //NON-NLS
        } catch (TskCoreException ex) {
            logger.log(Level.WARNING, "Error loading attachment count for " + artifact, ex); //NON-NLS
        }
        String msg_from = getAttributeDisplayString(artifact, TSK_EMAIL_FROM);
        String msg_to = getAttributeDisplayString(artifact, TSK_EMAIL_TO);
        String date = getAttributeDisplayString(artifact, TSK_DATETIME_SENT);
        Account account_from = null;
        Account account_to = null;
        try {
            CommunicationsManager manager = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager();
            if (msg_from.isEmpty()) {
                // no email FROM attribute: fall back to phone number
                msg_from = getAttributeDisplayString(artifact, TSK_PHONE_NUMBER_FROM);
                if (manager != null && !msg_from.isEmpty()) {
                    account_from = manager.getAccount(Account.Type.PHONE, msg_from);
                }
            } else if (manager != null) {
                // The email address is sometimes in the format <name>: <email>;
                // take the portion after the last ':'.
                // BUGFIX(review): previously parsed msg_to here, so account_from
                // was looked up with the recipient's address instead of the sender's.
                String fromAddr = msg_from;
                String[] fromSplit = msg_from.split(":");
                if (fromSplit.length > 0) {
                    fromAddr = fromSplit[fromSplit.length - 1].trim();
                }
                account_from = manager.getAccount(Account.Type.EMAIL, fromAddr);
            }
            if (msg_to.isEmpty()) {
                // no email TO attribute: fall back to phone number
                msg_to = getAttributeDisplayString(artifact, TSK_PHONE_NUMBER_TO);
                if (manager != null && !msg_to.isEmpty()) {
                    account_to = manager.getAccount(Account.Type.PHONE, msg_to);
                }
            } else if (manager != null) {
                account_to = manager.getAccount(Account.Type.EMAIL, msg_to);
            }
            if (date.isEmpty()) {
                date = getAttributeDisplayString(artifact, TSK_DATETIME);
            }
        } catch (TskCoreException ex) {
            // Account lookup is best effort: the sheet is still usable without
            // resolved accounts, so log instead of propagating.
            logger.log(Level.WARNING, "Error looking up accounts for artifact " + artifact, ex); //NON-NLS
        }
        sheetSet.put(new AccountNodeProperty<>("From", Bundle.MessageNode_Node_Property_From(),
                msg_from, account_from)); //NON-NLS
        sheetSet.put(new AccountNodeProperty<>("To", Bundle.MessageNode_Node_Property_To(),
                msg_to, account_to)); //NON-NLS
        sheetSet.put(new NodeProperty<>("Date", Bundle.MessageNode_Node_Property_Date(), "",
                date)); //NON-NLS
        return sheet;
    }

    /**
     * Circumvent DataResultFilterNode's slightly odd delegation to
     * BlackboardArtifactNode.getSourceName().
     *
     * @return the displayName of this Node, which is the type.
     */
    @Override
    public String getSourceName() {
        return getDisplayName();
    }

    String getThreadID() {
        return threadID;
    }

    @Override
    public Action getPreferredAction() {
        return preferredAction != null ? preferredAction : defaultNoopAction;
    }

    /**
     * Counts attachments: modern artifacts store them as a TSK_ATTACHMENTS
     * JSON attribute; legacy attachments are children of the message artifact.
     *
     * @throws TskCoreException on case database access errors
     */
    private int getAttachmentsCount() throws TskCoreException {
        final BlackboardArtifact artifact = getArtifact();
        int attachmentsCount;
        // Attachments are specified in an attribute TSK_ATTACHMENTS as JSON attribute
        BlackboardAttribute attachmentsAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ATTACHMENTS));
        if (attachmentsAttr != null) {
            try {
                MessageAttachments msgAttachments = BlackboardJsonAttrUtil.fromAttribute(attachmentsAttr, MessageAttachments.class);
                return msgAttachments.getAttachmentsCount();
            } catch (BlackboardJsonAttrUtil.InvalidJsonException ex) {
                logger.log(Level.WARNING, String.format("Unable to parse json for MessageAttachments object in artifact: %s", artifact.getName()), ex);
                return 0;
            }
        } else { // legacy attachments may be children of message artifact.
            attachmentsCount = artifact.getChildrenCount();
        }
        return attachmentsCount;
    }

    /**
     * A no op action to override the default action of BlackboardArtifactNode
     */
    private class DefaultMessageAction extends AbstractAction {

        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent e) {
            // Do Nothing.
        }
    }
}
| |
package app.monitor.job;
import core.framework.internal.web.api.APIDefinitionResponse;
import core.framework.internal.web.api.APIType;
import core.framework.log.Severity;
import core.framework.util.Strings;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* @author neo
*/
class APIValidator {
private static final Set<String> SIMPLE_TYPES = Set.of("String", "Boolean", "Integer", "Long", "Double", "BigDecimal", "LocalDate", "LocalDateTime", "ZonedDateTime", "LocalTime");
private static final Set<String> COLLECTION_TYPES = Set.of("Map", "List");
final Set<String> warnings = new LinkedHashSet<>();
final Set<String> errors = new LinkedHashSet<>();
final Map<String, Operation> previousOperations;
final Map<String, APIType> previousTypes;
final Map<String, Operation> currentOperations;
final Map<String, APIType> currentTypes;
final Set<String> visitedPreviousTypes = new HashSet<>();
final Set<String> visitedCurrentTypes = new HashSet<>();
final Map<String, Severity> removedReferenceTypes = new HashMap<>(); // types referred by removed methods and fields
/**
 * Indexes the operations and declared types of both API snapshots by name,
 * so they can be compared pairwise during validation.
 */
APIValidator(APIDefinitionResponse previous, APIDefinitionResponse current) {
    previousOperations = operations(previous);
    currentOperations = operations(current);
    previousTypes = typesByName(previous);
    currentTypes = typesByName(current);
}

// maps each declared type by its name for lookup during validation
private static Map<String, APIType> typesByName(APIDefinitionResponse response) {
    return response.types.stream().collect(Collectors.toMap(apiType -> apiType.name, apiType -> apiType));
}
/**
 * Runs operation and type validation over the two snapshots and renders the
 * accumulated warnings/errors into the final result string.
 *
 * @return formatted validation result
 */
String validate() {
validateOperations();
validateTypes();
warnings.removeAll(errors); // remove warnings if there is same error, e.g. one change is referred by both request/response bean
return result();
}
/**
 * Compares previous and current operations pairwise by key. A removed
 * method is an error (only a warning when it was already deprecated) and
 * its request/response types are recorded as removed references; methods
 * left over in currentOperations are newly added and produce warnings.
 */
private void validateOperations() {
    for (Map.Entry<String, Operation> entry : previousOperations.entrySet()) {
        Operation previous = entry.getValue();
        Operation current = currentOperations.remove(entry.getKey());
        if (current == null) {
            // a deprecated method is expected to disappear, so removal only warns
            boolean deprecated = Boolean.TRUE.equals(previous.operation.deprecated);
            addError(deprecated, Strings.format("removed method {}", previous.signature()));
            Severity severity = deprecated ? Severity.WARN : Severity.ERROR;
            removeReferenceType(previous.operation.requestType, severity);
            removeReferenceType(previous.operation.responseType, severity);
        } else {
            validateOperation(previous, current);
        }
    }
    // remaining current operations have no previous counterpart, i.e. newly added
    // (redundant isEmpty() guard removed: iterating an empty map is already a no-op)
    for (Operation operation : currentOperations.values()) {
        warnings.add(Strings.format("added method {}", operation.signature()));
    }
}
/**
 * Validates the types not already visited during operation validation.
 * A removed type is an error (a warning when it was only referred to by
 * deprecated methods), a kind change (bean vs enum) is an error, and a
 * type present only in the current snapshot is reported as added.
 */
private void validateTypes() {
    var remainingPrevious = new HashMap<>(previousTypes);
    remainingPrevious.keySet().removeAll(visitedPreviousTypes);
    var remainingCurrent = new HashMap<>(currentTypes);
    remainingCurrent.keySet().removeAll(visitedCurrentTypes);
    for (APIType previousType : remainingPrevious.values()) {
        APIType matchingCurrent = remainingCurrent.remove(previousType.name);
        if (matchingCurrent == null) {
            // only warn when the type was referred to by removed deprecated methods
            boolean onlyWarn = removedReferenceTypes.get(previousType.name) == Severity.WARN;
            addError(onlyWarn, Strings.format("removed type {}", previousType.name));
        } else if (!Strings.equals(previousType.type, matchingCurrent.type)) {
            // changed bean to enum or vice versa
            errors.add(Strings.format("changed type {} from {} to {}", previousType.name, previousType.type, matchingCurrent.type));
        } else {
            validateType(previousType.name, matchingCurrent.name, false);
        }
    }
    for (APIType addedType : remainingCurrent.values()) {
        warnings.add(Strings.format("added type {}", addedType.name));
    }
}
/**
 * Compares one previous operation with its current counterpart (same "method/path" key).
 * Renames are warnings; changes to path-param types, request/response types or response
 * optionality are errors; @Deprecated toggles are warnings. Referenced request/response
 * types are validated recursively.
 */
private void validateOperation(Operation previous, Operation current) {
    String previousMethod = previous.methodLiteral();
    String currentMethod = current.methodLiteral();
    if (!Strings.equals(previousMethod, currentMethod)) {
        // same HTTP method + path, different service/method name => rename, still compatible
        warnings.add(Strings.format("renamed method {} to {}", previousMethod, currentMethod));
    }
    APIDefinitionResponse.PathParam[] previousPathParams = previous.operation.pathParams.toArray(new APIDefinitionResponse.PathParam[0]);
    APIDefinitionResponse.PathParam[] currentPathParams = current.operation.pathParams.toArray(new APIDefinitionResponse.PathParam[0]);
    for (int i = 0; i < previousPathParams.length; i++) { // previous length must equal to current size, as the "method/path" is same
        APIDefinitionResponse.PathParam previousPathParam = previousPathParams[i];
        APIDefinitionResponse.PathParam currentPathParam = currentPathParams[i];
        if (!Strings.equals(previousPathParam.type, currentPathParam.type)) {
            errors.add(Strings.format("changed pathParam {} of {} from {} to {}", previousPathParam.name, previousMethod, previousPathParam.type, currentPathParam.type));
        }
    }
    // request type: adding or removing a request body entirely is breaking;
    // renaming the bean is a warning, and the bean contents are compared recursively
    if ((previous.operation.requestType == null || current.operation.requestType == null)
        && !Strings.equals(previous.operation.requestType, current.operation.requestType)) {
        errors.add(Strings.format("changed request type of {} from {} to {}", previousMethod, previous.operation.requestType, current.operation.requestType));
    } else if (previous.operation.requestType != null && current.operation.requestType != null) {
        if (!Strings.equals(previous.operation.requestType, current.operation.requestType)) {
            warnings.add(Strings.format("renamed request type of {} from {} to {}", previousMethod, previous.operation.requestType, current.operation.requestType));
        }
        validateType(previous.operation.requestType, current.operation.requestType, true);
    }
    // response type: optionality changes and void <-> non-void transitions are breaking
    if (Boolean.compare(previous.optional(), current.optional()) != 0) {
        errors.add(Strings.format("changed response type of {} from {} to {}", previousMethod, previous.responseTypeLiteral(), current.responseTypeLiteral()));
    } else if ("void".equals(previous.operation.responseType) || "void".equals(current.operation.responseType)) {
        if (!Strings.equals(previous.operation.responseType, current.operation.responseType)) {
            errors.add(Strings.format("changed response type of {} from {} to {}", previousMethod, previous.responseTypeLiteral(), current.responseTypeLiteral()));
        }
    } else { // both are not void
        validateType(previous.operation.responseType, current.operation.responseType, false);
    }
    if (Boolean.compare(previous.deprecated(), current.deprecated()) != 0) {
        if (previous.deprecated())
            warnings.add(Strings.format("removed @Deprecated from method {}", previousMethod));
        else
            warnings.add(Strings.format("added @Deprecated to method {}", previousMethod));
    }
}
/**
 * Recursively compares two named types, dispatching on the type kind (enum vs bean).
 * Both names are marked visited so validateTypes() does not report them again.
 * NOTE(review): assumes both names resolve in previousTypes/currentTypes — callers only
 * pass names taken from the definitions themselves; confirm a missing entry cannot occur.
 */
private void validateType(String previousType, String currentType, boolean isRequest) {
    visitedPreviousTypes.add(previousType);
    visitedCurrentTypes.add(currentType);
    APIType previous = previousTypes.get(previousType);
    APIType current = currentTypes.get(currentType);
    if ("enum".equals(previous.type)) {
        validateEnumType(previous, current, isRequest);
    } else { // bean
        // NOTE(review): validateBeanType takes (current, previous) — order is reversed
        // relative to validateEnumType; see that method's parameter list.
        validateBeanType(current, previous, isRequest);
    }
}
/**
 * Compares two bean types field by field.
 * NOTE(review): parameter order is (current, previous) — reversed relative to
 * validateEnumType; keep that in mind when reading the call site in validateType.
 * Direction matters: on request beans, removing a field or dropping @NotNull is
 * tolerated (warning); on response beans, adding @NotNull or removing an optional
 * field is tolerated instead.
 */
private void validateBeanType(APIType current, APIType previous, boolean isRequest) {
    var currentFields = current.fields.stream().collect(Collectors.toMap(field -> field.name, Function.identity()));
    for (APIType.Field previousField : previous.fields) {
        String[] previousTypes = candidateTypes(previousField); // field type plus its generic type params
        var currentField = currentFields.remove(previousField.name);
        if (currentField == null) {
            // removing a request field, or any field that was not @NotNull, is compatible
            boolean warning = isRequest || !Boolean.TRUE.equals(previousField.constraints.notNull);
            addError(warning, Strings.format("removed field {}.{}", previous.name, previousField.name));
            Severity severity = warning ? Severity.WARN : Severity.ERROR;
            for (String previousType : previousTypes) {
                removeReferenceType(previousType, severity);
            }
            continue;
        }
        String[] currentTypes = candidateTypes(currentField);
        // candidate arrays are positionally aligned (raw type, then type params in order)
        if (previousTypes.length != currentTypes.length) {
            errors.add(Strings.format("changed field type of {}.{} from {} to {}", previous.name, previousField.name, fieldType(previousField), fieldType(currentField)));
        } else {
            for (int i = 0; i < previousTypes.length; i++) {
                String previousCandidateType = previousTypes[i];
                String currentCandidateType = currentTypes[i];
                switch (compareType(previousCandidateType, currentCandidateType)) {
                    case NOT_MATCH -> errors.add(Strings.format("changed field type of {}.{} from {} to {}", previous.name, previousField.name, fieldType(previousField), fieldType(currentField)));
                    case FURTHER_COMPARE -> validateType(previousCandidateType, currentCandidateType, isRequest);
                    default -> {
                    }
                }
            }
        }
        // @NotNull tightening is breaking for responses; loosening is breaking for requests
        if (!Boolean.TRUE.equals(previousField.constraints.notNull) && Boolean.TRUE.equals(currentField.constraints.notNull)) {
            addError(!isRequest, Strings.format("added @NotNull to field {}.{}", previous.name, previousField.name));
        } else if (Boolean.TRUE.equals(previousField.constraints.notNull) && !Boolean.TRUE.equals(currentField.constraints.notNull)) {
            addError(isRequest, Strings.format("removed @NotNull from field {}.{}", previous.name, previousField.name));
        }
    }
    // fields only present in current were added; a mandatory new request field is breaking
    for (var currentField : currentFields.values()) {
        if (isRequest && Boolean.TRUE.equals(currentField.constraints.notNull)) {
            errors.add(Strings.format("added field @NotNull {}.{}", current.name, currentField.name));
        } else {
            warnings.add(Strings.format("added field {}.{}", current.name, currentField.name));
        }
    }
}
/**
 * Compares two enum types constant by constant. Removed or re-valued constants are
 * always errors; constants added to a request-side enum are errors (old clients
 * cannot send them safely), otherwise warnings.
 */
private void validateEnumType(APIType previous, APIType current, boolean isRequest) {
    var previousConstants = previous.enumConstants.stream().collect(Collectors.toMap(constant -> constant.name, constant -> constant.value));
    var currentConstants = current.enumConstants.stream().collect(Collectors.toMap(constant -> constant.name, constant -> constant.value));
    for (var previousEntry : previousConstants.entrySet()) {
        String name = previousEntry.getKey();
        String currentValue = currentConstants.remove(name);
        if (currentValue == null) {
            errors.add(Strings.format("removed enum value {}.{}", previous.name, name));
            continue;
        }
        if (!Strings.equals(previousEntry.getValue(), currentValue)) {
            errors.add(Strings.format("changed enum value of {}.{} from {} to {}", previous.name, name, previousEntry.getValue(), currentValue));
        }
    }
    for (String addedName : currentConstants.keySet()) {
        addError(isRequest, Strings.format("added enum value {}.{}", previous.name, addedName));
    }
}
/**
 * Decides how two candidate type names relate: simple/collection types must match by
 * name exactly; named bean/enum types must at least agree on kind before a deeper
 * comparison is attempted.
 */
private CompareTypeResult compareType(String previousType, String currentType) {
    boolean simpleOrCollection = SIMPLE_TYPES.contains(previousType) || SIMPLE_TYPES.contains(currentType)
        || COLLECTION_TYPES.contains(previousType) || COLLECTION_TYPES.contains(currentType);
    if (simpleOrCollection) {
        return previousType.equals(currentType) ? CompareTypeResult.MATCH : CompareTypeResult.NOT_MATCH;
    }
    var previous = previousTypes.get(previousType);
    var current = currentTypes.get(currentType);
    if (previous != null && current != null && Strings.equals(previous.type, current.type)) {
        return CompareTypeResult.FURTHER_COMPARE;
    }
    return CompareTypeResult.NOT_MATCH;
}
/**
 * Flattens a field into its candidate type names: the raw type first,
 * followed by its generic type parameters (if any), in declaration order.
 */
private String[] candidateTypes(APIType.Field field) {
    if (field.typeParams == null) {
        return new String[]{field.type};
    }
    String[] candidates = new String[field.typeParams.size() + 1];
    candidates[0] = field.type;
    for (int i = 0; i < field.typeParams.size(); i++) {
        candidates[i + 1] = field.typeParams.get(i);
    }
    return candidates;
}
/**
 * Records that typeName is referenced by a removed member, at the given severity,
 * and recursively records all bean field types it references.
 * A WARN never downgrades an already-recorded ERROR (getOrDefault keeps the old value).
 * Bug fix: the original recursed unconditionally, so a self-referential bean (or any
 * reference cycle) caused a StackOverflowError; we now stop when the type is already
 * recorded at the severity we would write, which also makes re-traversal idempotent.
 */
private void removeReferenceType(String typeName, Severity severity) {
    APIType type = previousTypes.get(typeName);
    if (type == null) return; // not bean type, e.g. simple type, collection type, void, null
    Severity value = severity;
    if (value == Severity.WARN) value = removedReferenceTypes.getOrDefault(typeName, Severity.WARN);
    if (removedReferenceTypes.get(typeName) == value) return; // already recorded; terminates cyclic references
    removedReferenceTypes.put(typeName, value);
    if ("bean".equals(type.type)) {
        for (var field : type.fields) {
            for (String candidateType : candidateTypes(field)) {
                removeReferenceType(candidateType, severity);
            }
        }
    }
}
/**
 * Flattens an API definition into a map of Operation keyed by "method/path",
 * preserving declaration order (LinkedHashMap) for stable reporting.
 */
private Map<String, Operation> operations(APIDefinitionResponse response) {
    Map<String, Operation> results = new LinkedHashMap<>();
    for (APIDefinitionResponse.Service service : response.services) {
        for (var operation : service.operations) {
            String key = operation.method + "/" + operation.path;
            results.put(key, new Operation(service.name, operation));
        }
    }
    return results;
}
/**
 * Routes a finding to the matching bucket: compatible changes become warnings,
 * breaking changes become errors.
 */
void addError(boolean warning, String error) {
    (warning ? warnings : errors).add(error);
}
/**
 * Renders a human-readable field type for messages, e.g. "List<Foo>",
 * "Map<String, Bar>" or "Map<String, List<Bar>>".
 * Bug fix: the Map-of-List branch was missing the outer closing '>', producing
 * the unbalanced literal "Map<K, List<V>".
 */
String fieldType(APIType.Field field) {
    if ("List".equals(field.type)) return "List<" + field.typeParams.get(0) + ">";
    if ("Map".equals(field.type)) {
        // a Map with List values carries three type params: key, "List", element
        if ("List".equals(field.typeParams.get(1))) return "Map<" + field.typeParams.get(0) + ", List<" + field.typeParams.get(2) + ">>";
        return "Map<" + field.typeParams.get(0) + ", " + field.typeParams.get(1) + ">";
    }
    return field.type;
}
/**
 * Summarizes the validation outcome: "ERROR" if any breaking change was found,
 * "WARN" if only compatible changes exist, null when there are no findings at all.
 */
String result() {
    if (!errors.isEmpty()) return "ERROR";
    return warnings.isEmpty() ? null : "WARN";
}
/**
 * Builds a markdown-style report: a "*incompatible changes*" section listing errors
 * followed by a "*compatible changes*" section listing warnings, each as a bullet line.
 */
public String errorMessage() {
    var builder = new StringBuilder();
    if (!errors.isEmpty()) {
        builder.append("*incompatible changes*\n");
        for (String error : errors) {
            builder.append("* ").append(error).append('\n');
        }
    }
    if (!warnings.isEmpty()) {
        builder.append("*compatible changes*\n");
        for (String warning : warnings) {
            builder.append("* ").append(warning).append('\n');
        }
    }
    return builder.toString();
}
/** Outcome of comparing two candidate type names; see compareType(). */
enum CompareTypeResult {
    MATCH, // simple/collection types with identical names
    NOT_MATCH, // one side is simple, or one is bean and the other enum; stop comparing
    FURTHER_COMPARE // both are bean or enum of the same kind, require further compare
}
/**
 * A service operation wrapper pairing the owning service name with the raw
 * API definition operation; provides display helpers for reporting.
 */
static class Operation {
    String service;
    APIDefinitionResponse.Operation operation;

    Operation(String service, APIDefinitionResponse.Operation operation) {
        this.service = service;
        this.operation = operation;
    }

    /** Full display signature, e.g. "@Deprecated @GET @Path("/x") Service.method". */
    String signature() {
        var builder = new StringBuilder(64);
        if (deprecated()) builder.append("@Deprecated ");
        builder.append('@').append(operation.method)
            .append(" @Path(\"").append(operation.path).append("\") ")
            .append(service).append('.').append(operation.name);
        return builder.toString();
    }

    /** Short display name, e.g. "Service.method". */
    String methodLiteral() {
        return Strings.format("{}.{}", service, operation.name);
    }

    /** Display form of the response type, wrapped in Optional<> when the response is optional. */
    String responseTypeLiteral() {
        // use the null-safe accessor rather than unboxing operation.optional directly,
        // which could throw NPE if the field is a null Boolean
        if (optional()) {
            return Strings.format("Optional<{}>", operation.responseType);
        }
        return operation.responseType;
    }

    boolean optional() {
        return Boolean.TRUE.equals(operation.optional);
    }

    boolean deprecated() {
        return Boolean.TRUE.equals(operation.deprecated);
    }
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/servicedirectory/v1/namespace.proto
package com.google.cloud.servicedirectory.v1;
/**
*
*
* <pre>
* A container for [services][google.cloud.servicedirectory.v1.Service].
* Namespaces allow administrators to group services together and define
* permissions for a collection of services.
* </pre>
*
* Protobuf type {@code google.cloud.servicedirectory.v1.Namespace}
*/
public final class Namespace extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.servicedirectory.v1.Namespace)
NamespaceOrBuilder {
private static final long serialVersionUID = 0L;
// Use Namespace.newBuilder() to construct.
private Namespace(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Namespace() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Namespace();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// NOTE(review): reflective parsing constructor emitted by older protoc versions
// (generated code — do not hand-edit; regeneration will overwrite any change).
private Namespace(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of stream
          done = true;
          break;
        case 10: // field 1 (name), wire type 2 (length-delimited)
          {
            java.lang.String s = input.readStringRequireUtf8();
            name_ = s;
            break;
          }
        case 18: // field 2 (labels), each map entry parsed as a nested message
          {
            // lazily allocate the map field on the first entry; bit 0 tracks allocation
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              labels_ =
                  com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
              mutable_bitField0_ |= 0x00000001;
            }
            com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
                input.readMessage(
                    LabelsDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
            labels_.getMutableMap().put(labels__.getKey(), labels__.getValue());
            break;
          }
        default:
          {
            // unknown tags are preserved (forward compatibility) unless unparseable
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.servicedirectory.v1.NamespaceProto
.internal_static_google_cloud_servicedirectory_v1_Namespace_descriptor;
}
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapField internalGetMapField(int number) {
switch (number) {
case 2:
return internalGetLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.servicedirectory.v1.NamespaceProto
.internal_static_google_cloud_servicedirectory_v1_Namespace_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.servicedirectory.v1.Namespace.class,
com.google.cloud.servicedirectory.v1.Namespace.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* Immutable. The resource name for the namespace in the format
* `projects/*/locations/*/namespaces/*`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // first access after parsing: decode the ByteString once and cache the String in name_
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Immutable. The resource name for the namespace in the format
* `projects/*/locations/*/namespaces/*`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LABELS_FIELD_NUMBER = 2;
private static final class LabelsDefaultEntryHolder {
static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
com.google.cloud.servicedirectory.v1.NamespaceProto
.internal_static_google_cloud_servicedirectory_v1_Namespace_LabelsEntry_descriptor,
com.google.protobuf.WireFormat.FieldType.STRING,
"",
com.google.protobuf.WireFormat.FieldType.STRING,
"");
}
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
if (labels_ == null) {
return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
}
return labels_;
}
public int getLabelsCount() {
return internalGetLabels().getMap().size();
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
if (key == null) {
throw new java.lang.NullPointerException();
}
return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
return getLabelsMap();
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
return internalGetLabels().getMap();
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrDefault(java.lang.String key, java.lang.String defaultValue) {
if (key == null) {
throw new java.lang.NullPointerException();
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new java.lang.NullPointerException();
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // field 1 (name): proto3 semantics — skipped entirely when empty
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  // field 2 (labels): each map entry serialized as a nested key/value message
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetLabels(), LabelsDefaultEntryHolder.defaultEntry, 2);
  // preserve fields from newer schema versions round-trip
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoized: -1 means "not yet computed"; safe because the message is immutable
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  // each labels entry is sized as the nested map-entry message it serializes to
  for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
      internalGetLabels().getMap().entrySet()) {
    com.google.protobuf.MapEntry<java.lang.String, java.lang.String> labels__ =
        LabelsDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, labels__);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // non-Namespace comparands defer to the superclass implementation
  if (!(obj instanceof com.google.cloud.servicedirectory.v1.Namespace)) {
    return super.equals(obj);
  }
  com.google.cloud.servicedirectory.v1.Namespace other =
      (com.google.cloud.servicedirectory.v1.Namespace) obj;
  // field-by-field comparison: name, labels map, then preserved unknown fields
  if (!getName().equals(other.getName())) return false;
  if (!internalGetLabels().equals(other.internalGetLabels())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // memoized: 0 means "not yet computed"; safe because the message is immutable
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  // labels only contribute when non-empty, keeping hash stable for default instances
  if (!internalGetLabels().getMap().isEmpty()) {
    hash = (37 * hash) + LABELS_FIELD_NUMBER;
    hash = (53 * hash) + internalGetLabels().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.servicedirectory.v1.Namespace parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.servicedirectory.v1.Namespace prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A container for [services][google.cloud.servicedirectory.v1.Service].
* Namespaces allow administrators to group services together and define
* permissions for a collection of services.
* </pre>
*
* Protobuf type {@code google.cloud.servicedirectory.v1.Namespace}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.servicedirectory.v1.Namespace)
com.google.cloud.servicedirectory.v1.NamespaceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.servicedirectory.v1.NamespaceProto
.internal_static_google_cloud_servicedirectory_v1_Namespace_descriptor;
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMapField(int number) {
switch (number) {
case 2:
return internalGetLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
switch (number) {
case 2:
return internalGetMutableLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.servicedirectory.v1.NamespaceProto
.internal_static_google_cloud_servicedirectory_v1_Namespace_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.servicedirectory.v1.Namespace.class,
com.google.cloud.servicedirectory.v1.Namespace.Builder.class);
}
// Construct using com.google.cloud.servicedirectory.v1.Namespace.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
internalGetMutableLabels().clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.servicedirectory.v1.NamespaceProto
.internal_static_google_cloud_servicedirectory_v1_Namespace_descriptor;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.Namespace getDefaultInstanceForType() {
return com.google.cloud.servicedirectory.v1.Namespace.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.Namespace build() {
com.google.cloud.servicedirectory.v1.Namespace result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.Namespace buildPartial() {
com.google.cloud.servicedirectory.v1.Namespace result =
new com.google.cloud.servicedirectory.v1.Namespace(this);
int from_bitField0_ = bitField0_;
result.name_ = name_;
result.labels_ = internalGetLabels();
result.labels_.makeImmutable();
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.servicedirectory.v1.Namespace) {
return mergeFrom((com.google.cloud.servicedirectory.v1.Namespace) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.servicedirectory.v1.Namespace other) {
if (other == com.google.cloud.servicedirectory.v1.Namespace.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
internalGetMutableLabels().mergeFrom(other.internalGetLabels());
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.servicedirectory.v1.Namespace parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.servicedirectory.v1.Namespace) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Immutable. The resource name for the namespace in the format
* `projects/*/locations/*/namespaces/*`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Immutable. The resource name for the namespace in the format
* `projects/*/locations/*/namespaces/*`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Immutable. The resource name for the namespace in the format
* `projects/*/locations/*/namespaces/*`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
  // Null is rejected up front; use clearName() to reset the field.
  if (value == null) {
    throw new NullPointerException();
  }
  name_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Immutable. The resource name for the namespace in the format
* `projects/*/locations/*/namespaces/*`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
  // Reset to the proto default ("" for a string field).
  name_ = getDefaultInstance().getName();
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Immutable. The resource name for the namespace in the format
* `projects/*/locations/*/namespaces/*`.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject byte strings that are not valid UTF-8 (proto3 string contract).
  checkByteStringIsUtf8(value);
  name_ = value;
  onChanged();
  return this;
}
private com.google.protobuf.MapField<java.lang.String, java.lang.String> labels_;

// Read-only access path for the labels map; never returns null (an empty
// map field is returned while the field is unset).
private com.google.protobuf.MapField<java.lang.String, java.lang.String> internalGetLabels() {
  if (labels_ == null) {
    return com.google.protobuf.MapField.emptyMapField(LabelsDefaultEntryHolder.defaultEntry);
  }
  return labels_;
}

// Mutable access path: lazily creates the map field, and copies it when the
// current instance is immutable so callers can safely mutate the result.
// (Removed a stray empty statement `;` left after onChanged().)
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
    internalGetMutableLabels() {
  onChanged();
  if (labels_ == null) {
    labels_ = com.google.protobuf.MapField.newMapField(LabelsDefaultEntryHolder.defaultEntry);
  }
  if (!labels_.isMutable()) {
    labels_ = labels_.copy();
  }
  return labels_;
}

// Returns the number of entries in the labels map.
public int getLabelsCount() {
  return internalGetLabels().getMap().size();
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public boolean containsLabels(java.lang.String key) {
  // Map keys are never null in proto3; reject null up front.
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  return internalGetLabels().getMap().containsKey(key);
}
/** Use {@link #getLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getLabels() {
  return getLabelsMap();
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getLabelsMap() {
  // Read-only access path; mutations go through internalGetMutableLabels().
  return internalGetLabels().getMap();
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrDefault(
    java.lang.String key, java.lang.String defaultValue) {
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
  // Fall back to the caller-supplied default when the key is absent.
  return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
@java.lang.Override
public java.lang.String getLabelsOrThrow(java.lang.String key) {
  if (key == null) {
    // Message added so the failure is self-explanatory in stack traces.
    throw new java.lang.NullPointerException("map key");
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetLabels().getMap();
  if (!map.containsKey(key)) {
    // Include the missing key; the original threw with no message.
    throw new java.lang.IllegalArgumentException("No entry found for key: " + key);
  }
  return map.get(key);
}
// Removes every entry from the labels map.
public Builder clearLabels() {
  internalGetMutableLabels().getMutableMap().clear();
  return this;
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder removeLabels(java.lang.String key) {
  // Key must be non-null; removing an absent key is a no-op.
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  internalGetMutableLabels().getMutableMap().remove(key);
  return this;
}
/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMutableLabels() {
  // Exposes the live mutable map; prefer putLabels()/removeLabels().
  return internalGetMutableLabels().getMutableMap();
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder putLabels(java.lang.String key, java.lang.String value) {
  // Proto3 map entries allow neither null keys nor null values.
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  if (value == null) {
    throw new java.lang.NullPointerException();
  }
  internalGetMutableLabels().getMutableMap().put(key, value);
  return this;
}
/**
*
*
* <pre>
* Optional. Resource labels associated with this Namespace.
* No more than 64 user labels can be associated with a given resource. Label
* keys and values can be no longer than 63 characters.
* </pre>
*
* <code>map<string, string> labels = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*/
public Builder putAllLabels(java.util.Map<java.lang.String, java.lang.String> values) {
  // NOTE(review): unlike putLabels(), entries are not null-checked here;
  // presumably the underlying map rejects nulls — confirm against MapField.
  internalGetMutableLabels().getMutableMap().putAll(values);
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates to the base builder implementation.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Delegates to the base builder implementation.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.servicedirectory.v1.Namespace)
}
// @@protoc_insertion_point(class_scope:google.cloud.servicedirectory.v1.Namespace)
// Singleton default (all-fields-unset) instance of Namespace.
private static final com.google.cloud.servicedirectory.v1.Namespace DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.servicedirectory.v1.Namespace();
}
public static com.google.cloud.servicedirectory.v1.Namespace getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared by every Namespace instance.
private static final com.google.protobuf.Parser<Namespace> PARSER =
    new com.google.protobuf.AbstractParser<Namespace>() {
      @java.lang.Override
      public Namespace parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Actual parsing happens in the message's stream constructor.
        return new Namespace(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<Namespace> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Namespace> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.Namespace getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package view;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import processing.core.PApplet;
import processing.core.PConstants;
import processing.core.PVector;
import model.Event;
/**
 * This class represents the circle where the events (particles) are displayed.
 * The name comes from particle *emitter* because it handles the position of
 * the particles and the associated labels.
 * @author Francis Bonneau
 */
public class Emitter {
private PApplet p; // Where to draw the particles
private Hud hud; // Abstraction of multiple emitters
private int id; // The Emitter ID
public String host; // The data source hostname
public float centerX; // The X center position of the emitter
public float centerY; // The Y center position of the emitter
public List<Particle> particlesList; // The list of displayed particles
public List<EmitterLabel> labelsList; // The list of displayed labels
public List<EmitterHalo> halosList; // The list of displayed halos
// The circle subdivisions - the circle is subdivided into sections according
// to the events distribution, and this class takes care of that
public EmitterSubdivider subdivisions;
// True if the user mouse is over a displayed particle
public boolean selectionActive = false;
// The name of the category of the currently selected particle
public String selectionID;
// Various counters to keep stats about the Emitter
public long eventsDisplayedCount;
public long syscallDisplayedCount;
public long eventsTotalCount;
public long syscallTotalCount;
// Constructor
/**
 * Creates an emitter centered at (x, y) that draws on the given sketch.
 * One halo per entry of hud.params.emitterHalosIntervalsSec is pre-created.
 */
public Emitter(PApplet p, Hud hud, int id, int x, int y) {
    this.p = p;
    this.hud = hud;
    this.id = id;
    this.centerX = x;
    this.centerY = y;
    this.host = ""; // empty hostname on initialisation
    // create a new subdivider for this object
    subdivisions = new EmitterSubdivider(this);
    // initialise the lists
    particlesList = new ArrayList<Particle>();
    labelsList = new ArrayList<EmitterLabel>();
    halosList = new ArrayList<EmitterHalo>();
    // Populate the halos list according to the settings
    int halosNb = hud.params.emitterHalosIntervalsSec.length;
    for (int i = 0; i < halosNb; i++) {
        halosList.add(new EmitterHalo(this.p, this));
    }
    // initialise the stats counters
    eventsDisplayedCount = 0;
    syscallDisplayedCount = 0;
    eventsTotalCount = 0;
    syscallTotalCount = 0;
}
// Sets the emitter data source hostname.
public void setHost(String host) {
    this.host = host;
}
// Returns the emitter ID.
public int getID() {
    return this.id;
}
// Returns the hud associated with the emitter.
public Hud getHud() {
    return this.hud;
}
/**
 * Add particles to the emitter, based on the new data received. Each new
 * particle is appended to particlesList and will then be moved/drawn by
 * every subsequent update() and draw() of the emitter. The emitter labels
 * are also rebuilt so they match the updated circle subdivisions.
 */
public void addParticles(ArrayList<Event> newData) {
    String divisionAttribute = hud.params.divisionAttribute;
    // Update the circle subdivisions (pie sections) with the new events
    subdivisions.addDivisions(newData, divisionAttribute);
    subdivisions.addHalos(newData, divisionAttribute);
    subdivisions.adjustDivisionsSizes();
    subdivisions.adjustHalosSizes();
    // Rebuild the labels, one per emitter subdivision
    labelsList = new ArrayList<>();
    Iterator<String> it = subdivisions.currentDivisions.keySet().iterator();
    while (it.hasNext()) {
        String divisionID = it.next();
        float minAngle = subdivisions.getDivisionStartAngle(divisionID);
        float maxAngle = subdivisions.getDivisonEndAngle(divisionID);
        float emitterRadius = hud.params.emitterRadius;
        float labelColor = hud.colorPalette.get(divisionID);
        float labelsDistance = hud.params.emitterLabelsDistance;
        EmitterLabel label = new EmitterLabel(p, divisionID, 15, labelColor);
        label.calculateLabelPosition(minAngle, maxAngle, emitterRadius,
                centerX, centerY, labelsDistance);
        labelsList.add(label);
    }
    // For each event in the list, create one particle per latency entry
    Iterator<Event> events = newData.iterator();
    while (events.hasNext()) {
        Event event = events.next();
        String divisionID = event.attributes.get(divisionAttribute);
        Iterator<Integer> latencies = event.latencyBreakdown.keySet().iterator();
        float divStart = subdivisions.getDivisionStartAngle(divisionID);
        float divEnd = subdivisions.getDivisonEndAngle(divisionID);
        // Pick a random emission angle inside the event's subdivision.
        // BUG FIX: the old formula added 1 to the angle span (the
        // integer-random idiom), which let particles spawn up to one angular
        // unit outside their pie section.
        float angle = divStart + (float) Math.random() * (divEnd - divStart);
        while (latencies.hasNext()) {
            // Keep the global smallest/biggest latency seen up to date;
            // they are used below to scale the particle velocity.
            int latency = latencies.next();
            if (hud.smallestEvtLatency > latency)
                hud.smallestEvtLatency = latency;
            if (hud.biggestEvtLatency < latency)
                hud.biggestEvtLatency = latency;
            Particle newParticle = new Particle(p, event);
            // determine the new particle parameters
            float hue = this.hud.colorPalette.get(divisionID);
            float brightness = 100;
            float size = (float) Math.sqrt(hud.params.particleSize * event.syscallNumber);
            float velocity = PApplet.map(latency, hud.smallestEvtLatency,
                    hud.biggestEvtLatency, hud.params.particleMinVelocity,
                    hud.params.particleMaxVelocity);
            float acceleration = hud.params.particleAcceleration;
            newParticle.setup(new PVector(centerX, centerY), size, angle,
                    velocity, acceleration, hue, brightness);
            particlesList.add(newParticle);
            // update the stats counters
            eventsDisplayedCount += 1;
            syscallDisplayedCount += event.syscallNumber;
            eventsTotalCount += 1;
            syscallTotalCount += event.syscallNumber;
        }
    }
}
// Update the position and speed of every displayed particle, removing the
// ones that left the emitter radius and fading the ones that approach it.
public void updateParticles() {
    Iterator<Particle> it = particlesList.iterator();
    while (it.hasNext()) {
        Particle particle = it.next();
        particle.update();
        // distance between the particle and the emitter center
        double distance = Math.sqrt(Math.pow(centerX - particle.location.x, 2)
                + Math.pow(centerY - particle.location.y, 2));
        if (distance > hud.params.emitterRadius / 2) {
            // the particle left the emitter circle: remove it
            it.remove();
            // and update the displayed-events stats.
            // BUG FIX: the old code did
            //   eventsDisplayedCount -= eventsDisplayedCount - 1;
            // which reset the counter to 1 instead of decrementing it.
            eventsDisplayedCount -= 1;
            syscallDisplayedCount -= particle.event.syscallNumber;
        } else if (hud.params.emitterRadius / 2 - distance < 60) {
            // the particle is approaching the radius: fade it out gradually
            if (particle.brightness - 8 >= hud.params.backgroundBrightness)
                particle.brightness = particle.brightness - 8;
        }
    }
}
// Draw all the particles, and update the current selection if a particle
// is selected by the user
public void drawParticles(float backgroundBrightness) {
    // Becomes true as soon as one drawn particle reports being selected.
    boolean selectionDetected = false;
    for (Particle particle : particlesList) {
        boolean particleSelected = particle.draw(backgroundBrightness,
                hud.params.drawCirclesStrokes, selectionActive, selectionID);
        if (particleSelected) {
            // Select the whole category (division) of the hovered particle.
            selectionActive = true;
            selectionID = particle.getDivisionID(hud.params.divisionAttribute);
            selectionDetected = true;
        }
    }
    // No particle selected during this frame: clear the selection.
    if (!selectionDetected) {
        selectionActive = false;
    }
}
// Draw the labels - around the circle
public void drawLabels() {
    boolean selectionDetected = false;
    for (EmitterLabel label : labelsList) {
        // Highlight the label whose division is currently selected.
        boolean selectedLabel = false;
        if (selectionActive && selectionID.equals(label.divisionID)) {
            selectedLabel = true;
        }
        // A label can itself be hovered, which also activates the selection.
        boolean labelSelected = label.draw(selectedLabel);
        if (labelSelected) {
            selectionActive = true;
            selectionID = label.divisionID;
            selectionDetected = true;
        }
    }
    // No label hovered during this frame: clear the selection.
    if (!selectionDetected) {
        selectionActive = false;
    }
}
// Draw other circles (halos) around the main circle to represent different
// (longer) timeframes of events distribution
public void drawHalos() {
    LinkedList<LinkedHashMap<String, EmitterSubdivision>> halosDivs =
            subdivisions.halosDivisions;
    Iterator<LinkedHashMap<String, EmitterSubdivision>> it =
            halosDivs.iterator();
    // Each successive halo is drawn dimmer and further from the center.
    float brightness = 100;
    float distance = 40;
    // NOTE(review): assumes halosDivisions has at least one entry per halo in
    // halosList, otherwise it.next() throws — confirm in EmitterSubdivider.
    for (EmitterHalo halo : halosList) {
        brightness -= 15;
        halo.draw(it.next(), distance, brightness);
        distance += 45;
    }
}
// Draw the Emitter radius circle (outline only, no fill)
public void drawRadius(float backgroundBrightness, float emitterRadiusColor,
        float radius) {
    // HSB mode; hue/saturation of 0 makes the stroke a pure gray level.
    p.colorMode(PConstants.HSB,360,100,100);
    p.stroke(0,0,emitterRadiusColor);
    p.noFill();
    p.ellipse(centerX, centerY, radius, radius);
}
// Update all the components of the emitter
public void update() {
    // Freeze the particles while the display is paused.
    if (!hud.params.displayPaused) {
        updateParticles();
    }
}
// Update the position of all the labels, used to realign the labels to
// the middle of each section of the emitter (or section of the pie chart)
// also used to readjust the labels distance to the center
// TODO : Should maybe be merged with the update() method, the only reason
// its not is to avoid the labels change position too frequently
public void updateLabelsPositions() {
    for (EmitterLabel label : labelsList) {
        // Recompute the angular span of the label's division, then place the
        // label at its midpoint at the configured distance from the center.
        float minAngle = subdivisions.getDivisionStartAngle(label.divisionID);
        float maxAngle = subdivisions.getDivisonEndAngle(label.divisionID);
        float emitterRadius = hud.params.emitterRadius;
        float labelsDistance = hud.params.emitterLabelsDistance;
        label.calculateLabelPosition(minAngle, maxAngle,
                emitterRadius, centerX, centerY, labelsDistance);
    }
}
// Draw all the components of the emitter, honoring the display toggles
// (particles are always drawn; labels, radius and halos are optional).
public void draw() {
    drawParticles(hud.params.backgroundBrightness);
    if (hud.params.displayEmitterLabels)
        drawLabels();
    if (hud.params.displayEmitterRadius)
        drawRadius(hud.params.backgroundBrightness,
                hud.params.emitterRadiusBrightness, hud.params.emitterRadius);
    if (hud.params.displayEmitterHalos)
        drawHalos();
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildContext;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeBuildableContext;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.keys.DefaultRuleKeyFactory;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
public class DirectHeaderMapTest {
@Rule
public final TemporaryPaths tmpDir = new TemporaryPaths();
private ProjectFilesystem projectFilesystem;
private BuildTarget buildTarget;
private DirectHeaderMap buildRule;
private ImmutableMap<Path, SourcePath> links;
private Path symlinkTreeRoot;
private Path headerMapPath;
private Path file1;
private Path file2;
/**
 * Builds a DirectHeaderMap rule over two linked files — one at the tree root
 * and one nested under directory/then/ — inside a temporary filesystem.
 */
@Before
public void setUp() throws Exception {
  projectFilesystem = new FakeProjectFilesystem(tmpDir.getRoot());
  // Create a build target to use when building the symlink tree.
  buildTarget = BuildTargetFactory.newInstance("//test:test");
  // Get the first file we're symlinking
  Path link1 = Paths.get("file");
  file1 = tmpDir.newFile();
  Files.write(file1, "hello world".getBytes(Charsets.UTF_8));
  // Get the second file we're symlinking
  Path link2 = Paths.get("directory", "then", "file");
  file2 = tmpDir.newFile();
  Files.write(file2, "hello world".getBytes(Charsets.UTF_8));
  // Setup the map representing the link tree.
  links = ImmutableMap.of(
      link1,
      new PathSourcePath(
          projectFilesystem,
          MorePaths.relativize(tmpDir.getRoot(), file1)),
      link2,
      new PathSourcePath(
          projectFilesystem,
          MorePaths.relativize(tmpDir.getRoot(), file2)));
  // The output path used by the buildable for the link tree.
  symlinkTreeRoot = projectFilesystem.resolve(
      BuildTargets.getGenPath(projectFilesystem, buildTarget, "%s/symlink-tree-root"));
  // Setup the symlink tree buildable.
  buildRule = new DirectHeaderMap(
      new FakeBuildRuleParamsBuilder(buildTarget).build(),
      new SourcePathResolver(new SourcePathRuleFinder(
          new BuildRuleResolver(
              TargetGraph.EMPTY,
              new DefaultTargetNodeToBuildRuleTransformer())
      )),
      symlinkTreeRoot,
      links);
  headerMapPath = buildRule.getPathToOutput();
}
/**
 * The rule should clean its output directory and then write a header map
 * whose entries point at buck-out-relative source paths.
 */
@Test
public void testBuildSteps() throws IOException {
  BuildContext buildContext = FakeBuildContext.NOOP_CONTEXT;
  ProjectFilesystem filesystem = new FakeProjectFilesystem();
  FakeBuildableContext buildableContext = new FakeBuildableContext();
  ImmutableList<Step> expectedBuildSteps =
      ImmutableList.of(
          new MakeCleanDirectoryStep(filesystem, symlinkTreeRoot),
          new HeaderMapStep(
              filesystem,
              headerMapPath,
              ImmutableMap.of(
                  Paths.get("file"),
                  filesystem.resolve(filesystem.getBuckPaths().getBuckOut())
                      .relativize(file1),
                  Paths.get("directory/then/file"),
                  filesystem.resolve(filesystem.getBuckPaths().getBuckOut())
                      .relativize(file2))));
  ImmutableList<Step> actualBuildSteps =
      buildRule.getBuildSteps(
          buildContext,
          buildableContext);
  // NOTE(review): the first actual step is excluded from the comparison —
  // presumably an auxiliary step not under test; confirm against
  // DirectHeaderMap.getBuildSteps().
  assertEquals(expectedBuildSteps, actualBuildSteps.subList(1, actualBuildSteps.size()));
}
/**
 * Two rules with different link maps (same output root) must produce
 * different rule keys, so link-map edits trigger a rebuild.
 */
@Test
public void testSymlinkTreeRuleKeyChangesIfLinkMapChanges() throws Exception {
  // Build a second rule identical to buildRule except for its link map.
  Path aFile = tmpDir.newFile();
  Files.write(aFile, "hello world".getBytes(Charsets.UTF_8));
  AbstractBuildRule modifiedBuildRule = new DirectHeaderMap(
      new FakeBuildRuleParamsBuilder(buildTarget).build(),
      new SourcePathResolver(new SourcePathRuleFinder(
          new BuildRuleResolver(
              TargetGraph.EMPTY,
              new DefaultTargetNodeToBuildRuleTransformer())
      )),
      symlinkTreeRoot,
      ImmutableMap.of(
          Paths.get("different/link"),
          new PathSourcePath(
              projectFilesystem,
              MorePaths.relativize(tmpDir.getRoot(), aFile))));
  SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(
      new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
  );
  SourcePathResolver resolver = new SourcePathResolver(ruleFinder);
  // Calculate their rule keys and verify they're different.
  FakeFileHashCache hashCache = FakeFileHashCache.createFromStrings(
      ImmutableMap.of());
  RuleKey key1 = new DefaultRuleKeyFactory(0, hashCache, resolver, ruleFinder).build(
      buildRule);
  RuleKey key2 = new DefaultRuleKeyFactory(0, hashCache, resolver, ruleFinder).build(
      modifiedBuildRule);
  assertNotEquals(key1, key2);
}
/**
 * Editing the *contents* of a linked file must not change the rule key:
 * only the link map itself is an input to this rule.
 */
@Test
public void testRuleKeyDoesNotChangeIfLinkTargetsChange() throws IOException {
  BuildRuleResolver ruleResolver = new BuildRuleResolver(
      TargetGraph.EMPTY,
      new DefaultTargetNodeToBuildRuleTransformer());
  ruleResolver.addToIndex(buildRule);
  SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(ruleResolver);
  SourcePathResolver resolver = new SourcePathResolver(ruleFinder);
  DefaultRuleKeyFactory defaultRuleKeyFactory = new DefaultRuleKeyFactory(
      0,
      FakeFileHashCache.createFromStrings(
          ImmutableMap.of()),
      resolver,
      ruleFinder);
  // Calculate the rule key
  RuleKey key1 = defaultRuleKeyFactory.build(buildRule);
  // Change the contents of the target of the link.
  Path existingFile = resolver.getAbsolutePath(links.values().asList().get(0));
  Files.write(existingFile, "something new".getBytes(Charsets.UTF_8));
  // Re-calculate the rule key
  RuleKey key2 = defaultRuleKeyFactory.build(buildRule);
  // Verify that the rules keys are the same.
  assertEquals(key1, key2);
}
}
| |
package com.codepath.oauth;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import androidx.annotation.Nullable;
import com.github.scribejava.core.builder.api.BaseApi;
import com.github.scribejava.core.model.OAuth1AccessToken;
import com.github.scribejava.core.model.OAuth1RequestToken;
import com.github.scribejava.core.model.OAuth2AccessToken;
import com.github.scribejava.core.model.OAuthConstants;
import com.github.scribejava.core.model.Token;
import java.util.HashMap;
public abstract class OAuthBaseClient {
protected String baseUrl;
protected Context context;
protected OAuthTokenClient tokenClient;
protected OAuthAsyncHttpClient client;
protected SharedPreferences prefs;
protected SharedPreferences.Editor editor;
protected OAuthAccessHandler accessHandler;
protected String callbackUrl;
protected int requestIntentFlags = -1;
private static final String OAUTH1_REQUEST_TOKEN = "request_token";
private static final String OAUTH1_REQUEST_TOKEN_SECRET = "request_token_secret";
private static final String OAUTH1_VERSION = "1.0";
private static final String OAUTH2_VERSION = "2.0";
protected static HashMap<Class<? extends OAuthBaseClient>, OAuthBaseClient> instances =
new HashMap<Class<? extends OAuthBaseClient>, OAuthBaseClient>();
// Lazily creates (via the subclass's public (Context) constructor) and
// caches one client instance per concrete subclass.
// NOTE(review): not thread-safe (plain HashMap + check-then-act), and a
// reflection failure is only printed, after which null is returned —
// confirm this is only ever called from the main thread.
public static OAuthBaseClient getInstance(Class<? extends OAuthBaseClient> klass, Context context) {
    OAuthBaseClient instance = instances.get(klass);
    if (instance == null) {
        try {
            instance = (OAuthBaseClient) klass.getConstructor(Context.class).newInstance(context);
            instances.put(klass, instance);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return instance;
}
/**
 * Wires up the token client with callbacks for the OAuth handshake and
 * restores a previously persisted access token (if any) from preferences.
 */
public OAuthBaseClient(Context c, final BaseApi apiInstance, String consumerUrl, final String consumerKey, final String consumerSecret, @Nullable String scope, String callbackUrl) {
    this.baseUrl = consumerUrl;
    this.callbackUrl = callbackUrl;
    tokenClient = new OAuthTokenClient(apiInstance, consumerKey,
            consumerSecret, callbackUrl, scope, new OAuthTokenClient.OAuthTokenHandler() {

        // Store request token and launch the authorization URL in the browser
        @Override
        public void onReceivedRequestToken(Token requestToken, String authorizeUrl, String oAuthVersion) {
            if (requestToken != null) {
                // BUG FIX: compare version strings with equals(); the old
                // identity comparison (==) only matched interned constants.
                if (OAUTH1_VERSION.equals(oAuthVersion)) { // store for OAuth1.0a
                    OAuth1RequestToken oAuth1RequestToken = (OAuth1RequestToken) requestToken;
                    editor.putString(OAUTH1_REQUEST_TOKEN, oAuth1RequestToken.getToken());
                    editor.putString(OAUTH1_REQUEST_TOKEN_SECRET, oAuth1RequestToken.getTokenSecret());
                    editor.putInt(OAuthConstants.VERSION, 1);
                    editor.commit();
                }
            }
            // Launch the authorization URL in the browser
            Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(authorizeUrl));
            if (requestIntentFlags != -1) {
                intent.setFlags(requestIntentFlags);
            }
            OAuthBaseClient.this.context.startActivity(intent);
        }

        // Store the access token in preferences, set the token in the
        // tokenClient and fire the success callback
        @Override
        public void onReceivedAccessToken(Token accessToken, String oAuthVersion) {
            // BUG FIX: same String identity-comparison issue as above.
            if (OAUTH1_VERSION.equals(oAuthVersion)) {
                OAuth1AccessToken oAuth1AccessToken = (OAuth1AccessToken) accessToken;
                tokenClient.setAccessToken(accessToken);
                instantiateClient(consumerKey, consumerSecret, oAuth1AccessToken);
                editor.putString(OAuthConstants.TOKEN, oAuth1AccessToken.getToken());
                editor.putString(OAuthConstants.TOKEN_SECRET, oAuth1AccessToken.getTokenSecret());
                editor.putInt(OAuthConstants.VERSION, 1);
                editor.commit();
            } else if (OAUTH2_VERSION.equals(oAuthVersion)) {
                OAuth2AccessToken oAuth2AccessToken = (OAuth2AccessToken) accessToken;
                instantiateClient(consumerKey, consumerSecret, oAuth2AccessToken);
                tokenClient.setAccessToken(accessToken);
                editor.putString(OAuthConstants.TOKEN, oAuth2AccessToken.getAccessToken());
                editor.putString(OAuthConstants.SCOPE, oAuth2AccessToken.getScope());
                editor.putString(OAuthConstants.REFRESH_TOKEN, oAuth2AccessToken.getRefreshToken());
                editor.putInt(OAuthConstants.VERSION, 2);
                editor.commit();
            }
            accessHandler.onLoginSuccess();
        }

        @Override
        public void onFailure(Exception e) {
            accessHandler.onLoginFailure(e);
        }
    });

    this.context = c;
    // Store preferences namespaced by the class and consumer key used
    this.prefs = this.context.getSharedPreferences("OAuth_" + apiInstance.getClass().getSimpleName() + "_" + consumerKey, 0);
    this.editor = this.prefs.edit();
    // Set access token in the tokenClient if already stored in preferences
    Token accessToken = this.checkAccessToken();
    if (accessToken != null) {
        tokenClient.setAccessToken(accessToken);
        instantiateClient(consumerKey, consumerSecret, accessToken);
    }
}
/**
 * Creates the underlying HTTP client appropriate for the token type.
 *
 * @throws IllegalStateException if the token is neither an OAuth1 nor an
 *     OAuth2 access token (including null).
 */
public void instantiateClient(String consumerKey, String consumerSecret, Token token) {
    if (token instanceof OAuth1AccessToken) {
        client = OAuthAsyncHttpClient.create(consumerKey, consumerSecret, (OAuth1AccessToken)(token));
    } else if (token instanceof OAuth2AccessToken){
        client = OAuthAsyncHttpClient.create((OAuth2AccessToken) token);
    } else {
        // BUG FIX: the old message concatenated with no separator,
        // producing e.g. "unrecognized token typeXYZ".
        throw new IllegalStateException("unrecognized token type: " + token);
    }
}
// Fetches a request token and retrieves the authorization url.
// onReceivedRequestToken then opens a browser once the url has been received.
public void connect() {
    tokenClient.fetchRequestToken();
}
// Retrieves the access token given the authorization callback uri, or fires
// the success callback immediately if a token is already stored.
public void authorize(Uri uri, OAuthAccessHandler handler) {
    this.accessHandler = handler;
    if (checkAccessToken() == null && uri != null) {
        // TODO: check UriServiceCallback with intent:// scheme
        // For OAuth2 there is no request token: getOAuth1RequestToken()
        // returns null and fetchAccessToken must handle that.
        tokenClient.fetchAccessToken(getOAuth1RequestToken(), uri);
    } else if (checkAccessToken() != null) { // already have access token
        this.accessHandler.onLoginSuccess();
    }
}
/**
 * Looks up a previously persisted access token in the shared preferences.
 *
 * @return the stored OAuth1 or OAuth2 access token, or null when none exists
 */
public Token checkAccessToken() {
    final int storedVersion = prefs.getInt(OAuthConstants.VERSION, 0);
    switch (storedVersion) {
        case 1:
            // OAuth1 needs both the token and its secret to be present.
            if (prefs.contains(OAuthConstants.TOKEN) && prefs.contains(OAuthConstants.TOKEN_SECRET)) {
                return new OAuth1AccessToken(
                        prefs.getString(OAuthConstants.TOKEN, ""),
                        prefs.getString(OAuthConstants.TOKEN_SECRET, ""));
            }
            return null;
        case 2:
            // OAuth2 only needs the bearer token itself.
            if (prefs.contains(OAuthConstants.TOKEN)) {
                return new OAuth2AccessToken(prefs.getString(OAuthConstants.TOKEN, ""));
            }
            return null;
        default:
            return null;
    }
}
// Returns the underlying token client.
protected OAuthTokenClient getTokenClient() {
    return tokenClient;
}
// Returns the request token stored during the request token phase (OAuth1 only);
// null when the stored OAuth version is not 1.
protected @Nullable Token getOAuth1RequestToken() {
    int oAuthVersion = prefs.getInt(OAuthConstants.VERSION, 0);
    if (oAuthVersion == 1) {
        return new OAuth1RequestToken(prefs.getString(OAUTH1_REQUEST_TOKEN, ""),
                prefs.getString(OAUTH1_REQUEST_TOKEN_SECRET, ""));
    }
    return null;
}
// Assigns the base url for the API
protected void setBaseUrl(String url) {
    this.baseUrl = url;
}
// Returns the full API url: "<baseUrl>/<path>"
protected String getApiUrl(String path) {
    return this.baseUrl + "/" + path;
}
// Removes the access tokens (for signing out); clears both the in-memory
// token on the tokenClient and every persisted token-related preference.
// NOTE(review): commit() writes synchronously on the calling thread;
// editor.apply() would persist asynchronously if that matters here.
public void clearAccessToken() {
    tokenClient.setAccessToken(null);
    editor.remove(OAuthConstants.TOKEN);
    editor.remove(OAuthConstants.TOKEN_SECRET);
    editor.remove(OAuthConstants.REFRESH_TOKEN);
    editor.remove(OAuthConstants.SCOPE);
    editor.commit();
}
// Returns true if the tokenClient is authenticated; false otherwise.
public boolean isAuthenticated() {
    return tokenClient.getAccessToken() != null;
}
// Sets the flags used when launching the browser to authenticate through
// OAuth (-1, the field's default, means "no flags").
public void setRequestIntentFlags(int flags) {
    this.requestIntentFlags = flags;
}
/** Callback events for the OAuth login flow. */
public interface OAuthAccessHandler {
    // Members of an interface are implicitly public (and a nested interface
    // implicitly static), so the redundant modifiers are omitted.
    void onLoginSuccess();

    void onLoginFailure(Exception e);
}
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.ims.qti.export;
import java.util.Map;
import java.util.Set;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.MultipleSelectionElement;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
/**
* Initial Date: June 07, 2006 <br>
*
* @author Alexander Schneider
*/
public class OptionsChooseForm extends FormBasicController {
private static final String SCQ_ITEMCOLS = "scqitemcols";
private static final String SCQ_POSCOL = "scqposcol";
private static final String SCQ_POINTCOL = "scqpointcol";
private static final String SCQ_TIMECOLS = "scqtimecols";
private static final String MCQ_ITEMCOLS = "mcqitemcols";
private static final String MCQ_POSCOL = "mcqposcol";
private static final String MCQ_POINTCOL = "mcqpointcol";
private static final String MCQ_TIMECOLS = "mcqtimecols";
private static final String KPRIM_ITEMCOLS = "kprimitemcols";
private static final String KPRIM_POINTCOL = "kprimpointcol";
private static final String KPRIM_TIMECOLS = "kprimtimecols";
private static final String FIB_ITEMCOLS = "fibitemcols";
private static final String FIB_POINTCOL = "fibpointcol";
private static final String FIB_TIMECOLS = "fibtimecols";
private static final String ESSAY_ITEMCOLS = "essayitemcols";
private static final String ESSAY_TIMECOLS = "essaytimecols";
private boolean hasSCQ = false;
private boolean hasMCQ = false;
private boolean hasKRIM = false;
private boolean hasFIB = false;
private boolean hasEssay = false;
private final Map mapWithConfigs;
private MultipleSelectionElement scq, mcq, kprim, fib, essay;
private final String[] scqKeys, mcqKeys, kprimKeys, fibKeys, essayKeys;
private final String[] scqVals, mcqVals, kprimVals, fibVals, essayVals;
public OptionsChooseForm(final UserRequest ureq, final WindowControl wControl, final Map mapWithConfigs) {
super(ureq, wControl);
this.mapWithConfigs = mapWithConfigs;
hasSCQ = mapWithConfigs.get(QTIExportSCQItemFormatConfig.class) != null;
hasMCQ = mapWithConfigs.get(QTIExportMCQItemFormatConfig.class) != null;
hasKRIM = mapWithConfigs.get(QTIExportKPRIMItemFormatConfig.class) != null;
hasFIB = mapWithConfigs.get(QTIExportFIBItemFormatConfig.class) != null;
hasEssay = mapWithConfigs.get(QTIExportEssayItemFormatConfig.class) != null;
scqKeys = new String[] { SCQ_ITEMCOLS, SCQ_POSCOL, SCQ_POINTCOL, SCQ_TIMECOLS };
scqVals = new String[] { translate("form.itemcols"), translate("form.poscol"), translate("form.pointcol"), translate("form.timecols") };
mcqKeys = new String[] { MCQ_ITEMCOLS, MCQ_POSCOL, MCQ_POINTCOL, MCQ_TIMECOLS };
mcqVals = new String[] { translate("form.itemcols"), translate("form.poscol"), translate("form.pointcol"), translate("form.timecols") };
kprimKeys = new String[] { KPRIM_ITEMCOLS, KPRIM_POINTCOL, KPRIM_TIMECOLS };
kprimVals = new String[] { translate("form.itemcols"), translate("form.pointcol"), translate("form.timecols") };
fibKeys = new String[] { FIB_ITEMCOLS, FIB_POINTCOL, FIB_TIMECOLS };
fibVals = new String[] { translate("form.itemcols"), translate("form.pointcol"), translate("form.timecols") };
essayKeys = new String[] { ESSAY_ITEMCOLS, ESSAY_TIMECOLS };
essayVals = new String[] { translate("form.itemcols"), translate("form.timecols") };
initForm(ureq);
}
@Override
protected boolean validateFormLogic(final UserRequest ureq) {
// Single Choice
if (hasSCQ) {
final QTIExportSCQItemFormatConfig c = (QTIExportSCQItemFormatConfig) mapWithConfigs.get(QTIExportSCQItemFormatConfig.class);
final Set s = scq.getSelectedKeys();
c.setResponseCols(s.contains(SCQ_ITEMCOLS));
c.setPositionsOfResponsesCol(s.contains(SCQ_POSCOL));
c.setPointCol(s.contains(SCQ_POINTCOL));
c.setTimeCols(s.contains(SCQ_TIMECOLS));
}
// Multiple Choice
if (hasMCQ) {
final QTIExportMCQItemFormatConfig c = (QTIExportMCQItemFormatConfig) mapWithConfigs.get(QTIExportMCQItemFormatConfig.class);
final Set s = mcq.getSelectedKeys();
c.setResponseCols(s.contains(MCQ_ITEMCOLS));
c.setPositionsOfResponsesCol(s.contains(MCQ_POSCOL));
c.setPointCol(s.contains(MCQ_POINTCOL));
c.setTimeCols(s.contains(MCQ_TIMECOLS));
}
// KPRIM
if (hasKRIM) {
final QTIExportKPRIMItemFormatConfig c = (QTIExportKPRIMItemFormatConfig) mapWithConfigs.get(QTIExportKPRIMItemFormatConfig.class);
final Set s = kprim.getSelectedKeys();
c.setResponseCols(s.contains(KPRIM_ITEMCOLS));
c.setPointCol(s.contains(KPRIM_POINTCOL));
c.setTimeCols(s.contains(KPRIM_TIMECOLS));
}
// Fill in the Blank
if (hasFIB) {
final QTIExportFIBItemFormatConfig c = (QTIExportFIBItemFormatConfig) mapWithConfigs.get(QTIExportFIBItemFormatConfig.class);
final Set s = fib.getSelectedKeys();
c.setResponseCols(s.contains(FIB_ITEMCOLS));
c.setPointCol(s.contains(FIB_POINTCOL));
c.setTimeCols(s.contains(FIB_TIMECOLS));
}
// Essay
if (hasEssay) {
final QTIExportEssayItemFormatConfig c = (QTIExportEssayItemFormatConfig) mapWithConfigs.get(QTIExportEssayItemFormatConfig.class);
final Set s = essay.getSelectedKeys();
c.setResponseCols(s.contains(ESSAY_ITEMCOLS));
c.setTimeCols(s.contains(ESSAY_TIMECOLS));
}
return true;
}
@Override
protected void formOK(final UserRequest ureq) {
fireEvent(ureq, Event.DONE_EVENT);
}
@Override
protected void initForm(final FormItemContainer formLayout, final Controller listener, final UserRequest ureq) {
scq = uifactory.addCheckboxesVertical("scq", "form.scqtitle", formLayout, scqKeys, scqVals, null, 1);
if (hasSCQ) {
final QTIExportItemFormatConfig c = (QTIExportItemFormatConfig) mapWithConfigs.get(QTIExportSCQItemFormatConfig.class);
scq.select(SCQ_ITEMCOLS, c.hasResponseCols());
scq.select(SCQ_POSCOL, c.hasPositionsOfResponsesCol());
scq.select(SCQ_POINTCOL, c.hasPointCol());
scq.select(SCQ_TIMECOLS, c.hasTimeCols());
} else {
scq.setVisible(false);
}
mcq = uifactory.addCheckboxesVertical("mcq", "form.mcqtitle", formLayout, mcqKeys, mcqVals, null, 1);
if (hasMCQ) {
final QTIExportItemFormatConfig c = (QTIExportItemFormatConfig) mapWithConfigs.get(QTIExportMCQItemFormatConfig.class);
mcq.select(MCQ_ITEMCOLS, c.hasResponseCols());
mcq.select(MCQ_POSCOL, c.hasPositionsOfResponsesCol());
mcq.select(MCQ_POINTCOL, c.hasPointCol());
mcq.select(MCQ_TIMECOLS, c.hasTimeCols());
} else {
mcq.setVisible(false);
}
kprim = uifactory.addCheckboxesVertical("kprim", "form.kprimtitle", formLayout, kprimKeys, kprimVals, null, 1);
if (hasKRIM) {
final QTIExportItemFormatConfig c = (QTIExportItemFormatConfig) mapWithConfigs.get(QTIExportKPRIMItemFormatConfig.class);
kprim.select(KPRIM_ITEMCOLS, c.hasResponseCols());
kprim.select(KPRIM_POINTCOL, c.hasPointCol());
kprim.select(KPRIM_TIMECOLS, c.hasTimeCols());
} else {
kprim.setVisible(false);
}
fib = uifactory.addCheckboxesVertical("fib", "form.fibtitle", formLayout, fibKeys, fibVals, null, 1);
if (hasFIB) {
final QTIExportItemFormatConfig c = (QTIExportItemFormatConfig) mapWithConfigs.get(QTIExportFIBItemFormatConfig.class);
fib.select(FIB_ITEMCOLS, c.hasResponseCols());
fib.select(FIB_POINTCOL, c.hasPointCol());
fib.select(FIB_TIMECOLS, c.hasTimeCols());
} else {
fib.setVisible(false);
}
essay = uifactory.addCheckboxesVertical("essay", "form.essaytitle", formLayout, essayKeys, essayVals, null, 1);
if (hasEssay) {
final QTIExportItemFormatConfig c = (QTIExportItemFormatConfig) mapWithConfigs.get(QTIExportEssayItemFormatConfig.class);
essay.select(FIB_ITEMCOLS, c.hasResponseCols());
essay.select(FIB_TIMECOLS, c.hasTimeCols());
} else {
essay.setVisible(false);
}
uifactory.addFormSubmitButton("next", formLayout);
}
@Override
protected void doDispose() {
//
}
}
| |
package com.pvsagar.smartlockscreen.baseclasses;
import android.app.NotificationManager;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.util.Log;
import com.pvsagar.smartlockscreen.R;
import com.pvsagar.smartlockscreen.applogic_objects.passphrases.PassphraseFactory;
import com.pvsagar.smartlockscreen.applogic_objects.passphrases.Pattern;
import com.pvsagar.smartlockscreen.backend_helpers.EncryptorDecryptor;
import com.pvsagar.smartlockscreen.backend_helpers.RootHelper;
import com.pvsagar.smartlockscreen.backend_helpers.SharedPreferencesHelper;
import com.pvsagar.smartlockscreen.backend_helpers.Utility;
import com.pvsagar.smartlockscreen.environmentdb.EnvironmentDatabaseContract.PasswordEntry;
import com.pvsagar.smartlockscreen.frontend_helpers.NotificationHelper;
import com.pvsagar.smartlockscreen.receivers.AdminActions;
/**
* Created by aravind on 10/8/14.
* Abstract class to represent a generic passphrase. Any class can be used for PassphraseRepresentation,
* but the subclasses should provide a one-one mapping from any object of PassphraseRepresentation
* to a set of unique Strings.
*/
public abstract class Passphrase<PassphraseRepresentation> {
    private static final String LOG_TAG = Passphrase.class.getSimpleName();

    // passwordString is the plain-text form; encryptedPasswordString is what is persisted.
    private String passwordString, encryptedPasswordString;
    private PassphraseRepresentation passphraseRepresentation;
    private String passphraseType;

    // User friendly password type strings (indexed by the INDEX_* constants below)
    public static final String[] passphraseTypes = {"Password","Pin","Pattern","None"};
    public static final String[] masterPassphraseTypes = {"Password","Pin"};
    public static final int INDEX_PASSPHRASE_TYPE_PASSWORD = 0;
    public static final int INDEX_PASSPHRASE_TYPE_PIN = 1;
    public static final int INDEX_PASSPHRASE_TYPE_PATTERN = 2;
    public static final int INDEX_PASSPHRASE_TYPE_NONE = 3;

    // SECURITY NOTE(review): hard-coded symmetric key shipped with the app; anyone
    // with the APK can decrypt stored passphrases. Consider the Android Keystore.
    private static final String KEY = "000102030405060708090A0B0C0D0E0F";

    private static final String PACKAGE_PREFIX = "com.pvsagar.smartlockscreen.applogic_objects" +
            ".passphrases";
    public static final String TYPE_PASSWORD = PACKAGE_PREFIX + ".TYPE_PASSWORD";
    public static final String TYPE_PIN = PACKAGE_PREFIX + ".TYPE_PIN";
    public static final String TYPE_NONE = PACKAGE_PREFIX + ".TYPE_NONE";
    public static final String TYPE_PATTERN = PACKAGE_PREFIX + ".TYPE_PATTERN";

    // Cache of the passphrase currently applied to the device, and the one before it.
    protected static Passphrase currentPassphrase, previousPassphrase;

    /**
     * Creates a passphrase of the given type without a value.
     *
     * @param type one of TYPE_PASSWORD / TYPE_PIN / TYPE_PATTERN / TYPE_NONE
     * @throws IllegalArgumentException if type is not one of the TYPE_* constants
     */
    public Passphrase(String type){
        Utility.checkForNullAndThrowException(type);
        if(!checkTypeValidity(type)){
            throw new IllegalArgumentException("Cannot initialize passphrase with type " +
                    type);
        }
        passphraseType = type;
    }

    /**
     * Creates a passphrase of the given type and immediately sets its value.
     *
     * @param type one of the TYPE_* constants
     * @param passphrase the passphrase value; must pass isPassphraseRepresentationValid
     */
    public Passphrase(String type, PassphraseRepresentation passphrase){
        this(type);
        setPasswordRepresentation(passphrase);
    }

    /**
     * Each passphrase type should provide a unique string corresponding to each of the passphrase representation
     * instance used. This method does that.
     * @param passphrase to be converted to a string
     * @return string representation of the passphrase
     */
    protected abstract String getPassphraseStringFromPassphraseRepresentation(PassphraseRepresentation passphrase);

    /**
     * Converts a given string back to the original passphrase representation object instance
     * @param passphrase string representation of the passphrase
     * @return original representation of the passphrase
     */
    protected abstract PassphraseRepresentation getPassphraseRepresentationFromPassphraseString(String passphrase);

    /**
     * Takes a given passphrase representation instance and checks for its validity
     * @param passphrase whose validity should be checked
     * @return true if valid, false otherwise
     */
    protected abstract boolean isPassphraseRepresentationValid(PassphraseRepresentation passphrase);

    /**
     * Sets the passphrase value, deriving both the plain-text string and its
     * encrypted form.
     *
     * @param passphrase the new value
     * @throws IllegalArgumentException if the representation is invalid
     */
    public void setPasswordRepresentation(PassphraseRepresentation passphrase) {
        if (isPassphraseRepresentationValid(passphrase)) {
            passphraseRepresentation = passphrase;
            this.passwordString = getPassphraseStringFromPassphraseRepresentation(passphrase);
            encryptPassword();
        } else {
            throw new IllegalArgumentException("Invalid passphrase passed.");
        }
    }

    public PassphraseRepresentation getPassphraseRepresentation(){
        return passphraseRepresentation;
    }

    /** @return true if the string is one of the four recognized TYPE_* constants */
    private boolean checkTypeValidity(String variableType){
        return variableType.equals(TYPE_NONE) || variableType.equals(TYPE_PASSWORD)
                || variableType.equals(TYPE_PIN) || variableType.equals(TYPE_PATTERN);
    }

    public String getPassphraseType(){
        return passphraseType;
    }

    private void encryptPassword(){
        encryptedPasswordString = EncryptorDecryptor.encrypt(passwordString, KEY);
    }

    private void decryptPassword(){
        passwordString = EncryptorDecryptor.decrypt(encryptedPasswordString, KEY);
    }

    /** @return values ready for insertion into the passwords table (type + encrypted string) */
    public ContentValues getContentValues(){
        ContentValues passwordValues = new ContentValues();
        passwordValues.put(PasswordEntry.COLUMN_PASSWORD_TYPE, passphraseType);
        passwordValues.put(PasswordEntry.COLUMN_PASSWORD_STRING, encryptedPasswordString);
        return passwordValues;
    }

    /**
     * Reads passphrase information from a cursor
     * @param cursor should be populated with values from passwords table
     * @return passphrase instances corresponding to the values from the cursor
     * @throws IllegalArgumentException if the cursor does not hold passwords-table values
     */
    public static Passphrase getPassphraseFromCursor(Cursor cursor){
        try{
            String type = cursor.getString(cursor.getColumnIndex(PasswordEntry.COLUMN_PASSWORD_TYPE));
            Passphrase returnPassphrase = PassphraseFactory.getPassphraseInstance(type);
            returnPassphrase.encryptedPasswordString = cursor.getString(cursor.getColumnIndex
                    (PasswordEntry.COLUMN_PASSWORD_STRING));
            if(returnPassphrase.passphraseType.equals(TYPE_NONE)){
                returnPassphrase.encryptedPasswordString = "";
            }
            returnPassphrase.decryptPassword();
            returnPassphrase.setPasswordRepresentation(returnPassphrase.
                    getPassphraseRepresentationFromPassphraseString(returnPassphrase.passwordString));
            return returnPassphrase;
        } catch (Exception e){
            e.printStackTrace();
            throw new IllegalArgumentException("Cursor should have values from passwords table");
        }
    }

    /**
     * Sets this passphrase as current device password
     * @return true if the device password was (or already is) this passphrase
     */
    public final boolean setAsCurrentPassword(Context context){
        if(currentPassphrase != null && this.equals(currentPassphrase) && previousPassphrase != null && currentPassphrase.equals(previousPassphrase)) {
            Log.d(LOG_TAG, "Previous passphrase matches current passphrase");
            if(!passphraseType.equals(TYPE_PATTERN) || SharedPreferencesHelper.isRootPattern(context))
                return true;
        }
        boolean result = setPatternAsCurrentPassword(context);
        if(!result) {
            result = AdminActions.changePassword(passwordString, this.passphraseType, getMasterPassword(context));
        }
        if(result){
            previousPassphrase = currentPassphrase;
            currentPassphrase = this;
        }
        return result;
    }

    public static void clearCurrentPassphraseCache(){
        currentPassphrase = null;
        previousPassphrase = null;
    }

    /**
     * Attempts to apply this passphrase as a device pattern (root devices only).
     * @return true if handled as a pattern; false if this is not a Pattern or
     *         the root-pattern path is unavailable
     */
    public boolean setPatternAsCurrentPassword(Context context) {
        if(!(this instanceof Pattern)) return false;
        Pattern p = (Pattern) this;
        if(SharedPreferencesHelper.isRootPattern(context)){
            Passphrase.getMasterPassword(context).setAsCurrentPassword(context);
            if(p.getPassphraseRepresentation().size() <= 2) {
                // Patterns of <= 2 nodes are too short: ask the user to re-enter.
                NotificationManager notificationManager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
                notificationManager.notify(NotificationHelper.ENTER_PATTERN_NOTIFICATION_ID,
                        NotificationHelper.getAppNotification(context, context.getString(R.string.enter_pattern_again)));
                return true;
            }
            if(RootHelper.setCurrentPattern(context, p)) return true;
        }
        return false;
    }

    /**
     * Sets the master passphrase
     * @param masterPassword master passphrase to be set
     * @param context Activity/service context
     */
    public static void setMasterPassword(Passphrase masterPassword, Context context){
        SharedPreferencesHelper.setMasterPassword(context, masterPassword.encryptedPasswordString,
                masterPassword.passphraseType);
    }

    /**
     * Returns the current master passphrase
     * @param context Activity/service context
     * @return current master passphrase (a TYPE_NONE passphrase if none stored)
     */
    public static Passphrase getMasterPassword(Context context){
        String masterPasswordType = SharedPreferencesHelper.getMasterPasswordType(context),
                masterPasswordString = SharedPreferencesHelper.getMasterPasswordString(context);
        if(masterPasswordType == null || masterPasswordString == null){
            masterPasswordType = TYPE_NONE;
            masterPasswordString = "";
        }
        Passphrase masterPassphrase = PassphraseFactory.getPassphraseInstance(masterPasswordType);
        masterPassphrase.encryptedPasswordString = masterPasswordString;
        if(masterPassphrase.passphraseType.equals(TYPE_NONE)){
            masterPassphrase.encryptedPasswordString = "";
        }
        masterPassphrase.decryptPassword();
        masterPassphrase.setPasswordRepresentation(masterPassphrase.
                getPassphraseRepresentationFromPassphraseString(masterPassphrase.passwordString));
        return masterPassphrase;
    }

    /** @return true if the plain-text form of this passphrase equals the given string */
    public boolean compareString(String passphrase){
        return passwordString != null && passphrase != null && passwordString.equals(passphrase);
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof Passphrase)) return false;
        Passphrase p = (Passphrase) o;
        if (!p.getPassphraseType().equals(getPassphraseType())) return false;
        // BUGFIX: null-safe comparison; passwordString may be null for a
        // passphrase constructed without a representation.
        return p.passwordString == null ? passwordString == null
                : p.passwordString.equals(passwordString);
    }

    // Added alongside equals() to honor the equals/hashCode contract
    // (e.g. if passphrases are ever used as map keys or in sets).
    @Override
    public int hashCode() {
        int result = passphraseType.hashCode();
        result = 31 * result + (passwordString != null ? passwordString.hashCode() : 0);
        return result;
    }
}
| |
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.event.cause;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.commons.lang3.Validate.noNullElements;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import org.apache.commons.lang3.ArrayUtils;
import org.spongepowered.api.entity.Entity;
import org.spongepowered.api.event.Event;
import org.spongepowered.api.event.cause.entity.damage.source.DamageSource;
import org.spongepowered.api.event.cause.entity.spawn.SpawnCause;
import org.spongepowered.api.event.entity.DamageEntityEvent;
import org.spongepowered.api.event.entity.SpawnEntityEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
/**
* A cause represents the reason or initiator of an event.
*
* <p>For example, if a block of sand is placed where it drops, the block
* of sand would create a falling sand entity, which then would place another
* block of sand. The block place event for the final block of sand would have
* the cause chain of the block of sand -> falling sand entity.</p>
*
* <p>It is not possible to accurately the describe the chain of causes in
* all scenarios so a best effort approach is generally acceptable. For
* example, a player might press a lever, activating a complex Redstone
* circuit, which would then launch TNT and cause the destruction of
* some blocks, but tracing this event would be too complicated and thus
* may not be attempted.</p>
*/
@SuppressWarnings("unchecked")
public abstract class Cause {

    private static final Cause EMPTY = new EmptyCause();

    /**
     * Gets the "empty" {@link Cause}. If a {@link Cause} is required, but
     * there is no {@link Object} that can be stated as the direct "cause" of
     * an {@link Event}, an "empty" {@link Cause} can be used.
     *
     * @return The empty cause instance
     */
    public static Cause empty() {
        return EMPTY;
    }

    /**
     * Creates a new {@link Cause} of the provided {@link Object}s. Note that
     * none of the provided {@link Object}s can be <code>null</code>. The order
     * of the objects should represent the "priority" that the object aided in
     * the "cause" of an {@link Event}. The first {@link Object} should be the
     * direct "cause" of the {@link Event}.
     *
     * <p>Usually, in most cases, some {@link Event}s will have "helper"
     * objects to interface with their direct causes, such as
     * {@link DamageSource} for an {@link DamageEntityEvent}, or a
     * {@link SpawnCause} for an {@link SpawnEntityEvent}.</p>
     *
     * @param objects The objects being the cause
     * @return The new cause
     */
    public static Cause of(Object... objects) {
        checkNotNull(objects, "The objects cannot be null!");
        if (objects.length == 0) {
            return EMPTY;
        }
        noNullElements(objects, "No elements in a cause can be null!");
        return new PresentCause(objects);
    }

    /**
     * Creates a {@link Cause} from the given objects, silently dropping any
     * <code>null</code> elements instead of throwing.
     *
     * @param objects The objects being the cause, may be null or contain nulls
     * @return The new cause, or the empty cause if nothing non-null remains
     */
    public static Cause ofNullable(@Nullable Object... objects) {
        if (objects == null || objects.length == 0) {
            return EMPTY;
        } else {
            List<Object> list = new ArrayList<>();
            for (Object object : objects) {
                if (object != null) {
                    list.add(object);
                }
            }
            // BUGFIX: if every element was null the filtered list is empty; a
            // zero-length PresentCause would make root() throw
            // ArrayIndexOutOfBoundsException, so return EMPTY instead.
            return list.isEmpty() ? EMPTY : new PresentCause(list.toArray());
        }
    }

    Cause() {}

    /**
     * Gets whether this {@link Cause} is empty of any causes or not. An empty
     * cause may mean the {@link Cause} is not originating from any vanilla
     * interactions, or it may mean the cause is simply not known.
     *
     * @return True if this cause is empty, false otherwise
     */
    public abstract boolean isEmpty();

    /**
     * Gets the root {@link Object} of this cause. The root can be anything,
     * including but not limited to: {@link DamageSource}, {@link Entity},
     * {@link SpawnCause}, etc.
     *
     * @return The root object cause for this cause
     */
    public abstract Optional<?> root();

    /**
     * Gets the first <code>T</code> object of this {@link Cause}, if
     * available.
     *
     * @param target The class of the target type
     * @param <T> The type of object being queried for
     * @return The first element of the type, if available
     */
    public abstract <T> Optional<T> first(Class<T> target);

    /**
     * Gets the last object instance of the {@link Class} of type
     * <code>T</code>.
     *
     * @param target The class of the target type
     * @param <T> The type of object being queried for
     * @return The last element of the type, if available
     */
    public abstract <T> Optional<T> last(Class<T> target);

    /**
     * Gets the object immediately before the object that is an instance of
     * the {@link Class} passed in.
     *
     * @param clazz The class of the object
     * @return The object
     */
    public abstract Optional<?> before(Class<?> clazz);

    /**
     * Gets the object immediately after the object that is an instance of
     * the {@link Class} passed in.
     *
     * @param clazz The class of the object to search for
     * @return The object immediately after a match, if available
     */
    public abstract Optional<?> after(Class<?> clazz);

    /**
     * Returns whether the target class matches any object of this {@link Cause}.
     * @param target The class of the target type
     * @return True if found, false otherwise
     */
    public abstract boolean any(Class<?> target);

    /**
     * Gets an {@link ImmutableList} of all objects that are instances of the
     * given {@link Class} type <code>T</code>.
     *
     * @param <T> The type of objects to query for
     * @param target The class of the target type
     * @return An immutable list of the objects queried
     */
    public abstract <T> List<T> allOf(Class<T> target);

    /**
     * Gets an immutable {@link List} with all object causes that are not
     * instances of the provided {@link Class}.
     *
     * @param ignoredClass The class of object types to ignore
     * @return The list of objects not an instance of the provided class
     */
    public abstract List<Object> noneOf(Class<?> ignoredClass);

    /**
     * Gets an {@link List} of all causes within this {@link Cause}.
     *
     * @return An immutable list of all the causes
     */
    public abstract List<Object> all();

    /**
     * Creates a new {@link Cause} where the objects are added at the end of
     * the cause array of objects.
     *
     * @param additional The additional objects to add
     * @return The new cause
     */
    public abstract Cause with(Object... additional);

    /**
     * Creates a new {@link Cause} where the objects are added at the end of
     * the cause array of objects.
     *
     * @param iterable The additional objects
     * @return The new cause
     */
    public abstract Cause with(Iterable<?> iterable);

    /**
     * Returns {@code true} if {@code object} is a {@code Cause} instance, and
     * either the contained references are {@linkplain Object#equals equal} to
     * each other or both are absent.
     */
    @Override
    public abstract boolean equals(@Nullable Object object);

    /**
     * Returns a hash code for this instance.
     */
    @Override
    public abstract int hashCode();

    /** A cause holding at least one (non-null) object. */
    private static final class PresentCause extends Cause {
        private final Object[] cause;

        PresentCause(Object... causes) {
            for (Object aCause : causes) {
                checkNotNull(aCause, "Null cause element!");
            }
            // Defensive copy: callers cannot mutate this cause afterwards.
            this.cause = Arrays.copyOf(causes, causes.length);
        }

        @Override
        public boolean isEmpty() {
            return false;
        }

        @Override
        public Optional<?> root() {
            return Optional.of(this.cause[0]);
        }

        @Override
        public <T> Optional<T> first(Class<T> target) {
            for (Object aCause : this.cause) {
                if (target.isInstance(aCause)) {
                    return Optional.of((T) aCause);
                }
            }
            return Optional.empty();
        }

        @Override
        public <T> List<T> allOf(Class<T> target) {
            ImmutableList.Builder<T> builder = ImmutableList.builder();
            for (Object aCause : this.cause) {
                if (target.isInstance(aCause)) {
                    builder.add((T) aCause);
                }
            }
            return builder.build();
        }

        @Override
        public List<Object> noneOf(Class<?> ignoredClass) {
            ImmutableList.Builder<Object> builder = ImmutableList.builder();
            for (Object cause : this.cause) {
                if (!ignoredClass.isInstance(cause)) {
                    builder.add(cause);
                }
            }
            return builder.build();
        }

        @Override
        public <T> Optional<T> last(Class<T> target) {
            for (int i = this.cause.length - 1; i >= 0; i--) {
                if (target.isInstance(this.cause[i])) {
                    return Optional.of((T) this.cause[i]);
                }
            }
            return Optional.empty();
        }

        @Override
        public Optional<?> before(Class<?> clazz) {
            checkArgument(clazz != null, "The provided class cannot be null!");
            // A match at index 0 has no predecessor, hence the i > 0 guard.
            for (int i = 0; i < this.cause.length; i++) {
                if (clazz.isInstance(this.cause[i]) && i > 0) {
                    return Optional.of(this.cause[i - 1]);
                }
            }
            return Optional.empty();
        }

        @Override
        public Optional<?> after(Class<?> clazz) {
            checkArgument(clazz != null, "The provided class cannot be null!");
            // A match at the last index has no successor, hence the bounds guard.
            for (int i = 0; i < this.cause.length; i++) {
                if (clazz.isInstance(this.cause[i]) && i + 1 < this.cause.length) {
                    return Optional.of(this.cause[i + 1]);
                }
            }
            return Optional.empty();
        }

        @Override
        public boolean any(Class<?> target) {
            checkArgument(target != null, "The provided class cannot be null!");
            for (Object aCause : this.cause) {
                if (target.isInstance(aCause)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public List<Object> all() {
            return ImmutableList.copyOf(this.cause);
        }

        @Override
        public Cause with(Object... additional) {
            checkArgument(additional != null, "Cannot add a null argument!");
            return of(ArrayUtils.addAll(this.cause, additional));
        }

        @Override
        public Cause with(Iterable<?> iterable) {
            List<Object> list = new ArrayList<Object>();
            for (Object o : this.cause) {
                list.add(o);
            }
            for (Object o : iterable) {
                checkArgument(o != null, "Cannot add null causes");
                list.add(o);
            }
            return of(list.toArray());
        }

        @Override
        public boolean equals(@Nullable Object object) {
            if (object instanceof PresentCause) {
                PresentCause cause = ((PresentCause) object);
                return Arrays.equals(this.cause, cause.cause);
            }
            return false;
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(this.cause);
        }

        @Override
        public String toString() {
            return "Cause{" + Arrays.deepToString(this.cause) + "}";
        }
    }

    /** The singleton cause holding no objects; every query returns empty. */
    private static final class EmptyCause extends Cause {

        EmptyCause() {}

        @Override
        public boolean isEmpty() {
            return true;
        }

        @Override
        public Optional<?> root() {
            return Optional.empty();
        }

        @Override
        public <T> Optional<T> first(Class<T> target) {
            return Optional.empty();
        }

        @Override
        public <T> Optional<T> last(Class<T> target) {
            return Optional.empty();
        }

        @Override
        public Optional<?> before(Class<?> clazz) {
            return Optional.empty();
        }

        @Override
        public Optional<?> after(Class<?> clazz) {
            return Optional.empty();
        }

        @Override
        public boolean any(Class<?> target) {
            return false;
        }

        @Override
        public <T> List<T> allOf(Class<T> target) {
            return ImmutableList.of();
        }

        @Override
        public List<Object> noneOf(Class<?> ignoredClass) {
            return ImmutableList.of();
        }

        @Override
        public List<Object> all() {
            return ImmutableList.of();
        }

        @Override
        public Cause with(Object... additional) {
            return of(additional);
        }

        @Override
        public Cause with(Iterable<?> iterable) {
            List<Object> list = new ArrayList<Object>();
            for (Object o : iterable) {
                list.add(o);
            }
            return of(list.toArray());
        }

        @Override
        public boolean equals(@Nullable Object object) {
            return object == this;
        }

        @Override
        public int hashCode() {
            return 0x39e8a5b;
        }

        @Override
        public String toString() {
            return "Cause{}";
        }
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mediapackage.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* A new Channel configuration.
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/CreateChannel" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateChannelRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/** A short text description of the Channel. */
private String description;
/**
* The ID of the Channel. The ID must be unique within the region and it cannot be changed after a Channel is
* created.
*/
private String id;
private java.util.Map<String, String> tags;
/**
 * Sets the short text description of the Channel.
 *
 * @param description
 *        A short text description of the Channel.
 */
public void setDescription(String description) {
    this.description = description;
}

/**
 * Gets the short text description of the Channel.
 *
 * @return A short text description of the Channel.
 */
public String getDescription() {
    return this.description;
}

/**
 * Fluent setter for the short text description of the Channel.
 *
 * @param description
 *        A short text description of the Channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public CreateChannelRequest withDescription(String description) {
    this.description = description;
    return this;
}

/**
 * Sets the ID of the Channel. The ID must be unique within the region and it
 * cannot be changed after a Channel is created.
 *
 * @param id
 *        The ID of the Channel.
 */
public void setId(String id) {
    this.id = id;
}

/**
 * Gets the ID of the Channel. The ID must be unique within the region and it
 * cannot be changed after a Channel is created.
 *
 * @return The ID of the Channel.
 */
public String getId() {
    return this.id;
}

/**
 * Fluent setter for the ID of the Channel. The ID must be unique within the
 * region and it cannot be changed after a Channel is created.
 *
 * @param id
 *        The ID of the Channel.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public CreateChannelRequest withId(String id) {
    this.id = id;
    return this;
}
/**
* @return
*/
public java.util.Map<String, String> getTags() {
return tags;
}
/**
 * Sets the tags to assign to the Channel, replacing any existing map.
 *
 * @param tags
 *        the tag key/value pairs; may be {@code null} to clear the tags.
 */
public void setTags(java.util.Map<String, String> tags) {
this.tags = tags;
}
/**
 * Fluent setter for the tags to assign to the Channel.
 *
 * @param tags
 *        the tag key/value pairs; may be {@code null} to clear the tags.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public CreateChannelRequest withTags(java.util.Map<String, String> tags) {
setTags(tags);
return this;
}
/**
 * Adds a single entry to the Tags map, lazily creating the map on first use.
 *
 * @param key
 *        the tag key; must not already be present in the map.
 * @param value
 *        the tag value.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException
 *         if an entry with the same key was already added.
 */
public CreateChannelRequest addTagsEntry(String key, String value) {
    if (null == this.tags) {
        this.tags = new java.util.HashMap<String, String>();
    }
    if (this.tags.containsKey(key)) {
        // key is already a String; the former key.toString() was redundant and threw
        // NullPointerException (instead of this exception) for a duplicate null key.
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    }
    this.tags.put(key, value);
    return this;
}
/**
 * Removes all the entries added into Tags.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public CreateChannelRequest clearTagsEntries() {
// Drops the map entirely; a later addTagsEntry call recreates it.
this.tags = null;
return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
// Generated-SDK style: each populated field is appended with a trailing comma,
// so the output may contain ",}" when Tags is the only unset field.
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getId() != null)
sb.append("Id: ").append(getId()).append(",");
if (getTags() != null)
sb.append("Tags: ").append(getTags());
sb.append("}");
return sb.toString();
}
/**
 * Two requests are equal when their Description, Id and Tags fields are
 * pairwise equal (both {@code null} counts as equal for each field).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // Also rejects null, so no separate null check is needed.
    if (!(obj instanceof CreateChannelRequest)) {
        return false;
    }
    CreateChannelRequest that = (CreateChannelRequest) obj;
    String thisDescription = this.getDescription();
    String thatDescription = that.getDescription();
    if (thisDescription == null ? thatDescription != null : !thisDescription.equals(thatDescription)) {
        return false;
    }
    String thisId = this.getId();
    String thatId = that.getId();
    if (thisId == null ? thatId != null : !thisId.equals(thatId)) {
        return false;
    }
    java.util.Map<String, String> thisTags = this.getTags();
    java.util.Map<String, String> thatTags = that.getTags();
    return thisTags == null ? thatTags == null : thisTags.equals(thatTags);
}
/**
 * Hash code consistent with {@link #equals(Object)}: a 31-based fold over
 * Description, Id and Tags, treating {@code null} fields as 0.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    String description = getDescription();
    result = prime * result + (description == null ? 0 : description.hashCode());
    String id = getId();
    result = prime * result + (id == null ? 0 : id.hashCode());
    java.util.Map<String, String> tags = getTags();
    result = prime * result + (tags == null ? 0 : tags.hashCode());
    return result;
}
@Override
public CreateChannelRequest clone() {
// NOTE(review): delegates to super.clone(), presumably a shallow copy — the
// tags map reference would then be shared between original and clone. Confirm
// the superclass deep-copies request state if independent mutation is expected.
return (CreateChannelRequest) super.clone();
}
}
| |
package fr.jmini.asciidoctorj.testcases;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.asciidoctor.OptionsBuilder;
import org.asciidoctor.ast.Cell;
import org.asciidoctor.ast.Column;
import org.asciidoctor.ast.Document;
import org.asciidoctor.ast.Row;
import org.asciidoctor.ast.Table;
public class TableFooterRowTestCase implements AdocTestCase {
// AsciiDoc input under test: a two-column table with [options="footer"], so the
// last source row ("row 3") is rendered as the table footer.
public static final String ASCIIDOC = "" +
"[options=\"footer\"]\n" +
"|===\n" +
"|Name of Column 1 |Name of Column 2\n" +
"\n" +
"|Cell in column 1, row 1\n" +
"|Cell in column 2, row 1\n" +
"\n" +
"|Cell in column 1, row 2\n" +
"|Cell in column 2, row 2\n" +
"\n" +
"|Footer in column 1, row 3\n" +
"|Footer in column 2, row 3\n" +
"|===";
/** Returns the AsciiDoc source for this test case. */
@Override
public String getAdocInput() {
return ASCIIDOC;
}
/** Conversion options for this test case: the builder defaults, as a plain map. */
@Override
public Map<String, Object> getInputOptions() {
return OptionsBuilder.options()
.asMap();
}
// Expected HTML output. Note that Asciidoctor emits <tfoot> before <tbody>,
// even though the footer row comes last in the AsciiDoc source.
// tag::expected-html[]
public static final String EXPECTED_HTML = "" +
"<table class=\"tableblock frame-all grid-all spread\">\n" +
"<colgroup>\n" +
"<col style=\"width: 50%;\" />\n" +
"<col style=\"width: 50%;\" />\n" +
"</colgroup>\n" +
"<thead>\n" +
"<tr>\n" +
"<th class=\"tableblock halign-left valign-top\">Name of Column 1</th>\n" +
"<th class=\"tableblock halign-left valign-top\">Name of Column 2</th>\n" +
"</tr>\n" +
"</thead>\n" +
"<tfoot>\n" +
"<tr>\n" +
"<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Footer in column 1, row 3</p></td>\n" +
"<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Footer in column 2, row 3</p></td>\n" +
"</tr>\n" +
"</tfoot>\n" +
"<tbody>\n" +
"<tr>\n" +
"<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 1, row 1</p></td>\n" +
"<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 2, row 1</p></td>\n" +
"</tr>\n" +
"<tr>\n" +
"<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 1, row 2</p></td>\n" +
"<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 2, row 2</p></td>\n" +
"</tr>\n" +
"</tbody>\n" +
"</table>";
// end::expected-html[]
/** Returns the HTML expected from converting {@link #ASCIIDOC}. */
@Override
public String getHtmlOutput() {
return EXPECTED_HTML;
}
/**
 * Asserts the complete Asciidoctor AST produced for the input: a document
 * containing one table with two 50%-wide columns, a one-row header, a one-row
 * footer (exposed via getFooter() because of the footer option) and a two-row
 * body. The assertion sequence is generated code; the order mirrors AST
 * traversal and should not be rearranged.
 */
@Override
// tag::assert-code[]
public void checkAst(Document astDocument) {
// --- Document root ---
Document document1 = astDocument;
assertThat(document1.getId()).isNull();
assertThat(document1.getNodeName()).isEqualTo("document");
assertThat(document1.getParent()).isNull();
assertThat(document1.getContext()).isEqualTo("document");
assertThat(document1.getDocument()).isSameAs(document1);
assertThat(document1.isInline()).isFalse();
assertThat(document1.isBlock()).isTrue();
assertThat(document1.getAttributes()).containsEntry("doctype", "article")
.containsEntry("example-caption", "Example")
.containsEntry("figure-caption", "Figure")
.containsEntry("filetype", "html")
.containsEntry("notitle", "")
.containsEntry("prewrap", "")
.containsEntry("table-caption", "Table");
assertThat(document1.getRoles()).isNullOrEmpty();
assertThat(document1.isReftext()).isFalse();
assertThat(document1.getReftext()).isNull();
assertThat(document1.getCaption()).isNull();
assertThat(document1.getTitle()).isNull();
assertThat(document1.getStyle()).isNull();
assertThat(document1.getLevel()).isEqualTo(0);
assertThat(document1.getContentModel()).isEqualTo("compound");
assertThat(document1.getSourceLocation()).isNull();
assertThat(document1.getSubstitutions()).isNullOrEmpty();
assertThat(document1.getBlocks()).hasSize(1);
// --- The single top-level block: the table (4 rows, footer+header options set) ---
Table table1 = (Table) document1.getBlocks()
.get(0);
assertThat(table1.getId()).isNull();
assertThat(table1.getNodeName()).isEqualTo("table");
assertThat(table1.getParent()).isSameAs(document1);
assertThat(table1.getContext()).isEqualTo("table");
assertThat(table1.getDocument()).isSameAs(document1);
assertThat(table1.isInline()).isFalse();
assertThat(table1.isBlock()).isTrue();
assertThat(table1.getAttributes()).containsEntry("colcount", 2L)
.containsEntry("footer-option", "")
.containsEntry("header-option", "")
.containsEntry("options", "footer,header")
.containsEntry("rowcount", 4L)
.containsEntry("style", "table")
.containsEntry("tablepcwidth", 100L);
assertThat(table1.getRoles()).isNullOrEmpty();
assertThat(table1.isReftext()).isFalse();
assertThat(table1.getReftext()).isNull();
assertThat(table1.getCaption()).isNull();
assertThat(table1.getTitle()).isNull();
assertThat(table1.getStyle()).isEqualTo("table");
assertThat(table1.getLevel()).isEqualTo(0);
assertThat(table1.getContentModel()).isEqualTo("compound");
assertThat(table1.getSourceLocation()).isNull();
assertThat(table1.getSubstitutions()).isNullOrEmpty();
assertThat(table1.getBlocks()).isNullOrEmpty();
assertThat(table1.hasHeaderOption()).isTrue();
assertThat(table1.getColumns()).hasSize(2);
// --- Column definitions (50% width each); Column nodes throw on isInline()/isBlock() ---
Column column1 = (Column) table1.getColumns()
.get(0);
assertThat(column1.getId()).isNull();
assertThat(column1.getNodeName()).isEqualTo("column");
assertThat(column1.getParent()).isSameAs(table1);
assertThat(column1.getContext()).isEqualTo("column");
assertThat(column1.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
column1.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
column1.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(column1.getAttributes()).containsEntry("colnumber", 1L)
.containsEntry("colpcwidth", 50L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(column1.getRoles()).isNullOrEmpty();
assertThat(column1.isReftext()).isFalse();
assertThat(column1.getReftext()).isNull();
assertThat(column1.getStyle()).isNull();
assertThat(column1.getTable()).isSameAs(table1);
assertThat(column1.getColumnNumber()).isEqualTo(1);
assertThat(column1.getWidth()).isEqualTo(1);
assertThat(column1.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(column1.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
Column column2 = (Column) table1.getColumns()
.get(1);
assertThat(column2.getId()).isNull();
assertThat(column2.getNodeName()).isEqualTo("column");
assertThat(column2.getParent()).isSameAs(table1);
assertThat(column2.getContext()).isEqualTo("column");
assertThat(column2.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
column2.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
column2.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(column2.getAttributes()).containsEntry("colnumber", 2L)
.containsEntry("colpcwidth", 50L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(column2.getRoles()).isNullOrEmpty();
assertThat(column2.isReftext()).isFalse();
assertThat(column2.getReftext()).isNull();
assertThat(column2.getStyle()).isNull();
assertThat(column2.getTable()).isSameAs(table1);
assertThat(column2.getColumnNumber()).isEqualTo(2);
assertThat(column2.getWidth()).isEqualTo(1);
assertThat(column2.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(column2.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
// --- Header row: the column-name cells; Cell nodes also throw on isInline()/isBlock() ---
assertThat(table1.getHeader()).hasSize(1);
Row row1 = (Row) table1.getHeader()
.get(0);
assertThat(row1.getCells()).hasSize(2);
Cell cell1 = (Cell) row1.getCells()
.get(0);
assertThat(cell1.getId()).isNull();
assertThat(cell1.getNodeName()).isEqualTo("cell");
assertThat(cell1.getParent()).isSameAs(column1);
assertThat(cell1.getContext()).isEqualTo("cell");
assertThat(cell1.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell1.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell1.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell1.getAttributes()).containsEntry("colnumber", 1L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell1.getRoles()).isNullOrEmpty();
assertThat(cell1.isReftext()).isFalse();
assertThat(cell1.getReftext()).isNull();
assertThat(cell1.getColumn()).isSameAs(column1);
assertThat(cell1.getColspan()).isEqualTo(0);
assertThat(cell1.getRowspan()).isEqualTo(0);
assertThat(cell1.getText()).isEqualTo("Name of Column 1");
assertThat(cell1.getSource()).isEqualTo("Name of Column 1");
assertThat(cell1.getStyle()).isNull();
assertThat(cell1.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell1.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell1.getInnerDocument()).isNull();
Cell cell2 = (Cell) row1.getCells()
.get(1);
assertThat(cell2.getId()).isNull();
assertThat(cell2.getNodeName()).isEqualTo("cell");
assertThat(cell2.getParent()).isSameAs(column2);
assertThat(cell2.getContext()).isEqualTo("cell");
assertThat(cell2.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell2.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell2.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell2.getAttributes()).containsEntry("colnumber", 2L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell2.getRoles()).isNullOrEmpty();
assertThat(cell2.isReftext()).isFalse();
assertThat(cell2.getReftext()).isNull();
assertThat(cell2.getColumn()).isSameAs(column2);
assertThat(cell2.getColspan()).isEqualTo(0);
assertThat(cell2.getRowspan()).isEqualTo(0);
assertThat(cell2.getText()).isEqualTo("Name of Column 2");
assertThat(cell2.getSource()).isEqualTo("Name of Column 2");
assertThat(cell2.getStyle()).isNull();
assertThat(cell2.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell2.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell2.getInnerDocument()).isNull();
// --- Footer row: the last source row, surfaced via getFooter() due to the footer option ---
assertThat(table1.getFooter()).hasSize(1);
Row row2 = (Row) table1.getFooter()
.get(0);
assertThat(row2.getCells()).hasSize(2);
Cell cell3 = (Cell) row2.getCells()
.get(0);
assertThat(cell3.getId()).isNull();
assertThat(cell3.getNodeName()).isEqualTo("cell");
assertThat(cell3.getParent()).isSameAs(column1);
assertThat(cell3.getContext()).isEqualTo("cell");
assertThat(cell3.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell3.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell3.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell3.getAttributes()).containsEntry("colnumber", 1L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell3.getRoles()).isNullOrEmpty();
assertThat(cell3.isReftext()).isFalse();
assertThat(cell3.getReftext()).isNull();
assertThat(cell3.getColumn()).isSameAs(column1);
assertThat(cell3.getColspan()).isEqualTo(0);
assertThat(cell3.getRowspan()).isEqualTo(0);
assertThat(cell3.getText()).isEqualTo("Footer in column 1, row 3");
assertThat(cell3.getSource()).isEqualTo("Footer in column 1, row 3");
assertThat(cell3.getStyle()).isNull();
assertThat(cell3.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell3.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell3.getInnerDocument()).isNull();
Cell cell4 = (Cell) row2.getCells()
.get(1);
assertThat(cell4.getId()).isNull();
assertThat(cell4.getNodeName()).isEqualTo("cell");
assertThat(cell4.getParent()).isSameAs(column2);
assertThat(cell4.getContext()).isEqualTo("cell");
assertThat(cell4.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell4.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell4.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell4.getAttributes()).containsEntry("colnumber", 2L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell4.getRoles()).isNullOrEmpty();
assertThat(cell4.isReftext()).isFalse();
assertThat(cell4.getReftext()).isNull();
assertThat(cell4.getColumn()).isSameAs(column2);
assertThat(cell4.getColspan()).isEqualTo(0);
assertThat(cell4.getRowspan()).isEqualTo(0);
assertThat(cell4.getText()).isEqualTo("Footer in column 2, row 3");
assertThat(cell4.getSource()).isEqualTo("Footer in column 2, row 3");
assertThat(cell4.getStyle()).isNull();
assertThat(cell4.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell4.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell4.getInnerDocument()).isNull();
// --- Body rows 1 and 2 ---
assertThat(table1.getBody()).hasSize(2);
Row row3 = (Row) table1.getBody()
.get(0);
assertThat(row3.getCells()).hasSize(2);
Cell cell5 = (Cell) row3.getCells()
.get(0);
assertThat(cell5.getId()).isNull();
assertThat(cell5.getNodeName()).isEqualTo("cell");
assertThat(cell5.getParent()).isSameAs(column1);
assertThat(cell5.getContext()).isEqualTo("cell");
assertThat(cell5.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell5.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell5.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell5.getAttributes()).containsEntry("colnumber", 1L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell5.getRoles()).isNullOrEmpty();
assertThat(cell5.isReftext()).isFalse();
assertThat(cell5.getReftext()).isNull();
assertThat(cell5.getColumn()).isSameAs(column1);
assertThat(cell5.getColspan()).isEqualTo(0);
assertThat(cell5.getRowspan()).isEqualTo(0);
assertThat(cell5.getText()).isEqualTo("Cell in column 1, row 1");
assertThat(cell5.getSource()).isEqualTo("Cell in column 1, row 1");
assertThat(cell5.getStyle()).isNull();
assertThat(cell5.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell5.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell5.getInnerDocument()).isNull();
Cell cell6 = (Cell) row3.getCells()
.get(1);
assertThat(cell6.getId()).isNull();
assertThat(cell6.getNodeName()).isEqualTo("cell");
assertThat(cell6.getParent()).isSameAs(column2);
assertThat(cell6.getContext()).isEqualTo("cell");
assertThat(cell6.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell6.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell6.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell6.getAttributes()).containsEntry("colnumber", 2L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell6.getRoles()).isNullOrEmpty();
assertThat(cell6.isReftext()).isFalse();
assertThat(cell6.getReftext()).isNull();
assertThat(cell6.getColumn()).isSameAs(column2);
assertThat(cell6.getColspan()).isEqualTo(0);
assertThat(cell6.getRowspan()).isEqualTo(0);
assertThat(cell6.getText()).isEqualTo("Cell in column 2, row 1");
assertThat(cell6.getSource()).isEqualTo("Cell in column 2, row 1");
assertThat(cell6.getStyle()).isNull();
assertThat(cell6.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell6.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell6.getInnerDocument()).isNull();
Row row4 = (Row) table1.getBody()
.get(1);
assertThat(row4.getCells()).hasSize(2);
Cell cell7 = (Cell) row4.getCells()
.get(0);
assertThat(cell7.getId()).isNull();
assertThat(cell7.getNodeName()).isEqualTo("cell");
assertThat(cell7.getParent()).isSameAs(column1);
assertThat(cell7.getContext()).isEqualTo("cell");
assertThat(cell7.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell7.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell7.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell7.getAttributes()).containsEntry("colnumber", 1L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell7.getRoles()).isNullOrEmpty();
assertThat(cell7.isReftext()).isFalse();
assertThat(cell7.getReftext()).isNull();
assertThat(cell7.getColumn()).isSameAs(column1);
assertThat(cell7.getColspan()).isEqualTo(0);
assertThat(cell7.getRowspan()).isEqualTo(0);
assertThat(cell7.getText()).isEqualTo("Cell in column 1, row 2");
assertThat(cell7.getSource()).isEqualTo("Cell in column 1, row 2");
assertThat(cell7.getStyle()).isNull();
assertThat(cell7.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell7.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell7.getInnerDocument()).isNull();
Cell cell8 = (Cell) row4.getCells()
.get(1);
assertThat(cell8.getId()).isNull();
assertThat(cell8.getNodeName()).isEqualTo("cell");
assertThat(cell8.getParent()).isSameAs(column2);
assertThat(cell8.getContext()).isEqualTo("cell");
assertThat(cell8.getDocument()).isSameAs(document1);
assertThatThrownBy(() -> {
cell8.isInline();
}).hasMessageContaining("NotImplementedError");
assertThatThrownBy(() -> {
cell8.isBlock();
}).hasMessageContaining("NotImplementedError");
assertThat(cell8.getAttributes()).containsEntry("colnumber", 2L)
.containsEntry("halign", "left")
.containsEntry("valign", "top")
.containsEntry("width", 1L);
assertThat(cell8.getRoles()).isNullOrEmpty();
assertThat(cell8.isReftext()).isFalse();
assertThat(cell8.getReftext()).isNull();
assertThat(cell8.getColumn()).isSameAs(column2);
assertThat(cell8.getColspan()).isEqualTo(0);
assertThat(cell8.getRowspan()).isEqualTo(0);
assertThat(cell8.getText()).isEqualTo("Cell in column 2, row 2");
assertThat(cell8.getSource()).isEqualTo("Cell in column 2, row 2");
assertThat(cell8.getStyle()).isNull();
assertThat(cell8.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT);
assertThat(cell8.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP);
assertThat(cell8.getInnerDocument()).isNull();
// --- Table layout attributes and document-level wrap-up ---
assertThat(table1.getFrame()).isEqualTo("all");
assertThat(table1.getGrid()).isEqualTo("all");
assertThat(document1.getStructuredDoctitle()).isNull();
assertThat(document1.getDoctitle()).isNull();
assertThat(document1.getOptions()).containsEntry("header_footer", false);
}
// end::assert-code[]
@Override
// tag::mock-code[]
public Document createMock() {
Document mockDocument1 = mock(Document.class);
when(mockDocument1.getId()).thenReturn(null);
when(mockDocument1.getNodeName()).thenReturn("document");
when(mockDocument1.getParent()).thenReturn(null);
when(mockDocument1.getContext()).thenReturn("document");
when(mockDocument1.getDocument()).thenReturn(mockDocument1);
when(mockDocument1.isInline()).thenReturn(false);
when(mockDocument1.isBlock()).thenReturn(true);
Map<String, Object> map1 = new HashMap<>();
map1.put("doctype", "article");
map1.put("example-caption", "Example");
map1.put("figure-caption", "Figure");
map1.put("filetype", "html");
map1.put("notitle", "");
map1.put("prewrap", "");
map1.put("table-caption", "Table");
when(mockDocument1.getAttributes()).thenReturn(map1);
when(mockDocument1.getRoles()).thenReturn(Collections.emptyList());
when(mockDocument1.isReftext()).thenReturn(false);
when(mockDocument1.getReftext()).thenReturn(null);
when(mockDocument1.getCaption()).thenReturn(null);
when(mockDocument1.getTitle()).thenReturn(null);
when(mockDocument1.getStyle()).thenReturn(null);
when(mockDocument1.getLevel()).thenReturn(0);
when(mockDocument1.getContentModel()).thenReturn("compound");
when(mockDocument1.getSourceLocation()).thenReturn(null);
when(mockDocument1.getSubstitutions()).thenReturn(Collections.emptyList());
Table mockTable1 = mock(Table.class);
when(mockTable1.getId()).thenReturn(null);
when(mockTable1.getNodeName()).thenReturn("table");
when(mockTable1.getParent()).thenReturn(mockDocument1);
when(mockTable1.getContext()).thenReturn("table");
when(mockTable1.getDocument()).thenReturn(mockDocument1);
when(mockTable1.isInline()).thenReturn(false);
when(mockTable1.isBlock()).thenReturn(true);
Map<String, Object> map2 = new HashMap<>();
map2.put("colcount", 2L);
map2.put("footer-option", "");
map2.put("header-option", "");
map2.put("options", "footer,header");
map2.put("rowcount", 4L);
map2.put("style", "table");
map2.put("tablepcwidth", 100L);
when(mockTable1.getAttributes()).thenReturn(map2);
when(mockTable1.getRoles()).thenReturn(Collections.emptyList());
when(mockTable1.isReftext()).thenReturn(false);
when(mockTable1.getReftext()).thenReturn(null);
when(mockTable1.getCaption()).thenReturn(null);
when(mockTable1.getTitle()).thenReturn(null);
when(mockTable1.getStyle()).thenReturn("table");
when(mockTable1.getLevel()).thenReturn(0);
when(mockTable1.getContentModel()).thenReturn("compound");
when(mockTable1.getSourceLocation()).thenReturn(null);
when(mockTable1.getSubstitutions()).thenReturn(Collections.emptyList());
when(mockTable1.getBlocks()).thenReturn(Collections.emptyList());
when(mockTable1.hasHeaderOption()).thenReturn(true);
Column mockColumn1 = mock(Column.class);
when(mockColumn1.getId()).thenReturn(null);
when(mockColumn1.getNodeName()).thenReturn("column");
when(mockColumn1.getParent()).thenReturn(mockTable1);
when(mockColumn1.getContext()).thenReturn("column");
when(mockColumn1.getDocument()).thenReturn(mockDocument1);
when(mockColumn1.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockColumn1.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map3 = new HashMap<>();
map3.put("colnumber", 1L);
map3.put("colpcwidth", 50L);
map3.put("halign", "left");
map3.put("valign", "top");
map3.put("width", 1L);
when(mockColumn1.getAttributes()).thenReturn(map3);
when(mockColumn1.getRoles()).thenReturn(Collections.emptyList());
when(mockColumn1.isReftext()).thenReturn(false);
when(mockColumn1.getReftext()).thenReturn(null);
when(mockColumn1.getStyle()).thenReturn(null);
when(mockColumn1.getTable()).thenReturn(mockTable1);
when(mockColumn1.getColumnNumber()).thenReturn(1);
when(mockColumn1.getWidth()).thenReturn(1);
when(mockColumn1.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockColumn1.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
Column mockColumn2 = mock(Column.class);
when(mockColumn2.getId()).thenReturn(null);
when(mockColumn2.getNodeName()).thenReturn("column");
when(mockColumn2.getParent()).thenReturn(mockTable1);
when(mockColumn2.getContext()).thenReturn("column");
when(mockColumn2.getDocument()).thenReturn(mockDocument1);
when(mockColumn2.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockColumn2.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map4 = new HashMap<>();
map4.put("colnumber", 2L);
map4.put("colpcwidth", 50L);
map4.put("halign", "left");
map4.put("valign", "top");
map4.put("width", 1L);
when(mockColumn2.getAttributes()).thenReturn(map4);
when(mockColumn2.getRoles()).thenReturn(Collections.emptyList());
when(mockColumn2.isReftext()).thenReturn(false);
when(mockColumn2.getReftext()).thenReturn(null);
when(mockColumn2.getStyle()).thenReturn(null);
when(mockColumn2.getTable()).thenReturn(mockTable1);
when(mockColumn2.getColumnNumber()).thenReturn(2);
when(mockColumn2.getWidth()).thenReturn(1);
when(mockColumn2.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockColumn2.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockTable1.getColumns()).thenReturn(Arrays.asList(mockColumn1, mockColumn2));
Row mockRow1 = mock(Row.class);
Cell mockCell1 = mock(Cell.class);
when(mockCell1.getId()).thenReturn(null);
when(mockCell1.getNodeName()).thenReturn("cell");
when(mockCell1.getParent()).thenReturn(mockColumn1);
when(mockCell1.getContext()).thenReturn("cell");
when(mockCell1.getDocument()).thenReturn(mockDocument1);
when(mockCell1.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell1.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map5 = new HashMap<>();
map5.put("colnumber", 1L);
map5.put("halign", "left");
map5.put("valign", "top");
map5.put("width", 1L);
when(mockCell1.getAttributes()).thenReturn(map5);
when(mockCell1.getRoles()).thenReturn(Collections.emptyList());
when(mockCell1.isReftext()).thenReturn(false);
when(mockCell1.getReftext()).thenReturn(null);
when(mockCell1.getColumn()).thenReturn(mockColumn1);
when(mockCell1.getColspan()).thenReturn(0);
when(mockCell1.getRowspan()).thenReturn(0);
when(mockCell1.getText()).thenReturn("Name of Column 1");
when(mockCell1.getSource()).thenReturn("Name of Column 1");
when(mockCell1.getStyle()).thenReturn(null);
when(mockCell1.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell1.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell1.getInnerDocument()).thenReturn(null);
Cell mockCell2 = mock(Cell.class);
when(mockCell2.getId()).thenReturn(null);
when(mockCell2.getNodeName()).thenReturn("cell");
when(mockCell2.getParent()).thenReturn(mockColumn2);
when(mockCell2.getContext()).thenReturn("cell");
when(mockCell2.getDocument()).thenReturn(mockDocument1);
when(mockCell2.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell2.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map6 = new HashMap<>();
map6.put("colnumber", 2L);
map6.put("halign", "left");
map6.put("valign", "top");
map6.put("width", 1L);
when(mockCell2.getAttributes()).thenReturn(map6);
when(mockCell2.getRoles()).thenReturn(Collections.emptyList());
when(mockCell2.isReftext()).thenReturn(false);
when(mockCell2.getReftext()).thenReturn(null);
when(mockCell2.getColumn()).thenReturn(mockColumn2);
when(mockCell2.getColspan()).thenReturn(0);
when(mockCell2.getRowspan()).thenReturn(0);
when(mockCell2.getText()).thenReturn("Name of Column 2");
when(mockCell2.getSource()).thenReturn("Name of Column 2");
when(mockCell2.getStyle()).thenReturn(null);
when(mockCell2.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell2.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell2.getInnerDocument()).thenReturn(null);
when(mockRow1.getCells()).thenReturn(Arrays.asList(mockCell1, mockCell2));
when(mockTable1.getHeader()).thenReturn(Collections.singletonList(mockRow1));
Row mockRow2 = mock(Row.class);
Cell mockCell3 = mock(Cell.class);
when(mockCell3.getId()).thenReturn(null);
when(mockCell3.getNodeName()).thenReturn("cell");
when(mockCell3.getParent()).thenReturn(mockColumn1);
when(mockCell3.getContext()).thenReturn("cell");
when(mockCell3.getDocument()).thenReturn(mockDocument1);
when(mockCell3.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell3.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map7 = new HashMap<>();
map7.put("colnumber", 1L);
map7.put("halign", "left");
map7.put("valign", "top");
map7.put("width", 1L);
when(mockCell3.getAttributes()).thenReturn(map7);
when(mockCell3.getRoles()).thenReturn(Collections.emptyList());
when(mockCell3.isReftext()).thenReturn(false);
when(mockCell3.getReftext()).thenReturn(null);
when(mockCell3.getColumn()).thenReturn(mockColumn1);
when(mockCell3.getColspan()).thenReturn(0);
when(mockCell3.getRowspan()).thenReturn(0);
when(mockCell3.getText()).thenReturn("Footer in column 1, row 3");
when(mockCell3.getSource()).thenReturn("Footer in column 1, row 3");
when(mockCell3.getStyle()).thenReturn(null);
when(mockCell3.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell3.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell3.getInnerDocument()).thenReturn(null);
Cell mockCell4 = mock(Cell.class);
when(mockCell4.getId()).thenReturn(null);
when(mockCell4.getNodeName()).thenReturn("cell");
when(mockCell4.getParent()).thenReturn(mockColumn2);
when(mockCell4.getContext()).thenReturn("cell");
when(mockCell4.getDocument()).thenReturn(mockDocument1);
when(mockCell4.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell4.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map8 = new HashMap<>();
map8.put("colnumber", 2L);
map8.put("halign", "left");
map8.put("valign", "top");
map8.put("width", 1L);
when(mockCell4.getAttributes()).thenReturn(map8);
when(mockCell4.getRoles()).thenReturn(Collections.emptyList());
when(mockCell4.isReftext()).thenReturn(false);
when(mockCell4.getReftext()).thenReturn(null);
when(mockCell4.getColumn()).thenReturn(mockColumn2);
when(mockCell4.getColspan()).thenReturn(0);
when(mockCell4.getRowspan()).thenReturn(0);
when(mockCell4.getText()).thenReturn("Footer in column 2, row 3");
when(mockCell4.getSource()).thenReturn("Footer in column 2, row 3");
when(mockCell4.getStyle()).thenReturn(null);
when(mockCell4.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell4.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell4.getInnerDocument()).thenReturn(null);
when(mockRow2.getCells()).thenReturn(Arrays.asList(mockCell3, mockCell4));
when(mockTable1.getFooter()).thenReturn(Collections.singletonList(mockRow2));
Row mockRow3 = mock(Row.class);
Cell mockCell5 = mock(Cell.class);
when(mockCell5.getId()).thenReturn(null);
when(mockCell5.getNodeName()).thenReturn("cell");
when(mockCell5.getParent()).thenReturn(mockColumn1);
when(mockCell5.getContext()).thenReturn("cell");
when(mockCell5.getDocument()).thenReturn(mockDocument1);
when(mockCell5.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell5.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map9 = new HashMap<>();
map9.put("colnumber", 1L);
map9.put("halign", "left");
map9.put("valign", "top");
map9.put("width", 1L);
when(mockCell5.getAttributes()).thenReturn(map9);
when(mockCell5.getRoles()).thenReturn(Collections.emptyList());
when(mockCell5.isReftext()).thenReturn(false);
when(mockCell5.getReftext()).thenReturn(null);
when(mockCell5.getColumn()).thenReturn(mockColumn1);
when(mockCell5.getColspan()).thenReturn(0);
when(mockCell5.getRowspan()).thenReturn(0);
when(mockCell5.getText()).thenReturn("Cell in column 1, row 1");
when(mockCell5.getSource()).thenReturn("Cell in column 1, row 1");
when(mockCell5.getStyle()).thenReturn(null);
when(mockCell5.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell5.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell5.getInnerDocument()).thenReturn(null);
Cell mockCell6 = mock(Cell.class);
when(mockCell6.getId()).thenReturn(null);
when(mockCell6.getNodeName()).thenReturn("cell");
when(mockCell6.getParent()).thenReturn(mockColumn2);
when(mockCell6.getContext()).thenReturn("cell");
when(mockCell6.getDocument()).thenReturn(mockDocument1);
when(mockCell6.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell6.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map10 = new HashMap<>();
map10.put("colnumber", 2L);
map10.put("halign", "left");
map10.put("valign", "top");
map10.put("width", 1L);
when(mockCell6.getAttributes()).thenReturn(map10);
when(mockCell6.getRoles()).thenReturn(Collections.emptyList());
when(mockCell6.isReftext()).thenReturn(false);
when(mockCell6.getReftext()).thenReturn(null);
when(mockCell6.getColumn()).thenReturn(mockColumn2);
when(mockCell6.getColspan()).thenReturn(0);
when(mockCell6.getRowspan()).thenReturn(0);
when(mockCell6.getText()).thenReturn("Cell in column 2, row 1");
when(mockCell6.getSource()).thenReturn("Cell in column 2, row 1");
when(mockCell6.getStyle()).thenReturn(null);
when(mockCell6.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell6.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell6.getInnerDocument()).thenReturn(null);
when(mockRow3.getCells()).thenReturn(Arrays.asList(mockCell5, mockCell6));
Row mockRow4 = mock(Row.class);
Cell mockCell7 = mock(Cell.class);
when(mockCell7.getId()).thenReturn(null);
when(mockCell7.getNodeName()).thenReturn("cell");
when(mockCell7.getParent()).thenReturn(mockColumn1);
when(mockCell7.getContext()).thenReturn("cell");
when(mockCell7.getDocument()).thenReturn(mockDocument1);
when(mockCell7.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell7.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map11 = new HashMap<>();
map11.put("colnumber", 1L);
map11.put("halign", "left");
map11.put("valign", "top");
map11.put("width", 1L);
when(mockCell7.getAttributes()).thenReturn(map11);
when(mockCell7.getRoles()).thenReturn(Collections.emptyList());
when(mockCell7.isReftext()).thenReturn(false);
when(mockCell7.getReftext()).thenReturn(null);
when(mockCell7.getColumn()).thenReturn(mockColumn1);
when(mockCell7.getColspan()).thenReturn(0);
when(mockCell7.getRowspan()).thenReturn(0);
when(mockCell7.getText()).thenReturn("Cell in column 1, row 2");
when(mockCell7.getSource()).thenReturn("Cell in column 1, row 2");
when(mockCell7.getStyle()).thenReturn(null);
when(mockCell7.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell7.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell7.getInnerDocument()).thenReturn(null);
Cell mockCell8 = mock(Cell.class);
when(mockCell8.getId()).thenReturn(null);
when(mockCell8.getNodeName()).thenReturn("cell");
when(mockCell8.getParent()).thenReturn(mockColumn2);
when(mockCell8.getContext()).thenReturn("cell");
when(mockCell8.getDocument()).thenReturn(mockDocument1);
when(mockCell8.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
when(mockCell8.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError"));
Map<String, Object> map12 = new HashMap<>();
map12.put("colnumber", 2L);
map12.put("halign", "left");
map12.put("valign", "top");
map12.put("width", 1L);
when(mockCell8.getAttributes()).thenReturn(map12);
when(mockCell8.getRoles()).thenReturn(Collections.emptyList());
when(mockCell8.isReftext()).thenReturn(false);
when(mockCell8.getReftext()).thenReturn(null);
when(mockCell8.getColumn()).thenReturn(mockColumn2);
when(mockCell8.getColspan()).thenReturn(0);
when(mockCell8.getRowspan()).thenReturn(0);
when(mockCell8.getText()).thenReturn("Cell in column 2, row 2");
when(mockCell8.getSource()).thenReturn("Cell in column 2, row 2");
when(mockCell8.getStyle()).thenReturn(null);
when(mockCell8.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT);
when(mockCell8.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP);
when(mockCell8.getInnerDocument()).thenReturn(null);
when(mockRow4.getCells()).thenReturn(Arrays.asList(mockCell7, mockCell8));
when(mockTable1.getBody()).thenReturn(Arrays.asList(mockRow3, mockRow4));
when(mockTable1.getFrame()).thenReturn("all");
when(mockTable1.getGrid()).thenReturn("all");
when(mockDocument1.getBlocks()).thenReturn(Collections.singletonList(mockTable1));
when(mockDocument1.getStructuredDoctitle()).thenReturn(null);
when(mockDocument1.getDoctitle()).thenReturn(null);
Map<Object, Object> map13 = new HashMap<>();
map13.put("attributes", "{}");
map13.put("header_footer", false);
when(mockDocument1.getOptions()).thenReturn(map13);
return mockDocument1;
}
// end::mock-code[]
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.jstorm.metric;
import backtype.storm.generated.MetricInfo;
import com.alibaba.jstorm.common.metric.*;
import com.alibaba.jstorm.common.metric.snapshot.AsmSnapshot;
import com.alibaba.jstorm.utils.NetWorkUtils;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.*;
/**
* @author Cody (weiyue.wy@alibaba-inc.com)
* @since 2.0.5
*/
public class JStormMetrics implements Serializable {
    private static final long serialVersionUID = -2580242512743243267L;

    // Reserved pseudo-topology keys used to file cluster-level metrics
    // alongside ordinary topology metrics.
    public static final String NIMBUS_METRIC_KEY = "__NIMBUS__";
    public static final String CLUSTER_METRIC_KEY = "__CLUSTER__";
    public static final String SUPERVISOR_METRIC_KEY = "__SUPERVISOR__";

    protected static final Logger LOG = LoggerFactory.getLogger(JStormMetrics.class);

    /**
     * Metrics in this object will be uploaded to nimbus
     */
    protected static final AsmMetricRegistry workerMetrics = new AsmMetricRegistry();
    protected static final AsmMetricRegistry nettyMetrics = new AsmMetricRegistry();
    protected static final AsmMetricRegistry componentMetrics = new AsmMetricRegistry();
    protected static final AsmMetricRegistry taskMetrics = new AsmMetricRegistry();
    protected static final AsmMetricRegistry streamMetrics = new AsmMetricRegistry();
    protected static final AsmMetricRegistry topologyMetrics = new AsmMetricRegistry();

    // Lookup order for find(): narrowest scope (stream) first, widest (topology) last.
    protected static final AsmMetricRegistry[] allRegistries = {
            streamMetrics, taskMetrics, componentMetrics, workerMetrics, nettyMetrics, topologyMetrics};

    // Identity of the current worker; filled in by setters (host from the
    // static initializer below) and spliced into metric names by
    // workerMetricName()/fixNameIfPossible().
    protected static String topologyId;
    protected static String host;
    protected static int port;
    protected static boolean debug;

    public static final String DEFAULT_GROUP = "sys";
    public static final String NETTY_GROUP = "netty";

    // Fully-qualified metric names to print verbosely when debug is enabled.
    // NOTE(review): a plain HashSet mutated by addDebugMetrics() — appears to
    // assume single-threaded configuration; confirm before concurrent use.
    protected static Set<String> debugMetricNames = new HashSet<String>();

    static {
        host = NetWorkUtils.ip();
    }

    private static boolean enabled = true;

    // ---- accessors for the static worker identity / flags ----

    public static int getPort() {
        return port;
    }

    public static void setPort(int port) {
        JStormMetrics.port = port;
    }

    public static String getHost() {
        return host;
    }

    public static void setHost(String host) {
        JStormMetrics.host = host;
    }

    public static String getTopologyId() {
        return topologyId;
    }

    public static void setTopologyId(String topologyId) {
        JStormMetrics.topologyId = topologyId;
    }

    public static boolean isDebug() {
        return debug;
    }

    public static void setDebug(boolean debug) {
        JStormMetrics.debug = debug;
        LOG.info("topology metrics debug enabled:{}", debug);
    }

    public static void setEnabled(boolean enabled) {
        JStormMetrics.enabled = enabled;
    }

    public static boolean isEnabled() {
        return enabled;
    }

    /**
     * Builds a fully-qualified worker metric name from the statically
     * configured topology id, host and port.
     */
    public static String workerMetricName(String name, MetricType type) {
        return MetricUtils.workerMetricName(topologyId, host, port, name, type);
    }

    /**
     * Parses a comma-separated list of metric names and adds each non-blank,
     * trimmed entry to the debug name set.
     *
     * @param names comma-separated metric names
     */
    public static void addDebugMetrics(String names) {
        String[] metrics = names.split(",");
        for (String metric : metrics) {
            metric = metric.trim();
            if (!StringUtils.isBlank(metric)) {
                debugMetricNames.add(metric);
            }
        }
        LOG.info("debug metric names:{}", Joiner.on(",").join(debugMetricNames));
    }

    /**
     * reserve for debug purposes: looks a metric up by exact name across all
     * registries in {@code allRegistries} order; returns null when absent.
     */
    public static AsmMetric find(String name) {
        for (AsmMetricRegistry registry : allRegistries) {
            AsmMetric metric = registry.getMetric(name);
            if (metric != null) {
                return metric;
            }
        }
        return null;
    }

    /**
     * Registers a stream-level metric. For aggregatable metrics, clones are
     * additionally registered at coarser levels (task, component, optionally a
     * "merge" component and the whole topology) and associated with the stream
     * metric so that updates fan out to the rollups.
     *
     * @param name          fully-qualified stream metric name (normalized via fixNameIfPossible)
     * @param metric        the metric instance to register
     * @param mergeTopology whether to also roll up into a topology-level metric
     * @return the metric held by the stream registry — presumably the existing
     *         instance on duplicate registration (see main() below); confirm in
     *         AsmMetricRegistry.register
     */
    public static AsmMetric registerStreamMetric(String name, AsmMetric metric, boolean mergeTopology) {
        name = fixNameIfPossible(name);
        LOG.info("register stream metric:{}", name);
        AsmMetric ret = streamMetrics.register(name, metric);
        if (metric.isAggregate()) {
            List<AsmMetric> assocMetrics = new ArrayList<>();
            // task-level rollup
            String taskMetricName = MetricUtils.stream2taskName(name);
            AsmMetric taskMetric = taskMetrics.register(taskMetricName, metric.clone());
            assocMetrics.add(taskMetric);
            // component-level rollup
            String compMetricName = MetricUtils.task2compName(taskMetricName);
            AsmMetric componentMetric = componentMetrics.register(compMetricName, taskMetric.clone());
            assocMetrics.add(componentMetric);
            String metricName = MetricUtils.getMetricName(name);
            // a '.' in the bare metric name triggers an extra merged-component rollup
            // NOTE(review): exact semantics defined by MetricUtils.task2MergeCompName
            if (metricName.contains(".")){
                compMetricName = MetricUtils.task2MergeCompName(taskMetricName);
                AsmMetric mergeCompMetric = componentMetrics.register(compMetricName, taskMetric.clone());
                assocMetrics.add(mergeCompMetric);
            }
            if (mergeTopology){
                String topologyMetricName = MetricUtils.comp2topologyName(compMetricName);
                AsmMetric topologyMetric = topologyMetrics.register(topologyMetricName, ret.clone());
                assocMetrics.add(topologyMetric);
            }
            ret.addAssocMetrics(assocMetrics.toArray(new AsmMetric[assocMetrics.size()]));
        }
        return ret;
    }

    /**
     * Registers a task-level metric; aggregatable metrics also get an
     * associated component-level rollup clone.
     */
    public static AsmMetric registerTaskMetric(String name, AsmMetric metric) {
        name = fixNameIfPossible(name);
        AsmMetric ret = taskMetrics.register(name, metric);
        if (metric.isAggregate()) {
            String compMetricName = MetricUtils.task2compName(name);
            AsmMetric componentMetric = componentMetrics.register(compMetricName, ret.clone());
            ret.addAssocMetrics(componentMetric);
        }
        return ret;
    }

    /** Registers a worker-level metric with no rollups. */
    public static AsmMetric registerWorkerMetric(String name, AsmMetric metric) {
        name = fixNameIfPossible(name);
        return workerMetrics.register(name, metric);
    }

    /**
     * Registers a worker-level metric and unconditionally associates a
     * topology-level rollup clone with it.
     */
    public static AsmMetric registerWorkerTopologyMetric(String name, AsmMetric metric) {
        name = fixNameIfPossible(name);
        AsmMetric ret = workerMetrics.register(name, metric);
        String topologyMetricName = MetricUtils.worker2topologyName(name);
        AsmMetric topologyMetric = topologyMetrics.register(topologyMetricName, ret.clone());
        ret.addAssocMetrics(topologyMetric);
        return ret;
    }

    /** Registers a netty metric; note the name is normalized with the NETTY group. */
    public static AsmMetric registerNettyMetric(String name, AsmMetric metric) {
        name = fixNameIfPossible(name, NETTY_GROUP);
        return nettyMetrics.register(name, metric);
    }

    /**
     * simplified helper method to register a worker histogram
     * NOTE(review): these helpers build the name with port 0, not the
     * configured port — confirm that is intentional.
     *
     * @param topologyId topology id
     * @param name metric name, NOTE it's not a full-qualified name.
     * @param histogram histogram
     * @return registered histogram
     */
    public static AsmHistogram registerWorkerHistogram(String topologyId, String name, AsmHistogram histogram) {
        return (AsmHistogram) registerWorkerMetric(
                MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.HISTOGRAM), histogram);
    }

    /**
     * simplified helper method to register a worker gauge
     */
    public static AsmGauge registerWorkerGauge(String topologyId, String name, AsmGauge gauge) {
        return (AsmGauge) registerWorkerMetric(
                MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.GAUGE), gauge);
    }

    /**
     * simplified helper method to register a worker meter
     */
    public static AsmMeter registerWorkerMeter(String topologyId, String name, AsmMeter meter) {
        return (AsmMeter) registerWorkerMetric(
                MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.METER), meter);
    }

    /**
     * simplified helper method to register a worker counter
     */
    public static AsmCounter registerWorkerCounter(String topologyId, String name, AsmCounter counter) {
        return (AsmCounter) registerWorkerMetric(
                MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.COUNTER), counter);
    }

    /**
     * simplified helper method to register a worker timer
     */
    public static AsmTimer registerWorkerTimer(String topologyId, String name, AsmTimer timer) {
        return (AsmTimer) registerWorkerMetric(
                MetricUtils.workerMetricName(topologyId, host, 0, name, MetricType.TIMER), timer);
    }

    // ---- per-registry lookups; names are normalized before the lookup ----

    public static AsmMetric getStreamMetric(String name) {
        name = fixNameIfPossible(name);
        return streamMetrics.getMetric(name);
    }

    public static AsmMetric getTaskMetric(String name) {
        name = fixNameIfPossible(name);
        return taskMetrics.getMetric(name);
    }

    public static AsmMetric getComponentMetric(String name) {
        name = fixNameIfPossible(name);
        return componentMetrics.getMetric(name);
    }

    public static AsmMetric getWorkerMetric(String name) {
        name = fixNameIfPossible(name);
        return workerMetrics.getMetric(name);
    }

    // ---- removals; only the named registry is touched (no rollup cleanup here) ----

    public static void unregisterWorkerMetric(String name) {
        name = fixNameIfPossible(name);
        workerMetrics.remove(name);
    }

    public static void unregisterNettyMetric(String name) {
        name = fixNameIfPossible(name, NETTY_GROUP);
        nettyMetrics.remove(name);
    }

    public static void unregisterTaskMetric(String name) {
        name = fixNameIfPossible(name);
        taskMetrics.remove(name);
    }

    public static AsmMetricRegistry getNettyMetrics() {
        return nettyMetrics;
    }

    public static AsmMetricRegistry getWorkerMetrics() {
        return workerMetrics;
    }

    public static AsmMetricRegistry getComponentMetrics() {
        return componentMetrics;
    }

    public static AsmMetricRegistry getTaskMetrics() {
        return taskMetrics;
    }

    public static AsmMetricRegistry getStreamMetrics() {
        return streamMetrics;
    }

    /**
     * convert snapshots to thrift objects, note that timestamps are aligned to min during the conversion,
     * so nimbus server will get snapshots with aligned timestamps (still in ms as TDDL will use it).
     */
    public static MetricInfo computeAllMetrics() {
        long start = System.currentTimeMillis();
        MetricInfo metricInfo = MetricUtils.mkMetricInfo();
        // gather every registered metric from all scopes into one flat list
        List<Map.Entry<String, AsmMetric>> entries = Lists.newArrayList();
        entries.addAll(streamMetrics.metrics.entrySet());
        entries.addAll(taskMetrics.metrics.entrySet());
        entries.addAll(componentMetrics.metrics.entrySet());
        entries.addAll(workerMetrics.metrics.entrySet());
        entries.addAll(nettyMetrics.metrics.entrySet());
        entries.addAll(topologyMetrics.metrics.entrySet());
        for (Map.Entry<String, AsmMetric> entry : entries) {
            String name = entry.getKey();
            AsmMetric metric = entry.getValue();
            Map<Integer, AsmSnapshot> snapshots = metric.getSnapshots();
            // op is a bitmask: LOG prints locally, REPORT ships to nimbus
            int op = metric.getOp();
            if ((op & AsmMetric.MetricOp.LOG) == AsmMetric.MetricOp.LOG) {
                MetricUtils.printMetricSnapshot(metric, snapshots);
            }
            if ((op & AsmMetric.MetricOp.REPORT) == AsmMetric.MetricOp.REPORT) {
                MetaType metaType = MetricUtils.metaType(metric.getMetricName());
                try {
                    // convert per concrete metric type; empty snapshot maps are dropped
                    if (metric instanceof AsmCounter) {
                        Map data = MetricUtils.toThriftCounterSnapshots(snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    } else if (metric instanceof AsmGauge) {
                        Map data = MetricUtils.toThriftGaugeSnapshots(snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    } else if (metric instanceof AsmMeter) {
                        Map data = MetricUtils.toThriftMeterSnapshots(snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    } else if (metric instanceof AsmHistogram) {
                        Map data = MetricUtils.toThriftHistoSnapshots(metaType, snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    } else if (metric instanceof AsmTimer) {
                        Map data = MetricUtils.toThriftTimerSnapshots(metaType, snapshots);
                        putIfNotEmpty(metricInfo.get_metrics(), name, data);
                    }
                } catch (Exception ex) {
                    // keep going: one bad metric must not lose the whole report
                    LOG.error("Error", ex);
                }
            }
        }
        if (debug) {
            MetricUtils.printMetricInfo(metricInfo, debugMetricNames);
        }
        LOG.info("compute all metrics, cost:{}", System.currentTimeMillis() - start);
        return metricInfo;
    }

    /** Puts {@code data} into {@code base} under {@code name} only when non-null and non-empty. */
    @SuppressWarnings("unchecked")
    public static <T extends Map> void putIfNotEmpty(Map base, String name, T data) {
        if (data != null && data.size() > 0) {
            base.put(name, data);
        }
    }

    /** Normalizes a metric name using the default ("sys") group. */
    public static String fixNameIfPossible(String name) {
        return fixNameIfPossible(name, DEFAULT_GROUP);
    }

    /**
     * Fills in missing segments of a DELIM-separated metric name: an empty
     * topology id (parts[1]) becomes the current topology id; for non-worker
     * metrics an empty group (parts[5]) becomes {@code group}; otherwise an
     * empty host (parts[2]) is replaced by the current host/port and the group
     * at parts[4] is defaulted.
     * NOTE(review): the segment layout (indices 1-5) is dictated by
     * MetricUtils.DELIM/concat — confirm against MetricUtils before changing.
     */
    public static String fixNameIfPossible(String name, String group) {
        MetaType type = MetricUtils.metaType(name);
        String[] parts = name.split(MetricUtils.DELIM);
        if (parts[1].equals("")) {
            parts[1] = topologyId;
        }
        if (type != MetaType.WORKER && parts[5].equals("")) {
            parts[5] = group;
        } else if (parts[2].equals("")) {
            parts[2] = host;
            parts[3] = port + "";
            if (parts[4].equals("")) {
                parts[4] = group;
            }
        }
        return MetricUtils.concat(parts);
    }

    // Manual smoke test for metric-name construction and duplicate registration.
    public static void main(String[] args) throws Exception {
        JStormMetrics.topologyId = "topologyId";
        JStormMetrics.host = "127.0.0.1";
        JStormMetrics.port = 6800;
        String tpId = "test";
        String compName = "bolt";
        int taskId = 1;
        String streamId = "defaultStream";
        String type = MetaType.STREAM.getV() + MetricType.COUNTER.getV();
        String metricName = "counter1";
        String group = "udf";
        String name = MetricUtils.metricName(type, tpId, compName, taskId, streamId, group, metricName);
        System.out.println(name);
        AsmCounter counter = new AsmCounter();
        AsmMetric ret1 = JStormMetrics.registerStreamMetric(name, counter, false);
        AsmMetric ret2 = JStormMetrics.registerStreamMetric(name, counter, false);
        System.out.println(ret1 == ret2);
        counter.update(1L);
        metricName = MetricUtils.workerMetricName("metric1", MetricType.COUNTER);
        System.out.println(metricName);
        metricName = fixNameIfPossible(metricName);
        System.out.println(metricName);
        System.out.println(fixNameIfPossible(metricName));
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.suggest.completion;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.suggest.document.ContextSuggestField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.CompletionFieldMapper.CompletionFieldType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.search.suggest.completion.context.CategoryContextMapping;
import org.elasticsearch.search.suggest.completion.context.ContextBuilder;
import org.elasticsearch.search.suggest.completion.context.ContextMapping;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.util.List;
import java.util.Set;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class CategoryContextMappingTests extends ESSingleNodeTestCase {
/**
 * Entries indexed without any context values should still produce one
 * suggest field per input string (7 inputs across the three entries here).
 */
public void testIndexingWithNoContexts() throws Exception {
    // completion field with a single category context named "ctx"
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    Mapper completionMapper = mapper.mappers().getMapper("completion");
    // three completion entries, no "contexts" object anywhere
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startArray("completion")
                    .startObject()
                        .array("input", "suggestion1", "suggestion2")
                        .field("weight", 3)
                    .endObject()
                    .startObject()
                        .array("input", "suggestion3", "suggestion4")
                        .field("weight", 4)
                    .endObject()
                    .startObject()
                        .array("input", "suggestion5", "suggestion6", "suggestion7")
                        .field("weight", 5)
                    .endObject()
                .endArray()
            .endObject();
    ParsedDocument doc = mapper.parse(
            SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON));
    IndexableField[] suggestFields = doc.rootDoc().getFields(completionMapper.name());
    assertContextSuggestFields(suggestFields, 7);
}
/**
 * A single string-valued category context on an entry with three inputs
 * yields three context-enabled suggest fields.
 */
public void testIndexingWithSimpleContexts() throws Exception {
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    Mapper completionMapper = mapper.mappers().getMapper("completion");
    // one entry with a plain string context value
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startArray("completion")
                    .startObject()
                        .array("input", "suggestion5", "suggestion6", "suggestion7")
                        .startObject("contexts")
                            .field("ctx", "ctx1")
                        .endObject()
                        .field("weight", 5)
                    .endObject()
                .endArray()
            .endObject();
    ParsedDocument doc = mapper.parse(
            SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON));
    IndexableField[] suggestFields = doc.rootDoc().getFields(completionMapper.name());
    assertContextSuggestFields(suggestFields, 3);
}
/**
 * A numeric context value is accepted and coerced, producing the same three
 * suggest fields as a string context would.
 */
public void testIndexingWithSimpleNumberContexts() throws Exception {
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    Mapper completionMapper = mapper.mappers().getMapper("completion");
    // context supplied as a number rather than a string
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startArray("completion")
                    .startObject()
                        .array("input", "suggestion5", "suggestion6", "suggestion7")
                        .startObject("contexts")
                            .field("ctx", 100)
                        .endObject()
                        .field("weight", 5)
                    .endObject()
                .endArray()
            .endObject();
    ParsedDocument doc = mapper.parse(
            SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON));
    IndexableField[] suggestFields = doc.rootDoc().getFields(completionMapper.name());
    assertContextSuggestFields(suggestFields, 3);
}
/**
 * A boolean context value is accepted and coerced, producing the same three
 * suggest fields as a string context would.
 */
public void testIndexingWithSimpleBooleanContexts() throws Exception {
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    Mapper completionMapper = mapper.mappers().getMapper("completion");
    // context supplied as a boolean rather than a string
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startArray("completion")
                    .startObject()
                        .array("input", "suggestion5", "suggestion6", "suggestion7")
                        .startObject("contexts")
                            .field("ctx", true)
                        .endObject()
                        .field("weight", 5)
                    .endObject()
                .endArray()
            .endObject();
    ParsedDocument doc = mapper.parse(
            SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON));
    IndexableField[] suggestFields = doc.rootDoc().getFields(completionMapper.name());
    assertContextSuggestFields(suggestFields, 3);
}
/**
 * A null context value must be rejected at parse time with a descriptive
 * MapperParsingException.
 */
public void testIndexingWithSimpleNULLContexts() throws Exception {
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    // "contexts": { "ctx": null } — illegal context value
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startArray("completion")
                    .startObject()
                        .array("input", "suggestion5", "suggestion6", "suggestion7")
                        .startObject("contexts")
                            .nullField("ctx")
                        .endObject()
                        .field("weight", 5)
                    .endObject()
                .endArray()
            .endObject();
    Exception expected = expectThrows(MapperParsingException.class,
            () -> mapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON)));
    assertEquals("contexts must be a string, number or boolean or a list of string, number or boolean, but was [VALUE_NULL]",
            expected.getCause().getMessage());
}
/**
 * A list of string context values (and the single-object, non-array form of
 * the completion field) indexes correctly into three suggest fields.
 */
public void testIndexingWithContextList() throws Exception {
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    Mapper completionMapper = mapper.mappers().getMapper("completion");
    // note: "completion" is a single object here, not an array of entries
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startObject("completion")
                    .array("input", "suggestion5", "suggestion6", "suggestion7")
                    .startObject("contexts")
                        .array("ctx", "ctx1", "ctx2", "ctx3")
                    .endObject()
                    .field("weight", 5)
                .endObject()
            .endObject();
    ParsedDocument doc = mapper.parse(
            SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON));
    IndexableField[] suggestFields = doc.rootDoc().getFields(completionMapper.name());
    assertContextSuggestFields(suggestFields, 3);
}
/**
 * A context list mixing string, boolean and number values is accepted — each
 * element is coerced to a category value.
 */
public void testIndexingWithMixedTypeContextList() throws Exception {
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    Mapper completionMapper = mapper.mappers().getMapper("completion");
    // mixed-type context array: string, boolean, number
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startObject("completion")
                    .array("input", "suggestion5", "suggestion6", "suggestion7")
                    .startObject("contexts")
                        .array("ctx", "ctx1", true, 100)
                    .endObject()
                    .field("weight", 5)
                .endObject()
            .endObject();
    ParsedDocument doc = mapper.parse(
            SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON));
    IndexableField[] suggestFields = doc.rootDoc().getFields(completionMapper.name());
    assertContextSuggestFields(suggestFields, 3);
}
/**
 * A context list containing a null element must be rejected at parse time,
 * even when the other elements are valid.
 */
public void testIndexingWithMixedTypeContextListHavingNULL() throws Exception {
    XContentBuilder mappingBuilder = jsonBuilder().startObject().startObject("type1")
            .startObject("properties").startObject("completion")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "ctx")
                        .field("type", "category")
                    .endObject()
                .endArray()
            .endObject().endObject()
            .endObject().endObject();
    DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser()
            .parse("type1", new CompressedXContent(Strings.toString(mappingBuilder)));
    // valid values followed by an illegal null element
    XContentBuilder source = jsonBuilder()
            .startObject()
                .startObject("completion")
                    .array("input", "suggestion5", "suggestion6", "suggestion7")
                    .startObject("contexts")
                        .array("ctx", "ctx1", true, 100, null)
                    .endObject()
                    .field("weight", 5)
                .endObject()
            .endObject();
    Exception expected = expectThrows(MapperParsingException.class,
            () -> mapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(source), XContentType.JSON)));
    assertEquals("context array must have string, number or boolean values, but was [VALUE_NULL]",
            expected.getCause().getMessage());
}
// A completion field mapped with two category contexts ("ctx" and "type") should
// index one ContextSuggestField per input suggestion when a document supplies
// values for both contexts.
public void testIndexingWithMultipleContexts() throws Exception {
// Mapping: completion field with two named category contexts.
String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("completion")
.field("type", "completion")
.startArray("contexts")
.startObject()
.field("name", "ctx")
.field("type", "category")
.endObject()
.startObject()
.field("name", "type")
.field("type", "category")
.endObject()
.endArray()
.endObject().endObject()
.endObject().endObject());
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
Mapper fieldMapper = defaultMapper.mappers().getMapper("completion");
// Document: three input suggestions, each carrying values for both contexts.
XContentBuilder builder = jsonBuilder()
.startObject()
.startArray("completion")
.startObject()
.array("input", "suggestion5", "suggestion6", "suggestion7")
.field("weight", 5)
.startObject("contexts")
.array("ctx", "ctx1", "ctx2", "ctx3")
.array("type", "typr3", "ftg")
.endObject()
.endObject()
.endArray()
.endObject();
ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference.bytes(builder),
XContentType.JSON));
IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
// One context-suggest field per input suggestion.
assertContextSuggestFields(fields, 3);
}
public void testQueryContextParsingBasic() throws Exception {
    // A bare string value should parse into a single category context with the
    // default boost (1) and prefix matching disabled.
    XContentBuilder queryContext = jsonBuilder().value("context1");
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    assertThat(parsed.size(), equalTo(1));
    ContextMapping.InternalQueryContext only = parsed.get(0);
    assertThat(only.context, equalTo("context1"));
    assertThat(only.boost, equalTo(1));
    assertThat(only.isPrefix, equalTo(false));
}
public void testBooleanQueryContextParsingBasic() throws Exception {
    // A bare boolean value is coerced to its string form ("true") and parsed as
    // a single context with default boost and no prefix matching.
    XContentBuilder queryContext = jsonBuilder().value(true);
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    assertThat(parsed.size(), equalTo(1));
    ContextMapping.InternalQueryContext only = parsed.get(0);
    assertThat(only.context, equalTo("true"));
    assertThat(only.boost, equalTo(1));
    assertThat(only.isPrefix, equalTo(false));
}
public void testNumberQueryContextParsingBasic() throws Exception {
    // A bare numeric value is coerced to its string form ("10") and parsed as a
    // single context with default boost and no prefix matching.
    XContentBuilder queryContext = jsonBuilder().value(10);
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    assertThat(parsed.size(), equalTo(1));
    ContextMapping.InternalQueryContext only = parsed.get(0);
    assertThat(only.context, equalTo("10"));
    assertThat(only.boost, equalTo(1));
    assertThat(only.isPrefix, equalTo(false));
}
public void testNULLQueryContextParsingBasic() throws Exception {
    // A JSON null is not a legal category context and must be rejected.
    XContentBuilder nullContext = jsonBuilder().nullValue();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(nullContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    XContentParseException e = expectThrows(XContentParseException.class, () -> categoryMapping.parseQueryContext(parser));
    assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean"));
}
public void testQueryContextParsingArray() throws Exception {
    // An array of strings parses into one context per element, each with the
    // default boost (1) and prefix matching disabled.
    XContentBuilder queryContexts = jsonBuilder().startArray()
        .value("context1")
        .value("context2")
        .endArray();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContexts));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    String[] expected = {"context1", "context2"};
    assertThat(parsed.size(), equalTo(expected.length));
    for (int i = 0; i < expected.length; i++) {
        assertThat(parsed.get(i).context, equalTo(expected[i]));
        assertThat(parsed.get(i).boost, equalTo(1));
        assertThat(parsed.get(i).isPrefix, equalTo(false));
    }
}
public void testQueryContextParsingMixedTypeValuesArray() throws Exception {
    // String, boolean and numeric array elements are all accepted; non-string
    // values are coerced to their string representation. All parsed contexts
    // carry the defaults (boost 1, prefix false).
    XContentBuilder queryContexts = jsonBuilder().startArray()
        .value("context1")
        .value("context2")
        .value(true)
        .value(10)
        .endArray();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContexts));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    String[] expected = {"context1", "context2", "true", "10"};
    assertThat(parsed.size(), equalTo(expected.length));
    for (int i = 0; i < expected.length; i++) {
        assertThat(parsed.get(i).context, equalTo(expected[i]));
        assertThat(parsed.get(i).boost, equalTo(1));
        assertThat(parsed.get(i).isPrefix, equalTo(false));
    }
}
public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Exception {
    // A null element anywhere in the value array aborts parsing with an error,
    // even when all the other elements are legal.
    XContentBuilder queryContexts = jsonBuilder().startArray()
        .value("context1")
        .value("context2")
        .value(true)
        .value(10)
        .nullValue()
        .endArray();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContexts));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    XContentParseException e = expectThrows(XContentParseException.class, () -> categoryMapping.parseQueryContext(parser));
    assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean"));
}
public void testQueryContextParsingObject() throws Exception {
    // The object form lets a query context carry an explicit boost and prefix flag.
    XContentBuilder queryContext = jsonBuilder().startObject()
        .field("context", "context1")
        .field("boost", 10)
        .field("prefix", true)
        .endObject();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    assertThat(parsed.size(), equalTo(1));
    ContextMapping.InternalQueryContext only = parsed.get(0);
    assertThat(only.context, equalTo("context1"));
    assertThat(only.boost, equalTo(10));
    assertThat(only.isPrefix, equalTo(true));
}
public void testQueryContextParsingObjectHavingBoolean() throws Exception {
    // A boolean "context" field inside the object form is coerced to its string
    // representation ("false"); boost and prefix come from the object.
    XContentBuilder queryContext = jsonBuilder().startObject()
        .field("context", false)
        .field("boost", 10)
        .field("prefix", true)
        .endObject();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    assertThat(parsed.size(), equalTo(1));
    ContextMapping.InternalQueryContext only = parsed.get(0);
    assertThat(only.context, equalTo("false"));
    assertThat(only.boost, equalTo(10));
    assertThat(only.isPrefix, equalTo(true));
}
public void testQueryContextParsingObjectHavingNumber() throws Exception {
    // A numeric "context" field inside the object form is coerced to its string
    // representation ("333"); boost and prefix come from the object.
    XContentBuilder queryContext = jsonBuilder().startObject()
        .field("context", 333)
        .field("boost", 10)
        .field("prefix", true)
        .endObject();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    assertThat(parsed.size(), equalTo(1));
    ContextMapping.InternalQueryContext only = parsed.get(0);
    assertThat(only.context, equalTo("333"));
    assertThat(only.boost, equalTo(10));
    assertThat(only.isPrefix, equalTo(true));
}
public void testQueryContextParsingObjectHavingNULL() throws Exception {
    // A null "context" field inside the object form must be rejected even when
    // boost and prefix are present and valid.
    XContentBuilder queryContext = jsonBuilder().startObject()
        .nullField("context")
        .field("boost", 10)
        .field("prefix", true)
        .endObject();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContext));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    Exception e = expectThrows(XContentParseException.class, () -> categoryMapping.parseQueryContext(parser));
    assertThat(e.getMessage(), containsString("category context must be a string, number or boolean"));
}
public void testQueryContextParsingObjectArray() throws Exception {
    // An array of object-form contexts parses each element with its own boost
    // and prefix settings, preserving order.
    XContentBuilder queryContexts = jsonBuilder().startArray()
        .startObject()
        .field("context", "context1")
        .field("boost", 2)
        .field("prefix", true)
        .endObject()
        .startObject()
        .field("context", "context2")
        .field("boost", 3)
        .field("prefix", false)
        .endObject()
        .endArray();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContexts));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    assertThat(parsed.size(), equalTo(2));
    ContextMapping.InternalQueryContext first = parsed.get(0);
    assertThat(first.context, equalTo("context1"));
    assertThat(first.boost, equalTo(2));
    assertThat(first.isPrefix, equalTo(true));
    ContextMapping.InternalQueryContext second = parsed.get(1);
    assertThat(second.context, equalTo("context2"));
    assertThat(second.boost, equalTo(3));
    assertThat(second.isPrefix, equalTo(false));
}
public void testQueryContextParsingMixedTypeObjectArray() throws Exception {
    // Object elements whose "context" field is a string, boolean or number are
    // all accepted; non-string values are coerced to their string form. Each
    // element keeps its own boost/prefix settings.
    XContentBuilder queryContexts = jsonBuilder().startArray()
        .startObject()
        .field("context", "context1")
        .field("boost", 2)
        .field("prefix", true)
        .endObject()
        .startObject()
        .field("context", "context2")
        .field("boost", 3)
        .field("prefix", false)
        .endObject()
        .startObject()
        .field("context", true)
        .field("boost", 3)
        .field("prefix", false)
        .endObject()
        .startObject()
        .field("context", 333)
        .field("boost", 3)
        .field("prefix", false)
        .endObject()
        .endArray();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContexts));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    String[] expectedContexts = {"context1", "context2", "true", "333"};
    int[] expectedBoosts = {2, 3, 3, 3};
    boolean[] expectedPrefixes = {true, false, false, false};
    assertThat(parsed.size(), equalTo(expectedContexts.length));
    for (int i = 0; i < expectedContexts.length; i++) {
        assertThat(parsed.get(i).context, equalTo(expectedContexts[i]));
        assertThat(parsed.get(i).boost, equalTo(expectedBoosts[i]));
        assertThat(parsed.get(i).isPrefix, equalTo(expectedPrefixes[i]));
    }
}
// An object element whose "context" field is null poisons the whole array:
// parsing must fail even though the preceding four elements are all legal.
public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Exception {
XContentBuilder builder = jsonBuilder().startArray()
.startObject()
.field("context", "context1")
.field("boost", 2)
.field("prefix", true)
.endObject()
.startObject()
.field("context", "context2")
.field("boost", 3)
.field("prefix", false)
.endObject()
.startObject()
.field("context", true)
.field("boost", 3)
.field("prefix", false)
.endObject()
.startObject()
.field("context", 333)
.field("boost", 3)
.field("prefix", false)
.endObject()
// The illegal element: an explicit null context value.
.startObject()
.nullField("context")
.field("boost", 3)
.field("prefix", false)
.endObject()
.endArray();
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser));
assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean"));
}
public void testQueryContextParsingMixed() throws Exception {
    // Object-form and bare-value elements may be mixed in one array: object
    // elements keep their explicit boost/prefix, bare values get the defaults.
    XContentBuilder queryContexts = jsonBuilder().startArray()
        .startObject()
        .field("context", "context1")
        .field("boost", 2)
        .field("prefix", true)
        .endObject()
        .value("context2")
        .value(false)
        .startObject()
        .field("context", 333)
        .field("boost", 2)
        .field("prefix", true)
        .endObject()
        .endArray();
    XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(queryContexts));
    CategoryContextMapping categoryMapping = ContextBuilder.category("cat").build();
    List<ContextMapping.InternalQueryContext> parsed = categoryMapping.parseQueryContext(parser);
    String[] expectedContexts = {"context1", "context2", "false", "333"};
    int[] expectedBoosts = {2, 1, 1, 2};
    boolean[] expectedPrefixes = {true, false, false, true};
    assertThat(parsed.size(), equalTo(expectedContexts.length));
    for (int i = 0; i < expectedContexts.length; i++) {
        assertThat(parsed.get(i).context, equalTo(expectedContexts[i]));
        assertThat(parsed.get(i).boost, equalTo(expectedBoosts[i]));
        assertThat(parsed.get(i).isPrefix, equalTo(expectedPrefixes[i]));
    }
}
// A null element in a mixed object/value array aborts parsing with an error,
// even though every other element is legal.
public void testQueryContextParsingMixedHavingNULL() throws Exception {
XContentBuilder builder = jsonBuilder().startArray()
.startObject()
.field("context", "context1")
.field("boost", 2)
.field("prefix", true)
.endObject()
.value("context2")
.value(false)
.startObject()
.field("context", 333)
.field("boost", 2)
.field("prefix", true)
.endObject()
// The illegal element: a bare null at the end of the array.
.nullValue()
.endArray();
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
CategoryContextMapping mapping = ContextBuilder.category("cat").build();
XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser));
assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean"));
}
// Requesting a context name that is not declared in the mapping ("brand") must
// fail with a message listing the context names that do exist.
public void testUnknownQueryContextParsing() throws Exception {
// Mapping declares exactly two category contexts: "ctx" and "type".
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("completion")
.field("type", "completion")
.startArray("contexts")
.startObject()
.field("name", "ctx")
.field("type", "category")
.endObject()
.startObject()
.field("name", "type")
.field("type", "category")
.endObject()
.endArray()
.endObject().endObject()
.endObject().endObject();
MapperService mapperService = createIndex("test", Settings.EMPTY, "type1", mapping).mapperService();
CompletionFieldType completionFieldType = (CompletionFieldType) mapperService.fullName("completion");
Exception e = expectThrows(IllegalArgumentException.class, () -> completionFieldType.getContextMappings().get("brand"));
assertEquals("Unknown context name [brand], must be one of [ctx, type]", e.getMessage());
}
// Exercises CategoryContextMapping.parseContext(Document): context values are
// read from indexed keyword/text fields, doc-values-only and stored-only fields
// are ignored, and unsupported field types (e.g. points) raise an error.
public void testParsingContextFromDocument() throws Exception {
CategoryContextMapping mapping = ContextBuilder.category("cat").field("category").build();
ParseContext.Document document = new ParseContext.Document();
KeywordFieldMapper.KeywordFieldType keyword = new KeywordFieldMapper.KeywordFieldType();
keyword.setName("category");
document.add(new Field(keyword.name(), new BytesRef("category1"), keyword));
// Ignore doc values
document.add(new SortedSetDocValuesField(keyword.name(), new BytesRef("category1")));
Set<String> context = mapping.parseContext(document);
assertThat(context.size(), equalTo(1));
assertTrue(context.contains("category1"));
// Same value indexed as a text field: still picked up exactly once.
document = new ParseContext.Document();
TextFieldMapper.TextFieldType text = new TextFieldMapper.TextFieldType();
text.setName("category");
document.add(new Field(text.name(), "category1", text));
// Ignore stored field
document.add(new StoredField(text.name(), "category1", text));
context = mapping.parseContext(document);
assertThat(context.size(), equalTo(1));
assertTrue(context.contains("category1"));
// Doc-values-only fields contribute no context values.
document = new ParseContext.Document();
document.add(new SortedSetDocValuesField("category", new BytesRef("category")));
context = mapping.parseContext(document);
assertThat(context.size(), equalTo(0));
document = new ParseContext.Document();
document.add(new SortedDocValuesField("category", new BytesRef("category")));
context = mapping.parseContext(document);
assertThat(context.size(), equalTo(0));
// A numeric point field is not a valid context source and must fail.
final ParseContext.Document doc = new ParseContext.Document();
doc.add(new IntPoint("category", 36));
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> mapping.parseContext(doc));
assertThat(exc.getMessage(), containsString("Failed to parse context field [category]"));
}
// Asserts that exactly {@code expected} of the given indexed fields are
// ContextSuggestField instances; all other field types are ignored.
static void assertContextSuggestFields(IndexableField[] fields, int expected) {
    int contextSuggestFieldCount = 0;
    for (int i = 0; i < fields.length; i++) {
        if (fields[i] instanceof ContextSuggestField) {
            contextSuggestFieldCount++;
        }
    }
    assertThat(contextSuggestFieldCount, equalTo(expected));
}
}
| |
package com.produce.ciro.bchart4;
import java.text.FieldPosition;
import java.text.Format;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import android.app.Activity;
import android.app.Fragment;
import android.app.LoaderManager;
import android.content.CursorLoader;
import android.content.Loader;
import android.database.Cursor;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.androidplot.xy.LineAndPointFormatter;
import com.androidplot.xy.PointLabelFormatter;
import com.androidplot.xy.PointLabeler;
import com.androidplot.xy.SimpleXYSeries;
import com.androidplot.xy.XYPlot;
import com.androidplot.xy.XYSeries;
/**
 * Fragment that plots transaction prices over time with AndroidPlot, loading
 * rows from a content provider via a {@link CursorLoader}. The first load draws
 * the whole result set; later loads prepend only rows newer than the last
 * plotted transaction id.
 */
public class TransactionFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor> {
    private static final String TAG = TransactionFragment.class.getSimpleName();

    // Identifies a particular Loader being used in this component
    private static final int TRANSACTION_LOADER = 0;

    // Plot widget; bound in onResume() and released in onDetach().
    private XYPlot _plot1;

    // Series backing the plot; incremental updates prepend new points.
    private SimpleXYSeries _series;

    // Highest transaction id plotted so far; 0 means "nothing plotted yet".
    private long _tempID;

    public TransactionFragment() {
        // Empty constructor required for fragment subclasses
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Log.i(TAG, "on create");
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        Log.i(TAG, "on createView");
        /*
         * Initializes the CursorLoader. The URL_LOADER value is eventually passed to onCreateLoader().
         */
        getLoaderManager().initLoader(TRANSACTION_LOADER, null, this);
        return inflater.inflate(R.layout.fragment_chart, container, false);
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        Log.i(TAG, "on attach");
    }

    @Override
    public void onResume() {
        super.onResume();
        _tempID = 0;
        _plot1 = (XYPlot) getView().findViewById(R.id.chart);
        Log.i(TAG, "on resume");
    }

    @Override
    public void onPause() {
        super.onPause();
        // BUG FIX: the old code passed TRIM_MEMORY_BACKGROUND (a ComponentCallbacks2
        // memory-trim level, value 40), which is not a valid View visibility
        // constant. Hide the plot with the proper constant instead.
        _plot1.setVisibility(View.INVISIBLE);
        Log.i(TAG, "on pause");
    }

    @Override
    public void onDetach() {
        _tempID = 0;
        _series = null;
        _plot1.clear();
        _plot1 = null;
        super.onDetach();
        Log.i(TAG, "on detach");
    }

    /**
     * Renders the cursor's transactions into the plot.
     *
     * <p>On the first call ({@code _tempID == 0}) the whole cursor is plotted and
     * the series/formatters are configured. On later calls only rows whose
     * transaction id is greater than the last plotted one are prepended.
     * Assumes the cursor is ordered by transaction id descending — see
     * {@link #onCreateLoader}.
     */
    private void updateTransaction(Cursor cursor) {
        if (_tempID == 0) {
            int n = cursor.getCount();
            Log.i(TAG, "first time ploting transaction size is: " + n);
            // BUG FIX: guard against an empty cursor — the old code read the first
            // row unconditionally and would throw when there were no rows.
            if (!cursor.moveToFirst()) {
                return;
            }
            Number[] time = new Number[n];
            Number[] y = new Number[n];
            int i = 0;
            // Remember the newest (first, since sorted DESC) transaction id.
            _tempID = cursor.getLong(cursor.getColumnIndex(TransactionProviderContract.TRANSACTION_TID_COLUMN));
            while (!cursor.isAfterLast()) {
                time[i] = Long.parseLong(cursor.getString(cursor.getColumnIndex(TransactionProviderContract.TRANSACTION_DATE_COLUMN)));
                y[i] = Double.parseDouble(cursor.getString(cursor.getColumnIndex(TransactionProviderContract.TRANSACTION_PRICE_COLUMN)));
                i++;
                cursor.moveToNext();
            }
            _series = new SimpleXYSeries(Arrays.asList(time), Arrays.asList(y), "Transactions");
            LineAndPointFormatter seriesFormat = new LineAndPointFormatter();
            seriesFormat.setPointLabelFormatter(new PointLabelFormatter());
            seriesFormat.configure(getActivity().getApplicationContext(), R.xml.line_point_formatter);
            seriesFormat.setPointLabeler(new PointLabeler() {
                @Override
                public String getLabel(XYSeries series, int index) {
                    // Label every 10th point only, to keep the plot readable.
                    return index % 10 == 0 ? series.getY(index) + "" : "";
                }
            });
            _plot1.addSeries(_series, seriesFormat);
            // reduce the number of range labels
            _plot1.setTicksPerRangeLabel(3);
            _plot1.getGraphWidget().setDomainLabelOrientation(-45);
            // customize our domain/range labels
            _plot1.setDomainLabel("Time");
            _plot1.setRangeLabel("Price");
            _plot1.setDomainValueFormat(new Format() {
                // create a simple date format that draws on the year portion of our timestamp.
                // see http://download.oracle.com/javase/1.4.2/docs/api/java/text/SimpleDateFormat.html
                // for a full description of SimpleDateFormat.
                private SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");

                @Override
                public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) {
                    // because our timestamps are in seconds and SimpleDateFormat expects milliseconds
                    // we multiply our timestamp by 1000:
                    long timestamp = ((Number) obj).longValue() * 1000;
                    Date date = new Date(timestamp);
                    return dateFormat.format(date, toAppendTo, pos);
                }

                @Override
                public Object parseObject(String source, ParsePosition pos) {
                    return null;
                }
            });
        } else {
            if (!cursor.moveToFirst()) {
                return;
            }
            int tidIndex = cursor.getColumnIndex(TransactionProviderContract.TRANSACTION_TID_COLUMN);
            long newestId = cursor.getLong(tidIndex);
            // BUG FIX: the old loop assigned temp = _tempID on its first pass, so it
            // consumed at most one row and never advanced _tempID, re-adding the same
            // rows on every load. Walk all rows newer than the last plotted id (the
            // cursor is ordered by tid DESC), then remember the newest id.
            while (!cursor.isAfterLast() && cursor.getLong(tidIndex) > _tempID) {
                long x = Long.parseLong(cursor.getString(cursor.getColumnIndex(TransactionProviderContract.TRANSACTION_DATE_COLUMN)));
                double y = Double.parseDouble(cursor.getString(cursor.getColumnIndex(TransactionProviderContract.TRANSACTION_PRICE_COLUMN)));
                _series.addFirst(x, y);
                cursor.moveToNext();
            }
            _tempID = Math.max(_tempID, newestId);
            Log.i(TAG, "ploting transaction size is: " + _series.size());
        }
        _plot1.redraw();
        // BUG FIX: the old code passed the magic number 1, which is not a valid View
        // visibility value (VISIBLE == 0, INVISIBLE == 4, GONE == 8).
        _plot1.setVisibility(View.VISIBLE);
        _plot1.bringToFront();
    }

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        // This is called when a new Loader needs to be created. This
        // sample only has one Loader, so we don't care about the ID.
        // First, pick the base URI to use depending on whether we are
        // currently filtering.
        /*
         * Takes action based on the ID of the Loader that's being created
         */
        switch (id) {
            case TRANSACTION_LOADER:
                // Returns a new CursorLoader
                String[] projection = { TransactionProviderContract.TRANSACTION_DATE_COLUMN,
                        TransactionProviderContract.TRANSACTION_TID_COLUMN,
                        TransactionProviderContract.TRANSACTION_PRICE_COLUMN,
                        TransactionProviderContract.TRANSACTION_AMOUNT_COLUMN };
                // Only fetch rows newer than the last plotted transaction id.
                String selection = TransactionProviderContract.TRANSACTION_TID_COLUMN + " > " + _tempID;
                Log.i(TAG, "query for TID greater than: " + _tempID);
                // Newest first, capped at 800 rows per load.
                String sortOrder = TransactionProviderContract.TRANSACTION_TID_COLUMN + " DESC" + " LIMIT " + 800;
                return new CursorLoader(getActivity(), // Parent activity context
                        TransactionProviderContract.TRANSACTIONURL_TABLE_CONTENTURI, // Table to query
                        projection, // Projection to return
                        selection, // Selection clause: tid > last plotted id
                        null, // No selection arguments
                        sortOrder // Sort order: tid DESC, limited
                );
            default:
                Log.e(TAG, "invalid id passed in");
                // An invalid id was passed in
                return null;
        }
    }

    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor returnCursor) {
        /*
         * Moves the query results into the adapter, causing the ListView fronting this adapter to re-display
         */
        if (returnCursor != null) {
            updateTransaction(returnCursor);
        }
        // mAdapter.changeCursor(returnCursor);
    }

    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
        // Sets the Adapter's backing data to null. This prevents memory leaks.
        // mAdapter.changeCursor(null);
    }
}
| |
/*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.client.v3;
import org.cloudfoundry.AbstractIntegrationTest;
import org.cloudfoundry.CloudFoundryVersion;
import org.cloudfoundry.IfCloudFoundryVersion;
import org.cloudfoundry.client.CloudFoundryClient;
import org.cloudfoundry.client.v3.applications.ApplicationFeatureResource;
import org.cloudfoundry.client.v3.applications.ApplicationRelationships;
import org.cloudfoundry.client.v3.applications.ApplicationResource;
import org.cloudfoundry.client.v3.applications.CreateApplicationRequest;
import org.cloudfoundry.client.v3.applications.CreateApplicationResponse;
import org.cloudfoundry.client.v3.applications.DeleteApplicationRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationCurrentDropletRelationshipRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationCurrentDropletRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationCurrentDropletResponse;
import org.cloudfoundry.client.v3.applications.GetApplicationEnvironmentRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationEnvironmentVariablesRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationEnvironmentVariablesResponse;
import org.cloudfoundry.client.v3.applications.GetApplicationFeatureRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationFeatureResponse;
import org.cloudfoundry.client.v3.applications.GetApplicationPermissionsRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationPermissionsResponse;
import org.cloudfoundry.client.v3.applications.GetApplicationProcessRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationProcessResponse;
import org.cloudfoundry.client.v3.applications.GetApplicationRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationResponse;
import org.cloudfoundry.client.v3.applications.GetApplicationSshEnabledRequest;
import org.cloudfoundry.client.v3.applications.GetApplicationSshEnabledResponse;
import org.cloudfoundry.client.v3.applications.ListApplicationFeaturesRequest;
import org.cloudfoundry.client.v3.applications.ListApplicationRoutesRequest;
import org.cloudfoundry.client.v3.applications.ListApplicationsRequest;
import org.cloudfoundry.client.v3.applications.ScaleApplicationRequest;
import org.cloudfoundry.client.v3.applications.SetApplicationCurrentDropletRequest;
import org.cloudfoundry.client.v3.applications.SetApplicationCurrentDropletResponse;
import org.cloudfoundry.client.v3.applications.StartApplicationRequest;
import org.cloudfoundry.client.v3.applications.StartApplicationResponse;
import org.cloudfoundry.client.v3.applications.StopApplicationRequest;
import org.cloudfoundry.client.v3.applications.UpdateApplicationEnvironmentVariablesRequest;
import org.cloudfoundry.client.v3.applications.UpdateApplicationFeatureRequest;
import org.cloudfoundry.client.v3.applications.UpdateApplicationRequest;
import org.cloudfoundry.client.v3.builds.BuildState;
import org.cloudfoundry.client.v3.builds.CreateBuildRequest;
import org.cloudfoundry.client.v3.builds.CreateBuildResponse;
import org.cloudfoundry.client.v3.builds.GetBuildRequest;
import org.cloudfoundry.client.v3.builds.GetBuildResponse;
import org.cloudfoundry.client.v3.domains.CreateDomainRequest;
import org.cloudfoundry.client.v3.domains.CreateDomainResponse;
import org.cloudfoundry.client.v3.domains.DomainRelationships;
import org.cloudfoundry.client.v3.droplets.DropletResource;
import org.cloudfoundry.client.v3.droplets.ListDropletsRequest;
import org.cloudfoundry.client.v3.organizations.CreateOrganizationRequest;
import org.cloudfoundry.client.v3.organizations.CreateOrganizationResponse;
import org.cloudfoundry.client.v3.packages.CreatePackageRequest;
import org.cloudfoundry.client.v3.packages.CreatePackageResponse;
import org.cloudfoundry.client.v3.packages.GetPackageRequest;
import org.cloudfoundry.client.v3.packages.GetPackageResponse;
import org.cloudfoundry.client.v3.packages.PackageRelationships;
import org.cloudfoundry.client.v3.packages.PackageState;
import org.cloudfoundry.client.v3.packages.PackageType;
import org.cloudfoundry.client.v3.packages.UploadPackageRequest;
import org.cloudfoundry.client.v3.packages.UploadPackageResponse;
import org.cloudfoundry.client.v3.routes.Application;
import org.cloudfoundry.client.v3.routes.CreateRouteRequest;
import org.cloudfoundry.client.v3.routes.CreateRouteResponse;
import org.cloudfoundry.client.v3.routes.Destination;
import org.cloudfoundry.client.v3.routes.ReplaceRouteDestinationsRequest;
import org.cloudfoundry.client.v3.routes.ReplaceRouteDestinationsResponse;
import org.cloudfoundry.client.v3.routes.RouteRelationships;
import org.cloudfoundry.client.v3.routes.RouteResource;
import org.cloudfoundry.client.v3.spaces.CreateSpaceRequest;
import org.cloudfoundry.client.v3.spaces.CreateSpaceResponse;
import org.cloudfoundry.client.v3.spaces.SpaceRelationships;
import org.cloudfoundry.util.DelayUtils;
import org.cloudfoundry.util.JobUtils;
import org.cloudfoundry.util.PaginationUtils;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
import java.io.IOException;
import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.cloudfoundry.client.v3.applications.ApplicationState.STARTED;
import static org.cloudfoundry.client.v3.applications.ApplicationState.STOPPED;
import static org.cloudfoundry.util.tuple.TupleUtils.function;
public final class ApplicationsTest extends AbstractIntegrationTest {
// Low-level Cloud Foundry v3 client shared by every test in this class.
@Autowired
private CloudFoundryClient cloudFoundryClient;
// Lazily-resolved id of the organization the tests run against.
@Autowired
private Mono<String> organizationId;
// Lazily-resolved id of the space applications are created in.
@Autowired
private Mono<String> spaceId;
// Creating an application with custom metadata labels should persist those
// labels: create the app, re-fetch it, and verify the label round-trips.
@Test
public void create() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> this.cloudFoundryClient.applicationsV3()
.create(CreateApplicationRequest.builder()
.environmentVariable("test-create-env-key", "test-create-env-value")
.metadata(Metadata.builder()
.label("test-create-label-key", "test-create-label-value")
.build())
.name(applicationName)
.relationships(ApplicationRelationships.builder()
.space(ToOneRelationship.builder()
.data(Relationship.builder()
.id(spaceId)
.build())
.build())
.build())
.build())
.map(CreateApplicationResponse::getId))
// Fetch the application back and extract the label we set above.
.flatMap(applicationId -> requestGetApplication(this.cloudFoundryClient, applicationId))
.map(response -> response.getMetadata().getLabels().get(("test-create-label-key")))
.as(StepVerifier::create)
.expectNext("test-create-label-value")
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void delete() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.delayUntil(applicationId -> this.cloudFoundryClient.applicationsV3()
.delete(DeleteApplicationRequest.builder()
.applicationId(applicationId)
.build())
.flatMap(job -> JobUtils.waitForCompletion(this.cloudFoundryClient, Duration.ofMinutes(5), job)))
.flatMap(applicationId -> requestGetApplications(this.cloudFoundryClient, applicationId))
.as(StepVerifier::create)
.consumeErrorWith(t -> assertThat(t).isInstanceOf(ClientV3Exception.class).hasMessageMatching("CF-ResourceNotFound\\([0-9]+\\): App not found.*"))
.verify(Duration.ofMinutes(5));
}
@Test
public void get() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> this.cloudFoundryClient.applicationsV3()
.get(GetApplicationRequest.builder()
.applicationId(applicationId)
.build()))
.map(GetApplicationResponse::getName)
.as(StepVerifier::create)
.expectNext(applicationName)
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void getDropletAssociation() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> Mono.zip(
Mono.just(applicationId),
uploadPackageAndCreateDropletId(this.cloudFoundryClient, applicationId)
))
.delayUntil(function((applicationId, dropletId) -> requestSetDroplet(this.cloudFoundryClient, applicationId, dropletId)))
.flatMap(function((applicationId, dropletId) -> Mono.zip(
Mono.just(dropletId),
this.cloudFoundryClient.applicationsV3()
.getCurrentDropletRelationship(GetApplicationCurrentDropletRelationshipRequest.builder()
.applicationId(applicationId)
.build())
.map(response -> response.getData().getId()))
))
.as(StepVerifier::create)
.consumeNextWith(tupleEquality())
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@SuppressWarnings("unchecked")
@Test
public void getEnvironment() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> this.cloudFoundryClient.applicationsV3()
.getEnvironment(GetApplicationEnvironmentRequest.builder()
.applicationId(applicationId)
.build()))
.map(env -> ((Map<String, Object>) env.getApplicationEnvironmentVariables().get("VCAP_APPLICATION")).get("name"))
.as(StepVerifier::create)
.expectNext(applicationName)
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void getFeature() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> this.cloudFoundryClient.applicationsV3()
.getFeature(GetApplicationFeatureRequest.builder()
.applicationId(applicationId)
.featureName("ssh")
.build()))
.as(StepVerifier::create)
.expectNext(GetApplicationFeatureResponse.builder()
.description("Enable SSHing into the app.")
.enabled(true)
.name("ssh")
.build())
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_10)
@Test
public void getPermissions() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> this.cloudFoundryClient.applicationsV3()
.getPermissions(GetApplicationPermissionsRequest.builder()
.applicationId(applicationId)
.build()))
.as(StepVerifier::create)
.expectNext(GetApplicationPermissionsResponse.builder()
.readBasicData(true)
.readSensitiveData(true)
.build())
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void getSshEnabled() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> this.cloudFoundryClient.applicationsV3()
.getSshEnabled(GetApplicationSshEnabledRequest.builder()
.applicationId(applicationId)
.build()))
.as(StepVerifier::create)
.expectNext(GetApplicationSshEnabledResponse.builder()
.enabled(true)
.reason("")
.build())
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void list() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMapMany(applicationId -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.list(ListApplicationsRequest.builder()
.page(page)
.build()))
.filter(resource -> applicationId.equals(resource.getId())))
.map(ApplicationResource::getName)
.as(StepVerifier::create)
.expectNext(applicationName)
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void listApplicationRoutes() {
String applicationName = this.nameFactory.getApplicationName();
String domainName = this.nameFactory.getDomainName();
this.organizationId
.flatMap(organizationId -> Mono.zip(
createDomainId(this.cloudFoundryClient, domainName, organizationId),
this.spaceId
))
.flatMap(function((domainId, spaceId) -> Mono.zip(
createApplicationId(this.cloudFoundryClient, applicationName, spaceId),
createRouteId(this.cloudFoundryClient, domainId, "listApplicationRoutes", spaceId)
)))
.flatMapMany(function((applicationId, routeId) -> requestReplaceDestinations(this.cloudFoundryClient, applicationId, routeId)
.thenReturn(applicationId)))
.flatMap(applicationId -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listRoutes(ListApplicationRoutesRequest.builder()
.applicationId(applicationId)
.page(page)
.build())))
.map(RouteResource::getMetadata)
.map(Metadata::getLabels)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-listApplicationRoutes-key", "test-listApplicationRoutes-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void listApplicationRoutesByDomain() {
String applicationName = this.nameFactory.getApplicationName();
String domainName = this.nameFactory.getDomainName();
this.organizationId
.flatMap(organizationId -> Mono.zip(
createDomainId(this.cloudFoundryClient, domainName, organizationId),
this.spaceId
))
.flatMap(function((domainId, spaceId) -> Mono.zip(
createApplicationId(this.cloudFoundryClient, applicationName, spaceId),
Mono.just(domainId),
createRouteId(this.cloudFoundryClient, domainId, "listApplicationRoutesByDomain", spaceId)
)))
.delayUntil(function((applicationId, domainId, routeId) -> requestReplaceDestinations(this.cloudFoundryClient, applicationId, routeId)))
.flatMapMany(function((applicationId, domainId, ignore) -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listRoutes(ListApplicationRoutesRequest.builder()
.applicationId(applicationId)
.domainId(domainId)
.page(page)
.build()))))
.map(RouteResource::getMetadata)
.map(Metadata::getLabels)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-listApplicationRoutesByDomain-key", "test-listApplicationRoutesByDomain-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void listApplicationRoutesByHost() {
String applicationName = this.nameFactory.getApplicationName();
String domainName = this.nameFactory.getDomainName();
String hostName = this.nameFactory.getHostName();
this.organizationId
.flatMap(organizationId -> Mono.zip(
createDomainId(this.cloudFoundryClient, domainName, organizationId),
this.spaceId
))
.flatMap(function((domainId, spaceId) -> Mono.zip(
createApplicationId(this.cloudFoundryClient, applicationName, spaceId),
createRouteId(this.cloudFoundryClient, domainId, hostName, "listApplicationRoutesByHost", null, null, spaceId)
)))
.flatMap(function((applicationId, routeId) -> requestReplaceDestinations(this.cloudFoundryClient, applicationId, routeId)
.thenReturn(applicationId)))
.flatMapMany(applicationId -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listRoutes(ListApplicationRoutesRequest.builder()
.applicationId(applicationId)
.host(hostName)
.page(page)
.build())))
.filter(route -> route.getMetadata().getLabels().containsKey("test-listApplicationRoutesByHost-key"))
.map(RouteResource::getMetadata)
.map(Metadata::getLabels)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-listApplicationRoutesByHost-key", "test-listApplicationRoutesByHost-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void listApplicationRoutesByLabelSelector() {
String applicationName = this.nameFactory.getApplicationName();
String domainName = this.nameFactory.getDomainName();
String spaceName = this.nameFactory.getSpaceName();
this.organizationId
.flatMap(organizationId -> Mono.zip(
createDomainId(this.cloudFoundryClient, domainName, organizationId),
createSpaceId(this.cloudFoundryClient, organizationId, spaceName)
))
.flatMap(function((domainId, spaceId) -> Mono.zip(
createApplicationId(this.cloudFoundryClient, applicationName, spaceId),
createRouteId(this.cloudFoundryClient, domainId, "listApplicationRoutesByLabelSelector", spaceId)
)))
.flatMapMany(function((applicationId, routeId) -> requestReplaceDestinations(this.cloudFoundryClient, applicationId, routeId)
.thenReturn(applicationId)))
.flatMap(applicationId -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listRoutes(ListApplicationRoutesRequest.builder()
.applicationId(applicationId)
.labelSelector("test-listApplicationRoutesByLabelSelector-key=test-listApplicationRoutesByLabelSelector-value")
.page(page)
.build())))
.map(RouteResource::getMetadata)
.map(Metadata::getLabels)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-listApplicationRoutesByLabelSelector-key", "test-listApplicationRoutesByLabelSelector-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void listApplicationRoutesByOrganizationId() {
String applicationName = this.nameFactory.getApplicationName();
String domainName = this.nameFactory.getDomainName();
String organizationName = this.nameFactory.getOrganizationName();
String spaceName = this.nameFactory.getSpaceName();
createOrganizationId(this.cloudFoundryClient, organizationName)
.flatMap(organizationId -> Mono.zip(
createDomainId(this.cloudFoundryClient, domainName, organizationId),
Mono.just(organizationId),
createSpaceId(this.cloudFoundryClient, organizationId, spaceName)
))
.flatMap(function((domainId, organizationId, spaceId) -> Mono.zip(
createApplicationId(this.cloudFoundryClient, applicationName, spaceId),
Mono.just(organizationId),
createRouteId(this.cloudFoundryClient, domainId, "listApplicationRoutesByOrganizationId", spaceId)
)))
.delayUntil(function((applicationId, organizationId, routeId) -> requestReplaceDestinations(this.cloudFoundryClient, applicationId, routeId)))
.flatMapMany(function((applicationId, organizationId, ignore) -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listRoutes(ListApplicationRoutesRequest.builder()
.applicationId(applicationId)
.organizationId(organizationId)
.page(page)
.build()))))
.map(RouteResource::getMetadata)
.map(Metadata::getLabels)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-listApplicationRoutesByOrganizationId-key", "test-listApplicationRoutesByOrganizationId-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void listApplicationRoutesByPath() {
String applicationName = this.nameFactory.getApplicationName();
String domainName = this.nameFactory.getDomainName();
String path = this.nameFactory.getPath();
this.organizationId
.flatMap(organizationId -> Mono.zip(
createDomainId(this.cloudFoundryClient, domainName, organizationId),
this.spaceId
))
.flatMap(function((domainId, spaceId) -> Mono.zip(
createApplicationId(this.cloudFoundryClient, applicationName, spaceId),
createRouteId(this.cloudFoundryClient, domainId, null, "listApplicationRoutesByPath", path, null, spaceId)
)))
.flatMapMany(function((applicationId, routeId) -> requestReplaceDestinations(this.cloudFoundryClient, applicationId, routeId)
.thenReturn(applicationId)))
.flatMap(applicationId -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listRoutes(ListApplicationRoutesRequest.builder()
.applicationId(applicationId)
.path(path)
.page(page)
.build())))
.filter(route -> route.getMetadata().getLabels().containsKey("test-listApplicationRoutesByPath-key"))
.map(RouteResource::getMetadata)
.map(Metadata::getLabels)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-listApplicationRoutesByPath-key", "test-listApplicationRoutesByPath-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void listApplicationRoutesBySpaceId() {
String applicationName = this.nameFactory.getApplicationName();
String domainName = this.nameFactory.getDomainName();
String spaceName = this.nameFactory.getSpaceName();
this.organizationId
.flatMap(organizationId -> Mono.zip(
createDomainId(this.cloudFoundryClient, domainName, organizationId),
createSpaceId(this.cloudFoundryClient, organizationId, spaceName)
))
.flatMap(function((domainId, spaceId) -> Mono.zip(
createApplicationId(this.cloudFoundryClient, applicationName, spaceId),
createRouteId(this.cloudFoundryClient, domainId, "listApplicationRoutesBySpaceId", spaceId),
Mono.just(spaceId)
)))
.delayUntil(function((applicationId, routeId, spaceId) -> requestReplaceDestinations(this.cloudFoundryClient, applicationId, routeId)
.thenReturn(applicationId)))
.flatMapMany(function((applicationId, ignore, spaceId) -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listRoutes(ListApplicationRoutesRequest.builder()
.applicationId(applicationId)
.page(page)
.spaceId(spaceId)
.build()))))
.filter(route -> route.getMetadata().getLabels().containsKey("test-listApplicationRoutesBySpaceId-key"))
.map(RouteResource::getMetadata)
.map(Metadata::getLabels)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-listApplicationRoutesBySpaceId-key", "test-listApplicationRoutesBySpaceId-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void listFeatures() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMapMany(applicationId -> PaginationUtils.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.listFeatures(ListApplicationFeaturesRequest.builder()
.applicationId(applicationId)
.page(page)
.build())))
.filter(resource -> "revisions".equals(resource.getName()))
.map(ApplicationFeatureResource::getDescription)
.as(StepVerifier::create)
.consumeNextWith(description -> assertThat(description).startsWith("Enable versioning of an application"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void listFilterBySpaceId() {
String applicationName = this.nameFactory.getApplicationName();
String spaceName = this.nameFactory.getSpaceName();
this.organizationId
.flatMap(organizationId -> createSpaceId(this.cloudFoundryClient, organizationId, spaceName))
.flatMap(spaceId -> Mono.zip(
Mono.just(spaceId),
createApplicationId(this.cloudFoundryClient, applicationName, spaceId)
))
.flatMapMany(function((spaceId, applicationId) -> PaginationUtils
.requestClientV3Resources(page ->
this.cloudFoundryClient.applicationsV3()
.list(ListApplicationsRequest.builder()
.spaceId(spaceId)
.page(page)
.build()))))
.map(ApplicationResource::getName)
.as(StepVerifier::create)
.expectNext(applicationName)
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@IfCloudFoundryVersion(greaterThanOrEqualTo = CloudFoundryVersion.PCF_2_9)
@Test
public void scale() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> this.cloudFoundryClient.applicationsV3()
.scale(ScaleApplicationRequest.builder()
.applicationId(applicationId)
.diskInMb(404)
.type("web")
.build())
.thenReturn(applicationId))
.flatMap(applicationId -> requestApplicationProcess(this.cloudFoundryClient, applicationId))
.map(GetApplicationProcessResponse::getDiskInMb)
.as(StepVerifier::create)
.expectNext(404)
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void setAndGetDroplet() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> Mono.zip(
Mono.just(applicationId),
uploadPackageAndCreateDropletId(this.cloudFoundryClient, applicationId)
))
.delayUntil(function((applicationId, dropletId) -> this.cloudFoundryClient.applicationsV3()
.setCurrentDroplet(SetApplicationCurrentDropletRequest.builder()
.applicationId(applicationId)
.data(Relationship.builder()
.id(dropletId)
.build())
.build())))
.flatMap(function((applicationId, dropletId) -> Mono.zip(
Mono.just(dropletId),
this.cloudFoundryClient.applicationsV3()
.getCurrentDroplet(GetApplicationCurrentDropletRequest.builder()
.applicationId(applicationId)
.build())
.map(GetApplicationCurrentDropletResponse::getId))))
.as(StepVerifier::create)
.consumeNextWith(tupleEquality())
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void start() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.delayUntil(applicationId -> prepareApplicationToStart(this.cloudFoundryClient, applicationId))
.delayUntil((applicationId -> this.cloudFoundryClient.applicationsV3()
.start(StartApplicationRequest.builder()
.applicationId(applicationId)
.build())))
.flatMap(applicationId -> requestGetApplication(this.cloudFoundryClient, applicationId))
.map(GetApplicationResponse::getState)
.as(StepVerifier::create)
.expectNext(STARTED)
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void stop() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.delayUntil(applicationId -> prepareApplicationToStart(this.cloudFoundryClient, applicationId))
.delayUntil((applicationId -> requestStartApplication(this.cloudFoundryClient, applicationId)))
.delayUntil(applicationId -> this.cloudFoundryClient.applicationsV3()
.stop(StopApplicationRequest.builder()
.applicationId(applicationId)
.build()))
.flatMap(applicationId -> requestGetApplication(this.cloudFoundryClient, applicationId))
.map(GetApplicationResponse::getState)
.as(StepVerifier::create)
.expectNext(STOPPED)
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void update() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.delayUntil(applicationId -> this.cloudFoundryClient.applicationsV3()
.update(UpdateApplicationRequest.builder()
.applicationId(applicationId)
.metadata(Metadata.builder()
.label("test-update-key", "test-update-value")
.build())
.build()))
.flatMap(applicationId -> requestGetApplication(this.cloudFoundryClient, applicationId))
.map(response -> response.getMetadata().getLabels())
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-update-key", "test-update-value"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void updateAndGetEnvironmentVariables() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.delayUntil(applicationId -> this.cloudFoundryClient.applicationsV3()
.updateEnvironmentVariables(UpdateApplicationEnvironmentVariablesRequest.builder()
.applicationId(applicationId)
.var("test-updateEnv-key", "test-updateEnv-key")
.build()))
.flatMap(applicationId -> this.cloudFoundryClient.applicationsV3()
.getEnvironmentVariables(GetApplicationEnvironmentVariablesRequest.builder()
.applicationId(applicationId)
.build()))
.map(GetApplicationEnvironmentVariablesResponse::getVars)
.as(StepVerifier::create)
.expectNext(Collections.singletonMap("test-updateEnv-key", "test-updateEnv-key"))
.expectComplete()
.verify(Duration.ofMinutes(5));
}
@Test
public void updateFeature() {
String applicationName = this.nameFactory.getApplicationName();
this.spaceId
.flatMap(spaceId -> createApplicationId(this.cloudFoundryClient, applicationName, spaceId))
.flatMap(applicationId -> Mono.zip(
Mono.just(applicationId),
getFeatureEnabled(this.cloudFoundryClient, applicationId, "ssh")
))
.delayUntil(function((applicationId, enabled) -> this.cloudFoundryClient.applicationsV3()
.updateFeature(UpdateApplicationFeatureRequest.builder()
.applicationId(applicationId)
.enabled(!enabled)
.featureName("ssh")
.build())))
.flatMap(function((applicationId, enabled) -> Mono.zip(
Mono.just(!enabled),
getFeatureEnabled(this.cloudFoundryClient, applicationId, "ssh"))
))
.as(StepVerifier::create)
.consumeNextWith(tupleEquality())
.expectComplete()
.verify(Duration.ofMinutes(5));
}
private static Mono<String> createApplicationId(CloudFoundryClient cloudFoundryClient, String applicationName, String spaceId) {
return requestCreateApplication(cloudFoundryClient, applicationName, spaceId)
.map(CreateApplicationResponse::getId);
}
private static Mono<String> createBuildId(CloudFoundryClient cloudFoundryClient, String packageId) {
return requestCreateBuild(cloudFoundryClient, packageId)
.map(CreateBuildResponse::getId);
}
private static Mono<String> createDomainId(CloudFoundryClient cloudFoundryClient, String domainName, String organizationId) {
return requestCreateDomain(cloudFoundryClient, domainName, organizationId)
.map(CreateDomainResponse::getId);
}
private static Mono<String> createOrganizationId(CloudFoundryClient cloudFoundryClient, String organizationName) {
return requestCreateOrganization(cloudFoundryClient, organizationName)
.map(CreateOrganizationResponse::getId);
}
private static Mono<String> createPackageId(CloudFoundryClient cloudFoundryClient, String applicationId) {
return requestCreatePackage(cloudFoundryClient, applicationId).map(CreatePackageResponse::getId);
}
private static Mono<String> createRouteId(CloudFoundryClient cloudFoundryClient, String domainId, String label, String spaceId) {
return requestCreateRoute(cloudFoundryClient, domainId, label, spaceId)
.map(CreateRouteResponse::getId);
}
private static Mono<String> createRouteId(CloudFoundryClient cloudFoundryClient, String domainId, String host, String label, String path, Integer port, String spaceId) {
return requestCreateRoute(cloudFoundryClient, domainId, host, label, path, port, spaceId)
.map(CreateRouteResponse::getId);
}
private static Mono<String> createSpaceId(CloudFoundryClient cloudFoundryClient, String organizationId, String spaceName) {
return requestCreateSpace(cloudFoundryClient, organizationId, spaceName)
.map(CreateSpaceResponse::getId);
}
private static Mono<Boolean> getFeatureEnabled(CloudFoundryClient cloudFoundryClient, String applicationId, String featureName) {
return requestGetFeature(cloudFoundryClient, applicationId, featureName)
.map(GetApplicationFeatureResponse::getEnabled);
}
private static Mono<GetApplicationProcessResponse> requestApplicationProcess(CloudFoundryClient cloudFoundryClient, String applicationId) {
return cloudFoundryClient.applicationsV3()
.getProcess(GetApplicationProcessRequest.builder()
.applicationId(applicationId)
.type("web")
.build());
}
private static Mono<CreateApplicationResponse> requestCreateApplication(CloudFoundryClient cloudFoundryClient, String applicationName, String spaceId) {
return cloudFoundryClient.applicationsV3()
.create(CreateApplicationRequest.builder()
.name(applicationName)
.relationships(ApplicationRelationships.builder()
.space(ToOneRelationship.builder()
.data(Relationship.builder()
.id(spaceId)
.build())
.build())
.build())
.build());
}
private static Mono<CreateBuildResponse> requestCreateBuild(CloudFoundryClient cloudFoundryClient, String packageId) {
return cloudFoundryClient.builds()
.create(CreateBuildRequest.builder()
.getPackage(Relationship.builder()
.id(packageId)
.build())
.build());
}
private static Mono<CreateDomainResponse> requestCreateDomain(CloudFoundryClient cloudFoundryClient, String domainName, String organizationId) {
return cloudFoundryClient.domainsV3()
.create(CreateDomainRequest.builder()
.internal(false)
.name(domainName)
.relationships(DomainRelationships.builder()
.organization(ToOneRelationship.builder()
.data(Relationship.builder()
.id(organizationId)
.build())
.build())
.build())
.build());
}
private static Mono<CreateOrganizationResponse> requestCreateOrganization(CloudFoundryClient cloudFoundryClient, String organizationName) {
return cloudFoundryClient.organizationsV3()
.create(CreateOrganizationRequest.builder()
.name(organizationName)
.build());
}
private static Mono<CreatePackageResponse> requestCreatePackage(CloudFoundryClient cloudFoundryClient, String applicationId) {
return cloudFoundryClient.packages()
.create(CreatePackageRequest.builder()
.relationships(PackageRelationships.builder()
.application(ToOneRelationship.builder()
.data(Relationship.builder()
.id(applicationId)
.build())
.build())
.build())
.type(PackageType.BITS)
.build());
}
private static Mono<CreateRouteResponse> requestCreateRoute(CloudFoundryClient cloudFoundryClient, String domainId, String host, String label, String path, Integer port, String spaceId) {
String key = String.format("test-%s-key", label);
String value = String.format("test-%s-value", label);
return cloudFoundryClient.routesV3()
.create(CreateRouteRequest.builder()
.host(host)
.metadata(Metadata.builder()
.label(key, value)
.build())
.path(path)
.port(port)
.relationships(RouteRelationships.builder()
.domain(ToOneRelationship.builder()
.data(Relationship.builder()
.id(domainId)
.build())
.build())
.space(ToOneRelationship.builder()
.data(Relationship.builder()
.id(spaceId)
.build())
.build())
.build())
.build());
}
private static Mono<CreateRouteResponse> requestCreateRoute(CloudFoundryClient cloudFoundryClient, String domainId, String label, String spaceId) {
String key = String.format("test-%s-key", label);
String value = String.format("test-%s-value", label);
return cloudFoundryClient.routesV3()
.create(CreateRouteRequest.builder()
.metadata(Metadata.builder()
.label(key, value)
.build())
.relationships(RouteRelationships.builder()
.domain(ToOneRelationship.builder()
.data(Relationship.builder()
.id(domainId)
.build())
.build())
.space(ToOneRelationship.builder()
.data(Relationship.builder()
.id(spaceId)
.build())
.build())
.build())
.build());
}
private static Mono<CreateSpaceResponse> requestCreateSpace(CloudFoundryClient cloudFoundryClient, String organizationId, String spaceName) {
return cloudFoundryClient.spacesV3()
.create(CreateSpaceRequest.builder()
.name(spaceName)
.relationships(SpaceRelationships.builder()
.organization(ToOneRelationship.builder()
.data(Relationship.builder()
.id(organizationId)
.build())
.build())
.build())
.build());
}
private static Mono<GetApplicationResponse> requestGetApplication(CloudFoundryClient cloudFoundryClient, String applicationId) {
return cloudFoundryClient.applicationsV3()
.get(GetApplicationRequest.builder()
.applicationId(applicationId)
.build());
}
private static Mono<GetApplicationResponse> requestGetApplications(CloudFoundryClient cloudFoundryClient, String applicationId) {
return cloudFoundryClient.applicationsV3()
.get(GetApplicationRequest.builder()
.applicationId(applicationId)
.build());
}
private static Mono<GetBuildResponse> requestGetBuild(CloudFoundryClient cloudFoundryClient, String buildId) {
return cloudFoundryClient.builds()
.get(GetBuildRequest.builder()
.buildId(buildId)
.build());
}
private static Mono<GetApplicationFeatureResponse> requestGetFeature(CloudFoundryClient cloudFoundryClient, String applicationId, String featureName) {
return cloudFoundryClient.applicationsV3()
.getFeature(GetApplicationFeatureRequest.builder()
.applicationId(applicationId)
.featureName(featureName)
.build());
}
private static Mono<GetPackageResponse> requestGetPackage(CloudFoundryClient cloudFoundryClient, String packageId) {
return cloudFoundryClient.packages()
.get(GetPackageRequest.builder()
.packageId(packageId)
.build());
}
private static Mono<ReplaceRouteDestinationsResponse> requestReplaceDestinations(CloudFoundryClient cloudFoundryClient, String applicationId, String routeId) {
return cloudFoundryClient.routesV3()
.replaceDestinations(ReplaceRouteDestinationsRequest.builder()
.destination(Destination.builder()
.application(Application.builder()
.applicationId(applicationId)
.build())
.build())
.routeId(routeId)
.build());
}
private static Mono<SetApplicationCurrentDropletResponse> requestSetDroplet(CloudFoundryClient cloudFoundryClient, String applicationId, String dropletId) {
return cloudFoundryClient.applicationsV3()
.setCurrentDroplet(SetApplicationCurrentDropletRequest.builder()
.applicationId(applicationId)
.data(Relationship.builder()
.id(dropletId)
.build())
.build());
}
private static Mono<StartApplicationResponse> requestStartApplication(CloudFoundryClient cloudFoundryClient, String applicationId) {
return cloudFoundryClient.applicationsV3()
.start(StartApplicationRequest.builder()
.applicationId(applicationId)
.build());
}
private static Mono<UploadPackageResponse> requestUploadPackage(CloudFoundryClient cloudFoundryClient, String packageId) {
try {
return cloudFoundryClient.packages()
.upload(UploadPackageRequest.builder()
.bits(new ClassPathResource("test-application.zip").getFile().toPath())
.packageId(packageId)
.build());
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
 * Creates a package for the application, uploads the test application bits,
 * waits for the package to be processed, stages a build from it, and resolves
 * to the id of the droplet produced by that build.
 *
 * @param cloudFoundryClient the client to operate against
 * @param applicationId the id of the application to stage
 * @return the id of the staged droplet
 */
private static Mono<String> uploadPackageAndCreateDropletId(CloudFoundryClient cloudFoundryClient, String applicationId) {
return createPackageId(cloudFoundryClient, applicationId)
.delayUntil(packageId -> requestUploadPackage(cloudFoundryClient, packageId))
.delayUntil(packageId -> waitForPackageUpload(cloudFoundryClient, packageId))
.flatMap(packageId -> createBuildId(cloudFoundryClient, packageId))
.delayUntil(buildId -> waitForBuild(cloudFoundryClient, buildId))
.flatMap(buildId -> requestGetBuild(cloudFoundryClient, buildId))
.map(build -> build.getDroplet().getId());
}
/**
 * Polls the build until it reports the STAGED state, retrying with exponential
 * back-off between 1 and 15 seconds and giving up after 5 minutes.
 *
 * @param cloudFoundryClient the client to operate against
 * @param buildId the id of the build to wait for
 * @return the build response once the build is staged
 */
private static Mono<GetBuildResponse> waitForBuild(CloudFoundryClient cloudFoundryClient, String buildId) {
return requestGetBuild(cloudFoundryClient, buildId)
.filter(response -> BuildState.STAGED.equals(response.getState()))
.repeatWhenEmpty(DelayUtils.exponentialBackOff(Duration.ofSeconds(1), Duration.ofSeconds(15), Duration.ofMinutes(5)));
}
/**
 * Polls the package until it reports the READY state, retrying with exponential
 * back-off between 1 and 15 seconds and giving up after 5 minutes.
 *
 * @param cloudFoundryClient the client to operate against
 * @param packageId the id of the package to wait for
 * @return the package response once the package is ready
 */
private static Mono<GetPackageResponse> waitForPackageUpload(CloudFoundryClient cloudFoundryClient, String packageId) {
return requestGetPackage(cloudFoundryClient, packageId)
.filter(response -> PackageState.READY.equals(response.getState()))
.repeatWhenEmpty(DelayUtils.exponentialBackOff(Duration.ofSeconds(1), Duration.ofSeconds(15), Duration.ofMinutes(5)));
}
/**
 * Stages the test application and assigns the resulting droplet as the
 * application's current droplet so that the application can be started.
 * <p>
 * Made static for consistency with the other helpers in this class; it uses
 * no instance state. Existing call sites remain source-compatible.
 *
 * @param cloudFoundryClient the client to operate against
 * @param applicationId the id of the application to prepare
 * @return the set-current-droplet response
 */
private static Mono<SetApplicationCurrentDropletResponse> prepareApplicationToStart(CloudFoundryClient cloudFoundryClient, String applicationId) {
    return uploadPackageAndCreateDropletId(cloudFoundryClient, applicationId)
        .flatMap(dropletId -> requestSetDroplet(cloudFoundryClient, applicationId, dropletId));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.Map;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.FailedToCreateConsumerException;
import org.apache.camel.IsSingleton;
import org.apache.camel.PollingConsumer;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.LRUCache;
import org.apache.camel.util.LRUSoftCache;
import org.apache.camel.util.ServiceHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Cache containing created {@link org.apache.camel.Consumer}.
*
* @version
*/
public class ConsumerCache extends ServiceSupport {
    // note: 'transient' was removed — it is meaningless on a static field
    private static final Logger LOG = LoggerFactory.getLogger(ConsumerCache.class);

    private final CamelContext camelContext;
    private final Map<String, PollingConsumer> consumers;
    private final Object source;

    public ConsumerCache(Object source, CamelContext camelContext) {
        this(source, camelContext, CamelContextHelper.getMaximumCachePoolSize(camelContext));
    }

    public ConsumerCache(Object source, CamelContext camelContext, int cacheSize) {
        this(source, camelContext, createLRUCache(cacheSize));
    }

    public ConsumerCache(Object source, CamelContext camelContext, Map<String, PollingConsumer> cache) {
        this.camelContext = camelContext;
        this.consumers = cache;
        this.source = source;
    }

    /**
     * Creates the {@link LRUCache} to be used.
     * <p/>
     * This implementation returns a {@link org.apache.camel.util.LRUSoftCache} instance.
     *
     * @param cacheSize the cache size
     * @return the cache
     */
    protected static LRUCache<String, PollingConsumer> createLRUCache(int cacheSize) {
        // We use a soft reference cache to allow the JVM to re-claim memory if it runs low on memory.
        return new LRUSoftCache<String, PollingConsumer>(cacheSize);
    }

    /**
     * Gets a started {@link PollingConsumer} for the given endpoint, creating it on demand.
     * <p/>
     * Singleton consumers are cached by endpoint URI; non-singleton consumers are
     * returned but not cached.
     *
     * @param endpoint the endpoint to get a consumer for
     * @return the polling consumer, never null
     * @throws FailedToCreateConsumerException if the consumer could not be created or started
     */
    public synchronized PollingConsumer getConsumer(Endpoint endpoint) {
        String key = endpoint.getEndpointUri();
        PollingConsumer answer = consumers.get(key);
        if (answer == null) {
            try {
                answer = endpoint.createPollingConsumer();
                answer.start();
            } catch (Exception e) {
                throw new FailedToCreateConsumerException(endpoint, e);
            }
            boolean singleton = true;
            if (answer instanceof IsSingleton) {
                singleton = ((IsSingleton) answer).isSingleton();
            }
            if (singleton) {
                // fix: log the actual cache key (endpoint URI), not the endpoint object,
                // matching the message text and the non-singleton branch below
                LOG.debug("Adding to consumer cache with key: {} for consumer: {}", key, answer);
                consumers.put(key, answer);
            } else {
                LOG.debug("Consumer for endpoint: {} is not singleton and thus not added to consumer cache", key);
            }
        }
        return answer;
    }

    /**
     * Receives an exchange from the endpoint, blocking until one is available.
     *
     * @param endpoint the endpoint to receive from
     * @return the received exchange
     */
    public Exchange receive(Endpoint endpoint) {
        LOG.debug("<<<< {}", endpoint);
        PollingConsumer consumer = getConsumer(endpoint);
        return consumer.receive();
    }

    /**
     * Receives an exchange from the endpoint, waiting at most the given timeout.
     *
     * @param endpoint the endpoint to receive from
     * @param timeout the maximum time to wait in millis
     * @return the received exchange, or null if the timeout elapsed
     */
    public Exchange receive(Endpoint endpoint, long timeout) {
        LOG.debug("<<<< {}", endpoint);
        PollingConsumer consumer = getConsumer(endpoint);
        return consumer.receive(timeout);
    }

    /**
     * Receives an exchange from the endpoint if one is immediately available.
     *
     * @param endpoint the endpoint to receive from
     * @return the received exchange, or null if none was ready
     */
    public Exchange receiveNoWait(Endpoint endpoint) {
        LOG.debug("<<<< {}", endpoint);
        PollingConsumer consumer = getConsumer(endpoint);
        return consumer.receiveNoWait();
    }

    public CamelContext getCamelContext() {
        return camelContext;
    }

    /**
     * Gets the source which uses this cache
     *
     * @return the source
     */
    public Object getSource() {
        return source;
    }

    /**
     * Returns the current size of the cache
     *
     * @return the current size
     */
    public int size() {
        int size = consumers.size();
        LOG.trace("size = {}", size);
        return size;
    }

    /**
     * Gets the maximum cache size (capacity).
     * <p/>
     * Will return <tt>-1</tt> if it cannot determine this if a custom cache was used.
     *
     * @return the capacity
     */
    public int getCapacity() {
        int capacity = -1;
        if (consumers instanceof LRUCache) {
            LRUCache<String, PollingConsumer> cache = (LRUCache<String, PollingConsumer>) consumers;
            capacity = cache.getMaxCacheSize();
        }
        return capacity;
    }

    /**
     * Gets the cache hits statistic
     * <p/>
     * Will return <tt>-1</tt> if it cannot determine this if a custom cache was used.
     *
     * @return the hits
     */
    public long getHits() {
        long hits = -1;
        if (consumers instanceof LRUCache) {
            LRUCache<String, PollingConsumer> cache = (LRUCache<String, PollingConsumer>) consumers;
            hits = cache.getHits();
        }
        return hits;
    }

    /**
     * Gets the cache misses statistic
     * <p/>
     * Will return <tt>-1</tt> if it cannot determine this if a custom cache was used.
     *
     * @return the misses
     */
    public long getMisses() {
        long misses = -1;
        if (consumers instanceof LRUCache) {
            LRUCache<String, PollingConsumer> cache = (LRUCache<String, PollingConsumer>) consumers;
            misses = cache.getMisses();
        }
        return misses;
    }

    /**
     * Resets the cache statistics
     */
    public void resetCacheStatistics() {
        if (consumers instanceof LRUCache) {
            LRUCache<String, PollingConsumer> cache = (LRUCache<String, PollingConsumer>) consumers;
            cache.resetStatistics();
        }
    }

    /**
     * Purges this cache
     */
    public synchronized void purge() {
        consumers.clear();
    }

    @Override
    public String toString() {
        return "ConsumerCache for source: " + source + ", capacity: " + getCapacity();
    }

    @Override
    protected void doStart() throws Exception {
        ServiceHelper.startServices(consumers);
    }

    @Override
    protected void doStop() throws Exception {
        ServiceHelper.stopServices(consumers);
        consumers.clear();
    }
}
| |
/*
* Copyright 2001-2010 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.time;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import org.joda.convert.FromString;
import org.joda.convert.ToString;
import org.joda.time.base.BasePartial;
import org.joda.time.chrono.ISOChronology;
import org.joda.time.field.AbstractPartialFieldProperty;
import org.joda.time.field.FieldUtils;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.ISODateTimeFormat;
/**
* MonthDay is an immutable partial supporting the monthOfYear and dayOfMonth fields.
* <p>
* NOTE: This class only supports the two fields listed above.
* It is impossible to query any other fields, such as dayOfWeek or centuryOfEra.
* <p>
* Calculations on MonthDay are performed using a {@link Chronology}.
* This chronology is set to be in the UTC time zone for all calculations.
* <p>
* One use case for this class is to store a birthday without the year (to avoid
* storing the age of the person).
* This class can be used as the gMonthDay type in XML Schema.
* <p>
* Each individual field can be queried in two ways:
* <ul>
* <li><code>getMonthOfYear()</code>
* <li><code>monthOfYear().get()</code>
* </ul>
* The second technique also provides access to other useful methods on the
* field:
* <ul>
* <li>numeric value - <code>monthOfYear().get()</code>
* <li>text value - <code>monthOfYear().getAsText()</code>
* <li>short text value - <code>monthOfYear().getAsShortText()</code>
* <li>maximum/minimum values - <code>monthOfYear().getMaximumValue()</code>
* <li>add/subtract - <code>monthOfYear().addToCopy()</code>
* <li>set - <code>monthOfYear().setCopy()</code>
* </ul>
* <p>
* MonthDay is thread-safe and immutable, provided that the Chronology is as well.
* All standard Chronology classes supplied are thread-safe and immutable.
*
* @author Chris Pheby
* @since 2.0
*/
public final class MonthDay
extends BasePartial
implements ReadablePartial, Serializable {
/** Serialization version */
private static final long serialVersionUID = 2954560699050434609L;
/** The singleton set of field types (month-of-year then day-of-month) */
private static final DateTimeFieldType[] FIELD_TYPES = new DateTimeFieldType[] {
DateTimeFieldType.monthOfYear(),
DateTimeFieldType.dayOfMonth(), };
/** The singleton parser, accepting ISO local-date strings or the --MM-dd format */
private static final DateTimeFormatter PARSER = new DateTimeFormatterBuilder()
.appendOptional(ISODateTimeFormat.localDateParser().getParser())
.appendOptional(DateTimeFormat.forPattern("--MM-dd").getParser()).toFormatter();
/** The index of the monthOfYear field in the field array */
public static final int MONTH_OF_YEAR = 0;
/** The index of the day field in the field array */
public static final int DAY_OF_MONTH = 1;
//-----------------------------------------------------------------------
/**
 * Obtains a {@code MonthDay} set to the current system millisecond time
 * using <code>ISOChronology</code> in the default time zone.
 * The resulting object does not use the zone.
 *
 * @return the current month-day, not null
 * @since 2.0
 */
public static MonthDay now() {
return new MonthDay();
}
/**
 * Obtains a {@code MonthDay} set to the current system millisecond time
 * using <code>ISOChronology</code> in the specified time zone.
 * The resulting object does not use the zone.
 *
 * @param zone the time zone, not null
 * @return the current month-day, not null
 * @throws NullPointerException if the zone is null
 * @since 2.0
 */
public static MonthDay now(DateTimeZone zone) {
if (zone == null) {
throw new NullPointerException("Zone must not be null");
}
return new MonthDay(zone);
}
/**
 * Obtains a {@code MonthDay} set to the current system millisecond time
 * using the specified chronology.
 * The resulting object does not use the zone.
 *
 * @param chronology the chronology, not null
 * @return the current month-day, not null
 * @throws NullPointerException if the chronology is null
 * @since 2.0
 */
public static MonthDay now(Chronology chronology) {
if (chronology == null) {
throw new NullPointerException("Chronology must not be null");
}
return new MonthDay(chronology);
}
//-----------------------------------------------------------------------
/**
 * Parses a {@code MonthDay} from the specified string.
 * <p>
 * This uses {@link ISODateTimeFormat#localDateParser()} or the format {@code --MM-dd}.
 *
 * @param str the string to parse, not null
 * @return the parsed month-day, never null
 * @since 2.0
 */
@FromString
public static MonthDay parse(String str) {
return parse(str, PARSER);
}
/**
 * Parses a {@code MonthDay} from the specified string using a formatter.
 *
 * @param str the string to parse, not null
 * @param formatter the formatter to use, not null
 * @return the parsed month-day, never null
 * @since 2.0
 */
public static MonthDay parse(String str, DateTimeFormatter formatter) {
LocalDate date = formatter.parseLocalDate(str);
return new MonthDay(date.getMonthOfYear(), date.getDayOfMonth());
}
//-----------------------------------------------------------------------
/**
 * Constructs a MonthDay from a <code>java.util.Calendar</code>
 * using exactly the same field values, avoiding any time zone effects.
 * <p>
 * The month and day fields are read directly from the Calendar. The type of
 * the calendar is ignored: the result always uses ISO chronology, so callers
 * are expected (but not required) to pass a <code>GregorianCalendar</code>.
 *
 * @param calendar the Calendar to extract fields from
 * @return the created MonthDay, never null
 * @throws IllegalArgumentException if the calendar is null
 * @throws IllegalArgumentException if the monthOfYear or dayOfMonth is invalid for the ISO chronology
 */
public static MonthDay fromCalendarFields(Calendar calendar) {
    if (calendar == null) {
        throw new IllegalArgumentException("The calendar must not be null");
    }
    int month = calendar.get(Calendar.MONTH) + 1;  // Calendar months are zero-based
    int day = calendar.get(Calendar.DAY_OF_MONTH);
    return new MonthDay(month, day);
}
/**
 * Constructs a MonthDay from a <code>java.util.Date</code>
 * using exactly the same field values, avoiding any time zone effects.
 * <p>
 * The month and day fields are read directly from the Date.
 * The result always uses ISO chronology.
 *
 * @param date the Date to extract fields from
 * @return the created MonthDay, never null
 * @throws IllegalArgumentException if the date is null
 * @throws IllegalArgumentException if the monthOfYear or dayOfMonth is invalid for the ISO chronology
 */
@SuppressWarnings("deprecation")
public static MonthDay fromDateFields(Date date) {
    if (date == null) {
        throw new IllegalArgumentException("The date must not be null");
    }
    int month = date.getMonth() + 1;  // Date months are zero-based
    int day = date.getDate();
    return new MonthDay(month, day);
}
//-----------------------------------------------------------------------
/**
 * Constructs a MonthDay with the current month-day, using ISOChronology in
 * the default zone to extract the fields.
 * <p>
 * The constructor uses the default time zone, resulting in the local time
 * being initialised. Once the constructor is complete, all further calculations
 * are performed without reference to a time-zone (by switching to UTC).
 *
 * @see #now()
 */
public MonthDay() {
super();
}
/**
 * Constructs a MonthDay with the current month-day, using ISOChronology in
 * the specified zone to extract the fields.
 * <p>
 * The constructor uses the specified time zone to obtain the current month-day.
 * Once the constructor is complete, all further calculations
 * are performed without reference to a time-zone (by switching to UTC).
 *
 * @param zone the zone to use, null means default zone
 * @see #now(DateTimeZone)
 */
public MonthDay(DateTimeZone zone) {
super(ISOChronology.getInstance(zone));
}
/**
 * Constructs a MonthDay with the current month-day, using the specified chronology
 * and zone to extract the fields.
 * <p>
 * The constructor uses the time zone of the chronology specified.
 * Once the constructor is complete, all further calculations are performed
 * without reference to a time-zone (by switching to UTC).
 *
 * @param chronology the chronology, null means ISOChronology in the default zone
 * @see #now(Chronology)
 */
public MonthDay(Chronology chronology) {
super(chronology);
}
/**
 * Constructs a MonthDay extracting the partial fields from the specified
 * milliseconds using the ISOChronology in the default zone.
 * <p>
 * The constructor uses the default time zone, resulting in the local time
 * being initialised. Once the constructor is complete, all further calculations
 * are performed without reference to a time-zone (by switching to UTC).
 *
 * @param instant the milliseconds from 1970-01-01T00:00:00Z
 */
public MonthDay(long instant) {
super(instant);
}
/**
 * Constructs a MonthDay extracting the partial fields from the specified
 * milliseconds using the chronology provided.
 * <p>
 * The constructor uses the time zone of the chronology specified.
 * Once the constructor is complete, all further calculations are performed
 * without reference to a time-zone (by switching to UTC).
 *
 * @param instant the milliseconds from 1970-01-01T00:00:00Z
 * @param chronology the chronology, null means ISOChronology in the default zone
 */
public MonthDay(long instant, Chronology chronology) {
super(instant, chronology);
}
/**
 * Constructs a MonthDay from an Object that represents some form of time.
 * <p>
 * The recognised object types are defined in
 * {@link org.joda.time.convert.ConverterManager ConverterManager} and
 * include ReadableInstant, String, Calendar and Date.
 * The String formats are described by {@link ISODateTimeFormat#localDateParser()}.
 * <p>
 * The chronology used will be derived from the object, defaulting to ISO.
 *
 * @param instant the date-time object, null means now
 * @throws IllegalArgumentException if the instant is invalid
 */
public MonthDay(Object instant) {
super(instant, null, ISODateTimeFormat.localDateParser());
}
/**
 * Constructs a MonthDay from an Object that represents some form of time,
 * using the specified chronology.
 * <p>
 * The recognised object types are defined in
 * {@link org.joda.time.convert.ConverterManager ConverterManager} and
 * include ReadableInstant, String, Calendar and Date.
 * The String formats are described by {@link ISODateTimeFormat#localDateParser()}.
 * <p>
 * The constructor uses the time zone of the chronology specified.
 * Once the constructor is complete, all further calculations are performed
 * without reference to a time-zone (by switching to UTC).
 * The specified chronology overrides that of the object.
 *
 * @param instant the date-time object, null means now
 * @param chronology the chronology, null means ISO default
 * @throws IllegalArgumentException if the instant is invalid
 */
public MonthDay(Object instant, Chronology chronology) {
super(instant, DateTimeUtils.getChronology(chronology), ISODateTimeFormat.localDateParser());
}
/**
 * Constructs a MonthDay with the specified month and day
 * using <code>ISOChronology</code>.
 * <p>
 * The constructor uses no time zone, initialising the fields as provided.
 * Once the constructor is complete, all further calculations
 * are performed without reference to a time-zone (by switching to UTC).
 *
 * @param monthOfYear the month of the year
 * @param dayOfMonth the day of the month
 */
public MonthDay(int monthOfYear, int dayOfMonth) {
this(monthOfYear, dayOfMonth, null);
}
/**
 * Constructs an instance set to the specified monthOfYear and dayOfMonth
 * using the specified chronology, whose zone is ignored.
 * <p>
 * If the chronology is null, <code>ISOChronology</code> is used.
 * <p>
 * The constructor uses the time zone of the chronology specified.
 * Once the constructor is complete, all further calculations are performed
 * without reference to a time-zone (by switching to UTC).
 *
 * @param monthOfYear the month of the year
 * @param dayOfMonth the day of the month
 * @param chronology the chronology, null means ISOChronology in the default zone
 */
public MonthDay(int monthOfYear, int dayOfMonth, Chronology chronology) {
super(new int[] {monthOfYear, dayOfMonth}, chronology);
}
/**
 * Constructs a MonthDay with chronology from this instance and new values.
 *
 * @param partial the partial to base this new instance on
 * @param values the new set of values
 */
MonthDay(MonthDay partial, int[] values) {
super(partial, values);
}
/**
 * Constructs a MonthDay with values from this instance and a new chronology.
 *
 * @param partial the partial to base this new instance on
 * @param chrono the new chronology
 */
MonthDay(MonthDay partial, Chronology chrono) {
super(partial, chrono);
}
/**
 * Handle broken serialization from other tools.
 * A deserialized instance with a non-UTC chronology is normalised back to UTC.
 * @return the resolved object, not null
 */
private Object readResolve() {
if (DateTimeZone.UTC.equals(getChronology().getZone()) == false) {
return new MonthDay(this, getChronology().withUTC());
}
return this;
}
//-----------------------------------------------------------------------
/**
 * Gets the number of fields in this partial, which is two.
 * The supported fields are MonthOfYear and DayOfMonth.
 * Note that only these fields may be queried.
 *
 * @return the field count, always two
 */
public int size() {
return 2;
}
/**
 * Gets the field for a specific index in the chronology specified.
 * <p>
 * This method must not use any instance variables.
 *
 * @param index the index to retrieve
 * @param chrono the chronology to use
 * @return the field, never null
 * @throws IndexOutOfBoundsException if the index is not 0 or 1
 */
protected DateTimeField getField(int index, Chronology chrono) {
    if (index == MONTH_OF_YEAR) {
        return chrono.monthOfYear();
    }
    if (index == DAY_OF_MONTH) {
        return chrono.dayOfMonth();
    }
    throw new IndexOutOfBoundsException("Invalid index: " + index);
}
/**
 * Gets the field type at the specified index.
 *
 * @param index the index to retrieve, 0 for monthOfYear or 1 for dayOfMonth
 * @return the field at the specified index, never null
 * @throws IndexOutOfBoundsException if the index is invalid
 */
public DateTimeFieldType getFieldType(int index) {
return FIELD_TYPES[index];
}
/**
 * Gets an array of the field type of each of the fields that this partial supports.
 * <p>
 * The fields are returned largest to smallest, Month, Day.
 *
 * @return the array of field types (cloned), largest to smallest, never null
 */
public DateTimeFieldType[] getFieldTypes() {
    // array clone() is covariant since Java 5, so the explicit cast was redundant
    return FIELD_TYPES.clone();
}
//-----------------------------------------------------------------------
/**
 * Returns a copy of this month-day with the specified chronology.
 * This instance is immutable and unaffected by this method call.
 * <p>
 * The field values are retained, so the result will typically refer to a
 * different instant. The time zone of the given chronology is ignored, as
 * MonthDay operates without a time zone (UTC is used internally).
 *
 * @param newChronology the new chronology, null means ISO
 * @return a copy of this month-day with a different chronology, never null
 * @throws IllegalArgumentException if the values are invalid for the new chronology
 */
public MonthDay withChronologyRetainFields(Chronology newChronology) {
    Chronology chrono = DateTimeUtils.getChronology(newChronology).withUTC();
    if (chrono == getChronology()) {
        return this;
    }
    MonthDay result = new MonthDay(this, chrono);
    // validate that the retained field values are legal in the new chronology
    chrono.validate(result, getValues());
    return result;
}
/**
 * Returns a copy of this month-day with the specified field set to a new value.
 * <p>
 * For example, if the field type is <code>dayOfMonth</code> then the day
 * would be changed in the returned instance.
 * <p>
 * These three lines are equivalent:
 * <pre>
 * MonthDay updated = md.withField(DateTimeFieldType.dayOfMonth(), 6);
 * MonthDay updated = md.dayOfMonth().setCopy(6);
 * MonthDay updated = md.property(DateTimeFieldType.dayOfMonth()).setCopy(6);
 * </pre>
 *
 * @param fieldType the field type to set, not null
 * @param value the value to set
 * @return a copy of this instance with the field set, never null
 * @throws IllegalArgumentException if the value is null or invalid
 */
public MonthDay withField(DateTimeFieldType fieldType, int value) {
    int index = indexOfSupported(fieldType);
    if (value == getValue(index)) {
        // nothing to change, immutability lets us share this instance
        return this;
    }
    int[] values = getField(index).set(this, index, getValues(), value);
    return new MonthDay(this, values);
}
/**
 * Returns a copy of this month-day with the value of the specified field increased.
 * <p>
 * If the addition is zero, then <code>this</code> is returned.
 * <p>
 * These three lines are equivalent:
 * <pre>
 * MonthDay added = md.withFieldAdded(DurationFieldType.days(), 6);
 * MonthDay added = md.plusDays(6);
 * MonthDay added = md.dayOfMonth().addToCopy(6);
 * </pre>
 *
 * @param fieldType the field type to add to, not null
 * @param amount the amount to add
 * @return a copy of this instance with the field updated, never null
 * @throws IllegalArgumentException if the value is null or invalid
 * @throws ArithmeticException if the new date-time exceeds the capacity
 */
public MonthDay withFieldAdded(DurationFieldType fieldType, int amount) {
    // validate the field type first, even when the amount is zero
    int index = indexOfSupported(fieldType);
    if (amount == 0) {
        return this;
    }
    int[] values = getField(index).add(this, index, getValues(), amount);
    return new MonthDay(this, values);
}
/**
 * Returns a copy of this month-day with the specified period added.
 * <p>
 * If the addition is zero, then <code>this</code> is returned.
 * Fields in the period that aren't present in the partial are ignored.
 * <p>
 * This method is typically used to add multiple copies of complex
 * period instances. Adding one field is best achieved using methods
 * like {@link #withFieldAdded(DurationFieldType, int)}
 * or {@link #plusMonths(int)}.
 *
 * @param period the period to add to this one, null means zero
 * @param scalar the amount of times to add, such as -1 to subtract once
 * @return a copy of this instance with the period added, never null
 * @throws ArithmeticException if the new date-time exceeds the capacity
 */
public MonthDay withPeriodAdded(ReadablePeriod period, int scalar) {
    if (period == null || scalar == 0) {
        return this;
    }
    int[] values = getValues();
    for (int i = 0; i < period.size(); i++) {
        int index = indexOf(period.getFieldType(i));
        if (index < 0) {
            continue;  // period fields not supported by MonthDay are ignored
        }
        int amount = FieldUtils.safeMultiply(period.getValue(i), scalar);
        values = getField(index).add(this, index, values, amount);
    }
    return new MonthDay(this, values);
}
//-----------------------------------------------------------------------
/**
 * Returns a copy of this month-day with the specified period added.
 * <p>
 * If the amount is zero or null, then <code>this</code> is returned.
 * <p>
 * This method is typically used to add complex period instances.
 * Adding one field is best achieved using methods
 * like {@link #plusMonths(int)}.
 *
 * @param period the duration to add to this one, null means zero
 * @return a copy of this instance with the period added, never null
 * @throws ArithmeticException if the new month-day exceeds the capacity
 */
public MonthDay plus(ReadablePeriod period) {
return withPeriodAdded(period, 1);
}
//-----------------------------------------------------------------------
/**
 * Returns a copy of this month-day plus the specified number of months.
 * <p>
 * This month-day instance is immutable and unaffected by this method call.
 * The month will wrap at the end of the year from December to January.
 * The day will be adjusted to the last valid value if necessary.
 * <p>
 * The following three lines are identical in effect:
 * <pre>
 * MonthDay added = md.plusMonths(6);
 * MonthDay added = md.plus(Period.months(6));
 * MonthDay added = md.withFieldAdded(DurationFieldType.months(), 6);
 * </pre>
 *
 * @param months the amount of months to add, may be negative
 * @return the new month-day plus the specified months, never null
 */
public MonthDay plusMonths(int months) {
return withFieldAdded(DurationFieldType.months(), months);
}
/**
 * Returns a copy of this month-day plus the specified number of days.
 * <p>
 * This month-day instance is immutable and unaffected by this method call.
 * The month will wrap at the end of the year from December to January.
 * <p>
 * The following three lines are identical in effect:
 * <pre>
 * MonthDay added = md.plusDays(6);
 * MonthDay added = md.plus(Period.days(6));
 * MonthDay added = md.withFieldAdded(DurationFieldType.days(), 6);
 * </pre>
 *
 * @param days the amount of days to add, may be negative
 * @return the new month-day plus the specified days, never null
 */
public MonthDay plusDays(int days) {
return withFieldAdded(DurationFieldType.days(), days);
}
//-----------------------------------------------------------------------
/**
 * Returns a copy of this month-day with the specified period taken away.
 * <p>
 * If the amount is zero or null, then <code>this</code> is returned.
 * <p>
 * This method is typically used to subtract complex period instances.
 * Subtracting one field is best achieved using methods
 * like {@link #minusMonths(int)}.
 *
 * @param period the period to reduce this instant by
 * @return a copy of this instance with the period taken away, never null
 * @throws ArithmeticException if the new month-day exceeds the capacity
 */
public MonthDay minus(ReadablePeriod period) {
return withPeriodAdded(period, -1);
}
//-----------------------------------------------------------------------
/**
 * Returns a copy of this month-day minus the specified number of months.
 * <p>
 * This MonthDay instance is immutable and unaffected by this method call.
 * The month will wrap at the end of the year from January to December.
 * The day will be adjusted to the last valid value if necessary.
 * <p>
 * The following three lines are identical in effect:
 * <pre>
 * MonthDay subtracted = md.minusMonths(6);
 * MonthDay subtracted = md.minus(Period.months(6));
 * MonthDay subtracted = md.withFieldAdded(DurationFieldType.months(), -6);
 * </pre>
 *
 * @param months the amount of months to subtract, may be negative
 * @return the new month-day minus the specified months, never null
 */
public MonthDay minusMonths(int months) {
return withFieldAdded(DurationFieldType.months(), FieldUtils.safeNegate(months));
}
/**
 * Returns a copy of this month-day minus the specified number of days.
 * <p>
 * This month-day instance is immutable and unaffected by this method call.
 * The month will wrap at the end of the year from January to December.
 * <p>
 * The following three lines are identical in effect:
 * <pre>
 * MonthDay subtracted = md.minusDays(6);
 * MonthDay subtracted = md.minus(Period.days(6));
 * MonthDay subtracted = md.withFieldAdded(DurationFieldType.days(), -6);
 * </pre>
 *
 * @param days the amount of days to subtract, may be negative
 * @return the new month-day minus the specified days, never null
 */
public MonthDay minusDays(int days) {
return withFieldAdded(DurationFieldType.days(), FieldUtils.safeNegate(days));
}
//-----------------------------------------------------------------------
/**
 * Converts this object to a LocalDate with the same month-day and chronology.
 *
 * @param year the year to use, valid for chronology
 * @return a LocalDate with the same month-day and chronology, never null
 */
public LocalDate toLocalDate(int year) {
return new LocalDate(year, getMonthOfYear(), getDayOfMonth(), getChronology());
}
//-----------------------------------------------------------------------
/**
 * Get the month of year field value.
 *
 * @return the month of year, from 1 to 12
 */
public int getMonthOfYear() {
return getValue(MONTH_OF_YEAR);
}
/**
 * Get the day of month field value.
 *
 * @return the day of month
 */
public int getDayOfMonth() {
return getValue(DAY_OF_MONTH);
}
//-----------------------------------------------------------------------
/**
* Returns a copy of this month-day with the month of year field updated.
* <p>
* MonthDay is immutable, so there are no set methods.
* Instead, this method returns a new instance with the value of
* month of year changed.
*
* @param monthOfYear the month of year to set
* @return a copy of this object with the field set, never null
* @throws IllegalArgumentException if the value is invalid
*/
public MonthDay withMonthOfYear(int monthOfYear) {
    // The chronology's field performs the validation and returns an adjusted
    // copy of the value array; this instance is never mutated.
    int[] values = getChronology().monthOfYear()
            .set(this, MONTH_OF_YEAR, getValues(), monthOfYear);
    return new MonthDay(this, values);
}
/**
* Returns a copy of this month-day with the day of month field updated.
* <p>
* MonthDay is immutable, so there are no set methods.
* Instead, this method returns a new instance with the value of
* day of month changed.
*
* @param dayOfMonth the day of month to set
* @return a copy of this object with the field set, never null
* @throws IllegalArgumentException if the value is invalid
*/
public MonthDay withDayOfMonth(int dayOfMonth) {
    // The chronology's field performs the validation and returns an adjusted
    // copy of the value array; this instance is never mutated.
    int[] values = getChronology().dayOfMonth()
            .set(this, DAY_OF_MONTH, getValues(), dayOfMonth);
    return new MonthDay(this, values);
}
//-----------------------------------------------------------------------
/**
* Gets the property object for the specified type, which contains
* many useful methods.
*
* @param type the field type to get the property for
* @return the property object
* @throws IllegalArgumentException if the field is null or unsupported
*/
public Property property(DateTimeFieldType type) {
    // indexOfSupported throws IllegalArgumentException for null or unsupported types.
    return new Property(this, indexOfSupported(type));
}
//-----------------------------------------------------------------------
/**
* Get the month of year field property which provides access to advanced functionality.
*
* @return the month of year property
*/
public Property monthOfYear() {
    // Bind this partial to its month-of-year field (index MONTH_OF_YEAR).
    return new Property(this, MONTH_OF_YEAR);
}
/**
* Get the day of month field property which provides access to advanced functionality.
*
* @return the day of month property
*/
public Property dayOfMonth() {
    // Bind this partial to its day-of-month field (index DAY_OF_MONTH).
    return new Property(this, DAY_OF_MONTH);
}
//-----------------------------------------------------------------------
/**
* Output the month-day in ISO8601 format (--MM-dd).
*
* @return ISO8601 time formatted string.
*/
@ToString
public String toString() {
    // Print only the month and day fields, which yields the ISO8601
    // year-less form --MM-dd.
    List<DateTimeFieldType> fieldTypes = new ArrayList<DateTimeFieldType>(2);
    fieldTypes.add(DateTimeFieldType.monthOfYear());
    fieldTypes.add(DateTimeFieldType.dayOfMonth());
    return ISODateTimeFormat.forFields(fieldTypes, true, true).print(this);
}
/**
* Output the month-day using the specified format pattern.
*
* @param pattern the pattern specification, null means use <code>toString</code>
* @see org.joda.time.format.DateTimeFormat
*/
public String toString(String pattern) {
    // A null pattern falls back to the canonical ISO8601 form.
    return (pattern == null)
            ? toString()
            : DateTimeFormat.forPattern(pattern).print(this);
}
/**
* Output the month-day using the specified format pattern.
*
* @param pattern the pattern specification, null means use <code>toString</code>
* @param locale Locale to use, null means default
* @see org.joda.time.format.DateTimeFormat
*/
public String toString(String pattern, Locale locale) throws IllegalArgumentException {
    // A null pattern falls back to the canonical ISO8601 form; a null locale
    // is handled by the formatter as the default locale.
    if (pattern != null) {
        return DateTimeFormat.forPattern(pattern).withLocale(locale).print(this);
    }
    return toString();
}
//-----------------------------------------------------------------------
/**
* The property class for <code>MonthDay</code>.
* <p>
 * This class binds a <code>MonthDay</code> to a <code>DateTimeField</code>.
*
* @author Chris Pheby
* @since 2.0
*/
public static class Property extends AbstractPartialFieldProperty implements Serializable {

    /** Serialization version. */
    private static final long serialVersionUID = 5727734012190224363L;

    /** The MonthDay this property is bound to. */
    private final MonthDay iBase;
    /** The index of the bound field within the partial's value array. */
    private final int iFieldIndex;

    /**
     * Constructs a property binding a {@code MonthDay} to one of its fields.
     *
     * @param partial the partial instance
     * @param fieldIndex the index in the partial
     */
    Property(MonthDay partial, int fieldIndex) {
        super();
        iBase = partial;
        iFieldIndex = fieldIndex;
    }

    /**
     * Gets the field that this property uses.
     *
     * @return the field
     */
    public DateTimeField getField() {
        return iBase.getField(iFieldIndex);
    }

    /**
     * Gets the partial that this property belongs to.
     *
     * @return the partial
     */
    protected ReadablePartial getReadablePartial() {
        return iBase;
    }

    /**
     * Gets the month-day that this property belongs to.
     *
     * @return the partial
     */
    public MonthDay getMonthDay() {
        return iBase;
    }

    /**
     * Gets the value of the bound field.
     *
     * @return the field value
     */
    public int get() {
        return iBase.getValue(iFieldIndex);
    }

    //-----------------------------------------------------------------------
    /**
     * Adds to the value of this field in a copy of this MonthDay.
     * <p>
     * If the value is too large to be held solely by this field it will
     * affect larger fields; smaller fields are unaffected. The MonthDay
     * attached to this property is immutable and unchanged by this call;
     * a new instance is returned.
     *
     * @param valueToAdd the value to add to the field in the copy
     * @return a copy of the MonthDay with the field value changed
     * @throws IllegalArgumentException if the value isn't valid
     */
    public MonthDay addToCopy(int valueToAdd) {
        // getValues() returns a fresh array, so the add never mutates iBase.
        int[] values = getField().add(iBase, iFieldIndex, iBase.getValues(), valueToAdd);
        return new MonthDay(iBase, values);
    }

    /**
     * Adds to the value of this field in a copy of this MonthDay, wrapping
     * within this field if the maximum value is reached.
     * <p>
     * If the value is too large to be held solely by this field it wraps
     * within this field; other fields are unaffected. For example,
     * <code>--12-30</code> addWrapField one month returns <code>--01-30</code>.
     * The MonthDay attached to this property is unchanged; a new instance
     * is returned.
     *
     * @param valueToAdd the value to add to the field in the copy
     * @return a copy of the MonthDay with the field value changed
     * @throws IllegalArgumentException if the value isn't valid
     */
    public MonthDay addWrapFieldToCopy(int valueToAdd) {
        int[] values = getField().addWrapField(iBase, iFieldIndex, iBase.getValues(), valueToAdd);
        return new MonthDay(iBase, values);
    }

    //-----------------------------------------------------------------------
    /**
     * Sets this field in a copy of the MonthDay.
     * <p>
     * The MonthDay attached to this property is unchanged by this call;
     * a new instance is returned.
     *
     * @param value the value to set the field in the copy to
     * @return a copy of the MonthDay with the field value changed
     * @throws IllegalArgumentException if the value isn't valid
     */
    public MonthDay setCopy(int value) {
        int[] values = getField().set(iBase, iFieldIndex, iBase.getValues(), value);
        return new MonthDay(iBase, values);
    }

    /**
     * Sets this field in a copy of the MonthDay to a parsed text value.
     * <p>
     * The MonthDay attached to this property is unchanged by this call;
     * a new instance is returned.
     *
     * @param text the text value to set
     * @param locale optional locale to use for selecting a text symbol
     * @return a copy of the MonthDay with the field value changed
     * @throws IllegalArgumentException if the text value isn't valid
     */
    public MonthDay setCopy(String text, Locale locale) {
        int[] values = getField().set(iBase, iFieldIndex, iBase.getValues(), text, locale);
        return new MonthDay(iBase, values);
    }

    /**
     * Sets this field in a copy of the MonthDay to a parsed text value,
     * using the default locale.
     *
     * @param text the text value to set
     * @return a copy of the MonthDay with the field value changed
     * @throws IllegalArgumentException if the text value isn't valid
     */
    public MonthDay setCopy(String text) {
        return setCopy(text, null);
    }
}
}
| |
/*
* Copyright 2015-2016 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.cordmcast;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Modified;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.onlab.packet.Ethernet;
import org.onlab.packet.IpAddress;
import org.onlab.packet.VlanId;
import org.onosproject.cfg.ComponentConfigService;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.JsonCodec;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.DeviceId;
import org.onosproject.net.config.ConfigFactory;
import org.onosproject.net.config.NetworkConfigEvent;
import org.onosproject.net.config.NetworkConfigListener;
import org.onosproject.net.config.NetworkConfigRegistry;
import org.onosproject.net.config.basics.SubjectFactories;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flowobjective.DefaultForwardingObjective;
import org.onosproject.net.flowobjective.DefaultNextObjective;
import org.onosproject.net.flowobjective.FlowObjectiveService;
import org.onosproject.net.flowobjective.ForwardingObjective;
import org.onosproject.net.flowobjective.NextObjective;
import org.onosproject.net.flowobjective.Objective;
import org.onosproject.net.flowobjective.ObjectiveContext;
import org.onosproject.net.flowobjective.ObjectiveError;
import org.onosproject.net.mcast.McastEvent;
import org.onosproject.net.mcast.McastListener;
import org.onosproject.net.mcast.McastRoute;
import org.onosproject.net.mcast.McastRouteInfo;
import org.onosproject.net.mcast.MulticastRouteService;
import org.onosproject.olt.AccessDeviceConfig;
import org.onosproject.olt.AccessDeviceData;
import org.onosproject.rest.AbstractWebResource;
import org.osgi.service.component.ComponentContext;
import org.slf4j.Logger;
import javax.ws.rs.core.MediaType;
import java.io.IOException;
import java.util.Dictionary;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.google.common.net.MediaType.JSON_UTF_8;
import static org.onlab.util.Tools.get;
import static org.slf4j.LoggerFactory.getLogger;
/**
* CORD multicast provisioning application. Operates by listening to
* events on the multicast rib and provisioning groups to program multicast
* flows on the dataplane.
*/
@Component(immediate = true)
public class CordMcast {
private static final int DEFAULT_REST_TIMEOUT_MS = 2000;
private static final int DEFAULT_PRIORITY = 500;
private static final short DEFAULT_MCAST_VLAN = 4000;
private static final String DEFAULT_SYNC_HOST = "10.90.0.8:8181";
private static final String DEFAULT_USER = "karaf";
private static final String DEFAULT_PASSWORD = "karaf";
private static final boolean DEFAULT_VLAN_ENABLED = true;
private final Logger log = getLogger(getClass());
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected MulticastRouteService mcastService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected FlowObjectiveService flowObjectiveService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected CoreService coreService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected CodecService codecService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected ComponentConfigService componentConfigService;
@Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
protected NetworkConfigRegistry networkConfig;
protected McastListener listener = new InternalMulticastListener();
private InternalNetworkConfigListener configListener =
new InternalNetworkConfigListener();
//TODO: move this to a ec map
private Map<IpAddress, Integer> groups = Maps.newConcurrentMap();
private ApplicationId appId;
@Property(name = "mcastVlan", intValue = DEFAULT_MCAST_VLAN,
label = "VLAN for multicast traffic")
private int mcastVlan = DEFAULT_MCAST_VLAN;
@Property(name = "vlanEnabled", boolValue = DEFAULT_VLAN_ENABLED,
label = "Use vlan for multicast traffic?")
private boolean vlanEnabled = DEFAULT_VLAN_ENABLED;
@Property(name = "priority", intValue = DEFAULT_PRIORITY,
label = "Priority for multicast rules")
private int priority = DEFAULT_PRIORITY;
@Property(name = "syncHost", value = DEFAULT_SYNC_HOST,
label = "host:port to synchronize routes to")
private String syncHost = DEFAULT_SYNC_HOST;
@Property(name = "username", value = DEFAULT_USER,
label = "Username for REST password authentication")
private String user = DEFAULT_USER;
@Property(name = "password", value = DEFAULT_PASSWORD,
label = "Password for REST authentication")
private String password = DEFAULT_PASSWORD;
private String fabricOnosUrl;
private Map<DeviceId, AccessDeviceData> oltData = new ConcurrentHashMap<>();
private static final Class<AccessDeviceConfig> CONFIG_CLASS =
AccessDeviceConfig.class;
private ConfigFactory<DeviceId, AccessDeviceConfig> configFactory =
new ConfigFactory<DeviceId, AccessDeviceConfig>(
SubjectFactories.DEVICE_SUBJECT_FACTORY, CONFIG_CLASS, "accessDevice") {
@Override
public AccessDeviceConfig createConfig() {
return new AccessDeviceConfig();
}
};
@Activate
public void activate(ComponentContext context) {
componentConfigService.registerProperties(getClass());
modified(context);
appId = coreService.registerApplication("org.onosproject.cordmcast");
clearRemoteRoutes();
networkConfig.registerConfigFactory(configFactory);
networkConfig.addListener(configListener);
networkConfig.getSubjects(DeviceId.class, AccessDeviceConfig.class).forEach(
subject -> {
AccessDeviceConfig config = networkConfig.getConfig(subject, AccessDeviceConfig.class);
if (config != null) {
AccessDeviceData data = config.getOlt();
oltData.put(data.deviceId(), data);
}
}
);
mcastService.addListener(listener);
mcastService.getRoutes().stream()
.map(r -> new ImmutablePair<>(r, mcastService.fetchSinks(r)))
.filter(pair -> pair.getRight() != null && !pair.getRight().isEmpty())
.forEach(pair -> pair.getRight().forEach(sink -> provisionGroup(pair.getLeft(),
sink)));
log.info("Started");
}
@Deactivate
public void deactivate() {
componentConfigService.unregisterProperties(getClass(), false);
mcastService.removeListener(listener);
networkConfig.unregisterConfigFactory(configFactory);
networkConfig.removeListener(configListener);
log.info("Stopped");
}
@Modified
public void modified(ComponentContext context) {
Dictionary<?, ?> properties = context != null ? context.getProperties() : new Properties();
try {
String s = get(properties, "username");
user = isNullOrEmpty(s) ? DEFAULT_USER : s.trim();
s = get(properties, "password");
password = isNullOrEmpty(s) ? DEFAULT_PASSWORD : s.trim();
s = get(properties, "mcastVlan");
mcastVlan = isNullOrEmpty(s) ? DEFAULT_MCAST_VLAN : Short.parseShort(s.trim());
s = get(properties, "vlanEnabled");
vlanEnabled = isNullOrEmpty(s) ? DEFAULT_VLAN_ENABLED : Boolean.parseBoolean(s.trim());
s = get(properties, "priority");
priority = isNullOrEmpty(s) ? DEFAULT_PRIORITY : Integer.parseInt(s.trim());
s = get(properties, "syncHost");
syncHost = isNullOrEmpty(s) ? DEFAULT_SYNC_HOST : s.trim();
} catch (Exception e) {
user = DEFAULT_USER;
password = DEFAULT_PASSWORD;
syncHost = DEFAULT_SYNC_HOST;
mcastVlan = DEFAULT_MCAST_VLAN;
vlanEnabled = false;
priority = DEFAULT_PRIORITY;
}
fabricOnosUrl = createRemoteUrl(syncHost);
}
private static String createRemoteUrl(String remoteHost) {
return "http://" + remoteHost + "/onos/v1/mcast";
}
private class InternalMulticastListener implements McastListener {
@Override
public void event(McastEvent event) {
McastRouteInfo info = event.subject();
switch (event.type()) {
case ROUTE_ADDED:
break;
case ROUTE_REMOVED:
break;
case SOURCE_ADDED:
break;
case SINK_ADDED:
if (!info.sink().isPresent()) {
log.warn("No sink given after sink added event: {}", info);
return;
}
provisionGroup(info.route(), info.sink().get());
break;
case SINK_REMOVED:
unprovisionGroup(event.subject());
break;
default:
log.warn("Unknown mcast event {}", event.type());
}
}
}
private void unprovisionGroup(McastRouteInfo info) {
if (info.sinks().isEmpty()) {
removeRemoteRoute(info.route());
}
if (!info.sink().isPresent()) {
log.warn("No sink given after sink removed event: {}", info);
return;
}
ConnectPoint loc = info.sink().get();
NextObjective next = DefaultNextObjective.builder()
.fromApp(appId)
.addTreatment(DefaultTrafficTreatment.builder().setOutput(loc.port()).build())
.withType(NextObjective.Type.BROADCAST)
.withId(groups.get(info.route().group()))
.removeFromExisting(new ObjectiveContext() {
@Override
public void onSuccess(Objective objective) {
//TODO: change to debug
log.info("Next Objective {} installed", objective.id());
}
@Override
public void onError(Objective objective, ObjectiveError error) {
//TODO: change to debug
log.info("Next Objective {} failed, because {}",
objective.id(),
error);
}
});
flowObjectiveService.next(loc.deviceId(), next);
}
private void provisionGroup(McastRoute route, ConnectPoint sink) {
checkNotNull(route, "Route cannot be null");
checkNotNull(sink, "Sink cannot be null");
AccessDeviceData oltInfo = oltData.get(sink.deviceId());
if (oltInfo == null) {
log.warn("Unknown OLT device : {}", sink.deviceId());
return;
}
final AtomicBoolean sync = new AtomicBoolean(false);
Integer nextId = groups.computeIfAbsent(route.group(), (g) -> {
Integer id = flowObjectiveService.allocateNextId();
NextObjective next = DefaultNextObjective.builder()
.fromApp(appId)
.addTreatment(DefaultTrafficTreatment.builder().setOutput(sink.port()).build())
.withType(NextObjective.Type.BROADCAST)
.withId(id)
.add(new ObjectiveContext() {
@Override
public void onSuccess(Objective objective) {
//TODO: change to debug
log.info("Next Objective {} installed", objective.id());
}
@Override
public void onError(Objective objective, ObjectiveError error) {
//TODO: change to debug
log.info("Next Objective {} failed, because {}",
objective.id(),
error);
}
});
flowObjectiveService.next(sink.deviceId(), next);
TrafficSelector.Builder mcast = DefaultTrafficSelector.builder()
.matchInPort(oltInfo.uplink())
.matchEthType(Ethernet.TYPE_IPV4)
.matchIPDst(g.toIpPrefix());
if (vlanEnabled) {
mcast.matchVlanId(VlanId.vlanId((short) mcastVlan));
}
ForwardingObjective fwd = DefaultForwardingObjective.builder()
.fromApp(appId)
.nextStep(id)
.makePermanent()
.withFlag(ForwardingObjective.Flag.VERSATILE)
.withPriority(priority)
.withSelector(mcast.build())
.add(new ObjectiveContext() {
@Override
public void onSuccess(Objective objective) {
//TODO: change to debug
log.info("Forwarding objective installed {}", objective);
}
@Override
public void onError(Objective objective, ObjectiveError error) {
//TODO: change to debug
log.info("Forwarding objective failed {}", objective);
}
});
flowObjectiveService.forward(sink.deviceId(), fwd);
sync.set(true);
return id;
});
if (!sync.get()) {
NextObjective next = DefaultNextObjective.builder()
.fromApp(appId)
.addTreatment(DefaultTrafficTreatment.builder().setOutput(sink.port()).build())
.withType(NextObjective.Type.BROADCAST)
.withId(nextId)
.addToExisting(new ObjectiveContext() {
@Override
public void onSuccess(Objective objective) {
//TODO: change to debug
log.info("Next Objective {} installed", objective.id());
}
@Override
public void onError(Objective objective, ObjectiveError error) {
//TODO: change to debug
log.info("Next Objective {} failed, because {}",
objective.id(),
error);
}
});
flowObjectiveService.next(sink.deviceId(), next);
}
addRemoteRoute(route);
}
private void addRemoteRoute(McastRoute route) {
checkNotNull(route);
if (syncHost == null) {
log.warn("No host configured for synchronization; route will be dropped");
return;
}
log.debug("Sending route {} to other ONOS {}", route, fabricOnosUrl);
WebResource.Builder builder = getClientBuilder(fabricOnosUrl);
ObjectNode json = codecService.getCodec(McastRoute.class)
.encode(route, new AbstractWebResource());
try {
builder.post(json.toString());
} catch (ClientHandlerException e) {
log.warn("Unable to send route to remote controller: {}", e.getMessage());
}
}
private void removeRemoteRoute(McastRoute route) {
if (syncHost == null) {
log.warn("No host configured for synchronization; route will be dropped");
return;
}
log.debug("Removing route {} from other ONOS {}", route, fabricOnosUrl);
WebResource.Builder builder = getClientBuilder(fabricOnosUrl);
ObjectNode json = codecService.getCodec(McastRoute.class)
.encode(route, new AbstractWebResource());
try {
builder.delete(json.toString());
} catch (ClientHandlerException e) {
log.warn("Unable to delete route from remote controller: {}", e.getMessage());
}
}
private void clearRemoteRoutes() {
if (syncHost == null) {
log.warn("No host configured for synchronization");
return;
}
log.debug("Clearing remote multicast routes from {}", fabricOnosUrl);
WebResource.Builder builder = getClientBuilder(fabricOnosUrl);
List<McastRoute> mcastRoutes = Lists.newArrayList();
try {
String response = builder
.accept(MediaType.APPLICATION_JSON_TYPE)
.get(String.class);
JsonCodec<McastRoute> routeCodec = codecService.getCodec(McastRoute.class);
ObjectMapper mapper = new ObjectMapper();
ObjectNode node = (ObjectNode) mapper.readTree(response);
ArrayNode list = (ArrayNode) node.path("routes");
list.forEach(n -> mcastRoutes.add(
routeCodec.decode((ObjectNode) n, new AbstractWebResource())));
} catch (ClientHandlerException e) {
log.warn("Unable to clear routes from remote controller: {}", e.getMessage());
} catch (IOException e) {
log.warn("Error clearing remote routes", e);
}
mcastRoutes.forEach(this::removeRemoteRoute);
}
private WebResource.Builder getClientBuilder(String uri) {
Client client = Client.create();
client.setConnectTimeout(DEFAULT_REST_TIMEOUT_MS);
client.setReadTimeout(DEFAULT_REST_TIMEOUT_MS);
client.addFilter(new HTTPBasicAuthFilter(user, password));
WebResource resource = client.resource(uri);
return resource.accept(JSON_UTF_8.toString())
.type(JSON_UTF_8.toString());
}
private class InternalNetworkConfigListener implements NetworkConfigListener {
@Override
public void event(NetworkConfigEvent event) {
switch (event.type()) {
case CONFIG_ADDED:
case CONFIG_UPDATED:
AccessDeviceConfig config =
networkConfig.getConfig((DeviceId) event.subject(), CONFIG_CLASS);
if (config != null) {
oltData.put(config.getOlt().deviceId(), config.getOlt());
}
break;
case CONFIG_REGISTERED:
case CONFIG_UNREGISTERED:
break;
case CONFIG_REMOVED:
oltData.remove(event.subject());
break;
default:
break;
}
}
@Override
public boolean isRelevant(NetworkConfigEvent event) {
return event.configClass().equals(CONFIG_CLASS);
}
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import com.facebook.buck.event.BuckEventBusForTests;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultSourcePathResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.TestExecutionContext;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.util.FakeProcess;
import com.facebook.buck.util.FakeProcessExecutor;
import com.google.common.base.Functions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Paths;
import java.util.Optional;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class JavacStepTest {

  @Rule public ExpectedException thrown = ExpectedException.none();

  /** Default javac options shared by the tests that don't set a bootclasspath. */
  private static JavacOptions defaultOptions() {
    return JavacOptions.builder().setSourceLevel("8.0").setTargetLevel("8.0").build();
  }

  /**
   * Builds a ClasspathChecker whose directory-existence check always returns
   * {@code dirExists}; files never exist and globs expand to nothing.
   */
  private static ClasspathChecker classpathChecker(boolean dirExists) {
    return new ClasspathChecker(
        "/", ":", Paths::get, dir -> dirExists, file -> false, (path, glob) -> ImmutableSet.of());
  }

  /**
   * Builds a JavacStep with a fresh fake javac, fake filesystem, and standard
   * test arguments; only the javac options and classpath checker vary per test.
   */
  private static JavacStep createStep(JavacOptions javacOptions, ClasspathChecker classpathChecker)
      throws Exception {
    BuildRuleResolver buildRuleResolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(buildRuleResolver);
    SourcePathResolver sourcePathResolver = DefaultSourcePathResolver.from(ruleFinder);
    ProjectFilesystem fakeFilesystem = FakeProjectFilesystem.createJavaOnlyFilesystem();
    return new JavacStep(
        Paths.get("output"),
        NoOpClassUsageFileWriter.instance(),
        Optional.empty(),
        Optional.empty(),
        ImmutableSortedSet.of(),
        Paths.get("pathToSrcsList"),
        ImmutableSortedSet.of(),
        new FakeJavac(),
        javacOptions,
        BuildTargetFactory.newInstance("//foo:bar"),
        sourcePathResolver,
        fakeFilesystem,
        classpathChecker,
        Optional.empty(),
        null);
  }

  /** Creates an execution context whose process executor always runs the given fake javac. */
  private static ExecutionContext createExecutionContext(FakeProcess fakeJavacProcess) {
    return TestExecutionContext.newBuilder()
        .setProcessExecutor(
            new FakeProcessExecutor(Functions.constant(fakeJavacProcess), new TestConsole()))
        .build();
  }

  /** Registers and returns a console-event capturing listener on the context's event bus. */
  private static BuckEventBusForTests.CapturingConsoleEventListener captureConsoleEvents(
      ExecutionContext executionContext) {
    BuckEventBusForTests.CapturingConsoleEventListener listener =
        new BuckEventBusForTests.CapturingConsoleEventListener();
    executionContext.getBuckEventBus().register(listener);
    return listener;
  }

  @Test
  public void successfulCompileDoesNotSendStdoutAndStderrToConsole() throws Exception {
    JavacStep step = createStep(defaultOptions(), classpathChecker(false));

    ExecutionContext executionContext =
        createExecutionContext(new FakeProcess(0, "javac stdout\n", "javac stderr\n"));
    BuckEventBusForTests.CapturingConsoleEventListener listener =
        captureConsoleEvents(executionContext);
    StepExecutionResult result = step.execute(executionContext);

    // Note that we don't include stderr in the step result on success.
    assertThat(result, equalTo(StepExecutionResult.SUCCESS));
    assertThat(listener.getLogMessages(), empty());
  }

  @Test
  public void failedCompileSendsStdoutAndStderrToConsole() throws Exception {
    JavacStep step = createStep(defaultOptions(), classpathChecker(false));

    ExecutionContext executionContext =
        createExecutionContext(new FakeProcess(1, "javac stdout\n", "javac stderr\n"));
    BuckEventBusForTests.CapturingConsoleEventListener listener =
        captureConsoleEvents(executionContext);
    StepExecutionResult result = step.execute(executionContext);

    // JavacStep itself writes stdout to the console on error; we expect the Build class to write
    // the stderr stream returned in the StepExecutionResult
    assertThat(result, equalTo(StepExecutionResult.of(1, Optional.of("javac stderr\n"))));
    assertThat(listener.getLogMessages(), equalTo(ImmutableList.of("javac stdout\n")));
  }

  @Test
  public void existingBootclasspathDirSucceeds() throws Exception {
    JavacOptions javacOptions =
        JavacOptions.builder()
            .setSourceLevel("8.0")
            .setTargetLevel("8.0")
            .setBootclasspath("/this-totally-exists")
            .build();
    // Directory-existence check reports true, so the bootclasspath validates.
    JavacStep step = createStep(javacOptions, classpathChecker(true));

    ExecutionContext executionContext =
        createExecutionContext(new FakeProcess(0, "javac stdout\n", "javac stderr\n"));
    BuckEventBusForTests.CapturingConsoleEventListener listener =
        captureConsoleEvents(executionContext);
    StepExecutionResult result = step.execute(executionContext);

    assertThat(result, equalTo(StepExecutionResult.SUCCESS));
    assertThat(listener.getLogMessages(), empty());
  }

  @Test
  public void missingBootclasspathDirFailsWithError() throws Exception {
    JavacOptions javacOptions =
        JavacOptions.builder()
            .setSourceLevel("8.0")
            .setTargetLevel("8.0")
            .setBootclasspath("/no-such-dir")
            .build();
    // Directory-existence check reports false, so validation must fail.
    JavacStep step = createStep(javacOptions, classpathChecker(false));

    ExecutionContext executionContext =
        createExecutionContext(new FakeProcess(1, "javac stdout\n", "javac stderr\n"));
    captureConsoleEvents(executionContext);

    thrown.expectMessage("Bootstrap classpath /no-such-dir contains no valid entries");
    step.execute(executionContext);
  }
}
| |
/**
* OWASP Enterprise Security API (ESAPI)
*
* This file is part of the Open Web Application Security Project (OWASP)
* Enterprise Security API (ESAPI) project. For details, please see
* <a href="http://www.owasp.org/index.php/ESAPI">http://www.owasp.org/index.php/ESAPI</a>.
*
* Copyright (c) 2007 - The OWASP Foundation
*
* The ESAPI is published by OWASP under the BSD license. You should read and accept the
* LICENSE before you use, modify, and/or redistribute this software.
*
* @author Jeff Williams <a href="http://www.aspectsecurity.com">Aspect Security</a>
* @created 2007
*/
package org.owasp.esapi.reference;
import java.io.IOException;
import java.util.Arrays;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.owasp.esapi.ESAPI;
import org.owasp.esapi.Logger;
//import org.owasp.esapi.errors.AuthenticationException;
//import org.owasp.esapi.errors.ValidationException;
//import org.owasp.esapi.http.MockHttpServletRequest;
//import org.owasp.esapi.http.MockHttpServletResponse;
/**
* The Class LoggerTest.
*
* @author Jeff Williams (jeff.williams@aspectsecurity.com)
*/
public class JavaLoggerTest extends TestCase {
private static int testCount = 0;
private static Logger testLogger = null;
/**
* Instantiates a new logger test.
*
* @param testName the test name
*/
public JavaLoggerTest(String testName) {
super(testName);
}
/**
* {@inheritDoc}
* @throws Exception
*/
protected void setUp() throws Exception {
UnitTestSecurityConfiguration tmpConfig = new UnitTestSecurityConfiguration((DefaultSecurityConfiguration) ESAPI.securityConfiguration());
tmpConfig.setLogImplementation( JavaLogFactory.class.getName() );
ESAPI.override(tmpConfig);
//This ensures a clean logger between tests
testLogger = ESAPI.getLogger( "test" + testCount++ );
System.out.println("Test logger: " + testLogger);
}
/**
* {@inheritDoc}
* @throws Exception
*/
protected void tearDown() throws Exception {
//this helps, with garbage collection
testLogger = null;
ESAPI.override(null);
}
/**
* Suite.
*
* @return the test
*/
public static Test suite() {
TestSuite suite = new TestSuite(JavaLoggerTest.class);
return suite;
}
/**
* Test of logHTTPRequest method, of class org.owasp.esapi.Logger.
*
* @throws ValidationException
* the validation exception
* @throws IOException
* Signals that an I/O exception has occurred.
* @throws AuthenticationException
* the authentication exception
*/
// public void testLogHTTPRequest() throws /*ValidationException,*/ IOException/*, AuthenticationException*/ {
// System.out.println("logHTTPRequest");
// String[] ignore = {"password","ssn","ccn"};
// MockHttpServletRequest request = new MockHttpServletRequest();
// MockHttpServletResponse response = new MockHttpServletResponse();
//// ESAPI.httpUtilities().setCurrentHTTP(request, response);
// Logger logger = ESAPI.getLogger("logger");
//// ESAPI.httpUtilities().logHTTPRequest( request, logger, Arrays.asList(ignore) );
// request.addParameter("one","one");
// request.addParameter("two","two1");
// request.addParameter("two","two2");
// request.addParameter("password","jwilliams");
//// ESAPI.httpUtilities().logHTTPRequest( request, logger, Arrays.asList(ignore) );
// }
/**
* Test of setLevel method of the inner class org.owasp.esapi.reference.JavaLogger that is defined in
* org.owasp.esapi.reference.JavaLogFactory.
*/
public void testSetLevel() {
System.out.println("setLevel");
// The following tests that the default logging level is set to WARNING. Since the default might be changed
// in the ESAPI security configuration file, these are commented out.
// assertTrue(testLogger.isWarningEnabled());
// assertFalse(testLogger.isInfoEnabled());
// First, test all the different logging levels
testLogger.setLevel( Logger.ALL );
assertTrue(testLogger.isFatalEnabled());
assertTrue(testLogger.isErrorEnabled());
assertTrue(testLogger.isWarningEnabled());
assertTrue(testLogger.isInfoEnabled());
assertTrue(testLogger.isDebugEnabled());
assertTrue(testLogger.isTraceEnabled());
testLogger.setLevel( Logger.TRACE );
assertTrue(testLogger.isFatalEnabled());
assertTrue(testLogger.isErrorEnabled());
assertTrue(testLogger.isWarningEnabled());
assertTrue(testLogger.isInfoEnabled());
assertTrue(testLogger.isDebugEnabled());
assertTrue(testLogger.isTraceEnabled());
testLogger.setLevel( Logger.DEBUG );
assertTrue(testLogger.isFatalEnabled());
assertTrue(testLogger.isErrorEnabled());
assertTrue(testLogger.isWarningEnabled());
assertTrue(testLogger.isInfoEnabled());
assertTrue(testLogger.isDebugEnabled());
assertFalse(testLogger.isTraceEnabled());
testLogger.setLevel( Logger.INFO );
assertTrue(testLogger.isFatalEnabled());
assertTrue(testLogger.isErrorEnabled());
assertTrue(testLogger.isWarningEnabled());
assertTrue(testLogger.isInfoEnabled());
assertFalse(testLogger.isDebugEnabled());
assertFalse(testLogger.isTraceEnabled());
testLogger.setLevel( Logger.WARNING );
assertTrue(testLogger.isFatalEnabled());
assertTrue(testLogger.isErrorEnabled());
assertTrue(testLogger.isWarningEnabled());
assertFalse(testLogger.isInfoEnabled());
assertFalse(testLogger.isDebugEnabled());
assertFalse(testLogger.isTraceEnabled());
testLogger.setLevel( Logger.ERROR );
assertTrue(testLogger.isFatalEnabled());
assertTrue(testLogger.isErrorEnabled());
assertFalse(testLogger.isWarningEnabled());
assertFalse(testLogger.isInfoEnabled());
assertFalse(testLogger.isDebugEnabled());
assertFalse(testLogger.isTraceEnabled());
testLogger.setLevel( Logger.FATAL );
assertTrue(testLogger.isFatalEnabled());
assertFalse(testLogger.isErrorEnabled());
assertFalse(testLogger.isWarningEnabled());
assertFalse(testLogger.isInfoEnabled());
assertFalse(testLogger.isDebugEnabled());
assertFalse(testLogger.isTraceEnabled());
testLogger.setLevel( Logger.OFF );
assertFalse(testLogger.isFatalEnabled());
assertFalse(testLogger.isErrorEnabled());
assertFalse(testLogger.isWarningEnabled());
assertFalse(testLogger.isInfoEnabled());
assertFalse(testLogger.isDebugEnabled());
assertFalse(testLogger.isTraceEnabled());
//Now test to see if a change to the logging level in one log affects other logs
Logger newLogger = ESAPI.getLogger( "test_num2" );
testLogger.setLevel( Logger.OFF );
newLogger.setLevel( Logger.INFO );
assertFalse(testLogger.isFatalEnabled());
assertFalse(testLogger.isErrorEnabled());
assertFalse(testLogger.isWarningEnabled());
assertFalse(testLogger.isInfoEnabled());
assertFalse(testLogger.isDebugEnabled());
assertFalse(testLogger.isTraceEnabled());
assertTrue(newLogger.isFatalEnabled());
assertTrue(newLogger.isErrorEnabled());
assertTrue(newLogger.isWarningEnabled());
assertTrue(newLogger.isInfoEnabled());
assertFalse(newLogger.isDebugEnabled());
assertFalse(newLogger.isTraceEnabled());
}
/**
* Test of info method, of class org.owasp.esapi.Logger.
*/
public void testInfo() {
System.out.println("info");
testLogger.info(Logger.SECURITY_SUCCESS, "test message" );
testLogger.info(Logger.SECURITY_SUCCESS, "test message", null );
testLogger.info(Logger.SECURITY_SUCCESS, "%3escript%3f test message", null );
testLogger.info(Logger.SECURITY_SUCCESS, "<script> test message", null );
}
/**
* Test of trace method, of class org.owasp.esapi.Logger.
*/
public void testTrace() {
System.out.println("trace");
testLogger.trace(Logger.SECURITY_SUCCESS, "test message trace" );
testLogger.trace(Logger.SECURITY_SUCCESS, "test message trace", null );
}
/**
* Test of debug method, of class org.owasp.esapi.Logger.
*/
public void testDebug() {
System.out.println("debug");
testLogger.debug(Logger.SECURITY_SUCCESS, "test message debug" );
testLogger.debug(Logger.SECURITY_SUCCESS, "test message debug", null );
}
/**
* Test of error method, of class org.owasp.esapi.Logger.
*/
public void testError() {
System.out.println("error");
testLogger.error(Logger.SECURITY_SUCCESS, "test message error" );
testLogger.error(Logger.SECURITY_SUCCESS, "test message error", null );
}
/**
* Test of warning method, of class org.owasp.esapi.Logger.
*/
public void testWarning() {
System.out.println("warning");
testLogger.warning(Logger.SECURITY_SUCCESS, "test message warning" );
testLogger.warning(Logger.SECURITY_SUCCESS, "test message warning", null );
}
/**
* Test of fatal method, of class org.owasp.esapi.Logger.
*/
public void testFatal() {
System.out.println("fatal");
testLogger.fatal(Logger.SECURITY_SUCCESS, "test message fatal" );
testLogger.fatal(Logger.SECURITY_SUCCESS, "test message fatal", null );
}
/**
* Test of always method, of class org.owasp.esapi.Logger.
*/
public void testAlways() {
System.out.println("always");
testLogger.always(Logger.SECURITY_SUCCESS, "test message always 1 (SECURITY_SUCCESS)" );
testLogger.always(Logger.SECURITY_AUDIT, "test message always 2 (SECURITY_AUDIT)" );
testLogger.always(Logger.SECURITY_SUCCESS, "test message always 3 (SECURITY_SUCCESS)", null );
testLogger.always(Logger.SECURITY_AUDIT, "test message always 4 (SECURITY_AUDIT)", null );
try {
throw new RuntimeException("What? You call that a 'throw'? My grandmother throws " +
"better than that and she's been dead for more than 10 years!");
} catch(RuntimeException rtex) {
testLogger.always(Logger.SECURITY_AUDIT, "test message always 5", rtex );
}
}
}
| |
package com.shecc.basedev.pat.entity;
import java.util.Date;
import com.shecc.basedev.core.entity.AbstractEntity;
/**
 * Weekly project status report entity.
 *
 * <p>Plain bean consumed by the persistence layer: field, getter and setter
 * names must stay exactly as generated — including the apparent "projectnae"
 * typo for "projectname" — because the O/R mapping refers to them by name.
 * Every String setter normalises its argument by trimming surrounding
 * whitespace while preserving {@code null}.
 */
public class WeekReport extends AbstractEntity {

    private String contractor;
    private String projectid;
    // NOTE(review): "projectnae" looks like a typo for "projectname"; kept for mapping compatibility.
    private String projectnae;
    private Date startdate;
    private Date enddate;
    private String projectstatus;
    private String workcontent;
    private String followingplan;
    private String problemanalysis;
    private String riskandsolutions;
    // Spare columns reserved for future use.
    private String extension1;
    private String extension2;
    private String extension3;
    private String extension4;
    private String extension5;
    private String extension6;
    private String extension7;
    private String extension8;
    private String extension9;
    private String extension10;

    /** Returns {@code value} with surrounding whitespace removed, or {@code null} for a null input. */
    private static String trimmed(String value) {
        return value == null ? null : value.trim();
    }

    public String getContractor() {
        return contractor;
    }

    public void setContractor(String contractor) {
        this.contractor = trimmed(contractor);
    }

    public String getProjectid() {
        return projectid;
    }

    public void setProjectid(String projectid) {
        this.projectid = trimmed(projectid);
    }

    public String getProjectnae() {
        return projectnae;
    }

    public void setProjectnae(String projectnae) {
        this.projectnae = trimmed(projectnae);
    }

    public Date getStartdate() {
        return startdate;
    }

    public void setStartdate(Date startdate) {
        this.startdate = startdate;
    }

    public Date getEnddate() {
        return enddate;
    }

    public void setEnddate(Date enddate) {
        this.enddate = enddate;
    }

    public String getProjectstatus() {
        return projectstatus;
    }

    public void setProjectstatus(String projectstatus) {
        this.projectstatus = trimmed(projectstatus);
    }

    public String getWorkcontent() {
        return workcontent;
    }

    public void setWorkcontent(String workcontent) {
        this.workcontent = trimmed(workcontent);
    }

    public String getFollowingplan() {
        return followingplan;
    }

    public void setFollowingplan(String followingplan) {
        this.followingplan = trimmed(followingplan);
    }

    public String getProblemanalysis() {
        return problemanalysis;
    }

    public void setProblemanalysis(String problemanalysis) {
        this.problemanalysis = trimmed(problemanalysis);
    }

    public String getRiskandsolutions() {
        return riskandsolutions;
    }

    public void setRiskandsolutions(String riskandsolutions) {
        this.riskandsolutions = trimmed(riskandsolutions);
    }

    public String getExtension1() {
        return extension1;
    }

    public void setExtension1(String extension1) {
        this.extension1 = trimmed(extension1);
    }

    public String getExtension2() {
        return extension2;
    }

    public void setExtension2(String extension2) {
        this.extension2 = trimmed(extension2);
    }

    public String getExtension3() {
        return extension3;
    }

    public void setExtension3(String extension3) {
        this.extension3 = trimmed(extension3);
    }

    public String getExtension4() {
        return extension4;
    }

    public void setExtension4(String extension4) {
        this.extension4 = trimmed(extension4);
    }

    public String getExtension5() {
        return extension5;
    }

    public void setExtension5(String extension5) {
        this.extension5 = trimmed(extension5);
    }

    public String getExtension6() {
        return extension6;
    }

    public void setExtension6(String extension6) {
        this.extension6 = trimmed(extension6);
    }

    public String getExtension7() {
        return extension7;
    }

    public void setExtension7(String extension7) {
        this.extension7 = trimmed(extension7);
    }

    public String getExtension8() {
        return extension8;
    }

    public void setExtension8(String extension8) {
        this.extension8 = trimmed(extension8);
    }

    public String getExtension9() {
        return extension9;
    }

    public void setExtension9(String extension9) {
        this.extension9 = trimmed(extension9);
    }

    public String getExtension10() {
        return extension10;
    }

    public void setExtension10(String extension10) {
        this.extension10 = trimmed(extension10);
    }
}
| |
/**
* Copyright 2010-present Facebook.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.widget;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.res.TypedArray;
import android.support.v4.app.Fragment;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.widget.Button;
import com.facebook.FacebookException;
import com.facebook.Request;
import com.facebook.Response;
import com.facebook.Session;
import com.facebook.SessionDefaultAudience;
import com.facebook.SessionLoginBehavior;
import com.facebook.SessionState;
import com.facebook.android.R;
import com.facebook.internal.SessionAuthorizationType;
import com.facebook.internal.SessionTracker;
import com.facebook.internal.Utility;
import com.facebook.model.GraphUser;
import java.util.Collections;
import java.util.List;
/**
* A Log In/Log Out button that maintains session state and logs
* in/out for the app.
* <p/>
* This control will create and use the active session upon construction
* if it has the available data (if the app ID is specified in the manifest).
* It will also open the active session if it does not require user interaction
* (i.e. if the session is in the {@link com.facebook.SessionState#CREATED_TOKEN_LOADED} state.
* Developers can override the use of the active session by calling
* the {@link #setSession(com.facebook.Session)} method.
*/
public class LoginButton extends Button {
private static final String TAG = LoginButton.class.getName();
// Explicit application id set via setApplicationId(); presumably consumed when a
// session is opened — TODO confirm usage (not visible in this chunk).
private String applicationId = null;
// Tracks the active/overridden Session; created in finishInit() (null in edit mode).
private SessionTracker sessionTracker;
// Most recently fetched user, if fetch_user_info is enabled.
private GraphUser user = null;
private Session userInfoSession = null; // the Session used to fetch the current user info
// Values parsed from the com_facebook_login_view XML attributes (see parseAttributes()).
private boolean confirmLogout;
private boolean fetchUserInfo;
private String loginText;
private String logoutText;
// Notified whenever the button's notion of the current user changes.
private UserInfoChangedCallback userInfoChangedCallback;
// Host fragment, if any; it receives onActivityResult instead of the Activity.
private Fragment parentFragment;
// Login configuration (permissions, audience, behavior, callbacks).
private LoginButtonProperties properties = new LoginButtonProperties();
/**
 * Holder for the button's login configuration: requested permissions (read XOR
 * publish), default audience, login behavior and the session/error callbacks.
 */
static class LoginButtonProperties {
    // Defaults mirror the SDK's standard login configuration.
    private SessionDefaultAudience defaultAudience = SessionDefaultAudience.FRIENDS;
    private List<String> permissions = Collections.<String>emptyList();
    private SessionAuthorizationType authorizationType = null;
    private OnErrorListener onErrorListener;
    private SessionLoginBehavior loginBehavior = SessionLoginBehavior.SSO_WITH_FALLBACK;
    private Session.StatusCallback sessionStatusCallback;

    public void setOnErrorListener(OnErrorListener onErrorListener) {
        this.onErrorListener = onErrorListener;
    }

    public OnErrorListener getOnErrorListener() {
        return onErrorListener;
    }

    public void setDefaultAudience(SessionDefaultAudience defaultAudience) {
        this.defaultAudience = defaultAudience;
    }

    public SessionDefaultAudience getDefaultAudience() {
        return defaultAudience;
    }

    /**
     * Records {@code permissions} as READ permissions. Read and publish
     * permissions are mutually exclusive until clearPermissions() is called.
     */
    public void setReadPermissions(List<String> permissions, Session session) {
        if (SessionAuthorizationType.PUBLISH.equals(authorizationType)) {
            throw new UnsupportedOperationException(
                    "Cannot call setReadPermissions after setPublishPermissions has been called.");
        }
        applyPermissions(permissions, SessionAuthorizationType.READ, session);
    }

    /**
     * Records {@code permissions} as PUBLISH permissions. Read and publish
     * permissions are mutually exclusive until clearPermissions() is called.
     */
    public void setPublishPermissions(List<String> permissions, Session session) {
        if (SessionAuthorizationType.READ.equals(authorizationType)) {
            throw new UnsupportedOperationException(
                    "Cannot call setPublishPermissions after setReadPermissions has been called.");
        }
        applyPermissions(permissions, SessionAuthorizationType.PUBLISH, session);
    }

    // Shared tail of the two setters: validate, then commit list and type together.
    private void applyPermissions(List<String> requested,
            SessionAuthorizationType authType, Session session) {
        if (validatePermissions(requested, authType, session)) {
            this.permissions = requested;
            authorizationType = authType;
        }
    }

    private boolean validatePermissions(List<String> requested,
            SessionAuthorizationType authType, Session currentSession) {
        // Publish permission lists may never be null or empty.
        if (SessionAuthorizationType.PUBLISH.equals(authType)
                && Utility.isNullOrEmpty(requested)) {
            throw new IllegalArgumentException("Permissions for publish actions cannot be null or empty.");
        }
        // Against an already-open session, only subsets of the granted permissions are valid.
        if (currentSession != null && currentSession.isOpened()
                && !Utility.isSubset(requested, currentSession.getPermissions())) {
            Log.e(TAG, "Cannot set additional permissions when session is already open.");
            return false;
        }
        return true;
    }

    List<String> getPermissions() {
        return permissions;
    }

    // NOTE(review): resets permissions to null rather than the emptyList() default,
    // so getPermissions() after a clear returns null — confirm callers expect that.
    public void clearPermissions() {
        permissions = null;
        authorizationType = null;
    }

    public void setLoginBehavior(SessionLoginBehavior loginBehavior) {
        this.loginBehavior = loginBehavior;
    }

    public SessionLoginBehavior getLoginBehavior() {
        return loginBehavior;
    }

    public void setSessionStatusCallback(Session.StatusCallback callback) {
        this.sessionStatusCallback = callback;
    }

    public Session.StatusCallback getSessionStatusCallback() {
        return sessionStatusCallback;
    }
}
/**
 * Specifies a callback interface that will be called when the button's notion of the current
 * user changes (if the fetch_user_info attribute is true for this control).
 */
public interface UserInfoChangedCallback {
    /**
     * Called when the current user changes.
     *
     * @param user the current user, or null if there is no user
     */
    void onUserInfoFetched(GraphUser user);
}
/**
 * Callback interface that will be called when a network or other error is encountered
 * while logging in.
 */
public interface OnErrorListener {
    /**
     * Called when a network or other error is encountered.
     *
     * @param error a FacebookException representing the error that was encountered.
     */
    void onError(FacebookException error);
}
/**
 * Create the LoginButton programmatically (no XML attributes).
 *
 * @see android.view.View#View(android.content.Context)
 */
public LoginButton(Context context) {
    super(context);
    initializeActiveSessionWithCachedToken(context);
    // since onFinishInflate won't be called, we need to finish initialization ourselves
    finishInit();
}
/**
 * Create the LoginButton by inflating from XML.
 *
 * @see android.view.View#View(android.content.Context, android.util.AttributeSet)
 */
public LoginButton(Context context, AttributeSet attrs) {
    super(context, attrs);
    if (attrs.getStyleAttribute() == 0) {
        // apparently there's no method of setting a default style in xml,
        // so in case the users do not explicitly specify a style, we need
        // to use sensible defaults.
        this.setTextColor(getResources().getColor(R.color.com_facebook_loginview_text_color));
        this.setTextSize(TypedValue.COMPLEX_UNIT_PX,
                getResources().getDimension(R.dimen.com_facebook_loginview_text_size));
        this.setPadding(getResources().getDimensionPixelSize(R.dimen.com_facebook_loginview_padding_left),
                getResources().getDimensionPixelSize(R.dimen.com_facebook_loginview_padding_top),
                getResources().getDimensionPixelSize(R.dimen.com_facebook_loginview_padding_right),
                getResources().getDimensionPixelSize(R.dimen.com_facebook_loginview_padding_bottom));
        this.setWidth(getResources().getDimensionPixelSize(R.dimen.com_facebook_loginview_width));
        this.setHeight(getResources().getDimensionPixelSize(R.dimen.com_facebook_loginview_height));
        this.setGravity(Gravity.CENTER);
        if (isInEditMode()) {
            // cannot use a drawable in edit mode, so setting the background color instead
            // of a background resource.
            this.setBackgroundColor(getResources().getColor(R.color.com_facebook_blue));
            // hardcoding in edit mode as getResources().getString() doesn't seem to work in IntelliJ
            loginText = "Log in";
        } else {
            this.setBackgroundResource(R.drawable.com_facebook_loginbutton_blue);
        }
    }
    parseAttributes(attrs);
    // Skip session work in the layout editor; finishInit() runs later via onFinishInflate().
    if (!isInEditMode()) {
        initializeActiveSessionWithCachedToken(context);
    }
}
/**
 * Create the LoginButton by inflating from XML and applying a style.
 * Unlike the two-argument constructor, no default visual styling is applied here
 * because an explicit style is supplied.
 *
 * @see android.view.View#View(android.content.Context, android.util.AttributeSet, int)
 */
public LoginButton(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    parseAttributes(attrs);
    initializeActiveSessionWithCachedToken(context);
}
/**
 * Sets an OnErrorListener for this instance of LoginButton to call into when
 * certain exceptions occur.
 *
 * @param onErrorListener The listener object to set
 */
public void setOnErrorListener(OnErrorListener onErrorListener) {
    // Delegates to the properties holder.
    properties.setOnErrorListener(onErrorListener);
}
/**
 * Returns the current OnErrorListener for this instance of LoginButton.
 *
 * @return The OnErrorListener, or null if none has been set
 */
public OnErrorListener getOnErrorListener() {
    return properties.getOnErrorListener();
}
/**
 * Sets the default audience to use when the session is opened.
 * This value is only useful when specifying write permissions for the native
 * login dialog.
 *
 * @param defaultAudience the default audience value to use
 */
public void setDefaultAudience(SessionDefaultAudience defaultAudience) {
    properties.setDefaultAudience(defaultAudience);
}
/**
 * Gets the default audience to use when the session is opened.
 * This value is only useful when specifying write permissions for the native
 * login dialog.
 *
 * @return the default audience value to use
 */
public SessionDefaultAudience getDefaultAudience() {
    return properties.getDefaultAudience();
}
/**
 * Set the permissions to use when the session is opened. The permissions here
 * can only be read permissions. If any publish permissions are included, the login
 * attempt by the user will fail. The LoginButton can only be associated with either
 * read permissions or publish permissions, but not both. Calling both
 * setReadPermissions and setPublishPermissions on the same instance of LoginButton
 * will result in an exception being thrown unless clearPermissions is called in between.
 * <p/>
 * This method is only meaningful if called before the session is open. If this is called
 * after the session is opened, and the list of permissions passed in is not a subset
 * of the permissions granted during the authorization, it will log an error.
 * <p/>
 * Since the session can be automatically opened when the LoginButton is constructed,
 * it's important to always pass in a consistent set of permissions to this method, or
 * manage the setting of permissions outside of the LoginButton class altogether
 * (by managing the session explicitly).
 *
 * @param permissions the read permissions to use
 *
 * @throws UnsupportedOperationException if setPublishPermissions has been called
 */
public void setReadPermissions(List<String> permissions) {
    // NOTE(review): sessionTracker is created in finishInit(); calling this before the
    // button finished inflating (or in edit mode) would NPE — confirm expected call order.
    properties.setReadPermissions(permissions, sessionTracker.getSession());
}
/**
 * Set the permissions to use when the session is opened. The permissions here
 * should only be publish permissions. If any read permissions are included, the login
 * attempt by the user may fail. The LoginButton can only be associated with either
 * read permissions or publish permissions, but not both. Calling both
 * setReadPermissions and setPublishPermissions on the same instance of LoginButton
 * will result in an exception being thrown unless clearPermissions is called in between.
 * <p/>
 * This method is only meaningful if called before the session is open. If this is called
 * after the session is opened, and the list of permissions passed in is not a subset
 * of the permissions granted during the authorization, it will log an error.
 * <p/>
 * Since the session can be automatically opened when the LoginButton is constructed,
 * it's important to always pass in a consistent set of permissions to this method, or
 * manage the setting of permissions outside of the LoginButton class altogether
 * (by managing the session explicitly).
 *
 * @param permissions the publish permissions to use
 *
 * @throws UnsupportedOperationException if setReadPermissions has been called
 * @throws IllegalArgumentException if permissions is null or empty
 */
public void setPublishPermissions(List<String> permissions) {
    // NOTE(review): sessionTracker is created in finishInit(); calling this before the
    // button finished inflating (or in edit mode) would NPE — confirm expected call order.
    properties.setPublishPermissions(permissions, sessionTracker.getSession());
}
/**
 * Clears the permissions currently associated with this LoginButton,
 * allowing setReadPermissions/setPublishPermissions to be called again.
 */
public void clearPermissions() {
    properties.clearPermissions();
}
/**
 * Sets the login behavior for the session that will be opened. If null is specified,
 * the default ({@link SessionLoginBehavior SessionLoginBehavior.SSO_WITH_FALLBACK}
 * will be used.
 *
 * @param loginBehavior The {@link SessionLoginBehavior SessionLoginBehavior} that
 *                      specifies what behaviors should be attempted during
 *                      authorization.
 */
public void setLoginBehavior(SessionLoginBehavior loginBehavior) {
    properties.setLoginBehavior(loginBehavior);
}
/**
 * Gets the login behavior for the session that will be opened. If null is returned,
 * the default ({@link SessionLoginBehavior SessionLoginBehavior.SSO_WITH_FALLBACK}
 * will be used.
 *
 * @return loginBehavior The {@link SessionLoginBehavior SessionLoginBehavior} that
 *                       specifies what behaviors should be attempted during
 *                       authorization.
 */
public SessionLoginBehavior getLoginBehavior() {
    return properties.getLoginBehavior();
}
/**
 * Set the application ID to be used to open the session.
 * Overrides the manifest-derived application id — presumably consulted when the
 * session is opened; TODO confirm against the click-handler code (not visible here).
 *
 * @param applicationId the application ID to use
 */
public void setApplicationId(String applicationId) {
    this.applicationId = applicationId;
}
/**
 * Gets the callback interface that will be called when the current user changes.
 *
 * @return the callback interface
 */
public UserInfoChangedCallback getUserInfoChangedCallback() {
    return userInfoChangedCallback;
}
/**
 * Sets the callback interface that will be called when the current user changes.
 *
 * @param userInfoChangedCallback the callback interface
 */
public void setUserInfoChangedCallback(UserInfoChangedCallback userInfoChangedCallback) {
    this.userInfoChangedCallback = userInfoChangedCallback;
}
/**
 * Sets the callback interface that will be called whenever the status of the Session
 * associated with this LoginButton changes. Note that updates will only be sent to the
 * callback while the LoginButton is actually attached to a window.
 *
 * @param callback the callback interface
 */
public void setSessionStatusCallback(Session.StatusCallback callback) {
    properties.setSessionStatusCallback(callback);
}
/**
 * Gets the callback interface that will be called whenever the status of the Session
 * associated with this LoginButton changes.
 *
 * @return the callback interface
 */
public Session.StatusCallback getSessionStatusCallback() {
    return properties.getSessionStatusCallback();
}
/**
 * Provides an implementation for {@link android.app.Activity#onActivityResult
 * onActivityResult} that updates the Session based on information returned
 * during the authorization flow. The Activity containing this view
 * should forward the resulting onActivityResult call here to
 * update the Session state based on the contents of the resultCode and
 * data.
 *
 * @param requestCode
 *            The requestCode parameter from the forwarded call. When this
 *            onActivityResult occurs as part of Facebook authorization
 *            flow, this value is the activityCode passed to open or
 *            authorize.
 * @param resultCode
 *            An int containing the resultCode parameter from the forwarded
 *            call.
 * @param data
 *            The Intent passed as the data parameter from the forwarded
 *            call.
 * @return A boolean indicating whether the requestCode matched a pending
 *         authorization request for this Session.
 * @see Session#onActivityResult(android.app.Activity, int, int, android.content.Intent)
 */
public boolean onActivityResult(int requestCode, int resultCode, Intent data) {
    Session session = sessionTracker.getSession();
    if (session != null) {
        // NOTE(review): assumes getContext() is an Activity; a non-Activity context
        // (e.g. a plain ContextWrapper) would throw ClassCastException — confirm hosts.
        return session.onActivityResult((Activity)getContext(), requestCode,
                resultCode, data);
    } else {
        return false;
    }
}
/**
 * Set the Session object to use instead of the active Session. Since a Session
 * cannot be reused, if the user logs out from this Session, and tries to
 * log in again, a new Active Session will be used instead.
 * <p/>
 * If the passed in session is currently opened, this method will also attempt to
 * load some user information for display (if needed).
 *
 * @param newSession the Session object to use
 * @throws FacebookException if errors occur during the loading of user information
 */
public void setSession(Session newSession) {
    sessionTracker.setSession(newSession);
    // Refresh cached user info and the login/logout label for the new session.
    fetchUserInfo();
    setButtonText();
}
// Completes initialization once XML inflation is done (the XML constructors defer to here).
@Override
public void onFinishInflate() {
    super.onFinishInflate();
    finishInit();
}
// Shared tail of construction: installs the click listener and label; outside the
// layout editor it also creates the SessionTracker and kicks off a user-info fetch.
private void finishInit() {
    setOnClickListener(new LoginClickListener());
    setButtonText();
    if (!isInEditMode()) {
        // sessionTracker stays null in edit mode; session-dependent methods guard on that.
        sessionTracker = new SessionTracker(getContext(), new LoginButtonCallback(), null, false);
        fetchUserInfo();
    }
}
/**
 * Sets the fragment that contains this control. This allows the LoginButton to be
 * embedded inside a Fragment, and will allow the fragment to receive the
 * {@link Fragment#onActivityResult(int, int, android.content.Intent) onActivityResult}
 * call rather than the Activity.
 *
 * @param fragment the fragment that contains this control
 */
public void setFragment(Fragment fragment) {
    parentFragment = fragment;
}
// Resume session tracking when the view is (re)attached to a window, and bring
// the cached user info and the button label back in sync with the session.
@Override
protected void onAttachedToWindow() {
    super.onAttachedToWindow();
    if (sessionTracker != null && !sessionTracker.isTracking()) {
        sessionTracker.startTracking();
        fetchUserInfo();
        setButtonText();
    }
}
// Stop tracking session changes while detached so the view does not receive
// callbacks (and leak) after it leaves the window.
@Override
protected void onDetachedFromWindow() {
    super.onDetachedFromWindow();
    if (sessionTracker != null) {
        sessionTracker.stopTracking();
    }
}
// For testing purposes only
// Package-private accessor exposing the currently configured permissions.
List<String> getPermissions() {
    return properties.getPermissions();
}
// Replaces the whole property bag (permissions, callbacks, behaviors) at once.
void setProperties(LoginButtonProperties properties) {
    this.properties = properties;
}
/**
 * Reads the com_facebook_login_view styleable attributes into fields.
 * The TypedArray is recycled in a finally block so it is always returned to
 * the pool, even if one of the attribute reads throws.
 *
 * @param attrs the attribute set supplied during XML inflation
 */
private void parseAttributes(AttributeSet attrs) {
    TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.com_facebook_login_view);
    try {
        confirmLogout = a.getBoolean(R.styleable.com_facebook_login_view_confirm_logout, true);
        fetchUserInfo = a.getBoolean(R.styleable.com_facebook_login_view_fetch_user_info, true);
        loginText = a.getString(R.styleable.com_facebook_login_view_login_text);
        logoutText = a.getString(R.styleable.com_facebook_login_view_logout_text);
    } finally {
        a.recycle();
    }
}
/**
 * Updates the button label: a "log out" caption while a session is open, a
 * "log in" caption otherwise. Custom text set via attributes takes precedence
 * over the default resource strings.
 */
private void setButtonText() {
    boolean sessionOpen = sessionTracker != null && sessionTracker.getOpenSession() != null;
    String text;
    if (sessionOpen) {
        text = (logoutText != null)
                ? logoutText
                : getResources().getString(R.string.com_facebook_loginview_log_out_button);
    } else {
        text = (loginText != null)
                ? loginText
                : getResources().getString(R.string.com_facebook_loginview_log_in_button);
    }
    setText(text);
}
/**
 * Tries to make sure there is an opened active Session, using a cached token
 * when necessary.
 *
 * @param context a Context; may be null, in which case nothing is attempted
 * @return true if an opened active Session exists or could be opened from cache
 */
private boolean initializeActiveSessionWithCachedToken(Context context) {
    if (context == null) {
        return false;
    }
    // An existing active session wins; report whether it is usable.
    Session active = Session.getActiveSession();
    if (active != null) {
        return active.isOpened();
    }
    // Without an application id from metadata we cannot open from cache.
    if (Utility.getMetadataApplicationId(context) == null) {
        return false;
    }
    return Session.openActiveSessionFromCache(context) != null;
}
/**
 * Asynchronously loads the current user's profile (a Graph "me" request) when
 * fetchUserInfo is enabled, caching the result in {@code user} and notifying
 * {@code userInfoChangedCallback}. With no open session the cached user is
 * cleared and the callback is notified with null.
 */
private void fetchUserInfo() {
    if (fetchUserInfo) {
        final Session currentSession = sessionTracker.getOpenSession();
        if (currentSession != null) {
            // Only issue a new request when the open session changed since the
            // last fetch; userInfoSession remembers the last session queried.
            if (currentSession != userInfoSession) {
                Request request = Request.newMeRequest(currentSession, new Request.GraphUserCallback() {
                    @Override
                    public void onCompleted(GraphUser me, Response response) {
                        // Guard against the session changing while the request was in flight.
                        if (currentSession == sessionTracker.getOpenSession()) {
                            user = me;
                            if (userInfoChangedCallback != null) {
                                userInfoChangedCallback.onUserInfoFetched(user);
                            }
                        }
                        if (response.getError() != null) {
                            handleError(response.getError().getException());
                        }
                    }
                });
                Request.executeBatchAsync(request);
                userInfoSession = currentSession;
            }
        } else {
            user = null;
            if (userInfoChangedCallback != null) {
                userInfoChangedCallback.onUserInfoFetched(user);
            }
        }
    }
}
/**
 * Click handler implementing the button's toggle behavior: with an open
 * session a click logs out (optionally after a confirmation dialog); with no
 * open session a click starts the login/authorization flow.
 */
private class LoginClickListener implements OnClickListener {
    @Override
    public void onClick(View v) {
        Context context = getContext();
        final Session openSession = sessionTracker.getOpenSession();
        if (openSession != null) {
            // If the Session is currently open, it must mean we need to log out
            if (confirmLogout) {
                // Create a confirmation dialog
                String logout = getResources().getString(R.string.com_facebook_loginview_log_out_action);
                String cancel = getResources().getString(R.string.com_facebook_loginview_cancel_action);
                String message;
                // Personalize the message when the user's name is known.
                if (user != null && user.getName() != null) {
                    message = String.format(getResources().getString(R.string.com_facebook_loginview_logged_in_as), user.getName());
                } else {
                    message = getResources().getString(R.string.com_facebook_loginview_logged_in_using_facebook);
                }
                AlertDialog.Builder builder = new AlertDialog.Builder(context);
                builder.setMessage(message)
                        .setCancelable(true)
                        .setPositiveButton(logout, new DialogInterface.OnClickListener() {
                            public void onClick(DialogInterface dialog, int which) {
                                // Confirmed: close the session and forget the token.
                                openSession.closeAndClearTokenInformation();
                            }
                        })
                        .setNegativeButton(cancel, null);
                builder.create().show();
            } else {
                openSession.closeAndClearTokenInformation();
            }
        } else {
            Session currentSession = sessionTracker.getSession();
            // A closed Session cannot be reused; replace it with a fresh one.
            if (currentSession == null || currentSession.getState().isClosed()) {
                sessionTracker.setSession(null);
                Session session = new Session.Builder(context).setApplicationId(applicationId).build();
                Session.setActiveSession(session);
                currentSession = session;
            }
            if (!currentSession.isOpened()) {
                // Prefer the hosting Fragment for the open request so it (not the
                // Activity) receives onActivityResult.
                Session.OpenRequest openRequest = null;
                if (parentFragment != null) {
                    openRequest = new Session.OpenRequest(parentFragment);
                } else if (context instanceof Activity) {
                    openRequest = new Session.OpenRequest((Activity)context);
                }
                if (openRequest != null) {
                    openRequest.setDefaultAudience(properties.defaultAudience);
                    openRequest.setPermissions(properties.permissions);
                    openRequest.setLoginBehavior(properties.loginBehavior);
                    // Publish vs read permissions require different open calls.
                    if (SessionAuthorizationType.PUBLISH.equals(properties.authorizationType)) {
                        currentSession.openForPublish(openRequest);
                    } else {
                        currentSession.openForRead(openRequest);
                    }
                }
            }
        }
    }
}
/**
 * Session status callback registered with the SessionTracker: keeps the cached
 * user info and the button label in sync with session state, surfaces errors,
 * and forwards the notification to any externally configured callback.
 */
private class LoginButtonCallback implements Session.StatusCallback {
    @Override
    public void call(Session session, SessionState state,
            Exception exception) {
        fetchUserInfo();
        setButtonText();
        if (exception != null) {
            handleError(exception);
        }
        if (properties.sessionStatusCallback != null) {
            properties.sessionStatusCallback.call(session, state, exception);
        }
    }
};
/**
 * Delivers an exception to the configured OnErrorListener, wrapping non-
 * Facebook exceptions in a FacebookException first. No-op when no listener
 * is set.
 *
 * @param exception the error to report
 */
void handleError(Exception exception) {
    if (properties.onErrorListener == null) {
        return;
    }
    FacebookException reported = (exception instanceof FacebookException)
            ? (FacebookException) exception
            : new FacebookException(exception);
    properties.onErrorListener.onError(reported);
}
}
| |
package org.jasig.cas.ticket;
import org.jasig.cas.authentication.Authentication;
import org.jasig.cas.authentication.principal.Service;
import org.jasig.cas.ticket.proxy.ProxyGrantingTicket;
import com.google.common.collect.ImmutableMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
import javax.persistence.Column;
import javax.persistence.DiscriminatorColumn;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Lob;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.validation.constraints.NotNull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Concrete implementation of a TicketGrantingTicket. A TicketGrantingTicket is
* the global identifier of a principal into the system. It grants the Principal
* single-sign on access to any service that opts into single-sign on.
* Expiration of a TicketGrantingTicket is controlled by the ExpirationPolicy
* specified as object creation.
*
* @author Scott Battaglia
* @since 3.0.0
*/
@Entity
@Table(name="TICKETGRANTINGTICKET")
@DiscriminatorColumn(name = "TYPE")
@DiscriminatorValue(TicketGrantingTicket.PREFIX)
public class TicketGrantingTicketImpl extends AbstractTicket implements TicketGrantingTicket {
    /** Unique Id for serialization. */
    private static final long serialVersionUID = -8608149809180911599L;
    /** Logger instance. */
    private static final Logger LOGGER = LoggerFactory.getLogger(TicketGrantingTicketImpl.class);
    /** The authenticated object for which this ticket was generated for. */
    @Lob
    @Column(name="AUTHENTICATION", nullable=false, length = Integer.MAX_VALUE)
    private Authentication authentication;
    /** Flag to enforce manual expiration. */
    // Boxed Boolean rather than primitive — presumably for JPA mapping; always
    // initialized, so the unboxing in isExpiredInternal() is safe.
    @Column(name="EXPIRED", nullable=false)
    private Boolean expired = Boolean.FALSE;
    /** Service that produced a proxy-granting ticket. */
    @Lob
    @Column(name="PROXIED_BY", nullable=true, length = Integer.MAX_VALUE)
    private Service proxiedBy;
    /** The services associated to this ticket. */
    // Keyed by service-ticket id; concrete HashMap type required for the @Lob mapping.
    @Lob
    @Column(name="SERVICES_GRANTED_ACCESS_TO", nullable=false, length = Integer.MAX_VALUE)
    private final HashMap<String, Service> services = new HashMap<>();
    /** The {@link TicketGrantingTicket} this is associated with. */
    @ManyToOne(targetEntity = TicketGrantingTicketImpl.class)
    private TicketGrantingTicket ticketGrantingTicket;
    /** The PGTs associated to this ticket. */
    @OneToMany(targetEntity = TicketGrantingTicketImpl.class, mappedBy = "ticketGrantingTicket", fetch = FetchType.EAGER)
    private final Set<ProxyGrantingTicket> proxyGrantingTickets = new HashSet<>();
    /** Additional authentications attached to this ticket beyond the primary one. */
    @Lob
    @Column(name="SUPPLEMENTAL_AUTHENTICATIONS", nullable=false, length = Integer.MAX_VALUE)
    private final ArrayList<Authentication> supplementalAuthentications = new ArrayList<>();
    /**
     * Instantiates a new ticket granting ticket impl.
     */
    // No-arg constructor required by JPA.
    public TicketGrantingTicketImpl() {
        // nothing to do
    }
    /**
     * Constructs a new TicketGrantingTicket.
     * May throw an {@link IllegalArgumentException} if the Authentication object is null.
     *
     * @param id the id of the Ticket
     * @param proxiedBy Service that produced this proxy ticket.
     * @param parentTicketGrantingTicket the parent ticket
     * @param authentication the Authentication request for this ticket
     * @param policy the expiration policy for this ticket.
     */
    public TicketGrantingTicketImpl(final String id,
                                    final Service proxiedBy,
                                    final TicketGrantingTicket parentTicketGrantingTicket,
                                    @NotNull final Authentication authentication, final ExpirationPolicy policy) {
        super(id, parentTicketGrantingTicket, policy);
        // A child (proxy-granting) ticket must record which service produced it.
        if (parentTicketGrantingTicket != null && proxiedBy == null) {
            throw new IllegalArgumentException("Must specify proxiedBy when providing parent TGT");
        }
        Assert.notNull(authentication, "authentication cannot be null");
        this.ticketGrantingTicket = parentTicketGrantingTicket;
        this.authentication = authentication;
        this.proxiedBy = proxiedBy;
    }
    /**
     * Constructs a new TicketGrantingTicket without a parent
     * TicketGrantingTicket.
     *
     * @param id the id of the Ticket
     * @param authentication the Authentication request for this ticket
     * @param policy the expiration policy for this ticket.
     */
    public TicketGrantingTicketImpl(final String id,
                                    final Authentication authentication, final ExpirationPolicy policy) {
        this(id, null, null, authentication, policy);
    }
    /** @return the parent ticket-granting ticket, or null for a root TGT. */
    @Override
    public final TicketGrantingTicket getGrantingTicket() {
        return this.ticketGrantingTicket;
    }
    /** @return the primary authentication this ticket was issued for. */
    @Override
    public final Authentication getAuthentication() {
        return this.authentication;
    }
    /**
     * {@inheritDoc}
     * <p>The state of the ticket is affected by this operation and the
     * ticket will be considered used. The state update subsequently may
     * impact the ticket expiration policy in that, depending on the policy
     * configuration, the ticket may be considered expired.
     */
    // synchronized together with getServices()/updateState() to keep the
    // services map and usage count consistent under concurrent grants.
    @Override
    public final synchronized ServiceTicket grantServiceTicket(final String id,
            final Service service, final ExpirationPolicy expirationPolicy,
            final boolean credentialsProvided, final boolean onlyTrackMostRecentSession) {
        // The first use of a TGT (count == 0) or fresh credentials mark the
        // service ticket as granted from new login.
        final ServiceTicket serviceTicket = new ServiceTicketImpl(id, this,
                service, this.getCountOfUses() == 0 || credentialsProvided,
                expirationPolicy);
        updateServiceAndTrackSession(serviceTicket.getId(), service, onlyTrackMostRecentSession);
        return serviceTicket;
    }
    /**
     * Update service and track session.
     *
     * @param id the id
     * @param service the service
     * @param onlyTrackMostRecentSession the only track most recent session
     */
    protected void updateServiceAndTrackSession(final String id, final Service service, final boolean onlyTrackMostRecentSession) {
        updateState();
        // The principal of the last (root-most) authentication in the chain is
        // attached to the service being granted access.
        final List<Authentication> authentications = getChainedAuthentications();
        service.setPrincipal(authentications.get(authentications.size()-1).getPrincipal());
        if (onlyTrackMostRecentSession) {
            final String path = normalizePath(service);
            final Collection<Service> existingServices = services.values();
            // loop on existing services
            // NOTE(review): only the FIRST service with a matching normalized
            // path is removed (findFirst); confirm a single previous session
            // per path is the intended invariant. Removal happens after the
            // stream terminates, so the values() view is not modified mid-stream.
            existingServices.stream()
                    .filter(existingService -> path.equals(normalizePath(existingService)))
                    .findFirst().ifPresent(existingService -> {
                        existingServices.remove(existingService);
                        LOGGER.trace("Removed previous tickets for service: {}", existingService);
                    });
        }
        this.services.put(id, service);
    }
    /**
     * Normalize the path of a service by removing the query string and everything after a semi-colon.
     *
     * @param service the service to normalize
     * @return the normalized path
     */
    private static String normalizePath(final Service service) {
        String path = service.getId();
        path = StringUtils.substringBefore(path, "?");
        path = StringUtils.substringBefore(path, ";");
        path = StringUtils.substringBefore(path, "#");
        return path;
    }
    /**
     * Gets an immutable map of service ticket and services accessed by this ticket-granting ticket.
     * Unlike {@link java.util.Collections#unmodifiableMap(java.util.Map)},
     * which is a view of a separate map which can still change, an instance of {@link ImmutableMap}
     * contains its own data and will never change.
     *
     * @return an immutable map of service ticket and services accessed by this ticket-granting ticket.
     */
    @Override
    public final synchronized Map<String, Service> getServices() {
        return ImmutableMap.copyOf(this.services);
    }
    /**
     * @return the live internal set of PGTs issued from this ticket.
     * NOTE(review): unlike getServices(), this exposes the mutable internal
     * collection — callers appear expected to add to it; confirm before changing.
     */
    @Override
    public Collection<ProxyGrantingTicket> getProxyGrantingTickets() {
        return proxyGrantingTickets;
    }
    /**
     * Remove all services of the TGT (at logout).
     */
    @Override
    public final void removeAllServices() {
        services.clear();
    }
    /**
     * Return if the TGT has no parent.
     *
     * @return if the TGT has no parent.
     */
    @Override
    public final boolean isRoot() {
        return this.getGrantingTicket() == null;
    }
    /** Manually marks this ticket as expired; see {@link #isExpiredInternal()}. */
    @Override
    public final void markTicketExpired() {
        this.expired = Boolean.TRUE;
    }
    /**
     * Walks the parent chain to the root ticket-granting ticket.
     *
     * @return the root TGT (this instance when it has no parent).
     */
    @Override
    public final TicketGrantingTicket getRoot() {
        TicketGrantingTicket current = this;
        TicketGrantingTicket parent = current.getGrantingTicket();
        while (parent != null) {
            current = parent;
            parent = current.getGrantingTicket();
        }
        return current;
    }
    /**
     * Return if the TGT is expired.
     *
     * @return if the TGT is expired.
     */
    @Override
    public final boolean isExpiredInternal() {
        return this.expired;
    }
    /** @return the live internal list of supplemental authentications (mutable). */
    @Override
    public final List<Authentication> getSupplementalAuthentications() {
        return this.supplementalAuthentications;
    }
    /**
     * Builds the authentication chain from this ticket up to the root:
     * this ticket's authentication first, then each ancestor's, recursively.
     *
     * @return an unmodifiable list of authentications, never empty.
     */
    @Override
    public final List<Authentication> getChainedAuthentications() {
        final List<Authentication> list = new ArrayList<>();
        list.add(getAuthentication());
        if (getGrantingTicket() == null) {
            return Collections.unmodifiableList(list);
        }
        list.addAll(getGrantingTicket().getChainedAuthentications());
        return Collections.unmodifiableList(list);
    }
    /** @return the service that produced this proxy ticket, or null. */
    @Override
    public final Service getProxiedBy() {
        return this.proxiedBy;
    }
    /**
     * Equality is based solely on the ticket id, against any
     * {@link TicketGrantingTicket} implementation (not just this class).
     */
    @Override
    public final boolean equals(final Object object) {
        if (object == null) {
            return false;
        }
        if (object == this) {
            return true;
        }
        if (!(object instanceof TicketGrantingTicket)) {
            return false;
        }
        final Ticket ticket = (Ticket) object;
        return new EqualsBuilder()
                .append(ticket.getId(), this.getId())
                .isEquals();
    }
}
| |
/*
*
* * Copyright (C) 2016 Amit Shekhar
* * Copyright (C) 2011 Android Open Source Project
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.rx2androidnetworking;
import android.app.Application;
import android.test.ApplicationTestCase;
import com.androidnetworking.common.ANConstants;
import com.androidnetworking.error.ANError;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Rule;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
import io.reactivex.Observer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Created by amitshekhar on 03/05/17.
*/
/**
 * Instrumented tests covering the RxJava2 multipart upload API against a local
 * {@link MockWebServer}: JSON-object and JSON-array responses, for both the
 * success (200) and error (404) paths.
 *
 * <p>Improvement over the previous revision: the {@code assertTrue(false)}
 * calls in callbacks that must never fire are replaced with
 * {@code fail(message)} (inherited from junit.framework.Assert), so an
 * unexpected callback produces a diagnosable failure message instead of a
 * bare assertion error.</p>
 */
public class Rx2MultipartJSONApiTest extends ApplicationTestCase<Application> {
    @Rule
    public final MockWebServer server = new MockWebServer();
    public Rx2MultipartJSONApiTest() {
        super(Application.class);
    }
    @Override
    public void setUp() throws Exception {
        super.setUp();
        createApplication();
    }
    /** Successful multipart upload whose response is parsed as a JSONObject. */
    public void testJSONObjectMultipartRequest() throws InterruptedException {
        server.enqueue(new MockResponse().setBody("{\"firstName\":\"Amit\", \"lastName\":\"Shekhar\"}"));
        final AtomicReference<String> firstNameRef = new AtomicReference<>();
        final AtomicReference<String> lastNameRef = new AtomicReference<>();
        final AtomicReference<Boolean> isSubscribedRef = new AtomicReference<>();
        final AtomicReference<Boolean> isCompletedRef = new AtomicReference<>();
        // Two countdowns expected: one from onNext, one from onComplete.
        final CountDownLatch latch = new CountDownLatch(2);
        Rx2AndroidNetworking.upload(server.url("/").toString())
                .addMultipartParameter("key", "value")
                .build()
                .getJSONObjectObservable()
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<JSONObject>() {
                    @Override
                    public void onSubscribe(Disposable d) {
                        isSubscribedRef.set(true);
                    }
                    @Override
                    public void onNext(JSONObject response) {
                        try {
                            firstNameRef.set(response.getString("firstName"));
                            lastNameRef.set(response.getString("lastName"));
                            latch.countDown();
                        } catch (JSONException e) {
                            fail("response JSON missing expected fields: " + e.getMessage());
                        }
                    }
                    @Override
                    public void onError(Throwable e) {
                        fail("onError should not be called for a successful response: " + e);
                    }
                    @Override
                    public void onComplete() {
                        isCompletedRef.set(true);
                        latch.countDown();
                    }
                });
        assertTrue(latch.await(2, SECONDS));
        assertTrue(isSubscribedRef.get());
        assertTrue(isCompletedRef.get());
        assertEquals("Amit", firstNameRef.get());
        assertEquals("Shekhar", lastNameRef.get());
    }
    /** A 404 response must surface through onError as an ANError with body/detail/code. */
    public void testJSONObjectMultipartRequest404() throws InterruptedException {
        server.enqueue(new MockResponse().setResponseCode(404).setBody("data"));
        final AtomicReference<String> errorDetailRef = new AtomicReference<>();
        final AtomicReference<String> errorBodyRef = new AtomicReference<>();
        final AtomicReference<Integer> errorCodeRef = new AtomicReference<>();
        final AtomicReference<Boolean> isSubscribedRef = new AtomicReference<>();
        final CountDownLatch latch = new CountDownLatch(1);
        Rx2AndroidNetworking.upload(server.url("/").toString())
                .addMultipartParameter("key", "value")
                .build()
                .getJSONObjectObservable()
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<JSONObject>() {
                    @Override
                    public void onSubscribe(Disposable d) {
                        isSubscribedRef.set(true);
                    }
                    @Override
                    public void onNext(JSONObject response) {
                        fail("onNext should not be called for a 404 response");
                    }
                    @Override
                    public void onError(Throwable e) {
                        ANError anError = (ANError) e;
                        errorBodyRef.set(anError.getErrorBody());
                        errorDetailRef.set(anError.getErrorDetail());
                        errorCodeRef.set(anError.getErrorCode());
                        latch.countDown();
                    }
                    @Override
                    public void onComplete() {
                        fail("onComplete should not be called for a 404 response");
                    }
                });
        assertTrue(latch.await(2, SECONDS));
        assertTrue(isSubscribedRef.get());
        assertEquals(ANConstants.RESPONSE_FROM_SERVER_ERROR, errorDetailRef.get());
        assertEquals("data", errorBodyRef.get());
        assertEquals(404, errorCodeRef.get().intValue());
    }
    /** Successful multipart upload whose response is parsed as a JSONArray. */
    public void testJSONArrayMultipartRequest() throws InterruptedException {
        server.enqueue(new MockResponse().setBody("[{\"firstName\":\"Amit\", \"lastName\":\"Shekhar\"}]"));
        final AtomicReference<String> firstNameRef = new AtomicReference<>();
        final AtomicReference<String> lastNameRef = new AtomicReference<>();
        final AtomicReference<Boolean> isSubscribedRef = new AtomicReference<>();
        final AtomicReference<Boolean> isCompletedRef = new AtomicReference<>();
        // Two countdowns expected: one from onNext, one from onComplete.
        final CountDownLatch latch = new CountDownLatch(2);
        Rx2AndroidNetworking.upload(server.url("/").toString())
                .addMultipartParameter("key", "value")
                .build()
                .getJSONArrayObservable()
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<JSONArray>() {
                    @Override
                    public void onSubscribe(Disposable d) {
                        isSubscribedRef.set(true);
                    }
                    @Override
                    public void onNext(JSONArray response) {
                        try {
                            JSONObject jsonObject = response.getJSONObject(0);
                            firstNameRef.set(jsonObject.getString("firstName"));
                            lastNameRef.set(jsonObject.getString("lastName"));
                            latch.countDown();
                        } catch (JSONException e) {
                            fail("response JSON missing expected fields: " + e.getMessage());
                        }
                    }
                    @Override
                    public void onError(Throwable e) {
                        fail("onError should not be called for a successful response: " + e);
                    }
                    @Override
                    public void onComplete() {
                        isCompletedRef.set(true);
                        latch.countDown();
                    }
                });
        assertTrue(latch.await(2, SECONDS));
        assertTrue(isSubscribedRef.get());
        assertTrue(isCompletedRef.get());
        assertEquals("Amit", firstNameRef.get());
        assertEquals("Shekhar", lastNameRef.get());
    }
    /** A 404 on the JSONArray path must also surface through onError as an ANError. */
    public void testJSONArrayMultipartRequest404() throws InterruptedException {
        server.enqueue(new MockResponse().setResponseCode(404).setBody("data"));
        final AtomicReference<String> errorDetailRef = new AtomicReference<>();
        final AtomicReference<String> errorBodyRef = new AtomicReference<>();
        final AtomicReference<Integer> errorCodeRef = new AtomicReference<>();
        final AtomicReference<Boolean> isSubscribedRef = new AtomicReference<>();
        final CountDownLatch latch = new CountDownLatch(1);
        Rx2AndroidNetworking.upload(server.url("/").toString())
                .addMultipartParameter("key", "value")
                .build()
                .getJSONArrayObservable()
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<JSONArray>() {
                    @Override
                    public void onSubscribe(Disposable d) {
                        isSubscribedRef.set(true);
                    }
                    @Override
                    public void onNext(JSONArray response) {
                        fail("onNext should not be called for a 404 response");
                    }
                    @Override
                    public void onError(Throwable e) {
                        ANError anError = (ANError) e;
                        errorBodyRef.set(anError.getErrorBody());
                        errorDetailRef.set(anError.getErrorDetail());
                        errorCodeRef.set(anError.getErrorCode());
                        latch.countDown();
                    }
                    @Override
                    public void onComplete() {
                        fail("onComplete should not be called for a 404 response");
                    }
                });
        assertTrue(latch.await(2, SECONDS));
        assertTrue(isSubscribedRef.get());
        assertEquals(ANConstants.RESPONSE_FROM_SERVER_ERROR, errorDetailRef.get());
        assertEquals("data", errorBodyRef.get());
        assertEquals(404, errorCodeRef.get().intValue());
    }
}
| |
/*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.sequencer.ddl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.modeshape.common.annotation.Immutable;
import org.modeshape.common.text.ParsingException;
import org.modeshape.common.text.Position;
import org.modeshape.common.util.CheckArg;
import org.modeshape.jcr.api.JcrConstants;
import org.modeshape.sequencer.ddl.dialect.derby.DerbyDdlParser;
import org.modeshape.sequencer.ddl.dialect.oracle.OracleDdlParser;
import org.modeshape.sequencer.ddl.dialect.postgres.PostgresDdlParser;
import org.modeshape.sequencer.ddl.node.AstNode;
import org.modeshape.sequencer.ddl.node.AstNodeFactory;
/**
* A set of parsers capable of understanding DDL file content. This class can be used directly to create an {@link AstNode} tree
* representing nodes and properties for DDL statement components.
* <p>
* You can also provide an input or parent {@link AstNode} node as the starting point for your tree.
* </p>
* <p>
* The parser is based on the SQL-92 and extended by specific dialects. These dialect-specific parsers provide db-specific parsing
* of db-specific statements of statement extensions, features or properties.
* </p>
*/
@Immutable
public class DdlParsers {
/**
 * Orders parser/score entries from highest score to lowest; on a score tie
 * the standard SQL parser sorts ahead of any dialect parser.
 */
private static final Comparator<Entry<DdlParser, Integer>> SORTER = new Comparator<Entry<DdlParser, Integer>>() {
    @Override
    public int compare( final Entry<DdlParser, Integer> thisEntry,
                        final Entry<DdlParser, Integer> thatEntry ) {
        // Compare in reversed operand order so the biggest score sorts first.
        final int byScore = thatEntry.getValue().compareTo(thisEntry.getValue());
        if (byScore != 0) {
            return byScore;
        }
        // Tie-break: prefer the standard SQL parser.
        final boolean thisIsStandard = StandardDdlParser.ID.equals(thisEntry.getKey().getId());
        final boolean thatIsStandard = StandardDdlParser.ID.equals(thatEntry.getKey().getId());
        if (thisIsStandard && !thatIsStandard) {
            return -1;
        }
        if (thatIsStandard && !thisIsStandard) {
            return 1;
        }
        return 0;
    }
};
// Immutable list of the parsers shipped with the sequencer: standard SQL-92
// first, then the dialect-specific parsers.
public static final List<DdlParser> BUILTIN_PARSERS;
static {
    List<DdlParser> parsers = new ArrayList<DdlParser>();
    parsers.add(new StandardDdlParser());
    parsers.add(new OracleDdlParser());
    parsers.add(new DerbyDdlParser());
    parsers.add(new PostgresDdlParser());
    BUILTIN_PARSERS = Collections.unmodifiableList(parsers);
}
// Parsers used by this instance; either BUILTIN_PARSERS or a caller-supplied list.
private List<DdlParser> parsers;
// Factory used to build the AST container nodes returned by the parse methods.
private AstNodeFactory nodeFactory = new AstNodeFactory();
/**
 * Create an instance that uses all of the {@link #BUILTIN_PARSERS built-in parsers}.
 */
public DdlParsers() {
    this.parsers = BUILTIN_PARSERS;
}
/**
 * Create an instance that uses the supplied parsers, in order.
 *
 * @param parsers the list of parsers; may be empty or null if the {@link #BUILTIN_PARSERS built-in parsers} should be used
 */
public DdlParsers( List<DdlParser> parsers ) {
    // A null or empty list silently falls back to the built-in parsers.
    this.parsers = (parsers != null && !parsers.isEmpty()) ? parsers : BUILTIN_PARSERS;
}
/**
 * Builds the root AST node that will contain all parsed DDL statements,
 * tagged with the id of the parser that produced it.
 *
 * @param parserId identifier of the parser recorded on the container node
 * @return the new statements-container node
 */
private AstNode createDdlStatementsContainer( final String parserId ) {
    final AstNode container = this.nodeFactory.node(StandardDdlLexicon.STATEMENTS_CONTAINER);
    container.setProperty(JcrConstants.JCR_PRIMARY_TYPE, JcrConstants.NT_UNSTRUCTURED);
    container.setProperty(StandardDdlLexicon.PARSER_ID, parserId);
    return container;
}
/**
 * @param id the identifier of the parser being requested (cannot be <code>null</code> or empty)
 * @return the parser or <code>null</code> if not found
 */
public DdlParser getParser( final String id ) {
    CheckArg.isNotEmpty(id, "id");
    DdlParser match = null;
    // Linear scan: the parser list is tiny (a handful of dialects).
    for (final DdlParser candidate : this.parsers) {
        if (candidate.getId().equals(id)) {
            match = candidate;
            break;
        }
    }
    return match;
}
/**
 * @return a copy of the DDL parsers used in this instance (never <code>null</code> or empty)
 */
// Returns a defensive HashSet copy, so callers cannot mutate the internal list
// (note: any ordering of the internal list is not preserved).
public Set<DdlParser> getParsers() {
    return new HashSet<DdlParser>(this.parsers);
}
/**
 * Parse the supplied DDL with one specific parser.
 *
 * @param ddl the DDL being parsed (cannot be <code>null</code> or empty)
 * @param parserId the identifier of the parser to use (cannot be <code>null</code> or empty;
 *        enforced by {@code CheckArg.isNotEmpty} below)
 * @return the root tree {@link AstNode}
 * @throws ParsingException if there is an error parsing the supplied DDL content, or if no
 *         parser with the specified identifier is registered
 * @throws IllegalArgumentException if {@code ddl} or {@code parserId} is null or empty
 */
public AstNode parseUsing( final String ddl,
                           final String parserId ) throws ParsingException {
    CheckArg.isNotEmpty(ddl, "ddl");
    CheckArg.isNotEmpty(parserId, "parserId");
    DdlParser parser = getParser(parserId);
    if (parser == null) {
        throw new ParsingException(Position.EMPTY_CONTENT_POSITION, DdlSequencerI18n.unknownParser.text(parserId));
    }
    // create DDL root node
    AstNode astRoot = createDdlStatementsContainer(parserId);
    // parse
    parser.parse(ddl, astRoot, null);
    return astRoot;
}
/**
 * Parse the supplied DDL using multiple parsers, returning the result of each parser with its score in the order of highest
 * scoring to lowest scoring.
 *
 * @param ddl the DDL being parsed (cannot be <code>null</code> or empty)
 * @param firstParserId the identifier of the first parser to use (cannot be <code>null</code> or empty)
 * @param secondParserId the identifier of the second parser to use (cannot be <code>null</code> or empty)
 * @param additionalParserIds the identifiers of additional parsers that should be used; may be empty but not contain a null
 *        identifier value
 * @return the list of {@link ParsingResult} instances, one for each parser, ordered from highest score to lowest score (never
 *         <code>null</code> or empty)
 * @throws ParsingException if there is an error parsing the supplied DDL content, or if any
 *         identifier does not name a registered parser
 */
public List<ParsingResult> parseUsing( final String ddl,
                                       final String firstParserId,
                                       final String secondParserId,
                                       final String... additionalParserIds ) throws ParsingException {
    CheckArg.isNotEmpty(firstParserId, "firstParserId");
    CheckArg.isNotEmpty(secondParserId, "secondParserId");
    if (additionalParserIds != null) {
        CheckArg.containsNoNulls(additionalParserIds, "additionalParserIds");
    }
    final int numParsers = ((additionalParserIds == null) ? 2 : (additionalParserIds.length + 2));
    final List<DdlParser> selectedParsers = new ArrayList<DdlParser>(numParsers);
    // The previous revision repeated the lookup-and-throw logic three times;
    // it is now centralized in parserWithId.
    selectedParsers.add(parserWithId(firstParserId));
    selectedParsers.add(parserWithId(secondParserId));
    if (additionalParserIds != null) {
        for (final String id : additionalParserIds) {
            selectedParsers.add(parserWithId(id));
        }
    }
    return parseUsing(ddl, selectedParsers);
}

/**
 * Look up a registered parser by identifier, failing when it is unknown.
 *
 * @param id the parser identifier (cannot be <code>null</code> or empty)
 * @return the matching parser (never <code>null</code>)
 * @throws ParsingException if no parser with the given identifier is registered
 */
private DdlParser parserWithId( final String id ) throws ParsingException {
    final DdlParser parser = getParser(id);
    if (parser == null) {
        throw new ParsingException(Position.EMPTY_CONTENT_POSITION, DdlSequencerI18n.unknownParser.text(id));
    }
    return parser;
}
/**
 * Score and parse the DDL with each of the supplied parsers, collecting one
 * {@link ParsingResult} per parser (including failed ones, which carry the
 * error and a null root node), sorted from highest score to lowest.
 *
 * <p>Bug fix: the previous revision iterated {@code this.parsers} (and sized
 * the result list from it) instead of the {@code parsers} parameter, so the
 * subset of parsers selected by callers such as
 * {@link #parseUsing(String, String, String, String...)} was silently ignored
 * and every registered parser ran instead.</p>
 *
 * @param ddl the DDL being parsed (cannot be <code>null</code> or empty)
 * @param parsers the parsers to run (each is scored, then parsed)
 * @return the parsing results, ordered from highest score to lowest
 */
private List<ParsingResult> parseUsing( final String ddl,
                                        final List<DdlParser> parsers ) {
    CheckArg.isNotEmpty(ddl, "ddl");
    final List<ParsingResult> results = new ArrayList<DdlParsers.ParsingResult>(parsers.size());
    final DdlParserScorer scorer = new DdlParserScorer();
    for (final DdlParser parser : parsers) {
        final String parserId = parser.getId();
        int score = ParsingResult.NO_SCORE;
        AstNode rootNode = null;
        Exception error = null;
        try {
            // score
            final Object scorerOutput = parser.score(ddl, null, scorer);
            score = scorer.getScore();
            // create DDL root node
            rootNode = createDdlStatementsContainer(parserId);
            // parse
            parser.parse(ddl, rootNode, scorerOutput);
        } catch (final RuntimeException e) {
            error = e;
        } finally {
            // Record a result even on failure so every parser is represented,
            // and reset the scorer for the next parser.
            final ParsingResult result = new ParsingResult(parserId, rootNode, score, error);
            results.add(result);
            scorer.reset();
        }
    }
    Collections.sort(results);
    return results;
}
/**
 * Parse the supplied DDL using all registered parsers, returning the result of each parser with its score in the order of
 * highest scoring to lowest scoring.
 *
 * @param ddl the DDL being parsed (cannot be <code>null</code> or empty)
 * @return the list or {@link ParsingResult} instances, one for each parser, ordered from highest score to lowest score
 * @throws ParsingException if there is an error parsing the supplied DDL content
 * @throws IllegalArgumentException if {@code ddl} is null or empty
 */
public List<ParsingResult> parseUsingAll( final String ddl ) throws ParsingException {
    return parseUsing(ddl, this.parsers);
}
/**
 * Parse the supplied DDL content and return the {@link AstNode root node} of the AST representation.
 *
 * <p>Strategy: every parser scores the DDL first; parsers are then tried in
 * descending score order and the first successful parse wins. If all scoring
 * or all parsing attempts fail, the first exception encountered is rethrown.</p>
 *
 * @param ddl content string; may not be null
 * @param fileName the approximate name of the file containing the DDL content; may be null if this is not known
 * @return the root tree {@link AstNode}
 * @throws ParsingException if there is an error parsing the supplied DDL content
 */
public AstNode parse( final String ddl,
                      final String fileName ) throws ParsingException {
    CheckArg.isNotEmpty(ddl, "ddl");
    RuntimeException firstException = null;
    // Go through each parser and score the DDL content
    final Map<DdlParser, Integer> scoreMap = new HashMap<DdlParser, Integer>(this.parsers.size());
    final DdlParserScorer scorer = new DdlParserScorer();
    for (final DdlParser parser : this.parsers) {
        try {
            parser.score(ddl, fileName, scorer);
            scoreMap.put(parser, scorer.getScore());
        } catch (RuntimeException e) {
            // Remember only the first failure; other parsers may still score.
            if (firstException == null) {
                firstException = e;
            }
        } finally {
            scorer.reset();
        }
    }
    // Every parser failed to score: nothing to try, so report the failure.
    if (scoreMap.isEmpty()) {
        if (firstException == null) {
            throw new ParsingException(Position.EMPTY_CONTENT_POSITION,
                                       DdlSequencerI18n.errorParsingDdlContent.text(this.parsers.size()));
        }
        throw firstException;
    }
    // sort the scores
    final List<Entry<DdlParser, Integer>> scoredParsers = new ArrayList<Entry<DdlParser, Integer>>(scoreMap.entrySet());
    Collections.sort(scoredParsers, SORTER);
    firstException = null;
    AstNode astRoot = null;
    for (final Entry<DdlParser, Integer> scoredParser : scoredParsers) {
        try {
            final DdlParser parser = scoredParser.getKey();
            // create DDL root node
            astRoot = createDdlStatementsContainer(parser.getId());
            // parse
            parser.parse(ddl, astRoot, null);
            return astRoot; // successfully parsed
        } catch (final RuntimeException e) {
            // Discard the partially built tree before trying the next parser.
            if (astRoot != null) {
                astRoot.removeFromParent();
            }
            if (firstException == null) {
                firstException = e;
            }
        }
    }
    if (firstException == null) {
        throw new ParsingException(Position.EMPTY_CONTENT_POSITION, DdlSequencerI18n.errorParsingDdlContent.text());
    }
    throw firstException;
}
/**
 * Outcome of running a single parser against one DDL input: the parser's identifier,
 * its score, the resulting AST root (when parsing succeeded), and any error raised.
 * Natural ordering is by score, highest first.
 */
@Immutable
public class ParsingResult implements Comparable<ParsingResult> {

    /** Score reported when a parser failed before producing a score. */
    public static final int NO_SCORE = -1;

    private final Exception error;
    private final String id;
    private final AstNode rootNode;
    private final int score;

    /**
     * @param parserId the parser identifier (cannot be <code>null</code> or empty)
     * @param rootTreeNode the node at the root of the parse tree (can be <code>null</code> if an error occurred)
     * @param parserScore the parsing score (can have {@link #NO_SCORE no score} if an error occurred
     * @param parsingError an error that occurred during parsing (can be <code>null</code>)
     */
    public ParsingResult( final String parserId,
                          final AstNode rootTreeNode,
                          final int parserScore,
                          final Exception parsingError ) {
        CheckArg.isNotEmpty(parserId, "parserId");
        this.id = parserId;
        this.rootNode = rootTreeNode;
        this.score = parserScore;
        this.error = parsingError;
    }

    /**
     * Orders results from highest score to lowest score.
     *
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
    @Override
    public int compareTo( final ParsingResult that ) {
        // Descending by score; Integer.compare avoids any subtraction-overflow pitfall
        // and collapses the identity/equal-score cases to 0 just like the original logic.
        return (this == that) ? 0 : Integer.compare(that.score, this.score);
    }

    /**
     * @return the parsing error (<code>null</code> if no error occurred)
     */
    public Exception getError() {
        return this.error;
    }

    /**
     * @return the parser identifier (never <code>null</code> or empty)
     */
    public String getParserId() {
        return this.id;
    }

    /**
     * @return the root <code>AstNode</code> (can be <code>null</code> if a parsing error occurred)
     */
    public AstNode getRootTree() {
        return this.rootNode;
    }

    /**
     * @return the parsing score
     */
    public int getScore() {
        return this.score;
    }
}
}
| |
/*
* Conditions Of Use
*
* This software was developed by employees of the National Institute of
* Standards and Technology (NIST), an agency of the Federal Government.
 * Pursuant to title 15 United States Code Section 105, works of NIST
* employees are not subject to copyright protection in the United States
* and are considered to be in the public domain. As a result, a formal
* license is not needed to use the software.
*
* This software is provided by NIST as a service and is expressly
* provided "AS IS." NIST MAKES NO WARRANTY OF ANY KIND, EXPRESS, IMPLIED
* OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT
* AND DATA ACCURACY. NIST does not warrant or make any representations
* regarding the use of the software or the results thereof, including but
* not limited to the correctness, accuracy, reliability or usefulness of
* the software.
*
* Permission to use this software is contingent upon your acceptance
* of the terms of this agreement
*
* .
*
*/
/*******************************************************************************
* Product of NIST/ITL Advanced Networking Technologies Division (ANTD). *
*******************************************************************************/
package gov.nist.javax.sip.address;
/*
*Bug fix contributions
*Daniel J. Martinez Manzano <dani@dif.um.es>
*Stefan Marx.
*pmusgrave@newheights.com (Additions for gruu and outbound drafts)
*Jeroen van Bemmel ( additions for SCTP transport )
*/
import gov.nist.core.*;
import gov.nist.javax.sip.ListeningPointExt;
import java.util.*;
import java.text.ParseException;
import javax.sip.PeerUnavailableException;
import javax.sip.SipFactory;
import javax.sip.address.SipURI;
import javax.sip.header.Header;
import javax.sip.header.HeaderFactory;
/**
* Implementation of the SipURI interface.
*
*
* @author M. Ranganathan <br/>
* @version 1.2 $Revision: 1.24 $ $Date: 2010-05-06 14:08:05 $
*
*
*
*/
public class SipUri extends GenericURI implements javax.sip.address.SipURI , SipURIExt{
private static final long serialVersionUID = 7749781076218987044L;
/** Authority for the uri.
*/
protected Authority authority;
/** uriParms list
*/
protected NameValueList uriParms;
/** qheaders list
*/
protected NameValueList qheaders;
/** telephoneSubscriber field
*/
protected TelephoneNumber telephoneSubscriber;
/**
 * Default constructor: scheme "sip", empty URI-parameter and query-header lists,
 * with "&" as the separator between query headers.
 */
public SipUri() {
this.scheme = SIP;
this.uriParms = new NameValueList();
this.qheaders = new NameValueList();
this.qheaders.setSeparator("&");
}
/** Set the scheme (this is a setter, not a constructor, despite the original comment).
 * The scheme must be either "sip" or "sips" (case-insensitive); it is stored lower-case.
 * @throws IllegalArgumentException if the scheme is neither sip nor sips.
 */
public void setScheme(String scheme) {
if (scheme.compareToIgnoreCase(SIP) != 0
&& scheme.compareToIgnoreCase(SIPS) != 0)
throw new IllegalArgumentException("bad scheme " + scheme);
this.scheme = scheme.toLowerCase();
}
/** Get the scheme.
*/
public String getScheme() {
return scheme;
}
/**
* clear all URI Parameters.
* @since v1.0
*/
public void clearUriParms() {
uriParms = new NameValueList();
}
/**
*Clear the password from the user part if it exists.
*/
public void clearPassword() {
if (this.authority != null) {
UserInfo userInfo = authority.getUserInfo();
if (userInfo != null)
userInfo.clearPassword();
}
}
/** Get the authority.
*/
public Authority getAuthority() {
return this.authority;
}
/**
* Clear all Qheaders.
*/
public void clearQheaders() {
qheaders = new NameValueList();
}
/**
 * Compare two URIs and return true if they are equal.
 * @param that the object to compare to.
 * @return true if the object is equal to this object.
 *
 * JvB: Updated to define equality in terms of API methods, according to the rules
 * in RFC3261 section 19.1.4
 *
 * Jean Deruelle: Updated to define equality of API methods, according to the rules
 * in RFC3261 section 19.1.4 convert potential ie :
 * %HEX HEX encoding parts of the URI before comparing them
 * transport param added in comparison
 * header equality enforced in comparison
 *
 * NOTE(review): equals is overridden here but no matching hashCode override is
 * visible in this class — confirm GenericURI supplies one consistent with this
 * definition before relying on SipUri as a hash key.
 */
@SuppressWarnings("unchecked")
@Override
public boolean equals(Object that) {
// Shortcut for same object
if (that==this) return true;
if (that instanceof SipURI) {
final SipURI a = this;
final SipURI b = (SipURI) that;
// A SIP and SIPS URI are never equivalent
if ( a.isSecure() ^ b.isSecure() ) return false;
// For two URIs to be equal, the user, password, host, and port
// components must match; comparison of userinfo is case-sensitive
// (XOR catches "present in one but not the other").
if (a.getUser()==null ^ b.getUser()==null) return false;
if (a.getUserPassword()==null ^ b.getUserPassword()==null) return false;
// Values are %HEX-decoded before comparison (RFC 3261 19.1.4).
if (a.getUser()!=null && !UriDecoder.decode(a.getUser()).equals(UriDecoder.decode(b.getUser()))) return false;
if (a.getUserPassword()!=null && !UriDecoder.decode(a.getUserPassword()).equals(UriDecoder.decode(b.getUserPassword()))) return false;
if (a.getHost() == null ^ b.getHost() == null) return false;
if (a.getHost() != null && !a.getHost().equalsIgnoreCase(b.getHost())) return false;
if (a.getPort() != b.getPort()) return false;
// URI parameters: only parameters present in BOTH URIs are compared here;
// a parameter present in only one side does not break equality (except the
// special parameters checked explicitly below).
for (Iterator i = a.getParameterNames(); i.hasNext();) {
String pname = (String) i.next();
String p1 = a.getParameter(pname);
String p2 = b.getParameter(pname);
// those present in both must match (case-insensitive)
if (p1!=null && p2!=null && !UriDecoder.decode(p1).equalsIgnoreCase(UriDecoder.decode(p2))) return false;
}
// transport, user, ttl or method must match when present in either
if (a.getTransportParam()==null ^ b.getTransportParam()==null) return false;
if (a.getUserParam()==null ^ b.getUserParam()==null) return false;
if (a.getTTLParam()==-1 ^ b.getTTLParam()==-1) return false;
if (a.getMethodParam()==null ^ b.getMethodParam()==null) return false;
if (a.getMAddrParam()==null ^ b.getMAddrParam()==null) return false;
// Headers: must match according to their definition.
// One side having headers while the other has none means not equal.
if(a.getHeaderNames().hasNext() && !b.getHeaderNames().hasNext()) return false;
if(!a.getHeaderNames().hasNext() && b.getHeaderNames().hasNext()) return false;
if(a.getHeaderNames().hasNext() && b.getHeaderNames().hasNext()) {
HeaderFactory headerFactory = null;
try {
headerFactory = SipFactory.getInstance().createHeaderFactory();
} catch (PeerUnavailableException e) {
Debug.logError("Cannot get the header factory to parse the header of the sip uris to compare", e);
return false;
}
// Each header is parsed into a typed Header object so that equality follows
// that header's own semantics rather than raw string comparison.
for (Iterator i = a.getHeaderNames(); i.hasNext();) {
String hname = (String) i.next();
String h1 = a.getHeader(hname);
String h2 = b.getHeader(hname);
if(h1 == null && h2 != null) return false;
if(h2 == null && h1 != null) return false;
// The following check should not be needed but we add it for findbugs.
if(h1 == null && h2 == null) continue;
try {
Header header1 = headerFactory.createHeader(hname, UriDecoder.decode(h1));
Header header2 = headerFactory.createHeader(hname, UriDecoder.decode(h2));
// those present in both must match according to the equals method of the corresponding header
if (!header1.equals(header2)) return false;
} catch (ParseException e) {
Debug.logError("Cannot parse one of the header of the sip uris to compare " + a + " " + b, e);
return false;
}
}
}
// Finally, we can conclude that they are indeed equal
return true;
}
return false;
}
/**
 * Serialize this URI into its textual form.
 *
 * @return the encoded URI string
 */
public String encode() {
    final StringBuilder out = new StringBuilder();
    encode(out);
    return out.toString();
}
/**
 * Append the textual form of this URI to {@code buffer}: the scheme, a colon,
 * the authority (when present), then the ';'-prefixed URI parameters and the
 * '?'-prefixed query headers (each only when non-empty).
 *
 * @param buffer destination buffer
 * @return the same buffer, for call chaining
 */
public StringBuilder encode(StringBuilder buffer) {
    buffer.append(scheme);
    buffer.append(COLON);
    if (authority != null) {
        authority.encode(buffer);
    }
    if (!uriParms.isEmpty()) {
        uriParms.encode(buffer.append(SEMICOLON));
    }
    if (!qheaders.isEmpty()) {
        qheaders.encode(buffer.append(QUESTION));
    }
    return buffer;
}
/** Return a string representation.
*
*@return the String representation of this URI.
*
*/
public String toString() {
return this.encode();
}
/**
 * getUser@host
 * @return user@host portion of the uri (null if none exists).
 *
 * Peter Musgrave - handle null user
 *
 * NOTE(review): despite the "(null if none exists)" promise, this throws
 * NullPointerException when no authority has been set, when the authority has
 * no host, or when the user-info exists but its user is null — confirm whether
 * callers rely on the NPE before hardening.
 */
public String getUserAtHost() {
// Default to "" so the user part is simply omitted when absent.
String user = "";
if (authority.getUserInfo() != null)
user = authority.getUserInfo().getUser();
String host = authority.getHost().encode();
StringBuilder s = null;
if (user.equals("")) {
s = new StringBuilder();
} else {
// Join user and host with '@'.
s = new StringBuilder(user).append(AT);
}
return s.append(host).toString();
}
/**
 * getUser@host:port
 *
 * @return the user@host[:port] portion of the uri, or null when this URI has
 *         no authority or no host (the javadoc contract says "null if none
 *         exists"; previously this threw NullPointerException in that case).
 *         The port is appended only when explicitly set (i.e. not -1).
 */
public String getUserAtHostPort() {
    // Honor the documented "null if none exists" contract instead of NPE-ing.
    if (authority == null || authority.getHost() == null) {
        return null;
    }
    String user = "";
    // A UserInfo may exist with a null user; guard both levels.
    if (authority.getUserInfo() != null && authority.getUserInfo().getUser() != null) {
        user = authority.getUserInfo().getUser();
    }
    String host = authority.getHost().encode();
    int port = authority.getPort();
    StringBuilder s = user.equals("")
        ? new StringBuilder()
        : new StringBuilder(user).append(AT);
    s.append(host);
    // If port not set (-1), omit it.
    if (port != -1) {
        s.append(COLON).append(port);
    }
    return s.toString();
}
/**
* get the parameter (do a name lookup) and return null if none exists.
* @param parmname Name of the parameter to get.
* @return Parameter of the given name (null if none exists).
*/
public Object getParm(String parmname) {
Object obj = uriParms.getValue(parmname);
return obj;
}
/**
* Get the method parameter.
* @return Method parameter.
*/
public String getMethod() {
return (String) getParm(METHOD);
}
/**
* Accessor for URI parameters
* @return A name-value list containing the parameters.
*/
public NameValueList getParameters() {
return uriParms;
}
/** Remove the URI parameters.
*
*/
public void removeParameters() {
this.uriParms = new NameValueList();
}
/**
* Accessor forSIPObjects
* @return Get the query headers (that appear after the ? in
* the URL)
*/
public NameValueList getQheaders() {
return qheaders;
}
/**
 * Get the user parameter.
 * @return the value of the "user" URI parameter (e.g. user=phone or user=ip),
 *         or null if it is not set.
 */
public String getUserType() {
return (String) uriParms.getValue(USER);
}
/**
 * Get the password embedded in the user-info part of the URI
 * (embedding a password in a URI is a very bad idea).
 *
 * @return the password, or null when no authority is present
 */
public String getUserPassword() {
    return (authority == null) ? null : authority.getPassword();
}
/** Set the user password, creating the authority lazily when absent.
 *@param password - password to set.
 */
public void setUserPassword(String password) {
    if (authority == null) {
        authority = new Authority();
    }
    this.authority.setPassword(password);
}
/**
 * Returns the structure corresponding to the telephone number,
 * provided that the user is a telephone subscriber.
 * Lazily creates an empty TelephoneNumber on first access.
 * @return TelephoneNumber part of the url (only makes sense
 * when user = phone is specified)
 */
public TelephoneNumber getTelephoneSubscriber() {
if (telephoneSubscriber == null) {
telephoneSubscriber = new TelephoneNumber();
}
return telephoneSubscriber;
}
/**
* Get the host and port of the server.
* @return get the host:port part of the url parsed into a
* structure.
*/
public HostPort getHostPort() {
if (authority == null || authority.getHost() == null )
return null;
else {
return authority.getHostPort();
}
}
/** Get the port from the authority field.
*
*@return the port from the authority field.
*/
public int getPort() {
HostPort hp = this.getHostPort();
if (hp == null)
return -1;
return hp.getPort();
}
/** Get the host portion of the URI.
 * @return the host portion of the url, or null when no authority or host is set.
 */
public String getHost() {
if ( authority == null) return null;
else if (authority.getHost() == null ) return null;
else return authority.getHost().encode();
}
/**
* returns true if the user is a telephone subscriber.
* If the host is an Internet telephony
* gateway, a telephone-subscriber field MAY be used instead
* of a user field. The telephone-subscriber field uses the
* notation of RFC 2806 [19]. Any characters of the un-escaped
* "telephone-subscriber" that are not either in the set
* "unreserved" or "user-unreserved" MUST be escaped. The set
* of characters not reserved in the RFC 2806 description of
* telephone-subscriber contains a number of characters in
* various syntax elements that need to be escaped when used
* in SIP URLs, for example quotation marks (%22), hash (%23),
* colon (%3a), at-sign (%40) and the "unwise" characters,
* i.e., punctuation of %5b and above.
*
* The telephone number is a special case of a user name and
* cannot be distinguished by a BNF. Thus, a URL parameter,
* user, is added to distinguish telephone numbers from user
* names.
*
* The user parameter value "phone" indicates that the user
* part contains a telephone number. Even without this
* parameter, recipients of SIP URLs MAY interpret the pre-@
* part as a telephone number if local restrictions on the
* @return true if the user is a telephone subscriber.
*/
public boolean isUserTelephoneSubscriber() {
String usrtype = (String) uriParms.getValue(USER);
if (usrtype == null)
return false;
return usrtype.equalsIgnoreCase(PHONE);
}
/**
*remove the ttl value from the parameter list if it exists.
*/
public void removeTTL() {
if (uriParms != null)
uriParms.delete(TTL);
}
/**
*Remove the maddr param if it exists.
*/
public void removeMAddr() {
if (uriParms != null)
uriParms.delete(MADDR);
}
/**
*Delete the transport string.
*/
public void removeTransport() {
if (uriParms != null)
uriParms.delete(TRANSPORT);
}
/** Remove a header given its name (provided it exists).
* @param name name of the header to remove.
*/
public void removeHeader(String name) {
if (qheaders != null)
qheaders.delete(name);
}
/** Remove all headers.
*/
public void removeHeaders() {
qheaders = new NameValueList();
}
/**
 * Remove the "user" URI parameter, if present.
 * (The original comment incorrectly said "Set the user type".)
 */
public void removeUserType() {
if (uriParms != null)
uriParms.delete(USER);
}
/**
 * Remove the port setting, if any.
 *
 * <p>Null-safe: a URI that never had an authority simply has no port to
 * remove. The previous code dereferenced {@code authority} unconditionally
 * and threw NullPointerException, unlike the sibling removers
 * (removeTTL, removeMAddr, removeTransport) which all guard their state.
 */
public void removePort() {
    if (authority != null) {
        authority.removePort();
    }
}
/**
* remove the Method.
*/
public void removeMethod() {
if (uriParms != null)
uriParms.delete(METHOD);
}
/** Sets the user of SipURI. The identifier of a particular resource at
 * the host being addressed. The user and the user password including the
 * "at" sign make up the user-info.
 * Creates the authority lazily when absent.
 *
 * @param uname The new String value of the user.
 */
public void setUser(String uname) {
if (this.authority == null) {
this.authority = new Authority();
}
this.authority.setUser(uname);
}
/** Remove the user-info part of the URI, if any.
 *
 * <p>Null-safe: the previous code dereferenced {@code authority}
 * unconditionally and threw NullPointerException for a URI that never had
 * an authority, unlike the other removers in this class.
 */
public void removeUser() {
    if (this.authority != null) {
        this.authority.removeUserInfo();
    }
}
/** Set the default parameters for this URI.
* Do nothing if the parameter is already set to some value.
* Otherwise set it to the given value.
* @param name Name of the parameter to set.
* @param value value of the parameter to set.
*/
public void setDefaultParm(String name, Object value) {
if (uriParms.getValue(name) == null) {
NameValue nv = new NameValue(name, value);
uriParms.set(nv);
}
}
/** Set the authority member
* @param authority Authority to set.
*/
public void setAuthority(Authority authority) {
this.authority = authority;
}
/** Set the host for this URI.
* @param h host to set.
*/
public void setHost(Host h) {
if (this.authority == null)
this.authority = new Authority();
this.authority.setHost(h);
}
/** Set the uriParms member
* @param parms URI parameters to set.
*/
public void setUriParms(NameValueList parms) {
uriParms = parms;
}
/**
* Set a given URI parameter. Note - parameter must be properly
* encoded before the function is called.
* @param name Name of the parameter to set.
* @param value value of the parameter to set.
*/
public void setUriParm(String name, Object value) {
NameValue nv = new NameValue(name, value);
uriParms.set(nv);
}
/** Set the qheaders member
* @param parms query headers to set.
*/
public void setQheaders(NameValueList parms) {
qheaders = parms;
}
/**
* Set the MADDR parameter .
* @param mAddr Host Name to set
*/
public void setMAddr(String mAddr) {
NameValue nameValue = uriParms.getNameValue(MADDR);
Host host = new Host();
host.setAddress(mAddr);
if (nameValue != null)
nameValue.setValueAsObject(host);
else {
nameValue = new NameValue(MADDR, host);
uriParms.set(nameValue);
}
}
/** Sets the value of the user parameter. The user URI parameter exists to
* distinguish telephone numbers from user names that happen to look like
* telephone numbers. This is equivalent to setParameter("user", user).
*
* @param usertype New value String value of the method parameter
*/
public void setUserParam(String usertype) {
uriParms.set(USER, usertype);
}
/**
* Set the Method
* @param method method parameter
*/
public void setMethod(String method) {
uriParms.set(METHOD, method);
}
/**
* Sets ISDN subaddress of SipURL
* @param isdnSubAddress ISDN subaddress
*/
public void setIsdnSubAddress(String isdnSubAddress) {
if (telephoneSubscriber == null)
telephoneSubscriber = new TelephoneNumber();
telephoneSubscriber.setIsdnSubaddress(isdnSubAddress);
}
/**
* Set the telephone subscriber field.
* @param tel Telephone subscriber field to set.
*/
public void setTelephoneSubscriber(TelephoneNumber tel) {
telephoneSubscriber = tel;
}
/** set the port to a given value.
* @param p Port to set.
*/
public void setPort(int p) {
if (authority == null)
authority = new Authority();
authority.setPort(p);
}
/**
* Boolean to check if a parameter of a given name exists.
* @param name Name of the parameter to check on.
* @return a boolean indicating whether the parameter exists.
*/
public boolean hasParameter(String name) {
return uriParms.getValue(name) != null;
}
/**
* Set the query header when provided as a name-value pair.
* @param nameValue qeuery header provided as a name,value pair.
*/
public void setQHeader(NameValue nameValue) {
this.qheaders.set(nameValue);
}
/** Set the parameter as given.
*@param nameValue - parameter to set.
*/
public void setUriParameter(NameValue nameValue) {
this.uriParms.set(nameValue);
}
/** Return true if the transport parameter is defined.
* @return true if transport appears as a parameter and false otherwise.
*/
public boolean hasTransport() {
return hasParameter(TRANSPORT);
}
/**
* Remove a parameter given its name
* @param name -- name of the parameter to remove.
*/
public void removeParameter(String name) {
uriParms.delete(name);
}
/** Set the hostPort field of the imbedded authority field.
*@param hostPort is the hostPort to set.
*/
public void setHostPort(HostPort hostPort) {
if (this.authority == null) {
this.authority = new Authority();
}
authority.setHostPort(hostPort);
}
/** clone this.
*/
public Object clone() {
SipUri retval = (SipUri) super.clone();
if (this.authority != null)
retval.authority = (Authority) this.authority.clone();
if (this.uriParms != null)
retval.uriParms = (NameValueList) this.uriParms.clone();
if (this.qheaders != null)
retval.qheaders = (NameValueList) this.qheaders.clone();
if (this.telephoneSubscriber != null)
retval.telephoneSubscriber = (TelephoneNumber) this.telephoneSubscriber.clone();
return retval;
}
/**
* Returns the value of the named header, or null if it is not set.
* SIP/SIPS URIs may specify headers. As an example, the URI
* sip:joe@jcp.org?priority=urgent has a header "priority" whose
* value is "urgent".
*
* @param name name of header to retrieve
* @return the value of specified header
*/
public String getHeader(String name) {
return this.qheaders.getValue(name) != null
? this.qheaders.getValue(name).toString()
: null;
}
/**
* Returns an Iterator over the names (Strings) of all headers present
* in this SipURI.
*
* @return an Iterator over all the header names
*/
public Iterator<String> getHeaderNames() {
return this.qheaders.getNames();
}
/** Returns the value of the <code>lr</code> parameter, or null if this
* is not set. This is equivalent to getParameter("lr").
*
* @return the value of the <code>lr</code> parameter
*/
public String getLrParam() {
boolean haslr = this.hasParameter(LR);
return haslr ? "true" : null;
}
/** Returns the value of the <code>maddr</code> parameter, or null if this
* is not set. This is equivalent to getParameter("maddr").
*
* @return the value of the <code>maddr</code> parameter
*/
public String getMAddrParam() {
NameValue maddr = uriParms.getNameValue(MADDR);
if (maddr == null)
return null;
String host = (String) maddr.getValueAsObject();
return host;
}
/**
* Returns the value of the <code>method</code> parameter, or null if this
* is not set. This is equivalent to getParameter("method").
*
* @return the value of the <code>method</code> parameter
*/
public String getMethodParam() {
return this.getParameter(METHOD);
}
/**
* Returns the value of the named parameter, or null if it is not set. A
* zero-length String indicates flag parameter.
*
* @param name name of parameter to retrieve
* @return the value of specified parameter
*/
public String getParameter(String name) {
Object val = uriParms.getValue(name);
if (val == null)
return null;
if (val instanceof GenericObject)
return ((GenericObject) val).encode();
else
return val.toString();
}
/**
* Returns an Iterator over the names (Strings) of all parameters present
*
* in this ParametersHeader.
*
*
*
* @return an Iterator over all the parameter names
*
*/
public Iterator<String> getParameterNames() {
return this.uriParms.getNames();
}
/** Returns the value of the "ttl" parameter, or -1 if this is not set.
 * This method is equivalent to getParameter("ttl").
 *
 * <p>The ttl value may be stored either as an Integer (via setTTLParam) or as
 * a String (via setParameter("ttl", "5"), which validates but stores the raw
 * String). The previous blind {@code (Integer)} cast threw
 * ClassCastException for the String case.
 *
 * @return the value of the <code>ttl</code> parameter, or -1 when absent or unparseable
 */
public int getTTLParam() {
    Object ttl = uriParms.getValue("ttl");
    if (ttl == null) {
        return -1;
    }
    if (ttl instanceof Integer) {
        return ((Integer) ttl).intValue();
    }
    try {
        return Integer.parseInt(ttl.toString());
    } catch (NumberFormatException ex) {
        // Defensive: setParameter validated the value, but other setters may not have.
        return -1;
    }
}
/** Returns the value of the "transport" parameter, or null if this is not
 * set. This is equivalent to getParameter("transport").
 *
 * @return the transport parameter of the SipURI
 */
public String getTransportParam() {
if (uriParms != null) {
return (String) uriParms.getValue(TRANSPORT);
} else
return null;
}
/** Returns the user part (user-info) of this SipURI, or null if it is not set.
 *
 * <p>Note: the original javadoc here described the "user" URI parameter —
 * that text belongs to {@link #getUserParam()}; the two comments were swapped.
 *
 * @return the user part of this SipURI, or null when absent
 */
public String getUser() {
    // Null-safe, consistent with getHost()/getUserPassword(): a URI without an
    // authority has no user; previously this threw NullPointerException.
    return (authority == null) ? null : authority.getUser();
}
/** Returns true if this SipURI is secure i.e. if this SipURI represents a
* sips URI. A sip URI returns false.
*
* @return <code>true</code> if this SipURI represents a sips URI, and
* <code>false</code> if it represents a sip URI.
*/
public boolean isSecure() {
return this.getScheme().equalsIgnoreCase(SIPS);
}
/** This method determines if this is a URI with a scheme of "sip" or "sips".
*
* @return true if the scheme is "sip" or "sips", false otherwise.
*/
public boolean isSipURI() {
return true;
}
/** Sets the value of the specified header fields to be included in a
* request constructed from the URI. If the header already had a value it
* will be overwritten.
*
* @param name - a String specifying the header name
* @param value - a String specifying the header value
*/
public void setHeader(String name, String value) {
NameValue nv = new NameValue(name, value);
qheaders.set(nv);
}
/**
* Set the host portion of the SipURI
*
* @param host host to set.
*/
public void setHost(String host) throws ParseException {
Host h = new Host(host);
this.setHost(h);
}
/** Sets the value of the <code>lr</code> parameter of this SipURI. The lr
* parameter, when present, indicates that the element responsible for
* this resource implements the routing mechanisms specified in RFC 3261.
* This parameter will be used in the URIs proxies place in the
* Record-Route header field values, and may appear in the URIs in a
* pre-existing route set.
*/
public void setLrParam() {
this.uriParms.set("lr",null); // JvB: fixed to not add duplicates
}
/**
* Sets the value of the <code>maddr</code> parameter of this SipURI. The
* maddr parameter indicates the server address to be contacted for this
* user, overriding any address derived from the host field. This is
* equivalent to setParameter("maddr", maddr).
*
* @param maddr New value of the <code>maddr</code> parameter
*/
public void setMAddrParam(String maddr) throws ParseException {
if (maddr == null)
throw new NullPointerException("bad maddr");
setParameter("maddr", maddr);
}
/** Sets the value of the <code>method</code> parameter. This specifies
* which SIP method to use in requests directed at this URI. This is
* equivalent to setParameter("method", method).
*
* @param method - new value String value of the method parameter
*/
public void setMethodParam(String method) throws ParseException {
setParameter("method", method);
}
/**
* Sets the value of the specified parameter. If the parameter already had
*
* a value it will be overwritten. A zero-length String indicates flag
*
* parameter.
*
*
*
* @param name - a String specifying the parameter name
*
* @param value - a String specifying the parameter value
*
* @throws ParseException which signals that an error has been reached
*
* unexpectedly while parsing the parameter name or value.
*
*/
public void setParameter(String name, String value) throws ParseException {
if (name.equalsIgnoreCase("ttl")) {
try {
Integer.parseInt(value);
} catch (NumberFormatException ex) {
throw new ParseException("bad parameter " + value, 0);
}
}
uriParms.set(name,value);
}
/** Sets the scheme of this URI to sip or sips depending on whether the
 * argument is true or false. The default value is false.
 *
 * @param secure - the boolean value indicating if the SipURI is secure.
 */
public void setSecure(boolean secure) {
    this.scheme = secure ? SIPS : SIP;
}
/** Sets the value of the <code>ttl</code> parameter. The ttl parameter
* specifies the time-to-live value when packets are sent using UDP
* multicast. This is equivalent to setParameter("ttl", ttl).
*
* @param ttl - new value of the <code>ttl</code> parameter
*/
public void setTTLParam(int ttl) {
if (ttl <= 0)
throw new IllegalArgumentException("Bad ttl value");
if (uriParms != null) {
NameValue nv = new NameValue("ttl", Integer.valueOf(ttl));
uriParms.set(nv);
}
}
/** Sets the value of the "transport" parameter. This parameter specifies
* which transport protocol to use for sending requests and responses to
* this entity. The following values are defined: "udp", "tcp", "sctp",
* "tls", "ws", "wss" but other values may be used also. This method is equivalent to
* setParameter("transport", transport). Transport parameter constants
* are defined in the {@link javax.sip.ListeningPoint}.
*
* @param transport - new value for the "transport" parameter
* @see javax.sip.ListeningPoint
*/
public void setTransportParam(String transport) throws ParseException {
if (transport == null)
throw new NullPointerException("null arg");
/*if (transport.compareToIgnoreCase("UDP") == 0
|| transport.compareToIgnoreCase("TLS") == 0
|| transport.compareToIgnoreCase("TCP") == 0
|| transport.compareToIgnoreCase("SCTP") == 0
|| transport.compareToIgnoreCase(ListeningPointExt.WS) == 0
|| transport.compareToIgnoreCase(ListeningPointExt.WSS) == 0) {*/
NameValue nv = new NameValue(TRANSPORT, transport.toLowerCase());
uriParms.set(nv);
/*} else
throw new ParseException("bad transport " + transport, 0);*/
}
/** Returns the value of the "user" URI parameter, or null if it is not set.
 * This is equivalent to getParameter("user").
 * (The original javadoc described the user part of the URI — that text
 * belongs to getUser(); the two comments were swapped.)
 *
 * @return the value of the <code>user</code> parameter of this SipURI
 */
public String getUserParam() {
return getParameter("user");
}
/** Returns whether the the <code>lr</code> parameter is set. This is
* equivalent to hasParameter("lr"). This interface has no getLrParam as
* RFC3261 does not specify any values for the "lr" paramater.
*
* @return true if the "lr" parameter is set, false otherwise.
*/
public boolean hasLrParam() {
return uriParms.getNameValue(LR) != null;
}
/**
* Returns whether the <code>gr</code> parameter is set.
*
* Not part on the interface since gruu is not part of the base RFC3261.
*/
public boolean hasGrParam() {
return uriParms.getNameValue(GRUU) != null;
}
/**
* Sets the <code>gr</code> parameter.
*
* Not part on the interface since gruu is not part of the base RFC3261.
*/
public void setGrParam(String value) {
this.uriParms.set(GRUU, value); // JvB: fixed to not add duplicates
}
/**
* Sets the <code>gr</code> parameter.
*
* Not part on the interface since gruu is not part of the base RFC3261.
*/
public String getGrParam() {
return (String) this.uriParms.getValue(GRUU); // JvB: fixed to not add duplicates
}
/**
*remove the +sip-instance value from the parameter list if it exists.
*/
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import static com.google.common.truth.Truth.assertAbout;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assume.assumeTrue;
import com.android.resources.ResourceType;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.jimfs.Jimfs;
import com.google.common.truth.Subject;
import com.google.devtools.build.android.AndroidResourceMerger.MergingException;
import com.google.devtools.build.android.xml.ArrayXmlResourceValue;
import com.google.devtools.build.android.xml.ArrayXmlResourceValue.ArrayType;
import com.google.devtools.build.android.xml.AttrXmlResourceValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.BooleanResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.ColorResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.DimensionResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.EnumResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.FlagResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.FloatResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.FractionResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.IntegerResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.ReferenceResourceXmlAttrValue;
import com.google.devtools.build.android.xml.AttrXmlResourceValue.StringResourceXmlAttrValue;
import com.google.devtools.build.android.xml.IdXmlResourceValue;
import com.google.devtools.build.android.xml.Namespaces;
import com.google.devtools.build.android.xml.PluralXmlResourceValue;
import com.google.devtools.build.android.xml.PublicXmlResourceValue;
import com.google.devtools.build.android.xml.ResourcesAttribute;
import com.google.devtools.build.android.xml.SimpleXmlResourceValue;
import com.google.devtools.build.android.xml.StyleXmlResourceValue;
import com.google.devtools.build.android.xml.StyleableXmlResourceValue;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import javax.xml.stream.XMLStreamException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link DataResourceXml}. */
@RunWith(JUnit4.class)
public class DataResourceXmlTest {
// Standard XLIFF namespace declaration used by the string-resource tests.
static final ImmutableMap<String, String> XLIFF_NAMESPACES =
ImmutableMap.of("xliff", "urn:oasis:names:tc:xliff:document:1.2");
// Closing "</resources>" chunk exactly as AndroidDataWriter emits it.
static final String END_RESOURCES = new String(AndroidDataWriter.END_RESOURCES);
// Both are re-created before every test in createCleanEnvironment().
private FullyQualifiedName.Factory fqnFactory;
private FileSystem fs;
/** Gives every test a fresh in-memory file system and a fresh name factory. */
@Before
public void createCleanEnvironment() {
  // The two initializations are independent of each other.
  fqnFactory = FullyQualifiedName.Factory.from(ImmutableList.<String>of());
  fs = Jimfs.newFileSystem();
}
/** Writes a values.xml containing {@code xml}, with no extra namespaces or attributes. */
private Path writeResourceXml(String... xml) throws IOException {
  final Map<String, String> none = ImmutableMap.of();
  return writeResourceXml(none, none, xml);
}
/**
 * Writes a values.xml whose root {@code <resources>} element carries the given
 * xmlns declarations and plain attributes, and whose body is {@code xml}
 * joined with newlines. Returns the path of the written file.
 */
private Path writeResourceXml(Map<String, String> namespaces, Map<String, String> attributes,
String... xml) throws IOException {
  final Path values = fs.getPath("root/values/values.xml");
  Files.createDirectories(values.getParent());
  final StringBuilder builder = new StringBuilder();
  builder.append(AndroidDataWriter.PRELUDE).append("<resources");
  // Namespaces render as xmlns:key="value"; plain attributes as key="value".
  for (Map.Entry<String, String> ns : namespaces.entrySet()) {
    builder.append(" xmlns:").append(ns.getKey()).append("=\"").append(ns.getValue()).append('"');
  }
  for (Map.Entry<String, String> attr : attributes.entrySet()) {
    builder.append(' ').append(attr.getKey()).append("=\"").append(attr.getValue()).append('"');
  }
  builder.append('>');
  Joiner.on("\n").appendTo(builder, xml);
  builder.append(END_RESOURCES);
  Files.write(values, builder.toString().getBytes(StandardCharsets.UTF_8));
  return values;
}
// Parses the values file at path, routing overwritable resources (string, color, ...)
// into toOverwrite and combinable ones (id, styleable, public, ...) into toCombine.
private void parseResourcesFrom(
Path path, Map<DataKey, DataResource> toOverwrite, Map<DataKey, DataResource> toCombine)
throws XMLStreamException, IOException {
DataResourceXml.parse(
XmlResourceValues.getXmlInputFactory(),
path,
fqnFactory,
new FakeConsumer(toOverwrite),
new FakeConsumer(toCombine));
}
/** Shorthand: parses {@code raw} (e.g. "string/exit") into a FullyQualifiedName. */
private FullyQualifiedName fqn(String raw) {
return fqnFactory.parse(raw);
}
@Test
public void simpleXmlResources() throws Exception {
Path path =
writeResourceXml(
"<string name='exit' description=\"& egress -> "\">way out</string>",
"<bool name='canExit'>false</bool>",
"<color name='exitColor'>#FF000000</color>",
"<dimen name='exitSize'>20sp</dimen>",
"<integer name='exitInt'>20</integer>",
"<fraction name='exitFraction'>%20</fraction>",
"<drawable name='exitBackground'>#99000000</drawable>",
"<item name='some_id' type='id'/>",
"<item name='reference_id' type='id'>@id/some_id</item>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("string/exit"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.of(
SimpleXmlResourceValue.Type.STRING,
ImmutableMap.of("description", "& egress -> ""),
"way out")), // Value
fqn("bool/canExit"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.createWithValue(
SimpleXmlResourceValue.Type.BOOL, "false")), // Value
fqn("color/exitColor"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.createWithValue(
SimpleXmlResourceValue.Type.COLOR, "#FF000000")), // Value
fqn("dimen/exitSize"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.createWithValue(
SimpleXmlResourceValue.Type.DIMEN, "20sp")), // Value
fqn("integer/exitInt"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.createWithValue(
SimpleXmlResourceValue.Type.INTEGER, "20")), // Value
fqn("fraction/exitFraction"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.createWithValue(
SimpleXmlResourceValue.Type.FRACTION, "%20")), // Value
fqn("drawable/exitBackground"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.createWithValue(
SimpleXmlResourceValue.Type.DRAWABLE, "#99000000")) // Value
);
assertThat(toCombine)
.containsExactly(
fqn("id/some_id"), // Key
DataResourceXml.createWithNoNamespace(path, IdXmlResourceValue.of()), // Value
fqn("id/reference_id"), // Key
DataResourceXml.createWithNoNamespace(
path, IdXmlResourceValue.of("@id/some_id")) // Value
);
}
// A <skip/> element must be ignored entirely and produce no resources.
@Test
public void skipIgnored() throws Exception {
Path path = writeResourceXml("<skip/>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite).isEmpty();
assertThat(toCombine).isEmpty();
}
// An <eat-comment/> element must likewise be ignored entirely.
@Test
public void eatCommentIgnored() throws Exception {
Path path = writeResourceXml("<eat-comment/>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite).isEmpty();
assertThat(toCombine).isEmpty();
}
// <item type=...> spellings of simple resources: typed values, an explicit
// format, placeholders, and an id (the id goes to the combining map, which
// this test intentionally does not assert on).
@Test
public void itemSimpleXmlResources() throws Exception {
Path path =
writeResourceXml(
"<item type='dimen' name='exitSizePercent'>20%</item>",
"<item type='dimen' format='float' name='exitSizeFloat'>20.0</item>",
"<item type='fraction' name='denom'>%5</item>",
"<item name='subtype_id' type='id'>0x6f972360</item>",
"<item type='array' name='oboes'/>",
"<item type='drawable' name='placeholder'/>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("dimen/exitSizePercent"), // Key
DataResourceXml.createWithNoNamespace(
path, SimpleXmlResourceValue.itemWithValue(ResourceType.DIMEN, "20%")), // Value
fqn("dimen/exitSizeFloat"), // Key
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.itemWithFormattedValue(
ResourceType.DIMEN, "float", "20.0")), // Value
fqn("drawable/placeholder"), // Key
DataResourceXml.createWithNoNamespace(
path, SimpleXmlResourceValue.itemPlaceHolderFor(ResourceType.DRAWABLE)), // Value
fqn("array/oboes"), // Key
DataResourceXml.createWithNoNamespace(
path, SimpleXmlResourceValue.itemPlaceHolderFor(ResourceType.ARRAY)), // Value
fqn("fraction/denom"), // Key
DataResourceXml.createWithNoNamespace(
path, SimpleXmlResourceValue.itemWithValue(ResourceType.FRACTION, "%5")) // Value
);
}
// declare-styleable with an inline enum attr: the attr definition is
// overwritable, while the styleable itself combines (all-attrs-as-definitions).
@Test
public void styleableXmlResourcesEnum() throws Exception {
Path path =
writeResourceXml(
"<declare-styleable name='Theme'>",
" <attr name=\"labelPosition\" format=\"enum\">",
" <enum name=\"left\" value=\"0\"/>",
" <enum name=\"right\" value=\"1\"/>",
" </attr>",
"</declare-styleable>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("attr/labelPosition"), // Key
DataResourceXml.createWithNoNamespace(
path,
AttrXmlResourceValue.fromFormatEntries(
EnumResourceXmlAttrValue.asEntryOf("left", "0", "right", "1"))) // Value
);
assertThat(toCombine)
.containsExactly(
fqn("styleable/Theme"), // Key
DataResourceXml.createWithNoNamespace(
path,
StyleableXmlResourceValue.createAllAttrAsDefinitions(
fqnFactory.parse("attr/labelPosition"))) // Value
);
}
// declare-styleable referencing attrs defined outside it: the styleable records
// the attrs as references rather than definitions.
@Test
public void styleableXmlResourcesString() throws Exception {
Path path =
writeResourceXml(
"<declare-styleable name='UnusedStyleable'>",
" <attr name='attribute1'/>",
" <attr name='attribute2'/>",
"</declare-styleable>",
"<attr name='attribute1' format='string'/>",
"<attr name='attribute2' format='string'/>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("attr/attribute1"), // Key
DataResourceXml.createWithNoNamespace(
path,
AttrXmlResourceValue.fromFormatEntries(
StringResourceXmlAttrValue.asEntry())), // Value
fqn("attr/attribute2"), // Key
DataResourceXml.createWithNoNamespace(
path,
AttrXmlResourceValue.fromFormatEntries(
StringResourceXmlAttrValue.asEntry())) // Value
);
assertThat(toCombine)
.containsExactly(
fqn("styleable/UnusedStyleable"), // Key
DataResourceXml.createWithNoNamespace(
path,
StyleableXmlResourceValue.createAllAttrAsReferences(
fqnFactory.parse("attr/attribute1"),
fqnFactory.parse("attr/attribute2"))) // Value
);
}
// Plurals parse each quantity item into a quantity->text map.
@Test
public void pluralXmlResources() throws Exception {
Path path =
writeResourceXml(
"<plurals name='numberOfSongsAvailable'>",
" <item quantity='one'>%d song found.</item>",
" <item quantity='other'>%d songs found.</item>",
"</plurals>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("plurals/numberOfSongsAvailable"), // Key
DataResourceXml.createWithNoNamespace(
path,
PluralXmlResourceValue.createWithoutAttributes(
ImmutableMap.of(
"one", "%d song found.",
"other", "%d songs found."))) // Value
);
}
// Stray text after the closing </item> tags must be tolerated and dropped.
@Test
public void pluralXmlResourcesWithTrailingCharacters() throws Exception {
Path path =
writeResourceXml(
"<plurals name='numberOfSongsAvailable'>",
" <item quantity='one'>%d song found.</item> // this is an invalid comment.",
" <item quantity='other'>%d songs found.</item>typo",
"</plurals>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("plurals/numberOfSongsAvailable"), // Key
DataResourceXml.createWithNoNamespace(
path,
PluralXmlResourceValue.createWithoutAttributes(
ImmutableMap.of(
"one", "%d song found.",
"other", "%d songs found."))) // Value
);
}
// Style with an explicit @style/ parent reference.
@Test
public void styleResources() throws Exception {
Path path =
writeResourceXml(
"<style name=\"candlestick_maker\" parent=\"@style/wax_maker\">\n"
+ " <item name=\"texture\">waxy</item>\n"
+ "</style>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("style/candlestick_maker"), // Key
DataResourceXml.createWithNoNamespace(
path,
StyleXmlResourceValue.of(
"@style/wax_maker", ImmutableMap.of("texture", "waxy"))) // Value
);
}
// No parent attribute at all parses as a null parent.
@Test
public void styleResourcesNoParent() throws Exception {
Path path =
writeResourceXml(
"<style name=\"CandlestickMaker\">\n"
+ " <item name=\"texture\">waxy</item>\n"
+ "</style>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("style/CandlestickMaker"), // Key
DataResourceXml.createWithNoNamespace(
path, StyleXmlResourceValue.of(null, ImmutableMap.of("texture", "waxy"))) // Value
);
}
// parent="" (empty string) explicitly forces no parent, overriding the
// implicit parent a dotted style name would otherwise imply.
@Test
public void styleResourcesForceNoParent() throws Exception {
Path path =
writeResourceXml(
// using a '.' implies a parent Candlestick, parent='' corrects to no parent.
"<style name=\"Candlestick.Maker\" parent=\"\">\n"
+ " <item name=\"texture\">waxy</item>\n"
+ "</style>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("style/Candlestick.Maker"), // Key
DataResourceXml.createWithNoNamespace(
path, StyleXmlResourceValue.of("", ImmutableMap.of("texture", "waxy"))) // Value
);
}
// A bare parent name (no @style/ prefix) is kept verbatim as a lazy reference.
@Test
public void styleResourcesLazyReference() throws Exception {
Path path =
writeResourceXml(
"<style name=\"candlestick_maker\" parent=\"AppTheme\">\n"
+ " <item name=\"texture\">waxy</item>\n"
+ "</style>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("style/candlestick_maker"), // Key
DataResourceXml.createWithNoNamespace(
path,
StyleXmlResourceValue.of("AppTheme", ImmutableMap.of("texture", "waxy"))) // Value
);
}
// <array> elements parse into ArrayType.ARRAY values, one per name.
@Test
public void arrayXmlResources() throws Exception {
Path path =
writeResourceXml(
"<array name='icons'>",
" <item>@drawable/home</item>",
" <item>@drawable/settings</item>",
" <item>@drawable/logout</item>",
"</array>",
"<array name='colors'>",
" <item>#FFFF0000</item>",
" <item>#FF00FF00</item>",
" <item>#FF0000FF</item>",
"</array>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("array/icons"), // Key
DataResourceXml.createWithNoNamespace(
path,
ArrayXmlResourceValue.of(
ArrayType.ARRAY,
"@drawable/home",
"@drawable/settings",
"@drawable/logout")), // Value
fqn("array/colors"), // Key
DataResourceXml.createWithNoNamespace(
path,
ArrayXmlResourceValue.of(
ArrayType.ARRAY, "#FFFF0000", "#FF00FF00", "#FF0000FF")) // Value
);
}
// <string-array> keys under "array/" with STRING_ARRAY type; escaped quotes
// in items are preserved verbatim.
@Test
public void stringArrayXmlResources() throws Exception {
Path path =
writeResourceXml(
"<string-array name='characters'>",
" <item>boojum</item>",
" <item>snark</item>",
" <item>bellman</item>",
" <item>barrister</item>",
" <item>\\\"billiard-marker\\\"</item>",
"</string-array>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("array/characters"),
DataResourceXml.createWithNoNamespace(
path,
ArrayXmlResourceValue.of(
ArrayType.STRING_ARRAY,
"boojum",
"snark",
"bellman",
"barrister",
"\\\"billiard-marker\\\"")));
assertThat(toCombine).isEmpty();
}
// <integer-array> keys under "array/" with INTEGER_ARRAY type.
@Test
public void integerArrayXmlResources() throws Exception {
Path path =
writeResourceXml(
"<integer-array name='bits'>",
" <item>4</item>",
" <item>8</item>",
" <item>16</item>",
" <item>32</item>",
"</integer-array>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("array/bits"), // Key
DataResourceXml.createWithNoNamespace(
path,
ArrayXmlResourceValue.of(ArrayType.INTEGER_ARRAY, "4", "8", "16", "32")) // Value
);
}
// attr with <flag> children and no format parses as a flag entry.
@Test
public void attrFlagXmlResources() throws Exception {
Path path =
writeResourceXml(
" <attr name=\"labelPosition\">",
" <flag name=\"left\" value=\"0\"/>",
" <flag name=\"right\" value=\"1\"/>",
" </attr>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("attr/labelPosition"), // Key
DataResourceXml.createWithNoNamespace(
path,
AttrXmlResourceValue.fromFormatEntries(
FlagResourceXmlAttrValue.asEntryOf(
"left", "0",
"right", "1"))) // Value
);
}
// An explicit format combines with the implicit flag format from the children.
@Test
public void attrMultiFormatImplicitFlagXmlResources() throws Exception {
Path path =
writeResourceXml(
" <attr name='labelPosition' format='reference'>",
" <flag name='left' value='0'/>",
" <flag name='right' value='1'/>",
" </attr>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("attr/labelPosition"), // Key
DataResourceXml.createWithNoNamespace(
path,
AttrXmlResourceValue.fromFormatEntries(
ReferenceResourceXmlAttrValue.asEntry(),
FlagResourceXmlAttrValue.asEntryOf(
"left", "0",
"right", "1"))) // Value
);
}
// A pipe-separated format list yields one entry per named format; entry order
// need not match the attribute's declaration order.
@Test
public void attrMultiFormatResources() throws Exception {
Path path =
writeResourceXml(
"<attr name='labelPosition' ",
"format='color|boolean|dimension|float|integer|string|fraction|reference' />");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("attr/labelPosition"), // Key
DataResourceXml.createWithNoNamespace(
path,
AttrXmlResourceValue.fromFormatEntries(
ColorResourceXmlAttrValue.asEntry(),
BooleanResourceXmlAttrValue.asEntry(),
ReferenceResourceXmlAttrValue.asEntry(),
DimensionResourceXmlAttrValue.asEntry(),
FloatResourceXmlAttrValue.asEntry(),
IntegerResourceXmlAttrValue.asEntry(),
StringResourceXmlAttrValue.asEntry(),
FractionResourceXmlAttrValue.asEntry())) // Value
);
}
// <public> declarations combine (keyed under "public/"), while the declared
// resource itself still overwrites as usual.
@Test
public void publicXmlResource() throws Exception {
Path path =
writeResourceXml(
"<string name='exit'>way out</string>",
"<public type='string' name='exit' id='0x123'/>");
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
assertThat(toOverwrite)
.containsExactly(
fqn("string/exit"),
DataResourceXml.createWithNoNamespace(
path,
SimpleXmlResourceValue.createWithValue(
SimpleXmlResourceValue.Type.STRING, "way out")));
assertThat(toCombine)
.containsExactly(
fqn("public/exit"),
DataResourceXml.createWithNoNamespace(
path, PublicXmlResourceValue.create(ResourceType.STRING, Optional.of(0x123))));
}
// Attributes on the <resources> tag itself become resources keyed as
// "<resources>/{uri}name"; tools:ignore combines, other attributes overwrite.
@Test
public void resourcesAttribute() throws Exception {
Namespaces namespaces = Namespaces.from(
ImmutableMap.of("tools", "http://schemas.android.com/tools"));
Path path = writeResourceXml(
namespaces.asMap(),
ImmutableMap.of("tools:foo", "fooVal", "tools:ignore", "ignoreVal"));
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
parseResourcesFrom(path, toOverwrite, toCombine);
FullyQualifiedName fooFqn = fqn("<resources>/{http://schemas.android.com/tools}foo");
assertThat(toOverwrite)
.containsExactly(
fooFqn,
DataResourceXml.createWithNamespaces(
path,
ResourcesAttribute.of(fooFqn, "tools:foo", "fooVal"),
namespaces)
);
FullyQualifiedName ignoreFqn = fqn("<resources>/{http://schemas.android.com/tools}ignore");
assertThat(toCombine)
.containsExactly(
ignoreFqn,
DataResourceXml.createWithNamespaces(
path,
ResourcesAttribute.of(ignoreFqn, "tools:ignore", "ignoreVal"),
namespaces)
);
}
// Round-trip: parse each simple resource, write it back, and compare XML.
@Test
public void writeSimpleXmlResources() throws Exception {
Path source =
writeResourceXml(
"<string name='exit'>way <a href=\"out.html\">out</a></string>",
"<bool name='canExit'>false</bool>",
"<color name='exitColor'>#FF000000</color>",
"<integer name='exitInt'>5</integer>",
"<drawable name='reference'/>");
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("string/exit")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
source, "<string name='exit'>way <a href=\"out.html\">out</a></string>"));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("bool/canExit")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, "<bool name='canExit'>false</bool>"));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("color/exitColor")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(source, "<color name='exitColor'>#FF000000</color>"));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("integer/exitInt")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, "<integer name='exitInt'>5</integer>"));
}
// Round-trip for <item>-style resources, including format attributes and ids.
@Test
public void writeItemResources() throws Exception {
Path source =
writeResourceXml(
"<item type='dimen' name='exitSizePercent'>20%</item>",
"<item type='dimen' format='float' name='exitSizeFloat'>20.0</item>",
"<item name='exitId' type='id'/>",
"<item name='frac' type='fraction'>5%</item>");
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("dimen/exitSizePercent")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(source, "<item type='dimen' name='exitSizePercent'>20%</item>"));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("dimen/exitSizeFloat")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
source, "<item type='dimen' format='float' name='exitSizeFloat'>20.0</item>"));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("fraction/frac")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(source, "<item name='frac' type='fraction'>5%</item>"));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("id/exitId")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, "<item name='exitId' type='id'/>"));
}
// A used xliff namespace must be re-declared on the written <resources> tag.
@Test
public void writeStringResourceWithXliffNamespace() throws Exception {
Path source = fs.getPath("root/values/values.xml");
Files.createDirectories(source.getParent());
Files.write(
source,
("<resources xmlns:xliff=\"urn:oasis:names:tc:xliff:document:1.2\">"
+ "<string name=\"star_rating\">Check out our 5\n"
+ " <xliff:g id=\"star\">\\u2605</xliff:g>\n"
+ "</string>"
+ "</resources>")
.getBytes(StandardCharsets.UTF_8));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("string/star_rating")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
XLIFF_NAMESPACES,
ImmutableMap.<String, String>of(),
source,
"<string name=\"star_rating\">Check out our 5",
" <xliff:g id=\"star\">\\u2605</xliff:g>",
"</string>"));
}
// CDATA sections must survive the round trip unchanged.
@Test
public void writeStringResourceCData() throws Exception {
String[] xml = {
"<string name='cdata'><![CDATA[<b>Jabber, Jabber</b><br><br>\n Wock!]]></string>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("string/cdata")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Arbitrary (non-xliff) prefixes are preserved, including prefixes declared
// inline on a nested element.
@Test
public void writeStringResourceWithNamespace() throws Exception {
Path source = fs.getPath("root/values/values.xml");
Files.createDirectories(source.getParent());
Files.write(
source,
("<resources xmlns:ns1=\"urn:oasis:names:tc:xliff:document:1.2\">"
+ "<string name=\"star_rating\">Check out our 5\n"
+ " <ns1:g xmlns:foglebert=\"defogle\" foglebert:id=\"star\">\\u2605</ns1:g>\n"
+ "</string>"
+ "</resources>")
.getBytes(StandardCharsets.UTF_8));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("string/star_rating")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
ImmutableMap.of("ns1", "urn:oasis:names:tc:xliff:document:1.2"),
ImmutableMap.<String, String>of(),
source,
"<string name=\"star_rating\">Check out our 5 ",
"<ns1:g xmlns:foglebert=\"defogle\" " + "foglebert:id=\"star\">\\u2605</ns1:g>",
"</string>"));
}
// A namespace that is declared but never used is dropped from the output.
@Test
public void writeStringResourceWithUnusedNamespace() throws Exception {
Path source = fs.getPath("root/values/values.xml");
Files.createDirectories(source.getParent());
Files.write(
source,
("<resources xmlns:ns1=\"urn:oasis:names:tc:xliff:document:1.2\">"
+ "<string name=\"star_rating\">"
+ "not yet implemented\n"
+ "</string>"
+ "</resources>")
.getBytes(StandardCharsets.UTF_8));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("string/star_rating")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
source, "<string name=\"star_rating\">", "not yet implemented\n", "</string>"));
}
// Escaped apostrophes and inline markup must survive the round trip.
@Test
public void writeStringResourceWithEscapedValues() throws Exception {
String[] xml = {
"<string name=\"AAP_SUGGEST_ACCEPT_SUGGESTION\">",
" <b><xliff:g id=\"name\" example=\"Pizza hut\">%1$s</xliff:g></b> "
+ "already exists at <b><xliff:g id=\"address\" "
+ "example=\"123 main street\">%2$s</xliff:g></b><br>",
" <br>",
" Is this the place you\\'re trying to add?\n",
"</string>"
};
Path source = writeResourceXml(XLIFF_NAMESPACES, ImmutableMap.<String, String>of(), xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("string/AAP_SUGGEST_ACCEPT_SUGGESTION")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(XLIFF_NAMESPACES, ImmutableMap.<String, String>of(), source, xml));
}
// Round-trip of a styleable whose attr has no format.
@Test
public void writeStyleableXmlResource() throws Exception {
String[] xml = {
"<declare-styleable name='Theme'>",
" <attr name=\"labelPosition\" />",
"</declare-styleable>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("styleable/Theme")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of a styleable plus the attr it defines, written together.
@Test
public void writeStyleableXmlResourceReference() throws Exception {
String[] xml = {
"<declare-styleable name='Theme'>",
" <attr name=\"labelColor\" format=\"color\" />",
"</declare-styleable>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("styleable/Theme"), fqn("attr/labelColor")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of plurals, including the format attribute on <plurals>.
@Test
public void writePluralXmlResources() throws Exception {
String[] xml = {
"<plurals name='numberOfSongsAvailable' format='none'>",
" <item quantity='one'>%d song found.</item>",
" <item quantity='other'>%d songs found.</item>",
"</plurals>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("plurals/numberOfSongsAvailable")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of <public> declarations, with and without an explicit id.
@Test
public void writePublicXmlResources() throws Exception {
String[] xml = {
"<public name='bar' type='dimen' id='0x7f030003' />",
"<public name='foo' type='dimen' />",
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("public/bar"), fqn("public/foo")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of a plain <array>.
@Test
public void writeArrayXmlResources() throws Exception {
String[] xml = {
"<array name='icons'>",
" <item>@drawable/home</item>",
" <item>@drawable/settings</item>",
" <item>@drawable/logout</item>",
"</array>",
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("array/icons")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of a <string-array>, preserving the translatable attribute.
@Test
public void writeStringArrayXmlResources() throws Exception {
String[] xml = {
"<string-array name='rosebud' translatable='false'>",
" <item>Howard Hughes</item>",
" <item>Randolph Hurst</item>",
"</string-array>",
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("array/rosebud")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of an <integer-array>.
@Test
public void writeIntegerArrayXmlResources() throws Exception {
String[] xml = {
"<integer-array name='bits'>",
" <item>4</item>",
" <item>8</item>",
" <item>16</item>",
" <item>32</item>",
"</integer-array>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("array/bits")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of a flag attr; inner whitespace/indentation is preserved.
@Test
public void writeAttrFlagXmlResources() throws Exception {
String[] xml = {
" <attr name=\"labelPosition\">",
" <flag name=\"left\" value=\"0\"/>",
" <flag name=\"right\" value=\"1\"/>",
" </attr>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("attr/labelPosition")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
source,
" <attr name=\"labelPosition\">",
" <flag name=\"left\" value=\"0\"/>",
" <flag name=\"right\" value=\"1\"/>",
" </attr>"));
}
// Round-trip of a flag attr that also carries an explicit format.
@Test
public void writeAttrMultiFormatImplicitFlagXmlResources() throws Exception {
String[] xml = {
" <attr name='labelPosition' format='reference'>",
" <flag name='left' value='0'/>",
" <flag name='right' value='1'/>",
" </attr>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("attr/labelPosition")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
source,
" <attr name='labelPosition' format='reference'>",
" <flag name='left' value='0'/>",
" <flag name='right' value='1'/>",
" </attr>"));
}
// Round-trip of a multi-format attr; note the expected format list here is
// alphabetized, which is the order the writer emits.
@Test
public void writeAttrMultiFormatResources() throws Exception {
String[] xml = {
"<attr name='labelPosition' ",
"format='boolean|color|dimension|float|fraction|integer|reference|string'/>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("attr/labelPosition")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of a style with a parent reference.
@Test
public void writeStyle() throws Exception {
String[] xml = {
"<style name='candlestick_maker' parent='@style/wax_maker'>\n"
+ " <item name='texture'>waxy</item>\n"
+ "</style>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("style/candlestick_maker")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of a style whose empty parent attribute must be kept.
@Test
public void writeForceNoParentStyle() throws Exception {
String[] xml = {
"<style name='candlestick_maker' parent=''>\n",
" <item name='texture'>waxy</item>\n",
"</style>"
};
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("style/candlestick_maker")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of a <resources>-level attribute; the written file has no body
// elements, hence the null xml argument.
@Test
public void writeResourceAttributes() throws Exception {
Path source = writeResourceXml(
ImmutableMap.of("tools", "http://schemas.android.com/tools"),
ImmutableMap.of("tools:foo", "fooVal"));
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("<resources>/{http://schemas.android.com/tools}foo")))
.xmlContentsIsEqualTo(
resourcesXmlFrom(
ImmutableMap.of("tools", "http://schemas.android.com/tools"),
ImmutableMap.of("tools:foo", "fooVal"),
null));
}
// Round-trip of a <macro>; skipped when the bundled layoutlib predates the type.
@Test
public void writeMacroXmlResource() throws Exception {
// TODO(b/193025750): The current version of the layoutlib prebuilt used by Bazel does not
// contain the macro type.
assumeTrue(ResourceType.getEnum("macro") != null);
String xml = "<macro name='foo'>@string/bar</macro>";
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("macro/foo")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
// Round-trip of an <overlayable>; skipped when layoutlib predates the type.
@Test
public void writeOverlayableXmlResource() throws Exception {
// TODO(b/193025750): The current version of the layoutlib prebuilt used by Bazel does not
// contain the overlayable type.
assumeTrue(ResourceType.getEnum("overlayable") != null);
String xml =
"<overlayable name='foo'>"
+ " <policy type='public'>"
+ " <item name='my_color' type='color'/>"
+ " </policy>"
+ "</overlayable>";
Path source = writeResourceXml(xml);
assertAbout(resourcePaths)
.that(parsedAndWritten(source, fqn("overlayable/foo")))
.xmlContentsIsEqualTo(resourcesXmlFrom(source, xml));
}
@Test
public void serializeMultipleSimpleXmlResources() throws Exception {
Path serialized = fs.getPath("out/out.bin");
Path source = fs.getPath("res/values/values.xml");
FullyQualifiedName stringKey = fqn("string/exit");
DataResourceXml stringValue =
DataResourceXml.createWithNoNamespace(
source,
SimpleXmlResourceValue.createWithValue(SimpleXmlResourceValue.Type.STRING, "way out"));
FullyQualifiedName nullStringKey = fqn("string/nullexit");
DataResourceXml nullStringValue =
DataResourceXml.createWithNoNamespace(
source,
SimpleXmlResourceValue.createWithValue(SimpleXmlResourceValue.Type.STRING, null));
FullyQualifiedName boolKey = fqn("bool/canExit");
DataResourceXml boolValue =
DataResourceXml.createWithNoNamespace(
source,
SimpleXmlResourceValue.createWithValue(SimpleXmlResourceValue.Type.BOOL, "false"));
FullyQualifiedName colorKey = fqn("color/exitColor");
DataResourceXml colorValue =
DataResourceXml.createWithNoNamespace(
source,
SimpleXmlResourceValue.createWithValue(SimpleXmlResourceValue.Type.COLOR, "#FF000000"));
FullyQualifiedName dimenKey = fqn("dimen/exitSize");
DataResourceXml dimenValue =
DataResourceXml.createWithNoNamespace(
source,
SimpleXmlResourceValue.createWithValue(SimpleXmlResourceValue.Type.DIMEN, "20sp"));
AndroidDataSerializer serializer = AndroidDataSerializer.create();
serializer.queueForSerialization(stringKey, stringValue);
serializer.queueForSerialization(nullStringKey, nullStringValue);
serializer.queueForSerialization(boolKey, boolValue);
serializer.queueForSerialization(colorKey, colorValue);
serializer.queueForSerialization(dimenKey, dimenValue);
serializer.flushTo(serialized);
AndroidDataDeserializer deserializer = AndroidParsedDataDeserializer.create();
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
deserializer.read(
DependencyInfo.UNKNOWN,
serialized,
KeyValueConsumers.of(new FakeConsumer(toOverwrite), new FakeConsumer(toCombine), null));
assertThat(toOverwrite)
.containsExactly(
stringKey, stringValue,
boolKey, boolValue,
colorKey, colorValue,
nullStringKey, nullStringValue,
dimenKey, dimenValue);
assertThat(toCombine).isEmpty();
}
@Test
public void serializeItemXmlResources() throws Exception {
Path source = fs.getPath("res/values/values.xml");
assertSerialization(
fqn("dimen/exitSizePercent"),
DataResourceXml.createWithNoNamespace(
source, SimpleXmlResourceValue.itemWithValue(ResourceType.DIMEN, "20%")));
assertSerialization(
fqn("dimen/exitSizeFloat"),
DataResourceXml.createWithNoNamespace(
source,
SimpleXmlResourceValue.itemWithFormattedValue(ResourceType.DIMEN, "float", "20.0")));
assertSerialization(
fqn("fraction/denom"),
DataResourceXml.createWithNoNamespace(
source,
SimpleXmlResourceValue.createWithValue(SimpleXmlResourceValue.Type.COLOR, "5%")));
assertSerialization(
fqn("id/subtype_afrikaans"),
DataResourceXml.createWithNoNamespace(
source, SimpleXmlResourceValue.itemWithValue(ResourceType.ID, "0x6f972360")));
}
@Test
public void serializeStyleableXmlResource() throws Exception {
Path serialized = fs.getPath("out/out.bin");
Path source = fs.getPath("res/values/values.xml");
FullyQualifiedName attrKey = fqn("attr/labelPosition");
DataResourceXml attrValue =
DataResourceXml.createWithNoNamespace(
source,
AttrXmlResourceValue.fromFormatEntries(
EnumResourceXmlAttrValue.asEntryOf("left", "0", "right", "1")));
FullyQualifiedName themeKey = fqn("styleable/Theme");
DataResourceXml themeValue =
DataResourceXml.createWithNoNamespace(
source,
StyleableXmlResourceValue.createAllAttrAsReferences(
fqnFactory.parse("attr/labelPosition")));
AndroidDataSerializer serializer = AndroidDataSerializer.create();
serializer.queueForSerialization(attrKey, attrValue);
serializer.queueForSerialization(themeKey, themeValue);
serializer.flushTo(serialized);
AndroidDataDeserializer deserializer = AndroidParsedDataDeserializer.create();
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
deserializer.read(
DependencyInfo.UNKNOWN,
serialized,
KeyValueConsumers.of(new FakeConsumer(toOverwrite), new FakeConsumer(toCombine), null));
assertThat(toOverwrite).containsEntry(attrKey, attrValue);
assertThat(toCombine).containsEntry(themeKey, themeValue);
}
@Test
public void serializePlurals() throws Exception {
Path path = fs.getPath("res/values/values.xml");
assertSerialization(
fqn("plurals/numberOfSongsAvailable"),
DataResourceXml.createWithNoNamespace(
path,
PluralXmlResourceValue.createWithoutAttributes(
ImmutableMap.of(
"one", "%d song found.",
"other", "%d songs found."))));
}
@Test
public void serializeArrays() throws Exception {
Path path = fs.getPath("res/values/values.xml");
assertSerialization(
fqn("plurals/numberOfSongsAvailable"),
DataResourceXml.createWithNoNamespace(
path,
PluralXmlResourceValue.createWithoutAttributes(
ImmutableMap.of(
"one", "%d song found.",
"other", "%d songs found."))));
assertSerialization(
fqn("array/icons"),
DataResourceXml.createWithNoNamespace(
path,
ArrayXmlResourceValue.of(
ArrayType.ARRAY, "@drawable/home", "@drawable/settings", "@drawable/logout")));
assertSerialization(
fqn("array/colors"),
DataResourceXml.createWithNoNamespace(
path,
ArrayXmlResourceValue.of(ArrayType.ARRAY, "#FFFF0000", "#FF00FF00", "#FF0000FF")));
assertSerialization(
fqn("array/characters"),
DataResourceXml.createWithNoNamespace(
path,
ArrayXmlResourceValue.of(
ArrayType.STRING_ARRAY,
"boojum",
"snark",
"bellman",
"barrister",
"\\\"billiard-marker\\\"")));
}
@Test
public void serializeAttrFlag() throws Exception {
assertSerialization(
fqn("attr/labelPosition"),
DataResourceXml.createWithNoNamespace(
fs.getPath("res/values/values.xml"),
AttrXmlResourceValue.fromFormatEntries(
FlagResourceXmlAttrValue.asEntryOf(
"left", "0",
"right", "1"))));
}
@Test
public void serializeId() throws Exception {
assertSerialization(
fqn("id/squark"),
DataResourceXml.createWithNoNamespace(
fs.getPath("res/values/values.xml"), IdXmlResourceValue.of()));
}
@Test
public void serializePublic() throws Exception {
assertSerialization(
fqn("public/park"),
DataResourceXml.createWithNoNamespace(
fs.getPath("res/values/public.xml"), PublicXmlResourceValue.of(
ImmutableMap.of(
ResourceType.DIMEN, Optional.of(0x7f040000),
ResourceType.STRING, Optional.of(0x7f050000))
)));
}
@Test
public void serializeStyle() throws Exception {
assertSerialization(
fqn("style/snark"),
DataResourceXml.createWithNoNamespace(
fs.getPath("res/values/styles.xml"),
StyleXmlResourceValue.of(null, ImmutableMap.of("look", "boojum"))));
}
@Test
public void assertMultiFormatAttr() throws Exception {
assertSerialization(
fqn("attr/labelPosition"),
DataResourceXml.createWithNoNamespace(
fs.getPath("res/values/values.xml"),
AttrXmlResourceValue.fromFormatEntries(
ColorResourceXmlAttrValue.asEntry(),
BooleanResourceXmlAttrValue.asEntry(),
ReferenceResourceXmlAttrValue.asEntry(),
DimensionResourceXmlAttrValue.asEntry(),
FloatResourceXmlAttrValue.asEntry(),
IntegerResourceXmlAttrValue.asEntry(),
StringResourceXmlAttrValue.asEntry(),
FractionResourceXmlAttrValue.asEntry())));
}
private void assertSerialization(FullyQualifiedName key, DataValue value) throws Exception {
Path serialized = fs.getPath("out/out.bin");
Files.deleteIfExists(serialized);
Path manifestPath = fs.getPath("AndroidManifest.xml");
Files.deleteIfExists(manifestPath);
Files.createFile(manifestPath);
AndroidDataSerializer serializer = AndroidDataSerializer.create();
serializer.queueForSerialization(key, value);
serializer.flushTo(serialized);
AndroidDataDeserializer deserializer = AndroidParsedDataDeserializer.create();
final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
final Map<DataKey, DataResource> toCombine = new HashMap<>();
deserializer.read(
DependencyInfo.UNKNOWN,
serialized,
KeyValueConsumers.of(new FakeConsumer(toOverwrite), new FakeConsumer(toCombine), null));
if (key.isOverwritable()) {
assertThat(toOverwrite).containsEntry(key, value);
assertThat(toCombine).isEmpty();
} else {
assertThat(toCombine).containsEntry(key, value);
assertThat(toOverwrite).isEmpty();
}
}
private String[] resourcesXmlFrom(Path source, String... lines) {
return resourcesXmlFrom(ImmutableMap.<String, String>of(), ImmutableMap.<String, String>of(),
source, lines);
}
private String[] resourcesXmlFrom(Map<String, String> namespaces, Map<String, String> attributes,
Path source, String... lines) {
FluentIterable<String> xml =
FluentIterable.of(new String(AndroidDataWriter.PRELUDE))
.append("<resources")
.append(
FluentIterable.from(namespaces.entrySet())
.transform(
new Function<Map.Entry<String, String>, String>() {
@Override
public String apply(Map.Entry<String, String> input) {
return String.format(
" xmlns:%s=\"%s\"", input.getKey(), input.getValue());
}
})
.join(Joiner.on("")))
.append(
FluentIterable.from(attributes.entrySet())
.transform(
new Function<Map.Entry<String, String>, String>() {
@Override
public String apply(Map.Entry<String, String> input) {
return String.format(" %s=\"%s\"", input.getKey(), input.getValue());
}
})
.join(Joiner.on("")));
if (source == null && (lines == null || lines.length == 0)) {
xml = xml.append("/>");
} else {
xml = xml.append(">");
if (source != null) {
xml = xml.append(String.format("<!-- %s --> <eat-comment/>", source));
}
if (lines != null) {
xml = xml.append(lines);
}
xml = xml.append(END_RESOURCES);
}
return xml.toArray(String.class);
}
  /**
   * Parses the resources at {@code path}, then writes only the requested {@code fqns} back
   * out through a fresh {@link AndroidDataWriter}, returning the path of the produced
   * values file. Any previous contents of the "out" directory are deleted first so each
   * test observes only its own output.
   */
  private Path parsedAndWritten(Path path, FullyQualifiedName... fqns)
      throws XMLStreamException, IOException, MergingException {
    final Map<DataKey, DataResource> toOverwrite = new HashMap<>();
    final Map<DataKey, DataResource> toCombine = new HashMap<>();
    parseResourcesFrom(path, toOverwrite, toCombine);
    Path out = fs.getPath("out");
    if (Files.exists(out)) {
      // Recursively delete the output tree: files first, then each directory on the way
      // back up (postVisitDirectory), so directories are empty when removed.
      Files.walkFileTree(
          out,
          new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                throws IOException {
              Files.delete(file);
              return FileVisitResult.CONTINUE;
            }
            @Override
            public FileVisitResult postVisitDirectory(Path directory, IOException e)
                throws IOException {
              if (e != null) {
                throw e;
              }
              Files.delete(directory);
              return FileVisitResult.CONTINUE;
            }
          });
    }
    // find and write the resource -- the categorization is tested during parsing.
    AndroidDataWriter mergedDataWriter = AndroidDataWriter.createWithDefaults(out);
    for (FullyQualifiedName fqn : fqns) {
      // A key lives in exactly one of the two maps; write it from whichever holds it.
      if (toOverwrite.containsKey(fqn)) {
        toOverwrite.get(fqn).writeResource(fqn, mergedDataWriter);
      } else if (toCombine.containsKey(fqn)) {
        toCombine.get(fqn).writeResource(fqn, mergedDataWriter);
      }
    }
    mergedDataWriter.flush();
    return mergedDataWriter.resourceDirectory().resolve("values/values.xml");
  }
  /** Truth subject factory used with {@code assertAbout} to compare written XML files. */
  private static final Subject.Factory<PathsSubject, Path> resourcePaths = PathsSubject::new;
private static class FakeConsumer
implements ParsedAndroidData.KeyValueConsumer<DataKey, DataResource> {
private final Map<DataKey, DataResource> target;
FakeConsumer(Map<DataKey, DataResource> target) {
this.target = target;
}
@Override
public void accept(DataKey key, DataResource value) {
target.put(key, value);
}
}
}
| |
/**
*/
package bpsim.impl;
import bpsim.BpsimPackage;
import bpsim.WeibullDistributionType;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Weibull Distribution Type</b></em>'.
 * A Weibull distribution parameter with two {@code double} features, Scale and Shape,
 * each tracked with an explicit "is set" flag (EMF unsettable-attribute pattern).
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link bpsim.impl.WeibullDistributionTypeImpl#getScale <em>Scale</em>}</li>
 *   <li>{@link bpsim.impl.WeibullDistributionTypeImpl#getShape <em>Shape</em>}</li>
 * </ul>
 * </p>
 * NOTE: this class is EMF-generated; hand edits will be overwritten on regeneration
 * unless the relevant {@code @generated} tags are changed to {@code @generated NOT}.
 *
 * @generated
 */
public class WeibullDistributionTypeImpl extends DistributionParameterImpl implements WeibullDistributionType {
	/**
	 * The default value of the '{@link #getScale() <em>Scale</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getScale()
	 * @generated
	 * @ordered
	 */
	protected static final double SCALE_EDEFAULT = 0.0;
	/**
	 * The cached value of the '{@link #getScale() <em>Scale</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getScale()
	 * @generated
	 * @ordered
	 */
	protected double scale = SCALE_EDEFAULT;
	/**
	 * This is true if the Scale attribute has been set.
	 * <!-- begin-user-doc -->
	 * Distinguishes "explicitly set to the default" from "never set".
	 * <!-- end-user-doc -->
	 * @generated
	 * @ordered
	 */
	protected boolean scaleESet;
	/**
	 * The default value of the '{@link #getShape() <em>Shape</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getShape()
	 * @generated
	 * @ordered
	 */
	protected static final double SHAPE_EDEFAULT = 0.0;
	/**
	 * The cached value of the '{@link #getShape() <em>Shape</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getShape()
	 * @generated
	 * @ordered
	 */
	protected double shape = SHAPE_EDEFAULT;
	/**
	 * This is true if the Shape attribute has been set.
	 * <!-- begin-user-doc -->
	 * Distinguishes "explicitly set to the default" from "never set".
	 * <!-- end-user-doc -->
	 * @generated
	 * @ordered
	 */
	protected boolean shapeESet;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected WeibullDistributionTypeImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the static EMF metadata class for this model object.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return BpsimPackage.Literals.WEIBULL_DISTRIBUTION_TYPE;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getScale() {
		return scale;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets Scale, marks it as set, and notifies adapters; the notification's
	 * "touch" flag is true only on the first set (when it was previously unset).
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setScale(double newScale) {
		double oldScale = scale;
		scale = newScale;
		boolean oldScaleESet = scaleESet;
		scaleESet = true;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SCALE, oldScale, scale, !oldScaleESet));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Restores Scale to its default and clears the "is set" flag, notifying adapters.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetScale() {
		double oldScale = scale;
		boolean oldScaleESet = scaleESet;
		scale = SCALE_EDEFAULT;
		scaleESet = false;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.UNSET, BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SCALE, oldScale, SCALE_EDEFAULT, oldScaleESet));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetScale() {
		return scaleESet;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getShape() {
		return shape;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Sets Shape, marks it as set, and notifies adapters; the notification's
	 * "touch" flag is true only on the first set (when it was previously unset).
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setShape(double newShape) {
		double oldShape = shape;
		shape = newShape;
		boolean oldShapeESet = shapeESet;
		shapeESet = true;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SHAPE, oldShape, shape, !oldShapeESet));
	}
	/**
	 * <!-- begin-user-doc -->
	 * Restores Shape to its default and clears the "is set" flag, notifying adapters.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetShape() {
		double oldShape = shape;
		boolean oldShapeESet = shapeESet;
		shape = SHAPE_EDEFAULT;
		shapeESet = false;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.UNSET, BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SHAPE, oldShape, SHAPE_EDEFAULT, oldShapeESet));
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetShape() {
		return shapeESet;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective feature accessor used by the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SCALE:
				return getScale();
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SHAPE:
				return getShape();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective feature mutator used by the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SCALE:
				setScale((Double)newValue);
				return;
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SHAPE:
				setShape((Double)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective feature unsetter used by the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SCALE:
				unsetScale();
				return;
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SHAPE:
				unsetShape();
				return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective "is set" query used by the EMF runtime.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SCALE:
				return isSetScale();
			case BpsimPackage.WEIBULL_DISTRIBUTION_TYPE__SHAPE:
				return isSetShape();
		}
		return super.eIsSet(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Renders unset features as {@code <unset>} rather than their default values.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();
		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (scale: ");
		if (scaleESet) result.append(scale); else result.append("<unset>");
		result.append(", shape: ");
		if (shapeESet) result.append(shape); else result.append("<unset>");
		result.append(')');
		return result.toString();
	}
} //WeibullDistributionTypeImpl
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.salesforce.api.dto.composite;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamImplicit;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
import org.apache.camel.component.salesforce.api.dto.AbstractDescribedSObjectBase;
import org.apache.camel.component.salesforce.api.dto.AbstractSObjectBase;
import org.apache.camel.component.salesforce.api.dto.Attributes;
import org.apache.camel.component.salesforce.api.dto.RestError;
import org.apache.camel.util.ObjectHelper;
import static java.util.Objects.requireNonNull;
/**
* Payload and response for the SObject tree Composite API. The main interface
 * for specifying what to include in the submission to the API endpoint. To build
* the tree out use: <blockquote>
*
* <pre>
* {@code
* Account account = ...
* Contact president = ...
* Contact marketing = ...
*
* Account anotherAccount = ...
* Contact sales = ...
* Asset someAsset = ...
*
* SObjectTree request = new SObjectTree();
* request.addObject(account).addChildren(president, marketing);
* request.addObject(anotherAccount).addChild(sales).addChild(someAsset);
* }
* </pre>
*
* </blockquote> This will generate a tree of SObjects resembling: <blockquote>
*
* <pre>
* .
* |-- account
* | |-- president
* | `-- marketing
* `-- anotherAccount
* `-- sales
* `-- someAsset
* </pre>
*
* </blockquote> By default references that correlate between SObjects in the
* tree and returned identifiers and errors are handled automatically, if you
* wish to customize the generation of the reference implement
* {@link ReferenceGenerator} and supply it as constructor argument to
* {@link #SObjectTree(ReferenceGenerator)}.
* <p/>
 * Note that the tree can hold only a single object type at the root of the tree.
*
* @see ReferenceGenerator
* @see SObjectNode
* @see AbstractSObjectBase
* @see AbstractDescribedSObjectBase
*/
@XStreamAlias("SObjectTreeRequest")
public final class SObjectTree implements Serializable {
    private static final long serialVersionUID = 1L;

    @XStreamImplicit
    @JsonProperty
    final List<SObjectNode> records = new CopyOnWriteArrayList<>();

    @XStreamOmitField
    final ReferenceGenerator referenceGenerator;

    /** Type of the objects at the root of the tree; fixed by the first added object. */
    @XStreamOmitField
    private String objectType;

    /**
     * Create new SObject tree with the default {@link ReferenceGenerator}.
     */
    public SObjectTree() {
        this(new Counter());
    }

    /**
     * Create new SObject tree with custom {@link ReferenceGenerator}.
     */
    public SObjectTree(final ReferenceGenerator referenceGenerator) {
        this.referenceGenerator = requireNonNull(referenceGenerator, "You must specify the ReferenceGenerator implementation");
    }

    /**
     * Add SObject at the root of the tree.
     *
     * @param object SObject to add
     * @return {@link SObjectNode} for the given SObject
     */
    public SObjectNode addObject(final AbstractSObjectBase object) {
        ObjectHelper.notNull(object, "object");
        return addNode(new SObjectNode(this, object));
    }

    /**
     * Returns a stream of all nodes in the tree.
     *
     * @return all nodes, roots followed by their descendants
     */
    public Stream<SObjectNode> allNodes() {
        return records.stream().flatMap(r -> Stream.concat(Stream.of(r), r.getChildNodes()));
    }

    /**
     * Returns a stream of all objects in the tree.
     *
     * @return all objects, roots followed by their descendants
     */
    public Stream<AbstractSObjectBase> allObjects() {
        return records.stream().flatMap(r -> Stream.concat(Stream.of(r.getObject()), r.getChildren()));
    }

    /**
     * Returns the type of the objects in the root of the tree.
     *
     * @return object type
     */
    @JsonIgnore
    public String getObjectType() {
        return objectType;
    }

    /** Returns the distinct object types present anywhere in the tree. */
    public Class[] objectTypes() {
        final Set<Class> types = records.stream().flatMap(n -> n.objectTypes()).collect(Collectors.toSet());
        return types.toArray(new Class[types.size()]);
    }

    /**
     * Sets errors for the given reference. Used when processing the response of
     * API invocation.
     *
     * @param referenceId reference identifier
     * @param errors list of {@link RestError}
     */
    public void setErrorFor(final String referenceId, final List<RestError> errors) {
        for (final SObjectNode node : records) {
            if (setErrorFor(node, referenceId, errors)) {
                return;
            }
        }
    }

    /**
     * Sets identifier of SObject for the given reference. Used when processing
     * the response of API invocation.
     *
     * @param referenceId reference identifier
     * @param id SObject identifier
     */
    public void setIdFor(final String referenceId, final String id) {
        for (final SObjectNode node : records) {
            if (setIdFor(node, referenceId, id)) {
                return;
            }
        }
    }

    /**
     * Returns the number of elements in the tree.
     *
     * @return number of elements in the tree
     */
    public int size() {
        return records.stream().mapToInt(r -> r.size()).sum();
    }

    /** Adds a root node, enforcing that all roots share one object type. */
    SObjectNode addNode(final SObjectNode node) {
        final String givenObjectType = node.getObjectType();
        if (objectType != null && !objectType.equals(givenObjectType)) {
            throw new IllegalArgumentException("SObjectTree can hold only records of the same type, previously given: " + objectType + ", and now trying to add: "
                                               + givenObjectType);
        }
        objectType = givenObjectType;
        records.add(node);
        return node;
    }

    /** Depth-first search for the node with the given reference; sets its errors. */
    boolean setErrorFor(final SObjectNode node, final String referenceId, final List<RestError> errors) {
        final Attributes attributes = node.getObject().getAttributes();
        final String attributesReferenceId = attributes.getReferenceId();
        if (Objects.equals(attributesReferenceId, referenceId)) {
            node.setErrors(errors);
            return true;
        }
        return StreamSupport.stream(node.getChildNodes().spliterator(), false).anyMatch(n -> setErrorFor(n, referenceId, errors));
    }

    /** Depth-first search for the node with the given reference; sets its object's id. */
    boolean setIdFor(final SObjectNode node, final String referenceId, final String id) {
        final Attributes attributes = node.getObject().getAttributes();
        final String attributesReferenceId = attributes.getReferenceId();
        if (Objects.equals(attributesReferenceId, referenceId)) {
            final Object object = node.getObject();
            // BUGFIX: previously the null branch passed a null object to
            // updateGeneralObjectId, guaranteeing a NullPointerException; dispatch on the
            // concrete type instead.
            if (object instanceof AbstractSObjectBase) {
                return updateBaseObjectId(id, (AbstractSObjectBase) object);
            }
            return updateGeneralObjectId(id, object);
        }
        return StreamSupport.stream(node.getChildNodes().spliterator(), false).anyMatch(n -> setIdFor(n, referenceId, id));
    }

    /** Sets the id directly on a known SObject base type. Always succeeds. */
    boolean updateBaseObjectId(final String id, final AbstractSObjectBase object) {
        object.setId(id);
        return true;
    }

    /**
     * Sets the id on an arbitrary bean via its "id" property setter.
     *
     * @return true if an "id" property with a setter was found and invoked
     */
    boolean updateGeneralObjectId(final String id, final Object object) {
        final Class<? extends Object> clazz = object.getClass();
        final BeanInfo beanInfo;
        try {
            beanInfo = Introspector.getBeanInfo(clazz);
        } catch (final IntrospectionException e) {
            throw new IllegalStateException(e);
        }
        final PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
        final Optional<PropertyDescriptor> maybeIdProperty = Arrays.stream(propertyDescriptors).filter(pd -> "id".equals(pd.getName())).findFirst();
        if (maybeIdProperty.isPresent()) {
            // BUGFIX: the setter (write method) must be used here; the previous code
            // invoked the getter with an argument, which always threw
            // IllegalArgumentException at runtime.
            final Method writeMethod = maybeIdProperty.get().getWriteMethod();
            if (writeMethod != null) {
                try {
                    writeMethod.invoke(object, id);
                    return true;
                } catch (IllegalAccessException | InvocationTargetException e) {
                    throw new IllegalStateException(e);
                }
            }
        }
        return false;
    }
}
| |
/**
* SAHARA Scheduling Server
*
* Schedules and assigns local laboratory rigs.
*
* @license See LICENSE in the top level directory for complete license terms.
*
* Copyright (c) 2010, University of Technology, Sydney
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of Technology, Sydney nor the names
* of its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @author Michael Diponio (mdiponio)
* @date 4th January 2010
*/
package au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.tests;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import junit.framework.TestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.DataAccessActivator;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.SessionDao;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.ResourcePermission;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.RigType;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.Session;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.User;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.UserClass;
import au.edu.uts.eng.remotelabs.schedserver.dataaccess.testsetup.DataAccessTestSetup;
/**
* Tests the {@link SessionDao} class.
*/
public class SessionDaoTester extends TestCase
{
/** Object of class under test. */
private SessionDao dao;
    @Before
    @Override
    public void setUp() throws Exception
    {
        /* Initialise the shared data-access test environment, then create a fresh DAO
         * (and therefore a fresh Hibernate session) for each test. */
        DataAccessTestSetup.setup();
        this.dao = new SessionDao();
    }
    @After
    @Override
    public void tearDown() throws Exception
    {
        /* Release the DAO's Hibernate session so tests do not leak connections. */
        this.dao.closeSession();
    }
    /**
     * Test method for {@link au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.SessionDao#findActiveSession(au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.User)}.
     * Persists a user with one active session and verifies the DAO finds that session.
     */
    @Test
    public void testFindActiveSession()
    {
        Date now = new Date();
        org.hibernate.Session db = DataAccessActivator.getNewSession();
        db.beginTransaction();
        /* Fixture graph: user, user class, rig type and a resource permission
         * linking them; persisted in dependency order within one transaction. */
        User user = new User("locktest", "ns", "USER");
        db.persist(user);
        UserClass userClass= new UserClass();
        userClass.setName("uc");
        db.persist(userClass);
        RigType type = new RigType();
        type.setName("rig_type");
        db.persist(type);
        ResourcePermission perm = new ResourcePermission();
        perm.setStartTime(now);
        perm.setExpiryTime(now);
        perm.setUserClass(userClass);
        perm.setType("TYPE");
        perm.setRigType(type);
        db.persist(perm);
        /* The active session the DAO is expected to find. */
        Session ses = new Session();
        ses.setActive(true);
        ses.setActivityLastUpdated(now);
        ses.setPriority((short) 1);
        ses.setRequestTime(now);
        ses.setUser(user);
        ses.setUserName(user.getName());
        ses.setUserNamespace(user.getNamespace());
        ses.setRequestedResourceId(type.getId());
        ses.setRequestedResourceName(type.getName());
        ses.setResourcePermission(perm);
        ses.setResourceType("TYPE");
        db.persist(ses);
        db.getTransaction().commit();
        Session found = this.dao.findActiveSession(user);
        assertNotNull(found);
        assertEquals(ses.getId(), found.getId());
        /* Clean up in reverse dependency order so foreign keys are not violated. */
        db.beginTransaction();
        db.delete(ses);
        db.delete(perm);
        db.delete(type);
        db.delete(userClass);
        db.delete(user);
        db.getTransaction().commit();
    }
    /**
     * Test method for {@link au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.SessionDao#findActiveSession(au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.User)}.
     * Verifies null is returned for a user who has no sessions at all.
     */
    @Test
    public void testFindActiveSessionNotFound()
    {
        org.hibernate.Session db = DataAccessActivator.getNewSession();
        db.beginTransaction();
        /* Persist only a user — no session records exist for them. */
        User user = new User("locktest", "ns", "USER");
        db.persist(user);
        db.getTransaction().commit();
        Session found = this.dao.findActiveSession(user);
        assertNull(found);
        /* Clean up the fixture user. */
        db.beginTransaction();
        db.delete(user);
        db.getTransaction().commit();
    }
    /**
     * Test method for {@link au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.SessionDao#findActiveSession(au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.User)}.
     * Persists one active and one inactive session for the same user and verifies
     * only the active one is returned.
     */
    @Test
    public void testFindActiveSessionTwoSessions()
    {
        Date now = new Date();
        org.hibernate.Session db = DataAccessActivator.getNewSession();
        db.beginTransaction();
        /* Fixture graph: user, user class, rig type and a resource permission
         * linking them; persisted in dependency order within one transaction. */
        User user = new User("locktest", "ns", "USER");
        db.persist(user);
        UserClass userClass= new UserClass();
        userClass.setName("uc");
        db.persist(userClass);
        RigType type = new RigType();
        type.setName("rig_type");
        db.persist(type);
        ResourcePermission perm = new ResourcePermission();
        perm.setStartTime(now);
        perm.setExpiryTime(now);
        perm.setUserClass(userClass);
        perm.setType("TYPE");
        perm.setRigType(type);
        db.persist(perm);
        /* The active session — this is the one the DAO must return. */
        Session ses = new Session();
        ses.setActive(true);
        ses.setActivityLastUpdated(now);
        ses.setPriority((short) 1);
        ses.setRequestTime(now);
        ses.setUser(user);
        ses.setUserName(user.getName());
        ses.setUserNamespace(user.getNamespace());
        ses.setRequestedResourceId(type.getId());
        ses.setRequestedResourceName(type.getName());
        ses.setResourcePermission(perm);
        ses.setResourceType("TYPE");
        db.persist(ses);
        /* An inactive, earlier-requested session for the same user — must be ignored. */
        Session ses2 = new Session();
        ses2.setActive(false);
        ses2.setActivityLastUpdated(now);
        ses2.setPriority((short) 1);
        ses2.setRequestTime(new Date(System.currentTimeMillis() - 1000));
        ses2.setUser(user);
        ses2.setUserName(user.getName());
        ses2.setUserNamespace(user.getNamespace());
        ses2.setRequestedResourceId(type.getId());
        ses2.setRequestedResourceName(type.getName());
        ses2.setResourcePermission(perm);
        ses2.setResourceType("TYPE");
        db.persist(ses2);
        db.getTransaction().commit();
        Session found = this.dao.findActiveSession(user);
        assertNotNull(found);
        assertEquals(ses.getId(), found.getId());
        /* Clean up in reverse dependency order so foreign keys are not violated. */
        db.beginTransaction();
        db.delete(ses2);
        db.delete(ses);
        db.delete(perm);
        db.delete(type);
        db.delete(userClass);
        db.delete(user);
        db.getTransaction().commit();
    }
/**
* Test method for {@link au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.SessionDao#findActiveSession(au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.User)}.
*/
@Test
public void testFindActiveSessionTwoActiveSessions()
{
    /* Two active sessions for the same user: the one with the most recent
     * request time must be the one returned. */
    Date timestamp = new Date();
    org.hibernate.Session database = DataAccessActivator.getNewSession();

    /* Fixture: user, user class, rig type and a permission tying them together. */
    database.beginTransaction();
    User owner = new User("locktest", "ns", "USER");
    database.persist(owner);
    UserClass clazz = new UserClass();
    clazz.setName("uc");
    database.persist(clazz);
    RigType rigType = new RigType();
    rigType.setName("rig_type");
    database.persist(rigType);
    ResourcePermission permission = new ResourcePermission();
    permission.setStartTime(timestamp);
    permission.setExpiryTime(timestamp);
    permission.setUserClass(clazz);
    permission.setType("TYPE");
    permission.setRigType(rigType);
    database.persist(permission);

    /* Newer active session, requested now. */
    Session newer = new Session();
    newer.setActive(true);
    newer.setActivityLastUpdated(timestamp);
    newer.setPriority((short) 1);
    newer.setRequestTime(timestamp);
    newer.setUser(owner);
    newer.setUserName(owner.getName());
    newer.setUserNamespace(owner.getNamespace());
    newer.setRequestedResourceId(rigType.getId());
    newer.setRequestedResourceName(rigType.getName());
    newer.setResourcePermission(permission);
    newer.setResourceType("TYPE");
    database.persist(newer);

    /* Older active session, requested one second ago. */
    Session older = new Session();
    older.setActive(true);
    older.setActivityLastUpdated(timestamp);
    older.setPriority((short) 1);
    older.setRequestTime(new Date(System.currentTimeMillis() - 1000));
    older.setUser(owner);
    older.setUserName(owner.getName());
    older.setUserNamespace(owner.getNamespace());
    older.setRequestedResourceId(rigType.getId());
    older.setRequestedResourceName(rigType.getName());
    older.setResourcePermission(permission);
    older.setResourceType("TYPE");
    database.persist(older);
    database.getTransaction().commit();

    /* The newer of the two active sessions should be found. */
    Session found = this.dao.findActiveSession(owner);
    assertNotNull(found);
    assertEquals(newer.getId(), found.getId());

    /* Tear down the fixture. */
    database.beginTransaction();
    database.delete(older);
    database.delete(newer);
    database.delete(permission);
    database.delete(rigType);
    database.delete(clazz);
    database.delete(owner);
    database.getTransaction().commit();
    database.close();
}
/**
* Test method for {@link au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.SessionDao#findActiveSession(au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.User)}.
*/
@Test
public void testFindAllActiveSessions()
{
    /* Two users sharing one permission; two active sessions and one
     * inactive session. findAllActiveSessions must return exactly the
     * two active ones. */
    Date now = new Date();
    org.hibernate.Session db = DataAccessActivator.getNewSession();
    db.beginTransaction();
    User user = new User("sestest", "ns", "USER");
    db.persist(user);
    User user2 = new User("sestest2", "ns", "USER");
    db.persist(user2);
    UserClass userClass = new UserClass();
    userClass.setName("uc");
    db.persist(userClass);
    RigType type = new RigType();
    type.setName("rig_type");
    db.persist(type);
    ResourcePermission perm = new ResourcePermission();
    perm.setStartTime(now);
    perm.setExpiryTime(now);
    perm.setUserClass(userClass);
    perm.setType("TYPE");
    perm.setRigType(type);
    db.persist(perm);
    /* Active session for user2. */
    Session ses = new Session();
    ses.setActive(true);
    ses.setActivityLastUpdated(now);
    ses.setPriority((short) 1);
    ses.setRequestTime(now);
    ses.setUser(user2);
    ses.setUserName(user2.getName());
    ses.setUserNamespace(user2.getNamespace());
    ses.setRequestedResourceId(type.getId());
    ses.setRequestedResourceName(type.getName());
    ses.setResourcePermission(perm);
    ses.setResourceType("TYPE");
    db.persist(ses);
    /* Active session for user. */
    Session ses2 = new Session();
    ses2.setActive(true);
    ses2.setActivityLastUpdated(now);
    ses2.setPriority((short) 1);
    ses2.setRequestTime(new Date(System.currentTimeMillis() - 1000));
    ses2.setUser(user);
    ses2.setUserName(user.getName());
    ses2.setUserNamespace(user.getNamespace());
    ses2.setRequestedResourceId(type.getId());
    ses2.setRequestedResourceName(type.getName());
    ses2.setResourcePermission(perm);
    ses2.setResourceType("TYPE");
    db.persist(ses2);
    /* Inactive session for user — must not be returned. */
    Session ses3 = new Session();
    ses3.setActive(false);
    ses3.setActivityLastUpdated(now);
    ses3.setPriority((short) 1);
    ses3.setRequestTime(new Date(System.currentTimeMillis() - 1000));
    ses3.setUser(user);
    ses3.setUserName(user.getName());
    ses3.setUserNamespace(user.getNamespace());
    ses3.setRequestedResourceId(type.getId());
    ses3.setRequestedResourceName(type.getName());
    ses3.setResourcePermission(perm);
    ses3.setResourceType("TYPE");
    db.persist(ses3);
    db.getTransaction().commit();

    List<Session> sessions = this.dao.findAllActiveSessions();

    /* Clean up before asserting so a failed assertion does not leave
     * fixture rows behind. */
    db.beginTransaction();
    db.delete(ses);
    db.delete(ses2);
    db.delete(ses3);
    // BUG FIX: the original deleted 'ses' a second time here.
    db.delete(perm);
    db.delete(type);
    db.delete(userClass);
    db.delete(user);
    db.delete(user2);
    db.getTransaction().commit();
    // BUG FIX: close the Hibernate session so the connection is released.
    db.close();

    assertNotNull(sessions);
    assertEquals(2, sessions.size());
    List<Long> ids = new ArrayList<Long>(2);
    for (Session s : sessions)
    {
        ids.add(s.getId());
    }
    assertTrue(ids.contains(ses.getId()));
    assertTrue(ids.contains(ses2.getId()));
}
}
| |
/*
* Copyright 1999-2019 Seata.io Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.seata.sqlparser.druid;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.SQLUpdateSetItem;
import com.alibaba.druid.sql.ast.statement.SQLUpdateStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.expr.MySqlCharExpr;
import com.alibaba.druid.sql.dialect.mysql.ast.expr.MySqlOrderingExpr;
import io.seata.sqlparser.ParametersHolder;
import io.seata.sqlparser.SQLParsingException;
import io.seata.sqlparser.SQLType;
import io.seata.sqlparser.druid.mysql.MySQLUpdateRecognizer;
import io.seata.sqlparser.util.JdbcConstants;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
* The type My sql update recognizer test.
*/
public class MySQLUpdateRecognizerTest extends AbstractRecognizerTest {
/**
* Update recognizer test 0.
*/
@Test
public void updateRecognizerTest_0() {
String sql = "UPDATE t1 SET name = 'name1' WHERE id = 'id1'";
SQLStatement statement = getSQLStatement(sql);
MySQLUpdateRecognizer mySQLUpdateRecognizer = new MySQLUpdateRecognizer(sql, statement);
Assertions.assertEquals(sql, mySQLUpdateRecognizer.getOriginalSQL());
Assertions.assertEquals("t1", mySQLUpdateRecognizer.getTableName());
Assertions.assertEquals(1, mySQLUpdateRecognizer.getUpdateColumns().size());
Assertions.assertEquals("name", mySQLUpdateRecognizer.getUpdateColumns().get(0));
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateValues().get(0));
Assertions.assertEquals("id = 'id1'", mySQLUpdateRecognizer.getWhereCondition());
}
/**
* Update recognizer test 1.
*/
@Test
public void updateRecognizerTest_1() {
String sql = "UPDATE t1 SET name1 = 'name1', name2 = 'name2' WHERE id = 'id1'";
SQLStatement statement = getSQLStatement(sql);
MySQLUpdateRecognizer mySQLUpdateRecognizer = new MySQLUpdateRecognizer(sql, statement);
Assertions.assertEquals(sql, mySQLUpdateRecognizer.getOriginalSQL());
Assertions.assertEquals("t1", mySQLUpdateRecognizer.getTableName());
Assertions.assertEquals(2, mySQLUpdateRecognizer.getUpdateColumns().size());
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateColumns().get(0));
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateValues().get(0));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateColumns().get(1));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateValues().get(1));
Assertions.assertEquals("id = 'id1'", mySQLUpdateRecognizer.getWhereCondition());
}
/**
* Update recognizer test 2.
*/
@Test
public void updateRecognizerTest_2() {
String sql = "UPDATE t1 SET name1 = 'name1', name2 = 'name2' WHERE id = ?";
SQLStatement statement = getSQLStatement(sql);
MySQLUpdateRecognizer mySQLUpdateRecognizer = new MySQLUpdateRecognizer(sql, statement);
Assertions.assertEquals(sql, mySQLUpdateRecognizer.getOriginalSQL());
Assertions.assertEquals("t1", mySQLUpdateRecognizer.getTableName());
Assertions.assertEquals(2, mySQLUpdateRecognizer.getUpdateColumns().size());
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateColumns().get(0));
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateValues().get(0));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateColumns().get(1));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateValues().get(1));
ArrayList<List<Object>> paramAppenderList = new ArrayList<>();
String whereCondition = mySQLUpdateRecognizer.getWhereCondition(new ParametersHolder() {
@Override
public Map<Integer,ArrayList<Object>> getParameters() {
ArrayList<Object> idParam = new ArrayList<>();
idParam.add("id1");
Map result = new HashMap();
result.put(1, idParam);
return result;
}
}, paramAppenderList);
Assertions.assertEquals(Collections.singletonList(Collections.singletonList("id1")), paramAppenderList);
Assertions.assertEquals("id = ?", whereCondition);
}
/**
* Update recognizer test 3.
*/
@Test
public void updateRecognizerTest_3() {
String sql = "UPDATE t1 SET name1 = 'name1', name2 = 'name2' WHERE id in (?, ?)";
SQLStatement statement = getSQLStatement(sql);
MySQLUpdateRecognizer mySQLUpdateRecognizer = new MySQLUpdateRecognizer(sql, statement);
Assertions.assertEquals(sql, mySQLUpdateRecognizer.getOriginalSQL());
Assertions.assertEquals("t1", mySQLUpdateRecognizer.getTableName());
Assertions.assertEquals(2, mySQLUpdateRecognizer.getUpdateColumns().size());
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateColumns().get(0));
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateValues().get(0));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateColumns().get(1));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateValues().get(1));
ArrayList<List<Object>> paramAppenderList = new ArrayList<>();
String whereCondition = mySQLUpdateRecognizer.getWhereCondition(new ParametersHolder() {
@Override
public Map<Integer,ArrayList<Object>> getParameters() {
ArrayList<Object> id1Param = new ArrayList<>();
id1Param.add("id1");
ArrayList<Object> id2Param = new ArrayList<>();
id2Param.add("id2");
Map result = new HashMap();
result.put(1, id1Param);
result.put(2, id2Param);
return result;
}
}, paramAppenderList);
Assertions.assertEquals(Collections.singletonList(Arrays.asList("id1", "id2")), paramAppenderList);
Assertions.assertEquals("id IN (?, ?)", whereCondition);
}
/**
* Update recognizer test 4.
*/
@Test
public void updateRecognizerTest_4() {
String sql = "UPDATE t1 SET name1 = 'name1', name2 = 'name2' WHERE id in (?, ?) and name1 = ?";
SQLStatement statement = getSQLStatement(sql);
MySQLUpdateRecognizer mySQLUpdateRecognizer = new MySQLUpdateRecognizer(sql, statement);
Assertions.assertEquals(sql, mySQLUpdateRecognizer.getOriginalSQL());
Assertions.assertEquals("t1", mySQLUpdateRecognizer.getTableName());
Assertions.assertEquals(2, mySQLUpdateRecognizer.getUpdateColumns().size());
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateColumns().get(0));
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateValues().get(0));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateColumns().get(1));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateValues().get(1));
ArrayList<List<Object>> paramAppenderList = new ArrayList<>();
String whereCondition = mySQLUpdateRecognizer.getWhereCondition(new ParametersHolder() {
@Override
public Map<Integer,ArrayList<Object>> getParameters() {
ArrayList<Object> id1Param = new ArrayList<>();
id1Param.add("id1");
ArrayList<Object> id2Param = new ArrayList<>();
id2Param.add("id2");
ArrayList<Object> name1Param = new ArrayList<>();
name1Param.add("name");
Map result = new HashMap();
result.put(1, id1Param);
result.put(2, id2Param);
result.put(3, name1Param);
return result;
}
}, paramAppenderList);
Assertions.assertEquals(Collections.singletonList(Arrays.asList("id1", "id2", "name")), paramAppenderList);
Assertions.assertEquals("id IN (?, ?)\nAND name1 = ?", whereCondition);
}
/**
* Update recognizer test 5.
*/
@Test
public void updateRecognizerTest_5() {
String sql = "UPDATE t1 SET name1 = 'name1', name2 = 'name2' WHERE id between ? and ?";
SQLStatement statement = getSQLStatement(sql);
MySQLUpdateRecognizer mySQLUpdateRecognizer = new MySQLUpdateRecognizer(sql, statement);
Assertions.assertEquals(sql, mySQLUpdateRecognizer.getOriginalSQL());
Assertions.assertEquals("t1", mySQLUpdateRecognizer.getTableName());
Assertions.assertEquals(2, mySQLUpdateRecognizer.getUpdateColumns().size());
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateColumns().get(0));
Assertions.assertEquals("name1", mySQLUpdateRecognizer.getUpdateValues().get(0));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateColumns().get(1));
Assertions.assertEquals("name2", mySQLUpdateRecognizer.getUpdateValues().get(1));
ArrayList<List<Object>> paramAppenderList = new ArrayList<>();
String whereCondition = mySQLUpdateRecognizer.getWhereCondition(new ParametersHolder() {
@Override
public Map<Integer,ArrayList<Object>> getParameters() {
ArrayList<Object> id1Param = new ArrayList<>();
id1Param.add("id1");
ArrayList<Object> id2Param = new ArrayList<>();
id2Param.add("id2");
Map result = new HashMap();
result.put(1, id1Param);
result.put(2, id2Param);
return result;
}
}, paramAppenderList);
Assertions.assertEquals(Collections.singletonList(Arrays.asList("id1", "id2")), paramAppenderList);
Assertions.assertEquals("id BETWEEN ? AND ?", whereCondition);
}
@Test
public void testGetSqlType() {
String sql = "update t set n = ?";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLUpdateRecognizer recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
Assertions.assertEquals(recognizer.getSQLType(), SQLType.UPDATE);
}
@Test
public void testGetUpdateColumns() {
// test with normal
String sql = "update t set a = ?, b = ?, c = ?";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLUpdateRecognizer recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
List<String> updateColumns = recognizer.getUpdateColumns();
Assertions.assertEquals(updateColumns.size(), 3);
// test with alias
sql = "update t set a.a = ?, a.b = ?, a.c = ?";
asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
updateColumns = recognizer.getUpdateColumns();
Assertions.assertEquals(updateColumns.size(), 3);
//test with error
Assertions.assertThrows(SQLParsingException.class, () -> {
String s = "update t set a = a";
List<SQLStatement> sqlStatements = SQLUtils.parseStatements(s, JdbcConstants.MYSQL);
SQLUpdateStatement sqlUpdateStatement = (SQLUpdateStatement) sqlStatements.get(0);
List<SQLUpdateSetItem> updateSetItems = sqlUpdateStatement.getItems();
for (SQLUpdateSetItem updateSetItem : updateSetItems) {
updateSetItem.setColumn(new MySqlCharExpr());
}
MySQLUpdateRecognizer oracleUpdateRecognizer = new MySQLUpdateRecognizer(s, sqlUpdateStatement);
oracleUpdateRecognizer.getUpdateColumns();
});
}
@Test
public void testGetUpdateDatabaseNameColumns() {
// test with normal
String sql = "update d.t set d.t.a = ?, d.t.b = ?, d.t.c = ?";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLUpdateRecognizer recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
List<String> updateColumns = recognizer.getUpdateColumns();
Assertions.assertEquals(updateColumns.size(), 3);
// test with alias
sql = "update t set a.a = ?, a.b = ?, a.c = ?";
asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
updateColumns = recognizer.getUpdateColumns();
Assertions.assertEquals(updateColumns.size(), 3);
//test with error
Assertions.assertThrows(SQLParsingException.class, () -> {
String s = "update t set a = a";
List<SQLStatement> sqlStatements = SQLUtils.parseStatements(s, JdbcConstants.MYSQL);
SQLUpdateStatement sqlUpdateStatement = (SQLUpdateStatement) sqlStatements.get(0);
List<SQLUpdateSetItem> updateSetItems = sqlUpdateStatement.getItems();
for (SQLUpdateSetItem updateSetItem : updateSetItems) {
updateSetItem.setColumn(new MySqlCharExpr());
}
MySQLUpdateRecognizer oracleUpdateRecognizer = new MySQLUpdateRecognizer(s, sqlUpdateStatement);
oracleUpdateRecognizer.getUpdateColumns();
});
}
@Test
public void testGetUpdateValues() {
// test with normal
String sql = "update t set a = ?, b = ?, c = ?";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLUpdateRecognizer recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
List<Object> updateValues = recognizer.getUpdateValues();
Assertions.assertEquals(updateValues.size(), 3);
// test with values
sql = "update t set a = 1, b = 2, c = 3";
asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
updateValues = recognizer.getUpdateValues();
Assertions.assertEquals(updateValues.size(), 3);
// test with error
Assertions.assertThrows(SQLParsingException.class, () -> {
String s = "update t set a = ?";
List<SQLStatement> sqlStatements = SQLUtils.parseStatements(s, JdbcConstants.MYSQL);
SQLUpdateStatement sqlUpdateStatement = (SQLUpdateStatement)sqlStatements.get(0);
List<SQLUpdateSetItem> updateSetItems = sqlUpdateStatement.getItems();
for (SQLUpdateSetItem updateSetItem : updateSetItems) {
updateSetItem.setValue(new MySqlOrderingExpr());
}
MySQLUpdateRecognizer oracleUpdateRecognizer = new MySQLUpdateRecognizer(s, sqlUpdateStatement);
oracleUpdateRecognizer.getUpdateValues();
});
}
@Test
public void testGetTableAlias() {
String sql = "update t set a = ?, b = ?, c = ?";
List<SQLStatement> asts = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
MySQLUpdateRecognizer recognizer = new MySQLUpdateRecognizer(sql, asts.get(0));
Assertions.assertNull(recognizer.getTableAlias());
}
@Override
public String getDbType() {
return JdbcConstants.MYSQL;
}
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.zookeeper;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnLog;
/**
* TODO: Most of the code in this class is ripped from ZooKeeper tests. Instead
* of redoing it, we should contribute updates to their code which let us more
* easily access testing helper objects.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class MiniZooKeeperCluster {
    private static final Log LOG = LogFactory.getLog(MiniZooKeeperCluster.class);

    /** ZooKeeper tick time (ms) used when no override is set via {@link #setTickTime(int)}. */
    private static final int TICK_TIME = 2000;
    /** How long (ms) to wait for a server to come up or go down before giving up. */
    private static final int CONNECTION_TIMEOUT = 30000;

    private boolean started;

    /** The default port. If zero, we use a random port. */
    private int defaultClientPort = 0;

    /** Client port of the currently active server. */
    private int clientPort;

    /* Parallel lists: index i holds the factory, server and client port of the i-th server. */
    private List<NIOServerCnxnFactory> standaloneServerFactoryList;
    private List<ZooKeeperServer> zooKeeperServers;
    private List<Integer> clientPortList;

    private int activeZKServerIndex;
    private int tickTime = 0;

    private Configuration configuration;

    public MiniZooKeeperCluster() {
        this(new Configuration());
    }

    public MiniZooKeeperCluster(Configuration configuration) {
        this.started = false;
        this.configuration = configuration;
        activeZKServerIndex = -1;
        zooKeeperServers = new ArrayList<ZooKeeperServer>();
        clientPortList = new ArrayList<Integer>();
        standaloneServerFactoryList = new ArrayList<NIOServerCnxnFactory>();
    }

    /**
     * Sets the client port that the first server will try to bind.
     *
     * @param clientPort the port, must be positive
     * @throws IllegalArgumentException if the port is not positive
     */
    public void setDefaultClientPort(int clientPort) {
        if (clientPort <= 0) {
            throw new IllegalArgumentException("Invalid default ZK client port: "
                + clientPort);
        }
        this.defaultClientPort = clientPort;
    }

    /**
     * Selects a ZK client port. Returns the default port if specified.
     * Otherwise, returns a random port. The random port is selected from the
     * range between 49152 to 65535. These ports cannot be registered with IANA
     * and are intended for dynamic allocation (see http://bit.ly/dynports).
     */
    private int selectClientPort() {
        if (defaultClientPort > 0) {
            return defaultClientPort;
        }
        return 0xc000 + new Random().nextInt(0x3f00);
    }

    /** Overrides the ZooKeeper tick time; ignored if not positive. */
    public void setTickTime(int tickTime) {
        this.tickTime = tickTime;
    }

    /** @return number of backup (non-active) servers currently configured */
    public int getBackupZooKeeperServerNum() {
        return zooKeeperServers.size() - 1;
    }

    /** @return total number of servers (active plus backups) */
    public int getZooKeeperServerNum() {
        return zooKeeperServers.size();
    }

    // / XXX: From o.a.zk.t.ClientBase
    private static void setupTestEnv() {
        // during the tests we run with 100K prealloc in the logs.
        // on windows systems prealloc of 64M was seen to take ~15seconds
        // resulting in test failure (client timeout on first session).
        // set env and directly in order to handle static init/gc issues
        System.setProperty("zookeeper.preAllocSize", "100");
        FileTxnLog.setPreallocSize(100 * 1024);
    }

    /** Starts a single-server cluster rooted at {@code baseDir}. */
    public int startup(File baseDir) throws IOException, InterruptedException {
        return startup(baseDir, 1);
    }

    /**
     * Starts {@code numZooKeeperServers} standalone servers; the first becomes active.
     *
     * @param baseDir parent directory for per-server data directories
     * @param numZooKeeperServers how many servers to start
     * @return ClientPort server bound to, -1 if there was a
     *         binding problem and we couldn't pick another port.
     * @throws IOException if a server fails to start within the timeout
     * @throws InterruptedException if interrupted while waiting
     */
    public int startup(File baseDir, int numZooKeeperServers) throws IOException,
            InterruptedException {
        if (numZooKeeperServers <= 0) {
            return -1;
        }
        setupTestEnv();
        shutdown();
        int tentativePort = selectClientPort();
        // running all the ZK servers
        for (int i = 0; i < numZooKeeperServers; i++) {
            File dir = new File(baseDir, "zookeeper_" + i).getAbsoluteFile();
            recreateDir(dir);
            int tickTimeToUse;
            if (this.tickTime > 0) {
                tickTimeToUse = this.tickTime;
            } else {
                tickTimeToUse = TICK_TIME;
            }
            ZooKeeperServer server = new ZooKeeperServer(dir, dir, tickTimeToUse);
            NIOServerCnxnFactory standaloneServerFactory;
            // Retry with fresh random ports until a bind succeeds (unless a
            // fixed default port was requested, in which case fail fast).
            while (true) {
                try {
                    standaloneServerFactory = new NIOServerCnxnFactory();
                    standaloneServerFactory.configure(
                        new InetSocketAddress(tentativePort),
                        configuration.getInt(HConstants.ZOOKEEPER_MAX_CLIENT_CNXNS,
                            1000));
                } catch (BindException e) {
                    LOG.debug("Failed binding ZK Server to client port: " +
                        tentativePort, e);
                    // We're told to use some port but it's occupied, fail
                    if (defaultClientPort > 0) {
                        return -1;
                    }
                    // This port is already in use, try to use another.
                    tentativePort = selectClientPort();
                    continue;
                }
                break;
            }
            // Start up this ZK server
            standaloneServerFactory.startup(server);
            if (!waitForServerUp(tentativePort, CONNECTION_TIMEOUT)) {
                throw new IOException("Waiting for startup of standalone server");
            }
            // We have selected this port as a client port.
            clientPortList.add(tentativePort);
            standaloneServerFactoryList.add(standaloneServerFactory);
            zooKeeperServers.add(server);
            tentativePort++; //for the next server
        }
        // set the first one to be active ZK; Others are backups
        activeZKServerIndex = 0;
        started = true;
        clientPort = clientPortList.get(activeZKServerIndex);
        LOG.info("Started MiniZK Cluster and connect 1 ZK server " +
            "on client port: " + clientPort);
        return clientPort;
    }

    /** Deletes {@code dir} if present and recreates it empty. */
    private void recreateDir(File dir) throws IOException {
        if (dir.exists()) {
            if (!FileUtil.fullyDelete(dir)) {
                throw new IOException("Could not delete zk base directory: " + dir);
            }
        }
        try {
            // BUG FIX: mkdirs() signals failure by returning false, not by
            // throwing — the original ignored the return value, so a failed
            // create went undetected until some later opaque ZK error.
            if (!dir.mkdirs() && !dir.isDirectory()) {
                throw new IOException("Could not create zk directory: " + dir);
            }
        } catch (SecurityException e) {
            throw new IOException("creating dir: " + dir, e);
        }
    }

    /**
     * Shuts down every server in the cluster; no-op if not started.
     *
     * @throws IOException if a server fails to stop within the timeout
     */
    public void shutdown() throws IOException {
        if (!started) {
            return;
        }
        // shut down all the zk servers
        for (int i = 0; i < standaloneServerFactoryList.size(); i++) {
            NIOServerCnxnFactory standaloneServerFactory =
                standaloneServerFactoryList.get(i);
            int clientPort = clientPortList.get(i);
            standaloneServerFactory.shutdown();
            if (!waitForServerDown(clientPort, CONNECTION_TIMEOUT)) {
                throw new IOException("Waiting for shutdown of standalone server");
            }
        }
        for (ZooKeeperServer zkServer : zooKeeperServers) {
            //explicitly close ZKDatabase since ZookeeperServer does not close them
            zkServer.getZKDatabase().close();
        }
        // clear everything
        started = false;
        activeZKServerIndex = 0;
        standaloneServerFactoryList.clear();
        clientPortList.clear();
        zooKeeperServers.clear();
        LOG.info("Shutdown MiniZK cluster with all ZK servers");
    }

    /**@return clientPort return clientPort if there is another ZK backup can run
     *         when killing the current active; return -1, if there is no backups.
     * @throws IOException
     * @throws InterruptedException
     */
    public int killCurrentActiveZooKeeperServer() throws IOException,
            InterruptedException {
        if (!started || activeZKServerIndex < 0) {
            return -1;
        }
        // Shutdown the current active one
        NIOServerCnxnFactory standaloneServerFactory =
            standaloneServerFactoryList.get(activeZKServerIndex);
        int clientPort = clientPortList.get(activeZKServerIndex);
        standaloneServerFactory.shutdown();
        if (!waitForServerDown(clientPort, CONNECTION_TIMEOUT)) {
            throw new IOException("Waiting for shutdown of standalone server");
        }
        zooKeeperServers.get(activeZKServerIndex).getZKDatabase().close();
        // remove the current active zk server
        standaloneServerFactoryList.remove(activeZKServerIndex);
        clientPortList.remove(activeZKServerIndex);
        zooKeeperServers.remove(activeZKServerIndex);
        LOG.info("Kill the current active ZK servers in the cluster " +
            "on client port: " + clientPort);
        if (standaloneServerFactoryList.size() == 0) {
            // there is no backup servers;
            return -1;
        }
        // after removal, index 0 now refers to the first backup, which becomes active
        clientPort = clientPortList.get(activeZKServerIndex);
        LOG.info("Activate a backup zk server in the cluster " +
            "on client port: " + clientPort);
        // return the next back zk server's port
        return clientPort;
    }

    /**
     * Kill one back up ZK servers
     * @throws IOException
     * @throws InterruptedException
     */
    public void killOneBackupZooKeeperServer() throws IOException,
            InterruptedException {
        if (!started || activeZKServerIndex < 0 ||
            standaloneServerFactoryList.size() <= 1) {
            return;
        }
        int backupZKServerIndex = activeZKServerIndex + 1;
        // Shutdown the backup immediately following the active one
        NIOServerCnxnFactory standaloneServerFactory =
            standaloneServerFactoryList.get(backupZKServerIndex);
        int clientPort = clientPortList.get(backupZKServerIndex);
        standaloneServerFactory.shutdown();
        if (!waitForServerDown(clientPort, CONNECTION_TIMEOUT)) {
            throw new IOException("Waiting for shutdown of standalone server");
        }
        zooKeeperServers.get(backupZKServerIndex).getZKDatabase().close();
        // remove this backup zk server
        standaloneServerFactoryList.remove(backupZKServerIndex);
        clientPortList.remove(backupZKServerIndex);
        zooKeeperServers.remove(backupZKServerIndex);
        LOG.info("Kill one backup ZK servers in the cluster " +
            "on client port: " + clientPort);
    }

    // XXX: From o.a.zk.t.ClientBase
    /**
     * Polls the server's four-letter-word port until it stops accepting
     * connections or the timeout elapses.
     *
     * @return true if the server went down within {@code timeout} ms
     */
    private static boolean waitForServerDown(int port, long timeout) {
        long start = System.currentTimeMillis();
        while (true) {
            try {
                Socket sock = new Socket("localhost", port);
                try {
                    OutputStream outstream = sock.getOutputStream();
                    outstream.write("stat".getBytes());
                    outstream.flush();
                } finally {
                    sock.close();
                }
            } catch (IOException e) {
                // connection refused means the server is down
                return true;
            }
            if (System.currentTimeMillis() > start + timeout) {
                break;
            }
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                // ignore
            }
        }
        return false;
    }

    // XXX: From o.a.zk.t.ClientBase
    /**
     * Polls the server with the "stat" four-letter command until it answers
     * with its version banner or the timeout elapses.
     *
     * @return true if the server came up within {@code timeout} ms
     */
    private static boolean waitForServerUp(int port, long timeout) {
        long start = System.currentTimeMillis();
        while (true) {
            try {
                Socket sock = new Socket("localhost", port);
                BufferedReader reader = null;
                try {
                    OutputStream outstream = sock.getOutputStream();
                    outstream.write("stat".getBytes());
                    outstream.flush();
                    Reader isr = new InputStreamReader(sock.getInputStream());
                    reader = new BufferedReader(isr);
                    String line = reader.readLine();
                    if (line != null && line.startsWith("Zookeeper version:")) {
                        return true;
                    }
                } finally {
                    sock.close();
                    if (reader != null) {
                        reader.close();
                    }
                }
            } catch (IOException e) {
                // ignore as this is expected
                LOG.info("server localhost:" + port + " not up " + e);
            }
            if (System.currentTimeMillis() > start + timeout) {
                break;
            }
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                // ignore
            }
        }
        return false;
    }

    /** @return the client port of the currently active server */
    public int getClientPort() {
        return clientPort;
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app.data;
import android.content.ComponentName;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.pm.PackageManager;
import android.content.pm.ProviderInfo;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.os.Build;
import android.test.AndroidTestCase;
import android.util.Log;
import com.example.android.sunshine.app.data.WeatherContract.LocationEntry;
import com.example.android.sunshine.app.data.WeatherContract.WeatherEntry;
/*
Note: This is not a complete set of tests of the Sunshine ContentProvider, but it does test
that at least the basic functionality has been implemented correctly.
Students: Uncomment the tests in this class as you implement the functionality in your
ContentProvider to make sure that you've implemented things reasonably correctly.
*/
public class TestProvider extends AndroidTestCase {

    public static final String LOG_TAG = TestProvider.class.getSimpleName();

    /*
       This helper function deletes all records from both database tables using the ContentProvider.
       It also queries the ContentProvider to make sure that the database has been successfully
       deleted, so it cannot be used until the Query and Delete functions have been written
       in the ContentProvider.

       Students: Replace the calls to deleteAllRecordsFromDB with this one after you have written
       the delete functionality in the ContentProvider.
     */
    public void deleteAllRecordsFromProvider() {
        mContext.getContentResolver().delete(
                WeatherEntry.CONTENT_URI,
                null,
                null
        );
        mContext.getContentResolver().delete(
                LocationEntry.CONTENT_URI,
                null,
                null
        );

        // Verify the deletes actually emptied both tables before any test runs.
        Cursor cursor = mContext.getContentResolver().query(
                WeatherEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );
        assertEquals("Error: Records not deleted from Weather table during delete", 0, cursor.getCount());
        cursor.close();

        cursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );
        assertEquals("Error: Records not deleted from Location table during delete", 0, cursor.getCount());
        cursor.close();
    }

    /*
       This helper function deletes all records from both database tables using the database
       functions only. This is designed to be used to reset the state of the database until the
       delete functionality is available in the ContentProvider.
     */
    public void deleteAllRecordsFromDB() {
        WeatherDbHelper dbHelper = new WeatherDbHelper(mContext);
        SQLiteDatabase db = dbHelper.getWritableDatabase();

        // Weather rows are deleted first; NOTE(review): presumably because they
        // reference Location rows via a foreign key — confirm against the schema.
        db.delete(WeatherEntry.TABLE_NAME, null, null);
        db.delete(LocationEntry.TABLE_NAME, null, null);
        db.close();
    }

    /*
       Student: Refactor this function to use the deleteAllRecordsFromProvider functionality once
       you have implemented delete functionality there.
     */
    public void deleteAllRecords() {
        deleteAllRecordsFromProvider();
    }

    // Since we want each test to start with a clean slate, run deleteAllRecords
    // in setUp (called by the test runner before each test).
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        deleteAllRecords();
    }

    /*
       This test checks to make sure that the content provider is registered correctly.
       Students: Uncomment this test to make sure you've correctly registered the WeatherProvider.
     */
    public void testProviderRegistry() {
        PackageManager pm = mContext.getPackageManager();

        // We define the component name based on the package name from the context and the
        // WeatherProvider class.
        ComponentName componentName = new ComponentName(mContext.getPackageName(),
                WeatherProvider.class.getName());
        try {
            // Fetch the provider info using the component name from the PackageManager
            // This throws an exception if the provider isn't registered.
            ProviderInfo providerInfo = pm.getProviderInfo(componentName, 0);

            // Make sure that the registered authority matches the authority from the Contract.
            assertEquals("Error: WeatherProvider registered with authority: " + providerInfo.authority +
                    " instead of authority: " + WeatherContract.CONTENT_AUTHORITY,
                    providerInfo.authority, WeatherContract.CONTENT_AUTHORITY);
        } catch (PackageManager.NameNotFoundException e) {
            // I guess the provider isn't registered correctly.
            assertTrue("Error: WeatherProvider not registered at " + mContext.getPackageName(),
                    false);
        }
    }

    /*
        This test doesn't touch the database. It verifies that the ContentProvider returns
        the correct type for each type of URI that it can handle.
        Students: Uncomment this test to verify that your implementation of GetType is
        functioning correctly.
     */
    public void testGetType() {
        // content://com.example.android.sunshine.app/weather/
        String type = mContext.getContentResolver().getType(WeatherEntry.CONTENT_URI);
        // vnd.android.cursor.dir/com.example.android.sunshine.app/weather
        assertEquals("Error: the WeatherEntry CONTENT_URI should return WeatherEntry.CONTENT_TYPE",
                WeatherEntry.CONTENT_TYPE, type);

        String testLocation = "94074";
        // content://com.example.android.sunshine.app/weather/94074
        type = mContext.getContentResolver().getType(
                WeatherEntry.buildWeatherLocation(testLocation));
        // vnd.android.cursor.dir/com.example.android.sunshine.app/weather
        assertEquals("Error: the WeatherEntry CONTENT_URI with location should return WeatherEntry.CONTENT_TYPE",
                WeatherEntry.CONTENT_TYPE, type);

        long testDate = 1419120000L; // December 21st, 2014
        // content://com.example.android.sunshine.app/weather/94074/20140612
        type = mContext.getContentResolver().getType(
                WeatherEntry.buildWeatherLocationWithDate(testLocation, testDate));
        // vnd.android.cursor.item/com.example.android.sunshine.app/weather/1419120000
        assertEquals("Error: the WeatherEntry CONTENT_URI with location and date should return WeatherEntry.CONTENT_ITEM_TYPE",
                WeatherEntry.CONTENT_ITEM_TYPE, type);

        // content://com.example.android.sunshine.app/location/
        type = mContext.getContentResolver().getType(LocationEntry.CONTENT_URI);
        // vnd.android.cursor.dir/com.example.android.sunshine.app/location
        assertEquals("Error: the LocationEntry CONTENT_URI should return LocationEntry.CONTENT_TYPE",
                LocationEntry.CONTENT_TYPE, type);
    }

    /*
        This test uses the database directly to insert and then uses the ContentProvider to
        read out the data. Uncomment this test to see if the basic weather query functionality
        given in the ContentProvider is working correctly.
     */
    public void testBasicWeatherQuery() {
        // insert our test records into the database
        WeatherDbHelper dbHelper = new WeatherDbHelper(mContext);
        SQLiteDatabase db = dbHelper.getWritableDatabase();

        // NOTE(review): testValues is never used — insertNorthPoleLocationValues
        // builds its own values internally. Left in place for parity with the course code.
        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
        long locationRowId = TestUtilities.insertNorthPoleLocationValues(mContext);

        // Fantastic. Now that we have a location, add some weather!
        ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId);

        long weatherRowId = db.insert(WeatherEntry.TABLE_NAME, null, weatherValues);
        assertTrue("Unable to Insert WeatherEntry into the Database", weatherRowId != -1);

        db.close();

        // Test the basic content provider query
        Cursor weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );

        // Make sure we get the correct cursor out of the database
        // NOTE(review): weatherCursor is not closed here — presumably
        // TestUtilities.validateCursor closes it; confirm in TestUtilities.
        TestUtilities.validateCursor("testBasicWeatherQuery", weatherCursor, weatherValues);
    }

    /*
        This test uses the database directly to insert and then uses the ContentProvider to
        read out the data. Uncomment this test to see if your location queries are
        performing correctly.
     */
    public void testBasicLocationQueries() {
        // insert our test records into the database
        // NOTE(review): dbHelper/db are opened but never used or closed in this
        // test (the insert goes through the helper below) — candidate for removal.
        WeatherDbHelper dbHelper = new WeatherDbHelper(mContext);
        SQLiteDatabase db = dbHelper.getWritableDatabase();

        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
        long locationRowId = TestUtilities.insertNorthPoleLocationValues(mContext);

        // Test the basic content provider query
        Cursor locationCursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );

        // Make sure we get the correct cursor out of the database
        TestUtilities.validateCursor("testBasicLocationQueries, location query", locationCursor, testValues);

        // Has the NotificationUri been set correctly? --- we can only test this easily against API
        // level 19 or greater because getNotificationUri was added in API level 19.
        if ( Build.VERSION.SDK_INT >= 19 ) {
            assertEquals("Error: Location Query did not properly set NotificationUri",
                    locationCursor.getNotificationUri(), LocationEntry.CONTENT_URI);
        }
    }

    /*
        This test uses the provider to insert and then update the data. Uncomment this test to
        see if your update location is functioning correctly.
     */
    public void testUpdateLocation() {
        // Create a new map of values, where column names are the keys
        ContentValues values = TestUtilities.createNorthPoleLocationValues();

        Uri locationUri = mContext.getContentResolver().
                insert(LocationEntry.CONTENT_URI, values);
        long locationRowId = ContentUris.parseId(locationUri);

        // Verify we got a row back.
        assertTrue(locationRowId != -1);
        Log.d(LOG_TAG, "New row id: " + locationRowId);

        ContentValues updatedValues = new ContentValues(values);
        updatedValues.put(LocationEntry._ID, locationRowId);
        updatedValues.put(LocationEntry.COLUMN_CITY_NAME, "Santa's Village");

        // Create a cursor with observer to make sure that the content provider is notifying
        // the observers as expected
        Cursor locationCursor = mContext.getContentResolver().query(LocationEntry.CONTENT_URI, null, null, null, null);

        TestUtilities.TestContentObserver tco = TestUtilities.getTestContentObserver();
        locationCursor.registerContentObserver(tco);

        int count = mContext.getContentResolver().update(
                LocationEntry.CONTENT_URI, updatedValues, LocationEntry._ID + "= ?",
                new String[] { Long.toString(locationRowId)});
        assertEquals(count, 1);

        // Test to make sure our observer is called. If not, we throw an assertion.
        //
        // Students: If your code is failing here, it means that your content provider
        // isn't calling getContext().getContentResolver().notifyChange(uri, null);
        tco.waitForNotificationOrFail();

        locationCursor.unregisterContentObserver(tco);
        locationCursor.close();

        // A cursor is your primary interface to the query results.
        Cursor cursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null,   // projection
                LocationEntry._ID + " = " + locationRowId,
                null,   // Values for the "where" clause
                null    // sort order
        );

        TestUtilities.validateCursor("testUpdateLocation. Error validating location entry update.",
                cursor, updatedValues);

        cursor.close();
    }

    // Student: Uncomment this test after you have completed writing the insert functionality
    // in your provider. It also relies on the query functionality being complete, since the
    // inserted rows are read back through the provider to verify the round trip.
    public void testInsertReadProvider() {
        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();

        // Register a content observer for our insert. This time, directly with the content resolver
        TestUtilities.TestContentObserver tco = TestUtilities.getTestContentObserver();
        mContext.getContentResolver().registerContentObserver(LocationEntry.CONTENT_URI, true, tco);
        Uri locationUri = mContext.getContentResolver().insert(LocationEntry.CONTENT_URI, testValues);

        // Did our content observer get called? Students: If this fails, your insert location
        // isn't calling getContext().getContentResolver().notifyChange(uri, null);
        tco.waitForNotificationOrFail();
        mContext.getContentResolver().unregisterContentObserver(tco);

        long locationRowId = ContentUris.parseId(locationUri);

        // Verify we got a row back.
        assertTrue(locationRowId != -1);

        // Data's inserted. IN THEORY. Now pull some out to stare at it and verify it made
        // the round trip.

        // A cursor is your primary interface to the query results.
        Cursor cursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null  // sort order
        );

        TestUtilities.validateCursor("testInsertReadProvider. Error validating LocationEntry.",
                cursor, testValues);

        // Fantastic. Now that we have a location, add some weather!
        ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId);
        // The TestContentObserver is a one-shot class
        tco = TestUtilities.getTestContentObserver();

        mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, tco);

        Uri weatherInsertUri = mContext.getContentResolver()
                .insert(WeatherEntry.CONTENT_URI, weatherValues);
        assertTrue(weatherInsertUri != null);

        // Did our content observer get called? Students: If this fails, your insert weather
        // in your ContentProvider isn't calling
        // getContext().getContentResolver().notifyChange(uri, null);
        tco.waitForNotificationOrFail();
        mContext.getContentResolver().unregisterContentObserver(tco);

        // A cursor is your primary interface to the query results.
        Cursor weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.CONTENT_URI,  // Table to Query
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null // columns to group by
        );

        TestUtilities.validateCursor("testInsertReadProvider. Error validating WeatherEntry insert.",
                weatherCursor, weatherValues);

        // Add the location values in with the weather data so that we can make
        // sure that the join worked and we actually get all the values back
        weatherValues.putAll(testValues);

        // Get the joined Weather and Location data
        weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.buildWeatherLocation(TestUtilities.TEST_LOCATION),
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null  // sort order
        );
        TestUtilities.validateCursor("testInsertReadProvider.  Error validating joined Weather and Location Data.",
                weatherCursor, weatherValues);

        // Get the joined Weather and Location data with a start date
        weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.buildWeatherLocationWithStartDate(
                        TestUtilities.TEST_LOCATION, TestUtilities.TEST_DATE),
                null, // leaving "columns" null just returns all the columns.
                null, // cols for "where" clause
                null, // values for "where" clause
                null  // sort order
        );
        TestUtilities.validateCursor("testInsertReadProvider.  Error validating joined Weather and Location Data with start date.",
                weatherCursor, weatherValues);

        // Get the joined Weather data for a specific date
        weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.buildWeatherLocationWithDate(TestUtilities.TEST_LOCATION, TestUtilities.TEST_DATE),
                null,
                null,
                null,
                null
        );
        TestUtilities.validateCursor("testInsertReadProvider.  Error validating joined Weather and Location data for a specific date.",
                weatherCursor, weatherValues);
    }

    // Make sure we can still delete after adding/updating stuff
    //
    // Student: Uncomment this test after you have completed writing the delete functionality
    // in your provider. It relies on insertions with testInsertReadProvider, so insert and
    // query functionality must also be complete before this test can be used.
    public void testDeleteRecords() {
        testInsertReadProvider();

        // Register a content observer for our location delete.
        TestUtilities.TestContentObserver locationObserver = TestUtilities.getTestContentObserver();
        mContext.getContentResolver().registerContentObserver(LocationEntry.CONTENT_URI, true, locationObserver);

        // Register a content observer for our weather delete.
        TestUtilities.TestContentObserver weatherObserver = TestUtilities.getTestContentObserver();
        mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, weatherObserver);

        deleteAllRecordsFromProvider();

        // Students: If either of these fail, you most-likely are not calling the
        // getContext().getContentResolver().notifyChange(uri, null); in the ContentProvider
        // delete.  (only if the insertReadProvider is succeeding)
        locationObserver.waitForNotificationOrFail();
        weatherObserver.waitForNotificationOrFail();

        mContext.getContentResolver().unregisterContentObserver(locationObserver);
        mContext.getContentResolver().unregisterContentObserver(weatherObserver);
    }

    // Number of synthetic weather rows generated for the bulk-insert test below.
    static private final int BULK_INSERT_RECORDS_TO_INSERT = 10;

    // Builds BULK_INSERT_RECORDS_TO_INSERT ContentValues rows for consecutive
    // days starting at TestUtilities.TEST_DATE, all keyed to locationRowId.
    static ContentValues[] createBulkInsertWeatherValues(long locationRowId) {
        long currentTestDate = TestUtilities.TEST_DATE;
        long millisecondsInADay = 1000*60*60*24;
        ContentValues[] returnContentValues = new ContentValues[BULK_INSERT_RECORDS_TO_INSERT];

        for ( int i = 0; i < BULK_INSERT_RECORDS_TO_INSERT; i++, currentTestDate+= millisecondsInADay ) {
            ContentValues weatherValues = new ContentValues();
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_LOC_KEY, locationRowId);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DATE, currentTestDate);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_DEGREES, 1.1);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_HUMIDITY, 1.2 + 0.01 * (float) i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_PRESSURE, 1.3 - 0.01 * (float) i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MAX_TEMP, 75 + i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_MIN_TEMP, 65 - i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_SHORT_DESC, "Asteroids");
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WIND_SPEED, 5.5 + 0.2 * (float) i);
            weatherValues.put(WeatherContract.WeatherEntry.COLUMN_WEATHER_ID, 321);
            returnContentValues[i] = weatherValues;
        }
        return returnContentValues;
    }

    // Student: Uncomment this test after you have completed writing the BulkInsert functionality
    // in your provider. Note that this test will work with the built-in (default) provider
    // implementation, which just inserts records one-at-a-time, so really do implement the
    // BulkInsert ContentProvider function.
//    public void testBulkInsert() {
//        // first, let's create a location value
//        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
//        Uri locationUri = mContext.getContentResolver().insert(LocationEntry.CONTENT_URI, testValues);
//        long locationRowId = ContentUris.parseId(locationUri);
//
//        // Verify we got a row back.
//        assertTrue(locationRowId != -1);
//
//        // Data's inserted. IN THEORY. Now pull some out to stare at it and verify it made
//        // the round trip.
//
//        // A cursor is your primary interface to the query results.
//        Cursor cursor = mContext.getContentResolver().query(
//                LocationEntry.CONTENT_URI,
//                null, // leaving "columns" null just returns all the columns.
//                null, // cols for "where" clause
//                null, // values for "where" clause
//                null  // sort order
//        );
//
//        TestUtilities.validateCursor("testBulkInsert. Error validating LocationEntry.",
//                cursor, testValues);
//
//        // Now we can bulkInsert some weather. In fact, we only implement BulkInsert for weather
//        // entries. With ContentProviders, you really only have to implement the features you
//        // use, after all.
//        ContentValues[] bulkInsertContentValues = createBulkInsertWeatherValues(locationRowId);
//
//        // Register a content observer for our bulk insert.
//        TestUtilities.TestContentObserver weatherObserver = TestUtilities.getTestContentObserver();
//        mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, weatherObserver);
//
//        int insertCount = mContext.getContentResolver().bulkInsert(WeatherEntry.CONTENT_URI, bulkInsertContentValues);
//
//        // Students:  If this fails, it means that you most-likely are not calling the
//        // getContext().getContentResolver().notifyChange(uri, null); in your BulkInsert
//        // ContentProvider method.
//        weatherObserver.waitForNotificationOrFail();
//        mContext.getContentResolver().unregisterContentObserver(weatherObserver);
//
//        assertEquals(insertCount, BULK_INSERT_RECORDS_TO_INSERT);
//
//        // A cursor is your primary interface to the query results.
//        cursor = mContext.getContentResolver().query(
//                WeatherEntry.CONTENT_URI,
//                null, // leaving "columns" null just returns all the columns.
//                null, // cols for "where" clause
//                null, // values for "where" clause
//                WeatherEntry.COLUMN_DATE + " ASC"  // sort order == by DATE ASCENDING
//        );
//
//        // we should have as many records in the database as we've inserted
//        assertEquals(cursor.getCount(), BULK_INSERT_RECORDS_TO_INSERT);
//
//        // and let's make sure they match the ones we created
//        cursor.moveToFirst();
//        for ( int i = 0; i < BULK_INSERT_RECORDS_TO_INSERT; i++, cursor.moveToNext() ) {
//            TestUtilities.validateCurrentRecord("testBulkInsert.  Error validating WeatherEntry " + i,
//                    cursor, bulkInsertContentValues[i]);
//        }
//        cursor.close();
//    }
}
| |
package de.uop.code.cubemerging.dao;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.query.*;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.sparql.engine.http.QueryEngineHTTP;
import de.uop.code.cubemerging.Observation;
import de.uop.code.cubemerging.domain.DatasetDescription;
import de.uop.code.cubemerging.domain.EntityDefinition;
import de.uop.code.cubemerging.domain.cube.Cube;
import de.uop.code.cubemerging.domain.cube.DatasetStructureDefinition;
import de.uop.code.cubemerging.domain.cube.Dimension;
import de.uop.code.cubemerging.domain.cube.Measure;
import de.uop.code.cubemerging.util.Properties;
import de.uop.code.cubemerging.vocabulary.QB;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
@Repository
public class MergeDao {
// SPARQL endpoint of the triple store, read once from the application properties.
private final static String TRIPLE_STORE = Properties.getInstance().getTripleStore();

// Common prefix declarations prepended to every query below.
private final static String PREFIXES = "PREFIX qb: <http://purl.org/linked-data/cube#> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX code: <http://code-research.eu/resource/> PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> PREFIX prov: <http://www.w3.org/ns/prov#> ";

// Discovery queries over named graphs containing qb:DataSet resources.
private final static String GRAPH_QUERY = PREFIXES + "SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s rdf:type qb:DataSet }}";
private final static String DATASET_QUERY = PREFIXES + "SELECT DISTINCT ?s WHERE { GRAPH ?g { ?s rdf:type qb:DataSet }}";
private final static String GENERIC_CONSTRUCT = PREFIXES + "CONSTRUCT { ?s ?p ?o } WHERE { GRAPH ?g { ?s ?p ?o }}";

// Structure (DSD) and observation queries for a single dataset graph.
private final static String COMPONENTS_QUERY = PREFIXES + "SELECT ?component ?concept WHERE { GRAPH ?g { ?d rdf:type qb:DataSet ; qb:structure ?dsd . ?dsd rdf:type qb:DataStructureDefinition ; qb:component ?comp . ?comp ?component ?concept }}";
private final static String OBSERVATION_QUERY = PREFIXES + "SELECT ?s ?p ?o WHERE { GRAPH ?g { ?s rdf:type qb:Observation . ?s ?p ?o }}";
private final static String DIMENSION_PROPERTY_QUERY = PREFIXES + "CONSTRUCT {?s ?p ?o} WHERE { GRAPH ?g { ?s rdf:type qb:DimensionProperty . ?s ?p ?o }}";
private final static String MEASURE_PROPERTY_QUERY = PREFIXES + "CONSTRUCT {?s ?p ?o} WHERE { GRAPH ?g { ?s rdf:type qb:MeasureProperty . ?s ?p ?o }}";

// Entity (code:Entity) lookups.
private final static String ENTITY_QUERY = PREFIXES + "SELECT ?s WHERE { GRAPH ?g { ?s rdf:type code:Entity }}";
private final static String ENTITY_LABEL_QUERY = PREFIXES + "SELECT ?s ?d ?l WHERE { GRAPH ?g { ?s rdf:type code:Entity . ?s rdfs:label ?l . ?s rdfs:isDefinedBy ?d }}";
private final static String ENTITY_CONSTRUCT_QUERY = PREFIXES + "CONSTRUCT {?s ?p ?o} WHERE { GRAPH ?g { ?s rdf:type code:Entity . ?s ?p ?o }}";

// Dataset metadata queries; the ?auth literal restricts results to a given user
// via the prov:wasStartedBy agent's rdfs:label.
private final static String GRAPH_LABEL = PREFIXES + "SELECT ?label WHERE { GRAPH ?g { ?s rdf:type qb:DataSet . ?s rdfs:label ?label }}";
private final static String QUERY_DATASET_DESCRIPTIONS = PREFIXES + "SELECT DISTINCT ?g ?id ?label ?description WHERE { GRAPH ?g { ?id rdf:type qb:DataSet . ?id prov:wasGeneratedBy ?activity . ?activity prov:wasStartedBy ?agent . ?agent rdfs:label ?auth . ?id rdfs:label ?label . ?id rdfs:comment ?description }}";
private final static String QUERY_DATASET_DESCRIPTION = PREFIXES + "SELECT DISTINCT ?id ?label ?description WHERE { GRAPH ?g { ?id rdf:type qb:DataSet . ?id prov:wasGeneratedBy ?activity . ?activity prov:wasStartedBy ?agent . ?agent rdfs:label ?auth . ?id rdfs:label ?label . ?id rdfs:comment ?description }}";
private final static String COMPONENT_LABEL = PREFIXES + "SELECT ?label WHERE { GRAPH ?g {?s rdfs:label ?label}}";
private final static String COMPONENT_RANGE = PREFIXES + "SELECT ?range WHERE { GRAPH ?g {?s rdfs:subPropertyOf ?range}}";

// queries for the cube info
private final static String QUERY_CUBE_INFO = PREFIXES + "SELECT DISTINCT ?id ?label ?description WHERE { GRAPH ?g { ?id rdf:type qb:DataSet . ?id prov:wasGeneratedBy ?activity . ?activity prov:wasStartedBy ?agent . ?agent rdfs:label ?auth . ?id rdfs:label ?label . ?id rdfs:comment ?description }}";
private final static String QUERY_DIMENSION_INFO = PREFIXES + "SELECT ?id ?label ?subproperty WHERE { GRAPH ?g { ?id rdf:type qb:DimensionProperty . ?id rdfs:label ?label . ?id rdfs:subPropertyOf ?subproperty}}";
private final static String QUERY_MEASURE_INFO = PREFIXES + "SELECT ?id ?label ?subproperty WHERE { GRAPH ?g { ?id rdf:type qb:MeasureProperty . ?id rdfs:label ?label . ?id rdfs:subPropertyOf ?subproperty}}";

private final Logger logger = LoggerFactory.getLogger(MergeDao.class);
/**
 * Assembles the full description of the cube stored in {@code graph}: the
 * basic metadata (label, id, description) plus its structure definition
 * (dimensions and measures).
 */
public Cube getCubeInformation(String graph, String userId) {
    Cube cube = addBasicInfo(graph, userId);

    DatasetStructureDefinition structure = new DatasetStructureDefinition();
    structure.setDimensions(getDimensionsInfo(graph));
    structure.setMeasures(getMeasuresInfo(graph));

    cube.setDatasetStructureDefinition(structure);
    return cube;
}
/**
 * Loads the basic metadata (id, label, description) of the dataset in
 * {@code graph} whose provenance agent label matches {@code userId}.
 * If no matching dataset is found, the returned {@link Cube} is left empty.
 */
private Cube addBasicInfo(String graph, String userId) {
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(QUERY_CUBE_INFO);
    prepareQuery.setIri("g", graph);
    prepareQuery.setLiteral("auth", userId, XSDDatatype.XSDstring);

    Cube cube = new Cube();
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        if (rs.hasNext()) {
            QuerySolution result = rs.next();
            cube.setGraph(graph);
            cube.setAuth(userId);
            cube.setLabel(result.get("label").toString());
            cube.setId(result.get("id").toString());
            cube.setDescription(result.get("description").toString());
        }
    } finally {
        // Close in a finally block so the HTTP engine is released even when
        // execSelect() throws (the original leaked it on that path).
        qeHTTP.close();
    }
    return cube;
}
/**
 * Retrieves all qb:DimensionProperty resources of {@code graph} as
 * {@link Dimension} beans (label, subPropertyOf, URL).
 */
private List<Dimension> getDimensionsInfo(String graph) {
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(QUERY_DIMENSION_INFO);
    prepareQuery.setIri("g", graph);

    List<Dimension> dims = new LinkedList<Dimension>();
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        while (rs.hasNext()) {
            QuerySolution result = rs.next();
            Dimension dim = new Dimension();
            dim.setLabel(result.get("label").toString());
            dim.setSubpropertyOf(result.get("subproperty").toString());
            dim.setUrl(result.get("id").toString());
            dims.add(dim);
        }
    } finally {
        // Release the HTTP engine even if execSelect() fails (leak fix).
        qeHTTP.close();
    }
    return dims;
}
/**
 * Retrieves all qb:MeasureProperty resources of {@code graph} as
 * {@link Measure} beans (label, subPropertyOf, URL).
 */
private List<Measure> getMeasuresInfo(String graph) {
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(QUERY_MEASURE_INFO);
    prepareQuery.setIri("g", graph);

    List<Measure> measures = new LinkedList<Measure>();
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        while (rs.hasNext()) {
            QuerySolution result = rs.next();
            Measure measure = new Measure();
            measure.setLabel(result.get("label").toString());
            measure.setSubpropertyOf(result.get("subproperty").toString());
            measure.setUrl(result.get("id").toString());
            measures.add(measure);
        }
    } finally {
        // Release the HTTP engine even if execSelect() fails (leak fix).
        qeHTTP.close();
    }
    return measures;
}
/**
 * Looks up the rdfs:label of {@code resource} inside {@code graph}.
 *
 * @return the first label found, or the empty string when none exists
 */
public String getLabelForResource(String graph, String resource) {
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(COMPONENT_LABEL);
    prepareQuery.setIri("g", graph);
    prepareQuery.setIri("s", resource);

    String label = "";
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        if (rs.hasNext()) {
            label = rs.next().get("label").toString();
        }
    } finally {
        // Release the HTTP engine even if execSelect() fails (leak fix).
        qeHTTP.close();
    }
    return label;
}
/**
 * Looks up the rdfs:subPropertyOf value ("range" in this DAO's terminology)
 * of {@code resource} inside {@code graph}.
 *
 * @return the first value found, or the empty string when none exists
 */
public String getRangeForResource(String graph, String resource) {
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(COMPONENT_RANGE);
    prepareQuery.setIri("g", graph);
    prepareQuery.setIri("s", resource);

    // Renamed from the misleading local name "label" — this holds the range value.
    String range = "";
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        if (rs.hasNext()) {
            range = rs.next().get("range").toString();
        }
    } finally {
        // Release the HTTP engine even if execSelect() fails (leak fix).
        qeHTTP.close();
    }
    return range;
}
/**
 * Fetches the description of the dataset in {@code graph} that belongs to
 * {@code userId}. Returns an empty {@link DatasetDescription} when the graph
 * holds no matching dataset.
 */
public DatasetDescription getDatasetFromUser(String graph, String userId) {
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(QUERY_DATASET_DESCRIPTION);
    prepareQuery.setIri("g", graph);
    prepareQuery.setLiteral("auth", userId, XSDDatatype.XSDstring);

    DatasetDescription dd = new DatasetDescription();
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        if (rs.hasNext()) {
            QuerySolution result = rs.next();
            dd.setNamedGraph(graph);
            dd.setDatasetId(result.get("id").toString());
            dd.setLabel(result.get("label").toString());
            dd.setDescription(result.get("description").toString());
        }
    } finally {
        // Release the HTTP engine even if execSelect() fails (leak fix).
        qeHTTP.close();
    }
    return dd;
}
/**
 * Lists the descriptions of all datasets (across all named graphs) whose
 * provenance agent label matches {@code userId}.
 */
public List<DatasetDescription> getDatasetsFromUser(String userId) {
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(QUERY_DATASET_DESCRIPTIONS);
    prepareQuery.setLiteral("auth", userId, XSDDatatype.XSDstring);

    List<DatasetDescription> descriptions = new LinkedList<DatasetDescription>();
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        while (rs.hasNext()) {
            QuerySolution result = rs.next();
            DatasetDescription dd = new DatasetDescription();
            dd.setNamedGraph(result.get("g").toString());
            dd.setDatasetId(result.get("id").toString());
            dd.setLabel(result.get("label").toString());
            dd.setDescription(result.get("description").toString());
            descriptions.add(dd);
        }
    } finally {
        // Release the HTTP engine even if execSelect() fails (leak fix).
        qeHTTP.close();
    }
    return descriptions;
}
/**
 * Returns the rdfs:label of the qb:DataSet stored in {@code graph}.
 * Logs a warning when the graph yields zero or multiple labels; as before,
 * an empty result still raises an {@link IndexOutOfBoundsException} from
 * {@code get(0)} (behavior intentionally unchanged for existing callers).
 */
public String getLabel(String graph) {
    logger.debug("getLabel: " + graph);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(GRAPH_LABEL);
    prepareQuery.setIri("g", graph);

    List<String> labels = new LinkedList<String>();
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        ResultSet rs = qeHTTP.execSelect();
        while (rs.hasNext()) {
            labels.add(rs.next().get("label").toString());
        }
    } finally {
        // Release the HTTP engine even if execSelect() fails (leak fix).
        qeHTTP.close();
    }
    // labels can never be null here, so the former null check was dead code.
    if (labels.size() != 1) {
        logger.warn("label size != 1");
    }
    return labels.get(0);
}
/** Convenience overload: fetch every qb:MeasureProperty in {@code graph} (no subject filter). */
public Model getAllMeasures(String graph) {
    return getMeasure(graph, null);
}
/** Convenience overload: fetch every code:Entity in {@code graph} (no entity filter). */
public Model getAllEntityDefinitions(String graph) {
    return getEntityDefinition(graph, null);
}
/**
 * Builds a model of the qb:MeasureProperty triples in {@code graph}.
 *
 * @param subject measure resource IRI to restrict to, or {@code null} for all
 * @return the constructed model, or {@code null} if the query fails to parse
 */
public Model getMeasure(String graph, String subject) {
    // Log message fixed: previously misspelled "getMeaure".
    logger.debug("getMeasure: " + graph + " " + subject);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(MEASURE_PROPERTY_QUERY);
    prepareQuery.setIri("g", graph);
    if (subject != null) {
        prepareQuery.setIri("s", subject);
    }
    Model model = null;
    QueryEngineHTTP qeHTTP = null;
    try {
        qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
        model = qeHTTP.execConstruct();
    } catch (QueryParseException exception) {
        logger.error("QueryParseException: ", exception);
    } finally {
        // The original never closed the engine here at all — connection leak.
        if (qeHTTP != null) {
            qeHTTP.close();
        }
    }
    return model;
}
/**
 * Builds a model of the qb:DimensionProperty triples for one specific
 * dimension {@code subject} in {@code graph}.
 *
 * @return the constructed model, or {@code null} if the query fails to parse
 */
public Model getDimension(String graph, String subject) {
    // Log message fixed: previously said "getDimensions" in the singular method.
    logger.debug("getDimension: " + graph);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(DIMENSION_PROPERTY_QUERY);
    prepareQuery.setIri("g", graph);
    prepareQuery.setIri("s", subject);
    Model model = null;
    QueryEngineHTTP qeHTTP = null;
    try {
        qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
        model = qeHTTP.execConstruct();
    } catch (QueryParseException exception) {
        logger.error("QueryParseException: ", exception);
    } finally {
        // The original never closed the engine here at all — connection leak.
        if (qeHTTP != null) {
            qeHTTP.close();
        }
    }
    return model;
}
/**
 * Fetches all dimension-property triples of the graph.
 *
 * @param graph IRI of the named graph
 * @return constructed model, or null when the query could not be parsed
 */
public Model getDimensions(String graph) {
    logger.debug("getDimensions: " + graph);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(DIMENSION_PROPERTY_QUERY);
    prepareQuery.setIri("g", graph);
    Model model = null;
    try {
        QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
        try {
            model = qeHTTP.execConstruct();
        } finally {
            qeHTTP.close(); // the engine was previously leaked
        }
    } catch (QueryParseException exception) {
        logger.error("QueryParseException: ", exception);
    }
    return model;
}
/**
 * Lists every entity definition in the graph together with its label and
 * its rdfs:isDefinedBy target.
 *
 * @param graph IRI of the named graph
 * @return all entity definitions found in the graph
 */
public List<EntityDefinition> getEntityDefs(String graph) {
    logger.debug("getEntityDefs: " + graph);
    ParameterizedSparqlString query = new ParameterizedSparqlString(ENTITY_LABEL_QUERY);
    query.setIri("g", graph);
    QueryEngineHTTP engine = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, query.toString());
    ResultSet resultSet = engine.execSelect();
    List<EntityDefinition> definitions = new LinkedList<EntityDefinition>();
    while (resultSet.hasNext()) {
        QuerySolution solution = resultSet.next();
        EntityDefinition definition = new EntityDefinition();
        definition.setResource(solution.get("s").toString());
        definition.setLabel(solution.getLiteral("l").getString());
        definition.setDefinedBy(solution.get("d").toString());
        definitions.add(definition);
    }
    engine.close();
    return definitions;
}
/**
 * Fetches the triples describing an entity definition from the graph.
 *
 * @param graph  IRI of the named graph
 * @param entity optional entity IRI; null selects all entity definitions
 * @return constructed model, or null when the query could not be parsed
 */
public Model getEntityDefinition(String graph, String entity) {
    logger.debug("getEntityDefinition: " + graph + " " + entity);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(ENTITY_CONSTRUCT_QUERY);
    prepareQuery.setIri("g", graph);
    if (entity != null) {
        prepareQuery.setIri("s", entity);
    }
    Model model = null;
    try {
        QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
        try {
            model = qeHTTP.execConstruct();
        } finally {
            qeHTTP.close(); // the engine was previously leaked
        }
    } catch (QueryParseException exception) {
        logger.error("QueryParseException: ", exception);
    }
    return model;
}
/**
 * Lists the IRIs of all entity definitions in the graph.
 *
 * @param graph IRI of the named graph
 * @return entity IRIs rendered as strings
 */
public List<String> getEntityDefinitions(String graph) {
    logger.debug("getEntityDefinitions: " + graph);
    ParameterizedSparqlString query = new ParameterizedSparqlString(ENTITY_QUERY);
    query.setIri("g", graph);
    QueryEngineHTTP engine = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, query.toString());
    ResultSet resultSet = engine.execSelect();
    List<String> entities = new LinkedList<String>();
    while (resultSet.hasNext()) {
        entities.add(resultSet.next().get("s").toString());
    }
    engine.close();
    return entities;
}
/**
 * Loads all observations of the graph, classifying each predicate as measure
 * or dimension via the graph's component definitions.
 *
 * @param graph IRI of the named graph
 * @return observations with dataset, measures and dimensions populated
 * @throws IllegalStateException when a predicate has no or an unknown
 *         component mapping in the graph
 */
public List<Observation> getObservations(String graph) {
    logger.debug("getObservations: " + graph);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(OBSERVATION_QUERY);
    prepareQuery.setIri("g", graph);
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    Map<String, Observation> observations = new HashMap<String, Observation>();
    try {
        ResultSet rs = qeHTTP.execSelect();
        Map<String, String> components = getComponents(graph);
        while (rs.hasNext()) {
            QuerySolution result = rs.next();
            String obs = result.get("s").toString();
            String predicate = result.get("p").toString();
            String object = result.get("o").toString();
            // TODO this is a hack to strip the datatype
            object = object.replace("^^http://www.w3.org/2001/XMLSchema#long", "");
            object = object.replace("^^http://www.w3.org/2001/XMLSchema#double", "");
            if (!observations.containsKey(obs)) {
                observations.put(obs, new Observation());
            }
            Observation o = observations.get(obs);
            // Skip the rdf:type triple whose object is qb:Observation.
            if (!object.equals(QB.OBSERVATION.getURI())) {
                if (predicate.equals(QB.DATASET_PROPERTY.getURI())) {
                    o.setDataset(object);
                    o.setResource(obs);
                } else {
                    String component = components.get(predicate);
                    if (component == null) {
                        // Previously this caused a NullPointerException; report
                        // the offending predicate explicitly instead.
                        throw new IllegalStateException("no component mapping for predicate: " + predicate);
                    }
                    if (component.equals(QB.MEASURE.getURI())) {
                        o.getMeasures().put(predicate, object);
                    } else if (component.equals(QB.DIMENSION.getURI())) {
                        o.getDimensions().put(predicate, object);
                    } else {
                        throw new IllegalStateException("unknown component: " + component);
                    }
                }
            }
        }
    } finally {
        // Close even when a malformed graph triggers IllegalStateException above;
        // the engine used to leak in that case.
        qeHTTP.close();
    }
    List<Observation> result = new LinkedList<Observation>();
    result.addAll(observations.values());
    return result;
}
/**
 * Fetches every triple of the given named graph.
 *
 * @param graph IRI of the named graph
 * @return model containing the complete graph contents
 */
public Model getCompleteGraph(String graph) {
    logger.debug("getCompleteGraph: " + graph);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(GENERIC_CONSTRUCT);
    prepareQuery.setIri("g", graph);
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    try {
        // execConstruct() fully materializes the model, so closing afterwards is safe.
        return qeHTTP.execConstruct();
    } finally {
        qeHTTP.close(); // the engine was previously leaked
    }
}
/**
 * Resolves each component concept of the graph to its component kind
 * (e.g. measure or dimension).
 *
 * @param graph The current named graph
 * @return Key: concept; Value: component
 */
public Map<String, String> getComponents(String graph) {
    logger.debug("getComponents: " + graph);
    ParameterizedSparqlString query = new ParameterizedSparqlString(COMPONENTS_QUERY);
    query.setIri("g", graph);
    QueryEngineHTTP engine = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, query.toString());
    ResultSet resultSet = engine.execSelect();
    Map<String, String> components = new HashMap<String, String>();
    while (resultSet.hasNext()) {
        QuerySolution solution = resultSet.next();
        components.put(solution.get("concept").toString(), solution.get("component").toString());
    }
    engine.close();
    return components;
}
/**
 * Returns the single dataset IRI of the given graph.
 *
 * @param graph The current named graph
 * @return Null if there is not exactly one dataset.
 */
public String getDataset(String graph) {
    logger.debug("getDataset: " + graph);
    ParameterizedSparqlString prepareQuery = new ParameterizedSparqlString(DATASET_QUERY);
    prepareQuery.setIri("g", graph);
    logger.debug(prepareQuery.toString());
    QueryEngineHTTP qeHTTP = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, prepareQuery.toString());
    ResultSet rs = qeHTTP.execSelect();
    List<String> datasets = new LinkedList<String>();
    while (rs.hasNext()) {
        QuerySolution result = rs.next();
        datasets.add(result.get("s").toString());
    }
    qeHTTP.close();
    String dataset = null;
    if (datasets.size() > 1) {
        logger.warn("There is more than one dataset in this graph. Dismiss potentially invalid graph. " + graph);
    } else if (datasets.isEmpty()) {
        // Previously an empty result fell into datasets.get(0) and threw
        // IndexOutOfBoundsException, contradicting the documented contract.
        logger.warn("There is no dataset in this graph. " + graph);
    } else {
        dataset = datasets.get(0);
    }
    return dataset;
}
/**
 * Lists the IRIs of all named graphs available in the triple store.
 *
 * @return graph IRIs rendered as strings
 */
public List<String> getGraphs() {
    logger.debug("getGraphs");
    QueryEngineHTTP engine = (QueryEngineHTTP) QueryExecutionFactory.sparqlService(TRIPLE_STORE, GRAPH_QUERY);
    ResultSet resultSet = engine.execSelect();
    List<String> graphs = new LinkedList<String>();
    while (resultSet.hasNext()) {
        graphs.add(resultSet.next().get("g").toString());
    }
    engine.close();
    return graphs;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.instructions.spark;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import scala.Tuple2;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysml.runtime.controlprogram.context.SparkExecutionContext;
import org.apache.sysml.runtime.functionobjects.CM;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.instructions.cp.CM_COV_Object;
import org.apache.sysml.runtime.instructions.cp.CPOperand;
import org.apache.sysml.runtime.instructions.cp.DoubleObject;
import org.apache.sysml.runtime.instructions.cp.ScalarObject;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
import org.apache.sysml.runtime.matrix.operators.CMOperator;
import org.apache.sysml.runtime.matrix.operators.CMOperator.AggregateOperationTypes;
/**
 * Spark instruction that computes a central moment (e.g. variance, skewness,
 * kurtosis) over a matrix, optionally weighted by a second matrix.
 */
public class CentralMomentSPInstruction extends UnarySPInstruction
{
    public CentralMomentSPInstruction(CMOperator op, CPOperand in1, CPOperand in2,
            CPOperand in3, CPOperand out, String opcode, String str)
    {
        super(op, in1, in2, in3, out, opcode, str);
    }

    /**
     * Parses a serialized 'cm' instruction.
     *
     * @param str serialized instruction string
     * @return the parsed instruction
     * @throws DMLRuntimeException if the opcode or the operand count is unsupported
     */
    public static CentralMomentSPInstruction parseInstruction(String str)
        throws DMLRuntimeException
    {
        CPOperand in1 = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
        CPOperand in2 = null;
        CPOperand in3 = null;
        CPOperand out = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);

        String[] parts = InstructionUtils.getInstructionPartsWithValueType(str);
        String opcode = parts[0];

        //check supported opcode
        if( !opcode.equalsIgnoreCase("cm") ) {
            throw new DMLRuntimeException("Unsupported opcode "+opcode);
        }

        if ( parts.length == 4 ) {
            // Example: CP.cm.mVar0.Var1.mVar2; (without weights)
            in2 = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
            parseUnaryInstruction(str, in1, in2, out);
        }
        else if ( parts.length == 5) {
            // CP.cm.mVar0.mVar1.Var2.mVar3; (with weights)
            in2 = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
            in3 = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
            parseUnaryInstruction(str, in1, in2, in3, out);
        }
        else {
            // Fail fast: previously an unexpected operand count fell through
            // and caused a NullPointerException when reading in2 below.
            throw new DMLRuntimeException("Unsupported number of operands in instruction: " + str);
        }

        // Exact order of the central moment MAY NOT be known at compilation time.
        // We first try to parse the second argument as an integer, and if we fail,
        // we simply pass -1 so that getCMAggOpType() picks up AggregateOperationTypes.INVALID.
        // It must be updated at run time in processInstruction() method.
        int cmOrder;
        try {
            if ( in3 == null ) {
                cmOrder = Integer.parseInt(in2.getName());
            }
            else {
                cmOrder = Integer.parseInt(in3.getName());
            }
        } catch(NumberFormatException e) {
            cmOrder = -1; // unknown at compilation time
        }

        AggregateOperationTypes opType = CMOperator.getCMAggOpType(cmOrder);
        CMOperator cm = new CMOperator(CM.getCMFnObject(opType), opType);
        return new CentralMomentSPInstruction(cm, in1, in2, in3, out, opcode, str);
    }

    @Override
    public void processInstruction( ExecutionContext ec )
        throws DMLRuntimeException
    {
        SparkExecutionContext sec = (SparkExecutionContext)ec;

        //parse 'order' input argument (the last scalar operand is the order)
        CPOperand scalarInput = (input3==null ? input2 : input3);
        ScalarObject order = ec.getScalarInput(scalarInput.getName(), scalarInput.getValueType(), scalarInput.isLiteral());

        //resolve the aggregation type now if it was unknown at compile time
        CMOperator cop = ((CMOperator)_optr);
        if ( cop.getAggOpType() == AggregateOperationTypes.INVALID ) {
            cop.setCMAggOp((int)order.getLongValue());
        }

        //get input
        JavaPairRDD<MatrixIndexes,MatrixBlock> in1 = sec.getBinaryBlockRDDHandleForVariable( input1.getName() );

        //process central moment instruction
        CM_COV_Object cmobj = null;
        if( input3 == null ) //w/o weights
        {
            cmobj = in1.values().map(new RDDCMFunction(cop))
                       .reduce(new RDDCMReduceFunction(cop));
        }
        else //with weights
        {
            JavaPairRDD<MatrixIndexes,MatrixBlock> in2 = sec.getBinaryBlockRDDHandleForVariable( input2.getName() );
            cmobj = in1.join( in2 )
                       .values().map(new RDDCMWeightsFunction(cop))
                       .reduce(new RDDCMReduceFunction(cop));
        }

        //create scalar output (no lineage information required)
        double val = cmobj.getRequiredResult(_optr);
        DoubleObject ret = new DoubleObject(output.getName(), val);
        ec.setScalarOutput(output.getName(), ret);
    }

    /** Computes the per-block central-moment aggregate (unweighted). */
    private static class RDDCMFunction implements Function<MatrixBlock, CM_COV_Object>
    {
        private static final long serialVersionUID = 2293839116041610644L;

        private CMOperator _op = null;

        public RDDCMFunction( CMOperator op ) {
            _op = op;
        }

        @Override
        public CM_COV_Object call(MatrixBlock arg0)
            throws Exception
        {
            //execute cm operations
            return arg0.cmOperations(_op);
        }
    }

    /** Computes the per-block central-moment aggregate with a weights block. */
    private static class RDDCMWeightsFunction implements Function<Tuple2<MatrixBlock,MatrixBlock>, CM_COV_Object>
    {
        private static final long serialVersionUID = -8949715516574052497L;

        private CMOperator _op = null;

        public RDDCMWeightsFunction( CMOperator op ) {
            _op = op;
        }

        @Override
        public CM_COV_Object call(Tuple2<MatrixBlock,MatrixBlock> arg0)
            throws Exception
        {
            MatrixBlock input = arg0._1();
            MatrixBlock weights = arg0._2();

            //execute cm operations
            return input.cmOperations(_op, weights);
        }
    }

    /** Merges two partial central-moment aggregates into a combined one. */
    private static class RDDCMReduceFunction implements Function2<CM_COV_Object, CM_COV_Object, CM_COV_Object>
    {
        private static final long serialVersionUID = 3272260751983866544L;

        private CMOperator _op = null;

        public RDDCMReduceFunction( CMOperator op ) {
            _op = op;
        }

        @Override
        public CM_COV_Object call(CM_COV_Object arg0, CM_COV_Object arg1)
            throws Exception
        {
            CM_COV_Object out = new CM_COV_Object();

            //execute cm combine operations
            _op.fn.execute(out, arg0);
            _op.fn.execute(out, arg1);

            return out;
        }
    }
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel;
import io.netty.channel.Channel.Unsafe;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.EventExecutorGroup;
import io.netty.util.concurrent.PausableEventExecutor;
import io.netty.util.internal.OneTimeTask;
import io.netty.util.internal.PlatformDependent;
import io.netty.util.internal.StringUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.WeakHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
* The default {@link ChannelPipeline} implementation. It is usually created
* by a {@link Channel} implementation when the {@link Channel} is created.
*/
final class DefaultChannelPipeline implements ChannelPipeline {

    static final InternalLogger logger = InternalLoggerFactory.getInstance(DefaultChannelPipeline.class);

    // Per-slot caches of generated handler names, keyed by handler class.
    // Sized by CPU count; a thread picks its slot by thread id in generateName(),
    // so different threads usually touch different maps and contend less.
    @SuppressWarnings("unchecked")
    private static final WeakHashMap<Class<?>, String>[] nameCaches =
            new WeakHashMap[Runtime.getRuntime().availableProcessors()];

    static {
        for (int i = 0; i < nameCaches.length; i ++) {
            nameCaches[i] = new WeakHashMap<Class<?>, String>();
        }
    }

    final AbstractChannel channel;

    // Sentinel contexts: the doubly-linked handler list always starts at 'head'
    // and ends at 'tail'; user handlers live between them.
    final AbstractChannelHandlerContext head;
    final AbstractChannelHandlerContext tail;

    // Handler-name -> context index; mutated only under synchronized (this).
    private final Map<String, AbstractChannelHandlerContext> name2ctx =
            new HashMap<String, AbstractChannelHandlerContext>(4);

    /**
     * Lazily-initialized map from a child executor group to the invoker used
     * for handlers bound to that group, so the same invoker is reused for the
     * lifetime of this channel.
     *
     * @see #findInvoker(EventExecutorGroup)
     */
    private Map<EventExecutorGroup, ChannelHandlerInvoker> childInvokers;
    /**
     * Creates the pipeline for the given channel with only the head and tail
     * sentinel contexts linked together.
     */
    DefaultChannelPipeline(AbstractChannel channel) {
        if (channel == null) {
            throw new NullPointerException("channel");
        }
        this.channel = channel;

        tail = new TailContext(this);
        head = new HeadContext(this);

        head.next = tail;
        tail.prev = head;
    }

    /** Returns the channel this pipeline belongs to. */
    @Override
    public Channel channel() {
        return channel;
    }
    /** Adds a handler at the head of the pipeline, using the channel's own invoker. */
    @Override
    public ChannelPipeline addFirst(String name, ChannelHandler handler) {
        return addFirst((ChannelHandlerInvoker) null, name, handler);
    }

    /** Adds a handler at the head; events are dispatched via an invoker derived from {@code group}. */
    @Override
    public ChannelPipeline addFirst(EventExecutorGroup group, String name, ChannelHandler handler) {
        synchronized (this) {
            checkDuplicateName(name);
            addFirst0(name, new DefaultChannelHandlerContext(this, findInvoker(group), name, handler));
        }
        return this;
    }

    /** Adds a handler at the head, dispatching its events through the given invoker. */
    @Override
    public ChannelPipeline addFirst(ChannelHandlerInvoker invoker, final String name, ChannelHandler handler) {
        synchronized (this) {
            checkDuplicateName(name);
            addFirst0(name, new DefaultChannelHandlerContext(this, invoker, name, handler));
        }
        return this;
    }

    // Links the new context right after 'head'. Callers hold the pipeline lock
    // and have already verified the name is unique.
    private void addFirst0(String name, AbstractChannelHandlerContext newCtx) {
        checkMultiplicity(newCtx);

        AbstractChannelHandlerContext nextCtx = head.next;
        newCtx.prev = head;
        newCtx.next = nextCtx;
        head.next = newCtx;
        nextCtx.prev = newCtx;

        name2ctx.put(name, newCtx);

        callHandlerAdded(newCtx);
    }
    /** Appends a handler at the tail of the pipeline, using the channel's own invoker. */
    @Override
    public ChannelPipeline addLast(String name, ChannelHandler handler) {
        return addLast((ChannelHandlerInvoker) null, name, handler);
    }

    /** Appends a handler at the tail; events are dispatched via an invoker derived from {@code group}. */
    @Override
    public ChannelPipeline addLast(EventExecutorGroup group, String name, ChannelHandler handler) {
        synchronized (this) {
            checkDuplicateName(name);
            addLast0(name, new DefaultChannelHandlerContext(this, findInvoker(group), name, handler));
        }
        return this;
    }

    /** Appends a handler at the tail, dispatching its events through the given invoker. */
    @Override
    public ChannelPipeline addLast(ChannelHandlerInvoker invoker, final String name, ChannelHandler handler) {
        synchronized (this) {
            checkDuplicateName(name);
            addLast0(name, new DefaultChannelHandlerContext(this, invoker, name, handler));
        }
        return this;
    }

    // Links the new context right before 'tail'. Callers hold the pipeline lock
    // and have already verified the name is unique.
    private void addLast0(final String name, AbstractChannelHandlerContext newCtx) {
        checkMultiplicity(newCtx);

        AbstractChannelHandlerContext prev = tail.prev;
        newCtx.prev = prev;
        newCtx.next = tail;
        prev.next = newCtx;
        tail.prev = newCtx;

        name2ctx.put(name, newCtx);

        callHandlerAdded(newCtx);
    }
    /** Inserts a handler just before the handler named {@code baseName}. */
    @Override
    public ChannelPipeline addBefore(String baseName, String name, ChannelHandler handler) {
        return addBefore((ChannelHandlerInvoker) null, baseName, name, handler);
    }

    /** Inserts a handler just before {@code baseName}, bound to {@code group}. */
    @Override
    public ChannelPipeline addBefore(EventExecutorGroup group, String baseName, String name, ChannelHandler handler) {
        synchronized (this) {
            AbstractChannelHandlerContext ctx = getContextOrDie(baseName);
            checkDuplicateName(name);
            addBefore0(name, ctx, new DefaultChannelHandlerContext(this, findInvoker(group), name, handler));
        }
        return this;
    }

    /** Inserts a handler just before {@code baseName}, using the given invoker. */
    @Override
    public ChannelPipeline addBefore(
            ChannelHandlerInvoker invoker, String baseName, final String name, ChannelHandler handler) {
        synchronized (this) {
            AbstractChannelHandlerContext ctx = getContextOrDie(baseName);
            checkDuplicateName(name);
            addBefore0(name, ctx, new DefaultChannelHandlerContext(this, invoker, name, handler));
        }
        return this;
    }

    // Links the new context right before 'ctx'. Callers hold the pipeline lock
    // and have already verified the name is unique.
    private void addBefore0(
            final String name, AbstractChannelHandlerContext ctx, AbstractChannelHandlerContext newCtx) {
        checkMultiplicity(newCtx);

        newCtx.prev = ctx.prev;
        newCtx.next = ctx;
        ctx.prev.next = newCtx;
        ctx.prev = newCtx;

        name2ctx.put(name, newCtx);

        callHandlerAdded(newCtx);
    }
    /** Inserts a handler just after the handler named {@code baseName}. */
    @Override
    public ChannelPipeline addAfter(String baseName, String name, ChannelHandler handler) {
        return addAfter((ChannelHandlerInvoker) null, baseName, name, handler);
    }

    /** Inserts a handler just after {@code baseName}, bound to {@code group}. */
    @Override
    public ChannelPipeline addAfter(EventExecutorGroup group, String baseName, String name, ChannelHandler handler) {
        synchronized (this) {
            AbstractChannelHandlerContext ctx = getContextOrDie(baseName);
            checkDuplicateName(name);
            addAfter0(name, ctx, new DefaultChannelHandlerContext(this, findInvoker(group), name, handler));
        }
        return this;
    }

    /** Inserts a handler just after {@code baseName}, using the given invoker. */
    @Override
    public ChannelPipeline addAfter(
            ChannelHandlerInvoker invoker, String baseName, final String name, ChannelHandler handler) {
        synchronized (this) {
            AbstractChannelHandlerContext ctx = getContextOrDie(baseName);
            checkDuplicateName(name);
            addAfter0(name, ctx, new DefaultChannelHandlerContext(this, invoker, name, handler));
        }
        return this;
    }

    // Links the new context right after 'ctx'. Callers hold the pipeline lock.
    // NOTE(review): unlike addBefore0/addFirst0/addLast0, this re-checks the
    // name even though both visible callers already did so under the same lock —
    // redundant but harmless from what is visible here.
    private void addAfter0(final String name, AbstractChannelHandlerContext ctx, AbstractChannelHandlerContext newCtx) {
        checkDuplicateName(name);
        checkMultiplicity(newCtx);

        newCtx.prev = ctx;
        newCtx.next = ctx.next;
        ctx.next.prev = newCtx;
        ctx.next = newCtx;

        name2ctx.put(name, newCtx);

        callHandlerAdded(newCtx);
    }
    /** Adds the given handlers (up to the first null entry) at the head of the pipeline. */
    @Override
    public ChannelPipeline addFirst(ChannelHandler... handlers) {
        return addFirst((ChannelHandlerInvoker) null, handlers);
    }

    /** Adds the given handlers (up to the first null entry) at the head, bound to {@code group}. */
    @Override
    public ChannelPipeline addFirst(EventExecutorGroup group, ChannelHandler... handlers) {
        if (handlers == null) {
            throw new NullPointerException("handlers");
        }
        if (handlers.length == 0 || handlers[0] == null) {
            return this;
        }

        // The array is treated as null-terminated: find the effective length.
        int size;
        for (size = 1; size < handlers.length; size ++) {
            if (handlers[size] == null) {
                break;
            }
        }

        // Insert in reverse so the final pipeline order matches the array order.
        for (int i = size - 1; i >= 0; i --) {
            ChannelHandler h = handlers[i];
            addFirst(group, generateName(h), h);
        }

        return this;
    }

    /** Adds the given handlers (up to the first null entry) at the head, using {@code invoker}. */
    @Override
    public ChannelPipeline addFirst(ChannelHandlerInvoker invoker, ChannelHandler... handlers) {
        if (handlers == null) {
            throw new NullPointerException("handlers");
        }
        if (handlers.length == 0 || handlers[0] == null) {
            return this;
        }

        // The array is treated as null-terminated: find the effective length.
        int size;
        for (size = 1; size < handlers.length; size ++) {
            if (handlers[size] == null) {
                break;
            }
        }

        // Insert in reverse so the final pipeline order matches the array order.
        for (int i = size - 1; i >= 0; i --) {
            ChannelHandler h = handlers[i];
            addFirst(invoker, generateName(h), h);
        }

        return this;
    }
    /** Appends the given handlers (up to the first null entry) at the tail of the pipeline. */
    @Override
    public ChannelPipeline addLast(ChannelHandler... handlers) {
        return addLast((ChannelHandlerInvoker) null, handlers);
    }

    /** Appends the given handlers (up to the first null entry) at the tail, bound to {@code group}. */
    @Override
    public ChannelPipeline addLast(EventExecutorGroup group, ChannelHandler... handlers) {
        if (handlers == null) {
            throw new NullPointerException("handlers");
        }

        // The array is treated as null-terminated: stop at the first null entry.
        for (ChannelHandler h: handlers) {
            if (h == null) {
                break;
            }
            addLast(group, generateName(h), h);
        }

        return this;
    }

    /** Appends the given handlers (up to the first null entry) at the tail, using {@code invoker}. */
    @Override
    public ChannelPipeline addLast(ChannelHandlerInvoker invoker, ChannelHandler... handlers) {
        if (handlers == null) {
            throw new NullPointerException("handlers");
        }

        for (ChannelHandler h: handlers) {
            if (h == null) {
                break;
            }
            addLast(invoker, generateName(h), h);
        }

        return this;
    }
    /**
     * Resolves (and caches) the invoker to use for handlers bound to the given
     * executor group; returns null for a null group (i.e. use the channel's own
     * invoker).
     */
    // No need for synchronization because it is always executed in a synchronized(this) block.
    private ChannelHandlerInvoker findInvoker(EventExecutorGroup group) {
        if (group == null) {
            return null;
        }

        // Lazily initialize the data structure that maps an EventExecutorGroup to a ChannelHandlerInvoker.
        Map<EventExecutorGroup, ChannelHandlerInvoker> childInvokers = this.childInvokers;
        if (childInvokers == null) {
            childInvokers = this.childInvokers = new IdentityHashMap<EventExecutorGroup, ChannelHandlerInvoker>(4);
        }

        // Pick one of the child executors and remember its invoker
        // so that the same invoker is used to fire events for the same channel.
        ChannelHandlerInvoker invoker = childInvokers.get(group);
        if (invoker == null) {
            EventExecutor executor = group.next();
            if (executor instanceof EventLoop) {
                invoker = ((EventLoop) executor).asInvoker();
            } else {
                invoker = new DefaultChannelHandlerInvoker(executor);
            }
            childInvokers.put(group, invoker);
        }
        return invoker;
    }
    // Generates a display name for the handler (e.g. "MyHandler#0"), unique
    // within this pipeline. Class-name lookups are cached per slot in
    // nameCaches; uniqueness is then enforced under the pipeline lock.
    String generateName(ChannelHandler handler) {
        WeakHashMap<Class<?>, String> cache = nameCaches[(int) (Thread.currentThread().getId() % nameCaches.length)];
        Class<?> handlerType = handler.getClass();
        String name;
        synchronized (cache) {
            name = cache.get(handlerType);
            if (name == null) {
                name = generateName0(handlerType);
                cache.put(handlerType, name);
            }
        }

        synchronized (this) {
            // It's not very likely for a user to put more than one handler of the same type, but make sure to avoid
            // any name conflicts. Note that we don't cache the names generated here.
            if (name2ctx.containsKey(name)) {
                String baseName = name.substring(0, name.length() - 1); // Strip the trailing '0'.
                for (int i = 1;; i ++) {
                    String newName = baseName + i;
                    if (!name2ctx.containsKey(newName)) {
                        name = newName;
                        break;
                    }
                }
            }
        }

        return name;
    }

    // Default name: simple class name plus "#0".
    private static String generateName0(Class<?> handlerType) {
        return StringUtil.simpleClassName(handlerType) + "#0";
    }
    /** Removes the given handler instance from the pipeline. */
    @Override
    public ChannelPipeline remove(ChannelHandler handler) {
        remove(getContextOrDie(handler));
        return this;
    }

    /** Removes and returns the handler registered under {@code name}. */
    @Override
    public ChannelHandler remove(String name) {
        return remove(getContextOrDie(name)).handler();
    }

    /** Removes and returns the first handler of the given type. */
    @SuppressWarnings("unchecked")
    @Override
    public <T extends ChannelHandler> T remove(Class<T> handlerType) {
        return (T) remove(getContextOrDie(handlerType)).handler();
    }
    // Unlinks 'ctx' from the pipeline: inline when the channel is unregistered
    // or we are already on the context's event loop, otherwise by submitting
    // the unlink to that executor and waiting for it outside the lock.
    private AbstractChannelHandlerContext remove(final AbstractChannelHandlerContext ctx) {
        assert ctx != head && ctx != tail;

        AbstractChannelHandlerContext context;
        Future<?> future;

        synchronized (this) {
            if (!ctx.channel().isRegistered() || ctx.executor().inEventLoop()) {
                remove0(ctx);
                return ctx;
            } else {
                future = ctx.executor().submit(new Runnable() {
                    @Override
                    public void run() {
                        synchronized (DefaultChannelPipeline.this) {
                            remove0(ctx);
                        }
                    }
                });
                context = ctx;
            }
        }

        // Run the following 'waiting' code outside of the above synchronized block
        // in order to avoid deadlock
        waitForFuture(future);

        return context;
    }

    // Unlinks the context from the list, drops its name mapping and notifies
    // the handler. Callers hold the pipeline lock (or run pre-registration).
    void remove0(AbstractChannelHandlerContext ctx) {
        AbstractChannelHandlerContext prev = ctx.prev;
        AbstractChannelHandlerContext next = ctx.next;
        prev.next = next;
        next.prev = prev;
        name2ctx.remove(ctx.name());
        callHandlerRemoved(ctx);
    }
@Override
public ChannelHandler removeFirst() {
if (head.next == tail) {
throw new NoSuchElementException();
}
return remove(head.next).handler();
}
@Override
public ChannelHandler removeLast() {
if (head.next == tail) {
throw new NoSuchElementException();
}
return remove(tail.prev).handler();
}
    /** Replaces the given handler with a new one registered under {@code newName}. */
    @Override
    public ChannelPipeline replace(ChannelHandler oldHandler, String newName, ChannelHandler newHandler) {
        replace(getContextOrDie(oldHandler), newName, newHandler);
        return this;
    }

    /** Replaces the handler named {@code oldName}; returns the removed handler. */
    @Override
    public ChannelHandler replace(String oldName, String newName, ChannelHandler newHandler) {
        return replace(getContextOrDie(oldName), newName, newHandler);
    }

    /** Replaces the first handler of {@code oldHandlerType}; returns the removed handler. */
    @Override
    @SuppressWarnings("unchecked")
    public <T extends ChannelHandler> T replace(
            Class<T> oldHandlerType, String newName, ChannelHandler newHandler) {
        return (T) replace(getContextOrDie(oldHandlerType), newName, newHandler);
    }

    // Swaps 'ctx' for a freshly-built context: inline when possible, otherwise
    // on the new context's executor (waiting outside the lock to avoid
    // deadlock). Returns the replaced handler.
    private ChannelHandler replace(
            final AbstractChannelHandlerContext ctx, final String newName,
            ChannelHandler newHandler) {

        assert ctx != head && ctx != tail;

        Future<?> future;
        synchronized (this) {
            // Keeping the old name is allowed; any other name must be unique.
            boolean sameName = ctx.name().equals(newName);
            if (!sameName) {
                checkDuplicateName(newName);
            }

            // The replacement reuses the old context's invoker.
            final AbstractChannelHandlerContext newCtx =
                    new DefaultChannelHandlerContext(this, ctx.invoker, newName, newHandler);

            if (!newCtx.channel().isRegistered() || newCtx.executor().inEventLoop()) {
                replace0(ctx, newName, newCtx);
                return ctx.handler();
            } else {
                future = newCtx.executor().submit(new Runnable() {
                    @Override
                    public void run() {
                        synchronized (DefaultChannelPipeline.this) {
                            replace0(ctx, newName, newCtx);
                        }
                    }
                });
            }
        }

        // Run the following 'waiting' code outside of the above synchronized block
        // in order to avoid deadlock
        waitForFuture(future);

        return ctx.handler();
    }

    private void replace0(AbstractChannelHandlerContext oldCtx, String newName,
                          AbstractChannelHandlerContext newCtx) {
        checkMultiplicity(newCtx);

        AbstractChannelHandlerContext prev = oldCtx.prev;
        AbstractChannelHandlerContext next = oldCtx.next;
        newCtx.prev = prev;
        newCtx.next = next;

        // Finish the replacement of oldCtx with newCtx in the linked list.
        // Note that this doesn't mean events will be sent to the new handler immediately
        // because we are currently at the event handler thread and no more than one handler methods can be invoked
        // at the same time (we ensured that in replace().)
        prev.next = newCtx;
        next.prev = newCtx;

        if (!oldCtx.name().equals(newName)) {
            name2ctx.remove(oldCtx.name());
        }
        name2ctx.put(newName, newCtx);

        // update the reference to the replacement so forward of buffered content will work correctly
        oldCtx.prev = newCtx;
        oldCtx.next = newCtx;

        // Invoke newHandler.handlerAdded() first (i.e. before oldHandler.handlerRemoved() is invoked)
        // because callHandlerRemoved() will trigger inboundBufferUpdated() or flush() on newHandler and those
        // event handlers must be called after handlerAdded().
        callHandlerAdded(newCtx);
        callHandlerRemoved(oldCtx);
    }
    // Rejects re-adding a non-sharable ChannelHandlerAdapter that was already
    // added somewhere; marks the handler as added on success.
    private static void checkMultiplicity(ChannelHandlerContext ctx) {
        ChannelHandler handler = ctx.handler();
        if (handler instanceof ChannelHandlerAdapter) {
            ChannelHandlerAdapter h = (ChannelHandlerAdapter) handler;
            if (!h.isSharable() && h.added) {
                throw new ChannelPipelineException(
                        h.getClass().getName() +
                        " is not a @Sharable handler, so can't be added or removed multiple times.");
            }
            h.added = true;
        }
    }
    // Notifies the handler that it was added; hops onto the context's executor
    // when the channel is registered and we are on a different thread. Skipped
    // entirely when the context's handlerAdded skip flag is set.
    private void callHandlerAdded(final AbstractChannelHandlerContext ctx) {
        if ((ctx.skipFlags & AbstractChannelHandlerContext.MASK_HANDLER_ADDED) != 0) {
            return;
        }

        if (ctx.channel().isRegistered() && !ctx.executor().inEventLoop()) {
            ctx.executor().execute(new Runnable() {
                @Override
                public void run() {
                    callHandlerAdded0(ctx);
                }
            });
            return;
        }
        callHandlerAdded0(ctx);
    }

    // Invokes handlerAdded(); if it throws, best-effort removes the handler
    // again and reports the failure as an exceptionCaught event.
    private void callHandlerAdded0(final AbstractChannelHandlerContext ctx) {
        try {
            ctx.handler().handlerAdded(ctx);
        } catch (Throwable t) {
            boolean removed = false;
            try {
                remove(ctx);
                removed = true;
            } catch (Throwable t2) {
                if (logger.isWarnEnabled()) {
                    logger.warn("Failed to remove a handler: " + ctx.name(), t2);
                }
            }

            if (removed) {
                fireExceptionCaught(new ChannelPipelineException(
                        ctx.handler().getClass().getName() +
                        ".handlerAdded() has thrown an exception; removed.", t));
            } else {
                fireExceptionCaught(new ChannelPipelineException(
                        ctx.handler().getClass().getName() +
                        ".handlerAdded() has thrown an exception; also failed to remove.", t));
            }
        }
    }

    // Notifies the handler that it was removed; hops onto the context's
    // executor when necessary. Skipped when the handlerRemoved skip flag is set.
    private void callHandlerRemoved(final AbstractChannelHandlerContext ctx) {
        if ((ctx.skipFlags & AbstractChannelHandlerContext.MASK_HANDLER_REMOVED) != 0) {
            return;
        }

        if (ctx.channel().isRegistered() && !ctx.executor().inEventLoop()) {
            ctx.executor().execute(new Runnable() {
                @Override
                public void run() {
                    callHandlerRemoved0(ctx);
                }
            });
            return;
        }
        callHandlerRemoved0(ctx);
    }

    // Invokes handlerRemoved() and marks the context removed; a throwing
    // handler is reported as an exceptionCaught event.
    private void callHandlerRemoved0(final AbstractChannelHandlerContext ctx) {
        // Notify the complete removal.
        try {
            ctx.handler().handlerRemoved(ctx);
            ctx.setRemoved();
        } catch (Throwable t) {
            fireExceptionCaught(new ChannelPipelineException(
                    ctx.handler().getClass().getName() + ".handlerRemoved() has thrown an exception.", t));
        }
    }
    /**
     * Waits for a future to finish. If the task is interrupted, then the current thread will be interrupted.
     * It is expected that the task performs any appropriate locking.
     * <p>
     * If the internal call throws a {@link Throwable}, but it is not an instance of {@link Error} or
     * {@link RuntimeException}, then it is wrapped inside a {@link ChannelPipelineException} and that is
     * thrown instead.</p>
     *
     * @param future wait for this future
     * @see Future#get()
     * @throws Error if the task threw this.
     * @throws RuntimeException if the task threw this.
     * @throws ChannelPipelineException with a {@link Throwable} as a cause, if the task threw another type of
     *         {@link Throwable}.
     */
    private static void waitForFuture(Future<?> future) {
        try {
            // Block until the submitted pipeline mutation has completed.
            future.get();
        } catch (ExecutionException ex) {
            // In the arbitrary case, we can throw Error, RuntimeException, and Exception
            PlatformDependent.throwException(ex.getCause());
        } catch (InterruptedException ex) {
            // Interrupt the calling thread (note that this method is not called from the event loop)
            Thread.currentThread().interrupt();
        }
    }
@Override
public ChannelHandler first() {
ChannelHandlerContext first = firstContext();
if (first == null) {
return null;
}
return first.handler();
}
@Override
public ChannelHandlerContext firstContext() {
AbstractChannelHandlerContext first = head.next;
if (first == tail) {
return null;
}
return head.next;
}
@Override
public ChannelHandler last() {
AbstractChannelHandlerContext last = tail.prev;
if (last == head) {
return null;
}
return last.handler();
}
@Override
public ChannelHandlerContext lastContext() {
AbstractChannelHandlerContext last = tail.prev;
if (last == head) {
return null;
}
return last;
}
@Override
public ChannelHandler get(String name) {
ChannelHandlerContext ctx = context(name);
if (ctx == null) {
return null;
} else {
return ctx.handler();
}
}
@SuppressWarnings("unchecked")
@Override
public <T extends ChannelHandler> T get(Class<T> handlerType) {
ChannelHandlerContext ctx = context(handlerType);
if (ctx == null) {
return null;
} else {
return (T) ctx.handler();
}
}
@Override
public ChannelHandlerContext context(String name) {
    if (name == null) {
        throw new NullPointerException("name");
    }
    // name2ctx mutations elsewhere are guarded by this pipeline's monitor,
    // so the lookup must be as well.
    ChannelHandlerContext ctx;
    synchronized (this) {
        ctx = name2ctx.get(name);
    }
    return ctx;
}
@Override
public ChannelHandlerContext context(ChannelHandler handler) {
    if (handler == null) {
        throw new NullPointerException("handler");
    }
    // Linear scan of the doubly-linked context list, matching by identity.
    AbstractChannelHandlerContext ctx = head.next;
    while (ctx != null) {
        if (ctx.handler() == handler) {
            return ctx;
        }
        ctx = ctx.next;
    }
    return null;
}
@Override
public ChannelHandlerContext context(Class<? extends ChannelHandler> handlerType) {
    if (handlerType == null) {
        throw new NullPointerException("handlerType");
    }
    // Return the first context whose handler is an instance of handlerType (or a subtype).
    AbstractChannelHandlerContext ctx = head.next;
    while (ctx != null) {
        if (handlerType.isAssignableFrom(ctx.handler().getClass())) {
            return ctx;
        }
        ctx = ctx.next;
    }
    return null;
}
@Override
public List<String> names() {
    // Collect handler names in pipeline order by walking the context chain.
    List<String> list = new ArrayList<String>();
    for (AbstractChannelHandlerContext ctx = head.next; ctx != null; ctx = ctx.next) {
        list.add(ctx.name());
    }
    return list;
}
@Override
public Map<String, ChannelHandler> toMap() {
    // LinkedHashMap preserves pipeline order; stop before the internal tail sentinel.
    Map<String, ChannelHandler> map = new LinkedHashMap<String, ChannelHandler>();
    for (AbstractChannelHandlerContext ctx = head.next; ctx != tail; ctx = ctx.next) {
        map.put(ctx.name(), ctx.handler());
    }
    return map;
}
@Override
public Iterator<Map.Entry<String, ChannelHandler>> iterator() {
    // Iterates over a snapshot (the LinkedHashMap built by toMap()), so later
    // pipeline modifications are not reflected by an already-obtained iterator.
    return toMap().entrySet().iterator();
}
/**
 * Returns the {@link String} representation of this pipeline, e.g.
 * {@code SimpleName{(name = handlerClass), (name2 = handlerClass2)}}.
 */
@Override
public String toString() {
    StringBuilder buf = new StringBuilder()
            .append(StringUtil.simpleClassName(this))
            .append('{');
    boolean first = true;
    for (AbstractChannelHandlerContext ctx = head.next; ctx != tail; ctx = ctx.next) {
        if (!first) {
            buf.append(", ");
        }
        first = false;
        buf.append('(')
           .append(ctx.name())
           .append(" = ")
           .append(ctx.handler().getClass().getName())
           .append(')');
    }
    return buf.append('}').toString();
}
// ---------------------------------------------------------------------
// Inbound event propagation: every fire* call enters the pipeline at
// 'head' and flows towards the tail.
// ---------------------------------------------------------------------

@Override
public ChannelPipeline fireChannelRegistered() {
    head.fireChannelRegistered();
    return this;
}

@Override
public ChannelPipeline fireChannelUnregistered() {
    head.fireChannelUnregistered();
    // Remove all handlers sequentially if channel is closed and unregistered.
    if (!channel.isOpen()) {
        teardownAll();
    }
    return this;
}

/**
 * Removes all handlers from the pipeline one by one from tail (exclusive) to head (inclusive) to trigger
 * handlerRemoved(). Note that the tail handler is excluded because it is neither an outbound handler nor
 * does it do anything in handlerRemoved().
 */
private void teardownAll() {
    tail.prev.teardown();
}

@Override
public ChannelPipeline fireChannelActive() {
    head.fireChannelActive();
    // With auto-read enabled, immediately request the first read once the channel is active.
    if (channel.config().isAutoRead()) {
        channel.read();
    }
    return this;
}

@Override
public ChannelPipeline fireChannelInactive() {
    head.fireChannelInactive();
    return this;
}

@Override
public ChannelPipeline fireExceptionCaught(Throwable cause) {
    head.fireExceptionCaught(cause);
    return this;
}

@Override
public ChannelPipeline fireUserEventTriggered(Object event) {
    head.fireUserEventTriggered(event);
    return this;
}

@Override
public ChannelPipeline fireChannelRead(Object msg) {
    head.fireChannelRead(msg);
    return this;
}

@Override
public ChannelPipeline fireChannelReadComplete() {
    head.fireChannelReadComplete();
    // With auto-read enabled, keep reading after every read burst completes.
    if (channel.config().isAutoRead()) {
        read();
    }
    return this;
}

@Override
public ChannelPipeline fireChannelWritabilityChanged() {
    head.fireChannelWritabilityChanged();
    return this;
}

// ---------------------------------------------------------------------
// Outbound operations: each enters the pipeline at 'tail' and flows
// towards the head, which finally hands off to the channel's Unsafe.
// ---------------------------------------------------------------------

@Override
public ChannelFuture bind(SocketAddress localAddress) {
    return tail.bind(localAddress);
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress) {
    return tail.connect(remoteAddress);
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress) {
    return tail.connect(remoteAddress, localAddress);
}

@Override
public ChannelFuture disconnect() {
    return tail.disconnect();
}

@Override
public ChannelFuture close() {
    return tail.close();
}

@Override
public ChannelFuture deregister() {
    return tail.deregister();
}

@Override
public ChannelPipeline flush() {
    tail.flush();
    return this;
}

// Promise-accepting variants of the outbound operations above.

@Override
public ChannelFuture bind(SocketAddress localAddress, ChannelPromise promise) {
    return tail.bind(localAddress, promise);
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress, ChannelPromise promise) {
    return tail.connect(remoteAddress, promise);
}

@Override
public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) {
    return tail.connect(remoteAddress, localAddress, promise);
}

@Override
public ChannelFuture disconnect(ChannelPromise promise) {
    return tail.disconnect(promise);
}

@Override
public ChannelFuture close(ChannelPromise promise) {
    return tail.close(promise);
}

@Override
public ChannelFuture deregister(ChannelPromise promise) {
    return tail.deregister(promise);
}

@Override
public ChannelPipeline read() {
    tail.read();
    return this;
}

@Override
public ChannelFuture write(Object msg) {
    return tail.write(msg);
}

@Override
public ChannelFuture write(Object msg, ChannelPromise promise) {
    return tail.write(msg, promise);
}

@Override
public ChannelFuture writeAndFlush(Object msg, ChannelPromise promise) {
    return tail.writeAndFlush(msg, promise);
}

@Override
public ChannelFuture writeAndFlush(Object msg) {
    return tail.writeAndFlush(msg);
}
/**
 * Throws {@link IllegalArgumentException} if a handler with the given name is already registered.
 * NOTE(review): callers appear to invoke this while holding the pipeline monitor that guards
 * name2ctx — confirm, as this method itself does not synchronize.
 */
private void checkDuplicateName(String name) {
    if (name2ctx.containsKey(name)) {
        throw new IllegalArgumentException("Duplicate handler name: " + name);
    }
}

/** Like {@link #context(String)} but throws {@link NoSuchElementException} instead of returning null. */
private AbstractChannelHandlerContext getContextOrDie(String name) {
    AbstractChannelHandlerContext ctx = (AbstractChannelHandlerContext) context(name);
    if (ctx == null) {
        throw new NoSuchElementException(name);
    } else {
        return ctx;
    }
}

/** Like {@link #context(ChannelHandler)} but throws {@link NoSuchElementException} instead of returning null. */
private AbstractChannelHandlerContext getContextOrDie(ChannelHandler handler) {
    AbstractChannelHandlerContext ctx = (AbstractChannelHandlerContext) context(handler);
    if (ctx == null) {
        throw new NoSuchElementException(handler.getClass().getName());
    } else {
        return ctx;
    }
}

/** Like {@link #context(Class)} but throws {@link NoSuchElementException} instead of returning null. */
private AbstractChannelHandlerContext getContextOrDie(Class<? extends ChannelHandler> handlerType) {
    AbstractChannelHandlerContext ctx = (AbstractChannelHandlerContext) context(handlerType);
    if (ctx == null) {
        throw new NoSuchElementException(handlerType.getName());
    } else {
        return ctx;
    }
}
/**
 * Sentinel context at the tail of the pipeline. It terminates inbound events
 * (logging unhandled exceptions and releasing unconsumed inbound messages) and
 * simply forwards outbound operations towards the head. The {@code @Skip}
 * annotations are significant: they are read via {@code skipFlags0(...)} so the
 * annotated methods are bypassed during event propagation — do not remove them.
 */
static final class TailContext extends AbstractChannelHandlerContext implements ChannelHandler {
    private static final int SKIP_FLAGS = skipFlags0(TailContext.class);
    private static final String TAIL_NAME = generateName0(TailContext.class);

    TailContext(DefaultChannelPipeline pipeline) {
        // No executor group; the tail runs on the channel's event loop.
        super(pipeline, null, TAIL_NAME, SKIP_FLAGS);
    }

    @Override
    public ChannelHandler handler() {
        // The context is its own handler.
        return this;
    }

    // Inbound events that reach the tail are intentionally no-ops...
    @Override
    public void channelRegistered(ChannelHandlerContext ctx) throws Exception { }
    @Override
    public void channelUnregistered(ChannelHandlerContext ctx) throws Exception { }
    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception { }
    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception { }
    @Override
    public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception { }
    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception { }

    // ...except exceptions and messages, which are reported/cleaned up here.
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        logger.warn(
                "An exceptionCaught() event was fired, and it reached at the tail of the pipeline. " +
                        "It usually means the last handler in the pipeline did not handle the exception.", cause);
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        try {
            logger.debug(
                    "Discarded inbound message {} that reached at the tail of the pipeline. " +
                            "Please check your pipeline configuration.", msg);
        } finally {
            // Always release, even if logging throws, to avoid leaking reference-counted buffers.
            ReferenceCountUtil.release(msg);
        }
    }

    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) throws Exception { }

    // Lifecycle and outbound methods are @Skip'ped: the tail only forwards them.
    @Skip
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception { }
    @Skip
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { }
    @Skip
    @Override
    public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise)
            throws Exception {
        ctx.bind(localAddress, promise);
    }
    @Skip
    @Override
    public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress,
                        SocketAddress localAddress, ChannelPromise promise) throws Exception {
        ctx.connect(remoteAddress, localAddress, promise);
    }
    @Skip
    @Override
    public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        ctx.disconnect(promise);
    }
    @Skip
    @Override
    public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        ctx.close(promise);
    }
    @Skip
    @Override
    public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        ctx.deregister(promise);
    }
    @Skip
    @Override
    public void read(ChannelHandlerContext ctx) throws Exception {
        ctx.read();
    }
    @Skip
    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
        ctx.write(msg, promise);
    }
    @Skip
    @Override
    public void flush(ChannelHandlerContext ctx) throws Exception {
        ctx.flush();
    }
}
/**
 * Sentinel context at the head of the pipeline. Outbound operations that reach
 * the head are handed off to the channel's {@link Unsafe}; inbound events are
 * simply forwarded towards the tail. As with {@code TailContext}, the
 * {@code @Skip} annotations are consumed reflectively by {@code skipFlags0(...)}
 * and must not be removed.
 */
static final class HeadContext extends AbstractChannelHandlerContext implements ChannelHandler {
    private static final int SKIP_FLAGS = skipFlags0(HeadContext.class);
    private static final String HEAD_NAME = generateName0(HeadContext.class);

    // The channel's internal transport operations; terminal target for outbound calls.
    private final Unsafe unsafe;

    HeadContext(DefaultChannelPipeline pipeline) {
        super(pipeline, null, HEAD_NAME, SKIP_FLAGS);
        unsafe = pipeline.channel().unsafe();
    }

    @Override
    public ChannelHandler handler() {
        // The context is its own handler.
        return this;
    }

    // Outbound operations terminate here by delegating to the channel's Unsafe.
    @Override
    public void bind(
            ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise)
            throws Exception {
        unsafe.bind(localAddress, promise);
    }

    @Override
    public void connect(
            ChannelHandlerContext ctx,
            SocketAddress remoteAddress, SocketAddress localAddress,
            ChannelPromise promise) throws Exception {
        unsafe.connect(remoteAddress, localAddress, promise);
    }

    @Override
    public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        unsafe.disconnect(promise);
    }

    @Override
    public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        unsafe.close(promise);
    }

    @Override
    public void deregister(ChannelHandlerContext ctx, final ChannelPromise promise) throws Exception {
        // The wrapped event loop must already have stopped accepting new tasks before deregistration.
        assert !((PausableEventExecutor) ctx.channel().eventLoop()).isAcceptingNewTasks();
        // Submit the actual deregistration to the underlying (unwrapped) event loop.
        ctx.channel().eventLoop().unwrap().execute(new OneTimeTask() {
            @Override
            public void run() {
                unsafe.deregister(promise);
            }
        });
    }

    @Override
    public void read(ChannelHandlerContext ctx) {
        unsafe.beginRead();
    }

    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
        unsafe.write(msg, promise);
    }

    @Override
    public void flush(ChannelHandlerContext ctx) throws Exception {
        unsafe.flush();
    }

    // Lifecycle and inbound methods are @Skip'ped: the head only forwards inbound events.
    @Skip
    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception { }
    @Skip
    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception { }
    @Skip
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        ctx.fireExceptionCaught(cause);
    }
    @Skip
    @Override
    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelRegistered();
    }
    @Skip
    @Override
    public void channelUnregistered(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelUnregistered();
    }
    @Skip
    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelActive();
    }
    @Skip
    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelInactive();
    }
    @Skip
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        ctx.fireChannelRead(msg);
    }
    @Skip
    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelReadComplete();
    }
    @Skip
    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
        ctx.fireUserEventTriggered(evt);
    }
    @Skip
    @Override
    public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception {
        ctx.fireChannelWritabilityChanged();
    }
}
}
| |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.plugins.ide.eclipse.model;
import com.google.common.base.Preconditions;
import groovy.lang.Closure;
import org.gradle.api.Action;
import org.gradle.api.Project;
import org.gradle.api.internal.project.ProjectInternal;
import org.gradle.api.internal.tasks.DefaultTaskDependency;
import org.gradle.api.model.ObjectFactory;
import org.gradle.api.tasks.TaskDependency;
import org.gradle.internal.xml.XmlTransformer;
import org.gradle.plugins.ide.api.XmlFileContentMerger;
import javax.inject.Inject;
import java.io.File;
import java.util.Map;
import static org.gradle.util.internal.ConfigureUtil.configure;
/**
* DSL-friendly model of the Eclipse project information.
* First point of entry for customizing Eclipse project generation.
*
* <pre class='autoTested'>
* plugins {
* id 'java'
* id 'eclipse'
* id 'eclipse-wtp' // for web projects only
* }
*
* eclipse {
* pathVariables 'GRADLE_HOME': file('/best/software/gradle'), 'TOMCAT_HOME': file('../tomcat')
*
* project {
* //see docs for {@link EclipseProject}
* }
*
* classpath {
* //see docs for {@link EclipseClasspath}
* }
*
* wtp {
* //see docs for {@link EclipseWtp}
* }
* }
* </pre>
*
* More examples in docs for {@link EclipseProject}, {@link EclipseClasspath}, {@link EclipseWtp}
*/
public class EclipseModel {

    private EclipseProject project;
    private EclipseClasspath classpath;
    private EclipseJdt jdt;
    private EclipseWtp wtp;

    // Task collections consumed by Buildship; see getSynchronizationTasks()/getAutoBuildTasks().
    private final DefaultTaskDependency synchronizationTasks;
    private final DefaultTaskDependency autoBuildTasks;

    public EclipseModel() {
        synchronizationTasks = new DefaultTaskDependency();
        autoBuildTasks = new DefaultTaskDependency();
    }

    /**
     * Constructor.
     *
     * @since 5.4
     */
    public EclipseModel(Project project) {
        this.synchronizationTasks = new DefaultTaskDependency(((ProjectInternal) project).getTasks());
        this.autoBuildTasks = new DefaultTaskDependency(((ProjectInternal) project).getTasks());
    }

    /**
     * Injects and returns an instance of {@link ObjectFactory}.
     * <p>
     * Overridden by Gradle's instantiation infrastructure; the base implementation is never called.
     *
     * @since 4.9
     */
    @Inject
    protected ObjectFactory getObjectFactory() {
        throw new UnsupportedOperationException();
    }

    /**
     * Configures eclipse project information
     * <p>
     * For examples see docs for {@link EclipseProject}
     */
    public EclipseProject getProject() {
        if (project == null) {
            // Lazily create the project model; tab indentation matches Eclipse's own .project format.
            XmlTransformer xmlTransformer = new XmlTransformer();
            xmlTransformer.setIndentation("\t");
            project = getObjectFactory().newInstance(EclipseProject.class, new XmlFileContentMerger(xmlTransformer));
        }
        return project;
    }

    public void setProject(EclipseProject project) {
        this.project = project;
    }

    /**
     * Configures eclipse classpath information
     * <p>
     * For examples see docs for {@link EclipseClasspath}
     */
    public EclipseClasspath getClasspath() {
        return classpath;
    }

    public void setClasspath(EclipseClasspath classpath) {
        this.classpath = classpath;
    }

    /**
     * Configures eclipse java compatibility information (jdt)
     * <p>
     * For examples see docs for {@link EclipseProject}
     */
    public EclipseJdt getJdt() {
        return jdt;
    }

    public void setJdt(EclipseJdt jdt) {
        this.jdt = jdt;
    }

    /**
     * Configures eclipse wtp information
     * <p>
     * For examples see docs for {@link EclipseWtp}
     */
    public EclipseWtp getWtp() {
        if (wtp == null) {
            // Lazily created, like the project model.
            wtp = getObjectFactory().newInstance(EclipseWtp.class);
        }
        return wtp;
    }

    public void setWtp(EclipseWtp wtp) {
        this.wtp = wtp;
    }

    /**
     * Configures eclipse project information
     * <p>
     * For examples see docs for {@link EclipseProject}
     */
    public void project(Closure closure) {
        configure(closure, getProject());
    }

    /**
     * Configures eclipse project information
     * <p>
     * For examples see docs for {@link EclipseProject}
     *
     * @since 3.5
     */
    public void project(Action<? super EclipseProject> action) {
        action.execute(getProject());
    }

    /**
     * Configures eclipse classpath information
     * <p>
     * For examples see docs for {@link EclipseClasspath}
     */
    public void classpath(Closure closure) {
        configure(closure, classpath);
    }

    /**
     * Configures eclipse classpath information
     * <p>
     * For examples see docs for {@link EclipseClasspath}
     *
     * @since 3.5
     */
    public void classpath(Action<? super EclipseClasspath> action) {
        action.execute(classpath);
    }

    /**
     * Configures eclipse wtp information
     * <p>
     * For examples see docs for {@link EclipseWtp}
     */
    public void wtp(Closure closure) {
        // Use getWtp() (not the raw field) so the instance is lazily created before being
        // configured; configuring the field directly fails when wtp was never initialized,
        // and is inconsistent with project(Closure) which uses the lazy getter.
        configure(closure, getWtp());
    }

    /**
     * Configures eclipse wtp information
     * <p>
     * For examples see docs for {@link EclipseWtp}
     *
     * @since 3.5
     */
    public void wtp(Action<? super EclipseWtp> action) {
        // Same lazy-init rationale as wtp(Closure).
        action.execute(getWtp());
    }

    /**
     * Configures eclipse java compatibility information (jdt)
     * <p>
     * For examples see docs for {@link EclipseProject}
     */
    public void jdt(Closure closure) {
        configure(closure, getJdt());
    }

    /**
     * Configures eclipse java compatibility information (jdt)
     * <p>
     * For examples see docs for {@link EclipseProject}
     *
     * @since 3.5
     */
    public void jdt(Action<? super EclipseJdt> action) {
        action.execute(getJdt());
    }

    /**
     * Returns the tasks to be executed before the Eclipse synchronization starts.
     * <p>
     * This property doesn't have a direct effect to the Gradle Eclipse plugin's behaviour. It is used, however, by
     * Buildship to execute the configured tasks each time before the user imports the project or before a project
     * synchronization starts.
     *
     * @return the tasks names
     * @since 5.4
     */
    public TaskDependency getSynchronizationTasks() {
        return synchronizationTasks;
    }

    /**
     * Set tasks to be executed before the Eclipse synchronization.
     *
     * @see #getSynchronizationTasks()
     * @since 5.4
     */
    public void synchronizationTasks(Object... synchronizationTasks) {
        this.synchronizationTasks.add(synchronizationTasks);
    }

    /**
     * Returns the tasks to be executed during the Eclipse auto-build.
     * <p>
     * This property doesn't have a direct effect to the Gradle Eclipse plugin's behaviour. It is used, however, by
     * Buildship to execute the configured tasks each time when the Eclipse automatic build is triggered for the project.
     *
     * @return the tasks names
     * @since 5.4
     */
    public TaskDependency getAutoBuildTasks() {
        return autoBuildTasks;
    }

    /**
     * Set tasks to be executed during the Eclipse auto-build.
     *
     * @see #getAutoBuildTasks()
     * @since 5.4
     */
    public void autoBuildTasks(Object... autoBuildTasks) {
        this.autoBuildTasks.add(autoBuildTasks);
    }

    /**
     * Adds path variables to be used for replacing absolute paths in classpath entries.
     * <p>
     * If the beginning of the absolute path of a library or other path-related element matches a value of a variable,
     * a variable entry is used. The matching part of the library path is replaced with the variable name.
     * <p>
     * For example see docs for {@link EclipseModel}
     *
     * @param pathVariables A map with String-&gt;File pairs.
     */
    public void pathVariables(Map<String, File> pathVariables) {
        Preconditions.checkNotNull(pathVariables);
        // NOTE(review): assumes classpath was already set by the eclipse plugin — confirm; a null
        // classpath here would NPE.
        classpath.getPathVariables().putAll(pathVariables);
        if (wtp != null && wtp.getComponent() != null) {
            wtp.getComponent().getPathVariables().putAll(pathVariables);
        }
    }
}
| |
package org.mapfish.print.attribute.map;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import org.geotools.referencing.CRS;
import org.json.JSONException;
import org.json.JSONObject;
import org.mapfish.print.Constants;
import org.mapfish.print.ExceptionUtils;
import org.mapfish.print.attribute.ReflectiveAttribute;
import org.mapfish.print.config.Configuration;
import org.mapfish.print.config.ConfigurationException;
import org.mapfish.print.config.Template;
import org.mapfish.print.map.MapLayerFactoryPlugin;
import org.mapfish.print.parser.HasDefaultValue;
import org.mapfish.print.parser.MapfishParser;
import org.mapfish.print.wrapper.PArray;
import org.mapfish.print.wrapper.PObject;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.NoSuchAuthorityCodeException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import java.awt.Dimension;
import java.util.List;
import java.util.Map;
/**
* Generic attributes for {@link org.mapfish.print.processor.map.CreateMapProcessor} and
* {@link org.mapfish.print.processor.map.CreateOverviewMapProcessor}.
* @param <GenericMapAttributeValues>
*/
public abstract class GenericMapAttribute<GenericMapAttributeValues>
extends ReflectiveAttribute<GenericMapAttribute<?>.GenericMapAttributeValues> {
private static final double[] DEFAULT_DPI_VALUES = {72, 120, 200, 254, 300, 600, 1200, 2400};
/**
* The json key for the suggested DPI values in the client config.
*/
public static final String JSON_DPI_SUGGESTIONS = "dpiSuggestions";
/**
* The json key for the max DPI value in the client config.
*/
public static final String JSON_MAX_DPI = "maxDPI";
/**
* The json key for the width of the map in the client config.
*/
public static final String JSON_MAP_WIDTH = "width";
/**
* The json key for the height of the map in the client config.
*/
public static final String JSON_MAP_HEIGHT = "height";
static final String JSON_ZOOM_LEVEL_SUGGESTIONS = "scales";
@Autowired
private ApplicationContext applicationContext;
@Autowired
private MapfishParser mapfishJsonParser;
private Double maxDpi = null;
private double[] dpiSuggestions = null;
private ZoomLevels zoomLevels = null;
private Double zoomSnapTolerance = null;
private ZoomLevelSnapStrategy zoomLevelSnapStrategy = null;
private Boolean zoomSnapGeodetic = null;
private Integer width = null;
private Integer height = null;
public final Double getMaxDpi() {
return this.maxDpi;
}
public final void setMaxDpi(final Double maxDpi) {
this.maxDpi = maxDpi;
}
/**
* Get DPI suggestions.
* @return DPI suggestions
*/
public final double[] getDpiSuggestions() {
if (this.dpiSuggestions == null) {
List<Double> list = Lists.newArrayList();
for (double suggestion : DEFAULT_DPI_VALUES) {
if (suggestion <= this.maxDpi) {
list.add(suggestion);
}
}
double[] suggestions = new double[list.size()];
for (int i = 0; i < suggestions.length; i++) {
suggestions[i] = list.get(i);
}
return suggestions;
}
return this.dpiSuggestions;
}
/**
* Suggestions for DPI values to use. Typically these are used by the client to create a UI for a user.
* @param dpiSuggestions DPI suggestions
*/
public final void setDpiSuggestions(final double[] dpiSuggestions) {
this.dpiSuggestions = dpiSuggestions;
}
public final Integer getWidth() {
return this.width;
}
/**
* The width of the map in pixels. This value should match the width
* of the sub-report in the JasperReport template.
* @param width Width
*/
public final void setWidth(final Integer width) {
this.width = width;
}
public final Integer getHeight() {
return this.height;
}
/**
* The height of the map in pixels. This value should match the height
* of the sub-report in the JasperReport template.
* @param height Height
*/
public final void setHeight(final Integer height) {
this.height = height;
}
public final void setZoomLevels(final ZoomLevels zoomLevels) {
this.zoomLevels = zoomLevels;
}
public final void setZoomSnapTolerance(final Double zoomSnapTolerance) {
this.zoomSnapTolerance = zoomSnapTolerance;
}
public final void setZoomLevelSnapStrategy(final ZoomLevelSnapStrategy zoomLevelSnapStrategy) {
this.zoomLevelSnapStrategy = zoomLevelSnapStrategy;
}
public final void setZoomSnapGeodetic(final Boolean zoomSnapGeodetic) {
this.zoomSnapGeodetic = zoomSnapGeodetic;
}
//CSOFF: DesignForExtension
@Override
public void validate(final List<Throwable> validationErrors, final Configuration configuration) {
//CSON: DesignForExtension
if (this.width == null || this.width < 1) {
validationErrors.add(new ConfigurationException("width field is not legal: " + this.width + " in " + getClass().getName()));
}
if (this.height == null || this.height < 1) {
validationErrors.add(new ConfigurationException("height field is not legal: " + this.height + " in " + getClass().getName()));
}
if (this.getMaxDpi() == null || this.getMaxDpi() < 1) {
validationErrors.add(
new ConfigurationException("maxDpi field is not legal: " + this.getMaxDpi() + " in " + getClass().getName()));
}
if (this.getMaxDpi() != null && this.getDpiSuggestions() != null) {
for (double dpi : this.getDpiSuggestions()) {
if (dpi < 1 || dpi > this.getMaxDpi()) {
validationErrors.add(new ConfigurationException(
"dpiSuggestions contains an invalid value: " + dpi + " in " + getClass().getName()));
}
}
}
}
@Override
protected final Optional<JSONObject> getClientInfo() throws JSONException {
final JSONObject jsonObject = new JSONObject();
jsonObject.put(JSON_DPI_SUGGESTIONS, getDpiSuggestions());
if (this.zoomLevels != null) {
jsonObject.put(JSON_ZOOM_LEVEL_SUGGESTIONS, this.zoomLevels.getScales());
}
jsonObject.put(JSON_MAX_DPI, this.maxDpi);
jsonObject.put(JSON_MAP_WIDTH, this.width);
jsonObject.put(JSON_MAP_HEIGHT, this.height);
return Optional.of(jsonObject);
}
/**
* The value of {@link GenericMapAttribute}.
*/
public abstract class GenericMapAttributeValues {
private static final String TYPE = "type";
/**
* The default projection.
*/
protected static final String DEFAULT_PROJECTION = "EPSG:3857";
private final Dimension mapSize;
private final Template template;
private List<MapLayer> mapLayers;
/**
* The projection of the map.
*/
@HasDefaultValue
public String projection = null;
/**
* The rotation of the map.
*/
@HasDefaultValue
public Double rotation = null;
/**
* Indicates if the map should adjust its scale/zoom level to be equal to one of those defined in the configuration file.
* <p></p>
*
* @see #isUseNearestScale()
*/
@HasDefaultValue
public Boolean useNearestScale = null;
/**
* Indicates if the map should adjust its bounds.
* <p></p>
*
* @see #isUseAdjustBounds()
*/
@HasDefaultValue
public Boolean useAdjustBounds = null;
/**
* By default the normal axis order as specified in EPSG code will be used when parsing projections. However
* the requestor can override this by explicitly declaring that longitude axis is first.
*/
@HasDefaultValue
public Boolean longitudeFirst = null;
/**
* Should the vector style definitions be adapted to the target DPI resolution? (Default: true)
* <p></p>
* The style definitions are often optimized for a use with OpenLayers (which uses
* a DPI value of 72). When these styles are used to print with a higher DPI value,
* lines often look too thin, label are too small, etc.
* <p></p>
* If this property is set to `true`, the style definitions will be scaled to the target DPI value.
*/
@HasDefaultValue
public Boolean dpiSensitiveStyle = true;
/**
* Constructor.
*
* @param template the template this map is part of.
* @param mapSize the size of the map.
*/
public GenericMapAttributeValues(final Template template, final Dimension mapSize) {
this.template = template;
this.mapSize = mapSize;
}
/**
* Validate the values provided by the request data and construct MapBounds and parse the layers.
*/
//CSOFF: DesignForExtension
public void postConstruct() throws FactoryException {
//CSON: DesignForExtension
this.mapLayers = parseLayers();
}
private List<MapLayer> parseLayers() {
List<MapLayer> layerList = Lists.newArrayList();
for (int i = 0; i < this.getRawLayers().size(); i++) {
try {
PObject layer = this.getRawLayers().getObject(i);
// only render if the opacity is greater than 0
if (Math.abs(layer.optDouble("opacity", 1.0)) > Constants.OPACITY_PRECISION) {
parseSingleLayer(layerList, layer);
}
} catch (Throwable throwable) {
throw ExceptionUtils.getRuntimeException(throwable);
}
}
return layerList;
}
@SuppressWarnings("unchecked")
private void parseSingleLayer(final List<MapLayer> layerList,
final PObject layer) throws Throwable {
final Map<String, MapLayerFactoryPlugin> layerParsers =
GenericMapAttribute.this.applicationContext.getBeansOfType(MapLayerFactoryPlugin.class);
for (MapLayerFactoryPlugin layerParser : layerParsers.values()) {
final boolean layerApplies = layerParser.getTypeNames().contains(layer.getString(TYPE).toLowerCase());
if (layerApplies) {
Object param = layerParser.createParameter();
GenericMapAttribute.this.mapfishJsonParser.parse(this.template.getConfiguration()
.isThrowErrorOnExtraParameters(),
layer, param, TYPE
);
final MapLayer newLayer = layerParser.parse(this.template, param);
if (layerList.isEmpty()) {
layerList.add(newLayer);
} else {
final int lastLayerIndex = layerList.size() - 1;
final MapLayer lastLayer = layerList.get(lastLayerIndex);
Optional<MapLayer> combinedLayer = lastLayer.tryAddLayer(newLayer);
if (combinedLayer.isPresent()) {
layerList.remove(lastLayerIndex);
layerList.add(lastLayerIndex, combinedLayer.get());
} else {
layerList.add(newLayer);
}
}
return;
}
}
StringBuilder message = new StringBuilder("\nLayer with type: '" + layer.getString(TYPE) + "' is not currently " +
"supported. Options include: ");
for (MapLayerFactoryPlugin<?> mapLayerFactoryPlugin : layerParsers.values()) {
for (Object name : mapLayerFactoryPlugin.getTypeNames()) {
message.append("\n");
message.append("\t* ").append(name);
}
}
throw new IllegalArgumentException(message.toString());
}
/**
* Parse the projection from a string.
* @return the crs
*/
protected final CoordinateReferenceSystem parseProjection() {
return GenericMapAttribute.parseProjection(getProjection(), this.longitudeFirst);
}
/**
* Return the DPI value for the map.
* This method is abstract because the dpi value is optional for the overview map,
* but must be given for the normal map. So, in the overview map the field is defined
* with a @HasDefaultValue annotation.
*/
public abstract Double getDpi();
/**
* Return the JSON layer definiton.
* This method is abstract is abstract for the same reasons as {@link #getDpi()}.
*/
protected abstract PArray getRawLayers();
//CSOFF: DesignForExtension
public List<MapLayer> getLayers() {
//CSON: DesignForExtension
return Lists.newArrayList(this.mapLayers);
}
public final Template getTemplate() {
return this.template;
}
public final Dimension getMapSize() {
return this.mapSize;
}
//CSOFF: DesignForExtension
public Double getRotation() {
//CSON: DesignForExtension
return this.rotation;
}
//CSOFF: DesignForExtension
public String getProjection() {
//CSON: DesignForExtension
return this.projection;
}
/**
* Return true if requestData has useNearestScale and configuration has some zoom levels defined.
*/
//CSOFF: DesignForExtension
public Boolean isUseNearestScale() {
//CSON: DesignForExtension
return this.useNearestScale && GenericMapAttribute.this.zoomLevels != null;
}
/**
* Return true if requestData has useNearestScale and configuration has some zoom levels defined.
*/
//CSOFF: DesignForExtension
public Boolean isUseAdjustBounds() {
//CSON: DesignForExtension
return this.useAdjustBounds;
}
public final Boolean isDpiSensitiveStyle() {
return this.dpiSensitiveStyle;
}
//CSOFF: DesignForExtension
public ZoomLevels getZoomLevels() {
//CSON: DesignForExtension
return GenericMapAttribute.this.zoomLevels;
}
//CSOFF: DesignForExtension
public Double getZoomSnapTolerance() {
//CSON: DesignForExtension
return GenericMapAttribute.this.zoomSnapTolerance;
}
//CSOFF: DesignForExtension
public ZoomLevelSnapStrategy getZoomLevelSnapStrategy() {
//CSON: DesignForExtension
return GenericMapAttribute.this.zoomLevelSnapStrategy;
}
//CSOFF: DesignForExtension
/** Return the geodetic zoom-snap flag configured on the enclosing {@code GenericMapAttribute}. */
public Boolean getZoomSnapGeodetic() {
//CSON: DesignForExtension
    return GenericMapAttribute.this.zoomSnapGeodetic;
}
//CSOFF: DesignForExtension
/** Return the DPI suggestions, delegating to the enclosing {@code GenericMapAttribute}. */
public double[] getDpiSuggestions() {
//CSON: DesignForExtension
    return GenericMapAttribute.this.getDpiSuggestions();
}
//CSOFF: DesignForExtension
/** Return the DPI the client is assumed to have used when computing bounds. */
public double getRequestorDPI() {
//CSON: DesignForExtension
    // We are making the same assumption as Openlayers 2.x versions, that the DPI is 72.
    // In the future we probably need to change this assumption and allow the client software to
    // specify the DPI they are using for creating the bounds.
    // For the moment we require the client to convert their bounds to 72 DPI
    return Constants.PDF_DPI;
}
/**
 * Return {@code value} if it is non-null, otherwise fall back to {@code defaultValue}.
 *
 * @param value The value or null.
 * @param defaultValue The default value.
 * @param <T> A type.
 * @return {@code value} when it is non-null, {@code defaultValue} otherwise.
 */
protected final <T> T getValueOr(final T value, final T defaultValue) {
    // Null-coalescing helper; the redundant "extends Object" bound was dropped
    // (identical erasure, identical call sites).
    return value != null ? value : defaultValue;
}
}
/**
 * Parse the given projection.
 *
 * @param projection The projection string.
 * @param longitudeFirst longitudeFirst
 * @return the coordinate reference system decoded from the projection code
 */
public static CoordinateReferenceSystem parseProjection(final String projection, final Boolean longitudeFirst) {
    // EPSG:900913 is the legacy Google/OSM alias for web mercator; normalize it first.
    final String epsgCode =
            projection.equalsIgnoreCase("EPSG:900913") ? "EPSG:3857" : projection;
    try {
        return longitudeFirst == null
                ? CRS.decode(epsgCode)
                : CRS.decode(epsgCode, longitudeFirst);
    } catch (NoSuchAuthorityCodeException e) {
        throw new RuntimeException(epsgCode + " was not recognized as a crs code", e);
    } catch (FactoryException e) {
        throw new RuntimeException("Error occurred while parsing: " + epsgCode, e);
    }
}
}
| |
/*
Derby - Class com.pivotal.gemfirexd.internal.client.ClientXid
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.pivotal.gemfirexd.internal.client;
import javax.transaction.xa.Xid;
public class ClientXid implements Xid {
    /**
     * The format identifier for the Xid. A value of -1 indicates the NULLXid.
     */
    private int formatID_;

    /** The number of bytes in the global transaction identifier. */
    private int gtrid_length_;

    /** The number of bytes in the branch qualifier. */
    private int bqual_length_;

    /**
     * The data for the Xid.
     * <p>The Xid is made up of two contiguous parts. The first (of size
     * <b>gtrid_length</b>) is the global transaction identifier and the second
     * (of size <b>bqual_length</b>) is the branch qualifier.
     * <p>If the <b>formatID</b> is -1, indicating the NULLXid, the data is
     * ignored.
     */
    private byte data_[];

    /** The size of <b>data</b>. */
    static private final int XidDATASIZE = 128;

    /** The maximum size of the global transaction identifier. */
    static public final int MAXGTRIDSIZE = 64;

    /** The maximum size of the branch qualifier. */
    static public final int MAXBQUALSIZE = 64;

    /** Lookup table used by {@link #toString()} to render bytes as hex. */
    static private final String hextab_ = "0123456789ABCDEF";

    /**
     * Constructs a new null Xid.
     * <p>After construction the data within the Xid should be initialized.
     */
    public ClientXid() {
        data_ = new byte[XidDATASIZE];
        gtrid_length_ = 0;
        bqual_length_ = 0;
        formatID_ = -1;
    }

    /**
     * Constructs an Xid from a format identifier, a global transaction id and
     * a branch qualifier.
     *
     * @param formatID the format identifier; -1 indicates the null Xid
     * @param gtrid the global transaction identifier (at most MAXGTRIDSIZE bytes)
     * @param bqual the branch qualifier (at most MAXBQUALSIZE bytes)
     */
    public ClientXid(int formatID, byte[] gtrid, byte[] bqual) {
        formatID_ = formatID;
        gtrid_length_ = gtrid.length;
        bqual_length_ = bqual.length;
        data_ = new byte[XidDATASIZE];
        // gtrid and bqual are stored back-to-back in the single data_ buffer.
        System.arraycopy(gtrid, 0, data_, 0, gtrid_length_);
        System.arraycopy(bqual, 0, data_, gtrid_length_, bqual_length_);
    }

    /**
     * Return a string representing this Xid for debugging.
     *
     * @return the string representation of this Xid
     */
    public String toString() {
        int length = gtrid_length_ + bqual_length_;
        // Two hex digits per data byte, plus a space after every 4th byte.
        StringBuilder hex = new StringBuilder(2 * length + length / 4);
        for (int i = 0; i < length; i++) {
            int v = data_[i] & 0xff;
            hex.append(hextab_.charAt(v / 16));
            hex.append(hextab_.charAt(v & 15));
            if ((i + 1) % 4 == 0 && (i + 1) < length) {
                hex.append(" ");
            }
        }
        return "{ClientXid: " +
            "formatID(" + formatID_ + "), " +
            "gtrid_length(" + gtrid_length_ + "), " +
            "bqual_length(" + bqual_length_ + "), " +
            "data(" + hex.toString() + ")" +
            "}";
    }

    /**
     * Returns the branch qualifier for this Xid as a fresh copy.
     *
     * @return the branch qualifier
     */
    public byte[] getBranchQualifier() {
        byte[] bqual = new byte[bqual_length_];
        System.arraycopy(data_, gtrid_length_, bqual, 0, bqual_length_);
        return bqual;
    }

    /**
     * Set the branch qualifier for this Xid.
     *
     * @param qual a byte array containing the branch qualifier to be set. If
     * the size of the array exceeds MAXBQUALSIZE, only the first MAXBQUALSIZE
     * elements of qual will be used.
     */
    public void setBranchQualifier(byte[] qual) {
        // Silently truncate, matching the documented contract.
        bqual_length_ = Math.min(qual.length, MAXBQUALSIZE);
        System.arraycopy(qual, 0, data_, gtrid_length_, bqual_length_);
    }

    /**
     * Obtain the format identifier part of the Xid.
     *
     * @return format identifier; -1 indicates a null Xid
     */
    public int getFormatId() {
        return formatID_;
    }

    /**
     * Set the format identifier part of the Xid.
     *
     * @param formatID format identifier; -1 indicates a null Xid
     */
    public void setFormatID(int formatID) {
        formatID_ = formatID;
    }

    /**
     * Returns the global transaction identifier for this Xid as a fresh copy.
     *
     * @return the global transaction identifier
     */
    public byte[] getGlobalTransactionId() {
        byte[] gtrid = new byte[gtrid_length_];
        System.arraycopy(data_, 0, gtrid, 0, gtrid_length_);
        return gtrid;
    }

    /**
     * Return the internal data buffer backing this Xid.
     * <p>Note: this is the live internal array, not a copy — callers must not
     * modify it.
     */
    public byte[] getData() {
        return data_;
    }

    /** Return the length in bytes of the global transaction identifier. */
    public int getGtridLength() {
        return gtrid_length_;
    }

    /** Return the length in bytes of the branch qualifier. */
    public int getBqualLength() {
        return bqual_length_;
    }

    /** A null Xid hashes to -1; otherwise a cheap combination of the lengths. */
    public int hashCode() {
        if (formatID_ == (-1)) {
            return (-1);
        }
        return formatID_ + gtrid_length_ - bqual_length_;
    }

    /**
     * Compare this Xid with another object for XA equality.
     * <p>Returns false for null or non-Xid arguments, as required by the
     * {@link Object#equals} contract (the previous unguarded cast threw
     * ClassCastException/NPE for such arguments).
     */
    public boolean equals(Object obj) {
        if (!(obj instanceof javax.transaction.xa.Xid)) {
            return false;
        }
        return com.pivotal.gemfirexd.internal.client.net.NetXAResource.xidsEqual(this, (javax.transaction.xa.Xid) obj);
    }
} // class Xid
| |
/*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.swing.text;
import java.util.Vector;
import java.io.Serializable;
import javax.swing.undo.*;
import javax.swing.SwingUtilities;
/**
* An implementation of the AbstractDocument.Content interface that is
* a brute force implementation that is useful for relatively small
* documents and/or debugging. It manages the character content
* as a simple character array. It is also quite inefficient.
* <p>
* It is generally recommended that the gap buffer or piece table
* implementations be used instead. This buffer does not scale up
* to large sizes.
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is
* appropriate for short term storage or RMI between applications running
* the same version of Swing. As of 1.4, support for long term storage
* of all JavaBeans™
* has been added to the <code>java.beans</code> package.
* Please see {@link java.beans.XMLEncoder}.
*
* @author Timothy Prinzing
*/
public final class StringContent implements AbstractDocument.Content, Serializable {

    /**
     * Creates a new StringContent object.  Initial size defaults to 10.
     */
    public StringContent() {
        this(10);
    }

    /**
     * Creates a new StringContent object, with the initial
     * size specified.  If the length is < 1, a size of 1 is used.
     *
     * @param initialLength the initial size
     */
    public StringContent(int initialLength) {
        if (initialLength < 1) {
            initialLength = 1;
        }
        data = new char[initialLength];
        data[0] = '\n';  // content always contains at least a trailing newline
        count = 1;
    }

    /**
     * Returns the length of the content.
     *
     * @return the length >= 1
     * @see AbstractDocument.Content#length
     */
    public int length() {
        return count;
    }

    /**
     * Inserts a string into the content.
     *
     * @param where the starting position >= 0 && < length()
     * @param str the non-null string to insert
     * @return an UndoableEdit object for undoing
     * @exception BadLocationException if the specified position is invalid
     * @see AbstractDocument.Content#insertString
     */
    public UndoableEdit insertString(int where, String str) throws BadLocationException {
        if (where >= count || where < 0) {
            throw new BadLocationException("Invalid location", count);
        }
        char[] chars = str.toCharArray();
        replace(where, 0, chars, 0, chars.length);
        if (marks != null) {
            // shift any sticky positions at/after the insertion point
            updateMarksForInsert(where, str.length());
        }
        return new InsertUndo(where, str.length());
    }

    /**
     * Removes part of the content.  where + nitems must be < length().
     *
     * @param where the starting position >= 0
     * @param nitems the number of characters to remove >= 0
     * @return an UndoableEdit object for undoing
     * @exception BadLocationException if the specified position is invalid
     * @see AbstractDocument.Content#remove
     */
    public UndoableEdit remove(int where, int nitems) throws BadLocationException {
        if (where + nitems >= count) {
            throw new BadLocationException("Invalid range", count);
        }
        // capture the removed text first so the edit can restore it on undo
        String removedString = getString(where, nitems);
        UndoableEdit edit = new RemoveUndo(where, removedString);
        replace(where, nitems, empty, 0, 0);
        if (marks != null) {
            updateMarksForRemove(where, nitems);
        }
        return edit;
    }

    /**
     * Retrieves a portion of the content.  where + len must be <= length().
     *
     * @param where the starting position >= 0
     * @param len the length to retrieve >= 0
     * @return a string representing the content; may be empty
     * @exception BadLocationException if the specified position is invalid
     * @see AbstractDocument.Content#getString
     */
    public String getString(int where, int len) throws BadLocationException {
        if (where + len > count) {
            throw new BadLocationException("Invalid range", count);
        }
        return new String(data, where, len);
    }

    /**
     * Retrieves a portion of the content.  where + len must be <= length()
     *
     * @param where the starting position >= 0
     * @param len the number of characters to retrieve >= 0
     * @param chars the Segment object to return the characters in
     * @exception BadLocationException if the specified position is invalid
     * @see AbstractDocument.Content#getChars
     */
    public void getChars(int where, int len, Segment chars) throws BadLocationException {
        if (where + len > count) {
            throw new BadLocationException("Invalid location", count);
        }
        // hands out the live backing array (no copy); callers must treat it
        // as read-only
        chars.array = data;
        chars.offset = where;
        chars.count = len;
    }

    /**
     * Creates a position within the content that will
     * track change as the content is mutated.
     *
     * @param offset the offset to create a position for >= 0
     * @return the position
     * @exception BadLocationException if the specified position is invalid
     */
    public Position createPosition(int offset) throws BadLocationException {
        // some small documents won't have any sticky positions
        // at all, so the buffer is created lazily.
        if (marks == null) {
            marks = new Vector<PosRec>();
        }
        return new StickyPosition(offset);
    }

    // --- local methods ---------------------------------------

    /**
     * Replaces some of the characters in the array
     * @param offset offset into the array to start the replace
     * @param length number of characters to remove
     * @param replArray replacement array
     * @param replOffset offset into the replacement array
     * @param replLength number of character to use from the
     *   replacement array.
     */
    void replace(int offset, int length,
                 char[] replArray, int replOffset, int replLength) {
        int delta = replLength - length;   // net change in content size
        int src = offset + length;         // first char after the removed span
        int nmove = count - src;           // trailing chars that must shift
        int dest = src + delta;            // where the trailing chars land
        if ((count + delta) >= data.length) {
            // need to grow the array
            int newLength = Math.max(2*data.length, count + delta);
            char[] newData = new char[newLength];
            System.arraycopy(data, 0, newData, 0, offset);
            System.arraycopy(replArray, replOffset, newData, offset, replLength);
            System.arraycopy(data, src, newData, dest, nmove);
            data = newData;
        } else {
            // patch the existing array
            System.arraycopy(data, src, data, dest, nmove);
            System.arraycopy(replArray, replOffset, data, offset, replLength);
        }
        count = count + delta;
    }

    /** Reallocates the backing array to exactly ncount chars, truncating if needed. */
    void resize(int ncount) {
        char[] ndata = new char[ncount];
        System.arraycopy(data, 0, ndata, 0, Math.min(ncount, count));
        data = ndata;
    }

    /** Shifts marks at/after the insertion point; also prunes dead marks. */
    synchronized void updateMarksForInsert(int offset, int length) {
        if (offset == 0) {
            // zero is a special case where we update only
            // marks after it.
            offset = 1;
        }
        int n = marks.size();
        for (int i = 0; i < n; i++) {
            PosRec mark = marks.elementAt(i);
            if (mark.unused) {
                // this record is no longer used, get rid of it
                // (compact the vector in place while iterating)
                marks.removeElementAt(i);
                i -= 1;
                n -= 1;
            } else if (mark.offset >= offset) {
                mark.offset += length;
            }
        }
    }

    /** Shifts or clamps marks affected by a removal; also prunes dead marks. */
    synchronized void updateMarksForRemove(int offset, int length) {
        int n = marks.size();
        for (int i = 0; i < n; i++) {
            PosRec mark = marks.elementAt(i);
            if (mark.unused) {
                // this record is no longer used, get rid of it
                marks.removeElementAt(i);
                i -= 1;
                n -= 1;
            } else if (mark.offset >= (offset + length)) {
                // past the removed span: shift left
                mark.offset -= length;
            } else if (mark.offset >= offset) {
                // inside the removed span: clamp to its start
                mark.offset = offset;
            }
        }
    }

    /**
     * Returns a Vector containing instances of UndoPosRef for the
     * Positions in the range
     * <code>offset</code> to <code>offset</code> + <code>length</code>.
     * If <code>v</code> is not null the matching Positions are placed in
     * there. The vector with the resulting Positions are returned.
     * <p>
     * This is meant for internal usage, and is generally not of interest
     * to subclasses.
     *
     * @param v the Vector to use, with a new one created on null
     * @param offset the starting offset >= 0
     * @param length the length >= 0
     * @return the set of instances
     */
    protected Vector getPositionsInRange(Vector v, int offset,
                                         int length) {
        int n = marks.size();
        int end = offset + length;
        Vector placeIn = (v == null) ? new Vector() : v;
        for (int i = 0; i < n; i++) {
            PosRec mark = marks.elementAt(i);
            if (mark.unused) {
                // this record is no longer used, get rid of it
                marks.removeElementAt(i);
                i -= 1;
                n -= 1;
            } else if(mark.offset >= offset && mark.offset <= end)
                placeIn.addElement(new UndoPosRef(mark));
        }
        return placeIn;
    }

    /**
     * Resets the location for all the UndoPosRef instances
     * in <code>positions</code>.
     * <p>
     * This is meant for internal usage, and is generally not of interest
     * to subclasses.
     *
     * @param positions the positions of the instances
     */
    protected void updateUndoPositions(Vector positions) {
        for(int counter = positions.size() - 1; counter >= 0; counter--) {
            UndoPosRef ref = (UndoPosRef)positions.elementAt(counter);
            // Check if the Position is still valid.
            if(ref.rec.unused) {
                positions.removeElementAt(counter);
            }
            else
                ref.resetLocation();
        }
    }

    // shared zero-length source used by remove() when calling replace()
    private static final char[] empty = new char[0];
    // character storage; only indices [0, count) hold valid content
    private char[] data;
    private int count;
    // lazily-created records backing the sticky Positions (see createPosition)
    transient Vector<PosRec> marks;

    /**
     * holds the data for a mark... separately from
     * the real mark so that the real mark can be
     * collected if there are no more references to
     * it.... the update table holds only a reference
     * to this grungy thing.
     */
    final class PosRec {

        PosRec(int offset) {
            this.offset = offset;
        }

        int offset;
        boolean unused;
    }

    /**
     * This really wants to be a weak reference but
     * in 1.1 we don't have a 100% pure solution for
     * this... so this class trys to hack a solution
     * to causing the marks to be collected.
     */
    final class StickyPosition implements Position {

        StickyPosition(int offset) {
            rec = new PosRec(offset);
            marks.addElement(rec);
        }

        public int getOffset() {
            return rec.offset;
        }

        // NOTE: relies on GC finalization to flag the shared PosRec; the
        // record itself is pruned lazily by the updateMarks* sweeps above.
        protected void finalize() throws Throwable {
            // schedule the record to be removed later
            // on another thread.
            rec.unused = true;
        }

        public String toString() {
            return Integer.toString(getOffset());
        }

        PosRec rec;
    }

    /**
     * Used to hold a reference to a Position that is being reset as the
     * result of removing from the content.
     */
    final class UndoPosRef {

        UndoPosRef(PosRec rec) {
            this.rec = rec;
            this.undoLocation = rec.offset;
        }

        /**
         * Resets the location of the Position to the offset when the
         * receiver was instantiated.
         */
        protected void resetLocation() {
            rec.offset = undoLocation;
        }

        /** Location to reset to when resetLocation is invoked. */
        protected int undoLocation;
        /** Position to reset offset. */
        protected PosRec rec;
    }

    /**
     * UndoableEdit created for inserts.
     */
    class InsertUndo extends AbstractUndoableEdit {

        protected InsertUndo(int offset, int length) {
            super();
            this.offset = offset;
            this.length = length;
        }

        public void undo() throws CannotUndoException {
            super.undo();
            try {
                synchronized(StringContent.this) {
                    // Get the Positions in the range being removed.
                    if(marks != null)
                        posRefs = getPositionsInRange(null, offset, length);
                    string = getString(offset, length);
                    remove(offset, length);
                }
            } catch (BadLocationException bl) {
                throw new CannotUndoException();
            }
        }

        public void redo() throws CannotRedoException {
            super.redo();
            try {
                synchronized(StringContent.this) {
                    insertString(offset, string);
                    string = null;
                    // Update the Positions that were in the range removed.
                    if(posRefs != null) {
                        updateUndoPositions(posRefs);
                        posRefs = null;
                    }
                }
            } catch (BadLocationException bl) {
                throw new CannotRedoException();
            }
        }

        // Where the string goes.
        protected int offset;
        // Length of the string.
        protected int length;
        // The string that was inserted. To cut down on space needed this
        // will only be valid after an undo.
        protected String string;
        // An array of instances of UndoPosRef for the Positions in the
        // range that was removed, valid after undo.
        protected Vector posRefs;
    }

    /**
     * UndoableEdit created for removes.
     */
    class RemoveUndo extends AbstractUndoableEdit {

        protected RemoveUndo(int offset, String string) {
            super();
            this.offset = offset;
            this.string = string;
            this.length = string.length();
            if(marks != null)
                posRefs = getPositionsInRange(null, offset, length);
        }

        public void undo() throws CannotUndoException {
            super.undo();
            try {
                synchronized(StringContent.this) {
                    insertString(offset, string);
                    // Update the Positions that were in the range removed.
                    if(posRefs != null) {
                        updateUndoPositions(posRefs);
                        posRefs = null;
                    }
                    string = null;
                }
            } catch (BadLocationException bl) {
                throw new CannotUndoException();
            }
        }

        public void redo() throws CannotRedoException {
            super.redo();
            try {
                synchronized(StringContent.this) {
                    string = getString(offset, length);
                    // Get the Positions in the range being removed.
                    if(marks != null)
                        posRefs = getPositionsInRange(null, offset, length);
                    remove(offset, length);
                }
            } catch (BadLocationException bl) {
                throw new CannotRedoException();
            }
        }

        // Where the string goes.
        protected int offset;
        // Length of the string.
        protected int length;
        // The string that was inserted. This will be null after an undo.
        protected String string;
        // An array of instances of UndoPosRef for the Positions in the
        // range that was removed, valid before undo.
        protected Vector posRefs;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core.triggers;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import org.apache.beam.runners.core.triggers.TriggerStateMachine.OnceTriggerStateMachine;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.windowing.AfterAll;
import org.apache.beam.sdk.transforms.windowing.AfterDelayFromFirstElement;
import org.apache.beam.sdk.transforms.windowing.AfterEach;
import org.apache.beam.sdk.transforms.windowing.AfterFirst;
import org.apache.beam.sdk.transforms.windowing.AfterPane;
import org.apache.beam.sdk.transforms.windowing.AfterProcessingTime;
import org.apache.beam.sdk.transforms.windowing.AfterSynchronizedProcessingTime;
import org.apache.beam.sdk.transforms.windowing.AfterWatermark;
import org.apache.beam.sdk.transforms.windowing.DefaultTrigger;
import org.apache.beam.sdk.transforms.windowing.Never.NeverTrigger;
import org.apache.beam.sdk.transforms.windowing.OrFinallyTrigger;
import org.apache.beam.sdk.transforms.windowing.Repeatedly;
import org.apache.beam.sdk.transforms.windowing.Trigger;
import org.apache.beam.sdk.transforms.windowing.Trigger.OnceTrigger;
import org.apache.beam.sdk.util.ReshuffleTrigger;
import org.apache.beam.sdk.util.TimeDomain;
import org.joda.time.Instant;
/**
 * Translates a {@link Trigger} to a {@link TriggerStateMachine}.
 *
 * <p>Pure static utility (private constructor); the translation itself is
 * performed by the reflection-based {@link StateMachineConverter} singleton.
 */
public class TriggerStateMachines {

    private TriggerStateMachines() {}

    @VisibleForTesting static final StateMachineConverter CONVERTER = new StateMachineConverter();

    /** Returns the state machine equivalent of the given {@link Trigger}. */
    public static TriggerStateMachine stateMachineForTrigger(Trigger trigger) {
        return CONVERTER.evaluateTrigger(trigger);
    }

    /** Returns the state machine equivalent of the given {@link OnceTrigger}. */
    public static OnceTriggerStateMachine stateMachineForOnceTrigger(OnceTrigger trigger) {
        return CONVERTER.evaluateOnceTrigger(trigger);
    }

    /**
     * Converts a trigger by reflectively dispatching to the {@code evaluateSpecific}
     * overload whose parameter type is the trigger's exact runtime class
     * (see {@link #getEvaluationMethod}).
     */
    @VisibleForTesting
    static class StateMachineConverter {

        public TriggerStateMachine evaluateTrigger(Trigger trigger) {
            Method evaluationMethod = getEvaluationMethod(trigger.getClass());
            return tryEvaluate(evaluationMethod, trigger);
        }

        public OnceTriggerStateMachine evaluateOnceTrigger(OnceTrigger trigger) {
            Method evaluationMethod = getEvaluationMethod(trigger.getClass());
            return (OnceTriggerStateMachine) tryEvaluate(evaluationMethod, trigger);
        }

        private TriggerStateMachine tryEvaluate(Method evaluationMethod, Trigger trigger) {
            try {
                return (TriggerStateMachine) evaluationMethod.invoke(this, trigger);
            } catch (InvocationTargetException exc) {
                // Unwrap the reflective wrapper so callers see the original failure.
                if (exc.getCause() instanceof RuntimeException) {
                    throw (RuntimeException) exc.getCause();
                } else {
                    throw new RuntimeException(exc.getCause());
                }
            } catch (IllegalAccessException exc) {
                throw new IllegalStateException(
                    String.format("Internal error: could not invoke %s", evaluationMethod));
            }
        }

        // Exact-class lookup: each supported trigger class needs its own
        // evaluateSpecific overload below; subclasses are NOT matched, and an
        // unknown trigger class fails fast here.
        private Method getEvaluationMethod(Class<?> clazz) {
            try {
                return getClass().getDeclaredMethod("evaluateSpecific", clazz);
            } catch (NoSuchMethodException exc) {
                throw new UnsupportedOperationException(
                    String.format(
                        "Cannot translate trigger class %s to a state machine.", clazz.getCanonicalName()),
                    exc);
            }
        }

        // The evaluateSpecific overloads below look unused, but they are the
        // targets of the reflective dispatch above — do not remove them.

        private TriggerStateMachine evaluateSpecific(DefaultTrigger v) {
            return DefaultTriggerStateMachine.of();
        }

        private TriggerStateMachine evaluateSpecific(ReshuffleTrigger v) {
            return new ReshuffleTriggerStateMachine();
        }

        private OnceTriggerStateMachine evaluateSpecific(AfterWatermark.FromEndOfWindow v) {
            return AfterWatermarkStateMachine.pastEndOfWindow();
        }

        private OnceTriggerStateMachine evaluateSpecific(NeverTrigger v) {
            return NeverStateMachine.ever();
        }

        private OnceTriggerStateMachine evaluateSpecific(AfterSynchronizedProcessingTime v) {
            return new AfterSynchronizedProcessingTimeStateMachine();
        }

        // Composite triggers: translate each sub-trigger recursively.

        private OnceTriggerStateMachine evaluateSpecific(AfterFirst v) {
            List<OnceTriggerStateMachine> subStateMachines =
                Lists.newArrayListWithCapacity(v.subTriggers().size());
            for (Trigger subtrigger : v.subTriggers()) {
                subStateMachines.add(stateMachineForOnceTrigger((OnceTrigger) subtrigger));
            }
            return AfterFirstStateMachine.of(subStateMachines);
        }

        private OnceTriggerStateMachine evaluateSpecific(AfterAll v) {
            List<OnceTriggerStateMachine> subStateMachines =
                Lists.newArrayListWithCapacity(v.subTriggers().size());
            for (Trigger subtrigger : v.subTriggers()) {
                subStateMachines.add(stateMachineForOnceTrigger((OnceTrigger) subtrigger));
            }
            return AfterAllStateMachine.of(subStateMachines);
        }

        private OnceTriggerStateMachine evaluateSpecific(AfterPane v) {
            return AfterPaneStateMachine.elementCountAtLeast(v.getElementCount());
        }

        private TriggerStateMachine evaluateSpecific(AfterWatermark.AfterWatermarkEarlyAndLate v) {
            AfterWatermarkStateMachine.AfterWatermarkEarlyAndLate machine =
                AfterWatermarkStateMachine.pastEndOfWindow()
                    .withEarlyFirings(stateMachineForOnceTrigger(v.getEarlyTrigger()));
            // The late-firings part is optional on the source trigger.
            if (v.getLateTrigger() != null) {
                machine = machine.withLateFirings(stateMachineForOnceTrigger(v.getLateTrigger()));
            }
            return machine;
        }

        private TriggerStateMachine evaluateSpecific(AfterEach v) {
            List<TriggerStateMachine> subStateMachines =
                Lists.newArrayListWithCapacity(v.subTriggers().size());
            for (Trigger subtrigger : v.subTriggers()) {
                subStateMachines.add(stateMachineForTrigger(subtrigger));
            }
            return AfterEachStateMachine.inOrder(subStateMachines);
        }

        private TriggerStateMachine evaluateSpecific(Repeatedly v) {
            return RepeatedlyStateMachine.forever(stateMachineForTrigger(v.getRepeatedTrigger()));
        }

        private TriggerStateMachine evaluateSpecific(OrFinallyTrigger v) {
            return new OrFinallyStateMachine(
                stateMachineForTrigger(v.getMainTrigger()),
                stateMachineForOnceTrigger(v.getUntilTrigger()));
        }

        // AfterProcessingTime is handled via its AfterDelayFromFirstElement base.
        private OnceTriggerStateMachine evaluateSpecific(AfterProcessingTime v) {
            return evaluateSpecific((AfterDelayFromFirstElement) v);
        }

        private OnceTriggerStateMachine evaluateSpecific(final AfterDelayFromFirstElement v) {
            return new AfterDelayFromFirstElementStateMachineAdapter(v);
        }

        /**
         * Adapts an {@link AfterDelayFromFirstElement} trigger to the state-machine
         * equivalent by copying its time domain and timestamp mappers.
         */
        private static class AfterDelayFromFirstElementStateMachineAdapter
            extends AfterDelayFromFirstElementStateMachine {

            public AfterDelayFromFirstElementStateMachineAdapter(AfterDelayFromFirstElement v) {
                this(v.getTimeDomain(), v.getTimestampMappers());
            }

            private AfterDelayFromFirstElementStateMachineAdapter(
                TimeDomain timeDomain, List<SerializableFunction<Instant, Instant>> timestampMappers) {
                super(timeDomain, timestampMappers);
            }

            @Override
            public Instant getCurrentTime(TriggerContext context) {
                // Pick the clock that matches this trigger's time domain.
                switch (timeDomain) {
                    case PROCESSING_TIME:
                        return context.currentProcessingTime();
                    case SYNCHRONIZED_PROCESSING_TIME:
                        return context.currentSynchronizedProcessingTime();
                    case EVENT_TIME:
                        return context.currentEventTime();
                    default:
                        throw new IllegalArgumentException("A time domain that doesn't exist was received!");
                }
            }

            @Override
            protected AfterDelayFromFirstElementStateMachine newWith(
                List<SerializableFunction<Instant, Instant>> transform) {
                return new AfterDelayFromFirstElementStateMachineAdapter(timeDomain, transform);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.TimeUnit;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.RowUpdateBuilder;
import org.apache.cassandra.db.lifecycle.SSTableSet;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.utils.Pair;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.getBuckets;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.newestBucket;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.filterOldSSTables;
import static org.apache.cassandra.db.compaction.DateTieredCompactionStrategy.validateOptions;
import static org.junit.Assert.*;
public class DateTieredCompactionStrategyTest extends SchemaLoader
{
/** Keyspace created once in defineSchema() and shared by every test in this class. */
public static final String KEYSPACE1 = "DateTieredCompactionStrategyTest";
/** Name of the single table used by the tests. */
private static final String CF_STANDARD1 = "Standard1";
/** One-time setup: boot a minimal server and create the keyspace/table the tests use. */
@BeforeClass
public static void defineSchema() throws ConfigurationException
{
    SchemaLoader.prepareServer();
    SchemaLoader.createKeyspace(KEYSPACE1,
                                KeyspaceParams.simple(1),
                                SchemaLoader.standardCFMD(KEYSPACE1, CF_STANDARD1));
}
/**
 * validateOptions() must reject zero/negative values for the numeric options
 * and must return unrecognized keys to the caller instead of failing.
 *
 * <p>Note: the catch blocks deliberately put a valid value back into the shared
 * {@code options} map so each later case only trips on its own option.
 */
@Test
public void testOptionsValidation() throws ConfigurationException
{
    Map<String, String> options = new HashMap<>();
    options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "30");
    options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "1825");
    // All keys recognized and valid -> nothing is left unvalidated.
    Map<String, String> unvalidated = validateOptions(options);
    assertTrue(unvalidated.isEmpty());

    // base time of zero must be rejected
    try
    {
        options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "0");
        validateOptions(options);
        fail(String.format("%s == 0 should be rejected", DateTieredCompactionStrategyOptions.BASE_TIME_KEY));
    }
    catch (ConfigurationException e) {}

    // negative base time must be rejected
    try
    {
        options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "-1337");
        validateOptions(options);
        fail(String.format("Negative %s should be rejected", DateTieredCompactionStrategyOptions.BASE_TIME_KEY));
    }
    catch (ConfigurationException e)
    {
        // restore a valid value for the following cases
        options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "1");
    }

    // negative max sstable age must be rejected (zero is acceptable)
    try
    {
        options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "-1337");
        validateOptions(options);
        fail(String.format("Negative %s should be rejected", DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY));
    }
    catch (ConfigurationException e)
    {
        options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "0");
    }

    // negative max window size must be rejected (zero is acceptable)
    try
    {
        options.put(DateTieredCompactionStrategyOptions.MAX_WINDOW_SIZE_KEY, "-1");
        validateOptions(options);
        fail(String.format("Negative %s should be rejected", DateTieredCompactionStrategyOptions.MAX_WINDOW_SIZE_KEY));
    }
    catch (ConfigurationException e)
    {
        options.put(DateTieredCompactionStrategyOptions.MAX_WINDOW_SIZE_KEY, "0");
    }

    // unknown keys are returned to the caller, not treated as errors
    options.put("bad_option", "1.0");
    unvalidated = validateOptions(options);
    assertTrue(unvalidated.containsKey("bad_option"));
}
/**
 * The max-sstable-age option (given in days, possibly fractional) must be
 * converted into the configured timestamp resolution.
 */
@Test
public void testTimeConversions()
{
    Map<String, String> options = new HashMap<>();
    options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "30");
    options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "SECONDS");
    DateTieredCompactionStrategyOptions opts = new DateTieredCompactionStrategyOptions(options);
    // No explicit max age -> default of 365*1000 days, expressed in seconds here.
    assertEquals(opts.maxSSTableAge, TimeUnit.SECONDS.convert(365*1000, TimeUnit.DAYS));

    // Same default expressed in milliseconds.
    options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "MILLISECONDS");
    opts = new DateTieredCompactionStrategyOptions(options);
    assertEquals(opts.maxSSTableAge, TimeUnit.MILLISECONDS.convert(365*1000, TimeUnit.DAYS));

    // Explicit 10 days at microsecond resolution.
    options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "MICROSECONDS");
    options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "10");
    opts = new DateTieredCompactionStrategyOptions(options);
    assertEquals(opts.maxSSTableAge, TimeUnit.MICROSECONDS.convert(10, TimeUnit.DAYS));

    // Fractional days are supported: half a day in microseconds.
    options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "0.5");
    opts = new DateTieredCompactionStrategyOptions(options);
    assertEquals(opts.maxSSTableAge, TimeUnit.MICROSECONDS.convert(1, TimeUnit.DAYS) / 2);

    // Half a day at hour resolution -> 12.
    options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "HOURS");
    options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, "0.5");
    opts = new DateTieredCompactionStrategyOptions(options);
    assertEquals(opts.maxSSTableAge, 12);
}
@Test
public void testGetBuckets()
{
    // Each pair is (sstable label, min timestamp). getBuckets groups entries
    // into time windows; entries newer than the "now" argument are excluded.
    List<Pair<String, Long>> pairs = Lists.newArrayList(
            Pair.create("a", 199L),
            Pair.create("b", 299L),
            Pair.create("a", 1L),
            Pair.create("b", 201L)
    );
    // Window size 100, base 2, now = 200: the two "a" entries and the two
    // "b" entries fall into separate windows.
    List<List<String>> buckets = getBuckets(pairs, 100L, 2, 200L, Long.MAX_VALUE);
    assertEquals(2, buckets.size());
    for (List<String> bucket : buckets)
    {
        // Both members of each bucket carry the same label.
        assertEquals(2, bucket.size());
        assertEquals(bucket.get(0), bucket.get(1));
    }
    pairs = Lists.newArrayList(
            Pair.create("a", 2000L),
            Pair.create("b", 3600L),
            Pair.create("a", 200L),
            Pair.create("c", 3950L),
            Pair.create("too new", 4125L),
            Pair.create("b", 3899L),
            Pair.create("c", 3900L)
    );
    buckets = getBuckets(pairs, 100L, 3, 4050L, Long.MAX_VALUE);
    // targets (divPosition, size): (40, 100), (39, 100), (12, 300), (3, 900), (0, 2700)
    // in other words: 0 - 2699, 2700 - 3599, 3600 - 3899, 3900 - 3999, 4000 - 4099
    // "too new" (4125) lies beyond now=4050 and is dropped, leaving 3 paired buckets.
    assertEquals(3, buckets.size());
    for (List<String> bucket : buckets)
    {
        assertEquals(2, bucket.size());
        assertEquals(bucket.get(0), bucket.get(1));
    }
    // Test base 1.
    pairs = Lists.newArrayList(
            Pair.create("a", 200L),
            Pair.create("a", 299L),
            Pair.create("b", 2000L),
            Pair.create("b", 2014L),
            Pair.create("c", 3610L),
            Pair.create("c", 3690L),
            Pair.create("d", 3898L),
            Pair.create("d", 3899L),
            Pair.create("e", 3900L),
            Pair.create("e", 3950L),
            Pair.create("too new", 4125L)
    );
    // With base 1 every window has the same size (100), so the five label
    // pairs land in five distinct buckets; "too new" is again excluded.
    buckets = getBuckets(pairs, 100L, 1, 4050L, Long.MAX_VALUE);
    assertEquals(5, buckets.size());
    for (List<String> bucket : buckets)
    {
        assertEquals(2, bucket.size());
        assertEquals(bucket.get(0), bucket.get(1));
    }
}
@Test
public void testPrepBucket()
{
    Keyspace keyspace = Keyspace.open(KEYSPACE1);
    ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF_STANDARD1);
    // Disable compaction so the flushed sstables stay exactly as written.
    cfs.disableAutoCompaction();
    ByteBuffer value = ByteBuffer.wrap(new byte[100]);
    // create 3 sstables
    int numSSTables = 3;
    for (int r = 0; r < numSSTables; r++)
    {
        DecoratedKey key = Util.dk(String.valueOf(r));
        // One row per sstable, written with timestamp r so each sstable has
        // a distinct (and single-valued) timestamp range.
        new RowUpdateBuilder(cfs.metadata(), r, key.getKey())
            .clustering("column")
            .add("val", value).build().applyUnsafe();
        cfs.forceBlockingFlush();
    }
    cfs.forceBlockingFlush();
    List<SSTableReader> sstrs = new ArrayList<>(cfs.getLiveSSTables());
    // NOTE(review): arguments presumably are (buckets, minThreshold,
    // maxThreshold, now, baseTime, maxWindowSize, stcsOptions) — confirm
    // against the newestBucket signature.
    List<SSTableReader> newBucket = newestBucket(Collections.singletonList(sstrs.subList(0, 2)), 4, 32, 9, 10, Long.MAX_VALUE, new SizeTieredCompactionStrategyOptions());
    assertTrue("incoming bucket should not be accepted when it has below the min threshold SSTables", newBucket.isEmpty());
    newBucket = newestBucket(Collections.singletonList(sstrs.subList(0, 2)), 4, 32, 10, 10, Long.MAX_VALUE, new SizeTieredCompactionStrategyOptions());
    assertFalse("non-incoming bucket should be accepted when it has at least 2 SSTables", newBucket.isEmpty());
    // Each sstable holds a single cell, so min and max timestamps coincide.
    assertEquals("an sstable with a single value should have equal min/max timestamps", sstrs.get(0).getMinTimestamp(), sstrs.get(0).getMaxTimestamp());
    assertEquals("an sstable with a single value should have equal min/max timestamps", sstrs.get(1).getMinTimestamp(), sstrs.get(1).getMaxTimestamp());
    assertEquals("an sstable with a single value should have equal min/max timestamps", sstrs.get(2).getMinTimestamp(), sstrs.get(2).getMaxTimestamp());
    // Clean up so later tests start from an empty table.
    cfs.truncateBlocking();
}
@Test
public void testFilterOldSSTables()
{
    Keyspace keyspace = Keyspace.open(KEYSPACE1);
    ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF_STANDARD1);
    cfs.disableAutoCompaction();
    ByteBuffer value = ByteBuffer.wrap(new byte[100]);
    // create 3 sstables
    int numSSTables = 3;
    for (int r = 0; r < numSSTables; r++)
    {
        DecoratedKey key = Util.dk(String.valueOf(r));
        // Timestamp r gives the three sstables max timestamps 0, 1 and 2.
        new RowUpdateBuilder(cfs.metadata(), r, key.getKey())
            .clustering("column")
            .add("val", value).build().applyUnsafe();
        cfs.forceBlockingFlush();
    }
    cfs.forceBlockingFlush();
    Iterable<SSTableReader> filtered;
    List<SSTableReader> sstrs = new ArrayList<>(cfs.getLiveSSTables());
    // NOTE(review): arguments presumably are (sstables, maxSSTableAge, now);
    // an sstable is filtered when its max timestamp is older than now - age.
    filtered = filterOldSSTables(sstrs, 0, 2);
    assertEquals("when maxSSTableAge is zero, no sstables should be filtered", sstrs.size(), Iterables.size(filtered));
    filtered = filterOldSSTables(sstrs, 1, 2);
    assertEquals("only the newest 2 sstables should remain", 2, Iterables.size(filtered));
    filtered = filterOldSSTables(sstrs, 1, 3);
    assertEquals("only the newest sstable should remain", 1, Iterables.size(filtered));
    filtered = filterOldSSTables(sstrs, 1, 4);
    assertEquals("no sstables should remain when all are too old", 0, Iterables.size(filtered));
    cfs.truncateBlocking();
}
@Test
public void testDropExpiredSSTables() throws InterruptedException
{
    Keyspace keyspace = Keyspace.open(KEYSPACE1);
    ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF_STANDARD1);
    cfs.disableAutoCompaction();
    ByteBuffer value = ByteBuffer.wrap(new byte[100]);
    // create 2 sstables
    DecoratedKey key = Util.dk(String.valueOf("expired"));
    // NOTE(review): the extra int argument (1) appears to be a TTL in
    // seconds, making this row — and hence the whole sstable — expire;
    // confirm against the RowUpdateBuilder constructor overloads.
    new RowUpdateBuilder(cfs.metadata(), System.currentTimeMillis(), 1, key.getKey())
        .clustering("column")
        .add("val", value).build().applyUnsafe();
    cfs.forceBlockingFlush();
    SSTableReader expiredSSTable = cfs.getLiveSSTables().iterator().next();
    // Small gap so the second sstable is strictly newer than the first.
    Thread.sleep(10);
    key = Util.dk(String.valueOf("nonexpired"));
    // No TTL here: this sstable must survive.
    new RowUpdateBuilder(cfs.metadata(), System.currentTimeMillis(), key.getKey())
        .clustering("column")
        .add("val", value).build().applyUnsafe();
    cfs.forceBlockingFlush();
    assertEquals(cfs.getLiveSSTables().size(), 2);
    Map<String, String> options = new HashMap<>();
    options.put(DateTieredCompactionStrategyOptions.BASE_TIME_KEY, "30");
    options.put(DateTieredCompactionStrategyOptions.TIMESTAMP_RESOLUTION_KEY, "MILLISECONDS");
    // Max age of one second (expressed in days) so old sstables age out fast.
    options.put(DateTieredCompactionStrategyOptions.MAX_SSTABLE_AGE_KEY, Double.toString((1d / (24 * 60 * 60))));
    // Check expired sstables on every call instead of throttling the check.
    options.put(DateTieredCompactionStrategyOptions.EXPIRED_SSTABLE_CHECK_FREQUENCY_SECONDS_KEY, "0");
    DateTieredCompactionStrategy dtcs = new DateTieredCompactionStrategy(cfs, options);
    for (SSTableReader sstable : cfs.getLiveSSTables())
        dtcs.addSSTable(sstable);
    dtcs.startup();
    // Immediately after startup nothing has expired yet.
    assertNull(dtcs.getNextBackgroundTask((int) (System.currentTimeMillis() / 1000)));
    // Wait past the 1-second TTL so the first sstable becomes fully expired.
    Thread.sleep(2000);
    AbstractCompactionTask t = dtcs.getNextBackgroundTask((int) (System.currentTimeMillis()/1000));
    assertNotNull(t);
    // The drop-expired task must target exactly the TTL'd sstable.
    assertEquals(1, Iterables.size(t.transaction.originals()));
    SSTableReader sstable = t.transaction.originals().iterator().next();
    assertEquals(sstable, expiredSSTable);
    t.transaction.abort();
    cfs.truncateBlocking();
}
@Test
public void testSTCSBigWindow()
{
    // Within a single (big) DTCS time window, size-tiered compaction should
    // group sstables of similar size: here the 20 small ones, not the big ones.
    Keyspace keyspace = Keyspace.open(KEYSPACE1);
    ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF_STANDARD1);
    cfs.disableAutoCompaction();
    ByteBuffer bigValue = ByteBuffer.wrap(new byte[10000]);
    ByteBuffer value = ByteBuffer.wrap(new byte[100]);
    int numSSTables = 40;
    // create big sstables out of half:
    long timestamp = System.currentTimeMillis();
    for (int r = 0; r < numSSTables / 2; r++)
    {
        // 10 large rows per sstable; all writes share one timestamp so every
        // sstable lands in the same DTCS window.
        for (int i = 0; i < 10; i++)
        {
            DecoratedKey key = Util.dk(String.valueOf(r));
            new RowUpdateBuilder(cfs.metadata(), timestamp, key.getKey())
                .clustering("column")
                .add("val", bigValue).build().applyUnsafe();
        }
        cfs.forceBlockingFlush();
    }
    // and small ones:
    for (int r = 0; r < numSSTables / 2; r++)
    {
        DecoratedKey key = Util.dk(String.valueOf(r));
        new RowUpdateBuilder(cfs.metadata(), timestamp, key.getKey())
            .clustering("column")
            .add("val", value).build().applyUnsafe();
        cfs.forceBlockingFlush();
    }
    Map<String, String> options = new HashMap<>();
    // Tiny min size so the small sstables are tiered by actual size rather
    // than being lumped into the "small" catch-all bucket.
    options.put(SizeTieredCompactionStrategyOptions.MIN_SSTABLE_SIZE_KEY, "1");
    DateTieredCompactionStrategy dtcs = new DateTieredCompactionStrategy(cfs, options);
    for (SSTableReader sstable : cfs.getSSTables(SSTableSet.CANONICAL))
        dtcs.addSSTable(sstable);
    AbstractCompactionTask task = dtcs.getNextBackgroundTask(0);
    // The task should pick exactly the 20 similarly-sized (small) sstables.
    assertEquals(20, task.transaction.originals().size());
    task.transaction.abort();
    cfs.truncateBlocking();
}
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Preconditions;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Iterator;
import javax.annotation.Nullable;
/**
 * ReplaceMessages replaces user-visible messages with alternatives.
 * It uses Google specific JsMessageVisitor implementation.
 *
 * <p>For each message found by the visitor, the translated version is looked
 * up in a {@link MessageBundle} and the message's AST value node is rewritten
 * in place to the translated text (with placeholders substituted).
 *
 * @author anatol@google.com (Anatol Pomazau)
 */
@GwtIncompatible("JsMessage")
final class ReplaceMessages extends JsMessageVisitor {
  /** Source of translated messages, keyed by message id. */
  private final MessageBundle bundle;
  /**
   * When true, a message missing from the bundle is reported as an error;
   * when false, the original (untranslated) message is kept silently.
   */
  private final boolean strictReplacement;
  static final DiagnosticType BUNDLE_DOES_NOT_HAVE_THE_MESSAGE =
      DiagnosticType.error("JSC_BUNDLE_DOES_NOT_HAVE_THE_MESSAGE",
          "Message with id = {0} could not be found in replacement bundle");
  ReplaceMessages(AbstractCompiler compiler, MessageBundle bundle,
      boolean checkDuplicatedMessages, JsMessage.Style style,
      boolean strictReplacement) {
    super(compiler, checkDuplicatedMessages, style, bundle.idGenerator());
    this.bundle = bundle;
    this.strictReplacement = strictReplacement;
  }
  @Override
  void processMessageFallback(
      Node callNode, JsMessage message1, JsMessage message2) {
    // Keep the second message only when it is the sole translated one of the
    // two; in every other case the first message (second child of the
    // fallback call) wins.
    boolean isFirstMessageTranslated =
        (bundle.getMessage(message1.getId()) != null);
    boolean isSecondMessageTranslated =
        (bundle.getMessage(message2.getId()) != null);
    Node replacementNode =
        isSecondMessageTranslated && !isFirstMessageTranslated ?
        callNode.getChildAtIndex(2) : callNode.getSecondChild();
    // Replace the whole fallback call with the chosen message expression.
    callNode.getParent().replaceChild(callNode,
        replacementNode.detachFromParent());
  }
  @Override
  protected void processJsMessage(JsMessage message,
      JsMessageDefinition definition) {
    // Get the replacement.
    JsMessage replacement = bundle.getMessage(message.getId());
    if (replacement == null) {
      if (strictReplacement) {
        compiler.report(JSError.make(
            definition.getMessageNode(), BUNDLE_DOES_NOT_HAVE_THE_MESSAGE,
            message.getId()));
        // Fallback to the default message
        return;
      } else {
        // In case if it is not a strict replacement we could leave original
        // message.
        replacement = message;
      }
    }
    // Replace the message.
    Node newValue;
    Node msgNode = definition.getMessageNode();
    try {
      newValue = getNewValueNode(replacement, msgNode);
    } catch (MalformedException e) {
      // On malformed input, report and leave the original node untouched.
      compiler.report(JSError.make(
          e.getNode(), MESSAGE_TREE_MALFORMED, e.getMessage()));
      newValue = msgNode;
    }
    // getNewValueNode may return msgNode itself (modified in place); only
    // splice and report a change when a distinct node came back.
    if (newValue != msgNode) {
      newValue.useSourceInfoIfMissingFromForTree(msgNode);
      msgNode.getParent().replaceChild(msgNode, newValue);
      compiler.reportCodeChange();
    }
  }
  /**
   * Constructs a node representing a message's value, or, if possible, just
   * modifies {@code origValueNode} so that it accurately represents the
   * message's value.
   *
   * @param message a message
   * @param origValueNode the message's original value node
   * @return a Node that can replace {@code origValueNode}
   *
   * @throws MalformedException if the passed node's subtree structure is
   *     not as expected
   */
  private Node getNewValueNode(JsMessage message, Node origValueNode)
      throws MalformedException {
    switch (origValueNode.getType()) {
      case FUNCTION:
        // The message is a function. Modify the function node.
        updateFunctionNode(message, origValueNode);
        return origValueNode;
      case STRING:
        // The message is a simple string. Modify the string node.
        String newString = message.toString();
        if (!origValueNode.getString().equals(newString)) {
          origValueNode.setString(newString);
          compiler.reportCodeChange();
        }
        return origValueNode;
      case ADD:
        // The message is a simple string. Create a string node.
        return IR.string(message.toString());
      case CALL:
        // The message is a function call. Replace it with a string expression.
        return replaceCallNode(message, origValueNode);
      default:
        throw new MalformedException(
            "Expected FUNCTION, STRING, or ADD node; found: " +
                origValueNode.getType(), origValueNode);
    }
  }
  /**
   * Updates the descendants of a FUNCTION node to represent a message's value.
   * <p>
   * The tree looks something like:
   * <pre>
   * function
   *  |-- name
   *  |-- lp
   *  |   |-- name <arg1>
   *  |    -- name <arg2>
   *   -- block
   *      |
   *       --return
   *           |
   *            --add
   *               |-- string foo
   *                -- name <arg1>
   * </pre>
   *
   * @param message a message
   * @param functionNode the message's original FUNCTION value node
   *
   * @throws MalformedException if the passed node's subtree structure is
   *     not as expected
   */
  private void updateFunctionNode(JsMessage message, Node functionNode)
      throws MalformedException {
    checkNode(functionNode, Token.FUNCTION);
    Node nameNode = functionNode.getFirstChild();
    checkNode(nameNode, Token.NAME);
    Node argListNode = nameNode.getNext();
    checkNode(argListNode, Token.PARAM_LIST);
    Node oldBlockNode = argListNode.getNext();
    checkNode(oldBlockNode, Token.BLOCK);
    Iterator<CharSequence> iterator = message.parts().iterator();
    // An empty message becomes `return "";`.
    Node valueNode = iterator.hasNext()
        ? constructAddOrStringNode(iterator, argListNode)
        : IR.string("");
    Node newBlockNode = IR.block(IR.returnNode(valueNode));
    // TODO(user): checkTreeEqual is overkill. I am in process of rewriting
    // these functions.
    // NOTE(review): checkTreeEquals presumably returns null when the trees
    // are equal, so a non-null result means the body actually changed.
    if (newBlockNode.checkTreeEquals(oldBlockNode) != null) {
      newBlockNode.useSourceInfoIfMissingFromForTree(oldBlockNode);
      functionNode.replaceChild(oldBlockNode, newBlockNode);
      compiler.reportCodeChange();
    }
  }
  /**
   * Creates a parse tree corresponding to the remaining message parts in
   * an iteration. The result will contain only STRING nodes, NAME nodes
   * (corresponding to placeholder references), and/or ADD nodes used to
   * combine the other two types.
   *
   * @param partsIterator an iterator over message parts
   * @param argListNode an LP node whose children are valid placeholder names
   * @return the root of the constructed parse tree
   *
   * @throws MalformedException if {@code partsIterator} contains a
   *     placeholder reference that does not correspond to a valid argument in
   *     the arg list
   */
  private static Node constructAddOrStringNode(Iterator<CharSequence> partsIterator,
      Node argListNode)
      throws MalformedException {
    CharSequence part = partsIterator.next();
    Node partNode = null;
    if (part instanceof JsMessage.PlaceholderReference) {
      JsMessage.PlaceholderReference phRef =
          (JsMessage.PlaceholderReference) part;
      // Resolve the placeholder against the function's own parameter names.
      for (Node node : argListNode.children()) {
        if (node.isName()) {
          String arg = node.getString();
          // We ignore the case here because the transconsole only supports
          // uppercase placeholder names, but function arguments in JavaScript
          // code can have mixed case.
          if (arg.equalsIgnoreCase(phRef.getName())) {
            partNode = IR.name(arg);
          }
        }
      }
      if (partNode == null) {
        throw new MalformedException(
            "Unrecognized message placeholder referenced: " + phRef.getName(),
            argListNode);
      }
    } else {
      // The part is just a string literal.
      partNode = IR.string(part.toString());
    }
    // Recursively fold the remaining parts into a right-leaning ADD chain.
    if (partsIterator.hasNext()) {
      return IR.add(partNode,
          constructAddOrStringNode(partsIterator, argListNode));
    } else {
      return partNode;
    }
  }
  /**
   * Replaces a CALL node with an inlined message value.
   * <p>
   * The call tree looks something like:
   * <pre>
   * call
   *  |-- getprop
   *  |   |-- name 'goog'
   *  |   +-- string 'getMsg'
   *  |
   *  |-- string 'Hi {$userName}! Welcome to {$product}.'
   *  +-- objlit
   *      |-- string 'userName'
   *      |-- name 'someUserName'
   *      |-- string 'product'
   *      +-- call
   *          +-- name 'getProductName'
   * <pre>
   * <p>
   * For that example, we'd return:
   * <pre>
   * add
   *  |-- string 'Hi '
   *  +-- add
   *      |-- name someUserName
   *      +-- add
   *          |-- string '! Welcome to '
   *          +-- add
   *              |-- call
   *              |   +-- name 'getProductName'
   *              +-- string '.'
   * </pre>
   * @param message a message
   * @param callNode the message's original CALL value node
   * @return a STRING node, or an ADD node that does string concatenation, if
   *     the message has one or more placeholders
   *
   * @throws MalformedException if the passed node's subtree structure is
   *     not as expected
   */
  private Node replaceCallNode(JsMessage message, Node callNode)
      throws MalformedException {
    checkNode(callNode, Token.CALL);
    Node getPropNode = callNode.getFirstChild();
    checkNode(getPropNode, Token.GETPROP);
    Node stringExprNode = getPropNode.getNext();
    checkStringExprNode(stringExprNode);
    // The placeholder map is optional; objLitNode may be null.
    Node objLitNode = stringExprNode.getNext();
    // Build the replacement tree.
    return constructStringExprNode(
        message.parts().iterator(), objLitNode, callNode);
  }
  /**
   * Creates a parse tree corresponding to the remaining message parts in an
   * iteration. The result consists of one or more STRING nodes, placeholder
   * replacement value nodes (which can be arbitrary expressions), and ADD
   * nodes.
   *
   * @param parts an iterator over message parts
   * @param objLitNode an OBJLIT node mapping placeholder names to values
   * @return the root of the constructed parse tree
   *
   * @throws MalformedException if {@code parts} contains a placeholder
   *     reference that does not correspond to a valid placeholder name
   */
  private static Node constructStringExprNode(
      Iterator<CharSequence> parts, Node objLitNode, Node refNode) throws MalformedException {
    Preconditions.checkNotNull(refNode);
    CharSequence part = parts.next();
    Node partNode = null;
    if (part instanceof JsMessage.PlaceholderReference) {
      JsMessage.PlaceholderReference phRef =
          (JsMessage.PlaceholderReference) part;
      // The translated message is null
      if (objLitNode == null) {
        throw new MalformedException("Empty placeholder value map " +
            "for a translated message with placeholders.", refNode);
      }
      for (Node key = objLitNode.getFirstChild(); key != null;
           key = key.getNext()) {
        if (key.getString().equals(phRef.getName())) {
          Node valueNode = key.getFirstChild();
          // Clone: the same placeholder may be referenced more than once.
          partNode = valueNode.cloneTree();
        }
      }
      if (partNode == null) {
        throw new MalformedException(
            "Unrecognized message placeholder referenced: " + phRef.getName(),
            objLitNode);
      }
    } else {
      // The part is just a string literal.
      partNode = IR.string(part.toString());
    }
    // Recursively fold the remaining parts into a right-leaning ADD chain.
    if (parts.hasNext()) {
      return IR.add(partNode,
          constructStringExprNode(parts, objLitNode, refNode));
    } else {
      return partNode;
    }
  }
  /**
   * Checks that a node is a valid string expression (either a string literal
   * or a concatenation of string literals).
   *
   * @throws IllegalArgumentException if the node is null or the wrong type
   */
  private static void checkStringExprNode(@Nullable Node node) {
    if (node == null) {
      throw new IllegalArgumentException("Expected a string; found: null");
    }
    switch (node.getType()) {
      case STRING:
        break;
      case ADD:
        // Validate both operands of the concatenation recursively.
        Node c = node.getFirstChild();
        checkStringExprNode(c);
        checkStringExprNode(c.getNext());
        break;
      default:
        throw new IllegalArgumentException(
            "Expected a string; found: " + node.getType());
    }
  }
}
| |
/*
* Copyright (c) 1995, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.awt;
import java.awt.peer.TextFieldPeer;
import java.awt.event.*;
import java.util.EventListener;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
import java.io.IOException;
import javax.accessibility.*;
/**
* A <code>TextField</code> object is a text component
* that allows for the editing of a single line of text.
* <p>
* For example, the following image depicts a frame with four
* text fields of varying widths. Two of these text fields
* display the predefined text <code>"Hello"</code>.
* <p>
* <img src="doc-files/TextField-1.gif" alt="The preceding text describes this image."
* style="float:center; margin: 7px 10px;">
* <p>
* Here is the code that produces these four text fields:
*
* <hr><blockquote><pre>
* TextField tf1, tf2, tf3, tf4;
* // a blank text field
* tf1 = new TextField();
* // blank field of 20 columns
* tf2 = new TextField("", 20);
* // predefined text displayed
* tf3 = new TextField("Hello!");
* // predefined text in 30 columns
* tf4 = new TextField("Hello", 30);
* </pre></blockquote><hr>
* <p>
* Every time the user types a key in the text field, one or
* more key events are sent to the text field. A <code>KeyEvent</code>
* may be one of three types: keyPressed, keyReleased, or keyTyped.
* The properties of a key event indicate which of these types
* it is, as well as additional information about the event,
* such as what modifiers are applied to the key event and the
* time at which the event occurred.
* <p>
* The key event is passed to every <code>KeyListener</code>
* or <code>KeyAdapter</code> object which registered to receive such
* events using the component's <code>addKeyListener</code> method.
* (<code>KeyAdapter</code> objects implement the
* <code>KeyListener</code> interface.)
* <p>
* It is also possible to fire an <code>ActionEvent</code>.
* If action events are enabled for the text field, they may
* be fired by pressing the <code>Return</code> key.
* <p>
* The <code>TextField</code> class's <code>processEvent</code>
* method examines the action event and passes it along to
* <code>processActionEvent</code>. The latter method redirects the
* event to any <code>ActionListener</code> objects that have
* registered to receive action events generated by this
* text field.
*
* @author Sami Shaio
* @see java.awt.event.KeyEvent
* @see java.awt.event.KeyAdapter
* @see java.awt.event.KeyListener
* @see java.awt.event.ActionEvent
* @see java.awt.Component#addKeyListener
* @see java.awt.TextField#processEvent
* @see java.awt.TextField#processActionEvent
* @see java.awt.TextField#addActionListener
* @since JDK1.0
*/
public class TextField extends TextComponent {
    /**
     * The number of columns in the text field.
     * A column is an approximate average character
     * width that is platform-dependent.
     * Guaranteed to be non-negative.
     *
     * @serial
     * @see #setColumns(int)
     * @see #getColumns()
     */
    int columns;
    /**
     * The echo character, which is used when
     * the user wishes to disguise the characters
     * typed into the text field.
     * The disguises are removed if echoChar = <code>0</code>.
     *
     * @serial
     * @see #getEchoChar()
     * @see #setEchoChar(char)
     * @see #echoCharIsSet()
     */
    char echoChar;
    // Multicast chain of registered action listeners; transient because
    // listeners are presumably written out conditionally by custom
    // serialization code rather than by default field serialization.
    transient ActionListener actionListener;
    // Prefix used by constructComponentName() for auto-generated names.
    private static final String base = "textfield";
    // Per-class counter appended to "base"; guarded by TextField.class lock.
    private static int nameCounter = 0;
    /*
     * JDK 1.1 serialVersionUID
     */
    private static final long serialVersionUID = -2966288784432217853L;
    /**
     * Initialize JNI field and method ids
     */
    private static native void initIDs();
    static {
        /* ensure that the necessary native libraries are loaded */
        Toolkit.loadLibraries();
        // Native field/method IDs are only needed (and available) when a
        // display environment exists.
        if (!GraphicsEnvironment.isHeadless()) {
            initIDs();
        }
    }
/**
* Constructs a new text field.
* @exception HeadlessException if GraphicsEnvironment.isHeadless()
* returns true.
* @see java.awt.GraphicsEnvironment#isHeadless
*/
public TextField() throws HeadlessException {
this("", 0);
}
/**
* Constructs a new text field initialized with the specified text.
* @param text the text to be displayed. If
* <code>text</code> is <code>null</code>, the empty
* string <code>""</code> will be displayed.
* @exception HeadlessException if GraphicsEnvironment.isHeadless()
* returns true.
* @see java.awt.GraphicsEnvironment#isHeadless
*/
public TextField(String text) throws HeadlessException {
this(text, (text != null) ? text.length() : 0);
}
/**
* Constructs a new empty text field with the specified number
* of columns. A column is an approximate average character
* width that is platform-dependent.
* @param columns the number of columns. If
* <code>columns</code> is less than <code>0</code>,
* <code>columns</code> is set to <code>0</code>.
* @exception HeadlessException if GraphicsEnvironment.isHeadless()
* returns true.
* @see java.awt.GraphicsEnvironment#isHeadless
*/
public TextField(int columns) throws HeadlessException {
this("", columns);
}
/**
* Constructs a new text field initialized with the specified text
* to be displayed, and wide enough to hold the specified
* number of columns. A column is an approximate average character
* width that is platform-dependent.
* @param text the text to be displayed. If
* <code>text</code> is <code>null</code>, the empty
* string <code>""</code> will be displayed.
* @param columns the number of columns. If
* <code>columns</code> is less than <code>0</code>,
* <code>columns</code> is set to <code>0</code>.
* @exception HeadlessException if GraphicsEnvironment.isHeadless()
* returns true.
* @see java.awt.GraphicsEnvironment#isHeadless
*/
public TextField(String text, int columns) throws HeadlessException {
super(text);
this.columns = (columns >= 0) ? columns : 0;
}
/**
* Construct a name for this component. Called by getName() when the
* name is null.
*/
String constructComponentName() {
synchronized (TextField.class) {
return base + nameCounter++;
}
}
/**
* Creates the TextField's peer. The peer allows us to modify the
* appearance of the TextField without changing its functionality.
*/
public void addNotify() {
synchronized (getTreeLock()) {
if (peer == null)
peer = getToolkit().createTextField(this);
super.addNotify();
}
}
/**
* Gets the character that is to be used for echoing.
* <p>
* An echo character is useful for text fields where
* user input should not be echoed to the screen, as in
* the case of a text field for entering a password.
* If <code>echoChar</code> = <code>0</code>, user
* input is echoed to the screen unchanged.
* <p>
* A Java platform implementation may support only a limited,
* non-empty set of echo characters. This function returns the
* echo character originally requested via setEchoChar(). The echo
* character actually used by the TextField implementation might be
* different.
* @return the echo character for this text field.
* @see java.awt.TextField#echoCharIsSet
* @see java.awt.TextField#setEchoChar
*/
public char getEchoChar() {
return echoChar;
}
/**
* Sets the echo character for this text field.
* <p>
* An echo character is useful for text fields where
* user input should not be echoed to the screen, as in
* the case of a text field for entering a password.
* Setting <code>echoChar</code> = <code>0</code> allows
* user input to be echoed to the screen again.
* <p>
* A Java platform implementation may support only a limited,
* non-empty set of echo characters. Attempts to set an
* unsupported echo character will cause the default echo
* character to be used instead. Subsequent calls to getEchoChar()
* will return the echo character originally requested. This might
* or might not be identical to the echo character actually
* used by the TextField implementation.
* @param c the echo character for this text field.
* @see java.awt.TextField#echoCharIsSet
* @see java.awt.TextField#getEchoChar
* @since JDK1.1
*/
public void setEchoChar(char c) {
setEchoCharacter(c);
}
/**
* @deprecated As of JDK version 1.1,
* replaced by <code>setEchoChar(char)</code>.
*/
@Deprecated
public synchronized void setEchoCharacter(char c) {
if (echoChar != c) {
echoChar = c;
TextFieldPeer peer = (TextFieldPeer)this.peer;
if (peer != null) {
peer.setEchoChar(c);
}
}
}
/**
* Sets the text that is presented by this
* text component to be the specified text.
* @param t the new text.
* @see java.awt.TextComponent#getText
*/
public void setText(String t) {
super.setText(t);
// This could change the preferred size of the Component.
invalidateIfValid();
}
/**
* Indicates whether or not this text field has a
* character set for echoing.
* <p>
* An echo character is useful for text fields where
* user input should not be echoed to the screen, as in
* the case of a text field for entering a password.
* @return <code>true</code> if this text field has
* a character set for echoing;
* <code>false</code> otherwise.
* @see java.awt.TextField#setEchoChar
* @see java.awt.TextField#getEchoChar
*/
public boolean echoCharIsSet() {
return echoChar != 0;
}
/**
* Gets the number of columns in this text field. A column is an
* approximate average character width that is platform-dependent.
* @return the number of columns.
* @see java.awt.TextField#setColumns
* @since JDK1.1
*/
public int getColumns() {
return columns;
}
/**
* Sets the number of columns in this text field. A column is an
* approximate average character width that is platform-dependent.
* @param columns the number of columns.
* @see java.awt.TextField#getColumns
* @exception IllegalArgumentException if the value
* supplied for <code>columns</code>
* is less than <code>0</code>.
* @since JDK1.1
*/
public void setColumns(int columns) {
int oldVal;
synchronized (this) {
oldVal = this.columns;
if (columns < 0) {
throw new IllegalArgumentException("columns less than zero.");
}
if (columns != oldVal) {
this.columns = columns;
}
}
if (columns != oldVal) {
invalidate();
}
}
/**
* Gets the preferred size of this text field
* with the specified number of columns.
* @param columns the number of columns
* in this text field.
* @return the preferred dimensions for
* displaying this text field.
* @since JDK1.1
*/
public Dimension getPreferredSize(int columns) {
return preferredSize(columns);
}
/**
* @deprecated As of JDK version 1.1,
* replaced by <code>getPreferredSize(int)</code>.
*/
@Deprecated
public Dimension preferredSize(int columns) {
synchronized (getTreeLock()) {
TextFieldPeer peer = (TextFieldPeer)this.peer;
return (peer != null) ?
peer.getPreferredSize(columns) :
super.preferredSize();
}
}
/**
* Gets the preferred size of this text field.
* @return the preferred dimensions for
* displaying this text field.
* @since JDK1.1
*/
public Dimension getPreferredSize() {
return preferredSize();
}
/**
* @deprecated As of JDK version 1.1,
* replaced by <code>getPreferredSize()</code>.
*/
@Deprecated
public Dimension preferredSize() {
synchronized (getTreeLock()) {
return (columns > 0) ?
preferredSize(columns) :
super.preferredSize();
}
}
/**
* Gets the minimum dimensions for a text field with
* the specified number of columns.
* @param columns the number of columns in
* this text field.
* @since JDK1.1
*/
public Dimension getMinimumSize(int columns) {
return minimumSize(columns);
}
/**
* @deprecated As of JDK version 1.1,
* replaced by <code>getMinimumSize(int)</code>.
*/
@Deprecated
public Dimension minimumSize(int columns) {
synchronized (getTreeLock()) {
TextFieldPeer peer = (TextFieldPeer)this.peer;
return (peer != null) ?
peer.getMinimumSize(columns) :
super.minimumSize();
}
}
/**
* Gets the minimum dimensions for this text field.
* @return the minimum dimensions for
* displaying this text field.
* @since JDK1.1
*/
public Dimension getMinimumSize() {
return minimumSize();
}
/**
* @deprecated As of JDK version 1.1,
* replaced by <code>getMinimumSize()</code>.
*/
@Deprecated
public Dimension minimumSize() {
synchronized (getTreeLock()) {
return (columns > 0) ?
minimumSize(columns) :
super.minimumSize();
}
}
/**
* Adds the specified action listener to receive
* action events from this text field.
* If l is null, no exception is thrown and no action is performed.
* <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
* >AWT Threading Issues</a> for details on AWT's threading model.
*
* @param l the action listener.
* @see #removeActionListener
* @see #getActionListeners
* @see java.awt.event.ActionListener
* @since JDK1.1
*/
public synchronized void addActionListener(ActionListener l) {
if (l == null) {
return;
}
actionListener = AWTEventMulticaster.add(actionListener, l);
newEventsOnly = true;
}
/**
 * Removes the specified action listener so that it no longer
 * receives action events from this text field.
 * If l is null, no exception is thrown and no action is performed.
 * <p>Refer to <a href="doc-files/AWTThreadIssues.html#ListenersThreads"
 * >AWT Threading Issues</a> for details on AWT's threading model.
 *
 * @param l the action listener.
 * @see #addActionListener
 * @see #getActionListeners
 * @see java.awt.event.ActionListener
 * @since JDK1.1
 */
public synchronized void removeActionListener(ActionListener l) {
    if (l != null) {
        // Unlink the listener from the multicaster chain.
        actionListener = AWTEventMulticaster.remove(actionListener, l);
    }
}
/**
 * Returns an array of all the action listeners
 * registered on this textfield.
 *
 * @return all of this textfield's <code>ActionListener</code>s,
 *         or an empty array if no action listeners are currently registered
 * @see #addActionListener
 * @see #removeActionListener
 * @see java.awt.event.ActionListener
 * @since 1.4
 */
public synchronized ActionListener[] getActionListeners() {
    ActionListener[] listeners = getListeners(ActionListener.class);
    return listeners;
}
/**
 * Returns an array of all the objects currently registered
 * as <code><em>Foo</em>Listener</code>s
 * upon this <code>TextField</code>.
 * <code><em>Foo</em>Listener</code>s are registered using the
 * <code>add<em>Foo</em>Listener</code> method.
 *
 * <p>
 * You can specify the <code>listenerType</code> argument
 * with a class literal, such as
 * <code><em>Foo</em>Listener.class</code>.
 * For example, you can query a
 * <code>TextField</code> <code>t</code>
 * for its action listeners with the following code:
 *
 * <pre>ActionListener[] als = (ActionListener[])(t.getListeners(ActionListener.class));</pre>
 *
 * If no such listeners exist, this method returns an empty array.
 *
 * @param listenerType the type of listeners requested; this parameter
 *          should specify an interface that descends from
 *          <code>java.util.EventListener</code>
 * @return an array of all objects registered as
 *          <code><em>Foo</em>Listener</code>s on this textfield,
 *          or an empty array if no such listeners have been added
 * @exception ClassCastException if <code>listenerType</code>
 *          doesn't specify a class or interface that implements
 *          <code>java.util.EventListener</code>
 * @see #getActionListeners
 * @since 1.3
 */
public <T extends EventListener> T[] getListeners(Class<T> listenerType) {
    // Only ActionListener is handled locally; everything else is the
    // superclass's responsibility.
    if (listenerType == ActionListener.class) {
        return AWTEventMulticaster.getListeners(actionListener, listenerType);
    }
    return super.getListeners(listenerType);
}
// REMIND: remove when filtering is done at lower level
boolean eventEnabled(AWTEvent e) {
    // Action events are deliverable when either the event mask enables them
    // or at least one ActionListener is registered.
    if (e.id == ActionEvent.ACTION_PERFORMED) {
        return (eventMask & AWTEvent.ACTION_EVENT_MASK) != 0
                || actionListener != null;
    }
    return super.eventEnabled(e);
}
/**
 * Processes events on this text field. If the event
 * is an instance of <code>ActionEvent</code>,
 * it invokes the <code>processActionEvent</code>
 * method. Otherwise, it invokes <code>processEvent</code>
 * on the superclass.
 * <p>Note that if the event parameter is <code>null</code>
 * the behavior is unspecified and may result in an
 * exception.
 *
 * @param e the event
 * @see java.awt.event.ActionEvent
 * @see java.awt.TextField#processActionEvent
 * @since JDK1.1
 */
protected void processEvent(AWTEvent e) {
    if (e instanceof ActionEvent) {
        processActionEvent((ActionEvent) e);
    } else {
        super.processEvent(e);
    }
}
/**
 * Processes action events occurring on this text field by
 * dispatching them to any registered
 * <code>ActionListener</code> objects.
 * <p>
 * This method is not called unless action events are
 * enabled for this component. Action events are enabled
 * when one of the following occurs:
 * <ul>
 * <li>An <code>ActionListener</code> object is registered
 * via <code>addActionListener</code>.
 * <li>Action events are enabled via <code>enableEvents</code>.
 * </ul>
 * <p>Note that if the event parameter is <code>null</code>
 * the behavior is unspecified and may result in an
 * exception.
 *
 * @param e the action event
 * @see java.awt.event.ActionListener
 * @see java.awt.TextField#addActionListener
 * @see java.awt.Component#enableEvents
 * @since JDK1.1
 */
protected void processActionEvent(ActionEvent e) {
    // Snapshot the field once so a concurrent remove cannot race us.
    ActionListener handler = actionListener;
    if (handler == null) {
        return;
    }
    handler.actionPerformed(e);
}
/**
 * Returns a string representing the state of this <code>TextField</code>.
 * This method is intended to be used only for debugging purposes, and the
 * content and format of the returned string may vary between
 * implementations. The returned string may be empty but may not be
 * <code>null</code>.
 *
 * @return the parameter string of this text field
 */
protected String paramString() {
    String base = super.paramString();
    // Only advertise the echo character when one has been set.
    return (echoChar != 0) ? base + ",echo=" + echoChar : base;
}
/*
 * Serialization support.
 */
/**
 * The textField Serialized Data Version.
 * Bumped only if the serialized form of this class changes incompatibly.
 *
 * @serial
 */
private int textFieldSerializedDataVersion = 1;
/**
 * Writes default serializable fields to stream. Writes
 * a list of serializable ActionListener(s) as optional data.
 * The non-serializable ActionListener(s) are detected and
 * no attempt is made to serialize them.
 *
 * @serialData Null terminated sequence of zero or more pairs.
 *             A pair consists of a String and Object.
 *             The String indicates the type of object and
 *             is one of the following :
 *             ActionListenerK indicating and ActionListener object.
 *
 * @see AWTEventMulticaster#save(ObjectOutputStream, String, EventListener)
 * @see java.awt.Component#actionListenerK
 */
private void writeObject(ObjectOutputStream s)
  throws IOException
{
    // Write the default fields first; listeners follow as optional data.
    s.defaultWriteObject();

    // Persist only serializable ActionListeners, each tagged with the
    // actionListenerK key so readObject can recognize the pair type.
    AWTEventMulticaster.save(s, actionListenerK, actionListener);
    // A null object terminates the key/value pair sequence (see @serialData).
    s.writeObject(null);
}
/**
 * Read the ObjectInputStream and if it isn't null,
 * add a listener to receive action events fired by the
 * TextField. Unrecognized keys or values will be
 * ignored.
 *
 * @exception HeadlessException if
 * <code>GraphicsEnvironment.isHeadless()</code> returns
 * <code>true</code>
 * @see #removeActionListener(ActionListener)
 * @see #addActionListener(ActionListener)
 * @see java.awt.GraphicsEnvironment#isHeadless
 */
private void readObject(ObjectInputStream s)
  throws ClassNotFoundException, IOException, HeadlessException
{
    // HeadlessException will be thrown by TextComponent's readObject
    s.defaultReadObject();

    // Make sure the state we just read in for columns has legal values
    if (columns < 0) {
        columns = 0;
    }

    // Read in listeners, if any. The stream holds the null-terminated
    // key/value sequence produced by writeObject.
    Object keyOrNull;
    while(null != (keyOrNull = s.readObject())) {
        String key = ((String)keyOrNull).intern();

        // Identity comparison is intentional: both sides are interned
        // string constants, so == is a valid (and fast) equality test here.
        if (actionListenerK == key) {
            addActionListener((ActionListener)(s.readObject()));
        } else {
            // skip value for unrecognized key
            s.readObject();
        }
    }
}
/////////////////
// Accessibility support
////////////////
/**
 * Gets the AccessibleContext associated with this TextField.
 * For text fields, the AccessibleContext takes the form of an
 * AccessibleAWTTextField.
 * A new AccessibleAWTTextField instance is created if necessary.
 *
 * @return an AccessibleAWTTextField that serves as the
 *         AccessibleContext of this TextField
 * @since 1.3
 */
public AccessibleContext getAccessibleContext() {
    // Lazily create the context on first request and cache it.
    if (accessibleContext != null) {
        return accessibleContext;
    }
    accessibleContext = new AccessibleAWTTextField();
    return accessibleContext;
}
/**
 * This class implements accessibility support for the
 * <code>TextField</code> class. It provides an implementation of the
 * Java Accessibility API appropriate to text field user-interface elements.
 * @since 1.3
 */
protected class AccessibleAWTTextField extends AccessibleAWTTextComponent
{
    /*
     * JDK 1.3 serialVersionUID
     */
    private static final long serialVersionUID = 6219164359235943158L;

    /**
     * Gets the state set of this object.
     *
     * @return an instance of AccessibleStateSet describing the states
     *         of the object
     * @see AccessibleState
     */
    public AccessibleStateSet getAccessibleStateSet() {
        // Start from the generic text-component states, then mark this
        // component as a single-line text field.
        AccessibleStateSet stateSet = super.getAccessibleStateSet();
        stateSet.add(AccessibleState.SINGLE_LINE);
        return stateSet;
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode.ha;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadLocalRandom;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.AppendTestUtil;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicyDefault;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.hdfs.server.protocol.BlockReportContext;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.GenericTestUtils.DelayAnswer;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
/**
 * HA failover fencing tests for datanodes. Exercises scenarios where a
 * previous active NameNode has queued block deletions that must not be
 * allowed to destroy replicas the new active NameNode still needs, plus
 * regression tests for queued datanode messages (RBW block reports,
 * reports that arrive after the corresponding edits).
 */
public class TestDNFencing {

  protected static final Log LOG = LogFactory.getLog(TestDNFencing.class);
  // Test file path; written with many SMALL_BLOCK-sized blocks so that
  // replication changes generate plenty of invalidation/replication work.
  private static final String TEST_FILE = "/testStandbyIsHot";
  private static final Path TEST_FILE_PATH = new Path(TEST_FILE);
  private static final int SMALL_BLOCK = 1024;

  private Configuration conf;
  private MiniDFSCluster cluster;
  private NameNode nn1, nn2;    // nn1 starts active, nn2 standby
  private FileSystem fs;        // failover-aware client filesystem

  static {
    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
  }

  /**
   * Starts a 3-datanode HA mini-cluster with NN0 active, small blocks,
   * explicit-only replication checks, and the RandomDeleterPolicy block
   * placement policy (see its javadoc for why).
   */
  @Before
  public void setupCluster() throws Exception {
    conf = new Configuration();
    conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, SMALL_BLOCK);
    // Bump up replication interval so that we only run replication
    // checks explicitly.
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 600);
    // Increase max streams so that we re-replicate quickly.
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY, 1000);
    // See RandomDeleterPolicy javadoc.
    conf.setClass(DFSConfigKeys.DFS_BLOCK_REPLICATOR_CLASSNAME_KEY,
        RandomDeleterPolicy.class, BlockPlacementPolicy.class);
    conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
    cluster = new MiniDFSCluster.Builder(conf)
      .nnTopology(MiniDFSNNTopology.simpleHATopology())
      .numDataNodes(3)
      .build();
    nn1 = cluster.getNameNode(0);
    nn2 = cluster.getNameNode(1);

    cluster.waitActive();
    cluster.transitionToActive(0);
    // Trigger block reports so that the first NN trusts all
    // of the DNs, and will issue deletions
    cluster.triggerBlockReports();
    fs = HATestUtil.configureFailoverFs(cluster, conf);
  }

  /**
   * Dumps both NameNodes' metadata (for post-mortem debugging of failed
   * runs) and then shuts the cluster down.
   */
  @After
  public void shutdownCluster() throws Exception {
    if (cluster != null) {
      banner("Shutting down cluster. NN1 metadata:");
      doMetasave(nn1);
      banner("Shutting down cluster. NN2 metadata:");
      doMetasave(nn2);

      cluster.shutdown();
      cluster = null;
    }
  }

  /**
   * Basic fencing scenario: NN1 queues invalidations for over-replicated
   * blocks, then NN2 takes over while NN1 still believes it is active.
   * Verifies that NN2 completes fencing, enacts the deletions itself, and
   * the file stays readable with the intended replication.
   */
  @Test
  public void testDnFencing() throws Exception {
    // Create a file with replication level 3.
    DFSTestUtil.createFile(fs, TEST_FILE_PATH, 30*SMALL_BLOCK, (short)3, 1L);
    ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, TEST_FILE_PATH);

    // Drop its replication count to 1, so it becomes over-replicated.
    // Then compute the invalidation of the extra blocks and trigger
    // heartbeats so the invalidations are flushed to the DNs.
    nn1.getRpcServer().setReplication(TEST_FILE, (short) 1);
    BlockManagerTestUtil.computeInvalidationWork(
        nn1.getNamesystem().getBlockManager());
    cluster.triggerHeartbeats();

    // Transition nn2 to active even though nn1 still thinks it's active.
    banner("Failing to NN2 but let NN1 continue to think it's active");
    NameNodeAdapter.abortEditLogs(nn1);
    NameNodeAdapter.enterSafeMode(nn1, false);
    cluster.transitionToActive(1);

    // Check that the standby picked up the replication change.
    assertEquals(1,
        nn2.getRpcServer().getFileInfo(TEST_FILE).getReplication());

    // Dump some info for debugging purposes.
    banner("NN2 Metadata immediately after failover");
    doMetasave(nn2);

    banner("Triggering heartbeats and block reports so that fencing is completed");
    cluster.triggerHeartbeats();
    cluster.triggerBlockReports();

    banner("Metadata after nodes have all block-reported");
    doMetasave(nn2);

    // Force a rescan of postponedMisreplicatedBlocks.
    BlockManager nn2BM = nn2.getNamesystem().getBlockManager();
    BlockManagerTestUtil.checkHeartbeat(nn2BM);
    BlockManagerTestUtil.rescanPostponedMisreplicatedBlocks(nn2BM);

    // The blocks should no longer be postponed.
    assertEquals(0, nn2.getNamesystem().getPostponedMisreplicatedBlocks());

    // Wait for NN2 to enact its deletions (replication monitor has to run, etc)
    BlockManagerTestUtil.computeInvalidationWork(
        nn2.getNamesystem().getBlockManager());
    cluster.triggerHeartbeats();
    HATestUtil.waitForDNDeletions(cluster);
    cluster.triggerDeletionReports();
    assertEquals(0, nn2.getNamesystem().getUnderReplicatedBlocks());
    assertEquals(0, nn2.getNamesystem().getPendingReplicationBlocks());

    banner("Making sure the file is still readable");
    FileSystem fs2 = cluster.getFileSystem(1);
    DFSTestUtil.readFile(fs2, TEST_FILE_PATH);

    banner("Waiting for the actual block files to get deleted from DNs.");
    waitForTrueReplication(cluster, block, 1);
  }

  /**
   * Test case which restarts the standby node in such a way that,
   * when it exits safemode, it will want to invalidate a bunch
   * of over-replicated block replicas. Ensures that if we failover
   * at this point it won't lose data.
   */
  @Test
  public void testNNClearsCommandsOnFailoverAfterStartup()
      throws Exception {
    // Make lots of blocks to increase chances of triggering a bug.
    DFSTestUtil.createFile(fs, TEST_FILE_PATH, 30*SMALL_BLOCK, (short)3, 1L);

    banner("Shutting down NN2");
    cluster.shutdownNameNode(1);

    banner("Setting replication to 1, rolling edit log.");
    nn1.getRpcServer().setReplication(TEST_FILE, (short) 1);
    nn1.getRpcServer().rollEditLog();

    // Start NN2 again. When it starts up, it will see all of the
    // blocks as over-replicated, since it has the metadata for
    // replication=1, but the DNs haven't yet processed the deletions.
    banner("Starting NN2 again.");
    cluster.restartNameNode(1);
    nn2 = cluster.getNameNode(1);

    banner("triggering BRs");
    cluster.triggerBlockReports();

    // We expect that both NN1 and NN2 will have some number of
    // deletions queued up for the DNs.
    banner("computing invalidation on nn1");
    BlockManagerTestUtil.computeInvalidationWork(
        nn1.getNamesystem().getBlockManager());

    banner("computing invalidation on nn2");
    BlockManagerTestUtil.computeInvalidationWork(
        nn2.getNamesystem().getBlockManager());

    // Dump some info for debugging purposes.
    banner("Metadata immediately before failover");
    doMetasave(nn2);

    // Transition nn2 to active even though nn1 still thinks it's active
    banner("Failing to NN2 but let NN1 continue to think it's active");
    NameNodeAdapter.abortEditLogs(nn1);
    NameNodeAdapter.enterSafeMode(nn1, false);

    cluster.transitionToActive(1);

    // Check that the standby picked up the replication change.
    assertEquals(1,
        nn2.getRpcServer().getFileInfo(TEST_FILE).getReplication());

    // Dump some info for debugging purposes.
    banner("Metadata immediately after failover");
    doMetasave(nn2);

    banner("Triggering heartbeats and block reports so that fencing is completed");
    cluster.triggerHeartbeats();
    cluster.triggerBlockReports();

    banner("Metadata after nodes have all block-reported");
    doMetasave(nn2);

    // Force a rescan of postponedMisreplicatedBlocks.
    BlockManager nn2BM = nn2.getNamesystem().getBlockManager();
    BlockManagerTestUtil.checkHeartbeat(nn2BM);
    BlockManagerTestUtil.rescanPostponedMisreplicatedBlocks(nn2BM);

    // The block should no longer be postponed.
    assertEquals(0, nn2.getNamesystem().getPostponedMisreplicatedBlocks());

    // Wait for NN2 to enact its deletions (replication monitor has to run, etc)
    BlockManagerTestUtil.computeInvalidationWork(
        nn2.getNamesystem().getBlockManager());

    HATestUtil.waitForNNToIssueDeletions(nn2);
    cluster.triggerHeartbeats();
    HATestUtil.waitForDNDeletions(cluster);
    cluster.triggerDeletionReports();
    assertEquals(0, nn2.getNamesystem().getUnderReplicatedBlocks());
    assertEquals(0, nn2.getNamesystem().getPendingReplicationBlocks());

    banner("Making sure the file is still readable");
    FileSystem fs2 = cluster.getFileSystem(1);
    DFSTestUtil.readFile(fs2, TEST_FILE_PATH);
  }

  /**
   * Test case that reduces replication of a file with a lot of blocks
   * and then fails over right after those blocks enter the DN invalidation
   * queues on the active. Ensures that fencing is correct and no replicas
   * are lost.
   */
  @Test
  public void testNNClearsCommandsOnFailoverWithReplChanges()
      throws Exception {
    // Make lots of blocks to increase chances of triggering a bug.
    DFSTestUtil.createFile(fs, TEST_FILE_PATH, 30*SMALL_BLOCK, (short)1, 1L);

    banner("rolling NN1's edit log, forcing catch-up");
    HATestUtil.waitForStandbyToCatchUp(nn1, nn2);

    // Get some new replicas reported so that NN2 now considers
    // them over-replicated and schedules some more deletions
    nn1.getRpcServer().setReplication(TEST_FILE, (short) 2);
    while (BlockManagerTestUtil.getComputedDatanodeWork(
        nn1.getNamesystem().getBlockManager()) > 0) {
      LOG.info("Getting more replication work computed");
    }
    BlockManager bm1 = nn1.getNamesystem().getBlockManager();
    // Spin until the pending-replication queue drains; heartbeats let the
    // DNs report the new replicas back.
    while (bm1.getPendingReplicationBlocksCount() > 0) {
      BlockManagerTestUtil.updateState(bm1);
      cluster.triggerHeartbeats();
      Thread.sleep(1000);
    }

    banner("triggering BRs");
    cluster.triggerBlockReports();

    nn1.getRpcServer().setReplication(TEST_FILE, (short) 1);

    banner("computing invalidation on nn1");
    BlockManagerTestUtil.computeInvalidationWork(
        nn1.getNamesystem().getBlockManager());
    doMetasave(nn1);

    banner("computing invalidation on nn2");
    BlockManagerTestUtil.computeInvalidationWork(
        nn2.getNamesystem().getBlockManager());
    doMetasave(nn2);

    // Dump some info for debugging purposes.
    banner("Metadata immediately before failover");
    doMetasave(nn2);

    // Transition nn2 to active even though nn1 still thinks it's active
    banner("Failing to NN2 but let NN1 continue to think it's active");
    NameNodeAdapter.abortEditLogs(nn1);
    NameNodeAdapter.enterSafeMode(nn1, false);

    BlockManagerTestUtil.computeInvalidationWork(
        nn2.getNamesystem().getBlockManager());
    cluster.transitionToActive(1);

    // Check that the standby picked up the replication change.
    assertEquals(1,
        nn2.getRpcServer().getFileInfo(TEST_FILE).getReplication());

    // Dump some info for debugging purposes.
    banner("Metadata immediately after failover");
    doMetasave(nn2);

    banner("Triggering heartbeats and block reports so that fencing is completed");
    cluster.triggerHeartbeats();
    cluster.triggerBlockReports();

    banner("Metadata after nodes have all block-reported");
    doMetasave(nn2);

    // Force a rescan of postponedMisreplicatedBlocks.
    BlockManager nn2BM = nn2.getNamesystem().getBlockManager();
    BlockManagerTestUtil.checkHeartbeat(nn2BM);
    BlockManagerTestUtil.rescanPostponedMisreplicatedBlocks(nn2BM);

    // The block should no longer be postponed.
    assertEquals(0, nn2.getNamesystem().getPostponedMisreplicatedBlocks());

    // Wait for NN2 to enact its deletions (replication monitor has to run, etc)
    BlockManagerTestUtil.computeInvalidationWork(
        nn2.getNamesystem().getBlockManager());

    HATestUtil.waitForNNToIssueDeletions(nn2);
    cluster.triggerHeartbeats();
    HATestUtil.waitForDNDeletions(cluster);
    cluster.triggerDeletionReports();
    assertEquals(0, nn2.getNamesystem().getUnderReplicatedBlocks());
    assertEquals(0, nn2.getNamesystem().getPendingReplicationBlocks());

    banner("Making sure the file is still readable");
    FileSystem fs2 = cluster.getFileSystem(1);
    DFSTestUtil.readFile(fs2, TEST_FILE_PATH);
  }

  /**
   * Regression test for HDFS-2742. The issue in this bug was:
   * - DN does a block report while file is open. This BR contains
   *   the block in RBW state.
   * - Standby queues the RBW state in PendingDatanodeMessages
   * - Standby processes edit logs during failover. Before fixing
   *   this bug, it was mistakenly applying the RBW reported state
   *   after the block had been completed, causing the block to get
   *   marked corrupt. Instead, we should now be applying the RBW
   *   message on OP_ADD, and then the FINALIZED message on OP_CLOSE.
   */
  @Test
  public void testBlockReportsWhileFileBeingWritten() throws Exception {
    FSDataOutputStream out = fs.create(TEST_FILE_PATH);
    try {
      AppendTestUtil.write(out, 0, 10);
      out.hflush();

      // Block report will include the RBW replica, but will be
      // queued on the StandbyNode.
      cluster.triggerBlockReports();

    } finally {
      IOUtils.closeStream(out);
    }

    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);

    // Verify that no replicas are marked corrupt, and that the
    // file is readable from the failed-over standby.
    BlockManagerTestUtil.updateState(nn1.getNamesystem().getBlockManager());
    BlockManagerTestUtil.updateState(nn2.getNamesystem().getBlockManager());
    assertEquals(0, nn1.getNamesystem().getCorruptReplicaBlocks());
    assertEquals(0, nn2.getNamesystem().getCorruptReplicaBlocks());

    DFSTestUtil.readFile(fs, TEST_FILE_PATH);
  }

  /**
   * Test that, when a block is re-opened for append, the related
   * datanode messages are correctly queued by the SBN because
   * they have future states and genstamps.
   */
  @Test
  public void testQueueingWithAppend() throws Exception {
    // Running tally of how many messages we expect the standby to have
    // queued in PendingDatanodeMessages; checked after each case.
    int numQueued = 0;
    int numDN = cluster.getDataNodes().size();

    // case 1: create file and call hflush after write
    FSDataOutputStream out = fs.create(TEST_FILE_PATH);
    try {
      AppendTestUtil.write(out, 0, 10);
      out.hflush();

      // Opening the file will report RBW replicas, but will be
      // queued on the StandbyNode.
      // However, the delivery of RBW messages is delayed by HDFS-7217 fix.
      // Apply cluster.triggerBlockReports() to trigger the reporting sooner.
      //
      cluster.triggerBlockReports();
      numQueued += numDN; // RBW messages

      // The cluster.triggerBlockReports() call above does a full
      // block report that incurs 3 extra RBW messages
      numQueued += numDN; // RBW messages
    } finally {
      IOUtils.closeStream(out);
      numQueued += numDN; // blockReceived messages
    }

    cluster.triggerBlockReports();
    numQueued += numDN;
    assertEquals(numQueued, cluster.getNameNode(1).getNamesystem().
        getPendingDataNodeMessageCount());

    // case 2: append to file and call hflush after write
    try {
      out = fs.append(TEST_FILE_PATH);
      AppendTestUtil.write(out, 10, 10);
      out.hflush();
      cluster.triggerBlockReports();
      numQueued += numDN * 2; // RBW messages, see comments in case 1
    } finally {
      IOUtils.closeStream(out);
      cluster.triggerHeartbeats();
      numQueued += numDN; // blockReceived
    }
    assertEquals(numQueued, cluster.getNameNode(1).getNamesystem().
        getPendingDataNodeMessageCount());

    // case 3: similar to case 2, except no hflush is called.
    try {
      out = fs.append(TEST_FILE_PATH);
      AppendTestUtil.write(out, 20, 10);
    } finally {
      // The write operation in the try block is buffered, thus no RBW message
      // is reported yet until the closeStream call here. When closeStream is
      // called, before HDFS-7217 fix, there would be three RBW messages
      // (blockReceiving), plus three FINALIZED messages (blockReceived)
      // delivered to NN. However, because of HDFS-7217 fix, the reporting of
      // RBW  messages is postponed. In this case, they are even overwritten
      // by the blockReceived messages of the same block when they are waiting
      // to be delivered. All this happens within the closeStream() call.
      // What's delivered to NN is the three blockReceived messages. See
      //  BPServiceActor#addPendingReplicationBlockInfo
      //
      IOUtils.closeStream(out);
      numQueued += numDN; // blockReceived
    }

    cluster.triggerBlockReports();
    numQueued += numDN;

    LOG.info("Expect " + numQueued + " and got: " + cluster.getNameNode(1).getNamesystem().
        getPendingDataNodeMessageCount());

    assertEquals(numQueued, cluster.getNameNode(1).getNamesystem().
        getPendingDataNodeMessageCount());

    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);

    // Verify that no replicas are marked corrupt, and that the
    // file is readable from the failed-over standby.
    BlockManagerTestUtil.updateState(nn1.getNamesystem().getBlockManager());
    BlockManagerTestUtil.updateState(nn2.getNamesystem().getBlockManager());
    assertEquals(0, nn1.getNamesystem().getCorruptReplicaBlocks());
    assertEquals(0, nn2.getNamesystem().getCorruptReplicaBlocks());

    AppendTestUtil.check(fs, TEST_FILE_PATH, 30);
  }

  /**
   * Another regression test for HDFS-2742. This tests the following sequence:
   * - DN does a block report while file is open. This BR contains
   *   the block in RBW state.
   * - The block report is delayed in reaching the standby.
   * - The file is closed.
   * - The standby processes the OP_ADD and OP_CLOSE operations before
   *   the RBW block report arrives.
   * - The standby should not mark the block as corrupt.
   */
  @Test
  public void testRBWReportArrivesAfterEdits() throws Exception {
    final CountDownLatch brFinished = new CountDownLatch(1);
    // DelayAnswer lets the test hold the DN's block report RPC to nn2 until
    // after the failover, then release it.
    DelayAnswer delayer = new GenericTestUtils.DelayAnswer(LOG) {
      @Override
      protected Object passThrough(InvocationOnMock invocation)
          throws Throwable {
        try {
          return super.passThrough(invocation);
        } finally {
          // inform the test that our block report went through.
          brFinished.countDown();
        }
      }
    };

    FSDataOutputStream out = fs.create(TEST_FILE_PATH);
    try {
      AppendTestUtil.write(out, 0, 10);
      out.hflush();

      DataNode dn = cluster.getDataNodes().get(0);
      DatanodeProtocolClientSideTranslatorPB spy =
        DataNodeTestUtils.spyOnBposToNN(dn, nn2);

      Mockito.doAnswer(delayer)
        .when(spy).blockReport(
          Mockito.<DatanodeRegistration>anyObject(),
          Mockito.anyString(),
          Mockito.<StorageBlockReport[]>anyObject(),
          Mockito.<BlockReportContext>anyObject());
      dn.scheduleAllBlockReport(0);
      delayer.waitForCall();

    } finally {
      IOUtils.closeStream(out);
    }

    cluster.transitionToStandby(0);
    cluster.transitionToActive(1);

    delayer.proceed();
    brFinished.await();

    // Verify that no replicas are marked corrupt, and that the
    // file is readable from the failed-over standby.
    BlockManagerTestUtil.updateState(nn1.getNamesystem().getBlockManager());
    BlockManagerTestUtil.updateState(nn2.getNamesystem().getBlockManager());
    assertEquals(0, nn1.getNamesystem().getCorruptReplicaBlocks());
    assertEquals(0, nn2.getNamesystem().getCorruptReplicaBlocks());

    DFSTestUtil.readFile(fs, TEST_FILE_PATH);
  }

  /**
   * Print a big banner in the test log to make debug easier.
   */
  private void banner(String string) {
    LOG.info("\n\n\n\n================================================\n" +
        string + "\n" +
        "==================================================\n\n");
  }

  /**
   * Writes the given NameNode's block-manager metadata dump to stderr,
   * holding the namesystem write lock for a consistent snapshot.
   */
  private void doMetasave(NameNode nn2) {
    nn2.getNamesystem().writeLock();
    try {
      PrintWriter pw = new PrintWriter(System.err);
      nn2.getNamesystem().getBlockManager().metaSave(pw);
      pw.flush();
    } finally {
      nn2.getNamesystem().writeUnlock();
    }
  }

  /**
   * Polls (every 500ms, up to 10s) until the number of on-disk replicas of
   * {@code block} across all datanodes equals {@code waitFor}.
   */
  private void waitForTrueReplication(final MiniDFSCluster cluster,
      final ExtendedBlock block, final int waitFor) throws Exception {
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        try {
          return getTrueReplication(cluster, block) == waitFor;
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }
    }, 500, 10000);
  }

  /**
   * Counts how many datanodes actually hold a stored replica of the block,
   * regardless of what the NameNodes believe.
   */
  private int getTrueReplication(MiniDFSCluster cluster, ExtendedBlock block)
      throws IOException {
    int count = 0;
    for (DataNode dn : cluster.getDataNodes()) {
      if (DataNodeTestUtils.getFSDataset(dn).getStoredBlock(
          block.getBlockPoolId(), block.getBlockId()) != null) {
        count++;
      }
    }
    return count;
  }

  /**
   * A BlockPlacementPolicy which, rather than using space available, makes
   * random decisions about which excess replica to delete. This is because,
   * in the test cases, the two NNs will usually (but not quite always)
   * make the same decision of which replica to delete. The fencing issues
   * are exacerbated when the two NNs make different decisions, which can
   * happen in "real life" when they have slightly out-of-sync heartbeat
   * information regarding disk usage.
   */
  public static class RandomDeleterPolicy extends BlockPlacementPolicyDefault {

    public RandomDeleterPolicy() {
      super();
    }

    @Override
    public DatanodeStorageInfo chooseReplicaToDelete(
        Collection<DatanodeStorageInfo> moreThanOne,
        Collection<DatanodeStorageInfo> exactlyOne,
        List<StorageType> excessTypes,
        Map<String, List<DatanodeStorageInfo>> rackMap) {
      // Prefer the "more than one per rack" group when it is non-empty,
      // mirroring the default policy's candidate selection; then pick a
      // victim uniformly at random instead of by free space.
      Collection<DatanodeStorageInfo> chooseFrom = !moreThanOne.isEmpty() ?
          moreThanOne : exactlyOne;

      List<DatanodeStorageInfo> l = Lists.newArrayList(chooseFrom);
      return l.get(ThreadLocalRandom.current().nextInt(l.size()));
    }
  }
}
| |
/* Copyright 2019 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.wfm.topology.flowhs.service;
import org.openkilda.floodlight.api.response.SpeakerFlowSegmentResponse;
import org.openkilda.floodlight.flow.response.FlowErrorResponse;
import org.openkilda.messaging.Message;
import org.openkilda.messaging.command.flow.CreateFlowLoopRequest;
import org.openkilda.messaging.command.flow.DeleteFlowLoopRequest;
import org.openkilda.messaging.command.flow.FlowRequest;
import org.openkilda.messaging.error.ErrorData;
import org.openkilda.messaging.error.ErrorMessage;
import org.openkilda.messaging.error.ErrorType;
import org.openkilda.model.Flow;
import org.openkilda.pce.PathComputer;
import org.openkilda.persistence.PersistenceManager;
import org.openkilda.persistence.repositories.FlowRepository;
import org.openkilda.persistence.repositories.KildaConfigurationRepository;
import org.openkilda.persistence.repositories.RepositoryFactory;
import org.openkilda.persistence.repositories.history.FlowEventRepository;
import org.openkilda.wfm.CommandContext;
import org.openkilda.wfm.share.flow.resources.FlowResourcesManager;
import org.openkilda.wfm.share.utils.FsmExecutor;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateContext;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateFsm;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateFsm.Event;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateFsm.State;
import org.openkilda.wfm.topology.flowhs.mapper.RequestedFlowMapper;
import org.openkilda.wfm.topology.flowhs.model.RequestedFlow;
import com.google.common.annotations.VisibleForTesting;
import lombok.extern.slf4j.Slf4j;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@Slf4j
public class FlowUpdateService {
// Active update FSMs, keyed by command key; exposed for test inspection.
@VisibleForTesting
final Map<String, FlowUpdateFsm> fsms = new HashMap<>();

private final FlowUpdateFsm.Factory fsmFactory;
// Drives FSM transitions; NEXT is the default event fired between states.
private final FsmExecutor<FlowUpdateFsm, State, Event, FlowUpdateContext> fsmExecutor
        = new FsmExecutor<>(Event.NEXT);
private final FlowUpdateHubCarrier carrier;

private final FlowRepository flowRepository;
private final FlowEventRepository flowEventRepository;
private final KildaConfigurationRepository kildaConfigurationRepository;
public FlowUpdateService(FlowUpdateHubCarrier carrier, PersistenceManager persistenceManager,
PathComputer pathComputer, FlowResourcesManager flowResourcesManager,
int pathAllocationRetriesLimit, int pathAllocationRetryDelay,
int speakerCommandRetriesLimit) {
this.carrier = carrier;
RepositoryFactory repositoryFactory = persistenceManager.getRepositoryFactory();
flowRepository = repositoryFactory.createFlowRepository();
flowEventRepository = repositoryFactory.createFlowEventRepository();
kildaConfigurationRepository = repositoryFactory.createKildaConfigurationRepository();
fsmFactory = new FlowUpdateFsm.Factory(carrier, persistenceManager, pathComputer, flowResourcesManager,
pathAllocationRetriesLimit, pathAllocationRetryDelay,
speakerCommandRetriesLimit);
}
/**
* Handles request for flow update.
*
* @param key command identifier.
* @param request request data.
*/
public void handleUpdateRequest(String key, CommandContext commandContext, FlowRequest request) {
Optional<Flow> flow = flowRepository.findById(request.getFlowId());
request.setLoopSwitchId(flow.map(Flow::getLoopSwitchId).orElse(null));
handleRequest(key, commandContext, request);
}
/**
* Handles async response from worker.
*
* @param key command identifier.
*/
public void handleAsyncResponse(String key, SpeakerFlowSegmentResponse flowResponse) {
log.debug("Received flow command response {}", flowResponse);
FlowUpdateFsm fsm = fsms.get(key);
if (fsm == null) {
log.warn("Failed to find a FSM: received response with key {} for non pending FSM", key);
return;
}
FlowUpdateContext context = FlowUpdateContext.builder()
.speakerFlowResponse(flowResponse)
.build();
if (flowResponse instanceof FlowErrorResponse) {
fsmExecutor.fire(fsm, Event.ERROR_RECEIVED, context);
} else {
fsmExecutor.fire(fsm, Event.RESPONSE_RECEIVED, context);
}
removeIfFinished(fsm, key);
}
/**
* Handles timeout case.
*
* @param key command identifier.
*/
public void handleTimeout(String key) {
log.debug("Handling timeout for {}", key);
FlowUpdateFsm fsm = fsms.get(key);
if (fsm == null) {
log.warn("Failed to find a FSM: timeout event for non pending FSM with key {}", key);
return;
}
fsmExecutor.fire(fsm, Event.TIMEOUT, null);
removeIfFinished(fsm, key);
}
/**
* Handles create flow loop request.
*
* @param request request to handle.
*/
public void handleCreateFlowLoopRequest(String key, CommandContext commandContext,
CreateFlowLoopRequest request) {
Optional<Flow> flow = flowRepository.findById(request.getFlowId());
if (flow.isPresent()) {
FlowRequest flowRequest = RequestedFlowMapper.INSTANCE.toFlowRequest(flow.get());
flowRequest.setLoopSwitchId(request.getSwitchId());
handleRequest(key, commandContext, flowRequest);
} else {
carrier.sendNorthboundResponse(buildFlowNotFoundErrorMessage(request.getFlowId(), commandContext));
}
}
/**
* Handles delete flow loop request.
*
* @param request request to handle.
*/
public void handleDeleteFlowLoopRequest(String key, CommandContext commandContext,
DeleteFlowLoopRequest request) {
Optional<Flow> flow = flowRepository.findById(request.getFlowId());
if (flow.isPresent()) {
FlowRequest flowRequest = RequestedFlowMapper.INSTANCE.toFlowRequest(flow.get());
flowRequest.setLoopSwitchId(null);
handleRequest(key, commandContext, flowRequest);
} else {
carrier.sendNorthboundResponse(buildFlowNotFoundErrorMessage(request.getFlowId(), commandContext));
}
}
private void handleRequest(String key, CommandContext commandContext, FlowRequest request) {
log.debug("Handling flow update request with key {} and flow ID: {}", key, request.getFlowId());
if (fsms.containsKey(key)) {
log.error("Attempt to create a FSM with key {}, while there's another active FSM with the same key.", key);
return;
}
String eventKey = commandContext.getCorrelationId();
if (flowEventRepository.existsByTaskId(eventKey)) {
log.error("Attempt to reuse key {}, but there's a history record(s) for it.", eventKey);
return;
}
FlowUpdateFsm fsm = fsmFactory.newInstance(commandContext, request.getFlowId());
fsms.put(key, fsm);
RequestedFlow requestedFlow = RequestedFlowMapper.INSTANCE.toRequestedFlow(request);
if (requestedFlow.getFlowEncapsulationType() == null) {
requestedFlow.setFlowEncapsulationType(kildaConfigurationRepository.getOrDefault()
.getFlowEncapsulationType());
}
FlowUpdateContext context = FlowUpdateContext.builder()
.targetFlow(requestedFlow)
.bulkUpdateFlowIds(request.getBulkUpdateFlowIds())
.doNotRevert(request.isDoNotRevert())
.build();
fsmExecutor.fire(fsm, Event.NEXT, context);
removeIfFinished(fsm, key);
}
private Message buildFlowNotFoundErrorMessage(String flowId, CommandContext commandContext) {
String description = String.format("Flow '%s' not found.", flowId);
ErrorData error = new ErrorData(ErrorType.NOT_FOUND, "Flow not found", description);
return new ErrorMessage(error, commandContext.getCreateTime(), commandContext.getCorrelationId());
}
private void removeIfFinished(FlowUpdateFsm fsm, String key) {
if (fsm.isTerminated()) {
log.debug("FSM with key {} is finished with state {}", key, fsm.getCurrentState());
fsms.remove(key);
carrier.cancelTimeoutCallback(key);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.viewfs;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT;
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemTestHelper;
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.QuotaUsage;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Tests for viewfs implementation of default fs level values.
* This tests for both passing in a path (based on mount point)
* to obtain the default value of the fs that the path is mounted on
* or just passing in no arguments.
*/
public class TestViewFsDefaultValue {
static final String testFileDir = "/tmp/test/";
static final String testFileName = testFileDir + "testFileStatusSerialziation";
static final String NOT_IN_MOUNTPOINT_FILENAME = "/NotInMountpointFile";
private static MiniDFSCluster cluster;
private static final FileSystemTestHelper fileSystemTestHelper = new FileSystemTestHelper();
private static final Configuration CONF = new Configuration();
private static FileSystem fHdfs;
private static FileSystem vfs;
private static Path testFilePath;
private static Path testFileDirPath;
// Use NotInMountpoint path to trigger the exception
private static Path notInMountpointPath;
@BeforeClass
public static void clusterSetupAtBegining() throws IOException,
LoginException, URISyntaxException {
CONF.setLong(DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE_DEFAULT);
CONF.setInt(DFS_BYTES_PER_CHECKSUM_KEY, DFS_BYTES_PER_CHECKSUM_DEFAULT);
CONF.setInt(DFS_CLIENT_WRITE_PACKET_SIZE_KEY,
DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT);
CONF.setInt(DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT + 1);
CONF.setInt(IO_FILE_BUFFER_SIZE_KEY, IO_FILE_BUFFER_SIZE_DEFAULT);
cluster = new MiniDFSCluster.Builder(CONF).numDataNodes(DFS_REPLICATION_DEFAULT + 1).build();
cluster.waitClusterUp();
fHdfs = cluster.getFileSystem();
fileSystemTestHelper.createFile(fHdfs, testFileName);
fileSystemTestHelper.createFile(fHdfs, NOT_IN_MOUNTPOINT_FILENAME);
Configuration conf = ViewFileSystemTestSetup.createConfig();
conf.setInt(DFS_REPLICATION_KEY, DFS_REPLICATION_DEFAULT + 1);
ConfigUtil.addLink(conf, "/tmp", new URI(fHdfs.getUri().toString() +
"/tmp"));
vfs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
testFileDirPath = new Path (testFileDir);
testFilePath = new Path (testFileName);
notInMountpointPath = new Path(NOT_IN_MOUNTPOINT_FILENAME);
}
/**
* Test that default blocksize values can be retrieved on the client side.
*/
@Test
public void testGetDefaultBlockSize()
throws IOException, URISyntaxException {
// createFile does not use defaultBlockSize to create the file,
// but we are only looking at the defaultBlockSize, so this
// test should still pass
try {
vfs.getDefaultBlockSize(notInMountpointPath);
fail("getServerDefaults on viewFs did not throw excetion!");
} catch (NotInMountpointException e) {
assertEquals(vfs.getDefaultBlockSize(testFilePath),
DFS_BLOCK_SIZE_DEFAULT);
}
}
/**
* Test that default replication values can be retrieved on the client side.
*/
@Test
public void testGetDefaultReplication()
throws IOException, URISyntaxException {
try {
vfs.getDefaultReplication(notInMountpointPath);
fail("getDefaultReplication on viewFs did not throw excetion!");
} catch (NotInMountpointException e) {
assertEquals(vfs.getDefaultReplication(testFilePath),
DFS_REPLICATION_DEFAULT+1);
}
}
/**
* Test that server default values can be retrieved on the client side.
*/
@Test
public void testServerDefaults() throws IOException {
try {
vfs.getServerDefaults(notInMountpointPath);
fail("getServerDefaults on viewFs did not throw excetion!");
} catch (NotInMountpointException e) {
FsServerDefaults serverDefaults = vfs.getServerDefaults(testFilePath);
assertEquals(DFS_BLOCK_SIZE_DEFAULT, serverDefaults.getBlockSize());
assertEquals(DFS_BYTES_PER_CHECKSUM_DEFAULT,
serverDefaults.getBytesPerChecksum());
assertEquals(DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT,
serverDefaults.getWritePacketSize());
assertEquals(IO_FILE_BUFFER_SIZE_DEFAULT,
serverDefaults.getFileBufferSize());
assertEquals(DFS_REPLICATION_DEFAULT + 1,
serverDefaults.getReplication());
}
}
/**
* Test that getContentSummary can be retrieved on the client side.
*/
@Test
public void testGetContentSummary() throws IOException {
FileSystem hFs = cluster.getFileSystem(0);
final DistributedFileSystem dfs = (DistributedFileSystem)hFs;
dfs.setQuota(testFileDirPath, 100, 500);
ContentSummary cs = vfs.getContentSummary(testFileDirPath);
assertEquals(100, cs.getQuota());
assertEquals(500, cs.getSpaceQuota());
}
/**
* Test that getQuotaUsage can be retrieved on the client side.
*/
@Test
public void testGetQuotaUsage() throws IOException {
FileSystem hFs = cluster.getFileSystem(0);
final DistributedFileSystem dfs = (DistributedFileSystem)hFs;
dfs.setQuota(testFileDirPath, 100, 500);
QuotaUsage qu = vfs.getQuotaUsage(testFileDirPath);
assertEquals(100, qu.getQuota());
assertEquals(500, qu.getSpaceQuota());
}
/**
* Test that getQuotaUsage can be retrieved on the client side if
* storage types are defined.
*/
@Test
public void testGetQuotaUsageWithStorageTypes() throws IOException {
FileSystem hFs = cluster.getFileSystem(0);
final DistributedFileSystem dfs = (DistributedFileSystem)hFs;
dfs.setQuotaByStorageType(testFileDirPath, StorageType.SSD, 500);
dfs.setQuotaByStorageType(testFileDirPath, StorageType.DISK, 600);
QuotaUsage qu = vfs.getQuotaUsage(testFileDirPath);
assertEquals(500, qu.getTypeQuota(StorageType.SSD));
assertEquals(600, qu.getTypeQuota(StorageType.DISK));
}
/**
* Test that getQuotaUsage can be retrieved on the client side if
* quota isn't defined.
*/
@Test
public void testGetQuotaUsageWithQuotaDefined() throws IOException {
FileSystem hFs = cluster.getFileSystem(0);
final DistributedFileSystem dfs = (DistributedFileSystem)hFs;
dfs.setQuota(testFileDirPath, -1, -1);
dfs.setQuotaByStorageType(testFileDirPath, StorageType.SSD, -1);
dfs.setQuotaByStorageType(testFileDirPath, StorageType.DISK, -1);
QuotaUsage qu = vfs.getQuotaUsage(testFileDirPath);
assertEquals(-1, qu.getTypeQuota(StorageType.SSD));
assertEquals(-1, qu.getQuota());
assertEquals(-1, qu.getSpaceQuota());
assertEquals(2, qu.getFileAndDirectoryCount());
assertEquals(0, qu.getTypeConsumed(StorageType.SSD));
assertTrue(qu.getSpaceConsumed() > 0);
}
@AfterClass
public static void cleanup() throws IOException {
fHdfs.delete(new Path(testFileName), true);
fHdfs.delete(notInMountpointPath, true);
}
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.marketplace.MarketplaceRequests;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.PermanentInstallationID;
import com.intellij.openapi.application.ex.ApplicationInfoEx;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.updateSettings.impl.PluginDownloader;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import com.intellij.openapi.util.BuildNumber;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.util.Url;
import com.intellij.util.Urls;
import com.intellij.util.io.URLUtil;
import com.intellij.util.text.VersionComparatorUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Helper for working with plugin repositories: enumerating configured plugin hosts,
 * downloading plugin lists, normalizing the downloaded records and merging results
 * from the Marketplace with custom repositories.
 *
 * @author stathik
 */
public final class RepositoryHelper {
  private static final Logger LOG = Logger.getInstance(RepositoryHelper.class);
  // Cache file name for the main-repository plugin list; presumably the legacy
  // spelling is kept for on-disk compatibility — TODO confirm.
  @SuppressWarnings("SpellCheckingInspection") private static final String PLUGIN_LIST_FILE = "availables.xml";
  private static final String MARKETPLACE_PLUGIN_ID = "com.intellij.marketplace";
  private static final String ULTIMATE_MODULE = "com.intellij.modules.ultimate";

  /**
   * Returns a list of configured plugin hosts.
   * Note that the list always ends with {@code null} element denoting a main plugin repository.
   */
  public static @NotNull List<String> getPluginHosts() {
    List<String> hosts = new ArrayList<>(UpdateSettings.getInstance().getPluginHosts());
    String pluginsUrl = ApplicationInfoEx.getInstanceEx().getBuiltinPluginsUrl();
    // Skip the built-in URL when absent or still the unsubstituted build-time
    // placeholder — NOTE(review): placeholder semantics assumed, confirm.
    if (pluginsUrl != null && !"__BUILTIN_PLUGINS_URL__".equals(pluginsUrl)) {
      hosts.add(pluginsUrl);
    }
    hosts.add(null); // main plugin repository
    return hosts;
  }

  /**
   * Loads list of plugins, compatible with a current build, from a main plugin repository.
   *
   * @deprecated Use `loadPlugins` only for custom repositories. Use {@link MarketplaceRequests} for getting descriptors.
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.2")
  public static @NotNull List<IdeaPluginDescriptor> loadPlugins(@Nullable ProgressIndicator indicator) throws IOException {
    // null repository URL means "main plugin repository" (see the 3-arg overload).
    return loadPlugins(null, indicator);
  }

  /**
   * Use method only for getting plugins from custom repositories
   *
   * @deprecated Please use {@link #loadPlugins(String, BuildNumber, ProgressIndicator)} to get a list of {@link PluginNode}s.
   */
  @Deprecated
  @ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
  public static @NotNull List<IdeaPluginDescriptor> loadPlugins(@Nullable String repositoryUrl,
                                                                @Nullable ProgressIndicator indicator) throws IOException {
    // Copy into a mutable list to keep the historical (pre-PluginNode) return type.
    return new ArrayList<>(loadPlugins(repositoryUrl, null, indicator));
  }

  /**
   * Use method only for getting plugins from custom repositories
   *
   * @param repositoryUrl custom repository URL, or {@code null} for the (deprecated) main repository path
   * @param build         build to check compatibility against, or {@code null} for the current build
   * @param indicator     optional progress indicator for download feedback
   * @return filtered, deduplicated plugin descriptors (see {@code process})
   */
  public static @NotNull List<PluginNode> loadPlugins(@Nullable String repositoryUrl,
                                                      @Nullable BuildNumber build,
                                                      @Nullable ProgressIndicator indicator) throws IOException {
    Path pluginListFile;
    Url url;
    if (repositoryUrl == null) {
      // Legacy main-repository branch: cache the list on disk and pass the installation UUID.
      LOG.error("Using deprecated API for getting plugins from Marketplace");
      String base = ApplicationInfoImpl.getShadowInstance().getPluginsListUrl();
      url = Urls.newFromEncoded(base).addParameters(Map.of("uuid", PermanentInstallationID.get())); // NON-NLS
      pluginListFile = Paths.get(PathManager.getPluginsPath(), PLUGIN_LIST_FILE);
    }
    else {
      url = Urls.newFromEncoded(repositoryUrl);
      pluginListFile = null;
    }

    // file:// URLs are local test/dev repositories; no build parameter is appended for them.
    if (!URLUtil.FILE_PROTOCOL.equals(url.getScheme())) {
      url = url.addParameters(Map.of("build", ApplicationInfoImpl.orFromPluginsCompatibleBuild(build)));
    }

    if (indicator != null) {
      indicator.setText2(IdeBundle.message("progress.connecting.to.plugin.manager", url.getAuthority()));
    }

    List<PluginNode> descriptors = MarketplaceRequests.getInstance()
      .readOrUpdateFile(pluginListFile,
                        url.toExternalForm(),
                        indicator,
                        IdeBundle.message("progress.downloading.list.of.plugins", url.getAuthority()),
                        MarketplaceRequests::parsePluginList);
    return process(descriptors,
                   build != null ? build : PluginManagerCore.getBuildNumber(),
                   repositoryUrl);
  }

  /**
   * Filters out malformed, broken and incompatible records, fills in missing names,
   * and keeps only the highest version per plugin id.
   */
  private static @NotNull List<PluginNode> process(@NotNull List<PluginNode> list,
                                                   @NotNull BuildNumber build,
                                                   @Nullable String repositoryUrl) {
    // LinkedHashMap keeps the first-seen order of plugin ids in the returned list.
    Map<PluginId, PluginNode> result = new LinkedHashMap<>(list.size());

    boolean isPaidPluginsRequireMarketplacePlugin = isPaidPluginsRequireMarketplacePlugin();

    for (PluginNode node : list) {
      PluginId pluginId = node.getPluginId();

      // Custom repositories must provide a download URL for every record.
      if (repositoryUrl != null && node.getDownloadUrl() == null) {
        LOG.debug("Malformed plugin record (id:" + pluginId + " repository:" + repositoryUrl + ")");
        continue;
      }

      if (PluginManagerCore.isBrokenPlugin(node) || PluginManagerCore.isIncompatible(node, build)) {
        LOG.debug("An incompatible plugin (id:" + pluginId + " repository:" + repositoryUrl + ")");
        continue;
      }

      if (repositoryUrl != null) {
        node.setRepositoryName(repositoryUrl);
      }
      // Fall back to the download file name (without extension) when no name was supplied.
      if (node.getName() == null) {
        String url = node.getDownloadUrl();
        node.setName(FileUtilRt.getNameWithoutExtension(url.substring(url.lastIndexOf('/') + 1)));
      }

      // Keep the record with the highest version for each plugin id.
      PluginNode previous = result.get(pluginId);
      if (previous == null || VersionComparatorUtil.compare(node.getVersion(), previous.getVersion()) > 0) {
        result.put(pluginId, node);
      }

      // NOTE(review): this mutates every compatible node, including ones that lose
      // the version comparison above and never reach the result — confirm intended.
      addMarketplacePluginDependencyIfRequired(node, isPaidPluginsRequireMarketplacePlugin);
    }

    return result
      .values()
      .stream()
      .collect(Collectors.toUnmodifiableList());
  }

  /**
   * If plugin is paid (has `productCode`) and IDE is not JetBrains "ultimate" then MARKETPLACE_PLUGIN_ID is required
   */
  public static void addMarketplacePluginDependencyIfRequired(@NotNull PluginNode node) {
    boolean isPaidPluginsRequireMarketplacePlugin = isPaidPluginsRequireMarketplacePlugin();
    addMarketplacePluginDependencyIfRequired(node, isPaidPluginsRequireMarketplacePlugin);
  }

  // True when the IDE cannot license paid plugins natively: either a community IDE
  // (no ultimate module) or a non-JetBrains-vendored build.
  private static boolean isPaidPluginsRequireMarketplacePlugin() {
    boolean isCommunityIDE = !ideContainsUltimateModule();
    boolean isVendorNotJetBrains = !ApplicationInfoImpl.getShadowInstance().isVendorJetBrains();
    return isCommunityIDE || isVendorNotJetBrains;
  }

  // Adds an optional (non-mandatory) dependency on the Marketplace plugin for paid plugins.
  private static void addMarketplacePluginDependencyIfRequired(@NotNull PluginNode node, boolean isPaidPluginsRequireMarketplacePlugin) {
    if (isPaidPluginsRequireMarketplacePlugin && node.getProductCode() != null) {
      node.addDepends(MARKETPLACE_PLUGIN_ID, false);
    }
  }

  // True when the core plugin declares the "ultimate" module.
  private static boolean ideContainsUltimateModule() {
    IdeaPluginDescriptor corePlugin = PluginManagerCore.getPlugin(PluginManagerCore.CORE_ID);
    IdeaPluginDescriptorImpl corePluginImpl = (corePlugin instanceof IdeaPluginDescriptorImpl) ? (IdeaPluginDescriptorImpl)corePlugin : null;
    return corePluginImpl != null && corePluginImpl.getModules().contains(PluginId.getId(ULTIMATE_MODULE));
  }

  /**
   * Merges marketplace and custom-repository plugin lists, preferring the entry with
   * the higher (non-broken, compatible) version for each plugin id.
   *
   * @param addMissing when {@code true}, custom plugins absent from the marketplace list are included
   */
  @ApiStatus.Internal
  public static @NotNull Collection<PluginNode> mergePluginsFromRepositories(@NotNull List<PluginNode> marketplacePlugins,
                                                                             @NotNull List<PluginNode> customPlugins,
                                                                             boolean addMissing) {
    Map<PluginId, PluginNode> compatiblePluginMap = new HashMap<>(marketplacePlugins.size());

    for (PluginNode marketplacePlugin : marketplacePlugins) {
      compatiblePluginMap.put(marketplacePlugin.getPluginId(), marketplacePlugin);
    }

    for (PluginNode customPlugin : customPlugins) {
      PluginId pluginId = customPlugin.getPluginId();
      IdeaPluginDescriptor plugin = compatiblePluginMap.get(pluginId);
      if (plugin == null && addMissing ||
          plugin != null && PluginDownloader.compareVersionsSkipBrokenAndIncompatible(customPlugin.getVersion(), plugin) > 0) {
        compatiblePluginMap.put(pluginId, customPlugin);
      }
    }

    return compatiblePluginMap.values();
  }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.workflow.mgt;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.identity.workflow.mgt.bean.Parameter;
import org.wso2.carbon.identity.workflow.mgt.bean.Workflow;
import org.wso2.carbon.identity.workflow.mgt.bean.WorkflowRequest;
import org.wso2.carbon.identity.workflow.mgt.bean.WorkflowRequestAssociation;
import org.wso2.carbon.identity.workflow.mgt.dto.Association;
import org.wso2.carbon.identity.workflow.mgt.dto.Template;
import org.wso2.carbon.identity.workflow.mgt.dto.WorkflowEvent;
import org.wso2.carbon.identity.workflow.mgt.dto.WorkflowImpl;
import org.wso2.carbon.identity.workflow.mgt.dto.WorkflowWizard;
import org.wso2.carbon.identity.workflow.mgt.exception.InternalWorkflowException;
import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowException;
import org.wso2.carbon.identity.workflow.mgt.exception.WorkflowRuntimeException;
import org.wso2.carbon.identity.workflow.mgt.internal.WorkflowServiceDataHolder;
import org.wso2.carbon.identity.workflow.mgt.template.AbstractTemplate;
import org.wso2.carbon.identity.workflow.mgt.util.WFConstant;
import org.wso2.carbon.identity.workflow.mgt.util.WorkflowRequestStatus;
import org.wso2.carbon.identity.workflow.mgt.workflow.AbstractWorkflow;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
public class WorkflowManagementAdminService {
private static Log log = LogFactory.getLog(WorkflowManagementAdminService.class);
    /**
     * Converts a persisted workflow bean into a fully populated {@link WorkflowWizard} DTO:
     * copies the basic fields, attaches the matching template and workflow-impl metadata
     * from {@link WorkflowServiceDataHolder}, and splits the stored parameters by holder.
     *
     * @param workflowBean persisted workflow bean; may be {@code null}, in which case {@code null} is returned
     * @return the populated wizard DTO, or {@code null} when {@code workflowBean} is {@code null}
     * @throws WorkflowException when reading the workflow parameters fails
     */
    private WorkflowWizard getWorkflow(org.wso2.carbon.identity.workflow.mgt.bean.Workflow workflowBean)
            throws WorkflowException {

        try {
            WorkflowWizard workflow = null;
            if (workflowBean != null) {
                workflow = new WorkflowWizard();
                workflow.setWorkflowId(workflowBean.getWorkflowId());
                workflow.setWorkflowName(workflowBean.getWorkflowName());
                workflow.setWorkflowDescription(workflowBean.getWorkflowDescription());
                //workflow.setTemplateId(workflowBean.getTemplateId());
                //workflow.setWorkflowImplId(workflowBean.getWorkflowImplId());

                // NOTE(review): if no template is registered for this id the lookup returns
                // null and the next line throws NPE — confirm templates are always deployed.
                AbstractTemplate abstractTemplate =
                        WorkflowServiceDataHolder.getInstance().getTemplates().get(workflowBean.getTemplateId());

                Template template = new Template();
                template.setTemplateId(abstractTemplate.getTemplateId());
                template.setName(abstractTemplate.getName());
                template.setDescription(abstractTemplate.getDescription());
                template.setParametersMetaData(abstractTemplate.getParametersMetaData());
                workflow.setTemplate(template);

                // Impl lookup is nested: first by template id, then by impl id.
                AbstractWorkflow abstractWorkflow =
                        WorkflowServiceDataHolder.getInstance().getWorkflowImpls()
                                .get(workflowBean.getTemplateId()).get(workflowBean.getWorkflowImplId());

                WorkflowImpl workflowimpl = new WorkflowImpl();
                workflowimpl.setWorkflowImplId(abstractWorkflow.getWorkflowImplId());
                workflowimpl.setWorkflowImplName(abstractWorkflow.getWorkflowImplName());
                workflowimpl.setTemplateId(abstractWorkflow.getTemplateId());
                workflowimpl.setParametersMetaData(abstractWorkflow.getParametersMetaData());
                workflow.setWorkflowImpl(workflowimpl);

                List<Parameter> workflowParams = WorkflowServiceDataHolder.getInstance().getWorkflowService()
                        .getWorkflowParameters(workflowBean.getWorkflowId());

                // Split stored parameters by their holder (template vs. workflow impl);
                // parameters with any other holder are silently dropped.
                List<Parameter> templateParams = new ArrayList<>();
                List<Parameter> workflowImplParams = new ArrayList<>();
                for (Parameter parameter : workflowParams) {
                    if (parameter.getHolder().equals(WFConstant.ParameterHolder.TEMPLATE)) {
                        templateParams.add(parameter);
                    } else if (parameter.getHolder().equals(WFConstant.ParameterHolder.WORKFLOW_IMPL)) {
                        workflowImplParams.add(parameter);
                    }
                }
                workflow.setTemplateParameters(templateParams.toArray(new Parameter[templateParams.size()]));
                workflow.setWorkflowImplParameters(workflowImplParams
                        .toArray(new Parameter[workflowImplParams.size()]));
            }
            return workflow;
        } catch (InternalWorkflowException e) {
            String errorMsg =
                    "Error occurred while reading workflow object details for given workflow id, " + e.getMessage();
            log.error(errorMsg);
            throw new WorkflowException(errorMsg, e);
        }
    }
public WorkflowWizard getWorkflow(String workflowId) throws WorkflowException {
org.wso2.carbon.identity.workflow.mgt.bean.Workflow workflowBean =
WorkflowServiceDataHolder.getInstance().getWorkflowService().getWorkflow(workflowId);
return getWorkflow(workflowBean);
}
public WorkflowEvent[] listWorkflowEvents() {
List<WorkflowEvent> events = WorkflowServiceDataHolder.getInstance().getWorkflowService().listWorkflowEvents();
return events.toArray(new WorkflowEvent[events.size()]);
}
public Template[] listTemplates() throws WorkflowException {
List<Template> templates = WorkflowServiceDataHolder.getInstance().getWorkflowService().listTemplates();
return templates.toArray(new Template[templates.size()]);
}
public Template getTemplate(String templateId) throws WorkflowException {
return WorkflowServiceDataHolder.getInstance().getWorkflowService().getTemplate(templateId);
}
public WorkflowImpl getWorkflowImpl(String templateId, String implementationId) throws WorkflowException {
return WorkflowServiceDataHolder.getInstance().getWorkflowService().getWorkflowImpl(templateId,
implementationId);
}
public WorkflowImpl[] listWorkflowImpls(String templateId) throws WorkflowException {
List<WorkflowImpl> workflowList =
WorkflowServiceDataHolder.getInstance().getWorkflowService().listWorkflowImpls(templateId);
return workflowList.toArray(new WorkflowImpl[workflowList.size()]);
}
public void addWorkflow(WorkflowWizard workflow) throws WorkflowException {
String id = workflow.getWorkflowId();
if (StringUtils.isBlank(id)) {
id = UUID.randomUUID().toString();
}
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
try {
org.wso2.carbon.identity.workflow.mgt.bean.Workflow workflowBean = new org.wso2.carbon.identity.workflow
.mgt.bean.Workflow();
workflowBean.setWorkflowId(id);
workflowBean.setWorkflowName(workflow.getWorkflowName());
workflowBean.setWorkflowDescription(workflow.getWorkflowDescription());
String templateId = workflow.getTemplateId() == null ? workflow.getTemplate().getTemplateId() :
workflow.getTemplateId();
if (templateId == null) {
throw new WorkflowException("template id can't be empty");
}
workflowBean.setTemplateId(templateId);
String workflowImplId =
workflow.getWorkflowImplId() == null ? workflow.getWorkflowImpl().getWorkflowImplId() :
workflow.getWorkflowImplId();
if (workflowImplId == null) {
throw new WorkflowException("workflowimpl id can't be empty");
}
workflowBean.setWorkflowImplId(workflowImplId);
List<Parameter> parameterList = new ArrayList<>();
if (workflow.getTemplateParameters() != null) {
parameterList.addAll(Arrays.asList(workflow.getTemplateParameters()));
}
if (workflow.getWorkflowImplParameters() != null) {
parameterList.addAll(Arrays.asList(workflow.getWorkflowImplParameters()));
}
WorkflowServiceDataHolder.getInstance().getWorkflowService()
.addWorkflow(workflowBean, parameterList, tenantId);
} catch (WorkflowRuntimeException e) {
log.error("Error when adding workflow " + workflow.getWorkflowName(), e);
throw new WorkflowException(e.getMessage());
} catch (WorkflowException e) {
log.error("Server error when adding workflow " + workflow.getWorkflowName(), e);
throw new WorkflowException("Server error occurred when adding the workflow");
}
}
public void addAssociation(String associationName, String workflowId, String eventId, String condition) throws
WorkflowException {
try {
WorkflowServiceDataHolder.getInstance().getWorkflowService()
.addAssociation(associationName, workflowId, eventId, condition);
} catch (WorkflowRuntimeException e) {
log.error("Error when adding association " + associationName, e);
throw new WorkflowException(e.getMessage());
} catch (WorkflowException e) {
log.error("Server error when adding association of workflow " + workflowId + " with " + eventId, e);
throw new WorkflowException("Server error occurred when associating the workflow with the event");
}
}
public void changeAssociationState(String associationId, boolean isEnable) throws WorkflowException {
try {
WorkflowServiceDataHolder.getInstance().getWorkflowService()
.changeAssociationState(associationId, isEnable);
} catch (WorkflowRuntimeException e) {
log.error("Error when changing an association ", e);
throw new WorkflowException(e.getMessage());
} catch (WorkflowException e) {
log.error("Server error when changing state of association ", e);
throw new WorkflowException("Server error occurred when changing the state of association");
}
}
public WorkflowWizard[] listWorkflows() throws WorkflowException {
List<WorkflowWizard> workflowWizards = new ArrayList<>();
List<Workflow> workflowBeans = null;
int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
try {
workflowBeans = WorkflowServiceDataHolder.getInstance().getWorkflowService().listWorkflows(tenantId);
for (Workflow workflow : workflowBeans) {
WorkflowWizard workflowTmp = getWorkflow(workflow);
workflowWizards.add(workflowTmp);
}
} catch (InternalWorkflowException e) {
log.error("Server error when listing workflows", e);
throw new WorkflowException("Server error occurred when listing workflows");
}
return workflowWizards.toArray(new WorkflowWizard[workflowWizards.size()]);
}
/**
 * Removes the workflow with the given id.
 *
 * @param id id of the workflow to remove
 * @throws WorkflowException when the underlying service fails to remove the workflow
 */
public void removeWorkflow(String id) throws WorkflowException {
    try {
        WorkflowServiceDataHolder.getInstance().getWorkflowService().removeWorkflow(id);
    } catch (InternalWorkflowException e) {
        log.error("Server error when removing workflow " + id, e);
        throw new WorkflowException("Server error occurred when removing workflow");
    }
}
/**
 * Removes a workflow-event association.
 *
 * @param associationId id of the association to remove; must be a decimal integer string
 * @throws WorkflowException if the id is not numeric or the underlying service fails
 */
public void removeAssociation(String associationId) throws WorkflowException {
    int id;
    try {
        id = Integer.parseInt(associationId);
    } catch (NumberFormatException e) {
        // Bug fix: previously an unchecked NumberFormatException escaped a method
        // that declares the checked WorkflowException; surface it as the declared type.
        log.error("Invalid association id: " + associationId, e);
        throw new WorkflowException("Invalid association id: " + associationId);
    }
    try {
        WorkflowServiceDataHolder.getInstance().getWorkflowService()
                .removeAssociation(id);
    } catch (InternalWorkflowException e) {
        log.error("Server error when removing association " + associationId, e);
        throw new WorkflowException("Server error occurred when removing association");
    }
}
/**
 * Lists the associations registered for a given workflow.
 *
 * @param workflowId id of the workflow whose associations are requested
 * @return associations of the workflow; an empty array when none exist
 * @throws WorkflowException when the workflow service fails to fetch the associations
 */
public Association[] listAssociations(String workflowId) throws WorkflowException {
    final List<Association> result;
    try {
        result = WorkflowServiceDataHolder.getInstance().getWorkflowService()
                .getAssociationsForWorkflow(workflowId);
    } catch (InternalWorkflowException e) {
        log.error("Server error when listing associations for workflow id:" + workflowId, e);
        throw new WorkflowException("Server error when listing associations");
    }
    // Guard against both null and empty results from the service layer.
    return CollectionUtils.isEmpty(result) ? new Association[0]
            : result.toArray(new Association[result.size()]);
}
/**
 * Lists every workflow-event association known to the workflow service.
 *
 * @return all associations; an empty array when none exist
 * @throws WorkflowException when the workflow service fails to fetch the associations
 */
public Association[] listAllAssociations() throws WorkflowException {
    final List<Association> result;
    try {
        result = WorkflowServiceDataHolder.getInstance().getWorkflowService().listAllAssociations();
    } catch (InternalWorkflowException e) {
        log.error("Server error when listing all associations", e);
        throw new WorkflowException("Server error when listing associations");
    }
    // Guard against both null and empty results from the service layer.
    return CollectionUtils.isEmpty(result) ? new Association[0]
            : result.toArray(new Association[result.size()]);
}
//TODO:Below method should refactor
/**
 * Looks up a workflow event by its id.
 *
 * @param eventId id of the event to fetch
 * @return the matching event; behaviour for unknown ids is defined by the
 *         underlying service (presumably null — TODO confirm)
 */
public WorkflowEvent getEvent(String eventId) {
    return WorkflowServiceDataHolder.getInstance().getWorkflowService().getEvent(eventId);
}
/**
 * Returns array of requests initiated by a user.
 *
 * @param user         user name whose requests are requested
 * @param beginDate    start of the date range filter (string format defined by
 *                     the workflow service — TODO confirm)
 * @param endDate      end of the date range filter
 * @param dateCategory which timestamp the range applies to (created vs. updated,
 *                     per the sibling filter method's contract)
 * @param status       request status to filter by
 * @return requests matching the filter for the current tenant
 * @throws WorkflowException if the underlying workflow service fails
 */
public WorkflowRequest[] getRequestsCreatedByUser(String user, String beginDate, String endDate, String
        dateCategory, String status) throws WorkflowException {
    int tenant = CarbonContext.getThreadLocalCarbonContext().getTenantId();
    return WorkflowServiceDataHolder.getInstance().getWorkflowService()
            .getRequestsFromFilter(user, beginDate, endDate, dateCategory, tenant, status);
}
/**
 * Return array of requests according to createdAt and updatedAt filter.
 * Delegates to the same service filter as {@code getRequestsCreatedByUser}
 * but with an empty user name, i.e. no user filtering.
 *
 * @param beginDate    start of the date range filter
 * @param endDate      end of the date range filter
 * @param dateCategory which timestamp the range applies to (created vs. updated)
 * @param status       request status to filter by
 * @return requests matching the filter for the current tenant
 * @throws WorkflowException if the underlying workflow service fails
 */
public WorkflowRequest[] getRequestsInFilter(String beginDate, String endDate, String
        dateCategory, String status) throws WorkflowException {
    int tenant = CarbonContext.getThreadLocalCarbonContext().getTenantId();
    return WorkflowServiceDataHolder.getInstance().getWorkflowService()
            .getRequestsFromFilter("", beginDate, endDate, dateCategory, tenant, status);
}
/**
 * Move Workflow request to DELETED state.
 *
 * @param requestId id of the workflow request to delete
 * @throws WorkflowException if the underlying workflow service fails
 */
public void deleteWorkflowRequest(String requestId) throws WorkflowException {
    WorkflowServiceDataHolder.getInstance().getWorkflowService()
            .deleteWorkflowRequest(requestId);
}
/**
 * Get workflows of a request.
 *
 * @param requestId id of the request whose workflow associations are requested
 * @return the workflows associated with the request
 * @throws WorkflowException if the underlying workflow service fails
 */
public WorkflowRequestAssociation[] getWorkflowsOfRequest(String requestId) throws WorkflowException {
    return WorkflowServiceDataHolder.getInstance().getWorkflowService().getWorkflowsOfRequest(requestId);
}
}
| |
package edu.unicen.nbody;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.concurrent.Semaphore;
//required to paint on screen
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
/**
* Based on the tutorial: http://physics.princeton.edu/~fpretori/Nbody/index.htm
* Code refactored by Juan Manuel Rodriguez
*/
/**
 * Based on the tutorial: http://physics.princeton.edu/~fpretori/Nbody/index.htm
 * Code refactored by Juan Manuel Rodriguez
 *
 * Swing panel running a brute-force O(N^2) N-body gravity simulation.
 * A background thread advances the simulation and requests a repaint after
 * every step; a semaphore makes the thread wait until the frame has actually
 * been painted before computing the next step.
 */
public class Simulation extends JPanel {

    private static final long serialVersionUID = 7505333197481537020L;

    // Number of simulated bodies; re-read from the text field on restart.
    private int nBodies = 100;
    private Body bodies[];
    private JTextField editText;
    private JLabel label;
    private JButton stop;
    private JButton restart;
    private JCheckBox benchmark;
    // Set to false to make the simulation thread exit at its next check.
    private volatile boolean shouldrun = false;
    // Released by paint() so the simulation thread may produce the next frame.
    private volatile Semaphore waitPaint;

    public static void main(String[] args) {
        (new Simulation()).init();
    }

    /** Builds the frame and control panel and shows the (initially stopped) simulation. */
    public void init() {
        startthebodies(nBodies);
        editText = new JTextField("100", 5);
        restart = new JButton("Restart");
        stop = new JButton("Stop");
        stop.setEnabled(false);
        label = new JLabel("Number of bodies:");
        ButtonListener myButtonListener = new ButtonListener();
        stop.addActionListener(myButtonListener);
        restart.addActionListener(myButtonListener);
        benchmark = new JCheckBox("Benchmark"); // fixed "Bechmark" typo in the label
        benchmark.setSelected(false);
        JFrame jFrame = new JFrame("NBody");
        jFrame.setBounds(100, 100, 1028, 1028);
        jFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        jFrame.setLayout(new BorderLayout());
        jFrame.add(this, BorderLayout.CENTER);
        JPanel panel = new JPanel(new FlowLayout());
        panel.add(label);
        panel.add(editText);
        panel.add(restart);
        panel.add(stop);
        panel.add(benchmark);
        jFrame.add(panel, BorderLayout.PAGE_END); // removed stray double semicolon
        jFrame.setVisible(true);
    }

    /** Stops the simulation thread, waking it if it is blocked waiting for a paint. */
    public void stop() {
        shouldrun = false;
        if ((waitPaint != null) && (waitPaint.availablePermits() == 0)) {
            waitPaint.release();
        }
    }

    /**
     * Draws the bodies at their current positions and releases the paint
     * semaphore so the simulation thread can compute the next step.
     */
    public void paint(Graphics g) {
        int w = this.getWidth() / 2;
        int h = this.getHeight() / 2;
        g.translate(w, h); // move the origin from the top-left corner to the centre
        // Bug fix: the previous call clearRect(-w, -w, 2*getWidth(), 2*getHeight())
        // used the half-WIDTH for the y origin, leaving a band of the panel
        // uncleared whenever the panel was taller than it was wide.
        g.clearRect(-w, -h, getWidth(), getHeight());
        // Only draw while the simulation is running; otherwise leave the panel blank.
        if (shouldrun) {
            for (int i = 0; i < nBodies; i++) {
                g.setColor(bodies[i].getColor());
                g.fillOval((int) Math.round(bodies[i].getRx() * w / 1e18),
                        (int) Math.round(bodies[i].getRy() * h / 1e18), 8, 8);
            }
            this.waitPaint.release();
        }
    }

    /**
     * Speed of a circular orbit of radius |(rx, ry)| around the central mass
     * (v = sqrt(G * M / r)).
     */
    public static double circlev(double rx, double ry) {
        double solarmass = 1.98892e30;
        double r2 = Math.sqrt(rx * rx + ry * ry);
        double numerator = (6.67e-11) * 1e6 * solarmass;
        return Math.sqrt(numerator / r2);
    }

    /**
     * (Re)initialises N bodies with random positions and circular velocities
     * around a heavy central mass placed at the origin.
     */
    public void startthebodies(int N) {
        double solarmass = 1.98892e30;
        bodies = new Body[N];
        for (int i = 0; i < N; i++) {
            double px = 1e18 * exp(-1.8) * (.5 - Math.random());
            double py = 1e18 * exp(-1.8) * (.5 - Math.random());
            double magv = circlev(px, py);
            double absangle = Math.atan(Math.abs(py / px));
            double thetav = Math.PI / 2 - absangle;
            double vx = -1 * Math.signum(py) * Math.cos(thetav) * magv;
            double vy = Math.signum(px) * Math.sin(thetav) * magv;
            // Flip half of the orbits at random so they don't all rotate the same way.
            if (Math.random() <= .5) {
                vx = -vx;
                vy = -vy;
            }
            double mass = Math.random() * solarmass * 10 + 1e20;
            // Colour the masses in green gradients scaled by mass.
            int red = (int) Math.floor(mass * 254 / (solarmass * 10 + 1e20));
            int blue = (int) Math.floor(mass * 254 / (solarmass * 10 + 1e20));
            int green = 255;
            Color color = new Color(red, green, blue);
            bodies[i] = new Body(px, py, vx, vy, mass, color);
        }
        // Heavy central body at the origin (overwrites index 0 on purpose).
        bodies[0] = new Body(0, 0, 0, 0, 1e6 * solarmass, Color.red);
    }

    /** Samples an exponential distribution with rate {@code lambda}. */
    public static double exp(double lambda) {
        return -Math.log(1 - Math.random()) / lambda;
    }

    /** Handles the Restart and Stop buttons. */
    public class ButtonListener implements ActionListener {
        public void actionPerformed(ActionEvent evt) {
            // Dispatch on the action command (button label).
            String arg = evt.getActionCommand();
            if (arg.equals("Restart")) {
                // Bug fix: a malformed number previously threw an unhandled
                // NumberFormatException out of the event dispatch thread.
                try {
                    nBodies = Integer.parseInt(editText.getText().trim());
                } catch (NumberFormatException nfe) {
                    editText.setText(Integer.toString(nBodies)); // keep previous value
                }
                shouldrun = true;
                startthebodies(nBodies);
                waitPaint = new Semaphore(1);
                Thread t = new Thread(() -> {
                    repaint();
                    while (shouldrun) {
                        try {
                            // Wait until the previous frame has been painted.
                            waitPaint.acquire();
                        } catch (InterruptedException e) {
                            // Restore the interrupt flag instead of swallowing it.
                            Thread.currentThread().interrupt();
                            e.printStackTrace();
                        }
                        if (!shouldrun) {
                            System.out.println("Closing thread");
                            return;
                        }
                        long time = System.currentTimeMillis();
                        // Accumulate pairwise forces: two nested loops --> N^2 complexity.
                        for (int i = 0; i < nBodies; i++) {
                            bodies[i].resetForce();
                            for (int j = 0; j < nBodies; j++) {
                                if (i != j) bodies[i].addForce(bodies[j]);
                            }
                        }
                        // Then, loop again and update the bodies using timestep dt.
                        for (int i = 0; i < nBodies; i++) {
                            bodies[i].update(1e11);
                        }
                        if (benchmark.isSelected()) {
                            System.err.println("Frame time: " + (System.currentTimeMillis() - time) + " ms.");
                        }
                        repaint();
                    }
                    System.out.println("Closing thread");
                });
                t.start();
                restart.setEnabled(false);
                stop.setEnabled(true);
            } else if (arg.equals("Stop")) {
                stop();
                restart.setEnabled(true);
                stop.setEnabled(false);
            }
        }
    }
}
| |
/**
* The MIT License (MIT)
*
* Copyright (C) 2015 Andreas Grimmer <a.grimmer@gmx.at>
* Christoph Sperl <ch.sperl@gmx.at>
* Stefan Wurzinger <swurzinger@gmx.at>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.java.html.leaflet;
import net.java.html.js.JavaScriptBody;
import static net.java.html.leaflet.ILayer.registerLayerType;
/**
* A class for drawing polygon overlays on a map.
*/
/**
 * A polygon overlay that can be drawn on a map. Extends {@link PolyLine} and
 * narrows every fluent method's return type to {@code Polygon}.
 */
public class Polygon extends PolyLine {

    static {
        // Ensure the Options JS machinery is initialised, then teach the layer
        // registry how to wrap a raw JS polygon back into this Java type.
        Options.initJS();
        registerLayerType("L.Polygon", new Function<Object, ILayer>() {
            @Override
            public ILayer apply(Object obj) {
                return new Polygon(obj);
            }
        });
    }

    /** Wraps an already-existing JavaScript polygon object. */
    Polygon(Object jsObj) {
        super(jsObj);
    }

    /**
     * Creates a polygon from the given geographical points, using default
     * polyline options.
     *
     * @param latlngs polygon vertices; do not repeat the first point as the last one
     */
    public Polygon(LatLng[] latlngs) {
        this(latlngs, new PolyLineOptions());
    }

    /**
     * Creates a polygon from the given geographical points and an options
     * object (the same options type used by Polyline).
     *
     * @param latlngs polygon vertices; do not repeat the first point as the last one
     * @param options configuration options
     */
    public Polygon(LatLng[] latlngs, PolyLineOptions options) {
        super(createHelper(latlngs, options));
    }

    /** Converts each vertex to its raw JS object and builds the JS polygon. */
    private static Object createHelper(LatLng[] latlngs, PolyLineOptions options) {
        final Object[] rawPoints = new Object[latlngs.length];
        for (int idx = 0; idx < rawPoints.length; idx++) {
            rawPoints[idx] = latlngs[idx].getJSObj();
        }
        return create(rawPoints, options.getJSObj());
    }

    @JavaScriptBody(args = {"latlngs", "options"}, body
            = "return L.polygon(latlngs, options);")
    private static native Object create(Object[] latlngs, Object options);

    // ------- Methods -------------------------------------------
    //TODO: GeoJSON wrapper
    /*
    public String toGeoJSON() {
        return toGeoJSONInternal(jsObj);
    }
    */
    //TODO: GeoJSON wrapper
    /*
    @JavaScriptBody(args = { "jsObj" }, body =
            "return jsObj.toGeoJSON();")
    private static native Object toGeoJSONInternal(Object jsObj);
    */

    // ------- PolyLine Methods -------------------------------------------

    /**
     * Appends a single point to this polygon.
     *
     * @param latlng the point to append
     * @return this
     */
    @Override
    public Polygon addLatLng(LatLng latlng) {
        super.addLatLng(latlng);
        return this;
    }

    /**
     * Replaces every point of this polygon with the given array.
     *
     * @param latlngs the points that replace the current ones
     * @return this
     */
    @Override
    public Polygon setLatLngs(LatLng[] latlngs) {
        super.setLatLngs(latlngs);
        return this;
    }

    // ------- Path Methods -------------------------------------------

    /**
     * Adds this layer to the given map.
     *
     * @param map the target map
     * @return this
     */
    @Override
    public Polygon addTo(Map map) {
        super.addTo(map);
        return this;
    }

    /**
     * Applies the appearance settings from the given {@link PathOptions}.
     *
     * @param options path configuration options
     * @return this
     */
    @Override
    public Polygon setStyle(PathOptions options) {
        super.setStyle(options);
        return this;
    }

    /**
     * Moves this layer to the top of all path layers.
     *
     * @return this
     */
    @Override
    public Polygon bringToFront() {
        super.bringToFront();
        return this;
    }

    /**
     * Moves this layer to the bottom of all path layers.
     *
     * @return this
     */
    @Override
    public Polygon bringToBack() {
        super.bringToBack();
        return this;
    }

    /**
     * Forces the layer to be drawn again; useful after the underlying
     * coordinates have changed.
     *
     * @return this
     */
    @Override
    public Polygon redraw() {
        super.redraw();
        return this;
    }

    // ------- Popup methods -------------------------------------------

    /**
     * Attaches a popup with the given HTML content that opens when this
     * polygon is clicked; it can also be opened via <code>openPopup</code>.
     *
     * @param html HTML content of the popup
     * @return this
     */
    @Override
    public Polygon bindPopup(String html) {
        super.bindPopup(html);
        return this;
    }

    /**
     * Attaches the given popup so it opens when this polygon is clicked; it
     * can also be opened via <code>openPopup</code>.
     *
     * @param popup popup object
     * @return this
     */
    @Override
    public Polygon bindPopup(Popup popup) {
        super.bindPopup(popup);
        return this;
    }

    /**
     * Attaches the given popup with explicit popup options so it opens when
     * this polygon is clicked; it can also be opened via <code>openPopup</code>.
     *
     * @param popup popup object
     * @param options popup configuration options
     * @return this
     */
    @Override
    public Polygon bindPopup(Popup popup, PopupOptions options) {
        super.bindPopup(popup, options);
        return this;
    }

    /**
     * Detaches the popup previously attached with <code>bindPopup</code>.
     *
     * @return this
     */
    @Override
    public Polygon unbindPopup() {
        super.unbindPopup();
        return this;
    }

    /**
     * Opens the popup previously attached with <code>bindPopup</code>.
     *
     * @return this
     */
    @Override
    public Polygon openPopup() {
        super.openPopup();
        return this;
    }

    /**
     * Closes this polygon's popup if it is currently open.
     *
     * @return this
     */
    @Override
    public Polygon closePopup() {
        super.closePopup();
        return this;
    }
}
| |
/*
* Copyright (C) 2013, 2014 Brett Wooldridge
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaxxer.hikari;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.zaxxer.hikari.util.PropertyBeanSetter;
import com.zaxxer.hikari.util.UtilityElf;
public abstract class AbstractHikariConfig implements HikariConfigMBean
{
private static final Logger LOGGER = LoggerFactory.getLogger(HikariConfig.class);
private static final long CONNECTION_TIMEOUT = TimeUnit.SECONDS.toMillis(30);
private static final long VALIDATION_TIMEOUT = TimeUnit.SECONDS.toMillis(5);
private static final long IDLE_TIMEOUT = TimeUnit.MINUTES.toMillis(10);
private static final long MAX_LIFETIME = TimeUnit.MINUTES.toMillis(30);
private static int poolNumber;
private static boolean unitTest;
// Properties changeable at runtime through the MBean
//
private volatile long connectionTimeout;
private volatile long validationTimeout;
private volatile long idleTimeout;
private volatile long leakDetectionThreshold;
private volatile long maxLifetime;
private volatile int maxPoolSize;
private volatile int minIdle;
// Properties NOT changeable at runtime
//
private String catalog;
private String connectionCustomizerClassName;
private String connectionInitSql;
private String connectionTestQuery;
private String dataSourceClassName;
private String dataSourceJndiName;
private String driverClassName;
private String jdbcUrl;
private String password;
private String poolName;
private String transactionIsolationName;
private String username;
private boolean isAutoCommit;
private boolean isReadOnly;
private boolean isInitializationFailFast;
private boolean isIsolateInternalQueries;
private boolean isRegisterMbeans;
private boolean isAllowPoolSuspension;
private DataSource dataSource;
private Properties dataSourceProperties;
private IConnectionCustomizer customizer;
private ThreadFactory threadFactory;
private Object metricRegistry;
private Object healthCheckRegistry;
private Properties healthCheckProperties;
/**
 * Default constructor. Initialises timeouts and pool sizing to the class
 * defaults, installs a no-op connection customizer, and — if the
 * {@code hikaricp.configurationFile} system property is set — loads
 * additional settings from that file.
 */
public AbstractHikariConfig()
{
   dataSourceProperties = new Properties();
   healthCheckProperties = new Properties();

   connectionTimeout = CONNECTION_TIMEOUT;
   validationTimeout = VALIDATION_TIMEOUT;
   idleTimeout = IDLE_TIMEOUT;

   isAutoCommit = true;
   isInitializationFailFast = true;

   minIdle = -1;  // -1 sentinel: minimumIdle not explicitly configured
   maxPoolSize = 10;
   maxLifetime = MAX_LIFETIME;
   // No-op customizer so callers never have to null-check it.
   customizer = new IConnectionCustomizer() {
      @Override
      public void customize(Connection connection) throws SQLException
      {
      }
   };

   String systemProp = System.getProperty("hikaricp.configurationFile");
   if ( systemProp != null) {
      loadProperties(systemProp);
   }
}

/**
 * Construct a HikariConfig from the specified properties object.
 *
 * @param properties a {@link Properties} object holding configuration values
 */
public AbstractHikariConfig(Properties properties)
{
   this();
   PropertyBeanSetter.setTargetFromProperties(this, properties);
}

/**
 * Construct a HikariConfig from the specified property file name. <code>propertyFileName</code>
 * will first be treated as a path in the file-system, and if that fails the
 * ClassLoader.getResourceAsStream(propertyFileName) will be tried.
 *
 * @param propertyFileName the name of the property file
 */
public AbstractHikariConfig(String propertyFileName)
{
   this();
   loadProperties(propertyFileName);
}
/**
* Get the default catalog name to be set on connections.
*
* @return the default catalog name
*/
public String getCatalog()
{
return catalog;
}
/**
* Set the default catalog name to be set on connections.
*
* @param catalog the catalog name, or null
*/
public void setCatalog(String catalog)
{
this.catalog = catalog;
}
/**
* Get the name of the connection customizer class to instantiate and execute
* on all new connections.
*
* @return the name of the customizer class, or null
*/
@Deprecated
public String getConnectionCustomizerClassName()
{
return connectionCustomizerClassName;
}
/**
* Set the name of the connection customizer class to instantiate and execute
* on all new connections.
*
* @param connectionCustomizerClassName the name of the customizer class
*/
@Deprecated
public void setConnectionCustomizerClassName(String connectionCustomizerClassName)
{
this.connectionCustomizerClassName = connectionCustomizerClassName;
LOGGER.warn("The connectionCustomizerClassName property has been deprecated and may be removed in a future release");
}
/**
* Get the customizer instance specified by the user.
*
* @return an instance of IConnectionCustomizer
*/
@Deprecated
public IConnectionCustomizer getConnectionCustomizer()
{
return customizer;
}
/**
* Set the connection customizer to be used by the pool.
*
* @param customizer an instance of IConnectionCustomizer
*/
@Deprecated
public void setConnectionCustomizer(IConnectionCustomizer customizer)
{
this.customizer = customizer;
LOGGER.warn("The connectionCustomizer property has been deprecated and may be removed in a future release");
}
/**
* Get the SQL query to be executed to test the validity of connections.
*
* @return the SQL query string, or null
*/
public String getConnectionTestQuery()
{
return connectionTestQuery;
}
/**
* Set the SQL query to be executed to test the validity of connections. Using
* the JDBC4 <code>Connection.isValid()</code> method to test connection validity can
* be more efficient on some databases and is recommended. See
* {@link HikariConfig#setJdbc4ConnectionTest(boolean)}.
*
* @param connectionTestQuery a SQL query string
*/
public void setConnectionTestQuery(String connectionTestQuery)
{
this.connectionTestQuery = connectionTestQuery;
}
/**
* Get the SQL string that will be executed on all new connections when they are
* created, before they are added to the pool.
*
* @return the SQL to execute on new connections, or null
*/
public String getConnectionInitSql()
{
return connectionInitSql;
}
/**
* Set the SQL string that will be executed on all new connections when they are
* created, before they are added to the pool. If this query fails, it will be
* treated as a failed connection attempt.
*
* @param connectionInitSql the SQL to execute on new connections
*/
public void setConnectionInitSql(String connectionInitSql)
{
this.connectionInitSql = connectionInitSql;
}
/** {@inheritDoc} */
@Override
public long getConnectionTimeout()
{
   return connectionTimeout;
}

/** {@inheritDoc} */
@Override
public void setConnectionTimeout(long connectionTimeoutMs)
{
   // 0 is a special value meaning "effectively no timeout".
   if (connectionTimeoutMs == 0) {
      this.connectionTimeout = Integer.MAX_VALUE;
   }
   else if (connectionTimeoutMs < 1000) {
      throw new IllegalArgumentException("connectionTimeout cannot be less than 1000ms");
   }
   else {
      this.connectionTimeout = connectionTimeoutMs;
   }
}

/** {@inheritDoc} */
@Override
public long getValidationTimeout()
{
   return validationTimeout;
}

/** {@inheritDoc} */
@Override
public void setValidationTimeout(long validationTimeoutMs)
{
   // Unlike connectionTimeout, no 0 == "infinite" special case here.
   if (validationTimeoutMs < 1000) {
      throw new IllegalArgumentException("validationTimeout cannot be less than 1000ms");
   }
   else {
      this.validationTimeout = validationTimeoutMs;
   }
}
/**
* Get the {@link DataSource} that has been explicitly specified to be wrapped by the
* pool.
*
* @return the {@link DataSource} instance, or null
*/
public DataSource getDataSource()
{
return dataSource;
}
/**
* Set a {@link DataSource} for the pool to explicitly wrap. This setter is not
* available through property file based initialization.
*
* @param dataSource a specific {@link DataSource} to be wrapped by the pool
*/
public void setDataSource(DataSource dataSource)
{
this.dataSource = dataSource;
}
public String getDataSourceClassName()
{
return dataSourceClassName;
}
public void setDataSourceClassName(String className)
{
this.dataSourceClassName = className;
}
public void addDataSourceProperty(String propertyName, Object value)
{
dataSourceProperties.put(propertyName, value);
}
public String getDataSourceJNDI()
{
return this.dataSourceJndiName;
}
public void setDataSourceJNDI(String jndiDataSource)
{
this.dataSourceJndiName = jndiDataSource;
}
public Properties getDataSourceProperties()
{
return dataSourceProperties;
}
public void setDataSourceProperties(Properties dsProperties)
{
dataSourceProperties.putAll(dsProperties);
}
/**
 * Get the JDBC driver class name configured for the pool, or null.
 *
 * @return the fully qualified driver class name, or null if not set
 */
public String getDriverClassName()
{
   return driverClassName;
}

/**
 * Set the JDBC driver class to use when connecting via jdbcUrl. The class is
 * loaded and instantiated eagerly so misconfiguration fails fast
 * (instantiating presumably also triggers the driver's DriverManager
 * self-registration — TODO confirm).
 *
 * @param driverClassName fully qualified name of the JDBC driver class
 * @throws RuntimeException if the class cannot be loaded or instantiated
 */
public void setDriverClassName(String driverClassName)
{
   try {
      Class<?> driverClass = this.getClass().getClassLoader().loadClass(driverClassName);
      driverClass.newInstance();
      this.driverClassName = driverClassName;
   }
   catch (Exception e) {
      throw new RuntimeException("driverClassName specified class '" + driverClassName + "' could not be loaded", e);
   }
}
/** {@inheritDoc} */
@Override
public long getIdleTimeout()
{
return idleTimeout;
}
/** {@inheritDoc} */
@Override
public void setIdleTimeout(long idleTimeoutMs)
{
if (idleTimeoutMs < 0) {
throw new IllegalArgumentException("idleTimeout cannot be negative");
}
this.idleTimeout = idleTimeoutMs;
}
public String getJdbcUrl()
{
return jdbcUrl;
}
public void setJdbcUrl(String jdbcUrl)
{
this.jdbcUrl = jdbcUrl;
}
/**
* Get the default auto-commit behavior of connections in the pool.
*
* @return the default auto-commit behavior of connections
*/
public boolean isAutoCommit()
{
return isAutoCommit;
}
/**
* Set the default auto-commit behavior of connections in the pool.
*
* @param isAutoCommit the desired auto-commit default for connections
*/
public void setAutoCommit(boolean isAutoCommit)
{
this.isAutoCommit = isAutoCommit;
}
/**
* Get the pool suspension behavior (allowed or disallowed).
*
* @return the pool suspension behavior
*/
public boolean isAllowPoolSuspension()
{
return isAllowPoolSuspension;
}
/**
* Set whether or not pool suspension is allowed. There is a performance
* impact when pool suspension is enabled. Unless you need it (for a
* redundancy system for example) do not enable it.
*
* @param isAllowPoolSuspension the desired pool suspension allowance
*/
public void setAllowPoolSuspension(boolean isAllowPoolSuspension)
{
this.isAllowPoolSuspension = isAllowPoolSuspension;
}
/**
* Get whether or not the construction of the pool should throw an exception
* if the minimum number of connections cannot be created.
*
* @return whether or not initialization should fail on error immediately
*/
public boolean isInitializationFailFast()
{
return isInitializationFailFast;
}
/**
* Set whether or not the construction of the pool should throw an exception
* if the minimum number of connections cannot be created.
*
* @param failFast true if the pool should fail if the minimum connections cannot be created
*/
public void setInitializationFailFast(boolean failFast)
{
isInitializationFailFast = failFast;
}
public boolean isIsolateInternalQueries()
{
return isIsolateInternalQueries;
}
public void setIsolateInternalQueries(boolean isolate)
{
this.isIsolateInternalQueries = isolate;
}
@Deprecated
public boolean isJdbc4ConnectionTest()
{
return false;
}
@Deprecated
public void setJdbc4ConnectionTest(boolean useIsValid)
{
// ignored deprecated property
LOGGER.warn("The jdbcConnectionTest property is now deprecated, see the documentation for connectionTestQuery");
}
/**
 * Get the Codahale MetricRegistry, could be null.
 *
 * @return the codahale MetricRegistry instance
 */
public Object getMetricRegistry()
{
   return metricRegistry;
}

/**
 * Set a Codahale MetricRegistry to use for HikariCP. Accepts either an
 * actual {@code MetricRegistry} instance or a String, which is treated as a
 * JNDI name to look the registry up by.
 *
 * @param metricRegistry the Codahale MetricRegistry instance, or a JNDI name
 * @throws IllegalArgumentException if the JNDI lookup fails or the resolved
 *         object is not a {@code com.codahale.metrics.MetricRegistry}
 */
public void setMetricRegistry(Object metricRegistry)
{
   if (metricRegistry != null) {
      if (metricRegistry instanceof String) {
         try {
            // Treat a String value as a JNDI name and resolve it.
            InitialContext initCtx = new InitialContext();
            metricRegistry = (MetricRegistry) initCtx.lookup((String) metricRegistry);
         }
         catch (NamingException e) {
            throw new IllegalArgumentException(e);
         }
      }

      if (!(metricRegistry instanceof MetricRegistry)) {
         throw new IllegalArgumentException("Class must be an instance of com.codahale.metrics.MetricRegistry");
      }
   }

   this.metricRegistry = metricRegistry;
}
/**
 * Get the Codahale HealthCheckRegistry, could be null.
 *
 * @return the Codahale HealthCheckRegistry instance
 */
public Object getHealthCheckRegistry()
{
   return healthCheckRegistry;
}

/**
 * Set a Codahale HealthCheckRegistry to use for HikariCP. Accepts either an
 * actual {@code HealthCheckRegistry} instance or a String, which is treated
 * as a JNDI name to look the registry up by.
 *
 * @param healthCheckRegistry the Codahale HealthCheckRegistry instance, or a JNDI name
 * @throws IllegalArgumentException if the JNDI lookup fails or the resolved
 *         object is not a {@code com.codahale.metrics.health.HealthCheckRegistry}
 */
public void setHealthCheckRegistry(Object healthCheckRegistry)
{
   if (healthCheckRegistry != null) {
      if (healthCheckRegistry instanceof String) {
         try {
            // Treat a String value as a JNDI name and resolve it.
            InitialContext initCtx = new InitialContext();
            healthCheckRegistry = (HealthCheckRegistry) initCtx.lookup((String) healthCheckRegistry);
         }
         catch (NamingException e) {
            throw new IllegalArgumentException(e);
         }
      }

      if (!(healthCheckRegistry instanceof HealthCheckRegistry)) {
         throw new IllegalArgumentException("Class must be an instance of com.codahale.metrics.health.HealthCheckRegistry");
      }
   }

   this.healthCheckRegistry = healthCheckRegistry;
}
public Properties getHealthCheckProperties()
{
return healthCheckProperties;
}
public void setHealthCheckProperties(Properties healthCheckProperties)
{
this.healthCheckProperties.putAll(healthCheckProperties);
}
public void addHealthCheckProperty(String key, String value)
{
healthCheckProperties.setProperty(key, value);
}
public boolean isReadOnly()
{
return isReadOnly;
}
public void setReadOnly(boolean readOnly)
{
this.isReadOnly = readOnly;
}
public boolean isRegisterMbeans()
{
return isRegisterMbeans;
}
public void setRegisterMbeans(boolean register)
{
this.isRegisterMbeans = register;
}
/** {@inheritDoc} */
@Override
public long getLeakDetectionThreshold()
{
return leakDetectionThreshold;
}
/** {@inheritDoc} */
@Override
public void setLeakDetectionThreshold(long leakDetectionThresholdMs)
{
this.leakDetectionThreshold = leakDetectionThresholdMs;
}
/** {@inheritDoc} */
@Override
public long getMaxLifetime()
{
return maxLifetime;
}
/** {@inheritDoc} */
@Override
public void setMaxLifetime(long maxLifetimeMs)
{
this.maxLifetime = maxLifetimeMs;
}
/** {@inheritDoc} */
@Override
public int getMaximumPoolSize()
{
return maxPoolSize;
}
/** {@inheritDoc} */
@Override
public void setMaximumPoolSize(int maxPoolSize)
{
if (maxPoolSize < 1) {
throw new IllegalArgumentException("maxPoolSize cannot be less than 1");
}
this.maxPoolSize = maxPoolSize;
}
/** {@inheritDoc} */
@Override
public int getMinimumIdle()
{
return minIdle;
}
/** {@inheritDoc} */
@Override
public void setMinimumIdle(int minIdle)
{
if (minIdle < 0) {
throw new IllegalArgumentException("minimumIdle cannot be negative");
}
this.minIdle = minIdle;
}
/**
* Get the default password to use for DataSource.getConnection(username, password) calls.
* @return the password
*/
public String getPassword()
{
return password;
}
/**
* Set the default password to use for DataSource.getConnection(username, password) calls.
* @param password the password
*/
public void setPassword(String password)
{
this.password = password;
}
/** {@inheritDoc} */
@Override
public String getPoolName()
{
return poolName;
}
/**
* Set the name of the connection pool. This is primarily used for the MBean
* to uniquely identify the pool configuration.
*
* @param poolName the name of the connection pool to use
*/
public void setPoolName(String poolName)
{
this.poolName = poolName;
}
public String getTransactionIsolation()
{
return transactionIsolationName;
}
/**
* Set the default transaction isolation level. The specified value is the
* constant name from the <code>Connection</code> class, eg.
* <code>TRANSACTION_REPEATABLE_READ</code>.
*
* @param isolationLevel the name of the isolation level
*/
public void setTransactionIsolation(String isolationLevel)
{
this.transactionIsolationName = isolationLevel;
}
/**
* Get the default username used for DataSource.getConnection(username, password) calls.
*
* @return the username
*/
public String getUsername()
{
return username;
}
/**
* Set the default username used for DataSource.getConnection(username, password) calls.
*
* @param username the username
*/
public void setUsername(String username)
{
this.username = username;
}
/**
* Get the thread factory used to create threads.
*
* @return the thread factory (may be null, in which case the default thread factory is used)
*/
public ThreadFactory getThreadFactory()
{
return threadFactory;
}
/**
* Set the thread factory to be used to create threads.
*
* @param threadFactory the thread factory (setting to null causes the default thread factory to be used)
*/
public void setThreadFactory(ThreadFactory threadFactory)
{
this.threadFactory = threadFactory;
}
public void validate()
{
Logger logger = LoggerFactory.getLogger(getClass());
validateNumerics();
if (connectionCustomizerClassName != null) {
try {
getClass().getClassLoader().loadClass(connectionCustomizerClassName);
}
catch (Exception e) {
logger.warn("connectionCustomizationClass specified class '" + connectionCustomizerClassName + "' could not be loaded", e);
connectionCustomizerClassName = null;
}
}
if (driverClassName != null && jdbcUrl == null) {
logger.error("when specifying driverClassName, jdbcUrl must also be specified");
throw new IllegalStateException("when specifying driverClassName, jdbcUrl must also be specified");
}
else if (driverClassName != null && dataSourceClassName != null) {
logger.error("both driverClassName and dataSourceClassName are specified, one or the other should be used");
throw new IllegalStateException("both driverClassName and dataSourceClassName are specified, one or the other should be used");
}
else if (jdbcUrl != null) {
// OK
}
else if (dataSource == null && dataSourceClassName == null) {
logger.error("one of either dataSource, dataSourceClassName, or jdbcUrl and driverClassName must be specified");
throw new IllegalArgumentException("one of either dataSource or dataSourceClassName must be specified");
}
else if (dataSource != null && dataSourceClassName != null) {
logger.warn("both dataSource and dataSourceClassName are specified, ignoring dataSourceClassName");
}
if (transactionIsolationName != null) {
UtilityElf.getTransactionIsolation(transactionIsolationName);
}
if (poolName == null) {
poolName = "HikariPool-" + poolNumber++;
}
if (LOGGER.isDebugEnabled() || unitTest) {
logConfiguration();
}
}
   /**
    * Sanity-check and clamp the numeric configuration values, logging a warning
    * whenever a value is silently adjusted. Called from {@code validate()}.
    *
    * @throws IllegalArgumentException if maxLifetime is negative
    */
   private void validateNumerics()
   {
      Logger logger = LoggerFactory.getLogger(getClass());
      // A validation attempt may not outlive the connection attempt itself
      // (connectionTimeout == 0 appears to mean "no timeout", so no cap applies).
      if (validationTimeout > connectionTimeout && connectionTimeout != 0) {
         logger.warn("validationTimeout is greater than connectionTimeout, setting validationTimeout to connectionTimeout.");
         validationTimeout = connectionTimeout;
      }
      // A minIdle outside [0, maxPoolSize] silently falls back to a fixed-size pool.
      if (minIdle < 0 || minIdle > maxPoolSize) {
         minIdle = maxPoolSize;
      }
      if (maxLifetime < 0) {
         logger.error("maxLifetime cannot be negative.");
         throw new IllegalArgumentException("maxLifetime cannot be negative.");
      }
      // maxLifetime == 0 is left untouched; values under 30s are clamped to the default.
      else if (maxLifetime > 0 && maxLifetime < TimeUnit.SECONDS.toMillis(30)) {
         logger.warn("maxLifetime is less than 30000ms, using default {}ms.", MAX_LIFETIME);
         maxLifetime = MAX_LIFETIME;
      }
      // idleTimeout == 0 is left untouched (presumably "idle eviction disabled" — TODO confirm).
      if (idleTimeout != 0 && idleTimeout < TimeUnit.SECONDS.toMillis(10)) {
         logger.warn("idleTimeout is less than 10000ms, using default {}ms.", IDLE_TIMEOUT);
         idleTimeout = IDLE_TIMEOUT;
      }
      // Checked after the maxLifetime clamp above so the comparison uses the final value.
      else if (idleTimeout > maxLifetime && maxLifetime > 0) {
         logger.warn("idleTimeout is greater than maxLifetime, setting to maxLifetime.");
         idleTimeout = maxLifetime;
      }
      // The unitTest flag bypasses this clamp so tests can use short thresholds.
      if (leakDetectionThreshold != 0 && leakDetectionThreshold < TimeUnit.SECONDS.toMillis(2) && !unitTest) {
         logger.warn("leakDetectionThreshold is less than 2000ms, setting to minimum 2000ms.");
         leakDetectionThreshold = 2000L;
      }
   }
   /**
    * Dump the resolved configuration to the debug log, one property per line,
    * masking any password-bearing values.
    */
   private void logConfiguration()
   {
      LOGGER.debug("HikariCP pool {} configuration:", poolName);
      // TreeSet so the properties are logged in alphabetical order.
      final Set<String> propertyNames = new TreeSet<String>(PropertyBeanSetter.getPropertyNames(HikariConfig.class));
      for (String prop : propertyNames) {
         try {
            Object value = PropertyBeanSetter.getProperty(prop, this);
            if ("dataSourceProperties".equals(prop)) {
               // Mask a copy so the real Properties object is left untouched.
               Properties dsProps = PropertyBeanSetter.copyProperties(dataSourceProperties);
               dsProps.setProperty("password", "<masked>");
               value = dsProps;
            }
            value = (prop.contains("password") ? "<masked>" : value);
            // Pad the property name with dots out to a fixed 32-character gutter.
            LOGGER.debug((prop + "................................................").substring(0, 32) + (value != null ? value : ""));
         }
         catch (Exception e) {
            // Best effort: a property that cannot be read is simply skipped.
            continue;
         }
      }
   }
   /**
    * Load configuration values from the named property file. Implemented by
    * concrete subclasses; how the file is located is subclass-defined.
    *
    * @param propertyFileName the name of the property file to load
    */
   abstract protected void loadProperties(String propertyFileName);
public void copyState(AbstractHikariConfig other)
{
for (Field field : AbstractHikariConfig.class.getDeclaredFields()) {
if (!Modifier.isFinal(field.getModifiers())) {
field.setAccessible(true);
try {
field.set(other, field.get(this));
}
catch (Exception e) {
throw new RuntimeException("Exception copying HikariConfig state: " + e.getMessage(), e);
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.util;
import groovy.lang.Closure;
import groovy.test.GroovyAssert;
import junit.framework.TestCase;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Logger;
/**
 * A JUnit 3 {@link junit.framework.TestCase} base class in Groovy.
 *
 * In case JUnit 4 is used, see {@link groovy.test.GroovyAssert}.
 *
 * @see groovy.test.GroovyAssert
 *
 * @author <a href="mailto:bob@werken.com">bob mcwhirter</a>
 * @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
 * @author Dierk Koenig (the notYetImplemented feature, changes to shouldFail)
 * @author Andre Steingress
 */
public class GroovyTestCase extends TestCase {

    protected static Logger log = Logger.getLogger(GroovyTestCase.class.getName());

    private static final AtomicInteger scriptFileNameCounter = new AtomicInteger(0);

    public static final String TEST_SCRIPT_NAME_PREFIX = "TestScript";

    private boolean useAgileDoxNaming = false;

    /**
     * Overload the getName() method to make the test cases look more like AgileDox
     * (thanks to Joe Walnes for this tip!)
     */
    public String getName() {
        if (!useAgileDoxNaming) {
            return super.getName();
        }
        // Drop the "test" prefix, then insert a space before each capital
        // letter and lower-case the resulting sentence.
        return super.getName().substring(4).replaceAll("([A-Z])", " $1").toLowerCase();
    }

    /**
     * @return the plain JUnit test method name, without AgileDox formatting
     */
    public String getMethodName() {
        return super.getName();
    }

    /**
     * Asserts that the arrays are equivalent and contain the same values
     *
     * @param expected the expected array
     * @param value    the actual array
     */
    protected void assertArrayEquals(Object[] expected, Object[] value) {
        String message =
                "expected array: " + InvokerHelper.toString(expected) + " value array: " + InvokerHelper.toString(value);
        assertNotNull(message + ": expected should not be null", expected);
        assertNotNull(message + ": value should not be null", value);
        assertEquals(message, expected.length, value.length);
        for (int idx = 0; idx < expected.length; idx++) {
            assertEquals("value[" + idx + "] when " + message, expected[idx], value[idx]);
        }
    }

    /**
     * Asserts that the array of characters has a given length
     *
     * @param length expected length
     * @param array  the array
     */
    protected void assertLength(int length, char[] array) {
        assertEquals(length, array.length);
    }

    /**
     * Asserts that the array of ints has a given length
     *
     * @param length expected length
     * @param array  the array
     */
    protected void assertLength(int length, int[] array) {
        assertEquals(length, array.length);
    }

    /**
     * Asserts that the array of objects has a given length
     *
     * @param length expected length
     * @param array  the array
     */
    protected void assertLength(int length, Object[] array) {
        assertEquals(length, array.length);
    }

    /**
     * Asserts that the array of characters contains a given char
     *
     * @param expected expected character to be found
     * @param array    the array
     */
    protected void assertContains(char expected, char[] array) {
        for (char candidate : array) {
            if (candidate == expected) {
                return;
            }
        }
        StringBuilder message = new StringBuilder();
        message.append(expected).append(" not in {");
        boolean first = true;
        for (char candidate : array) {
            if (!first) {
                message.append(", ");
            }
            message.append("'").append(candidate).append("'");
            first = false;
        }
        message.append(" }");
        fail(message.toString());
    }

    /**
     * Asserts that the array of ints contains a given int
     *
     * @param expected expected int
     * @param array    the array
     */
    protected void assertContains(int expected, int[] array) {
        for (int candidate : array) {
            if (candidate == expected) {
                return;
            }
        }
        StringBuilder message = new StringBuilder();
        message.append(expected).append(" not in {");
        boolean first = true;
        for (int candidate : array) {
            if (!first) {
                message.append(", ");
            }
            message.append("'").append(candidate).append("'");
            first = false;
        }
        message.append(" }");
        fail(message.toString());
    }

    /**
     * Asserts that the value of toString() on the given object matches the
     * given text string
     *
     * @param value    the object to be output to the console
     * @param expected the expected String representation
     */
    protected void assertToString(Object value, String expected) {
        Object actual = InvokerHelper.invokeMethod(value, "toString", null);
        assertEquals("toString() on value: " + value, expected, actual);
    }

    /**
     * Asserts that the value of inspect() on the given object matches the
     * given text string
     *
     * @param value    the object to be output to the console
     * @param expected the expected String representation
     */
    protected void assertInspect(Object value, String expected) {
        Object actual = InvokerHelper.invokeMethod(value, "inspect", null);
        assertEquals("inspect() on value: " + value, expected, actual);
    }

    /**
     * see {@link groovy.test.GroovyAssert#assertScript(String)}
     */
    protected void assertScript(final String script) throws Exception {
        GroovyAssert.assertScript(script);
    }

    // TODO should this be synchronised?
    protected String getTestClassName() {
        return TEST_SCRIPT_NAME_PREFIX + getMethodName() + scriptFileNameCounter.getAndIncrement() + ".groovy";
    }

    /**
     * see {@link groovy.test.GroovyAssert#shouldFail(groovy.lang.Closure)}
     */
    protected String shouldFail(Closure code) {
        return GroovyAssert.shouldFail(code).getMessage();
    }

    /**
     * see {@link groovy.test.GroovyAssert#shouldFail(Class, groovy.lang.Closure)}
     */
    protected String shouldFail(Class clazz, Closure code) {
        return GroovyAssert.shouldFail(clazz, code).getMessage();
    }

    /**
     * see {@link groovy.test.GroovyAssert#shouldFailWithCause(Class, groovy.lang.Closure)}
     */
    protected String shouldFailWithCause(Class clazz, Closure code) {
        return GroovyAssert.shouldFailWithCause(clazz, code).getMessage();
    }

    /**
     * see {@link groovy.test.GroovyAssert#shouldFail(Class, String)}
     */
    protected String shouldFail(Class clazz, String script) {
        return GroovyAssert.shouldFail(clazz, script).getMessage();
    }

    /**
     * see {@link groovy.test.GroovyAssert#shouldFail(String)}
     */
    protected String shouldFail(String script) {
        return GroovyAssert.shouldFail(script).getMessage();
    }

    /**
     * Returns a copy of a string in which all EOLs are \n.
     */
    protected String fixEOLs(String value) {
        return value.replaceAll("(\\r\\n?)|\n", "\n");
    }

    /**
     * see {@link groovy.test.GroovyAssert#notYetImplemented(java.lang.Object)}
     */
    public static boolean notYetImplemented(Object caller) {
        return GroovyAssert.notYetImplemented(caller);
    }

    /**
     * Convenience method for subclasses of GroovyTestCase, identical to
     * <pre> GroovyTestCase.notYetImplemented(this); </pre>.
     *
     * @return <code>false</code> when not itself already in the call stack
     * @see #notYetImplemented(java.lang.Object)
     */
    public boolean notYetImplemented() {
        return notYetImplemented(this);
    }

    public static void assertEquals(String message, Object expected, Object actual) {
        if (expected == null && actual == null) {
            return;
        }
        // Groovy-aware equality (e.g. GString vs String) before falling back
        // to the plain JUnit comparison.
        if (expected != null && DefaultTypeTransformation.compareEqual(expected, actual)) {
            return;
        }
        TestCase.assertEquals(message, expected, actual);
    }

    public static void assertEquals(Object expected, Object actual) {
        assertEquals(null, expected, actual);
    }

    public static void assertEquals(String expected, String actual) {
        assertEquals(null, expected, actual);
    }
}
| |
// Copyright 2012 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.impala.analysis;
import java.io.StringReader;
import java.util.Map;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudera.impala.authorization.AuthorizationChecker;
import com.cloudera.impala.authorization.AuthorizationConfig;
import com.cloudera.impala.authorization.AuthorizeableColumn;
import com.cloudera.impala.authorization.AuthorizeableTable;
import com.cloudera.impala.authorization.AuthorizeableDb;
import com.cloudera.impala.authorization.Privilege;
import com.cloudera.impala.authorization.PrivilegeRequest;
import com.cloudera.impala.authorization.PrivilegeRequestBuilder;
import com.cloudera.impala.catalog.AuthorizationException;
import com.cloudera.impala.catalog.Db;
import com.cloudera.impala.catalog.ImpaladCatalog;
import com.cloudera.impala.catalog.Table;
import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.thrift.TAccessEvent;
import com.cloudera.impala.thrift.TDescribeTableOutputStyle;
import com.cloudera.impala.thrift.TQueryCtx;
import com.cloudera.impala.common.Pair;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
 * Wrapper class for parser and analyzer.
 */
public class AnalysisContext {
  private final static Logger LOG = LoggerFactory.getLogger(AnalysisContext.class);
  private final ImpaladCatalog catalog_;
  private final TQueryCtx queryCtx_;
  private final AuthorizationConfig authzConfig_;

  // Set in analyze()
  private AnalysisResult analysisResult_;

  public AnalysisContext(ImpaladCatalog catalog, TQueryCtx queryCtx,
      AuthorizationConfig authzConfig) {
    catalog_ = catalog;
    queryCtx_ = queryCtx;
    authzConfig_ = authzConfig;
  }

  /**
   * Holds the output of analysis: the (possibly rewritten) statement, the
   * analyzer used, plus type predicates and typed accessors for the statement.
   */
  static public class AnalysisResult {
    private StatementBase stmt_;
    private Analyzer analyzer_;
    // Copy of a CTAS's CREATE TABLE statement, saved in case a query rewrite
    // requires constructing a new CTAS statement.
    private CreateTableStmt tmpCreateTableStmt_;

    // Statement-type predicates.
    public boolean isAlterTableStmt() { return stmt_ instanceof AlterTableStmt; }
    public boolean isAlterViewStmt() { return stmt_ instanceof AlterViewStmt; }
    public boolean isComputeStatsStmt() { return stmt_ instanceof ComputeStatsStmt; }
    public boolean isQueryStmt() { return stmt_ instanceof QueryStmt; }
    public boolean isInsertStmt() { return stmt_ instanceof InsertStmt; }
    public boolean isDropDbStmt() { return stmt_ instanceof DropDbStmt; }
    public boolean isDropTableOrViewStmt() {
      return stmt_ instanceof DropTableOrViewStmt;
    }
    public boolean isDropFunctionStmt() { return stmt_ instanceof DropFunctionStmt; }
    public boolean isDropDataSrcStmt() { return stmt_ instanceof DropDataSrcStmt; }
    public boolean isDropStatsStmt() { return stmt_ instanceof DropStatsStmt; }
    public boolean isCreateTableLikeStmt() {
      return stmt_ instanceof CreateTableLikeStmt;
    }
    public boolean isCreateViewStmt() { return stmt_ instanceof CreateViewStmt; }
    public boolean isCreateTableAsSelectStmt() {
      return stmt_ instanceof CreateTableAsSelectStmt;
    }
    public boolean isCreateTableStmt() { return stmt_ instanceof CreateTableStmt; }
    public boolean isCreateDbStmt() { return stmt_ instanceof CreateDbStmt; }
    public boolean isCreateUdfStmt() { return stmt_ instanceof CreateUdfStmt; }
    public boolean isCreateUdaStmt() { return stmt_ instanceof CreateUdaStmt; }
    public boolean isCreateDataSrcStmt() { return stmt_ instanceof CreateDataSrcStmt; }
    public boolean isLoadDataStmt() { return stmt_ instanceof LoadDataStmt; }
    public boolean isUseStmt() { return stmt_ instanceof UseStmt; }
    public boolean isSetStmt() { return stmt_ instanceof SetStmt; }
    public boolean isShowTablesStmt() { return stmt_ instanceof ShowTablesStmt; }
    public boolean isShowDbsStmt() { return stmt_ instanceof ShowDbsStmt; }
    public boolean isShowDataSrcsStmt() { return stmt_ instanceof ShowDataSrcsStmt; }
    public boolean isShowStatsStmt() { return stmt_ instanceof ShowStatsStmt; }
    public boolean isShowFunctionsStmt() { return stmt_ instanceof ShowFunctionsStmt; }
    public boolean isShowCreateTableStmt() {
      return stmt_ instanceof ShowCreateTableStmt;
    }
    public boolean isShowFilesStmt() { return stmt_ instanceof ShowFilesStmt; }
    public boolean isDescribeStmt() { return stmt_ instanceof DescribeStmt; }
    public boolean isResetMetadataStmt() { return stmt_ instanceof ResetMetadataStmt; }
    public boolean isExplainStmt() { return stmt_.isExplain(); }
    public boolean isShowRolesStmt() { return stmt_ instanceof ShowRolesStmt; }
    public boolean isShowGrantRoleStmt() { return stmt_ instanceof ShowGrantRoleStmt; }
    public boolean isCreateDropRoleStmt() { return stmt_ instanceof CreateDropRoleStmt; }
    public boolean isGrantRevokeRoleStmt() {
      return stmt_ instanceof GrantRevokeRoleStmt;
    }
    public boolean isGrantRevokePrivStmt() {
      return stmt_ instanceof GrantRevokePrivStmt;
    }
    public boolean isTruncateStmt() { return stmt_ instanceof TruncateStmt; }

    // Category predicates built from the type predicates above.
    public boolean isCatalogOp() {
      return isUseStmt() || isViewMetadataStmt() || isDdlStmt();
    }
    private boolean isDdlStmt() {
      return isCreateTableLikeStmt() || isCreateTableStmt() ||
          isCreateViewStmt() || isCreateDbStmt() || isDropDbStmt() ||
          isDropTableOrViewStmt() || isResetMetadataStmt() || isAlterTableStmt() ||
          isAlterViewStmt() || isComputeStatsStmt() || isCreateUdfStmt() ||
          isCreateUdaStmt() || isDropFunctionStmt() || isCreateTableAsSelectStmt() ||
          isCreateDataSrcStmt() || isDropDataSrcStmt() || isDropStatsStmt() ||
          isCreateDropRoleStmt() || isGrantRevokeStmt() || isTruncateStmt();
    }
    private boolean isViewMetadataStmt() {
      return isShowFilesStmt() || isShowTablesStmt() || isShowDbsStmt() ||
          isShowFunctionsStmt() || isShowRolesStmt() || isShowGrantRoleStmt() ||
          isShowCreateTableStmt() || isShowDataSrcsStmt() || isShowStatsStmt() ||
          isDescribeStmt();
    }
    private boolean isGrantRevokeStmt() {
      return isGrantRevokeRoleStmt() || isGrantRevokePrivStmt();
    }
    public boolean isDmlStmt() {
      return isInsertStmt();
    }

    // Typed accessors; each one asserts the statement has the expected type.
    public AlterTableStmt getAlterTableStmt() {
      Preconditions.checkState(isAlterTableStmt());
      return (AlterTableStmt) stmt_;
    }
    public AlterViewStmt getAlterViewStmt() {
      Preconditions.checkState(isAlterViewStmt());
      return (AlterViewStmt) stmt_;
    }
    public ComputeStatsStmt getComputeStatsStmt() {
      Preconditions.checkState(isComputeStatsStmt());
      return (ComputeStatsStmt) stmt_;
    }
    public CreateTableLikeStmt getCreateTableLikeStmt() {
      Preconditions.checkState(isCreateTableLikeStmt());
      return (CreateTableLikeStmt) stmt_;
    }
    public CreateViewStmt getCreateViewStmt() {
      Preconditions.checkState(isCreateViewStmt());
      return (CreateViewStmt) stmt_;
    }
    public CreateTableAsSelectStmt getCreateTableAsSelectStmt() {
      Preconditions.checkState(isCreateTableAsSelectStmt());
      return (CreateTableAsSelectStmt) stmt_;
    }
    public CreateTableStmt getCreateTableStmt() {
      Preconditions.checkState(isCreateTableStmt());
      return (CreateTableStmt) stmt_;
    }
    public CreateTableStmt getTmpCreateTableStmt() {
      return tmpCreateTableStmt_;
    }
    public CreateDbStmt getCreateDbStmt() {
      Preconditions.checkState(isCreateDbStmt());
      return (CreateDbStmt) stmt_;
    }
    public CreateUdfStmt getCreateUdfStmt() {
      Preconditions.checkState(isCreateUdfStmt());
      return (CreateUdfStmt) stmt_;
    }
    public CreateUdaStmt getCreateUdaStmt() {
      // BUG FIX: this previously checked isCreateUdfStmt(), which let a UDF
      // statement pass the precondition and fail with a ClassCastException on
      // the cast below (and a real UDA statement could fail the check).
      Preconditions.checkState(isCreateUdaStmt());
      return (CreateUdaStmt) stmt_;
    }
    public DropDbStmt getDropDbStmt() {
      Preconditions.checkState(isDropDbStmt());
      return (DropDbStmt) stmt_;
    }
    public DropTableOrViewStmt getDropTableOrViewStmt() {
      Preconditions.checkState(isDropTableOrViewStmt());
      return (DropTableOrViewStmt) stmt_;
    }
    public TruncateStmt getTruncateStmt() {
      Preconditions.checkState(isTruncateStmt());
      return (TruncateStmt) stmt_;
    }
    public DropFunctionStmt getDropFunctionStmt() {
      Preconditions.checkState(isDropFunctionStmt());
      return (DropFunctionStmt) stmt_;
    }
    public LoadDataStmt getLoadDataStmt() {
      Preconditions.checkState(isLoadDataStmt());
      return (LoadDataStmt) stmt_;
    }
    public QueryStmt getQueryStmt() {
      Preconditions.checkState(isQueryStmt());
      return (QueryStmt) stmt_;
    }
    public InsertStmt getInsertStmt() {
      // A CTAS wraps an INSERT; expose it directly in that case.
      if (isCreateTableAsSelectStmt()) {
        return getCreateTableAsSelectStmt().getInsertStmt();
      } else {
        Preconditions.checkState(isInsertStmt());
        return (InsertStmt) stmt_;
      }
    }
    public DropTableOrViewStmt getCleanupStmt() {
      Preconditions.checkState(isCreateTableAsSelectStmt());
      return getCreateTableAsSelectStmt().getCleanupStmt();
    }
    public UseStmt getUseStmt() {
      Preconditions.checkState(isUseStmt());
      return (UseStmt) stmt_;
    }
    public SetStmt getSetStmt() {
      Preconditions.checkState(isSetStmt());
      return (SetStmt) stmt_;
    }
    public ShowTablesStmt getShowTablesStmt() {
      Preconditions.checkState(isShowTablesStmt());
      return (ShowTablesStmt) stmt_;
    }
    public ShowDbsStmt getShowDbsStmt() {
      Preconditions.checkState(isShowDbsStmt());
      return (ShowDbsStmt) stmt_;
    }
    public ShowDataSrcsStmt getShowDataSrcsStmt() {
      Preconditions.checkState(isShowDataSrcsStmt());
      return (ShowDataSrcsStmt) stmt_;
    }
    public ShowStatsStmt getShowStatsStmt() {
      Preconditions.checkState(isShowStatsStmt());
      return (ShowStatsStmt) stmt_;
    }
    public ShowFunctionsStmt getShowFunctionsStmt() {
      Preconditions.checkState(isShowFunctionsStmt());
      return (ShowFunctionsStmt) stmt_;
    }
    public ShowFilesStmt getShowFilesStmt() {
      Preconditions.checkState(isShowFilesStmt());
      return (ShowFilesStmt) stmt_;
    }
    public DescribeStmt getDescribeStmt() {
      Preconditions.checkState(isDescribeStmt());
      return (DescribeStmt) stmt_;
    }
    public ShowCreateTableStmt getShowCreateTableStmt() {
      Preconditions.checkState(isShowCreateTableStmt());
      return (ShowCreateTableStmt) stmt_;
    }
    public StatementBase getStmt() { return stmt_; }
    public Analyzer getAnalyzer() { return analyzer_; }
    public Set<TAccessEvent> getAccessEvents() { return analyzer_.getAccessEvents(); }
    /**
     * True if the statement contains a subquery and is of a type that must be
     * rewritten by subquery unnesting (views are analyzed as-is).
     */
    public boolean requiresRewrite() {
      return analyzer_.containsSubquery() && !(stmt_ instanceof CreateViewStmt)
          && !(stmt_ instanceof AlterViewStmt);
    }
    public String getJsonLineageGraph() { return analyzer_.getSerializedLineageGraph(); }
  }

  /**
   * Parse and analyze 'stmt'. If 'stmt' is a nested query (i.e. query that
   * contains subqueries), it is also rewritten by performing subquery unnesting.
   * The transformed stmt is then re-analyzed in a new analysis context.
   *
   * The result of analysis can be retrieved by calling
   * getAnalysisResult().
   *
   * @throws AnalysisException
   *           On any other error, including parsing errors. Also thrown when any
   *           missing tables are detected as a result of running analysis.
   */
  public void analyze(String stmt) throws AnalysisException {
    Analyzer analyzer = new Analyzer(catalog_, queryCtx_, authzConfig_);
    analyze(stmt, analyzer);
  }

  /**
   * Parse and analyze 'stmt' using a specified Analyzer.
   */
  public void analyze(String stmt, Analyzer analyzer) throws AnalysisException {
    SqlScanner input = new SqlScanner(new StringReader(stmt));
    SqlParser parser = new SqlParser(input);
    try {
      analysisResult_ = new AnalysisResult();
      analysisResult_.analyzer_ = analyzer;
      if (analysisResult_.analyzer_ == null) {
        analysisResult_.analyzer_ = new Analyzer(catalog_, queryCtx_, authzConfig_);
      }
      analysisResult_.stmt_ = (StatementBase) parser.parse().value;
      if (analysisResult_.stmt_ == null) return;

      // For CTAS, we copy the create statement in case we have to create a new CTAS
      // statement after a query rewrite.
      if (analysisResult_.stmt_ instanceof CreateTableAsSelectStmt) {
        analysisResult_.tmpCreateTableStmt_ =
            ((CreateTableAsSelectStmt)analysisResult_.stmt_).getCreateStmt().clone();
      }

      analysisResult_.stmt_.analyze(analysisResult_.analyzer_);
      // Remember the explain flag: the rewritten statement must inherit it.
      boolean isExplain = analysisResult_.isExplainStmt();

      // Check if we need to rewrite the statement.
      if (analysisResult_.requiresRewrite()) {
        StatementBase rewrittenStmt = StmtRewriter.rewrite(analysisResult_);
        // Re-analyze the rewritten statement.
        Preconditions.checkNotNull(rewrittenStmt);
        analysisResult_ = new AnalysisResult();
        analysisResult_.analyzer_ = new Analyzer(catalog_, queryCtx_, authzConfig_);
        analysisResult_.stmt_ = rewrittenStmt;
        analysisResult_.stmt_.analyze(analysisResult_.analyzer_);
        LOG.trace("rewrittenStmt: " + rewrittenStmt.toSql());
        if (isExplain) analysisResult_.stmt_.setIsExplain();
        // Rewriting must converge in a single pass.
        Preconditions.checkState(!analysisResult_.requiresRewrite());
      }
    } catch (AnalysisException e) {
      // Don't wrap AnalysisExceptions in another AnalysisException
      throw e;
    } catch (Exception e) {
      throw new AnalysisException(parser.getErrorMsg(stmt), e);
    }
  }

  /**
   * Authorize an analyzed statement.
   * analyze() must have already been called. Throws an AuthorizationException if the
   * user doesn't have sufficient privileges to run this statement.
   */
  public void authorize(AuthorizationChecker authzChecker) throws AuthorizationException {
    Preconditions.checkNotNull(analysisResult_);
    Analyzer analyzer = getAnalyzer();
    // Process statements for which column-level privilege requests may be registered.
    if (analysisResult_.isQueryStmt() || analysisResult_.isInsertStmt() ||
        analysisResult_.isCreateTableAsSelectStmt() ||
        analysisResult_.isCreateViewStmt() || analysisResult_.isAlterViewStmt()) {
      // Map of table name to a list of privilege requests associated with that table.
      // These include both table-level and column-level privilege requests.
      Map<String, List<PrivilegeRequest>> tablePrivReqs = Maps.newHashMap();
      // Privilege requests that are not column or table-level.
      List<PrivilegeRequest> otherPrivReqs = Lists.newArrayList();
      // Group the registered privilege requests based on the table they reference.
      for (PrivilegeRequest privReq: analyzer.getPrivilegeReqs()) {
        String tableName = privReq.getAuthorizeable().getFullTableName();
        if (tableName == null) {
          otherPrivReqs.add(privReq);
        } else {
          List<PrivilegeRequest> requests = tablePrivReqs.get(tableName);
          if (requests == null) {
            requests = Lists.newArrayList();
            tablePrivReqs.put(tableName, requests);
          }
          // The table-level SELECT must be the first table-level request, and it
          // must precede all column-level privilege requests.
          Preconditions.checkState((requests.isEmpty() ||
              !(privReq.getAuthorizeable() instanceof AuthorizeableColumn)) ||
              (requests.get(0).getAuthorizeable() instanceof AuthorizeableTable &&
               requests.get(0).getPrivilege() == Privilege.SELECT));
          requests.add(privReq);
        }
      }
      // Check any non-table, non-column privilege requests first.
      for (PrivilegeRequest request: otherPrivReqs) {
        authorizePrivilegeRequest(authzChecker, request);
      }
      // Authorize table accesses, one table at a time, by considering both table and
      // column-level privilege requests.
      for (Map.Entry<String, List<PrivilegeRequest>> entry: tablePrivReqs.entrySet()) {
        authorizeTableAccess(authzChecker, entry.getValue());
      }
    } else {
      for (PrivilegeRequest privReq: analyzer.getPrivilegeReqs()) {
        Preconditions.checkState(
            !(privReq.getAuthorizeable() instanceof AuthorizeableColumn) ||
            analysisResult_.isDescribeStmt());
        authorizePrivilegeRequest(authzChecker, privReq);
      }
    }

    // Check any masked requests.
    for (Pair<PrivilegeRequest, String> maskedReq: analyzer.getMaskedPrivilegeReqs()) {
      if (!authzChecker.hasAccess(analyzer.getUser(), maskedReq.first)) {
        throw new AuthorizationException(maskedReq.second);
      }
    }
  }

  /**
   * Authorize a privilege request.
   * Throws an AuthorizationException if the user doesn't have sufficient privileges for
   * this request. Also, checks if the request references a system database.
   */
  private void authorizePrivilegeRequest(AuthorizationChecker authzChecker,
      PrivilegeRequest request) throws AuthorizationException {
    Preconditions.checkNotNull(request);
    String dbName = null;
    if (request.getAuthorizeable() != null) {
      dbName = request.getAuthorizeable().getDbName();
    }
    // If this is a system database, some actions should always be allowed
    // or disabled, regardless of what is in the auth policy.
    if (dbName != null && checkSystemDbAccess(dbName, request.getPrivilege())) {
      return;
    }
    authzChecker.checkAccess(getAnalyzer().getUser(), request);
  }

  /**
   * Authorize a list of privilege requests associated with a single table.
   * It checks if the user has sufficient table-level privileges and if that is
   * not the case, it falls back on checking column-level privileges, if any. This
   * function requires 'SELECT' requests to be ordered by table and then by column
   * privilege requests. Throws an AuthorizationException if the user doesn't have
   * sufficient privileges.
   */
  private void authorizeTableAccess(AuthorizationChecker authzChecker,
      List<PrivilegeRequest> requests) throws AuthorizationException {
    Preconditions.checkState(!requests.isEmpty());
    Analyzer analyzer = getAnalyzer();
    boolean hasTableSelectPriv = true;
    boolean hasColumnSelectPriv = false;
    for (PrivilegeRequest request: requests) {
      if (request.getAuthorizeable() instanceof AuthorizeableTable) {
        try {
          authorizePrivilegeRequest(authzChecker, request);
        } catch (AuthorizationException e) {
          // Authorization fails if we fail to authorize any table-level request that is
          // not a SELECT privilege (e.g. INSERT).
          if (request.getPrivilege() != Privilege.SELECT) throw e;
          hasTableSelectPriv = false;
        }
      } else {
        Preconditions.checkState(
            request.getAuthorizeable() instanceof AuthorizeableColumn);
        // Table-level SELECT implies access to every column.
        if (hasTableSelectPriv) continue;
        if (authzChecker.hasAccess(analyzer.getUser(), request)) {
          hasColumnSelectPriv = true;
          continue;
        }
        // Make sure we don't reveal any column names in the error message.
        throw new AuthorizationException(String.format("User '%s' does not have " +
            "privileges to execute '%s' on: %s", analyzer.getUser().getName(),
            request.getPrivilege().toString(),
            request.getAuthorizeable().getFullTableName()));
      }
    }
    if (!hasTableSelectPriv && !hasColumnSelectPriv) {
      throw new AuthorizationException(String.format("User '%s' does not have " +
          "privileges to execute 'SELECT' on: %s", analyzer.getUser().getName(),
          requests.get(0).getAuthorizeable().getFullTableName()));
    }
  }

  /**
   * Throws an AuthorizationException if the dbName is a system db
   * and the user is trying to modify it.
   * Returns true if this is a system db and the action is allowed.
   */
  private boolean checkSystemDbAccess(String dbName, Privilege privilege)
      throws AuthorizationException {
    Db db = catalog_.getDb(dbName);
    if (db != null && db.isSystemDb()) {
      switch (privilege) {
        case VIEW_METADATA:
        case ANY:
          return true;
        default:
          throw new AuthorizationException("Cannot modify system database.");
      }
    }
    return false;
  }

  public AnalysisResult getAnalysisResult() { return analysisResult_; }
  public Analyzer getAnalyzer() { return getAnalysisResult().getAnalyzer(); }
}
| |
package tw.gov.tainan.tainanwelfare.db;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import org.json.JSONException;
import org.json.JSONObject;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonParser;
import com.google.gson.JsonSerializer;
import tw.gov.tainan.tainanwelfare.dbentity.LandmarkDBEntity;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
/**
 * Persistence object for the LANDMARK table.
 *
 * Fixes over the previous revision:
 * - cursors are now always closed (previously leaked when the result set was empty);
 * - the option filters bind MARK_TYPE_SEQ values as individual '?' placeholders
 *   (previously one method concatenated OptionStr into the SQL — an injection
 *   vector — and the other bound the whole comma list as a single value,
 *   matching nothing for multi-value input);
 * - row mapping is factored into one helper; the INFO map is now always set
 *   (empty when the column is NULL) so callers never see a null map.
 */
public class landmark_PO {

    // Column list shared by every query; ordinals must match readLandmark().
    private static final String BASE_SELECT =
            "SELECT SEQ, NAME, TOWN, ADDRESS, PHONE, " +
            "LONGITUDE, LATITUDE, SCORE, INFO, MARK_TYPE_SEQ FROM LANDMARK";

    private Context context;
    private DBHelper dbhelper;

    public landmark_PO(Context context) {
        this.context = context;
        dbhelper = DBHelper.getInstance(context);
    }

    /** Returns every landmark in the table. */
    public ArrayList<LandmarkDBEntity> GetLandmarkList() {
        return runQuery(BASE_SELECT, null);
    }

    /** Returns landmarks whose name, address, phone or info contains SearchStr. */
    public ArrayList<LandmarkDBEntity> GetSearchLandmarkList(String SearchStr) {
        String like = "%" + SearchStr + "%";
        return runQuery(BASE_SELECT +
                " WHERE NAME like ? OR ADDRESS like ? OR PHONE like ? OR INFO like ? ",
                new String[] { like, like, like, like });
    }

    /**
     * Returns landmarks whose MARK_TYPE_SEQ is in OptionStr
     * (a comma-separated list, e.g. "1,3,5").
     */
    public ArrayList<LandmarkDBEntity> GetOptionLandmarkList(String OptionStr) {
        String[] options = splitOptions(OptionStr);
        return runQuery(BASE_SELECT +
                " WHERE MARK_TYPE_SEQ IN (" + placeholders(options.length) + ")",
                options);
    }

    /**
     * Returns landmarks filtered by mark type (comma-separated OptionStr) and,
     * when SearchStr is non-empty, by a text match on name/address/phone/info.
     */
    public ArrayList<LandmarkDBEntity> GetOptionSearchLandmarkList(String OptionStr, String SearchStr) {
        String[] options = splitOptions(OptionStr);
        String in = placeholders(options.length);
        if (SearchStr.isEmpty()) {
            return runQuery(BASE_SELECT +
                    " WHERE MARK_TYPE_SEQ IN (" + in + ")", options);
        }
        String like = "%" + SearchStr + "%";
        // Bind args: all mark-type values first, then the four LIKE patterns.
        String[] args = new String[options.length + 4];
        System.arraycopy(options, 0, args, 0, options.length);
        for (int i = 0; i < 4; i++) {
            args[options.length + i] = like;
        }
        return runQuery(BASE_SELECT +
                " WHERE MARK_TYPE_SEQ IN (" + in + ")" +
                " AND (NAME like ? OR ADDRESS like ? OR PHONE like ? OR INFO like ? )",
                args);
    }

    /** Inserts or replaces (by SEQ) one landmark row; INFO is stored as JSON. */
    public void InsertLandmark(LandmarkDBEntity entity) {
        SQLiteDatabase db = dbhelper.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put("SEQ", entity.getSEQ());
        values.put("NAME", entity.getName());
        values.put("TOWN", entity.getTown());
        values.put("ADDRESS", entity.getAddress());
        values.put("PHONE", entity.getPhone());
        values.put("LONGITUDE", entity.getLongitude());
        values.put("LATITUDE", entity.getLatitude());
        values.put("SCORE", entity.getScore());
        Gson gson = new GsonBuilder().create();
        String jsonString = gson.toJson(entity.getInfo());
        values.put("INFO", jsonString);
        values.put("MARK_TYPE_SEQ", entity.getMarkTypeSeq());
        // replace() implements the upsert semantics keyed on the primary key.
        db.replace("LANDMARK", null, values);
    }

    /** Splits a comma-separated option list into trimmed bind values. */
    private static String[] splitOptions(String optionStr) {
        String[] parts = optionStr.split(",");
        for (int i = 0; i < parts.length; i++) {
            parts[i] = parts[i].trim();
        }
        return parts;
    }

    /** Builds "?,?,...,?" with n placeholders (n >= 1). */
    private static String placeholders(int n) {
        StringBuilder sb = new StringBuilder("?");
        for (int i = 1; i < n; i++) {
            sb.append(",?");
        }
        return sb.toString();
    }

    /** Runs a landmark SELECT and maps every row; the cursor is always closed. */
    private ArrayList<LandmarkDBEntity> runQuery(String sql, String[] args) {
        ArrayList<LandmarkDBEntity> resultList = new ArrayList<LandmarkDBEntity>();
        SQLiteDatabase db = dbhelper.getReadableDatabase();
        Cursor c1 = db.rawQuery(sql, args);
        try {
            while (c1.moveToNext()) {
                resultList.add(readLandmark(c1));
            }
        } finally {
            c1.close();
        }
        return resultList;
    }

    /** Maps the cursor's current row to an entity (ordinals follow BASE_SELECT). */
    private LandmarkDBEntity readLandmark(Cursor c) {
        LandmarkDBEntity entity = new LandmarkDBEntity();
        entity.setSEQ(c.getString(0));
        entity.setName(c.getString(1));
        entity.setTown(c.getString(2));
        entity.setAddress(c.getString(3));
        entity.setPhone(c.getString(4));
        entity.setLongitude(c.getFloat(5));
        entity.setLatitude(c.getFloat(6));
        entity.setScore(c.getString(7));
        // INFO is a flat JSON object of string key/value pairs; an unparsable
        // or NULL column yields an empty map rather than a null reference.
        HashMap<String, String> info = new HashMap<String, String>();
        String json = c.getString(8);
        if (json != null) {
            try {
                JSONObject jsonobject = new JSONObject(json);
                Iterator<String> keys = jsonobject.keys();
                while (keys.hasNext()) {
                    String key = keys.next();
                    info.put(key, jsonobject.getString(key));
                }
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        entity.setInfo(info);
        entity.setMarkTypeSeq(c.getInt(9));
        return entity;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.rds.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p/>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/ResetDBParameterGroup" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ResetDBParameterGroupRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The name of the DB parameter group. Must match the name of an existing
     * <code>DBParameterGroup</code>.
     */
    private String dBParameterGroupName;

    /**
     * Whether to reset all parameters in the DB parameter group to default
     * values. By default, all parameters are reset.
     */
    private Boolean resetAllParameters;

    /**
     * Specific parameters to reset, each given as <code>ParameterName</code>
     * plus <code>ApplyMethod</code>; at most 20 per request. Valid apply
     * methods: MySQL and MariaDB accept <code>immediate</code> (dynamic
     * parameters only) or <code>pending-reboot</code> (dynamic and static,
     * applied when the instance reboots); Oracle accepts
     * <code>pending-reboot</code> only.
     */
    private com.amazonaws.internal.SdkInternalList<Parameter> parameters;

    /**
     * Default constructor. Use the setters or the fluent <code>with*</code>
     * methods to initialize the object after creating it.
     */
    public ResetDBParameterGroupRequest() {
    }

    /**
     * Constructs a request targeting the named DB parameter group.
     *
     * @param dBParameterGroupName
     *        The name of the DB parameter group; must match an existing
     *        <code>DBParameterGroup</code>.
     */
    public ResetDBParameterGroupRequest(String dBParameterGroupName) {
        setDBParameterGroupName(dBParameterGroupName);
    }

    /**
     * @param dBParameterGroupName
     *        The name of the DB parameter group; must match an existing
     *        <code>DBParameterGroup</code>.
     */
    public void setDBParameterGroupName(String dBParameterGroupName) {
        this.dBParameterGroupName = dBParameterGroupName;
    }

    /**
     * @return The name of the DB parameter group.
     */
    public String getDBParameterGroupName() {
        return this.dBParameterGroupName;
    }

    /**
     * Fluent variant of {@link #setDBParameterGroupName(String)}.
     *
     * @param dBParameterGroupName
     *        The name of the DB parameter group; must match an existing
     *        <code>DBParameterGroup</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResetDBParameterGroupRequest withDBParameterGroupName(String dBParameterGroupName) {
        setDBParameterGroupName(dBParameterGroupName);
        return this;
    }

    /**
     * @param resetAllParameters
     *        Whether to reset all parameters in the group to default values
     *        (the default behavior).
     */
    public void setResetAllParameters(Boolean resetAllParameters) {
        this.resetAllParameters = resetAllParameters;
    }

    /**
     * @return Whether all parameters in the group are reset to default values.
     */
    public Boolean getResetAllParameters() {
        return this.resetAllParameters;
    }

    /**
     * Fluent variant of {@link #setResetAllParameters(Boolean)}.
     *
     * @param resetAllParameters
     *        Whether to reset all parameters in the group to default values.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResetDBParameterGroupRequest withResetAllParameters(Boolean resetAllParameters) {
        setResetAllParameters(resetAllParameters);
        return this;
    }

    /**
     * Boolean-style accessor, identical to {@link #getResetAllParameters()}.
     *
     * @return Whether all parameters in the group are reset to default values.
     */
    public Boolean isResetAllParameters() {
        return this.resetAllParameters;
    }

    /**
     * Returns the parameters to reset, lazily creating the backing list so
     * this accessor never yields {@code null}.
     *
     * @return The parameters to reset (see the field documentation for valid
     *         apply methods per engine).
     */
    public java.util.List<Parameter> getParameters() {
        if (parameters == null) {
            parameters = new com.amazonaws.internal.SdkInternalList<Parameter>();
        }
        return parameters;
    }

    /**
     * Replaces the parameter list with a copy of the supplied collection
     * ({@code null} clears it).
     *
     * @param parameters
     *        The parameters to reset; at most 20 per request.
     */
    public void setParameters(java.util.Collection<Parameter> parameters) {
        this.parameters = (parameters == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<Parameter>(parameters);
    }

    /**
     * Appends the given parameters to the existing list (if any). Use
     * {@link #setParameters(java.util.Collection)} or
     * {@link #withParameters(java.util.Collection)} to replace the list
     * instead.
     *
     * @param parameters
     *        The parameters to reset; at most 20 per request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResetDBParameterGroupRequest withParameters(Parameter... parameters) {
        if (this.parameters == null) {
            setParameters(new com.amazonaws.internal.SdkInternalList<Parameter>(parameters.length));
        }
        java.util.Collections.addAll(this.parameters, parameters);
        return this;
    }

    /**
     * Fluent variant of {@link #setParameters(java.util.Collection)} —
     * replaces, rather than appends to, the existing list.
     *
     * @param parameters
     *        The parameters to reset; at most 20 per request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ResetDBParameterGroupRequest withParameters(java.util.Collection<Parameter> parameters) {
        setParameters(parameters);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getDBParameterGroupName() != null)
            sb.append("DBParameterGroupName: ").append(getDBParameterGroupName()).append(",");
        if (getResetAllParameters() != null)
            sb.append("ResetAllParameters: ").append(getResetAllParameters()).append(",");
        if (getParameters() != null)
            sb.append("Parameters: ").append(getParameters());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ResetDBParameterGroupRequest))
            return false;
        ResetDBParameterGroupRequest other = (ResetDBParameterGroupRequest) obj;
        // Objects.equals reproduces the generated null-xor + equals checks.
        return java.util.Objects.equals(getDBParameterGroupName(), other.getDBParameterGroupName())
                && java.util.Objects.equals(getResetAllParameters(), other.getResetAllParameters())
                && java.util.Objects.equals(getParameters(), other.getParameters());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the generated implementation, so hashes are unchanged.
        return java.util.Objects.hash(getDBParameterGroupName(), getResetAllParameters(), getParameters());
    }

    @Override
    public ResetDBParameterGroupRequest clone() {
        return (ResetDBParameterGroupRequest) super.clone();
    }
}
| |
package com.gdn.venice.facade;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import javax.ejb.EJBException;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.Query;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.log4j.Logger;
import com.gdn.venice.facade.callback.SessionCallback;
import com.gdn.venice.facade.finder.FinderReturn;
import com.gdn.venice.persistence.FinArFundsInRefund;
import com.djarum.raf.utilities.JPQLAdvancedQueryCriteria;
import com.djarum.raf.utilities.JPQLQueryStringBuilder;
import com.djarum.raf.utilities.Log4jLoggerFactory;
/**
* Session Bean implementation class FinArFundsInRefundSessionEJBBean
*
* <p>
* <b>author:</b> <a href="mailto:david@pwsindonesia.com">David Forden</a>
* <p>
* <b>version:</b> 1.0
* <p>
* <b>since:</b> 2011
*
*/
@Stateless(mappedName = "FinArFundsInRefundSessionEJBBean")
public class FinArFundsInRefundSessionEJBBean implements FinArFundsInRefundSessionEJBRemote,
FinArFundsInRefundSessionEJBLocal {
/*
* Implements an IOC model for pre/post callbacks to persist, merge, and
* remove operations. The onPrePersist, onPostPersist, onPreMerge,
* onPostMerge, onPreRemove and OnPostRemove operations must be implemented
* by the callback class.
*/
    // Fully-qualified class name of the session callback, resolved from the
    // module configuration file; stays null when none is configured.
    private String _sessionCallbackClassName = null;
    // A reference to the callback object that has been instantiated
    private SessionCallback _callback = null;
    // Class logger. NOTE(review): static, but (re)assigned from the instance
    // constructor on every bean instantiation.
    protected static Logger _log = null;
    // Path of the Venice module configuration file under $VENICE_HOME/conf.
    private String _configFile = System.getenv("VENICE_HOME")
            + "/conf/module-config.xml";
    // Positional JPQL bind values consumed (and reset to null) by queryByRange().
    private Object[] bindingArray = null;
    // Container-managed, transaction-scoped entity manager.
    @PersistenceContext(unitName = "GDN-Venice-Persistence", type = PersistenceContextType.TRANSACTION)
    protected EntityManager em;
    /**
     * Default constructor. Sets up the class logger and, when the module
     * configuration file can be read, instantiates the configured session
     * callback.
     */
    public FinArFundsInRefundSessionEJBBean() {
        super();
        Log4jLoggerFactory loggerFactory = new Log4jLoggerFactory();
        // NOTE(review): assigns a static logger from an instance constructor,
        // so the field is re-assigned each time the container creates a bean.
        _log = loggerFactory
                .getLog4JLogger("com.gdn.venice.facade.FinArFundsInRefundSessionEJBBean");
        // If the configuration is successful then instantiate the callback
        if (this.configure())
            this.instantiateTriggerCallback();
    }
/**
* Reads the venice configuration file and configures the EJB's
* triggerCallbackClassName
*/
private Boolean configure() {
_log.debug("Venice Configuration File:" + _configFile);
try {
XMLConfiguration config = new XMLConfiguration(_configFile);
/*
* Get the index entry for the adapter configuration from the
* configuration file - there will be multiple adapter
* configurations
*/
@SuppressWarnings({ "rawtypes" })
List callbacks = config
.getList("sessionBeanConfig.callback.[@name]");
Integer beanConfigIndex = new Integer(Integer.MAX_VALUE);
@SuppressWarnings("rawtypes")
Iterator i = callbacks.iterator();
while (i.hasNext()) {
String beanName = (String) i.next();
if (this.getClass().getSimpleName().equals(beanName)) {
beanConfigIndex = callbacks.indexOf(beanName);
_log.debug("Bean configuration for " + beanName
+ " found at " + beanConfigIndex);
}
}
this._sessionCallbackClassName = config
.getString("sessionBeanConfig.callback(" + beanConfigIndex + ").[@class]");
_log.debug("Loaded configuration for _sessionCallbackClassName:"
+ _sessionCallbackClassName);
} catch (ConfigurationException e) {
_log.error("A ConfigurationException occured when processing the configuration file"
+ e.getMessage());
e.printStackTrace();
return Boolean.FALSE;
}
return Boolean.TRUE;
}
/**
* Instantiates the trigger callback handler class
*
* @return
*/
Boolean instantiateTriggerCallback() {
if (_sessionCallbackClassName != null
&& !_sessionCallbackClassName.isEmpty())
try {
Class<?> c = Class.forName(_sessionCallbackClassName);
_callback = (SessionCallback) c.newInstance();
} catch (ClassNotFoundException e) {
_log.error("A ClassNotFoundException occured when trying to instantiate:"
+ this._sessionCallbackClassName);
e.printStackTrace();
return Boolean.FALSE;
} catch (InstantiationException e) {
_log.error("A InstantiationException occured when trying to instantiate:"
+ this._sessionCallbackClassName);
e.printStackTrace();
return Boolean.FALSE;
} catch (IllegalAccessException e) {
_log.error("A IllegalAccessException occured when trying to instantiate:"
+ this._sessionCallbackClassName);
e.printStackTrace();
return Boolean.FALSE;
}
return Boolean.TRUE;
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#queryByRange(java.lang
* .String, int, int)
*/
@Override
@SuppressWarnings({ "unchecked" })
public List<FinArFundsInRefund> queryByRange(String jpqlStmt, int firstResult,
int maxResults) {
Long startTime = System.currentTimeMillis();
_log.debug("queryByRange()");
Query query = null;
try {
query = em.createQuery(jpqlStmt);
if(this.bindingArray != null){
for(int i = 0; i < bindingArray.length; ++i){
if(bindingArray[i] != null){
query.setParameter(i+1, bindingArray[i]);
}
}
}
} catch (Exception e) {
_log.error("An exception occured when calling em.createQuery():"
+ e.getMessage());
throw new EJBException(e);
}
try {
if (firstResult > 0) {
query = query.setFirstResult(firstResult);
}
if (maxResults > 0) {
query = query.setMaxResults(maxResults);
}
} catch (Exception e) {
_log.error("An exception occured when accessing the result set of a query:"
+ e.getMessage());
throw new EJBException(e);
}
List<FinArFundsInRefund> returnList = (List<FinArFundsInRefund>)query.getResultList();
this.bindingArray = null;
Long endTime = System.currentTimeMillis();
Long duration = startTime - endTime;
_log.debug("queryByRange() duration:" + duration + "ms");
return returnList;
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#persistFinArFundsInRefund(com
* .gdn.venice.persistence.FinArFundsInRefund)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public FinArFundsInRefund persistFinArFundsInRefund(FinArFundsInRefund finArFundsInRefund) {
    Long startTime = System.currentTimeMillis();
    _log.debug("persistFinArFundsInRefund()");
    // Call the onPrePersist() callback and throw an exception if it fails
    if (this._callback != null) {
        if (!this._callback.onPrePersist(finArFundsInRefund)) {
            _log.error("An onPrePersist callback operation failed for:"
                    + this._sessionCallbackClassName);
            throw new EJBException(
                    "An onPrePersist callback operation failed for:"
                            + this._sessionCallbackClassName);
        }
    }
    // Persist only succeeds for a record that does not already exist;
    // look it up first when a primary key is present.
    FinArFundsInRefund existingFinArFundsInRefund = null;
    if (finArFundsInRefund != null && finArFundsInRefund.getRefundRecordId() != null) {
        _log.debug("persistFinArFundsInRefund:em.find()");
        try {
            existingFinArFundsInRefund = em.find(FinArFundsInRefund.class,
                    finArFundsInRefund.getRefundRecordId());
        } catch (Exception e) {
            _log.error("An exception occured when calling em.find():"
                    + e.getMessage());
            throw new EJBException(e);
        }
    }
    if (existingFinArFundsInRefund == null) {
        _log.debug("persistFinArFundsInRefund:em.persist()");
        try {
            em.persist(finArFundsInRefund);
        } catch (Exception e) {
            _log.error("An exception occured when calling em.persist():"
                    + e.getMessage());
            throw new EJBException(e);
        }
        _log.debug("persistFinArFundsInRefund:em.flush()");
        try {
            // Flush to push the INSERT to the database immediately, then
            // clear so the persistence context does not grow unbounded.
            em.flush();
            em.clear();
        } catch (Exception e) {
            _log.error("An exception occured when calling em.flush():"
                    + e.getMessage());
            throw new EJBException(e);
        }
        // Call the onPostPersist() callback and throw an exception if it fails
        if (this._callback != null) {
            if (!this._callback.onPostPersist(finArFundsInRefund)) {
                _log.error("An onPostPersist callback operation failed for:"
                        + this._sessionCallbackClassName);
                throw new EJBException(
                        "An onPostPersist callback operation failed for:"
                                + this._sessionCallbackClassName);
            }
        }
        Long endTime = System.currentTimeMillis();
        // BUG FIX: duration was startTime - endTime (always <= 0).
        Long duration = endTime - startTime;
        _log.debug("persistFinArFundsInRefund() duration:" + duration + "ms");
        return finArFundsInRefund;
    } else {
        throw new EJBException("FinArFundsInRefund exists!. FinArFundsInRefund = "
                + finArFundsInRefund.getRefundRecordId());
    }
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#persistFinArFundsInRefundList
* (java.util.List)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public ArrayList<FinArFundsInRefund> persistFinArFundsInRefundList(
        List<FinArFundsInRefund> finArFundsInRefundList) {
    _log.debug("persistFinArFundsInRefundList()");
    // Idiom: enhanced for over a typed list replaces the raw Iterator and
    // the cast on every element (the rawtypes suppression is now unneeded).
    for (FinArFundsInRefund finArFundsInRefund : finArFundsInRefundList) {
        this.persistFinArFundsInRefund(finArFundsInRefund);
    }
    // BUG FIX: the original blindly cast the argument to ArrayList, which
    // throws ClassCastException for any other List implementation (e.g. a
    // LinkedList or an unmodifiable wrapper). Copy defensively instead.
    return finArFundsInRefundList instanceof ArrayList
            ? (ArrayList<FinArFundsInRefund>) finArFundsInRefundList
            : new ArrayList<FinArFundsInRefund>(finArFundsInRefundList);
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#mergeFinArFundsInRefund(com.
* gdn.venice.persistence.FinArFundsInRefund)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public FinArFundsInRefund mergeFinArFundsInRefund(FinArFundsInRefund finArFundsInRefund) {
    Long startTime = System.currentTimeMillis();
    _log.debug("mergeFinArFundsInRefund()");
    // Call the onPreMerge() callback and throw an exception if it fails
    if (this._callback != null) {
        if (!this._callback.onPreMerge(finArFundsInRefund)) {
            _log.error("An onPreMerge callback operation failed for:"
                    + this._sessionCallbackClassName);
            throw new EJBException(
                    "An onPreMerge callback operation failed for:"
                            + this._sessionCallbackClassName);
        }
    }
    // Merge falls back to persist when the record does not exist yet.
    FinArFundsInRefund existing = null;
    if (finArFundsInRefund.getRefundRecordId() != null) {
        _log.debug("mergeFinArFundsInRefund:em.find()");
        existing = em.find(FinArFundsInRefund.class, finArFundsInRefund.getRefundRecordId());
    }
    if (existing == null) {
        return this.persistFinArFundsInRefund(finArFundsInRefund);
    } else {
        _log.debug("mergeFinArFundsInRefund:em.merge()");
        try {
            em.merge(finArFundsInRefund);
        } catch (Exception e) {
            _log.error("An exception occured when calling em.merge():"
                    + e.getMessage());
            throw new EJBException(e);
        }
        _log.debug("mergeFinArFundsInRefund:em.flush()");
        try {
            em.flush();
            em.clear();
        } catch (Exception e) {
            _log.error("An exception occured when calling em.flush():"
                    + e.getMessage());
            throw new EJBException(e);
        }
        // Re-read and refresh so the caller gets the post-merge database
        // state (including any trigger/default-populated columns).
        FinArFundsInRefund newobject = em.find(FinArFundsInRefund.class,
                finArFundsInRefund.getRefundRecordId());
        _log.debug("mergeFinArFundsInRefund():em.refresh");
        try {
            em.refresh(newobject);
        } catch (Exception e) {
            _log.error("An exception occured when calling em.refresh():"
                    + e.getMessage());
            throw new EJBException(e);
        }
        // Call the onPostMerge() callback and throw an exception if it fails
        if (this._callback != null) {
            if (!this._callback.onPostMerge(newobject)) {
                _log.error("An onPostMerge callback operation failed for:"
                        + this._sessionCallbackClassName);
                throw new EJBException(
                        "An onPostMerge callback operation failed for:"
                                + this._sessionCallbackClassName);
            }
        }
        Long endTime = System.currentTimeMillis();
        // BUG FIX: duration was startTime - endTime (always <= 0).
        Long duration = endTime - startTime;
        _log.debug("mergeFinArFundsInRefund() duration:" + duration + "ms");
        return newobject;
    }
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#mergeFinArFundsInRefundList(
* java.util.List)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public ArrayList<FinArFundsInRefund> mergeFinArFundsInRefundList(
        List<FinArFundsInRefund> finArFundsInRefundList) {
    _log.debug("mergeFinArFundsInRefundList()");
    // Idiom: enhanced for over a typed list replaces the raw Iterator and
    // per-element cast (the rawtypes suppression is now unneeded).
    for (FinArFundsInRefund finArFundsInRefund : finArFundsInRefundList) {
        this.mergeFinArFundsInRefund(finArFundsInRefund);
    }
    // BUG FIX: avoid the unconditional ArrayList cast, which throws
    // ClassCastException for any other List implementation.
    return finArFundsInRefundList instanceof ArrayList
            ? (ArrayList<FinArFundsInRefund>) finArFundsInRefundList
            : new ArrayList<FinArFundsInRefund>(finArFundsInRefundList);
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#removeFinArFundsInRefund(com.
* gdn.venice.persistence.FinArFundsInRefund)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void removeFinArFundsInRefund(FinArFundsInRefund finArFundsInRefund) {
    Long startTime = System.currentTimeMillis();
    _log.debug("removeFinArFundsInRefund()");
    // Call the onPreRemove() callback and throw an exception if it fails
    if (this._callback != null) {
        if (!this._callback.onPreRemove(finArFundsInRefund)) {
            _log.error("An onPreRemove callback operation failed for:"
                    + this._sessionCallbackClassName);
            throw new EJBException(
                    "An onPreRemove callback operation failed for:"
                            + this._sessionCallbackClassName);
        }
    }
    // Re-attach: em.remove() requires a managed instance, so look the
    // entity up by primary key first.
    _log.debug("removeFinArFundsInRefund:em.find()");
    finArFundsInRefund = em.find(FinArFundsInRefund.class, finArFundsInRefund.getRefundRecordId());
    // ROBUSTNESS: em.find() returns null for an unknown id; the original
    // then passed null to em.remove() and surfaced an opaque wrapped
    // IllegalArgumentException. Fail with a clear message instead (still
    // an EJBException, as before).
    if (finArFundsInRefund == null) {
        throw new EJBException(
                "removeFinArFundsInRefund: no FinArFundsInRefund found for the given id");
    }
    try {
        _log.debug("removeFinArFundsInRefund:em.remove()");
        em.remove(finArFundsInRefund);
    } catch (Exception e) {
        _log.error("An exception occured when calling em.remove():"
                + e.getMessage());
        throw new EJBException(e);
    }
    // Call the onPostRemove() callback and throw an exception if it fails
    if (this._callback != null) {
        if (!this._callback.onPostRemove(finArFundsInRefund)) {
            _log.error("An onPostRemove callback operation failed for:"
                    + this._sessionCallbackClassName);
            throw new EJBException(
                    "An onPostRemove callback operation failed for:"
                            + this._sessionCallbackClassName);
        }
    }
    _log.debug("removeFinArFundsInRefund:em.flush()");
    em.flush();
    em.clear();
    Long endTime = System.currentTimeMillis();
    // BUG FIX: duration was startTime - endTime (always <= 0).
    Long duration = endTime - startTime;
    _log.debug("removeFinArFundsInRefund() duration:" + duration + "ms");
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#removeFinArFundsInRefundList(
* java.util.List)
*/
@Override
@TransactionAttribute(TransactionAttributeType.REQUIRED)
public void removeFinArFundsInRefundList(List<FinArFundsInRefund> finArFundsInRefundList) {
    _log.debug("removeFinArFundsInRefundList()");
    // Idiom: enhanced for over a typed list replaces the raw Iterator and
    // per-element cast (the rawtypes suppression is now unneeded).
    for (FinArFundsInRefund finArFundsInRefund : finArFundsInRefundList) {
        this.removeFinArFundsInRefund(finArFundsInRefund);
    }
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#findByFinArFundsInRefundLike(
* com.gdn.venice.persistence.FinArFundsInRefund, int, int)
*/
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public List<FinArFundsInRefund> findByFinArFundsInRefundLike(FinArFundsInRefund finArFundsInRefund,
        JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults) {
    Long startTime = System.currentTimeMillis();
    _log.debug("findByFinArFundsInRefundLike()");
    // ROBUSTNESS: validate criteria up front; the original built the full
    // query string before discovering the criteria was null.
    if (criteria == null) {
        String errMsg = "A query has been initiated with null criteria.";
        _log.error(errMsg);
        throw new EJBException(errMsg);
    }
    JPQLQueryStringBuilder qb = new JPQLQueryStringBuilder(finArFundsInRefund);
    HashMap complexTypeBindings = new HashMap();
    String stmt = qb.buildQueryString(complexTypeBindings, criteria);
    /*
     * Get the binding array from the query builder and make
     * it available to the queryByRange method (which consumes
     * and clears it).
     */
    this.bindingArray = qb.getBindingArray();
    for (int i = 0; i < this.bindingArray.length; i++) {
        // BUG FIX: binding diagnostics went to System.out; route them
        // through the logger at debug level like the rest of the class.
        _log.debug("Bindings:" + i + ":" + this.bindingArray[i]);
    }
    List<FinArFundsInRefund> finArFundsInRefundList = this.queryByRange(stmt, firstResult, maxResults);
    Long endTime = System.currentTimeMillis();
    // BUG FIX: duration was startTime - endTime (always <= 0).
    Long duration = endTime - startTime;
    _log.debug("findByFinArFundsInRefundLike() duration:" + duration + "ms");
    return finArFundsInRefundList;
}
/*
* (non-Javadoc)
*
* @see
* com.gdn.venice.facade.FinArFundsInRefundSessionEJBRemote#findByFinArFundsInRefundLikeFR(
* com.gdn.venice.persistence.FinArFundsInRefund, int, int)
*/
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
public FinderReturn findByFinArFundsInRefundLikeFR(FinArFundsInRefund finArFundsInRefund,
        JPQLAdvancedQueryCriteria criteria, int firstResult, int maxResults) {
    Long startTime = System.currentTimeMillis();
    _log.debug("findByFinArFundsInRefundLikeFR()");
    // ROBUSTNESS: validate criteria up front; the original built the full
    // query string before discovering the criteria was null.
    if (criteria == null) {
        String errMsg = "A query has been initiated with null criteria.";
        _log.error(errMsg);
        throw new EJBException(errMsg);
    }
    JPQLQueryStringBuilder qb = new JPQLQueryStringBuilder(finArFundsInRefund);
    HashMap complexTypeBindings = new HashMap();
    String stmt = qb.buildQueryString(complexTypeBindings, criteria);
    /*
     * Get the binding array from the query builder and make
     * it available to the queryByRange method (which consumes
     * and clears it).
     */
    this.bindingArray = qb.getBindingArray();
    for (int i = 0; i < this.bindingArray.length; i++) {
        // BUG FIX: binding diagnostics went to System.out; use the logger.
        _log.debug("Bindings:" + i + ":" + this.bindingArray[i]);
    }
    // Set the finder return object with the count of the total query rows.
    // The count statement reuses the generated from/where clauses (and the
    // same positional bindings), swapping only the select clause.
    FinderReturn fr = new FinderReturn();
    String countStmt = "select count(o) " + stmt.substring(stmt.indexOf("from"));
    Query query = null;
    try {
        query = em.createQuery(countStmt);
        if (this.bindingArray != null) {
            for (int i = 0; i < bindingArray.length; ++i) {
                if (bindingArray[i] != null) {
                    query.setParameter(i + 1, bindingArray[i]);
                }
            }
        }
        Long totalRows = (Long) query.getSingleResult();
        fr.setNumQueryRows(totalRows);
    } catch (Exception e) {
        _log.error("An exception occured when calling em.createQuery():"
                + e.getMessage());
        throw new EJBException(e);
    }
    // Set the finder return object with the query list. This must run
    // after the count query above because queryByRange() clears
    // this.bindingArray.
    fr.setResultList(this.queryByRange(stmt, firstResult, maxResults));
    Long endTime = System.currentTimeMillis();
    // BUG FIX: duration was startTime - endTime (always <= 0), and the
    // log line was mislabeled as findByFinArFundsInRefundLike().
    Long duration = endTime - startTime;
    _log.debug("findByFinArFundsInRefundLikeFR() duration:" + duration + "ms");
    return fr;
}
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.clubobsidian.obsidianengine.asm.commons;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.clubobsidian.obsidianengine.asm.Handle;
import com.clubobsidian.obsidianengine.asm.Label;
import com.clubobsidian.obsidianengine.asm.MethodVisitor;
import com.clubobsidian.obsidianengine.asm.Opcodes;
import com.clubobsidian.obsidianengine.asm.Type;
/**
* A {@link com.clubobsidian.obsidianengine.asm.MethodVisitor} to insert before, after and around
* advices in methods and constructors.
* <p>
* The behavior for constructors is like this:
* <ol>
*
* <li>as long as the INVOKESPECIAL for the object initialization has not been
* reached, every bytecode instruction is dispatched in the ctor code visitor</li>
*
* <li>when this one is reached, it is only added in the ctor code visitor and a
* JP invoke is added</li>
*
* <li>after that, only the other code visitor receives the instructions</li>
*
* </ol>
*
* @author Eugene Kuleshov
* @author Eric Bruneton
*/
public abstract class AdviceAdapter extends GeneratorAdapter implements Opcodes {
    // Sentinel placed on the simulated operand stack for the uninitialized
    // "this" reference inside a constructor; any other value is OTHER.
    private static final Object THIS = new Object();
    private static final Object OTHER = new Object();
    protected int methodAccess;
    protected String methodDesc;
    // True while visiting a constructor whose super()/this() call has not
    // yet been reached; cleared in doVisitMethodInsn() once it is seen.
    private boolean constructor;
    private boolean superInitialized;
    // Simulated operand stack (one entry per stack slot) used only in
    // constructors to locate the initializing INVOKESPECIAL.
    private List<Object> stackFrame;
    // Saved stack states for labels that are branch targets, so the
    // simulation can resume with the correct stack after a jump.
    private Map<Label, List<Object>> branches;
    /**
     * Creates a new {@link AdviceAdapter}.
     *
     * @param api
     *            the ASM API version implemented by this visitor. Must be one
     *            of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
     * @param mv
     *            the method visitor to which this adapter delegates calls.
     * @param access
     *            the method's access flags (see {@link Opcodes}).
     * @param name
     *            the method's name.
     * @param desc
     *            the method's descriptor (see {@link Type Type}).
     */
    protected AdviceAdapter(final int api, final MethodVisitor mv,
            final int access, final String name, final String desc) {
        super(api, mv, access, name, desc);
        methodAccess = access;
        methodDesc = desc;
        constructor = "<init>".equals(name);
    }
    // For plain methods the enter advice fires immediately; for
    // constructors it is deferred until super()/this() has run.
    @Override
    public void visitCode() {
        mv.visitCode();
        if (constructor) {
            stackFrame = new ArrayList<Object>();
            branches = new HashMap<Label, List<Object>>();
        } else {
            superInitialized = true;
            onMethodEnter();
        }
    }
    // On reaching a branch target, restore the stack state that was saved
    // when the branch to this label was visited.
    @Override
    public void visitLabel(final Label label) {
        mv.visitLabel(label);
        if (constructor && branches != null) {
            List<Object> frame = branches.get(label);
            if (frame != null) {
                stackFrame = frame;
                branches.remove(label);
            }
        }
    }
    // Simulates the stack effect of each zero-operand instruction while in
    // a constructor, and fires the exit advice before returns/throws.
    @Override
    public void visitInsn(final int opcode) {
        if (constructor) {
            int s;
            switch (opcode) {
            case RETURN: // empty stack
                onMethodExit(opcode);
                break;
            case IRETURN: // 1 before n/a after
            case FRETURN: // 1 before n/a after
            case ARETURN: // 1 before n/a after
            case ATHROW: // 1 before n/a after
                popValue();
                onMethodExit(opcode);
                break;
            case LRETURN: // 2 before n/a after
            case DRETURN: // 2 before n/a after
                popValue();
                popValue();
                onMethodExit(opcode);
                break;
            case NOP:
            case LALOAD: // remove 2 add 2
            case DALOAD: // remove 2 add 2
            case LNEG:
            case DNEG:
            case FNEG:
            case INEG:
            case L2D:
            case D2L:
            case F2I:
            case I2B:
            case I2C:
            case I2S:
            case I2F:
            case ARRAYLENGTH:
                break;
            case ACONST_NULL:
            case ICONST_M1:
            case ICONST_0:
            case ICONST_1:
            case ICONST_2:
            case ICONST_3:
            case ICONST_4:
            case ICONST_5:
            case FCONST_0:
            case FCONST_1:
            case FCONST_2:
            case F2L: // 1 before 2 after
            case F2D:
            case I2L:
            case I2D:
                pushValue(OTHER);
                break;
            case LCONST_0:
            case LCONST_1:
            case DCONST_0:
            case DCONST_1:
                pushValue(OTHER);
                pushValue(OTHER);
                break;
            case IALOAD: // remove 2 add 1
            case FALOAD: // remove 2 add 1
            case AALOAD: // remove 2 add 1
            case BALOAD: // remove 2 add 1
            case CALOAD: // remove 2 add 1
            case SALOAD: // remove 2 add 1
            case POP:
            case IADD:
            case FADD:
            case ISUB:
            case LSHL: // 3 before 2 after
            case LSHR: // 3 before 2 after
            case LUSHR: // 3 before 2 after
            case L2I: // 2 before 1 after
            case L2F: // 2 before 1 after
            case D2I: // 2 before 1 after
            case D2F: // 2 before 1 after
            case FSUB:
            case FMUL:
            case FDIV:
            case FREM:
            case FCMPL: // 2 before 1 after
            case FCMPG: // 2 before 1 after
            case IMUL:
            case IDIV:
            case IREM:
            case ISHL:
            case ISHR:
            case IUSHR:
            case IAND:
            case IOR:
            case IXOR:
            case MONITORENTER:
            case MONITOREXIT:
                popValue();
                break;
            case POP2:
            case LSUB:
            case LMUL:
            case LDIV:
            case LREM:
            case LADD:
            case LAND:
            case LOR:
            case LXOR:
            case DADD:
            case DMUL:
            case DSUB:
            case DDIV:
            case DREM:
                popValue();
                popValue();
                break;
            case IASTORE:
            case FASTORE:
            case AASTORE:
            case BASTORE:
            case CASTORE:
            case SASTORE:
            case LCMP: // 4 before 1 after
            case DCMPL:
            case DCMPG:
                popValue();
                popValue();
                popValue();
                break;
            case LASTORE:
            case DASTORE:
                popValue();
                popValue();
                popValue();
                popValue();
                break;
            case DUP:
                pushValue(peekValue());
                break;
            case DUP_X1:
                // Insert a copy of the top value below the value under it.
                s = stackFrame.size();
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                break;
            case DUP_X2:
                s = stackFrame.size();
                stackFrame.add(s - 3, stackFrame.get(s - 1));
                break;
            case DUP2:
                s = stackFrame.size();
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                break;
            case DUP2_X1:
                s = stackFrame.size();
                stackFrame.add(s - 3, stackFrame.get(s - 1));
                stackFrame.add(s - 3, stackFrame.get(s - 1));
                break;
            case DUP2_X2:
                s = stackFrame.size();
                stackFrame.add(s - 4, stackFrame.get(s - 1));
                stackFrame.add(s - 4, stackFrame.get(s - 1));
                break;
            case SWAP:
                // Copy the top below the next value, then drop the original top.
                s = stackFrame.size();
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                stackFrame.remove(s);
                break;
            }
        } else {
            // Outside a constructor no stack tracking is needed; only the
            // exit advice before any return/throw matters.
            switch (opcode) {
            case RETURN:
            case IRETURN:
            case FRETURN:
            case ARETURN:
            case LRETURN:
            case DRETURN:
            case ATHROW:
                onMethodExit(opcode);
                break;
            }
        }
        mv.visitInsn(opcode);
    }
    // Loads push one or two slots (ALOAD 0 pushes the THIS sentinel);
    // stores pop one or two slots.
    @Override
    public void visitVarInsn(final int opcode, final int var) {
        super.visitVarInsn(opcode, var);
        if (constructor) {
            switch (opcode) {
            case ILOAD:
            case FLOAD:
                pushValue(OTHER);
                break;
            case LLOAD:
            case DLOAD:
                pushValue(OTHER);
                pushValue(OTHER);
                break;
            case ALOAD:
                pushValue(var == 0 ? THIS : OTHER);
                break;
            case ASTORE:
            case ISTORE:
            case FSTORE:
                popValue();
                break;
            case LSTORE:
            case DSTORE:
                popValue();
                popValue();
                break;
            }
        }
    }
    // Field access: long/double fields occupy two stack slots.
    @Override
    public void visitFieldInsn(final int opcode, final String owner,
            final String name, final String desc) {
        mv.visitFieldInsn(opcode, owner, name, desc);
        if (constructor) {
            char c = desc.charAt(0);
            boolean longOrDouble = c == 'J' || c == 'D';
            switch (opcode) {
            case GETSTATIC:
                pushValue(OTHER);
                if (longOrDouble) {
                    pushValue(OTHER);
                }
                break;
            case PUTSTATIC:
                popValue();
                if (longOrDouble) {
                    popValue();
                }
                break;
            case PUTFIELD:
                popValue();
                popValue();
                if (longOrDouble) {
                    popValue();
                }
                break;
            // case GETFIELD:
            default:
                // GETFIELD pops the objectref and pushes the value: net
                // effect is zero slots unless the field is long/double.
                if (longOrDouble) {
                    pushValue(OTHER);
                }
            }
        }
    }
    // BIPUSH/SIPUSH push one slot; NEWARRAY pops a length and pushes a
    // reference, so its net effect is zero.
    @Override
    public void visitIntInsn(final int opcode, final int operand) {
        mv.visitIntInsn(opcode, operand);
        if (constructor && opcode != NEWARRAY) {
            pushValue(OTHER);
        }
    }
    // LDC pushes one slot, or two for long/double constants.
    @Override
    public void visitLdcInsn(final Object cst) {
        mv.visitLdcInsn(cst);
        if (constructor) {
            pushValue(OTHER);
            if (cst instanceof Double || cst instanceof Long) {
                pushValue(OTHER);
            }
        }
    }
    // Pops one dimension count per dimension, pushes the array reference.
    @Override
    public void visitMultiANewArrayInsn(final String desc, final int dims) {
        mv.visitMultiANewArrayInsn(desc, dims);
        if (constructor) {
            for (int i = 0; i < dims; i++) {
                popValue();
            }
            pushValue(OTHER);
        }
    }
    @Override
    public void visitTypeInsn(final int opcode, final String type) {
        mv.visitTypeInsn(opcode, type);
        // ANEWARRAY, CHECKCAST or INSTANCEOF don't change stack
        if (constructor && opcode == NEW) {
            pushValue(OTHER);
        }
    }
    // Pre-ASM5 overload; kept for binary compatibility. Delegates to the
    // itf-aware path, inferring itf from the opcode.
    @Deprecated
    @Override
    public void visitMethodInsn(final int opcode, final String owner,
            final String name, final String desc) {
        if (api >= Opcodes.ASM5) {
            super.visitMethodInsn(opcode, owner, name, desc);
            return;
        }
        doVisitMethodInsn(opcode, owner, name, desc,
                opcode == Opcodes.INVOKEINTERFACE);
    }
    @Override
    public void visitMethodInsn(final int opcode, final String owner,
            final String name, final String desc, final boolean itf) {
        if (api < Opcodes.ASM5) {
            super.visitMethodInsn(opcode, owner, name, desc, itf);
            return;
        }
        doVisitMethodInsn(opcode, owner, name, desc, itf);
    }
    // Core of the constructor tracking: an INVOKESPECIAL that consumes the
    // THIS sentinel is the super()/this() call, at which point the enter
    // advice fires and stack simulation stops.
    private void doVisitMethodInsn(int opcode, final String owner,
            final String name, final String desc, final boolean itf) {
        mv.visitMethodInsn(opcode, owner, name, desc, itf);
        if (constructor) {
            Type[] types = Type.getArgumentTypes(desc);
            for (int i = 0; i < types.length; i++) {
                popValue();
                if (types[i].getSize() == 2) {
                    popValue();
                }
            }
            switch (opcode) {
            // case INVOKESTATIC:
            // break;
            case INVOKEINTERFACE:
            case INVOKEVIRTUAL:
                popValue(); // objectref
                break;
            case INVOKESPECIAL:
                Object type = popValue(); // objectref
                if (type == THIS && !superInitialized) {
                    onMethodEnter();
                    superInitialized = true;
                    // once super has been initialized it is no longer
                    // necessary to keep track of stack state
                    constructor = false;
                }
                break;
            }
            Type returnType = Type.getReturnType(desc);
            if (returnType != Type.VOID_TYPE) {
                pushValue(OTHER);
                if (returnType.getSize() == 2) {
                    pushValue(OTHER);
                }
            }
        }
    }
    // Like a static method call for stack purposes: pop arguments, push
    // the return value (no receiver).
    @Override
    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
            Object... bsmArgs) {
        mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
        if (constructor) {
            Type[] types = Type.getArgumentTypes(desc);
            for (int i = 0; i < types.length; i++) {
                popValue();
                if (types[i].getSize() == 2) {
                    popValue();
                }
            }
            Type returnType = Type.getReturnType(desc);
            if (returnType != Type.VOID_TYPE) {
                pushValue(OTHER);
                if (returnType.getSize() == 2) {
                    pushValue(OTHER);
                }
            }
        }
    }
    // Applies the comparison's pops, then records the stack state at the
    // branch target so visitLabel() can restore it.
    @Override
    public void visitJumpInsn(final int opcode, final Label label) {
        mv.visitJumpInsn(opcode, label);
        if (constructor) {
            switch (opcode) {
            case IFEQ:
            case IFNE:
            case IFLT:
            case IFGE:
            case IFGT:
            case IFLE:
            case IFNULL:
            case IFNONNULL:
                popValue();
                break;
            case IF_ICMPEQ:
            case IF_ICMPNE:
            case IF_ICMPLT:
            case IF_ICMPGE:
            case IF_ICMPGT:
            case IF_ICMPLE:
            case IF_ACMPEQ:
            case IF_ACMPNE:
                popValue();
                popValue();
                break;
            case JSR:
                // JSR pushes the return address.
                pushValue(OTHER);
                break;
            }
            addBranch(label);
        }
    }
    @Override
    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
            final Label[] labels) {
        mv.visitLookupSwitchInsn(dflt, keys, labels);
        if (constructor) {
            popValue();
            addBranches(dflt, labels);
        }
    }
    @Override
    public void visitTableSwitchInsn(final int min, final int max,
            final Label dflt, final Label... labels) {
        mv.visitTableSwitchInsn(min, max, dflt, labels);
        if (constructor) {
            popValue();
            addBranches(dflt, labels);
        }
    }
    // An exception handler always starts with exactly one value (the
    // thrown exception) on the stack.
    @Override
    public void visitTryCatchBlock(Label start, Label end, Label handler,
            String type) {
        super.visitTryCatchBlock(start, end, handler, type);
        if (constructor && !branches.containsKey(handler)) {
            List<Object> stackFrame = new ArrayList<Object>();
            stackFrame.add(OTHER);
            branches.put(handler, stackFrame);
        }
    }
    private void addBranches(final Label dflt, final Label[] labels) {
        addBranch(dflt);
        for (int i = 0; i < labels.length; i++) {
            addBranch(labels[i]);
        }
    }
    // Saves a copy of the current stack state for the given branch target
    // (first writer wins; later branches to the same label are ignored).
    private void addBranch(final Label label) {
        if (branches.containsKey(label)) {
            return;
        }
        branches.put(label, new ArrayList<Object>(stackFrame));
    }
    private Object popValue() {
        return stackFrame.remove(stackFrame.size() - 1);
    }
    private Object peekValue() {
        return stackFrame.get(stackFrame.size() - 1);
    }
    private void pushValue(final Object o) {
        stackFrame.add(o);
    }
    /**
     * Called at the beginning of the method or after super class call in
     * the constructor. <br>
     * <br>
     *
     * <i>Custom code can use or change all the local variables, but should not
     * change state of the stack.</i>
     */
    protected void onMethodEnter() {
    }
    /**
     * Called before explicit exit from the method using either return or throw.
     * Top element on the stack contains the return value or exception instance.
     * For example:
     *
     * <pre>
     *   public void onMethodExit(int opcode) {
     *     if(opcode==RETURN) {
     *         visitInsn(ACONST_NULL);
     *     } else if(opcode==ARETURN || opcode==ATHROW) {
     *         dup();
     *     } else {
     *         if(opcode==LRETURN || opcode==DRETURN) {
     *             dup2();
     *         } else {
     *             dup();
     *         }
     *         box(Type.getReturnType(this.methodDesc));
     *     }
     *     visitIntInsn(SIPUSH, opcode);
     *     visitMethodInsn(INVOKESTATIC, owner, "onExit", "(Ljava/lang/Object;I)V");
     *   }
     *
     *   // an actual call back method
     *   public static void onExit(Object param, int opcode) {
     *     ...
     * </pre>
     *
     * <br>
     * <br>
     *
     * <i>Custom code can use or change all the local variables, but should not
     * change state of the stack.</i>
     *
     * @param opcode
     *            one of the RETURN, IRETURN, FRETURN, ARETURN, LRETURN, DRETURN
     *            or ATHROW
     *
     */
    protected void onMethodExit(int opcode) {
    }
    // TODO onException, onMethodCall
}
| |
package org.apache.maven.plugin.checkstyle;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.List;
import org.apache.maven.plugin.logging.Log;
import org.apache.maven.project.MavenProject;
import com.puppycrawl.tools.checkstyle.DefaultLogger;
import com.puppycrawl.tools.checkstyle.api.AuditListener;
/**
* @author <a href="mailto:olamy@apache.org">olamy</a>
* @since 2.5
* @version $Id$
*/
public class CheckstyleExecutorRequest
{
/**
* Specifies the names filter of the source files to be used for Checkstyle.
*/
private String includes;
/**
* Specifies the names filter of the source files to be excluded for Checkstyle.
*/
private String excludes;
private MavenProject project;
private Log log;
private String suppressionsLocation;
private boolean includeTestSourceDirectory;
private File testSourceDirectory;
private File sourceDirectory;
private boolean failsOnError;
private AuditListener listener;
private boolean consoleOutput;
private DefaultLogger defaultLogger;
private ByteArrayOutputStream stringOutputStream;
private String propertiesLocation;
//
private String configLocation;
private String propertyExpansion;
private String headerLocation;
private String cacheFile;
private String suppressionsFileExpression;
private String encoding;
/**
* @since 2.8
*/
private boolean aggregate = false;
/**
* @since 2.8
*/
private List<MavenProject> reactorProjects;
/**
* Constructor.
*/
public CheckstyleExecutorRequest( )
{
//nothing
}
/**
* Returns the includes parameter.
*
* @return The includes parameter.
*/
public String getIncludes()
{
return includes;
}
public CheckstyleExecutorRequest setIncludes( String includes )
{
this.includes = includes;
return this;
}
public String getExcludes()
{
return excludes;
}
public CheckstyleExecutorRequest setExcludes( String excludes )
{
this.excludes = excludes;
return this;
}
public MavenProject getProject()
{
return project;
}
public CheckstyleExecutorRequest setProject( MavenProject project )
{
this.project = project;
return this;
}
public Log getLog()
{
return log;
}
public CheckstyleExecutorRequest setLog( Log log )
{
this.log = log;
return this;
}
public String getSuppressionsLocation()
{
return suppressionsLocation;
}
public CheckstyleExecutorRequest setSuppressionsLocation( String suppressionsLocation )
{
this.suppressionsLocation = suppressionsLocation;
return this;
}
public boolean isIncludeTestSourceDirectory()
{
return includeTestSourceDirectory;
}
public CheckstyleExecutorRequest setIncludeTestSourceDirectory( boolean includeTestSourceDirectory )
{
this.includeTestSourceDirectory = includeTestSourceDirectory;
return this;
}
public File getTestSourceDirectory()
{
return testSourceDirectory;
}
public CheckstyleExecutorRequest setTestSourceDirectory( File testSourceDirectory )
{
this.testSourceDirectory = testSourceDirectory;
return this;
}
public File getSourceDirectory()
{
return sourceDirectory;
}
public CheckstyleExecutorRequest setSourceDirectory( File sourceDirectory )
{
this.sourceDirectory = sourceDirectory;
return this;
}
public boolean isFailsOnError()
{
return failsOnError;
}
public CheckstyleExecutorRequest setFailsOnError( boolean failsOnError )
{
this.failsOnError = failsOnError;
return this;
}
public AuditListener getListener()
{
return listener;
}
public CheckstyleExecutorRequest setListener( AuditListener listener )
{
this.listener = listener;
return this;
}
public boolean isConsoleOutput()
{
return consoleOutput;
}
public CheckstyleExecutorRequest setConsoleOutput( boolean consoleOutput )
{
this.consoleOutput = consoleOutput;
return this;
}
public CheckstyleExecutorRequest setConsoleListener( DefaultLogger defaultLogger )
{
this.defaultLogger = defaultLogger;
return this;
}
public DefaultLogger getConsoleListener()
{
return this.defaultLogger;
}
public ByteArrayOutputStream getStringOutputStream()
{
return stringOutputStream;
}
public CheckstyleExecutorRequest setStringOutputStream( ByteArrayOutputStream stringOutputStream )
{
this.stringOutputStream = stringOutputStream;
return this;
}
public String getConfigLocation()
{
return configLocation;
}
public CheckstyleExecutorRequest setConfigLocation( String configLocation )
{
this.configLocation = configLocation;
return this;
}
public String getPropertyExpansion()
{
return propertyExpansion;
}
public CheckstyleExecutorRequest setPropertyExpansion( String propertyExpansion )
{
this.propertyExpansion = propertyExpansion;
return this;
}
public String getHeaderLocation()
{
return headerLocation;
}
public CheckstyleExecutorRequest setHeaderLocation( String headerLocation )
{
this.headerLocation = headerLocation;
return this;
}
public String getCacheFile()
{
return cacheFile;
}
public CheckstyleExecutorRequest setCacheFile( String cacheFile )
{
this.cacheFile = cacheFile;
return this;
}
public String getSuppressionsFileExpression()
{
return suppressionsFileExpression;
}
public CheckstyleExecutorRequest setSuppressionsFileExpression( String suppressionsFileExpression )
{
this.suppressionsFileExpression = suppressionsFileExpression;
return this;
}
public String getEncoding()
{
return encoding;
}
public CheckstyleExecutorRequest setEncoding( String encoding )
{
this.encoding = encoding;
return this;
}
public String getPropertiesLocation()
{
return propertiesLocation;
}
public void setPropertiesLocation( String propertiesLocation )
{
this.propertiesLocation = propertiesLocation;
}
/**
* Returns true if the report is aggregated.
*
* @return <code>true</code> if the report is aggregated.
*/
public boolean isAggregate()
{
return aggregate;
}
/**
* Sets the aggregate parameter.
*
* @param pAggregate <code>true</code> if an aggregated report is desidered.
* @return This object.
*/
public CheckstyleExecutorRequest setAggregate( boolean pAggregate )
{
this.aggregate = pAggregate;
return this;
}
/**
 * Returns the list of reactor projects.
 *
 * @return The reactor projects.
 */
public List<MavenProject> getReactorProjects()
{
    return reactorProjects;
}
/**
 * Sets the list of reactor projects.
 *
 * @param pReactorProjects The reactor projects.
 * @return This object.
 */
public CheckstyleExecutorRequest setReactorProjects( List<MavenProject> pReactorProjects )
{
    this.reactorProjects = pReactorProjects;
    return this;
}
}
| |
package org.sakaiproject.acadtermmanage.tool.pages;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import org.apache.wicket.extensions.markup.html.repeater.data.sort.OrderByBorder;
import org.apache.wicket.extensions.markup.html.repeater.data.sort.SortOrder;
import org.apache.wicket.extensions.markup.html.repeater.util.SortParam;
import org.apache.wicket.extensions.markup.html.repeater.util.SortableDataProvider;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Button;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.NumberTextField;
import org.apache.wicket.markup.html.form.RequiredTextField;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.navigation.paging.PagingNavigator;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.markup.repeater.RepeatingView;
import org.apache.wicket.markup.repeater.data.DataView;
import org.apache.wicket.markup.repeater.data.IDataProvider;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LoadableDetachableModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.ResourceModel;
import org.sakaiproject.acadtermmanage.exceptions.DuplicateKeyException;
import org.sakaiproject.acadtermmanage.exceptions.NoSuchKeyException;
import org.sakaiproject.acadtermmanage.model.Semester;
import org.sakaiproject.acadtermmanage.tool.AcademicTermConstants;
import org.sakaiproject.acadtermmanage.tool.util.ComparatorFactory;
import org.sakaiproject.acadtermmanage.tool.wicketstuff.ActionLink;
import org.sakaiproject.acadtermmanage.tool.wicketstuff.ActionPanel;
import lombok.extern.slf4j.Slf4j;
// TODO fromDate must not be after startDate => validator
@Slf4j
public class SemesterPage extends BasePage implements AcademicTermConstants {

    private static final long serialVersionUID = 1L;

    /** Default number of table rows shown per page. */
    private static final int DEFAULT_ITEMS_PER_PAGE = 10;

    /**
     * Assembles the page: an add/edit form, a sortable paged table of semesters,
     * and a paging form that lets the user change the page size.
     */
    public SemesterPage() {
        super();
        IDataProvider<Semester> dataProvider = createDataProvider();
        SemesterForm semesterEditor = createSemesterForm("form", new Semester());
        add(semesterEditor);
        DataView<Semester> dataView = createDataView(dataProvider, semesterEditor);
        add(dataView);
        if (dataProvider instanceof SortableSemesterDataProvider) {
            addOrderBorders((SortableSemesterDataProvider) dataProvider, dataView);
        }
        add(createPagingForm("pagingform", dataView));
    }

    /** Factory hook so subclasses can supply a different data provider. */
    protected IDataProvider<Semester> createDataProvider() {
        return new SortableSemesterDataProvider();
    }

    /** Factory hook so subclasses can supply a different paging form. */
    protected PagingForm createPagingForm(String componentId, DataView<?> view) {
        return new PagingForm(componentId, view);
    }

    /**
     * Form containing a page navigator plus a numeric field that lets the user
     * change how many rows are shown per page. Only visible when the data does
     * not fit on a single page.
     */
    private static class PagingForm extends Form<Long> {

        private static final long serialVersionUID = 1L;

        private final DataView<?> theView;
        private final PagingNavigator pager;
        private boolean visible;

        public PagingForm(String id, DataView<?> dv) {
            // Long.valueOf instead of the deprecated "new Long(..)" constructor.
            super(id, Model.of(Long.valueOf(dv.getItemsPerPage())));
            this.theView = dv;
            NumberTextField<Long> numField = new NumberTextField<Long>("pagecount", super.getModel());
            numField.setType(Long.class);
            numField.setMinimum(1L);
            numField.setConvertEmptyInputStringToNull(true);
            numField.setRequired(true);
            numField.setLabel(new ResourceModel("lbl_pagecount"));
            add(numField);
            pager = new PagingNavigator("navigator", this.theView);
            add(pager);
            setOutputMarkupId(true);
            updateVisibility();
        }

        /** Show the form only when there is more than one page of data. */
        private final void updateVisibility() {
            this.visible = theView.getItemCount() > theView.getItemsPerPage();
        }

        @Override
        public void onSubmit() {
            log.debug("paging form submit");
            Long newPageNumber = (Long) getDefaultModelObject();
            if (newPageNumber != null) {
                theView.setItemsPerPage(newPageNumber);
                updateVisibility();
            }
        }

        @Override
        public boolean isVisible() {
            return this.visible;
        }
    }

    /** Factory hook so subclasses can supply a different editor form. */
    protected SemesterForm createSemesterForm(String componentid, Semester term) {
        return new SemesterForm(componentid, term);
    }

    /**
     * Add/edit form for a {@link Semester}. Whether a submit is treated as an
     * insert or an update is decided by {@link #updateEID}.
     */
    private class SemesterForm extends Form<Semester> {

        private static final long serialVersionUID = 1L;

        // Using a String instead of a boolean to figure out if this is an edit or an insert.
        // Reason: we need the old EID anyway because the EID might be one of the fields that
        // has been changed, and without the old EID it's impossible to know which entry was edited.
        private String updateEID = null;

        private ResourceModel updateButtonModel;
        private ResourceModel addButtonModel;
        private Button okButton;

        public SemesterForm(String id, Semester sem) {
            super(id, new CompoundPropertyModel<Semester>(sem));
            add(new RequiredTextField<String>(PROP_EID));
            add(new TextField<String>(PROP_TITLE));
            add(new TextField(PROP_START));
            add(new TextField(PROP_END));
            // Description keeps leading/trailing whitespace on purpose.
            add(new TextField<String>(PROP_DESC) {
                private static final long serialVersionUID = 1L;

                @Override
                protected boolean shouldTrimInput() {
                    return false;
                }
            });
            add(new CheckBox(PROP_CURRENT));
            updateButtonModel = new ResourceModel(LABEL_BUTTON_SAVE);
            addButtonModel = new ResourceModel(LABEL_BUTTON_ADD);
            okButton = new Button("okbutton", addButtonModel);
            add(okButton);
            Button cancelButton = new Button("cancelbutton", new ResourceModel(LABEL_BUTTON_CANCEL)) {
                private static final long serialVersionUID = 1L;

                @Override
                public void onSubmit() {
                    clearForm();
                }

                @Override
                public boolean isVisible() {
                    return updateEID != null; // only show when editing
                }
            };
            // Cancel must not trigger validation or model updates.
            cancelButton.setDefaultFormProcessing(false);
            add(cancelButton);
        }

        /** Resets the form to "add" mode with an empty semester. */
        private void clearForm() {
            super.setDefaultModelObject(new Semester()); // clearing (the model under) the form field
            this.updateEID = null;
            okButton.setDefaultModel(addButtonModel);
            super.clearInput(); // otherwise at least the date fields keep their values after "cancel",
                                // probably because of "cancelButton.setDefaultFormProcessing(false);"
        }

        @Override
        public void onSubmit() {
            Semester s = (Semester) getDefaultModelObject();
            log.debug("submit: {}; updateEID:{}", s, updateEID);
            replaceNullWithEmptyString(s);
            if (updateEID != null) {
                doUpdate(updateEID, s);
            }
            else {
                doInsert(s);
            }
            clearForm();
        }

        /** Switches the form into "edit" mode for the entry originally keyed by the given EID. */
        public void setUpdateEID(String originalEID) {
            this.updateEID = originalEID;
        }

        private void doUpdate(String oldEID, Semester s) {
            log.debug("doUpdate()");
            try {
                semesterLogic.updateSemester(oldEID, s);
                info("AcademicSession updated!");
                okButton.setDefaultModel(updateButtonModel);
            }
            catch (NoSuchKeyException nse) {
                log.error(nse.getMessage(), nse);
                error(nse.getMessage());
            }
        }

        private void doInsert(Semester s) {
            log.debug("doInsert()");
            try {
                boolean success = semesterLogic.addSemester(s);
                if (success) {
                    log.debug("doInsert: success");
                    info("AcademicSession added");
                } else {
                    log.debug("FAIL!");
                    error("Error adding item");
                }
            }
            catch (DuplicateKeyException ide) {
                error("EID \"" + s.getEid() + "\" is already in use");
            }
        }

        @Override
        protected void onBeforeRender() {
            // Keep the OK button label in sync with the current edit/insert state.
            if (updateEID != null) {
                okButton.setDefaultModel(updateButtonModel);
            }
            super.onBeforeRender();
        }

        // In contrast to the database, I'll allow empty title and description fields because
        // the user gets to set (and see) the unique EIDs manually.
        // Although: it might be nicer to remove the title textbox and just label the eid textbox
        // "title" and then do a "setTitle(getEid());"
        private void replaceNullWithEmptyString(Semester s) {
            if (s.getDescription() == null) {
                s.setDescription("");
            }
            if (s.getTitle() == null) {
                s.setTitle("");
            }
        }
    }

    /** Callback used by the column headers to tell the provider that the sort order changed. */
    public interface SortingChangeObserver {
        void notifySortChange(String propertyThatChanged);
    }

    /**
     * Sortable, detachable provider for the semester table. The list is loaded
     * lazily per request and re-sorted only when the sort order changed.
     */
    private class SortableSemesterDataProvider extends SortableDataProvider<Semester, String>
            implements SortingChangeObserver {

        private static final long serialVersionUID = 1L;

        private List<Semester> list;
        private boolean needToSortList = true;

        public SortableSemesterDataProvider() {
            super.setSort(new SortParam<String>(PROP_START, false)); // newest goes first
        }

        /** Lazily (re)loads the semester list; {@link #detach()} resets it to null. */
        private List<Semester> getData() {
            if (list == null) {
                log.debug("(re)loading the semester list");
                list = semesterLogic.getSemesters();
                needToSortList = true;
            }
            return list;
        }

        @Override
        public void setSort(final String property, final SortOrder order) {
            log.debug("setSort called: {}/{}", property, order);
            super.setSort(property, order);
        }

        @Override
        public IModel<Semester> model(Semester object) {
            return new DetachableSemesterModel(object);
        }

        @Override
        public void detach() {
            list = null;
            super.detach();
        }

        @Override
        public void notifySortChange(String property) {
            needToSortList = true;
            // not sure if this is the right place, but currently the feedback box looks f*cked up
            // after sorting the table, so removing it here seems like a good idea
            SemesterPage.this.clearFeedback();
            log.debug("someone explicitly told me about an order change. Sort property is now: {}", property);
        }

        @Override
        public Iterator<? extends Semester> iterator(long first, long count) {
            List<Semester> myList = getData();
            if (needToSortList) {
                log.debug("sorting the collection");
                String prop = getSort().getProperty();
                SortOrder order = getSortState().getPropertySortOrder(prop);
                Comparator<Semester> comp = ComparatorFactory.createComparator(prop);
                Collections.sort(myList, comp);
                if (order == SortOrder.DESCENDING) {
                    Collections.reverse(myList);
                }
                needToSortList = false;
            }
            else {
                log.debug("skipped sorting because the sort order should be the same as before..");
            }
            int from = (int) first;
            // Clamp the window to the list size so a window extending past the end
            // cannot cause an IndexOutOfBoundsException from subList().
            int to = Math.min(from + (int) count, myList.size());
            return myList.subList(from, to).iterator();
        }

        @Override
        public long size() {
            return getData().size(); // accessing "list" via getData() to ensure "list" is not null
        }
    }

    /** Builds the data table; one repeating row of labels plus an "edit" action link per semester. */
    private DataView<Semester> createDataView(IDataProvider<Semester> listDataProvider, final SemesterForm semesterEditor) {
        DataView<Semester> dataView = new DataView<Semester>("row", listDataProvider) {
            private static final long serialVersionUID = 1L;

            @Override
            protected void populateItem(Item<Semester> item) {
                Semester sem = item.getModelObject();
                RepeatingView repeatingView = new RepeatingView("dataRow");
                CompoundPropertyModel<Semester> model = new CompoundPropertyModel<Semester>(sem);
                repeatingView.add(new Label(repeatingView.newChildId(), model.bind(PROP_EID)));
                repeatingView.add(new Label(repeatingView.newChildId(), model.bind(PROP_TITLE)));
                repeatingView.add(new Label(repeatingView.newChildId(), model.bind(PROP_START)));
                repeatingView.add(new Label(repeatingView.newChildId(), model.bind(PROP_END)));
                repeatingView.add(new Label(repeatingView.newChildId(), model.bind(PROP_DESC)));
                repeatingView.add(new Label(repeatingView.newChildId(), getString(sem.isCurrent() ? "lbl_yes" : "lbl_no")));
                // "Edit" link: loads this row's semester into the editor form.
                ActionLink<Semester> el = new ActionLink<Semester>(model) {
                    private static final long serialVersionUID = 1L;

                    @Override
                    public void onClick() {
                        IModel<Semester> m = getModel();
                        Semester selected = m.getObject();
                        semesterEditor.setModelObject(selected);
                        semesterEditor.setUpdateEID(selected.getEid());
                        SemesterPage.this.clearFeedback();
                    }
                };
                el.setBody(new ResourceModel(LABEL_EDIT));
                repeatingView.add(new ActionPanel<Semester>(repeatingView.newChildId(), el));
                item.add(repeatingView);
            }
        };
        dataView.setItemsPerPage(DEFAULT_ITEMS_PER_PAGE);
        return dataView;
    }

    /** Adds a sortable header border for each displayed column. */
    private void addOrderBorders(final SortableSemesterDataProvider dp, final DataView<Semester> dataView) {
        String[] MY_PROPS = new String[] { PROP_EID, PROP_TITLE, PROP_START, PROP_END, PROP_DESC, PROP_CURRENT };
        for (final String prop : MY_PROPS) {
            add(new OrderByBorder<String>("orderBy_" + prop, prop, dp) {
                private static final long serialVersionUID = 1L;

                @Override
                protected void onSortChanged() {
                    dataView.setCurrentPage(0);
                    dp.notifySortChange(prop);
                }
            });
        }
    }

    /** Detachable model that reloads a semester by its EID on demand. */
    private class DetachableSemesterModel extends LoadableDetachableModel<Semester> {

        private static final long serialVersionUID = 1L;

        private final String eid;

        public DetachableSemesterModel(Semester t) {
            this.eid = t.getEid();
        }

        @SuppressWarnings("unused")
        public DetachableSemesterModel(String eid) {
            this.eid = eid;
        }

        @Override
        public int hashCode() {
            return eid.hashCode();
        }

        /**
         * used for dataview with ReuseIfModelsEqualStrategy item reuse strategy
         *
         * @see org.apache.wicket.markup.repeater.ReuseIfModelsEqualStrategy
         * @see java.lang.Object#equals(java.lang.Object)
         */
        @Override
        public boolean equals(final Object obj) {
            if (obj == this) {
                return true;
            }
            else if (obj == null) {
                return false;
            }
            else if (obj instanceof DetachableSemesterModel) {
                DetachableSemesterModel other = (DetachableSemesterModel) obj;
                return eid.equals(other.eid);
            }
            return false;
        }

        /**
         * @see org.apache.wicket.model.LoadableDetachableModel#load()
         */
        @Override
        protected Semester load() {
            return semesterLogic.getSemester(eid);
        }
    }
}
| |
package com.peterphi.std.guice.web.rest.auth.interceptor;
import com.codahale.metrics.Meter;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.inject.Provider;
import com.peterphi.std.guice.apploader.GuiceProperties;
import com.peterphi.std.guice.common.auth.AuthScope;
import com.peterphi.std.guice.common.auth.annotations.AuthConstraint;
import com.peterphi.std.guice.common.cached.CacheManager;
import com.peterphi.std.guice.common.serviceprops.composite.GuiceConfig;
import com.peterphi.std.guice.web.HttpCallContext;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.log4j.Logger;
import java.util.List;
/**
* Intercepts calls to methods annotated with AuthConstraint (or whose superclass is annotated with AuthConstraint) and enforces
* those constraints
*/
class AuthConstraintMethodInterceptor implements MethodInterceptor
{
	private static final Logger log = Logger.getLogger(AuthConstraintMethodInterceptor.class);

	/** Supplies a per-call interrogator used to query the current user and their roles. */
	private final Provider<AuthConstraintUserInterrogator> interrogatorProvider;
	private final GuiceConfig config;

	// Metrics: total constraint checks, checks granted, checks denied,
	// and denials for users who were authenticated (not anonymous).
	private final Meter calls;
	private final Meter granted;
	private final Meter denied;
	private final Meter authenticatedDenied;

	/** If true, constraints are only enforced while servicing a servlet request. */
	private final boolean onlyServletRequest;

	/** Cache of constructed AuthScope objects keyed by scope id. */
	private final Cache<String, AuthScope> scopes = CacheManager.build("AuthScopes", CacheBuilder.newBuilder());

	/** Scope id applied to web methods that carry no AuthConstraint annotation at all. */
	private final String noAnnotationScopeId;

	/**
	 * @param interrogatorProvider provider for the per-call user interrogator; must not be null
	 * @param config               configuration used to resolve scope overrides
	 * @param calls                meter marked for every intercepted, checked call
	 * @param granted              meter marked when access is granted
	 * @param denied               meter marked when access is denied
	 * @param authenticatedDenied  meter marked when a non-anonymous user is denied
	 */
	public AuthConstraintMethodInterceptor(final Provider<AuthConstraintUserInterrogator> interrogatorProvider,
	                                       final GuiceConfig config,
	                                       final Meter calls,
	                                       final Meter granted,
	                                       final Meter denied,
	                                       final Meter authenticatedDenied)
	{
		if (interrogatorProvider == null)
			throw new IllegalArgumentException("Must provide a user interrogator!");

		this.interrogatorProvider = interrogatorProvider;
		this.config = config;
		this.calls = calls;
		this.granted = granted;
		this.denied = denied;
		this.authenticatedDenied = authenticatedDenied;

		this.onlyServletRequest = config.getBoolean(GuiceProperties.AUTHZ_ONLY_SERVLET_REQUEST, true);
		this.noAnnotationScopeId = config.get(GuiceProperties.AUTHZ_UNANNOTATED_WEB_METHOD_AUTHSCOPE_ID,
		                                      AuthConstraint.DEFAULT_ID);
	}


	/**
	 * Enforces the AuthConstraint (if any) for the invoked method, marking the relevant
	 * metrics and either proceeding with the invocation or raising the user's access-refusal
	 * exception.
	 */
	@Override
	public Object invoke(final MethodInvocation invocation) throws Throwable
	{
		// Never handle calls to base methods (like hashCode, toString, etc.)
		if (invocation.getMethod().getDeclaringClass().equals(Object.class))
			return invocation.proceed();

		if (log.isTraceEnabled())
			log.trace("Check authn for: " + invocation.getMethod());

		// Skip auth if we're not inside a Servlet call and we are only to enforce auth constraints on service calls
		if (onlyServletRequest && HttpCallContext.peek() == null)
		{
			if (log.isTraceEnabled())
				log.trace("Skip authn, should only run on servlet requests and this is not a servlet request");

			return invocation.proceed();
		}

		calls.mark();

		final AuthConstraint constraint = readConstraint(invocation);
		final AuthConstraintUserInterrogator interrogator = interrogatorProvider.get();

		if (interrogator == null)
			throw new IllegalArgumentException("Provider for AuthConstraintUserInterrogator returned null! Cannot apply AuthConstraint to method " +
			                                   invocation.getMethod());

		// Acquire the auth scope (for constraint override)
		final AuthScope scope = getScope(constraint);

		// Test the user
		if (passes(scope, constraint, interrogator))
		{
			granted.mark();

			return invocation.proceed();
		}
		else
		{
			if (!interrogator.getUser().isAnonymous())
				authenticatedDenied.mark();

			denied.mark();

			// Throw an exception to refuse access
			throw interrogator.getUser().getAccessRefuser().refuse(scope, constraint, interrogator.getUser());
		}
	}


	/**
	 * Determines whether a given user has the necessary role to pass a constraint
	 *
	 * @param scope
	 * 		the auth scope (carries any configured overrides for the constraint)
	 * @param constraint
	 * 		the constraint to use to test the user
	 * @param user
	 * 		the current user
	 *
	 * @return true if the user passes, otherwise false
	 */
	private boolean passes(final AuthScope scope, final AuthConstraint constraint, final AuthConstraintUserInterrogator user)
	{
		if (scope.getSkip(constraint))
		{
			if (log.isTraceEnabled())
				log.trace("Allowing method invocation (skip=true).");

			return true;
		}

		final List<String> requireAnyRoles = scope.getRoles(constraint);

		assert (requireAnyRoles != null);

		// Grant on the first matching role; return early instead of interrogating
		// the user for every remaining role once a match has been found.
		for (String role : requireAnyRoles)
		{
			if (user.hasRole(role))
			{
				if (log.isTraceEnabled())
					log.trace("Allow method invocation: user " + user + " has role " + role);

				return true;
			}
		}

		if (log.isTraceEnabled())
			log.trace("Deny method invocation: user " + user + " does not have any of roles " + requireAnyRoles);

		return false;
	}


	/** Resolves the scope for a constraint, falling back to the no-annotation scope id. */
	private AuthScope getScope(final AuthConstraint constraint)
	{
		if (constraint == null)
			return getScope(noAnnotationScopeId);
		else
			return getScope(constraint.id());
	}


	/**
	 * Looks up (or builds and caches) the AuthScope for the given id, reading any
	 * role/skip/force-skip overrides from configuration.
	 */
	private AuthScope getScope(final String id)
	{
		AuthScope scope = scopes.getIfPresent(id);

		if (scope == null)
		{
			final List<String> roles;
			final Boolean skip;
			final Boolean forceSkip;

			/**
			 * N.B. With the scope as 'default', the effective guice properties read are {@link GuiceProperties#AUTHZ_DEFAULT_ROLE}, {@link GuiceProperties#AUTHZ_DEFAULT_SKIP}, {@link GuiceProperties#AUTHZ_DEFAULT_FORCE_SKIP} -
			 * these are documented as separate properties for the convenience of users.
			 */
			{
				roles = config.getList("framework.webauth.scope." + id + ".role", null);
				skip = config.getBoolean("framework.webauth.scope." + id + ".skip", null);
				forceSkip = config.getBoolean("framework.webauth.scope." + id + ".force-skip", null);
			}

			// N.B. benign race: two threads may build the same (identical) scope concurrently.
			scope = new AuthScope(id, roles, skip, forceSkip);

			scopes.put(id, scope);
		}

		return scope;
	}


	/**
	 * Reads the AuthConstraint from the invoked method, falling back to its declaring class.
	 *
	 * @return the constraint, or null if neither the method nor its class is annotated
	 */
	private AuthConstraint readConstraint(final MethodInvocation invocation)
	{
		if (invocation.getMethod().isAnnotationPresent(AuthConstraint.class))
			return invocation.getMethod().getAnnotation(AuthConstraint.class);
		else if (invocation.getMethod().getDeclaringClass().isAnnotationPresent(AuthConstraint.class))
			return invocation.getMethod().getDeclaringClass().getAnnotation(AuthConstraint.class);
		else
			return null; // No AuthConstraint specified
	}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.uiDesigner.radComponents;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;
import com.intellij.uiDesigner.GridChangeUtil;
import com.intellij.uiDesigner.UIDesignerBundle;
import com.intellij.uiDesigner.UIFormXmlConstants;
import com.intellij.uiDesigner.XmlWriter;
import com.intellij.uiDesigner.actions.*;
import com.intellij.uiDesigner.compiler.FormLayoutUtils;
import com.intellij.uiDesigner.compiler.Utils;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.designSurface.*;
import com.intellij.uiDesigner.lw.FormLayoutSerializer;
import com.intellij.uiDesigner.propertyInspector.Property;
import com.intellij.uiDesigner.propertyInspector.properties.AbstractInsetsProperty;
import com.intellij.uiDesigner.propertyInspector.properties.AlignPropertyProvider;
import com.intellij.uiDesigner.propertyInspector.properties.HorzAlignProperty;
import com.intellij.uiDesigner.propertyInspector.properties.VertAlignProperty;
import com.intellij.uiDesigner.snapShooter.SnapshotContext;
import com.intellij.util.ArrayUtil;
import com.jgoodies.forms.factories.FormFactory;
import com.jgoodies.forms.layout.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author yole
*/
public class RadFormLayoutManager extends RadAbstractGridLayoutManager implements AlignPropertyProvider {
// Lazily created, shared panel for editing row/column properties (see getRowColumnPropertiesPanel).
private FormLayoutColumnProperties myPropertiesPanel;
// Listeners registered per component in addComponentToContainer and removed in removeComponentFromContainer.
private final Map<RadComponent, MyPropertyChangeListener> myListenerMap = new HashMap<RadComponent, MyPropertyChangeListener>();
// Encoded JGoodies FormLayout spec: default size, allowed to grow.
@NonNls private static final String ENCODED_FORMSPEC_GROW = "d:grow";
// Spec for non-growing cells: default size bounded below by a 4-pixel constant.
private static final Size DEFAULT_NOGROW_SIZE = new BoundedSize(Sizes.DEFAULT, new ConstantSize(4, ConstantSize.PIXEL), null);
/** @return the identifier under which this layout type is persisted in the form XML. */
@Nullable public String getName() {
  return UIFormXmlConstants.LAYOUT_FORM;
}
/** Creates a fresh 1x1 FormLayout in which both the single row and column may grow. */
@Override @Nullable
public LayoutManager createLayout() {
  return new FormLayout(ENCODED_FORMSPEC_GROW, ENCODED_FORMSPEC_GROW);
}
/**
 * Converts a generic grid layout to a FormLayout. Content cells occupy even
 * spec indices; a "related gap" spec is inserted at every odd index, hence the
 * n*2-1 sizing (no trailing gap). Rows/columns flagged as growable get the
 * "d:grow" spec, the rest get the default non-growing spec.
 */
@Override
protected void changeLayoutFromGrid(final RadContainer container, final List<RadComponent> contents, final List<Boolean> canRowsGrow,
                                    final List<Boolean> canColumnsGrow) {
  int rowCount = canRowsGrow.size();
  int columnCount = canColumnsGrow.size();
  // n content rows interleaved with n-1 gap rows (0 stays 0 for an empty grid)
  int rowCountWithGaps = (rowCount == 0) ? 0 : rowCount * 2 - 1;
  int columnCountWithGaps = (columnCount == 0) ? 0 : columnCount * 2 - 1;
  RowSpec[] rowSpecs = new RowSpec [rowCountWithGaps];
  ColumnSpec[] colSpecs = new ColumnSpec [columnCountWithGaps];
  for(int i=0; i<rowCount; i++) {
    rowSpecs [i*2] = canRowsGrow.get(i).booleanValue() ? new RowSpec(ENCODED_FORMSPEC_GROW) : new RowSpec(DEFAULT_NOGROW_SIZE);
    if (i*2+1 < rowSpecs.length) {
      rowSpecs [i*2+1] = FormFactory.RELATED_GAP_ROWSPEC;
    }
  }
  for(int i=0; i<columnCount; i++) {
    colSpecs [i*2] = canColumnsGrow.get(i).booleanValue() ? new ColumnSpec(ENCODED_FORMSPEC_GROW) : new ColumnSpec(DEFAULT_NOGROW_SIZE);
    if (i*2+1 < colSpecs.length) {
      colSpecs [i*2+1] = FormFactory.RELATED_GAP_COLSPEC;
    }
  }
  container.setLayoutManager(this, new FormLayout(colSpecs, rowSpecs));
}
/**
 * Converts an indexed (single-row) layout to a FormLayout. Components whose
 * horizontal size policy equals the maximum found among all components get a
 * growing column spec; others get the default. Gap columns are interleaved as
 * in {@code changeLayoutFromGrid}.
 */
@Override
protected void changeLayoutFromIndexed(final RadContainer container, final List<RadComponent> components) {
  int maxSizePolicy = 0;
  for(RadComponent c: components) {
    maxSizePolicy = Math.max(maxSizePolicy, c.getConstraints().getHSizePolicy());
  }
  ColumnSpec[] colSpecs = new ColumnSpec [components.size() * 2 - 1];
  for(int i=0; i<components.size(); i++) {
    colSpecs [i*2] = components.get(i).getConstraints().getHSizePolicy() == maxSizePolicy
                     ? new ColumnSpec(ENCODED_FORMSPEC_GROW)
                     : FormFactory.DEFAULT_COLSPEC;
    if (i*2+1 < colSpecs.length) {
      colSpecs [i*2+1] = FormFactory.RELATED_GAP_COLSPEC;
    }
  }
  container.setLayoutManager(this, new FormLayout(colSpecs, new RowSpec[] { FormFactory.DEFAULT_ROWSPEC } ));
}
/**
 * Serializes the FormLayout of the given container: one element per row spec
 * and column spec, followed by the row and column groups. Note that FormLayout
 * indices are 1-based.
 */
@Override
public void writeLayout(final XmlWriter writer, final RadContainer radContainer) {
  FormLayout layout = (FormLayout) radContainer.getLayout();
  for(int i=1; i<=layout.getRowCount(); i++) {
    RowSpec rowSpec = layout.getRowSpec(i);
    writer.startElement(UIFormXmlConstants.ELEMENT_ROWSPEC);
    try {
      writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_VALUE, FormLayoutUtils.getEncodedSpec(rowSpec));
    }
    finally {
      // always balance startElement, even if encoding throws
      writer.endElement();
    }
  }
  for(int i=1; i<=layout.getColumnCount(); i++) {
    ColumnSpec columnSpec = layout.getColumnSpec(i);
    writer.startElement(UIFormXmlConstants.ELEMENT_COLSPEC);
    try {
      writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_VALUE, FormLayoutUtils.getEncodedSpec(columnSpec));
    }
    finally {
      writer.endElement();
    }
  }
  writeGroups(writer, UIFormXmlConstants.ELEMENT_ROWGROUP, layout.getRowGroups());
  writeGroups(writer, UIFormXmlConstants.ELEMENT_COLGROUP, layout.getColumnGroups());
}
/** Writes one element per group, containing a member element (with its index) per group member. */
private static void writeGroups(final XmlWriter writer, final String elementName, final int[][] groups) {
  for(int[] group: groups) {
    writer.startElement(elementName);
    try {
      for(int member: group) {
        writer.startElement(UIFormXmlConstants.ELEMENT_MEMBER);
        writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_INDEX, member);
        writer.endElement();
      }
    }
    finally {
      // always balance the group's startElement
      writer.endElement();
    }
  }
}
/**
 * Adds the component to the container's FormLayout, registering a property
 * change listener (tracked in myListenerMap so it can be removed later).
 * If the component already carries custom CellConstraints, its insets are
 * preserved in the newly computed constraints.
 */
@Override
public void addComponentToContainer(final RadContainer container, final RadComponent component, final int index) {
  MyPropertyChangeListener listener = new MyPropertyChangeListener(component);
  myListenerMap.put(component, listener);
  component.addPropertyChangeListener(listener);
  final CellConstraints cc = gridToCellConstraints(component);
  if (component.getCustomLayoutConstraints() instanceof CellConstraints) {
    CellConstraints customCellConstraints = (CellConstraints) component.getCustomLayoutConstraints();
    // keep user-specified insets across the conversion
    cc.insets = customCellConstraints.insets;
  }
  component.setCustomLayoutConstraints(cc);
  container.getDelegee().add(component.getDelegee(), cc, index);
}
/**
 * Removes the component from the container, first detaching the property change
 * listener that was registered for it in {@code addComponentToContainer} (if any).
 */
@Override
public void removeComponentFromContainer(final RadContainer container, final RadComponent component) {
  // Map.remove is a no-op returning null when no listener was registered.
  final MyPropertyChangeListener listener = myListenerMap.remove(component);
  if (listener != null) {
    component.removePropertyChangeListener(listener);
  }
  super.removeComponentFromContainer(container, component);
}
/**
 * Converts the component's 0-based GridConstraints into 1-based JGoodies
 * CellConstraints. A WANT_GROW size policy maps to FILL alignment; existing
 * custom CellConstraints (if present) override the computed alignments.
 */
private static CellConstraints gridToCellConstraints(final RadComponent component) {
  GridConstraints gc = component.getConstraints();
  CellConstraints.Alignment hAlign = ((gc.getHSizePolicy() & GridConstraints.SIZEPOLICY_WANT_GROW) != 0)
                                     ? CellConstraints.FILL
                                     : CellConstraints.DEFAULT;
  CellConstraints.Alignment vAlign = ((gc.getVSizePolicy() & GridConstraints.SIZEPOLICY_WANT_GROW) != 0)
                                     ? CellConstraints.FILL
                                     : CellConstraints.DEFAULT;
  if (component.getCustomLayoutConstraints() instanceof CellConstraints) {
    CellConstraints cc = (CellConstraints) component.getCustomLayoutConstraints();
    hAlign = cc.hAlign;
    vAlign = cc.vAlign;
  }
  // +1: FormLayout cell coordinates are 1-based
  return new CellConstraints(gc.getColumn()+1, gc.getRow()+1, gc.getColSpan(), gc.getRowSpan(), hAlign, vAlign);
}
/**
 * Serializes the child's grid constraints plus a FormLayout-specific element
 * carrying non-zero insets and non-default alignment flags (only non-default
 * values are written, to keep the form XML minimal).
 */
@Override
public void writeChildConstraints(final XmlWriter writer, final RadComponent child) {
  writeGridConstraints(writer, child);
  if (child.getCustomLayoutConstraints() instanceof CellConstraints) {
    CellConstraints cc = (CellConstraints) child.getCustomLayoutConstraints();
    writer.startElement(UIFormXmlConstants.ELEMENT_FORMS);
    try {
      if (!cc.insets.equals(new Insets(0, 0, 0, 0))) {
        writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_TOP, cc.insets.top);
        writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_LEFT, cc.insets.left);
        writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_BOTTOM, cc.insets.bottom);
        writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_RIGHT, cc.insets.right);
      }
      if (cc.hAlign != CellConstraints.DEFAULT) {
        writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_DEFAULTALIGN_HORZ, false);
      }
      if (cc.vAlign != CellConstraints.DEFAULT) {
        writer.addAttribute(UIFormXmlConstants.ATTRIBUTE_DEFAULTALIGN_VERT, false);
      }
    }
    finally {
      writer.endElement();
    }
  }
}
/** Convenience cast: this manager only ever installs FormLayout on its containers. */
private static FormLayout getFormLayout(final RadContainer container) {
  return (FormLayout) container.getLayout();
}
/** @return the number of rows in the container's FormLayout. */
@Override public int getGridRowCount(RadContainer container) {
  final FormLayout layout = getFormLayout(container);
  return layout.getRowCount();
}
/** @return the number of columns in the container's FormLayout. */
@Override public int getGridColumnCount(RadContainer container) {
  final FormLayout layout = getFormLayout(container);
  return layout.getColumnCount();
}
/**
 * Returns the starting coordinate of each row (or column) cell. The layout
 * reports n+1 origins for n cells; the trailing origin marks the far edge and
 * is dropped here.
 */
@Override public int[] getGridCellCoords(RadContainer container, boolean isRow) {
  final FormLayout.LayoutInfo layoutInfo = getFormLayout(container).getLayoutInfo(container.getDelegee());
  int[] origins = isRow ? layoutInfo.rowOrigins : layoutInfo.columnOrigins;
  int[] result = new int [origins.length-1];
  System.arraycopy(origins, 0, result, 0, result.length);
  return result;
}
/**
 * Returns the pixel size of each row (or column) cell, computed as the
 * difference between consecutive cell origins from the layout info.
 */
@Override public int[] getGridCellSizes(RadContainer container, boolean isRow) {
  final FormLayout.LayoutInfo info = getFormLayout(container).getLayoutInfo(container.getDelegee());
  final int[] lines = isRow ? info.rowOrigins : info.columnOrigins;
  final int[] sizes = new int [lines.length-1];
  for(int i=1; i<lines.length; i++) {
    sizes [i-1] = lines [i] - lines [i-1];
  }
  return sizes;
}
/** @return the y coordinates of all horizontal grid lines (row origins, including the far edge). */
@Override public int[] getHorizontalGridLines(RadContainer container) {
  final FormLayout.LayoutInfo layoutInfo = getFormLayout(container).getLayoutInfo(container.getDelegee());
  return layoutInfo.rowOrigins;
}
/** @return the x coordinates of all vertical grid lines (column origins, including the far edge). */
@Override public int[] getVerticalGridLines(RadContainer container) {
  final FormLayout.LayoutInfo layoutInfo = getFormLayout(container).getLayoutInfo(container.getDelegee());
  return layoutInfo.columnOrigins;
}
/** @return the 0-based row index containing the given y coordinate, or -1 if outside all rows. */
@Override public int getGridRowAt(RadContainer container, int y) {
  final FormLayout.LayoutInfo layoutInfo = getFormLayout(container).getLayoutInfo(container.getDelegee());
  return findCell(layoutInfo.rowOrigins, y);
}
/** @return the 0-based column index containing the given x coordinate, or -1 if outside all columns. */
@Override public int getGridColumnAt(RadContainer container, int x) {
  final FormLayout.LayoutInfo layoutInfo = getFormLayout(container).getLayoutInfo(container.getDelegee());
  return findCell(layoutInfo.columnOrigins, x);
}
/**
 * Finds the cell whose half-open interval [origins[i], origins[i+1]) contains
 * the given coordinate.
 *
 * @return the 0-based cell index, or -1 if the coordinate lies outside every cell
 */
private static int findCell(final int[] origins, final int coord) {
  int i = 0;
  while (i < origins.length - 1) {
    if (origins [i] <= coord && coord < origins [i+1]) {
      return i;
    }
    i++;
  }
  return -1;
}
/**
 * Computes where a dragged component would be dropped. Handles, in order:
 * an empty layout (first-component insertion), drops beyond the right edge
 * (insert a column after), drops beyond the bottom edge (insert a row after),
 * a 1x1 grid whose only cell is empty (first-component insertion), and
 * otherwise delegates to the generic grid logic.
 */
@NotNull @Override
public ComponentDropLocation getDropLocation(@NotNull RadContainer container, @Nullable final Point location) {
  FormLayout formLayout = getFormLayout(container);
  if (formLayout.getRowCount() == 0 || formLayout.getColumnCount() == 0) {
    if (location != null) {
      Rectangle rc = new Rectangle(new Point(), container.getDelegee().getSize());
      return new FormFirstComponentInsertLocation(container, location, rc);
    }
  }
  final FormLayout.LayoutInfo layoutInfo = formLayout.getLayoutInfo(container.getDelegee());
  // Pointer is to the right of the laid-out grid: insert a new column after the last one.
  if (location != null && location.x > layoutInfo.getWidth()) {
    int row = findCell(layoutInfo.rowOrigins, location.y);
    if (row == -1) {
      return NoDropLocation.INSTANCE;
    }
    return new GridInsertLocation(container, row, getGridColumnCount(container)-1, GridInsertMode.ColumnAfter);
  }
  // Pointer is below the laid-out grid: insert a new row after the last one.
  if (location != null && location.y > layoutInfo.getHeight()) {
    int column = findCell(layoutInfo.columnOrigins, location.x);
    if (column == -1) {
      return NoDropLocation.INSTANCE;
    }
    return new GridInsertLocation(container, getGridRowCount(container)-1, column, GridInsertMode.RowAfter);
  }
  // Single empty cell: treat as first-component insertion.
  if (container.getGridRowCount() == 1 && container.getGridColumnCount() == 1 &&
      getComponentAtGrid(container, 0, 0) == null) {
    final Rectangle rc = getGridCellRangeRect(container, 0, 0, 0, 0);
    if (location == null) {
      return new FormFirstComponentInsertLocation(container, rc, 0, 0);
    }
    return new FormFirstComponentInsertLocation(container, location, rc);
  }
  return super.getDropLocation(container, location);
}
/**
 * Returns the (lazily created, shared) properties panel, refreshed to show the
 * selected rows or columns of the given container.
 */
@Override
public CustomPropertiesPanel getRowColumnPropertiesPanel(RadContainer container, boolean isRow, int[] selectedIndices) {
  if (myPropertiesPanel == null) {
    myPropertiesPanel = new FormLayoutColumnProperties();
  }
  myPropertiesPanel.showProperties(container, isRow, selectedIndices);
  return myPropertiesPanel;
}
/**
 * Builds the action group shown for row/column caption areas: insertion,
 * splitting, deletion and size-group management of rows and columns.
 */
@Override
public ActionGroup getCaptionActions() {
  final DefaultActionGroup captionActions = new DefaultActionGroup();
  // Insertion and splitting of rows/columns.
  captionActions.add(new InsertBeforeAction());
  captionActions.add(new InsertAfterAction());
  captionActions.add(new SplitAction());
  // Deletion and size-group membership.
  captionActions.add(new DeleteAction());
  captionActions.add(new GroupRowsColumnsAction());
  captionActions.add(new UngroupRowsColumnsAction());
  return captionActions;
}
/**
 * A cell can grow when its FormSpec carries a non-zero resize weight.
 * FormLayout spec indices are 1-based, hence the {@code index + 1} shift.
 */
@Override
public boolean canCellGrow(RadContainer container, boolean isRow, int index) {
  final FormLayout formLayout = (FormLayout) container.getLayout();
  final FormSpec cellSpec;
  if (isRow) {
    cellSpec = formLayout.getRowSpec(index + 1);
  }
  else {
    cellSpec = formLayout.getColumnSpec(index + 1);
  }
  return cellSpec.getResizeWeight() > 0.01d;
}
@Override
public void setChildDragging(RadComponent child, boolean dragging) {
// Intentionally empty: reacting to drag state would make the FormLayout
// re-layout and the components jump around during the drag.
}
/**
 * Paints extra decorations on a row/column caption cell: a light-gray
 * background for gap cells, a "can grow" marker for resizable cells, and
 * blue bracket lines showing size-group membership.
 */
@Override
public void paintCaptionDecoration(final RadContainer container, final boolean isRow, final int index, final Graphics2D g2d,
final Rectangle rc) {
// don't paint gap rows/columns with red background
if (isGapCell(container, isRow, index)) {
g2d.setColor(Color.LIGHT_GRAY);
g2d.fillRect(rc.x, rc.y, rc.width, rc.height);
}
// Cells with a resize weight get the grow marker.
if (canCellGrow(container, isRow, index)) {
drawGrowMarker(isRow, g2d, rc);
}
FormLayout layout = (FormLayout) container.getLayout();
// Group members are stored 1-based by FormLayout, hence member-1 below.
int[][] groups = isRow ? layout.getRowGroups() : layout.getColumnGroups();
//noinspection MultipleVariablesInDeclaration
boolean haveTopLeft = false, haveTopRight = false, haveTopLine = false;
//noinspection MultipleVariablesInDeclaration
boolean haveBottomLeft = false, haveBottomRight = false, haveBottomLine = false;
boolean inGroup = false;
// Even-numbered groups draw on the top/left edge, odd-numbered on the
// bottom/right edge, so overlapping groups stay distinguishable.
for(int i=0; i<groups.length; i++) {
int minMember = Integer.MAX_VALUE;
int maxMember = -1;
for(int member: groups [i]) {
minMember = Math.min(member-1, minMember);
maxMember = Math.max(member-1, maxMember);
inGroup = inGroup || (member-1 == index);
}
// NOTE(review): inGroup is never reset between groups, so once this cell
// is found in any group it stays true for all later groups as well -
// confirm whether that accumulation is intentional.
if (minMember <= index && index <= maxMember) {
if (i % 2 == 0) {
haveTopLeft = haveTopLeft || index > minMember;
haveTopRight = haveTopRight || index < maxMember;
haveTopLine = haveTopLine || inGroup;
}
else {
haveBottomLeft = haveBottomLeft || index > minMember;
haveBottomRight = haveBottomRight || index < maxMember;
haveBottomLine = haveBottomLine || inGroup;
}
}
}
g2d.setColor(Color.BLUE);
drawGroupLine(rc, isRow, g2d, true, haveTopLeft, haveTopRight, haveTopLine);
drawGroupLine(rc, isRow, g2d, false, haveBottomLeft, haveBottomRight, haveBottomLine);
}
/**
 * Draws one group bracket along a caption cell: an optional stub toward the
 * cell center ({@code haveLine}) plus optional continuation segments toward
 * the neighboring cells ({@code haveLeft}/{@code haveRight}). {@code isTop}
 * selects the top/left bracket position vs. the bottom/right one.
 */
private static void drawGroupLine(final Rectangle rc, final boolean isRow, final Graphics2D g2d, boolean isTop,
                                  final boolean haveLeft, final boolean haveRight, final boolean haveLine) {
  final int maxX = (int) rc.getMaxX();
  final int maxY = (int) rc.getMaxY();
  final int midX = (int) rc.getCenterX();
  final int midY = (int) rc.getCenterY();
  // Bracket baseline offset: 1px inside for the top/left bracket, 3px for
  // the bottom/right one; the stub extends to 6px inside the cell.
  final Point linePos = isTop ? new Point(rc.x + 1, rc.y + 1) : new Point(rc.x + 3, rc.y + 3);
  final Point markerPos = new Point(rc.x + 6, rc.y + 6);
  if (haveLine) {
    if (isRow) {
      g2d.drawLine(linePos.x, midY, markerPos.x, midY);
    }
    else {
      g2d.drawLine(midX, linePos.y, midX, markerPos.y);
    }
  }
  if (haveLeft) {
    if (isRow) {
      g2d.drawLine(linePos.x, rc.y, linePos.x, midY);
    }
    else {
      g2d.drawLine(rc.x, linePos.y, midX, linePos.y);
    }
  }
  if (haveRight) {
    if (isRow) {
      g2d.drawLine(linePos.x, midY, linePos.x, maxY);
    }
    else {
      g2d.drawLine(midX, linePos.y, maxX, linePos.y);
    }
  }
}
/**
 * FormLayout exposes no container-level properties in the inspector.
 */
@Override
public Property[] getContainerProperties(final Project project) {
return Property.EMPTY_ARRAY;
}
/**
 * Per-component inspector properties for FormLayout children:
 * horizontal/vertical alignment plus the component insets.
 */
@Override
public Property[] getComponentProperties(final Project project, final RadComponent component) {
return new Property[] {
HorzAlignProperty.getInstance(project),
VertAlignProperty.getInstance(project),
new ComponentInsetsProperty()
};
}
/**
 * Inserts a new row or column at the given cell index. A gap cell is added
 * alongside it (except for the very first row/column of the grid).
 *
 * @param grow whether the new row/column should receive a resize weight
 * @return the number of grid cells actually added: 1 for the first
 *         row/column, otherwise 2 (content cell + gap cell)
 */
@Override
public int insertGridCells(final RadContainer grid, final int cellIndex, final boolean isRow, final boolean isBefore, final boolean grow) {
  final FormSpec newSpec;
  if (isRow) {
    newSpec = grow ? new RowSpec(ENCODED_FORMSPEC_GROW) : new RowSpec(DEFAULT_NOGROW_SIZE);
  }
  else {
    newSpec = grow ? new ColumnSpec(ENCODED_FORMSPEC_GROW) : new ColumnSpec(DEFAULT_NOGROW_SIZE);
  }
  insertGridCells(grid, cellIndex, isRow, isBefore, newSpec);
  return getGridCellCount(grid, isRow) == 1 ? 1 : 2;
}
/**
 * Copies {@code cellCount} rows or columns from the source container to the
 * destination, preceded by a freshly inserted gap row/column at the target
 * position, then re-syncs the destination's grid constraints.
 */
@Override
public void copyGridCells(RadContainer source, final RadContainer destination, final boolean isRow, int cellIndex, int cellCount, int targetIndex) {
FormLayout sourceLayout = getFormLayout(source);
FormLayout destinationLayout = getFormLayout(destination);
// Insert the gap first (FormLayout indices are 1-based).
if (isRow) {
insertOrAppendRow(destinationLayout, targetIndex+1, FormFactory.RELATED_GAP_ROWSPEC);
}
else {
insertOrAppendColumn(destinationLayout, targetIndex+1, FormFactory.RELATED_GAP_COLSPEC);
}
// Copy after the gap. If the gap landed before the source range (relevant
// when source and destination are the same layout), the range shifted by one.
targetIndex++;
if (targetIndex < cellIndex) cellIndex++;
copyFormSpecs(sourceLayout, destinationLayout, isRow, cellIndex, cellCount, targetIndex);
updateGridConstraintsFromCellConstraints(destination);
}
/**
 * Copies {@code cellCount} row or column specs, one by one, from the source
 * layout to the destination layout starting at {@code targetIndex}. Indices
 * here are 0-based; the FormLayout API is 1-based.
 */
private static void copyFormSpecs(final FormLayout sourceLayout,
final FormLayout destinationLayout,
final boolean isRow,
int cellIndex,
int cellCount,
int targetIndex) {
for(int i=0; i < cellCount; i++) {
if (isRow) {
RowSpec rowSpec = sourceLayout.getRowSpec(cellIndex + 1);
insertOrAppendRow(destinationLayout, targetIndex+1, rowSpec);
}
else {
ColumnSpec colSpec = sourceLayout.getColumnSpec(cellIndex + 1);
insertOrAppendColumn(destinationLayout, targetIndex+1, colSpec);
}
// When copying within the same layout and inserting before the source
// range, each insert shifts the remaining source cells by one, so the
// source index advances by 2 instead of 1.
cellIndex += (targetIndex < cellIndex && sourceLayout == destinationLayout) ? 2 : 1;
targetIndex++;
}
}
/**
 * Re-creates the row/column structure of a rectangular section of the source
 * grid in the destination container (the components themselves are copied
 * elsewhere). The destination receives a brand-new FormLayout.
 */
@Override
public void copyGridSection(final RadContainer source, final RadContainer destination, final Rectangle rc) {
  final FormLayout targetLayout = new FormLayout();
  destination.setLayout(targetLayout);
  final FormLayout sourceLayout = getFormLayout(source);
  // rc is in grid coordinates: y/height select rows, x/width select columns.
  copyFormSpecs(sourceLayout, targetLayout, true, rc.y, rc.height, 0);
  copyFormSpecs(sourceLayout, targetLayout, false, rc.x, rc.width, 0);
}
/**
 * FormLayout places exactly one gap cell between adjacent content cells.
 */
@Override
public int getGapCellCount() {
return 1;
}
/**
 * Returns the pixel size of the standard "related" gap row/column for this
 * container, or 0 when the gap spec's size is not a constant size.
 */
@Override
public int getGapCellSize(final RadContainer container, boolean isRow) {
  final Size gapSize;
  if (isRow) {
    gapSize = FormFactory.RELATED_GAP_ROWSPEC.getSize();
  }
  else {
    gapSize = FormFactory.RELATED_GAP_COLSPEC.getSize();
  }
  if (!(gapSize instanceof ConstantSize)) {
    return 0;
  }
  // Pixel conversion depends on the container's font/screen metrics.
  return ((ConstantSize) gapSize).getPixelSize(container.getDelegee());
}
/**
 * A cell is considered a gap cell when it sits at an odd index and is
 * empty/redundant according to {@link GridChangeUtil}. Out-of-range indices
 * are never gaps.
 */
@Override
public boolean isGapCell(RadContainer grid, boolean isRow, int cellIndex) {
  if (cellIndex < 0 || cellIndex >= getGridCellCount(grid, isRow)) {
    return false;
  }
  // Content cells occupy even indices, gap cells the odd ones.
  if (cellIndex % 2 != 1) {
    return false;
  }
  final GridChangeUtil.CellStatus status = GridChangeUtil.canDeleteCell(grid, cellIndex, isRow);
  return status == GridChangeUtil.CellStatus.Empty || status == GridChangeUtil.CellStatus.Redundant;
}
/**
 * Rows/columns are presented 1-based in the designer UI for FormLayout.
 */
@Override
public int getCellIndexBase() {
return 1;
}
/**
 * Inserts the given row/column spec at the 0-based {@code cellIndex},
 * together with a "related gap" cell when the grid is non-empty.
 *
 * @return index where new column or row was actually inserted (0-based)
 */
private int insertGridCells(RadContainer grid, int cellIndex, boolean isRow, boolean isBefore, FormSpec formSpec) {
FormLayout formLayout = (FormLayout) grid.getLayout();
// Convert to the 1-based FormLayout index: before the cell or after it.
int index = isBefore ? cellIndex+1 : cellIndex+2;
if (isRow) {
// The first row of an empty grid gets no gap; otherwise insert the gap
// first and, when inserting after, place the new row behind the gap.
if (getGridCellCount(grid, true) > 0) {
insertOrAppendRow(formLayout, index, FormFactory.RELATED_GAP_ROWSPEC);
if (!isBefore) index++;
}
insertOrAppendRow(formLayout, index, (RowSpec) formSpec);
}
else {
// Mirror of the row branch for columns.
if (getGridCellCount(grid, false) > 0) {
insertOrAppendColumn(formLayout, index, FormFactory.RELATED_GAP_COLSPEC);
if (!isBefore) index++;
}
insertOrAppendColumn(formLayout, index, (ColumnSpec)formSpec);
}
updateGridConstraintsFromCellConstraints(grid);
// Back to a 0-based index for the caller.
return index-1;
}
/**
 * Inserts a row spec at the given 1-based index, appending instead when the
 * index points one past the last row (which insertRow would not accept).
 */
private static void insertOrAppendRow(final FormLayout formLayout, final int index, final RowSpec rowSpec) {
  final boolean pastLastRow = index == formLayout.getRowCount() + 1;
  if (pastLastRow) {
    formLayout.appendRow(rowSpec);
  }
  else {
    formLayout.insertRow(index, rowSpec);
  }
}
/**
 * Inserts a column spec at the given 1-based index, appending instead when
 * the index points one past the last column (which insertColumn would not
 * accept).
 */
private static void insertOrAppendColumn(final FormLayout formLayout, final int index, final ColumnSpec columnSpec) {
  final boolean pastLastColumn = index == formLayout.getColumnCount() + 1;
  if (pastLastColumn) {
    formLayout.appendColumn(columnSpec);
  }
  else {
    formLayout.insertColumn(index, columnSpec);
  }
}
/**
 * Deletes the given 0-based row/column and, when that leaves an even cell
 * count (i.e. a dangling gap cell), also deletes the adjacent gap cell if
 * it is empty.
 *
 * @return the number of cells actually removed (1 or 2)
 */
@Override
public int deleteGridCells(final RadContainer grid, final int cellIndex, final boolean isRow) {
int result = 1;
FormLayout formLayout = (FormLayout) grid.getLayout();
// Move component origins out of the cells about to be deleted; throws if a
// component's origin cannot be preserved.
adjustDeletedCellOrigins(grid, cellIndex, isRow);
if (isRow) {
// Remove the deleted row from the size groups before removing the row
// itself (group indices are 1-based).
int[][] groupIndices = formLayout.getRowGroups();
groupIndices = removeDeletedCell(groupIndices, cellIndex+1);
formLayout.setRowGroups(groupIndices);
formLayout.removeRow(cellIndex+1);
updateGridConstraintsFromCellConstraints(grid);
// A grid with gap rows has an odd row count; an even count after removal
// means a leftover gap row which is dropped when empty.
if (formLayout.getRowCount() > 0 && formLayout.getRowCount() % 2 == 0) {
int gapRowIndex = (cellIndex >= grid.getGridRowCount()) ? cellIndex-1 : cellIndex;
if (GridChangeUtil.isRowEmpty(grid, gapRowIndex)) {
formLayout.removeRow(gapRowIndex+1);
updateGridConstraintsFromCellConstraints(grid);
result++;
}
}
}
else {
// Mirror of the row branch for columns.
int[][] groupIndices = formLayout.getColumnGroups();
groupIndices = removeDeletedCell(groupIndices, cellIndex+1);
formLayout.setColumnGroups(groupIndices);
formLayout.removeColumn(cellIndex+1);
updateGridConstraintsFromCellConstraints(grid);
if (formLayout.getColumnCount() > 0 && formLayout.getColumnCount() % 2 == 0) {
int gapColumnIndex = (cellIndex >= grid.getGridColumnCount()) ? cellIndex-1 : cellIndex;
if (GridChangeUtil.isColumnEmpty(grid, gapColumnIndex)) {
formLayout.removeColumn(gapColumnIndex+1);
updateGridConstraintsFromCellConstraints(grid);
result++;
}
}
}
return result;
}
/**
 * Before a cell is deleted, moves the origin of every component that starts
 * in it to the first surviving cell of the component's span, shrinking the
 * span accordingly.
 *
 * @throws IllegalArgumentException if a component occupies only the cell(s)
 * being deleted and therefore cannot be preserved
 */
private void adjustDeletedCellOrigins(final RadContainer grid, final int cellIndex, final boolean isRow) {
// When the following cell is a gap it is deleted together with this one,
// so origins must skip two cells instead of one.
int gapCellDelta = isGapCell(grid, isRow, cellIndex+1) ? 2 : 1;
for(RadComponent component: grid.getComponents()) {
// ensure that we don't have component origins in the deleted cells
final GridConstraints gc = component.getConstraints();
if (gc.getCell(isRow) == cellIndex) {
final int span = gc.getSpan(isRow);
if (span > gapCellDelta) {
gc.setCell(isRow, cellIndex+gapCellDelta);
gc.setSpan(isRow, span -gapCellDelta);
updateConstraints(component);
}
else {
throw new IllegalArgumentException("Attempt to delete grid row/column which contains origins of 1-span components");
}
}
}
}
/**
 * Returns a copy of the size-group arrays with the cell {@code deletedIndex}
 * (1-based, as stored in FormLayout groups) removed. A group left with fewer
 * than two members is dropped entirely. When the deleted cell belongs to no
 * group, the original array is returned unchanged.
 *
 * Bug fix: the original copied {@code groupIndices[i].length} elements when
 * duplicating the OTHER groups, overflowing (ArrayIndexOutOfBoundsException)
 * or truncating any group whose size differed from the edited group's.
 */
private static int[][] removeDeletedCell(final int[][] groupIndices, final int deletedIndex) {
  for (int i = 0; i < groupIndices.length; i++) {
    for (int j = 0; j < groupIndices[i].length; j++) {
      if (groupIndices[i][j] != deletedIndex) {
        continue;
      }
      final int[][] newIndices;
      if (groupIndices[i].length <= 2) {
        // Deleted cell is in a group of 1 or 2 cells => drop the whole group.
        newIndices = new int[groupIndices.length - 1][];
        for (int newI = 0; newI < i; newI++) {
          newIndices[newI] = groupIndices[newI].clone();
        }
        for (int newI = i + 1; newI < groupIndices.length; newI++) {
          newIndices[newI - 1] = groupIndices[newI].clone();
        }
      }
      else {
        // Group with more than 2 cells => keep the group, delete the member.
        newIndices = new int[groupIndices.length][];
        for (int newI = 0; newI < groupIndices.length; newI++) {
          if (newI == i) {
            newIndices[newI] = new int[groupIndices[i].length - 1];
            System.arraycopy(groupIndices[i], 0, newIndices[newI], 0, j);
            System.arraycopy(groupIndices[i], j + 1, newIndices[newI], j, groupIndices[i].length - j - 1);
          }
          else {
            // Copy the other group verbatim, using ITS length (fixed).
            newIndices[newI] = groupIndices[newI].clone();
          }
        }
      }
      return newIndices;
    }
  }
  return groupIndices;
}
/**
 * Tooltip shown while a row/column caption is being drag-resized: the
 * 1-based cell number plus the size the spec would be set to.
 */
@Override @Nullable
public String getCellResizeTooltip(RadContainer container, boolean isRow, int cell, int newSize) {
  final String updatedSize = getUpdatedSize(container, isRow, cell, newSize).toString();
  if (isRow) {
    return UIDesignerBundle.message("tooltip.resize.row", cell + getCellIndexBase(), updatedSize);
  }
  return UIDesignerBundle.message("tooltip.resize.column", cell + getCellIndexBase(), updatedSize);
}
/**
 * Applies a drag-resize to a row/column: builds a new spec with the updated
 * constant size (alignment and resize weight preserved), installs it, and
 * propagates the same spec to every cell in the same size group.
 */
@Override
public void processCellResized(RadContainer container, final boolean isRow, final int cell, final int newSize) {
  final FormLayout layout = (FormLayout) container.getLayout();
  final ConstantSize updatedSize = getUpdatedSize(container, isRow, cell, newSize);
  final FormSpec resizedSpec;
  if (isRow) {
    final RowSpec oldSpec = layout.getRowSpec(cell + 1);
    resizedSpec = new RowSpec(oldSpec.getDefaultAlignment(), updatedSize, oldSpec.getResizeWeight());
  }
  else {
    final ColumnSpec oldSpec = layout.getColumnSpec(cell + 1);
    resizedSpec = new ColumnSpec(oldSpec.getDefaultAlignment(), updatedSize, oldSpec.getResizeWeight());
  }
  setSpec(layout, resizedSpec, cell + 1, isRow);
  resizeSameGroupCells(cell, layout, resizedSpec, isRow);
}
/**
 * Explicitly resizes every cell of the size group containing {@code cell} to
 * the new spec, so the resize is effective for grouped cells (IDEADEV-10202).
 * At most one group is processed; {@code cell} is 0-based, group members are
 * 1-based.
 */
private static void resizeSameGroupCells(final int cell, final FormLayout formLayout, final FormSpec newSpec, final boolean isRow) {
  final int[][] groups = isRow ? formLayout.getRowGroups() : formLayout.getColumnGroups();
  for (int[] group : groups) {
    boolean containsCell = false;
    for (int member : group) {
      if (member == cell + 1) {
        containsCell = true;
        break;
      }
    }
    if (containsCell) {
      for (int member : group) {
        setSpec(formLayout, newSpec, member, isRow);
      }
      return;
    }
  }
}
/**
 * Installs the given spec on a 1-based row or column of the layout.
 * The caller guarantees the spec's concrete type matches {@code isRow}.
 */
private static void setSpec(final FormLayout formLayout, final FormSpec newSpec, final int cell, boolean isRow) {
  if (!isRow) {
    formLayout.setColumnSpec(cell, (ColumnSpec) newSpec);
    return;
  }
  formLayout.setRowSpec(cell, (RowSpec) newSpec);
}
/**
 * Computes the ConstantSize the 0-based {@code cell}'s spec should get so
 * that the cell becomes {@code newPx} pixels wide/tall.
 */
private static ConstantSize getUpdatedSize(RadContainer container, boolean isRow, int cell, int newPx) {
  final FormLayout layout = (FormLayout) container.getLayout();
  final FormSpec cellSpec = isRow ? layout.getRowSpec(cell + 1) : layout.getColumnSpec(cell + 1);
  return scaleSize(cellSpec, container, newPx);
}
/**
 * Converts a new pixel size into a ConstantSize for the given spec. When the
 * spec already has a constant size, the value is scaled proportionally in
 * its own unit and rounded to one decimal place; otherwise a plain pixel
 * size is returned.
 *
 * Bug fix: the original divided the rounded long by the int literal 10,
 * which performed integer division and always discarded the decimal digit
 * the preceding {@code * 10} / {@code Math.round} was meant to keep.
 */
private static ConstantSize scaleSize(final FormSpec rowSpec, final RadContainer container, final int newPx) {
  if (rowSpec.getSize() instanceof ConstantSize) {
    ConstantSize oldSize = (ConstantSize) rowSpec.getSize();
    int oldPx = oldSize.getPixelSize(container.getDelegee());
    // NOTE(review): assumes oldPx != 0 for a constant-sized cell - confirm
    // a zero-pixel constant size cannot reach this code path.
    double newValue = Math.round(oldSize.getValue() * newPx / oldPx * 10) / 10.0;
    return new ConstantSize(newValue, oldSize.getUnit());
  }
  return new ConstantSize(newPx, ConstantSize.PIXEL);
}
/**
 * Moves a set of rows/columns to {@code targetCell} one at a time, keeping
 * the not-yet-moved indices and the target consistent as cells shift.
 */
@Override
public void processCellsMoved(final RadContainer container, final boolean isRow, final int[] cellsToMove, int targetCell) {
for(int i=0; i<cellsToMove.length; i++) {
final int sourceCell = cellsToMove[i];
moveCells(container, isRow, sourceCell, targetCell);
// Each move relocates a cell plus its gap (2 cells): moving forward
// shifts the remaining source indices down by 2; moving backward shifts
// the insertion point up by 2.
if (sourceCell < targetCell) {
for(int j=i+1; j<cellsToMove.length; j++) {
cellsToMove [j] -= 2;
}
}
else {
targetCell += 2;
}
}
}
/**
 * Moves a single row/column, together with the components whose origin is in
 * it, to {@code targetCell}. A no-op when the target equals the cell or is
 * directly adjacent (the move would change nothing).
 */
private void moveCells(final RadContainer container, final boolean isRow, final int cell, int targetCell) {
if (targetCell >= cell && targetCell <= cell+2) {
return;
}
FormLayout layout = (FormLayout) container.getLayout();
List<RadComponent> componentsToMove = new ArrayList<RadComponent>();
// Remember the moved cell's spec so it can be re-inserted unchanged.
FormSpec oldSpec = isRow ? layout.getRowSpec(cell+1) : layout.getColumnSpec(cell+1);
// Detach components originating in the moved cell; re-added below.
for(RadComponent c: container.getComponents()) {
if (c.getConstraints().getCell(isRow) == cell) {
componentsToMove.add(c);
container.removeComponent(c);
}
}
// Delete the old cell (and possibly its gap), then re-insert the spec at
// the target, compensating for the removed cells when moving forward.
int count = deleteGridCells(container, cell, isRow);
int insertCell = (targetCell > cell) ? targetCell - count - 1 : targetCell;
final boolean isBefore = targetCell < cell;
int newIndex = insertGridCells(container, insertCell, isRow, isBefore, oldSpec);
// Re-attach the components at the cell's new index.
for(RadComponent c: componentsToMove) {
c.getConstraints().setCell(isRow, newIndex);
container.addComponent(c);
}
}
/**
 * Re-reads the live FormLayout CellConstraints of every child component and
 * mirrors them into the designer's GridConstraints (after structural layout
 * changes such as inserting or deleting rows/columns).
 */
private static void updateGridConstraintsFromCellConstraints(RadContainer grid) {
  final FormLayout layout = (FormLayout) grid.getLayout();
  for (RadComponent component : grid.getComponents()) {
    final CellConstraints cellConstraints = layout.getConstraints(component.getDelegee());
    copyCellToGridConstraints(component.getConstraints(), cellConstraints);
  }
}
/**
 * Copies position and span from FormLayout CellConstraints into the
 * designer's GridConstraints. CellConstraints coordinates are 1-based,
 * GridConstraints are 0-based; spans are plain counts in both models.
 */
private static void copyCellToGridConstraints(final GridConstraints gc, final CellConstraints cc) {
  gc.setRow(cc.gridY - 1);
  gc.setColumn(cc.gridX - 1);
  gc.setRowSpan(cc.gridHeight);
  gc.setColSpan(cc.gridWidth);
}
/**
 * Returns the effective alignment of a component on the given axis. When the
 * component's CellConstraints alignment is DEFAULT, the alignment is derived
 * from the row/column spec of the cell it occupies.
 */
public int getAlignment(RadComponent component, boolean horizontal) {
CellConstraints cc = (CellConstraints) component.getCustomLayoutConstraints();
CellConstraints.Alignment al = horizontal ? cc.hAlign : cc.vAlign;
if (al == CellConstraints.DEFAULT) {
FormLayout formLayout = (FormLayout) component.getParent().getLayout();
// Spec indices are 1-based, grid constraints 0-based.
FormSpec formSpec = horizontal
? formLayout.getColumnSpec(component.getConstraints().getColumn()+1)
: formLayout.getRowSpec(component.getConstraints().getRow()+1);
final FormSpec.DefaultAlignment defaultAlignment = formSpec.getDefaultAlignment();
if (defaultAlignment.equals(RowSpec.FILL)) {
return GridConstraints.ALIGN_FILL;
}
// TOP and LEFT both map to ALIGN_LEFT - presumably the grid alignment
// constants are shared between the two axes; confirm against GridConstraints.
if (defaultAlignment.equals(RowSpec.TOP) || defaultAlignment.equals(ColumnSpec.LEFT)) {
return GridConstraints.ALIGN_LEFT;
}
if (defaultAlignment.equals(RowSpec.CENTER)) {
return GridConstraints.ALIGN_CENTER;
}
return GridConstraints.ALIGN_RIGHT;
}
// Explicit (non-default) alignment is read from the grid constraints.
return Utils.alignFromConstraints(component.getConstraints(), horizontal);
}
/**
 * Sets an explicit alignment on the given axis (index into the serializer's
 * alignment tables) and pushes the change into the live layout.
 */
public void setAlignment(RadComponent component, boolean horizontal, int alignment) {
  final CellConstraints constraints = (CellConstraints) component.getCustomLayoutConstraints();
  if (horizontal) {
    constraints.hAlign = FormLayoutSerializer.ourHorizontalAlignments[alignment];
  }
  else {
    constraints.vAlign = FormLayoutSerializer.ourVerticalAlignments[alignment];
  }
  updateConstraints(component);
}
/**
 * Clears any explicit alignment on the given axis, reverting the component
 * to its row/column spec's default alignment, and updates the live layout.
 */
public void resetAlignment(RadComponent component, boolean horizontal) {
  final CellConstraints constraints = (CellConstraints) component.getCustomLayoutConstraints();
  if (horizontal) {
    constraints.hAlign = CellConstraints.DEFAULT;
  }
  else {
    constraints.vAlign = CellConstraints.DEFAULT;
  }
  updateConstraints(component);
}
/**
 * Reports whether the component carries an explicit (non-default) alignment
 * on the given axis.
 */
public boolean isAlignmentModified(RadComponent component, boolean horizontal) {
  final CellConstraints constraints = (CellConstraints) component.getCustomLayoutConstraints();
  final CellConstraints.Alignment alignment = horizontal ? constraints.hAlign : constraints.vAlign;
  return alignment != CellConstraints.DEFAULT;
}
// Pushes the component's current grid constraints back into the FormLayout
// as CellConstraints and revalidates the parent so the change is visible.
private static void updateConstraints(final RadComponent component) {
FormLayout layout = (FormLayout) component.getParent().getLayout();
layout.setConstraints(component.getDelegee(), gridToCellConstraints(component));
component.getParent().revalidate();
}
// A FormLayout grid may be completely empty (zero rows/columns).
public int getMinCellCount() {
return 0;
}
/**
 * Re-creates a FormLayout for the form-snapshot feature. The source layout
 * instance may come from a foreign classloader, so its row/column specs and
 * groups are read reflectively and the specs are copied through
 * serialization into instances of our own classes.
 */
@Override
public void createSnapshotLayout(final SnapshotContext context,
final JComponent parent,
final RadContainer container,
final LayoutManager layout) {
ColumnSpec[] colSpecs;
RowSpec[] rowSpecs;
int[][] rowGroups;
int[][] columnGroups;
try {
// Reflective equivalents of FormLayout.getRowCount()/getColumnCount().
Method method = layout.getClass().getMethod("getRowCount", ArrayUtil.EMPTY_CLASS_ARRAY);
int rowCount = ((Integer)method.invoke(layout, ArrayUtil.EMPTY_OBJECT_ARRAY)).intValue();
method = layout.getClass().getMethod("getColumnCount", ArrayUtil.EMPTY_CLASS_ARRAY);
int columnCount = ((Integer)method.invoke(layout, ArrayUtil.EMPTY_OBJECT_ARRAY)).intValue();
rowSpecs = new RowSpec[rowCount];
colSpecs = new ColumnSpec[columnCount];
// getRowSpec/getColumnSpec take 1-based indices.
method = layout.getClass().getMethod("getRowSpec", int.class);
for (int i = 0; i < rowCount; i++) {
rowSpecs[i] = (RowSpec)createSerializedCopy(method.invoke(layout, i + 1));
}
method = layout.getClass().getMethod("getColumnSpec", int.class);
for (int i = 0; i < columnCount; i++) {
colSpecs[i] = (ColumnSpec)createSerializedCopy(method.invoke(layout, i + 1));
}
// Group arrays are plain int[][] and can be used without copying.
method = layout.getClass().getMethod("getRowGroups", ArrayUtil.EMPTY_CLASS_ARRAY);
rowGroups = (int[][])method.invoke(layout);
method = layout.getClass().getMethod("getColumnGroups", ArrayUtil.EMPTY_CLASS_ARRAY);
columnGroups = (int[][])method.invoke(layout);
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
final FormLayout formLayout = new FormLayout(colSpecs, rowSpecs);
formLayout.setRowGroups(rowGroups);
formLayout.setColumnGroups(columnGroups);
container.setLayout(formLayout);
}
/**
 * Returns a serialize/deserialize round-trip copy of the given object.
 * The snapshot's FormLayout classes may come from a different classloader;
 * deserializing in our own classloader yields instances of our own classes.
 *
 * @throws RuntimeException wrapping any serialization failure
 */
private static Object createSerializedCopy(final Object original) {
  try {
    final BufferExposingByteArrayOutputStream bytes = new BufferExposingByteArrayOutputStream();
    final ObjectOutputStream out = new ObjectOutputStream(bytes);
    try {
      out.writeObject(original);
    }
    finally {
      out.close();
    }
    final ObjectInputStream in =
      new ObjectInputStream(new ByteArrayInputStream(bytes.getInternalBuffer(), 0, bytes.size()));
    try {
      return in.readObject();
    }
    finally {
      in.close();
    }
  }
  catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * Adds a snapshotted component to the container, reading its CellConstraints
 * reflectively from the foreign-classloader layout and copying them through
 * serialization into our own CellConstraints class.
 */
@Override
public void addSnapshotComponent(final JComponent parent,
final JComponent child,
final RadContainer container,
final RadComponent component) {
CellConstraints cc;
try {
LayoutManager layout = parent.getLayout();
//noinspection HardCodedStringLiteral
Method method = layout.getClass().getMethod("getConstraints", Component.class);
cc = (CellConstraints)createSerializedCopy(method.invoke(layout, child));
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
// Mirror into GridConstraints and keep the CellConstraints as the
// component's custom layout constraints.
copyCellToGridConstraints(component.getConstraints(), cc);
component.setCustomLayoutConstraints(cc);
container.addComponent(component);
}
/**
 * Keeps a component's FormLayout CellConstraints in sync with its designer
 * GridConstraints: whenever the constraints property changes, the grid
 * constraints are pushed back into the layout.
 */
private static class MyPropertyChangeListener implements PropertyChangeListener {
private final RadComponent myComponent;
public MyPropertyChangeListener(final RadComponent component) {
myComponent = component;
}
public void propertyChange(PropertyChangeEvent evt) {
// Only constraint changes are relevant; other property events are ignored.
if (evt.getPropertyName().equals(RadComponent.PROP_CONSTRAINTS)) {
updateConstraints(myComponent);
}
}
}
/**
 * Inspector property editing the per-component insets stored in the
 * component's FormLayout CellConstraints.
 */
private static class ComponentInsetsProperty extends AbstractInsetsProperty<RadComponent> {
public ComponentInsetsProperty() {
super(null, "Insets");
}
// Returns the insets from the component's CellConstraints, or zero insets
// when the component has no CellConstraints attached.
public Insets getValue(final RadComponent component) {
if (component.getCustomLayoutConstraints() instanceof CellConstraints) {
final CellConstraints cellConstraints = (CellConstraints)component.getCustomLayoutConstraints();
return cellConstraints.insets;
}
return new Insets(0, 0, 0, 0);
}
// Writes the insets both into the stored custom constraints and into the
// live layout; a clone of the layout's constraints is modified, then set back.
protected void setValueImpl(final RadComponent component, final Insets value) throws Exception {
if (component.getCustomLayoutConstraints() instanceof CellConstraints) {
final CellConstraints cellConstraints = (CellConstraints)component.getCustomLayoutConstraints();
cellConstraints.insets = value;
FormLayout layout = (FormLayout) component.getParent().getLayout();
CellConstraints cc = (CellConstraints)layout.getConstraints(component.getDelegee()).clone();
cc.insets = value;
layout.setConstraints(component.getDelegee(), cc);
}
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.