gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.test;
import java.io.File;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.zookeeper.PortAssignment;
import org.apache.zookeeper.ZKTestCase;
import org.apache.zookeeper.server.quorum.FastLeaderElection;
import org.apache.zookeeper.server.quorum.QuorumPeer;
import org.apache.zookeeper.server.quorum.Vote;
import org.apache.zookeeper.server.quorum.QuorumPeer.QuorumServer;
import org.apache.zookeeper.server.quorum.QuorumPeer.ServerState;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class FLETest extends ZKTestCase {
protected static final Logger LOG = LoggerFactory.getLogger(FLETest.class);
// Upper bound on polling iterations (100/200 ms each) before a wait gives up.
private final int MAX_LOOP_COUNTER = 300;
// Most recently visited election thread; reused by tearDown() while shutting peers down.
private FLETest.LEThread leThread;
/**
 * Minimal vote record used by the test to tally which server a peer
 * nominated as leader.
 */
static class TestVote {
    // Id of the voting server. Fix: the constructor previously accepted
    // this argument but silently discarded it.
    long id;
    // Id of the server this vote nominates as leader.
    long leader;

    /**
     * @param id     id of the voting server
     * @param leader id of the nominated leader
     */
    TestVote(int id, long leader) {
        this.id = id;
        this.leader = leader;
    }
}
/**
 * Counts how many votes in the given set nominate the server with the
 * given id as leader.
 *
 * @param hs set of test votes to scan
 * @param id candidate leader id to tally
 * @return number of votes nominating {@code id}
 */
int countVotes(HashSet<TestVote> hs, long id) {
    int total = 0;
    for (TestVote vote : hs) {
        if (vote.leader == id) {
            total++;
        }
    }
    return total;
}
int count;                                    // number of peers in the ensemble
HashMap<Long,QuorumServer> peers;             // server id -> quorum server configuration
ArrayList<LEThread> threads;                  // one election-driving thread per peer
HashMap<Integer, HashSet<TestVote> > voteMap; // NOTE(review): not referenced in the visible tests
HashMap<Long, LEThread> quora;                // NOTE(review): not referenced in the visible tests
File tmpdir[];                                // per-peer data directories
int port[];                                   // per-peer client ports
int successCount;                             // peers that joined a quorum (guarded by "this")
volatile Vote votes[];                        // last vote produced by each peer
volatile long leader = -1;                    // elected leader id; -1 while unknown
//volatile int round = 1;
Random rand = new Random();
Set<Long> joinedThreads;                      // ids of peers that finished successfully
/**
 * Resets the shared election state for a fresh seven-peer run before
 * every test.
 */
@Before
public void setUp() throws Exception {
    count = 7;
    successCount = 0;
    peers = new HashMap<Long, QuorumServer>(count);
    threads = new ArrayList<LEThread>(count);
    voteMap = new HashMap<Integer, HashSet<TestVote>>();
    joinedThreads = new HashSet<Long>();
    votes = new Vote[count];
    tmpdir = new File[count];
    port = new int[count];
}
/**
 * Shuts down the quorum peer of every election thread started by a test.
 */
@After
public void tearDown() throws Exception {
    for (LEThread thread : threads) {
        // Keep assigning the leThread field as the original code did.
        leThread = thread;
        QuorumBase.shutdown(leThread.peer);
    }
}
/**
* Implements the behavior of a peer during the leader election rounds
* of tests.
*/
class LEThread extends Thread {
    FLETest self;      // enclosing test instance; used as the monitor for join signaling
    int i;             // this peer's server id
    QuorumPeer peer;   // the quorum peer driven by this thread
    int totalRounds;   // election rounds to execute before attempting to join
    // Maps a candidate leader id to the set of peer ids currently supporting it.
    ConcurrentHashMap<Long, HashSet<Integer> > quora;

    LEThread(FLETest self, QuorumPeer peer, int i, int rounds, ConcurrentHashMap<Long, HashSet<Integer> > quora) {
        this.self = self;
        this.i = i;
        this.peer = peer;
        this.totalRounds = rounds;
        this.quora = quora;
        LOG.info("Constructor: " + getName());
    }

    public void run() {
        try {
            Vote v = null;
            while (true) {
                /*
                 * Set the state of the peer to LOOKING and look for leader
                 */
                peer.setPeerState(ServerState.LOOKING);
                LOG.info("Going to call leader election again.");
                v = peer.getElectionAlg().lookForLeader();
                if (v == null) {
                    LOG.info("Thread " + i + " got a null vote");
                    break;
                }
                /*
                 * Done with the election round, so now we set the vote in
                 * the peer. A real zookeeper would take care of setting the
                 * current vote. Here we do it manually.
                 */
                peer.setCurrentVote(v);
                LOG.info("Finished election: " + i + ", " + v.getId());
                votes[i] = v;
                /*
                 * Get the current value of the logical clock for this peer
                 * so that we know in which round this peer has executed.
                 */
                int lc = (int) ((FastLeaderElection) peer.getElectionAlg()).getLogicalClock();
                /*
                 * The leader executes the following block, which essentially shuts down
                 * the peer if it is not the last round.
                 */
                if (v.getId() == i) {
                    LOG.info("I'm the leader: " + i);
                    if (lc < this.totalRounds) {
                        LOG.info("Leader " + i + " dying");
                        FastLeaderElection election =
                            (FastLeaderElection) peer.getElectionAlg();
                        election.shutdown();
                        // Make sure the vote is reset to -1 after shutdown.
                        Assert.assertEquals(-1, election.getVote().getId());
                        LOG.info("Leader " + i + " dead");
                        break;
                    }
                }
                /*
                 * If the peer has done enough rounds, then consider joining. The thread
                 * will only join if it is part of a quorum supporting the current
                 * leader. Otherwise it will try again.
                 */
                if (lc >= this.totalRounds) {
                    /*
                     * quora keeps the supporters of a given leader, so
                     * we first update it with the vote of this peer.
                     */
                    if (quora.get(v.getId()) == null) quora.put(v.getId(), new HashSet<Integer>());
                    quora.get(v.getId()).add(i);
                    /*
                     * we now wait until a quorum supports the same leader.
                     */
                    if (waitForQuorum(v.getId())) {
                        synchronized (self) {
                            /*
                             * Assert that the state of the thread is the one expected:
                             * LEADING when this peer won, FOLLOWING otherwise.
                             */
                            if (v.getId() == i) {
                                Assert.assertTrue("Wrong state" + peer.getPeerState(),
                                        peer.getPeerState() == ServerState.LEADING);
                                leader = i;
                            } else {
                                Assert.assertTrue("Wrong state" + peer.getPeerState(),
                                        peer.getPeerState() == ServerState.FOLLOWING);
                            }
                            /*
                             * Global variable keeping track of
                             * how many peers have successfully
                             * joined.
                             */
                            successCount++;
                            joinedThreads.add((long) i);
                            self.notify();
                        }
                        /*
                         * I'm done so joining.
                         */
                        break;
                    } else {
                        // No quorum materialized behind this leader; withdraw
                        // our support and run another election round.
                        quora.get(v.getId()).remove(i);
                    }
                }
                /*
                 * This sleep time represents the time a follower
                 * would take to declare the leader dead and start
                 * a new leader election.
                 */
                Thread.sleep(100);
            }
            LOG.debug("Thread " + i + " votes " + v);
        } catch (InterruptedException e) {
            Assert.fail(e.toString());
        }
    }

    /**
     * Auxiliary method to make sure that enough followers terminated.
     * Polls every 100 ms until a majority (&gt; count/2) supports the given
     * leader or MAX_LOOP_COUNTER iterations elapse.
     *
     * @return boolean followers successfully joined.
     */
    boolean waitForQuorum(long id)
            throws InterruptedException {
        int loopCounter = 0;
        while ((quora.get(id).size() <= count / 2) && (loopCounter < MAX_LOOP_COUNTER)) {
            Thread.sleep(100);
            loopCounter++;
        }
        if ((loopCounter >= MAX_LOOP_COUNTER) && (quora.get(id).size() <= count / 2)) {
            return false;
        } else {
            return true;
        }
    }
}
/**
 * Runs a single leader-election round. Fix: the previous catch block
 * converted any exception to Assert.fail(e.toString()), discarding the
 * stack trace; the method already declares throws Exception, so the
 * failure now propagates with full diagnostics.
 */
@Test
public void testSingleElection() throws Exception {
    runElection(1);
}
/**
 * Runs two election rounds (the first leader is killed). Fix: removed
 * the catch block that flattened exceptions to Assert.fail(e.toString()),
 * losing the stack trace.
 */
@Test
public void testDoubleElection() throws Exception {
    runElection(2);
}
/**
 * Runs three election rounds (leaders of the first two are killed). Fix:
 * removed the catch block that flattened exceptions to
 * Assert.fail(e.toString()), losing the stack trace.
 */
@Test
public void testTripleElection() throws Exception {
    runElection(3);
}
/**
* Test leader election for a number of rounds. In all rounds but the last one
* we kill the leader.
*
* @param rounds
* @throws Exception
*/
private void runElection(int rounds) throws Exception {
    // Leader id -> set of supporter ids, shared by all election threads.
    ConcurrentHashMap<Long, HashSet<Integer>> quora =
            new ConcurrentHashMap<Long, HashSet<Integer>>();
    LOG.info("TestLE: " + getTestName() + ", " + count);
    /*
     * Creates list of peers.
     */
    for (int i = 0; i < count; i++) {
        port[i] = PortAssignment.unique();
        peers.put(Long.valueOf(i),
                new QuorumServer(i,
                        new InetSocketAddress(
                                "127.0.0.1", PortAssignment.unique()),
                        new InetSocketAddress(
                                "127.0.0.1", PortAssignment.unique()),
                        new InetSocketAddress(
                                "127.0.0.1", port[i])));
        tmpdir[i] = ClientBase.createTmpDir();
    }
    /*
     * Start one LEThread for each peer we want to run.
     */
    for (int i = 0; i < count; i++) {
        QuorumPeer peer = new QuorumPeer(peers, tmpdir[i], tmpdir[i],
                port[i], 3, i, 1000, 2, 2);
        peer.startLeaderElection();
        LEThread thread = new LEThread(this, peer, i, rounds, quora);
        thread.start();
        threads.add(thread);
    }
    LOG.info("Started threads " + getTestName());
    int waitCounter = 0;
    synchronized (this) {
        // Wait until a majority of threads has joined and a leader has been
        // recorded, or give up after MAX_LOOP_COUNTER 200 ms waits.
        while (((successCount <= count / 2) || (leader == -1))
                && (waitCounter < MAX_LOOP_COUNTER)) {
            this.wait(200);
            waitCounter++;
        }
    }
    LOG.info("Success count: " + successCount);
    /*
     * Lists what threads haven't joined. A thread doesn't join if
     * it hasn't decided upon a leader yet. It can happen that a
     * peer is slow or disconnected, and it can take longer to
     * nominate and connect to the current leader.
     */
    for (int i = 0; i < threads.size(); i++) {
        if (threads.get(i).isAlive()) {
            LOG.info("Threads didn't join: " + i);
        }
    }
    /*
     * If we have a majority, then we are good to go.
     */
    if (successCount <= count / 2) {
        // Fix: message previously read "Fewer than a a majority has joined".
        Assert.fail("Fewer than a majority has joined");
    }
    // The elected leader itself must be among the joined threads.
    if (!joinedThreads.contains(leader)) {
        Assert.fail("Leader hasn't joined: " + leader);
    }
}
/*
 * Class to verify that the thread has become a follower
 */
static class VerifyState extends Thread {
    volatile private boolean success = false;
    private QuorumPeer peer;

    public VerifyState(QuorumPeer peer) {
        this.peer = peer;
    }

    /**
     * Polls the peer state every 250 ms until it is observed FOLLOWING
     * (success) or LEADING (failure), then terminates.
     */
    public void run() {
        setName("VerifyState-" + peer.getId());
        for (;;) {
            if (peer.getPeerState() == ServerState.FOLLOWING) {
                LOG.info("I am following");
                success = true;
                return;
            }
            if (peer.getPeerState() == ServerState.LEADING) {
                LOG.info("I am leading");
                success = false;
                return;
            }
            try {
                Thread.sleep(250);
            } catch (Exception e) {
                LOG.warn("Sleep failed ", e);
            }
        }
    }

    /** @return whether the peer was observed in FOLLOWING state. */
    public boolean isSuccess() {
        return success;
    }
}
/*
* For ZOOKEEPER-975 verify that a peer joining an established cluster
* does not go in LEADING state.
*/
@Test
public void testJoin() throws Exception {
    int sid;
    QuorumPeer peer;
    int waitTime = 10 * 1000;   // max time (ms) to wait for state verification
    ArrayList<QuorumPeer> peerList = new ArrayList<QuorumPeer>();
    // Build the 3-server configuration; only the first two start initially.
    for (sid = 0; sid < 3; sid++) {
        port[sid] = PortAssignment.unique();
        peers.put(Long.valueOf(sid),
                new QuorumServer(sid,
                        new InetSocketAddress(
                                "127.0.0.1", PortAssignment.unique()),
                        new InetSocketAddress(
                                "127.0.0.1", PortAssignment.unique()),
                        new InetSocketAddress(
                                "127.0.0.1", port[sid])));
        tmpdir[sid] = ClientBase.createTmpDir();
    }
    // start 2 peers and verify if they form the cluster
    for (sid = 0; sid < 2; sid++) {
        peer = new QuorumPeer(peers, tmpdir[sid], tmpdir[sid],
                port[sid], 3, sid, 2000, 2, 2);
        LOG.info("Starting peer " + peer.getId());
        peer.start();
        peerList.add(sid, peer);
    }
    peer = peerList.get(0);
    VerifyState v1 = new VerifyState(peerList.get(0));
    v1.start();
    v1.join(waitTime);
    Assert.assertFalse("Unable to form cluster in " +
            waitTime + " ms",
            !v1.isSuccess());
    // Start 3rd peer and check if it goes in LEADING state
    // (sid is 2 here, left over from the loop above).
    peer = new QuorumPeer(peers, tmpdir[sid], tmpdir[sid],
            port[sid], 3, sid, 2000, 2, 2);
    LOG.info("Starting peer " + peer.getId());
    peer.start();
    peerList.add(sid, peer);
    v1 = new VerifyState(peer);
    v1.start();
    v1.join(waitTime);
    if (v1.isAlive()) {
        Assert.fail("Peer " + peer.getId() + " failed to join the cluster " +
                "within " + waitTime + " ms");
    } else if (!v1.isSuccess()) {
        // success == false means the verifier observed LEADING (ZOOKEEPER-975).
        Assert.fail("Incorrect LEADING state for peer " + peer.getId());
    }
    // cleanup
    for (int id = 0; id < 3; id++) {
        peer = peerList.get(id);
        if (peer != null) {
            peer.shutdown();
        }
    }
}
/*
* For ZOOKEEPER-1732 verify that it is possible to join an ensemble with
* inconsistent election round information.
*/
@Test
public void testJoinInconsistentEnsemble() throws Exception {
    int sid;
    QuorumPeer peer;
    int waitTime = 10 * 1000;   // max time (ms) to wait for state verification
    ArrayList<QuorumPeer> peerList = new ArrayList<QuorumPeer>();
    // Build the 3-server configuration (two-address QuorumServer variant here).
    for (sid = 0; sid < 3; sid++) {
        peers.put(Long.valueOf(sid),
                new QuorumServer(sid,
                        new InetSocketAddress(
                                "127.0.0.1", PortAssignment.unique()),
                        new InetSocketAddress(
                                "127.0.0.1", PortAssignment.unique())));
        tmpdir[sid] = ClientBase.createTmpDir();
        port[sid] = PortAssignment.unique();
    }
    // start 2 peers and verify if they form the cluster
    for (sid = 0; sid < 2; sid++) {
        peer = new QuorumPeer(peers, tmpdir[sid], tmpdir[sid],
                port[sid], 3, sid, 2000, 2, 2);
        LOG.info("Starting peer " + peer.getId());
        peer.start();
        peerList.add(sid, peer);
    }
    peer = peerList.get(0);
    VerifyState v1 = new VerifyState(peerList.get(0));
    v1.start();
    v1.join(waitTime);
    Assert.assertFalse("Unable to form cluster in " +
            waitTime + " ms",
            !v1.isSuccess());
    // Change the election round for one of the members of the ensemble
    // by bumping both zxid and electionEpoch well past the real values
    // (ZOOKEEPER-1732: the new peer must still be able to join).
    long leaderSid = peer.getCurrentVote().getId();
    long zxid = peer.getCurrentVote().getZxid();
    long electionEpoch = peer.getCurrentVote().getElectionEpoch();
    ServerState state = peer.getCurrentVote().getState();
    long peerEpoch = peer.getCurrentVote().getPeerEpoch();
    Vote newVote = new Vote(leaderSid, zxid + 100, electionEpoch + 100, peerEpoch, state);
    peer.setCurrentVote(newVote);
    // Start 3rd peer and check if it joins the quorum
    peer = new QuorumPeer(peers, tmpdir[2], tmpdir[2],
            port[2], 3, 2, 2000, 2, 2);
    LOG.info("Starting peer " + peer.getId());
    peer.start();
    peerList.add(sid, peer);
    v1 = new VerifyState(peer);
    v1.start();
    v1.join(waitTime);
    if (v1.isAlive()) {
        Assert.fail("Peer " + peer.getId() + " failed to join the cluster " +
                "within " + waitTime + " ms");
    }
    // cleanup
    for (int id = 0; id < 3; id++) {
        peer = peerList.get(id);
        if (peer != null) {
            peer.shutdown();
        }
    }
}
}
| |
/*
* All rights reserved. (C) Copyright 2009, Trinity College Dublin
*/
package com.mind_era.knime.common.view.impl;
import java.awt.event.MouseListener;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.List;
import java.util.Set;
import javax.annotation.CheckReturnValue;
import javax.annotation.Nonnull;
import javax.swing.DefaultBoundedRangeModel;
import javax.swing.JLabel;
import javax.swing.JSlider;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.knime.core.node.defaultnodesettings.SettingsModel;
import com.mind_era.knime.common.util.select.Selectable;
import com.mind_era.knime.common.util.swing.SelectionType;
import com.mind_era.knime.common.util.swing.VariableControl;
import com.mind_era.knime.common.view.ControlsHandler;
import com.mind_era.knime.common.view.ListSelection;
/**
* A {@link VariableControl} with {@link VariableControl.ControlTypes#Slider}.
*
* @author <a href="mailto:bakosg@tcd.ie">Gabor Bakos</a>
* @param <Model>
* Type of the model for values.
* @param <Sel>
* The type of the container of {@code Model}s.
*/
@Nonnull
@CheckReturnValue
public class SliderControl<Model, Sel extends Selectable<Model>> extends
AbstractVariableControl<Model, Sel> {
// Slider shown by this control. The initial DefaultBoundedRangeModel
// (value=1, extent=0, min=1, max=1) is a placeholder; updateComponent()
// sets the real bounds from the model's possible values.
private final JSlider slider = new JSlider(new DefaultBoundedRangeModel(1,
        0, 1, 1));
/**
* @param model
* The {@link SettingsModelListSelection}.
* @param selectionType
* The initial {@link SelectionType}.
* @param controlsHandler
* The used {@link ControlsHandler}.
* @param changeListener
* The {@link ChangeListener} associated to the {@code model}.
* @param domainModel
* The model for possible parameters and selections.
*/
public SliderControl(final SettingsModelListSelection model,
final SelectionType selectionType,
final ControlsHandler<SettingsModel, Model, Sel> controlsHandler,
final ChangeListener changeListener, final Sel domainModel) {
super(model, selectionType, controlsHandler, changeListener,
domainModel);
switch (selectionType) {
case MultipleAtLeastOne:
case MultipleOrNone:
if (model.getSelection().size() > 1) {
model.setSelection(Collections.singleton(model.getSelection()
.iterator().next()));
}
break;
case Single:
case Unmodifiable:
break;
default:
break;
}
slider.setName(model.getConfigName());
slider.setSnapToTicks(true);
slider.setPaintLabels(true);
updateComponent();
getPanel().add(slider);
}
/*
* (non-Javadoc)
*
* @see
* org.knime.core.node.defaultnodesettings.DialogComponent#setEnabledComponents
* (boolean)
*/
/**
 * Enables or disables the slider widget.
 *
 * @param enabled {@code true} to allow user interaction with the slider.
 */
@Override
protected void setEnabledComponents(final boolean enabled) {
    this.slider.setEnabled(enabled);
}
/*
* (non-Javadoc)
*
* @see
* com.mind_era.knime.common.view.impl.AbstractVariableControl#updateComponent()
*/
/**
 * Synchronizes the slider with the underlying {@link ListSelection} model:
 * rebuilds the value labels, adjusts the slider bounds to the number of
 * possible values, moves the knob to the current selection, and installs
 * the change listener that pushes slider movements back into the model.
 */
@Override
protected void updateComponent() {
    @SuppressWarnings("unchecked")
    final ListSelection<String> model = (ListSelection<String>) getModel();
    final List<String> possibleValues = model.getPossibleValues();
    // Label positions are 1-based: the k-th possible value maps to slider
    // position k.
    final Dictionary<Integer, JLabel> labels = new Hashtable<Integer, JLabel>();
    int i = 1;
    for (final String valueStr : possibleValues) {
        labels.put(Integer.valueOf(i++), new JLabel(valueStr));
    }
    slider.setLabelTable(labels);
    slider.getModel().setMinimum(1);
    slider.getModel().setMaximum(i - 1);
    final String selectionStr = model.getSelection().iterator().next();
    final int selected = select(possibleValues, selectionStr);
    if (slider.getValue() != selected) {
        slider.setValue(selected);
    }
    // Bug fix: this method used to add a brand-new ChangeListener on every
    // invocation, and it is re-invoked from stateChanged() below, so
    // listeners accumulated without bound. The slider is private to this
    // control, so every ChangeListener registered on it was added right
    // here; clearing them before re-adding keeps exactly one active
    // listener.
    for (final ChangeListener stale : slider.getChangeListeners()) {
        slider.removeChangeListener(stale);
    }
    slider.addChangeListener(new ChangeListener() {
        @Override
        public void stateChanged(final ChangeEvent e) {
            // Translate the slider position back to the labelled value.
            final Set<String> newSelection = Collections
                    .singleton(((JLabel) slider.getLabelTable().get(
                            Integer.valueOf(slider.getValue()))).getText());
            if (getSelectionType() != SelectionType.Unmodifiable) {
                if (!newSelection.equals(model.getSelection())) {
                    model.setSelection(newSelection);
                    updateComponent();
                }
            } else {
                // Unmodifiable: revert the slider instead of updating the model.
                if (!newSelection.equals(model.getSelection())) {
                    updateComponent();
                }
            }
        }
    });
}
/**
* Selects the index of {@code selectionStr} in {@code possibleValues}
* (starting from {@code 1}).
*
* @param possibleValues
* A {@link List} of {@link String}s.
* @param selectionStr
* A {@link String} from {@code possibleValues}.
* @return The index of {@code selectionStr} in {@code possibleValues}
* starting from {@code 1}.
*/
private int select(final List<String> possibleValues,
        final String selectionStr) {
    // Walk the list by index; slider positions are 1-based, hence idx + 1.
    for (int idx = 0; idx < possibleValues.size(); ++idx) {
        if (possibleValues.get(idx).equals(selectionStr)) {
            return idx + 1;
        }
    }
    throw new IllegalStateException("Not found selection: " + selectionStr
            + " in : " + possibleValues);
}
/*
* (non-Javadoc)
*
* @see com.mind_era.knime.common.view.impl.AbstractVariableControl#getType()
*/
/** @return {@link ControlTypes#Slider}, the control type this class implements. */
@Override
public ControlTypes getType() {
    return ControlTypes.Slider;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#hashCode()
*/
/** Hash combining the superclass state with the slider component. */
@Override
public int hashCode() {
    final int sliderHash = slider == null ? 0 : slider.hashCode();
    return 31 * super.hashCode() + sliderHash;
}
/*
* (non-Javadoc)
*
* @see java.lang.Object#equals(java.lang.Object)
*/
/**
 * Equality requires superclass equality, identical runtime class, and the
 * very same slider instance (intentional reference comparison).
 */
@Override
public boolean equals(final Object obj) {
    if (this == obj) {
        return true;
    }
    if (!super.equals(obj) || getClass() != obj.getClass()) {
        return false;
    }
    final SliderControl<?, ?> other = (SliderControl<?, ?>) obj;
    if (slider == null) {
        return other.slider == null;
    }
    return slider == other.slider;
}
/**
 * Registers or deregisters a mouse listener on the slider, then forwards
 * the notification to the superclass.
 */
@Override
protected void notifyChange(final MouseListener listener,
        final AbstractVariableControl.Change change) {
    if (change == AbstractVariableControl.Change.add) {
        slider.addMouseListener(listener);
    } else if (change == AbstractVariableControl.Change.remove) {
        slider.removeMouseListener(listener);
    }
    super.notifyChange(listener, change);
}
}
| |
/*
* Copyright 2004, 2005, 2006, 2017 Acegi Technology Pty Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.acls.jdbc;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.sql.DataSource;
import org.springframework.core.convert.ConversionException;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.ResultSetExtractor;
import org.springframework.security.acls.domain.AccessControlEntryImpl;
import org.springframework.security.acls.domain.AclAuthorizationStrategy;
import org.springframework.security.acls.domain.AclImpl;
import org.springframework.security.acls.domain.AuditLogger;
import org.springframework.security.acls.domain.DefaultPermissionFactory;
import org.springframework.security.acls.domain.DefaultPermissionGrantingStrategy;
import org.springframework.security.acls.domain.GrantedAuthoritySid;
import org.springframework.security.acls.domain.ObjectIdentityRetrievalStrategyImpl;
import org.springframework.security.acls.domain.PermissionFactory;
import org.springframework.security.acls.domain.PrincipalSid;
import org.springframework.security.acls.model.AccessControlEntry;
import org.springframework.security.acls.model.Acl;
import org.springframework.security.acls.model.AclCache;
import org.springframework.security.acls.model.MutableAcl;
import org.springframework.security.acls.model.NotFoundException;
import org.springframework.security.acls.model.ObjectIdentity;
import org.springframework.security.acls.model.ObjectIdentityGenerator;
import org.springframework.security.acls.model.Permission;
import org.springframework.security.acls.model.PermissionGrantingStrategy;
import org.springframework.security.acls.model.Sid;
import org.springframework.security.acls.model.UnloadedSidException;
import org.springframework.security.util.FieldUtils;
import org.springframework.util.Assert;
/**
* Performs lookups in a manner that is compatible with ANSI SQL.
* <p>
* NB: This implementation does attempt to provide reasonably optimised lookups - within
* the constraints of a normalised database and standard ANSI SQL features. If you are
* willing to sacrifice either of these constraints (e.g. use a particular database
* feature such as hierarchical queries or materalized views, or reduce normalisation) you
* are likely to achieve better performance. In such situations you will need to provide
* your own custom <code>LookupStrategy</code>. This class does not support subclassing,
* as it is likely to change in future releases and therefore subclassing is unsupported.
* <p>
* There are two SQL queries executed, one in the <tt>lookupPrimaryKeys</tt> method and
* one in <tt>lookupObjectIdentities</tt>. These are built from the same select and "order
* by" clause, using a different where clause in each case. In order to use custom schema
* or column names, each of these SQL clauses can be customized, but they must be
* consistent with each other and with the expected result set generated by the
* default values.
*
* @author Ben Alex
*/
public class BasicLookupStrategy implements LookupStrategy {
// Column list shared by both lookup queries.
private static final String DEFAULT_SELECT_CLAUSE_COLUMNS = "select acl_object_identity.object_id_identity, "
        + "acl_entry.ace_order, " + "acl_object_identity.id as acl_id, " + "acl_object_identity.parent_object, "
        + "acl_object_identity.entries_inheriting, " + "acl_entry.id as ace_id, " + "acl_entry.mask, "
        + "acl_entry.granting, " + "acl_entry.audit_success, " + "acl_entry.audit_failure, "
        + "acl_sid.principal as ace_principal, " + "acl_sid.sid as ace_sid, "
        + "acli_sid.principal as acl_principal, " + "acli_sid.sid as acl_sid, " + "acl_class.class ";
// Extra column appended when ACL class ids carry a type.
private static final String DEFAULT_SELECT_CLAUSE_ACL_CLASS_ID_TYPE_COLUMN = ", acl_class.class_id_type ";
// Join/from fragment; deliberately ends with "where ( " so that repeated
// where clauses plus DEFAULT_ORDER_BY_CLAUSE complete the statement.
private static final String DEFAULT_SELECT_CLAUSE_FROM = "from acl_object_identity "
        + "left join acl_sid acli_sid on acli_sid.id = acl_object_identity.owner_sid "
        + "left join acl_class on acl_class.id = acl_object_identity.object_id_class "
        + "left join acl_entry on acl_object_identity.id = acl_entry.acl_object_identity "
        + "left join acl_sid on acl_entry.sid = acl_sid.id " + "where ( ";
public static final String DEFAULT_SELECT_CLAUSE = DEFAULT_SELECT_CLAUSE_COLUMNS + DEFAULT_SELECT_CLAUSE_FROM;
public static final String DEFAULT_ACL_CLASS_ID_SELECT_CLAUSE = DEFAULT_SELECT_CLAUSE_COLUMNS
        + DEFAULT_SELECT_CLAUSE_ACL_CLASS_ID_TYPE_COLUMN + DEFAULT_SELECT_CLAUSE_FROM;
// Where-clause fragments repeated once per item being looked up.
private static final String DEFAULT_LOOKUP_KEYS_WHERE_CLAUSE = "(acl_object_identity.id = ?)";
private static final String DEFAULT_LOOKUP_IDENTITIES_WHERE_CLAUSE = "(acl_object_identity.object_id_identity = ? and acl_class.class = ?)";
// Closes the "where ( " opened above, then orders rows for batch processing.
public static final String DEFAULT_ORDER_BY_CLAUSE = ") order by acl_object_identity.object_id_identity"
        + " asc, acl_entry.ace_order asc";
// Collaborators supplied or defaulted at construction time.
private final AclAuthorizationStrategy aclAuthorizationStrategy;
private ObjectIdentityGenerator objectIdentityGenerator;
private PermissionFactory permissionFactory = new DefaultPermissionFactory();
private final AclCache aclCache;
private final PermissionGrantingStrategy grantingStrategy;
private final JdbcTemplate jdbcTemplate;
// Maximum number of ObjectIdentities looked up per SQL round trip.
private int batchSize = 50;
// Reflective access to non-public internals of AclImpl and
// AccessControlEntryImpl; made accessible in the constructor.
private final Field fieldAces = FieldUtils.getField(AclImpl.class, "aces");
private final Field fieldAcl = FieldUtils.getField(AccessControlEntryImpl.class, "acl");
// SQL Customization fields
private String selectClause = DEFAULT_SELECT_CLAUSE;
private String lookupPrimaryKeysWhereClause = DEFAULT_LOOKUP_KEYS_WHERE_CLAUSE;
private String lookupObjectIdentitiesWhereClause = DEFAULT_LOOKUP_IDENTITIES_WHERE_CLAUSE;
private String orderByClause = DEFAULT_ORDER_BY_CLAUSE;
private AclClassIdUtils aclClassIdUtils;
/**
* Constructor accepting mandatory arguments
* @param dataSource to access the database
* @param aclCache the cache where fully-loaded elements can be stored
* @param aclAuthorizationStrategy authorization strategy (required)
*/
public BasicLookupStrategy(DataSource dataSource, AclCache aclCache,
        AclAuthorizationStrategy aclAuthorizationStrategy, AuditLogger auditLogger) {
    // Delegates to the main constructor, wrapping the audit logger in the
    // default permission-granting strategy.
    this(dataSource, aclCache, aclAuthorizationStrategy, new DefaultPermissionGrantingStrategy(auditLogger));
}
/**
* Creates a new instance
* @param dataSource to access the database
* @param aclCache the cache where fully-loaded elements can be stored
* @param aclAuthorizationStrategy authorization strategy (required)
* @param grantingStrategy the PermissionGrantingStrategy
*/
public BasicLookupStrategy(DataSource dataSource, AclCache aclCache,
        AclAuthorizationStrategy aclAuthorizationStrategy, PermissionGrantingStrategy grantingStrategy) {
    Assert.notNull(dataSource, "DataSource required");
    Assert.notNull(aclCache, "AclCache required");
    Assert.notNull(aclAuthorizationStrategy, "AclAuthorizationStrategy required");
    Assert.notNull(grantingStrategy, "grantingStrategy required");
    this.jdbcTemplate = new JdbcTemplate(dataSource);
    this.aclCache = aclCache;
    this.aclAuthorizationStrategy = aclAuthorizationStrategy;
    this.grantingStrategy = grantingStrategy;
    // Defaults for the non-final collaborators.
    this.objectIdentityGenerator = new ObjectIdentityRetrievalStrategyImpl();
    this.aclClassIdUtils = new AclClassIdUtils();
    // Allow reflective access to AclImpl.aces / AccessControlEntryImpl.acl.
    this.fieldAces.setAccessible(true);
    this.fieldAcl.setAccessible(true);
}
/**
 * Builds the full lookup SQL by sandwiching {@code requiredRepetitions}
 * copies of {@code repeatingSql}, joined with " or ", between the select
 * clause and the order-by clause.
 *
 * @param repeatingSql the per-item where fragment
 * @param requiredRepetitions how many items are being looked up; must be &gt; 0
 * @return the assembled SQL statement
 */
private String computeRepeatingSql(String repeatingSql, int requiredRepetitions) {
    Assert.isTrue(requiredRepetitions > 0, "requiredRepetitions must be > 0");
    String head = this.selectClause;
    String tail = this.orderByClause;
    // Presize: each repetition contributes the fragment plus " or " (4 chars).
    StringBuilder sql = new StringBuilder(
            head.length() + tail.length() + requiredRepetitions * (repeatingSql.length() + 4));
    sql.append(head);
    for (int rep = 0; rep < requiredRepetitions; rep++) {
        if (rep > 0) {
            sql.append(" or ");
        }
        sql.append(repeatingSql);
    }
    sql.append(tail);
    return sql.toString();
}
@SuppressWarnings("unchecked")
private List<AccessControlEntryImpl> readAces(AclImpl acl) {
    try {
        // Reflective read of AclImpl's non-public "aces" list.
        return (List<AccessControlEntryImpl>) this.fieldAces.get(acl);
    }
    catch (IllegalAccessException ex) {
        throw new IllegalStateException("Could not obtain AclImpl.aces field", ex);
    }
}
/**
 * Injects the owning ACL into an {@link AccessControlEntryImpl} via
 * reflection (the field has no public setter).
 *
 * @param ace the entry to mutate
 * @param acl the owning ACL to set on the entry
 * @throws IllegalStateException if reflective access is denied
 */
private void setAclOnAce(AccessControlEntryImpl ace, AclImpl acl) {
    try {
        this.fieldAcl.set(ace, acl);
    }
    catch (IllegalAccessException ex) {
        // Fix: message previously read "Could not or set AclImpl on
        // AccessControlEntryImpl fields".
        throw new IllegalStateException("Could not set AclImpl on AccessControlEntryImpl field", ex);
    }
}
private void setAces(AclImpl acl, List<AccessControlEntryImpl> aces) {
    try {
        // Reflective write of AclImpl's non-public "aces" list.
        this.fieldAces.set(acl, aces);
    }
    catch (IllegalAccessException ex) {
        throw new IllegalStateException("Could not set AclImpl entries", ex);
    }
}
/**
* Locates the primary key IDs specified in "findNow", adding AclImpl instances with
* StubAclParents to the "acls" Map.
* @param acls the AclImpls (with StubAclParents)
* @param findNow Long-based primary keys to retrieve
* @param sids
*/
private void lookupPrimaryKeys(final Map<Serializable, Acl> acls, final Set<Long> findNow, final List<Sid> sids) {
    Assert.notNull(acls, "ACLs are required");
    Assert.notEmpty(findNow, "Items to find now required");
    // One "(acl_object_identity.id = ?)" fragment per key to load.
    String sql = computeRepeatingSql(this.lookupPrimaryKeysWhereClause, findNow.size());
    Set<Long> parentsToLookup = this.jdbcTemplate.query(sql, (ps) -> setKeys(ps, findNow),
            new ProcessResultSet(acls, sids));
    // Lookup the parents, now that our JdbcTemplate has released the database
    // connection (SEC-547). NOTE(review): recursion presumably terminates
    // because ProcessResultSet only reports parents not already in "acls" —
    // confirm against ProcessResultSet.
    if (parentsToLookup.size() > 0) {
        lookupPrimaryKeys(acls, parentsToLookup, sids);
    }
}
/**
 * Binds each primary key in {@code findNow} to consecutive positional
 * parameters of the prepared statement, starting at index 1.
 *
 * @param ps the statement whose parameters are bound
 * @param findNow the keys to bind, in iteration order
 * @throws SQLException if a parameter cannot be set
 */
private void setKeys(PreparedStatement ps, Set<Long> findNow) throws SQLException {
    int parameterIndex = 1;
    for (Long key : findNow) {
        ps.setLong(parameterIndex, key);
        parameterIndex++;
    }
}
/**
* The main method.
* <p>
* WARNING: This implementation completely disregards the "sids" argument! Every item
* in the cache is expected to contain all SIDs. If you have serious performance needs
* (e.g. a very large number of SIDs per object identity), you'll probably want to
* develop a custom {@link LookupStrategy} implementation instead.
* <p>
* The implementation works in batch sizes specified by {@link #batchSize}.
* @param objects the identities to lookup (required)
* @param sids the SIDs for which identities are required (ignored by this
* implementation)
* @return a <tt>Map</tt> where keys represent the {@link ObjectIdentity} of the
* located {@link Acl} and values are the located {@link Acl} (never <tt>null</tt>
* although some entries may be missing; this method should not throw
* {@link NotFoundException}, as a chain of {@link LookupStrategy}s may be used to
* automatically create entries if required)
*/
@Override
public final Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> objects, List<Sid> sids) {
    Assert.isTrue(this.batchSize >= 1, "BatchSize must be >= 1");
    Assert.notEmpty(objects, "Objects to lookup required");
    // Map<ObjectIdentity,Acl>
    // contains FULLY loaded Acl objects
    Map<ObjectIdentity, Acl> result = new HashMap<>();
    Set<ObjectIdentity> currentBatchToLoad = new HashSet<>();
    for (int i = 0; i < objects.size(); i++) {
        final ObjectIdentity oid = objects.get(i);
        boolean aclFound = false;
        // Check we don't already have this ACL in the results
        if (result.containsKey(oid)) {
            aclFound = true;
        }
        // Check cache for the present ACL entry
        if (!aclFound) {
            Acl acl = this.aclCache.getFromCache(oid);
            // Ensure any cached element supports all the requested SIDs
            // (they should always, as our base impl doesn't filter on SID)
            if (acl != null) {
                Assert.state(acl.isSidLoaded(sids),
                        "Error: SID-filtered element detected when implementation does not perform SID filtering "
                                + "- have you added something to the cache manually?");
                result.put(acl.getObjectIdentity(), acl);
                aclFound = true;
            }
        }
        // Load the ACL from the database
        if (!aclFound) {
            currentBatchToLoad.add(oid);
        }
        // Is it time to load from JDBC the currentBatchToLoad?
        // Flush either when the batch is full or after the last element.
        if ((currentBatchToLoad.size() == this.batchSize) || ((i + 1) == objects.size())) {
            if (currentBatchToLoad.size() > 0) {
                Map<ObjectIdentity, Acl> loadedBatch = lookupObjectIdentities(currentBatchToLoad, sids);
                // Add loaded batch (all elements 100% initialized) to results
                result.putAll(loadedBatch);
                // Add the loaded batch to the cache
                for (Acl loadedAcl : loadedBatch.values()) {
                    this.aclCache.putInCache((AclImpl) loadedAcl);
                }
                currentBatchToLoad.clear();
            }
        }
    }
    return result;
}
/**
 * Looks up a batch of <code>ObjectIdentity</code>s directly from the database, bypassing
 * the cache entirely.
 * <p>
 * The caller is responsible for optimization issues, such as selecting the identities to
 * look up, ensuring the cache doesn't already contain them, and adding the returned
 * elements to the cache.
 * <p>
 * Returns fully valid <code>Acl</code>s, including properly-configured parent ACLs.
 * @param objectIdentities the identities to look up (never empty)
 * @param sids the SIDs the caller is interested in (may be {@code null})
 * @return a map of the located {@link Acl}s keyed by {@link ObjectIdentity}
 */
private Map<ObjectIdentity, Acl> lookupObjectIdentities(final Collection<ObjectIdentity> objectIdentities,
        List<Sid> sids) {
    Assert.notEmpty(objectIdentities, "Must provide identities to lookup");
    // Working map keyed by ACL primary key; parent references start out as
    // StubAclParent markers and are materialized below.
    Map<Serializable, Acl> acls = new HashMap<>();
    // Fetch every requested identity in a single statement; the extractor populates
    // "acls" and reports parent ids that still need resolving.
    String sql = computeRepeatingSql(this.lookupObjectIdentitiesWhereClause, objectIdentities.size());
    Set<Long> parentsToLookup = this.jdbcTemplate.query(sql,
            (ps) -> setupLookupObjectIdentitiesStatement(ps, objectIdentities), new ProcessResultSet(acls, sids));
    // Resolve parents only after the JdbcTemplate has released its database
    // connection (SEC-547)
    if (!parentsToLookup.isEmpty()) {
        lookupPrimaryKeys(acls, parentsToLookup, sids);
    }
    // Replace each StubAclParent marker with a fully materialized parent AclImpl
    Map<ObjectIdentity, Acl> resultMap = new HashMap<>();
    for (Acl rawAcl : acls.values()) {
        Assert.isInstanceOf(AclImpl.class, rawAcl, "Map should have contained an AclImpl");
        Assert.isInstanceOf(Long.class, ((AclImpl) rawAcl).getId(), "Acl.getId() must be Long");
        Acl converted = convert(acls, (Long) ((AclImpl) rawAcl).getId());
        resultMap.put(converted.getObjectIdentity(), converted);
    }
    return resultMap;
}
/**
 * Binds each requested {@code ObjectIdentity} into the repeated where-clause of the
 * prepared statement. Each identity consumes two consecutive placeholders:
 * the string form of its identifier followed by its type.
 * @param ps the statement produced from the repeating where clause
 * @param objectIdentities the identities to bind, in iteration order
 * @throws SQLException if a parameter cannot be set
 */
private void setupLookupObjectIdentitiesStatement(PreparedStatement ps, Collection<ObjectIdentity> objectIdentities)
        throws SQLException {
    // JDBC parameters are 1-based; advance one running index across the pairs.
    int paramIndex = 1;
    for (ObjectIdentity oid : objectIdentities) {
        // getIdentifier() is guaranteed non-null by the ObjectIdentity contract
        ps.setString(paramIndex++, oid.getIdentifier().toString());
        ps.setString(paramIndex++, oid.getType());
    }
}
/**
 * The final phase of converting the <code>Map</code> of <code>AclImpl</code>
 * instances which contain <code>StubAclParent</code>s into proper, valid
 * <code>AclImpl</code>s with correct ACL parents.
 * @param inputMap the unconverted <code>AclImpl</code>s, keyed by primary key
 * @param currentIdentity the primary key of the <code>Acl</code> to convert; any
 * stubbed parent is converted first via recursion
 * @return the fully materialized <code>AclImpl</code> for <code>currentIdentity</code>
 */
private AclImpl convert(Map<Serializable, Acl> inputMap, Long currentIdentity) {
Assert.notEmpty(inputMap, "InputMap required");
Assert.notNull(currentIdentity, "CurrentIdentity required");
// Retrieve this Acl from the InputMap
Acl uncastAcl = inputMap.get(currentIdentity);
Assert.isInstanceOf(AclImpl.class, uncastAcl, "The inputMap contained a non-AclImpl");
AclImpl inputAcl = (AclImpl) uncastAcl;
Acl parent = inputAcl.getParentAcl();
if ((parent != null) && parent instanceof StubAclParent) {
// Lookup the parent: recurse so the whole ancestor chain is materialized
StubAclParent stubAclParent = (StubAclParent) parent;
parent = convert(inputMap, stubAclParent.getId());
}
// Now we have the parent (if there is one), create the true AclImpl
AclImpl result = new AclImpl(inputAcl.getObjectIdentity(), inputAcl.getId(), this.aclAuthorizationStrategy,
this.grantingStrategy, parent, null, inputAcl.isEntriesInheriting(), inputAcl.getOwner());
// Copy the "aces" from the input to the destination
// Obtain the "aces" from the input ACL
List<AccessControlEntryImpl> aces = readAces(inputAcl);
// Create a list in which to store the "aces" for the "result" AclImpl instance
List<AccessControlEntryImpl> acesNew = new ArrayList<>();
// Iterate over the "aces" input and replace each nested
// AccessControlEntryImpl.getAcl() with the new "result" AclImpl instance
// This ensures StubAclParent instances are removed, as per SEC-951
for (AccessControlEntryImpl ace : aces) {
setAclOnAce(ace, result);
acesNew.add(ace);
}
// Finally, now that the "aces" have been converted to have the "result" AclImpl
// instance, modify the "result" AclImpl instance
setAces(result, acesNew);
return result;
}
/**
 * Creates a particular implementation of {@link Sid} depending on the arguments.
 * @param isPrincipal whether the sid names a user (principal) or a granted authority
 * such as a role
 * @param sid the unique identifier of the sid; in a typical ACL database schema it is
 * located in the {@code acl_sid} table, {@code sid} column
 * @return a {@link PrincipalSid} for principals, otherwise a {@link GrantedAuthoritySid}
 */
protected Sid createSid(boolean isPrincipal, String sid) {
    return isPrincipal ? new PrincipalSid(sid) : new GrantedAuthoritySid(sid);
}
/**
 * Sets the {@code PermissionFactory} instance which will be used to convert loaded
 * permission data values to {@code Permission}s. A {@code DefaultPermissionFactory}
 * will be used by default.
 * @param permissionFactory the factory to use; must not be {@code null}
 */
public final void setPermissionFactory(PermissionFactory permissionFactory) {
    // Fail fast: a null factory would otherwise only surface as an NPE deep inside
    // result-set processing when the first ACE mask is converted. Mirrors the
    // validation performed by setObjectIdentityGenerator.
    Assert.notNull(permissionFactory, "permissionFactory cannot be null");
    this.permissionFactory = permissionFactory;
}
/**
 * Sets the maximum number of {@code ObjectIdentity}s resolved per database query.
 * @param batchSize the batch size; must be at least 1
 */
public final void setBatchSize(int batchSize) {
    // readAclsById asserts batchSize >= 1 on every lookup; validating here too makes
    // a misconfiguration fail at configuration time rather than on first use.
    Assert.isTrue(batchSize >= 1, "BatchSize must be >= 1");
    this.batchSize = batchSize;
}
/**
 * The SQL for the select clause. If customizing in order to modify column names,
 * schema etc, the other SQL customization fields must also be set to match.
 * @param selectClause the select clause, which defaults to
 * {@link #DEFAULT_SELECT_CLAUSE}.
 */
public final void setSelectClause(String selectClause) {
this.selectClause = selectClause;
}
/**
 * The SQL for the where clause used in the <tt>lookupPrimaryKey</tt> method.
 */
public final void setLookupPrimaryKeysWhereClause(String lookupPrimaryKeysWhereClause) {
this.lookupPrimaryKeysWhereClause = lookupPrimaryKeysWhereClause;
}
/**
 * The SQL for the where clause used in the <tt>lookupObjectIdentities</tt> method.
 */
public final void setLookupObjectIdentitiesWhereClause(String lookupObjectIdentitiesWhereClause) {
this.lookupObjectIdentitiesWhereClause = lookupObjectIdentitiesWhereClause;
}
/**
 * The SQL for the "order by" clause used in both queries.
 */
public final void setOrderByClause(String orderByClause) {
this.orderByClause = orderByClause;
}
/**
 * Switches the select clause to the variant that also reads the ACL class id type.
 * <p>
 * NOTE: this is a one-way switch — passing {@code false} does NOT restore the
 * default select clause; it simply does nothing. It must be called before any
 * custom select clause is installed, otherwise the guard below rejects the call.
 * @param aclClassIdSupported whether the schema stores the class id type
 */
public final void setAclClassIdSupported(boolean aclClassIdSupported) {
if (aclClassIdSupported) {
// Only legal while the select clause is still the default; a customized
// clause must itself include the class id column instead.
Assert.isTrue(this.selectClause.equals(DEFAULT_SELECT_CLAUSE),
"Cannot set aclClassIdSupported and override the select clause; "
+ "just override the select clause");
this.selectClause = DEFAULT_ACL_CLASS_ID_SELECT_CLAUSE;
}
}
/**
 * Sets the strategy used to turn (identifier, type) pairs read from the database
 * into {@code ObjectIdentity} instances.
 * @param objectIdentityGenerator the generator; must not be {@code null}
 */
public final void setObjectIdentityGenerator(ObjectIdentityGenerator objectIdentityGenerator) {
Assert.notNull(objectIdentityGenerator, "objectIdentityGenerator cannot be null");
this.objectIdentityGenerator = objectIdentityGenerator;
}
/**
 * Sets the conversion service used when converting identifier columns to the
 * target id type (e.g. String to UUID). Replaces the internal AclClassIdUtils.
 * @param conversionService the conversion service to delegate to
 */
public final void setConversionService(ConversionService conversionService) {
this.aclClassIdUtils = new AclClassIdUtils(conversionService);
}
/**
 * {@link ResultSetExtractor} that converts each row of the lookup query into an
 * {@link AclImpl} (with any parent represented by a {@link StubAclParent} marker)
 * and accumulates them into the shared <tt>acls</tt> map supplied by the caller.
 */
private class ProcessResultSet implements ResultSetExtractor<Set<Long>> {
// Shared output map, keyed by ACL primary key; mutated in place.
private final Map<Serializable, Acl> acls;
// SIDs the caller is interested in; may be null (meaning "all").
private final List<Sid> sids;
ProcessResultSet(Map<Serializable, Acl> acls, List<Sid> sids) {
Assert.notNull(acls, "ACLs cannot be null");
this.acls = acls;
this.sids = sids; // can be null
}
/**
 * Implementation of {@link ResultSetExtractor#extractData(ResultSet)}. Creates an
 * {@link Acl} for each row in the {@link ResultSet} and ensures it is in member
 * field <tt>acls</tt>. Any {@link Acl} with a parent will have the parents id
 * returned in a set. The returned set of ids may requires further processing.
 * @param rs The {@link ResultSet} to be processed
 * @return a list of parent IDs remaining to be looked up (may be empty, but never
 * <tt>null</tt>)
 * @throws SQLException
 */
@Override
public Set<Long> extractData(ResultSet rs) throws SQLException {
Set<Long> parentIdsToLookup = new HashSet<>(); // Set of parent_id Longs
while (rs.next()) {
// Convert current row into an Acl (albeit with a StubAclParent)
convertCurrentResultIntoObject(this.acls, rs);
// Figure out if this row means we need to lookup another parent
// NOTE: ResultSet.getLong returns 0 for SQL NULL, so parentId == 0 means
// "no parent" (valid ids are assumed to be non-zero).
long parentId = rs.getLong("parent_object");
if (parentId != 0) {
// See if it's already in the "acls"
if (this.acls.containsKey(parentId)) {
continue; // skip this while iteration
}
// Now try to find it in the cache
MutableAcl cached = BasicLookupStrategy.this.aclCache.getFromCache(parentId);
if ((cached == null) || !cached.isSidLoaded(this.sids)) {
parentIdsToLookup.add(parentId);
}
else {
// Pop into the acls map, so our convert method doesn't
// need to deal with an unsynchronized AclCache
this.acls.put(cached.getId(), cached);
}
}
}
// Return the parents left to lookup to the caller
return parentIdsToLookup;
}
/**
 * Accepts the current <code>ResultSet</code> row, and converts it into an
 * <code>AclImpl</code> that contains a <code>StubAclParent</code>
 * @param acls the Map we should add the converted Acl to
 * @param rs the ResultSet focused on a current row
 * @throws SQLException if something goes wrong converting values
 * @throws ConversionException if can't convert to the desired Java type
 */
private void convertCurrentResultIntoObject(Map<Serializable, Acl> acls, ResultSet rs) throws SQLException {
Long id = rs.getLong("acl_id");
// If we already have an ACL for this ID, just create the ACE
Acl acl = acls.get(id);
if (acl == null) {
// Make an AclImpl and pop it into the Map
// If the Java type is a String, check to see if we can convert it to the
// target id type, e.g. UUID.
Serializable identifier = (Serializable) rs.getObject("object_id_identity");
identifier = BasicLookupStrategy.this.aclClassIdUtils.identifierFrom(identifier, rs);
ObjectIdentity objectIdentity = BasicLookupStrategy.this.objectIdentityGenerator
.createObjectIdentity(identifier, rs.getString("class"));
Acl parentAcl = null;
// getLong returns 0 for SQL NULL, which we treat as "no parent"
long parentAclId = rs.getLong("parent_object");
if (parentAclId != 0) {
parentAcl = new StubAclParent(parentAclId);
}
boolean entriesInheriting = rs.getBoolean("entries_inheriting");
Sid owner = createSid(rs.getBoolean("acl_principal"), rs.getString("acl_sid"));
acl = new AclImpl(objectIdentity, id, BasicLookupStrategy.this.aclAuthorizationStrategy,
BasicLookupStrategy.this.grantingStrategy, parentAcl, null, entriesInheriting, owner);
acls.put(id, acl);
}
// Add an extra ACE to the ACL (ORDER BY maintains the ACE list order)
// It is permissible to have no ACEs in an ACL (which is detected by a null
// ACE_SID)
if (rs.getString("ace_sid") != null) {
Long aceId = rs.getLong("ace_id");
Sid recipient = createSid(rs.getBoolean("ace_principal"), rs.getString("ace_sid"));
int mask = rs.getInt("mask");
Permission permission = BasicLookupStrategy.this.permissionFactory.buildFromMask(mask);
boolean granting = rs.getBoolean("granting");
boolean auditSuccess = rs.getBoolean("audit_success");
boolean auditFailure = rs.getBoolean("audit_failure");
AccessControlEntryImpl ace = new AccessControlEntryImpl(aceId, acl, recipient, permission, granting,
auditSuccess, auditFailure);
// Field acesField = FieldUtils.getField(AclImpl.class, "aces");
// Mutate the ACL's internal ACE list directly (readAces exposes it)
List<AccessControlEntryImpl> aces = readAces((AclImpl) acl);
// Add the ACE if it doesn't already exist in the ACL.aces field
if (!aces.contains(ace)) {
aces.add(ace);
}
}
}
}
/**
 * Placeholder for a parent {@link Acl} whose row has not been loaded yet. Only the
 * parent's primary key is carried; every {@link Acl} method throws
 * {@link UnsupportedOperationException}. Instances are replaced with real
 * <code>AclImpl</code>s by the <tt>convert</tt> method before results are returned.
 */
private static class StubAclParent implements Acl {
// Primary key of the parent ACL still to be resolved.
private final Long id;
StubAclParent(Long id) {
this.id = id;
}
Long getId() {
return this.id;
}
@Override
public List<AccessControlEntry> getEntries() {
throw new UnsupportedOperationException("Stub only");
}
@Override
public ObjectIdentity getObjectIdentity() {
throw new UnsupportedOperationException("Stub only");
}
@Override
public Sid getOwner() {
throw new UnsupportedOperationException("Stub only");
}
@Override
public Acl getParentAcl() {
throw new UnsupportedOperationException("Stub only");
}
@Override
public boolean isEntriesInheriting() {
throw new UnsupportedOperationException("Stub only");
}
@Override
public boolean isGranted(List<Permission> permission, List<Sid> sids, boolean administrativeMode)
throws NotFoundException, UnloadedSidException {
throw new UnsupportedOperationException("Stub only");
}
@Override
public boolean isSidLoaded(List<Sid> sids) {
throw new UnsupportedOperationException("Stub only");
}
}
}
| |
/*
* Copyright (c) 2010-2016 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.common.mapping;
import static org.testng.AssertJUnit.assertEquals;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import javax.xml.bind.JAXBException;
import javax.xml.namespace.QName;
import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType;
import org.xml.sax.SAXException;
import com.evolveum.midpoint.model.common.expression.ExpressionFactory;
import com.evolveum.midpoint.model.common.expression.ExpressionTestUtil;
import com.evolveum.midpoint.model.common.expression.ObjectDeltaObject;
import com.evolveum.midpoint.model.common.expression.Source;
import com.evolveum.midpoint.model.common.expression.StringPolicyResolver;
import com.evolveum.midpoint.prism.ItemDefinition;
import com.evolveum.midpoint.prism.OriginType;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismObjectDefinition;
import com.evolveum.midpoint.prism.PrismPropertyDefinition;
import com.evolveum.midpoint.prism.PrismPropertyValue;
import com.evolveum.midpoint.prism.crypto.AESProtector;
import com.evolveum.midpoint.prism.crypto.EncryptionException;
import com.evolveum.midpoint.prism.delta.ItemDelta;
import com.evolveum.midpoint.prism.delta.ObjectDelta;
import com.evolveum.midpoint.prism.delta.PrismValueDeltaSetTriple;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.MidPointPrismContextFactory;
import com.evolveum.midpoint.schema.constants.ExpressionConstants;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ObjectResolver;
import com.evolveum.midpoint.test.util.DirectoryFileObjectResolver;
import com.evolveum.midpoint.test.util.MidPointTestConstants;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.PrettyPrinter;
import com.evolveum.midpoint.util.exception.ExpressionEvaluationException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.MappingType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.StringPolicyType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ValuePolicyType;
/**
* The class that takes care of all the ornaments of value construction execution. It is used to make the
* tests easy to write.
*
* @author Radovan Semancik
*
*/
public class MappingTestEvaluator {
public static File TEST_DIR = new File(MidPointTestConstants.TEST_RESOURCES_DIR, "mapping");
public static final File USER_OLD_FILE = new File(TEST_DIR, "user-jack.xml");
public static final File ACCOUNT_FILE = new File(TEST_DIR, "account-jack.xml");
public static final String USER_OLD_OID = "2f9b9299-6f45-498f-bc8e-8d17c6b93b20";
private static final File PASSWORD_POLICY_FILE = new File(TEST_DIR, "password-policy.xml");
private PrismContext prismContext;
private MappingFactory mappingFactory;
AESProtector protector;
/**
 * Returns the prism context initialized by {@link #init()}; {@code null} before init.
 */
public PrismContext getPrismContext() {
return prismContext;
}
/**
 * One-time setup: initializes the prism context, protector, expression factory and
 * the mapping factory used by all the createMapping* helpers. Must be called before
 * any other method of this evaluator.
 * @throws SAXException if schema parsing fails
 * @throws IOException if test resources cannot be read
 * @throws SchemaException if the prism schemas are inconsistent
 */
public void init() throws SAXException, IOException, SchemaException {
    PrettyPrinter.setDefaultNamespacePrefix(MidPointConstants.NS_MIDPOINT_PUBLIC_PREFIX);
    PrismTestUtil.resetPrismContext(MidPointPrismContextFactory.FACTORY);
    prismContext = PrismTestUtil.createInitializedPrismContext();
    // Resolver that loads referenced objects from the test objects directory.
    ObjectResolver objectResolver = new DirectoryFileObjectResolver(MidPointTestConstants.OBJECTS_DIR);
    protector = ExpressionTestUtil.createInitializedProtector(prismContext);
    ExpressionFactory factory =
            ExpressionTestUtil.createInitializedExpressionFactory(objectResolver, protector, prismContext, null);
    mappingFactory = new MappingFactory();
    mappingFactory.setExpressionFactory(factory);
    mappingFactory.setObjectResolver(objectResolver);
    mappingFactory.setPrismContext(prismContext);
    mappingFactory.setProtector(protector);
    // Profiling on, so tests get mapping diagnostics in the logs.
    mappingFactory.setProfiling(true);
}
/**
 * Returns the protector used to encrypt test credentials; {@code null} before init.
 */
public AESProtector getProtector() {
return protector;
}
// Convenience overloads: all of these normalize their target-property argument to an
// ItemPath (via toPath) and delegate to the two createMappingBuilder variants below.
/**
 * Creates a mapping with an explicit string policy; target given by property name.
 */
public <T> Mapping<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, final StringPolicyType policy, String defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException {
return this.<T>createMappingBuilder(filename, testName, policy, toPath(defaultTargetPropertyName), userDelta).build();
}
/**
 * Creates a mapping without a string policy; target given by property name.
 */
public <T> Mapping<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, String defaultTargetPropertyName,
ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException {
return this.<T>createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta).build();
}
/**
 * Returns the builder (not yet built); target given by property name.
 */
public <T> Mapping.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, String defaultTargetPropertyName,
ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException {
return createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta);
}
/**
 * Creates a mapping without a string policy; target given by QName.
 */
public <T> Mapping<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, QName defaultTargetPropertyName,
ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException {
return this.<T>createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta).build();
}
/**
 * Creates a mapping with an explicitly supplied "old" user object.
 */
public <T> Mapping<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, String defaultTargetPropertyName,
ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld) throws SchemaException, IOException, JAXBException {
return this.<T>createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta, userOld).build();
}
/**
 * Returns the builder with an explicitly supplied "old" user object.
 */
public <T> Mapping.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, String defaultTargetPropertyName,
ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld) throws SchemaException, IOException, JAXBException {
return this.createMappingBuilder(filename, testName, null, toPath(defaultTargetPropertyName), userDelta, userOld);
}
/**
 * Creates a mapping without a string policy; target given by ItemPath.
 */
public <T> Mapping<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMapping(String filename, String testName, ItemPath defaultTargetPropertyName, ObjectDelta<UserType> userDelta) throws SchemaException, IOException, JAXBException, EncryptionException {
return this.<T>createMappingBuilder(filename, testName, null, defaultTargetPropertyName, userDelta).build();
}
/**
 * Builds a mapping builder, deriving the "old" user object automatically: for an ADD
 * delta there is no pre-existing user, otherwise the standard test user is loaded.
 * @param policy optional string policy returned by the policy resolver
 * @param defaultTargetPropertyPath path of the default mapping target (may be null)
 * @param userDelta the delta to apply (may be null)
 */
public <T> Mapping.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, final StringPolicyType policy,
        ItemPath defaultTargetPropertyPath, ObjectDelta<UserType> userDelta)
        throws SchemaException, IOException, JAXBException, EncryptionException {
    PrismObject<UserType> userOld = (userDelta != null && userDelta.isAdd()) ? null : getUserOld();
    return createMappingBuilder(filename, testName, policy, defaultTargetPropertyPath, userDelta, userOld);
}
/**
 * Core factory: parses the mapping definition file and wires up the full evaluation
 * environment — source/target contexts, $user and $account variables, the string
 * policy resolver and the default target definition. All other createMapping* helpers
 * funnel into this method.
 * @param filename mapping definition file, relative to TEST_DIR
 * @param testName used as the mapping short description
 * @param policy string policy returned by the installed resolver (may be null)
 * @param defaultTargetPropertyPath default target path in the user definition (may be null)
 * @param userDelta delta forming the source ODO (may be null)
 * @param userOld "old" user object for the source ODO (may be null for ADD deltas)
 * @throws IllegalArgumentException if the target path has no definition in UserType
 */
public <T> Mapping.Builder<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createMappingBuilder(String filename, String testName, final StringPolicyType policy,
ItemPath defaultTargetPropertyPath, ObjectDelta<UserType> userDelta, PrismObject<UserType> userOld)
throws SchemaException, IOException, JAXBException {
MappingType mappingType = PrismTestUtil.parseAtomicValue(
new File(TEST_DIR, filename), MappingType.COMPLEX_TYPE);
Mapping.Builder<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mappingBuilder = mappingFactory.createMappingBuilder(mappingType, testName);
// Source context: user
ObjectDeltaObject<UserType> userOdo = new ObjectDeltaObject<>(userOld, userDelta, null);
userOdo.recompute();
mappingBuilder.setSourceContext(userOdo);
// Variable $user
mappingBuilder.addVariableDefinition(ExpressionConstants.VAR_USER, userOdo);
// Variable $account
PrismObject<ShadowType> account = getAccount();
ObjectDeltaObject<ShadowType> accountOdo = new ObjectDeltaObject<ShadowType>(account , null, null);
accountOdo.recompute();
mappingBuilder.addVariableDefinition(ExpressionConstants.VAR_ACCOUNT, accountOdo);
// Target context: user
PrismObjectDefinition<UserType> userDefinition = getUserDefinition();
mappingBuilder.setTargetContext(userDefinition);
// Resolver that simply hands back the policy supplied by the caller, regardless
// of the output path/definition it is notified about.
StringPolicyResolver stringPolicyResolver = new StringPolicyResolver() {
ItemPath outputPath;
ItemDefinition outputDefinition;
@Override
public void setOutputPath(ItemPath outputPath) {
this.outputPath = outputPath;
}
@Override
public void setOutputDefinition(ItemDefinition outputDefinition) {
this.outputDefinition = outputDefinition;
}
@Override
public StringPolicyType resolve() {
return policy;
}
};
mappingBuilder.setStringPolicyResolver(stringPolicyResolver);
// Default target
if (defaultTargetPropertyPath != null) {
PrismPropertyDefinition<T> targetDefDefinition = userDefinition.findItemDefinition(defaultTargetPropertyPath);
if (targetDefDefinition == null) {
throw new IllegalArgumentException("The item path '"+defaultTargetPropertyPath+"' does not have a definition in "+userDefinition);
}
mappingBuilder.setDefaultTargetDefinition(targetDefDefinition);
}
return mappingBuilder;
}
/**
 * Creates an inbound mapping (account/shadow to user direction) with the given delta
 * as the default $input source.
 * <p>
 * NOTE(review): the method name contains a typo ("Inboud" instead of "Inbound") but is
 * part of the public API used by existing tests, so it is kept for compatibility.
 * @param delta the item delta applied to the default source
 * @param user becomes both $user and $focus variables
 * @param account becomes both $account and $shadow variables
 * @param resource recorded as the origin object of the mapping
 * @param policy string policy returned by the installed resolver (may be null)
 */
public <T> Mapping<PrismPropertyValue<T>, PrismPropertyDefinition<T>> createInboudMapping(String filename, String testName, ItemDelta delta, UserType user, ShadowType account, ResourceType resource, final StringPolicyType policy) throws SchemaException, IOException, JAXBException{
MappingType mappingType = PrismTestUtil.parseAtomicValue(
new File(TEST_DIR, filename), MappingType.COMPLEX_TYPE);
Mapping.Builder<PrismPropertyValue<T>,PrismPropertyDefinition<T>> builder = mappingFactory.createMappingBuilder(mappingType, testName);
// Default source $input carries the inbound delta
Source<PrismPropertyValue<T>,PrismPropertyDefinition<T>> defaultSource = new Source<>(null, delta, null, ExpressionConstants.VAR_INPUT);
defaultSource.recompute();
builder.setDefaultSource(defaultSource);
builder.setTargetContext(getUserDefinition());
builder.addVariableDefinition(ExpressionConstants.VAR_USER, user);
builder.addVariableDefinition(ExpressionConstants.VAR_FOCUS, user);
builder.addVariableDefinition(ExpressionConstants.VAR_ACCOUNT, account.asPrismObject());
builder.addVariableDefinition(ExpressionConstants.VAR_SHADOW, account.asPrismObject());
// Resolver that simply hands back the caller-supplied policy
StringPolicyResolver stringPolicyResolver = new StringPolicyResolver() {
ItemPath outputPath;
ItemDefinition outputDefinition;
@Override
public void setOutputPath(ItemPath outputPath) {
this.outputPath = outputPath;
}
@Override
public void setOutputDefinition(ItemDefinition outputDefinition) {
this.outputDefinition = outputDefinition;
}
@Override
public StringPolicyType resolve() {
return policy;
}
};
builder.setStringPolicyResolver(stringPolicyResolver);
builder.setOriginType(OriginType.INBOUND);
builder.setOriginObject(resource);
return builder.build();
}
/**
 * Loads the standard "old" test user from USER_OLD_FILE and encrypts its password
 * in place so it matches what midPoint stores at runtime.
 * NOTE(review): assumes the test user file always contains credentials/password/value;
 * a file without them would NPE here — confirm against user-jack.xml.
 */
protected PrismObject<UserType> getUserOld() throws SchemaException, EncryptionException, IOException {
PrismObject<UserType> user = PrismTestUtil.parseObject(USER_OLD_FILE);
ProtectedStringType passwordPs = user.asObjectable().getCredentials().getPassword().getValue();
protector.encrypt(passwordPs);
return user;
}
/**
 * Loads the standard test account shadow from ACCOUNT_FILE.
 */
protected PrismObject<ShadowType> getAccount() throws SchemaException, IOException {
return PrismTestUtil.parseObject(ACCOUNT_FILE);
}
/**
 * Returns the compile-time UserType object definition from the schema registry.
 */
public PrismObjectDefinition<UserType> getUserDefinition() {
return prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(UserType.class);
}
/**
 * Evaluates the given mapping file with no user delta (old user object only) and
 * asserts the operation result is a success.
 * @param defaultTargetPropertyPath path of the default mapping target
 * @return the mapping's output triple (consistency-checked), or {@code null}
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName,
        ItemPath defaultTargetPropertyPath)
        throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    Mapping<PrismPropertyValue<T>, PrismPropertyDefinition<T>> mapping =
            createMapping(filename, testName, defaultTargetPropertyPath, null);
    OperationResult result = new OperationResult(testName);
    mapping.evaluate(null, result);
    assertResult(result);
    PrismValueDeltaSetTriple<PrismPropertyValue<T>> triple = mapping.getOutputTriple();
    if (triple != null) {
        // Sanity-check internal consistency of the produced triple
        triple.checkConsistence();
    }
    return triple;
}
/**
 * Evaluates the given mapping file with no user delta; target given by QName.
 * Delegates to the ItemPath variant to avoid duplicating the
 * evaluate/assert/consistency-check sequence (same behavior: the QName was already
 * being normalized via toPath inside createMapping).
 * @return the mapping's output triple (consistency-checked), or {@code null}
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName,
QName defaultTargetPropertyName)
throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    return this.<T,I>evaluateMapping(filename, testName, toPath(defaultTargetPropertyName));
}
/**
 * Evaluates the given mapping file with no user delta; target given by property name.
 * Delegates to the ItemPath variant to avoid duplicating the
 * evaluate/assert/consistency-check sequence (same behavior: the name was already
 * being normalized via toPath inside createMapping).
 * @return the mapping's output triple (consistency-checked), or {@code null}
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMapping(String filename, String testName,
String defaultTargetPropertyName)
throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    return this.<T,I>evaluateMapping(filename, testName, toPath(defaultTargetPropertyName));
}
/**
 * Asserts that the operation result is a success. An empty result means nothing was
 * recorded, which is acceptable and skips the check.
 */
public void assertResult(OperationResult opResult) {
    if (!opResult.isEmpty()) {
        opResult.computeStatus();
        TestUtil.assertSuccess(opResult);
    }
}
/**
 * Evaluates a mapping against an ADD-property delta on the test user; property-name
 * variant that normalizes both names to ItemPaths and delegates below.
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicAdd(String filename, String testName,
String defaultTargetPropertyName,
String changedPropertyName, I... valuesToAdd) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
return evaluateMappingDynamicAdd(filename, testName, toPath(defaultTargetPropertyName), changedPropertyName, valuesToAdd);
}
/**
 * Evaluates a mapping against an ADD-property delta on the test user.
 * @param changedPropertyName property (in the common namespace) receiving the values
 * @param valuesToAdd values added by the delta
 * @return the mapping's output triple (consistency-checked), or <tt>null</tt>
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicAdd(String filename, String testName,
ItemPath defaultTargetPropertyPath,
String changedPropertyName, I... valuesToAdd) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
ObjectDelta<UserType> userDelta = ObjectDelta.createModificationAddProperty(UserType.class, USER_OLD_OID, toPath(changedPropertyName),
prismContext, valuesToAdd);
Mapping<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyPath, userDelta);
OperationResult opResult = new OperationResult(testName);
mapping.evaluate(null, opResult);
assertResult(opResult);
PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple();
if (outputTriple != null) {
outputTriple.checkConsistence();
}
return outputTriple;
}
/**
 * Evaluates a mapping against a DELETE-property delta on the test user.
 * @param changedPropertyName property (in the common namespace) losing the values
 * @param valuesToDelete values removed by the delta (renamed from the misleading
 * "valuesToAdd"; Java parameter names are not part of the call-site contract)
 * @return the mapping's output triple (consistency-checked), or {@code null}
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicDelete(String filename, String testName,
String defaultTargetPropertyName,
String changedPropertyName, I... valuesToDelete) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    ObjectDelta<UserType> userDelta = ObjectDelta.createModificationDeleteProperty(UserType.class, USER_OLD_OID, toPath(changedPropertyName),
            prismContext, valuesToDelete);
    Mapping<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping = createMapping(filename, testName, defaultTargetPropertyName, userDelta);
    OperationResult opResult = new OperationResult(testName);
    mapping.evaluate(null, opResult);
    assertResult(opResult);
    PrismValueDeltaSetTriple<PrismPropertyValue<T>> outputTriple = mapping.getOutputTriple();
    if (outputTriple != null) {
        outputTriple.checkConsistence();
    }
    return outputTriple;
}
/**
 * Evaluates a mapping against a REPLACE-property delta; both the target and the
 * changed property are given by name. Delegates to the (ItemPath, ItemPath) variant
 * to remove a third copy of the evaluate/assert boilerplate; behavior is unchanged
 * because both names were already normalized via toPath.
 * @return the mapping's output triple (consistency-checked), or {@code null}
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
String defaultTargetPropertyName,
String changedPropertyName, I... valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    return this.<T,I>evaluateMappingDynamicReplace(filename, testName, toPath(defaultTargetPropertyName),
            toPath(changedPropertyName), valuesToReplace);
}
/**
 * Evaluates a mapping against a REPLACE-property delta; target given by name, changed
 * property by ItemPath. Delegates to the (ItemPath, ItemPath) variant to remove
 * duplicated evaluate/assert boilerplate; behavior is unchanged because the target
 * name was already normalized via toPath.
 * @return the mapping's output triple (consistency-checked), or {@code null}
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
String defaultTargetPropertyName,
ItemPath changedPropertyName, I... valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    return this.<T,I>evaluateMappingDynamicReplace(filename, testName, toPath(defaultTargetPropertyName),
            changedPropertyName, valuesToReplace);
}
/**
 * Evaluates a mapping against a REPLACE-property delta; target given by ItemPath,
 * changed property by name. Delegates to the (ItemPath, ItemPath) variant to remove
 * duplicated evaluate/assert boilerplate; behavior is unchanged because the changed
 * property name was already normalized via toPath.
 * @return the mapping's output triple (consistency-checked), or {@code null}
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
ItemPath defaultTargetPropertyName,
String changedPropertyName, I... valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    return this.<T,I>evaluateMappingDynamicReplace(filename, testName, defaultTargetPropertyName,
            toPath(changedPropertyName), valuesToReplace);
}
/**
 * Evaluates a mapping against a delta that replaces the values of the given
 * user property (both target and changed property given as paths), then
 * consistency-checks and returns the output triple.
 *
 * @param filename mapping definition file to load
 * @param testName name used for the operation result
 * @param defaultTargetPropertyName default target property path
 * @param changedPropertyName path of the property whose values are replaced
 * @param valuesToReplace new values for the changed property
 * @return the mapping output triple; may be null
 */
public <T,I> PrismValueDeltaSetTriple<PrismPropertyValue<T>> evaluateMappingDynamicReplace(String filename, String testName,
        ItemPath defaultTargetPropertyName,
        ItemPath changedPropertyName, I... valuesToReplace) throws SchemaException, IOException, JAXBException, ExpressionEvaluationException, ObjectNotFoundException, EncryptionException {
    ObjectDelta<UserType> delta = ObjectDelta.createModificationReplaceProperty(
            UserType.class, USER_OLD_OID, changedPropertyName, prismContext, valuesToReplace);
    Mapping<PrismPropertyValue<T>,PrismPropertyDefinition<T>> mapping =
            createMapping(filename, testName, defaultTargetPropertyName, delta);
    OperationResult result = new OperationResult(testName);
    mapping.evaluate(null, result);
    assertResult(result);
    PrismValueDeltaSetTriple<PrismPropertyValue<T>> triple = mapping.getOutputTriple();
    if (triple != null) {
        triple.checkConsistence();
    }
    return triple;
}
/**
 * Builds a single-segment item path for a property in the common (NS_C)
 * namespace.
 *
 * @param propertyName local name of the property
 * @return path addressing the property
 */
public ItemPath toPath(String propertyName) {
    QName qname = new QName(SchemaConstants.NS_C, propertyName);
    return new ItemPath(qname);
}
/**
 * Builds a single-segment item path from a fully qualified property name.
 *
 * @param propertyName qualified name of the property
 * @return path consisting of the given name only
 */
public ItemPath toPath(QName propertyName) {
    return new ItemPath(propertyName);
}
/**
 * Asserts that the given value set contains exactly one value and returns it.
 *
 * @param setName human-readable name of the set, used in the failure message
 * @param set the property values to inspect
 * @return the real value of the single element
 */
public static <T> T getSingleValue(String setName, Collection<PrismPropertyValue<T>> set) {
    assertEquals("Expected single value in "+setName+" but found "+set.size()+" values: "+set, 1, set.size());
    return set.iterator().next().getValue();
}
/**
 * Loads the test password policy file and returns its string policy part.
 *
 * @return string policy parsed from PASSWORD_POLICY_FILE
 */
public StringPolicyType getStringPolicy() throws SchemaException, IOException {
    PrismObject<ValuePolicyType> policyObject = PrismTestUtil.parseObject(PASSWORD_POLICY_FILE);
    ValuePolicyType policy = policyObject.asObjectable();
    return policy.getStringPolicy();
}
/**
 * Encrypts the given clear-text string with the test protector.
 *
 * @param string clear-text value to encrypt
 * @return the encrypted protected-string object
 */
public Object createProtectedString(String string) throws EncryptionException {
    return protector.encryptString(string);
}
/**
 * Asserts that the set contains exactly one protected-string value which
 * decrypts to the expected clear text.
 *
 * @param desc description of the set, used in failure messages
 * @param set the protected-string values to inspect
 * @param expected expected decrypted value
 */
public void assertProtectedString(String desc,
        Collection<PrismPropertyValue<ProtectedStringType>> set,
        String expected) throws EncryptionException {
    assertEquals("Unexpected size of "+desc+": "+set, 1, set.size());
    ProtectedStringType protectedString = set.iterator().next().getValue();
    String decrypted = protector.decryptString(protectedString);
    assertEquals("Unexpected value in "+desc+": "+set, expected, decrypted);
}
}
| |
/**
* This file is part of the Iritgo/Aktario Framework.
*
* Copyright (C) 2005-2011 Iritgo Technologies.
* Copyright (C) 2003-2005 BueroByte GbR.
*
* Iritgo licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.iritgo.aktario.framework.client;
import de.iritgo.aktario.core.Engine;
import de.iritgo.aktario.core.action.ActionProcessorRegistry;
import de.iritgo.aktario.core.action.ReceiveEntryNetworkActionProcessor;
import de.iritgo.aktario.core.action.SendEntryNetworkActionProcessor;
import de.iritgo.aktario.core.action.SimpleActionProcessor;
import de.iritgo.aktario.core.base.BaseObject;
import de.iritgo.aktario.core.config.Configuration;
import de.iritgo.aktario.core.config.ThreadPoolConfig;
import de.iritgo.aktario.core.network.ClientTransceiver;
import de.iritgo.aktario.core.network.NetworkService;
import de.iritgo.aktario.framework.IritgoEngine;
import de.iritgo.aktario.framework.action.ReceiveNetworkActionProcessor;
import de.iritgo.aktario.framework.appcontext.AppContext;
import de.iritgo.aktario.framework.base.InitIritgoException;
import de.iritgo.aktario.framework.base.NetworkProxyLinkedListManager;
import de.iritgo.aktario.framework.base.NetworkProxyManager;
import de.iritgo.aktario.framework.base.command.PingPong;
import de.iritgo.aktario.framework.base.command.SetLogLevel;
import de.iritgo.aktario.framework.client.command.ClientReloadPlugins;
import de.iritgo.aktario.framework.client.gui.ClientGUI;
import de.iritgo.aktario.framework.client.network.NetworkSystemListenerImpl;
import de.iritgo.aktario.framework.client.network.SendNetworkActionProcessor;
import de.iritgo.aktario.framework.client.network.SimpleSyncNetworkActionProcessor;
import de.iritgo.aktario.framework.console.ConsoleCommand;
import de.iritgo.aktario.framework.console.ConsoleCommandRegistry;
import de.iritgo.aktario.framework.console.ConsoleManager;
import de.iritgo.aktario.framework.dataobject.gui.GUIExtensionManager;
import de.iritgo.aktario.framework.manager.ClientManager;
import de.iritgo.aktario.framework.user.User;
import de.iritgo.aktario.framework.user.UserRegistry;
/**
 * Central client-side part of the Iritgo/Aktario framework.
 *
 * Wires together the network action processors, the network service, the
 * user registry and the client GUI. Accessed as a lazily created singleton
 * through {@link #instance()}; {@link #init()} must be called before any
 * other method is used.
 */
public class Client extends BaseObject
{
    /** The singleton instance, created lazily by instance(). */
    static private Client client;

    /** The framework engine. */
    private Engine engine;

    /** Network service handling all client/server communication. */
    private NetworkService networkService;

    /** The client GUI; null until initGUI() succeeded or after stop(). */
    private ClientGUI clientGUI;

    @SuppressWarnings("unused")
    private ClientTransceiver clientTransceiver;

    /** Registry of all action processors. */
    private ActionProcessorRegistry actionProcessorRegistry;

    @SuppressWarnings("unused")
    private SimpleActionProcessor simpleActionProcessor;

    /** The application context. */
    private AppContext appContext;

    /** Registry of all users known to this client. */
    private UserRegistry userRegistry;

    @SuppressWarnings("unused")
    private NetworkProxyManager networkProxyManager;

    @SuppressWarnings("unused")
    private NetworkProxyLinkedListManager networkProxyLinkedListManager;

    /** Manager for GUI extensions. */
    private GUIExtensionManager guiExtensionManager;

    /**
     * Standard constructor
     */
    public Client()
    {
    }

    /**
     * Init all client functions: user, action processors, basic managers,
     * resources and console commands.
     */
    public void init() throws InitIritgoException
    {
        engine = Engine.instance();
        appContext = AppContext.instance();
        loadUser();
        registerActionProcessors();
        initBasics();
        initResources();
        registerConsoleCommands();
    }

    /**
     * Return the Client, it must init before.
     *
     * @return The client.
     */
    static public Client instance()
    {
        if (client == null)
        {
            client = new Client();
        }
        return client;
    }

    /**
     * Return the ClientEngine, it must init before.
     *
     * @return The clientEngine.
     */
    public Engine getEngine()
    {
        return engine;
    }

    /**
     * @return The registry of all users known to this client.
     */
    public UserRegistry getUserRegistry()
    {
        return userRegistry;
    }

    /**
     * @return The client GUI, or null if it was not initialized.
     */
    public ClientGUI getClientGUI()
    {
        return clientGUI;
    }

    /**
     * @return The manager for GUI extensions.
     */
    public GUIExtensionManager getGUIExtensionManager()
    {
        return guiExtensionManager;
    }

    /**
     * @return The network service of this client.
     */
    public NetworkService getNetworkService()
    {
        return networkService;
    }

    /**
     * Load the Userdata or set the user of default (-1).
     */
    private void loadUser()
    {
        userRegistry = new UserRegistry();
        User user = new User();
        user.setUniqueId(-1);
        appContext.setUser(user);
    }

    /**
     * Init all actionProcessors, it used for the Network and local actions.
     * Creates the receive and send entry processors and the network service
     * that connects them.
     */
    private void registerActionProcessors()
    {
        actionProcessorRegistry = engine.getActionProcessorRegistry();
        ReceiveEntryNetworkActionProcessor receiveEntryNetworkActionProcessor = new ReceiveEntryNetworkActionProcessor(
                "Client.ReceiveEntryNetworkActionProcessor", null, null);
        actionProcessorRegistry.put(receiveEntryNetworkActionProcessor);
        SendEntryNetworkActionProcessor sendEntryNetworkActionProcessor = new SendEntryNetworkActionProcessor(
                "Client.SendEntryNetworkActionProcessor", null, null);
        actionProcessorRegistry.put(sendEntryNetworkActionProcessor);
        networkService = new NetworkService(engine.getThreadService(), receiveEntryNetworkActionProcessor,
                sendEntryNetworkActionProcessor);
        networkService.addNetworkSystemListener(new NetworkSystemListenerImpl());
    }

    /**
     * This method create a default network action processor path
     * (receive chain and send chain).
     */
    public void createDefaultNetworkProcessingSystem()
    {
        createReceive();
        createSend();
    }

    /**
     * Build the default receive chain:
     * entry -> receive -> simple-sync -> simple processor.
     */
    private void createReceive()
    {
        ReceiveEntryNetworkActionProcessor receiveEntryNetworkActionProcessor = (ReceiveEntryNetworkActionProcessor) actionProcessorRegistry
                .get("Client.ReceiveEntryNetworkActionProcessor");
        ReceiveNetworkActionProcessor receiveNetworkActionProcessor = new ReceiveNetworkActionProcessor(null,
                receiveEntryNetworkActionProcessor);
        receiveEntryNetworkActionProcessor.addOutput(receiveNetworkActionProcessor);
        SimpleSyncNetworkActionProcessor simpleSyncNetworkActionProcessor = new SimpleSyncNetworkActionProcessor(null,
                receiveNetworkActionProcessor);
        receiveNetworkActionProcessor.addOutput(simpleSyncNetworkActionProcessor);
        SimpleActionProcessor simpleActionProcessor = new SimpleActionProcessor();
        simpleSyncNetworkActionProcessor.addOutput(simpleActionProcessor);
    }

    /**
     * Build the default send chain: entry -> network send processor.
     */
    private void createSend()
    {
        SendEntryNetworkActionProcessor sendEntryNetworkActionProcessor = (SendEntryNetworkActionProcessor) actionProcessorRegistry
                .get("Client.SendEntryNetworkActionProcessor");
        sendEntryNetworkActionProcessor.addOutput(new SendNetworkActionProcessor(networkService, null,
                sendEntryNetworkActionProcessor));
    }

    /**
     * Register the built-in console commands (reloadplugins, loglevel,
     * pingpong) with the console manager.
     */
    private void registerConsoleCommands() throws InitIritgoException
    {
        ConsoleCommandRegistry consoleCommandRegistry = ((ConsoleManager) engine.getManagerRegistry().getManager(
                "console")).getConsoleCommandRegistry();
        consoleCommandRegistry.add(new ConsoleCommand("reloadplugins", new ClientReloadPlugins(),
                "system.help.reloadplugin", 0));
        consoleCommandRegistry.add(new ConsoleCommand("loglevel", new SetLogLevel(), "system.help.loglevel", 1));
        consoleCommandRegistry.add(new ConsoleCommand("pingpong", new PingPong(), "system.help.pingpong", 0));
    }

    /**
     * Initialize the client GUI. If no "client" manager is registered,
     * the GUI is left uninitialized (clientGUI stays null).
     */
    public void initGUI() throws InitIritgoException
    {
        ClientManager clientManager = (ClientManager) engine.getManagerRegistry().getManager("client");
        if (clientManager == null)
        {
            return;
        }
        clientGUI = clientManager.getClientGUI();
        clientGUI.init();
    }

    /**
     * Stop the client GUI.
     *
     * Fix: guard against a null GUI. initGUI() returns without creating the
     * GUI when no "client" manager is registered, so clientGUI may still be
     * null here (the same guard is used in startGUI() and stop()).
     */
    public void stopGUI() throws InitIritgoException
    {
        if (clientGUI != null)
        {
            clientGUI.stopGUI();
            clientGUI = null;
        }
    }

    /**
     * Initialize basic managers: thread slots, network proxy managers and
     * the GUI extension manager.
     */
    private void initBasics() throws InitIritgoException
    {
        Configuration config = IritgoEngine.instance().getConfiguration();
        ThreadPoolConfig threadPoolConfig = config.getThreadPool();
        int minThreads = threadPoolConfig.getMinThreads();
        for (int i = 0; i < minThreads; ++i)
        {
            engine.getThreadService().addThreadSlot();
        }
        networkProxyManager = new NetworkProxyManager();
        networkProxyLinkedListManager = new NetworkProxyLinkedListManager();
        guiExtensionManager = new GUIExtensionManager();
        Engine.instance().getManagerRegistry().addManager(guiExtensionManager);
    }

    /**
     * Load the framework's translation resources.
     */
    private void initResources() throws InitIritgoException
    {
        engine.getResourceService().loadTranslationsWithClassLoader(IritgoEngine.class, "/resources/system");
    }

    /**
     * Start the client GUI, if one was initialized.
     */
    public void startGUI() throws InitIritgoException
    {
        if (clientGUI != null)
        {
            clientGUI.startGUI();
        }
    }

    /**
     * Start the client application, if a GUI was initialized.
     */
    public void startApplication() throws InitIritgoException
    {
        if (clientGUI != null)
        {
            clientGUI.startApplication();
        }
    }

    /**
     * Called when the network connection was lost. Notifies the GUI and
     * clears all cached objects and registries.
     */
    public void lostNetworkConnection()
    {
        if (clientGUI != null)
        {
            clientGUI.lostNetworkConnection();
        }
        // Client clean all cached IObjects, proxy and proxyEvents.
        Engine.instance().getProxyEventRegistry().clear();
        Engine.instance().getProxyRegistry().clear();
        Engine.instance().getBaseRegistry().clear();
        Client.instance().getUserRegistry().clear();
    }

    /**
     * Stop the client: shut down the GUI (if any) and close the network
     * channel of the current user.
     */
    public void stop()
    {
        if (clientGUI != null)
        {
            clientGUI.stopGUI();
            clientGUI.stopApplication();
        }
        // Important for the lostNetworkConnection() method!
        clientGUI = null;
        // NOTE(review): assumes init() was called (networkService and
        // appContext are non-null here) — confirm against callers.
        networkService.closeChannel(appContext.getUser().getNetworkChannel());
    }
}
| |
/**************************************************************************
Copyright (c) 2011:
Istituto Nazionale di Fisica Nucleare (INFN), Italy
Consorzio COMETA (COMETA), Italy
See http://www.infn.it and http://www.consorzio-cometa.it for details on
the copyright holders.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author <a href="mailto:riccardo.bruno@ct.infn.it">Riccardo Bruno</a>(INFN)
****************************************************************************/
package it.infn.ct;
import it.infn.ct.GridEngine.Job.InfrastructureInfo;
import it.infn.ct.GridEngine.Job.JSagaJobSubmission;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.Properties;
import java.util.Random;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import it.infn.ct.GridEngine.Job.MultiInfrastructureJobSubmission;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* This class interfaces any call to the GridEngine library.
*
* @author <a href="mailto:riccardo.bruno@ct.infn.it">Riccardo Bruno</a>(INFN)
*/
public class GridEngineInterface {
/**
* Logger object.
*/
private static final Logger LOG =
Logger.getLogger(GridEngineInterface.class.getName());
/**
* Line separator constant.
*/
public static final String LS = System.getProperty("line.separator");
/**
     * GridEngine ApplicationId associated to the APIServer executions.
*/
public static final int GRIDENGINE_APISRVAPPID = 10000;
/**
* 0xFF bitmask.
*/
public static final short FF_BITMASK = 0xff;
/*
* GridEngine UsersTracking DB
*/
/**
* UsersTrackingDB JNDI resource name.
*/
private String utdbJNDI;
/**
* UsersTrackingDB host name.
*/
private String utdbHost;
/**
* UsersTrackingDB host port number.
*/
private String utdbPort;
/**
* UsersTrackingDB host user name.
*/
private String utdbUser;
/**
* UsersTrackingDB host password.
*/
private String utdbPass;
/**
* UsersTrackingDB host database name.
*/
private String utdbName;
/**
* GridEngineDaemon configuration class.
*/
private APIServerDaemonConfig gedConfig;
/**
* GridEngineDaemon IP address.
*/
private String gedIPAddress;
/**
* The queue command.
*/
private APIServerDaemonCommand gedCommand;
/**
 * Empty constructor for GridEngineInterface.
 * Determines the host IP address (needed later by job submission) and
 * points the GridEngine logger at its configuration file.
 */
public GridEngineInterface() {
    LOG.debug("Initializing GridEngineInterface");
    // Retrieve host IP address, used by JobSubmission
    getIP();
    // Prepare environment variable for GridEngineLogConfig.xml
    setupGELogConfig();
}
/**
 * Constructor for GridEngineInterface taking as input a given command.
 * Delegates the basic setup to the empty constructor.
 * @param command - The queue command
 */
public GridEngineInterface(final APIServerDaemonCommand command) {
    this();
    LOG.debug("GridEngineInterface command:" + LS + command);
    this.gedCommand = command;
}
/**
 * Constructor for GridEngineInterface taking as input the
 * APIServerDaemonConfig and a given command. Delegates the basic setup to
 * the empty constructor and then loads the configuration.
 * @param config - APIServerDaemon configuration class
 * @param command - Queue command
 */
public GridEngineInterface(final APIServerDaemonConfig config,
        final APIServerDaemonCommand command) {
    this();
    LOG.debug("GridEngineInterface command:" + LS + command);
    setConfig(config);
    this.gedCommand = command;
}
/**
 * Cancel the job identified by the gedCommand values.
 * Not implemented yet; currently a no-op that only logs the request.
 */
public final void jobCancel() {
    LOG.debug("Cancelling job");
}
/**
 * Retrieve the output of the job identified by the gedCommand values.
 * Not implemented yet.
 * @return path to output (not implemented)
 */
public final String jobOutput() {
    LOG.debug("Getting job output");
    return "NOTIMPLEMENTED";
}
/**
 * Retrieve the status of the job identified by the gedCommand values.
 * The status is read directly from the ActiveGridInteraction table, which
 * the GridEngine JobCheck threads keep up to date.
 * @return Job status, or null when the lookup fails
 */
public final String jobStatus() {
    LOG.debug("Getting job status");
    String jobStatus = null;
    try {
        GridEngineInterfaceDB geiDB = new GridEngineInterfaceDB(
                utdbHost, utdbPort, utdbUser, utdbPass, utdbName);
        jobStatus = geiDB.getJobStatus(gedCommand.getTargetId());
    } catch (Exception e) {
        LOG.fatal("Unable get command status:" + LS
                + gedCommand + LS
                + e.toString());
    }
    return jobStatus;
}
/**
 * Return a JSON object containing information stored in file:
 * <action_info>/<task_id>.json file, which contains the job description
 * built by the APIServer translated for the GridEngine.
 * Also writes the translated description into the
 * <action_info>/<task_id>.ge_info file.
 * @return JSON object with the GridEngine job description
 * @throws IOException Exception in case of IO failures
 */
private JSONObject mkGEJobDesc() throws IOException {
    JSONObject jsonJobDesc = null;
    LOG.debug("Entering mkGEJobDesc");
    String jobDescFileName = gedCommand.getActionInfo()
            + "/" + gedCommand.getTaskId() + ".json";
    LOG.debug("JSON filename: " + jobDescFileName);
    // Fix: try-with-resources closes the input stream (previously leaked)
    try (InputStream is = new FileInputStream(jobDescFileName)) {
        String jsonTxt = IOUtils.toString(is);
        jsonJobDesc = new JSONObject(jsonTxt);
        LOG.debug("Loaded APIServer JobDesc:\n" + LS + jsonJobDesc);
    } catch (Exception e) {
        // NOTE(review): on failure jsonJobDesc stays null and the code
        // below throws NullPointerException — preserved original behavior
        LOG.warn("Caught exception: " + e.toString());
    }
    // Now create the <task_id>.info file targeted for the GridEngine
    JSONObject geTaskDescription = new JSONObject();
    geTaskDescription.put("commonName",
            String.format("%s", jsonJobDesc.getString("user")));
    geTaskDescription.put("application",
            GRIDENGINE_APISRVAPPID); // Take this value from properties
                                     // or any other configuration source
    geTaskDescription.put("identifier",
            String.format("%s@%s", jsonJobDesc.getString("id"),
                    jsonJobDesc.getString("iosandbox")));
    geTaskDescription.put("input_files",
            jsonJobDesc.getJSONArray("input_files"));
    geTaskDescription.put("output_files",
            jsonJobDesc.getJSONArray("output_files"));
    // Prepare the JobDescription
    JSONObject geJobDescription = new JSONObject();
    // Get app Info and Parameters
    JSONObject appInfo = jsonJobDesc.getJSONObject("application");
    JSONArray appParams = appInfo.getJSONArray("parameters");
    // Process application parameters
    String jobArgs = "";
    String paramName;
    String paramValue;
    for (int i = 0; i < appParams.length(); i++) {
        JSONObject appParameter = appParams.getJSONObject(i);
        // Get parameter name and value
        paramName = appParameter.getString("param_name");
        paramValue = appParameter.getString("param_value");
        // Map task values to GE job description values
        if (paramName.equals("jobdesc_executable")) {
            geJobDescription.put("executable", paramValue);
        } else if (paramName.equals("jobdesc_arguments")) {
            // Further arguments will be added later
            jobArgs = paramValue + " ";
        } else if (paramName.equals("jobdesc_output")) {
            geJobDescription.put("output", paramValue);
        } else if (paramName.equals("jobdesc_error")) {
            geJobDescription.put("error", paramValue);
        } else if (paramName.equals("target_executor")) {
            LOG.debug("target_executor : '" + paramValue + "'");
        } else {
            LOG.warn("Reached end of if-elif chain for "
                    + "application param name: '"
                    + paramName + "' with value: '"
                    + paramValue + "'");
        }
    }
    // Now add further arguments if specified in task
    JSONArray jobArguments = jsonJobDesc.getJSONArray("arguments");
    for (int j = 0; j < jobArguments.length(); j++) {
        jobArgs += String.format("%s ", jobArguments.getString(j));
    }
    geJobDescription.put("arguments", jobArgs.trim());
    // Get application specific settings
    geTaskDescription.put("jobDescription", geJobDescription);
    // Select one of the possible infrastructures among the one enabled
    // A random strategy is currently implemented; this could be changed
    // later
    JSONArray jobInfrastructures = appInfo.getJSONArray("infrastructures");
    JSONArray enabledInfras = new JSONArray();
    for (int v = 0, w = 0; w < jobInfrastructures.length(); w++) {
        JSONObject infra = jobInfrastructures.getJSONObject(w);
        if (infra.getString("status").equals("enabled")) {
            enabledInfras.put(v++, infra);
        }
    }
    int selInfraIdx = 0;
    Random rndGen = new Random();
    if (enabledInfras.length() > 1) {
        selInfraIdx = rndGen.nextInt(enabledInfras.length());
    }
    JSONObject selInfra = enabledInfras.getJSONObject(selInfraIdx);
    LOG.debug("Selected infra:" + LS + selInfra.toString());
    // Process infrastructure: name, credentials and parameters
    JSONObject geInfrastructure = new JSONObject();
    geInfrastructure.put("name", selInfra.getString("name"));
    JSONObject geCredentials = new JSONObject();
    JSONArray infraParams = selInfra.getJSONArray("parameters");
    for (int h = 0; h < infraParams.length(); h++) {
        JSONObject infraParameter = infraParams.getJSONObject(h);
        paramName = infraParameter.getString("name");
        paramValue = infraParameter.getString("value");
        LOG.info(h + ": " + paramName + " - " + paramValue);
        // Job settings
        if (paramName.equals("jobservice")) {
            geInfrastructure.put("resourceManagers", paramValue);
        } else if (paramName.equals("ce_list")) {
            geInfrastructure.put("ce_list", paramValue);
        } else if (paramName.equals("os_tpl")) {
            geInfrastructure.put("os_tpl", paramValue);
        } else if (paramName.equals("resource_tpl")) {
            geInfrastructure.put("resource_tpl", paramValue);
        } else if (paramName.equals("secured")) {
            geInfrastructure.put("secured", paramValue);
        } else if (paramName.equals("protocol")) {
            geInfrastructure.put("protocol", paramValue);
        } else if (paramName.equals("attributes_title")) {
            geInfrastructure.put("attributes_title", paramValue);
        } else if (paramName.equals("bdii")) {
            geInfrastructure.put("bdii", paramValue);
        } else if (paramName.equals("swtags")) {
            geInfrastructure.put("swtags", paramValue);
        } else if (paramName.equals("jdlRequirements")) {
            geInfrastructure.put("jdlRequirements", paramValue);
        } else if (paramName.equals("user_data")) {
            geInfrastructure.put("user_data", paramValue);
        } else if (paramName.equals("prefix")) {
            geInfrastructure.put("prefix", paramValue);
        } else if (paramName.equals("link")) {
            geInfrastructure.put("link", paramValue);
        } else if (paramName.equals("waitms")) {
            geInfrastructure.put("waitms", paramValue);
        } else if (paramName.equals("waitsshms")) {
            geInfrastructure.put("waitsshms", paramValue);
        } else if (paramName.equals("sshport")) {
            geInfrastructure.put("sshport", paramValue);
        // Credential settings
        } else if (paramName.equals("username")) {
            geCredentials.put("username", paramValue);
        } else if (paramName.equals("password")) {
            geCredentials.put("password", paramValue);
        } else if (paramName.equals("eToken_host")) {
            geCredentials.put("eToken_host", paramValue);
        } else if (paramName.equals("eToken_port")) {
            geCredentials.put("eToken_port", paramValue);
        } else if (paramName.equals("eToken_id")) {
            geCredentials.put("eToken_id", paramValue);
        } else if (paramName.equals("voms")) {
            geCredentials.put("voms", paramValue);
        } else if (paramName.equals("voms_role")) {
            geCredentials.put("voms_role", paramValue);
        } else if (paramName.equals("rfc_proxy")) {
            geCredentials.put("rfc_proxy", paramValue);
        } else if (paramName.equals("disable-voms-proxy")) {
            geCredentials.put("disable-voms-proxy", paramValue);
        } else if (paramName.equals("proxy-renewal")) {
            geCredentials.put("proxy-renewal", paramValue);
        } else {
            LOG.warn("Reached end of if-elif chain for infra_param name: '"
                    + paramName + "' with value: '"
                    + paramValue + "'");
        }
    }
    geTaskDescription.put("infrastructure", geInfrastructure);
    geTaskDescription.put("credentials", geCredentials);
    // Now write the JSON translated for the GridEngine
    String jsonTask = geTaskDescription.toString();
    String jsonFileName = gedCommand.getActionInfo()
            + "/" + gedCommand.getTaskId() + ".ge_info";
    // Fix: try-with-resources closes the output stream (previously leaked)
    try (OutputStream os = new FileOutputStream(jsonFileName)) {
        os.write(jsonTask.getBytes(
                Charset.forName("UTF-8"))); // UTF-8 from properties
        LOG.debug("GridEngine JobDescription written in file '"
                + jsonFileName + "':\n" + LS + jsonTask);
    } catch (Exception e) {
        LOG.warn("Caught exception: " + e.toString());
    }
    return geTaskDescription;
}
/**
 * Prepare the I/O Sandbox.
 * @param mijs - MultiInfrastructureJobSubmission class
 * @param inputFiles - JSONObject containing input files
 * @param outputFiles - JSONObject containing output files
 */
private void prepareIOSandbox(
        final MultiInfrastructureJobSubmission mijs,
        final JSONArray inputFiles,
        final JSONArray outputFiles) {
    // InputSandbox: comma separated list of absolute input file paths
    String inputSandbox = "";
    for (int i = 0; i < inputFiles.length(); i++) {
        JSONObject inputEntry = inputFiles.getJSONObject(i);
        if (inputEntry.getString("name").length() > 0) {
            // Fix: separate entries based on the accumulated string, not
            // the loop index, so a skipped first entry (empty name) cannot
            // produce a spurious leading comma.
            String comma = inputSandbox.isEmpty() ? "" : ",";
            inputSandbox += comma + gedCommand.getActionInfo()
                    + "/" + inputEntry.getString("name");
        }
    }
    mijs.setInputFiles(inputSandbox);
    LOG.debug("inputSandbox: '" + inputSandbox + "'");
    // OutputSandbox: comma separated list of output file names
    String outputSandbox = "";
    for (int i = 0; i < outputFiles.length(); i++) {
        JSONObject outputEntry = outputFiles.getJSONObject(i);
        if (outputEntry.getString("name").length() > 0) {
            String comma = outputSandbox.isEmpty() ? "" : ",";
            outputSandbox += comma + outputEntry.getString("name");
        }
    }
    mijs.setOutputFiles(outputSandbox);
    LOG.debug("outputSandbox: '" + outputSandbox + "'");
}
/**
 * Prepares JobDescription specified in JSONObject item to setup the given
 * MultiInfrastructureJobSubmission object: executable, output, arguments,
 * error file and output path.
 *
 * @param mijs - MultiInfrastructureJobSubmission object instance
 * @param geJobDescription - Object describing the job description
 * @see MultiInfrastructureJobSubmission
 */
private void prepareJobDescription(
        final MultiInfrastructureJobSubmission mijs,
        final JSONObject geJobDescription) {
    // Job description; all four keys are expected to be present
    mijs.setExecutable(geJobDescription.getString("executable"));
    mijs.setJobOutput(geJobDescription.getString("output"));
    mijs.setArguments(geJobDescription.getString("arguments"));
    mijs.setJobError(geJobDescription.getString("error"));
    // Job output goes into the command's action_info directory
    mijs.setOutputPath(gedCommand.getActionInfo());
}
/**
 * Retrieve the APIServerDaemon PATH to the GridEngineLogConfig.xml file and
 * setup the GridEngineLogConfig.path environment variable accordingly. This
 * variable will be taken by GridEngine while building up its log.
 */
private void setupGELogConfig() {
    URL geLogConfig = this.getClass().
            getResource("GridEngineLogConfig.xml");
    // Fix: guard against a missing classpath resource; previously a null
    // return value caused a NullPointerException from the constructor.
    if (geLogConfig == null) {
        LOG.warn("GridEngineLogConfig.xml not found on the classpath; "
                + "GridEngineLogConfig.path will not be set");
        return;
    }
    String geLogConfigEnvVar = geLogConfig.getPath();
    LOG.debug("GridEngineLogConfig.xml at '" + geLogConfigEnvVar + "'");
    // System.setProperty is equivalent to the former
    // getProperties/setProperty/setProperties round trip
    System.setProperty("GridEngineLogConfig.path", geLogConfigEnvVar);
}
/**
 * Retrieve the id field of the ActiveGridInteraction table starting from
 * the jobDesc table.
 *
 * @return UsersTrackingDB ActiveGridInteraction record id (0 on failure)
 */
public final int getAGIId() {
    LOG.debug("Getting ActiveGridInteraciton' id field for task: "
            + gedCommand.getTaskId());
    int agiId = 0;
    try {
        GridEngineInterfaceDB geiDB = new GridEngineInterfaceDB(
                utdbHost, utdbPort, utdbUser, utdbPass, utdbName);
        agiId = geiDB.getAGIId(gedCommand);
    } catch (Exception e) {
        LOG.fatal("Unable get id:" + LS + gedCommand + LS + e.toString());
    }
    return agiId;
}
/*
* GridEngine interfacing methods
*/
/**
 * Load GridEngineDaemon configuration settings.
 * Copies the UsersTrackingDB connection parameters out of the given
 * configuration object and logs them.
 *
 * @param config - GridEngineDaemon configuration object
 */
public final void setConfig(final APIServerDaemonConfig config) {
    this.gedConfig = config;
    // Extract class specific configuration
    this.utdbJNDI = config.getGridEngineDBjndi();
    this.utdbHost = config.getGridEngineDBhost();
    this.utdbPort = config.getGridEngineDBPort();
    this.utdbUser = config.getGridEngineDBuser();
    this.utdbPass = config.getGridEngineDBPass();
    this.utdbName = config.getGridEngineDBName();
    LOG.debug("GridEngineInterface config:" + LS
            + "  [UsersTrackingDB]" + LS
            + "    db_jndi: '" + this.utdbJNDI + "'" + LS
            + "    db_host: '" + this.utdbHost + "'" + LS
            + "    db_port: '" + this.utdbPort + "'" + LS
            + "    db_user: '" + this.utdbUser + "'" + LS
            + "    db_pass: '" + this.utdbPass + "'" + LS
            + "    db_name: '" + this.utdbName + "'" + LS);
}
/**
 * Setup machine IP address, needed by job submission.
 * Stores the first four address bytes as colon separated decimal values
 * in gedIPAddress, or an empty string when the lookup fails.
 */
private void getIP() {
    try {
        byte[] ipAddr = InetAddress.getLocalHost().getAddress();
        StringBuilder ip = new StringBuilder();
        for (int octet = 0; octet < 4; octet++) {
            if (octet > 0) {
                ip.append(":");
            }
            ip.append((short) (ipAddr[octet] & FF_BITMASK));
        }
        gedIPAddress = ip.toString();
    } catch (Exception e) {
        gedIPAddress = "";
        LOG.fatal("Unable to get the portal IP address");
    }
}
/**
 * Retrieve the job description stored in the jobDesc table for the
 * ActiveGridInteraction record referenced by this command.
 *
 * @return Job description (empty string on failure)
 */
public final String getJobDescription() {
    LOG.debug("Getting jobDescription for AGI_id: "
            + gedCommand.getTargetId());
    String jobDesc = "";
    try {
        GridEngineInterfaceDB geiDB = new GridEngineInterfaceDB(
                utdbHost, utdbPort, utdbUser, utdbPass, utdbName);
        jobDesc = geiDB.getJobDescription(gedCommand.getTargetId());
    } catch (Exception e) {
        LOG.fatal("Unable get job description for command:" + LS
                + gedCommand + LS
                + e.toString());
    }
    return jobDesc;
}
/**
* submit the job identified by the gedCommand values.
*
* @return 0 - Ideally it should report the job id (not implemented in GE)
*/
public final int jobSubmit() {
int agiId = 0;
LOG.debug("Submitting job");
// MultiInfrastructureJobSubmission object
MultiInfrastructureJobSubmission mijs = null;
if ((utdbJNDI != null) && !utdbJNDI.isEmpty()) {
mijs = new MultiInfrastructureJobSubmission();
} else {
mijs = new MultiInfrastructureJobSubmission(
"jdbc:mysql://" + utdbHost
+ ":" + utdbPort
+ "/" + utdbName,
utdbUser,
utdbPass);
}
if (mijs == null) {
LOG.error("mijs is NULL, sorry!");
} else {
try {
LOG.debug("Loading GridEngine job JSON desc");
// Load <task_id>.json file in memory
JSONObject geJobDesc = mkGEJobDesc();
// application
int geAppId =
geJobDesc.getInt("application");
// commonName (user executing task)
String geCommonName =
geJobDesc.getString("commonName");
// infrastructure
JSONObject geInfrastructure =
geJobDesc.getJSONObject("infrastructure");
// jobDescription
JSONObject geJobDescription =
geJobDesc.getJSONObject("jobDescription");
// credentials
JSONObject geCredentials =
geJobDesc.getJSONObject("credentials");
// identifier
String jobIdentifier =
geJobDesc.getString("identifier");
// inputFiles
JSONArray inputFiles =
geJobDesc.getJSONArray("input_files");
// outputFiles
JSONArray outputFiles =
geJobDesc.getJSONArray("output_files");
// Loaded essential JSON components; now go through
// each adaptor specific setting:
// resourceManagers
String resourceManagers =
geInfrastructure.getString("resourceManagers");
String adaptor = resourceManagers.split(":")[0];
LOG.info("Adaptor is '" + adaptor + "'");
InfrastructureInfo[] infrastructures =
new InfrastructureInfo[1];
// eTokenServer variables for GSI based infrastructures
String eTokenHost;
String eTokenPort;
String eTokenId;
String voms;
String vomsRole;
String rfcProxy;
/*
* Each adaptor has its own specific settings Different adaptors
* may have in common some settings such as I/O Sandboxing, job
* description etc
*/
switch (adaptor) {
// SSH Adaptor
case "ssh":
try {
LOG.info("Entering SSH adaptor ...");
// Credential values
String username = geCredentials.getString("username");
String password = geCredentials.getString("password");
String[] sshEndPoint = {resourceManagers};
infrastructures[0] = new InfrastructureInfo(
resourceManagers,
"ssh",
username,
password,
sshEndPoint);
mijs.addInfrastructure(infrastructures[0]);
// Job description
prepareJobDescription(mijs, geJobDescription);
// IO Files
prepareIOSandbox(mijs, inputFiles, outputFiles);
// Submit asynchronously
agiId = 0;
mijs.submitJobAsync(geCommonName,
gedIPAddress,
geAppId,
jobIdentifier);
LOG.debug("AGI_id: " + agiId);
} catch (Exception e) {
LOG.fatal("Caught exception:" + LS + e.toString());
}
break;
// rOCCI Adaptor
case "rocci":
LOG.info("Entering rOCCI adaptor ...");
// Infrastructure values
String protocol = "";
String secured = "";
String prefix = "";
String userData = "";
String link = "";
String waitms = "";
String waitsshms = "";
String sshport = "";
String osTpl =
geInfrastructure.getString("os_tpl");
String resourceTpl =
geInfrastructure.getString("resource_tpl");
String attributesTitle =
geInfrastructure.getString("attributes_title");
// Infrastructure parameters that couldn't be specified
try {
protocol = geInfrastructure.getString("protocol");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
try {
secured = geInfrastructure.getString("secured");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
try {
userData = geInfrastructure.getString("user_data");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
try {
prefix = geInfrastructure.getString("prefix");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
try {
link = geInfrastructure.getString("link");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
try {
waitms = geInfrastructure.getString("waitms");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
try {
waitsshms = geInfrastructure.getString("waitsshms");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
try {
sshport = geInfrastructure.getString("sshport");
} catch (JSONException e) {
LOG.warn("Non mandatory value exception: "
+ e.toString());
}
// Credential values
eTokenHost = geCredentials.getString("eToken_host");
eTokenPort = geCredentials.getString("eToken_port");
eTokenId = geCredentials.getString("eToken_id");
voms = geCredentials.getString("voms");
vomsRole = geCredentials.getString("voms_role");
rfcProxy = geCredentials.getString("rfc_proxy");
// Building option statements
String prefixOpt =
(prefix.length() > 0)
? "prefix=" + prefix + "&" : "";
String mixinResTpl =
"mixin_resource_tpl=" + resourceTpl + "&";
String mixinOsTpl =
"mixin_os_tpl=" + osTpl + "&";
String attributeTitle =
"attributes_title=" + attributesTitle + "&";
String protocolOpt =
(protocol.length() > 0)
? "prptocol=" + protocol + "&" : "";
String securedFlag =
(secured.length() > 0)
? "secured=" + secured + "&" : "";
String userDataOpt =
(userData.length() > 0)
? "user_data=" + userData + "&" : "";
String linkOpt =
(link.length() > 0)
? "link=" + link + "&" : "";
String waitmsOpt =
(waitms.length() > 0)
? "waitms=" + waitms + "&" : "";
String waitsshmsOpt =
(waitsshms.length() > 0)
? "waitsshms=" + waitms + "&" : "";
String sshportOpt =
(sshport.length() > 0)
? "sshport=" + sshport + "&" : "";
// Generate the rOCCI endpoint
String[] rOCCIResourcesList = {
resourceManagers + "/?" + prefixOpt
+ "action=create&"
+ "resource=compute&" + mixinResTpl
+ mixinOsTpl + attributeTitle + protocolOpt
+ securedFlag + userDataOpt + linkOpt
+ waitmsOpt + waitsshmsOpt + sshportOpt
+ "auth=x509" };
LOG.info("rOCCI endpoint: '"
+ rOCCIResourcesList[0] + "'");
// Prepare the infrastructure
infrastructures[0] = new InfrastructureInfo(
resourceManagers, // Infrastruture
"rocci", // Adaptor
"", //
rOCCIResourcesList, // Resources list
eTokenHost, // eTokenServer host
eTokenPort, // eTokenServer port
eTokenId, // eToken id (md5sum)
voms, // VO
vomsRole, // VO.group.role
rfcProxy.equalsIgnoreCase("true") // ProxyRFC
);
mijs.addInfrastructure(infrastructures[0]);
// Setup JobDescription
prepareJobDescription(mijs, geJobDescription);
// I/O Sandbox
// In rOCCI output and error files have to be removed
// from outputFiles array replacing the file name
// with an empty string
for (int i = 0; i < outputFiles.length(); i++) {
JSONObject outputEntry =
outputFiles.getJSONObject(i);
if (outputEntry.getString("name").equals(
geJobDescription.getString("output"))
|| outputEntry.getString("name").equals(
geJobDescription.getString("error"))) {
LOG.debug("Skipping unnecessary file: '"
+ outputEntry.getString("name") + "'");
outputFiles.getJSONObject(i).put("name", "");
}
}
prepareIOSandbox(mijs, inputFiles, outputFiles);
// Submit asynchronously
agiId = 0;
mijs.submitJobAsync(geCommonName,
gedIPAddress,
geAppId,
jobIdentifier);
LOG.debug("AGI_id: " + agiId);
break;
// wms adaptor (EMI/gLite)
case "wms":
LOG.info("Entering wms adaptor ...");
// Infrastructure values
String infraName = geInfrastructure.getString("name");
LOG.info("infrastructure name: '" + infraName + "'");
String bdii = geInfrastructure.getString("bdii");
LOG.info("bdii: '" + bdii + "'");
// ceList, jdlRequirements and swtags are not mandatory
// catch JSONException exception if these values are
// missing
String[] ceList = null;
try {
ceList = geInfrastructure.getString(
"ce_list").split(",");
if ((ceList != null) && (ceList.length > 0)) {
LOG.info("ce_list:");
for (int i = 0; i < ceList.length; i++) {
LOG.info("CE[" + i + "]: '"
+ ceList[i] + "'");
}
}
} catch (JSONException e) {
LOG.warn("NO CE list specified");
}
String[] jdlRequirements = null;
try {
jdlRequirements = geInfrastructure.getString(
"jdlRequirements").split(";");
} catch (JSONException e) {
LOG.warn("jdlRequirements not specified");
}
String swtags = null;
try {
swtags = geInfrastructure.getString("swtags");
} catch (JSONException e) {
LOG.warn("swtags not specified");
}
// Credentials values
eTokenHost = geCredentials.getString("eToken_host");
eTokenPort = geCredentials.getString("eToken_port");
eTokenId = geCredentials.getString("eToken_id");
voms = geCredentials.getString("voms");
vomsRole = geCredentials.getString("voms_role");
rfcProxy = geCredentials.getString("rfc_proxy");
// In wms case resourceManager could contain more than
// one wms:// entrypoint specified by a comma separated
// string
String[] wmsList = resourceManagers.split(",");
LOG.info("Creating Infrastrcuture object");
infrastructures[0] = new InfrastructureInfo(
infraName, // Infrastructure name
"wms", // Adaptor
wmsList, // List of wmses
eTokenHost, // eTokenServer host
eTokenPort, // eTokenServer port
eTokenId, // eToken id (md5sum)
voms, // VO
vomsRole, // VO.group.role
(null != swtags) ? swtags : "" // Software Tags
);
// Select one of the available CEs if specified
// in the ceList. The selection will be done
// randomly
if ((ceList != null) && (ceList.length > 0)) {
Random rndGen = new Random();
int selCEindex = rndGen.nextInt(ceList.length);
mijs.setJobQueue(ceList[selCEindex]);
LOG.info("Selected CE from the list: '"
+ ceList[selCEindex] + "'");
} else {
LOG.info("No CE list specified, wms will choose");
}
// Specify infrastructure
mijs.addInfrastructure(infrastructures[0]);
// Setup JobDescription
prepareJobDescription(mijs, geJobDescription);
// I/O Sandbox
// In wms output and error files have to be removed
// from outputFiles array replacing the file name
// with an empty string
for (int i = 0; i < outputFiles.length(); i++) {
JSONObject outputEntry =
outputFiles.getJSONObject(i);
if (outputEntry.getString("name").equals(
geJobDescription.getString("output"))
|| outputEntry.getString("name").equals(
geJobDescription.getString("error"))) {
LOG.debug("Skipping unnecessary file: '"
+ outputEntry.getString("name") + "'");
outputFiles.getJSONObject(i).put("name", "");
}
}
prepareIOSandbox(mijs, inputFiles, outputFiles);
// JDL requirements
if ((jdlRequirements != null)
&& (jdlRequirements.length > 0)) {
mijs.setJDLRequirements(jdlRequirements);
}
// Submit asynchronously
agiId = 0;
mijs.submitJobAsync(geCommonName,
gedIPAddress,
geAppId,
jobIdentifier);
LOG.debug("AGI_id: " + agiId);
break;
default:
LOG.fatal("Unrecognized or unsupported adaptor found!");
}
} catch (IOException e) {
LOG.fatal("Unable to load APIServer JSON job description\n"
+ LS + e.toString());
} catch (Exception e) {
LOG.fatal("Unable to submit job: " + LS + e.toString());
}
}
return agiId;
}
/**
* Prepares the jobOuput for the APIServer.
*
* @return Directory containing output files
*/
public final String prepareJobOutput() {
String jobDescription = getJobDescription();
String tgzFileName = gedCommand.getActionInfo() + "/jobOutput/"
+ JSagaJobSubmission.removeNotAllowedCharacter(
jobDescription + "_"
+ gedCommand.getTargetId() + ".tgz");
LOG.debug("tgzFileName: '" + tgzFileName + "'");
try {
Process unpackTar = Runtime.getRuntime()
.exec("tar xzvf " + tgzFileName
+ " -C " + gedCommand.getActionInfo());
unpackTar.waitFor();
} catch (Exception e) {
LOG.fatal("Error extracting archive: " + tgzFileName);
}
return JSagaJobSubmission.removeNotAllowedCharacter(
jobDescription + "_" + gedCommand.getTargetId());
}
/**
* removeAGIRecord(int agiId).
* This method removes the specified ActiveGridInteraction record form the
* GridEngine' UsersTracking database.
* @param agiId - GridEngine ActiveGridInteractions record identifier
*/
public final void removeAGIRecord(final int agiId) {
GridEngineInterfaceDB geiDB = null;
LOG.debug("Removing record from ActiveGridInteraction with id: '"
+ agiId + "'");
try {
geiDB = new GridEngineInterfaceDB(utdbHost,
utdbPort,
utdbUser,
utdbPass,
utdbName);
geiDB.removeAGIRecord(agiId);
} catch (Exception e) {
LOG.fatal("Unable delete ActiveGridInteraction entry for id "
+ agiId + "command" + LS + e.toString());
}
}
}
| |
package de.jkitberatung.ica.wsh ;
import com4j.*;
/**
* IICAClient Interface
*/
@IID("{238F6F81-B8B4-11CF-8771-00A024541EE3}")
public interface IICAClient extends Com4jObject {
@VTID(7)
void tabStop(
boolean pbool);
@VTID(8)
boolean tabStop();
/**
* method Show AboutBox
*/
@VTID(9)
void aboutBox();
/**
* method Clear all properties
*/
@VTID(10)
void clearProps();
/**
* method Get property count
*/
@VTID(11)
int getPropCount();
/**
     * method Delete property
*/
@VTID(12)
void deleteProp(
java.lang.String name);
/**
     * method Delete property by index
*/
@VTID(13)
void deletePropByIndex(
int index);
/**
     * method Get property name by index
*/
@VTID(14)
java.lang.String getPropNameByIndex(
int index);
/**
     * method Reset all properties
*/
@VTID(15)
void resetProps();
/**
     * method Set property
*/
@VTID(16)
void setProp(
java.lang.String name,
java.lang.String value);
/**
     * method Get property value
*/
@VTID(17)
java.lang.String getPropValue(
java.lang.String name);
/**
     * method Get property value by index
*/
@VTID(18)
java.lang.String getPropValueByIndex(
int index);
/**
* method Connect to server
*/
@VTID(19)
void connect();
/**
* method Disconnect from server
*/
@VTID(20)
void disconnect();
/**
* method Logoff from server
*/
@VTID(21)
void logoff();
/**
* method Load ICA file
*/
@VTID(22)
void loadIcaFile(
java.lang.String file);
/**
* method Run published application
*/
@VTID(23)
void runPublishedApplication(
java.lang.String appName,
java.lang.String arguments);
/**
* method Set session end action
*/
@VTID(24)
void setSessionEndAction(
de.jkitberatung.ica.wsh.ICASessionEndAction action);
/**
* method True if currently connected to server
*/
@VTID(25)
boolean isConnected();
/**
* method Get ICA Client interface version
*/
@VTID(26)
java.lang.String getInterfaceVersion();
/**
* method Get ICA Client identification
*/
@VTID(27)
java.lang.String getClientIdentification();
/**
* method Get session string
*/
@VTID(28)
java.lang.String getSessionString(
de.jkitberatung.ica.wsh.ICASessionString index);
/**
* method Get session counter
*/
@VTID(29)
int getSessionCounter(
de.jkitberatung.ica.wsh.ICASessionCounter index);
/**
* method Get last notification reason
*/
@VTID(30)
de.jkitberatung.ica.wsh.ICAEvent getNotificationReason();
/**
* method Startup
*/
@VTID(31)
void startup();
/**
* method Get Last Error
*/
@VTID(32)
int getLastError();
/**
* method Get Last Client Error
*/
@VTID(33)
int getLastClientError();
/**
* method Enable scaling
*/
@VTID(34)
int scaleEnable();
/**
* method Disable scaling
*/
@VTID(35)
int scaleDisable();
/**
* method Scale up to a larger size
*/
@VTID(36)
int scaleUp();
/**
* method Scale down to a smaller size
*/
@VTID(37)
int scaleDown();
/**
* method Scale to a size
*/
@VTID(38)
int scaleSize(
int width,
int height);
/**
* method Scale to a percent
*/
@VTID(39)
int scalePercent(
int percent);
/**
* method Scale to fit size of ICA Client Object window
*/
@VTID(40)
int scaleToFit();
/**
* method Popup the scaling dialog box
*/
@VTID(41)
int scaleDialog();
/**
* method CreateChannels
*/
@VTID(42)
int createChannels(
java.lang.String channelNames);
/**
* method SendChannelData
*/
@VTID(43)
int sendChannelData(
java.lang.String channelName,
java.lang.String data,
int dataSize,
de.jkitberatung.ica.wsh.ICAVCDataType dataType);
/**
* method GetChannelCount
*/
@VTID(44)
int getChannelCount();
/**
* method GetChannelName
*/
@VTID(45)
java.lang.String getChannelName(
int channelIndex);
/**
* method GetChannelNumber
*/
@VTID(46)
int getChannelNumber(
java.lang.String channelName);
/**
* method GetGlobalChannelCount
*/
@VTID(47)
int getGlobalChannelCount();
/**
* method GetGlobalChannelName
*/
@VTID(48)
java.lang.String getGlobalChannelName(
int channelIndex);
/**
* method GetGlobalChannelNumber
*/
@VTID(49)
int getGlobalChannelNumber(
java.lang.String channelName);
/**
* method GetMaxChannelCount
*/
@VTID(50)
int getMaxChannelCount();
/**
* method GetMaxChannelWrite
*/
@VTID(51)
int getMaxChannelWrite();
/**
* method GetMaxChannelRead
*/
@VTID(52)
int getMaxChannelRead();
/**
* method SetChannelFlags
*/
@VTID(53)
int setChannelFlags(
java.lang.String channelName,
int flags);
/**
* method GetChannelFlags
*/
@VTID(54)
int getChannelFlags(
java.lang.String channelName);
/**
* method GetChannelDataSize
*/
@VTID(55)
int getChannelDataSize(
java.lang.String channelName);
/**
* method GetChannelDataType
*/
@VTID(56)
de.jkitberatung.ica.wsh.ICAVCDataType getChannelDataType(
java.lang.String channelName);
/**
* method GetChannelData
*/
@VTID(57)
java.lang.String getChannelData(
java.lang.String channelName,
de.jkitberatung.ica.wsh.ICAVCDataType dataType);
/**
* method EnumerateServers
*/
@VTID(58)
int enumerateServers();
/**
* method EnumerateApplications
*/
@VTID(59)
int enumerateApplications();
/**
* method EnumerateFarms
*/
@VTID(60)
int enumerateFarms();
/**
* method GetEnumNameCount
*/
@VTID(61)
int getEnumNameCount(
int hndEnum);
/**
* method GetEnumNameByIndex
*/
@VTID(62)
java.lang.String getEnumNameByIndex(
int hndEnum,
int hndIndex);
/**
* method CloseEnumHandle
*/
@VTID(63)
int closeEnumHandle(
int hndEnum);
/**
* method GetWindowWidth
*/
@VTID(64)
int getWindowWidth(
de.jkitberatung.ica.wsh.ICAWindowType wndType,
int wndFlags);
/**
* method GetWindowHeight
*/
@VTID(65)
int getWindowHeight(
de.jkitberatung.ica.wsh.ICAWindowType wndType,
int wndFlags);
/**
* method SetWindowSize
*/
@VTID(66)
int setWindowSize(
de.jkitberatung.ica.wsh.ICAWindowType wndType,
int width,
int height,
int wndFlags);
/**
* method GetWindowXPosition
*/
@VTID(67)
int getWindowXPosition(
de.jkitberatung.ica.wsh.ICAWindowType wndType,
int wndFlags);
/**
* method GetWindowYPosition
*/
@VTID(68)
int getWindowYPosition(
de.jkitberatung.ica.wsh.ICAWindowType wndType,
int wndFlags);
/**
* method SetWindowPosition
*/
@VTID(69)
int setWindowPosition(
de.jkitberatung.ica.wsh.ICAWindowType wndType,
int xPos,
int yPos,
int wndFlags);
/**
* method DisplayWindow
*/
@VTID(70)
int displayWindow(
de.jkitberatung.ica.wsh.ICAWindowType wndType);
/**
* method HideWindow
*/
@VTID(71)
int hideWindow(
de.jkitberatung.ica.wsh.ICAWindowType wndType);
/**
* method UndockWindow
*/
@VTID(72)
int undockWindow();
/**
* method DockWindow
*/
@VTID(73)
int dockWindow();
/**
* method PlaceWindowOnTop
*/
@VTID(74)
int placeWindowOnTop();
/**
* method PlaceWindowOnBottom
*/
@VTID(75)
int placeWindowOnBottom();
/**
* method MinimizeWindow
*/
@VTID(76)
int minimizeWindow();
/**
* method MaximizeWindow
*/
@VTID(77)
int maximizeWindow();
/**
* method RestoreWindow
*/
@VTID(78)
int restoreWindow();
/**
* method ShowTitleBar
*/
@VTID(79)
int showTitleBar();
/**
* method HideTitleBar
*/
@VTID(80)
int hideTitleBar();
/**
* method EnableSizingBorder
*/
@VTID(81)
int enableSizingBorder();
/**
* method DisableSizingBorder
*/
@VTID(82)
int disableSizingBorder();
/**
* method FullScreenWindow
*/
@VTID(83)
int fullScreenWindow();
/**
* method FocusWindow
*/
@VTID(84)
int focusWindow();
/**
* method IsWindowDocked
*/
@VTID(85)
boolean isWindowDocked();
/**
* method GetSessionWidth
*/
@VTID(86)
int getSessionWidth();
/**
* method GetSessionHeight
*/
@VTID(87)
int getSessionHeight();
/**
* method GetSessionColorDepth
*/
@VTID(88)
int getSessionColorDepth();
/**
* method GetScreenWidth
*/
@VTID(89)
int getScreenWidth();
/**
* method GetScreenHeight
*/
@VTID(90)
int getScreenHeight();
/**
* method GetScreenColorDepth
*/
@VTID(91)
int getScreenColorDepth();
/**
* method NewWindow
*/
@VTID(92)
int newWindow(
int xPos,
int yPos,
int width,
int height,
int flags);
/**
* method DeleteWindow
*/
@VTID(93)
int deleteWindow();
/**
* method GetErrorMessage
*/
@VTID(94)
java.lang.String getErrorMessage(
int errCode);
/**
* method GetClientErrorMessage
*/
@VTID(95)
java.lang.String getClientErrorMessage(
int errCode);
/**
* method EnableKeyboardInput
*/
@VTID(96)
int enableKeyboardInput();
/**
* method DisableKeyboardInput
*/
@VTID(97)
int disableKeyboardInput();
/**
* method IsKeyboardInputEnabled
*/
@VTID(98)
boolean isKeyboardInputEnabled();
/**
* method EnableMouseInput
*/
@VTID(99)
int enableMouseInput();
/**
* method DisableMouseInput
*/
@VTID(100)
int disableMouseInput();
/**
* method IsMouseInputEnabled
*/
@VTID(101)
boolean isMouseInputEnabled();
/**
* method GetClientNetworkName
*/
@VTID(102)
java.lang.String getClientNetworkName();
/**
* method GetClientAddressCount
*/
@VTID(103)
int getClientAddressCount();
/**
* method GetClientAddress
*/
@VTID(104)
java.lang.String getClientAddress(
int index);
/**
* method AttachSession
*/
@VTID(105)
int attachSession(
java.lang.String pSessionId);
/**
* method DetachSession
*/
@VTID(106)
int detachSession(
java.lang.String pSessionId);
/**
* method GetCachedSessionCount
*/
@VTID(107)
int getCachedSessionCount();
/**
* method IsSessionAttached
*/
@VTID(108)
boolean isSessionAttached(
java.lang.String pSessionId);
/**
* method IsSessionDetached
*/
@VTID(109)
boolean isSessionDetached(
java.lang.String pSessionId);
/**
* method IsSessionRunning
*/
@VTID(110)
boolean isSessionRunning(
java.lang.String pSessionId);
/**
* method SetSessionId
*/
@VTID(111)
int setSessionId(
java.lang.String pSessionId);
@VTID(112)
int readyState();
@VTID(113)
void readyState(
int state);
/**
* property Address
*/
@VTID(114)
java.lang.String address();
/**
* property Address
*/
@VTID(115)
void address(
java.lang.String pVal);
/**
* property Application
*/
@VTID(116)
java.lang.String application();
/**
* property Application
*/
@VTID(117)
void application(
java.lang.String pVal);
/**
* property AudioBandwidthLimit
*/
@VTID(118)
de.jkitberatung.ica.wsh.ICASoundQuality audioBandwidthLimit();
/**
* property AudioBandwidthLimit
*/
@VTID(119)
void audioBandwidthLimit(
de.jkitberatung.ica.wsh.ICASoundQuality pVal);
/**
* property Border
*/
@VTID(120)
int border();
/**
* property Border
*/
@VTID(121)
void border(
int pVal);
/**
* property CDMAllowed
*/
@VTID(122)
boolean cdmAllowed();
/**
* property CDMAllowed
*/
@VTID(123)
void cdmAllowed(
boolean pVal);
/**
* property ClientAudio
*/
@VTID(124)
boolean clientAudio();
/**
* property ClientAudio
*/
@VTID(125)
void clientAudio(
boolean pVal);
/**
* property ClientName
*/
@VTID(126)
java.lang.String clientName();
/**
* property ClientName
*/
@VTID(127)
void clientName(
java.lang.String pVal);
/**
* property COMAllowed
*/
@VTID(128)
boolean comAllowed();
/**
* property COMAllowed
*/
@VTID(129)
void comAllowed(
boolean pVal);
/**
* property Compress
*/
@VTID(130)
boolean compress();
/**
* property Compress
*/
@VTID(131)
void compress(
boolean pVal);
/**
* property Connected
*/
@VTID(132)
boolean connected();
/**
* property ConnectionEntry
*/
@VTID(133)
java.lang.String connectionEntry();
/**
* property ConnectionEntry
*/
@VTID(134)
void connectionEntry(
java.lang.String pVal);
/**
* property CPMAllowed
*/
@VTID(135)
boolean cpmAllowed();
/**
* property CPMAllowed
*/
@VTID(136)
void cpmAllowed(
boolean pVal);
/**
* property CustomMessage
*/
@VTID(137)
java.lang.String customMessage();
/**
* property CustomMessage
*/
@VTID(138)
void customMessage(
java.lang.String pVal);
/**
* property Description
*/
@VTID(139)
java.lang.String description();
/**
* property Description
*/
@VTID(140)
void description(
java.lang.String pVal);
/**
* property DesiredColor
*/
@VTID(141)
de.jkitberatung.ica.wsh.ICAColorDepth desiredColor();
/**
* property DesiredColor
*/
@VTID(142)
void desiredColor(
de.jkitberatung.ica.wsh.ICAColorDepth pVal);
/**
* property DesiredHRes
*/
@VTID(143)
int desiredHRes();
/**
* property DesiredHRes
*/
@VTID(144)
void desiredHRes(
int pVal);
/**
* property DesiredVRes
*/
@VTID(145)
int desiredVRes();
/**
* property DesiredVRes
*/
@VTID(146)
void desiredVRes(
int pVal);
/**
* property Domain
*/
@VTID(147)
java.lang.String domain();
/**
* property Domain
*/
@VTID(148)
void domain(
java.lang.String pVal);
/**
* property Encrypt
*/
@VTID(149)
boolean encrypt();
/**
* property Encrypt
*/
@VTID(150)
void encrypt(
boolean pVal);
/**
* property Height
*/
@VTID(151)
int height();
/**
* property ICAFile
*/
@VTID(152)
java.lang.String icaFile();
/**
* property ICAFile
*/
@VTID(153)
void icaFile(
java.lang.String pVal);
/**
* property IconIndex
*/
@VTID(154)
int iconIndex();
/**
* property IconIndex
*/
@VTID(155)
void iconIndex(
int pVal);
/**
* property IconPath
*/
@VTID(156)
java.lang.String iconPath();
/**
* property IconPath
*/
@VTID(157)
void iconPath(
java.lang.String pVal);
/**
* property InitialProgram
*/
@VTID(158)
java.lang.String initialProgram();
/**
* property InitialProgram
*/
@VTID(159)
void initialProgram(
java.lang.String pVal);
/**
* property IPXBrowserAddress
*/
@VTID(160)
java.lang.String ipxBrowserAddress();
/**
* property IPXBrowserAddress
*/
@VTID(161)
void ipxBrowserAddress(
java.lang.String pVal);
/**
* property NetbiosBrowserAddress
*/
@VTID(162)
java.lang.String netbiosBrowserAddress();
/**
* property NetbiosBrowserAddress
*/
@VTID(163)
void netbiosBrowserAddress(
java.lang.String pVal);
/**
* property NotificationReason
*/
@VTID(164)
de.jkitberatung.ica.wsh.ICAEvent notificationReason();
/**
* property PersistentCacheEnabled
*/
@VTID(165)
boolean persistentCacheEnabled();
/**
* property PersistentCacheEnabled
*/
@VTID(166)
void persistentCacheEnabled(
boolean pVal);
/**
* property ProtocolSupport
*/
@VTID(167)
java.lang.String protocolSupport();
/**
* property ProtocolSupport
*/
@VTID(168)
void protocolSupport(
java.lang.String pVal);
/**
* property Reliable
*/
@VTID(169)
boolean reliable();
/**
* property Reliable
*/
@VTID(170)
void reliable(
boolean pVal);
/**
* property SessionEndAction
*/
@VTID(171)
de.jkitberatung.ica.wsh.ICASessionEndAction sessionEndAction();
/**
* property SessionEndAction
*/
@VTID(172)
void sessionEndAction(
de.jkitberatung.ica.wsh.ICASessionEndAction pVal);
/**
* property Start
*/
@VTID(173)
boolean start();
/**
* property Start
*/
@VTID(174)
void start(
boolean pVal);
/**
* property TCPBrowserAddress
*/
@VTID(175)
java.lang.String tcpBrowserAddress();
/**
* property TCPBrowserAddress
*/
@VTID(176)
void tcpBrowserAddress(
java.lang.String pVal);
/**
* property TransportDriver
*/
@VTID(177)
java.lang.String transportDriver();
/**
* property TransportDriver
*/
@VTID(178)
void transportDriver(
java.lang.String pVal);
/**
* property UIActive
*/
@VTID(179)
boolean uiActive();
/**
* property UIActive
*/
@VTID(180)
void uiActive(
boolean pVal);
/**
* property UpdatesAllowed
*/
@VTID(181)
boolean updatesAllowed();
/**
* property UpdatesAllowed
*/
@VTID(182)
void updatesAllowed(
boolean pVal);
/**
* property Username
*/
@VTID(183)
java.lang.String username();
/**
* property Username
*/
@VTID(184)
void username(
java.lang.String pVal);
/**
* property Version
*/
@VTID(185)
java.lang.String version();
/**
* property VSLAllowed
*/
@VTID(186)
boolean vslAllowed();
/**
* property VSLAllowed
*/
@VTID(187)
void vslAllowed(
boolean pVal);
/**
* property Width
*/
@VTID(188)
int width();
/**
* property WinstationDriver
*/
@VTID(189)
java.lang.String winstationDriver();
/**
* property WinstationDriver
*/
@VTID(190)
void winstationDriver(
java.lang.String pVal);
/**
* property WorkDirectory
*/
@VTID(191)
java.lang.String workDirectory();
/**
* property WorkDirectory
*/
@VTID(192)
void workDirectory(
java.lang.String pVal);
/**
* property AppsrvIni
*/
@VTID(193)
java.lang.String appsrvIni();
/**
* property AppsrvIni
*/
@VTID(194)
void appsrvIni(
java.lang.String pVal);
/**
* property ModuleIni
*/
@VTID(195)
java.lang.String moduleIni();
/**
* property ModuleIni
*/
@VTID(196)
void moduleIni(
java.lang.String pVal);
/**
* property WfclientIni
*/
@VTID(197)
java.lang.String wfclientIni();
/**
* property WfclientIni
*/
@VTID(198)
void wfclientIni(
java.lang.String pVal);
/**
* property ClientPath
*/
@VTID(199)
java.lang.String clientPath();
/**
* property ClientVersion
*/
@VTID(200)
java.lang.String clientVersion();
/**
* property LogAppend
*/
@VTID(201)
boolean logAppend();
/**
* property LogAppend
*/
@VTID(202)
void logAppend(
boolean pVal);
/**
* property LogConnect
*/
@VTID(203)
boolean logConnect();
/**
* property LogConnect
*/
@VTID(204)
void logConnect(
boolean pVal);
/**
* property LogErrors
*/
@VTID(205)
boolean logErrors();
/**
* property LogErrors
*/
@VTID(206)
void logErrors(
boolean pVal);
/**
* property LogFile
*/
@VTID(207)
java.lang.String logFile();
/**
* property LogFile
*/
@VTID(208)
void logFile(
java.lang.String pVal);
/**
* property LogFlush
*/
@VTID(209)
boolean logFlush();
/**
* property LogFlush
*/
@VTID(210)
void logFlush(
boolean pVal);
/**
* property LogKeyboard
*/
@VTID(211)
boolean logKeyboard();
/**
* property LogKeyboard
*/
@VTID(212)
void logKeyboard(
boolean pVal);
/**
* property LogReceive
*/
@VTID(213)
boolean logReceive();
/**
* property LogReceive
*/
@VTID(214)
void logReceive(
boolean pVal);
/**
* property LogTransmit
*/
@VTID(215)
boolean logTransmit();
/**
* property LogTransmit
*/
@VTID(216)
void logTransmit(
boolean pVal);
/**
* property Title
*/
@VTID(217)
java.lang.String title();
/**
* property Title
*/
@VTID(218)
void title(
java.lang.String pVal);
/**
* property Launch
*/
@VTID(219)
boolean launch();
/**
* property Launch
*/
@VTID(220)
void launch(
boolean pVal);
/**
* property BackgroundColor
*/
@VTID(221)
int backgroundColor();
/**
* property BackgroundColor
*/
@VTID(222)
void backgroundColor(
int pVal);
/**
* property BorderColor
*/
@VTID(223)
int borderColor();
/**
* property BorderColor
*/
@VTID(224)
void borderColor(
int pVal);
/**
* property TextColor
*/
@VTID(225)
int textColor();
/**
* property TextColor
*/
@VTID(226)
void textColor(
int pVal);
/**
* property EncryptionLevelSession
*/
@VTID(227)
java.lang.String encryptionLevelSession();
/**
* property EncryptionLevelSession
*/
@VTID(228)
void encryptionLevelSession(
java.lang.String pVal);
/**
* property HttpBrowserAddress
*/
@VTID(229)
java.lang.String httpBrowserAddress();
/**
* property HttpBrowserAddress
*/
@VTID(230)
void httpBrowserAddress(
java.lang.String pVal);
/**
* property BrowserProtocol
*/
@VTID(231)
java.lang.String browserProtocol();
/**
* property BrowserProtocol
*/
@VTID(232)
void browserProtocol(
java.lang.String pVal);
/**
* property LocHTTPBrowserAddress
*/
@VTID(233)
java.lang.String locHTTPBrowserAddress();
/**
* property LocHTTPBrowserAddress
*/
@VTID(234)
void locHTTPBrowserAddress(
java.lang.String pVal);
/**
* property LocIPXBrowserAddress
*/
@VTID(235)
java.lang.String locIPXBrowserAddress();
/**
* property LocIPXBrowserAddress
*/
@VTID(236)
void locIPXBrowserAddress(
java.lang.String pVal);
/**
* property LocNETBIOSBrowserAddress
*/
@VTID(237)
java.lang.String locNETBIOSBrowserAddress();
/**
* property LocNETBIOSBrowserAddress
*/
@VTID(238)
void locNETBIOSBrowserAddress(
java.lang.String pVal);
/**
* property LocTCPBrowserAddress
*/
@VTID(239)
java.lang.String locTCPBrowserAddress();
/**
* property LocTCPBrowserAddress
*/
@VTID(240)
void locTCPBrowserAddress(
java.lang.String pVal);
/**
* property DoNotUseDefaultCSL
*/
@VTID(241)
boolean doNotUseDefaultCSL();
/**
* property DoNotUseDefaultCSL
*/
@VTID(242)
void doNotUseDefaultCSL(
boolean pVal);
/**
* property ICAPortNumber
*/
@VTID(243)
int icaPortNumber();
/**
* property ICAPortNumber
*/
@VTID(244)
void icaPortNumber(
int pVal);
/**
* property KeyboardTimer
*/
@VTID(245)
int keyboardTimer();
/**
* property KeyboardTimer
*/
@VTID(246)
void keyboardTimer(
int pVal);
/**
* property MouseTimer
*/
@VTID(247)
int mouseTimer();
/**
* property MouseTimer
*/
@VTID(248)
void mouseTimer(
int pVal);
/**
* property Scrollbars
*/
@VTID(249)
boolean scrollbars();
/**
* property Scrollbars
*/
@VTID(250)
void scrollbars(
boolean pVal);
/**
* property ScalingHeight
*/
@VTID(251)
int scalingHeight();
/**
* property ScalingHeight
*/
@VTID(252)
void scalingHeight(
int pVal);
/**
* property ScalingMode
*/
@VTID(253)
de.jkitberatung.ica.wsh.ICAScalingMode scalingMode();
/**
* property ScalingMode
*/
@VTID(254)
void scalingMode(
de.jkitberatung.ica.wsh.ICAScalingMode pVal);
/**
* property ScalingPercent
*/
@VTID(255)
int scalingPercent();
/**
* property ScalingPercent
*/
@VTID(256)
void scalingPercent(
int pVal);
/**
* property ScalingWidth
*/
@VTID(257)
int scalingWidth();
/**
* property ScalingWidth
*/
@VTID(258)
void scalingWidth(
int pVal);
/**
* property VirtualChannels
*/
@VTID(259)
java.lang.String virtualChannels();
/**
* property VirtualChannels
*/
@VTID(260)
void virtualChannels(
java.lang.String pVal);
/**
* property UseAlternateAddress
*/
@VTID(261)
int useAlternateAddress();
/**
* property UseAlternateAddress
*/
@VTID(262)
void useAlternateAddress(
int pVal);
/**
* property BrowserRetry
*/
@VTID(263)
int browserRetry();
/**
* property BrowserRetry
*/
@VTID(264)
void browserRetry(
int pVal);
/**
* property BrowserTimeout
*/
@VTID(265)
int browserTimeout();
/**
* property BrowserTimeout
*/
@VTID(266)
void browserTimeout(
int pVal);
/**
* property LanaNumber
*/
@VTID(267)
int lanaNumber();
/**
* property LanaNumber
*/
@VTID(268)
void lanaNumber(
int pVal);
/**
* property ICASOCKSProtocolVersion
*/
@VTID(269)
int icasocksProtocolVersion();
/**
* property ICASOCKSProtocolVersion
*/
@VTID(270)
void icasocksProtocolVersion(
int pVal);
/**
* property ICASOCKSProxyHost
*/
@VTID(271)
java.lang.String icasocksProxyHost();
/**
* property ICASOCKSProxyHost
*/
@VTID(272)
void icasocksProxyHost(
java.lang.String pVal);
/**
* property ICASOCKSProxyPortNumber
*/
@VTID(273)
int icasocksProxyPortNumber();
/**
* property ICASOCKSProxyPortNumber
*/
@VTID(274)
void icasocksProxyPortNumber(
int pVal);
/**
* property ICASOCKSRFC1929Username
*/
@VTID(275)
java.lang.String icasocksrfC1929Username();
/**
* property ICASOCKSRFC1929Username
*/
@VTID(276)
void icasocksrfC1929Username(
java.lang.String pVal);
/**
* property ICASOCKSTimeout
*/
@VTID(277)
int icasocksTimeout();
/**
* property ICASOCKSTimeout
*/
@VTID(278)
void icasocksTimeout(
int pVal);
/**
* property SSLEnable
*/
@VTID(279)
boolean sslEnable();
/**
* property SSLEnable
*/
@VTID(280)
void sslEnable(
boolean pVal);
/**
* property SSLProxyHost
*/
@VTID(281)
java.lang.String sslProxyHost();
/**
* property SSLProxyHost
*/
@VTID(282)
void sslProxyHost(
java.lang.String pVal);
/**
* property SSLCiphers
*/
@VTID(283)
java.lang.String sslCiphers();
/**
* property SSLCiphers
*/
@VTID(284)
void sslCiphers(
java.lang.String pVal);
/**
* property SSLNoCACerts
*/
@VTID(285)
int sslNoCACerts();
/**
* property SSLNoCACerts
*/
@VTID(286)
void sslNoCACerts(
int pVal);
/**
* property SSLCommonName
*/
@VTID(287)
java.lang.String sslCommonName();
/**
* property SSLCommonName
*/
@VTID(288)
void sslCommonName(
java.lang.String pVal);
/**
* property AUTHUsername
*/
@VTID(289)
java.lang.String authUsername();
/**
* property AUTHUsername
*/
@VTID(290)
void authUsername(
java.lang.String pVal);
/**
* property XmlAddressResolutionType
*/
@VTID(291)
java.lang.String xmlAddressResolutionType();
/**
* property XmlAddressResolutionType
*/
@VTID(292)
void xmlAddressResolutionType(
java.lang.String pVal);
/**
* property AutoScale
*/
@VTID(293)
boolean autoScale();
/**
* property AutoScale
*/
@VTID(294)
void autoScale(
boolean pVal);
/**
* property AutoAppResize
*/
@VTID(295)
boolean autoAppResize();
/**
* property AutoAppResize
*/
@VTID(296)
void autoAppResize(
boolean pVal);
/**
* property Hotkey1Char
*/
@VTID(297)
java.lang.String hotkey1Char();
/**
* property Hotkey1Char
*/
@VTID(298)
void hotkey1Char(
java.lang.String pVal);
/**
* property Hotkey1Shift
*/
@VTID(299)
java.lang.String hotkey1Shift();
/**
* property Hotkey1Shift
*/
@VTID(300)
void hotkey1Shift(
java.lang.String pVal);
/**
* property Hotkey2Char
*/
@VTID(301)
java.lang.String hotkey2Char();
/**
* property Hotkey2Char
*/
@VTID(302)
void hotkey2Char(
java.lang.String pVal);
/**
* property Hotkey2Shift
*/
@VTID(303)
java.lang.String hotkey2Shift();
/**
* property Hotkey2Shift
*/
@VTID(304)
void hotkey2Shift(
java.lang.String pVal);
/**
* property Hotkey3Char
*/
@VTID(305)
java.lang.String hotkey3Char();
/**
* property Hotkey3Char
*/
@VTID(306)
void hotkey3Char(
java.lang.String pVal);
/**
* property Hotkey3Shift
*/
@VTID(307)
java.lang.String hotkey3Shift();
/**
* property Hotkey3Shift
*/
@VTID(308)
void hotkey3Shift(
java.lang.String pVal);
/**
* property Hotkey4Char
*/
@VTID(309)
java.lang.String hotkey4Char();
/**
* property Hotkey4Char
*/
@VTID(310)
void hotkey4Char(
java.lang.String pVal);
/**
* property Hotkey4Shift
*/
@VTID(311)
java.lang.String hotkey4Shift();
/**
* property Hotkey4Shift
*/
@VTID(312)
void hotkey4Shift(
java.lang.String pVal);
/**
* property Hotkey5Char
*/
@VTID(313)
java.lang.String hotkey5Char();
/**
* property Hotkey5Char
*/
@VTID(314)
void hotkey5Char(
java.lang.String pVal);
/**
* property Hotkey5Shift
*/
@VTID(315)
java.lang.String hotkey5Shift();
/**
* property Hotkey5Shift
*/
@VTID(316)
void hotkey5Shift(
java.lang.String pVal);
/**
* property Hotkey6Char
*/
@VTID(317)
java.lang.String hotkey6Char();
/**
* property Hotkey6Char
*/
@VTID(318)
void hotkey6Char(
java.lang.String pVal);
/**
* property Hotkey6Shift
*/
@VTID(319)
java.lang.String hotkey6Shift();
/**
* property Hotkey6Shift
*/
@VTID(320)
void hotkey6Shift(
java.lang.String pVal);
/**
* property Hotkey7Char
*/
@VTID(321)
java.lang.String hotkey7Char();
/**
* property Hotkey7Char
*/
@VTID(322)
void hotkey7Char(
java.lang.String pVal);
/**
* property Hotkey7Shift
*/
@VTID(323)
java.lang.String hotkey7Shift();
/**
* property Hotkey7Shift
*/
@VTID(324)
void hotkey7Shift(
java.lang.String pVal);
/**
* property Hotkey8Char
*/
@VTID(325)
java.lang.String hotkey8Char();
/**
* property Hotkey8Char
*/
@VTID(326)
void hotkey8Char(
java.lang.String pVal);
/**
* property Hotkey8Shift
*/
@VTID(327)
java.lang.String hotkey8Shift();
/**
* property Hotkey8Shift
*/
@VTID(328)
void hotkey8Shift(
java.lang.String pVal);
/**
* property Hotkey9Char
*/
@VTID(329)
java.lang.String hotkey9Char();
/**
* property Hotkey9Char
*/
@VTID(330)
void hotkey9Char(
java.lang.String pVal);
/**
* property Hotkey9Shift
*/
@VTID(331)
java.lang.String hotkey9Shift();
/**
* property Hotkey9Shift
*/
@VTID(332)
void hotkey9Shift(
java.lang.String pVal);
/**
* property Hotkey10Char
*/
@VTID(333)
java.lang.String hotkey10Char();
/**
* property Hotkey10Char
*/
@VTID(334)
void hotkey10Char(
java.lang.String pVal);
/**
* property Hotkey10Shift
*/
@VTID(335)
java.lang.String hotkey10Shift();
/**
* property Hotkey10Shift
*/
@VTID(336)
void hotkey10Shift(
java.lang.String pVal);
/**
* property ControlWindowText
*/
@VTID(337)
java.lang.String controlWindowText();
/**
* property ControlWindowText
*/
@VTID(338)
void controlWindowText(
java.lang.String pVal);
/**
* property CacheICAFile
*/
@VTID(339)
boolean cacheICAFile();
/**
* property CacheICAFile
*/
@VTID(340)
void cacheICAFile(
boolean pVal);
/**
* property ScreenPercent
*/
@VTID(341)
int screenPercent();
/**
* property ScreenPercent
*/
@VTID(342)
void screenPercent(
int pVal);
/**
* property TWIMode
*/
@VTID(343)
boolean twiMode();
/**
* property TWIMode
*/
@VTID(344)
void twiMode(
boolean pVal);
/**
* property TransportReconnectEnabled
*/
@VTID(345)
boolean transportReconnectEnabled();
/**
* property TransportReconnectEnabled
*/
@VTID(346)
void transportReconnectEnabled(
boolean pVal);
/**
* property TransportReconnectDelay
*/
@VTID(347)
int transportReconnectDelay();
/**
* property TransportReconnectDelay
*/
@VTID(348)
void transportReconnectDelay(
int pVal);
/**
* property TransportReconnectRetries
*/
@VTID(349)
int transportReconnectRetries();
/**
* property TransportReconnectRetries
*/
@VTID(350)
void transportReconnectRetries(
int pVal);
/**
* property AutoLogonAllowed
*/
@VTID(351)
boolean autoLogonAllowed();
/**
* property AutoLogonAllowed
*/
@VTID(352)
void autoLogonAllowed(
boolean pVal);
/**
* property EnableSessionSharingClient
*/
@VTID(353)
boolean enableSessionSharingClient();
/**
* property EnableSessionSharingClient
*/
@VTID(354)
void enableSessionSharingClient(
boolean pVal);
/**
* property SessionSharingName
*/
@VTID(355)
java.lang.String sessionSharingName();
/**
* property SessionSharingName
*/
@VTID(356)
void sessionSharingName(
java.lang.String pVal);
/**
* property SessionSharingLaunchOnly
*/
@VTID(357)
boolean sessionSharingLaunchOnly();
/**
* property SessionSharingLaunchOnly
*/
@VTID(358)
void sessionSharingLaunchOnly(
boolean pVal);
/**
* property DisableCtrlAltDel
*/
@VTID(359)
boolean disableCtrlAltDel();
/**
* property DisableCtrlAltDel
*/
@VTID(360)
void disableCtrlAltDel(
boolean pVal);
/**
* property SessionCacheEnable
*/
@VTID(361)
boolean sessionCacheEnable();
/**
* property SessionCacheEnable
*/
@VTID(362)
void sessionCacheEnable(
boolean pVal);
/**
* property SessionCacheTimeout
*/
@VTID(363)
int sessionCacheTimeout();
/**
* property SessionCacheTimeout
*/
@VTID(364)
void sessionCacheTimeout(
int pVal);
/**
* property session
*/
@VTID(365)
de.jkitberatung.ica.wsh.ISession session();
/**
* property OutputMode
*/
@VTID(366)
de.jkitberatung.ica.wsh.OutputMode outputMode();
/**
* property OutputMode
*/
@VTID(367)
void outputMode(
de.jkitberatung.ica.wsh.OutputMode pVal);
/**
* property SessionExitTimeout
*/
@VTID(368)
int sessionExitTimeout();
/**
* property SessionExitTimeout
*/
@VTID(369)
void sessionExitTimeout(
int pVal);
/**
* property EnableSessionSharingHost
*/
@VTID(370)
boolean enableSessionSharingHost();
/**
* property EnableSessionSharingHost
*/
@VTID(371)
void enableSessionSharingHost(
boolean pVal);
/**
* property LongCommandLine
*/
@VTID(372)
java.lang.String longCommandLine();
/**
* property LongCommandLine
*/
@VTID(373)
void longCommandLine(
java.lang.String pVal);
/**
* property TWIDisableSessionSharing
*/
@VTID(374)
boolean twiDisableSessionSharing();
/**
* property TWIDisableSessionSharing
*/
@VTID(375)
void twiDisableSessionSharing(
boolean pVal);
/**
* property SessionSharingKey
*/
@VTID(376)
java.lang.String sessionSharingKey();
/**
* property SessionSharingKey
*/
@VTID(377)
void sessionSharingKey(
java.lang.String pVal);
/**
* method DisconnectSessions
*/
@VTID(378)
int disconnectSessions(
java.lang.String pGroupId);
/**
* method LogoffSessions
*/
@VTID(379)
int logoffSessions(
java.lang.String pGroupId);
/**
* method SetSessionGroupId
*/
@VTID(380)
int setSessionGroupId(
java.lang.String pGroupId);
/**
* method GetSessionHandle
*/
@VTID(381)
int getSessionHandle();
/**
* method SwitchSession
*/
@VTID(382)
int switchSession(
int hSession);
/**
* method GetSessionCount
*/
@VTID(383)
int getSessionCount();
/**
* method GetSessionHandleByIndex
*/
@VTID(384)
int getSessionHandleByIndex(
int index);
/**
* method GetSessionGroupCount
*/
@VTID(385)
int getSessionGroupCount(
java.lang.String pGroupId);
/**
* property IPCLaunch
*/
@VTID(386)
boolean ipcLaunch();
/**
* property IPCLaunch
*/
@VTID(387)
void ipcLaunch(
boolean pVal);
/**
* property AudioDuringDetach
*/
@VTID(388)
boolean audioDuringDetach();
/**
* property AudioDuringDetach
*/
@VTID(389)
void audioDuringDetach(
boolean pVal);
/**
* property Hotkey11Char
*/
@VTID(390)
java.lang.String hotkey11Char();
/**
* property Hotkey11Char
*/
@VTID(391)
void hotkey11Char(
java.lang.String pVal);
/**
* property Hotkey11Shift
*/
@VTID(392)
java.lang.String hotkey11Shift();
/**
* property Hotkey11Shift
*/
@VTID(393)
void hotkey11Shift(
java.lang.String pVal);
/**
* method IsPassThrough
*/
@VTID(394)
boolean isPassThrough();
/**
* property VirtualCOMPortEmulation
*/
@VTID(395)
boolean virtualCOMPortEmulation();
/**
* property VirtualCOMPortEmulation
*/
@VTID(396)
void virtualCOMPortEmulation(
boolean pVal);
/**
* method SetSessionSize
*/
@VTID(397)
int setSessionSize(
int depth,
int hDesiredHres,
int hDesiredVres,
int bSingleMonitor);
/**
* method GetEngineWndHandle
*/
@VTID(398)
long getEngineWndHandle();
/**
* method CreateChannelComms
*/
@VTID(399)
boolean createChannelComms(
java.lang.String channelName,
java.lang.String pipeName);
/**
* method EnumerateCCMSessions
*/
@VTID(400)
int enumerateCCMSessions();
/**
* method StartMonitoringCCMSession
*/
@VTID(401)
void startMonitoringCCMSession(
java.lang.String ccmSessionID,
boolean bReserved);
/**
* method StopMonitoringCCMSession
*/
@VTID(402)
void stopMonitoringCCMSession(
java.lang.String ccmSessionID);
/**
* method GetCDMSecuritySettings
*/
@VTID(403)
int getCDMSecuritySettings();
/**
* method SetCDMSecuritySettings
*/
@VTID(404)
void setCDMSecuritySettings(
int secSetting);
/**
* method GetAudioInSecuritySettings
*/
@VTID(405)
int getAudioInSecuritySettings();
/**
* method SetAudioInSecuritySettings
*/
@VTID(406)
void setAudioInSecuritySettings(
int secSetting);
/**
* method GetFlashSecuritySettings
*/
@VTID(407)
int getFlashSecuritySettings();
/**
* method SetFlashSecuritySettings
*/
@VTID(408)
void setFlashSecuritySettings(
int secSetting);
}
| |
/*
* Copyright (c) 2010-2017 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.web.component.form.multivalue;
import com.evolveum.midpoint.gui.api.component.BasePanel;
import com.evolveum.midpoint.gui.api.model.NonEmptyModel;
import com.evolveum.midpoint.gui.api.util.WebComponentUtil;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import com.evolveum.midpoint.web.page.admin.configuration.component.EmptyOnBlurAjaxFormUpdatingBehaviour;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.ajax.markup.html.form.AjaxSubmitLink;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.ListView;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.StringResourceModel;
import org.jetbrains.annotations.Nullable;
import java.io.Serializable;
import java.util.Iterator;
import java.util.List;
/**
* @author shood
* */
public class MultiValueTextEditPanel<T extends Serializable> extends BasePanel<List<T>> {
private static final String ID_PLACEHOLDER_CONTAINER = "placeholderContainer";
private static final String ID_PLACEHOLDER_ADD = "placeholderAdd";
private static final String ID_REPEATER = "repeater";
private static final String ID_TEXT = "input";
private static final String ID_BUTTON_GROUP = "buttonGroup";
private static final String ID_ADD = "add";
private static final String ID_REMOVE = "delete";
private static final String ID_EDIT = "edit";
private static final String CSS_DISABLED = " disabled";
@Nullable private final IModel<T> selectedModel; // holding the selected item
public MultiValueTextEditPanel(String id, IModel<List<T>> model, IModel<T> selectedModel, boolean inputEnabled, boolean showPlaceholder,
NonEmptyModel<Boolean> readOnlyModel) {
super(id, model);
setOutputMarkupId(true);
this.selectedModel = selectedModel;
initLayout(inputEnabled, showPlaceholder, readOnlyModel);
}
private void initLayout(final boolean inputEnabled, final boolean showPlaceholder, final NonEmptyModel<Boolean> readOnlyModel) {
WebMarkupContainer placeholderContainer = new WebMarkupContainer(ID_PLACEHOLDER_CONTAINER);
placeholderContainer.setOutputMarkupPlaceholderTag(true);
placeholderContainer.setOutputMarkupPlaceholderTag(true);
placeholderContainer.add(new VisibleEnableBehaviour(){
@Override
public boolean isVisible() {
return showPlaceholder && (getModel().getObject() == null || getModel().getObject().isEmpty());
}
});
add(placeholderContainer);
AjaxLink<Void> placeholderAdd = new AjaxLink<Void>(ID_PLACEHOLDER_ADD) {
@Override
public void onClick(AjaxRequestTarget target) {
addValuePerformed(target);
}
};
placeholderAdd.add(new AttributeAppender("class", new IModel<String>() {
@Override
public String getObject() {
if (buttonsDisabled()) {
return " " + CSS_DISABLED;
}
return "";
}
}));
placeholderAdd.add(WebComponentUtil.visibleIfFalse(readOnlyModel));
placeholderAdd.setOutputMarkupId(true);
placeholderAdd.setOutputMarkupPlaceholderTag(true);
placeholderContainer.add(placeholderAdd);
ListView repeater = new ListView<T>(ID_REPEATER, getModel()) {
@Override
protected void populateItem(final ListItem<T> item) {
TextField text = new TextField<>(ID_TEXT, createTextModel(item.getModel()));
text.add(new EmptyOnBlurAjaxFormUpdatingBehaviour());
text.add(AttributeAppender.replace("placeholder", createEmptyItemPlaceholder()));
if (selectedModel != null && item.getModelObject() == selectedModel.getObject()) {
text.add(AttributeAppender.append("style", "background-color: #FFFFD0;")); // TODO color constant
}
if (!inputEnabled) {
text.add(new AttributeModifier("disabled", "disabled"));
}
item.add(text);
WebMarkupContainer buttonGroup = new WebMarkupContainer(ID_BUTTON_GROUP);
item.add(buttonGroup);
initButtons(buttonGroup, item, readOnlyModel);
}
};
repeater.setOutputMarkupId(true);
repeater.setOutputMarkupPlaceholderTag(true);
repeater.add(new VisibleEnableBehaviour(){
@Override
public boolean isVisible() {
return getModel().getObject() != null && !getModel().getObject().isEmpty();
}
});
add(repeater);
}
private void initButtons(WebMarkupContainer buttonGroup, final ListItem<T> item, NonEmptyModel<Boolean> readOnlyModel) {
AjaxSubmitLink edit = new AjaxSubmitLink(ID_EDIT) {
@Override
protected void onSubmit(AjaxRequestTarget target) {
editPerformed(target, item.getModelObject());
}
@Override
protected void onError(AjaxRequestTarget target) {
target.add(getPageBase().getFeedbackPanel());
}
};
edit.add(new AttributeAppender("class", new IModel<String>() {
@Override
public String getObject() {
if (buttonsDisabled()) {
return " " + CSS_DISABLED;
}
return "";
}
}));
buttonGroup.add(edit);
AjaxLink<Void> add = new AjaxLink<Void>(ID_ADD) {
@Override
public void onClick(AjaxRequestTarget target) {
addValuePerformed(target);
}
};
add.add(new AttributeAppender("class", getPlusClassModifier(item)));
add.add(WebComponentUtil.visibleIfFalse(readOnlyModel));
buttonGroup.add(add);
AjaxLink<Void> remove = new AjaxLink<Void>(ID_REMOVE) {
@Override
public void onClick(AjaxRequestTarget target) {
removeValuePerformed(target, item);
}
};
remove.add(new AttributeAppender("class", getMinusClassModifier()));
remove.add(WebComponentUtil.visibleIfFalse(readOnlyModel));
buttonGroup.add(remove);
}
protected String getPlusClassModifier(ListItem<T> item){
if(buttonsDisabled()){
return CSS_DISABLED;
}
int size = getModelObject().size();
if (size <= 1) {
return "";
}
if (item.getIndex() == size - 1) {
return "";
}
return CSS_DISABLED;
}
protected String getMinusClassModifier(){
if(buttonsDisabled()){
return CSS_DISABLED;
}
return "";
}
protected T createNewEmptyItem(){
return (T)"";
}
protected StringResourceModel createEmptyItemPlaceholder(){
return createStringResource("TextField.universal.placeholder");
}
protected void addValuePerformed(AjaxRequestTarget target){
List<T> objects = getModelObject();
T added = createNewEmptyItem();
objects.add(added);
performAddValueHook(target, added);
editPerformed(target, added);
target.add(this);
}
protected IModel<String> createTextModel(final IModel<T> model) {
return new IModel<String>() {
@Override
public String getObject() {
T obj = model.getObject();
return obj != null ? obj.toString() : null;
}
@Override
public void setObject(String object) {
model.setObject((T) object);
}
@Override
public void detach() {
}
};
}
protected void removeValuePerformed(AjaxRequestTarget target, ListItem<T> item){
List<T> objects = getModelObject();
Iterator<T> iterator = objects.iterator();
while (iterator.hasNext()) {
T object = iterator.next();
if (object.equals(item.getModelObject())) {
iterator.remove();
break;
}
}
performRemoveValueHook(target, item);
target.add(this);
}
/**
* Override to provide handling of edit event (edit button clicked)
* */
protected void editPerformed(AjaxRequestTarget target, T object){}
/**
* Override to provide the information about buttons enabled/disabled status
* */
protected boolean buttonsDisabled(){
return false;
}
/**
* Override to provide custom hook when adding new value
* */
protected void performAddValueHook(AjaxRequestTarget target, T added){}
/**
* Override to provide custom hook when removing value from list
* */
protected void performRemoveValueHook(AjaxRequestTarget target, ListItem<T> item){}
}
| |
/*******************************************************************************
*
* Copyright (C) 2015-2022 the BBoxDB project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package org.bboxdb.tools.cli;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.bboxdb.misc.Const;
/**
 * Assembles the Apache commons-cli {@link Options} definition for the BBoxDB
 * command line interface.
 */
public class OptionsHelper {

    private OptionsHelper() {
        // Static utility class - no instances
    }

    /**
     * Register a flag option (an option that takes no argument).
     *
     * @param options the option set to extend
     * @param name the option name (a {@code CLIParameter} constant)
     * @param description the help text
     */
    private static void addFlagOption(final Options options, final String name,
            final String description) {
        final Option option = Option.builder(name)
                .desc(description)
                .build();
        options.addOption(option);
    }

    /**
     * Register an option that carries exactly one argument.
     *
     * @param options the option set to extend
     * @param name the option name (a {@code CLIParameter} constant)
     * @param argName the argument name shown in the usage output
     * @param description the help text
     */
    private static void addArgumentOption(final Options options, final String name,
            final String argName, final String description) {
        final Option option = Option.builder(name)
                .hasArg()
                .argName(argName)
                .desc(description)
                .build();
        options.addOption(option);
    }

    /**
     * Build the command line options
     * @return the fully populated {@link Options} instance
     */
    public static Options buildOptions() {
        final Options options = new Options();

        // General switches
        addFlagOption(options, CLIParameter.HELP, "Show this help");
        addFlagOption(options, CLIParameter.VERBOSE, "Be verbose");

        // Action and cluster connection
        addArgumentOption(options, CLIParameter.ACTION, "action",
                "The CLI action to execute");
        addArgumentOption(options, CLIParameter.ZOOKEEPER_HOST, "zookeeperhost",
                "The Zookeeper endpoint to connect to (default: 127.0.0.1:2181)");
        addArgumentOption(options, CLIParameter.ZOOKEEPER_CLUSTER_NAME, "clustername",
                "The name of the cluster (default: mycluster)");

        // Distribution group settings
        addArgumentOption(options, CLIParameter.DISTRIBUTION_GROUP, "distributiongroup",
                "The distribution group");
        addArgumentOption(options, CLIParameter.DIMENSIONS, "dimensions",
                "The number of dimensions");
        addArgumentOption(options, CLIParameter.REPLICATION_FACTOR, "replicationfactor",
                "The replication factor");
        addArgumentOption(options, CLIParameter.MAX_REGION_SIZE, "max region size (in MB)",
                "Default: " + Const.DEFAULT_MAX_REGION_SIZE_IN_MB);
        addArgumentOption(options, CLIParameter.MIN_REGION_SIZE, "min region size (in MB)",
                "Default: " + Const.DEFAULT_MIN_REGION_SIZE_IN_MB);
        // Typo fix in help output: "ressource" -> "resource"
        addArgumentOption(options, CLIParameter.RESOURCE_PLACEMENT, "resource placement",
                "Default: " + Const.DEFAULT_PLACEMENT_STRATEGY);
        addArgumentOption(options, CLIParameter.RESOURCE_PLACEMENT_CONFIG, "resource placement config",
                "Default: " + Const.DEFAULT_PLACEMENT_CONFIG);
        addArgumentOption(options, CLIParameter.SPACE_PARTITIONER, "space partitioner",
                "Default: " + Const.DEFAULT_SPACE_PARTITIONER);
        addArgumentOption(options, CLIParameter.SPACE_PARTITIONER_CONFIG, "space partitioner configuration",
                "Default: " + Const.DEFAULT_SPACE_PARTITIONER_CONFIG);

        // Table settings
        addArgumentOption(options, CLIParameter.DUPLICATES, "duplicates",
                "Allow duplicates in the table, default: false");
        addArgumentOption(options, CLIParameter.TTL, "ttl",
                "The TTL of the tuple versions in milliseconds");
        addArgumentOption(options, CLIParameter.VERSIONS, "versions",
                "The amount of versions for a tuple");

        // Import / export
        addArgumentOption(options, CLIParameter.FILE, "file",
                "The file to read");
        addArgumentOption(options, CLIParameter.FORMAT, "format",
                "The format of the file");

        // Tuple settings
        addArgumentOption(options, CLIParameter.TABLE, "table",
                "The table to carry out the action");
        addArgumentOption(options, CLIParameter.KEY, "key",
                "The name of the key");
        addArgumentOption(options, CLIParameter.BOUNDING_BOX, "bounding box",
                "The bounding box of the tuple");
        addArgumentOption(options, CLIParameter.BOUNDING_BOX_PADDING, "bounding box padding",
                "The bounding box padding");
        addArgumentOption(options, CLIParameter.VALUE, "value",
                "The value of the tuple");
        addArgumentOption(options, CLIParameter.TIMESTAMP, "timestamp",
                "The version time stamp of the tuple");

        // Custom filter
        addArgumentOption(options, CLIParameter.CUSTOM_FILTER_CLASS, "filterclass",
                "The classname of the custom filter");
        addArgumentOption(options, CLIParameter.CUSTOM_FILTER_VALUE, "filtervalue",
                "The value for the custom filter");

        // Partitioning
        addArgumentOption(options, CLIParameter.PARTITIONS, "partitions",
                "The number of partitions in the prepartitions");

        return options;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.util.jndi.ExampleBean;
/**
* Unit test for IntrospectionSupport
*/
public class IntrospectionSupportTest extends ContextTestSupport {
public void testOverloadSetterChooseStringSetter() throws Exception {
    // A plain String value must be routed to the setBean(String) overload.
    final MyOverloadedBean target = new MyOverloadedBean();
    IntrospectionSupport.setProperty(context.getTypeConverter(), target, "bean", "James");
    assertEquals("James", target.getName());
}
public void testOverloadSetterChooseBeanSetter() throws Exception {
    // An ExampleBean value must be routed to the setBean(ExampleBean) overload.
    final ExampleBean value = new ExampleBean();
    value.setName("Claus");

    final MyOverloadedBean target = new MyOverloadedBean();
    IntrospectionSupport.setProperty(context.getTypeConverter(), target, "bean", value);
    assertEquals("Claus", target.getName());
}
public void testOverloadSetterChooseUsingTypeConverter() throws Exception {
    final MyOverloadedBean target = new MyOverloadedBean();
    // byte[] matches neither overload directly; the byte[] -> String type
    // converter should kick in and the setBean(String) setter should be called
    final Object value = "Willem".getBytes();
    IntrospectionSupport.setProperty(context.getTypeConverter(), target, "bean", value);
    assertEquals("Willem", target.getName());
}
/**
 * Fixture with two setBean overloads, used to verify that property injection
 * picks the right setter for the supplied value type.
 */
public class MyOverloadedBean {

    private ExampleBean bean;

    public void setBean(ExampleBean bean) {
        this.bean = bean;
    }

    public void setBean(String name) {
        final ExampleBean created = new ExampleBean();
        created.setName(name);
        bean = created;
    }

    public String getName() {
        return bean.getName();
    }
}
/**
 * Fixture whose setter follows the builder pattern: it returns {@code this}
 * instead of {@code void}.
 */
public class MyBuilderBean {

    private String name;

    // Builder-style setter: chainable, non-void return type.
    public MyBuilderBean setName(String name) {
        this.name = name;
        return this;
    }

    public String getName() {
        return name;
    }
}
// Deliberately empty subclass: verifies that the inherited builder-style
// setter is still detected as a setter on a subtype.
public class MyOtherBuilderBean extends MyBuilderBean {
}
public class MyOtherOtherBuilderBean extends MyOtherBuilderBean {
public MyOtherOtherBuilderBean setName(String name) {
super.setName(name);
return this;
}
}
public void testIsSetterBuilderPatternSupport() throws Exception {
    // Builder-style setters (returning this) count as setters only when the
    // builder-pattern flag is enabled.
    final Method base = MyBuilderBean.class.getMethod("setName", String.class);
    final Method inherited = MyOtherBuilderBean.class.getMethod("setName", String.class);
    final Method overridden = MyOtherOtherBuilderBean.class.getMethod("setName", String.class);

    assertFalse(IntrospectionSupport.isSetter(base, false));
    assertTrue(IntrospectionSupport.isSetter(base, true));

    assertFalse(IntrospectionSupport.isSetter(inherited, false));
    assertTrue(IntrospectionSupport.isSetter(inherited, true));

    assertFalse(IntrospectionSupport.isSetter(overridden, false));
    assertTrue(IntrospectionSupport.isSetter(overridden, true));
}
public void testHasProperties() throws Exception {
    // An immutable empty map never has properties, whatever the prefix.
    final Map<String, Object> empty = Collections.emptyMap();
    assertFalse(IntrospectionSupport.hasProperties(empty, null));
    assertFalse(IntrospectionSupport.hasProperties(empty, ""));
    assertFalse(IntrospectionSupport.hasProperties(empty, "foo."));

    // Same for a mutable map while it is still empty.
    final Map<String, Object> props = new HashMap<String, Object>();
    assertFalse(IntrospectionSupport.hasProperties(props, null));
    assertFalse(IntrospectionSupport.hasProperties(props, ""));
    assertFalse(IntrospectionSupport.hasProperties(props, "foo."));

    // An unprefixed key matches only the null/empty prefix.
    props.put("name", "Claus");
    assertTrue(IntrospectionSupport.hasProperties(props, null));
    assertTrue(IntrospectionSupport.hasProperties(props, ""));
    assertFalse(IntrospectionSupport.hasProperties(props, "foo."));

    // A "foo."-prefixed key additionally matches the "foo." prefix.
    props.put("foo.name", "Hadrian");
    assertTrue(IntrospectionSupport.hasProperties(props, null));
    assertTrue(IntrospectionSupport.hasProperties(props, ""));
    assertTrue(IntrospectionSupport.hasProperties(props, "foo."));
}
public void testGetProperties() throws Exception {
    // Populate a bean and extract all of its readable properties into a map.
    ExampleBean bean = new ExampleBean();
    bean.setName("Claus");
    bean.setPrice(10.0);

    Map<String, Object> map = new HashMap<String, Object>();
    IntrospectionSupport.getProperties(bean, map, null);
    assertEquals(3, map.size());

    assertEquals("Claus", map.get("name"));
    String price = map.get("price").toString();
    assertTrue(price.startsWith("10"));
    // id was never set; assertNull is the idiomatic form of assertEquals(null, ...)
    assertNull(map.get("id"));
}
public void testAnotherGetProperties() throws Exception {
    // Fill every property of the bean ...
    final Date date = new Date(0);
    final Collection<?> children = new ArrayList<Object>();
    final AnotherExampleBean bean = new AnotherExampleBean();
    bean.setId("123");
    bean.setName("Claus");
    bean.setPrice(10.0);
    bean.setDate(date);
    bean.setGoldCustomer(true);
    bean.setLittle(true);
    bean.setChildren(children);

    // ... and verify that all seven come back with the exact values.
    final Map<String, Object> extracted = new HashMap<String, Object>();
    IntrospectionSupport.getProperties(bean, extracted, null);
    assertEquals(7, extracted.size());
    assertEquals("123", extracted.get("id"));
    assertEquals("Claus", extracted.get("name"));
    assertTrue(extracted.get("price").toString().startsWith("10"));
    assertSame(date, extracted.get("date"));
    assertSame(children, extracted.get("children"));
    assertEquals(Boolean.TRUE, extracted.get("goldCustomer"));
    assertEquals(Boolean.TRUE, extracted.get("little"));
}
public void testGetPropertiesOptionPrefix() throws Exception {
    final ExampleBean bean = new ExampleBean();
    bean.setName("Claus");
    bean.setPrice(10.0);
    bean.setId("123");

    // Every extracted key must carry the requested "bean." prefix.
    final Map<String, Object> prefixed = new HashMap<String, Object>();
    IntrospectionSupport.getProperties(bean, prefixed, "bean.");
    assertEquals(3, prefixed.size());
    assertEquals("Claus", prefixed.get("bean.name"));
    assertEquals("123", prefixed.get("bean.id"));
    assertTrue(prefixed.get("bean.price").toString().startsWith("10"));
}
public void testGetProperty() throws Exception {
    final ExampleBean bean = new ExampleBean();
    bean.setId("123");
    bean.setName("Claus");
    bean.setPrice(10.0);

    // Reading a single property by name goes through the getter.
    assertEquals("Claus", IntrospectionSupport.getProperty(bean, "name"));
}
public void testSetProperty() throws Exception {
    final ExampleBean bean = new ExampleBean();
    bean.setId("123");
    bean.setName("Claus");
    bean.setPrice(10.0);

    // Writing a single property by name goes through the setter.
    IntrospectionSupport.setProperty(bean, "name", "James");
    assertEquals("James", bean.getName());
}
public void testAnotherGetProperty() throws Exception {
    final Date date = new Date(0);
    final Collection<?> children = new ArrayList<Object>();
    final AnotherExampleBean bean = new AnotherExampleBean();
    bean.setName("Claus");
    bean.setPrice(10.0);
    bean.setDate(date);
    bean.setGoldCustomer(true);
    bean.setLittle(true);
    bean.setChildren(children);

    // Object-valued properties must come back as the very same instances.
    assertEquals("Claus", IntrospectionSupport.getProperty(bean, "name"));
    assertSame(date, IntrospectionSupport.getProperty(bean, "date"));
    assertSame(children, IntrospectionSupport.getProperty(bean, "children"));
    assertEquals(Boolean.TRUE, IntrospectionSupport.getProperty(bean, "goldCustomer"));
    assertEquals(Boolean.TRUE, IntrospectionSupport.getProperty(bean, "little"));
}
public void testGetPropertyLocaleIndependent() throws Exception {
    // The Turkish locale is the classic trap for case-conversion-based
    // property-name handling (dotless i); introspection must be locale-safe.
    final Locale oldLocale = Locale.getDefault();
    Locale.setDefault(new Locale("tr", "TR"));
    try {
        final ExampleBean bean = new ExampleBean();
        bean.setName("Claus");
        bean.setPrice(10.0);
        bean.setId("1");

        assertEquals("Claus", IntrospectionSupport.getProperty(bean, "name"));
        assertEquals(10.0, IntrospectionSupport.getProperty(bean, "price"));
        assertEquals("1", IntrospectionSupport.getProperty(bean, "id"));
    } finally {
        // Always restore the default locale so other tests are unaffected.
        Locale.setDefault(oldLocale);
    }
}
public void testGetPropertyGetter() throws Exception {
    ExampleBean bean = new ExampleBean();
    bean.setName("Claus");
    bean.setPrice(10.0);

    // A known property resolves to its conventional getXxx method.
    Method getter = IntrospectionSupport.getPropertyGetter(ExampleBean.class, "name");
    assertEquals("getName", getter.getName());

    // An unknown property must fail with a descriptive message.
    try {
        IntrospectionSupport.getPropertyGetter(ExampleBean.class, "xxx");
        fail("Should have thrown exception");
    } catch (NoSuchMethodException e) {
        assertEquals("org.apache.camel.util.jndi.ExampleBean.getXxx()", e.getMessage());
    }
}
public void testGetPropertySetter() throws Exception {
    ExampleBean bean = new ExampleBean();
    bean.setName("Claus");
    bean.setPrice(10.0);

    // A known property resolves to its conventional setXxx method.
    Method setter = IntrospectionSupport.getPropertySetter(ExampleBean.class, "name");
    assertEquals("setName", setter.getName());

    // An unknown property must fail with a descriptive message.
    try {
        IntrospectionSupport.getPropertySetter(ExampleBean.class, "xxx");
        fail("Should have thrown exception");
    } catch (NoSuchMethodException e) {
        assertEquals("org.apache.camel.util.jndi.ExampleBean.setXxx", e.getMessage());
    }
}
public void testIsGetter() throws Exception {
    ExampleBean bean = new ExampleBean();

    // Plain getXxx() accessors are classified as getters and never as setters.
    Method getName = bean.getClass().getMethod("getName", (Class<?>[]) null);
    assertTrue(IntrospectionSupport.isGetter(getName));
    assertFalse(IntrospectionSupport.isSetter(getName));

    Method getPrice = bean.getClass().getMethod("getPrice", (Class<?>[]) null);
    assertTrue(IntrospectionSupport.isGetter(getPrice));
    assertFalse(IntrospectionSupport.isSetter(getPrice));
}
public void testIsSetter() throws Exception {
    ExampleBean bean = new ExampleBean();

    // One-argument setXxx(...) mutators are classified as setters and never as getters.
    Method setName = bean.getClass().getMethod("setName", String.class);
    assertFalse(IntrospectionSupport.isGetter(setName));
    assertTrue(IntrospectionSupport.isSetter(setName));

    Method setPrice = bean.getClass().getMethod("setPrice", double.class);
    assertFalse(IntrospectionSupport.isGetter(setPrice));
    assertTrue(IntrospectionSupport.isSetter(setPrice));
}
public void testOtherIsGetter() throws Exception {
    OtherExampleBean bean = new OtherExampleBean();

    // Both getXxx() and boolean isXxx() style accessors count as getters.
    Method customerId = bean.getClass().getMethod("getCustomerId", (Class<?>[]) null);
    assertTrue(IntrospectionSupport.isGetter(customerId));
    assertFalse(IntrospectionSupport.isSetter(customerId));

    Method goldCustomer = bean.getClass().getMethod("isGoldCustomer", (Class<?>[]) null);
    assertTrue(IntrospectionSupport.isGetter(goldCustomer));
    assertFalse(IntrospectionSupport.isSetter(goldCustomer));

    Method silverCustomer = bean.getClass().getMethod("isSilverCustomer", (Class<?>[]) null);
    assertTrue(IntrospectionSupport.isGetter(silverCustomer));
    assertFalse(IntrospectionSupport.isSetter(silverCustomer));

    Method company = bean.getClass().getMethod("getCompany", (Class<?>[]) null);
    assertTrue(IntrospectionSupport.isGetter(company));
    assertFalse(IntrospectionSupport.isSetter(company));

    // setupSomething(Object) merely starts with "set"; it is neither accessor kind.
    Method setupSomething = bean.getClass().getMethod("setupSomething", Object.class);
    assertFalse(IntrospectionSupport.isGetter(setupSomething));
    assertFalse(IntrospectionSupport.isSetter(setupSomething));
}
public void testOtherIsSetter() throws Exception {
    OtherExampleBean bean = new OtherExampleBean();

    // setXxx(...) mutators are setters regardless of primitive/boxed parameter type.
    Method customerId = bean.getClass().getMethod("setCustomerId", int.class);
    assertFalse(IntrospectionSupport.isGetter(customerId));
    assertTrue(IntrospectionSupport.isSetter(customerId));

    Method goldCustomer = bean.getClass().getMethod("setGoldCustomer", boolean.class);
    assertFalse(IntrospectionSupport.isGetter(goldCustomer));
    assertTrue(IntrospectionSupport.isSetter(goldCustomer));

    Method silverCustomer = bean.getClass().getMethod("setSilverCustomer", Boolean.class);
    assertFalse(IntrospectionSupport.isGetter(silverCustomer));
    assertTrue(IntrospectionSupport.isSetter(silverCustomer));

    Method company = bean.getClass().getMethod("setCompany", String.class);
    assertFalse(IntrospectionSupport.isGetter(company));
    assertTrue(IntrospectionSupport.isSetter(company));

    // setupSomething(Object) merely starts with "set"; it is neither accessor kind.
    Method setupSomething = bean.getClass().getMethod("setupSomething", Object.class);
    assertFalse(IntrospectionSupport.isGetter(setupSomething));
    assertFalse(IntrospectionSupport.isSetter(setupSomething));
}
public void testFindSetterMethodsOrderedByParameterType() throws Exception {
    // Overloaded setBean(...) setters must come back ordered by parameter type:
    // the specific ExampleBean variant before the generic String variant.
    List<Method> found = IntrospectionSupport.findSetterMethodsOrderedByParameterType(MyOverloadedBean.class, "bean", false);
    assertNotNull(found);
    assertEquals(2, found.size());
    assertEquals(ExampleBean.class, found.get(0).getParameterTypes()[0]);
    assertEquals(String.class, found.get(1).getParameterTypes()[0]);
}
}
| |
package cellsociety_team20;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import cell.Cell;
import parameter.IParameter;
import simulation.Simulation;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
/**
 * Reads a cell-society simulation configuration from an XML file and builds the
 * corresponding {@link Grid}, and writes a grid plus the current simulation
 * settings back out in the same XML format.
 *
 * Expected document shape: a root {@code <grid w=".." h="..">} element, one
 * {@code <simulation mode="..">} element, scalar parameter elements
 * (author, simulationSpeed, defaultState, ...), an optional {@code <parameters>}
 * element with named {@code <param>} children, and {@code <cell x=".." y="..">}
 * elements holding the state of each non-default cell.
 */
public class XMLEditor {
    // --- simulation parameters read from / written to the XML file ---
    private int mySimulationMode;
    private String myAuthor;
    private double mySimulationSpeed;
    private int myDefaultState;
    private int myCellShape;
    private int myGridEdge;
    private int myGridOutlineVisible;
    private int myRandomlyGenerated;
    private int myNumCellsOccupied;
    private int myOccupiedCellState;
    private Map<String, String> mySimParameters;
    private String myInputFileName;
    private Grid grid;

    /**
     * Loads a grid configuration from the XML file at the given path.
     *
     * @param inputFileName path of the XML configuration file
     */
    public XMLEditor(String inputFileName) {
        myInputFileName = inputFileName;
        readGridFromFile();
    }

    /**
     * Loads a grid configuration from the given XML file.
     *
     * @param inputFile the XML configuration file
     */
    public XMLEditor(File inputFile) {
        myInputFileName = inputFile.getAbsolutePath();
        readGridFromFile();
    }

    /**
     * Parses the configured XML file and builds the grid described in it.
     * On any parse/IO error the stack trace is printed and the JVM exits,
     * preserving the original fail-fast design (callers rely on a usable
     * grid after construction).
     */
    public void readGridFromFile() {
        initializeDefaults();
        try {
            File inputFile = new File(myInputFileName);
            DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
            DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
            Document doc = dBuilder.parse(inputFile);
            if (doc == null) {
                throw new IllegalArgumentException("File not found.");
            }
            doc.getDocumentElement().normalize();
            Element rootNode = (Element) doc.getElementsByTagName("grid").item(0);
            int gridHeight = Integer.parseInt(rootNode.getAttribute("h"));
            int gridWidth = Integer.parseInt(rootNode.getAttribute("w"));
            Element simulationElement = (Element) doc.getElementsByTagName("simulation").item(0);
            if (simulationElement != null) {
                mySimulationMode = Integer.parseInt(simulationElement.getAttribute("mode"));
            } else {
                throw new IllegalArgumentException("No simulation mode set");
            }
            extractParameters(doc);
            // BUG FIX: the randomly generated grid used to be built and then
            // unconditionally overwritten by the default grid on the next line;
            // the two constructions are now mutually exclusive.
            if (myRandomlyGenerated == 1) {
                grid = new GridSquare(gridWidth, gridHeight, myDefaultState, myNumCellsOccupied, myOccupiedCellState, true);
            } else {
                grid = new GridSquare(gridWidth, gridHeight, new Cell(myDefaultState));
            }
            grid.setWrap(myGridEdge);
            populateGrid(doc);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Writes the given grid and the current simulation settings to
     * {@code outputFile} in the same XML format {@link #readGridFromFile()} consumes.
     *
     * @param grid       the grid whose non-default cells are serialized
     * @param outputFile destination file
     * @param currentSim simulation whose parameters are serialized
     */
    public void writeGridToFile(Grid grid, File outputFile, Simulation currentSim) {
        try {
            DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
            DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
            Document doc = docBuilder.newDocument();
            Element root = doc.createElement("grid");
            doc.appendChild(root);
            root.setAttribute("w", Integer.toString(grid.getGridWidth()));
            root.setAttribute("h", Integer.toString(grid.getGridHeight()));
            Element simulation = doc.createElement("simulation");
            simulation.setAttribute("mode", Integer.toString(mySimulationMode));
            root.appendChild(simulation);
            writeSimulationParametersToXML(doc, simulation, currentSim);
            writeGridToXML(doc, root, grid);
            // Serialize the DOM tree to the output file.
            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            DOMSource dp = new DOMSource(doc);
            StreamResult result = new StreamResult(outputFile);
            transformer.transform(dp, result);
            System.out.println("File saved!");
        } catch (ParserConfigurationException pce) {
            pce.printStackTrace();
        } catch (TransformerException tfe) {
            tfe.printStackTrace();
        }
    }

    /** Appends a {@code <cell>} element for every cell not in the default state. */
    private void writeGridToXML(Document doc, Element root, Grid grid) {
        for (cell.Cell cell : grid.getCellIterator()) {
            if (cell.getState() != myDefaultState) {
                Element curCell = doc.createElement("cell");
                curCell.setAttribute("x", Integer.toString(cell.getX()));
                curCell.setAttribute("y", Integer.toString(cell.getY()));
                curCell.appendChild(doc.createTextNode(Integer.toString(cell.getState())));
                root.appendChild(curCell);
            }
        }
    }

    /** Writes the scalar simulation settings plus the per-simulation sub-parameters. */
    private void writeSimulationParametersToXML(Document doc, Element sim, Simulation currentSim) {
        writeStringToXML(doc, "author", sim, myAuthor);
        writeDoubleToXML(doc, "simulationSpeed", sim, mySimulationSpeed);
        writeIntegerToXML(doc, "defaultState", sim, myDefaultState);
        // writeStringToXML returns the created <parameters> element so the
        // sub-parameters can be nested inside it.
        writeSimulationSubParametersToXML(doc, writeStringToXML(doc, "parameters", sim, ""), currentSim);
        writeIntegerToXML(doc, "cellShape", sim, myCellShape);
        writeIntegerToXML(doc, "gridEdge", sim, myGridEdge);
        writeIntegerToXML(doc, "gridOutlineVisible", sim, myGridOutlineVisible);
        writeIntegerToXML(doc, "randomlyGenerated", sim, myRandomlyGenerated);
    }

    /** Writes one {@code <param name="..">} child per simulation parameter. */
    private void writeSimulationSubParametersToXML(Document doc, Element params, Simulation currentSim) {
        List<IParameter> parameterList = currentSim.getParameters();
        for (IParameter param : parameterList) {
            Element e = doc.createElement("param");
            e.setAttribute("name", param.getName());
            e.appendChild(doc.createTextNode(param.toString()));
            params.appendChild(e);
        }
    }

    /** Appends {@code <tag>contents</tag>} (double form) under {@code parent}. */
    private void writeDoubleToXML(Document doc, String tag, Element parent, double contents) {
        Element e = doc.createElement(tag);
        e.appendChild(doc.createTextNode(Double.toString(contents)));
        parent.appendChild(e);
    }

    /** Appends {@code <tag>contents</tag>} (integer form) under {@code parent}. */
    private void writeIntegerToXML(Document doc, String tag, Element parent, int contents) {
        Element e = doc.createElement(tag);
        e.appendChild(doc.createTextNode(Integer.toString(contents)));
        parent.appendChild(e);
    }

    /** Appends {@code <tag>contents</tag>} under {@code parent} and returns the new element. */
    private Element writeStringToXML(Document doc, String tag, Element parent, String contents) {
        Element e = doc.createElement(tag);
        e.appendChild(doc.createTextNode(contents));
        parent.appendChild(e);
        return e;
    }

    /** Resets the scalar settings to their defaults before a file is parsed. */
    private void initializeDefaults() {
        mySimulationSpeed = 1;
        myDefaultState = 0;
        myCellShape = 0;
        myGridEdge = 0;
        myGridOutlineVisible = 1;
        myRandomlyGenerated = 0;
    }

    /**
     * Applies every {@code <cell>} element of the document to the already-built grid.
     *
     * @throws IllegalArgumentException if a cell lies outside the grid bounds
     */
    private void populateGrid(Document doc) {
        NodeList cells = doc.getElementsByTagName("cell");
        for (int i = 0; i < cells.getLength(); i++) {
            Element eElement = (Element) cells.item(i);
            // BUG FIX: node names were compared with ==; use equals for String content.
            if ("cell".equals(eElement.getNodeName())) {
                int xPos = Integer.parseInt(eElement.getAttribute("x"));
                int yPos = Integer.parseInt(eElement.getAttribute("y"));
                if ((xPos >= grid.getGridWidth()) || (yPos >= grid.getGridHeight())) {
                    throw new IllegalArgumentException("Attempting to place cell out of grid bounds");
                }
                // parseInt already rejects malformed states with NumberFormatException,
                // so the old Double.isNaN(int) check (always false) was dropped.
                int cellState = Integer.parseInt(eElement.getTextContent());
                grid.setGridCell(xPos, yPos, cellState, mySimulationMode);
            }
        }
    }

    /** Reads the scalar settings and the named {@code <param>} map from the document. */
    private void extractParameters(Document doc) {
        myAuthor = getStringFromXML(doc, "author");
        mySimulationSpeed = getDoubleFromXML(doc, "simulationSpeed");
        myDefaultState = getIntegerFromXML(doc, "defaultState");
        myCellShape = getIntegerFromXML(doc, "cellShape");
        myGridEdge = getIntegerFromXML(doc, "gridEdge");
        myGridOutlineVisible = getIntegerFromXML(doc, "gridOutlineVisible");
        myRandomlyGenerated = getIntegerFromXML(doc, "randomlyGenerated");
        // Random-population settings are only present (and only read) when requested.
        if (myRandomlyGenerated != 0) {
            myNumCellsOccupied = getIntegerFromXML(doc, "numCellsOccupied");
            myOccupiedCellState = getIntegerFromXML(doc, "occupiedCellState");
        }
        Node param;
        String name;
        String value;
        Map<String, String> extracted = new HashMap<String, String>();
        Node params = doc.getElementsByTagName("parameters").item(0);
        if (params != null) {
            for (int i = 0; i < params.getChildNodes().getLength(); i++) {
                param = params.getChildNodes().item(i);
                // Skip whitespace/text nodes between <param> elements.
                if (!param.hasAttributes()) {
                    continue;
                }
                name = param.getAttributes().getNamedItem("name").getTextContent();
                value = param.getTextContent();
                extracted.put(name, value);
                System.out.println(String.format("Param '%s' => '%s'", name, value));
            }
        }
        mySimParameters = extracted;
    }

    /** Reads the text of the first {@code field} element as an int (NPE if absent). */
    private int getIntegerFromXML(Document doc, String field) {
        System.out.println("Attempting to read in " + field);
        return Integer.parseInt(doc.getElementsByTagName(field).item(0).getTextContent());
    }

    /** Reads the text of the first {@code field} element as a double (NPE if absent). */
    private double getDoubleFromXML(Document doc, String field) {
        return Double.parseDouble((doc.getElementsByTagName(field).item(0)).getTextContent());
    }

    /** Reads the text of the first {@code field} element (NPE if absent). */
    private String getStringFromXML(Document doc, String field) {
        return doc.getElementsByTagName(field).item(0).getTextContent();
    }

    /**
     * @return the grid built from the input file
     */
    public Grid getGrid() {
        return grid;
    }

    /** Prints the grid contents via the grid's own debug printer. */
    public void printGrid() {
        grid.printGrid();
    }

    /**
     * @return the simulation mode declared in the input file
     */
    public int getSimulationMode() {
        return mySimulationMode;
    }

    /**
     * @return the simulation speed declared in the input file (default 1)
     */
    public double getSimulationSpeed() {
        return mySimulationSpeed;
    }

    /**
     * @return the named simulation parameters from the {@code <parameters>} element
     */
    public Map<String, String> getSimParameters() {
        return mySimParameters;
    }

    /** Dumps the parsed scalar configuration values to stdout for debugging. */
    public void dumpConfigurations() {
        System.out.println("Simulation mode " + mySimulationMode);
        System.out.println("Author " + myAuthor);
        System.out.println("Simulation speed " + mySimulationSpeed);
        System.out.println("Default state " + myDefaultState);
        System.out.println("Cell shape " + myCellShape);
        System.out.println("Grid edge mode " + myGridEdge);
        System.out.println("Grid outline visibility " + myGridOutlineVisible);
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.registry.extensions.handlers;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.impl.builder.StAXOMBuilder;
import org.apache.axiom.om.util.AXIOMUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.uddi.api_v3.AuthToken;
import org.wso2.carbon.registry.common.utils.artifact.manager.ArtifactManager;
import org.wso2.carbon.registry.core.*;
import org.wso2.carbon.registry.core.config.Mount;
import org.wso2.carbon.registry.core.config.RegistryContext;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.handlers.Handler;
import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.registry.extensions.beans.BusinessServiceInfo;
import org.wso2.carbon.registry.extensions.handlers.utils.*;
import org.wso2.carbon.registry.extensions.services.Utils;
import org.wso2.carbon.registry.extensions.utils.CommonConstants;
import org.wso2.carbon.registry.extensions.utils.CommonUtil;
import org.wso2.carbon.registry.uddi.utils.UDDIUtil;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import java.io.StringReader;
import java.util.*;
/**
* Handler to process the SOAP service.
*/
public class SOAPServiceMediaTypeHandler extends Handler {
private static final Log log = LogFactory.getLog(SOAPServiceMediaTypeHandler.class);
private static final String TRUNK = "trunk";
private String defaultEnvironment;
private boolean disableWSDLValidation = false;
private boolean disableWADLValidation = false;
private List<String> smartLifecycleLinks = new LinkedList<String>();
private String defaultServiceVersion = CommonConstants.SERVICE_VERSION_DEFAULT_VALUE;
private boolean disableSymlinkCreation = true;
/**
 * Overrides the version assigned to services whose payload does not declare one
 * (initial default: {@code CommonConstants.SERVICE_VERSION_DEFAULT_VALUE}).
 *
 * @param defaultServiceVersion the version string to fall back to
 */
public void setDefaultServiceVersion(String defaultServiceVersion) {
this.defaultServiceVersion = defaultServiceVersion;
}
/**
 * Reads the handler configuration element and records the text of every
 * {@code <key>} child in the smart-lifecycle link list.
 *
 * @param locationConfiguration the configuration element holding the keys
 * @throws RegistryException declared for handler-configuration compatibility
 */
public void setSmartLifecycleLinks(OMElement locationConfiguration) throws RegistryException {
    QName keyName = new QName("key");
    for (Iterator elements = locationConfiguration.getChildElements(); elements.hasNext();) {
        OMElement element = (OMElement) elements.next();
        if (element.getQName().equals(keyName)) {
            smartLifecycleLinks.add(element.getText());
        }
    }
}
/**
 * @return {@code true} (the default) when symlink creation is disabled; the flag is
 * passed through to the WSDL import performed in {@code put()}
 */
public boolean isDisableSymlinkCreation() {
return disableSymlinkCreation;
}
/**
 * Configuration hook: the flag is set only when the literal string "true"
 * (case-sensitive) is supplied.
 *
 * @param disableSymlinkCreation "true" to disable symlink creation
 */
public void setDisableSymlinkCreation(String disableSymlinkCreation) {
    // Boolean.toString(true) is the constant "true"; compare against it directly.
    this.disableSymlinkCreation = "true".equals(disableSymlinkCreation);
}
/**
 * Handles the add/update operation for a SOAP service artifact.
 * <p>
 * Parses the service-info XML payload, derives the storage path for the service,
 * persists the resource, wires associations to its WSDL/WADL definition, saves
 * endpoint entries, creates a symlink back to the original path when the service
 * is stored elsewhere, and optionally publishes to UDDI.
 *
 * @param requestContext the request context carrying registry, resource and paths
 * @throws RegistryException if the payload cannot be parsed or a registry operation fails
 */
public void put(RequestContext requestContext) throws RegistryException {
WSDLProcessor wsdl = null;
// Re-entrancy guard: skip processing if this thread already holds the update lock.
if (!CommonUtil.isUpdateLockAvailable()) {
return;
}
CommonUtil.acquireUpdateLock();
try {
Registry registry = requestContext.getRegistry();
Resource resource = requestContext.getResource();
if (resource == null) {
throw new RegistryException("The resource is not available.");
}
String originalServicePath = requestContext.getResourcePath().getPath();
String resourceName = RegistryUtils.getResourceName(originalServicePath);
OMElement serviceInfoElement, previousServiceInfoElement = null;
// The payload may arrive either as a String or as raw bytes.
Object resourceContent = resource.getContent();
String serviceInfo;
if (resourceContent instanceof String) {
serviceInfo = (String) resourceContent;
} else {
serviceInfo = RegistryUtils.decodeBytes((byte[]) resourceContent);
}
// Parse the service-info XML into an OM tree.
try {
XMLStreamReader reader = XMLInputFactory.newInstance().createXMLStreamReader(
new StringReader(serviceInfo));
StAXOMBuilder builder = new StAXOMBuilder(reader);
serviceInfoElement = builder.getDocumentElement();
} catch (Exception e) {
StringBuilder msg = new StringBuilder("Error in parsing the service content of the service. ")
.append("The requested path to store the service: ").append(originalServicePath).append(".");
log.error(msg.toString());
throw new RegistryException(msg.toString(), e);
}
// derive the service path that the service should be saved.
String serviceName = CommonUtil.getServiceName(serviceInfoElement);
String serviceNamespace = CommonUtil.getServiceNamespace(serviceInfoElement);
String serviceVersion = CommonUtil.getServiceVersion(
serviceInfoElement);
// Fill in the configured default version when the payload declares none,
// and write the amended XML back into the resource.
if (serviceVersion.length() == 0) {
serviceVersion = defaultServiceVersion;
CommonUtil.setServiceVersion(serviceInfoElement, serviceVersion);
resource.setContent(serviceInfoElement.toString());
}
// Path resolution precedence: auto-generated source property -> explicit
// <newServicePath> element -> already-existing original path -> computed path.
String servicePath = "";
if (resource.getProperty(CommonConstants.SOURCE_PROPERTY) != null &&
resource.getProperty(CommonConstants.SOURCE_PROPERTY).equalsIgnoreCase(CommonConstants.SOURCE_AUTO)){
servicePath = CommonUtil.getRegistryPath(registry.getRegistryContext(),originalServicePath);
} else {
if (serviceInfoElement.getChildrenWithLocalName("newServicePath").hasNext()) {
// Take the first <newServicePath> value and stop.
Iterator OmElementIterator = serviceInfoElement.getChildrenWithLocalName("newServicePath");
while (OmElementIterator.hasNext()) {
OMElement next = (OMElement) OmElementIterator.next();
servicePath = next.getText();
break;
}
} else {
if (registry.resourceExists(originalServicePath)) {
//Fixing REGISTRY-1790. Save the Service to the given original
//service path if there is a service already exists there
servicePath = originalServicePath;
} else {
servicePath = getServicePath(registry, resource, serviceInfoElement, serviceName,
serviceNamespace,serviceVersion);
}
}
}
// saving the artifact id.
String serviceId = resource.getUUID();
if (serviceId == null) {
// generate a service id
serviceId = UUID.randomUUID().toString();
resource.setUUID(serviceId);
}
// If a service already exists at the target path, detect no-op updates and
// merge definition-import updates into the old content.
if (registry.resourceExists(servicePath)) {
Resource oldResource = registry.get(servicePath);
String oldContent;
Object content = oldResource.getContent();
if (content instanceof String) {
oldContent = (String) content;
} else {
oldContent = RegistryUtils.decodeBytes((byte[]) content);
}
OMElement oldServiceInfoElement = null;
if (serviceInfo.equals(oldContent)) {
//TODO: This needs a better solution. This fix was put in place to avoid
// duplication of services under /_system/governance, when no changes were made.
// However, the fix is not perfect and needs to be rethought. Perhaps the logic
// below can be reshaped a bit, or may be we don't need to compare the
// difference over here with a little fix to the Governance API end. - Janaka.
//We have fixed this assuming that the temp path where services are stored is under
// /_system/governance/[serviceName]
//Hence if we are to change that location, then we need to change the following code segment as well
String tempPath = RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH
+ RegistryConstants.PATH_SEPARATOR + resourceName;
if (!originalServicePath.equals(tempPath)) {
// Unchanged content stored outside the temp path: just record the
// artifact and let default processing continue.
String path = RegistryUtils.getRelativePathToOriginal(servicePath,
RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
ArtifactManager.getArtifactManager().getTenantArtifactRepository().
addArtifact(path);
return;
}
requestContext.setProcessingComplete(true);
return;
}
if ("true".equals(resource.getProperty("registry.DefinitionImport"))) {
resource.removeProperty("registry.DefinitionImport");
// A definition (WSDL/WADL) import triggered this update: copy the new
// name/namespace/definition/endpoints/version onto the old content so
// other fields of the existing service are preserved.
try {
XMLStreamReader reader = XMLInputFactory.newInstance().
createXMLStreamReader(new StringReader(oldContent));
StAXOMBuilder builder = new StAXOMBuilder(reader);
oldServiceInfoElement = builder.getDocumentElement();
CommonUtil.setServiceName(oldServiceInfoElement, CommonUtil.getServiceName(serviceInfoElement));
CommonUtil.setServiceNamespace(oldServiceInfoElement,
CommonUtil.getServiceNamespace(serviceInfoElement));
CommonUtil.setDefinitionURL(oldServiceInfoElement,
CommonUtil.getDefinitionURL(serviceInfoElement));
CommonUtil.setEndpointEntries(oldServiceInfoElement,
CommonUtil.getEndpointEntries(serviceInfoElement));
CommonUtil.setServiceVersion(oldServiceInfoElement,
org.wso2.carbon.registry.common.utils.CommonUtil.getServiceVersion(
serviceInfoElement));
serviceInfoElement = oldServiceInfoElement;
resource.setContent(serviceInfoElement.toString());
resource.setDescription(oldResource.getDescription());
} catch (Exception e) {
StringBuilder msg = new StringBuilder("Error in parsing the service content of the service. ")
.append("The requested path to store the service: ").append(originalServicePath)
.append(".");
log.error(msg.toString());
throw new RegistryException(msg.toString(), e);
}
}
// Keep the previous content around so a changed definition URL can be detected below.
try {
previousServiceInfoElement = AXIOMUtil.stringToOM(oldContent);
} catch (XMLStreamException e) {
StringBuilder msg = new StringBuilder("Error in parsing the service content of the service. ")
.append("The requested path to store the service: ").append(originalServicePath)
.append(".");
log.error(msg.toString());
throw new RegistryException(msg.toString(), e);
}
} else if ("true".equals(resource.getProperty("registry.DefinitionImport"))) {
resource.removeProperty("registry.DefinitionImport");
}
String definitionURL = CommonUtil.getDefinitionURL(serviceInfoElement);
String oldDefinition = null;
// If the definition URL was removed or changed, detach the old definition's
// associations and endpoint entries.
if (previousServiceInfoElement != null) {
oldDefinition = CommonUtil.getDefinitionURL(previousServiceInfoElement);
// NOTE(review): "".endsWith(oldDefinition) is true only when oldDefinition is
// empty, so it behaves identically to "".equals(oldDefinition); likely a typo
// for equals, but behaviorally equivalent.
if ((!"".equals(oldDefinition) && "".equals(definitionURL))
|| (!"".endsWith(oldDefinition) && !oldDefinition.equals(definitionURL))) {
try {
registry.removeAssociation(servicePath, oldDefinition, CommonConstants.DEPENDS);
registry.removeAssociation(oldDefinition, servicePath, CommonConstants.USED_BY);
EndpointUtils.removeEndpointEntry(oldDefinition, serviceInfoElement, registry);
resource.setContent(RegistryUtils.decodeBytes((serviceInfoElement.toString()).getBytes()));
} catch (RegistryException e) {
throw new RegistryException("Failed to remove endpoints from Service UI : " + serviceName, e);
}
}
}
boolean alreadyAdded = false;
// Case 1: remote definition URL -> import the WSDL/WADL into the registry first,
// rewrite the definition URL to the registry-relative path, persist the service
// and associate it with the imported definition.
if (definitionURL != null &&
(definitionURL.startsWith("http://") || definitionURL.startsWith("https://"))) {
String definitionPath;
if (definitionURL.toLowerCase().endsWith("wsdl")) {
wsdl = buildWSDLProcessor(requestContext);
RequestContext context = new RequestContext(registry, requestContext.getRepository(),
requestContext.getVersionRepository());
context.setResourcePath(new ResourcePath(RegistryConstants.PATH_SEPARATOR + serviceName + ".wsdl"));
context.setSourceURL(definitionURL);
Resource tmpResource = new ResourceImpl();
tmpResource.setProperty("version", serviceVersion);
tmpResource.setProperty(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO);
// Propagate the service's overview entries onto the imported definition.
Map<String, String> map = CommonUtil.getOverviewEntries(serviceInfoElement);
for (Map.Entry<String, String> entry : map.entrySet()){
tmpResource.addProperty(entry.getKey(), entry.getValue());
}
context.setResource(tmpResource);
definitionPath = wsdl.addWSDLToRegistry(context, definitionURL, null, false, false,
disableWSDLValidation, disableSymlinkCreation);
} else if (definitionURL.toLowerCase().endsWith("wadl")) {
WADLProcessor wadlProcessor = buildWADLProcessor(requestContext);
wadlProcessor.setCreateService(false);
RequestContext context = new RequestContext(registry, requestContext.getRepository(),
requestContext.getVersionRepository());
context.setResourcePath(new ResourcePath(RegistryConstants.PATH_SEPARATOR + serviceName + ".wadl"));
context.setSourceURL(definitionURL);
Resource tmpResource = new ResourceImpl();
tmpResource.setProperty("version", serviceVersion);
tmpResource.setProperty(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO);
Map<String, String> map = CommonUtil.getOverviewEntries(serviceInfoElement);
for (Map.Entry<String, String> entry : map.entrySet()){
tmpResource.addProperty(entry.getKey(), entry.getValue());
}
context.setResource(tmpResource);
definitionPath = wadlProcessor.importWADLToRegistry(context, null, disableWADLValidation);
} else {
throw new RegistryException("Invalid service definition found. Please enter a valid WSDL/WADL URL");
}
if (definitionPath == null) {
return;
}
definitionURL = RegistryUtils.getRelativePath(requestContext.getRegistryContext(), definitionPath);
CommonUtil.setDefinitionURL(serviceInfoElement, definitionURL);
resource.setContent(RegistryUtils.decodeBytes((serviceInfoElement.toString()).getBytes()));
// updating the wsdl/wadl url
((ResourceImpl) resource).prepareContentForPut();
persistServiceResource(registry, resource, servicePath);
alreadyAdded = true;
// and make the associations
registry.addAssociation(servicePath, definitionPath, CommonConstants.DEPENDS);
registry.addAssociation(definitionPath, servicePath, CommonConstants.USED_BY);
} else if (definitionURL != null && definitionURL.startsWith(RegistryConstants.ROOT_PATH)) {
// Case 2: the definition URL is already a registry path; verify the definition
// exists and persist/associate only when the dependency is not already recorded.
// it seems definitionUrl is a registry path..
String definitionPath =
RegistryUtils.getAbsolutePath(requestContext.getRegistryContext(), definitionURL);
//if (!definitionPath.startsWith(RegistryUtils.getAbsolutePath(requestContext.getRegistryContext(),
// RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH))) {
// definitionPath = RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + definitionPath;
// }
definitionPath = CommonUtil.getRegistryPath(requestContext.getRegistry().getRegistryContext(),definitionPath);
boolean addItHere = false;
if (!registry.resourceExists(definitionPath)) {
StringBuilder msg = new StringBuilder("Associating service to a non-existing WSDL. wsdl url: ")
.append(definitionPath).append(", ").append("service path: ").append(servicePath)
.append(".");
log.error(msg.toString());
throw new RegistryException(msg.toString());
}
if (!registry.resourceExists(servicePath)) {
addItHere = true;
} else {
Association[] dependencies = registry.getAssociations(servicePath, CommonConstants.DEPENDS);
boolean dependencyFound = false;
if (dependencies != null) {
for (Association dependency : dependencies) {
if (definitionPath.equals(dependency.getDestinationPath())) {
dependencyFound = true;
}
}
}
if (!dependencyFound) {
addItHere = true;
}
}
if (addItHere) { // add the service right here..
((ResourceImpl) resource).prepareContentForPut();
persistServiceResource(registry, resource, servicePath);
alreadyAdded = true;
// and make the associations
registry.addAssociation(servicePath, definitionPath, CommonConstants.DEPENDS);
registry.addAssociation(definitionPath, servicePath, CommonConstants.USED_BY);
}
}
if (!alreadyAdded) {
// we are adding the resource anyway.
((ResourceImpl) resource).prepareContentForPut();
persistServiceResource(registry, resource, servicePath);
}
// Save endpoint entries when a definition URL is present and is either new or changed.
if (definitionURL != null) {
if (oldDefinition == null) {
EndpointUtils.saveEndpointsFromServices(requestContext,servicePath, serviceInfoElement,
registry,
CommonUtil.getUnchrootedSystemRegistry(requestContext));
// NOTE(review): the oldDefinition != null check below is redundant inside
// this else-if; kept as-is to avoid behavior changes.
} else if (oldDefinition != null && !definitionURL.equals(oldDefinition)) {
EndpointUtils.saveEndpointsFromServices(requestContext,servicePath, serviceInfoElement,
registry,
CommonUtil.getUnchrootedSystemRegistry(requestContext));
}
}
String symlinkLocation = RegistryUtils.getAbsolutePath(requestContext.getRegistryContext(),
requestContext.getResource().getProperty(
RegistryConstants.SYMLINK_PROPERTY_NAME));
if (!servicePath.equals(originalServicePath)) {
// we are creating a sym link from service path to original service path.
Resource serviceResource = requestContext.getRegistry().get(
RegistryUtils.getParentPath(originalServicePath));
String isLink = serviceResource.getProperty("registry.link");
String mountPoint = serviceResource.getProperty("registry.mountpoint");
String targetPoint = serviceResource.getProperty("registry.targetpoint");
String actualPath = serviceResource.getProperty("registry.actualpath");
// For mounted (remote-link) parents, anchor the symlink at the actual path.
if (isLink != null && mountPoint != null && targetPoint != null) {
symlinkLocation = actualPath + RegistryConstants.PATH_SEPARATOR;
}
if (symlinkLocation != null) {
requestContext.getSystemRegistry().createLink(symlinkLocation + resourceName, servicePath);
}
}
// in this flow the resource is already added. marking the process completed..
requestContext.setProcessingComplete(true);
// Optional UDDI publish: only when a WSDL was imported in this call and the
// UDDI system property is enabled.
if (wsdl != null &&
CommonConstants.ENABLE.equals(System.getProperty(CommonConstants.UDDI_SYSTEM_PROPERTY))) {
AuthToken authToken = UDDIUtil.getPublisherAuthToken();
if (authToken == null) {
return;
}
//creating the business service info bean
BusinessServiceInfo businessServiceInfo = new BusinessServiceInfo();
//Following lines removed for fixing REGISTRY-1898.
// businessServiceInfo.setServiceName(serviceName.trim());
// businessServiceInfo.setServiceNamespace(serviceNamespace.trim());
// businessServiceInfo.setServiceEndpoints(CommonUtil.getEndpointEntries(serviceInfoElement));
// businessServiceInfo.setDocuments(CommonUtil.getDocLinks(serviceInfoElement));
businessServiceInfo.setServiceDescription(CommonUtil.getServiceDescription(serviceInfoElement));
WSDLInfo wsdlInfo = wsdl.getMasterWSDLInfo();
businessServiceInfo.setServiceWSDLInfo(wsdlInfo);
UDDIPublisher publisher = new UDDIPublisher();
publisher.publishBusinessService(authToken, businessServiceInfo);
}
// Record the artifact in the per-tenant artifact repository.
String path = RegistryUtils
.getRelativePathToOriginal(servicePath, RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
ArtifactManager.getArtifactManager().getTenantArtifactRepository().addArtifact(path);
} finally {
// Always release the update lock acquired at the top of this method.
CommonUtil.releaseUpdateLock();
}
}
private String getServicePath(Registry registry, Resource resource, OMElement serviceInfoElement,
String serviceName, String serviceNamespace, String serviceVersion) {
String servicePath;
if (Utils.getRxtService() == null) {
servicePath = RegistryUtils.getAbsolutePath(registry.getRegistryContext(),
registry.getRegistryContext().getServicePath() +
(serviceNamespace == null ? "" : CommonUtil
.derivePathFragmentFromNamespace(
serviceNamespace)) +
serviceVersion + "/" + serviceName);
} else {
String pathExpression = Utils.getRxtService().getStoragePath(resource.getMediaType());
servicePath = RegistryUtils.getAbsolutePath(registry.getRegistryContext(),
CommonUtil.getPathFromPathExpression(pathExpression, serviceInfoElement, null));
servicePath = CommonUtil.getRegistryPath(registry.getRegistryContext(), servicePath);
}
return servicePath;
}
    /**
     * Method to customize the WSDL Processor.
     * <p>
     * Extension point: subclasses may override this to supply a specialized
     * {@link WSDLProcessor} for the import/put flow.
     *
     * @param requestContext the request context for the import/put operation
     * @return the WSDL Processor instance.
     */
    @SuppressWarnings("unused")
    protected WSDLProcessor buildWSDLProcessor(RequestContext requestContext) {
        return new WSDLProcessor(requestContext);
    }
    /**
     * Method to customize the WADL Processor.
     * <p>
     * Extension point: subclasses may override this to supply a specialized
     * {@link WADLProcessor} for the import/put flow.
     *
     * @param requestContext the request context for the import/put operation
     * @return the WADL Processor instance.
     */
    @SuppressWarnings("unused")
    protected WADLProcessor buildWADLProcessor(RequestContext requestContext) {
        return new WADLProcessor(requestContext);
    }
    /**
     * Persists the given service resource at the specified registry path.
     *
     * @param registry    the registry to write to
     * @param resource    the service resource to persist
     * @param servicePath the absolute registry path to store the resource at
     * @throws RegistryException if the underlying registry put operation fails
     */
    private void persistServiceResource(Registry registry, Resource resource,
                                        String servicePath) throws RegistryException {
        registry.put(servicePath, resource);
    }
public void setDisableWSDLValidation(String disableWSDLValidation) {
this.disableWSDLValidation = Boolean.toString(true).equals(disableWSDLValidation);
}
public void setDisableWADLValidation(String disableWADLValidation) {
this.disableWADLValidation = Boolean.getBoolean(disableWADLValidation);
}
    /**
     * Determines the service content to persist when new content arrives for an
     * existing service.
     * <p>
     * This implementation keeps the incoming content and ignores the previously
     * stored content; it appears to be an override point for subclasses that need
     * a real merge strategy (NOTE(review): confirm intended extension use).
     *
     * @param newContent the incoming service content
     * @param oldContent the previously stored service content (ignored here)
     * @return the content to persist; always {@code newContent} in this implementation
     */
    public String mergeServiceContent(String newContent, String oldContent) {
        return newContent;
    }
@Override
public void delete(RequestContext requestContext) throws RegistryException {
if (!CommonUtil.isUpdateLockAvailable()) {
return;
}
CommonUtil.acquireUpdateLock();
try {
Registry registry = requestContext.getRegistry();
ResourcePath resourcePath = requestContext.getResourcePath();
if (resourcePath == null) {
throw new RegistryException("The resource path is not available.");
}
Resource resource = registry.get(resourcePath.getPath());
} finally {
CommonUtil.releaseUpdateLock();
}
}
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.consumer;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.core.exception.ThrownByFunctionalInterfaceException;
import at.gridtec.lambda4j.core.util.ThrowableUtils;
import at.gridtec.lambda4j.function.conversion.ThrowableBooleanToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableByteToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableCharToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableFloatToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableIntToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableLongToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ThrowableShortToDoubleFunction;
import at.gridtec.lambda4j.function.to.ThrowableToDoubleFunction;
import at.gridtec.lambda4j.operator.unary.ThrowableDoubleUnaryOperator;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Objects;
import java.util.function.DoubleConsumer;
import java.util.function.Function;
/**
* Represents an operation that accepts one {@code double}-valued input argument and returns no result which is able to
* throw any {@link Throwable}. This is a primitive specialization of {@link ThrowableConsumer}. Unlike most other
* functional interfaces, {@code ThrowableDoubleConsumer} is expected to operate via side-effects.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #acceptThrows(double)}.
*
* @param <X> The type of the throwable to be thrown by this consumer
* @apiNote This is a throwable JDK lambda.
* @see ThrowableConsumer
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface ThrowableDoubleConsumer<X extends Throwable> extends Lambda, DoubleConsumer {

    /**
     * Constructs a {@link ThrowableDoubleConsumer} based on a lambda expression or a method reference. Thereby the
     * given lambda expression or method reference is returned on an as-is basis to implicitly transform it to the
     * desired type. With this method, it is possible to ensure that correct type is used from lambda expression or
     * method reference.
     *
     * @param <X> The type of the throwable to be thrown by this consumer
     * @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
     * @return A {@code ThrowableDoubleConsumer} from given lambda expression or method reference.
     * @implNote This implementation allows the given argument to be {@code null}, but only if {@code null} given,
     * {@code null} will be returned.
     * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
     * Expression</a>
     * @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
     */
    static <X extends Throwable> ThrowableDoubleConsumer<X> of(@Nullable final ThrowableDoubleConsumer<X> expression) {
        return expression;
    }

    /**
     * Calls the given {@link ThrowableDoubleConsumer} with the given argument and returns its result.
     *
     * @param <X> The type of the throwable to be thrown by this consumer
     * @param consumer The consumer to be called
     * @param value The argument to the consumer
     * @throws NullPointerException If given argument is {@code null}
     * @throws X Any throwable from this consumers action
     */
    static <X extends Throwable> void call(@Nonnull final ThrowableDoubleConsumer<? extends X> consumer,
            double value) throws X {
        Objects.requireNonNull(consumer);
        consumer.acceptThrows(value);
    }

    /**
     * Applies this consumer to the given argument.
     *
     * @param value The argument to the consumer
     * @throws X Any throwable from this consumers action
     */
    void acceptThrows(double value) throws X;

    /**
     * Applies this consumer to the given argument.
     *
     * @param value The argument to the consumer
     * @apiNote This method mainly exists to use this {@link ThrowableDoubleConsumer} in JRE specific methods only
     * accepting {@link DoubleConsumer}. If this consumer should be applied, then the {@link #acceptThrows(double)}
     * method should be used.
     * @apiNote Overrides the {@link DoubleConsumer#accept(double)} method by using a redefinition as default method.
     * This implementation calls the {@link #acceptThrows(double)} method of this function and catches the eventually
     * thrown {@link Throwable} from it. If it is of type {@link Error} it is rethrown as is. All other {@code
     * Throwable} types, including {@link RuntimeException}, are wrapped in a
     * {@link ThrownByFunctionalInterfaceException}.
     */
    @Override
    default void accept(double value) {
        // Delegates to nest(), which wraps a thrown Throwable (other than Error) in a
        // ThrownByFunctionalInterfaceException. The previously commented-out manual
        // try/catch implementation has been removed, as its own TODO requested.
        nest().accept(value);
    }

    /**
     * Returns the number of arguments for this consumer.
     *
     * @return The number of arguments for this consumer.
     * @implSpec The default implementation always returns {@code 1}.
     */
    @Nonnegative
    default int arity() {
        return 1;
    }

    /**
     * Returns a composed {@link ThrowableConsumer} that first applies the {@code before} function to its input, and
     * then applies this consumer to the result.
     *
     * @param <A> The type of the argument to the given function, and of composed consumer
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableConsumer} that first applies the {@code before} function to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle every type.
     */
    @Nonnull
    default <A> ThrowableConsumer<A, X> compose(
            @Nonnull final ThrowableToDoubleFunction<? super A, ? extends X> before) {
        Objects.requireNonNull(before);
        return (a) -> acceptThrows(before.applyAsDoubleThrows(a));
    }

    /**
     * Returns a composed {@link ThrowableBooleanConsumer} that first applies the {@code before} function to its input,
     * and then applies this consumer to the result. This method is just convenience, to provide the ability to execute
     * an operation which accepts {@code boolean} input, before this primitive consumer is executed.
     *
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableBooleanConsumer} that first applies the {@code before} function to its input,
     * and then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * boolean}.
     */
    @Nonnull
    default ThrowableBooleanConsumer<X> composeFromBoolean(
            @Nonnull final ThrowableBooleanToDoubleFunction<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableByteConsumer} that first applies the {@code before} function to
     * its input, and then applies this consumer to the result.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code byte} input,
     * before this primitive consumer is executed.
     *
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableByteConsumer} that first applies the {@code before} function to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * byte}.
     */
    @Nonnull
    default ThrowableByteConsumer<X> composeFromByte(@Nonnull final ThrowableByteToDoubleFunction<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableCharConsumer} that first applies the {@code before} function to
     * its input, and then applies this consumer to the result.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code char} input,
     * before this primitive consumer is executed.
     *
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableCharConsumer} that first applies the {@code before} function to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * char}.
     */
    @Nonnull
    default ThrowableCharConsumer<X> composeFromChar(@Nonnull final ThrowableCharToDoubleFunction<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableDoubleConsumer} that first applies the {@code before} operator to its input,
     * and then applies this consumer to the result. This method is just convenience, to provide the ability to execute
     * an operation which accepts {@code double} input, before this primitive consumer is executed.
     *
     * @param before The operator to apply before this consumer is applied
     * @return A composed {@code ThrowableDoubleConsumer} that first applies the {@code before} operator to its input,
     * and then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * double}.
     */
    @Nonnull
    default ThrowableDoubleConsumer<X> composeFromDouble(
            @Nonnull final ThrowableDoubleUnaryOperator<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableFloatConsumer} that first applies the {@code before} function to its input,
     * and then applies this consumer to the result. This method is just convenience, to provide the ability to execute
     * an operation which accepts {@code float} input, before this primitive consumer is executed.
     *
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableFloatConsumer} that first applies the {@code before} function to its input,
     * and then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * float}.
     */
    @Nonnull
    default ThrowableFloatConsumer<X> composeFromFloat(
            @Nonnull final ThrowableFloatToDoubleFunction<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableIntConsumer} that first applies the {@code before} function to
     * its input, and then applies this consumer to the result.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code int} input,
     * before this primitive consumer is executed.
     *
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableIntConsumer} that first applies the {@code before} function to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * int}.
     */
    @Nonnull
    default ThrowableIntConsumer<X> composeFromInt(@Nonnull final ThrowableIntToDoubleFunction<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableLongConsumer} that first applies the {@code before} function to
     * its input, and then applies this consumer to the result.
     * This method is just convenience, to provide the ability to execute an operation which accepts {@code long} input,
     * before this primitive consumer is executed.
     *
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableLongConsumer} that first applies the {@code before} function to its input, and
     * then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * long}.
     */
    @Nonnull
    default ThrowableLongConsumer<X> composeFromLong(@Nonnull final ThrowableLongToDoubleFunction<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableShortConsumer} that first applies the {@code before} function to its input,
     * and then applies this consumer to the result. This method is just convenience, to provide the ability to execute
     * an operation which accepts {@code short} input, before this primitive consumer is executed.
     *
     * @param before The function to apply before this consumer is applied
     * @return A composed {@code ThrowableShortConsumer} that first applies the {@code before} function to its input,
     * and then applies this consumer to the result.
     * @throws NullPointerException If given argument is {@code null}
     * @implSpec The input argument of this method is able to handle primitive values. In this case this is {@code
     * short}.
     */
    @Nonnull
    default ThrowableShortConsumer<X> composeFromShort(
            @Nonnull final ThrowableShortToDoubleFunction<? extends X> before) {
        Objects.requireNonNull(before);
        return (value) -> acceptThrows(before.applyAsDoubleThrows(value));
    }

    /**
     * Returns a composed {@link ThrowableDoubleConsumer} that performs, in sequence, this consumer followed by the
     * {@code after} consumer. If performing this consumer throws an exception, the {@code after} consumer will not be
     * performed.
     *
     * @param after The consumer to apply after this consumer is applied
     * @return A composed {@link ThrowableDoubleConsumer} that performs, in sequence, this consumer followed by the
     * {@code after} consumer.
     * @throws NullPointerException If given argument is {@code null}
     */
    @Nonnull
    default ThrowableDoubleConsumer<X> andThen(@Nonnull final ThrowableDoubleConsumer<? extends X> after) {
        Objects.requireNonNull(after);
        return (value) -> {
            acceptThrows(value);
            after.acceptThrows(value);
        };
    }

    /**
     * Returns a composed {@link ThrowableConsumer} which represents this {@link ThrowableDoubleConsumer}. Thereby the
     * primitive input argument for this consumer is autoboxed. This method provides the possibility to use this {@code
     * ThrowableDoubleConsumer} with methods provided by the {@code JDK}.
     *
     * @return A composed {@code ThrowableConsumer} which represents this {@code ThrowableDoubleConsumer}.
     */
    @Nonnull
    default ThrowableConsumer<Double, X> boxed() {
        return this::acceptThrows;
    }

    /**
     * Returns a composed {@link DoubleConsumer2} that applies this consumer to its input and nests the thrown {@link
     * Throwable} from it. The {@code Throwable} is nested (wrapped) in a {@link ThrownByFunctionalInterfaceException},
     * which is constructed from the thrown {@code Throwable}s message and the thrown {@code Throwable} itself.
     *
     * @return A composed {@link DoubleConsumer2} that applies this consumer to its input and nests the thrown {@code
     * Throwable} from it.
     * @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
     * @see #nest(Function)
     * @see ThrownByFunctionalInterfaceException
     */
    @Nonnull
    default DoubleConsumer2 nest() {
        return nest(throwable -> new ThrownByFunctionalInterfaceException(throwable.getMessage(), throwable));
    }

    /**
     * Returns a composed {@link DoubleConsumer2} that applies this consumer to its input and nests the thrown {@link
     * Throwable} from it using {@code mapper} operation. Thereby {@code mapper} may modify the thrown {@code
     * Throwable}, regarding its implementation, and returns it nested (wrapped) in a {@link RuntimeException}.
     *
     * @param mapper The operation to map the thrown {@code Throwable} to {@code RuntimeException}
     * @return A composed {@link DoubleConsumer2} that applies this consumer to its input and nests the thrown {@code
     * Throwable} from it using {@code mapper} operation.
     * @throws NullPointerException If given argument is {@code null}
     * @implNote If thrown {@code Throwable} is of type {@link Error} it is thrown as-is and thus not nested.
     * @see #nest()
     */
    @Nonnull
    default DoubleConsumer2 nest(@Nonnull final Function<? super Throwable, ? extends RuntimeException> mapper) {
        return recover(throwable -> {
            throw mapper.apply(throwable);
        });
    }

    /**
     * Returns a composed {@link DoubleConsumer2} that first applies this consumer to its input, and then applies the
     * {@code recover} operation if a {@link Throwable} is thrown from this one. The {@code recover} operation is
     * represented by a curried operation which is called with throwable information and same argument of this
     * consumer.
     *
     * @param recover The operation to apply if this consumer throws a {@code Throwable}
     * @return A composed {@link DoubleConsumer2} that first applies this consumer to its input, and then applies the
     * {@code recover} operation if a {@code Throwable} is thrown from this one.
     * @throws NullPointerException If given argument or the returned enclosing consumer is {@code null}
     * @implSpec The implementation checks that the returned enclosing consumer from {@code recover} operation is not
     * {@code null}. If it is, then a {@link NullPointerException} with appropriate message is thrown.
     * @implNote If thrown {@code Throwable} is of type {@link Error}, it is thrown as-is and thus not passed to {@code
     * recover} operation.
     */
    @Nonnull
    default DoubleConsumer2 recover(@Nonnull final Function<? super Throwable, ? extends DoubleConsumer> recover) {
        Objects.requireNonNull(recover);
        return (value) -> {
            try {
                this.acceptThrows(value);
            } catch (Error e) {
                throw e;
            } catch (Throwable throwable) {
                final DoubleConsumer consumer = recover.apply(throwable);
                Objects.requireNonNull(consumer, () -> "recover returned null for " + throwable.getClass() + ": "
                        + throwable.getMessage());
                consumer.accept(value);
            }
        };
    }

    /**
     * Returns a composed {@link DoubleConsumer2} that applies this consumer to its input and sneakily throws the
     * thrown {@link Throwable} from it, if it is not of type {@link RuntimeException} or {@link Error}. This means that
     * each throwable thrown from the returned composed consumer behaves exactly the same as an <em>unchecked</em>
     * throwable does. As a result, there is no need to handle the throwable of this consumer in the returned composed
     * consumer by either wrapping it in an <em>unchecked</em> throwable or to declare it in the {@code throws} clause,
     * as it would be done in a non sneaky throwing consumer.
     * <p>
     * What sneaky throwing simply does, is to fake out the compiler and thus it bypasses the principle of
     * <em>checked</em> throwables. On the JVM (class file) level, all throwables, checked or not, can be thrown
     * regardless of the {@code throws} clause of methods, which is why this works at all.
     * <p>
     * However, when using this method to get a sneaky throwing consumer variant of this throwable consumer, the
     * following advantages, disadvantages and limitations will apply:
     * <p>
     * If the calling-code is to handle the sneakily thrown throwable, it is required to add it to the {@code throws}
     * clause of the method that applies the returned composed consumer. The compiler will not force the declaration in
     * the {@code throws} clause anymore.
     * <p>
     * If the calling-code already handles the sneakily thrown throwable, the compiler requires it to be added to the
     * {@code throws} clause of the method that applies the returned composed consumer. If not added, the compiler will
     * error that the caught throwable is never thrown in the corresponding {@code try} block.
     * <p>
     * If the returned composed consumer is directly surrounded by a {@code try}-{@code catch} block to catch the
     * sneakily thrown throwable from it, the compiler will error that the caught throwable is never thrown in the
     * corresponding {@code try} block.
     * <p>
     * In any case, if the throwable is not added to the {@code throws} clause of the method that applies the
     * returned composed consumer, the calling-code won't be able to catch the throwable by name. It will bubble and
     * probably be caught in some {@code catch} statement, catching a base type such as {@code try { ... }
     * catch(RuntimeException e) { ... }} or {@code try { ... } catch(Exception e) { ... }}, but perhaps this is
     * intended.
     * <p>
     * When the called code never throws the specific throwable that it declares, it should obviously be omitted. For
     * example: {@code new String(byteArr, "UTF-8") throws UnsupportedEncodingException}, but {@code UTF-8} is
     * guaranteed by the Java specification to be always present. Here, the {@code throws} declaration is a nuisance and
     * any solution to silence it with minimal boilerplate is welcome. The throwable should therefore be omitted in the
     * {@code throws} clause of the method that applies the returned composed consumer.
     * <p>
     * With all that mentioned, the following example will demonstrate this methods correct use:
     * <pre>{@code
     * // when called with illegal value ClassNotFoundException is thrown
     * public Class<?> sneakyThrowingFunctionalInterface(final String className) throws ClassNotFoundException {
     *     return ThrowableFunction.of(Class::forName) // create the correct throwable functional interface
     *                .sneakyThrow() // create a non-throwable variant which is able to sneaky throw (this method)
     *                .apply(className); // apply non-throwable variant -> may sneaky throw a throwable
     * }
     *
     * // call the method which surrounds the sneaky throwing functional interface
     * public void callingMethod() {
     *     try {
     *         final Class<?> clazz = sneakyThrowingFunctionalInterface("some illegal class name");
     *         // ... do something with clazz ...
     *     } catch(ClassNotFoundException e) {
     *         // ... do something with e ...
     *     }
     * }
     * }</pre>
     * In conclusion, this somewhat contentious ability should be used carefully, of course, with the advantages,
     * disadvantages and limitations described above kept in mind.
     *
     * @return A composed {@link DoubleConsumer2} that applies this consumer to its input and sneakily throws the thrown
     * {@link Throwable} from it, unless it is of type {@link RuntimeException} or {@link Error}.
     * @implNote If thrown {@link Throwable} is of type {@link RuntimeException} or {@link Error}, it is thrown as-is
     * and thus not sneakily thrown.
     */
    @Nonnull
    default DoubleConsumer2 sneakyThrow() {
        return (value) -> {
            try {
                this.acceptThrows(value);
            } catch (RuntimeException | Error e) {
                throw e;
            } catch (Throwable throwable) {
                throw ThrowableUtils.sneakyThrow(throwable);
            }
        };
    }
}
| |
/*
* Copyright 2003 - 2018 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.esjp.products.util;
import java.util.UUID;
import org.efaps.admin.common.SystemConfiguration;
import org.efaps.admin.datamodel.IEnum;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.api.annotation.EFapsSysConfAttribute;
import org.efaps.api.annotation.EFapsSysConfLink;
import org.efaps.api.annotation.EFapsSystemConfiguration;
import org.efaps.esjp.admin.common.systemconfiguration.BooleanSysConfAttribute;
import org.efaps.esjp.admin.common.systemconfiguration.EnumSysConfAttribute;
import org.efaps.esjp.admin.common.systemconfiguration.IntegerSysConfAttribute;
import org.efaps.esjp.admin.common.systemconfiguration.PropertiesSysConfAttribute;
import org.efaps.esjp.admin.common.systemconfiguration.StringSysConfAttribute;
import org.efaps.esjp.admin.common.systemconfiguration.SysConfLink;
import org.efaps.esjp.products.ProductFamily_Base.NameDefinition;
import org.efaps.util.cache.CacheReloadException;
/**
* @author The eFaps Team
*/
@EFapsUUID("96b4a9bc-bfcf-41fa-9c03-4dbc6279cf63")
@EFapsApplication("eFapsApp-Products")
@EFapsSystemConfiguration("e53cd705-e463-47dc-a400-4ace4ed72071")
public final class Products
{
/** The base. */
public static final String BASE = "org.efaps.products.";
/** Products-Configuration. */
public static final UUID SYSCONFUUID = UUID.fromString("e53cd705-e463-47dc-a400-4ace4ed72071");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute ACTIVATEINDIVIDUAL = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "ActivateIndividual")
.description(" Activate the individual management menu in general.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute ACTIVATEINFINITE = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Infinite.Activate")
.description("Activate the Infiniteproduct managements.")
.defaultValue(true);
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute ACTIVATEPRICEMASSUP = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "ActivatePriceMassUpdate")
.description(" Activate the menu for updating Product Prices on mass.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute ACTIVATEPRICEGRP = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "PriceGroup.Activate")
.description("Activate the price group management.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute BATCH_ACTARCHIVE = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Batch.ActivateArchives")
.defaultValue(true)
.description("Activate the possibility to relate arcives to a batch product. "
+ "Only works if eFapsApp-Archives is installed also.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute COSTACTIVATEALT = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Cost.ActivateAlternative")
.description(" Activate the possibility to register alterntative costs.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute FAMILY_ACTIVATE = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Family.Activate")
.description(" Activate the individual management menu in general.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute FAMILY_ACTIVATE_UNSPSC = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Family.ActivateUNSPSC")
.description("Activate the use of UNSPSC.");
/** See description. */
@EFapsSysConfAttribute
public static final IntegerSysConfAttribute FAMILY_SUFFIXLENGTH = new IntegerSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Family.SuffixLength")
.defaultValue(3)
.description("Activate the family management for materials.");
/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute FAMILY_NAMESEP = new StringSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Family.NameSeparator")
.defaultValue(" - ")
.description("Seperator String used to seperate the diffenert parts of a complete family name.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute FAMILY_NAMEINCLLINE = new BooleanSysConfAttribute()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Family.NameIncludeLine")
.defaultValue(true)
.description("Include the line in the name.");
/** See description. */
@EFapsSysConfAttribute
public static final EnumSysConfAttribute<NameDefinition> FAMILY_NAMEDEF = new EnumSysConfAttribute<NameDefinition>()
.sysConfUUID(Products.SYSCONFUUID)
.key(Products.BASE + "Family.NameDefinition")
.clazz(NameDefinition.class)
.defaultValue(NameDefinition.ALL)
.description("Name Definition");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute ACTIVATEVARIANT = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Variant.Activate")
                .description("Activate the variant management menu in general.");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute VARIANTCONFIG = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Variant.Configuration")
                .description("Configuration for the Variant mechanism.")
                .concatenate(true);

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute VARIANTACTFAM = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Variant.ActivateFamilies")
                // description corrected: this flag concerns variants, not generics
                .description("Activate the family management for variants.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute VARIANTFAMPRE = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Variant.FamiliesPrefix")
                .description("Prefix used for the families of variant products.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute VARIANTACTCLASS = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Variant.ActivateClassification")
                .description("Activate the classification for variants.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute ACTIVATEGENERIC = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Generic.Activate")
                .description("Activate the generic product management in general.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute GENERICACTFAM = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Generic.ActivateFamilies")
                .description("Activate the family management for generics.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute GENERICFAMPRE = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Generic.FamiliesPrefix")
                .description("Prefix used for the families of generic products.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute GENERIC_NAMEFRMT = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Generic.NameFormat")
                .description("StringFormat to be used to create "
                                + "the name from FamilyCode (first Parameter) and Name (second Parameter)")
                .defaultValue("%s.%s");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute INDIVIDUAL_ACTARCHIVE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Individual.ActivateArchives")
                .defaultValue(true)
                .description("Activate the possibility to relate archives to an individual product. "
                                + "Only works if eFapsApp-Archives is installed also.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute INFINITEACTFAM = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Infinite.ActivateFamilies")
                .description("Activate the family management for Infinite products.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute INFINITEFAMPRE = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Infinite.FamiliesPrefix")
                // description corrected: this is the prefix, not an activation flag
                .description("Prefix used for the families of Infinite products.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute INFINITE_NAMEFRMT = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Infinite.NameFormat")
                .description("StringFormat to be used to create "
                                + "the name from FamilyCode (first Parameter) and Name (second Parameter)")
                .defaultValue("%s.%s");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute INFINITEDESCR = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Infinite.Descriptions")
                .description("Substitutor values: Default=${Name}, Products_ProductStandartClass=text ${name} .");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute ACTIVATE_MATERIAL = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.Activate")
                .description("Activate the generic product management for materials.")
                .defaultValue(true);

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute MATERIAL_ISGENERIC = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.IsGeneric")
                .description("Activate the generic product management for materials.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute MATERIAL_ACTFAM = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.ActivateFamilies")
                .description("Activate the family management for materials.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute MATERIAL_ACTCLASS = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.ActivateClassification")
                .description("Activate the classification for materials.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute MATERIAL_ACTIND = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.ActivateIndividual")
                // description corrected: this flag concerns individual products, not families
                .description("Activate the individual product management for materials.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute MATERIAL_FAMPRE = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.FamiliesPrefix")
                .description("Prefix used for the families of materials.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute MATERIAL_NAMEFRMT = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.NameFormat")
                .description("StringFormat to be used to create "
                                + "the name from FamilyCode (first Parameter) and Name (second Parameter)")
                .defaultValue("%s.%s");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute MATERIAL_DESCR = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Material.Descriptions")
                .description("Substitutor values: Default=${Name}, Products_ProductStandartClass=text ${name} .");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute STANDART_ACTFAM = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.ActivateFamilies")
                .description("Activate the family management for standart products.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute STANDART_ACTNOTE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.ActivateNote")
                .description("Activate the note field for standart products.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute STANDART_ACTBARCODES = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.ActivateBarcodes")
                .description("Activate the Barcodes field set for standart products.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute STANDART_ACTCLASS = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.ActivateClassification")
                .description("Activate the classification for standart products.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute STANDART_ACTIND = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.ActivateIndividual")
                .description("Activate the individual for standart products. "
                                + "Also needs the general mechanism activated");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute STANDART_CONV = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.Conversion")
                .addDefaultValue("Dimension01", "bc921c98-9e50-4614-a9c4-83a22fca3105")
                .description("Activate the conversion management for standart products.");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute STANDART_DESCR = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.Descriptions")
                .description("Substitutor values: Default=${Name}, Products_ProductStandartClass=text ${name} .");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute STANDART_FAMPRE = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.FamiliesPrefix")
                // description corrected: this is the prefix, not an activation flag
                .description("Prefix used for the families of standart products.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute STANDART_NAMEFRMT = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.NameFormat")
                .description("StringFormat to be used to create "
                                + "the name from FamilyCode (first Parameter) and Name (second Parameter)")
                .defaultValue("%s.%s");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute STANDARTIMG = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Standart.Image")
                .addDefaultValue("Image4Doc.Create", "false")
                .addDefaultValue("Image4Doc.Width", "250")
                .addDefaultValue("Image4Doc.Height", "250")
                .addDefaultValue("Image4Doc.Enlarge", "false")
                .addDefaultValue("Thumbnail.Create", "true")
                .addDefaultValue("Thumbnail.Width", "150")
                .addDefaultValue("Thumbnail.Height", "150")
                .addDefaultValue("Thumbnail.Enlarge", "false")
                .description("Configuration for Image.");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute SALESPARTLIST_ACTIVATE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "SalesPartList.Activate")
                .description("Activate the SalesPartList.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute SALESPARTLIST_ACTFAM = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "SalesPartList.ActivateFamilies")
                .description("Activate the family management for SalesPartList.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute SALESPARTLIST_ACTBARCODES = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "SalesPartList.ActivateBarcodes")
                .description("Activate the Barcodes field set for SalesPartList.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute SERV_ACTIVATE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Service.Activate")
                .description("Activate service products.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute SERV_ACTFAM = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Service.ActivateFamilies")
                .description("Activate the family management for service.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute SERV_ACTCLASS = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Service.ActivateClassification")
                .description("Activate the classification for service products.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute SERV_FAMPRE = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Service.FamiliesPrefix")
                // description corrected: this is the prefix for service families
                .description("Prefix used for the families of service products.");

/** See description. */
@EFapsSysConfAttribute
public static final StringSysConfAttribute SERV_NAMEFRMT = new StringSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Service.NameFormat")
                .description("StringFormat to be used to create "
                                + "the name from FamilyCode (first Parameter) and Name (second Parameter)")
                .defaultValue("%s.%s");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute SERV_DESCR = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "Service.Descriptions")
                .description("Substitutor values: Default=${Name}, Products_ProductStandartClass=text ${name} .");
/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute TEXTPOS_ACTIVATE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "TextPosition.Activate")
                .description("Activate the Textposition.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute TREEVIEWACT = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "TreeView.Activate")
                .description("Activate the TreeView.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute REPINVENTORY_CLASSACTIVATE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .defaultValue(true)
                .key(Products.BASE + "report.InventoryReport.Classification.Activate")
                .description("Activate Classification for the InventoryReport.");

/** See description. */
@EFapsSysConfAttribute
public static final IntegerSysConfAttribute REPINVENTORY_CLASSLEVEL = new IntegerSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "report.InventoryReport.ClassificationLevel")
                .description("Level of Classification to present.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute REPINVENTORY_FAMILYACTIVATE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "report.InventoryReport.Family.Activate")
                // description corrected: this flag activates the Family, not the Classification
                .description("Activate Family for the InventoryReport.");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute REPCOSTCONFIG = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "report.CostReport")
                .description("Configuration for CostReport.");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute REPLASTMOVE = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "report.LastMovementReport")
                .description("Configuration for LastMovementReport.");

/** See description. */
@EFapsSysConfAttribute
public static final PropertiesSysConfAttribute REPPRICELIST = new PropertiesSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "report.PriceListReport")
                .addDefaultValue("ShowClassification", "true")
                .addDefaultValue("ShowFamily", "false")
                .addDefaultValue("ActiveProductsOnly", "true")
                .addDefaultValue("Type", "Products_ProductPricelistRetail")
                .description("Configuration for PriceListReport.");

/** See description. */
@EFapsSysConfAttribute
public static final BooleanSysConfAttribute REPPRICELIST_ACTBARCODE = new BooleanSysConfAttribute()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "report.PriceListReport.ActivateBarcodes")
                .description("Activate Barcodes for PriceListReport.");

/** See description. */
@EFapsSysConfLink
public static final SysConfLink DEFAULTDIMENSION = new SysConfLink()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "DefaultDimension")
                // description corrected: this is a link attribute, matching the sibling links below
                .description("Link to a default Dimension instance.");

/** See description. */
@EFapsSysConfLink
public static final SysConfLink DEFAULTWAREHOUSE = new SysConfLink()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "DefaultWareHouse")
                .description("Link to a default warehouse instance.");

/** See description. */
@EFapsSysConfLink
public static final SysConfLink DEFAULTSTORAGEGRP4BOM = new SysConfLink()
                .sysConfUUID(Products.SYSCONFUUID)
                .key(Products.BASE + "DefaultStorageGroup4BOMCalculator")
                .description("Link to a default StorageGroup instance used by the BOMCalculator.");
/**
 * Singleton: this class only exposes static configuration attributes and
 * must never be instantiated.
 */
private Products()
{
}
/**
 * The Enum ProductIndividual.
 *
 * Defines how a product is handled on the individual level.
 */
public enum ProductIndividual
    implements IEnum
{

    /** No individual handling. */
    NONE,

    /** Handled as individual items. */
    INDIVIDUAL,

    /** Handled in batches. */
    BATCH;

    @Override
    public int getInt()
    {
        // getInt() maps directly to ordinal(), so reordering the constants
        // changes the returned value — keep the declaration order stable
        return ordinal();
    }
}
/**
 * The Enum CostingState.
 *
 * (Javadoc previously said "ProductIndividual" — copy-paste error.)
 */
public enum CostingState
    implements IEnum
{

    /** Costing is active. */
    ACTIVE,

    /** Costing is fixed. */
    FIXED,

    /** Costing is inactive. */
    INACTIVE;

    @Override
    public int getInt()
    {
        // getInt() maps directly to ordinal(), so reordering the constants
        // changes the returned value — keep the declaration order stable
        return ordinal();
    }
}
/**
 * Retrieves the SystemConfiguration instance belonging to the Products
 * application.
 *
 * @return the SystemConfiguration for Products
 * @throws CacheReloadException on error
 */
public static SystemConfiguration getSysConfig()
    throws CacheReloadException
{
    final SystemConfiguration config = SystemConfiguration.get(Products.SYSCONFUUID);
    return config;
}
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linecorp.armeria.common;
import static com.google.common.base.Charsets.UTF_16;
import static com.google.common.base.Charsets.UTF_8;
import static com.linecorp.armeria.common.MediaType.ANY_APPLICATION_TYPE;
import static com.linecorp.armeria.common.MediaType.ANY_AUDIO_TYPE;
import static com.linecorp.armeria.common.MediaType.ANY_IMAGE_TYPE;
import static com.linecorp.armeria.common.MediaType.ANY_TEXT_TYPE;
import static com.linecorp.armeria.common.MediaType.ANY_TYPE;
import static com.linecorp.armeria.common.MediaType.ANY_VIDEO_TYPE;
import static com.linecorp.armeria.common.MediaType.HTML_UTF_8;
import static com.linecorp.armeria.common.MediaType.JPEG;
import static com.linecorp.armeria.common.MediaType.PLAIN_TEXT_UTF_8;
import static java.lang.reflect.Modifier.isFinal;
import static java.lang.reflect.Modifier.isPublic;
import static java.lang.reflect.Modifier.isStatic;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import java.lang.reflect.Field;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Streams;
import com.google.common.testing.EqualsTester;
import com.google.common.testing.NullPointerTester;
/**
 * Tests for {@link MediaType}.
 *
 * @author Gregory Kick
 */
public class MediaTypeTest {

    // Every declared MediaType constant must round-trip through parse()
    // back to the identical (cached) instance.
    @Test // reflection
    public void testParse_useConstants() throws Exception {
        for (MediaType constant : getConstants()) {
            assertSame(constant, MediaType.parse(constant.toString()));
        }
    }

    // create() + withParameters() must also return the cached constant instance.
    @Test // reflection
    public void testCreate_useConstants() throws Exception {
        for (MediaType constant : getConstants()) {
            assertSame(constant, MediaType.create(constant.type(), constant.subtype())
                                          .withParameters(constant.parameters()));
        }
    }

    // Constants named *_UTF_8 must carry the UTF-8 charset; all others none.
    @Test // reflection
    public void testConstants_charset() throws Exception {
        for (Field field : getConstantFields()) {
            Optional<Charset> charset = ((MediaType) field.get(null)).charset();
            if (field.getName().endsWith("_UTF_8")) {
                assertThat(charset).hasValue(UTF_8);
            } else {
                assertThat(charset).isEmpty();
            }
        }
    }

    @Test // reflection
    public void testConstants_areUnique() {
        assertThat(getConstants()).doesNotHaveDuplicates();
    }

    // reflection
    // Collects all public static final fields of type MediaType declared on MediaType.
    private static Iterable<Field> getConstantFields() {
        return Arrays.stream(MediaType.class.getDeclaredFields()).filter((Predicate<Field>) input -> {
            int modifiers = input.getModifiers();
            return isPublic(modifiers) && isStatic(modifiers) && isFinal(modifiers) &&
                   MediaType.class.equals(input.getType());
        }).collect(Collectors.toList());
    }

    // reflection
    // Resolves the constant fields to their MediaType values.
    private static Iterable<MediaType> getConstants() {
        return Streams.stream(getConstantFields()).map(input -> {
            try {
                return (MediaType) input.get(null);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }).collect(Collectors.toList());
    }

    @Test
    public void testCreate_invalidType() {
        assertThatThrownBy(() -> MediaType.create("te><t", "plaintext"))
                .isInstanceOf(IllegalArgumentException.class);
    }

    @Test
    public void testCreate_invalidSubtype() {
        assertThatThrownBy(() -> MediaType.create("text", "pl@intext"))
                .isInstanceOf(IllegalArgumentException.class);
    }

    // A wildcard type with a concrete subtype ("*/text") is rejected.
    @Test
    public void testCreate_wildcardTypeDeclaredSubtype() {
        assertThatThrownBy(() -> MediaType.create("*", "text"))
                .isInstanceOf(IllegalArgumentException.class);
    }

    @Test
    public void testCreateApplicationType() {
        MediaType newType = MediaType.createApplicationType("yams");
        assertEquals("application", newType.type());
        assertEquals("yams", newType.subtype());
    }

    @Test
    public void testCreateAudioType() {
        MediaType newType = MediaType.createAudioType("yams");
        assertEquals("audio", newType.type());
        assertEquals("yams", newType.subtype());
    }

    @Test
    public void testCreateImageType() {
        MediaType newType = MediaType.createImageType("yams");
        assertEquals("image", newType.type());
        assertEquals("yams", newType.subtype());
    }

    @Test
    public void testCreateTextType() {
        MediaType newType = MediaType.createTextType("yams");
        assertEquals("text", newType.type());
        assertEquals("yams", newType.subtype());
    }

    @Test
    public void testCreateVideoType() {
        MediaType newType = MediaType.createVideoType("yams");
        assertEquals("video", newType.type());
        assertEquals("yams", newType.subtype());
    }

    @Test
    public void testGetType() {
        assertEquals("text", MediaType.parse("text/plain").type());
        assertEquals("application",
                     MediaType.parse("application/atom+xml; charset=utf-8").type());
    }

    @Test
    public void testGetSubtype() {
        assertEquals("plain", MediaType.parse("text/plain").subtype());
        assertEquals("atom+xml",
                     MediaType.parse("application/atom+xml; charset=utf-8").subtype());
    }

    // Shared fixture: multimap view {a=[1, 2], b=[3]} used by the parameter tests.
    private static final Map<String, Collection<String>> PARAMETERS =
            ImmutableListMultimap.of("a", "1", "a", "2", "b", "3").asMap();

    @Test
    public void testGetParameters() {
        assertEquals(ImmutableMap.of(), MediaType.parse("text/plain").parameters());
        assertEquals(ImmutableMap.of("charset", ImmutableList.of("utf-8")),
                     MediaType.parse("application/atom+xml; charset=utf-8").parameters());
        assertEquals(PARAMETERS,
                     MediaType.parse("application/atom+xml; a=1; a=2; b=3").parameters());
    }

    @Test
    public void testWithoutParameters() {
        assertSame(MediaType.parse("image/gif"),
                   MediaType.parse("image/gif").withoutParameters());
        assertEquals(MediaType.parse("image/gif"),
                     MediaType.parse("image/gif; foo=bar").withoutParameters());
    }

    @Test
    public void testWithParameters() {
        assertEquals(MediaType.parse("text/plain; a=1; a=2; b=3"),
                     MediaType.parse("text/plain").withParameters(PARAMETERS));
        assertEquals(MediaType.parse("text/plain; a=1; a=2; b=3"),
                     MediaType.parse("text/plain; a=1; a=2; b=3").withParameters(PARAMETERS));
    }

    @Test
    public void testWithParameters_invalidAttribute() {
        MediaType mediaType = MediaType.parse("text/plain");
        Map<String, Collection<String>> parameters =
                ImmutableListMultimap.of("a", "1", "@", "2", "b", "3").asMap();
        assertThatThrownBy(() -> mediaType.withParameters(parameters))
                .isInstanceOf(IllegalArgumentException.class);
    }

    // withParameter replaces all existing values of the attribute with the single value.
    @Test
    public void testWithParameter() {
        assertEquals(MediaType.parse("text/plain; a=1"),
                     MediaType.parse("text/plain").withParameter("a", "1"));
        assertEquals(MediaType.parse("text/plain; a=1"),
                     MediaType.parse("text/plain; a=1; a=2").withParameter("a", "1"));
        assertEquals(MediaType.parse("text/plain; a=3"),
                     MediaType.parse("text/plain; a=1; a=2").withParameter("a", "3"));
        assertEquals(MediaType.parse("text/plain; a=1; a=2; b=3"),
                     MediaType.parse("text/plain; a=1; a=2").withParameter("b", "3"));
    }

    @Test
    public void testWithParameter_invalidAttribute() {
        MediaType mediaType = MediaType.parse("text/plain");
        assertThatThrownBy(() -> mediaType.withParameter("@", "2"))
                .isInstanceOf(IllegalArgumentException.class);
    }

    @Test
    public void testWithCharset() {
        assertEquals(MediaType.parse("text/plain; charset=utf-8"),
                     MediaType.parse("text/plain").withCharset(UTF_8));
        assertEquals(MediaType.parse("text/plain; charset=utf-8"),
                     MediaType.parse("text/plain; charset=utf-16").withCharset(UTF_8));
    }

    @Test
    public void testHasWildcard() {
        Assert.assertFalse(PLAIN_TEXT_UTF_8.hasWildcard());
        Assert.assertFalse(JPEG.hasWildcard());
        Assert.assertTrue(ANY_TYPE.hasWildcard());
        Assert.assertTrue(ANY_APPLICATION_TYPE.hasWildcard());
        Assert.assertTrue(ANY_AUDIO_TYPE.hasWildcard());
        Assert.assertTrue(ANY_IMAGE_TYPE.hasWildcard());
        Assert.assertTrue(ANY_TEXT_TYPE.hasWildcard());
        Assert.assertTrue(ANY_VIDEO_TYPE.hasWildcard());
    }

    // is() checks range containment: the receiver must be within the argument's
    // type/subtype range and carry all of the argument's parameters.
    @Test
    public void testIs() {
        Assert.assertTrue(PLAIN_TEXT_UTF_8.is(ANY_TYPE));
        Assert.assertTrue(JPEG.is(ANY_TYPE));
        Assert.assertTrue(ANY_TEXT_TYPE.is(ANY_TYPE));
        Assert.assertTrue(PLAIN_TEXT_UTF_8.is(ANY_TEXT_TYPE));
        Assert.assertTrue(PLAIN_TEXT_UTF_8.withoutParameters().is(ANY_TEXT_TYPE));
        Assert.assertFalse(JPEG.is(ANY_TEXT_TYPE));
        Assert.assertTrue(PLAIN_TEXT_UTF_8.is(PLAIN_TEXT_UTF_8));
        Assert.assertTrue(PLAIN_TEXT_UTF_8.is(PLAIN_TEXT_UTF_8.withoutParameters()));
        Assert.assertFalse(PLAIN_TEXT_UTF_8.withoutParameters().is(PLAIN_TEXT_UTF_8));
        Assert.assertFalse(PLAIN_TEXT_UTF_8.is(HTML_UTF_8));
        Assert.assertFalse(PLAIN_TEXT_UTF_8.withParameter("charset", "UTF-16").is(PLAIN_TEXT_UTF_8));
        Assert.assertFalse(PLAIN_TEXT_UTF_8.is(PLAIN_TEXT_UTF_8.withParameter("charset", "UTF-16")));
    }

    @Test
    public void testParse_empty() {
        assertThatThrownBy(() -> MediaType.parse("")).isInstanceOf(IllegalArgumentException.class);
    }

    // Each malformed input below must be rejected with IllegalArgumentException.
    @Test
    public void testParse_badInput() {
        assertThatThrownBy(() -> MediaType.parse("/"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("te<t/plain"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/pl@in"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain;"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; "))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a="))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a=@"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a=\"@"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a=1;"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a=1; "))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a=1; b"))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a=1; b="))
                .isInstanceOf(IllegalArgumentException.class);
        assertThatThrownBy(() -> MediaType.parse("text/plain; a=\u2025"))
                .isInstanceOf(IllegalArgumentException.class);
    }

    @Test
    public void testGetCharset() {
        assertThat(MediaType.parse("text/plain").charset()).isEmpty();
        assertThat(MediaType.parse("text/plain; charset=utf-8").charset()).hasValue(UTF_8);
    }

    @Test // Non-UTF-8 Charset
    public void testGetCharset_utf16() {
        assertThat(MediaType.parse("text/plain; charset=utf-16").charset()).hasValue(UTF_16);
    }

    // Multiple charset parameters are ambiguous and must raise IllegalStateException.
    @Test
    public void testGetCharset_tooMany() {
        MediaType mediaType = MediaType.parse("text/plain; charset=utf-8; charset=utf-16");
        assertThatThrownBy(mediaType::charset).isInstanceOf(IllegalStateException.class);
    }

    @Test
    public void testGetCharset_illegalCharset() {
        MediaType mediaType = MediaType.parse(
                "text/plain; charset=\"!@#$%^&*()\"");
        assertThatThrownBy(mediaType::charset).isInstanceOf(IllegalCharsetNameException.class);
    }

    @Test
    public void testGetCharset_unsupportedCharset() {
        MediaType mediaType = MediaType.parse(
                "text/plain; charset=utf-wtf");
        assertThatThrownBy(mediaType::charset).isInstanceOf(UnsupportedCharsetException.class);
    }

    // Equality groups: type/subtype are case-insensitive, parameter values are
    // case-sensitive except for charset, and parameter multiplicity matters.
    @Test
    public void testEquals() {
        new EqualsTester()
                .addEqualityGroup(
                        MediaType.create("text", "plain"),
                        MediaType.create("TEXT", "PLAIN"),
                        MediaType.parse("text/plain"),
                        MediaType.parse("TEXT/PLAIN"),
                        MediaType.create("text", "plain").withParameter("a", "1").withoutParameters())
                .addEqualityGroup(
                        MediaType.create("text", "plain").withCharset(UTF_8),
                        MediaType.create("text", "plain").withParameter("CHARSET", "UTF-8"),
                        MediaType.create("text", "plain")
                                 .withParameters(ImmutableMultimap.of("charset", "utf-8").asMap()),
                        MediaType.parse("text/plain;charset=utf-8"),
                        MediaType.parse("text/plain; charset=utf-8"),
                        MediaType.parse("text/plain;  charset=utf-8"),
                        MediaType.parse("text/plain; \tcharset=utf-8"),
                        MediaType.parse("text/plain; \r\n\tcharset=utf-8"),
                        MediaType.parse("text/plain; CHARSET=utf-8"),
                        MediaType.parse("text/plain; charset=\"utf-8\""),
                        MediaType.parse("text/plain; charset=\"\\u\\tf-\\8\""),
                        MediaType.parse("text/plain; charset=UTF-8"),
                        MediaType.parse("text/plain ; charset=utf-8"))
                .addEqualityGroup(MediaType.parse("text/plain; charset=utf-8; charset=utf-8"))
                .addEqualityGroup(
                        MediaType.create("text", "plain").withParameter("a", "value"),
                        MediaType.create("text", "plain").withParameter("A", "value"))
                .addEqualityGroup(
                        MediaType.create("text", "plain").withParameter("a", "VALUE"),
                        MediaType.create("text", "plain").withParameter("A", "VALUE"))
                .addEqualityGroup(
                        MediaType.create("text", "plain")
                                 .withParameters(ImmutableListMultimap.of("a", "1", "a", "2").asMap()),
                        MediaType.create("text", "plain")
                                 .withParameters(ImmutableListMultimap.of("a", "2", "a", "1").asMap()))
                .addEqualityGroup(MediaType.create("text", "csv"))
                .addEqualityGroup(MediaType.create("application", "atom+xml"))
                .testEquals();
    }

    @Test // Non-UTF-8 Charset
    public void testEquals_nonUtf8Charsets() {
        new EqualsTester()
                .addEqualityGroup(MediaType.create("text", "plain"))
                .addEqualityGroup(MediaType.create("text", "plain").withCharset(UTF_8))
                .addEqualityGroup(MediaType.create("text", "plain").withCharset(UTF_16))
                .testEquals();
    }

    @Test // com.google.common.testing.NullPointerTester
    public void testNullPointer() {
        NullPointerTester tester = new NullPointerTester();
        tester.testAllPublicConstructors(MediaType.class);
        tester.testAllPublicStaticMethods(MediaType.class);
        tester.testAllPublicInstanceMethods(MediaType.parse("text/plain"));
    }

    @Test
    public void testToString() {
        assertEquals("text/plain", MediaType.create("text", "plain").toString());
        assertEquals("text/plain; something=\"cr@zy\"; something-else=\"crazy with spaces\"",
                     MediaType.create("text", "plain")
                              .withParameter("something", "cr@zy")
                              .withParameter("something-else", "crazy with spaces")
                              .toString());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.remote;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.SftpException;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.component.file.GenericFile;
import org.apache.camel.component.file.GenericFileOperationFailedException;
import org.apache.camel.component.file.GenericFileProcessStrategy;
import org.apache.camel.util.FileUtil;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Secure FTP consumer
*/
public class SftpConsumer extends RemoteFileConsumer<SftpRemoteFile> {
    private static final Logger LOG = LoggerFactory.getLogger(SftpConsumer.class);
    // the directory the endpoint was configured with; used in asRemoteFile to
    // compute the relative file name of each polled file
    private String endpointPath;
    // lazily computed, cached toString representation (deterministic, so no locking needed)
    private transient String sftpConsumerToString;
    public SftpConsumer(RemoteFileEndpoint<SftpRemoteFile> endpoint, Processor processor, RemoteFileOperations<SftpRemoteFile> operations, GenericFileProcessStrategy<SftpRemoteFile> processStrategy) {
        super(endpoint, processor, operations, processStrategy);
        this.endpointPath = endpoint.getConfiguration().getDirectory();
    }
    /**
     * Starts the consumer, optionally auto-creating the configured remote directory
     * before the poll scheduler is allowed to run.
     */
    @Override
    protected void doStart() throws Exception {
        // turn off scheduler first, so autoCreate is handled before scheduler starts
        boolean startScheduler = isStartScheduler();
        setStartScheduler(false);
        try {
            super.doStart();
            if (endpoint.isAutoCreate()) {
                LOG.debug("Auto creating directory: {}", endpoint.getConfiguration().getDirectory());
                try {
                    connectIfNecessary();
                    operations.buildDirectory(endpoint.getConfiguration().getDirectory(), true);
                } catch (GenericFileOperationFailedException e) {
                    // log a WARN as we want to start the consumer.
                    LOG.warn("Error auto creating directory: " + endpoint.getConfiguration().getDirectory()
                            + " due " + e.getMessage() + ". This exception is ignored.", e);
                }
            }
        } finally {
            // restore the scheduler flag and start it only if it was enabled originally
            if (startScheduler) {
                setStartScheduler(true);
                startScheduler();
            }
        }
    }
    /**
     * Polls the given directory, restoring the remote working directory afterwards
     * when stepwise navigation is enabled.
     *
     * @return true if polling can continue, false to stop (limit reached)
     */
    @Override
    protected boolean pollDirectory(String fileName, List<GenericFile<SftpRemoteFile>> fileList, int depth) {
        String currentDir = null;
        if (isStepwise()) {
            // must remember current dir so we stay in that directory after the poll
            currentDir = operations.getCurrentDirectory();
        }
        // strip trailing slash
        fileName = FileUtil.stripTrailingSeparator(fileName);
        boolean answer = doPollDirectory(fileName, null, fileList, depth);
        if (currentDir != null) {
            operations.changeCurrentDirectory(currentDir);
        }
        return answer;
    }
    // polls a sub directory during a recursive scan; cd's back up afterwards when stepwise
    protected boolean pollSubDirectory(String absolutePath, String dirName, List<GenericFile<SftpRemoteFile>> fileList, int depth) {
        boolean answer = doSafePollSubDirectory(absolutePath, dirName, fileList, depth);
        // change back to parent directory when finished polling sub directory
        if (isStepwise()) {
            operations.changeToParentDirectory();
        }
        return answer;
    }
    /**
     * Performs the actual directory listing and collects matching files, recursing
     * into sub directories up to the endpoint's configured max depth.
     */
    @Override
    protected boolean doPollDirectory(String absolutePath, String dirName, List<GenericFile<SftpRemoteFile>> fileList, int depth) {
        LOG.trace("doPollDirectory from absolutePath: {}, dirName: {}", absolutePath, dirName);
        depth++;
        // remove trailing /
        dirName = FileUtil.stripTrailingSeparator(dirName);
        // compute dir depending on stepwise is enabled or not
        String dir = null;
        List<SftpRemoteFile> files = null;
        try {
            if (isStepwise()) {
                // stepwise: cd into the directory and LIST without a path argument
                dir = ObjectHelper.isNotEmpty(dirName) ? dirName : absolutePath;
                operations.changeCurrentDirectory(dir);
            } else {
                dir = absolutePath;
            }
            LOG.trace("Polling directory: {}", dir);
            if (isUseList()) {
                if (isStepwise()) {
                    files = operations.listFiles();
                } else {
                    files = operations.listFiles(dir);
                }
            } else {
                // we cannot use the LIST command(s) so we can only poll a named file
                // so created a pseudo file with that name
                fileExpressionResult = evaluateFileExpression();
                if (fileExpressionResult != null) {
                    SftpRemoteFile file = new SftpRemoteFileSingle(fileExpressionResult);
                    files = new ArrayList<>(1);
                    files.add(file);
                }
            }
        } catch (GenericFileOperationFailedException e) {
            if (ignoreCannotRetrieveFile(null, null, e)) {
                LOG.debug("Cannot list files in directory {} due directory does not exists or file permission error.", dir);
            } else {
                throw e;
            }
        }
        if (files == null || files.isEmpty()) {
            // no files in this directory to poll
            LOG.trace("No files found in directory: {}", dir);
            return true;
        } else {
            // we found some files
            LOG.trace("Found {} in directory: {}", files.size(), dir);
        }
        if (getEndpoint().isPreSort()) {
            // sort by file name before applying filters/validation
            Collections.sort(files, (a, b) -> a.getFilename().compareTo(b.getFilename()));
        }
        for (SftpRemoteFile file : files) {
            if (LOG.isTraceEnabled()) {
                LOG.trace("SftpFile[fileName={}, longName={}, dir={}]", file.getFilename(), file.getLongname(), file.isDirectory());
            }
            // check if we can continue polling in files
            if (!canPollMoreFiles(fileList)) {
                return false;
            }
            if (file.isDirectory()) {
                RemoteFile<SftpRemoteFile> remote = asRemoteFile(absolutePath, file, getEndpoint().getCharset());
                if (endpoint.isRecursive() && depth < endpoint.getMaxDepth() && isValidFile(remote, true, files)) {
                    // recursive scan and add the sub files and folders
                    String subDirectory = file.getFilename();
                    String path = ObjectHelper.isNotEmpty(absolutePath) ? absolutePath + "/" + subDirectory : subDirectory;
                    boolean canPollMore = pollSubDirectory(path, subDirectory, fileList, depth);
                    if (!canPollMore) {
                        return false;
                    }
                }
                // we cannot use file.getAttrs().isLink on Windows, so we dont invoke the method
                // just assuming its a file we should poll
            } else {
                RemoteFile<SftpRemoteFile> remote = asRemoteFile(absolutePath, file, getEndpoint().getCharset());
                if (depth >= endpoint.getMinDepth() && isValidFile(remote, false, files)) {
                    // matched file so add
                    fileList.add(remote);
                }
            }
        }
        return true;
    }
    /**
     * Returns true if the done file (by its plain name, path stripped) is present
     * in the directory listing of the current poll.
     */
    @Override
    protected boolean isMatched(GenericFile<SftpRemoteFile> file, String doneFileName, List<SftpRemoteFile> files) {
        String onlyName = FileUtil.stripPath(doneFileName);
        for (SftpRemoteFile f : files) {
            if (f.getFilename().equals(onlyName)) {
                return true;
            }
        }
        LOG.trace("Done file: {} does not exist", doneFileName);
        return false;
    }
    /**
     * Treats SFTP "no such file" and "permission denied" errors as ignorable when
     * the endpoint is configured with ignoreFileNotFoundOrPermissionError.
     */
    @Override
    protected boolean ignoreCannotRetrieveFile(String name, Exchange exchange, Exception cause) {
        if (getEndpoint().getConfiguration().isIgnoreFileNotFoundOrPermissionError()) {
            SftpException sftp = ObjectHelper.getException(SftpException.class, cause);
            if (sftp != null) {
                return sftp.id == ChannelSftp.SSH_FX_NO_SUCH_FILE || sftp.id == ChannelSftp.SSH_FX_PERMISSION_DENIED;
            }
        }
        return super.ignoreCannotRetrieveFile(name, exchange, cause);
    }
    /**
     * Wraps an SFTP listing entry in a {@link RemoteFile}, computing the absolute
     * and endpoint-relative paths used downstream.
     */
    private RemoteFile<SftpRemoteFile> asRemoteFile(String absolutePath, SftpRemoteFile file, String charset) {
        RemoteFile<SftpRemoteFile> answer = new RemoteFile<>();
        answer.setCharset(charset);
        answer.setEndpointPath(endpointPath);
        answer.setFile(file);
        answer.setFileNameOnly(file.getFilename());
        answer.setFileLength(file.getFileLength());
        answer.setLastModified(file.getLastModified());
        answer.setHostname(((RemoteFileConfiguration) endpoint.getConfiguration()).getHost());
        answer.setDirectory(file.isDirectory());
        // absolute or relative path
        boolean absolute = FileUtil.hasLeadingSeparator(absolutePath);
        answer.setAbsolute(absolute);
        // create a pseudo absolute name
        String dir = FileUtil.stripTrailingSeparator(absolutePath);
        String absoluteFileName = FileUtil.stripLeadingSeparator(dir + "/" + file.getFilename());
        // if absolute start with a leading separator otherwise let it be relative
        if (absolute) {
            absoluteFileName = "/" + absoluteFileName;
        }
        answer.setAbsoluteFilePath(absoluteFileName);
        // the relative filename, skip the leading endpoint configured path
        String relativePath = StringHelper.after(absoluteFileName, endpointPath);
        // skip trailing /
        relativePath = FileUtil.stripLeadingSeparator(relativePath);
        answer.setRelativeFilePath(relativePath);
        // the file name should be the relative path
        answer.setFileName(answer.getRelativeFilePath());
        return answer;
    }
    /**
     * Refreshes length/modified headers from the underlying JSch entry, when available.
     */
    @Override
    protected void updateFileHeaders(GenericFile<SftpRemoteFile> file, Message message) {
        Object rf = file.getFile().getRemoteFile();
        if (rf != null) {
            ChannelSftp.LsEntry e = (ChannelSftp.LsEntry) rf;
            long length = e.getAttrs().getSize();
            // multiply by 1000L: converting to milliseconds (mtime presumably in
            // seconds -- TODO confirm against JSch SftpATTRS javadoc)
            long modified = e.getAttrs().getMTime() * 1000L;
            file.setFileLength(length);
            file.setLastModified(modified);
            if (length >= 0) {
                message.setHeader(Exchange.FILE_LENGTH, length);
            }
            if (modified >= 0) {
                message.setHeader(Exchange.FILE_LAST_MODIFIED, modified);
            }
        }
    }
    // whether stepwise (cd-based) directory navigation is configured
    private boolean isStepwise() {
        RemoteFileConfiguration config = (RemoteFileConfiguration) endpoint.getConfiguration();
        return config.isStepwise();
    }
    // whether the LIST command may be used to discover files
    private boolean isUseList() {
        RemoteFileConfiguration config = (RemoteFileConfiguration) endpoint.getConfiguration();
        return config.isUseList();
    }
    @Override
    public String toString() {
        if (sftpConsumerToString == null) {
            sftpConsumerToString = "SftpConsumer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]";
        }
        return sftpConsumerToString;
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.OutputGroupProvider;
import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.RunfilesProvider;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.rules.RuleConfiguredTargetFactory;
import com.google.devtools.build.lib.rules.cpp.CcLinkParams;
import com.google.devtools.build.lib.rules.cpp.CcLinkParamsProvider;
import com.google.devtools.build.lib.rules.cpp.CcLinkParamsStore;
import com.google.devtools.build.lib.rules.cpp.CppCompilationContext;
import com.google.devtools.build.lib.rules.cpp.LinkerInput;
import com.google.devtools.build.lib.rules.java.JavaCompilationArgs.ClasspathType;
/**
* Implementation for the java_library rule.
*/
public class JavaLibrary implements RuleConfiguredTargetFactory {
  private final JavaSemantics semantics;
  protected JavaLibrary(JavaSemantics semantics) {
    this.semantics = semantics;
  }
  @Override
  public ConfiguredTarget create(RuleContext ruleContext)
      throws InterruptedException, RuleErrorException {
    JavaCommon common = new JavaCommon(ruleContext, semantics);
    RuleConfiguredTargetBuilder builder = init(ruleContext, common);
    // init returns null on rule errors; propagate that as a null target
    return builder != null ? builder.build() : null;
  }
  /**
   * Sets up compilation actions and providers for a java_library target.
   * Returns null (after reporting errors on the rule context) if the rule is in error.
   */
  public RuleConfiguredTargetBuilder init(RuleContext ruleContext, final JavaCommon common)
      throws InterruptedException {
    JavaTargetAttributes.Builder attributesBuilder = common.initCommon();
    // Collect the transitive dependencies.
    JavaCompilationHelper helper = new JavaCompilationHelper(
        ruleContext, semantics, common.getJavacOpts(), attributesBuilder);
    helper.addLibrariesToAttributes(common.targetsTreatedAsDeps(ClasspathType.COMPILE_ONLY));
    if (ruleContext.hasErrors()) {
      return null;
    }
    semantics.checkRule(ruleContext, common);
    JavaCompilationArtifacts.Builder javaArtifactsBuilder = new JavaCompilationArtifacts.Builder();
    if (ruleContext.hasErrors()) {
      // leave the common object in a defined state even when bailing out
      common.setJavaCompilationArtifacts(JavaCompilationArtifacts.EMPTY);
      return null;
    }
    JavaConfiguration javaConfig = ruleContext.getFragment(JavaConfiguration.class);
    NestedSetBuilder<Artifact> filesBuilder = NestedSetBuilder.stableOrder();
    JavaTargetAttributes attributes = helper.getAttributes();
    if (attributes.hasMessages()) {
      helper.setTranslations(
          semantics.translate(ruleContext, javaConfig, attributes.getMessages()));
    }
    ruleContext.checkSrcsSamePackage(true);
    Artifact jar = null;
    Artifact srcJar = ruleContext.getImplicitOutputArtifact(
        JavaSemantics.JAVA_LIBRARY_SOURCE_JAR);
    Artifact classJar = ruleContext.getImplicitOutputArtifact(
        JavaSemantics.JAVA_LIBRARY_CLASS_JAR);
    if (attributes.hasSourceFiles() || attributes.hasSourceJars() || attributes.hasResources()
        || attributes.hasMessages()) {
      // We only want to add a jar to the classpath of a dependent rule if it has content.
      javaArtifactsBuilder.addRuntimeJar(classJar);
      jar = classJar;
    }
    filesBuilder.add(classJar);
    Artifact manifestProtoOutput = helper.createManifestProtoOutput(classJar);
    // The gensrc jar is created only if the target uses annotation processing.
    // Otherwise, it is null, and the source jar action will not depend on the compile action.
    Artifact genSourceJar = null;
    Artifact genClassJar = null;
    if (helper.usesAnnotationProcessing()) {
      genClassJar = helper.createGenJar(classJar);
      genSourceJar = helper.createGensrcJar(classJar);
      helper.createGenJarAction(classJar, manifestProtoOutput, genClassJar);
    }
    // register the compile action (with instrumentation) and the source jar action
    Artifact outputDepsProto = helper.createOutputDepsProtoArtifact(classJar, javaArtifactsBuilder);
    helper.createCompileActionWithInstrumentation(classJar, manifestProtoOutput, genSourceJar,
        outputDepsProto, javaArtifactsBuilder);
    helper.createSourceJarAction(srcJar, genSourceJar);
    // ijar (interface jar) is only produced when there is actual source content
    Artifact iJar = null;
    if ((attributes.hasSourceFiles() || attributes.hasSourceJars()) && jar != null) {
      iJar = helper.createCompileTimeJarAction(jar, javaArtifactsBuilder);
    }
    boolean neverLink = JavaCommon.isNeverLink(ruleContext);
    JavaCompilationArtifacts javaArtifacts = javaArtifactsBuilder.build();
    common.setJavaCompilationArtifacts(javaArtifacts);
    common.setClassPathFragment(
        new ClasspathConfiguredFragment(
            javaArtifacts, attributes, neverLink, helper.getBootclasspathOrDefault()));
    CppCompilationContext transitiveCppDeps = common.collectTransitiveCppDeps();
    NestedSet<Artifact> transitiveSourceJars = common.collectTransitiveSourceJars(srcJar);
    // If sources are empty, treat this library as a forwarding node for dependencies.
    JavaCompilationArgs javaCompilationArgs =
        common.collectJavaCompilationArgs(false, neverLink, false);
    JavaCompilationArgs recursiveJavaCompilationArgs =
        common.collectJavaCompilationArgs(true, neverLink, false);
    NestedSet<Artifact> compileTimeJavaDepArtifacts = common.collectCompileTimeDependencyArtifacts(
        javaArtifacts.getCompileTimeDependencyArtifact());
    NestedSet<Artifact> runTimeJavaDepArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    NestedSet<LinkerInput> transitiveJavaNativeLibraries =
        common.collectTransitiveJavaNativeLibraries();
    // C++ link params for JNI deps; collected lazily via the store callback
    CcLinkParamsStore ccLinkParamsStore = new CcLinkParamsStore() {
      @Override
      protected void collect(CcLinkParams.Builder builder, boolean linkingStatically,
          boolean linkShared) {
        builder.addTransitiveTargets(common.targetsTreatedAsDeps(ClasspathType.BOTH),
            JavaCcLinkParamsProvider.TO_LINK_PARAMS, CcLinkParamsProvider.TO_LINK_PARAMS);
      }
    };
    RuleConfiguredTargetBuilder builder =
        new RuleConfiguredTargetBuilder(ruleContext);
    semantics.addProviders(
        ruleContext, common, ImmutableList.<String>of(), classJar, srcJar,
        genClassJar, genSourceJar, ImmutableMap.<Artifact, Artifact>of(),
        filesBuilder, builder);
    NestedSet<Artifact> filesToBuild = filesBuilder.build();
    common.addTransitiveInfoProviders(builder, filesToBuild, classJar);
    common.addGenJarsProvider(builder, genClassJar, genSourceJar);
    NestedSet<Artifact> proguardSpecs = new ProguardLibrary(ruleContext).collectProguardSpecs();
    CcLinkParamsProvider ccLinkParamsProvider = new CcLinkParamsProvider(ccLinkParamsStore);
    JavaCompilationArgsProvider compilationArgsProvider =
        JavaCompilationArgsProvider.create(
            javaCompilationArgs, recursiveJavaCompilationArgs,
            compileTimeJavaDepArtifacts, runTimeJavaDepArtifacts);
    JavaProvider javaProvider = new JavaProvider(compilationArgsProvider);
    // wire up all providers the java_library target exposes to dependents
    builder
        .add(
            JavaRuleOutputJarsProvider.class,
            JavaRuleOutputJarsProvider.builder()
                .addOutputJar(classJar, iJar, srcJar)
                .setJdeps(outputDepsProto)
                .build())
        .add(
            JavaRuntimeJarProvider.class,
            new JavaRuntimeJarProvider(javaArtifacts.getRuntimeJars()))
        .add(
            RunfilesProvider.class,
            RunfilesProvider.simple(
                JavaCommon.getRunfiles(ruleContext, semantics, javaArtifacts, neverLink)))
        .setFilesToBuild(filesToBuild)
        .add(JavaNeverlinkInfoProvider.class, new JavaNeverlinkInfoProvider(neverLink))
        .add(CppCompilationContext.class, transitiveCppDeps)
        .add(JavaCompilationArgsProvider.class, compilationArgsProvider)
        .add(JavaProvider.class, javaProvider)
        .add(CcLinkParamsProvider.class, ccLinkParamsProvider)
        .addNativeDeclaredProvider(ccLinkParamsProvider)
        .addNativeDeclaredProvider(javaProvider)
        .add(
            JavaNativeLibraryProvider.class,
            new JavaNativeLibraryProvider(transitiveJavaNativeLibraries))
        .add(
            JavaSourceInfoProvider.class,
            JavaSourceInfoProvider.fromJavaTargetAttributes(attributes, semantics))
        .add(
            JavaSourceJarsProvider.class,
            JavaSourceJarsProvider.create(transitiveSourceJars, ImmutableList.of(srcJar)))
        // TODO(bazel-team): this should only happen for java_plugin
        .add(JavaPluginInfoProvider.class, JavaCommon.getTransitivePlugins(ruleContext))
        .add(ProguardSpecProvider.class, new ProguardSpecProvider(proguardSpecs))
        .addOutputGroup(JavaSemantics.SOURCE_JARS_OUTPUT_GROUP, transitiveSourceJars)
        .addOutputGroup(OutputGroupProvider.HIDDEN_TOP_LEVEL, proguardSpecs);
    if (ruleContext.hasErrors()) {
      return null;
    }
    return builder;
  }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Tom Huybrechts
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model.listeners;
import hudson.ExtensionPoint;
import hudson.ExtensionListView;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.BuildListener;
import hudson.model.Environment;
import hudson.model.JobProperty;
import hudson.model.Run;
import hudson.model.TaskListener;
import jenkins.model.Jenkins;
import hudson.scm.SCM;
import hudson.tasks.BuildWrapper;
import hudson.util.CopyOnWriteList;
import org.jvnet.tiger_types.Types;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Receives notifications about builds.
*
* <p>
* Listener is always Hudson-wide, so once registered it gets notifications for every build
* that happens in this Hudson.
*
* <p>
* This is an abstract class so that methods added in the future won't break existing listeners.
*
* @author Kohsuke Kawaguchi
* @since 1.145
*/
public abstract class RunListener<R extends Run> implements ExtensionPoint {
    // the Run subtype this listener is interested in; used by the fire* methods
    // to filter which listeners receive each event
    public final Class<R> targetType;
    protected RunListener(Class<R> targetType) {
        this.targetType = targetType;
    }
    protected RunListener() {
        // infer targetType from the generic type argument of the subclass
        Type type = Types.getBaseClass(getClass(), RunListener.class);
        if (type instanceof ParameterizedType)
            targetType = Types.erasure(Types.getTypeArgument(type,0));
        else
            throw new IllegalStateException(getClass()+" uses the raw type for extending RunListener");
    }
    /**
     * Called after a build is completed.
     *
     * @param r
     *      The completed build.
     * @param listener
     *      The listener for this build. This can be used to produce log messages, for example,
     *      which becomes a part of the "console output" of this build. But when this method runs,
     *      the build is considered completed, so its status cannot be changed anymore.
     */
    public void onCompleted(R r, TaskListener listener) {}
    /**
     * Called after a build is moved to the {@link Run.State#COMPLETED} state.
     *
     * <p>
     * At this point, all the records related to a build are written down to the disk. As such,
     * {@link TaskListener} is no longer available. This happens later than {@link #onCompleted(Run, TaskListener)}.
     */
    public void onFinalized(R r) {}
    /**
     * Called when a build is started (i.e. it was in the queue, and will now start running
     * on an executor)
     *
     * @param r
     *      The started build.
     * @param listener
     *      The listener for this build. This can be used to produce log messages, for example,
     *      which becomes a part of the "console output" of this build.
     */
    public void onStarted(R r, TaskListener listener) {}
    /**
     * Runs before the {@link SCM#checkout(AbstractBuild, Launcher, FilePath, BuildListener, File)} runs, and performs a set up.
     * Can contribute additional properties/env vars to the environment.
     *
     * <p>
     * A typical strategy is for implementations to check {@link JobProperty}s and other configuration
     * of the project to determine the environment to inject, which allows you to achieve the equivalent of
     * {@link BuildWrapper}, but without UI.
     *
     * @param build
     *      The build in progress for which an {@link Environment} object is created.
     *      Never null.
     * @param launcher
     *      This launcher can be used to launch processes for this build.
     *      If the build runs remotely, launcher will also run a job on that remote machine.
     *      Never null.
     * @param listener
     *      Can be used to send any message.
     * @return
     *      non-null if the build can continue, null if there was an error
     *      and the build needs to be aborted.
     * @throws IOException
     *      terminates the build abnormally. Hudson will handle the exception
     *      and reports a nice error message.
     * @since 1.410
     */
    public Environment setUpEnvironment( AbstractBuild build, Launcher launcher, BuildListener listener ) throws IOException, InterruptedException {
        return new Environment() {};
    }
    /**
     * Called right before a build is going to be deleted.
     *
     * @param r The build.
     */
    public void onDeleted(R r) {}
    /**
     * Registers this object as an active listener so that it can start getting
     * callbacks invoked.
     *
     * @deprecated as of 1.281
     *      Put {@link Extension} on your class to get it auto-registered.
     */
    public void register() {
        all().add(this);
    }
    /**
     * Reverse operation of {@link #register()}.
     */
    public void unregister() {
        all().remove(this);
    }
    /**
     * List of registered listeners.
     * @deprecated as of 1.281
     *      Use {@link #all()} for read access, and use {@link Extension} for registration.
     */
    public static final CopyOnWriteList<RunListener> LISTENERS = ExtensionListView.createCopyOnWriteList(RunListener.class);
    /**
     * Fires the {@link #onCompleted(Run, TaskListener)} event.
     * A failing listener is reported and skipped so remaining listeners still run.
     */
    public static void fireCompleted(Run r, TaskListener listener) {
        for (RunListener l : all()) {
            if(l.targetType.isInstance(r))
                try {
                    l.onCompleted(r,listener);
                } catch (Throwable e) {
                    // NOTE(review): this catches Throwable while the other fire*
                    // methods catch Exception -- confirm whether the broader
                    // catch here is intentional
                    report(e);
                }
        }
    }
    /**
     * Fires the {@link #onStarted(Run, TaskListener)} event.
     */
    public static void fireStarted(Run r, TaskListener listener) {
        for (RunListener l : all()) {
            if(l.targetType.isInstance(r))
                try {
                    l.onStarted(r,listener);
                } catch (Exception e) {
                    report(e);
                }
        }
    }
    /**
     * Fires the {@link #onFinalized(Run)} event.
     */
    public static void fireFinalized(Run r) {
        for (RunListener l : all()) {
            if(l.targetType.isInstance(r))
                try {
                    l.onFinalized(r);
                } catch (Exception e) {
                    report(e);
                }
        }
    }
    /**
     * Fires the {@link #onDeleted(Run)} event.
     */
    public static void fireDeleted(Run r) {
        for (RunListener l : all()) {
            if(l.targetType.isInstance(r))
                try {
                    l.onDeleted(r);
                } catch (Exception e) {
                    report(e);
                }
        }
    }
    /**
     * Returns all the registered {@link RunListener} descriptors.
     */
    public static ExtensionList<RunListener> all() {
        return Jenkins.getInstance().getExtensionList(RunListener.class);
    }
    // logs a listener failure without propagating it to the caller
    private static void report(Throwable e) {
        LOGGER.log(Level.WARNING, "RunListener failed",e);
    }
    private static final Logger LOGGER = Logger.getLogger(RunListener.class.getName());
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.camel.CamelContext;
import org.apache.camel.NamedNode;
import org.apache.camel.StaticService;
import org.apache.camel.ThreadPoolRejectedPolicy;
import org.apache.camel.model.OptionalIdentifiedDefinition;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.ProcessorDefinitionHelper;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.spi.ExecutorServiceManager;
import org.apache.camel.spi.LifecycleStrategy;
import org.apache.camel.spi.ThreadPoolFactory;
import org.apache.camel.spi.ThreadPoolProfile;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.StopWatch;
import org.apache.camel.util.TimeUtils;
import org.apache.camel.util.URISupport;
import org.apache.camel.util.concurrent.CamelThreadFactory;
import org.apache.camel.util.concurrent.SizedScheduledExecutorService;
import org.apache.camel.util.concurrent.ThreadHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Default {@link org.apache.camel.spi.ExecutorServiceManager}.
*
* @version
*/
public class DefaultExecutorServiceManager extends ServiceSupport implements ExecutorServiceManager {
private static final Logger LOG = LoggerFactory.getLogger(DefaultExecutorServiceManager.class);
private final CamelContext camelContext;
private ThreadPoolFactory threadPoolFactory = new DefaultThreadPoolFactory();
private final List<ExecutorService> executorServices = new CopyOnWriteArrayList<ExecutorService>();
private String threadNamePattern;
private long shutdownAwaitTermination = 10000;
private String defaultThreadPoolProfileId = "defaultThreadPoolProfile";
private final Map<String, ThreadPoolProfile> threadPoolProfiles = new ConcurrentHashMap<String, ThreadPoolProfile>();
private ThreadPoolProfile defaultProfile;
    /**
     * Creates the manager and registers the built-in default thread pool profile
     * (core 10, max 20, 60s keep-alive, queue 1000, CallerRuns rejection).
     */
    public DefaultExecutorServiceManager(CamelContext camelContext) {
        this.camelContext = camelContext;
        defaultProfile = new ThreadPoolProfile(defaultThreadPoolProfileId);
        defaultProfile.setDefaultProfile(true);
        defaultProfile.setPoolSize(10);
        defaultProfile.setMaxPoolSize(20);
        defaultProfile.setKeepAliveTime(60L);
        defaultProfile.setTimeUnit(TimeUnit.SECONDS);
        defaultProfile.setMaxQueueSize(1000);
        defaultProfile.setAllowCoreThreadTimeOut(false);
        defaultProfile.setRejectedPolicy(ThreadPoolRejectedPolicy.CallerRuns);
        registerThreadPoolProfile(defaultProfile);
    }
    @Override
    public ThreadPoolFactory getThreadPoolFactory() {
        return threadPoolFactory;
    }
    @Override
    public void setThreadPoolFactory(ThreadPoolFactory threadPoolFactory) {
        this.threadPoolFactory = threadPoolFactory;
    }
    /**
     * Registers (or replaces) a thread pool profile keyed by its id.
     * The profile must be non-null and must have a non-empty id.
     */
    @Override
    public void registerThreadPoolProfile(ThreadPoolProfile profile) {
        ObjectHelper.notNull(profile, "profile");
        ObjectHelper.notEmpty(profile.getId(), "id", profile);
        threadPoolProfiles.put(profile.getId(), profile);
    }
    // returns null when no profile is registered under the given id
    @Override
    public ThreadPoolProfile getThreadPoolProfile(String id) {
        return threadPoolProfiles.get(id);
    }
    @Override
    public ThreadPoolProfile getDefaultThreadPoolProfile() {
        return getThreadPoolProfile(defaultThreadPoolProfileId);
    }
@Override
public void setDefaultThreadPoolProfile(ThreadPoolProfile defaultThreadPoolProfile) {
threadPoolProfiles.remove(defaultThreadPoolProfileId);
defaultThreadPoolProfile.addDefaults(defaultProfile);
LOG.info("Using custom DefaultThreadPoolProfile: " + defaultThreadPoolProfile);
this.defaultThreadPoolProfileId = defaultThreadPoolProfile.getId();
defaultThreadPoolProfile.setDefaultProfile(true);
registerThreadPoolProfile(defaultThreadPoolProfile);
}
    @Override
    public String getThreadNamePattern() {
        return threadNamePattern;
    }
    @Override
    public void setThreadNamePattern(String threadNamePattern) {
        // must set camel id here in the pattern and let the other placeholders be resolved on demand
        String name = threadNamePattern.replaceFirst("#camelId#", this.camelContext.getName());
        this.threadNamePattern = name;
    }
    @Override
    public long getShutdownAwaitTermination() {
        return shutdownAwaitTermination;
    }
    @Override
    public void setShutdownAwaitTermination(long shutdownAwaitTermination) {
        this.shutdownAwaitTermination = shutdownAwaitTermination;
    }
    // expands the remaining placeholders of the configured pattern for a concrete thread name
    @Override
    public String resolveThreadName(String name) {
        return ThreadHelper.resolveThreadName(threadNamePattern, name);
    }
    // creates a single daemon thread (not managed/tracked by this manager)
    @Override
    public Thread newThread(String name, Runnable runnable) {
        ThreadFactory factory = createThreadFactory(name, true);
        return factory.newThread(runnable);
    }
    @Override
    public ExecutorService newDefaultThreadPool(Object source, String name) {
        return newThreadPool(source, name, getDefaultThreadPoolProfile());
    }
    @Override
    public ScheduledExecutorService newDefaultScheduledThreadPool(Object source, String name) {
        return newScheduledThreadPool(source, name, getDefaultThreadPoolProfile());
    }
    /**
     * Creates a thread pool from a registered profile id.
     * Returns null when no profile with that id exists.
     */
    @Override
    public ExecutorService newThreadPool(Object source, String name, String profileId) {
        ThreadPoolProfile profile = getThreadPoolProfile(profileId);
        if (profile != null) {
            return newThreadPool(source, name, profile);
        } else {
            // no profile with that id
            return null;
        }
    }
    /**
     * Creates a thread pool from the given profile. The name is sanitized
     * (credentials masked) before being used for thread naming and logging,
     * and the profile inherits any unset options from the default profile.
     */
    @Override
    public ExecutorService newThreadPool(Object source, String name, ThreadPoolProfile profile) {
        String sanitizedName = URISupport.sanitizeUri(name);
        ObjectHelper.notNull(profile, "ThreadPoolProfile");
        ThreadPoolProfile defaultProfile = getDefaultThreadPoolProfile();
        profile.addDefaults(defaultProfile);
        ThreadFactory threadFactory = createThreadFactory(sanitizedName, true);
        ExecutorService executorService = threadPoolFactory.newThreadPool(profile, threadFactory);
        // track the pool for lifecycle management/shutdown
        onThreadPoolCreated(executorService, source, profile.getId());
        if (LOG.isDebugEnabled()) {
            LOG.debug("Created new ThreadPool for source: {} with name: {}. -> {}", new Object[]{source, sanitizedName, executorService});
        }
        return executorService;
    }
    // convenience variant: builds an ad-hoc profile from explicit pool sizes
    @Override
    public ExecutorService newThreadPool(Object source, String name, int poolSize, int maxPoolSize) {
        ThreadPoolProfile profile = new ThreadPoolProfile(name);
        profile.setPoolSize(poolSize);
        profile.setMaxPoolSize(maxPoolSize);
        return newThreadPool(source, name, profile);
    }
@Override
public ExecutorService newSingleThreadExecutor(Object source, String name) {
    // a single-thread executor is just a fixed pool of size 1
    return newFixedThreadPool(source, name, 1);
}
@Override
public ExecutorService newCachedThreadPool(Object source, String name) {
    String safeName = URISupport.sanitizeUri(name);
    ThreadFactory factory = createThreadFactory(safeName, true);
    ExecutorService pool = threadPoolFactory.newCachedThreadPool(factory);
    // cached pools have no profile, hence the null profile id
    onThreadPoolCreated(pool, source, null);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Created new CachedThreadPool for source: {} with name: {}. -> {}", new Object[]{source, safeName, pool});
    }
    return pool;
}
@Override
public ExecutorService newFixedThreadPool(Object source, String name, int poolSize) {
    // fixed pool: core == max and no keep-alive, so threads are never reclaimed
    ThreadPoolProfile fixedProfile = new ThreadPoolProfile(name);
    fixedProfile.setPoolSize(poolSize);
    fixedProfile.setMaxPoolSize(poolSize);
    fixedProfile.setKeepAliveTime(0L);
    return newThreadPool(source, name, fixedProfile);
}
@Override
public ScheduledExecutorService newSingleThreadScheduledExecutor(Object source, String name) {
    // single-thread scheduler is just a scheduled pool of size 1
    return newScheduledThreadPool(source, name, 1);
}
@Override
public ScheduledExecutorService newScheduledThreadPool(Object source, String name, ThreadPoolProfile profile) {
    String safeName = URISupport.sanitizeUri(name);
    // fill any unset options on the given profile from the default profile
    profile.addDefaults(getDefaultThreadPoolProfile());
    ThreadFactory factory = createThreadFactory(safeName, true);
    ScheduledExecutorService pool = threadPoolFactory.newScheduledThreadPool(profile, factory);
    // enlist the pool for JMX management and fail-safe shutdown tracking
    onThreadPoolCreated(pool, source, null);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Created new ScheduledThreadPool for source: {} with name: {}. -> {}", new Object[]{source, safeName, pool});
    }
    return pool;
}
@Override
public ScheduledExecutorService newScheduledThreadPool(Object source, String name, String profileId) {
    // look up the named profile; unknown ids yield null rather than an exception
    ThreadPoolProfile profile = getThreadPoolProfile(profileId);
    if (profile == null) {
        return null;
    }
    return newScheduledThreadPool(source, name, profile);
}
@Override
public ScheduledExecutorService newScheduledThreadPool(Object source, String name, int poolSize) {
    // build a one-off profile carrying only the requested pool size
    ThreadPoolProfile adHocProfile = new ThreadPoolProfile(name);
    adHocProfile.setPoolSize(poolSize);
    return newScheduledThreadPool(source, name, adHocProfile);
}
@Override
public void shutdown(ExecutorService executorService) {
    // fire-and-forget: 0 await time means we do not wait for running tasks to complete
    doShutdown(executorService, 0, false);
}
@Override
public void shutdownGraceful(ExecutorService executorService) {
    // graceful: wait up to the configured await-termination time before forcing shutdown
    doShutdown(executorService, getShutdownAwaitTermination(), false);
}
@Override
public void shutdownGraceful(ExecutorService executorService, long shutdownAwaitTermination) {
    // graceful with a caller-supplied await-termination time (millis)
    doShutdown(executorService, shutdownAwaitTermination, false);
}
/**
 * Shuts down the given thread pool, optionally waiting for running tasks to complete.
 *
 * @param executorService the pool to shut down; no-op when null
 * @param shutdownAwaitTermination millis to await termination before forcing; 0 means do not wait at all
 * @param failSafe whether this call is the fail-safe sweep during shutdown of this manager
 *                 (when true the pool is kept in the internal registry so the sweep loop is not disturbed)
 * @return true if the shutdown had to be forced (a WARN was logged), false otherwise
 */
private boolean doShutdown(ExecutorService executorService, long shutdownAwaitTermination, boolean failSafe) {
    if (executorService == null) {
        return false;
    }
    boolean warned = false;
    // shutting down a thread pool is a 2 step process. First we try graceful, and if that fails, then we go more aggressively
    // and try shutting down again. In both cases we wait at most the given shutdown timeout value given
    // (total wait could then be 2 x shutdownAwaitTermination, but when we shutdown the 2nd time we are aggressive and thus
    // we ought to shutdown much faster)
    if (!executorService.isShutdown()) {
        StopWatch watch = new StopWatch();
        LOG.trace("Shutdown of ExecutorService: {} with await termination: {} millis", executorService, shutdownAwaitTermination);
        executorService.shutdown();
        if (shutdownAwaitTermination > 0) {
            try {
                if (!awaitTermination(executorService, shutdownAwaitTermination)) {
                    warned = true;
                    LOG.warn("Forcing shutdown of ExecutorService: {} due first await termination elapsed.", executorService);
                    executorService.shutdownNow();
                    // we are now shutting down aggressively, so wait to see if we can completely shutdown or not
                    if (!awaitTermination(executorService, shutdownAwaitTermination)) {
                        LOG.warn("Cannot completely force shutdown of ExecutorService: {} due second await termination elapsed.", executorService);
                    }
                }
            } catch (InterruptedException e) {
                warned = true;
                LOG.warn("Forcing shutdown of ExecutorService: {} due interrupted.", executorService);
                // we were interrupted during shutdown, so force shutdown
                executorService.shutdownNow();
            }
        }
        // if we logged at WARN level, then report at INFO level when we are complete so the end user can see this in the log
        if (warned) {
            LOG.info("Shutdown of ExecutorService: {} is shutdown: {} and terminated: {} took: {}.",
                    new Object[]{executorService, executorService.isShutdown(), executorService.isTerminated(), TimeUtils.printDuration(watch.taken())});
        } else if (LOG.isDebugEnabled()) {
            LOG.debug("Shutdown of ExecutorService: {} is shutdown: {} and terminated: {} took: {}.",
                    new Object[]{executorService, executorService.isShutdown(), executorService.isTerminated(), TimeUtils.printDuration(watch.taken())});
        }
    }
    // let lifecycle strategy be notified as well which can let it be managed in JMX as well
    ThreadPoolExecutor threadPool = null;
    if (executorService instanceof ThreadPoolExecutor) {
        threadPool = (ThreadPoolExecutor) executorService;
    } else if (executorService instanceof SizedScheduledExecutorService) {
        // unwrap to the underlying ThreadPoolExecutor so lifecycle strategies see the real pool
        threadPool = ((SizedScheduledExecutorService) executorService).getScheduledThreadPoolExecutor();
    }
    if (threadPool != null) {
        for (LifecycleStrategy lifecycle : camelContext.getLifecycleStrategies()) {
            lifecycle.onThreadPoolRemove(camelContext, threadPool);
        }
    }
    // remove reference as its shutdown (do not remove if fail-safe)
    if (!failSafe) {
        executorServices.remove(executorService);
    }
    return warned;
}
@Override
public List<Runnable> shutdownNow(ExecutorService executorService) {
    // aggressive shutdown; returns the tasks that never commenced execution
    return doShutdownNow(executorService, false);
}
/**
 * Forces immediate shutdown of the given thread pool.
 *
 * @param executorService the pool to shut down; must not be null
 * @param failSafe whether this call is the fail-safe sweep during shutdown of this manager
 *                 (when true we log at WARN and keep the pool registered so the sweep loop is not disturbed)
 * @return the tasks that never commenced execution, or null if the pool was already shut down
 */
private List<Runnable> doShutdownNow(ExecutorService executorService, boolean failSafe) {
    ObjectHelper.notNull(executorService, "executorService");
    List<Runnable> answer = null;
    if (!executorService.isShutdown()) {
        if (failSafe) {
            // log as warn, as we shutdown as fail-safe, so end user should see more details in the log.
            LOG.warn("Forcing shutdown of ExecutorService: {}", executorService);
        } else {
            LOG.debug("Forcing shutdown of ExecutorService: {}", executorService);
        }
        answer = executorService.shutdownNow();
        if (LOG.isTraceEnabled()) {
            LOG.trace("Shutdown of ExecutorService: {} is shutdown: {} and terminated: {}.",
                    new Object[]{executorService, executorService.isShutdown(), executorService.isTerminated()});
        }
    }
    // let lifecycle strategy be notified as well which can let it be managed in JMX as well
    ThreadPoolExecutor threadPool = null;
    if (executorService instanceof ThreadPoolExecutor) {
        threadPool = (ThreadPoolExecutor) executorService;
    } else if (executorService instanceof SizedScheduledExecutorService) {
        // unwrap to the underlying ThreadPoolExecutor so lifecycle strategies see the real pool
        threadPool = ((SizedScheduledExecutorService) executorService).getScheduledThreadPoolExecutor();
    }
    if (threadPool != null) {
        for (LifecycleStrategy lifecycle : camelContext.getLifecycleStrategies()) {
            lifecycle.onThreadPoolRemove(camelContext, threadPool);
        }
    }
    // remove reference as its shutdown (do not remove if fail-safe)
    if (!failSafe) {
        executorServices.remove(executorService);
    }
    return answer;
}
/**
 * Awaits termination of the pool, logging progress roughly every 2 seconds
 * so the end user can see the shutdown is in progress.
 */
@Override
public boolean awaitTermination(ExecutorService executorService, long shutdownAwaitTermination) throws InterruptedException {
    StopWatch watch = new StopWatch();
    // wait in slices of at most 2 seconds so we can log progress in between
    long nextWait = Math.min(2000, shutdownAwaitTermination);
    while (nextWait > 0) {
        if (executorService.awaitTermination(nextWait, TimeUnit.MILLISECONDS)) {
            return true;
        }
        LOG.info("Waited {} for ExecutorService: {} to terminate...", TimeUtils.printDuration(watch.taken()), executorService);
        // shrink the next slice to whatever budget remains
        nextWait = Math.min(2000, shutdownAwaitTermination - watch.taken());
    }
    return false;
}
/**
 * Strategy callback when a new {@link java.util.concurrent.ExecutorService} have been created.
 * Subclasses may override this hook to customize newly created thread pools;
 * the default implementation does nothing.
 *
 * @param executorService the created {@link java.util.concurrent.ExecutorService}
 */
protected void onNewExecutorService(ExecutorService executorService) {
    // noop
}
@Override
protected void doStart() throws Exception {
    if (threadNamePattern != null) {
        // a pattern was already configured; keep it
        return;
    }
    // set default name pattern which includes the camel context name
    threadNamePattern = "Camel (" + camelContext.getName() + ") thread ##counter# - #name#";
}
@Override
protected void doStop() throws Exception {
    // noop - thread pools are only cleaned up on shutdown, see doShutdown
}
/**
 * Fail-safe cleanup on shutdown of this manager: aggressively shuts down any thread
 * pools still registered (normally their owners should have shut them down already),
 * then clears internal state while keeping the default thread pool profile so the
 * manager can be restarted.
 */
@Override
protected void doShutdown() throws Exception {
    // shutdown all remainder executor services by looping and doing this aggressively
    // as by normal all threads pool should have been shutdown using proper lifecycle
    // by their EIPs, components etc. This is acting as a fail-safe during shutdown
    // of CamelContext itself.
    Set<ExecutorService> forced = new LinkedHashSet<ExecutorService>();
    if (!executorServices.isEmpty()) {
        // at first give a bit of time to shutdown nicely as the thread pool is most likely in the process of being shutdown also
        LOG.debug("Giving time for {} ExecutorService's to shutdown properly (acting as fail-safe)", executorServices.size());
        for (ExecutorService executorService : executorServices) {
            try {
                // failSafe=true keeps the pool registered so this loop is not disturbed by concurrent removal
                boolean warned = doShutdown(executorService, getShutdownAwaitTermination(), true);
                // remember the thread pools that was forced to shutdown (eg warned)
                if (warned) {
                    forced.add(executorService);
                }
            } catch (Throwable e) {
                // only log if something goes wrong as we want to shutdown them all
                LOG.warn("Error occurred during shutdown of ExecutorService: "
                        + executorService + ". This exception will be ignored.", e);
            }
        }
    }
    // log the thread pools which was forced to shutdown so it may help the user to identify a problem of his
    if (!forced.isEmpty()) {
        LOG.warn("Forced shutdown of {} ExecutorService's which has not been shutdown properly (acting as fail-safe)", forced.size());
        for (ExecutorService executorService : forced) {
            LOG.warn("  forced -> {}", executorService);
        }
    }
    forced.clear();
    // clear list
    executorServices.clear();
    // do not clear the default profile as we could potential be restarted
    Iterator<ThreadPoolProfile> it = threadPoolProfiles.values().iterator();
    while (it.hasNext()) {
        ThreadPoolProfile profile = it.next();
        if (!profile.isDefaultProfile()) {
            it.remove();
        }
    }
}
/**
 * Invoked when a new thread pool is created.
 * This implementation will invoke the {@link LifecycleStrategy#onThreadPoolAdd(org.apache.camel.CamelContext,
 * java.util.concurrent.ThreadPoolExecutor, String, String, String, String) LifecycleStrategy.onThreadPoolAdd} method,
 * which for example will enlist the thread pool in JMX management.
 *
 * @param executorService the thread pool
 * @param source the source to use the thread pool
 * @param threadPoolProfileId profile id, if the thread pool was created from a thread pool profile
 */
private void onThreadPoolCreated(ExecutorService executorService, Object source, String threadPoolProfileId) {
    // add to internal list of thread pools
    executorServices.add(executorService);
    String id;
    String sourceId = null;
    String routeId = null;
    // extract id from source
    if (source instanceof NamedNode) {
        // NOTE(review): assumes every NamedNode here is an OptionalIdentifiedDefinition - confirm,
        // otherwise this cast can throw ClassCastException for custom NamedNode implementations
        id = ((OptionalIdentifiedDefinition<?>) source).idOrCreate(this.camelContext.getNodeIdFactory());
        // and let source be the short name of the pattern
        sourceId = ((NamedNode) source).getShortName();
    } else if (source instanceof String) {
        id = (String) source;
    } else if (source != null) {
        if (source instanceof StaticService) {
            // the source is static service so its name would be unique
            id = source.getClass().getSimpleName();
        } else {
            // fallback and use the simple class name with hashcode for the id so its unique for this given source
            id = source.getClass().getSimpleName() + "(" + ObjectHelper.getIdentityHashCode(source) + ")";
        }
    } else {
        // no source, so fallback and use the simple class name from thread pool and its hashcode identity so its unique
        id = executorService.getClass().getSimpleName() + "(" + ObjectHelper.getIdentityHashCode(executorService) + ")";
    }
    // id is mandatory
    ObjectHelper.notEmpty(id, "id for thread pool " + executorService);
    // extract route id if possible
    if (source instanceof ProcessorDefinition) {
        RouteDefinition route = ProcessorDefinitionHelper.getRoute((ProcessorDefinition<?>) source);
        if (route != null) {
            routeId = route.idOrCreate(this.camelContext.getNodeIdFactory());
        }
    }
    // let lifecycle strategy be notified as well which can let it be managed in JMX as well
    ThreadPoolExecutor threadPool = null;
    if (executorService instanceof ThreadPoolExecutor) {
        threadPool = (ThreadPoolExecutor) executorService;
    } else if (executorService instanceof SizedScheduledExecutorService) {
        // unwrap to the underlying ThreadPoolExecutor so lifecycle strategies see the real pool
        threadPool = ((SizedScheduledExecutorService) executorService).getScheduledThreadPoolExecutor();
    }
    if (threadPool != null) {
        for (LifecycleStrategy lifecycle : camelContext.getLifecycleStrategies()) {
            lifecycle.onThreadPoolAdd(camelContext, threadPool, id, sourceId, routeId, threadPoolProfileId);
        }
    }
    // now call strategy to allow custom logic
    onNewExecutorService(executorService);
}
// Builds a factory producing threads named from the configured pattern; isDaemon controls daemon status.
private ThreadFactory createThreadFactory(String name, boolean isDaemon) {
    return new CamelThreadFactory(threadNamePattern, name, isDaemon);
}
}
| |
/*
* Copyright 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package discounty.com.google.zxing.qrcode.encoder;
import discounty.com.google.zxing.EncodeHintType;
import discounty.com.google.zxing.WriterException;
import discounty.com.google.zxing.common.BitArray;
import discounty.com.google.zxing.common.CharacterSetECI;
import discounty.com.google.zxing.common.reedsolomon.GenericGF;
import discounty.com.google.zxing.common.reedsolomon.ReedSolomonEncoder;
import discounty.com.google.zxing.qrcode.decoder.ErrorCorrectionLevel;
import discounty.com.google.zxing.qrcode.decoder.Mode;
import discounty.com.google.zxing.qrcode.decoder.Version;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
/**
* @author satorux@google.com (Satoru Takabayashi) - creator
* @author dswitkin@google.com (Daniel Switkin) - ported from C++
*/
/**
 * Encodes text content into a QR code symbol ({@link QRCode}).
 * Entry point is {@link #encode(String, ErrorCorrectionLevel, Map)}; the remaining
 * package-visible helpers implement the steps of JIS X 0510:2004 (mode selection,
 * bit stream construction, padding, Reed-Solomon interleaving, mask selection).
 *
 * @author satorux@google.com (Satoru Takabayashi) - creator
 * @author dswitkin@google.com (Daniel Switkin) - ported from C++
 */
public final class Encoder {
    // The original table is defined in the table 5 of JISX0510:2004 (p.19).
    private static final int[] ALPHANUMERIC_TABLE = {
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0x00-0x0f
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0x10-0x1f
            36, -1, -1, -1, 37, 38, -1, -1, -1, -1, 39, 40, -1, 41, 42, 43, // 0x20-0x2f
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 44, -1, -1, -1, -1, -1, // 0x30-0x3f
            -1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, // 0x40-0x4f
            25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, -1, -1, -1, -1, -1, // 0x50-0x5f
    };
    static final String DEFAULT_BYTE_MODE_ENCODING = "ISO-8859-1";

    // Utility class; not instantiable.
    private Encoder() {
    }

    // The mask penalty calculation is complicated. See Table 21 of JISX0510:2004 (p.45) for details.
    // Basically it applies four rules and summate all penalties.
    private static int calculateMaskPenalty(ByteMatrix matrix) {
        return MaskUtil.applyMaskPenaltyRule1(matrix)
                + MaskUtil.applyMaskPenaltyRule2(matrix)
                + MaskUtil.applyMaskPenaltyRule3(matrix)
                + MaskUtil.applyMaskPenaltyRule4(matrix);
    }

    /**
     * @param content text to encode
     * @param ecLevel error correction level to use
     * @return {@link QRCode} representing the encoded QR code
     * @throws WriterException if encoding can't succeed, because of for example invalid content
     *   or configuration
     */
    public static QRCode encode(String content, ErrorCorrectionLevel ecLevel) throws WriterException {
        return encode(content, ecLevel, null);
    }

    /**
     * Encodes the content into a QR code.
     *
     * @param content text to encode
     * @param ecLevel error correction level to use
     * @param hints optional hints; {@link EncodeHintType#CHARACTER_SET} selects the byte-mode charset
     * @return {@link QRCode} representing the encoded QR code
     * @throws WriterException if encoding can't succeed (e.g. content too large for any version)
     */
    public static QRCode encode(String content,
                                ErrorCorrectionLevel ecLevel,
                                Map<EncodeHintType,?> hints) throws WriterException {
        // Determine what character encoding has been specified by the caller, if any
        String encoding = DEFAULT_BYTE_MODE_ENCODING;
        if (hints != null && hints.containsKey(EncodeHintType.CHARACTER_SET)) {
            encoding = hints.get(EncodeHintType.CHARACTER_SET).toString();
        }
        // Pick an encoding mode appropriate for the content. Note that this will not attempt to use
        // multiple modes / segments even if that were more efficient. Twould be nice.
        Mode mode = chooseMode(content, encoding);
        // This will store the header information, like mode and
        // length, as well as "header" segments like an ECI segment.
        BitArray headerBits = new BitArray();
        // Append ECI segment if applicable: only needed in BYTE mode with a non-default charset
        if (mode == Mode.BYTE && !DEFAULT_BYTE_MODE_ENCODING.equals(encoding)) {
            CharacterSetECI eci = CharacterSetECI.getCharacterSetECIByName(encoding);
            if (eci != null) {
                appendECI(eci, headerBits);
            }
        }
        // (With ECI in place,) Write the mode marker
        appendModeInfo(mode, headerBits);
        // Collect data within the main segment, separately, to count its size if needed. Don't add it to
        // main payload yet.
        BitArray dataBits = new BitArray();
        appendBytes(content, mode, dataBits, encoding);
        // Hard part: need to know version to know how many bits length takes. But need to know how many
        // bits it takes to know version. First we take a guess at version by assuming version will be
        // the minimum, 1:
        int provisionalBitsNeeded = headerBits.getSize()
                + mode.getCharacterCountBits(Version.getVersionForNumber(1))
                + dataBits.getSize();
        Version provisionalVersion = chooseVersion(provisionalBitsNeeded, ecLevel);
        // Use that guess to calculate the right version. I am still not sure this works in 100% of cases.
        int bitsNeeded = headerBits.getSize()
                + mode.getCharacterCountBits(provisionalVersion)
                + dataBits.getSize();
        Version version = chooseVersion(bitsNeeded, ecLevel);
        BitArray headerAndDataBits = new BitArray();
        headerAndDataBits.appendBitArray(headerBits);
        // Find "length" of main segment and write it
        int numLetters = mode == Mode.BYTE ? dataBits.getSizeInBytes() : content.length();
        appendLengthInfo(numLetters, version, mode, headerAndDataBits);
        // Put data together into the overall payload
        headerAndDataBits.appendBitArray(dataBits);
        Version.ECBlocks ecBlocks = version.getECBlocksForLevel(ecLevel);
        int numDataBytes = version.getTotalCodewords() - ecBlocks.getTotalECCodewords();
        // Terminate the bits properly.
        terminateBits(numDataBytes, headerAndDataBits);
        // Interleave data bits with error correction code.
        BitArray finalBits = interleaveWithECBytes(headerAndDataBits,
                version.getTotalCodewords(),
                numDataBytes,
                ecBlocks.getNumBlocks());
        QRCode qrCode = new QRCode();
        qrCode.setECLevel(ecLevel);
        qrCode.setMode(mode);
        qrCode.setVersion(version);
        // Choose the mask pattern and set to "qrCode".
        int dimension = version.getDimensionForVersion();
        ByteMatrix matrix = new ByteMatrix(dimension, dimension);
        int maskPattern = chooseMaskPattern(finalBits, ecLevel, version, matrix);
        qrCode.setMaskPattern(maskPattern);
        // Build the matrix and set it to "qrCode".
        MatrixUtil.buildMatrix(finalBits, ecLevel, version, maskPattern, matrix);
        qrCode.setMatrix(matrix);
        return qrCode;
    }

    /**
     * @return the code point of the table used in alphanumeric mode or
     *  -1 if there is no corresponding code in the table.
     */
    static int getAlphanumericCode(int code) {
        // FIX: also guard against negative values; previously a negative code
        // (e.g. from a sign-extended byte) raised ArrayIndexOutOfBoundsException
        // instead of returning the documented -1.
        if (code >= 0 && code < ALPHANUMERIC_TABLE.length) {
            return ALPHANUMERIC_TABLE[code];
        }
        return -1;
    }

    public static Mode chooseMode(String content) {
        return chooseMode(content, null);
    }

    /**
     * Choose the best mode by examining the content. Note that 'encoding' is used as a hint;
     * if it is Shift_JIS, and the input is only double-byte Kanji, then we return {@link Mode#KANJI}.
     */
    private static Mode chooseMode(String content, String encoding) {
        if ("Shift_JIS".equals(encoding) && isOnlyDoubleByteKanji(content)) {
            // Choose Kanji mode if all input are double-byte characters
            return Mode.KANJI;
        }
        boolean hasNumeric = false;
        boolean hasAlphanumeric = false;
        for (int i = 0; i < content.length(); ++i) {
            char c = content.charAt(i);
            if (c >= '0' && c <= '9') {
                hasNumeric = true;
            } else if (getAlphanumericCode(c) != -1) {
                hasAlphanumeric = true;
            } else {
                // any character outside the alphanumeric table forces BYTE mode
                return Mode.BYTE;
            }
        }
        if (hasAlphanumeric) {
            return Mode.ALPHANUMERIC;
        }
        if (hasNumeric) {
            return Mode.NUMERIC;
        }
        return Mode.BYTE;
    }

    /**
     * @return true if every byte pair of the Shift_JIS encoding of {@code content} starts with a
     *   lead byte in the double-byte Kanji ranges (0x81-0x9F or 0xE0-0xEB); false otherwise,
     *   including when the platform does not support Shift_JIS.
     */
    private static boolean isOnlyDoubleByteKanji(String content) {
        byte[] bytes;
        try {
            bytes = content.getBytes("Shift_JIS");
        } catch (UnsupportedEncodingException ignored) {
            // no Shift_JIS support on this platform: cannot be Kanji-only
            return false;
        }
        int length = bytes.length;
        if (length % 2 != 0) {
            return false;
        }
        for (int i = 0; i < length; i += 2) {
            int byte1 = bytes[i] & 0xFF;
            if ((byte1 < 0x81 || byte1 > 0x9F) && (byte1 < 0xE0 || byte1 > 0xEB)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Tries all 8 mask patterns and returns the one with the lowest total penalty
     * (see Table 21 of JISX0510:2004 p.45).
     */
    private static int chooseMaskPattern(BitArray bits,
                                         ErrorCorrectionLevel ecLevel,
                                         Version version,
                                         ByteMatrix matrix) throws WriterException {
        int minPenalty = Integer.MAX_VALUE; // Lower penalty is better.
        int bestMaskPattern = -1;
        // We try all mask patterns to choose the best one.
        for (int maskPattern = 0; maskPattern < QRCode.NUM_MASK_PATTERNS; maskPattern++) {
            MatrixUtil.buildMatrix(bits, ecLevel, version, maskPattern, matrix);
            int penalty = calculateMaskPenalty(matrix);
            if (penalty < minPenalty) {
                minPenalty = penalty;
                bestMaskPattern = maskPattern;
            }
        }
        return bestMaskPattern;
    }

    /**
     * @return the smallest version whose data capacity at {@code ecLevel} fits {@code numInputBits}
     * @throws WriterException if no version (1-40) is big enough
     */
    private static Version chooseVersion(int numInputBits, ErrorCorrectionLevel ecLevel) throws WriterException {
        // In the following comments, we use numbers of Version 7-H.
        for (int versionNum = 1; versionNum <= 40; versionNum++) {
            Version version = Version.getVersionForNumber(versionNum);
            // numBytes = 196
            int numBytes = version.getTotalCodewords();
            // getNumECBytes = 130
            Version.ECBlocks ecBlocks = version.getECBlocksForLevel(ecLevel);
            int numEcBytes = ecBlocks.getTotalECCodewords();
            // getNumDataBytes = 196 - 130 = 66
            int numDataBytes = numBytes - numEcBytes;
            int totalInputBytes = (numInputBits + 7) / 8;
            if (numDataBytes >= totalInputBytes) {
                return version;
            }
        }
        throw new WriterException("Data too big");
    }

    /**
     * Terminate bits as described in 8.4.8 and 8.4.9 of JISX0510:2004 (p.24).
     */
    static void terminateBits(int numDataBytes, BitArray bits) throws WriterException {
        int capacity = numDataBytes * 8;
        if (bits.getSize() > capacity) {
            // FIX: added missing space before the size so the message does not read "QR Code196 > ..."
            throw new WriterException("data bits cannot fit in the QR Code " + bits.getSize() + " > " +
                    capacity);
        }
        // Append the (up to 4-bit) terminator, clipped at capacity
        for (int i = 0; i < 4 && bits.getSize() < capacity; ++i) {
            bits.appendBit(false);
        }
        // Append termination bits. See 8.4.8 of JISX0510:2004 (p.24) for details.
        // If the last byte isn't 8-bit aligned, we'll add padding bits.
        int numBitsInLastByte = bits.getSize() & 0x07;
        if (numBitsInLastByte > 0) {
            for (int i = numBitsInLastByte; i < 8; i++) {
                bits.appendBit(false);
            }
        }
        // If we have more space, we'll fill the space with padding patterns defined in 8.4.9 (p.24).
        int numPaddingBytes = numDataBytes - bits.getSizeInBytes();
        for (int i = 0; i < numPaddingBytes; ++i) {
            bits.appendBits((i & 0x01) == 0 ? 0xEC : 0x11, 8);
        }
        if (bits.getSize() != capacity) {
            throw new WriterException("Bits size does not equal capacity");
        }
    }

    /**
     * Get number of data bytes and number of error correction bytes for block id "blockID". Store
     * the result in "numDataBytesInBlock", and "numECBytesInBlock". See table 12 in 8.5.1 of
     * JISX0510:2004 (p.30)
     */
    static void getNumDataBytesAndNumECBytesForBlockID(int numTotalBytes,
                                                       int numDataBytes,
                                                       int numRSBlocks,
                                                       int blockID,
                                                       int[] numDataBytesInBlock,
                                                       int[] numECBytesInBlock) throws WriterException {
        if (blockID >= numRSBlocks) {
            throw new WriterException("Block ID too large");
        }
        // numRsBlocksInGroup2 = 196 % 5 = 1
        int numRsBlocksInGroup2 = numTotalBytes % numRSBlocks;
        // numRsBlocksInGroup1 = 5 - 1 = 4
        int numRsBlocksInGroup1 = numRSBlocks - numRsBlocksInGroup2;
        // numTotalBytesInGroup1 = 196 / 5 = 39
        int numTotalBytesInGroup1 = numTotalBytes / numRSBlocks;
        // numTotalBytesInGroup2 = 39 + 1 = 40
        int numTotalBytesInGroup2 = numTotalBytesInGroup1 + 1;
        // numDataBytesInGroup1 = 66 / 5 = 13
        int numDataBytesInGroup1 = numDataBytes / numRSBlocks;
        // numDataBytesInGroup2 = 13 + 1 = 14
        int numDataBytesInGroup2 = numDataBytesInGroup1 + 1;
        // numEcBytesInGroup1 = 39 - 13 = 26
        int numEcBytesInGroup1 = numTotalBytesInGroup1 - numDataBytesInGroup1;
        // numEcBytesInGroup2 = 40 - 14 = 26
        int numEcBytesInGroup2 = numTotalBytesInGroup2 - numDataBytesInGroup2;
        // Sanity checks.
        // 26 = 26
        if (numEcBytesInGroup1 != numEcBytesInGroup2) {
            throw new WriterException("EC bytes mismatch");
        }
        // 5 = 4 + 1.
        if (numRSBlocks != numRsBlocksInGroup1 + numRsBlocksInGroup2) {
            throw new WriterException("RS blocks mismatch");
        }
        // 196 = (13 + 26) * 4 + (14 + 26) * 1
        if (numTotalBytes !=
                ((numDataBytesInGroup1 + numEcBytesInGroup1) *
                        numRsBlocksInGroup1) +
                        ((numDataBytesInGroup2 + numEcBytesInGroup2) *
                                numRsBlocksInGroup2)) {
            throw new WriterException("Total bytes mismatch");
        }
        if (blockID < numRsBlocksInGroup1) {
            numDataBytesInBlock[0] = numDataBytesInGroup1;
            numECBytesInBlock[0] = numEcBytesInGroup1;
        } else {
            numDataBytesInBlock[0] = numDataBytesInGroup2;
            numECBytesInBlock[0] = numEcBytesInGroup2;
        }
    }

    /**
     * Interleave "bits" with corresponding error correction bytes. On success, store the result in
     * "result". The interleave rule is complicated. See 8.6 of JISX0510:2004 (p.37) for details.
     */
    static BitArray interleaveWithECBytes(BitArray bits,
                                          int numTotalBytes,
                                          int numDataBytes,
                                          int numRSBlocks) throws WriterException {
        // "bits" must have "getNumDataBytes" bytes of data.
        if (bits.getSizeInBytes() != numDataBytes) {
            throw new WriterException("Number of bits and data bytes does not match");
        }
        // Step 1. Divide data bytes into blocks and generate error correction bytes for them. We'll
        // store the divided data bytes blocks and error correction bytes blocks into "blocks".
        int dataBytesOffset = 0;
        int maxNumDataBytes = 0;
        int maxNumEcBytes = 0;
        // Since, we know the number of reedsolmon blocks, we can initialize the vector with the number.
        Collection<BlockPair> blocks = new ArrayList<>(numRSBlocks);
        for (int i = 0; i < numRSBlocks; ++i) {
            int[] numDataBytesInBlock = new int[1];
            int[] numEcBytesInBlock = new int[1];
            getNumDataBytesAndNumECBytesForBlockID(
                    numTotalBytes, numDataBytes, numRSBlocks, i,
                    numDataBytesInBlock, numEcBytesInBlock);
            int size = numDataBytesInBlock[0];
            byte[] dataBytes = new byte[size];
            bits.toBytes(8 * dataBytesOffset, dataBytes, 0, size);
            byte[] ecBytes = generateECBytes(dataBytes, numEcBytesInBlock[0]);
            blocks.add(new BlockPair(dataBytes, ecBytes));
            maxNumDataBytes = Math.max(maxNumDataBytes, size);
            maxNumEcBytes = Math.max(maxNumEcBytes, ecBytes.length);
            dataBytesOffset += numDataBytesInBlock[0];
        }
        if (numDataBytes != dataBytesOffset) {
            throw new WriterException("Data bytes does not match offset");
        }
        BitArray result = new BitArray();
        // First, place data blocks.
        for (int i = 0; i < maxNumDataBytes; ++i) {
            for (BlockPair block : blocks) {
                byte[] dataBytes = block.getDataBytes();
                if (i < dataBytes.length) {
                    result.appendBits(dataBytes[i], 8);
                }
            }
        }
        // Then, place error correction blocks.
        for (int i = 0; i < maxNumEcBytes; ++i) {
            for (BlockPair block : blocks) {
                byte[] ecBytes = block.getErrorCorrectionBytes();
                if (i < ecBytes.length) {
                    result.appendBits(ecBytes[i], 8);
                }
            }
        }
        if (numTotalBytes != result.getSizeInBytes()) {  // Should be same.
            throw new WriterException("Interleaving error: " + numTotalBytes + " and " +
                    result.getSizeInBytes() + " differ.");
        }
        return result;
    }

    /**
     * Computes the Reed-Solomon error correction bytes for one block of data bytes.
     *
     * @param dataBytes the data codewords of one block
     * @param numEcBytesInBlock how many EC codewords to generate
     * @return the EC codewords
     */
    static byte[] generateECBytes(byte[] dataBytes, int numEcBytesInBlock) {
        int numDataBytes = dataBytes.length;
        int[] toEncode = new int[numDataBytes + numEcBytesInBlock];
        for (int i = 0; i < numDataBytes; i++) {
            toEncode[i] = dataBytes[i] & 0xFF;
        }
        new ReedSolomonEncoder(GenericGF.QR_CODE_FIELD_256).encode(toEncode, numEcBytesInBlock);
        byte[] ecBytes = new byte[numEcBytesInBlock];
        for (int i = 0; i < numEcBytesInBlock; i++) {
            ecBytes[i] = (byte) toEncode[numDataBytes + i];
        }
        return ecBytes;
    }

    /**
     * Append mode info. On success, store the result in "bits".
     */
    static void appendModeInfo(Mode mode, BitArray bits) {
        bits.appendBits(mode.getBits(), 4);
    }

    /**
     * Append length info. On success, store the result in "bits".
     */
    static void appendLengthInfo(int numLetters, Version version, Mode mode, BitArray bits) throws WriterException {
        int numBits = mode.getCharacterCountBits(version);
        if (numLetters >= (1 << numBits)) {
            throw new WriterException(numLetters + " is bigger than " + ((1 << numBits) - 1));
        }
        bits.appendBits(numLetters, numBits);
    }

    /**
     * Append "bytes" in "mode" mode (encoding) into "bits". On success, store the result in "bits".
     */
    static void appendBytes(String content,
                            Mode mode,
                            BitArray bits,
                            String encoding) throws WriterException {
        switch (mode) {
            case NUMERIC:
                appendNumericBytes(content, bits);
                break;
            case ALPHANUMERIC:
                appendAlphanumericBytes(content, bits);
                break;
            case BYTE:
                append8BitBytes(content, bits, encoding);
                break;
            case KANJI:
                appendKanjiBytes(content, bits);
                break;
            default:
                throw new WriterException("Invalid mode: " + mode);
        }
    }

    /**
     * Appends digits in NUMERIC mode: 3 digits -> 10 bits, 2 digits -> 7 bits, 1 digit -> 4 bits.
     * Assumes content contains only '0'-'9' (guaranteed by chooseMode).
     */
    static void appendNumericBytes(CharSequence content, BitArray bits) {
        int length = content.length();
        int i = 0;
        while (i < length) {
            int num1 = content.charAt(i) - '0';
            if (i + 2 < length) {
                // Encode three numeric letters in ten bits.
                int num2 = content.charAt(i + 1) - '0';
                int num3 = content.charAt(i + 2) - '0';
                bits.appendBits(num1 * 100 + num2 * 10 + num3, 10);
                i += 3;
            } else if (i + 1 < length) {
                // Encode two numeric letters in seven bits.
                int num2 = content.charAt(i + 1) - '0';
                bits.appendBits(num1 * 10 + num2, 7);
                i += 2;
            } else {
                // Encode one numeric letter in four bits.
                bits.appendBits(num1, 4);
                i++;
            }
        }
    }

    /**
     * Appends characters in ALPHANUMERIC mode: pairs -> 11 bits, trailing single -> 6 bits.
     *
     * @throws WriterException if a character is not in the alphanumeric table
     */
    static void appendAlphanumericBytes(CharSequence content, BitArray bits) throws WriterException {
        int length = content.length();
        int i = 0;
        while (i < length) {
            int code1 = getAlphanumericCode(content.charAt(i));
            if (code1 == -1) {
                throw new WriterException();
            }
            if (i + 1 < length) {
                int code2 = getAlphanumericCode(content.charAt(i + 1));
                if (code2 == -1) {
                    throw new WriterException();
                }
                // Encode two alphanumeric letters in 11 bits.
                bits.appendBits(code1 * 45 + code2, 11);
                i += 2;
            } else {
                // Encode one alphanumeric letter in six bits.
                bits.appendBits(code1, 6);
                i++;
            }
        }
    }

    /**
     * Appends content in BYTE mode: each byte of the given charset encoding as 8 bits.
     *
     * @throws WriterException if the charset is unsupported
     */
    static void append8BitBytes(String content, BitArray bits, String encoding)
            throws WriterException {
        byte[] bytes;
        try {
            bytes = content.getBytes(encoding);
        } catch (UnsupportedEncodingException uee) {
            throw new WriterException(uee);
        }
        for (byte b : bytes) {
            bits.appendBits(b, 8);
        }
    }

    /**
     * Appends content in KANJI mode: each Shift_JIS double-byte character as 13 bits.
     *
     * @throws WriterException if the content is not valid double-byte Shift_JIS
     */
    static void appendKanjiBytes(String content, BitArray bits) throws WriterException {
        byte[] bytes;
        try {
            bytes = content.getBytes("Shift_JIS");
        } catch (UnsupportedEncodingException uee) {
            throw new WriterException(uee);
        }
        // FIX: an odd number of bytes previously caused an ArrayIndexOutOfBoundsException
        // on bytes[i + 1]; report it as the declared WriterException instead.
        if (bytes.length % 2 != 0) {
            throw new WriterException("Kanji byte size not even");
        }
        int length = bytes.length;
        for (int i = 0; i < length; i += 2) {
            int byte1 = bytes[i] & 0xFF;
            int byte2 = bytes[i + 1] & 0xFF;
            int code = (byte1 << 8) | byte2;
            int subtracted = -1;
            // map the two Shift_JIS double-byte lead ranges onto one contiguous range
            if (code >= 0x8140 && code <= 0x9ffc) {
                subtracted = code - 0x8140;
            } else if (code >= 0xe040 && code <= 0xebbf) {
                subtracted = code - 0xc140;
            }
            if (subtracted == -1) {
                throw new WriterException("Invalid byte sequence");
            }
            int encoded = ((subtracted >> 8) * 0xc0) + (subtracted & 0xff);
            bits.appendBits(encoded, 13);
        }
    }

    /**
     * Appends an ECI segment (mode indicator + ECI designator) announcing the charset.
     */
    private static void appendECI(CharacterSetECI eci, BitArray bits) {
        bits.appendBits(Mode.ECI.getBits(), 4);
        // This is correct for values up to 127, which is all we need now.
        bits.appendBits(eci.getValue(), 8);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.StringJoiner;
public final class Request {
// All request bodies produced by this class are serialized as JSON.
static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON;
private final String method;
private final String endpoint;
private final Map<String, String> parameters;
// May be null for requests without a body.
private final HttpEntity entity;
/**
 * Creates a raw REST request description.
 *
 * @param method the HTTP method; must not be null
 * @param endpoint the request path; must not be null
 * @param parameters the query-string parameters; must not be null
 * @param entity the request body, or null when the request carries no body
 */
public Request(String method, String endpoint, Map<String, String> parameters, HttpEntity entity) {
this.method = Objects.requireNonNull(method, "method cannot be null");
this.endpoint = Objects.requireNonNull(endpoint, "endpoint cannot be null");
this.parameters = Objects.requireNonNull(parameters, "parameters cannot be null");
this.entity = entity;
}
/** Returns the HTTP method (e.g. GET, POST, DELETE). */
public String getMethod() {
return method;
}
/** Returns the request path, e.g. "/index/type/id". */
public String getEndpoint() {
return endpoint;
}
/** Returns the query-string parameters. */
public Map<String, String> getParameters() {
return parameters;
}
/** Returns the request body, or null when the request has none. */
public HttpEntity getEntity() {
return entity;
}
@Override
public String toString() {
    // The entity itself is not printed, only whether one is present.
    StringBuilder description = new StringBuilder("Request{");
    description.append("method='").append(method).append('\'');
    description.append(", endpoint='").append(endpoint).append('\'');
    description.append(", params=").append(parameters);
    description.append(", hasBody=").append(entity != null);
    description.append('}');
    return description.toString();
}
static Request delete(DeleteRequest deleteRequest) {
    // Single-document delete: DELETE /{index}/{type}/{id} with the
    // optional write parameters applied.
    Params params = Params.builder()
            .withRouting(deleteRequest.routing())
            .withParent(deleteRequest.parent())
            .withTimeout(deleteRequest.timeout())
            .withVersion(deleteRequest.version())
            .withVersionType(deleteRequest.versionType())
            .withRefreshPolicy(deleteRequest.getRefreshPolicy())
            .withWaitForActiveShards(deleteRequest.waitForActiveShards());
    String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
    return new Request(HttpDelete.METHOD_NAME, endpoint, params.getParams(), null);
}
static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) {
    // Whole-index delete: DELETE /{index1},{index2},...
    Params params = Params.builder()
            .withTimeout(deleteIndexRequest.timeout())
            .withMasterTimeout(deleteIndexRequest.masterNodeTimeout())
            .withIndicesOptions(deleteIndexRequest.indicesOptions());
    String endpoint = endpoint(deleteIndexRequest.indices(), Strings.EMPTY_ARRAY, "");
    return new Request(HttpDelete.METHOD_NAME, endpoint, params.getParams(), null);
}
/** Builds a GET / request; the root endpoint returns basic cluster/build info. */
static Request info() {
return new Request(HttpGet.METHOD_NAME, "/", Collections.emptyMap(), null);
}
/**
 * Converts a {@link BulkRequest} into one Bulk API call: for each action an
 * action/metadata line is written, followed (for index/create/update) by the
 * source document, with lines separated by the content type's stream separator.
 *
 * @throws IOException if a request body cannot be serialized
 * @throws IllegalArgumentException if the requests mix unsupported or
 *         mismatched content types (only JSON and SMILE are accepted)
 */
static Request bulk(BulkRequest bulkRequest) throws IOException {
Params parameters = Params.builder();
parameters.withTimeout(bulkRequest.timeout());
parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy());
// Bulk API only supports newline delimited JSON or Smile. Before executing
// the bulk, we need to check that all requests have the same content-type
// and this content-type is supported by the Bulk API.
XContentType bulkContentType = null;
for (int i = 0; i < bulkRequest.numberOfActions(); i++) {
DocWriteRequest<?> request = bulkRequest.requests().get(i);
DocWriteRequest.OpType opType = request.opType();
if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
bulkContentType = enforceSameContentType((IndexRequest) request, bulkContentType);
} else if (opType == DocWriteRequest.OpType.UPDATE) {
UpdateRequest updateRequest = (UpdateRequest) request;
if (updateRequest.doc() != null) {
bulkContentType = enforceSameContentType(updateRequest.doc(), bulkContentType);
}
if (updateRequest.upsertRequest() != null) {
bulkContentType = enforceSameContentType(updateRequest.upsertRequest(), bulkContentType);
}
}
}
// Only INDEX/CREATE/UPDATE carry documents; a bulk of deletes detects no
// content type above, so default to JSON.
if (bulkContentType == null) {
bulkContentType = XContentType.JSON;
}
final byte separator = bulkContentType.xContent().streamSeparator();
final ContentType requestContentType = createContentType(bulkContentType);
ByteArrayOutputStream content = new ByteArrayOutputStream();
for (DocWriteRequest<?> request : bulkRequest.requests()) {
DocWriteRequest.OpType opType = request.opType();
// First line: the action/metadata object, e.g. {"index":{"_index":...}}.
try (XContentBuilder metadata = XContentBuilder.builder(bulkContentType.xContent())) {
metadata.startObject();
{
metadata.startObject(opType.getLowercase());
if (Strings.hasLength(request.index())) {
metadata.field("_index", request.index());
}
if (Strings.hasLength(request.type())) {
metadata.field("_type", request.type());
}
if (Strings.hasLength(request.id())) {
metadata.field("_id", request.id());
}
if (Strings.hasLength(request.routing())) {
metadata.field("_routing", request.routing());
}
if (Strings.hasLength(request.parent())) {
metadata.field("_parent", request.parent());
}
if (request.version() != Versions.MATCH_ANY) {
metadata.field("_version", request.version());
}
// INTERNAL is the default version type and is omitted from the line.
VersionType versionType = request.versionType();
if (versionType != VersionType.INTERNAL) {
if (versionType == VersionType.EXTERNAL) {
metadata.field("_version_type", "external");
} else if (versionType == VersionType.EXTERNAL_GTE) {
metadata.field("_version_type", "external_gte");
} else if (versionType == VersionType.FORCE) {
metadata.field("_version_type", "force");
}
}
if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
IndexRequest indexRequest = (IndexRequest) request;
if (Strings.hasLength(indexRequest.getPipeline())) {
metadata.field("pipeline", indexRequest.getPipeline());
}
} else if (opType == DocWriteRequest.OpType.UPDATE) {
UpdateRequest updateRequest = (UpdateRequest) request;
if (updateRequest.retryOnConflict() > 0) {
metadata.field("_retry_on_conflict", updateRequest.retryOnConflict());
}
if (updateRequest.fetchSource() != null) {
metadata.field("_source", updateRequest.fetchSource());
}
}
metadata.endObject();
}
metadata.endObject();
BytesRef metadataSource = metadata.bytes().toBytesRef();
content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length);
content.write(separator);
}
// Second line (if any): the source document, re-serialized into the bulk's
// content type when the individual request used a different one.
BytesRef source = null;
if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
IndexRequest indexRequest = (IndexRequest) request;
BytesReference indexSource = indexRequest.source();
XContentType indexXContentType = indexRequest.getContentType();
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, indexSource, indexXContentType)) {
try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
builder.copyCurrentStructure(parser);
source = builder.bytes().toBytesRef();
}
}
} else if (opType == DocWriteRequest.OpType.UPDATE) {
source = XContentHelper.toXContent((UpdateRequest) request, bulkContentType, false).toBytesRef();
}
if (source != null) {
content.write(source.bytes, source.offset, source.length);
content.write(separator);
}
}
HttpEntity entity = new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType);
return new Request(HttpPost.METHOD_NAME, "/_bulk", parameters.getParams(), entity);
}
static Request exists(GetRequest getRequest) {
    // Reuses the GET request's endpoint and parameters but issues HEAD,
    // so only the status code (and no body) comes back.
    Request getVariant = get(getRequest);
    return new Request(HttpHead.METHOD_NAME, getVariant.endpoint, getVariant.parameters, null);
}
static Request get(GetRequest getRequest) {
    // GET /{index}/{type}/{id} with all optional read parameters applied.
    Params params = Params.builder()
            .withPreference(getRequest.preference())
            .withRouting(getRequest.routing())
            .withParent(getRequest.parent())
            .withRefresh(getRequest.refresh())
            .withRealtime(getRequest.realtime())
            .withStoredFields(getRequest.storedFields())
            .withVersion(getRequest.version())
            .withVersionType(getRequest.versionType())
            .withFetchSourceContext(getRequest.fetchSourceContext());
    String endpoint = endpoint(getRequest.index(), getRequest.type(), getRequest.id());
    return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), null);
}
static Request index(IndexRequest indexRequest) {
    // PUT when the caller supplied a document id, POST when Elasticsearch
    // should auto-generate one; explicit creates go through /_create.
    boolean hasId = Strings.hasLength(indexRequest.id());
    String method = hasId ? HttpPut.METHOD_NAME : HttpPost.METHOD_NAME;
    String createSuffix = (indexRequest.opType() == DocWriteRequest.OpType.CREATE) ? "_create" : null;
    String endpoint = endpoint(indexRequest.index(), indexRequest.type(), indexRequest.id(), createSuffix);
    Params params = Params.builder()
            .withRouting(indexRequest.routing())
            .withParent(indexRequest.parent())
            .withTimeout(indexRequest.timeout())
            .withVersion(indexRequest.version())
            .withVersionType(indexRequest.versionType())
            .withPipeline(indexRequest.getPipeline())
            .withRefreshPolicy(indexRequest.getRefreshPolicy())
            .withWaitForActiveShards(indexRequest.waitForActiveShards());
    // The document source is shipped verbatim, in its original content type.
    BytesRef source = indexRequest.source().toBytesRef();
    ContentType contentType = createContentType(indexRequest.getContentType());
    HttpEntity entity = new ByteArrayEntity(source.bytes, source.offset, source.length, contentType);
    return new Request(method, endpoint, params.getParams(), entity);
}
/** Builds a HEAD / request used to check that the cluster is reachable. */
static Request ping() {
return new Request(HttpHead.METHOD_NAME, "/", Collections.emptyMap(), null);
}
/**
 * Builds POST /{index}/{type}/{id}/_update for an {@link UpdateRequest}.
 * When both a partial doc and an upsert document are present they must
 * share a single content type.
 *
 * @throws IllegalStateException if doc and upsert use different content types
 */
static Request update(UpdateRequest updateRequest) throws IOException {
String endpoint = endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update");
Params parameters = Params.builder();
parameters.withRouting(updateRequest.routing());
parameters.withParent(updateRequest.parent());
parameters.withTimeout(updateRequest.timeout());
parameters.withRefreshPolicy(updateRequest.getRefreshPolicy());
parameters.withWaitForActiveShards(updateRequest.waitForActiveShards());
parameters.withDocAsUpsert(updateRequest.docAsUpsert());
parameters.withFetchSourceContext(updateRequest.fetchSource());
parameters.withRetryOnConflict(updateRequest.retryOnConflict());
parameters.withVersion(updateRequest.version());
parameters.withVersionType(updateRequest.versionType());
// The Java API allows update requests with different content types
// set for the partial document and the upsert document. This client
// only accepts update requests that have the same content types set
// for both doc and upsert.
XContentType xContentType = null;
if (updateRequest.doc() != null) {
xContentType = updateRequest.doc().getContentType();
}
if (updateRequest.upsertRequest() != null) {
XContentType upsertContentType = updateRequest.upsertRequest().getContentType();
if ((xContentType != null) && (xContentType != upsertContentType)) {
throw new IllegalStateException("Update request cannot have different content types for doc [" + xContentType + "]" +
" and upsert [" + upsertContentType + "] documents");
} else {
xContentType = upsertContentType;
}
}
// Neither a doc nor an upsert document was supplied: fall back to the default.
if (xContentType == null) {
xContentType = Requests.INDEX_CONTENT_TYPE;
}
HttpEntity entity = createEntity(updateRequest, xContentType);
return new Request(HttpPost.METHOD_NAME, endpoint, parameters.getParams(), entity);
}
static Request search(SearchRequest searchRequest) throws IOException {
    // GET /{indices}/{types}/_search; typed_keys is always requested so that
    // aggregation/suggester responses can be parsed back by name+type.
    String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search");
    Params params = Params.builder()
            .putParam(RestSearchAction.TYPED_KEYS_PARAM, "true")
            .withRouting(searchRequest.routing())
            .withPreference(searchRequest.preference())
            .withIndicesOptions(searchRequest.indicesOptions())
            .putParam("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
    Boolean requestCache = searchRequest.requestCache();
    if (requestCache != null) {
        params.putParam("request_cache", Boolean.toString(requestCache));
    }
    params.putParam("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize()));
    if (searchRequest.scroll() != null) {
        params.putParam("scroll", searchRequest.scroll().keepAlive());
    }
    // Only attach a body when the caller supplied a search source.
    HttpEntity body = (searchRequest.source() == null)
            ? null
            : createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE);
    return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), body);
}
/** Builds GET /_search/scroll carrying the scroll id and keep-alive in the body. */
static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOException {
    HttpEntity entity = createEntity(searchScrollRequest, REQUEST_BODY_CONTENT_TYPE);
    // Use the method-name constant instead of the "GET" literal, consistent
    // with every other request builder in this class.
    return new Request(HttpGet.METHOD_NAME, "/_search/scroll", Collections.emptyMap(), entity);
}
/** Builds DELETE /_search/scroll carrying the scroll ids to release in the body. */
static Request clearScroll(ClearScrollRequest clearScrollRequest) throws IOException {
    HttpEntity entity = createEntity(clearScrollRequest, REQUEST_BODY_CONTENT_TYPE);
    // Use the method-name constant instead of the "DELETE" literal, consistent
    // with every other request builder in this class.
    return new Request(HttpDelete.METHOD_NAME, "/_search/scroll", Collections.emptyMap(), entity);
}
private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
    // Serialize the request object and wrap the raw bytes in an entity whose
    // content type matches the serialization format.
    BytesRef bytes = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
    ContentType contentType = createContentType(xContentType);
    return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, contentType);
}
static String endpoint(String[] indices, String[] types, String endpoint) {
    // Comma-join the multi-valued path components before delegating to the
    // varargs builder.
    String indexList = String.join(",", indices);
    String typeList = String.join(",", types);
    return endpoint(indexList, typeList, endpoint);
}
/**
 * Utility method to build request's endpoint: joins the non-empty parts
 * with '/' and prefixes a leading '/'.
 */
static String endpoint(String... parts) {
    StringJoiner joiner = new StringJoiner("/", "/", "");
    for (int i = 0; i < parts.length; i++) {
        String part = parts[i];
        if (Strings.hasLength(part)) {
            joiner.add(part);
        }
    }
    return joiner.toString();
}
/**
 * Returns a {@link ContentType} from a given {@link XContentType}.
 *
 * @param xContentType the {@link XContentType}
 * @return the {@link ContentType}
 */
@SuppressForbidden(reason = "Only allowed place to convert a XContentType to a ContentType")
public static ContentType createContentType(final XContentType xContentType) {
// The null charset keeps the media type free of an explicit charset parameter.
return ContentType.create(xContentType.mediaTypeWithoutParameters(), (Charset) null);
}
/**
* Utility class to build request's parameters map and centralize all parameter names.
*/
static class Params {
private final Map<String, String> params = new HashMap<>();
private Params() {
}
// Adds a parameter. Blank/null values are silently dropped; registering the
// same key twice is treated as a programming error.
Params putParam(String key, String value) {
if (Strings.hasLength(value)) {
if (params.putIfAbsent(key, value) != null) {
throw new IllegalArgumentException("Request parameter [" + key + "] is already registered");
}
}
return this;
}
// TimeValue variant; null means "leave the parameter unset".
Params putParam(String key, TimeValue value) {
if (value != null) {
return putParam(key, value.getStringRep());
}
return this;
}
Params withDocAsUpsert(boolean docAsUpsert) {
if (docAsUpsert) {
return putParam("doc_as_upsert", Boolean.TRUE.toString());
}
return this;
}
// Translates a FetchSourceContext into the _source / _source_include /
// _source_exclude query parameters; defaults are omitted.
Params withFetchSourceContext(FetchSourceContext fetchSourceContext) {
if (fetchSourceContext != null) {
if (fetchSourceContext.fetchSource() == false) {
putParam("_source", Boolean.FALSE.toString());
}
if (fetchSourceContext.includes() != null && fetchSourceContext.includes().length > 0) {
putParam("_source_include", String.join(",", fetchSourceContext.includes()));
}
if (fetchSourceContext.excludes() != null && fetchSourceContext.excludes().length > 0) {
putParam("_source_exclude", String.join(",", fetchSourceContext.excludes()));
}
}
return this;
}
Params withMasterTimeout(TimeValue masterTimeout) {
return putParam("master_timeout", masterTimeout);
}
Params withParent(String parent) {
return putParam("parent", parent);
}
Params withPipeline(String pipeline) {
return putParam("pipeline", pipeline);
}
Params withPreference(String preference) {
return putParam("preference", preference);
}
// realtime defaults to true server-side; only the non-default is sent.
Params withRealtime(boolean realtime) {
if (realtime == false) {
return putParam("realtime", Boolean.FALSE.toString());
}
return this;
}
// Boolean refresh flag (get API) mapped onto the refresh policy parameter.
Params withRefresh(boolean refresh) {
if (refresh) {
return withRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
}
return this;
}
Params withRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
return putParam("refresh", refreshPolicy.getValue());
}
return this;
}
Params withRetryOnConflict(int retryOnConflict) {
if (retryOnConflict > 0) {
return putParam("retry_on_conflict", String.valueOf(retryOnConflict));
}
return this;
}
Params withRouting(String routing) {
return putParam("routing", routing);
}
Params withStoredFields(String[] storedFields) {
if (storedFields != null && storedFields.length > 0) {
return putParam("stored_fields", String.join(",", storedFields));
}
return this;
}
Params withTimeout(TimeValue timeout) {
return putParam("timeout", timeout);
}
Params withVersion(long version) {
if (version != Versions.MATCH_ANY) {
return putParam("version", Long.toString(version));
}
return this;
}
Params withVersionType(VersionType versionType) {
if (versionType != VersionType.INTERNAL) {
return putParam("version_type", versionType.name().toLowerCase(Locale.ROOT));
}
return this;
}
Params withWaitForActiveShards(ActiveShardCount activeShardCount) {
if (activeShardCount != null && activeShardCount != ActiveShardCount.DEFAULT) {
return putParam("wait_for_active_shards", activeShardCount.toString().toLowerCase(Locale.ROOT));
}
return this;
}
// Expands IndicesOptions into the ignore_unavailable / allow_no_indices /
// expand_wildcards parameters; these are always emitted.
Params withIndicesOptions(IndicesOptions indicesOptions) {
putParam("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable()));
putParam("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices()));
String expandWildcards;
if (indicesOptions.expandWildcardsOpen() == false && indicesOptions.expandWildcardsClosed() == false) {
expandWildcards = "none";
} else {
StringJoiner joiner = new StringJoiner(",");
if (indicesOptions.expandWildcardsOpen()) {
joiner.add("open");
}
if (indicesOptions.expandWildcardsClosed()) {
joiner.add("closed");
}
expandWildcards = joiner.toString();
}
putParam("expand_wildcards", expandWildcards);
return this;
}
// Callers receive a read-only view of the accumulated parameters.
Map<String, String> getParams() {
return Collections.unmodifiableMap(params);
}
static Params builder() {
return new Params();
}
}
/**
 * Ensure that the {@link IndexRequest}'s content type is supported by the Bulk API and that it conforms
 * to the current {@link BulkRequest}'s content type (if it's known at the time this method gets called).
 *
 * @return the {@link IndexRequest}'s content type
 * @throws IllegalArgumentException if the content type is unsupported or differs from the bulk's
 */
static XContentType enforceSameContentType(IndexRequest indexRequest, @Nullable XContentType xContentType) {
XContentType requestContentType = indexRequest.getContentType();
// The bulk body is newline-delimited, which only JSON and SMILE support.
if (requestContentType != XContentType.JSON && requestContentType != XContentType.SMILE) {
throw new IllegalArgumentException("Unsupported content-type found for request with content-type [" + requestContentType
+ "], only JSON and SMILE are supported");
}
// First typed request seen fixes the content type for the whole bulk.
if (xContentType == null) {
return requestContentType;
}
if (requestContentType != xContentType) {
throw new IllegalArgumentException("Mismatching content-type found for request with content-type [" + requestContentType
+ "], previous requests have content-type [" + xContentType + "]");
}
return xContentType;
}
}
| |
/*
* Copyright 2012 Matt Corallo.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.anoncoin.store;
import com.google.anoncoin.core.*;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.*;
import java.util.LinkedList;
import java.util.List;
// Originally written for Apache Derby, but its DELETE (and general) performance was awful
/**
* A full pruned block store using the H2 pure-java embedded database.
*
* Note that because of the heavy delete load on the database, during IBD,
* you may see the database files grow quite large (around 1.5G).
* H2 automatically frees some space at shutdown, so close()ing the database
* decreases the space usage somewhat (to only around 1.3G).
*/
public class H2FullPrunedBlockStore implements FullPrunedBlockStore {
private static final Logger log = LoggerFactory.getLogger(H2FullPrunedBlockStore.class);
// Cached copies of the chain-head pointers persisted in the settings table.
private Sha256Hash chainHeadHash;
private StoredBlock chainHeadBlock;
private Sha256Hash verifiedChainHeadHash;
private StoredBlock verifiedChainHeadBlock;
private NetworkParameters params;
// One JDBC connection per thread; every connection handed out is also kept
// in allConnections so close() can release them all.
private ThreadLocal<Connection> conn;
private List<Connection> allConnections;
private String connectionURL;
// Number of recent blocks kept with full undo data before pruning.
private int fullStoreDepth;
static final String driver = "org.h2.Driver";
// Key/value table holding the two chain-head pointers below.
static final String CREATE_SETTINGS_TABLE = "CREATE TABLE settings ( "
+ "name VARCHAR(32) NOT NULL CONSTRAINT settings_pk PRIMARY KEY,"
+ "value BLOB"
+ ")";
static final String CHAIN_HEAD_SETTING = "chainhead";
static final String VERIFIED_CHAIN_HEAD_SETTING = "verifiedchainhead";
// Block headers, keyed by a truncated 28-byte block hash (see
// putUpdateStoredBlock, which skips leading zero bytes of the hash).
static final String CREATE_HEADERS_TABLE = "CREATE TABLE headers ( "
+ "hash BINARY(28) NOT NULL CONSTRAINT headers_pk PRIMARY KEY,"
+ "chainWork BLOB NOT NULL,"
+ "height INT NOT NULL,"
+ "header BLOB NOT NULL,"
+ "wasUndoable BOOL NOT NULL"
+ ")";
// Undo data for recent blocks; either txOutChanges or transactions is set.
static final String CREATE_UNDOABLE_TABLE = "CREATE TABLE undoableBlocks ( "
+ "hash BINARY(28) NOT NULL CONSTRAINT undoableBlocks_pk PRIMARY KEY,"
+ "height INT NOT NULL,"
+ "txOutChanges BLOB,"
+ "transactions BLOB"
+ ")";
// Height index speeds up pruning of old undoable blocks.
static final String CREATE_UNDOABLE_TABLE_INDEX = "CREATE INDEX heightIndex ON undoableBlocks (height)";
// Maps a full 32-byte tx hash to a compact BIGINT id used by openOutputs.
static final String CREATE_OPEN_OUTPUT_INDEX_TABLE = "CREATE TABLE openOutputsIndex ("
+ "hash BINARY(32) NOT NULL CONSTRAINT openOutputsIndex_pk PRIMARY KEY,"
+ "height INT NOT NULL,"
+ "id BIGINT NOT NULL AUTO_INCREMENT"
+ ")";
// Unspent transaction outputs, keyed by (tx id, output index); the id column
// references openOutputsIndex via a foreign key.
static final String CREATE_OPEN_OUTPUT_TABLE = "CREATE TABLE openOutputs ("
+ "id BIGINT NOT NULL,"
+ "index INT NOT NULL,"
+ "value BLOB NOT NULL,"
+ "scriptBytes BLOB NOT NULL,"
+ "PRIMARY KEY (id, index),"
+ "CONSTRAINT openOutputs_fk FOREIGN KEY (id) REFERENCES openOutputsIndex(id)"
+ ")";
/**
* Creates a new H2FullPrunedBlockStore
* @param params A copy of the NetworkParameters used
* @param dbName The path to the database on disk
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
* @throws BlockStoreException if the database fails to open for any reason
*/
public H2FullPrunedBlockStore(NetworkParameters params, String dbName, int fullStoreDepth) throws BlockStoreException {
this.params = params;
this.fullStoreDepth = fullStoreDepth;
// NOTE(review): ";create=true" is a Derby-style URL attribute; H2 creates
// missing databases by default -- confirm H2 tolerates this setting.
connectionURL = "jdbc:h2:" + dbName + ";create=true";
conn = new ThreadLocal<Connection>();
allConnections = new LinkedList<Connection>();
try {
Class.forName(driver);
log.info(driver + " loaded. ");
} catch (java.lang.ClassNotFoundException e) {
// Only logged: if the driver really is missing, the maybeConnect() below
// will surface the failure as a BlockStoreException.
log.error("check CLASSPATH for H2 jar ", e);
}
maybeConnect();
try {
// Create tables if needed
if (!tableExists("settings"))
createTables();
initFromDatabase();
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
/**
* Creates a new H2FullPrunedBlockStore with the given cache size
* @param params A copy of the NetworkParameters used
* @param dbName The path to the database on disk
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
* @param cacheSize The number of kilobytes to dedicate to H2 Cache (the default value of 16MB (16384) is a safe bet
* to achieve good performance/cost when importing blocks from disk, past 32MB makes little sense,
* and below 4MB sees a sharp drop in performance)
* @throws BlockStoreException if the database fails to open for any reason
*/
public H2FullPrunedBlockStore(NetworkParameters params, String dbName, int fullStoreDepth, int cacheSize) throws BlockStoreException {
    this(params, dbName, fullStoreDepth);
    try {
        Statement s = conn.get().createStatement();
        try {
            // Apply the caller-requested H2 cache size (in kilobytes).
            s.executeUpdate("SET CACHE_SIZE " + cacheSize);
        } finally {
            // Close the statement even when the update fails; the original
            // leaked the handle on SQLException.
            s.close();
        }
    } catch (SQLException e) {
        throw new BlockStoreException(e);
    }
}
/**
 * Lazily opens a JDBC connection for the calling thread; no-op when this
 * thread already has one. Each new connection is recorded in allConnections
 * so close() can find it later.
 */
private synchronized void maybeConnect() throws BlockStoreException {
try {
if (conn.get() != null)
return;
conn.set(DriverManager.getConnection(connectionURL));
allConnections.add(conn.get());
log.info("Made a new connection to database " + connectionURL);
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
}
public synchronized void close() {
    // Roll back any uncommitted work and actually close every connection that
    // was handed out. The original only rolled back, leaking the JDBC handles;
    // per the class doc, H2 also frees disk space when the database is closed.
    for (Connection connection : allConnections) {
        try {
            connection.rollback();
            connection.close();
        } catch (SQLException ex) {
            throw new RuntimeException(ex);
        }
    }
    allConnections.clear();
    // Drop the thread-local references to the now-closed connections so a
    // later maybeConnect() opens a fresh one instead of reusing a dead handle.
    conn = new ThreadLocal<Connection>();
}
/**
 * Drops and recreates all tables, returning the store to a freshly
 * initialized state containing only the genesis block.
 */
public void resetStore() throws BlockStoreException {
    maybeConnect();
    try {
        Statement s = conn.get().createStatement();
        try {
            // openOutputs must go before openOutputsIndex because of the
            // foreign-key constraint between them.
            s.executeUpdate("DROP TABLE settings");
            s.executeUpdate("DROP TABLE headers");
            s.executeUpdate("DROP TABLE undoableBlocks");
            s.executeUpdate("DROP TABLE openOutputs");
            s.executeUpdate("DROP TABLE openOutputsIndex");
        } finally {
            // Always release the statement, even when a DROP fails.
            s.close();
        }
        createTables();
        initFromDatabase();
    } catch (SQLException ex) {
        // Wrap in the declared BlockStoreException; the original threw an
        // undeclared RuntimeException, inconsistent with the rest of the class.
        throw new BlockStoreException(ex);
    }
}
/**
 * Creates the schema (headers, settings, undoable blocks, open outputs),
 * seeds the two chain-head settings rows with NULL, and then writes the
 * genesis block via createNewStore().
 */
private void createTables() throws SQLException, BlockStoreException {
Statement s = conn.get().createStatement();
log.debug("H2FullPrunedBlockStore : CREATE headers table");
s.executeUpdate(CREATE_HEADERS_TABLE);
log.debug("H2FullPrunedBlockStore : CREATE settings table");
s.executeUpdate(CREATE_SETTINGS_TABLE);
log.debug("H2FullPrunedBlockStore : CREATE undoable block table");
s.executeUpdate(CREATE_UNDOABLE_TABLE);
log.debug("H2FullPrunedBlockStore : CREATE undoable block index");
s.executeUpdate(CREATE_UNDOABLE_TABLE_INDEX);
// openOutputsIndex must exist before openOutputs (foreign key).
log.debug("H2FullPrunedBlockStore : CREATE open output index table");
s.executeUpdate(CREATE_OPEN_OUTPUT_INDEX_TABLE);
log.debug("H2FullPrunedBlockStore : CREATE open output table");
s.executeUpdate(CREATE_OPEN_OUTPUT_TABLE);
// Placeholder rows; createNewStore() fills in the real chain heads.
s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + CHAIN_HEAD_SETTING + "', NULL)");
s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERIFIED_CHAIN_HEAD_SETTING + "', NULL)");
s.close();
createNewStore(params);
}
/**
 * Loads the chain-head and verified-chain-head pointers from the settings
 * table into the in-memory fields, failing if either is missing.
 *
 * NOTE(review): the Statement s is not closed on the exception paths below
 * (missing row / missing head block) -- only on the success path.
 */
private void initFromDatabase() throws SQLException, BlockStoreException {
Statement s = conn.get().createStatement();
ResultSet rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + CHAIN_HEAD_SETTING + "'");
if (!rs.next()) {
throw new BlockStoreException("corrupt H2 block store - no chain head pointer");
}
Sha256Hash hash = new Sha256Hash(rs.getBytes(1));
rs.close();
this.chainHeadBlock = get(hash);
this.chainHeadHash = hash;
if (this.chainHeadBlock == null)
{
throw new BlockStoreException("corrupt H2 block store - head block not found");
}
rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + VERIFIED_CHAIN_HEAD_SETTING + "'");
if (!rs.next()) {
throw new BlockStoreException("corrupt H2 block store - no verified chain head pointer");
}
hash = new Sha256Hash(rs.getBytes(1));
rs.close();
s.close();
this.verifiedChainHeadBlock = get(hash);
this.verifiedChainHeadHash = hash;
if (this.verifiedChainHeadBlock == null)
{
throw new BlockStoreException("corrupt H2 block store - verified head block not found");
}
}
/**
 * Seeds a freshly created database with the genesis block and points both
 * chain heads at it.
 */
private void createNewStore(NetworkParameters params) throws BlockStoreException {
try {
// Set up the genesis block. When we start out fresh, it is by
// definition the top of the chain.
StoredBlock storedGenesisHeader = new StoredBlock(params.genesisBlock.cloneAsHeader(), params.genesisBlock.getWork(), 0);
// The coinbase in the genesis block is not spendable. This is because of how the reference client inits
// its database - the genesis transaction isn't actually in the db so its spent flags can never be updated.
List<Transaction> genesisTransactions = Lists.newLinkedList();
StoredUndoableBlock storedGenesis = new StoredUndoableBlock(params.genesisBlock.getHash(), genesisTransactions);
put(storedGenesisHeader, storedGenesis);
setChainHead(storedGenesisHeader);
setVerifiedChainHead(storedGenesisHeader);
} catch (VerificationException e) {
throw new RuntimeException(e); // Cannot happen.
}
}
private boolean tableExists(String table) throws SQLException {
    // Probe the table with a query that can never return rows; a SQLException
    // from the query is taken to mean the table does not exist.
    Statement statement = conn.get().createStatement();
    try {
        ResultSet probe = statement.executeQuery("SELECT * FROM " + table + " WHERE 1 = 2");
        probe.close();
        return true;
    } catch (SQLException ex) {
        return false;
    } finally {
        statement.close();
    }
}
/**
* Dumps information about the size of actual data in the database to standard output
* The only truly useless data counted is printed in the form "N in id indexes"
* This does not take database indexes into account
*/
public void dumpSizes() throws SQLException, BlockStoreException {
maybeConnect();
Statement s = conn.get().createStatement();
long size = 0;
long totalSize = 0;
int count = 0;
// Settings table: name + value bytes.
// NOTE(review): assumes every settings row has a non-NULL value; a NULL
// value (as seeded by createTables before init) would NPE on getBytes(2).
ResultSet rs = s.executeQuery("SELECT name, value FROM settings");
while (rs.next()) {
size += rs.getString(1).length();
size += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Settings size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
// Headers: fixed-size hash/height columns are counted as constants.
rs = s.executeQuery("SELECT chainWork, header FROM headers");
while (rs.next()) {
size += 28; // hash
size += rs.getBytes(1).length;
size += 4; // height
size += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Headers size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
// Undoable blocks: exactly one of txOutChanges/transactions is counted.
rs = s.executeQuery("SELECT txOutChanges, transactions FROM undoableBlocks");
while (rs.next()) {
size += 28; // hash
size += 4; // height
byte[] txOutChanges = rs.getBytes(1);
byte[] transactions = rs.getBytes(2);
if (txOutChanges == null)
size += transactions.length;
else
size += txOutChanges.length;
// size += the space to represent NULL
count++;
}
rs.close();
System.out.printf("Undoable Blocks size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
// Open-output index rows are fixed size: 32-byte hash + height + id.
rs = s.executeQuery("SELECT id FROM openOutputsIndex");
while (rs.next()) {
size += 32; // hash
size += 4; // height
size += 8; // id
count++;
}
rs.close();
System.out.printf("Open Outputs Index size: %d, count: %d, size in id indexes: %d%n", size, count, count * 8);
totalSize += size; size = 0; count = 0;
long scriptSize = 0;
rs = s.executeQuery("SELECT value, scriptBytes FROM openOutputs");
while (rs.next()) {
size += 8; // id
size += 4; // index
size += rs.getBytes(1).length;
size += rs.getBytes(2).length;
scriptSize += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Open Outputs size: %d, count: %d, average size: %f, average script size: %f (%d in id indexes)%n",
size, count, (double)size/count, (double)scriptSize/count, count * 8);
totalSize += size;
System.out.println("Total Size: " + totalSize);
s.close();
}
private void putUpdateStoredBlock(StoredBlock storedBlock, boolean wasUndoable) throws SQLException {
try {
PreparedStatement s =
conn.get().prepareStatement("INSERT INTO headers(hash, chainWork, height, header, wasUndoable)"
+ " VALUES(?, ?, ?, ?, ?)");
// We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
byte[] hashBytes = new byte[28];
System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
s.setBytes(1, hashBytes);
s.setBytes(2, storedBlock.getChainWork().toByteArray());
s.setInt(3, storedBlock.getHeight());
s.setBytes(4, storedBlock.getHeader().unsafeAnoncoinSerialize());
s.setBoolean(5, wasUndoable);
s.executeUpdate();
s.close();
} catch (SQLException e) {
// It is possible we try to add a duplicate StoredBlock if we upgraded
// In that case, we just update the entry to mark it wasUndoable
if (e.getErrorCode() != 23505 || !wasUndoable)
throw e;
PreparedStatement s = conn.get().prepareStatement("UPDATE headers SET wasUndoable=? WHERE hash=?");
s.setBoolean(1, true);
// We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
byte[] hashBytes = new byte[28];
System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
s.setBytes(2, hashBytes);
s.executeUpdate();
s.close();
}
}
public void put(StoredBlock storedBlock) throws BlockStoreException {
maybeConnect();
try {
putUpdateStoredBlock(storedBlock, false);
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
    /**
     * Persists a block header together with its undoable data (either the serialized
     * transaction-output changes, or the full transaction list when the block has not
     * yet been connected). If the undoable row already exists (H2 duplicate-key error
     * 23505) the existing row is updated instead.
     *
     * @param storedBlock   the block header (plus chain work/height) to persist
     * @param undoableBlock the undo data; exactly one of txOutChanges/transactions is stored
     * @throws BlockStoreException on serialization failure or any database error
     */
    public void put(StoredBlock storedBlock, StoredUndoableBlock undoableBlock) throws BlockStoreException {
        maybeConnect();
        // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
        // NOTE(review): the copy starts at offset 3, not 4 as the comment claims; this
        // matches the other hash truncations in this class — TODO confirm intended offset.
        byte[] hashBytes = new byte[28];
        System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
        int height = storedBlock.getHeight();
        byte[] transactions = null;
        byte[] txOutChanges = null;
        try {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            if (undoableBlock.getTxOutChanges() != null) {
                // Preferred compact form: just the output changes.
                undoableBlock.getTxOutChanges().serializeToStream(bos);
                txOutChanges = bos.toByteArray();
            } else {
                // Fallback form: little-endian 4-byte transaction count followed by
                // each serialized transaction (mirrored by getUndoBlock()).
                int numTxn = undoableBlock.getTransactions().size();
                bos.write((int) (0xFF & (numTxn >> 0)));
                bos.write((int) (0xFF & (numTxn >> 8)));
                bos.write((int) (0xFF & (numTxn >> 16)));
                bos.write((int) (0xFF & (numTxn >> 24)));
                for (Transaction tx : undoableBlock.getTransactions())
                    tx.anoncoinSerialize(bos);
                transactions = bos.toByteArray();
            }
            bos.close();
        } catch (IOException e) {
            throw new BlockStoreException(e);
        }
        try {
            try {
                PreparedStatement s =
                        conn.get().prepareStatement("INSERT INTO undoableBlocks(hash, height, txOutChanges, transactions)"
                                + " VALUES(?, ?, ?, ?)");
                s.setBytes(1, hashBytes);
                s.setInt(2, height);
                // Exactly one of columns 3/4 is non-NULL; dumpSizes() and getUndoBlock() rely on this.
                if (transactions == null) {
                    s.setBytes(3, txOutChanges);
                    s.setNull(4, Types.BLOB);
                } else {
                    s.setNull(3, Types.BLOB);
                    s.setBytes(4, transactions);
                }
                s.executeUpdate();
                s.close();
                try {
                    // Header insert must follow the undoable insert so a duplicate-key
                    // failure above skips straight to the UPDATE path below.
                    putUpdateStoredBlock(storedBlock, true);
                } catch (SQLException e) {
                    throw new BlockStoreException(e);
                }
            } catch (SQLException e) {
                // Only tolerate duplicate-key (23505); anything else is a real failure.
                if (e.getErrorCode() != 23505)
                    throw new BlockStoreException(e);
                // There is probably an update-or-insert statement, but it wasn't obvious from the docs
                PreparedStatement s =
                        conn.get().prepareStatement("UPDATE undoableBlocks SET txOutChanges=?, transactions=?"
                                + " WHERE hash = ?");
                s.setBytes(3, hashBytes);
                if (transactions == null) {
                    s.setBytes(1, txOutChanges);
                    s.setNull(2, Types.BLOB);
                } else {
                    s.setNull(1, Types.BLOB);
                    s.setBytes(2, transactions);
                }
                s.executeUpdate();
                s.close();
            }
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        }
    }
    /**
     * Looks up a block header by hash, optionally requiring that it was once undoable.
     * The two cached chain heads are checked first to avoid a database round trip.
     *
     * @param hash            the block hash to look up
     * @param wasUndoableOnly if true, return null unless the header's wasUndoable flag is set
     * @return the stored block, or null if absent (or filtered out by wasUndoableOnly)
     * @throws BlockStoreException on database error or if the stored header fails to parse/verify
     */
    public StoredBlock get(Sha256Hash hash, boolean wasUndoableOnly) throws BlockStoreException {
        // Optimize for chain head
        if (chainHeadHash != null && chainHeadHash.equals(hash))
            return chainHeadBlock;
        if (verifiedChainHeadHash != null && verifiedChainHeadHash.equals(hash))
            return verifiedChainHeadBlock;
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                    .prepareStatement("SELECT chainWork, height, header, wasUndoable FROM headers WHERE hash = ?");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            // NOTE(review): the copy starts at offset 3, consistent with the writes in this class.
            byte[] hashBytes = new byte[28];
            System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            if (wasUndoableOnly && !results.getBoolean(4))
                return null;
            BigInteger chainWork = new BigInteger(results.getBytes(1));
            int height = results.getInt(2);
            Block b = new Block(params, results.getBytes(3));
            b.verifyHeader();
            StoredBlock stored = new StoredBlock(b, chainWork, height);
            return stored;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } catch (ProtocolException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (VerificationException e) {
            // Should not be able to happen unless the database contains bad
            // blocks.
            throw new BlockStoreException(e);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /** Looks up a block header by hash without the once-undoable restriction. */
    public StoredBlock get(Sha256Hash hash) throws BlockStoreException {
        return get(hash, false);
    }
    /** Looks up a block header by hash, returning it only if it was once stored as undoable. */
    public StoredBlock getOnceUndoableStoredBlock(Sha256Hash hash) throws BlockStoreException {
        return get(hash, true);
    }
    /**
     * Retrieves the undo data for the given block hash. Depending on which column is
     * non-NULL, the result is reconstructed either from serialized output changes or
     * from the raw transaction list written by {@code put(StoredBlock, StoredUndoableBlock)}.
     *
     * @param hash the block hash to look up
     * @return the undoable block, or null if no row exists for this hash
     * @throws BlockStoreException on database error or if the stored bytes fail to parse
     */
    public StoredUndoableBlock getUndoBlock(Sha256Hash hash) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                    .prepareStatement("SELECT txOutChanges, transactions FROM undoableBlocks WHERE hash = ?");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            // NOTE(review): offset 3 as elsewhere in this class, despite the "4 bytes" wording.
            byte[] hashBytes = new byte[28];
            System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            byte[] txOutChanges = results.getBytes(1);
            byte[] transactions = results.getBytes(2);
            StoredUndoableBlock block;
            if (txOutChanges == null) {
                // Fallback form: little-endian 4-byte count, then the serialized
                // transactions back-to-back (the exact format written by put()).
                int offset = 0;
                int numTxn = ((transactions[offset++] & 0xFF) << 0) |
                        ((transactions[offset++] & 0xFF) << 8) |
                        ((transactions[offset++] & 0xFF) << 16) |
                        ((transactions[offset++] & 0xFF) << 24);
                List<Transaction> transactionList = new LinkedList<Transaction>();
                for (int i = 0; i < numTxn; i++) {
                    Transaction tx = new Transaction(params, transactions, offset);
                    transactionList.add(tx);
                    offset += tx.getMessageSize();
                }
                block = new StoredUndoableBlock(hash, transactionList);
            } else {
                TransactionOutputChanges outChangesObject =
                        new TransactionOutputChanges(new ByteArrayInputStream(txOutChanges));
                block = new StoredUndoableBlock(hash, outChangesObject);
            }
            return block;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } catch (NullPointerException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (ClassCastException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (ProtocolException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (IOException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /** Returns the cached best-chain head (kept in sync by setChainHead). */
    public StoredBlock getChainHead() throws BlockStoreException {
        return chainHeadBlock;
    }
public void setChainHead(StoredBlock chainHead) throws BlockStoreException {
Sha256Hash hash = chainHead.getHeader().getHash();
this.chainHeadHash = hash;
this.chainHeadBlock = chainHead;
maybeConnect();
try {
PreparedStatement s = conn.get()
.prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
s.setString(2, CHAIN_HEAD_SETTING);
s.setBytes(1, hash.getBytes());
s.executeUpdate();
s.close();
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
}
    /** Returns the cached fully-verified chain head (kept in sync by setVerifiedChainHead). */
    public StoredBlock getVerifiedChainHead() throws BlockStoreException {
        return verifiedChainHeadBlock;
    }
public void setVerifiedChainHead(StoredBlock chainHead) throws BlockStoreException {
Sha256Hash hash = chainHead.getHeader().getHash();
this.verifiedChainHeadHash = hash;
this.verifiedChainHeadBlock = chainHead;
maybeConnect();
try {
PreparedStatement s = conn.get()
.prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
s.setString(2, VERIFIED_CHAIN_HEAD_SETTING);
s.setBytes(1, hash.getBytes());
s.executeUpdate();
s.close();
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
if (this.chainHeadBlock.getHeight() < chainHead.getHeight())
setChainHead(chainHead);
removeUndoableBlocksWhereHeightIsLessThan(chainHead.getHeight() - fullStoreDepth);
}
    /**
     * Deletes undoable-block rows at or below the given height.
     *
     * NOTE(review): the method name says "less than" but the SQL uses {@code <=}
     * (inclusive). Callers (setVerifiedChainHead) appear to rely on the current
     * inclusive behavior, so it is documented rather than changed — TODO confirm.
     *
     * @param height rows with height <= this value are removed
     * @throws BlockStoreException if the delete fails
     */
    private void removeUndoableBlocksWhereHeightIsLessThan(int height) throws BlockStoreException {
        try {
            PreparedStatement s = conn.get()
                    .prepareStatement("DELETE FROM undoableBlocks WHERE height <= ?");
            s.setInt(1, height);
            s.executeUpdate();
            s.close();
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        }
    }
    /**
     * Looks up an unspent transaction output by transaction hash and output index,
     * joining the index table (hash -> id/height) with the outputs table.
     *
     * @param hash  the transaction hash (full 32 bytes are used here, unlike block hashes)
     * @param index the output index within the transaction (stored as a signed int)
     * @return the stored output, or null if it is not in the open-outputs set
     * @throws BlockStoreException on any database error
     */
    public StoredTransactionOutput getTransactionOutput(Sha256Hash hash, long index) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                    .prepareStatement("SELECT openOutputsIndex.height, openOutputs.value, openOutputs.scriptBytes " +
                            "FROM openOutputsIndex NATURAL JOIN openOutputs " +
                            "WHERE openOutputsIndex.hash = ? AND openOutputs.index = ?");
            s.setBytes(1, hash.getBytes());
            // index is actually an unsigned int
            s.setInt(2, (int)index);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            int height = results.getInt(1);
            BigInteger value = new BigInteger(results.getBytes(2));
            // Tell the StoredTransactionOutput that we are a coinbase, as that is encoded in height
            StoredTransactionOutput txout = new StoredTransactionOutput(hash, index, value, height, true, results.getBytes(3));
            return txout;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /**
     * Adds an output to the open-outputs set. The index row (hash -> id/height) is
     * inserted first; a duplicate-key error (23505) there is deliberately ignored
     * because several outputs of the same transaction share one index row. A
     * duplicate on the outputs insert itself is likewise swallowed (re-add of an
     * already-known output); all other SQL errors are rethrown.
     *
     * @param out the unspent output to record
     * @throws BlockStoreException on any non-duplicate database error
     */
    public void addUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            try {
                s = conn.get().prepareStatement("INSERT INTO openOutputsIndex(hash, height)"
                        + " VALUES(?, ?)");
                s.setBytes(1, out.getHash().getBytes());
                s.setInt(2, out.getHeight());
                s.executeUpdate();
            } catch (SQLException e) {
                // Duplicate index row is expected when the tx already has other open outputs.
                if (e.getErrorCode() != 23505)
                    throw e;
            } finally {
                if (s != null)
                    s.close();
            }
            s = conn.get().prepareStatement("INSERT INTO openOutputs (id, index, value, scriptBytes) " +
                    "VALUES ((SELECT id FROM openOutputsIndex WHERE hash = ?), " +
                    "?, ?, ?)");
            s.setBytes(1, out.getHash().getBytes());
            // index is actually an unsigned int
            s.setInt(2, (int)out.getIndex());
            s.setBytes(3, out.getValue().toByteArray());
            s.setBytes(4, out.getScriptBytes());
            s.executeUpdate();
            s.close();
        } catch (SQLException e) {
            if (e.getErrorCode() != 23505)
                throw new BlockStoreException(e);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException(e); }
        }
    }
    /**
     * Removes an output from the open-outputs set, deleting the shared index row
     * only when it was the transaction's last remaining open output.
     *
     * @param out the output to remove; must currently exist in the store
     * @throws BlockStoreException if the output is absent or a delete fails
     */
    public void removeUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
        maybeConnect();
        // TODO: This should only need one query (maybe a stored procedure)
        if (getTransactionOutput(out.getHash(), out.getIndex()) == null)
            throw new BlockStoreException("Tried to remove a StoredTransactionOutput from H2FullPrunedBlockStore that it didn't have!");
        try {
            PreparedStatement s = conn.get()
                    .prepareStatement("DELETE FROM openOutputs " +
                            "WHERE id = (SELECT id FROM openOutputsIndex WHERE hash = ?) AND index = ?");
            s.setBytes(1, out.getHash().getBytes());
            // index is actually an unsigned int
            s.setInt(2, (int)out.getIndex());
            s.executeUpdate();
            s.close();
            // This is quite an ugly query, is there no better way?
            // Deletes the index row only if no openOutputs rows still reference its id.
            s = conn.get().prepareStatement("DELETE FROM openOutputsIndex " +
                    "WHERE hash = ? AND 1 = (CASE WHEN ((SELECT COUNT(*) FROM openOutputs WHERE id =" +
                    "(SELECT id FROM openOutputsIndex WHERE hash = ?)) = 0) THEN 1 ELSE 0 END)");
            s.setBytes(1, out.getHash().getBytes());
            s.setBytes(2, out.getHash().getBytes());
            s.executeUpdate();
            s.close();
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
public void beginDatabaseBatchWrite() throws BlockStoreException {
maybeConnect();
try {
conn.get().setAutoCommit(false);
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
public void commitDatabaseBatchWrite() throws BlockStoreException {
maybeConnect();
try {
conn.get().commit();
conn.get().setAutoCommit(true);
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
public void abortDatabaseBatchWrite() throws BlockStoreException {
maybeConnect();
try {
conn.get().rollback();
conn.get().setAutoCommit(true);
} catch (SQLException e) {
throw new BlockStoreException(e);
}
}
public boolean hasUnspentOutputs(Sha256Hash hash, int numOutputs) throws BlockStoreException {
maybeConnect();
PreparedStatement s = null;
try {
s = conn.get()
.prepareStatement("SELECT COUNT(*) FROM openOutputsIndex " +
"WHERE hash = ?");
s.setBytes(1, hash.getBytes());
ResultSet results = s.executeQuery();
if (!results.next()) {
throw new BlockStoreException("Got no results from a COUNT(*) query");
}
int count = results.getInt(1);
return count != 0;
} catch (SQLException ex) {
throw new BlockStoreException(ex);
} finally {
if (s != null)
try {
s.close();
} catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
}
}
}
| |
package org.jtheque.views.impl.windows;
/*
* Copyright JTheque (Baptiste Wicht)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.jtheque.core.Core;
import org.jtheque.core.lifecycle.LifeCycle;
import org.jtheque.core.lifecycle.TitleListener;
import org.jtheque.i18n.LanguageService;
import org.jtheque.ui.Model;
import org.jtheque.ui.UIUtils;
import org.jtheque.ui.components.Borders;
import org.jtheque.ui.components.LayerTabbedPane;
import org.jtheque.ui.utils.builders.JThequePanelBuilder;
import org.jtheque.ui.utils.builders.PanelBuilder;
import org.jtheque.ui.utils.windows.frames.SwingFrameView;
import org.jtheque.utils.SimplePropertiesCache;
import org.jtheque.utils.collections.CollectionUtils;
import org.jtheque.utils.ui.GridBagUtils;
import org.jtheque.utils.ui.SwingUtils;
import org.jtheque.views.Views;
import org.jtheque.views.components.MainComponent;
import org.jtheque.views.impl.MainController;
import org.jtheque.views.impl.components.MainTabbedPane;
import org.jtheque.views.impl.components.menu.JThequeMenuBar;
import org.jtheque.views.impl.components.panel.JThequeStateBar;
import org.jtheque.views.windows.MainView;
import javax.annotation.Resource;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JPanel;
import java.awt.Color;
import java.awt.Component;
import java.awt.Insets;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.util.Collection;
/**
* The main view of JTheque.
*
* @author Baptiste Wicht
*/
public final class MainViewImpl extends SwingFrameView<Model> implements TitleListener, MainView {
    // The tabbed pane shown when more than one main component is registered; lazily created.
    private MainTabbedPane tab;
    // Created in fill(); handles window events and tab changes after the view is populated.
    private MainController controller;
    private static final int DEFAULT_WIDTH = 830;
    private static final int DEFAULT_HEIGHT = 645;
    // Placeholder listener installed by init() and replaced by the controller in fill().
    private WindowListener tempListener;
    // Number of registered main components; drives which content pane layout is used.
    private int current;
    private JThequeStateBar stateBar;
    @Resource
    private LanguageService languageService;
    @Resource
    private Views views;
    @Resource
    private UIUtils uiUtils;
    @Resource
    private Core core;
    @Resource
    private LifeCycle lifeCycle;
    private final JThequeMenuBar menuBar;
    /**
     * Construct a new MainViewImpl and register it in the SimplePropertiesCache
     * under the "mainView" key so other components can reach it.
     *
     * @param menuBar The menu bar.
     */
    public MainViewImpl(JThequeMenuBar menuBar) {
        super();
        this.menuBar = menuBar;
        SimplePropertiesCache.put("mainView", this);
    }
    /**
     * Build the view. Installs a temporary black content pane and a temporary
     * window listener; the real content is added later by fill().
     * Must be called on the EDT (asserted).
     */
    @Override
    public void init() {
        SwingUtils.assertEDT("MainViewImpl.init()");
        setTitle(lifeCycle.getTitle());
        setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        tempListener = new TempWindowAdapter();
        addWindowListener(tempListener);
        JComponent background = new JPanel();
        background.setBackground(Color.black);
        setContentPane(background);
        uiUtils.configureView(this, "main", DEFAULT_WIDTH, DEFAULT_HEIGHT);
        lifeCycle.addTitleListener(this);
    }
    /**
     * Build the entire view with the final content. Creates the controller and,
     * on the EDT, swaps the temporary window listener for it.
     */
    public void fill() {
        controller = new MainController(core, uiUtils, lifeCycle, this);
        SwingUtils.inEdt(new Runnable() {
            @Override
            public void run() {
                setContentPane(new JPanel());
                buildContentPane();
                menuBar.buildMenu();
                setJMenuBar(menuBar);
                removeWindowListener(tempListener);
                addWindowListener(controller);
            }
        });
    }
    // Handles "add"/"remove" messages for main components; all UI work is dispatched to the EDT.
    @Override
    public void sendMessage(String message, final Object value) {
        if ("add".equals(message)) {
            SwingUtils.inEdt(new Runnable() {
                @Override
                public void run() {
                    addComponent();
                    refresh();
                }
            });
        } else if ("remove".equals(message)) {
            SwingUtils.inEdt(new Runnable() {
                @Override
                public void run() {
                    removeComponent((MainComponent) value);
                    refresh();
                }
            });
        }
    }
    /**
     * Add the main component to the view. Once more than one component exists the
     * tabbed pane is refreshed in place; otherwise the content pane is rebuilt.
     */
    private void addComponent() {
        if (current > 1) {
            tab.refreshComponents();
            current++;
        } else {
            current++;
            buildContentPane();
        }
    }
    /**
     * Remove the given main component. While more than two components remain the
     * tab is removed in place; otherwise the content pane is rebuilt.
     *
     * @param component The main component to remove.
     */
    private void removeComponent(MainComponent component) {
        if (current > 2) {
            tab.removeMainComponent(component);
            current--;
        } else {
            current--;
            buildContentPane();
        }
    }
    /**
     * Build the content pane: the current main component on top, the state bar
     * at the bottom. Also flags "statebar-loaded" in the properties cache.
     */
    private void buildContentPane() {
        getContentPane().removeAll();
        PanelBuilder builder = new JThequePanelBuilder((JPanel) getContentPane());
        builder.setBorder(Borders.EMPTY_BORDER);
        builder.setDefaultInsets(new Insets(0, 0, 0, 0));
        builder.add(getMainComponent(), builder.gbcSet(0, 0, GridBagUtils.BOTH, GridBagUtils.FIRST_LINE_START, 1.0, 1.0));
        stateBar = new JThequeStateBar(views);
        SimplePropertiesCache.put("statebar-loaded", true);
        builder.add(stateBar, builder.gbcSet(0, 1, GridBagUtils.HORIZONTAL, GridBagUtils.LAST_LINE_START));
    }
    /**
     * Return the current main component: a white placeholder panel when none is
     * registered, the single component's implementation when there is one, and
     * the (lazily created) tabbed pane otherwise.
     *
     * @return The current main component.
     */
    private Component getMainComponent() {
        Component mainComponent;
        if (current == 0) {
            mainComponent = new JPanel();
            mainComponent.setBackground(Color.white);
        } else if (current == 1) {
            Collection<MainComponent> components = views.getMainComponents();
            mainComponent = CollectionUtils.first(components).getImpl();
        } else {
            if (tab == null) {
                tab = new MainTabbedPane(languageService, views);
                tab.addChangeListener(controller);
            }
            mainComponent = tab;
        }
        return mainComponent;
    }
    @Override
    public LayerTabbedPane getTabbedPane() {
        return tab;
    }
    // TitleListener callback: mirror the life-cycle title onto the frame.
    @Override
    public void titleUpdated(String title) {
        setTitle(title);
    }
    // NOTE(review): tab may be null if fewer than two components were ever added — verify callers.
    @Override
    public void setSelectedComponent(Object component) {
        tab.setSelectedComponent((Component) component);
    }
    @Override
    public JComponent getSelectedComponent() {
        return tab.getSelectedComponent();
    }
    @Override
    public JThequeStateBar getStateBar() {
        return stateBar;
    }
    // Persists the window geometry under the "main" key before closing.
    @Override
    public void closeDown() {
        if (uiUtils != null) {
            uiUtils.saveState(this, "main");
        }
        super.closeDown();
    }
    /**
     * The temporary window adapter. Routes the close button to a clean
     * application exit until the real controller is installed by fill().
     *
     * @author Baptiste Wicht
     */
    private final class TempWindowAdapter extends WindowAdapter {
        @Override
        public void windowClosing(WindowEvent e) {
            lifeCycle.exit();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal.deadlock;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.junit.Test;
import org.apache.geode.DataSerializable;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedLockService;
import org.apache.geode.distributed.DistributedSystemDisconnectedException;
import org.apache.geode.distributed.LockServiceDestroyedException;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.pdx.internal.TypeRegistry;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.SerializableRunnable;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
/**
 * Distributed tests for GemFireDeadlockDetector: verifies that no cycle is
 * reported on an idle system and that cross-member deadlocks created via a
 * function execution and via distributed locks are both detected.
 */
public class GemFireDeadlockDetectorDUnitTest extends JUnit4CacheTestCase {
    // Threads deliberately blocked by TestFunction, so tests can interrupt them during teardown.
    private static final Set<Thread> stuckThreads =
            Collections.synchronizedSet(new HashSet<>());
    @Override
    public final void preTearDownCacheTestCase() throws Exception {
        disconnectAllFromDS();
    }
    /**
     * Interrupts and joins every deliberately-stuck thread in each VM.
     * NOTE(review): iterating a synchronizedSet without holding its monitor can
     * throw ConcurrentModificationException if a thread removes itself
     * concurrently — confirm whether this is a tolerated race in practice.
     */
    private void stopStuckThreads() {
        Invoke.invokeInEveryVM(new SerializableRunnable() {
            @Override
            public void run() {
                for (Thread thread : stuckThreads) {
                    thread.interrupt();
                    disconnectFromDS();
                    try {
                        thread.join(30000);
                        assertTrue(!thread.isAlive());
                    } catch (InterruptedException e) {
                        Assert.fail("interrupted", e);
                    }
                }
            }
        });
    }
    public GemFireDeadlockDetectorDUnitTest() {
        super();
    }
    /** A healthy two-member system must produce no dependency cycle. */
    @Test
    public void testNoDeadlock() {
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        TypeRegistry.init();
        // Make sure a deadlock from a previous test is cleared.
        disconnectAllFromDS();
        createCache(vm0);
        createCache(vm1);
        getSystem();
        GemFireDeadlockDetector detect = new GemFireDeadlockDetector();
        assertEquals(null, detect.find().findCycle());
    }
    // Shared per-VM lock that TestFunction and lockTheLocks contend on.
    private static final Lock lock = new ReentrantLock();
    /**
     * Creates a classic lock-ordering deadlock across two members using function
     * execution, waits for the detector to find the cycle, then unwinds.
     */
    @Test
    public void testDistributedDeadlockWithFunction() throws Throwable {
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        TypeRegistry.init();
        getSystem();
        InternalDistributedMember member1 = createCache(vm0);
        final InternalDistributedMember member2 = createCache(vm1);
        getBlackboard().initBlackboard();
        // Have two threads lock locks on different members in different orders.
        String gateOnMember1 = "gateOnMember1";
        String gateOnMember2 = "gateOnMember2";
        // This thread locks the lock member1 first, then member2.
        AsyncInvocation async1 = lockTheLocks(vm0, member2, gateOnMember1, gateOnMember2);
        // This thread locks the lock member2 first, then member1.
        AsyncInvocation async2 = lockTheLocks(vm1, member1, gateOnMember2, gateOnMember1);
        try {
            // Single-element array works around the effectively-final capture rule.
            final LinkedList<Dependency>[] deadlockHolder = new LinkedList[1];
            await("waiting for deadlock").until(() -> {
                GemFireDeadlockDetector detect = new GemFireDeadlockDetector();
                LinkedList<Dependency> deadlock = detect.find().findCycle();
                if (deadlock != null) {
                    deadlockHolder[0] = deadlock;
                }
                return deadlock != null;
            });
            LinkedList<Dependency> deadlock = deadlockHolder[0];
            LogWriterUtils.getLogWriter().info("Deadlock=" + DeadlockDetector.prettyFormat(deadlock));
            assertEquals(8, deadlock.size());
            stopStuckThreads();
        } finally {
            // Always reap both async threads so no invocation outlives the test.
            try {
                waitForAsyncInvocation(async1, 45, TimeUnit.SECONDS);
            } finally {
                waitForAsyncInvocation(async2, 45, TimeUnit.SECONDS);
            }
        }
    }
    /**
     * Asynchronously: take the local lock, synchronize with the peer via
     * blackboard gates, then execute TestFunction on the remote member (which
     * blocks on that member's local lock — producing the cross-member cycle).
     */
    private AsyncInvocation lockTheLocks(VM vm0, final InternalDistributedMember member,
            final String gateToSignal, final String gateToWaitOn) {
        return vm0.invokeAsync(new SerializableRunnable() {
            @Override
            public void run() {
                lock.lock();
                try {
                    try {
                        getBlackboard().signalGate(gateToSignal);
                        getBlackboard().waitForGate(gateToWaitOn, 10, TimeUnit.SECONDS);
                    } catch (TimeoutException | InterruptedException e) {
                        throw new RuntimeException("failed", e);
                    }
                    ResultCollector collector = FunctionService.onMember(member).execute(new TestFunction());
                    // wait the function to lock the lock on member.
                    collector.getResult();
                } finally {
                    lock.unlock();
                }
            }
        });
    }
    /**
     * Same deadlock scenario but built from DistributedLockService locks
     * ("one"/"two" acquired in opposite orders by the two VMs).
     */
    @Test
    public void testDistributedDeadlockWithDLock() throws Throwable {
        Host host = Host.getHost(0);
        VM vm0 = host.getVM(0);
        VM vm1 = host.getVM(1);
        getBlackboard().initBlackboard();
        TypeRegistry.init();
        getSystem();
        AsyncInvocation async1 = lockTheDLocks(vm0, "one", "two");
        AsyncInvocation async2 = lockTheDLocks(vm1, "two", "one");
        await("waiting for locks to be acquired")
                .untilAsserted(() -> assertTrue(getBlackboard().isGateSignaled("one")));
        await("waiting for locks to be acquired")
                .untilAsserted(() -> assertTrue(getBlackboard().isGateSignaled("two")));
        GemFireDeadlockDetector detect = new GemFireDeadlockDetector();
        LinkedList<Dependency> deadlock = detect.find().findCycle();
        assertTrue(deadlock != null);
        System.out.println("Deadlock=" + DeadlockDetector.prettyFormat(deadlock));
        assertEquals(4, deadlock.size());
        // Disconnecting breaks the deadlock so the async invocations can finish.
        disconnectAllFromDS();
        try {
            waitForAsyncInvocation(async1, 45, TimeUnit.SECONDS);
        } finally {
            waitForAsyncInvocation(async2, 45, TimeUnit.SECONDS);
        }
    }
    // Joins an async invocation, converting a timeout into a test failure (leaked thread).
    private void waitForAsyncInvocation(AsyncInvocation async1, int howLong, TimeUnit units)
            throws java.util.concurrent.ExecutionException, InterruptedException {
        try {
            async1.get(howLong, units);
        } catch (TimeoutException e) {
            fail("test is leaving behind an async invocation thread");
        }
    }
    /**
     * Asynchronously: lock {@code first}, signal its gate, wait for the peer's
     * gate, then block trying to lock {@code second} (held by the peer).
     * Teardown-related lock-service exceptions are tolerated.
     */
    private AsyncInvocation lockTheDLocks(VM vm, final String first, final String second) {
        return vm.invokeAsync(new SerializableRunnable() {
            @Override
            public void run() {
                try {
                    getCache();
                    DistributedLockService dls = DistributedLockService.create("deadlock_test", getSystem());
                    dls.lock(first, 10 * 1000, -1);
                    getBlackboard().signalGate(first);
                    getBlackboard().waitForGate(second, 30, TimeUnit.SECONDS);
                    // this will block since the other DUnit VM will have locked the second key
                    try {
                        dls.lock(second, 10 * 1000, -1);
                    } catch (LockServiceDestroyedException expected) {
                        // this is ok, the test is terminating
                    } catch (DistributedSystemDisconnectedException expected) {
                        // this is ok, the test is terminating
                    }
                } catch (Exception e) {
                    throw new RuntimeException("test failed", e);
                }
            }
        });
    }
    // Creates a cache in the given VM and returns that VM's distributed member id.
    private InternalDistributedMember createCache(VM vm) {
        return (InternalDistributedMember) vm.invoke(new SerializableCallable() {
            @Override
            public Object call() {
                getCache();
                return getSystem().getDistributedMember();
            }
        });
    }
    /**
     * Function that registers its thread in stuckThreads and blocks on the
     * shared lock, creating the remote half of the deadlock.
     */
    private static class TestFunction implements Function, DataSerializable {
        public TestFunction() {}
        // Seconds to wait for the lock (value is passed to tryLock with TimeUnit.SECONDS).
        private static final int LOCK_WAIT_TIME = 1000;
        @Override
        public boolean hasResult() {
            return true;
        }
        @Override
        public void execute(FunctionContext context) {
            boolean acquired = false;
            try {
                stuckThreads.add(Thread.currentThread());
                acquired = lock.tryLock(LOCK_WAIT_TIME, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                // ignore
            } finally {
                if (acquired) {
                    lock.unlock();
                }
                stuckThreads.remove(Thread.currentThread());
                context.getResultSender().lastResult(null);
            }
        }
        @Override
        public String getId() {
            return getClass().getCanonicalName();
        }
        @Override
        public boolean optimizeForWrite() {
            return false;
        }
        @Override
        public boolean isHA() {
            return false;
        }
        // Serializes no state: the function is identified purely by its id.
        @Override
        public void toData(DataOutput out) throws IOException {
        }
        @Override
        public void fromData(DataInput in) throws IOException, ClassNotFoundException {
        }
    }
}
| |
package org.wikipedia.settings;
import org.wikipedia.R;
import org.wikipedia.WikipediaApp;
import org.wikipedia.data.GsonMarshaller;
import org.wikipedia.data.TabUnmarshaller;
import org.wikipedia.page.tabs.Tab;
import org.wikipedia.theme.Theme;
import retrofit.RestAdapter;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.util.Collections;
import java.util.List;
import static org.wikipedia.settings.PrefsIoUtil.contains;
import static org.wikipedia.settings.PrefsIoUtil.getBoolean;
import static org.wikipedia.settings.PrefsIoUtil.getInt;
import static org.wikipedia.settings.PrefsIoUtil.getKey;
import static org.wikipedia.settings.PrefsIoUtil.getLong;
import static org.wikipedia.settings.PrefsIoUtil.getString;
import static org.wikipedia.settings.PrefsIoUtil.remove;
import static org.wikipedia.settings.PrefsIoUtil.setBoolean;
import static org.wikipedia.settings.PrefsIoUtil.setInt;
import static org.wikipedia.settings.PrefsIoUtil.setLong;
import static org.wikipedia.settings.PrefsIoUtil.setString;
/** Shared preferences utility for convenient POJO access. */
public final class Prefs {
    /** Returns the app distribution channel, or null if none has been recorded. */
    @Nullable
    public static String getAppChannel() {
        return getString(R.string.preference_key_app_channel, null);
    }
    /** Records the app distribution channel (null clears it). */
    public static void setAppChannel(@Nullable String channel) {
        setString(R.string.preference_key_app_channel, channel);
    }
    /** Returns the raw preference key used to store the app channel. */
    @NonNull
    public static String getAppChannelKey() {
        return getKey(R.string.preference_key_app_channel);
    }
    /** Returns the per-install analytics id, or null if not yet assigned. */
    @Nullable
    public static String getAppInstallId() {
        return getString(R.string.preference_key_reading_app_install_id, null);
    }
    /** Stores the per-install analytics id. */
    public static void setAppInstallId(@Nullable String id) {
        // The app install ID uses readingAppInstallID for backwards compatibility with analytics.
        setString(R.string.preference_key_reading_app_install_id, id);
    }
    /** Returns the user-selected app language code, or null to follow the system. */
    @Nullable
    public static String getAppLanguageCode() {
        return getString(R.string.preference_key_language, null);
    }
    /** Stores the user-selected app language code (null clears the override). */
    public static void setAppLanguageCode(@Nullable String code) {
        setString(R.string.preference_key_language, code);
    }
    /** Returns the stored theme marshalling id, defaulting to the fallback theme. */
    public static int getThemeId() {
        return getInt(R.string.preference_key_color_theme, Theme.getFallback().getMarshallingId());
    }
    /** Stores the theme marshalling id. */
    public static void setThemeId(int theme) {
        setInt(R.string.preference_key_color_theme, theme);
    }
    /** Returns the stored cookie domain list, or "" if none. */
    @NonNull
    public static String getCookieDomains() {
        return getString(R.string.preference_key_cookie_domains, "");
    }
    /** Stores the cookie domain list. */
    public static void setCookieDomains(@Nullable String domains) {
        setString(R.string.preference_key_cookie_domains, domains);
    }
    /** Returns the cookies stored for a domain, or "" if none. */
    @NonNull
    public static String getCookiesForDomain(@NonNull String domain) {
        return getString(getCookiesForDomainKey(domain), "");
    }
    /** Stores the cookies for a domain under its derived per-domain key. */
    public static void setCookiesForDomain(@NonNull String domain, @Nullable String cookies) {
        setString(getCookiesForDomainKey(domain), cookies);
    }
    /** Removes all stored cookies for a domain. */
    public static void removeCookiesForDomain(@NonNull String domain) {
        remove(getCookiesForDomainKey(domain));
    }
public static boolean isShowDeveloperSettingsEnabled() {
return getBoolean(R.string.preference_key_show_developer_settings,
WikipediaApp.getInstance().isDevRelease());
}
public static void setShowDeveloperSettingsEnabled(boolean enabled) {
setBoolean(R.string.preference_key_show_developer_settings, enabled);
}
@NonNull
public static String getEditTokenWikis() {
return getString(R.string.preference_key_edittoken_wikis, "");
}
public static void setEditTokenWikis(@Nullable String wikis) {
setString(R.string.preference_key_edittoken_wikis, wikis);
}
@Nullable
public static String getEditTokenForWiki(@NonNull String wiki) {
return getString(getEditTokenForWikiKey(wiki), null);
}
public static void setEditTokenForWiki(@NonNull String wiki, @Nullable String token) {
setString(getEditTokenForWikiKey(wiki), token);
}
public static void removeEditTokenForWiki(@NonNull String wiki) {
remove(getEditTokenForWikiKey(wiki));
}
public static void removeLoginUsername() {
remove(R.string.preference_key_login_username);
}
@Nullable
public static String getLoginPassword() {
return getString(R.string.preference_key_login_password, null);
}
public static void setLoginPassword(@Nullable String password) {
setString(R.string.preference_key_login_password, password);
}
public static boolean hasLoginPassword() {
return contains(R.string.preference_key_login_password);
}
public static void removeLoginPassword() {
remove(R.string.preference_key_login_password);
}
public static int getLoginUserId() {
return getInt(R.string.preference_key_login_user_id, 0);
}
public static void setLoginUserId(int id) {
setInt(R.string.preference_key_login_user_id, id);
}
public static void removeLoginUserId() {
remove(R.string.preference_key_login_user_id);
}
@Nullable
public static String getLoginUsername() {
return getString(R.string.preference_key_login_username, null);
}
public static void setLoginUsername(@Nullable String username) {
setString(R.string.preference_key_login_username, username);
}
public static boolean hasLoginUsername() {
return contains(R.string.preference_key_login_username);
}
@Nullable
public static String getMruLanguageCodeCsv() {
return getString(R.string.preference_key_language_mru, null);
}
public static void setMruLanguageCodeCsv(@Nullable String csv) {
setString(R.string.preference_key_language_mru, csv);
}
@NonNull
public static String getRemoteConfigJson() {
return getString(R.string.preference_key_remote_config, "{}");
}
public static void setRemoteConfigJson(@Nullable String json) {
setString(R.string.preference_key_remote_config, json);
}
public static void setTabs(@NonNull List<Tab> tabs) {
setString(R.string.preference_key_tabs, GsonMarshaller.marshal(tabs));
}
@NonNull
public static List<Tab> getTabs() {
return hasTabs()
? TabUnmarshaller.unmarshal(getString(R.string.preference_key_tabs, null))
: Collections.<Tab>emptyList();
}
public static boolean hasTabs() {
return contains(R.string.preference_key_tabs);
}
public static int getTextSizeMultiplier() {
return getInt(R.string.preference_key_text_size_multiplier, 0);
}
public static void setTextSizeMultiplier(int multiplier) {
setInt(R.string.preference_key_text_size_multiplier, multiplier);
}
public static boolean isEventLoggingEnabled() {
return getBoolean(R.string.preference_key_eventlogging_opt_in, true);
}
public static boolean isExperimentalHtmlPageLoadEnabled() {
return getBoolean(R.string.preference_key_exp_html_page_load, false);
}
public static boolean forceRestbaseUsage() {
return getBoolean(R.string.preference_key_use_restbase, false);
}
public static RestAdapter.LogLevel getRetrofitLogLevel() {
String prefValue = getString(R.string.preference_key_retrofit_log_level, null);
if (prefValue == null) {
return RestAdapter.LogLevel.NONE;
}
switch (prefValue) {
case "BASIC":
return RestAdapter.LogLevel.BASIC;
case "HEADERS":
return RestAdapter.LogLevel.HEADERS;
case "HEADERS_AND_ARGS":
return RestAdapter.LogLevel.HEADERS_AND_ARGS;
case "FULL":
return RestAdapter.LogLevel.FULL;
case "NONE":
default:
return RestAdapter.LogLevel.NONE;
}
}
public static String getRestbaseUriFormat() {
return getString(R.string.preference_key_restbase_uri_format, "%1$s://%2$s/api/rest_v1");
}
public static long getLastRunTime(@NonNull String task) {
return getLong(getLastRunTimeKey(task), 0);
}
public static void setLastRunTime(@NonNull String task, long time) {
setLong(getLastRunTimeKey(task), time);
}
public static boolean isShowZeroInterstitialEnabled() {
return getBoolean(R.string.preference_key_zero_interstitial, true);
}
public static boolean isSelectTextTutorialEnabled() {
return getBoolean(R.string.preference_key_select_text_tutorial_enabled, true);
}
public static void setSelectTextTutorialEnabled(boolean enabled) {
setBoolean(R.string.preference_key_select_text_tutorial_enabled, enabled);
}
public static boolean isShareTutorialEnabled() {
return getBoolean(R.string.preference_key_share_tutorial_enabled, true);
}
public static void setShareTutorialEnabled(boolean enabled) {
setBoolean(R.string.preference_key_share_tutorial_enabled, enabled);
}
public static boolean isFeatureSelectTextAndShareTutorialEnabled() {
return getBoolean(R.string.preference_key_feature_select_text_and_share_tutorials_enabled, true);
}
public static void setFeatureSelectTextAndShareTutorialEnabled(boolean enabled) {
setBoolean(R.string.preference_key_feature_select_text_and_share_tutorials_enabled, enabled);
}
public static boolean hasFeatureSelectTextAndShareTutorial() {
return contains(R.string.preference_key_feature_select_text_and_share_tutorials_enabled);
}
public static boolean isTocTutorialEnabled() {
return getBoolean(R.string.preference_key_toc_tutorial_enabled, true);
}
public static void setTocTutorialEnabled(boolean enabled) {
setBoolean(R.string.preference_key_toc_tutorial_enabled, enabled);
}
public static boolean isImageDownloadEnabled() {
return getBoolean(R.string.preference_key_show_images, true);
}
private static String getCookiesForDomainKey(@NonNull String domain) {
return getKey(R.string.preference_key_cookies_for_domain_format, domain);
}
private static String getLastRunTimeKey(@NonNull String task) {
return getKey(R.string.preference_key_last_run_time_format, task);
}
private static String getEditTokenForWikiKey(String wiki) {
return getKey(R.string.preference_key_edittoken_for_wiki_format, wiki);
}
private Prefs() { }
}
| |
package org.docksidestage.oracle.dbflute.resola.cbean.cq.bs;
import java.util.*;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.ckey.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.ordering.*;
import org.dbflute.cbean.scoping.*;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.dbmeta.DBMetaProvider;
import org.docksidestage.oracle.dbflute.resola.allcommon.*;
import org.docksidestage.oracle.dbflute.resola.cbean.*;
import org.docksidestage.oracle.dbflute.resola.cbean.cq.*;
/**
* The abstract condition-query of STATION.
* @author DBFlute(AutoGenerator)
*/
public abstract class ResolaAbstractBsStationCQ extends AbstractConditionQuery {
// ===================================================================================
// Constructor
// ===========
    /**
     * Constructor: delegates straight to {@link AbstractConditionQuery}.
     * @param referrerQuery The condition-query of the referrer using this query.
     * @param sqlClause The SQL clause shared across the condition-bean.
     * @param aliasName The alias name of the table for this query.
     * @param nestLevel The nest level of the relation for this query.
     */
    public ResolaAbstractBsStationCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }
// ===================================================================================
// DB Meta
// =======
    /** @return The DB-meta provider obtained from {@code ResolaDBMetaInstanceHandler}. */
    @Override
    protected DBMetaProvider xgetDBMetaProvider() {
        return ResolaDBMetaInstanceHandler.getProvider();
    }

    /** @return The on-database table name this condition-query targets: "STATION". */
    public String asTableDbName() {
        return "STATION";
    }
// ===================================================================================
// Query
// =====
    // -----------------------------------------------------
    //                                            STATION_ID
    //                                            ----------
    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationId The value of stationId as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setStationId_Equal(Integer stationId) {
        doSetStationId_Equal(stationId);
    }

    protected void doSetStationId_Equal(Integer stationId) {
        regStationId(CK_EQ, stationId);
    }

    /**
     * NotEqual(<>). And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationId The value of stationId as notEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setStationId_NotEqual(Integer stationId) {
        doSetStationId_NotEqual(stationId);
    }

    protected void doSetStationId_NotEqual(Integer stationId) {
        regStationId(CK_NES, stationId);
    }

    /**
     * GreaterThan(>). And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationId The value of stationId as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setStationId_GreaterThan(Integer stationId) {
        regStationId(CK_GT, stationId);
    }

    /**
     * LessThan(<). And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationId The value of stationId as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setStationId_LessThan(Integer stationId) {
        regStationId(CK_LT, stationId);
    }

    /**
     * GreaterEqual(>=). And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationId The value of stationId as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setStationId_GreaterEqual(Integer stationId) {
        regStationId(CK_GE, stationId);
    }

    /**
     * LessEqual(<=). And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationId The value of stationId as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setStationId_LessEqual(Integer stationId) {
        regStationId(CK_LE, stationId);
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber <= column <= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param minNumber The min number of stationId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of stationId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of range-of. (NotNull)
     */
    public void setStationId_RangeOf(Integer minNumber, Integer maxNumber, ConditionOptionCall<RangeOfOption> opLambda) {
        setStationId_RangeOf(minNumber, maxNumber, xcROOP(opLambda));
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber <= column <= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param minNumber The min number of stationId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of stationId. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param rangeOfOption The option of range-of. (NotNull)
     */
    protected void setStationId_RangeOf(Integer minNumber, Integer maxNumber, RangeOfOption rangeOfOption) {
        regROO(minNumber, maxNumber, xgetCValueStationId(), "STATION_ID", rangeOfOption);
    }

    /**
     * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationIdList The collection of stationId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationId_InScope(Collection<Integer> stationIdList) {
        doSetStationId_InScope(stationIdList);
    }

    protected void doSetStationId_InScope(Collection<Integer> stationIdList) {
        regINS(CK_INS, cTL(stationIdList), xgetCValueStationId(), "STATION_ID");
    }

    /**
     * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     * @param stationIdList The collection of stationId as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationId_NotInScope(Collection<Integer> stationIdList) {
        doSetStationId_NotInScope(stationIdList);
    }

    protected void doSetStationId_NotInScope(Collection<Integer> stationIdList) {
        regINS(CK_NINS, cTL(stationIdList), xgetCValueStationId(), "STATION_ID");
    }

    /**
     * IsNull {is null}. And OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     */
    public void setStationId_IsNull() { regStationId(CK_ISN, DOBJ); }

    /**
     * IsNotNull {is not null}. And OnlyOnceRegistered. <br>
     * STATION_ID: {PK, NotNull, NUMBER(9)}
     */
    public void setStationId_IsNotNull() { regStationId(CK_ISNN, DOBJ); }

    /** Registers a STATION_ID condition with the given condition-key and value. */
    protected void regStationId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueStationId(), "STATION_ID"); }

    /** @return The condition-value holder for STATION_ID, supplied by the concrete subclass. */
    protected abstract ConditionValue xgetCValueStationId();
    // -----------------------------------------------------
    //                                          STATION_NAME
    //                                          ------------
    /**
     * Equal(=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as equal. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_Equal(String stationName) {
        doSetStationName_Equal(fRES(stationName));
    }

    protected void doSetStationName_Equal(String stationName) {
        regStationName(CK_EQ, stationName);
    }

    /**
     * NotEqual(<>). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as notEqual. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_NotEqual(String stationName) {
        doSetStationName_NotEqual(fRES(stationName));
    }

    protected void doSetStationName_NotEqual(String stationName) {
        regStationName(CK_NES, stationName);
    }

    /**
     * GreaterThan(>). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as greaterThan. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_GreaterThan(String stationName) {
        regStationName(CK_GT, fRES(stationName));
    }

    /**
     * LessThan(<). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as lessThan. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_LessThan(String stationName) {
        regStationName(CK_LT, fRES(stationName));
    }

    /**
     * GreaterEqual(>=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as greaterEqual. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_GreaterEqual(String stationName) {
        regStationName(CK_GE, fRES(stationName));
    }

    /**
     * LessEqual(<=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as lessEqual. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_LessEqual(String stationName) {
        regStationName(CK_LE, fRES(stationName));
    }

    /**
     * InScope {in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationNameList The collection of stationName as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_InScope(Collection<String> stationNameList) {
        doSetStationName_InScope(stationNameList);
    }

    protected void doSetStationName_InScope(Collection<String> stationNameList) {
        regINS(CK_INS, cTL(stationNameList), xgetCValueStationName(), "STATION_NAME");
    }

    /**
     * NotInScope {not in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationNameList The collection of stationName as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setStationName_NotInScope(Collection<String> stationNameList) {
        doSetStationName_NotInScope(stationNameList);
    }

    protected void doSetStationName_NotInScope(Collection<String> stationNameList) {
        regINS(CK_NINS, cTL(stationNameList), xgetCValueStationName(), "STATION_NAME");
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)} <br>
     * <pre>e.g. setStationName_LikeSearch("xxx", op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> op.<span style="color: #CC4747">likeContain()</span>);</pre>
     * @param stationName The value of stationName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setStationName_LikeSearch(String stationName, ConditionOptionCall<LikeSearchOption> opLambda) {
        setStationName_LikeSearch(stationName, xcLSOP(opLambda));
    }

    /**
     * LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)} <br>
     * <pre>e.g. setStationName_LikeSearch("xxx", new <span style="color: #CC4747">LikeSearchOption</span>().likeContain());</pre>
     * @param stationName The value of stationName as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of like-search. (NotNull)
     */
    protected void setStationName_LikeSearch(String stationName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_LS, fRES(stationName), xgetCValueStationName(), "STATION_NAME", likeSearchOption);
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param opLambda The callback for option of like-search. (NotNull)
     */
    public void setStationName_NotLikeSearch(String stationName, ConditionOptionCall<LikeSearchOption> opLambda) {
        setStationName_NotLikeSearch(stationName, xcLSOP(opLambda));
    }

    /**
     * NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
     * And NullOrEmptyIgnored, SeveralRegistered. <br>
     * STATION_NAME: {NotNull, VARCHAR2(200)}
     * @param stationName The value of stationName as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
     * @param likeSearchOption The option of not-like-search. (NotNull)
     */
    protected void setStationName_NotLikeSearch(String stationName, LikeSearchOption likeSearchOption) {
        regLSQ(CK_NLS, fRES(stationName), xgetCValueStationName(), "STATION_NAME", likeSearchOption);
    }

    /** Registers a STATION_NAME condition with the given condition-key and value. */
    protected void regStationName(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueStationName(), "STATION_NAME"); }

    /** @return The condition-value holder for STATION_NAME, supplied by the concrete subclass. */
    protected abstract ConditionValue xgetCValueStationName();
    // -----------------------------------------------------
    //                                             BIRTHDATE
    //                                             ---------
    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     * @param birthdate The value of birthdate as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setBirthdate_Equal(java.time.LocalDateTime birthdate) {
        regBirthdate(CK_EQ, birthdate);
    }

    /**
     * GreaterThan(>). And NullIgnored, OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     * @param birthdate The value of birthdate as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setBirthdate_GreaterThan(java.time.LocalDateTime birthdate) {
        regBirthdate(CK_GT, birthdate);
    }

    /**
     * LessThan(<). And NullIgnored, OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     * @param birthdate The value of birthdate as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setBirthdate_LessThan(java.time.LocalDateTime birthdate) {
        regBirthdate(CK_LT, birthdate);
    }

    /**
     * GreaterEqual(>=). And NullIgnored, OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     * @param birthdate The value of birthdate as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setBirthdate_GreaterEqual(java.time.LocalDateTime birthdate) {
        regBirthdate(CK_GE, birthdate);
    }

    /**
     * LessEqual(<=). And NullIgnored, OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     * @param birthdate The value of birthdate as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setBirthdate_LessEqual(java.time.LocalDateTime birthdate) {
        regBirthdate(CK_LE, birthdate);
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime <= column <= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     * <pre>e.g. setBirthdate_FromTo(fromDate, toDate, op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> op.<span style="color: #CC4747">compareAsDate()</span>);</pre>
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of birthdate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of birthdate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of from-to. (NotNull)
     */
    public void setBirthdate_FromTo(java.time.LocalDateTime fromDatetime, java.time.LocalDateTime toDatetime, ConditionOptionCall<FromToOption> opLambda) {
        setBirthdate_FromTo(fromDatetime, toDatetime, xcFTOP(opLambda));
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime <= column <= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     * <pre>e.g. setBirthdate_FromTo(fromDate, toDate, new <span style="color: #CC4747">FromToOption</span>().compareAsDate());</pre>
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of birthdate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of birthdate. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param fromToOption The option of from-to. (NotNull)
     */
    protected void setBirthdate_FromTo(java.time.LocalDateTime fromDatetime, java.time.LocalDateTime toDatetime, FromToOption fromToOption) {
        String nm = "BIRTHDATE"; FromToOption op = fromToOption;
        regFTQ(xfFTHD(fromDatetime, nm, op), xfFTHD(toDatetime, nm, op), xgetCValueBirthdate(), nm, op);
    }

    /**
     * IsNull {is null}. And OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     */
    public void setBirthdate_IsNull() { regBirthdate(CK_ISN, DOBJ); }

    /**
     * IsNotNull {is not null}. And OnlyOnceRegistered. <br>
     * BIRTHDATE: {DATE(7)}
     */
    public void setBirthdate_IsNotNull() { regBirthdate(CK_ISNN, DOBJ); }

    /** Registers a BIRTHDATE condition with the given condition-key and value. */
    protected void regBirthdate(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueBirthdate(), "BIRTHDATE"); }

    /** @return The condition-value holder for BIRTHDATE, supplied by the concrete subclass. */
    protected abstract ConditionValue xgetCValueBirthdate();
    // -----------------------------------------------------
    //                                   FORMALIZED_DATETIME
    //                                   -------------------
    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     * @param formalizedDatetime The value of formalizedDatetime as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setFormalizedDatetime_Equal(java.time.LocalDateTime formalizedDatetime) {
        regFormalizedDatetime(CK_EQ, formalizedDatetime);
    }

    /**
     * GreaterThan(>). And NullIgnored, OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     * @param formalizedDatetime The value of formalizedDatetime as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setFormalizedDatetime_GreaterThan(java.time.LocalDateTime formalizedDatetime) {
        regFormalizedDatetime(CK_GT, formalizedDatetime);
    }

    /**
     * LessThan(<). And NullIgnored, OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     * @param formalizedDatetime The value of formalizedDatetime as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setFormalizedDatetime_LessThan(java.time.LocalDateTime formalizedDatetime) {
        regFormalizedDatetime(CK_LT, formalizedDatetime);
    }

    /**
     * GreaterEqual(>=). And NullIgnored, OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     * @param formalizedDatetime The value of formalizedDatetime as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setFormalizedDatetime_GreaterEqual(java.time.LocalDateTime formalizedDatetime) {
        regFormalizedDatetime(CK_GE, formalizedDatetime);
    }

    /**
     * LessEqual(<=). And NullIgnored, OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     * @param formalizedDatetime The value of formalizedDatetime as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setFormalizedDatetime_LessEqual(java.time.LocalDateTime formalizedDatetime) {
        regFormalizedDatetime(CK_LE, formalizedDatetime);
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime <= column <= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     * <pre>e.g. setFormalizedDatetime_FromTo(fromDate, toDate, op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> op.<span style="color: #CC4747">compareAsDate()</span>);</pre>
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of formalizedDatetime. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of formalizedDatetime. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of from-to. (NotNull)
     */
    public void setFormalizedDatetime_FromTo(java.time.LocalDateTime fromDatetime, java.time.LocalDateTime toDatetime, ConditionOptionCall<FromToOption> opLambda) {
        setFormalizedDatetime_FromTo(fromDatetime, toDatetime, xcFTOP(opLambda));
    }

    /**
     * FromTo with various options. (versatile) {(default) fromDatetime <= column <= toDatetime} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     * <pre>e.g. setFormalizedDatetime_FromTo(fromDate, toDate, new <span style="color: #CC4747">FromToOption</span>().compareAsDate());</pre>
     * @param fromDatetime The from-datetime(yyyy/MM/dd HH:mm:ss.SSS) of formalizedDatetime. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param toDatetime The to-datetime(yyyy/MM/dd HH:mm:ss.SSS) of formalizedDatetime. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param fromToOption The option of from-to. (NotNull)
     */
    protected void setFormalizedDatetime_FromTo(java.time.LocalDateTime fromDatetime, java.time.LocalDateTime toDatetime, FromToOption fromToOption) {
        String nm = "FORMALIZED_DATETIME"; FromToOption op = fromToOption;
        regFTQ(xfFTHD(fromDatetime, nm, op), xfFTHD(toDatetime, nm, op), xgetCValueFormalizedDatetime(), nm, op);
    }

    /**
     * IsNull {is null}. And OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     */
    public void setFormalizedDatetime_IsNull() { regFormalizedDatetime(CK_ISN, DOBJ); }

    /**
     * IsNotNull {is not null}. And OnlyOnceRegistered. <br>
     * FORMALIZED_DATETIME: {TIMESTAMP(3)(11, 3)}
     */
    public void setFormalizedDatetime_IsNotNull() { regFormalizedDatetime(CK_ISNN, DOBJ); }

    /** Registers a FORMALIZED_DATETIME condition with the given condition-key and value. */
    protected void regFormalizedDatetime(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueFormalizedDatetime(), "FORMALIZED_DATETIME"); }

    /** @return The condition-value holder for FORMALIZED_DATETIME, supplied by the concrete subclass. */
    protected abstract ConditionValue xgetCValueFormalizedDatetime();
    // -----------------------------------------------------
    //                                            HOME_COUNT
    //                                            ----------
    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCount The value of homeCount as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setHomeCount_Equal(Integer homeCount) {
        doSetHomeCount_Equal(homeCount);
    }

    protected void doSetHomeCount_Equal(Integer homeCount) {
        regHomeCount(CK_EQ, homeCount);
    }

    /**
     * NotEqual(<>). And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCount The value of homeCount as notEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setHomeCount_NotEqual(Integer homeCount) {
        doSetHomeCount_NotEqual(homeCount);
    }

    protected void doSetHomeCount_NotEqual(Integer homeCount) {
        regHomeCount(CK_NES, homeCount);
    }

    /**
     * GreaterThan(>). And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCount The value of homeCount as greaterThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setHomeCount_GreaterThan(Integer homeCount) {
        regHomeCount(CK_GT, homeCount);
    }

    /**
     * LessThan(<). And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCount The value of homeCount as lessThan. (basically NotNull: error as default, or no condition as option)
     */
    public void setHomeCount_LessThan(Integer homeCount) {
        regHomeCount(CK_LT, homeCount);
    }

    /**
     * GreaterEqual(>=). And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCount The value of homeCount as greaterEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setHomeCount_GreaterEqual(Integer homeCount) {
        regHomeCount(CK_GE, homeCount);
    }

    /**
     * LessEqual(<=). And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCount The value of homeCount as lessEqual. (basically NotNull: error as default, or no condition as option)
     */
    public void setHomeCount_LessEqual(Integer homeCount) {
        regHomeCount(CK_LE, homeCount);
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber <= column <= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param minNumber The min number of homeCount. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of homeCount. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param opLambda The callback for option of range-of. (NotNull)
     */
    public void setHomeCount_RangeOf(Integer minNumber, Integer maxNumber, ConditionOptionCall<RangeOfOption> opLambda) {
        setHomeCount_RangeOf(minNumber, maxNumber, xcROOP(opLambda));
    }

    /**
     * RangeOf with various options. (versatile) <br>
     * {(default) minNumber <= column <= maxNumber} <br>
     * And NullIgnored, OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param minNumber The min number of homeCount. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param maxNumber The max number of homeCount. (basically NotNull: if op.allowOneSide(), null allowed)
     * @param rangeOfOption The option of range-of. (NotNull)
     */
    protected void setHomeCount_RangeOf(Integer minNumber, Integer maxNumber, RangeOfOption rangeOfOption) {
        regROO(minNumber, maxNumber, xgetCValueHomeCount(), "HOME_COUNT", rangeOfOption);
    }

    /**
     * InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCountList The collection of homeCount as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setHomeCount_InScope(Collection<Integer> homeCountList) {
        doSetHomeCount_InScope(homeCountList);
    }

    protected void doSetHomeCount_InScope(Collection<Integer> homeCountList) {
        regINS(CK_INS, cTL(homeCountList), xgetCValueHomeCount(), "HOME_COUNT");
    }

    /**
     * NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     * @param homeCountList The collection of homeCount as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
     */
    public void setHomeCount_NotInScope(Collection<Integer> homeCountList) {
        doSetHomeCount_NotInScope(homeCountList);
    }

    protected void doSetHomeCount_NotInScope(Collection<Integer> homeCountList) {
        regINS(CK_NINS, cTL(homeCountList), xgetCValueHomeCount(), "HOME_COUNT");
    }

    /**
     * IsNull {is null}. And OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     */
    public void setHomeCount_IsNull() { regHomeCount(CK_ISN, DOBJ); }

    /**
     * IsNotNull {is not null}. And OnlyOnceRegistered. <br>
     * HOME_COUNT: {NUMBER(9)}
     */
    public void setHomeCount_IsNotNull() { regHomeCount(CK_ISNN, DOBJ); }

    /** Registers a HOME_COUNT condition with the given condition-key and value. */
    protected void regHomeCount(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueHomeCount(), "HOME_COUNT"); }

    /** @return The condition-value holder for HOME_COUNT, supplied by the concrete subclass. */
    protected abstract ConditionValue xgetCValueHomeCount();
/**
 * Register an equal condition (=). NullIgnored, OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometer The value of workingKilometer as equal. (basically NotNull: error as default, or no condition as option)
 */
public void setWorkingKilometer_Equal(java.math.BigDecimal workingKilometer) {
    doSetWorkingKilometer_Equal(workingKilometer);
}
// actual registration of the equal condition (hook point for overriding)
protected void doSetWorkingKilometer_Equal(java.math.BigDecimal workingKilometer) {
    regWorkingKilometer(CK_EQ, workingKilometer);
}
/**
 * Register a not-equal condition (&lt;&gt;). NullIgnored, OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometer The value of workingKilometer as notEqual. (basically NotNull: error as default, or no condition as option)
 */
public void setWorkingKilometer_NotEqual(java.math.BigDecimal workingKilometer) {
    doSetWorkingKilometer_NotEqual(workingKilometer);
}
// actual registration of the not-equal condition (hook point for overriding)
protected void doSetWorkingKilometer_NotEqual(java.math.BigDecimal workingKilometer) {
    regWorkingKilometer(CK_NES, workingKilometer);
}
/**
 * Register a greater-than condition (&gt;). NullIgnored, OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometer The value of workingKilometer as greaterThan. (basically NotNull: error as default, or no condition as option)
 */
public void setWorkingKilometer_GreaterThan(java.math.BigDecimal workingKilometer) {
    regWorkingKilometer(CK_GT, workingKilometer);
}
/**
 * Register a less-than condition (&lt;). NullIgnored, OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometer The value of workingKilometer as lessThan. (basically NotNull: error as default, or no condition as option)
 */
public void setWorkingKilometer_LessThan(java.math.BigDecimal workingKilometer) {
    regWorkingKilometer(CK_LT, workingKilometer);
}
/**
 * Register a greater-equal condition (&gt;=). NullIgnored, OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometer The value of workingKilometer as greaterEqual. (basically NotNull: error as default, or no condition as option)
 */
public void setWorkingKilometer_GreaterEqual(java.math.BigDecimal workingKilometer) {
    regWorkingKilometer(CK_GE, workingKilometer);
}
/**
 * Register a less-equal condition (&lt;=). NullIgnored, OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometer The value of workingKilometer as lessEqual. (basically NotNull: error as default, or no condition as option)
 */
public void setWorkingKilometer_LessEqual(java.math.BigDecimal workingKilometer) {
    regWorkingKilometer(CK_LE, workingKilometer);
}
/**
 * Register a range-of condition with options: {(default) minNumber &lt;= WORKING_KILOMETER &lt;= maxNumber}. <br>
 * NullIgnored and OnlyOnceRegistered. Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param minNumber The min number of workingKilometer. (basically NotNull: if op.allowOneSide(), null allowed)
 * @param maxNumber The max number of workingKilometer. (basically NotNull: if op.allowOneSide(), null allowed)
 * @param opLambda The callback for option of range-of. (NotNull)
 */
public void setWorkingKilometer_RangeOf(java.math.BigDecimal minNumber, java.math.BigDecimal maxNumber, ConditionOptionCall<RangeOfOption> opLambda) {
    // resolve the option callback into a concrete option object, then delegate
    final RangeOfOption resolvedOption = xcROOP(opLambda);
    setWorkingKilometer_RangeOf(minNumber, maxNumber, resolvedOption);
}
/**
 * Register a range-of condition with a prepared option object:
 * {(default) minNumber &lt;= WORKING_KILOMETER &lt;= maxNumber}. <br>
 * NullIgnored and OnlyOnceRegistered. Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param minNumber The min number of workingKilometer. (basically NotNull: if op.allowOneSide(), null allowed)
 * @param maxNumber The max number of workingKilometer. (basically NotNull: if op.allowOneSide(), null allowed)
 * @param rangeOfOption The option of range-of. (NotNull)
 */
protected void setWorkingKilometer_RangeOf(java.math.BigDecimal minNumber, java.math.BigDecimal maxNumber, RangeOfOption rangeOfOption) {
    final ConditionValue cvalue = xgetCValueWorkingKilometer();
    regROO(minNumber, maxNumber, cvalue, "WORKING_KILOMETER", rangeOfOption);
}
/**
 * Register an in-scope condition {in (1, 2)}. NullIgnored, NullElementIgnored, SeveralRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometerList The collection of workingKilometer as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
 */
public void setWorkingKilometer_InScope(Collection<java.math.BigDecimal> workingKilometerList) {
    doSetWorkingKilometer_InScope(workingKilometerList);
}
// actual registration of the in-scope condition (hook point for overriding)
protected void doSetWorkingKilometer_InScope(Collection<java.math.BigDecimal> workingKilometerList) {
    final ConditionValue cvalue = xgetCValueWorkingKilometer();
    regINS(CK_INS, cTL(workingKilometerList), cvalue, "WORKING_KILOMETER");
}
/**
 * Register a not-in-scope condition {not in (1, 2)}. NullIgnored, NullElementIgnored, SeveralRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 * @param workingKilometerList The collection of workingKilometer as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
 */
public void setWorkingKilometer_NotInScope(Collection<java.math.BigDecimal> workingKilometerList) {
    doSetWorkingKilometer_NotInScope(workingKilometerList);
}
// actual registration of the not-in-scope condition (hook point for overriding)
protected void doSetWorkingKilometer_NotInScope(Collection<java.math.BigDecimal> workingKilometerList) {
    final ConditionValue cvalue = xgetCValueWorkingKilometer();
    regINS(CK_NINS, cTL(workingKilometerList), cvalue, "WORKING_KILOMETER");
}
/**
 * Register an is-null condition {is null}. OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 */
public void setWorkingKilometer_IsNull() {
    regWorkingKilometer(CK_ISN, DOBJ);
}
/**
 * Register an is-not-null condition {is not null}. OnlyOnceRegistered. <br>
 * Column: WORKING_KILOMETER {NUMBER(4, 2)}
 */
public void setWorkingKilometer_IsNotNull() {
    regWorkingKilometer(CK_ISNN, DOBJ);
}
// common registration path for simple WORKING_KILOMETER conditions
protected void regWorkingKilometer(ConditionKey ky, Object vl) {
    regQ(ky, vl, xgetCValueWorkingKilometer(), "WORKING_KILOMETER");
}
protected abstract ConditionValue xgetCValueWorkingKilometer();
// ===================================================================================
//                                                                     ScalarCondition
//                                                                     ===============
// NOTE(review): the generated Javadoc below had copy-paste defects — scalar_NotEqual
// was documented as "as equal" and several examples showed scalar_Equal(); fixed here.
/**
 * Prepare ScalarCondition as equal. <br>
 * {where FOO = (select max(BAR) from ...)}
 * <pre>
 * cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
 *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
 *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
 * });
 * </pre>
 * @return The object to set up a function. (NotNull)
 */
public HpSLCFunction<ResolaStationCB> scalar_Equal() {
    return xcreateSLCFunction(CK_EQ, ResolaStationCB.class);
}
/**
 * Prepare ScalarCondition as notEqual. <br>
 * {where FOO &lt;&gt; (select max(BAR) from ...)}
 * <pre>
 * cb.query().scalar_NotEqual().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
 *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
 *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
 * });
 * </pre>
 * @return The object to set up a function. (NotNull)
 */
public HpSLCFunction<ResolaStationCB> scalar_NotEqual() {
    return xcreateSLCFunction(CK_NES, ResolaStationCB.class);
}
/**
 * Prepare ScalarCondition as greaterThan. <br>
 * {where FOO &gt; (select max(BAR) from ...)}
 * <pre>
 * cb.query().scalar_GreaterThan().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
 *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
 *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
 * });
 * </pre>
 * @return The object to set up a function. (NotNull)
 */
public HpSLCFunction<ResolaStationCB> scalar_GreaterThan() {
    return xcreateSLCFunction(CK_GT, ResolaStationCB.class);
}
/**
 * Prepare ScalarCondition as lessThan. <br>
 * {where FOO &lt; (select max(BAR) from ...)}
 * <pre>
 * cb.query().scalar_LessThan().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
 *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
 *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
 * });
 * </pre>
 * @return The object to set up a function. (NotNull)
 */
public HpSLCFunction<ResolaStationCB> scalar_LessThan() {
    return xcreateSLCFunction(CK_LT, ResolaStationCB.class);
}
/**
 * Prepare ScalarCondition as greaterEqual. <br>
 * {where FOO &gt;= (select max(BAR) from ...)}
 * <pre>
 * cb.query().scalar_GreaterEqual().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
 *     <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
 *     <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
 * });
 * </pre>
 * @return The object to set up a function. (NotNull)
 */
public HpSLCFunction<ResolaStationCB> scalar_GreaterEqual() {
    return xcreateSLCFunction(CK_GE, ResolaStationCB.class);
}
/**
 * Prepare ScalarCondition as lessEqual. <br>
 * {where FOO &lt;= (select max(BAR) from ...)}
 * <pre>
 * cb.query().<span style="color: #CC4747">scalar_LessEqual()</span>.max(new SubQuery&lt;ResolaStationCB&gt;() {
 *     public void query(ResolaStationCB subCB) {
 *         subCB.specify().setFoo... <span style="color: #3F7E5E">// derived column for function</span>
 *         subCB.query().setBar...
 *     }
 * });
 * </pre>
 * @return The object to set up a function. (NotNull)
 */
public HpSLCFunction<ResolaStationCB> scalar_LessEqual() {
    return xcreateSLCFunction(CK_LE, ResolaStationCB.class);
}
/**
 * Internal setup for a scalar condition: runs the sub-query lambda against a
 * fresh condition-bean, saves its query value, and registers the condition.
 * @param fn The scalar function name (e.g. max, avg). (NotNull)
 * @param sq The sub-query callback supplied by the caller. (NotNull)
 * @param rd The relation condition key description. (NotNull)
 * @param cs The customized settings holder (receives a partition-by bean). (NotNull)
 * @param op The option of scalar condition. (NullAllowed)
 */
@SuppressWarnings("unchecked")
protected <CB extends ConditionBean> void xscalarCondition(String fn, SubQuery<CB> sq, String rd, HpSLCCustomized<CB> cs, ScalarConditionOption op) {
    assertObjectNotNull("subQuery", sq);
    ResolaStationCB cb = xcreateScalarConditionCB(); sq.query((CB)cb);
    String pp = keepScalarCondition(cb.query()); // for saving query-value
    cs.setPartitionByCBean((CB)xcreateScalarConditionPartitionByCB()); // for using partition-by
    registerScalarCondition(fn, cb.query(), pp, rd, cs, op);
}
public abstract String keepScalarCondition(ResolaStationCQ sq);
// creates the condition-bean the sub-query lambda operates on
protected ResolaStationCB xcreateScalarConditionCB() {
    ResolaStationCB cb = newMyCB(); cb.xsetupForScalarCondition(this); return cb;
}
// creates the condition-bean used for the optional partition-by clause
protected ResolaStationCB xcreateScalarConditionPartitionByCB() {
    ResolaStationCB cb = newMyCB(); cb.xsetupForScalarConditionPartitionBy(this); return cb;
}
// ===================================================================================
//                                                                       MyselfDerived
//                                                                       =============
// internal setup for (Specify)MyselfDerived: runs the sub-query under lock,
// saves the produced query value, and registers the correlated sub-query
public void xsmyselfDerive(String fn, SubQuery<ResolaStationCB> sq, String al, DerivedReferrerOption op) {
    assertObjectNotNull("subQuery", sq);
    final ResolaStationCB derivedCB = new ResolaStationCB();
    derivedCB.xsetupForDerivedReferrer(this);
    lockCall(() -> sq.query(derivedCB));
    final String savedPp = keepSpecifyMyselfDerived(derivedCB.query());
    final String correlatedKey = "STATION_ID";
    registerSpecifyMyselfDerived(fn, derivedCB.query(), correlatedKey, correlatedKey, savedPp, "myselfDerived", al, op);
}
public abstract String keepSpecifyMyselfDerived(ResolaStationCQ sq);
/**
 * Prepare for (Query)MyselfDerived (correlated sub-query).
 * @return The object to set up a function for myself table. (NotNull)
 */
public HpQDRFunction<ResolaStationCB> myselfDerived() {
    return xcreateQDRFunctionMyselfDerived(ResolaStationCB.class);
}
// internal setup for (Query)MyselfDerived: saves both the sub-query value
// and the comparison parameter, then registers the correlated sub-query
@SuppressWarnings("unchecked")
protected <CB extends ConditionBean> void xqderiveMyselfDerived(String fn, SubQuery<CB> sq, String rd, Object vl, DerivedReferrerOption op) {
    assertObjectNotNull("subQuery", sq);
    final ResolaStationCB derivedCB = new ResolaStationCB();
    derivedCB.xsetupForDerivedReferrer(this);
    sq.query((CB)derivedCB);
    final String correlatedKey = "STATION_ID";
    final String savedSqPp = keepQueryMyselfDerived(derivedCB.query()); // for saving query-value.
    final String savedParamPp = keepQueryMyselfDerivedParameter(vl);
    registerQueryMyselfDerived(fn, derivedCB.query(), correlatedKey, correlatedKey, savedSqPp, "myselfDerived", rd, vl, savedParamPp, op);
}
public abstract String keepQueryMyselfDerived(ResolaStationCQ sq);
public abstract String keepQueryMyselfDerivedParameter(Object vl);
// ===================================================================================
//                                                                        MyselfExists
//                                                                        ============
/**
 * Prepare for MyselfExists (correlated sub-query).
 * @param subCBLambda The implementation of sub-query. (NotNull)
 */
public void myselfExists(SubQuery<ResolaStationCB> subCBLambda) {
    assertObjectNotNull("subCBLambda", subCBLambda);
    final ResolaStationCB existsCB = new ResolaStationCB();
    existsCB.xsetupForMyselfExists(this);
    lockCall(() -> subCBLambda.query(existsCB)); // run the caller's sub-query under lock
    final String savedPp = keepMyselfExists(existsCB.query()); // save query-value for parameter binding
    registerMyselfExists(existsCB.query(), savedPp);
}
public abstract String keepMyselfExists(ResolaStationCQ sq);
// ===================================================================================
//                                                                        Manual Order
//                                                                        ============
/**
 * Order along manual ordering information, e.g.
 * <pre>
 * cb.query().addOrderBy_Birthdate_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
 *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_GreaterEqual</span>(priorityDate); <span style="color: #3F7E5E">// e.g. 2000/01/01</span>
 * });
 * <span style="color: #3F7E5E">// produces: order by case when BIRTHDATE &gt;= '2000/01/01' then 0 else 1 end asc, ...</span>
 *
 * cb.query().addOrderBy_MemberStatusCode_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>&gt;</span> {
 *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Withdrawal);
 *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Formalized);
 *     <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Provisional);
 * });
 * <span style="color: #3F7E5E">// produces: order by case when MEMBER_STATUS_CODE = 'WDL' then 0</span>
 * <span style="color: #3F7E5E">//                         when MEMBER_STATUS_CODE = 'FML' then 1</span>
 * <span style="color: #3F7E5E">//                         when MEMBER_STATUS_CODE = 'PRV' then 2</span>
 * <span style="color: #3F7E5E">//                         else 3 end asc, ...</span>
 * </pre>
 * <p>This function with Union is unsupported!</p>
 * <p>The order values are bound (treated as bind parameter).</p>
 * @param opLambda The callback for option of manual-order containing order values. (NotNull)
 */
public void withManualOrder(ManualOrderOptionCall opLambda) { // is user public!
    // resolve the option callback, then hand off to the internal registration
    xdoWithManualOrder(cMOO(opLambda));
}
// ===================================================================================
//                                                                    Small Adjustment
//                                                                    ================
// ===================================================================================
//                                                                       Very Internal
//                                                                       =============
// factory for a plain condition-bean of this table (used by scalar-condition setup)
protected ResolaStationCB newMyCB() {
    return new ResolaStationCB();
}
// very internal (for suppressing warn about 'Not Use Import')
protected String xabUDT() {
    return Date.class.getName();
}
protected String xabCQ() {
    return ResolaStationCQ.class.getName();
}
protected String xabLSO() {
    return LikeSearchOption.class.getName();
}
protected String xabSLCS() {
    return HpSLCSetupper.class.getName();
}
protected String xabSCP() {
    return SubQuery.class.getName();
}
}
| |
/* -*-mode:java; c-basic-offset:2; -*- */
/*
Copyright (c) 2000-2011 ymnk, JCraft,Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
3. The names of the authors may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
 * This program is based on zlib-1.1.3, so all credit should go to the authors
 * Jean-loup Gailly(jloup@gzip.org) and Mark Adler(madler@alumni.caltech.edu)
 * and contributors of zlib.
 */
package com.jcraft.jzlib;
/**
 * Port of zlib's inflate state machine (based on zlib-1.1.3, with gzip header
 * support added). Decompresses a zlib-, gzip-, or raw-deflate-wrapped stream
 * held in the associated ZStream, advancing one state at a time; callers feed
 * input via z.next_in/avail_in and drain output via z.next_out/avail_out.
 * Not thread-safe: one instance per stream.
 */
final class Inflate{
  static final private int MAX_WBITS=15; // 32K LZ77 window
  // preset dictionary flag in zlib header
  static final private int PRESET_DICT=0x20;
  // flush values (mirror zlib's Z_* flush constants)
  static final int Z_NO_FLUSH=0;
  static final int Z_PARTIAL_FLUSH=1;
  static final int Z_SYNC_FLUSH=2;
  static final int Z_FULL_FLUSH=3;
  static final int Z_FINISH=4;
  static final private int Z_DEFLATED=8;
  // return codes (mirror zlib's Z_* status constants)
  static final private int Z_OK=0;
  static final private int Z_STREAM_END=1;
  static final private int Z_NEED_DICT=2;
  static final private int Z_ERRNO=-1;
  static final private int Z_STREAM_ERROR=-2;
  static final private int Z_DATA_ERROR=-3;
  static final private int Z_MEM_ERROR=-4;
  static final private int Z_BUF_ERROR=-5;
  static final private int Z_VERSION_ERROR=-6;
  // states of the inflate state machine (this.mode)
  static final private int METHOD=0;   // waiting for method byte
  static final private int FLAG=1;     // waiting for flag byte
  static final private int DICT4=2;    // four dictionary check bytes to go
  static final private int DICT3=3;    // three dictionary check bytes to go
  static final private int DICT2=4;    // two dictionary check bytes to go
  static final private int DICT1=5;    // one dictionary check byte to go
  static final private int DICT0=6;    // waiting for inflateSetDictionary
  static final private int BLOCKS=7;   // decompressing blocks
  static final private int CHECK4=8;   // four check bytes to go
  static final private int CHECK3=9;   // three check bytes to go
  static final private int CHECK2=10;  // two check bytes to go
  static final private int CHECK1=11;  // one check byte to go
  static final private int DONE=12;    // finished check, done
  static final private int BAD=13;     // got an error--stay here
  // gzip-header parsing states (RFC 1952 fields)
  static final private int HEAD=14;
  static final private int LENGTH=15;
  static final private int TIME=16;
  static final private int OS=17;
  static final private int EXLEN=18;
  static final private int EXTRA=19;
  static final private int NAME=20;
  static final private int COMMENT=21;
  static final private int HCRC=22;
  static final private int FLAGS=23;
  int mode;              // current inflate mode
  // mode dependent information
  int method;            // if FLAGS, method byte
  // if CHECK, check values to compare
  long was = -1;         // computed check value
  long need;             // stream check value
  // if BAD, inflateSync's marker bytes count
  int marker;
  // mode independent information
  int wrap;              // flag for no wrapper
  int wbits;             // log2(window size) (8..15, defaults to 15)
  InfBlocks blocks;      // current inflate_blocks state
  private final ZStream z;
  private int flags;     // gzip FLG byte (0 for zlib streams)
  private int need_bytes = -1; // remaining bytes for readBytes(); -1 = idle
  private byte[] crcbuf=new byte[4]; // scratch buffer for checksum() of header bytes
  GZIPHeader gheader = null; // parsed gzip header fields, if any
  // Reset the stream state for a fresh inflate run (keeps window size/wrap).
  int inflateReset(){
    if(z == null) return Z_STREAM_ERROR;
    z.total_in = z.total_out = 0;
    z.msg = null;
    this.mode = HEAD;
    this.need_bytes = -1;
    this.blocks.reset();
    return Z_OK;
  }
  // Release block-decoding resources; safe to call more than once.
  int inflateEnd(){
    if(blocks != null){
      blocks.free();
    }
    return Z_OK;
  }
  Inflate(ZStream z){
    this.z=z;
  }
  // Initialize for a window of 2^w bytes. Negative w selects raw deflate
  // (no wrapper); bits above 4 select the zlib/gzip wrapper flavor.
  int inflateInit(int w){
    z.msg = null;
    blocks = null;
    // handle undocumented wrap option (no zlib header or check)
    wrap = 0;
    if(w < 0){
      w = - w;
    }
    else {
      wrap = (w >> 4) + 1;
      if(w < 48)
        w &= 15;
    }
    if(w<8 ||w>15){
      inflateEnd();
      return Z_STREAM_ERROR;
    }
    if(blocks != null && wbits != w){
      blocks.free();
      blocks=null;
    }
    // set window size
    wbits=w;
    this.blocks=new InfBlocks(z, 1<<w);
    // reset state
    inflateReset();
    return Z_OK;
  }
  /**
   * Run the state machine over the currently available input. Returns a
   * zlib status code; Z_BUF_ERROR when no progress is possible yet,
   * Z_STREAM_END once the trailer has been verified. Note the deliberate
   * switch fall-throughs: each state consumes bytes then drops into the next.
   */
  int inflate(int f){
    int hold = 0;
    int r;
    int b;
    if(z == null || z.next_in == null){
      if(f == Z_FINISH && this.mode==HEAD)
        return Z_OK;
      return Z_STREAM_ERROR;
    }
    f = f == Z_FINISH ? Z_BUF_ERROR : Z_OK;
    r = Z_BUF_ERROR;
    while (true){
      switch (this.mode){
      case HEAD:
        if(wrap==0){
          // raw deflate: no header, go straight to block decoding
          this.mode = BLOCKS;
          break;
        }
        try { r=readBytes(2, r, f); }
        catch(Return e){ return e.r; }
        if((wrap&2)!=0 && this.need == 0x8b1fL) { // gzip header (magic 0x1f 0x8b, little-endian)
          z.adler=new CRC32();
          checksum(2, this.need);
          if(gheader==null)
            gheader=new GZIPHeader();
          this.mode = FLAGS;
          break;
        }
        // otherwise treat the two bytes as a zlib CMF/FLG header
        flags = 0;
        this.method = ((int)this.need)&0xff;
        b=((int)(this.need>>8))&0xff;
        if((wrap&1)==0 ||  // check if zlib header allowed
           (((this.method << 8)+b) % 31)!=0){
          this.mode = BAD;
          z.msg = "incorrect header check";
          // since zlib 1.2, it is allowed to inflateSync for this case.
          /*
          this.marker = 5;       // can't try inflateSync
          */
          break;
        }
        if((this.method&0xf)!=Z_DEFLATED){
          this.mode = BAD;
          z.msg="unknown compression method";
          // since zlib 1.2, it is allowed to inflateSync for this case.
          /*
          this.marker = 5;       // can't try inflateSync
          */
          break;
        }
        if((this.method>>4)+8>this.wbits){
          this.mode = BAD;
          z.msg="invalid window size";
          // since zlib 1.2, it is allowed to inflateSync for this case.
          /*
          this.marker = 5;       // can't try inflateSync
          */
          break;
        }
        z.adler=new Adler32();
        if((b&PRESET_DICT)==0){
          this.mode = BLOCKS;
          break;
        }
        this.mode = DICT4;
      case DICT4:
        // DICT4..DICT1 accumulate the 4-byte Adler32 of the expected dictionary
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need=((z.next_in[z.next_in_index++]&0xff)<<24)&0xff000000L;
        this.mode=DICT3;
      case DICT3:
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need+=((z.next_in[z.next_in_index++]&0xff)<<16)&0xff0000L;
        this.mode=DICT2;
      case DICT2:
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need+=((z.next_in[z.next_in_index++]&0xff)<<8)&0xff00L;
        this.mode=DICT1;
      case DICT1:
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need += (z.next_in[z.next_in_index++]&0xffL);
        z.adler.reset(this.need);
        this.mode = DICT0;
        return Z_NEED_DICT;  // caller must supply it via inflateSetDictionary
      case DICT0:
        this.mode = BAD;
        z.msg = "need dictionary";
        this.marker = 0;       // can try inflateSync
        return Z_STREAM_ERROR;
      case BLOCKS:
        r = this.blocks.proc(r);
        if(r == Z_DATA_ERROR){
          this.mode = BAD;
          this.marker = 0;     // can try inflateSync
          break;
        }
        if(r == Z_OK){
          r = f;
        }
        if(r != Z_STREAM_END){
          return r;
        }
        r = f;
        this.was=z.adler.getValue();  // remember computed checksum for trailer compare
        this.blocks.reset();
        if(this.wrap==0){
          this.mode=DONE;
          break;
        }
        this.mode=CHECK4;
      case CHECK4:
        // CHECK4..CHECK1 accumulate the 4-byte stored checksum from the trailer
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need=((z.next_in[z.next_in_index++]&0xff)<<24)&0xff000000L;
        this.mode=CHECK3;
      case CHECK3:
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need+=((z.next_in[z.next_in_index++]&0xff)<<16)&0xff0000L;
        this.mode = CHECK2;
      case CHECK2:
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need+=((z.next_in[z.next_in_index++]&0xff)<<8)&0xff00L;
        this.mode = CHECK1;
      case CHECK1:
        if(z.avail_in==0)return r;r=f;
        z.avail_in--; z.total_in++;
        this.need+=(z.next_in[z.next_in_index++]&0xffL);
        if(flags!=0){ // gzip
          // gzip stores CRC32 little-endian: byte-swap before comparing
          this.need = ((this.need&0xff000000)>>24 |
                       (this.need&0x00ff0000)>>8 |
                       (this.need&0x0000ff00)<<8 |
                       (this.need&0x0000ffff)<<24)&0xffffffffL;
        }
        if(((int)(this.was)) != ((int)(this.need))){
          z.msg = "incorrect data check";
          // check is delayed
          /*
          this.mode = BAD;
          this.marker = 5;       // can't try inflateSync
          break;
          */
        }
        else if(flags!=0 && gheader!=null){
          gheader.crc = this.need;
        }
        this.mode = LENGTH;
      case LENGTH:
        if (wrap!=0 && flags!=0) {
          // gzip trailer also carries ISIZE: uncompressed length mod 2^32
          try { r=readBytes(4, r, f); }
          catch(Return e){ return e.r; }
          if(z.msg!=null && z.msg.equals("incorrect data check")){
            this.mode = BAD;
            this.marker = 5;       // can't try inflateSync
            break;
          }
          if (this.need != (z.total_out & 0xffffffffL)) {
            z.msg = "incorrect length check";
            this.mode = BAD;
            break;
          }
          z.msg = null;
        }
        else {
          if(z.msg!=null && z.msg.equals("incorrect data check")){
            this.mode = BAD;
            this.marker = 5;       // can't try inflateSync
            break;
          }
        }
        this.mode = DONE;
      case DONE:
        return Z_STREAM_END;
      case BAD:
        return Z_DATA_ERROR;
      case FLAGS:
        // gzip: CM byte + FLG byte (read together, little-endian)
        try { r=readBytes(2, r, f); }
        catch(Return e){ return e.r; }
        flags = ((int)this.need)&0xffff;
        if ((flags & 0xff) != Z_DEFLATED) {
          z.msg = "unknown compression method";
          this.mode = BAD;
          break;
        }
        if ((flags & 0xe000)!=0) {
          z.msg = "unknown header flags set";
          this.mode = BAD;
          break;
        }
        if ((flags & 0x0200)!=0){  // FHCRC set: header bytes count into header CRC
          checksum(2, this.need);
        }
        this.mode = TIME;
      case TIME:
        // gzip MTIME field (4 bytes)
        try { r=readBytes(4, r, f); }
        catch(Return e){ return e.r; }
        if(gheader!=null)
          gheader.time = this.need;
        if ((flags & 0x0200)!=0){
          checksum(4, this.need);
        }
        this.mode = OS;
      case OS:
        // gzip XFL + OS bytes
        try { r=readBytes(2, r, f); }
        catch(Return e){ return e.r; }
        if(gheader!=null){
          gheader.xflags = ((int)this.need)&0xff;
          gheader.os = (((int)this.need)>>8)&0xff;
        }
        if ((flags & 0x0200)!=0){
          checksum(2, this.need);
        }
        this.mode = EXLEN;
      case EXLEN:
        if ((flags & 0x0400)!=0) {  // FEXTRA set: 2-byte extra-field length
          try { r=readBytes(2, r, f); }
          catch(Return e){ return e.r; }
          if(gheader!=null){
            gheader.extra = new byte[((int)this.need)&0xffff];
          }
          if ((flags & 0x0200)!=0){
            checksum(2, this.need);
          }
        }
        else if(gheader!=null){
          gheader.extra=null;
        }
        this.mode = EXTRA;
      case EXTRA:
        if ((flags & 0x0400)!=0) {
          // consume exactly `need` extra-field bytes (readBytes(r,f) decrements need)
          try {
            r=readBytes(r, f);
            if(gheader!=null){
              byte[] foo = tmp_string.toByteArray();
              tmp_string=null;
              if(foo.length == gheader.extra.length){
                System.arraycopy(foo, 0, gheader.extra, 0, foo.length);
              }
              else{
                z.msg = "bad extra field length";
                this.mode = BAD;
                break;
              }
            }
          }
          catch(Return e){ return e.r; }
        }
        else if(gheader!=null){
          gheader.extra=null;
        }
        this.mode = NAME;
      case NAME:
        if ((flags & 0x0800)!=0) {  // FNAME set: zero-terminated file name
          try {
            r=readString(r, f);
            if(gheader!=null){
              gheader.name=tmp_string.toByteArray();
            }
            tmp_string=null;
          }
          catch(Return e){ return e.r; }
        }
        else if(gheader!=null){
          gheader.name=null;
        }
        this.mode = COMMENT;
      case COMMENT:
        if ((flags & 0x1000)!=0) {  // FCOMMENT set: zero-terminated comment
          try {
            r=readString(r, f);
            if(gheader!=null){
              gheader.comment=tmp_string.toByteArray();
            }
            tmp_string=null;
          }
          catch(Return e){ return e.r; }
        }
        else if(gheader!=null){
          gheader.comment=null;
        }
        this.mode = HCRC;
      case HCRC:
        if ((flags & 0x0200)!=0) {  // FHCRC set: 2-byte CRC16 of the header
          try { r=readBytes(2, r, f); }
          catch(Return e){ return e.r; }
          if(gheader!=null){
            gheader.hcrc=(int)(this.need&0xffff);
          }
          if(this.need != (z.adler.getValue()&0xffffL)){
            this.mode = BAD;
            z.msg = "header crc mismatch";
            this.marker = 5;       // can't try inflateSync
            break;
          }
        }
        // header done: switch checksum to the body CRC32 and decode blocks
        z.adler = new CRC32();
        this.mode = BLOCKS;
        break;
      default:
        return Z_STREAM_ERROR;
      }
    }
  }
  /**
   * Supply the preset dictionary requested via Z_NEED_DICT. Only legal in
   * DICT0 state (or for a raw stream, wrap==0). Verifies the dictionary's
   * Adler32 against the value read from the header, then primes the window.
   */
  int inflateSetDictionary(byte[] dictionary, int dictLength){
    if(z==null || (this.mode != DICT0 && this.wrap != 0)){
      return Z_STREAM_ERROR;
    }
    int index=0;
    int length = dictLength;
    if(this.mode==DICT0){
      long adler_need=z.adler.getValue();
      z.adler.reset();
      z.adler.update(dictionary, 0, dictLength);
      if(z.adler.getValue()!=adler_need){
        return Z_DATA_ERROR;
      }
    }
    z.adler.reset();
    // only the last (window-size - 1) bytes of an oversized dictionary matter
    if(length >= (1<<this.wbits)){
      length = (1<<this.wbits)-1;
      index=dictLength - length;
    }
    this.blocks.set_dictionary(dictionary, index, length);
    this.mode = BLOCKS;
    return Z_OK;
  }
  // marker pattern of an empty stored block (00 00 ff ff), used for resync
  static private byte[] mark = {(byte)0, (byte)0, (byte)0xff, (byte)0xff};
  /**
   * Skip input until a full flush point (empty stored block marker) is found,
   * then reset so decoding can restart on the next block. Returns Z_DATA_ERROR
   * if no marker was found in the available input.
   */
  int inflateSync(){
    int n;       // number of bytes to look at
    int p;       // pointer to bytes
    int m;       // number of marker bytes found in a row
    long r, w;   // temporaries to save total_in and total_out
    // set up
    if(z == null)
      return Z_STREAM_ERROR;
    if(this.mode != BAD){
      this.mode = BAD;
      this.marker = 0;
    }
    if((n=z.avail_in)==0)
      return Z_BUF_ERROR;
    p=z.next_in_index;
    m=this.marker;
    // search
    while (n!=0 && m < 4){
      if(z.next_in[p] == mark[m]){
        m++;
      }
      else if(z.next_in[p]!=0){
        m = 0;
      }
      else{
        m = 4 - m;
      }
      p++; n--;
    }
    // restore
    z.total_in += p-z.next_in_index;
    z.next_in_index = p;
    z.avail_in = n;
    this.marker = m;
    // return no joy or set up to restart on a new block
    if(m != 4){
      return Z_DATA_ERROR;
    }
    r=z.total_in;  w=z.total_out;
    inflateReset();
    z.total_in=r;  z.total_out = w;
    this.mode = BLOCKS;
    return Z_OK;
  }
  // Returns true if inflate is currently at the end of a block generated
  // by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP
  // implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH
  // but removes the length bytes of the resulting empty stored block. When
  // decompressing, PPP checks that at the end of input packet, inflate is
  // waiting for these length bytes.
  int inflateSyncPoint(){
    if(z == null || this.blocks == null)
      return Z_STREAM_ERROR;
    return this.blocks.sync_point();
  }
  /**
   * Accumulate n little-endian bytes from the input into this.need.
   * Resumable: need_bytes tracks progress across calls; throws Return(r)
   * when input runs dry so inflate() can return to the caller.
   */
  private int readBytes(int n, int r, int f) throws Return{
    if(need_bytes == -1){
      need_bytes=n;
      this.need=0;
    }
    while(need_bytes>0){
      if(z.avail_in==0){ throw new Return(r); }; r=f;
      z.avail_in--;  z.total_in++;
      this.need = this.need |
        ((z.next_in[z.next_in_index++]&0xff)<<((n-need_bytes)*8));
      need_bytes--;
    }
    if(n==2){
      this.need&=0xffffL;
    }
    else if(n==4) {
      this.need&=0xffffffffL;
    }
    need_bytes=-1;
    return r;
  }
  // carries the interim status code out of readBytes/readString when input is exhausted
  class Return extends Exception{
    int r;
    Return(int r){this.r=r; }
  }
  private java.io.ByteArrayOutputStream tmp_string = null;
  /**
   * Read a zero-terminated gzip header string (FNAME/FCOMMENT) into tmp_string,
   * feeding every consumed byte (including the terminator) into the header CRC.
   */
  private int readString(int r, int f) throws Return{
    if(tmp_string == null){
      tmp_string=new java.io.ByteArrayOutputStream();
    }
    int b=0;
    do {
      if(z.avail_in==0){ throw new Return(r); }; r=f;
      z.avail_in--;  z.total_in++;
      b = z.next_in[z.next_in_index];
      if(b!=0) tmp_string.write(z.next_in,  z.next_in_index, 1);
      z.adler.update(z.next_in, z.next_in_index, 1);
      z.next_in_index++;
    }while(b!=0);
    return r;
  }
  /**
   * Read exactly this.need bytes (gzip FEXTRA payload) into tmp_string,
   * feeding each byte into the header CRC. Resumable like readString.
   */
  private int readBytes(int r, int f) throws Return{
    if(tmp_string == null){
      tmp_string=new java.io.ByteArrayOutputStream();
    }
    int b=0;
    while(this.need>0){
      if(z.avail_in==0){ throw new Return(r); }; r=f;
      z.avail_in--;  z.total_in++;
      b = z.next_in[z.next_in_index];
      tmp_string.write(z.next_in, z.next_in_index, 1);
      z.adler.update(z.next_in, z.next_in_index, 1);
      z.next_in_index++;
      this.need--;
    }
    return r;
  }
  // Feed the low n bytes of v (little-endian) into the running header checksum.
  private void checksum(int n, long v){
    for(int i=0; i<n; i++){
      crcbuf[i]=(byte)(v&0xff);
      v>>=8;
    }
    z.adler.update(crcbuf, 0, n);
  }
  public GZIPHeader getGZIPHeader(){
    return gheader;
  }
  // True while still inside the zlib/gzip header (before block decoding starts).
  boolean inParsingHeader(){
    switch(mode){
      case HEAD:
      case DICT4:
      case DICT3:
      case DICT2:
      case DICT1:
      case FLAGS:
      case TIME:
      case OS:
      case EXLEN:
      case EXTRA:
      case NAME:
      case COMMENT:
      case HCRC:
        return true;
      default:
        return false;
    }
  }
}
| |
package com.konkerlabs.platform.registry.business.services;
import com.konkerlabs.platform.registry.business.exceptions.BusinessException;
import com.konkerlabs.platform.registry.business.model.*;
import com.konkerlabs.platform.registry.business.model.enumerations.DateFormat;
import com.konkerlabs.platform.registry.business.model.enumerations.TimeZone;
import com.konkerlabs.platform.registry.business.repositories.PasswordBlacklistRepository;
import com.konkerlabs.platform.registry.business.repositories.UserRepository;
import com.konkerlabs.platform.registry.business.services.api.*;
import com.konkerlabs.platform.registry.config.EmailConfig;
import com.konkerlabs.platform.registry.config.PasswordUserConfig;
import com.konkerlabs.platform.security.managers.PasswordManager;
import org.apache.commons.codec.digest.DigestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import java.security.NoSuchAlgorithmException;
import java.security.spec.InvalidKeySpecException;
import java.text.MessageFormat;
import java.time.Duration;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.*;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
@Service
public class UserServiceImpl implements UserService {
private Logger LOG = LoggerFactory.getLogger(UserServiceImpl.class);
private static final String EMAIL_PATTERN =
"^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@"
+ "[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$";
private Pattern patternEmail;
@Autowired
private UserRepository userRepository;
@Autowired
private PasswordBlacklistRepository passwordBlacklistRepository;
@Autowired
private ApplicationService applicationService;
@Autowired
private LocationSearchService locationSearchService;
@Autowired
private TenantService tenantService;
@Autowired
private TokenService tokenService;
@Autowired
private EmailService emailService;
@Autowired
private RoleService roleService;
@Autowired
private EmailConfig emailConfig;
@Autowired
private MessageSource messageSource;
private PasswordUserConfig passwordUserConfig;
private PasswordManager passwordManager;
// Collaborators that need no Spring wiring are created eagerly; the e-mail
// regex is compiled once here because Pattern compilation is expensive.
public UserServiceImpl() {
passwordUserConfig = new PasswordUserConfig();
passwordManager = new PasswordManager();
patternEmail = Pattern.compile(EMAIL_PATTERN);
}
/**
 * Updates an existing user after verifying the caller knows the current
 * password. Delegates to {@link #save(User, String, String)} for the
 * remaining validation and persistence.
 *
 * @param user                    user carrying the new field values; its e-mail is the id
 * @param oldPassword             current password, checked against storage
 * @param newPassword             replacement password
 * @param newPasswordConfirmation must equal newPassword
 * @return ok with the saved user, or an error response with a validation code
 */
@Override
public ServiceResponse<User> save(User user,
String oldPassword,
String newPassword,
String newPasswordConfirmation) {
User fromStorage = userRepository.findOne(user.getEmail());
// The user must already exist and the id (e-mail) must match exactly.
if (!Optional.ofNullable(fromStorage).isPresent() ||
!Optional.ofNullable(user.getEmail()).isPresent()
|| !user.getEmail().equals(fromStorage.getEmail())) {
LOG.debug("This user id is invalid:{}", Optional.ofNullable(user.getEmail()).isPresent() ? user.getEmail() : "NULL");
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_USER_EMAIL.getCode())
.build();
}
// All three password inputs are mandatory on this overload.
if (!Optional.ofNullable(oldPassword).isPresent() ||
!Optional.ofNullable(newPassword).isPresent() ||
!Optional.ofNullable(newPasswordConfirmation).isPresent()) {
// NOTE(review): the extra arguments below have no '{}' placeholders, so
// SLF4J drops them — presumably intended for a custom log layer; verify.
LOG.debug("Invalid password confirmation",
fromStorage.getTenant().toURI(),
fromStorage.getTenant().getLogLevel(),
fromStorage);
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_PASSWORD_CONFIRMATION.getCode())
.build();
}
// Only verify the old password when a new one is actually being set.
if (!StringUtils.isEmpty(newPassword)) {
try {
validateOldPassword(oldPassword, fromStorage);
} catch (BusinessException e) {
LOG.debug("Invalid current password",
fromStorage.getTenant().toURI(),
fromStorage.getTenant().getLogLevel());
return ServiceResponseBuilder.<User>error()
.withMessage(e.getMessage())
.build();
}
}
return save(user, newPassword, newPasswordConfirmation);
}
/**
 * Creates or updates a user: validates the mandatory fields, checks the
 * new password against the password policy, encodes it, copies the form
 * values onto the stored record and persists it.
 *
 * @return ok with the persisted user, or an error response with a validation code
 */
@Override
public ServiceResponse<User> save(User user, String newPassword, String newPasswordConfirmation) {
ServiceResponse<User> errorResponse = checkNullFields(user);
if (errorResponse != null) return errorResponse;
if (!Optional.ofNullable(newPassword).isPresent()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_PASSWORD_INVALID.getCode())
.build();
}
if (!newPassword.equals(newPasswordConfirmation)) {
// NOTE(review): extra LOG.debug arguments have no '{}' placeholders and
// are dropped by SLF4J; verify against the intended logging facility.
LOG.debug("Invalid password confirmation on user update",
user.getTenant().toURI(),
user.getTenant().getLogLevel(),
user);
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_PASSWORD_CONFIRMATION.getCode())
.build();
}
// Fall back to the incoming user when nothing is stored yet (creation path).
User fromStorage = Optional.ofNullable(userRepository.findOne(user.getEmail())).orElse(user);
if (!Optional.ofNullable(user.getEmail()).isPresent() ||
!user.getEmail().equals(fromStorage.getEmail())) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_USER_EMAIL.getCode())
.build();
}
// Enforce the password policy (confirmation, length, username, blacklist).
if (!StringUtils.isEmpty(newPassword)) {
try {
validatePassword(user, newPassword, newPasswordConfirmation);
} catch (BusinessException e) {
return ServiceResponseBuilder.<User>error()
.withMessage(e.getMessage())
.build();
}
}
// Hash the confirmed password onto the form user; encoding failures are
// logged and swallowed here, then caught by the hash check just below.
Optional.of(newPasswordConfirmation).ifPresent(password -> {
if (!StringUtils.isEmpty(newPasswordConfirmation)) {
try {
user.setPassword(encodePassword(password));
LOG.info(MessageFormat.format("User password has been changed, user \"{0}\"",
fromStorage.getEmail()),
fromStorage.getTenant().toURI(),
fromStorage.getTenant().getLogLevel(),
fromStorage);
} catch (Exception e) {
LOG.error("Error encoding password for user " + fromStorage.getEmail(),
fromStorage.getTenant().toURI(), fromStorage.getTenant().getLogLevel(), fromStorage);
}
}
});
// Refuse to persist anything that is not a well-formed password hash.
if (Optional.ofNullable(user.getPassword()).isPresent() && !passwordManager.validateHash(user.getPassword())) {
LOG.debug(Errors.ERROR_SAVE_USER.getCode(), fromStorage.getTenant().toURI(), fromStorage.getTenant().getLogLevel(), fromStorage);
return ServiceResponseBuilder.<User>error()
.withMessage(Errors.ERROR_SAVE_USER.getCode()).build();
}
fillFrom(user, fromStorage);
return persistValidUser(fromStorage);
}
/**
 * Creates or updates a user bound to an application and a location,
 * enforcing that users may not move their own application/location and
 * assigning a role based on whether an application scope applies.
 * "default" selects the tenant's own application / default location.
 *
 * @return ok with the saved user, or an error response with a validation code
 */
@Override
public ServiceResponse<User> save(String application,
String location,
String loggedUser,
User user,
String newPassword,
String newPasswordConfirmation) {
User fromStorage = Optional.ofNullable(userRepository.findOne(user.getEmail())).orElse(user);
// NOTE(review): this compares Optional<String> (the 'application' name)
// with Optional<Application> (the stored entity); Optional.equals is only
// true when both are empty, so the guard trips whenever a self-edit passes
// a non-null application, even an unchanged one. Likely should compare the
// name against the stored application's name — confirm intended behavior.
if (loggedUser.equals(fromStorage.getEmail())
&& !Optional.ofNullable(application).equals(Optional.ofNullable(fromStorage.getApplication()))) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.NO_PERMISSION_TO_CHANGE_APP_HIMSELF.getCode())
.build();
}
// NOTE(review): same Optional type mismatch as above (String vs Location).
if (loggedUser.equals(fromStorage.getEmail())
&& !Optional.ofNullable(location).equals(Optional.ofNullable(fromStorage.getLocation()))) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.NO_PERMISSION_TO_CHANGE_LOCATION_HIMSELF.getCode())
.build();
}
// "default" maps to the tenant's own application (named by its domain).
Application appFromDB = applicationService.getByApplicationName(
user.getTenant(),
"default".equals(application) ? user.getTenant().getDomainName() : application).getResult();
if ((Optional.ofNullable(application).isPresent()
|| Optional.ofNullable(location).isPresent())
&& !Optional.ofNullable(appFromDB).isPresent()) {
return ServiceResponseBuilder.<User>error()
.withMessage(ApplicationService.Validations.APPLICATION_DOES_NOT_EXIST.getCode())
.build();
}
// An existing user with the same e-mail under a different tenant blocks creation.
if (Optional.ofNullable(fromStorage).isPresent()
&& !fromStorage.getTenant().equals(user.getTenant())) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.USER_EXIST.getCode())
.build();
}
ServiceResponse<Location> locationServiceResponse;
if ("default".equals(location)) {
locationServiceResponse = this.locationSearchService.findDefault(
user.getTenant(),
appFromDB);
} else {
locationServiceResponse = this.locationSearchService.findByName(
user.getTenant(),
appFromDB,
location,
false);
}
if (!locationServiceResponse.isOk()
&& Optional.ofNullable(location).isPresent()) {
return ServiceResponseBuilder.<User>error()
.withMessage(LocationService.Validations.LOCATION_GUID_DOES_NOT_EXIST.getCode())
.build();
}
// Application-scoped users get read-only access; tenant-wide users get full IoT access.
if (Optional.ofNullable(appFromDB).isPresent()) {
Role role = roleService.findByName(RoleService.ROLE_IOT_READ_ONLY).getResult();
user.setRoles(Collections.singletonList(role));
} else {
Role role = roleService.findByName(RoleService.ROLE_IOT_USER).getResult();
user.setRoles(Collections.singletonList(role));
}
user.setApplication(appFromDB);
user.setLocation(locationServiceResponse.getResult());
return save(user, newPassword, newPasswordConfirmation);
}
/**
 * Persists a user whose password is supplied as a pre-computed hash
 * (import/migration path): the hash is validated for well-formedness but
 * never re-encoded.
 *
 * @return ok with the persisted user, or an error response with a validation code
 */
private ServiceResponse<User> saveWithPasswordHash(User user, String passwordHash) {
ServiceResponse<User> errorResponse = checkNullFields(user);
if (errorResponse != null) return errorResponse;
// Fall back to the incoming user when nothing is stored yet (creation path).
User fromStorage = Optional.ofNullable(userRepository.findOne(user.getEmail())).orElse(user);
if (!Optional.ofNullable(user.getEmail()).isPresent() ||
!user.getEmail().equals(fromStorage.getEmail())) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_USER_EMAIL.getCode())
.build();
}
if (!Optional.ofNullable(passwordHash).isPresent()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_PASSWORD_HASH_INVALID.getCode())
.build();
}
// Only structurally valid hashes are accepted; the raw hash is stored as-is.
if (!passwordManager.validateHash(passwordHash)) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_PASSWORD_HASH_INVALID.getCode())
.build();
} else {
user.setPassword(passwordHash);
}
fillFrom(user, fromStorage);
return persistValidUser(fromStorage);
}
/**
 * Saves an already-validated user and, when the current security context
 * belongs to a password-authenticated session, copies the saved values
 * onto the in-memory principal so the session reflects the update.
 *
 * @return ok with the saved user, or ERROR_SAVE_USER on any persistence failure
 */
private ServiceResponse<User> persistValidUser(User user) {
try {
userRepository.save(user);
if (SecurityContextHolder.getContext().getAuthentication() instanceof UsernamePasswordAuthenticationToken) {
Optional.ofNullable(SecurityContextHolder.getContext().getAuthentication())
.ifPresent(authentication -> {
// "anonymousUser" is Spring Security's placeholder principal; skip it.
User principal = (User) Optional.ofNullable(authentication.getPrincipal())
.filter(p -> !p.equals("anonymousUser")).orElse(User.builder().build());
fillFrom(user, principal);
});
}
return ServiceResponseBuilder.<User>ok().withResult(user).build();
} catch (Exception e) {
LOG.debug("Error saving User update",
user.getTenant().toURI(),
user.getTenant().getLogLevel(), user);
return ServiceResponseBuilder.<User>error()
.withMessage(Errors.ERROR_SAVE_USER.getCode()).build();
}
}
/**
 * Validates the mandatory user fields: presence of the user itself,
 * a well-formed e-mail, and the date-format, time-zone and language
 * preferences.
 *
 * @param user candidate user; may be null
 * @return an error ServiceResponse describing the first violation found,
 *         or {@code null} when every check passes
 */
private ServiceResponse<User> checkNullFields(User user) {
    if (!Optional.ofNullable(user).isPresent()) {
        // Bug fix: the previous code dereferenced the null user here
        // (user.getTenant()...) while building the log call, throwing
        // NullPointerException instead of returning the error response.
        LOG.debug("Invalid user details on update: user is null");
        return ServiceResponseBuilder.<User>error()
                .withMessage(Validations.INVALID_USER_DETAILS.getCode())
                .build();
    }
    if (!Optional.ofNullable(user.getEmail()).isPresent() ||
            !patternEmail.matcher(user.getEmail()).matches()) {
        return ServiceResponseBuilder.<User>error()
                .withMessage(Validations.INVALID_USER_EMAIL.getCode())
                .build();
    }
    // NOTE(review): the LOG.debug calls below pass extra arguments without
    // '{}' placeholders (dropped by SLF4J) and assume a non-null tenant.
    if (!Optional.ofNullable(user.getDateFormat()).isPresent()) {
        LOG.debug("Invalid date format preference update",
                user.getTenant().toURI(),
                user.getTenant().getLogLevel(),
                user);
        return ServiceResponseBuilder.<User>error()
                .withMessage(Validations.INVALID_USER_PREFERENCE_DATEFORMAT.getCode())
                .build();
    }
    if (!Optional.ofNullable(user.getZoneId()).isPresent()) {
        LOG.debug("Invalid locale preference update",
                user.getTenant().toURI(),
                user.getTenant().getLogLevel(),
                user);
        return ServiceResponseBuilder.<User>error()
                .withMessage(Validations.INVALID_USER_PREFERENCE_LOCALE.getCode())
                .build();
    }
    if (!Optional.ofNullable(user.getLanguage()).isPresent()) {
        LOG.debug("Invalid language preference update",
                user.getTenant().toURI(),
                user.getTenant().getLogLevel(),
                user);
        return ServiceResponseBuilder.<User>error()
                .withMessage(Validations.INVALID_USER_PREFERENCE_LANGUAGE.getCode())
                .build();
    }
    // null means "valid" by this method's contract.
    return null;
}
/**
 * Self-service account creation: enforces the daily creation quota,
 * validates the password before any existence check (so failures do not
 * leak whether the e-mail is registered), handles pre-existing accounts
 * via e-mail, then creates tenant, role and user and sends the
 * validation-token e-mail.
 *
 * @return ok with the created user, or an error response with a validation code
 */
@Override
public ServiceResponse<User> createAccount(User user, String newPassword, String newPasswordConfirmation) {
if (validateUserCreationLimit()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_USER_LIMIT_CREATION.getCode())
.build();
}
// must validate password here also to don't leak the user existence
try {
validatePassword(user, newPassword, newPasswordConfirmation);
} catch (BusinessException e) {
return ServiceResponseBuilder.<User>error()
.withMessage(e.getMessage())
.build();
}
User fromStorage = userRepository.findByEmail(user.getEmail());
// Active account: notify by e-mail and report success (no existence leak).
// Inactive account: re-send the validation token, then fall through.
if (Optional.ofNullable(fromStorage).isPresent() && fromStorage.isActive()) {
return sendAccountExistsEmail(fromStorage);
} else if (Optional.ofNullable(fromStorage).isPresent()) {
sendValidateTokenEmail(fromStorage);
}
if (user.getName() == null || user.getName().isEmpty()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_USER_NAME.getCode())
.build();
}
// A tenant without a name inherits the user's name.
if (!Optional.ofNullable(user.getTenant().getName()).isPresent()){
user.getTenant().setName(user.getName());
}
ServiceResponse<Tenant> serviceResponse = tenantService.save(user.getTenant());
user.setTenant(serviceResponse.getResult());
ServiceResponse<Role> roleResponse = roleService.findByName(RoleService.ROLE_IOT_USER);
user.setRoles(Collections.singletonList(roleResponse.getResult()));
// Default preferences for self-registered accounts.
user.setZoneId(TimeZone.AMERICA_SAO_PAULO);
user.setDateFormat(DateFormat.YYYYMMDD);
user.setRegistrationDate(Instant.now());
ServiceResponse<User> save = save(user, newPassword, newPasswordConfirmation);
if (save.isOk()) {
sendValidateTokenEmail(user);
}
return save;
}
/**
 * Notifies an already-registered user that an account with this e-mail
 * exists, pointing them at the login page, and reports success with the
 * stored user as result (so callers do not leak account existence).
 */
private ServiceResponse<User> sendAccountExistsEmail(User fromStorage) {
    Map<String, Object> params = new HashMap<>();
    params.put("name", fromStorage.getName());
    params.put("link", emailConfig.getBaseurl().concat("login"));
    sendMail(fromStorage, params, Messages.USER_HAS_ACCOUNT, "html/email-accountalreadyexists");
    return ServiceResponseBuilder.<User>ok()
            .withResult(fromStorage)
            .build();
}
/**
 * Sends the self-subscription e-mail containing a validation token
 * (valid for two days) that the user follows to activate the account.
 */
private void sendValidateTokenEmail(User user) {
    ServiceResponse<String> tokenResponse = tokenService.generateToken(
            TokenService.Purpose.VALIDATE_EMAIL,
            user,
            Duration.ofDays(2L));
    Map<String, Object> params = new HashMap<>();
    params.put("name", user.getName());
    params.put("link", emailConfig.getBaseurl().concat("subscription/").concat(tokenResponse.getResult()));
    sendMail(user, params, Messages.USER_SUBJECT_MAIL, "html/email-selfsubscription");
}
/**
 * Limits self-service account creation to 250 new users per UTC day.
 *
 * Fix: the previous version built the day window from
 * {@code LocalDateTime.now()} (system time zone) but converted it with a
 * UTC offset, shifting the window on non-UTC hosts, and it never zeroed
 * the nanosecond field, so the boundaries were not exact midnight /
 * end-of-day instants.
 *
 * @return true when today's registration quota has been reached
 */
private boolean validateUserCreationLimit() {
    LocalDateTime startOfDay = LocalDate.now(ZoneOffset.UTC).atStartOfDay();
    Instant start = startOfDay.toInstant(ZoneOffset.UTC);
    // Last instant of the same UTC day (inclusive upper bound).
    Instant end = startOfDay.plusDays(1).minusNanos(1).toInstant(ZoneOffset.UTC);
    Long countUsers = userRepository.countRegistrationsBetweenDate(start, end);
    return countUsers >= 250L;
}
/**
 * Sends a templated e-mail to a single user, resolving the subject from
 * the message bundle in the user's own locale (no CC recipients).
 */
private void sendMail(User user, Map<String, Object> templateParam, Messages message, String templateName) {
emailService.send(
emailConfig.getSender(),
Collections.singletonList(user),
Collections.emptyList(),
messageSource.getMessage(message.getCode(), null, user.getLanguage().getLocale()),
templateName,
templateParam,
user.getLanguage().getLocale());
}
/**
 * Account creation with a pre-computed password hash (import path):
 * enforces the daily quota, validates the hash before the existence check
 * (so failures do not leak whether the e-mail is registered), then creates
 * tenant and user and sends the validation-token e-mail.
 *
 * @return ok with the created user, or an error response with a validation code
 */
@Override
public ServiceResponse<User> createAccountWithPasswordHash(User user, String passwordHash) {
if (validateUserCreationLimit()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_USER_LIMIT_CREATION.getCode())
.build();
}
// must validate hash here also to don't leak the user existence
if (!Optional.ofNullable(passwordHash).isPresent()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_PASSWORD_HASH_INVALID.getCode())
.build();
}
if (!passwordManager.validateHash(passwordHash)) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_PASSWORD_HASH_INVALID.getCode())
.build();
}
// Any pre-existing account (active or not) short-circuits with an e-mail.
User fromStorage = userRepository.findByEmail(user.getEmail());
if (Optional.ofNullable(fromStorage).isPresent()) {
return sendAccountExistsEmail(fromStorage);
}
if (user.getName() == null || user.getName().isEmpty()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.INVALID_USER_NAME.getCode())
.build();
}
// A tenant without a name inherits the user's name.
if (!Optional.ofNullable(user.getTenant().getName()).isPresent()){
user.getTenant().setName(user.getName());
}
ServiceResponse<Tenant> serviceResponse = tenantService.save(user.getTenant());
user.setTenant(serviceResponse.getResult());
ServiceResponse<User> save = saveWithPasswordHash(user, passwordHash);
if (save.isOk()) {
sendValidateTokenEmail(user);
}
return save;
}
/**
 * Copies the editable form fields onto the stored record before
 * persistence.
 *
 * @param form    incoming values from the caller
 * @param storage stored record that will be persisted (mutated in place)
 */
private void fillFrom(User form, User storage) {
storage.setLanguage(form.getLanguage());
storage.setZoneId(form.getZoneId());
storage.setAvatar(form.getAvatar());
storage.setDateFormat(form.getDateFormat());
// An empty form password means "keep the current one".
storage.setPassword(!StringUtils.isEmpty(form.getPassword()) ? form.getPassword() : storage.getPassword());
storage.setName(form.getName());
storage.setPhone(form.getPhone());
storage.setNotificationViaEmail(form.isNotificationViaEmail());
storage.setApplication(form.getApplication());
storage.setLocation(form.getLocation());
// Activation is one-way here: an active account is never deactivated.
if (!storage.isActive()) {
storage.setActive(form.isActive());
}
}
/**
 * Hashes a plaintext password with the configured iteration count.
 *
 * @param password plaintext password
 * @return the encoded password hash
 * @throws Exception when the underlying hash computation fails
 */
private String encodePassword(String password) throws Exception {
// Defensive re-initialization; normally set by the constructor.
if (!Optional.ofNullable(passwordManager).isPresent()) {
passwordManager = new PasswordManager();
}
return passwordManager.createHash(
password,
Optional.of(passwordUserConfig.getIterations())
);
}
/**
 * Applies the full password policy to a candidate password: confirmation
 * match, minimum length, not equal to the username, and not present in
 * the leaked-password blacklist.
 *
 * @throws BusinessException with the specific validation code on the first failure
 */
private void validatePassword(User fromForm,
String newPassword,
String newPasswordConfirmation
) throws BusinessException {
validatePasswordConfirmation(newPassword, newPasswordConfirmation);
// The confirmed value is what will be stored, so later rules check it.
validatePasswordLength(newPasswordConfirmation);
validatePasswordPattern(fromForm.getUsername(), newPasswordConfirmation);
validatePasswordBlackList(newPasswordConfirmation);
}
/**
 * Verifies that the supplied current password matches the stored hash.
 *
 * @param oldPassword plaintext current password supplied by the caller
 * @param fromStorage stored user whose hash is checked; must be non-null
 * @throws BusinessException when the user is missing, the password does
 *         not match, or the stored hash cannot be processed
 */
private void validateOldPassword(String oldPassword, User fromStorage) throws BusinessException {
// Defensive re-initialization; normally set by the constructor.
if (!Optional.ofNullable(passwordManager).isPresent()) {
passwordManager = new PasswordManager();
}
if (!Optional.ofNullable(fromStorage).isPresent()) {
throw new BusinessException(Validations.INVALID_USER_DETAILS.getCode());
}
try {
if (!passwordManager.validatePassword(oldPassword, fromStorage.getPassword())) {
throw new BusinessException(Validations.INVALID_PASSWORD_INVALID.getCode());
}
// Crypto errors (bad hash format/algorithm) surface as a user-data error.
} catch (InvalidKeySpecException | NoSuchAlgorithmException e) {
throw new BusinessException(Validations.INVALID_PASSWORD_USER_DATA.getCode());
}
}
/**
 * Ensures the new password and its confirmation match.
 *
 * Fix: createAccount() feeds user-supplied values straight in here before
 * any null check, so the old {@code newPassword.equals(...)} call threw
 * NullPointerException on a null password; a missing password is now
 * reported as a confirmation failure instead.
 *
 * @throws BusinessException when the values differ or the password is missing
 */
private void validatePasswordConfirmation(String newPassword, String newPasswordConfirmation)
throws BusinessException {
if (newPassword == null || !newPassword.equals(newPasswordConfirmation)) {
throw new BusinessException(Validations.INVALID_PASSWORD_CONFIRMATION.getCode());
}
}
/**
 * Rejects passwords shorter than the 12-character policy minimum.
 *
 * @throws BusinessException when the password is too short
 */
private void validatePasswordLength(String password) throws BusinessException {
    final int minimumLength = 12;
    if (password.length() >= minimumLength) {
        return;
    }
    throw new BusinessException(Validations.INVALID_PASSWORD_LENGTH.getCode());
}
/**
 * Rejects a password that equals the username (case-insensitively),
 * since such a password is trivially guessable.
 *
 * @throws BusinessException when the password matches the username
 */
private void validatePasswordPattern(String username, String password) throws BusinessException {
    boolean matchesUsername = password.equalsIgnoreCase(username);
    if (matchesUsername) {
        throw new BusinessException(Validations.INVALID_PASSWORD_USER_DATA.getCode());
    }
}
/**
 * Rejects passwords found in the leaked-password blacklist.
 *
 * @throws BusinessException when the password is blacklisted
 */
private void validatePasswordBlackList(String password) throws BusinessException {
// blacklisted passwords are stored in SHA1 format, because some providers of
// leaked passwords publish the database in SHA1 format in order to
// protect possible private identifiable information. See haveibeenpwned.com .
String hashToBeSearched = DigestUtils.sha1Hex(password).toUpperCase();
User.PasswordBlacklist matches =
passwordBlacklistRepository.findOne(hashToBeSearched);
if (Optional.ofNullable(matches).isPresent()) {
throw new BusinessException(Validations.INVALID_PASSWORD_BLACKLISTED.getCode());
}
}
/**
 * Looks a user up by e-mail address.
 *
 * @return ok with the user (possibly null result when not stored), or an
 *         error response when the e-mail itself is missing/empty
 */
@Override
public ServiceResponse<User> findByEmail(String email) {
    if (email == null || email.isEmpty()) {
        return ServiceResponseBuilder.<User>error()
                .withMessage(Validations.NO_EXIST_USER.getCode()).build();
    }
    return ServiceResponseBuilder.<User>ok()
            .withResult(userRepository.findOne(email))
            .build();
}
/**
 * Lists every user belonging to the tenant; this lookup has no error paths.
 */
@Override
public ServiceResponse<List<User>> findAll(Tenant tenant) {
    List<User> users = userRepository.findAllByTenantId(tenant.getId());
    return ServiceResponseBuilder.<List<User>>ok().withResult(users).build();
}
/**
 * Looks a user up by tenant and e-mail.
 *
 * @return ok with the user, or NO_EXIST_USER when no match is found
 */
@Override
public ServiceResponse<User> findByTenantAndEmail(Tenant tenant, String email) {
    User found = userRepository.findAllByTenantIdAndEmail(tenant.getId(), email);
    if (found == null) {
        return ServiceResponseBuilder
                .<User>error()
                .withMessage(Validations.NO_EXIST_USER.getCode())
                .build();
    }
    return ServiceResponseBuilder
            .<User>ok()
            .withResult(found)
            .build();
}
/**
 * Removes a user from a tenant, refusing self-removal and removals that
 * cross the logged-in user's application or location scope.
 *
 * @return ok on success, or an error response with a validation code
 */
@Override
public ServiceResponse<User> remove(Tenant tenant, User loggedUser, String emailUserToRemove) {
User user = userRepository.findAllByTenantIdAndEmail(tenant.getId(), emailUserToRemove);
if (!Optional.ofNullable(user).isPresent()) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.NO_EXIST_USER.getCode())
.build();
}
// A user may never remove their own account.
if (loggedUser.equals(user)) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.NO_PERMISSION_TO_REMOVE_HIMSELF.getCode())
.build();
}
// Application-scoped callers may only remove users of the same application.
if (Optional.ofNullable(loggedUser.getApplication()).isPresent()
&& !loggedUser.getApplication().equals(user.getApplication())) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.NO_PERMISSION_TO_REMOVE.getCode())
.build();
}
// Location-scoped callers may only remove users within their location subtree.
if (Optional.ofNullable(loggedUser.getLocation()).isPresent()
&& !LocationTreeUtils.isSublocationOf(loggedUser.getLocation(), user.getLocation())) {
return ServiceResponseBuilder.<User>error()
.withMessage(Validations.NO_PERMISSION_TO_REMOVE.getCode())
.build();
}
userRepository.delete(user);
return ServiceResponseBuilder.<User>ok().build();
}
/**
 * Lists the users visible in an application/location context: tenant
 * admins, users bound to the application, and users bound to the specific
 * location — de-duplicated while preserving encounter order.
 */
@Override
public ServiceResponse<List<User>> findAllByApplicationLocation(Tenant tenant, Application application, Location location) {
    Set<User> merged = new LinkedHashSet<>();
    merged.addAll(userRepository.findAllAdminUsers(tenant.getId()));
    merged.addAll(userRepository.findAllByTenantIdApplicationName(tenant.getId(), application.getName()));
    merged.addAll(userRepository.findAllByTenantIdApplicationNameLocationId(tenant.getId(), application.getName(), location.getId()));
    return ServiceResponseBuilder.<List<User>>ok()
            .withResult(new ArrayList<>(merged))
            .build();
}
}
| |
/*
* Copyright 2011 Red Hat, Inc. and/or its affiliates.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA
*/
package org.infinispan.container.versioning;
import org.infinispan.CacheException;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.configuration.cache.VersioningScheme;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.test.SingleCacheManagerTest;
import org.infinispan.test.fwk.CleanupAfterMethod;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.transaction.LockingMode;
import org.infinispan.util.concurrent.IsolationLevel;
import org.testng.Assert;
import org.testng.annotations.Test;
import javax.transaction.RollbackException;
import javax.transaction.Transaction;
/**
* Tests local-mode versioning
*
* @author Manik Surtani
* @since 5.1
*/
@Test(testName = "container.versioning.LocalWriteSkewTest", groups = "functional")
@CleanupAfterMethod
public class LocalWriteSkewTest extends SingleCacheManagerTest {
// Default cache: transactional, optimistic locking, REPEATABLE_READ with
// write-skew check and SIMPLE versioning. A second cache "no-ws-chk" is
// defined from the same builder with versioning and write-skew disabled,
// for the negative test.
@Override
protected EmbeddedCacheManager createCacheManager() throws Exception {
ConfigurationBuilder builder = TestCacheManagerFactory.getDefaultCacheConfiguration(true);
builder
.versioning()
.enable()
.scheme(VersioningScheme.SIMPLE)
.locking()
.isolationLevel(IsolationLevel.REPEATABLE_READ)
.writeSkewCheck(true)
.transaction()
.lockingMode(LockingMode.OPTIMISTIC);
EmbeddedCacheManager cm = TestCacheManagerFactory.createCacheManager(builder);
// Mutating the builder after createCacheManager only affects "no-ws-chk".
builder.locking().writeSkewCheck(false).versioning().disable();
cm.defineConfiguration("no-ws-chk", builder.build());
return cm;
}
// A transaction that read "hello" must fail to overwrite it after a
// concurrent out-of-transaction write: the put throws and the commit
// rolls back, leaving the concurrent value in place.
public void testWriteSkewEnabled() throws Exception {
// Auto-commit is true
cache.put("hello", "world 1");
tm().begin();
Object v = cache.get("hello");
assert "world 1".equals(v);
Transaction t = tm().suspend();
// Create a write skew
cache.put("hello", "world 3");
tm().resume(t);
try {
cache.put("hello", "world 2");
assert false: "Should have detected write skew";
} catch (CacheException e) {
// expected
}
try {
tm().commit();
assert false: "Transaction should roll back";
} catch (RollbackException re) {
// expected
}
assert "world 3".equals(cache.get("hello"));
}
// With several entries touched in one transaction, a skew on one key (k1)
// rolls back the whole transaction, so the untouched-by-others write to
// k2 is discarded too.
public void testWriteSkewMultiEntries() throws Exception {
tm().begin();
cache.put("k1", "v1");
cache.put("k2", "v2");
tm().commit();
tm().begin();
cache.put("k2", "v2000");
Object v = cache.get("k1");
assert "v1".equals(v);
assert "v2000".equals(cache.get("k2"));
Transaction t = tm().suspend();
// Create a write skew
// Auto-commit is true
cache.put("k1", "v3");
tm().resume(t);
try {
cache.put("k1", "v5000");
assert false: "Should have detected write skew";
} catch (CacheException e) {
// expected
}
try {
tm().commit();
assert false: "Transaction should roll back";
} catch (RollbackException re) {
// expected
}
// k1 keeps the concurrent value; k2 reverts to its pre-transaction value.
assert "v3".equals(cache.get("k1"));
assert "v2".equals(cache.get("k2"));
}
// Same interleaving as testWriteSkewEnabled, but on the "no-ws-chk" cache:
// without the check, the transactional write silently wins (last writer).
public void testWriteSkewDisabled() throws Exception {
cache = cacheManager.getCache("no-ws-chk");
// Auto-commit is true
cache.put("hello", "world 1");
tm().begin();
Object v = cache.get("hello");
assert "world 1".equals(v);
Transaction t = tm().suspend();
// Create a write skew
cache.put("hello", "world 3");
tm().resume(t);
cache.put("hello", "world 2");
tm().commit();
assert "world 2".equals(cache.get("hello"));
}
// Write skew must also be detected when the concurrent modification is a
// removal: the transaction read "world", the entry was deleted outside it,
// so its write fails and the rollback leaves the entry absent.
public void testNullEntries() throws Exception {
// Auto-commit is true
cache.put("hello", "world");
tm().begin();
assert "world".equals(cache.get("hello"));
Transaction t = tm().suspend();
cache.remove("hello");
assert null == cache.get("hello");
tm().resume(t);
try {
cache.put("hello", "world2");
assert false: "Write skew should have been detected";
} catch (CacheException expected) {
// expected
}
try {
tm().commit();
assert false: "This transaction should roll back";
} catch (RollbackException expected) {
// expected
}
assert null == cache.get("hello");
}
// Two transactions both observe a key as absent and both create it; the
// second committer must roll back at commit time, and the first committed
// value wins.
public void testSameNodeKeyCreation() throws Exception {
tm().begin();
Assert.assertEquals(cache.get("NewKey"), null);
cache.put("NewKey", "v1");
Transaction tx0 = tm().suspend();
//other transaction do the same thing
tm().begin();
Assert.assertEquals(cache.get("NewKey"), null);
cache.put("NewKey", "v2");
tm().commit();
tm().resume(tx0);
try {
tm().commit();
Assert.fail("The transaction should rollback");
} catch (RollbackException expected) {
//expected
}
Assert.assertEquals(cache.get("NewKey"), "v2");
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.nodelabels;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.nodelabels.RMNodeLabel;
import org.apache.hadoop.yarn.security.YarnAuthorizationProvider;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeLabelsUpdateSchedulerEvent;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
public class RMNodeLabelsManager extends CommonNodeLabelsManager {
// Per-queue bookkeeping: the labels the queue may access and the total
// resource currently reachable through them. Backed by a concurrent set
// since it is read outside the manager's write lock.
protected static class Queue {
protected Set<String> accessibleNodeLabels;
protected Resource resource;
protected Queue() {
accessibleNodeLabels =
Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
resource = Resource.newInstance(0, 0);
}
}
ConcurrentMap<String, Queue> queueCollections =
new ConcurrentHashMap<String, Queue>();
private YarnAuthorizationProvider authorizer;
private RMContext rmContext = null;
// In addition to the base initialization, obtain the authorization
// provider used for admin-ACL checks on label operations.
@Override
protected void serviceInit(Configuration conf) throws Exception {
super.serviceInit(conf);
authorizer = YarnAuthorizationProvider.getInstance(conf);
}
/**
 * Adds labels to nodes, then reconciles the per-queue resource accounting
 * by diffing snapshots of the affected hosts taken before and after the
 * base-class mutation. Guarded by the manager's write lock.
 */
@Override
public void addLabelsToNode(Map<NodeId, Set<String>> addedLabelsToNode)
throws IOException {
writeLock.lock();
try {
// get nodesCollection before edition
Map<String, Host> before = cloneNodeMap(addedLabelsToNode.keySet());
super.addLabelsToNode(addedLabelsToNode);
// get nodesCollection after edition
Map<String, Host> after = cloneNodeMap(addedLabelsToNode.keySet());
// update running nodes resources
updateResourceMappings(before, after);
} finally {
writeLock.unlock();
}
}
/**
 * Rejects a cluster-label removal while any queue still lists one of the
 * labels as accessible; the label must be dropped from the queue first.
 *
 * @throws IOException naming the first offending label/queue pair found
 */
protected void checkRemoveFromClusterNodeLabelsOfQueue(
    Collection<String> labelsToRemove) throws IOException {
  for (String rawLabel : labelsToRemove) {
    String label = normalizeLabel(rawLabel);
    for (Entry<String, Queue> entry : queueCollections.entrySet()) {
      if (!entry.getValue().accessibleNodeLabels.contains(label)) {
        continue;
      }
      throw new IOException("Cannot remove label=" + label
          + ", because queue=" + entry.getKey() + " is using this label. "
          + "Please remove label on queue before remove the label");
    }
  }
}
/**
 * Removes labels from the cluster, first verifying (outside of recovery)
 * that no queue still uses them, then reconciling per-queue resources by
 * diffing the node map before and after. Guarded by the write lock.
 */
@Override
public void removeFromClusterNodeLabels(Collection<String> labelsToRemove)
throws IOException {
writeLock.lock();
try {
if (!isInitNodeLabelStoreInProgress()) {
// We cannot remove node labels from collection when some queue(s) are
// using any of them.
// We will not do remove when recovery is in progress. During
// service starting, we will replay edit logs and recover state. It is
// possible that a history operation removed some labels which were not
// used by some queues in the past but are used by current queues.
checkRemoveFromClusterNodeLabelsOfQueue(labelsToRemove);
}
// copy before NMs
Map<String, Host> before = cloneNodeMap();
super.removeFromClusterNodeLabels(labelsToRemove);
updateResourceMappings(before, nodeCollections);
} finally {
writeLock.unlock();
}
}
/**
 * Adds new labels to the cluster-level label collection under the write
 * lock. No resource reconciliation is needed here because no node carries a
 * freshly added label yet.
 *
 * NOTE: the method name typo ("Cluser") is inherited from the superclass
 * API and cannot be fixed here without breaking callers.
 */
@Override
public void addToCluserNodeLabels(Collection<NodeLabel> labels)
    throws IOException {
  writeLock.lock();
  try {
    super.addToCluserNodeLabels(labels);
  } finally {
    writeLock.unlock();
  }
}
/**
 * Removes labels from the given nodes and reconciles per-label/per-queue
 * resources by diffing host snapshots taken before and after the change.
 */
@Override
public void
    removeLabelsFromNode(Map<NodeId, Set<String>> removeLabelsFromNode)
    throws IOException {
  writeLock.lock();
  try {
    // get nodesCollection before edition
    Map<String, Host> before =
        cloneNodeMap(removeLabelsFromNode.keySet());
    super.removeLabelsFromNode(removeLabelsFromNode);
    // get nodesCollection after edition
    Map<String, Host> after = cloneNodeMap(removeLabelsFromNode.keySet());
    // update running nodes resources
    updateResourceMappings(before, after);
  } finally {
    writeLock.unlock();
  }
}
/**
 * Replaces the labels on the given nodes. Requests that would not change a
 * node's current labels are filtered out first; if nothing would change,
 * the method returns early without touching state.
 */
@Override
public void replaceLabelsOnNode(Map<NodeId, Set<String>> replaceLabelsToNode)
    throws IOException {
  writeLock.lock();
  try {
    Map<NodeId, Set<String>> effectiveModifiedLabelMappings =
        getModifiedNodeLabelsMappings(replaceLabelsToNode);
    if(effectiveModifiedLabelMappings.isEmpty()) {
      LOG.info("No Modified Node label Mapping to replace");
      // early return still releases the lock via the finally block
      return;
    }
    // get nodesCollection before edition
    Map<String, Host> before =
        cloneNodeMap(effectiveModifiedLabelMappings.keySet());
    super.replaceLabelsOnNode(effectiveModifiedLabelMappings);
    // get nodesCollection after edition
    Map<String, Host> after =
        cloneNodeMap(effectiveModifiedLabelMappings.keySet());
    // update running nodes resources
    updateResourceMappings(before, after);
  } finally {
    writeLock.unlock();
  }
}
/**
 * Filters a replace-labels request down to the entries that would actually
 * change a node's current labels, so callers only process real changes.
 *
 * @param replaceLabelsToNode requested node -> labels mapping
 * @return the subset of entries whose labels differ from the current state
 */
private Map<NodeId, Set<String>> getModifiedNodeLabelsMappings(
    Map<NodeId, Set<String>> replaceLabelsToNode) {
  Map<NodeId, Set<String>> changed = new HashMap<>();
  for (Entry<NodeId, Set<String>> request : replaceLabelsToNode.entrySet()) {
    NodeId nodeId = request.getKey();
    Set<String> requestedLabels = request.getValue();
    Host host = nodeCollections.get(nodeId.getHost());
    if (host == null) {
      // Unknown host: treat as a change so the mapping gets created.
      changed.put(nodeId, requestedLabels);
      continue;
    }
    // Wildcard port addresses the whole host; otherwise look up the NM.
    Set<String> currentLabels = null;
    if (nodeId.getPort() == WILDCARD_PORT) {
      currentLabels = host.labels;
    } else {
      Node nm = host.nms.get(nodeId);
      if (nm != null) {
        currentLabels = nm.labels;
      }
    }
    boolean unchanged = currentLabels != null
        && currentLabels.size() == requestedLabels.size()
        && currentLabels.containsAll(requestedLabels);
    if (!unchanged) {
      changed.put(nodeId, requestedLabels);
    }
  }
  return changed;
}
/*
 * Following methods are used for setting if a node is up and running, and it
 * will update running nodes resource
 */
/**
 * Marks a node as running with the given resource, creating the host/node
 * entries if needed, registers the node under each of its labels, and
 * reconciles per-label/per-queue resource accounting.
 */
public void activateNode(NodeId nodeId, Resource resource) {
  writeLock.lock();
  try {
    // save if we have a node before
    Map<String, Host> before = cloneNodeMap(ImmutableSet.of(nodeId));
    createHostIfNonExisted(nodeId.getHost());
    try {
      createNodeIfNonExisted(nodeId);
    } catch (IOException e) {
      // The host was just created above, so a missing host here indicates an
      // internal inconsistency; abort without touching resource accounting.
      LOG.error("This shouldn't happen, cannot get host in nodeCollection"
          + " associated to the node being activated");
      return;
    }
    Node nm = getNMInNodeSet(nodeId);
    nm.resource = resource;
    nm.running = true;
    // Add node in labelsCollection
    Set<String> labelsForNode = getLabelsByNode(nodeId);
    if (labelsForNode != null) {
      for (String label : labelsForNode) {
        RMNodeLabel labelInfo = labelCollections.get(label);
        if(labelInfo != null) {
          labelInfo.addNodeId(nodeId);
        }
      }
    }
    // get the node after edition
    Map<String, Host> after = cloneNodeMap(ImmutableSet.of(nodeId));
    updateResourceMappings(before, after);
  } finally {
    writeLock.unlock();
  }
}
/*
 * Following methods are used for setting if a node unregistered to RM
 */
/**
 * Marks a node as no longer running. If the node's labels were set
 * explicitly, the node entry is removed entirely (along with its
 * label->node relations); otherwise the node is kept but flagged as not
 * running with zero resource. Resource accounting is reconciled either way.
 */
public void deactivateNode(NodeId nodeId) {
  writeLock.lock();
  try {
    // save if we have a node before
    Map<String, Host> before = cloneNodeMap(ImmutableSet.of(nodeId));
    Node nm = getNMInNodeSet(nodeId);
    if (null != nm) {
      if (isNodeLabelExplicit(nm.nodeId)) {
        // When node deactivated, remove the nm from node collection if no
        // labels explicitly set for this particular nm
        // Save labels first, we need to remove label->nodes relation later
        Set<String> savedNodeLabels = getLabelsOnNode(nodeId);
        // Remove this node in nodes collection
        nodeCollections.get(nodeId.getHost()).nms.remove(nodeId);
        // Remove this node in labels->node
        removeNodeFromLabels(nodeId, savedNodeLabels);
      } else {
        // set nm is not running, and its resource = 0
        nm.running = false;
        nm.resource = Resource.newInstance(0, 0);
      }
    }
    // get the node after edition
    Map<String, Host> after = cloneNodeMap(ImmutableSet.of(nodeId));
    updateResourceMappings(before, after);
  } finally {
    writeLock.unlock();
  }
}
/**
 * Updates a node's resource by deactivating and re-activating it with the
 * new value.
 *
 * NOTE(review): deactivateNode and activateNode each take the write lock
 * separately, so this pair of operations is not atomic — another thread
 * could observe the node deactivated in between. TODO confirm whether
 * callers already serialize these updates.
 */
public void updateNodeResource(NodeId node, Resource newResource) {
  deactivateNode(node);
  activateNode(node, newResource);
}
/**
 * Rebuilds the queue collection from scratch using the given
 * queue -> accessible-labels mapping, and recomputes each queue's
 * accessible resource from the currently running nodes. A queue whose label
 * set contains ANY is tracked with an empty accessible-label set (and its
 * resource stays at its initial value — see getQueueResource, which returns
 * the whole cluster resource for ANY queues).
 */
public void reinitializeQueueLabels(Map<String, Set<String>> queueToLabels) {
  writeLock.lock();
  try {
    // clear before set
    this.queueCollections.clear();
    for (Entry<String, Set<String>> entry : queueToLabels.entrySet()) {
      String queue = entry.getKey();
      Queue q = new Queue();
      this.queueCollections.put(queue, q);
      Set<String> labels = entry.getValue();
      if (labels.contains(ANY)) {
        // ANY queues are not resource-tracked per label.
        continue;
      }
      q.accessibleNodeLabels.addAll(labels);
      // Sum the resources of every running node this queue can use.
      for (Host host : nodeCollections.values()) {
        for (Entry<NodeId, Node> nentry : host.nms.entrySet()) {
          NodeId nodeId = nentry.getKey();
          Node nm = nentry.getValue();
          if (nm.running && isNodeUsableByQueue(getLabelsByNode(nodeId), q)) {
            Resources.addTo(q.resource, nm.resource);
          }
        }
      }
    }
  } finally {
    writeLock.unlock();
  }
}
/**
 * Returns the resource accessible to the named queue: the full cluster
 * resource when the queue can access ANY label, the queue's tracked
 * resource otherwise, or none when the queue is unknown.
 */
public Resource getQueueResource(String queueName, Set<String> queueLabels,
    Resource clusterResource) {
  readLock.lock();
  try {
    if (queueLabels.contains(ANY)) {
      return clusterResource;
    }
    Queue queue = queueCollections.get(queueName);
    return (queue == null) ? Resources.none() : queue.resource;
  } finally {
    readLock.unlock();
  }
}
/*
 * Get active node count based on label.
 */
/**
 * Returns the number of active NMs carrying the given label; 0 when the
 * label is null or unknown.
 */
public int getActiveNMCountPerLabel(String label) {
  if (label == null) {
    return 0;
  }
  readLock.lock();
  try {
    RMNodeLabel info = labelCollections.get(label);
    if (info == null) {
      return 0;
    }
    return info.getNumActiveNMs();
  } finally {
    readLock.unlock();
  }
}
/**
 * Returns an unmodifiable view of the labels currently on the given node.
 *
 * NOTE(review): if getLabelsByNode() can return null for an unknown node,
 * Collections.unmodifiableSet would throw NPE here — TODO confirm the
 * helper's contract.
 */
public Set<String> getLabelsOnNode(NodeId nodeId) {
  readLock.lock();
  try {
    Set<String> nodeLabels = getLabelsByNode(nodeId);
    return Collections.unmodifiableSet(nodeLabels);
  } finally {
    readLock.unlock();
  }
}
/**
 * Returns whether the given label exists in the cluster. The empty label
 * (NO_LABEL) is always considered present; null is never present.
 */
public boolean containsNodeLabel(String label) {
  readLock.lock();
  try {
    if (label == null) {
      return false;
    }
    return label.isEmpty() || labelCollections.containsKey(label);
  } finally {
    readLock.unlock();
  }
}
/**
 * Builds a deep-ish snapshot of the hosts referenced by the given NodeIds:
 * each referenced Host is copied, and within it only the NMs addressed by
 * the NodeIds are copied (all NMs of a host when the NodeId uses the
 * wildcard port, just the single matching NM otherwise). Hosts not present
 * in nodeCollections are skipped.
 */
private Map<String, Host> cloneNodeMap(Set<NodeId> nodesToCopy) {
  Map<String, Host> map = new HashMap<String, Host>();
  for (NodeId nodeId : nodesToCopy) {
    if (!map.containsKey(nodeId.getHost())) {
      Host originalN = nodeCollections.get(nodeId.getHost());
      if (null == originalN) {
        // Unknown host: nothing to snapshot.
        continue;
      }
      // Copy the host but start with an empty NM set; NMs are added below
      // so the snapshot only contains the NMs actually addressed.
      Host n = originalN.copy();
      n.nms.clear();
      map.put(nodeId.getHost(), n);
    }
    Host n = map.get(nodeId.getHost());
    if (WILDCARD_PORT == nodeId.getPort()) {
      // Wildcard port means "every NM on this host".
      for (Entry<NodeId, Node> entry : nodeCollections
          .get(nodeId.getHost()).nms.entrySet()) {
        n.nms.put(entry.getKey(), entry.getValue().copy());
      }
    } else {
      Node nm = getNMInNodeSet(nodeId);
      if (null != nm) {
        n.nms.put(nodeId, nm.copy());
      }
    }
  }
  return map;
}
/**
 * Reconciles per-label and per-queue resource accounting between two host
 * snapshots: resources of every NM present in {@code before} are subtracted
 * from its old labels/queues, and resources of every NM present in
 * {@code after} are added to its new labels/queues. Finally the scheduler
 * is notified (via the RM dispatcher, when available) of the new
 * node -> labels mapping.
 */
@SuppressWarnings("unchecked")
private void updateResourceMappings(Map<String, Host> before,
    Map<String, Host> after) {
  // Union of all NMs appearing in either snapshot.
  Set<NodeId> allNMs = new HashSet<NodeId>();
  for (Entry<String, Host> entry : before.entrySet()) {
    allNMs.addAll(entry.getValue().nms.keySet());
  }
  for (Entry<String, Host> entry : after.entrySet()) {
    allNMs.addAll(entry.getValue().nms.keySet());
  }
  // Map used to notify RM
  Map<NodeId, Set<String>> newNodeToLabelsMap =
      new HashMap<NodeId, Set<String>>();
  // traverse all nms
  for (NodeId nodeId : allNMs) {
    Node oldNM;
    if ((oldNM = getNMInNodeSet(nodeId, before, true)) != null) {
      Set<String> oldLabels = getLabelsByNode(nodeId, before);
      // no label in the past
      if (oldLabels.isEmpty()) {
        // update labels
        RMNodeLabel label = labelCollections.get(NO_LABEL);
        label.removeNode(oldNM.resource);
        // update queues, all queue can access this node
        for (Queue q : queueCollections.values()) {
          Resources.subtractFrom(q.resource, oldNM.resource);
        }
      } else {
        // update labels
        for (String labelName : oldLabels) {
          RMNodeLabel label = labelCollections.get(labelName);
          if (null == label) {
            continue;
          }
          label.removeNode(oldNM.resource);
        }
        // update queues, only queue can access this node will be subtract
        for (Queue q : queueCollections.values()) {
          if (isNodeUsableByQueue(oldLabels, q)) {
            Resources.subtractFrom(q.resource, oldNM.resource);
          }
        }
      }
    }
    Node newNM;
    if ((newNM = getNMInNodeSet(nodeId, after, true)) != null) {
      Set<String> newLabels = getLabelsByNode(nodeId, after);
      newNodeToLabelsMap.put(nodeId, ImmutableSet.copyOf(newLabels));
      // no label now
      if (newLabels.isEmpty()) {
        // update labels
        RMNodeLabel label = labelCollections.get(NO_LABEL);
        label.addNode(newNM.resource);
        // update queues, all queue can access this node
        for (Queue q : queueCollections.values()) {
          Resources.addTo(q.resource, newNM.resource);
        }
      } else {
        // update labels
        for (String labelName : newLabels) {
          RMNodeLabel label = labelCollections.get(labelName);
          // Guard against a label missing from labelCollections (e.g. removed
          // concurrently from the cluster); mirrors the null check in the
          // "before" branch above — previously this could NPE.
          if (null == label) {
            continue;
          }
          label.addNode(newNM.resource);
        }
        // update queues, only queue can access this node will be added
        for (Queue q : queueCollections.values()) {
          if (isNodeUsableByQueue(newLabels, q)) {
            Resources.addTo(q.resource, newNM.resource);
          }
        }
      }
    }
  }
  // Notify RM
  if (rmContext != null && rmContext.getDispatcher() != null) {
    rmContext.getDispatcher().getEventHandler().handle(
        new NodeLabelsUpdateSchedulerEvent(newNodeToLabelsMap));
  }
}
/**
 * Returns the total resource of nodes carrying the given label, or none
 * when the label is unknown.
 *
 * NOTE(review): the NO_LABEL fast path reads noNodeLabel outside the read
 * lock — presumably safe because noNodeLabel is a stable reference; TODO
 * confirm.
 */
public Resource getResourceByLabel(String label, Resource clusterResource) {
  label = normalizeLabel(label);
  if (label.equals(NO_LABEL)) {
    return noNodeLabel.getResource();
  }
  readLock.lock();
  try {
    RMNodeLabel nodeLabel = labelCollections.get(label);
    if (nodeLabel == null) {
      return Resources.none();
    }
    return nodeLabel.getResource();
  } finally {
    readLock.unlock();
  }
}
/**
 * Returns whether a queue may use a node with the given labels: a node
 * without labels (or with only the empty label) is usable by every queue;
 * otherwise the queue must be able to access at least one of the labels.
 */
private boolean isNodeUsableByQueue(Set<String> nodeLabels, Queue q) {
  // node without any labels can be accessed by any queue
  if (nodeLabels == null || nodeLabels.isEmpty()
      || (nodeLabels.size() == 1 && nodeLabels.contains(NO_LABEL))) {
    return true;
  }
  // Any overlap between node labels and the queue's accessible labels
  // makes the node usable.
  return !Collections.disjoint(nodeLabels, q.accessibleNodeLabels);
}
/**
 * Snapshots every known host by issuing one wildcard-port NodeId per host
 * name and delegating to {@link #cloneNodeMap(Set)}.
 */
private Map<String, Host> cloneNodeMap() {
  Set<NodeId> allHosts = new HashSet<NodeId>();
  for (String hostName : nodeCollections.keySet()) {
    allHosts.add(NodeId.newInstance(hostName, WILDCARD_PORT));
  }
  return cloneNodeMap(allHosts);
}
/**
 * Returns whether the given user may invoke label-mutation operations;
 * only admins (per the configured authorizer) are allowed.
 */
public boolean checkAccess(UserGroupInformation user) {
  // make sure only admin can invoke this method
  return authorizer.isAdmin(user);
}
/**
 * Injects the RM context used to dispatch node-label update events to the
 * scheduler (see updateResourceMappings).
 */
public void setRMContext(RMContext rmContext) {
  this.rmContext = rmContext;
}
/**
 * Returns a sorted list of defensive copies of all label infos, so callers
 * cannot mutate internal state.
 */
public List<RMNodeLabel> pullRMNodeLabelsInfo() {
  readLock.lock();
  try {
    List<RMNodeLabel> snapshot = new ArrayList<RMNodeLabel>();
    for (RMNodeLabel label : labelCollections.values()) {
      snapshot.add(label.getCopy());
    }
    Collections.sort(snapshot);
    return snapshot;
  } finally {
    readLock.unlock();
  }
}
}
| |
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.security.KeyPairGeneratorSpec;
import android.security.keystore.KeyGenParameterSpec;
import android.security.keystore.KeyProperties;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.preference.PreferenceManager;
import android.util.Base64;
import org.matrix.androidsdk.core.model.SecretKeyAndVersion;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.spec.AlgorithmParameterSpec;
import java.security.spec.RSAKeyGenParameterSpec;
import java.util.Calendar;
import java.util.zip.GZIPOutputStream;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
import javax.crypto.CipherOutputStream;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.KeyGenerator;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.GCMParameterSpec;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import javax.security.auth.x500.X500Principal;
public class CompatUtil {
    private static final String TAG = CompatUtil.class.getSimpleName();
    private static final String ANDROID_KEY_STORE_PROVIDER = "AndroidKeyStore";
    private static final String AES_GCM_CIPHER_TYPE = "AES/GCM/NoPadding";
    private static final int AES_GCM_KEY_SIZE_IN_BITS = 128;
    private static final int AES_GCM_IV_LENGTH = 12;
    private static final String AES_LOCAL_PROTECTION_KEY_ALIAS = "aes_local_protection";
    private static final String RSA_WRAP_LOCAL_PROTECTION_KEY_ALIAS = "rsa_wrap_local_protection";
    private static final String RSA_WRAP_CIPHER_TYPE = "RSA/NONE/PKCS1Padding";
    private static final String AES_WRAPPED_PROTECTION_KEY_SHARED_PREFERENCE = "aes_wrapped_local_protection";
    private static final String SHARED_KEY_ANDROID_VERSION_WHEN_KEY_HAS_BEEN_GENERATED = "android_version_when_key_has_been_generated";
    // Cached key plus the Android SDK version it was generated on; the version
    // decides how the cipher must be initialized (see createCipherOutputStream).
    private static SecretKeyAndVersion sSecretKeyAndVersion;
    private static SecureRandom sPrng;

    /**
     * Create a GZIPOutputStream instance
     * Special treatment on KitKat device, force the syncFlush param to false
     * Before Kitkat, this param does not exist and after Kitkat it is set to false by default
     *
     * @param outputStream the output stream
     */
    public static GZIPOutputStream createGzipOutputStream(OutputStream outputStream) throws IOException {
        if (Build.VERSION.SDK_INT == Build.VERSION_CODES.KITKAT) {
            return new GZIPOutputStream(outputStream, false);
        } else {
            return new GZIPOutputStream(outputStream);
        }
    }

    /**
     * Returns the AES key used for local storage encryption/decryption with AES/GCM.
     * The key is created if it does not exist already in the keystore.
     * From Marshmallow, this key is generated and operated directly from the android keystore.
     * From KitKat and before Marshmallow, this key is stored in the application shared preferences
     * wrapped by a RSA key generated and operated directly from the android keystore.
     *
     * @param context the context holding the application shared preferences
     */
    @RequiresApi(Build.VERSION_CODES.KITKAT)
    private static synchronized SecretKeyAndVersion getAesGcmLocalProtectionKey(Context context)
            throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException,
            NoSuchProviderException, InvalidAlgorithmParameterException, NoSuchPaddingException,
            InvalidKeyException, IllegalBlockSizeException, UnrecoverableKeyException {
        if (sSecretKeyAndVersion == null) {
            final KeyStore keyStore = KeyStore.getInstance(ANDROID_KEY_STORE_PROVIDER);
            keyStore.load(null);
            Log.i(TAG, "Loading local protection key");
            SecretKey key;
            final SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
            // Get the version of Android when the key has been generated, default to the current version of the system. In this case, the
            // key will be generated
            final int androidVersionWhenTheKeyHasBeenGenerated
                    = sharedPreferences.getInt(SHARED_KEY_ANDROID_VERSION_WHEN_KEY_HAS_BEEN_GENERATED, Build.VERSION.SDK_INT);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
                if (keyStore.containsAlias(AES_LOCAL_PROTECTION_KEY_ALIAS)) {
                    Log.i(TAG, "AES local protection key found in keystore");
                    key = (SecretKey) keyStore.getKey(AES_LOCAL_PROTECTION_KEY_ALIAS, null);
                } else {
                    // Check if a key has been created on version < M (in case of OS upgrade)
                    key = readKeyApiL(sharedPreferences, keyStore);
                    if (key == null) {
                        Log.i(TAG, "Generating AES key with keystore");
                        final KeyGenerator generator = KeyGenerator.getInstance(KeyProperties.KEY_ALGORITHM_AES, ANDROID_KEY_STORE_PROVIDER);
                        generator.init(
                                new KeyGenParameterSpec.Builder(AES_LOCAL_PROTECTION_KEY_ALIAS,
                                        KeyProperties.PURPOSE_ENCRYPT | KeyProperties.PURPOSE_DECRYPT)
                                        .setBlockModes(KeyProperties.BLOCK_MODE_GCM)
                                        .setKeySize(AES_GCM_KEY_SIZE_IN_BITS)
                                        .setEncryptionPaddings(KeyProperties.ENCRYPTION_PADDING_NONE)
                                        .build());
                        key = generator.generateKey();
                        // Remember the OS version used, so decryption later knows
                        // which cipher-init path to take.
                        sharedPreferences.edit()
                                .putInt(SHARED_KEY_ANDROID_VERSION_WHEN_KEY_HAS_BEEN_GENERATED, Build.VERSION.SDK_INT)
                                .apply();
                    }
                }
            } else {
                key = readKeyApiL(sharedPreferences, keyStore);
                if (key == null) {
                    Log.i(TAG, "Generating RSA key pair with keystore");
                    final KeyPairGenerator generator = KeyPairGenerator.getInstance(KeyProperties.KEY_ALGORITHM_RSA, ANDROID_KEY_STORE_PROVIDER);
                    final Calendar start = Calendar.getInstance();
                    final Calendar end = Calendar.getInstance();
                    end.add(Calendar.YEAR, 10);
                    generator.initialize(
                            new KeyPairGeneratorSpec.Builder(context)
                                    .setAlgorithmParameterSpec(new RSAKeyGenParameterSpec(2048, RSAKeyGenParameterSpec.F4))
                                    .setAlias(RSA_WRAP_LOCAL_PROTECTION_KEY_ALIAS)
                                    .setSubject(new X500Principal("CN=matrix-android-sdk"))
                                    .setStartDate(start.getTime())
                                    .setEndDate(end.getTime())
                                    .setSerialNumber(BigInteger.ONE)
                                    .build());
                    final KeyPair keyPair = generator.generateKeyPair();
                    Log.i(TAG, "Generating wrapped AES key");
                    // Generate a random AES key locally and store it RSA-wrapped
                    // in shared preferences (the RSA key lives in the keystore).
                    final byte[] aesKeyRaw = new byte[AES_GCM_KEY_SIZE_IN_BITS / Byte.SIZE];
                    getPrng().nextBytes(aesKeyRaw);
                    key = new SecretKeySpec(aesKeyRaw, "AES");
                    final Cipher cipher = Cipher.getInstance(RSA_WRAP_CIPHER_TYPE);
                    cipher.init(Cipher.WRAP_MODE, keyPair.getPublic());
                    byte[] wrappedAesKey = cipher.wrap(key);
                    sharedPreferences.edit()
                            .putString(AES_WRAPPED_PROTECTION_KEY_SHARED_PREFERENCE, Base64.encodeToString(wrappedAesKey, 0))
                            .putInt(SHARED_KEY_ANDROID_VERSION_WHEN_KEY_HAS_BEEN_GENERATED, Build.VERSION.SDK_INT)
                            .apply();
                }
            }
            sSecretKeyAndVersion = new SecretKeyAndVersion(key, androidVersionWhenTheKeyHasBeenGenerated);
        }
        return sSecretKeyAndVersion;
    }

    /**
     * Read the key, which may have been stored when the OS was < M
     *
     * @param sharedPreferences shared pref
     * @param keyStore          key store
     * @return the key if it exists or null
     */
    @Nullable
    private static SecretKey readKeyApiL(SharedPreferences sharedPreferences, KeyStore keyStore)
            throws KeyStoreException, NoSuchPaddingException, NoSuchAlgorithmException, InvalidKeyException, UnrecoverableKeyException {
        final String wrappedAesKeyString = sharedPreferences.getString(AES_WRAPPED_PROTECTION_KEY_SHARED_PREFERENCE, null);
        if (wrappedAesKeyString != null && keyStore.containsAlias(RSA_WRAP_LOCAL_PROTECTION_KEY_ALIAS)) {
            Log.i(TAG, "RSA + wrapped AES local protection keys found in keystore");
            final PrivateKey privateKey = (PrivateKey) keyStore.getKey(RSA_WRAP_LOCAL_PROTECTION_KEY_ALIAS, null);
            final byte[] wrappedAesKey = Base64.decode(wrappedAesKeyString, 0);
            final Cipher cipher = Cipher.getInstance(RSA_WRAP_CIPHER_TYPE);
            cipher.init(Cipher.UNWRAP_MODE, privateKey);
            return (SecretKey) cipher.unwrap(wrappedAesKey, "AES", Cipher.SECRET_KEY);
        }
        // Key does not exist
        return null;
    }

    /**
     * Returns the unique SecureRandom instance shared for all local storage encryption operations.
     */
    private static SecureRandom getPrng() {
        if (sPrng == null) {
            sPrng = new SecureRandom();
        }
        return sPrng;
    }

    /**
     * Create a CipherOutputStream instance.
     * Before Kitkat, this method will return out as local storage encryption is not implemented for
     * devices before KitKat.
     *
     * @param out     the output stream
     * @param context the context holding the application shared preferences
     */
    @Nullable
    public static OutputStream createCipherOutputStream(OutputStream out, Context context)
            throws IOException, CertificateException, NoSuchAlgorithmException,
            UnrecoverableKeyException, InvalidKeyException, InvalidAlgorithmParameterException,
            NoSuchPaddingException, NoSuchProviderException, KeyStoreException, IllegalBlockSizeException {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
            return out;
        }
        final SecretKeyAndVersion keyAndVersion = getAesGcmLocalProtectionKey(context);
        if (keyAndVersion == null || keyAndVersion.getSecretKey() == null) {
            throw new KeyStoreException();
        }
        final Cipher cipher = Cipher.getInstance(AES_GCM_CIPHER_TYPE);
        byte[] iv;
        if (keyAndVersion.getAndroidVersionWhenTheKeyHasBeenGenerated() >= Build.VERSION_CODES.M) {
            // Keystore-backed keys generate their own IV on init.
            cipher.init(Cipher.ENCRYPT_MODE, keyAndVersion.getSecretKey());
            iv = cipher.getIV();
        } else {
            // Software keys (pre-M) need an explicit random IV.
            iv = new byte[AES_GCM_IV_LENGTH];
            getPrng().nextBytes(iv);
            cipher.init(Cipher.ENCRYPT_MODE, keyAndVersion.getSecretKey(), new IvParameterSpec(iv));
        }
        if (iv.length != AES_GCM_IV_LENGTH) {
            Log.e(TAG, "Invalid IV length " + iv.length);
            return null;
        }
        // Stream layout: [1 byte IV length][IV bytes][ciphertext].
        out.write(iv.length);
        out.write(iv);
        return new CipherOutputStream(out, cipher);
    }

    /**
     * Create a CipherInputStream instance.
     * Before Kitkat, this method will return `in` because local storage encryption is not implemented for devices before KitKat.
     * Warning, if `in` is not an encrypted stream, it's up to the caller to close and reopen `in`, because the stream has been read.
     *
     * @param in      the input stream
     * @param context the context holding the application shared preferences
     * @return in, or the created InputStream, or null if the InputStream `in` does not contain encrypted data
     */
    @Nullable
    public static InputStream createCipherInputStream(InputStream in, Context context)
            throws NoSuchPaddingException, NoSuchAlgorithmException, CertificateException,
            InvalidKeyException, KeyStoreException, UnrecoverableKeyException, IllegalBlockSizeException,
            NoSuchProviderException, InvalidAlgorithmParameterException, IOException {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
            return in;
        }
        final int iv_len = in.read();
        if (iv_len != AES_GCM_IV_LENGTH) {
            Log.e(TAG, "Invalid IV length " + iv_len);
            return null;
        }
        final byte[] iv = new byte[AES_GCM_IV_LENGTH];
        // InputStream.read(byte[]) may return fewer bytes than requested, so
        // loop until the IV buffer is fully populated (a single read() call
        // could previously leave the IV partially filled and break decryption).
        int offset = 0;
        while (offset < iv.length) {
            final int read = in.read(iv, offset, iv.length - offset);
            if (read == -1) {
                Log.e(TAG, "Unexpected end of stream while reading the IV");
                return null;
            }
            offset += read;
        }
        final Cipher cipher = Cipher.getInstance(AES_GCM_CIPHER_TYPE);
        final SecretKeyAndVersion keyAndVersion = getAesGcmLocalProtectionKey(context);
        if (keyAndVersion == null || keyAndVersion.getSecretKey() == null) {
            throw new KeyStoreException();
        }
        AlgorithmParameterSpec spec;
        if (keyAndVersion.getAndroidVersionWhenTheKeyHasBeenGenerated() >= Build.VERSION_CODES.M) {
            // GCM tag length is 128 bits (same constant as the key size here).
            spec = new GCMParameterSpec(AES_GCM_KEY_SIZE_IN_BITS, iv);
        } else {
            spec = new IvParameterSpec(iv);
        }
        cipher.init(Cipher.DECRYPT_MODE, keyAndVersion.getSecretKey(), spec);
        return new CipherInputStream(in, cipher);
    }
}
| |
/*L
* Copyright SAIC, Ellumen and RSNA (CTP)
*
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/national-biomedical-image-archive/LICENSE.txt for details.
*/
package gov.nih.nci.nbia.textsupport;
import gov.nih.nci.nbia.dao.AbstractDAO;
import gov.nih.nci.nbia.internaldomain.CTImage;
import gov.nih.nci.nbia.internaldomain.GeneralEquipment;
import gov.nih.nci.nbia.internaldomain.GeneralImage;
import gov.nih.nci.nbia.internaldomain.GeneralSeries;
import gov.nih.nci.nbia.internaldomain.MRImage;
import gov.nih.nci.nbia.internaldomain.Patient;
import gov.nih.nci.nbia.internaldomain.Study;
import gov.nih.nci.nbia.internaldomain.TrialDataProvenance;
import gov.nih.nci.nbia.util.SpringApplicationContext;
import java.io.File;
import java.util.*;
import org.apache.log4j.Logger;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
public class TextSupportDAOImpl extends AbstractDAO
implements TextSupportDAO
{
static Logger log = Logger.getLogger(TextSupportDAOImpl.class);
// Native SQL used to find patient ids affected by submissions, QC-visibility
// changes and deletions inside a [:low, :high] timestamp window.
private final static String PATIENT_QUERY="select distinct patient_id from submission_history where submission_timestamp between :low and :high";
private final static String PATIENT_VISIBILITY_QUERY="select distinct patient_id from general_series where series_instance_uid in (select series_instance_uid from qc_status_history where history_timestamp between :low and :high)";
private final static String PATIENT_SERIES_DELETED_QUERY="select distinct patient_id from general_series where series_instance_uid in (select data_id from deletion_audit_trail where data_type='GENERAL SERIES' and time_stamp between :low and :high)";
private final static String PATIENT_STUDY_DELETED_QUERY="select distinct patient_id from patient where patient_pk_id in(select patient_pk_id from study where study_instance_uid in (select data_id from deletion_audit_trail where data_type='STUDY' and time_stamp between :low and :high))";
private final static String PATIENT_DELETED_QUERY="select distinct data_id from deletion_audit_trail where data_type='PATIENT' and time_stamp between :low and :high";
// Reindex variant: only an upper bound (used when no previous index run exists).
private final static String PATIENT_QUERY_REINDEX="select distinct patient_id from submission_history where submission_timestamp <= :high";
private final static String PATIENT_CATEGORY_QUERY="select distinct patient_id from patient where patient_pk_id in (select patient_pk_id from trial_data_provenance where project=:project)";
// Latest submission timestamp; aliased "d" so it can be read as a scalar.
private final static String MAX_TIME_QUERY ="select max(submission_timestamp) d from submission_history";
/**
 * Returns the latest submission timestamp recorded in submission_history,
 * or null when the table is empty or the query fails (callers treat null as
 * "nothing to do").
 */
@Transactional(propagation=Propagation.REQUIRED)
public Date getMaxTimeStamp()
{
    Date returnValue=null;
    try {
        List<Object> rs = this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(MAX_TIME_QUERY).addScalar("d", Hibernate.TIMESTAMP).list();
        if (rs==null || rs.size()<1) return returnValue; //nothing to do
        returnValue=(Date)rs.get(0);
    } catch (Exception e) {
        // Log instead of dumping to stderr; deliberately not rethrown because
        // callers fall back to the "no data" (null) path.
        log.error("Error retrieving max submission timestamp", e);
    }
    return returnValue;
}
/**
 * Returns the ids of patients with submissions in the (low, high] window, or
 * everything submitted up to {@code high} when {@code low} is null (full
 * reindex). Returns an empty list on error or when nothing matched.
 */
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getUpdatedPatients(Date high, Date low)
{
    List<Object> returnValue = new ArrayList<Object>();
    log.info("high-"+high+" low-"+low);
    try {
        if (low == null)
        {
            // No lower bound: full reindex of everything up to 'high'.
            returnValue= this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(PATIENT_QUERY_REINDEX)
                    .setTimestamp("high", high).list();
        } else
        {
            returnValue= this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(PATIENT_QUERY)
                    .setTimestamp("low", low)
                    .setTimestamp("high", high).list();
        }
        if (returnValue.size()==0) {
            log.info("No new items in submission log");
            return returnValue; //nothing to do
        }
    } catch (HibernateException e) {
        // Log instead of printStackTrace; callers treat an empty list as
        // "nothing to do", so the error is deliberately not rethrown.
        log.error("Error querying updated patients", e);
    }
    return returnValue;
}
/**
 * Returns the ids of patients whose series had QC-visibility changes in the
 * (low, high] window. Returns an empty list on error or when nothing matched.
 */
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getVisibilityUpdatedPatients(Date high, Date low)
{
    List<Object> returnValue = new ArrayList<Object>();
    log.info("high-"+high+" low-"+low);
    try {
        returnValue= this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(PATIENT_VISIBILITY_QUERY)
                .setTimestamp("low", low)
                .setTimestamp("high", high).list();
        if (returnValue.size()==0) {
            log.info("No changes to items in the visibility log");
            return returnValue; //nothing to do
        }
    } catch (HibernateException e) {
        // Log instead of printStackTrace; empty list signals "nothing to do".
        log.error("Error querying visibility-updated patients", e);
    }
    return returnValue;
}
/**
 * Returns the ids of patients whose series were deleted in the (low, high]
 * window, per the deletion audit trail. Returns an empty list on error or
 * when nothing matched.
 */
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getDeletedSeriesPatients(Date high, Date low)
{
    List<Object> returnValue = new ArrayList<Object>();
    log.info("high-"+high+" low-"+low);
    try {
        returnValue= this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(PATIENT_SERIES_DELETED_QUERY)
                .setTimestamp("low", low)
                .setTimestamp("high", high).list();
        if (returnValue.size()==0) {
            log.info("No new deleted series");
            return returnValue; //nothing to do
        }
    } catch (HibernateException e) {
        // Log instead of printStackTrace; empty list signals "nothing to do".
        log.error("Error querying patients with deleted series", e);
    }
    return returnValue;
}
/**
 * Returns the ids of patients whose studies were deleted in the (low, high]
 * window, per the deletion audit trail. Returns an empty list on error or
 * when nothing matched.
 */
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getDeletedStudyPatients(Date high, Date low)
{
    List<Object> returnValue = new ArrayList<Object>();
    log.info("high-"+high+" low-"+low);
    try {
        returnValue= this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(PATIENT_STUDY_DELETED_QUERY)
                .setTimestamp("low", low)
                .setTimestamp("high", high).list();
        if (returnValue.size()==0) {
            log.info("No new deleted studies");
            return returnValue; //nothing to do
        }
    } catch (HibernateException e) {
        // Log instead of printStackTrace; empty list signals "nothing to do".
        log.error("Error querying patients with deleted studies", e);
    }
    return returnValue;
}
/**
 * Returns the ids of patients deleted in the (low, high] window, per the
 * deletion audit trail. Returns an empty list on error or when nothing
 * matched.
 */
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getDeletedPatients(Date high, Date low)
{
    List<Object> returnValue = new ArrayList<Object>();
    log.info("high-"+high+" low-"+low);
    try {
        returnValue= this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(PATIENT_DELETED_QUERY)
                .setTimestamp("low", low)
                .setTimestamp("high", high).list();
        if (returnValue.size()==0) {
            log.info("No new deleted patients");
            return returnValue; //nothing to do
        }
    } catch (HibernateException e) {
        // Log instead of printStackTrace; empty list signals "nothing to do".
        log.error("Error querying deleted patients", e);
    }
    return returnValue;
}
/**
 * Returns the ids of patients belonging to the given collection (project).
 * Returns an empty list on error or when the collection has no patients.
 */
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getPatientsForCollection(String collection)
{
    List<Object> returnValue = new ArrayList<Object>();
    try {
        returnValue= this.getHibernateTemplate().getSessionFactory().getCurrentSession().createSQLQuery(PATIENT_CATEGORY_QUERY)
                .setParameter("project", collection).list();
        if (returnValue.size()==0) {
            log.info("No patients for collection");
            return returnValue; //nothing to do
        }
    } catch (HibernateException e) {
        // Log instead of printStackTrace; empty list signals "nothing to do".
        log.error("Error querying patients for collection " + collection, e);
    }
    return returnValue;
}
// gave up on this hibernate would just mess up the sessions
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getPatients(String patientId)
{
    // Loads the Patient matching patientId and force-initializes its whole
    // lazy object graph (provenance -> studies -> series -> images/equipment)
    // while the session is still open, so callers outside the transaction do
    // not hit lazy-loading failures. Returns the raw query result list.
    List<Object> returnValue = new ArrayList<Object>();
    try {
        returnValue = getHibernateTemplate().getSessionFactory().getCurrentSession()
                .createQuery("from Patient as patient where patient.patientId =?")
                .setParameter(0, patientId)
                .list();
        if (returnValue.isEmpty()) return returnValue;
        Patient patient = (Patient) returnValue.get(0);
        // seems in order to deal with lazy loading it is best to iterate through the object graph
        // and access each method before returning. Must be an issue with the transaction manager,
        // but this fixes it for now.
        touchProvenance(patient.getDataProvenance());
        if (patient.getStudyCollection() != null)
        {
            for (Study study : patient.getStudyCollection())
            {
                touchStudy(study);
            }
        }
    } catch (HibernateException e) {
        // Log instead of printStackTrace() so the failure reaches the app log.
        log.error("Failed to load patient " + patientId, e);
    }
    return returnValue;
}

// Force-initializes the provenance proxy; getters are called only for their
// lazy-loading side effect.
private void touchProvenance(TrialDataProvenance trialDP)
{
    if (trialDP != null)
    {
        trialDP.getDpSiteId();
        trialDP.getDpSiteName();
        trialDP.getProject();
    }
}

// Force-initializes one Study and everything below it.
private void touchStudy(Study study)
{
    study.getId();
    study.getAdmittingDiagnosesCodeSeq();
    study.getAdmittingDiagnosesDesc();
    study.getStudyDate();
    study.getStudyDesc();
    study.getStudyId();
    study.getStudyTime();
    study.getTimePointDesc();
    study.getTimePointId();
    study.getAgeGroup();
    study.getOccupation();
    if (study.getGeneralSeriesCollection() != null)
    {
        for (GeneralSeries series : study.getGeneralSeriesCollection())
        {
            touchSeries(series);
        }
    }
}

// Force-initializes one GeneralSeries, its images and its equipment.
// Duplicate getter calls from the original (getProtocolName, getSeriesDesc,
// getSite) were dropped: one access is enough to initialize a proxy.
private void touchSeries(GeneralSeries series)
{
    series.getId();
    series.getModality();
    series.getLaterality();
    series.getProtocolName();
    series.getSeriesDesc();
    series.getBodyPartExamined();
    series.getTrialProtocolId();
    series.getSite();
    series.getAdmittingDiagnosesDesc();
    series.getPatientSex();
    series.getAgeGroup();
    series.getPatientId();
    series.getProject();
    if (series.getGeneralImageCollection() != null)
    {
        for (GeneralImage image : series.getGeneralImageCollection())
        {
            touchImage(image);
        }
    }
    if (series.getGeneralEquipment() != null)
    {
        touchEquipment(series.getGeneralEquipment());
    }
}

// Force-initializes one GeneralImage plus its optional MR/CT sub-objects.
private void touchImage(GeneralImage image)
{
    image.getId();
    image.getImageType();
    image.getLossyImageCompression();
    image.getImageOrientationPatient();
    image.getImagePositionPatient();
    image.getContrastBolusAgent();
    image.getContrastBolusRoute();
    image.getPatientPosition();
    image.getImageComments();
    image.getAnnotation();
    image.getImageLaterality();
    image.getPatientId();
    image.getProject();
    image.getUsFrameNum();
    image.getUsColorDataPresent();
    image.getUsMultiModality();
    if (image.getMrImage() != null)
    {
        MRImage mrImage = image.getMrImage();
        mrImage.getImageTypeValue3();
        mrImage.getScanningSequence();
        mrImage.getSequenceVariant();
        mrImage.getSequenceName();
        mrImage.getImagedNucleus();
    }
    if (image.getCtimage() != null)
    {
        CTImage ctImage = image.getCtimage();
        ctImage.getScanOptions();
        ctImage.getConvolutionKernel();
        ctImage.getAnatomicRegionSeq();
    }
    image.getFilename();
}

// Force-initializes the equipment proxy attached to a series.
private void touchEquipment(GeneralEquipment equipment)
{
    equipment.getDeviceSerialNumber();
    equipment.getManufacturer();
    equipment.getInstitutionName();
    equipment.getManufacturerModelName();
    equipment.getSoftwareVersions();
    equipment.getStationName();
}
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getCollectionDesc(String collection)
{
    // Returns the description text for the named collection (0 or 1 rows);
    // empty list on failure.
    List<Object> returnValue = new ArrayList<Object>();
    try {
        returnValue = getHibernateTemplate().getSessionFactory().getCurrentSession()
                .createSQLQuery("select description descript from collection_descriptions where collection_name = :collection")
                .addScalar("descript",Hibernate.TEXT)
                .setParameter("collection", collection).list();
    } catch (HibernateException e) {
        // Log instead of printStackTrace() so the failure reaches the app log.
        log.error("Failed to load description for collection " + collection, e);
    }
    return returnValue;
}
@Transactional(propagation=Propagation.REQUIRED)
public List<Object> getAnnotationFiles(Integer seriesPK)
{
    // Returns the file paths of all annotations attached to the given series
    // primary key; empty list on failure.
    List<Object> returnValue = new ArrayList<Object>();
    try {
        // Bind seriesPK as a named parameter instead of concatenating it into
        // the SQL string (safer and lets the driver cache the statement).
        returnValue = getHibernateTemplate().getSessionFactory().getCurrentSession()
                .createSQLQuery("SELECT annot.file_Path "+
                        "FROM Annotation annot WHERE annot.general_Series_Pk_Id=:seriesPK")
                .setParameter("seriesPK", seriesPK).list();
    } catch (HibernateException e) {
        // Log instead of printStackTrace() so the failure reaches the app log.
        log.error("Failed to load annotation files for series " + seriesPK, e);
    }
    return returnValue;
}
}
| |
/* Copyright (c) 2001-2010, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.lib;
import java.util.NoSuchElementException;
import org.hsqldb.store.BaseHashMap;
/**
* @author Fred Toussi (fredt@users dot sourceforge.net)
* @version 1.9.0
* @since 1.7.2
*/
/**
 * A hash map with primitive {@code int} keys and primitive {@code int}
 * values, backed by {@link BaseHashMap}. Not thread-safe.
 */
public class IntKeyIntValueHashMap extends BaseHashMap {

    // Lazily-created, cached view objects returned by keySet()/values().
    private Set keySet;
    private Collection values;

    /** Creates a map with the default initial capacity (8). */
    public IntKeyIntValueHashMap() {
        this(8);
    }

    /**
     * Creates a map with the given initial capacity.
     *
     * @throws IllegalArgumentException if the capacity is invalid
     */
    public IntKeyIntValueHashMap(int initialCapacity)
    throws IllegalArgumentException {
        super(initialCapacity, BaseHashMap.intKeyOrValue,
              BaseHashMap.intKeyOrValue, false);
    }

    /**
     * Returns the value mapped to {@code key}.
     *
     * @throws NoSuchElementException if the key is absent (0 is a legal
     *         value, so absence cannot be signalled by a sentinel)
     */
    public int get(int key) throws NoSuchElementException {

        int lookup = getLookup(key);

        if (lookup != -1) {
            return intValueTable[lookup];
        }

        throw new NoSuchElementException();
    }

    /** Returns the value mapped to {@code key}, or {@code defaultValue} if absent. */
    public int get(int key, int defaultValue) {

        int lookup = getLookup(key);

        if (lookup != -1) {
            return intValueTable[lookup];
        }

        return defaultValue;
    }

    /**
     * Stores the value for {@code key} into {@code value[0]}.
     *
     * @return true if the key was present
     */
    public boolean get(int key, int[] value) {

        int lookup = getLookup(key);

        if (lookup != -1) {
            value[0] = intValueTable[lookup];

            return true;
        }

        return false;
    }

    /**
     * Maps {@code key} to {@code value}.
     *
     * @return true if the map grew (i.e. the key was not already present)
     */
    public boolean put(int key, int value) {

        int oldSize = size();

        super.addOrRemove(key, value, null, null, false);

        return oldSize != size();
    }

    /**
     * Removes the mapping for {@code key}.
     *
     * @return true if a mapping was removed
     */
    public boolean remove(int key) {

        int oldSize = size();

        super.addOrRemove(key, 0, null, null, true);

        return oldSize != size();
    }

    /** Returns a cached, read-mostly view of the keys. */
    public Set keySet() {

        if (keySet == null) {
            keySet = new KeySet();
        }

        return keySet;
    }

    /** Returns a cached, read-mostly view of the values. */
    public Collection values() {

        if (values == null) {
            values = new Values();
        }

        return values;
    }

    /**
     * Key view. Only iteration, size, isEmpty and clear are supported;
     * the remaining Set operations throw UnsupportedOperationException
     * (a RuntimeException subclass, so existing catch blocks still work).
     */
    class KeySet implements Set {

        public Iterator iterator() {
            return IntKeyIntValueHashMap.this.new BaseHashIterator(true);
        }

        public int size() {
            return IntKeyIntValueHashMap.this.size();
        }

        public boolean contains(Object o) {
            throw new UnsupportedOperationException("contains");
        }

        public Object get(Object key) {
            throw new UnsupportedOperationException("get");
        }

        public boolean add(Object value) {
            throw new UnsupportedOperationException("add");
        }

        public boolean addAll(Collection c) {
            throw new UnsupportedOperationException("addAll");
        }

        public boolean remove(Object o) {
            throw new UnsupportedOperationException("remove");
        }

        public boolean isEmpty() {
            return size() == 0;
        }

        public void clear() {
            IntKeyIntValueHashMap.this.clear();
        }
    }

    /**
     * Value view. Only iteration, size, isEmpty and clear are supported;
     * the remaining Collection operations throw UnsupportedOperationException.
     */
    class Values implements Collection {

        public Iterator iterator() {
            return IntKeyIntValueHashMap.this.new BaseHashIterator(false);
        }

        public int size() {
            return IntKeyIntValueHashMap.this.size();
        }

        public boolean contains(Object o) {
            throw new UnsupportedOperationException("contains");
        }

        public boolean add(Object value) {
            throw new UnsupportedOperationException("add");
        }

        public boolean addAll(Collection c) {
            throw new UnsupportedOperationException("addAll");
        }

        public boolean remove(Object o) {
            throw new UnsupportedOperationException("remove");
        }

        public boolean isEmpty() {
            return size() == 0;
        }

        public void clear() {
            IntKeyIntValueHashMap.this.clear();
        }
    }
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.datapipeline.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Contains the parameters for AddTags.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/datapipeline-2012-10-29/AddTags" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Request object for the AddTags operation: attaches a set of key/value tags
 * to an existing pipeline.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AddTagsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ID of the pipeline. */
    private String pipelineId;

    /** The tags to add, as key/value pairs; lazily initialized by {@link #getTags()}. */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /**
     * Sets the ID of the pipeline.
     *
     * @param pipelineId
     *        The ID of the pipeline.
     */
    public void setPipelineId(String pipelineId) {
        this.pipelineId = pipelineId;
    }

    /**
     * Returns the ID of the pipeline.
     *
     * @return The ID of the pipeline.
     */
    public String getPipelineId() {
        return pipelineId;
    }

    /**
     * Fluent variant of {@link #setPipelineId(String)}.
     *
     * @param pipelineId
     *        The ID of the pipeline.
     * @return this request, for method chaining.
     */
    public AddTagsRequest withPipelineId(String pipelineId) {
        setPipelineId(pipelineId);
        return this;
    }

    /**
     * Returns the tags to add, creating an empty internal list on first access
     * (this method never returns null).
     *
     * @return The tags to add, as key/value pairs.
     */
    public java.util.List<Tag> getTags() {
        if (tags == null) {
            tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return tags;
    }

    /**
     * Replaces the tag list with a copy of the given collection (or clears it
     * when null is passed).
     *
     * @param tags
     *        The tags to add, as key/value pairs.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * Appends the given tags to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)}
     * to replace the list instead.
     *
     * @param tags
     *        The tags to add, as key/value pairs.
     * @return this request, for method chaining.
     */
    public AddTagsRequest withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (Tag tag : tags) {
            this.tags.add(tag);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setTags(java.util.Collection)} — replaces the list.
     *
     * @param tags
     *        The tags to add, as key/value pairs.
     * @return this request, for method chaining.
     */
    public AddTagsRequest withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getPipelineId() != null) {
            sb.append("PipelineId: ").append(getPipelineId()).append(",");
        }
        if (getTags() != null) {
            sb.append("Tags: ").append(getTags());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof AddTagsRequest)) {
            return false;
        }
        AddTagsRequest that = (AddTagsRequest) obj;
        return java.util.Objects.equals(getPipelineId(), that.getPipelineId())
                && java.util.Objects.equals(getTags(), that.getTags());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the identical 31-based rolling hash the
        // generated prime/accumulator loop did.
        return java.util.Objects.hash(getPipelineId(), getTags());
    }

    @Override
    public AddTagsRequest clone() {
        return (AddTagsRequest) super.clone();
    }
}
| |
package DrummerMC.Extra_Stuff.Parts;
import cpw.mods.fml.common.Optional;
import cpw.mods.fml.common.Optional.Interface;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.init.Blocks;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ChatComponentText;
import net.minecraft.util.Vec3;
import net.minecraftforge.common.util.ForgeDirection;
import DrummerMC.Extra_Stuff.Extra_Stuff;
import DrummerMC.Extra_Stuff.Api.Grid.IEnergyStorageGrid;
import DrummerMC.Extra_Stuff.Api.Util.EnergyType;
import DrummerMC.Extra_Stuff.network.ChatPacket;
import appeng.api.AEApi;
import appeng.api.config.Actionable;
import appeng.api.networking.IGridNode;
import appeng.api.networking.security.BaseActionSource;
import appeng.api.networking.storage.IStorageGrid;
import appeng.api.networking.ticking.IGridTickable;
import appeng.api.networking.ticking.TickRateModulation;
import appeng.api.networking.ticking.TickingRequest;
import appeng.api.parts.IPartRenderHelper;
import appeng.api.parts.PartItemStack;
import appeng.api.storage.data.IAEItemStack;
@Optional.InterfaceList(value = { @Interface(iface = "appeng.api.networking.ticking.IGridTickable", modid = "appliedenergistics2")})
public class PartExInterface extends PartBase implements IGridTickable{
public ItemStack[] inv = new ItemStack[9];
public int last = 0;
private boolean hasWork = false;
@Optional.Method(modid = "appliedenergistics2")
@Override
public ItemStack getItemStack(PartItemStack type) {
return new ItemStack(Extra_Stuff.partItem, 1, 2);
}
@Optional.Method(modid = "appliedenergistics2")
@Override
public TickingRequest getTickingRequest(IGridNode node) {
return new TickingRequest(1, 200, false, false);
}
@Optional.Method(modid = "appliedenergistics2")
@Override
public TickRateModulation tickingRequest(IGridNode node, int TicksSinceLastCall) {
if(this.getGridNode() != null){
IGridNode gridNode = this.getGridNode();
IStorageGrid storage = gridNode.getGrid().getCache(IStorageGrid.class);
if(this.tile.getWorldObj().getBlock(this.tile.xCoord + side.offsetX, this.tile.yCoord + side.offsetY, this.tile.zCoord + side.offsetZ).hasTileEntity(this.tile.getWorldObj().getBlockMetadata(this.tile.xCoord + side.offsetX, this.tile.yCoord + side.offsetY, this.tile.zCoord + side.offsetZ))){
TileEntity tileEntity = this.tile.getWorldObj().getTileEntity(this.tile.xCoord + side.offsetX, this.tile.yCoord + side.offsetY, this.tile.zCoord + side.offsetZ);
if(tileEntity instanceof IInventory){
int i = ((IInventory) tileEntity).getSizeInventory();
ItemStack[] invCopy = new ItemStack[9];
{
for(int num=0; num<9; num++)
if(inv[num]!=null)
invCopy[num] = inv[num].copy();
}
while(i>0){
i = i-1;
ItemStack stack = ((IInventory) tileEntity).getStackInSlot(i);
int i2 = 9;
while(i2>0){
i2 = i2 - 1;
if(!(invCopy[i2] == null || stack == null)){
if(invCopy[i2].isItemEqual(stack)){
if(invCopy[i2].stackSize > stack.stackSize){
invCopy[i2].stackSize = invCopy[i2].stackSize - stack.stackSize;
}else{
invCopy[i2] = null;
}
}
}
}
}
int a = 9;
while(a>0){
a = a -1;
int m = ((IInventory) tileEntity).getSizeInventory();;
while(m>0){
m = m - 1;
System.out.println(m+" "+ invCopy[a]);
if(((IInventory) tileEntity).isItemValidForSlot(m, invCopy[a])){
if(((IInventory) tileEntity).getStackInSlot(m)!= null && invCopy[a] != null){
if(((IInventory) tileEntity).getStackInSlot(m).isItemEqual(invCopy[a])&&((IInventory) tileEntity).getStackInSlot(m).getMaxStackSize()>((IInventory) tileEntity).getStackInSlot(m).stackSize&&((IInventory) tileEntity).getStackInSlot(m).stackSize<((IInventory) tileEntity).getInventoryStackLimit()){
int max = Math.min(((IInventory) tileEntity).getStackInSlot(m).getMaxStackSize(), ((IInventory) tileEntity).getInventoryStackLimit());
if(max-((IInventory) tileEntity).getStackInSlot(m).stackSize>=invCopy[a].stackSize){
IAEItemStack AEs = storage.getItemInventory().extractItems(AEApi.instance().storage().createItemStack(invCopy[a]), Actionable.MODULATE, new BaseActionSource());
if(AEs!=null){
ItemStack s = ((IInventory) tileEntity).getStackInSlot(m).copy();
s.stackSize = (int) (s.stackSize + AEs.getStackSize());
((IInventory) tileEntity).setInventorySlotContents(m, s);
if(invCopy[a].stackSize == (int) s.stackSize)
invCopy[a] = null;
else
invCopy[a].stackSize = invCopy[a].stackSize - s.stackSize;
}
}else{
int add = invCopy[a].stackSize - (max-((IInventory) tileEntity).getStackInSlot(m).stackSize);
ItemStack toRem = invCopy[a].copy();
toRem.stackSize = add;
IAEItemStack AEs = storage.getItemInventory().extractItems(AEApi.instance().storage().createItemStack(toRem), Actionable.MODULATE, new BaseActionSource());
if(AEs!=null){
ItemStack s = ((IInventory) tileEntity).getStackInSlot(m).copy();
s.stackSize = (int) (s.stackSize + AEs.getStackSize());
((IInventory) tileEntity).setInventorySlotContents(m, s);
if(invCopy[a].stackSize == (int) s.stackSize)
invCopy[a] = null;
else
invCopy[a].stackSize = invCopy[a].stackSize - s.stackSize;
}
}
}
}else if(invCopy[a] != null){
if(0<((IInventory) tileEntity).getInventoryStackLimit()){
int max = Math.min(64, ((IInventory) tileEntity).getInventoryStackLimit());
if(max>=invCopy[a].stackSize){
IAEItemStack AEs = storage.getItemInventory().extractItems(AEApi.instance().storage().createItemStack(invCopy[a]), Actionable.MODULATE, new BaseActionSource());
if(AEs!=null){
ItemStack s = AEs.getItemStack();
s.stackSize = (int) (AEs.getStackSize());
((IInventory) tileEntity).setInventorySlotContents(m, s);
if(invCopy[a].stackSize == (int) s.stackSize)
invCopy[a] = null;
else
invCopy[a].stackSize = invCopy[a].stackSize - s.stackSize;
}
}else{
int add = invCopy[a].stackSize - (max-((IInventory) tileEntity).getStackInSlot(m).stackSize);
ItemStack toRem = invCopy[a].copy();
toRem.stackSize = add;
IAEItemStack AEs = storage.getItemInventory().extractItems(AEApi.instance().storage().createItemStack(toRem), Actionable.MODULATE, new BaseActionSource());
if(AEs!=null){
ItemStack s = AEs.getItemStack();
s.stackSize = (int) (AEs.getStackSize());
((IInventory) tileEntity).setInventorySlotContents(m, s);
if(invCopy[a].stackSize == (int) s.stackSize)
invCopy[a] = null;
else
invCopy[a].stackSize = invCopy[a].stackSize - s.stackSize;
}
}
}
}
}
}
}
}
}
}
if(hasWork)
return TickRateModulation.URGENT;
return TickRateModulation.IDLE;
}
@Optional.Method(modid = "appliedenergistics2")
@Override
public void writeToNBT(NBTTagCompound data) {
data.setInteger("last", last);
int a = 9;
while (a>0){
a = a-1;
if(inv[a] == null){
NBTTagCompound tag = new NBTTagCompound();
tag.setBoolean("hasItem", false);
data.setTag("inv"+a, tag);
}else{
NBTTagCompound tag = new NBTTagCompound();
tag.setBoolean("hasItem", true);
data.setTag("inv"+a, inv[a].writeToNBT(tag));
}
}
}
@Optional.Method(modid = "appliedenergistics2")
@Override
public void readFromNBT(NBTTagCompound data) {
if(data.hasKey("last"))
last = data.getInteger("last");
int a = 9;
while (a>0){
a = a-1;
if(data.hasKey("inv"+a)){
if(data.getCompoundTag("inv"+a).getBoolean("hasItem"))
this.inv[a] = ItemStack.loadItemStackFromNBT(data.getCompoundTag("inv"+a));
}else
this.inv[a] = null;
}
}
@Optional.Method(modid = "appliedenergistics2")
@Override
public boolean onActivate(EntityPlayer player, Vec3 pos) {
if(player.worldObj.isRemote)
return !player.isSneaking();
if(player.isSneaking())
return false;
if(player.getHeldItem()!=null)
this.inv[last] = player.getHeldItem().copy();
else
this.inv[last] = null;
last = last + 1;
Extra_Stuff.network.sendTo(new ChatPacket("chat.ae2addons.setItem", last), (EntityPlayerMP) player);
if(last == 9)
last = 0;
return true;
}
@Optional.Method(modid = "appliedenergistics2")
@SideOnly(Side.CLIENT)
@Override
public void renderInventory(IPartRenderHelper rh, RenderBlocks renderer) {
rh.setTexture(Extra_Stuff.partItem.getIconFromDamage(2));
rh.setBounds(2, 2, 14, 14, 14, 16);
rh.renderInventoryBox(renderer);
}
@Optional.Method(modid = "appliedenergistics2")
@SideOnly(Side.CLIENT)
@Override
public void renderStatic(int x, int y, int z, IPartRenderHelper rh,
RenderBlocks renderer) {
rh.setTexture(Extra_Stuff.partItem.getIconFromDamage(2));
rh.setBounds(2, 2, 14, 14, 14, 16);
rh.renderBlock(x, y, z, renderer);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.PlainShardIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
import java.util.function.Function;
import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED;
import static org.hamcrest.CoreMatchers.startsWith;
public class TransportSearchActionTests extends ESTestCase {
private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
@Override
public void tearDown() throws Exception {
    // Run the framework teardown first, then stop the per-test thread pool,
    // waiting up to 10 seconds for its tasks to finish.
    super.tearDown();
    ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
public void testMergeShardsIterators() {
    // Two shards that live on the local cluster (shard ids 0 and 1).
    List<ShardIterator> localIterators = new ArrayList<>();
    ShardId localShard = new ShardId("local_index", "local_index_uuid", 0);
    localIterators.add(new PlainShardIterator(localShard,
        Collections.singletonList(TestShardRouting.newShardRouting(localShard, "local_node", true, STARTED))));
    ShardId localShard2 = new ShardId("local_index_2", "local_index_2_uuid", 1);
    localIterators.add(new PlainShardIterator(localShard2,
        Collections.singletonList(TestShardRouting.newShardRouting(localShard2, "local_node", true, STARTED))));
    GroupShardsIterator<ShardIterator> localShardsIterator = new GroupShardsIterator<>(localIterators);

    OriginalIndices localIndices = new OriginalIndices(new String[]{"local_alias", "local_index_2"},
        SearchRequest.DEFAULT_INDICES_OPTIONS);
    OriginalIndices remoteIndices = new OriginalIndices(new String[]{"remote_alias", "remote_index_2"},
        IndicesOptions.strictExpandOpen());
    OriginalIndices remoteIndices2 = new OriginalIndices(new String[]{"remote_index_3"}, IndicesOptions.strictExpand());

    // Three shards reported by the "remote" cluster (shard ids 2, 3 and 4).
    List<SearchShardIterator> remoteIterators = new ArrayList<>();
    remoteIterators.add(newRemoteIterator("remote_index", "remote_index_uuid", 2, remoteIndices));
    remoteIterators.add(newRemoteIterator("remote_index_2", "remote_index_2_uuid", 3, remoteIndices));
    remoteIterators.add(newRemoteIterator("remote_index_3", "remote_index_3_uuid", 4, remoteIndices2));

    String localClusterAlias = randomBoolean() ? null : "local";
    GroupShardsIterator<SearchShardIterator> merged = TransportSearchAction.mergeShardsIterators(localShardsIterator,
        localIndices, localClusterAlias, remoteIterators);

    assertEquals(5, merged.size());
    // Position i in the merged view corresponds to shard id i in the fixtures above.
    String[] expectedIndexNames = {"local_index", "local_index_2", "remote_index", "remote_index_2", "remote_index_3"};
    OriginalIndices[] expectedOriginalIndices = {localIndices, localIndices, remoteIndices, remoteIndices, remoteIndices2};
    String[] expectedClusterAliases = {localClusterAlias, localClusterAlias, "remote", "remote", "remote"};
    int position = 0;
    for (SearchShardIterator iterator : merged) {
        assertEquals(expectedIndexNames[position], iterator.shardId().getIndexName());
        assertEquals(position, iterator.shardId().getId());
        assertSame(expectedOriginalIndices[position], iterator.getOriginalIndices());
        assertEquals(expectedClusterAliases[position], iterator.getClusterAlias());
        position++;
    }
}

/** Builds a single-routing iterator for a started primary on the "remote" cluster. */
private static SearchShardIterator newRemoteIterator(String index, String uuid, int shardNum, OriginalIndices indices) {
    ShardId shardId = new ShardId(index, uuid, shardNum);
    ShardRouting routing = TestShardRouting.newShardRouting(shardId, "remote_node", true, STARTED);
    return new SearchShardIterator("remote", shardId, Collections.singletonList(routing), indices);
}
public void testProcessRemoteShards() {
    try (TransportService transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool,
            null)) {
        RemoteClusterService remoteClusterService = transportService.getRemoteClusterService();
        assertFalse(remoteClusterService.isCrossClusterSearchEnabled());

        // Fixture for test_cluster_1: index "foo" (2 shards) and "bar" (1 shard).
        DiscoveryNode[] cluster1Nodes = {
            new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT),
            new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT)
        };
        Map<String, AliasFilter> cluster1Aliases = new HashMap<>();
        cluster1Aliases.put("foo", new AliasFilter(new TermsQueryBuilder("foo", "bar"), "some_alias_for_foo",
            "some_other_foo_alias"));
        cluster1Aliases.put("bar", new AliasFilter(new MatchAllQueryBuilder(), Strings.EMPTY_ARRAY));
        ClusterSearchShardsGroup[] cluster1Groups = {
            new ClusterSearchShardsGroup(new ShardId("foo", "foo_id", 0),
                new ShardRouting[]{TestShardRouting.newShardRouting("foo", 0, "node1", true, ShardRoutingState.STARTED),
                    TestShardRouting.newShardRouting("foo", 0, "node2", false, ShardRoutingState.STARTED)}),
            new ClusterSearchShardsGroup(new ShardId("foo", "foo_id", 1),
                new ShardRouting[]{TestShardRouting.newShardRouting("foo", 0, "node1", true, ShardRoutingState.STARTED),
                    TestShardRouting.newShardRouting("foo", 1, "node2", false, ShardRoutingState.STARTED)}),
            new ClusterSearchShardsGroup(new ShardId("bar", "bar_id", 0),
                new ShardRouting[]{TestShardRouting.newShardRouting("bar", 0, "node2", true, ShardRoutingState.STARTED),
                    TestShardRouting.newShardRouting("bar", 0, "node1", false, ShardRoutingState.STARTED)})
        };

        // Fixture for test_cluster_2: index "xyz" (1 shard).
        DiscoveryNode[] cluster2Nodes = {
            new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT)
        };
        Map<String, AliasFilter> cluster2Aliases = new HashMap<>();
        cluster2Aliases.put("xyz", new AliasFilter(null, "some_alias_for_xyz"));
        ClusterSearchShardsGroup[] cluster2Groups = {
            new ClusterSearchShardsGroup(new ShardId("xyz", "xyz_id", 0),
                new ShardRouting[]{TestShardRouting.newShardRouting("xyz", 0, "node3", true, ShardRoutingState.STARTED)})
        };

        Map<String, ClusterSearchShardsResponse> responsesByCluster = new HashMap<>();
        responsesByCluster.put("test_cluster_1", new ClusterSearchShardsResponse(cluster1Groups, cluster1Nodes, cluster1Aliases));
        responsesByCluster.put("test_cluster_2", new ClusterSearchShardsResponse(cluster2Groups, cluster2Nodes, cluster2Aliases));

        Map<String, OriginalIndices> remoteIndicesByCluster = new HashMap<>();
        remoteIndicesByCluster.put("test_cluster_1",
            new OriginalIndices(new String[]{"fo*", "ba*"}, SearchRequest.DEFAULT_INDICES_OPTIONS));
        remoteIndicesByCluster.put("test_cluster_2",
            new OriginalIndices(new String[]{"x*"}, SearchRequest.DEFAULT_INDICES_OPTIONS));

        List<SearchShardIterator> iterators = new ArrayList<>();
        Map<String, AliasFilter> remoteAliases = new HashMap<>();
        TransportSearchAction.processRemoteShards(responsesByCluster, remoteIndicesByCluster, iterators,
            remoteAliases);

        assertEquals(4, iterators.size());
        for (SearchShardIterator iterator : iterators) {
            String indexName = iterator.shardId().getIndexName();
            if (indexName.endsWith("foo")) {
                assertArrayEquals(new String[]{"some_alias_for_foo", "some_other_foo_alias"},
                    iterator.getOriginalIndices().indices());
                assertTrue(iterator.shardId().getId() == 0 || iterator.shardId().getId() == 1);
                assertEquals("test_cluster_1", iterator.getClusterAlias());
                assertEquals("foo", indexName);
                assertRemainingRoutings(iterator, "foo", 2);
            } else if (indexName.endsWith("bar")) {
                assertArrayEquals(new String[]{"bar"}, iterator.getOriginalIndices().indices());
                assertEquals(0, iterator.shardId().getId());
                assertEquals("test_cluster_1", iterator.getClusterAlias());
                assertEquals("bar", indexName);
                assertRemainingRoutings(iterator, "bar", 2);
            } else if (indexName.endsWith("xyz")) {
                assertArrayEquals(new String[]{"some_alias_for_xyz"}, iterator.getOriginalIndices().indices());
                assertEquals(0, iterator.shardId().getId());
                assertEquals("xyz", indexName);
                assertEquals("test_cluster_2", iterator.getClusterAlias());
                assertRemainingRoutings(iterator, "xyz", 1);
            }
        }

        assertEquals(3, remoteAliases.size());
        // The alias-filter map is keyed by index UUID, not index name.
        assertTrue(remoteAliases.toString(), remoteAliases.containsKey("foo_id"));
        assertTrue(remoteAliases.toString(), remoteAliases.containsKey("bar_id"));
        assertTrue(remoteAliases.toString(), remoteAliases.containsKey("xyz_id"));
        assertEquals(new TermsQueryBuilder("foo", "bar"), remoteAliases.get("foo_id").getQueryBuilder());
        assertEquals(new MatchAllQueryBuilder(), remoteAliases.get("bar_id").getQueryBuilder());
        assertNull(remoteAliases.get("xyz_id").getQueryBuilder());
    }
}

/** Drains the iterator, asserting it yields exactly expectedCount routings of expectedIndex. */
private static void assertRemainingRoutings(SearchShardIterator iterator, String expectedIndex, int expectedCount) {
    for (int i = 0; i < expectedCount; i++) {
        ShardRouting routing = iterator.nextOrNull();
        assertNotNull(routing);
        assertEquals(expectedIndex, routing.getIndexName());
    }
    assertNull(iterator.nextOrNull());
}
public void testBuildConnectionLookup() {
    // Resolvers that tag node ids so assertions can tell which path was taken.
    Function<String, DiscoveryNode> localNodeResolver = nodeId -> new DiscoveryNode("local-" + nodeId,
        new TransportAddress(TransportAddress.META_ADDRESS, 1024), Version.CURRENT);
    BiFunction<String, String, DiscoveryNode> remoteNodeResolver = (clusterAlias, nodeId) -> new DiscoveryNode("remote-" + nodeId,
        new TransportAddress(TransportAddress.META_ADDRESS, 2048), Version.CURRENT);
    // Minimal no-op connection wrapper around the resolved node.
    BiFunction<String, DiscoveryNode, Transport.Connection> connectionFactory = (clusterAlias, node) -> new Transport.Connection() {
        @Override
        public DiscoveryNode getNode() {
            return node;
        }
        @Override
        public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
            throws TransportException {
        }
        @Override
        public void addCloseListener(ActionListener<Void> listener) {
        }
        @Override
        public boolean isClosed() {
            return false;
        }
        @Override
        public void close() {
        }
    };
    {
        // No request-level alias: a null cluster alias resolves locally, any alias resolves remotely.
        BiFunction<String, String, Transport.Connection> lookup = TransportSearchAction.buildConnectionLookup(
            null, localNodeResolver, remoteNodeResolver, connectionFactory);
        assertThat(lookup.apply(null, randomAlphaOfLengthBetween(5, 10)).getNode().getId(), startsWith("local-"));
        assertThat(lookup.apply(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)).getNode().getId(),
            startsWith("remote-"));
    }
    {
        // With a request-level alias: the matching alias still resolves to a local node.
        String requestClusterAlias = randomAlphaOfLengthBetween(5, 10);
        BiFunction<String, String, Transport.Connection> lookup = TransportSearchAction.buildConnectionLookup(
            requestClusterAlias, localNodeResolver, remoteNodeResolver, connectionFactory);
        assertThat(lookup.apply(requestClusterAlias, randomAlphaOfLengthBetween(5, 10)).getNode().getId(), startsWith("local-"));
    }
}
public void testBuildClusters() {
    // Randomly include a local component and 0-10 remote clusters, tracking the
    // expected total/successful/skipped counts as the fixture is built.
    OriginalIndices localIndices = randomBoolean() ? null : randomOriginalIndices();
    Map<String, OriginalIndices> remoteIndicesByCluster = new HashMap<>();
    Map<String, ClusterSearchShardsResponse> shardsResponsesByCluster = new HashMap<>();
    int numRemoteClusters = randomIntBetween(0, 10);
    boolean onlySuccessful = randomBoolean();
    int expectedSuccessful = localIndices == null ? 0 : 1;
    int expectedTotal = numRemoteClusters + expectedSuccessful;
    int expectedSkipped = 0;
    for (int i = 0; i < numRemoteClusters; i++) {
        String clusterAlias = randomAlphaOfLengthBetween(5, 10);
        remoteIndicesByCluster.put(clusterAlias, randomOriginalIndices());
        if (onlySuccessful || randomBoolean()) {
            // any response counts as successful as long as it's not the EMPTY placeholder
            shardsResponsesByCluster.put(clusterAlias, new ClusterSearchShardsResponse(null, null, null));
            expectedSuccessful++;
        } else {
            shardsResponsesByCluster.put(clusterAlias, ClusterSearchShardsResponse.EMPTY);
            expectedSkipped++;
        }
    }
    SearchResponse.Clusters clusters = TransportSearchAction.buildClusters(localIndices, remoteIndicesByCluster, shardsResponsesByCluster);
    assertEquals(expectedTotal, clusters.getTotal());
    assertEquals(expectedSuccessful, clusters.getSuccessful());
    assertEquals(expectedSkipped, clusters.getSkipped());
}
/** Builds an OriginalIndices with 0-5 random index names and fully random options. */
private static OriginalIndices randomOriginalIndices() {
    String[] indices = new String[randomIntBetween(0, 5)];
    for (int i = 0; i < indices.length; i++) {
        indices[i] = randomAlphaOfLengthBetween(3, 10);
    }
    return new OriginalIndices(indices, IndicesOptions.fromOptions(randomBoolean(),
        randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
}
public void testSetMaxConcurrentShardRequests() {
    {
        // A value the user set explicitly is never overridden.
        SearchRequest request = new SearchRequest();
        int explicitValue = randomIntBetween(1, Integer.MAX_VALUE);
        request.setMaxConcurrentShardRequests(explicitValue);
        TransportSearchAction.setMaxConcurrentShardRequests(request, randomIntBetween(0, Integer.MAX_VALUE));
        assertEquals(explicitValue, request.getMaxConcurrentShardRequests());
    }
    {
        // Otherwise the default is five requests per node, capped at 256.
        SearchRequest request = new SearchRequest();
        int numNodes = randomIntBetween(1, 1000000);
        TransportSearchAction.setMaxConcurrentShardRequests(request, numNodes);
        assertEquals(Math.min(256, numNodes * 5), request.getMaxConcurrentShardRequests());
    }
    {
        // Zero nodes still yields the per-node default of five.
        SearchRequest request = new SearchRequest();
        TransportSearchAction.setMaxConcurrentShardRequests(request, 0);
        assertEquals(5, request.getMaxConcurrentShardRequests());
    }
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.toolbox;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Substring with explicit offsets within its parent string.
* <p/>
* Regular java.lang.String objects share a single char buffer for results of substring(), trim(), etc., but the offset and count
* fields of Strings are unfortunately private.
*
* @author vlan
*/
public class Substring implements CharSequence {
  // Matches a single line break: "\r\n" or "\n".
  private static final Pattern RE_NL = Pattern.compile("(\\r?\\n)");

  @NotNull private final String myString; // the parent (origin) string
  private final int myStartOffset;        // inclusive start offset within myString
  private final int myEndOffset;          // exclusive end offset within myString

  /** Creates a substring covering the whole of {@code s}. */
  public Substring(@NotNull String s) {
    this(s, 0, s.length());
  }

  /** Creates a substring covering {@code [start, end)} of {@code s}. */
  public Substring(@NotNull String s, int start, int end) {
    myString = s;
    myStartOffset = start;
    myEndOffset = end;
  }

  @Override
  public boolean equals(Object o) {
    // Compares by covered text, against both Strings and Substrings.
    // NOTE(review): equality with String is asymmetric (String.equals(Substring) is
    // always false) — kept as-is for compatibility with existing callers.
    if (o instanceof String) {
      return toString().equals(o);
    }
    else if (o instanceof Substring) {
      return toString().equals(o.toString());
    }
    return false;
  }

  @Override
  public int hashCode() {
    return toString().hashCode();
  }

  @NotNull
  @Override
  public String toString() {
    return getValue();
  }

  /** Returns the text covered by this substring as a plain String. */
  @NotNull
  public String getValue() {
    return getTextRange().substring(myString);
  }

  /** Returns the parent string this substring was created from. */
  @NotNull
  public String getSuperString() {
    return myString;
  }

  /** Returns this substring's [start, end) range within the parent string. */
  @NotNull
  public TextRange getTextRange() {
    return TextRange.create(myStartOffset, myEndOffset);
  }

  @NotNull
  public List<Substring> split(@NotNull String regex) {
    return split(regex, Integer.MAX_VALUE);
  }

  @NotNull
  public List<Substring> split(@NotNull String regex, int maxSplits) {
    return split(Pattern.compile(regex), maxSplits);
  }

  @NotNull
  public List<Substring> split(@NotNull Pattern pattern) {
    return split(pattern, Integer.MAX_VALUE);
  }

  /**
   * Splits this substring around matches of {@code pattern}, producing at most
   * {@code maxSplits} separator matches. The matcher runs over the whole parent
   * string; pieces are clamped to this substring's range.
   */
  @NotNull
  public List<Substring> split(@NotNull Pattern pattern, int maxSplits) {
    final List<Substring> result = new ArrayList<>();
    final Matcher m = pattern.matcher(myString);
    int start = myStartOffset;
    int end = myEndOffset;
    int splitCount = 0;
    if (m.find(start)) {
      do {
        splitCount++;
        end = m.start();
        // The match may begin past our end offset; clamp the piece to our range.
        result.add(createAnotherSubstring(start, Math.min(end, myEndOffset)));
        start = m.end();
      }
      while (end < myEndOffset && m.find() && splitCount < maxSplits);
      if (start <= myEndOffset) {
        // Trailing piece after the last separator (may be empty).
        result.add(createAnotherSubstring(start, myEndOffset));
      }
    }
    else {
      // No separator at all: the result is this substring, unsplit.
      result.add(createAnotherSubstring(start, end));
    }
    return result;
  }

  /** Splits on line breaks ("\r\n" or "\n"). */
  @NotNull
  public List<Substring> splitLines() {
    return split(RE_NL);
  }

  /** Returns a substring with ASCII control/space characters removed from both ends. */
  @NotNull
  public Substring trim() {
    return trimLeft().trimRight();
  }

  @NotNull
  public Substring trimLeft() {
    int start;
    for (start = myStartOffset; start < myEndOffset && myString.charAt(start) <= '\u0020'; start++) { /*empty*/ }
    return createAnotherSubstring(start, myEndOffset);
  }

  @NotNull
  public Substring trimRight() {
    int end;
    for (end = myEndOffset - 1; end > myStartOffset && myString.charAt(end) <= '\u0020'; end--) { /* empty */ }
    return createAnotherSubstring(myStartOffset, end + 1);
  }

  /**
   * Returns the given group of a matcher as a substring of the same origin.
   * Assumes {@code m} was matched against this substring's parent string —
   * group offsets are interpreted as substring-relative here; verify at call sites.
   */
  @NotNull
  public Substring getMatcherGroup(@NotNull Matcher m, int group) {
    return substring(m.start(group), m.end(group));
  }

  @Override
  public int length() {
    return myEndOffset - myStartOffset;
  }

  public boolean isEmpty() {
    return length() <= 0;
  }

  @Override
  public char charAt(int i) {
    return myString.charAt(myStartOffset + i);
  }

  @Override
  public CharSequence subSequence(int start, int end) {
    return substring(start, end);
  }

  public boolean startsWith(@NotNull String prefix) {
    return indexOf(prefix) == 0;
  }

  /**
   * Returns true if this substring ends with the given text.
   * <p>
   * Fixed: the previous implementation searched the whole parent string via
   * {@code myString.lastIndexOf(prefix)} and compared against the substring-relative
   * {@code length() - prefix.length()}, which produced wrong results whenever
   * this substring does not start at offset 0 of its parent.
   */
  public boolean endsWith(@NotNull String prefix) {
    final int suffixStart = myEndOffset - prefix.length();
    return suffixStart >= myStartOffset && myString.startsWith(prefix, suffixStart);
  }

  /**
   * Returns the substring-relative index of the first occurrence of {@code s}
   * starting within this substring, or -1.
   * NOTE(review): a match that starts before myEndOffset but extends past it is
   * still reported — callers may rely on this; confirm before tightening.
   */
  public int indexOf(@NotNull String s) {
    int n = myString.indexOf(s, myStartOffset);
    return n >= 0 && n < myEndOffset ? n - myStartOffset : -1;
  }

  public boolean contains(@NotNull String s) {
    return indexOf(s) >= 0;
  }

  @NotNull
  public Substring substring(int start) {
    return substring(start, length());
  }

  /** Returns a substring of this substring; offsets are relative to this substring. */
  @NotNull
  public Substring substring(int start, int end) {
    return createAnotherSubstring(myStartOffset + start, myStartOffset + end);
  }

  /** Joins the trimmed lines of this substring with {@code separator}. */
  @NotNull
  public String concatTrimmedLines(@NotNull String separator) {
    final StringBuilder b = new StringBuilder();
    List<Substring> lines = splitLines();
    final int n = lines.size();
    for (int i = 0; i < n; i++) {
      b.append(lines.get(i).trim().toString());
      if (i < n - 1) {
        b.append(separator);
      }
    }
    return b.toString();
  }

  @NotNull
  private Substring createAnotherSubstring(int start, int end) {
    return new Substring(myString, start, end);
  }

  /**
   * If both substrings share the same origin, returns new substring that includes both of them.
   *
   * @throws IllegalArgumentException if the substrings have different origins
   */
  @NotNull
  public Substring union(@NotNull Substring other) {
    if (!myString.equals(other.myString)) {
      throw new IllegalArgumentException(String.format("Substrings '%s' and '%s' must belong to the same origin", this, other));
    }
    final TextRange unionRange = getTextRange().union(other.getTextRange());
    return new Substring(getSuperString(), unionRange.getStartOffset(), unionRange.getEndOffset());
  }

  public int getStartOffset() {
    return myStartOffset;
  }

  /** Returns the 0-based line number of the start offset within the parent string. */
  public int getStartLine() {
    return StringUtil.offsetToLineNumber(myString, myStartOffset);
  }

  public int getEndOffset() {
    return myEndOffset;
  }

  /** Returns the 0-based line number of the end offset within the parent string. */
  public int getEndLine() {
    return StringUtil.offsetToLineNumber(myString, myEndOffset);
  }
}
| |
/**
* Created by Aleksey Terzi
*/
package com.aleksey.castlegates.engine.bridge;
import com.aleksey.castlegates.CastleGates;
import com.aleksey.castlegates.config.ConfigManager;
import com.aleksey.castlegates.database.SqlDatabase;
import com.aleksey.castlegates.engine.PlayerStateManager;
import com.aleksey.castlegates.engine.StorageManager;
import com.aleksey.castlegates.types.*;
import com.aleksey.castlegates.utils.Helper;
import com.aleksey.castlegates.utils.ParticleHelper;
import com.aleksey.castlegates.utils.PowerResultHelper;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.entity.Player;
import org.bukkit.event.block.Action;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPhysicsEvent;
import org.bukkit.event.block.BlockRedstoneEvent;
import org.bukkit.event.entity.EntityChangeBlockEvent;
import org.bukkit.event.entity.EntityExplodeEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.EquipmentSlot;
import org.bukkit.inventory.ItemStack;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.UUID;
import java.util.logging.Level;
/**
 * Translates Bukkit world events (clicks, redstone, physics, explosions, pistons)
 * into gearblock operations on the {@link BridgeManager}.
 */
public class BridgeEventHandler {
    /** Result of a linear scan for the opposite gearblock: the block found and its distance. */
    private static class FindGearResult {
        public Gearblock gearblock;
        public int distance;

        public FindGearResult(Gearblock gearblock, int distance) {
            this.gearblock = gearblock;
            this.distance = distance;
        }
    }

    private final StorageManager storage;
    // Fix: this field was previously initialized with a throwaway `new BridgeManager()`
    // that the constructor immediately overwrote; it is now assigned exactly once.
    private final BridgeManager bridgeManager;
    // Blocks next to a recent redstone change, waiting for the follow-up physics event.
    private final HashSet<Block> waitingBlocks = new HashSet<>();
    // Blocks currently being processed; guards against re-entrant processing.
    private final HashSet<Block> processingBlocks = new HashSet<>();

    public BridgeEventHandler(StorageManager storage, BridgeManager bridgeManager) {
        this.storage = storage;
        this.bridgeManager = bridgeManager;
    }

    /**
     * Dispatches a player click based on the player's command mode and held item.
     * Right main-hand clicks attempt simple activation (and report unhandled);
     * left clicks may create, link, inspect or set a timer on a gearblock.
     *
     * @return true when the click was consumed by a gearblock operation
     */
    public boolean handleBlockClicked(PlayerInteractEvent event, PlayerStateManager.PlayerState state) {
        if (event.getAction() == Action.RIGHT_CLICK_BLOCK && event.getHand() == EquipmentSlot.HAND) {
            simpleActivate(event);
            return false;
        }

        if (event.getAction() != Action.LEFT_CLICK_BLOCK) return false;

        boolean interacted = false;
        ConfigManager configManager = CastleGates.getConfigManager();
        CommandMode mode = state != null ? state.mode : CommandMode.OFF;

        if (configManager.getAllowAutoCreate() && configManager.isCreationConsumeItem(event.getItem())) {
            // Holding the creation item auto-creates and tries to auto-link.
            interacted = createGearblockAndLink(event);
        }
        else if (mode == CommandMode.INFO) {
            showGearInfo(event);
            interacted = true;
        }
        else if (mode == CommandMode.TIMER) {
            interacted = setGearblockTimer(event, state);
        }
        else if (configManager.isStickItem(event.getItem())) {
            switch (mode) {
                case CREATE:
                    interacted = createGearblock(event);
                    break;
                case LINK:
                    interacted = linkGearblocks(event);
                    break;
                default:
                    interacted = false;
                    break;
            }
        }

        return interacted;
    }

    /** Removes a broken gearblock and refunds the creation item to the player. */
    public void handleBlockBreak(BlockBreakEvent event) {
        Block block = event.getBlock();
        BridgeManager.RemoveResult result = this.bridgeManager.removeGear(new BlockCoord(block));

        if (result == BridgeManager.RemoveResult.Removed || result == BridgeManager.RemoveResult.RemovedWithLink) {
            Helper.putItemToInventoryOrDrop(event.getPlayer(), block.getLocation(), CastleGates.getConfigManager().getCreationConsumeItem());
        }
    }

    /**
     * Records gearblocks adjacent to a redstone power transition. Actual processing
     * is deferred to the physics event, when the block's powered state is settled.
     */
    public void handleBlockRedstone(BlockRedstoneEvent event) {
        // Ignore changes in current strength that do not flip the on/off state.
        if ((event.getOldCurrent() != 0) == (event.getNewCurrent() != 0)) return;

        Block block = event.getBlock();

        for (BlockFace face : BridgeManager.faces) {
            Block faceBlock = block.getRelative(face);
            if (this.storage.getGearblock(new BlockCoord(faceBlock)) != null) {
                this.waitingBlocks.add(faceBlock);
            }
        }
    }

    /** Processes a gearblock previously queued by a redstone transition. */
    public void handleBlockPhysics(BlockPhysicsEvent event) {
        Block block = event.getBlock();

        if (this.waitingBlocks.remove(block)) {
            processBlock(block, block.isBlockPowered());
        }
    }

    /** Removes gearblocks destroyed by an explosion (no item refund). */
    public void handleEntityExplode(EntityExplodeEvent event) {
        for (Block block : event.blockList()) {
            this.bridgeManager.removeGear(new BlockCoord(block));
        }
    }

    /** Removes a gearblock whose block an entity changed (no item refund). */
    public void handleEntityChangeBlock(EntityChangeBlockEvent event) {
        this.bridgeManager.removeGear(new BlockCoord(event.getBlock()));
    }

    /** Removes gearblocks moved by a piston, dropping the creation item at each location. */
    public void handlePistonEvent(List<Block> blocks) {
        ItemStack dropItem = CastleGates.getConfigManager().getCreationConsumeItem();

        for (Block block : blocks) {
            BridgeManager.RemoveResult result = this.bridgeManager.removeGear(new BlockCoord(block));
            if (result == BridgeManager.RemoveResult.Removed || result == BridgeManager.RemoveResult.RemovedWithLink) {
                Helper.putItemToInventoryOrDrop(null, block.getLocation(), dropItem);
            }
        }
    }

    /**
     * Creates a gearblock at the clicked block and, if it is not yet linked, tries
     * to link it with the first unlinked gearblock found along any search face.
     *
     * @return true when a gearblock was created (regardless of link success)
     */
    private boolean createGearblockAndLink(PlayerInteractEvent event) {
        if (!createGearblock(event)) return false;

        Block block = event.getClickedBlock();
        Gearblock gearblock1 = this.storage.getGearblock(new BlockCoord(block));

        if (gearblock1.getLink() != null) return true;

        for (BlockFace face : BridgeManager.faces) {
            FindGearResult result = findEndGear(block, face);
            if (result != null
                && result.gearblock.getLink() == null
                && linkGearblocks(event.getPlayer(), gearblock1, result, false)
            )
            {
                break;
            }
        }

        return true;
    }

    /**
     * Creates a gearblock at the clicked block, enforcing reinforcement and Citadel
     * permissions and consuming the configured creation item.
     *
     * @return true when a gearblock exists at the clicked block after this call
     */
    private boolean createGearblock(PlayerInteractEvent event) {
        Player player = event.getPlayer();
        Block block = event.getClickedBlock();
        Location location = block.getLocation();

        if (CastleGates.getConfigManager().isGearblockMustBeReinforced()
            && !CastleGates.getCitadelManager().isReinforced(location))
        {
            player.sendMessage(ChatColor.RED + "Block must be reinforced to create a gearblock.");
            return false;
        }

        if (!CastleGates.getCitadelManager().canBypass(player, location)) {
            player.sendMessage(ChatColor.RED + "Citadel has prevented the creation of a gearblock.");
            return false;
        }

        ItemStack consumeItem = CastleGates.getConfigManager().getCreationConsumeItem();
        List<Integer> consumeSlots = Helper.getConsumeSlots(player, consumeItem);

        // null slots with a non-null item means the player lacks the material.
        if (consumeSlots == null && consumeItem != null) {
            player.sendMessage(ChatColor.RED + "Not enough material left to create a gearblock.");
            return false;
        }

        BridgeManager.CreateResult result = this.bridgeManager.createGear(block);

        if (result == BridgeManager.CreateResult.NotCreated) {
            player.sendMessage(ChatColor.RED + block.getType().toString() + " cannot be used as a gearblock.");
            return false;
        } else if (result == BridgeManager.CreateResult.AlreadyExist) {
            player.sendMessage(ChatColor.RED + "That block is already a gearblock.");
            return true;
        }

        // Only consume the item once creation actually succeeded.
        Helper.consumeItem(player, consumeItem, consumeSlots);
        player.sendMessage(ChatColor.GREEN + "A gearblock has been created.");
        ParticleHelper.spawn(block, ParticleHelper.Type.Info);

        return true;
    }

    /**
     * Toggles linking for the clicked gearblock: unlinks an existing (non-drawn)
     * link, or searches along the clicked face for an end gearblock to link with.
     *
     * @return true when the click targeted a gearblock (whether or not linking succeeded)
     */
    private boolean linkGearblocks(PlayerInteractEvent event) {
        Player player = event.getPlayer();
        Block block = event.getClickedBlock();
        Gearblock gearblock1 = this.storage.getGearblock(new BlockCoord(block));

        if (gearblock1 == null) return false;

        if (!CastleGates.getCitadelManager().canBypass(player, block.getLocation())) {
            player.sendMessage(ChatColor.RED + "Citadel has prevented the creation of a link.");
            return false;
        }

        if (gearblock1.getLink() != null) {
            if (gearblock1.getLink().isDrawn()) {
                player.sendMessage(ChatColor.RED + "Cannot unlink gearblocks in a drawn bridge/gate.");
            } else {
                this.bridgeManager.removeLink(gearblock1.getLink());
                player.sendMessage(ChatColor.GREEN + "The gearblocks have been unlinked.");
            }
            return true;
        }

        FindGearResult result = findEndGear(block, event.getBlockFace());

        if (result == null) {
            event.getPlayer().sendMessage(ChatColor.RED + "End gearblock not found. Link distance is limited to " + CastleGates.getConfigManager().getMaxBridgeLength() + " blocks.");
        } else {
            linkGearblocks(player, gearblock1, result, true);
        }

        return true;
    }

    /**
     * Attempts to link two gearblocks, verifying Citadel permission at the far end.
     *
     * @param showError whether failures should be reported to the player
     * @return true when the link was created
     */
    private boolean linkGearblocks(Player player, Gearblock gearblock1, FindGearResult result, boolean showError) {
        Location loc = new Location(player.getWorld(), result.gearblock.getCoord().getX(), result.gearblock.getCoord().getY(), result.gearblock.getCoord().getZ());

        if (!CastleGates.getCitadelManager().canBypass(player, loc)) {
            if (showError) {
                player.sendMessage(ChatColor.RED + "Citadel has prevented the creation of a link.");
            }
            return false;
        }

        if (result.gearblock.getLink() != null) {
            if (showError) {
                player.sendMessage(ChatColor.RED + "The gearblock at [" + result.gearblock.getCoord().getX() + " " + result.gearblock.getCoord().getY() + " " + result.gearblock.getCoord().getZ() + "] already has a link. Remove it before creating a new one.");
                ParticleHelper.spawn(player, result.gearblock, ParticleHelper.Type.Warning);
            }
            return false;
        }

        if (this.bridgeManager.createLink(gearblock1, result.gearblock, result.distance)) {
            player.sendMessage(ChatColor.GREEN + "The gearblock has been linked with the gearblock at [" + result.gearblock.getCoord().getX() + " " + result.gearblock.getCoord().getY() + " " + result.gearblock.getCoord().getZ() + "].");
            ParticleHelper.spawn(player, gearblock1, ParticleHelper.Type.Info);
            ParticleHelper.spawn(player, result.gearblock, ParticleHelper.Type.Info);
            return true;
        }

        if (showError) {
            player.sendMessage(ChatColor.RED + "The gearblock at [" + result.gearblock.getCoord().getX() + " " + result.gearblock.getCoord().getY() + " " + result.gearblock.getCoord().getZ() + "] has a broken link which cannot be restored by using the clicked block. Click the block where the other gearblock used to be.");
            ParticleHelper.spawn(player, result.gearblock, ParticleHelper.Type.Warning);
        }

        return false;
    }

    /**
     * Walks from the start gearblock along {@code blockFace}, up to the configured
     * maximum bridge length, looking for another gearblock.
     *
     * @return the found gearblock and its distance, or null if none within range or
     *         if the first step lands directly on a gearblock (distance 0)
     */
    private FindGearResult findEndGear(Block startGearBlock, BlockFace blockFace) {
        UUID worldUID = startGearBlock.getWorld().getUID();
        int x = startGearBlock.getX();
        int y = startGearBlock.getY();
        int z = startGearBlock.getZ();

        for (int i = 0; i < CastleGates.getConfigManager().getMaxBridgeLength(); i++) {
            x += blockFace.getModX();
            y += blockFace.getModY();
            z += blockFace.getModZ();

            BlockCoord location = new BlockCoord(worldUID, x, y, z);
            Gearblock gearblock = this.storage.getGearblock(location);

            if (gearblock != null) {
                return i > 0 ? new FindGearResult(gearblock, i) : null;
            }
        }

        return null;
    }

    /** Reports link/timer/lock information about the clicked gearblock (or drawn block) to the player. */
    private void showGearInfo(PlayerInteractEvent event) {
        Player player = event.getPlayer();
        Block block = event.getClickedBlock();
        BlockCoord blockCoord = new BlockCoord(block);
        Gearblock gearblock = this.storage.getGearblock(blockCoord);

        if (gearblock == null) {
            // Not a gearblock; it may still be part of a drawn bridge or gate.
            BridgeManager.SearchBridgeBlockResult searchResult = this.bridgeManager.searchBridgeBlock(blockCoord);
            switch (searchResult) {
                case Bridge:
                    player.sendMessage("Bridge block");
                    break;
                case Gates:
                    player.sendMessage("Gate block");
                    break;
                default:
                    break;
            }
            return;
        }

        if (!CastleGates.getCitadelManager().canViewInformation(player, block.getLocation())) {
            // Without view permission, only reveal that this is a gearblock.
            player.sendMessage(ChatColor.RED + "Gearblock");
        }
        else {
            if (gearblock.getLink() == null) {
                player.sendMessage(ChatColor.GREEN + "The gearblock is not linked.");
                if (gearblock.getBrokenLink() != null) {
                    player.sendMessage(ChatColor.GREEN + "But contains " + gearblock.getBrokenLink().getBlocks().size() + " drawn blocks.");
                }
            }
            else {
                // Report the opposite end of the link.
                Gearblock gearblock2 = gearblock.getLink().getGearblock1() == gearblock ? gearblock.getLink().getGearblock2() : gearblock.getLink().getGearblock1();
                player.sendMessage(ChatColor.GREEN + "The gearblock has been linked to the gearblock at [" + gearblock2.getCoord().getX() + " " + gearblock2.getCoord().getY() + " " + gearblock2.getCoord().getZ() + "].");

                if (gearblock.getLink().isDrawn()) {
                    player.sendMessage(ChatColor.GREEN + "The bridge/gate is drawn.");
                }

                ParticleHelper.spawn(player, gearblock2, ParticleHelper.Type.Info);
            }

            if (gearblock.getTimer() != null) {
                String message = "Timer: " + gearblock.getTimer() + " sec to process operation " + gearblock.getTimerOperation() + " in " + gearblock.getTimerMode() + " mode.";
                player.sendMessage(ChatColor.GREEN + message);
            }

            if (gearblock.getLockedGearblocks() != null || gearblock.getLockGearblock() != null) {
                player.sendMessage(ChatColor.YELLOW + "Locked");
            }
        }
    }

    /**
     * Runs a power-state change through the bridge manager, notifying nearby players.
     * Re-entrant calls for the same block and no-op power changes are ignored.
     *
     * @return true when the gearblock was processed
     */
    private boolean processBlock(Block block, boolean isPowered) {
        if (this.processingBlocks.contains(block)) return false;

        Gearblock gearblock = this.storage.getGearblock(new BlockCoord(block));

        if (gearblock == null || gearblock.isPowered() == isPowered) return false;

        this.processingBlocks.add(block);

        try
        {
            List<Player> players = Helper.getNearbyPlayers(block.getLocation());
            PowerResult result = this.bridgeManager.processGearblock(
                block.getWorld(),
                gearblock,
                isPowered,
                players
            );

            // A locked door in DOOR timer mode is treated as "nothing happened".
            if (result.status == PowerResult.Status.Locked && gearblock.getTimerMode() == TimerMode.DOOR) {
                result = PowerResult.Unchanged;
            }

            PowerResultHelper.showStatus(block.getLocation(), players, result);
        } finally {
            // Always release the re-entrancy guard, even if processing throws.
            this.processingBlocks.remove(block);
        }

        return true;
    }

    /**
     * Toggles a timer on the clicked gearblock: sets one from the player's state
     * when absent, removes the existing one otherwise.
     *
     * @return true always (the click is consumed in TIMER mode)
     */
    private boolean setGearblockTimer(PlayerInteractEvent event, PlayerStateManager.PlayerState state) {
        Player player = event.getPlayer();

        if (!CastleGates.getConfigManager().isTimerEnabled()) {
            player.sendMessage(ChatColor.RED + "The timer function is disabled on this server.");
            return true;
        }

        Block block = event.getClickedBlock();
        BlockCoord blockCoord = new BlockCoord(block);
        Gearblock gearblock = this.storage.getGearblock(blockCoord);

        if (gearblock == null) {
            player.sendMessage(ChatColor.RED + "That block is not a gearblock.");
            return true;
        }

        if (!CastleGates.getCitadelManager().canBypass(player, block.getLocation())) {
            player.sendMessage(ChatColor.RED + "Citadel has prevented that operation.");
            return true;
        }

        String message;

        if (gearblock.getTimer() == null) {
            this.storage.setGearblockTimer(gearblock, state.timer, state.timerOperation, state.timerMode);
            String modeText = state.timerMode == TimerMode.DEFAULT ? "DEFAULT" : "DOOR";
            message = ChatColor.GREEN + "The gearblock's timer has been set to " + state.timer + " sec to process operation " + state.timerOperation + " in " + modeText + " mode.";
        } else {
            this.storage.clearGearblockTimer(gearblock);
            message = ChatColor.YELLOW + "The gearblock's timer has been removed.";
        }

        player.sendMessage(message);
        ParticleHelper.spawn(player, gearblock, ParticleHelper.Type.Info);

        return true;
    }

    /**
     * Handles a right click on a non-gearblock: if the block sits between the two
     * ends of a "simple" link on any axis, pulses that link (power on, then off).
     *
     * @return true when a link was activated
     */
    private boolean simpleActivate(PlayerInteractEvent event) {
        Block block = event.getClickedBlock();
        BlockCoord blockCoord = new BlockCoord(block);
        Gearblock gearblock = this.storage.getGearblock(blockCoord);

        if (gearblock != null) return false;

        // Look for a link whose two ends straddle the clicked block on some axis.
        GearblockLink link = null;
        if ((link = getLink(blockCoord.getForward(), blockCoord.getBackward())) == null
            && (link = getLink(blockCoord.getRight(), blockCoord.getLeft())) == null
            && (link = getLink(blockCoord.getTop(), blockCoord.getBottom())) == null
        )
        {
            return false;
        }

        if (!this.bridgeManager.isSimpleGearblock(link.getGearblock1(), event.getPlayer())
            || !this.bridgeManager.isSimpleGearblock(link.getGearblock2(), event.getPlayer())
        )
        {
            return false;
        }

        BlockCoord processCoord = link.getGearblock1().getCoord();
        Block processBlock = block.getWorld().getBlockAt(processCoord.getX(), processCoord.getY(), processCoord.getZ());

        // Skip blocks already queued for redstone-driven processing.
        if (this.waitingBlocks.contains(processBlock)) return false;

        // Pulse: power on, then immediately off.
        processBlock(processBlock, true);
        processBlock(processBlock, false);

        return true;
    }

    /**
     * Returns the link whose two gearblocks sit exactly at {@code start} and
     * {@code end} (in either order), or null.
     */
    private GearblockLink getLink(BlockCoord start, BlockCoord end) {
        Gearblock gearblock = this.storage.getGearblock(start);
        GearblockLink link = gearblock != null ? gearblock.getLink() : null;

        if (link != null &&
            (
                link.getGearblock1().getCoord().equals(start) && link.getGearblock2().getCoord().equals(end)
                || link.getGearblock1().getCoord().equals(end) && link.getGearblock2().getCoord().equals(start)
            )
        )
        {
            return link;
        }

        return null;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hazelcast;
import java.io.InputStream;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import com.hazelcast.client.HazelcastClient;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.XmlClientConfigBuilder;
import com.hazelcast.config.Config;
import com.hazelcast.config.XmlConfigBuilder;
import com.hazelcast.core.Hazelcast;
import com.hazelcast.core.HazelcastInstance;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.support.DefaultComponent;
import org.apache.camel.support.ResourceHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.component.hazelcast.HazelcastConstants.HAZELCAST_CONFIGU_PARAM;
import static org.apache.camel.component.hazelcast.HazelcastConstants.HAZELCAST_CONFIGU_URI_PARAM;
import static org.apache.camel.component.hazelcast.HazelcastConstants.HAZELCAST_INSTANCE_NAME_PARAM;
import static org.apache.camel.component.hazelcast.HazelcastConstants.HAZELCAST_INSTANCE_PARAM;
public abstract class HazelcastDefaultComponent extends DefaultComponent {
private static final Logger LOGGER = LoggerFactory.getLogger(HazelcastDefaultComponent.class);
// Hazelcast instances created by this component itself (not user-supplied);
// these are shut down in doStop().
private final Set<HazelcastInstance> customHazelcastInstances;
// Component-level instance shared by endpoints when no per-endpoint instance is resolved.
@Metadata(label = "advanced")
private HazelcastInstance hazelcastInstance;
// Either node mode (embedded member) or client mode; defaults to node mode.
@Metadata(label = "advanced", defaultValue = "" + HazelcastConstants.HAZELCAST_NODE_MODE)
private String hazelcastMode = HazelcastConstants.HAZELCAST_NODE_MODE;
/** Creates the component without a CamelContext. */
public HazelcastDefaultComponent() {
super();
// LinkedHashSet keeps shutdown order equal to creation order in doStop().
this.customHazelcastInstances = new LinkedHashSet<>();
}
/** Creates the component bound to the given CamelContext. */
public HazelcastDefaultComponent(final CamelContext context) {
super(context);
// LinkedHashSet keeps shutdown order equal to creation order in doStop().
this.customHazelcastInstances = new LinkedHashSet<>();
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
// use the given hazelcast Instance or create one if not given
HazelcastInstance hzInstance;
if (ObjectHelper.equal(getHazelcastMode(), HazelcastConstants.HAZELCAST_NODE_MODE)) {
hzInstance = getOrCreateHzInstance(getCamelContext(), parameters);
} else {
hzInstance = getOrCreateHzClientInstance(getCamelContext(), parameters);
}
String defaultOperation = getAndRemoveOrResolveReferenceParameter(parameters, HazelcastConstants.OPERATION_PARAM, String.class);
if (defaultOperation == null) {
defaultOperation = getAndRemoveOrResolveReferenceParameter(parameters, "defaultOperation", String.class);
}
HazelcastDefaultEndpoint endpoint = doCreateEndpoint(uri, remaining, parameters, hzInstance);
if (defaultOperation != null) {
endpoint.setDefaultOperation(HazelcastOperation.getHazelcastOperation(defaultOperation));
}
return endpoint;
}
/** Subclass hook: builds the concrete endpoint type for this component using the resolved instance. */
protected abstract HazelcastDefaultEndpoint doCreateEndpoint(String uri, String remaining, Map<String, Object> parameters, HazelcastInstance hzInstance) throws Exception;
/** No component-specific startup work; defers to the superclass. */
@Override
public void doStart() throws Exception {
super.doStart();
}
/**
 * Shuts down only the Hazelcast instances this component created itself;
 * user-supplied instances are left running since their lifecycle is managed externally.
 */
@Override
public void doStop() throws Exception {
    customHazelcastInstances.forEach(instance -> instance.getLifecycleService().shutdown());
    customHazelcastInstances.clear();
    super.doStop();
}
/** Returns the component-level Hazelcast instance, or null if none was set. */
public HazelcastInstance getHazelcastInstance() {
return hazelcastInstance;
}
/**
 * The Hazelcast instance reference which can be used for the Hazelcast endpoints.
 * If you don't specify an instance reference, Camel uses the default Hazelcast
 * instance created by the camel-hazelcast component itself.
 */
public void setHazelcastInstance(HazelcastInstance hazelcastInstance) {
this.hazelcastInstance = hazelcastInstance;
}
/** Returns the configured Hazelcast mode (node or client). */
public String getHazelcastMode() {
return hazelcastMode;
}
/**
 * The Hazelcast mode, deciding which kind of instance should be used.
 * If you don't specify the mode, node mode is the default.
 */
public void setHazelcastMode(String hazelcastMode) {
this.hazelcastMode = hazelcastMode;
}
/**
 * Resolves the Hazelcast member instance for an endpoint, in order of precedence:
 * a 'hazelcastInstance' query parameter reference, an existing instance looked up
 * by name, a Config supplied by reference or XML URI, and finally a freshly created
 * default instance. Falls back to the component-level instance when nothing else
 * resolves. Instances created here are tracked so doStop() can shut them down.
 */
protected HazelcastInstance getOrCreateHzInstance(CamelContext context, Map<String, Object> parameters) throws Exception {
HazelcastInstance hzInstance = null;
Config config = null;
// Query param named 'hazelcastInstance' (if exists) overrides the instance that was set
hzInstance = resolveAndRemoveReferenceParameter(parameters, HAZELCAST_INSTANCE_PARAM, HazelcastInstance.class);
// Check if an already created instance is given then just get instance by its name.
if (hzInstance == null && parameters.get(HAZELCAST_INSTANCE_NAME_PARAM) != null) {
hzInstance = Hazelcast.getHazelcastInstanceByName((String) parameters.get(HAZELCAST_INSTANCE_NAME_PARAM));
}
// If instance neither supplied nor found by name, try to lookup its config
// as reference or as xml configuration file.
if (hzInstance == null) {
config = resolveAndRemoveReferenceParameter(parameters, HAZELCAST_CONFIGU_PARAM, Config.class);
if (config == null) {
String configUri = getAndRemoveParameter(parameters, HAZELCAST_CONFIGU_URI_PARAM, String.class);
if (configUri != null) {
// Resolve Camel property placeholders in the config URI before loading it.
configUri = getCamelContext().resolvePropertyPlaceholders(configUri);
}
if (configUri != null) {
InputStream is = ResourceHelper.resolveMandatoryResourceAsInputStream(context, configUri);
config = new XmlConfigBuilder(is).build();
}
}
if (hazelcastInstance == null && config == null) {
// No component-level instance and no explicit config: build a default config.
config = new XmlConfigBuilder().build();
// Disable the version check
config.getProperties().setProperty("hazelcast.version.check.enabled", "false");
config.getProperties().setProperty("hazelcast.phone.home.enabled", "false");
hzInstance = Hazelcast.newHazelcastInstance(config);
} else if (config != null) {
// A named config may map to an existing shared instance; an unnamed one
// always creates a brand-new instance.
if (ObjectHelper.isNotEmpty(config.getInstanceName())) {
hzInstance = Hazelcast.getOrCreateHazelcastInstance(config);
} else {
hzInstance = Hazelcast.newHazelcastInstance(config);
}
}
if (hzInstance != null) {
// Track instances obtained here for shutdown in doStop().
// NOTE(review): an instance returned by getOrCreateHazelcastInstance may be
// shared with other users yet is still shut down in doStop — confirm intended.
if (this.customHazelcastInstances.add(hzInstance)) {
LOGGER.debug("Add managed HZ instance {}", hzInstance.getName());
}
}
}
return hzInstance == null ? hazelcastInstance : hzInstance;
}
/**
 * Resolves the {@link HazelcastInstance} for a client-mode endpoint, using the same
 * precedence as {@code getOrCreateHzInstance}: an instance reference in the endpoint
 * parameters, a running instance looked up by name, a client instance created from a
 * referenced {@link ClientConfig} or XML resource, and finally a client built from the
 * default client configuration.
 *
 * @param context    the Camel context used to resolve configuration resources
 * @param parameters endpoint parameters; entries consumed here are removed from the map
 * @return the resolved or newly created client instance, falling back to the
 *         component-level {@code hazelcastInstance} if nothing else applied
 * @throws Exception if the configuration resource cannot be resolved or parsed
 */
protected HazelcastInstance getOrCreateHzClientInstance(CamelContext context, Map<String, Object> parameters) throws Exception {
    HazelcastInstance hzInstance = null;
    ClientConfig config = null;
    // Query param named 'hazelcastInstance' (if exists) overrides the instance that was set
    hzInstance = resolveAndRemoveReferenceParameter(parameters, HAZELCAST_INSTANCE_PARAM, HazelcastInstance.class);
    // Check if an already created instance is given then just get instance by its name.
    if (hzInstance == null && parameters.get(HAZELCAST_INSTANCE_NAME_PARAM) != null) {
        hzInstance = Hazelcast.getHazelcastInstanceByName((String) parameters.get(HAZELCAST_INSTANCE_NAME_PARAM));
    }
    // If instance neither supplied nor found by name, try to lookup its config
    // as reference or as xml configuration file.
    if (hzInstance == null) {
        config = resolveAndRemoveReferenceParameter(parameters, HAZELCAST_CONFIGU_PARAM, ClientConfig.class);
        if (config == null) {
            String configUri = getAndRemoveParameter(parameters, HAZELCAST_CONFIGU_URI_PARAM, String.class);
            if (configUri != null) {
                configUri = getCamelContext().resolvePropertyPlaceholders(configUri);
            }
            if (configUri != null) {
                // try-with-resources ensures the stream is closed even if the XML parse
                // throws (previously the stream was never closed explicitly).
                try (InputStream is = ResourceHelper.resolveMandatoryResourceAsInputStream(context, configUri)) {
                    config = new XmlClientConfigBuilder(is).build();
                }
            }
        }
        if (hazelcastInstance == null && config == null) {
            config = new XmlClientConfigBuilder().build();
            // Disable the version check
            config.getProperties().setProperty("hazelcast.version.check.enabled", "false");
            config.getProperties().setProperty("hazelcast.phone.home.enabled", "false");
            hzInstance = HazelcastClient.newHazelcastClient(config);
        } else if (config != null) {
            hzInstance = HazelcastClient.newHazelcastClient(config);
        }
        if (hzInstance != null) {
            // Track instances created here so the component can manage their lifecycle.
            if (this.customHazelcastInstances.add(hzInstance)) {
                LOGGER.debug("Add managed HZ instance {}", hzInstance.getName());
            }
        }
    }
    return hzInstance == null ? hazelcastInstance : hzInstance;
}
}
| |
/*
* Copyright 2018 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.activation;
import com.googlecode.junit.ext.checkers.OSChecker;
import com.thoughtworks.go.plugin.activation.test.*;
import com.thoughtworks.go.plugin.api.GoApplicationAccessor;
import com.thoughtworks.go.plugin.api.GoPlugin;
import com.thoughtworks.go.plugin.api.GoPluginIdentifier;
import com.thoughtworks.go.plugin.api.TestGoPluginExtensionPoint;
import com.thoughtworks.go.plugin.api.annotation.Extension;
import com.thoughtworks.go.plugin.api.request.GoPluginApiRequest;
import com.thoughtworks.go.plugin.api.response.GoPluginApiResponse;
import com.thoughtworks.go.plugin.infra.FelixGoPluginOSGiFramework;
import com.thoughtworks.go.plugin.infra.plugininfo.DefaultPluginRegistry;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.ZipUtil;
import lib.test.DummyTestPluginInLibDirectory;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.ops4j.pax.tinybundles.core.InnerClassStrategy;
import org.ops4j.pax.tinybundles.core.TinyBundle;
import org.ops4j.pax.tinybundles.core.TinyBundles;
import org.osgi.framework.*;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.zip.ZipInputStream;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
/**
 * Integration tests for {@code DefaultGoPluginActivator}: builds tiny OSGi bundles containing
 * various extension classes, installs them into a Felix framework, and verifies which classes
 * are registered as {@link GoPlugin} services and which cause the plugin descriptor to be
 * marked invalid.
 */
public class DefaultGoPluginActivatorIntegrationTest {
    private static final File TMP_DIR = new File("./tmp");
    private static final String BUNDLE_DIR_WHICH_HAS_PROPER_ACTIVATOR = "DefaultGoPluginActivatorIntegrationTest.bundleDirWhichHasProperActivator";
    // NOTE: this text (including the missing space after the period) must match the exact
    // message produced by the activator.
    private static final String NO_EXT_ERR_MSG = "No extensions found in this plugin.Please check for @Extension annotations";
    private static final String GO_TEST_DUMMY_SYMBOLIC_NAME = "Go-Test-Dummy-Symbolic-Name";
    private FelixGoPluginOSGiFramework framework;
    private StubOfDefaultPluginRegistry registry;

    @Before
    public void setUp() {
        registry = new StubOfDefaultPluginRegistry();
        framework = new FelixGoPluginOSGiFramework(registry, new SystemEnvironment());
        framework.start();
    }

    @Test
    public void shouldRegisterAClassImplementingGoPluginAsAnOSGiService() throws Exception {
        assertThatPluginWithThisExtensionClassLoadsSuccessfully(DummyTestPlugin.class);
    }

    @Test
    public void shouldNotRegisterAsAnOSGiServiceAClassImplementingGoPluginWithoutAPublicConstructor() throws Exception {
        Bundle bundle = installBundleWithClasses(DummyTestPluginWithNonPublicDefaultConstructor.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterAsAnOSGiServiceAClassImplementingGoPluginWithOnlyAOneArgConstructor() throws Exception {
        Bundle bundle = installBundleWithClasses(DummyGoPluginWithOneArgConstructorOnly.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        String error = descriptor.getStatus().getMessages().get(0);
        assertThat(error.contains("DummyGoPluginWithOneArgConstructorOnly"), is(true));
        assertThat(error.contains("Make sure it and all of its parent classes have a default constructor."), is(true));
    }

    @Test
    public void shouldNotRegisterAsAnOSGiServiceAnExtensionClassWhichDoesNotImplementAGoExtensionPoint() throws Exception {
        Bundle bundle = installBundleWithClasses(NotAGoExtensionPoint.class, NotAGoExtensionAsItDoesNotImplementAnyExtensionPoints.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotLoadClassesFoundInMETA_INFEvenIfTheyAreProperGoExtensionPoints() throws Exception {
        File bundleWithActivator = createBundleWithActivator(BUNDLE_DIR_WHICH_HAS_PROPER_ACTIVATOR, DummyTestPlugin.class);
        File sourceClassFile = new File(bundleWithActivator, "com/thoughtworks/go/plugin/activation/test/DummyTestPlugin.class");
        File destinationFile = new File(bundleWithActivator, "META-INF/com/thoughtworks/go/plugin/activation/test/");
        FileUtils.moveFileToDirectory(sourceClassFile, destinationFile, true);
        Bundle bundle = installBundleFoundInDirectory(bundleWithActivator);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotFailToRegisterOtherClassesIfAClassCannotBeLoadedBecauseOfWrongPath() throws Exception {
        File bundleWithActivator = createBundleWithActivator(BUNDLE_DIR_WHICH_HAS_PROPER_ACTIVATOR, DummyTestPlugin.class);
        File sourceClassFile = new File(bundleWithActivator, "com/thoughtworks/go/plugin/activation/test/DummyTestPlugin.class");
        File destinationFile = new File(bundleWithActivator, "ABC-DEF/com/thoughtworks/go/plugin/activation/test/");
        FileUtils.copyFileToDirectory(sourceClassFile, destinationFile, true);
        Bundle bundle = installBundleFoundInDirectory(bundleWithActivator);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
    }

    @Test
    public void shouldNotLoadAClassFoundInLibDirectoryEvenIfItIsAProperGoExtensionPoints() throws Exception {
        Bundle bundle = installBundleWithClasses(DummyTestPluginInLibDirectory.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterAsAnOSGiServiceAClassWhichIsAbstract() throws Exception {
        Bundle bundle = installBundleWithClasses(AbstractTestPlugin.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterAsAnOSGiServiceAClassWhichIsNotPublic() throws Exception {
        Bundle bundle = installBundleWithClasses(DummyTestPluginWhichIsNotPublic.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterAsAnOSGiServiceAnInterfaceEvenIfItImplementsAGoExtensionPointInterface() throws Exception {
        Bundle bundle = installBundleWithClasses(TestGoPluginExtensionInterface.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterAsAnOSGiServiceAClassWhichThrowsExceptionDuringInstantiation() throws Exception {
        Bundle bundle = installBundleWithClasses(DummyTestPlugin.class, DummyGoPluginWhichThrowsAnExceptionDuringConstruction.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        String error = descriptor.getStatus().getMessages().get(0);
        assertThat(error.contains("DummyGoPluginWhichThrowsAnExceptionDuringConstruction"), is(true));
        assertThat(error.contains("java.lang.RuntimeException: Ouch! I failed!"), is(true));
    }

    @Test
    public void shouldRegisterANestedClassImplementingGoPluginAsAnOSGiService() throws Exception {
        if (new OSChecker(OSChecker.WINDOWS).satisfy()) {
            return; // The class files in this test become too big for a Windows filesystem to handle.
        }
        File bundleWithActivator = createBundleWithActivator(BUNDLE_DIR_WHICH_HAS_PROPER_ACTIVATOR, TestPluginOuterClass.class,
                TestPluginOuterClass.NestedClass.class,
                TestPluginOuterClass.InnerClass.class,
                TestPluginOuterClass.InnerClass.SecondLevelInnerClass.class,
                TestPluginOuterClass.InnerClass.SecondLevelInnerClass.TestPluginThirdLevelInnerClass.class,
                TestPluginOuterClass.InnerClass.SecondLevelSiblingInnerClassNoDefaultConstructor.class);
        BundleContext installedBundledContext = bundleContext(installBundleFoundInDirectory(bundleWithActivator));
        ServiceReference<?>[] references = installedBundledContext.getServiceReferences(GoPlugin.class.getName(), null);
        String[] services = toSortedServiceClassNames(installedBundledContext, references);
        assertEquals(Arrays.toString(services), 4, services.length);
        assertEquals(TestPluginOuterClass.class.getName(), services[0]);
        assertEquals(TestPluginOuterClass.InnerClass.class.getName(), services[1]);
        assertEquals(TestPluginOuterClass.InnerClass.SecondLevelInnerClass.TestPluginThirdLevelInnerClass.class.getName(), services[2]);
        assertEquals(TestPluginOuterClass.NestedClass.class.getName(), services[3]);
    }

    @Test
    public void shouldRegisterAsAnOSGiServiceADerivedClassWhoseAncestorImplementsAnExtensionPoint() throws Exception {
        BundleContext installedBundledContext = bundleContext(installBundleWithClasses(TestPluginThatIsADerivedClass.class,
                DummyTestPlugin.class, TestPluginThatIsADerivedClass.class.getSuperclass()));
        ServiceReference<?>[] references = installedBundledContext.getServiceReferences(GoPlugin.class.getName(), null);
        String[] services = toSortedServiceClassNames(installedBundledContext, references);
        assertEquals(Arrays.toString(services), 2, services.length);
        assertEquals(DummyTestPlugin.class.getName(), services[0]);
        assertEquals(TestPluginThatIsADerivedClass.class.getName(), services[1]);
    }

    @Test
    public void shouldRegisterOneInstanceForEachExtensionPointAnExtensionImplements() throws Exception {
        BundleContext installedBundledContext = bundleContext(installBundleWithClasses(TestGoPluginExtensionThatImplementsTwoExtensionPoints.class,
                DummyTestPlugin.class));
        ServiceReference<?>[] references = installedBundledContext.getServiceReferences(GoPlugin.class.getName(), null);
        String[] services = toSortedServiceClassNames(installedBundledContext, references);
        assertEquals(Arrays.toString(services), 2, services.length);
        assertEquals(DummyTestPlugin.class.getName(), services[0]);
        assertEquals(TestGoPluginExtensionThatImplementsTwoExtensionPoints.class.getName(), services[1]);
        references = installedBundledContext.getServiceReferences(TestGoPluginExtensionPoint.class.getName(), null);
        assertEquals(1, references.length);
        assertEquals(TestGoPluginExtensionThatImplementsTwoExtensionPoints.class.getName(), installedBundledContext.getService(references[0]).getClass().getName());
        Object testExtensionImplementation = getImplementationOfType(installedBundledContext, references, TestGoPluginExtensionThatImplementsTwoExtensionPoints.class);
        references = installedBundledContext.getServiceReferences(GoPlugin.class.getName(), null);
        assertEquals(2, references.length);
        Object testPluginImplementation = getImplementationOfType(installedBundledContext, references, TestGoPluginExtensionThatImplementsTwoExtensionPoints.class);
        // Both extension points must be backed by the same plugin instance.
        assertSame(testExtensionImplementation, testPluginImplementation);
    }

    @Test
    public void shouldRegisterOneInstanceForEachExtensionPointWhereThePluginClassExtendsABaseClassWhichIsAnExtensionAndImplementsAGoExtensionPoint() throws Exception {
        BundleContext installedBundledContext = bundleContext(installBundleWithClasses(ClassThatExtendsTestExtensionPoint.class,
                ClassThatExtendsTestExtensionPoint.ClassThatExtendsTwoGoExtensionPoint.class, TestGoPluginExtensionPoint.class));
        ServiceReference<?>[] references = installedBundledContext.getServiceReferences(TestGoPluginExtensionPoint.class.getName(), null);
        assertEquals(1, references.length);
        Object testExtensionImplementation = getImplementationOfType(installedBundledContext, references, ClassThatExtendsTestExtensionPoint.ClassThatExtendsTwoGoExtensionPoint.class);
        references = installedBundledContext.getServiceReferences(GoPlugin.class.getName(), null);
        assertEquals(1, references.length);
        Object testPluginImplementation = getImplementationOfType(installedBundledContext, references, ClassThatExtendsTestExtensionPoint.ClassThatExtendsTwoGoExtensionPoint.class);
        assertSame(testExtensionImplementation, testPluginImplementation);
    }

    @Test
    public void shouldNotRegisterAnAnonymousClassThatImplementsAnExtensionPoint() throws IOException {
        Bundle bundle = installBundleWithClasses(DummyClassProvidingAnonymousClass.getAnonymousClass().getClass());
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterAnAnonymousClassDefinedWithinAnInnerClassThatImplementsAnExtensionPoint() throws IOException {
        Bundle bundle = installBundleWithClasses(DummyClassProvidingAnonymousClass.DummyInnerClassProvidingAnonymousClass.getAnonymousClass().getClass());
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterLocalInnerClassesThatImplementAnExtensionPoint() throws IOException {
        Bundle bundle = installBundleWithClasses(DummyClassWithLocalInnerClass.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldNotRegisterPublicInnerClassesThatImplementAnExtensionPointInsidePackageLevelClass() throws IOException {
        Bundle bundle = installBundleWithClasses(PackageLevelClassWithPublicInnerClass.class, PackageLevelClassWithPublicInnerClass.DummyInnerClassWithExtension.class);
        assertThat(bundle.getState(), is(Bundle.UNINSTALLED));
        GoPluginDescriptor descriptor = registry.getPlugin(GO_TEST_DUMMY_SYMBOLIC_NAME);
        assertThat(descriptor.isInvalid(), is(true));
        assertThat(descriptor.getStatus().getMessages().contains(NO_EXT_ERR_MSG), is(true));
    }

    @Test
    public void shouldBeAbleToUsePackagesFromJavaxWithinThePluginSinceItHasBeenExportedUsingBootDelegationInTheOSGIFramework() throws Exception {
        assertThatPluginWithThisExtensionClassLoadsSuccessfully(ClassWhichUsesSomeClassInJavaxPackage.class);
    }

    @Test
    public void shouldBeAbleToUsePackagesFromOrgXmlSaxPackageWithinThePluginSinceItHasBeenExportedUsingBootDelegationInTheOSGIFramework() throws Exception {
        assertThatPluginWithThisExtensionClassLoadsSuccessfully(ClassWhichUsesSomeClassesInOrgXMLSaxPackage.class);
    }

    @Test
    public void shouldBeAbleToUsePackagesFromOrgW3cDomPackageWithinThePluginSinceItHasBeenExportedUsingBootDelegationInTheOSGIFramework() throws Exception {
        assertThatPluginWithThisExtensionClassLoadsSuccessfully(ClassWhichUsesSomeClassesInOrgW3CDomPackage.class);
    }

    @After
    public void tearDown() throws Exception {
        framework.stop();
        FileUtils.deleteDirectory(TMP_DIR);
    }

    /** Installs a bundle with the given class and asserts it was registered as a GoPlugin service. */
    private void assertThatPluginWithThisExtensionClassLoadsSuccessfully(Class<?> extensionClass) throws IOException, InvalidSyntaxException {
        BundleContext installedBundleContext = bundleContext(installBundleWithClasses(extensionClass));
        ServiceReference<?>[] references = installedBundleContext.getServiceReferences(GoPlugin.class.getName(), null);
        assertEquals("No service registered for GoPlugin class", 1, references.length);
        assertEquals("Symbolic Name property should be present", GO_TEST_DUMMY_SYMBOLIC_NAME, references[0].getProperty(Constants.BUNDLE_SYMBOLICNAME));
        assertEquals(extensionClass.getName(), installedBundleContext.getService(references[0]).getClass().getName());
    }

    /** Resolves each reference to its service's class name and returns the sorted names. */
    private String[] toSortedServiceClassNames(BundleContext installedBundledContext, ServiceReference<?>[] references) {
        if (references == null) {
            return new String[0];
        }
        String[] services = new String[references.length];
        for (int i = 0; i < references.length; i++) {
            ServiceReference<?> reference = references[i];
            services[i] = installedBundledContext.getService(reference).getClass().getName();
        }
        Arrays.sort(services);
        return services;
    }

    /** Returns the registered service instance whose concrete class is exactly {@code type}. */
    private Object getImplementationOfType(BundleContext installedBundledContext, ServiceReference<?>[] references, Class<?> type) {
        if (references != null) {
            for (ServiceReference<?> reference : references) {
                Object service = installedBundledContext.getService(reference);
                if (service.getClass().getName().equals(type.getName())) {
                    return service;
                }
            }
        }
        // Fail loudly for null references too: the previous code returned "new String[0]"
        // here, silently handing an unrelated object back to the caller.
        throw new RuntimeException("Class type not found: " + type);
    }

    private Bundle installBundleWithClasses(Class... classesToBeLoaded) throws IOException {
        return installBundleFoundInDirectory(createBundleWithActivator(BUNDLE_DIR_WHICH_HAS_PROPER_ACTIVATOR, classesToBeLoaded));
    }

    /** Registers a fake descriptor for the exploded bundle directory and loads it into Felix. */
    private Bundle installBundleFoundInDirectory(File bundleWithActivator) {
        GoPluginDescriptor pluginDescriptor = new GoPluginDescriptor(GO_TEST_DUMMY_SYMBOLIC_NAME, "1", null, null, bundleWithActivator, true);
        registry.fakeRegistrationOfPlugin(pluginDescriptor);
        return framework.loadPlugin(pluginDescriptor);
    }

    private BundleContext bundleContext(Bundle bundle) {
        return bundle.getBundleContext();
    }

    /**
     * Builds a TinyBundle containing the activator plus the given classes and explodes it
     * into a directory under {@link #TMP_DIR}.
     */
    private File createBundleWithActivator(String destinationDir, Class... classesToBeAdded) throws IOException {
        TinyBundle bundleBeingBuilt = TinyBundles.bundle()
                .add(GoPluginActivator.class)
                .add(DefaultGoPluginActivator.class, InnerClassStrategy.ALL)
                .set(Constants.BUNDLE_ACTIVATOR, DefaultGoPluginActivator.class.getCanonicalName())
                .set(Constants.BUNDLE_CLASSPATH, ".,lib/dependency.jar")
                .set(Constants.BUNDLE_SYMBOLICNAME, GO_TEST_DUMMY_SYMBOLIC_NAME);
        for (Class aClass : classesToBeAdded) {
            bundleBeingBuilt.add(aClass, InnerClassStrategy.NONE);
        }
        // try-with-resources closes the stream even when unzipping throws; the previous
        // IOUtils.closeQuietly call only covered the happy path and is deprecated.
        try (ZipInputStream src = new ZipInputStream(bundleBeingBuilt.build())) {
            return explodeBundleIntoDirectory(src, destinationDir);
        }
    }

    private File explodeBundleIntoDirectory(ZipInputStream src, String destinationDir) throws IOException {
        File destinationPluginBundleLocation = new File(TMP_DIR, destinationDir);
        destinationPluginBundleLocation.mkdirs();
        new ZipUtil().unzip(src, destinationPluginBundleLocation);
        return destinationPluginBundleLocation;
    }

    /** Registry stub that lets tests pre-register a descriptor without going through scanning. */
    private class StubOfDefaultPluginRegistry extends DefaultPluginRegistry {
        void fakeRegistrationOfPlugin(GoPluginDescriptor pluginDescriptor) {
            idToDescriptorMap.putIfAbsent(pluginDescriptor.id(), pluginDescriptor);
        }
    }
}
/**
 * Test fixture: a package-private (non-public) plugin implementation carrying
 * {@code @Extension}. Used to verify that the activator refuses to register
 * extension classes which are not public. All methods throw because they are
 * never expected to be invoked.
 */
@Extension
class DummyTestPluginWhichIsNotPublic implements GoPlugin {
    @Override
    public void initializeGoApplicationAccessor(GoApplicationAccessor goApplicationAccessor) {
        throw new UnsupportedOperationException();
    }

    @Override
    public GoPluginApiResponse handle(GoPluginApiRequest requestMessage) {
        throw new UnsupportedOperationException();
    }

    @Override
    public GoPluginIdentifier pluginIdentifier() {
        throw new UnsupportedOperationException();
    }
}
/**
 * Test fixture: a package-level (non-public) outer class containing a public inner
 * class annotated with {@code @Extension}. Used to verify that such inner classes
 * are not registered as OSGi services. All methods throw because they are never
 * expected to be invoked.
 */
class PackageLevelClassWithPublicInnerClass {
    @Extension
    public class DummyInnerClassWithExtension implements GoPlugin {
        @Override
        public void initializeGoApplicationAccessor(GoApplicationAccessor goApplicationAccessor) {
            throw new UnsupportedOperationException();
        }

        @Override
        public GoPluginApiResponse handle(GoPluginApiRequest requestMessage) {
            throw new UnsupportedOperationException();
        }

        @Override
        public GoPluginIdentifier pluginIdentifier() {
            throw new UnsupportedOperationException();
        }
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iot.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Describes an action to republish to another topic.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RepublishAction implements Serializable, Cloneable, StructuredPojo {

    /** The ARN of the IAM role that grants access. */
    private String roleArn;

    /** The name of the MQTT topic. */
    private String topic;

    /** The Quality of Service (QoS) level to use when republishing messages. */
    private Integer qos;

    /**
     * Sets the ARN of the IAM role that grants access.
     *
     * @param roleArn
     *        The ARN of the IAM role that grants access.
     */
    public void setRoleArn(String roleArn) {
        this.roleArn = roleArn;
    }

    /**
     * Returns the ARN of the IAM role that grants access.
     *
     * @return The ARN of the IAM role that grants access.
     */
    public String getRoleArn() {
        return this.roleArn;
    }

    /**
     * Sets the ARN of the IAM role that grants access.
     *
     * @param roleArn
     *        The ARN of the IAM role that grants access.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RepublishAction withRoleArn(String roleArn) {
        setRoleArn(roleArn);
        return this;
    }

    /**
     * Sets the name of the MQTT topic.
     *
     * @param topic
     *        The name of the MQTT topic.
     */
    public void setTopic(String topic) {
        this.topic = topic;
    }

    /**
     * Returns the name of the MQTT topic.
     *
     * @return The name of the MQTT topic.
     */
    public String getTopic() {
        return this.topic;
    }

    /**
     * Sets the name of the MQTT topic.
     *
     * @param topic
     *        The name of the MQTT topic.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RepublishAction withTopic(String topic) {
        setTopic(topic);
        return this;
    }

    /**
     * Sets the Quality of Service (QoS) level to use when republishing messages.
     *
     * @param qos
     *        The Quality of Service (QoS) level to use when republishing messages.
     */
    public void setQos(Integer qos) {
        this.qos = qos;
    }

    /**
     * Returns the Quality of Service (QoS) level to use when republishing messages.
     *
     * @return The Quality of Service (QoS) level to use when republishing messages.
     */
    public Integer getQos() {
        return this.qos;
    }

    /**
     * Sets the Quality of Service (QoS) level to use when republishing messages.
     *
     * @param qos
     *        The Quality of Service (QoS) level to use when republishing messages.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public RepublishAction withQos(Integer qos) {
        setQos(qos);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Null fields are omitted; format matches the SDK-generated "{Field: value,...}".
        StringBuilder sb = new StringBuilder("{");
        if (getRoleArn() != null)
            sb.append("RoleArn: ").append(getRoleArn()).append(",");
        if (getTopic() != null)
            sb.append("Topic: ").append(getTopic()).append(",");
        if (getQos() != null)
            sb.append("Qos: ").append(getQos());
        return sb.append("}").toString();
    }

    /** Null-safe equality helper: true when both are null or {@code a.equals(b)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /** Null-safe hash helper: 0 for null, otherwise the object's hash code. */
    private static int fieldHash(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof RepublishAction))
            return false;
        RepublishAction other = (RepublishAction) obj;
        return fieldEquals(other.getRoleArn(), this.getRoleArn())
                && fieldEquals(other.getTopic(), this.getTopic())
                && fieldEquals(other.getQos(), this.getQos());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated code, so hash values are unchanged.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + fieldHash(getRoleArn());
        hashCode = prime * hashCode + fieldHash(getTopic());
        hashCode = prime * hashCode + fieldHash(getQos());
        return hashCode;
    }

    @Override
    public RepublishAction clone() {
        try {
            return (RepublishAction) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.iot.model.transform.RepublishActionMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Chore;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.jboss.netty.bootstrap.ConnectionlessBootstrap;
import org.jboss.netty.channel.ChannelEvent;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelUpstreamHandler;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.socket.DatagramChannel;
import org.jboss.netty.channel.socket.DatagramChannelFactory;
import org.jboss.netty.channel.socket.oio.OioDatagramChannelFactory;
import org.jboss.netty.handler.codec.protobuf.ProtobufEncoder;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* Class to publish the cluster status to the client. This allows them to know immediately
* the dead region servers, hence to cut the connection they have with them, eventually stop
* waiting on the socket. This improves the mean time to recover, and as well allows to increase
* on the client the different timeouts, as the dead servers will be detected separately.
*/
@InterfaceAudience.Private
public class ClusterStatusPublisher extends Chore {
    /**
     * The implementation class used to publish the status. Default is null (no publish).
     * Use org.apache.hadoop.hbase.master.ClusterStatusPublisher.MulticastPublisher to multicast the
     * status.
     */
    public static final String STATUS_PUBLISHER_CLASS = "hbase.status.publisher.class";
    public static final Class<? extends ClusterStatusPublisher.Publisher>
        DEFAULT_STATUS_PUBLISHER_CLASS = null;

    /**
     * The minimum time between two status messages, in milliseconds.
     */
    public static final String STATUS_PUBLISH_PERIOD = "hbase.status.publish.period";
    public static final int DEFAULT_STATUS_PUBLISH_PERIOD = 10000;

    // Timestamp of the last published message; used to throttle publishing to messagePeriod.
    private long lastMessageTime = 0;
    private final HMaster master;
    private final int messagePeriod; // time between two messages, in milliseconds
    // Per-server count of how many times a dead server's name has already been sent.
    private final ConcurrentMap<ServerName, Integer> lastSent =
        new ConcurrentHashMap<ServerName, Integer>();
    private Publisher publisher;
    // Set true only after publisher.connect() succeeds; chore() is a no-op until then.
    private boolean connected = false;

    /**
     * We want to limit the size of the protobuf message sent, to fit into a single packet.
     * A reasonable size for ip / ethernet is less than 1Kb.
     */
    public static int MAX_SERVER_PER_MESSAGE = 10;

    /**
     * If a server dies, we're sending the information multiple times in case a receiver misses the
     * message.
     */
    public static int NB_SEND = 5;
public ClusterStatusPublisher(HMaster master, Configuration conf,
Class<? extends Publisher> publisherClass)
throws IOException {
super("HBase clusterStatusPublisher for " + master.getName(),
conf.getInt(STATUS_PUBLISH_PERIOD, DEFAULT_STATUS_PUBLISH_PERIOD), master);
this.master = master;
this.messagePeriod = conf.getInt(STATUS_PUBLISH_PERIOD, DEFAULT_STATUS_PUBLISH_PERIOD);
try {
this.publisher = publisherClass.newInstance();
} catch (InstantiationException e) {
throw new IOException("Can't create publisher " + publisherClass.getName(), e);
} catch (IllegalAccessException e) {
throw new IOException("Can't create publisher " + publisherClass.getName(), e);
}
this.publisher.connect(conf);
connected = true;
}
// For tests only
protected ClusterStatusPublisher() {
master = null;
messagePeriod = 0;
}
@Override
protected void chore() {
if (!connected) {
return;
}
List<ServerName> sns = generateDeadServersListToSend();
if (sns.isEmpty()) {
// Nothing to send. Done.
return;
}
final long curTime = EnvironmentEdgeManager.currentTimeMillis();
if (lastMessageTime > curTime - messagePeriod) {
// We already sent something less than 10 second ago. Done.
return;
}
// Ok, we're going to send something then.
lastMessageTime = curTime;
// We're reusing an existing protobuf message, but we don't send everything.
// This could be extended in the future, for example if we want to send stuff like the
// META server name.
ClusterStatus cs = new ClusterStatus(VersionInfo.getVersion(),
master.getMasterFileSystem().getClusterId().toString(),
null,
sns,
master.getServerName(),
null,
null,
null,
null);
publisher.publish(cs);
}
protected void cleanup() {
connected = false;
publisher.close();
}
/**
* Create the dead server to send. A dead server is sent NB_SEND times. We send at max
* MAX_SERVER_PER_MESSAGE at a time. if there are too many dead servers, we send the newly
* dead first.
*/
protected List<ServerName> generateDeadServersListToSend() {
// We're getting the message sent since last time, and add them to the list
long since = EnvironmentEdgeManager.currentTimeMillis() - messagePeriod * 2;
for (Pair<ServerName, Long> dead : getDeadServers(since)) {
lastSent.putIfAbsent(dead.getFirst(), 0);
}
// We're sending the new deads first.
List<Map.Entry<ServerName, Integer>> entries = new ArrayList<Map.Entry<ServerName, Integer>>();
entries.addAll(lastSent.entrySet());
Collections.sort(entries, new Comparator<Map.Entry<ServerName, Integer>>() {
@Override
public int compare(Map.Entry<ServerName, Integer> o1, Map.Entry<ServerName, Integer> o2) {
return o1.getValue().compareTo(o2.getValue());
}
});
// With a limit of MAX_SERVER_PER_MESSAGE
int max = entries.size() > MAX_SERVER_PER_MESSAGE ? MAX_SERVER_PER_MESSAGE : entries.size();
List<ServerName> res = new ArrayList<ServerName>(max);
for (int i = 0; i < max; i++) {
Map.Entry<ServerName, Integer> toSend = entries.get(i);
if (toSend.getValue() >= (NB_SEND - 1)) {
lastSent.remove(toSend.getKey());
} else {
lastSent.replace(toSend.getKey(), toSend.getValue(), toSend.getValue() + 1);
}
res.add(toSend.getKey());
}
return res;
}
/**
* Get the servers which died since a given timestamp.
* protected because it can be subclassed by the tests.
*/
protected List<Pair<ServerName, Long>> getDeadServers(long since) {
if (master.getServerManager() == null) {
return Collections.emptyList();
}
return master.getServerManager().getDeadServers().copyDeadServersSince(since);
}
public static interface Publisher extends Closeable {
public void connect(Configuration conf) throws IOException;
public void publish(ClusterStatus cs);
@Override
public void close();
}
public static class MulticastPublisher implements Publisher {
private DatagramChannel channel;
private final ExecutorService service = Executors.newSingleThreadExecutor(
Threads.newDaemonThreadFactory("hbase-master-clusterStatus-worker"));
public MulticastPublisher() {
}
@Override
public void connect(Configuration conf) throws IOException {
String mcAddress = conf.get(HConstants.STATUS_MULTICAST_ADDRESS,
HConstants.DEFAULT_STATUS_MULTICAST_ADDRESS);
int port = conf.getInt(HConstants.STATUS_MULTICAST_PORT,
HConstants.DEFAULT_STATUS_MULTICAST_PORT);
// Can't be NiO with Netty today => not implemented in Netty.
DatagramChannelFactory f = new OioDatagramChannelFactory(service);
ConnectionlessBootstrap b = new ConnectionlessBootstrap(f);
b.setPipeline(Channels.pipeline(new ProtobufEncoder(),
new ChannelUpstreamHandler() {
@Override
public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e)
throws Exception {
// We're just writing here. Discard any incoming data. See HBASE-8466.
}
}));
channel = (DatagramChannel) b.bind(new InetSocketAddress(0));
channel.getConfig().setReuseAddress(true);
InetAddress ina;
try {
ina = InetAddress.getByName(mcAddress);
} catch (UnknownHostException e) {
throw new IOException("Can't connect to " + mcAddress, e);
}
channel.joinGroup(ina);
channel.connect(new InetSocketAddress(mcAddress, port));
}
@Override
public void publish(ClusterStatus cs) {
ClusterStatusProtos.ClusterStatus csp = cs.convert();
channel.write(csp);
}
@Override
public void close() {
if (channel != null) {
channel.close();
}
service.shutdown();
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.registry.client.impl;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.PathNotFoundException;
import org.apache.hadoop.registry.client.api.BindFlags;
import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.registry.client.exceptions.InvalidPathnameException;
import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;
import org.apache.hadoop.registry.client.exceptions.NoRecordException;
import org.apache.hadoop.registry.client.types.RegistryPathStatus;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.service.CompositeService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
/**
* Filesystem-based implementation of RegistryOperations. This class relies
* entirely on the configured FS for security and does no extra checks.
*/
public class FSRegistryOperationsService extends CompositeService
implements RegistryOperations {
private FileSystem fs;
private static final Logger LOG =
LoggerFactory.getLogger(FSRegistryOperationsService.class);
private final RegistryUtils.ServiceRecordMarshal serviceRecordMarshal =
new RegistryUtils.ServiceRecordMarshal();
public FSRegistryOperationsService() {
super(FSRegistryOperationsService.class.getName());
}
@VisibleForTesting
public FileSystem getFs() {
return this.fs;
}
@Override
protected void serviceInit(Configuration conf) {
try {
this.fs = FileSystem.get(conf);
LOG.info("Initialized Yarn-registry with Filesystem "
+ fs.getClass().getCanonicalName());
} catch (IOException e) {
LOG.error("Failed to get FileSystem for registry", e);
throw new RuntimeException(e);
}
}
private Path makePath(String path) {
return new Path(path);
}
private Path formatDataPath(String basePath) {
return Path.mergePaths(new Path(basePath), new Path("/_record"));
}
private String relativize(String basePath, String childPath) {
String relative = new File(basePath).toURI()
.relativize(new File(childPath).toURI()).getPath();
return relative;
}
@Override
public boolean mknode(String path, boolean createParents)
throws PathNotFoundException, InvalidPathnameException, IOException {
Path registryPath = makePath(path);
// getFileStatus throws FileNotFound if the path doesn't exist. If the
// file already exists, return.
try {
fs.getFileStatus(registryPath);
return false;
} catch (FileNotFoundException e) {
}
if (createParents) {
// By default, mkdirs creates any parent dirs it needs
fs.mkdirs(registryPath);
} else {
FileStatus parentStatus = null;
if (registryPath.getParent() != null) {
parentStatus = fs.getFileStatus(registryPath.getParent());
}
if (registryPath.getParent() == null || parentStatus.isDirectory()) {
fs.mkdirs(registryPath);
} else {
throw new PathNotFoundException("no parent for " + path);
}
}
return true;
}
@Override
public void bind(String path, ServiceRecord record, int flags)
throws PathNotFoundException, FileAlreadyExistsException,
InvalidPathnameException, IOException {
// Preserve same overwrite semantics as ZK implementation
Preconditions.checkArgument(record != null, "null record");
RegistryTypeUtils.validateServiceRecord(path, record);
Path dataPath = formatDataPath(path);
Boolean overwrite = ((flags & BindFlags.OVERWRITE) != 0);
if (fs.exists(dataPath) && !overwrite) {
throw new FileAlreadyExistsException();
} else {
// Either the file doesn't exist, or it exists and we're
// overwriting. Create overwrites by default and creates parent dirs if
// needed.
FSDataOutputStream stream = fs.create(dataPath);
byte[] bytes = serviceRecordMarshal.toBytes(record);
stream.write(bytes);
stream.close();
LOG.info("Bound record to path " + dataPath);
}
}
@Override
public ServiceRecord resolve(String path) throws PathNotFoundException,
NoRecordException, InvalidRecordException, IOException {
// Read the entire file into byte array, should be small metadata
Long size = fs.getFileStatus(formatDataPath(path)).getLen();
byte[] bytes = new byte[size.intValue()];
FSDataInputStream instream = fs.open(formatDataPath(path));
int bytesRead = instream.read(bytes);
instream.close();
if (bytesRead < size) {
throw new InvalidRecordException(path,
"Expected " + size + " bytes, but read " + bytesRead);
}
// Unmarshal, check, and return
ServiceRecord record = serviceRecordMarshal.fromBytes(path, bytes);
RegistryTypeUtils.validateServiceRecord(path, record);
return record;
}
@Override
public RegistryPathStatus stat(String path)
throws PathNotFoundException, InvalidPathnameException, IOException {
FileStatus fstat = fs.getFileStatus(formatDataPath(path));
int numChildren = fs.listStatus(makePath(path)).length;
RegistryPathStatus regstat =
new RegistryPathStatus(fstat.getPath().toString(),
fstat.getModificationTime(), fstat.getLen(), numChildren);
return regstat;
}
@Override
public boolean exists(String path) throws IOException {
return fs.exists(makePath(path));
}
@Override
public List<String> list(String path)
throws PathNotFoundException, InvalidPathnameException, IOException {
FileStatus[] statArray = fs.listStatus(makePath(path));
String basePath = fs.getFileStatus(makePath(path)).getPath().toString();
List<String> paths = new ArrayList<String>();
FileStatus stat;
// Only count dirs; the _record files are hidden.
for (int i = 0; i < statArray.length; i++) {
stat = statArray[i];
if (stat.isDirectory()) {
String relativePath = relativize(basePath, stat.getPath().toString());
paths.add(relativePath);
}
}
return paths;
}
@Override
public void delete(String path, boolean recursive)
throws PathNotFoundException, PathIsNotEmptyDirectoryException,
InvalidPathnameException, IOException {
Path dirPath = makePath(path);
if (!fs.exists(dirPath)) {
throw new PathNotFoundException(path);
}
// If recursive == true, or dir is empty, delete.
if (recursive || list(path).isEmpty()) {
fs.delete(makePath(path), true);
return;
}
throw new PathIsNotEmptyDirectoryException(path);
}
@Override
public boolean addWriteAccessor(String id, String pass) throws IOException {
throw new NotImplementedException("Code is not implemented");
}
@Override
public void clearWriteAccessors() {
throw new NotImplementedException("Code is not implemented");
}
}
| |
package com.futureplatforms.kirin.dependencies.db;
import java.util.ArrayList;
import java.util.List;
import com.futureplatforms.kirin.dependencies.StaticDependencies;
import com.futureplatforms.kirin.dependencies.StaticDependencies.LogDelegate;
import com.futureplatforms.kirin.dependencies.db.Database.TxRunner;
import com.futureplatforms.kirin.dependencies.internal.TransactionBackend;
import com.futureplatforms.kirin.dependencies.internal.TransactionBundle;
import com.futureplatforms.kirin.dependencies.json.JSONArray;
/**
 * Builder for a batch of SQL statements that are executed together as one
 * transaction by the platform-specific {@link TransactionBackend}.
 * Statements are queued by the exec* methods and run when pullTrigger is called.
 */
public class Transaction {
    /**
     * In-memory tabular result of a query: a list of column names plus rows.
     */
    public static class RowSet {
        /** A single row; values are positionally aligned with _ColumnNames. */
        public class Row {
            public final List<String> _Values;
            public Row(List<String> values) {
                this._Values = values;
            }
            /**
             * Returns the value for the named column.
             * @throws IllegalArgumentException if the column does not exist
             */
            public String valueForColumn(String column) {
                int idx = _ColumnNames.indexOf(column);
                if (idx < 0) {
                    throw new IllegalArgumentException("No such column: " + column);
                }
                return this._Values.get(idx);
            }
            public Boolean hasColumn(String column) {
                return _ColumnNames.contains(column);
            }
        }
        public final List<String> _ColumnNames;
        public final List<Row> _Rows = new ArrayList<>();
        public RowSet(List<String> columnNames) {
            _ColumnNames = columnNames;
        }
        public void addRow(List<String> values) {
            _Rows.add(new Row(values));
        }
        /**
         * Pads (with {@code pad}) or truncates {@code str} to exactly colWidth
         * characters; null renders as "<null>".
         */
        private static String str(String str, int colWidth, char pad) {
            str = (str == null) ? "<null>" : str;
            if (str.length() > colWidth) {
                return str.substring(0, colWidth);
            }
            // StringBuilder avoids O(n^2) string concatenation while padding.
            StringBuilder sb = new StringBuilder(str);
            while (sb.length() < colWidth) {
                sb.append(pad);
            }
            return sb.toString();
        }
        /** Logs the row set as a fixed-width (15 chars per column) text table. */
        public void log(LogDelegate log) {
            StringBuilder header = new StringBuilder();
            for (String colName : _ColumnNames) {
                header.append(str(colName, 15, ' ')).append(" | ");
            }
            log.log(header.toString());
            // Separator line spanning all columns.
            log.log(str("", 15 * _ColumnNames.size(), '='));
            for (Row row : _Rows) {
                StringBuilder rowStr = new StringBuilder();
                for (String value : row._Values) {
                    rowStr.append(str(value, 15, ' ')).append(" | ");
                }
                log.log(rowStr.toString());
            }
        }
    }
    /** Base callback: all statement callbacks can report an error. */
    public static interface TxCB {
        public void onError();
    }
    public static interface TxRowsCB extends TxCB {
        public void onSuccess(RowSet rowset);
    }
    public static interface TxTokenCB extends TxCB {
        public void onSuccess(String token);
    }
    public static interface TxJSONCB extends TxCB {
        public void onSuccess(JSONArray json);
    }
    /** A single SQL statement plus its positional parameters. */
    public static abstract class Statement {
        public final String _SQL;
        public final String[] _Params;
        public Statement(String sql, String[] params) {
            this._SQL = sql;
            this._Params = params;
        }
    }
    /**
     * Insert Statement, that allows nulls in the params
     * @author kassimmaguire
     *
     */
    public static class InsertStatement extends StatementWithRowsReturn {
        public InsertStatement(String sql, String[] params) {
            super(sql, params, null);
        }
    }
    /**
     * Update Statement, that allows nulls in the params
     * @author kassimmaguire
     *
     */
    public static class UpdateStatement extends StatementWithRowsReturn {
        public UpdateStatement(String sql, String[] params) {
            super(sql, params, null);
        }
    }
    /** Statement whose result is delivered as a single token string. */
    public static class StatementWithTokenReturn extends Statement {
        public final TxTokenCB _Callback;
        public StatementWithTokenReturn(String sql, String[] params, TxTokenCB callback) {
            super(sql, params);
            this._Callback = callback;
        }
    }
    /** Statement whose result is delivered as a {@link RowSet}. */
    public static class StatementWithRowsReturn extends Statement {
        public final TxRowsCB _Callback;
        public StatementWithRowsReturn(String sql, String[] params, TxRowsCB cb) {
            super(sql, params);
            this._Callback = cb;
        }
    }
    /** Statement whose result is delivered as a JSON array. */
    public static class StatementWithJSONReturn extends Statement {
        public final TxJSONCB _Callback;
        public StatementWithJSONReturn(String sql, String[] params, TxJSONCB cb) {
            super(sql, params);
            this._Callback = cb;
        }
    }
    /** Discriminates entries of _TxElements: single statement vs. batch script. */
    public enum TxElementType {
        Statement, Batch
    }
    // Ordered interleaving of statements and batches, so the backend can
    // replay them in the exact order they were queued.
    private List<TxElementType> _TxElements = new ArrayList<>();
    private List<Statement> _Statements = new ArrayList<>();
    private List<String[]> _BatchQueries = new ArrayList<>();
    public enum Mode {
        ReadOnly, ReadWrite
    }
    private TransactionBackend _Backend;
    public interface TransactionCallback {
        void onSuccess(Transaction t);
        void onError();
    }
    protected Transaction(TransactionBackend backend) {
        this._Backend = backend;
    }
    public void execQueryWithTokenReturn(String sql, TxTokenCB cb) {
        execQueryWithTokenReturn(sql, null, cb);
    }
    /**
     * Throws NullPointerException if any element of params is null.
     * A null params array itself is allowed (means "no parameters").
     */
    private void validateParams(String[] params) {
        if (params != null) {
            for (int i = 0; i < params.length; i++) {
                if (params[i] == null) {
                    throw new NullPointerException("param " + i + " must not be null!");
                }
            }
        }
    }
    /**
     * Best-effort null check: logs a warning for null params but still lets
     * the statement be queued (preserves historical lenient behavior).
     */
    private void warnIfNullParams(String sql, String[] params) {
        try {
            validateParams(params);
        } catch (NullPointerException e) {
            LogDelegate log = StaticDependencies.getInstance().getLogDelegate();
            log.log("sql: " + sql + ": " + e.getMessage());
        }
    }
    /** Logs the SQL and its parameters when Database.DEBUG is enabled. */
    private void log(String sql, String[] params) {
        if (Database.DEBUG) {
            LogDelegate log = StaticDependencies.getInstance().getLogDelegate();
            log.log("\n" + sql);
            if (params != null) {
                StringBuilder queryStr = new StringBuilder();
                for (String param : params) {
                    queryStr.append(param).append(", ");
                }
                log.log(queryStr.toString());
            }
            log.log("\n");
        }
    }
    public void execQueryWithTokenReturn(String sql, String[] params, TxTokenCB cb) {
        warnIfNullParams(sql, params);
        _Statements.add(new StatementWithTokenReturn(sql, params, cb));
        _TxElements.add(TxElementType.Statement);
    }
    /**
     * exec an INSERT statement
     * @param sql
     */
    public void execInsert(String sql) {
        execInsert(sql, null);
    }
    /**
     * Execute an INSERT statement, which allows nulls in the params
     * @param sql
     * @param params
     */
    public void execInsert(String sql, String[] params) {
        log(sql, params);
        _Statements.add(new InsertStatement(sql, params));
        _TxElements.add(TxElementType.Statement);
    }
    /**
     * Execute an UPDATE statement, which allows nulls in the params
     * @param sql
     * @param params
     */
    public void execUpdateWithNulls(String sql, String[] params) {
        // Debug-log for consistency with execInsert; nulls are permitted here.
        log(sql, params);
        _Statements.add(new UpdateStatement(sql, params));
        _TxElements.add(TxElementType.Statement);
    }
    public void execUpdate(String sql) {
        execQueryWithRowsReturn(sql, null);
    }
    public void execUpdate(String sql, String[] params) {
        execQueryWithRowsReturn(sql, params, null);
    }
    public void execQueryWithRowsReturn(String sql, TxRowsCB cb) {
        execQueryWithRowsReturn(sql, null, cb);
    }
    public void execQueryWithRowsReturn(String sql, String[] params, TxRowsCB cb) {
        warnIfNullParams(sql, params);
        log(sql, params);
        _Statements.add(new StatementWithRowsReturn(sql, params, cb));
        _TxElements.add(TxElementType.Statement);
    }
    public void execQueryWithJSONReturn(String sql, TxJSONCB cb) {
        execQueryWithJSONReturn(sql, null, cb);
    }
    public void execQueryWithJSONReturn(String sql, String[] params, TxJSONCB cb) {
        warnIfNullParams(sql, params);
        log(sql, params);
        _Statements.add(new StatementWithJSONReturn(sql, params, cb));
        _TxElements.add(TxElementType.Statement);
    }
    /**
     * IF you're executing a SQL file then use this function. Statements must be separated
     * with semicolon followed by newline!
     * @param batch
     */
    public void execBatchUpdate(String batch) {
        String[] lines = StaticDependencies.getInstance().getFormatter().parseBatchSql(batch);
        _BatchQueries.add(lines);
        _TxElements.add(TxElementType.Batch);
    }
    protected void pullTrigger(TxRunner closedCallback) {
        // Package all the queries the user wants to do in this statement
        // into a bundle
        TransactionBundle bundle = new TransactionBundle(_TxElements, _Statements, _BatchQueries, closedCallback);
        // Now pass it to the native platform's flavour of pulling trigger
        _Backend.pullTrigger(bundle);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.Random;
import java.util.zip.CRC32;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.hdfs.metrics.LookasideMetrics.LocalMetrics;
import org.apache.log4j.Level;
/**
 * This class tests the lookaside cache.
 */
public class TestLookasideCache extends junit.framework.TestCase {
  final static Log LOG = LogFactory.getLog("org.apache.hadoop.hdfs.TestLookasideCache");
  final static String TEST_DIR = new File(System.getProperty("test.build.data",
      "build/lookasidecache/test/data")).getAbsolutePath();
  final static int NUM_DATANODES = 3;
  // Instance initializer: turn on verbose logging for the classes under test.
  {
    ((Log4JLogger)LookasideCache.LOG).getLogger().setLevel(Level.ALL);
    ((Log4JLogger)LookasideCacheFileSystem.LOG).getLogger().setLevel(Level.ALL);
  }
  /**
   * Exercises the in-memory cache directly: size accounting, LRU-style
   * eviction (driven by CACHEEVICT_PERCENT), and the local metrics counters.
   */
  public void testCache() throws IOException {
    Configuration conf = new Configuration();
    long maxSize = 5 * 1024;
    long evictPercent = 20;
    conf.setLong(LookasideCache.CACHESIZE, maxSize);
    conf.setLong(LookasideCache.CACHEEVICT_PERCENT, evictPercent);
    LookasideCache cache = new LookasideCache(conf);
    LocalMetrics metrics = cache.getLocalMetrics();
    assertTrue(cache.getCacheMaxSize() == maxSize);
    assertTrue(cache.getCacheSize() == 0);
    assertTrue(cache.getCacheEvictPercent() == evictPercent);
    // insert five elements into the cache, each of size 1024
    cache.addCache(new Path("one"), new Path("one"), 1024);
    cache.addCache(new Path("two"), new Path("two"), 1024);
    cache.addCache(new Path("three"), new Path("three"), 1024);
    cache.addCache(new Path("four"), new Path("four"), 1024);
    cache.addCache(new Path("five"), new Path("five"), 1024);
    assertTrue(cache.getCacheSize() == 5 * 1024);
    assertTrue(metrics.numAdd == 5);
    assertTrue(metrics.numAddNew == 5);
    assertTrue(metrics.numAddExisting == 0);
    // the cache is now full. If we add one more element, the oldest
    // two should be evicted
    cache.addCache(new Path("six"), new Path("six"), 512);
    // 3 surviving 1K entries + the new 512-byte entry.
    assertTrue("cachesize is " + cache.getCacheSize(),
        cache.getCacheSize() == 3 * 1024 + 512);
    // verify that first two are not there. and the rest is there
    assertTrue(cache.getCache(new Path("one")) == null);
    assertTrue(cache.getCache(new Path("two")) == null);
    assertTrue(cache.getCache(new Path("three")) != null);
    assertTrue(cache.getCache(new Path("four")) != null);
    assertTrue(cache.getCache(new Path("five")) != null);
    assertTrue(cache.getCache(new Path("six")) != null);
    assertTrue(metrics.numEvict == 2);
    // make three the most recently used
    assertTrue(cache.getCache(new Path("three")) != null);
    // 7 lookups so far: the 6 above plus this one; "one" and "two" missed.
    assertTrue(metrics.numGetAttempts == 7);
    assertTrue(metrics.numGetHits == 5);
    // now we insert seven. Fits in the remaining space, so no eviction:
    // total becomes 3*1024 + 512 + 512 = 4K.
    cache.addCache(new Path("seven"), new Path("seven"), 512);
    assertTrue(cache.getCacheSize() == 4 * 1024);
    assertTrue(cache.getCache(new Path("one")) == null);
    assertTrue(cache.getCache(new Path("two")) == null);
    assertTrue(cache.getCache(new Path("three")) != null);
    assertTrue(cache.getCache(new Path("four")) != null);
    assertTrue(cache.getCache(new Path("five")) != null);
    assertTrue(cache.getCache(new Path("six")) != null);
    assertTrue(cache.getCache(new Path("seven")) != null);
  }
  /**
   * End-to-end test through the layered LookasideCacheFileSystem on a mini
   * HDFS cluster: create/read/overwrite/rename/delete and verify both data
   * integrity (CRC32) and cache-size bookkeeping.
   */
  public void testCacheFileSystem() throws IOException {
    // configure a cached filessytem, cache size is 10 KB.
    mySetup(10*1024L);
    try {
      // create a 5K file using the LookasideCache. This write
      // should be cached in the cache.
      Path file = new Path("/hdfs/testRead");
      long crc = createTestFile(lfs, file, 1, 5, 1024L);
      FileStatus stat = lfs.getFileStatus(file);
      LOG.info("Created " + file + ", crc=" + crc + ", len=" + stat.getLen());
      assertTrue(lfs.lookasideCache.getCacheSize() == 5 * 1024);
      // Test that readFully via the Lookasidecache fetches correct data
      // from the cache.
      FSDataInputStream stm = lfs.open(file);
      byte[] filebytes = new byte[(int)stat.getLen()];
      stm.readFully(0, filebytes);
      assertEquals(crc, bufferCRC(filebytes));
      stm.close();
      // assert that there is one element of size 5K in the cache
      assertEquals(5*1024, lfs.lookasideCache.getCacheSize());
      // create a 6K file using the LookasideCache. This is an
      // overwrite of the earlier file, so the cache should reflect
      // the new size of the file.
      crc = createTestFile(lfs, file, 1, 6, 1024L);
      stat = lfs.getFileStatus(file);
      LOG.info("Created " + file + ", crc=" + crc + ", len=" + stat.getLen());
      // assert that there is one element of size 6K in the cache
      assertEquals(6*1024, lfs.lookasideCache.getCacheSize());
      // verify reading file2 from the cache
      stm = lfs.open(file);
      filebytes = new byte[(int)stat.getLen()];
      stm.readFully(0, filebytes);
      assertEquals(crc, bufferCRC(filebytes));
      stm.close();
      // add a 5 KB file to the cache. This should start eviction of
      // the earlier file.
      Path file2 = new Path("/hdfs/testRead2");
      crc = createTestFile(lfs, file2, 1, 5, 1024L);
      stat = lfs.getFileStatus(file2);
      LOG.info("Created " + file2 + ", crc=" + crc + ", len=" + stat.getLen());
      assertEquals(5*1024, lfs.lookasideCache.getCacheSize());
      // move file2 to file3
      Path file3 = new Path("/hdfs/testRead3");
      assertTrue(lfs.rename(file2, file3));
      // delete file3. This should clear out the cache.
      lfs.delete(file3, false);
      assertEquals(0, lfs.lookasideCache.getCacheSize());
    } finally {
      myTearDown();
    }
  }
  private MiniDFSCluster dfs;
  private FileSystem fileSys;
  private LookasideCacheFileSystem lfs;
  private String namenode;
  private String hftp;
  // setup a LookasideCachedFileSystem
  private void mySetup(long cacheSize) throws IOException {
    Configuration conf = new Configuration();
    // create a HDFS cluster
    dfs = new MiniDFSCluster(conf, NUM_DATANODES, true, null);
    dfs.waitActive();
    fileSys = dfs.getFileSystem();
    namenode = fileSys.getUri().toString();
    hftp = "hftp://localhost.localdomain:" + dfs.getNameNodePort();
    FileSystem.setDefaultUri(conf, namenode);
    // create a client-side layered filesystem.
    // The cache size is 10 KB.
    lfs = getCachedHdfs(fileSys, conf, cacheSize);
  }
  // Shuts the mini cluster down; safe to call if setup failed part-way.
  private void myTearDown() throws IOException {
    if (dfs != null) { dfs.shutdown(); }
  }
  /**
   * Returns a cached filesystem layered on top of the HDFS cluster
   */
  private LookasideCacheFileSystem getCachedHdfs(FileSystem fileSys,
      Configuration conf, long cacheSize) throws IOException {
    DistributedFileSystem dfs = (DistributedFileSystem)fileSys;
    Configuration clientConf = new Configuration(conf);
    clientConf.setLong(LookasideCache.CACHESIZE, cacheSize);
    clientConf.set("fs.lookasidecache.dir", TEST_DIR);
    clientConf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.LookasideCacheFileSystem");
    clientConf.set("fs.lookasidecache.underlyingfs.impl",
        "org.apache.hadoop.hdfs.DistributedFileSystem");
    URI dfsUri = dfs.getUri();
    // Drop any cached FileSystem instances so the new impl mapping takes effect.
    FileSystem.closeAll();
    FileSystem lfs = FileSystem.get(dfsUri, clientConf);
    assertTrue("lfs not an instance of LookasideCacheFileSystem",
        lfs instanceof LookasideCacheFileSystem);
    return (LookasideCacheFileSystem)lfs;
  }
  //
  // creates a file and populate it with random data. Returns its crc.
  //
  private static long createTestFile(FileSystem fileSys, Path name, int repl,
      int numBlocks, long blocksize)
      throws IOException {
    CRC32 crc = new CRC32();
    Random rand = new Random();
    FSDataOutputStream stm = fileSys.create(name, true,
        fileSys.getConf().getInt("io.file.buffer.size", 4096),
        (short)repl, blocksize);
    // fill random data into file
    final byte[] b = new byte[(int)blocksize];
    for (int i = 0; i < numBlocks; i++) {
      rand.nextBytes(b);
      stm.write(b);
      crc.update(b);
    }
    stm.close();
    return crc.getValue();
  }
  /**
   * returns the CRC32 of the buffer
   */
  private long bufferCRC(byte[] buf) {
    CRC32 crc = new CRC32();
    crc.update(buf, 0, buf.length);
    return crc.getValue();
  }
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.device.mgt.jaxrs.service.api;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import io.swagger.annotations.Extension;
import io.swagger.annotations.ExtensionProperty;
import io.swagger.annotations.Info;
import io.swagger.annotations.ResponseHeader;
import io.swagger.annotations.SwaggerDefinition;
import io.swagger.annotations.Tag;
import org.wso2.carbon.apimgt.annotations.api.Scope;
import org.wso2.carbon.apimgt.annotations.api.Scopes;
import org.wso2.carbon.device.mgt.common.Device;
import org.wso2.carbon.device.mgt.common.operation.mgt.Operation;
import org.wso2.carbon.device.mgt.jaxrs.beans.ErrorResponse;
import org.wso2.carbon.device.mgt.jaxrs.beans.OperationList;
import org.wso2.carbon.device.mgt.jaxrs.util.Constants;
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
import java.util.Map;
/**
 * JAX-RS service interface consumed by device agents, published under the
 * {@code /device/agent} context (see the {@code @SwaggerDefinition} extension below).
 * Exposes device enrollment/disenrollment, device and operation updates,
 * pending-operation retrieval and event publishing endpoints.
 *
 * Every annotation value here (paths, scopes, Swagger metadata) is part of the
 * published API/permission contract — changing any of them changes runtime behavior
 * of the generated documentation and authorization checks.
 */
@SwaggerDefinition(
info = @Info(
version = "1.0.0",
title = "",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = "name", value = "DeviceAgent Service"),
@ExtensionProperty(name = "context", value = "/api/device-mgt/v1.0/device/agent"),
})
}
),
tags = {
@Tag(name = "device_agent, device_management", description = "")
}
)
@Api(value = "Device Agent", description = "Device Agent Service")
@Path("/device/agent")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
// OAuth scope declarations: each scope key below is referenced by the per-method
// @ExtensionProperty(name = Constants.SCOPE, ...) entries further down.
@Scopes(
scopes = {
@Scope(
name = "Enroll Device",
description = "Register a device",
key = "perm:device:enroll",
permissions = {"/device-mgt/devices/owning-device/add"}
),
@Scope(
name = "Modify Device",
description = "Modify a device",
key = "perm:device:modify",
permissions = {"/device-mgt/devices/owning-device/modify"}
),
@Scope(
name = "Disenroll Device",
description = "Disenroll a device",
key = "perm:device:disenroll",
permissions = {"/device-mgt/devices/owning-device/remove"}
),
@Scope(
name = "Publish Event",
description = "publish device event",
key = "perm:device:publish-event",
permissions = {"/device-mgt/devices/owning-device/event"}
),
@Scope(
name = "Getting Device Operation Details",
description = "Getting Device Operation Details",
key = "perm:device:operations",
permissions = {"/device-mgt/devices/owning-device/view"}
)
}
)
public interface DeviceAgentService {
/**
 * POST /device/agent/enroll — creates (enrolls) a device instance from the supplied payload.
 * Requires scope {@code perm:device:enroll}.
 *
 * NOTE(review): {@code tags} is "Device Management" here and on the DELETE endpoint,
 * but "Device Agent Management" on the remaining methods — confirm the intended
 * Swagger grouping before normalizing.
 *
 * @param device payload describing the device instance to create
 * @return 200 on success; 400 on validation error; 404 for unknown device type; 500 on server error
 */
@POST
@Path("/enroll")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
httpMethod = "POST",
value = "Creating a Device Instance",
notes = "Create a device instance using this API. Since this is not a physical device you are not able to try out device operations.",
tags = "Device Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:enroll")
})
}
)
@ApiResponses(
value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully created a device instance.",
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 304,
message = "Not Modified. Empty body because the client already has the latest version" +
" of the requested resource.\n"),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error.",
response = ErrorResponse.class),
@ApiResponse(
code = 404,
message = "Not Found. \n A deviceType with the specified device type was not found.",
response = ErrorResponse.class),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while retrieving the device details.",
response = ErrorResponse.class)
})
Response enrollDevice(@ApiParam(name = "device", value = "Define the payload to create a device instance. You can double click on the Example Value and define your values in it to create the instance.", required = true)
@Valid Device device);
/**
 * DELETE /device/agent/enroll/{type}/{id} — unregisters (disenrolls) the identified device.
 * Requires scope {@code perm:device:disenroll}.
 *
 * @param type device type (e.g. android, ios, windows)
 * @param id   unique device identifier
 * @return 200 on success; 404 if the device does not exist; 500 on server error
 */
@DELETE
@Path("/enroll/{type}/{id}")
@ApiOperation(
httpMethod = "DELETE",
value = "Unregistering a Device",
notes = "Use this REST API to unregister a device.",
tags = "Device Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:disenroll")
})
}
)
@ApiResponses(value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully disenrolled the device."),
@ApiResponse(
code = 404,
message = "Not Found. \n The specified resource does not exist."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while dis-enrolling the device.")
})
Response disEnrollDevice(
@ApiParam(name = "type", value = "Define the device type, such as android, ios, or windows.") @PathParam("type") String type,
@ApiParam(name = "id", value = "The unique device identifier.") @PathParam("id") String id);
/**
 * PUT /device/agent/enroll/{type}/{id} — updates the details of an enrolled device.
 * Requires scope {@code perm:device:modify}.
 *
 * @param type         device type (e.g. ios, android, windows)
 * @param deviceId     device identifier
 * @param updateDevice device object carrying the new details
 * @return 200 on success; 400 on validation error; 404 for unknown type; 500 on server error
 */
@PUT
@Path("/enroll/{type}/{id}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
httpMethod = "PUT",
value = "Updating the Device Details",
notes = "You are able to update the details of a device using the API.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:modify")
})
}
)
@ApiResponses(
value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully updated device instance.",
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 304,
message = "Not Modified. Empty body because the client already has the latest version" +
" of the requested resource.\n"),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error.",
response = ErrorResponse.class),
@ApiResponse(
code = 404,
message = "Not Found. \n A deviceType with the specified device type was not found.",
response = ErrorResponse.class),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while retrieving the device details.",
response = ErrorResponse.class)
})
Response updateDevice(@ApiParam(name = "type", value = "The device type, such as ios, android, or windows.", required = true)
@PathParam("type") String type,
@ApiParam(name = "id", value = "The device ID.", required = true)
@PathParam("id") String deviceId,
@ApiParam(name = "device", value = "Update the device details in the device object.", required = true)
@Valid Device updateDevice);
/**
 * POST /device/agent/events/publish/{type}/{deviceId} — publishes a single agent event,
 * supplied as a key/value map, to the analytics server (DAS).
 * Requires scope {@code perm:device:publish-event}.
 *
 * @param payloadData event payload as attribute-name/value pairs
 * @param type        device type name
 * @param deviceId    device identifier
 */
@POST
@Path("events/publish/{type}/{deviceId}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
consumes = MediaType.APPLICATION_JSON,
httpMethod = "POST",
value = "Publishing Events",
notes = "Publish events received by the device client to the WSO2 Data Analytics Server (DAS) using this API.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:publish-event")
})
}
)
@ApiResponses(
value = {
@ApiResponse(code = 200, message = "OK. \n Successfully published the event",
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests.")
}),
@ApiResponse(
code = 303,
message = "See Other. \n The source can be retrieved from the URL specified in the location header.",
responseHeaders = {
@ResponseHeader(
name = "Content-Location",
description = "The Source URL of the document.")}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error."),
@ApiResponse(
code = 415,
message = "Unsupported media type. \n The format of the requested entity was not supported."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while publishing events.")
})
Response publishEvents(
@ApiParam(
name = "payloadData",
value = "Information of the agent event to be published on DAS.")
@Valid
Map<String, Object> payloadData,
@ApiParam(
name = "type",
value = "The name of the device type, such as android, ios, or windows.")
@PathParam("type") String type,
@ApiParam(
name = "deviceId",
value = "The device ID.")
@PathParam("deviceId") String deviceId);
/**
 * POST /device/agent/events/publish/data/{type}/{deviceId} — overload of
 * {@link #publishEvents(Map, String, String)} that accepts the event payload as an
 * ordered list of values (data only, no attribute names).
 * Requires scope {@code perm:device:publish-event}.
 *
 * @param payloadData event payload values
 * @param type        device type name
 * @param deviceId    device identifier
 */
@POST
@Path("/events/publish/data/{type}/{deviceId}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
consumes = MediaType.APPLICATION_JSON,
httpMethod = "POST",
value = "Publishing Events data only",
notes = "Publish events received by the device client to the WSO2 Data Analytics Server (DAS) using this" +
" API.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:publish-event")
})
}
)
@ApiResponses(
value = {
@ApiResponse(code = 200, message = "OK. \n Successfully published the event",
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource was last modified.\n" +
"Used by caches, or in conditional requests.")
}),
@ApiResponse(
code = 303,
message = "See Other. \n The source can be retrieved from the URL specified in the " +
"location header.",
responseHeaders = {
@ResponseHeader(
name = "Content-Location",
description = "The Source URL of the document.")}),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error."),
@ApiResponse(
code = 415,
message = "Unsupported media type. \n The format of the requested entity was not " +
"supported."),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while publishing events.")
})
Response publishEvents(
@ApiParam(
name = "payloadData",
value = "Information of the agent event to be published on DAS.")
@Valid
List<Object> payloadData,
@ApiParam(
name = "type",
value = "name of the device type")
@PathParam("type") String type,
@ApiParam(
name = "deviceId",
value = "deviceId of the device")
@PathParam("deviceId") String deviceId);
/**
 * GET /device/agent/pending/operations/{type}/{id} — lists all pending operations
 * queued for the identified device. Requires scope {@code perm:device:operations}.
 *
 * @param type     device type (e.g. ios, android, windows)
 * @param deviceId device identifier
 * @return 200 with an {@link OperationList} body on success
 */
@GET
@Path("/pending/operations/{type}/{id}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
consumes = MediaType.APPLICATION_JSON,
httpMethod = "GET",
value = "Getting All the Pending Operations of a Device",
notes = "You can get all the list of pending operations of a device.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:operations")
})
}
)
@ApiResponses(
value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully retrieved the operations.",
response = OperationList.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource has been modified the last " +
"time.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 304,
message = "Not Modified. Empty body because the client already has the latest " +
"version of the requested resource."),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error.",
response = ErrorResponse.class),
@ApiResponse(
code = 404,
message = "Not Found. \n No device is found under the provided type and id.",
response = ErrorResponse.class),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while retrieving information requested device.",
response = ErrorResponse.class)
})
Response getPendingOperations(@ApiParam(name = "type", value = "The device type, such as ios, android, or windows.", required = true)
@PathParam("type") String type,
@ApiParam(name = "id", value = "The device ID.", required = true)
@PathParam("id") String deviceId);
/**
 * GET /device/agent/next-pending/operation/{type}/{id} — returns only the next
 * pending operation for the identified device.
 * Requires scope {@code perm:device:operations}.
 *
 * @param type     device type (e.g. ios, android, windows)
 * @param deviceId device identifier
 * @return 200 with a single {@link Operation} body on success
 */
@GET
@Path("/next-pending/operation/{type}/{id}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
consumes = MediaType.APPLICATION_JSON,
httpMethod = "GET",
value = "Getting the Next Pending Operations of a Device",
notes = "There can be many operations that are pending on the device end. Get the next pending operation " +
"of a specific device using this API.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:operations")
})
}
)
@ApiResponses(
value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully retrieved the operation.",
response = Operation.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource has been modified the last time.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 304,
message = "Not Modified. Empty body because the client already has the latest " +
"version of the requested resource."),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error.",
response = ErrorResponse.class),
@ApiResponse(
code = 404,
message = "Not Found. \n No device is found under the provided type and id.",
response = ErrorResponse.class),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while retrieving information requested device.",
response = ErrorResponse.class)
})
Response getNextPendingOperation(@ApiParam(name = "type", value = "The device type, such as ios, android, or " +
"windows.", required = true)
@PathParam("type") String type,
@ApiParam(name = "id", value = "The device ID.", required = true)
@PathParam("id") String deviceId);
/**
 * PUT /device/agent/operations/{type}/{id} — reports an operation result back from
 * the agent by updating the given operation (e.g. its status/response).
 * Requires scope {@code perm:device:operations}.
 *
 * @param type      device type (e.g. ios, android, windows)
 * @param deviceId  device identifier
 * @param operation operation object carrying the updated data
 */
@PUT
@Path("/operations/{type}/{id}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
consumes = MediaType.APPLICATION_JSON,
httpMethod = "PUT",
value = "Updating an Operation",
notes = "Update the Operations.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:operations")
})
}
)
@ApiResponses(
value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully updated the operations.",
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource has been modified the last time.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 304,
message = "Not Modified. Empty body because the client already has the latest " +
"version of the requested resource."),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error.",
response = ErrorResponse.class),
@ApiResponse(
code = 404,
message = "Not Found. \n No device is found under the provided type and id.",
response = ErrorResponse.class),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while retrieving information requested device.",
response = ErrorResponse.class)
})
Response updateOperation(@ApiParam(name = "type", value = "The device type, such as ios, android, or windows.",
required = true)
@PathParam("type") String type,
@ApiParam(name = "id", value = "The device ID.", required = true)
@PathParam("id") String deviceId,
@ApiParam(name = "operation", value = "Update the operation object with data.", required = true)
@Valid Operation operation);
/**
 * PUT /device/agent/properties/{type}/{id} — replaces/updates the property list of
 * the identified device. Requires scope {@code perm:device:modify}.
 *
 * @param type       device type (e.g. ios, android, windows)
 * @param deviceId   device identifier
 * @param properties device properties to store
 */
@PUT
@Path("/properties/{type}/{id}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
consumes = MediaType.APPLICATION_JSON,
httpMethod = "PUT",
value = "Updating Device Properties",
notes = "Update device properties.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:modify")
})
}
)
@ApiResponses(
value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully updated the operations.",
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource has been modified the last time.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 304,
message = "Not Modified. Empty body because the client already has the latest " +
"version of the requested resource."),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error.",
response = ErrorResponse.class),
@ApiResponse(
code = 404,
message = "Not Found. \n No device is found under the provided type and id.",
response = ErrorResponse.class),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while retrieving information requested device.",
response = ErrorResponse.class)
})
Response updateDeviceProperties(@ApiParam(name = "type", value = "The device type, such as ios, android, or " +
"windows.", required = true)
@PathParam("type") String type,
@ApiParam(name = "id", value = "The device ID.", required = true)
@PathParam("id") String deviceId,
@ApiParam(name = "properties", value = "The device properties list.", required = true)
@Valid List<Device.Property> properties);
/**
 * GET /device/agent/status/operations/{type}/{id}?status=... — lists the device's
 * operations filtered by operation status (IN_PROGRESS, PENDING, COMPLETED, ERROR,
 * or REPEATED). Requires scope {@code perm:device:operations}.
 *
 * @param type     device type (e.g. ios, android, windows)
 * @param deviceId device identifier
 * @param status   operation status to filter by (query parameter)
 * @return 200 with an {@link OperationList} body on success
 */
@GET
@Path("/status/operations/{type}/{id}")
@ApiOperation(
produces = MediaType.APPLICATION_JSON,
consumes = MediaType.APPLICATION_JSON,
httpMethod = "GET",
value = "Getting Specific Operations of a Device",
notes = "Device operations can be in the IN_PROGRESS, PENDING, COMPLETED, ERROR, or REPEATED status. " +
"You can use this API to get a list of all the operations that are in a specific status for a given device.",
tags = "Device Agent Management",
extensions = {
@Extension(properties = {
@ExtensionProperty(name = Constants.SCOPE, value = "perm:device:operations")
})
}
)
@ApiResponses(
value = {
@ApiResponse(
code = 200,
message = "OK. \n Successfully retrieved the operations.",
response = OperationList.class,
responseHeaders = {
@ResponseHeader(
name = "Content-Type",
description = "The content type of the body"),
@ResponseHeader(
name = "ETag",
description = "Entity Tag of the response resource.\n" +
"Used by caches, or in conditional requests."),
@ResponseHeader(
name = "Last-Modified",
description = "Date and time the resource has been modified the last time.\n" +
"Used by caches, or in conditional requests."),
}),
@ApiResponse(
code = 304,
message = "Not Modified. Empty body because the client already has the latest " +
"version of the requested resource."),
@ApiResponse(
code = 400,
message = "Bad Request. \n Invalid request or validation error.",
response = ErrorResponse.class),
@ApiResponse(
code = 404,
message = "Not Found. \n No device is found under the provided type and id.",
response = ErrorResponse.class),
@ApiResponse(
code = 500,
message = "Internal Server Error. \n " +
"Server error occurred while retrieving information requested device.",
response = ErrorResponse.class)
})
Response getOperationsByDeviceAndStatus(@ApiParam(name = "type", value = "The device type, such as ios, android," +
" or windows.", required = true)
@PathParam("type") String type,
@ApiParam(name = "id", value = "The device ID.", required = true)
@PathParam("id") String deviceId,
@ApiParam(name = "status", value = "status of the operation.", required = true)
@QueryParam("status")Operation.Status status);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.netty;
import org.apache.flink.runtime.io.network.ConnectionID;
import org.apache.flink.runtime.io.network.NetworkClientHandler;
import org.apache.flink.runtime.io.network.TaskEventDispatcher;
import org.apache.flink.runtime.io.network.netty.NettyTestUtil.NettyServerAndClient;
import org.apache.flink.runtime.io.network.netty.exception.LocalTransportException;
import org.apache.flink.runtime.io.network.netty.exception.RemoteTransportException;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.runtime.io.network.partition.ResultPartitionProvider;
import org.apache.flink.runtime.io.network.partition.consumer.InputChannelID;
import org.apache.flink.runtime.io.network.partition.consumer.RemoteInputChannel;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.shaded.netty4.io.netty.buffer.Unpooled;
import org.apache.flink.shaded.netty4.io.netty.channel.Channel;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFuture;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandler;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandlerContext;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelInboundHandlerAdapter;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelOutboundHandlerAdapter;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelPromise;
import org.apache.flink.shaded.netty4.io.netty.channel.embedded.EmbeddedChannel;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.apache.flink.runtime.io.network.netty.NettyTestUtil.connect;
import static org.apache.flink.runtime.io.network.netty.NettyTestUtil.createConfig;
import static org.apache.flink.runtime.io.network.netty.NettyTestUtil.initServerAndClient;
import static org.apache.flink.runtime.io.network.netty.NettyTestUtil.shutdown;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class ClientTransportErrorHandlingTest {
/**
* Verifies that failed client requests via {@link PartitionRequestClient} are correctly
* attributed to the respective {@link RemoteInputChannel}.
*/
@Test
public void testExceptionOnWrite() throws Exception {
NettyProtocol protocol = new NettyProtocol(
mock(ResultPartitionProvider.class),
mock(TaskEventDispatcher.class),
true) {
@Override
public ChannelHandler[] getServerChannelHandlers() {
return new ChannelHandler[0];
}
};
// We need a real server and client in this test, because Netty's EmbeddedChannel is
// not failing the ChannelPromise of failed writes.
NettyServerAndClient serverAndClient = initServerAndClient(protocol, createConfig());
Channel ch = connect(serverAndClient);
NetworkClientHandler handler = getClientHandler(ch);
// Last outbound handler throws Exception after 1st write
ch.pipeline().addFirst(new ChannelOutboundHandlerAdapter() {
int writeNum = 0;
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
throws Exception {
if (writeNum >= 1) {
throw new RuntimeException("Expected test exception.");
}
writeNum++;
ctx.write(msg, promise);
}
});
PartitionRequestClient requestClient = new PartitionRequestClient(
ch, handler, mock(ConnectionID.class), mock(PartitionRequestClientFactory.class));
// Create input channels
RemoteInputChannel[] rich = new RemoteInputChannel[] {
createRemoteInputChannel(), createRemoteInputChannel()};
final CountDownLatch sync = new CountDownLatch(1);
// Do this with explicit synchronization. Otherwise this is not robust against slow timings
// of the callback (e.g. we cannot just verify that it was called once, because there is
// a chance that we do this too early).
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
sync.countDown();
return null;
}
}).when(rich[1]).onError(isA(LocalTransportException.class));
// First request is successful
ChannelFuture f = requestClient.requestSubpartition(new ResultPartitionID(), 0, rich[0], 0);
assertTrue(f.await().isSuccess());
// Second request is *not* successful
f = requestClient.requestSubpartition(new ResultPartitionID(), 0, rich[1], 0);
assertFalse(f.await().isSuccess());
// Only the second channel should be notified about the error
verify(rich[0], times(0)).onError(any(LocalTransportException.class));
// Wait for the notification
if (!sync.await(TestingUtils.TESTING_DURATION().toMillis(), TimeUnit.MILLISECONDS)) {
fail("Timed out after waiting for " + TestingUtils.TESTING_DURATION().toMillis() +
" ms to be notified about the channel error.");
}
shutdown(serverAndClient);
}
/**
* Verifies that {@link NettyMessage.ErrorResponse} messages are correctly wrapped in
* {@link RemoteTransportException} instances.
*/
@Test
public void testWrappingOfRemoteErrorMessage() throws Exception {
EmbeddedChannel ch = createEmbeddedChannel();
NetworkClientHandler handler = getClientHandler(ch);
// Create input channels
RemoteInputChannel[] rich = new RemoteInputChannel[] {
createRemoteInputChannel(), createRemoteInputChannel()};
for (RemoteInputChannel r : rich) {
when(r.getInputChannelId()).thenReturn(new InputChannelID());
handler.addInputChannel(r);
}
// Error msg for channel[0]
ch.pipeline().fireChannelRead(new NettyMessage.ErrorResponse(
new RuntimeException("Expected test exception"),
rich[0].getInputChannelId()));
try {
// Exception should not reach end of pipeline...
ch.checkException();
}
catch (Exception e) {
fail("The exception reached the end of the pipeline and "
+ "was not handled correctly by the last handler.");
}
verify(rich[0], times(1)).onError(isA(RemoteTransportException.class));
verify(rich[1], never()).onError(any(Throwable.class));
// Fatal error for all channels
ch.pipeline().fireChannelRead(new NettyMessage.ErrorResponse(
new RuntimeException("Expected test exception")));
try {
// Exception should not reach end of pipeline...
ch.checkException();
}
catch (Exception e) {
fail("The exception reached the end of the pipeline and "
+ "was not handled correctly by the last handler.");
}
verify(rich[0], times(2)).onError(isA(RemoteTransportException.class));
verify(rich[1], times(1)).onError(isA(RemoteTransportException.class));
}
/**
* Verifies that unexpected remote closes are reported as an instance of
* {@link RemoteTransportException}.
*/
@Test
public void testExceptionOnRemoteClose() throws Exception {
NettyProtocol protocol = new NettyProtocol(
mock(ResultPartitionProvider.class),
mock(TaskEventDispatcher.class),
true) {
@Override
public ChannelHandler[] getServerChannelHandlers() {
return new ChannelHandler[] {
// Close on read
new ChannelInboundHandlerAdapter() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg)
throws Exception {
ctx.channel().close();
}
}
};
}
};
NettyServerAndClient serverAndClient = initServerAndClient(protocol, createConfig());
Channel ch = connect(serverAndClient);
NetworkClientHandler handler = getClientHandler(ch);
// Create input channels
RemoteInputChannel[] rich = new RemoteInputChannel[] {
createRemoteInputChannel(), createRemoteInputChannel()};
final CountDownLatch sync = new CountDownLatch(rich.length);
Answer<Void> countDownLatch = new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
sync.countDown();
return null;
}
};
for (RemoteInputChannel r : rich) {
doAnswer(countDownLatch).when(r).onError(any(Throwable.class));
handler.addInputChannel(r);
}
// Write something to trigger close by server
ch.writeAndFlush(Unpooled.buffer().writerIndex(16));
// Wait for the notification
if (!sync.await(TestingUtils.TESTING_DURATION().toMillis(), TimeUnit.MILLISECONDS)) {
fail("Timed out after waiting for " + TestingUtils.TESTING_DURATION().toMillis() +
" ms to be notified about remote connection close.");
}
// All the registered channels should be notified.
for (RemoteInputChannel r : rich) {
verify(r).onError(isA(RemoteTransportException.class));
}
shutdown(serverAndClient);
}
/**
* Verifies that fired Exceptions are handled correctly by the pipeline.
*/
@Test
public void testExceptionCaught() throws Exception {
EmbeddedChannel ch = createEmbeddedChannel();
NetworkClientHandler handler = getClientHandler(ch);
// Create input channels
RemoteInputChannel[] rich = new RemoteInputChannel[] {
createRemoteInputChannel(), createRemoteInputChannel()};
for (RemoteInputChannel r : rich) {
when(r.getInputChannelId()).thenReturn(new InputChannelID());
handler.addInputChannel(r);
}
ch.pipeline().fireExceptionCaught(new Exception());
try {
// Exception should not reach end of pipeline...
ch.checkException();
}
catch (Exception e) {
fail("The exception reached the end of the pipeline and "
+ "was not handled correctly by the last handler.");
}
// ...but all the registered channels should be notified.
for (RemoteInputChannel r : rich) {
verify(r).onError(isA(LocalTransportException.class));
}
}
/**
* Verifies that "Connection reset by peer" Exceptions are special-cased and are reported as
* an instance of {@link RemoteTransportException}.
*/
@Test
public void testConnectionResetByPeer() throws Throwable {
EmbeddedChannel ch = createEmbeddedChannel();
NetworkClientHandler handler = getClientHandler(ch);
RemoteInputChannel rich = addInputChannel(handler);
final Throwable[] error = new Throwable[1];
// Verify the Exception
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Throwable cause = (Throwable) invocation.getArguments()[0];
try {
assertEquals(RemoteTransportException.class, cause.getClass());
assertNotEquals("Connection reset by peer", cause.getMessage());
assertEquals(IOException.class, cause.getCause().getClass());
assertEquals("Connection reset by peer", cause.getCause().getMessage());
}
catch (Throwable t) {
error[0] = t;
}
return null;
}
}).when(rich).onError(any(Throwable.class));
ch.pipeline().fireExceptionCaught(new IOException("Connection reset by peer"));
assertNull(error[0]);
}
/**
* Verifies that the channel is closed if there is an error *during* error notification.
*/
@Test
public void testChannelClosedOnExceptionDuringErrorNotification() throws Exception {
EmbeddedChannel ch = createEmbeddedChannel();
NetworkClientHandler handler = getClientHandler(ch);
RemoteInputChannel rich = addInputChannel(handler);
doThrow(new RuntimeException("Expected test exception"))
.when(rich).onError(any(Throwable.class));
ch.pipeline().fireExceptionCaught(new Exception());
assertFalse(ch.isActive());
}
// ---------------------------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------------------------
private EmbeddedChannel createEmbeddedChannel() {
NettyProtocol protocol = new NettyProtocol(
mock(ResultPartitionProvider.class),
mock(TaskEventDispatcher.class),
true);
return new EmbeddedChannel(protocol.getClientChannelHandlers());
}
/** Registers a freshly mocked remote input channel with the given handler and returns it. */
private RemoteInputChannel addInputChannel(NetworkClientHandler clientHandler) throws IOException {
    final RemoteInputChannel inputChannel = createRemoteInputChannel();
    clientHandler.addInputChannel(inputChannel);
    return inputChannel;
}
/** Looks up the client handler instance installed in the channel's pipeline. */
private NetworkClientHandler getClientHandler(Channel ch) {
return ch.pipeline().get(NetworkClientHandler.class);
}
/** Creates a mocked remote input channel that reports a fresh {@link InputChannelID}. */
private RemoteInputChannel createRemoteInputChannel() {
    final RemoteInputChannel inputChannel = mock(RemoteInputChannel.class);
    when(inputChannel.getInputChannelId()).thenReturn(new InputChannelID());
    return inputChannel;
}
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.presentation.registration;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.olat.data.basesecurity.BaseSecurity;
import org.olat.data.basesecurity.Identity;
import org.olat.data.registration.TemporaryKey;
import org.olat.data.registration.TemporaryKeyImpl;
import org.olat.lms.commons.i18n.I18nManager;
import org.olat.lms.commons.mail.MailTemplateHelper;
import org.olat.lms.commons.mediaresource.RedirectMediaResource;
import org.olat.lms.registration.RegisterUserParameter;
import org.olat.lms.registration.RegistrationModule;
import org.olat.lms.registration.RegistrationService;
import org.olat.lms.registration.RegistrationServiceEBL;
import org.olat.lms.registration.UserPropertyParameter;
import org.olat.lms.user.UserService;
import org.olat.lms.user.propertyhandler.UserPropertyHandler;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.chiefcontrollers.LanguageChangedEvent;
import org.olat.presentation.framework.core.components.Component;
import org.olat.presentation.framework.core.components.form.flexible.FormItem;
import org.olat.presentation.framework.core.components.panel.Panel;
import org.olat.presentation.framework.core.components.velocity.VelocityContainer;
import org.olat.presentation.framework.core.control.Controller;
import org.olat.presentation.framework.core.control.WindowControl;
import org.olat.presentation.framework.core.control.controller.BasicController;
import org.olat.presentation.framework.core.control.generic.wizard.WizardInfoController;
import org.olat.presentation.framework.core.translator.PackageUtil;
import org.olat.presentation.framework.dispatcher.LocaleNegotiator;
import org.olat.presentation.framework.layout.fullWebApp.LayoutMain3ColsController;
import org.olat.system.commons.Settings;
import org.olat.system.commons.WebappHelper;
import org.olat.system.event.Event;
import org.olat.system.exception.OLATRuntimeException;
import org.olat.system.mail.Emailer;
import org.olat.system.spring.CoreSpringFactory;
/**
* Description:<br>
* Controls the registration workflow.
* <P>
*
* @author Sabina Jeger
*/
public class RegistrationController extends BasicController {
// Divider line placed between the body text and the "where from" footer of notification e-mails.
private static final String SEPARATOR = "____________________________________________________________________\n";
// Main velocity page ("reg"); may be replaced by an error page when a preset username cannot be used.
private VelocityContainer myContent;
// Panel hosting the form of the currently active wizard step.
private final Panel regarea;
// Wizard step indicator; this workflow has 5 steps.
private final WizardInfoController wic;
// Step 4: disclaimer acceptance form.
private DisclaimerController dclController;
// Step 1: e-mail entry form used to request a temporary registration key.
private EmailSendingForm ef;
// Step 3: user data form (login, password, user properties).
private RegistrationForm2 rf2;
// Step 0/1: language chooser shown when no registration key is present.
private LanguageChooserController lc;
// Temporary registration key from the "key" request parameter; null/empty means start at step 1.
private final String regKey;
// Key entry loaded for regKey; stays null when the key is unknown (error shown instead).
private TemporaryKeyImpl tempKey;
/**
 * Controller implementing registration work flow.
 * Entry point: without a "key" request parameter the language/e-mail steps are shown;
 * with a valid key the user-data step (3) is rendered directly.
 *
 * @param ureq
 * @param wControl
 */
public RegistrationController(final UserRequest ureq, final WindowControl wControl) {
super(ureq, wControl);
// Self-registration is a global switch; launching this controller without it is a config error.
if (!RegistrationModule.isSelfRegistrationEnabled()) {
throw new OLATRuntimeException(RegistrationController.class, "Registration controller launched but self registration is turned off in the config file", null);
}
// override language when not the same as in ureq and add fallback to
// property handler translator for user properties
final String lang = ureq.getParameter("lang");
if (lang != null && !lang.equals(I18nManager.getInstance().getLocaleKey(getLocale()))) {
final Locale loc = I18nManager.getInstance().getLocaleOrDefault(lang);
ureq.getUserSession().setLocale(loc);
setLocale(loc, true);
setTranslator(getUserService().getUserPropertiesConfig().getTranslator(PackageUtil.createPackageTranslator(this.getClass(), loc)));
} else {
// set fallback only
setTranslator(getUserService().getUserPropertiesConfig().getTranslator(getTranslator()));
}
// construct content
myContent = createVelocityContainer("reg");
wic = new WizardInfoController(ureq, 5);
listenTo(wic);
myContent.put("regwizard", wic.getInitialComponent());
regarea = new Panel("regarea");
myContent.put("regarea", regarea);
regKey = ureq.getHttpReq().getParameter("key");
if (regKey == null || regKey.equals("")) {
// no temporary key is given, we assume step 1. If this is the case, we
// render in a modal dialog, no need to add the 3cols layout controller
// wrapper
createLanguageForm(ureq, wControl);
putInitialPanel(myContent);
} else {
// we check if given key is a valid temporary key
tempKey = getRegistrationService().loadTemporaryKeyByRegistrationKey(regKey);
// if key is not valid we redirect to first page
if (tempKey == null) {
// error, there should be an entry
showError("regkey.missingentry");
createLanguageForm(ureq, wControl);
} else {
// valid key: jump straight to step 3 (user data entry)
wic.setCurStep(3);
myContent.contextPut("pwdhelp", translate("pwdhelp"));
myContent.contextPut("loginhelp", translate("loginhelp"));
myContent.contextPut("text", translate("step3.reg.text"));
myContent.contextPut("email", tempKey.getEmailAddress());
final Map<String, String> userAttrs = new HashMap<String, String>();
userAttrs.put("email", tempKey.getEmailAddress());
// An optional interceptor can propose (or force) a username derived from user attributes.
if (RegistrationModule.getUsernamePresetBean() != null) {
final UserNameCreationInterceptor interceptor = RegistrationModule.getUsernamePresetBean();
final String proposedUsername = interceptor.getUsernameFor(userAttrs);
if (proposedUsername == null) {
// no proposal: either let the user pick one, or fail if the username is mandatory-preset
if (interceptor.allowChangeOfUsername()) {
createRegForm2(ureq, null, false, false);
} else {
myContent = setErrorPage("reg.error.no_username", wControl);
}
} else {
// proposal exists: check whether that identity is already taken
final Identity identity = getBaseSecurity().findIdentityByName(proposedUsername);
if (identity != null) {
if (interceptor.allowChangeOfUsername()) {
// taken, but user may change it: prefill and flag "in use"
createRegForm2(ureq, proposedUsername, true, false);
} else {
myContent = setErrorPage("reg.error.user_in_use", wControl);
}
} else if (interceptor.allowChangeOfUsername()) {
// free and changeable: prefill, editable
createRegForm2(ureq, proposedUsername, false, false);
} else {
// free but fixed: prefill, read-only
createRegForm2(ureq, proposedUsername, false, true);
}
}
} else {
// no interceptor configured: plain empty registration form
createRegForm2(ureq, null, false, false);
}
}
// load view in layout
final LayoutMain3ColsController layoutCtr = new LayoutMain3ColsController(ureq, getWindowControl(), null, null, myContent, null);
listenTo(layoutCtr);
putInitialPanel(layoutCtr.getInitialComponent());
}
}
// Spring lookup; resolved lazily per call rather than injected.
private BaseSecurity getBaseSecurity() {
return CoreSpringFactory.getBean(BaseSecurity.class);
}
/**
 * Builds an error page container for the given i18n key and also pushes the
 * translated message into the window's error channel.
 */
private VelocityContainer setErrorPage(final String errorKey, final WindowControl wControl) {
final String error = getTranslator().translate(errorKey);
wControl.setError(error);
final VelocityContainer errorContainer = createVelocityContainer("error");
errorContainer.contextPut("errorMsg", error);
return errorContainer;
}
/**
 * Creates the step-3 user data form and mounts it into the registration area.
 *
 * @param proposedUsername prefilled username, may be null
 * @param userInUse whether the proposed username is already taken
 * @param usernameReadonly whether the username field may not be edited
 */
private void createRegForm2(final UserRequest ureq, final String proposedUsername, final boolean userInUse, final boolean usernameReadonly) {
rf2 = new RegistrationForm2(ureq, getWindowControl(), I18nManager.getInstance().getLocaleKey(getLocale()), proposedUsername, userInUse, usernameReadonly);
listenTo(rf2);
regarea.setContent(rf2.getInitialComponent());
}
/**
 * Creates (or re-creates) the language chooser and mounts it into the registration area.
 */
private void createLanguageForm(final UserRequest ureq, final WindowControl wControl) {
removeAsListenerAndDispose(lc);
lc = new LanguageChooserController(ureq, wControl, true);
listenTo(lc);
myContent.contextPut("text", translate("select.language.description"));
regarea.setContent(lc.getInitialComponent());
}
/**
 * just needed for creating EmailForm
 */
private void createEmailForm(final UserRequest ureq) {
removeAsListenerAndDispose(ef);
ef = new EmailSendingForm(ureq, getWindowControl());
listenTo(ef);
myContent.contextPut("text", translate("step1.reg.text"));
regarea.setContent(ef.getInitialComponent());
}
/**
 * Component events are not handled; all interaction arrives via controller events below.
 */
@Override
protected void event(final UserRequest ureq, final Component source, final Event event) {
//
}
/**
 * Drives the wizard: reacts to DONE/CANCELLED events of the step controllers
 * (e-mail form, language chooser, user data form, disclaimer) and advances the flow.
 */
@Override
protected void event(final UserRequest ureq, final Controller source, final Event event) {
if (source == ef) {
// step 1 -> 2: e-mail submitted, send registration link
if (event == Event.DONE_EVENT) { // form
// validation
// was ok
wic.setCurStep(2);
// Email requested for tempkey
// save the fields somewhere
final String email = ef.getEmailAddress();
myContent.contextPut("email", email);
myContent.contextPut("text", translate("step2.reg.text", email));
// ef.setVisible(false);
regarea.setVisible(false);
// look for user in "Person" and "user" tables
boolean isMailSent = sendRegistrationEmail(ureq, email);
if (isMailSent) {
showInfo("email.sent");
} else {
showError("email.notsent");
}
} else if (event == Event.CANCELLED_EVENT) {
fireEvent(ureq, Event.CANCELLED_EVENT);
}
} else if (source == lc) {
// language chooser: either proceed to the e-mail step or react to a language switch
if (event == Event.DONE_EVENT) {
wic.setCurStep(2);
createEmailForm(ureq);
ureq.getUserSession().removeEntry(LocaleNegotiator.NEGOTIATED_LOCALE);
} else if (event == Event.CANCELLED_EVENT) {
// cancel: leave the workflow and redirect to the application entry point
ureq.getDispatchResult().setResultingMediaResource(new RedirectMediaResource(Settings.getServerContextPathURI()));
} else if (event instanceof LanguageChangedEvent) {
final LanguageChangedEvent lcev = (LanguageChangedEvent) event;
setLocale(lcev.getNewLocale(), true);
myContent.contextPut("text", translate("select.language.description"));
}
} else if (source == rf2) {
// Userdata entered
if (event == Event.DONE_EVENT) {
final String lang = rf2.getLangKey();
// change language if different then current language
if (!lang.equals(I18nManager.getInstance().getLocaleKey(ureq.getLocale()))) {
final Locale loc = I18nManager.getInstance().getLocaleOrDefault(lang);
ureq.getUserSession().setLocale(loc);
getTranslator().setLocale(loc);
}
// step 3 -> 4: show the disclaimer
wic.setCurStep(4);
myContent.contextPut("pwdhelp", "");
myContent.contextPut("loginhelp", "");
myContent.contextPut("text", translate("step4.reg.text"));
removeAsListenerAndDispose(dclController);
dclController = new DisclaimerController(ureq, getWindowControl());
listenTo(dclController);
regarea.setContent(dclController.getInitialComponent());
} else if (event == Event.CANCELLED_EVENT) {
ureq.getDispatchResult().setResultingMediaResource(new RedirectMediaResource(Settings.getServerContextPathURI()));
}
} else if (source == dclController) {
// step 4 -> 5: disclaimer accepted, actually create the user
if (event == Event.DONE_EVENT) {
wic.setCurStep(5);
myContent.contextRemove("text");
myContent.contextPut("pwdhelp", "");
myContent.contextPut("loginhelp", "");
myContent.contextPut("disclaimer", "");
// myContent.contextPut("yourdata", translate("step5.reg.yourdata"));
rf2.freeze();
regarea.setContent(rf2.getInitialComponent());
final String login = rf2.getLogin();
final String pwd = rf2.getPassword();
String firstName = rf2.getFirstName();
String lastName = rf2.getLastName();
Locale locale = I18nManager.getInstance().getLocaleOrDefault(rf2.getLangKey());
// create user with mandatory fields from registrationform
Identity persistedIdentity = getRegistrationServiceEBL().registerUser(
new RegisterUserParameter(login, pwd, firstName, lastName, locale, tempKey, getUserPropertyParameters()));
if (persistedIdentity == null) {
showError("user.notregistered");
} else {
// show last screen
myContent.contextPut("text", getTranslator().translate("step5.reg.text", new String[] { WebappHelper.getServletContextPath(), login }));
}
} else if (event == Event.CANCELLED_EVENT) {
ureq.getDispatchResult().setResultingMediaResource(new RedirectMediaResource(Settings.getServerContextPathURI()));
}
}
}
/**
 * Sends either a registration e-mail (unknown address: creates/reuses a temporary key)
 * or a "you already have an account" e-mail (known address).
 *
 * @param ureq
 * @param email
 * @return true when the e-mail was handed to the mailer without error
 */
private boolean sendRegistrationEmail(final UserRequest ureq, final String email) {
final boolean foundUser = getUserService().userExist(email);
// get remote address
final String ip = ureq.getHttpReq().getRemoteAddr();
String body = null;
final String today = DateFormat.getDateInstance(DateFormat.LONG, ureq.getLocale()).format(new Date());
final Emailer mailer = new Emailer(MailTemplateHelper.getMailTemplateWithFooterNoUserData(ureq.getLocale()));
// TODO eMail Vorlagen
final String serverpath = Settings.getServerContextPathURI();
boolean isMailSent = false;
if (!foundUser) {
TemporaryKey tk = getRegistrationServiceEBL().getOrCreateTemporaryKey(ip, email);
myContent.contextPut("regKey", tk.getRegistrationKey());
body = getTranslator().translate("reg.body", new String[] { serverpath, tk.getRegistrationKey(), I18nManager.getInstance().getLocaleKey(ureq.getLocale()) })
+ SEPARATOR + getTranslator().translate("reg.wherefrom", new String[] { serverpath, today, ip });
try {
if (mailer.sendEmail(email, translate("reg.subject"), body)) {
isMailSent = true;
}
} catch (final Exception e) {
// nothing to do, emailSent flag is false, errors will be reported to user
}
} else {
// a user exists, this is an error in the registration page
// send email
final Identity identity = getUserService().findIdentityByEmail(email);
body = translate("login.body", identity.getName()) + SEPARATOR + getTranslator().translate("reg.wherefrom", new String[] { serverpath, today, ip });
try {
isMailSent = mailer.sendEmail(email, translate("login.subject"), body);
} catch (final Exception e) {
// nothing to do, emailSent flag is false, errors will be reported to user
}
}
return isMailSent;
}
// Spring lookup for the registration business logic facade.
private RegistrationServiceEBL getRegistrationServiceEBL() {
return CoreSpringFactory.getBean(RegistrationServiceEBL.class);
}
/**
 * Collects the values of all configured registration-form user property fields
 * from the step-3 form for persisting with the new user.
 */
private List<UserPropertyParameter> getUserPropertyParameters() {
List<UserPropertyParameter> parameters = new ArrayList<UserPropertyParameter>();
final List<UserPropertyHandler> userPropertyHandlers = getUserService().getUserPropertyHandlersFor(RegistrationForm2.USERPROPERTIES_FORM_IDENTIFIER, false);
for (final UserPropertyHandler userPropertyHandler : userPropertyHandlers) {
final FormItem fi = rf2.getPropFormItem(userPropertyHandler.getName());
parameters.add(new UserPropertyParameter(userPropertyHandler, fi));
}
return parameters;
}
// Child controllers are registered via listenTo(...) and disposed by BasicController.
@Override
protected void doDispose() {
//
}
private UserService getUserService() {
return CoreSpringFactory.getBean(UserService.class);
}
private RegistrationService getRegistrationService() {
return CoreSpringFactory.getBean(RegistrationService.class);
}
}
| |
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.integtests.fixtures.executer;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.gradle.api.Action;
import org.gradle.integtests.fixtures.logging.GroupedOutputFixture;
import org.gradle.internal.Pair;
import org.gradle.internal.featurelifecycle.LoggingDeprecatedFeatureHandler;
import org.gradle.launcher.daemon.client.DaemonStartupMessage;
import org.gradle.launcher.daemon.server.DaemonStateCoordinator;
import org.gradle.launcher.daemon.server.health.LowHeapSpaceDaemonExpirationStrategy;
import org.gradle.util.GUtil;
import org.junit.ComparisonFailure;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
/**
 * An {@link ExecutionResult} built by scraping the textual stdout/stderr of a Gradle build.
 * Splits the output into main content and post-build content, normalizes daemon/deprecation
 * noise, and extracts executed/skipped task names via regular expressions.
 */
public class OutputScrapingExecutionResult implements ExecutionResult {
    /** Matches a single stack-trace element line (optionally terminated by an ANSI erase code). */
    static final Pattern STACK_TRACE_ELEMENT = Pattern.compile("\\s+(at\\s+)?([\\w.$_]+/)?[\\w.$_]+\\.[\\w$_ =\\+\'-<>]+\\(.+?\\)(\\x1B\\[0K)?");
    private static final String TASK_PREFIX = "> Task ";
    //for example: ':a SKIPPED' or ':foo:bar:baz UP-TO-DATE' but not ':a'
    private static final Pattern SKIPPED_TASK_PATTERN = Pattern.compile("(> Task )?(:\\S+?(:\\S+?)*)\\s+((SKIPPED)|(UP-TO-DATE)|(NO-SOURCE)|(FROM-CACHE))");
    //for example: ':hey' or ':a SKIPPED' or ':foo:bar:baz UP-TO-DATE' but not ':a FOO'
    private static final Pattern TASK_PATTERN = Pattern.compile("(> Task )?(:\\S+?(:\\S+?)*)((\\s+SKIPPED)|(\\s+UP-TO-DATE)|(\\s+FROM-CACHE)|(\\s+NO-SOURCE)|(\\s+FAILED)|(\\s*))");
    /** Matches the final "BUILD SUCCESSFUL in Xs" / "BUILD FAILED in Xs" line. */
    private static final Pattern BUILD_RESULT_PATTERN = Pattern.compile("BUILD (SUCCESSFUL|FAILED) in( \\d+m?[smh])+");

    private final LogContent output;
    private final LogContent error;
    // Assigned only in the constructor, so declared final.
    private final boolean includeBuildSrc;
    // Output up to (but excluding) the BUILD SUCCESSFUL/FAILED line, cleaned of ANSI/debug noise.
    private final LogContent mainContent;
    // Output after the BUILD SUCCESSFUL/FAILED line; empty when no result line was found.
    private final LogContent postBuild;
    private final LogContent errorContent;
    // Lazily created; cached because parsing grouped output is comparatively expensive.
    private GroupedOutputFixture groupedOutputFixture;
    // Lazily computed, insertion-ordered set of executed task paths.
    private Set<String> tasks;

    public static List<String> flattenTaskPaths(Object[] taskPaths) {
        return org.gradle.util.CollectionUtils.toStringList(GUtil.flatten(taskPaths, Lists.newArrayList()));
    }

    /**
     * Creates a result from the output of a <em>single</em> Gradle invocation.
     *
     * @param output The raw build stdout chars.
     * @param error The raw build stderr chars.
     * @return A {@link OutputScrapingExecutionResult} for a successful build, or a {@link OutputScrapingExecutionFailure} for a failed build.
     */
    public static OutputScrapingExecutionResult from(String output, String error) {
        // Should provide a Gradle version as parameter so this check can be more precise
        if (output.contains("BUILD FAILED") || output.contains("FAILURE: Build failed with an exception.") || error.contains("BUILD FAILED")) {
            return new OutputScrapingExecutionFailure(output, error, true);
        }
        return new OutputScrapingExecutionResult(LogContent.of(output), LogContent.of(error), true);
    }

    /**
     * @param output The build stdout content.
     * @param error The build stderr content. Must have normalized line endings.
     * @param includeBuildSrc whether {@code :buildSrc:*} tasks are reported by the task queries
     */
    protected OutputScrapingExecutionResult(LogContent output, LogContent error, boolean includeBuildSrc) {
        this.output = output;
        this.error = error;
        this.includeBuildSrc = includeBuildSrc;
        // Split out up the output into main content and post build content
        LogContent filteredOutput = this.output.ansiCharsToPlainText().removeDebugPrefix();
        Pair<LogContent, LogContent> match = filteredOutput.splitOnFirstMatchingLine(BUILD_RESULT_PATTERN);
        if (match == null) {
            this.mainContent = filteredOutput;
            this.postBuild = LogContent.empty();
        } else {
            this.mainContent = match.getLeft();
            // drop(1) removes the BUILD SUCCESSFUL/FAILED line itself.
            this.postBuild = match.getRight().drop(1);
        }
        this.errorContent = error.ansiCharsToPlainText();
    }

    @Override
    public ExecutionResult getIgnoreBuildSrc() {
        return new OutputScrapingExecutionResult(output, error, false);
    }

    @Override
    public String getOutput() {
        return output.withNormalizedEol();
    }

    /**
     * The main content with debug prefix and ANSI characters removed.
     */
    public LogContent getMainContent() {
        return mainContent;
    }

    @Override
    public String getNormalizedOutput() {
        return normalize(output);
    }

    @Override
    public String getFormattedOutput() {
        return output.ansiCharsToColorText().withNormalizedEol();
    }

    @Override
    public String getPlainTextOutput() {
        return output.ansiCharsToPlainText().withNormalizedEol();
    }

    @Override
    public GroupedOutputFixture getGroupedOutput() {
        if (groupedOutputFixture == null) {
            groupedOutputFixture = new GroupedOutputFixture(getMainContent());
        }
        return groupedOutputFixture;
    }

    /**
     * Strips run-to-run noise (daemon lifecycle messages, the deprecation summary) and
     * replaces the build duration with a constant so outputs can be compared verbatim.
     */
    private String normalize(LogContent output) {
        List<String> result = new ArrayList<String>();
        List<String> lines = output.getLines();
        int i = 0;
        while (i < lines.size()) {
            String line = lines.get(i);
            if (line.contains(DaemonStartupMessage.STARTING_DAEMON_MESSAGE)) {
                // Remove the "daemon starting" message
                i++;
            } else if (line.contains(DaemonStateCoordinator.DAEMON_WILL_STOP_MESSAGE)) {
                // Remove the "Daemon will be shut down" message
                i++;
            } else if (line.contains(LowHeapSpaceDaemonExpirationStrategy.EXPIRE_DAEMON_MESSAGE)) {
                // Remove the "Expiring Daemon" message
                i++;
            } else if (line.contains(LoggingDeprecatedFeatureHandler.WARNING_SUMMARY)) {
                // Remove the deprecations message: "Deprecated Gradle features...", "Use '--warning-mode all'...", "See https://docs.gradle.org...", and additional newline
                i += 4;
            } else {
                // Create the matcher once and reuse it for both the match test and the replacement.
                java.util.regex.Matcher matcher = BUILD_RESULT_PATTERN.matcher(line);
                if (matcher.matches()) {
                    result.add(matcher.replaceFirst("BUILD $1 in 0s"));
                } else {
                    result.add(line);
                }
                i++;
            }
        }
        return LogContent.of(result).withNormalizedEol();
    }

    @Override
    public ExecutionResult assertOutputEquals(String expectedOutput, boolean ignoreExtraLines, boolean ignoreLineOrder) {
        SequentialOutputMatcher matcher = ignoreLineOrder ? new AnyOrderOutputMatcher() : new SequentialOutputMatcher();
        matcher.assertOutputMatches(expectedOutput, getNormalizedOutput(), ignoreExtraLines);
        return this;
    }

    @Override
    public ExecutionResult assertHasPostBuildOutput(String expectedOutput) {
        return assertContentContains(postBuild.withNormalizedEol(), expectedOutput, "Post-build output");
    }

    @Override
    public ExecutionResult assertNotOutput(String expectedOutput) {
        String expectedText = LogContent.of(expectedOutput).withNormalizedEol();
        // Checks both stdout and stderr, including post-build content.
        if (getOutput().contains(expectedText) || getError().contains(expectedText)) {
            failureOnUnexpectedOutput(String.format("Found unexpected text in build output.%nExpected not present: %s%n", expectedText));
        }
        return this;
    }

    @Override
    public ExecutionResult assertContentContains(String actualText, String expectedOutput, String label) {
        String expectedText = LogContent.of(expectedOutput).withNormalizedEol();
        if (!actualText.contains(expectedText)) {
            failOnMissingOutput("Did not find expected text in " + label.toLowerCase() + ".", label, expectedOutput, actualText);
        }
        return this;
    }

    @Override
    public ExecutionResult assertOutputContains(String expectedOutput) {
        return assertContentContains(getMainContent().withNormalizedEol(), expectedOutput, "Build output");
    }

    @Override
    public boolean hasErrorOutput(String expectedOutput) {
        return getError().contains(expectedOutput);
    }

    @Override
    public ExecutionResult assertHasErrorOutput(String expectedOutput) {
        return assertContentContains(errorContent.withNormalizedEol(), expectedOutput, "Error output");
    }

    @Override
    public String getError() {
        return error.withNormalizedEol();
    }

    /** Returns the executed task paths in the order they were started. */
    public List<String> getExecutedTasks() {
        return ImmutableList.copyOf(findExecutedTasksInOrderStarted());
    }

    private Set<String> findExecutedTasksInOrderStarted() {
        if (tasks == null) {
            // LinkedHashSet keeps first-started order while de-duplicating status updates.
            tasks = new LinkedHashSet<String>(grepTasks(TASK_PATTERN));
        }
        return tasks;
    }

    @Override
    public ExecutionResult assertTasksExecutedInOrder(Object... taskPaths) {
        Set<String> allTasks = TaskOrderSpecs.exact(taskPaths).getTasks();
        assertTasksExecuted(allTasks);
        assertTaskOrder(taskPaths);
        return this;
    }

    @Override
    public ExecutionResult assertTasksExecuted(Object... taskPaths) {
        Set<String> expectedTasks = new TreeSet<String>(flattenTaskPaths(taskPaths));
        Set<String> actualTasks = findExecutedTasksInOrderStarted();
        // TreeSet vs LinkedHashSet: equals ignores order, so only membership is compared.
        if (!expectedTasks.equals(actualTasks)) {
            failOnDifferentSets("Build output does not contain the expected tasks.", expectedTasks, actualTasks);
        }
        return this;
    }

    @Override
    public ExecutionResult assertTasksExecutedAndNotSkipped(Object... taskPaths) {
        assertTasksExecuted(taskPaths);
        return assertTasksNotSkipped(taskPaths);
    }

    @Override
    public ExecutionResult assertTaskExecuted(String taskPath) {
        Set<String> actualTasks = findExecutedTasksInOrderStarted();
        if (!actualTasks.contains(taskPath)) {
            failOnMissingElement("Build output does not contain the expected task.", taskPath, actualTasks);
        }
        return this;
    }

    @Override
    public ExecutionResult assertTaskNotExecuted(String taskPath) {
        Set<String> actualTasks = findExecutedTasksInOrderStarted();
        if (actualTasks.contains(taskPath)) {
            // Message fixed: previously read "does contains unexpected task".
            failOnMissingElement("Build output contains unexpected task.", taskPath, actualTasks);
        }
        return this;
    }

    @Override
    public ExecutionResult assertTaskOrder(Object... taskPaths) {
        TaskOrderSpecs.exact(taskPaths).assertMatches(-1, getExecutedTasks());
        return this;
    }

    /** Returns the paths of tasks reported as SKIPPED/UP-TO-DATE/NO-SOURCE/FROM-CACHE, sorted. */
    public Set<String> getSkippedTasks() {
        return new TreeSet<String>(grepTasks(SKIPPED_TASK_PATTERN));
    }

    @Override
    public ExecutionResult assertTasksSkipped(Object... taskPaths) {
        Set<String> expectedTasks = new TreeSet<String>(flattenTaskPaths(taskPaths));
        Set<String> skippedTasks = getSkippedTasks();
        if (!expectedTasks.equals(skippedTasks)) {
            failOnDifferentSets("Build output does not contain the expected skipped tasks.", expectedTasks, skippedTasks);
        }
        return this;
    }

    @Override
    public ExecutionResult assertTaskSkipped(String taskPath) {
        Set<String> tasks = new TreeSet<String>(getSkippedTasks());
        if (!tasks.contains(taskPath)) {
            failOnMissingElement("Build output does not contain the expected skipped task.", taskPath, tasks);
        }
        return this;
    }

    /** Executed tasks minus the skipped ones, i.e. tasks that actually did work. */
    private Collection<String> getNotSkippedTasks() {
        Set<String> all = new TreeSet<String>(getExecutedTasks());
        Set<String> skipped = getSkippedTasks();
        all.removeAll(skipped);
        return all;
    }

    @Override
    public ExecutionResult assertTasksNotSkipped(Object... taskPaths) {
        Set<String> expectedTasks = new TreeSet<String>(flattenTaskPaths(taskPaths));
        Set<String> tasks = new TreeSet<String>(getNotSkippedTasks());
        if (!expectedTasks.equals(tasks)) {
            failOnDifferentSets("Build output does not contain the expected non skipped tasks.", expectedTasks, tasks);
        }
        return this;
    }

    @Override
    public ExecutionResult assertTaskNotSkipped(String taskPath) {
        Set<String> tasks = new TreeSet<String>(getNotSkippedTasks());
        if (!tasks.contains(taskPath)) {
            failOnMissingElement("Build output does not contain the expected non skipped task.", taskPath, tasks);
        }
        return this;
    }

    private void failOnDifferentSets(String message, Set<String> expected, Set<String> actual) {
        failureOnUnexpectedOutput(String.format("%s%nExpected: %s%nActual: %s", message, expected, actual));
    }

    private void failOnMissingElement(String message, String expected, Set<String> actual) {
        failureOnUnexpectedOutput(String.format("%s%nExpected: %s%nActual: %s", message, expected, actual));
    }

    /** Throws a ComparisonFailure so IDEs can show an expected/actual diff. */
    private void failOnMissingOutput(String message, String type, String expected, String actual) {
        throw new ComparisonFailure(unexpectedOutputMessage(String.format("%s%nExpected: %s%n%n%s:%n=======%n%s", message, expected, type, actual)), expected, actual);
    }

    protected void failureOnUnexpectedOutput(String message) {
        throw new AssertionError(unexpectedOutputMessage(message));
    }

    private String unexpectedOutputMessage(String message) {
        return String.format("%s%nOutput:%n=======%n%s%nError:%n======%n%s", message, getOutput(), getError());
    }

    /**
     * Scans the main content for task status lines matching {@code pattern} and returns
     * the task names in the order first encountered, de-duplicating the second status
     * line Gradle may print for the same task (e.g. execution line + UP-TO-DATE line).
     */
    private List<String> grepTasks(final Pattern pattern) {
        final List<String> tasks = Lists.newArrayList();
        final List<String> taskStatusLines = Lists.newArrayList();
        getMainContent().eachLine(new Action<String>() {
            @Override
            public void execute(String line) {
                java.util.regex.Matcher matcher = pattern.matcher(line);
                if (matcher.matches()) {
                    String taskStatusLine = matcher.group().replace(TASK_PREFIX, "");
                    String taskName = matcher.group(2);
                    if (!includeBuildSrc && taskName.startsWith(":buildSrc:")) {
                        return;
                    }
                    // The task status line may appear twice - once for the execution, once for the UP-TO-DATE/SKIPPED/etc
                    // So don't add to the task list if this is an update to a previously added task.
                    // Find the status line for the previous record of this task
                    String previousTaskStatusLine = tasks.contains(taskName) ? taskStatusLines.get(tasks.lastIndexOf(taskName)) : "";
                    // Don't add if our last record has a `:taskName` status, and this one is `:taskName SOMETHING`
                    if (previousTaskStatusLine.equals(taskName) && !taskStatusLine.equals(taskName)) {
                        return;
                    }
                    taskStatusLines.add(taskStatusLine);
                    tasks.add(taskName);
                }
            }
        });
        return tasks;
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.cvsSupport2.cvsoperations.cvsMessages;
import com.intellij.cvsSupport2.CvsUtil;
import com.intellij.cvsSupport2.history.CvsRevisionNumber;
import com.intellij.cvsSupport2.cvsoperations.common.UpdatedFilesManager;
import com.intellij.cvsSupport2.util.CvsVfsUtil;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.CvsBundle;
import org.netbeans.lib.cvsclient.admin.Entry;
import org.netbeans.lib.cvsclient.command.update.UpdateFileInfo;
import org.netbeans.lib.cvsclient.command.update.UpdatedFileInfo;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import java.io.File;
public class FileMessage {
public static final int UNKNOWN = -1;
public static final int SCHEDULING_FOR_ADDING = 0;
public static final int SCHEDULING_FOR_REMOVING = 1;
public static final int UPDATING = 2;
public static final int IMPORTING = 3;
public static final int ADDING = 4;
public static final int REMOVING = 5;
public static final int SAVING = 6;
public static final int SENDING = 7;
public static final int MODIFIED = 8;
public static final int MERGED_WITH_CONFLICTS = 9;
public static final int NOT_IN_REPOSITORY = 10;
public static final int LOCALLY_ADDED = 11;
public static final int LOCALLY_REMOVED = 12;
public static final int REMOVED_FROM_REPOSITORY = 13;
public static final int PATCHED = 14;
public static final int UPDATING2 = 15;
public static final int MERGED = 16;
public static final int REMOVED_FROM_SERVER_CONFLICT = 17;
public static final int LOCALLY_REMOVED_CONFLICT = 18;
public static final int CREATED = 19;
public static final int CREATED_BY_SECOND_PARTY = 20;
private final int myType;
private String myFileAbsolutePath = "";
private CvsRevisionNumber myRevision;
@NonNls public static final String CONFLICT = "C";
@NonNls private static final String U_COMMIT_OPERATION_TYPE = "U";
@NonNls private static final String P_COMMIT_OPERATION_TYPE = "P";
@NonNls private static final String A_COMMIT_OPERATION_TYPE = "A";
@NonNls private static final String R_COMMIT_OPERATION_TYPE = "R";
@NonNls private static final String M_COMMIT_OPERATION_TYPE = "M";
@NonNls private static final String Y_COMMIT_OPERATION_TYPE = "Y";
/**
 * Builds a message for a commit-phase file event. The type is derived from the
 * one-letter CVS operation code in {@code info}; the revision is taken from the
 * Entry when one is present.
 */
public FileMessage(UpdateFileInfo info,
UpdatedFilesManager mergedFilesCollector,
UpdatedFilesManager updatedFilesManager) {
final Entry entry = info.getEntry();
myType = getCommitOperationType(info.getType(),
info.getFile(), entry,
mergedFilesCollector,
updatedFilesManager);
myFileAbsolutePath = info.getFile().getAbsolutePath();
if (entry != null) {
myRevision = new CvsRevisionNumber(entry.getRevision());
}
}
/**
 * Builds a message for an update-phase file event. The type is derived from the
 * library's UpdatedType enum; the revision is taken from the Entry when present.
 */
public FileMessage(UpdatedFileInfo info,
UpdatedFilesManager updatedFilesManager) {
final Entry entry = info.getEntry();
myType = getUpdateOperationType(info.getType(), info.getFile(), updatedFilesManager, entry);
myFileAbsolutePath = info.getFile().getAbsolutePath();
if (entry != null) {
myRevision = new CvsRevisionNumber(entry.getRevision());
}
}
/** Renders this message as the secondary text line of the given progress indicator. */
public void showMessageIn(ProgressIndicator progress) {
  progress.setText2(getMyActionName() + ": " + myFileAbsolutePath);
}
/**
 * Localized progress-line prefix for this message's status. Statuses without
 * a dedicated label share the generic "processing" text.
 */
private String getMyActionName() {
  switch (myType) {
    case SCHEDULING_FOR_ADDING:
      return CvsBundle.message("current.action.name.scheduling.for.adding");
    case SCHEDULING_FOR_REMOVING:
      return CvsBundle.message("current.action.name.scheduling.for.removing");
    // Both update paths deliberately share a single user-visible label.
    case UPDATING:
    case UPDATING2:
      return CvsBundle.message("current.action.name.updating");
    case IMPORTING:
      return CvsBundle.message("current.action.name.importing");
    case ADDING:
      return CvsBundle.message("current.action.name.adding");
    case REMOVING:
      return CvsBundle.message("current.action.name.removing");
    case SAVING:
      return CvsBundle.message("current.action.name.saving");
    case SENDING:
      return CvsBundle.message("current.action.name.sending");
    case MODIFIED:
      return CvsBundle.message("current.action.name.modified");
    default:
      return CvsBundle.message("current.action.name.processing");
  }
}
/** @return absolute path of the file this message is about */
public String getFileAbsolutePath() {
  return myFileAbsolutePath;
}
/** @return one of the status constants declared on this class */
public int getType() {
  return myType;
}
/** @return revision from the file's CVS entry, or null if it had no entry */
@Nullable
public CvsRevisionNumber getRevision() {
  return myRevision;
}
/**
 * Maps an update-result classification to one of the status constants.
 *
 * @param entry the file's CVS entry; may be null (both callers guard their
 *              own dereferences of it with a null check)
 */
private static int getUpdateOperationType(UpdatedFileInfo.UpdatedType type,
                                          File file,
                                          UpdatedFilesManager updatedFilesManager, Entry entry) {
  // Removal reported by the server always wins.
  if (type == UpdatedFileInfo.UpdatedType.REMOVED) {
    return REMOVED_FROM_REPOSITORY;
  }
  VirtualFile virtualParent = CvsVfsUtil.getParentFor(file);
  // No VFS parent means the containing directory appeared during this
  // operation, i.e. the file was just created.
  if (virtualParent == null) return CREATED;
  if (type == UpdatedFileInfo.UpdatedType.MERGED) {
    // FIX: 'entry' may legitimately be null here (the callers only use it
    // under a null check); previously this dereference could throw
    // NullPointerException. Without an entry there are no conflict markers
    // to consult, so treat the merge as clean.
    if (entry != null && entry.isConflict()) {
      return MERGED_WITH_CONFLICTS;
    }
    else {
      return MERGED;
    }
  }
  else {
    if (updatedFilesManager.isNewlyCreatedEntryFor(virtualParent, file.getName())) {
      return CREATED;
    }
    return UPDATING;
  }
}
/**
 * Translates a single-letter CVS server response code into one of the status
 * constants. All case literals are distinct, so this switch is equivalent to
 * an equals-chain over the codes (including throwing NullPointerException on
 * a null code, exactly as a chain of equals() calls would).
 */
private static int getCommitOperationType(String commitOperationType,
                                          File file,
                                          Entry entry,
                                          UpdatedFilesManager mergedFiles,
                                          UpdatedFilesManager updatedFilesManager) {
  switch (commitOperationType) {
    case U_COMMIT_OPERATION_TYPE: {
      final VirtualFile virtualParent = CvsVfsUtil.getParentFor(file);
      // A missing VFS parent, or an entry created during this run, means the
      // file is new rather than updated.
      if (virtualParent == null) return CREATED;
      if (updatedFilesManager.isNewlyCreatedEntryFor(virtualParent, file.getName())) {
        return CREATED;
      }
      return UPDATING;
    }
    case P_COMMIT_OPERATION_TYPE:
      return PATCHED;
    case A_COMMIT_OPERATION_TYPE:
      return LOCALLY_ADDED;
    case R_COMMIT_OPERATION_TYPE:
      return LOCALLY_REMOVED;
    case M_COMMIT_OPERATION_TYPE:
      return mergedFiles.isMerged(file) ? MERGED : MODIFIED;
    case CONFLICT:
      // Distinguish the flavors of conflict, most specific first.
      if ((entry != null && entry.isResultOfMerge()) || mergedFiles.isMerged(file)) {
        return MERGED_WITH_CONFLICTS;
      }
      if (mergedFiles.isCreatedBySecondParty(file)) {
        return CREATED_BY_SECOND_PARTY;
      }
      if (CvsUtil.isLocallyRemoved(file)) {
        return LOCALLY_REMOVED_CONFLICT;
      }
      return REMOVED_FROM_SERVER_CONFLICT;
    case Y_COMMIT_OPERATION_TYPE:
      return REMOVED_FROM_REPOSITORY;
    case "?":
      return NOT_IN_REPOSITORY;
    default:
      return UNKNOWN;
  }
}
}
| |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.remote.internal;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.openqa.selenium.json.Json.MAP_TYPE;
import static org.openqa.selenium.remote.http.HttpMethod.GET;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import org.junit.Test;
import org.openqa.selenium.Platform;
import org.openqa.selenium.internal.BuildInfo;
import org.openqa.selenium.json.Json;
import org.openqa.selenium.json.JsonOutput;
import org.openqa.selenium.net.PortProber;
import org.openqa.selenium.remote.http.HttpClient;
import org.openqa.selenium.remote.http.HttpRequest;
import org.openqa.selenium.remote.http.HttpResponse;
import org.seleniumhq.jetty9.server.Server;
import org.seleniumhq.jetty9.servlet.ServletContextHandler;
import org.seleniumhq.jetty9.servlet.ServletHolder;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.net.URI;
import java.net.URL;
import java.util.Map;
import java.util.stream.Stream;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Shared contract tests for {@link HttpClient} implementations. Concrete
 * subclasses only supply the factory; each test spins up a local Jetty server,
 * issues a request through the client under test and checks the round trip.
 */
abstract public class HttpClientTestBase {
  /** Supplies the {@link HttpClient.Factory} implementation under test. */
  protected abstract HttpClient.Factory createFactory();
  @Test
  public void responseShouldCaptureASingleHeader() throws Exception {
    HashMultimap<String, String> headers = HashMultimap.create();
    headers.put("Cake", "Delicious");
    HttpResponse response = getResponseWithHeaders(headers);
    String value = response.getHeader("Cake");
    assertEquals("Delicious", value);
  }
  /**
   * The HTTP spec says it should be
   * <a href="https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2">safe to combine them
   * </a>, but things like the <a href="https://www.ietf.org/rfc/rfc2109.txt">cookie spec</a> make
   * this hard (notably when a legal value may contain a comma).
   */
  @Test
  public void responseShouldKeepMultipleHeadersSeparate() throws Exception {
    HashMultimap<String, String> headers = HashMultimap.create();
    headers.put("Cheese", "Cheddar");
    headers.put("Cheese", "Brie, Gouda");
    HttpResponse response = getResponseWithHeaders(headers);
    ImmutableList<String> values = ImmutableList.copyOf(response.getHeaders("Cheese"));
    // Both values must survive individually; "Brie, Gouda" must not be split.
    assertTrue(values.toString(), values.contains("Cheddar"));
    assertTrue(values.toString(), values.contains("Brie, Gouda"));
  }
  @Test
  public void shouldAddUrlParameters() {
    HttpRequest request = new HttpRequest(GET, "/query");
    String value = request.getQueryParameter("cheese");
    assertNull(value);
    request.addQueryParameter("cheese", "brie");
    value = request.getQueryParameter("cheese");
    assertEquals("brie", value);
  }
  @Test
  public void shouldSendSimpleQueryParameters() throws Exception {
    HttpRequest request = new HttpRequest(GET, "/query");
    request.addQueryParameter("cheese", "cheddar");
    HttpResponse response = getQueryParameterResponse(request);
    // The echo servlet returns the parameter map as JSON: name -> list of values.
    Map<String, Object> values = new Json().toType(response.getContentString(), MAP_TYPE);
    assertEquals(ImmutableList.of("cheddar"), values.get("cheese"));
  }
  @Test
  public void shouldEncodeParameterNamesAndValues() throws Exception {
    // Spaces in name and value must be URL-encoded on the wire and decoded back.
    HttpRequest request = new HttpRequest(GET, "/query");
    request.addQueryParameter("cheese type", "tasty cheese");
    HttpResponse response = getQueryParameterResponse(request);
    Map<String, Object> values = new Json().toType(response.getContentString(), MAP_TYPE);
    assertEquals(ImmutableList.of("tasty cheese"), values.get("cheese type"));
  }
  @Test
  public void canAddMoreThanOneQueryParameter() throws Exception {
    HttpRequest request = new HttpRequest(GET, "/query");
    request.addQueryParameter("cheese", "cheddar");
    request.addQueryParameter("cheese", "gouda");
    request.addQueryParameter("vegetable", "peas");
    HttpResponse response = getQueryParameterResponse(request);
    Map<String, Object> values = new Json().toType(response.getContentString(), MAP_TYPE);
    // Repeated names keep insertion order.
    assertEquals(ImmutableList.of("cheddar", "gouda"), values.get("cheese"));
    assertEquals(ImmutableList.of("peas"), values.get("vegetable"));
  }
  @Test
  public void shouldAllowUrlsWithSchemesToBeUsed() throws Exception {
    Server server = new Server(PortProber.findFreePort());
    ServletContextHandler handler = new ServletContextHandler();
    handler.setContextPath("");
    // Servlet with a fixed response body, used to prove the request reached
    // the absolute URL rather than the client's configured base URL.
    class Canned extends HttpServlet {
      @Override
      protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
        try (PrintWriter writer = resp.getWriter()) {
          writer.append("Hello, World!");
        }
      }
    }
    ServletHolder holder = new ServletHolder(new Canned());
    handler.addServlet(holder, "/*");
    server.setHandler(handler);
    server.start();
    try {
      // This is a terrible choice of URL: the client is pointed at a host the
      // test never touches, so only an absolute request URL can succeed.
      HttpClient client = createFactory().createClient(new URL("http://example.com"));
      URI uri = server.getURI();
      HttpRequest request = new HttpRequest(
          GET,
          String.format("http://%s:%s/hello", uri.getHost(), uri.getPort()));
      HttpResponse response = client.execute(request);
      assertEquals("Hello, World!", response.getContentString());
    } finally {
      server.stop();
    }
  }
  @Test
  public void shouldIncludeAUserAgentHeader() throws Exception {
    // The servlet echoes the User-Agent header back as the response body.
    HttpResponse response = executeWithinServer(
        new HttpRequest(GET, "/foo"),
        new HttpServlet() {
          @Override
          protected void doGet(HttpServletRequest req, HttpServletResponse resp)
              throws IOException {
            try (Writer writer = resp.getWriter()) {
              writer.write(req.getHeader("user-agent"));
            }
          }
        });
    String label = new BuildInfo().getReleaseLabel();
    Platform platform = Platform.getCurrent();
    Platform family = platform.family() == null ? platform : platform.family();
    // First argument is the assertion message (JUnit's (message, expected,
    // actual) overload), chosen so failures show the actual header.
    assertEquals(
        response.getContentString(),
        String.format(
            "selenium/%s (java %s)",
            label,
            family.toString().toLowerCase()),
        response.getContentString());
  }
  /** Serves an empty-body response carrying exactly the given headers. */
  private HttpResponse getResponseWithHeaders(final Multimap<String, String> headers)
      throws Exception {
    return executeWithinServer(
        new HttpRequest(GET, "/foo"),
        new HttpServlet() {
          @Override
          protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
            headers.forEach(resp::addHeader);
            resp.setContentLengthLong(0);
          }
        });
  }
  /** Echoes the request's query parameters back as a JSON object of name -> value list. */
  private HttpResponse getQueryParameterResponse(HttpRequest request) throws Exception {
    return executeWithinServer(
        request,
        new HttpServlet() {
          @Override
          protected void doGet(HttpServletRequest req, HttpServletResponse resp)
              throws IOException {
            try (Writer writer = resp.getWriter()) {
              JsonOutput json = new Json().newOutput(writer);
              json.beginObject();
              req.getParameterMap()
                  .forEach((key, value) -> {
                    json.name(key);
                    json.beginArray();
                    Stream.of(value).forEach(v -> json.write(v, String.class));
                    json.endArray();
                  });
              json.endObject();
            }
          }
        });
  }
  /**
   * Starts a throw-away Jetty server backed by the given servlet on a free
   * port, executes the request against it with a freshly created client, and
   * always stops the server afterwards.
   */
  private HttpResponse executeWithinServer(HttpRequest request, HttpServlet servlet)
      throws Exception {
    Server server = new Server(PortProber.findFreePort());
    ServletContextHandler handler = new ServletContextHandler();
    handler.setContextPath("");
    ServletHolder holder = new ServletHolder(servlet);
    handler.addServlet(holder, "/*");
    server.setHandler(handler);
    server.start();
    try {
      HttpClient client = createFactory().createClient(server.getURI().toURL());
      return client.execute(request);
    } finally {
      server.stop();
    }
  }
}
| |
package com.github.dockerjava.core;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import com.github.dockerjava.api.DockerClient;
import com.github.dockerjava.api.command.*;
import com.github.dockerjava.api.model.AuthConfig;
import com.github.dockerjava.core.command.*;
import com.google.common.base.Preconditions;
/**
* @author Konstantin Pelykh (kpelykh@gmail.com)
*
* @see "https://github.com/docker/docker/blob/master/api/client/commands.go"
*/
public class DockerClientImpl implements Closeable, DockerClient {
    // Immutable configuration (endpoint URI, credentials, ...) for this client.
    private final DockerClientConfig dockerClientConfig;
    // Transport/exec backend; must be injected via withDockerCmdExecFactory()
    // before any command method is usable (see getDockerCmdExecFactory()).
    private DockerCmdExecFactory dockerCmdExecFactory;
    // Constructors are private; use the getInstance(...) factories below.
    private DockerClientImpl() {
        this(DockerClientConfig.createDefaultConfigBuilder().build());
    }
    private DockerClientImpl(String serverUrl) {
        this(configWithServerUrl(serverUrl));
    }
    private DockerClientImpl(DockerClientConfig dockerClientConfig) {
        Preconditions.checkNotNull(dockerClientConfig,
                "config was not specified");
        this.dockerClientConfig = dockerClientConfig;
    }
    /** Default config with only the server URI overridden. */
    private static DockerClientConfig configWithServerUrl(String serverUrl) {
        return DockerClientConfig.createDefaultConfigBuilder()
                .withUri(serverUrl).build();
    }
    /** Creates a client from the default configuration (env vars / properties). */
    public static DockerClientImpl getInstance() {
        return new DockerClientImpl();
    }
    /** Creates a client from an explicit, pre-built configuration. */
    public static DockerClientImpl getInstance(
            DockerClientConfig dockerClientConfig) {
        return new DockerClientImpl(dockerClientConfig);
    }
    /** Creates a client from the default configuration with the given server URL. */
    public static DockerClientImpl getInstance(String serverUrl) {
        return new DockerClientImpl(serverUrl);
    }
    /**
     * Injects and initializes the command-execution backend. Must be called
     * before any *Cmd() method; returns this for fluent chaining.
     */
    public DockerClientImpl withDockerCmdExecFactory(
            DockerCmdExecFactory dockerCmdExecFactory) {
        Preconditions.checkNotNull(dockerCmdExecFactory,
                "dockerCmdExecFactory was not specified");
        this.dockerCmdExecFactory = dockerCmdExecFactory;
        this.dockerCmdExecFactory.init(dockerClientConfig);
        return this;
    }
    // Guards every command method: fails fast if the backend was never injected.
    private DockerCmdExecFactory getDockerCmdExecFactory() {
        Preconditions.checkNotNull(dockerCmdExecFactory,
                "dockerCmdExecFactory was not specified");
        return dockerCmdExecFactory;
    }
    /**
     * Builds an AuthConfig from the configured credentials. Requires username
     * and serverAddress to be configured; password and email are copied as-is.
     */
    @Override
    public AuthConfig authConfig() {
        checkNotNull(dockerClientConfig.getUsername(),
                "Configured username is null.");
        checkNotNull(dockerClientConfig.getServerAddress(),
                "Configured serverAddress is null.");
        AuthConfig authConfig = new AuthConfig();
        authConfig.setUsername(dockerClientConfig.getUsername());
        authConfig.setPassword(dockerClientConfig.getPassword());
        authConfig.setEmail(dockerClientConfig.getEmail());
        authConfig.setServerAddress(dockerClientConfig.getServerAddress());
        return authConfig;
    }
    /**
     * * MISC API *
     */
    /**
     * Authenticate with the server, useful for checking authentication.
     */
    // Every *Cmd() method below follows the same pattern: construct the command
    // object around an exec created by the injected factory; the command is
    // only sent when the caller invokes exec() on it.
    @Override
    public AuthCmd authCmd() {
        return new AuthCmdImpl(getDockerCmdExecFactory().createAuthCmdExec(),
                authConfig());
    }
    @Override
    public InfoCmd infoCmd() {
        return new InfoCmdImpl(getDockerCmdExecFactory().createInfoCmdExec());
    }
    @Override
    public PingCmd pingCmd() {
        return new PingCmdImpl(getDockerCmdExecFactory().createPingCmdExec());
    }
    @Override
    public VersionCmd versionCmd() {
        return new VersionCmdImpl(getDockerCmdExecFactory()
                .createVersionCmdExec());
    }
    /**
     * * IMAGE API *
     */
    @Override
    public PullImageCmd pullImageCmd(String repository) {
        // Pull/push resolve per-registry credentials from the config.
        return new PullImageCmdImpl(getDockerCmdExecFactory()
                .createPullImageCmdExec(), dockerClientConfig.effectiveAuthConfig(repository), repository);
    }
    @Override
    public PushImageCmd pushImageCmd(String name) {
        return new PushImageCmdImpl(getDockerCmdExecFactory()
                .createPushImageCmdExec(), name).withAuthConfig(dockerClientConfig.effectiveAuthConfig(name));
    }
    @Override
    public CreateImageCmd createImageCmd(String repository,
            InputStream imageStream) {
        return new CreateImageCmdImpl(getDockerCmdExecFactory()
                .createCreateImageCmdExec(), repository, imageStream);
    }
    @Override
    public SearchImagesCmd searchImagesCmd(String term) {
        return new SearchImagesCmdImpl(getDockerCmdExecFactory()
                .createSearchImagesCmdExec(), term);
    }
    @Override
    public RemoveImageCmd removeImageCmd(String imageId) {
        return new RemoveImageCmdImpl(getDockerCmdExecFactory()
                .createRemoveImageCmdExec(), imageId);
    }
    @Override
    public ListImagesCmd listImagesCmd() {
        return new ListImagesCmdImpl(getDockerCmdExecFactory()
                .createListImagesCmdExec());
    }
    @Override
    public InspectImageCmd inspectImageCmd(String imageId) {
        return new InspectImageCmdImpl(getDockerCmdExecFactory()
                .createInspectImageCmdExec(), imageId);
    }
    /**
     * * CONTAINER API *
     */
    @Override
    public ListContainersCmd listContainersCmd() {
        return new ListContainersCmdImpl(getDockerCmdExecFactory()
                .createListContainersCmdExec());
    }
    @Override
    public CreateContainerCmd createContainerCmd(String image) {
        return new CreateContainerCmdImpl(getDockerCmdExecFactory()
                .createCreateContainerCmdExec(), image);
    }
    @Override
    public StartContainerCmd startContainerCmd(String containerId) {
        return new StartContainerCmdImpl(getDockerCmdExecFactory()
                .createStartContainerCmdExec(), containerId);
    }
    @Override
    public InspectContainerCmd inspectContainerCmd(String containerId) {
        return new InspectContainerCmdImpl(getDockerCmdExecFactory()
                .createInspectContainerCmdExec(), containerId);
    }
    @Override
    public ExecCreateCmd execCreateCmd(String containerId) {
        return new ExecCreateCmdImpl(getDockerCmdExecFactory().createExecCmdExec(), containerId);
    }
    @Override
    public RemoveContainerCmd removeContainerCmd(String containerId) {
        return new RemoveContainerCmdImpl(getDockerCmdExecFactory()
                .createRemoveContainerCmdExec(), containerId);
    }
    @Override
    public WaitContainerCmd waitContainerCmd(String containerId) {
        return new WaitContainerCmdImpl(getDockerCmdExecFactory()
                .createWaitContainerCmdExec(), containerId);
    }
    @Override
    public AttachContainerCmd attachContainerCmd(String containerId) {
        return new AttachContainerCmdImpl(getDockerCmdExecFactory()
                .createAttachContainerCmdExec(), containerId);
    }
    @Override
    public ExecStartCmd execStartCmd(String containerId) {
        return new ExecStartCmdImpl(getDockerCmdExecFactory().createExecStartCmdExec(), containerId);
    }
    @Override
    public LogContainerCmd logContainerCmd(String containerId) {
        return new LogContainerCmdImpl(getDockerCmdExecFactory()
                .createLogContainerCmdExec(), containerId);
    }
    @Override
    public CopyFileFromContainerCmd copyFileFromContainerCmd(
            String containerId, String resource) {
        return new CopyFileFromContainerCmdImpl(getDockerCmdExecFactory()
                .createCopyFileFromContainerCmdExec(), containerId, resource);
    }
    @Override
    public ContainerDiffCmd containerDiffCmd(String containerId) {
        return new ContainerDiffCmdImpl(getDockerCmdExecFactory()
                .createContainerDiffCmdExec(), containerId);
    }
    @Override
    public StopContainerCmd stopContainerCmd(String containerId) {
        return new StopContainerCmdImpl(getDockerCmdExecFactory()
                .createStopContainerCmdExec(), containerId);
    }
    @Override
    public KillContainerCmd killContainerCmd(String containerId) {
        return new KillContainerCmdImpl(getDockerCmdExecFactory()
                .createKillContainerCmdExec(), containerId);
    }
    @Override
    public RestartContainerCmd restartContainerCmd(String containerId) {
        return new RestartContainerCmdImpl(getDockerCmdExecFactory()
                .createRestartContainerCmdExec(), containerId);
    }
    @Override
    public CommitCmd commitCmd(String containerId) {
        return new CommitCmdImpl(getDockerCmdExecFactory()
                .createCommitCmdExec(), containerId);
    }
    @Override
    public BuildImageCmd buildImageCmd(File dockerFolder) {
        return new BuildImageCmdImpl(getDockerCmdExecFactory()
                .createBuildImageCmdExec(), dockerFolder);
    }
    @Override
    public BuildImageCmd buildImageCmd(InputStream tarInputStream) {
        return new BuildImageCmdImpl(getDockerCmdExecFactory()
                .createBuildImageCmdExec(), tarInputStream);
    }
    @Override
    public TopContainerCmd topContainerCmd(String containerId) {
        return new TopContainerCmdImpl(getDockerCmdExecFactory()
                .createTopContainerCmdExec(), containerId);
    }
    @Override
    public TagImageCmd tagImageCmd(String imageId, String repository, String tag) {
        return new TagImageCmdImpl(getDockerCmdExecFactory()
                .createTagImageCmdExec(), imageId, repository, tag);
    }
    @Override
    public PauseContainerCmd pauseContainerCmd(String containerId) {
        return new PauseContainerCmdImpl(getDockerCmdExecFactory()
                .createPauseContainerCmdExec(), containerId);
    }
    @Override
    public UnpauseContainerCmd unpauseContainerCmd(String containerId) {
        return new UnpauseContainerCmdImpl(getDockerCmdExecFactory()
                .createUnpauseContainerCmdExec(), containerId);
    }
    @Override
    public EventsCmd eventsCmd(EventCallback eventCallback) {
        return new EventsCmdImpl(getDockerCmdExecFactory()
                .createEventsCmdExec(), eventCallback);
    }
    /** Releases the underlying exec factory's resources (connections etc.). */
    @Override
    public void close() throws IOException {
        getDockerCmdExecFactory().close();
    }
}
| |
/*
* Copyright 2020 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client.cookie;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import java.net.URI;
import org.junit.jupiter.api.Test;
import com.linecorp.armeria.common.Cookie;
import com.linecorp.armeria.common.CookieBuilder;
import com.linecorp.armeria.common.Cookies;
/** Unit tests for {@link DefaultCookieJar}'s RFC 6265-style cookie handling. */
class DefaultCookieJarTest {
    @Test
    void ensureDomainAndPath() {
        // Missing domain/path are filled in from the request URI; explicit
        // ones are preserved. The default path is the URI path up to (not
        // including) its last segment.
        final DefaultCookieJar cookieJar = new DefaultCookieJar();
        final Cookie cookie = Cookie.ofSecure("name", "value");
        final CookieBuilder builder = Cookie.secureBuilder("name", "value");
        assertThat(cookieJar.ensureDomainAndPath(cookie, URI.create("http://foo.com")))
                .isEqualTo(builder.domain("foo.com").path("/").build());
        assertThat(cookieJar.ensureDomainAndPath(cookie, URI.create("http://bar.foo.com/")))
                .isEqualTo(builder.domain("bar.foo.com").path("/").build());
        assertThat(cookieJar.ensureDomainAndPath(cookie, URI.create("http://bar.foo.com/a/b")))
                .isEqualTo(builder.domain("bar.foo.com").path("/a").build());
        assertThat(cookieJar.ensureDomainAndPath(cookie, URI.create("http://foo.com/a/b/")))
                .isEqualTo(builder.domain("foo.com").path("/a/b").build());
        // domain and path are unchanged if already set
        assertThat(cookieJar.ensureDomainAndPath(builder.domain("foo.com").path("/a").build(),
                                                 URI.create("http://bar.foo.com/a/b/")))
                .isEqualTo(builder.domain("foo.com").path("/a").build());
        // A derived domain makes the cookie host-only; an explicit one does not.
        assertThat(cookieJar.ensureDomainAndPath(cookie, URI.create("http://foo.com")).isHostOnly()).isTrue();
        assertThat(cookieJar.ensureDomainAndPath(builder.domain("foo.com").build(),
                                                 URI.create("http://foo.com")).isHostOnly()).isFalse();
    }
    @Test
    void simple() {
        // Cookies are keyed by origin: foo's cookies never leak to bar and
        // vice versa.
        final CookieJar cookieJar = new DefaultCookieJar();
        final URI foo = URI.create("https://foo.com");
        final URI bar = URI.create("https://bar.com");
        cookieJar.set(foo, Cookies.of(Cookie.ofSecure("name1", "value1"), Cookie.ofSecure("name2", "value2")));
        assertThat(cookieJar.get(bar)).isEmpty();
        assertThat(cookieJar.get(foo)).hasSize(2);
        cookieJar.set(bar, Cookies.of(Cookie.ofSecure("name4", "value4"), Cookie.ofSecure("name5", "value5")));
        assertThat(cookieJar.get(bar)).hasSize(2);
        assertThat(cookieJar.get(foo)).hasSize(2).doesNotContainAnyElementsOf(Cookies.of(
                Cookie.secureBuilder("name4", "value4").domain("bar.com").path("/").build(),
                Cookie.secureBuilder("name5", "value5").domain("bar.com").path("/").build()));
    }
    @Test
    void publicSuffix() {
        // Cookies whose domain is a public suffix (com, kawasaki.jp, punycode
        // suffixes) must be rejected; a registrable domain is accepted.
        final DefaultCookieJar cookieJar = new DefaultCookieJar();
        final CookieBuilder builder = Cookie.secureBuilder("name", "value");
        URI uri = URI.create("https://google.com");
        cookieJar.set(uri, Cookies.of(builder.domain("com").build()));
        assertThat(cookieJar.get(uri)).isEmpty();
        uri = URI.create("https://foo.kawasaki.jp");
        cookieJar.set(uri, Cookies.of(builder.domain("kawasaki.jp").build()));
        assertThat(cookieJar.get(uri)).isEmpty();
        uri = URI.create("https://foo.city.kawasaki.jp");
        cookieJar.set(uri, Cookies.of(builder.domain("city.kawasaki.jp").build()));
        assertThat(cookieJar.get(uri)).hasSize(1);
        uri = URI.create("https://xn--12c1fe0br.xn--o3cw4h");
        cookieJar.set(uri, Cookies.of(builder.domain("xn--12c1fe0br.xn--o3cw4h").build()));
        assertThat(cookieJar.get(uri)).isEmpty();
    }
    @Test
    void secure() {
        // A 'secure' cookie may only be stored from and served to https URIs.
        final CookieJar cookieJar = new DefaultCookieJar();
        final URI fooHttp = URI.create("http://foo.com");
        final URI fooHttps = URI.create("https://foo.com");
        final Cookie secureCookie = Cookie.fromSetCookieHeader("name=value; secure");
        cookieJar.set(fooHttp, Cookies.of(secureCookie));
        assertThat(cookieJar.get(fooHttp)).isEmpty();
        cookieJar.set(fooHttps, Cookies.of(secureCookie));
        assertThat(cookieJar.get(fooHttp)).isEmpty();
        assertThat(cookieJar.get(fooHttps)).hasSize(1);
    }
    @Test
    void customPath() {
        // Path matching: "/bar" matches "/bar" and its subpaths, not "/".
        final CookieJar cookieJar = new DefaultCookieJar();
        final Cookie cookie1 = Cookie.fromSetCookieHeader("name=value; path=/bar");
        final Cookie cookie2 = Cookie.fromSetCookieHeader("name=value; path=/boo");
        cookieJar.set(URI.create("http://foo.com"), Cookies.of(cookie1, cookie2));
        assertThat(cookieJar.get(URI.create("http://foo.com"))).isEmpty();
        assertThat(cookieJar.get(URI.create("http://foo.com/bar"))).hasSize(1);
        assertThat(cookieJar.get(URI.create("http://foo.com/bar/baz"))).hasSize(1);
    }
    @Test
    void customDomain() {
        // Domain matching: an explicit domain (with or without leading dot)
        // covers all subdomains; a host-only cookie (cookie1) and cookies for
        // sibling/deeper hosts are not served.
        final CookieJar cookieJar = new DefaultCookieJar();
        final Cookie cookie1 = Cookie.fromSetCookieHeader("name1=value1");
        final Cookie cookie2 = Cookie.fromSetCookieHeader("name2=value2; domain=.foo.com");
        final Cookie cookie3 = Cookie.fromSetCookieHeader("name3=value3; domain=foo.com");
        final Cookie cookie4 = Cookie.fromSetCookieHeader("name4=value4; domain=bar.foo.com");
        final Cookie cookie5 = Cookie.fromSetCookieHeader("name5=value5; domain=baz.foo.com");
        final Cookie cookie6 = Cookie.fromSetCookieHeader("name6=value6; domain=baz.bar.foo.com");
        cookieJar.set(URI.create("http://bar.foo.com"),
                      Cookies.of(cookie1, cookie2, cookie3, cookie4, cookie5, cookie6));
        assertThat(cookieJar.get(URI.create("http://baz.foo.com")))
                .hasSize(2)
                .containsAll(Cookies.of(
                        Cookie.secureBuilder("name2", "value2").domain("foo.com").path("/").build(),
                        Cookie.secureBuilder("name3", "value3").domain("foo.com").path("/").build()));
        assertThat(cookieJar.get(URI.create("http://baz.bar.foo.com")))
                .hasSize(3)
                .containsAll(Cookies.of(
                        Cookie.secureBuilder("name2", "value2").domain("foo.com").path("/").build(),
                        Cookie.secureBuilder("name3", "value3").domain("foo.com").path("/").build(),
                        Cookie.secureBuilder("name4", "value4").domain("bar.foo.com").path("/").build()));
    }
    @Test
    void maxAge() {
        // maxAge > 0 expires after that many seconds; no maxAge never expires
        // here; negative maxAge removes the cookie immediately.
        final URI foo = URI.create("https://foo.com");
        final CookieJar cookieJar = new DefaultCookieJar();
        cookieJar.set(foo, Cookies.of(Cookie.secureBuilder("name", "value").maxAge(1).build()));
        await().untilAsserted(() -> assertThat(cookieJar.get(foo)).isEmpty());
        cookieJar.set(foo, Cookies.of(Cookie.secureBuilder("name", "value").build()));
        assertThat(cookieJar.get(foo)).hasSize(1);
        cookieJar.set(foo, Cookies.of(Cookie.secureBuilder("name", "value").maxAge(-1).build()));
        assertThat(cookieJar.get(foo)).isEmpty();
    }
    @Test
    void cookiePolicy() {
        // The policy decides whether set() accepts cookies at all.
        final URI foo = URI.create("https://foo.com");
        CookieJar cookieJar = new DefaultCookieJar(CookiePolicy.acceptNone());
        cookieJar.set(foo, Cookies.of(Cookie.ofSecure("name", "value")));
        assertThat(cookieJar.get(foo)).isEmpty();
        cookieJar = new DefaultCookieJar(CookiePolicy.acceptAll());
        cookieJar.set(foo, Cookies.of(Cookie.ofSecure("name", "value")));
        assertThat(cookieJar.get(foo)).hasSize(1);
    }
    @Test
    void cookieState() {
        // state() reflects the jar's view: NON_EXISTENT before set, EXISTENT
        // while valid, EXPIRED once maxAge has elapsed (checked with an
        // explicit clock to avoid timing flakiness).
        final CookieJar cookieJar = new DefaultCookieJar();
        final URI foo = URI.create("http://foo.com");
        Cookie cookie = Cookie.ofSecure("name", "value");
        Cookie expectCookie = Cookie.secureBuilder("name", "value").domain("foo.com").path("/").build();
        assertThat(cookieJar.state(cookie)).isEqualTo(CookieState.NON_EXISTENT);
        cookieJar.set(foo, Cookies.of(cookie));
        assertThat(cookieJar.state(expectCookie)).isEqualTo(CookieState.EXISTENT);
        cookie = cookie.toBuilder().maxAge(1).build();
        expectCookie = expectCookie.toBuilder().maxAge(1).build();
        final long currentTimeMillis = System.currentTimeMillis();
        cookieJar.set(foo, Cookies.of(cookie), currentTimeMillis);
        assertThat(cookieJar.state(expectCookie, currentTimeMillis + 1000)).isEqualTo(CookieState.EXISTENT);
        assertThat(cookieJar.state(expectCookie, currentTimeMillis + 1001)).isEqualTo(CookieState.EXPIRED);
    }
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java;
import static com.facebook.buck.rules.BuildableProperties.Kind.PACKAGING;
import com.facebook.buck.io.DirectoryTraverser;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BinaryBuildRule;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRules;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.BuildableProperties;
import com.facebook.buck.rules.CommandTool;
import com.facebook.buck.rules.RuleKeyAppendable;
import com.facebook.buck.rules.RuleKeyBuilder;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePaths;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.fs.MkdirAndSymlinkFileStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
import java.nio.file.Paths;
import javax.annotation.Nullable;
@BuildsAnnotationProcessor
public class JavaBinary extends AbstractBuildRule
implements BinaryBuildRule, HasClasspathEntries, RuleKeyAppendable {
// This rule packages (fat-jars) its dependencies rather than compiling sources.
private static final BuildableProperties OUTPUT_TYPE = new BuildableProperties(PACKAGING);
// Launcher used to run the produced jar (see getExecutableCommand()).
@AddToRuleKey
private final JavaRuntimeLauncher javaRuntimeLauncher;
// Main-Class for the jar manifest; null when the binary is not directly runnable.
@AddToRuleKey
@Nullable
private final String mainClass;
// Optional user-supplied manifest merged into the jar.
@AddToRuleKey
@Nullable
private final SourcePath manifestFile;
private final boolean mergeManifests;
// Directory whose contents are staged under META-INF/ in the jar; not annotated
// with @AddToRuleKey — its contents are folded into the rule key manually in
// appendToRuleKey().
@Nullable
private final Path metaInfDirectory;
// Entry patterns excluded from the final jar.
@AddToRuleKey
private final ImmutableSet<String> blacklist;
private final DirectoryTraverser directoryTraverser;
// Classpath entries of this binary and all transitive deps, keyed by library.
private final ImmutableSetMultimap<JavaLibrary, Path> transitiveClasspathEntries;
/**
 * Creates a java_binary build rule. All arguments are stored as-is; the real
 * work happens in {@link #getBuildSteps}.
 */
public JavaBinary(
    BuildRuleParams params,
    SourcePathResolver resolver,
    JavaRuntimeLauncher javaRuntimeLauncher,
    @Nullable String mainClass,
    @Nullable SourcePath manifestFile,
    boolean mergeManifests,
    @Nullable Path metaInfDirectory,
    ImmutableSet<String> blacklist,
    DirectoryTraverser directoryTraverser,
    ImmutableSetMultimap<JavaLibrary, Path> transitiveClasspathEntries) {
  super(params, resolver);
  this.javaRuntimeLauncher = javaRuntimeLauncher;
  this.mainClass = mainClass;
  this.manifestFile = manifestFile;
  this.mergeManifests = mergeManifests;
  this.metaInfDirectory = metaInfDirectory;
  this.blacklist = blacklist;
  this.directoryTraverser = directoryTraverser;
  this.transitiveClasspathEntries = transitiveClasspathEntries;
}
/** This rule is a packaging rule (it produces a jar, it does not compile). */
@Override
public BuildableProperties getProperties() {
  return OUTPUT_TYPE;
}
/**
 * Folds the contents of {@code metaInfDirectory} into the rule key so that
 * changes to staged META-INF files invalidate cached builds.
 */
@Override
public RuleKeyBuilder appendToRuleKey(RuleKeyBuilder builder) {
  // Build a sorted set so that metaInfDirectory contents are listed in a canonical order.
  ImmutableSortedSet.Builder<Path> paths = ImmutableSortedSet.naturalOrder();
  BuildRules.addInputsToSortedSet(metaInfDirectory, paths, directoryTraverser);
  return builder.setReflectively(
      "metaInfDirectory",
      FluentIterable.from(paths.build())
          .transform(SourcePaths.toSourcePath(getProjectFilesystem())));
}
/**
 * Emits the steps that assemble the fat jar: create the output directory,
 * optionally stage META-INF contents, then jar the whole transitive
 * classpath with the configured manifest/main class.
 */
@Override
public ImmutableList<Step> getBuildSteps(
    BuildContext context,
    BuildableContext buildableContext) {
  ImmutableList.Builder<Step> commands = ImmutableList.builder();
  Path outputDirectory = getOutputDirectory();
  Step mkdir = new MkdirStep(getProjectFilesystem(), outputDirectory);
  commands.add(mkdir);
  ImmutableSortedSet<Path> includePaths;
  if (metaInfDirectory != null) {
    // Symlink the META-INF directory into a clean staging root so the jar
    // step picks it up as "META-INF/..." alongside the classpath entries.
    Path stagingRoot = outputDirectory.resolve("meta_inf_staging");
    Path stagingTarget = stagingRoot.resolve("META-INF");
    MakeCleanDirectoryStep createStagingRoot = new MakeCleanDirectoryStep(
        getProjectFilesystem(),
        stagingRoot);
    commands.add(createStagingRoot);
    MkdirAndSymlinkFileStep link = new MkdirAndSymlinkFileStep(
        getProjectFilesystem(),
        metaInfDirectory,
        stagingTarget);
    commands.add(link);
    includePaths = ImmutableSortedSet.<Path>naturalOrder()
        .add(stagingRoot)
        .addAll(getTransitiveClasspathEntries().values())
        .build();
  } else {
    includePaths = ImmutableSortedSet.copyOf(getTransitiveClasspathEntries().values());
  }
  Path outputFile = getPathToOutput();
  // Manifest is optional; JarDirectoryStep accepts a null manifest path.
  Path manifestPath = manifestFile == null ? null : getResolver().getAbsolutePath(manifestFile);
  Step jar = new JarDirectoryStep(
      getProjectFilesystem(),
      outputFile,
      includePaths,
      mainClass,
      manifestPath,
      mergeManifests,
      blacklist);
  commands.add(jar);
  // Record the jar so the build cache keeps it.
  buildableContext.recordArtifact(outputFile);
  return commands.build();
}
@Override
public ImmutableSetMultimap<JavaLibrary, Path> getTransitiveClasspathEntries() {
return transitiveClasspathEntries;
}
@Override
public ImmutableSet<JavaLibrary> getTransitiveClasspathDeps() {
return transitiveClasspathEntries.keySet();
}
private Path getOutputDirectory() {
return BuildTargets.getGenPath(getBuildTarget(), "%s").getParent();
}
@Override
public Path getPathToOutput() {
return Paths.get(
String.format(
"%s/%s.jar",
getOutputDirectory(),
getBuildTarget().getShortNameAndFlavorPostfix()));
}
@Override
public Tool getExecutableCommand() {
Preconditions.checkState(
mainClass != null,
"Must specify a main class for %s in order to to run it.",
getBuildTarget());
return new CommandTool.Builder()
.addArg(javaRuntimeLauncher.getCommand())
.addArg("-jar")
.addArg(new SourcePathArg(getResolver(), new BuildTargetSourcePath(getBuildTarget())))
.build();
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python;
import com.intellij.formatting.WrapType;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.fixtures.PyTestCase;
import com.jetbrains.python.formatter.PyCodeStyleSettings;
import com.jetbrains.python.psi.LanguageLevel;
import com.jetbrains.python.psi.PyElementGenerator;
import com.jetbrains.python.psi.PyStatement;
import com.jetbrains.python.psi.impl.PythonLanguageLevelPusher;
/**
 * Tests for the Python source-code formatter.
 *
 * Unless noted otherwise, each {@code testXxx} method delegates to {@link #doTest()},
 * which reformats {@code formatter/<testName>.py} and compares the result against
 * {@code formatter/<testName>_after.py}; the {@code PY-NNNN} / {@code IDEA-NNNN}
 * comments reference the tracker issues that motivated each case.
 *
 * @author yole
 */
public class PyFormatterTest extends PyTestCase {
  public void testBlankLineBetweenMethods() {
    doTest();
  }

  public void testBlankLineAroundClasses() {
    getCommonCodeStyleSettings().BLANK_LINES_AROUND_CLASS = 2;
    doTest();
  }

  public void testSpaceAfterComma() {
    doTest();
  }

  public void testPep8ExtraneousWhitespace() {
    doTest();
  }

  public void testPep8Operators() {
    doTest();
  }

  public void testPep8KeywordArguments() {
    doTest();
  }

  public void testUnaryMinus() {
    doTest();
  }

  public void testBlankLineAfterImports() {
    doTest();
  }

  // PY-15701
  public void testNoBlankLinesAfterLocalImports() {
    doTest();
  }

  public void testBlankLineBeforeFunction() {
    doTest();
  }

  public void testStarArgument() { // PY-1395
    doTest();
  }

  public void testDictLiteral() { // PY-1461
    doTest();
  }

  public void testListAssignment() { // PY-1522
    doTest();
  }

  public void testStarExpression() { // PY-1523
    doTestPy3();
  }

  /**
   * Runs {@link #doTest()} with the language level forced to Python 3.0, restoring
   * the previous (unforced) level afterwards so other tests are unaffected.
   */
  private void doTestPy3() {
    PythonLanguageLevelPusher.setForcedLanguageLevel(myFixture.getProject(), LanguageLevel.PYTHON30);
    try {
      doTest();
    }
    finally {
      PythonLanguageLevelPusher.setForcedLanguageLevel(myFixture.getProject(), null);
    }
  }

  public void testWrapTuple() { // PY-1792
    doTest();
  }

  public void testSpaceAfterCommaWrappedLine() { // PY-1065
    doTest();
  }

  public void testAlignInBinaryExpression() {
    doTest();
  }

  public void testAlignInStringLiteral() {
    doTest();
  }

  public void testComment() { // PY-2108
    doTest();
  }

  public void testCommentBetweenClasses() { // PY-1598
    doTest();
  }

  public void testCommentInEmptyTuple() { //PY-11904
    doTest();
  }

  public void testTwoLinesBetweenTopLevelClasses() { // PY-2765
    doTest();
  }

  public void testTwoLinesBetweenTopLevelFunctions() { // PY-2765
    doTest();
  }

  // PY-9923
  public void testTwoLinesBetweenTopLevelDeclarationsWithComment() { // PY-9923
    doTest();
  }

  // PY-9923
  public void testTwoLinesBetweenTopLevelStatementAndDeclarationsWithComment() {
    doTest();
  }

  public void testSpecialSlice() { // PY-1928
    doTest();
  }

  public void testNoWrapBeforeParen() { // PY-3172
    doTest();
  }

  public void testTupleAssignment() { // PY-4034 comment
    doTest();
  }

  public void testSpaceInMethodDeclaration() { // PY-4241
    getCommonCodeStyleSettings().SPACE_BEFORE_METHOD_PARENTHESES = true;
    doTest();
  }

  public void testOptionalAlignForMethodParameters() { // PY-3995
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS = false;
    doTest();
  }

  public void testNoAlignForMethodArguments() { // PY-3995
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = false;
    doTest();
  }

  public void testAlignForMethodArguments() { // PY-3995
    doTest();
  }

  public void testLambdaColon() {
    doTest();
  }

  public void testInGenerator() { // PY-5379
    doTest();
  }

  public void testIndentInGenerator() { // PY-6219
    doTest();
  }

  public void testSpaceAroundDot() { // PY-6908
    doTest();
  }

  public void testSetLiteralInArgList() { // PY-6672
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doTest();
  }

  public void testLiterals() { // PY-6751
    doTest();
  }

  public void testTupleInArgList() {
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doTest();
  }

  public void testAlignInBinaryExpressions() {
    doTest();
  }

  public void testFromImportRelative() {
    doTest();
  }

  public void testContinuationIndent() {
    doTest();
  }

  public void testContinuationIndentInIndentingStatement() { // PY-9573
    doTest();
  }

  public void testContinuationIndentInIndentingStatement2() { // PY-11868
    doTest();
  }

  public void testBlankLineAfterDecorator() {
    doTest();
  }

  public void testSpaceAroundKeywords() {
    doTest();
  }

  public void testSpaceAfterReturn() {
    doTest();
  }

  public void testSpaceAfterRelativeImport() { // PY-8112
    doTest();
  }

  public void testSpaceWithinBraces() { // PY-8069
    getPythonCodeStyleSettings().SPACE_WITHIN_BRACES = true;
    doTest();
  }

  public void testTupleClosingParen() { // PY-7946
    doTest();
  }

  public void testBeforeTopLevelClass() { // PY-7743
    doTest();
  }

  // Unlike the data-file driven tests, this one reformats an in-memory dummy file
  // and compares against an inline expected string. The odd quoting in the input
  // is deliberate test data; do not "fix" it.
  public void testPsiFormatting() { // IDEA-69724
    String initial =
      "def method_name(\n" +
      "   desired_impulse_response,\n" +
      " desired_response_parameters,\n" +
      " inverse_filter_length, \n" +
      " observed_impulse_response):\n" +
      " # Extract from here to ...\n" +
      "   desired_impulse_response = {'dirac, 'gaussian', logistic_derivative'}\n" +
      "return desired, o";
    final PsiFile file = PyElementGenerator.getInstance(myFixture.getProject()).createDummyFile(LanguageLevel.PYTHON30, initial);
    final PsiElement reformatted = CodeStyleManager.getInstance(myFixture.getProject()).reformat(file);
    String expected =
      "def method_name(\n" +
      "        desired_impulse_response,\n" +
      "        desired_response_parameters,\n" +
      "        inverse_filter_length,\n" +
      "        observed_impulse_response):\n" +
      "    # Extract from here to ...\n" +
      "    desired_impulse_response = {'dirac, '\n" +
      "    gaussian\n" +
      "    ', logistic_derivative'}\n" +
      "    return desired, o";
    assertEquals(expected, reformatted.getText());
  }

  public void testWrapDefinitionWithLongLine() { // IDEA-92081
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 30);
    getCommonCodeStyleSettings().WRAP_LONG_LINES = true;
    doTest();
  }

  public void testWrapAssignment() { // PY-8572
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 120);
    getCommonCodeStyleSettings().WRAP_LONG_LINES = false;
    doTest();
  }

  public void testIndentInSlice() { // PY-8572
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 120);
    getCommonCodeStyleSettings().WRAP_LONG_LINES = false;
    doTest();
  }

  public void testIndentInComprehensions() { // PY-8516
    getPythonCodeStyleSettings().ALIGN_COLLECTIONS_AND_COMPREHENSIONS = false;
    doTest();
  }

  public void testAlignInGenerators() { // PY-8822
    doTest();
  }

  public void testAlignInCallExpression() {
    doTest();
  }

  // Disabled by the leading underscore until the formatter supports this case.
  public void _testAlignInNestedCallInWith() { //PY-11337 TODO:
    doTest();
  }

  public void testContinuationIndentForCallInStatementPart() { // PY-8577
    doTest();
  }

  public void testIfConditionContinuation() { // PY-8195
    doTest();
  }

  // Disabled by the leading underscore until the formatter supports this case.
  public void _testIndentInNestedCall() { // PY-11919 TODO: required changes in formatter to be able to make indent relative to block or alignment
    doTest();
  }

  public void testIndentAfterBackslash() {
    doTest();
  }

  public void testSpaceBeforeBackslash() {
    getPythonCodeStyleSettings().SPACE_BEFORE_BACKSLASH = false;
    doTest();
  }

  public void testNewLineAfterColon() {
    getPythonCodeStyleSettings().NEW_LINE_AFTER_COLON = true;
    doTest();
  }

  public void testNewLineAfterColonMultiClause() {
    doTest();
  }

  // Forces Python 2.7 for the duration of the test, mirroring doTestPy3().
  public void testLongWith() { // PY-8743
    PythonLanguageLevelPusher.setForcedLanguageLevel(myFixture.getProject(), LanguageLevel.PYTHON27);
    try {
      doTest();
    }
    finally {
      PythonLanguageLevelPusher.setForcedLanguageLevel(myFixture.getProject(), null);
    }
  }

  // PY-8961, PY-16050
  public void testSpaceInAnnotations() {
    doTestPy3();
  }

  // PY-15791
  public void testForceSpacesAroundEqualSignInAnnotatedParameter() {
    doTestPy3();
  }

  public void testWrapInBinaryExpression() { // PY-9032
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 80);
    doTest(true);
  }

  public void testSpaceWithinDeclarationParentheses() { // PY-8818
    getCommonCodeStyleSettings().SPACE_WITHIN_METHOD_PARENTHESES = true;
    doTest();
  }

  public void testWrapBeforeElse() { // PY-10319
    doTest(true);
  }

  public void testSpacesInImportParentheses() { // PY-11359
    doTest();
  }

  public void testWrapImports() { // PY-9163
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 80);
    doTest();
  }

  public void testCommentAfterBlock() { // PY-9542
    doTest();
  }

  public void testWrapOnDot() { // PY-6359
    doTest();
  }

  public void testIndentParensInImport() { // PY-9075
    doTest();
  }

  public void testAlignInParenthesizedExpression() {
    doTest();
  }

  public void testAlignInParameterList() {
    doTest();
  }

  public void testAlignListComprehensionInDict() { //PY-10076
    doTest();
  }

  public void testParenthesisAroundGeneratorExpression() {
    doTest();
  }

  /** Reformats the whole file as a PSI tree (not as a text range). */
  private void doTest() {
    doTest(false);
  }

  /**
   * Reformats {@code formatter/<testName>.py} and checks the result against
   * {@code formatter/<testName>_after.py}.
   *
   * @param reformatText if true, uses {@code reformatText} over the file's full
   *                     text range instead of PSI-based {@code reformat}
   */
  private void doTest(final boolean reformatText) {
    myFixture.configureByFile("formatter/" + getTestName(true) + ".py");
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
      @Override
      public void run() {
        CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myFixture.getProject());
        PsiFile file = myFixture.getFile();
        if (reformatText) {
          codeStyleManager.reformatText(file, 0, file.getTextLength());
        }
        else {
          codeStyleManager.reformat(file);
        }
      }
    });
    myFixture.checkResultByFile("formatter/" + getTestName(true) + "_after.py");
  }

  // PY-12861
  public void testSpacesInsideParenthesisAreStripped() {
    doTest();
  }

  // PY-14838
  public void testNoAlignmentAfterDictHangingIndentInFunctionCall() {
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    doTest();
  }

  // PY-13955
  // Exercises formatting-on-typing rather than a whole-file reformat: types a
  // newline and "(" at the caret and checks the resulting indentation.
  public void testNoAlignmentAfterDictHangingIndentInFunctionCallOnTyping() {
    getCommonCodeStyleSettings().ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true;
    final String testName = "formatter/" + getTestName(true);
    myFixture.configureByFile(testName + ".py");
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
      @Override
      public void run() {
        myFixture.type("\n(");
      }
    });
    myFixture.checkResultByFile(testName + "_after.py");
  }

  // PY-12145
  public void testAlignmentOfClosingBraceInDictLiteralWhenNoHangingIndent() {
    doTest();
  }

  // PY-13004
  public void testAlignmentOfClosingParenthesisOfArgumentListWhenNoHangingIndent() {
    doTest();
  }

  // PY-14408
  public void testIndentsWithTabsInsideDictLiteral() {
    getIndentOptions().USE_TAB_CHARACTER = true;
    doTest();
  }

  // PY-12749
  public void testContinuationIndentIsNotUsedForNestedFunctionCallsInWithStatement() {
    doTest();
  }

  public void testAlignmentOfClosingParenthesisInNestedFunctionCallsWithSingleArgument() {
    doTest();
  }

  // PY-12748
  public void testIndentCommentariesInsideFromImportStatement() {
    doTest();
  }

  public void testClosingParenthesisInFromImportStatementWithNoHangingIndent() {
    doTest();
  }

  // PY-12932
  public void testCommentedCodeFragmentIgnored() {
    doTest();
  }

  // PY-12932
  public void testTrailingComment() {
    doTest();
  }

  // PY-12938
  public void testDoubleHashCommentIgnored() {
    doTest();
  }

  // PY-12938
  public void testDocCommentIgnored() {
    doTest();
  }

  // PY-12775
  public void testShebangCommentIgnored() {
    doTest();
  }

  // PY-13232
  public void testWhitespaceInsertedAfterHashSignInMultilineComment() {
    doTest();
  }

  /**
   * This test merely checks that call to {@link com.intellij.psi.codeStyle.CodeStyleManager#reformat(com.intellij.psi.PsiElement)}
   * is possible for Python sources.
   */
  public void testReformatOfSingleElementPossible() {
    myFixture.configureByFile("formatter/" + getTestName(true) + ".py");
    WriteCommandAction.runWriteCommandAction(myFixture.getProject(), new Runnable() {
      @Override
      public void run() {
        final PsiElement elementAtCaret = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
        assertNotNull(elementAtCaret);
        final PyStatement statement = PsiTreeUtil.getParentOfType(elementAtCaret, PyStatement.class, false);
        assertNotNull(statement);
        final CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(myFixture.getProject());
        codeStyleManager.reformat(statement);
      }
    });
    myFixture.checkResultByFile("formatter/" + getTestName(true) + "_after.py");
  }

  // PY-11552
  public void testExtraBlankLinesBetweenMethodsAndAtTheEnd() {
    getCommonCodeStyleSettings().KEEP_BLANK_LINES_IN_DECLARATIONS = 1;
    doTest();
  }

  // PY-11552
  public void testTrailingBlankLinesWithBackslashesAtFileEnd() {
    doTest();
  }

  // PY-11552
  public void testTrailingBlankLinesWithBackslashesAtFunctionEnd() {
    doTest();
  }

  // PY-11552
  public void testTrailingBlankLinesWithBackslashesAtFunctionEndNoNewLine() {
    doTest();
  }

  // PY-11552
  public void testTrailingBlankLinesWithBackslashesMixed() {
    doTest();
  }

  // PY-11552
  public void testTrailingBlankLinesInEmptyFile() {
    doTest();
  }

  // PY-14962
  public void testAlignDictLiteralOnValue() {
    getPythonCodeStyleSettings().DICT_ALIGNMENT = PyCodeStyleSettings.DICT_ALIGNMENT_ON_VALUE;
    doTest();
  }

  // PY-14962
  public void testAlignDictLiteralOnColon() {
    getPythonCodeStyleSettings().DICT_ALIGNMENT = PyCodeStyleSettings.DICT_ALIGNMENT_ON_COLON;
    doTest();
  }

  // PY-14962
  public void testDictWrappingChopDownIfLong() {
    getCodeStyleSettings().setRightMargin(PythonLanguage.getInstance(), 80);
    getPythonCodeStyleSettings().DICT_WRAPPING = WrapType.CHOP_DOWN_IF_LONG.getLegacyRepresentation();
    doTest();
  }

  // PY-14962
  public void testForceNewLineAfterLeftBraceInDict() {
    getPythonCodeStyleSettings().DICT_NEW_LINE_AFTER_LEFT_BRACE = true;
    doTest();
  }

  // PY-14962
  public void testForceNewLineBeforeRightBraceInDict() {
    getPythonCodeStyleSettings().DICT_NEW_LINE_BEFORE_RIGHT_BRACE = true;
    doTest();
  }

  // PY-17674
  public void testForceNewLineBeforeRightBraceInDictAfterColon() {
    getPythonCodeStyleSettings().DICT_NEW_LINE_BEFORE_RIGHT_BRACE = true;
    doTest();
  }

  // PY-16393
  public void testHangingIndentDetectionIgnoresComments() {
    doTest();
  }

  // PY-15530
  public void testAlignmentInArgumentListWhereFirstArgumentIsEmptyCall() {
    doTest();
  }

  public void testAlignmentInListLiteralWhereFirstItemIsEmptyTuple() {
    doTest();
  }

  public void testHangingIndentInNamedArgumentValue() {
    doTest();
  }

  public void testHangingIndentInParameterDefaultValue() {
    doTest();
  }

  // PY-15171
  public void testHangingIndentInKeyValuePair() {
    doTest();
  }

  public void testDoNotDestroyAlignment_OnPostponedFormatting() throws Exception {
    getPythonCodeStyleSettings().DICT_ALIGNMENT = PyCodeStyleSettings.DICT_ALIGNMENT_ON_COLON;
    doTest();
  }

  public void testAlignmentOfEmptyCollectionLiterals() {
    doTest();
  }

  // PY-17593
  public void testBlanksBetweenImportsPreservedWithoutOptimizeImports() {
    doTest();
  }

  // PY-17979, PY-13304
  public void testContinuationIndentBeforeFunctionArguments() {
    getPythonCodeStyleSettings().USE_CONTINUATION_INDENT_FOR_ARGUMENTS = true;
    doTest();
  }

  // PY-18265
  public void testNoSpaceAroundPowerOperator() {
    getPythonCodeStyleSettings().SPACE_AROUND_POWER_OPERATOR = false;
    doTest();
  }
}
| |
/*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.pcepio.protocol.ver1;
import java.util.LinkedList;
import java.util.ListIterator;
import org.jboss.netty.buffer.ChannelBuffer;
import org.onosproject.pcepio.exceptions.PcepParseException;
import org.onosproject.pcepio.protocol.PcepLspObject;
import org.onosproject.pcepio.types.PcepErrorDetailInfo;
import org.onosproject.pcepio.types.PcepObjectHeader;
import org.onosproject.pcepio.types.PcepValueType;
import org.onosproject.pcepio.types.StatefulIPv4LspIdentifiersTlv;
import org.onosproject.pcepio.types.StatefulLspDbVerTlv;
import org.onosproject.pcepio.types.StatefulLspErrorCodeTlv;
import org.onosproject.pcepio.types.StatefulRsvpErrorSpecTlv;
import org.onosproject.pcepio.types.SymbolicPathNameTlv;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.MoreObjects;
/**
 * Provides the PCEP LSP object (version 1 encoding).
 *
 * Encodes/decodes the LSP object defined in draft-ietf-pce-stateful-pce-11,
 * section 7.3: a 20-bit PLSP-ID, the C/O/A/R/S/D flag bits, and a list of
 * optional TLVs.
 */
public class PcepLspObjectVer1 implements PcepLspObject {

    /*
     * Message format.
     * Reference : draft-ietf-pce-stateful-pce-11, section 7.3.
     *
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * | Object-Class  |   OT  |Res|P|I|   Object Length (bytes)       |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |                PLSP-ID                |  Flag   |C|  O|A|R|S|D|
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * //                        TLVs                                 //
     * |                                                               |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *
     * The LSP Object format
     */
    protected static final Logger log = LoggerFactory.getLogger(PcepLspObjectVer1.class);

    public static final byte LSP_OBJ_TYPE = 1;
    public static final byte LSP_OBJ_CLASS = 32;
    public static final byte LSP_OBJECT_VERSION = 1;

    // LSP_OBJ_MINIMUM_LENGTH = CommonHeaderLen(4)+ LspObjectHeaderLen(4)+TlvAssumedMinLength(8)
    public static final short LSP_OBJ_MINIMUM_LENGTH = 16;

    public static final int DEFAULT_PLSPID = 0;
    public static final byte DEFAULT_OFLAG = 1;
    public static final boolean DEFAULT_AFLAG = false;
    public static final boolean DEFAULT_RFLAG = false;
    public static final boolean DEFAULT_SFLAG = false;
    public static final boolean DEFAULT_DFLAG = false;
    public static final boolean DEFAULT_CFLAG = false;
    public static final int OBJECT_HEADER_LENGTH = 4;

    // Shift distances for the fields packed into the second 32-bit word.
    public static final int PLSPID_SHIFT_VALUE = 12;
    public static final int CFLAG_SHIFT_VALUE = 7;
    public static final int OFLAG_SHIFT_VALUE = 4;
    public static final int AFLAG_SHIFT_VALUE = 3;
    public static final int RFLAG_SHIFT_VALUE = 2;
    public static final int SFLAG_SHIFT_VALUE = 1;

    // Bit masks for the same fields (PLSP-ID occupies the top 20 bits).
    public static final int PLSPID_TEMP_SHIFT_VALUE = 0xFFFFF000;
    public static final int CFLAG_TEMP_SHIFT_VALUE = 0x80;
    public static final int OFLAG_TEMP_SHIFT_VALUE = 0x70;
    public static final int AFLAG_TEMP_SHIFT_VALUE = 0x08;
    public static final int RFLAG_TEMP_SHIFT_VALUE = 0x04;
    public static final int SFLAG_TEMP_SHIFT_VALUE = 0x02;
    public static final int DFLAG_TEMP_SHIFT_VALUE = 0x01;

    public static final int BIT_SET = 1;
    public static final int BIT_RESET = 0;
    public static final int MINIMUM_COMMON_HEADER_LENGTH = 4;

    static final PcepObjectHeader DEFAULT_LSP_OBJECT_HEADER = new PcepObjectHeader(LSP_OBJ_CLASS, LSP_OBJ_TYPE,
            PcepObjectHeader.REQ_OBJ_OPTIONAL_PROCESS, PcepObjectHeader.RSP_OBJ_PROCESSED, LSP_OBJ_MINIMUM_LENGTH);

    private PcepObjectHeader lspObjHeader;
    private int iPlspId;
    // 3-bits
    private byte yOFlag;
    private boolean bAFlag;
    private boolean bRFlag;
    private boolean bSFlag;
    private boolean bDFlag;
    private boolean bCFlag;
    // Optional TLV
    private LinkedList<PcepValueType> llOptionalTlv;

    /**
     * Constructor to initialize all the member variables.
     *
     * @param lspObjHeader lsp object header
     * @param iPlspId plsp id
     * @param yOFlag O flag
     * @param bAFlag A flag
     * @param bRFlag R flag
     * @param bSFlag S flag
     * @param bDFlag D flag
     * @param bCFlag C flag
     * @param llOptionalTlv list of optional tlv
     */
    public PcepLspObjectVer1(PcepObjectHeader lspObjHeader, int iPlspId, byte yOFlag, boolean bAFlag, boolean bRFlag,
            boolean bSFlag, boolean bDFlag, boolean bCFlag, LinkedList<PcepValueType> llOptionalTlv) {
        this.lspObjHeader = lspObjHeader;
        this.iPlspId = iPlspId;
        this.yOFlag = yOFlag;
        this.bAFlag = bAFlag;
        this.bRFlag = bRFlag;
        this.bSFlag = bSFlag;
        this.bDFlag = bDFlag;
        this.bCFlag = bCFlag;
        this.llOptionalTlv = llOptionalTlv;
    }

    /**
     * Sets lsp Object Header.
     *
     * @param obj lsp object header
     */
    public void setLspObjHeader(PcepObjectHeader obj) {
        this.lspObjHeader = obj;
    }

    @Override
    public void setPlspId(int iPlspId) {
        this.iPlspId = iPlspId;
    }

    @Override
    public void setCFlag(boolean bCFlag) {
        this.bCFlag = bCFlag;
    }

    @Override
    public void setOFlag(byte yOFlag) {
        this.yOFlag = yOFlag;
    }

    @Override
    public void setAFlag(boolean bAFlag) {
        this.bAFlag = bAFlag;
    }

    @Override
    public void setRFlag(boolean bRFlag) {
        this.bRFlag = bRFlag;
    }

    @Override
    public void setSFlag(boolean bSFlag) {
        this.bSFlag = bSFlag;
    }

    @Override
    public void setDFlag(boolean bDFlag) {
        this.bDFlag = bDFlag;
    }

    /**
     * Returns lsp object header.
     *
     * @return lspObjHeader
     */
    public PcepObjectHeader getLspObjHeader() {
        return this.lspObjHeader;
    }

    @Override
    public int getPlspId() {
        return this.iPlspId;
    }

    @Override
    public boolean getCFlag() {
        return this.bCFlag;
    }

    @Override
    public byte getOFlag() {
        return this.yOFlag;
    }

    @Override
    public boolean getAFlag() {
        return this.bAFlag;
    }

    @Override
    public boolean getRFlag() {
        return this.bRFlag;
    }

    @Override
    public boolean getSFlag() {
        return this.bSFlag;
    }

    @Override
    public boolean getDFlag() {
        return this.bDFlag;
    }

    @Override
    public LinkedList<PcepValueType> getOptionalTlv() {
        return this.llOptionalTlv;
    }

    @Override
    public void setOptionalTlv(LinkedList<PcepValueType> llOptionalTlv) {
        this.llOptionalTlv = llOptionalTlv;
    }

    /**
     * Parse channel buffer and returns object of PcepLspObject.
     *
     * @param cb of type channel buffer
     * @return object of PcepLspObject
     * @throws PcepParseException when lsp object is not present in channel buffer
     */
    public static PcepLspObject read(ChannelBuffer cb) throws PcepParseException {

        PcepObjectHeader lspObjHeader;
        int iPlspId;
        // 3-bits
        byte yOFlag;
        boolean bAFlag;
        boolean bRFlag;
        boolean bSFlag;
        boolean bDFlag;
        boolean bCFlag;
        // Optional TLV
        LinkedList<PcepValueType> llOptionalTlv;

        lspObjHeader = PcepObjectHeader.read(cb);

        if (lspObjHeader.getObjClass() != PcepLspObjectVer1.LSP_OBJ_CLASS) {
            throw new PcepParseException(PcepErrorDetailInfo.ERROR_TYPE_6, PcepErrorDetailInfo.ERROR_VALUE_8);
        }

        // Take only the LspObject body into a separate buffer.
        ChannelBuffer tempCb = cb.readBytes(lspObjHeader.getObjLen() - OBJECT_HEADER_LENGTH);

        int iTemp = tempCb.readInt();
        // Bug fix: use an unsigned shift (>>>). PLSP-ID is a 20-bit unsigned field
        // in the top bits of the word; the previous signed shift (>>) sign-extended
        // whenever bit 31 was set, producing a negative PLSP-ID for ids >= 0x80000.
        iPlspId = (iTemp & PLSPID_TEMP_SHIFT_VALUE) >>> PLSPID_SHIFT_VALUE;
        bCFlag = ((iTemp & CFLAG_TEMP_SHIFT_VALUE) >> CFLAG_SHIFT_VALUE) > 0;
        yOFlag = (byte) ((iTemp & OFLAG_TEMP_SHIFT_VALUE) >> OFLAG_SHIFT_VALUE);
        bAFlag = (iTemp & AFLAG_TEMP_SHIFT_VALUE) != 0;
        bRFlag = (iTemp & RFLAG_TEMP_SHIFT_VALUE) != 0;
        bSFlag = (iTemp & SFLAG_TEMP_SHIFT_VALUE) != 0;
        bDFlag = (iTemp & DFLAG_TEMP_SHIFT_VALUE) != 0;

        // parse optional TLV
        llOptionalTlv = parseOptionalTlv(tempCb);

        return new PcepLspObjectVer1(lspObjHeader, iPlspId, yOFlag, bAFlag, bRFlag, bSFlag, bDFlag, bCFlag,
                llOptionalTlv);
    }

    @Override
    public int write(ChannelBuffer cb) throws PcepParseException {

        // Write object header; remember where the length field lives so it can be
        // patched once the body has been written.
        int objStartIndex = cb.writerIndex();
        int objLenIndex = lspObjHeader.write(cb);

        if (objLenIndex <= 0) {
            throw new PcepParseException("Failed to write lsp object header. Index " + objLenIndex);
        }

        // Pack PLSP-ID and the flag bits into a single 32-bit word.
        int iTemp = iPlspId << PLSPID_SHIFT_VALUE;
        iTemp = iTemp | ((bCFlag ? BIT_SET : BIT_RESET) << CFLAG_SHIFT_VALUE);
        iTemp = iTemp | (yOFlag << OFLAG_SHIFT_VALUE);
        iTemp = bAFlag ? (iTemp | AFLAG_TEMP_SHIFT_VALUE) : iTemp;
        iTemp = iTemp | ((bRFlag ? BIT_SET : BIT_RESET) << RFLAG_SHIFT_VALUE);
        iTemp = iTemp | ((bSFlag ? BIT_SET : BIT_RESET) << SFLAG_SHIFT_VALUE);
        iTemp = iTemp | (bDFlag ? BIT_SET : BIT_RESET);

        cb.writeInt(iTemp);

        // Add optional TLVs.
        packOptionalTlv(cb);

        // Update object length now.
        int length = cb.writerIndex() - objStartIndex;
        // Will be helpful during print().
        lspObjHeader.setObjLen((short) length);
        // As per RFC the length of object should be multiples of 4.
        cb.setShort(objLenIndex, (short) length);

        return length;
    }

    /**
     * Returns Linked list of optional tlvs.
     *
     * @param cb of channel buffer.
     * @return list of optional tlvs
     * @throws PcepParseException when unsupported tlv is received
     */
    protected static LinkedList<PcepValueType> parseOptionalTlv(ChannelBuffer cb) throws PcepParseException {

        LinkedList<PcepValueType> llOutOptionalTlv = new LinkedList<>();

        while (MINIMUM_COMMON_HEADER_LENGTH <= cb.readableBytes()) {

            PcepValueType tlv = null;
            short hType = cb.readShort();
            short hLength = cb.readShort();
            int iValue = 0;

            switch (hType) {
            case StatefulIPv4LspIdentifiersTlv.TYPE:
                tlv = StatefulIPv4LspIdentifiersTlv.read(cb);
                break;
            case StatefulLspErrorCodeTlv.TYPE:
                iValue = cb.readInt();
                tlv = new StatefulLspErrorCodeTlv(iValue);
                break;
            case StatefulRsvpErrorSpecTlv.TYPE:
                tlv = StatefulRsvpErrorSpecTlv.read(cb);
                break;
            case SymbolicPathNameTlv.TYPE:
                tlv = SymbolicPathNameTlv.read(cb, hLength);
                break;
            case StatefulLspDbVerTlv.TYPE:
                tlv = StatefulLspDbVerTlv.read(cb);
                break;
            default:
                // Skip the unknown TLV.
                cb.skipBytes(hLength);
                tlv = null;
                log.info("Received unsupported TLV type :{} in LSP object.", hType);
            }

            // TLVs are padded to a 4-byte boundary; skip the padding if present.
            int pad = hLength % 4;
            if (0 < pad) {
                pad = 4 - pad;
                if (pad <= cb.readableBytes()) {
                    cb.skipBytes(pad);
                }
            }

            if (tlv != null) {
                llOutOptionalTlv.add(tlv);
            }
        }

        if (0 < cb.readableBytes()) {
            throw new PcepParseException("Optional Tlv parsing error. Extra bytes received.");
        }
        return llOutOptionalTlv;
    }

    /**
     * Writes all optional TLVs (4-byte padded) to the channel buffer.
     *
     * @param cb of type channel buffer
     * @return length of bytes written to channel buffer
     */
    protected int packOptionalTlv(ChannelBuffer cb) {

        ListIterator<PcepValueType> listIterator = llOptionalTlv.listIterator();
        int startIndex = cb.writerIndex();

        while (listIterator.hasNext()) {
            PcepValueType tlv = listIterator.next();

            if (tlv == null) {
                log.debug("tlv is null from OptionalTlv list");
                continue;
            }
            tlv.write(cb);

            // Pad each TLV out to a 4-byte boundary.
            int pad = tlv.getLength() % 4;
            if (0 != pad) {
                pad = 4 - pad;
                for (int i = 0; i < pad; ++i) {
                    cb.writeByte((byte) 0);
                }
            }
        }
        return cb.writerIndex() - startIndex;
    }

    /**
     * Builder class for PCEP lsp Object.
     */
    public static class Builder implements PcepLspObject.Builder {

        private boolean bIsHeaderSet = false;
        private boolean bIsPlspIdSet = false;
        private boolean bIsOFlagSet = false;
        private boolean bIsRFlagSet = false;
        private boolean bIsAFlagSet = false;
        private boolean bIsDFlagSet = false;
        private boolean bIsSFlagSet = false;
        private boolean bIsCFlagSet = false;

        private PcepObjectHeader lspObjHeader;
        private byte yOFlag;
        private boolean bAFlag;
        private boolean bDFlag;
        private boolean bSFlag;
        private boolean bRFlag;
        private boolean bCFlag;
        LinkedList<PcepValueType> llOptionalTlv = null;

        private int plspId;

        private boolean bIsPFlagSet = false;
        private boolean bPFlag;

        private boolean bIsIFlagSet = false;
        private boolean bIFlag;

        @Override
        public PcepLspObject build() {
            PcepObjectHeader lspObjHeader = this.bIsHeaderSet ? this.lspObjHeader : DEFAULT_LSP_OBJECT_HEADER;
            int plspId = this.bIsPlspIdSet ? this.plspId : DEFAULT_PLSPID;
            byte yOFlag = this.bIsOFlagSet ? this.yOFlag : DEFAULT_OFLAG;
            boolean bAFlag = this.bIsAFlagSet ? this.bAFlag : DEFAULT_AFLAG;
            boolean bRFlag = this.bIsRFlagSet ? this.bRFlag : DEFAULT_RFLAG;
            boolean bSFlag = this.bIsSFlagSet ? this.bSFlag : DEFAULT_SFLAG;
            boolean bDFlag = this.bIsDFlagSet ? this.bDFlag : DEFAULT_DFLAG;
            boolean bCFlag = this.bIsCFlagSet ? this.bCFlag : DEFAULT_CFLAG;

            // NOTE(review): when no header was supplied, these setters mutate the
            // shared static DEFAULT_LSP_OBJECT_HEADER, which leaks P/I flag state
            // into every later object built with the default header. Consider
            // copying the header before mutating — needs a PcepObjectHeader copy
            // facility to fix; left unchanged here.
            if (bIsPFlagSet) {
                lspObjHeader.setPFlag(bPFlag);
            }

            if (bIsIFlagSet) {
                lspObjHeader.setIFlag(bIFlag);
            }

            return new PcepLspObjectVer1(lspObjHeader, plspId, yOFlag, bAFlag, bRFlag, bSFlag, bDFlag, bCFlag,
                    llOptionalTlv);
        }

        @Override
        public PcepObjectHeader getLspObjHeader() {
            return this.lspObjHeader;
        }

        @Override
        public Builder setLspObjHeader(PcepObjectHeader obj) {
            this.lspObjHeader = obj;
            this.bIsHeaderSet = true;
            return this;
        }

        @Override
        public int getPlspId() {
            return this.plspId;
        }

        @Override
        public Builder setPlspId(int value) {
            this.plspId = value;
            this.bIsPlspIdSet = true;
            return this;
        }

        @Override
        public boolean getCFlag() {
            return this.bCFlag;
        }

        @Override
        public Builder setCFlag(boolean value) {
            this.bCFlag = value;
            this.bIsCFlagSet = true;
            return this;
        }

        @Override
        public byte getOFlag() {
            return this.yOFlag;
        }

        @Override
        public Builder setOFlag(byte value) {
            this.yOFlag = value;
            this.bIsOFlagSet = true;
            return this;
        }

        @Override
        public boolean getAFlag() {
            return this.bAFlag;
        }

        @Override
        public Builder setAFlag(boolean value) {
            this.bAFlag = value;
            this.bIsAFlagSet = true;
            return this;
        }

        @Override
        public boolean getRFlag() {
            return this.bRFlag;
        }

        @Override
        public Builder setRFlag(boolean value) {
            this.bRFlag = value;
            this.bIsRFlagSet = true;
            return this;
        }

        @Override
        public boolean getSFlag() {
            return this.bSFlag;
        }

        @Override
        public Builder setSFlag(boolean value) {
            this.bSFlag = value;
            this.bIsSFlagSet = true;
            return this;
        }

        @Override
        public boolean getDFlag() {
            return this.bDFlag;
        }

        @Override
        public Builder setDFlag(boolean value) {
            this.bDFlag = value;
            this.bIsDFlagSet = true;
            return this;
        }

        @Override
        public Builder setOptionalTlv(LinkedList<PcepValueType> llOptionalTlv) {
            this.llOptionalTlv = llOptionalTlv;
            return this;
        }

        @Override
        public LinkedList<PcepValueType> getOptionalTlv() {
            return this.llOptionalTlv;
        }

        @Override
        public Builder setPFlag(boolean value) {
            this.bPFlag = value;
            this.bIsPFlagSet = true;
            return this;
        }

        @Override
        public Builder setIFlag(boolean value) {
            this.bIFlag = value;
            this.bIsIFlagSet = true;
            return this;
        }
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("PlspIDValue", iPlspId)
                .add("CFlag", bCFlag)
                .add("OFlag", yOFlag)
                .add("AFlag", bAFlag)
                .add("RFlag", bRFlag)
                .add("SFlag", bSFlag)
                .add("DFlag", bDFlag)
                .add("OptionalTlvList", llOptionalTlv)
                .toString();
    }
}
| |
/*
* Copyright (c) 2013, TeamCMPUT301F13T02
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of the {organization} nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ca.ualberta.CMPUT301F13T02.chooseyouradventure;
import java.util.ArrayList;
import java.util.UUID;
/**
 * This class represents a story -- it is part of the model of the application
 * in MVC style.
 *
 * A Story is serialized and stored via a Handler implementation which interacts
 * solely with Stories.
 *
 * @author The whole group
 */
public class Story {
// All pages of the story, in insertion order. The first page is identified
// by id (see firstpage), not by position in this list.
private ArrayList<Page> pages = new ArrayList<Page>();
// Storage id assigned by the Handler backend.
private String id;
// UUID of the page that starts the story.
private UUID firstpage;
// Next reference number to hand out to a newly added page (starts at 1).
private int currRefNum = 1;
// Device-unique author string (see setAuthor for details).
private String author;
private String title = new String();
// Creation time in seconds since the epoch; kept for serialization even
// though nothing reads it locally.
@SuppressWarnings("unused")
private long timeStamp;
private boolean usesCombat = false;
private Counters playerStats;
// Storage backend; transient so it is not serialized with the story.
private transient Handler handler;
/**
 * This is the main constructor for Story that initializes a new story.
 */
public Story() {
this.firstpage = new Page().getId();
this.timeStamp = System.currentTimeMillis() / 1000L;
this.author = "";
}
// For testing purposes
public Story(String title) {
this.title = title;
}
/**
 * This is a copy constructor to copy a story
 * NOTE(review): this does not actually copy any state from the given story;
 * it only allocates a fresh first-page id. Confirm whether a deep copy of
 * pages/title/author was intended here.
 * @param story
 */
public Story(Story story) {
this.firstpage = new Page().getId();
}
/**
 * Marks a page with the given UUID as the first page of the story.
 * @param firstpage
 */
public void setFirstpage(UUID firstpage)
{
this.firstpage = firstpage;
}
/**
 * Finds the first page in the list of pages and returns it.
 * Returns null if no page with the stored first-page UUID is present.
 * @return the firstpage
 */
public Page getFirstpage()
{
Page fp = null;
for(int i = 0; i < pages.size(); i++){
if (firstpage.equals(pages.get(i).getId()))
{
fp = pages.get(i);
}
}
return fp;
}
/**
 * This sets the title of the story
 * @param title the title to set
 */
public void setTitle(String title) {
this.title = title;
}
/**
 * This gets the title of the story
 * @return the title
 */
public String getTitle() {
return title;
}
/**
 * This sets the ID of the story
 * @param id
 */
public void setId(String id) {
this.id = id;
}
/**
 * This gets the ID of the story
 * @return the story id
 */
public String getId() {
return id;
}
/**
 * This sets the author of the story. Note that the author string should
 * be a string generated by the android device that is unique to that
 * app on that device. This allows the app to distinguish users.
 * @param author
 */
public void setAuthor(String author) {
this.author = author;
}
/**
 * This gets the author of the story
 * @return the author
 */
public String getAuthor() {
return author;
}
/**
 * This gets the currRefNum
 * @return currRefNum
 */
public int getCurrRefNum() {
return currRefNum;
}
/**
 * This gets the pages of a story
 * Note: returns the internal list (not a copy); callers can mutate it.
 * @return The pages
 */
public ArrayList<Page> getPages() {
return pages;
}
/**
 * This adds a page to a story, assigning it the next reference number.
 * @param newPage A new page
 */
public void addPage(Page newPage) {
newPage.setRefNum(currRefNum);
pages.add(newPage);
currRefNum++;
}
/**
 * This deletes a page from a story and pushes the change to storage.
 * @param index The index of the page to delete
 */
public void deletePage(int index) {
pages.remove(index);
updateStory();
}
/**
 * This function updates the stories data in the database.
 * NOTE(review): failures are only printed, not propagated, so callers cannot
 * detect a failed save; also throws NPE if no handler has been set.
 */
public void updateStory(){
try {
handler.updateStory(this);
} catch (HandlerException e) {
e.printStackTrace();
}
}
/**
 * This sets the scope of the storage of a story
 * @param state The type of Handler to set our handler to
 */
public void setHandler(Handler state) {
handler = state;
}
/**
 * This gets the scope of the storage of a story
 * @return state The type of Handler we are using
 */
public Handler getHandler() {
return handler;
}
/**
 * Compares this story for deep equality with another story.
 * NOTE(review): this overloads rather than overrides Object.equals(Object),
 * so collections and Object-typed comparisons fall back to identity equality.
 * @param story The story to test equality with
 * @return The equality Truth value
 */
public boolean equals(Story story) {
if (pages.size() != story.getPages().size())
return false;
if (!title.equals(story.getTitle()))
return false;
//if (!firstpage.equals(story.getFirstpage()))
// return false;
//Check that all pages are the same
for (int i = 0; i < pages.size(); i++) {
if (!pages.get(i).equals(story.getPages().get(i)))
return false;
}
return true;
}
/**
 * Returns if the current page is a fighting page
 * @return if the page is a fighting fragment
 */
public boolean isUsesCombat() {
return usesCombat;
}
/**
 * Sets if the page is a fighting fragment
 * @param usesCombat
 */
public void setUsesCombat(boolean usesCombat) {
this.usesCombat = usesCombat;
}
/**
 * get the players current counter state
 * @return the counter recording the current stats of the player
 */
public Counters getPlayerStats() {
return playerStats;
}
/**
 * Sets the players counter state
 * @param playerStats
 */
public void setPlayerStats(Counters playerStats) {
this.playerStats = playerStats;
}
}
| |
package org.rabix.bindings.draft3.helper;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.avro.Schema;
import org.rabix.bindings.BindingException;
import org.rabix.bindings.model.DataType;
import org.rabix.common.helper.CloneHelper;
import org.rabix.common.helper.JSONHelper;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
/**
 * Static helpers for interpreting CWL draft-3 schema fragments. Schemas arrive as raw
 * parsed JSON (String / Map / List) and these methods answer type questions
 * (file/array/record/any), resolve shortened notations ("type?" optional, "type[]"
 * array), look up named schema definitions, and perform a lightweight AVRO-based
 * validation used to disambiguate union members.
 */
public class Draft3SchemaHelper extends Draft3BeanHelper {

  public static final String ID_START = "#";
  public static final String ID_SEPARATOR = "/";
  public static final String PORT_ID_SEPARATOR = ".";
  public static final String STEP_PORT_ID = "id";

  public static final String KEY_SCHEMA_TYPE = "type";
  public static final String KEY_SCHEMA_NAME = "name";
  public static final String KEY_SCHEMA_ITEMS = "items";
  public static final String KEY_INPUT_BINDING_ADAPTER = "inputBinding";
  public static final String KEY_OUTPUT_BINDING_ADAPTER = "outputBinding";
  public static final String KEY_SCHEMA_FIELDS = "fields";
  public static final String KEY_SCHEMA_SYMBOLS = "symbols";
  public static final String KEY_JOB_TYPE = "class";

  public static final String TYPE_JOB_FILE = "File";
  public static final String TYPE_JOB_EXPRESSION = "Expression";
  public static final String TYPE_JOB_ARRAY = "array";
  public static final String TYPE_JOB_RECORD = "record";
  public static final String TYPE_JOB_ANY = "Any";

  public static final String SCHEMA_NULL = "null";

  public static final String OPTIONAL_SHORTENED = "?";
  public static final String ARRAY_SHORTENED = "[]";

  /**
   * Strips a leading '#' from an id.
   *
   * @param id raw id (may be null)
   * @return id without the '#' prefix, or null if id was null
   */
  public static String normalizeId(String id) {
    if (id == null) {
      return null;
    }
    return id.startsWith(ID_START) ? id.substring(1) : id;
  }

  /**
   * Ensures an id carries the '#' prefix.
   *
   * @param id raw id (may be null)
   * @return id with the '#' prefix, or null if id was null
   */
  public static String denormalizeId(String id) {
    if (id == null) {
      return null;
    }
    return id.startsWith(ID_START) ? id : ID_START + id;
  }

  /** Returns the "fields" value of a record schema, or null if absent. */
  public static Object getFields(Object raw) {
    return getValue(KEY_SCHEMA_FIELDS, raw);
  }

  /**
   * Returns the item type of an array schema. Handles the shortened "type[]" notation
   * (returning the element type string) before falling back to the "items" key.
   */
  public static Object getItems(Object schema) {
    String shortenedSchema = getArrayShortenedType(schema);
    if (shortenedSchema != null) {
      return shortenedSchema;
    }
    return getValue(KEY_SCHEMA_ITEMS, schema);
  }

  /** Returns the "name" value of a schema, or null if absent. */
  public static String getName(Object raw) {
    return getValue(KEY_SCHEMA_NAME, raw);
  }

  /**
   * Returns the "inputBinding" of a schema. If the schema is a union (list), the
   * binding of the first Map-typed member is returned.
   */
  public static Object getInputBinding(Object raw) {
    if (raw instanceof List) {
      for (Object elem : (List<?>) raw) {
        if (elem != null && elem instanceof Map) {
          return getValue(KEY_INPUT_BINDING_ADAPTER, elem);
        }
      }
    }
    return getValue(KEY_INPUT_BINDING_ADAPTER, raw);
  }

  /** Returns the "outputBinding" of a schema, or null if absent. */
  public static Object getOutputBinding(Object raw) {
    return getValue(KEY_OUTPUT_BINDING_ADAPTER, raw);
  }

  /** Returns the "type" value of a schema, or null if absent. */
  public static Object getType(Object raw) {
    return getValue(KEY_SCHEMA_TYPE, raw);
  }

  /** True if the schema denotes (or contains, for unions) the File type. */
  public static boolean isFileFromSchema(Object schema) {
    return isTypeFromSchema(schema, TYPE_JOB_FILE);
  }

  /** True if the schema denotes an array, including the shortened "type[]" notation. */
  public static boolean isArrayFromSchema(Object schema) {
    String shortenedSchema = getArrayShortenedType(schema);
    if (shortenedSchema != null) {
      return true;
    }
    return isTypeFromSchema(schema, TYPE_JOB_ARRAY);
  }

  /** True if the schema denotes the Any type. */
  public static boolean isAnyFromSchema(Object schema) {
    return isTypeFromSchema(schema, TYPE_JOB_ANY);
  }

  /** True if the schema denotes a record. */
  public static boolean isRecordFromSchema(Object schema) {
    return isTypeFromSchema(schema, TYPE_JOB_RECORD);
  }

  /**
   * Determines whether a value of this schema is required (i.e. the schema does not
   * admit null). A shortened "type?" schema and any union containing "null" are
   * optional; everything else non-null is required.
   *
   * @throws RuntimeException if the schema cannot be deep-copied
   */
  @SuppressWarnings("unchecked")
  public static boolean isRequired(Object schema) {
    String shortenedSchema = getOptionalShortenedType(schema);
    if (shortenedSchema != null) {
      return false;
    }
    try {
      // Work on a copy so unwrapping nested "type" keys cannot mutate the caller's schema.
      Object clonedSchema = CloneHelper.deepCopy(schema);
      while (clonedSchema instanceof Map<?, ?> && ((Map<?, ?>) clonedSchema).containsKey("type")) {
        clonedSchema = ((Map<?, ?>) clonedSchema).get("type");
      }
      if (clonedSchema instanceof List<?>) {
        for (Object subschema : ((List<Object>) clonedSchema)) {
          if (subschema == null || SCHEMA_NULL.equals(subschema)) {
            return false;
          }
        }
        return true;
      }
      return clonedSchema != null;
    } catch (Exception e) {
      // Chain the cause so clone failures stay diagnosable (was previously dropped).
      throw new RuntimeException("Failed to clone schema " + schema, e);
    }
  }

  /**
   * If the schema is a String ending in '?', returns the schema without the '?'
   * suffix; otherwise null.
   */
  private static String getOptionalShortenedType(Object schema) {
    if (schema == null) {
      return null;
    }
    if (!(schema instanceof String)) {
      return null;
    }
    String schemaStr = ((String) schema).trim();
    if (schemaStr.endsWith(OPTIONAL_SHORTENED)) {
      return schemaStr.substring(0, schemaStr.length() - 1);
    }
    return null;
  }

  /**
   * If the schema is a String ending in "[]" (possibly after stripping a trailing
   * '?'), returns the element type; otherwise null.
   */
  private static String getArrayShortenedType(Object schema) {
    if (schema == null) {
      return null;
    }
    if (!(schema instanceof String)) {
      return null;
    }
    String schemaStr = ((String) schema).trim();
    String optionalShortenedType = getOptionalShortenedType(schemaStr);
    if (optionalShortenedType != null) {
      schemaStr = optionalShortenedType;
    }
    if (schemaStr.endsWith(ARRAY_SHORTENED)) {
      return schemaStr.substring(0, schemaStr.length() - ARRAY_SHORTENED.length());
    }
    return null;
  }

  /**
   * True if the schema is the given type name, a Map whose "type" equals it, or a
   * union (list) containing such a member (checked recursively).
   */
  @SuppressWarnings("unchecked")
  private static boolean isTypeFromSchema(Object schema, String type) {
    Preconditions.checkNotNull(type);
    if (schema == null) {
      return false;
    }
    if (type.equals(schema)) {
      return true;
    }
    if (schema instanceof Map<?, ?>) {
      Map<String, Object> schemaMap = (Map<String, Object>) schema;
      if (schemaMap.containsKey(KEY_SCHEMA_TYPE)) {
        return type.equals(schemaMap.get(KEY_SCHEMA_TYPE));
      }
    }
    if (schema instanceof List<?>) {
      List<?> schemaList = (List<?>) schema;
      for (Object subschema : schemaList) {
        boolean isType = isTypeFromSchema(subschema, type);
        if (isType) {
          return true;
        }
      }
    }
    return false;
  }

  /** True if the runtime value is a Map with "class" equal to "File". */
  public static boolean isFileFromValue(Object valueObj) {
    if (valueObj == null) {
      return false;
    }
    if (valueObj instanceof Map<?, ?>) {
      Map<?, ?> valueMap = (Map<?, ?>) valueObj;
      Object type = valueMap.get(KEY_JOB_TYPE);
      if (type != null && type.equals(TYPE_JOB_FILE)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Looks up a named field in a record schema. Supports fields expressed either as a
   * Map keyed by field name or as a List of field objects carrying a "name".
   *
   * @return the field object as a Map, or null if not found
   */
  public static Map<?, ?> getField(String field, Object schema) {
    Object fields = getFields(schema);
    Object fieldObj = null;
    if (fields instanceof Map<?, ?>) {
      fieldObj = ((Map<?, ?>) fields).get(field);
    } else if (fields instanceof List<?>) {
      for (Object tmpField : ((List<?>) fields)) {
        if (field.equals(getName(tmpField))) {
          fieldObj = tmpField;
          break;
        }
      }
    }
    return (Map<?, ?>) fieldObj;
  }

  /**
   * Resolves a schema reference: Maps (and "File"/null) pass through unchanged,
   * unions collapse to an empty Map, and String names are looked up in schemaDefs.
   */
  public static Object findSchema(List<Map<String, Object>> schemaDefs, Object schema) {
    if (schema == null || TYPE_JOB_FILE.equals(schema)) {
      return schema;
    }
    if (schema instanceof Map) {
      return schema;
    }
    if (schema instanceof List) {
      return new HashMap<>();
    }
    return getSchemaDef(schemaDefs, (String) schema);
  }

  /**
   * Extract schema from schema definitions
   *
   * TODO implement AVRO validation for multiple matches
   */
  private static Object getSchemaDef(List<Map<String, Object>> schemaDefs, String name) {
    if (schemaDefs == null) {
      return null;
    }
    List<Object> compatibleSchemas = new ArrayList<>();
    for (Map<String, Object> schemaDef : schemaDefs) {
      if (name.equals(getName(schemaDef))) {
        compatibleSchemas.add(schemaDef);
      }
    }
    // Ambiguous (or no) matches resolve to null; see TODO above.
    if (compatibleSchemas.size() == 1) {
      return compatibleSchemas.get(0);
    }
    return null;
  }

  /**
   * Resolves the schema of a record field. Unions are scanned in order and the first
   * resolvable member wins; an empty Map is returned when nothing resolves.
   */
  @SuppressWarnings("unchecked")
  public static Object getSchemaForRecordField(List<Map<String, Object>> schemaDefs, Object recordSchema) {
    if (recordSchema == null) {
      return null;
    }
    List<Object> recordSchemaList = null;
    if (recordSchema instanceof List<?>) {
      recordSchemaList = (List<Object>) recordSchema;
    } else {
      recordSchemaList = new ArrayList<>();
      recordSchemaList.add(recordSchema);
    }
    for (Object recordSchemaItem : recordSchemaList) {
      Object schemaObj = findSchema(schemaDefs, recordSchemaItem);
      if (schemaObj == null) {
        continue;
      }
      return schemaObj;
    }
    return new HashMap<>();
  }

  /**
   * Resolves the item schema of an array schema, using the given value to
   * disambiguate when several candidate item schemas match (via AVRO validation).
   * Returns an empty Map when nothing resolves.
   */
  @SuppressWarnings("unchecked")
  public static Object getSchemaForArrayItem(Object value, List<Map<String, Object>> schemaDefs, Object arraySchema) {
    if (arraySchema == null) {
      return null;
    }
    List<Object> arraySchemaList = null;
    if (arraySchema instanceof List<?>) {
      arraySchemaList = (List<Object>) arraySchema;
    } else {
      arraySchemaList = new ArrayList<>();
      arraySchemaList.add(arraySchema);
    }
    List<Object> schemas = new ArrayList<>();
    for (Object arraySchemaItem : arraySchemaList) {
      Object itemSchemaObj = getItems(arraySchemaItem);
      if (itemSchemaObj == null) {
        continue;
      }
      if (itemSchemaObj instanceof List) {
        schemas = (List<Object>) itemSchemaObj;
      } else {
        schemas.add(itemSchemaObj);
      }
    }
    List<Object> schemaObjects = new ArrayList<>();
    for (Object schema : schemas) {
      // Item schemas here are '#'-prefixed references; substring(1) normalizes them.
      Object schemaObj = findSchema(schemaDefs, ((String) schema).substring(1));
      if (schemaObj == null) {
        continue;
      }
      schemaObjects.add(schemaObj);
    }
    if (schemaObjects.size() == 1) {
      return schemaObjects.get(0);
    }
    if (schemaObjects.size() > 1) {
      for (Object schemaObj : schemaObjects) {
        if (validateAvro(JSONHelper.writeObject(value), JSONHelper.writeObject(schemaObj))) {
          return schemaObj;
        }
      }
    }
    return new HashMap<>();
  }

  /**
   * Resolves the item schema of an array schema without a value to disambiguate.
   * Handles the shortened "type[]" notation first; otherwise the first resolvable
   * item schema wins. Returns an empty Map when nothing resolves.
   */
  @SuppressWarnings("unchecked")
  public static Object getSchemaForArrayItem(List<Map<String, Object>> schemaDefs, Object arraySchema) {
    String shortenedSchema = getArrayShortenedType(arraySchema);
    if (shortenedSchema != null) {
      Object shortenedSchemaObj = findSchema(schemaDefs, shortenedSchema);
      if (shortenedSchemaObj != null) {
        return shortenedSchemaObj;
      }
      return shortenedSchema;
    }
    if (arraySchema == null) {
      return null;
    }
    List<Object> arraySchemaList = null;
    if (arraySchema instanceof List<?>) {
      arraySchemaList = (List<Object>) arraySchema;
    } else {
      arraySchemaList = new ArrayList<>();
      arraySchemaList.add(arraySchema);
    }
    List<Object> schemas = new ArrayList<>();
    for (Object arraySchemaItem : arraySchemaList) {
      Object itemSchemaObj = getItems(arraySchemaItem);
      if (itemSchemaObj == null) {
        continue;
      }
      if (itemSchemaObj instanceof List) {
        schemas = (List<Object>) itemSchemaObj;
      } else {
        schemas.add(itemSchemaObj);
      }
    }
    for (Object schema : schemas) {
      Object schemaObj = findSchema(schemaDefs, schema);
      if (schemaObj == null) {
        continue;
      }
      return schemaObj;
    }
    return new HashMap<>();
  }

  /**
   * Returns the port-local part of an id (text after the first '.'), or the id
   * itself if it contains no separator.
   */
  public static String getLastInputId(String id) {
    if (id == null) {
      return null;
    }
    if (id.contains(PORT_ID_SEPARATOR)) {
      return id.substring(id.indexOf(PORT_ID_SEPARATOR) + 1);
    }
    return id;
  }

  /**
   * Validates a JSON value against an AVRO schema string by attempting union
   * resolution; true if the value matches.
   */
  public static boolean validateAvro(String json, String schemaStr) {
    Schema schema = new Schema.Parser().parse(schemaStr);
    List<Schema> schemas = new ArrayList<>();
    schemas.add(schema);
    try {
      resolveUnion(JSONHelper.readJsonNode(json), schemas);
      return true;
    } catch (BindingException e) {
      return false;
    }
  }

  /**
   * Selects the member of a union that matches the datum. Primitive members are
   * matched by the datum's JSON type (widening numerics where allowed); remaining
   * members are tried in order via {@link #matches}.
   *
   * @throws BindingException if no member matches
   */
  private static Schema resolveUnion(JsonNode datum, Collection<Schema> schemas) throws BindingException {
    Set<Schema.Type> primitives = Sets.newHashSet();
    List<Schema> others = Lists.newArrayList();
    for (Schema schema : schemas) {
      if (PRIMITIVES.containsKey(schema.getType())) {
        primitives.add(schema.getType());
      } else {
        others.add(schema);
      }
    }
    // Try to identify specific primitive types
    Schema primitiveSchema = null;
    if (datum == null || datum.isNull()) {
      primitiveSchema = closestPrimitive(primitives, Schema.Type.NULL);
    } else if (datum.isShort() || datum.isInt()) {
      primitiveSchema = closestPrimitive(primitives, Schema.Type.INT, Schema.Type.LONG, Schema.Type.FLOAT,
          Schema.Type.DOUBLE);
    } else if (datum.isLong()) {
      primitiveSchema = closestPrimitive(primitives, Schema.Type.LONG, Schema.Type.DOUBLE);
    } else if (datum.isFloat()) {
      primitiveSchema = closestPrimitive(primitives, Schema.Type.FLOAT, Schema.Type.DOUBLE);
    } else if (datum.isDouble()) {
      primitiveSchema = closestPrimitive(primitives, Schema.Type.DOUBLE);
    } else if (datum.isBoolean()) {
      primitiveSchema = closestPrimitive(primitives, Schema.Type.BOOLEAN);
    }
    if (primitiveSchema != null) {
      return primitiveSchema;
    }
    // otherwise, select the first schema that matches the datum
    for (Schema schema : others) {
      if (matches(datum, schema)) {
        return schema;
      }
    }
    throw new BindingException(String.format("Cannot resolve union: %s not in %s", datum, schemas));
  }

  // this does not contain string, bytes, or fixed because the datum type
  // doesn't necessarily determine the schema.
  private static final ImmutableMap<Schema.Type, Schema> PRIMITIVES = ImmutableMap.<Schema.Type, Schema> builder()
      .put(Schema.Type.NULL, Schema.create(Schema.Type.NULL))
      .put(Schema.Type.BOOLEAN, Schema.create(Schema.Type.BOOLEAN)).put(Schema.Type.INT, Schema.create(Schema.Type.INT))
      .put(Schema.Type.LONG, Schema.create(Schema.Type.LONG)).put(Schema.Type.FLOAT, Schema.create(Schema.Type.FLOAT))
      .put(Schema.Type.DOUBLE, Schema.create(Schema.Type.DOUBLE)).build();

  /** Returns the first primitive schema (in preference order) present in the possible set. */
  private static Schema closestPrimitive(Set<Schema.Type> possible, Schema.Type... types) {
    for (Schema.Type type : types) {
      if (possible.contains(type) && PRIMITIVES.containsKey(type)) {
        return PRIMITIVES.get(type);
      }
    }
    return null;
  }

  /**
   * Structural match of a JSON datum against an AVRO schema. Records validate every
   * field recursively (missing fields with a default validate against null); unions
   * delegate to {@link #resolveUnion}.
   */
  private static boolean matches(JsonNode datum, Schema schema) throws BindingException {
    switch (schema.getType()) {
    case RECORD:
      if (datum.isObject()) {
        // check that each field is present or has a default
        for (Schema.Field field : schema.getFields()) {
          JsonNode toValidate = null;
          if (!datum.has(field.name()) && field.defaultValue() == null) {
            toValidate = null;
          } else {
            toValidate = datum.get(field.name());
          }
          List<Schema> schemas = new ArrayList<>();
          schemas.add(field.schema());
          resolveUnion(toValidate, schemas);
        }
        return true;
      }
      break;
    case UNION:
      if (resolveUnion(datum, schema.getTypes()) != null) {
        return true;
      }
      break;
    case MAP:
      if (datum.isObject()) {
        return true;
      }
      break;
    case ARRAY:
      if (datum.isArray()) {
        return true;
      }
      break;
    case BOOLEAN:
      if (datum.isBoolean()) {
        return true;
      }
      break;
    case FLOAT:
      if (datum.isFloat() || datum.isInt()) {
        return true;
      }
      break;
    case DOUBLE:
      if (datum.isDouble() || datum.isFloat() || datum.isLong() || datum.isInt()) {
        return true;
      }
      break;
    case INT:
      if (datum.isInt()) {
        return true;
      }
      break;
    case LONG:
      if (datum.isLong() || datum.isInt()) {
        return true;
      }
      break;
    case STRING:
      if (datum.isTextual()) {
        return true;
      }
      break;
    case ENUM:
      if (datum.isTextual() && schema.hasEnumSymbol(datum.textValue())) {
        return true;
      }
      break;
    case BYTES:
    case FIXED:
      if (datum.isBinary()) {
        return true;
      }
      break;
    case NULL:
      if (datum == null || datum.isNull()) {
        return true;
      }
      break;
    default: // unknown schema type
      throw new IllegalArgumentException("Unsupported schema: " + schema);
    }
    return false;
  }

  /**
   * Converts a raw draft-3 schema into the internal {@link DataType} model.
   * Unions with more than one non-null member become UNION; otherwise the single
   * member's type is used. Unknown or unparseable schemas fall back to ANY.
   */
  @SuppressWarnings("unchecked")
  public static DataType readDataType(Object schema) {
    // UNION
    if (schema instanceof List) {
      List<?> schemaList = (List<?>) schema;
      int numberOfTypes = schemaList.size() - (schemaList.contains(SCHEMA_NULL) ? 1 : 0);
      if (numberOfTypes > 1) {
        Set<DataType> types = new HashSet<>();
        for (Object subschema : schemaList) {
          types.add(readDataType(subschema));
        }
        return new DataType(DataType.Type.UNION, types, !isRequired(schema));
      }
    }
    // FILE
    if (isFileFromSchema(schema))
      return new DataType(DataType.Type.FILE, !isRequired(schema));
    // ARRAY
    if (isArrayFromSchema(schema)) {
      DataType arrayType = readDataType(getItems(schema));
      return new DataType(DataType.Type.ARRAY, arrayType, !isRequired(schema));
    }
    // ENUM
    if (isTypeFromSchema(schema, "enum")) {
      Object symbols = getValue(KEY_SCHEMA_SYMBOLS, schema);
      if (symbols == null || !(symbols instanceof List))
        return new DataType(DataType.Type.ANY);
      List<String> list = new ArrayList<>();
      for (Object o : (List<?>) symbols) {
        if (!(o instanceof String))
          return new DataType(DataType.Type.ANY);
        list.add((String) o);
      }
      return new DataType(DataType.Type.ENUM, list);
    }
    // RECORD
    if (isRecordFromSchema(schema)) {
      Map<String, DataType> subTypes = new HashMap<>();
      Object fields = getFields(schema);
      if (fields instanceof List<?>) {
        for (Object o : (List<Object>) fields) {
          Map<String, Object> map = (Map<String, Object>) o;
          subTypes.put((String) map.get("name"), readDataType(map.get("type")));
        }
      }
      return new DataType(DataType.Type.RECORD, subTypes, !isRequired(schema));
    }
    // MAP
    if (isTypeFromSchema(schema, "map")) {
      DataType mapType = readDataType(getValue("values", schema));
      return new DataType(DataType.Type.MAP, mapType, !isRequired(schema));
    }
    // PRIMITIVES (long maps to INT and double to FLOAT — DataType presumably has no
    // wider numeric kinds; confirm against the DataType enum before changing)
    if (isTypeFromSchema(schema, "boolean")) {
      return new DataType(DataType.Type.BOOLEAN, !isRequired(schema));
    }
    if (isTypeFromSchema(schema, "string")) {
      return new DataType(DataType.Type.STRING, !isRequired(schema));
    }
    if (isTypeFromSchema(schema, "int")) {
      return new DataType(DataType.Type.INT, !isRequired(schema));
    }
    if (isTypeFromSchema(schema, "long")) {
      return new DataType(DataType.Type.INT, !isRequired(schema));
    }
    if (isTypeFromSchema(schema, "float")) {
      return new DataType(DataType.Type.FLOAT, !isRequired(schema));
    }
    if (isTypeFromSchema(schema, "double")) {
      return new DataType(DataType.Type.FLOAT, !isRequired(schema));
    }
    if (isTypeFromSchema(schema, SCHEMA_NULL)) {
      return new DataType(DataType.Type.NULL);
    }
    return new DataType(DataType.Type.ANY);
  }
}
| |
/*
* Copyright 2011-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lettuce.core.cluster;
import static io.lettuce.core.cluster.ClusterScanSupport.reactiveClusterKeyScanCursorMapper;
import static io.lettuce.core.cluster.ClusterScanSupport.reactiveClusterStreamScanCursorMapper;
import static io.lettuce.core.cluster.models.partitions.RedisClusterNode.NodeFlag.UPSTREAM;
import static io.lettuce.core.protocol.CommandType.GEORADIUSBYMEMBER_RO;
import static io.lettuce.core.protocol.CommandType.GEORADIUS_RO;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import io.lettuce.core.*;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.api.reactive.RedisKeyReactiveCommands;
import io.lettuce.core.api.reactive.RedisScriptingReactiveCommands;
import io.lettuce.core.api.reactive.RedisServerReactiveCommands;
import io.lettuce.core.cluster.ClusterConnectionProvider.Intent;
import io.lettuce.core.cluster.api.StatefulRedisClusterConnection;
import io.lettuce.core.cluster.api.reactive.RedisAdvancedClusterReactiveCommands;
import io.lettuce.core.cluster.api.reactive.RedisClusterReactiveCommands;
import io.lettuce.core.cluster.models.partitions.Partitions;
import io.lettuce.core.cluster.models.partitions.RedisClusterNode;
import io.lettuce.core.codec.RedisCodec;
import io.lettuce.core.internal.LettuceLists;
import io.lettuce.core.output.KeyStreamingChannel;
import io.lettuce.core.output.KeyValueStreamingChannel;
/**
* An advanced reactive and thread-safe API to a Redis Cluster connection.
*
* @param <K> Key type.
* @param <V> Value type.
* @author Mark Paluch
* @author Jon Chambers
* @since 4.0
*/
public class RedisAdvancedClusterReactiveCommandsImpl<K, V> extends AbstractRedisReactiveCommands<K, V>
implements RedisAdvancedClusterReactiveCommands<K, V> {
private static final Predicate<RedisClusterNode> ALL_NODES = node -> true;
private final RedisCodec<K, V> codec;
/**
 * Initialize a new connection.
 *
 * @param connection the stateful connection.
 * @param codec Codec used to encode/decode keys and values.
 * @deprecated since 5.2, use {@link #RedisAdvancedClusterReactiveCommandsImpl(StatefulRedisClusterConnection, RedisCodec)}.
 */
@Deprecated
public RedisAdvancedClusterReactiveCommandsImpl(StatefulRedisClusterConnectionImpl<K, V> connection,
RedisCodec<K, V> codec) {
super(connection, codec);
// Kept locally for slot-hash partitioning of multi-key commands.
this.codec = codec;
}
/**
 * Initialize a new connection.
 *
 * @param connection the stateful connection.
 * @param codec Codec used to encode/decode keys and values.
 */
public RedisAdvancedClusterReactiveCommandsImpl(StatefulRedisClusterConnection<K, V> connection, RedisCodec<K, V> codec) {
super(connection, codec);
// Kept locally for slot-hash partitioning of multi-key commands.
this.codec = codec;
}
/**
 * Sets the client name on the default connection and on every known cluster node
 * (looked up both by node id and by host/port, skipping closed connections), then
 * emits the last response.
 */
@Override
public Mono<String> clientSetname(K name) {
List<Publisher<String>> publishers = new ArrayList<>();
publishers.add(super.clientSetname(name));
for (RedisClusterNode redisClusterNode : getStatefulConnection().getPartitions()) {
Mono<RedisClusterReactiveCommands<K, V>> byNodeId = getConnectionReactive(redisClusterNode.getNodeId());
publishers.add(byNodeId.flatMap(conn -> {
if (conn.isOpen()) {
return conn.clientSetname(name);
}
return Mono.empty();
}));
Mono<RedisClusterReactiveCommands<K, V>> byHost = getConnectionReactive(redisClusterNode.getUri().getHost(),
redisClusterNode.getUri().getPort());
publishers.add(byHost.flatMap(conn -> {
if (conn.isOpen()) {
return conn.clientSetname(name);
}
return Mono.empty();
}));
}
return Flux.merge(publishers).last();
}
// Routes CLUSTER COUNTKEYSINSLOT to the node owning the given slot.
@Override
public Mono<Long> clusterCountKeysInSlot(int slot) {
Mono<RedisClusterReactiveCommands<K, V>> connectionBySlot = findConnectionBySlotReactive(slot);
return connectionBySlot.flatMap(cmd -> cmd.clusterCountKeysInSlot(slot));
}
// Routes CLUSTER GETKEYSINSLOT to the node owning the given slot.
@Override
public Flux<K> clusterGetKeysInSlot(int slot, int count) {
Mono<RedisClusterReactiveCommands<K, V>> connectionBySlot = findConnectionBySlotReactive(slot);
return connectionBySlot.flatMapMany(conn -> conn.clusterGetKeysInSlot(slot, count));
}
// Sums DBSIZE across all upstream (master) nodes.
@Override
public Mono<Long> dbsize() {
Map<String, Publisher<Long>> publishers = executeOnUpstream(RedisServerReactiveCommands::dbsize);
return Flux.merge(publishers.values()).reduce((accu, next) -> accu + next);
}
// Varargs convenience overload; delegates to the Iterable variant.
@Override
public Mono<Long> del(K... keys) {
return del(Arrays.asList(keys));
}
/**
 * DEL across slots: partitions the keys by hash slot, issues one DEL per slot
 * group, and sums the deleted-key counts. Falls through to a single command when
 * all keys map to one slot.
 */
@Override
public Mono<Long> del(Iterable<K> keys) {
Map<Integer, List<K>> partitioned = SlotHash.partition(codec, keys);
if (partitioned.size() < 2) {
return super.del(keys);
}
List<Publisher<Long>> publishers = new ArrayList<>();
for (Map.Entry<Integer, List<K>> entry : partitioned.entrySet()) {
publishers.add(super.del(entry.getValue()));
}
return Flux.merge(publishers).reduce((accu, next) -> accu + next);
}
// Varargs convenience overload; delegates to the Iterable variant.
@Override
public Mono<Long> exists(K... keys) {
return exists(Arrays.asList(keys));
}
/**
 * EXISTS across slots: partitions the keys by hash slot, issues one EXISTS per
 * slot group, and sums the per-group counts.
 */
public Mono<Long> exists(Iterable<K> keys) {
List<K> keyList = LettuceLists.newList(keys);
Map<Integer, List<K>> partitioned = SlotHash.partition(codec, keyList);
if (partitioned.size() < 2) {
return super.exists(keyList);
}
List<Publisher<Long>> publishers = new ArrayList<>();
for (Map.Entry<Integer, List<K>> entry : partitioned.entrySet()) {
publishers.add(super.exists(entry.getValue()));
}
return Flux.merge(publishers).reduce((accu, next) -> accu + next);
}
// Runs FLUSHALL on every upstream node and emits the last reply.
@Override
public Mono<String> flushall() {
Map<String, Publisher<String>> publishers = executeOnUpstream(RedisServerReactiveCommands::flushall);
return Flux.merge(publishers.values()).last();
}
// Runs FLUSHALL ASYNC on every upstream node and emits the last reply.
@Override
public Mono<String> flushallAsync() {
Map<String, Publisher<String>> publishers = executeOnUpstream(RedisServerReactiveCommands::flushallAsync);
return Flux.merge(publishers.values()).last();
}
// Runs FLUSHDB on every upstream node and emits the last reply.
@Override
public Mono<String> flushdb() {
Map<String, Publisher<String>> publishers = executeOnUpstream(RedisServerReactiveCommands::flushdb);
return Flux.merge(publishers.values()).last();
}
// Prefers the read-only GEORADIUS_RO variant when the server supports it, so the
// query can run on replicas.
@Override
public Flux<V> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit) {
if (hasRedisState() && getRedisState().hasCommand(GEORADIUS_RO)) {
return super.georadius_ro(key, longitude, latitude, distance, unit);
}
return super.georadius(key, longitude, latitude, distance, unit);
}
// Prefers the read-only GEORADIUS_RO variant when the server supports it.
@Override
public Flux<GeoWithin<V>> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit,
GeoArgs geoArgs) {
if (hasRedisState() && getRedisState().hasCommand(GEORADIUS_RO)) {
return super.georadius_ro(key, longitude, latitude, distance, unit, geoArgs);
}
return super.georadius(key, longitude, latitude, distance, unit, geoArgs);
}
// Prefers the read-only GEORADIUSBYMEMBER_RO variant when the server supports it.
@Override
public Flux<V> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit) {
if (hasRedisState() && getRedisState().hasCommand(GEORADIUSBYMEMBER_RO)) {
return super.georadiusbymember_ro(key, member, distance, unit);
}
return super.georadiusbymember(key, member, distance, unit);
}
// Prefers the read-only GEORADIUSBYMEMBER_RO variant when the server supports it.
@Override
public Flux<GeoWithin<V>> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit, GeoArgs geoArgs) {
if (hasRedisState() && getRedisState().hasCommand(GEORADIUSBYMEMBER_RO)) {
return super.georadiusbymember_ro(key, member, distance, unit, geoArgs);
}
return super.georadiusbymember(key, member, distance, unit, geoArgs);
}
// Runs KEYS on every upstream node and merges the results into one stream.
@Override
public Flux<K> keys(K pattern) {
Map<String, Publisher<K>> publishers = executeOnUpstream(commands -> commands.keys(pattern));
return Flux.merge(publishers.values());
}
// Streaming KEYS across all upstream nodes; sums the per-node key counts.
@Override
public Mono<Long> keys(KeyStreamingChannel<K> channel, K pattern) {
Map<String, Publisher<Long>> publishers = executeOnUpstream(commands -> commands.keys(channel, pattern));
return Flux.merge(publishers.values()).reduce((accu, next) -> accu + next);
}
// Varargs convenience overload; delegates to the Iterable variant.
@Override
public Flux<KeyValue<K, V>> mget(K... keys) {
return mget(Arrays.asList(keys));
}
/**
 * MGET across slots: partitions the keys by hash slot, issues one MGET per slot
 * group (concatenated to keep each group's results contiguous), then reorders the
 * collected values back into the caller's original key order before emitting them.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Flux<KeyValue<K, V>> mget(Iterable<K> keys) {
List<K> keyList = LettuceLists.newList(keys);
Map<Integer, List<K>> partitioned = SlotHash.partition(codec, keyList);
if (partitioned.size() < 2) {
return super.mget(keyList);
}
List<Publisher<KeyValue<K, V>>> publishers = new ArrayList<>();
for (Map.Entry<Integer, List<K>> entry : partitioned.entrySet()) {
publishers.add(super.mget(entry.getValue()));
}
// concat (not merge) preserves the per-slot grouping the offset math below relies on
Flux<KeyValue<K, V>> fluxes = Flux.concat(publishers);
Mono<List<KeyValue<K, V>>> map = fluxes.collectList().map(vs -> {
KeyValue<K, V>[] values = new KeyValue[vs.size()];
int offset = 0;
for (Map.Entry<Integer, List<K>> entry : partitioned.entrySet()) {
for (int i = 0; i < keyList.size(); i++) {
// indexOf locates this key inside its slot group to map back to the
// caller's order. NOTE(review): duplicate keys within one slot group all
// resolve to the first occurrence's value — confirm this is acceptable.
int index = entry.getValue().indexOf(keyList.get(i));
if (index == -1) {
continue;
}
values[i] = vs.get(offset + index);
}
offset += entry.getValue().size();
}
return Arrays.asList(values);
});
return map.flatMapIterable(keyValues -> keyValues);
}
@Override
// Varargs convenience overload; delegates to the Iterable variant.
public Mono<Long> mget(KeyValueStreamingChannel<K, V> channel, K... keys) {
return mget(channel, Arrays.asList(keys));
}
@Override
// Streaming cluster-aware MGET: one command per hash slot; the number of
// values emitted to the channel is summed. Flux.merge gives no cross-slot
// ordering guarantee for the streamed values.
public Mono<Long> mget(KeyValueStreamingChannel<K, V> channel, Iterable<K> keys) {
// Snapshot the Iterable so it can be traversed more than once.
List<K> keyList = LettuceLists.newList(keys);
Map<Integer, List<K>> partitioned = SlotHash.partition(codec, keyList);
if (partitioned.size() < 2) {
return super.mget(channel, keyList);
}
List<Publisher<Long>> publishers = new ArrayList<>();
for (Map.Entry<Integer, List<K>> entry : partitioned.entrySet()) {
publishers.add(super.mget(channel, entry.getValue()));
}
return Flux.merge(publishers).reduce((accu, next) -> accu + next);
}
@Override
// Cluster MSETNX: the map is split per hash slot and one MSETNX is sent per
// slot; the result is the logical AND of the per-slot replies.
// NOTE(review): unlike single-node MSETNX this is not atomic across slots —
// some slots may be written while others are not.
public Mono<Boolean> msetnx(Map<K, V> map) {
return pipeliningWithMap(map, kvMap -> RedisAdvancedClusterReactiveCommandsImpl.super.msetnx(kvMap).flux(),
booleanFlux -> booleanFlux).reduce((accu, next) -> accu && next);
}
@Override
// Cluster MSET: the map is split per hash slot; emits the last per-slot
// reply (each slot answers "OK" on success).
public Mono<String> mset(Map<K, V> map) {
return pipeliningWithMap(map, kvMap -> RedisAdvancedClusterReactiveCommandsImpl.super.mset(kvMap).flux(),
booleanFlux -> booleanFlux).last();
}
@Override
// RANDOMKEY on a randomly chosen partition, so keys from every node have a
// chance of being returned.
public Mono<K> randomkey() {
Partitions partitions = getStatefulConnection().getPartitions();
int index = ThreadLocalRandom.current().nextInt(partitions.size());
Mono<RedisClusterReactiveCommands<K, V>> connection = getConnectionReactive(partitions.getPartition(index).getNodeId());
return connection.flatMap(RedisKeyReactiveCommands::randomkey);
}
@Override
// SCRIPT FLUSH must reach every node because scripts are cached per node;
// emits the last node's reply.
public Mono<String> scriptFlush() {
Map<String, Publisher<String>> publishers = executeOnNodes(RedisScriptingReactiveCommands::scriptFlush, ALL_NODES);
return Flux.merge(publishers.values()).last();
}
@Override
// SCRIPT KILL on every node. Nodes without a running script reply with an
// error; onErrorReturn("OK") deliberately masks those so one idle node does
// not fail the whole operation.
public Mono<String> scriptKill() {
Map<String, Publisher<String>> publishers = executeOnNodes(RedisScriptingReactiveCommands::scriptKill, ALL_NODES);
return Flux.merge(publishers.values()).onErrorReturn("OK").last();
}
@Override
// SCRIPT LOAD on every node so the script's SHA is usable cluster-wide;
// emits the last returned SHA.
public Mono<String> scriptLoad(byte[] script) {
Map<String, Publisher<String>> publishers = executeOnNodes((commands) -> commands.scriptLoad(script), ALL_NODES);
return Flux.merge(publishers.values()).last();
}
@Override
// Sends SHUTDOWN to every known node; completes when all publishers complete.
public Mono<Void> shutdown(boolean save) {
Map<String, Publisher<Void>> publishers = executeOnNodes(commands -> commands.shutdown(save), ALL_NODES);
return Flux.merge(publishers.values()).then();
}
@Override
// Varargs convenience overload; delegates to the Iterable variant.
public Mono<Long> touch(K... keys) {
return touch(Arrays.asList(keys));
}
// Cluster-aware TOUCH: one command per hash slot; the per-slot touched-key
// counts are summed. NOTE(review): lacks @Override while unlink(Iterable)
// carries one — confirm whether the interface declares this overload.
public Mono<Long> touch(Iterable<K> keys) {
// Snapshot the Iterable so it can be traversed more than once.
List<K> keyList = LettuceLists.newList(keys);
Map<Integer, List<K>> partitioned = SlotHash.partition(codec, keyList);
if (partitioned.size() < 2) {
return super.touch(keyList);
}
List<Publisher<Long>> publishers = new ArrayList<>();
for (Map.Entry<Integer, List<K>> entry : partitioned.entrySet()) {
publishers.add(super.touch(entry.getValue()));
}
return Flux.merge(publishers).reduce((accu, next) -> accu + next);
}
@Override
// Varargs convenience overload; delegates to the Iterable variant.
public Mono<Long> unlink(K... keys) {
return unlink(Arrays.asList(keys));
}
@Override
// Cluster-aware UNLINK: one command per hash slot; the per-slot deletion
// counts are summed into a single total.
public Mono<Long> unlink(Iterable<K> keys) {
    // Snapshot the Iterable once: it is consumed both by the slot
    // partitioning and by the actual command, and a single-pass Iterable
    // would otherwise be exhausted after partitioning. This also mirrors
    // touch(Iterable) and mget(Iterable).
    List<K> keyList = LettuceLists.newList(keys);
    Map<Integer, List<K>> partitioned = SlotHash.partition(codec, keyList);
    if (partitioned.size() < 2) {
        // All keys map to one slot; a single UNLINK suffices.
        return super.unlink(keyList);
    }
    List<Publisher<Long>> publishers = new ArrayList<>();
    for (Map.Entry<Integer, List<K>> entry : partitioned.entrySet()) {
        publishers.add(super.unlink(entry.getValue()));
    }
    return Flux.merge(publishers).reduce((accu, next) -> accu + next);
}
@Override
// Looks up the connection for a node id and exposes its reactive API.
public RedisClusterReactiveCommands<K, V> getConnection(String nodeId) {
return getStatefulConnection().getConnection(nodeId).reactive();
}
// Non-blocking variant: obtains the node connection asynchronously and maps
// it to its reactive command interface.
private Mono<RedisClusterReactiveCommands<K, V>> getConnectionReactive(String nodeId) {
return getMono(getConnectionProvider().<K, V> getConnectionAsync(Intent.WRITE, nodeId))
.map(StatefulRedisConnection::reactive);
}
@Override
// Looks up the connection for a host/port pair and exposes its reactive API.
public RedisClusterReactiveCommands<K, V> getConnection(String host, int port) {
return getStatefulConnection().getConnection(host, port).reactive();
}
// Non-blocking variant of getConnection(host, port): obtains the connection
// asynchronously and maps it to its reactive command interface.
private Mono<RedisClusterReactiveCommands<K, V>> getConnectionReactive(String host, int port) {
return getMono(getConnectionProvider().<K, V> getConnectionAsync(Intent.WRITE, host, port))
.map(StatefulRedisConnection::reactive);
}
@Override
// The underlying connection of this implementation is always a cluster
// connection, hence the cast.
public StatefulRedisClusterConnection<K, V> getStatefulConnection() {
return (StatefulRedisClusterConnection<K, V>) super.getConnection();
}
@Override
// Starts a cluster-wide key SCAN from the initial cursor; clusterScan(..)
// walks the nodes one after another.
public Mono<KeyScanCursor<K>> scan() {
return clusterScan(ScanCursor.INITIAL, (connection, cursor) -> connection.scan(), reactiveClusterKeyScanCursorMapper());
}
@Override
// Cluster SCAN with scan arguments (e.g. MATCH/COUNT), from the initial cursor.
public Mono<KeyScanCursor<K>> scan(ScanArgs scanArgs) {
return clusterScan(ScanCursor.INITIAL, (connection, cursor) -> connection.scan(scanArgs),
reactiveClusterKeyScanCursorMapper());
}
@Override
// Continues a cluster SCAN from a previously returned cursor, with arguments.
public Mono<KeyScanCursor<K>> scan(ScanCursor scanCursor, ScanArgs scanArgs) {
return clusterScan(scanCursor, (connection, cursor) -> connection.scan(cursor, scanArgs),
reactiveClusterKeyScanCursorMapper());
}
@Override
// Continues a cluster SCAN from a previously returned cursor.
public Mono<KeyScanCursor<K>> scan(ScanCursor scanCursor) {
return clusterScan(scanCursor, RedisKeyReactiveCommands::scan, reactiveClusterKeyScanCursorMapper());
}
@Override
// Streaming cluster SCAN from the initial cursor; keys go to the channel.
public Mono<StreamScanCursor> scan(KeyStreamingChannel<K> channel) {
return clusterScan(ScanCursor.INITIAL, (connection, cursor) -> connection.scan(channel),
reactiveClusterStreamScanCursorMapper());
}
@Override
// Streaming cluster SCAN from the initial cursor with scan arguments.
public Mono<StreamScanCursor> scan(KeyStreamingChannel<K> channel, ScanArgs scanArgs) {
return clusterScan(ScanCursor.INITIAL, (connection, cursor) -> connection.scan(channel, scanArgs),
reactiveClusterStreamScanCursorMapper());
}
@Override
// Streaming cluster SCAN continuation with scan arguments.
public Mono<StreamScanCursor> scan(KeyStreamingChannel<K> channel, ScanCursor scanCursor, ScanArgs scanArgs) {
return clusterScan(scanCursor, (connection, cursor) -> connection.scan(channel, cursor, scanArgs),
reactiveClusterStreamScanCursorMapper());
}
@Override
// Streaming cluster SCAN continuation from a previously returned cursor.
public Mono<StreamScanCursor> scan(KeyStreamingChannel<K> channel, ScanCursor scanCursor) {
return clusterScan(scanCursor, (connection, cursor) -> connection.scan(channel, cursor),
reactiveClusterStreamScanCursorMapper());
}
@SuppressWarnings("unchecked")
// Bridges this instance's state (connection, connection provider) into the
// static cluster-scan implementation below.
private <T extends ScanCursor> Mono<T> clusterScan(ScanCursor cursor,
BiFunction<RedisKeyReactiveCommands<K, V>, ScanCursor, Mono<T>> scanFunction,
ClusterScanSupport.ScanCursorMapper<Mono<T>> resultMapper) {
return clusterScan(getStatefulConnection(), getConnectionProvider(), cursor, scanFunction,
(ClusterScanSupport.ScanCursorMapper) resultMapper);
}
// Splits a key/value map by hash slot and applies the given function once
// per slot, merging the resulting fluxes through resultFunction. When all
// keys share a slot, the map is used unchanged.
private <T> Flux<T> pipeliningWithMap(Map<K, V> map, Function<Map<K, V>, Flux<T>> function,
Function<Flux<T>, Flux<T>> resultFunction) {
Map<Integer, List<K>> partitioned = SlotHash.partition(codec, map.keySet());
if (partitioned.size() < 2) {
return function.apply(map);
}
// Build one sub-map per slot, preserving the original values.
List<Flux<T>> publishers = partitioned.values().stream().map(ks -> {
Map<K, V> op = new HashMap<>();
ks.forEach(k -> op.put(k, map.get(k)));
return function.apply(op);
}).collect(Collectors.toList());
return resultFunction.apply(Flux.merge(publishers));
}
/**
 * Run a command on all available upstream (master) nodes.
 *
 * @param function function producing the command
 * @param <T> result type
 * @return map of node id to the command's publisher.
 */
protected <T> Map<String, Publisher<T>> executeOnUpstream(
Function<RedisClusterReactiveCommands<K, V>, ? extends Publisher<T>> function) {
return executeOnNodes(function, redisClusterNode -> redisClusterNode.is(UPSTREAM));
}
/**
 * Run a command on all available nodes that match {@code filter}.
 *
 * @param function function producing the command
 * @param filter filter function for the node selection
 * @param <T> result type
 * @return map of node id to the command's publisher.
 */
protected <T> Map<String, Publisher<T>> executeOnNodes(
Function<RedisClusterReactiveCommands<K, V>, ? extends Publisher<T>> function, Predicate<RedisClusterNode> filter) {
Map<String, Publisher<T>> executions = new HashMap<>();
for (RedisClusterNode redisClusterNode : getStatefulConnection().getPartitions()) {
if (!filter.test(redisClusterNode)) {
continue;
}
// Connect by the host/port advertised in the cluster topology.
RedisURI uri = redisClusterNode.getUri();
Mono<RedisClusterReactiveCommands<K, V>> connection = getConnectionReactive(uri.getHost(), uri.getPort());
executions.put(redisClusterNode.getNodeId(), connection.flatMapMany(function::apply));
}
return executions;
}
// Resolves the node currently serving the given slot; errors when the
// topology has no partition for it.
private Mono<RedisClusterReactiveCommands<K, V>> findConnectionBySlotReactive(int slot) {
RedisClusterNode node = getStatefulConnection().getPartitions().getPartitionBySlot(slot);
if (node != null) {
return getConnectionReactive(node.getUri().getHost(), node.getUri().getPort());
}
return Mono.error(new RedisException("No partition for slot " + slot));
}
// The command set advertised by the connected server. Only safe to call when
// hasRedisState() returned true (the cast would fail otherwise).
private CommandSet getRedisState() {
return ((StatefulRedisClusterConnectionImpl<K, V>) super.getConnection()).getCommandSet();
}
// Whether the underlying connection exposes the server's command set.
private boolean hasRedisState() {
return super.getConnection() instanceof StatefulRedisClusterConnectionImpl;
}
// Extracts the async connection provider from the cluster channel writer.
private AsyncClusterConnectionProvider getConnectionProvider() {
ClusterDistributionChannelWriter writer = (ClusterDistributionChannelWriter) getStatefulConnection().getChannelWriter();
return (AsyncClusterConnectionProvider) writer.getClusterConnectionProvider();
}
/**
 * Perform a SCAN in the cluster: the cursor carries the remaining node ids,
 * the node currently being scanned, and that node's continuation cursor.
 * One node is scanned at a time and the mapper stitches the per-node
 * cursors into the cluster-level cursor returned to the caller.
 */
static <T extends ScanCursor, K, V> Mono<T> clusterScan(StatefulRedisClusterConnection<K, V> connection,
AsyncClusterConnectionProvider connectionProvider, ScanCursor cursor,
BiFunction<RedisKeyReactiveCommands<K, V>, ScanCursor, Mono<T>> scanFunction,
ClusterScanSupport.ScanCursorMapper<Mono<T>> mapper) {
List<String> nodeIds = ClusterScanSupport.getNodeIds(connection, cursor);
String currentNodeId = ClusterScanSupport.getCurrentNodeId(cursor, nodeIds);
ScanCursor continuationCursor = ClusterScanSupport.getContinuationCursor(cursor);
Mono<T> scanCursor = getMono(connectionProvider.<K, V> getConnectionAsync(Intent.WRITE, currentNodeId))
.flatMap(conn -> scanFunction.apply(conn.reactive(), continuationCursor));
return mapper.map(nodeIds, currentNodeId, scanCursor);
}
// Adapts a CompletableFuture into a Mono without blocking.
private static <T> Mono<T> getMono(CompletableFuture<T> future) {
return Mono.fromCompletionStage(future);
}
}
| |
package com.zhy.http.okhttp.https;
import java.io.IOException;
import java.io.InputStream;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
/**
 * HTTPS helpers for OkHttp: builds an {@link SSLSocketFactory} from optional
 * pinned server certificates and an optional BKS client key store (mutual TLS).
 * <p>
 * Created by zhy on 15/12/14.
 */
public class HttpsUtils
{
    /**
     * Creates an {@link SSLSocketFactory}.
     *
     * @param certificates streams of X.509 server certificates to trust, or {@code null}
     * @param bksFile      BKS key store stream holding the client key pair, or {@code null}
     * @param password     password of the BKS key store, or {@code null}
     * @return a configured TLS socket factory
     *
     * <p>SECURITY WARNING: when no certificates are supplied, this falls back
     * to {@link UnSafeTrustManager}, which accepts EVERY server certificate
     * and is therefore vulnerable to man-in-the-middle attacks. Use that mode
     * for debugging only.
     */
    public static SSLSocketFactory getSslSocketFactory(InputStream[] certificates, InputStream bksFile, String password)
    {
        try
        {
            TrustManager[] trustManagers = prepareTrustManager(certificates);
            KeyManager[] keyManagers = prepareKeyManager(bksFile, password);
            SSLContext sslContext = SSLContext.getInstance("TLS");
            TrustManager trustManager;
            if (trustManagers != null)
            {
                // Pinned certificates supplied: consult the JVM's default
                // trust store first, then fall back to the local certificates.
                trustManager = new MyTrustManager(chooseTrustManager(trustManagers));
            } else
            {
                // No certificates: trust-all fallback (insecure, see warning).
                trustManager = new UnSafeTrustManager();
            }
            sslContext.init(keyManagers, new TrustManager[]{trustManager}, new SecureRandom());
            return sslContext.getSocketFactory();
        } catch (NoSuchAlgorithmException | KeyManagementException | KeyStoreException e)
        {
            // TLS and the platform default algorithms are mandatory parts of
            // every JRE, so failing here indicates a broken runtime, not bad input.
            throw new AssertionError(e);
        }
    }

    /**
     * Hostname verifier that accepts every host name. Insecure; debugging only.
     * (static nested class: it uses no state of the enclosing class)
     */
    private static class UnSafeHostnameVerifier implements HostnameVerifier
    {
        @Override
        public boolean verify(String hostname, SSLSession session)
        {
            return true;
        }
    }

    /**
     * Trust manager that accepts every certificate chain without validation.
     * Insecure; only used when no pinned certificates were supplied.
     */
    private static class UnSafeTrustManager implements X509TrustManager
    {
        @Override
        public void checkClientTrusted(X509Certificate[] chain, String authType)
                throws CertificateException
        {
            // Intentionally empty: accept all client certificates.
        }

        @Override
        public void checkServerTrusted(X509Certificate[] chain, String authType)
                throws CertificateException
        {
            // Intentionally empty: accept all server certificates.
        }

        @Override
        public X509Certificate[] getAcceptedIssuers()
        {
            return new java.security.cert.X509Certificate[]{};
        }
    }

    /**
     * Loads the given certificate streams into an in-memory key store and
     * returns trust managers backed by it.
     *
     * @param certificates X.509 certificate streams; each stream is closed after use
     * @return trust managers for the certificates, or {@code null} when no
     *         certificates were given or loading failed
     */
    private static TrustManager[] prepareTrustManager(InputStream... certificates)
    {
        if (certificates == null || certificates.length <= 0) return null;
        try
        {
            CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
            KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
            keyStore.load(null);
            int index = 0;
            for (InputStream certificate : certificates)
            {
                // The running index serves as the alias; aliases only need to be unique.
                String certificateAlias = Integer.toString(index++);
                keyStore.setCertificateEntry(certificateAlias, certificateFactory.generateCertificate(certificate));
                try
                {
                    if (certificate != null)
                        certificate.close();
                } catch (IOException ignored)
                {
                    // Best-effort close; the certificate has already been loaded.
                }
            }
            TrustManagerFactory trustManagerFactory =
                    TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            trustManagerFactory.init(keyStore);
            return trustManagerFactory.getTrustManagers();
        } catch (Exception e)
        {
            // Lenient by design (matches original behavior): log the failure
            // and return null so the caller falls back to other trust setup.
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Loads the client key pair from a BKS key store.
     *
     * @param bksFile  key store stream in BKS format, or {@code null}
     * @param password key store password, or {@code null}
     * @return key managers for client authentication, or {@code null} when no
     *         key store/password was given or loading failed
     */
    private static KeyManager[] prepareKeyManager(InputStream bksFile, String password)
    {
        try
        {
            if (bksFile == null || password == null) return null;
            KeyStore clientKeyStore = KeyStore.getInstance("BKS");
            clientKeyStore.load(bksFile, password.toCharArray());
            KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
            keyManagerFactory.init(clientKeyStore, password.toCharArray());
            return keyManagerFactory.getKeyManagers();
        } catch (Exception e)
        {
            // Lenient by design: log and return null so TLS still works
            // without client authentication.
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Returns the first {@link X509TrustManager} from the array, or
     * {@code null} when none is present.
     */
    private static X509TrustManager chooseTrustManager(TrustManager[] trustManagers)
    {
        for (TrustManager trustManager : trustManagers)
        {
            if (trustManager instanceof X509TrustManager)
            {
                return (X509TrustManager) trustManager;
            }
        }
        return null;
    }

    /**
     * Trust manager that first consults the JVM's default trust store and,
     * only when that rejects the chain, the locally supplied certificates.
     */
    private static class MyTrustManager implements X509TrustManager
    {
        private final X509TrustManager defaultTrustManager;
        private final X509TrustManager localTrustManager;

        public MyTrustManager(X509TrustManager localTrustManager) throws NoSuchAlgorithmException, KeyStoreException
        {
            // Initializing with a null KeyStore selects the JVM's default trust store.
            TrustManagerFactory factory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            factory.init((KeyStore) null);
            this.defaultTrustManager = chooseTrustManager(factory.getTrustManagers());
            this.localTrustManager = localTrustManager;
        }

        @Override
        public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException
        {
            // Client certificates are not validated by this trust manager.
        }

        @Override
        public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException
        {
            try
            {
                defaultTrustManager.checkServerTrusted(chain, authType);
            } catch (CertificateException ce)
            {
                // Not trusted by the platform: fall back to the pinned certificates.
                localTrustManager.checkServerTrusted(chain, authType);
            }
        }

        @Override
        public X509Certificate[] getAcceptedIssuers()
        {
            return new X509Certificate[0];
        }
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.databridge.agent.endpoint;
import com.lmax.disruptor.BlockingWaitStrategy;
import com.lmax.disruptor.BusySpinWaitStrategy;
import com.lmax.disruptor.EventHandler;
import com.lmax.disruptor.InsufficientCapacityException;
import com.lmax.disruptor.RingBuffer;
import com.lmax.disruptor.SleepingWaitStrategy;
import com.lmax.disruptor.WaitStrategy;
import com.lmax.disruptor.YieldingWaitStrategy;
import com.lmax.disruptor.dsl.Disruptor;
import com.lmax.disruptor.dsl.ProducerType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.databridge.agent.DataEndpointAgent;
import org.wso2.carbon.databridge.agent.exception.DataEndpointConfigurationException;
import org.wso2.carbon.databridge.agent.exception.EventQueueFullException;
import org.wso2.carbon.databridge.agent.util.DataEndpointConstants;
import org.wso2.carbon.databridge.agent.util.DataPublisherUtil;
import org.wso2.carbon.databridge.commons.Event;
import org.wso2.carbon.databridge.commons.utils.DataBridgeThreadFactory;
import java.io.IOException;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* This class holds the endpoints associated within a group. Also it has a queue
* to hold the list of events that needs to be processed by the endpoints with
* provided the load balancing, or failover configuration.
*/
public class DataEndpointGroup implements DataEndpointFailureCallback {
private static final Log log = LogFactory.getLog(DataEndpointGroup.class);
// Endpoints in this group; events are dispatched to them per the HA mode.
private List<DataEndpoint> dataEndpoints;
// FAILOVER: always try endpoint 0 first; LOADBALANCE: round-robin.
private HAType haType;
// Disruptor-backed buffer between publishers and the endpoint dispatcher.
private EventQueue eventQueue;
// Seconds between reconnection sweeps over the endpoints.
private int reconnectionInterval;
private final Integer START_INDEX = 0;
// Next endpoint index for round-robin load balancing.
private AtomicInteger currentDataPublisherIndex = new AtomicInteger(START_INDEX);
// Number of registered endpoints (exclusive upper bound when indexing).
private AtomicInteger maximumDataPublisherIndex = new AtomicInteger();
private ScheduledExecutorService reconnectionService;
// When true, events are sent immediately instead of batched.
private boolean immediateDispatching = false;
// Configured name of the disruptor wait strategy.
private final String disruptorWaitStrategy;
public enum HAType {
FAILOVER, LOADBALANCE
}
// Builds the group from agent configuration and schedules the periodic
// reconnection task.
public DataEndpointGroup(HAType haType, DataEndpointAgent agent) {
this.dataEndpoints = new ArrayList<>();
this.haType = haType;
this.reconnectionService = Executors.newScheduledThreadPool(1, new DataBridgeThreadFactory("ReconnectionService"))<SPECIAL_442>;
this.reconnectionInterval = agent.getAgentConfiguration().getReconnectionInterval();
this.disruptorWaitStrategy = agent.getAgentConfiguration().getWaitStrategy();
this.immediateDispatching = agent.getAgentConfiguration().isImmediateDispatching();
this.eventQueue = new EventQueue(agent.getAgentConfiguration().getQueueSize());
this.reconnectionService.scheduleAtFixedRate(new ReconnectionTask(), reconnectionInterval,
reconnectionInterval, TimeUnit.SECONDS);
currentDataPublisherIndex.set(START_INDEX);
}
// Registers an endpoint and wires this group in as its failure callback.
public void addDataEndpoint(DataEndpoint dataEndpoint) {
dataEndpoints.add(dataEndpoint);
dataEndpoint.registerDataEndpointFailureCallback(this);
maximumDataPublisherIndex.incrementAndGet();
}
// Non-blocking publish; throws when the ring buffer is full.
public void tryPublish(Event event) throws EventQueueFullException {
eventQueue.tryPut(event);
}
// Publish with a bounded wait for free queue capacity.
public void tryPublish(Event event, long timeoutMS) throws EventQueueFullException {
eventQueue.tryPut(event, timeoutMS);
}
// Blocking publish; waits while at least one endpoint is still available.
public void publish(Event event) {
eventQueue.put(event);
}
class EventQueue {
private RingBuffer<WrappedEventFactory.WrappedEvent> ringBuffer;
private Disruptor<WrappedEventFactory.WrappedEvent> eventQueueDisruptor;
private ExecutorService eventQueuePool;
EventQueue(int queueSize) {
// Map the configured strategy name onto a disruptor wait strategy;
// unrecognized names fall back to the blocking (lowest-CPU) strategy.
WaitStrategy waitStrategy;
if (disruptorWaitStrategy.equalsIgnoreCase(DataEndpointConstants.YIELDING_WAIT_STRATEGY)) {
waitStrategy = new YieldingWaitStrategy();
} else if (disruptorWaitStrategy.equalsIgnoreCase(DataEndpointConstants.SLEEPING_WAITING_STRATEGY)) {
waitStrategy = new SleepingWaitStrategy();
} else if (disruptorWaitStrategy.equalsIgnoreCase(DataEndpointConstants.BUSY_SPIN_WAIT_STRATEGY)) {
waitStrategy = new BusySpinWaitStrategy();
} else {
waitStrategy = new BlockingWaitStrategy();
}
eventQueuePool = Executors.newCachedThreadPool(new DataBridgeThreadFactory("EventQueue"));
eventQueueDisruptor = new Disruptor<>(new WrappedEventFactory(), queueSize, eventQueuePool, ProducerType.MULTI, waitStrategy);
// immediateDispatching selects the handler that bypasses batching.
if (immediateDispatching) {
eventQueueDisruptor.handleEventsWith(new EventDispatcher());
} else {
eventQueueDisruptor.handleEventsWith(new EventQueueWorker());
}
this.ringBuffer = eventQueueDisruptor.start();
}
// Claims one slot, stores the event and publishes the sequence; fails
// fast when the ring buffer has no free capacity.
private void tryPut(Event event) throws EventQueueFullException {
long sequence;
try {
sequence = this.ringBuffer.tryNext(1);
WrappedEventFactory.WrappedEvent bufferedEvent = this.ringBuffer.get(sequence);
bufferedEvent.setEvent(event);
this.ringBuffer.publish(sequence);
} catch (InsufficientCapacityException e) {
throw new EventQueueFullException("Cannot send events because the event queue is full", e);
}
}
// Like tryPut(Event) but retries (1 ms sleeps) until timeoutMS elapses.
private void tryPut(Event event, long timeoutMS) throws EventQueueFullException {
long sequence;
long stopTime = System.currentTimeMillis() + timeoutMS;
while (true) {
try {
sequence = this.ringBuffer.tryNext(1);
WrappedEventFactory.WrappedEvent bufferedEvent = this.ringBuffer.get(sequence);
bufferedEvent.setEvent(event);
this.ringBuffer.publish(sequence);
break;
} catch (InsufficientCapacityException ex) {
if (stopTime <= System.currentTimeMillis()) {
throw new EventQueueFullException("Cannot send events because the event queue is full", ex);
}
try {
Thread.sleep(1);
} catch (InterruptedException ignored) {
}
}
}
}
// Endless wait while at least one endpoint is available.
// NOTE(review): when no endpoint remains available the loop exits and the
// event is dropped silently — confirm this best-effort semantics is intended.
private void put(Event event) {
do {
try {
long sequence = this.ringBuffer.tryNext(1);
WrappedEventFactory.WrappedEvent bufferedEvent = this.ringBuffer.get(sequence);
bufferedEvent.setEvent(event);
this.ringBuffer.publish(sequence);
return;
} catch (InsufficientCapacityException ex) {
try {
Thread.sleep(2);
} catch (InterruptedException ignored) {
}
}
} while (isActiveDataEndpointExists());
}
// Stops the worker pool and shuts the disruptor down.
private void shutdown() {
eventQueuePool.shutdown();
eventQueueDisruptor.shutdown();
}
}
// Batch-mode consumer: hands each event to an endpoint and flushes all
// active endpoints at the end of every disruptor batch.
class EventQueueWorker implements EventHandler<WrappedEventFactory.WrappedEvent> {
@Override
public void onEvent(WrappedEventFactory.WrappedEvent wrappedEvent, long sequence, boolean endOfBatch) {
DataEndpoint endpoint = getDataEndpoint(true);
Event event = wrappedEvent.getEvent();
if (endpoint != null) {
endpoint.collectAndSend(event);
if (endOfBatch) {
flushAllDataEndpoints();
}
} else {
// getDataEndpoint(true) only returns null during shutdown.
log.error("Dropping event as DataPublisher is shutting down.");
if (log.isDebugEnabled()) {
log.debug("Data publisher is shutting down, dropping event : " + event);
}
}
}
}
// Immediate-mode consumer: like EventQueueWorker but sends without
// batching; also shields the disruptor thread from unexpected errors.
class EventDispatcher implements EventHandler<WrappedEventFactory.WrappedEvent> {
@Override
public void onEvent(WrappedEventFactory.WrappedEvent wrappedEvent, long sequence, boolean endOfBatch) {
try {
DataEndpoint endpoint = getDataEndpoint(true);
Event event = wrappedEvent.getEvent();
if (endpoint != null) {
endpoint.collectAndSendNow(event);
if (endOfBatch) {
flushAllDataEndpointsNow();
}
} else {
log.error("Dropping event as DataPublisher is shutting down.");
if (log.isDebugEnabled()) {
log.debug("Data publisher is shutting down, dropping event : " + event);
}
}
} catch (Throwable t) {
// A throwing handler would stall the disruptor; log and continue.
log.error("Unexpected error: " + t.getMessage(), t);
}
}
}
// Flushes buffered events on every ACTIVE endpoint.
private void flushAllDataEndpoints() {
for (DataEndpoint dataEndpoint : dataEndpoints) {
if (dataEndpoint.getState().equals(DataEndpoint.State.ACTIVE)) {
dataEndpoint.flushEvents();
}
}
}
// Immediate-mode flush on every ACTIVE endpoint.
private void flushAllDataEndpointsNow() {
for (DataEndpoint dataEndpoint : dataEndpoints) {
if (dataEndpoint.getState().equals(DataEndpoint.State.ACTIVE)) {
dataEndpoint.flushEventsNow();
}
}
}
/**
 * Find the next event-processable endpoint based on load balancing and
 * failover logic, and wait indefinitely until at least one data endpoint
 * becomes available depending on the busy-wait parameter.
 *
 * @param isBusyWait wait until at least one endpoint becomes available
 * @return DataEndpoint which can accept and send the events, or null.
 */
private DataEndpoint getDataEndpoint(boolean isBusyWait) {
int startIndex;
if (haType.equals(HAType.LOADBALANCE)) {
startIndex = getDataPublisherIndex();
} else {
startIndex = START_INDEX;
}
int index = startIndex;
while (true) {
DataEndpoint dataEndpoint = dataEndpoints.get(index);
if (dataEndpoint.getState().equals(DataEndpoint.State.ACTIVE)) {
return dataEndpoint;
} else if (haType.equals(HAType.FAILOVER) && (dataEndpoint.getState().equals(DataEndpoint.State.BUSY) ||
dataEndpoint.getState().equals(DataEndpoint.State.INITIALIZING))) {
/**
 * Wait for some time until the failover endpoint finishes publishing
 *
 */
busyWait(1);
} else {
// Advance (wrapping) to the next endpoint.
index++;
if (index > maximumDataPublisherIndex.get() - 1) {
index = START_INDEX;
}
if (index == startIndex) {
if (isBusyWait) {
if (!reconnectionService.isShutdown()) {
/**
 * Have fully iterated the data publisher list,
 * and busy wait until a data publisher
 * becomes available
 */
busyWait(1);
} else {
// Shutting down: keep waiting only while some endpoint may recover.
if (!isActiveDataEndpointExists()) {
return null;
} else {
busyWait(1);
}
}
} else {
return null;
}
}
}
}
}
// Sleep helper; interruption is ignored because callers poll in a loop.
private void busyWait(long timeInMilliSec) {
try {
Thread.sleep(timeInMilliSec);
} catch (InterruptedException ignored) {
}
}
// True when any endpoint is in a state other than UNAVAILABLE.
private boolean isActiveDataEndpointExists() {
int index = START_INDEX;
while (index < maximumDataPublisherIndex.get()) {
DataEndpoint dataEndpoint = dataEndpoints.get(index);
if (dataEndpoint.getState() != DataEndpoint.State.UNAVAILABLE) {
if (log.isDebugEnabled()) {
log.debug("Available endpoint : " + dataEndpoint + " existing in state - " + dataEndpoint.getState());
}
return true;
}
index++;
}
return false;
}
// Round-robin index for LOADBALANCE; synchronized so the increment and the
// wrap back to START_INDEX happen atomically.
private synchronized int getDataPublisherIndex() {
int index = currentDataPublisherIndex.getAndIncrement();
if (index == maximumDataPublisherIndex.get() - 1) {
currentDataPublisherIndex.set(START_INDEX);
}
return index;
}
// Failure callback: re-sends events through active endpoints and re-queues
// whatever could not be delivered.
public void tryResendEvents(List<Event> events) {
List<Event> unsuccessfulEvents = trySendActiveEndpoints(events);
for (Event event : unsuccessfulEvents) {
try {
eventQueue.tryPut(event);
} catch (EventQueueFullException e) {
log.error("Unable to put the event :" + event, e);
}
}
}
// Attempts a one-shot (non-waiting) send of each event; returns the events
// for which no endpoint was available.
private List<Event> trySendActiveEndpoints(List<Event> events) {
ArrayList<Event> unsuccessfulEvents = new ArrayList<>();
for (Event event : events) {
DataEndpoint endpoint = getDataEndpoint(false);
if (endpoint != null) {
endpoint.collectAndSend(event);
} else {
unsuccessfulEvents.add(event);
}
}
flushAllDataEndpoints();
return unsuccessfulEvents;
}
// Periodic task: reconnects dead endpoints and deactivates endpoints whose
// receiver is no longer reachable.
private class ReconnectionTask implements Runnable {
public void run() {
boolean isOneReceiverConnected = false;
for (int i = START_INDEX; i < maximumDataPublisherIndex.get(); i++) {
DataEndpoint dataEndpoint = dataEndpoints.get(i);
if (!dataEndpoint.isConnected()) {
try {
dataEndpoint.connect();
} catch (Exception ex) {
// Connection attempt failed; mark the endpoint unusable until next sweep.
dataEndpoint.deactivate();
}
} else {
try {
String[] urlElements = DataPublisherUtil.getProtocolHostPort(
dataEndpoint.getDataEndpointConfiguration().getReceiverURL());
// Probe the receiver socket; deactivate when it is gone.
if (!isServerExists(urlElements[1], Integer.parseInt(urlElements[2]))) {
dataEndpoint.deactivate();
}
} catch (DataEndpointConfigurationException exception) {
log.warn("Data Endpoint with receiver URL:" + dataEndpoint.getDataEndpointConfiguration().getReceiverURL()
+ " could not be deactivated", exception);
}
}
if (dataEndpoint.isConnected()) {
isOneReceiverConnected = true;
}
}
if (!isOneReceiverConnected) {
log.warn("No receiver is reachable at reconnection, will try to reconnect every " + reconnectionInterval + " sec");
}
}
// TCP-level reachability probe.
// NOTE(review): the Socket is created without an explicit connect timeout,
// so this may block for the platform default — confirm acceptable here.
private boolean isServerExists(String ip, int port) {
try {
Socket socket = new Socket(ip, port);
socket.close();
return true;
} catch (UnknownHostException e) {
return false;
} catch (IOException e) {
return false;
} catch (Exception e) {
return false;
}
}
}
// Renders the receiver group as "[ url1,url2 ]" using the separator that
// matches the HA mode; the closing bracket is appended at the last element.
public String toString() {
StringBuilder group = new StringBuilder();
group.append("[ ");
for (int i = 0; i < dataEndpoints.size(); i++) {
DataEndpoint endpoint = dataEndpoints.get(i);
group.append(endpoint.toString());
if (i == dataEndpoints.size() - 1) {
group.append(" ]");
return group.toString();
} else {
if (haType == HAType.FAILOVER) {
group.append(DataEndpointConstants.FAILOVER_URL_GROUP_SEPARATOR);
} else {
group.append(DataEndpointConstants.LB_URL_GROUP_SEPARATOR);
}
}
}
// Reached only when the endpoint list is empty.
return group.toString();
}
// Stops the reconnection scheduler, drains the queue and shuts down all endpoints.
public void shutdown() {
reconnectionService.shutdownNow();
eventQueue.shutdown();
for (DataEndpoint dataEndpoint : dataEndpoints) {
dataEndpoint.shutdown();
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model.config;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.processor.resequencer.DefaultExchangeComparator;
import org.apache.camel.processor.resequencer.ExpressionResultComparator;
import org.apache.camel.spi.Metadata;
/**
* Configures stream-processing resequence eip.
*/
@Metadata(label = "eip,routing,resequence")
@XmlRootElement(name = "stream-config")
@XmlAccessorType(XmlAccessType.FIELD)
public class StreamResequencerConfig extends ResequencerConfig {
// Capacity of the resequencer's inbound queue. The metadata default must
// match the no-arg constructor (this(1000, 1000L)) and the class javadoc,
// which both use 1000 — the previous value "100" contradicted them.
@XmlAttribute @Metadata(defaultValue = "1000")
private Integer capacity;
// Minimum time in millis to wait for missing elements.
@XmlAttribute @Metadata(defaultValue = "1000")
private Long timeout;
// Interval in millis between redelivery attempts.
@XmlAttribute @Metadata(defaultValue = "1000")
private Long deliveryAttemptInterval;
@XmlAttribute
private Boolean ignoreInvalidExchanges;
// Comparator instance; not serialized to XML (use comparatorRef for a
// registry-based comparator instead).
@XmlTransient
private ExpressionResultComparator comparator;
// Registry reference to a comparator bean.
@XmlAttribute
private String comparatorRef;
// If true, messages older than the last delivered one cause an exception.
@XmlAttribute
private Boolean rejectOld;
/**
 * Creates a new {@link StreamResequencerConfig} instance using default
 * values for <code>capacity</code> (1000) and <code>timeout</code>
 * (1000L). Elements of the sequence are compared using the
 * {@link DefaultExchangeComparator}.
 */
public StreamResequencerConfig() {
// Delegates to the two-arg constructor with the documented defaults.
this(1000, 1000L);
}
/**
 * Creates a new {@link StreamResequencerConfig} instance using the given
 * values for <code>capacity</code> and <code>timeout</code>. Elements
 * of the sequence are compared using the {@link DefaultExchangeComparator}.
 *
 * @param capacity capacity of the resequencer's inbound queue.
 * @param timeout minimum time to wait for missing elements (messages).
 */
public StreamResequencerConfig(int capacity, long timeout) {
// Delegates with the default comparator.
this(capacity, timeout, new DefaultExchangeComparator());
}
/**
 * Creates a new {@link StreamResequencerConfig} instance using the given
 * values for <code>capacity</code> and <code>timeout</code>. Elements
 * of the sequence are compared with the given
 * {@link ExpressionResultComparator}.
 *
 * @param capacity capacity of the resequencer's inbound queue.
 * @param timeout minimum time to wait for missing elements (messages).
 * @param comparator comparator for sequence comparison
 */
public StreamResequencerConfig(int capacity, long timeout, ExpressionResultComparator comparator) {
this.capacity = capacity;
this.timeout = timeout;
this.comparator = comparator;
}
/**
* Creates a new {@link StreamResequencerConfig} instance using the given
* values for <code>capacity</code> and <code>timeout</code>. Elements
* of the sequence are compared using the {@link DefaultExchangeComparator}.
*
* @param capacity capacity of the resequencer's inbound queue.
* @param timeout minimum time to wait for missing elements (messages).
* @param rejectOld if true, throws an exception when messages older than the last delivered message are processed
*/
public StreamResequencerConfig(int capacity, long timeout, Boolean rejectOld) {
this(capacity, timeout, rejectOld, new DefaultExchangeComparator());
}
/**
* Creates a new {@link StreamResequencerConfig} instance using the given
* values for <code>capacity</code> and <code>timeout</code>. Elements
* of the sequence are compared with the given {@link ExpressionResultComparator}.
*
* @param capacity capacity of the resequencer's inbound queue.
* @param timeout minimum time to wait for missing elements (messages).
* @param rejectOld if true, throws an exception when messages older than the last delivered message are processed
* @param comparator comparator for sequence comparision
*/
public StreamResequencerConfig(int capacity, long timeout, Boolean rejectOld, ExpressionResultComparator comparator) {
this.capacity = capacity;
this.timeout = timeout;
this.rejectOld = rejectOld;
this.comparator = comparator;
}
/**
* Returns a new {@link StreamResequencerConfig} instance using default
* values for <code>capacity</code> (1000) and <code>timeout</code>
* (1000L). Elements of the sequence are compared using the
* {@link DefaultExchangeComparator}.
*
* @return a default {@link StreamResequencerConfig}.
*/
public static StreamResequencerConfig getDefault() {
return new StreamResequencerConfig();
}
public int getCapacity() {
return capacity;
}
/**
* Sets the capacity of the resequencer's inbound queue.
*/
public void setCapacity(int capacity) {
this.capacity = capacity;
}
public long getTimeout() {
return timeout;
}
/**
* Sets minimum time to wait for missing elements (messages).
*/
public void setTimeout(long timeout) {
this.timeout = timeout;
}
public Long getDeliveryAttemptInterval() {
return deliveryAttemptInterval;
}
/**
* Sets the interval in milli seconds the stream resequencer will at most wait
* while waiting for condition of being able to deliver.
*/
public void setDeliveryAttemptInterval(Long deliveryAttemptInterval) {
this.deliveryAttemptInterval = deliveryAttemptInterval;
}
public Boolean getIgnoreInvalidExchanges() {
return ignoreInvalidExchanges;
}
/**
* Whether to ignore invalid exchanges
*/
public void setIgnoreInvalidExchanges(Boolean ignoreInvalidExchanges) {
this.ignoreInvalidExchanges = ignoreInvalidExchanges;
}
public ExpressionResultComparator getComparator() {
return comparator;
}
/**
* To use a custom comparator
*/
public void setComparator(ExpressionResultComparator comparator) {
this.comparator = comparator;
}
public String getComparatorRef() {
return comparatorRef;
}
/**
* To use a custom comparator
*/
public void setComparatorRef(String comparatorRef) {
this.comparatorRef = comparatorRef;
}
/**
* If true, throws an exception when messages older than the last delivered message are processed
*/
public void setRejectOld(boolean value) {
this.rejectOld = value;
}
public Boolean getRejectOld() {
return rejectOld;
}
}
| |
/*
* Copyright 2002-2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.arsenal.framework.util;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.UndeclaredThrowableException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
/**
* Simple utility class for working with the reflection API and handling
* reflection exceptions.
*
* <p>Only intended for internal use.
*
* @author Juergen Hoeller
* @author Rob Harrop
* @author Rod Johnson
* @author Costin Leau
* @author Sam Brannen
* @author Chris Beams
* @since 1.2.2
*/
public abstract class ReflectionUtils {

    /** Pattern matching CGLIB-renamed methods of the form "CGLIB$methodName$0". */
    private static final Pattern CGLIB_RENAMED_METHOD_PATTERN = Pattern.compile("CGLIB\\$(.+)\\$\\d+");

    /**
     * Attempt to find a {@link Field field} on the supplied {@link Class} with the
     * supplied <code>name</code>. Searches all superclasses up to {@link Object}.
     * @param clazz the class to introspect
     * @param name the name of the field
     * @return the corresponding Field object, or <code>null</code> if not found
     */
    public static Field findField(Class<?> clazz, String name) {
        return findField(clazz, name, null);
    }

    /**
     * Attempt to find a {@link Field field} on the supplied {@link Class} with the
     * supplied <code>name</code> and/or {@link Class type}. Searches all superclasses
     * up to {@link Object}.
     * @param clazz the class to introspect
     * @param name the name of the field (may be <code>null</code> if type is specified)
     * @param type the type of the field (may be <code>null</code> if name is specified)
     * @return the corresponding Field object, or <code>null</code> if not found
     */
    public static Field findField(Class<?> clazz, String name, Class<?> type) {
        Assert.notNull(clazz, "Class must not be null");
        Assert.isTrue(name != null || type != null, "Either name or type of the field must be specified");
        Class<?> searchType = clazz;
        // Null check first; stop once the hierarchy reaches Object (which has no fields of interest).
        while (searchType != null && !Object.class.equals(searchType)) {
            Field[] fields = searchType.getDeclaredFields();
            for (Field field : fields) {
                if ((name == null || name.equals(field.getName())) && (type == null || type.equals(field.getType()))) {
                    return field;
                }
            }
            searchType = searchType.getSuperclass();
        }
        return null;
    }

    /**
     * Set the field represented by the supplied {@link Field field object} on the
     * specified {@link Object target object} to the specified <code>value</code>.
     * In accordance with {@link Field#set(Object, Object)} semantics, the new value
     * is automatically unwrapped if the underlying field has a primitive type.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException(Exception)}.
     * @param field the field to set
     * @param target the target object on which to set the field
     * @param value the value to set; may be <code>null</code>
     */
    public static void setField(Field field, Object target, Object value) {
        try {
            field.set(target, value);
        }
        catch (IllegalAccessException ex) {
            // handleReflectionException always throws; the statement below is unreachable
            // but satisfies the compiler's definite-assignment analysis.
            handleReflectionException(ex);
            throw new IllegalStateException("Unexpected reflection exception - " + ex.getClass().getName() + ": "
                    + ex.getMessage());
        }
    }

    /**
     * Get the field represented by the supplied {@link Field field object} on the
     * specified {@link Object target object}. In accordance with {@link Field#get(Object)}
     * semantics, the returned value is automatically wrapped if the underlying field
     * has a primitive type.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException(Exception)}.
     * @param field the field to get
     * @param target the target object from which to get the field
     * @return the field's current value
     */
    public static Object getField(Field field, Object target) {
        try {
            return field.get(target);
        }
        catch (IllegalAccessException ex) {
            handleReflectionException(ex);
            throw new IllegalStateException(
                    "Unexpected reflection exception - " + ex.getClass().getName() + ": " + ex.getMessage());
        }
    }

    /**
     * Attempt to find a {@link Method} on the supplied class with the supplied name
     * and no parameters. Searches all superclasses up to <code>Object</code>.
     * <p>Returns <code>null</code> if no {@link Method} can be found.
     * @param clazz the class to introspect
     * @param name the name of the method
     * @return the Method object, or <code>null</code> if none found
     */
    public static Method findMethod(Class<?> clazz, String name) {
        return findMethod(clazz, name, new Class[0]);
    }

    /**
     * Attempt to find a {@link Method} on the supplied class with the supplied name
     * and parameter types. Searches all superclasses up to <code>Object</code>.
     * <p>Returns <code>null</code> if no {@link Method} can be found.
     * @param clazz the class to introspect
     * @param name the name of the method
     * @param paramTypes the parameter types of the method
     * (may be <code>null</code> to indicate any signature)
     * @return the Method object, or <code>null</code> if none found
     */
    public static Method findMethod(Class<?> clazz, String name, Class<?>... paramTypes) {
        Assert.notNull(clazz, "Class must not be null");
        Assert.notNull(name, "Method name must not be null");
        Class<?> searchType = clazz;
        while (searchType != null) {
            // For interfaces use getMethods() to also pick up inherited interface methods.
            Method[] methods = (searchType.isInterface() ? searchType.getMethods() : searchType.getDeclaredMethods());
            for (Method method : methods) {
                if (name.equals(method.getName())
                        && (paramTypes == null || Arrays.equals(paramTypes, method.getParameterTypes()))) {
                    return method;
                }
            }
            searchType = searchType.getSuperclass();
        }
        return null;
    }

    /**
     * Invoke the specified {@link Method} against the supplied target object with no arguments.
     * The target object can be <code>null</code> when invoking a static {@link Method}.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException}.
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     * @return the invocation result, if any
     * @see #invokeMethod(java.lang.reflect.Method, Object, Object[])
     */
    public static Object invokeMethod(Method method, Object target) {
        return invokeMethod(method, target, new Object[0]);
    }

    /**
     * Invoke the specified {@link Method} against the supplied target object with the
     * supplied arguments. The target object can be <code>null</code> when invoking a
     * static {@link Method}.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException}.
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     * @param args the invocation arguments (may be <code>null</code>)
     * @return the invocation result, if any
     */
    public static Object invokeMethod(Method method, Object target, Object... args) {
        try {
            return method.invoke(target, args);
        }
        catch (Exception ex) {
            handleReflectionException(ex);
        }
        // handleReflectionException always rethrows, so this is unreachable.
        throw new IllegalStateException("Should never get here");
    }

    /**
     * Invoke the specified JDBC API {@link Method} against the supplied target
     * object with no arguments.
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     * @return the invocation result, if any
     * @throws SQLException the JDBC API SQLException to rethrow (if any)
     * @see #invokeJdbcMethod(java.lang.reflect.Method, Object, Object[])
     */
    public static Object invokeJdbcMethod(Method method, Object target) throws SQLException {
        return invokeJdbcMethod(method, target, new Object[0]);
    }

    /**
     * Invoke the specified JDBC API {@link Method} against the supplied target
     * object with the supplied arguments.
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     * @param args the invocation arguments (may be <code>null</code>)
     * @return the invocation result, if any
     * @throws SQLException the JDBC API SQLException to rethrow (if any)
     * @see #invokeMethod(java.lang.reflect.Method, Object, Object[])
     */
    public static Object invokeJdbcMethod(Method method, Object target, Object... args) throws SQLException {
        try {
            return method.invoke(target, args);
        }
        catch (IllegalAccessException ex) {
            handleReflectionException(ex);
        }
        catch (InvocationTargetException ex) {
            // Propagate SQLException as-is so JDBC callers can handle it;
            // everything else goes through the generic handler.
            if (ex.getTargetException() instanceof SQLException) {
                throw (SQLException) ex.getTargetException();
            }
            handleInvocationTargetException(ex);
        }
        throw new IllegalStateException("Should never get here");
    }

    /**
     * Handle the given reflection exception. Should only be called if no
     * checked exception is expected to be thrown by the target method.
     * <p>Throws the underlying RuntimeException or Error in case of an
     * InvocationTargetException with such a root cause. Throws an
     * IllegalStateException with an appropriate message else.
     * @param ex the reflection exception to handle
     */
    public static void handleReflectionException(Exception ex) {
        if (ex instanceof NoSuchMethodException) {
            throw new IllegalStateException("Method not found: " + ex.getMessage());
        }
        if (ex instanceof IllegalAccessException) {
            throw new IllegalStateException("Could not access method: " + ex.getMessage());
        }
        if (ex instanceof InvocationTargetException) {
            handleInvocationTargetException((InvocationTargetException) ex);
        }
        if (ex instanceof RuntimeException) {
            throw (RuntimeException) ex;
        }
        throw new UndeclaredThrowableException(ex);
    }

    /**
     * Handle the given invocation target exception. Should only be called if no
     * checked exception is expected to be thrown by the target method.
     * <p>Throws the underlying RuntimeException or Error in case of such a root
     * cause. Throws an IllegalStateException else.
     * @param ex the invocation target exception to handle
     */
    public static void handleInvocationTargetException(InvocationTargetException ex) {
        rethrowRuntimeException(ex.getTargetException());
    }

    /**
     * Rethrow the given {@link Throwable exception}, which is presumably the
     * <em>target exception</em> of an {@link InvocationTargetException}. Should
     * only be called if no checked exception is expected to be thrown by the
     * target method.
     * <p>Rethrows the underlying exception cast to an {@link RuntimeException} or
     * {@link Error} if appropriate; otherwise, throws an
     * {@link IllegalStateException}.
     * @param ex the exception to rethrow
     * @throws RuntimeException the rethrown exception
     */
    public static void rethrowRuntimeException(Throwable ex) {
        if (ex instanceof RuntimeException) {
            throw (RuntimeException) ex;
        }
        if (ex instanceof Error) {
            throw (Error) ex;
        }
        throw new UndeclaredThrowableException(ex);
    }

    /**
     * Rethrow the given {@link Throwable exception}, which is presumably the
     * <em>target exception</em> of an {@link InvocationTargetException}. Should
     * only be called if no checked exception is expected to be thrown by the
     * target method.
     * <p>Rethrows the underlying exception cast to an {@link Exception} or
     * {@link Error} if appropriate; otherwise, throws an
     * {@link IllegalStateException}.
     * @param ex the exception to rethrow
     * @throws Exception the rethrown exception (in case of a checked exception)
     */
    public static void rethrowException(Throwable ex) throws Exception {
        if (ex instanceof Exception) {
            throw (Exception) ex;
        }
        if (ex instanceof Error) {
            throw (Error) ex;
        }
        throw new UndeclaredThrowableException(ex);
    }

    /**
     * Determine whether the given method explicitly declares the given
     * exception or one of its superclasses, which means that an exception of
     * that type can be propagated as-is within a reflective invocation.
     * @param method the declaring method
     * @param exceptionType the exception to throw
     * @return <code>true</code> if the exception can be thrown as-is;
     * <code>false</code> if it needs to be wrapped
     */
    public static boolean declaresException(Method method, Class<?> exceptionType) {
        Assert.notNull(method, "Method must not be null");
        Class<?>[] declaredExceptions = method.getExceptionTypes();
        for (Class<?> declaredException : declaredExceptions) {
            if (declaredException.isAssignableFrom(exceptionType)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Determine whether the given field is a "public static final" constant.
     * @param field the field to check
     */
    public static boolean isPublicStaticFinal(Field field) {
        int modifiers = field.getModifiers();
        return (Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers));
    }

    /**
     * Determine whether the given method is an "equals" method.
     * @see java.lang.Object#equals(Object)
     */
    public static boolean isEqualsMethod(Method method) {
        if (method == null || !method.getName().equals("equals")) {
            return false;
        }
        Class<?>[] paramTypes = method.getParameterTypes();
        return (paramTypes.length == 1 && paramTypes[0] == Object.class);
    }

    /**
     * Determine whether the given method is a "hashCode" method.
     * @see java.lang.Object#hashCode()
     */
    public static boolean isHashCodeMethod(Method method) {
        return (method != null && method.getName().equals("hashCode") && method.getParameterTypes().length == 0);
    }

    /**
     * Determine whether the given method is a "toString" method.
     * @see java.lang.Object#toString()
     */
    public static boolean isToStringMethod(Method method) {
        return (method != null && method.getName().equals("toString") && method.getParameterTypes().length == 0);
    }

    /**
     * Determine whether the given method is originally declared by {@link java.lang.Object}.
     * <p>Returns <code>false</code> for a <code>null</code> method, consistent with
     * {@link #isEqualsMethod}, {@link #isHashCodeMethod} and {@link #isToStringMethod}.
     */
    public static boolean isObjectMethod(Method method) {
        // Fix: previously a null method caused a NullPointerException here,
        // unlike the sibling predicates which tolerate null.
        if (method == null) {
            return false;
        }
        try {
            Object.class.getDeclaredMethod(method.getName(), method.getParameterTypes());
            return true;
        } catch (SecurityException ex) {
            return false;
        } catch (NoSuchMethodException ex) {
            return false;
        }
    }

    /**
     * Determine whether the given method is a CGLIB 'renamed' method, following
     * the pattern "CGLIB$methodName$0".
     * @param renamedMethod the method to check
     * @see net.sf.cglib.proxy.Enhancer#rename
     */
    public static boolean isCglibRenamedMethod(Method renamedMethod) {
        return CGLIB_RENAMED_METHOD_PATTERN.matcher(renamedMethod.getName()).matches();
    }

    /**
     * Make the given field accessible, explicitly setting it accessible if
     * necessary. The <code>setAccessible(true)</code> method is only called
     * when actually necessary, to avoid unnecessary conflicts with a JVM
     * SecurityManager (if active).
     * @param field the field to make accessible
     * @see java.lang.reflect.Field#setAccessible
     */
    public static void makeAccessible(Field field) {
        if ((!Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass().getModifiers()) ||
                Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {
            field.setAccessible(true);
        }
    }

    /**
     * Make the given method accessible, explicitly setting it accessible if
     * necessary. The <code>setAccessible(true)</code> method is only called
     * when actually necessary, to avoid unnecessary conflicts with a JVM
     * SecurityManager (if active).
     * @param method the method to make accessible
     * @see java.lang.reflect.Method#setAccessible
     */
    public static void makeAccessible(Method method) {
        if ((!Modifier.isPublic(method.getModifiers()) || !Modifier.isPublic(method.getDeclaringClass().getModifiers()))
                && !method.isAccessible()) {
            method.setAccessible(true);
        }
    }

    /**
     * Make the given constructor accessible, explicitly setting it accessible
     * if necessary. The <code>setAccessible(true)</code> method is only called
     * when actually necessary, to avoid unnecessary conflicts with a JVM
     * SecurityManager (if active).
     * @param ctor the constructor to make accessible
     * @see java.lang.reflect.Constructor#setAccessible
     */
    public static void makeAccessible(Constructor<?> ctor) {
        if ((!Modifier.isPublic(ctor.getModifiers()) || !Modifier.isPublic(ctor.getDeclaringClass().getModifiers()))
                && !ctor.isAccessible()) {
            ctor.setAccessible(true);
        }
    }

    /**
     * Perform the given callback operation on all matching methods of the given
     * class and superclasses.
     * <p>The same named method occurring on subclass and superclass will appear
     * twice, unless excluded by a {@link MethodFilter}.
     * @param clazz class to start looking at
     * @param mc the callback to invoke for each method
     * @see #doWithMethods(Class, MethodCallback, MethodFilter)
     */
    public static void doWithMethods(Class<?> clazz, MethodCallback mc) throws IllegalArgumentException {
        doWithMethods(clazz, mc, null);
    }

    /**
     * Perform the given callback operation on all matching methods of the given
     * class and superclasses (or given interface and super-interfaces).
     * <p>The same named method occurring on subclass and superclass will appear
     * twice, unless excluded by the specified {@link MethodFilter}.
     * @param clazz class to start looking at
     * @param mc the callback to invoke for each method
     * @param mf the filter that determines the methods to apply the callback to
     */
    public static void doWithMethods(Class<?> clazz, MethodCallback mc, MethodFilter mf)
            throws IllegalArgumentException {
        // Keep backing up the inheritance hierarchy.
        Method[] methods = clazz.getDeclaredMethods();
        for (Method method : methods) {
            if (mf != null && !mf.matches(method)) {
                continue;
            }
            try {
                mc.doWith(method);
            }
            catch (IllegalAccessException ex) {
                throw new IllegalStateException("Shouldn't be illegal to access method '" + method.getName()
                        + "': " + ex);
            }
        }
        if (clazz.getSuperclass() != null) {
            doWithMethods(clazz.getSuperclass(), mc, mf);
        }
        else if (clazz.isInterface()) {
            // Interfaces have no superclass; recurse into super-interfaces instead.
            for (Class<?> superIfc : clazz.getInterfaces()) {
                doWithMethods(superIfc, mc, mf);
            }
        }
    }

    /**
     * Get all declared methods on the leaf class and all superclasses. Leaf
     * class methods are included first.
     */
    public static Method[] getAllDeclaredMethods(Class<?> leafClass) throws IllegalArgumentException {
        final List<Method> methods = new ArrayList<Method>(32);
        doWithMethods(leafClass, new MethodCallback() {
            public void doWith(Method method) {
                methods.add(method);
            }
        });
        return methods.toArray(new Method[methods.size()]);
    }

    /**
     * Get the unique set of declared methods on the leaf class and all superclasses. Leaf
     * class methods are included first and while traversing the superclass hierarchy any methods found
     * with signatures matching a method already included are filtered out.
     */
    public static Method[] getUniqueDeclaredMethods(Class<?> leafClass) throws IllegalArgumentException {
        final List<Method> methods = new ArrayList<Method>(32);
        doWithMethods(leafClass, new MethodCallback() {
            public void doWith(Method method) {
                boolean knownSignature = false;
                Method methodBeingOverriddenWithCovariantReturnType = null;
                for (Method existingMethod : methods) {
                    if (method.getName().equals(existingMethod.getName()) &&
                            Arrays.equals(method.getParameterTypes(), existingMethod.getParameterTypes())) {
                        // is this a covariant return type situation?
                        if (existingMethod.getReturnType() != method.getReturnType() &&
                                existingMethod.getReturnType().isAssignableFrom(method.getReturnType())) {
                            methodBeingOverriddenWithCovariantReturnType = existingMethod;
                        } else {
                            knownSignature = true;
                        }
                        break;
                    }
                }
                if (methodBeingOverriddenWithCovariantReturnType != null) {
                    // Prefer the narrower (covariant) return type; drop the overridden method.
                    methods.remove(methodBeingOverriddenWithCovariantReturnType);
                }
                if (!knownSignature && !isCglibRenamedMethod(method)) {
                    methods.add(method);
                }
            }
        });
        return methods.toArray(new Method[methods.size()]);
    }

    /**
     * Invoke the given callback on all fields in the target class, going up the
     * class hierarchy to get all declared fields.
     * @param clazz the target class to analyze
     * @param fc the callback to invoke for each field
     */
    public static void doWithFields(Class<?> clazz, FieldCallback fc) throws IllegalArgumentException {
        doWithFields(clazz, fc, null);
    }

    /**
     * Invoke the given callback on all fields in the target class, going up the
     * class hierarchy to get all declared fields.
     * @param clazz the target class to analyze
     * @param fc the callback to invoke for each field
     * @param ff the filter that determines the fields to apply the callback to
     */
    public static void doWithFields(Class<?> clazz, FieldCallback fc, FieldFilter ff)
            throws IllegalArgumentException {
        // Keep backing up the inheritance hierarchy.
        Class<?> targetClass = clazz;
        do {
            Field[] fields = targetClass.getDeclaredFields();
            for (Field field : fields) {
                if (ff != null && !ff.matches(field)) {
                    continue;
                }
                try {
                    fc.doWith(field);
                }
                catch (IllegalAccessException ex) {
                    throw new IllegalStateException(
                            "Shouldn't be illegal to access field '" + field.getName() + "': " + ex);
                }
            }
            targetClass = targetClass.getSuperclass();
        }
        while (targetClass != null && targetClass != Object.class);
    }

    /**
     * Given the source object and the destination, which must be the same class
     * or a subclass, copy all fields, including inherited fields. Designed to
     * work on objects with public no-arg constructors.
     * @throws IllegalArgumentException if the arguments are incompatible
     */
    public static void shallowCopyFieldState(final Object src, final Object dest) throws IllegalArgumentException {
        if (src == null) {
            throw new IllegalArgumentException("Source for field copy cannot be null");
        }
        if (dest == null) {
            throw new IllegalArgumentException("Destination for field copy cannot be null");
        }
        if (!src.getClass().isAssignableFrom(dest.getClass())) {
            throw new IllegalArgumentException("Destination class [" + dest.getClass().getName()
                    + "] must be same or subclass as source class [" + src.getClass().getName() + "]");
        }
        doWithFields(src.getClass(), new FieldCallback() {
            public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException {
                makeAccessible(field);
                Object srcValue = field.get(src);
                field.set(dest, srcValue);
            }
        }, COPYABLE_FIELDS);
    }

    /**
     * Action to take on each method.
     */
    public interface MethodCallback {
        /**
         * Perform an operation using the given method.
         * @param method the method to operate on
         */
        void doWith(Method method) throws IllegalArgumentException, IllegalAccessException;
    }

    /**
     * Callback optionally used to filter methods to be operated on by a method callback.
     */
    public interface MethodFilter {
        /**
         * Determine whether the given method matches.
         * @param method the method to check
         */
        boolean matches(Method method);
    }

    /**
     * Callback interface invoked on each field in the hierarchy.
     */
    public interface FieldCallback {
        /**
         * Perform an operation using the given field.
         * @param field the field to operate on
         */
        void doWith(Field field) throws IllegalArgumentException, IllegalAccessException;
    }

    /**
     * Callback optionally used to filter fields to be operated on by a field callback.
     */
    public interface FieldFilter {
        /**
         * Determine whether the given field matches.
         * @param field the field to check
         */
        boolean matches(Field field);
    }

    /**
     * Pre-built FieldFilter that matches all non-static, non-final fields.
     * <p>Declared <code>final</code>: these shared constants must not be reassignable.
     */
    public static final FieldFilter COPYABLE_FIELDS = new FieldFilter() {
        public boolean matches(Field field) {
            return !(Modifier.isStatic(field.getModifiers()) || Modifier.isFinal(field.getModifiers()));
        }
    };

    /**
     * Pre-built MethodFilter that matches all non-bridge methods.
     */
    public static final MethodFilter NON_BRIDGED_METHODS = new MethodFilter() {
        public boolean matches(Method method) {
            return !method.isBridge();
        }
    };

    /**
     * Pre-built MethodFilter that matches all non-bridge methods
     * which are not declared on <code>java.lang.Object</code>.
     */
    public static final MethodFilter USER_DECLARED_METHODS = new MethodFilter() {
        public boolean matches(Method method) {
            return (!method.isBridge() && method.getDeclaringClass() != Object.class);
        }
    };
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.timeseriesinsights.v2017_11_15.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in Operations.
*/
public class OperationsInner {
/** The Retrofit service to perform REST calls. */
private OperationsService service;
/** The service client containing this operation class. */
private TimeSeriesInsightsClientImpl client;
/**
* Initializes an instance of OperationsInner.
*
* @param retrofit the Retrofit instance built from a Retrofit Builder.
* @param client the instance of the service client containing this operation class.
*/
public OperationsInner(Retrofit retrofit, TimeSeriesInsightsClientImpl client) {
    // Keep a handle on the owning client; it supplies api-version,
    // accept-language and user-agent for every call.
    this.client = client;
    // Build the strongly-typed Retrofit proxy for the Operations REST endpoints.
    this.service = retrofit.create(OperationsService.class);
}
/**
* The interface defining all the services for Operations to be
* used by Retrofit to perform actually REST calls.
*/
interface OperationsService {
    // Fetches the first page of operations exposed by the
    // Microsoft.TimeSeriesInsights resource provider.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.timeseriesinsights.v2017_11_15.Operations list" })
    @GET("providers/Microsoft.TimeSeriesInsights/operations")
    Observable<Response<ResponseBody>> list(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    // Follows a server-supplied nextLink (an opaque, fully-qualified URL)
    // to retrieve a subsequent page of operations.
    @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.timeseriesinsights.v2017_11_15.Operations listNext" })
    @GET
    Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
}
/**
* Lists all of the available Time Series Insights related operations.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the PagedList<OperationInner> object if successful.
*/
public PagedList<OperationInner> list() {
    // Block for the first page, then expose lazy synchronous paging over the rest.
    final ServiceResponse<Page<OperationInner>> firstPage =
            listSinglePageAsync().toBlocking().single();
    return new PagedList<OperationInner>(firstPage.body()) {
        @Override
        public Page<OperationInner> nextPage(String nextPageLink) {
            // Each subsequent page is fetched on demand and unwrapped.
            ServiceResponse<Page<OperationInner>> next =
                    listNextSinglePageAsync(nextPageLink).toBlocking().single();
            return next.body();
        }
    };
}
/**
* Lists all of the available Time Series Insights related operations.
*
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/
public ServiceFuture<List<OperationInner>> listAsync(final ListOperationCallback<OperationInner> serviceCallback) {
    // Pager invoked by the framework whenever a nextLink needs to be followed.
    final Func1<String, Observable<ServiceResponse<Page<OperationInner>>>> nextPageFetcher =
            new Func1<String, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            };
    return AzureServiceFuture.fromPageResponse(listSinglePageAsync(), nextPageFetcher, serviceCallback);
}
/**
* Lists all of the available Time Series Insights related operations.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PagedList<OperationInner> object
*/
public Observable<Page<OperationInner>> listAsync() {
    // Strip the ServiceResponse wrapper and surface only the page payload.
    final Func1<ServiceResponse<Page<OperationInner>>, Page<OperationInner>> unwrapBody =
            new Func1<ServiceResponse<Page<OperationInner>>, Page<OperationInner>>() {
                @Override
                public Page<OperationInner> call(ServiceResponse<Page<OperationInner>> response) {
                    return response.body();
                }
            };
    return listWithServiceResponseAsync().map(unwrapBody);
}
/**
* Lists all of the available Time Series Insights related operations.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the observable to the PagedList<OperationInner> object
*/
public Observable<ServiceResponse<Page<OperationInner>>> listWithServiceResponseAsync() {
return listSinglePageAsync()
.concatMap(new Func1<ServiceResponse<Page<OperationInner>>, Observable<ServiceResponse<Page<OperationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<OperationInner>>> call(ServiceResponse<Page<OperationInner>> page) {
String nextPageLink = page.body().nextPageLink();
if (nextPageLink == null) {
return Observable.just(page);
}
return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
}
});
}
/**
* Lists all of the available Time Series Insights related operations.
*
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<OperationInner> object wrapped in {@link ServiceResponse} if successful.
*/
public Observable<ServiceResponse<Page<OperationInner>>> listSinglePageAsync() {
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.list(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<OperationInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<OperationInner>> result = listDelegate(response);
return Observable.just(new ServiceResponse<Page<OperationInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
private ServiceResponse<PageImpl<OperationInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
return this.client.restClient().responseBuilderFactory().<PageImpl<OperationInner>, CloudException>newInstance(this.client.serializerAdapter())
.register(200, new TypeToken<PageImpl<OperationInner>>() { }.getType())
.registerError(CloudException.class)
.build(response);
}
/**
* Lists all of the available Time Series Insights related operations.
*
* @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @throws CloudException thrown if the request is rejected by server
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @return the PagedList<OperationInner> object if successful.
*/
    public PagedList<OperationInner> listNext(final String nextPageLink) {
        // Blocking continuation: fetches the page identified by nextPageLink and
        // wires up lazy retrieval of any further pages.
        ServiceResponse<Page<OperationInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<OperationInner>(response.body()) {
            @Override
            public Page<OperationInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Lists all of the available Time Series Insights related operations.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<OperationInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<OperationInner>> serviceFuture, final ListOperationCallback<OperationInner> serviceCallback) {
        // Callback-style continuation of an in-flight paged enumeration.
        return AzureServiceFuture.fromPageResponse(
            listNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Lists all of the available Time Series Insights related operations.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList<OperationInner> object
     */
    public Observable<Page<OperationInner>> listNextAsync(final String nextPageLink) {
        // Convenience overload: strips the ServiceResponse wrapper, emitting raw pages.
        return listNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<OperationInner>>, Page<OperationInner>>() {
                @Override
                public Page<OperationInner> call(ServiceResponse<Page<OperationInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Lists all of the available Time Series Insights related operations.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList<OperationInner> object
     */
    public Observable<ServiceResponse<Page<OperationInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
        // Recursively follows nextPageLink until the server reports no further pages.
        return listNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<OperationInner>>, Observable<ServiceResponse<Page<OperationInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<OperationInner>>> call(ServiceResponse<Page<OperationInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
/**
* Lists all of the available Time Series Insights related operations.
*
     * @param nextPageLink The NextLink from the previous successful call to List operation.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the PagedList<OperationInner> object wrapped in {@link ServiceResponse} if successful.
*/
public Observable<ServiceResponse<Page<OperationInner>>> listNextSinglePageAsync(final String nextPageLink) {
if (nextPageLink == null) {
throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
}
String nextUrl = String.format("%s", nextPageLink);
return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<OperationInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<OperationInner>> result = listNextDelegate(response);
return Observable.just(new ServiceResponse<Page<OperationInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
    // Validates the HTTP response of a continuation request: only 200 is accepted;
    // anything else is raised as CloudException.
    private ServiceResponse<PageImpl<OperationInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<OperationInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<OperationInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
}
| |
/* TestSampledQuantile.java
*
* Copyright 2009-2019 Comcast Interactive Media, LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fishwife.jrugged;
import static org.junit.Assert.*;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@link SampledQuantile}: basic percentile/quantile math,
 * sample-count capping, and time-windowed sample expiry across TimeUnits.
 */
public class TestSampledQuantile {
    private SampledQuantile impl;

    @Before
    public void setUp() {
        impl = new SampledQuantile();
    }

    @Test
    public void quantileWithNoSamplesShouldReturnZero() {
        assertEquals(0, impl.getPercentile(50));
    }

    @Test
    public void quantileWithOneSampleShouldReturnThatSample() {
        impl.addSample(42);
        assertEquals(42, impl.getPercentile(50));
    }

    @Test
    public void medianOfThreeSamplesIsMiddleSample() {
        impl.addSample(42);
        impl.addSample(41);
        impl.addSample(43);
        assertEquals(42, impl.getPercentile(50));
    }

    @Test
    public void medianOfFiveSamplesWithRepeatsStillWorks() {
        impl.addSample(41);
        impl.addSample(43);
        impl.addSample(42);
        impl.addSample(41);
        impl.addSample(43);
        assertEquals(42, impl.getPercentile(50));
    }

    @Test
    public void medianOfTwoSamplesIsTheirAverage() {
        impl.addSample(41);
        impl.addSample(43);
        assertEquals(42, impl.getPercentile(50));
    }

    @Test
    public void canGetMedianAsExpressedInQuantiles() {
        impl.addSample(42);
        impl.addSample(41);
        impl.addSample(43);
        // 1st of 2 quantiles == the median
        assertEquals(42, impl.getQuantile(1, 2));
    }

    @Test
    public void canGetMedianDirectly() {
        impl.addSample(42);
        impl.addSample(41);
        impl.addSample(43);
        assertEquals(42, impl.getMedian());
    }

    @Test
    public void zerothQuantileShouldThrowException() {
        impl.addSample(41);
        try {
            impl.getQuantile(0, 7);
            fail("should have thrown exception");
        } catch (SampledQuantile.QuantileOutOfBoundsException expected) {
        }
    }

    @Test
    public void qthQuantileShouldThrowException() {
        impl.addSample(41);
        try {
            impl.getQuantile(7, 7);
            fail("should have thrown exception");
        } catch (SampledQuantile.QuantileOutOfBoundsException expected) {
        }
    }

    @Test
    public void canSpecifyMaxSamples() {
        impl = new SampledQuantile(10);
        for (int i = 0; i < 20; i++) impl.addSample(0);
        assertEquals(10, impl.getNumSamples());
    }

    @Test
    public void canSpecifyCurrentTimeWhenAddingSample() {
        impl.addSample(41, System.currentTimeMillis());
        // Previously this test asserted nothing; verify the sample was recorded.
        assertEquals(41, impl.getPercentile(50));
    }

    @Test
    public void ignoresSamplesOutsideOfSpecifiedSecondWindow() {
        impl = new SampledQuantile(60, TimeUnit.SECONDS);
        long now = System.currentTimeMillis();
        impl.addSample(7, now - 90 * 1000L);
        impl.addSample(42, now);
        assertEquals(42, impl.getPercentile(50, now + 1));
    }

    @Test
    public void ignoresSamplesOutsideOfSpecifiedNanosecondWindow() {
        impl = new SampledQuantile(60 * 1000000000L, TimeUnit.NANOSECONDS);
        long now = System.currentTimeMillis();
        impl.addSample(7, now - 90 * 1000L);
        impl.addSample(42, now);
        assertEquals(42, impl.getPercentile(50, now + 1));
    }

    @Test
    public void ignoresSamplesOutsideOfSpecifiedMicrosecondWindow() {
        impl = new SampledQuantile(60 * 1000000L, TimeUnit.MICROSECONDS);
        long now = System.currentTimeMillis();
        impl.addSample(7, now - 90 * 1000L);
        impl.addSample(42, now);
        assertEquals(42, impl.getPercentile(50, now + 1));
    }

    @Test
    public void ignoresSamplesOutsideOfSpecifiedMillisecondWindow() {
        impl = new SampledQuantile(60 * 1000L, TimeUnit.MILLISECONDS);
        long now = System.currentTimeMillis();
        impl.addSample(7, now - 90 * 1000L);
        impl.addSample(42, now);
        assertEquals(42, impl.getPercentile(50, now + 1));
    }

    @Test
    public void ignoresSamplesOutsideOfSpecifiedMinuteWindow() {
        // Same 60-second window as before, but expressed in MINUTES so the
        // constructor's unit conversion is actually exercised, matching the test name.
        impl = new SampledQuantile(1L, TimeUnit.MINUTES);
        long now = System.currentTimeMillis();
        impl.addSample(7, now - 90 * 1000L);
        impl.addSample(42, now);
        assertEquals(42, impl.getPercentile(50, now + 1));
    }

    @Test
    public void ignoresSamplesOutsideOfSpecifiedHourWindow() {
        // 1 hour expressed in HOURS (previously 3600 SECONDS) — identical duration.
        impl = new SampledQuantile(1L, TimeUnit.HOURS);
        long now = System.currentTimeMillis();
        impl.addSample(7, now - 5400 * 1000L);
        impl.addSample(42, now);
        assertEquals(42, impl.getPercentile(50, now + 1));
    }

    @Test
    public void ignoresSamplesOutsideOfSpecifiedDayWindow() {
        // 1 day expressed in DAYS (previously 86400 SECONDS) — identical duration.
        impl = new SampledQuantile(1L, TimeUnit.DAYS);
        long now = System.currentTimeMillis();
        impl.addSample(7, now - 2 * 24 * 3600 * 1000L);
        impl.addSample(42, now);
        assertEquals(42, impl.getPercentile(50, now + 1));
    }

    @Test
    public void windowedSamplingWorks() {
        long t0 = System.currentTimeMillis();
        impl = new SampledQuantile(10, 60L, TimeUnit.SECONDS, t0);
        for (int t = 0; t < 30 * 1000; t++) {
            impl.addSample(1L, t0 + t);
        }
        long t1 = t0 + 30 * 1000L;
        assertEquals(1L, impl.getPercentile(50, t1));
        for (int t = 0; t < 60 * 1000; t++) {
            impl.addSample(2L, t1 + t);
        }
        long t2 = t1 + 60 * 1000L;
        // By t2 every 1L sample has aged out of the 60s window.
        assertEquals(2L, impl.getPercentile(50, t2));
        impl.addSample(3L, t2 + 1);
    }

    @Test
    public void windowedSamplingHandlesLongTimesBetweenSamples() {
        long t0 = System.currentTimeMillis();
        impl = new SampledQuantile(10, 60L, TimeUnit.SECONDS, t0);
        impl.addSample(1L, t0 + 1);
        long t1 = t0 + 90 * 1000L;
        impl.addSample(2L, t1);
        // The first sample is >60s old at t1, so only 2L remains in the window.
        assertEquals(2L, impl.getPercentile(50, t1));
    }
}
| |
package i5.las2peer.services.codeGenerationService.models.frontendComponent;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import i5.cae.simpleModel.SimpleEntityAttribute;
import i5.cae.simpleModel.SimpleModel;
import i5.cae.simpleModel.edge.SimpleEdge;
import i5.cae.simpleModel.node.SimpleNode;
import i5.las2peer.services.codeGenerationService.exception.ModelParseException;
/**
*
* FrontendComponent data class. Currently, edges are only used for creating simple 1 to 1
* dependencies between objects, without any attributes added to them.
*
*/
public class FrontendComponent {
  // SyncMeta node id of the (single) Widget node of the model
  private String widgetModelId;
  // name of the model itself
  private String name;
  private String widgetName;
  // model version; kept as a String, validated during parsing
  private String version;
  private String widgetDescription;
  private String widgetDeveloperName;
  private String widgetDeveloperMail;
  // widget dimensions in pixels
  private int widgetWidth;
  private int widgetHeight;
  // base address of the microservice this frontend component talks to
  private String microserviceAddress;
  // HTML elements of the widget, keyed by SyncMeta node id
  private HashMap<String, HtmlElement> htmlElements;
  // functions of the widget, keyed by SyncMeta node id
  private HashMap<String, Function> functions;
  // true if at least one HTML element is a custom (Polymer) element
  private boolean hasPolymerElements = false;
/**
*
* Creates a new frontend component.
*
* @param model a {@link i5.cae.simpleModel.SimpleModel} containing the frontend component
*
* @throws ModelParseException if something goes wrong during parsing
*
*/
public FrontendComponent(SimpleModel model) throws ModelParseException {
this.htmlElements = new HashMap<String, HtmlElement>();
this.functions = new HashMap<String, Function>();
// some helper fields to check model for correctness
// used to find (possible) duplicate (HTML) ids and report them
ArrayList<String> tempIds = new ArrayList<String>();
// used to first parse all nodes and later add them to their corresponding "parent objects"
HashMap<String, Event> tempEvents = new HashMap<String, Event>();
HashMap<String, InputParameter> tempParameters = new HashMap<String, InputParameter>();
HashMap<String, IWCResponse> tempIwcResponses = new HashMap<String, IWCResponse>();
HashMap<String, IWCCall> tempIwcCalls = new HashMap<String, IWCCall>();
HashMap<String, MicroserviceCall> tempMicroserviceCalls =
new HashMap<String, MicroserviceCall>();
this.name = model.getName();
// metadata of model (currently only version)
for (int attributeIndex = 0; attributeIndex < model.getAttributes().size(); attributeIndex++) {
if (model.getAttributes().get(attributeIndex).getName().equals("version")) {
try {
this.setVersion(model.getAttributes().get(attributeIndex).getValue());
} catch (NumberFormatException e) {
throw new ModelParseException("FrontendComponent version is not a number!");
}
}
}
// go through the nodes and create objects
ArrayList<SimpleNode> nodes = model.getNodes();
for (int nodeIndex = 0; nodeIndex < nodes.size(); nodeIndex++) {
SimpleNode node = nodes.get(nodeIndex);
ArrayList<SimpleEntityAttribute> nodeAttributes = node.getAttributes();
switch (node.getType()) {
case "Widget":
if (this.widgetModelId == null) {
this.widgetModelId = node.getId();
} else {
throw new ModelParseException("More than one Widget in FrontendComponent model");
}
for (int attributeIndex = 0; attributeIndex < nodeAttributes.size(); attributeIndex++) {
SimpleEntityAttribute attribute = nodeAttributes.get(attributeIndex);
switch (attribute.getName()) {
case "name":
this.widgetName = attribute.getValue();
break;
case "description":
this.widgetDescription = attribute.getValue();
break;
case "developerName":
this.widgetDeveloperName = attribute.getValue();
break;
case "developerMail":
this.widgetDeveloperMail = attribute.getValue();
break;
case "height":
try {
this.widgetHeight = Integer.parseInt(attribute.getValue());
} catch (NumberFormatException e) {
throw new ModelParseException("Widget height is not a number!");
}
break;
case "width":
try {
this.widgetWidth = Integer.parseInt(attribute.getValue());
break;
} catch (NumberFormatException e) {
throw new ModelParseException("Widget width is not a number!");
}
case "microserviceAddress":
this.microserviceAddress = attribute.getValue();
break;
default:
throw new ModelParseException(
"Unknown attribute type of Widget: " + attribute.getName());
}
}
break;
case "HTML Element":
HtmlElement element = new HtmlElement(node);
//element is a polymer element
if(element.getType().equals(HtmlElement.ElementType.CUSTOM))
this.setHasPolymerElements(true);
this.htmlElements.put(node.getId(), element);
if (tempIds.contains(element.getModelId())) {
throw new ModelParseException("Duplicate id found: " + element.getModelId());
}
tempIds.add(element.getModelId());
break;
case "Event":
Event event = new Event(node);
tempEvents.put(node.getId(), event);
break;
case "Function":
Function function = new Function(node);
this.functions.put(node.getId(), function);
break;
case "Input Parameter":
InputParameter parameter = new InputParameter(node);
tempParameters.put(node.getId(), parameter);
break;
case "IWC Response":
IWCResponse response = new IWCResponse(node);
tempIwcResponses.put(node.getId(), response);
break;
case "IWC Call":
IWCCall call = new IWCCall(node);
tempIwcCalls.put(node.getId(), call);
break;
case "Microservice Call":
MicroserviceCall microserviceCall = new MicroserviceCall(node);
tempMicroserviceCalls.put(node.getId(), microserviceCall);
break;
default:
throw new ModelParseException("Unknown node type: " + node.getType());
}
}
// edges
ArrayList<SimpleEdge> edges = model.getEdges();
// helper variables to check for correct edges
int htmlElementCount = this.htmlElements.size();
int functionCount = this.functions.size();
for (int edgeIndex = 0; edgeIndex < edges.size(); edgeIndex++) {
String currentEdgeSource = edges.get(edgeIndex).getSourceNode();
String currentEdgeTarget = edges.get(edgeIndex).getTargetNode();
String currentEdgeType = edges.get(edgeIndex).getType();
switch (currentEdgeType) {
case "Widget to HTML Element":
if (!this.widgetModelId.equals(currentEdgeSource)
|| !this.htmlElements.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Widget to HTML edge!");
}
htmlElementCount--;
break;
case "Element Update":
if (!this.functions.containsKey(currentEdgeSource)
|| !this.htmlElements.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Element Update edge!");
}
// check if element is not static
if (this.htmlElements.get(currentEdgeTarget).isStaticElement()) {
throw new ModelParseException("Static HtmlElements cannot be updated by functions: "
+ this.htmlElements.get(currentEdgeTarget).getId());
}
this.functions.get(currentEdgeSource).addHtmlElementUpdates(currentEdgeTarget);
break;
case "Element Creation":
if (!this.functions.containsKey(currentEdgeSource)
|| !this.htmlElements.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Element Creation edge!");
}
// check if element is not static
if (this.htmlElements.get(currentEdgeTarget).isStaticElement()) {
throw new ModelParseException("Static HtmlElements cannot be created by functions: "
+ this.htmlElements.get(currentEdgeTarget).getId());
}
this.functions.get(currentEdgeSource).addHtmlElementCreations(currentEdgeTarget);
break;
case "HTML Element to Event":
if (!this.htmlElements.containsKey(currentEdgeSource)
|| !tempEvents.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong HTML Element to Event edge!");
}
this.htmlElements.get(currentEdgeSource).addEvent(tempEvents.get(currentEdgeTarget));
tempEvents.remove(currentEdgeTarget);
break;
case "Parameter Connection":
// check if parameter is there
if (!tempParameters.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Parameter Connection edge!");
}
// check for function connection
if (this.functions.containsKey(currentEdgeSource)) {
this.functions.get(currentEdgeSource)
.addInputParameter(tempParameters.get(currentEdgeTarget));
}
// if not, check for microservice connection
// first, check if call is still in tempMicroserviceCalls
else if (tempMicroserviceCalls.containsKey(currentEdgeSource)) {
tempMicroserviceCalls.get(currentEdgeSource)
.addInputParameter(tempParameters.get(currentEdgeTarget));
// if not, check if in list of a function
} else {
boolean found = false;
for (Function function : this.functions.values()) {
for (MicroserviceCall call : function.getMicroserviceCalls()) {
if (call.getModelId().equals(currentEdgeSource)) {
call.addInputParameter(tempParameters.get(currentEdgeTarget));
found = true;
}
}
}
// if not, the parameter connection is invalid
if (!found) {
throw new ModelParseException("Wrong Parameter Connection edge!");
}
}
tempParameters.remove(currentEdgeTarget);
break;
case "Waits for":
if (!this.functions.containsKey(currentEdgeSource)
|| !tempIwcResponses.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Waits for edge!");
}
this.functions.get(currentEdgeSource)
.addIwcResponse(tempIwcResponses.get(currentEdgeTarget));
tempIwcResponses.remove(currentEdgeTarget);
break;
case "Initiates":
if (!this.functions.containsKey(currentEdgeSource)
|| !tempIwcCalls.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Initiates edge!");
}
this.functions.get(currentEdgeSource).addIwcCall(tempIwcCalls.get(currentEdgeTarget));
tempIwcCalls.remove(currentEdgeTarget);
break;
case "Event to Function Call":
// check if function exists
if (!this.functions.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Event to Function Call!");
}
// check if event is still in tempEvent list
if (tempEvents.containsKey(currentEdgeSource)) {
tempEvents.get(currentEdgeSource).setCalledFunctionId(currentEdgeTarget);
break;
} else {
boolean found = false;
// now we need to check already parsed events..
for (HtmlElement element : this.htmlElements.values()) {
for (Event event : element.getEvents()) {
if (event.getModelId().equals(currentEdgeSource)) {
event.setCalledFunctionId(currentEdgeTarget);
found = true;
}
}
}
if (!found) {
throw new ModelParseException("Wrong Event to Function Call!");
}
}
break;
case "Function To Microservice Call":
if (!this.functions.containsKey(currentEdgeSource)
|| !tempMicroserviceCalls.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Function To Microservice Call edge!");
}
this.functions.get(currentEdgeSource)
.addMicroserviceCall(tempMicroserviceCalls.get(currentEdgeTarget));
tempMicroserviceCalls.remove(currentEdgeTarget);
break;
case "Widget to Function":
if (!this.widgetModelId.equals(currentEdgeSource)
|| !this.functions.containsKey(currentEdgeTarget)) {
throw new ModelParseException("Wrong Widget to Function edge!");
}
functionCount--;
break;
case "hasChild":
//actually not needed, consider removing this edge
if(!htmlElements.containsKey(currentEdgeSource) || !htmlElements.containsKey(currentEdgeTarget)){
throw new ModelParseException("Wrong hasChild edge");
}
HtmlElement parent = htmlElements.get(currentEdgeSource);
HtmlElement child = htmlElements.get(currentEdgeTarget);
parent.addChildren(child);
child.setParent(parent);
break;
default:
throw new ModelParseException("Unknown frontend component edge type: " + currentEdgeType);
}
}
// only one widget allowed (checked previously), no multiple edges between two objects in
// SyncMeta -> element count must be zero now if all elements are connected to the widget
// also, all temp lists should be empty by now
if (htmlElementCount != 0 || functionCount != 0 || !tempEvents.isEmpty()
|| !tempParameters.isEmpty() || !tempIwcResponses.isEmpty() || !tempIwcCalls.isEmpty()
|| !tempMicroserviceCalls.isEmpty()) {
throw new ModelParseException("Model not fully connected!");
}
// check functions (now complete with all IWC events, microservice calls and input parameters)
// for semantical correctness
for (Function function : this.functions.values()) {
function.checkCorrectness();
}
}
  // --- plain accessors; no validation is performed here ---
  public String getWidgetModelId() {
    return widgetModelId;
  }
  public void setWidgetModelId(String widgetId) {
    this.widgetModelId = widgetId;
  }
  public String getName() {
    return this.name;
  }
  public void setName(String name) {
    this.name = name;
  }
  public String getWidgetName() {
    return this.widgetName;
  }
  public void setWidgetName(String name) {
    this.widgetName = name;
  }
  public String getVersion() {
    return this.version;
  }
  public void setVersion(String version) {
    this.version = version;
  }
  public String getWidgetDescription() {
    return widgetDescription;
  }
  public void setWidgetDescription(String widgetDescription) {
    this.widgetDescription = widgetDescription;
  }
  public String getWidgetDeveloperName() {
    return widgetDeveloperName;
  }
  public void setWidgetDeveloperName(String widgetDeveloperName) {
    this.widgetDeveloperName = widgetDeveloperName;
  }
  public String getWidgetDeveloperMail() {
    return widgetDeveloperMail;
  }
  public void setWidgetDeveloperMail(String widgetDeveloperMail) {
    this.widgetDeveloperMail = widgetDeveloperMail;
  }
  public int getWidgetWidth() {
    return widgetWidth;
  }
  public void setWidgetWidth(int widgetWidth) {
    this.widgetWidth = widgetWidth;
  }
  public int getWidgetHeight() {
    return widgetHeight;
  }
  public void setWidgetHeight(int widgetHeight) {
    this.widgetHeight = widgetHeight;
  }
  // NOTE: returns the internal (mutable) map, keyed by SyncMeta node id
  public Map<String, HtmlElement> getHtmlElements() {
    return this.htmlElements;
  }
  public void setHtmlElements(HashMap<String, HtmlElement> htmlElements) {
    this.htmlElements = htmlElements;
  }
  // NOTE: returns the internal (mutable) map, keyed by SyncMeta node id
  public Map<String, Function> getFunctions() {
    return this.functions;
  }
  public void setFunctions(HashMap<String, Function> functions) {
    this.functions = functions;
  }
  public String getMicroserviceAddress() {
    return microserviceAddress;
  }
  public void setMicroserviceAddress(String microserviceAddress) {
    this.microserviceAddress = microserviceAddress;
  }
  // true if any HTML element of this widget is a custom (Polymer) element
  public boolean hasPolymerElements(){
    return this.hasPolymerElements;
  }
  // only set during parsing; intentionally private
  private void setHasPolymerElements(boolean hasPolymerElements){
    this.hasPolymerElements = hasPolymerElements;
  }
}
| |
package org.cobbzilla.util.json;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.io.JsonStringEncoder;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.node.*;
import org.cobbzilla.util.io.FileSuffixFilter;
import org.cobbzilla.util.io.FileUtil;
import org.cobbzilla.util.io.FilenameSuffixFilter;
import org.cobbzilla.util.io.StreamUtil;
import java.io.*;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import static org.cobbzilla.util.daemon.ZillaRuntime.*;
public class JsonUtil {
public static final String EMPTY_JSON = "{}";
public static final String EMPTY_JSON_ARRAY = "[]";
public static final ThreadLocal<Boolean> verboseErrors = new ThreadLocal<>();
public static boolean verboseErrors() { return verboseErrors.get() != null && verboseErrors.get(); }
public static final JsonNode MISSING = MissingNode.getInstance();
public static final FileFilter JSON_FILES = new FileSuffixFilter(".json");
public static final FilenameFilter JSON_FILENAMES = new FilenameSuffixFilter(".json");
public static final ObjectMapper COMPACT_MAPPER = new ObjectMapper()
.setSerializationInclusion(JsonInclude.Include.NON_NULL);
public static final ObjectMapper FULL_MAPPER = new ObjectMapper()
.configure(SerializationFeature.INDENT_OUTPUT, true);
public static final ObjectWriter FULL_WRITER = FULL_MAPPER.writer();
public static final ObjectMapper FULL_MAPPER_ALLOW_COMMENTS = new ObjectMapper()
.configure(SerializationFeature.INDENT_OUTPUT, true);
static {
FULL_MAPPER_ALLOW_COMMENTS.getFactory().enable(JsonParser.Feature.ALLOW_COMMENTS);
}
public static final ObjectMapper FULL_MAPPER_ALLOW_COMMENTS_AND_UNKNOWN_FIELDS = new ObjectMapper()
.configure(SerializationFeature.INDENT_OUTPUT, true)
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
static {
FULL_MAPPER_ALLOW_COMMENTS_AND_UNKNOWN_FIELDS.getFactory().enable(JsonParser.Feature.ALLOW_COMMENTS);
}
public static final ObjectMapper FULL_MAPPER_ALLOW_UNKNOWN_FIELDS = new ObjectMapper()
.configure(SerializationFeature.INDENT_OUTPUT, true)
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
static {
FULL_MAPPER_ALLOW_UNKNOWN_FIELDS.getFactory().enable(JsonParser.Feature.ALLOW_COMMENTS);
}
public static final ObjectMapper NOTNULL_MAPPER = FULL_MAPPER
.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false)
.setSerializationInclusion(JsonInclude.Include.NON_NULL);
public static final ObjectMapper NOTNULL_MAPPER_ALLOW_EMPTY = FULL_MAPPER
.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, false)
.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
.setSerializationInclusion(JsonInclude.Include.NON_NULL);
public static final ObjectMapper PUBLIC_MAPPER = buildMapper();
public static final ObjectWriter PUBLIC_WRITER = buildWriter(PUBLIC_MAPPER, PublicView.class);
    /** Builds a fresh mapper: pretty-printed, view-aware, null-omitting, tolerant of empty beans. */
    public static ObjectMapper buildMapper() {
        return new ObjectMapper()
                .configure(MapperFeature.DEFAULT_VIEW_INCLUSION, false)
                .configure(SerializationFeature.INDENT_OUTPUT, true)
                .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false)
                .setSerializationInclusion(JsonInclude.Include.NON_NULL);
    }
    /** Builds a writer on a fresh mapper, restricted to the given JSON view. */
    public static ObjectWriter buildWriter(Class<? extends PublicView> view) {
        return buildMapper().writerWithView(view);
    }
    /** Builds a view-restricted writer on the supplied mapper. */
    public static ObjectWriter buildWriter(ObjectMapper mapper, Class<? extends PublicView> view) {
        return mapper.writerWithView(view);
    }
    /** Creates an empty array node using FULL_MAPPER's node factory. */
    public static ArrayNode newArrayNode() { return new ArrayNode(FULL_MAPPER.getNodeFactory()); }
    /** Creates an empty object node using FULL_MAPPER's node factory. */
    public static ObjectNode newObjectNode() { return new ObjectNode(FULL_MAPPER.getNodeFactory()); }
public static String find(JsonNode array, String name, String value, String returnValue) {
if (array instanceof ArrayNode) {
for (int i=0; i<array.size(); i++) {
final JsonNode n = array.get(i).get(name);
if (n != null && n.textValue().equals(value)) {
final JsonNode valNode = array.get(i).get(returnValue);
return valNode == null ? null : valNode.textValue();
}
}
}
return null;
}
    /**
     * Re-serializes a JSON string with default pretty printing.
     * Dies (via ZillaRuntime.die) if the input cannot be parsed.
     */
    public static String prettyJson(String json) {
        try {
            // parse to a JsonNode first, then write it back indented
            return FULL_MAPPER.writerWithDefaultPrettyPrinter().writeValueAsString(json(json, JsonNode.class));
        } catch (Exception e) {
            return die("prettyPrint: "+e);
        }
    }
public static String json_html(Object value) { return json_html(value, null); }
public static String json_html(Object value, ObjectMapper m) {
return (m == null ? json(value) : json(value, m)).replace("&", "&").replace("<", "<").replace(">", ">").replace(" ", " ").replace("\n", "<br/>");
}
    /** Marker class for Jackson {@code @JsonView}-based field filtering. */
    public static class PublicView {}
    /** Serializes with NOTNULL_MAPPER; propagates Jackson exceptions. */
    public static String toJson (Object o) throws Exception { return toJson(o, NOTNULL_MAPPER); }
    /** Serializes with the supplied mapper; propagates Jackson exceptions. */
    public static String toJson (Object o, ObjectMapper m) throws Exception { return m.writeValueAsString(o); }
    /** Serializes with NOTNULL_MAPPER; dies instead of throwing. */
    public static String json (Object o) { return toJsonOrDie(o); }
    /** Serializes with the supplied mapper; dies instead of throwing. */
    public static String json (Object o, ObjectMapper m) { return toJsonOrDie(o, m); }
    public static String toJsonOrDie (Object o) {
        try { return toJson(o); } catch (Exception e) {
            // die with the full cause only when verbose errors are enabled on this thread
            final String msg = "toJson: exception writing object (" + o + "): " + shortError(e);
            return verboseErrors() ? die(msg, e) : die(msg);
        }
    }
    public static String toJsonOrDie (Object o, ObjectMapper m) {
        try { return toJson(o, m); } catch (Exception e) {
            final String msg = "toJson: exception writing object (" + o + "): " + shortError(e);
            return verboseErrors() ? die(msg, e) : die(msg);
        }
    }
    /** Serializes with NOTNULL_MAPPER; on failure returns the exception text instead of dying. */
    public static String toJsonOrErr(Object o) {
        try { return toJson(o); } catch (Exception e) {
            return e.toString();
        }
    }
// Cache of per-view ObjectWriters, keyed by the view class name.
private static Map<String, ObjectWriter> viewWriters = new ConcurrentHashMap<>();

/**
 * Returns (building and caching on first use) an ObjectWriter restricted to the
 * given Jackson view class.
 */
protected static ObjectWriter viewWriter(Class jsonView) {
    ObjectWriter w = viewWriters.get(jsonView.getName());
    if (w == null) {
        // copy() before disable(): the original called disable() directly on the shared
        // NOTNULL_MAPPER, permanently mutating its global configuration for all callers.
        w = JsonUtil.NOTNULL_MAPPER.copy().disable(MapperFeature.DEFAULT_VIEW_INCLUSION).writerWithView(jsonView);
        // putIfAbsent closes the check-then-act race: if two threads build the same
        // writer concurrently, everyone keeps the first one that landed in the cache
        final ObjectWriter prior = viewWriters.putIfAbsent(jsonView.getName(), w);
        if (prior != null) w = prior;
    }
    return w;
}
/** Serializes {@code o} through the cached writer for the given Jackson view class. */
public static String toJson (Object o, Class jsonView) throws Exception {
    return viewWriter(jsonView).writeValueAsString(o);
}

/** View-scoped serialization that dies instead of throwing. */
public static String toJsonOrDie (Object o, Class jsonView) {
    try { return toJson(o, jsonView); } catch (Exception e) {
        final String msg = "toJson: exception writing object (" + o + "): " + shortError(e);
        return verboseErrors() ? die(msg, e) : die(msg);
    }
}

/** View-scoped serialization that returns the error text on failure. */
public static String toJsonOrErr(Object o, Class jsonView) {
    try { return toJson(o, jsonView); } catch (Exception e) {
        return e.toString();
    }
}
/** Reads the entire stream into a String, then parses it as {@code clazz}. */
public static <T> T fromJson(InputStream json, Class<T> clazz) throws Exception {
    return fromJson(StreamUtil.toString(json), clazz);
}

/** Stream variant with an explicit mapper. */
public static <T> T fromJson(InputStream json, Class<T> clazz, ObjectMapper mapper) throws Exception {
    return fromJson(StreamUtil.toString(json), clazz, mapper);
}

/** Reads the whole file and parses it as {@code clazz}. */
public static <T> T fromJson(File json, Class<T> clazz) throws Exception {
    return fromJson(FileUtil.toString(json), clazz);
}

/** File variant with an explicit mapper. */
public static <T> T fromJson(File json, Class<T> clazz, ObjectMapper mapper) throws Exception {
    return fromJson(FileUtil.toString(json), clazz, mapper);
}

/** Parses with FULL_MAPPER; empty input yields null. */
public static <T> T fromJson(String json, Class<T> clazz) throws Exception {
    return fromJson(json, clazz, JsonUtil.FULL_MAPPER);
}

/** Parses into a Jackson JavaType (for generic targets); empty input yields null. */
public static <T> T fromJson(String json, JavaType type) throws Exception {
    if (empty(json)) return null;
    return JsonUtil.FULL_MAPPER.readValue(json, type);
}

/**
 * Core string parser. Empty input yields null. When the caller asks for a plain
 * String, a bare (unquoted) input is wrapped in quotes first so readValue accepts it.
 */
public static <T> T fromJson(String json, Class<T> clazz, ObjectMapper mapper) throws Exception {
    if (empty(json)) return null;
    if (clazz == String.class && !(json.startsWith("\"") && json.endsWith("\""))) {
        json = "\"" + json + "\"";
    }
    return mapper.readValue(json, clazz);
}

/** File variant of fromJsonOrDie: dies on read or parse failure. */
public static <T> T fromJsonOrDie(File json, Class<T> clazz) {
    return fromJsonOrDie(FileUtil.toStringOrDie(json), clazz);
}
// Short "parse or die" aliases for the fromJsonOrDie family.
public static <T> T json(String json, Class<T> clazz) { return fromJsonOrDie(json, clazz); }
public static <T> T json(String json, Class<T> clazz, ObjectMapper mapper) { return fromJsonOrDie(json, clazz, mapper); }
public static <T> T json(JsonNode json, Class<T> clazz) { return fromJsonOrDie(json, clazz); }

/** Converts each node of the array to {@code clazz}, preserving order. */
public static <T> List<T> json(JsonNode[] json, Class<T> clazz) {
    final List<T> list = new ArrayList<>();
    for (JsonNode node : json) list.add(json(node, clazz));
    return list;
}

// Variants that parse with the comment-tolerant mapper (accepts // and /* */ in input).
public static <T> T jsonWithComments(String json, Class<T> clazz) { return fromJsonOrDie(json, clazz, FULL_MAPPER_ALLOW_COMMENTS); }
public static <T> T jsonWithComments(JsonNode json, Class<T> clazz) { return fromJsonOrDie(json(json), clazz, FULL_MAPPER_ALLOW_COMMENTS); }
/** Parses JSON into {@code clazz} with FULL_MAPPER, dying (instead of throwing) on failure. */
public static <T> T fromJsonOrDie(String json, Class<T> clazz) {
    return fromJsonOrDie(json, clazz, FULL_MAPPER);
}

/** Parses JSON into {@code clazz} with the supplied mapper; null/empty input yields null. */
public static <T> T fromJsonOrDie(String json, Class<T> clazz, ObjectMapper mapper) {
    if (empty(json)) return null;
    T parsed;
    try {
        parsed = mapper.readValue(json, clazz);
    } catch (IOException ioe) {
        final String msg = "fromJsonOrDie: exception while reading: " + json + ": " + shortError(ioe);
        parsed = verboseErrors() ? die(msg, ioe) : die(msg);
    }
    return parsed;
}
/** Parses {@code json}, walks to {@code path}, and converts that node to {@code clazz}. */
public static <T> T fromJson(String json, String path, Class<T> clazz) throws Exception {
    return fromJson(FULL_MAPPER.readTree(json), path, clazz);
}

/** File variant of the path-walking parse. */
public static <T> T fromJson(File json, String path, Class<T> clazz) throws Exception {
    return fromJson(FULL_MAPPER.readTree(json), path, clazz);
}

/** Converts a whole node (empty path) to {@code childClass}. */
public static <T> T fromJson(JsonNode child, Class<? extends T> childClass) throws Exception {
    return fromJson(child, "", childClass);
}

/** Whole-node conversion that dies instead of throwing. */
public static <T> T fromJsonOrDie(JsonNode child, Class<? extends T> childClass) {
    return fromJsonOrDie(child, "", childClass);
}

// Default-mapper delegates for the path-walking converters below.
public static <T> T fromJsonOrDie(JsonNode node, String path, Class<T> clazz) {
    return fromJsonOrDie(node, path, clazz, FULL_MAPPER);
}

public static <T> T fromJson(JsonNode node, String path, Class<T> clazz) throws Exception {
    return fromJson(node, path, clazz, FULL_MAPPER);
}

/** Walks {@code path} below {@code node} and converts the result, dying on failure. */
public static <T> T fromJsonOrDie(JsonNode node, String path, Class<T> clazz, ObjectMapper mapper) {
    try {
        return fromJson(node, path, clazz, mapper);
    } catch (Exception e) {
        final String msg = "fromJsonOrDie: exception while reading: " + node + ": " + shortError(e);
        return verboseErrors() ? die(msg, e) : die(msg);
    }
}

/** Walks {@code path} below {@code node} and converts the found node to {@code clazz}. */
public static <T> T fromJson(JsonNode node, String path, Class<T> clazz, ObjectMapper mapper) throws Exception {
    node = findNode(node, path);
    return mapper.convertValue(node, clazz);
}
/**
 * Resolves {@code path} below {@code node}.
 *
 * @return the node at the path, or null when the path does not exist
 *         (the MISSING sentinel produced by findNodePath is translated to null)
 */
public static JsonNode findNode(JsonNode node, String path) throws IOException {
    if (node == null || path == null) return null;
    final List<JsonNode> nodePath = findNodePath(node, path);
    if (nodePath == null || nodePath.isEmpty()) return null;
    final JsonNode lastNode = nodePath.get(nodePath.size()-1);
    return lastNode == MISSING ? null : lastNode;
}

/** Serializes any node (or object) with FULL_MAPPER; null stays null. */
public static String toString(Object node) throws JsonProcessingException {
    return node == null ? null : FULL_MAPPER.writeValueAsString(node);
}

/** @return the value at {@code path} as an unquoted String, or null when absent */
public static String nodeValue (JsonNode node, String path) throws IOException {
    return fromJsonOrDie(toString(findNode(node, path)), String.class);
}
/**
 * Walks a dotted path (see {@link #tokenize}) below {@code node}, collecting every
 * node visited on the way, starting with {@code node} itself.
 *
 * Array steps use the form {@code name[index]}; an empty-bracket step {@code name[]}
 * marks an "append here" position and is reported as MISSING. When any step cannot
 * be resolved, the MISSING sentinel is appended and the partial path returned —
 * callers test the last element against MISSING (see findNode / replaceNode).
 */
public static List<JsonNode> findNodePath(JsonNode node, String path) throws IOException {
    final List<JsonNode> nodePath = new ArrayList<>();
    nodePath.add(node);
    if (empty(path)) return nodePath;
    final List<String> pathParts = tokenize(path);
    for (String pathPart : pathParts) {
        int index = -1;
        int bracketPos = pathPart.indexOf("[");
        int bracketClosePos = pathPart.indexOf("]");
        boolean isEmptyBrackets = false;
        if (bracketPos != -1 && bracketClosePos != -1 && bracketClosePos > bracketPos) {
            if (bracketClosePos == bracketPos+1) {
                // ends with [], they mean to append
                isEmptyBrackets = true;
            } else {
                index = Integer.parseInt(pathPart.substring(bracketPos + 1, bracketClosePos));
            }
            // strip the [...] suffix; the bare name is looked up first, then the index
            pathPart = pathPart.substring(0, bracketPos);
        }
        if (!empty(pathPart)) {
            node = node.get(pathPart);
            if (node == null) {
                // field does not exist: record MISSING and stop walking
                nodePath.add(MISSING);
                return nodePath;
            }
            nodePath.add(node);
        } else if (nodePath.size() > 1) {
            // a bare "[i]" step is only legal as the very first path element
            return die("findNodePath: invalid path: "+path);
        }
        if (index != -1) {
            // descend into the array element as well
            // NOTE(review): get(index) may return null for an out-of-range index,
            // which is then added to the path — confirm callers tolerate a null element
            node = node.get(index);
            nodePath.add(node);
        } else if (isEmptyBrackets) {
            nodePath.add(MISSING);
            return nodePath;
        }
    }
    return nodePath;
}
/**
 * Splits a dotted path into its parts. Single-quoted segments may contain literal
 * dots, e.g. {@code a.'b.c'} tokenizes to ["a", "b.c"].
 *
 * @throws IllegalArgumentException when a single quote is left unterminated
 */
public static List<String> tokenize(String path) {
    final List<String> parts = new ArrayList<>();
    final StringTokenizer tokens = new StringTokenizer(path, ".'", true);
    StringBuilder current = new StringBuilder();
    boolean inQuotes = false;
    while (tokens.hasMoreTokens()) {
        final String tok = tokens.nextToken();
        if ("'".equals(tok)) {
            inQuotes = !inQuotes;           // quotes only toggle quoting, never emitted
            continue;
        }
        if (inQuotes) {
            current.append(tok);            // inside quotes, even '.' is literal
            continue;
        }
        if (".".equals(tok) && current.length() > 0) {
            parts.add(current.toString());  // flush the completed part
            current.setLength(0);
        } else {
            current.append(tok);
        }
    }
    if (inQuotes) throw new IllegalArgumentException("Unterminated single quote in: "+path);
    if (current.length() > 0) parts.add(current.toString());
    return parts;
}
/** Parses the file and replaces the scalar at {@code path} with {@code replacement}. */
public static ObjectNode replaceNode(File file, String path, String replacement) throws Exception {
    return replaceNode((ObjectNode) FULL_MAPPER.readTree(file), path, replacement);
}

/** Parses the JSON string and replaces the scalar at {@code path}. */
public static ObjectNode replaceNode(String json, String path, String replacement) throws Exception {
    return replaceNode((ObjectNode) FULL_MAPPER.readTree(json), path, replacement);
}

/**
 * Replaces the scalar at {@code path} inside {@code document}, in place, preserving
 * the original node's concrete type (the replacement string is parsed into the same
 * kind of ValueNode — see getValueNode).
 *
 * @return the same (mutated) document
 * @throws IllegalArgumentException when the path does not resolve
 */
public static ObjectNode replaceNode(ObjectNode document, String path, String replacement) throws Exception {
    // last path segment, e.g. "c[2]" for path "a.b.c[2]"
    final String simplePath = path.contains(".") ? path.substring(path.lastIndexOf(".")+1) : path;
    Integer index = null;
    if (simplePath.contains("[")) {
        index = Integer.parseInt(simplePath.substring(simplePath.indexOf("[")+1, simplePath.indexOf("]")));
    }
    final List<JsonNode> found = findNodePath(document, path);
    if (found == null || found.isEmpty() || found.get(found.size()-1).equals(MISSING)) {
        throw new IllegalArgumentException("path not found: "+path);
    }
    // the container we mutate is the parent of the resolved target node
    final JsonNode parent = found.size() > 1 ? found.get(found.size()-2) : document;
    if (index != null) {
        final JsonNode origNode = parent.get(index);
        ((ArrayNode) parent).set(index, getValueNode(origNode, path, replacement));
    } else {
        // what is the original node type?
        final JsonNode origNode = parent.get(simplePath);
        ((ObjectNode) parent).set(simplePath, getValueNode(origNode, path, replacement));
    }
    return document;
}
/**
 * Builds a replacement ValueNode of the same concrete type as {@code node},
 * parsing {@code replacement} accordingly. Dies when the node is not a scalar
 * or is of an unsupported scalar type.
 */
public static JsonNode getValueNode(JsonNode node, String path, String replacement) {
    // guard first: the original dereferenced node.getClass() before any check,
    // so a null node produced a bare NPE instead of a useful error message
    if (node == null) return die("Path "+path+" does not refer to a value (node is null)");
    final String nodeClass = node.getClass().getName();
    if ( ! (node instanceof ValueNode) ) die("Path "+path+" does not refer to a value (it is a "+ nodeClass +")");
    if (node instanceof TextNode) return new TextNode(replacement);
    if (node instanceof BooleanNode) return BooleanNode.valueOf(Boolean.parseBoolean(replacement));
    if (node instanceof IntNode) return new IntNode(Integer.parseInt(replacement));
    if (node instanceof LongNode) return new LongNode(Long.parseLong(replacement));
    if (node instanceof DoubleNode) return new DoubleNode(Double.parseDouble(replacement));
    if (node instanceof DecimalNode) return new DecimalNode(big(replacement));
    if (node instanceof BigIntegerNode) return new BigIntegerNode(new BigInteger(replacement));
    return die("Path "+path+" refers to an unsupported ValueNode: "+ nodeClass);
}
/**
 * Recursively converts a Jackson tree into plain Java objects: ArrayNode to
 * Object[], ObjectNode to Map&lt;String,Object&gt;, scalars to their natural wrapper
 * types, NullNode/null to null. Non-scalar nodes of unrecognized kinds are
 * returned unchanged; unsupported scalar types die.
 * {@code path} is threaded through purely for error reporting.
 */
public static Object getNodeAsJava(JsonNode node, String path) {
    if (node == null || node instanceof NullNode) return null;
    final String nodeClass = node.getClass().getName();
    if (node instanceof ArrayNode) {
        final Object[] array = new Object[node.size()];
        for (int i=0; i<node.size(); i++) {
            array[i] = getNodeAsJava(node.get(i), path+"["+i+"]");
        }
        return array;
    }
    if (node instanceof ObjectNode) {
        final Map<String, Object> map = new HashMap<>(node.size());
        for (Iterator<String> iter = node.fieldNames(); iter.hasNext(); ) {
            final String name = iter.next();
            map.put(name, getNodeAsJava(node.get(name), path+"."+name));
        }
        return map;
    }
    if ( ! (node instanceof ValueNode) ) return node; // return as-is...
    // instanceof order matters below only in that all are mutually exclusive scalars
    if (node instanceof TextNode) return node.textValue();
    if (node instanceof BooleanNode) return node.booleanValue();
    if (node instanceof IntNode) return node.intValue();
    if (node instanceof LongNode) return node.longValue();
    if (node instanceof DoubleNode) return node.doubleValue();
    if (node instanceof DecimalNode) return node.decimalValue();
    if (node instanceof BigIntegerNode) return node.bigIntegerValue();
    return die("Path "+path+" refers to an unsupported ValueNode: "+ nodeClass);
}
/**
 * Wraps a plain Java value in the corresponding Jackson ValueNode.
 * Dies for unsupported types.
 */
public static JsonNode getValueNode(Object data) {
    if (data == null) return NullNode.getInstance();
    // String case was missing in the original, so plain strings always died even
    // though TextNode is produced elsewhere in this class (see the other getValueNode)
    if (data instanceof String) return new TextNode((String) data);
    if (data instanceof Integer) return new IntNode((Integer) data);
    if (data instanceof Boolean) return BooleanNode.valueOf((Boolean) data);
    if (data instanceof Long) return new LongNode((Long) data);
    if (data instanceof Float) return new DoubleNode((Float) data); // widened to double
    if (data instanceof Double) return new DoubleNode((Double) data);
    if (data instanceof BigDecimal) return new DecimalNode((BigDecimal) data);
    if (data instanceof BigInteger) return new BigIntegerNode((BigInteger) data);
    return die("Cannot create value node from: "+data+" (type "+data.getClass().getName()+")");
}
/** Reads a file and parses it into a generic JsonNode tree, dying on failure. */
public static JsonNode toNode (File f) { return fromJsonOrDie(FileUtil.toStringOrDie(f), JsonNode.class); }
// adapted from: https://stackoverflow.com/a/11459962/1251543
/**
 * Deep-merges {@code updateNode} into {@code mainNode}, in place: object-valued
 * fields present in both are merged recursively; all other fields from updateNode
 * overwrite (or are added to) mainNode.
 *
 * @return the mutated {@code mainNode}
 */
public static JsonNode mergeNodes(JsonNode mainNode, JsonNode updateNode) {
    final Iterator<String> fieldNames = updateNode.fieldNames();
    while (fieldNames.hasNext()) {
        final String fieldName = fieldNames.next();
        final JsonNode jsonNode = mainNode.get(fieldName);
        // if field exists and is an embedded object
        if (jsonNode != null && jsonNode.isObject()) {
            mergeNodes(jsonNode, updateNode.get(fieldName));
        } else {
            if (mainNode instanceof ObjectNode) {
                // Overwrite field
                final JsonNode value = updateNode.get(fieldName);
                ((ObjectNode) mainNode).set(fieldName, value);
            }
        }
    }
    return mainNode;
}
/** String/String merge that dies instead of throwing. */
public static String mergeJsonOrDie(String json, String request) {
    try {
        return mergeJson(json, request);
    } catch (Exception e) {
        final String msg = "mergeJsonOrDie: " + shortError(e);
        return verboseErrors() ? die(msg, e) : die(msg);
    }
}

/** Merges the {@code request} JSON string into {@code json} and re-serializes. */
public static String mergeJson(String json, String request) throws Exception {
    return mergeJson(json, fromJson(request, JsonNode.class));
}

/** Merges an arbitrary object (converted to a tree) into {@code json} and re-serializes. */
public static String mergeJson(String json, Object request) throws Exception {
    return json(mergeJsonNodes(json, request));
}

/**
 * Merges {@code request} into the tree parsed from {@code json}.
 * When request is null, just parses json; when json is null, converts request alone.
 */
public static JsonNode mergeJsonNodes(String json, Object request) throws Exception {
    if (request != null) {
        if (json != null) {
            final JsonNode current = fromJson(json, JsonNode.class);
            final JsonNode update;
            if (request instanceof JsonNode) {
                update = (JsonNode) request;
            } else {
                // non-node requests are converted through the public-view mapper
                update = PUBLIC_MAPPER.valueToTree(request);
            }
            mergeNodes(current, update);
            return current;
        } else {
            return PUBLIC_MAPPER.valueToTree(request);
        }
    }
    return json(json, JsonNode.class);
}

/** Node-merging variant that dies instead of throwing. */
public static JsonNode mergeJsonNodesOrDie(String json, Object request) {
    try {
        return mergeJsonNodes(json, request);
    } catch (Exception e) {
        final String msg = "mergeJsonNodesOrDie: " + shortError(e);
        return verboseErrors() ? die(msg, e) : die(msg);
    }
}

/** String/Object merge that dies instead of throwing. */
public static String mergeJsonOrDie(String json, Object request) {
    try {
        return mergeJson(json, request);
    } catch (Exception e) {
        final String msg = "mergeJsonOrDie: " + shortError(e);
        return verboseErrors() ? die(msg, e) : die(msg);
    }
}
/** @return the shared Jackson JsonStringEncoder for escaping JSON string content */
public static JsonStringEncoder getJsonStringEncoder() { return JsonStringEncoder.getInstance(); }

/**
 * Backslash-escapes regex metacharacters in {@code val} so it can be embedded
 * literally in a pattern.
 * NOTE(review): the character class does not include the backslash itself, so a
 * literal '\' in the input is left active as a regex escape — confirm whether
 * callers ever pass one (java.util.regex.Pattern.quote would be the safe alternative).
 */
public static String jsonQuoteRegex (String val) {
    return val.replaceAll("([-/^$*+?.()|\\[\\]{}])", "\\\\$1");
}
}
| |
/*
* This file is generated by jOOQ.
*/
package com.showka.table.public_.tables;
import com.showka.table.public_.Keys;
import com.showka.table.public_.Public;
import com.showka.table.public_.tables.records.T_NYUKIN_RECORD;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Name;
import org.jooq.Schema;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.DSL;
import org.jooq.impl.TableImpl;
/**
 * This class is generated by jOOQ.
 *
 * Table definition for <code>PUBLIC.T_NYUKIN</code>.
 * NOTE: generated code — do not edit by hand; regeneration will overwrite changes.
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.10.6"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class T_NYUKIN extends TableImpl<T_NYUKIN_RECORD> {

    private static final long serialVersionUID = 663929624;

    /**
     * The reference instance of <code>PUBLIC.T_NYUKIN</code>
     */
    public static final T_NYUKIN t_nyukin = new T_NYUKIN();

    /**
     * The class holding records for this type
     */
    @Override
    public Class<T_NYUKIN_RECORD> getRecordType() {
        return T_NYUKIN_RECORD.class;
    }

    // --- common audit/identity columns -------------------------------------

    /**
     * The column <code>PUBLIC.T_NYUKIN.ID</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> id = createField("ID", org.jooq.impl.SQLDataType.VARCHAR(255).nullable(false), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.CREATE_FUNCTION</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> create_function = createField("CREATE_FUNCTION", org.jooq.impl.SQLDataType.VARCHAR(2147483647).nullable(false).defaultValue(org.jooq.impl.DSL.field("'default'", org.jooq.impl.SQLDataType.VARCHAR)), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.CREATE_TIMESTAMP</code>.
     */
    public final TableField<T_NYUKIN_RECORD, LocalDateTime> create_timestamp = createField("CREATE_TIMESTAMP", org.jooq.impl.SQLDataType.LOCALDATETIME.nullable(false).defaultValue(org.jooq.impl.DSL.field("CURRENT_TIMESTAMP()", org.jooq.impl.SQLDataType.LOCALDATETIME)), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.CREATE_USER_ID</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> create_user_id = createField("CREATE_USER_ID", org.jooq.impl.SQLDataType.VARCHAR(2147483647).nullable(false).defaultValue(org.jooq.impl.DSL.field("'default'", org.jooq.impl.SQLDataType.VARCHAR)), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.RECORD_ID</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> record_id = createField("RECORD_ID", org.jooq.impl.SQLDataType.VARCHAR(255).nullable(false), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.UPDATE_FUNCTION</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> update_function = createField("UPDATE_FUNCTION", org.jooq.impl.SQLDataType.VARCHAR(2147483647).nullable(false).defaultValue(org.jooq.impl.DSL.field("'default'", org.jooq.impl.SQLDataType.VARCHAR)), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.UPDATE_TIMESTAMP</code>.
     */
    public final TableField<T_NYUKIN_RECORD, LocalDateTime> update_timestamp = createField("UPDATE_TIMESTAMP", org.jooq.impl.SQLDataType.LOCALDATETIME.nullable(false).defaultValue(org.jooq.impl.DSL.field("CURRENT_TIMESTAMP()", org.jooq.impl.SQLDataType.LOCALDATETIME)), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.UPDATE_USER_ID</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> update_user_id = createField("UPDATE_USER_ID", org.jooq.impl.SQLDataType.VARCHAR(2147483647).nullable(false).defaultValue(org.jooq.impl.DSL.field("'default'", org.jooq.impl.SQLDataType.VARCHAR)), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.VERSION</code>.
     */
    public final TableField<T_NYUKIN_RECORD, Integer> version = createField("VERSION", org.jooq.impl.SQLDataType.INTEGER.nullable(false).defaultValue(org.jooq.impl.DSL.field("1", org.jooq.impl.SQLDataType.INTEGER)), this, "");

    // --- business columns ---------------------------------------------------

    /**
     * The column <code>PUBLIC.T_NYUKIN.BUSHO_ID</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> busho_id = createField("BUSHO_ID", org.jooq.impl.SQLDataType.VARCHAR(255).nullable(false), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.DATE</code>.
     */
    public final TableField<T_NYUKIN_RECORD, LocalDateTime> date = createField("DATE", org.jooq.impl.SQLDataType.LOCALDATETIME.nullable(false), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.KINGAKU</code>.
     */
    public final TableField<T_NYUKIN_RECORD, Integer> kingaku = createField("KINGAKU", org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.KOKYAKU_ID</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> kokyaku_id = createField("KOKYAKU_ID", org.jooq.impl.SQLDataType.VARCHAR(255).nullable(false), this, "");

    /**
     * The column <code>PUBLIC.T_NYUKIN.NYUKIN_HOHO_KUBUN</code>.
     */
    public final TableField<T_NYUKIN_RECORD, String> nyukin_hoho_kubun = createField("NYUKIN_HOHO_KUBUN", org.jooq.impl.SQLDataType.VARCHAR(2).nullable(false), this, "");

    /**
     * Create a <code>PUBLIC.T_NYUKIN</code> table reference
     */
    public T_NYUKIN() {
        this(DSL.name("T_NYUKIN"), null);
    }

    /**
     * Create an aliased <code>PUBLIC.T_NYUKIN</code> table reference
     */
    public T_NYUKIN(String alias) {
        this(DSL.name(alias), t_nyukin);
    }

    /**
     * Create an aliased <code>PUBLIC.T_NYUKIN</code> table reference
     */
    public T_NYUKIN(Name alias) {
        this(alias, t_nyukin);
    }

    private T_NYUKIN(Name alias, Table<T_NYUKIN_RECORD> aliased) {
        this(alias, aliased, null);
    }

    private T_NYUKIN(Name alias, Table<T_NYUKIN_RECORD> aliased, Field<?>[] parameters) {
        super(alias, null, aliased, parameters, "");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Schema getSchema() {
        return Public.PUBLIC;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UniqueKey<T_NYUKIN_RECORD> getPrimaryKey() {
        return Keys.CONSTRAINT_4;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<UniqueKey<T_NYUKIN_RECORD>> getKeys() {
        return Arrays.<UniqueKey<T_NYUKIN_RECORD>>asList(Keys.CONSTRAINT_4, Keys.UK_L792YLAIHK8HQ7S2FG0EPWB70);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<ForeignKey<T_NYUKIN_RECORD, ?>> getReferences() {
        return Arrays.<ForeignKey<T_NYUKIN_RECORD, ?>>asList(Keys.FK373T5DKXS4W8AX6S8YKTKCYF1, Keys.FKEBDCJX6BN1VW9767JNL3HIFC3);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public T_NYUKIN as(String alias) {
        return new T_NYUKIN(DSL.name(alias), this);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public T_NYUKIN as(Name alias) {
        return new T_NYUKIN(alias, this);
    }

    /**
     * Rename this table
     */
    @Override
    public T_NYUKIN rename(String name) {
        return new T_NYUKIN(DSL.name(name), null);
    }

    /**
     * Rename this table
     */
    @Override
    public T_NYUKIN rename(Name name) {
        return new T_NYUKIN(name, null);
    }
}
| |
package org.sanelib.ils.core.dao;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Restrictions;
import org.hibernate.transform.Transformers;
import org.sanelib.ils.core.domain.entity.Constants;
import org.sanelib.ils.core.domain.entity.DBValue;
import org.sanelib.ils.core.domain.entity.DomainEntity;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.util.List;
@SuppressWarnings({"rawtypes", "unchecked"})
public abstract class EntityRepositoryBase<T extends DomainEntity> implements EntityRepository<T> {
protected Class<T> entityClass;
public EntityRepositoryBase() {
ParameterizedType genericSuperclass = (ParameterizedType) getClass().getGenericSuperclass();
this.entityClass = (Class<T>) genericSuperclass.getActualTypeArguments()[0];
}
private UnitOfWork unitOfWork;
@Autowired
public void setUnitOfWork(final UnitOfWork unitOfWork) {
this.unitOfWork = unitOfWork;
}
public Session getSession() {
return unitOfWork.getCurrentSession();
}
@Override
public List<T> loadAll() {
Criteria criteria = getSession().createCriteria(entityClass);
return (List<T>) criteria.list();
}
@Override
public T load(final Serializable id) {
return (T) getSession().load(entityClass, id);
}
@Override
public T get(final Serializable id) {
return (T) getSession().get(entityClass, id);
}
@Override
public void save(final T entity) {
getSession().save(entity);
}
@Override
public void update(final T entity) {
getSession().update(entity);
}
@Override
public void saveOrUpdate(final T entity) {
getSession().saveOrUpdate(entity);
}
@Override
public T merge(final T entity) {
return (T) getSession().merge(entity);
}
@Override
public void remove(final T entity) {
getSession().delete(entity);
}
private void setResultColumnNames(final String[] resultColumnNames, SQLQuery query) {
if (resultColumnNames != null) {
for (String colName : resultColumnNames) {
query.addScalar(colName);
}
}
}
private void validateParams(final String[] names, final Object[] values) {
if (names == null || values == null) {
throw new IllegalArgumentException("Both names & values must be provided.");
}
if (names.length != values.length) {
throw new IllegalArgumentException(
"Both names and values should have same number of elements. " + names.length + " names and " + values.length
+ " values were found");
}
}
@Override
public List<T> executeQuery(final String queryString, final String[] paramValues) {
return executeQuery(queryString, paramValues, null, null);
}
@Override
public List<T> executeQuery(final String queryString, final String[] paramValues, final Class<T> returnType) {
return executeQuery(queryString, paramValues, null, returnType);
}
@Override
public List<T> executeQuery(final String queryString, final String[] paramValues, final String[] resultColumnNames) {
return executeQuery(queryString, paramValues, resultColumnNames, null);
}
@Override
public List<T> executeQuery(final String queryString, final String[] paramValues, final String[] resultColumnNames, final Class<T> returnType) {
SQLQuery query = getSession().createSQLQuery(queryString);
if (returnType != null) {
query.addEntity(returnType);
}
if (paramValues != null) {
setParamValues(paramValues, query);
}
if (resultColumnNames != null) {
setResultColumnNames(resultColumnNames, query);
}
return (List<T>) query.list();
}
@Override
public List<T> executeQueryObject(final String queryString, Class clazz) {
if (clazz.equals(String.class)) {
return getSession().createSQLQuery(queryString).list();
}
return getSession().createSQLQuery(queryString).setResultTransformer(Transformers.aliasToBean(clazz)).list();
}
@Override
public List<T> executeQueryObject(final DetachedCriteria detachedCriteria) {
Criteria criteria = detachedCriteria.getExecutableCriteria(getSession());
return criteria.list();
}
private void setParamValues(final String[] paramValues, SQLQuery query) {
if (paramValues != null && paramValues.length > 0) {
for (int liCnt = 0; liCnt < paramValues.length; liCnt++) {
query.setString(liCnt, paramValues[liCnt]);
}
}
}
@Override
public int executeUpdate(final String queryString, final String[] paramValues) {
SQLQuery query = getSession().createSQLQuery(queryString);
setParamValues(paramValues, query);
return query.executeUpdate();
}
@Override
public T getSafeSingleEntityByColumnAndValue(final String columnName, final Object columnValue) {
return getSafeSingleEntityByColumnAndValue(new String[] {columnName}, new Object[] {columnValue});
}
@Override
public T getSafeSingleEntityByColumnAndValue(final String[] columnNames, final Object[] columnValues) {
List<T> list = findByColumnAndValue(columnNames, columnValues, 0, 1);
if (!list.isEmpty()) {
return list.get(0);
}
return null;
}
private void prepareCriteriaWithPagination(final String[] columnNames, final Object[] columnValues, final int offset, final int size, Criteria criteria, boolean useLike) {
prepareCriteria(columnNames, columnValues, criteria, useLike);
addPaginationCriteria(offset, size, criteria);
}
private void prepareCriteria(final String[] columnNames, final Object[] columnValues, Criteria criteria, boolean useLike) {
for (int i = 0; i < columnNames.length; i++) {
if (columnValues[i] == null || columnValues[i] == DBValue.Null) {
criteria.add(Restrictions.isNull(columnNames[i]));
} else if (columnValues[i] == DBValue.NotNull) {
criteria.add(Restrictions.isNotNull(columnNames[i]));
} else {
criteria.add(
useLike ? Restrictions.like(columnNames[i], columnValues[i]) : Restrictions.eq(columnNames[i], columnValues[i]));
}
}
}
private void addPaginationCriteria(final int offset, final int size, Criteria criteria) {
if (offset >= 0) {
criteria.setFirstResult(offset);
}
if (size > 0) {
criteria.setMaxResults(size);
}
}
@Override
public List<T> findByColumnAndValue(final String columnName, final Object columnValue) {
return findByColumnAndValue(columnName, columnValue, -1, 0);
}
@Override
public List<T> findByColumnAndValue(final String columnName, final Object columnValue, final int offset, final int size) {
return findByColumnAndValue(new String[] {columnName}, new Object[] {columnValue}, offset, size);
}
@Override
public List<T> findByColumnAndValue(final String[] columnNames, final Object[] columnValues) {
return findByColumnAndValue(columnNames, columnValues, -1, 0);
}
@Override
public List<T> findByColumnAndValue(final String[] columnNames, final Object[] columnValues, final int offset, final int size) {
validateParams(columnNames, columnValues);
Criteria criteria = getSession().createCriteria(entityClass);
prepareCriteriaWithPagination(columnNames, columnValues, offset, size, criteria, false);
return (List<T>) criteria.list();
}
@Override
public List<T> findColumnAndValueByOrder(final String[] columnNames, final Object[] columnValues, final String orderColumnName,
final boolean isLikeFlag, final String orderType) {
Criteria criteria = getSession().createCriteria(entityClass);
prepareOrderByCriteria(criteria, columnNames, columnValues, orderColumnName, isLikeFlag, orderType);
return (List<T>) criteria.list();
}
private void prepareOrderByCriteria(Criteria criteria, final String[] columnNames, final Object[] columnValues, final String orderColumnName, final boolean isLikeFlag, final String orderType) {
prepareCriteria(columnNames, columnValues, criteria, isLikeFlag);
if (orderType != null && orderType.equalsIgnoreCase(Constants.ASCENDING_ORDER)) {
criteria.addOrder(Order.asc(orderColumnName));
} else if (orderType != null && orderType.equalsIgnoreCase(Constants.DESCENDING_ORDER)) {
criteria.addOrder(Order.desc(orderColumnName));
} else {
criteria.addOrder(Order.asc(orderColumnName));
}
}
@Override
public List<T> findColumnAndValueByOrderPagination(final String[] columnNames, final Object[] columnValues, final String orderColumnName, final boolean isLikeFlag, final String orderType, final int offset, final int size) {
Criteria criteria = getSession().createCriteria(entityClass);
prepareOrderByCriteria(criteria, columnNames, columnValues, orderColumnName, isLikeFlag, orderType);
addPaginationCriteria(offset, size, criteria);
return (List<T>) criteria.list();
}
@Override
public List<String> getListString(final String query) {
Query hQuery = getSession().createQuery(query);
return (List<String>) hQuery.list();
}
@Override
public List getListString(final DetachedCriteria deCriteria) {
Criteria criteria = deCriteria.getExecutableCriteria(getSession());
return criteria.list();
}
@Override
public List<T> findByCriteria(final DetachedCriteria deCriteria) {
Criteria criteria = deCriteria.getExecutableCriteria(getSession());
return (List<T>) criteria.list();
}
}
| |
package u14.reflect;
final class JxArray {
/**
* boolean[],byte[],int[],short[],long[],float[],double[]
* @param cls
* @return
*/
public static boolean isPrimitiveArray(Class<?> cls){
return cls!=null && cls.isArray() && cls.getComponentType().isPrimitive();
}
public static Object toPrimitiveArray(Object arr) {
if(arr==null){
return arr;
}
Class<?> arrClass = arr.getClass();
if(isPrimitiveArray(arrClass)){
return arr;
}
if(Boolean[].class==arrClass){
return toPrimitiveArray((Boolean[])arr);
}
if(Byte[].class==arrClass){
return toPrimitiveArray((Byte[])arr);
}
if(Short[].class==arrClass){
return toPrimitiveArray((Short[])arr);
}
if(Integer[].class==arrClass){
return toPrimitiveArray((Integer[])arr);
}
if(Long[].class==arrClass){
return toPrimitiveArray((Long[])arr);
}
if(Float[].class==arrClass){
return toPrimitiveArray((Float[])arr);
}
if(Double[].class==arrClass){
return toPrimitiveArray((Double[])arr);
}
if(Character[].class==arrClass){
return toPrimitiveArray((Character[])arr);
}
return null;
}
public static Object toObjectArray(Object arr) {
if(arr==null){
return arr;
}
Class<?> arrClass = arr.getClass();
if(arrClass.isArray() && arrClass.getComponentType().isPrimitive()==false){
return arr;
}
if(boolean[].class==arrClass){
return toObjectArray((boolean[])arr);
}
if(byte[].class==arrClass){
return toObjectArray((byte[])arr);
}
if(short[].class==arrClass){
return toObjectArray((short[])arr);
}
if(int[].class==arrClass){
return toObjectArray((int[])arr);
}
if(long[].class==arrClass){
return toObjectArray((long[])arr);
}
if(float[].class==arrClass){
return toObjectArray((float[])arr);
}
if(double[].class==arrClass){
return toObjectArray((double[])arr);
}
if(char[].class==arrClass){
return toObjectArray((char[])arr);
}
return null;
}
public static boolean[] toPrimitiveArray(Boolean[] arr)
{
if(arr==null){
return null;
}
boolean[] newArr = new boolean[arr.length];
for(int i=0;i<arr.length;i++){
newArr[i] = arr[i];
}
return newArr;
}
public static Boolean[] toObjectArray(boolean[] arr)
{
if(arr==null){
return null;
}
Boolean[] newArr = new Boolean[arr.length];
for(int i=0;i<arr.length;i++){
newArr[i] = arr[i];
}
return newArr;
}
public static byte[] toPrimitiveArray(Byte[] arr)
{
if(arr==null){
return null;
}
byte[] newArr = new byte[arr.length];
for(int i=0;i<arr.length;i++){
newArr[i] = arr[i];
}
return newArr;
}
public static Byte[] toObjectArray(byte[] arr)
{
if(arr==null){
return null;
}
Byte[] newArr = new Byte[arr.length];
for(int i=0;i<arr.length;i++){
newArr[i] = arr[i];
}
return newArr;
}
public static int[] toPrimitiveArray(Integer[] arr)
{
if(arr==null){
return null;
}
int[] newArr = new int[arr.length];
for(int i=0;i<arr.length;i++){
newArr[i] = arr[i];
}
return newArr;
}
public static Integer[] toObjectArray(int[] arr)
{
if(arr==null){
return null;
}
Integer[] newArr = new Integer[arr.length];
for(int i=0;i<arr.length;i++){
newArr[i] = arr[i];
}
return newArr;
}
/**
 * Unboxes a {@code Long[]} into a {@code long[]}.
 * Returns null for null input; a null element throws NullPointerException.
 */
public static long[] toPrimitiveArray(Long[] arr)
{
    if (arr == null) {
        return null;
    }
    final long[] result = new long[arr.length];
    int i = 0;
    for (Long value : arr) {
        result[i++] = value;
    }
    return result;
}
/**
 * Boxes a {@code long[]} into a {@code Long[]}.
 * Returns null for null input.
 */
public static Long[] toObjectArray(long[] arr)
{
    if (arr == null) {
        return null;
    }
    final Long[] result = new Long[arr.length];
    int i = 0;
    for (long value : arr) {
        result[i++] = value;
    }
    return result;
}
/**
 * Unboxes a {@code Float[]} into a {@code float[]}.
 * Returns null for null input; a null element throws NullPointerException.
 */
public static float[] toPrimitiveArray(Float[] arr)
{
    if (arr == null) {
        return null;
    }
    final float[] result = new float[arr.length];
    int i = 0;
    for (Float value : arr) {
        result[i++] = value;
    }
    return result;
}
/**
 * Boxes a {@code float[]} into a {@code Float[]}.
 * Returns null for null input.
 */
public static Float[] toObjectArray(float[] arr)
{
    if (arr == null) {
        return null;
    }
    final Float[] result = new Float[arr.length];
    int i = 0;
    for (float value : arr) {
        result[i++] = value;
    }
    return result;
}
/**
 * Unboxes a {@code Double[]} into a {@code double[]}.
 * Returns null for null input; a null element throws NullPointerException.
 */
public static double[] toPrimitiveArray(Double[] arr)
{
    if (arr == null) {
        return null;
    }
    final double[] result = new double[arr.length];
    int i = 0;
    for (Double value : arr) {
        result[i++] = value;
    }
    return result;
}
/**
 * Boxes a {@code double[]} into a {@code Double[]}.
 * Returns null for null input.
 */
public static Double[] toObjectArray(double[] arr)
{
    if (arr == null) {
        return null;
    }
    final Double[] result = new Double[arr.length];
    int i = 0;
    for (double value : arr) {
        result[i++] = value;
    }
    return result;
}
/**
 * Unboxes a {@code Character[]} into a {@code char[]}.
 * Returns null for null input; a null element throws NullPointerException.
 */
public static char[] toPrimitiveArray(Character[] arr)
{
    if (arr == null) {
        return null;
    }
    final char[] result = new char[arr.length];
    int i = 0;
    for (Character value : arr) {
        result[i++] = value;
    }
    return result;
}
/**
 * Boxes a {@code char[]} into a {@code Character[]}.
 * Returns null for null input.
 */
public static Character[] toObjectArray(char[] arr)
{
    if (arr == null) {
        return null;
    }
    final Character[] result = new Character[arr.length];
    int i = 0;
    for (char value : arr) {
        result[i++] = value;
    }
    return result;
}
}
| |
package org.genyris.core;
import org.genyris.classes.GlobalDescriptions;
import org.genyris.exception.AccessException;
import org.genyris.exception.GenyrisException;
import org.genyris.interp.Environment;
import org.genyris.interp.UnboundException;
import org.genyris.java.JavaClass;
/**
 * A Genyris class object: a Dictionary whose entries hold the class name,
 * superclass list and subclass list as dynamic variables, plus static helpers
 * for constructing classes and instance/subclass membership tests.
 *
 * Fix: the error message in {@link #assertIsThisObjectAClass(Exp)} was missing
 * a leading space, producing e.g. "Foois not a class.".
 */
public class StandardClass extends Dictionary {
    // Interned symbols used as dictionary keys, cached from the symbol table.
    private SimpleSymbol CLASSNAME, SUPERCLASSES, SUBCLASSES;
    private SimpleSymbol NIL;

    public StandardClass(SimpleSymbol classname, SimpleSymbol symbolicName,
            Environment env) {
        super(classname, symbolicName, env);
        CLASSNAME = env.getSymbolTable().CLASSNAME();
        SUPERCLASSES = env.getSymbolTable().SUPERCLASSES();
        SUBCLASSES = env.getSymbolTable().SUBCLASSES();
        NIL = env.getNil();
    }

    public StandardClass(Environment env) {
        super(env);
        CLASSNAME = env.getSymbolTable().CLASSNAME();
        SUPERCLASSES = env.getSymbolTable().SUPERCLASSES();
        SUBCLASSES = env.getSymbolTable().SUBCLASSES();
        NIL = env.getNil();
    }

    /** Returns the built-in symbol naming this type (StandardClass). */
    public Symbol getBuiltinClassSymbol(Internable table) {
        return table.STANDARDCLASS();
    }

    /**
     * Creates and registers a named class with a single (possibly null)
     * superclass, updating the global class descriptions.
     */
    public static StandardClass mkClass(String name, Environment env,
            StandardClass superClass) throws GenyrisException {
        Internable table = env.getSymbolTable();
        Symbol STANDARDCLASS = table.STANDARDCLASS();
        StandardClass standardClassDict = (StandardClass) env
                .lookupVariableValue(STANDARDCLASS);
        SimpleSymbol classname = table.CLASSNAME();
        SimpleSymbol symbolicName = table.internString(name);
        StandardClass newClass = makeTheClass(env, superClass, table,
                standardClassDict, classname, symbolicName);
        GlobalDescriptions.updateClassSingleSuper(env, table, symbolicName,
                (superClass != null ? (Symbol) superClass
                        .lookupVariableShallow(classname) : null));
        return newClass;
    }

    /** Builds the class object, wires its super/sub links and binds it in env. */
    private static StandardClass makeTheClass(Environment env,
            StandardClass superClass, Internable table,
            StandardClass standardClassDict, SimpleSymbol classname,
            SimpleSymbol symbolicName) throws GenyrisException {
        StandardClass newClass = new StandardClass(classname, symbolicName, env);
        newClass.defineVariableRaw(table.SUPERCLASSES(), env.getNil());
        newClass.defineVariableRaw(table.SUBCLASSES(), env.getNil());
        if (superClass != null)
            newClass.addSuperClass(superClass);
        env.defineVariable(symbolicName, newClass);
        return newClass;
    }

    /** Renders e.g. {@code <class Foo (Super1 Super2)>}. */
    public String toString() {
        String result = "<class ";
        result += getClassName();
        try {
            result += classListToString(getSuperClasses());
            // result += classListToString(getSubClasses());
        } catch (AccessException e) {
            return this.getClassName() + " toString(): " + e.getMessage();
        }
        result += ">";
        return result;
    }

    /** Formats a cons list of classes as " (Name1 Name2 ...)". */
    private String classListToString(Exp classes) throws AccessException {
        StringBuffer result = new StringBuffer(" (");
        while (classes != NIL) {
            StandardClass klass = (StandardClass) classes.car();
            result.append(klass.getClassName());
            if (classes.cdr() != NIL)
                result.append(' ');
            classes = classes.cdr();
        }
        result.append(")");
        return result.toString();
    }

    /** Returns the subclass list, or NIL when the variable is unbound. */
    private Exp getSubClasses() {
        try {
            return lookupVariableShallow(SUBCLASSES);
        } catch (UnboundException e) {
            return NIL;
        }
    }

    /** Prepends klass to this class's superclass list and back-links it. */
    public void addSuperClass(StandardClass klass) throws UnboundException {
        if (klass == null)
            return;
        Exp supers = lookupVariableShallow(SUPERCLASSES);
        supers = new Pair(klass, supers);
        setDynamicVariableValueRaw(SUPERCLASSES, supers);
        klass.addSubClass(this);
        // TODO use a list set adding function to avoid duplicates.
    }

    /** Prepends klass to this class's subclass list. */
    public void addSubClass(StandardClass klass) throws UnboundException {
        if (klass == null)
            return;
        Exp subs = lookupVariableShallow(SUBCLASSES);
        subs = new Pair(klass, subs);
        setDynamicVariableValueRaw(SUBCLASSES, subs);
        // TODO use a list set adding function to avoid duplicate subclasses.
    }

    /** Returns the superclass list, or NIL when the variable is unbound. */
    private Exp getSuperClasses() {
        try {
            return lookupVariableShallow(SUPERCLASSES);
        } catch (UnboundException e) {
            return NIL;
        }
    }

    /** Returns this class's name, or "Anonymous" when no name is bound. */
    public String getClassName() {
        try {
            return lookupVariableShallow(CLASSNAME).toString();
        } catch (UnboundException e) {
            return "Anonymous";
        }
    }

    /** Wraps a Java class as a Genyris JavaClass and registers it. */
    public static JavaClass makeClass(Class javaClass, Environment env,
            Symbol klassname, Exp superklasses) throws GenyrisException {
        return (JavaClass) makeClass(new JavaClass(javaClass, env), env, klassname, superklasses);
    }

    /** Creates and registers a plain StandardClass. */
    public static StandardClass makeClass(Environment env, Symbol klassname,
            Exp superklasses) throws GenyrisException {
        return makeClass(new StandardClass(env), env, klassname, superklasses);
    }

    /**
     * Wires up a freshly constructed class: sets its name, super/sub class
     * lists (defaulting superclasses to (Thing)), registers it as a subclass
     * of each superclass, binds it in the environment and updates the global
     * class descriptions.
     */
    public static StandardClass makeClass(StandardClass newClass, Environment env,
            Symbol klassname, Exp superklasses) throws GenyrisException {
        Exp NIL = env.getNil(); // local NIL deliberately shadows the field
        newClass.defineVariableRaw(env.getSymbolTable().CLASSNAME(), klassname);
        newClass.defineVariableRaw(env.getSymbolTable().SUBCLASSES(), NIL);
        if (superklasses == NIL)
            superklasses = new Pair(env.getSymbolTable().THING(), NIL);
        newClass.defineVariableRaw(env.getSymbolTable().SUPERCLASSES(),
                lookupClasses(env, superklasses));
        Exp sklist = superklasses;
        while (sklist != NIL) {
            Exp possibleClass = env.lookupVariableValue((Symbol) sklist.car());
            StandardClass.assertIsThisObjectAClass(possibleClass);
            StandardClass superClass = (StandardClass) possibleClass;
            Exp subklasses = NIL;
            try {
                subklasses = superClass.lookupVariableShallow(env
                        .getSymbolTable().SUBCLASSES());
            } catch (UnboundException ignore) {
                // Superclass had no subclass list yet: create an empty one.
                superClass.defineVariable(env.getSymbolTable().SUBCLASSES(),
                        NIL);
            }
            superClass.setDynamicVariableValueRaw(env.getSymbolTable()
                    .SUBCLASSES(), new Pair(newClass, subklasses));
            sklist = sklist.cdr();
        }
        env.defineVariable(klassname, newClass);
        GlobalDescriptions.updateClass(env, env.getSymbolTable(), klassname,
                superklasses);
        return newClass;
    }

    /** Resolves a list of class-name symbols to their class objects (reversed). */
    private static Exp lookupClasses(Environment env, Exp superklasses)
            throws GenyrisException {
        Exp result = env.getNil();
        while (superklasses != env.getNil()) {
            result = new Pair(env.lookupVariableValue((Symbol) superklasses
                    .car()), result);
            superklasses = superklasses.cdr();
        }
        return result;
    }

    /** True when klass is this class or a (transitive) subclass of it. */
    public boolean isSubClass(StandardClass klass) throws GenyrisException {
        if (klass == this) {
            return true;
        }
        Environment env = getParent();
        Exp mysubclasses = getSubClasses();
        while (mysubclasses != env.getNil()) {
            assertIsThisObjectAClass(mysubclasses.car());
            StandardClass firstClass = (StandardClass) mysubclasses.car();
            if (firstClass == klass) {
                return true;
            } else if (firstClass.isSubClass(klass)) {
                return true;
            }
            mysubclasses = mysubclasses.cdr();
        }
        return false;
    }

    /** Throws GenyrisException unless the expression is a StandardClass. */
    public static void assertIsThisObjectAClass(Exp firstClass)
            throws GenyrisException {
        if (!(firstClass instanceof StandardClass)) {
            // Fixed: message previously lacked the leading space.
            throw new GenyrisException(firstClass + " is not a class.");
        }
    }

    /** True when object is a direct instance of this class or of a subclass. */
    public boolean isInstance(Exp object) throws GenyrisException {
        Environment env = getParent();
        Exp classes;
        classes = object.getClasses(env);
        while (classes != env.getNil()) {
            assertIsThisObjectAClass(classes.car());
            StandardClass klass = (StandardClass) classes.car();
            if (classes.car() == this) {
                return true;
            }
            if (isSubClass(klass)) {
                return true;
            }
            classes = classes.cdr();
        }
        return false;
    }

    public void acceptVisitor(Visitor guest) throws GenyrisException {
        guest.visitStandardClass(this);
    }
}
| |
package org.bouncycastle.math.ec.custom.sec;
import org.bouncycastle.math.ec.ECConstants;
import org.bouncycastle.math.ec.ECCurve;
import org.bouncycastle.math.ec.ECFieldElement;
import org.bouncycastle.math.ec.ECPoint;
import org.bouncycastle.math.ec.ECPoint.AbstractF2m;
/**
 * Point arithmetic for the sect283k1 binary (characteristic-2) curve.
 * Points are held in lambda-projective coordinates: the stored y is actually
 * Lambda = X + Y/X (see the inline comments below), with a single Z
 * denominator in zs[0].
 *
 * NOTE(review): the commented-out alternatives referencing curve.getA() and
 * curve.getB().sqrt() suggest this class assumes the Koblitz parameters
 * a == 0 and sqrt(b) == b for this curve — TODO confirm against the curve
 * definition before editing the formulas.
 */
public class SecT283K1Point extends AbstractF2m
{
    /**
     * @deprecated Use ECCurve.createPoint to construct points
     */
    public SecT283K1Point(ECCurve curve, ECFieldElement x, ECFieldElement y)
    {
        this(curve, x, y, false);
    }
    /**
     * @deprecated per-point compression property will be removed, refer {@link #getEncoded(boolean)}
     */
    public SecT283K1Point(ECCurve curve, ECFieldElement x, ECFieldElement y, boolean withCompression)
    {
        super(curve, x, y);
        // Either both coordinates are present or both are null (infinity).
        if ((x == null) != (y == null))
        {
            throw new IllegalArgumentException("Exactly one of the field elements is null");
        }
        this.withCompression = withCompression;
    }
    // Internal constructor for projective results (zs carries the Z coordinate).
    SecT283K1Point(ECCurve curve, ECFieldElement x, ECFieldElement y, ECFieldElement[] zs, boolean withCompression)
    {
        super(curve, x, y, zs);
        this.withCompression = withCompression;
    }
    // Returns a curve-detached copy built from the affine coordinates.
    protected ECPoint detach()
    {
        return new SecT283K1Point(null, this.getAffineXCoord(), this.getAffineYCoord()); // earlier JDK
    }
    /**
     * Recovers the affine Y from the stored Lambda representation:
     * Y = (L + X) * X, divided by Z when the point is not normalized.
     */
    public ECFieldElement getYCoord()
    {
        ECFieldElement X = x, L = y;
        if (this.isInfinity() || X.isZero())
        {
            return L;
        }
        // Y is actually Lambda (X + Y/X) here; convert to affine value on the fly
        ECFieldElement Y = L.add(X).multiply(X);
        ECFieldElement Z = zs[0];
        if (!Z.isOne())
        {
            Y = Y.divide(Z);
        }
        return Y;
    }
    // Compressed-encoding sign bit, derived directly from the lambda form.
    protected boolean getCompressionYTilde()
    {
        ECFieldElement X = this.getRawXCoord();
        if (X.isZero())
        {
            return false;
        }
        ECFieldElement Y = this.getRawYCoord();
        // Y is actually Lambda (X + Y/X) here
        return Y.testBitZero() != X.testBitZero();
    }
    /**
     * Mixed lambda-projective point addition. Handles the special cases
     * (infinity operands, X == 0 operands, doubling, inverse pair) before
     * the general formula.
     */
    public ECPoint add(ECPoint b)
    {
        if (this.isInfinity())
        {
            return b;
        }
        if (b.isInfinity())
        {
            return this;
        }
        ECCurve curve = this.getCurve();
        ECFieldElement X1 = this.x;
        ECFieldElement X2 = b.getRawXCoord();
        if (X1.isZero())
        {
            // X == 0 is the self-inverse point; delegate to the other operand.
            if (X2.isZero())
            {
                return curve.getInfinity();
            }
            return b.add(this);
        }
        ECFieldElement L1 = this.y, Z1 = this.zs[0];
        ECFieldElement L2 = b.getRawYCoord(), Z2 = b.getZCoord(0);
        boolean Z1IsOne = Z1.isOne();
        // Cross-multiply to bring both operands to a common denominator.
        ECFieldElement U2 = X2, S2 = L2;
        if (!Z1IsOne)
        {
            U2 = U2.multiply(Z1);
            S2 = S2.multiply(Z1);
        }
        boolean Z2IsOne = Z2.isOne();
        ECFieldElement U1 = X1, S1 = L1;
        if (!Z2IsOne)
        {
            U1 = U1.multiply(Z2);
            S1 = S1.multiply(Z2);
        }
        ECFieldElement A = S1.add(S2);
        ECFieldElement B = U1.add(U2);
        if (B.isZero())
        {
            // Same X: either the same point (double) or an inverse pair (infinity).
            if (A.isZero())
            {
                return twice();
            }
            return curve.getInfinity();
        }
        ECFieldElement X3, L3, Z3;
        if (X2.isZero())
        {
            // TODO This can probably be optimized quite a bit
            ECPoint p = this.normalize();
            X1 = p.getXCoord();
            ECFieldElement Y1 = p.getYCoord();
            ECFieldElement Y2 = L2;
            ECFieldElement L = Y1.add(Y2).divide(X1);
            // X3 = L.square().add(L).add(X1).add(curve.getA());
            X3 = L.square().add(L).add(X1);
            if (X3.isZero())
            {
                // return new SecT283K1Point(curve, X3, curve.getB().sqrt(), this.withCompression);
                return new SecT283K1Point(curve, X3, curve.getB(), this.withCompression);
            }
            ECFieldElement Y3 = L.multiply(X1.add(X3)).add(X3).add(Y1);
            L3 = Y3.divide(X3).add(X3);
            Z3 = curve.fromBigInteger(ECConstants.ONE);
        }
        else
        {
            B = B.square();
            ECFieldElement AU1 = A.multiply(U1);
            ECFieldElement AU2 = A.multiply(U2);
            X3 = AU1.multiply(AU2);
            if (X3.isZero())
            {
                // return new SecT283K1Point(curve, X3, curve.getB().sqrt(), this.withCompression);
                return new SecT283K1Point(curve, X3, curve.getB(), this.withCompression);
            }
            ECFieldElement ABZ2 = A.multiply(B);
            if (!Z2IsOne)
            {
                ABZ2 = ABZ2.multiply(Z2);
            }
            L3 = AU2.add(B).squarePlusProduct(ABZ2, L1.add(Z1));
            Z3 = ABZ2;
            if (!Z1IsOne)
            {
                Z3 = Z3.multiply(Z1);
            }
        }
        return new SecT283K1Point(curve, X3, L3, new ECFieldElement[]{ Z3 }, this.withCompression);
    }
    /** Point doubling in lambda-projective coordinates. */
    public ECPoint twice()
    {
        if (this.isInfinity())
        {
            return this;
        }
        ECCurve curve = this.getCurve();
        ECFieldElement X1 = this.x;
        if (X1.isZero())
        {
            // A point with X == 0 is it's own additive inverse
            return curve.getInfinity();
        }
        ECFieldElement L1 = this.y, Z1 = this.zs[0];
        boolean Z1IsOne = Z1.isOne();
        ECFieldElement Z1Sq = Z1IsOne ? Z1 : Z1.square();
        // T = L1^2 + L1*Z1, computed with one less multiply when Z1 == 1.
        ECFieldElement T;
        if (Z1IsOne)
        {
            T = L1.square().add(L1);
        }
        else
        {
            T = L1.add(Z1).multiply(L1);
        }
        if (T.isZero())
        {
            // return new SecT283K1Point(curve, T, curve.getB().sqrt(), withCompression);
            return new SecT283K1Point(curve, T, curve.getB(), withCompression);
        }
        ECFieldElement X3 = T.square();
        ECFieldElement Z3 = Z1IsOne ? T : T.multiply(Z1Sq);
        ECFieldElement t1 = L1.add(X1).square();
        ECFieldElement t2 = Z1IsOne ? Z1 : Z1Sq.square();
        ECFieldElement L3 = t1.add(T).add(Z1Sq).multiply(t1).add(t2).add(X3).add(Z3);
        return new SecT283K1Point(curve, X3, L3, new ECFieldElement[]{ Z3 }, this.withCompression);
    }
    /**
     * Computes 2 * this + b in one fused operation; optimized only for a
     * lambda-affine b (falls back to twice().add(b) otherwise).
     */
    public ECPoint twicePlus(ECPoint b)
    {
        if (this.isInfinity())
        {
            return b;
        }
        if (b.isInfinity())
        {
            return twice();
        }
        ECCurve curve = this.getCurve();
        ECFieldElement X1 = this.x;
        if (X1.isZero())
        {
            // A point with X == 0 is it's own additive inverse
            return b;
        }
        // NOTE: twicePlus() only optimized for lambda-affine argument
        ECFieldElement X2 = b.getRawXCoord(), Z2 = b.getZCoord(0);
        if (X2.isZero() || !Z2.isOne())
        {
            return twice().add(b);
        }
        ECFieldElement L1 = this.y, Z1 = this.zs[0];
        ECFieldElement L2 = b.getRawYCoord();
        ECFieldElement X1Sq = X1.square();
        ECFieldElement L1Sq = L1.square();
        ECFieldElement Z1Sq = Z1.square();
        ECFieldElement L1Z1 = L1.multiply(Z1);
        // ECFieldElement T = curve.getA().multiply(Z1Sq).add(L1Sq).add(L1Z1);
        ECFieldElement T = L1Sq.add(L1Z1);
        ECFieldElement L2plus1 = L2.addOne();
        // ECFieldElement A = curve.getA().add(L2plus1).multiply(Z1Sq).add(L1Sq).multiplyPlusProduct(T, X1Sq, Z1Sq);
        ECFieldElement A = L2plus1.multiply(Z1Sq).add(L1Sq).multiplyPlusProduct(T, X1Sq, Z1Sq);
        ECFieldElement X2Z1Sq = X2.multiply(Z1Sq);
        ECFieldElement B = X2Z1Sq.add(T).square();
        if (B.isZero())
        {
            if (A.isZero())
            {
                return b.twice();
            }
            return curve.getInfinity();
        }
        if (A.isZero())
        {
            // return new SecT283K1Point(curve, A, curve.getB().sqrt(), withCompression);
            return new SecT283K1Point(curve, A, curve.getB(), withCompression);
        }
        ECFieldElement X3 = A.square().multiply(X2Z1Sq);
        ECFieldElement Z3 = A.multiply(B).multiply(Z1Sq);
        ECFieldElement L3 = A.add(B).square().multiplyPlusProduct(T, L2plus1, Z3);
        return new SecT283K1Point(curve, X3, L3, new ECFieldElement[]{ Z3 }, this.withCompression);
    }
    /** Negation: in lambda coordinates -P has Lambda' = L + Z (same X and Z). */
    public ECPoint negate()
    {
        if (this.isInfinity())
        {
            return this;
        }
        ECFieldElement X = this.x;
        if (X.isZero())
        {
            // X == 0 points are self-inverse.
            return this;
        }
        // L is actually Lambda (X + Y/X) here
        ECFieldElement L = this.y, Z = this.zs[0];
        return new SecT283K1Point(curve, X, L.add(Z), new ECFieldElement[]{ Z }, this.withCompression);
    }
}
| |
package org.jivesoftware.openfire.admin;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
import org.jivesoftware.util.*;
import java.util.*;
import org.xmpp.packet.*;
import org.jivesoftware.openfire.muc.MultiUserChatService;
import java.net.URLEncoder;
public final class muc_002dsysadmins_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
private static java.util.List _jspx_dependants;
private org.apache.jasper.runtime.TagHandlerPool _jspx_tagPool_fmt_message_key_nobody;
// Jasper hook: returns the page's compile-time dependants list (the static
// _jspx_dependants field, which is never assigned here, so effectively null).
public Object getDependants() {
    return _jspx_dependants;
}
/** Jasper lifecycle init: acquires the shared fmt:message tag-handler pool. */
public void _jspInit() {
    ServletConfig servletConfig = getServletConfig();
    _jspx_tagPool_fmt_message_key_nobody =
        org.apache.jasper.runtime.TagHandlerPool.getTagHandlerPool(servletConfig);
}
// Jasper lifecycle teardown: releases the pooled fmt:message tag handlers.
public void _jspDestroy() {
    _jspx_tagPool_fmt_message_key_nobody.release();
}
/**
 * Generated JSP service method for muc-sysadmins.jsp: validates the target
 * MUC service, handles add/remove of service system administrators, then
 * renders the admin list page.
 *
 * Fix: the sysadmin delete link previously used the deprecated
 * URLEncoder.encode(String) overload (platform default charset); it now
 * passes "UTF-8" explicitly, matching every other encode call on this page.
 */
public void _jspService(HttpServletRequest request, HttpServletResponse response)
      throws java.io.IOException, ServletException {
  JspFactory _jspxFactory = null;
  PageContext pageContext = null;
  HttpSession session = null;
  ServletContext application = null;
  ServletConfig config = null;
  JspWriter out = null;
  Object page = this;
  JspWriter _jspx_out = null;
  PageContext _jspx_page_context = null;
  try {
    _jspxFactory = JspFactory.getDefaultFactory();
    response.setContentType("text/html");
    pageContext = _jspxFactory.getPageContext(this, request, response,
                "error.jsp", true, 8192, true);
    _jspx_page_context = pageContext;
    application = pageContext.getServletContext();
    config = pageContext.getServletConfig();
    session = pageContext.getSession();
    out = pageContext.getOut();
    _jspx_out = out;
    out.write("\n\n\n\n\n\n\n\n");
    org.jivesoftware.util.WebManager webManager = null;
    synchronized (_jspx_page_context) {
      webManager = (org.jivesoftware.util.WebManager) _jspx_page_context.getAttribute("webManager", PageContext.PAGE_SCOPE);
      if (webManager == null){
        webManager = new org.jivesoftware.util.WebManager();
        _jspx_page_context.setAttribute("webManager", webManager, PageContext.PAGE_SCOPE);
      }
    }
    out.write('\n');
    webManager.init(request, response, session, application, out );
    out.write('\n');
    out.write('\n');
    // Get parameters
    String userJID = ParamUtils.getParameter(request,"userJID");
    boolean add = request.getParameter("add") != null;
    boolean delete = ParamUtils.getBooleanParameter(request,"delete");
    String mucname = ParamUtils.getParameter(request,"mucname");
    // NOTE(review): mucname may be null on a malformed request; assumes
    // isServiceRegistered(null) returns false rather than throwing — confirm.
    if (!webManager.getMultiUserChatManager().isServiceRegistered(mucname)) {
      // The requested service name does not exist so return to the list of the existing rooms
      response.sendRedirect("muc-service-summary.jsp");
      return;
    }
    // Get muc server
    MultiUserChatService mucService = webManager.getMultiUserChatManager().getMultiUserChatService(mucname);
    // Handle a save
    Map<String,String> errors = new HashMap<String,String>();
    JID bareJID = null;
    try {
      // do validation
      bareJID = new JID(userJID).asBareJID();
    } catch (IllegalArgumentException e) {
      errors.put("userJID","userJID");
    }
    if (errors.size() == 0) {
      if (add) {
        mucService.addSysadmin(bareJID);
        // Log the event
        webManager.logEvent("added muc sysadmin "+userJID+" for service "+mucname, null);
        response.sendRedirect("muc-sysadmins.jsp?addsuccess=true&mucname="+URLEncoder.encode(mucname, "UTF-8"));
        return;
      }
      if (delete) {
        // Remove the user from the list of system administrators
        mucService.removeSysadmin(bareJID);
        // Log the event
        webManager.logEvent("removed muc sysadmin "+userJID+" for service "+mucname, null);
        // done, return
        response.sendRedirect("muc-sysadmins.jsp?deletesuccess=true&mucname="+URLEncoder.encode(mucname, "UTF-8"));
        return;
      }
    }
    out.write("\n\n<html>\n<head>\n<title>");
    if (_jspx_meth_fmt_message_0(_jspx_page_context))
      return;
    out.write("</title>\n<meta name=\"subPageID\" content=\"muc-sysadmin\"/>\n<meta name=\"extraParams\" content=\"");
    out.print( "mucname="+URLEncoder.encode(mucname, "UTF-8") );
    out.write("\"/>\n<meta name=\"helpPage\" content=\"edit_group_chat_service_administrators.html\"/>\n</head>\n<body>\n\n<p>\n");
    if (_jspx_meth_fmt_message_1(_jspx_page_context))
      return;
    out.write('\n');
    if (_jspx_meth_fmt_message_2(_jspx_page_context))
      return;
    out.write(" <b><a href=\"muc-service-edit-form.jsp?mucname=");
    out.print( URLEncoder.encode(mucname, "UTF-8") );
    out.write('"');
    out.write('>');
    out.print( StringUtils.escapeHTMLTags(mucname) );
    out.write("</a></b>\n</p>\n\n");
    // Status banners: delete success / add success / validation errors.
    if ("true".equals(request.getParameter("deletesuccess"))) {
      out.write("\n\n <div class=\"jive-success\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n <tbody>\n <tr><td class=\"jive-icon\"><img src=\"images/success-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"></td>\n <td class=\"jive-icon-label\">\n ");
      if (_jspx_meth_fmt_message_3(_jspx_page_context))
        return;
      out.write("\n </td></tr>\n </tbody>\n </table>\n </div><br>\n\n");
    } else if ("true".equals(request.getParameter("addsuccess"))) {
      out.write("\n\n <div class=\"jive-success\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n <tbody>\n <tr><td class=\"jive-icon\"><img src=\"images/success-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"></td>\n <td class=\"jive-icon-label\">\n ");
      if (_jspx_meth_fmt_message_4(_jspx_page_context))
        return;
      out.write("\n </td></tr>\n </tbody>\n </table>\n </div><br>\n\n");
    } else if (errors.size() > 0) {
      out.write("\n\n <div class=\"jive-error\">\n <table cellpadding=\"0\" cellspacing=\"0\" border=\"0\">\n <tbody>\n <tr><td class=\"jive-icon\"><img src=\"images/error-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"></td>\n <td class=\"jive-icon-label\">\n ");
      if (_jspx_meth_fmt_message_5(_jspx_page_context))
        return;
      out.write("\n </td></tr>\n </tbody>\n </table>\n </div><br>\n\n");
    }
    out.write("\n\n\n<!-- BEGIN 'Administrators' -->\n<form action=\"muc-sysadmins.jsp?add\" method=\"post\">\n <input type=\"hidden\" name=\"mucname\" value=\"");
    out.print( StringUtils.escapeForXML(mucname) );
    out.write("\" />\n <div class=\"jive-contentBoxHeader\">\n\t\t");
    if (_jspx_meth_fmt_message_6(_jspx_page_context))
      return;
    out.write("\n\t</div>\n\t<div class=\"jive-contentBox\">\n\t\t<label for=\"userJIDtf\">");
    if (_jspx_meth_fmt_message_7(_jspx_page_context))
      return;
    out.write("</label>\n\t\t<input type=\"text\" name=\"userJID\" size=\"30\" maxlength=\"100\" value=\"");
    out.print( (userJID != null ? StringUtils.escapeForXML(userJID) : "") );
    out.write("\"\n\t\t id=\"userJIDtf\">\n\t\t<input type=\"submit\" value=\"");
    if (_jspx_meth_fmt_message_8(_jspx_page_context))
      return;
    out.write("\">\n\t\t<br><br>\n\n\t\t<div class=\"jive-table\" style=\"width:400px;\">\n\t\t\t<table cellpadding=\"0\" cellspacing=\"0\" border=\"0\" width=\"100%\">\n\t\t\t<thead>\n\t\t\t\t<tr>\n\t\t\t\t\t<th width=\"99%\">");
    if (_jspx_meth_fmt_message_9(_jspx_page_context))
      return;
    out.write("</th>\n\t\t\t\t\t<th width=\"1%\" nowrap>");
    if (_jspx_meth_fmt_message_10(_jspx_page_context))
      return;
    out.write("</th>\n\t\t\t\t</tr>\n\t\t\t</thead>\n\t\t\t<tbody>\n\t\t\t\t");
    if (mucService.getSysadmins().size() == 0) {
      out.write("\n\n\t\t\t\t\t<tr>\n\t\t\t\t\t\t<td colspan=\"2\">\n\t\t\t\t\t\t\t");
      if (_jspx_meth_fmt_message_11(_jspx_page_context))
        return;
      out.write("\n\t\t\t\t\t\t</td>\n\t\t\t\t\t</tr>\n\n\t\t\t\t");
    }
    out.write("\n\n\t\t\t\t");
    // One table row per current sysadmin, with a delete link.
    for (JID user : mucService.getSysadmins()) {
      String username = JID.unescapeNode(user.getNode());
      String userDisplay = username + '@' + user.getDomain();
      out.write("\n\t\t\t\t\t<tr>\n\t\t\t\t\t\t<td width=\"99%\">\n\t\t\t\t\t\t\t");
      out.print( StringUtils.escapeHTMLTags(userDisplay) );
      out.write("\n\t\t\t\t\t\t</td>\n\t\t\t\t\t\t<td width=\"1%\" align=\"center\">\n\t\t\t\t\t\t\t<a href=\"muc-sysadmins.jsp?userJID=");
      // Fixed: explicit UTF-8 instead of the deprecated default-charset overload.
      out.print( URLEncoder.encode(user.toString(), "UTF-8") );
      out.write("&delete=true&mucname=");
      out.print( URLEncoder.encode(mucname, "UTF-8") );
      out.write("\"\n\t\t\t\t\t\t\t title=\"");
      if (_jspx_meth_fmt_message_12(_jspx_page_context))
        return;
      out.write("\"\n\t\t\t\t\t\t\t onclick=\"return confirm('");
      if (_jspx_meth_fmt_message_13(_jspx_page_context))
        return;
      out.write("');\"\n\t\t\t\t\t\t\t ><img src=\"images/delete-16x16.gif\" width=\"16\" height=\"16\" border=\"0\" alt=\"\"></a>\n\t\t\t\t\t\t</td>\n\t\t\t\t\t</tr>\n\n\t\t\t\t");
    }
    out.write("\n\t\t\t</tbody>\n\t\t\t</table>\n\t\t</div>\n\t</div>\n</form>\n<!-- END 'Administrators' -->\n\n\n</body>\n</html>\n");
  } catch (Throwable t) {
    if (!(t instanceof SkipPageException)){
      out = _jspx_out;
      if (out != null && out.getBufferSize() != 0)
        out.clearBuffer();
      if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
    }
  } finally {
    if (_jspxFactory != null) _jspxFactory.releasePageContext(_jspx_page_context);
  }
}
/** Renders fmt:message key "groupchat.admins.title"; true => skip page. */
private boolean _jspx_meth_fmt_message_0(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.title");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.admins.introduction"; true => skip page. */
private boolean _jspx_meth_fmt_message_1(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.introduction");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.service.settings_affect"; true => skip page. */
private boolean _jspx_meth_fmt_message_2(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.service.settings_affect");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.admins.user_removed"; true => skip page. */
private boolean _jspx_meth_fmt_message_3(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.user_removed");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.admins.user_added"; true => skip page. */
private boolean _jspx_meth_fmt_message_4(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.user_added");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.admins.error_adding"; true => skip page. */
private boolean _jspx_meth_fmt_message_5(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.error_adding");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.admins.legend"; true => skip page. */
private boolean _jspx_meth_fmt_message_6(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.legend");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.admins.label_add_admin"; true => skip page. */
private boolean _jspx_meth_fmt_message_7(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.label_add_admin");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
/** Renders fmt:message key "groupchat.admins.add"; true => skip page. */
private boolean _jspx_meth_fmt_message_8(PageContext _jspx_page_context)
        throws Throwable {
    org.apache.taglibs.standard.tag.el.fmt.MessageTag tag =
        (org.apache.taglibs.standard.tag.el.fmt.MessageTag)
            _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
    tag.setPageContext(_jspx_page_context);
    tag.setParent(null);
    tag.setKey("groupchat.admins.add");
    tag.doStartTag();
    boolean skipPage = (tag.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE);
    _jspx_tagPool_fmt_message_key_nobody.reuse(tag);
    return skipPage;
}
private boolean _jspx_meth_fmt_message_9(PageContext _jspx_page_context)
        throws Throwable {
  // fmt:message — renders the localized "groupchat.admins.column_user" text.
  org.apache.taglibs.standard.tag.el.fmt.MessageTag _jspx_th_fmt_message_9 = (org.apache.taglibs.standard.tag.el.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
  _jspx_th_fmt_message_9.setPageContext(_jspx_page_context);
  _jspx_th_fmt_message_9.setParent(null);
  _jspx_th_fmt_message_9.setKey("groupchat.admins.column_user");
  _jspx_th_fmt_message_9.doStartTag();
  // SKIP_PAGE from doEndTag() tells the caller to abort rendering of the page.
  boolean skipPage = _jspx_th_fmt_message_9.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  // Return the handler to the tag pool exactly once (the original duplicated the
  // reuse() call on both exit paths and declared unused locals pageContext/out/_jspx_eval).
  _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_9);
  return skipPage;
}
private boolean _jspx_meth_fmt_message_10(PageContext _jspx_page_context)
        throws Throwable {
  // fmt:message — renders the localized "groupchat.admins.column_remove" text.
  org.apache.taglibs.standard.tag.el.fmt.MessageTag _jspx_th_fmt_message_10 = (org.apache.taglibs.standard.tag.el.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
  _jspx_th_fmt_message_10.setPageContext(_jspx_page_context);
  _jspx_th_fmt_message_10.setParent(null);
  _jspx_th_fmt_message_10.setKey("groupchat.admins.column_remove");
  _jspx_th_fmt_message_10.doStartTag();
  // SKIP_PAGE from doEndTag() tells the caller to abort rendering of the page.
  boolean skipPage = _jspx_th_fmt_message_10.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  // Return the handler to the tag pool exactly once (the original duplicated the
  // reuse() call on both exit paths and declared unused locals pageContext/out/_jspx_eval).
  _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_10);
  return skipPage;
}
private boolean _jspx_meth_fmt_message_11(PageContext _jspx_page_context)
        throws Throwable {
  // fmt:message — renders the localized "groupchat.admins.no_admins" text.
  org.apache.taglibs.standard.tag.el.fmt.MessageTag _jspx_th_fmt_message_11 = (org.apache.taglibs.standard.tag.el.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
  _jspx_th_fmt_message_11.setPageContext(_jspx_page_context);
  _jspx_th_fmt_message_11.setParent(null);
  _jspx_th_fmt_message_11.setKey("groupchat.admins.no_admins");
  _jspx_th_fmt_message_11.doStartTag();
  // SKIP_PAGE from doEndTag() tells the caller to abort rendering of the page.
  boolean skipPage = _jspx_th_fmt_message_11.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  // Return the handler to the tag pool exactly once (the original duplicated the
  // reuse() call on both exit paths and declared unused locals pageContext/out/_jspx_eval).
  _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_11);
  return skipPage;
}
private boolean _jspx_meth_fmt_message_12(PageContext _jspx_page_context)
        throws Throwable {
  // fmt:message — renders the localized "groupchat.admins.dialog.title" text.
  org.apache.taglibs.standard.tag.el.fmt.MessageTag _jspx_th_fmt_message_12 = (org.apache.taglibs.standard.tag.el.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
  _jspx_th_fmt_message_12.setPageContext(_jspx_page_context);
  _jspx_th_fmt_message_12.setParent(null);
  _jspx_th_fmt_message_12.setKey("groupchat.admins.dialog.title");
  _jspx_th_fmt_message_12.doStartTag();
  // SKIP_PAGE from doEndTag() tells the caller to abort rendering of the page.
  boolean skipPage = _jspx_th_fmt_message_12.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  // Return the handler to the tag pool exactly once (the original duplicated the
  // reuse() call on both exit paths and declared unused locals pageContext/out/_jspx_eval).
  _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_12);
  return skipPage;
}
private boolean _jspx_meth_fmt_message_13(PageContext _jspx_page_context)
        throws Throwable {
  // fmt:message — renders the localized "groupchat.admins.dialog.text" text.
  org.apache.taglibs.standard.tag.el.fmt.MessageTag _jspx_th_fmt_message_13 = (org.apache.taglibs.standard.tag.el.fmt.MessageTag) _jspx_tagPool_fmt_message_key_nobody.get(org.apache.taglibs.standard.tag.el.fmt.MessageTag.class);
  _jspx_th_fmt_message_13.setPageContext(_jspx_page_context);
  _jspx_th_fmt_message_13.setParent(null);
  _jspx_th_fmt_message_13.setKey("groupchat.admins.dialog.text");
  _jspx_th_fmt_message_13.doStartTag();
  // SKIP_PAGE from doEndTag() tells the caller to abort rendering of the page.
  boolean skipPage = _jspx_th_fmt_message_13.doEndTag() == javax.servlet.jsp.tagext.Tag.SKIP_PAGE;
  // Return the handler to the tag pool exactly once (the original duplicated the
  // reuse() call on both exit paths and declared unused locals pageContext/out/_jspx_eval).
  _jspx_tagPool_fmt_message_key_nobody.reuse(_jspx_th_fmt_message_13);
  return skipPage;
}
}
| |
package org.owasp.webgoat.lessons.CrossSiteScripting;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.PreparedStatement;
import java.sql.Statement;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import org.owasp.webgoat.lessons.GoatHillsFinancial.DefaultLessonAction;
import org.owasp.webgoat.lessons.GoatHillsFinancial.GoatHillsFinancial;
import org.owasp.webgoat.lessons.GoatHillsFinancial.LessonAction;
import org.owasp.webgoat.session.Employee;
import org.owasp.webgoat.session.ParameterNotFoundException;
import org.owasp.webgoat.session.ParameterParser;
import org.owasp.webgoat.session.UnauthenticatedException;
import org.owasp.webgoat.session.UnauthorizedException;
import org.owasp.webgoat.session.ValidationException;
import org.owasp.webgoat.session.WebSession;
/***************************************************************************************************
*
*
* This file is part of WebGoat, an Open Web Application Security Project utility. For details,
* please see http://www.owasp.org/
*
* Copyright (c) 2002 - 2007 Bruce Mayhew
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU General Public License as published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with this program; if
* not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*
* Getting Source ==============
*
* Source for this application is maintained at code.google.com, a repository for free software
* projects.
*
* For details, please see http://code.google.com/p/webgoat/
*/
public class UpdateProfile extends DefaultLessonAction
{

    // Follow-up action executed after the profile update (typically ViewProfile).
    private LessonAction chainedAction;

    /**
     * Creates the action.
     *
     * @param lesson        owning lesson
     * @param lessonName    lesson key used to namespace session/request attributes
     * @param actionName    this action's name
     * @param chainedAction action invoked after a successful update
     */
    public UpdateProfile(GoatHillsFinancial lesson, String lessonName, String actionName, LessonAction chainedAction)
    {
        super(lesson, lessonName, actionName);
        this.chainedAction = chainedAction;
    }

    /**
     * Parses the posted profile form and either updates the targeted employee
     * (when an employee id &gt; 0 was supplied) or creates a new profile, then
     * chains to the follow-up action.
     *
     * @throws UnauthenticatedException when no user is logged in
     * @throws ValidationException      propagated from profile parsing; rejecting
     *                                  invalid input also marks lesson stage 2 complete
     */
    public void handleRequest(WebSession s) throws ParameterNotFoundException, UnauthenticatedException,
            UnauthorizedException, ValidationException
    {
        if (isAuthenticated(s))
        {
            int userId = getIntSessionAttribute(s, getLessonName() + "." + CrossSiteScripting.USER_ID);
            int subjectId = s.getParser().getIntParameter(CrossSiteScripting.EMPLOYEE_ID, 0);

            Employee employee = null;
            try
            {
                employee = parseEmployeeProfile(subjectId, s);
            } catch (ValidationException e)
            {
                // Rejecting invalid input is exactly what stage 2 of the lesson asks for.
                if (CrossSiteScripting.STAGE2.equals(getStage(s)))
                {
                    setStageComplete(s, CrossSiteScripting.STAGE2);
                }
                throw e;
            }

            if (subjectId > 0)
            {
                this.changeEmployeeProfile(s, userId, subjectId, employee);
                setRequestAttribute(s, getLessonName() + "." + CrossSiteScripting.EMPLOYEE_ID, Integer
                        .toString(subjectId));
            }
            else
                this.createEmployeeProfile(s, userId, employee);

            try
            {
                chainedAction.handleRequest(s);
            } catch (UnauthenticatedException ue1)
            {
                ue1.printStackTrace();
            } catch (UnauthorizedException ue2)
            {
                ue2.printStackTrace();
            }
        }
        else
            throw new UnauthenticatedException();
    }

    /**
     * Reads the profile fields straight from the request.
     *
     * NOTE(review): deliberately performs NO input validation — this is the
     * vulnerable starting point of the XSS lesson; students are expected to add
     * validation (see {@link #doParseEmployeeProfile} and {@link #validate}).
     *
     * @throws NumberFormatException if a numeric field is missing or malformed
     */
    protected Employee parseEmployeeProfile(int subjectId, WebSession s) throws ParameterNotFoundException,
            ValidationException
    {
        // The input validation can be added using a parsing component
        // or by using an inline regular expression. The parsing component
        // is the better solution.
        HttpServletRequest request = s.getRequest();
        String firstName = request.getParameter(CrossSiteScripting.FIRST_NAME);
        String lastName = request.getParameter(CrossSiteScripting.LAST_NAME);
        String ssn = request.getParameter(CrossSiteScripting.SSN);
        String title = request.getParameter(CrossSiteScripting.TITLE);
        String phone = request.getParameter(CrossSiteScripting.PHONE_NUMBER);
        String address1 = request.getParameter(CrossSiteScripting.ADDRESS1);
        String address2 = request.getParameter(CrossSiteScripting.ADDRESS2);
        int manager = Integer.parseInt(request.getParameter(CrossSiteScripting.MANAGER));
        String startDate = request.getParameter(CrossSiteScripting.START_DATE);
        int salary = Integer.parseInt(request.getParameter(CrossSiteScripting.SALARY));
        String ccn = request.getParameter(CrossSiteScripting.CCN);
        int ccnLimit = Integer.parseInt(request.getParameter(CrossSiteScripting.CCN_LIMIT));
        String disciplinaryActionDate = request.getParameter(CrossSiteScripting.DISCIPLINARY_DATE);
        String disciplinaryActionNotes = request.getParameter(CrossSiteScripting.DISCIPLINARY_NOTES);
        String personalDescription = request.getParameter(CrossSiteScripting.DESCRIPTION);

        Employee employee = new Employee(subjectId, firstName, lastName, ssn, title, phone, address1, address2,
                manager, startDate, salary, ccn, ccnLimit, disciplinaryActionDate, disciplinaryActionNotes,
                personalDescription);
        return employee;
    }

    /**
     * Pristine copy of {@link #parseEmployeeProfile} kept so the lesson can be
     * reset after students edit the primary method. Do not modify.
     */
    protected Employee parseEmployeeProfile_BACKUP(int subjectId, WebSession s) throws ParameterNotFoundException,
            ValidationException
    {
        // The input validation can be added using a parsing component
        // or by using an inline regular expression. The parsing component
        // is the better solution.
        HttpServletRequest request = s.getRequest();
        String firstName = request.getParameter(CrossSiteScripting.FIRST_NAME);
        String lastName = request.getParameter(CrossSiteScripting.LAST_NAME);
        String ssn = request.getParameter(CrossSiteScripting.SSN);
        String title = request.getParameter(CrossSiteScripting.TITLE);
        String phone = request.getParameter(CrossSiteScripting.PHONE_NUMBER);
        String address1 = request.getParameter(CrossSiteScripting.ADDRESS1);
        String address2 = request.getParameter(CrossSiteScripting.ADDRESS2);
        int manager = Integer.parseInt(request.getParameter(CrossSiteScripting.MANAGER));
        String startDate = request.getParameter(CrossSiteScripting.START_DATE);
        int salary = Integer.parseInt(request.getParameter(CrossSiteScripting.SALARY));
        String ccn = request.getParameter(CrossSiteScripting.CCN);
        int ccnLimit = Integer.parseInt(request.getParameter(CrossSiteScripting.CCN_LIMIT));
        String disciplinaryActionDate = request.getParameter(CrossSiteScripting.DISCIPLINARY_DATE);
        String disciplinaryActionNotes = request.getParameter(CrossSiteScripting.DISCIPLINARY_NOTES);
        String personalDescription = request.getParameter(CrossSiteScripting.DESCRIPTION);

        Employee employee = new Employee(subjectId, firstName, lastName, ssn, title, phone, address1, address2,
                manager, startDate, salary, ccn, ccnLimit, disciplinaryActionDate, disciplinaryActionNotes,
                personalDescription);
        return employee;
    }

    /**
     * Lesson exercise stub: students implement validated parsing here.
     * Intentionally returns null until implemented.
     */
    protected Employee doParseEmployeeProfile(int subjectId, ParameterParser parser) throws ParameterNotFoundException,
            ValidationException
    {
        // Fix this method using the org.owasp.webgoat.session.ParameterParser class
        return null;
    }

    /** @return the action to render after this one completes */
    public String getNextPage(WebSession s)
    {
        return CrossSiteScripting.VIEWPROFILE_ACTION;
    }

    /**
     * Updates an existing employee row via a parameterized UPDATE.
     * SQL errors are reported through the session message rather than thrown.
     */
    public void changeEmployeeProfile(WebSession s, int userId, int subjectId, Employee employee)
            throws UnauthorizedException
    {
        try
        {
            // Note: The password field is ONLY set by ChangePassword
            String query = "UPDATE employee SET first_name = ?, last_name = ?, ssn = ?, title = ?, phone = ?, address1 = ?, address2 = ?,"
                    + " manager = ?, start_date = ?, ccn = ?, ccn_limit = ?,"
                    + " personal_description = ? WHERE userid = ?;";

            try
            {
                PreparedStatement ps = WebSession.getConnection(s).prepareStatement(query,
                                                                                    ResultSet.TYPE_SCROLL_INSENSITIVE,
                                                                                    ResultSet.CONCUR_READ_ONLY);
                ps.setString(1, employee.getFirstName());
                ps.setString(2, employee.getLastName());
                ps.setString(3, employee.getSsn());
                ps.setString(4, employee.getTitle());
                ps.setString(5, employee.getPhoneNumber());
                ps.setString(6, employee.getAddress1());
                ps.setString(7, employee.getAddress2());
                ps.setInt(8, employee.getManager());
                ps.setString(9, employee.getStartDate());
                ps.setString(10, employee.getCcn());
                ps.setInt(11, employee.getCcnLimit());
                ps.setString(12, employee.getPersonalDescription());
                ps.setInt(13, subjectId);
                ps.execute();
            } catch (SQLException sqle)
            {
                s.setMessage("Error updating employee profile");
                sqle.printStackTrace();
            }
        } catch (Exception e)
        {
            s.setMessage("Error updating employee profile");
            e.printStackTrace();
        }
    }

    /**
     * Pristine copy of {@link #changeEmployeeProfile} used to restore the lesson.
     */
    public void doChangeEmployeeProfile_BACKUP(WebSession s, int userId, int subjectId, Employee employee)
            throws UnauthorizedException
    {
        try
        {
            // Note: The password field is ONLY set by ChangePassword
            String query = "UPDATE employee SET first_name = ?, last_name = ?, ssn = ?, title = ?, phone = ?, address1 = ?, address2 = ?,"
                    + " manager = ?, start_date = ?, ccn = ?, ccn_limit = ?,"
                    + " personal_description = ? WHERE userid = ?;";

            try
            {
                PreparedStatement ps = WebSession.getConnection(s).prepareStatement(query,
                                                                                    ResultSet.TYPE_SCROLL_INSENSITIVE,
                                                                                    ResultSet.CONCUR_READ_ONLY);
                ps.setString(1, employee.getFirstName());
                ps.setString(2, employee.getLastName());
                ps.setString(3, employee.getSsn());
                ps.setString(4, employee.getTitle());
                ps.setString(5, employee.getPhoneNumber());
                ps.setString(6, employee.getAddress1());
                ps.setString(7, employee.getAddress2());
                ps.setInt(8, employee.getManager());
                ps.setString(9, employee.getStartDate());
                ps.setString(10, employee.getCcn());
                ps.setInt(11, employee.getCcnLimit());
                ps.setString(12, employee.getPersonalDescription());
                ps.setInt(13, subjectId);
                // BUG FIX: the original called ps.executeUpdate(query). JDBC forbids
                // invoking the String-argument execute methods on a PreparedStatement;
                // it always throws SQLException, so the update silently never ran.
                ps.executeUpdate();
            } catch (SQLException sqle)
            {
                s.setMessage("Error updating employee profile");
                sqle.printStackTrace();
            }
        } catch (Exception e)
        {
            s.setMessage("Error updating employee profile");
            e.printStackTrace();
        }
    }

    /**
     * Inserts a new employee row, choosing the next id via {@link #getNextUID}.
     * SQL errors are reported through the session message rather than thrown.
     */
    public void createEmployeeProfile(WebSession s, int userId, Employee employee) throws UnauthorizedException
    {
        try
        {
            // FIXME: Cannot choose the id because we cannot guarantee uniqueness
            int nextId = getNextUID(s);
            String query = "INSERT INTO employee VALUES ( " + nextId + ", ?,?,?,?,?,?,?,?,?,?,?,?,?,?)";

            try
            {
                PreparedStatement ps = WebSession.getConnection(s).prepareStatement(query);
                ps.setString(1, employee.getFirstName().toLowerCase());
                ps.setString(2, employee.getLastName());
                ps.setString(3, employee.getSsn());
                ps.setString(4, employee.getTitle());
                ps.setString(5, employee.getPhoneNumber());
                ps.setString(6, employee.getAddress1());
                ps.setString(7, employee.getAddress2());
                ps.setInt(8, employee.getManager());
                ps.setString(9, employee.getStartDate());
                ps.setString(10, employee.getCcn());
                ps.setInt(11, employee.getCcnLimit());
                ps.setString(12, employee.getDisciplinaryActionDate());
                ps.setString(13, employee.getDisciplinaryActionNotes());
                ps.setString(14, employee.getPersonalDescription());
                ps.execute();
            } catch (SQLException sqle)
            {
                s.setMessage("Error updating employee profile");
                sqle.printStackTrace();
            }
        } catch (Exception e)
        {
            s.setMessage("Error updating employee profile");
            e.printStackTrace();
        }
    }

    /**
     * Pristine copy of {@link #createEmployeeProfile} used to restore the lesson.
     */
    public void createEmployeeProfile_BACKUP(WebSession s, int userId, Employee employee) throws UnauthorizedException
    {
        try
        {
            // FIXME: Cannot choose the id because we cannot guarantee uniqueness
            int nextId = getNextUID(s);
            String query = "INSERT INTO employee VALUES ( " + nextId + ", ?,?,?,?,?,?,?,?,?,?,?,?,?,?)";

            try
            {
                PreparedStatement ps = WebSession.getConnection(s).prepareStatement(query);
                ps.setString(1, employee.getFirstName().toLowerCase());
                ps.setString(2, employee.getLastName());
                ps.setString(3, employee.getSsn());
                ps.setString(4, employee.getTitle());
                ps.setString(5, employee.getPhoneNumber());
                ps.setString(6, employee.getAddress1());
                ps.setString(7, employee.getAddress2());
                ps.setInt(8, employee.getManager());
                ps.setString(9, employee.getStartDate());
                ps.setString(10, employee.getCcn());
                ps.setInt(11, employee.getCcnLimit());
                ps.setString(12, employee.getDisciplinaryActionDate());
                ps.setString(13, employee.getDisciplinaryActionNotes());
                ps.setString(14, employee.getPersonalDescription());
                ps.execute();
            } catch (SQLException sqle)
            {
                s.setMessage("Error updating employee profile");
                sqle.printStackTrace();
            }
        } catch (Exception e)
        {
            s.setMessage("Error updating employee profile");
            e.printStackTrace();
        }
    }

    /**
     * Validates that the given parameter value matches the given regular expression pattern.
     *
     * @param parameter the raw request value to check
     * @param pattern   whitelist pattern the whole value must match
     * @return the parameter unchanged when it matches
     * @throws ValidationException when the value does not match the pattern
     */
    protected String validate(final String parameter, final Pattern pattern) throws ValidationException
    {
        Matcher matcher = pattern.matcher(parameter);
        if (!matcher.matches()) throw new ValidationException();
        return parameter;
    }

    /**
     * Returns max(userid)+1. NOTE(review): racy by design (see FIXME at the call
     * sites) — two concurrent inserts can pick the same id. Returns 0 on SQL error.
     */
    private int getNextUID(WebSession s)
    {
        int uid = -1;
        try
        {
            Statement statement = WebSession.getConnection(s).createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                                                                              ResultSet.CONCUR_READ_ONLY);
            ResultSet results = statement.executeQuery("select max(userid) as uid from employee");
            results.first();
            uid = results.getInt("uid");
        } catch (SQLException sqle)
        {
            sqle.printStackTrace();
            s.setMessage("Error updating employee profile");
        } catch (ClassNotFoundException e)
        {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return uid + 1;
    }
}
| |
package com.github.crazymax.crossfitreader.tray;
import java.awt.AWTException;
import java.awt.Image;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.TrayIcon.MessageType;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowFocusListener;
import java.nio.file.Files;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.smartcardio.Card;
import javax.swing.JDialog;
import javax.swing.JPopupMenu;
import com.github.crazymax.crossfitreader.processus.ConfigProc;
import org.apache.log4j.Logger;
import com.github.crazymax.crossfitreader.Main;
import com.github.crazymax.crossfitreader.booking.User;
import com.github.crazymax.crossfitreader.device.Device;
import com.github.crazymax.crossfitreader.device.DeviceListener;
import com.github.crazymax.crossfitreader.exception.BookingException;
import com.github.crazymax.crossfitreader.exception.FindDeviceException;
import com.github.crazymax.crossfitreader.processus.BookingProc;
import com.github.crazymax.crossfitreader.tray.menu.TrayMenuCardManager;
import com.github.crazymax.crossfitreader.tray.menu.TrayMenuCardUid;
import com.github.crazymax.crossfitreader.tray.menu.TrayMenuExit;
import com.github.crazymax.crossfitreader.util.Resources;
import com.github.crazymax.crossfitreader.util.Util;
import com.google.common.base.Strings;
/**
* System Tray icon notification
* @author CrazyMax
* @license MIT License
* @link https://github.com/crazy-max/crossfit-reader
*/
public class SysTray implements DeviceListener {

    private static final Logger LOGGER = Logger.getLogger(SysTray.class);

    // Lazy singleton. NOTE(review): not thread-safe; assumed to be created from a
    // single (AWT/main) thread — confirm before calling from background threads.
    private static SysTray instance = null;

    private Device device = null;
    private SystemTray systemTray;
    private TrayIcon trayIcon;
    private JPopupMenu popupMenu;
    // Invisible dialog used as focus anchor so the Swing popup closes on focus loss.
    private JDialog hiddenDialog;

    private SysTray() {
        super();
    }

    /** @return the process-wide tray instance, creating it on first use */
    public static SysTray getInstance() {
        if (instance == null) {
            instance = new SysTray();
        }
        return instance;
    }

    /**
     * Installs the tray icon and popup menu, connects the card reader, and starts
     * two watchdog schedulers (device-plug indicator, external-exit detector).
     * Exits the JVM when no reader can be found or the tray cannot be installed.
     */
    public void init() {
        systemTray = SystemTray.getSystemTray();
        trayIcon = new TrayIcon(Resources.ICON_BLUE_32.getImage(), Main.appName, null);
        trayIcon.setImageAutoSize(true);

        final TrayMenuExit trayMenuExit = new TrayMenuExit(instance);
        final TrayMenuCardManager trayMenuCardManager = new TrayMenuCardManager(instance);
        final TrayMenuCardUid trayMenuCardUid = new TrayMenuCardUid(instance);

        popupMenu = new JPopupMenu();
        popupMenu.add(trayMenuCardManager);
        popupMenu.add(trayMenuCardUid);
        popupMenu.addSeparator();
        popupMenu.add(trayMenuExit);

        trayIcon.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseReleased(final MouseEvent e) {
                if (e.isPopupTrigger()) {
                    // Anchor both the popup and the hidden focus dialog at the click point.
                    popupMenu.setLocation(e.getX(), e.getY());
                    hiddenDialog.setLocation(e.getX(), e.getY());
                    popupMenu.setInvoker(popupMenu);
                    hiddenDialog.setVisible(true);
                    popupMenu.setVisible(true);
                }
            }
        });

        try {
            systemTray.add(trayIcon);
            Util.createPidFile();

            // Init terminal
            try {
                LOGGER.info("Init " + ConfigProc.getInstance().getConfig().getTerminalName());
                device = new Device(Main.noReader ? null : Util.getTerminal());
                addCardListener();
                showInfoTooltip(Util.i18n("systray.device.found"));
                Util.playSound(Resources.SOUND_SUCCESS);
            } catch (FindDeviceException e1) {
                // No reader: notify, clean up the tray icon and terminate.
                showErrorTooltip(Util.i18n("systray.device.error"));
                Util.playSound(Resources.SOUND_KO);
                LOGGER.error(e1.getMessage(), e1);
                removeTray();
                System.exit(0);
            }

            // Check device connected: blue icon when plugged, red when unplugged.
            final ScheduledExecutorService scheduledExecutorDevice = Executors.newSingleThreadScheduledExecutor();
            scheduledExecutorDevice.scheduleAtFixedRate(new Runnable() {
                @Override
                public void run() {
                    if (Util.isTerminalPlugged()) {
                        setImage(Resources.ICON_BLUE_32.getImage());
                    } else {
                        setImage(Resources.ICON_RED_32.getImage());
                    }
                }
            }, 0, 500, TimeUnit.MILLISECONDS);

            // Check app exited outside: a deleted PID file signals this instance to quit.
            final ScheduledExecutorService scheduledExecutorExit = Executors.newSingleThreadScheduledExecutor();
            scheduledExecutorExit.scheduleAtFixedRate(new Runnable() {
                @Override
                public void run() {
                    if (!Files.exists(Util.getPidFile().toPath())) {
                        removeTray();
                        System.exit(0);
                    }
                }
            }, 0, 100, TimeUnit.MILLISECONDS);
        } catch (AWTException e) {
            Util.logErrorExit(Util.i18n("systray.error.load"), e);
        }

        hiddenDialog = new JDialog();
        hiddenDialog.setSize(10, 10);
        hiddenDialog.addWindowFocusListener(new WindowFocusListener() {
            @Override
            public void windowLostFocus(final WindowEvent we) {
                // Close the popup when the user clicks anywhere else.
                hiddenDialog.setVisible(false);
                popupMenu.setVisible(false);
                popupMenu.setInvoker(null);
            }

            @Override
            public void windowGainedFocus(final WindowEvent we) {
            }
        });
    }

    public Device getDevice() {
        return device;
    }

    /**
     * Card-scan callback: looks up the member for the scanned card UID and shows
     * either a welcome balloon (with sound) or an error balloon when the card is
     * unknown, the member has no booking, or the booking service fails.
     */
    @Override
    public void cardInserted(final Card card, final String cardUid) {
        String errorMsg = null;
        User userScan = null;
        try {
            userScan = BookingProc.getInstance().scanCard(cardUid);
            if (userScan == null) {
                errorMsg = String.format(Util.i18n("systray.scan.unknowncard"), cardUid);
                // BUG FIX: original log text said "is assigned to any member" in the
                // branch where the card is NOT assigned to any member.
                LOGGER.warn(String.format("The card %s is not assigned to any member", cardUid));
            } else if (userScan.getBookings() == null || userScan.getBookings().size() <= 0) {
                errorMsg = String.format(Util.i18n("systray.scan.noresa"), userScan.getFirstName(), userScan.getLastName());
                LOGGER.warn(String.format("%s %s has not made any reservations", userScan.getFirstName(), userScan.getLastName()));
            }
        } catch (BookingException e) {
            errorMsg = e.getMessage();
            LOGGER.error(e.getMessage(), e);
        }

        if (!Strings.isNullOrEmpty(errorMsg)) {
            showErrorTooltip(errorMsg);
            Util.playSound(Resources.SOUND_MIRROR_SHATTERING);
            return;
        }

        showInfoTooltip(String.format(Util.i18n("systray.scan.welcome"), userScan.getFirstName(), userScan.getLastName()));
        LOGGER.info(String.format("Good CrossFit workout %s %s !", userScan.getFirstName(), userScan.getLastName()));
        Util.playSound(Resources.SOUND_CASH_REGISTER);
    }

    @Override
    public void cardRemoved() {
        // N/A
    }

    public void addCardListener() {
        device.addCardListener(this);
    }

    public void removeCardListener() {
        device.removeCardListener(this);
    }

    /** Removes the icon from the system tray; call sites exit the JVM afterwards. */
    public void removeTray() {
        systemTray.remove(trayIcon);
        trayIcon = null;
    }

    public void setImage(final Image image) {
        trayIcon.setImage(image);
    }

    private void showTooltip(final String message, final MessageType type) {
        trayIcon.displayMessage(Main.appName, message, type);
    }

    public void showInfoTooltip(final String message) {
        showTooltip(message, MessageType.INFO);
    }

    public void showWarningTooltip(final String message) {
        showTooltip(message, MessageType.WARNING);
    }

    public void showErrorTooltip(final String message) {
        showTooltip(message, MessageType.ERROR);
    }
}
| |
package org.kuali.kpme.edo.api.group;
import java.io.Serializable;
import java.util.Collection;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.kuali.kpme.edo.api.group.EdoGroupDefinition.Builder;
import org.kuali.rice.core.api.CoreConstants;
import org.kuali.rice.core.api.mo.AbstractDataTransferObject;
import org.kuali.rice.core.api.mo.ModelBuilder;
import org.w3c.dom.Element;
@XmlRootElement(name = EdoRoleResponsibility.Constants.ROOT_ELEMENT_NAME)
@XmlAccessorType(XmlAccessType.NONE)
@XmlType(name = EdoRoleResponsibility.Constants.TYPE_NAME, propOrder = {
EdoRoleResponsibility.Elements.KIM_FORCE_ACTION,
EdoRoleResponsibility.Elements.KIM_PRIORITY,
EdoRoleResponsibility.Elements.KIM_ROLE_NAME,
EdoRoleResponsibility.Elements.KIM_RESPONSIBILITY_NAME,
EdoRoleResponsibility.Elements.KIM_ACTION_POLICY_CODE,
EdoRoleResponsibility.Elements.KIM_ACTION_TYPE_CODE,
EdoRoleResponsibility.Elements.EDO_KIM_ROLE_RESPONSIBILITY_ID,
CoreConstants.CommonElements.VERSION_NUMBER,
CoreConstants.CommonElements.OBJECT_ID,
CoreConstants.CommonElements.FUTURE_ELEMENTS
})
public final class EdoRoleResponsibility
extends AbstractDataTransferObject
implements EdoRoleResponsibilityContract
{
@XmlElement(name = Elements.KIM_FORCE_ACTION, required = false)
private final boolean kimForceAction;
@XmlElement(name = Elements.KIM_PRIORITY, required = false)
private final int kimPriority;
@XmlElement(name = Elements.KIM_ROLE_NAME, required = false)
private final String kimRoleName;
@XmlElement(name = Elements.KIM_RESPONSIBILITY_NAME, required = false)
private final String kimResponsibilityName;
@XmlElement(name = Elements.KIM_ACTION_POLICY_CODE, required = false)
private final String kimActionPolicyCode;
@XmlElement(name = Elements.KIM_ACTION_TYPE_CODE, required = false)
private final String kimActionTypeCode;
@XmlElement(name = Elements.EDO_KIM_ROLE_RESPONSIBILITY_ID, required = false)
private final String edoKimRoleResponsibilityId;
@XmlElement(name = CoreConstants.CommonElements.VERSION_NUMBER, required = false)
private final Long versionNumber;
@XmlElement(name = CoreConstants.CommonElements.OBJECT_ID, required = false)
private final String objectId;
@SuppressWarnings("unused")
@XmlAnyElement
private final Collection<Element> _futureElements = null;
/**
* Private constructor used only by JAXB.
*
*/
    private EdoRoleResponsibility() {
        // All fields get neutral defaults; JAXB overwrites them via field access
        // during unmarshalling.
        this.kimForceAction = false;
        this.kimPriority = 0;
        this.kimRoleName = null;
        this.kimResponsibilityName = null;
        this.kimActionPolicyCode = null;
        this.kimActionTypeCode = null;
        this.edoKimRoleResponsibilityId = null;
        this.versionNumber = null;
        this.objectId = null;
    }
    // Copies every field from the builder; instances are immutable afterwards.
    // Note: "Builder" here resolves to the nested class below, which shadows the
    // EdoGroupDefinition.Builder import at the top of the file.
    private EdoRoleResponsibility(Builder builder) {
        this.kimForceAction = builder.isKimForceAction();
        this.kimPriority = builder.getKimPriority();
        this.kimRoleName = builder.getKimRoleName();
        this.kimResponsibilityName = builder.getKimResponsibilityName();
        this.kimActionPolicyCode = builder.getKimActionPolicyCode();
        this.kimActionTypeCode = builder.getKimActionTypeCode();
        this.edoKimRoleResponsibilityId = builder.getEdoKimRoleResponsibilityId();
        this.versionNumber = builder.getVersionNumber();
        this.objectId = builder.getObjectId();
    }
    // --- EdoRoleResponsibilityContract accessors (plain field reads; may return
    // null for the String/Long properties populated only by JAXB or the Builder) ---
    @Override
    public boolean isKimForceAction() {
        return this.kimForceAction;
    }

    @Override
    public int getKimPriority() {
        return this.kimPriority;
    }

    @Override
    public String getKimRoleName() {
        return this.kimRoleName;
    }

    @Override
    public String getKimResponsibilityName() {
        return this.kimResponsibilityName;
    }

    @Override
    public String getKimActionPolicyCode() {
        return this.kimActionPolicyCode;
    }

    @Override
    public String getKimActionTypeCode() {
        return this.kimActionTypeCode;
    }

    @Override
    public String getEdoKimRoleResponsibilityId() {
        return this.edoKimRoleResponsibilityId;
    }

    @Override
    public Long getVersionNumber() {
        return this.versionNumber;
    }

    @Override
    public String getObjectId() {
        return this.objectId;
    }
/**
* A builder which can be used to construct {@link EdoRoleResponsibility} instances. Enforces the constraints of the {@link EdoRoleResponsibilityContract}.
*
*/
    public final static class Builder
        implements Serializable, EdoRoleResponsibilityContract, ModelBuilder
    {

        private boolean kimForceAction;
        private int kimPriority;
        private String kimRoleName;
        private String kimResponsibilityName;
        private String kimActionPolicyCode;
        private String kimActionTypeCode;
        private String edoKimRoleResponsibilityId;
        private Long versionNumber;
        private String objectId;

        private Builder() {
            // TODO modify this constructor as needed to pass any required values and invoke the appropriate 'setter' methods
        }

        /** Creates an empty builder; no field is required up front. */
        public static Builder create() {
            // TODO modify as needed to pass any required values and add them to the signature of the 'create' method
            return new Builder();
        }

        private Builder(String kimRoleName, String kimResponsibilityName, String kimActionTypeCode) {
            // TODO modify this constructor as needed to pass any required values and invoke the appropriate 'setter' methods
            setKimRoleName(kimRoleName);
            setKimResponsibilityName(kimResponsibilityName);
            setKimActionTypeCode(kimActionTypeCode);
        }

        /** Creates a builder pre-populated with role, responsibility and action-type. */
        public static Builder create(String kimRoleName, String kimResponsibilityName, String kimActionTypeCode) {
            // TODO modify as needed to pass any required values and add them to the signature of the 'create' method
            return new Builder(kimRoleName, kimResponsibilityName, kimActionTypeCode);
        }

        /**
         * Creates a builder that copies every property of the given contract.
         *
         * @throws IllegalArgumentException if contract is null
         */
        public static Builder create(EdoRoleResponsibilityContract contract) {
            if (contract == null) {
                throw new IllegalArgumentException("contract was null");
            }
            // TODO if create() is modified to accept required parameters, this will need to be modified
            Builder builder = create();
            builder.setKimForceAction(contract.isKimForceAction());
            builder.setKimPriority(contract.getKimPriority());
            builder.setKimRoleName(contract.getKimRoleName());
            builder.setKimResponsibilityName(contract.getKimResponsibilityName());
            builder.setKimActionPolicyCode(contract.getKimActionPolicyCode());
            builder.setKimActionTypeCode(contract.getKimActionTypeCode());
            builder.setEdoKimRoleResponsibilityId(contract.getEdoKimRoleResponsibilityId());
            builder.setVersionNumber(contract.getVersionNumber());
            builder.setObjectId(contract.getObjectId());
            return builder;
        }

        /** Builds the immutable {@link EdoRoleResponsibility} from the current state. */
        public EdoRoleResponsibility build() {
            return new EdoRoleResponsibility(this);
        }

        @Override
        public boolean isKimForceAction() {
            return this.kimForceAction;
        }

        @Override
        public int getKimPriority() {
            return this.kimPriority;
        }

        @Override
        public String getKimRoleName() {
            return this.kimRoleName;
        }

        @Override
        public String getKimResponsibilityName() {
            return this.kimResponsibilityName;
        }

        @Override
        public String getKimActionPolicyCode() {
            return this.kimActionPolicyCode;
        }

        @Override
        public String getKimActionTypeCode() {
            return this.kimActionTypeCode;
        }

        @Override
        public String getEdoKimRoleResponsibilityId() {
            return this.edoKimRoleResponsibilityId;
        }

        @Override
        public Long getVersionNumber() {
            return this.versionNumber;
        }

        @Override
        public String getObjectId() {
            return this.objectId;
        }

        // --- setters: no validation is performed (the generated TODOs below mark
        // where it would go); null values are accepted and passed through ---
        public void setKimForceAction(boolean kimForceAction) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.kimForceAction = kimForceAction;
        }

        public void setKimPriority(int kimPriority) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.kimPriority = kimPriority;
        }

        public void setKimRoleName(String kimRoleName) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.kimRoleName = kimRoleName;
        }

        public void setKimResponsibilityName(String kimResponsibilityName) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.kimResponsibilityName = kimResponsibilityName;
        }

        public void setKimActionPolicyCode(String kimActionPolicyCode) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.kimActionPolicyCode = kimActionPolicyCode;
        }

        public void setKimActionTypeCode(String kimActionTypeCode) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.kimActionTypeCode = kimActionTypeCode;
        }

        public void setEdoKimRoleResponsibilityId(String edoKimRoleResponsibilityId) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.edoKimRoleResponsibilityId = edoKimRoleResponsibilityId;
        }

        public void setVersionNumber(Long versionNumber) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.versionNumber = versionNumber;
        }

        public void setObjectId(String objectId) {
            // TODO add validation of input value if required and throw IllegalArgumentException if needed
            this.objectId = objectId;
        }

    }
/**
 * Defines some internal constants used by this class.
*
*/
static class Constants {
// XML root element name used when marshalling this object
final static String ROOT_ELEMENT_NAME = "edoRoleResponsibility";
// Schema type name associated with this object
final static String TYPE_NAME = "EdoRoleResponsibilityType";
}
/**
* A private class which exposes constants which define the XML element names to use when this object is marshalled to XML.
*
*/
static class Elements {
// XML element names for each marshalled field; each constant's value is the
// element name corresponding to the like-named builder property above.
final static String KIM_FORCE_ACTION = "kimForceAction";
final static String KIM_PRIORITY = "kimPriority";
final static String KIM_ROLE_NAME = "kimRoleName";
final static String KIM_RESPONSIBILITY_NAME = "kimResponsibilityName";
final static String KIM_ACTION_POLICY_CODE = "kimActionPolicyCode";
final static String KIM_ACTION_TYPE_CODE = "kimActionTypeCode";
final static String EDO_KIM_ROLE_RESPONSIBILITY_ID = "edoKimRoleResponsibilityId";
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive.parquet.predicate;
import com.facebook.presto.hive.HiveColumnHandle;
import com.facebook.presto.hive.parquet.ParquetCodecFactory;
import com.facebook.presto.hive.parquet.ParquetCodecFactory.BytesDecompressor;
import com.facebook.presto.hive.parquet.predicate.TupleDomainParquetPredicate.ColumnReference;
import com.facebook.presto.spi.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.primitives.Ints;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;
import parquet.bytes.BytesInput;
import parquet.column.ColumnDescriptor;
import parquet.column.Encoding;
import parquet.column.page.DictionaryPage;
import parquet.column.statistics.Statistics;
import parquet.format.DictionaryPageHeader;
import parquet.format.PageHeader;
import parquet.format.PageType;
import parquet.format.Util;
import parquet.hadoop.metadata.BlockMetaData;
import parquet.hadoop.metadata.ColumnChunkMetaData;
import parquet.hadoop.metadata.ColumnPath;
import parquet.hadoop.metadata.CompressionCodecName;
import parquet.schema.MessageType;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Static helpers for building Parquet predicates and for deciding whether a
 * Parquet row group (block) can be skipped for a given effective predicate.
 */
public final class ParquetPredicateUtils
{
// definition level, repetition level, value
private static final int PARQUET_DATA_TRIPLE = 3;
// Utility class: not instantiable.
private ParquetPredicateUtils()
{
}
/**
 * Builds a TupleDomainParquetPredicate over the given columns.
 * Partition-key columns are skipped; for each remaining column the matching
 * parquet leaf-field index and its engine Type are resolved.
 */
public static ParquetPredicate buildParquetPredicate(
List<HiveColumnHandle> columns,
TupleDomain<HiveColumnHandle> effectivePredicate,
MessageType fileSchema,
TypeManager typeManager)
{
ImmutableList.Builder<ColumnReference<HiveColumnHandle>> columnReferences = ImmutableList.builder();
for (HiveColumnHandle column : columns) {
if (!column.isPartitionKey()) {
int parquetFieldIndex = lookupParquetColumn(column, fileSchema);
Type type = typeManager.getType(column.getTypeSignature());
columnReferences.add(new ColumnReference<>(column, parquetFieldIndex, type));
}
}
return new TupleDomainParquetPredicate<>(effectivePredicate, columnReferences.build());
}
/**
 * Finds the index of the parquet leaf column whose innermost path element equals
 * the Hive column's name.
 * NOTE(review): when no leaf column matches, this returns
 * fileSchema.getColumns().size() (one past the last valid index) -- confirm that
 * callers tolerate that value.
 */
private static int lookupParquetColumn(HiveColumnHandle column, MessageType fileSchema)
{
// map column has more than one primitive columns in parquet file
// the column ordinal number does not always equal to hive column index
// need to do a look up in parquet file schema columns
int parquetFieldIndex = 0;
for (; parquetFieldIndex < fileSchema.getColumns().size(); parquetFieldIndex++) {
String[] path = fileSchema.getColumns().get(parquetFieldIndex).getPath();
String columnName = path[path.length - 1];
if (column.getName().equals(columnName)) {
break;
}
}
return parquetFieldIndex;
}
/**
 * Returns true when the block may contain matching rows: first checks the
 * per-column statistics, and only if those pass, the dictionary pages.
 */
public static boolean predicateMatches(ParquetPredicate parquetPredicate,
BlockMetaData block,
Configuration configuration,
Path path,
MessageType requestedSchema,
TupleDomain<HiveColumnHandle> effectivePredicate)
{
Map<Integer, Statistics<?>> columnStatistics = getStatisticsByColumnOrdinal(block);
if (!parquetPredicate.matches(block.getRowCount(), columnStatistics)) {
return false;
}
Map<Integer, ParquetDictionaryDescriptor> dictionaries = getDictionariesByColumnOrdinal(block, path, configuration, requestedSchema, effectivePredicate);
return parquetPredicate.matches(dictionaries);
}
// Collects the non-null column statistics of a block, keyed by column ordinal.
private static Map<Integer, Statistics<?>> getStatisticsByColumnOrdinal(BlockMetaData blockMetadata)
{
ImmutableMap.Builder<Integer, Statistics<?>> statistics = ImmutableMap.builder();
for (int ordinal = 0; ordinal < blockMetadata.getColumns().size(); ordinal++) {
Statistics<?> columnStatistics = blockMetadata.getColumns().get(ordinal).getStatistics();
if (columnStatistics != null) {
statistics.put(ordinal, columnStatistics);
}
}
return statistics.build();
}
/**
 * Reads the dictionary page of every predicate column whose chunk appears to be
 * entirely dictionary-encoded, keyed by column ordinal. I/O failures are
 * deliberately swallowed: the column is simply left without a dictionary entry,
 * so no dictionary-based pruning happens for it.
 */
private static Map<Integer, ParquetDictionaryDescriptor> getDictionariesByColumnOrdinal(
BlockMetaData blockMetadata,
Path path,
Configuration configuration,
MessageType requestedSchema,
TupleDomain<HiveColumnHandle> effectivePredicate)
{
// todo should we call release?
ParquetCodecFactory codecFactory = new ParquetCodecFactory(configuration);
ImmutableMap.Builder<Integer, ParquetDictionaryDescriptor> dictionaries = ImmutableMap.builder();
for (int ordinal = 0; ordinal < blockMetadata.getColumns().size(); ordinal++) {
ColumnChunkMetaData columnChunkMetaData = blockMetadata.getColumns().get(ordinal);
for (int i = 0; i < requestedSchema.getColumns().size(); i++) {
ColumnDescriptor columnDescriptor = requestedSchema.getColumns().get(i);
if (isColumnPredicate(columnDescriptor, effectivePredicate) &&
columnChunkMetaData.getPath().equals(ColumnPath.get(columnDescriptor.getPath())) &&
isOnlyDictionaryEncodingPages(columnChunkMetaData.getEncodings())) {
DictionaryPage dictionaryPage;
// Read the whole column chunk into memory and try to parse its first page
// as the dictionary page.
try (FSDataInputStream inputStream = path.getFileSystem(configuration).open(path)) {
inputStream.seek(columnChunkMetaData.getStartingPos());
int totalSize = Ints.checkedCast(columnChunkMetaData.getTotalSize());
byte[] buffer = new byte[totalSize];
inputStream.readFully(buffer);
dictionaryPage = readDictionaryPage(buffer, codecFactory, columnChunkMetaData.getCodec());
dictionaries.put(ordinal, new ParquetDictionaryDescriptor(columnDescriptor, dictionaryPage));
}
catch (IOException ignored) {
}
break;
}
}
}
return dictionaries.build();
}
/**
 * Parses and decompresses the dictionary page at the start of a column chunk;
 * returns null when the first page is not a dictionary page or reading fails.
 */
private static DictionaryPage readDictionaryPage(byte[] data, ParquetCodecFactory codecFactory, CompressionCodecName codecName)
{
try {
ByteArrayInputStream inputStream = new ByteArrayInputStream(data);
PageHeader pageHeader = Util.readPageHeader(inputStream);
if (pageHeader.type != PageType.DICTIONARY_PAGE) {
return null;
}
// todo this wrapper is not needed
// Page payload starts right after the header: offset = bytes already consumed
// from the stream (data.length - remaining).
BytesInput compressedData = BytesInput.from(data, data.length - inputStream.available(), pageHeader.getCompressed_page_size());
BytesDecompressor decompressor = codecFactory.getDecompressor(codecName);
BytesInput decompressed = decompressor.decompress(compressedData, pageHeader.getUncompressed_page_size());
DictionaryPageHeader dicHeader = pageHeader.getDictionary_page_header();
Encoding encoding = Encoding.valueOf(dicHeader.getEncoding().name());
int dictionarySize = dicHeader.getNum_values();
return new DictionaryPage(decompressed, dictionarySize, encoding);
}
catch (IOException ignored) {
return null;
}
}
// True when the effective predicate constrains a Hive column whose name equals the
// innermost path element of this parquet leaf column.
private static boolean isColumnPredicate(ColumnDescriptor columnDescriptor, TupleDomain<HiveColumnHandle> effectivePredicate)
{
String[] columnPath = columnDescriptor.getPath();
String columnName = columnPath[columnPath.length - 1];
return effectivePredicate.getDomains().keySet().stream()
.map(HiveColumnHandle::getName)
.anyMatch(columnName::equals);
}
// Heuristic: at most PARQUET_DATA_TRIPLE distinct encodings (definition level,
// repetition level, values) with at least one dictionary-based encoding present.
// NOTE(review): anyMatch cannot prove that *all* data pages are dictionary-encoded;
// confirm the approximation is acceptable until PageEncodingStats is available.
private static boolean isOnlyDictionaryEncodingPages(Set<Encoding> encodings)
{
// more than 1 encodings for values
if (encodings.size() > PARQUET_DATA_TRIPLE) {
return false;
}
// definition level, repetition level never have dictionary encoding
// TODO: add PageEncodingStats in ColumnChunkMetaData
return encodings.stream().anyMatch(Encoding::usesDictionary);
}
}
| |
package testingharness;
import com.jcraft.jsch.JSchException;
import com.puppycrawl.tools.checkstyle.api.CheckstyleException;
import configuration.ConfigurationLoader;
import org.apache.commons.io.FilenameUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import publicinterfaces.CategoryNotInReportException;
import publicinterfaces.ITestSetting;
import publicinterfaces.Report;
import publicinterfaces.ReportResult;
import publicinterfaces.Severity;
import publicinterfaces.StaticOptions;
import publicinterfaces.Status;
//changed here
import dynamictesting.TestsApi;
import uk.ac.cam.cl.dtg.teaching.containers.api.exceptions.GitRepositoryCloneException;
import uk.ac.cam.cl.dtg.teaching.containers.api.exceptions.InvalidNameException;
import uk.ac.cam.cl.dtg.teaching.containers.api.exceptions.TestInstanceNotFoundException;
import uk.ac.cam.cl.dtg.teaching.containers.api.exceptions.TestNotFoundException;
//changed here
import dynamictesting.TestInstance;
import uk.ac.cam.cl.dtg.teaching.containers.api.model.TestStep;
import uk.ac.cam.cl.dtg.teaching.exceptions.SerializableException;
import uk.ac.cam.cl.git.api.RepositoryNotFoundException;
import uk.ac.cam.cl.git.interfaces.WebInterface;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.ws.rs.InternalServerErrorException;
/**
* Runs all the static and dynamic analysis tests for a given tick, and produces a report,
* stored in memory until the object is destroyed
*
* @author as2388
* @author kls82
*
*/
/**
 * Runs all the static and dynamic analysis tests for a given tick, and produces a report,
 * stored in memory until the object is destroyed.
 *
 * @author as2388
 * @author kls82
 */
public class Tester {
    static Logger log = LoggerFactory.getLogger(Tester.class); //initialise logger

    private Report report;              //Report object into which all the report items will ultimately go
    private Exception failCause = null; //if the report fails, save the cause so it can be thrown when the report is requested
    private String repoName;
    //Paths (within the repository) of the files on which the tests should be run
    private List<String> filesToTest;
    private List<StaticOptions> testingQueue = null; //static (Checkstyle) test configurations; may be null
    private boolean dynamicPass;        //outcome of the dynamic phase; static checks only run when true

    /**
     * Creates a new Tester.
     *
     * @param tests       static test configurations to run (may be null for none)
     * @param filesToTest repository paths of the files to analyse
     * @param repoName    name of the git repository under test
     * @param commitId    commit to analyse
     */
    public Tester(List<StaticOptions> tests, List<String> filesToTest, String repoName, String commitId) {
        this.testingQueue = tests;
        this.filesToTest = filesToTest;
        this.report = new Report(repoName, commitId);
        this.repoName = repoName;
    }

    /**
     * Runs all tests required by the tick on all files required to be tested by the tick.
     * Note: only runs static analysis if dynamic analysis succeeded.
     */
    public void runTests(String crsId, String tickId, String commitId, WebInterface gitProxy, Status status, TestsApi testerProxyTest, String dynamicContainerId, String dynamicTestId)
    {
        log.info(crsId + " " + tickId + " " + commitId + ": Tick analysis started");
        try {
            if (testingQueue != null) {
                int noOfTests = testingQueue.size() + 2;
                report.setNoOfTests(noOfTests);
                status.setCurrentPositionInQueue(0);
                status.setMaxProgress(noOfTests + 1);
                status.setInfo("Loading tests");
            }
            else {
                int noOfTests = 2;
                report.setNoOfTests(noOfTests);
                status.setCurrentPositionInQueue(0);
                status.setMaxProgress(noOfTests + 1);
                status.setInfo("Loading tests");
            }
            String repo = ConfigurationLoader.getConfig().getRepoTemplate() + this.repoName + ".git";
            if (dynamicContainerId != null && dynamicTestId != null) {
                log.info(crsId + " " + tickId + " " + commitId + ": Running dynamic tests");
                runDynamicTests(testerProxyTest, crsId, tickId, dynamicContainerId, dynamicTestId, status, repo, gitProxy);
                log.info(crsId + " " + tickId + " " + commitId + ": Dynamic tests complete");
            }
            //TODO: will need to fill in checkstyles error bit
            else {
                log.info(crsId + " " + tickId + " " + commitId + ": No dynamic tests specified");
                dynamicPass = true;
            }
            //now run checkstyles if required
            // FIX: testingQueue is explicitly allowed to be null (see the branch above);
            // the original dereferenced it unconditionally here and could throw a
            // NullPointerException when no static tests were configured.
            if (dynamicPass && testingQueue != null && testingQueue.size() > 0)
            {
                log.info(crsId + " " + tickId + " " + commitId + ": Running static checks");
                runStaticJavaTests(commitId, gitProxy, status);
                log.info(crsId + " " + tickId + " " + commitId + ": Static checks complete");
            }
            else {
                log.info(crsId + " " + tickId + " " + commitId + ": Dynamic tests failed");
            }
            if (this.failCause == null) {
                report.calculateProblemStatuses();
            }
            log.info("Tick analysis finished");
        }
        catch (CheckstyleException | IOException | RepositoryNotFoundException e)
        {
            log.error("Tick analysis failed. Exception message: " + e.getMessage());
            report.setTestResult(ReportResult.UNDEFINED);
            failCause = e;
        }
        finally
        {
            //TODO: Is this a viable option? error will appear as ticker comment but won't be able to sign up so
            //shouldn't be overwritten
            if (this.failCause != null) {
                this.report.setTickerComments("Test failed to complete, error: " + this.failCause.getMessage());
            }
            Report reportToAdd = this.report;
            TestService.getDatabase().addReport(crsId, tickId, reportToAdd);
            status.complete();
        }
    }

    /**
     * Runs all dynamic analysis tests required by the tick: starts a test container,
     * polls its status until it leaves the uninitialised/starting/running states, then
     * copies the results into the report and tears the container down.
     */
    private void runDynamicTests(TestsApi testerProxyTest, String crsId, String tickId, String dynamicContainerId, String dynamicTestId, Status status, String repo, WebInterface gitProxy)
    {
        String privateKey = "";
        try {
            privateKey = gitProxy.getPrivateKey(ConfigurationLoader.getConfig().getSecurityToken(), crsId);
        } catch (IOException | JSchException e1) {
            // Key retrieval failure is logged but not treated as fatal here; startTest
            // below will fail if the key was actually required.
            log.error("Failed to obtain private key for " + crsId, e1);
        }
        log.info(tickId + " " + crsId + ": started dynamic analysis");
        try {
            String dynamicTestStatus = "";
            TestInstance testInstance = testerProxyTest.startTest(crsId, dynamicContainerId, dynamicTestId, repo, privateKey);
            String containerId = testInstance.getContainerID();
            dynamicTestStatus = testInstance.getStatus();
            int progress = 0;
            status.setProgress(1);
            status.setInfo("Compiling code");
            log.info("Compiling ");
            //poll status until the container finishes
            while (dynamicTestStatus.equals(TestInstance.STATUS_UNINITIALIZED) || dynamicTestStatus.equals(TestInstance.STATUS_STARTING) || dynamicTestStatus.equals(TestInstance.STATUS_RUNNING)) {
                dynamicTestStatus = testerProxyTest.getTestStatus(crsId, containerId).getStatus();
                log.info(tickId + " " + crsId + ": status poll = " + dynamicTestStatus);
                List<TestStep> results = testerProxyTest.getTestStatus(crsId, containerId).getResults();
                if (!results.isEmpty()) {
                    progress = testerProxyTest.getTestStatus(crsId, containerId).getResults().size();
                    if (results.get(progress - 1).getName().equals("Compilation")) {
                        log.info("Compiling");
                    }
                    else {
                        log.info("Running correctness tests");
                        status.setProgress(2);
                        status.setInfo("Running correctness tests");
                    }
                }
                delay(1000);
            }
            //test is finished, find result
            // FIX: the original compared the Status object itself to the STATUS_FAILED
            // String constant (status.equals(TestInstance.STATUS_FAILED)), which is always
            // false and therefore reported every run as passed. Compare the polled test
            // status string instead.
            if (dynamicTestStatus.equals(TestInstance.STATUS_FAILED)) {
                log.info(tickId + " " + crsId + ": failed dynamic tests");
                dynamicPass = false;
            }
            else {
                dynamicPass = true;
                log.info(tickId + " " + crsId + ": passed dynamic tests");
            }
            log.info(tickId + " " + crsId + ": putting dynamic test results in report");
            unpackResults(testerProxyTest.getTestStatus(crsId, containerId).getException(), testerProxyTest.getTestStatus(crsId, containerId).getResults());
            testerProxyTest.removeTest(crsId, containerId);
        }
        catch (InternalServerErrorException | GitRepositoryCloneException | InvalidNameException | TestNotFoundException | TestInstanceNotFoundException e) {
            //TODO change back to error
            log.info("Dynamic analysis failed. Exception message: " + e.getMessage());
            report.setTestResult(ReportResult.UNDEFINED);
            failCause = e;
        }
        log.info("Dynamic analysis complete");
    }

    /**
     * Copies the outcome of each dynamic test step into the report. If the tester threw
     * an exception, the report is marked UNDEFINED and the message recorded instead.
     *
     * @param exception exception reported by the dynamic tester, or null on success
     * @param results   per-step results from the dynamic tester
     */
    private void unpackResults(SerializableException exception, List<TestStep> results) {
        if (exception == null) {
            //tests ran successfully so put results in report
            log.info("There was no exception from the tester, writing report as normal");
            for (TestStep result : results) {
                log.info("Adding " + result.getName());
                if (result.getStatus().equals(TestStep.STATUS_PASS)) {
                    log.info(result.getName() + " result = pass");
                    report.addProblem(result.getName(), Severity.WARNING);
                }
                else {
                    if (result.getStatus().equals(TestStep.STATUS_FAIL)) {
                        log.info(result.getName() + " result = error");
                        report.addProblem(result.getName(), Severity.ERROR);
                    }
                    else if (result.getStatus().equals(TestStep.STATUS_WARNING)) {
                        log.info(result.getName() + " result = warning");
                        report.addProblem(result.getName(), Severity.WARNING);
                    }
                    log.info(result.getName() + " writing details for warning/error");
                    String message = "";
                    for (String m : result.getMessages()) {
                        message += m + "\n";
                    }
                    message += "Expected result: " + result.getExpected() + "\n";
                    message += "Obtained result: " + result.getActual() + "\n";
                    try {
                        report.addDetail(result.getName(), result.getFileName(), (int) result.getStartLine(), message);
                    }
                    catch (CategoryNotInReportException e) {
                        //should never happen: the category was added just above
                        log.error("category not found in report");
                        e.printStackTrace();
                    }
                }
            }
        }
        else {
            //there was an exception so put what went wrong in report
            log.info("Dynamic tester threw an exception, see comment in report for details");
            report.setTestResult(ReportResult.UNDEFINED);
            report.setTickerComments(exception.getMessage());
            dynamicPass = false;
        }
    }

    /**
     * Runs all static analysis tests required by the tick on the .java files.
     * @throws CheckstyleException Thrown if Checkstyle fails to run
     * @throws IOException Thrown if creating/making temp files fails
     * @throws RepositoryNotFoundException Thrown by git API
     */
    private void runStaticJavaTests(String commitId, WebInterface gitProxy, Status status) throws CheckstyleException, IOException, RepositoryNotFoundException {
        List<String> javaFiles = getStaticTestFiles(this.filesToTest, "java");
        List<File> fileList = new LinkedList<>();
        Map<String, String> filePathMap = new HashMap<>();
        //copy each file at the given commit out of git into a local temp file for Checkstyle
        for (String file : javaFiles) {
            log.debug("obtaining " + file + " version " + commitId + " from " + repoName + " to test");
            String contents = gitProxy.getFile(Security.SecurityManager.getSecurityToken(), file, commitId, repoName);
            log.debug("obtained file " + file + " version " + commitId + " from " + repoName + " to test");
            String fileName = file.substring(0, file.lastIndexOf("."));
            File javaFile = File.createTempFile(fileName, ".java");
            log.info("file temporarily stored at: " + javaFile.getAbsolutePath());
            //write string to temp file
            log.info("writing data to " + javaFile.getAbsolutePath());
            // FIX: try-with-resources closes the stream even when the write throws
            // (the original leaked the FileOutputStream on exception).
            try (FileOutputStream output = new FileOutputStream(javaFile.getAbsolutePath())) {
                byte[] bytes = contents.getBytes();
                output.write(bytes);
                output.flush();
            }
            log.info("Data transferred to " + javaFile.getAbsolutePath());
            if (javaFile.exists()) {
                fileList.add(javaFile);
                filePathMap.put(javaFile.getAbsolutePath(), file);
            }
            else {
                log.warn("could not find file " + javaFile.getAbsolutePath());
                throw new IOException("Could not find file: " + file);
            }
        }
        //run static analysis on each file
        for (StaticOptions o : this.testingQueue) {
            delay(ConfigurationLoader.getConfig().getTestDelay());
            status.addProgress();
            runStaticAnalysis(o, fileList, commitId, filePathMap);
        }
        //try to delete all the temp files that were created
        for (File javaFile : fileList) {
            if (javaFile.delete()) {
                log.info("Deleted temp file: " + javaFile.getAbsolutePath());
            }
            else {
                log.warn("Failed to delete temp file: " + javaFile.getAbsolutePath());
            }
        }
    }

    /**
     * Extracts only the files with .{ext} extensions for the static checks.
     * @param files list of files from the corresponding repo
     * @param ext extension (without the leading dot) used to filter the files
     * @return List containing only .{ext} files
     */
    public List<String> getStaticTestFiles(List<String> files, String ext)
    {
        List<String> toReturn = new LinkedList<>();
        String suffix = "." + ext;
        for (String s : files) {
            // FIX: the original compared the last ext.length() characters to ext, which
            // wrongly matched names like "Foojava" (no dot) and threw
            // StringIndexOutOfBoundsException on names shorter than the extension.
            if (s.endsWith(suffix)) {
                toReturn.add(s);
            }
        }
        return toReturn;
    }

    /**
     * Run CheckStyle, set up with the given config file, on all the files to which it should be applied.
     *
     * @param configFileName Path to the config file needed by CheckStyle
     * @param fileNames A list of paths to the files on which the static analyses tests are to be performed
     * @throws CheckstyleException Thrown if Checkstyle fails to run
     * @throws IOException Thrown if creating/making temp files fails
     * @throws RepositoryNotFoundException Thrown by git API
     */
    public void runStaticAnalysis(StaticOptions configFileName, List<File> fileNames, String commitId, Map<String, String> filePathMap) throws CheckstyleException, IOException, RepositoryNotFoundException {
        StaticParser.test(configFileName, fileNames, report, repoName, commitId, filePathMap);
    }

    //GETTERS

    /** @return the exception that caused analysis to fail, or null on success */
    public Exception getFailCause()
    {
        return this.failCause;
    }

    /** @return the report built by this tester */
    public Report getReport() {
        return report;
    }

    /**
     * Sleeps for the given number of milliseconds.
     * @param timeMS time to sleep, in milliseconds
     */
    private void delay(int timeMS) {
        try {
            Thread.sleep(timeMS);
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }
}
| |
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.jrcs.rcs;
import java.util.Arrays;
import java.util.StringTokenizer;
import org.apache.commons.jrcs.util.ToString;
/**
* Contains and manages a version number of the form "x(\.y)*".
* This class is NOT thread safe.
*
* @see Archive
*
* @author <a href="mailto:juanco@suigeneris.org">Juanco Anez</a>
* @version $Id: Version.java,v 1.5 2004/02/28 03:35:36 bayard Exp $
*/
public class Version
        extends ToString
        implements Cloneable, Comparable
{
    /** The version digits, most significant first; empty for an empty version. */
    private int[] numbers = new int[0];

    /**
     * Creates a new Version with a single digit version number.
     * @param major the version number
     */
    public Version(int major)
    {
        numbers = new int[]{major};
    }

    /**
     * Creates a new Version with a major.minor version number.
     * @param major the major version number
     * @param minor the minor version number
     */
    public Version(int major, int minor)
    {
        numbers = new int[]{major, minor};
    }

    /**
     * Converts an array of Integer to a Version.
     * @param num an array of Integers
     */
    public Version(Integer[] num)
    {
        numbers = new int[num.length];
        for (int i = 0; i < num.length; i++)
        {
            numbers[i] = num[i].intValue();
        }
    }

    /**
     * Converts an array of int to a Version.
     * @param num an array of int
     */
    public Version(int[] num)
    {
        numbers = (int[]) num.clone();
    }

    /**
     * Converts a string to a version.
     * @param v a string accepted by the following regular expression.
     * <code>
     * [0-9]+(.[0-9]+)*
     * </code>
     * @throws InvalidVersionNumberException if the string cannot be parsed
     */
    public Version(String v)
            throws InvalidVersionNumberException
    {
        if (v.endsWith("."))
        {
            v = v + "0";
        }
        StringTokenizer t = new StringTokenizer(v, ".");
        int count = t.countTokens();
        if (even(count) && v.endsWith(".0"))
        {
            count--;
        } // allow a .0 ending only in branch revisions
        numbers = new int[count];
        for (int i = 0; i < count; i++)
        {
            try
            {
                numbers[i] = Integer.parseInt(t.nextToken());
            }
            catch (NumberFormatException e)
            {
                throw new InvalidVersionNumberException(v);
            }
        }
    }

    /**
     * Create a new Version by copying another.
     * @param v the version to copy
     */
    public Version(Version v)
    {
        // FIX: removed a dead self-check that compared the freshly cloned array with
        // its own source via Arrays.equals (always true for a clone) and threw
        // IllegalStateException; the branch could never be taken.
        this.numbers = (int[]) v.numbers.clone();
    }

    /**
     * Create an empty version number.
     */
    public Version()
    {
    }

    public Object clone()
    {
        return new Version(this);
    }

    /**
     * Return the current version number as an array of int.
     * @return the current version number as an array of int (a defensive copy).
     */
    public int[] getNumbers()
    {
        return (int[]) this.numbers.clone();
    }

    /**
     * Compares two versions.
     * The comparison is done the usual way, i.e., 2.0 is greater than 1.99.1,
     * and 0.1.2 is greater than 0.1
     * @param ver the version to compare to.
     * @return 0 if this == ver, 1 if this greater than ver, -1 otherwise.
     */
    public int compareVersions(Version ver)
    {
        int[] nthis = this.numbers;
        int[] nthat = ver.numbers;
        int i;
        for (i = 0; i < nthis.length; i++)
        {
            if (i >= nthat.length || nthis[i] > nthat[i])
            {
                return 1;
            }
            else if (nthis[i] < nthat[i])
            {
                return -1;
            }
        }
        // all matched up to i-1
        if (nthat.length > i)
        {
            return -1;
        }
        else
        {
            return 0;
        }
    }

    /**
     * Compares two versions in lexicographical order.
     * Unlike compareVersions, this comparison is not done in
     * the way usual for version numbers. The order relationship
     * established here is the one CVS used to store nodes into archive
     * files: shorter versions come first, and equal-length versions
     * are ordered by the negated result of compareVersions.
     * @param other The version to compare to
     * @see #compareVersions
     */
    public int compareTo(Object other)
    {
        if (other == this)
        {
            return 0;
        }
        else if (!(other instanceof Version))
        {
            throw new IllegalArgumentException(other.toString());
        }
        else {
            Version otherVer = (Version) other;
            if (this.size() != otherVer.size())
            {
                return this.size() - otherVer.size();
            }
            else
            {
                return -compareVersions(otherVer);
            }
        }
    }

    /**
     * Determine if this version is greater than the given one.
     * @param ver the version to compare to.
     * @return true if compareVersions(ver) > 0
     * @see #compareVersions
     */
    public boolean isGreaterThan(Version ver)
    {
        return compareVersions(ver) > 0;
    }

    /**
     * Determine if this version is greater than or equal to the given one.
     * @param ver the version to compare to.
     * @return true if compareVersions(ver) >= 0
     * @see #compareVersions
     */
    public boolean isGreaterOrEqualThan(Version ver)
    {
        return compareVersions(ver) >= 0;
    }

    /**
     * Determine if this version is less than the given one.
     * @param ver the version to compare to.
     * @return true if compareVersions(ver) < 0
     * @see #compareVersions
     */
    public boolean isLessThan(Version ver)
    {
        return compareVersions(ver) < 0;
    }

    /**
     * Determine if this version is less than or equal to the given one.
     * @param ver the version to compare to.
     * @return true if compareVersions(ver) <= 0
     * @see #compareVersions
     */
    public boolean isLessOrEqualThan(Version ver)
    {
        return compareVersions(ver) <= 0;
    }

    /**
     * Determine if two versions are equal.
     * @param o the version to compare to
     * @return true if both versions represent the same version number
     */
    public boolean equals(Object o)
    {
        if (this == o)
        {
            return true;
        }
        else if (!(o instanceof Version))
        {
            return false;
        }
        else if (hashCode() != o.hashCode())
        {
            // fast path: equal versions always have equal string hashes
            return false;
        }
        else
        {
            return compareTo((Version) o) == 0;
        }
    }

    public int hashCode()
    {
        // derived from the dotted string form, keeping it consistent with equals()
        return toString().hashCode();
    }

    /**
     * Return the version number at the given position.
     * @param pos the position.
     * @return the number.
     */
    public int at(int pos)
    {
        return numbers[pos];
    }

    /**
     * Return the last number in the version number.
     * @return the number.
     */
    public int last()
    {
        return at(size() - 1);
    }

    /**
     * Return a new Version containing only the first {@code positions} numbers
     * of this version (clamped to the available length).
     * @param positions how many leading numbers to keep.
     * @return the truncated version.
     */
    public Version getBase(int positions)
    {
        positions = (positions > numbers.length ? numbers.length : positions);
        int[] result = new int[positions];
        System.arraycopy(this.numbers, 0, result, 0, positions);
        return new Version(result);
    }

    public Version getBranchPoint()
    {
        return getBase(size() - 1);
    }

    public Version next()
    {
        Version result = new Version(this);
        result.numbers[this.numbers.length - 1] = this.last() + 1;
        return result;
    }

    protected void __addBranch(Integer branch)
    {
        __addBranch(branch.intValue());
    }

    protected void __addBranch(int branch)
    {
        int[] newnum = new int[numbers.length + 1];
        System.arraycopy(this.numbers, 0, newnum, 0, numbers.length);
        newnum[numbers.length] = branch;
        this.numbers = newnum;
    }

    public Version newBranch(int branch)
    {
        int[] newnum = new int[numbers.length + 1];
        System.arraycopy(this.numbers, 0, newnum, 0, numbers.length);
        newnum[numbers.length] = branch;
        Version result = new Version();
        result.numbers = newnum;
        return result;
    }

    public int size()
    {
        return numbers.length;
    }

    public boolean isTrunk()
    {
        return (size() >= 1) && (size() <= 2);
    }

    public boolean isBranch()
    {
        return size() > 2;
    }

    public boolean isRevision()
    {
        return even();
    }

    /** @return true if any number in the version is non-positive. */
    public boolean isGhost()
    {
        for (int i = 0; i < size(); i++)
        {
            if (numbers[i] <= 0)
            {
                return true;
            }
        }
        return false;
    }

    public boolean even(int n)
    {
        return n % 2 == 0;
    }

    public boolean even()
    {
        return even(size());
    }

    public boolean odd(int n)
    {
        return !even(n);
    }

    public boolean odd()
    {
        return !even();
    }

    /** Appends the dotted form ("x.y.z") of this version to the buffer. */
    public void toString(StringBuffer s)
    {
        if (size() > 0)
        {
            s.append(Integer.toString(numbers[0]));
            for (int i = 1; i < numbers.length; i++)
            {
                s.append(".");
                s.append(Integer.toString(numbers[i]));
            }
        }
    }
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.servlet.mvc.method.annotation;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.annotation.AnnotationAwareOrderComparator;
import org.springframework.http.HttpStatus;
import org.springframework.http.converter.ByteArrayHttpMessageConverter;
import org.springframework.http.converter.HttpMessageConverter;
import org.springframework.http.converter.StringHttpMessageConverter;
import org.springframework.http.converter.support.AllEncompassingFormHttpMessageConverter;
import org.springframework.http.converter.xml.SourceHttpMessageConverter;
import org.springframework.ui.ModelMap;
import org.springframework.web.accept.ContentNegotiationManager;
import org.springframework.web.bind.annotation.ControllerAdvice;
import org.springframework.web.context.request.ServletWebRequest;
import org.springframework.web.method.ControllerAdviceBean;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.method.annotation.ExceptionHandlerMethodResolver;
import org.springframework.web.method.annotation.MapMethodProcessor;
import org.springframework.web.method.annotation.ModelAttributeMethodProcessor;
import org.springframework.web.method.annotation.ModelMethodProcessor;
import org.springframework.web.method.support.HandlerMethodArgumentResolver;
import org.springframework.web.method.support.HandlerMethodArgumentResolverComposite;
import org.springframework.web.method.support.HandlerMethodReturnValueHandler;
import org.springframework.web.method.support.HandlerMethodReturnValueHandlerComposite;
import org.springframework.web.method.support.ModelAndViewContainer;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.handler.AbstractHandlerMethodExceptionResolver;
/**
* An {@link AbstractHandlerMethodExceptionResolver} that resolves exceptions
* through {@code @ExceptionHandler} methods.
*
* <p>Support for custom argument and return value types can be added via
* {@link #setCustomArgumentResolvers} and {@link #setCustomReturnValueHandlers}.
* Or alternatively to re-configure all argument and return value types use
* {@link #setArgumentResolvers} and {@link #setReturnValueHandlers(List)}.
*
* @author Rossen Stoyanchev
* @author Juergen Hoeller
* @since 3.1
*/
public class ExceptionHandlerExceptionResolver extends AbstractHandlerMethodExceptionResolver
implements ApplicationContextAware, InitializingBean {
private List<HandlerMethodArgumentResolver> customArgumentResolvers;
private HandlerMethodArgumentResolverComposite argumentResolvers;
private List<HandlerMethodReturnValueHandler> customReturnValueHandlers;
private HandlerMethodReturnValueHandlerComposite returnValueHandlers;
private List<HttpMessageConverter<?>> messageConverters;
private ContentNegotiationManager contentNegotiationManager = new ContentNegotiationManager();
private final List<Object> responseBodyAdvice = new ArrayList<>();
private ApplicationContext applicationContext;
private final Map<Class<?>, ExceptionHandlerMethodResolver> exceptionHandlerCache =
new ConcurrentHashMap<>(64);
private final Map<ControllerAdviceBean, ExceptionHandlerMethodResolver> exceptionHandlerAdviceCache =
new LinkedHashMap<>();
public ExceptionHandlerExceptionResolver() {
StringHttpMessageConverter stringHttpMessageConverter = new StringHttpMessageConverter();
stringHttpMessageConverter.setWriteAcceptCharset(false); // see SPR-7316
this.messageConverters = new ArrayList<>();
this.messageConverters.add(new ByteArrayHttpMessageConverter());
this.messageConverters.add(stringHttpMessageConverter);
this.messageConverters.add(new SourceHttpMessageConverter<>());
this.messageConverters.add(new AllEncompassingFormHttpMessageConverter());
}
/**
* Provide resolvers for custom argument types. Custom resolvers are ordered
* after built-in ones. To override the built-in support for argument
* resolution use {@link #setArgumentResolvers} instead.
*/
public void setCustomArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
this.customArgumentResolvers= argumentResolvers;
}
/**
* Return the custom argument resolvers, or {@code null}.
*/
public List<HandlerMethodArgumentResolver> getCustomArgumentResolvers() {
return this.customArgumentResolvers;
}
/**
* Configure the complete list of supported argument types thus overriding
* the resolvers that would otherwise be configured by default.
*/
public void setArgumentResolvers(List<HandlerMethodArgumentResolver> argumentResolvers) {
if (argumentResolvers == null) {
this.argumentResolvers = null;
}
else {
this.argumentResolvers = new HandlerMethodArgumentResolverComposite();
this.argumentResolvers.addResolvers(argumentResolvers);
}
}
/**
* Return the configured argument resolvers, or possibly {@code null} if
* not initialized yet via {@link #afterPropertiesSet()}.
*/
public HandlerMethodArgumentResolverComposite getArgumentResolvers() {
return this.argumentResolvers;
}
/**
* Provide handlers for custom return value types. Custom handlers are
* ordered after built-in ones. To override the built-in support for
* return value handling use {@link #setReturnValueHandlers}.
*/
public void setCustomReturnValueHandlers(List<HandlerMethodReturnValueHandler> returnValueHandlers) {
this.customReturnValueHandlers = returnValueHandlers;
}
/**
* Return the custom return value handlers, or {@code null}.
*/
public List<HandlerMethodReturnValueHandler> getCustomReturnValueHandlers() {
return this.customReturnValueHandlers;
}
/**
* Configure the complete list of supported return value types thus
* overriding handlers that would otherwise be configured by default.
*/
public void setReturnValueHandlers(List<HandlerMethodReturnValueHandler> returnValueHandlers) {
if (returnValueHandlers == null) {
this.returnValueHandlers = null;
}
else {
this.returnValueHandlers = new HandlerMethodReturnValueHandlerComposite();
this.returnValueHandlers.addHandlers(returnValueHandlers);
}
}
/**
* Return the configured handlers, or possibly {@code null} if not
* initialized yet via {@link #afterPropertiesSet()}.
*/
public HandlerMethodReturnValueHandlerComposite getReturnValueHandlers() {
return this.returnValueHandlers;
}
/**
* Set the message body converters to use.
* <p>These converters are used to convert from and to HTTP requests and responses.
*/
public void setMessageConverters(List<HttpMessageConverter<?>> messageConverters) {
this.messageConverters = messageConverters;
}
/**
* Return the configured message body converters.
*/
public List<HttpMessageConverter<?>> getMessageConverters() {
return this.messageConverters;
}
/**
* Set the {@link ContentNegotiationManager} to use to determine requested media types.
* If not set, the default constructor is used.
*/
public void setContentNegotiationManager(ContentNegotiationManager contentNegotiationManager) {
this.contentNegotiationManager = contentNegotiationManager;
}
/**
* Return the configured {@link ContentNegotiationManager}.
*/
public ContentNegotiationManager getContentNegotiationManager() {
return this.contentNegotiationManager;
}
/**
* Add one or more components to be invoked after the execution of a controller
* method annotated with {@code @ResponseBody} or returning {@code ResponseEntity}
* but before the body is written to the response with the selected
* {@code HttpMessageConverter}.
*/
public void setResponseBodyAdvice(List<ResponseBodyAdvice<?>> responseBodyAdvice) {
this.responseBodyAdvice.clear();
if (responseBodyAdvice != null) {
this.responseBodyAdvice.addAll(responseBodyAdvice);
}
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
public ApplicationContext getApplicationContext() {
return this.applicationContext;
}
@Override
public void afterPropertiesSet() {
// Do this first, it may add ResponseBodyAdvice beans
initExceptionHandlerAdviceCache();
if (this.argumentResolvers == null) {
List<HandlerMethodArgumentResolver> resolvers = getDefaultArgumentResolvers();
this.argumentResolvers = new HandlerMethodArgumentResolverComposite().addResolvers(resolvers);
}
if (this.returnValueHandlers == null) {
List<HandlerMethodReturnValueHandler> handlers = getDefaultReturnValueHandlers();
this.returnValueHandlers = new HandlerMethodReturnValueHandlerComposite().addHandlers(handlers);
}
}
private void initExceptionHandlerAdviceCache() {
if (getApplicationContext() == null) {
return;
}
if (logger.isDebugEnabled()) {
logger.debug("Looking for exception mappings: " + getApplicationContext());
}
List<ControllerAdviceBean> adviceBeans = ControllerAdviceBean.findAnnotatedBeans(getApplicationContext());
AnnotationAwareOrderComparator.sort(adviceBeans);
for (ControllerAdviceBean adviceBean : adviceBeans) {
ExceptionHandlerMethodResolver resolver = new ExceptionHandlerMethodResolver(adviceBean.getBeanType());
if (resolver.hasExceptionMappings()) {
this.exceptionHandlerAdviceCache.put(adviceBean, resolver);
if (logger.isInfoEnabled()) {
logger.info("Detected @ExceptionHandler methods in " + adviceBean);
}
}
if (ResponseBodyAdvice.class.isAssignableFrom(adviceBean.getBeanType())) {
this.responseBodyAdvice.add(adviceBean);
if (logger.isInfoEnabled()) {
logger.info("Detected ResponseBodyAdvice implementation in " + adviceBean);
}
}
}
}
/**
* Return an unmodifiable Map with the {@link ControllerAdvice @ControllerAdvice}
* beans discovered in the ApplicationContext. The returned map will be empty if
* the method is invoked before the bean has been initialized via
* {@link #afterPropertiesSet()}.
*/
public Map<ControllerAdviceBean, ExceptionHandlerMethodResolver> getExceptionHandlerAdviceCache() {
return Collections.unmodifiableMap(this.exceptionHandlerAdviceCache);
}
/**
* Return the list of argument resolvers to use including built-in resolvers
* and custom resolvers provided via {@link #setCustomArgumentResolvers}.
*/
protected List<HandlerMethodArgumentResolver> getDefaultArgumentResolvers() {
List<HandlerMethodArgumentResolver> resolvers = new ArrayList<>();
// Annotation-based argument resolution
resolvers.add(new SessionAttributeMethodArgumentResolver());
resolvers.add(new RequestAttributeMethodArgumentResolver());
// Type-based argument resolution
resolvers.add(new ServletRequestMethodArgumentResolver());
resolvers.add(new ServletResponseMethodArgumentResolver());
resolvers.add(new ModelMethodProcessor());
// Custom arguments
if (getCustomArgumentResolvers() != null) {
resolvers.addAll(getCustomArgumentResolvers());
}
return resolvers;
}
/**
* Return the list of return value handlers to use including built-in and
* custom handlers provided via {@link #setReturnValueHandlers}.
*/
protected List<HandlerMethodReturnValueHandler> getDefaultReturnValueHandlers() {
List<HandlerMethodReturnValueHandler> handlers = new ArrayList<>();
// Single-purpose return value types
handlers.add(new ModelAndViewMethodReturnValueHandler());
handlers.add(new ModelMethodProcessor());
handlers.add(new ViewMethodReturnValueHandler());
handlers.add(new HttpEntityMethodProcessor(
getMessageConverters(), this.contentNegotiationManager, this.responseBodyAdvice));
// Annotation-based return value types
handlers.add(new ModelAttributeMethodProcessor(false));
handlers.add(new RequestResponseBodyMethodProcessor(
getMessageConverters(), this.contentNegotiationManager, this.responseBodyAdvice));
// Multi-purpose return value types
handlers.add(new ViewNameMethodReturnValueHandler());
handlers.add(new MapMethodProcessor());
// Custom return value types
if (getCustomReturnValueHandlers() != null) {
handlers.addAll(getCustomReturnValueHandlers());
}
// Catch-all
handlers.add(new ModelAttributeMethodProcessor(true));
return handlers;
}
/**
* Find an {@code @ExceptionHandler} method and invoke it to handle the raised exception.
*/
@Override
protected ModelAndView doResolveHandlerMethodException(HttpServletRequest request,
HttpServletResponse response, HandlerMethod handlerMethod, Exception exception) {
ServletInvocableHandlerMethod exceptionHandlerMethod = getExceptionHandlerMethod(handlerMethod, exception);
if (exceptionHandlerMethod == null) {
return null;
}
exceptionHandlerMethod.setHandlerMethodArgumentResolvers(this.argumentResolvers);
exceptionHandlerMethod.setHandlerMethodReturnValueHandlers(this.returnValueHandlers);
ServletWebRequest webRequest = new ServletWebRequest(request, response);
ModelAndViewContainer mavContainer = new ModelAndViewContainer();
try {
if (logger.isDebugEnabled()) {
logger.debug("Invoking @ExceptionHandler method: " + exceptionHandlerMethod);
}
Throwable cause = exception.getCause();
if (cause != null) {
// Expose cause as provided argument as well
exceptionHandlerMethod.invokeAndHandle(webRequest, mavContainer, exception, cause, handlerMethod);
}
else {
// Otherwise, just the given exception as-is
exceptionHandlerMethod.invokeAndHandle(webRequest, mavContainer, exception, handlerMethod);
}
}
catch (Throwable invocationEx) {
if (logger.isWarnEnabled()) {
logger.warn("Failed to invoke @ExceptionHandler method: " + exceptionHandlerMethod, invocationEx);
}
return null;
}
if (mavContainer.isRequestHandled()) {
return new ModelAndView();
}
else {
ModelMap model = mavContainer.getModel();
HttpStatus status = mavContainer.getStatus();
ModelAndView mav = new ModelAndView(mavContainer.getViewName(), model, status);
mav.setViewName(mavContainer.getViewName());
if (!mavContainer.isViewReference()) {
mav.setView((View) mavContainer.getView());
}
return mav;
}
}
/**
* Find an {@code @ExceptionHandler} method for the given exception. The default
* implementation searches methods in the class hierarchy of the controller first
* and if not found, it continues searching for additional {@code @ExceptionHandler}
* methods assuming some {@linkplain ControllerAdvice @ControllerAdvice}
* Spring-managed beans were detected.
* @param handlerMethod the method where the exception was raised (may be {@code null})
* @param exception the raised exception
* @return a method to handle the exception, or {@code null}
*/
protected ServletInvocableHandlerMethod getExceptionHandlerMethod(HandlerMethod handlerMethod, Exception exception) {
Class<?> handlerType = (handlerMethod != null ? handlerMethod.getBeanType() : null);
if (handlerMethod != null) {
ExceptionHandlerMethodResolver resolver = this.exceptionHandlerCache.get(handlerType);
if (resolver == null) {
resolver = new ExceptionHandlerMethodResolver(handlerType);
this.exceptionHandlerCache.put(handlerType, resolver);
}
Method method = resolver.resolveMethod(exception);
if (method != null) {
return new ServletInvocableHandlerMethod(handlerMethod.getBean(), method);
}
}
for (Entry<ControllerAdviceBean, ExceptionHandlerMethodResolver> entry : this.exceptionHandlerAdviceCache.entrySet()) {
if (entry.getKey().isApplicableToBeanType(handlerType)) {
ExceptionHandlerMethodResolver resolver = entry.getValue();
Method method = resolver.resolveMethod(exception);
if (method != null) {
return new ServletInvocableHandlerMethod(entry.getKey().resolveBean(), method);
}
}
}
return null;
}
}
| |
/*
* Copyright (c) 2013-2017 Cinchapi Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cinchapi.concourse.server.io;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.FileChannel.MapMode;
import java.nio.channels.OverlappingFileLockException;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.Iterator;
import java.util.Set;
import com.cinchapi.concourse.util.FileOps;
import com.cinchapi.concourse.util.Logger;
import com.cinchapi.concourse.util.ReadOnlyIterator;
import com.google.common.base.Throwables;
import com.google.common.collect.Sets;
import static com.google.common.base.Preconditions.checkState;
/**
* Interface to the underlying filesystem which provides methods to perform file
* based operations without having to deal with the annoyance of checked
* exceptions or the awkward {@link Path} API. Using this class will help
* produce more streamlined and readable code.
*
* <p>
* This class makes a lot of assumptions that are particular to Concourse
* Server, so it isn't suitable as a strictly generic utility. {@link FileOps}
* is a parent class that does contain file based utility functions that are
* applicable in situations outside of Concourse Server.
*
* @author Jeff Nelson
*/
public final class FileSystem extends FileOps {

    /**
     * Close the {@code channel} without throwing a checked exception. If, for
     * some reason, this can't be done the underlying IOException will be
     * re-thrown as a runtime exception.
     *
     * @param channel
     */
    public static void closeFileChannel(FileChannel channel) {
        try {
            channel.close();
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Copy all the bytes {@code from} one file {@code to} another.
     *
     * @param from
     * @param to
     */
    public static void copyBytes(String from, String to) {
        try {
            Files.copy(Paths.get(from), Files.newOutputStream(Paths.get(to)));
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Delete {@code directory}. If files are added to the directory while its
     * being deleted, this method will make a best effort to delete those files
     * as well.
     *
     * @param directory
     */
    public static void deleteDirectory(String directory) {
        try (DirectoryStream<Path> stream = Files
                .newDirectoryStream(Paths.get(directory))) {
            for (Path path : stream) {
                if(Files.isDirectory(path)) {
                    deleteDirectory(path.toString());
                }
                else {
                    Files.delete(path);
                }
            }
            Files.delete(Paths.get(directory));
        }
        catch (IOException e) {
            if(e.getClass() == DirectoryNotEmptyException.class) {
                // Content appeared between the listing and the final delete;
                // recurse to make a best effort at removing it too.
                Logger.warn("It appears that data was added to directory "
                        + "{} while trying to perform a deletion. "
                        + "Trying again...", directory);
                deleteDirectory(directory);
            }
            else {
                throw Throwables.propagate(e);
            }
        }
    }

    /**
     * Delete the {@code file}.
     *
     * @param file
     */
    public static void deleteFile(String file) {
        try {
            java.nio.file.Files.delete(Paths.get(file));
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Return an {@link Iterator} to traverse over all of the flat files (e.g.
     * non-subdirectories) in {@code directory}.
     *
     * @param directory
     * @return the iterator
     */
    public static Iterator<String> fileOnlyIterator(final String directory) {
        return new ReadOnlyIterator<String>() {

            // Snapshot of the directory contents at construction time; null
            // when {@code directory} does not exist or is not a directory.
            private final File[] files = new File(directory).listFiles();
            private int position = 0;
            private File next = null;
            {
                findNext();
            }

            @Override
            public boolean hasNext() {
                return next != null;
            }

            @Override
            public String next() {
                if(next == null) {
                    // Honor the Iterator contract instead of surfacing an NPE
                    // when the iterator is exhausted.
                    throw new java.util.NoSuchElementException();
                }
                File file = next;
                findNext();
                return file.getAbsolutePath();
            }

            /**
             * Find the next element to be returned from {@link #next()}.
             */
            private void findNext() {
                if(files != null) {
                    File file = null;
                    while (file == null || file.isDirectory()) {
                        if(position >= files.length) {
                            file = null;
                            break;
                        }
                        else {
                            file = files[position];
                            position++;
                        }
                    }
                    next = file;
                }
            }

        };
    }

    /**
     * Return the random access {@link FileChannel} for {@code file}. The
     * channel will be opened for reading and writing.
     *
     * @param file
     * @return the FileChannel for {@code file}
     */
    @SuppressWarnings("resource") // NOTE: can't close the file channel here
                                  // because others depend on it
    public static FileChannel getFileChannel(String file) {
        try {
            return new RandomAccessFile(openFile(file), "rwd").getChannel();
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Return the size of {@code file}. This method will automatically create
     * {@code file} if it does not already exist.
     *
     * @param file
     * @return the size in bytes
     */
    public static long getFileSize(String file) {
        try {
            openFile(file);
            return Files.size(Paths.get(file));
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Return the simple filename without path information or extension. This
     * method assumes that the filename only contains one extension.
     *
     * @param filename
     * @return the simple file name
     */
    public static String getSimpleName(String filename) {
        // Strip the (single, assumed) extension, then take the last path
        // component. NOTE(review): File.separator is passed to String#split as
        // a regex; on Windows ("\") that is an invalid pattern -- confirm this
        // is only ever called with forward-slash paths.
        String[] placeholder;
        return (placeholder = (placeholder = filename
                .split("\\."))[placeholder.length - 2]
                        .split(File.separator))[placeholder.length - 1];
    }

    /**
     * Look through {@code dir} and return all the sub directories.
     *
     * @param dir
     * @return the sub directories under {@code dir}.
     */
    public static Set<String> getSubDirs(String dir) {
        File directory = new File(dir);
        File[] files = directory.listFiles();
        if(files != null) {
            Set<String> subDirs = Sets.newHashSet();
            for (File file : files) {
                if(Files.isDirectory(Paths.get(file.getAbsolutePath()))) {
                    subDirs.add(file.getName());
                }
            }
            return subDirs;
        }
        else {
            // {@code dir} does not exist or is not a directory
            return Collections.emptySet();
        }
    }

    /**
     * Return {@code true} if the filesystem contains {@code dir} and it is
     * a directory.
     *
     * @param dir
     * @return {@code true} if {@code dir} exists
     */
    public static boolean hasDir(String dir) {
        Path path = Paths.get(dir);
        return Files.exists(path) && Files.isDirectory(path);
    }

    /**
     * Return {@code true} if the filesystem contains {@code file} and it is not
     * a directory.
     *
     * @param file
     * @return {@code true} if {@code file} exists
     */
    public static boolean hasFile(String file) {
        Path path = Paths.get(file);
        return Files.exists(path) && !Files.isDirectory(path);
    }

    /**
     * Lock the file or directory specified in {@code path} for use in this JVM
     * process. If the lock cannot be acquired, an exception is thrown.
     *
     * <p>
     * The acquired {@link java.nio.channels.FileLock} is intentionally never
     * released; it is held for the lifetime of this JVM process.
     *
     * @param path
     */
    public static void lock(String path) {
        if(Files.isDirectory(Paths.get(path))) {
            // Directories can't be locked directly, so lock a well-known
            // sentinel file inside the directory instead.
            lock(path + File.separator + "concourse.lock");
        }
        else {
            try {
                checkState(getFileChannel(path).tryLock() != null,
                        "Unable to grab lock for %s because another "
                                + "Concourse Server process is using it",
                        path);
            }
            catch (OverlappingFileLockException e) {
                Logger.warn("Trying to lock {}, but the current "
                        + "JVM is already the owner", path);
            }
            catch (IOException e) {
                throw Throwables.propagate(e);
            }
        }
    }

    /**
     * Create a valid path that contains separators in the appropriate places
     * by joining all the {@code parts} together with the {@link File#separator}
     *
     * @param parts
     * @return the path
     */
    public static String makePath(String... parts) {
        StringBuilder path = new StringBuilder();
        for (String part : parts) {
            path.append(part);
            if(!part.endsWith(File.separator)) {
                path.append(File.separator);
            }
        }
        return path.toString();
    }

    /**
     * Return a {@link MappedByteBuffer} for {@code file} in {@code mode}
     * starting at {@code position} and continuing for {@code size} bytes. This
     * method will automatically create {@code file} if it does not already
     * exist.
     *
     * @param file
     * @param mode
     * @param position
     * @param size
     * @return the MappedByteBuffer
     */
    public static MappedByteBuffer map(String file, MapMode mode, long position,
            long size) {
        FileChannel channel = getFileChannel(file);
        try {
            // The mapping remains valid after the channel is closed in the
            // finally block below.
            return channel.map(mode, position, size).load();
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
        finally {
            closeFileChannel(channel);
        }
    }

    /**
     * Open {@code file} and return a {@link File} handle. This method will
     * create a new file if and only if it does not already exist.
     *
     * @param file
     */
    public static File openFile(String file) {
        try {
            File f = new File(file);
            if(f.getParentFile() != null) {
                // Ensure all intermediate directories exist before touching
                // the file itself.
                f.getParentFile().mkdirs();
            }
            f.createNewFile();
            return f;
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Read bytes from {@code file} <em>sequentially</em> and return the content
     * as a <strong>read only</strong> {@link ByteBuffer}.
     *
     * @param file
     * @return the read only ByteBuffer with the content of {@code file}
     */
    public static ByteBuffer readBytes(String file) {
        FileChannel channel = getFileChannel(file);
        try {
            MappedByteBuffer data = channel.map(MapMode.READ_ONLY, 0,
                    channel.size());
            return data;
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
        finally {
            closeFileChannel(channel);
        }
    }

    /**
     * Replace the content of {@code original} with that of {@code replacement}
     * and delete {@code replacement} in a single atomic operation.
     *
     * @param original
     * @param replacement
     */
    public static void replaceFile(String original, String replacement) {
        try {
            java.nio.file.Files.move(Paths.get(replacement),
                    Paths.get(original), StandardCopyOption.ATOMIC_MOVE,
                    StandardCopyOption.REPLACE_EXISTING);
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }

    /**
     * Return an {@link Iterator} to traverse over all of the sub directories
     * (e.g. no flat files) in {@code directory}.
     *
     * @param directory
     * @return the iterator
     */
    public static Iterator<String> subDirectoryOnlyIterator(
            final String directory) {
        return getSubDirs(directory).iterator();
    }

    /**
     * Attempt to force the unmapping of {@code buffer}. This method should be
     * used with <strong>EXTREME CAUTION</strong>. If {@code buffer} is used
     * after this method is invoked, it is likely that the JVM will crash.
     *
     * @param buffer
     */
    public static void unmap(MappedByteBuffer buffer) {
        Cleaners.freeMappedByteBuffer(buffer);
    }

    /**
     * Write the {@code bytes} to {@code file} starting at the beginning. This
     * method will perform an fsync.
     *
     * @param bytes
     * @param file
     */
    public static void writeBytes(ByteBuffer bytes, String file) {
        writeBytes(bytes, file, 0);
    }

    /**
     * Write the {@code bytes} to {@code file} starting {@code position}. This
     * method will perform an fsync.
     *
     * @param bytes
     * @param file
     * @param position
     */
    public static void writeBytes(ByteBuffer bytes, String file, int position) {
        FileChannel channel = getFileChannel(file);
        try {
            channel.position(position);
            channel.write(bytes);
            channel.force(true); // fsync
        }
        catch (IOException e) {
            throw Throwables.propagate(e);
        }
        finally {
            closeFileChannel(channel);
        }
    }

    private FileSystem() {/* noop */}

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xbean.propertyeditor;
import static org.apache.xbean.recipe.RecipeHelper.getTypeParameters;
import static org.apache.xbean.recipe.RecipeHelper.*;
import org.apache.xbean.recipe.RecipeHelper;
import java.beans.PropertyEditor;
import java.beans.PropertyEditorManager;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Collection;
import java.util.SortedSet;
import java.util.Set;
import java.util.TreeSet;
import java.util.LinkedHashSet;
import java.util.ArrayList;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.LinkedHashMap;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentHashMap;
import java.lang.reflect.Type;
/**
* The property editor manager. This orchestrates Geronimo usage of
* property editors, allowing additional search paths to be added and
* specific editors to be registered.
*
* @version $Rev: 6687 $
*/
public class PropertyEditors {
private static final Map<Class, Converter> registry = Collections.synchronizedMap(new ReferenceIdentityMap());
private static final Map<Class, Class> PRIMITIVE_TO_WRAPPER;
private static final Map<Class, Class> WRAPPER_TO_PRIMITIVE;
private static boolean registerWithVM;
/**
* Register all of the built in converters
*/
static {
    // Build the primitive <-> wrapper lookup tables first; registerConverter
    // consults them to mirror each registration onto the paired type.
    Map<Class, Class> map = new HashMap<Class, Class>();
    map.put(boolean.class, Boolean.class);
    map.put(char.class, Character.class);
    map.put(byte.class, Byte.class);
    map.put(short.class, Short.class);
    map.put(int.class, Integer.class);
    map.put(long.class, Long.class);
    map.put(float.class, Float.class);
    map.put(double.class, Double.class);
    PRIMITIVE_TO_WRAPPER = Collections.unmodifiableMap(map);

    // Reverse mapping: wrapper -> primitive.
    map = new HashMap<Class, Class>();
    map.put(Boolean.class, boolean.class);
    map.put(Character.class, char.class);
    map.put(Byte.class, byte.class);
    map.put(Short.class, short.class);
    map.put(Integer.class, int.class);
    map.put(Long.class, long.class);
    map.put(Float.class, float.class);
    map.put(Double.class, double.class);
    WRAPPER_TO_PRIMITIVE = Collections.unmodifiableMap(map);

    // Explicitly register the types
    registerConverter(new ArrayListEditor());
    registerConverter(new BigDecimalEditor());
    registerConverter(new BigIntegerEditor());
    registerConverter(new BooleanEditor());
    registerConverter(new ByteEditor());
    registerConverter(new CharacterEditor());
    registerConverter(new ClassEditor());
    registerConverter(new DateEditor());
    registerConverter(new DoubleEditor());
    registerConverter(new FileEditor());
    registerConverter(new FloatEditor());
    registerConverter(new HashMapEditor());
    registerConverter(new HashtableEditor());
    registerConverter(new IdentityHashMapEditor());
    registerConverter(new Inet4AddressEditor());
    registerConverter(new Inet6AddressEditor());
    registerConverter(new InetAddressEditor());
    registerConverter(new IntegerEditor());
    registerConverter(new LinkedHashMapEditor());
    registerConverter(new LinkedHashSetEditor());
    registerConverter(new LinkedListEditor());
    registerConverter(new ListEditor());
    registerConverter(new LongEditor());
    registerConverter(new MapEditor());
    registerConverter(new ObjectNameEditor());
    registerConverter(new PropertiesEditor());
    registerConverter(new SetEditor());
    registerConverter(new ShortEditor());
    registerConverter(new SortedMapEditor());
    registerConverter(new SortedSetEditor());
    registerConverter(new StringEditor());
    registerConverter(new TreeMapEditor());
    registerConverter(new TreeSetEditor());
    registerConverter(new URIEditor());
    registerConverter(new URLEditor());
    registerConverter(new LoggerConverter());
    registerConverter(new PatternConverter());
    registerConverter(new JndiConverter());
    registerConverter(new VectorEditor());
    registerConverter(new WeakHashMapEditor());

    // The following converters depend on optional third-party logging
    // libraries; swallow any linkage error so their absence from the
    // classpath is non-fatal.
    try {
        registerConverter(new Log4jConverter());
    } catch (Throwable e) {
        // Log4j not on the classpath -- intentionally ignored.
    }

    try {
        registerConverter(new CommonsLoggingConverter());
    } catch (Throwable e) {
        // Commons Logging not on the classpath -- intentionally ignored.
    }
}
/**
* Are converters registered with the VM PropertyEditorManager. By default
* converters are not registered with the VM as this creates problems for
* IDE and Spring because they rely in their specific converters being
* registered to function properly.
*/
/**
 * Reports whether converters are mirrored into the JVM-wide
 * {@link PropertyEditorManager}.
 *
 * @return {@code true} if converters are also registered with the VM
 */
public static boolean isRegisterWithVM() {
    return PropertyEditors.registerWithVM;
}
/**
* Sets if converters registered with the VM PropertyEditorManager.
* If the new value is true, all currently registered converters are
* immediately registered with the VM.
*/
/**
 * Toggles mirroring of converters into the JVM-wide
 * {@link PropertyEditorManager}. When turning the flag on, every converter
 * currently in the registry is registered with the VM immediately.
 *
 * @param registerWithVM the new setting
 */
public static void setRegisterWithVM(boolean registerWithVM) {
    if (PropertyEditors.registerWithVM == registerWithVM) {
        return; // no change
    }
    PropertyEditors.registerWithVM = registerWithVM;
    if (!registerWithVM) {
        return; // nothing to sync when turning the mirroring off
    }
    // register all converters with the VM
    for (Entry<Class, Converter> entry : registry.entrySet()) {
        PropertyEditorManager.registerEditor(entry.getKey(), entry.getValue().getClass());
    }
}
/**
 * Registers a converter under its declared type. If the type is a primitive
 * or a primitive wrapper, the corresponding wrapper/primitive type is
 * registered as well, so lookups succeed from either side.
 *
 * @param converter the converter to register; must not be null
 * @throws NullPointerException if converter is null
 */
public static void registerConverter(Converter converter) {
    if (converter == null) throw new NullPointerException("editor is null");
    Class type = converter.getType();
    registerConverterForType(type, converter);
    // Mirror primitive <-> wrapper registrations so both forms resolve.
    if (PRIMITIVE_TO_WRAPPER.containsKey(type)) {
        registerConverterForType(PRIMITIVE_TO_WRAPPER.get(type), converter);
    } else if (WRAPPER_TO_PRIMITIVE.containsKey(type)) {
        registerConverterForType(WRAPPER_TO_PRIMITIVE.get(type), converter);
    }
}
/**
 * Puts the converter into the registry for one specific type and, when
 * registerWithVM is enabled, also registers it with the VM
 * PropertyEditorManager.
 */
private static void registerConverterForType(Class type, Converter converter) {
    registry.put(type, converter);
    if (registerWithVM) {
        PropertyEditorManager.registerEditor(type, converter.getClass());
    }
}
/**
 * Determines whether the named type can be converted. The class is loaded
 * from the supplied ClassLoader, which also resolves arrays and primitives.
 *
 * @param type fully qualified class name; must not be null
 * @param classLoader loader used to resolve the type; must not be null
 * @return true if a converter or property editor is available for the type
 * @throws PropertyEditorException if the type class cannot be loaded
 */
public static boolean canConvert(String type, ClassLoader classLoader) {
    if (type == null) throw new NullPointerException("type is null");
    if (classLoader == null) throw new NullPointerException("classLoader is null");
    // load using the ClassLoading utility, which also manages arrays and primitive classes.
    Class typeClass;
    try {
        typeClass = Class.forName(type, true, classLoader);
    } catch (ClassNotFoundException e) {
        // preserve the original failure as the cause instead of discarding it
        throw new PropertyEditorException("Type class could not be found: " + type, e);
    }
    return canConvert(typeClass);
}
/**
 * Determines whether a converter or property editor exists for the class.
 *
 * @param type the class to test
 * @return true if a converter or editor can handle the type
 */
public static boolean canConvert(Class type) {
    return findConverterOrEditor(type) != null;
}
/**
 * Resolves the editor for a type by trying, in order: a registered
 * converter, a VM property editor, and finally a built-in converter.
 *
 * @return the first match, or null when nothing can handle the type
 */
private static PropertyEditor findConverterOrEditor(Type type) {
    PropertyEditor result = findConverter(type);
    if (result == null) {
        // fall back to a property editor
        result = findEditor(type);
    }
    if (result == null) {
        result = findBuiltinConverter(type);
    }
    return result;
}
/**
 * Converts a value to its text form using a registered converter when one
 * exists, otherwise a VM property editor.
 *
 * @param value the value to convert; must not be null
 * @return the text representation of the value
 * @throws PropertyEditorException if no editor exists or conversion fails
 */
public static String toString(Object value) throws PropertyEditorException {
    if (value == null) throw new NullPointerException("value is null");
    Class type = value.getClass();
    PropertyEditor editor = findConverterOrEditor(type);
    // A Converter handles the conversion directly.
    if (editor instanceof Converter) {
        return ((Converter) editor).toString(value);
    }
    if (editor == null) {
        throw new PropertyEditorException("Unable to find PropertyEditor for " + type.getSimpleName());
    }
    // Plain PropertyEditor: set the value, then read it back as text.
    editor.setValue(value);
    try {
        return editor.getAsText();
    } catch (Exception e) {
        throw new PropertyEditorException("Error while converting a \"" + type.getSimpleName() + "\" to text " +
                " using the property editor " + editor.getClass().getSimpleName(), e);
    }
}
/**
 * Converts a text value to an instance of the named type. The class is
 * loaded from the supplied ClassLoader, which also resolves arrays and
 * primitives.
 *
 * @param type fully qualified class name; must not be null
 * @param value the text to convert; must not be null
 * @param classLoader loader used to resolve the type; must not be null
 * @return the converted object
 * @throws PropertyEditorException if the type cannot be loaded or converted
 */
public static Object getValue(String type, String value, ClassLoader classLoader) throws PropertyEditorException {
    if (type == null) throw new NullPointerException("type is null");
    if (value == null) throw new NullPointerException("value is null");
    if (classLoader == null) throw new NullPointerException("classLoader is null");
    // load using the ClassLoading utility, which also manages arrays and primitive classes.
    Class typeClass;
    try {
        typeClass = Class.forName(type, true, classLoader);
    } catch (ClassNotFoundException e) {
        // preserve the original failure as the cause instead of discarding it
        throw new PropertyEditorException("Type class could not be found: " + type, e);
    }
    return getValue(typeClass, value);
}
/**
 * Converts a text value to an instance of the given type using a registered
 * converter when one exists, otherwise a VM property editor.
 *
 * @param type the target type; must not be null
 * @param value the text to convert; must not be null
 * @return the converted object
 * @throws PropertyEditorException if no editor exists or conversion fails
 */
public static Object getValue(Type type, String value) throws PropertyEditorException {
    if (type == null) throw new NullPointerException("type is null");
    if (value == null) throw new NullPointerException("value is null");
    PropertyEditor editor = findConverterOrEditor(type);
    // A Converter handles the conversion directly.
    if (editor instanceof Converter) {
        return ((Converter) editor).toObject(value);
    }
    Class clazz = toClass(type);
    if (editor == null) {
        throw new PropertyEditorException("Unable to find PropertyEditor for " + clazz.getSimpleName());
    }
    // Plain PropertyEditor: set the text, then read the converted value back.
    editor.setAsText(value);
    try {
        return editor.getValue();
    } catch (Exception e) {
        throw new PropertyEditorException("Error while converting \"" + value + "\" to a " + clazz.getSimpleName() +
                " using the property editor " + editor.getClass().getSimpleName(), e);
    }
}
/**
 * Returns a built-in converter for types with intrinsic conversion support.
 * Currently only enums are handled.
 *
 * @param type the target type; must not be null
 * @return an EnumConverter for enum types, otherwise null
 */
private static Converter findBuiltinConverter(Type type) {
    if (type == null) throw new NullPointerException("type is null");
    Class clazz = toClass(type);
    return Enum.class.isAssignableFrom(clazz) ? new EnumConverter(clazz) : null;
}
/**
 * Finds a registered converter for the given type, building adapters for
 * arrays, collections, and maps on the fly. Lookup order matters: arrays,
 * then collections, then maps, then the registry, then nested Converter
 * classes declared on the type itself.
 *
 * @param type the target type; must not be null
 * @return a converter for the type, or null if none can be found
 */
private static Converter findConverter(Type type) {
if (type == null) throw new NullPointerException("type is null");
Class clazz = toClass(type);
// it's possible this was a request for an array class. We might not
// recognize the array type directly, but the component type might be
// resolvable
if (clazz.isArray() && !clazz.getComponentType().isArray()) {
// do a recursive lookup on the base type
PropertyEditor editor = findConverterOrEditor(clazz.getComponentType());
// if we found a suitable editor for the base component type,
// wrapper this in an array adaptor for real use
if (editor != null) {
return new ArrayConverter(clazz, editor);
} else {
return null;
}
}
// Collections: convert elements with the component type's editor.
if (Collection.class.isAssignableFrom(clazz)){
Type[] types = getTypeParameters(Collection.class, type);
// Unparameterized collections default to String elements.
Type componentType = String.class;
if (types != null && types.length == 1 && types[0] instanceof Class) {
componentType = types[0];
}
PropertyEditor editor = findConverterOrEditor(componentType);
if (editor != null){
// Pick a concrete collection class when the declared one cannot be
// instantiated directly.
if (RecipeHelper.hasDefaultConstructor(clazz)) {
return new GenericCollectionConverter(clazz, editor);
} else if (SortedSet.class.isAssignableFrom(clazz)) {
return new GenericCollectionConverter(TreeSet.class, editor);
} else if (Set.class.isAssignableFrom(clazz)) {
return new GenericCollectionConverter(LinkedHashSet.class, editor);
} else {
return new GenericCollectionConverter(ArrayList.class, editor);
}
}
return null;
}
// Maps: need editors for both the key and the value types.
if (Map.class.isAssignableFrom(clazz)){
Type[] types = getTypeParameters(Map.class, type);
// Unparameterized maps default to String keys and values.
Type keyType = String.class;
Type valueType = String.class;
if (types != null && types.length == 2 && types[0] instanceof Class && types[1] instanceof Class) {
keyType = types[0];
valueType = types[1];
}
PropertyEditor keyConverter = findConverterOrEditor(keyType);
PropertyEditor valueConverter = findConverterOrEditor(valueType);
if (keyConverter != null && valueConverter != null){
// Pick a concrete map class when the declared one cannot be
// instantiated directly.
if (RecipeHelper.hasDefaultConstructor(clazz)) {
return new GenericMapConverter(clazz, keyConverter, valueConverter);
} else if (SortedMap.class.isAssignableFrom(clazz)) {
return new GenericMapConverter(TreeMap.class, keyConverter, valueConverter);
} else if (ConcurrentMap.class.isAssignableFrom(clazz)) {
return new GenericMapConverter(ConcurrentHashMap.class, keyConverter, valueConverter);
} else {
return new GenericMapConverter(LinkedHashMap.class, keyConverter, valueConverter);
}
}
return null;
}
Converter converter = registry.get(clazz);
// we're outta here if we got one.
if (converter != null) {
return converter;
}
// Last resort: the type may declare its own nested Converter class.
Class[] declaredClasses = clazz.getDeclaredClasses();
for (Class declaredClass : declaredClasses) {
if (Converter.class.isAssignableFrom(declaredClass)) {
try {
converter = (Converter) declaredClass.newInstance();
registerConverter(converter);
// try to get the converter from the registry... the converter
// created above may have been for another class
converter = registry.get(clazz);
if (converter != null) {
return converter;
}
} catch (Exception e) {
// ignored: best-effort instantiation of a candidate nested Converter
}
}
}
// nothing found
return null;
}
/**
 * Locate a property editor for the given type.
 *
 * @param type the target type of the property; must not be null
 * @return the resolved editor, or null if no suitable editor is available
 */
private static PropertyEditor findEditor(Type type) {
    if (type == null) throw new NullPointerException("type is null");
    Class clazz = toClass(type);
    // Ask the VM editor manager first.
    PropertyEditor editor = PropertyEditorManager.findEditor(clazz);
    if (editor != null) {
        return editor;
    }
    // The request may be for an array class: the array type itself may be
    // unknown while its component type is resolvable.
    if (clazz.isArray() && !clazz.getComponentType().isArray()) {
        PropertyEditor componentEditor = findEditor(clazz.getComponentType());
        // Wrap a component-type editor in an array adaptor.
        if (componentEditor != null) {
            return new ArrayConverter(clazz, componentEditor);
        }
    }
    return null;
}
}
| |
package org.sagebionetworks.repo.util.jrjc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpStatus;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.simpleHttpClient.SimpleHttpClient;
import org.sagebionetworks.simpleHttpClient.SimpleHttpRequest;
import org.sagebionetworks.simpleHttpClient.SimpleHttpResponse;
/**
 * Unit tests for JiraClientImpl. The Jira REST API is never contacted:
 * SimpleHttpClient is mocked to return canned JSON responses, and the tests
 * verify project/field lookups, issue creation payloads, request
 * construction (URL + headers), and HTTP status handling.
 */
@ExtendWith(MockitoExtension.class)
public class JiraClientImplTest {
// Supplies the Jira user email and API key consumed by the client.
@Mock
private StackConfiguration mockConfig;
// Canned HTTP transport; get()/post() return mockResponse.
@Mock
private SimpleHttpClient mockHttpClient;
@Mock
private SimpleHttpResponse mockResponse;
private static final String USERNAME = "userName";
private static final String USERAPIKEY = "userApiKey";
@InjectMocks
private JiraClientImpl jiraClient;
// Happy path: the requested "Task" issue type exists, so both the project
// id and the issue type id are extracted from the project JSON.
@Test
public void testGetProjectInfo() throws Exception {
String expectedJson =
"{ \"id\": \"10000\"," +
"	\"issueTypes\": [" +
"	{" +
"		\"self\": \"https://your-domain.atlassian.net/rest/api/3/issueType/3\"," +
"		\"id\": \"3\"," +
"		\"description\": \"A task that needs to be done.\"," +
"		\"iconUrl\": \"https://your-domain.atlassian.net//secure/viewavatar?size=xsmall&avatarId=10299&avatarType=issuetype\\\",\"," +
"		\"name\": \"Task\"," +
"		\"subtask\": false," +
"		\"avatarId\": 1" +
"	}," +
"	{" +
"		\"self\": \"https://your-domain.atlassian.net/rest/api/3/issueType/1\"," +
"		\"id\": \"1\"," +
"		\"description\": \"A problem with the software.\"," +
"		\"iconUrl\": \"https://your-domain.atlassian.net/secure/viewavatar?size=xsmall&avatarId=10316&avatarType=issuetype\\\",\"," +
"		\"name\": \"Bug\"," +
"		\"subtask\": false," +
"		\"avatarId\": 10002," +
"		\"entityId\": \"9d7dd6f7-e8b6-4247-954b-7b2c9b2a5ba2\"," +
"		\"scope\": {" +
"			\"type\": \"PROJECT\"," +
"			\"project\": {" +
"			\"id\": \"10000\"," +
"			\"key\": \"KEY\"," +
"			\"name\": \"Next Gen Project\"" +
"			}" +
"		}" +
"	}" +
"	]}";
when(mockConfig.getJiraUserEmail()).thenReturn(USERNAME);
when(mockConfig.getJiraUserApikey()).thenReturn(USERAPIKEY);
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_OK);
when(mockResponse.getContent()).thenReturn(expectedJson);
when(mockHttpClient.get(any(SimpleHttpRequest.class))).thenReturn(mockResponse);
// Call under test
ProjectInfo pInfo = jiraClient.getProjectInfo("SG", "Task");
assertNotNull(pInfo);
String projId = (String) pInfo.getProjectId();
assertEquals("10000", projId);
Long issueTypeId = (Long) pInfo.getIssueTypeId();
assertTrue(3L==issueTypeId); // NOTE(review): assertEquals(3L, issueTypeId) would give a clearer failure message
}
// Valid JSON, but no issue type named "Task" -> JiraClientException.
@Test
public void testGetProjectInfoIssueTypeNotFound() throws Exception {
String expectedJson =
"{ \"id\": \"10000\"," +
"	\"issueTypes\": [" +
"	{" +
"		\"self\": \"https://your-domain.atlassian.net/rest/api/3/issueType/3\"," +
"		\"id\": \"3\"," +
"		\"description\": \"A task that needs to be done.\"," +
"		\"iconUrl\": \"https://your-domain.atlassian.net//secure/viewavatar?size=xsmall&avatarId=10299&avatarType=issuetype\\\",\"," +
"		\"name\": \"Flag\"," +
"		\"subtask\": false," +
"		\"avatarId\": 1" +
"	}," +
"	{" +
"		\"self\": \"https://your-domain.atlassian.net/rest/api/3/issueType/1\"," +
"		\"id\": \"1\"," +
"		\"description\": \"A problem with the software.\"," +
"		\"iconUrl\": \"https://your-domain.atlassian.net/secure/viewavatar?size=xsmall&avatarId=10316&avatarType=issuetype\\\",\"," +
"		\"name\": \"Bug\"," +
"		\"subtask\": false," +
"		\"avatarId\": 10002," +
"		\"entityId\": \"9d7dd6f7-e8b6-4247-954b-7b2c9b2a5ba2\"," +
"		\"scope\": {" +
"			\"type\": \"PROJECT\"," +
"			\"project\": {" +
"			\"id\": \"10000\"," +
"			\"key\": \"KEY\"," +
"			\"name\": \"Next Gen Project\"" +
"			}" +
"		}" +
"	}" +
"	]}";
when(mockConfig.getJiraUserEmail()).thenReturn(USERNAME);
when(mockConfig.getJiraUserApikey()).thenReturn(USERAPIKEY);
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_OK);
when(mockResponse.getContent()).thenReturn(expectedJson);
when(mockHttpClient.get(any(SimpleHttpRequest.class))).thenReturn(mockResponse);
// Call under test
Assertions.assertThrows(JiraClientException.class, () -> {
jiraClient.getProjectInfo("SG", "Task");
}
);
}
// Malformed JSON (missing closing brace in the first issue type) -> JiraClientException.
@Test
public void testGetProjectInfoInvalidJson() throws Exception {
String expectedJson =
"{ \"id\": \"10000\"," +
"	\"issueTypes\": [" +
"	{" +
"		\"self\": \"https://your-domain.atlassian.net/rest/api/3/issueType/3\"," +
"		\"id\": \"3\"," +
"		\"description\": \"A task that needs to be done.\"," +
"		\"iconUrl\": \"https://your-domain.atlassian.net//secure/viewavatar?size=xsmall&avatarId=10299&avatarType=issuetype\\\",\"," +
"		\"name\": \"Flag\"," +
"		\"subtask\": false," +
"		\"avatarId\": 1" +
"	," +
"	{" +
"		\"self\": \"https://your-domain.atlassian.net/rest/api/3/issueType/1\"," +
"		\"id\": \"1\"," +
"		\"description\": \"A problem with the software.\"," +
"		\"iconUrl\": \"https://your-domain.atlassian.net/secure/viewavatar?size=xsmall&avatarId=10316&avatarType=issuetype\\\",\"," +
"		\"name\": \"Bug\"," +
"		\"subtask\": false," +
"		\"avatarId\": 10002," +
"		\"entityId\": \"9d7dd6f7-e8b6-4247-954b-7b2c9b2a5ba2\"," +
"		\"scope\": {" +
"			\"type\": \"PROJECT\"," +
"			\"project\": {" +
"			\"id\": \"10000\"," +
"			\"key\": \"KEY\"," +
"			\"name\": \"Next Gen Project\"" +
"			}" +
"		}" +
"	}" +
"	]}";
when(mockConfig.getJiraUserEmail()).thenReturn(USERNAME);
when(mockConfig.getJiraUserApikey()).thenReturn(USERAPIKEY);
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_OK);
when(mockResponse.getContent()).thenReturn(expectedJson);
when(mockHttpClient.get(any(SimpleHttpRequest.class))).thenReturn(mockResponse);
Assertions.assertThrows(JiraClientException.class, () -> {
jiraClient.getProjectInfo("SG", "Task");
}
);
}
// Field listing: maps field display names to field ids.
@Test
public void testGetFields() throws Exception {
String expectedJson =
"[" +
"  {" +
"    \"id\": \"description\"," +
"    \"name\": \"Description\"," +
"    \"custom\": false," +
"    \"orderable\": true," +
"    \"navigable\": true," +
"    \"searchable\": true," +
"    \"clauseNames\": [" +
"      \"description\"" +
"    ]," +
"    \"schema\": {" +
"      \"type\": \"string\"," +
"      \"system\": \"description\"" +
"    }" +
"  }," +
"  {" +
"    \"id\": \"summary\"," +
"    \"key\": \"summary\"," +
"    \"name\": \"Summary\"," +
"    \"custom\": false," +
"    \"orderable\": true," +
"    \"navigable\": true," +
"    \"searchable\": true," +
"    \"clauseNames\": [" +
"      \"summary\"" +
"    ]," +
"    \"schema\": {" +
"      \"type\": \"string\"," +
"      \"system\": \"summary\"" +
"    }" +
"  }" +
"]";
when(mockConfig.getJiraUserEmail()).thenReturn(USERNAME);
when(mockConfig.getJiraUserApikey()).thenReturn(USERAPIKEY);
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_OK);
when(mockResponse.getContent()).thenReturn(expectedJson);
when(mockHttpClient.get(any(SimpleHttpRequest.class))).thenReturn(mockResponse);
// Call under test
Map<String, String> m = jiraClient.getFields();
assertNotNull(m);
assertEquals(2, m.size());
assertEquals("description", m.get("Description"));
assertEquals("summary", m.get("Summary"));
}
// Malformed field-list JSON (missing closing brace) -> JiraClientException.
@Test
public void testGetFieldsInvalidJson() throws Exception {
String expectedJson =
"[" +
"  {" +
"    \"id\": \"description\"," +
"    \"name\": \"Description\"," +
"    \"custom\": false," +
"    \"orderable\": true," +
"    \"navigable\": true," +
"    \"searchable\": true," +
"    \"clauseNames\": [" +
"      \"description\"" +
"    ]," +
"    \"schema\": {" +
"      \"type\": \"string\"," +
"      \"system\": \"description\"" +
"    }" +
"  ," +
"  {" +
"    \"id\": \"summary\"," +
"    \"key\": \"summary\"," +
"    \"name\": \"Summary\"," +
"    \"custom\": false," +
"    \"orderable\": true," +
"    \"navigable\": true," +
"    \"searchable\": true," +
"    \"clauseNames\": [" +
"      \"summary\"" +
"    ]," +
"    \"schema\": {" +
"      \"type\": \"string\"," +
"      \"system\": \"summary\"" +
"    }" +
"  }" +
"]";
when(mockConfig.getJiraUserEmail()).thenReturn(USERNAME);
when(mockConfig.getJiraUserApikey()).thenReturn(USERAPIKEY);
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_OK);
when(mockResponse.getContent()).thenReturn(expectedJson);
when(mockHttpClient.get(any(SimpleHttpRequest.class))).thenReturn(mockResponse);
// Call under test
Assertions.assertThrows(JiraClientException.class, () -> {
jiraClient.getFields();
}
);
}
// Issue creation: verifies both the parsed response key and the exact
// request body posted to the server (captured via ArgumentCaptor).
@Test
public void testCreateIssue() throws Exception {
String expectedJson =
"{" +
"  \"id\": \"10000\"," +
"  \"key\": \"SG-24\"," +
"  \"self\": \"https://your-domain.atlassian.net/rest/api/3/issue/10000\"," +
"  \"transition\": {" +
"    \"status\": 200," +
"    \"errorCollection\": {" +
"      \"errorMessages\": []," +
"      \"errors\": {}" +
"    }" +
"  }" +
"}";
when(mockConfig.getJiraUserEmail()).thenReturn(USERNAME);
when(mockConfig.getJiraUserApikey()).thenReturn(USERAPIKEY);
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_CREATED);
when(mockResponse.getContent()).thenReturn(expectedJson);
ArgumentCaptor<String> bodyCaptor = ArgumentCaptor.forClass(String.class);
when(mockHttpClient.post(any(SimpleHttpRequest.class), bodyCaptor.capture())).thenReturn(mockResponse);
BasicIssue issue = new BasicIssue();
issue.setProjectId("101");
issue.setSummary("this is the summary");
issue.setIssueTypeId(202L);
Map<String,Object> customFields = new HashMap<String,Object>();
customFields.put("components", JRJCHelper.componentName("some name"));
issue.setCustomFields(customFields);
String expectedBody ="{\"fields\":{\"summary\":\"this is the summary\",\"issuetype\":{\"id\":202},\"components\":[{\"name\":\"some name\"}],\"project\":{\"id\":\"101\"}}}";
// Call under test
CreatedIssue i = jiraClient.createIssue(issue);
assertNotNull(i);
assertEquals("SG-24", i.getKey());
assertEquals(expectedBody, bodyCaptor.getValue());
}
// Request construction: URL assembly plus Basic-auth, content-type, and
// user-agent headers.
@Test
public void testCreateRequest() throws Exception {
when(mockConfig.getJiraUserEmail()).thenReturn(USERNAME);
when(mockConfig.getJiraUserApikey()).thenReturn(USERAPIKEY);
SimpleHttpRequest req;
req = jiraClient.createRequest("/aPath/", "aResource");
assertEquals("https://sagebionetworks.jira.com/aPath/aResource", req.getUri());
req = jiraClient.createRequest("/aPath/", null);
assertEquals("https://sagebionetworks.jira.com/aPath/", req.getUri());
Map<String, String> headers = req.getHeaders();
assertEquals("Basic " + Base64.getEncoder().encodeToString((USERNAME + ":" + USERAPIKEY).getBytes("utf-8")), headers.get(HttpHeaders.AUTHORIZATION));
assertEquals("application/json", headers.get(HttpHeaders.CONTENT_TYPE));
assertEquals("Synapse", headers.get(HttpHeaders.USER_AGENT));
}
// A path without a trailing slash is rejected.
@Test
public void testCreateRequestBadPath() throws JiraClientException {
Assertions.assertThrows(JiraClientException.class, () -> {
jiraClient.createRequest("/aPath", "aResource");
}
);
}
// 2xx status codes pass through without an exception.
@Test
public void testHandleResponseStatusOK() {
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_CREATED);
JiraClientImpl.handleResponseStatus(mockResponse.getStatusCode()); // Should not fail
}
// Error status codes raise JiraClientException.
@Test
public void testHandleResponseStatusError() {
when(mockResponse.getStatusCode()).thenReturn(HttpStatus.SC_BAD_REQUEST);
Assertions.assertThrows(JiraClientException.class, () -> {
JiraClientImpl.handleResponseStatus(mockResponse.getStatusCode());
}
);
}
}
| |
package com.antarescraft.kloudy.hologuiapi.guicomponents;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.UUID;
import org.bukkit.entity.Player;
import org.bukkit.util.Vector;
import com.antarescraft.kloudy.hologuiapi.guicomponentproperties.ValueScrollerComponentProperties;
import com.antarescraft.kloudy.hologuiapi.handlers.ScrollHandler;
import com.antarescraft.kloudy.hologuiapi.playerguicomponents.PlayerGUIValueScrollerComponent;
import com.antarescraft.kloudy.hologuiapi.scrollvalues.AbstractScrollValue;
import com.antarescraft.kloudy.hologuiapi.scrollvalues.DateScrollValue;
import com.antarescraft.kloudy.hologuiapi.scrollvalues.DoubleScrollValue;
import com.antarescraft.kloudy.hologuiapi.scrollvalues.DurationScrollValue;
import com.antarescraft.kloudy.hologuiapi.scrollvalues.IntegerScrollValue;
import com.antarescraft.kloudy.hologuiapi.scrollvalues.ListScrollValue;
import com.antarescraft.kloudy.hologuiapi.util.AABB;
import com.antarescraft.kloudy.hologuiapi.util.Point3D;
import com.antarescraft.kloudy.plugincore.config.ConfigObject;
import com.antarescraft.kloudy.plugincore.config.PassthroughParams;
import com.antarescraft.kloudy.plugincore.config.annotations.ConfigElement;
import com.antarescraft.kloudy.plugincore.config.annotations.ConfigProperty;
import com.antarescraft.kloudy.plugincore.exceptions.InvalidDateFormatException;
import com.antarescraft.kloudy.plugincore.exceptions.InvalidDurationFormatException;
import com.antarescraft.kloudy.plugincore.time.TimeFormat;
/**
 * GUI component that lets a player scroll through a configurable value.
 * The value type is driven by configuration: "decimal", "integer",
 * "duration", "date", or "list". Scroll events are dispatched to per-player
 * handlers, and each player keeps an independent current value.
 */
public class ValueScrollerComponent extends ClickableGUIComponent implements ConfigObject
{
	@ConfigElement
	@ConfigProperty(key = "")
	ValueScrollerComponentProperties properties;

	// Default scroll value built from the parsed component configuration.
	private AbstractScrollValue<?, ?> componentValue;

	// Per-player scroll callbacks, keyed by player UUID.
	private HashMap<UUID, ScrollHandler> scrollHandlers = new HashMap<UUID, ScrollHandler>();

	// Per-player current values, keyed by player UUID.
	private HashMap<UUID, AbstractScrollValue<?, ?>> playerScrollValues = new HashMap<UUID, AbstractScrollValue<?, ?>>();

	// Instances are created by the config parser only.
	private ValueScrollerComponent(){}

	/**
	 * Removes all handlers for the player, including their scroll handler.
	 */
	@Override
	public void removePlayerHandlers(Player player)
	{
		super.removePlayerHandlers(player);
		scrollHandlers.remove(player.getUniqueId());
	}

	/** Stores the player's current scroll value. */
	public void setPlayerScrollValue(Player player, AbstractScrollValue<?, ?> value)
	{
		playerScrollValues.put(player.getUniqueId(), value);
	}

	/** Clears the player's scroll value; they fall back to the default. */
	public void removePlayerScrollValue(Player player)
	{
		playerScrollValues.remove(player.getUniqueId());
	}

	/** Registers the callback invoked when the player scrolls this component. */
	public void registerScrollHandler(Player player, ScrollHandler scrollHandler)
	{
		scrollHandlers.put(player.getUniqueId(), scrollHandler);
	}

	/** Removes the player's scroll callback, if any. */
	public void removeScrollHandler(Player player)
	{
		scrollHandlers.remove(player.getUniqueId());
	}

	/** Invokes the player's scroll handler with the new value, if registered. */
	public void triggerScrollHandler(Player player, AbstractScrollValue<?, ?> value)
	{
		ScrollHandler scrollHandler = scrollHandlers.get(player.getUniqueId());
		if(scrollHandler != null) scrollHandler.onScroll(value);
	}

	/**
	 * Returns the player's current scroll value, or the configured default
	 * when the player has not scrolled yet.
	 */
	public AbstractScrollValue<?, ?> getPlayerScrollValue(Player player)
	{
		AbstractScrollValue<?, ?> value = componentValue;
		if(playerScrollValues.containsKey(player.getUniqueId()))
		{
			value = playerScrollValues.get(player.getUniqueId());
		}
		return value;
	}

	@Override
	public PlayerGUIValueScrollerComponent initPlayerGUIComponent(Player player)
	{
		return new PlayerGUIValueScrollerComponent(player, this);
	}

	@Override
	public void updateIncrement(){}

	/** Renders the player's current value as the component's single line. */
	@Override
	public String[] updateComponentLines(Player player)
	{
		return new String[]{getPlayerScrollValue(player).toString()};
	}

	@Override
	public double zoomDistance()
	{
		return 2;
	}

	// Bounding boxes for click detection, per Minecraft protocol version.
	@Override
	public AABB.Vec3D getMinBoundingRectPoint18(Point3D origin)
	{
		return AABB.Vec3D.fromVector(new Vector(origin.x-1.75, origin.y - 2, origin.z-1.75));
	}

	@Override
	public AABB.Vec3D getMaxBoundingRectPoint18(Point3D origin)
	{
		return AABB.Vec3D.fromVector(new Vector(origin.x+1.875, origin.y + 1, origin.z+1.875));
	}

	@Override
	public AABB.Vec3D getMinBoundingRectPoint19(Point3D origin)
	{
		return AABB.Vec3D.fromVector(new Vector(origin.x-1, origin.y+0.2, origin.z-1));
	}

	@Override
	public AABB.Vec3D getMaxBoundingRectPoint19(Point3D origin)
	{
		return AABB.Vec3D.fromVector(new Vector(origin.x+1, origin.y + 1, origin.z+1));
	}

	// Line height scales inversely with the configured view distance.
	@Override
	public double getLineHeight()
	{
		return (1 / properties.getDistance()) * 0.21;
	}

	@Override
	public double getZoomedInLineHeight()
	{
		return getLineHeight() + 0.0005;
	}

	/**
	 * Builds the default scroll value once the component's configuration has
	 * been fully parsed. Unparsable configured values silently fall back to
	 * defaults (best-effort config loading).
	 */
	@Override
	public void configParseComplete(PassthroughParams params)
	{
		super.configParseComplete(params);
		String valueType = properties.getValueType();
		// Fix: "decimal" was the only case-sensitive comparison; use
		// equalsIgnoreCase consistently for all value type names.
		if(valueType.equalsIgnoreCase("decimal"))
		{
			componentValue = parseDecimalValue();
		}
		else if(valueType.equalsIgnoreCase("integer"))
		{
			componentValue = parseIntegerValue();
		}
		else if(valueType.equalsIgnoreCase("duration"))
		{
			componentValue = parseDurationValue();
		}
		else if(valueType.equalsIgnoreCase("date"))
		{
			componentValue = parseDateValue();
		}
		else if(valueType.equalsIgnoreCase("list"))
		{
			componentValue = parseListValue();
		}
	}

	// Builds the default value for a "decimal" scroller: default 0, step 1.0,
	// optional min/max bounds.
	private DoubleScrollValue parseDecimalValue()
	{
		double defaultValue = parseDouble(properties.getDefaultValue(), 0);
		double step = 1.0;
		if(properties.getStep() != null)
		{
			step = parseDouble(properties.getStep(), 1.0);
		}
		Double minValue = parseBoxedDouble(properties.getMinValue());
		Double maxValue = parseBoxedDouble(properties.getMaxValue());
		return new DoubleScrollValue(defaultValue, step, minValue, maxValue, properties.getDecimalFormat(), properties.wrap());
	}

	// Builds the default value for an "integer" scroller: default 0, step 1,
	// optional min/max bounds.
	private IntegerScrollValue parseIntegerValue()
	{
		int defaultValue = parseInt(properties.getDefaultValue(), 0);
		int step = 1;
		if(properties.getStep() != null)
		{
			step = parseInt(properties.getStep(), 1);
		}
		Integer minValue = parseBoxedInt(properties.getMinValue());
		Integer maxValue = parseBoxedInt(properties.getMaxValue());
		return new IntegerScrollValue(defaultValue, step, minValue, maxValue, properties.wrap());
	}

	// Builds the default value for a "duration" scroller: default zero,
	// step one second, optional min/max bounds.
	private DurationScrollValue parseDurationValue()
	{
		Duration defaultValue = parseDuration(properties.getDefaultValue(), Duration.ZERO);
		Duration step = Duration.ZERO.plusSeconds(1);
		if(properties.getStep() != null)
		{
			step = parseDuration(properties.getStep(), Duration.ZERO.plusSeconds(1));
		}
		Duration minValue = null;
		if(properties.getMinValue() != null)
		{
			minValue = parseDuration(properties.getMinValue(), null);
		}
		Duration maxValue = null;
		if(properties.getMaxValue() != null)
		{
			maxValue = parseDuration(properties.getMaxValue(), null);
		}
		return new DurationScrollValue(defaultValue, step, minValue, maxValue, properties.wrap());
	}

	// Builds the default value for a "date" scroller: default now, steps are
	// expressed as durations (default one day), optional min/max bounds.
	private DateScrollValue parseDateValue()
	{
		Calendar defaultValue = parseDate(properties.getDefaultValue(), Calendar.getInstance());
		Duration step = Duration.ZERO.plusDays(1);
		if(properties.getStep() != null)
		{
			step = parseDuration(properties.getStep(), Duration.ZERO.plusDays(1));
		}
		Calendar minValue = null;
		if(properties.getMinValue() != null)
		{
			minValue = parseDate(properties.getMinValue(), null);
		}
		Calendar maxValue = null;
		if(properties.getMaxValue() != null)
		{
			maxValue = parseDate(properties.getMaxValue(), null);
		}
		return new DateScrollValue(defaultValue, step, minValue, maxValue, properties.wrap());
	}

	// Builds the value for a "list" scroller; a missing item list yields an
	// empty list rather than null.
	private ListScrollValue parseListValue()
	{
		if(properties.getListItems() != null)
		{
			return new ListScrollValue(properties.getListItems());
		}
		return new ListScrollValue(new ArrayList<String>());
	}

	// Parses a double; returns fallback on a malformed number.
	private static double parseDouble(String raw, double fallback)
	{
		try
		{
			return Double.parseDouble(raw);
		}
		catch(NumberFormatException e)
		{
			return fallback;
		}
	}

	// Parses an optional double; returns null when absent or malformed.
	private static Double parseBoxedDouble(String raw)
	{
		if(raw == null) return null;
		try
		{
			return Double.parseDouble(raw);
		}
		catch(NumberFormatException e)
		{
			return null;
		}
	}

	// Parses an int; returns fallback on a malformed number.
	private static int parseInt(String raw, int fallback)
	{
		try
		{
			return Integer.parseInt(raw);
		}
		catch(NumberFormatException e)
		{
			return fallback;
		}
	}

	// Parses an optional int; returns null when absent or malformed.
	private static Integer parseBoxedInt(String raw)
	{
		if(raw == null) return null;
		try
		{
			return Integer.parseInt(raw);
		}
		catch(NumberFormatException e)
		{
			return null;
		}
	}

	// Parses a duration; returns fallback when the format is invalid.
	private static Duration parseDuration(String raw, Duration fallback)
	{
		try
		{
			return TimeFormat.parseDurationFormat(raw);
		}
		catch(InvalidDurationFormatException e)
		{
			return fallback;
		}
	}

	// Parses a date; returns fallback when the format is invalid.
	private static Calendar parseDate(String raw, Calendar fallback)
	{
		try
		{
			return TimeFormat.parseDateFormat(raw);
		}
		catch(InvalidDateFormatException e)
		{
			return fallback;
		}
	}

	@Override
	public ValueScrollerComponentProperties getProperties()
	{
		return properties;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.validation.html;
import static org.apache.jena.fuseki.validation.html.ValidatorHtmlLib.finishFixed;
import static org.apache.jena.fuseki.validation.html.ValidatorHtmlLib.output;
import static org.apache.jena.fuseki.validation.html.ValidatorHtmlLib.printHead;
import static org.apache.jena.fuseki.validation.html.ValidatorHtmlLib.serviceLog;
import static org.apache.jena.fuseki.validation.html.ValidatorHtmlLib.setHeaders;
import static org.apache.jena.fuseki.validation.html.ValidatorHtmlLib.startFixed;
import java.io.IOException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.Syntax;
import org.apache.jena.sparql.ARQException;
import org.apache.jena.sparql.algebra.Algebra;
import org.apache.jena.sparql.algebra.Op;
import org.apache.jena.sparql.serializer.SerializationContext;
public class QueryValidatorHTML {
// Utility class; not instantiable.
private QueryValidatorHTML() {}
// Request parameter names accepted by the validator form.
static final String paramLineNumbers = "linenumbers";
static final String paramFormat = "outputFormat";
static final String paramQuery = "query";
static final String paramSyntax = "languageSyntax";
// static final String paramSyntaxExtended = "extendedSyntax";
/**
 * Validates a SPARQL query submitted over HTTP and writes an HTML report:
 * the input text, any parse error, and the requested renderings
 * (serialized syntax, algebra, quads, optimized forms).
 */
public static void executeHTML(HttpServletRequest httpRequest, HttpServletResponse httpResponse) {
    try {
        String[] queryArgs = httpRequest.getParameterValues(paramQuery);
        if ( queryArgs == null || queryArgs.length == 0 ) {
            httpResponse.sendError(HttpServletResponse.SC_BAD_REQUEST, "No query parameter to validator");
            return;
        }
        if ( queryArgs.length > 1 ) {
            httpResponse.sendError(HttpServletResponse.SC_BAD_REQUEST, "Too many query parameters");
            return;
        }
        // Strip trailing newlines/spaces from the submitted query text.
        final String queryString = httpRequest.getParameter(paramQuery).replaceAll("(\r|\n| )*$", "");
        String querySyntax = httpRequest.getParameter(paramSyntax);
        if ( querySyntax == null || querySyntax.isEmpty() )
            querySyntax = "SPARQL";
        Syntax language = Syntax.lookup(querySyntax);
        if ( language == null ) {
            httpResponse.sendError(HttpServletResponse.SC_BAD_REQUEST, "Unknown syntax: " + querySyntax);
            return;
        }
        // Output selection flags, driven by repeated "outputFormat" parameters.
        boolean outputSPARQL = false;
        boolean outputPrefix = false; // currently has no renderer below
        boolean outputAlgebra = false;
        boolean outputQuads = false;
        boolean outputOptimized = false;
        boolean outputOptimizedQuads = false;
        String[] formats = httpRequest.getParameterValues(paramFormat);
        if ( formats != null ) {
            for ( String format : formats ) {
                switch (format) {
                    case "sparql"   : outputSPARQL = true; break;
                    case "prefix"   : outputPrefix = true; break;
                    case "algebra"  : outputAlgebra = true; break;
                    case "quads"    : outputQuads = true; break;
                    case "opt"      : outputOptimized = true; break;
                    case "optquads" : outputOptimizedQuads = true; break;
                }
            }
        }
        // Line numbering defaults to on unless explicitly disabled.
        boolean lineNumbers = true;
        String lineNumbersArg = httpRequest.getParameter(paramLineNumbers);
        if ( lineNumbersArg != null )
            lineNumbers = lineNumbersArg.equalsIgnoreCase("true") || lineNumbersArg.equalsIgnoreCase("yes");
        setHeaders(httpResponse);
        ServletOutputStream outStream = httpResponse.getOutputStream();
        outStream.println("<html>");
        printHead(outStream, "SPARQL Query Validation Report");
        outStream.println("<body>");
        outStream.println("<h1>SPARQL Query Validator</h1>");
        // Echo the query exactly as received.
        outStream.println("<p>Input:</p>");
        output(outStream, (out) -> out.print(queryString), lineNumbers);
        // Attempt to parse it.
        Query query = null;
        try {
            query = QueryFactory.create(queryString, "http://example/base/", language);
        } catch (ARQException ex) {
            // Over-generous catch (should be QueryException) but keeps the code robust.
            printProblem(outStream, "<p>Syntax error:</p>", ex);
        } catch (RuntimeException ex) {
            printProblem(outStream, "<p>Internal error:</p>", ex);
        }
        if ( query != null ) {
            if ( outputSPARQL )
                outputSyntax(outStream, query, lineNumbers);
            if ( outputAlgebra )
                outputAlgebra(outStream, query, lineNumbers);
            if ( outputQuads )
                outputAlgebraQuads(outStream, query, lineNumbers);
            if ( outputOptimized )
                outputAlgebraOpt(outStream, query, lineNumbers);
            if ( outputOptimizedQuads )
                outputAlgebraOptQuads(outStream, query, lineNumbers);
        }
        outStream.println("</body>");
        outStream.println("</html>");
    } catch (Exception ex) {
        serviceLog.warn("Exception in doGet", ex);
    }
}
/** Writes a heading followed by the exception message in a fixed-width block. */
private static void printProblem(ServletOutputStream outStream, String heading, RuntimeException ex) throws IOException {
    outStream.println(heading);
    startFixed(outStream);
    outStream.println(ex.getMessage());
    finishFixed(outStream);
}
private static void outputSyntax(ServletOutputStream outStream, final Query query, boolean lineNumbers) throws IOException {
output(outStream, (out) -> query.serialize(out), lineNumbers);
}
private static void outputAlgebra(ServletOutputStream outStream, final Query query, boolean lineNumbers) throws IOException {
outStream.println("<p>Algebra structure:</p>");
final Op op = Algebra.compile(query); // No optimization
outputQueryOp(outStream, query, op, lineNumbers);
}
private static void outputAlgebraOpt(ServletOutputStream outStream, final Query query, boolean lineNumbers) throws IOException {
outStream.println("<p>Alebgra, with general triple optimizations:</p>");
final Op op = Algebra.optimize(Algebra.compile(query));
outputQueryOp(outStream, query, op, lineNumbers);
}
private static void outputAlgebraQuads(ServletOutputStream outStream, final Query query, boolean lineNumbers) throws IOException {
outStream.println("<p>Quad structure:</p>");
final Op op = Algebra.toQuadForm(Algebra.compile(query));
outputQueryOp(outStream, query, op, lineNumbers);
}
private static void outputAlgebraOptQuads(ServletOutputStream outStream, final Query query, boolean lineNumbers) throws IOException {
outStream.println("<p>Alebgra, with general quads optimizations:</p>");
final Op op = Algebra.optimize(Algebra.toQuadForm(Algebra.compile(query)));
outputQueryOp(outStream, query, op, lineNumbers);
}
private static void outputQueryOp(ServletOutputStream outStream, Query query, final Op op, boolean lineNumbers) throws IOException {
final SerializationContext sCxt = new SerializationContext(query);
output(outStream, out -> op.output(out, sCxt), lineNumbers);
}
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.ntp;
import android.test.suitebuilder.annotation.LargeTest;
import android.text.TextUtils;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.ListView;
import org.chromium.base.ThreadUtils;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.BookmarksBridge.BookmarkItem;
import org.chromium.chrome.browser.ChromeSwitches;
import org.chromium.chrome.browser.Tab;
import org.chromium.chrome.browser.UrlConstants;
import org.chromium.chrome.browser.bookmark.AddEditBookmarkFragment;
import org.chromium.chrome.browser.bookmark.ManageBookmarkActivity;
import org.chromium.chrome.browser.bookmark.SelectBookmarkFolderFragment;
import org.chromium.chrome.test.ChromeTabbedActivityTestBase;
import org.chromium.chrome.test.util.ActivityUtils;
import org.chromium.chrome.test.util.BookmarkTestUtils;
import org.chromium.chrome.test.util.ChromeTabUtils;
import org.chromium.chrome.test.util.TestHttpServerClient;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.content.browser.test.util.TestTouchUtils;
import org.chromium.content.browser.test.util.TouchCommon;
import org.chromium.content.browser.test.util.UiUtils;
import java.util.concurrent.Callable;
/**
 * Instrumentation tests for the old (non-enhanced) bookmarks page:
 * creating/opening bookmarks, folder-hierarchy navigation, and the
 * bookmark item context menu (open, open incognito, edit, delete).
 */
@CommandLineFlags.Add(ChromeSwitches.ENABLE_ENHANCED_BOOKMARKS + "=0")
public class BookmarksPageTest extends ChromeTabbedActivityTestBase {

    private static final String TEST_PAGE =
            TestHttpServerClient.getUrl("chrome/test/data/android/about.html");
    private static final String TEST_PAGE_TITLE = "About";
    private static final String TEST_FOLDER_TITLE = "Test Folder";
    private static final String TEST_PAGE_TITLE_2 = "About 2";
    private static final String MOBILE_BOOKMARKS_TITLE = "Mobile bookmarks";
    private static final String BOOKMARKS_TITLE = "Bookmarks";

    // Views of the currently displayed bookmarks page; (re)assigned each time
    // loadMobileBookmarksPage() runs.
    private ListView mBookmarksList;
    private LinearLayout mHierarchyLayout;

    @Override
    public void startMainActivity() throws InterruptedException {
        startMainActivityOnBlankPage();
    }

    /** Bookmarks TEST_PAGE via the menu, then navigates to the mobile bookmarks folder. */
    private void addBookmark() throws InterruptedException {
        loadUrl(TEST_PAGE);
        BookmarkTestUtils.addCurrentUrlAsBookmark(this, getActivity());
        loadMobileBookmarksPage();
    }

    /**
     * Creates TEST_FOLDER_TITLE through the add/edit bookmark dialogs, saves
     * TEST_PAGE into it, then navigates to the mobile bookmarks folder.
     */
    private void addFolderAndAddBookmark() throws InterruptedException {
        loadUrl(TEST_PAGE);
        ManageBookmarkActivity addActivity = BookmarkTestUtils.selectBookmarkItemFromMenu(
                getInstrumentation(), getActivity());
        final AddEditBookmarkFragment addFragment =
                BookmarkTestUtils.loadAddEditFragment(addActivity);
        BookmarkTestUtils.clickSelectFolderButton(this, addFragment);
        SelectBookmarkFolderFragment selectedFolder = BookmarkTestUtils.loadSelectFragment(
                addActivity);
        BookmarkTestUtils.clickNewFolderButton(this, selectedFolder);
        final AddEditBookmarkFragment addNewFolderFragment =
                BookmarkTestUtils.loadAddFolderFragment(addActivity);
        // Type the new folder's title on the UI thread.
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                ((EditText) addNewFolderFragment.getView().findViewById(R.id.bookmark_title_input))
                        .setText(TEST_FOLDER_TITLE);
            }
        });
        BookmarkTestUtils.clickOkButton(this, addNewFolderFragment);
        BookmarkTestUtils.clickOkButton(this, addFragment);
        loadMobileBookmarksPage();
    }

    /**
     * Loads the "Mobile bookmarks" folder as a native page and caches its
     * hierarchy layout and list view for later assertions.
     */
    private void loadMobileBookmarksPage() throws InterruptedException {
        final String mobileFolderUrl = UrlConstants.BOOKMARKS_FOLDER_URL + "2";
        loadUrl(mobileFolderUrl);
        Tab tab = getActivity().getActivityTab();
        assertTrue(tab.getNativePage() instanceof BookmarksPage);
        mHierarchyLayout = (LinearLayout) getActivity().findViewById(
                R.id.bookmark_folder_structure);
        mBookmarksList = (ListView) getActivity().findViewById(R.id.bookmarks_list_view);
    }

    /** Clicks the given bookmark item and verifies TEST_PAGE loads in the current tab. */
    private void openBookmarkInCurrentTab(final BookmarkItemView itemView)
            throws InterruptedException {
        ChromeTabUtils.waitForTabPageLoaded(getActivity().getActivityTab(), new Runnable() {
            @Override
            public void run() {
                TouchCommon.singleClickView(itemView);
            }
        });
        BookmarkTestUtils.assertUrlBarEquals(
                getActivity(), "urlBar string not matching the bookmarked page", TEST_PAGE);
    }

    /** Adds a bookmark, then long-presses its row to open the context menu. */
    private void addBookmarkAndLongClickForContextMenu() throws InterruptedException {
        addBookmark();
        BookmarkItemView itemView = (BookmarkItemView) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE);
        TouchCommon.longPressView(itemView, itemView.getWidth() / 2, itemView.getHeight() / 2);
    }

    /** Returns the title of the deepest (rightmost) folder shown in the hierarchy bar. */
    private String getCurrentFolderTitle() {
        return ThreadUtils.runOnUiThreadBlockingNoException(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return ((BookmarkFolderHierarchyItem) mHierarchyLayout.getChildAt(
                        mHierarchyLayout.getChildCount() - 1)).getText().toString();
            }
        });
    }

    /** Waits for the named folder to appear in the hierarchy bar, clicks it, and verifies. */
    private void clickFolderInFolderHierarchy(final String folderToSelect)
            throws InterruptedException {
        CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return (BookmarkTestUtils.getViewWithText(mHierarchyLayout,
                        folderToSelect) != null);
            }
        });
        final BookmarkFolderHierarchyItem itemView =
                (BookmarkFolderHierarchyItem) BookmarkTestUtils.getViewWithText(
                        mHierarchyLayout, folderToSelect);
        TouchCommon.singleClickView(itemView);
        assertEquals(folderToSelect, getCurrentFolderTitle());
    }

    /** Waits for the named folder to appear in the bookmarks list, clicks it, and verifies. */
    private void clickFolderInBookmarksList(final String folderToSelect)
            throws InterruptedException {
        CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return (BookmarkTestUtils.getViewWithText(mBookmarksList, folderToSelect) != null);
            }
        });
        final BookmarkItemView itemView = (BookmarkItemView) BookmarkTestUtils.getViewWithText(
                mBookmarksList, folderToSelect);
        TouchCommon.singleClickView(itemView);
        assertEquals(folderToSelect, getCurrentFolderTitle());
    }

    /** Returns whether an item with the given title exists in the current bookmarks list. */
    private boolean isItemPresentInBookmarksList(final String expectedTitle) {
        return ThreadUtils.runOnUiThreadBlockingNoException(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                for (int i = 0; i < mBookmarksList.getCount(); i++) {
                    String actualTitle =
                            ((BookmarkItem) mBookmarksList.getItemAtPosition(i)).getTitle();
                    if (TextUtils.equals(actualTitle, expectedTitle)) {
                        return true;
                    }
                }
                return false;
            }
        });
    }

    @LargeTest
    public void testCreateAndOpenBookmark() throws InterruptedException {
        addBookmark();
        // Assert "About" item is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_PAGE_TITLE));
        // Click the item "About".
        openBookmarkInCurrentTab((BookmarkItemView) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE));
    }

    @LargeTest
    public void testNavigateFoldersInFolderHierarchy() throws InterruptedException {
        addFolderAndAddBookmark();
        // Click on "Mobile bookmarks" in the Folder hierarchy.
        clickFolderInFolderHierarchy(MOBILE_BOOKMARKS_TITLE);
        // Assert "Test Folder" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_FOLDER_TITLE));
        // Click on "Bookmarks" in the Folder hierarchy.
        clickFolderInFolderHierarchy(BOOKMARKS_TITLE);
        // Assert "Desktop Bookmarks" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(MOBILE_BOOKMARKS_TITLE));
    }

    /*
    @LargeTest
    Disabled because of repeated flakes on ICS bot.
    http://crbug.com/384126
    */
    @DisabledTest
    public void testNavigateFoldersInBookmarksListView() throws InterruptedException {
        addFolderAndAddBookmark();
        // Click on "Bookmarks" in the Folder hierarchy.
        clickFolderInFolderHierarchy(BOOKMARKS_TITLE);
        // Assert "Mobile Bookmarks" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(MOBILE_BOOKMARKS_TITLE));
        // Click on "Mobile bookmarks" in the bookmarks list view.
        clickFolderInBookmarksList(MOBILE_BOOKMARKS_TITLE);
        // Assert "Test Folder" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_FOLDER_TITLE));
        // Click on "Test Folder" in the bookmarks list view.
        clickFolderInBookmarksList(TEST_FOLDER_TITLE);
        // Assert "About" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_PAGE_TITLE));
    }

    @LargeTest
    public void testContextMenuOptionOpenInANewTab() throws InterruptedException {
        addBookmark();
        BookmarkItemView itemView = (BookmarkItemView) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE);
        invokeContextMenuAndOpenInANewTab(itemView, BookmarkItemView.ID_OPEN_IN_NEW_TAB, false,
                TEST_PAGE);
    }

    @LargeTest
    public void testContextMenuOptionOpenInAnIncognitoTab() throws InterruptedException {
        addBookmark();
        BookmarkItemView itemView = (BookmarkItemView) BookmarkTestUtils.getViewWithText(
                mBookmarksList, TEST_PAGE_TITLE);
        invokeContextMenuAndOpenInANewTab(itemView, BookmarkItemView.ID_OPEN_IN_INCOGNITO_TAB, true,
                TEST_PAGE);
    }

    @LargeTest
    public void testContextMenuOptionEditBookmark() throws InterruptedException {
        addBookmarkAndLongClickForContextMenu();
        // Invoke the "Edit Bookmark" context menu option.
        final ManageBookmarkActivity activity = ActivityUtils.waitForActivity(
                getInstrumentation(), ManageBookmarkActivity.class,
                new Runnable() {
                    @Override
                    public void run() {
                        getInstrumentation().invokeContextMenuAction(
                                getActivity(), BookmarkItemView.ID_EDIT, 0);
                    }
                }
        );
        UiUtils.settleDownUI(getInstrumentation());
        // Edit the bookmark title.
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                ((EditText) activity.findViewById(R.id.bookmark_title_input))
                        .setText(TEST_PAGE_TITLE_2);
            }
        });
        TestTouchUtils.clickView(this, activity.findViewById(R.id.ok));
        // Assert "About 2" is listed in the bookmarks list.
        assertTrue(isItemPresentInBookmarksList(TEST_PAGE_TITLE_2));
    }

    @LargeTest
    public void testContextMenuOptionDeleteBookmark() throws InterruptedException {
        addBookmarkAndLongClickForContextMenu();
        // Invoke the "Delete Bookmark" context menu option.
        getInstrumentation().invokeContextMenuAction(
                getActivity(), BookmarkItemView.ID_DELETE, 0);
        UiUtils.settleDownUI(getInstrumentation());
        // Assert no bookmarks exist in the current folder.
        // (assertEquals gives a better failure message than assertTrue(x == 0).)
        assertEquals(0, mBookmarksList.getCount());
    }
}
| |
/*
* Copyright (c) 2001, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 4473210
* @summary SSLSessionContext should be accessible from SSLContext
*/
import java.io.*;
import java.net.*;
import javax.net.ssl.*;
import java.util.*;
import java.security.KeyStore;
public class SSLCtxAccessToSessCtx {

    /*
     * =============================================================
     * Set the various variables needed for the tests, then
     * specify what tests to run on each side.
     */

    /*
     * Should we run the client or server in a separate thread?
     * Both sides can throw exceptions, but do you have a preference
     * as to which side should be the main thread.
     */
    static boolean separateServerThread = true;

    /*
     * Where do we find the keystores?
     */
    static String pathToStores = "/../../../../etc";
    static String keyStoreFile = "keystore";
    static String trustStoreFile = "truststore";
    static String passwd = "passphrase";

    /*
     * Is the server ready to serve?
     */
    volatile static boolean serverReady = false;

    /*
     * Turn on SSL debugging?
     */
    static boolean debug = false;

    /*
     * If the client or server is doing some kind of object creation
     * that the other side depends on, and that thread prematurely
     * exits, you may experience a hang. The test harness will
     * terminate all hung threads after its timeout has expired,
     * currently 3 minutes by default, but you might try to be
     * smart about it....
     */

    /*
     * Define the server side of the test: accept one connection and check
     * that the session's SSLSessionContext is the same object exposed by
     * the shared SSLContext.
     *
     * If the server prematurely exits, serverReady will be set to true
     * to avoid infinite hangs.
     */
    void doServerSide(int serverPort) throws Exception {
        SSLServerSocket sslServerSocket =
            (SSLServerSocket) sslssf.createServerSocket(serverPort);
        try {
            serverPorts[createdPorts++] = sslServerSocket.getLocalPort();

            /*
             * Signal Client, we're ready for his connect.
             */
            serverReady = true;

            SSLSocket sslSocket = (SSLSocket) sslServerSocket.accept();
            InputStream sslIS = sslSocket.getInputStream();
            OutputStream sslOS = sslSocket.getOutputStream();

            // Read the client's byte; the value is unused, the read just
            // keeps both sides in lock-step.
            sslIS.read();

            SSLSessionContext sslctxCache = sslctx.getServerSessionContext();
            SSLSessionContext sessCache = sslSocket.getSession().
                                                getSessionContext();
            // Core assertion: identity, not equals -- it must be the same object.
            if (sessCache != sslctxCache)
                throw new Exception("Test failed, session_cache != sslctx_cache");
            sslOS.write(85);
            sslOS.flush();
            sslSocket.close();
        } finally {
            // Previously leaked: close the listening socket so the port is
            // released even when the test fails with an exception.
            sslServerSocket.close();
        }
    }

    /*
     * Define the client side of the test: connect once and perform the
     * matching check against the client session context.
     *
     * If the server prematurely exits, serverReady will be set to true
     * to avoid infinite hangs.
     */
    void doClientSide() throws Exception {

        /*
         * Wait for server to get started.
         */
        while (!serverReady) {
            Thread.sleep(50);
        }

        /*
         * first connection to serverPorts[0] -- a new session, session11
         * gets created, and is cached.
         */
        SSLSocket sslSocket;
        sslSocket = (SSLSocket) sslsf.
            createSocket("localhost", serverPorts[0]);
        InputStream sslIS = sslSocket.getInputStream();
        OutputStream sslOS = sslSocket.getOutputStream();
        sslOS.write(237);
        sslOS.flush();

        SSLSession sess = sslSocket.getSession();
        SSLSessionContext sessCache = sess.getSessionContext();
        SSLSessionContext sslctxCache = sslctx.getClientSessionContext();
        if (sessCache != sslctxCache)
            throw new Exception("Test failed, session_cache != sslctx_cache");

        // Wait for the server's reply byte before closing; value unused.
        sslIS.read();
        sslSocket.close();
    }

    /*
     * =============================================================
     * The remainder is just support stuff
     */

    volatile int serverPorts[] = new int[]{0};
    volatile int createdPorts = 0;
    static SSLServerSocketFactory sslssf;
    static SSLSocketFactory sslsf;
    static SSLContext sslctx;

    volatile Exception serverException = null;
    volatile Exception clientException = null;

    public static void main(String[] args) throws Exception {
        String keyFilename =
            System.getProperty("test.src", "./") + "/" + pathToStores +
            "/" + keyStoreFile;
        String trustFilename =
            System.getProperty("test.src", "./") + "/" + pathToStores +
            "/" + trustStoreFile;

        System.setProperty("javax.net.ssl.keyStore", keyFilename);
        System.setProperty("javax.net.ssl.keyStorePassword", passwd);
        System.setProperty("javax.net.ssl.trustStore", trustFilename);
        System.setProperty("javax.net.ssl.trustStorePassword", passwd);
        sslctx = SSLContext.getInstance("TLS");
        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
        KeyStore ks = KeyStore.getInstance("JKS");
        ks.load(new FileInputStream(keyFilename), passwd.toCharArray());
        kmf.init(ks, passwd.toCharArray());
        sslctx.init(kmf.getKeyManagers(), null, null);
        // The factory getters already return the SSL subtypes; the previous
        // casts were redundant.
        sslssf = sslctx.getServerSocketFactory();
        sslsf = sslctx.getSocketFactory();
        if (debug)
            System.setProperty("javax.net.debug", "all");

        /*
         * Start the tests.
         */
        new SSLCtxAccessToSessCtx();
    }

    Thread clientThread = null;
    Thread serverThread = null;

    /*
     * Primary constructor, used to drive remainder of the test.
     *
     * Fork off the other side, then do your work.
     */
    SSLCtxAccessToSessCtx() throws Exception {
        /*
         * create the SSLServerSocket and SSLSocket factories
         */
        if (separateServerThread) {
            for (int i = 0; i < serverPorts.length; i++) {
                startServer(serverPorts[i], true);
            }
            startClient(false);
        } else {
            startClient(true);
            for (int i = 0; i < serverPorts.length; i++) {
                startServer(serverPorts[i], false);
            }
        }

        /*
         * Wait for other side to close down.
         */
        if (separateServerThread) {
            serverThread.join();
        } else {
            clientThread.join();
        }

        /*
         * When we get here, the test is pretty much over.
         *
         * If the main thread excepted, that propagates back
         * immediately. If the other thread threw an exception, we
         * should report back.
         */
        if (serverException != null)
            throw serverException;
        if (clientException != null)
            throw clientException;
        System.out.println("The Session context tests passed");
    }

    void startServer(final int port,
                        boolean newThread) throws Exception {
        if (newThread) {
            serverThread = new Thread() {
                public void run() {
                    try {
                        doServerSide(port);
                    } catch (Exception e) {
                        /*
                         * Our server thread just died.
                         *
                         * Release the client, if not active already...
                         */
                        System.err.println("Server died...");
                        e.printStackTrace();
                        serverReady = true;
                        serverException = e;
                    }
                }
            };
            serverThread.start();
        } else {
            doServerSide(port);
        }
    }

    void startClient(boolean newThread)
                throws Exception {
        if (newThread) {
            clientThread = new Thread() {
                public void run() {
                    try {
                        doClientSide();
                    } catch (Exception e) {
                        /*
                         * Our client thread just died.
                         */
                        System.err.println("Client died...");
                        clientException = e;
                    }
                }
            };
            clientThread.start();
        } else {
            doClientSide();
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;
import org.apache.camel.api.management.ManagedAttribute;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.support.ReloadStrategySupport;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
/**
 * A file based {@link org.apache.camel.spi.ReloadStrategy} which watches a file folder
 * for modified files and reloads on file changes.
 * <p/>
 * This implementation uses the JDK {@link WatchService} to watch for when files are
 * created or modified. Mac OS X users should note the OS X JDK does not support
 * native file system change notification and therefore the watch service is much
 * slower than on Linux or Windows systems.
 */
@ManagedResource(description = "Managed FileWatcherReloadStrategy")
public class FileWatcherReloadStrategy extends ReloadStrategySupport {

    private String folder;
    private ExecutorService executorService;
    private WatchFileChangesTask task;

    public FileWatcherReloadStrategy() {
    }

    public FileWatcherReloadStrategy(String directory) {
        setFolder(directory);
    }

    /** Sets the directory to watch for changed XML files. */
    public void setFolder(String folder) {
        this.folder = folder;
    }

    @ManagedAttribute(description = "Folder being watched")
    public String getFolder() {
        return folder;
    }

    @ManagedAttribute(description = "Whether the watcher is running")
    public boolean isRunning() {
        return task != null && task.isRunning();
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();

        if (folder == null) {
            // no folder configured
            return;
        }

        File dir = new File(folder);
        if (dir.exists() && dir.isDirectory()) {
            log.info("Starting ReloadStrategy to watch directory: {}", dir);

            WatchEvent.Modifier modifier = null;

            // if its mac OSX then attempt to apply workaround or warn its slower
            String os = ObjectHelper.getSystemProperty("os.name", "");
            if (os.toLowerCase(Locale.US).startsWith("mac")) {
                // this modifier can speedup the scanner on mac osx (as java on mac has no native file notification integration)
                Class<WatchEvent.Modifier> clazz = getCamelContext().getClassResolver().resolveClass("com.sun.nio.file.SensitivityWatchEventModifier", WatchEvent.Modifier.class);
                if (clazz != null) {
                    WatchEvent.Modifier[] modifiers = clazz.getEnumConstants();
                    for (WatchEvent.Modifier mod : modifiers) {
                        if ("HIGH".equals(mod.name())) {
                            modifier = mod;
                            break;
                        }
                    }
                }
                if (modifier != null) {
                    log.info("On Mac OS X the JDK WatchService is slow by default so enabling SensitivityWatchEventModifier.HIGH as workaround");
                } else {
                    log.warn("On Mac OS X the JDK WatchService is slow and it may take up till 10 seconds to notice file changes");
                }
            }

            try {
                Path path = dir.toPath();
                WatchService watcher = path.getFileSystem().newWatchService();
                // we cannot support deleting files as we don't know which routes that would be
                if (modifier != null) {
                    path.register(watcher, new WatchEvent.Kind<?>[]{ENTRY_CREATE, ENTRY_MODIFY}, modifier);
                } else {
                    path.register(watcher, ENTRY_CREATE, ENTRY_MODIFY);
                }

                task = new WatchFileChangesTask(watcher, path);

                executorService = getCamelContext().getExecutorServiceManager().newSingleThreadExecutor(this, "FileWatcherReloadStrategy");
                executorService.submit(task);
            } catch (IOException e) {
                throw ObjectHelper.wrapRuntimeCamelException(e);
            }
        }
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();

        if (executorService != null) {
            getCamelContext().getExecutorServiceManager().shutdownGraceful(executorService);
            executorService = null;
        }
    }

    /**
     * Background task which watches for file changes and triggers a reload
     * for each created/modified {@code .xml} file.
     */
    protected class WatchFileChangesTask implements Runnable {

        private final WatchService watcher;
        private final Path folder;
        private volatile boolean running;

        public WatchFileChangesTask(WatchService watcher, Path folder) {
            this.watcher = watcher;
            this.folder = folder;
        }

        public boolean isRunning() {
            return running;
        }

        @SuppressWarnings("unchecked")
        public void run() {
            log.debug("ReloadStrategy is starting watching folder: {}", folder);

            // allow to run while starting Camel
            while (isStarting() || isRunAllowed()) {
                running = true;

                WatchKey key;
                try {
                    log.trace("ReloadStrategy is polling for file changes in directory: {}", folder);
                    // wait for a key to be available
                    key = watcher.poll(2, TimeUnit.SECONDS);
                } catch (InterruptedException ex) {
                    // restore the interrupt flag so the owning executor can
                    // observe that shutdown was requested (previously swallowed)
                    Thread.currentThread().interrupt();
                    break;
                }

                if (key != null) {
                    for (WatchEvent<?> event : key.pollEvents()) {
                        WatchEvent<Path> we = (WatchEvent<Path>) event;
                        Path path = we.context();
                        String name = folder.resolve(path).toAbsolutePath().toFile().getAbsolutePath();
                        log.trace("Modified/Created file: {}", name);

                        // must be an .xml file
                        if (name.toLowerCase(Locale.US).endsWith(".xml")) {
                            log.debug("Modified/Created XML file: {}", name);
                            // try-with-resources so the stream is closed even
                            // when the reload throws (previously leaked)
                            try (FileInputStream fis = new FileInputStream(name)) {
                                onReloadXml(getCamelContext(), name, fis);
                            } catch (Exception e) {
                                log.warn("Error reloading routes from file: " + name + " due " + e.getMessage() + ". This exception is ignored.", e);
                            }
                        }
                    }

                    // the key must be reset after processed
                    boolean valid = key.reset();
                    if (!valid) {
                        break;
                    }
                }
            }

            running = false;

            // release the native watch resources when the task stops (previously never closed)
            IOHelper.close(watcher);

            log.info("ReloadStrategy is stopping watching folder: {}", folder);
        }
    }
}
| |
/*
* Copyright (C) 2004-2011 John Currier
* Copyright (C) 2017 Nils Petzaell
*
* This file is a part of the SchemaSpy project (http://schemaspy.org).
*
* SchemaSpy is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* SchemaSpy is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.schemaspy;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.commons.io.filefilter.IOFileFilter;
import org.schemaspy.cli.CommandLineArguments;
import org.schemaspy.model.*;
import org.schemaspy.model.xml.SchemaMeta;
import org.schemaspy.output.OutputException;
import org.schemaspy.output.OutputProducer;
import org.schemaspy.output.xml.dom.XmlProducerUsingDOM;
import org.schemaspy.service.DatabaseService;
import org.schemaspy.service.SqlService;
import org.schemaspy.util.ConnectionURLBuilder;
import org.schemaspy.util.Dot;
import org.schemaspy.util.LineWriter;
import org.schemaspy.util.ResourceWriter;
import org.schemaspy.view.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author John Currier
* @author Nils Petzaell
*/
public class SchemaAnalyzer {
private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SqlService sqlService;
private final DatabaseService databaseService;
private final CommandLineArguments commandLineArguments;
private final List<OutputProducer> outputProducers = new ArrayList<>();
/**
 * Creates an analyzer wired to the given collaborators and registers the
 * default XML output producer. All arguments are mandatory.
 *
 * @throws NullPointerException if any argument is {@code null}
 */
public SchemaAnalyzer(SqlService sqlService, DatabaseService databaseService, CommandLineArguments commandLineArguments) {
    // Validate in declaration order, then assign.
    Objects.requireNonNull(sqlService);
    Objects.requireNonNull(databaseService);
    Objects.requireNonNull(commandLineArguments);
    this.sqlService = sqlService;
    this.databaseService = databaseService;
    this.commandLineArguments = commandLineArguments;
    addOutputProducer(new XmlProducerUsingDOM());
}
/**
 * Registers an additional {@link OutputProducer} to run during analysis.
 *
 * @return this analyzer, for call chaining
 */
public SchemaAnalyzer addOutputProducer(OutputProducer outputProducer) {
    this.outputProducers.add(outputProducer);
    return this;
}
/**
 * Entry point for an analysis run. Dispatches to the multi-schema path when
 * {@code -all} (evaluateAll) or {@code -schemas} was supplied; otherwise
 * analyzes the single schema named on the command line.
 */
public Database analyze(Config config) throws SQLException, IOException {
    // don't render console-based detail unless we're generating HTML (those probably don't have a user watching)
    // and not already logging fine details (to keep from obfuscating those)
    boolean render = config.isHtmlGenerationEnabled();
    ProgressListener progressListener = new ConsoleProgressListener(render, commandLineArguments);

    // if -all(evaluteAll) or -schemas given then analyzeMultipleSchemas
    if (config.getSchemas() != null || config.isEvaluateAllEnabled()) {
        return analyzeMultipleSchemas(config, progressListener);
    }

    // single-schema path
    File outputDirectory = commandLineArguments.getOutputDirectory();
    Objects.requireNonNull(outputDirectory);
    String schema = commandLineArguments.getSchema();
    return analyze(schema, config, outputDirectory, progressListener);
}
/**
 * Analyzes multiple schemas: either the explicit -schemas list or, with -all,
 * every populated schema discovered through the JDBC metadata. Each schema's
 * output goes into its own sub-directory of the output dir, and an index page
 * linking all schemas is written at the end.
 *
 * @return the last analyzed Database, or null if any single-schema analysis
 *         failed or a required parameter was missing
 */
public Database analyzeMultipleSchemas(Config config, ProgressListener progressListener) throws SQLException, IOException {
try {
// following params will be replaced by something appropriate
List<String> args = config.asList();
args.remove("-schemas");
args.remove("-schemata");
List<String> schemas = config.getSchemas();
Database db = null;
String schemaSpec = config.getSchemaSpec();
// NOTE(review): this connection is never closed in this method -- presumably
// managed/shared elsewhere (sqlService); verify before adding a close here.
Connection connection = this.getConnection(config);
DatabaseMetaData meta = connection.getMetaData();
//-all(evaluteAll) given then get list of the database schemas
if (schemas == null || config.isEvaluateAllEnabled()) {
// default to matching every schema name
if (schemaSpec == null)
schemaSpec = ".*";
LOGGER.info(
"Analyzing schemas that match regular expression '{}'. " +
"(use -schemaSpec on command line or in .properties to exclude other schemas)",
schemaSpec);
// NOTE(review): the boolean flag's meaning isn't visible here -- the second
// call appears to relax the "populated" criterion as a fallback; confirm in DbAnalyzer.
schemas = DbAnalyzer.getPopulatedSchemas(meta, schemaSpec, false);
if (schemas.isEmpty())
schemas = DbAnalyzer.getPopulatedSchemas(meta, schemaSpec, true);
// last resort: assume a schema named after the connecting user
if (schemas.isEmpty())
schemas.add(config.getUser());
}
LOGGER.info("Analyzing schemas: " + System.lineSeparator() + "{}",
schemas.stream().collect(Collectors.joining(System.lineSeparator())));
String dbName = config.getDb();
File outputDir = commandLineArguments.getOutputDirectory();
// set flag which later on used for generation rootPathtoHome link.
config.setOneOfMultipleSchemas(true);
List<MustacheSchema> mustacheSchemas = new ArrayList<>();
MustacheCatalog mustacheCatalog = null;
for (String schema : schemas) {
// reset -all(evaluteAll) and -schemas parameter to avoid infinite loop! now we are analyzing single schema
config.setSchemas(null);
config.setEvaluateAllEnabled(false);
// NOTE(review): when no -db was given the schema name doubles as the db name,
// otherwise only the schema is switched -- confirm this asymmetry is intended.
if (dbName == null)
config.setDb(schema);
else
config.setSchema(schema);
LOGGER.info("Analyzing {}", schema);
// each schema gets its own sub-directory of the overall output dir
File outputDirForSchema = new File(outputDir, schema);
db = this.analyze(schema, config, outputDirForSchema, progressListener);
if (db == null) //if any of analysed schema returns null
return null;
mustacheSchemas.add(new MustacheSchema(db.getSchema(), ""));
// only the last schema's catalog is kept for the index page
mustacheCatalog = new MustacheCatalog(db.getCatalog(), "");
}
prepareLayoutFiles(outputDir);
// write the top-level index page linking all analyzed schemas
HtmlMultipleSchemasIndexPage.getInstance().write(outputDir, dbName, mustacheCatalog, mustacheSchemas, meta);
return db;
} catch (Config.MissingRequiredParameterException missingParam) {
config.dumpUsage(missingParam.getMessage(), missingParam.isDbTypeSpecific());
return null;
}
}
/**
 * Analyzes a single schema: connects, gathers schema details into a {@code Database}
 * model, optionally generates the HTML documentation, runs all configured output
 * producers, and writes insertion/deletion order files.
 *
 * @param schema schema to analyze; when null, the connection's default schema is used
 * @param config run configuration
 * @param outputDir directory that receives all generated artifacts (created if absent)
 * @param progressListener receives progress callbacks for each analysis phase
 * @return the populated Database model, or null if a required parameter was missing
 * @throws SQLException if database metadata cannot be read
 * @throws IOException if output files cannot be written
 */
public Database analyze(String schema, Config config, File outputDir, ProgressListener progressListener) throws SQLException, IOException {
    try {
        LOGGER.info("Starting schema analysis");
        FileUtils.forceMkdir(outputDir);
        String dbName = config.getDb();
        String catalog = commandLineArguments.getCatalog();
        DatabaseMetaData meta = sqlService.connect(config);
        LOGGER.debug("supportsSchemasInTableDefinitions: {}", meta.supportsSchemasInTableDefinitions());
        LOGGER.debug("supportsCatalogsInTableDefinitions: {}", meta.supportsCatalogsInTableDefinitions());
        // fall back to the connection's default catalog and schema when not specified
        if (schema == null)
            schema = meta.getConnection().getSchema();
        if (catalog == null)
            catalog = meta.getConnection().getCatalog();
        // optional user-supplied additional metadata (XML) to merge into the model
        SchemaMeta schemaMeta = config.getMeta() == null ? null : new SchemaMeta(config.getMeta(), dbName, schema);
        if (config.isHtmlGenerationEnabled()) {
            // pre-create the directory structure the HTML pages/diagrams will be written into
            FileUtils.forceMkdir(new File(outputDir, "tables"));
            FileUtils.forceMkdir(new File(outputDir, "diagrams/summary"));
            LOGGER.info("Connected to {} - {}", meta.getDatabaseProductName(), meta.getDatabaseProductVersion());
            if (schemaMeta != null && schemaMeta.getFile() != null) {
                LOGGER.info("Using additional metadata from {}", schemaMeta.getFile());
            }
        }
        //
        // create our representation of the database
        //
        Database db = new Database(meta, dbName, catalog, schema, schemaMeta);
        databaseService.gatheringSchemaDetails(config, db, progressListener);
        long duration = progressListener.startedGraphingSummaries();
        // tables and views are treated uniformly from here on
        Collection<Table> tables = new ArrayList<>(db.getTables());
        tables.addAll(db.getViews());
        if (tables.isEmpty()) {
            dumpNoTablesMessage(schema, config.getUser(), meta, config.getTableInclusions() != null);
            if (!config.isOneOfMultipleSchemas()) // don't bail if we're doing the whole enchilada
                throw new EmptySchemaException();
        }
        if (config.isHtmlGenerationEnabled()) {
            generateHtmlDoc(config, progressListener, outputDir, db, duration, tables);
        }
        // run any additional registered output producers; failures are tolerated
        // per-schema when analyzing multiple schemas, fatal otherwise
        outputProducers.forEach(
            outputProducer -> {
                try {
                    outputProducer.generate(db, outputDir);
                } catch (OutputException oe) {
                    if (config.isOneOfMultipleSchemas()) {
                        LOGGER.warn("Failed to produce output", oe);
                    } else {
                        throw oe;
                    }
                }
            });
        List<ForeignKeyConstraint> recursiveConstraints = new ArrayList<>();
        // create an orderer to be able to determine insertion and deletion ordering of tables
        TableOrderer orderer = new TableOrderer();
        // side effect is that the RI relationships get trashed
        // also populates the recursiveConstraints collection
        List<Table> orderedTables = orderer.getTablesOrderedByRI(db.getTables(), recursiveConstraints);
        writeOrders(outputDir, orderedTables);
        duration = progressListener.finishedGatheringDetails();
        long overallDuration = progressListener.finished(tables, config);
        if (config.isHtmlGenerationEnabled()) {
            LOGGER.info("Wrote table details in {} seconds", duration / 1000);
            LOGGER.info("Wrote relationship details of {} tables/views to directory '{}' in {} seconds.", tables.size(), outputDir, overallDuration / 1000);
            LOGGER.info("View the results by opening {}", new File(outputDir, "index.html"));
        }
        return db;
    } catch (Config.MissingRequiredParameterException missingParam) {
        config.dumpUsage(missingParam.getMessage(), missingParam.isDbTypeSpecific());
        return null;
    }
}
/**
 * Writes the table insertion-order and deletion-order text files.
 * Deletion order is simply the reverse of insertion order; note that this
 * reverses {@code orderedTables} in place.
 *
 * @param outputDir directory that receives insertionOrder.txt / deletionOrder.txt
 * @param orderedTables tables ordered by referential integrity (parents first)
 * @throws IOException if either file cannot be written
 */
private void writeOrders(File outputDir, List<Table> orderedTables) throws IOException {
    writeOrderFile(new File(outputDir, "insertionOrder.txt"), orderedTables);
    // deletion order is the reverse of insertion order (children before parents)
    Collections.reverse(orderedTables);
    writeOrderFile(new File(outputDir, "deletionOrder.txt"), orderedTables);
}

/**
 * Writes a single table-ordering file. The previous implementation caught
 * IOException only to rethrow it wrapped in a new IOException, which added
 * nothing; the exception now propagates directly while the writer is still
 * reliably closed.
 */
private static void writeOrderFile(File target, List<Table> tables) throws IOException {
    LineWriter out = new LineWriter(target, 16 * 1024, Config.DOT_CHARSET);
    try {
        TextFormatter.getInstance().write(tables, false, out);
    } finally {
        out.close();
    }
}
/**
 * Generates the complete HTML documentation set: summary relationship diagrams
 * (real and implied), orphan diagrams, index/constraint/anomaly/column/routine
 * pages, and the per-table detail pages.
 *
 * Fixes in this revision: the implied-constraints list no longer uses a raw
 * {@code ArrayList}, and a redundant second {@code out.close()} on an
 * already-closed writer has been removed.
 *
 * @param config run configuration
 * @param progressListener receives progress callbacks
 * @param outputDir root output directory for the generated site
 * @param db populated database model
 * @param duration milliseconds spent gathering schema details (for logging)
 * @param tables all tables and views to document
 * @throws IOException if any output file cannot be written
 */
private void generateHtmlDoc(Config config, ProgressListener progressListener, File outputDir, Database db, long duration, Collection<Table> tables) throws IOException {
    LineWriter out;
    LOGGER.info("Gathered schema details in {} seconds", duration / 1000);
    LOGGER.info("Writing/graphing summary");
    prepareLayoutFiles(outputDir);
    progressListener.graphingSummaryProgressed();
    // only embed column-level detail in diagrams for reasonably small schemas
    boolean showDetailedTables = tables.size() <= config.getMaxDetailedTables();
    final boolean includeImpliedConstraints = config.isImpliedConstraintsEnabled();
    // if evaluating a 'ruby on rails-based' database then connect the columns
    // based on RoR conventions
    // note that this is done before 'hasRealRelationships' gets evaluated so
    // we get a relationships ER diagram
    if (config.isRailsEnabled())
        DbAnalyzer.getRailsConstraints(db.getTablesByName());
    File summaryDir = new File(outputDir, "diagrams/summary");
    // generate the compact form of the relationships .dot file
    String dotBaseFilespec = "relationships";
    out = new LineWriter(new File(summaryDir, dotBaseFilespec + ".real.compact.dot"), Config.DOT_CHARSET);
    WriteStats stats = new WriteStats(tables);
    DotFormatter.getInstance().writeRealRelationships(db, tables, true, showDetailedTables, stats, out, outputDir);
    boolean hasRealRelationships = stats.getNumTablesWritten() > 0 || stats.getNumViewsWritten() > 0;
    out.close();
    if (hasRealRelationships) {
        // real relationships exist so generate the 'big' form of the relationships .dot file
        progressListener.graphingSummaryProgressed();
        out = new LineWriter(new File(summaryDir, dotBaseFilespec + ".real.large.dot"), Config.DOT_CHARSET);
        DotFormatter.getInstance().writeRealRelationships(db, tables, false, showDetailedTables, stats, out, outputDir);
        out.close();
    }
    // getting implied constraints has a side-effect of associating the parent/child tables, so don't do it
    // here unless they want that behavior
    List<ImpliedForeignKeyConstraint> impliedConstraints = new ArrayList<>();
    if (includeImpliedConstraints)
        impliedConstraints.addAll(DbAnalyzer.getImpliedConstraints(tables));
    List<Table> orphans = DbAnalyzer.getOrphans(tables);
    config.setHasOrphans(!orphans.isEmpty() && Dot.getInstance().isValid());
    config.setHasRoutines(!db.getRoutines().isEmpty());
    progressListener.graphingSummaryProgressed();
    File impliedDotFile = new File(summaryDir, dotBaseFilespec + ".implied.compact.dot");
    out = new LineWriter(impliedDotFile, Config.DOT_CHARSET);
    boolean hasImplied = DotFormatter.getInstance().writeAllRelationships(db, tables, true, showDetailedTables, stats, out, outputDir);
    Set<TableColumn> excludedColumns = stats.getExcludedColumns();
    out.close();
    if (hasImplied) {
        impliedDotFile = new File(summaryDir, dotBaseFilespec + ".implied.large.dot");
        out = new LineWriter(impliedDotFile, Config.DOT_CHARSET);
        DotFormatter.getInstance().writeAllRelationships(db, tables, false, showDetailedTables, stats, out, outputDir);
        out.close();
    } else {
        // no implied relationships: remove the (empty) compact file written above
        Files.deleteIfExists(impliedDotFile.toPath());
    }
    HtmlRelationshipsPage.getInstance().write(db, summaryDir, dotBaseFilespec, hasRealRelationships, hasImplied, excludedColumns,
        progressListener, outputDir);
    progressListener.graphingSummaryProgressed();
    File orphansDir = new File(outputDir, "diagrams/orphans");
    FileUtils.forceMkdir(orphansDir);
    HtmlOrphansPage.getInstance().write(db, orphans, orphansDir, outputDir);
    progressListener.graphingSummaryProgressed();
    HtmlMainIndexPage.getInstance().write(db, tables, impliedConstraints, outputDir);
    progressListener.graphingSummaryProgressed();
    List<ForeignKeyConstraint> constraints = DbAnalyzer.getForeignKeyConstraints(tables);
    HtmlConstraintsPage constraintIndexFormatter = HtmlConstraintsPage.getInstance();
    constraintIndexFormatter.write(db, constraints, tables, outputDir);
    progressListener.graphingSummaryProgressed();
    HtmlAnomaliesPage.getInstance().write(db, tables, impliedConstraints, outputDir);
    progressListener.graphingSummaryProgressed();
    // one columns page per configured column-info view
    for (HtmlColumnsPage.ColumnInfo columnInfo : HtmlColumnsPage.getInstance().getColumnInfos().values()) {
        HtmlColumnsPage.getInstance().write(db, tables, columnInfo, outputDir);
    }
    progressListener.graphingSummaryProgressed();
    HtmlRoutinesPage.getInstance().write(db, outputDir);
    // create detailed diagrams
    duration = progressListener.startedGraphingDetails();
    LOGGER.info("Completed summary in {} seconds", duration / 1000);
    LOGGER.info("Writing/diagramming details");
    generateTables(progressListener, outputDir, db, tables, stats);
    HtmlComponentPage.getInstance().write(db, tables, outputDir);
}
/**
 * Copies the 'layout' resource folder (CSS/JS/images) to the destination
 * directory, excluding the template .html files.
 *
 * @param outputDir destination directory
 * @throws IOException when the layout resources cannot be located or copied
 */
private void prepareLayoutFiles(File outputDir) throws IOException {
    // pick the first 'layout' classpath resource that doesn't come from test-classes
    URL url = null;
    Enumeration<URL> possibleResources = getClass().getClassLoader().getResources("layout");
    while (possibleResources.hasMoreElements() && Objects.isNull(url)) {
        URL possibleResource = possibleResources.nextElement();
        if (!possibleResource.getPath().contains("test-classes")) {
            url = possibleResource;
        }
    }
    // fail fast with a clear message instead of passing a null URL downstream
    if (Objects.isNull(url)) {
        throw new IOException("Unable to locate 'layout' resources on the classpath");
    }
    // copy everything except the .html template files (those are rendered separately);
    // the previous single-argument FileFilterUtils.and(...) wrapper was redundant
    FileFilter filter = FileFilterUtils.notFileFilter(FileFilterUtils.suffixFileFilter(".html"));
    ResourceWriter.copyResources(url, outputDir, filter);
}
/**
 * Builds the JDBC connection URL from the configuration and opens a connection
 * via the driver loader. Falls back to the properties-supplied driver path when
 * none was given on the command line.
 *
 * @param config run configuration (its db name is defaulted to the built URL)
 * @return an open JDBC connection
 * @throws IOException if the driver cannot be loaded
 */
private Connection getConnection(Config config) throws IOException {
    Properties properties = config.getDbProperties();
    ConnectionURLBuilder urlBuilder = new ConnectionURLBuilder(config, properties);
    if (config.getDb() == null)
        config.setDb(urlBuilder.build());
    String driverClass = properties.getProperty("driver");
    // an explicit -dp argument wins over the .properties entry; default to ""
    String driverPath = config.getDriverPath();
    if (Objects.isNull(driverPath)) {
        driverPath = properties.getProperty("driverPath");
    }
    if (Objects.isNull(driverPath)) {
        driverPath = "";
    }
    DbDriverLoader driverLoader = new DbDriverLoader();
    return driverLoader.getConnection(config, urlBuilder.build(), driverClass, driverPath);
}
/**
 * Writes the HTML detail page for every table/view, reporting progress per table.
 *
 * @param progressListener receives a progress callback per table
 * @param outputDir root output directory
 * @param db database model the tables belong to
 * @param tables tables and views to document
 * @param stats diagram write statistics shared with the summary pages
 * @throws IOException if a detail page cannot be written
 */
private void generateTables(ProgressListener progressListener, File outputDir, Database db, Collection<Table> tables, WriteStats stats) throws IOException {
    HtmlTablePage pageWriter = HtmlTablePage.getInstance();
    for (Table currentTable : tables) {
        progressListener.graphingDetailsProgressed(currentTable);
        LOGGER.debug("Writing details of {}", currentTable.getName());
        pageWriter.write(db, currentTable, outputDir, stats);
    }
}
/**
 * Logs diagnostic guidance when no tables or views were found in the schema,
 * listing available and populated schemas where possible.
 *
 * @param schema the schema that was analyzed
 * @param user the database user the analysis connected as
 * @param meta database metadata used to enumerate available schemas
 * @param specifiedInclusions true if a table-inclusion regex (-i) was supplied
 * @throws SQLException declared for API compatibility; metadata failures are
 *         caught and logged rather than propagated
 */
private static void dumpNoTablesMessage(String schema, String user, DatabaseMetaData meta, boolean specifiedInclusions) throws SQLException {
    LOGGER.warn("No tables or views were found in schema '{}'.", schema);
    List<String> schemas;
    try {
        schemas = DbAnalyzer.getSchemas(meta);
    } catch (SQLException | RuntimeException exc) {
        LOGGER.error("The user you specified '{}' might not have rights to read the database metadata.", user, exc);
        return;
    }
    if (Objects.isNull(schemas)) {
        LOGGER.error("Failed to retrieve any schemas");
        return;
    } else if (schemas.contains(schema)) {
        // schema exists but appears empty to this user
        LOGGER.error(
            "The schema exists in the database, but the user you specified '{}' " +
            "might not have rights to read its contents.",
            user);
        if (specifiedInclusions) {
            LOGGER.error(
                "Another possibility is that the regular expression that you specified " +
                "for what to include (via -i) didn't match any tables.");
        }
    } else {
        // schema not visible at all: list what IS visible to help the user
        LOGGER.error(
            "The schema '{}' could not be read/found, schema is specified using the -s option. " +
            "Make sure user '{}' has the correct privileges to read the schema. " +
            "Also note that schema names are usually case sensitive.",
            schema, user);
        LOGGER.info(
            "Available schemas(Some of these may be user or system schemas):" +
            System.lineSeparator() + "{}",
            schemas.stream().collect(Collectors.joining(System.lineSeparator())));
        List<String> populatedSchemas = DbAnalyzer.getPopulatedSchemas(meta);
        if (populatedSchemas.isEmpty()) {
            LOGGER.error("Unable to determine if any of the schemas contain tables/views");
        } else {
            // bug fix: the message has two placeholders ({} for the user and {} for the
            // list) but only the list was previously passed, leaving the user unfilled
            LOGGER.info("Schemas with tables/views visible to '{}':" + System.lineSeparator() + "{}",
                user,
                populatedSchemas.stream().collect(Collectors.joining(System.lineSeparator())));
        }
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.redis;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.camel.impl.JndiRegistry;
import org.junit.Before;
import org.junit.Test;
import org.springframework.data.redis.connection.DataType;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.query.SortQuery;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for the key-related Redis commands of the Camel Redis component.
 * A mocked Spring {@code RedisTemplate} is bound into the registry, each test
 * sends the command headers via {@code sendHeaders(...)} (inherited from
 * RedisTestSupport) and then verifies the expected template interaction.
 *
 * NOTE: mockito stubbing ({@code when(...)}) must precede {@code sendHeaders}
 * in each test — statement order is significant.
 */
public class RedisKeyTest extends RedisTestSupport {
    private RedisTemplate redisTemplate;

    @Override
    protected JndiRegistry createRegistry() throws Exception {
        // expose the mock template under the name the component looks up
        JndiRegistry registry = super.createRegistry();
        registry.bind("redisTemplate", redisTemplate);
        return registry;
    }

    @Before
    public void setUp() throws Exception {
        // create the mock before super.setUp() so createRegistry() can bind it
        redisTemplate = mock(RedisTemplate.class);
        super.setUp();
    }

    // DEL: all listed keys are passed to delete()
    @Test
    public void shouldExecuteDEL() throws Exception {
        Collection<String> keys = new HashSet<String>();
        keys.add("key1");
        keys.add("key2");
        sendHeaders(
                RedisConstants.COMMAND, "DEL",
                RedisConstants.KEYS, keys);
        verify(redisTemplate).delete(keys);
    }

    // EXISTS maps to hasKey() and returns its boolean result
    @Test
    public void shouldExecuteEXISTS() throws Exception {
        when(redisTemplate.hasKey(anyString())).thenReturn(true);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "EXISTS",
                RedisConstants.KEY, "key");
        verify(redisTemplate).hasKey("key");
        assertEquals(true, result);
    }

    // EXPIRE: timeout header is interpreted in seconds
    @Test
    public void shouldExecuteEXPIRE() throws Exception {
        when(redisTemplate.expire(anyString(), anyLong(), any(TimeUnit.class))).thenReturn(true);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "EXPIRE",
                RedisConstants.KEY, "key",
                RedisConstants.TIMEOUT, "10");
        verify(redisTemplate).expire("key", 10L, TimeUnit.SECONDS);
        assertEquals(true, result);
    }

    // EXPIREAT: unix-time (seconds) header is converted to a millisecond Date
    @Test
    public void shouldExecuteEXPIREAT() throws Exception {
        when(redisTemplate.expireAt(anyString(), any(Date.class))).thenReturn(true);
        long unixTime = System.currentTimeMillis() / 1000L;
        Object result = sendHeaders(
                RedisConstants.COMMAND, "EXPIREAT",
                RedisConstants.KEY, "key",
                RedisConstants.TIMESTAMP, unixTime);
        verify(redisTemplate).expireAt("key", new Date(unixTime * 1000L));
        assertEquals(true, result);
    }

    // KEYS: pattern lookup returns the matching key set unchanged
    @Test
    public void shouldExecuteKEYS() throws Exception {
        Set<String> keys = new HashSet<String>();
        keys.add("key1");
        keys.add("key2");
        when(redisTemplate.keys(anyString())).thenReturn(keys);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "KEYS",
                RedisConstants.PATTERN, "key*");
        verify(redisTemplate).keys("key*");
        assertEquals(keys, result);
    }

    // MOVE: DB header string is converted to an int database index
    @Test
    public void shouldExecuteMOVE() throws Exception {
        when(redisTemplate.move(anyString(), anyInt())).thenReturn(true);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "MOVE",
                RedisConstants.KEY, "key",
                RedisConstants.DB, "2");
        verify(redisTemplate).move("key", 2);
        assertEquals(true, result);
    }

    // PERSIST removes the key's TTL
    @Test
    public void shouldExecutePERSIST() throws Exception {
        when(redisTemplate.persist(anyString())).thenReturn(true);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "PERSIST",
                RedisConstants.KEY, "key");
        verify(redisTemplate).persist("key");
        assertEquals(true, result);
    }

    // PEXPIRE: like EXPIRE but the timeout is in milliseconds
    @Test
    public void shouldExecutePEXPIRE() throws Exception {
        when(redisTemplate.expire(anyString(), anyLong(), any(TimeUnit.class))).thenReturn(true);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "PEXPIRE",
                RedisConstants.KEY, "key",
                RedisConstants.TIMEOUT, "10");
        verify(redisTemplate).expire("key", 10L, TimeUnit.MILLISECONDS);
        assertEquals(true, result);
    }

    // PEXPIREAT: timestamp header is already in milliseconds
    @Test
    public void shouldExecutePEXPIREAT() throws Exception {
        when(redisTemplate.expireAt(anyString(), any(Date.class))).thenReturn(true);
        long millis = System.currentTimeMillis();
        Object result = sendHeaders(
                RedisConstants.COMMAND, "PEXPIREAT",
                RedisConstants.KEY, "key",
                RedisConstants.TIMESTAMP, millis);
        verify(redisTemplate).expireAt("key", new Date(millis));
        assertEquals(true, result);
    }

    // RANDOMKEY needs no headers besides the command itself
    @Test
    public void shouldExecuteRANDOMKEY() throws Exception {
        when(redisTemplate.randomKey()).thenReturn("key");
        Object result = sendHeaders(
                RedisConstants.COMMAND, "RANDOMKEY");
        verify(redisTemplate).randomKey();
        assertEquals("key", result);
    }

    // RENAME: the new key name travels in the VALUE header
    @Test
    public void shouldExecuteRENAME() throws Exception {
        sendHeaders(
                RedisConstants.COMMAND, "RENAME",
                RedisConstants.KEY, "key",
                RedisConstants.VALUE, "newkey");
        verify(redisTemplate).rename("key", "newkey");
    }

    // RENAMENX maps to renameIfAbsent()
    @Test
    public void shouldExecuteRENAMENX() throws Exception {
        when(redisTemplate.renameIfAbsent(anyString(), anyString())).thenReturn(true);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "RENAMENX",
                RedisConstants.KEY, "key",
                RedisConstants.VALUE, "newkey");
        verify(redisTemplate).renameIfAbsent("key", "newkey");
        assertEquals(true, result);
    }

    // SORT: the sorted list produced by the template is returned as-is
    @Test
    public void shouldExecuteSORT() throws Exception {
        List<Integer> list = new ArrayList<Integer>();
        list.add(5);
        when(redisTemplate.sort(any(SortQuery.class))).thenReturn(list);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "SORT",
                RedisConstants.KEY, "key");
        verify(redisTemplate).sort(any(SortQuery.class));
        assertEquals(list, result);
    }

    // TTL maps to getExpire() and returns the remaining seconds
    @Test
    public void shouldExecuteTTL() throws Exception {
        when(redisTemplate.getExpire(anyString())).thenReturn(2L);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "TTL",
                RedisConstants.KEY, "key");
        verify(redisTemplate).getExpire("key");
        assertEquals(2L, result);
    }

    // TYPE: the DataType enum is rendered as its string form
    @Test
    public void shouldExecuteTYPE() throws Exception {
        when(redisTemplate.type(anyString())).thenReturn(DataType.STRING);
        Object result = sendHeaders(
                RedisConstants.COMMAND, "TYPE",
                RedisConstants.KEY, "key");
        verify(redisTemplate).type("key");
        assertEquals(DataType.STRING.toString(), result);
    }
}
| |
/*
* ******************************************************************************
* Copyright 2014-2015 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
package com.spectralogic.ds3autogen.java;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.spectralogic.ds3autogen.api.CodeGenerator;
import com.spectralogic.ds3autogen.api.FileUtils;
import com.spectralogic.ds3autogen.api.ResponseTypeNotFoundException;
import com.spectralogic.ds3autogen.api.TypeRenamingConflictException;
import com.spectralogic.ds3autogen.api.models.Classification;
import com.spectralogic.ds3autogen.api.models.Ds3ApiSpec;
import com.spectralogic.ds3autogen.api.models.Ds3Request;
import com.spectralogic.ds3autogen.api.models.Ds3Type;
import com.spectralogic.ds3autogen.java.converters.ClientConverter;
import com.spectralogic.ds3autogen.java.generators.requestmodels.*;
import com.spectralogic.ds3autogen.java.generators.responsemodels.BaseResponseGenerator;
import com.spectralogic.ds3autogen.java.generators.responsemodels.BulkResponseGenerator;
import com.spectralogic.ds3autogen.java.generators.responsemodels.CodesResponseGenerator;
import com.spectralogic.ds3autogen.java.generators.responsemodels.ResponseModelGenerator;
import com.spectralogic.ds3autogen.java.generators.typemodels.*;
import com.spectralogic.ds3autogen.java.models.Client;
import com.spectralogic.ds3autogen.java.models.Model;
import com.spectralogic.ds3autogen.java.models.Request;
import com.spectralogic.ds3autogen.java.models.Response;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.file.Path;
import java.nio.file.Paths;
import static com.spectralogic.ds3autogen.java.models.Constants.*;
import static com.spectralogic.ds3autogen.utils.ConverterUtil.*;
import static com.spectralogic.ds3autogen.utils.Ds3RequestClassificationUtil.*;
import static com.spectralogic.ds3autogen.utils.Ds3RequestClassificationUtil.isCompleteMultiPartUploadRequest;
import static com.spectralogic.ds3autogen.utils.Ds3TypeClassificationUtil.isCommonPrefixesType;
import static com.spectralogic.ds3autogen.utils.Ds3TypeClassificationUtil.isHttpErrorType;
/**
* Generates Java SDK code based on the contents of a Ds3ApiSpec.
*
* Generated Code:
* Request handlers
* Response handlers
* Ds3Client
* Ds3ClientImpl
* Models
*/
public class JavaCodeGenerator implements CodeGenerator {
private static final Logger LOG = LoggerFactory.getLogger(JavaCodeGenerator.class);
private static final Path baseProjectPath = Paths.get("ds3-sdk/src/main/java/");
private final Configuration config = new Configuration(Configuration.VERSION_2_3_23);
private Ds3ApiSpec spec;
private FileUtils fileUtils;
private Path destDir;
/**
 * Creates a generator with its FreeMarker configuration pointed at the
 * bundled templates. Template errors are rethrown rather than swallowed.
 */
public JavaCodeGenerator() {
    // templates live under /tmpls on this class's classpath
    config.setClassForTemplateLoading(JavaCodeGenerator.class, "/tmpls");
    config.setDefaultEncoding("UTF-8");
    config.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);
}
/**
 * Entry point: stores the spec, file utilities and destination directory, then
 * generates all requests, responses, models and the client.
 *
 * @param spec parsed Ds3 API specification
 * @param fileUtils abstraction used to open output files
 * @param destDir root directory the SDK sources are written under
 * @throws IOException if an output file cannot be written
 * @throws ResponseTypeNotFoundException declared for interface compatibility
 * @throws TypeRenamingConflictException declared for interface compatibility
 */
@Override
public void generate(
        final Ds3ApiSpec spec,
        final FileUtils fileUtils,
        final Path destDir) throws IOException, ResponseTypeNotFoundException, TypeRenamingConflictException {
    this.spec = spec;
    this.fileUtils = fileUtils;
    this.destDir = destDir;
    try {
        generateCommands();
    } catch (final TemplateException e) {
        // Previously e.printStackTrace(): route the failure through the class
        // logger instead so it carries context and appears in normal log output.
        LOG.error("Failed to generate Java SDK code", e);
    }
}
/**
 * Generates all code associated with the Ds3ApiSpec: request handlers,
 * models, and the client classes, in that order.
 * @throws IOException if an output file cannot be written
 * @throws TemplateException if a template fails to render
 */
private void generateCommands() throws IOException, TemplateException {
    generateAllRequests();
    generateAllModels();
    generateClient();
}
/**
 * Generates a model class for every Ds3Type that is actually referenced by at
 * least one request; unused types are pruned first.
 * @throws IOException if an output file cannot be written
 * @throws TemplateException if a template fails to render
 */
private void generateAllModels() throws IOException, TemplateException {
    final ImmutableMap<String, Ds3Type> usedTypes = removeUnusedTypes(spec.getTypes(), spec.getRequests());
    if (isEmpty(usedTypes)) {
        LOG.info("There were no models to generate");
        return;
    }
    for (final Ds3Type type : usedTypes.values()) {
        generateModel(type);
    }
}
/**
 * Renders one Ds3Type into its Java model source file, selecting the template
 * and generator appropriate for the type.
 * @param ds3Type the type to render
 * @throws IOException if the output file cannot be written
 * @throws TemplateException if the template fails to render
 */
private void generateModel(final Ds3Type ds3Type) throws IOException, TemplateException {
    final Template template = getModelTemplate(ds3Type);
    final Model model = toModel(ds3Type, getModelPackage());
    final Path outputPath = toModelFilePath(model.getName());
    LOG.info("Getting outputstream for file:" + outputPath.toString());
    // try-with-resources guarantees the stream/writer are closed on any exit path
    try (final OutputStream stream = fileUtils.getOutputFile(outputPath);
            final Writer fileWriter = new OutputStreamWriter(stream)) {
        template.process(model, fileWriter);
    }
}
/**
 * Converts a Ds3Type into a template-ready Model using the generator that
 * matches the type's classification.
 */
private Model toModel(final Ds3Type ds3Type, final String packageName) {
    return getModelGenerator(ds3Type).generate(ds3Type, packageName);
}
/**
 * Retrieves the associated type model generator for the specified Ds3Type.
 * The checks are ordered from most to least specific; the base generator is
 * the fallback for ordinary types.
 */
private TypeModelGenerator<?> getModelGenerator(final Ds3Type ds3Type) {
    if (isChecksum(ds3Type)) {
        return new ChecksumTypeGenerator();
    }
    if (isJobsApiBean(ds3Type)) {
        return new JobsApiBeanTypeGenerator();
    }
    if (isCommonPrefixesType(ds3Type)) {
        return new CommonPrefixGenerator();
    }
    return new BaseTypeGenerator();
}
/**
 * Gets the Model template that is used to generate the given Ds3Type content.
 * Special-cased types are checked first; otherwise enum types get the enum
 * template and element-bearing types get the generic model template.
 *
 * @param ds3Type A Ds3Type
 * @return The appropriate template to generate the given Ds3Type
 * @throws IOException if the template cannot be loaded
 * @throws IllegalArgumentException if the type has neither elements nor enum constants
 */
private Template getModelTemplate(final Ds3Type ds3Type) throws IOException {
    if (isHttpErrorType(ds3Type)) {
        return config.getTemplate("models/http_error_template.ftl");
    }
    if (isChecksum(ds3Type)) {
        return config.getTemplate("models/checksum_type_template.ftl");
    }
    if (isS3Object(ds3Type)) {
        return config.getTemplate("models/s3object_model_template.ftl");
    }
    if (isBulkObject(ds3Type)) {
        return config.getTemplate("models/bulk_object_template.ftl");
    }
    // enum check must come before the generic element check: a type may have both
    if (hasContent(ds3Type.getEnumConstants())) {
        return config.getTemplate("models/enum_model_template.ftl");
    }
    if (hasContent(ds3Type.getElements())) {
        return config.getTemplate("models/model_template.ftl");
    }
    throw new IllegalArgumentException("Type must have Elements and/or EnumConstants");
}
/**
 * Determines whether the given Ds3Type is the BulkObject type, identified by
 * its fully-qualified name suffix.
 */
private boolean isBulkObject(final Ds3Type ds3type) {
    final String typeName = ds3type.getName();
    return typeName.endsWith(".BulkObject");
}
/**
 * Determines whether the given Ds3Type is the S3Object type, identified by
 * its fully-qualified name suffix.
 */
private boolean isS3Object(final Ds3Type ds3type) {
    final String typeName = ds3type.getName();
    return typeName.endsWith(".S3Object");
}
/**
 * Determines whether the given Ds3Type is the ChecksumType, identified by
 * its fully-qualified name suffix.
 *
 * @param ds3Type A Ds3Type
 * @return True if the Ds3Type describes the ChecksumType, else false
 */
private boolean isChecksum(final Ds3Type ds3Type) {
    final String typeName = ds3Type.getName();
    return typeName.endsWith(".ChecksumType");
}
/**
 * Determines whether the given Ds3Type is the JobsApiBean type, which the
 * NameMapper renames to JobList (hence the suffix checked here).
 */
private boolean isJobsApiBean(final Ds3Type ds3Type) {
    final String typeName = ds3Type.getName();
    return typeName.endsWith(".JobList");
}
/**
 * Gets the package name for where the Models will be generated
 * (root package plus the models sub-package).
 * @return The package name of where the Models are going to be generated
 */
private String getModelPackage() {
    return ROOT_PACKAGE_PATH + MODELS_PACKAGE;
}
/**
 * Converts a Model name into the path of its .java source file beneath the
 * destination directory, mirroring the model package structure.
 * @param modelName The name of a Model
 * @return The file path of a Model
 */
private Path toModelFilePath(final String modelName) {
    final String relativePath = getModelPackage().replace(".", "/") + "/" + modelName + ".java";
    return destDir.resolve(baseProjectPath.resolve(Paths.get(relativePath)));
}
/**
 * Generates the Request and Response handler pair for every request described
 * within the Ds3ApiSpec (SpectraInternal requests excluded upstream).
 * @throws IOException if an output file cannot be written
 * @throws TemplateException if a template fails to render
 */
private void generateAllRequests() throws IOException, TemplateException {
    final ImmutableList<Ds3Request> allRequests = spec.getRequests();
    if (isEmpty(allRequests)) {
        LOG.info("There were no requests to generate");
        return;
    }
    for (final Ds3Request currentRequest : allRequests) {
        generateRequest(currentRequest);
        generateResponse(currentRequest);
    }
}
/**
 * Generates the Ds3Client interface and Ds3ClientImpl implementation containing
 * all non-SpectraInternal requests described within the Ds3ApiSpec.
 * @throws IOException if an output file cannot be written
 * @throws TemplateException if a template fails to render
 */
private void generateClient() throws IOException, TemplateException {
    final ImmutableList<Ds3Request> requests = spec.getRequests();
    if (isEmpty(requests)) {
        LOG.info("Not generating client: no requests.");
        return;
    }
    final Template clientTmpl = config.getTemplate("client/ds3client_template.ftl");
    final Client client = ClientConverter.toClient(requests, ROOT_PACKAGE_PATH);
    final Path clientPath = toClientPath("Ds3Client.java");
    LOG.info("Getting outputstream for file:{}", clientPath);
    try (final OutputStream outStream = fileUtils.getOutputFile(clientPath);
            final Writer writer = new OutputStreamWriter(outStream)) {
        clientTmpl.process(client, writer);
    }
    final Template clientImplTmpl = config.getTemplate("client/ds3client_impl_template.ftl");
    final Path clientImplPath = toClientPath("Ds3ClientImpl.java");
    // bug fix: this log line previously reported clientPath (copy-paste error)
    LOG.info("Getting outputstream for file:{}", clientImplPath);
    try (final OutputStream outStream = fileUtils.getOutputFile(clientImplPath);
            final Writer writer = new OutputStreamWriter(outStream)) {
        clientImplTmpl.process(client, writer);
    }
}
/**
 * Converts a file name into its full path within the root package of the
 * generated client sources.
 * @param fileName The name of a file
 * @return The client path to the given file
 */
private Path toClientPath(final String fileName) {
    final String relativePath = ROOT_PACKAGE_PATH.replace(".", "/") + "/" + fileName;
    return destDir.resolve(baseProjectPath.resolve(Paths.get(relativePath)));
}
/**
 * Renders the Response handler source file for the given Ds3Request, using the
 * template and generator appropriate for the request's classification.
 * @param ds3Request A Ds3Request
 * @throws IOException if the output file cannot be written
 * @throws TemplateException if the template fails to render
 */
private void generateResponse(final Ds3Request ds3Request) throws IOException, TemplateException {
    final Template template = getResponseTemplate(ds3Request);
    final Response responseModel = toResponse(ds3Request);
    final Path outputPath = getPathFromPackage(ds3Request, responseModel.getName());
    LOG.info("Getting outputstream for file:" + outputPath.toString());
    try (final OutputStream stream = fileUtils.getOutputFile(outputPath);
            final Writer fileWriter = new OutputStreamWriter(stream)) {
        template.process(responseModel, fileWriter);
    }
}
/**
 * Converts a Ds3Request into a Response model using the generator that matches
 * the request's classification, targeting the request's command package.
 * @param ds3Request A Ds3Request
 * @return A Response
 */
private Response toResponse(final Ds3Request ds3Request) {
    return getResponseTemplateModelGenerator(ds3Request)
            .generate(ds3Request, getCommandPackage(ds3Request));
}
/**
 * Retrieves the associated response generator for the specified Ds3Request.
 * Status-code-driven requests and bulk requests get specialized generators;
 * everything else falls back to the base generator.
 */
private ResponseModelGenerator<?> getResponseTemplateModelGenerator(final Ds3Request ds3Request) {
    // these request types derive their payload from HTTP status codes
    if (isAllocateJobChunkRequest(ds3Request)
            || isHeadObjectRequest(ds3Request)
            || isHeadBucketRequest(ds3Request)) {
        return new CodesResponseGenerator();
    }
    if (isBulkRequest(ds3Request)) {
        return new BulkResponseGenerator();
    }
    return new BaseResponseGenerator();
}
/**
* Gets the Response template that is used to generate the given Ds3Request's
* Response handler
* @param ds3Request A Ds3Request
* @return The appropriate template to generate the required Response
* @throws IOException
*/
/**
 * Gets the Response template that is used to generate the given Ds3Request's
 * Response handler. Falls back to the generic response template when no
 * specialized template applies.
 * @param ds3Request A Ds3Request
 * @return The appropriate template to generate the required Response
 * @throws IOException If the template cannot be loaded
 */
private Template getResponseTemplate(final Ds3Request ds3Request) throws IOException {
    // Resolve the template path first, then load it once at the end.
    final String templateName;
    if (isAllocateJobChunkRequest(ds3Request)) {
        templateName = "response/allocate_job_chunk_response_template.ftl";
    } else if (isGetJobChunksReadyForClientProcessingRequest(ds3Request)) {
        templateName = "response/get_job_chunks_ready_response_template.ftl";
    } else if (isHeadBucketRequest(ds3Request)) {
        templateName = "response/head_bucket_response_template.ftl";
    } else if (isHeadObjectRequest(ds3Request)) {
        templateName = "response/head_object_response_template.ftl";
    } else if (isBulkRequest(ds3Request)) {
        templateName = "response/bulk_response_template.ftl";
    } else if (isGetObjectAmazonS3Request(ds3Request)) {
        templateName = "response/get_object_response_template.ftl";
    } else {
        templateName = "response/response_template.ftl";
    }
    return config.getTemplate(templateName);
}
/**
* Gets the command package suitable for the given Ds3Request. SpectraDs3 commands
* have a separate package, as do notifications.
* @param ds3Request A Ds3Request
* @return The command package that is suitable for the given Ds3Request
*/
/**
 * Gets the command package suitable for the given Ds3Request. SpectraDs3 commands
 * have a separate package, as do spectra-internal commands and notifications.
 * @param ds3Request A Ds3Request
 * @return The command package that is suitable for the given Ds3Request
 */
private static String getCommandPackage(final Ds3Request ds3Request) {
    String commandPackage = COMMANDS_PACKAGE_PATH;
    // At most one classification suffix applies; the checks are mutually exclusive.
    if (ds3Request.getClassification() == Classification.spectrads3) {
        commandPackage += SPECTRA_DS3_PACKAGE;
    }
    if (ds3Request.getClassification() == Classification.spectrainternal) {
        commandPackage += SPECTRA_INTERNAL_PACKAGE;
    }
    // Notifications get a further sub-package on top of the classification package.
    if (isNotificationRequest(ds3Request)) {
        commandPackage += NOTIFICATION_PACKAGE;
    }
    return commandPackage;
}
/**
* Generates the code for the Request handler described in a Ds3Request
* @param ds3Request A Ds3Request
* @throws IOException
* @throws TemplateException
*/
/**
 * Generates the code for the Request handler described in a Ds3Request and
 * writes it to the file system.
 * @param ds3Request A Ds3Request
 * @throws IOException If the template cannot be loaded or the output file cannot be written
 * @throws TemplateException If template processing fails
 */
private void generateRequest(final Ds3Request ds3Request) throws IOException, TemplateException {
    final Request request = toRequest(ds3Request);
    final Path requestPath = getPathFromPackage(ds3Request, request.getName());
    final Template tmpl = getRequestTemplate(ds3Request);
    LOG.info("Getting outputstream for file:" + requestPath.toString());
    // try-with-resources closes the writer (and the underlying stream) even if processing fails.
    try (final OutputStream outStream = fileUtils.getOutputFile(requestPath);
         final Writer writer = new OutputStreamWriter(outStream)) {
        tmpl.process(request, writer);
    }
}
/**
* Returns the file system path for a request given it's package
* @param ds3Request A Ds3Request
* @param fileName The file name to be given to this Ds3Request
* @return The system path to this Ds3Request's generated code
*/
/**
 * Returns the file system path for a request given its package.
 * @param ds3Request A Ds3Request
 * @param fileName The file name to be given to this Ds3Request
 * @return The system path to this Ds3Request's generated code
 */
private Path getPathFromPackage(final Ds3Request ds3Request, final String fileName) {
    // Convert the dotted package name into a directory path and append "<fileName>.java".
    final String packageDir = getCommandPackage(ds3Request).replace(".", "/");
    final Path relativePath = Paths.get(packageDir + "/" + fileName + ".java");
    return destDir.resolve(baseProjectPath.resolve(relativePath));
}
/**
* Converts a Ds3Request into a Request model
* @param ds3Request A Ds3Request
* @return A Request model
*/
/**
 * Converts a Ds3Request into a Request model using the generator appropriate
 * for that request type.
 * @param ds3Request A Ds3Request
 * @return A Request model
 */
private Request toRequest(final Ds3Request ds3Request) {
    final String commandPackage = getCommandPackage(ds3Request);
    return getTemplateModelGenerator(ds3Request).generate(ds3Request, commandPackage);
}
/**
* Retrieves the associated request generator for the specified Ds3Request
*/
/**
 * Retrieves the associated request generator for the specified Ds3Request.
 * The checks are ordered by priority: the first matching request type wins,
 * and requests matching none of the specialized types get the base generator.
 * @param ds3Request A Ds3Request
 * @return The request model generator matching the request type
 */
private static RequestModelGenerator<?> getTemplateModelGenerator(final Ds3Request ds3Request) {
    if (hasStringRequestPayload(ds3Request)) {
        return new StringRequestPayloadGenerator();
    } else if (isBulkRequest(ds3Request)) {
        return new BulkRequestGenerator();
    } else if (hasListObjectsRequestPayload(ds3Request)) {
        return new ObjectsRequestPayloadGenerator();
    } else if (isCreateObjectRequest(ds3Request)) {
        return new CreateObjectRequestGenerator();
    } else if (isCreateNotificationRequest(ds3Request)) {
        return new CreateNotificationRequestGenerator();
    } else if (isGetNotificationRequest(ds3Request) || isDeleteNotificationRequest(ds3Request)) {
        // Get and delete notification requests share one generator.
        return new NotificationRequestGenerator();
    } else if (isGetObjectRequest(ds3Request)) {
        return new GetObjectRequestGenerator();
    } else if (isMultiFileDeleteRequest(ds3Request)) {
        return new MultiFileDeleteRequestGenerator();
    } else if (isCreateMultiPartUploadPartRequest(ds3Request)) {
        return new StreamRequestPayloadGenerator();
    } else if (isCompleteMultiPartUploadRequest(ds3Request)) {
        return new CompleteMultipartUploadRequestGenerator();
    }
    return new BaseRequestGenerator();
}
/**
* Gets the appropriate template that will generate the code for this
* Ds3Request's request handler
* @param ds3Request A Ds3Request
* @return The appropriate template to generate the required Request
* @throws IOException
*/
/**
 * Gets the appropriate template that will generate the code for this
 * Ds3Request's request handler. Falls back to the generic request template
 * when no specialized template applies.
 * @param ds3Request A Ds3Request
 * @return The appropriate template to generate the required Request
 * @throws IOException If the template cannot be loaded
 */
private Template getRequestTemplate(final Ds3Request ds3Request) throws IOException {
    // Consistency fix: the original mixed an if-return first branch with an
    // else-if chain; normalized to the if-return style used by the sibling
    // getResponseTemplate. Branch order (and therefore behavior) is unchanged.
    if (isBulkRequest(ds3Request)) {
        return config.getTemplate("request/bulk_request_template.ftl");
    }
    if (hasStringRequestPayload(ds3Request)) {
        return config.getTemplate("request/request_with_string_payload_template.ftl");
    }
    if (hasListObjectsRequestPayload(ds3Request)) {
        return config.getTemplate("request/objects_request_payload_request_template.ftl");
    }
    if (isMultiFileDeleteRequest(ds3Request)) {
        return config.getTemplate("request/multi_file_delete_request_template.ftl");
    }
    if (isGetObjectRequest(ds3Request)) {
        return config.getTemplate("request/get_object_template.ftl");
    }
    if (isCreateObjectRequest(ds3Request)) {
        return config.getTemplate("request/create_object_template.ftl");
    }
    if (isDeleteNotificationRequest(ds3Request)) {
        return config.getTemplate("request/delete_notification_request_template.ftl");
    }
    if (isCreateNotificationRequest(ds3Request)) {
        return config.getTemplate("request/create_notification_request_template.ftl");
    }
    if (isGetNotificationRequest(ds3Request)) {
        return config.getTemplate("request/get_notification_request_template.ftl");
    }
    if (isGetJobRequest(ds3Request)) {
        return config.getTemplate("request/get_job_request_template.ftl");
    }
    if (isCompleteMultiPartUploadRequest(ds3Request)) {
        return config.getTemplate("request/complete_multipart_upload_template.ftl");
    }
    return config.getTemplate("request/request_template.ftl");
}
}
| |
/*
Derby - Class org.apache.derby.client.net.NetLogWriter
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.client.net;
// network traffic tracer.
// This class traces communication buffers for both sends and receives.
// The value of the hex bytes are traced along with the ascii and ebcdic translations.
/**
 * Network traffic tracer for the Derby network client.
 * Traces communication buffers for both sends and receives: the hex value of
 * each byte is printed alongside its ASCII and EBCDIC translations.
 */
public class NetLogWriter extends org.apache.derby.client.am.LogWriter {

    // The receive constant is used to indicate that the bytes were read from a Stream.
    // It indicates to this class that a receive header should be used.
    public static final int TYPE_TRACE_RECEIVE = 2;

    // The send constant is used to indicate that the bytes were written to
    // a Stream. It indicates to this class that a send header should be used.
    public static final int TYPE_TRACE_SEND = 1;

    //------------------------------ internal constants --------------------------

    // This class was implemented using character arrays to translate bytes
    // into ascii and ebcdic. The goal was to be able to quickly index into the
    // arrays to find the characters. Char arrays instead of strings were used as
    // much as possible in an attempt to help speed up performance.

    // An array of characters used to translate bytes to ascii.
    // The position in the array corresponds to the hex value of the character.
    // Non-printable bytes are rendered as '.'.
    private static final char asciiChar__ [] = {
        // 0   1   2   3   4   5   6   7   8   9   A   B   C   D   E   F
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //0
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //1
        ' ', '!', '"', '#', '$', '%', '&', '\'', '(', ')', '*', '+', ',', '-', '.', '/', //2
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ':', ';', '<', '=', '>', '?', //3
        '@', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', //4
        'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '[', '\\', ']', '^', '_', //5
        '`', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', //6
        'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '{', '|', '}', '~', '.', //7
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //8
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //9
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //A
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //B
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //C
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //D
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //E
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.'  //F
    };

    // This column position header is used to mark offsets into the trace.
    private static final String colPosHeader__ =
            "       0 1 2 3 4 5 6 7   8 9 A B C D E F   0123456789ABCDEF  0123456789ABCDEF";

    // An array of characters used to translate bytes to ebcdic.
    // The position in the array corresponds to the hex value of the
    // character. Non-printable bytes are rendered as '.'.
    private static final char ebcdicChar__[] = {
        // 0   1   2   3   4   5   6   7   8   9   A   B   C   D   E   F
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //0
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //1
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //2
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //3
        ' ', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '<', '(', '+', '|', //4
        '&', '.', '.', '.', '.', '.', '.', '.', '.', '.', '!', '$', '*', ')', ';', '.', //5
        '-', '/', '.', '.', '.', '.', '.', '.', '.', '.', '|', ',', '%', '_', '>', '?', //6
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '`', ':', '#', '@', '\'', '=', '"', //7
        '.', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', '.', '.', '.', '.', '.', '.', //8
        '.', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', '.', '.', '.', '.', '.', '.', //9
        '.', '~', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '.', '.', '.', '.', '.', '.', //A
        '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', '.', //B
        '{', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', '.', '.', '.', '.', '.', '.', //C
        '}', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', '.', '.', '.', '.', '.', '.', //D
        '\\', '.', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', '.', '.', '.', '.', '.', '.', //E
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.', '.', '.', '.', '.', '.'  //F
    };

    // An array of characters representing hex numbers.
    private static final char hexDigit__ [] = {
        '0', '1', '2', '3', '4', '5', '6', '7',
        '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
    };

    // The receive header comes before bytes which would be read from a stream.
    private static final String receiveHeader__ =
            "       RECEIVE BUFFER:                     (ASCII)           (EBCDIC)";

    // The send header comes before bytes which would be written to a stream.
    private static final String sendHeader__ =
            "       SEND BUFFER:                        (ASCII)           (EBCDIC)";

    private static final char spaceChar__ = ' ';
    private static final char zeroChar__ = '0';

    // This mapping table associates a codepoint to a String describing the codepoint.
    // This is needed because the trace prints the first codepoint in send and receive buffers.
    // This is created lazily because there is no need to create the mapping if tracing isn't used.
    // So this array will only be created when the com buffer trace is started.
    private static CodePointNameTable codePointNameTable__ = null;

    //-----------------------------internal state---------------------------------

    //-----------------------------constructors/finalizer-------------------------

    // One NetLogWriter object is created per data source, iff tracing is enabled.
    public NetLogWriter(java.io.PrintWriter printWriter, int traceLevel) {
        super(printWriter, traceLevel);

        // Initialize the codepoint name table if not previously initialized.
        // This is done lazily so that it is not created if the trace isn't used (save some init time).
        // NOTE(review): the lazy init is not synchronized; presumably benign because the
        // table contents are identical for every instance — confirm before relying on it.
        if (codePointNameTable__ == null) {
            codePointNameTable__ = new CodePointNameTable();
        }
    }

    //------------------------------entry points----------------------------------

    // Specialization of LogWriter.traceConnectsExit().
    // Prints the PROTOCOL manager levels negotiated for the connection.
    public void traceConnectsExit(org.apache.derby.client.am.Connection connection) {
        if (traceSuspended()) {
            return;
        }
        NetConnection c = (NetConnection) connection;
        synchronized (printWriter_) {
            super.traceConnectsExit(c);
            dncnetprint("  PROTOCOL manager levels: { ");
            printWriter_.print("SQLAM=" + c.getSQLAM() + ", ");
            printWriter_.print("AGENT=" + c.getAGENT() + ", ");
            printWriter_.print("CMNTCPIP=" + c.getCMNTCPIP() + ", ");
            printWriter_.print("RDB=" + c.getRDB() + ", ");
            printWriter_.print("SECMGR=" + c.getSECMGR() + ", ");
            printWriter_.print("XAMGR=" + c.getXAMGR() + ", ");
            printWriter_.print("SYNCPTMGR=" + c.getSYNCPTMGR() + ", ");
            printWriter_.print("RSYNCMGR=" + c.getRSYNCMGR());
            printWriter_.println(" }");
            printWriter_.flush();
        }
    }

    // Same output as traceConnectsExit, emitted after a connection reset.
    public void traceConnectsResetExit(org.apache.derby.client.am.Connection connection) {
        if (traceSuspended()) {
            return;
        }
        NetConnection c = (NetConnection) connection;
        synchronized (printWriter_) {
            super.traceConnectsResetExit(c);
            dncnetprint("  PROTOCOL manager levels: { ");
            printWriter_.print("SQLAM=" + c.getSQLAM() + ", ");
            printWriter_.print("AGENT=" + c.getAGENT() + ", ");
            printWriter_.print("CMNTCPIP=" + c.getCMNTCPIP() + ", ");
            printWriter_.print("RDB=" + c.getRDB() + ", ");
            printWriter_.print("SECMGR=" + c.getSECMGR() + ", ");
            printWriter_.print("XAMGR=" + c.getXAMGR() + ", ");
            printWriter_.print("SYNCPTMGR=" + c.getSYNCPTMGR() + ", ");
            printWriter_.print("RSYNCMGR=" + c.getRSYNCMGR());
            printWriter_.println(" }");
            printWriter_.flush();
        }
    }

    // Pass the connection handle and print it in the header
    // What exactly is supposed to be passed, assume one complete DSS packet
    // Write the communication buffer data to the trace.
    // The data is passed in via a byte array. The start and length of the data is given.
    // The type is needed to indicate if the data is part of the send or receive buffer.
    // The class name, method name, and trcPt number are also written to the trace.
    // Not much checking is performed on the parameters. This is done to help performance.
    synchronized public void traceProtocolFlow(byte[] buff,
                                              int offset,
                                              int len,
                                              int type,
                                              String className,
                                              String methodName,
                                              int tracepoint) {
        if (traceSuspended()) {
            return;
        }
        if (!loggingEnabled(org.apache.derby.jdbc.ClientDataSource.TRACE_PROTOCOL_FLOWS)) {
            return;
        }
        synchronized (printWriter_) {
            super.tracepoint("[net]", tracepoint, className, methodName);
            int fullLen = len;
            boolean printColPos = true;
            while (fullLen >= 2) { // format each DssHdr separately
                // get the length of this DssHdr (big-endian two-byte value at the start)
                len = ((buff[offset] & 0xff) << 8) + ((buff[offset + 1] & 0xff) << 0);
                // check for valid dss header or not all of dss block
                if ((len < 10) || (len > fullLen)) {
                    len = fullLen;
                }
                // subtract that length from the full length
                fullLen -= len;
                // The data will only be written if there is a non-zero positive length.
                if (len != 0) {
                    String codePointName = null;
                    // If the length >= 10, lookup the first codepoint so its name can be printed
                    if (len >= 10) {
                        // Get the int value of the two byte unsigned codepoint.
                        int codePoint = getCodePoint(buff, offset + 8);
                        codePointName = codePointNameTable__.lookup(codePoint);
                        // if this is not a valid codepoint then format the entire buffer
                        // as one block.
                        if (codePointName == null) {
                            len += fullLen;
                            fullLen = 0;
                        }
                    }
                    if (!printColPos) { // not 1st Dss header of this buffer, write separator
                        dncnetprintln("");
                    }
                    if (codePointName == null) {
                        // codePointName was still null so either < 10 bytes were given or
                        // the codepoint wasn't found in the table. Just print the plain send header.
                        dncnetprintln(getHeader(type));
                    } else {
                        // codePointName isn't null so the name of the codepoint will be printed.
                        printHeaderWithCodePointName(codePointName, type);
                    }
                    // Print the col position header in the trace.
                    if (printColPos) { // first Dss header of buffer, need column position header
                        dncnetprintln(colPosHeader__);
                        printColPos = false;
                    }
                    // A char array will be used to translate the bytes to their character
                    // representations along with ascii and ebcdic representations.
                    char trcDump[] = new char[77];
                    // bCounter, aCounter, eCounter are offsets used to help position the characters:
                    // hex column, ascii column, and ebcdic column respectively.
                    short bCounter = 7;
                    short aCounter = 43;
                    short eCounter = 61;
                    // The lines will be counted starting at zero.
                    // This is hard coded since we are at the beginning.
                    trcDump[0] = zeroChar__;
                    trcDump[1] = zeroChar__;
                    trcDump[2] = zeroChar__;
                    trcDump[3] = zeroChar__;
                    // The 0's are already in the trace so bump the line counter up a row.
                    int lineCounter = 0x10;
                    // Make sure the character array has all blanks in it.
                    // Some of these blanks will be replaced later with values.
                    // The 0's were not overwritten.
                    for (int j = 4; j < 77; j++) {
                        trcDump[j] = spaceChar__;
                    }
                    // i will maintain the position in the byte array to be traced.
                    int i = 0;
                    do {
                        // Get the unsigned value of the byte.
                        // int num = b[off++] & 0xff;
                        int num = (buff[offset] < 0) ? buff[offset] + 256 : buff[offset];
                        offset++;
                        i++;
                        // Place the characters representing the bytes in the array.
                        trcDump[bCounter++] = hexDigit__[((num >>> 4) & 0xf)];
                        trcDump[bCounter++] = hexDigit__[(num & 0xf)];
                        // Place the ascii and ebcdc representations in the array.
                        trcDump[aCounter++] = asciiChar__[num];
                        trcDump[eCounter++] = ebcdicChar__[num];
                        if (((i % 8) == 0)) {
                            if (((i % 16) == 0)) {
                                // Print the array each time 16 bytes are processed.
                                dncnetprintln(trcDump);
                                if (i != len) {
                                    // Not yet at the end of the byte array.
                                    if ((len - i) < 16) {
                                        // This is the last line so blank it all out.
                                        // This keeps the last line looking pretty in case
                                        // < 16 bytes remain.
                                        for (int j = 0; j < trcDump.length; j++) {
                                            trcDump[j] = spaceChar__;
                                        }
                                    }
                                    // Reset the counters.
                                    bCounter = 0;
                                    aCounter = 43;
                                    eCounter = 61;
                                    // Reset the lineCounter if it starts to get too large.
                                    if (lineCounter == 0x100000) {
                                        lineCounter = 0;
                                    }
                                    // Place the characters representing the line counter in the array.
                                    trcDump[bCounter++] = hexDigit__[((lineCounter >>> 12) & 0xf)];
                                    trcDump[bCounter++] = hexDigit__[((lineCounter >>> 8) & 0xf)];
                                    trcDump[bCounter++] = hexDigit__[((lineCounter >>> 4) & 0xf)];
                                    trcDump[bCounter++] = hexDigit__[(lineCounter & 0xf)];
                                    bCounter += 3;
                                    // Bump up the line counter.
                                    lineCounter += 0x10;
                                }
                            } else {
                                // 8 bytes were processed so move the counter to adjust for
                                // spaces between the columns of bytes.
                                bCounter += 2;
                            }
                        }
                        // do this until all the data has been traced.
                    } while (i < len);
                    // print the last line and add some blank lines to make it easier to read.
                    if (len % 16 != 0) {
                        dncnetprintln(trcDump);
                    }
                }
            }
            dncnetprintln("");
        }
    }

    // Gets the int value of the two byte unsigned codepoint (big-endian).
    private static int getCodePoint(byte[] buff, int offset) {
        return ((buff[offset++] & 0xff) << 8) +
                ((buff[offset] & 0xff) << 0);
    }

    // Returns the send or receive header for the given trace type,
    // or null for an unrecognized type.
    private static String getHeader(int type) {
        switch (type) {
        case TYPE_TRACE_SEND:
            return sendHeader__;
        case TYPE_TRACE_RECEIVE:
            return receiveHeader__;
        default:
            return null;
        }
    }

    // Returns the column offset within the header at which the codepoint
    // name should be spliced in.
    private static int getStartPosition(int type) {
        switch (type) {
        case TYPE_TRACE_SEND:
            return 20; // This is right after 'SEND BUFFER: '.
        case TYPE_TRACE_RECEIVE:
            return 23; // This is right after 'RECEIVE BUFFER: '.
        default:
            return 0;
        }
    }

    // Prints the send/receive header with the codepoint name spliced into it.
    private void printHeaderWithCodePointName(String codePointName, int type) {
        // Create a char array so some of the characters
        // can be replaced with the name of the codepoint.
        char headerArray[] = getHeader(type).toCharArray();
        // At most, 16 character name will be used. This is so
        // the headers on top of the ascii and ebcdic rows aren't shifted.
        int replaceLen = (codePointName.length() < 17) ? codePointName.length() : 16;
        int offset = getStartPosition(type);
        for (int i = 0; i < replaceLen; i++) {
            headerArray[offset++] = codePointName.charAt(i);
        }
        dncnetprintln(headerArray);
    }

    // Prints (without newline) with the "[derby] " prefix and flushes.
    private void dncnetprint(String s) {
        synchronized (printWriter_) {
            printWriter_.print("[derby] " + s);
            printWriter_.flush();
        }
    }

    // Prints a line with the "[derby] " prefix and flushes.
    private void dncnetprintln(String s) {
        synchronized (printWriter_) {
            printWriter_.println("[derby] " + s);
            printWriter_.flush();
        }
    }

    // Prints a char[] line with the "[derby] " prefix and flushes.
    private void dncnetprintln(char[] s) {
        synchronized (printWriter_) {
            printWriter_.print("[derby] ");
            printWriter_.println(s);
            printWriter_.flush();
        }
    }
}
| |
package com.intirix.openmm.server.mt.app;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.intirix.openmm.server.api.beans.Episode;
import com.intirix.openmm.server.api.beans.Season;
import com.intirix.openmm.server.api.beans.Show;
import com.intirix.openmm.server.mt.OpenMMMidtierException;
import com.intirix.openmm.server.mt.technical.ShowMidtier;
import com.intirix.openmm.server.mt.technical.tvdb.TVDBMidtier;
import com.intirix.openmm.server.mt.technical.tvdb.TVDBSeasonBean;
import com.omertron.thetvdbapi.model.Series;
/**
 * Application-tier implementation of {@link TVDBApp}: imports and updates
 * shows, seasons, and episodes from TheTVDB via {@link TVDBMidtier}, persisting
 * them through {@link ShowMidtier} and reindexing search afterwards.
 */
public class TVDBAppImpl implements TVDBApp
{
	// Search facade used to reindex after imports/updates.
	private SearchApp searchApp;

	// Persistence midtier for shows/seasons/episodes.
	private ShowMidtier showMidtier;

	// Remote TheTVDB midtier.
	private TVDBMidtier tvdbMidtier;

	/**
	 * Logger
	 */
	private final Log log = LogFactory.getLog( TVDBAppImpl.class );

	public void setShowMidtier( ShowMidtier showMidtier )
	{
		this.showMidtier = showMidtier;
	}

	public void setTVDBMidtier( TVDBMidtier tvdbMidtier )
	{
		this.tvdbMidtier = tvdbMidtier;
	}

	public void setSearchApp( SearchApp searchApp )
	{
		this.searchApp = searchApp;
	}

	/**
	 * Imports a show by TVDB id. If the show was already imported, it is
	 * updated in place instead, and the existing show id is returned.
	 * @param id TVDB identifier of the show
	 * @return the local show id
	 * @throws OpenMMMidtierException on any midtier failure
	 */
	public int importShow( String id ) throws OpenMMMidtierException
	{
		// first check if we already imported the show
		// if we did, then update instead of import
		for ( final Show show: showMidtier.listShows() )
		{
			if ( id.equals( show.getTvdbId() ) )
			{
				updateShow( show.getId() );
				return show.getId();
			}
		}

		final Series series = tvdbMidtier.getShowDetails( id );

		// Map the TVDB series onto a new local Show bean.
		final Show show = new Show();
		// A show is considered active unless TVDB reports its status as "Ended".
		show.setActive( !"Ended".equalsIgnoreCase( series.getStatus() ) );
		show.setBannerPath( series.getBanner() );
		show.setContentRating( series.getContentRating() );
		show.setDescription( series.getOverview() );
		show.setDisplayName( series.getSeriesName() );
		show.setImdbId( series.getImdbId() );
		// Sort/lookup name drops a leading "The " from the display name.
		show.setName( show.getDisplayName().replaceFirst( "^The ", "" ) );
		show.setTvdbId( series.getId() );
		show.setTvdbLang( series.getLanguage() );
		show.setZap2itId( series.getZap2ItId() );

		log.debug( "Adding show " + show.getName() );
		int showId = showMidtier.addShow( show );

		// Import every season (which in turn imports its episodes).
		for ( final TVDBSeasonBean seasonBean: tvdbMidtier.listShowSeasons( id ) )
		{
			addSeason( showId, id, seasonBean );
		}

		searchApp.reindex();

		return showId;
	}

	/**
	 * Refreshes an already-imported show from TVDB: updates mutable show
	 * attributes, adds missing seasons/episodes, and updates existing episodes.
	 * @param showId local show id
	 * @throws OpenMMMidtierException on any midtier failure
	 */
	public void updateShow( int showId ) throws OpenMMMidtierException
	{
		try
		{
			final Show show1 = showMidtier.getShow( showId );
			String tvdbId = show1.getTvdbId();
			final Series series = tvdbMidtier.getShowDetails( tvdbId );

			// Clone so the midtier can diff old vs. new beans.
			final Show show = (Show)show1.clone();
			show.setActive( !"Ended".equalsIgnoreCase( series.getStatus() ) );
			show.setBannerPath( series.getBanner() );

			log.debug( "Updating show " + show.getName() );
			showMidtier.updateShow( show1, show );

			final List< Season > seasons = showMidtier.listSeasons( showId );

			// convert the list into a map keyed by season number
			final Map< Integer, Season > seasonMap = new HashMap< Integer, Season >( 16 );
			for ( final Season season: seasons )
			{
				seasonMap.put( season.getNumber(), season );
			}

			// iterate over the seasons
			for ( final TVDBSeasonBean seasonBean: tvdbMidtier.listShowSeasons( tvdbId ) )
			{
				// check if the season already exists
				if ( seasonMap.containsKey( seasonBean.getSeasonNumber() ) )
				{
					final Season season = seasonMap.get( seasonBean.getSeasonNumber() );

					// get all the episodes from the database
					// NOTE(review): this passes season.getNumber() where other season-scoped
					// midtier calls (e.g. addEpisode) use season.getId(); if listEpisodes
					// expects a season id this would fetch the wrong rows — confirm the
					// ShowMidtier.listEpisodes contract.
					final List< Episode > episodes = showMidtier.listEpisodes( season.getNumber() );

					// convert the list into a map keyed by episode number
					final Map< Integer, Episode > episodeMap = new HashMap< Integer, Episode >( 32 );
					for ( final Episode episode: episodes )
					{
						episodeMap.put( episode.getEpNum(), episode );
					}

					// iterate over the episodes from TVDB
					for ( final com.omertron.thetvdbapi.model.Episode episodeBean: tvdbMidtier.listSeasonEpisodes( tvdbId, season.getNumber() ) )
					{
						if ( episodeMap.containsKey( episodeBean.getEpisodeNumber() ) )
						{
							// update some of the attributes if the episode already exists
							final Episode oldBean = episodeMap.get( episodeBean.getEpisodeNumber() );
							final Episode newBean = (Episode)oldBean.clone();

							newBean.setDescription( episodeBean.getOverview() );
							newBean.setGuests( getGuestList( episodeBean ) );
							newBean.setRating( episodeBean.getRating() );
							newBean.setScreenshotPath( episodeBean.getFilename() );

							log.debug( "Updating episode " + season.getNumber() + 'x' + oldBean.getEpNum() );
							showMidtier.updateEpisode( oldBean, newBean );
						}
						else
						{
							// add the episode if it doesn't already exist
							addEpisode( season.getId(), season.getNumber(), episodeBean );
						}
					}
				}
				else
				{
					// add the season if it didn't exist
					addSeason( showId, tvdbId, seasonBean );
				}
			}

			searchApp.reindex();
		}
		catch ( CloneNotSupportedException e )
		{
			throw new OpenMMMidtierException( e );
		}
	}

	/**
	 * Add a season
	 * @param showId local show id the season belongs to
	 * @param tvdbId TVDB identifier of the show (used to fetch the episodes)
	 * @param seasonBean TVDB season descriptor
	 * @throws OpenMMMidtierException on any midtier failure
	 */
	private void addSeason( int showId, String tvdbId, final TVDBSeasonBean seasonBean ) throws OpenMMMidtierException
	{
		final Season season = new Season();
		season.setShowId( showId );
		season.setNumber( seasonBean.getSeasonNumber() );
		// Season 0 (and any negative number) is TVDB's convention for specials.
		if ( seasonBean.getSeasonNumber() < 1 )
		{
			season.setName( "Specials" );
		}
		else
		{
			season.setName( "Season " + seasonBean.getSeasonNumber() );
		}

		log.debug( "Adding season " + season.getName() );
		final int seasonId = showMidtier.addSeason( season );
		final int seasonNumber = season.getNumber();

		// Import every episode of the new season.
		for ( final com.omertron.thetvdbapi.model.Episode episodeBean: tvdbMidtier.listSeasonEpisodes( tvdbId, seasonBean.getSeasonNumber() ) )
		{
			addEpisode( seasonId, seasonNumber, episodeBean );
		}
	}

	/**
	 * Add an episode
	 * @param seasonId local season id the episode belongs to
	 * @param seasonNumber season number (used only for logging)
	 * @param episodeBean TVDB episode descriptor
	 * @throws OpenMMMidtierException on any midtier failure
	 */
	private void addEpisode( final int seasonId, final int seasonNumber, final com.omertron.thetvdbapi.model.Episode episodeBean ) throws OpenMMMidtierException
	{
		final Episode episode = new Episode();
		episode.setAirDate( episodeBean.getFirstAired() );
		episode.setDescription( episodeBean.getOverview() );
		episode.setGuests( getGuestList( episodeBean ) );
		// DVD episode number falls back to the broadcast episode number when
		// absent or unparseable (TVDB reports it as a decimal string like "3.0").
		if ( episodeBean.getDvdEpisodeNumber() == null )
		{
			episode.setDvdNum( episodeBean.getEpisodeNumber() );
		}
		else
		{
			try
			{
				// Strip the fractional part ("3.0" -> "3") before parsing.
				episode.setDvdNum( Integer.parseInt( episodeBean.getDvdEpisodeNumber().replaceFirst( "\\..*", "" ) ) );
			}
			catch ( NumberFormatException e )
			{
				episode.setDvdNum( episodeBean.getEpisodeNumber() );
			}
		}
		episode.setEpNum( episodeBean.getEpisodeNumber() );
		episode.setName( episodeBean.getEpisodeName() );
		episode.setRating( episodeBean.getRating() );
		episode.setScreenshotPath( episodeBean.getFilename() );
		episode.setSeasonId( seasonId );
		episode.setTvdbId( episodeBean.getId() );
		log.debug( "Adding episode " + seasonNumber + 'x' + episode.getEpNum() + " - " + episode.getName() );
		showMidtier.addEpisode( episode );
	}

	/**
	 * Lists the shows eligible for a TVDB refresh: active shows that have a
	 * non-empty TVDB id.
	 * @return the updatable shows (possibly empty, never null)
	 * @throws OpenMMMidtierException on any midtier failure
	 */
	public List< Show > listShowsThatCanBeUpdated() throws OpenMMMidtierException
	{
		final List< Show > ret = new ArrayList< Show >( 10 );
		final List< Show > shows = showMidtier.listShows();
		for ( final Show show: shows )
		{
			// NOTE(review): getTvdbId() is assumed non-null here; a show with a
			// null TVDB id would throw NPE — confirm the bean contract.
			if ( show.getActive() && show.getTvdbId().length() > 0 )
			{
				ret.add( show );
			}
		}
		return ret;
	}

	/**
	 * Refreshes every updatable show (see {@link #listShowsThatCanBeUpdated()}).
	 * @throws OpenMMMidtierException on any midtier failure
	 */
	public void updateShows() throws OpenMMMidtierException
	{
		for ( final Show show: listShowsThatCanBeUpdated() )
		{
			updateShow( show.getId() );
		}
	}

	/**
	 * Builds a comma-separated guest-star list from the episode bean.
	 * @param episodeBean TVDB episode descriptor (guest stars may be null)
	 * @return comma-separated guest names, or the empty string when none
	 */
	private String getGuestList( com.omertron.thetvdbapi.model.Episode episodeBean )
	{
		final StringBuilder guestBuffer = new StringBuilder( 1024 );
		if ( episodeBean.getGuestStars() != null )
		{
			for ( final String guest: episodeBean.getGuestStars() )
			{
				if ( guestBuffer.length() > 0 )
				{
					guestBuffer.append( ", " );
				}
				guestBuffer.append( guest );
			}
		}
		final String guests = guestBuffer.toString();
		return guests;
	}
}
| |
/*
Copyright 2013 Red Hat, Inc. and/or its affiliates.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package org.jbpm.bpmn2;
import static org.kie.api.runtime.EnvironmentName.ENTITY_MANAGER_FACTORY;
import static org.kie.api.runtime.EnvironmentName.OBJECT_MARSHALLING_STRATEGIES;
import static org.kie.api.runtime.EnvironmentName.TRANSACTION_MANAGER;
import static org.kie.api.runtime.EnvironmentName.USE_PESSIMISTIC_LOCKING;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import javax.transaction.Status;
import javax.transaction.Transaction;
import org.drools.compiler.builder.impl.KnowledgeBuilderConfigurationImpl;
import org.drools.core.SessionConfiguration;
import org.drools.core.audit.WorkingMemoryInMemoryLogger;
import org.drools.core.audit.event.LogEvent;
import org.drools.core.audit.event.RuleFlowLogEvent;
import org.drools.core.audit.event.RuleFlowNodeLogEvent;
import org.drools.core.impl.EnvironmentFactory;
import org.drools.core.util.DroolsStreamUtils;
import org.drools.core.util.MVELSafeHelper;
import org.h2.tools.DeleteDbFiles;
import org.h2.tools.Server;
import org.jbpm.bpmn2.test.RequireLocking;
import org.jbpm.bpmn2.test.RequirePersistence;
import org.jbpm.bpmn2.xml.BPMNDISemanticModule;
import org.jbpm.bpmn2.xml.BPMNExtensionsSemanticModule;
import org.jbpm.bpmn2.xml.BPMNSemanticModule;
import org.jbpm.bpmn2.xml.XmlBPMNProcessDumper;
import org.jbpm.compiler.xml.XmlProcessReader;
import org.jbpm.marshalling.impl.ProcessInstanceResolverStrategy;
import org.jbpm.persistence.util.PersistenceUtil;
import org.jbpm.process.audit.AuditLogService;
import org.jbpm.process.audit.AuditLoggerFactory;
import org.jbpm.process.audit.AuditLoggerFactory.Type;
import org.jbpm.process.audit.JPAAuditLogService;
import org.jbpm.process.audit.NodeInstanceLog;
import org.jbpm.process.audit.ProcessInstanceLog;
import org.jbpm.process.audit.VariableInstanceLog;
import org.jbpm.process.instance.event.DefaultSignalManagerFactory;
import org.jbpm.process.instance.impl.DefaultProcessInstanceManagerFactory;
import org.jbpm.ruleflow.core.RuleFlowProcess;
import org.jbpm.test.util.AbstractBaseTest;
import org.jbpm.workflow.instance.impl.WorkflowProcessInstanceImpl;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieRepository;
import org.kie.api.builder.Message.Level;
import org.kie.api.definition.KiePackage;
import org.kie.api.definition.process.Node;
import org.kie.api.definition.process.Process;
import org.kie.api.io.Resource;
import org.kie.api.marshalling.ObjectMarshallingStrategy;
import org.kie.api.runtime.Environment;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.KieSessionConfiguration;
import org.kie.api.runtime.process.NodeInstance;
import org.kie.api.runtime.process.NodeInstanceContainer;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.process.WorkflowProcessInstance;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.builder.KnowledgeBuilderConfiguration;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.persistence.jpa.JPAKnowledgeService;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import org.kie.internal.runtime.conf.ForceEagerActivationOption;
import org.mvel2.MVEL;
import org.mvel2.ParserContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import bitronix.tm.TransactionManagerServices;
import bitronix.tm.resource.jdbc.PoolingDataSource;
/**
* Base test case for the jbpm-bpmn2 module.
*/
public abstract class JbpmBpmn2TestCase extends AbstractBaseTest {
private static final Logger log = LoggerFactory.getLogger(JbpmBpmn2TestCase.class);
public static String[] txStateName = { "ACTIVE", "MARKED_ROLLBACK",
"PREPARED", "COMMITTED", "ROLLEDBACK", "UNKNOWN", "NO_TRANSACTION",
"PREPARING", "COMMITTING", "ROLLING_BACK" };
public static final boolean PERSISTENCE = Boolean.valueOf(System.getProperty("org.jbpm.test.persistence", "true"));
public static final boolean LOCKING = Boolean.valueOf(System.getProperty("org.jbpm.test.locking", "false"));
private static boolean setupDataSource = false;
private boolean sessionPersistence = false;
private boolean pessimisticLocking = false;
private static H2Server server = new H2Server();
private WorkingMemoryInMemoryLogger logger;
protected AuditLogService logService;
protected static EntityManagerFactory emf;
private static PoolingDataSource ds;
private RequireLocking testReqLocking;
private RequirePersistence testReqPersistence;
// JUnit rule that (1) logs each test's start/finish and (2) captures the
// per-test @RequirePersistence / @RequireLocking annotations so checkTest()
// can skip tests that do not match the current configuration.
@Rule
public TestRule watcher = new TestWatcher() {
    protected void starting(Description description) {
        log.info(" >>> {} <<<", description.getMethodName());
        try {
            String methodName = description.getMethodName();
            // Parameterized tests append "[...]" to the method name; strip it
            // so reflection can locate the plain method.
            int i = methodName.indexOf("[");
            if (i > 0) {
                methodName = methodName.substring(0, i);
            }
            Method method = description.getTestClass().getMethod(methodName);
            testReqPersistence = method.getAnnotation(RequirePersistence.class);
            testReqLocking = method.getAnnotation(RequireLocking.class);
        } catch (Exception ex) {
            // ignore - an unresolvable method simply gets no annotation-based filtering
        }
    };
    protected void finished(Description description) {
        log.info("Finished {}", description);
    };
};
/** Creates a test case using the system-property defaults for persistence and locking. */
public JbpmBpmn2TestCase() {
    this(PERSISTENCE, LOCKING);
}

/** Creates a test case with explicit persistence and the default locking setting. */
public JbpmBpmn2TestCase(boolean sessionPersistence) {
    this(sessionPersistence, LOCKING);
}

/**
 * @param sessionPersistance whether sessions are JPA-persisted
 * @param locking whether pessimistic locking is used together with persistence
 */
public JbpmBpmn2TestCase(boolean sessionPersistance, boolean locking) {
    // Configure the default user-group callback used by human-task tests.
    System.setProperty("jbpm.user.group.mapping", "classpath:/usergroups.properties");
    System.setProperty("jbpm.usergroup.callback", "org.jbpm.task.identity.DefaultUserGroupCallbackImpl");
    this.sessionPersistence = sessionPersistance;
    this.pessimisticLocking = locking;
}
/**
 * Creates and initializes the Bitronix pooling datasource "jdbc/testDS1"
 * from the test datasource properties.
 *
 * @return the initialized datasource (caller is responsible for close())
 */
public static PoolingDataSource setupPoolingDataSource() {
    Properties dsProps = PersistenceUtil.getDatasourceProperties();
    String jdbcUrl = dsProps.getProperty("url");
    String driverClass = dsProps.getProperty("driverClassName");
    // Setup the datasource
    PoolingDataSource ds1 = PersistenceUtil.setupPoolingDataSource(dsProps, "jdbc/testDS1", false);
    // H2 needs the raw JDBC url passed through as a driver property.
    // NOTE(review): driverClass is assumed non-null here - TODO confirm the
    // datasource properties always define driverClassName.
    if( driverClass.startsWith("org.h2") ) {
        ds1.getDriverProperties().setProperty("url", jdbcUrl);
    }
    ds1.init();
    return ds1;
}
public void setPersistence(boolean sessionPersistence) {
    this.sessionPersistence = sessionPersistence;
}

public boolean isPersistence() {
    return sessionPersistence;
}

// NOTE(review): the two setters below assign *static* fields from instance
// methods, so they affect every test class sharing this base - apparently
// intentional for suite-wide wiring, but worth confirming.
public void setEntityManagerFactory(EntityManagerFactory emf) {
    JbpmBpmn2TestCase.emf = emf;
}

public void setPoolingDataSource(PoolingDataSource ds) {
    JbpmBpmn2TestCase.ds = ds;
}
/**
 * Can be called manually in method annotated with @BeforeClass.
 * Starts the in-process H2 server, creates the pooling datasource and the
 * JPA EntityManagerFactory; tearDownClass() reverses this when the flag
 * below was set.
 *
 * @throws Exception
 */
public static void setUpDataSource() throws Exception {
    setupDataSource = true; // tells tearDownClass() there is something to clean up
    server.start();
    ds = setupPoolingDataSource();
    emf = Persistence.createEntityManagerFactory("org.jbpm.persistence.jpa");
}
/**
 * Skips (via JUnit {@code Assume}) any test whose {@code @RequirePersistence}
 * or {@code @RequireLocking} annotation does not match the configuration this
 * test-case instance was created with. The annotations are captured per-test
 * by the {@code watcher} rule.
 */
@Before
public void checkTest() {
    if (testReqPersistence != null && testReqPersistence.value() != sessionPersistence) {
        log.info("Skipped - test is run only {} persistence", (testReqPersistence.value() ? "with" : "without"));
        String comment = testReqPersistence.comment();
        if (comment.length() > 0) {
            log.info(comment);
        }
        Assume.assumeTrue(false);
    }
    if (testReqLocking != null && testReqLocking.value() != pessimisticLocking) {
        log.info("Skipped - test is run only {} pessimistic locking", (testReqLocking.value() ? "with" : "without"));
        // BUG FIX: this branch previously read testReqPersistence.comment(),
        // which logged the wrong comment and threw NullPointerException when
        // only @RequireLocking was present on the test method.
        String comment = testReqLocking.comment();
        if (comment.length() > 0) {
            log.info(comment);
        }
        Assume.assumeTrue(false);
    }
}
// Runs after every test: wipe audit history so tests do not observe each
// other's process/node/variable logs.
@After
public void clear() {
    clearHistory();
}
/**
 * Suite-level cleanup (only when setUpDataSource() ran): rolls back any
 * transaction a test left open, then closes the EMF and datasource, stops
 * the H2 server and deletes its files. A leaked transaction fails the run
 * only at the very end, so the other resources are still released first.
 */
@AfterClass
public static void tearDownClass() throws Exception {
    if (setupDataSource) {
        String runningTransactionStatus = null;
        // Clean up possible transactions
        Transaction tx = TransactionManagerServices.getTransactionManager()
                .getCurrentTransaction();
        if (tx != null) {
            int testTxState = tx.getStatus();
            if (testTxState != Status.STATUS_NO_TRANSACTION
                    && testTxState != Status.STATUS_ROLLEDBACK
                    && testTxState != Status.STATUS_COMMITTED) {
                try {
                    tx.rollback();
                } catch (Throwable t) {
                    // do nothing..
                }
                // remember the offending state; reported after the cleanup below
                runningTransactionStatus = txStateName[testTxState];
            }
        }
        if (emf != null) {
            try {
                emf.close();
            } catch (Exception ex) {
                // ignore
            }
            emf = null;
        }
        // If everything is closed, close data source and stop server.
        if (ds != null) {
            try {
                ds.close();
            } catch (Exception ex) {
                // ignore
            }
            ds = null;
        }
        server.stop();
        DeleteDbFiles.execute("~", "jbpm-db", true);
        if (runningTransactionStatus != null) {
            Assert.fail("Transaction had status "
                    + runningTransactionStatus
                    + " at the end of the test.");
        }
    }
}
/**
 * Builds a KieBase from the given classpath BPMN2 files, round-tripping each
 * one through the XML dumper first (see buildAndDumpBPMN2Process).
 */
protected KieBase createKnowledgeBase(String... process) throws Exception {
    List<Resource> resources = new ArrayList<Resource>();
    for (String processFile : process) {
        resources.addAll(buildAndDumpBPMN2Process(processFile));
    }
    Resource[] asArray = resources.toArray(new Resource[resources.size()]);
    return createKnowledgeBaseFromResources(asArray);
}

/** Builds a KieBase from the given classpath BPMN2 files without the dump round-trip. */
protected KieBase createKnowledgeBaseWithoutDumper(String... process) throws Exception {
    Resource[] resources = new Resource[process.length];
    int index = 0;
    for (String processFile : process) {
        resources[index++] = ResourceFactory.newClassPathResource(processFile);
    }
    return createKnowledgeBaseFromResources(resources);
}
// Important to test this since persistence relies on this
/**
 * Parses the given classpath BPMN2 file, dumps each contained process back to
 * XML, and returns the dumped XML as resources - verifying that the XML the
 * dumper produces (as used by persistence) is itself loadable.
 *
 * @param process classpath location of the BPMN2 definition
 * @return one resource per process found in the file
 */
protected List<Resource> buildAndDumpBPMN2Process(String process) throws SAXException, IOException {
    KnowledgeBuilderConfiguration conf = KnowledgeBuilderFactory.newKnowledgeBuilderConfiguration();
    ((KnowledgeBuilderConfigurationImpl) conf).initSemanticModules();
    ((KnowledgeBuilderConfigurationImpl) conf).addSemanticModule(new BPMNSemanticModule());
    ((KnowledgeBuilderConfigurationImpl) conf).addSemanticModule(new BPMNDISemanticModule());
    ((KnowledgeBuilderConfigurationImpl) conf).addSemanticModule(new BPMNExtensionsSemanticModule());
    Resource classpathResource = ResourceFactory.newClassPathResource(process);
    // Dump and reread
    XmlProcessReader processReader
            = new XmlProcessReader(((KnowledgeBuilderConfigurationImpl) conf).getSemanticModules(), getClass().getClassLoader());
    List<Process> processes = processReader.read(this.getClass().getResourceAsStream("/" + process));
    List<Resource> resources = new ArrayList<Resource>();
    for (Process p : processes) {
        RuleFlowProcess ruleFlowProcess = (RuleFlowProcess) p;
        String dumpedString = XmlBPMNProcessDumper.INSTANCE.dump(ruleFlowProcess);
        Resource resource = ResourceFactory.newReaderResource(new StringReader(dumpedString));
        // Preserve the original paths so build errors point at the real file.
        resource.setSourcePath(classpathResource.getSourcePath());
        resource.setTargetPath(classpathResource.getTargetPath());
        resources.add(resource);
    }
    return resources;
}
/**
 * Builds the given resources into a KieModule (deployed under the default
 * release id) and returns the resulting KieBase.
 *
 * @throws RuntimeException when the build reports ERROR messages
 */
protected KieBase createKnowledgeBaseFromResources(Resource... process)
        throws Exception {
    KieServices ks = KieServices.Factory.get();
    KieRepository kr = ks.getRepository();
    if (process.length > 0) {
        KieFileSystem kfs = ks.newKieFileSystem();
        for (Resource p : process) {
            kfs.write(p);
        }
        KieBuilder kb = ks.newKieBuilder(kfs);
        kb.buildAll(); // kieModule is automatically deployed to KieRepository
        // if successfully built.
        if (kb.getResults().hasMessages(Level.ERROR)) {
            throw new RuntimeException("Build Errors:\n"
                    + kb.getResults().toString());
        }
    }
    KieContainer kContainer = ks.newKieContainer(kr.getDefaultReleaseId());
    return kContainer.getKieBase();
}
/**
 * Builds the process, serializes its first KiePackage to a temporary .pkg
 * file on disk, then rebuilds a KieBase from that file - exercising the
 * binary package round-trip.
 */
protected KieBase createKnowledgeBaseFromDisc(String process) throws Exception {
    KieServices ks = KieServices.Factory.get();
    KieRepository kr = ks.getRepository();
    KieFileSystem kfs = ks.newKieFileSystem();
    Resource res = ResourceFactory.newClassPathResource(process);
    kfs.write(res);
    KieBuilder kb = ks.newKieBuilder(kfs);
    kb.buildAll(); // kieModule is automatically deployed to KieRepository
    // if successfully built.
    if (kb.getResults().hasMessages(Level.ERROR)) {
        throw new RuntimeException("Build Errors:\n"
                + kb.getResults().toString());
    }
    KieContainer kContainer = ks.newKieContainer(kr.getDefaultReleaseId());
    KieBase kbase = kContainer.getKieBase();
    // NOTE(review): if the kbase contains no packages, packageFile stays null
    // and newFileResource(null) below will fail - confirm the inputs always
    // yield at least one package.
    File packageFile = null;
    for (KiePackage pkg : kbase.getKiePackages() ) {
        packageFile = new File(System.getProperty("java.io.tmpdir") + File.separator + pkg.getName()+".pkg");
        packageFile.deleteOnExit();
        FileOutputStream out = new FileOutputStream(packageFile);
        try {
            DroolsStreamUtils.streamOut(out, pkg);
        } finally {
            out.close();
        }
        // store first package only
        break;
    }
    kfs.delete(res.getSourcePath());
    kfs.write(ResourceFactory.newFileResource(packageFile));
    kb = ks.newKieBuilder(kfs);
    kb.buildAll(); // kieModule is automatically deployed to KieRepository
    // if successfully built.
    if (kb.getResults().hasMessages(Level.ERROR)) {
        throw new RuntimeException("Build Errors:\n"
                + kb.getResults().toString());
    }
    kContainer = ks.newKieContainer(kr.getDefaultReleaseId());
    kbase = kContainer.getKieBase();
    return kbase;
}
/** Opens a session with default configuration and environment. */
protected StatefulKnowledgeSession createKnowledgeSession(KieBase kbase)
        throws Exception {
    return createKnowledgeSession(kbase, null, null);
}

/** Opens a session with a caller-supplied environment. */
protected StatefulKnowledgeSession createKnowledgeSession(KieBase kbase,
        Environment env) throws Exception {
    return createKnowledgeSession(kbase, null, env);
}

/**
 * Opens a session on the given kbase. With persistence enabled the session
 * is JPA-backed and wired with a JPA audit logger and {@code logService};
 * otherwise a plain in-memory session with an in-memory logger is created.
 *
 * @param conf optional session configuration; defaulted when null
 * @param env  optional environment; created/defaulted when null
 */
protected StatefulKnowledgeSession createKnowledgeSession(KieBase kbase,
        KieSessionConfiguration conf, Environment env) throws Exception {
    StatefulKnowledgeSession result;
    if (conf == null) {
        conf = KnowledgeBaseFactory.newKnowledgeSessionConfiguration();
    }
    // Do NOT use the Pseudo clock yet..
    // conf.setOption( ClockTypeOption.get( ClockType.PSEUDO_CLOCK.getId() )
    // );
    if (sessionPersistence) {
        if (env == null) {
            env = createEnvironment(emf);
        }
        if( pessimisticLocking ) {
            env.set(USE_PESSIMISTIC_LOCKING, true);
        }
        conf.setOption(ForceEagerActivationOption.YES);
        result = JPAKnowledgeService.newStatefulKnowledgeSession(kbase,
                conf, env);
        AuditLoggerFactory.newInstance(Type.JPA, result, null);
        logService = new JPAAuditLogService(env);
    } else {
        if (env == null) {
            env = EnvironmentFactory.newEnvironment();
        }
        // Lightweight (non-persistent) process runtime.
        Properties defaultProps = new Properties();
        defaultProps.setProperty("drools.processSignalManagerFactory",
                DefaultSignalManagerFactory.class.getName());
        defaultProps.setProperty("drools.processInstanceManagerFactory",
                DefaultProcessInstanceManagerFactory.class.getName());
        // NOTE(review): a caller-supplied conf is discarded here in the
        // non-persistent branch - TODO confirm that is intended.
        conf = SessionConfiguration.newInstance(defaultProps);
        conf.setOption(ForceEagerActivationOption.YES);
        result = (StatefulKnowledgeSession) kbase.newKieSession(conf, env);
        logger = new WorkingMemoryInMemoryLogger(result);
    }
    return result;
}

/** Builds a kbase from the given BPMN2 files and opens a session on it. */
protected StatefulKnowledgeSession createKnowledgeSession(String... process)
        throws Exception {
    KieBase kbase = createKnowledgeBase(process);
    return createKnowledgeSession(kbase);
}
/**
 * Simulates a crash/restart: with persistence on, disposes the given session
 * and reloads it by id from the database (with a fresh environment when
 * {@code noCache} is true); without persistence, returns the session as-is.
 */
protected KieSession restoreSession(KieSession ksession, boolean noCache) {
    if (sessionPersistence) {
        long id = ksession.getIdentifier();
        KieBase kbase = ksession.getKieBase();
        Environment env = null;
        if (noCache) {
            env = createEnvironment(emf);
        } else {
            env = ksession.getEnvironment();
        }
        if( pessimisticLocking ) {
            env.set(USE_PESSIMISTIC_LOCKING, true);
        }
        KieSessionConfiguration config = ksession.getSessionConfiguration();
        config.setOption(ForceEagerActivationOption.YES);
        StatefulKnowledgeSession result = JPAKnowledgeService.loadStatefulKnowledgeSession(id, kbase, config, env);
        AuditLoggerFactory.newInstance(Type.JPA, result, null);
        ksession.dispose();
        return result;
    } else {
        return ksession;
    }
}

// No-op overloads kept for tests that do not exercise persistence.
protected KieSession restoreSession(KieSession ksession) {
    return ksession;
}

protected StatefulKnowledgeSession restoreSession(StatefulKnowledgeSession ksession) {
    return ksession;
}
/**
 * Builds a JPA environment (EMF + Bitronix transaction manager). With session
 * persistence the ProcessInstanceResolverStrategy is prepended to the
 * marshalling strategies so it takes precedence over the defaults.
 */
protected Environment createEnvironment(EntityManagerFactory emf) {
    Environment env = EnvironmentFactory.newEnvironment();
    env.set(ENTITY_MANAGER_FACTORY, emf);
    env.set(TRANSACTION_MANAGER,
            TransactionManagerServices.getTransactionManager());
    if (sessionPersistence) {
        ObjectMarshallingStrategy[] strategies = (ObjectMarshallingStrategy[]) env.get(OBJECT_MARSHALLING_STRATEGIES);
        List<ObjectMarshallingStrategy> listStrategies =new ArrayList<ObjectMarshallingStrategy>(Arrays.asList(strategies));
        listStrategies.add(0, new ProcessInstanceResolverStrategy());
        strategies = new ObjectMarshallingStrategy[listStrategies.size()];
        env.set(OBJECT_MARSHALLING_STRATEGIES, listStrategies.toArray(strategies));
    }
    return env;
}
/** Asserts the instance reached STATE_COMPLETED (audit log or live state). */
public void assertProcessInstanceCompleted(ProcessInstance processInstance) {
    boolean completed = assertProcessInstanceState(ProcessInstance.STATE_COMPLETED, processInstance);
    assertTrue("Process instance has not been completed.", completed);
}

/** Asserts the instance reached STATE_ABORTED (audit log or live state). */
public void assertProcessInstanceAborted(ProcessInstance processInstance) {
    boolean aborted = assertProcessInstanceState(ProcessInstance.STATE_ABORTED, processInstance);
    assertTrue("Process instance has not been aborted.", aborted);
}

/** Asserts the instance is still ACTIVE (or PENDING). */
public void assertProcessInstanceActive(ProcessInstance processInstance) {
    boolean active = assertProcessInstanceState(ProcessInstance.STATE_ACTIVE, processInstance)
            || assertProcessInstanceState(ProcessInstance.STATE_PENDING, processInstance);
    assertTrue("Process instance is not active.", active);
}

/** Asserts the session no longer knows the instance, i.e. it finished. */
public void assertProcessInstanceFinished(ProcessInstance processInstance,
        KieSession ksession) {
    ProcessInstance stillThere = ksession.getProcessInstance(processInstance.getId());
    assertNull("Process instance has not been finished.", stillThere);
}
/**
 * Asserts that every given node is currently active in the process instance.
 * With persistence the audit log is replayed (ENTER adds, EXIT removes, so
 * only still-open nodes remain); otherwise the live node instances are
 * inspected recursively.
 */
public void assertNodeActive(long processInstanceId, KieSession ksession,
        String... name) {
    List<String> names = new ArrayList<String>();
    for (String n : name) {
        names.add(n);
    }
    ProcessInstance processInstance = ksession
            .getProcessInstance(processInstanceId);
    if (processInstance instanceof WorkflowProcessInstance) {
        if (sessionPersistence) {
            List<? extends NodeInstanceLog> logs = logService.findNodeInstances(processInstanceId); // ENTER -> EXIT is correctly ordered
            if (logs != null) {
                List<String> activeNodes = new ArrayList<String>();
                for (NodeInstanceLog l : logs) {
                    String nodeName = l.getNodeName();
                    if (l.getType() == NodeInstanceLog.TYPE_ENTER && names.contains(nodeName)) {
                        activeNodes.add(nodeName);
                    }
                    if (l.getType() == NodeInstanceLog.TYPE_EXIT && names.contains(nodeName)) {
                        activeNodes.remove(nodeName);
                    }
                }
                // whatever survived the replay is active; drop it from the
                // "missing" list so only truly inactive names remain
                names.removeAll(activeNodes);
            }
        } else {
            assertNodeActive((WorkflowProcessInstance) processInstance, names);
        }
    }
    if (!names.isEmpty()) {
        String s = names.get(0);
        for (int i = 1; i < names.size(); i++) {
            s += ", " + names.get(i);
        }
        fail("Node(s) not active: " + s);
    }
}
/**
 * Removes from {@code names} every node name found active in the container,
 * descending into nested containers (sub-processes, composite nodes).
 */
private void assertNodeActive(NodeInstanceContainer container,
        List<String> names) {
    for (NodeInstance nodeInstance : container.getNodeInstances()) {
        names.remove(nodeInstance.getNodeName());
        if (nodeInstance instanceof NodeInstanceContainer) {
            assertNodeActive((NodeInstanceContainer) nodeInstance, names);
        }
    }
}
/** Fails listing every given node that was never triggered for the instance. */
public void assertNodeTriggered(long processInstanceId, String... nodeNames) {
    List<String> missing = getNotTriggeredNodes(processInstanceId, nodeNames);
    if (missing.isEmpty()) {
        return;
    }
    StringBuilder joined = new StringBuilder(missing.get(0));
    for (int i = 1; i < missing.size(); i++) {
        joined.append(", ").append(missing.get(i));
    }
    fail("Node(s) not executed: " + joined);
}

/** Asserts that none of the given nodes were triggered for the instance. */
public void assertNotNodeTriggered(long processInstanceId,
        String... nodeNames) {
    List<String> notTriggered = getNotTriggeredNodes(processInstanceId, nodeNames);
    assertTrue(Arrays.equals(notTriggered.toArray(), nodeNames));
}
/**
 * Counts how many node log events (ENTER or EXIT with persistence, any
 * node event otherwise) were recorded for the named node.
 */
public int getNumberOfNodeTriggered(long processInstanceId,
        String node) {
    int counter = 0;
    if (sessionPersistence) {
        List<NodeInstanceLog> logs = logService.findNodeInstances(processInstanceId);
        if (logs != null) {
            for (NodeInstanceLog nodeLog : logs) {
                boolean enterOrExit = nodeLog.getType() == NodeInstanceLog.TYPE_ENTER
                        || nodeLog.getType() == NodeInstanceLog.TYPE_EXIT;
                if (enterOrExit && node.equals(nodeLog.getNodeName())) {
                    counter++;
                }
            }
        }
    } else {
        for (LogEvent event : logger.getLogEvents()) {
            if (!(event instanceof RuleFlowNodeLogEvent)) {
                continue;
            }
            if (node.equals(((RuleFlowNodeLogEvent) event).getNodeName())) {
                counter++;
            }
        }
    }
    return counter;
}
/**
 * Counts started instances of the given process definition: the audit-log
 * count with persistence, or BEFORE_RULEFLOW_CREATED events from the
 * in-memory logger otherwise. Returns 0 when nothing was recorded.
 */
public int getNumberOfProcessInstances(String processId) {
    int counter = 0;
    if (sessionPersistence) {
        List<ProcessInstanceLog> logs = logService.findProcessInstances(processId);
        if (logs != null) {
            return logs.size();
        }
    } else {
        // copied to an array before iterating - presumably to avoid
        // concurrent modification of the live event list (NOTE(review))
        LogEvent [] events = logger.getLogEvents().toArray(new LogEvent[0]);
        for (LogEvent event : events ) {
            if (event.getType() == LogEvent.BEFORE_RULEFLOW_CREATED) {
                if(((RuleFlowLogEvent) event).getProcessId().equals(processId)) {
                    counter++;
                }
            }
        }
    }
    return counter;
}
/**
 * Reports whether the instance is in the given state, consulting the audit
 * log with persistence (false when no log entry exists) or the live
 * instance otherwise.
 */
protected boolean assertProcessInstanceState(int state, ProcessInstance processInstance) {
    if (!sessionPersistence) {
        return processInstance.getState() == state;
    }
    ProcessInstanceLog log = logService.findProcessInstance(processInstance.getId());
    return log != null && log.getStatus() == state;
}
/**
 * Returns the subset of {@code nodeNames} for which no node event (ENTER or
 * EXIT with persistence, any node event otherwise) was recorded.
 */
private List<String> getNotTriggeredNodes(long processInstanceId,
        String... nodeNames) {
    List<String> names = new ArrayList<String>();
    for (String nodeName : nodeNames) {
        names.add(nodeName);
    }
    if (sessionPersistence) {
        List<NodeInstanceLog> logs = logService
                .findNodeInstances(processInstanceId);
        if (logs != null) {
            for (NodeInstanceLog l : logs) {
                String nodeName = l.getNodeName();
                if ((l.getType() == NodeInstanceLog.TYPE_ENTER || l
                        .getType() == NodeInstanceLog.TYPE_EXIT)
                        && names.contains(nodeName)) {
                    names.remove(nodeName);
                }
            }
        }
    } else {
        for (LogEvent event : logger.getLogEvents()) {
            if (event instanceof RuleFlowNodeLogEvent) {
                String nodeName = ((RuleFlowNodeLogEvent) event)
                        .getNodeName();
                if (names.contains(nodeName)) {
                    names.remove(nodeName);
                }
            }
        }
    }
    return names;
}
/**
 * Returns the ids of nodes recorded for the process instance (node *ids*,
 * not names, despite the method name).
 */
protected List<String> getCompletedNodes(long processInstanceId) {
    List<String> names = new ArrayList<String>();
    if (sessionPersistence) {
        // NOTE(review): a fresh JPAAuditLogService is created per call and
        // never disposed - confirm this does not leak persistence resources.
        AuditLogService auditLogService = new JPAAuditLogService(emf);
        List<NodeInstanceLog> logs = auditLogService.findNodeInstances(processInstanceId);
        if (logs != null) {
            for (NodeInstanceLog l : logs) {
                names.add(l.getNodeId());
            }
        }
    } else {
        for (LogEvent event : logger.getLogEvents()) {
            if (event instanceof RuleFlowNodeLogEvent) {
                // 27 is presumably the AFTER-node-triggered event type -
                // TODO replace this magic number with the named LogEvent constant.
                if( event.getType() == 27 ) {
                    names.add(((RuleFlowNodeLogEvent) event).getNodeId());
                }
            }
        }
    }
    return names;
}
/**
 * Best-effort wipe of audit history between tests: clears the JPA audit log
 * (persistence) or the in-memory logger.
 */
protected void clearHistory() {
    if (sessionPersistence) {
        try {
            logService.clear();
        } catch(Exception e) {
            // deliberately swallowed: logService may be null or already closed
            // when a test never opened a persistent session
        }
    } else {
        if (logger != null) {
            logger.clear();
        }
    }
}
/** Fails listing every given variable name not present on the live instance. */
public void assertProcessVarExists(ProcessInstance process,
        String... processVarNames) {
    WorkflowProcessInstanceImpl instance = (WorkflowProcessInstanceImpl) process;
    List<String> missing = new ArrayList<String>(Arrays.asList(processVarNames));
    for (String existing : instance.getVariables().keySet()) {
        missing.remove(existing);
    }
    if (!missing.isEmpty()) {
        StringBuilder joined = new StringBuilder(missing.get(0));
        for (int i = 1; i < missing.size(); i++) {
            joined.append(", ").append(missing.get(i));
        }
        fail("Process Variable(s) do not exist: " + joined);
    }
}
/**
 * Returns the current value of a process variable as a String: the latest
 * audit-log entry with persistence, or the live variable's toString()
 * otherwise; null when the variable is unset.
 */
public String getProcessVarValue(ProcessInstance processInstance, String varName) {
    String actualValue = null;
    if (sessionPersistence) {
        List<VariableInstanceLog> log = logService.findVariableInstances(processInstance.getId(), varName);
        if (log != null && !log.isEmpty()) {
            // the last entry holds the most recent value
            actualValue = log.get(log.size()-1).getValue();
        }
    } else {
        Object value = ((WorkflowProcessInstanceImpl) processInstance).getVariable(varName);
        if (value != null) {
            actualValue = value.toString();
        }
    }
    return actualValue;
}
/**
 * Asserts that the process variable holds the expected value (compared via
 * {@link #getProcessVarValue}, i.e. in its String form with persistence).
 */
public void assertProcessVarValue(ProcessInstance processInstance, String varName, Object varValue) {
    String actualValue = getProcessVarValue(processInstance, varName);
    // fixed typo in the failure message ("misatch" -> "mismatch")
    assertEquals("Variable " + varName + " value mismatch!", varValue, actualValue);
}
/** Fails listing every given node name absent from the process definition. */
public void assertNodeExists(ProcessInstance process, String... nodeNames) {
    WorkflowProcessInstanceImpl instance = (WorkflowProcessInstanceImpl) process;
    List<String> missing = new ArrayList<String>(Arrays.asList(nodeNames));
    for (Node node : instance.getNodeContainer().getNodes()) {
        missing.remove(node.getName());
    }
    if (!missing.isEmpty()) {
        StringBuilder joined = new StringBuilder(missing.get(0));
        for (int i = 1; i < missing.size(); i++) {
            joined.append(", ").append(missing.get(i));
        }
        fail("Node(s) do not exist: " + joined);
    }
}
/**
 * Asserts the named node exists and has exactly {@code num} incoming
 * connections. (The method-name spelling is kept for source compatibility.)
 */
public void assertNumOfIncommingConnections(ProcessInstance process,
        String nodeName, int num) {
    assertNodeExists(process, nodeName);
    WorkflowProcessInstanceImpl instance = (WorkflowProcessInstanceImpl) process;
    for (Node node : instance.getNodeContainer().getNodes()) {
        if (node.getName().equals(nodeName)) {
            if (node.getIncomingConnections().size() != num) {
                // fixed typo in the failure message ("incomming" -> "incoming")
                fail("Expected incoming connections: " + num + " - found "
                        + node.getIncomingConnections().size());
            } else {
                break;
            }
        }
    }
}
/** Asserts the named node exists and has exactly {@code num} outgoing connections. */
public void assertNumOfOutgoingConnections(ProcessInstance process,
        String nodeName, int num) {
    assertNodeExists(process, nodeName);
    WorkflowProcessInstanceImpl instance = (WorkflowProcessInstanceImpl) process;
    for (Node node : instance.getNodeContainer().getNodes()) {
        if (!node.getName().equals(nodeName)) {
            continue;
        }
        int actual = node.getOutgoingConnections().size();
        if (actual != num) {
            fail("Expected outgoing connections: " + num + " - found " + actual);
        }
        break;
    }
}
/** Asserts the process definition's version matches. */
public void assertVersionEquals(ProcessInstance process, String version) {
    WorkflowProcessInstanceImpl instance = (WorkflowProcessInstanceImpl) process;
    String actual = instance.getWorkflowProcess().getVersion();
    if (!actual.equals(version)) {
        fail("Expected version: " + version + " - found " + actual);
    }
}

/** Asserts the process definition's name matches. */
public void assertProcessNameEquals(ProcessInstance process, String name) {
    WorkflowProcessInstanceImpl instance = (WorkflowProcessInstanceImpl) process;
    String actual = instance.getWorkflowProcess().getName();
    if (!actual.equals(name)) {
        fail("Expected name: " + name + " - found " + actual);
    }
}

/** Asserts the process definition's package name matches. */
public void assertPackageNameEquals(ProcessInstance process,
        String packageName) {
    WorkflowProcessInstanceImpl instance = (WorkflowProcessInstanceImpl) process;
    String actual = instance.getWorkflowProcess().getPackageName();
    if (!actual.equals(packageName)) {
        fail("Expected package name: " + packageName + " - found " + actual);
    }
}
/**
 * Reads the whole reader and evaluates it as an MVEL expression with the
 * given variables; IOExceptions are wrapped in a RuntimeException.
 */
public Object eval(Reader reader, Map vars) {
    try {
        return eval(toString(reader), vars);
    } catch (IOException e) {
        throw new RuntimeException("Exception Thrown", e);
    }
}

/**
 * Drains the reader into a String.
 * FIX: reads through a char buffer instead of one character per read() call,
 * avoiding a per-character method call on unbuffered readers.
 */
private String toString(Reader reader) throws IOException {
    StringBuilder sb = new StringBuilder(1024);
    char[] buffer = new char[1024];
    int read;
    while ((read = reader.read(buffer)) != -1) {
        sb.append(buffer, 0, read);
    }
    return sb.toString();
}
/**
 * Compiles and evaluates an MVEL expression with the jBPM task packages
 * imported; "now" is bound to the current date in {@code vars}.
 */
public Object eval(String str, Map vars) {
    ParserContext context = new ParserContext();
    context.addPackageImport("org.jbpm.task");
    context.addPackageImport("org.jbpm.task.service");
    context.addPackageImport("org.jbpm.task.query");
    context.addPackageImport("java.util");
    vars.put("now", new Date());
    return MVELSafeHelper.getEvaluator().executeExpression(MVEL.compileExpression(str, context),
            vars);
}
// Convenience assertions keyed by process-instance id rather than instance.
protected void assertProcessInstanceCompleted(long processInstanceId, KieSession ksession) {
    assertNull("Process instance has not completed.", ksession.getProcessInstance(processInstanceId));
}

protected void assertProcessInstanceAborted(long processInstanceId, KieSession ksession) {
    ProcessInstance instance = ksession.getProcessInstance(processInstanceId);
    assertNull(instance);
}

protected void assertProcessInstanceActive(long processInstanceId, KieSession ksession) {
    ProcessInstance instance = ksession.getProcessInstance(processInstanceId);
    assertNotNull(instance);
}
/**
 * Minimal wrapper around an embedded H2 TCP server used by the persistence
 * tests; start() is idempotent and wipes any previous "jbpm-db" files.
 */
private static class H2Server {
    private Server server;
    public synchronized void start() {
        if (server == null || !server.isRunning(false)) {
            try {
                DeleteDbFiles.execute("~", "jbpm-db", true);
                server = Server.createTcpServer(new String[0]);
                server.start();
            } catch (SQLException e) {
                throw new RuntimeException(
                        "Cannot start h2 server database", e);
            }
        }
    }
    // Safety net only; normal shutdown happens via tearDownClass() -> stop().
    public synchronized void finalize() throws Throwable {
        stop();
        super.finalize();
    }
    public void stop() {
        if (server != null) {
            server.stop();
            server.shutdown();
            DeleteDbFiles.execute("~", "jbpm-db", true);
            server = null;
        }
    }
}
}
| |
/*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.beans;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EventListener;
import java.util.EventListenerProxy;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* This is an abstract class that provides base functionality
* for the {@link PropertyChangeSupport PropertyChangeSupport} class
* and the {@link VetoableChangeSupport VetoableChangeSupport} class.
*
* @see PropertyChangeListenerMap
* @see VetoableChangeListenerMap
*
* @author Sergey A. Malenkov
*/
/**
 * Shared backing store for {@code PropertyChangeSupport} and
 * {@code VetoableChangeSupport}: maps a property name (or {@code null} for
 * "all properties") to an array of listeners of type {@code L}.
 *
 * @param <L> the concrete listener type held by this map
 *
 * @author Sergey A. Malenkov
 */
abstract class ChangeListenerMap<L extends EventListener> {

    /** Lazily created; reset to {@code null} when the last listener is removed. */
    private Map<String, L[]> map;

    /**
     * Factory for listener arrays of the concrete type. Subclasses may share
     * a single empty-array instance when {@code length} is {@code 0}.
     *
     * @param length the array length
     * @return an array with specified length
     */
    protected abstract L[] newArray(int length);

    /**
     * Wraps {@code listener} in a proxy bound to the property {@code name}.
     *
     * @param name the name of the property to listen on
     * @param listener the listener to process events
     * @return a proxy listener
     */
    protected abstract L newProxy(String name, L listener);

    /**
     * Registers {@code listener} for property {@code name}. Duplicates are
     * kept: a listener added n times is called n times.
     *
     * @param name the name of the property to listen on
     * @param listener the listener to process events
     */
    public final synchronized void add(String name, L listener) {
        if (this.map == null) {
            this.map = new HashMap<>();
        }
        L[] current = this.map.get(name);
        int size = (current == null) ? 0 : current.length;
        L[] expanded = newArray(size + 1);
        if (current != null) {
            System.arraycopy(current, 0, expanded, 0, size);
        }
        expanded[size] = listener;
        this.map.put(name, expanded);
    }

    /**
     * Unregisters one occurrence of {@code listener} from property
     * {@code name}; the whole map is dropped once it becomes empty.
     *
     * @param name the name of the property to listen on
     * @param listener the listener to process events
     */
    public final synchronized void remove(String name, L listener) {
        if (this.map == null) {
            return;
        }
        L[] current = this.map.get(name);
        if (current == null) {
            return;
        }
        for (int i = 0; i < current.length; i++) {
            if (!listener.equals(current[i])) {
                continue;
            }
            int remaining = current.length - 1;
            if (remaining == 0) {
                this.map.remove(name);
                if (this.map.isEmpty()) {
                    this.map = null;
                }
            } else {
                L[] shrunk = newArray(remaining);
                System.arraycopy(current, 0, shrunk, 0, i);
                System.arraycopy(current, i + 1, shrunk, i, remaining - i);
                this.map.put(name, shrunk);
            }
            return;
        }
    }

    /**
     * Returns the internal listener array for {@code name}, or {@code null}.
     *
     * @param name the name of the property
     * @return the corresponding list of listeners
     */
    public final synchronized L[] get(String name) {
        return (this.map == null) ? null : this.map.get(name);
    }

    /**
     * Replaces the whole listener list for {@code name}; a {@code null}
     * list removes the entry instead.
     *
     * @param name the name of the property
     * @param listeners new list of listeners
     */
    public final void set(String name, L[] listeners) {
        if (listeners == null) {
            if (this.map != null) {
                this.map.remove(name);
                if (this.map.isEmpty()) {
                    this.map = null;
                }
            }
            return;
        }
        if (this.map == null) {
            this.map = new HashMap<>();
        }
        this.map.put(name, listeners);
    }

    /**
     * Returns every listener in the map: the unnamed ({@code null}-keyed)
     * ones as-is, followed by a proxy for each property-bound listener.
     *
     * @return an array of all listeners
     */
    public final synchronized L[] getListeners() {
        if (this.map == null) {
            return newArray(0);
        }
        List<L> collected = new ArrayList<>();
        L[] unnamed = this.map.get(null);
        if (unnamed != null) {
            Collections.addAll(collected, unnamed);
        }
        for (Entry<String, L[]> entry : this.map.entrySet()) {
            String key = entry.getKey();
            if (key == null) {
                continue;
            }
            for (L listener : entry.getValue()) {
                collected.add(newProxy(key, listener));
            }
        }
        return collected.toArray(newArray(collected.size()));
    }

    /**
     * Returns a defensive copy of the listeners bound to the named property;
     * never {@code null}.
     *
     * @param name the name of the property
     * @return an array of listeners for the named property
     */
    public final L[] getListeners(String name) {
        if (name != null) {
            L[] stored = get(name);
            if (stored != null) {
                return stored.clone();
            }
        }
        return newArray(0);
    }

    /**
     * Reports whether any listener would be notified for {@code name}:
     * either an unnamed listener exists, or one is bound to that property.
     *
     * @param name the name of the property
     * @return {@code true} if at least one listener exists or
     *         {@code false} otherwise
     */
    public final synchronized boolean hasListeners(String name) {
        if (this.map == null) {
            return false;
        }
        if (this.map.get(null) != null) {
            return true;
        }
        return (name != null) && (this.map.get(name) != null);
    }

    /**
     * Returns the live entry set (property name -> listener array), or an
     * empty set when no listeners are registered.
     *
     * @return a set of entries from the map
     */
    public final Set<Entry<String, L[]>> getEntries() {
        return (this.map != null)
                ? this.map.entrySet()
                : Collections.<Entry<String, L[]>>emptySet();
    }

    /**
     * Unwraps a proxy created by {@link #newProxy}; needed because the
     * default proxy class is not serializable.
     *
     * @return a real listener
     */
    public abstract L extract(L listener);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.tools;
import io.hops.merge.HttpConfig2;
import io.hops.merge.SecurityUtil2;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.namenode.NamenodeFsck;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.security.PrivilegedExceptionAction;
/**
* This class provides rudimentary checking of DFS volumes for errors and
* sub-optimal conditions.
* <p>The tool scans all files and directories, starting from an indicated
* root path. The following abnormal conditions are detected and handled:</p>
* <ul>
* <li>files with blocks that are completely missing from all datanodes.<br/>
* In this case the tool can perform one of the following actions:
* <ul>
* <li>none ({@link org.apache.hadoop.hdfs.server.namenode.NamenodeFsck#FIXING_NONE})</li>
* <li>move corrupted files to /lost+found directory on DFS
* ({@link org.apache.hadoop.hdfs.server.namenode.NamenodeFsck#FIXING_MOVE}).
* Remaining data blocks are saved as a
* block chains, representing longest consecutive series of valid blocks.</li>
* <li>delete corrupted files ({@link org.apache.hadoop.hdfs.server.namenode.NamenodeFsck#FIXING_DELETE})</li>
* </ul>
* </li>
* <li>detect files with under-replicated or over-replicated blocks</li>
* </ul>
* Additionally, the tool collects a detailed overall DFS statistics, and
* optionally can print detailed statistics on block locations and replication
* factors of each file.
* The tool also provides and option to filter open files during the scan.
*/
@InterfaceAudience.Private
public class DFSck extends Configured implements Tool {
  static {
    HdfsConfiguration.init();
  }

  /** Command line help text, shared by {@link #printUsage} and help parsing. */
  private static final String USAGE =
      "Usage: DFSck <path> " + "[-list-corruptfileblocks | " +
      "[-move | -delete | -openforwrite] " +
      "[-files [-blocks [-locations | -racks]]]]\n" +
      "\t<path>\tstart checking from this path\n" +
      "\t-move\tmove corrupted files to /lost+found\n" +
      "\t-delete\tdelete corrupted files\n" +
      "\t-files\tprint out files being checked\n" +
      "\t-openforwrite\tprint out files opened for write\n" +
      "\t-list-corruptfileblocks\tprint out list of missing " +
      "blocks and files they belong to\n" +
      "\t-blocks\tprint out block report\n" +
      "\t-locations\tprint out locations for every block\n" +
      "\t-racks\tprint out network topology for data-node locations\n" +
      "\t\tBy default fsck ignores files opened for write, " +
      "use -openforwrite to report such files. They are usually " +
      " tagged CORRUPT or HEALTHY depending on their block " +
      "allocation status";

  /** Caller identity appended to every fsck request as the "ugi" parameter. */
  private final UserGroupInformation ugi;
  /** Destination stream for the fsck report. */
  private final PrintStream out;

  /**
   * Filesystem checker writing its report to {@code System.out}.
   *
   * @param conf
   *     current Configuration
   */
  public DFSck(Configuration conf) throws IOException {
    this(conf, System.out);
  }

  /**
   * Filesystem checker.
   *
   * @param conf current Configuration
   * @param out  destination stream for the fsck report
   */
  public DFSck(Configuration conf, PrintStream out) throws IOException {
    super(conf);
    this.ugi = UserGroupInformation.getCurrentUser();
    this.out = out;
  }

  /**
   * Print fsck usage information
   */
  static void printUsage(PrintStream out) {
    out.println(USAGE + "\n");
    ToolRunner.printGenericCommandUsage(out);
  }

  /**
   * Runs the checker as the current user.
   *
   * @param args command line arguments
   * @return exit code from {@link #doWork(String[])}, or -1 on empty args
   */
  @Override
  public int run(final String[] args) throws IOException {
    if (args.length == 0) {
      printUsage(System.err);
      return -1;
    }
    try {
      return UserGroupInformation.getCurrentUser()
          .doAs(new PrivilegedExceptionAction<Integer>() {
            @Override
            public Integer run() throws Exception {
              return doWork(args);
            }
          });
    } catch (InterruptedException e) {
      // Wrap so the Tool contract (IOException) is preserved; cause is kept.
      throw new IOException(e);
    }
  }

  /*
   * To get the list, we need to call iteratively until the server says
   * there is no more left. The server hands back a "Cookie:" line whose
   * value is used as the startblockafter parameter of the next request.
   */
  private Integer listCorruptFileBlocks(String dir, String baseUrl)
      throws IOException {
    int errCode = -1;
    int numCorrupt = 0;
    int cookie = 0;
    final String noCorruptLine = "has no CORRUPT files";
    final String noMoreCorruptLine = "has no more CORRUPT files";
    final String cookiePrefix = "Cookie:";
    boolean allDone = false;
    while (!allDone) {
      // Single-threaded builder; no need for StringBuffer synchronization.
      final StringBuilder url = new StringBuilder(baseUrl);
      if (cookie > 0) {
        url.append("&startblockafter=").append(String.valueOf(cookie));
      }
      URL path = new URL(url.toString());
      URLConnection connection = SecurityUtil2.openSecureHttpConnection(path);
      InputStream stream = connection.getInputStream();
      BufferedReader input =
          new BufferedReader(new InputStreamReader(stream, "UTF-8"));
      try {
        String line = null;
        while ((line = input.readLine()) != null) {
          if (line.startsWith(cookiePrefix)) {
            try {
              cookie = Integer.parseInt(line.split("\t")[1]);
            } catch (Exception e) {
              // Malformed cookie line: treat the listing as finished.
              allDone = true;
              break;
            }
            continue;
          }
          if ((line.endsWith(noCorruptLine)) ||
              (line.endsWith(noMoreCorruptLine)) ||
              (line.endsWith(NamenodeFsck.NONEXISTENT_STATUS))) {
            allDone = true;
            break;
          }
          // Skip blank lines and the fsck banner/footer lines.
          if ((line.isEmpty()) || (line.startsWith("FSCK started by")) ||
              (line.startsWith("The filesystem under path"))) {
            continue;
          }
          numCorrupt++;
          if (numCorrupt == 1) {
            out.println(
                "The list of corrupt files under path '" + dir + "' are:");
          }
          out.println(line);
        }
      } finally {
        input.close();
      }
    }
    out.println("The filesystem under path '" + dir + "' has " + numCorrupt +
        " CORRUPT files");
    if (numCorrupt == 0) {
      errCode = 0;
    }
    return errCode;
  }

  /**
   * Derive the namenode http address from the current file system,
   * either default or as set by "-fs" in the generic options.
   *
   * @return Returns http address or null if failure.
   * @throws IOException
   *     if we can't determine the active NN address
   */
  private String getCurrentNamenodeAddress() throws IOException {
    Configuration conf = getConf();
    //get the filesystem object to verify it is an HDFS system
    FileSystem fs;
    try {
      fs = FileSystem.get(conf);
    } catch (IOException ioe) {
      System.err.println("FileSystem is inaccessible due to:\n" +
          StringUtils.stringifyException(ioe));
      return null;
    }
    if (!(fs instanceof DistributedFileSystem)) {
      System.err.println("FileSystem is " + fs.getUri());
      return null;
    }
    return DFSUtil.getInfoServer(fs, false);
  }

  /**
   * Builds the fsck query URL from the parsed arguments, sends it to the
   * namenode and relays the textual report to {@link #out}.
   *
   * @param args command line arguments (generic options already stripped)
   * @return 0 for healthy or nonexistent path, 1 for corruption, -1 on error
   */
  private int doWork(final String[] args) throws IOException {
    final StringBuilder url = new StringBuilder(HttpConfig2.getSchemePrefix());
    String namenodeAddress = getCurrentNamenodeAddress();
    if (namenodeAddress == null) {
      //Error message already output in {@link #getCurrentNamenodeAddress()}
      System.err.println("DFSck exiting.");
      // NOTE(review): returning 0 (success) here preserves the original
      // behavior even though the check did not run.
      return 0;
    }
    url.append(namenodeAddress);
    System.err.println("Connecting to namenode via " + url.toString());
    url.append("/fsck?ugi=").append(ugi.getShortUserName());
    String dir = null;
    boolean doListCorruptFileBlocks = false;
    for (String arg : args) {
      if (arg.equals("-move")) {
        url.append("&move=1");
      } else if (arg.equals("-delete")) {
        url.append("&delete=1");
      } else if (arg.equals("-files")) {
        url.append("&files=1");
      } else if (arg.equals("-openforwrite")) {
        url.append("&openforwrite=1");
      } else if (arg.equals("-blocks")) {
        url.append("&blocks=1");
      } else if (arg.equals("-locations")) {
        url.append("&locations=1");
      } else if (arg.equals("-racks")) {
        url.append("&racks=1");
      } else if (arg.equals("-list-corruptfileblocks")) {
        url.append("&listcorruptfileblocks=1");
        doListCorruptFileBlocks = true;
      } else if (!arg.startsWith("-")) {
        // First bare token is the path to check; a second one is an error.
        if (null == dir) {
          dir = arg;
        } else {
          System.err.println(
              "fsck: can only operate on one path at a time '" + arg +
                  "'");
          printUsage(System.err);
          return -1;
        }
      } else {
        System.err.println("fsck: Illegal option '" + arg + "'");
        printUsage(System.err);
        return -1;
      }
    }
    if (null == dir) {
      dir = "/";
    }
    url.append("&path=").append(URLEncoder.encode(dir, "UTF-8"));
    if (doListCorruptFileBlocks) {
      return listCorruptFileBlocks(dir, url.toString());
    }
    URL path = new URL(url.toString());
    URLConnection connection = SecurityUtil2.openSecureHttpConnection(path);
    InputStream stream = connection.getInputStream();
    BufferedReader input =
        new BufferedReader(new InputStreamReader(stream, "UTF-8"));
    String line = null;
    String lastLine = null;
    int errCode = -1;
    try {
      while ((line = input.readLine()) != null) {
        out.println(line);
        lastLine = line;
      }
    } finally {
      input.close();
    }
    // BUGFIX: the original dereferenced lastLine unconditionally and threw
    // NullPointerException when the server returned no output at all.
    if (lastLine != null) {
      if (lastLine.endsWith(NamenodeFsck.HEALTHY_STATUS)) {
        errCode = 0;
      } else if (lastLine.endsWith(NamenodeFsck.CORRUPT_STATUS)) {
        errCode = 1;
      } else if (lastLine.endsWith(NamenodeFsck.NONEXISTENT_STATUS)) {
        errCode = 0;
      }
    }
    return errCode;
  }

  public static void main(String[] args) throws Exception {
    // -files option is also used by GenericOptionsParser
    // Make sure that is not the first argument for fsck
    int res = -1;
    if ((args.length == 0) || ("-files".equals(args[0]))) {
      printUsage(System.err);
      ToolRunner.printGenericCommandUsage(System.err);
    } else if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
      res = 0;
    } else {
      res = ToolRunner.run(new DFSck(new HdfsConfiguration()), args);
    }
    System.exit(res);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.jstorm.task;
import backtype.storm.Config;
import backtype.storm.serialization.KryoTupleDeserializer;
import backtype.storm.task.TopologyContext;
import backtype.storm.tuple.Tuple;
import backtype.storm.utils.DisruptorQueue;
import backtype.storm.utils.WorkerClassLoader;
import com.alibaba.jstorm.callback.AsyncLoopThread;
import com.alibaba.jstorm.callback.RunnableCallback;
import com.alibaba.jstorm.client.ConfigExtension;
import com.alibaba.jstorm.common.metric.AsmGauge;
import com.alibaba.jstorm.common.metric.AsmHistogram;
import com.alibaba.jstorm.common.metric.QueueGauge;
import com.alibaba.jstorm.metric.*;
import com.alibaba.jstorm.utils.JStormUtils;
import com.alibaba.jstorm.utils.TimeUtils;
import com.esotericsoftware.kryo.KryoException;
import com.lmax.disruptor.EventHandler;
import com.lmax.disruptor.WaitStrategy;
import com.lmax.disruptor.dsl.ProducerType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
public class TaskReceiver {
    // Logger made final: it is assigned exactly once and never rebound.
    private static final Logger LOG = LoggerFactory.getLogger(TaskReceiver.class);

    protected Task task;
    protected final int taskId;
    /** Human-readable task name; used as log prefix and thread name stem. */
    protected final String idStr;
    protected TopologyContext topologyContext;
    /** Execute queues of tasks in the same worker, keyed by task id. */
    protected Map<Integer, DisruptorQueue> innerTaskTransfer;
    protected final boolean isDebugRecv;
    /** Inbound queue of serialized tuple payloads awaiting deserialization. */
    protected DisruptorQueue deserializeQueue;
    protected KryoTupleDeserializer deserializer;
    protected AsyncLoopThread deserializeThread;
    /** Histogram tracking per-message deserialization latency (microseconds). */
    protected AsmHistogram deserializeTimer;
    protected TaskStatus taskStatus;

    /**
     * Creates the receiver: builds the deserialize queue, spawns the
     * deserialize thread and registers queue/latency metrics and the
     * queue health check.
     */
    public TaskReceiver(Task task, int taskId, Map stormConf, TopologyContext topologyContext, Map<Integer, DisruptorQueue> innerTaskTransfer,
                        TaskStatus taskStatus, String taskName) {
        this.task = task;
        this.taskId = taskId;
        this.idStr = taskName;
        this.topologyContext = topologyContext;
        this.innerTaskTransfer = innerTaskTransfer;
        this.taskStatus = taskStatus;
        this.isDebugRecv = ConfigExtension.isTopologyDebugRecvTuple(stormConf);
        int queueSize = JStormUtils.parseInt(stormConf.get(Config.TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE), 256);
        WaitStrategy waitStrategy = (WaitStrategy) JStormUtils.createDisruptorWaitStrategy(stormConf);
        this.deserializeQueue = DisruptorQueue.mkInstance("TaskDeserialize", ProducerType.MULTI, queueSize, waitStrategy);
        setDeserializeThread();
        this.deserializer = new KryoTupleDeserializer(stormConf, topologyContext);
        String topologyId = topologyContext.getTopologyId();
        String component = topologyContext.getThisComponentId();
        deserializeTimer =
                (AsmHistogram) JStormMetrics.registerTaskMetric(
                        MetricUtils.taskMetricName(topologyId, component, taskId, MetricDef.DESERIALIZE_TIME, MetricType.HISTOGRAM), new AsmHistogram());
        QueueGauge deserializeQueueGauge = new QueueGauge(deserializeQueue, idStr, MetricDef.DESERIALIZE_QUEUE);
        JStormMetrics.registerTaskMetric(MetricUtils.taskMetricName(topologyId, component, taskId, MetricDef.DESERIALIZE_QUEUE, MetricType.GAUGE),
                new AsmGauge(deserializeQueueGauge));
        JStormHealthCheck.registerTaskHealthCheck(taskId, MetricDef.DESERIALIZE_QUEUE, deserializeQueueGauge);
    }

    public AsyncLoopThread getDeserializeThread() {
        return deserializeThread;
    }

    /** Starts the deserialize loop, feeding this task's own execute queue. */
    protected void setDeserializeThread() {
        this.deserializeThread = new AsyncLoopThread(new DeserializeRunnable(deserializeQueue, innerTaskTransfer.get(taskId)));
    }

    public DisruptorQueue getDeserializeQueue() {
        return deserializeQueue;
    }

    /**
     * Drains serialized payloads from the deserialize queue, turns them into
     * tuples and publishes them to the execute queue.
     */
    class DeserializeRunnable extends RunnableCallback implements EventHandler<Object> {
        DisruptorQueue deserializeQueue;
        DisruptorQueue exeQueue;

        DeserializeRunnable(DisruptorQueue deserializeQueue, DisruptorQueue exeQueue) {
            this.deserializeQueue = deserializeQueue;
            this.exeQueue = exeQueue;
        }

        @Override
        public String getThreadName() {
            return idStr + "-deserializer";
        }

        /**
         * Deserializes one payload. A one-byte payload is a control message
         * carrying a new task status; longer payloads are Kryo tuples.
         * Always records the elapsed time into {@link #deserializeTimer}.
         *
         * @param ser_msg raw bytes from the network layer (may be null/empty)
         * @return the deserialized tuple, or null for control/empty messages
         */
        protected Object deserialize(byte[] ser_msg) {
            long start = System.nanoTime();
            try {
                if (ser_msg == null) {
                    return null;
                }
                if (ser_msg.length == 0) {
                    return null;
                } else if (ser_msg.length == 1) {
                    byte newStatus = ser_msg[0];
                    LOG.info("Change task status as " + newStatus);
                    taskStatus.setStatus(newStatus);
                    return null;
                }
                // ser_msg.length > 1
                Tuple tuple = deserializer.deserialize(ser_msg);
                if (isDebugRecv) {
                    LOG.info(idStr + " receive " + tuple.toString());
                }
                return tuple;
            } catch (KryoException e) {
                // Corrupted payload is fatal for this thread; propagate.
                throw new RuntimeException(e);
            } catch (Throwable e) {
                if (!taskStatus.isShutdown()) {
                    LOG.error(idStr + " recv thread error " + JStormUtils.toPrintableString(ser_msg) + "\n", e);
                }
            } finally {
                long end = System.nanoTime();
                deserializeTimer.update((end - start) / TimeUtils.NS_PER_US);
            }
            return null;
        }

        @Override
        public void onEvent(Object event, long sequence, boolean endOfBatch) throws Exception {
            Object tuple = deserialize((byte[]) event);
            if (tuple != null) {
                exeQueue.publish(tuple);
            }
        }

        @Override
        public void preRun() {
            WorkerClassLoader.switchThreadContext();
        }

        @Override
        public void postRun() {
            WorkerClassLoader.restoreThreadContext();
        }

        @Override
        public void run() {
            deserializeQueue.consumerStarted();
            LOG.info("Successfully start recvThread of " + idStr);
            while (!taskStatus.isShutdown()) {
                try {
                    deserializeQueue.consumeBatchWhenAvailable(this);
                } catch (Throwable e) {
                    // Only log if we are not in the middle of shutting down.
                    if (!taskStatus.isShutdown()) {
                        LOG.error("Unknow exception ", e);
                    }
                }
            }
            task.unregisterDeserializeQueue();
            LOG.info("Successfully shutdown recvThread of " + idStr);
        }

        public Object getResult() {
            LOG.info("Begin to shutdown recvThread of " + idStr);
            return -1;
        }
    }
}
| |
/*
* Copyright 2016 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.deprecated.it;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import com.google.api.gax.paging.Page;
import com.google.cloud.compute.deprecated.Address;
import com.google.cloud.compute.deprecated.AddressId;
import com.google.cloud.compute.deprecated.AddressInfo;
import com.google.cloud.compute.deprecated.AttachedDisk;
import com.google.cloud.compute.deprecated.Compute;
import com.google.cloud.compute.deprecated.DeprecationStatus;
import com.google.cloud.compute.deprecated.Disk;
import com.google.cloud.compute.deprecated.DiskConfiguration;
import com.google.cloud.compute.deprecated.DiskId;
import com.google.cloud.compute.deprecated.DiskImageConfiguration;
import com.google.cloud.compute.deprecated.DiskInfo;
import com.google.cloud.compute.deprecated.DiskType;
import com.google.cloud.compute.deprecated.DiskTypeId;
import com.google.cloud.compute.deprecated.GlobalAddressId;
import com.google.cloud.compute.deprecated.Image;
import com.google.cloud.compute.deprecated.ImageConfiguration;
import com.google.cloud.compute.deprecated.ImageDiskConfiguration;
import com.google.cloud.compute.deprecated.ImageId;
import com.google.cloud.compute.deprecated.ImageInfo;
import com.google.cloud.compute.deprecated.Instance;
import com.google.cloud.compute.deprecated.InstanceId;
import com.google.cloud.compute.deprecated.InstanceInfo;
import com.google.cloud.compute.deprecated.License;
import com.google.cloud.compute.deprecated.LicenseId;
import com.google.cloud.compute.deprecated.MachineType;
import com.google.cloud.compute.deprecated.MachineTypeId;
import com.google.cloud.compute.deprecated.Network;
import com.google.cloud.compute.deprecated.NetworkConfiguration;
import com.google.cloud.compute.deprecated.NetworkId;
import com.google.cloud.compute.deprecated.NetworkInfo;
import com.google.cloud.compute.deprecated.NetworkInterface;
import com.google.cloud.compute.deprecated.Operation;
import com.google.cloud.compute.deprecated.Region;
import com.google.cloud.compute.deprecated.RegionAddressId;
import com.google.cloud.compute.deprecated.RegionOperationId;
import com.google.cloud.compute.deprecated.SchedulingOptions;
import com.google.cloud.compute.deprecated.Snapshot;
import com.google.cloud.compute.deprecated.SnapshotDiskConfiguration;
import com.google.cloud.compute.deprecated.SnapshotId;
import com.google.cloud.compute.deprecated.SnapshotInfo;
import com.google.cloud.compute.deprecated.StandardDiskConfiguration;
import com.google.cloud.compute.deprecated.StandardNetworkConfiguration;
import com.google.cloud.compute.deprecated.StorageImageConfiguration;
import com.google.cloud.compute.deprecated.SubnetNetworkConfiguration;
import com.google.cloud.compute.deprecated.Subnetwork;
import com.google.cloud.compute.deprecated.SubnetworkId;
import com.google.cloud.compute.deprecated.SubnetworkInfo;
import com.google.cloud.compute.deprecated.Zone;
import com.google.cloud.compute.deprecated.ZoneOperationId;
import com.google.cloud.compute.deprecated.testing.ResourceCleaner;
import com.google.cloud.compute.deprecated.testing.RemoteComputeHelper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeoutException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
public class ITComputeTest {
  // Region and zone every location-scoped call below targets.
  private static final String REGION = "us-central1";
  private static final String ZONE = "us-central1-a";
  private static final String DISK_TYPE = "local-ssd";
  private static final String MACHINE_TYPE = "f1-micro";
  private static final LicenseId LICENSE_ID = LicenseId.of("ubuntu-os-cloud", "ubuntu-1404-trusty");
  private static final String BASE_RESOURCE_NAME = RemoteComputeHelper.baseResourceName();
  private static final ImageId IMAGE_ID = ImageId.of("debian-cloud", "debian-8-jessie-v20160219");
  private static final String IMAGE_PROJECT = "debian-cloud";
  // Shared service handle and cleaner, initialized once in beforeClass().
  private static Compute compute;
  private static ResourceCleaner resourceCleaner;
  // Per-test wall-clock cap.
  @Rule
  public Timeout globalTimeout = Timeout.seconds(300);
@BeforeClass
public static void beforeClass() {
RemoteComputeHelper computeHelper = RemoteComputeHelper.create();
compute = computeHelper.getOptions().getService();
resourceCleaner = ResourceCleaner.create(compute);
}
  @AfterClass
  public static void tearDown() {
    // Delete every resource the tests registered with the cleaner.
    resourceCleaner.cleanUp();
  }
@Test
public void testGetDiskType() {
DiskType diskType = compute.getDiskType(ZONE, DISK_TYPE);
// assertNotNull(diskType.getGeneratedId());
assertEquals(ZONE, diskType.getDiskTypeId().getZone());
assertEquals(DISK_TYPE, diskType.getDiskTypeId().getType());
assertNotNull(diskType.getCreationTimestamp());
assertNotNull(diskType.getDescription());
assertNotNull(diskType.getValidDiskSize());
assertNotNull(diskType.getDefaultDiskSizeGb());
}
@Test
public void testGetDiskTypeWithSelectedFields() {
DiskType diskType = compute.getDiskType(ZONE, DISK_TYPE,
Compute.DiskTypeOption.fields(Compute.DiskTypeField.CREATION_TIMESTAMP));
// assertNotNull(diskType.getGeneratedId());
assertEquals(ZONE, diskType.getDiskTypeId().getZone());
assertEquals(DISK_TYPE, diskType.getDiskTypeId().getType());
assertNotNull(diskType.getCreationTimestamp());
assertNull(diskType.getDescription());
assertNull(diskType.getValidDiskSize());
assertNull(diskType.getDefaultDiskSizeGb());
}
@Test
public void testListDiskTypes() {
Page<DiskType> diskPage = compute.listDiskTypes(ZONE);
Iterator<DiskType> diskTypeIterator = diskPage.iterateAll().iterator();
assertTrue(diskTypeIterator.hasNext());
while (diskTypeIterator.hasNext()) {
DiskType diskType = diskTypeIterator.next();
// assertNotNull(diskType.getGeneratedId());
assertNotNull(diskType.getDiskTypeId());
assertEquals(ZONE, diskType.getDiskTypeId().getZone());
assertNotNull(diskType.getCreationTimestamp());
assertNotNull(diskType.getDescription());
assertNotNull(diskType.getValidDiskSize());
assertNotNull(diskType.getDefaultDiskSizeGb());
}
}
@Test
public void testListDiskTypesWithSelectedFields() {
Page<DiskType> diskPage = compute.listDiskTypes(ZONE,
Compute.DiskTypeListOption.fields(Compute.DiskTypeField.CREATION_TIMESTAMP));
Iterator<DiskType> diskTypeIterator = diskPage.iterateAll().iterator();
assertTrue(diskTypeIterator.hasNext());
while (diskTypeIterator.hasNext()) {
DiskType diskType = diskTypeIterator.next();
assertNull(diskType.getGeneratedId());
assertNotNull(diskType.getDiskTypeId());
assertEquals(ZONE, diskType.getDiskTypeId().getZone());
assertNotNull(diskType.getCreationTimestamp());
assertNull(diskType.getDescription());
assertNull(diskType.getValidDiskSize());
assertNull(diskType.getDefaultDiskSizeGb());
}
}
@Test
public void testListDiskTypesWithFilter() {
Page<DiskType> diskPage = compute.listDiskTypes(ZONE, Compute.DiskTypeListOption.filter(
Compute.DiskTypeFilter.equals(Compute.DiskTypeField.DEFAULT_DISK_SIZE_GB, 375)));
Iterator<DiskType> diskTypeIterator = diskPage.iterateAll().iterator();
assertTrue(diskTypeIterator.hasNext());
while (diskTypeIterator.hasNext()) {
DiskType diskType = diskTypeIterator.next();
// todo(mziccard): uncomment or remove once #695 is closed
// assertNotNull(diskType.getGeneratedId());
assertNotNull(diskType.getDiskTypeId());
assertEquals(ZONE, diskType.getDiskTypeId().getZone());
assertNotNull(diskType.getCreationTimestamp());
assertNotNull(diskType.getDescription());
assertNotNull(diskType.getValidDiskSize());
assertEquals(375, (long) diskType.getDefaultDiskSizeGb());
}
}
@Test
public void testAggregatedListDiskTypes() {
Page<DiskType> diskPage = compute.listDiskTypes();
Iterator<DiskType> diskTypeIterator = diskPage.iterateAll().iterator();
assertTrue(diskTypeIterator.hasNext());
while (diskTypeIterator.hasNext()) {
DiskType diskType = diskTypeIterator.next();
// assertNotNull(diskType.getGeneratedId());
assertNotNull(diskType.getDiskTypeId());
assertNotNull(diskType.getCreationTimestamp());
assertNotNull(diskType.getDescription());
assertNotNull(diskType.getValidDiskSize());
assertNotNull(diskType.getDefaultDiskSizeGb());
}
}
@Test
public void testAggregatedListDiskTypesWithFilter() {
Page<DiskType> diskPage = compute.listDiskTypes(Compute.DiskTypeAggregatedListOption.filter(
Compute.DiskTypeFilter.notEquals(Compute.DiskTypeField.DEFAULT_DISK_SIZE_GB, 375)));
Iterator<DiskType> diskTypeIterator = diskPage.iterateAll().iterator();
assertTrue(diskTypeIterator.hasNext());
while (diskTypeIterator.hasNext()) {
DiskType diskType = diskTypeIterator.next();
// todo(mziccard): uncomment or remove once #695 is closed
// assertNotNull(diskType.getGeneratedId());
assertNotNull(diskType.getDiskTypeId());
assertNotNull(diskType.getCreationTimestamp());
assertNotNull(diskType.getDescription());
assertNotNull(diskType.getValidDiskSize());
assertNotEquals(375, (long) diskType.getDefaultDiskSizeGb());
}
}
@Test
public void testGetMachineType() {
MachineType machineType = compute.getMachineType(ZONE, MACHINE_TYPE);
assertEquals(ZONE, machineType.getMachineTypeId().getZone());
assertEquals(MACHINE_TYPE, machineType.getMachineTypeId().getType());
assertNotNull(machineType.getGeneratedId());
assertNotNull(machineType.getCreationTimestamp());
assertNotNull(machineType.getDescription());
assertNotNull(machineType.getCpus());
assertNotNull(machineType.getMemoryMb());
assertNotNull(machineType.getMaximumPersistentDisks());
assertNotNull(machineType.getMaximumPersistentDisksSizeGb());
}
@Test
public void testGetMachineTypeWithSelectedFields() {
MachineType machineType = compute.getMachineType(ZONE, MACHINE_TYPE,
Compute.MachineTypeOption.fields(Compute.MachineTypeField.ID));
assertEquals(ZONE, machineType.getMachineTypeId().getZone());
assertEquals(MACHINE_TYPE, machineType.getMachineTypeId().getType());
assertNotNull(machineType.getGeneratedId());
assertNull(machineType.getCreationTimestamp());
assertNull(machineType.getDescription());
assertNull(machineType.getCpus());
assertNull(machineType.getMemoryMb());
assertNull(machineType.getMaximumPersistentDisks());
assertNull(machineType.getMaximumPersistentDisksSizeGb());
}
@Test
public void testListMachineTypes() {
Page<MachineType> machinePage = compute.listMachineTypes(ZONE);
Iterator<MachineType> machineTypeIterator = machinePage.iterateAll().iterator();
assertTrue(machineTypeIterator.hasNext());
while (machineTypeIterator.hasNext()) {
MachineType machineType = machineTypeIterator.next();
assertNotNull(machineType.getMachineTypeId());
assertEquals(ZONE, machineType.getMachineTypeId().getZone());
assertNotNull(machineType.getGeneratedId());
assertNotNull(machineType.getCreationTimestamp());
assertNotNull(machineType.getDescription());
assertNotNull(machineType.getCpus());
assertNotNull(machineType.getMemoryMb());
assertNotNull(machineType.getMaximumPersistentDisks());
assertNotNull(machineType.getMaximumPersistentDisksSizeGb());
}
}
@Test
public void testListMachineTypesWithSelectedFields() {
Page<MachineType> machinePage = compute.listMachineTypes(ZONE,
Compute.MachineTypeListOption.fields(Compute.MachineTypeField.CREATION_TIMESTAMP));
Iterator<MachineType> machineTypeIterator = machinePage.iterateAll().iterator();
assertTrue(machineTypeIterator.hasNext());
while (machineTypeIterator.hasNext()) {
MachineType machineType = machineTypeIterator.next();
assertNotNull(machineType.getMachineTypeId());
assertEquals(ZONE, machineType.getMachineTypeId().getZone());
assertNull(machineType.getGeneratedId());
assertNotNull(machineType.getCreationTimestamp());
assertNull(machineType.getDescription());
assertNull(machineType.getCpus());
assertNull(machineType.getMemoryMb());
assertNull(machineType.getMaximumPersistentDisks());
assertNull(machineType.getMaximumPersistentDisksSizeGb());
}
}
@Test
public void testListMachineTypesWithFilter() {
Page<MachineType> machinePage = compute.listMachineTypes(ZONE,
Compute.MachineTypeListOption.filter(
Compute.MachineTypeFilter.equals(Compute.MachineTypeField.GUEST_CPUS, 2)));
Iterator<MachineType> machineTypeIterator = machinePage.iterateAll().iterator();
assertTrue(machineTypeIterator.hasNext());
while (machineTypeIterator.hasNext()) {
MachineType machineType = machineTypeIterator.next();
assertNotNull(machineType.getMachineTypeId());
assertEquals(ZONE, machineType.getMachineTypeId().getZone());
assertNotNull(machineType.getGeneratedId());
assertNotNull(machineType.getCreationTimestamp());
assertNotNull(machineType.getDescription());
assertNotNull(machineType.getCpus());
assertEquals(2, (long) machineType.getCpus());
assertNotNull(machineType.getMemoryMb());
assertNotNull(machineType.getMaximumPersistentDisks());
assertNotNull(machineType.getMaximumPersistentDisksSizeGb());
}
}
@Test
public void testAggregatedListMachineTypes() {
Page<MachineType> machinePage = compute.listMachineTypes();
Iterator<MachineType> machineTypeIterator = machinePage.iterateAll().iterator();
assertTrue(machineTypeIterator.hasNext());
while (machineTypeIterator.hasNext()) {
MachineType machineType = machineTypeIterator.next();
assertNotNull(machineType.getMachineTypeId());
assertNotNull(machineType.getGeneratedId());
assertNotNull(machineType.getCreationTimestamp());
assertNotNull(machineType.getDescription());
assertNotNull(machineType.getCpus());
assertNotNull(machineType.getMemoryMb());
assertNotNull(machineType.getMaximumPersistentDisks());
assertNotNull(machineType.getMaximumPersistentDisksSizeGb());
}
}
@Test
public void testAggregatedListMachineTypesWithFilter() {
Page<MachineType> machinePage =
compute.listMachineTypes(Compute.MachineTypeAggregatedListOption.filter(
Compute.MachineTypeFilter.notEquals(Compute.MachineTypeField.GUEST_CPUS, 2)));
Iterator<MachineType> machineTypeIterator = machinePage.iterateAll().iterator();
assertTrue(machineTypeIterator.hasNext());
while (machineTypeIterator.hasNext()) {
MachineType machineType = machineTypeIterator.next();
assertNotNull(machineType.getMachineTypeId());
assertNotNull(machineType.getGeneratedId());
assertNotNull(machineType.getCreationTimestamp());
assertNotNull(machineType.getDescription());
assertNotNull(machineType.getCpus());
assertNotEquals(2, (long) machineType.getCpus());
assertNotNull(machineType.getMemoryMb());
assertNotNull(machineType.getMaximumPersistentDisks());
assertNotNull(machineType.getMaximumPersistentDisksSizeGb());
}
}
@Test
public void testGetLicense() {
License license = compute.getLicense(LICENSE_ID);
assertEquals(LICENSE_ID, license.getLicenseId());
assertNotNull(license.chargesUseFee());
}
@Test
public void testGetLicenseWithSelectedFields() {
License license = compute.getLicense(LICENSE_ID, Compute.LicenseOption.fields());
assertEquals(LICENSE_ID, license.getLicenseId());
assertNull(license.chargesUseFee());
}
@Test
public void testGetRegion() {
Region region = compute.getRegion(REGION);
assertEquals(REGION, region.getRegionId().getRegion());
assertNotNull(region.getDescription());
assertNotNull(region.getCreationTimestamp());
assertNotNull(region.getGeneratedId());
assertNotNull(region.getQuotas());
assertNotNull(region.getStatus());
assertNotNull(region.getZones());
}
@Test
public void testGetRegionWithSelectedFields() {
Region region = compute.getRegion(REGION, Compute.RegionOption.fields(Compute.RegionField.ID));
assertEquals(REGION, region.getRegionId().getRegion());
assertNotNull(region.getGeneratedId());
assertNull(region.getDescription());
assertNull(region.getCreationTimestamp());
assertNull(region.getQuotas());
assertNull(region.getStatus());
assertNull(region.getZones());
}
@Test
public void testListRegions() {
Page<Region> regionPage = compute.listRegions();
Iterator<Region> regionIterator = regionPage.iterateAll().iterator();
while (regionIterator.hasNext()) {
Region region = regionIterator.next();
assertNotNull(region.getRegionId());
assertNotNull(region.getDescription());
assertNotNull(region.getCreationTimestamp());
assertNotNull(region.getGeneratedId());
assertNotNull(region.getQuotas());
assertNotNull(region.getStatus());
assertNotNull(region.getZones());
}
}
@Test
public void testListRegionsWithSelectedFields() {
  // Listing with only the ID field projected: all other attributes unset.
  Page<Region> regionPage =
      compute.listRegions(Compute.RegionListOption.fields(Compute.RegionField.ID));
  for (Region region : regionPage.iterateAll()) {
    assertNotNull(region.getRegionId());
    assertNull(region.getDescription());
    assertNull(region.getCreationTimestamp());
    assertNotNull(region.getGeneratedId());
    assertNull(region.getQuotas());
    assertNull(region.getStatus());
    assertNull(region.getZones());
  }
}
@Test
public void testListRegionsWithFilter() {
  // Filtering by exact name must match exactly one region.
  Page<Region> regionPage = compute.listRegions(Compute.RegionListOption.filter(
      Compute.RegionFilter.equals(Compute.RegionField.NAME, REGION)));
  Iterator<Region> matches = regionPage.iterateAll().iterator();
  assertEquals(REGION, matches.next().getRegionId().getRegion());
  assertFalse(matches.hasNext());
}
@Test
public void testGetZone() {
  // A plain get should populate every zone attribute.
  Zone remoteZone = compute.getZone(ZONE);
  assertEquals(ZONE, remoteZone.getZoneId().getZone());
  assertNotNull(remoteZone.getGeneratedId());
  assertNotNull(remoteZone.getCreationTimestamp());
  assertNotNull(remoteZone.getDescription());
  assertNotNull(remoteZone.getStatus());
  assertNotNull(remoteZone.getRegion());
}
@Test
public void testGetZoneWithSelectedFields() {
  // Requesting only the ID field: identity is present, everything else unset.
  Zone remoteZone = compute.getZone(ZONE, Compute.ZoneOption.fields(Compute.ZoneField.ID));
  assertEquals(ZONE, remoteZone.getZoneId().getZone());
  assertNotNull(remoteZone.getGeneratedId());
  assertNull(remoteZone.getCreationTimestamp());
  assertNull(remoteZone.getDescription());
  assertNull(remoteZone.getStatus());
  assertNull(remoteZone.getRegion());
}
@Test
public void testListZones() {
  // Every zone returned by an unfiltered list should be fully populated.
  Page<Zone> zonePage = compute.listZones();
  for (Zone zone : zonePage.iterateAll()) {
    assertNotNull(zone.getZoneId());
    assertNotNull(zone.getGeneratedId());
    assertNotNull(zone.getCreationTimestamp());
    assertNotNull(zone.getDescription());
    assertNotNull(zone.getStatus());
    assertNotNull(zone.getRegion());
  }
}
@Test
public void testListZonesWithSelectedFields() {
  // Projecting only CREATION_TIMESTAMP: that field and the id are populated,
  // everything else stays unset.
  Page<Zone> zonePage = compute.listZones(
      Compute.ZoneListOption.fields(Compute.ZoneField.CREATION_TIMESTAMP));
  for (Zone zone : zonePage.iterateAll()) {
    assertNotNull(zone.getZoneId());
    assertNull(zone.getGeneratedId());
    assertNotNull(zone.getCreationTimestamp());
    assertNull(zone.getDescription());
    assertNull(zone.getStatus());
    assertNull(zone.getRegion());
  }
}
@Test
public void testListZonesWithFilter() {
  // Filtering by exact name must match exactly one zone.
  Page<Zone> zonePage = compute.listZones(
      Compute.ZoneListOption.filter(Compute.ZoneFilter.equals(Compute.ZoneField.NAME, ZONE)));
  Iterator<Zone> matches = zonePage.iterateAll().iterator();
  assertEquals(ZONE, matches.next().getZoneId().getZone());
  assertFalse(matches.hasNext());
}
@Test
public void testListGlobalOperations() {
  // Every global operation from a plain list call carries the standard
  // identifying fields.
  Page<Operation> operationPage = compute.listGlobalOperations();
  for (Operation operation : operationPage.iterateAll()) {
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    // todo(mziccard): uncomment or remove once #727 is closed
    // assertNotNull(operation.getCreationTimestamp());
    assertNotNull(operation.getOperationType());
    assertNotNull(operation.getStatus());
    assertNotNull(operation.getUser());
  }
}
@Test
public void testListGlobalOperationsWithSelectedFields() {
  // With only the ID field requested, identity fields are populated and every
  // other operation attribute must come back unset.
  Page<Operation> operationPage =
      compute.listGlobalOperations(Compute.OperationListOption.fields(Compute.OperationField.ID));
  Iterator<Operation> operationIterator = operationPage.iterateAll().iterator();
  while (operationIterator.hasNext()) {
    Operation operation = operationIterator.next();
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    // Fixed: getOperationType() was asserted twice; the duplicate is removed.
    assertNull(operation.getOperationType());
    assertNull(operation.getTargetLink());
    assertNull(operation.getTargetId());
    assertNull(operation.getStatus());
    assertNull(operation.getStatusMessage());
    assertNull(operation.getUser());
    assertNull(operation.getProgress());
    assertNull(operation.getDescription());
    assertNull(operation.getInsertTime());
    assertNull(operation.getStartTime());
    assertNull(operation.getEndTime());
    assertNull(operation.getWarnings());
    assertNull(operation.getHttpErrorMessage());
  }
}
@Test
public void testListGlobalOperationsWithFilter() {
  // Filtering on STATUS == DONE: every returned operation must be DONE.
  Page<Operation> operationPage = compute.listGlobalOperations(Compute.OperationListOption.filter(
      Compute.OperationFilter.equals(Compute.OperationField.STATUS, "DONE")));
  for (Operation operation : operationPage.iterateAll()) {
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    // todo(mziccard): uncomment or remove once #727 is closed
    // assertNotNull(operation.getCreationTimestamp());
    assertNotNull(operation.getOperationType());
    assertEquals(Operation.Status.DONE, operation.getStatus());
    assertNotNull(operation.getUser());
  }
}
@Test
public void testListRegionOperations() {
  // Region-scoped operations must all belong to the requested region.
  Page<Operation> operationPage = compute.listRegionOperations(REGION);
  for (Operation operation : operationPage.iterateAll()) {
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    assertEquals(REGION, operation.<RegionOperationId>getOperationId().getRegion());
    // todo(mziccard): uncomment or remove once #727 is closed
    // assertNotNull(operation.getCreationTimestamp());
    assertNotNull(operation.getOperationType());
    assertNotNull(operation.getStatus());
    assertNotNull(operation.getUser());
  }
}
@Test
public void testListRegionOperationsWithSelectedFields() {
  // With only the ID field requested, identity fields (and the region in the
  // operation id) are populated; every other attribute must be unset.
  Page<Operation> operationPage = compute.listRegionOperations(REGION,
      Compute.OperationListOption.fields(Compute.OperationField.ID));
  Iterator<Operation> operationIterator = operationPage.iterateAll().iterator();
  while (operationIterator.hasNext()) {
    Operation operation = operationIterator.next();
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    assertEquals(REGION, operation.<RegionOperationId>getOperationId().getRegion());
    // Fixed: getOperationType() was asserted twice; the duplicate is removed.
    assertNull(operation.getOperationType());
    assertNull(operation.getTargetLink());
    assertNull(operation.getTargetId());
    assertNull(operation.getStatus());
    assertNull(operation.getStatusMessage());
    assertNull(operation.getUser());
    assertNull(operation.getProgress());
    assertNull(operation.getDescription());
    assertNull(operation.getInsertTime());
    assertNull(operation.getStartTime());
    assertNull(operation.getEndTime());
    assertNull(operation.getWarnings());
    assertNull(operation.getHttpErrorMessage());
  }
}
@Test
public void testListRegionOperationsWithFilter() {
  // Filtering region operations on STATUS == DONE.
  Page<Operation> operationPage = compute.listRegionOperations(REGION,
      Compute.OperationListOption.filter(Compute.OperationFilter.equals(
          Compute.OperationField.STATUS, "DONE")));
  for (Operation operation : operationPage.iterateAll()) {
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    assertEquals(REGION, operation.<RegionOperationId>getOperationId().getRegion());
    // todo(mziccard): uncomment or remove once #727 is closed
    // assertNotNull(operation.getCreationTimestamp());
    assertNotNull(operation.getOperationType());
    assertEquals(Operation.Status.DONE, operation.getStatus());
    assertNotNull(operation.getUser());
  }
}
@Test
public void testListZoneOperations() {
  // Zone-scoped operations must all belong to the requested zone.
  Page<Operation> operationPage = compute.listZoneOperations(ZONE);
  for (Operation operation : operationPage.iterateAll()) {
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    assertEquals(ZONE, operation.<ZoneOperationId>getOperationId().getZone());
    // todo(mziccard): uncomment or remove once #727 is closed
    // assertNotNull(operation.getCreationTimestamp());
    assertNotNull(operation.getOperationType());
    assertNotNull(operation.getStatus());
    assertNotNull(operation.getUser());
  }
}
@Test
public void testListZoneOperationsWithSelectedFields() {
  // With only the ID field requested, identity fields (and the zone in the
  // operation id) are populated; every other attribute must be unset.
  Page<Operation> operationPage = compute.listZoneOperations(ZONE,
      Compute.OperationListOption.fields(Compute.OperationField.ID));
  Iterator<Operation> operationIterator = operationPage.iterateAll().iterator();
  while (operationIterator.hasNext()) {
    Operation operation = operationIterator.next();
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    assertEquals(ZONE, operation.<ZoneOperationId>getOperationId().getZone());
    // Fixed: getOperationType() was asserted twice; the duplicate is removed.
    assertNull(operation.getOperationType());
    assertNull(operation.getTargetLink());
    assertNull(operation.getTargetId());
    assertNull(operation.getStatus());
    assertNull(operation.getStatusMessage());
    assertNull(operation.getUser());
    assertNull(operation.getProgress());
    assertNull(operation.getDescription());
    assertNull(operation.getInsertTime());
    assertNull(operation.getStartTime());
    assertNull(operation.getEndTime());
    assertNull(operation.getWarnings());
    assertNull(operation.getHttpErrorMessage());
  }
}
@Test
public void testListZoneOperationsWithFilter() {
  // Filtering zone operations on STATUS == DONE.
  Page<Operation> operationPage = compute.listZoneOperations(ZONE,
      Compute.OperationListOption.filter(Compute.OperationFilter.equals(
          Compute.OperationField.STATUS, "DONE")));
  for (Operation operation : operationPage.iterateAll()) {
    assertNotNull(operation.getGeneratedId());
    assertNotNull(operation.getOperationId());
    assertEquals(ZONE, operation.<ZoneOperationId>getOperationId().getZone());
    // todo(mziccard): uncomment or remove once #727 is closed
    // assertNotNull(operation.getCreationTimestamp());
    assertNotNull(operation.getOperationType());
    assertEquals(Operation.Status.DONE, operation.getStatus());
    assertNotNull(operation.getUser());
  }
}
@Test
// Lifecycle test for a region-scoped address: create, get (with and without
// field projection), delete, then verify it is gone.
public void testCreateGetAndDeleteRegionAddress() throws InterruptedException, TimeoutException {
String name = BASE_RESOURCE_NAME + "create-and-get-region-address";
AddressId addressId = RegionAddressId.of(REGION, name);
AddressInfo addressInfo = AddressInfo.of(addressId);
Operation operation = compute.create(addressInfo);
// Block until the create operation completes before reading the resource.
operation.waitFor();
// test get
Address remoteAddress = compute.getAddress(addressId);
// Register for cleanup so the address is removed even if an assertion fails
// before the explicit delete below.
resourceCleaner.add(addressId);
assertNotNull(remoteAddress);
assertTrue(remoteAddress.getAddressId() instanceof RegionAddressId);
assertEquals(REGION, remoteAddress.<RegionAddressId>getAddressId().getRegion());
assertEquals(addressId.getAddress(), remoteAddress.getAddressId().getAddress());
assertNotNull(remoteAddress.getAddress());
assertNotNull(remoteAddress.getCreationTimestamp());
assertNotNull(remoteAddress.getGeneratedId());
assertNotNull(remoteAddress.getStatus());
// test get with selected fields
// An empty field projection: only identity information is populated.
remoteAddress = compute.getAddress(addressId, Compute.AddressOption.fields());
assertNotNull(remoteAddress);
assertTrue(remoteAddress.getAddressId() instanceof RegionAddressId);
assertEquals(REGION, remoteAddress.<RegionAddressId>getAddressId().getRegion());
assertEquals(addressId.getAddress(), remoteAddress.getAddressId().getAddress());
assertNull(remoteAddress.getAddress());
assertNull(remoteAddress.getCreationTimestamp());
assertNull(remoteAddress.getGeneratedId());
// Delete, wait for completion, and confirm a subsequent get returns null.
operation = remoteAddress.delete();
operation.waitFor();
resourceCleaner.remove(addressId);
assertNull(compute.getAddress(addressId));
}
@Test
// Creates two region addresses, then verifies list behavior with a name
// filter, both with full resources and with a field projection.
public void testListRegionAddresses() throws InterruptedException, TimeoutException {
String prefix = BASE_RESOURCE_NAME + "list-region-address";
String[] addressNames = {prefix + "1", prefix + "2"};
AddressId firstAddressId = RegionAddressId.of(REGION, addressNames[0]);
AddressId secondAddressId = RegionAddressId.of(REGION, addressNames[1]);
Operation firstOperation = compute.create(AddressInfo.of(firstAddressId));
Operation secondOperation = compute.create(AddressInfo.of(secondAddressId));
// Wait for each create to finish before registering the id for cleanup.
firstOperation.waitFor();
resourceCleaner.add(firstAddressId);
secondOperation.waitFor();
resourceCleaner.add(secondAddressId);
Set<String> addressSet = ImmutableSet.copyOf(addressNames);
// test list
// The filter value is a regex matching both created names (prefix + digit).
Compute.AddressFilter filter =
Compute.AddressFilter.equals(Compute.AddressField.NAME, prefix + "\\d");
Page<Address> addressPage =
compute.listRegionAddresses(REGION, Compute.AddressListOption.filter(filter));
Iterator<Address> addressIterator = addressPage.iterateAll().iterator();
int count = 0;
while (addressIterator.hasNext()) {
Address address = addressIterator.next();
assertNotNull(address.getAddressId());
assertTrue(address.getAddressId() instanceof RegionAddressId);
assertEquals(REGION, address.<RegionAddressId>getAddressId().getRegion());
assertTrue(addressSet.contains(address.getAddressId().getAddress()));
assertNotNull(address.getAddress());
assertNotNull(address.getCreationTimestamp());
assertNotNull(address.getGeneratedId());
count++;
}
// Exactly the two addresses created above should match the filter.
assertEquals(2, count);
// test list with selected fields
count = 0;
addressPage = compute.listRegionAddresses(REGION, Compute.AddressListOption.filter(filter),
Compute.AddressListOption.fields(Compute.AddressField.ADDRESS));
addressIterator = addressPage.iterateAll().iterator();
while (addressIterator.hasNext()) {
Address address = addressIterator.next();
assertTrue(address.getAddressId() instanceof RegionAddressId);
assertEquals(REGION, address.<RegionAddressId>getAddressId().getRegion());
assertTrue(addressSet.contains(address.getAddressId().getAddress()));
// Only ADDRESS was projected, so the remaining fields must be unset.
assertNotNull(address.getAddress());
assertNull(address.getCreationTimestamp());
assertNull(address.getGeneratedId());
assertNull(address.getStatus());
assertNull(address.getUsage());
count++;
}
assertEquals(2, count);
}
@Test
public void testAggregatedListAddresses() throws InterruptedException, TimeoutException {
  // The aggregated list spans both region-scoped and global addresses, so one
  // of each is created here and both must show up under the shared name filter.
  String prefix = BASE_RESOURCE_NAME + "aggregated-list-address";
  String[] addressNames = {prefix + "1", prefix + "2"};
  AddressId firstAddressId = RegionAddressId.of(REGION, addressNames[0]);
  // Fixed: this id was built with GlobalAddressId.of(REGION, addressNames[1]),
  // which binds REGION to the *project* parameter of the two-arg overload. A
  // global address id needs only the address name.
  AddressId secondAddressId = GlobalAddressId.of(addressNames[1]);
  Operation firstOperation = compute.create(AddressInfo.of(firstAddressId));
  Operation secondOperation = compute.create(AddressInfo.of(secondAddressId));
  // Wait for each create to finish before registering the id for cleanup.
  firstOperation.waitFor();
  resourceCleaner.add(firstAddressId);
  secondOperation.waitFor();
  resourceCleaner.add(secondAddressId);
  Set<String> addressSet = ImmutableSet.copyOf(addressNames);
  // The filter value is a regex matching both created names (prefix + digit).
  Compute.AddressFilter filter =
      Compute.AddressFilter.equals(Compute.AddressField.NAME, prefix + "\\d");
  Page<Address> addressPage =
      compute.listAddresses(Compute.AddressAggregatedListOption.filter(filter));
  Iterator<Address> addressIterator = addressPage.iterateAll().iterator();
  int count = 0;
  while (addressIterator.hasNext()) {
    Address address = addressIterator.next();
    assertNotNull(address.getAddressId());
    assertTrue(addressSet.contains(address.getAddressId().getAddress()));
    assertNotNull(address.getAddress());
    assertNotNull(address.getCreationTimestamp());
    assertNotNull(address.getGeneratedId());
    count++;
  }
  // Exactly the two addresses created above should match the filter.
  assertEquals(2, count);
}
@Test
// Lifecycle test for a global address: create, get (with and without field
// projection), delete, then verify it is gone.
public void testCreateGetAndDeleteGlobalAddress() throws InterruptedException, TimeoutException {
String name = BASE_RESOURCE_NAME + "create-and-get-global-address";
AddressId addressId = GlobalAddressId.of(name);
AddressInfo addressInfo = AddressInfo.of(addressId);
Operation operation = compute.create(addressInfo);
// Block until the create operation completes before reading the resource.
operation.waitFor();
// test get
Address remoteAddress = compute.getAddress(addressId);
// Register for cleanup in case an assertion fails before the delete below.
resourceCleaner.add(addressId);
assertNotNull(remoteAddress);
assertTrue(remoteAddress.getAddressId() instanceof GlobalAddressId);
assertEquals(addressId.getAddress(), remoteAddress.getAddressId().getAddress());
assertNotNull(remoteAddress.getAddress());
assertNotNull(remoteAddress.getCreationTimestamp());
assertNotNull(remoteAddress.getGeneratedId());
assertNotNull(remoteAddress.getStatus());
// test get with selected fields
// An empty field projection: only identity information is populated.
remoteAddress = compute.getAddress(addressId, Compute.AddressOption.fields());
assertNotNull(remoteAddress);
assertTrue(remoteAddress.getAddressId() instanceof GlobalAddressId);
assertEquals(addressId.getAddress(), remoteAddress.getAddressId().getAddress());
assertNull(remoteAddress.getAddress());
assertNull(remoteAddress.getCreationTimestamp());
assertNull(remoteAddress.getGeneratedId());
// Delete, wait for completion, and confirm a subsequent get returns null.
operation = remoteAddress.delete();
operation.waitFor();
resourceCleaner.remove(addressId);
assertNull(compute.getAddress(addressId));
}
@Test
// Creates two global addresses, then verifies list behavior with a name
// filter, both with full resources and with a field projection.
public void testListGlobalAddresses() throws InterruptedException, TimeoutException {
String prefix = BASE_RESOURCE_NAME + "list-global-address";
String[] addressNames = {prefix + "1", prefix + "2"};
AddressId firstAddressId = GlobalAddressId.of(addressNames[0]);
AddressId secondAddressId = GlobalAddressId.of(addressNames[1]);
Operation firstOperation = compute.create(AddressInfo.of(firstAddressId));
Operation secondOperation = compute.create(AddressInfo.of(secondAddressId));
// Wait for each create to finish before registering the id for cleanup.
firstOperation.waitFor();
resourceCleaner.add(firstAddressId);
secondOperation.waitFor();
resourceCleaner.add(secondAddressId);
Set<String> addressSet = ImmutableSet.copyOf(addressNames);
// test list
// The filter value is a regex matching both created names (prefix + digit).
Compute.AddressFilter filter =
Compute.AddressFilter.equals(Compute.AddressField.NAME, prefix + "\\d");
Page<Address> addressPage =
compute.listGlobalAddresses(Compute.AddressListOption.filter(filter));
Iterator<Address> addressIterator = addressPage.iterateAll().iterator();
int count = 0;
while (addressIterator.hasNext()) {
Address address = addressIterator.next();
assertNotNull(address.getAddressId());
assertTrue(address.getAddressId() instanceof GlobalAddressId);
assertTrue(addressSet.contains(address.getAddressId().getAddress()));
assertNotNull(address.getAddress());
assertNotNull(address.getCreationTimestamp());
assertNotNull(address.getGeneratedId());
count++;
}
// Exactly the two addresses created above should match the filter.
assertEquals(2, count);
// test list with selected fields
count = 0;
addressPage = compute.listGlobalAddresses(Compute.AddressListOption.filter(filter),
Compute.AddressListOption.fields(Compute.AddressField.ADDRESS));
addressIterator = addressPage.iterateAll().iterator();
while (addressIterator.hasNext()) {
Address address = addressIterator.next();
assertTrue(address.getAddressId() instanceof GlobalAddressId);
assertTrue(addressSet.contains(address.getAddressId().getAddress()));
// Only ADDRESS was projected, so the remaining fields must be unset.
assertNotNull(address.getAddress());
assertNull(address.getCreationTimestamp());
assertNull(address.getGeneratedId());
assertNull(address.getStatus());
assertNull(address.getUsage());
count++;
}
assertEquals(2, count);
}
@Test
// Lifecycle test for a standard (blank) disk: create at 100GB, get, resize to
// 200GB, get again with a field projection, then delete.
public void testCreateGetResizeAndDeleteStandardDisk()
throws InterruptedException, TimeoutException {
String name = BASE_RESOURCE_NAME + "create-and-get-standard-disk";
DiskId diskId = DiskId.of(ZONE, name);
DiskInfo diskInfo =
DiskInfo.of(diskId, StandardDiskConfiguration.of(DiskTypeId.of(ZONE, "pd-ssd"), 100L));
Operation operation = compute.create(diskInfo);
// Block until the create operation completes before reading the resource.
operation.waitFor();
// test get
Disk remoteDisk = compute.getDisk(diskId);
// Register for cleanup in case an assertion fails before the delete below.
resourceCleaner.add(diskId);
assertNotNull(remoteDisk);
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertEquals(diskId.getDisk(), remoteDisk.getDiskId().getDisk());
assertNotNull(remoteDisk.getCreationTimestamp());
assertNotNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof StandardDiskConfiguration);
StandardDiskConfiguration remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(100L, (long) remoteConfiguration.getSizeGb());
assertEquals("pd-ssd", remoteConfiguration.getDiskType().getType());
assertEquals(DiskConfiguration.Type.STANDARD, remoteConfiguration.getType());
// A never-attached disk has no attach/detach timestamps.
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
operation = remoteDisk.resize(200L);
operation.waitFor();
// test resize and get with selected fields
// Only SIZE_GB is projected: the resized size is visible, timestamps/id are not.
remoteDisk = compute.getDisk(diskId, Compute.DiskOption.fields(Compute.DiskField.SIZE_GB));
assertNotNull(remoteDisk);
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertEquals(diskId.getDisk(), remoteDisk.getDiskId().getDisk());
assertNull(remoteDisk.getCreationTimestamp());
assertNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof StandardDiskConfiguration);
remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(200L, (long) remoteConfiguration.getSizeGb());
assertEquals("pd-ssd", remoteConfiguration.getDiskType().getType());
assertEquals(DiskConfiguration.Type.STANDARD, remoteConfiguration.getType());
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
// Delete, wait for completion, and confirm a subsequent get returns null.
operation = remoteDisk.delete();
operation.waitFor();
resourceCleaner.remove(diskId);
assertNull(compute.getDisk(diskId));
}
@Test
// Lifecycle test for an image-backed disk: create from IMAGE_ID, get (with and
// without field projection), then delete.
public void testCreateGetAndDeleteImageDisk() throws InterruptedException, TimeoutException {
String name = BASE_RESOURCE_NAME + "create-and-get-image-disk";
DiskId diskId = DiskId.of(ZONE, name);
DiskInfo diskInfo = DiskInfo.of(diskId, ImageDiskConfiguration.of(IMAGE_ID));
Operation operation = compute.create(diskInfo);
// Block until the create operation completes before reading the resource.
operation.waitFor();
// test get
Disk remoteDisk = compute.getDisk(diskId);
// Register for cleanup in case an assertion fails before the delete below.
resourceCleaner.add(diskId);
assertNotNull(remoteDisk);
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertEquals(diskId.getDisk(), remoteDisk.getDiskId().getDisk());
assertEquals(DiskInfo.CreationStatus.READY, remoteDisk.getCreationStatus());
assertNotNull(remoteDisk.getCreationTimestamp());
assertNotNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof ImageDiskConfiguration);
ImageDiskConfiguration remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(IMAGE_ID, remoteConfiguration.getSourceImage());
assertNotNull(remoteConfiguration.getSourceImageId());
assertEquals(DiskConfiguration.Type.IMAGE, remoteConfiguration.getType());
// No explicit size was requested, so the size comes from the source image.
assertNotNull(remoteConfiguration.getSizeGb());
assertEquals("pd-standard", remoteConfiguration.getDiskType().getType());
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
// test get with selected fields
// An empty field projection: identity and source image remain, the rest unset.
remoteDisk = compute.getDisk(diskId, Compute.DiskOption.fields());
assertNotNull(remoteDisk);
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertEquals(diskId.getDisk(), remoteDisk.getDiskId().getDisk());
assertNull(remoteDisk.getCreationTimestamp());
assertNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof ImageDiskConfiguration);
remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(IMAGE_ID, remoteConfiguration.getSourceImage());
assertNull(remoteConfiguration.getSourceImageId());
assertEquals(DiskConfiguration.Type.IMAGE, remoteConfiguration.getType());
assertNull(remoteConfiguration.getSizeGb());
assertEquals("pd-standard", remoteConfiguration.getDiskType().getType());
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
// Delete, wait for completion, and confirm a subsequent get returns null.
operation = remoteDisk.delete();
operation.waitFor();
resourceCleaner.remove(diskId);
assertNull(compute.getDisk(diskId));
}
@Test
// End-to-end flow: create a source disk, snapshot it, read the snapshot (with
// and without field projection), delete the source disk, restore a new disk
// from the snapshot, verify it, then delete both disk and snapshot.
public void testCreateGetAndDeleteSnapshotAndSnapshotDisk()
throws InterruptedException, TimeoutException {
String diskName = BASE_RESOURCE_NAME + "create-and-get-snapshot-disk1";
String snapshotDiskName = BASE_RESOURCE_NAME + "create-and-get-snapshot-disk2";
DiskId diskId = DiskId.of(ZONE, diskName);
DiskId snapshotDiskId = DiskId.of(ZONE, snapshotDiskName);
String snapshotName = BASE_RESOURCE_NAME + "create-and-get-snapshot";
DiskInfo diskInfo =
DiskInfo.of(diskId, StandardDiskConfiguration.of(DiskTypeId.of(ZONE, "pd-ssd"), 100L));
Operation operation = compute.create(diskInfo);
// Block until the source disk exists before snapshotting it.
operation.waitFor();
Disk remoteDisk = compute.getDisk(diskId);
resourceCleaner.add(diskId);
operation = remoteDisk.createSnapshot(snapshotName);
operation.waitFor();
// test get snapshot with selected fields
// Only CREATION_TIMESTAMP is projected: identity + timestamp, the rest unset.
Snapshot snapshot = compute.getSnapshot(snapshotName,
Compute.SnapshotOption.fields(Compute.SnapshotField.CREATION_TIMESTAMP));
resourceCleaner.add(snapshot.getSnapshotId());
assertNull(snapshot.getGeneratedId());
assertNotNull(snapshot.getSnapshotId());
assertNotNull(snapshot.getCreationTimestamp());
assertNull(snapshot.getDescription());
assertNull(snapshot.getStatus());
assertNull(snapshot.getDiskSizeGb());
assertNull(snapshot.getLicenses());
assertNull(snapshot.getSourceDisk());
assertNull(snapshot.getSourceDiskId());
assertNull(snapshot.getStorageBytes());
assertNull(snapshot.getStorageBytesStatus());
// test get snapshot
// A full get: every snapshot attribute should now be populated.
snapshot = compute.getSnapshot(snapshotName);
assertNotNull(snapshot.getGeneratedId());
assertNotNull(snapshot.getSnapshotId());
assertNotNull(snapshot.getCreationTimestamp());
assertNotNull(snapshot.getStatus());
assertEquals(100L, (long) snapshot.getDiskSizeGb());
assertEquals(diskName, snapshot.getSourceDisk().getDisk());
assertNotNull(snapshot.getSourceDiskId());
assertNotNull(snapshot.getStorageBytes());
assertNotNull(snapshot.getStorageBytesStatus());
// NOTE(review): the delete operation is not waited on before the
// snapshot-backed disk is created below — presumably safe because only the
// snapshot (not the source disk) is needed from here on; confirm.
remoteDisk.delete();
resourceCleaner.remove(diskId);
diskInfo =
DiskInfo.of(snapshotDiskId, SnapshotDiskConfiguration.of(SnapshotId.of(snapshotName)));
operation = compute.create(diskInfo);
operation.waitFor();
// test get disk
Disk restored from the snapshot: size and source-snapshot metadata carry over.
remoteDisk = compute.getDisk(snapshotDiskId);
resourceCleaner.add(snapshotDiskId);
assertNotNull(remoteDisk);
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertEquals(snapshotDiskId.getDisk(), remoteDisk.getDiskId().getDisk());
assertEquals(DiskInfo.CreationStatus.READY, remoteDisk.getCreationStatus());
assertNotNull(remoteDisk.getCreationTimestamp());
assertNotNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof SnapshotDiskConfiguration);
SnapshotDiskConfiguration remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(DiskConfiguration.Type.SNAPSHOT, remoteConfiguration.getType());
assertEquals(snapshotName, remoteConfiguration.getSourceSnapshot().getSnapshot());
assertEquals(100L, (long) remoteConfiguration.getSizeGb());
assertEquals("pd-standard", remoteConfiguration.getDiskType().getType());
assertNotNull(remoteConfiguration.getSourceSnapshotId());
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
// test get disk with selected fields
// Empty projection: identity and source snapshot remain, the rest unset.
remoteDisk = compute.getDisk(snapshotDiskId, Compute.DiskOption.fields());
assertNotNull(remoteDisk);
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertEquals(snapshotDiskId.getDisk(), remoteDisk.getDiskId().getDisk());
assertNull(remoteDisk.getCreationStatus());
assertNull(remoteDisk.getCreationTimestamp());
assertNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof SnapshotDiskConfiguration);
remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(DiskConfiguration.Type.SNAPSHOT, remoteConfiguration.getType());
assertEquals(snapshotName, remoteConfiguration.getSourceSnapshot().getSnapshot());
assertNull(remoteConfiguration.getSizeGb());
assertEquals("pd-standard", remoteConfiguration.getDiskType().getType());
assertNull(remoteDisk.<SnapshotDiskConfiguration>getConfiguration().getSourceSnapshotId());
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
// Delete the restored disk, then the snapshot, verifying each is gone.
operation = remoteDisk.delete();
operation.waitFor();
resourceCleaner.remove(snapshotDiskId);
assertNull(compute.getDisk(snapshotDiskId));
operation = snapshot.delete();
operation.waitFor();
resourceCleaner.remove(snapshot.getSnapshotId());
assertNull(compute.getSnapshot(snapshotName));
}
@Test
// Creates two disks, lists them (full and projected), snapshots both, then
// lists the snapshots (full and projected) with the same name filter.
public void testListDisksAndSnapshots() throws InterruptedException, TimeoutException {
String prefix = BASE_RESOURCE_NAME + "list-disks-and-snapshots-disk";
String[] diskNames = {prefix + "1", prefix + "2"};
DiskId firstDiskId = DiskId.of(ZONE, diskNames[0]);
DiskId secondDiskId = DiskId.of(ZONE, diskNames[1]);
DiskConfiguration configuration =
StandardDiskConfiguration.of(DiskTypeId.of(ZONE, "pd-ssd"), 100L);
Operation firstOperation = compute.create(DiskInfo.of(firstDiskId, configuration));
Operation secondOperation = compute.create(DiskInfo.of(secondDiskId, configuration));
// Wait for each create to finish before registering the id for cleanup.
firstOperation.waitFor();
resourceCleaner.add(firstDiskId);
secondOperation.waitFor();
resourceCleaner.add(secondDiskId);
Set<String> diskSet = ImmutableSet.copyOf(diskNames);
// test list disks
// The filter value is a regex matching both created names (prefix + digit).
Compute.DiskFilter diskFilter =
Compute.DiskFilter.equals(Compute.DiskField.NAME, prefix + "\\d");
Page<Disk> diskPage = compute.listDisks(ZONE, Compute.DiskListOption.filter(diskFilter));
Iterator<Disk> diskIterator = diskPage.iterateAll().iterator();
int count = 0;
while (diskIterator.hasNext()) {
Disk remoteDisk = diskIterator.next();
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertTrue(diskSet.contains(remoteDisk.getDiskId().getDisk()));
assertEquals(DiskInfo.CreationStatus.READY, remoteDisk.getCreationStatus());
assertNotNull(remoteDisk.getCreationTimestamp());
assertNotNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof StandardDiskConfiguration);
StandardDiskConfiguration remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(100L, (long) remoteConfiguration.getSizeGb());
assertEquals("pd-ssd", remoteConfiguration.getDiskType().getType());
assertEquals(DiskConfiguration.Type.STANDARD, remoteConfiguration.getType());
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
count++;
}
// Exactly the two disks created above should match the filter.
assertEquals(2, count);
// test list disks with selected fields
count = 0;
diskPage = compute.listDisks(ZONE, Compute.DiskListOption.filter(diskFilter),
Compute.DiskListOption.fields(Compute.DiskField.STATUS));
diskIterator = diskPage.iterateAll().iterator();
while (diskIterator.hasNext()) {
Disk remoteDisk = diskIterator.next();
assertEquals(ZONE, remoteDisk.getDiskId().getZone());
assertTrue(diskSet.contains(remoteDisk.getDiskId().getDisk()));
// Only STATUS was projected, so timestamps/id/size must be unset.
assertEquals(DiskInfo.CreationStatus.READY, remoteDisk.getCreationStatus());
assertNull(remoteDisk.getCreationTimestamp());
assertNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof StandardDiskConfiguration);
StandardDiskConfiguration remoteConfiguration = remoteDisk.getConfiguration();
assertNull(remoteConfiguration.getSizeGb());
assertEquals("pd-ssd", remoteConfiguration.getDiskType().getType());
assertEquals(DiskConfiguration.Type.STANDARD, remoteConfiguration.getType());
assertNull(remoteDisk.getLastAttachTimestamp());
assertNull(remoteDisk.getLastDetachTimestamp());
count++;
}
assertEquals(2, count);
// test snapshots
// Snapshot names reuse the disk names, so the same name filter applies below.
SnapshotId firstSnapshotId = SnapshotId.of(diskNames[0]);
SnapshotId secondSnapshotId = SnapshotId.of(diskNames[1]);
firstOperation = compute.create(SnapshotInfo.of(firstSnapshotId, firstDiskId));
secondOperation = compute.create(SnapshotInfo.of(secondSnapshotId, secondDiskId));
firstOperation.waitFor();
resourceCleaner.add(firstSnapshotId);
secondOperation.waitFor();
resourceCleaner.add(secondSnapshotId);
// test list snapshots
Compute.SnapshotFilter snapshotFilter =
Compute.SnapshotFilter.equals(Compute.SnapshotField.NAME, prefix + "\\d");
Page<Snapshot> snapshotPage =
compute.listSnapshots(Compute.SnapshotListOption.filter(snapshotFilter));
Iterator<Snapshot> snapshotIterator = snapshotPage.iterateAll().iterator();
count = 0;
while (snapshotIterator.hasNext()) {
Snapshot remoteSnapshot = snapshotIterator.next();
assertNotNull(remoteSnapshot.getGeneratedId());
assertTrue(diskSet.contains(remoteSnapshot.getSnapshotId().getSnapshot()));
assertNotNull(remoteSnapshot.getCreationTimestamp());
assertNotNull(remoteSnapshot.getStatus());
assertEquals(100L, (long) remoteSnapshot.getDiskSizeGb());
assertTrue(diskSet.contains(remoteSnapshot.getSourceDisk().getDisk()));
assertNotNull(remoteSnapshot.getSourceDiskId());
assertNotNull(remoteSnapshot.getStorageBytes());
assertNotNull(remoteSnapshot.getStorageBytesStatus());
count++;
}
assertEquals(2, count);
// test list snapshots with selected fields
// Only CREATION_TIMESTAMP is projected: identity + timestamp, the rest unset.
snapshotPage = compute.listSnapshots(Compute.SnapshotListOption.filter(snapshotFilter),
Compute.SnapshotListOption.fields(Compute.SnapshotField.CREATION_TIMESTAMP));
snapshotIterator = snapshotPage.iterateAll().iterator();
count = 0;
while (snapshotIterator.hasNext()) {
Snapshot remoteSnapshot = snapshotIterator.next();
assertNull(remoteSnapshot.getGeneratedId());
assertTrue(diskSet.contains(remoteSnapshot.getSnapshotId().getSnapshot()));
assertNotNull(remoteSnapshot.getCreationTimestamp());
assertNull(remoteSnapshot.getStatus());
assertNull(remoteSnapshot.getDiskSizeGb());
assertNull(remoteSnapshot.getSourceDisk());
assertNull(remoteSnapshot.getSourceDiskId());
assertNull(remoteSnapshot.getStorageBytes());
assertNull(remoteSnapshot.getStorageBytesStatus());
count++;
}
assertEquals(2, count);
}
@Test
public void testAggregatedListDisks() throws InterruptedException, TimeoutException {
String prefix = BASE_RESOURCE_NAME + "list-aggregated-disk";
String[] diskZones = {"us-central1-a", "us-east1-c"};
String[] diskNames = {prefix + "1", prefix + "2"};
DiskId firstDiskId = DiskId.of(diskZones[0], diskNames[0]);
DiskId secondDiskId = DiskId.of(diskZones[1], diskNames[1]);
DiskConfiguration configuration =
StandardDiskConfiguration.of(DiskTypeId.of(ZONE, "pd-ssd"), 100L);
Operation firstOperation = compute.create(DiskInfo.of(firstDiskId, configuration));
Operation secondOperation = compute.create(DiskInfo.of(secondDiskId, configuration));
firstOperation.waitFor();
resourceCleaner.add(firstDiskId);
secondOperation.waitFor();
resourceCleaner.add(secondDiskId);
Set<String> zoneSet = ImmutableSet.copyOf(diskZones);
Set<String> diskSet = ImmutableSet.copyOf(diskNames);
Compute.DiskFilter diskFilter =
Compute.DiskFilter.equals(Compute.DiskField.NAME, prefix + "\\d");
Page<Disk> diskPage = compute.listDisks(Compute.DiskAggregatedListOption.filter(diskFilter));
Iterator<Disk> diskIterator = diskPage.iterateAll().iterator();
int count = 0;
while (diskIterator.hasNext()) {
Disk remoteDisk = diskIterator.next();
assertTrue(zoneSet.contains(remoteDisk.getDiskId().getZone()));
assertTrue(diskSet.contains(remoteDisk.getDiskId().getDisk()));
assertEquals(DiskInfo.CreationStatus.READY, remoteDisk.getCreationStatus());
assertNotNull(remoteDisk.getCreationTimestamp());
assertNotNull(remoteDisk.getGeneratedId());
assertTrue(remoteDisk.getConfiguration() instanceof StandardDiskConfiguration);
StandardDiskConfiguration remoteConfiguration = remoteDisk.getConfiguration();
assertEquals(100L, (long) remoteConfiguration.getSizeGb());
assertEquals("pd-ssd", remoteConfiguration.getDiskType().getType());
assertEquals(DiskConfiguration.Type.STANDARD, remoteConfiguration.getType());
count++;
}
assertEquals(2, count);
}
@Test
public void testCreateGetAndDeprecateImage() throws InterruptedException, TimeoutException {
String diskName = BASE_RESOURCE_NAME + "create-and-get-image-disk";
String imageName = BASE_RESOURCE_NAME + "create-and-get-image";
DiskId diskId = DiskId.of(ZONE, diskName);
ImageId imageId = ImageId.of(imageName);
DiskInfo diskInfo =
DiskInfo.of(diskId, StandardDiskConfiguration.of(DiskTypeId.of(ZONE, "pd-ssd"), 100L));
Operation operation = compute.create(diskInfo);
operation.waitFor();
Disk remoteDisk = compute.getDisk(diskId);
ImageInfo imageInfo = ImageInfo.of(imageId, DiskImageConfiguration.of(diskId));
operation = compute.create(imageInfo);
operation.waitFor();
resourceCleaner.add(diskId);
// test get image with selected fields
Image image = compute.getImage(imageId,
Compute.ImageOption.fields(Compute.ImageField.CREATION_TIMESTAMP));
resourceCleaner.add(imageId);
assertNull(image.getGeneratedId());
assertNotNull(image.getImageId());
assertNotNull(image.getCreationTimestamp());
assertNull(image.getDescription());
assertNotNull(image.getConfiguration());
assertTrue(image.getConfiguration() instanceof DiskImageConfiguration);
DiskImageConfiguration remoteConfiguration = image.getConfiguration();
assertEquals(ImageConfiguration.Type.DISK, remoteConfiguration.getType());
assertEquals(diskName, remoteConfiguration.getSourceDisk().getDisk());
assertNull(image.getStatus());
assertNull(image.getDiskSizeGb());
assertNull(image.getLicenses());
assertNull(image.getDeprecationStatus());
// test get image
image = compute.getImage(imageId);
assertNotNull(image.getGeneratedId());
assertNotNull(image.getImageId());
assertNotNull(image.getCreationTimestamp());
assertNotNull(image.getConfiguration());
assertTrue(image.getConfiguration() instanceof DiskImageConfiguration);
remoteConfiguration = image.getConfiguration();
assertEquals(ImageConfiguration.Type.DISK, remoteConfiguration.getType());
assertEquals(diskName, remoteConfiguration.getSourceDisk().getDisk());
assertEquals(100L, (long) image.getDiskSizeGb());
assertNotNull(image.getStatus());
assertNull(image.getDeprecationStatus());
// test deprecate image
DeprecationStatus<ImageId> deprecationStatus =
DeprecationStatus.newBuilder(DeprecationStatus.Status.DEPRECATED, imageId)
.setDeprecated(System.currentTimeMillis())
.build();
operation = image.deprecate(deprecationStatus);
operation.waitFor();
image = compute.getImage(imageId);
assertEquals(deprecationStatus, image.getDeprecationStatus());
remoteDisk.delete();
resourceCleaner.remove(diskId);
operation = image.delete();
operation.waitFor();
resourceCleaner.remove(imageId);
assertNull(compute.getImage(imageId));
}
@Test
public void testListImages() {
Page<Image> imagePage = compute.listImages(IMAGE_PROJECT);
Iterator<Image> imageIterator = imagePage.iterateAll().iterator();
int count = 0;
while (imageIterator.hasNext()) {
count++;
Image image = imageIterator.next();
assertNotNull(image.getGeneratedId());
assertNotNull(image.getImageId());
assertNotNull(image.getCreationTimestamp());
assertNotNull(image.getConfiguration());
assertNotNull(image.getStatus());
assertNotNull(image.getDiskSizeGb());
}
assertTrue(count > 0);
}
@Test
public void testListImagesWithSelectedFields() {
Page<Image> imagePage =
compute.listImages(IMAGE_PROJECT, Compute.ImageListOption.fields(Compute.ImageField.ID));
Iterator<Image> imageIterator = imagePage.iterateAll().iterator();
int count = 0;
while (imageIterator.hasNext()) {
count++;
Image image = imageIterator.next();
assertNotNull(image.getGeneratedId());
assertNotNull(image.getImageId());
assertNull(image.getCreationTimestamp());
assertNotNull(image.getConfiguration());
assertNull(image.getStatus());
assertNull(image.getDiskSizeGb());
assertNull(image.getLicenses());
assertNull(image.getDeprecationStatus());
}
assertTrue(count > 0);
}
@Test
public void testListImagesWithFilter() {
Page<Image> imagePage = compute.listImages(IMAGE_PROJECT, Compute.ImageListOption.filter(
Compute.ImageFilter.equals(Compute.ImageField.ARCHIVE_SIZE_BYTES, 365056004L)));
Iterator<Image> imageIterator = imagePage.iterateAll().iterator();
int count = 0;
while (imageIterator.hasNext()) {
count++;
Image image = imageIterator.next();
assertNotNull(image.getGeneratedId());
assertNotNull(image.getImageId());
assertNotNull(image.getCreationTimestamp());
assertNotNull(image.getConfiguration());
assertNotNull(image.getStatus());
assertNotNull(image.getDiskSizeGb());
assertEquals(365056004L,
(long) image.<StorageImageConfiguration>getConfiguration().getArchiveSizeBytes());
}
assertTrue(count > 0);
}
@Ignore("Avoid leaving orphan networks when interrupted: see #2118")
@Test
public void testCreateAndGetNetwork() throws InterruptedException, TimeoutException {
String name = BASE_RESOURCE_NAME + "create-and-get-network";
NetworkId networkId = NetworkId.of(name);
NetworkInfo networkInfo =
NetworkInfo.of(networkId, StandardNetworkConfiguration.of("192.168.0.0/16"));
Operation operation = compute.create(networkInfo);
operation.waitFor();
// test get network with selected fields
Network network = compute.getNetwork(networkId.getNetwork(),
Compute.NetworkOption.fields(Compute.NetworkField.CREATION_TIMESTAMP));
resourceCleaner.add(networkId);
assertEquals(networkId.getNetwork(), network.getNetworkId().getNetwork());
assertNull(network.getGeneratedId());
assertNotNull(network.getCreationTimestamp());
assertNull(network.getDescription());
assertEquals(NetworkConfiguration.Type.STANDARD, network.getConfiguration().getType());
StandardNetworkConfiguration remoteConfiguration = network.getConfiguration();
assertEquals("192.168.0.0/16", remoteConfiguration.getIpRange());
// test get network
network = compute.getNetwork(networkId.getNetwork());
assertEquals(networkId.getNetwork(), network.getNetworkId().getNetwork());
assertNotNull(network.getGeneratedId());
assertNotNull(network.getCreationTimestamp());
assertEquals(NetworkConfiguration.Type.STANDARD, network.getConfiguration().getType());
remoteConfiguration = network.getConfiguration();
assertEquals("192.168.0.0/16", remoteConfiguration.getIpRange());
operation = network.delete();
operation.waitFor();
resourceCleaner.remove(networkId);
assertNull(compute.getNetwork(name));
}
@Ignore("Avoid leaving orphan networks when interrupted: see #2118")
@Test
public void testListNetworks() throws InterruptedException, TimeoutException {
String name = BASE_RESOURCE_NAME + "list-network";
NetworkId networkId = NetworkId.of(name);
NetworkInfo networkInfo =
NetworkInfo.of(networkId, StandardNetworkConfiguration.of("192.168.0.0/16"));
Operation operation = compute.create(networkInfo);
operation.waitFor();
resourceCleaner.add(networkId);
// test list
Compute.NetworkFilter filter = Compute.NetworkFilter.equals(Compute.NetworkField.NAME, name);
Page<Network> networkPage = compute.listNetworks(Compute.NetworkListOption.filter(filter));
Iterator<Network> networkIterator = networkPage.iterateAll().iterator();
int count = 0;
while (networkIterator.hasNext()) {
Network network = networkIterator.next();
assertEquals(networkId.getNetwork(), network.getNetworkId().getNetwork());
assertNotNull(network.getGeneratedId());
assertNotNull(network.getCreationTimestamp());
assertEquals(NetworkConfiguration.Type.STANDARD, network.getConfiguration().getType());
StandardNetworkConfiguration remoteConfiguration = network.getConfiguration();
assertEquals("192.168.0.0/16", remoteConfiguration.getIpRange());
count++;
}
assertEquals(1, count);
// test list with selected fields
count = 0;
networkPage = compute.listNetworks(Compute.NetworkListOption.filter(filter),
Compute.NetworkListOption.fields(Compute.NetworkField.CREATION_TIMESTAMP));
networkIterator = networkPage.iterateAll().iterator();
while (networkIterator.hasNext()) {
Network network = networkIterator.next();
assertEquals(networkId.getNetwork(), network.getNetworkId().getNetwork());
assertNull(network.getGeneratedId());
assertNotNull(network.getCreationTimestamp());
assertNull(network.getDescription());
assertEquals(NetworkConfiguration.Type.STANDARD, network.getConfiguration().getType());
StandardNetworkConfiguration remoteConfiguration = network.getConfiguration();
assertEquals("192.168.0.0/16", remoteConfiguration.getIpRange());
count++;
}
assertEquals(1, count);
operation = compute.deleteNetwork(networkId);
operation.waitFor();
resourceCleaner.remove(networkId);
assertNull(compute.getNetwork(name));
}
  @Ignore("Avoid leaving orphan networks when interrupted: see #2118")
  @Test
  public void testCreateNetworkAndSubnetwork() throws InterruptedException, TimeoutException {
    // End-to-end check of subnet-mode networking: creates a network with manual
    // subnet creation (autoCreateSubnetworks == false), adds one subnetwork, then
    // exercises subnetwork get/list (full and field-restricted) before deleting
    // both resources. The teardown order matters: the subnetwork must be deleted
    // before its parent network.
    String networkName = BASE_RESOURCE_NAME + "create-subnetwork-network";
    NetworkId networkId = NetworkId.of(networkName);
    NetworkInfo networkInfo = NetworkInfo.of(networkId, SubnetNetworkConfiguration.of(false));
    Operation operation = compute.create(networkInfo);
    operation.waitFor();
    // test get network
    Network network = compute.getNetwork(networkId.getNetwork());
    resourceCleaner.add(networkId);
    assertEquals(networkId.getNetwork(), network.getNetworkId().getNetwork());
    assertNotNull(network.getGeneratedId());
    assertNotNull(network.getCreationTimestamp());
    assertEquals(NetworkConfiguration.Type.SUBNET, network.getConfiguration().getType());
    assertTrue(network.getConfiguration() instanceof SubnetNetworkConfiguration);
    assertFalse(network.<SubnetNetworkConfiguration>getConfiguration().autoCreateSubnetworks());
    String subnetworkName = BASE_RESOURCE_NAME + "create-subnetwork-subnetwork";
    SubnetworkId subnetworkId = SubnetworkId.of(REGION, subnetworkName);
    SubnetworkInfo subnetworkInfo = SubnetworkInfo.of(subnetworkId, networkId, "192.168.0.0/16");
    operation = compute.create(subnetworkInfo);
    operation.waitFor();
    // test get subnetwork with selected fields: only CREATION_TIMESTAMP (plus
    // identity) should be populated
    Subnetwork subnetwork = compute.getSubnetwork(subnetworkId,
        Compute.SubnetworkOption.fields(Compute.SubnetworkField.CREATION_TIMESTAMP));
    resourceCleaner.add(subnetworkId);
    assertNull(subnetwork.getGeneratedId());
    assertEquals(subnetworkId.getSubnetwork(), subnetwork.getSubnetworkId().getSubnetwork());
    assertNotNull(subnetwork.getCreationTimestamp());
    assertNull(subnetwork.getDescription());
    assertNull(subnetwork.getGatewayAddress());
    assertNull(subnetwork.getNetwork());
    assertNull(subnetwork.getIpRange());
    // test get subnetwork: all standard fields should now be populated
    subnetwork = compute.getSubnetwork(subnetworkId);
    assertNotNull(subnetwork.getGeneratedId());
    assertEquals(subnetworkId.getSubnetwork(), subnetwork.getSubnetworkId().getSubnetwork());
    assertNotNull(subnetwork.getCreationTimestamp());
    assertNotNull(subnetwork.getGatewayAddress());
    assertEquals(networkId.getNetwork(), subnetwork.getNetwork().getNetwork());
    assertEquals("192.168.0.0/16", subnetwork.getIpRange());
    // test list subnetworks: the name filter should match exactly one entry
    Compute.SubnetworkFilter filter =
        Compute.SubnetworkFilter.equals(Compute.SubnetworkField.NAME, subnetworkName);
    Page<Subnetwork> subnetworkPage =
        compute.listSubnetworks(REGION, Compute.SubnetworkListOption.filter(filter));
    Iterator<Subnetwork> subnetworkIterator = subnetworkPage.iterateAll().iterator();
    int count = 0;
    while (subnetworkIterator.hasNext()) {
      Subnetwork remoteSubnetwork = subnetworkIterator.next();
      assertNotNull(remoteSubnetwork.getGeneratedId());
      assertEquals(subnetworkId.getSubnetwork(),
          remoteSubnetwork.getSubnetworkId().getSubnetwork());
      assertNotNull(remoteSubnetwork.getCreationTimestamp());
      assertNotNull(remoteSubnetwork.getGatewayAddress());
      assertEquals(networkId.getNetwork(), remoteSubnetwork.getNetwork().getNetwork());
      assertEquals("192.168.0.0/16", remoteSubnetwork.getIpRange());
      count++;
    }
    assertEquals(1, count);
    // test list subnetworks with selected fields: only CREATION_TIMESTAMP set
    subnetworkPage = compute.listSubnetworks(REGION, Compute.SubnetworkListOption.filter(filter),
        Compute.SubnetworkListOption.fields(Compute.SubnetworkField.CREATION_TIMESTAMP));
    subnetworkIterator = subnetworkPage.iterateAll().iterator();
    count = 0;
    while (subnetworkIterator.hasNext()) {
      Subnetwork remoteSubnetwork = subnetworkIterator.next();
      assertNull(remoteSubnetwork.getGeneratedId());
      assertEquals(subnetworkId.getSubnetwork(),
          remoteSubnetwork.getSubnetworkId().getSubnetwork());
      assertNotNull(remoteSubnetwork.getCreationTimestamp());
      assertNull(remoteSubnetwork.getDescription());
      assertNull(remoteSubnetwork.getGatewayAddress());
      assertNull(remoteSubnetwork.getNetwork());
      assertNull(remoteSubnetwork.getIpRange());
      count++;
    }
    assertEquals(1, count);
    // Teardown: subnetwork first, then the network it belongs to.
    operation = subnetwork.delete();
    operation.waitFor();
    resourceCleaner.remove(subnetworkId);
    operation = compute.deleteNetwork(networkId);
    operation.waitFor();
    resourceCleaner.remove(networkId);
    assertNull(compute.getSubnetwork(subnetworkId));
    assertNull(compute.getNetwork(networkName));
  }
@Ignore("Avoid leaving orphan networks when interrupted: see #2118")
@Test
public void testAggregatedListSubnetworks() throws InterruptedException, TimeoutException {
String networkName = BASE_RESOURCE_NAME + "list-subnetwork-network";
NetworkId networkId = NetworkId.of(networkName);
NetworkInfo networkInfo = NetworkInfo.of(networkId, SubnetNetworkConfiguration.of(false));
Operation operation = compute.create(networkInfo);
operation.waitFor();
resourceCleaner.add(networkId);
String prefix = BASE_RESOURCE_NAME + "list-subnetwork";
String[] regionNames = {"us-central1", "us-east1"};
String[] subnetworkNames = {prefix + "1", prefix + "2"};
String[] ipRanges = {"10.128.0.0/20", "10.132.0.0/20"};
SubnetworkId firstSubnetworkId = SubnetworkId.of(regionNames[0], subnetworkNames[0]);
SubnetworkId secondSubnetworkId = SubnetworkId.of(regionNames[1], subnetworkNames[1]);
SubnetworkInfo firstSubnetworkInfo =
SubnetworkInfo.of(firstSubnetworkId, networkId, ipRanges[0]);
SubnetworkInfo secondSubnetworkInfo =
SubnetworkInfo.of(secondSubnetworkId, networkId, ipRanges[1]);
Operation firstOperation = compute.create(firstSubnetworkInfo);
Operation secondOperation = compute.create(secondSubnetworkInfo);
firstOperation.waitFor();
resourceCleaner.add(firstSubnetworkId);
secondOperation.waitFor();
resourceCleaner.add(secondSubnetworkId);
Set<String> regionSet = ImmutableSet.copyOf(regionNames);
Set<String> subnetworkSet = ImmutableSet.copyOf(subnetworkNames);
Set<String> rangeSet = ImmutableSet.copyOf(ipRanges);
Compute.SubnetworkFilter subnetworkFilter =
Compute.SubnetworkFilter.equals(Compute.SubnetworkField.NAME, prefix + "\\d");
Page<Subnetwork> subnetworkPage =
compute.listSubnetworks(Compute.SubnetworkAggregatedListOption.filter(subnetworkFilter));
Iterator<Subnetwork> subnetworkIterator = subnetworkPage.iterateAll().iterator();
int count = 0;
while (subnetworkIterator.hasNext()) {
Subnetwork remoteSubnetwork = subnetworkIterator.next();
assertNotNull(remoteSubnetwork.getGeneratedId());
assertTrue(regionSet.contains(remoteSubnetwork.getSubnetworkId().getRegion()));
assertTrue(subnetworkSet.contains(remoteSubnetwork.getSubnetworkId().getSubnetwork()));
assertNotNull(remoteSubnetwork.getCreationTimestamp());
assertNotNull(remoteSubnetwork.getGatewayAddress());
assertEquals(networkId.getNetwork(), remoteSubnetwork.getNetwork().getNetwork());
assertTrue(rangeSet.contains(remoteSubnetwork.getIpRange()));
count++;
}
assertEquals(2, count);
firstOperation = compute.deleteSubnetwork(firstSubnetworkId);
secondOperation = compute.deleteSubnetwork(secondSubnetworkId);
firstOperation.waitFor();
resourceCleaner.remove(firstSubnetworkId);
secondOperation.waitFor();
resourceCleaner.remove(secondSubnetworkId);
operation = compute.deleteNetwork(networkId);
operation.waitFor();
resourceCleaner.remove(networkId);
assertNull(compute.getSubnetwork(firstSubnetworkId));
assertNull(compute.getSubnetwork(secondSubnetworkId));
assertNull(compute.getNetwork(networkName));
}
  @Test
  public void testCreateGetAndDeleteInstance() throws InterruptedException, TimeoutException {
    // Full instance lifecycle: reserves a regional address, boots an instance
    // with two attached disks (one persistent boot disk, one scratch disk) and a
    // NAT access config using that address, verifies both a full get and a
    // field-restricted get, reads serial port output, and deletes the instance.
    String instanceName = BASE_RESOURCE_NAME + "create-and-get-instance";
    String addressName = BASE_RESOURCE_NAME + "create-and-get-instance-address";
    // Create an address to assign to the instance
    AddressId addressId = RegionAddressId.of(REGION, addressName);
    AddressInfo addressInfo = AddressInfo.of(addressId);
    Operation operation = compute.create(addressInfo);
    operation.waitFor();
    Address address = compute.getAddress(addressId);
    resourceCleaner.add(addressId);
    // Create an instance
    InstanceId instanceId = InstanceId.of(ZONE, instanceName);
    NetworkId networkId = NetworkId.of("default");
    NetworkInterface networkInterface = NetworkInterface.newBuilder(networkId)
        .setAccessConfigurations(NetworkInterface.AccessConfig.newBuilder()
            .setName("NAT")
            .setNatIp(address.getAddress()).build())
        .build();
    AttachedDisk disk1 = AttachedDisk.of("dev0",
        AttachedDisk.CreateDiskConfiguration.newBuilder(IMAGE_ID).setAutoDelete(true).build());
    AttachedDisk disk2 =
        AttachedDisk.of("dev1",
            AttachedDisk.ScratchDiskConfiguration.of(DiskTypeId.of(ZONE, DISK_TYPE)));
    InstanceInfo instanceInfo =
        InstanceInfo.newBuilder(instanceId, MachineTypeId.of(ZONE, "n1-standard-1"))
            .setAttachedDisks(disk1, disk2)
            .setNetworkInterfaces(networkInterface)
            .build();
    operation = compute.create(instanceInfo);
    operation.waitFor();
    // test get
    Instance remoteInstance = compute.getInstance(instanceId);
    resourceCleaner.add(instanceId);
    assertEquals(instanceName, remoteInstance.getInstanceId().getInstance());
    assertEquals(ZONE, remoteInstance.getInstanceId().getZone());
    assertEquals(InstanceInfo.Status.RUNNING, remoteInstance.getStatus());
    assertEquals("n1-standard-1", remoteInstance.getMachineType().getType());
    assertEquals(ZONE, remoteInstance.getMachineType().getZone());
    assertNotNull(remoteInstance.getCreationTimestamp());
    // Both attached disks should come back under their requested device names.
    Set<String> deviceSet = ImmutableSet.of("dev0", "dev1");
    assertEquals(2, remoteInstance.getAttachedDisks().size());
    for (AttachedDisk remoteAttachedDisk : remoteInstance.getAttachedDisks()) {
      assertTrue(deviceSet.contains(remoteAttachedDisk.getDeviceName()));
    }
    // The first disk is the persistent boot disk created from IMAGE_ID.
    assertEquals(AttachedDisk.AttachedDiskConfiguration.Type.PERSISTENT,
        remoteInstance.getAttachedDisks().get(0).getConfiguration().getType());
    AttachedDisk.PersistentDiskConfiguration remoteConfiguration =
        remoteInstance.getAttachedDisks().get(0).getConfiguration();
    assertEquals(instanceName, remoteConfiguration.getSourceDisk().getDisk());
    assertEquals(ZONE, remoteConfiguration.getSourceDisk().getZone());
    assertTrue(remoteConfiguration.boot());
    assertTrue(remoteConfiguration.autoDelete());
    // The single network interface should expose the NAT access config that
    // was bound to the reserved address.
    assertEquals(1, remoteInstance.getNetworkInterfaces().size());
    NetworkInterface remoteNetworkInterface = remoteInstance.getNetworkInterfaces().get(0);
    assertNotNull(remoteNetworkInterface.getName());
    assertEquals("default", remoteNetworkInterface.getNetwork().getNetwork());
    List<NetworkInterface.AccessConfig> remoteAccessConfigurations =
        remoteNetworkInterface.getAccessConfigurations();
    assertNotNull(remoteAccessConfigurations);
    assertEquals(1, remoteAccessConfigurations.size());
    NetworkInterface.AccessConfig remoteAccessConfig = remoteAccessConfigurations.get(0);
    assertEquals(address.getAddress(), remoteAccessConfig.getNatIp());
    assertEquals("NAT", remoteAccessConfig.getName());
    assertNotNull(remoteInstance.getMetadata());
    assertNotNull(remoteInstance.getTags());
    // test get with selected fields: only CREATION_TIMESTAMP (plus identity)
    // should be populated
    remoteInstance = compute.getInstance(instanceId,
        Compute.InstanceOption.fields(Compute.InstanceField.CREATION_TIMESTAMP));
    assertEquals(instanceName, remoteInstance.getInstanceId().getInstance());
    assertEquals(ZONE, remoteInstance.getInstanceId().getZone());
    assertNull(remoteInstance.getMachineType());
    assertNotNull(remoteInstance.getCreationTimestamp());
    assertNull(remoteInstance.getAttachedDisks());
    assertNull(remoteInstance.getNetworkInterfaces());
    assertNull(remoteInstance.getMetadata());
    assertNull(remoteInstance.getTags());
    // test get default serial port output
    String serialPortOutput = remoteInstance.getSerialPortOutput();
    assertNotNull(serialPortOutput);
    // test get serial port output by number; port 1 is the default port, so its
    // output should contain everything the default call returned
    String newSerialPortOutput = remoteInstance.getSerialPortOutput(1);
    assertTrue(newSerialPortOutput.contains(serialPortOutput));
    operation = remoteInstance.delete();
    operation.waitFor();
    resourceCleaner.remove(instanceId);
    assertNull(compute.getInstance(instanceId));
  }
@Test
public void testStartStopAndResetInstance() throws InterruptedException, TimeoutException {
String instanceName = BASE_RESOURCE_NAME + "start-stop-reset-instance";
InstanceId instanceId = InstanceId.of(ZONE, instanceName);
NetworkId networkId = NetworkId.of("default");
NetworkInterface networkInterface = NetworkInterface.newBuilder(networkId).build();
AttachedDisk disk = AttachedDisk.of("dev0",
AttachedDisk.CreateDiskConfiguration.newBuilder(IMAGE_ID).setAutoDelete(true).build());
InstanceInfo instanceInfo =
InstanceInfo.newBuilder(instanceId, MachineTypeId.of(ZONE, MACHINE_TYPE))
.setAttachedDisks(disk)
.setNetworkInterfaces(networkInterface)
.build();
Operation operation = compute.create(instanceInfo);
operation.waitFor();
Instance remoteInstance = compute.getInstance(instanceId,
Compute.InstanceOption.fields(Compute.InstanceField.STATUS));
resourceCleaner.add(instanceId);
assertEquals(InstanceInfo.Status.RUNNING, remoteInstance.getStatus());
operation = remoteInstance.stop();
operation.waitFor();
remoteInstance = compute.getInstance(instanceId,
Compute.InstanceOption.fields(Compute.InstanceField.STATUS));
assertEquals(InstanceInfo.Status.TERMINATED, remoteInstance.getStatus());
operation = remoteInstance.start();
operation.waitFor();
remoteInstance = compute.getInstance(instanceId,
Compute.InstanceOption.fields(Compute.InstanceField.STATUS));
assertEquals(InstanceInfo.Status.RUNNING, remoteInstance.getStatus());
operation = remoteInstance.reset();
operation.waitFor();
remoteInstance = compute.getInstance(instanceId,
Compute.InstanceOption.fields(Compute.InstanceField.STATUS));
assertEquals(InstanceInfo.Status.RUNNING, remoteInstance.getStatus());
}
@Test
public void testSetInstanceProperties() throws InterruptedException, TimeoutException {
String instanceName = BASE_RESOURCE_NAME + "set-properties-instance";
InstanceId instanceId = InstanceId.of(ZONE, instanceName);
NetworkId networkId = NetworkId.of("default");
NetworkInterface networkInterface = NetworkInterface.newBuilder(networkId).build();
AttachedDisk disk = AttachedDisk.of("dev0",
AttachedDisk.CreateDiskConfiguration.newBuilder(IMAGE_ID).setAutoDelete(true).build());
InstanceInfo instanceInfo =
InstanceInfo.newBuilder(instanceId, MachineTypeId.of(ZONE, MACHINE_TYPE))
.setAttachedDisks(disk)
.setNetworkInterfaces(networkInterface)
.build();
Operation operation = compute.create(instanceInfo);
operation.waitFor();
Instance remoteInstance = compute.getInstance(instanceId);
resourceCleaner.add(instanceId);
// test set tags
List<String> tags = ImmutableList.of("tag1", "tag2");
operation = remoteInstance.setTags(tags);
operation.waitFor();
remoteInstance = compute.getInstance(instanceId);
assertEquals(tags, remoteInstance.getTags().getValues());
// test set metadata
Map<String, String> metadata = ImmutableMap.of("key", "value");
operation = remoteInstance.setMetadata(metadata);
operation.waitFor();
remoteInstance = compute.getInstance(instanceId);
assertEquals(metadata, remoteInstance.getMetadata().getValues());
// test set machine type
operation = remoteInstance.stop();
operation.waitFor();
operation = remoteInstance.setMachineType(MachineTypeId.of(ZONE, "n1-standard-1"));
operation.waitFor();
remoteInstance = compute.getInstance(instanceId);
assertEquals("n1-standard-1", remoteInstance.getMachineType().getType());
assertEquals(ZONE, remoteInstance.getMachineType().getZone());
// test set scheduling options
SchedulingOptions options =
SchedulingOptions.standard(false, SchedulingOptions.Maintenance.TERMINATE);
operation = remoteInstance.setSchedulingOptions(options);
operation.waitFor();
remoteInstance = compute.getInstance(instanceId);
assertEquals(options, remoteInstance.getSchedulingOptions());
}
@Test
public void testAttachAndDetachDisk() throws InterruptedException, TimeoutException {
String instanceName = BASE_RESOURCE_NAME + "attach-and-detach-disk-instance";
String diskName = BASE_RESOURCE_NAME + "attach-and-detach-disk";
InstanceId instanceId = InstanceId.of(ZONE, instanceName);
NetworkId networkId = NetworkId.of("default");
NetworkInterface networkInterface = NetworkInterface.newBuilder(networkId).build();
AttachedDisk disk = AttachedDisk.of("dev0",
AttachedDisk.CreateDiskConfiguration.newBuilder(IMAGE_ID).setAutoDelete(true).build());
InstanceInfo instanceInfo =
InstanceInfo.newBuilder(instanceId, MachineTypeId.of(ZONE, MACHINE_TYPE))
.setAttachedDisks(disk)
.setNetworkInterfaces(networkInterface)
.build();
Operation instanceOperation = compute.create(instanceInfo);
DiskId diskId = DiskId.of(ZONE, diskName);
Operation diskOperation = compute.create(DiskInfo.of(diskId,
StandardDiskConfiguration.of(DiskTypeId.of(ZONE, "pd-ssd"))));
instanceOperation.waitFor();
diskOperation.waitFor();
resourceCleaner.add(diskId);
Instance remoteInstance = compute.getInstance(instanceId);
// test attach disk
instanceOperation = remoteInstance.attachDisk("dev1",
AttachedDisk.PersistentDiskConfiguration.newBuilder(diskId).build());
instanceOperation.waitFor();
remoteInstance = compute.getInstance(instanceId);
resourceCleaner.add(instanceId);
Set<String> deviceSet = ImmutableSet.of("dev0", "dev1");
assertEquals(2, remoteInstance.getAttachedDisks().size());
for (AttachedDisk remoteAttachedDisk : remoteInstance.getAttachedDisks()) {
assertTrue(deviceSet.contains(remoteAttachedDisk.getDeviceName()));
}
// test set disk auto-delete
instanceOperation = remoteInstance.setDiskAutoDelete("dev1", true);
instanceOperation.waitFor();
remoteInstance = compute.getInstance(instanceId);
assertEquals(2, remoteInstance.getAttachedDisks().size());
for (AttachedDisk remoteAttachedDisk : remoteInstance.getAttachedDisks()) {
assertTrue(deviceSet.contains(remoteAttachedDisk.getDeviceName()));
assertTrue(remoteAttachedDisk.getConfiguration().autoDelete());
}
// test detach disk
instanceOperation = remoteInstance.detachDisk("dev1");
instanceOperation.waitFor();
remoteInstance = compute.getInstance(instanceId);
assertEquals(1, remoteInstance.getAttachedDisks().size());
assertEquals("dev0", remoteInstance.getAttachedDisks().get(0).getDeviceName());
}
@Test
public void testAddAndRemoveAccessConfig() throws InterruptedException, TimeoutException {
String instanceName = BASE_RESOURCE_NAME + "add-and-remove-access-instance";
String addressName = BASE_RESOURCE_NAME + "add-and-remove-access-address";
InstanceId instanceId = InstanceId.of(ZONE, instanceName);
NetworkId networkId = NetworkId.of("default");
NetworkInterface networkInterface = NetworkInterface.newBuilder(networkId).build();
AttachedDisk disk = AttachedDisk.of("dev0",
AttachedDisk.CreateDiskConfiguration.newBuilder(IMAGE_ID).setAutoDelete(true).build());
InstanceInfo instanceInfo =
InstanceInfo.newBuilder(instanceId, MachineTypeId.of(ZONE, MACHINE_TYPE))
.setAttachedDisks(disk)
.setNetworkInterfaces(networkInterface)
.build();
Operation instanceOperation = compute.create(instanceInfo);
AddressId addressId = RegionAddressId.of(REGION, addressName);
AddressInfo addressInfo = AddressInfo.of(addressId);
Operation addressOperation = compute.create(addressInfo);
addressOperation.waitFor();
instanceOperation.waitFor();
Address remoteAddress = compute.getAddress(addressId);
resourceCleaner.add(addressId);
Instance remoteInstance = compute.getInstance(instanceId);
resourceCleaner.add(instanceId);
String networkInterfaceName = remoteInstance.getNetworkInterfaces().get(0).getName();
// test add access config
NetworkInterface.AccessConfig accessConfig = NetworkInterface.AccessConfig.newBuilder()
.setNatIp(remoteAddress.getAddress())
.setName("NAT")
.build();
instanceOperation = remoteInstance.addAccessConfig(networkInterfaceName, accessConfig);
instanceOperation.waitFor();
remoteInstance = compute.getInstance(instanceId);
List<NetworkInterface.AccessConfig> accessConfigurations =
remoteInstance.getNetworkInterfaces().get(0).getAccessConfigurations();
assertEquals(1, accessConfigurations.size());
assertEquals("NAT", accessConfigurations.get(0).getName());
// test delete access config
instanceOperation = remoteInstance.deleteAccessConfig(networkInterfaceName, "NAT");
instanceOperation.waitFor();
remoteInstance = compute.getInstance(instanceId);
assertTrue(remoteInstance.getNetworkInterfaces().get(0).getAccessConfigurations().isEmpty());
}
}
| |
// Copyright (c) 2013, Facebook, Inc. All rights reserved.
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.hive.orc;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.serde2.ReaderWriterProfiler;
import com.facebook.hive.orc.lazy.LazyBinaryTreeReader;
import com.facebook.hive.orc.lazy.LazyBooleanTreeReader;
import com.facebook.hive.orc.lazy.LazyByteTreeReader;
import com.facebook.hive.orc.lazy.LazyDoubleTreeReader;
import com.facebook.hive.orc.lazy.LazyFloatTreeReader;
import com.facebook.hive.orc.lazy.LazyIntTreeReader;
import com.facebook.hive.orc.lazy.LazyListTreeReader;
import com.facebook.hive.orc.lazy.LazyLongTreeReader;
import com.facebook.hive.orc.lazy.LazyMapTreeReader;
import com.facebook.hive.orc.lazy.LazyShortTreeReader;
import com.facebook.hive.orc.lazy.LazyStringTreeReader;
import com.facebook.hive.orc.lazy.LazyStructTreeReader;
import com.facebook.hive.orc.lazy.LazyTimestampTreeReader;
import com.facebook.hive.orc.lazy.LazyTreeReader;
import com.facebook.hive.orc.lazy.LazyUnionTreeReader;
import com.facebook.hive.orc.lazy.OrcLazyBinary;
import com.facebook.hive.orc.lazy.OrcLazyBoolean;
import com.facebook.hive.orc.lazy.OrcLazyByte;
import com.facebook.hive.orc.lazy.OrcLazyDouble;
import com.facebook.hive.orc.lazy.OrcLazyFloat;
import com.facebook.hive.orc.lazy.OrcLazyInt;
import com.facebook.hive.orc.lazy.OrcLazyList;
import com.facebook.hive.orc.lazy.OrcLazyLong;
import com.facebook.hive.orc.lazy.OrcLazyMap;
import com.facebook.hive.orc.lazy.OrcLazyObject;
import com.facebook.hive.orc.lazy.OrcLazyRow;
import com.facebook.hive.orc.lazy.OrcLazyShort;
import com.facebook.hive.orc.lazy.OrcLazyString;
import com.facebook.hive.orc.lazy.OrcLazyStruct;
import com.facebook.hive.orc.lazy.OrcLazyTimestamp;
import com.facebook.hive.orc.lazy.OrcLazyUnion;
class RecordReaderImpl implements RecordReader {
private final FSDataInputStream file;
private final long firstRow;
private final List<StripeInformation> stripes =
new ArrayList<StripeInformation>();
private OrcProto.StripeFooter stripeFooter;
private final long totalRowCount;
private final CompressionCodec codec;
private final int bufferSize;
private final boolean[] included;
private final long rowIndexStride;
private long rowInStripe = 0;
private int currentStripe = 0;
private long rowBaseInStripe = 0;
private long rowCountInStripe = 0;
private final Map<StreamName, InStream> streams =
new HashMap<StreamName, InStream>();
private OrcLazyRow reader;
private final OrcProto.RowIndex[] indexes;
private final int readStrides;
private final boolean readEagerlyFromHdfs;
private final long readEagerlyFromHdfsBytes;
RecordReaderImpl(Iterable<StripeInformation> stripes,
FileSystem fileSystem,
Path path,
long offset, long length,
List<OrcProto.Type> types,
CompressionCodec codec,
int bufferSize,
boolean[] included,
long strideRate,
Configuration conf
) throws IOException {
this.file = fileSystem.open(path);
this.codec = codec;
this.bufferSize = bufferSize;
this.included = included;
this.readStrides = OrcConf.getIntVar(conf, OrcConf.ConfVars.HIVE_ORC_READ_COMPRESSION_STRIDES);
this.readEagerlyFromHdfs = OrcConf.getBoolVar(conf, OrcConf.ConfVars.HIVE_ORC_EAGER_HDFS_READ);
this.readEagerlyFromHdfsBytes =
OrcConf.getLongVar(conf, OrcConf.ConfVars.HIVE_ORC_EAGER_HDFS_READ_BYTES);
long rows = 0;
long skippedRows = 0;
for(StripeInformation stripe: stripes) {
long stripeStart = stripe.getOffset();
if (offset > stripeStart) {
skippedRows += stripe.getNumberOfRows();
} else if (stripeStart < offset + length) {
this.stripes.add(stripe);
rows += stripe.getNumberOfRows();
}
}
firstRow = skippedRows;
totalRowCount = rows;
indexes = new OrcProto.RowIndex[types.size()];
rowIndexStride = strideRate;
reader = createLazyRow(types, included);
if (this.stripes.size() > 0) {
readStripe();
}
}
OrcLazyRow createLazyRow(List<OrcProto.Type> types, boolean[] included) throws IOException {
OrcProto.Type type = types.get(0);
int structFieldCount = type.getFieldNamesCount();
OrcLazyObject[] structFields = new OrcLazyObject[structFieldCount];
for (int i = 0; i < structFieldCount; i++) {
int subtype = type.getSubtypes(i);
if (included == null || included[subtype]) {
structFields[i] = createLazyObject(subtype, types, included);
}
}
return new OrcLazyRow(structFields, type.getFieldNamesList());
}
LazyTreeReader createLazyTreeReader(int columnId,
List<OrcProto.Type> types,
boolean[] included
) throws IOException {
OrcProto.Type type = types.get(columnId);
switch (type.getKind()) {
case BOOLEAN:
return new LazyBooleanTreeReader(columnId, rowIndexStride);
case BYTE:
return new LazyByteTreeReader(columnId, rowIndexStride);
case DOUBLE:
return new LazyDoubleTreeReader(columnId, rowIndexStride);
case FLOAT:
return new LazyFloatTreeReader(columnId, rowIndexStride);
case SHORT:
return new LazyShortTreeReader(columnId, rowIndexStride);
case LONG:
return new LazyLongTreeReader(columnId, rowIndexStride);
case INT:
return new LazyIntTreeReader(columnId, rowIndexStride);
case STRING:
return new LazyStringTreeReader(columnId, rowIndexStride);
case BINARY:
return new LazyBinaryTreeReader(columnId, rowIndexStride);
case TIMESTAMP:
return new LazyTimestampTreeReader(columnId, rowIndexStride);
case STRUCT:
int structFieldCount = type.getFieldNamesCount();
LazyTreeReader[] structFields = new LazyTreeReader[structFieldCount];
for (int i = 0; i < structFieldCount; i++) {
int subtype = type.getSubtypes(i);
if (included == null || included[subtype]) {
structFields[i] = createLazyTreeReader(subtype, types, included);
}
}
return new LazyStructTreeReader(columnId, rowIndexStride, structFields, type.getFieldNamesList());
case LIST:
LazyTreeReader elementReader = createLazyTreeReader(type.getSubtypes(0), types, included);
return new LazyListTreeReader(columnId, rowIndexStride, elementReader);
case MAP:
LazyTreeReader keyReader = createLazyTreeReader(type.getSubtypes(0), types, included);
LazyTreeReader valueReader = createLazyTreeReader(type.getSubtypes(1), types, included);
return new LazyMapTreeReader(columnId, rowIndexStride, keyReader, valueReader);
case UNION:
int unionFieldCount = type.getSubtypesCount();
LazyTreeReader[] unionFields = new LazyTreeReader[unionFieldCount];
for(int i=0; i < unionFieldCount; ++i) {
unionFields[i] = createLazyTreeReader(type.getSubtypes(i), types, included);
}
return new LazyUnionTreeReader(columnId, rowIndexStride, unionFields);
default:
throw new IllegalArgumentException("Unsupported type " +
type.getKind());
}
}
OrcLazyObject createLazyObject(int columnId,
List<OrcProto.Type> types,
boolean[] included
) throws IOException {
OrcProto.Type type = types.get(columnId);
switch (type.getKind()) {
case BOOLEAN:
return new OrcLazyBoolean((LazyBooleanTreeReader)createLazyTreeReader(columnId, types, included));
case BYTE:
return new OrcLazyByte((LazyByteTreeReader)createLazyTreeReader(columnId, types, included));
case DOUBLE:
return new OrcLazyDouble((LazyDoubleTreeReader)createLazyTreeReader(columnId, types, included));
case FLOAT:
return new OrcLazyFloat((LazyFloatTreeReader)createLazyTreeReader(columnId, types, included));
case SHORT:
return new OrcLazyShort((LazyShortTreeReader)createLazyTreeReader(columnId, types, included));
case LONG:
return new OrcLazyLong((LazyLongTreeReader)createLazyTreeReader(columnId, types, included));
case INT:
return new OrcLazyInt((LazyIntTreeReader)createLazyTreeReader(columnId, types, included));
case STRING:
return new OrcLazyString((LazyStringTreeReader)createLazyTreeReader(columnId, types, included));
case BINARY:
return new OrcLazyBinary((LazyBinaryTreeReader)createLazyTreeReader(columnId, types, included));
case TIMESTAMP:
return new OrcLazyTimestamp((LazyTimestampTreeReader)createLazyTreeReader(columnId, types, included));
case STRUCT:
return new OrcLazyStruct((LazyStructTreeReader)createLazyTreeReader(columnId, types, included));
case LIST:
return new OrcLazyList((LazyListTreeReader)createLazyTreeReader(columnId, types, included));
case MAP:
return new OrcLazyMap((LazyMapTreeReader)createLazyTreeReader(columnId, types, included));
case UNION:
return new OrcLazyUnion((LazyUnionTreeReader)createLazyTreeReader(columnId, types, included));
default:
throw new IllegalArgumentException("Unsupported type " +
type.getKind());
}
}
OrcProto.StripeFooter readStripeFooter(StripeInformation stripe
) throws IOException {
long offset = stripe.getOffset() + stripe.getIndexLength() +
stripe.getDataLength();
int tailLength = (int) stripe.getFooterLength();
return OrcProto.StripeFooter.parseFrom(InStream.create("footer", file, offset,
tailLength, codec, bufferSize));
}
private void readEntireStripeEagerly(StripeInformation stripe, long offset) throws IOException {
byte[] buffer = new byte[(int) (stripe.getDataLength())];
file.seek(offset + stripe.getIndexLength());
file.readFully(buffer, 0, buffer.length);
int sectionOffset = 0;
for(OrcProto.Stream section: stripeFooter.getStreamsList()) {
if (StreamName.getArea(section.getKind()) == StreamName.Area.DATA ||
StreamName.getArea(section.getKind()) == StreamName.Area.DICTIONARY) {
int sectionLength = (int) section.getLength();
StreamName name = new StreamName(section.getColumn(),
section.getKind());
ByteBuffer sectionBuffer = ByteBuffer.wrap(buffer, sectionOffset, sectionLength);
streams.put(name, InStream.create(name.toString(), sectionBuffer, codec, bufferSize,
section.getUseVInts()));
sectionOffset += sectionLength;
}
}
}
private void readEntireStripeLazily(StripeInformation stripe, long offset) throws IOException {
int sectionOffset = 0;
for(OrcProto.Stream section: stripeFooter.getStreamsList()) {
if (StreamName.getArea(section.getKind()) == StreamName.Area.DATA ||
StreamName.getArea(section.getKind()) == StreamName.Area.DICTIONARY) {
int sectionLength = (int) section.getLength();
StreamName name = new StreamName(section.getColumn(),
section.getKind());
streams.put(name, InStream.create(name.toString(), file,
offset + stripe.getIndexLength() + sectionOffset, sectionLength, codec, bufferSize,
section.getUseVInts(), readStrides));
sectionOffset += sectionLength;
}
}
}
private void readIncludedStreamsEagerly(StripeInformation stripe,
List<OrcProto.Stream> streamList, long offset, int currentSection) throws IOException {
long sectionOffset = stripe.getIndexLength();
while (currentSection < streamList.size()) {
int bytes = 0;
// find the first section that shouldn't be read
int excluded = currentSection;
while (excluded < streamList.size() && included[streamList.get(excluded).getColumn()]) {
bytes += streamList.get(excluded++).getLength();
}
// actually read the bytes as a big chunk
if (currentSection < excluded) {
byte[] buffer = new byte[bytes];
file.seek(offset + sectionOffset);
file.readFully(buffer, 0, bytes);
sectionOffset += bytes;
// create the streams for the sections we just read
bytes = 0;
while (currentSection < excluded) {
OrcProto.Stream section = streamList.get(currentSection);
StreamName name =
new StreamName(section.getColumn(), section.getKind());
this.streams.put(name,
InStream.create(name.toString(), ByteBuffer.wrap(buffer, bytes,
(int) section.getLength()), codec, bufferSize, section.getUseVInts()));
currentSection++;
bytes += section.getLength();
}
}
// skip forward until we get back to a section that we need
while (currentSection < streamList.size() && !included[streamList.get(currentSection).getColumn()]) {
sectionOffset += streamList.get(currentSection).getLength();
currentSection++;
}
}
}
private void readIncludedStreamsLazily(StripeInformation stripe,
List<OrcProto.Stream> streamList, long offset, int currentSection) throws IOException {
long sectionOffset = stripe.getIndexLength();
while (currentSection < streamList.size()) {
if (included[streamList.get(currentSection).getColumn()]) {
OrcProto.Stream section = streamList.get(currentSection);
StreamName name =
new StreamName(section.getColumn(), section.getKind());
this.streams.put(name,
InStream.create(name.toString(), file, offset + sectionOffset,
(int) section.getLength(), codec, bufferSize, section.getUseVInts(),
readStrides));
}
sectionOffset += streamList.get(currentSection).getLength();
currentSection += 1;
}
}
protected boolean shouldReadEagerly(StripeInformation stripe, int currentSection) {
if (readEagerlyFromHdfsBytes <= 0) {
return readEagerlyFromHdfs;
}
long inputBytes = 0;
if (included == null) {
inputBytes = stripe.getDataLength();
} else {
List<OrcProto.Stream> streamList = stripeFooter.getStreamsList();
for (int i = currentSection; i < streamList.size(); i++) {
if (included[streamList.get(i).getColumn()]) {
inputBytes += streamList.get(i).getLength();
}
}
}
return inputBytes <= readEagerlyFromHdfsBytes;
}
private void readStripe() throws IOException {
StripeInformation stripe = stripes.get(currentStripe);
stripeFooter = readStripeFooter(stripe);
long offset = stripe.getOffset();
streams.clear();
// if we aren't projecting columns, just read the whole stripe
if (included == null) {
if (shouldReadEagerly(stripe, 0)) {
readEntireStripeEagerly(stripe, offset);
} else {
readEntireStripeLazily(stripe, offset);
}
} else {
List<OrcProto.Stream> streamList = stripeFooter.getStreamsList();
// the index of the current section
int currentSection = 0;
while (currentSection < streamList.size() &&
StreamName.getArea(streamList.get(currentSection).getKind()) != StreamName.Area.DATA &&
StreamName.getArea(streamList.get(currentSection).getKind()) !=
StreamName.Area.DICTIONARY) {
currentSection += 1;
}
if (shouldReadEagerly(stripe, currentSection)) {
readIncludedStreamsEagerly(stripe, streamList, offset, currentSection);
} else {
readIncludedStreamsLazily(stripe, streamList, offset, currentSection);
}
}
rowInStripe = 0;
rowCountInStripe = stripe.getNumberOfRows();
rowBaseInStripe = 0;
for(int i=0; i < currentStripe; ++i) {
rowBaseInStripe += stripes.get(i).getNumberOfRows();
}
readRowIndex();
ReaderWriterProfiler.start(ReaderWriterProfiler.Counter.DESERIALIZATION_TIME);
reader.startStripe(streams, stripeFooter.getColumnsList(), indexes, rowBaseInStripe);
ReaderWriterProfiler.end(ReaderWriterProfiler.Counter.DESERIALIZATION_TIME);
// We don't need the indices anymore, so free them
for(int i=0; i < indexes.length; ++i) {
indexes[i] = null;
}
}
@Override
public boolean hasNext() throws IOException {
return rowInStripe < rowCountInStripe || currentStripe < stripes.size() - 1;
}
@Override
public Object next(Object previous) throws IOException {
if (rowInStripe >= rowCountInStripe) {
reader.close();
currentStripe += 1;
readStripe();
}
rowInStripe += 1;
if (previous == null) {
previous = reader;
} else if (previous != reader) {
((OrcLazyRow) previous).reset(reader);
reader = (OrcLazyRow) previous;
}
((OrcLazyObject) previous).next();
return previous;
}
@Override
public void close() throws IOException {
file.close();
reader.close();
}
@Override
public long getRowNumber() {
return rowInStripe + rowBaseInStripe + firstRow;
}
/**
* Return the fraction of rows that have been read from the selected.
* section of the file
* @return fraction between 0.0 and 1.0 of rows consumed
*/
@Override
public float getProgress() {
return ((float) rowBaseInStripe + rowInStripe) / totalRowCount;
}
private int findStripe(long rowNumber) {
if (rowNumber < 0) {
throw new IllegalArgumentException("Seek to a negative row number " +
rowNumber);
} else if (rowNumber < firstRow) {
throw new IllegalArgumentException("Seek before reader range " +
rowNumber);
}
rowNumber -= firstRow;
for(int i=0; i < stripes.size(); i++) {
StripeInformation stripe = stripes.get(i);
if (stripe.getNumberOfRows() > rowNumber) {
return i;
}
rowNumber -= stripe.getNumberOfRows();
}
throw new IllegalArgumentException("Seek after the end of reader range");
}
private void readRowIndex() throws IOException {
long offset = stripes.get(currentStripe).getOffset();
for(OrcProto.Stream stream: stripeFooter.getStreamsList()) {
if (stream.getKind() == OrcProto.Stream.Kind.ROW_INDEX) {
int col = stream.getColumn();
if ((included == null || included[col]) && indexes[col] == null) {
indexes[col] = OrcProto.RowIndex.parseFrom(InStream.create("index",
file, offset, (int) stream.getLength(), codec, bufferSize,
stream.getUseVInts(), readStrides));
}
}
offset += stream.getLength();
}
}
@Override
public void seekToRow(long rowNumber) throws IOException {
int rightStripe = findStripe(rowNumber);
if (rightStripe != currentStripe) {
currentStripe = rightStripe;
readStripe();
}
reader.seekToRow(rowNumber);
}
public OrcLazyRow getReader() {
return reader;
}
}
| |
/*
Derby - Class org.apache.derbyTesting.unitTests.store.T_SortController
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.unitTests.store;
// impl imports are the preferred way to create unit tests.
import org.apache.derbyTesting.unitTests.harness.T_Generic;
import org.apache.derbyTesting.unitTests.harness.T_Fail;
import org.apache.derby.iapi.store.access.*;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.services.io.Storable;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.reference.Property;
import org.apache.derby.iapi.services.i18n.MessageService;
import org.apache.derby.iapi.reference.SQLState;
import org.apache.derby.iapi.types.SQLInteger;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Properties;
import java.util.Vector;
import java.util.StringTokenizer;
import java.io.File;
/**
Unit test for sorting.
**/
public class T_SortController extends T_Generic
{
private static final String testService = "sortTest";
/** Set this to print out the rows that are inserted into
** and returned from each sort. **/
protected boolean verbose = false;
	/**
	 * @return the name of the module under test: the store's AccessFactory.
	 */
	public String getModuleToTestProtocolName() {
		return AccessFactory.MODULE;
	}
private void setSortBufferSize(final String buf_length) {
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
System.setProperty("derby.storage.sortBufferMax", buf_length);
return null;
}
});
}
/*
** Methods of T_SortController
*/
/**
@exception T_Fail test has failed
*/
protected void runTests() throws T_Fail
{
int failcount = 0;
// Get the AccessFactory to test.
// don't automatic boot this service if it gets left around
if (startParams == null) {
startParams = new Properties();
}
startParams.put(Property.NO_AUTO_BOOT, Boolean.TRUE.toString());
// remove the service directory to ensure a clean run
startParams.put(Property.DELETE_ON_CREATE, Boolean.TRUE.toString());
// see if we are testing encryption
startParams = T_Util.setEncryptionParam(startParams);
try {
REPORT("(unitTestMain) Testing " + "sortTest with default sort buffer size 1024");
AccessFactory store1024 = null;
failcount = runEachTest(store1024, "1024");
setSortBufferSize("4");
REPORT("(unitTestMain) Testing " + "sortTest with minimum sort buffer size 4");
AccessFactory store4 = null;
failcount += runEachTest(store4, "4");
}
catch (StandardException e)
{
String msg = e.getMessage();
if (msg == null)
msg = e.getClass().getName();
REPORT("(unitTestMain) unexpected exception: " + msg);
throw T_Fail.exceptionFail(e);
}
if (failcount != 0)
throw T_Fail.testFailMsg("(unitTestMain)" + failcount + " cases failed.");
REPORT("(unitTestMain) succeeded");
}
protected int runEachTest(AccessFactory store, String tail) throws T_Fail, StandardException {
TransactionController tc = null;
int failcount = 0;
try {
store = (AccessFactory) Monitor.createPersistentService(getModuleToTestProtocolName(),
testService + tail, startParams);
} catch (StandardException mse) {
throw T_Fail.exceptionFail(mse);
}
if (store == null) {
throw T_Fail.testFailMsg(getModuleToTestProtocolName() + " service not started.");
}
tc = store.getTransaction(
ContextService.getFactory().getCurrentContextManager());
if (!sortExample(tc))
failcount++;
if (!sortBufferCoverage(tc))
failcount++;
if (!sortBoundaries(tc))
failcount++;
if (!sortAllDuplicates(tc))
failcount++;
if (!sortDescending(tc))
failcount++;
tc.commit();
tc.destroy();
return failcount;
}
	/**
	 * This test is more of an example, with lots of comments to
	 * explain what's going on: a duplicate-eliminating sort on the
	 * second and third columns of four three-column rows.
	 *
	 * @return true if the sort produced the expected output
	 * @exception StandardException from the store
	 */
	boolean sortExample(TransactionController tc)
		throws StandardException
	{
		REPORT("(sortExample)");
		// Create the rows to be sorted. Rows 0 and 2 are identical, so the
		// duplicate eliminator below should keep only one of them.
		T_AccessRow row[] = new T_AccessRow[4];
		row[0] = new T_AccessRow(18, 1, 2);
		row[1] = new T_AccessRow( 6, 1, 18);
		row[2] = new T_AccessRow(18, 1, 2);
		row[3] = new T_AccessRow( 8, 14, 3);
		// Decide on what kind of sort we want. The properties
		// can select different sorting techniques and options.
		// But all sorts will result in the rows being in order.
		// We don't care which sort technique is used, so set
		// the properties to null.
		Properties implParameters = null;
		// Define the type of rows to be sorted by constructing
		// a template. Any row with the correct column types
		// will do (the values in the template are never used,
		// just the types). The first row to be inserted will
		// make a good template.
		T_AccessRow template = row[0];
		// Define the column ordering: sort on column 1
		// (the second column) ascending, then column 2
		// (the third column) ascending. Column 0 does not
		// participate in the ordering at all.
		ColumnOrdering order[] = new ColumnOrdering[2];
		order[0] = new T_ColumnOrderingImpl(1, true); // ascending
		order[1] = new T_ColumnOrderingImpl(2, true); // ascending
		// Tell the sort that the rows are not already in order.
		boolean alreadyInOrder = false;
		// Tell the sort that we're estimating that about 10
		// rows will be inserted into the sort. This is just
		// a hint, the sort will still work if more rows or
		// fewer rows are inserted. But if the guess is close
		// the sort will probably run faster.
		long estimatedRows = 10;
		// Tell the sort that we're estimating that the rows
		// are about 12 bytes long (3 int columns).
		// This is just a hint, the sort will still work if rows of
		// less or greater size are inserted. But if the guess is close
		// the sort will probably run faster.
		int estimatedRowSize = 12;
		// Create the sort.
		long sortid = tc.createSort(implParameters, template.getRowArray(),
			order, new T_DuplicateEliminator(template), alreadyInOrder, estimatedRows,
			estimatedRowSize);
		// For the above sort, on the above input rows, we expect
		// the output rows to look like this (ordered on columns 1 and 2,
		// with the duplicate (18, 1, 2) removed):
		T_AccessRow expectedRow[] = new T_AccessRow[3];
		expectedRow[0] = new T_AccessRow(18, 1, 2);
		expectedRow[1] = new T_AccessRow( 6, 1, 18);
		expectedRow[2] = new T_AccessRow( 8, 14, 3);
		return testSort(tc, row, expectedRow, sortid);
	}
/**
This test covers specific code paths in the external sort's
sort buffer. It really should live closer to the sort buffer
since the effectiveness of this test is very very implementation
dependent.
**/
boolean sortBufferCoverage(TransactionController tc)
throws StandardException
{
REPORT("(sortBufferCoverage)");
// Create the rows to be sorted. This sequence of values
// will provoke both single and double rotations on insert
// and both single and double rotations on removal. Every
// row has a duplicate so that we can test duplicate handling
// in every tree position and through all manipulations.
T_AccessRow row[] = new T_AccessRow[16];
row[0] = new T_AccessRow(2, 0, 0); // first node
row[1] = new T_AccessRow(2, 0, 0);
row[2] = new T_AccessRow(4, 0, 0); // This makes the tree get higher [A7 case (i)]
row[3] = new T_AccessRow(4, 0, 0);
row[4] = new T_AccessRow(1, 0, 0); // This makes the tree more balanced [A7 case (ii)]
row[5] = new T_AccessRow(1, 0, 0);
row[6] = new T_AccessRow(7, 0, 0); // Tree getting higher again [A7 case (i)]
row[7] = new T_AccessRow(7, 0, 0);
row[8] = new T_AccessRow(8, 0, 0); // Tree getting out of balance [A7 case iii]
// Single rotation will fix [A8]
row[9] = new T_AccessRow(8, 0, 0);
row[10] = new T_AccessRow(3, 0, 0); // Tree getting out of balance [A7 case iii]
// Double rotation will fix [A9]
row[11] = new T_AccessRow(3, 0, 0);
row[12] = new T_AccessRow(5, 0, 0); // Tree more balanced [A7 case (ii)]
row[13] = new T_AccessRow(5, 0, 0);
row[14] = new T_AccessRow(6, 0, 0); // Tree getting higher again [A7 case (i)]
row[15] = new T_AccessRow(6, 0, 0);
// RESOLVE (nat) Should select the sort that being tested here.
Properties implParameters = null;
T_AccessRow template = row[0];
// Sort on column 0 (the first column) ascending
ColumnOrdering order[] = new ColumnOrdering[1];
order[0] = new T_ColumnOrderingImpl(0, true); // ascending
// The rows are not already in order.
boolean alreadyInOrder = false;
long estimatedRows = 20;
int estimatedRowSize = 12;
// Create the sort.
long sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DummySortObserver(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
// Rows should come out in order
T_AccessRow expectedRow[] = new T_AccessRow[16];
expectedRow[0] = new T_AccessRow(1, 0, 0);
expectedRow[1] = new T_AccessRow(1, 0, 0);
expectedRow[2] = new T_AccessRow(2, 0, 0);
expectedRow[3] = new T_AccessRow(2, 0, 0);
expectedRow[4] = new T_AccessRow(3, 0, 0);
expectedRow[5] = new T_AccessRow(3, 0, 0);
expectedRow[6] = new T_AccessRow(4, 0, 0);
expectedRow[7] = new T_AccessRow(4, 0, 0);
expectedRow[8] = new T_AccessRow(5, 0, 0);
expectedRow[9] = new T_AccessRow(5, 0, 0);
expectedRow[10] = new T_AccessRow(6, 0, 0);
expectedRow[11] = new T_AccessRow(6, 0, 0);
expectedRow[12] = new T_AccessRow(7, 0, 0);
expectedRow[13] = new T_AccessRow(7, 0, 0);
expectedRow[14] = new T_AccessRow(8, 0, 0);
expectedRow[15] = new T_AccessRow(8, 0, 0);
return testSort(tc, row, expectedRow, sortid);
}
/**
Test a sorts with one or zero rows.
**/
boolean sortBoundaries(TransactionController tc)
throws StandardException
{
int failcount = 0;
long sortid;
Properties implParameters;
T_AccessRow template;
ColumnOrdering order[];
boolean alreadyInOrder;
long estimatedRows;
int estimatedRowSize;
T_AccessRow input[];
T_AccessRow expected[];
/*
** The following sort parameters are the same for
** every sort tested in this method.
*/
implParameters = null;
template = new T_AccessRow(1, 1, 1);
order = new ColumnOrdering[1];
order[0] = new T_ColumnOrderingImpl(0, true); // ascending
estimatedRows = 10;
estimatedRowSize = 12;
/*
** A no-row sort.
*/
REPORT("(sortBoundaries) Sorting no rows");
input = new T_AccessRow[0]; // no rows in..
expected = new T_AccessRow[0]; // .. ==> no rows out!
alreadyInOrder = false;
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DummySortObserver(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
/*
** A no-row already in order sort.
*/
REPORT("(sortBoundaries) Sorting no rows - \"already in order\"");
input = new T_AccessRow[0]; // no rows in..
expected = new T_AccessRow[0]; // .. ==> no rows out!
alreadyInOrder = true;
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DummySortObserver(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
/*
** A single-row sort.
*/
REPORT("(sortBoundaries) Sorting a single row");
input = new T_AccessRow[1];
input[0] = new T_AccessRow(99, 88, 77);
expected = new T_AccessRow[1];
expected[0] = new T_AccessRow(99, 88, 77);
alreadyInOrder = false;
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DummySortObserver(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
/*
** A single-row already-in-order sort.
*/
REPORT("(sortBoundaries) Sorting a single row - \"already in order\"");
input = new T_AccessRow[1];
input[0] = new T_AccessRow(99, 88, 77);
expected = new T_AccessRow[1];
expected[0] = new T_AccessRow(99, 88, 77);
alreadyInOrder = true;
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DummySortObserver(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
/*
** A single-row sort, eliminating duplicates
*/
REPORT("(sortBoundaries) Sorting a single row - \"eliminate duplicates\"");
input = new T_AccessRow[1];
input[0] = new T_AccessRow(99, 88, 77);
expected = new T_AccessRow[1];
expected[0] = new T_AccessRow(99, 88, 77);
alreadyInOrder = false;
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DuplicateEliminator(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
return failcount == 0;
}
/**
Test a sort where all the rows are duplicates
**/
boolean sortAllDuplicates(TransactionController tc)
throws StandardException
{
int failcount = 0;
long sortid;
Properties implParameters;
T_AccessRow template;
ColumnOrdering order[];
boolean alreadyInOrder;
long estimatedRows;
int estimatedRowSize;
T_AccessRow input[];
T_AccessRow expected[];
/*
** The following sort parameters will be used in every
** sort in this method.
*/
implParameters = null;
template = new T_AccessRow(1, 1, 1);
// Ordering first two columns, ascending
order = new ColumnOrdering[2];
order[0] = new T_ColumnOrderingImpl(0, true); // ascending
order[1] = new T_ColumnOrderingImpl(1, true); // ascending
alreadyInOrder = false;
estimatedRows = 10;
estimatedRowSize = 12;
input = new T_AccessRow[5];
input[0] = new T_AccessRow(1, 1, 1);
input[1] = new T_AccessRow(1, 1, 1);
input[2] = new T_AccessRow(1, 1, 1);
input[3] = new T_AccessRow(1, 1, 1);
input[4] = new T_AccessRow(1, 1, 1);
/*
** When doing no aggregation, we expect every duplicate
** to come back out.
*/
REPORT("(sortAllDuplicates) no aggregation");
expected = new T_AccessRow[5];
expected[0] = new T_AccessRow(1, 1, 1);
expected[1] = new T_AccessRow(1, 1, 1);
expected[2] = new T_AccessRow(1, 1, 1);
expected[3] = new T_AccessRow(1, 1, 1);
expected[4] = new T_AccessRow(1, 1, 1);
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DummySortObserver(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
/*
** If we're doing duplicate elimination, we expect
** one row back (since they're all duplicates).
*/
REPORT("(sortAllDuplicates) eliminate duplicates");
expected = new T_AccessRow[1];
expected[0] = new T_AccessRow(1, 1, 1);
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_DuplicateEliminator(template), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
/*
** Another aggregation, this time summing up the
** third column.
*/
REPORT("(sortAllDuplicates) sum aggregate");
expected = new T_AccessRow[1];
expected[0] = new T_AccessRow(1, 1, 5);
sortid = tc.createSort(implParameters, template.getRowArray(),
order, new T_SumForIntCol(2), alreadyInOrder, estimatedRows,
estimatedRowSize);
if (!testSort(tc, input, expected, sortid))
failcount++;
return failcount == 0;
}
/**
 Test a sort where we have some ascending and descending keys.

 Runs three cases over the same ten-row input: a plain
 descending/descending sort, the same ordering with duplicate
 elimination, and a descending/ascending ordering with duplicate
 elimination.

 @return true if every case produced the expected output
 @exception StandardException from the underlying store operations
 **/
boolean sortDescending(TransactionController tc)
    throws StandardException
{
    // Sort parameters shared by every case in this method.
    Properties sortProperties = null;
    T_AccessRow rowTemplate = new T_AccessRow(1, 1, 1);
    boolean presorted = false;
    long rowEstimate = 10;
    int rowSizeEstimate = 12;

    int errors = 0;
    long sortId;
    ColumnOrdering[] ordering;
    T_AccessRow[] expectedRows;

    // Case 1: straight descending sort, duplicates retained.
    REPORT("(sortDescending) no aggregation");
    ordering = new ColumnOrdering[] {
        new T_ColumnOrderingImpl(0, false),   // descending
        new T_ColumnOrderingImpl(1, false)    // descending
    };
    expectedRows = new T_AccessRow[] {
        new T_AccessRow(8, 1, 1),
        new T_AccessRow(4, 8, 1),
        new T_AccessRow(4, 2, 1),
        new T_AccessRow(4, 1, 1),
        new T_AccessRow(3, 8, 1),
        new T_AccessRow(3, 5, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(1, 1, 1)
    };
    sortId = tc.createSort(sortProperties, rowTemplate.getRowArray(),
        ordering, new T_DummySortObserver(rowTemplate), presorted,
        rowEstimate, rowSizeEstimate);
    if (!testSort(tc, getSortDescendingInput(), expectedRows, sortId))
        errors++;

    // Case 2: same descending ordering, but duplicates are dropped.
    REPORT("(sortDescending) eliminate duplicates");
    ordering = new ColumnOrdering[] {
        new T_ColumnOrderingImpl(0, false),   // descending
        new T_ColumnOrderingImpl(1, false)    // descending
    };
    expectedRows = new T_AccessRow[] {
        new T_AccessRow(8, 1, 1),
        new T_AccessRow(4, 8, 1),
        new T_AccessRow(4, 2, 1),
        new T_AccessRow(4, 1, 1),
        new T_AccessRow(3, 8, 1),
        new T_AccessRow(3, 5, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(1, 1, 1)
    };
    sortId = tc.createSort(sortProperties, rowTemplate.getRowArray(),
        ordering, new T_DuplicateEliminator(rowTemplate), presorted,
        rowEstimate, rowSizeEstimate);
    if (!testSort(tc, getSortDescendingInput(), expectedRows, sortId))
        errors++;

    // Case 3: first key descending, second key ascending,
    // duplicates dropped.
    REPORT("(sortDescending) descending/ascending - eliminate duplicates");
    ordering = new ColumnOrdering[] {
        new T_ColumnOrderingImpl(0, false),   // descending
        new T_ColumnOrderingImpl(1, true)     // ascending
    };
    expectedRows = new T_AccessRow[] {
        new T_AccessRow(8, 1, 1),
        new T_AccessRow(4, 1, 1),
        new T_AccessRow(4, 2, 1),
        new T_AccessRow(4, 8, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(3, 5, 1),
        new T_AccessRow(3, 8, 1),
        new T_AccessRow(1, 1, 1)
    };
    sortId = tc.createSort(sortProperties, rowTemplate.getRowArray(),
        ordering, new T_DuplicateEliminator(rowTemplate), presorted,
        rowEstimate, rowSizeEstimate);
    if (!testSort(tc, getSortDescendingInput(), expectedRows, sortId))
        errors++;

    return errors == 0;
}
/**
 Build the ten-row input used by the sortDescending() test cases.
 The rows are deliberately out of order and contain duplicates.
 **/
private T_AccessRow[] getSortDescendingInput()
{
    return new T_AccessRow[] {
        new T_AccessRow(8, 1, 1),
        new T_AccessRow(1, 1, 1),
        new T_AccessRow(3, 5, 1),
        new T_AccessRow(4, 1, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(3, 8, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(3, 3, 1),
        new T_AccessRow(4, 2, 1),
        new T_AccessRow(4, 8, 1)
    };
}
/**
 Insert the given rows into the given sort, and check that the
 rows retrieved from the sort match the output rows.

 Also cross-checks the statistics reported through SortInfo and
 ScanInfo against the actual row counts, and verifies the
 transaction controller's open-sort bookkeeping before and after
 the sort is dropped.

 @param tc     transaction used to open the sort and the sort scan
 @param in     rows to insert into the sort
 @param outrow rows expected back from the sort, in order
 @param sortid id of a sort previously created via tc.createSort()
 @return true if all rows came back as expected, false otherwise
 @exception StandardException from the underlying store operations
 **/
boolean testSort(TransactionController tc, T_AccessRow in[], T_AccessRow outrow[], long sortid)
    throws StandardException
{
    // Open a sort controller for inserting the rows.
    SortController sort = tc.openSort(sortid);

    // Insert the rows to be sorted.
    for (int i = 0; i < in.length; i++)
    {
        if (verbose)
            REPORT("(testSort) in: " + in[i]);
        sort.insert(in[i].getRowArray());
    }

    // Close the sort controller. This makes the rows
    // available to be retrieved.
    // It also means we are getting final sort statistics.
    sort.completedInserts();

    // Test the SortInfo part of sort: parse the row counts back out
    // of the (localized) statistics property strings.
    SortInfo sort_info = sort.getSortInfo();
    Properties sortprop = sort_info.getAllSortInfo(null);
    String sortType = sortprop.getProperty(
        MessageService.getTextMessage(SQLState.STORE_RTS_SORT_TYPE));
    int numRowsInput = Integer.parseInt(sortprop.getProperty(
        MessageService.getTextMessage(SQLState.STORE_RTS_NUM_ROWS_INPUT)));
    int numRowsOutput = Integer.parseInt(sortprop.getProperty(
        MessageService.getTextMessage(SQLState.STORE_RTS_NUM_ROWS_OUTPUT)));
    String external =
        MessageService.getTextMessage(SQLState.STORE_RTS_EXTERNAL);
    String internal =
        MessageService.getTextMessage(SQLState.STORE_RTS_INTERNAL);

    // The sort must report itself as either "internal" or "external";
    // any other value is a statistics bug.
    if (sortType.compareTo(internal) != 0 &&
        sortType.compareTo(external) != 0)
        FAIL("(testSort) unknown sortType. Expected internal or external, got " + sortType);

    // Reported input/output row counts must match what we actually
    // inserted and what we expect to read back.
    if (numRowsInput != in.length)
        FAIL("(testSort) SortInfo.numRowsInput (value: " + numRowsInput +
            ") is not equal to in.length (value: " + in.length + ")");
    if (numRowsOutput != outrow.length)
        FAIL("(testSort) SortInfo.numRowsOutput (value: " +
            numRowsOutput + ") is not equal to outrow.length (value: " + outrow.length + ")");

    // For an external (spilled) sort, the individual merge-run sizes
    // must agree with both the reported run count and the total
    // number of input rows.
    if (sortType.equals(external))
    {
        int numMergeRuns = Integer.parseInt(sortprop.getProperty(
            MessageService.getTextMessage(SQLState.STORE_RTS_NUM_MERGE_RUNS)));
        Vector mergeRuns = new Vector();
        // The run sizes come back as a bracketed, comma-separated
        // list, e.g. "[3,4,3]"; tokenize it into integers.
        StringTokenizer st = new StringTokenizer(sortprop.getProperty(
            MessageService.getTextMessage(SQLState.STORE_RTS_MERGE_RUNS_SIZE)),
            "[],",false);
        while (st.hasMoreTokens())
            mergeRuns.addElement(Integer.valueOf(st.nextToken().trim()));
        if (mergeRuns.size() != numMergeRuns)
            FAIL("(testSort) the number of elements in vector SortInfo.mergeRunsSize (value: " +
                mergeRuns.size() + " ) is not equal to SortInfo.numMergeRuns (value: " +
                numMergeRuns + " )");
        int totRunSize = 0;
        for (int i = 0; i < mergeRuns.size(); i++)
            totRunSize += ((Integer) mergeRuns.elementAt(i)).intValue();
        if (totRunSize != numRowsInput)
            FAIL("(testSort) the sum of the elements of the vector SortInfo.mergeRunsSize (value: " +
                totRunSize + " ) is not equal to SortInfo.numRowsInput (value: " +
                numRowsInput + " )");
    }
    sort = null;

    // Open a sort scan for reading the rows back.
    ScanController scan = tc.openSortScan(sortid, false);

    // Things that could go wrong.
    boolean mismatch = false;
    boolean toofew = false;
    boolean toomany = false;

    // Fetch the sorted rows and compare them to the rows
    // in the outrow array.
    T_AccessRow result = new T_AccessRow(3);
    for (int i = 0; i < outrow.length; i++)
    {
        if (scan.next() == false)
        {
            // We were expecting the i'th row from outrow, but
            // it didn't show up!
            toofew = true;
            FAIL("(testSort) Too few rows in sort output");
            break;
        }
        scan.fetch(result.getRowArray());
        if (verbose)
            REPORT("(testSort) out: " + result);
        if (!result.equals(outrow[i]))
        {
            // The i'th row from the sort didn't match the
            // i'th row from out.
            mismatch = true;
            FAIL("(testSort) row " + result + " != " + outrow[i]);
        }
    }

    // We should not see any more rows out of the sort,
    // since we've exhausted the out array.
    while (scan.next() == true)
    {
        scan.fetch(result.getRowArray());
        if (verbose)
            REPORT("(testSort) out: " + result);
        toomany = true;
        FAIL("(testSort) Extra row");
    }

    // Test the ScanInfo part of sort: the scan type must report
    // itself as a sort scan.
    ScanInfo scan_info = scan.getScanInfo();
    Properties prop = scan_info.getAllScanInfo(null);
    if (prop.getProperty(
            MessageService.getTextMessage(SQLState.STORE_RTS_SCAN_TYPE)
            ).compareTo(
            MessageService.getTextMessage(SQLState.STORE_RTS_SORT)) != 0)
    {
        FAIL("(testSort) wrong scanType. Expected sort, got " +
            prop.getProperty(
                MessageService.getTextMessage(
                    SQLState.STORE_RTS_SCAN_TYPE)));
    }

    // Exactly one sort should be open on this transaction.
    if (tc.countOpens(TransactionController.OPEN_CREATED_SORTS) != 1)
    {
        FAIL("(testSort) sort count before close is wrong: " +
            tc.countOpens(TransactionController.OPEN_CREATED_SORTS));
    }

    // Close the scan controller (which implicitly destroys the sort).
    // NOTE(review): the created-sort count is still expected to be 1
    // here and only drops after the explicit dropSort() below --
    // presumably countOpens tracks created sorts, not open scans;
    // confirm against TransactionController's contract.
    scan.close();
    scan = null;
    if (tc.countOpens(TransactionController.OPEN_CREATED_SORTS) != 1)
    {
        FAIL("(testSort) sort count after close is wrong: " +
            tc.countOpens(TransactionController.OPEN_CREATED_SORTS));
    }

    // Dropping the sort must release the last created-sort slot.
    tc.dropSort(sortid);
    if (tc.countOpens(TransactionController.OPEN_CREATED_SORTS) > 0)
    {
        FAIL("(testSort) a sort is still open.");
    }

    return (!mismatch && !toofew && !toomany);
}
}
/**
 Sort observer that performs no aggregation: every row, duplicate or
 not, is passed straight through to the sorter. It also keeps a small
 free list of row arrays so the sorter can reuse them.
 **/
class T_DummySortObserver implements SortObserver
{
    T_AccessRow template;
    Vector vector;

    T_DummySortObserver(T_AccessRow template)
    {
        this.template = template;
        this.vector = new Vector();
    }

    /*
     * Methods of SortObserver
     */

    public DataValueDescriptor[] insertNonDuplicateKey(
        DataValueDescriptor[] insertRow)
    {
        // Keep every non-duplicate row as-is.
        return insertRow;
    }

    public DataValueDescriptor[] insertDuplicateKey(
        DataValueDescriptor[] insertRow,
        DataValueDescriptor[] existingRow)
    {
        // Keep duplicates too -- no aggregation or elimination here.
        return insertRow;
    }

    public void addToFreeList(
        DataValueDescriptor[] objectArray,
        int maxFreeListSize)
    {
        // Cache the array for reuse unless the free list is full.
        if (vector.size() < maxFreeListSize)
        {
            vector.addElement(objectArray);
        }
    }

    public DataValueDescriptor[] getArrayClone()
        throws StandardException
    {
        // Prefer recycling an array from the free list; fall back to
        // cloning the template row when the list is empty.
        if (!vector.isEmpty())
        {
            int last = vector.size() - 1;
            DataValueDescriptor[] recycled =
                (DataValueDescriptor[]) vector.elementAt(last);
            vector.removeElementAt(last);
            return recycled;
        }
        return template.getRowArrayClone();
    }
}
/**
 Sort observer that eliminates duplicates: the first row seen for a
 given key is kept, and any later row with the same key is discarded.
 **/
class T_DuplicateEliminator extends T_DummySortObserver
{
    T_DuplicateEliminator(T_AccessRow template)
    {
        super(template);
    }

    /*
     * Methods of SortObserver
     */

    public DataValueDescriptor[] insertNonDuplicateKey(
        DataValueDescriptor[] insertRow)
    {
        // First occurrence of this key: keep the row.
        return insertRow;
    }

    public DataValueDescriptor[] insertDuplicateKey(
        DataValueDescriptor[] insertRow,
        DataValueDescriptor[] existingRow)
    {
        // Returning null tells the sorter to drop the duplicate.
        return null;
    }
}
/**
 Sort observer that aggregates duplicate rows by summing one integer
 column: when a row with an already-seen key arrives, the configured
 column's value is added into the row already held by the sorter and
 the incoming row is discarded.
 **/
class T_SumForIntCol implements SortObserver
{
    // Zero-based index of the SQLInteger column to sum; fixed at
    // construction, so declared final.
    private final int columnId;

    /**
     @param columnId zero-based index of the column to aggregate
     **/
    T_SumForIntCol(int columnId)
    {
        this.columnId = columnId;
    }

    /*
     * Methods of SortObserver
     */

    public DataValueDescriptor[] insertNonDuplicateKey(
        DataValueDescriptor[] insertRow)
    {
        // First row for this key: keep it as the running aggregate.
        return insertRow;
    }

    public DataValueDescriptor[] insertDuplicateKey(
        DataValueDescriptor[] insertRow,
        DataValueDescriptor[] existingRow)
        throws StandardException
    {
        // We know, because this is a test program and it's only
        // used this way, that we can safely cast the arguments
        // to SQLInteger.
        SQLInteger increment = (SQLInteger) insertRow[columnId];
        SQLInteger sum = (SQLInteger) existingRow[columnId];

        // Perform the aggregation in place on the existing row, then
        // discard the incoming duplicate by returning null.
        sum.plus(sum, increment, sum);
        return null;
    }

    public void addToFreeList(
        DataValueDescriptor[] objectArray,
        int maxFreeListSize)
    {
        // Intentionally empty: this observer keeps no free list.
    }

    public DataValueDescriptor[] getArrayClone()
        throws StandardException
    {
        // NOTE(review): returns null -- presumably the sorter never
        // requests a clone from this observer in these tests; confirm
        // before reusing this class elsewhere.
        return null;
    }
}