gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.vizzini.example.boardgame.tictactoe;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.List;
import org.junit.Test;
import org.vizzini.core.TimePrinter;
import org.vizzini.core.game.Agent;
import org.vizzini.core.game.boardgame.Search;
/**
 * Provides tests for implementations of the <code>Search</code> interface.
 *
 * <p>
 * Concrete subclasses supply the <code>Search</code> under test via the constructor. Each test runs the search
 * against a tic-tac-toe position whose best move is known and verifies the returned action.
 * </p>
 */
public abstract class MyTestSearch
{
    /** Search implementation under test; never null. */
    private final Search search;

    /**
     * Construct this object.
     *
     * @param search Search. (required)
     *
     * @throws IllegalArgumentException if search is null.
     */
    @SuppressWarnings("hiding")
    public MyTestSearch(final Search search)
    {
        if (search == null)
        {
            throw new IllegalArgumentException("search is null");
        }

        this.search = search;
    }

    /**
     * Test the <code>search()</code> method: O finds the immediate win at depth one.
     */
    @Test
    public void searchDepth1OWins()
    {
        // Setup.
        final int maxPlies = 1;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final TTTEnvironment environment = createEnvironmentDepthOne(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c1, agentO, environment);
    }

    /**
     * Test the <code>search()</code> method: X finds the immediate win at depth one.
     */
    @Test
    public void searchDepth1XWins()
    {
        // Setup.
        final int maxPlies = 1;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthOne(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c1, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: O blocks X's column-b threat at depth two.
     */
    @Test
    public void searchDepth2OBlocks()
    {
        // Setup.
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final int maxPlies = 2;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = createEnvironmentOMustBlock(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b3, agentO, environment);
    }

    /**
     * Test the <code>search()</code> method: O still takes the immediate win when looking two plies ahead.
     */
    @Test
    public void searchDepth2OWins()
    {
        // Setup.
        final int maxPlies = 2;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final TTTEnvironment environment = createEnvironmentDepthOne(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c1, agentO, environment);
    }

    /**
     * Test the <code>search()</code> method: X still takes the immediate win when looking two plies ahead.
     */
    @Test
    public void searchDepth2XWins()
    {
        // Setup.
        final int maxPlies = 2;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthOne(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c1, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: on an empty board at depth three, X opens in the center.
     */
    @Test
    public void searchDepth3EmptyBoard()
    {
        // Setup.
        final int maxPlies = 3;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = new TTTEnvironment();
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b2, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: O blocks X's column-b threat at depth three.
     */
    @Test
    public void searchDepth3OBlocks()
    {
        // Setup.
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final int maxPlies = 3;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = createEnvironmentOMustBlock(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b3, agentO, environment);
    }

    /**
     * Test the <code>search()</code> method: O completes its column-b win at depth three.
     */
    @Test
    public void searchDepth3OWins()
    {
        // Setup.
        final int maxPlies = 3;
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = new TTTEnvironment();
        environment.placeToken(TTTPosition.a1, TTTToken.X.withAgent(agentX));
        environment.placeToken(TTTPosition.b2, TTTToken.O.withAgent(agentO));
        environment.placeToken(TTTPosition.c3, TTTToken.X.withAgent(agentX));
        environment.placeToken(TTTPosition.b1, TTTToken.O.withAgent(agentO));
        environment.placeToken(TTTPosition.a3, TTTToken.X.withAgent(agentX));
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify. (This test intentionally does not assert the environment.)
        assertNotNull(result);
        assertThat(result.getPosition(), is(TTTPosition.b3));
        assertThat((TTTSearchAgent)result.getAgent(), is(agentO));
    }

    /**
     * Test the <code>search()</code> method: X finds the win at depth three.
     */
    @Test
    public void searchDepth3XWins()
    {
        // Setup.
        final int maxPlies = 3;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthThree(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c3, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: X finds the win at depth four.
     */
    @Test
    public void searchDepth4XWins()
    {
        // Setup.
        final int maxPlies = 4;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthThree(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c3, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: on an empty board at depth five, X opens in the center.
     */
    @Test
    public void searchDepth5EmptyBoard()
    {
        // Setup.
        final int maxPlies = 5;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = new TTTEnvironment();
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b2, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: X finds the win at depth five (a deeper search prefers c1 over c3).
     */
    @Test
    public void searchDepth5XWins()
    {
        // Setup.
        final int maxPlies = 5;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthThree(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c1, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: O blocks X's column-b threat at depth six.
     */
    @Test
    public void searchDepth6OBlocks()
    {
        // Setup.
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final int maxPlies = 6;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = createEnvironmentOMustBlock(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b3, agentO, environment);
    }

    /**
     * Test the <code>search()</code> method: O blocks X's diagonal threat at depth six.
     */
    @Test
    public void searchDepth6OBlocks0()
    {
        // Setup.
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final int maxPlies = 6;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = new TTTEnvironment();
        final List<Agent> agents = new ArrayList<Agent>();
        agents.add(agentX);
        agents.add(agentO);
        environment.placeInitialTokens(agents);
        final TTTToken tokenX = TTTToken.X.withAgent(agentX);
        final TTTToken tokenO = TTTToken.O.withAgent(agentO);
        environment.placeToken(TTTPosition.c1, tokenX);
        environment.placeToken(TTTPosition.b2, tokenO);
        environment.placeToken(TTTPosition.a3, tokenX);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b1, agentO, environment);
    }

    /**
     * Test the <code>search()</code> method: O blocks X's column-b threat at depth six (same position as
     * {@link #searchDepth6OBlocks()}; kept for regression coverage).
     */
    @Test
    public void searchDepth6OBlocks1()
    {
        // Setup.
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final int maxPlies = 6;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = createEnvironmentOMustBlock(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b3, agentO, environment);
    }

    /**
     * Test the <code>search()</code> method: O sets up a fork at depth six, then completes the win at depth three.
     */
    @Test
    public void searchDepth6OWins()
    {
        // Setup.
        final int maxPlies = 6;
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = new TTTEnvironment();
        environment.placeToken(TTTPosition.a1, TTTToken.X.withAgent(agentX));
        environment.placeToken(TTTPosition.b2, TTTToken.O.withAgent(agentO));
        environment.placeToken(TTTPosition.c3, TTTToken.X.withAgent(agentX));
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        assertNotNull(result);
        assertThat(result.getPosition(), is(TTTPosition.b1));
        assertThat((TTTSearchAgent)result.getAgent(), is(agentO));

        // Play the recommended move and X's reply, then search again with a shallower horizon.
        environment.placeToken(TTTPosition.b1, TTTToken.O.withAgent(agentO));
        environment.placeToken(TTTPosition.a3, TTTToken.X.withAgent(agentX));

        // Run.
        result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, 3);

        // Verify.
        assertNotNull(result);
        assertThat(result.getPosition(), is(TTTPosition.b3));
        assertThat((TTTSearchAgent)result.getAgent(), is(agentO));
    }

    /**
     * Test the <code>search()</code> method: X finds the win at depth six.
     */
    @Test
    public void searchDepth6XWins()
    {
        // Setup.
        final int maxPlies = 6;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthThree(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c1, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: on an empty board at depth seven, X opens in the center.
     */
    @Test
    public void searchDepth7EmptyBoard()
    {
        // Setup.
        final int maxPlies = 7;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = new TTTEnvironment();
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b2, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: X finds the win at depth seven.
     */
    @Test
    public void searchDepth7XWins()
    {
        // Setup.
        final int maxPlies = 7;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthThree(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.c1, agentX, environment);
    }

    /**
     * Test the <code>search()</code> method: exhaustive search of the empty board, with timing output.
     */
    @Test
    public void searchDepth9EmptyBoard()
    {
        final long start = System.currentTimeMillis();

        // Setup.
        final int maxPlies = 9;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = new TTTEnvironment();
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentX, agentO, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.a1, agentX, environment);

        final long end = System.currentTimeMillis();
        final TimePrinter timePrinter = new TimePrinter();
        System.out.println(timePrinter.formatElapsedTime("searchDepth9EmptyBoard()", start, end));
    }

    /**
     * Test the <code>search()</code> method: play out the opening moves of a full game, alternating agents.
     */
    @Test
    public void searchFullGame()
    {
        // Setup.
        final int maxPlies = 9;
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, maxPlies);
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = new TTTEnvironment();
        final List<Agent> agents = new ArrayList<Agent>();
        agents.add(agentX);
        agents.add(agentO);
        environment.placeInitialTokens(agents);
        final TTTAdjudicator adjudicator = createAdjudicator();
        // Expected best move at each turn; X moves on even indices, O on odd.
        final TTTPosition[] positions = { TTTPosition.a1, TTTPosition.b2, TTTPosition.b1, TTTPosition.c1, };

        for (int i = 0; i < positions.length; i++)
        {
            final Agent activeAgent = ((i % 2) == 0 ? agentX : agentO);
            final Agent opponent = (activeAgent == agentX ? agentO : agentX);

            // Run.
            final TTTAction result = (TTTAction)search
                    .search(environment, adjudicator, activeAgent, opponent, maxPlies);

            // Verify.
            assertNotNull(result);
            assertThat("i = " + i, result.getPosition(), is(positions[i]));
            assertThat("i = " + i, result.getAgent(), is(activeAgent));

            result.doIt();
            // System.out.println(environment);
        }
    }

    /**
     * Test the <code>search()</code> method: a negative maxPlies is rejected.
     */
    @Test
    public void searchMaxPliesNegative()
    {
        // Setup.
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, 1);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthOne(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run / Verify.
        try
        {
            search.search(environment, adjudicator, agentX, agentO, -1);
            fail("Should have thrown an exception");
        }
        catch (final IllegalArgumentException e)
        {
            assertThat(e.getMessage(), is("maxPlies is zero or less"));
        }
    }

    /**
     * Test the <code>search()</code> method: a zero maxPlies is rejected.
     */
    @Test
    public void searchMaxPliesZero()
    {
        // Setup.
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, 1);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthOne(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run / Verify.
        try
        {
            // FIX: previously passed -1 (a copy-paste of searchMaxPliesNegative); this test covers the zero boundary.
            search.search(environment, adjudicator, agentX, agentO, 0);
            fail("Should have thrown an exception");
        }
        catch (final IllegalArgumentException e)
        {
            assertThat(e.getMessage(), is("maxPlies is zero or less"));
        }
    }

    /**
     * Test the <code>search()</code> method: each null argument is rejected with a descriptive message.
     */
    @Test
    public void searchNull()
    {
        // Setup.
        final TTTSearchAgent agentX = createSearchAgent(TTTTeam.X, 1);
        final SimpleAgent agentO = createSimpleAgent(TTTTeam.O);
        final TTTEnvironment environment = createEnvironmentDepthOne(agentX, agentO);
        final TTTAdjudicator adjudicator = createAdjudicator();
        final int depth = 10;

        try
        {
            search.search(environment, adjudicator, null, agentO, depth);
            fail("Should have thrown an exception");
        }
        catch (final IllegalArgumentException e)
        {
            assertThat(e.getMessage(), is("agent is null"));
        }

        try
        {
            search.search(environment, adjudicator, agentX, null, depth);
            fail("Should have thrown an exception");
        }
        catch (final IllegalArgumentException e)
        {
            assertThat(e.getMessage(), is("opponent is null"));
        }

        try
        {
            search.search(null, adjudicator, agentX, agentO, depth);
            fail("Should have thrown an exception");
        }
        catch (final IllegalArgumentException e)
        {
            assertThat(e.getMessage(), is("environment is null"));
        }

        try
        {
            search.search(environment, null, agentX, agentO, depth);
            fail("Should have thrown an exception");
        }
        catch (final IllegalArgumentException e)
        {
            assertThat(e.getMessage(), is("adjudicator is null"));
        }
    }

    /**
     * Test the <code>search()</code> method: O completes its column-b win (initial tokens placed).
     */
    @Test
    public void searchOForTheWin()
    {
        // Setup.
        final SimpleAgent agentX = createSimpleAgent(TTTTeam.X);
        final int maxPlies = 4;
        final TTTSearchAgent agentO = createSearchAgent(TTTTeam.O, maxPlies);
        final TTTEnvironment environment = new TTTEnvironment();
        final List<Agent> agents = new ArrayList<Agent>();
        agents.add(agentX);
        agents.add(agentO);
        environment.placeInitialTokens(agents);
        final TTTToken tokenX = TTTToken.X.withAgent(agentX);
        final TTTToken tokenO = TTTToken.O.withAgent(agentO);
        environment.placeToken(TTTPosition.a1, tokenX);
        environment.placeToken(TTTPosition.b2, tokenO);
        environment.placeToken(TTTPosition.c3, tokenX);
        environment.placeToken(TTTPosition.b1, tokenO);
        environment.placeToken(TTTPosition.a3, tokenX);
        final TTTAdjudicator adjudicator = createAdjudicator();

        // Run.
        final TTTAction result = (TTTAction)search.search(environment, adjudicator, agentO, agentX, maxPlies);

        // Verify.
        verifyResult(result, TTTPosition.b3, agentO, environment);
    }

    /**
     * @return a new adjudicator.
     */
    protected TTTAdjudicator createAdjudicator()
    {
        final EnvironmentStringifier environmentStringifier = new EnvironmentStringifier();

        return new TTTAdjudicator(environmentStringifier);
    }

    /**
     * @param agentX Agent X.
     * @param agentO Agent O.
     *
     * @return a new environment one ply from an X win: X at a3 and center, O at a1 and b1.
     */
    protected TTTEnvironment createEnvironmentDepthOne(final Agent agentX, final Agent agentO)
    {
        final TTTEnvironment environment = new TTTEnvironment();
        environment.placeToken(TTTPosition.a3, TTTToken.X.withAgent(agentX));
        environment.placeToken(TTTPosition.a1, TTTToken.O.withAgent(agentO));
        environment.placeToken(TTTPosition.CENTER, TTTToken.X.withAgent(agentX));
        environment.placeToken(TTTPosition.b1, TTTToken.O.withAgent(agentO));

        return environment;
    }

    /**
     * @param agentX Agent X.
     * @param agentO Agent O.
     *
     * @return a new environment three plies from an X win: X at a3, O at a1.
     */
    protected TTTEnvironment createEnvironmentDepthThree(final Agent agentX, final Agent agentO)
    {
        final TTTEnvironment environment = new TTTEnvironment();
        environment.placeToken(TTTPosition.a3, TTTToken.X.withAgent(agentX));
        environment.placeToken(TTTPosition.a1, TTTToken.O.withAgent(agentO));

        return environment;
    }

    /**
     * @param team Team.
     * @param maxPlies Maximum plies.
     *
     * @return a new search agent using the search under test.
     */
    protected TTTSearchAgent createSearchAgent(final TTTTeam team, final int maxPlies)
    {
        return new TTTSearchAgent(team.getName(), team, search, maxPlies);
    }

    /**
     * @param team Team.
     *
     * @return a new simple (non-searching) agent.
     */
    protected SimpleAgent createSimpleAgent(final TTTTeam team)
    {
        final TTTActionGenerator actionGenerator = new TTTActionGenerator();

        return new SimpleAgent(team.getName(), team, actionGenerator);
    }

    /**
     * @param agentX Agent X.
     * @param agentO Agent O.
     *
     * @return a new environment (initial tokens placed) in which X threatens to win column b: X at b2 and b1, O at a1.
     */
    private TTTEnvironment createEnvironmentOMustBlock(final Agent agentX, final Agent agentO)
    {
        final TTTEnvironment environment = new TTTEnvironment();
        final List<Agent> agents = new ArrayList<Agent>();
        agents.add(agentX);
        agents.add(agentO);
        environment.placeInitialTokens(agents);

        final TTTToken tokenX = TTTToken.X.withAgent(agentX);
        final TTTToken tokenO = TTTToken.O.withAgent(agentO);
        environment.placeToken(TTTPosition.b2, tokenX);
        environment.placeToken(TTTPosition.a1, tokenO);
        environment.placeToken(TTTPosition.b1, tokenX);

        return environment;
    }

    /**
     * Assert the standard result contract: non-null action with the expected position, agent, and environment.
     *
     * @param result Search result.
     * @param expectedPosition Expected position.
     * @param expectedAgent Expected agent.
     * @param environment Expected environment.
     */
    private void verifyResult(final TTTAction result, final TTTPosition expectedPosition, final Agent expectedAgent,
            final TTTEnvironment environment)
    {
        assertNotNull(result);
        final TTTPosition position = result.getPosition();
        assertNotNull(position);
        assertThat(position, is(expectedPosition));
        assertThat(result.getAgent(), is(expectedAgent));
        assertThat(result.getEnvironment(), is(environment));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.net;
import java.io.*;
import java.lang.management.ManagementFactory;
import java.net.*;
import java.nio.channels.AsynchronousCloseException;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ServerSocketChannel;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import org.cliffc.high_scale_lib.NonBlockingHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.ScheduledExecutors;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.concurrent.TracingAwareExecutorService;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions;
import org.apache.cassandra.db.*;
import org.apache.cassandra.dht.BootStrapper;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.gms.EchoMessage;
import org.apache.cassandra.gms.GossipDigestAck;
import org.apache.cassandra.gms.GossipDigestAck2;
import org.apache.cassandra.gms.GossipDigestSyn;
import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.util.DataOutputPlus;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.locator.ILatencySubscriber;
import org.apache.cassandra.metrics.ConnectionMetrics;
import org.apache.cassandra.metrics.DroppedMessageMetrics;
import org.apache.cassandra.sink.SinkManager;
import org.apache.cassandra.repair.messages.RepairMessage;
import org.apache.cassandra.security.SSLFactory;
import org.apache.cassandra.service.*;
import org.apache.cassandra.service.paxos.Commit;
import org.apache.cassandra.service.paxos.PrepareResponse;
import org.apache.cassandra.tracing.TraceState;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.*;
import org.apache.cassandra.utils.concurrent.SimpleCondition;
public final class MessagingService implements MessagingServiceMBean
{
    /** JMX object name under which this service registers itself. */
    public static final String MBEAN_NAME = "org.apache.cassandra.net:type=MessagingService";

    // 8 bits version, so don't waste versions
    /** Messaging protocol version spoken by 1.2-era nodes. */
    public static final int VERSION_12 = 6;
    /** Messaging protocol version spoken by 2.0-era nodes. */
    public static final int VERSION_20 = 7;
    /** Messaging protocol version spoken by 2.1-era nodes. */
    public static final int VERSION_21 = 8;
    /** The protocol version this node speaks natively. */
    public static final int current_version = VERSION_21;

    // Message parameter key requesting a failure callback.
    // NOTE(review): the truncated value "CAL_BAC" appears intentional (wire-format key); do not "fix" without
    // checking peers that parse it.
    public static final String FAILURE_CALLBACK_PARAM = "CAL_BAC";
    // Shared single-byte payload; presumably used where only the parameter's presence matters — usage not
    // visible in this chunk, confirm before relying on contents.
    public static final byte[] ONE_BYTE = new byte[1];
    // Message parameter key marking a response as a failure response — usage not visible in this chunk.
    public static final String FAILURE_RESPONSE_PARAM = "FAIL";

    /**
     * we preface every message with this number so the recipient can validate the sender is sane
     */
    public static final int PROTOCOL_MAGIC = 0xCA552DFA;

    // True while every known peer speaks VERSION_21 or later; presumably cleared when an older peer is seen —
    // the mutating code is not visible in this chunk.
    private boolean allNodesAtLeast21 = true;
/* All verb handler identifiers */
public enum Verb
{
MUTATION,
@Deprecated BINARY,
READ_REPAIR,
READ,
REQUEST_RESPONSE, // client-initiated reads and writes
@Deprecated STREAM_INITIATE,
@Deprecated STREAM_INITIATE_DONE,
@Deprecated STREAM_REPLY,
@Deprecated STREAM_REQUEST,
RANGE_SLICE,
@Deprecated BOOTSTRAP_TOKEN,
@Deprecated TREE_REQUEST,
@Deprecated TREE_RESPONSE,
@Deprecated JOIN,
GOSSIP_DIGEST_SYN,
GOSSIP_DIGEST_ACK,
GOSSIP_DIGEST_ACK2,
@Deprecated DEFINITIONS_ANNOUNCE,
DEFINITIONS_UPDATE,
TRUNCATE,
SCHEMA_CHECK,
@Deprecated INDEX_SCAN,
REPLICATION_FINISHED,
INTERNAL_RESPONSE, // responses to internal calls
COUNTER_MUTATION,
@Deprecated STREAMING_REPAIR_REQUEST,
@Deprecated STREAMING_REPAIR_RESPONSE,
SNAPSHOT, // Similar to nt snapshot
MIGRATION_REQUEST,
GOSSIP_SHUTDOWN,
_TRACE, // dummy verb so we can use MS.droppedMessages
ECHO,
REPAIR_MESSAGE,
// use as padding for backwards compatability where a previous version needs to validate a verb from the future.
PAXOS_PREPARE,
PAXOS_PROPOSE,
PAXOS_COMMIT,
PAGED_RANGE,
// remember to add new verbs at the end, since we serialize by ordinal
UNUSED_1,
UNUSED_2,
UNUSED_3,
HEARTBEAT_DIGEST,
HEARTBEAT_SHOWDOWN;
}
public static final EnumMap<MessagingService.Verb, Stage> verbStages = new EnumMap<MessagingService.Verb, Stage>(MessagingService.Verb.class)
{{
put(Verb.MUTATION, Stage.MUTATION);
put(Verb.COUNTER_MUTATION, Stage.COUNTER_MUTATION);
put(Verb.READ_REPAIR, Stage.MUTATION);
put(Verb.TRUNCATE, Stage.MUTATION);
put(Verb.PAXOS_PREPARE, Stage.MUTATION);
put(Verb.PAXOS_PROPOSE, Stage.MUTATION);
put(Verb.PAXOS_COMMIT, Stage.MUTATION);
put(Verb.READ, Stage.READ);
put(Verb.RANGE_SLICE, Stage.READ);
put(Verb.INDEX_SCAN, Stage.READ);
put(Verb.PAGED_RANGE, Stage.READ);
put(Verb.REQUEST_RESPONSE, Stage.REQUEST_RESPONSE);
put(Verb.INTERNAL_RESPONSE, Stage.INTERNAL_RESPONSE);
put(Verb.STREAM_REPLY, Stage.MISC); // actually handled by FileStreamTask and streamExecutors
put(Verb.STREAM_REQUEST, Stage.MISC);
put(Verb.REPLICATION_FINISHED, Stage.MISC);
put(Verb.SNAPSHOT, Stage.MISC);
put(Verb.TREE_REQUEST, Stage.ANTI_ENTROPY);
put(Verb.TREE_RESPONSE, Stage.ANTI_ENTROPY);
put(Verb.STREAMING_REPAIR_REQUEST, Stage.ANTI_ENTROPY);
put(Verb.STREAMING_REPAIR_RESPONSE, Stage.ANTI_ENTROPY);
put(Verb.REPAIR_MESSAGE, Stage.ANTI_ENTROPY);
put(Verb.GOSSIP_DIGEST_ACK, Stage.GOSSIP);
put(Verb.GOSSIP_DIGEST_ACK2, Stage.GOSSIP);
put(Verb.GOSSIP_DIGEST_SYN, Stage.GOSSIP);
put(Verb.GOSSIP_SHUTDOWN, Stage.GOSSIP);
put(Verb.DEFINITIONS_UPDATE, Stage.MIGRATION);
put(Verb.SCHEMA_CHECK, Stage.MIGRATION);
put(Verb.MIGRATION_REQUEST, Stage.MIGRATION);
put(Verb.INDEX_SCAN, Stage.READ);
put(Verb.REPLICATION_FINISHED, Stage.MISC);
put(Verb.COUNTER_MUTATION, Stage.MUTATION);
put(Verb.SNAPSHOT, Stage.MISC);
put(Verb.ECHO, Stage.GOSSIP);
put(Verb.HEARTBEAT_DIGEST, Stage.HEARTBEAT);
put(Verb.UNUSED_1, Stage.INTERNAL_RESPONSE);
put(Verb.UNUSED_2, Stage.INTERNAL_RESPONSE);
put(Verb.UNUSED_3, Stage.INTERNAL_RESPONSE);
}};
/**
* Messages we receive in IncomingTcpConnection have a Verb that tells us what kind of message it is.
* Most of the time, this is enough to determine how to deserialize the message payload.
* The exception is the REQUEST_RESPONSE verb, which just means "a reply to something you told me to do."
* Traditionally, this was fine since each VerbHandler knew what type of payload it expected, and
* handled the deserialization itself. Now that we do that in ITC, to avoid the extra copy to an
* intermediary byte[] (See CASSANDRA-3716), we need to wire that up to the CallbackInfo object
* (see below).
*/
public static final EnumMap<Verb, IVersionedSerializer<?>> verbSerializers = new EnumMap<Verb, IVersionedSerializer<?>>(Verb.class)
{{
put(Verb.REQUEST_RESPONSE, CallbackDeterminedSerializer.instance);
put(Verb.INTERNAL_RESPONSE, CallbackDeterminedSerializer.instance);
put(Verb.MUTATION, Mutation.serializer);
put(Verb.READ_REPAIR, Mutation.serializer);
put(Verb.READ, ReadCommand.serializer);
put(Verb.RANGE_SLICE, RangeSliceCommand.serializer);
put(Verb.PAGED_RANGE, PagedRangeCommand.serializer);
put(Verb.BOOTSTRAP_TOKEN, BootStrapper.StringSerializer.instance);
put(Verb.REPAIR_MESSAGE, RepairMessage.serializer);
put(Verb.GOSSIP_DIGEST_ACK, GossipDigestAck.serializer);
put(Verb.GOSSIP_DIGEST_ACK2, GossipDigestAck2.serializer);
put(Verb.GOSSIP_DIGEST_SYN, GossipDigestSyn.serializer);
put(Verb.DEFINITIONS_UPDATE, MigrationManager.MigrationsSerializer.instance);
put(Verb.TRUNCATE, Truncation.serializer);
put(Verb.REPLICATION_FINISHED, null);
put(Verb.COUNTER_MUTATION, CounterMutation.serializer);
put(Verb.SNAPSHOT, SnapshotCommand.serializer);
put(Verb.ECHO, EchoMessage.serializer);
put(Verb.PAXOS_PREPARE, Commit.serializer);
put(Verb.PAXOS_PROPOSE, Commit.serializer);
put(Verb.PAXOS_COMMIT, Commit.serializer);
}};
/**
* A Map of what kind of serializer to wire up to a REQUEST_RESPONSE callback, based on outbound Verb.
*/
public static final EnumMap<Verb, IVersionedSerializer<?>> callbackDeserializers = new EnumMap<Verb, IVersionedSerializer<?>>(Verb.class)
{{
put(Verb.MUTATION, WriteResponse.serializer);
put(Verb.READ_REPAIR, WriteResponse.serializer);
put(Verb.COUNTER_MUTATION, WriteResponse.serializer);
put(Verb.RANGE_SLICE, RangeSliceReply.serializer);
put(Verb.PAGED_RANGE, RangeSliceReply.serializer);
put(Verb.READ, ReadResponse.serializer);
put(Verb.TRUNCATE, TruncateResponse.serializer);
put(Verb.SNAPSHOT, null);
put(Verb.MIGRATION_REQUEST, MigrationManager.MigrationsSerializer.instance);
put(Verb.SCHEMA_CHECK, UUIDSerializer.serializer);
put(Verb.BOOTSTRAP_TOKEN, BootStrapper.StringSerializer.instance);
put(Verb.REPLICATION_FINISHED, null);
put(Verb.PAXOS_PREPARE, PrepareResponse.serializer);
put(Verb.PAXOS_PROPOSE, BooleanSerializer.serializer);
}};
/* Outstanding request callbacks keyed by message id. Entries are put with a per-request timeout
 * and expire via the map's reaper (see the timeout reporter installed in the constructor). */
private final ExpiringMap<Integer, CallbackInfo> callbacks;
/**
 * a placeholder class that means "deserialize using the callback." We can't implement this without
 * special-case code in InboundTcpConnection because there is no way to pass the message id to IVersionedSerializer.
 *
 * All three methods throw UnsupportedOperationException by design: this sentinel must never be
 * used for actual (de)serialization — IncomingTcpConnection special-cases it.
 */
static class CallbackDeterminedSerializer implements IVersionedSerializer<Object>
{
    /** Shared stateless sentinel instance. */
    public static final CallbackDeterminedSerializer instance = new CallbackDeterminedSerializer();

    public Object deserialize(DataInput in, int version) throws IOException
    {
        throw new UnsupportedOperationException();
    }

    public void serialize(Object o, DataOutputPlus out, int version) throws IOException
    {
        throw new UnsupportedOperationException();
    }

    public long serializedSize(Object o, int version)
    {
        throw new UnsupportedOperationException();
    }
}
/* Lookup table for registering message handlers based on the verb. */
private final Map<Verb, IVerbHandler> verbHandlers;

// One outbound connection pool per peer, created lazily by getConnectionPool().
private final ConcurrentMap<InetAddress, OutboundTcpConnectionPool> connectionManagers = new NonBlockingHashMap<InetAddress, OutboundTcpConnectionPool>();

private static final Logger logger = LoggerFactory.getLogger(MessagingService.class);

// Interval at which the dropped-message summary task runs (see logDroppedMessages()).
private static final int LOG_DROPPED_INTERVAL_IN_MS = 5000;

// Accept threads started by listen(); closed again in shutdown().
private final List<SocketThread> socketThreads = Lists.newArrayList();

// Signalled once listen() has started the accept threads; see waitUntilListening().
private final SimpleCondition listenGate;
/**
 * Verbs it's okay to drop if the request has been queued longer than the request timeout.  These
 * all correspond to client requests or something triggered by them; we don't want to
 * drop internal messages like bootstrap or repair notifications.
 */
public static final EnumSet<Verb> DROPPABLE_VERBS = EnumSet.of(Verb.BINARY,
                                                               Verb._TRACE,
                                                               Verb.MUTATION,
                                                               Verb.COUNTER_MUTATION,
                                                               Verb.READ_REPAIR,
                                                               Verb.READ,
                                                               Verb.RANGE_SLICE,
                                                               Verb.PAGED_RANGE,
                                                               Verb.REQUEST_RESPONSE);
// total dropped message counts for server lifetime
private final Map<Verb, DroppedMessageMetrics> droppedMessages = new EnumMap<Verb, DroppedMessageMetrics>(Verb.class);

// dropped count when last requested for the Recent api.  high concurrency isn't necessary here.
private final Map<Verb, Integer> lastDroppedInternal = new EnumMap<Verb, Integer>(Verb.class);

// Consumers (e.g. the dynamic snitch) notified of response latencies via addLatency().
private final List<ILatencySubscriber> subscribers = new ArrayList<ILatencySubscriber>();

// protocol versions of the other nodes in the cluster
private final ConcurrentMap<InetAddress, Integer> versions = new NonBlockingHashMap<InetAddress, Integer>();
// Lazy-initialization holder idiom: the singleton is constructed only when instance()
// is first called, and class initialization gives us thread safety for free.
private static class MSHandle
{
    public static final MessagingService instance = new MessagingService();
}

/** @return the process-wide MessagingService singleton. */
public static MessagingService instance()
{
    return MSHandle.instance;
}
/**
 * Private constructor (singleton): initializes metrics for droppable verbs, schedules the
 * periodic dropped-message log task, installs the callback-expiration handler, and registers
 * this service as an MBean.
 */
private MessagingService()
{
    // Pre-populate metrics so incrementDroppedMessages() can assume an entry exists.
    for (Verb verb : DROPPABLE_VERBS)
    {
        droppedMessages.put(verb, new DroppedMessageMetrics(verb));
        lastDroppedInternal.put(verb, 0);
    }

    listenGate = new SimpleCondition();
    verbHandlers = new EnumMap<Verb, IVerbHandler>(Verb.class);

    // Periodically summarize dropped messages in the log.
    Runnable logDropped = new Runnable()
    {
        public void run()
        {
            logDroppedMessages();
        }
    };
    ScheduledExecutors.scheduledTasks.scheduleWithFixedDelay(logDropped, LOG_DROPPED_INTERVAL_IN_MS, LOG_DROPPED_INTERVAL_IN_MS, TimeUnit.MILLISECONDS);

    // Invoked by the ExpiringMap reaper when a callback times out: record latency/timeout
    // metrics, fire the failure callback if one was requested, and submit a hint for
    // timed-out mutations that allow it.
    Function<Pair<Integer, ExpiringMap.CacheableObject<CallbackInfo>>, ?> timeoutReporter = new Function<Pair<Integer, ExpiringMap.CacheableObject<CallbackInfo>>, Object>()
    {
        public Object apply(Pair<Integer, ExpiringMap.CacheableObject<CallbackInfo>> pair)
        {
            final CallbackInfo expiredCallbackInfo = pair.right.value;
            maybeAddLatency(expiredCallbackInfo.callback, expiredCallbackInfo.target, pair.right.timeout);
            ConnectionMetrics.totalTimeouts.mark();
            getConnectionPool(expiredCallbackInfo.target).incrementTimeout();
            if (expiredCallbackInfo.isFailureCallback())
            {
                // onFailure may block, so run it on the internal-response stage rather than the reaper thread.
                StageManager.getStage(Stage.INTERNAL_RESPONSE).submit(new Runnable() {
                    @Override
                    public void run() {
                        ((IAsyncCallbackWithFailure)expiredCallbackInfo.callback).onFailure(expiredCallbackInfo.target);
                    }
                });
            }
            if (expiredCallbackInfo.shouldHint())
            {
                Mutation mutation = (Mutation) ((WriteCallbackInfo) expiredCallbackInfo).sentMessage.payload;
                return StorageProxy.submitHint(mutation, expiredCallbackInfo.target, null);
            }
            return null;
        }
    };

    callbacks = new ExpiringMap<Integer, CallbackInfo>(DatabaseDescriptor.getMinRpcTimeout(), timeoutReporter);

    // Expose this service via JMX.
    MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
    try
    {
        mbs.registerMBean(this, new ObjectName(MBEAN_NAME));
    }
    catch (Exception e)
    {
        throw new RuntimeException(e);
    }
}
/**
 * Track latency information for the dynamic snitch
 *
 * @param cb      the callback associated with this message -- this lets us know if it's a message type we're interested in
 * @param address the host that replied to the message
 * @param latency observed round-trip latency, forwarded to subscribers only when the callback opts in
 */
public void maybeAddLatency(IAsyncCallback cb, InetAddress address, long latency)
{
    if (cb.isLatencyForSnitch())
        addLatency(address, latency);
}
/**
 * Broadcast an observed round-trip latency for {@code address} to every registered
 * {@link ILatencySubscriber} (e.g. the dynamic snitch).
 */
public void addLatency(InetAddress address, long latency)
{
    for (int i = 0; i < subscribers.size(); i++)
    {
        ILatencySubscriber subscriber = subscribers.get(i);
        subscriber.receiveTiming(address, latency);
    }
}
/**
 * called from gossiper when it notices a node is not responding.
 * Drops and re-establishes the outbound connections to that endpoint.
 */
public void convict(InetAddress ep)
{
    logger.debug("Resetting pool for {}", ep);
    getConnectionPool(ep).reset();
}
/**
 * Listen on the specified port.
 *
 * Starts one accept thread per server socket (plain and/or SSL, depending on the
 * configured internode encryption) and then opens the listen gate so that
 * waitUntilListening() callers proceed.
 *
 * @param localEp InetAddress whose port to listen on.
 */
public void listen(InetAddress localEp) throws ConfigurationException
{
    callbacks.reset(); // hack to allow tests to stop/restart MS
    for (ServerSocket ss : getServerSockets(localEp))
    {
        SocketThread th = new SocketThread(ss, "ACCEPT-" + localEp);
        th.start();
        socketThreads.add(th);
    }
    listenGate.signalAll();
}
/**
 * Create the server socket(s) to accept internode connections on: an SSL socket unless
 * encryption is disabled, and a plain socket unless encryption is required for all peers.
 *
 * @param localEp address to bind
 * @return one or two bound server sockets
 * @throws ConfigurationException on SSL setup failure, missing bind permission, or an unusable address
 */
private List<ServerSocket> getServerSockets(InetAddress localEp) throws ConfigurationException
{
    final List<ServerSocket> ss = new ArrayList<ServerSocket>(2);
    if (DatabaseDescriptor.getServerEncryptionOptions().internode_encryption != ServerEncryptionOptions.InternodeEncryption.none)
    {
        try
        {
            ss.add(SSLFactory.getServerSocket(DatabaseDescriptor.getServerEncryptionOptions(), localEp, DatabaseDescriptor.getSSLStoragePort()));
        }
        catch (IOException e)
        {
            throw new ConfigurationException("Unable to create ssl socket", e);
        }
        // setReuseAddress happens in the factory.
        logger.info("Starting Encrypted Messaging Service on SSL port {}", DatabaseDescriptor.getSSLStoragePort());
    }

    if (DatabaseDescriptor.getServerEncryptionOptions().internode_encryption != ServerEncryptionOptions.InternodeEncryption.all)
    {
        ServerSocketChannel serverChannel = null;
        try
        {
            serverChannel = ServerSocketChannel.open();
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        ServerSocket socket = serverChannel.socket();
        try
        {
            socket.setReuseAddress(true);
        }
        catch (SocketException e)
        {
            throw new ConfigurationException("Insufficient permissions to setReuseAddress", e);
        }
        InetSocketAddress address = new InetSocketAddress(localEp, DatabaseDescriptor.getStoragePort());
        try
        {
            socket.bind(address, 500); // backlog of 500 pending connections
        }
        catch (BindException e)
        {
            // BindException.getMessage() may be null; guard before substring matching so we
            // don't replace the real failure with a NullPointerException.
            String message = e.getMessage();
            if (message != null && message.contains("in use"))
                throw new ConfigurationException(address + " is in use by another process.  Change listen_address:storage_port in cassandra.yaml to values that do not conflict with other services");
            else if (message != null && message.contains("Cannot assign requested address"))
                throw new ConfigurationException("Unable to bind to address " + address
                                                 + ". Set listen_address in cassandra.yaml to an interface you can bind to, e.g., your private IP address on EC2");
            else
                throw new RuntimeException(e);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        logger.info("Starting Messaging Service on port {}", DatabaseDescriptor.getStoragePort());
        ss.add(socket);
    }
    return ss;
}
/**
 * Block until listen() has opened the listen gate.
 * If interrupted while waiting, returns early with the thread's interrupt status restored.
 */
public void waitUntilListening()
{
    try
    {
        listenGate.await();
    }
    catch (InterruptedException ie)
    {
        logger.debug("await interrupted");
        // Restore the interrupt flag so callers up the stack can observe the interruption;
        // swallowing it silently loses the signal.
        Thread.currentThread().interrupt();
    }
}
/** @return true once listen() has started the accept threads and signalled the gate. */
public boolean isListening()
{
    return listenGate.isSignaled();
}
/**
 * Close and discard the outbound connection pool for {@code to}, if one exists.
 * Removes the pool from the map first so that two concurrent callers cannot both
 * observe — and both close — the same pool instance.
 */
public void destroyConnectionPool(InetAddress to)
{
    OutboundTcpConnectionPool cp = connectionManagers.remove(to);
    if (cp != null)
        cp.close();
}
/**
 * Get (lazily creating and starting if necessary) the outbound connection pool for {@code to}.
 * Uses putIfAbsent so that under a race only one pool is kept and started; the loser's pool
 * is discarded unstarted. Blocks until the winning pool has finished starting.
 */
public OutboundTcpConnectionPool getConnectionPool(InetAddress to)
{
    OutboundTcpConnectionPool cp = connectionManagers.get(to);
    if (cp == null)
    {
        cp = new OutboundTcpConnectionPool(to);
        OutboundTcpConnectionPool existingPool = connectionManagers.putIfAbsent(to, cp);
        if (existingPool != null)
            cp = existingPool; // lost the race; use the pool another thread registered
        else
            cp.start();
    }
    cp.waitForStarted();
    return cp;
}
/**
 * Pick the outbound connection (command vs. ack queue) appropriate for {@code msg}
 * from the pool serving {@code to}.
 */
public OutboundTcpConnection getConnection(InetAddress to, MessageOut msg)
{
    OutboundTcpConnectionPool pool = getConnectionPool(to);
    return pool.getConnection(msg);
}
/**
 * Register a verb and the corresponding verb handler with the
 * Messaging Service. Each verb may be registered at most once
 * (enforced by assertion).
 *
 * @param verb        the verb to register
 * @param verbHandler handler for the specified verb
 */
public void registerVerbHandlers(Verb verb, IVerbHandler verbHandler)
{
    assert !verbHandlers.containsKey(verb);
    verbHandlers.put(verb, verbHandler);
}
/**
 * This method returns the verb handler associated with the registered
 * verb. If no handler has been registered then null is returned.
 *
 * @param type for which the verb handler is sought
 * @return a reference to IVerbHandler which is the handler for the specified verb
 */
public IVerbHandler getVerbHandler(Verb type)
{
    return verbHandlers.get(type);
}
/**
 * Register a callback for a non-mutation request and return the fresh message id it is keyed by.
 * The entry expires after {@code timeout} ms, at which point the constructor's timeout reporter runs.
 *
 * @throws AssertionError if a callback already exists for the generated id
 */
public int addCallback(IAsyncCallback cb, MessageOut message, InetAddress to, long timeout, boolean failureCallback)
{
    assert message.verb != Verb.MUTATION; // mutations need to call the overload with a ConsistencyLevel
    int messageId = nextId();
    CallbackInfo previous = callbacks.put(messageId, new CallbackInfo(to, cb, callbackDeserializers.get(message.verb), failureCallback), timeout);
    assert previous == null : String.format("Callback already exists for id %d! (%s)", messageId, previous);
    return messageId;
}
/**
 * Register a callback for a (counter-)mutation request and return the fresh message id it is keyed by.
 * A WriteCallbackInfo is stored so that on expiration the timeout reporter can decide whether to hint,
 * based on the consistency level and {@code allowHints}.
 */
public int addCallback(IAsyncCallback cb,
                       MessageOut<? extends IMutation> message,
                       InetAddress to,
                       long timeout,
                       ConsistencyLevel consistencyLevel,
                       boolean allowHints)
{
    assert message.verb == Verb.MUTATION || message.verb == Verb.COUNTER_MUTATION;
    int messageId = nextId();

    CallbackInfo previous = callbacks.put(messageId,
                                          new WriteCallbackInfo(to,
                                                                cb,
                                                                message,
                                                                callbackDeserializers.get(message.verb),
                                                                consistencyLevel,
                                                                allowHints),
                                          timeout);
    assert previous == null : String.format("Callback already exists for id %d! (%s)", messageId, previous);
    return messageId;
}
// Monotonically increasing message-id generator; ids key the callbacks map.
private static final AtomicInteger idGen = new AtomicInteger(0);

private static int nextId()
{
    return idGen.incrementAndGet();
}
/** Convenience overload: send with the message's own timeout and no failure callback. */
public int sendRR(MessageOut message, InetAddress to, IAsyncCallback cb)
{
    return sendRR(message, to, cb, message.getTimeout(), false);
}

/** Convenience overload: like sendRR, but the callback is also notified on failure. */
public int sendRRWithFailure(MessageOut message, InetAddress to, IAsyncCallbackWithFailure cb)
{
    return sendRR(message, to, cb, message.getTimeout(), true);
}
/**
 * Send a non-mutation message to a given endpoint. This method specifies a callback
 * which is invoked with the actual response.
 *
 * @param message message to be sent.
 * @param to      endpoint to which the message needs to be sent
 * @param cb      callback interface which is used to pass the responses or
 *                suggest that a timeout occurred to the invoker of the send().
 * @param timeout the timeout used for expiration
 * @return a reference to message id used to match with the result
 */
public int sendRR(MessageOut message, InetAddress to, IAsyncCallback cb, long timeout, boolean failureCallback)
{
    int id = addCallback(cb, message, to, timeout, failureCallback);
    // Tag the outbound message so the receiver knows to report failures back to us.
    sendOneWay(failureCallback ? message.withParameter(FAILURE_CALLBACK_PARAM, ONE_BYTE) : message, id, to);
    return id;
}
/**
 * Send a mutation message to a given endpoint. This method specifies a callback
 * which is invoked with the actual response.
 * Also holds the message (only mutation messages) to determine if it
 * needs to trigger a hint (uses StorageProxy for that).
 *
 * @param message message to be sent.
 * @param to      endpoint to which the message needs to be sent
 * @param handler callback interface which is used to pass the responses or
 *                suggest that a timeout occurred to the invoker of the send().
 * @return a reference to message id used to match with the result
 */
public int sendRR(MessageOut<? extends IMutation> message,
                  InetAddress to,
                  AbstractWriteResponseHandler handler,
                  boolean allowHints)
{
    int id = addCallback(handler, message, to, message.getTimeout(), handler.consistencyLevel, allowHints);
    sendOneWay(message, id, to);
    return id;
}
/** Fire-and-forget send with a freshly generated message id (no reply expected). */
public void sendOneWay(MessageOut message, InetAddress to)
{
    sendOneWay(message, nextId(), to);
}

/** Send a reply, reusing the id of the request being answered so the caller can match it. */
public void sendReply(MessageOut message, int id, InetAddress to)
{
    sendOneWay(message, id, to);
}
/**
 * Send a message to a given endpoint. This method adheres to the fire and forget
 * style messaging.
 *
 * @param message messages to be sent.
 * @param id      message id used to tag the payload on the wire
 * @param to      endpoint to which the message needs to be sent
 */
public void sendOneWay(MessageOut message, int id, InetAddress to)
{
    if (logger.isTraceEnabled())
        logger.trace("{} sending {} to {}@{}", FBUtilities.getBroadcastAddress(), message.verb, id, to);

    if (to.equals(FBUtilities.getBroadcastAddress()))
        logger.trace("Message-to-self {} going over MessagingService", message);

    // message sinks are a testing hook
    MessageOut processedMessage = SinkManager.processOutboundMessage(message, id, to);
    if (processedMessage == null)
    {
        return;
    }

    // get pooled connection (really, connection queue)
    OutboundTcpConnection connection = getConnection(to, processedMessage);

    // write it
    connection.enqueue(processedMessage, id);
}
/**
 * Send a request and return a future-like handle that completes with the single response.
 * Uses the message's own timeout (via the two-arg callback overload of sendRR).
 */
public <T> AsyncOneResponse<T> sendRR(MessageOut message, InetAddress to)
{
    AsyncOneResponse<T> iar = new AsyncOneResponse<T>();
    sendRR(message, to, iar);
    return iar;
}
/** Register a subscriber to be notified of observed response latencies (see addLatency). */
public void register(ILatencySubscriber subscriber)
{
    subscribers.add(subscriber);
}
/** Drops all outstanding callbacks without notifying them — test/debug use only, as the name warns. */
public void clearCallbacksUnsafe()
{
    callbacks.reset();
}
/**
 * Wait for callbacks and don't allow any more to be created (since they could require writing hints)
 */
public void shutdown()
{
    logger.info("Waiting for messaging service to quiesce");
    // We may need to schedule hints on the mutation stage, so it's erroneous to shut down the mutation stage first
    assert !StageManager.getStage(Stage.MUTATION).isShutdown();

    // the important part: block until every outstanding callback has completed or expired
    callbacks.shutdownBlocking();

    // attempt to humor tests that try to stop and restart MS
    try
    {
        for (SocketThread th : socketThreads)
            th.close();
    }
    catch (IOException e)
    {
        throw new IOError(e);
    }
}
/**
 * Hand an inbound message to the stage responsible for its verb.
 *
 * @param message   the deserialized inbound message
 * @param id        wire-level message id (used to match replies to callbacks)
 * @param timestamp the message's construction time, used for timeout accounting downstream
 */
public void receive(MessageIn message, int id, long timestamp)
{
    TraceState state = Tracing.instance.initializeFromMessage(message);
    if (state != null)
        state.trace("Message received from {}", message.from);

    // message sinks are a testing hook; a sink may swallow the message entirely
    Verb verb = message.verb;
    message = SinkManager.processInboundMessage(message, id);
    if (message == null)
    {
        incrementRejectedMessages(verb);
        return;
    }

    Runnable runnable = new MessageDeliveryTask(message, id, timestamp);
    TracingAwareExecutorService stage = StageManager.getStage(message.getMessageType());
    assert stage != null : "No stage for message type " + message.verb;

    stage.execute(runnable, state);
}
/** Install a callback under an explicit id — test use only (no expiration semantics implied). */
public void setCallbackForTests(int messageId, CallbackInfo callback)
{
    callbacks.put(messageId, callback);
}

/** @return the callback registered for {@code messageId}, or null if none/expired. */
public CallbackInfo getRegisteredCallback(int messageId)
{
    return callbacks.get(messageId);
}

/** Remove and return the callback registered for {@code messageId}, or null if none/expired. */
public CallbackInfo removeRegisteredCallback(int messageId)
{
    return callbacks.remove(messageId);
}
/**
 * @return System.nanoTime() when callback was created.
 */
public long getRegisteredCallbackAge(int messageId)
{
    return callbacks.getAge(messageId);
}
/**
 * Verify the 4-byte protocol magic read from a new connection.
 *
 * @throws IOException if the value does not match PROTOCOL_MAGIC; the message includes
 *                     the offending value to make misconfigured/foreign peers diagnosable
 */
public static void validateMagic(int magic) throws IOException
{
    if (magic != PROTOCOL_MAGIC)
        throw new IOException(String.format("invalid protocol header (read 0x%x, expected 0x%x)", magic, PROTOCOL_MAGIC));
}
/**
 * Extract {@code count} bits from {@code packed}, the most significant of which is bit
 * {@code start} (bit 0 = least significant). E.g. getBits(header, 15, 8) yields bits 8..15.
 */
public static int getBits(int packed, int start, int count)
{
    // Low `count` bits set; shift drops everything below the requested field.
    int mask = ~(-1 << count);
    int shifted = packed >>> (start + 1 - count);
    return shifted & mask;
}
/** @return cached flag: true while every known peer speaks protocol version >= 2.1. */
public boolean areAllNodesAtLeast21()
{
    return allNodesAtLeast21;
}
/**
 * Record the protocol version for an endpoint and keep the all-nodes-at-least-2.1 flag in sync.
 *
 * @return the last version associated with address, or @param version if this is the first such version
 */
public int setVersion(InetAddress endpoint, int version)
{
    logger.debug("Setting version {} for {}", version, endpoint);
    if (version < VERSION_21)
        allNodesAtLeast21 = false;
    Integer v = versions.put(endpoint, version);

    // if this node's version was increased from below 2.1 to 2.1 or later, re-check whether all nodes are >= 2.1 now
    if (v != null && v < VERSION_21 && version >= VERSION_21)
        refreshAllNodesAtLeast21();

    return v == null ? version : v;
}
/**
 * Forget the recorded protocol version for an endpoint (e.g. on restart/removal).
 * Re-checks the all-nodes-at-least-2.1 flag when the removed version could have been
 * holding it down (<= is deliberately conservative: an extra refresh is harmless).
 */
public void resetVersion(InetAddress endpoint)
{
    logger.debug("Resetting version for {}", endpoint);
    Integer removed = versions.remove(endpoint);
    if (removed != null && removed <= VERSION_21)
        refreshAllNodesAtLeast21();
}
/**
 * Recompute the cached all-nodes-at-least-2.1 flag from the full version map.
 * The flag ends up true iff no known peer reports a version below 2.1.
 */
private void refreshAllNodesAtLeast21()
{
    for (Integer version : versions.values())
    {
        boolean below21 = version < VERSION_21;
        if (below21)
        {
            allNodesAtLeast21 = false;
            return;
        }
    }
    allNodesAtLeast21 = true;
}
/**
 * The protocol version to use when talking to {@code endpoint}: the lesser of its known
 * version and ours. Unknown peers are optimistically assumed to speak the current version.
 */
public int getVersion(InetAddress endpoint)
{
    Integer v = versions.get(endpoint);
    if (v != null)
        return Math.min(v, MessagingService.current_version);

    // we don't know the version. assume current. we'll know soon enough if that was incorrect.
    logger.trace("Assuming current protocol version for {}", endpoint);
    return MessagingService.current_version;
}
/** String-address convenience overload of {@link #getVersion(InetAddress)}. */
public int getVersion(String endpoint) throws UnknownHostException
{
    return getVersion(InetAddress.getByName(endpoint));
}
/**
 * The version as actually reported by the endpoint, NOT capped to our own
 * (unlike getVersion). Callers must check knowsVersion() first.
 *
 * @throws IllegalStateException if no version has been recorded for the endpoint
 */
public int getRawVersion(InetAddress endpoint)
{
    Integer v = versions.get(endpoint);
    if (v == null)
        throw new IllegalStateException("getRawVersion() was called without checking knowsVersion() result first");
    return v;
}
/** @return true if a protocol version has been recorded for the endpoint. */
public boolean knowsVersion(InetAddress endpoint)
{
    return versions.containsKey(endpoint);
}
/**
 * Count one dropped message for a droppable verb. The metrics entry is guaranteed to
 * exist because the constructor pre-populates droppedMessages for all DROPPABLE_VERBS.
 */
public void incrementDroppedMessages(Verb verb)
{
    assert DROPPABLE_VERBS.contains(verb) : "Verb " + verb + " should not legally be dropped";
    droppedMessages.get(verb).dropped.mark();
}
/**
 * Same as incrementDroppedMessages(), but allows non-droppable verbs. Called for IMessageSink-caused message drops.
 *
 * NOTE(review): droppedMessages is an EnumMap (not thread-safe); concurrent first-time puts for
 * the same non-droppable verb could race here — confirm whether receive() can run concurrently
 * for sink-rejected messages.
 */
private void incrementRejectedMessages(Verb verb)
{
    DroppedMessageMetrics metrics = droppedMessages.get(verb);
    if (metrics == null)
    {
        // lazily create a metrics entry for verbs outside DROPPABLE_VERBS
        metrics = new DroppedMessageMetrics(verb);
        droppedMessages.put(verb, metrics);
    }
    metrics.dropped.mark();
}
/**
 * Periodic task (scheduled in the constructor): logs, per verb, how many messages were
 * dropped since the previous run, and triggers a thread-pool status dump if any were.
 */
private void logDroppedMessages()
{
    boolean logTpstats = false;
    for (Map.Entry<Verb, DroppedMessageMetrics> entry : droppedMessages.entrySet())
    {
        int dropped = (int) entry.getValue().dropped.count();
        Verb verb = entry.getKey();
        // delta since last run; lastDroppedInternal remembers the previous lifetime total
        int recent = dropped - lastDroppedInternal.get(verb);
        if (recent > 0)
        {
            logTpstats = true;
            logger.info("{} {} messages dropped in last {}ms",
                        new Object[] {recent, verb, LOG_DROPPED_INTERVAL_IN_MS});
            lastDroppedInternal.put(verb, dropped);
        }
    }

    if (logTpstats)
        StatusLogger.log();
}
/**
 * Accept loop for one server socket: authenticates each inbound connection, reads the
 * protocol magic and header, and hands the socket off to an IncomingStreamingConnection
 * or IncomingTcpConnection thread depending on the header's stream bit.
 */
private static class SocketThread extends Thread
{
    private final ServerSocket server;

    SocketThread(ServerSocket server, String name)
    {
        super(name);
        this.server = server;
    }

    public void run()
    {
        while (!server.isClosed())
        {
            Socket socket = null;
            try
            {
                socket = server.accept();
                if (!authenticate(socket))
                {
                    logger.debug("remote failed to authenticate");
                    socket.close();
                    continue;
                }

                socket.setKeepAlive(true);
                // Bound the time we'll wait for the handshake header before giving up on the peer.
                socket.setSoTimeout(2 * OutboundTcpConnection.WAIT_FOR_VERSION_MAX_TIME);
                // determine the connection type to decide whether to buffer
                DataInputStream in = new DataInputStream(socket.getInputStream());
                MessagingService.validateMagic(in.readInt());
                int header = in.readInt();
                boolean isStream = MessagingService.getBits(header, 3, 1) == 1;
                int version = MessagingService.getBits(header, 15, 8);
                logger.debug("Connection version {} from {}", version, socket.getInetAddress());
                // Handshake done: disable the read timeout for the long-lived connection.
                socket.setSoTimeout(0);

                Thread thread = isStream
                              ? new IncomingStreamingConnection(version, socket)
                              : new IncomingTcpConnection(version, MessagingService.getBits(header, 2, 1) == 1, socket);
                thread.start();
            }
            catch (AsynchronousCloseException e)
            {
                // this happens when another thread calls close().
                logger.debug("Asynchronous close seen by server thread");
                break;
            }
            catch (ClosedChannelException e)
            {
                logger.debug("MessagingService server thread already closed");
                break;
            }
            catch (IOException e)
            {
                // per-connection failure: log, close the offending socket, keep accepting
                logger.debug("Error reading the socket " + socket, e);
                FileUtils.closeQuietly(socket);
            }
        }
        logger.info("MessagingService has terminated the accept() thread");
    }

    void close() throws IOException
    {
        logger.debug("Closing accept() thread");
        server.close();
    }

    private boolean authenticate(Socket socket)
    {
        return DatabaseDescriptor.getInternodeAuthenticator().authenticate(socket.getInetAddress(), socket.getPort());
    }
}
/** MBean accessor: pending command-connection messages per peer, keyed by host address. */
public Map<String, Integer> getCommandPendingTasks()
{
    Map<String, Integer> pendingTasks = new HashMap<String, Integer>();
    for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
        pendingTasks.put(entry.getKey().getHostAddress(), entry.getValue().cmdCon.getPendingMessages());
    return pendingTasks;
}

/** Pending command-connection messages for a single peer (0 if no pool exists). */
public int getCommandPendingTasks(InetAddress address)
{
    OutboundTcpConnectionPool connection = connectionManagers.get(address);
    return connection == null ? 0 : connection.cmdCon.getPendingMessages();
}
/** MBean accessor: completed command-connection messages per peer, keyed by host address. */
public Map<String, Long> getCommandCompletedTasks()
{
    Map<String, Long> completedTasks = new HashMap<String, Long>();
    for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
        completedTasks.put(entry.getKey().getHostAddress(), entry.getValue().cmdCon.getCompletedMesssages());
    return completedTasks;
}

/** MBean accessor: dropped command-connection messages per peer, keyed by host address. */
public Map<String, Long> getCommandDroppedTasks()
{
    Map<String, Long> droppedTasks = new HashMap<String, Long>();
    for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
        droppedTasks.put(entry.getKey().getHostAddress(), entry.getValue().cmdCon.getDroppedMessages());
    return droppedTasks;
}
/** MBean accessor: pending ack-connection messages per peer, keyed by host address. */
public Map<String, Integer> getResponsePendingTasks()
{
    Map<String, Integer> pendingTasks = new HashMap<String, Integer>();
    for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
        pendingTasks.put(entry.getKey().getHostAddress(), entry.getValue().ackCon.getPendingMessages());
    return pendingTasks;
}

/** MBean accessor: completed ack-connection messages per peer, keyed by host address. */
public Map<String, Long> getResponseCompletedTasks()
{
    Map<String, Long> completedTasks = new HashMap<String, Long>();
    for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
        completedTasks.put(entry.getKey().getHostAddress(), entry.getValue().ackCon.getCompletedMesssages());
    return completedTasks;
}
/** MBean accessor: lifetime dropped-message counts per verb name. */
public Map<String, Integer> getDroppedMessages()
{
    Map<String, Integer> map = new HashMap<String, Integer>();
    for (Map.Entry<Verb, DroppedMessageMetrics> entry : droppedMessages.entrySet())
        map.put(entry.getKey().toString(), (int) entry.getValue().dropped.count());
    return map;
}

/** MBean accessor: dropped-message counts per verb name since this method was last called. */
public Map<String, Integer> getRecentlyDroppedMessages()
{
    Map<String, Integer> map = new HashMap<String, Integer>();
    for (Map.Entry<Verb, DroppedMessageMetrics> entry : droppedMessages.entrySet())
        map.put(entry.getKey().toString(), entry.getValue().getRecentlyDropped());
    return map;
}
/** MBean accessor: lifetime count of callback timeouts across all connections. */
public long getTotalTimeouts()
{
    return ConnectionMetrics.totalTimeouts.count();
}

/** MBean accessor: timeouts since last read. (Method name typo "Timouts" kept: it is part of the exposed MBean interface.) */
public long getRecentTotalTimouts()
{
    return ConnectionMetrics.getRecentTotalTimeout();
}
/** MBean accessor: lifetime timeout counts per peer, keyed by host address. */
public Map<String, Long> getTimeoutsPerHost()
{
    Map<String, Long> result = new HashMap<String, Long>();
    for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
    {
        String ip = entry.getKey().getHostAddress();
        long recent = entry.getValue().getTimeouts();
        result.put(ip, recent);
    }
    return result;
}

/** MBean accessor: timeout counts per peer since last read, keyed by host address. */
public Map<String, Long> getRecentTimeoutsPerHost()
{
    Map<String, Long> result = new HashMap<String, Long>();
    for (Map.Entry<InetAddress, OutboundTcpConnectionPool> entry : connectionManagers.entrySet())
    {
        String ip = entry.getKey().getHostAddress();
        long recent = entry.getValue().getRecentTimeouts();
        result.put(ip, recent);
    }
    return result;
}
}
| |
package com.hadjiminap.kwimobile;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.widget.DrawerLayout;
import android.text.Html;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Iterator;
//All the sources I used are marked in the matura paper in Chapter 2 in the corresponding section
public class MainActivity extends Activity implements AdapterView.OnItemClickListener
{
private DrawerLayout drawerLayout;
private ListView listView;
private ActionBarDrawerToggle drawerListener;
private CustomAdapter myAdapter;
private String[] menus;
private TextView[] lesson;
private ArrayList<Lesson> lessons = new ArrayList<Lesson>();
private SharedPreferences togglePreferences;
private SharedPreferences.Editor togglePrefsEditor;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
//Linking and setup of variables
drawerLayout = (DrawerLayout) findViewById(R.id.drawerLayout1);
menus = getResources().getStringArray(R.array.menu);
listView = (ListView) findViewById(R.id.drawerlist1);
myAdapter = new CustomAdapter(this, "font.ttf");
listView.setAdapter(myAdapter);
listView.setOnItemClickListener(this);
//Setup for ic_navigaion_drawer animation
getActionBar().setHomeButtonEnabled(true);
getActionBar().setDisplayHomeAsUpEnabled(true);
//Action bar styling
getActionBar().setBackgroundDrawable(new ColorDrawable(Color.parseColor("#00b796")));
getActionBar().setDisplayShowHomeEnabled(false);
getActionBar().setTitle(Html.fromHtml("<font color=\"#ffffff\">" + getString(R.string.app_name) + "</font>"));
//Set font of actionBar
int actionBarTitle = Resources.getSystem().getIdentifier("action_bar_title", "id", "android");
TextView actionBarTitleView = (TextView) getWindow().findViewById(actionBarTitle);
Typeface tfbar = Typeface.createFromAsset(getAssets(), "font.ttf");
if (actionBarTitleView != null) {
actionBarTitleView.setTypeface(tfbar);
}
//Set TimeTable fragment as default fragment
Fragment newFragment1;
FragmentManager fm = getFragmentManager();
newFragment1 = new Timetable();
fm.beginTransaction().replace(R.id.mainContent, newFragment1).commit();
setTitle(menus[0]);
parseData();
//Setup Preferences
togglePreferences = this.getSharedPreferences("togglePrefs",Context.MODE_PRIVATE);
togglePrefsEditor = togglePreferences.edit();
//Start automute if toggle is true
boolean saveToggle = togglePreferences.getBoolean("togglePrefs", false);
if (saveToggle == true)
{
MainActivity activity = (MainActivity)this;
Intent i = new Intent(activity, Mute.class);
Bundle b = new Bundle();
b.putParcelableArrayList("lessons", activity.getLessons());
i.putExtra("lessons", b);
this.startService(i);
}
//Drawer listener open close setup
drawerListener= new ActionBarDrawerToggle(this,drawerLayout,R.drawable.ic_navigation_drawer, R.string.drawer_open, R.string.drawer_close)
{
@Override
public void onDrawerClosed(View drawerView)
{
//Toast.makeText(MyActivity.this," closed", Toast.LENGTH_SHORT).show();
}
@Override
public void onDrawerOpened(View drawerView)
{
//Toast.makeText(MainActivity.this," opened", Toast.LENGTH_SHORT).show();
}
};
//Set Drawerlistener
drawerLayout.setDrawerListener(drawerListener);
}
@Override
protected void onDestroy()
{
stopService(new Intent(MainActivity.this, Mute.class));
super.onStop();
}
private void parseData() {
String jdata = getData();
try {
JSONObject main_json = new JSONObject(jdata);
Iterator<String> main_iter = main_json.keys();
while (main_iter.hasNext()) {
String key = main_iter.next();
JSONObject day_json = main_json.optJSONObject(key);
if (day_json == null)
{
JSONArray day_json_array = main_json.optJSONArray(key);
for (int u = 0; u < day_json_array.length(); u ++)
{
JSONArray room_subject_arr = day_json_array.getJSONArray(u);
lessons.add(Lesson.fromJSON(u, room_subject_arr, key));
}
} else
{
Iterator<String> lesson_nr_iter = day_json.keys();
while (lesson_nr_iter.hasNext()) {
String lesson_nr_str = lesson_nr_iter.next();
int lesson_nr = Integer.parseInt(lesson_nr_str);
JSONArray room_subject_arr = day_json.getJSONArray(lesson_nr_str);
lessons.add(Lesson.fromJSON(lesson_nr, room_subject_arr,key));
}
}
}
} catch (JSONException e) {
e.printStackTrace();
}
}
ArrayList<Lesson> getLessons() {
return lessons;
}
//SEND DATA TO FRAGMENT
public String getData()
{
//SENDING DATA WITH SENDER
//GET JSON FROM ASYNC FROM LOGIN.CLASS
Bundle extras = getIntent().getExtras();
String sender = extras.getString("sender");
return sender;
}
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id)
{
selectItem(position);
}
public void selectItem(int position)
{
Fragment newFragment = new Fragment();
FragmentManager fm = getFragmentManager();
switch(position){
case 0:
newFragment = new Timetable();
break;
case 1:
newFragment = new Settings();
break;
case 2:
newFragment = new Improvements();
break;
}
fm.beginTransaction().replace(R.id.mainContent, newFragment).commit();
// Highlight the selected item, update the title, and close the drawer
listView.setItemChecked(position, true);
setTitle(menus[position]);
drawerLayout.closeDrawer(listView);
}
public void setTitle(String title)
{
getActionBar().setTitle(Html.fromHtml("<font color=\"#ffffff\">" + title + "</font>"));
}
//Sync ic_navigaion_drawer animation with navigation drawer open or close
@Override
protected void onPostCreate(Bundle savedInstanceState)
{
super.onPostCreate(savedInstanceState);
drawerListener.syncState();
}
//Click opens Navigation Drawer
/**
 * Routes action-bar selections: the drawer toggle gets first refusal; any
 * item it does not consume falls through to the framework default.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item)
{
    return drawerListener.onOptionsItemSelected(item)
            || super.onOptionsItemSelected(item);
}
/**
 * Forwards configuration changes (e.g. rotation) to the drawer toggle so
 * its indicator state stays consistent.
 */
@Override
public void onConfigurationChanged(Configuration newConfig)
{
super.onConfigurationChanged(newConfig);
drawerListener.onConfigurationChanged(newConfig);
}
/**
 * Deliberately swallows the back button so the user cannot navigate away
 * from this screen with back. NOTE(review): this also prevents leaving the
 * app via back entirely — confirm this is the intended UX.
 */
@Override
public void onBackPressed()
{
//Don't do anything when back button is pressed
}
}
//Custom Adapter
/**
 * Drawer-list adapter: one row per entry of R.array.menu, each showing an
 * icon and a label rendered in a custom typeface loaded from assets.
 */
class CustomAdapter extends BaseAdapter
{
    private Context context;
    /** Drawer labels, loaded from R.array.menu. */
    String[] menus;
    /** Row icons; note this array may be shorter than {@link #menus}. */
    int[] images = {R.drawable.icon1, R.drawable.icon2, R.drawable.icon3};
    /** Typeface applied to every row label. */
    Typeface tf;

    /**
     * @param context host context used for assets, resources and inflation
     * @param font    asset path of the font applied to row labels
     */
    public CustomAdapter(Context context, String font)
    {
        tf = Typeface.createFromAsset(context.getAssets(), font);
        this.context = context;
        menus = context.getResources().getStringArray(R.array.menu);
    }

    @Override
    public int getCount()
    {
        return menus.length;
    }

    @Override
    public Object getItem(int position)
    {
        return menus[position];
    }

    @Override
    public long getItemId(int position)
    {
        return position;
    }

    /**
     * Inflates (or recycles) a drawer row and binds its label and icon.
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent)
    {
        View row = convertView;
        if (row == null) {
            LayoutInflater inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            row = inflater.inflate(R.layout.custom_row, parent, false);
        }
        TextView titleTextView = (TextView) row.findViewById(R.id.textView1);
        ImageView titleImageView = (ImageView) row.findViewById(R.id.imageView1);
        titleTextView.setTypeface(tf);
        titleTextView.setText(menus[position]);
        // R.array.menu may grow beyond the three bundled icons; previously
        // that threw ArrayIndexOutOfBoundsException here.
        if (position < images.length) {
            titleImageView.setImageResource(images[position]);
        }
        return row;
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.opsworks.model;
import java.io.Serializable;
/**
* <p>
* Describes a time-based instance's auto scaling schedule. The schedule
* consists of a set of key-value pairs.
* </p>
*
* <ul>
* <li>The key is the time period (a UTC hour) and must be an integer
* from 0 - 23.</li>
* <li>The value indicates whether the instance should be online or
* offline for the specified period, and must be set to "on" or
* "off"</li>
*
* </ul>
* <p>
* The default setting for all time periods is off, so you use the
* following parameters primarily to specify the online periods. You
* don't have to explicitly specify offline periods unless you want to
* change an online period to an offline period.
* </p>
* <p>
* The following example specifies that the instance should be online for
* four hours, from UTC 1200 - 1600. It will be off for the remainder of
* the day.
* </p>
* <p>
* <code> { "12":"on", "13":"on", "14":"on", "15":"on" } </code>
* </p>
*/
public class WeeklyAutoScalingSchedule implements Serializable, Cloneable {

    /** The schedule for Monday. */
    private java.util.Map<String,String> monday;

    /** The schedule for Tuesday. */
    private java.util.Map<String,String> tuesday;

    /** The schedule for Wednesday. */
    private java.util.Map<String,String> wednesday;

    /** The schedule for Thursday. */
    private java.util.Map<String,String> thursday;

    /** The schedule for Friday. */
    private java.util.Map<String,String> friday;

    /** The schedule for Saturday. */
    private java.util.Map<String,String> saturday;

    /** The schedule for Sunday. */
    private java.util.Map<String,String> sunday;

    /**
     * Adds one hour/state entry to the given day map, lazily creating the
     * map. Shared by all addXxxEntry methods so the duplicate-key check
     * exists in exactly one place instead of seven copies.
     *
     * @param day   the current map for the day (may be {@code null})
     * @param key   UTC hour, "0" - "23"
     * @param value "on" or "off"
     * @return the (possibly newly created) map containing the new entry
     * @throws IllegalArgumentException if {@code key} is already present
     */
    private static java.util.Map<String,String> putEntry(java.util.Map<String,String> day, String key, String value) {
        if (day == null) {
            day = new java.util.HashMap<String,String>();
        }
        if (day.containsKey(key)) {
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        }
        day.put(key, value);
        return day;
    }

    /**
     * The schedule for Monday.
     *
     * @return the schedule for Monday; never {@code null} (lazily created).
     */
    public java.util.Map<String,String> getMonday() {
        if (monday == null) {
            monday = new java.util.HashMap<String,String>();
        }
        return monday;
    }

    /**
     * The schedule for Monday.
     *
     * @param monday The schedule for Monday.
     */
    public void setMonday(java.util.Map<String,String> monday) {
        this.monday = monday;
    }

    /**
     * Sets the schedule for Monday.
     *
     * @param monday The schedule for Monday.
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withMonday(java.util.Map<String,String> monday) {
        setMonday(monday);
        return this;
    }

    /**
     * Adds a single entry into Monday.
     *
     * @param key   The key of the entry to be added into Monday.
     * @param value The corresponding value of the entry to be added into Monday.
     * @return this object, so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public WeeklyAutoScalingSchedule addMondayEntry(String key, String value) {
        this.monday = putEntry(this.monday, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Monday.
     *
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule clearMondayEntries() {
        this.monday = null;
        return this;
    }

    /**
     * The schedule for Tuesday.
     *
     * @return the schedule for Tuesday; never {@code null} (lazily created).
     */
    public java.util.Map<String,String> getTuesday() {
        if (tuesday == null) {
            tuesday = new java.util.HashMap<String,String>();
        }
        return tuesday;
    }

    /**
     * The schedule for Tuesday.
     *
     * @param tuesday The schedule for Tuesday.
     */
    public void setTuesday(java.util.Map<String,String> tuesday) {
        this.tuesday = tuesday;
    }

    /**
     * Sets the schedule for Tuesday.
     *
     * @param tuesday The schedule for Tuesday.
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withTuesday(java.util.Map<String,String> tuesday) {
        setTuesday(tuesday);
        return this;
    }

    /**
     * Adds a single entry into Tuesday.
     *
     * @param key   The key of the entry to be added into Tuesday.
     * @param value The corresponding value of the entry to be added into Tuesday.
     * @return this object, so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public WeeklyAutoScalingSchedule addTuesdayEntry(String key, String value) {
        this.tuesday = putEntry(this.tuesday, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Tuesday.
     *
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule clearTuesdayEntries() {
        this.tuesday = null;
        return this;
    }

    /**
     * The schedule for Wednesday.
     *
     * @return the schedule for Wednesday; never {@code null} (lazily created).
     */
    public java.util.Map<String,String> getWednesday() {
        if (wednesday == null) {
            wednesday = new java.util.HashMap<String,String>();
        }
        return wednesday;
    }

    /**
     * The schedule for Wednesday.
     *
     * @param wednesday The schedule for Wednesday.
     */
    public void setWednesday(java.util.Map<String,String> wednesday) {
        this.wednesday = wednesday;
    }

    /**
     * Sets the schedule for Wednesday.
     *
     * @param wednesday The schedule for Wednesday.
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withWednesday(java.util.Map<String,String> wednesday) {
        setWednesday(wednesday);
        return this;
    }

    /**
     * Adds a single entry into Wednesday.
     *
     * @param key   The key of the entry to be added into Wednesday.
     * @param value The corresponding value of the entry to be added into Wednesday.
     * @return this object, so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public WeeklyAutoScalingSchedule addWednesdayEntry(String key, String value) {
        this.wednesday = putEntry(this.wednesday, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Wednesday.
     *
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule clearWednesdayEntries() {
        this.wednesday = null;
        return this;
    }

    /**
     * The schedule for Thursday.
     *
     * @return the schedule for Thursday; never {@code null} (lazily created).
     */
    public java.util.Map<String,String> getThursday() {
        if (thursday == null) {
            thursday = new java.util.HashMap<String,String>();
        }
        return thursday;
    }

    /**
     * The schedule for Thursday.
     *
     * @param thursday The schedule for Thursday.
     */
    public void setThursday(java.util.Map<String,String> thursday) {
        this.thursday = thursday;
    }

    /**
     * Sets the schedule for Thursday.
     *
     * @param thursday The schedule for Thursday.
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withThursday(java.util.Map<String,String> thursday) {
        setThursday(thursday);
        return this;
    }

    /**
     * Adds a single entry into Thursday.
     *
     * @param key   The key of the entry to be added into Thursday.
     * @param value The corresponding value of the entry to be added into Thursday.
     * @return this object, so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public WeeklyAutoScalingSchedule addThursdayEntry(String key, String value) {
        this.thursday = putEntry(this.thursday, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Thursday.
     *
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule clearThursdayEntries() {
        this.thursday = null;
        return this;
    }

    /**
     * The schedule for Friday.
     *
     * @return the schedule for Friday; never {@code null} (lazily created).
     */
    public java.util.Map<String,String> getFriday() {
        if (friday == null) {
            friday = new java.util.HashMap<String,String>();
        }
        return friday;
    }

    /**
     * The schedule for Friday.
     *
     * @param friday The schedule for Friday.
     */
    public void setFriday(java.util.Map<String,String> friday) {
        this.friday = friday;
    }

    /**
     * Sets the schedule for Friday.
     *
     * @param friday The schedule for Friday.
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withFriday(java.util.Map<String,String> friday) {
        setFriday(friday);
        return this;
    }

    /**
     * Adds a single entry into Friday.
     *
     * @param key   The key of the entry to be added into Friday.
     * @param value The corresponding value of the entry to be added into Friday.
     * @return this object, so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public WeeklyAutoScalingSchedule addFridayEntry(String key, String value) {
        this.friday = putEntry(this.friday, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Friday.
     *
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule clearFridayEntries() {
        this.friday = null;
        return this;
    }

    /**
     * The schedule for Saturday.
     *
     * @return the schedule for Saturday; never {@code null} (lazily created).
     */
    public java.util.Map<String,String> getSaturday() {
        if (saturday == null) {
            saturday = new java.util.HashMap<String,String>();
        }
        return saturday;
    }

    /**
     * The schedule for Saturday.
     *
     * @param saturday The schedule for Saturday.
     */
    public void setSaturday(java.util.Map<String,String> saturday) {
        this.saturday = saturday;
    }

    /**
     * Sets the schedule for Saturday.
     *
     * @param saturday The schedule for Saturday.
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withSaturday(java.util.Map<String,String> saturday) {
        setSaturday(saturday);
        return this;
    }

    /**
     * Adds a single entry into Saturday.
     *
     * @param key   The key of the entry to be added into Saturday.
     * @param value The corresponding value of the entry to be added into Saturday.
     * @return this object, so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public WeeklyAutoScalingSchedule addSaturdayEntry(String key, String value) {
        this.saturday = putEntry(this.saturday, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Saturday.
     *
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule clearSaturdayEntries() {
        this.saturday = null;
        return this;
    }

    /**
     * The schedule for Sunday.
     *
     * @return the schedule for Sunday; never {@code null} (lazily created).
     */
    public java.util.Map<String,String> getSunday() {
        if (sunday == null) {
            sunday = new java.util.HashMap<String,String>();
        }
        return sunday;
    }

    /**
     * The schedule for Sunday.
     *
     * @param sunday The schedule for Sunday.
     */
    public void setSunday(java.util.Map<String,String> sunday) {
        this.sunday = sunday;
    }

    /**
     * Sets the schedule for Sunday.
     *
     * @param sunday The schedule for Sunday.
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule withSunday(java.util.Map<String,String> sunday) {
        setSunday(sunday);
        return this;
    }

    /**
     * Adds a single entry into Sunday.
     *
     * @param key   The key of the entry to be added into Sunday.
     * @param value The corresponding value of the entry to be added into Sunday.
     * @return this object, so that method calls can be chained together.
     * @throws IllegalArgumentException if {@code key} is already present.
     */
    public WeeklyAutoScalingSchedule addSundayEntry(String key, String value) {
        this.sunday = putEntry(this.sunday, key, value);
        return this;
    }

    /**
     * Removes all the entries added into Sunday.
     *
     * @return this object, so that method calls can be chained together.
     */
    public WeeklyAutoScalingSchedule clearSundayEntries() {
        this.sunday = null;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. The day getters never return {@code null} (they lazily
     * create empty maps), so every day is always printed.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        sb.append("Monday: " + getMonday() + ",");
        sb.append("Tuesday: " + getTuesday() + ",");
        sb.append("Wednesday: " + getWednesday() + ",");
        sb.append("Thursday: " + getThursday() + ",");
        sb.append("Friday: " + getFriday() + ",");
        sb.append("Saturday: " + getSaturday() + ",");
        sb.append("Sunday: " + getSunday());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Getters never return null, so each day contributes its map's hash
        // (an empty map hashes to 0, matching the previous null-guarded form).
        hashCode = prime * hashCode + getMonday().hashCode();
        hashCode = prime * hashCode + getTuesday().hashCode();
        hashCode = prime * hashCode + getWednesday().hashCode();
        hashCode = prime * hashCode + getThursday().hashCode();
        hashCode = prime * hashCode + getFriday().hashCode();
        hashCode = prime * hashCode + getSaturday().hashCode();
        hashCode = prime * hashCode + getSunday().hashCode();
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof WeeklyAutoScalingSchedule)) return false;
        WeeklyAutoScalingSchedule other = (WeeklyAutoScalingSchedule) obj;
        // The day getters never return null (they lazily create empty maps),
        // so per-day map equality is sufficient; an unset day equals an
        // explicitly empty one, exactly as before.
        return getMonday().equals(other.getMonday())
                && getTuesday().equals(other.getTuesday())
                && getWednesday().equals(other.getWednesday())
                && getThursday().equals(other.getThursday())
                && getFriday().equals(other.getFriday())
                && getSaturday().equals(other.getSaturday())
                && getSunday().equals(other.getSunday());
    }

    @Override
    public WeeklyAutoScalingSchedule clone() {
        try {
            // Shallow copy, as before: day maps are shared with the original.
            return (WeeklyAutoScalingSchedule) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!",
                    e);
        }
    }
}
| |
package liquibase.database.core;
import liquibase.CatalogAndSchema;
import liquibase.database.AbstractJdbcDatabase;
import liquibase.database.DatabaseConnection;
import liquibase.database.ObjectQuotingStrategy;
import liquibase.database.OfflineConnection;
import liquibase.database.jvm.JdbcConnection;
import liquibase.structure.DatabaseObject;
import liquibase.exception.DatabaseException;
import liquibase.executor.ExecutorService;
import liquibase.logging.LogFactory;
import liquibase.statement.core.RawCallStatement;
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.core.Table;
import liquibase.util.StringUtils;
import java.math.BigInteger;
import java.sql.Types;
import java.util.*;
/**
* Encapsulates PostgreSQL database support.
*/
public class PostgresDatabase extends AbstractJdbcDatabase {
    public static final String PRODUCT_NAME = "PostgreSQL";

    /** System tables/views to hide from snapshots; currently empty for Postgres. */
    private Set<String> systemTablesAndViews = new HashSet<String>();

    /** Upper-cased PostgreSQL reserved words that require quoting as identifiers. */
    private Set<String> reservedWords = new HashSet<String>();

    public PostgresDatabase() {
        super.setCurrentDateTimeFunction("NOW()");
        // Reserved-word list from http://www.postgresql.org/docs/9.1/static/sql-keywords-appendix.html
        reservedWords.addAll(Arrays.asList("ALL","ANALYSE", "AND", "ANY","ARRAY","AS", "ASC","ASYMMETRIC", "AUTHORIZATION", "BINARY", "BOTH","CASE","CAST","CHECK", "COLLATE","COLLATION", "COLUMN","CONCURRENTLY", "CONSTRAINT", "CREATE", "CURRENT_CATALOG", "CURRENT_DATE", "CURRENT_ROLE", "CURRENT_SCHEMA", "CURRENT_TIME", "CURRENT_TIMESTAMP", "CURRENT_USER", "DEFAULT", "DEFERRABLE", "DESC", "DISTINCT", "DO",
                "ELSE", "END", "EXCEPT", "FALSE", "FETCH", "FOR", "FOREIGN", "FROM", "FULL", "GRANT", "GROUP", "HAVING", "ILIKE", "IN", "INITIALLY", "INTERSECT", "INTO", "IS", "ISNULL", "JOIN", "LEADING", "LEFT", "LIKE", "LIMIT", "LITERAL", "LOCALTIME", "LOCALTIMESTAMP", "NOT", "NULL", "OFFSET", "ON", "ONLY", "OR", "ORDER", "OUTER", "OVER", "OVERLAPS",
                "PLACING", "PRIMARY", "REFERENCES", "RETURNING", "RIGHT", "SELECT", "SESSION_USER", "SIMILAR", "SOME", "SYMMETRIC", "TABLE", "THEN", "TO", "TRAILING", "TRUE", "UNION", "UNIQUE", "USER", "USING", "VARIADIC", "VERBOSE", "WHEN", "WHERE", "WINDOW", "WITH"));
        super.sequenceNextValueFunction = "nextval('%s')";
        super.sequenceCurrentValueFunction = "currval('%s')";
        super.unmodifiableDataTypes.addAll(Arrays.asList("bool", "int4", "int8", "float4", "float8", "bigserial", "serial", "bytea", "timestamptz", "text"));
        super.unquotedObjectsAreUppercased = false;
    }

    @Override
    public String getShortName() {
        return "postgresql";
    }

    @Override
    protected String getDefaultDatabaseProductName() {
        return "PostgreSQL";
    }

    @Override
    public Integer getDefaultPort() {
        return 5432;
    }

    @Override
    public Set<String> getSystemViews() {
        return systemTablesAndViews;
    }

    @Override
    public int getPriority() {
        return PRIORITY_DEFAULT;
    }

    @Override
    public boolean supportsInitiallyDeferrableColumns() {
        return true;
    }

    /** Matches on the JDBC-reported product name. */
    @Override
    public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException {
        return PRODUCT_NAME.equalsIgnoreCase(conn.getDatabaseProductName());
    }

    @Override
    public String getDefaultDriver(String url) {
        if (url.startsWith("jdbc:postgresql:")) {
            return "org.postgresql.Driver";
        }
        return null;
    }

    @Override
    public boolean supportsCatalogInObjectName(Class<? extends DatabaseObject> type) {
        return false;
    }

    @Override
    public boolean supportsSequences() {
        return true;
    }

    /** Postgres folds unquoted identifiers to lower case, so the tracking tables are lower-cased. */
    @Override
    public String getDatabaseChangeLogTableName() {
        return super.getDatabaseChangeLogTableName().toLowerCase();
    }

    @Override
    public String getDatabaseChangeLogLockTableName() {
        return super.getDatabaseChangeLogLockTableName().toLowerCase();
    }

    /** Treats tables in the pg_catalog and pg_toast schemas as system objects. */
    @Override
    public boolean isSystemObject(DatabaseObject example) {
        if (example instanceof Table) {
            if (example.getSchema() != null) {
                if ("pg_catalog".equals(example.getSchema().getName())
                        || "pg_toast".equals(example.getSchema().getName())) {
                    return true;
                }
            }
        }
        return super.isSystemObject(example);
    }

    @Override
    public boolean supportsTablespaces() {
        return true;
    }

    /** Postgres auto-increment is expressed via serial types, not a column clause. */
    @Override
    public String getAutoIncrementClause() {
        return "";
    }

    @Override
    public boolean generateAutoIncrementStartWith(BigInteger startWith) {
        return false;
    }

    @Override
    public boolean generateAutoIncrementBy(BigInteger incrementBy) {
        return false;
    }

    /** Under LEGACY quoting, mixed-case names are double-quoted to preserve their case. */
    @Override
    public String escapeObjectName(String objectName, Class<? extends DatabaseObject> objectType) {
        if (quotingStrategy == ObjectQuotingStrategy.LEGACY && hasMixedCase(objectName)) {
            return "\"" + objectName + "\"";
        } else {
            return super.escapeObjectName(objectName, objectType);
        }
    }

    /**
     * Under LEGACY quoting, lower-cases plain identifiers (matching Postgres
     * folding) but leaves names unchanged when they contain a dash, mixed
     * case, a leading digit, or are reserved words.
     */
    @Override
    public String correctObjectName(String objectName, Class<? extends DatabaseObject> objectType) {
        if (objectName == null || quotingStrategy != ObjectQuotingStrategy.LEGACY) {
            return super.correctObjectName(objectName, objectType);
        }
        if (objectName.contains("-") || hasMixedCase(objectName) || startsWithNumeric(objectName) || isReservedWord(objectName)) {
            return objectName;
        } else {
            return objectName.toLowerCase();
        }
    }

    /*
     * Check if given string has case problems according to postgresql documentation.
     * If there is at least one upper-case character alongside lower-case ones this string should be escaped.
     *
     * Note: This may make postgres support more case sensitive than normally is, but needs to be left in for backwards compatibility.
     * Method is protected so a subclass extension can override it to always return false.
     */
    protected boolean hasMixedCase(String tableName) {
        if (tableName == null) {
            return false;
        }
        return StringUtils.hasUpperCase(tableName) && StringUtils.hasLowerCase(tableName);
    }

    @Override
    public boolean isReservedWord(String tableName) {
        return reservedWords.contains(tableName.toUpperCase());
    }

    /*
     * Get the current search paths as reported by "SHOW search_path".
     * Best-effort: returns null when there is no connection or the query fails.
     */
    private List<String> getSearchPaths() {
        List<String> searchPaths = null;
        try {
            DatabaseConnection con = getConnection();
            if (con != null) {
                String searchPathResult = (String) ExecutorService.getInstance().getExecutor(this).queryForObject(new RawSqlStatement("SHOW search_path"), String.class);
                if (searchPathResult != null) {
                    String[] dirtySearchPaths = searchPathResult.split("\\,");
                    searchPaths = new ArrayList<String>();
                    for (String searchPath : dirtySearchPaths) {
                        searchPath = searchPath.trim();
                        // Normalize the quoted special entry so callers see a consistent value.
                        if (searchPath.equals("\"$user\"")) {
                            searchPath = "$user";
                        }
                        searchPaths.add(searchPath);
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort lookup: log and return null rather than failing the
            // caller. (Previously this also dumped the trace to stderr via
            // printStackTrace in addition to logging.)
            LogFactory.getLogger().severe("Failed to get default catalog name from postgres", e);
        }
        return searchPaths;
    }

    /**
     * Returns the connection's current schema, or null for offline/absent connections.
     *
     * @throws RuntimeException when the "select current_schema" call fails
     */
    @Override
    protected String getConnectionSchemaName() {
        if (getConnection() == null || getConnection() instanceof OfflineConnection) {
            return null;
        }
        try {
            String currentSchema = ExecutorService.getInstance().getExecutor(this)
                    .queryForObject(new RawCallStatement("select current_schema"), String.class);
            return currentSchema;
        } catch (Exception e) {
            throw new RuntimeException("Failed to get current schema", e);
        }
    }

    // NOTE(review): the two exists-queries below build SQL by string
    // concatenation. The names come from internal configuration rather than
    // end users, but parameterized statements would be safer if the statement
    // API supports them here.
    private boolean catalogExists(String catalogName) throws DatabaseException {
        return catalogName != null && runExistsQuery(
                "select count(*) from information_schema.schemata where catalog_name='" + catalogName + "'");
    }

    private boolean schemaExists(String schemaName) throws DatabaseException {
        return schemaName != null && runExistsQuery("select count(*) from information_schema.schemata where schema_name='" + schemaName + "'");
    }

    /** Runs a COUNT(*) query and reports whether it returned a positive count. */
    private boolean runExistsQuery(String query) throws DatabaseException {
        Long count = ExecutorService.getInstance().getExecutor(this).queryForLong(new RawSqlStatement(query));
        return count != null && count > 0;
    }
}
| |
/*
* Copyright (c) 2007, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.provider;
import org.apache.axiom.om.OMElement;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.rahas.RahasConstants;
import org.apache.rahas.RahasData;
import org.apache.rahas.impl.util.SAMLAttributeCallback;
import org.apache.rahas.impl.util.SAMLCallback;
import org.apache.rahas.impl.util.SAMLCallbackHandler;
import org.opensaml.Configuration;
import org.opensaml.SAMLAttribute;
import org.opensaml.SAMLException;
import org.opensaml.common.SAMLObjectBuilder;
import org.opensaml.saml2.core.Attribute;
import org.opensaml.saml2.core.AttributeValue;
import org.opensaml.xml.XMLObjectBuilderFactory;
import org.opensaml.xml.schema.XSString;
import org.opensaml.xml.schema.impl.XSStringBuilder;
import org.wso2.carbon.identity.base.IdentityConstants;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.IdentityClaimManager;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.claim.Claim;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import javax.xml.namespace.QName;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
public class AttributeCallbackHandler implements SAMLCallbackHandler {
private static final Log log = LogFactory.getLog(AttributeCallbackHandler.class);
private static final String MULTI_ATTRIBUTE_SEPARATOR = "MultiAttributeSeparator";
protected Map<String, RequestedClaimData> requestedClaims = new HashMap<String, RequestedClaimData>();
protected Map<String, String> requestedClaimValues = new HashMap<String, String>();
protected Map<String, Claim> supportedClaims = new HashMap<String, Claim>();
private String userAttributeSeparator = ",";
/**
 * Entry point invoked by Rahas during token issuance. For a
 * {@link SAMLAttributeCallback} this: extracts the authenticated user's
 * identifier, loads and populates the requested claims, lets every
 * registered IdentityAttributeService contribute attributes, and finally
 * adds a hard-coded sample attribute if nothing else produced one. Other
 * callback types are silently ignored.
 *
 * @param callback callback supplied by the Rahas token issuer
 * @throws SAMLException declared by the interface; not thrown directly here
 */
@Override
public void handle(SAMLCallback callback) throws SAMLException {
SAMLAttributeCallback attrCallback = null;
RahasData data = null;
OMElement claimElem = null;
String userIdentifier = null;
String[] splitArr = null;
IdentityAttributeService[] attributeCallbackServices = null;
if (callback instanceof SAMLAttributeCallback) {
attrCallback = (SAMLAttributeCallback) callback;
data = attrCallback.getData();
claimElem = data.getClaimElem();
userIdentifier = data.getPrincipal().getName();
if (userIdentifier != null) {
/*Extract 'Common Name' as the user id if authenticated
via X.509 certificates*/
// e.g. "CN=alice,OU=eng" -> ["CN", "alice"] -> "alice"; identifiers
// without a single '=' in the first component are left unchanged.
splitArr = userIdentifier.split(",")[0].split("=");
if (splitArr.length == 2) {
userIdentifier = splitArr[1];
}
}
try {
loadClaims(claimElem, userIdentifier);
processClaimData(data, claimElem);
populateClaimValues(userIdentifier, attrCallback);
} catch (IdentityProviderException e) {
// Claim population is best-effort: log and continue so the token can
// still be issued without the failed claims.
log.error("Error occurred while populating claim data", e);
}
attributeCallbackServices = IdentityAttributeServiceStore.getAttributeServices();
for (int i = 0; i < attributeCallbackServices.length; i++) {
try {
attributeCallbackServices[i].handle(attrCallback);
} catch (Exception e) {
// One failing external attribute service must not break the others.
log.error("Error occurred while calling attribute callback", e);
}
}
// Fallback: guarantee the assertion carries at least one attribute,
// choosing the SAML 2.0 or SAML 1.x form to match the requested token type.
if (attrCallback.getSAML2Attributes() == null || attrCallback.getSAML2Attributes().length == 0) {
if (RahasConstants.TOK_TYPE_SAML_20.equals(data.getTokenType())) {
attrCallback.addAttributes(getSAML2Attribute("Name", "Colombo",
"https://rahas.apache.org/saml/attrns"));
} else {
SAMLAttribute attribute = new SAMLAttribute("Name",
"https://rahas.apache.org/saml/attrns", null, -1,
Arrays.asList(new String[]{"Colombo/Rahas"}));
attrCallback.addAttributes(attribute);
}
}
}
}
private Attribute getSAML2Attribute(String name, String value, String namespace) {
XMLObjectBuilderFactory builderFactory = null;
SAMLObjectBuilder<Attribute> attrBuilder = null;
Attribute attribute = null;
XSStringBuilder attributeValueBuilder = null;
XSString stringValue = null;
builderFactory = Configuration.getBuilderFactory();
attrBuilder = (SAMLObjectBuilder<Attribute>) builderFactory.getBuilder(Attribute.DEFAULT_ELEMENT_NAME);
attribute = attrBuilder.buildObject();
attribute.setName(name);
attribute.setNameFormat(namespace);
attributeValueBuilder = (XSStringBuilder) builderFactory.getBuilder(XSString.TYPE_NAME);
stringValue = attributeValueBuilder.buildObject(AttributeValue.DEFAULT_ELEMENT_NAME, XSString.TYPE_NAME);
stringValue.setValue(value);
attribute.getAttributeValues().add(stringValue);
return attribute;
}
/**
* This method loads claim according to the claim dialect that is defined in the request
*
* @param claimsElement
* @param userIdentifier
* @throws IdentityProviderException
*/
private void loadClaims(OMElement claimsElement, String userIdentifier) throws IdentityProviderException {
IdentityClaimManager claimManager = null;
Claim[] claims = null;
String claimDialect = null;
if (claimsElement.getNamespace() != null) {
claimDialect = claimsElement
.getAttributeValue(new QName(claimsElement.getNamespace().getNamespaceURI(), "Dialect"));
}
if (claimDialect == null || claimDialect.trim().length() == 0) {
claimDialect = UserCoreConstants.DEFAULT_CARBON_DIALECT;
}
if (log.isDebugEnabled()) {
log.debug("Loading claims");
}
try {
claimManager = IdentityClaimManager.getInstance();
claims =
claimManager.getAllSupportedClaims(claimDialect, IdentityTenantUtil.getRealm(null, userIdentifier));
for (int i = 0; i < claims.length; i++) {
Claim temp = claims[i];
supportedClaims.put(temp.getClaimUri(), temp);
}
} catch (IdentityException e) {
log.error("Error while loading claims", e);
throw new IdentityProviderException("Error while loading claims", e);
}
}
protected void loadClaims(String userIdentifier) throws IdentityProviderException {
IdentityClaimManager claimManager = null;
Claim[] claims = null;
if (log.isDebugEnabled()) {
log.debug("Loading claims");
}
try {
claimManager = IdentityClaimManager.getInstance();
claims = claimManager.getAllSupportedClaims(UserCoreConstants.DEFAULT_CARBON_DIALECT,
IdentityTenantUtil.getRealm(null, userIdentifier));
for (int i = 0; i < claims.length; i++) {
Claim temp = claims[i];
supportedClaims.put(temp.getClaimUri(), temp);
}
} catch (IdentityException e) {
log.error("Error while loading claims", e);
throw new IdentityProviderException("Error while loading claims", e);
}
}
/**
* @param rahasData
* @param claims
* @throws IdentityProviderException
*/
protected void processClaimData(RahasData rahasData, OMElement claims) throws IdentityProviderException {
if (claims == null) {
return;
}
if (log.isDebugEnabled()) {
log.debug("Processing claim data");
}
Iterator iterator = null;
iterator = claims.getChildrenWithName(
new QName(IdentityConstants.NS, IdentityConstants.LocalNames.IDENTITY_CLAIM_TYPE));
while (iterator.hasNext()) {
OMElement omElem = null;
RequestedClaimData claim = null;
String uriClaim = null;
String optional = null;
omElem = (OMElement) iterator.next();
claim = getRequestedClaim();
uriClaim = omElem.getAttributeValue(new QName(null, "Uri"));
if (uriClaim == null) {
log.error("Empty claim uri found while procession claim data");
throw new IdentityProviderException("Empty claim uri found while procession claim data");
}
if (uriClaim.startsWith("{") && uriClaim.endsWith("}")
&& uriClaim.lastIndexOf("|") == uriClaim.indexOf("|")) {
String tmpUri = uriClaim;
uriClaim = uriClaim.substring(1, uriClaim.indexOf("|"));
String claimValue = tmpUri.substring(tmpUri.indexOf("|") + 1, tmpUri.length() - 1);
requestedClaimValues.put(uriClaim, claimValue);
}
claim.setUri(uriClaim);
optional = (omElem.getAttributeValue(new QName(null, "Optional")));
if (StringUtils.isNotBlank(optional)) {
claim.setBOptional("true".equals(optional));
} else {
claim.setBOptional(true);
}
requestedClaims.put(claim.getUri(), claim);
}
}
    /**
     * Resolves a value for every requested claim and adds it to the callback as
     * a SAML 1.1 or SAML 2.0 attribute, depending on the requested token type.
     * Values come either from the user store or, when the request carried
     * inline values, from {@link #requestedClaimValues}. Multi-valued
     * attributes are split on {@link #userAttributeSeparator}, which the user
     * store may override via the MULTI_ATTRIBUTE_SEPARATOR entry.
     *
     * @param userIdentifier user whose claim values are resolved
     * @param callback callback that receives the resulting attributes
     * @throws IdentityProviderException if the user store cannot be obtained or
     *                                   attribute creation fails
     */
    protected void populateClaimValues(String userIdentifier, SAMLAttributeCallback callback)
            throws IdentityProviderException {
        UserStoreManager connector = null;
        RahasData rahasData = null;
        if (log.isDebugEnabled()) {
            log.debug("Populating claim values");
        }
        if (requestedClaims.isEmpty()) {
            return;
        }
        try {
            connector = IdentityTenantUtil.getRealm(null, userIdentifier).getUserStoreManager();
        } catch (Exception e) {
            log.error("Error while instantiating IdentityUserStore", e);
            throw new IdentityProviderException("Error while instantiating IdentityUserStore", e);
        }
        // get the column names for the URIs; PPID is excluded because it is not
        // a user-store attribute.
        Iterator<RequestedClaimData> ite = requestedClaims.values().iterator();
        List<String> claimList = new ArrayList<String>();
        rahasData = callback.getData();
        while (ite.hasNext()) {
            RequestedClaimData claim = ite.next();
            if (claim != null && !claim.getUri().equals(IdentityConstants.CLAIM_PPID)) {
                claimList.add(claim.getUri());
            }
        }
        String[] claimArray = new String[claimList.size()];
        String userId = userIdentifier;
        Map<String, String> mapValues = null;
        try {
            // Inline request values, when present, take precedence over the user store.
            if (MapUtils.isEmpty(requestedClaimValues)) {
                mapValues = connector.getUserClaimValues(
                        MultitenantUtils.getTenantAwareUsername(userId),
                        claimList.toArray(claimArray), null);
            } else {
                mapValues = requestedClaimValues;
            }
            // The store can override the multi-value separator; the marker entry
            // itself must not surface as an attribute.
            String claimSeparator = mapValues.get(MULTI_ATTRIBUTE_SEPARATOR);
            if (claimSeparator != null) {
                userAttributeSeparator = claimSeparator;
                mapValues.remove(MULTI_ATTRIBUTE_SEPARATOR);
            }
            ite = requestedClaims.values().iterator();
            while (ite.hasNext()) {
                SAMLAttribute attribute = null;
                Attribute saml2Attribute = null;
                RequestedClaimData claimData = ite.next();
                claimData.setValue(mapValues.get(claimData.getUri()));
                // Claims without a resolved value (including PPID) are skipped.
                if (claimData.getValue() != null) {
                    if (RahasConstants.TOK_TYPE_SAML_20.equals(rahasData.getTokenType())) {
                        saml2Attribute = getSAML2Attribute(claimData.getUri(),
                                claimData.getValue(), claimData.getUri());
                        callback.addAttributes(saml2Attribute);
                    } else {
                        // SAML 1.1: derive attribute name/namespace from the claim
                        // metadata, or from the URI itself for custom claims.
                        String name;
                        String nameSpace;
                        if (supportedClaims.get(claimData.getUri()) != null) {
                            name = supportedClaims.get(claimData.getUri()).getDisplayTag();
                            nameSpace = claimData.getUri();
                        } else {
                            nameSpace = claimData.getUri();
                            if (nameSpace.contains("/") && nameSpace.length() > (nameSpace.lastIndexOf("/") + 1)) {
                                // Custom claim uri should be in a format of http(s)://nameSpace/name
                                name = nameSpace.substring(nameSpace.lastIndexOf("/") + 1);
                                nameSpace = nameSpace.substring(0, nameSpace.lastIndexOf("/"));
                            } else {
                                name = nameSpace;
                            }
                        }
                        // Split multi-valued attributes, dropping blank tokens.
                        List<String> values = new ArrayList<String>();
                        if (claimData.getValue().contains(userAttributeSeparator)) {
                            StringTokenizer st = new StringTokenizer(claimData.getValue(), userAttributeSeparator);
                            while (st.hasMoreElements()) {
                                String attValue = st.nextElement().toString();
                                if (attValue != null && attValue.trim().length() > 0) {
                                    values.add(attValue);
                                }
                            }
                        } else {
                            values.add(claimData.getValue());
                        }
                        attribute = new SAMLAttribute(name, nameSpace, null, -1, values);
                        callback.addAttributes(attribute);
                    }
                }
            }
        } catch (Exception e) {
            throw new IdentityProviderException(e.getMessage(), e);
        }
    }
protected RequestedClaimData getRequestedClaim() {
return new RequestedClaimData();
}
}
| |
/*
* Copyright (c) 2009 - 2012 Deutsches Elektronen-Synchroton,
* Member of the Helmholtz Association, (DESY), HAMBURG, GERMANY
*
* This library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as
* published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this program (see the file COPYING.LIB for more
* details); if not, write to the Free Software Foundation, Inc.,
* 675 Mass Ave, Cambridge, MA 02139, USA.
*/
package org.dcache.nfs.v4;
import org.dcache.nfs.v4.xdr.nfsv4_1_file_layout_ds_addr4;
import org.dcache.nfs.v4.xdr.layout4;
import org.dcache.nfs.v4.xdr.layouttype4;
import org.dcache.nfs.v4.xdr.stateid4;
import org.dcache.nfs.v4.xdr.netaddr4;
import org.dcache.nfs.v4.xdr.nfs_fh4;
import org.dcache.nfs.v4.xdr.deviceid4;
import org.dcache.nfs.v4.xdr.nfs4_prot;
import org.dcache.nfs.v4.xdr.multipath_list4;
import org.dcache.nfs.v4.xdr.device_addr4;
import org.dcache.xdr.OncRpcException;
import org.dcache.xdr.XdrBuffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import org.dcache.nfs.status.LayoutUnavailableException;
import org.dcache.nfs.vfs.Inode;
import org.dcache.utils.Bytes;
import org.dcache.utils.net.InetSocketAddresses;
import org.glassfish.grizzly.Buffer;
/**
*
* the instance of this class have to ask Pool Manager for a pool and return it
* to the client.
*
*/
public class DeviceManager implements NFSv41DeviceManager {

    /*
     * reserved device for IO through MDS (for pnfs dot files)
     */
    private static final deviceid4 MDS_ID = deviceidOf(0);

    private static final Logger _log = LoggerFactory.getLogger(DeviceManager.class);

    /* hack for multiple pools: device ids are random, not pool-derived */
    private final Random _deviceIdGenerator = new Random();

    /** Devices handed out so far, keyed by device id. */
    private final Map<deviceid4, device_addr4> _deviceMap =
            new ConcurrentHashMap<>();

    /** Configured data servers; {@code null} until {@link #setDataservers} is called. */
    private InetSocketAddress[] _knownDataServers;

    private final StripingPattern<InetSocketAddress> _stripingPattern = new
            RoundRobinStripingPattern<>();

    /**
     * Set configured data servers. Each string represents a data server
     * as <code>IP:port</code>.
     *
     * @param servers data server addresses
     */
    public void setDataservers(String[] servers) {
        _knownDataServers = new InetSocketAddress[servers.length];
        for (int i = 0; i < servers.length; i++) {
            _knownDataServers[i] = InetSocketAddresses.inetAddressOf(servers[i]);
        }
    }

    /**
     * @return a random device id in [1, 255]; 0 is reserved for the MDS.
     */
    private int nextDeviceID() {
        /* 0 is reserved for MDS */
        return _deviceIdGenerator.nextInt(255) + 1;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.dcache.chimera.nfsv4.NFSv41DeviceManager#getIoDeviceId(org.dcache.chimera.FsInode,
     * int, java.net.InetAddress)
     */
    @Override
    public Layout layoutGet(CompoundContext context, Inode inode, int ioMode, stateid4 stateid)
            throws IOException {

        deviceid4 deviceId;
        if (!context.getFs().hasIOLayout(inode)) {
            // IO for this inode goes through the MDS itself (e.g. pnfs dot files).
            deviceId = MDS_ID;
        } else {
            // FIX: also guard against setDataservers() never having been called;
            // previously a null _knownDataServers caused a NullPointerException here.
            if (_knownDataServers == null || _knownDataServers.length == 0) {
                throw new LayoutUnavailableException("No dataservers available");
            }
            int id = nextDeviceID();
            deviceId = deviceidOf(id);

            _log.debug("generating new device: {} ({}) for stateid {}",
                    deviceId, id, stateid);
            device_addr4 deviceAddr = deviceAddrOf(_stripingPattern, _knownDataServers);
            _deviceMap.put(deviceId, deviceAddr);
        }

        nfs_fh4 fh = new nfs_fh4(context.currentInode().toNfsHandle());

        // -1 is special value, which means entire file
        layout4 layout = Layout.getLayoutSegment(deviceId, fh, ioMode, 0, nfs4_prot.NFS4_UINT64_MAX);

        return new Layout(true, stateid, new layout4[]{layout});
    }

    /*
     * (non-Javadoc)
     *
     * @see org.dcache.chimera.nfsv4.NFSv41DeviceManager#layoutGet(int)
     */
    @Override
    public device_addr4 getDeviceInfo(CompoundContext context, deviceid4 deviceId) {
        _log.debug("lookup for device: {}", deviceId);
        /* in case of MDS access we return the same interface which client already connected to */
        if (deviceId.equals(MDS_ID)) {
            return deviceAddrOf(_stripingPattern, context.getRpcCall().getTransport().getLocalSocketAddress());
        }
        return _deviceMap.get(deviceId);
    }

    /*
     * (non-Javadoc)
     *
     * @see org.dcache.chimera.nfsv4.NFSv41DeviceManager#getDeviceList()
     */
    @Override
    public List<deviceid4> getDeviceList(CompoundContext context) {
        return new ArrayList<>(_deviceMap.keySet());
    }

    /*
     * (non-Javadoc)
     *
     * @see org.dcache.chimera.nfsv4.NFSv41DeviceManager#layoutReturn()
     */
    @Override
    public void layoutReturn(CompoundContext context, stateid4 stateid) {
        // nothing to release; device entries are kept until shutdown
        _log.debug("release device for stateid {}", stateid);
    }

    /**
     * Create a multipath based NFSv4.1 file layout address.
     *
     * @param stripingPattern of the device
     * @param deviceAddress addresses of the data servers backing the device
     * @return device address
     */
    public static device_addr4 deviceAddrOf(StripingPattern<InetSocketAddress> stripingPattern,
            InetSocketAddress... deviceAddress) {

        nfsv4_1_file_layout_ds_addr4 file_type = new nfsv4_1_file_layout_ds_addr4();

        // One single-address multipath entry per data server.
        file_type.nflda_multipath_ds_list = new multipath_list4[deviceAddress.length];
        for (int i = 0; i < deviceAddress.length; i++) {
            file_type.nflda_multipath_ds_list[i] = new multipath_list4();
            file_type.nflda_multipath_ds_list[i].value = new netaddr4[1];
            file_type.nflda_multipath_ds_list[i].value[0] = new netaddr4(deviceAddress[i]);
        }

        file_type.nflda_stripe_indices = stripingPattern.getPattern(deviceAddress);

        XdrBuffer xdr = new XdrBuffer(128);
        try {
            xdr.beginEncoding();
            file_type.xdrEncode(xdr);
            xdr.endEncoding();
        } catch (OncRpcException e) {
            /* forced by interface, should never happen. */
            throw new RuntimeException("Unexpected OncRpcException:", e);
        } catch (IOException e) {
            /* forced by interface, should never happen. */
            throw new RuntimeException("Unexpected IOException:", e);
        }

        Buffer body = xdr.asBuffer();
        byte[] retBytes = new byte[body.remaining()];
        body.get(retBytes);

        device_addr4 addr = new device_addr4();
        addr.da_layout_type = layouttype4.LAYOUT4_NFSV4_1_FILES;
        addr.da_addr_body = retBytes;

        return addr;
    }

    /**
     * Builds a fixed-size NFSv4.1 device id with the given integer written into
     * its first four bytes.
     */
    private static deviceid4 deviceidOf(int id) {
        byte[] deviceidBytes = new byte[nfs4_prot.NFS4_DEVICEID4_SIZE];
        Bytes.putInt(deviceidBytes, 0, id);

        return new deviceid4(deviceidBytes);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.urlfilter.suffix;
import org.apache.hadoop.conf.Configuration;
import org.apache.nutch.net.*;
import org.apache.nutch.util.NutchConfiguration;
import org.apache.nutch.util.SuffixStringMatcher;
import org.apache.nutch.util.TrieStringMatcher;
import org.apache.nutch.plugin.Extension;
import org.apache.nutch.plugin.PluginRepository;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.Reader;
import java.io.FileReader;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
/**
* Filters URLs based on a file of URL suffixes. The file is named by
* <ol>
* <li>property "urlfilter.suffix.file" in ./conf/nutch-default.xml, and</li>
* <li>attribute "file" in plugin.xml of this plugin</li>
* </ol>
* Attribute "file" has higher precedence if defined. If the config file is
* missing, all URLs will be rejected.
*
* <p>This filter can be configured to work in one of two modes:
* <ul>
* <li><b>default to reject</b> ('-'): in this mode, only URLs that match suffixes
* specified in the config file will be accepted, all other URLs will be
* rejected.</li>
* <li><b>default to accept</b> ('+'): in this mode, only URLs that match suffixes
* specified in the config file will be rejected, all other URLs will be
* accepted.</li>
* </ul>
* <p>
* The format of this config file is one URL suffix per line, with no preceding
* whitespace. Order, in which suffixes are specified, doesn't matter. Blank
* lines and comments (#) are allowed.
* </p>
* <p>
* A single '+' or '-' sign not followed by any suffix must be used once, to
* signify the mode this plugin operates in. An optional single 'I' can be appended,
* to signify that suffix matches should be case-insensitive. The default, if
* not specified, is to use case-sensitive matches, i.e. suffix '.JPG'
* does not match '.jpg'.
* </p>
* <p>
* NOTE: the format of this file is different from urlfilter-prefix, because
* that plugin doesn't support allowed/prohibited prefixes (only supports
* allowed prefixes). Please note that this plugin does not support regular
* expressions, it only accepts literal suffixes. I.e. a suffix "+*.jpg" is most
* probably wrong, you should use "+.jpg" instead.
* </p>
* <h4>Example 1</h4>
* <p>
* The configuration shown below will accept all URLs with '.html' or '.htm'
* suffixes (case-sensitive - '.HTML' or '.HTM' will be rejected),
* and prohibit all other suffixes.
* <p>
*
* <pre>
* # this is a comment
*
* # prohibit all unknown, case-sensitive matching
* -
*
* # collect only HTML files.
* .html
* .htm
* </pre>
*
* </p>
* <h4>Example 2</h4>
* <p>
* The configuration shown below will accept all URLs except common graphical
* formats.
* <p>
*
* <pre>
* # this is a comment
*
* # allow all unknown, case-insensitive matching
* +I
*
* # prohibited suffixes
* .gif
* .png
* .jpg
* .jpeg
* .bmp
* </pre>
*
* </p>
* @author Andrzej Bialecki
*/
public class SuffixURLFilter implements URLFilter {

    private static final Log LOG = LogFactory.getLog(SuffixURLFilter.class);

    // read in attribute "file" of this plugin.
    private String attributeFile = null;

    // Suffix matcher built from the config file; rejects everything when the
    // config file is missing.
    private SuffixStringMatcher suffixes;

    // true = "default to accept" ('+' mode), false = "default to reject" ('-' mode).
    private boolean modeAccept = false;

    private boolean ignoreCase = false;

    private Configuration conf;

    public SuffixURLFilter() throws IOException {
    }

    public SuffixURLFilter(Reader reader) throws IOException {
        readConfigurationFile(reader);
    }

    /**
     * Accepts or rejects a URL by matching its suffix against the configured set.
     *
     * @param url URL to test; may be null
     * @return the URL if accepted, or null if it is rejected (or was null)
     */
    public String filter(String url) {
        if (url == null) return null;
        String _url;
        if (ignoreCase)
            // Locale.ROOT keeps suffix matching locale-independent (e.g. avoids
            // the Turkish dotless-i problem); suffixes are lowercased the same way.
            _url = url.toLowerCase(java.util.Locale.ROOT);
        else _url = url;
        String a = suffixes.shortestMatch(_url);
        if (a == null) {
            // no suffix matched: accept in default-accept mode, reject otherwise
            if (modeAccept) return url;
            else return null;
        } else {
            // a suffix matched: the match is prohibited in default-accept mode
            if (modeAccept) return null;
            else return url;
        }
    }

    /**
     * Parses the suffix config: a single '+'/'-' (optionally followed by 'I'
     * for case-insensitive matching) sets the mode; every other non-blank,
     * non-comment line is a literal suffix. A null reader configures the
     * filter to reject everything.
     *
     * @param reader config source, or null if the config file is missing
     * @throws IOException if reading fails
     */
    public void readConfigurationFile(Reader reader) throws IOException {
        // handle missing config file
        if (reader == null) {
            if (LOG.isWarnEnabled()) {
                LOG.warn("Missing urlfilter.suffix.file, all URLs will be rejected!");
            }
            suffixes = new SuffixStringMatcher(new String[0]);
            modeAccept = false;
            ignoreCase = false;
            return;
        }
        BufferedReader in = new BufferedReader(reader);
        // FIX: raw List/ArrayList replaced with generics; removes unchecked casts below.
        List<String> aSuffixes = new ArrayList<String>();
        boolean allow = false;
        boolean ignore = false;
        String line;

        while ((line = in.readLine()) != null) {
            if (line.length() == 0) continue;

            char first = line.charAt(0);
            switch (first) {
            case ' ':
            case '\n':
            case '#': // skip blank & comment lines
                break;
            case '-':
                allow = false;
                if (line.length() > 1 && line.charAt(1) == 'I')
                    ignore = true;
                break;
            case '+':
                allow = true;
                if (line.length() > 1 && line.charAt(1) == 'I')
                    ignore = true;
                break;
            default:
                aSuffixes.add(line);
            }
        }
        if (ignore) {
            // normalize suffixes the same way filter() normalizes URLs
            for (int i = 0; i < aSuffixes.size(); i++) {
                aSuffixes.set(i, aSuffixes.get(i).toLowerCase(java.util.Locale.ROOT));
            }
        }
        suffixes = new SuffixStringMatcher(aSuffixes);
        modeAccept = allow;
        ignoreCase = ignore;
    }

    /**
     * Reads URLs from stdin and prints whether each is accepted or rejected.
     * An optional first argument names the config file; otherwise the filter
     * is configured from the Nutch configuration.
     */
    public static void main(String args[]) throws IOException {
        SuffixURLFilter filter;
        if (args.length >= 1)
            filter = new SuffixURLFilter(new FileReader(args[0]));
        else {
            filter = new SuffixURLFilter();
            filter.setConf(NutchConfiguration.create());
        }

        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        String line;
        while ((line = in.readLine()) != null) {
            String out = filter.filter(line);
            if (out != null) {
                System.out.println("ACCEPTED " + out);
            } else {
                // FIX: previously printed "REJECTED " + out, which is always
                // "REJECTED null" on this branch; print the rejected input instead.
                System.out.println("REJECTED " + line);
            }
        }
    }

    public void setConf(Configuration conf) {
        this.conf = conf;

        // attribute "file" of this plugin's extension takes precedence over
        // the "urlfilter.suffix.file" property.
        String pluginName = "urlfilter-suffix";
        Extension[] extensions = PluginRepository.get(conf).getExtensionPoint(URLFilter.class.getName()).getExtensions();
        for (int i = 0; i < extensions.length; i++) {
            Extension extension = extensions[i];
            if (extension.getDescriptor().getPluginId().equals(pluginName)) {
                attributeFile = extension.getAttribute("file");
                break;
            }
        }
        if (attributeFile != null && attributeFile.trim().equals("")) attributeFile = null;
        if (attributeFile != null) {
            if (LOG.isInfoEnabled()) {
                LOG.info("Attribute \"file\" is defined for plugin " + pluginName + " as " + attributeFile);
            }
        }
        String file = conf.get("urlfilter.suffix.file");
        // attribute "file" takes precedence if defined
        if (attributeFile != null) file = attributeFile;
        Reader reader = conf.getConfResourceAsReader(file);

        try {
            readConfigurationFile(reader);
        } catch (IOException e) {
            if (LOG.isFatalEnabled()) { LOG.fatal(e.getMessage()); }
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    public Configuration getConf() {
        return this.conf;
    }

    public boolean isModeAccept() {
        return modeAccept;
    }

    public void setModeAccept(boolean modeAccept) {
        this.modeAccept = modeAccept;
    }

    public boolean isIgnoreCase() {
        return ignoreCase;
    }

    public void setIgnoreCase(boolean ignoreCase) {
        this.ignoreCase = ignoreCase;
    }
}
| |
/*
Copyright (c) 2014,2015,2016 Ahome' Innovation Technologies. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// TODO - review DSJ
package com.ait.lienzo.client.core.shape.wires;
import com.ait.lienzo.client.core.Attribute;
import com.ait.lienzo.client.core.Context2D;
import com.ait.lienzo.client.core.event.*;
import com.ait.lienzo.client.core.shape.*;
import com.ait.lienzo.client.core.types.*;
import com.ait.lienzo.client.core.util.ScratchPad;
import com.ait.lienzo.shared.core.types.ColorName;
import com.ait.lienzo.shared.core.types.Direction;
import com.ait.lienzo.shared.core.types.DragMode;
import com.ait.tooling.nativetools.client.collection.NFastStringMap;
public class MagnetManager
{
    public static final double CONTROL_RADIUS = 5;

    public static final double CONTROL_STROKE_WIDTH = 2;

    public static final ColorKeyRotor m_c_rotor = new ColorKeyRotor();

    // Magnets registry, keyed by the uuid of the primitive they decorate.
    private final NFastStringMap<Magnets> m_magnetRegistry = new NFastStringMap<Magnets>();

    /**
     * Renders the shape and each of its magnets into the color-keyed backing
     * scratch pad, registering the generated key colors in the supplied maps so
     * the element under the mouse can be identified by pixel color.
     *
     * @param magnets magnets to draw
     * @param shape_color_map receives the key color for the wires shape
     * @param magnet_color_map receives one key color per magnet
     * @param scratch backing scratch pad; cleared before drawing
     * @return the image data of the full scratch pad
     */
    public ImageData drawMagnetsToBack(Magnets magnets, NFastStringMap<WiresShape> shape_color_map, NFastStringMap<WiresMagnet> magnet_color_map, ScratchPad scratch)
    {
        scratch.clear();
        Context2D ctx = scratch.getContext();

        // the Shape doesn't need recording, we just need to know the mouse is over something
        BackingColorMapUtils.drawShapeToBacking(ctx, magnets.getWiresShape(), m_c_rotor.next(), shape_color_map);

        magnet_color_map.clear();
        for (int i = 0; i < magnets.size(); i++)
        {
            WiresMagnet m = magnets.getMagnet(i);
            String c = m_c_rotor.next();
            magnet_color_map.put(c, m);
            ctx.beginPath();
            ctx.setStrokeWidth(CONTROL_STROKE_WIDTH);
            ctx.setStrokeColor(c);
            ctx.setFillColor(c);
            ctx.arc(m.getControl().getX(), m.getControl().getY(), CONTROL_RADIUS, 0, 2 * Math.PI, false);
            ctx.stroke();
            ctx.fill();
        }
        // FIX: the width argument was scratch.getHeight(), so non-square scratch
        // pads returned a clipped/wrong backing image; use the pad's width.
        return ctx.getImageData(0, 0, scratch.getWidth(), scratch.getHeight());
    }

    public NFastStringMap<Magnets> getMagnetRegistry()
    {
        return m_magnetRegistry;
    }

    /**
     * Creates magnets for the given shape at the supplied (shape-relative)
     * points, assigns each a compass direction based on its position within the
     * shape's bounding box, and registers the set under the primitive's uuid.
     */
    public Magnets createMagnets(Shape<?> shape, IPrimitive<?> primTarget, Point2DArray points, WiresShape wiresShape)
    {
        ControlHandleList list = new ControlHandleList(primTarget);
        BoundingBox box = shape.getBoundingBox();

        double left = box.getX();
        double right = left + box.getWidth();
        double top = box.getY();
        double bottom = top + box.getHeight();

        Magnets magnets = new Magnets(this, list, shape, primTarget, wiresShape);

        // Control primitives are positioned in absolute coordinates.
        Point2D absLoc = primTarget.getAbsoluteLocation();
        double offsetX = absLoc.getX();
        double offsetY = absLoc.getY();

        for (Point2D p : points)
        {
            double x = offsetX + p.getX();
            double y = offsetY + p.getY();
            WiresMagnet m = new WiresMagnet(magnets, null, 0, p.getX(), p.getY(), getControlPrimitive(x, y), true);
            Direction d = getDirection(p, left, right, top, bottom);
            m.setDirection(d);
            list.add(m);
        }

        String uuid = primTarget.uuid();
        m_magnetRegistry.put(uuid, magnets);
        return magnets;
    }

    public Magnets getMagnets(IPrimitive<?> shape)
    {
        return m_magnetRegistry.get(shape.uuid());
    }

    /**
     * Maps a point inside the given bounding box to a compass direction, based
     * on which edges it is closest to; the exact center maps to NONE.
     */
    public Direction getDirection(Point2D point, double left, double right, double top, double bottom)
    {
        double x = point.getX();
        double y = point.getY();

        double leftDist = Math.abs(x - left);
        double rightDist = Math.abs(x - right);

        double topDist = Math.abs(y - top);
        double bottomDist = Math.abs(y - bottom);

        boolean moreLeft = leftDist < rightDist;
        boolean moreTop = topDist < bottomDist;

        if (leftDist == rightDist && topDist == bottomDist)
        {
            // this is the center, so return NONE
            return Direction.NONE;
        }
        if (moreLeft)
        {
            if (moreTop)
            {
                if (topDist < leftDist)
                {
                    return Direction.NORTH;
                }
                else if (topDist > leftDist)
                {
                    return Direction.WEST;
                }
                else
                {
                    return Direction.NORTH_WEST;
                }
            }
            else
            {
                if (bottomDist < leftDist)
                {
                    return Direction.SOUTH;
                }
                else if (bottomDist > leftDist)
                {
                    return Direction.WEST;
                }
                else
                {
                    return Direction.SOUTH_WEST;
                }
            }
        }
        else
        {
            if (moreTop)
            {
                if (topDist < rightDist)
                {
                    return Direction.NORTH;
                }
                else if (topDist > rightDist)
                {
                    return Direction.EAST;
                }
                else
                {
                    return Direction.NORTH_EAST;
                }
            }
            else
            {
                if (bottomDist < rightDist)
                {
                    return Direction.SOUTH;
                }
                else if (bottomDist > rightDist)
                {
                    return Direction.EAST;
                }
                else
                {
                    return Direction.SOUTH_EAST;
                }
            }
        }
    }

    /** Builds the draggable red circle used to visualize a magnet control point. */
    private static Circle getControlPrimitive(double x, double y)
    {
        return new Circle(CONTROL_RADIUS).setFillColor(ColorName.RED).setFillAlpha(0.4).setX(x).setY(y).setDraggable(true).setDragMode(DragMode.SAME_LAYER).setStrokeColor(ColorName.BLACK).setStrokeWidth(CONTROL_STROKE_WIDTH);
    }

    /**
     * Set of magnets attached to one wires shape; keeps the magnets positioned
     * relative to the target primitive as it moves.
     */
    public static class Magnets implements AttributesChangedHandler, NodeDragStartHandler, NodeDragMoveHandler, NodeDragEndHandler
    {
        private IControlHandleList m_list;

        private MagnetManager m_magnetManager;

        private Shape<?> m_shape;

        private IPrimitive<?> m_primTarget;

        private boolean m_isDragging;

        private WiresShape m_wiresShape;

        public Magnets(MagnetManager magnetManager, IControlHandleList list, Shape<?> shape, IPrimitive<?> primTarget, WiresShape wiresShape)
        {
            m_list = list;
            m_magnetManager = magnetManager;
            m_shape = shape;
            m_primTarget = primTarget;
            m_wiresShape = wiresShape;
            IPrimitive<?> prim = getPrimTarget();
            prim.addAttributesChangedHandler(Attribute.X, this);
            prim.addAttributesChangedHandler(Attribute.Y, this);
            // NOTE(review): this class implements NodeDragStartHandler and
            // NodeDragEndHandler, but only the drag-move handler is registered
            // here, so m_isDragging is never toggled — confirm whether the
            // start/end handlers should also be registered on prim.
            prim.addNodeDragMoveHandler(this);
        }

        public WiresShape getWiresShape()
        {
            return m_wiresShape;
        }

        public IPrimitive<?> getPrimTarget()
        {
            return m_primTarget;
        }

        @Override
        public void onAttributesChanged(AttributesChangedEvent event)
        {
            // Skip X/Y attribute changes while dragging; onNodeDragMove covers those.
            if (!m_isDragging && event.any(Attribute.X, Attribute.Y))
            {
                shapeMoved();
            }
        }

        @Override
        public void onNodeDragStart(NodeDragStartEvent event)
        {
            m_isDragging = true;
        }

        @Override
        public void onNodeDragEnd(NodeDragEndEvent event)
        {
            m_isDragging = false;
        }

        @Override
        public void onNodeDragMove(NodeDragMoveEvent event)
        {
            shapeMoved();
        }

        /**
         * Repositions every magnet (and, recursively, the magnets of child
         * shapes) to the target primitive's current absolute location, then
         * schedules a redraw of the layer if one is attached.
         */
        public void shapeMoved()
        {
            IPrimitive<?> prim = getPrimTarget();
            Point2D absLoc = prim.getAbsoluteLocation();
            double x = absLoc.getX();
            double y = absLoc.getY();
            for (int i = 0; i < m_list.size(); i++)
            {
                WiresMagnet m = (WiresMagnet) m_list.getHandle(i);
                m.shapeMoved(x, y);
            }
            if (m_wiresShape.getChildShapes() != null)
            {
                for (WiresShape child : m_wiresShape.getChildShapes())
                {
                    child.getMagnets().shapeMoved();
                }
            }
            if (m_list.getContainer() != null && m_list.getContainer().getLayer() != null)
            {
                // it can be null, if the magnets are not currently displayed
                m_list.getContainer().getLayer().batch();
            }
        }

        public void show()
        {
            m_list.show();
        }

        public void hide()
        {
            m_list.hide();
        }

        /** Destroys all magnets and removes this set from the manager's registry. */
        public void destroy()
        {
            m_list.destroy();
            m_magnetManager.m_magnetRegistry.remove(m_shape.uuid());
        }

        public void destroy(WiresMagnet magnet)
        {
            m_list.remove(magnet);
        }

        public IControlHandleList getMagnets()
        {
            return m_list;
        }

        public int size()
        {
            return m_list.size();
        }

        public Shape<?> getShape()
        {
            return m_shape;
        }

        public Group getGroup()
        {
            return (Group) m_primTarget;
        }

        public WiresMagnet getMagnet(int index)
        {
            return (WiresMagnet) m_list.getHandle(index);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import javax.cache.Cache;
import javax.cache.configuration.Factory;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.lang.IgniteRunnable;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentHashMap8;
import static org.apache.ignite.cache.CacheAtomicWriteOrderMode.PRIMARY;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.transactions.TransactionIsolation.values;
/**
*
*/
public class IgniteCacheReadThroughStoreCallTest extends GridCommonAbstractTest {
    /** Shared IP finder for in-memory discovery of all test grids. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Backing map emulating the external store; shared by all nodes in this JVM. */
    private static final Map<Object, Object> storeMap = new ConcurrentHashMap8<>();

    /** When {@code true}, the next grid started via {@link #getConfiguration} is a client node. */
    protected boolean client;
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
IgniteConfiguration cfg = super.getConfiguration(gridName);
((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(IP_FINDER);
cfg.setClientMode(client);
return cfg;
}
    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        // Stop all nodes first so nothing can repopulate the shared store map
        // while it is being cleared.
        stopAllGrids();

        storeMap.clear();

        super.afterTest();
    }
/**
* @throws Exception If failed.
*/
public void testMultiNode() throws Exception {
startGridsMultiThreaded(4);
client = true;
startGrid(4);
checkLoadCount(cacheConfiguration(PARTITIONED, ATOMIC, 0));
checkLoadCount(cacheConfiguration(PARTITIONED, ATOMIC, 1));
checkLoadCount(cacheConfiguration(PARTITIONED, ATOMIC, 2));
checkLoadCount(cacheConfiguration(PARTITIONED, TRANSACTIONAL, 0));
checkLoadCount(cacheConfiguration(PARTITIONED, TRANSACTIONAL, 1));
checkLoadCount(cacheConfiguration(PARTITIONED, TRANSACTIONAL, 2));
}
/**
* @param ccfg Cache configuration.
* @throws Exception If failed.
*/
private void checkLoadCount(CacheConfiguration<Object, Object> ccfg) throws Exception {
storeMap.clear();
Ignite ignite0 = ignite(0);
ignite0.createCache(ccfg);
try {
int key = 0;
for (Ignite node : G.allGrids()) {
log.info("Test for node: " + node.name());
final IgniteCache<Object, Object> cache = node.cache(ccfg.getName());
for (int i = 0; i < 50; i++) {
final int k = key++;
checkReadThrough(cache, new IgniteRunnable() {
@Override public void run() {
cache.invoke(k, new TestEntryProcessor());
}
}, null, null, 1);
}
for (int i = 0; i < 50; i++) {
final int k = key++;
checkReadThrough(cache, new IgniteRunnable() {
@Override public void run() {
cache.put(k, k);
}
}, null, null, 0);
}
if (ccfg.getAtomicityMode() == TRANSACTIONAL) {
for (TransactionConcurrency concurrency : TransactionConcurrency.values()) {
for (TransactionIsolation isolation : values()) {
log.info("Test tx [concurrency=" + concurrency + ", isolation=" + isolation + ']');
for (int i = 0; i < 50; i++) {
final int k = key++;
checkReadThrough(cache, new IgniteRunnable() {
@Override public void run() {
cache.invoke(k, new TestEntryProcessor());
}
}, concurrency, isolation, 2);
}
}
}
}
}
ignite0.cache(ccfg.getName()).removeAll();
}
finally {
ignite0.destroyCache(ccfg.getName());
}
}
/**
* @param cache Cache.
* @param c Cache operation Closure.
* @param concurrency Transaction concurrency.
* @param isolation Transaction isolation.
* @param expLoadCnt Expected number of store 'load' calls.
* @throws Exception If failed.
*/
private void checkReadThrough(IgniteCache<Object, Object> cache,
IgniteRunnable c,
@Nullable TransactionConcurrency concurrency,
@Nullable TransactionIsolation isolation,
int expLoadCnt) throws Exception {
TestStore.loadCnt.set(0);
Transaction tx = isolation != null ? cache.unwrap(Ignite.class).transactions().txStart(concurrency, isolation)
: null;
try {
c.run();
if (tx != null)
tx.commit();
}
finally {
if (tx != null)
tx.close();
}
assertEquals(expLoadCnt, TestStore.loadCnt.get());
}
/**
* @param cacheMode Cache mode.
* @param atomicityMode Atomicity mode.
* @param backups Number of backups.
* @return Cache configuration.
*/
@SuppressWarnings("unchecked")
protected CacheConfiguration<Object, Object> cacheConfiguration(CacheMode cacheMode,
CacheAtomicityMode atomicityMode,
int backups) {
CacheConfiguration ccfg = new CacheConfiguration();
ccfg.setReadThrough(true);
ccfg.setWriteThrough(true);
ccfg.setCacheStoreFactory(new TestStoreFactory());
ccfg.setWriteSynchronizationMode(FULL_SYNC);
ccfg.setAtomicityMode(atomicityMode);
ccfg.setCacheMode(cacheMode);
ccfg.setAffinity(new RendezvousAffinityFunction(false, 32));
ccfg.setAtomicWriteOrderMode(PRIMARY);
if (cacheMode == PARTITIONED)
ccfg.setBackups(backups);
return ccfg;
}
/**
*
*/
public static class TestStoreFactory implements Factory<CacheStore> {
/** {@inheritDoc} */
@Override public CacheStore create() {
return new TestStore();
}
}
/**
*
*/
public static class TestStore extends CacheStoreAdapter<Object, Object> {
/** */
static AtomicInteger loadCnt = new AtomicInteger();
/** {@inheritDoc} */
@Override public void loadCache(IgniteBiInClosure<Object, Object> clo, Object... args) {
fail();
}
/** {@inheritDoc} */
@Override public Object load(Object key) {
loadCnt.incrementAndGet();
return storeMap.get(key);
}
/** {@inheritDoc} */
@Override public void write(Cache.Entry<?, ?> entry) {
storeMap.put(entry.getKey(), entry.getValue());
}
/** {@inheritDoc} */
@Override public void delete(Object key) {
storeMap.remove(key);
}
}
/**
*
*/
static class TestEntryProcessor implements EntryProcessor<Object, Object, Object> {
/** {@inheritDoc} */
@Override public Object process(MutableEntry<Object, Object> entry, Object... args) {
Object val = entry.getValue();
entry.setValue(entry.getKey());
return val;
}
}
}
| |
/*
* [The "BSD license"]
* Copyright (c) 2012 Terence Parr
* Copyright (c) 2012 Sam Harwell
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.v4.test.tool;
import org.antlr.v4.test.runtime.java.BaseTest;
import org.junit.Test;
/**
 * Tests translation of {@code $x} attribute references inside grammar actions
 * into generated Java parser code. Each active test injects an action into one
 * of the template slots (members/init/inline/finally/inline2) and compares the
 * generated output against an expected string. The large block of empty methods
 * at the bottom are v3-era placeholders that only declare the action under test
 * and make no assertions yet (see the TODO at the end of the class).
 */
@SuppressWarnings("unused")
public class TestActionTranslation extends BaseTest {
    // Grammar template with #tag#...#end-tag# markers so each test can inject
    // an action into a specific location and locate the translated output.
    String attributeTemplate =
        "attributeTemplate(members,init,inline,finally,inline2) ::= <<\n" +
        "parser grammar A;\n"+
        "@members {#members#<members>#end-members#}\n" +
        "a[int x, int x1] returns [int y]\n" +
        "@init {#init#<init>#end-init#}\n" +
        " : id=ID ids+=ID lab=b[34] c d {\n" +
        " #inline#<inline>#end-inline#\n" +
        " }\n" +
        " c\n" +
        " ;\n" +
        " finally {#finally#<finally>#end-finally#}\n" +
        "b[int d] returns [int e]\n" +
        " : {#inline2#<inline2>#end-inline2#}\n" +
        " ;\n" +
        "c returns [int x, int y] : ;\n" +
        "d : ;\n" +
        ">>";

    // Literal '<' and XML-ish text must pass through untouched in every slot.
    @Test public void testEscapedLessThanInAction() throws Exception {
        String action = "i<3; '<xmltag>'";
        String expected = "i<3; '<xmltag>'";
        testActions(attributeTemplate, "members", action, expected);
        testActions(attributeTemplate, "init", action, expected);
        testActions(attributeTemplate, "inline", action, expected);
        testActions(attributeTemplate, "finally", action, expected);
        testActions(attributeTemplate, "inline2", action, expected);
    }

    // An escaped \$ is emitted as a plain dollar sign.
    @Test public void testEscaped$InAction() throws Exception {
        String action = "int \\$n; \"\\$in string\\$\"";
        String expected = "int $n; \"$in string$\"";
        testActions(attributeTemplate, "members", action, expected);
        testActions(attributeTemplate, "init", action, expected);
        testActions(attributeTemplate, "inline", action, expected);
        testActions(attributeTemplate, "finally", action, expected);
        testActions(attributeTemplate, "inline2", action, expected);
    }

    /**
     * Regression test for "in antlr v4 lexer, $ translation issue in action".
     * https://github.com/antlr/antlr4/issues/176
     */
    @Test public void testUnescaped$InAction() throws Exception {
        String action = "\\$string$";
        String expected = "$string$";
        testActions(attributeTemplate, "members", action, expected);
        testActions(attributeTemplate, "init", action, expected);
        testActions(attributeTemplate, "inline", action, expected);
        testActions(attributeTemplate, "finally", action, expected);
        testActions(attributeTemplate, "inline2", action, expected);
    }

    @Test public void testEscapedSlash() throws Exception {
        String action = "x = '\\n';"; // x = '\n'; -> x = '\n';
        String expected = "x = '\\n';";
        testActions(attributeTemplate, "members", action, expected);
        testActions(attributeTemplate, "init", action, expected);
        testActions(attributeTemplate, "inline", action, expected);
        testActions(attributeTemplate, "finally", action, expected);
        testActions(attributeTemplate, "inline2", action, expected);
    }

    // Commas/quotes/braces inside an action must not confuse argument parsing.
    @Test public void testComplicatedArgParsing() throws Exception {
        String action = "x, (*a).foo(21,33), 3.2+1, '\\n', "+
                        "\"a,oo\\nick\", {bl, \"fdkj\"eck}";
        String expected = "x, (*a).foo(21,33), 3.2+1, '\\n', "+
                        "\"a,oo\\nick\", {bl, \"fdkj\"eck}";
        testActions(attributeTemplate, "members", action, expected);
        testActions(attributeTemplate, "init", action, expected);
        testActions(attributeTemplate, "inline", action, expected);
        testActions(attributeTemplate, "finally", action, expected);
        testActions(attributeTemplate, "inline2", action, expected);
    }

    @Test public void testComplicatedArgParsingWithTranslation() throws Exception {
        String action = "x, $ID.text+\"3242\", (*$ID).foo(21,33), 3.2+1, '\\n', "+
                        "\"a,oo\\nick\", {bl, \"fdkj\"eck}";
        String expected =
            "x, (((AContext)_localctx).ID!=null?((AContext)_localctx).ID.getText():null)+\"3242\", " +
            "(*((AContext)_localctx).ID).foo(21,33), 3.2+1, '\\n', \"a,oo\\nick\", {bl, \"fdkj\"eck}";
        testActions(attributeTemplate, "inline", action, expected);
    }

    // Rule parameters translate to fields on the local rule context.
    @Test public void testArguments() throws Exception {
        String action = "$x; $ctx.x";
        String expected = "_localctx.x; _localctx.x";
        testActions(attributeTemplate, "inline", action, expected);
    }

    @Test public void testReturnValue() throws Exception {
        String action = "$y; $ctx.y";
        String expected = "_localctx.y; _localctx.y";
        testActions(attributeTemplate, "inline", action, expected);
    }

    @Test public void testReturnValueWithNumber() throws Exception {
        String action = "$ctx.x1";
        String expected = "_localctx.x1";
        testActions(attributeTemplate, "inline", action, expected);
    }

    @Test public void testReturnValuesCurrentRule() throws Exception {
        String action = "$y; $ctx.y;";
        String expected = "_localctx.y; _localctx.y;";
        testActions(attributeTemplate, "inline", action, expected);
    }

    // Labels and rule refs are accessed through the casted context.
    @Test public void testReturnValues() throws Exception {
        String action = "$lab.e; $b.e; $y.e = \"\";";
        String expected = "((AContext)_localctx).lab.e; ((AContext)_localctx).b.e; _localctx.y.e = \"\";";
        testActions(attributeTemplate, "inline", action, expected);
    }

    @Test public void testReturnWithMultipleRuleRefs() throws Exception {
        String action = "$c.x; $c.y;";
        String expected = "((AContext)_localctx).c.x; ((AContext)_localctx).c.y;";
        testActions(attributeTemplate, "inline", action, expected);
    }

    // Token attribute access is null-guarded (.text, .line).
    @Test public void testTokenRefs() throws Exception {
        String action = "$id; $ID; $id.text; $id.getText(); $id.line;";
        String expected = "((AContext)_localctx).id; ((AContext)_localctx).ID; (((AContext)_localctx).id!=null?((AContext)_localctx).id.getText():null); ((AContext)_localctx).id.getText(); (((AContext)_localctx).id!=null?((AContext)_localctx).id.getLine():0);";
        testActions(attributeTemplate, "inline", action, expected);
    }

    @Test public void testRuleRefs() throws Exception {
        String action = "$lab.start; $c.text;";
        String expected = "(((AContext)_localctx).lab!=null?(((AContext)_localctx).lab.start):null); (((AContext)_localctx).c!=null?_input.getText(((AContext)_localctx).c.start,((AContext)_localctx).c.stop):null);";
        testActions(attributeTemplate, "inline", action, expected);
    }

    @Test public void testRefToTextAttributeForCurrentRule() throws Exception {
        String action = "$ctx.text; $text";

        // this is the expected translation for all cases
        String expected =
            "_localctx.text; _input.getText(_localctx.start, _input.LT(-1))";

        testActions(attributeTemplate, "init", action, expected);
        testActions(attributeTemplate, "inline", action, expected);
        testActions(attributeTemplate, "finally", action, expected);
    }

    // -------------------------------------------------------------------
    // Placeholder tests below: the action is declared but nothing is
    // asserted yet (carried over from v3; see TODO at end of class).
    // -------------------------------------------------------------------

    @Test public void testDynamicRuleScopeRefInSubrule() throws Exception {
        String action = "$a::n;";
    }
    @Test public void testRuleScopeFromAnotherRule() throws Exception {
        String action = "$a::n;"; // must be qualified
    }
    @Test public void testFullyQualifiedRefToCurrentRuleParameter() throws Exception {
        String action = "$a.i;";
    }
    @Test public void testFullyQualifiedRefToCurrentRuleRetVal() throws Exception {
        String action = "$a.i;";
    }
    @Test public void testSetFullyQualifiedRefToCurrentRuleRetVal() throws Exception {
        String action = "$a.i = 1;";
    }
    @Test public void testIsolatedRefToCurrentRule() throws Exception {
        String action = "$a;";
    }
    @Test public void testIsolatedRefToRule() throws Exception {
        String action = "$x;";
    }
    @Test public void testFullyQualifiedRefToLabelInCurrentRule() throws Exception {
        String action = "$a.x;";
    }
    @Test public void testFullyQualifiedRefToListLabelInCurrentRule() throws Exception {
        String action = "$a.x;"; // must be qualified
    }
    @Test public void testFullyQualifiedRefToTemplateAttributeInCurrentRule() throws Exception {
        String action = "$a.st;"; // can be qualified
    }
    @Test public void testRuleRefWhenRuleHasScope() throws Exception {
        String action = "$b.start;";
    }
    @Test public void testDynamicScopeRefOkEvenThoughRuleRefExists() throws Exception {
        String action = "$b::n;";
    }
    @Test public void testRefToTemplateAttributeForCurrentRule() throws Exception {
        String action = "$st=null;";
    }
    @Test public void testRefToStartAttributeForCurrentRule() throws Exception {
        String action = "$start;";
    }
    @Test public void testTokenLabelFromMultipleAlts() throws Exception {
        String action = "$ID.text;"; // must be qualified
    }
    @Test public void testRuleLabelFromMultipleAlts() throws Exception {
        String action = "$b.text;"; // must be qualified
    }
    @Test public void testUnqualifiedRuleScopeAttribute() throws Exception {
        String action = "$n;"; // must be qualified
    }
    @Test public void testRuleAndTokenLabelTypeMismatch() throws Exception {
    }
    @Test public void testListAndTokenLabelTypeMismatch() throws Exception {
    }
    @Test public void testListAndRuleLabelTypeMismatch() throws Exception {
    }
    @Test public void testArgReturnValueMismatch() throws Exception {
    }
    @Test public void testSimplePlusEqualLabel() throws Exception {
        String action = "$ids.size();"; // must be qualified
    }
    @Test public void testPlusEqualStringLabel() throws Exception {
        String action = "$ids.size();"; // must be qualified
    }
    @Test public void testPlusEqualSetLabel() throws Exception {
        String action = "$ids.size();"; // must be qualified
    }
    @Test public void testPlusEqualWildcardLabel() throws Exception {
        String action = "$ids.size();"; // must be qualified
    }
    @Test public void testImplicitTokenLabel() throws Exception {
        String action = "$ID; $ID.text; $ID.getText()";
    }
    @Test public void testImplicitRuleLabel() throws Exception {
        String action = "$r.start;";
    }
    @Test public void testReuseExistingLabelWithImplicitRuleLabel() throws Exception {
        String action = "$r.start;";
    }
    @Test public void testReuseExistingListLabelWithImplicitRuleLabel() throws Exception {
        String action = "$r.start;";
    }
    @Test public void testReuseExistingLabelWithImplicitTokenLabel() throws Exception {
        String action = "$ID.text;";
    }
    @Test public void testReuseExistingListLabelWithImplicitTokenLabel() throws Exception {
        String action = "$ID.text;";
    }
    @Test public void testRuleLabelWithoutOutputOption() throws Exception {
    }
    @Test public void testMissingArgs() throws Exception {
    }
    @Test public void testArgsWhenNoneDefined() throws Exception {
    }
    @Test public void testReturnInitValue() throws Exception {
    }
    @Test public void testMultipleReturnInitValue() throws Exception {
    }
    @Test public void testCStyleReturnInitValue() throws Exception {
    }
    @Test public void testArgsWithInitValues() throws Exception {
    }
    @Test public void testArgsOnToken() throws Exception {
    }
    @Test public void testArgsOnTokenInLexer() throws Exception {
    }
    @Test public void testLabelOnRuleRefInLexer() throws Exception {
        String action = "$i.text";
    }
    @Test public void testRefToRuleRefInLexer() throws Exception {
        String action = "$ID.text";
    }
    @Test public void testRefToRuleRefInLexerNoAttribute() throws Exception {
        String action = "$ID";
    }
    @Test public void testCharLabelInLexer() throws Exception {
    }
    @Test public void testCharListLabelInLexer() throws Exception {
    }
    @Test public void testWildcardCharLabelInLexer() throws Exception {
    }
    @Test public void testWildcardCharListLabelInLexer() throws Exception {
    }
    @Test public void testMissingArgsInLexer() throws Exception {
    }
    @Test public void testLexerRulePropertyRefs() throws Exception {
        String action = "$text $type $line $pos $channel $index $start $stop";
    }
    @Test public void testLexerLabelRefs() throws Exception {
        String action = "$a $b.text $c $d.text";
    }
    @Test public void testSettingLexerRulePropertyRefs() throws Exception {
        String action = "$text $type=1 $line=1 $pos=1 $channel=1 $index";
    }
    @Test public void testArgsOnTokenInLexerRuleOfCombined() throws Exception {
    }
    @Test public void testMissingArgsOnTokenInLexerRuleOfCombined() throws Exception {
    }
    @Test public void testTokenLabelTreeProperty() throws Exception {
        String action = "$id.tree;";
    }
    @Test public void testTokenRefTreeProperty() throws Exception {
        String action = "$ID.tree;";
    }
    @Test public void testAmbiguousTokenRef() throws Exception {
        String action = "$ID;";
    }
    @Test public void testAmbiguousTokenRefWithProp() throws Exception {
        String action = "$ID.text;";
    }
    @Test public void testRuleRefWithDynamicScope() throws Exception {
        String action = "$field::x = $field.st;";
    }
    @Test public void testAssignToOwnRulenameAttr() throws Exception {
        String action = "$rule.tree = null;";
    }
    @Test public void testAssignToOwnParamAttr() throws Exception {
        String action = "$rule.i = 42; $i = 23;";
    }
    @Test public void testIllegalAssignToOwnRulenameAttr() throws Exception {
        String action = "$rule.stop = 0;";
    }
    @Test public void testIllegalAssignToLocalAttr() throws Exception {
        String action = "$tree = null; $st = null; $start = 0; $stop = 0; $text = 0;";
    }
    @Test public void testIllegalAssignRuleRefAttr() throws Exception {
        String action = "$other.tree = null;";
    }
    @Test public void testIllegalAssignTokenRefAttr() throws Exception {
        String action = "$ID.text = \"test\";";
    }
    @Test public void testAssignToTreeNodeAttribute() throws Exception {
        String action = "$tree.scope = localScope;";
    }
    @Test public void testDoNotTranslateAttributeCompare() throws Exception {
        String action = "$a.line == $b.line";
    }
    @Test public void testDoNotTranslateScopeAttributeCompare() throws Exception {
        String action = "if ($rule::foo == \"foo\" || 1) { System.out.println(\"ouch\"); }";
    }
    @Test public void testTreeRuleStopAttributeIsInvalid() throws Exception {
        String action = "$r.x; $r.start; $r.stop";
    }
    @Test public void testRefToTextAttributeForCurrentTreeRule() throws Exception {
        String action = "$text";
    }
    @Test public void testTypeOfGuardedAttributeRefIsCorrect() throws Exception {
        String action = "int x = $b::n;";
    }
    @Test public void testBracketArgParsing() throws Exception {
    }
    @Test public void testStringArgParsing() throws Exception {
        String action = "34, '{', \"it's<\", '\"', \"\\\"\", 19";
    }
    @Test public void testComplicatedSingleArgParsing() throws Exception {
        String action = "(*a).foo(21,33,\",\")";
    }
    @Test public void testArgWithLT() throws Exception {
        String action = "34<50";
    }
    @Test public void testGenericsAsArgumentDefinition() throws Exception {
        String action = "$foo.get(\"ick\");";
    }
    @Test public void testGenericsAsArgumentDefinition2() throws Exception {
        String action = "$foo.get(\"ick\"); x=3;";
    }
    @Test public void testGenericsAsReturnValue() throws Exception {
    }

    // TODO: nonlocal $rule::x
}
| |
/*
* Copyright 2015 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.data2.dataset2.lib.cube;
import co.cask.cdap.api.dataset.DatasetProperties;
import co.cask.cdap.api.dataset.lib.cube.AggregationFunction;
import co.cask.cdap.api.dataset.lib.cube.Cube;
import co.cask.cdap.api.dataset.lib.cube.CubeDeleteQuery;
import co.cask.cdap.api.dataset.lib.cube.CubeExploreQuery;
import co.cask.cdap.api.dataset.lib.cube.CubeFact;
import co.cask.cdap.api.dataset.lib.cube.CubeQuery;
import co.cask.cdap.api.dataset.lib.cube.DimensionValue;
import co.cask.cdap.api.dataset.lib.cube.TimeSeries;
import co.cask.cdap.data2.dataset2.DatasetFrameworkTestUtil;
import co.cask.cdap.proto.id.DatasetId;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.tephra.Transaction;
import org.apache.tephra.TransactionAware;
import org.apache.tephra.TransactionExecutor;
import org.apache.tephra.TransactionManager;
import org.apache.tephra.TransactionSystemClient;
import org.apache.tephra.inmemory.InMemoryTxSystemClient;
import org.junit.ClassRule;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
/**
 * Tests the {@code CubeDataset} implementation against the shared cube test
 * suite, plus a regression test proving that the dataset keeps no
 * non-transactional caches (so aborted transactions leave no stale mappings).
 */
public class CubeDatasetTest extends AbstractCubeTest {
    @ClassRule
    public static DatasetFrameworkTestUtil dsFrameworkUtil = new DatasetFrameworkTestUtil();

    @Override
    protected Cube getCube(String name, int[] resolutions,
                           Map<String, ? extends Aggregation> aggregations) throws Exception {
        // Wrap in a transactional adapter so each Cube call runs in its own tx.
        return new CubeTxnlWrapper(getCubeInternal(name, resolutions, aggregations));
    }

    // Creates the dataset instance on first use and returns the raw
    // (non-transactional) Cube; callers are responsible for tx management.
    private Cube getCubeInternal(String name, int[] resolutions,
                                 Map<String, ? extends Aggregation> aggregations) throws Exception {
        DatasetProperties props = configureProperties(resolutions, aggregations);
        DatasetId id = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset(name);
        if (dsFrameworkUtil.getInstance(id) == null) {
            dsFrameworkUtil.createInstance(Cube.class.getName(), id, props);
        }
        return dsFrameworkUtil.getInstance(id);
    }

    @Test
    public void testTxRetryOnFailure() throws Exception {
        // This test ensures that there's no non-transactional cache used in cube dataset. For that, it
        // 1) simulates transaction conflict for the first write to cube
        // 2) attempts to write again, writes successfully
        // 3) uses second cube instance to read the result
        //
        // In case there's a non-transactional cache used in cube, it would fill entity mappings in the first tx, and only
        // use them to write data. Hence, when reading - there will be no mapping in entity table to decode, as first tx
        // that wrote it is not visible (was aborted on conflict).
        Aggregation agg1 = new DefaultAggregation(ImmutableList.of("dim1", "dim2", "dim3"));
        int resolution = 1;
        // Two handles onto the same underlying dataset instance.
        Cube cube1 = getCubeInternal("concurrCube", new int[]{resolution}, ImmutableMap.of("agg1", agg1));
        Cube cube2 = getCubeInternal("concurrCube", new int[]{resolution}, ImmutableMap.of("agg1", agg1));
        Configuration txConf = HBaseConfiguration.create();
        TransactionManager txManager = new TransactionManager(txConf);
        txManager.startAndWait();
        try {
            TransactionSystemClient txClient = new InMemoryTxSystemClient(txManager);

            // 1) write and abort after commit to simlate conflict
            Transaction tx = txClient.startShort();
            ((TransactionAware) cube1).startTx(tx);
            writeInc(cube1, "metric1", 1, 1, "1", "1", "1");
            ((TransactionAware) cube1).commitTx();
            txClient.abort(tx);
            ((TransactionAware) cube1).rollbackTx();

            // 2) write successfully
            tx = txClient.startShort();
            ((TransactionAware) cube1).startTx(tx);
            writeInc(cube1, "metric1", 1, 1, "1", "1", "1");
            // let's pretend we had conflict and rollback it
            ((TransactionAware) cube1).commitTx();
            txClient.commit(tx);
            ((TransactionAware) cube1).postTxCommit();

            // 3) read using different cube instance
            tx = txClient.startShort();
            ((TransactionAware) cube2).startTx(tx);
            verifyCountQuery(cube2, 0, 2, resolution, "metric1", AggregationFunction.SUM,
                             new HashMap<String, String>(), new ArrayList<String>(),
                             ImmutableList.of(
                                 new TimeSeries("metric1", new HashMap<String, String>(), timeValues(1, 1))));
            // let's pretend we had conflict and rollback it
            ((TransactionAware) cube2).commitTx();
            txClient.commit(tx);
            ((TransactionAware) cube2).postTxCommit();
        } finally {
            txManager.stopAndWait();
        }
    }

    // Encodes resolutions and aggregation definitions as dataset properties
    // understood by CubeDatasetDefinition.
    private DatasetProperties configureProperties(int[] resolutions, Map<String, ? extends Aggregation> aggregations) {
        DatasetProperties.Builder builder = DatasetProperties.builder();
        // add resolution property
        StringBuilder resolutionPropValue = new StringBuilder();
        for (int resolution : resolutions) {
            resolutionPropValue.append(",").append(resolution);
        }
        // .substring(1) for removing first comma
        builder.add(Cube.PROPERTY_RESOLUTIONS, resolutionPropValue.substring(1));

        // add aggregation props
        for (Map.Entry<String, ? extends Aggregation> entry : aggregations.entrySet()) {
            // NOTE: at this moment we support only DefaultAggregation, so all other tests in AbstractCubeTest must be skipped
            DefaultAggregation defAgg = (DefaultAggregation) entry.getValue();
            String aggPropertyPrefix = CubeDatasetDefinition.PROPERTY_AGGREGATION_PREFIX + (entry.getKey());
            if (!defAgg.getDimensionNames().isEmpty()) {
                builder.add(aggPropertyPrefix + ".dimensions", Joiner.on(",").join(defAgg.getDimensionNames()));
            }
            if (!defAgg.getRequiredDimensions().isEmpty()) {
                builder.add(aggPropertyPrefix + ".requiredDimensions", Joiner.on(",").join(defAgg.getRequiredDimensions()));
            }
        }
        return builder.build();
    }

    /**
     * Decorator that runs every {@link Cube} operation of the delegate inside
     * its own short transaction via {@link TransactionExecutor}.
     */
    private static final class CubeTxnlWrapper implements Cube {
        private final Cube delegate;
        private final TransactionExecutor txnl;

        private CubeTxnlWrapper(Cube delegate) {
            this.delegate = delegate;
            this.txnl = dsFrameworkUtil.newTransactionExecutor((TransactionAware) delegate);
        }

        @Override
        public void add(final CubeFact fact) {
            txnl.executeUnchecked(new TransactionExecutor.Subroutine() {
                @Override
                public void apply() {
                    delegate.add(fact);
                }
            });
        }

        @Override
        public void add(final Collection<? extends CubeFact> facts) {
            txnl.executeUnchecked(new TransactionExecutor.Subroutine() {
                @Override
                public void apply() {
                    delegate.add(facts);
                }
            });
        }

        @Override
        public Collection<TimeSeries> query(final CubeQuery query) {
            return txnl.executeUnchecked(new Callable<Collection<TimeSeries>>() {
                @Override
                public Collection<TimeSeries> call() {
                    return delegate.query(query);
                }
            });
        }

        @Override
        public void delete(final CubeDeleteQuery query) {
            txnl.executeUnchecked(new TransactionExecutor.Subroutine() {
                @Override
                public void apply() {
                    delegate.delete(query);
                }
            });
        }

        @Override
        public Collection<DimensionValue> findDimensionValues(final CubeExploreQuery query) {
            return txnl.executeUnchecked(new Callable<Collection<DimensionValue>>() {
                @Override
                public Collection<DimensionValue> call() {
                    return delegate.findDimensionValues(query);
                }
            });
        }

        @Override
        public Collection<String> findMeasureNames(final CubeExploreQuery query) {
            return txnl.executeUnchecked(new Callable<Collection<String>>() {
                @Override
                public Collection<String> call() {
                    return delegate.findMeasureNames(query);
                }
            });
        }

        @Override
        public void write(Object ignored, CubeFact cubeFact) {
            add(cubeFact);
        }

        @Override
        public void close() throws IOException {
            delegate.close();
        }
    }
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.server;
import android.app.ActivityManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.net.LinkCapabilities;
import android.net.LinkProperties;
import android.os.Binder;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.RemoteException;
import android.os.UserHandle;
import android.telephony.CellLocation;
import android.telephony.PhoneStateListener;
import android.telephony.ServiceState;
import android.telephony.SignalStrength;
import android.telephony.CellInfo;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.util.Slog;
import java.util.ArrayList;
import java.util.List;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import com.android.internal.app.IBatteryStats;
import com.android.internal.telephony.ITelephonyRegistry;
import com.android.internal.telephony.IPhoneStateListener;
import com.android.internal.telephony.DefaultPhoneNotifier;
import com.android.internal.telephony.PhoneConstants;
import com.android.internal.telephony.ServiceStateTracker;
import com.android.internal.telephony.TelephonyIntents;
import com.android.server.am.BatteryStatsService;
/**
* Since phone process can be restarted, this class provides a centralized place
* that applications can register and be called back from.
*/
class TelephonyRegistry extends ITelephonyRegistry.Stub {
private static final String TAG = "TelephonyRegistry";
private static final boolean DBG = false;
private static final boolean DBG_LOC = false;
    /**
     * One registered listener: the app's callback, its binder (used to detect
     * duplicates and death) and the bitmask of events it subscribed to.
     */
    private static class Record {
        // Package name of the registrant, kept only for debug logging.
        String pkgForDebug;

        IBinder binder;

        IPhoneStateListener callback;

        int callerUid;

        // Bitmask of PhoneStateListener.LISTEN_* flags.
        int events;

        @Override
        public String toString() {
            return "{pkgForDebug=" + pkgForDebug + " callerUid=" + callerUid +
                    " events=" + Integer.toHexString(events) + "}";
        }
    }
private final Context mContext;
// access should be inside synchronized (mRecords) for these two fields
private final ArrayList<IBinder> mRemoveList = new ArrayList<IBinder>();
private final ArrayList<Record> mRecords = new ArrayList<Record>();
private final IBatteryStats mBatteryStats;
private int mCallState = TelephonyManager.CALL_STATE_IDLE;
private String mCallIncomingNumber = "";
private ServiceState mServiceState = new ServiceState();
private SignalStrength mSignalStrength = new SignalStrength();
private boolean mMessageWaiting = false;
private boolean mCallForwarding = false;
private int mDataActivity = TelephonyManager.DATA_ACTIVITY_NONE;
private int mDataConnectionState = TelephonyManager.DATA_UNKNOWN;
private boolean mDataConnectionPossible = false;
private String mDataConnectionReason = "";
private String mDataConnectionApn = "";
private ArrayList<String> mConnectedApns;
private LinkProperties mDataConnectionLinkProperties;
private LinkCapabilities mDataConnectionLinkCapabilities;
private Bundle mCellLocation = new Bundle();
private int mDataConnectionNetworkType;
private int mOtaspMode = ServiceStateTracker.OTASP_UNKNOWN;
private List<CellInfo> mCellInfo = null;
static final int PHONE_STATE_PERMISSION_MASK =
PhoneStateListener.LISTEN_CALL_FORWARDING_INDICATOR |
PhoneStateListener.LISTEN_CALL_STATE |
PhoneStateListener.LISTEN_DATA_ACTIVITY |
PhoneStateListener.LISTEN_DATA_CONNECTION_STATE |
PhoneStateListener.LISTEN_MESSAGE_WAITING_INDICATOR;
    /** Message code posted when the foreground user changes. */
    private static final int MSG_USER_SWITCHED = 1;

    private final Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case MSG_USER_SWITCHED: {
                    if (DBG) Slog.d(TAG, "MSG_USER_SWITCHED userId=" + msg.arg1);
                    // Re-publish the cached cell location so listeners of the
                    // newly switched-to user receive a current value.
                    TelephonyRegistry.this.notifyCellLocation(mCellLocation);
                    break;
                }
            }
        }
    };
/** Listens for user switches and forwards them to {@link #mHandler}. */
private final BroadcastReceiver mBroadcastReceiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (!Intent.ACTION_USER_SWITCHED.equals(intent.getAction())) {
            return;
        }
        final int userId = intent.getIntExtra(Intent.EXTRA_USER_HANDLE, 0);
        mHandler.sendMessage(mHandler.obtainMessage(MSG_USER_SWITCHED, userId, 0));
    }
};
// we keep a copy of all of the state so we can send it out when folks
// register for it
//
// In these calls we call with the lock held. This is safe because remote
// calls go through a oneway interface and local calls go through a
// handler before they get to app code.
TelephonyRegistry(Context context) {
CellLocation location = CellLocation.getEmpty();
// Note that location can be null for non-phone builds like
// like the generic one.
if (location != null) {
location.fillInNotifierBundle(mCellLocation);
}
mContext = context;
mBatteryStats = BatteryStatsService.getService();
mConnectedApns = new ArrayList<String>();
}
/** Called once the system is up; starts watching user-lifecycle broadcasts. */
public void systemRunning() {
    final IntentFilter userFilter = new IntentFilter(Intent.ACTION_USER_SWITCHED);
    userFilter.addAction(Intent.ACTION_USER_REMOVED);
    mContext.registerReceiver(mBroadcastReceiver, userFilter);
}
/**
 * Registers, updates, or (when {@code events == 0}) unregisters a listener.
 *
 * @param pkgForDebug caller's package name, used only for debug output
 * @param callback    binder callback that receives state updates
 * @param events      bitmask of PhoneStateListener.LISTEN_* flags
 * @param notifyNow   if true, immediately replay the cached state for each
 *                    newly requested event so the listener starts in sync
 */
@Override
public void listen(String pkgForDebug, IPhoneStateListener callback, int events,
        boolean notifyNow) {
    int callerUid = UserHandle.getCallingUserId();
    int myUid = UserHandle.myUserId();
    if (DBG) {
        Slog.d(TAG, "listen: E pkg=" + pkgForDebug + " events=0x" + Integer.toHexString(events)
                + " myUid=" + myUid
                + " callerUid=" + callerUid);
    }
    if (events != 0) {
        /* Checks permission and throws Security exception */
        checkListenerPermission(events);
        synchronized (mRecords) {
            // register: reuse the existing Record for this binder if there is
            // one, otherwise create and add a new one.
            Record r = null;
            find_and_add: {
                IBinder b = callback.asBinder();
                final int N = mRecords.size();
                for (int i = 0; i < N; i++) {
                    r = mRecords.get(i);
                    if (b == r.binder) {
                        break find_and_add;
                    }
                }
                r = new Record();
                r.binder = b;
                r.callback = callback;
                r.pkgForDebug = pkgForDebug;
                r.callerUid = callerUid;
                mRecords.add(r);
                if (DBG) Slog.i(TAG, "listen: add new record=" + r);
            }
            // Bits newly requested by this call (set in events but not in the
            // record's previous mask). Computed but currently unused below;
            // the notifyNow replay keys off 'events' directly.
            int send = events & (events ^ r.events);
            r.events = events;
            if (notifyNow) {
                // Replay the cached state for each requested event. Any
                // RemoteException means the listener's process died, so the
                // record is removed.
                if ((events & PhoneStateListener.LISTEN_SERVICE_STATE) != 0) {
                    try {
                        r.callback.onServiceStateChanged(new ServiceState(mServiceState));
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_SIGNAL_STRENGTH) != 0) {
                    try {
                        // Legacy callback: GSM ASU value 99 means "unknown",
                        // which is reported to apps as -1.
                        int gsmSignalStrength = mSignalStrength.getGsmSignalStrength();
                        r.callback.onSignalStrengthChanged((gsmSignalStrength == 99 ? -1
                                : gsmSignalStrength));
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_MESSAGE_WAITING_INDICATOR) != 0) {
                    try {
                        r.callback.onMessageWaitingIndicatorChanged(mMessageWaiting);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_CALL_FORWARDING_INDICATOR) != 0) {
                    try {
                        r.callback.onCallForwardingIndicatorChanged(mCallForwarding);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                // Location-sensitive events are additionally gated on the
                // caller being the current foreground user.
                if (validateEventsAndUserLocked(r, PhoneStateListener.LISTEN_CELL_LOCATION)) {
                    try {
                        if (DBG_LOC) Slog.d(TAG, "listen: mCellLocation=" + mCellLocation);
                        r.callback.onCellLocationChanged(new Bundle(mCellLocation));
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_CALL_STATE) != 0) {
                    try {
                        r.callback.onCallStateChanged(mCallState, mCallIncomingNumber);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_DATA_CONNECTION_STATE) != 0) {
                    try {
                        r.callback.onDataConnectionStateChanged(mDataConnectionState,
                                mDataConnectionNetworkType);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_DATA_ACTIVITY) != 0) {
                    try {
                        r.callback.onDataActivity(mDataActivity);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_SIGNAL_STRENGTHS) != 0) {
                    try {
                        r.callback.onSignalStrengthsChanged(mSignalStrength);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if ((events & PhoneStateListener.LISTEN_OTASP_CHANGED) != 0) {
                    try {
                        r.callback.onOtaspChanged(mOtaspMode);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
                if (validateEventsAndUserLocked(r, PhoneStateListener.LISTEN_CELL_INFO)) {
                    try {
                        if (DBG_LOC) Slog.d(TAG, "listen: mCellInfo=" + mCellInfo);
                        r.callback.onCellInfoChanged(mCellInfo);
                    } catch (RemoteException ex) {
                        remove(r.binder);
                    }
                }
            }
        }
    } else {
        // events == 0 means "stop listening": drop the record entirely.
        remove(callback.asBinder());
    }
}
/** Removes the registration record keyed by the given binder, if present. */
private void remove(IBinder binder) {
    synchronized (mRecords) {
        // Listeners are keyed by binder identity, so compare with ==.
        for (int index = 0, count = mRecords.size(); index < count; index++) {
            if (mRecords.get(index).binder == binder) {
                mRecords.remove(index);
                return;
            }
        }
    }
}
/** Caches the call state and fans it out to interested listeners. */
public void notifyCallState(int state, String incomingNumber) {
    if (!checkNotifyPermission("notifyCallState()")) {
        return;
    }
    synchronized (mRecords) {
        mCallState = state;
        mCallIncomingNumber = incomingNumber;
        for (Record record : mRecords) {
            if ((record.events & PhoneStateListener.LISTEN_CALL_STATE) == 0) {
                continue;
            }
            try {
                record.callback.onCallStateChanged(state, incomingNumber);
            } catch (RemoteException ex) {
                // Listener process died; prune after the loop.
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
    broadcastCallStateChanged(state, incomingNumber);
}
/** Caches the service state and fans it out to interested listeners. */
public void notifyServiceState(ServiceState state) {
    if (!checkNotifyPermission("notifyServiceState()")) {
        return;
    }
    synchronized (mRecords) {
        mServiceState = state;
        for (Record record : mRecords) {
            if ((record.events & PhoneStateListener.LISTEN_SERVICE_STATE) == 0) {
                continue;
            }
            try {
                // Each listener gets its own copy; ServiceState is mutable.
                record.callback.onServiceStateChanged(new ServiceState(state));
            } catch (RemoteException ex) {
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
    broadcastServiceStateChanged(state);
}
/**
 * Caches the signal strength and delivers it through both the modern
 * (LISTEN_SIGNAL_STRENGTHS) and legacy (LISTEN_SIGNAL_STRENGTH) callbacks.
 */
public void notifySignalStrength(SignalStrength signalStrength) {
    if (!checkNotifyPermission("notifySignalStrength()")) {
        return;
    }
    synchronized (mRecords) {
        mSignalStrength = signalStrength;
        for (Record record : mRecords) {
            if ((record.events & PhoneStateListener.LISTEN_SIGNAL_STRENGTHS) != 0) {
                try {
                    record.callback.onSignalStrengthsChanged(
                            new SignalStrength(signalStrength));
                } catch (RemoteException ex) {
                    mRemoveList.add(record.binder);
                }
            }
            if ((record.events & PhoneStateListener.LISTEN_SIGNAL_STRENGTH) != 0) {
                try {
                    // Legacy callback: GSM ASU 99 means "unknown" -> report -1.
                    final int asu = signalStrength.getGsmSignalStrength();
                    final int legacyValue = (asu == 99) ? -1 : asu;
                    record.callback.onSignalStrengthChanged(legacyValue);
                } catch (RemoteException ex) {
                    mRemoveList.add(record.binder);
                }
            }
        }
        handleRemoveListLocked();
    }
    broadcastSignalStrengthChanged(signalStrength);
}
/** Caches the cell-info list and delivers it to foreground-user listeners. */
public void notifyCellInfo(List<CellInfo> cellInfo) {
    if (!checkNotifyPermission("notifyCellInfo()")) {
        return;
    }
    synchronized (mRecords) {
        mCellInfo = cellInfo;
        for (Record record : mRecords) {
            // Location-sensitive: also checks the caller is the current user.
            if (!validateEventsAndUserLocked(record, PhoneStateListener.LISTEN_CELL_INFO)) {
                continue;
            }
            try {
                if (DBG_LOC) {
                    Slog.d(TAG, "notifyCellInfo: mCellInfo=" + mCellInfo + " r=" + record);
                }
                record.callback.onCellInfoChanged(cellInfo);
            } catch (RemoteException ex) {
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
}
/** Caches the message-waiting (voicemail) indicator and notifies listeners. */
public void notifyMessageWaitingChanged(boolean mwi) {
    if (!checkNotifyPermission("notifyMessageWaitingChanged()")) {
        return;
    }
    synchronized (mRecords) {
        mMessageWaiting = mwi;
        for (Record record : mRecords) {
            if ((record.events & PhoneStateListener.LISTEN_MESSAGE_WAITING_INDICATOR) == 0) {
                continue;
            }
            try {
                record.callback.onMessageWaitingIndicatorChanged(mwi);
            } catch (RemoteException ex) {
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
}
/** Caches the call-forwarding indicator and notifies listeners. */
public void notifyCallForwardingChanged(boolean cfi) {
    if (!checkNotifyPermission("notifyCallForwardingChanged()")) {
        return;
    }
    synchronized (mRecords) {
        mCallForwarding = cfi;
        for (Record record : mRecords) {
            if ((record.events & PhoneStateListener.LISTEN_CALL_FORWARDING_INDICATOR) == 0) {
                continue;
            }
            try {
                record.callback.onCallForwardingIndicatorChanged(cfi);
            } catch (RemoteException ex) {
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
}
/** Caches the data-activity direction (in/out/none) and notifies listeners. */
public void notifyDataActivity(int state) {
    if (!checkNotifyPermission("notifyDataActivity()" )) {
        return;
    }
    synchronized (mRecords) {
        mDataActivity = state;
        for (Record record : mRecords) {
            if ((record.events & PhoneStateListener.LISTEN_DATA_ACTIVITY) == 0) {
                continue;
            }
            try {
                record.callback.onDataActivity(state);
            } catch (RemoteException ex) {
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
}
/**
 * Records a per-APN data connection change and, when the overall connection
 * state or network type actually changed, notifies listeners.
 *
 * The overall state only flips to CONNECTED on the first connected APN and
 * only leaves CONNECTED once the last connected APN drops; per-APN changes
 * in between are broadcast but do not re-notify listeners.
 */
public void notifyDataConnection(int state, boolean isDataConnectivityPossible,
        String reason, String apn, String apnType, LinkProperties linkProperties,
        LinkCapabilities linkCapabilities, int networkType, boolean roaming) {
    if (!checkNotifyPermission("notifyDataConnection()" )) {
        return;
    }
    if (DBG) {
        Slog.i(TAG, "notifyDataConnection: state=" + state + " isDataConnectivityPossible="
            + isDataConnectivityPossible + " reason='" + reason
            + "' apn='" + apn + "' apnType=" + apnType + " networkType=" + networkType
            + " mRecords.size()=" + mRecords.size() + " mRecords=" + mRecords);
    }
    synchronized (mRecords) {
        boolean modified = false;
        if (state == TelephonyManager.DATA_CONNECTED) {
            // First time this APN type connects: track it, and flip the
            // overall state if it wasn't already CONNECTED.
            if (!mConnectedApns.contains(apnType)) {
                mConnectedApns.add(apnType);
                if (mDataConnectionState != state) {
                    mDataConnectionState = state;
                    modified = true;
                }
            }
        } else {
            if (mConnectedApns.remove(apnType)) {
                if (mConnectedApns.isEmpty()) {
                    mDataConnectionState = state;
                    modified = true;
                } else {
                    // leave mDataConnectionState as is and
                    // send out the new status for the APN in question.
                }
            }
        }
        mDataConnectionPossible = isDataConnectivityPossible;
        mDataConnectionReason = reason;
        mDataConnectionLinkProperties = linkProperties;
        mDataConnectionLinkCapabilities = linkCapabilities;
        if (mDataConnectionNetworkType != networkType) {
            mDataConnectionNetworkType = networkType;
            // need to tell registered listeners about the new network type
            modified = true;
        }
        if (modified) {
            if (DBG) {
                Slog.d(TAG, "onDataConnectionStateChanged(" + mDataConnectionState
                    + ", " + mDataConnectionNetworkType + ")");
            }
            for (Record r : mRecords) {
                if ((r.events & PhoneStateListener.LISTEN_DATA_CONNECTION_STATE) != 0) {
                    try {
                        r.callback.onDataConnectionStateChanged(mDataConnectionState,
                                mDataConnectionNetworkType);
                    } catch (RemoteException ex) {
                        mRemoveList.add(r.binder);
                    }
                }
            }
            handleRemoveListLocked();
        }
    }
    // The sticky broadcast always carries the per-APN state, even when
    // listeners were not re-notified above.
    broadcastDataConnectionStateChanged(state, isDataConnectivityPossible, reason, apn,
            apnType, linkProperties, linkCapabilities, roaming);
}
/**
 * Broadcasts a data-connection setup failure. Listeners are not notified
 * directly because PhoneStateListener has no matching callback (see below).
 */
public void notifyDataConnectionFailed(String reason, String apnType) {
    if (!checkNotifyPermission("notifyDataConnectionFailed()")) {
        return;
    }
    /*
     * This is commented out because there is no onDataConnectionFailed callback
     * in PhoneStateListener. There should be.
    synchronized (mRecords) {
        mDataConnectionFailedReason = reason;
        final int N = mRecords.size();
        for (int i=N-1; i>=0; i--) {
            Record r = mRecords.get(i);
            if ((r.events & PhoneStateListener.LISTEN_DATA_CONNECTION_FAILED) != 0) {
                // XXX
            }
        }
    }
    */
    broadcastDataConnectionFailed(reason, apnType);
}
/** Caches the cell location and delivers it to foreground-user listeners. */
public void notifyCellLocation(Bundle cellLocation) {
    if (!checkNotifyPermission("notifyCellLocation()")) {
        return;
    }
    synchronized (mRecords) {
        mCellLocation = cellLocation;
        for (Record record : mRecords) {
            // Location-sensitive: also checks the caller is the current user.
            if (!validateEventsAndUserLocked(record, PhoneStateListener.LISTEN_CELL_LOCATION)) {
                continue;
            }
            try {
                if (DBG_LOC) {
                    Slog.d(TAG, "notifyCellLocation: mCellLocation=" + mCellLocation
                            + " r=" + record);
                }
                // Each listener gets its own copy of the mutable bundle.
                record.callback.onCellLocationChanged(new Bundle(cellLocation));
            } catch (RemoteException ex) {
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
}
/** Caches the OTASP mode and notifies listeners. */
public void notifyOtaspChanged(int otaspMode) {
    if (!checkNotifyPermission("notifyOtaspChanged()" )) {
        return;
    }
    synchronized (mRecords) {
        mOtaspMode = otaspMode;
        for (Record record : mRecords) {
            if ((record.events & PhoneStateListener.LISTEN_OTASP_CHANGED) == 0) {
                continue;
            }
            try {
                record.callback.onOtaspChanged(otaspMode);
            } catch (RemoteException ex) {
                mRemoveList.add(record.binder);
            }
        }
        handleRemoveListLocked();
    }
}
/**
 * Dumps the cached telephony state and all registrations for debugging.
 * Requires the DUMP permission.
 */
@Override
public void dump(FileDescriptor fd, PrintWriter pw, String[] args) {
    if (mContext.checkCallingOrSelfPermission(android.Manifest.permission.DUMP)
            != PackageManager.PERMISSION_GRANTED) {
        pw.println("Permission Denial: can't dump telephony.registry from from pid="
                + Binder.getCallingPid() + ", uid=" + Binder.getCallingUid());
        return;
    }
    synchronized (mRecords) {
        final int recordCount = mRecords.size();
        pw.println("last known state:");
        pw.println("  mCallState=" + mCallState);
        pw.println("  mCallIncomingNumber=" + mCallIncomingNumber);
        pw.println("  mServiceState=" + mServiceState);
        pw.println("  mSignalStrength=" + mSignalStrength);
        pw.println("  mMessageWaiting=" + mMessageWaiting);
        pw.println("  mCallForwarding=" + mCallForwarding);
        pw.println("  mDataActivity=" + mDataActivity);
        pw.println("  mDataConnectionState=" + mDataConnectionState);
        pw.println("  mDataConnectionPossible=" + mDataConnectionPossible);
        pw.println("  mDataConnectionReason=" + mDataConnectionReason);
        pw.println("  mDataConnectionApn=" + mDataConnectionApn);
        pw.println("  mDataConnectionLinkProperties=" + mDataConnectionLinkProperties);
        pw.println("  mDataConnectionLinkCapabilities=" + mDataConnectionLinkCapabilities);
        pw.println("  mCellLocation=" + mCellLocation);
        pw.println("  mCellInfo=" + mCellInfo);
        pw.println("registrations: count=" + recordCount);
        for (Record r : mRecords) {
            pw.println("  " + r.pkgForDebug + " 0x" + Integer.toHexString(r.events));
        }
    }
}
//
// the legacy intent broadcasting
//
/** Notes the phone state in battery stats, then sends the sticky broadcast. */
private void broadcastServiceStateChanged(ServiceState state) {
    final long token = Binder.clearCallingIdentity();
    try {
        mBatteryStats.notePhoneState(state.getState());
    } catch (RemoteException re) {
        // Battery stats service is unreachable; nothing useful to do.
    } finally {
        Binder.restoreCallingIdentity(token);
    }

    final Intent intent = new Intent(TelephonyIntents.ACTION_SERVICE_STATE_CHANGED);
    final Bundle data = new Bundle();
    state.fillInNotifierBundle(data);
    intent.putExtras(data);
    mContext.sendStickyBroadcastAsUser(intent, UserHandle.ALL);
}
/** Notes the signal strength in battery stats, then sends the sticky broadcast. */
private void broadcastSignalStrengthChanged(SignalStrength signalStrength) {
    final long token = Binder.clearCallingIdentity();
    try {
        mBatteryStats.notePhoneSignalStrength(signalStrength);
    } catch (RemoteException e) {
        // The remote entity disappeared; safe to ignore.
    } finally {
        Binder.restoreCallingIdentity(token);
    }

    final Intent intent = new Intent(TelephonyIntents.ACTION_SIGNAL_STRENGTH_CHANGED);
    // Coalesce bursts of signal updates into the latest pending broadcast.
    intent.addFlags(Intent.FLAG_RECEIVER_REPLACE_PENDING);
    final Bundle data = new Bundle();
    signalStrength.fillInNotifierBundle(data);
    intent.putExtras(data);
    mContext.sendStickyBroadcastAsUser(intent, UserHandle.ALL);
}
/** Notes phone on/off in battery stats, then sends the call-state broadcast. */
private void broadcastCallStateChanged(int state, String incomingNumber) {
    final long token = Binder.clearCallingIdentity();
    try {
        if (state == TelephonyManager.CALL_STATE_IDLE) {
            mBatteryStats.notePhoneOff();
        } else {
            mBatteryStats.notePhoneOn();
        }
    } catch (RemoteException e) {
        // The remote entity disappeared; safe to ignore.
    } finally {
        Binder.restoreCallingIdentity(token);
    }

    final Intent intent = new Intent(TelephonyManager.ACTION_PHONE_STATE_CHANGED);
    intent.putExtra(PhoneConstants.STATE_KEY,
            DefaultPhoneNotifier.convertCallState(state).toString());
    if (!TextUtils.isEmpty(incomingNumber)) {
        intent.putExtra(TelephonyManager.EXTRA_INCOMING_NUMBER, incomingNumber);
    }
    // Protected broadcast: receivers must hold READ_PHONE_STATE.
    mContext.sendBroadcastAsUser(intent, UserHandle.ALL,
            android.Manifest.permission.READ_PHONE_STATE);
}
/**
 * Sends the sticky per-APN data-connection-state broadcast, attaching only
 * the extras that are meaningful for this change.
 */
private void broadcastDataConnectionStateChanged(int state,
        boolean isDataConnectivityPossible,
        String reason, String apn, String apnType, LinkProperties linkProperties,
        LinkCapabilities linkCapabilities, boolean roaming) {
    // Note: not reporting to the battery stats service here, because the
    // status bar takes care of that after taking into account all of the
    // required info.
    Intent intent = new Intent(TelephonyIntents.ACTION_ANY_DATA_CONNECTION_STATE_CHANGED);
    intent.putExtra(PhoneConstants.STATE_KEY,
            DefaultPhoneNotifier.convertDataState(state).toString());
    if (!isDataConnectivityPossible) {
        intent.putExtra(PhoneConstants.NETWORK_UNAVAILABLE_KEY, true);
    }
    if (reason != null) {
        intent.putExtra(PhoneConstants.STATE_CHANGE_REASON_KEY, reason);
    }
    if (linkProperties != null) {
        intent.putExtra(PhoneConstants.DATA_LINK_PROPERTIES_KEY, linkProperties);
        String iface = linkProperties.getInterfaceName();
        if (iface != null) {
            intent.putExtra(PhoneConstants.DATA_IFACE_NAME_KEY, iface);
        }
    }
    if (linkCapabilities != null) {
        intent.putExtra(PhoneConstants.DATA_LINK_CAPABILITIES_KEY, linkCapabilities);
    }
    // Roaming flag is only attached when true.
    if (roaming) intent.putExtra(PhoneConstants.DATA_NETWORK_ROAMING_KEY, true);
    intent.putExtra(PhoneConstants.DATA_APN_KEY, apn);
    intent.putExtra(PhoneConstants.DATA_APN_TYPE_KEY, apnType);
    mContext.sendStickyBroadcastAsUser(intent, UserHandle.ALL);
}
/** Sends the sticky broadcast announcing a failed data-connection attempt. */
private void broadcastDataConnectionFailed(String reason, String apnType) {
    final Intent intent = new Intent(TelephonyIntents.ACTION_DATA_CONNECTION_FAILED);
    intent.putExtra(PhoneConstants.FAILURE_REASON_KEY, reason);
    intent.putExtra(PhoneConstants.DATA_APN_TYPE_KEY, apnType);
    mContext.sendStickyBroadcastAsUser(intent, UserHandle.ALL);
}
/**
 * Returns true if the caller holds MODIFY_PHONE_STATE; otherwise logs a
 * denial (when DBG) and returns false.
 */
private boolean checkNotifyPermission(String method) {
    final boolean granted = mContext.checkCallingOrSelfPermission(
            android.Manifest.permission.MODIFY_PHONE_STATE)
            == PackageManager.PERMISSION_GRANTED;
    if (!granted && DBG) {
        Slog.w(TAG, "Modify Phone State Permission Denial: " + method + " from pid="
                + Binder.getCallingPid() + ", uid=" + Binder.getCallingUid());
    }
    return granted;
}
/**
 * Enforces the permissions required for the requested listen events,
 * throwing SecurityException on failure.
 *
 * Fix: the original enforced ACCESS_COARSE_LOCATION in two identical,
 * separate checks (LISTEN_CELL_LOCATION and LISTEN_CELL_INFO), running the
 * same enforcement twice when both bits were set; the checks are merged.
 */
private void checkListenerPermission(int events) {
    // Both cell-location and cell-info events expose coarse location data.
    final int locationEvents = PhoneStateListener.LISTEN_CELL_LOCATION
            | PhoneStateListener.LISTEN_CELL_INFO;
    if ((events & locationEvents) != 0) {
        mContext.enforceCallingOrSelfPermission(
                android.Manifest.permission.ACCESS_COARSE_LOCATION, null);
    }
    if ((events & PHONE_STATE_PERMISSION_MASK) != 0) {
        mContext.enforceCallingOrSelfPermission(
                android.Manifest.permission.READ_PHONE_STATE, null);
    }
}
/**
 * Removes all records whose binders were queued in mRemoveList during a
 * notification pass. Must be called with mRecords held.
 */
private void handleRemoveListLocked() {
    if (mRemoveList.isEmpty()) {
        return;
    }
    for (IBinder deadBinder : mRemoveList) {
        remove(deadBinder);
    }
    mRemoveList.clear();
}
/**
 * Returns true if the record subscribed to one of the given events AND was
 * registered by the current foreground user (used to gate the
 * location-sensitive callbacks).
 *
 * Fix: the debug condition used the non-short-circuit bitwise OR
 * ("DBG | DBG_LOC") on booleans; replaced with the idiomatic logical "||".
 */
private boolean validateEventsAndUserLocked(Record r, int events) {
    int foregroundUser;
    long callingIdentity = Binder.clearCallingIdentity();
    boolean valid = false;
    try {
        foregroundUser = ActivityManager.getCurrentUser();
        valid = r.callerUid == foregroundUser && (r.events & events) != 0;
        if (DBG || DBG_LOC) {
            Slog.d(TAG, "validateEventsAndUserLocked: valid=" + valid
                    + " r.callerUid=" + r.callerUid + " foregroundUser=" + foregroundUser
                    + " r.events=" + r.events + " events=" + events);
        }
    } finally {
        // Always restore identity, even if getCurrentUser() throws.
        Binder.restoreCallingIdentity(callingIdentity);
    }
    return valid;
}
}
| |
/**
* All files in the distribution of BLOAT (Bytecode Level Optimization and
* Analysis tool for Java(tm)) are Copyright 1997-2001 by the Purdue
* Research Foundation of Purdue University. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package EDU.purdue.cs.bloat.file;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import EDU.purdue.cs.bloat.reflect.ClassInfo;
import EDU.purdue.cs.bloat.reflect.Constant;
import EDU.purdue.cs.bloat.reflect.FieldInfo;
import EDU.purdue.cs.bloat.reflect.Modifiers;
/**
* Field models a field (member variable) in a class. The Field class grants
* access to information such as the field's modifiers, its name and type
* descriptor (represented as indices into the constant pool), and any
* attributes of the field. Static fields have a ConstantValue attribute.
*
* @see ConstantValue
*
* @author Nate Nystrom (<a
* href="mailto:nystrom@cs.purdue.edu">nystrom@cs.purdue.edu</a>)
*/
public class Field implements FieldInfo {
	private ClassInfo classInfo; // class that declares this field

	private int modifiers; // access-flags bit vector (see Modifiers)

	private int name; // constant-pool index of the field's name

	private int type; // constant-pool index of the field's type descriptor

	private Attribute[] attrs; // all attributes of this field

	private ConstantValue constantValue; // ConstantValue attribute, or null

	/**
	 * Constructor for creating a new field from scratch
	 */
	Field(final ClassInfo classInfo, final int modifiers, final int typeIndex,
			final int nameIndex) {
		this.classInfo = classInfo;
		this.modifiers = modifiers;
		this.name = nameIndex;
		this.type = typeIndex;
		this.attrs = new Attribute[0];
		this.constantValue = null;
	}

	/**
	 * Constructor for creating a new field that has a constant value from
	 * scratch
	 */
	Field(final ClassInfo classInfo, final int modifiers, final int typeIndex,
			final int nameIndex, final int cvNameIndex,
			final int constantValueIndex) {
		this.classInfo = classInfo;
		this.modifiers = modifiers;
		this.name = nameIndex;
		this.type = typeIndex;

		// The ConstantValue attribute always has a data length of 2
		// (a single constant-pool index), per the class file format.
		this.constantValue = new ConstantValue(cvNameIndex, 2,
				constantValueIndex);

		// The constant value is an attribute
		this.attrs = new Attribute[1];
		this.attrs[0] = constantValue;
	}

	/**
	 * Constructor. Read a field from a class file.
	 *
	 * @param in
	 *            The data stream of the class file.
	 * @param classInfo
	 *            The class file containing the field.
	 * @exception IOException
	 *                If an error occurs while reading.
	 */
	public Field(final DataInputStream in, final ClassInfo classInfo)
			throws IOException {
		this.classInfo = classInfo;

		modifiers = in.readUnsignedShort();
		name = in.readUnsignedShort();
		type = in.readUnsignedShort();

		final int numAttributes = in.readUnsignedShort();
		attrs = new Attribute[numAttributes];

		for (int i = 0; i < numAttributes; i++) {
			final int nameIndex = in.readUnsignedShort();
			final int length = in.readInt();

			// Renamed from "name" to avoid shadowing the field of the same
			// name, which this local has nothing to do with.
			final Constant attrName = classInfo.constants()[nameIndex];

			// ConstantValue attributes are parsed specially; anything else
			// is preserved opaquely as a GenericAttribute.
			if ((attrName != null) && "ConstantValue".equals(attrName.value())) {
				constantValue = new ConstantValue(in, nameIndex, length);
				attrs[i] = constantValue;
			}

			if (attrs[i] == null) {
				attrs[i] = new GenericAttribute(in, nameIndex, length);
			}
		}
	}

	/**
	 * Get the class which declared the field.
	 *
	 * @return The ClassInfo of the class which declared the field.
	 */
	public ClassInfo declaringClass() {
		return classInfo;
	}

	/**
	 * Set the index into the constant pool of the name of the field.
	 *
	 * @param name
	 *            The name of the field.
	 */
	public void setNameIndex(final int name) {
		this.name = name;
	}

	/**
	 * Set the index into the constant pool of the type of the field.
	 *
	 * @param type
	 *            The type of the field.
	 */
	public void setTypeIndex(final int type) {
		this.type = type;
	}

	/**
	 * Get the index into the constant pool of the name of the field.
	 *
	 * @return The index into the constant pool of the name of the field.
	 */
	public int nameIndex() {
		return name;
	}

	/**
	 * Get the index into the constant pool of the type of the field.
	 *
	 * @return The index into the constant pool of the type of the field.
	 */
	public int typeIndex() {
		return type;
	}

	/**
	 * Set the modifiers of the field. The values correspond to the constants in
	 * the Modifiers class.
	 *
	 * @param modifiers
	 *            A bit vector of modifier flags for the field.
	 * @see Modifiers
	 */
	public void setModifiers(final int modifiers) {
		this.modifiers = modifiers;
	}

	/**
	 * Get the modifiers of the field. The values correspond to the constants in
	 * the Modifiers class.
	 *
	 * @return A bit vector of modifier flags for the field.
	 * @see Modifiers
	 */
	public int modifiers() {
		return modifiers;
	}

	/**
	 * Get the index into the constant pool of the field's constant value, if
	 * any. Returns 0 if the field does not have a constant value.
	 *
	 * @see ClassInfo#constants
	 */
	public int constantValue() {
		if (constantValue != null) {
			return constantValue.constantValueIndex();
		}

		return 0;
	}

	/**
	 * Set the index into the constant pool of the field's constant value.
	 * Silently does nothing if the field has no ConstantValue attribute.
	 *
	 * @see ClassInfo#constants
	 */
	public void setConstantValue(final int index) {
		if (constantValue != null) {
			constantValue.setConstantValueIndex(index);
		}
	}

	/**
	 * Write the field to a class file.
	 *
	 * @param out
	 *            The data stream of the class file.
	 * @exception IOException
	 *                If an error occurs while writing.
	 */
	public void write(final DataOutputStream out) throws IOException {
		out.writeShort(modifiers);
		out.writeShort(name);
		out.writeShort(type);
		out.writeShort(attrs.length);

		for (int i = 0; i < attrs.length; i++) {
			out.writeShort(attrs[i].nameIndex());
			out.writeInt(attrs[i].length());
			attrs[i].writeData(out);
		}
	}

	/**
	 * Convert the field to a string.
	 *
	 * @return A string representation of the field.
	 */
	@Override
	public String toString() {
		// Use a StringBuilder instead of repeated String concatenation
		// (the original built the string with "+=" in a long chain).
		final StringBuilder sb = new StringBuilder("(field ");
		sb.append(name).append(' ').append(type);

		sb.append(" (modifiers");
		if ((modifiers & Modifiers.PUBLIC) != 0) {
			sb.append(" PUBLIC");
		}
		if ((modifiers & Modifiers.PRIVATE) != 0) {
			sb.append(" PRIVATE");
		}
		if ((modifiers & Modifiers.PROTECTED) != 0) {
			sb.append(" PROTECTED");
		}
		if ((modifiers & Modifiers.STATIC) != 0) {
			sb.append(" STATIC");
		}
		if ((modifiers & Modifiers.FINAL) != 0) {
			sb.append(" FINAL");
		}
		if ((modifiers & Modifiers.SYNCHRONIZED) != 0) {
			sb.append(" SYNCHRONIZED");
		}
		if ((modifiers & Modifiers.VOLATILE) != 0) {
			sb.append(" VOLATILE");
		}
		if ((modifiers & Modifiers.TRANSIENT) != 0) {
			sb.append(" TRANSIENT");
		}
		if ((modifiers & Modifiers.NATIVE) != 0) {
			sb.append(" NATIVE");
		}
		if ((modifiers & Modifiers.INTERFACE) != 0) {
			sb.append(" INTERFACE");
		}
		if ((modifiers & Modifiers.ABSTRACT) != 0) {
			sb.append(" ABSTRACT");
		}
		sb.append(')');

		if (constantValue != null) {
			sb.append(' ').append(constantValue);
		}

		sb.append(')');
		return sb.toString();
	}
}
| |
/*
* Copyright 2010 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.endow.document.validation.impl;
import java.util.ArrayList;
import java.util.List;
import org.kuali.kfs.module.endow.EndowKeyConstants;
import org.kuali.kfs.module.endow.businessobject.EndowmentSourceTransactionLine;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionTaxLotLine;
import org.kuali.kfs.module.endow.document.CorporateReorganizationDocument;
import org.kuali.kfs.module.endow.document.EndowmentSecurityDetailsDocument;
import org.kuali.kfs.module.endow.document.EndowmentTaxLotLinesDocument;
import org.kuali.kfs.module.endow.document.EndowmentTransactionLinesDocument;
import org.kuali.kfs.module.endow.document.validation.DeleteTaxLotLineRule;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.util.GlobalVariables;
public class CorporateReorganizationDocumentRules extends EndowmentTransactionLinesDocumentBaseRules implements DeleteTaxLotLineRule<EndowmentTaxLotLinesDocument, EndowmentTransactionTaxLotLine, EndowmentTransactionLine, Number, Number> {
/**
 * Validates a transaction line being added to a Corporate Reorganization
 * document: security and registration on both sides, the single-source-line
 * rule, base-rule checks, line-level checks, and non-duplicate securities.
 *
 * @see org.kuali.kfs.module.endow.document.validation.AddTransactionLineRule#processAddTransactionLineRules(org.kuali.kfs.module.endow.document.EndowmentTransactionLinesDocument,
 *      org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine)
 */
@Override
public boolean processAddTransactionLineRules(EndowmentTransactionLinesDocument transLineDocument, EndowmentTransactionLine line) {
    CorporateReorganizationDocument corpReorgDocument = (CorporateReorganizationDocument) transLineDocument;
    boolean valid = true;

    // Source side: security id and registration code.
    valid &= validateSecurity(valid, corpReorgDocument, true);
    valid &= validateRegistration(valid, corpReorgDocument, true);
    // Target side: security id and registration code.
    valid &= validateSecurity(valid, corpReorgDocument, false);
    valid &= validateRegistration(valid, corpReorgDocument, false);
    // At most one source transaction line is allowed on this document.
    valid &= validateOnlyOneSourceTransactionLine(true, corpReorgDocument, line, -1);

    if (valid) {
        valid &= super.processAddTransactionLineRules(corpReorgDocument, line);
    }
    if (valid) {
        valid &= validateCorpReorganizationTransferTransactionLine(true, corpReorgDocument, line, -1, -1);
    }
    if (valid) {
        // The source and target security codes must not be identical.
        valid &= validateNonDuplicateSecurityCodes(corpReorgDocument);
    }

    // The rule outcome is driven by the accumulated error map, not the flag.
    return GlobalVariables.getMessageMap().getErrorCount() == 0;
}
/**
 * Validates that one and only one source transaction line can be added.
 *
 * @param validateForAdd true when validating an add; false on save/route
 * @param endowmentTransactionLinesDocument document being validated
 * @param line the line the error (if any) is reported against
 * @param index line index used to build the error prefix
 * @return true if valid, false otherwise
 */
protected boolean validateOnlyOneSourceTransactionLine(boolean validateForAdd, EndowmentTransactionLinesDocument endowmentTransactionLinesDocument, EndowmentTransactionLine line, int index) {
    // The rule only applies to source transaction lines.
    if (!(line instanceof EndowmentSourceTransactionLine)) {
        return true;
    }

    final boolean hasLines = endowmentTransactionLinesDocument.getSourceTransactionLines() != null;
    final int count = hasLines ? endowmentTransactionLinesDocument.getSourceTransactionLines().size() : 0;

    // When adding: any pre-existing source line is one too many.
    // When saving/routing: exactly one source line must be present.
    final boolean violation = validateForAdd
            ? (hasLines && count >= 1)
            : (hasLines && count != 1);

    if (violation) {
        putFieldError(getErrorPrefix(line, index), EndowKeyConstants.EndowmentTransactionDocumentConstants.ERROR_SECURITY_TRANSFER_ONE_AND_ONLY_ONE_SOURCE_TRANS_LINE);
        return false;
    }
    return true;
}
/**
 * Validates a single transfer transaction line: base line checks, positive
 * units, sufficient units for source lines, and a warning when the
 * endowment's permanently restricted funds would be reduced.
 *
 * @param isAdd true when validating an add
 * @param endowmentTransactionLinesDocument document owning the line
 * @param line transaction line to validate
 * @param transLineIndex line index used to build the error prefix
 * @param taxLotIndex tax-lot index forwarded to the sufficient-units check
 * @return true when no errors have accumulated in the message map
 */
protected boolean validateCorpReorganizationTransferTransactionLine(boolean isAdd, EndowmentTransactionLinesDocument endowmentTransactionLinesDocument, EndowmentTransactionLine line, int transLineIndex, int taxLotIndex) {
    boolean valid = super.validateTransactionLine(endowmentTransactionLinesDocument, line, transLineIndex);
    if (valid) {
        final String errorPrefix = getErrorPrefix(line, transLineIndex);

        // Units must be strictly positive.
        valid &= validateTransactionUnitsGreaterThanZero(line, errorPrefix);

        // Source lines additionally need enough units available to transfer.
        if (line instanceof EndowmentSourceTransactionLine) {
            valid &= validateSufficientUnits(isAdd, endowmentTransactionLinesDocument, line, transLineIndex, taxLotIndex);
        }

        // Advisory check (no validity impact): flags a reduction of the
        // permanently restricted fund.
        checkWhetherReducePermanentlyRestrictedFund(line, errorPrefix);
    }
    return GlobalVariables.getMessageMap().getErrorCount() == 0;
}
/**
 * Runs the full document-level validation at route time: security and
 * registration on the source side, presence of at least one source and one
 * target line, then per-line, tax-lot, and total-unit checks for every line.
 *
 * @see org.kuali.rice.krad.rules.DocumentRuleBase#processCustomRouteDocumentBusinessRules(org.kuali.rice.krad.document.Document)
 */
@Override
protected boolean processCustomRouteDocumentBusinessRules(Document document) {
    boolean isValid = super.processCustomRouteDocumentBusinessRules(document);
    if (isValid) {
        CorporateReorganizationDocument corporateReorganizationDocument = (CorporateReorganizationDocument) document;

        // Validate Security
        isValid &= validateSecurity(isValid, corporateReorganizationDocument, true);

        // Validate Registration code.
        isValid &= validateRegistration(isValid, corporateReorganizationDocument, true);

        // Validate at least one Source Tx was entered.
        if (!transactionLineSizeGreaterThanZero(corporateReorganizationDocument, true))
            return false;

        // Validate at least one Target Tx was entered.
        if (!transactionLineSizeGreaterThanZero(corporateReorganizationDocument, false))
            return false;

        // Obtaining all the transaction lines for validations
        List<EndowmentTransactionLine> txLines = new ArrayList<EndowmentTransactionLine>();
        txLines.addAll(corporateReorganizationDocument.getSourceTransactionLines());
        txLines.addAll(corporateReorganizationDocument.getTargetTransactionLines());

        // Validate All the Transaction Lines.
        for (int i = 0; i < txLines.size(); i++) {
            EndowmentTransactionLine txLine = txLines.get(i);
            isValid &= validateCorpReorganizationTransferTransactionLine(false, corporateReorganizationDocument, txLine, i, -1);
            isValid &= validateTaxLots(corporateReorganizationDocument, txLine, i);
            isValid &= validateTotalUnits(corporateReorganizationDocument, txLine, i);
        }

        // Disabled ETRAN-equality checks, retained for reference:
        // isValid = &= validateSourceTransSecurityEtranEqual(corporateReorganizationDocument);
        // isValid = &= validateTargetTransSecurityEtranEqual(corporateReorganizationDocument);
    }
    // The rule outcome is driven by the accumulated error map, not the flag.
    return GlobalVariables.getMessageMap().getErrorCount() == 0;
}
/**
 * Intentionally disables the base-class security class-type-code validation:
 * this check does not apply to this document type, so the override always
 * reports success.
 *
 * @see org.kuali.kfs.module.endow.document.validation.impl.EndowmentTransactionalDocumentBaseRule#validateSecurityClassTypeCode(org.kuali.kfs.module.endow.document.EndowmentSecurityDetailsDocument,
 *      boolean, java.lang.String)
 */
@Override
protected boolean validateSecurityClassTypeCode(EndowmentSecurityDetailsDocument document, boolean isSource, String classCodeType) {
    // No-op by design: always passes.
    return true;
}
/**
 * Re-validates a transaction line after a UI refresh: runs the base refresh
 * rules and, when they pass, the document-specific transfer-line validation.
 *
 * @see org.kuali.kfs.module.endow.document.validation.impl.EndowmentTransactionLinesDocumentBaseRules#processRefreshTransactionLineRules(org.kuali.kfs.module.endow.document.EndowmentTransactionLinesDocument,
 *      org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine, java.lang.Number)
 */
@Override
public boolean processRefreshTransactionLineRules(EndowmentTransactionLinesDocument endowmentTransactionLinesDocument, EndowmentTransactionLine endowmentTransactionLine, Number index) {
    boolean isValid = super.processRefreshTransactionLineRules(endowmentTransactionLinesDocument, endowmentTransactionLine, index);
    if (isValid) {
        // -1: no specific tax lot line is targeted by a refresh.
        isValid &= validateCorpReorganizationTransferTransactionLine(false, endowmentTransactionLinesDocument, endowmentTransactionLine, (Integer) index, -1);
    }
    return isValid;
}
/**
 * Validates the deletion of a tax lot line: the owning transaction line must
 * itself be valid, and the document-specific transfer-line rules must pass for
 * that line/tax-lot pair.
 *
 * @param endowmentTaxLotLinesDocument document the tax lot line belongs to
 * @param endowmentTransactionTaxLotLine the tax lot line being deleted (not inspected
 *        directly here; validation is driven off the owning transaction line)
 * @param endowmentTransactionLine the transaction line that owns the tax lot line
 * @param index index of the transaction line
 * @param numberX index of the tax lot line within the transaction line
 * @return true when the deletion passes all validations
 *
 * @see org.kuali.kfs.module.endow.document.validation.DeleteTaxLotLineRule#processDeleteTaxLotLineRules(org.kuali.kfs.module.endow.document.EndowmentTaxLotLinesDocument,
 *      org.kuali.kfs.module.endow.businessobject.EndowmentTransactionTaxLotLine,
 *      org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine, java.lang.Number, java.lang.Number)
 */
@Override
public boolean processDeleteTaxLotLineRules(EndowmentTaxLotLinesDocument endowmentTaxLotLinesDocument, EndowmentTransactionTaxLotLine endowmentTransactionTaxLotLine, EndowmentTransactionLine endowmentTransactionLine, Number index, Number numberX) {
    boolean isValid = true;
    isValid &= validateTransactionLine(endowmentTaxLotLinesDocument, endowmentTransactionLine, (Integer) index);
    if (isValid) {
        isValid &= validateCorpReorganizationTransferTransactionLine(false, endowmentTaxLotLinesDocument, endowmentTransactionLine, (Integer) index, (Integer) numberX);
    }
    return isValid;
}
/**
 * Reports that this document type does not carry an endowment transaction
 * (etran) code, disabling the etran-code-based checks in the base rules.
 *
 * @see org.kuali.kfs.module.endow.document.validation.impl.EndowmentTransactionLinesDocumentBaseRules#hasEtranCode(org.kuali.kfs.module.endow.document.EndowmentTransactionLinesDocument)
 */
@Override
protected boolean hasEtranCode(EndowmentTransactionLinesDocument endowmentTransactionLinesDocument) {
    return false;
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver10;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.0 wire representation of the BSN "hybrid get" reply experimenter
 * message (experimenter 0x5c16c7, subtype 0x1c). Immutable; fixed 24-byte wire
 * length. LoxiGen-generated; the only hand change is the hashCode() fix noted
 * below, which should also be upstreamed to the of_class.java template.
 */
class OFBsnHybridGetReplyVer10 implements OFBsnHybridGetReply {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnHybridGetReplyVer10.class);
    // version: 1.0
    final static byte WIRE_VERSION = 1;
    final static int LENGTH = 24;

    private final static long DEFAULT_XID = 0x0L;
    private final static short DEFAULT_HYBRID_ENABLE = (short) 0x0;
    private final static int DEFAULT_HYBRID_VERSION = 0x0;

    // OF message fields
    private final long xid;
    private final short hybridEnable;
    private final int hybridVersion;
    //
    // Immutable default instance
    final static OFBsnHybridGetReplyVer10 DEFAULT = new OFBsnHybridGetReplyVer10(
        DEFAULT_XID, DEFAULT_HYBRID_ENABLE, DEFAULT_HYBRID_VERSION
    );

    // package private constructor - used by readers, builders, and factory;
    // normalizes each field to its unsigned wire width (u32/u8/u16).
    OFBsnHybridGetReplyVer10(long xid, short hybridEnable, int hybridVersion) {
        this.xid = U32.normalize(xid);
        this.hybridEnable = U8.normalize(hybridEnable);
        this.hybridVersion = U16.normalize(hybridVersion);
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_10;
    }

    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x1cL;
    }

    @Override
    public short getHybridEnable() {
        return hybridEnable;
    }

    @Override
    public int getHybridVersion() {
        return hybridVersion;
    }

    public OFBsnHybridGetReply.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder that defaults every unset field to the parent message's value. */
    static class BuilderWithParent implements OFBsnHybridGetReply.Builder {
        final OFBsnHybridGetReplyVer10 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean hybridEnableSet;
        private short hybridEnable;
        private boolean hybridVersionSet;
        private int hybridVersion;

        BuilderWithParent(OFBsnHybridGetReplyVer10 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_10;
        }

        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnHybridGetReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x1cL;
        }

        @Override
        public short getHybridEnable() {
            return hybridEnable;
        }

        @Override
        public OFBsnHybridGetReply.Builder setHybridEnable(short hybridEnable) {
            this.hybridEnable = hybridEnable;
            this.hybridEnableSet = true;
            return this;
        }

        @Override
        public int getHybridVersion() {
            return hybridVersion;
        }

        @Override
        public OFBsnHybridGetReply.Builder setHybridVersion(int hybridVersion) {
            this.hybridVersion = hybridVersion;
            this.hybridVersionSet = true;
            return this;
        }

        @Override
        public OFBsnHybridGetReply build() {
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            short hybridEnable = this.hybridEnableSet ? this.hybridEnable : parentMessage.hybridEnable;
            int hybridVersion = this.hybridVersionSet ? this.hybridVersion : parentMessage.hybridVersion;
            //
            return new OFBsnHybridGetReplyVer10(
                    xid,
                    hybridEnable,
                    hybridVersion
                );
        }
    }

    /** Builder that defaults every unset field to the message's static default. */
    static class Builder implements OFBsnHybridGetReply.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean hybridEnableSet;
        private short hybridEnable;
        private boolean hybridVersionSet;
        private int hybridVersion;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_10;
        }

        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnHybridGetReply.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x1cL;
        }

        @Override
        public short getHybridEnable() {
            return hybridEnable;
        }

        @Override
        public OFBsnHybridGetReply.Builder setHybridEnable(short hybridEnable) {
            this.hybridEnable = hybridEnable;
            this.hybridEnableSet = true;
            return this;
        }

        @Override
        public int getHybridVersion() {
            return hybridVersion;
        }

        @Override
        public OFBsnHybridGetReply.Builder setHybridVersion(int hybridVersion) {
            this.hybridVersion = hybridVersion;
            this.hybridVersionSet = true;
            return this;
        }

        //
        @Override
        public OFBsnHybridGetReply build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            short hybridEnable = this.hybridEnableSet ? this.hybridEnable : DEFAULT_HYBRID_ENABLE;
            int hybridVersion = this.hybridVersionSet ? this.hybridVersion : DEFAULT_HYBRID_VERSION;

            return new OFBsnHybridGetReplyVer10(
                    xid,
                    hybridEnable,
                    hybridVersion
                );
        }
    }

    final static Reader READER = new Reader();

    /** Deserializes the 24-byte wire form; returns null when the buffer is short. */
    static class Reader implements OFMessageReader<OFBsnHybridGetReply> {
        @Override
        public OFBsnHybridGetReply readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 1
            byte version = bb.readByte();
            if(version != (byte) 0x1)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_10(1), got="+version);
            // fixed value property type == 4
            byte type = bb.readByte();
            if(type != (byte) 0x4)
                throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 24)
                throw new OFParseError("Wrong length: Expected=24(24), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x1cL
            int subtype = bb.readInt();
            if(subtype != 0x1c)
                throw new OFParseError("Wrong subtype: Expected=0x1cL(0x1cL), got="+subtype);
            short hybridEnable = U8.f(bb.readByte());
            // pad: 1 bytes
            bb.skipBytes(1);
            int hybridVersion = U16.f(bb.readShort());
            // pad: 4 bytes
            bb.skipBytes(4);

            OFBsnHybridGetReplyVer10 bsnHybridGetReplyVer10 = new OFBsnHybridGetReplyVer10(
                    xid,
                    hybridEnable,
                    hybridVersion
                );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnHybridGetReplyVer10);
            return bsnHybridGetReplyVer10;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnHybridGetReplyVer10Funnel FUNNEL = new OFBsnHybridGetReplyVer10Funnel();

    /** Feeds the message's wire-relevant content into a Guava hashing sink. */
    static class OFBsnHybridGetReplyVer10Funnel implements Funnel<OFBsnHybridGetReplyVer10> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnHybridGetReplyVer10 message, PrimitiveSink sink) {
            // fixed value property version = 1
            sink.putByte((byte) 0x1);
            // fixed value property type = 4
            sink.putByte((byte) 0x4);
            // fixed value property length = 24
            sink.putShort((short) 0x18);
            sink.putLong(message.xid);
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x1cL
            sink.putInt(0x1c);
            sink.putShort(message.hybridEnable);
            // skip pad (1 bytes)
            sink.putInt(message.hybridVersion);
            // skip pad (4 bytes)
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Serializes the message to its fixed 24-byte wire form. */
    static class Writer implements OFMessageWriter<OFBsnHybridGetReplyVer10> {
        @Override
        public void write(ByteBuf bb, OFBsnHybridGetReplyVer10 message) {
            // fixed value property version = 1
            bb.writeByte((byte) 0x1);
            // fixed value property type = 4
            bb.writeByte((byte) 0x4);
            // fixed value property length = 24
            bb.writeShort((short) 0x18);
            bb.writeInt(U32.t(message.xid));
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x1cL
            bb.writeInt(0x1c);
            bb.writeByte(U8.t(message.hybridEnable));
            // pad: 1 bytes
            bb.writeZero(1);
            bb.writeShort(U16.t(message.hybridVersion));
            // pad: 4 bytes
            bb.writeZero(4);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnHybridGetReplyVer10(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("hybridEnable=").append(hybridEnable);
        b.append(", ");
        b.append("hybridVersion=").append(hybridVersion);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnHybridGetReplyVer10 other = (OFBsnHybridGetReplyVer10) obj;

        if( xid != other.xid)
            return false;
        if( hybridEnable != other.hybridEnable)
            return false;
        if( hybridVersion != other.hybridVersion)
            return false;
        return true;
    }

    @Override
    public boolean equalsIgnoreXid(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnHybridGetReplyVer10 other = (OFBsnHybridGetReplyVer10) obj;

        // ignore XID
        if( hybridEnable != other.hybridEnable)
            return false;
        if( hybridVersion != other.hybridVersion)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // FIX: fold xid into the running result. The previous code read
        // `result = prime * (int) (xid ^ (xid >>> 32));`, dropping the `result +`
        // term — it discarded the accumulator seed and diverged from the pattern
        // used by hashCodeIgnoreXid() and every other generated hashCode.
        result = prime * result + (int) (xid ^ (xid >>> 32));
        result = prime * result + hybridEnable;
        result = prime * result + hybridVersion;
        return result;
    }

    @Override
    public int hashCodeIgnoreXid() {
        final int prime = 31;
        int result = 1;

        // ignore XID
        result = prime * result + hybridEnable;
        result = prime * result + hybridVersion;
        return result;
    }
}
| |
/*
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.rule;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.drools.core.WorkingMemory;
import org.drools.core.base.mvel.MVELEvalExpression;
import org.drools.core.spi.EvalExpression;
import org.drools.core.spi.Tuple;
import org.drools.core.spi.Wireable;
import org.kie.internal.security.KiePolicyHelper;
public class EvalCondition extends ConditionalElement
implements
Externalizable,
Wireable {
private static final long serialVersionUID = 510l;
private EvalExpression expression;
private Declaration[] requiredDeclarations;
private static final Declaration[] EMPTY_DECLARATIONS = new Declaration[0];
private List<EvalCondition> cloned = Collections.<EvalCondition> emptyList();
public EvalCondition() {
this( null );
}
public EvalCondition(final Declaration[] requiredDeclarations) {
this( null,
requiredDeclarations );
}
public EvalCondition(final EvalExpression eval,
final Declaration[] requiredDeclarations) {
this.expression = eval;
if ( requiredDeclarations == null ) {
this.requiredDeclarations = EvalCondition.EMPTY_DECLARATIONS;
} else {
this.requiredDeclarations = requiredDeclarations;
}
}
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
expression = (EvalExpression) in.readObject();
requiredDeclarations = (Declaration[]) in.readObject();
this.cloned = (List<EvalCondition>) in.readObject();
}
public void writeExternal(ObjectOutput out) throws IOException {
if ( EvalExpression.isCompiledInvoker(this.expression) ) {
out.writeObject( null );
} else {
out.writeObject( this.expression );
}
out.writeObject( requiredDeclarations );
out.writeObject( this.cloned );
}
public EvalExpression getEvalExpression() {
return this.expression;
}
public void wire(Object object) {
EvalExpression expression = KiePolicyHelper.isPolicyEnabled() ? new EvalExpression.SafeEvalExpression((EvalExpression) object) : (EvalExpression) object;
setEvalExpression( expression );
for ( EvalCondition clone : this.cloned ) {
clone.wireClone( expression );
}
}
private void wireClone(EvalExpression expression) {
setEvalExpression( this.expression instanceof MVELEvalExpression && expression instanceof MVELEvalExpression ?
( (MVELEvalExpression) expression ).clonePreservingDeclarations( (MVELEvalExpression) this.expression ) :
expression );
for ( EvalCondition clone : this.cloned ) {
clone.wireClone( expression );
}
}
public void setEvalExpression(final EvalExpression expression) {
this.expression = expression;
}
public Declaration[] getRequiredDeclarations() {
return this.requiredDeclarations;
}
public Object createContext() {
return this.expression.createContext();
}
public boolean isAllowed(final Tuple tuple,
final WorkingMemory workingMemory,
final Object context) {
try {
return this.expression.evaluate( tuple,
this.requiredDeclarations,
workingMemory,
context );
} catch ( final Exception e ) {
throw new RuntimeException( this.getEvalExpression() + " : " + e,
e );
}
}
public EvalCondition clone() {
final EvalCondition clone = new EvalCondition( this.expression.clone(),
(Declaration[]) this.requiredDeclarations.clone() );
if ( this.cloned == Collections.EMPTY_LIST ) {
this.cloned = new ArrayList<EvalCondition>( 1 );
}
this.cloned.add( clone );
return clone;
}
public int hashCode() {
return this.expression.hashCode();
}
public boolean equals(final Object object) {
if ( object == this ) {
return true;
}
if ( object == null || object.getClass() != EvalCondition.class ) {
return false;
}
final EvalCondition other = (EvalCondition) object;
if ( this.requiredDeclarations.length != other.requiredDeclarations.length ) {
return false;
}
for ( int i = 0, length = this.requiredDeclarations.length; i < length; i++ ) {
if ( this.requiredDeclarations[i].getOffset() != other.requiredDeclarations[i].getOffset() ) {
return false;
}
if ( !this.requiredDeclarations[i].getExtractor().equals( other.requiredDeclarations[i].getExtractor() ) ) {
return false;
}
}
return this.expression.equals( other.expression );
}
public Map<String, Declaration> getInnerDeclarations() {
return Collections.EMPTY_MAP;
}
public Map<String, Declaration> getOuterDeclarations() {
return Collections.EMPTY_MAP;
}
public List<? extends RuleConditionElement> getNestedElements() {
return Collections.EMPTY_LIST;
}
public boolean isPatternScopeDelimiter() {
return true;
}
/**
* @inheritDoc
*/
public Declaration resolveDeclaration(final String identifier) {
return null;
}
public void replaceDeclaration(Declaration declaration,
Declaration resolved) {
for ( int i = 0; i < this.requiredDeclarations.length; i++ ) {
if ( this.requiredDeclarations[i].equals( declaration ) ) {
this.requiredDeclarations[i] = resolved;
}
}
this.expression.replaceDeclaration( declaration,
resolved );
}
@Override
public String toString() {
return this.expression.toString();
}
}
| |
/*
* Copyright 2016 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigquery;
import com.google.api.core.ApiFunction;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobStatistics2;
import com.google.api.services.bigquery.model.JobStatistics3;
import com.google.api.services.bigquery.model.JobStatistics4;
import com.google.cloud.StringEnumType;
import com.google.cloud.StringEnumValue;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.base.MoreObjects.ToStringHelper;
import com.google.common.collect.Lists;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/** A Google BigQuery Job statistics. */
public abstract class JobStatistics implements Serializable {
private static final long serialVersionUID = 1433024714741660399L;
private final Long creationTime;
private final Long endTime;
private final Long startTime;
private final Long numChildJobs;
private final String parentJobId;
private final ScriptStatistics scriptStatistics;
private final List<ReservationUsage> reservationUsage;
private final TransactionInfo transactionInfo;
private final SessionInfo sessionInfo;
/** A Google BigQuery Copy Job statistics. Adds nothing beyond the base job statistics. */
public static class CopyStatistics extends JobStatistics {

    private static final long serialVersionUID = 8218325588441660938L;

    /** Builder for {@code CopyStatistics}; only the inherited base fields apply. */
    static final class Builder extends JobStatistics.Builder<CopyStatistics, Builder> {

        private Builder() {}

        // Populates the inherited base fields from the service protobuf model.
        private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
            super(statisticsPb);
        }

        @Override
        CopyStatistics build() {
            return new CopyStatistics(this);
        }
    }

    private CopyStatistics(Builder builder) {
        super(builder);
    }

    @Override
    public final boolean equals(Object obj) {
        return obj == this
            || obj != null
                && obj.getClass().equals(CopyStatistics.class)
                && baseEquals((CopyStatistics) obj);
    }

    @Override
    public final int hashCode() {
        return baseHashCode();
    }

    static Builder newBuilder() {
        return new Builder();
    }

    // Deserializes copy-job statistics from the service protobuf model.
    @SuppressWarnings("unchecked")
    static CopyStatistics fromPb(com.google.api.services.bigquery.model.JobStatistics statisticPb) {
        return new Builder(statisticPb).build();
    }
}
/** A Google BigQuery Extract Job statistics. */
public static class ExtractStatistics extends JobStatistics {

    private static final long serialVersionUID = -1566598819212767373L;

    // Per-destination-URI file counts, in the order the URIs were specified.
    private final List<Long> destinationUriFileCounts;

    /** Builder for {@code ExtractStatistics}. */
    static final class Builder extends JobStatistics.Builder<ExtractStatistics, Builder> {

        private List<Long> destinationUriFileCounts;

        private Builder() {}

        // Populates fields from the service protobuf model; extract-specific
        // data is only present when the job had an extract stage.
        private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
            super(statisticsPb);
            if (statisticsPb.getExtract() != null) {
                this.destinationUriFileCounts = statisticsPb.getExtract().getDestinationUriFileCounts();
            }
        }

        Builder setDestinationUriFileCounts(List<Long> destinationUriFileCounts) {
            this.destinationUriFileCounts = destinationUriFileCounts;
            return self();
        }

        @Override
        ExtractStatistics build() {
            return new ExtractStatistics(this);
        }
    }

    private ExtractStatistics(Builder builder) {
        super(builder);
        this.destinationUriFileCounts = builder.destinationUriFileCounts;
    }

    /**
     * Returns the number of files per destination URI or URI pattern specified in the extract job.
     * These values will be in the same order as the URIs specified by {@link
     * ExtractJobConfiguration#getDestinationUris()}.
     */
    public List<Long> getDestinationUriFileCounts() {
        return destinationUriFileCounts;
    }

    @Override
    ToStringHelper toStringHelper() {
        return super.toStringHelper().add("destinationUriFileCounts", destinationUriFileCounts);
    }

    @Override
    public final boolean equals(Object obj) {
        return obj == this
            || obj != null
                && obj.getClass().equals(ExtractStatistics.class)
                && baseEquals((ExtractStatistics) obj);
    }

    @Override
    public final int hashCode() {
        return Objects.hash(baseHashCode(), destinationUriFileCounts);
    }

    // Serializes back to the service protobuf model, including the extract section.
    @Override
    com.google.api.services.bigquery.model.JobStatistics toPb() {
        com.google.api.services.bigquery.model.JobStatistics statisticsPb = super.toPb();
        return statisticsPb.setExtract(
            new JobStatistics4().setDestinationUriFileCounts(destinationUriFileCounts));
    }

    static Builder newBuilder() {
        return new Builder();
    }

    // Deserializes extract-job statistics from the service protobuf model.
    @SuppressWarnings("unchecked")
    static ExtractStatistics fromPb(
        com.google.api.services.bigquery.model.JobStatistics statisticPb) {
        return new Builder(statisticPb).build();
    }
}
/** A Google BigQuery Load Job statistics. */
public static class LoadStatistics extends JobStatistics {

    private static final long serialVersionUID = -707369246536309215L;

    // Load-specific counters; any may be null when the service omits them.
    private final Long inputBytes;
    private final Long inputFiles;
    private final Long outputBytes;
    private final Long outputRows;
    private final Long badRecords;

    /** Builder for {@code LoadStatistics}. */
    static final class Builder extends JobStatistics.Builder<LoadStatistics, Builder> {

        private Long inputBytes;
        private Long inputFiles;
        private Long outputBytes;
        private Long outputRows;
        private Long badRecords;

        private Builder() {}

        // Populates fields from the service protobuf model; load-specific data
        // is only present when the job had a load stage.
        private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
            super(statisticsPb);
            if (statisticsPb.getLoad() != null) {
                this.inputBytes = statisticsPb.getLoad().getInputFileBytes();
                this.inputFiles = statisticsPb.getLoad().getInputFiles();
                this.outputBytes = statisticsPb.getLoad().getOutputBytes();
                this.outputRows = statisticsPb.getLoad().getOutputRows();
                this.badRecords = statisticsPb.getLoad().getBadRecords();
            }
        }

        Builder setInputBytes(Long inputBytes) {
            this.inputBytes = inputBytes;
            return self();
        }

        Builder setInputFiles(Long inputFiles) {
            this.inputFiles = inputFiles;
            return self();
        }

        Builder setOutputBytes(Long outputBytes) {
            this.outputBytes = outputBytes;
            return self();
        }

        Builder setOutputRows(Long outputRows) {
            this.outputRows = outputRows;
            return self();
        }

        Builder setBadRecords(Long badRecords) {
            this.badRecords = badRecords;
            return self();
        }

        @Override
        LoadStatistics build() {
            return new LoadStatistics(this);
        }
    }

    private LoadStatistics(Builder builder) {
        super(builder);
        this.inputBytes = builder.inputBytes;
        this.inputFiles = builder.inputFiles;
        this.outputBytes = builder.outputBytes;
        this.outputRows = builder.outputRows;
        this.badRecords = builder.badRecords;
    }

    /** Returns the number of bytes of source data in a load job. */
    public Long getInputBytes() {
        return inputBytes;
    }

    /** Returns the number of source files in a load job. */
    public Long getInputFiles() {
        return inputFiles;
    }

    /** Returns the size of the data loaded by a load job so far, in bytes. */
    public Long getOutputBytes() {
        return outputBytes;
    }

    /** Returns the number of rows loaded by a load job so far. */
    public Long getOutputRows() {
        return outputRows;
    }

    /** Returns the number of bad records reported in a job. */
    public Long getBadRecords() {
        return badRecords;
    }

    @Override
    ToStringHelper toStringHelper() {
        return super.toStringHelper()
            .add("inputBytes", inputBytes)
            .add("inputFiles", inputFiles)
            .add("outputBytes", outputBytes)
            .add("outputRows", outputRows)
            .add("badRecords", badRecords);
    }

    @Override
    public final boolean equals(Object obj) {
        return obj == this
            || obj != null
                && obj.getClass().equals(LoadStatistics.class)
                && baseEquals((LoadStatistics) obj);
    }

    @Override
    public final int hashCode() {
        return Objects.hash(
            baseHashCode(), inputBytes, inputFiles, outputBytes, outputRows, badRecords);
    }

    // Serializes back to the service protobuf model, including the load section.
    @Override
    com.google.api.services.bigquery.model.JobStatistics toPb() {
        JobStatistics3 loadStatisticsPb = new JobStatistics3();
        loadStatisticsPb.setInputFileBytes(inputBytes);
        loadStatisticsPb.setInputFiles(inputFiles);
        loadStatisticsPb.setOutputBytes(outputBytes);
        loadStatisticsPb.setOutputRows(outputRows);
        loadStatisticsPb.setBadRecords(badRecords);
        return super.toPb().setLoad(loadStatisticsPb);
    }

    static Builder newBuilder() {
        return new Builder();
    }

    // Deserializes load-job statistics from the service protobuf model.
    @SuppressWarnings("unchecked")
    static LoadStatistics fromPb(com.google.api.services.bigquery.model.JobStatistics statisticPb) {
        return new Builder(statisticPb).build();
    }
}
/** A Google BigQuery Query Job statistics. */
public static class QueryStatistics extends JobStatistics {
private static final long serialVersionUID = 7539354109226732353L;
private final BiEngineStats biEngineStats;
private final Integer billingTier;
private final Boolean cacheHit;
private final String ddlOperationPerformed;
private final TableId ddlTargetTable;
private final RoutineId ddlTargetRoutine;
private final Long estimatedBytesProcessed;
private final Long numDmlAffectedRows;
private final DmlStats dmlStats;
private final List<TableId> referencedTables;
private final StatementType statementType;
private final Long totalBytesBilled;
private final Long totalBytesProcessed;
private final Long totalPartitionsProcessed;
private final Long totalSlotMs;
private final List<QueryStage> queryPlan;
private final List<TimelineSample> timeline;
private final Schema schema;
/**
 * StatementType represents possible types of SQL statements reported as part of the
 * QueryStatistics of a BigQuery job.
 */
public static final class StatementType extends StringEnumValue {
    private static final long serialVersionUID = 818920627219751204L;

    // Factory used by the enum-type registry to create instances for
    // constants the client library does not know about yet.
    private static final ApiFunction<String, StatementType> CONSTRUCTOR =
        new ApiFunction<String, StatementType>() {
            @Override
            public StatementType apply(String constant) {
                return new StatementType(constant);
            }
        };

    // Registry of known constants; also resolves unknown server-side values.
    private static final StringEnumType<StatementType> type =
        new StringEnumType<StatementType>(StatementType.class, CONSTRUCTOR);

    public static final StatementType SELECT = type.createAndRegister("SELECT");
    public static final StatementType UPDATE = type.createAndRegister("UPDATE");
    public static final StatementType INSERT = type.createAndRegister("INSERT");
    public static final StatementType DELETE = type.createAndRegister("DELETE");
    public static final StatementType CREATE_TABLE = type.createAndRegister("CREATE_TABLE");
    public static final StatementType CREATE_TABLE_AS_SELECT =
        type.createAndRegister("CREATE_TABLE_AS_SELECT");
    public static final StatementType CREATE_VIEW = type.createAndRegister("CREATE_VIEW");
    public static final StatementType CREATE_MODEL = type.createAndRegister("CREATE_MODEL");
    public static final StatementType CREATE_FUNCTION = type.createAndRegister("CREATE_FUNCTION");
    public static final StatementType CREATE_PROCEDURE =
        type.createAndRegister("CREATE_PROCEDURE");
    public static final StatementType ALTER_TABLE = type.createAndRegister("ALTER_TABLE");
    public static final StatementType ALTER_VIEW = type.createAndRegister("ALTER_VIEW");
    public static final StatementType DROP_TABLE = type.createAndRegister("DROP_TABLE");
    public static final StatementType DROP_VIEW = type.createAndRegister("DROP_VIEW");
    public static final StatementType DROP_FUNCTION = type.createAndRegister("DROP_FUNCTION");
    public static final StatementType DROP_PROCEDURE = type.createAndRegister("DROP_PROCEDURE");
    public static final StatementType MERGE = type.createAndRegister("MERGE");

    private StatementType(String constant) {
        super(constant);
    }

    /**
     * Get the StatementType for the given String constant, and throw an exception if the constant
     * is not recognized.
     */
    public static StatementType valueOfStrict(String constant) {
        return type.valueOfStrict(constant);
    }

    /** Get the StatementType for the given String constant, and allow unrecognized values. */
    public static StatementType valueOf(String constant) {
        return type.valueOf(constant);
    }

    /** Return the known values for StatementType. */
    public static StatementType[] values() {
        return type.values();
    }
}
    /** Builder for {@link QueryStatistics}; populated manually via setters or from the API proto. */
    static final class Builder extends JobStatistics.Builder<QueryStatistics, Builder> {
      private BiEngineStats biEngineStats;
      private Integer billingTier;
      private Boolean cacheHit;
      private String ddlOperationPerformed;
      private TableId ddlTargetTable;
      private RoutineId ddlTargetRoutine;
      private Long estimatedBytesProcessed;
      private Long numDmlAffectedRows;
      private DmlStats dmlStats;
      private List<TableId> referencedTables;
      private StatementType statementType;
      private Long totalBytesBilled;
      private Long totalBytesProcessed;
      private Long totalPartitionsProcessed;
      private Long totalSlotMs;
      private List<QueryStage> queryPlan;
      private List<TimelineSample> timeline;
      private Schema schema;
      private Builder() {}
      // Copies query-level statistics out of the API proto; nested messages and lists are
      // converted only when present (null-checked) to avoid NPEs on sparse responses.
      private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
        super(statisticsPb);
        if (statisticsPb.getQuery() != null) {
          if (statisticsPb.getQuery().getBiEngineStatistics() != null) {
            this.biEngineStats =
                BiEngineStats.fromPb(statisticsPb.getQuery().getBiEngineStatistics());
          }
          this.billingTier = statisticsPb.getQuery().getBillingTier();
          this.cacheHit = statisticsPb.getQuery().getCacheHit();
          this.ddlOperationPerformed = statisticsPb.getQuery().getDdlOperationPerformed();
          if (statisticsPb.getQuery().getDdlTargetTable() != null) {
            this.ddlTargetTable = TableId.fromPb(statisticsPb.getQuery().getDdlTargetTable());
          }
          if (statisticsPb.getQuery().getDdlTargetRoutine() != null) {
            this.ddlTargetRoutine = RoutineId.fromPb(statisticsPb.getQuery().getDdlTargetRoutine());
          }
          this.estimatedBytesProcessed = statisticsPb.getQuery().getEstimatedBytesProcessed();
          this.numDmlAffectedRows = statisticsPb.getQuery().getNumDmlAffectedRows();
          this.totalBytesBilled = statisticsPb.getQuery().getTotalBytesBilled();
          this.totalBytesProcessed = statisticsPb.getQuery().getTotalBytesProcessed();
          this.totalPartitionsProcessed = statisticsPb.getQuery().getTotalPartitionsProcessed();
          this.totalSlotMs = statisticsPb.getQuery().getTotalSlotMs();
          // Lenient valueOf: unknown statement types from newer API versions are still wrapped.
          if (statisticsPb.getQuery().getStatementType() != null) {
            this.statementType = StatementType.valueOf(statisticsPb.getQuery().getStatementType());
          }
          if (statisticsPb.getQuery().getReferencedTables() != null) {
            this.referencedTables =
                Lists.transform(
                    statisticsPb.getQuery().getReferencedTables(), TableId.FROM_PB_FUNCTION);
          }
          if (statisticsPb.getQuery().getQueryPlan() != null) {
            this.queryPlan =
                Lists.transform(
                    statisticsPb.getQuery().getQueryPlan(), QueryStage.FROM_PB_FUNCTION);
          }
          if (statisticsPb.getQuery().getTimeline() != null) {
            this.timeline =
                Lists.transform(
                    statisticsPb.getQuery().getTimeline(), TimelineSample.FROM_PB_FUNCTION);
          }
          if (statisticsPb.getQuery().getSchema() != null) {
            this.schema = Schema.fromPb(statisticsPb.getQuery().getSchema());
          }
          if (statisticsPb.getQuery().getDmlStats() != null) {
            this.dmlStats = DmlStats.fromPb(statisticsPb.getQuery().getDmlStats());
          }
        }
      }
      Builder setBiEngineStats(BiEngineStats biEngineStats) {
        this.biEngineStats = biEngineStats;
        return self();
      }
      Builder setBillingTier(Integer billingTier) {
        this.billingTier = billingTier;
        return self();
      }
      Builder setCacheHit(Boolean cacheHit) {
        this.cacheHit = cacheHit;
        return self();
      }
      Builder setDDLOperationPerformed(String ddlOperationPerformed) {
        this.ddlOperationPerformed = ddlOperationPerformed;
        return self();
      }
      Builder setDDLTargetTable(TableId ddlTargetTable) {
        this.ddlTargetTable = ddlTargetTable;
        return self();
      }
      Builder setDDLTargetRoutine(RoutineId ddlTargetRoutine) {
        this.ddlTargetRoutine = ddlTargetRoutine;
        return self();
      }
      Builder setEstimatedBytesProcessed(Long estimatedBytesProcessed) {
        this.estimatedBytesProcessed = estimatedBytesProcessed;
        return self();
      }
      Builder setNumDmlAffectedRows(Long numDmlAffectedRows) {
        this.numDmlAffectedRows = numDmlAffectedRows;
        return self();
      }
      Builder setDmlStats(DmlStats dmlStats) {
        this.dmlStats = dmlStats;
        return self();
      }
      Builder setReferenceTables(List<TableId> referencedTables) {
        this.referencedTables = referencedTables;
        return self();
      }
      Builder setStatementType(StatementType statementType) {
        this.statementType = statementType;
        return self();
      }
      // String overload: converts leniently, so unrecognized constants are accepted.
      Builder setStatementType(String strStatementType) {
        this.statementType = StatementType.valueOf(strStatementType);
        return self();
      }
      Builder setTotalBytesBilled(Long totalBytesBilled) {
        this.totalBytesBilled = totalBytesBilled;
        return self();
      }
      Builder setTotalBytesProcessed(Long totalBytesProcessed) {
        this.totalBytesProcessed = totalBytesProcessed;
        return self();
      }
      Builder setTotalPartitionsProcessed(Long totalPartitionsProcessed) {
        this.totalPartitionsProcessed = totalPartitionsProcessed;
        return self();
      }
      Builder setTotalSlotMs(Long totalSlotMs) {
        this.totalSlotMs = totalSlotMs;
        return self();
      }
      Builder setQueryPlan(List<QueryStage> queryPlan) {
        this.queryPlan = queryPlan;
        return self();
      }
      Builder setTimeline(List<TimelineSample> timeline) {
        this.timeline = timeline;
        return self();
      }
      Builder setSchema(Schema schema) {
        this.schema = schema;
        return self();
      }
      @Override
      QueryStatistics build() {
        return new QueryStatistics(this);
      }
    }
    // Copies all fields from the builder; instances are effectively immutable afterwards.
    private QueryStatistics(Builder builder) {
      super(builder);
      this.biEngineStats = builder.biEngineStats;
      this.billingTier = builder.billingTier;
      this.cacheHit = builder.cacheHit;
      this.ddlOperationPerformed = builder.ddlOperationPerformed;
      this.ddlTargetTable = builder.ddlTargetTable;
      this.ddlTargetRoutine = builder.ddlTargetRoutine;
      this.estimatedBytesProcessed = builder.estimatedBytesProcessed;
      this.numDmlAffectedRows = builder.numDmlAffectedRows;
      this.dmlStats = builder.dmlStats;
      this.referencedTables = builder.referencedTables;
      this.statementType = builder.statementType;
      this.totalBytesBilled = builder.totalBytesBilled;
      this.totalBytesProcessed = builder.totalBytesProcessed;
      this.totalPartitionsProcessed = builder.totalPartitionsProcessed;
      this.totalSlotMs = builder.totalSlotMs;
      this.queryPlan = builder.queryPlan;
      this.timeline = builder.timeline;
      this.schema = builder.schema;
    }
    /** Returns query statistics specific to the use of BI Engine. */
    public BiEngineStats getBiEngineStats() {
      return biEngineStats;
    }
    /** Returns the billing tier for the job. */
    public Integer getBillingTier() {
      return billingTier;
    }
    /**
     * Returns whether the query result was fetched from the query cache.
     *
     * @see <a href="https://cloud.google.com/bigquery/querying-data#querycaching">Query Caching</a>
     */
    public Boolean getCacheHit() {
      return cacheHit;
    }
    /** [BETA] For DDL queries, returns the operation applied to the DDL target table. */
    public String getDdlOperationPerformed() {
      return ddlOperationPerformed;
    }
    /** [BETA] For DDL queries, returns the TableID of the targeted table. */
    public TableId getDdlTargetTable() {
      return ddlTargetTable;
    }
    /** [BETA] For DDL queries, returns the RoutineId of the targeted routine. */
    public RoutineId getDdlTargetRoutine() {
      return ddlTargetRoutine;
    }
    /** The original estimate of bytes processed for the job. */
    public Long getEstimatedBytesProcessed() {
      return estimatedBytesProcessed;
    }
    /**
     * The number of rows affected by a DML statement. Present only for DML statements INSERT,
     * UPDATE or DELETE.
     */
    public Long getNumDmlAffectedRows() {
      return numDmlAffectedRows;
    }
    /** Detailed statistics for DML statements. */
    public DmlStats getDmlStats() {
      return dmlStats;
    }
    /**
     * Referenced tables for the job. Queries that reference more than 50 tables will not have a
     * complete list.
     */
    public List<TableId> getReferencedTables() {
      return referencedTables;
    }
    /**
     * [BETA] The type of query statement, if valid. Possible values include: SELECT INSERT UPDATE
     * DELETE CREATE_TABLE CREATE_TABLE_AS_SELECT DROP_TABLE CREATE_VIEW DROP_VIEW
     */
    public StatementType getStatementType() {
      return statementType;
    }
    /** Returns the total number of bytes billed for the job. */
    public Long getTotalBytesBilled() {
      return totalBytesBilled;
    }
    /** Returns the total number of bytes processed by the job. */
    public Long getTotalBytesProcessed() {
      return totalBytesProcessed;
    }
    /** Total number of partitions processed from all partitioned tables referenced in the job. */
    public Long getTotalPartitionsProcessed() {
      return totalPartitionsProcessed;
    }
    /** Returns the slot-milliseconds consumed by the query. */
    public Long getTotalSlotMs() {
      return totalSlotMs;
    }
    /**
     * Returns the query plan as a list of stages or {@code null} if a query plan is not available.
     * Each stage involves a number of steps that read from data sources, perform a series of
     * transformations on the input, and emit an output to a future stage (or the final result). The
     * query plan is available for a completed query job and is retained for 7 days.
     *
     * @see <a href="https://cloud.google.com/bigquery/query-plan-explanation">Query Plan</a>
     */
    public List<QueryStage> getQueryPlan() {
      return queryPlan;
    }
    /**
     * Return the timeline for the query, as a list of timeline samples. Each sample provides
     * information about the overall progress of the query. Information includes time of the sample,
     * progress reporting on active, completed, and pending units of work, as well as the cumulative
     * estimation of slot-milliseconds consumed by the query.
     */
    public List<TimelineSample> getTimeline() {
      return timeline;
    }
    /**
     * Returns the schema for the query result. Present only for successful dry run of non-legacy
     * SQL queries.
     */
    public Schema getSchema() {
      return schema;
    }
    @Override
    ToStringHelper toStringHelper() {
      // NOTE(review): only a subset of the query fields is rendered here (statementType, dmlStats,
      // referencedTables, ddl* fields are omitted) — confirm this is intentional.
      return super.toStringHelper()
          .add("biEngineStats", biEngineStats)
          .add("billingTier", billingTier)
          .add("cacheHit", cacheHit)
          .add("totalBytesBilled", totalBytesBilled)
          .add("totalBytesProcessed", totalBytesProcessed)
          .add("queryPlan", queryPlan)
          .add("timeline", timeline)
          .add("schema", schema);
    }
@Override
public final boolean equals(Object obj) {
return obj == this
|| obj != null
&& obj.getClass().equals(QueryStatistics.class)
&& baseEquals((QueryStatistics) obj);
}
    @Override
    public final int hashCode() {
      // Hashes a subset of the fields. equals() compares the full proto form, and the proto is
      // derived from all fields, so equal instances still produce equal hashes; unequal instances
      // may merely collide, which is allowed by the hashCode contract.
      return Objects.hash(
          baseHashCode(),
          biEngineStats,
          billingTier,
          cacheHit,
          totalBytesBilled,
          totalBytesProcessed,
          queryPlan,
          schema);
    }
    // Serializes this object back into the API proto; nested messages and lists are set only
    // when non-null so the proto stays sparse.
    @Override
    com.google.api.services.bigquery.model.JobStatistics toPb() {
      JobStatistics2 queryStatisticsPb = new JobStatistics2();
      if (biEngineStats != null) {
        queryStatisticsPb.setBiEngineStatistics(biEngineStats.toPb());
      }
      queryStatisticsPb.setBillingTier(billingTier);
      queryStatisticsPb.setCacheHit(cacheHit);
      queryStatisticsPb.setDdlOperationPerformed(ddlOperationPerformed);
      queryStatisticsPb.setEstimatedBytesProcessed(estimatedBytesProcessed);
      queryStatisticsPb.setTotalBytesBilled(totalBytesBilled);
      queryStatisticsPb.setTotalBytesProcessed(totalBytesProcessed);
      queryStatisticsPb.setTotalPartitionsProcessed(totalPartitionsProcessed);
      queryStatisticsPb.setTotalSlotMs(totalSlotMs);
      if (ddlTargetTable != null) {
        queryStatisticsPb.setDdlTargetTable(ddlTargetTable.toPb());
      }
      if (ddlTargetRoutine != null) {
        queryStatisticsPb.setDdlTargetRoutine(ddlTargetRoutine.toPb());
      }
      if (dmlStats != null) {
        queryStatisticsPb.setDmlStats(dmlStats.toPb());
      }
      if (referencedTables != null) {
        queryStatisticsPb.setReferencedTables(
            Lists.transform(referencedTables, TableId.TO_PB_FUNCTION));
      }
      if (statementType != null) {
        queryStatisticsPb.setStatementType(statementType.toString());
      }
      if (queryPlan != null) {
        queryStatisticsPb.setQueryPlan(Lists.transform(queryPlan, QueryStage.TO_PB_FUNCTION));
      }
      if (timeline != null) {
        queryStatisticsPb.setTimeline(Lists.transform(timeline, TimelineSample.TO_PB_FUNCTION));
      }
      if (schema != null) {
        queryStatisticsPb.setSchema(schema.toPb());
      }
      return super.toPb().setQuery(queryStatisticsPb);
    }
    static Builder newBuilder() {
      return new Builder();
    }
    // NOTE(review): the @SuppressWarnings appears unnecessary — no unchecked operation is
    // visible in this method; presumably carried over from a template. Confirm before removing.
    @SuppressWarnings("unchecked")
    static QueryStatistics fromPb(
        com.google.api.services.bigquery.model.JobStatistics statisticPb) {
      return new Builder(statisticPb).build();
    }
  }
  /** A Google BigQuery Script statistics. */
  public static class ScriptStatistics {
    private static final long serialVersionUID = 1328768324537448161L;
    private final String evaluationKind;
    private final List<ScriptStackFrame> stackFrames;
    /** A single frame of the script stack trace reported by the service. */
    public static class ScriptStackFrame {
      // Converter from the API proto to this wrapper; used with Lists.transform.
      static final Function<
              com.google.api.services.bigquery.model.ScriptStackFrame, ScriptStackFrame>
          FROM_PB_FUNCTION =
              new Function<
                  com.google.api.services.bigquery.model.ScriptStackFrame, ScriptStackFrame>() {
                @NullableDecl
                @Override
                public ScriptStackFrame apply(
                    @NullableDecl
                        com.google.api.services.bigquery.model.ScriptStackFrame stackFrame) {
                  return ScriptStackFrame.fromPb(stackFrame);
                }
              };
      // Converter back to the API proto.
      // NOTE(review): both converters dereference a @NullableDecl argument without a null check —
      // confirm list elements can never be null here.
      static final Function<
              ScriptStackFrame, com.google.api.services.bigquery.model.ScriptStackFrame>
          TO_PB_FUNCTION =
              new Function<
                  ScriptStackFrame, com.google.api.services.bigquery.model.ScriptStackFrame>() {
                @NullableDecl
                @Override
                public com.google.api.services.bigquery.model.ScriptStackFrame apply(
                    @NullableDecl ScriptStackFrame scriptStackFrame) {
                  return scriptStackFrame.toPb();
                }
              };
      private final Integer endColumn;
      private final Integer endLine;
      private final String procedureId;
      private final Integer startColumn;
      private final Integer startLine;
      private final String text;
      private ScriptStackFrame(Builder builder) {
        this.endColumn = builder.endColumn;
        this.endLine = builder.endLine;
        this.procedureId = builder.procedureId;
        this.startColumn = builder.startColumn;
        this.startLine = builder.startLine;
        this.text = builder.text;
      }
      /** Builder for {@link ScriptStackFrame}. */
      static class Builder {
        private Integer endColumn;
        private Integer endLine;
        private String procedureId;
        private Integer startColumn;
        private Integer startLine;
        private String text;
        Builder setEndColumn(Integer endColumn) {
          this.endColumn = endColumn;
          return this;
        }
        Builder setEndLine(Integer endLine) {
          this.endLine = endLine;
          return this;
        }
        Builder setProcedureId(String procedureId) {
          this.procedureId = procedureId;
          return this;
        }
        Builder setStartColumn(Integer startColumn) {
          this.startColumn = startColumn;
          return this;
        }
        Builder setStartLine(Integer startLine) {
          this.startLine = startLine;
          return this;
        }
        Builder setText(String text) {
          this.text = text;
          return this;
        }
        ScriptStackFrame build() {
          return new ScriptStackFrame(this);
        }
      }
      /** Returns the end column. */
      public Integer getEndColumn() {
        return endColumn;
      }
      /** Returns the end line. */
      public Integer getEndLine() {
        return endLine;
      }
      /** Returns Name of the active procedure, empty if in a top-level script. */
      public String getProcedureId() {
        return procedureId;
      }
      /** Returns the start column. */
      public Integer getStartColumn() {
        return startColumn;
      }
      /** Returns the start line. */
      public Integer getStartLine() {
        return startLine;
      }
      /** Returns Text of the current statement/expression. */
      public String getText() {
        return text;
      }
      ToStringHelper toStringHelper() {
        return MoreObjects.toStringHelper(this)
            .add("endColumn", endColumn)
            .add("endLine", endLine)
            .add("procedureId", procedureId)
            .add("startColumn", startColumn)
            .add("startLine", startLine)
            .add("text", text);
      }
      @Override
      public String toString() {
        return toStringHelper().toString();
      }
      // Equality is defined by the proto representation, which covers every field.
      @Override
      public boolean equals(Object obj) {
        return obj == this
            || obj != null
                && obj.getClass().equals(ScriptStackFrame.class)
                && Objects.equals(toPb(), ((ScriptStackFrame) obj).toPb());
      }
      @Override
      public int hashCode() {
        return Objects.hash(endColumn, endLine, procedureId, startColumn, startLine, text);
      }
      com.google.api.services.bigquery.model.ScriptStackFrame toPb() {
        com.google.api.services.bigquery.model.ScriptStackFrame stackFrame =
            new com.google.api.services.bigquery.model.ScriptStackFrame();
        stackFrame.setEndColumn(endColumn);
        stackFrame.setEndLine(endLine);
        stackFrame.setProcedureId(procedureId);
        stackFrame.setStartColumn(startColumn);
        stackFrame.setStartLine(startLine);
        stackFrame.setText(text);
        return stackFrame;
      }
      static Builder newBuilder() {
        return new Builder();
      }
      // Builds a wrapper from the API proto, copying only the fields that are present.
      static ScriptStackFrame fromPb(
          com.google.api.services.bigquery.model.ScriptStackFrame stackFrame) {
        Builder builder = newBuilder();
        if (stackFrame.getEndColumn() != null) {
          builder.setEndColumn(stackFrame.getEndColumn());
        }
        if (stackFrame.getEndLine() != null) {
          builder.setEndLine(stackFrame.getEndLine());
        }
        if (stackFrame.getProcedureId() != null) {
          builder.setProcedureId(stackFrame.getProcedureId());
        }
        if (stackFrame.getStartColumn() != null) {
          builder.setStartColumn(stackFrame.getStartColumn());
        }
        if (stackFrame.getStartLine() != null) {
          builder.setStartLine(stackFrame.getStartLine());
        }
        if (stackFrame.getText() != null) {
          builder.setText(stackFrame.getText());
        }
        return builder.build();
      }
    }
    private ScriptStatistics(Builder builder) {
      this.evaluationKind = builder.evaluationKind;
      this.stackFrames = builder.stackFrames;
    }
    /** Builder for {@link ScriptStatistics}. */
    static class Builder {
      private String evaluationKind;
      private List<ScriptStackFrame> stackFrames;
      private Builder() {}
      Builder setEvaluationKind(String evaluationKind) {
        this.evaluationKind = evaluationKind;
        return this;
      }
      Builder setStackFrames(List<ScriptStackFrame> stackFrames) {
        this.stackFrames = stackFrames;
        return this;
      }
      ScriptStatistics build() {
        return new ScriptStatistics(this);
      }
    }
    static Builder newBuilder() {
      return new Builder();
    }
    /** Returns child job was a statement or expression. */
    public String getEvaluationKind() {
      return evaluationKind;
    }
    /**
     * Stack trace showing the line/column/procedure name of each frame on the stack at the point
     * where the current evaluation happened. The leaf frame is first, the primary script is last.
     * Never empty.
     */
    public List<ScriptStackFrame> getStackFrames() {
      return stackFrames;
    }
    ToStringHelper toStringHelper() {
      return MoreObjects.toStringHelper(this)
          .add("evaluationKind", evaluationKind)
          .add("stackFrames", stackFrames);
    }
    @Override
    public String toString() {
      return toStringHelper().toString();
    }
    @Override
    public boolean equals(Object obj) {
      return obj == this
          || obj != null
              && obj.getClass().equals(ScriptStatistics.class)
              && Objects.equals(toPb(), ((ScriptStatistics) obj).toPb());
    }
    @Override
    public int hashCode() {
      return Objects.hash(evaluationKind, stackFrames);
    }
    // NOTE(review): unlike fromPb, this sets stackFrames without a null check; Lists.transform
    // would NPE on a null list — confirm stackFrames is always non-null when toPb is called.
    com.google.api.services.bigquery.model.ScriptStatistics toPb() {
      com.google.api.services.bigquery.model.ScriptStatistics scriptStatistics =
          new com.google.api.services.bigquery.model.ScriptStatistics();
      scriptStatistics.setEvaluationKind(evaluationKind);
      scriptStatistics.setStackFrames(
          Lists.transform(stackFrames, ScriptStackFrame.TO_PB_FUNCTION));
      return scriptStatistics;
    }
    static ScriptStatistics fromPb(
        com.google.api.services.bigquery.model.ScriptStatistics scriptStatistics) {
      Builder builder = newBuilder();
      if (scriptStatistics.getEvaluationKind() != null) {
        builder.setEvaluationKind(scriptStatistics.getEvaluationKind());
      }
      if (scriptStatistics.getStackFrames() != null) {
        builder.setStackFrames(
            Lists.transform(scriptStatistics.getStackFrames(), ScriptStackFrame.FROM_PB_FUNCTION));
      }
      return builder.build();
    }
  }
  /** ReservationUsage contains information about a job's usage of a single reservation. */
  public static class ReservationUsage {
    // Converter from the API proto to this wrapper; used with Lists.transform.
    static final Function<
            com.google.api.services.bigquery.model.JobStatistics.ReservationUsage, ReservationUsage>
        FROM_PB_FUNCTION =
            new Function<
                com.google.api.services.bigquery.model.JobStatistics.ReservationUsage,
                ReservationUsage>() {
              @Override
              public ReservationUsage apply(
                  com.google.api.services.bigquery.model.JobStatistics.ReservationUsage usage) {
                return ReservationUsage.fromPb(usage);
              }
            };
    // Converter back to the API proto.
    static final Function<
            ReservationUsage, com.google.api.services.bigquery.model.JobStatistics.ReservationUsage>
        TO_PB_FUNCTION =
            new Function<
                ReservationUsage,
                com.google.api.services.bigquery.model.JobStatistics.ReservationUsage>() {
              @Override
              public com.google.api.services.bigquery.model.JobStatistics.ReservationUsage apply(
                  ReservationUsage usage) {
                return usage.toPb();
              }
            };
    private final String name;
    private final Long slotMs;
    /** Builder for {@link ReservationUsage}. */
    public static class Builder {
      private String name;
      private Long slotMs;
      private Builder() {};
      Builder setName(String name) {
        this.name = name;
        return this;
      }
      Builder setSlotMs(Long slotMs) {
        this.slotMs = slotMs;
        return this;
      }
      ReservationUsage build() {
        return new ReservationUsage(this);
      }
    }
    private ReservationUsage(Builder builder) {
      this.name = builder.name;
      this.slotMs = builder.slotMs;
    }
    // Returns the utilized reservation name, or "unreserved" for on-demand usage.
    public String getName() {
      return name;
    }
    // Returns the slot-milliseconds utilized within the given reservation.
    public Long getSlotMs() {
      return slotMs;
    }
    static Builder newBuilder() {
      return new Builder();
    }
    ToStringHelper toStringHelper() {
      return MoreObjects.toStringHelper(this).add("name", name).add("slotMs", slotMs);
    }
    @Override
    public String toString() {
      return toStringHelper().toString();
    }
    // Equality is defined by the proto representation, which covers both fields.
    @Override
    public boolean equals(Object obj) {
      return obj == this
          || obj != null
              && obj.getClass().equals(ReservationUsage.class)
              && Objects.equals(toPb(), ((ReservationUsage) obj).toPb());
    }
    @Override
    public int hashCode() {
      return Objects.hash(name, slotMs);
    }
    com.google.api.services.bigquery.model.JobStatistics.ReservationUsage toPb() {
      com.google.api.services.bigquery.model.JobStatistics.ReservationUsage usage =
          new com.google.api.services.bigquery.model.JobStatistics.ReservationUsage();
      usage.setName(name);
      usage.setSlotMs(slotMs);
      return usage;
    }
    static ReservationUsage fromPb(
        com.google.api.services.bigquery.model.JobStatistics.ReservationUsage usage) {
      Builder builder = newBuilder();
      builder.setName(usage.getName());
      builder.setSlotMs(usage.getSlotMs());
      return builder.build();
    }
  }
// TransactionInfo contains information about a multi-statement transaction that may have
// associated with a job.
public static class TransactionInfo {
// TransactionID is the system-generated identifier for the transaction.
private final String transactionId;
public static class Builder {
private String transactionId;
private Builder() {};
Builder setTransactionId(String transactionId) {
this.transactionId = transactionId;
return this;
}
TransactionInfo build() {
return new TransactionInfo(this);
}
}
private TransactionInfo(Builder builder) {
this.transactionId = builder.transactionId;
}
public String getTransactionId() {
return transactionId;
}
static Builder newbuilder() {
return new Builder();
}
ToStringHelper toStringHelper() {
return MoreObjects.toStringHelper(this).add("transactionId", transactionId);
}
@Override
public String toString() {
return toStringHelper().toString();
}
@Override
public boolean equals(Object obj) {
return obj == this
|| obj != null
&& obj.getClass().equals(TransactionInfo.class)
&& Objects.equals(toPb(), ((TransactionInfo) obj).toPb());
}
@Override
public int hashCode() {
return Objects.hash(transactionId);
}
com.google.api.services.bigquery.model.TransactionInfo toPb() {
com.google.api.services.bigquery.model.TransactionInfo transactionInfo =
new com.google.api.services.bigquery.model.TransactionInfo();
transactionInfo.setTransactionId(transactionId);
return transactionInfo;
}
static TransactionInfo fromPb(
com.google.api.services.bigquery.model.TransactionInfo transactionInfo) {
Builder builder = newbuilder();
builder.setTransactionId(transactionInfo.getTransactionId());
return builder.build();
}
}
// SessionInfo contains information about the session if this job is part of one.
public static class SessionInfo {
// Id of the session
private final String sessionId;
public static class Builder {
private String sessionId;
private Builder() {};
Builder setSessionId(String sessionId) {
this.sessionId = sessionId;
return this;
}
SessionInfo build() {
return new SessionInfo(this);
}
}
private SessionInfo(Builder builder) {
this.sessionId = builder.sessionId;
}
public String getSessionId() {
return sessionId;
}
static Builder newBuilder() {
return new Builder();
}
ToStringHelper toStringHelper() {
return MoreObjects.toStringHelper(this).add("sessionId", sessionId);
}
@Override
public String toString() {
return toStringHelper().toString();
}
@Override
public boolean equals(Object obj) {
return obj == this
|| obj != null
&& obj.getClass().equals(SessionInfo.class)
&& Objects.equals(toPb(), ((SessionInfo) obj).toPb());
}
@Override
public int hashCode() {
return Objects.hash(sessionId);
}
com.google.api.services.bigquery.model.SessionInfo toPb() {
com.google.api.services.bigquery.model.SessionInfo sessionInfo =
new com.google.api.services.bigquery.model.SessionInfo();
sessionInfo.setSessionId(sessionId);
return sessionInfo;
}
static SessionInfo fromPb(com.google.api.services.bigquery.model.SessionInfo sessionInfo) {
SessionInfo.Builder builder = newBuilder();
builder.setSessionId(sessionInfo.getSessionId());
return builder.build();
}
}
abstract static class Builder<T extends JobStatistics, B extends Builder<T, B>> {
private Long creationTime;
private Long endTime;
private Long startTime;
private Long numChildJobs;
private String parentJobId;
private ScriptStatistics scriptStatistics;
private List<ReservationUsage> reservationUsage;
private TransactionInfo transactionInfo;
private SessionInfo sessionInfo;
protected Builder() {}
protected Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
this.creationTime = statisticsPb.getCreationTime();
this.endTime = statisticsPb.getEndTime();
this.startTime = statisticsPb.getStartTime();
this.numChildJobs = statisticsPb.getNumChildJobs();
this.parentJobId = statisticsPb.getParentJobId();
if (statisticsPb.getScriptStatistics() != null) {
this.scriptStatistics = ScriptStatistics.fromPb(statisticsPb.getScriptStatistics());
}
if (reservationUsage != null) {
this.reservationUsage =
Lists.transform(statisticsPb.getReservationUsage(), ReservationUsage.FROM_PB_FUNCTION);
}
if (statisticsPb.getTransactionInfo() != null) {
this.transactionInfo = TransactionInfo.fromPb(statisticsPb.getTransactionInfo());
}
if (statisticsPb.getSessionInfo() != null) {
this.sessionInfo = SessionInfo.fromPb(statisticsPb.getSessionInfo());
}
}
@SuppressWarnings("unchecked")
protected B self() {
return (B) this;
}
B setCreationTimestamp(Long creationTime) {
this.creationTime = creationTime;
return self();
}
B setEndTime(Long endTime) {
this.endTime = endTime;
return self();
}
B setStartTime(Long startTime) {
this.startTime = startTime;
return self();
}
abstract T build();
}
  // Copies the common fields from the builder.
  // NOTE(review): the parameter uses the raw Builder type — presumably Builder<?, ?> was
  // intended; confirm before tightening.
  protected JobStatistics(Builder builder) {
    this.creationTime = builder.creationTime;
    this.endTime = builder.endTime;
    this.startTime = builder.startTime;
    this.numChildJobs = builder.numChildJobs;
    this.parentJobId = builder.parentJobId;
    this.scriptStatistics = builder.scriptStatistics;
    this.reservationUsage = builder.reservationUsage;
    this.transactionInfo = builder.transactionInfo;
    this.sessionInfo = builder.sessionInfo;
  }
  /** Returns the creation time of the job in milliseconds since epoch. */
  public Long getCreationTime() {
    return creationTime;
  }
  /**
   * Returns the end time of the job in milliseconds since epoch. Returns {@code null} if the job
   * has not finished yet.
   */
  public Long getEndTime() {
    return endTime;
  }
  /**
   * Returns the start time of the job in milliseconds since epoch. Returns {@code null} if the job
   * has not started yet.
   */
  public Long getStartTime() {
    return startTime;
  }
  /** Returns the number of child job executed. */
  public Long getNumChildJobs() {
    return numChildJobs;
  }
  /** Returns the parent job id of child job. */
  public String getParentJobId() {
    return parentJobId;
  }
  /** Returns the statistics for a child job of a script. */
  public ScriptStatistics getScriptStatistics() {
    return scriptStatistics;
  }
  /** ReservationUsage contains information about a job's usage of a single reservation. */
  public List<ReservationUsage> getReservationUsage() {
    return reservationUsage;
  }
  /** Info indicates the transaction ID associated with the job, if any. */
  public TransactionInfo getTransactionInfo() {
    return transactionInfo;
  }
  /** Info of the session if this job is part of one. */
  public SessionInfo getSessionInfo() {
    return sessionInfo;
  }
  ToStringHelper toStringHelper() {
    return MoreObjects.toStringHelper(this)
        .add("creationTime", creationTime)
        .add("endTime", endTime)
        .add("startTime", startTime)
        .add("numChildJobs", numChildJobs)
        .add("parentJobId", parentJobId)
        .add("scriptStatistics", scriptStatistics)
        .add("reservationUsage", reservationUsage)
        .add("transactionInfo", transactionInfo)
        .add("sessionInfo", sessionInfo);
  }
  @Override
  public String toString() {
    return toStringHelper().toString();
  }
  // Hash of the common fields; subclasses fold this into their own hashCode.
  final int baseHashCode() {
    return Objects.hash(
        creationTime,
        endTime,
        startTime,
        numChildJobs,
        parentJobId,
        scriptStatistics,
        reservationUsage,
        transactionInfo,
        sessionInfo);
  }
  // Deep equality via the proto representation; subclasses' equals() delegate here.
  final boolean baseEquals(JobStatistics jobStatistics) {
    return Objects.equals(toPb(), jobStatistics.toPb());
  }
  // Serializes the common fields back into the API proto; nested messages only when non-null.
  com.google.api.services.bigquery.model.JobStatistics toPb() {
    com.google.api.services.bigquery.model.JobStatistics statistics =
        new com.google.api.services.bigquery.model.JobStatistics();
    statistics.setCreationTime(creationTime);
    statistics.setEndTime(endTime);
    statistics.setStartTime(startTime);
    statistics.setNumChildJobs(numChildJobs);
    statistics.setParentJobId(parentJobId);
    if (scriptStatistics != null) {
      statistics.setScriptStatistics(scriptStatistics.toPb());
    }
    if (reservationUsage != null) {
      statistics.setReservationUsage(
          Lists.transform(reservationUsage, ReservationUsage.TO_PB_FUNCTION));
    }
    if (transactionInfo != null) {
      statistics.setTransactionInfo(transactionInfo.toPb());
    }
    if (sessionInfo != null) {
      statistics.setSessionInfo(sessionInfo.toPb());
    }
    return statistics;
  }
  // Dispatches to the statistics subclass matching the job's configuration type.
  // The unchecked cast is unavoidable here: the caller chooses T.
  @SuppressWarnings("unchecked")
  static <T extends JobStatistics> T fromPb(com.google.api.services.bigquery.model.Job jobPb) {
    JobConfiguration jobConfigPb = jobPb.getConfiguration();
    com.google.api.services.bigquery.model.JobStatistics statisticPb = jobPb.getStatistics();
    if (jobConfigPb.getLoad() != null) {
      return (T) LoadStatistics.fromPb(statisticPb);
    } else if (jobConfigPb.getExtract() != null) {
      return (T) ExtractStatistics.fromPb(statisticPb);
    } else if (jobConfigPb.getQuery() != null) {
      return (T) QueryStatistics.fromPb(statisticPb);
    } else if (jobConfigPb.getCopy() != null) {
      return (T) CopyStatistics.fromPb(statisticPb);
    } else {
      throw new IllegalArgumentException("unknown job configuration: " + jobConfigPb);
    }
  }
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http.multipart;
import io.netty.handler.codec.DecoderException;
import io.netty.handler.codec.http.HttpConstants;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.util.internal.ObjectUtil;
import io.netty.util.internal.StringUtil;
import java.nio.charset.Charset;
import java.util.List;
/**
* This decoder will decode Body and can handle POST BODY.
*
* You <strong>MUST</strong> call {@link #destroy()} after completion to release all resources.
*
*/
public class HttpPostRequestDecoder implements InterfaceHttpPostRequestDecoder {
    // Default number of bytes (10 MiB) of already-decoded data after which read bytes are
    // discarded to bound memory usage.
    static final int DEFAULT_DISCARD_THRESHOLD = 10 * 1024 * 1024;
    // Concrete decoder this facade delegates to: multipart or standard (url-encoded),
    // chosen once in the constructor based on the request's Content-Type.
    private final InterfaceHttpPostRequestDecoder decoder;

    /**
     *
     * @param request
     *            the request to decode
     * @throws NullPointerException
     *             for request
     * @throws ErrorDataDecoderException
     *             if the default charset was wrong when decoding or other
     *             errors
     */
    public HttpPostRequestDecoder(HttpRequest request) {
        this(new DefaultHttpDataFactory(DefaultHttpDataFactory.MINSIZE), request, HttpConstants.DEFAULT_CHARSET);
    }

    /**
     *
     * @param factory
     *            the factory used to create InterfaceHttpData
     * @param request
     *            the request to decode
     * @throws NullPointerException
     *             for request or factory
     * @throws ErrorDataDecoderException
     *             if the default charset was wrong when decoding or other
     *             errors
     */
    public HttpPostRequestDecoder(HttpDataFactory factory, HttpRequest request) {
        this(factory, request, HttpConstants.DEFAULT_CHARSET);
    }

    /**
     *
     * @param factory
     *            the factory used to create InterfaceHttpData
     * @param request
     *            the request to decode
     * @param charset
     *            the charset to use as default
     * @throws NullPointerException
     *             for request or charset or factory
     * @throws ErrorDataDecoderException
     *             if the default charset was wrong when decoding or other
     *             errors
     */
    public HttpPostRequestDecoder(HttpDataFactory factory, HttpRequest request, Charset charset) {
        ObjectUtil.checkNotNull(factory, "factory");
        ObjectUtil.checkNotNull(request, "request");
        ObjectUtil.checkNotNull(charset, "charset");
        // Fill default values
        // Pick the delegate implementation from the request's Content-Type header.
        if (isMultipart(request)) {
            decoder = new HttpPostMultipartRequestDecoder(factory, request, charset);
        } else {
            decoder = new HttpPostStandardRequestDecoder(factory, request, charset);
        }
    }
/**
* states follow NOTSTARTED PREAMBLE ( (HEADERDELIMITER DISPOSITION (FIELD |
* FILEUPLOAD))* (HEADERDELIMITER DISPOSITION MIXEDPREAMBLE (MIXEDDELIMITER
* MIXEDDISPOSITION MIXEDFILEUPLOAD)+ MIXEDCLOSEDELIMITER)* CLOSEDELIMITER)+
* EPILOGUE
*
* First getStatus is: NOSTARTED
*
* Content-type: multipart/form-data, boundary=AaB03x => PREAMBLE in Header
*
* --AaB03x => HEADERDELIMITER content-disposition: form-data; name="field1"
* => DISPOSITION
*
* Joe Blow => FIELD --AaB03x => HEADERDELIMITER content-disposition:
* form-data; name="pics" => DISPOSITION Content-type: multipart/mixed,
* boundary=BbC04y
*
* --BbC04y => MIXEDDELIMITER Content-disposition: attachment;
* filename="file1.txt" => MIXEDDISPOSITION Content-Type: text/plain
*
* ... contents of file1.txt ... => MIXEDFILEUPLOAD --BbC04y =>
* MIXEDDELIMITER Content-disposition: file; filename="file2.gif" =>
* MIXEDDISPOSITION Content-type: image/gif Content-Transfer-Encoding:
* binary
*
* ...contents of file2.gif... => MIXEDFILEUPLOAD --BbC04y-- =>
* MIXEDCLOSEDELIMITER --AaB03x-- => CLOSEDELIMITER
*
* Once CLOSEDELIMITER is found, last getStatus is EPILOGUE
*/
protected enum MultiPartStatus {
NOTSTARTED, PREAMBLE, HEADERDELIMITER, DISPOSITION, FIELD, FILEUPLOAD, MIXEDPREAMBLE, MIXEDDELIMITER,
MIXEDDISPOSITION, MIXEDFILEUPLOAD, MIXEDCLOSEDELIMITER, CLOSEDELIMITER, PREEPILOGUE, EPILOGUE
}
/**
* Check if the given request is a multipart request
* @return True if the request is a Multipart request
*/
public static boolean isMultipart(HttpRequest request) {
String mimeType = request.headers().get(HttpHeaderNames.CONTENT_TYPE);
if (mimeType != null && mimeType.startsWith(HttpHeaderValues.MULTIPART_FORM_DATA.toString())) {
return getMultipartDataBoundary(mimeType) != null;
}
return false;
}
/**
* Check from the request ContentType if this request is a Multipart request.
* @return an array of String if multipartDataBoundary exists with the multipartDataBoundary
* as first element, charset if any as second (missing if not set), else null
*/
protected static String[] getMultipartDataBoundary(String contentType) {
// Check if Post using "multipart/form-data; boundary=--89421926422648 [; charset=xxx]"
String[] headerContentType = splitHeaderContentType(contentType);
final String multiPartHeader = HttpHeaderValues.MULTIPART_FORM_DATA.toString();
if (headerContentType[0].regionMatches(true, 0, multiPartHeader, 0 , multiPartHeader.length())) {
int mrank;
int crank;
final String boundaryHeader = HttpHeaderValues.BOUNDARY.toString();
if (headerContentType[1].regionMatches(true, 0, boundaryHeader, 0, boundaryHeader.length())) {
mrank = 1;
crank = 2;
} else if (headerContentType[2].regionMatches(true, 0, boundaryHeader, 0, boundaryHeader.length())) {
mrank = 2;
crank = 1;
} else {
return null;
}
String boundary = StringUtil.substringAfter(headerContentType[mrank], '=');
if (boundary == null) {
throw new ErrorDataDecoderException("Needs a boundary value");
}
if (boundary.charAt(0) == '"') {
String bound = boundary.trim();
int index = bound.length() - 1;
if (bound.charAt(index) == '"') {
boundary = bound.substring(1, index);
}
}
final String charsetHeader = HttpHeaderValues.CHARSET.toString();
if (headerContentType[crank].regionMatches(true, 0, charsetHeader, 0, charsetHeader.length())) {
String charset = StringUtil.substringAfter(headerContentType[crank], '=');
if (charset != null) {
return new String[] {"--" + boundary, charset};
}
}
return new String[] {"--" + boundary};
}
return null;
}
@Override
public boolean isMultipart() {
return decoder.isMultipart();
}
@Override
public void setDiscardThreshold(int discardThreshold) {
decoder.setDiscardThreshold(discardThreshold);
}
@Override
public int getDiscardThreshold() {
return decoder.getDiscardThreshold();
}
@Override
public List<InterfaceHttpData> getBodyHttpDatas() {
return decoder.getBodyHttpDatas();
}
@Override
public List<InterfaceHttpData> getBodyHttpDatas(String name) {
return decoder.getBodyHttpDatas(name);
}
@Override
public InterfaceHttpData getBodyHttpData(String name) {
return decoder.getBodyHttpData(name);
}
@Override
public InterfaceHttpPostRequestDecoder offer(HttpContent content) {
return decoder.offer(content);
}
@Override
public boolean hasNext() {
return decoder.hasNext();
}
@Override
public InterfaceHttpData next() {
return decoder.next();
}
@Override
public InterfaceHttpData currentPartialHttpData() {
return decoder.currentPartialHttpData();
}
@Override
public void destroy() {
decoder.destroy();
}
@Override
public void cleanFiles() {
decoder.cleanFiles();
}
@Override
public void removeHttpDataFromClean(InterfaceHttpData data) {
decoder.removeHttpDataFromClean(data);
}
/**
* Split the very first line (Content-Type value) in 3 Strings
*
* @return the array of 3 Strings
*/
private static String[] splitHeaderContentType(String sb) {
int aStart;
int aEnd;
int bStart;
int bEnd;
int cStart;
int cEnd;
aStart = HttpPostBodyUtil.findNonWhitespace(sb, 0);
aEnd = sb.indexOf(';');
if (aEnd == -1) {
return new String[] { sb, "", "" };
}
bStart = HttpPostBodyUtil.findNonWhitespace(sb, aEnd + 1);
if (sb.charAt(aEnd - 1) == ' ') {
aEnd--;
}
bEnd = sb.indexOf(';', bStart);
if (bEnd == -1) {
bEnd = HttpPostBodyUtil.findEndOfString(sb);
return new String[] { sb.substring(aStart, aEnd), sb.substring(bStart, bEnd), "" };
}
cStart = HttpPostBodyUtil.findNonWhitespace(sb, bEnd + 1);
if (sb.charAt(bEnd - 1) == ' ') {
bEnd--;
}
cEnd = HttpPostBodyUtil.findEndOfString(sb);
return new String[] { sb.substring(aStart, aEnd), sb.substring(bStart, bEnd), sb.substring(cStart, cEnd) };
}
/**
* Exception when try reading data from request in chunked format, and not
* enough data are available (need more chunks)
*/
public static class NotEnoughDataDecoderException extends DecoderException {
private static final long serialVersionUID = -7846841864603865638L;
public NotEnoughDataDecoderException() {
}
public NotEnoughDataDecoderException(String msg) {
super(msg);
}
public NotEnoughDataDecoderException(Throwable cause) {
super(cause);
}
public NotEnoughDataDecoderException(String msg, Throwable cause) {
super(msg, cause);
}
}
/**
* Exception when the body is fully decoded, even if there is still data
*/
public static class EndOfDataDecoderException extends DecoderException {
private static final long serialVersionUID = 1336267941020800769L;
}
/**
* Exception when an error occurs while decoding
*/
public static class ErrorDataDecoderException extends DecoderException {
private static final long serialVersionUID = 5020247425493164465L;
public ErrorDataDecoderException() {
}
public ErrorDataDecoderException(String msg) {
super(msg);
}
public ErrorDataDecoderException(Throwable cause) {
super(cause);
}
public ErrorDataDecoderException(String msg, Throwable cause) {
super(msg, cause);
}
}
}
| |
/**
* Copyright 2005-2007 Xue Yong Zhi, Ye Zheng
* Distributed under the BSD License
*/
package com.xruby.runtime.lang;
import com.xruby.runtime.builtin.RubyArray;
import com.xruby.runtime.builtin.RubyFloat;
import com.xruby.runtime.builtin.RubyInteger;
import com.xruby.runtime.builtin.RubyString;
import java.util.HashMap;
import java.util.Map;
/*
'return' inside a block will cause return in the method, for example:
def f
1.times {return 1}
end
Or worse (compile-time undetectable):
def f block; block.call; print 'yyy'; end
def test_return2; a = Proc.new {return}; f a; end
test_return2
so we need to check if this happend after each function call.
As java does not support multiple return value, we made RubyValue
subclass of BlockCallStatus. Should be replaced with a 'Context' object
in the future.
*/
abstract class BlockCallStatus {
    /** True when the last block invocation executed an explicit 'return'. */
    private boolean hasReturned = false;
    /** True when the last block invocation executed a 'break' (and not a 'return'). */
    private boolean hasBroken = false;
    /** When set, a 'break' escaping the block is illegal (proc-closure case). */
    private boolean breakForbidden = false;

    public void setReturnedInBlock(boolean returned, boolean break_or_returned, boolean do_not_allow_break) {
        hasReturned = returned;
        // A plain break is "break or return happened" minus "it was a return".
        hasBroken = !returned && break_or_returned;
        breakForbidden = do_not_allow_break;
    }

    public boolean returnedInBlock() {
        if (hasBroken && breakForbidden) {
            //TODO should check context!
            throw new RubyException(RubyRuntime.LocalJumpErrorClass, "break from proc-closure");
        }
        return hasReturned;
    }
}
public abstract class RubyValue extends BlockCallStatus implements Cloneable {
private boolean frozen_ = false;
private boolean taint_ = false;
private static Map<RubyValue, Map<RubyID, RubyValue>> genericIvTbl;
public abstract void setRubyClass(RubyClass klass);
public abstract RubyClass getRubyClass();
public RubyValue clone() {
RubyValue v;
try {
v = (RubyValue) super.clone();
} catch (CloneNotSupportedException e) {
throw new RubyException(RubyRuntime.ExceptionClass, e.toString());
}
return v;
}
public boolean equals(Object o) {
if (this == o) {
return true;
} else if (o instanceof RubyValue) {
return RubyAPI.callPublicOneArgMethod(this, (RubyValue)o, null, RubyID.equalID).isTrue();
} else {
return false;
}
}
public boolean frozen() {
return frozen_;
}
public void freeze() {
frozen_ = true;
}
public boolean tainted() {
return taint_;
}
public void taint() {
taint_ = true;
}
public RubyValue getInstanceVariable(RubyID id) {
if (genericIvTbl != null) {
Map<RubyID, RubyValue> table = genericIvTbl.get(this);
if (table != null) {
RubyValue v = table.get(id);
if (v != null) {
return v;
}
}
}
return RubyConstant.QNIL;
}
public RubyValue setInstanceVariable(RubyValue value, RubyID id) {
if (genericIvTbl == null) {
genericIvTbl = new HashMap<RubyValue, Map<RubyID, RubyValue>>();
}
Map<RubyID, RubyValue> table = genericIvTbl.get(this);
if (table == null) {
table = new HashMap<RubyID, RubyValue>();
genericIvTbl.put(this, table);
}
table.put(id, value);
return value;
}
public RubyClass getSingletonClass() {
return getSingletonClass(null);
}
public RubyClass getSingletonClass(RubyModule scope) {
RubyClass klass = this.getRubyClass();
if (klass.isSingleton()
&& klass.getInstanceVariable(RubyID.attachedID) == this) {
klass.setScope(scope);
klass.setAccessPublic();
return klass;
} else {
return new RubySingletonClass(this, this.getRubyClass(), scope);
}
}
public boolean respondTo(RubyID id) {
RubyClass klass = this.getRubyClass();
if (klass.findMethod(RubyID.RESPOND_TO_P) == RubyRuntime.getRespondMethod()) {
return klass.isMethodBound(id, false);
} else {
return RubyAPI.callOneArgMethod(this, id.toSymbol(), null, RubyID.RESPOND_TO_P).isTrue();
}
}
public boolean isTrue() {
return true;
}
public boolean isKindOf(RubyModule m) {
return m.isKindOf(this.getRubyClass());
}
public String toString() {
return getRubyClass().getName() + super.toString();
}
public String inspect() {
return RubyAPI.callNoArgMethod(this, null, RubyID.toSID).toStr();
}
public RubyID toID() {
throw new RubyException(RubyRuntime.TypeErrorClass, this.inspect() + " is not a symbol");
}
public int toInt() {
return this.convertToInteger().toInt();
}
public long toLong() {
return this.convertToInteger().toLong();
}
public RubyArray toAry() {
return this.contertToArray().toAry();
}
public double toFloat() {
return this.convertToFloat().toFloat();
}
public String toStr() {
return this.convertToString().toStr();
}
public String asString() {
RubyValue value = RubyAPI.callPublicNoArgMethod(this, null, RubyID.toSID);
if (value instanceof RubyString) {
return value.toStr();
}
return "#<" + this.getRubyClass().getName() + ":0x" + Integer.toHexString(this.hashCode()) + "x>";
}
public RubyInteger toRubyInteger() {
return this.convertToInteger().toRubyInteger();
}
public RubyFloat toRubyFloat() {
return this.convertToFloat().toRubyFloat();
}
public RubyString toRubyString() {
return this.convertToString().toRubyString();
}
private RubyValue convertToInteger() {
return convertToType(RubyRuntime.IntegerClass, RubyID.toIntID);
}
private RubyValue contertToArray() {
return convertToType(RubyRuntime.ArrayClass, RubyID.toAryID);
}
private RubyValue convertToFloat() {
return convertToType(RubyRuntime.FloatClass, RubyID.toFID);
}
private RubyValue convertToString() {
return convertToType(RubyRuntime.StringClass, RubyID.toStrID);
}
private RubyValue convertToType(RubyClass klass, RubyID id) {
if (this.isKindOf(klass)) {
return this;
}
if (!this.respondTo(id)) {
throw new RubyException("can't convert " + this.getRubyClass().getName() + " into " + klass.getName());
}
RubyValue v = RubyAPI.callNoArgMethod(this, null, id);
if (!v.isKindOf(klass)) {
throw new RubyException(this.getRubyClass().getName() + "#" + id.toString() + " should return " + klass.getName());
}
return v;
}
public RubyMethod findPublicMethod(RubyID mid) {
return getRubyClass().findOwnPublicMethod(mid);
}
public RubyMethod findMethod(RubyID mid) {
return getRubyClass().findOwnMethod(mid);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import org.apache.hadoop.mapred.JobInProgress.Counter;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.metrics.MetricsContext;
import org.mortbay.log.Log;
/**
* A schedulable pool of jobs.
*/
public class Pool {
    /** Name of the default pool, where jobs with no pool parameter go. */
    public static final String DEFAULT_POOL_NAME = "default";

    /** Pool name. */
    private String name;

    /** Jobs in this specific pool; does not include children pools' jobs. */
    private Collection<JobInProgress> jobs = new ArrayList<JobInProgress>();

    /** Scheduling mode for jobs inside the pool (fair or FIFO) */
    private SchedulingMode schedulingMode;

    // Per-task-type schedulables wrapping this pool's jobs.
    private PoolSchedulable mapSchedulable;
    private PoolSchedulable reduceSchedulable;

    // Accumulated scheduling credit per task type (adjusted via updateCredit).
    private float mapCredit = 0;
    private float reduceCredit = 0;

    // Number of jobs that have been removed from this pool (see removeJob).
    private int nFinishedjobs = 0;

    // Total input size (MB) of removed jobs, from job.getInputLength().
    private float inputSize = 0;
    // Accumulated HDFS bytes read (MB) by removed jobs' map counters.
    private float mapInSize = 0;
    // Accumulated local-file bytes written (MB) by removed jobs' reduce counters.
    private float reduceInSize = 0;

    // Running averages over removed jobs, in seconds.
    private float responseTime = 0;
    private float mapResponseTime = 0;
    private float reduceResponseTime = 0;

    // Running averages of response time per MB of input ("stretch").
    private float stretch = 0;
    private float mapStretch = 0;
    private float reduceStretch = 0;

    /**
     * Creates a pool with the given name and one schedulable per task type.
     */
    public Pool(CreditScheduler scheduler, String name) {
        this.name = name;
        mapSchedulable = new PoolSchedulable(scheduler, this, TaskType.MAP);
        reduceSchedulable = new PoolSchedulable(scheduler, this, TaskType.REDUCE);
    }

    public Collection<JobInProgress> getJobs() {
        return jobs;
    }

    /** Adds a job to this pool and to both per-task-type schedulables. */
    public void addJob(JobInProgress job) {
        jobs.add(job);
        mapSchedulable.addJob(job);
        reduceSchedulable.addJob(job);
    }

    /**
     * Removes a job and folds its runtime and size figures into the pool's
     * running-average metrics.
     */
    public void removeJob(JobInProgress job) {
        Counters mapCounters = new Counters();
        Counters reduceCounters = new Counters();
        // Fall back to empty counters when the job cannot provide them.
        boolean isFine = job.getMapCounters(mapCounters);
        mapCounters = (isFine? mapCounters : new Counters());
        isFine = job.getReduceCounters(reduceCounters);
        reduceCounters = (isFine? reduceCounters : new Counters());
        float jobminputsize = 0;//job.getInputLength();
        float jobrinputsize = 0;//job.
        // NOTE(review): long division truncates to whole seconds before the
        // float conversion -- confirm sub-second precision is not wanted here.
        float exitingJobResponseTime = (job.finishTime - job.startTime)/1000;
        float exitingJobMResponseTime = job.getJobCounters().getCounter(JobInProgress.Counter.SLOTS_MILLIS_MAPS) /
            1000;
        //job.getJobCounters().getCounter(JobInProgress.Counter.SLOTS_MILLIS_MAPS);
        //mapCounters.getCounter(JobInProgress.Counter.SLOTS_MILLIS_MAPS);
        float exitingJobRResponseTime = job.getJobCounters().getCounter(JobInProgress.Counter.SLOTS_MILLIS_REDUCES) /
            1000;
        //reduceCounters.getCounter(JobInProgress.Counter.SLOTS_MILLIS_REDUCES);
        // Map input / reduce output volumes in MB, from the filesystem counters.
        jobminputsize = (float)mapCounters.getGroup("FileSystemCounters").getCounter("HDFS_BYTES_READ") /
            (float)(1024 * 1024);
        jobrinputsize = (float)reduceCounters.getGroup("FileSystemCounters").getCounter("FILE_BYTES_WRITTEN") /
            (float)(1024 * 1024);
        this.mapInSize += jobminputsize;
        this.reduceInSize += jobrinputsize;
        jobs.remove(job);
        mapSchedulable.removeJob(job);
        reduceSchedulable.removeJob(job);
        //update metrics
        nFinishedjobs++;
        inputSize += ((float)job.getInputLength()/(1024 * 1024));
        //mapInSize += inFormat.get
        // Incremental running average: avg_n = (avg_{n-1} * (n-1) + x_n) / n.
        responseTime = (responseTime * (nFinishedjobs - 1) + exitingJobResponseTime)
            / nFinishedjobs;
        mapResponseTime = (this.mapResponseTime * (nFinishedjobs - 1) + exitingJobMResponseTime)
            / nFinishedjobs;
        reduceResponseTime = (this.reduceResponseTime * (nFinishedjobs - 1) + exitingJobRResponseTime)
            / nFinishedjobs;
        // NOTE(review): when jobminputsize/jobrinputsize is 0 these divisions
        // yield Infinity or NaN, which then contaminates the running average
        // permanently -- confirm whether zero-input jobs should be skipped.
        stretch = (stretch * (nFinishedjobs - 1) + exitingJobResponseTime / (float)jobminputsize)
            / nFinishedjobs;
        mapStretch = (mapStretch * (nFinishedjobs - 1) + exitingJobMResponseTime / (float)jobminputsize)
            / nFinishedjobs;
        reduceStretch = (reduceStretch * (nFinishedjobs - 1) + exitingJobRResponseTime / (float)jobrinputsize)
            / nFinishedjobs;
    }

    public String getName() {
        return name;
    }

    // ---- metric getters; all values are running aggregates over removed jobs.

    public float getResponseTime(){
        return responseTime;
    }

    public float getStretch(){
        return stretch;
    }

    public float getInputSize(){
        return inputSize;
    }

    public float getMapInSize(){
        return this.mapInSize;
    }

    public float getReduceInSize(){
        return this.reduceInSize;
    }

    public float getMapResponseTime(){
        return this.mapResponseTime;
    }

    public float getReduceResponseTime(){
        return this.reduceResponseTime;
    }

    public float getMapStretch(){
        return this.mapStretch;
    }

    public float getReduceStretch(){
        return this.reduceStretch;
    }

    public SchedulingMode getSchedulingMode() {
        return schedulingMode;
    }

    public void setSchedulingMode(SchedulingMode schedulingMode) {
        this.schedulingMode = schedulingMode;
    }

    /** True when this is the catch-all pool for jobs with no pool parameter. */
    public boolean isDefaultPool() {
        return Pool.DEFAULT_POOL_NAME.equals(name);
    }

    public PoolSchedulable getMapSchedulable() {
        return mapSchedulable;
    }

    public PoolSchedulable getReduceSchedulable() {
        return reduceSchedulable;
    }

    /** Returns the schedulable for the given task type (MAP or REDUCE). */
    public PoolSchedulable getSchedulable(TaskType type) {
        return type == TaskType.MAP ? mapSchedulable : reduceSchedulable;
    }

    public void updateMetrics() {
        mapSchedulable.updateMetrics();
        reduceSchedulable.updateMetrics();
    }

    public int getRunningTasks(TaskType ttype){
        PoolSchedulable taskSchedulable = (ttype == TaskType.MAP ? mapSchedulable : reduceSchedulable);
        return taskSchedulable.getRunningTasks();
    }

    public int getDemand(TaskType ttype){
        PoolSchedulable taskSchedulable = (ttype == TaskType.MAP ? mapSchedulable : reduceSchedulable);
        return taskSchedulable.getDemand();
    }

    /** Adds {@code l} (may be negative) to the credit of the given task type. */
    public void updateCredit(TaskType ttype, float l){
        if (ttype == TaskType.MAP){
            this.mapCredit += l;
        }
        if (ttype == TaskType.REDUCE){
            this.reduceCredit += l;
        }
    }

    public float getCredit(TaskType ttype){
        return ((ttype == TaskType.MAP ? this.mapCredit : this.reduceCredit));
    }
}
| |
package de.geeksfactory.opacclient.apis;
import org.apache.http.NameValuePair;
import org.apache.http.client.CookieStore;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONException;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.nodes.TextNode;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.geeksfactory.opacclient.i18n.StringProvider;
import de.geeksfactory.opacclient.objects.Account;
import de.geeksfactory.opacclient.objects.AccountData;
import de.geeksfactory.opacclient.objects.Detail;
import de.geeksfactory.opacclient.objects.DetailledItem;
import de.geeksfactory.opacclient.objects.Filter;
import de.geeksfactory.opacclient.objects.Library;
import de.geeksfactory.opacclient.objects.SearchRequestResult;
import de.geeksfactory.opacclient.objects.SearchResult;
import de.geeksfactory.opacclient.searchfields.DropdownSearchField;
import de.geeksfactory.opacclient.searchfields.SearchField;
import de.geeksfactory.opacclient.searchfields.SearchQuery;
import de.geeksfactory.opacclient.searchfields.TextSearchField;
import static java.net.URLDecoder.decode;
public class Primo extends BaseApi {
protected static HashMap<String, String> languageCodes = new HashMap<>();
protected static HashMap<String, SearchResult.MediaType> mediaTypeClasses = new HashMap<>();

static {
    // App language code -> Primo locale identifier (sent as prefLang, see start()).
    languageCodes.put("en", "en_US");
    languageCodes.put("de", "de_DE");
    languageCodes.put("es", "es_ES");
    languageCodes.put("ru", "ru_RU");
    languageCodes.put("fr", "fr_FR");
    languageCodes.put("nl", "nl_NL");
    languageCodes.put("cz", "cz_CZ");
    languageCodes.put("zh", "zh_ZH");
    languageCodes.put("it", "it_IT");
    languageCodes.put("pl", "pl_PL");
    // Primo result-row CSS marker class -> internal media type.
    mediaTypeClasses.put("EXLResultMediaTYPEbook", SearchResult.MediaType.BOOK);
    mediaTypeClasses.put("EXLResultMediaTYPEarticle", SearchResult.MediaType.BOOK);
    mediaTypeClasses.put("EXLResultMediaTYPEjournal", SearchResult.MediaType.MAGAZINE);
    mediaTypeClasses.put("EXLResultMediaTYPEvideo", SearchResult.MediaType.MOVIE);
    mediaTypeClasses.put("EXLResultMediaTYPEaudio", SearchResult.MediaType.CD_MUSIC);
    mediaTypeClasses.put("EXLResultMediaTYPEdatabase", SearchResult.MediaType.EDOC);
    mediaTypeClasses.put("EXLResultMediaTYPEimage", SearchResult.MediaType.ART);
    mediaTypeClasses.put("EXLResultMediaTYPEscore", SearchResult.MediaType.SCORE_MUSIC);
    mediaTypeClasses.put("EXLResultMediaTYPEmap", SearchResult.MediaType.MAP);
    // FIX: "EXLResultMediaTYPEwebsite" was registered twice with the same
    // value; the redundant duplicate put was removed.
    mediaTypeClasses.put("EXLResultMediaTYPEwebsite", SearchResult.MediaType.URL);
    mediaTypeClasses.put("EXLResultMediaTYPEarchived_website", SearchResult.MediaType.URL);
    mediaTypeClasses.put("EXLResultMediaTYPEwork", SearchResult.MediaType.BOOK);
    mediaTypeClasses.put("EXLResultMediaTYPEreference_entry", SearchResult.MediaType.EDOC);
}

// Primo locale identifier used for requests (see start()).
protected String languageCode = "en_US";
// Base URL of the catalogue ("baseurl" in the library configuration).
protected String opac_url = "";
// Primo view id ("db" in the library configuration).
protected String vid = "";
// Raw per-library configuration JSON.
protected JSONObject data;
// Last executed query; kept so searchGetPage can re-issue it for paging.
protected List<SearchQuery> last_query;
@Override
public void init(Library lib) {
    // Delegate the shared setup, then pull this API's settings out of the
    // library's configuration JSON.
    super.init(lib);
    library = lib;
    JSONObject config = lib.getData();
    data = config;
    try {
        opac_url = config.getString("baseurl");
        vid = config.getString("db");
    } catch (JSONException e) {
        // Missing mandatory configuration keys are a setup error, not a
        // recoverable condition.
        throw new RuntimeException(e);
    }
}
/**
 * Loads the advanced-search form and builds the request parameters for the
 * given query: all hidden form fields are carried over unchanged, and each
 * non-empty criterion fills one of the form's (at most three) search rows.
 *
 * @throws OpacErrorException if more than three free-text criteria are given
 */
protected List<NameValuePair> buildSearchParams(List<SearchQuery> query)
        throws IOException, OpacErrorException {
    List<NameValuePair> params = new ArrayList<>();
    String tab = "";
    if (data.has("searchtab"))
        tab = "&tab=" + data.optString("searchtab", "default_tab");
    String html =
            httpGet(opac_url + "/action/search.do?mode=Advanced&ct=AdvancedSearch&vid=" + vid + tab,
                    getDefaultEncoding());
    Document doc = Jsoup.parse(html);
    // Carry over every named hidden field (session/state tokens) unchanged.
    for (Element input : doc.select("form[name=searchForm] input[type=hidden]")) {
        if (!input.attr("name").trim().equals("")) {
            params.add(new BasicNameValuePair(input.attr("name"), input.val()));
        }
    }
    params.add(new BasicNameValuePair("fn", "search"));
    int i = 1;
    for (SearchQuery q : query) {
        if (q.getValue().equals("")) {
            continue;
        }
        if (q.getKey().startsWith("#")) {
            // Key starting with '#' is a CSS selector pointing at a concrete
            // form control; submit the value under that control's name.
            params.add(
                    new BasicNameValuePair(doc.select(q.getKey()).attr("name"), q.getValue()));
        } else {
            // Free-text criterion: the advanced form offers only three rows.
            if (i > 3) {
                throw new OpacErrorException(stringProvider.getQuantityString(
                        StringProvider.LIMITED_NUM_OF_CRITERIA, 3, 3));
            }
            // Row i: scope (field id), text, "contains" operator, AND junctor.
            params.add(
                    new BasicNameValuePair(doc.select("#exlidInput_scope_" + i).attr("name"),
                            q.getKey()));
            params.add(new BasicNameValuePair(
                    doc.select("#input_freeText" + (i - 1)).attr("name"), q.getValue()));
            params.add(new BasicNameValuePair(
                    doc.select("#exlidInput_precisionOperator_" + i).attr("name"),
                    "contains"));
            params.add(new BasicNameValuePair(
                    doc.select("#exlidInput_boolOperator_" + i).attr("name"), "AND"));
            i++;
        }
    }
    return params;
}
@Override
public SearchRequestResult search(List<SearchQuery> query)
        throws IOException, OpacErrorException, JSONException {
    // Perform the lazy session setup on first use.
    if (!initialised) {
        start();
    }
    last_query = query;

    // A single pseudo-field named "url" means "fetch this page directly"
    // (used by child queries); otherwise assemble a regular advanced search.
    final String url;
    if (query.size() == 1 && "url".equals(query.get(0).getKey())) {
        url = query.get(0).getValue();
    } else {
        url = opac_url + "/action/search.do" + buildHttpGetParams(buildSearchParams(query));
    }
    String html = httpGet(url, getDefaultEncoding());
    return parse_search(Jsoup.parse(html), 1);
}
/**
 * Parses one Primo result page into a SearchRequestResult: extracts the
 * total hit count, then for each result row builds an HTML description,
 * availability status, record id, cover image and media type.
 *
 * @param doc  parsed result-page HTML
 * @param page 1-based page number to stamp onto each result
 */
protected SearchRequestResult parse_search(Document doc, int page)
        throws OpacErrorException, IOException {
    doc.setBaseUri(opac_url + "/action/search.do");
    List<SearchResult> reslist = new ArrayList<>();
    // Total hit count; stays -1 when the page does not expose it.
    int resnum = -1;
    if (doc.select(".EXLResultsNumbers").size() > 0) {
        try {
            // Strip thousands separators before parsing.
            resnum = Integer.valueOf(
                    doc.select(".EXLResultsNumbers em").first().text().trim().replace(".", "")
                            .replace(",", "").replace(" ", ""));
        } catch (NumberFormatException e) {
            e.printStackTrace();
        }
    }
    for (Element resrow : doc.select(".EXLResult")) {
        SearchResult res = new SearchResult();
        // Compose the list entry: bold title, then author, details and the
        // first availability line, each on its own line.
        StringBuilder description = new StringBuilder();
        description.append("<b>").append(resrow.select(".EXLResultTitle").text())
                   .append("</b>");
        if (resrow.select(".EXLResultAuthor").size() > 0) {
            description.append("<br />").append(resrow.select(".EXLResultAuthor").text());
        }
        if (resrow.select(".EXLResultDetails").size() > 0) {
            description.append("<br />").append(resrow.select(".EXLResultDetails").text());
        }
        String availSelect = ".EXLResultAvailability span, .EXLResultAvailability em";
        if (resrow.select(availSelect).size() > 0) {
            description.append("<br />").append(resrow.select(availSelect).first().ownText());
        }
        res.setInnerhtml(description.toString());
        if (resrow.select(".EXLResultStatusAvailable").size() > 0) {
            res.setStatus(SearchResult.Status.GREEN);
        } else if (resrow.select(".EXLResultStatusNotAvailable").size() > 0) {
            res.setStatus(SearchResult.Status.RED);
        }
        res.setPage(page);
        // The record id is carried in the "doc" query parameter of one of
        // several candidate links.
        for (Element a : resrow
                .select(".EXLResultTitle a, a.EXLThumbnailLinkMarker, .EXLDetailsTab a")) {
            Map<String, String> q = getQueryParamsFirst(a.absUrl("href"));
            if (q.containsKey("doc")) {
                res.setId(q.get("doc"));
                break;
            }
        }
        // Rows without an id cannot be opened later; drop them.
        if (res.getId() == null) {
            continue;
        }
        if (resrow.select("img.EXLBriefResultsCover").size() > 0) {
            String src = resrow.select("img.EXLBriefResultsCover").first().absUrl("src");
            // pixel.png is Primo's transparent placeholder, not a real cover.
            if (!src.contains("pixel.png")) {
                res.setCover(src);
            }
        }
        // First matching CSS marker class determines the media type.
        for (Map.Entry<String, SearchResult.MediaType> cls : mediaTypeClasses.entrySet()) {
            if (resrow.hasClass(cls.getKey()) || resrow.select("." + cls.getKey()).size() > 0) {
                res.setType(cls.getValue());
                break;
            }
        }
        // "Display multiple" rows group several versions; attach a child
        // query that fetches that grouped view via the "url" pseudo-field.
        if (resrow.select("a.EXLBriefResultsDisplayMultipleLink").size() > 0) {
            String url = resrow.select("a.EXLBriefResultsDisplayMultipleLink").first()
                               .absUrl("href");
            List<SearchQuery> query = new ArrayList<>();
            TextSearchField field =
                    new TextSearchField("url", "url", false, false, "url", false, false);
            field.setVisible(false);
            query.add(new SearchQuery(field, url));
            res.setChildQuery(query);
        }
        reslist.add(res);
    }
    return new SearchRequestResult(reslist, resnum, page);
}
/**
 * Parses the query string of {@code url} into a map, keeping only the FIRST
 * value for each key. Keys and values are URL-decoded as UTF-8. A parameter
 * without '=' maps to the empty string.
 *
 * @param url full URL, possibly without a query string
 * @return map of first values per key; empty map when there is no query
 */
public static Map<String, String> getQueryParamsFirst(String url) {
    try {
        Map<String, String> params = new HashMap<>();
        String[] urlParts = url.split("\\?");
        if (urlParts.length > 1) {
            String query = urlParts[1];
            for (String param : query.split("&")) {
                // FIX: split on the FIRST '=' only; the previous unlimited
                // split truncated values containing '=' (e.g. "c=v=w" -> "v").
                String[] pair = param.split("=", 2);
                String key = decode(pair[0], "UTF-8");
                String value = pair.length > 1 ? decode(pair[1], "UTF-8") : "";
                // First occurrence wins; later duplicates are ignored.
                if (!params.containsKey(key)) {
                    params.put(key, value);
                }
            }
        }
        return params;
    } catch (UnsupportedEncodingException ex) {
        // UTF-8 is guaranteed by the JLS; this cannot happen.
        throw new AssertionError(ex);
    }
}
@Override
public SearchRequestResult filterResults(Filter filter, Filter.Option option)
        throws IOException, OpacErrorException {
    // Result filtering is not implemented for Primo; always returns null.
    return null;
}
@Override
public SearchRequestResult searchGetPage(int page)
        throws IOException, OpacErrorException, JSONException {
    // Re-issue the stored query, asking for the window that starts at the
    // first index of the requested page (10 results per page, 1-based).
    int firstIndex = (page - 1) * 10 + 1;
    List<NameValuePair> params = buildSearchParams(last_query);
    params.add(new BasicNameValuePair("indx", String.valueOf(firstIndex)));
    params.add(new BasicNameValuePair("pag", "cur"));
    String url = opac_url + "/action/search.do" + buildHttpGetParams(params);
    String html = httpGet(url, getDefaultEncoding());
    return parse_search(Jsoup.parse(html), page);
}
@Override
public DetailledItem getResultById(String id, String homebranch)
        throws IOException, OpacErrorException {
    // Perform the lazy session setup on first use.
    if (!initialised) {
        start();
    }
    // The "detailsTab" view of the display page carries the record details.
    String url = opac_url + "/action/display.do?ct=display&fn=search&vid=" + vid +
            "&doc=" + id + "&tabs=detailsTab";
    String html = httpGet(url, getDefaultEncoding());
    return parse_detail(id, Jsoup.parse(html));
}
/**
 * Builds a DetailledItem from a record's details tab, then fetches the
 * locations tab for physical copies and, failing that, the "view online"
 * tab for electronic access links.
 *
 * @param id  record id, stored on the returned item
 * @param doc parsed HTML of the "detailsTab" view
 */
protected DetailledItem parse_detail(String id, Document doc)
        throws OpacErrorException, IOException {
    DetailledItem res = new DetailledItem();
    res.setId(id);
    res.setTitle(doc.select(".EXLResultTitle").text());
    // Each detail row is "<label element> value...": remember the label from
    // the <strong>/title element, then concatenate the following nodes' text.
    for (Element detrow : doc.select(".EXLDetailsContent li")) {
        String title = null;
        String value = "";
        for (Node node : detrow.childNodes()) {
            if (node instanceof Element && (((Element) node).tagName().equals("strong") ||
                    ((Element) node).hasClass("bib-EXLDetailsContent-item-title"))) {
                title = ((Element) node).text();
            } else if (node instanceof Element && title != null) {
                value += ((Element) node).text();
            } else if (node instanceof TextNode && title != null) {
                value += ((TextNode) node).text();
            }
        }
        if (title != null) {
            res.addDetail(new Detail(title, value.trim()));
        }
    }
    // Second request: the locations tab lists physical copies in a table.
    String html2 =
            httpGet(opac_url + "/action/display.do?ct=display&fn=search&vid=" + vid + "&doc=" +
                            id + "&tabs=locationsTab",
                    getDefaultEncoding());
    Document doc2 = Jsoup.parse(html2);
    if (doc2.select(".EXLLocationTitlesRow").size() > 0) {
        // Map column index -> copy attribute by inspecting the header texts.
        // NOTE(review): the partial words ("llig am", "lligkeit") presumably
        // match umlauted labels like "fällig am" -- TODO confirm.
        Map<Integer, String> copymap = new HashMap<>();
        int i = 0;
        for (Element th : doc2.select(".EXLLocationTitlesRow th")) {
            String title = th.text().toLowerCase(Locale.GERMAN).trim();
            if (title.contains("library") || title.contains("bibliothek") ||
                    title.contains("branch")) {
                copymap.put(i, DetailledItem.KEY_COPY_BRANCH);
            } else if (title.contains("location") || title.contains("ort")) {
                copymap.put(i, DetailledItem.KEY_COPY_LOCATION);
            } else if (title.contains("call number") || title.contains("signatur")) {
                copymap.put(i, DetailledItem.KEY_COPY_SHELFMARK);
            } else if (title.contains("due date") || title.contains("llig am") ||
                    title.contains("ausgeliehen bis") || title.contains("lligkeit")
                    || title.contains("ausleihstatus")) {
                copymap.put(i, DetailledItem.KEY_COPY_RETURN);
            } else if (title.contains("loan to") || title.contains("bezugsmodalit") ||
                    title.contains("ausleihm") || title.contains("status")) {
                copymap.put(i, DetailledItem.KEY_COPY_STATUS);
            } else if (title.contains("queue") || title.contains("vormerker")) {
                copymap.put(i, DetailledItem.KEY_COPY_RESERVATIONS);
            }
            i++;
        }
        // One copy per body row, skipping header and "additional fields" rows.
        for (Element tr : doc2
                .select(".EXLLocationTable tr:not(.EXLLocationTitlesRow):not(" +
                        ".EXLAdditionalFieldsRow)")) {
            int j = 0;
            Map<String, String> copy = new HashMap<>();
            for (Element td : tr.children()) {
                if (copymap.containsKey(j)) {
                    copy.put(copymap.get(j), td.text().trim());
                }
                j++;
            }
            res.addCopy(copy);
        }
    }
    if (res.getCopies().size() == 0) {
        // Online medium? No physical copies: collect the access links from
        // the "view online" tab instead.
        String html3 =
                httpGet(opac_url + "/action/display.do?ct=display&fn=search&vid=" + vid +
                                "&doc=" +
                                id + "&tabs=viewOnlineTab",
                        getDefaultEncoding());
        Document doc3 = Jsoup.parse(html3);
        doc3.setBaseUri(opac_url + "/action/display.do");
        if (doc3.select(".EXLTabHeaderContent a").size() > 0) {
            Element link = doc3.select(".EXLTabHeaderContent a").first();
            res.addDetail(new Detail(link.text().trim(), cleanUrl(link.absUrl("href"))));
        }
        for (Element link : doc3.select(".EXLViewOnlineLinksTitle a")) {
            res.addDetail(new Detail(link.text().trim(), cleanUrl(link.absUrl("href"))));
        }
    }
    return res;
}
@Override
public DetailledItem getResult(int position) throws IOException, OpacErrorException {
// Not implemented: this client resolves detail pages by record id, not by
// result-list position, so position-based lookup always yields null.
return null;
}
/**
 * Starts the session and then switches the Primo UI to the language selected
 * via {@code setLanguage(String)} (the preference is stored server-side in
 * the session).
 *
 * @throws IOException on network errors
 */
@Override // was missing: this method overrides the superclass' start()
public void start() throws IOException {
    super.start();
    httpGet(opac_url + "/action/preferences.do?fn=change_lang&vid=" + vid + "&prefLang=" +
                    languageCode,
            getDefaultEncoding());
}
/**
 * Builds the list of search fields offered by the Primo advanced search page:
 * one free-text field per entry of the search-scope dropdown (or, as a
 * fallback, per JavaScript-defined search field) plus one dropdown field for
 * each filter select box (media type, publication date, language, search-in).
 *
 * @return the available search fields
 * @throws IOException on network errors
 * @throws JSONException if building the field metadata fails
 */
@Override
public List<SearchField> getSearchFields()
        throws IOException, OpacErrorException, JSONException {
    start();
    String html =
            httpGet(opac_url + "/action/search.do?mode=Advanced&ct=AdvancedSearch&vid=" + vid,
                    getDefaultEncoding());
    Document doc = Jsoup.parse(html);
    List<SearchField> fields = new ArrayList<>();

    // Text search fields from the scope dropdown. Guard against pages that
    // lack this select: the previous code called .first() unconditionally and
    // threw a NullPointerException on such pages.
    Elements scopeSelects = doc.select("select#exlidInput_scope_1");
    if (scopeSelects.size() > 0) {
        for (Element option : scopeSelects.first().select("option")) {
            TextSearchField field = new TextSearchField();
            field.setDisplayName(option.text());
            field.setId(option.val());
            field.setHint("");
            field.setData(new JSONObject());
            field.getData().put("meaning", option.val());
            fields.add(field);
        }
    }
    if (fields.size() == 0) {
        // Fallback: some installations define the search fields in JavaScript,
        // e.g. view-source:http://vopac.nlg.gr/Search/Advanced
        Pattern pattern_key = Pattern
                .compile("searchFields\\[\"([^\"]+)\"\\] = \"([^\"]+)\";");
        for (Element script : doc.select("script")) {
            if (!script.html().contains("searchFields")) continue;
            for (String line : script.html().split("\n")) {
                Matcher matcher = pattern_key.matcher(line);
                if (matcher.find()) {
                    TextSearchField field = new TextSearchField();
                    field.setDisplayName(matcher.group(2));
                    field.setId(matcher.group(1));
                    field.setHint("");
                    field.setData(new JSONObject());
                    field.getData().put("meaning", field.getId());
                    fields.add(field);
                }
            }
        }
    }

    // Dropdown filter fields. A select without a label cannot be named and is
    // skipped.
    Elements selects = doc.select("#exlidInput_mediaType_, #exlidInput_publicationDate_, " +
            "#exlidInput_language_, #exlidSearchIn");
    for (Element select : selects) {
        Elements labels = select.parent().select("label"); // hoisted: was queried twice
        if (labels.size() == 0) {
            continue;
        }
        DropdownSearchField field = new DropdownSearchField();
        field.setDisplayName(labels.first().text());
        field.setId("#" + select.attr("id"));
        for (Element option : select.select("option")) {
            if (option.val().equals("all_items")) {
                // "all_items" is the reset entry; pin it to the top of the list.
                field.addDropdownValue(0, option.val(), option.text());
            } else {
                field.addDropdownValue(option.val(), option.text());
            }
        }
        field.setData(new JSONObject());
        field.getData().put("meaning", field.getId());
        fields.add(field);
    }
    return fields;
}
/**
 * Returns a shareable permalink pointing at the details tab of the given
 * record. The title parameter is not needed to build the URL.
 */
@Override
public String getShareUrl(String id, String title) {
    StringBuilder url = new StringBuilder(opac_url);
    url.append("/action/display.do?ct=display&fn=search&vid=").append(vid);
    url.append("&doc=").append(id);
    url.append("&tabs=detailsTab");
    return url.toString();
}
@Override
public int getSupportFlags() {
// Advertises endless-scrolling result lists and the ability to switch the
// active account; account operations themselves are stubbed out (see
// isAccountSupported()).
return SUPPORT_FLAG_ENDLESS_SCROLLING | SUPPORT_FLAG_CHANGE_ACCOUNT;
}
@Override
public Set<String> getSupportedLanguages() throws IOException {
Set<String> langs = new HashSet<>();
// Not that easy, as some libraries do and some don't include the
// default language in their language chooser. We go for configuration flags in the
// meantime.
// With full chooser: http://primo.kobv.de/primo_library/libweb/action/search.do?vid=FUB
// With toggle only: http://primo.kobv.de/primo_library/libweb/action/search.do?vid=hub_ub
// Without any chooser: http://explore.bl.uk/primo_library/libweb/action/search.do?vid=BLVU1
if (data.has("languages")) {
// Library configuration explicitly lists its supported languages.
try {
for (int i = 0; i < data.getJSONArray("languages").length(); i++) {
langs.add(data.getJSONArray("languages").getString(i));
}
} catch (JSONException e) {
e.printStackTrace();
}
} else {
// Default set used when the configuration does not list languages.
// NOTE(review): "cz" is not an ISO 639-1 code (Czech is "cs") -- possibly a
// Primo-specific identifier; confirm before changing.
langs.add("de");
langs.add("en");
langs.add("it");
langs.add("zh");
langs.add("cz");
langs.add("ru");
langs.add("nl");
langs.add("es");
langs.add("fr");
}
return langs;
}
/** All HTTP responses from this backend are decoded as UTF-8. */
protected String getDefaultEncoding() {
return "UTF-8";
}
/**
 * Selects the UI language that start() will activate on the server.
 *
 * If the library configuration lists supported languages, the selection is
 * only applied when the requested language appears in that list; otherwise
 * it is applied unconditionally.
 *
 * @param language a language identifier as returned by getSupportedLanguages()
 */
@Override
public void setLanguage(String language) {
    if (data.has("languages")) {
        try {
            for (int i = 0; i < data.getJSONArray("languages").length(); i++) {
                if (data.getJSONArray("languages").getString(i).equals(language)) {
                    languageCode = toBackendLanguageCode(language);
                    break;
                }
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }
    } else {
        languageCode = toBackendLanguageCode(language);
    }
}

/**
 * Maps a language identifier to the code the backend expects, falling back to
 * the identifier itself when no mapping is configured. (Extracted: this
 * expression was duplicated in both branches of setLanguage.)
 */
private String toBackendLanguageCode(String language) {
    return languageCodes.containsKey(language) ? languageCodes.get(language) : language;
}
@Override
public boolean isAccountSupported(Library library) {
// Account features (loans, reservations, renewals) are not implemented in
// this backend; all account methods below are stubs.
return false;
}
@Override
public boolean isAccountExtendable() {
// No account support (see isAccountSupported()).
return false;
}
@Override
public String getAccountExtendableInfo(Account account) throws IOException {
// Unsupported: accounts are not extendable here (isAccountExtendable() is false).
return null;
}
@Override
public ReservationResult reservation(DetailledItem item, Account account,
int useraction, String selection) throws IOException {
// Unsupported: should not be reached since isAccountSupported() returns false.
return null;
}
@Override
public ProlongResult prolong(String media, Account account, int useraction,
String selection) throws IOException {
// Unsupported: should not be reached since isAccountSupported() returns false.
return null;
}
@Override
public ProlongAllResult prolongAll(Account account, int useraction, String selection)
throws IOException {
// Unsupported: should not be reached since isAccountSupported() returns false.
return null;
}
@Override
public CancelResult cancel(String media, Account account, int useraction,
String selection) throws IOException, OpacErrorException {
// Unsupported: should not be reached since isAccountSupported() returns false.
return null;
}
@Override
public AccountData account(Account account)
throws IOException, JSONException, OpacErrorException {
// Unsupported: should not be reached since isAccountSupported() returns false.
return null;
}
@Override
public void checkAccountData(Account account)
throws IOException, JSONException, OpacErrorException {
// No-op: account support is not implemented (see isAccountSupported()).
}
/**
 * HTTP GET that transparently follows the JavaScript-based redirect page some
 * installations serve: when the response contains an element with
 * id "connect", the link inside it is followed instead of returning the page.
 */
public String httpGet(String url, String encoding, boolean ignore_errors,
CookieStore cookieStore) throws
IOException {
String html = super.httpGet(url, encoding, ignore_errors, cookieStore);
if (html.contains("id=\"connect\"")) {
// British Library and Uni Duisburg-Essen do weird JavaScript redirects
Document doc = Jsoup.parse(html);
doc.setBaseUri(url);
// substring(6) strips a fixed six-character prefix from the raw href --
// presumably a JavaScript scheme/handler prefix; TODO confirm against a
// live redirect page. Note this recurses until a page without "connect"
// is returned.
return httpGet(doc.select("#connect a").first().attr("href").substring(6), encoding,
ignore_errors, cookieStore);
}
return html;
}
}
| |
/*
Modified BSD License
====================
Copyright (c) 2015, CoNWeTLab, Universidad Politecnica Madrid
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of UPM nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL UPM BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.fiware.apps.repository.dao.impl;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import org.fiware.apps.repository.dao.MongoDAOFactory;
import org.fiware.apps.repository.dao.VirtuosoDAOFactory;
import org.fiware.apps.repository.exceptions.db.DatasourceException;
import org.fiware.apps.repository.exceptions.db.SameIdException;
import org.fiware.apps.repository.model.ResourceCollection;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import static org.mockito.Mockito.*;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
/**
 * Unit tests for {@code MongoCollectionDAO}. Plain Mockito mocks stand in for
 * the MongoDB driver objects ({@code DB}, {@code DBCollection}, {@code DBObject})
 * and the Virtuoso DAO; PowerMock is prepared for the static factory classes.
 *
 * NOTE(review): several tests use raw {@code List}/{@code LinkedList} types and
 * declare unused {@code Date date} locals; consider {@code List<DBObject>} and
 * removing the dead locals in a follow-up.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({DBCollection.class, DB.class, MongoDAOFactory.class, VirtuosoDAOFactory.class})
public class MongoCollectionDAOTest {
// Mocked collaborators passed into the DAO under test.
@Mock private DB db;
@Mock private DBCollection mongoCollection;
@Mock private VirtuosoResourceDAO virtuosoResourceDAO;
@Mock private DBObject dBObject;
// Instance under test; rebuilt before every test in setUp().
private MongoCollectionDAO toTest;
public MongoCollectionDAOTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
// NOTE(review): the fields are re-mocked manually here, which makes the @Mock
// annotations above effectively redundant.
@Before
public void setUp() {
db = mock(DB.class);
mongoCollection = mock(DBCollection.class);
virtuosoResourceDAO = mock(VirtuosoResourceDAO.class);
dBObject = mock(DBObject.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
}
@After
public void tearDown() {
}
//Tests
// NOTE(review): the whole body of this test is commented out, so it always
// passes without asserting anything -- either restore or delete it.
@Test
public void voidConstructorTest()
{
/* PowerMockito.mockStatic(MongoDAOFactory.class);
PowerMockito.mockStatic(DB.class);
PowerMockito.mockStatic(VirtuosoDAOFactory.class);
PowerMockito.when(MongoDAOFactory.createConnection()).thenReturn(db);
when(db.getCollection(anyString())).thenReturn(mongoCollection);
PowerMockito.when(VirtuosoDAOFactory.getVirtuosoResourceDAO()).thenReturn(virtuosoResourceDAO);
MongoCollectionDAO voidConstructor = new MongoCollectionDAO();
MongoCollectionDAO notVoidConstructor = new MongoCollectionDAO(db, mongoCollection, virtuosoResourceDAO);*/
}
// findCollection delegates to the same code paths as getCollection*, so these
// three tests simply reuse those scenarios.
@Test
public void findCollectionTest() {
getCollectionTest();
}
@Test
public void findCollectionNullTest() {
getCollectionNullTest();
}
@Test(expected = DatasourceException.class)
public void findCollectionExceptionTest() throws DatasourceException {
getCollectionExceptionTest();
}
// updateCollection: existing document found -> update succeeds.
@Test
public void updateCollectionTest() {
String id = "/id";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, true);
List list = new LinkedList();
boolean returned = false;
list.add(dBObject);
rulesdbObjectCollection(id, date);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(list.iterator());
doNothing().when(mongoCollection).update(any(DBObject.class), any(DBObject.class), anyBoolean(), anyBoolean());
try {
returned = toTest.updateCollection(id, resourceCollection);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
assertTrue(returned);
verify(db).requestStart();
verify(db).requestDone();
}
// updateCollection: no matching document -> returns false, no update issued.
@Test
public void updateCollectionNullTest() {
String id = "/id";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, true);
boolean returned = true;
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(null);
try {
returned = toTest.updateCollection(id, resourceCollection);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
assertFalse(returned);
verify(db).requestStart();
verify(db).requestDone();
}
// updateCollection: driver failure during find is wrapped in DatasourceException.
@Test(expected = DatasourceException.class)
public void updateCollectionExceptionTest1() throws DatasourceException {
String id = "/id";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, true);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenThrow(Exception.class);
toTest.updateCollection(id, resourceCollection);
}
// updateCollection: driver failure during update is wrapped in DatasourceException.
@Test(expected = DatasourceException.class)
public void updateCollectionExceptionTest2() throws DatasourceException {
String id = "/id";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, false);
List list = new LinkedList();
list.add(dBObject);
rulesdbObjectCollection(id, date);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(list.iterator());
doThrow(IllegalArgumentException.class).when(mongoCollection).update(any(DBObject.class), any(DBObject.class), anyBoolean(), anyBoolean());
toTest.updateCollection(id, resourceCollection);
}
// deleteCollection: collection and contained resources found -> true; the DAO
// issues two request cycles (hence times(2) on the verifies below).
@Test
public void deleteCollectionTest() {
String id = "/id";
Date date = new Date();
DBCursor dBCursor = mock(DBCursor.class);
List list = new LinkedList();
Boolean returned = false;
DBCollection mongoResource = PowerMockito.mock(DBCollection.class);
db = PowerMockito.mock(DB.class);
mongoCollection = PowerMockito.mock(DBCollection.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
list.add(dBObject);
rulesdbObjectCollection(id, date);
PowerMockito.when(db.getCollection(anyString())).thenReturn(mongoResource);
PowerMockito.when(mongoResource.find(any(DBObject.class))).thenReturn(dBCursor);
PowerMockito.when(dBCursor.toArray()).thenReturn(list);
when(virtuosoResourceDAO.deleteResource(eq(id+"Id"))).thenReturn(true);
PowerMockito.doNothing().when(mongoResource).remove(dBObject);
PowerMockito.when(mongoCollection.find(any(DBObject.class))).thenReturn(dBCursor);
PowerMockito.doNothing().when(mongoCollection).remove(dBObject);
PowerMockito.when(mongoCollection.findOne(any(DBObject.class))).thenReturn(dBObject);
try {
returned = toTest.deleteCollection(id);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
assertTrue(returned);
verify(db, times(2)).requestStart();
verify(db, times(2)).requestDone();
}
// deleteCollection: collection itself not found (findOne returns null) -> false.
@Test
public void deleteCollectionNullTest() {
String id = "/id";
Date date = new Date();
DBCursor dBCursor = mock(DBCursor.class);
List list = new LinkedList();
Boolean returned = false;
DBCollection mongoResource = PowerMockito.mock(DBCollection.class);
db = PowerMockito.mock(DB.class);
mongoCollection = PowerMockito.mock(DBCollection.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
list.add(dBObject);
rulesdbObjectCollection(id, date);
PowerMockito.when(db.getCollection(anyString())).thenReturn(mongoResource);
PowerMockito.when(mongoResource.find(any(DBObject.class))).thenReturn(dBCursor);
PowerMockito.when(dBCursor.toArray()).thenReturn(list);
when(virtuosoResourceDAO.deleteResource(eq(id+"Id"))).thenReturn(true);
PowerMockito.doNothing().when(mongoResource).remove(dBObject);
PowerMockito.when(mongoCollection.find(any(DBObject.class))).thenReturn(dBCursor);
PowerMockito.doNothing().when(mongoCollection).remove(dBObject);
PowerMockito.when(mongoCollection.findOne(any(DBObject.class))).thenReturn(null);
try {
returned = toTest.deleteCollection(id);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
assertFalse(returned);
verify(db, times(2)).requestStart();
verify(db, times(2)).requestDone();
}
// deleteCollection: failure obtaining the collection is wrapped.
@Test(expected = DatasourceException.class)
public void deleteCollectionExceptionTest1() throws DatasourceException {
String id = "/id";
Date date = new Date();
db = PowerMockito.mock(DB.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
rulesdbObjectCollection(id, date);
PowerMockito.when(db.getCollection(anyString())).thenThrow(Exception.class);
toTest.deleteCollection(id);
//fail();
}
// deleteCollection: failure during findOne is wrapped.
@Test(expected = DatasourceException.class)
public void deleteCollectionExceptionTest2() throws DatasourceException {
String id = "/id";
Date date = new Date();
DBCursor dBCursor = mock(DBCursor.class);
List list = new LinkedList();
DBCollection mongoResource = PowerMockito.mock(DBCollection.class);
db = PowerMockito.mock(DB.class);
mongoCollection = PowerMockito.mock(DBCollection.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
list.add(dBObject);
rulesdbObjectCollection(id, date);
PowerMockito.when(db.getCollection(anyString())).thenReturn(mongoResource);
PowerMockito.when(mongoResource.find(any(DBObject.class))).thenReturn(dBCursor);
PowerMockito.when(dBCursor.toArray()).thenReturn(list);
when(virtuosoResourceDAO.deleteResource(eq(id+"Id"))).thenReturn(true);
PowerMockito.doNothing().when(mongoResource).remove(dBObject);
PowerMockito.when(mongoCollection.find(any(DBObject.class))).thenReturn(dBCursor);
PowerMockito.doNothing().when(mongoCollection).remove(dBObject);
PowerMockito.when(mongoCollection.findOne(any(DBObject.class))).thenThrow(IllegalArgumentException.class);
toTest.deleteCollection(id);
//fail();
}
// findCollection: document found -> fields mapped via rulesdbObjectCollection().
@Test
public void getCollectionTest() {
String id = "/id";
Date date = new Date();
List list = new LinkedList();
ResourceCollection returned = null;
list.add(dBObject);
rulesdbObjectCollection(id, null);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(list.iterator());
try {
returned = toTest.findCollection(id);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
assertEquals(id+"Id", returned.getId());
assertEquals(id+"Creator", returned.getCreator());
verify(db, times(3)).requestStart();
verify(db, times(3)).requestDone();
}
// findCollection: nothing found -> null result, single request cycle.
@Test
public void getCollectionNullTest() {
String id = "/id";
Date date = new Date();
ResourceCollection returned = null;
rulesdbObjectCollection(id, date);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(null);
try {
returned = toTest.findCollection(id);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
assertNull(returned);
verify(db).requestStart();
verify(db).requestDone();
}
// findCollection: driver failure is wrapped in DatasourceException.
@Test(expected = DatasourceException.class)
public void getCollectionExceptionTest() throws DatasourceException {
String id = "/id";
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenThrow(Exception.class);
toTest.findCollection(id);
}
// insertCollection: no existing document with the same id -> insert succeeds.
@Test
public void insertCollectionTest() {
String id = "/id/hola";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, true);
boolean returned = false;
rulesdbObjectCollection(id, date);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(null);
doNothing().when(mongoCollection).insert(any(DBObject.class));
try {
returned = toTest.insertCollection(resourceCollection);
} catch (Exception ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
assertTrue(returned);
verify(db, atLeast(1)).requestStart();
verify(db, atLeast(1)).requestDone();
}
// insertCollection: failure on a later (recursive parent) insert is wrapped.
@Test(expected = DatasourceException.class)
public void insertCollectionRecursiveExceptionTest() throws DatasourceException, SameIdException {
String id = "/id/hola";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, false);
boolean returned = false;
rulesdbObjectCollection(id, null);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(null);
doNothing().doThrow(Exception.class).when(mongoCollection).insert(any(DBObject.class));
returned = toTest.insertCollection(resourceCollection);
assertTrue(returned);
verify(db, atLeast(1)).requestStart();
verify(db, atLeast(1)).requestDone();
}
// insertCollection: immediate insert failure is wrapped.
@Test(expected = DatasourceException.class)
public void insertCollectionDatasourceExceptionTest() throws DatasourceException, SameIdException {
String id = "/id/hola";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, false);
rulesdbObjectCollection(id, date);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(null);
doThrow(Exception.class).when(mongoCollection).insert(any(DBObject.class));
toTest.insertCollection(resourceCollection);
}
// insertCollection: a pre-existing document with the same id -> SameIdException.
@Test(expected = SameIdException.class)
public void insertCollectionSameIdExceptionTest() throws DatasourceException, SameIdException {
String id = "/id";
Date date = new Date();
ResourceCollection resourceCollection = generateResourceCollection(id, date, false);
List list = new LinkedList();
list.add(dBObject);
rulesdbObjectCollection(id, date);
when(mongoCollection.find(any(DBObject.class), any(DBObject.class), anyInt(), anyInt(), anyInt())).thenReturn(list.iterator());
toTest.insertCollection(resourceCollection);
}
// getCollections: single well-formed document in the cursor.
@Test
public void getCollectionsTest() {
String id = "/id";
Date date = new Date();
DBCursor dBCursor = mock(DBCursor.class);
List list = new LinkedList();
mongoCollection = PowerMockito.mock(DBCollection.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
list.add(dBObject);
PowerMockito.when(mongoCollection.find(any())).thenReturn(dBCursor);
rulesdbObjectCollection(id, date);
when(dBCursor.toArray()).thenReturn(list);
try {
toTest.getCollections(id);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
verify(db).requestStart();
verify(db).requestDone();
}
// getCollections: cursor contains a null entry and a document without a date.
@Test
public void getCollectionsTest2() {
String id = "/id";
Date date = new Date();
DBCursor dBCursor = mock(DBCursor.class);
List list = new LinkedList();
mongoCollection = PowerMockito.mock(DBCollection.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
list.add(dBObject);
list.add(null);
PowerMockito.when(mongoCollection.find(any())).thenReturn(dBCursor);
rulesdbObjectCollection(id, null);
when(dBCursor.toArray()).thenReturn(list);
try {
toTest.getCollections(id);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
verify(db).requestStart();
verify(db).requestDone();
}
// getCollections: documents whose "id" attribute is null.
@Test
public void getCollectionsTest3() {
String id = "/id";
Date date = new Date();
DBCursor dBCursor = mock(DBCursor.class);
List list = new LinkedList();
mongoCollection = PowerMockito.mock(DBCollection.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
list.add(dBObject);
list.add(dBObject);
PowerMockito.when(mongoCollection.find(any())).thenReturn(dBCursor);
rulesdbObjectCollection(id, date);
when(dBObject.get("id")).thenReturn(null);
when(dBCursor.toArray()).thenReturn(list);
try {
toTest.getCollections(id);
} catch (DatasourceException ex) {
fail("Exception not expected:\n" + ex.getLocalizedMessage());
}
verify(db).requestStart();
verify(db).requestDone();
}
// getCollections: driver failure is wrapped.
@Test(expected = DatasourceException.class)
public void getCollectionsExceptionTest() throws DatasourceException {
String id = "/id";
Date date = new Date();
mongoCollection = PowerMockito.mock(DBCollection.class);
toTest = new MongoCollectionDAO(db, mongoCollection, mongoCollection, virtuosoResourceDAO);
PowerMockito.when(mongoCollection.find(any())).thenThrow(Exception.class);
rulesdbObjectCollection(id, date);
toTest.getCollections(id);
//fail();
}
// Stubs the shared dBObject mock so it behaves like a stored collection
// document whose attributes are derived from the given id prefix.
private void rulesdbObjectCollection(String string, Date date) {
if(date != null)
when(this.dBObject.get("creationDate")).thenReturn(date);
else
when(this.dBObject.get("creationDate")).thenReturn(null);
when(this.dBObject.get("id")).thenReturn(string + "Id");
when(this.dBObject.get("name")).thenReturn(string + "name");
when(this.dBObject.get("_id")).thenReturn("507f1f77bcf86cd799439011");
when(this.dBObject.get("creator")).thenReturn(string + "Creator");
}
// Builds a ResourceCollection fixture; the creation date is only set when
// creationDate is true, matching the document shapes exercised above.
private ResourceCollection generateResourceCollection(String string, Date date, boolean creationDate) {
ResourceCollection resourceCollection = new ResourceCollection();
resourceCollection.setId(string+"Id");
resourceCollection.setCreator(string+"Creator");
if(creationDate)
resourceCollection.setCreationDate(date);
return resourceCollection;
}
}
| |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2010 Oracle. All rights reserved.
*
* $Id: TextProtocol.java,v 1.25 2010/01/04 15:50:46 cwl Exp $
*/
package com.sleepycat.je.rep.impl;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.nio.channels.SocketChannel;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.config.DurationConfigParam;
import com.sleepycat.je.dbi.DbConfigManager;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.rep.elections.Utils;
import com.sleepycat.je.rep.impl.node.NameIdPair;
import com.sleepycat.je.rep.utilint.ReplicationFormatter;
import com.sleepycat.je.rep.utilint.ServiceDispatcher;
import com.sleepycat.je.rep.utilint.ServiceDispatcher.ServiceConnectFailedException;
import com.sleepycat.je.utilint.LoggerUtils;
/**
* TextProtocol provides the support for implementing simple, low-performance
* protocols involving replication nodes. The protocol is primarily text based,
* and checks group membership and version matches with every message favoring
* flexibility over performance.
*
* The base class is primarily responsible for the message formatting and
* message envelope validation. The subclasses define the specific messages
* that constitute the protocol and the request/response semantics.
*
* Every message has the format:
* <version>|<name>|<id>|<op>|<op-specific payload>
*
* <version> is the version of the protocol in use.
* <name> identifies a group participating in an election. It avoids
* accidental cross-talk across groups holding concurrent elections.
* <id> identifies the originator of the message within the group.
* <op> the operation identified by the specific message.
* <op-specific payload> the payload associated with the particular operation.
*/
public abstract class TextProtocol {
/*
* Protocol version string. Format: <major version>.<minor version>
* It's used to ensure compatibility across versions.
*/
private final String VERSION;
/* The name of the group executing this protocol. */
private final String groupName;
/*
* The set of ids constituting the entire group. It's updated as nodes
* enter and leave the dynamic group.
*/
private Set<Integer> memberIds;
/* The id associated with this protocol participant. */
private final NameIdPair nameIdPair;
/*
* The message prefix constituting the "fixed" part of the message for this
* group and node.
*/
protected final String messagePrefixNocheck;
/*
* Timeouts used for network communications. Use setTimeouts() to override
* the defaults.
*/
private int openTimeoutMs = 10000; // Default to 10 sec
private int readTimeoutMs = 10000; // Default to 10 sec
/* The token separator in messages */
public static final String SEPARATOR = "|";
public static final String SEPARATOR_REGEXP="\\" + SEPARATOR;
/* A message defined by the base class to deal with all errors. */
public final MessageOp PROTOCOL_ERROR =
new MessageOp("PE", ProtocolError.class);
public final MessageOp OK_RESP = new MessageOp("OK", OK.class);
public final MessageOp FAIL_RESP = new MessageOp("FAIL", Fail.class);
/* The number of message types defined by the subclass. */
private int nonDefaultMessageCount;
/* Maps request Ops to the corresponding enumerator. */
private final Map<String,MessageOp> ops = new HashMap<String,MessageOp>();
protected final Logger logger;
protected final Formatter formatter;
protected final EnvironmentImpl envImpl;
/**
 * Creates an instance of the Protocol.
 *
 * @param version the protocol version number
 * @param groupName the name of the group executing this protocol
 * @param nameIdPair a unique identifier for this node
 * @param envImpl for logging, may be null
 */
public TextProtocol(String version,
String groupName,
NameIdPair nameIdPair,
EnvironmentImpl envImpl) {
this.VERSION = version;
this.groupName = groupName;
this.nameIdPair = nameIdPair;
this.envImpl = envImpl;
// Fixed message prefix built with the NOCHECK node id.
messagePrefixNocheck =
VERSION + SEPARATOR + groupName + SEPARATOR +
NameIdPair.NOCHECK_NODE_ID;
if (envImpl != null) {
this.logger = LoggerUtils.getLogger(getClass());
} else {
// No environment available: use the formatter-needed logger variant.
this.logger = LoggerUtils.getLoggerFormatterNeeded(getClass());
}
this.formatter = new ReplicationFormatter(nameIdPair);
}
/**
 * Overrides the default network timeouts for this protocol instance with the
 * values from the replication configuration. A null repImpl leaves the
 * defaults in place.
 */
protected void setTimeouts(RepImpl repImpl,
                           DurationConfigParam openTimeoutConfig,
                           DurationConfigParam readTimeoutConfig) {
    if (repImpl != null) {
        final DbConfigManager cm = repImpl.getConfigManager();
        openTimeoutMs = cm.getDuration(openTimeoutConfig);
        readTimeoutMs = cm.getDuration(readTimeoutConfig);
    }
}
/**
 * Registers the messages defined by the subclass. Note that PROTOCOL_ERROR,
 * OK and FAIL are pre-defined messages supplied by this base class. The
 * protocol is not considered initialized until this method has been invoked,
 * typically from the subclass constructor itself. This two-step setup is
 * unfortunately necessary since the creation of MessageOp instances requires
 * that this class be completely initialized; otherwise the MessageOp list
 * could have been passed in as a constructor argument.
 *
 * @param protocolOps the message ops defined by the subclass.
 */
protected void initializeMessageOps(MessageOp[] protocolOps) {
for (MessageOp op : protocolOps) {
ops.put(op.opId, op);
}
// Only the subclass-defined messages count towards messageCount().
nonDefaultMessageCount = protocolOps.length;
ops.put(PROTOCOL_ERROR.opId, PROTOCOL_ERROR);
ops.put(OK_RESP.opId, OK_RESP);
ops.put(FAIL_RESP.opId, FAIL_RESP);
}
/** Returns the socket open timeout in milliseconds. */
public int getOpenTimeout() {
return openTimeoutMs;
}
/** Returns the socket read timeout in milliseconds. */
public int getReadTimeout() {
return readTimeoutMs;
}
/** The total number of non-default messages defined by the protocol subclass. */
public int messageCount() {
return nonDefaultMessageCount;
}
/**
 * Updates the current set of nodes that constitutes the group.
 *
 * NOTE(review): the original javadoc also promised to "update the id used in
 * the header, in case it was null", but this method only replaces the member
 * set -- confirm whether the header-id update happens elsewhere.
 *
 * @param newMemberIds the new group membership
 */
public void updateNodeIds(Set<Integer> newMemberIds) {
/* Update the set of known members as well. */
memberIds = newMemberIds;
}
/**
 * The Operations that are part of the protocol. Each op pairs a short wire
 * identifier with the Message class used to represent it.
 */
public static class MessageOp {
/* The string denoting the operation for the request message. */
private final String opId;
/* The class used to represent the message. */
private final Class<? extends Message> messageClass;
/**
 * @param opId the wire identifier for this operation
 * @param messageClass the class instantiated reflectively by parse()
 */
public MessageOp(String opId, Class<? extends Message> messageClass) {
this.opId = opId;
this.messageClass = messageClass;
}
/** Returns the wire identifier. */
String getOpId() {
return opId;
}
/** Returns the Message class associated with this op. */
Class<? extends Message> getMessageClass() {
return messageClass;
}
@Override
public String toString() {
return opId;
}
}
/**
 * Represents the tokens on a message line. The order of the enumerators
 * represents the order of the tokens in the wire format:
 * version|name|id|op|payload...
 */
public enum TOKENS {
VERSION_TOKEN, // protocol version
NAME_TOKEN, // group name
ID_TOKEN, // originator id
OP_TOKEN, // operation identifier
FIRST_PAYLOAD_TOKEN; // ordinal of the first op-specific payload token
}
/**
 * Used to indicate that an entity is formatable and can be serialized and
 * de-serialized for transmission.
 */
protected interface WireFormatable {
/*
 * Returns the string representation suitable for use in a network
 * request.
 */
abstract String wireFormat();
}
/**
 * Parses a line into a Request/Response message.
 *
 * @param line containing the message
 * @return a message instance
 * @throws InvalidMessageException if the line is malformed or names an
 * operation unknown to this protocol
 */
public Message parse(String line)
    throws InvalidMessageException {

    String[] tokens = line.split(SEPARATOR_REGEXP);

    final int index = TOKENS.OP_TOKEN.ordinal();
    if (index >= tokens.length) {
        throw new InvalidMessageException(MessageError.BAD_FORMAT,
                                          "Missing message op");
    }
    MessageOp op = ops.get(tokens[index]);
    if (op == null) {
        /*
         * The line arrives over the network, so an unrecognized op is an
         * input error, not an internal invariant. The previous
         * assert(op != null) is disabled in production and would have
         * caused an NPE below.
         */
        throw new InvalidMessageException(MessageError.BAD_FORMAT,
                                          "Unknown message op: " +
                                          tokens[index]);
    }

    try {
        Class<? extends Message> c = op.getMessageClass();
        /*
         * Every message class declares a (enclosingProtocol, line, tokens)
         * constructor; it is located and invoked reflectively here.
         */
        Constructor<? extends Message> cons =
            c.getConstructor(c.getEnclosingClass(),
                             line.getClass(),
                             tokens.getClass());
        return cons.newInstance(this, line, tokens);
    } catch (InstantiationException e) {
        throw EnvironmentFailureException.unexpectedException(e);
    } catch (IllegalAccessException e) {
        throw EnvironmentFailureException.unexpectedException(e);
    } catch (SecurityException e) {
        throw EnvironmentFailureException.unexpectedException(e);
    } catch (NoSuchMethodException e) {
        throw EnvironmentFailureException.unexpectedException(e);
    } catch (InvocationTargetException e) {
        /* Unwrap the exception thrown by the message constructor. */
        Throwable target = e.getTargetException();
        if (target instanceof RuntimeException) {
            throw (RuntimeException) target;
        } else if (target instanceof InvalidMessageException) {
            throw (InvalidMessageException) target;
        }
        throw EnvironmentFailureException.unexpectedException(e);
    }
}
/**
 * Base message class for all messages exchanged in the protocol.
 */
public abstract class Message implements WireFormatable {

    /* The sender of the message. */
    private int senderId = 0;

    /* The line representing the message; null for originals. */
    private final String line;

    /* The tokenized form of the above line; null for originals. */
    private final String[] tokens;

    /* The current variable arg token. */
    private int currToken = TOKENS.FIRST_PAYLOAD_TOKEN.ordinal();

    /**
     * The constructor used for the original non-serialized instance of the
     * message, which does not use the line or tokens.
     */
    protected Message() {
        line = null;
        tokens = null;
    }

    /**
     * Every message must define a constructor of this form so that it can
     * be de-serialized. The constructor is invoked using reflection by the
     * parse() method.
     *
     * @param line the line constituting the message
     * @param tokens the line in token form
     * @throws InvalidMessageException on version, group, or membership
     * mismatches
     * @throws EnvironmentFailureException on format errors
     */
    protected Message(String line, String[] tokens)
        throws InvalidMessageException {

        this.line = line;
        this.tokens = tokens;

        /* Validate the leading fixed fields. */
        final String version = getTokenString(TOKENS.VERSION_TOKEN);
        if (!VERSION.equals(version)) {
            throw new InvalidMessageException
                (MessageError.VERSION_MISMATCH,
                 "Version argument mismatch." +
                 " Expected: " + VERSION + " Found: " + version);
        }
        final String messageGroupName = getTokenString(TOKENS.NAME_TOKEN);
        if (!groupName.equals(messageGroupName)) {
            throw new InvalidMessageException
                (MessageError.GROUP_MISMATCH,
                 "Group name mismatch; this group name: " + groupName +
                 ", message group name: " + messageGroupName);
        }

        /*
         * Integer.parseInt replaces the deprecated new Integer(...)
         * boxing constructor; behavior (including NumberFormatException
         * on bad input) is unchanged.
         */
        senderId = Integer.parseInt(getTokenString(TOKENS.ID_TOKEN));
        if ((memberIds != null) &&
            (nameIdPair.getId() != NameIdPair.NOCHECK_NODE_ID) &&
            (senderId != NameIdPair.NOCHECK_NODE_ID) &&
            (senderId != nameIdPair.getId()) &&
            !memberIds.contains(senderId)) {
            throw new InvalidMessageException
                (MessageError.NOT_A_MEMBER,
                 "Sender's message id: " + senderId +
                 " message op: " + getTokenString(TOKENS.OP_TOKEN) +
                 ", was not a member of the group: " + memberIds);
        }
    }

    public int getSenderId() {
        return senderId;
    }

    /** Returns the version/group-name/node-id prefix of every message. */
    protected String getMessagePrefix() {
        return VERSION + SEPARATOR + groupName + SEPARATOR +
            nameIdPair.getId();
    }

    public abstract MessageOp getOp();

    /**
     * Returns the protocol associated with this message
     */
    public TextProtocol getProtocol() {
        return TextProtocol.this;
    }

    /**
     * Returns the token value associated with the token type.
     *
     * @param tokenType identifies the token in the message
     * @return the associated token value
     */
    private String getTokenString(TOKENS tokenType) {
        final int index = tokenType.ordinal();
        if (index >= tokens.length) {
            /* Spaces added around "at position"/"in message" (message
             * previously rendered as e.g. "OP_TOKENat position: 3in"). */
            throw EnvironmentFailureException.unexpectedState
                ("Bad format; missing token: " + tokenType +
                 " at position: " + index + " in message: " + line);
        }
        return tokens[index];
    }

    /**
     * Returns the next token in the payload.
     *
     * @return the next payload token
     * @throws InvalidMessageException
     */
    protected String nextPayloadToken()
        throws InvalidMessageException {
        if (currToken >= tokens.length) {
            throw new InvalidMessageException
                (MessageError.BAD_FORMAT,
                 "Bad format; missing token at position: " + currToken +
                 " in message: " + line);
        }
        return tokens[currToken++];
    }

    /**
     * Returns the current token position in the payload.
     *
     * @return the current token position
     */
    protected int getCurrentTokenPosition() {
        return currToken;
    }
}
/**
 * Base classes for response messages. Responses compare equal when they
 * carry the same operation.
 */
public abstract class ResponseMessage extends Message {

    protected ResponseMessage() {
        super();
    }

    protected ResponseMessage(String line, String[] tokens)
        throws InvalidMessageException {
        super(line, tokens);
    }

    /**
     * Returns the version id and Op concatenation that starts every
     * message.
     */
    protected String wireFormatPrefix() {
        return getMessagePrefix() + SEPARATOR + getOp().opId;
    }

    @Override
    public int hashCode() {
        return getOp().getOpId().hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        /* instanceof yields false for null, covering the null case. */
        if (!(obj instanceof ResponseMessage)) {
            return false;
        }
        final ResponseMessage other = (ResponseMessage) obj;
        return getOp().equals(other.getOp());
    }
}
/**
 * Response used to report that an incoming message was invalid; carries
 * the error category and a descriptive message.
 */
public class ProtocolError extends ResponseMessage {

    /* Human-readable description; part of the wire format. */
    private final String message;

    /* The error category; part of the wire format. */
    private final MessageError errorType;

    public ProtocolError(InvalidMessageException messageException) {
        this(messageException.getErrorType(),
             messageException.getMessage());
    }

    public ProtocolError(MessageError messageError, String message) {
        this.message = message;
        this.errorType = messageError;
    }

    /* De-serialization constructor, invoked reflectively by parse(). */
    public ProtocolError(String responseLine, String[] tokens)
        throws InvalidMessageException {
        super(responseLine, tokens);
        errorType = MessageError.valueOf(nextPayloadToken());
        message = nextPayloadToken();
    }

    /*
     * NOTE(review): errorType participates in neither hashCode nor
     * equals, so two errors with different types but the same text
     * compare equal — confirm this is intended.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result
            + ((message == null) ? 0 : message.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!super.equals(obj)) {
            return false;
        }
        if (!(obj instanceof ProtocolError)) {
            return false;
        }
        final ProtocolError other = (ProtocolError) obj;
        if (message == null) {
            if (other.message != null) {
                return false;
            }
        } else if (!message.equals(other.message)) {
            return false;
        }
        return true;
    }

    @Override
    public MessageOp getOp() {
        return PROTOCOL_ERROR;
    }

    public String wireFormat() {
        return wireFormatPrefix() +
            SEPARATOR + errorType.toString() +
            SEPARATOR + message;
    }

    public MessageError getErrorType() {
        return errorType;
    }

    public String getMessage() {
        return message;
    }
}
/**
 * Positive acknowledgment response; carries no payload.
 */
public class OK extends ResponseMessage {

    public OK() {
    }

    /* De-serialization constructor, invoked reflectively by parse(). */
    public OK(String line, String[] tokens)
        throws InvalidMessageException {
        super(line, tokens);
    }

    @Override
    public MessageOp getOp() {
        return OK_RESP;
    }

    /*
     * NOTE(review): uses the nocheck prefix defined on the enclosing
     * protocol, presumably so the responder's id is not membership
     * checked — confirm against the field's definition.
     */
    @Override
    protected String getMessagePrefix() {
        return messagePrefixNocheck;
    }

    public String wireFormat() {
        return wireFormatPrefix();
    }
}
/**
 * Negative response; carries a single reason message as payload.
 */
public class Fail extends ResponseMessage {

    /* The reason for the failure; part of the wire format. */
    private final String message;

    public Fail(String message) {
        this.message = message;
    }

    /* De-serialization constructor, invoked reflectively by parse(). */
    public Fail(String line, String[] tokens)
        throws InvalidMessageException {
        super(line, tokens);
        message = nextPayloadToken();
    }

    public String getMessage() {
        return message;
    }

    /*
     * NOTE(review): unlike ProtocolError, this hashCode/equals pair also
     * folds in the enclosing protocol instance (getOuterType) — confirm
     * the asymmetry between the two response classes is intended.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + getOuterType().hashCode();
        result = prime * result
            + ((message == null) ? 0 : message.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!super.equals(obj)) {
            return false;
        }
        if (!(obj instanceof Fail)) {
            return false;
        }
        Fail other = (Fail) obj;
        if (!getOuterType().equals(other.getOuterType())) {
            return false;
        }
        if (message == null) {
            if (other.message != null) {
                return false;
            }
        } else if (!message.equals(other.message)) {
            return false;
        }
        return true;
    }

    @Override
    public MessageOp getOp() {
        return FAIL_RESP;
    }

    @Override
    protected String getMessagePrefix() {
        return messagePrefixNocheck;
    }

    public String wireFormat() {
        return wireFormatPrefix() + SEPARATOR + message;
    }

    private TextProtocol getOuterType() {
        return TextProtocol.this;
    }
}
/**
 * Base class for all Request messages. Requests compare equal when they
 * carry the same operation.
 */
public abstract class RequestMessage extends Message {

    protected RequestMessage() {}

    protected RequestMessage(String line, String[] tokens)
        throws InvalidMessageException {
        super(line, tokens);
    }

    /**
     * Returns the version id and Op concatenation that form the prefix
     * for every message.
     */
    protected String wireFormatPrefix() {
        return getMessagePrefix() + SEPARATOR + getOp().opId;
    }

    @Override
    public int hashCode() {
        return getOp().getOpId().hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        /* instanceof yields false for null, covering the null case. */
        if (!(obj instanceof RequestMessage)) {
            return false;
        }
        final RequestMessage other = (RequestMessage) obj;
        return getOp().equals(other.getOp());
    }
}
/**
 * Converts a response line into a ResponseMessage.
 *
 * @param responseLine the raw line received over the network
 * @return the response message
 * @throws InvalidMessageException if the line is malformed
 * @throws ClassCastException if the line denotes a request message
 */
ResponseMessage parseResponse(String responseLine)
    throws InvalidMessageException {
    return (ResponseMessage) parse(responseLine);
}
/**
 * Converts a request line into a requestMessage.
 *
 * @param requestLine the raw line received over the network
 * @return the request message
 * @throws InvalidMessageException if the line is malformed
 * @throws ClassCastException if the line denotes a response message
 */
public RequestMessage parseRequest(String requestLine)
    throws InvalidMessageException {
    return (RequestMessage) parse(requestLine);
}
/**
 * Reads the channel and returns a read request. If the message format
 * was bad, it sends a ProtocolError response back over the channel and
 * no further action is needed by the caller.
 *
 * @param channel the channel delivering the request
 * @return null if EOF was reached or the message format was bad
 * @throws IOException
 */
public RequestMessage getRequestMessage(SocketChannel channel)
    throws IOException {

    final Socket socket = channel.socket();
    /*
     * NOTE(review): the reader (and on the success path the socket) is
     * deliberately left open — closing it would close the caller-owned
     * channel. Confirm the caller manages the socket's lifetime.
     */
    BufferedReader in = new BufferedReader
        (new InputStreamReader(socket.getInputStream()));
    String requestLine = in.readLine();
    if (requestLine == null) {
        /* EOF */
        return null;
    }
    try {
        return parseRequest(requestLine);
    } catch (InvalidMessageException e) {
        LoggerUtils.logMsg(logger, envImpl, formatter, Level.WARNING,
                           "Message format error:" + e.getMessage());
        /* Report the problem to the peer; closing out closes the socket. */
        PrintWriter out = new PrintWriter(socket.getOutputStream(), true);
        out.println(new ProtocolError(e).wireFormat());
        out.close();
        return null;
    }
}
/**
 * Reflectively dispatches the request to the processor's public
 * process() method whose single parameter type is the request's
 * concrete class.
 *
 * @param requestProcessor the object declaring the process() overloads
 * @param requestMessage the request to dispatch
 * @return the response produced by the matching process() method
 * @throws EnvironmentFailureException if no matching method exists or
 * its invocation fails
 */
public ResponseMessage process(Object requestProcessor,
                               RequestMessage requestMessage) {
    Class<? extends Object> cl = requestProcessor.getClass();
    try {
        Method method =
            cl.getMethod("process", requestMessage.getClass());
        return (ResponseMessage) method.invoke
            (requestProcessor, requestMessage);
    } catch (NoSuchMethodException e) {
        LoggerUtils.logMsg(logger, envImpl, formatter, Level.SEVERE,
                           "Method: process(" +
                           requestMessage.getClass().getName() +
                           ") was missing");
        throw EnvironmentFailureException.unexpectedException(e);
    } catch (Exception e) {
        LoggerUtils.logMsg(logger, envImpl, formatter, Level.SEVERE,
                           "Unexpected exception: " + e.getMessage());
        throw EnvironmentFailureException.unexpectedException(e);
    }
}
/**
 * Use to parallelize message exchanges via Futures. Each instance
 * performs one request/response round trip with a remote service.
 */
public class MessageExchange implements Runnable {

    /* The address of the remote service. */
    public final InetSocketAddress target;

    /* The request to send. */
    private final RequestMessage requestMessage;

    /* The remote service to connect to at the target address. */
    private final String serviceName;

    /*
     * The response, set by run() on success.
     * NOTE(review): written by the task thread and read by the caller;
     * visibility presumably relies on the Future's happens-before
     * guarantee — confirm it is always read via Future.get().
     */
    private ResponseMessage responseMessage;

    /* Set instead of responseMessage when the exchange fails.
     * NOTE(review): public mutable field — callers can both read and
     * clobber it. */
    public Exception exception;

    public MessageExchange(InetSocketAddress target,
                           String serviceName,
                           RequestMessage request) {
        this.target = target;
        this.serviceName = serviceName;
        this.requestMessage = request;
    }

    /**
     * Run a message exchange. A successful exchange results in a response
     * message being set. All failures result in the response message being
     * null and an exception being set.
     */
    public void run() {
        Socket socket = new Socket();
        BufferedReader in = null;
        PrintWriter out = null;
        try {
            /* Configure timeouts before connecting. */
            socket.setSoTimeout(readTimeoutMs);
            socket.setTcpNoDelay(true);
            socket.setReuseAddress(true);
            socket.connect(target, openTimeoutMs);
            OutputStream ostream =
                ServiceDispatcher.getServiceOutputStream(socket,
                                                         serviceName);
            out = new PrintWriter(ostream, true);
            /* Send the request and wait for the single-line response. */
            out.println(requestMessage.wireFormat());
            out.flush();
            in = new BufferedReader
                (new InputStreamReader(socket.getInputStream()));
            final String line = in.readLine();
            if (line == null) {
                setResponseMessage
                    (new ProtocolError(MessageError.BAD_FORMAT,
                                       "Premature EOF for request: " +
                                       requestMessage.wireFormat()));
            } else {
                setResponseMessage(parseResponse(line));
            }
        } catch (java.net.SocketTimeoutException e){
            this.exception = e;
        } catch (SocketException e) {
            this.exception = e;
        } catch (IOException e) {
            this.exception = e;
        } catch (TextProtocol.InvalidMessageException e) {
            this.exception = e;
        } catch (ServiceConnectFailedException e) {
            this.exception = e;
        } catch (Exception e) {
            /* Anything else is unexpected: record, log, and escalate. */
            this.exception = e;
            LoggerUtils.logMsg(logger, envImpl, formatter, Level.SEVERE,
                               "Unexpected exception:" + e.getMessage());
            throw EnvironmentFailureException.unexpectedException
                ("Service: " + serviceName +
                 " failed; attempting request: " + requestMessage.getOp(),
                 e);
        } finally {
            /* Always release the socket and streams. */
            Utils.cleanup(logger, envImpl, formatter, socket, in, out);
        }
    }

    public void setResponseMessage(ResponseMessage responseMessage) {
        this.responseMessage = responseMessage;
    }

    public ResponseMessage getResponseMessage() {
        /* Make sure the response was from a member of the same group. */
        if ((exception instanceof InvalidMessageException) &&
            (((InvalidMessageException) exception).getErrorType() ==
             MessageError.GROUP_MISMATCH)) {
            throw EnvironmentFailureException.
                unexpectedState(envImpl, exception.getMessage());
        }
        return responseMessage;
    }

    public RequestMessage getRequestMessage() {
        return requestMessage;
    }
}
/**
 * A WireFormatable whose wire form is simply its string value; equality
 * and hashing are delegated to that string.
 */
protected static class StringFormatable implements WireFormatable {

    /* The wrapped string value; may be null until init() is called. */
    protected String s;

    StringFormatable() {}

    protected StringFormatable(String s) {
        this.s = s;
    }

    public void init(String wireFormat) {
        s = wireFormat;
    }

    public String wireFormat() {
        return s;
    }

    @Override
    public int hashCode() {
        return (s == null) ? 0 : s.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        /* instanceof yields false for null, covering the null case. */
        if (!(obj instanceof StringFormatable)) {
            return false;
        }
        final StringFormatable other = (StringFormatable) obj;
        return (s == null) ? (other.s == null) : s.equals(other.s);
    }
}
/*
 * The type associated with an invalid Message. It's used by the exception
 * below and by ProtocolError. ("static" removed: nested enums are
 * implicitly static, and "static public" was non-canonical order.)
 */
public enum MessageError {
    /** The message was structurally malformed. */
    BAD_FORMAT,
    /** The message's protocol version did not match ours. */
    VERSION_MISMATCH,
    /** The message's group name did not match ours. */
    GROUP_MISMATCH,
    /** The sender was not a member of the group. */
    NOT_A_MEMBER
}
/**
 * Used to indicate a message format or invalid content exception.
 */
@SuppressWarnings("serial")
public static class InvalidMessageException extends Exception {

    /* Categorizes the failure; also transmitted in ProtocolError. */
    private final MessageError errorType;

    InvalidMessageException(MessageError errorType, String message) {
        super(message);
        this.errorType = errorType;
    }

    public MessageError getErrorType() {
        return errorType;
    }
}
}
| |
/*
* Written by Mike Wallace (mfwallace at gmail.com). Available
* on the web site http://mfwallace.googlepages.com/.
*
* Copyright (c) 2006 Mike Wallace.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom
* the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package io.miti.nemo.app;
import io.miti.nemo.common.StoreInfo;
import io.miti.nemo.common.Utility;
import java.io.File;
import java.io.IOException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
 * SAX handler that restores application state (window geometry, index
 * directory, stores, and search settings) from a saved XML file.
 *
 * @author mwallace
 * @version 1.0
 */
public final class AppDataImporter extends DefaultHandler
{
  /** The input filename. */
  private String filename = null;

  /** The current element's mode; 0 means "not a field of interest". */
  private transient int mode = 0;

  /** The AppData instance being populated. */
  private transient AppData appData = null;

  /** The store info currently under construction. */
  private transient StoreInfo storeInfo = null;

  /** Accumulates character data for the current element. */
  private transient StringBuilder wordBuilder = new StringBuilder(100);

  /** Element names in mode order: the name at index i maps to mode i+1. */
  private static final String[] MODE_ELEMENTS =
    {"rootx", "rooty", "sizeheight", "sizewidth", "indexdir", "file",
     "name", "search", "search.files", "search.selected", "search.filter",
     "search.case", "search.limit", "search.max"};

  /**
   * Default constructor; not for external use.
   */
  @SuppressWarnings("unused")
  private AppDataImporter()
  {
    super();
  }

  /**
   * Constructor taking a filename.
   *
   * @param sFilename the input filename
   * @param pAppData the AppData instance
   */
  public AppDataImporter(final String sFilename,
                         final AppData pAppData)
  {
    super();
    filename = sFilename;
    appData = pAppData;
  }

  /**
   * Parses an XML file using a SAX parser.
   *
   * @return the result of the operation
   */
  public boolean parseXmlFile()
  {
    try
    {
      // Create a non-validating parser and parse the file with this handler
      final SAXParserFactory factory = SAXParserFactory.newInstance();
      factory.setValidating(false);
      factory.newSAXParser().parse(new File(filename), this);
      return true;
    }
    catch (SAXException e)
    {
      e.printStackTrace();
    }
    catch (ParserConfigurationException e)
    {
      e.printStackTrace();
    }
    catch (IOException e)
    {
      e.printStackTrace();
    }

    // An exception occurred above
    return false;
  }

  /**
   * Handle the characters.
   *
   * @param ch the character array
   * @param start the start index
   * @param length the array length
   * @throws SAXException parsing exception
   * @see org.xml.sax.helpers.DefaultHandler#characters(char[], int, int)
   */
  @Override
  public void characters(final char[] ch,
                         final int start,
                         final int length) throws SAXException
  {
    // Only accumulate text while inside a field of interest
    if (mode != 0)
    {
      wordBuilder.append(ch, start, length);
    }
  }

  /**
   * Finish the document.
   *
   * @throws SAXException parsing exception
   * @see org.xml.sax.helpers.DefaultHandler#endDocument()
   */
  @Override
  public void endDocument() throws SAXException
  {
  }

  /**
   * Finish parsing an element: store the accumulated text into the
   * field selected by the current mode.
   *
   * @param uri the URI
   * @param localName the local name
   * @param name the element name
   * @throws SAXException parsing exception
   * @see org.xml.sax.helpers.DefaultHandler#endElement(java.lang.String,
   *      java.lang.String, java.lang.String)
   */
  @Override
  public void endElement(final String uri,
                         final String localName,
                         final String name)
    throws SAXException
  {
    final String text = wordBuilder.toString();
    switch (mode)
    {
      case 1:
        appData.setWindowRootX(Utility.getStringAsInteger(text, 0));
        break;

      case 2:
        appData.setWindowRootY(Utility.getStringAsInteger(text, 0));
        break;

      case 3:
        appData.setWindowHeight(Utility.getStringAsInteger(text, 0));
        break;

      case 4:
        appData.setWindowWidth(Utility.getStringAsInteger(text, 0));
        break;

      case 5:
        appData.setIndexDirectory(Utility.removeXmlChars(text));
        break;

      case 6:
        // A "file" element opens a new store; "name" (case 7) completes it
        storeInfo = new StoreInfo();
        storeInfo.setStoreFilename(Utility.removeXmlChars(text));
        break;

      case 7:
        storeInfo.setStoreName(Utility.removeXmlChars(text));
        appData.addStore(storeInfo);
        break;

      case 8:
        appData.addSearch(Utility.removeXmlChars(text));
        break;

      case 9:
        appData.setSearchFiles(Utility.getStringAsInteger(text, 0));
        break;

      case 10:
        appData.setSearchSelected(Utility.getStringAsBoolean(text));
        break;

      case 11:
        appData.setSearchFilter(Utility.getStringAsInteger(text, 0));
        break;

      case 12:
        appData.setSearchCase(Utility.getStringAsBoolean(text));
        break;

      case 13:
        appData.setSearchLimit(Utility.getStringAsBoolean(text));
        break;

      case 14:
        appData.setSearchMax(Utility.getStringAsInteger(text, 100));
        break;

      default:
        break;
    }

    // Leave the element: no longer in a field of interest
    mode = 0;
  }

  /**
   * Start processing the document.
   *
   * @throws SAXException parsing exception
   * @see org.xml.sax.helpers.DefaultHandler#startDocument()
   */
  @Override
  public void startDocument() throws SAXException
  {
  }

  /**
   * Start parsing an element: select the mode from the element name and
   * reset the text buffer.
   *
   * @param uri the uri
   * @param localName the local name
   * @param name the name
   * @param attributes the element attributes
   * @throws SAXException parsing exception
   * @see org.xml.sax.helpers.DefaultHandler#startElement(java.lang.String,
   *      java.lang.String, java.lang.String, org.xml.sax.Attributes)
   */
  @Override
  public void startElement(final String uri,
                           final String localName,
                           final String name,
                           final Attributes attributes) throws SAXException
  {
    // Map the element name to its mode (0 if not a field of interest)
    mode = 0;
    for (int i = 0; i < MODE_ELEMENTS.length; ++i)
    {
      if (name.equals(MODE_ELEMENTS[i]))
      {
        mode = i + 1;
        break;
      }
    }

    // Clear the word buffer
    wordBuilder.setLength(0);
  }
}
| |
package com.test.arduinosocket.common;
import android.app.Activity;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.RingtoneManager;
import android.util.Log;
import android.widget.Toast;
import com.google.gson.Gson;
import com.test.arduinosocket.activity.AsyncListenActivity;
import com.test.arduinosocket.MyApplication;
import com.test.arduinosocket.R;
import java.io.*;
import java.net.*;
import java.util.*;
import static android.content.Intent.*;
//import org.apache.http.conn.util.InetAddressUtils;
public class Utils {
/**
* Convert byte array to hex string
* @param bytes
* @return
*/
public static String bytesToHex(byte[] bytes) {
StringBuilder sbuf = new StringBuilder();
for(int idx=0; idx < bytes.length; idx++) {
int intVal = bytes[idx] & 0xff;
if (intVal < 0x10) sbuf.append("0");
sbuf.append(Integer.toHexString(intVal).toUpperCase());
}
return sbuf.toString();
}
/**
* Get utf8 byte array.
* @param str
* @return array of NULL if error was found
*/
public static byte[] getUTF8Bytes(String str) {
try { return str.getBytes("UTF-8"); } catch (Exception ex) { return null; }
}
/**
* Load UTF8withBOM or any ansi text file.
* @param filename
* @return
* @throws java.io.IOException
*/
public static String loadFileAsString(String filename) throws java.io.IOException {
final int BUFLEN=1024;
BufferedInputStream is = new BufferedInputStream(new FileInputStream(filename), BUFLEN);
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream(BUFLEN);
byte[] bytes = new byte[BUFLEN];
boolean isUTF8=false;
int read,count=0;
while((read=is.read(bytes)) != -1) {
if (count==0 && bytes[0]==(byte)0xEF && bytes[1]==(byte)0xBB && bytes[2]==(byte)0xBF ) {
isUTF8=true;
baos.write(bytes, 3, read-3); // drop UTF8 bom marker
} else {
baos.write(bytes, 0, read);
}
count+=read;
}
return isUTF8 ? new String(baos.toByteArray(), "UTF-8") : new String(baos.toByteArray());
} finally {
try{ is.close(); } catch(Exception ex){}
}
}
/**
* Returns MAC address of the given interface name.
* @param interfaceName eth0, wlan0 or NULL=use first interface
* @return mac address or empty string
*/
public static String getMACAddress(String interfaceName) {
try {
List<NetworkInterface> interfaces = Collections.list(NetworkInterface.getNetworkInterfaces());
for (NetworkInterface intf : interfaces) {
if (interfaceName != null) {
if (!intf.getName().equalsIgnoreCase(interfaceName)) continue;
}
byte[] mac = intf.getHardwareAddress();
if (mac==null) return "";
StringBuilder buf = new StringBuilder();
for (int idx=0; idx<mac.length; idx++)
buf.append(String.format("%02X:", mac[idx]));
if (buf.length()>0) buf.deleteCharAt(buf.length()-1);
return buf.toString();
}
} catch (Exception ex) { } // for now eat exceptions
return "";
/*try {
// this is so Linux hack
return loadFileAsString("/sys/class/net/" +interfaceName + "/address").toUpperCase().trim();
} catch (IOException ex) {
return null;
}*/
}
/**
* Get IP address from first non-localhost interface
* @param useIPv4 true=return ipv4, false=return ipv6
* @return address or empty string
*/
public static String getIPAddress(boolean useIPv4) {
try {
List<NetworkInterface> interfaces = Collections.list(NetworkInterface.getNetworkInterfaces());
for (NetworkInterface intf : interfaces) {
if(!intf.getName().toLowerCase().startsWith("wlan")){
continue;
}
List<InetAddress> addrs = Collections.list(intf.getInetAddresses());
for (InetAddress addr : addrs) {
if (!addr.isLoopbackAddress()) {
String sAddr = addr.getHostAddress();
//boolean isIPv4 = InetAddressUtils.isIPv4Address(sAddr);
boolean isIPv4 = sAddr.indexOf(':')<0;
if (useIPv4) {
if (isIPv4)
return sAddr;
} else {
if (!isIPv4) {
int delim = sAddr.indexOf('%'); // drop ip6 zone suffix
return delim<0 ? sAddr.toUpperCase() : sAddr.substring(0, delim).toUpperCase();
}
}
}
}
}
} catch (Exception ex) { } // for now eat exceptions
return "";
}
public static void createNotification(int notificationId, String contentText, Context context, boolean isSticky){
// prepare intent which is triggered if the
// notification is selected
//Intent intent = new Intent(context, NotificationEventHandler.class);
//PendingIntent pIntent = PendingIntent.getActivity(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
Intent myIntent = new Intent(context, AsyncListenActivity.class);
PendingIntent pendingIntent = PendingIntent.getActivity(context,0,myIntent, FILL_IN_ACTION);
// build notification
// the addAction re-use the same intent to keep the example short
Bitmap bitmap= BitmapFactory.decodeResource(context.getResources(), R.drawable.notify_large);
Notification.Builder notificationBuilder = new Notification.Builder(context)
.setContentTitle(Constants.DOOR_LOCKER_STATUS)
.setContentText(contentText)
.setStyle(new Notification.BigTextStyle().bigText(contentText))
.setLargeIcon(Bitmap.createScaledBitmap(bitmap , 112, 112, false))
.setSmallIcon(R.drawable.ic_stat_notify)
.setContentIntent(pendingIntent)
.setAutoCancel(true)
.setDefaults(Notification.DEFAULT_LIGHTS | Notification.DEFAULT_VIBRATE | Notification.PRIORITY_MAX);
if(isSticky){
notificationBuilder.setSound(RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE));
}else{
notificationBuilder.setSound(RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION));
}
Notification notification =notificationBuilder.build();
notification.flags=isSticky?Notification.FLAG_INSISTENT|Notification.FLAG_AUTO_CANCEL:Notification.FLAG_AUTO_CANCEL;
NotificationManager notificationManager =
(NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.notify(notificationId, notification);
}
public static void createNotificationWithYesNo(String contentText, Context context, boolean isSticky, String deviceId){
// prepare intent which is triggered if the
// notification is selected
Intent myIntent = new Intent(context, AsyncListenActivity.class);
myIntent.setAction("YES_ACTION");
myIntent.putExtra(Constants.LOCAL_BC_EVENT_DATA, Constants.YES_RESPONSE);
myIntent.putExtra("DEVICE_ID", deviceId);
PendingIntent pendingIntent = PendingIntent.getActivity(context,0,myIntent, FILL_IN_ACTION);
Intent yesReceive = new Intent(context, AsyncListenActivity.class);
yesReceive.setAction("YES_ACTION");
yesReceive.putExtra(Constants.LOCAL_BC_EVENT_DATA, Constants.YES_RESPONSE);
yesReceive.putExtra("DEVICE_ID", deviceId);
PendingIntent pendingIntentYes = PendingIntent.getActivity(context, 1, yesReceive, PendingIntent.FLAG_UPDATE_CURRENT);
Notification.Action yesAction = new Notification.Action.Builder(R.drawable.ic_action_done, "Yes", pendingIntentYes).build();
Intent noReceive = new Intent(context, AsyncListenActivity.class);
noReceive.setAction("NO_ACTION");
noReceive.putExtra(Constants.LOCAL_BC_EVENT_DATA, Constants.NO_RESPONSE);
noReceive.putExtra("DEVICE_ID", deviceId);
PendingIntent pendingIntentNo = PendingIntent.getActivity(context, 1, noReceive, PendingIntent.FLAG_UPDATE_CURRENT);
Notification.Action noAction = new Notification.Action.Builder(R.drawable.ic_action_highlight_remove, "No", pendingIntentNo).build();
// build notification
// the addAction re-use the same intent to keep the example short
Bitmap bitmap= BitmapFactory.decodeResource(context.getResources(), R.drawable.notify_large);
Notification.Builder notificationBuilder = new Notification.Builder(context)
.setContentTitle(Constants.DOOR_LOCKER_STATUS)
.setContentText(contentText)
.setStyle(new Notification.BigTextStyle().bigText(contentText))
.setLargeIcon(Bitmap.createScaledBitmap(bitmap , 112, 112, false))
.setSmallIcon(R.drawable.ic_stat_notify)
.setContentIntent(pendingIntent)
.setAutoCancel(false)
.setPriority(Notification.PRIORITY_HIGH)
.setOngoing(true)
.setDefaults(Notification.DEFAULT_VIBRATE|Notification.FLAG_ONGOING_EVENT);
//if (Build.VERSION.SDK_INT >= 21) notificationBuilder.setVibrate(new long[0]);
notificationBuilder.addAction(yesAction);
notificationBuilder.addAction(noAction);
if(isSticky){
notificationBuilder.setSound(RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE));
}else{
notificationBuilder.setSound(RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION));
}
Notification notification =notificationBuilder.build();
notification.flags=isSticky?Notification.FLAG_INSISTENT:Notification.FLAG_AUTO_CANCEL;
NotificationManager notificationManager =
(NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.notify(Constants.DEVICE_REQUEST_NOTIFICATION_ID, notification);
}
public static void removeNotification(int notificationId, Context context){
NotificationManager nMgr = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
nMgr.cancel(notificationId);
}
private static Toast prevToast=null;
private static Activity runningActivity=null;
/**
 * Shows a long toast with the given text on the UI thread of the current
 * activity, cancelling any toast still on screen. No-op when {@code text}
 * is null or when no activity is currently available. All failures are
 * swallowed deliberately: a missed toast must never crash the app.
 *
 * @param text message to display; ignored when null
 */
public static void showMessage(final String text){
    if (text == null) {
        return;
    }
    Log.i(Constants.LOG_TAG_TOAST, text);
    try {
        runningActivity = MyApplication.getCurrentActivity();
        if (runningActivity == null) {
            return;
        }
        // Cancel the old toast and show the new one inside a SINGLE UI-thread
        // task. The original posted them as two separate runnables, so a
        // concurrent showMessage() call could interleave between the cancel
        // and the show, cancelling or clobbering the wrong toast.
        runningActivity.runOnUiThread(new Runnable() {
            public void run() {
                try {
                    if (prevToast != null) {
                        prevToast.cancel();
                    }
                    prevToast = Toast.makeText(runningActivity.getBaseContext(), text, Toast.LENGTH_LONG);
                    prevToast.show();
                } catch (Exception ex) {
                    // ignore — toast display is best-effort
                }
            }
        });
    } catch (Exception ex) {
        // ignore — fetching the current activity can fail during app teardown
    }
}
/**
 * Serializes an object to its JSON representation using Gson.
 *
 * @param object value to serialize; may be null (Gson renders it as "null")
 * @return JSON string form of {@code object}
 */
public static String serialize(Object object){
    return new Gson().toJson(object);
}
/**
 * Deserializes a JSON string into an instance of the requested class using Gson.
 *
 * @param <T>           target type
 * @param serializedStr JSON text to parse
 * @param clazz         class object of the target type
 * @return the deserialized instance, or null for JSON literal "null"
 */
public static <T> T deserialize(String serializedStr, Class<T> clazz){
    return new Gson().fromJson(serializedStr, clazz);
}
}
| |
/**
* Copyright (C) 2016 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product.bond;
import java.io.Serializable;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableDefaults;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.google.common.collect.ImmutableSet;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.currency.Currency;
import com.opengamma.strata.basics.value.Rounding;
import com.opengamma.strata.collect.Messages;
import com.opengamma.strata.product.Security;
import com.opengamma.strata.product.SecurityId;
import com.opengamma.strata.product.SecurityInfo;
import com.opengamma.strata.product.TradeInfo;
import com.opengamma.strata.product.common.PutCall;
import com.opengamma.strata.product.option.FutureOptionPremiumStyle;
/**
* A security representing a futures contract, based on a basket of fixed coupon bonds.
* <p>
* A bond future is a financial instrument that is based on the future value of
* a basket of fixed coupon bonds. The profit or loss of a bond future is settled daily.
*
* <h4>Price</h4>
* Strata uses <i>decimal prices</i> for bond futures options in the trade model, pricers and market data.
* This is coherent with the pricing of {@link BondFuture}.
*/
@BeanDefinition
public final class BondFutureOptionSecurity
implements Security, ImmutableBean, Serializable {
/**
* The standard security information.
* <p>
* This includes the security identifier.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final SecurityInfo info;
/**
* The currency that the future is traded in.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final Currency currency;
/**
* Whether the option is put or call.
* <p>
* A call gives the owner the right, but not obligation, to buy the underlying at
* an agreed price in the future. A put gives a similar option to sell.
*/
@PropertyDefinition
private final PutCall putCall;
/**
* The strike price, represented in decimal form.
* <p>
* This is the price at which the option applies and refers to the price of the underlying future.
* This must be represented in decimal form, {@code (1.0 - decimalRate)}.
* As such, the common market price of 99.3 for a 0.7% rate must be input as 0.993.
* The rate implied by the strike can take negative values.
*/
@PropertyDefinition
private final double strikePrice;
/**
* The expiry date of the option.
* <p>
* The expiry date is related to the expiry time and time-zone.
* The date must not be after last trade date of the underlying future.
*/
@PropertyDefinition(validate = "notNull")
private final LocalDate expiryDate;
/**
* The expiry time of the option.
* <p>
* The expiry time is related to the expiry date and time-zone.
*/
@PropertyDefinition(validate = "notNull")
private final LocalTime expiryTime;
/**
* The time-zone of the expiry time.
* <p>
* The expiry time-zone is related to the expiry date and time.
*/
@PropertyDefinition(validate = "notNull")
private final ZoneId expiryZone;
/**
* The style of the option premium.
* <p>
* The two options are daily margining and upfront premium.
*/
@PropertyDefinition(validate = "notNull")
private final FutureOptionPremiumStyle premiumStyle;
/**
* The definition of how to round the option price, defaulted to no rounding.
* <p>
* The price is represented in decimal form, not percentage form.
* As such, the decimal places expressed by the rounding refers to this decimal form.
* For example, the common market price of 99.7125 is represented as 0.997125 which
* has 6 decimal places.
*/
@PropertyDefinition(validate = "notNull")
private final Rounding rounding;
/**
* The identifier of the underlying future.
*/
@PropertyDefinition(validate = "notNull")
private final SecurityId underlyingFutureId;
//-------------------------------------------------------------------------
// Joda-Beans hook: seeds the builder so that 'rounding' defaults to
// Rounding.none() when the caller does not set it explicitly.
@ImmutableDefaults
private static void applyDefaults(Builder builder) {
builder.rounding(Rounding.none());
}
//-------------------------------------------------------------------------
// The only security this option depends on is the underlying bond future.
@Override
public ImmutableSet<SecurityId> getUnderlyingIds() {
return ImmutableSet.of(underlyingFutureId);
}
//-------------------------------------------------------------------------
/**
 * Creates the {@code BondFutureOption} product for this security, resolving
 * the underlying future identifier against the supplied reference data.
 *
 * @param refData the reference data used to look up the underlying future
 * @return the bond future option product
 * @throws ClassCastException if the underlying identifier resolves to a
 *   security that is not a {@code BondFutureSecurity}
 */
@Override
public BondFutureOption createProduct(ReferenceData refData) {
  Security resolved = refData.getValue(underlyingFutureId);
  if (resolved instanceof BondFutureSecurity) {
    BondFuture future = ((BondFutureSecurity) resolved).createProduct(refData);
    return new BondFutureOption(
        getSecurityId(),
        putCall,
        strikePrice,
        expiryDate,
        expiryTime,
        expiryZone,
        premiumStyle,
        rounding,
        future);
  }
  throw new ClassCastException(Messages.format(
      "{} underlying future '{}' resolved to '{}' when '{}' was expected",
      BondFutureOptionSecurity.class.getSimpleName(),
      underlyingFutureId,
      resolved.getClass().getSimpleName(),
      BondFutureSecurity.class.getSimpleName()));
}
/**
 * Creates a trade in this security, first resolving the product from
 * reference data.
 *
 * @param info       the trade information
 * @param quantity   the number of contracts traded
 * @param tradePrice the price agreed for the trade, in decimal form
 * @param refData    the reference data used to resolve the underlying future
 * @return the trade wrapping the resolved product
 */
@Override
public BondFutureOptionTrade createTrade(TradeInfo info, double quantity, double tradePrice, ReferenceData refData) {
  return new BondFutureOptionTrade(info, createProduct(refData), quantity, tradePrice);
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code BondFutureOptionSecurity}.
* @return the meta-bean, not null
*/
public static BondFutureOptionSecurity.Meta meta() {
return BondFutureOptionSecurity.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(BondFutureOptionSecurity.Meta.INSTANCE);
}
/**
* The serialization version id.
*/
private static final long serialVersionUID = 1L;
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static BondFutureOptionSecurity.Builder builder() {
return new BondFutureOptionSecurity.Builder();
}
private BondFutureOptionSecurity(
SecurityInfo info,
Currency currency,
PutCall putCall,
double strikePrice,
LocalDate expiryDate,
LocalTime expiryTime,
ZoneId expiryZone,
FutureOptionPremiumStyle premiumStyle,
Rounding rounding,
SecurityId underlyingFutureId) {
JodaBeanUtils.notNull(info, "info");
JodaBeanUtils.notNull(currency, "currency");
JodaBeanUtils.notNull(expiryDate, "expiryDate");
JodaBeanUtils.notNull(expiryTime, "expiryTime");
JodaBeanUtils.notNull(expiryZone, "expiryZone");
JodaBeanUtils.notNull(premiumStyle, "premiumStyle");
JodaBeanUtils.notNull(rounding, "rounding");
JodaBeanUtils.notNull(underlyingFutureId, "underlyingFutureId");
this.info = info;
this.currency = currency;
this.putCall = putCall;
this.strikePrice = strikePrice;
this.expiryDate = expiryDate;
this.expiryTime = expiryTime;
this.expiryZone = expiryZone;
this.premiumStyle = premiumStyle;
this.rounding = rounding;
this.underlyingFutureId = underlyingFutureId;
}
@Override
public BondFutureOptionSecurity.Meta metaBean() {
return BondFutureOptionSecurity.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets the standard security information.
* <p>
* This includes the security identifier.
* @return the value of the property, not null
*/
@Override
public SecurityInfo getInfo() {
return info;
}
//-----------------------------------------------------------------------
/**
* Gets the currency that the future is traded in.
* @return the value of the property, not null
*/
@Override
public Currency getCurrency() {
return currency;
}
//-----------------------------------------------------------------------
/**
* Gets whether the option is put or call.
* <p>
* A call gives the owner the right, but not obligation, to buy the underlying at
* an agreed price in the future. A put gives a similar option to sell.
* @return the value of the property
*/
public PutCall getPutCall() {
return putCall;
}
//-----------------------------------------------------------------------
/**
* Gets the strike price, represented in decimal form.
* <p>
* This is the price at which the option applies and refers to the price of the underlying future.
* This must be represented in decimal form, {@code (1.0 - decimalRate)}.
* As such, the common market price of 99.3 for a 0.7% rate must be input as 0.993.
* The rate implied by the strike can take negative values.
* @return the value of the property
*/
public double getStrikePrice() {
return strikePrice;
}
//-----------------------------------------------------------------------
/**
* Gets the expiry date of the option.
* <p>
* The expiry date is related to the expiry time and time-zone.
* The date must not be after last trade date of the underlying future.
* @return the value of the property, not null
*/
public LocalDate getExpiryDate() {
return expiryDate;
}
//-----------------------------------------------------------------------
/**
* Gets the expiry time of the option.
* <p>
* The expiry time is related to the expiry date and time-zone.
* @return the value of the property, not null
*/
public LocalTime getExpiryTime() {
return expiryTime;
}
//-----------------------------------------------------------------------
/**
* Gets the time-zone of the expiry time.
* <p>
* The expiry time-zone is related to the expiry date and time.
* @return the value of the property, not null
*/
public ZoneId getExpiryZone() {
return expiryZone;
}
//-----------------------------------------------------------------------
/**
* Gets the style of the option premium.
* <p>
* The two options are daily margining and upfront premium.
* @return the value of the property, not null
*/
public FutureOptionPremiumStyle getPremiumStyle() {
return premiumStyle;
}
//-----------------------------------------------------------------------
/**
* Gets the definition of how to round the option price, defaulted to no rounding.
* <p>
* The price is represented in decimal form, not percentage form.
* As such, the decimal places expressed by the rounding refers to this decimal form.
* For example, the common market price of 99.7125 is represented as 0.997125 which
* has 6 decimal places.
* @return the value of the property, not null
*/
public Rounding getRounding() {
return rounding;
}
//-----------------------------------------------------------------------
/**
* Gets the identifier of the underlying future.
* @return the value of the property, not null
*/
public SecurityId getUnderlyingFutureId() {
return underlyingFutureId;
}
//-----------------------------------------------------------------------
/**
* Returns a builder that allows this bean to be mutated.
* @return the mutable builder, not null
*/
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
BondFutureOptionSecurity other = (BondFutureOptionSecurity) obj;
return JodaBeanUtils.equal(info, other.info) &&
JodaBeanUtils.equal(currency, other.currency) &&
JodaBeanUtils.equal(putCall, other.putCall) &&
JodaBeanUtils.equal(strikePrice, other.strikePrice) &&
JodaBeanUtils.equal(expiryDate, other.expiryDate) &&
JodaBeanUtils.equal(expiryTime, other.expiryTime) &&
JodaBeanUtils.equal(expiryZone, other.expiryZone) &&
JodaBeanUtils.equal(premiumStyle, other.premiumStyle) &&
JodaBeanUtils.equal(rounding, other.rounding) &&
JodaBeanUtils.equal(underlyingFutureId, other.underlyingFutureId);
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(info);
hash = hash * 31 + JodaBeanUtils.hashCode(currency);
hash = hash * 31 + JodaBeanUtils.hashCode(putCall);
hash = hash * 31 + JodaBeanUtils.hashCode(strikePrice);
hash = hash * 31 + JodaBeanUtils.hashCode(expiryDate);
hash = hash * 31 + JodaBeanUtils.hashCode(expiryTime);
hash = hash * 31 + JodaBeanUtils.hashCode(expiryZone);
hash = hash * 31 + JodaBeanUtils.hashCode(premiumStyle);
hash = hash * 31 + JodaBeanUtils.hashCode(rounding);
hash = hash * 31 + JodaBeanUtils.hashCode(underlyingFutureId);
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(352);
buf.append("BondFutureOptionSecurity{");
buf.append("info").append('=').append(info).append(',').append(' ');
buf.append("currency").append('=').append(currency).append(',').append(' ');
buf.append("putCall").append('=').append(putCall).append(',').append(' ');
buf.append("strikePrice").append('=').append(strikePrice).append(',').append(' ');
buf.append("expiryDate").append('=').append(expiryDate).append(',').append(' ');
buf.append("expiryTime").append('=').append(expiryTime).append(',').append(' ');
buf.append("expiryZone").append('=').append(expiryZone).append(',').append(' ');
buf.append("premiumStyle").append('=').append(premiumStyle).append(',').append(' ');
buf.append("rounding").append('=').append(rounding).append(',').append(' ');
buf.append("underlyingFutureId").append('=').append(JodaBeanUtils.toString(underlyingFutureId));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code BondFutureOptionSecurity}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code info} property.
*/
private final MetaProperty<SecurityInfo> info = DirectMetaProperty.ofImmutable(
this, "info", BondFutureOptionSecurity.class, SecurityInfo.class);
/**
* The meta-property for the {@code currency} property.
*/
private final MetaProperty<Currency> currency = DirectMetaProperty.ofImmutable(
this, "currency", BondFutureOptionSecurity.class, Currency.class);
/**
* The meta-property for the {@code putCall} property.
*/
private final MetaProperty<PutCall> putCall = DirectMetaProperty.ofImmutable(
this, "putCall", BondFutureOptionSecurity.class, PutCall.class);
/**
* The meta-property for the {@code strikePrice} property.
*/
private final MetaProperty<Double> strikePrice = DirectMetaProperty.ofImmutable(
this, "strikePrice", BondFutureOptionSecurity.class, Double.TYPE);
/**
* The meta-property for the {@code expiryDate} property.
*/
private final MetaProperty<LocalDate> expiryDate = DirectMetaProperty.ofImmutable(
this, "expiryDate", BondFutureOptionSecurity.class, LocalDate.class);
/**
* The meta-property for the {@code expiryTime} property.
*/
private final MetaProperty<LocalTime> expiryTime = DirectMetaProperty.ofImmutable(
this, "expiryTime", BondFutureOptionSecurity.class, LocalTime.class);
/**
* The meta-property for the {@code expiryZone} property.
*/
private final MetaProperty<ZoneId> expiryZone = DirectMetaProperty.ofImmutable(
this, "expiryZone", BondFutureOptionSecurity.class, ZoneId.class);
/**
* The meta-property for the {@code premiumStyle} property.
*/
private final MetaProperty<FutureOptionPremiumStyle> premiumStyle = DirectMetaProperty.ofImmutable(
this, "premiumStyle", BondFutureOptionSecurity.class, FutureOptionPremiumStyle.class);
/**
* The meta-property for the {@code rounding} property.
*/
private final MetaProperty<Rounding> rounding = DirectMetaProperty.ofImmutable(
this, "rounding", BondFutureOptionSecurity.class, Rounding.class);
/**
* The meta-property for the {@code underlyingFutureId} property.
*/
private final MetaProperty<SecurityId> underlyingFutureId = DirectMetaProperty.ofImmutable(
this, "underlyingFutureId", BondFutureOptionSecurity.class, SecurityId.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"info",
"currency",
"putCall",
"strikePrice",
"expiryDate",
"expiryTime",
"expiryZone",
"premiumStyle",
"rounding",
"underlyingFutureId");
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 3237038: // info
return info;
case 575402001: // currency
return currency;
case -219971059: // putCall
return putCall;
case 50946231: // strikePrice
return strikePrice;
case -816738431: // expiryDate
return expiryDate;
case -816254304: // expiryTime
return expiryTime;
case -816069761: // expiryZone
return expiryZone;
case -1257652838: // premiumStyle
return premiumStyle;
case -142444: // rounding
return rounding;
case -109104965: // underlyingFutureId
return underlyingFutureId;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BondFutureOptionSecurity.Builder builder() {
return new BondFutureOptionSecurity.Builder();
}
@Override
public Class<? extends BondFutureOptionSecurity> beanType() {
return BondFutureOptionSecurity.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code info} property.
* @return the meta-property, not null
*/
public MetaProperty<SecurityInfo> info() {
return info;
}
/**
* The meta-property for the {@code currency} property.
* @return the meta-property, not null
*/
public MetaProperty<Currency> currency() {
return currency;
}
/**
* The meta-property for the {@code putCall} property.
* @return the meta-property, not null
*/
public MetaProperty<PutCall> putCall() {
return putCall;
}
/**
* The meta-property for the {@code strikePrice} property.
* @return the meta-property, not null
*/
public MetaProperty<Double> strikePrice() {
return strikePrice;
}
/**
* The meta-property for the {@code expiryDate} property.
* @return the meta-property, not null
*/
public MetaProperty<LocalDate> expiryDate() {
return expiryDate;
}
/**
* The meta-property for the {@code expiryTime} property.
* @return the meta-property, not null
*/
public MetaProperty<LocalTime> expiryTime() {
return expiryTime;
}
/**
* The meta-property for the {@code expiryZone} property.
* @return the meta-property, not null
*/
public MetaProperty<ZoneId> expiryZone() {
return expiryZone;
}
/**
* The meta-property for the {@code premiumStyle} property.
* @return the meta-property, not null
*/
public MetaProperty<FutureOptionPremiumStyle> premiumStyle() {
return premiumStyle;
}
/**
* The meta-property for the {@code rounding} property.
* @return the meta-property, not null
*/
public MetaProperty<Rounding> rounding() {
return rounding;
}
/**
* The meta-property for the {@code underlyingFutureId} property.
* @return the meta-property, not null
*/
public MetaProperty<SecurityId> underlyingFutureId() {
return underlyingFutureId;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 3237038: // info
return ((BondFutureOptionSecurity) bean).getInfo();
case 575402001: // currency
return ((BondFutureOptionSecurity) bean).getCurrency();
case -219971059: // putCall
return ((BondFutureOptionSecurity) bean).getPutCall();
case 50946231: // strikePrice
return ((BondFutureOptionSecurity) bean).getStrikePrice();
case -816738431: // expiryDate
return ((BondFutureOptionSecurity) bean).getExpiryDate();
case -816254304: // expiryTime
return ((BondFutureOptionSecurity) bean).getExpiryTime();
case -816069761: // expiryZone
return ((BondFutureOptionSecurity) bean).getExpiryZone();
case -1257652838: // premiumStyle
return ((BondFutureOptionSecurity) bean).getPremiumStyle();
case -142444: // rounding
return ((BondFutureOptionSecurity) bean).getRounding();
case -109104965: // underlyingFutureId
return ((BondFutureOptionSecurity) bean).getUnderlyingFutureId();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code BondFutureOptionSecurity}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<BondFutureOptionSecurity> {
private SecurityInfo info;
private Currency currency;
private PutCall putCall;
private double strikePrice;
private LocalDate expiryDate;
private LocalTime expiryTime;
private ZoneId expiryZone;
private FutureOptionPremiumStyle premiumStyle;
private Rounding rounding;
private SecurityId underlyingFutureId;
/**
* Restricted constructor.
*/
private Builder() {
applyDefaults(this);
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(BondFutureOptionSecurity beanToCopy) {
this.info = beanToCopy.getInfo();
this.currency = beanToCopy.getCurrency();
this.putCall = beanToCopy.getPutCall();
this.strikePrice = beanToCopy.getStrikePrice();
this.expiryDate = beanToCopy.getExpiryDate();
this.expiryTime = beanToCopy.getExpiryTime();
this.expiryZone = beanToCopy.getExpiryZone();
this.premiumStyle = beanToCopy.getPremiumStyle();
this.rounding = beanToCopy.getRounding();
this.underlyingFutureId = beanToCopy.getUnderlyingFutureId();
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 3237038: // info
return info;
case 575402001: // currency
return currency;
case -219971059: // putCall
return putCall;
case 50946231: // strikePrice
return strikePrice;
case -816738431: // expiryDate
return expiryDate;
case -816254304: // expiryTime
return expiryTime;
case -816069761: // expiryZone
return expiryZone;
case -1257652838: // premiumStyle
return premiumStyle;
case -142444: // rounding
return rounding;
case -109104965: // underlyingFutureId
return underlyingFutureId;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 3237038: // info
this.info = (SecurityInfo) newValue;
break;
case 575402001: // currency
this.currency = (Currency) newValue;
break;
case -219971059: // putCall
this.putCall = (PutCall) newValue;
break;
case 50946231: // strikePrice
this.strikePrice = (Double) newValue;
break;
case -816738431: // expiryDate
this.expiryDate = (LocalDate) newValue;
break;
case -816254304: // expiryTime
this.expiryTime = (LocalTime) newValue;
break;
case -816069761: // expiryZone
this.expiryZone = (ZoneId) newValue;
break;
case -1257652838: // premiumStyle
this.premiumStyle = (FutureOptionPremiumStyle) newValue;
break;
case -142444: // rounding
this.rounding = (Rounding) newValue;
break;
case -109104965: // underlyingFutureId
this.underlyingFutureId = (SecurityId) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public BondFutureOptionSecurity build() {
return new BondFutureOptionSecurity(
info,
currency,
putCall,
strikePrice,
expiryDate,
expiryTime,
expiryZone,
premiumStyle,
rounding,
underlyingFutureId);
}
//-----------------------------------------------------------------------
/**
* Sets the standard security information.
* <p>
* This includes the security identifier.
* @param info the new value, not null
* @return this, for chaining, not null
*/
public Builder info(SecurityInfo info) {
JodaBeanUtils.notNull(info, "info");
this.info = info;
return this;
}
/**
* Sets the currency that the future is traded in.
* @param currency the new value, not null
* @return this, for chaining, not null
*/
public Builder currency(Currency currency) {
JodaBeanUtils.notNull(currency, "currency");
this.currency = currency;
return this;
}
/**
* Sets whether the option is put or call.
* <p>
* A call gives the owner the right, but not obligation, to buy the underlying at
* an agreed price in the future. A put gives a similar option to sell.
* @param putCall the new value
* @return this, for chaining, not null
*/
public Builder putCall(PutCall putCall) {
this.putCall = putCall;
return this;
}
/**
* Sets the strike price, represented in decimal form.
* <p>
* This is the price at which the option applies and refers to the price of the underlying future.
* This must be represented in decimal form, {@code (1.0 - decimalRate)}.
* As such, the common market price of 99.3 for a 0.7% rate must be input as 0.993.
* The rate implied by the strike can take negative values.
* @param strikePrice the new value
* @return this, for chaining, not null
*/
public Builder strikePrice(double strikePrice) {
this.strikePrice = strikePrice;
return this;
}
/**
* Sets the expiry date of the option.
* <p>
* The expiry date is related to the expiry time and time-zone.
* The date must not be after last trade date of the underlying future.
* @param expiryDate the new value, not null
* @return this, for chaining, not null
*/
public Builder expiryDate(LocalDate expiryDate) {
JodaBeanUtils.notNull(expiryDate, "expiryDate");
this.expiryDate = expiryDate;
return this;
}
/**
* Sets the expiry time of the option.
* <p>
* The expiry time is related to the expiry date and time-zone.
* @param expiryTime the new value, not null
* @return this, for chaining, not null
*/
public Builder expiryTime(LocalTime expiryTime) {
JodaBeanUtils.notNull(expiryTime, "expiryTime");
this.expiryTime = expiryTime;
return this;
}
/**
* Sets the time-zone of the expiry time.
* <p>
* The expiry time-zone is related to the expiry date and time.
* @param expiryZone the new value, not null
* @return this, for chaining, not null
*/
public Builder expiryZone(ZoneId expiryZone) {
JodaBeanUtils.notNull(expiryZone, "expiryZone");
this.expiryZone = expiryZone;
return this;
}
/**
* Sets the style of the option premium.
* <p>
* The two options are daily margining and upfront premium.
* @param premiumStyle the new value, not null
* @return this, for chaining, not null
*/
public Builder premiumStyle(FutureOptionPremiumStyle premiumStyle) {
JodaBeanUtils.notNull(premiumStyle, "premiumStyle");
this.premiumStyle = premiumStyle;
return this;
}
/**
* Sets the definition of how to round the option price, defaulted to no rounding.
* <p>
* The price is represented in decimal form, not percentage form.
* As such, the decimal places expressed by the rounding refers to this decimal form.
* For example, the common market price of 99.7125 is represented as 0.997125 which
* has 6 decimal places.
* @param rounding the new value, not null
* @return this, for chaining, not null
*/
public Builder rounding(Rounding rounding) {
JodaBeanUtils.notNull(rounding, "rounding");
this.rounding = rounding;
return this;
}
/**
* Sets the identifier of the underlying future.
* @param underlyingFutureId the new value, not null
* @return this, for chaining, not null
*/
public Builder underlyingFutureId(SecurityId underlyingFutureId) {
JodaBeanUtils.notNull(underlyingFutureId, "underlyingFutureId");
this.underlyingFutureId = underlyingFutureId;
return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(352);
buf.append("BondFutureOptionSecurity.Builder{");
buf.append("info").append('=').append(JodaBeanUtils.toString(info)).append(',').append(' ');
buf.append("currency").append('=').append(JodaBeanUtils.toString(currency)).append(',').append(' ');
buf.append("putCall").append('=').append(JodaBeanUtils.toString(putCall)).append(',').append(' ');
buf.append("strikePrice").append('=').append(JodaBeanUtils.toString(strikePrice)).append(',').append(' ');
buf.append("expiryDate").append('=').append(JodaBeanUtils.toString(expiryDate)).append(',').append(' ');
buf.append("expiryTime").append('=').append(JodaBeanUtils.toString(expiryTime)).append(',').append(' ');
buf.append("expiryZone").append('=').append(JodaBeanUtils.toString(expiryZone)).append(',').append(' ');
buf.append("premiumStyle").append('=').append(JodaBeanUtils.toString(premiumStyle)).append(',').append(' ');
buf.append("rounding").append('=').append(JodaBeanUtils.toString(rounding)).append(',').append(' ');
buf.append("underlyingFutureId").append('=').append(JodaBeanUtils.toString(underlyingFutureId));
buf.append('}');
return buf.toString();
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.provisioning.impl;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.Validate;
import org.springframework.stereotype.Component;
import com.evolveum.midpoint.provisioning.api.ChangeNotificationDispatcher;
import com.evolveum.midpoint.provisioning.api.GenericConnectorException;
import com.evolveum.midpoint.provisioning.api.ResourceEventDescription;
import com.evolveum.midpoint.provisioning.api.ResourceEventListener;
import com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener;
import com.evolveum.midpoint.provisioning.api.ResourceObjectShadowChangeDescription;
import com.evolveum.midpoint.provisioning.api.ResourceOperationDescription;
import com.evolveum.midpoint.provisioning.api.ResourceOperationListener;
import com.evolveum.midpoint.schema.internals.InternalsConfig;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.util.exception.CommunicationException;
import com.evolveum.midpoint.util.exception.ConfigurationException;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.exception.SecurityViolationException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
/**
* @author Radovan Semancik
*
*/
@Component
public class ChangeNotificationDispatcherImpl implements ChangeNotificationDispatcher {
private boolean filterProtectedObjects = true;
private List<ResourceObjectChangeListener> changeListeners = new ArrayList<ResourceObjectChangeListener>();
private List<ResourceOperationListener> operationListeners = new ArrayList<ResourceOperationListener>();
private List<ResourceEventListener> eventListeners = new ArrayList<ResourceEventListener>();
private static final Trace LOGGER = TraceManager.getTrace(ChangeNotificationDispatcherImpl.class);
public boolean isFilterProtectedObjects() {
return filterProtectedObjects;
}
public void setFilterProtectedObjects(boolean filterProtectedObjects) {
this.filterProtectedObjects = filterProtectedObjects;
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeNotificationManager#registerNotificationListener(com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener)
*/
@Override
public synchronized void registerNotificationListener(ResourceObjectChangeListener listener) {
if (changeListeners.contains(listener)) {
LOGGER.warn(
"Resource object change listener '{}' is already registered. Subsequent registration is ignored",
listener);
} else {
changeListeners.add(listener);
}
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeNotificationManager#registerNotificationListener(com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener)
*/
@Override
public synchronized void registerNotificationListener(ResourceOperationListener listener) {
if (operationListeners.contains(listener)) {
LOGGER.warn(
"Resource operation listener '{}' is already registered. Subsequent registration is ignored",
listener);
} else {
operationListeners.add(listener);
}
}
@Override
public synchronized void registerNotificationListener(ResourceEventListener listener) {
if (eventListeners.contains(listener)) {
LOGGER.warn(
"Resource event listener '{}' is already registered. Subsequent registration is ignored",
listener);
} else {
eventListeners.add(listener);
}
}
@Override
public void unregisterNotificationListener(ResourceEventListener listener) {
eventListeners.remove(listener);
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeNotificationManager#unregisterNotificationListener(com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener)
*/
@Override
public synchronized void unregisterNotificationListener(ResourceOperationListener listener) {
changeListeners.remove(listener);
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeNotificationManager#unregisterNotificationListener(com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener)
*/
@Override
public synchronized void unregisterNotificationListener(ResourceObjectChangeListener listener) {
operationListeners.remove(listener);
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener#notifyChange(com.evolveum.midpoint.xml.ns._public.common.common_2.ResourceObjectShadowChangeDescriptionType, com.evolveum.midpoint.common.result.OperationResult)
*/
@Override
public void notifyChange(ResourceObjectShadowChangeDescription change, Task task, OperationResult parentResult) {
Validate.notNull(change, "Change description of resource object shadow must not be null.");
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("SYNCHRONIZATION change notification\n{} ", change.debugDump());
}
if (InternalsConfig.consistencyChecks) change.checkConsistence();
if ((null != changeListeners) && (!changeListeners.isEmpty())) {
for (ResourceObjectChangeListener listener : new ArrayList<>(changeListeners)) { // sometimes there is registration/deregistration from within
//LOGGER.trace("Listener: {}", listener.getClass().getSimpleName());
try {
listener.notifyChange(change, task, parentResult);
} catch (RuntimeException e) {
LOGGER.error("Exception {} thrown by object change listener {}: {}", new Object[]{
e.getClass(), listener.getName(), e.getMessage(), e });
parentResult.createSubresult(CLASS_NAME_WITH_DOT + "notifyChange").recordWarning("Change listener has thrown unexpected exception", e);
throw e;
}
}
} else {
LOGGER.warn("Change notification received but listener list is empty, there is nobody to get the message");
}
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener#notifyFailure(com.evolveum.midpoint.provisioning.api.ResourceObjectShadowFailureDescription, com.evolveum.midpoint.task.api.Task, com.evolveum.midpoint.schema.result.OperationResult)
*/
@Override
public void notifyFailure(ResourceOperationDescription failureDescription,
Task task, OperationResult parentResult) {
Validate.notNull(failureDescription, "Operation description of resource object shadow must not be null.");
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Resource operation failure notification\n{} ", failureDescription.debugDump());
}
failureDescription.checkConsistence();
if ((null != changeListeners) && (!changeListeners.isEmpty())) {
for (ResourceOperationListener listener : new ArrayList<>(operationListeners)) { // sometimes there is registration/deregistration from within
//LOGGER.trace("Listener: {}", listener.getClass().getSimpleName());
try {
listener.notifyFailure(failureDescription, task, parentResult);
} catch (RuntimeException e) {
LOGGER.error("Exception {} thrown by operation failure listener {}: {}", new Object[]{
e.getClass(), listener.getName(), e.getMessage(), e });
parentResult.createSubresult(CLASS_NAME_WITH_DOT + "notifyFailure").recordWarning("Operation failure listener has thrown unexpected exception", e);
}
}
} else {
LOGGER.debug("Operation failure received but listener list is empty, there is nobody to get the message");
}
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener#notifyFailure(com.evolveum.midpoint.provisioning.api.ResourceObjectShadowFailureDescription, com.evolveum.midpoint.task.api.Task, com.evolveum.midpoint.schema.result.OperationResult)
*/
@Override
public void notifySuccess(ResourceOperationDescription failureDescription,
Task task, OperationResult parentResult) {
Validate.notNull(failureDescription, "Operation description of resource object shadow must not be null.");
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Resource operation success notification\n{} ", failureDescription.debugDump());
}
failureDescription.checkConsistence();
if ((null != changeListeners) && (!changeListeners.isEmpty())) {
for (ResourceOperationListener listener : new ArrayList<>(operationListeners)) { // sometimes there is registration/deregistration from within
//LOGGER.trace("Listener: {}", listener.getClass().getSimpleName());
try {
listener.notifySuccess(failureDescription, task, parentResult);
} catch (RuntimeException e) {
LOGGER.error("Exception {} thrown by operation success listener {}: {}", new Object[]{
e.getClass(), listener.getName(), e.getMessage(), e });
parentResult.createSubresult(CLASS_NAME_WITH_DOT + "notifySuccess").recordWarning("Operation success listener has thrown unexpected exception", e);
}
}
} else {
LOGGER.debug("Operation success received but listener list is empty, there is nobody to get the message");
}
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener#notifyFailure(com.evolveum.midpoint.provisioning.api.ResourceObjectShadowFailureDescription, com.evolveum.midpoint.task.api.Task, com.evolveum.midpoint.schema.result.OperationResult)
*/
@Override
public void notifyInProgress(ResourceOperationDescription failureDescription,
Task task, OperationResult parentResult) {
Validate.notNull(failureDescription, "Operation description of resource object shadow must not be null.");
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Resource operation in-progress notification\n{} ", failureDescription.debugDump());
}
failureDescription.checkConsistence();
if ((null != changeListeners) && (!changeListeners.isEmpty())) {
for (ResourceOperationListener listener : new ArrayList<>(operationListeners)) { // sometimes there is registration/deregistration from within
//LOGGER.trace("Listener: {}", listener.getClass().getSimpleName());
try {
listener.notifyInProgress(failureDescription, task, parentResult);
} catch (RuntimeException e) {
LOGGER.error("Exception {} thrown by operation in-progress listener {}: {}", new Object[]{
e.getClass(), listener.getName(), e.getMessage(), e });
parentResult.createSubresult(CLASS_NAME_WITH_DOT + "notifyInProgress").recordWarning("Operation in-progress listener has thrown unexpected exception", e);
}
}
} else {
LOGGER.debug("Operation in-progress received but listener list is empty, there is nobody to get the message");
}
}
/* (non-Javadoc)
* @see com.evolveum.midpoint.provisioning.api.ResourceObjectChangeListener#getName()
*/
@Override
public String getName() {
return "object change notification dispatcher";
}
@Override
public void notifyEvent(ResourceEventDescription eventDescription,
Task task, OperationResult parentResult) throws SchemaException,
CommunicationException, ConfigurationException,
SecurityViolationException, ObjectNotFoundException,
GenericConnectorException, ObjectAlreadyExistsException {
Validate.notNull(eventDescription, "Event description must not be null.");
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("SYNCHRONIZATION change notification\n{} ", eventDescription.debugDump());
}
if (filterProtectedObjects && eventDescription.isProtected()) {
LOGGER.trace("Skipping dispatching of {} because it is protected", eventDescription);
return;
}
// if (InternalsConfig.consistencyChecks) eventDescription.checkConsistence();
if ((null != eventListeners) && (!eventListeners.isEmpty())) {
for (ResourceEventListener listener : new ArrayList<>(eventListeners)) { // sometimes there is registration/deregistration from within
//LOGGER.trace("Listener: {}", listener.getClass().getSimpleName());
try {
listener.notifyEvent(eventDescription, task, parentResult);
} catch (RuntimeException e) {
LOGGER.error("Exception {} thrown by event listener {}: {}", new Object[]{
e.getClass(), listener.getName(), e.getMessage(), e });
parentResult.createSubresult(CLASS_NAME_WITH_DOT + "notifyEvent").recordWarning("Event listener has thrown unexpected exception", e);
throw e;
}
}
} else {
LOGGER.warn("Event notification received but listener list is empty, there is nobody to get the message");
}
}
}
| |
/*
* Copyright (C) 2007-2013 Geometer Plus <contact@geometerplus.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
package org.geometerplus.android.fbreader.libraryService;
import java.util.*;
import android.content.*;
import android.os.IBinder;
import android.os.RemoteException;
import org.geometerplus.zlibrary.core.filesystem.ZLFile;
import org.geometerplus.zlibrary.text.view.ZLTextFixedPosition;
import org.geometerplus.zlibrary.text.view.ZLTextPosition;
import org.geometerplus.fbreader.Paths;
import org.geometerplus.fbreader.book.*;
import org.geometerplus.android.fbreader.api.TextPosition;
/**
 * Client-side proxy ("shadow") of the FBReader library service. It binds to
 * {@code LibraryService} via AIDL ({@code LibraryInterface}) and forwards every
 * {@code AbstractBookCollection} operation over IPC.
 *
 * Behavior common to all delegating methods: when the service is not bound
 * ({@code myInterface == null}) or the remote call fails with
 * {@code RemoteException}, a neutral fallback is returned (null, 0, false, or an
 * empty list) instead of propagating the failure.
 *
 * Thread-safety: all public methods are synchronized on this instance;
 * {@code myInterface} is additionally volatile.
 */
public class BookCollectionShadow extends AbstractBookCollection implements ServiceConnection {
	// Context used for binding; non-null only while bound (set in bindToService, cleared in unbind).
	private Context myContext;
	// AIDL stub for the remote library service; null while disconnected.
	private volatile LibraryInterface myInterface;
	// Action(s) to run once the service connects; combined if bindToService is called repeatedly.
	private Runnable myOnBindAction;

	// Receives book/build event broadcasts from the service and re-fires them
	// to local listeners. Any deserialization/parsing problem is deliberately ignored.
	private final BroadcastReceiver myReceiver = new BroadcastReceiver() {
		public void onReceive(Context context, Intent intent) {
			if (!hasListeners()) {
				return;
			}
			try {
				final String type = intent.getStringExtra("type");
				if (LibraryService.BOOK_EVENT_ACTION.equals(intent.getAction())) {
					final Book book = SerializerUtil.deserializeBook(intent.getStringExtra("book"));
					fireBookEvent(BookEvent.valueOf(type), book);
				} else {
					fireBuildEvent(Status.valueOf(type));
				}
			} catch (Exception e) {
				// ignore
			}
		}
	};

	/**
	 * Chains two runnables into one; a null argument yields the other argument
	 * (so combining with null is the identity).
	 */
	private static Runnable combined(final Runnable action0, final Runnable action1) {
		if (action0 == null) {
			return action1;
		}
		if (action1 == null) {
			return action0;
		}
		return new Runnable() {
			public void run() {
				action0.run();
				action1.run();
			}
		};
	}

	/**
	 * Binds to the library service. If already bound with the same context the
	 * action runs immediately; otherwise the action is queued (combined with any
	 * previously queued action) and executed in onServiceConnected.
	 * NOTE(review): the return value of Context.bindService is ignored, so a
	 * failed bind leaves myOnBindAction queued forever — confirm acceptable.
	 */
	public synchronized void bindToService(Context context, Runnable onBindAction) {
		if (myInterface != null && myContext == context) {
			if (onBindAction != null) {
				onBindAction.run();
			}
		} else {
			myOnBindAction = combined(myOnBindAction, onBindAction);
			context.bindService(
				new Intent(context, LibraryService.class),
				this,
				LibraryService.BIND_AUTO_CREATE
			);
			myContext = context;
		}
	}

	/**
	 * Unregisters the event receiver and unbinds from the service.
	 * Exceptions are logged (printStackTrace) because either step may fail if
	 * registration/binding never completed; the shadow is reset regardless.
	 */
	public synchronized void unbind() {
		if (myContext != null && myInterface != null) {
			try {
				myContext.unregisterReceiver(myReceiver);
			} catch (Exception e) {
				e.printStackTrace();
			}
			try {
				myContext.unbindService(this);
			} catch (Exception e) {
				e.printStackTrace();
			}
			myInterface = null;
			myContext = null;
		}
	}

	/** Asks the service to rebuild the collection from the configured book path. */
	public synchronized void reset(boolean force) {
		if (myInterface != null) {
			try {
				myInterface.reset(Paths.BookPathOption().getValue(), force);
			} catch (RemoteException e) {
			}
		}
	}

	/** Number of books in the remote collection; 0 when unbound or on IPC failure. */
	public synchronized int size() {
		if (myInterface == null) {
			return 0;
		}
		try {
			return myInterface.size();
		} catch (RemoteException e) {
			return 0;
		}
	}

	/**
	 * Build status of the remote collection; NotStarted when unbound or on any
	 * failure (catches Throwable to also cover an unknown status name).
	 */
	public synchronized Status status() {
		if (myInterface == null) {
			return Status.NotStarted;
		}
		try {
			return Status.valueOf(myInterface.status());
		} catch (Throwable t) {
			return Status.NotStarted;
		}
	}

	/** Books matching the query; empty list when unbound or on IPC failure. */
	public synchronized List<Book> books(BookQuery query) {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			return SerializerUtil.deserializeBookList(myInterface.books(SerializerUtil.serialize(query)));
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** True if at least one book matches the filter (queried with limit 1). */
	public synchronized boolean hasBooks(Filter filter) {
		if (myInterface == null) {
			return false;
		}
		try {
			return myInterface.hasBooks(SerializerUtil.serialize(new BookQuery(filter, 1)));
		} catch (RemoteException e) {
			return false;
		}
	}

	/** Recently opened books; empty list when unbound or on IPC failure. */
	public synchronized List<Book> recentBooks() {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			return SerializerUtil.deserializeBookList(myInterface.recentBooks());
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** Recent book at the given index, or null when unbound or on IPC failure. */
	public synchronized Book getRecentBook(int index) {
		if (myInterface == null) {
			return null;
		}
		try {
			return SerializerUtil.deserializeBook(myInterface.getRecentBook(index));
		} catch (RemoteException e) {
			e.printStackTrace();
			return null;
		}
	}

	/** Book for the given file path, or null. */
	public synchronized Book getBookByFile(ZLFile file) {
		if (myInterface == null) {
			return null;
		}
		try {
			return SerializerUtil.deserializeBook(myInterface.getBookByFile(file.getPath()));
		} catch (RemoteException e) {
			return null;
		}
	}

	/** Book for the given database id, or null. */
	public synchronized Book getBookById(long id) {
		if (myInterface == null) {
			return null;
		}
		try {
			return SerializerUtil.deserializeBook(myInterface.getBookById(id));
		} catch (RemoteException e) {
			return null;
		}
	}

	/** Book for the given UID (type + id pair), or null. */
	public synchronized Book getBookByUid(UID uid) {
		if (myInterface == null) {
			return null;
		}
		try {
			return SerializerUtil.deserializeBook(myInterface.getBookByUid(uid.Type, uid.Id));
		} catch (RemoteException e) {
			return null;
		}
	}

	/** All authors in the collection, deserialized from their string form. */
	public synchronized List<Author> authors() {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			final List<String> strings = myInterface.authors();
			final List<Author> authors = new ArrayList<Author>(strings.size());
			for (String s : strings) {
				authors.add(Util.stringToAuthor(s));
			}
			return authors;
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** All tags in the collection, deserialized from their string form. */
	public synchronized List<Tag> tags() {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			final List<String> strings = myInterface.tags();
			final List<Tag> tags = new ArrayList<Tag>(strings.size());
			for (String s : strings) {
				tags.add(Util.stringToTag(s));
			}
			return tags;
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** True if any book belongs to a series. */
	public synchronized boolean hasSeries() {
		if (myInterface != null) {
			try {
				return myInterface.hasSeries();
			} catch (RemoteException e) {
			}
		}
		return false;
	}

	/** All series titles; empty list when unbound or on IPC failure. */
	public synchronized List<String> series() {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			return myInterface.series();
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** Titles of books matching the query. */
	public synchronized List<String> titles(BookQuery query) {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			return myInterface.titles(SerializerUtil.serialize(query));
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** Distinct first letters of titles (used for alphabetical navigation). */
	public synchronized List<String> firstTitleLetters() {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			return myInterface.firstTitleLetters();
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** Persists the book via the service; false when unbound or on IPC failure. */
	public synchronized boolean saveBook(Book book) {
		if (myInterface == null) {
			return false;
		}
		try {
			return myInterface.saveBook(SerializerUtil.serialize(book));
		} catch (RemoteException e) {
			return false;
		}
	}

	/** Removes the book from the collection, optionally deleting the file on disk. */
	public synchronized void removeBook(Book book, boolean deleteFromDisk) {
		if (myInterface != null) {
			try {
				myInterface.removeBook(SerializerUtil.serialize(book), deleteFromDisk);
			} catch (RemoteException e) {
			}
		}
	}

	/** Puts the book at the head of the recent-books list. */
	public synchronized void addBookToRecentList(Book book) {
		if (myInterface != null) {
			try {
				myInterface.addBookToRecentList(SerializerUtil.serialize(book));
			} catch (RemoteException e) {
			}
		}
	}

	/** All label names used in the collection. */
	public synchronized List<String> labels() {
		if (myInterface != null) {
			try {
				return myInterface.labels();
			} catch (RemoteException e) {
			}
		}
		return Collections.emptyList();
	}

	/**
	 * Last stored reading position for the book, converted from the IPC
	 * TextPosition DTO to a ZLTextFixedPosition; null if none is stored.
	 */
	public synchronized ZLTextPosition getStoredPosition(long bookId) {
		if (myInterface == null) {
			return null;
		}
		try {
			final TextPosition position = myInterface.getStoredPosition(bookId);
			if (position == null) {
				return null;
			}
			return new ZLTextFixedPosition(
				position.ParagraphIndex, position.ElementIndex, position.CharIndex
			);
		} catch (RemoteException e) {
			return null;
		}
	}

	/** Stores the reading position for the book; a null position is a no-op. */
	public synchronized void storePosition(long bookId, ZLTextPosition position) {
		if (position != null && myInterface != null) {
			try {
				myInterface.storePosition(bookId, new TextPosition(
					position.getParagraphIndex(), position.getElementIndex(), position.getCharIndex()
				));
			} catch (RemoteException e) {
			}
		}
	}

	/** True if the given hyperlink in the book has been visited. */
	public synchronized boolean isHyperlinkVisited(Book book, String linkId) {
		if (myInterface == null) {
			return false;
		}
		try {
			return myInterface.isHyperlinkVisited(SerializerUtil.serialize(book), linkId);
		} catch (RemoteException e) {
			return false;
		}
	}

	/** Marks the given hyperlink in the book as visited. */
	public synchronized void markHyperlinkAsVisited(Book book, String linkId) {
		if (myInterface != null) {
			try {
				myInterface.markHyperlinkAsVisited(SerializerUtil.serialize(book), linkId);
			} catch (RemoteException e) {
			}
		}
	}

	/** Saves a cover image URL for the book; false when unbound or on IPC failure. */
	@Override
	public synchronized boolean saveCover(Book book, String url) {
		if (myInterface == null) {
			return false;
		}
		try {
			return myInterface.saveCover(SerializerUtil.serialize(book), url);
		} catch (RemoteException e) {
			e.printStackTrace();
			return false;
		}
	}

	/** Bookmarks matching the query; empty list when unbound or on IPC failure. */
	public synchronized List<Bookmark> bookmarks(BookmarkQuery query) {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			return SerializerUtil.deserializeBookmarkList(
				myInterface.bookmarks(SerializerUtil.serialize(query))
			);
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/**
	 * Saves the bookmark and updates the local object in place from the service's
	 * round-tripped copy (e.g. picking up a newly assigned id).
	 */
	public synchronized void saveBookmark(Bookmark bookmark) {
		if (myInterface != null) {
			try {
				bookmark.update(SerializerUtil.deserializeBookmark(
					myInterface.saveBookmark(SerializerUtil.serialize(bookmark))
				));
			} catch (RemoteException e) {
			}
		}
	}

	/** Deletes the bookmark from the collection. */
	public synchronized void deleteBookmark(Bookmark bookmark) {
		if (myInterface != null) {
			try {
				myInterface.deleteBookmark(SerializerUtil.serialize(bookmark));
			} catch (RemoteException e) {
			}
		}
	}

	/** Highlighting style for the given id, or null. */
	public synchronized HighlightingStyle getHighlightingStyle(int styleId) {
		if (myInterface == null) {
			return null;
		}
		try {
			return SerializerUtil.deserializeStyle(myInterface.getHighlightingStyle(styleId));
		} catch (RemoteException e) {
			return null;
		}
	}

	/** All highlighting styles; empty list when unbound or on IPC failure. */
	public synchronized List<HighlightingStyle> highlightingStyles() {
		if (myInterface == null) {
			return Collections.emptyList();
		}
		try {
			return SerializerUtil.deserializeStyleList(myInterface.highlightingStyles());
		} catch (RemoteException e) {
			return Collections.emptyList();
		}
	}

	/** Persists a highlighting style. */
	public synchronized void saveHighlightingStyle(HighlightingStyle style) {
		if (myInterface != null) {
			try {
				myInterface.saveHighlightingStyle(SerializerUtil.serialize(style));
			} catch (RemoteException e) {
				// ignore
			}
		}
	}

	/** Asks the service to rescan the given path for books. */
	public synchronized void rescan(String path) {
		if (myInterface != null) {
			try {
				myInterface.rescan(path);
			} catch (RemoteException e) {
				// ignore
			}
		}
	}

	// method from ServiceConnection interface
	/**
	 * Stores the AIDL stub, runs and clears any queued on-bind action, then
	 * registers the broadcast receiver for book and build events.
	 */
	public synchronized void onServiceConnected(ComponentName name, IBinder service) {
		myInterface = LibraryInterface.Stub.asInterface(service);
		if (myOnBindAction != null) {
			myOnBindAction.run();
			myOnBindAction = null;
		}
		if (myContext != null) {
			myContext.registerReceiver(myReceiver, new IntentFilter(LibraryService.BOOK_EVENT_ACTION));
			myContext.registerReceiver(myReceiver, new IntentFilter(LibraryService.BUILD_EVENT_ACTION));
		}
	}

	// method from ServiceConnection interface
	// Intentionally empty: myInterface is left stale here; cleanup happens in unbind().
	public synchronized void onServiceDisconnected(ComponentName name) {
	}
}
| |
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.loan.struts.action;
import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mifos.accounts.loan.util.helpers.RequestConstants.PERSPECTIVE;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mifos.accounts.loan.business.LoanBO;
import org.mifos.accounts.loan.business.service.LoanBusinessService;
import org.mifos.accounts.loan.business.service.OriginalScheduleInfoDto;
import org.mifos.accounts.loan.struts.actionforms.LoanAccountActionForm;
import org.mifos.accounts.loan.util.helpers.LoanConstants;
import org.mifos.accounts.loan.util.helpers.RepaymentScheduleInstallment;
import org.mifos.accounts.productdefinition.business.service.LoanPrdBusinessService;
import org.mifos.application.questionnaire.struts.QuestionnaireFlowAdapter;
import org.mifos.application.servicefacade.LoanAccountServiceFacade;
import org.mifos.application.servicefacade.LoanServiceFacade;
import org.mifos.customers.persistence.CustomerDao;
import org.mifos.framework.TestUtils;
import org.mifos.framework.exceptions.PageExpiredException;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.Flow;
import org.mifos.framework.util.helpers.FlowManager;
import org.mifos.platform.questionnaire.service.QuestionDetail;
import org.mifos.platform.questionnaire.service.QuestionGroupDetail;
import org.mifos.platform.questionnaire.service.QuestionGroupInstanceDetail;
import org.mifos.platform.questionnaire.service.QuestionType;
import org.mifos.platform.questionnaire.service.QuestionnaireServiceFacade;
import org.mifos.platform.questionnaire.service.SectionDetail;
import org.mifos.platform.questionnaire.service.SectionQuestionDetail;
import org.mifos.platform.questionnaire.service.dtos.ChoiceDto;
import org.mifos.platform.validations.Errors;
import org.mifos.security.util.UserContext;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class LoanAccountActionTest {
@Mock
private QuestionnaireServiceFacade questionnaireServiceFacade;
@Mock
private HttpServletRequest request;
@Mock
private HttpServletResponse response;
@Mock
private HttpSession session;
@Mock
private FlowManager flowManager;
@Mock
private ActionMapping mapping;
private LoanAccountAction loanAccountAction;
public static final String FLOW_KEY = "FlowKey";
@Mock
private LoanAccountActionForm form;
private Flow flow;
@Mock
private LoanPrdBusinessService loanPrdBusinessService;
@Mock
private LoanServiceFacade loanServiceFacade;
@Mock
private LoanAccountServiceFacade loanAccountServiceFacade;
@Mock
private UserContext userContext;
private Short localeId = new Short("1");
@Mock
private LoanBusinessService loanBusinessService;
@Mock
private CustomerDao customerDao;
@Mock
private QuestionnaireFlowAdapter createLoanQuestionnaire;
@Mock
private LoanBO loanBO;
@Before
public void setUp() throws PageExpiredException {
loanAccountAction = new LoanAccountAction(null, loanBusinessService, null, loanPrdBusinessService, null, null, null, null) {
@SuppressWarnings("unused")
@Override
LoanBO getLoan(Integer loanId) {
return loanBO;
}
@Override
protected UserContext getUserContext(@SuppressWarnings("unused") HttpServletRequest request) {
return userContext;
}
@Override
QuestionnaireFlowAdapter getCreateLoanQuestionnaire() {
return createLoanQuestionnaire;
}
};
loanAccountAction.setLoanServiceFacade(loanServiceFacade);
loanAccountAction.setLoanAccountServiceFacade(loanAccountServiceFacade);
loanAccountAction.setCustomerDao(customerDao);
when(request.getAttribute(Constants.CURRENTFLOWKEY)).thenReturn(FLOW_KEY);
when(request.getSession()).thenReturn(session);
when(session.getAttribute(Constants.FLOWMANAGER)).thenReturn(flowManager);
flow = new Flow();
when(flowManager.getFlowWithValidation(FLOW_KEY)).thenReturn(flow);
when(userContext.getLocaleId()).thenReturn(localeId);
when(userContext.getPreferredLocale()).thenReturn(Locale.US);
}
@SuppressWarnings("unchecked")
@Test
public void shouldSetQuestionGroupInstanceDetailsInSession() throws PageExpiredException {
List<QuestionGroupInstanceDetail> instanceDetails = asList(getQuestionGroupInstanceDetail("QG1"), getQuestionGroupInstanceDetail("QG2"));
when(questionnaireServiceFacade.getQuestionGroupInstances(101, "View", "Loan")).thenReturn(instanceDetails);
loanAccountAction.setQuestionGroupInstances(questionnaireServiceFacade, request, 101);
assertThat((List<QuestionGroupInstanceDetail>) flow.getObjectFromSession("questionGroupInstances"), is(instanceDetails));
verify(questionnaireServiceFacade, times(1)).getQuestionGroupInstances(101, "View", "Loan");
verify(request, times(1)).getAttribute(Constants.CURRENTFLOWKEY);
verify(request, times(1)).getSession();
verify(session, times(1)).getAttribute(Constants.FLOWMANAGER);
}
@SuppressWarnings("unchecked")
@Test
public void shouldViewOriginalSchedule() throws Exception {
ActionForward viewOriginalScheduleForward = new ActionForward("viewOriginalSchedule");
int loanId = 1;
String loanAmount = "123";
List<RepaymentScheduleInstallment> installments = Collections.EMPTY_LIST;
java.sql.Date disbursementDate = new java.sql.Date(new DateTime().toDate().getTime());
OriginalScheduleInfoDto dto = mock(OriginalScheduleInfoDto.class);
when(dto.getOriginalLoanScheduleInstallment()).thenReturn(installments);
when(dto.getLoanAmount()).thenReturn(loanAmount);
when(dto.getDisbursementDate()).thenReturn(disbursementDate);
when(request.getParameter(LoanAccountAction.ACCOUNT_ID)).thenReturn(String.valueOf(loanId));
when(loanServiceFacade.retrieveOriginalLoanSchedule(loanId)).thenReturn(dto);
when(mapping.findForward("viewOriginalSchedule")).thenReturn(viewOriginalScheduleForward);
ActionForward forward = loanAccountAction.viewOriginalSchedule(mapping, form, request, response);
assertThat(forward, is(viewOriginalScheduleForward));
verify(request).getParameter(LoanAccountAction.ACCOUNT_ID);
verify(loanServiceFacade).retrieveOriginalLoanSchedule(loanId);
verify(dto).getOriginalLoanScheduleInstallment();
verify(dto).getLoanAmount();
verify(dto).getDisbursementDate();
verify(mapping).findForward("viewOriginalSchedule");
}
@Test
public void captureQuestionResponses() throws Exception {
String redoLoan = "redoLoan";
when(form.getPerspective()).thenReturn(redoLoan);
ActionErrors errors = mock(ActionErrors.class);
ActionForward forward = mock(ActionForward.class);
when(createLoanQuestionnaire.validateResponses(request, form)).thenReturn(errors);
when(errors.isEmpty()).thenReturn(true);
when(createLoanQuestionnaire.rejoinFlow(mapping)).thenReturn(forward);
loanAccountAction.captureQuestionResponses(mapping, form, request, response);
verify(request,times(1)).setAttribute(eq(LoanConstants.METHODCALLED), eq("captureQuestionResponses"));
verify(request,times(1)).setAttribute(PERSPECTIVE, redoLoan);
verify(createLoanQuestionnaire).rejoinFlow(mapping);
}
@Test
public void getLoanRepaymentScheduleShouldCalculateExtraInterest() throws Exception {
when(loanBusinessService.computeExtraInterest(eq(loanBO), Matchers.<Date>any())).thenReturn(new Errors());
when(request.getParameter("accountId")).thenReturn("1");
when(loanServiceFacade.retrieveOriginalLoanSchedule(Matchers.<Integer>any())).
thenReturn(new OriginalScheduleInfoDto("100", new Date(), Collections.<RepaymentScheduleInstallment>emptyList()));
loanAccountAction.getLoanRepaymentSchedule(mapping, form, request, response);
verify(loanBusinessService, times(1)).computeExtraInterest(Matchers.<LoanBO>any(), Matchers.<Date>any());
}
@Test
public void getLoanRepaymentScheduleShouldValidateViewDate() throws Exception {
ActionForward getLoanScheduleFailure = new ActionForward("getLoanRepaymentScheduleFailure");
java.sql.Date extraInterestDate = TestUtils.getSqlDate(10, 7, 2010);
Errors errors = new Errors();
errors.addError(LoanConstants.CANNOT_VIEW_REPAYMENT_SCHEDULE, new String[] {extraInterestDate.toString()});
when(loanBusinessService.computeExtraInterest(loanBO, extraInterestDate)).thenReturn(errors);
when(form.getScheduleViewDateValue(Locale.US)).thenReturn(extraInterestDate);
when(request.getParameter("accountId")).thenReturn("1");
when(mapping.findForward("getLoanRepaymentScheduleFailure")).thenReturn(getLoanScheduleFailure);
when(loanServiceFacade.retrieveOriginalLoanSchedule(Matchers.<Integer>any())).
thenReturn(new OriginalScheduleInfoDto("100", new Date(), Collections.<RepaymentScheduleInstallment>emptyList()));
ActionForward forward = loanAccountAction.getLoanRepaymentSchedule(mapping, form, request, response);
assertThat(forward, is(getLoanScheduleFailure));
verify(form).resetScheduleViewDate();
}
private QuestionGroupInstanceDetail getQuestionGroupInstanceDetail(String questionGroupTitle) {
QuestionGroupInstanceDetail detail = new QuestionGroupInstanceDetail();
detail.setDateCompleted(Calendar.getInstance().getTime());
detail.setQuestionGroupDetail(getQuestionGroupDetail(questionGroupTitle, asList("red", "green", "blue")));
return detail;
}
// Builds a question group (fixed id 123) holding one section with one question.
private QuestionGroupDetail getQuestionGroupDetail(String title, List<String> answerChoices) {
    QuestionGroupDetail groupDetail = new QuestionGroupDetail();
    groupDetail.setTitle(title);
    groupDetail.setId(123);
    groupDetail.setSectionDetails(asList(getSectionDetail("Section1", "Question1", answerChoices)));
    return groupDetail;
}
// Builds a section containing one mandatory single-select question (fixed id 111)
// whose choices are taken from answerChoices.
private SectionDetail getSectionDetail(String name, String title, List<String> answerChoices) {
    SectionDetail section = new SectionDetail();
    section.setName(name);
    QuestionDetail question = new QuestionDetail(111, title, QuestionType.SINGLE_SELECT, true, true);
    List<ChoiceDto> choices = new ArrayList<ChoiceDto>(answerChoices.size());
    for (String answerChoice : answerChoices) {
        choices.add(new ChoiceDto(answerChoice));
    }
    question.setAnswerChoices(choices);
    section.setQuestionDetails(asList(new SectionQuestionDetail(question, true)));
    return section;
}
}
| |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.ws.bridge.filter;
import static org.junit.Assert.assertNotNull;
import static org.kaazing.gateway.transport.ws.AbstractWsControlMessage.Style.CLIENT;
import java.nio.ByteBuffer;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.mina.core.filterchain.IoFilter;
import org.apache.mina.core.filterchain.IoFilter.NextFilter;
import org.apache.mina.core.filterchain.IoFilterChain;
import org.apache.mina.core.session.IdleStatus;
import org.jmock.Mockery;
import org.junit.Ignore;
import org.junit.Test;
import org.kaazing.gateway.transport.ws.WsAcceptor;
import org.kaazing.gateway.transport.ws.WsPingMessage;
import org.kaazing.gateway.transport.ws.WsPongMessage;
import org.kaazing.gateway.transport.ws.extension.ActiveWsExtensions;
import org.kaazing.gateway.transport.ws.util.Expectations;
import org.kaazing.gateway.util.Utils;
import org.kaazing.mina.core.buffer.SimpleBufferAllocator;
import org.kaazing.mina.core.session.IoSessionConfigEx;
import org.kaazing.mina.core.session.IoSessionEx;
import org.slf4j.Logger;
/**
 * Unit tests for {@code WsCheckAliveFilter}, which keeps WebSocket connections alive by
 * writing a PING when the session goes read-idle and closing the connection when the
 * expected PONG never arrives.
 *
 * Fixes: the {@code PING} field is never reassigned and is now {@code final}; the test
 * method name typo "Intactivity" is corrected to "Inactivity".
 */
public class WsCheckAliveFilterTest {

    /** Client-style PING used to match the frame the filter writes on idle. */
    private final WsPingMessage PING = new WsPingMessage();
    {
        PING.setStyle(CLIENT);
    }

    private static final String FILTER_NAME = "wsn#checkalive";

    /** The inactivity timeout most tests use: 30 seconds, in milliseconds. */
    private static final long STANDARD_INACTIVITY_TIMEOUT_MILLIS = Utils.parseTimeInterval("30sec", TimeUnit.MILLISECONDS);

    /** The obsolete system property must be reported via the logger AND rejected with an exception. */
    @Test
    public void validateSystemPropertiesShouldThrowErrorIfObsoleteSystemPropertyIsSet() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        context.checking(new Expectations() {
            {
                oneOf(logger).error(with(stringMatching(String.format(
                        "System property %s is no longer supported, please use accept-option %s instead in the gateway configuration file",
                        "org.kaazing.gateway.transport.ws.INACTIVITY_TIMEOUT", "ws.inactivity.timeout"))));
            }
        });
        Properties configuration = new Properties();
        configuration.setProperty("org.kaazing.gateway.transport.ws.INACTIVITY_TIMEOUT", "15s");
        Exception caught = null;
        try {
            WsCheckAliveFilter.validateSystemProperties(configuration, logger);
        }
        catch(Exception e) {
            caught = e;
        }
        context.assertIsSatisfied();
        assertNotNull(caught);
    }

    /** With no obsolete property present, validation must pass silently (no log, no throw). */
    @Test
    public void validateSystemPropertiesShouldNotThrowErrorIfObsoleteSystemPropertyNotSet() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        context.checking(new Expectations() {
            {
            }
        });
        Properties configuration = new Properties();
        WsCheckAliveFilter.validateSystemProperties(configuration, logger);
        context.assertIsSatisfied();
    }

    /** A zero timeout (the default) must leave the filter off and log that it is disabled. */
    @Test
    public void addIfFeatureEnabledShouldDisableWebsocketInactivityTimeoutByDefault() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final IoFilterChain filterChain = context.mock(IoFilterChain.class);
        context.checking(new Expectations() {
            {
                oneOf(logger).isDebugEnabled(); will(returnValue(true));
                oneOf(logger).debug(with(stringMatching("WebSocket inactivity timeout is disabled.*")));
            }
        });
        WsCheckAliveFilter.addIfFeatureEnabled(filterChain, FILTER_NAME, 0L, logger);
        context.assertIsSatisfied();
    }

    /** An explicitly configured zero timeout must also disable the feature. */
    @Test
    public void addIfFeatureEnabledShouldAcceptZeroTimeoutOptionToDisableFeature() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final IoFilterChain filterChain = context.mock(IoFilterChain.class);
        context.checking(new Expectations() {
            {
                oneOf(logger).isDebugEnabled(); will(returnValue(true));
                oneOf(logger).debug(with(stringMatching("WebSocket inactivity timeout is disabled.*")));
            }
        });
        WsCheckAliveFilter.addIfFeatureEnabled(filterChain, FILTER_NAME, 0L, logger);
        context.assertIsSatisfied();
    }

    /** Adding the filter must schedule a READER_IDLE time of half the inactivity timeout. */
    @Test
    public void postAddShouldSchedulePingWithTimeoutEqualsHalfWsInactivityTimeout() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoFilterChain filterChain = context.mock(IoFilterChain.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final IoSessionConfigEx sessionConfig = context.mock(IoSessionConfigEx.class);
        // Captures the filter instance that addIfFeatureEnabled registers on the chain.
        final IoFilter[] filterHolder = new IoFilter[1];
        context.checking(new Expectations() {
            {
                oneOf(logger).isDebugEnabled(); will(returnValue(false));
                allowing(session).getBufferAllocator(); will(returnValue(SimpleBufferAllocator.BUFFER_ALLOCATOR));
                oneOf(logger).isTraceEnabled(); will(returnValue(false));
                allowing(filterChain).getSession(); will(returnValue(session));
                oneOf(filterChain).addLast(with(FILTER_NAME), with(any(IoFilter.class)));
                will(saveParameter(filterHolder, 1));
                allowing(session).getConfig(); will(returnValue(sessionConfig));
                allowing(session).getAttribute(with(typedAttributeKeyMatching(".*xtension.*")));
                will(returnValue(ActiveWsExtensions.EMPTY));
                oneOf(sessionConfig).setIdleTimeInMillis(IdleStatus.READER_IDLE, STANDARD_INACTIVITY_TIMEOUT_MILLIS / 2);
            }
        });
        WsCheckAliveFilter.addIfFeatureEnabled(filterChain, FILTER_NAME, STANDARD_INACTIVITY_TIMEOUT_MILLIS, logger);
        WsCheckAliveFilter filter = (WsCheckAliveFilter)filterHolder[0];
        filter.onPostAdd(filterChain, FILTER_NAME, nextFilter);
        context.assertIsSatisfied();
    }

    @Test
    @Ignore( "No longer relevant, since compile-time checking now proves we support millisecond idle timeout")
    public void postAddShouldLogErrorIfIoSessionDoesNotSupportMillisecondIdleTimeout() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final IoFilterChain filterChain = context.mock(IoFilterChain.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        context.checking(new Expectations() {
            {
                oneOf(filterChain).getSession(); will(returnValue(session));
                oneOf(logger).error(with(any(String.class)));
            }
        });
        WsCheckAliveFilter.addIfFeatureEnabled(filterChain, FILTER_NAME, STANDARD_INACTIVITY_TIMEOUT_MILLIS, logger);
        context.assertIsSatisfied();
    }

    /** A PONG reply must reschedule the idle timer (timeout/2) rather than ping again at once. */
    @Test
    public void receivePongShouldNotPingImmediatelyIfConfiguredTimeoutIs2sec() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final IoSessionConfigEx config = context.mock(IoSessionConfigEx.class);
        final long inactivityTimeout = 2000L;
        context.checking(new Expectations() {
            {
                allowing(logger).isDebugEnabled();
                allowing(logger).isTraceEnabled();
                allowing(session).getBufferAllocator(); will(returnValue(SimpleBufferAllocator.BUFFER_ALLOCATOR));
                oneOf(session).getConfig(); will(returnValue(config));
                oneOf(config).setIdleTimeInMillis(IdleStatus.READER_IDLE, inactivityTimeout / 2);
            }
        });
        WsCheckAliveFilter filter = new WsCheckAliveFilter(inactivityTimeout, logger);
        filter.flipNextAction();
        filter.pingWritten(System.currentTimeMillis());
        filter.messageReceived(nextFilter, session, new WsPongMessage(SimpleBufferAllocator.BUFFER_ALLOCATOR.wrap(ByteBuffer.allocate(0))));
        context.assertIsSatisfied();
    }

    /** Even a pathological 1 ms timeout must not trigger an immediate follow-up ping. */
    @Test
    public void receivePongShouldNotPingImmediatelyEvenWithTimeout1ms() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final IoSessionConfigEx config = context.mock(IoSessionConfigEx.class);
        context.checking(new Expectations() {
            {
                allowing(logger).isDebugEnabled();
                allowing(logger).isTraceEnabled();
                oneOf(session).getConfig(); will(returnValue(config));
                oneOf(config).setIdleTimeInMillis(IdleStatus.READER_IDLE, 1L);
            }
        });
        WsCheckAliveFilter filter = new WsCheckAliveFilter(1L, logger);
        filter.flipNextAction();
        long now = System.currentTimeMillis();
        filter.pingWritten(now);
        filter.messageReceived(nextFilter, session, new WsPongMessage(SimpleBufferAllocator.BUFFER_ALLOCATOR.wrap(ByteBuffer.allocate(0))));
        context.assertIsSatisfied();
    }

    @Test
    // If some rogue client sends a PONG randomly without having received a PING
    public void unexpectedPongShouldBeIgnored() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final IoSessionConfigEx config = context.mock(IoSessionConfigEx.class);
        final long rtt = 10;
        final long inactivityTimeout = 3000L;
        context.checking(new Expectations() {
            {
                allowing(logger).isDebugEnabled();
                allowing(logger).isTraceEnabled();
                allowing(session).getConfig(); will(returnValue(config));
                // Only the first (expected) PONG reschedules the timer; the second is ignored.
                oneOf(config).setIdleTimeInMillis(IdleStatus.READER_IDLE, inactivityTimeout / 2);
            }
        });
        WsCheckAliveFilter filter = new WsCheckAliveFilter(inactivityTimeout, logger);
        filter.flipNextAction();
        filter.pingWritten(System.currentTimeMillis());
        Thread.sleep(rtt);
        filter.messageReceived(nextFilter, session, new WsPongMessage(SimpleBufferAllocator.BUFFER_ALLOCATOR.wrap(ByteBuffer.allocate(0))));
        filter.messageReceived(nextFilter, session, new WsPongMessage(SimpleBufferAllocator.BUFFER_ALLOCATOR.wrap(ByteBuffer.allocate(0))));
        context.assertIsSatisfied();
    }

    /** Read-idle while a PONG is outstanding means the peer is gone: close the session. */
    @Test
    public void sessionIdleShouldCloseConnectionIfAwaitingPong() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final IoFilterChain filterChain = context.mock(IoFilterChain.class);
        context.checking(new Expectations() {
            {
                allowing(logger).info(with(any(String.class)), with(any(IoSessionEx.class)));
                oneOf(logger).isTraceEnabled(); will(returnValue(false));
                oneOf(session).getConfig();
                oneOf(session).getFilterChain(); will(returnValue(filterChain));
                oneOf(filterChain).contains(WsAcceptor.CLOSE_FILTER); will(returnValue(true));
                oneOf(filterChain).remove(WsAcceptor.CLOSE_FILTER);
                oneOf(session).close(true);
                oneOf(nextFilter).sessionIdle(session, IdleStatus.READER_IDLE);
            }
        });
        WsCheckAliveFilter filter = new WsCheckAliveFilter(STANDARD_INACTIVITY_TIMEOUT_MILLIS, logger);
        filter.flipNextAction();
        filter.pingWritten(System.currentTimeMillis());
        filter.sessionIdle(nextFilter, session, IdleStatus.READER_IDLE);
        context.assertIsSatisfied();
    }

    /** Read-idle with no PONG outstanding must write a PING and re-arm the idle timer. */
    @Test
    public void sessionIdleShouldResetIdleTimeAndSendPingIfNotAwaitingPong() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        final IoSessionConfigEx config = context.mock(IoSessionConfigEx.class);
        context.checking(new Expectations() {
            {
                allowing(logger).isTraceEnabled(); will(returnValue(false));
                oneOf(session).getConfig(); will(returnValue(config));
                oneOf(config).setIdleTimeInMillis(IdleStatus.READER_IDLE, STANDARD_INACTIVITY_TIMEOUT_MILLIS / 2);
                oneOf(session).write(with(PING));
                oneOf(nextFilter).sessionIdle(session, IdleStatus.READER_IDLE);
            }
        });
        WsCheckAliveFilter filter = new WsCheckAliveFilter(STANDARD_INACTIVITY_TIMEOUT_MILLIS, logger);
        filter.doSessionIdle(nextFilter, session, IdleStatus.READER_IDLE);
        context.assertIsSatisfied();
    }

    /** WRITER_IDLE is not this filter's concern and must be forwarded untouched. */
    @Test
    public void sessionWriterIdleShouldCorrectlyPassItToNextFilter() throws Exception {
        Mockery context = new Mockery();
        final Logger logger = context.mock(Logger.class);
        final NextFilter nextFilter = context.mock(NextFilter.class);
        final IoSessionEx session = context.mock(IoSessionEx.class);
        context.checking(new Expectations() {
            {
                oneOf(nextFilter).sessionIdle(session, IdleStatus.WRITER_IDLE);
            }
        });
        WsCheckAliveFilter filter = new WsCheckAliveFilter(STANDARD_INACTIVITY_TIMEOUT_MILLIS, logger);
        filter.sessionIdle(nextFilter, session, IdleStatus.WRITER_IDLE);
        context.assertIsSatisfied();
    }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kim.service.impl;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.kuali.rice.core.api.membership.MemberType;
import org.kuali.rice.kim.api.KimConstants;
import org.kuali.rice.kim.api.group.Group;
import org.kuali.rice.kim.api.identity.address.EntityAddress;
import org.kuali.rice.kim.api.identity.affiliation.EntityAffiliation;
import org.kuali.rice.kim.api.identity.email.EntityEmail;
import org.kuali.rice.kim.api.identity.employment.EntityEmployment;
import org.kuali.rice.kim.api.identity.entity.Entity;
import org.kuali.rice.kim.api.identity.name.EntityName;
import org.kuali.rice.kim.api.identity.phone.EntityPhone;
import org.kuali.rice.kim.api.identity.principal.Principal;
import org.kuali.rice.kim.api.identity.type.EntityTypeContactInfo;
import org.kuali.rice.kim.api.role.Role;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import org.kuali.rice.kim.bo.ui.KimDocumentRoleMember;
import org.kuali.rice.kim.bo.ui.PersonDocumentAddress;
import org.kuali.rice.kim.bo.ui.PersonDocumentAffiliation;
import org.kuali.rice.kim.bo.ui.PersonDocumentEmail;
import org.kuali.rice.kim.bo.ui.PersonDocumentEmploymentInfo;
import org.kuali.rice.kim.bo.ui.PersonDocumentName;
import org.kuali.rice.kim.bo.ui.PersonDocumentPhone;
import org.kuali.rice.kim.document.IdentityManagementPersonDocument;
import org.kuali.rice.kim.document.IdentityManagementRoleDocument;
import org.kuali.rice.kim.framework.services.KimFrameworkServiceLocator;
import org.kuali.rice.kim.framework.type.KimTypeService;
import org.kuali.rice.kim.impl.common.delegate.DelegateTypeBo;
import org.kuali.rice.kim.impl.group.GroupBo;
import org.kuali.rice.kim.impl.group.GroupMemberBo;
import org.kuali.rice.kim.impl.identity.entity.EntityBo;
import org.kuali.rice.kim.impl.identity.principal.PrincipalBo;
import org.kuali.rice.kim.impl.role.RoleBo;
import org.kuali.rice.kim.impl.role.RoleMemberAttributeDataBo;
import org.kuali.rice.kim.impl.role.RoleMemberBo;
import org.kuali.rice.kim.impl.role.RoleResponsibilityActionBo;
import org.kuali.rice.kim.impl.services.KimImplServiceLocator;
import org.kuali.rice.kim.util.KimCommonUtilsInternal;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.bo.PersistableBusinessObject;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.util.ObjectUtils;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Customized version of the UiDocumentServiceImpl to support LDAP communcation
*
* @author Leo Przybylski (przybyls@arizona.edu)
*/
public class LdapUiDocumentServiceImpl extends org.kuali.rice.kim.service.impl.UiDocumentServiceImpl {
/**
 * Populates the person maintenance document from the KIM entity identified by the given
 * principal id: principal data, privacy preferences, affiliations, names, contact
 * information, and the group/role/delegation memberships.
 *
 * @see org.kuali.rice.kim.service.UiDocumentService#loadEntityToPersonDoc(IdentityManagementPersonDocument, String)
 */
public void loadEntityToPersonDoc(IdentityManagementPersonDocument identityManagementPersonDocument, String principalId) {
    Principal principal = this.getIdentityService().getPrincipal(principalId);
    if (principal == null) {
        throw new RuntimeException("Principal does not exist for principal id:"+principalId);
    }
    identityManagementPersonDocument.setPrincipalId(principal.getPrincipalId());
    identityManagementPersonDocument.setPrincipalName(principal.getPrincipalName());
    identityManagementPersonDocument.setActive(principal.isActive());

    Entity entity = this.getIdentityService().getEntity(principal.getEntityId());
    identityManagementPersonDocument.setEntityId(entity.getId());
    if (ObjectUtils.isNotNull(entity.getPrivacyPreferences())) {
        identityManagementPersonDocument.setPrivacy(loadPrivacyReferences(entity.getPrivacyPreferences()));
    }
    identityManagementPersonDocument.setAffiliations(loadAffiliations(entity.getAffiliations(), entity.getEmploymentInformation()));
    identityManagementPersonDocument.setNames(loadNames(identityManagementPersonDocument, principalId, entity.getNames(), identityManagementPersonDocument.getPrivacy().isSuppressName()));

    // Locate the PERSON contact-info record; if several match, the last one wins.
    EntityTypeContactInfo personContactInfo = null;
    for (EntityTypeContactInfo contactInfo : entity.getEntityTypeContactInfos()) {
        if (KimConstants.EntityTypes.PERSON.equals(contactInfo.getEntityTypeCode())) {
            personContactInfo = EntityTypeContactInfo.Builder.create(contactInfo).build();
        }
    }
    if (personContactInfo != null) {
        identityManagementPersonDocument.setEmails(loadEmails(identityManagementPersonDocument, principalId, personContactInfo.getEmailAddresses(), identityManagementPersonDocument.getPrivacy().isSuppressEmail()));
        identityManagementPersonDocument.setPhones(loadPhones(identityManagementPersonDocument, principalId, personContactInfo.getPhoneNumbers(), identityManagementPersonDocument.getPrivacy().isSuppressPhone()));
        identityManagementPersonDocument.setAddrs(loadAddresses(identityManagementPersonDocument, principalId, personContactInfo.getAddresses(), identityManagementPersonDocument.getPrivacy().isSuppressAddress()));
    }

    // Direct (non-nested) group memberships only.
    List<Group> directGroups = getGroupService().getGroups(getGroupService().getDirectGroupIdsByPrincipalId(
            identityManagementPersonDocument.getPrincipalId()));
    loadGroupToPersonDoc(identityManagementPersonDocument, directGroups);
    loadRoleToPersonDoc(identityManagementPersonDocument);
    loadDelegationsToPersonDoc(identityManagementPersonDocument);
}
/**
 * Best-effort lookup of the workflow document initiator's principal id.
 * Any failure along the chain (missing header or workflow document) yields null.
 */
protected String getInitiatorPrincipalId(Document document){
    try {
        return document.getDocumentHeader()
                .getWorkflowDocument()
                .getInitiatorPrincipalId();
    } catch (Exception ignored) {
        // Deliberately swallowed: callers treat "unknown initiator" as null.
        return null;
    }
}
/**
 * Persists the membership-related portions of the person document: group memberships,
 * role memberships (with their responsibility actions), and delegations. Entity identity
 * data itself is mastered in LDAP and is intentionally not written here.
 *
 * Fix: removed locals that were never read ({@code creatingNew},
 * {@code initiatorPrincipalId}) and the principal-inactivation branch, whose guard flag
 * was initialized to false and never set, so the branch could never execute.
 *
 * @see org.kuali.rice.kim.service.UiDocumentService#saveEntityPerson(IdentityManagementPersonDocument)
 */
public void saveEntityPerson(IdentityManagementPersonDocument identityManagementPersonDocument) {
    // NOTE(review): the looked-up entity was never used; the call is retained in case its
    // argument validation / failure behavior is relied upon — confirm it can be dropped.
    getIdentityService().getEntity(identityManagementPersonDocument.getEntityId());

    List<GroupMemberBo> groupPrincipals = populateGroupMembers(identityManagementPersonDocument);
    List<RoleMemberBo> rolePrincipals = populateRoleMembers(identityManagementPersonDocument);
    List<DelegateTypeBo> personDelegations = populateDelegations(identityManagementPersonDocument);
    List<RoleResponsibilityActionBo> roleRspActions = populateRoleRspActions(identityManagementPersonDocument);
    List<RoleMemberAttributeDataBo> blankRoleMemberAttrs = getBlankRoleMemberAttrs(rolePrincipals);

    List<PersistableBusinessObject> bos = new ArrayList<PersistableBusinessObject>();
    bos.addAll(groupPrincipals);
    bos.addAll(rolePrincipals);
    bos.addAll(roleRspActions);
    bos.addAll(personDelegations);
    // boservice.save(bos) does not handle deleteawarelist
    getBusinessObjectService().save(bos);

    // Role-member attribute rows left blank on the document are deleted outright.
    if (!blankRoleMemberAttrs.isEmpty()) {
        getBusinessObjectService().delete(blankRoleMemberAttrs);
    }
}
/**
 * Builds the principal list for the entity from the person document, carrying over the
 * version number and object id from the matching original principal so optimistic locking
 * still works.
 *
 * @return true when the matching original principal was active and the document marks it
 *         inactive (i.e. this save inactivates the principal)
 */
protected boolean setupPrincipal(IdentityManagementPersonDocument identityManagementPersonDocument,EntityBo kimEntity, List<PrincipalBo> origPrincipals) {
    boolean inactivating = false;
    Principal.Builder builder = Principal.Builder.create(identityManagementPersonDocument.getPrincipalName());
    builder.setPrincipalId(identityManagementPersonDocument.getPrincipalId());
    builder.setActive(identityManagementPersonDocument.isActive());
    builder.setEntityId(identityManagementPersonDocument.getEntityId());
    if (ObjectUtils.isNotNull(origPrincipals)) {
        for (PrincipalBo existing : origPrincipals) {
            if (existing.getPrincipalId() == null
                    || !StringUtils.equals(existing.getPrincipalId(), builder.getPrincipalId())) {
                continue;
            }
            builder.setVersionNumber(existing.getVersionNumber());
            builder.setObjectId(existing.getObjectId());
            // Detect an active -> inactive transition for this principal.
            if (existing.isActive() && !builder.isActive()) {
                inactivating = true;
            }
        }
    }
    List<PrincipalBo> principals = new ArrayList<PrincipalBo>();
    principals.add(PrincipalBo.from(builder.build()));
    kimEntity.setPrincipals(principals);
    return inactivating;
}
/**
 * Converts the entity's active affiliations into {@code PersonDocumentAffiliation} rows,
 * attaching each active employment record whose entity-affiliation id matches the
 * affiliation being converted.
 *
 * @param affiliations entity affiliations to convert; inactive entries are skipped
 * @param empInfos all employment records for the entity, matched to affiliations by id
 * @return document affiliation rows (never null)
 */
protected List<PersonDocumentAffiliation> loadAffiliations(List <EntityAffiliation> affiliations, List<EntityEmployment> empInfos) {
    List<PersonDocumentAffiliation> docAffiliations = new ArrayList<PersonDocumentAffiliation>();
    if(ObjectUtils.isNotNull(affiliations)){
        for (EntityAffiliation affiliation: affiliations) {
            if(affiliation.isActive()){
                PersonDocumentAffiliation docAffiliation = new PersonDocumentAffiliation();
                docAffiliation.setAffiliationTypeCode(affiliation.getAffiliationType().getCode());
                docAffiliation.setCampusCode(affiliation.getCampusCode());
                docAffiliation.setActive(affiliation.isActive());
                docAffiliation.setDflt(affiliation.isDefaultValue());
                docAffiliation.setEntityAffiliationId(affiliation.getId());
                docAffiliation.refreshReferenceObject("affiliationType");
                // EntityAffiliationImpl does not define empinfos as collection
                docAffiliations.add(docAffiliation);
                docAffiliation.setEdit(true);
                // employment informations
                List<PersonDocumentEmploymentInfo> docEmploymentInformations = new ArrayList<PersonDocumentEmploymentInfo>();
                if(ObjectUtils.isNotNull(empInfos)){
                    for (EntityEmployment empInfo: empInfos) {
                        // Only active employment records tied to THIS affiliation are copied.
                        if (empInfo.isActive()
                                && StringUtils.equals(docAffiliation.getEntityAffiliationId(),
                                        (empInfo.getEntityAffiliation() != null ? empInfo.getEntityAffiliation().getId() : null))) {
                            PersonDocumentEmploymentInfo docEmpInfo = new PersonDocumentEmploymentInfo();
                            // NOTE(review): the employment id is populated from getEmployeeId(), the
                            // same value used for setEmployeeId just below; empInfo.getId() may have
                            // been intended here — confirm before changing.
                            docEmpInfo.setEntityEmploymentId(empInfo.getEmployeeId());
                            docEmpInfo.setEmployeeId(empInfo.getEmployeeId());
                            docEmpInfo.setEmploymentRecordId(empInfo.getEmploymentRecordId());
                            docEmpInfo.setBaseSalaryAmount(empInfo.getBaseSalaryAmount());
                            docEmpInfo.setPrimaryDepartmentCode(empInfo.getPrimaryDepartmentCode());
                            // Status/type may legitimately be absent, hence the null guards.
                            docEmpInfo.setEmploymentStatusCode(empInfo.getEmployeeStatus() != null ? empInfo.getEmployeeStatus().getCode() : null);
                            docEmpInfo.setEmploymentTypeCode(empInfo.getEmployeeType() != null ? empInfo.getEmployeeType().getCode() : null);
                            docEmpInfo.setActive(empInfo.isActive());
                            docEmpInfo.setPrimary(empInfo.isPrimary());
                            docEmpInfo.setEntityAffiliationId(empInfo.getEntityAffiliation() != null ? empInfo.getEntityAffiliation().getId() : null);
                            // there is no version number on KimEntityEmploymentInformationInfo
                            //docEmpInfo.setVersionNumber(empInfo.getVersionNumber());
                            docEmpInfo.setEdit(true);
                            docEmpInfo.refreshReferenceObject("employmentType");
                            docEmploymentInformations.add(docEmpInfo);
                        }
                    }
                }
                docAffiliation.setEmpInfos(docEmploymentInformations);
            }
        }
    }
    return docAffiliations;
}
/**
 * Converts active entity names into {@code PersonDocumentName} rows.
 * Unmasked values are copied on purpose: the UI layer applies privacy masking.
 */
protected List<PersonDocumentName> loadNames( IdentityManagementPersonDocument personDoc, String principalId, List <EntityName> names, boolean suppressDisplay ) {
    List<PersonDocumentName> docNames = new ArrayList<PersonDocumentName>();
    if (ObjectUtils.isNotNull(names)) {
        for (EntityName entityName : names) {
            if (!entityName.isActive()) {
                continue;
            }
            PersonDocumentName docName = new PersonDocumentName();
            if (entityName.getNameType() != null) {
                docName.setNameCode(entityName.getNameType().getCode());
            }
            // We do not need to check the privacy setting here - the UI takes care of it.
            docName.setFirstName(entityName.getFirstNameUnmasked());
            docName.setLastName(entityName.getLastNameUnmasked());
            docName.setMiddleName(entityName.getMiddleNameUnmasked());
            docName.setNamePrefix(entityName.getNamePrefixUnmasked());
            docName.setNameSuffix(entityName.getNameSuffixUnmasked());
            docName.setActive(entityName.isActive());
            docName.setDflt(entityName.isDefaultValue());
            docName.setEdit(true);
            docName.setEntityNameId(entityName.getId());
            docNames.add(docName);
        }
    }
    return docNames;
}
/**
 * Converts active entity addresses into {@code PersonDocumentAddress} rows.
 * Unmasked values are copied on purpose: the UI layer applies privacy masking.
 *
 * Fix: the address-type dereference is now null-guarded, matching the sibling loaders
 * (loadNames/loadEmails/loadPhones) which already tolerate a missing type instead of
 * throwing a NullPointerException.
 *
 * @param identityManagementPersonDocument target document (kept for signature parity)
 * @param principalId principal whose addresses are loaded (kept for signature parity)
 * @param entityAddresses source addresses; inactive entries are skipped
 * @param suppressDisplay privacy flag (masking is handled by the UI, not here)
 * @return document address rows (never null)
 */
protected List<PersonDocumentAddress> loadAddresses(IdentityManagementPersonDocument identityManagementPersonDocument, String principalId, List<EntityAddress> entityAddresses, boolean suppressDisplay ) {
    List<PersonDocumentAddress> docAddresses = new ArrayList<PersonDocumentAddress>();
    if(ObjectUtils.isNotNull(entityAddresses)){
        for (EntityAddress address: entityAddresses) {
            if(address.isActive()){
                PersonDocumentAddress docAddress = new PersonDocumentAddress();
                docAddress.setEntityTypeCode(address.getEntityTypeCode());
                // Null-safe: an address without a type no longer causes an NPE.
                if (address.getAddressType() != null) {
                    docAddress.setAddressTypeCode(address.getAddressType().getCode());
                }
                //We do not need to check the privacy setting here - The UI should care of it
                docAddress.setLine1(address.getLine1Unmasked());
                docAddress.setLine2(address.getLine2Unmasked());
                docAddress.setLine3(address.getLine3Unmasked());
                docAddress.setStateProvinceCode(address.getStateProvinceCodeUnmasked());
                docAddress.setPostalCode(address.getPostalCodeUnmasked());
                docAddress.setCountryCode(address.getCountryCodeUnmasked());
                docAddress.setCity(address.getCityUnmasked());
                docAddress.setActive(address.isActive());
                docAddress.setDflt(address.isDefaultValue());
                docAddress.setEntityAddressId(address.getId());
                docAddress.setEdit(true);
                docAddresses.add(docAddress);
            }
        }
    }
    return docAddresses;
}
/**
 * Converts active entity email addresses into {@code PersonDocumentEmail} rows.
 * Unmasked values are copied on purpose: the UI layer applies privacy masking.
 */
protected List<PersonDocumentEmail> loadEmails(IdentityManagementPersonDocument identityManagementPersonDocument, String principalId, List<EntityEmail> entityEmails, boolean suppressDisplay ) {
    List<PersonDocumentEmail> docEmails = new ArrayList<PersonDocumentEmail>();
    if (ObjectUtils.isNotNull(entityEmails)) {
        for (EntityEmail entityEmail : entityEmails) {
            if (!entityEmail.isActive()) {
                continue;
            }
            PersonDocumentEmail docEmail = new PersonDocumentEmail();
            docEmail.setEntityTypeCode(entityEmail.getEntityTypeCode());
            if (entityEmail.getEmailType() != null) {
                docEmail.setEmailTypeCode(entityEmail.getEmailType().getCode());
            }
            // We do not need to check the privacy setting here - the UI takes care of it.
            docEmail.setEmailAddress(entityEmail.getEmailAddressUnmasked());
            docEmail.setActive(entityEmail.isActive());
            docEmail.setDflt(entityEmail.isDefaultValue());
            docEmail.setEntityEmailId(entityEmail.getId());
            docEmail.setEdit(true);
            docEmails.add(docEmail);
        }
    }
    return docEmails;
}
/**
 * Converts active entity phone numbers into {@code PersonDocumentPhone} rows.
 * Unmasked values are copied on purpose: the UI layer applies privacy masking.
 */
protected List<PersonDocumentPhone> loadPhones(IdentityManagementPersonDocument identityManagementPersonDocument, String principalId, List<EntityPhone> entityPhones, boolean suppressDisplay ) {
    List<PersonDocumentPhone> docPhones = new ArrayList<PersonDocumentPhone>();
    if (ObjectUtils.isNotNull(entityPhones)) {
        for (EntityPhone entityPhone : entityPhones) {
            if (!entityPhone.isActive()) {
                continue;
            }
            PersonDocumentPhone docPhone = new PersonDocumentPhone();
            if (entityPhone.getPhoneType() != null) {
                docPhone.setPhoneTypeCode(entityPhone.getPhoneType().getCode());
            }
            docPhone.setEntityTypeCode(entityPhone.getEntityTypeCode());
            // We do not need to check the privacy setting here - the UI takes care of it.
            docPhone.setPhoneNumber(entityPhone.getPhoneNumberUnmasked());
            docPhone.setCountryCode(entityPhone.getCountryCodeUnmasked());
            docPhone.setExtensionNumber(entityPhone.getExtensionNumberUnmasked());
            docPhone.setActive(entityPhone.isActive());
            docPhone.setDflt(entityPhone.isDefaultValue());
            docPhone.setEntityPhoneId(entityPhone.getId());
            docPhone.setEdit(true);
            docPhones.add(docPhone);
        }
    }
    return docPhones;
}
/**
 * Resolves a role member (principal, group, or role) to its business object by primary key.
 *
 * Fix: removed the dead service lookups whose results were only checked against null in
 * empty {@code if} blocks (getPrincipal/getGroup/getRole) — they contributed nothing to
 * the returned value.
 *
 * @param memberTypeCode one of the {@link MemberType} codes (principal, group, role)
 * @param memberId primary key of the member to fetch
 * @return the matching business object, or null when the lookup finds nothing
 */
public BusinessObject getMember(String memberTypeCode, String memberId){
    Class<? extends BusinessObject> roleMemberTypeClass = null;
    String roleMemberIdName = "";
    if (MemberType.PRINCIPAL.getCode().equals(memberTypeCode)) {
        roleMemberTypeClass = PrincipalBo.class;
        roleMemberIdName = KimConstants.PrimaryKeyConstants.PRINCIPAL_ID;
    } else if (MemberType.GROUP.getCode().equals(memberTypeCode)) {
        roleMemberTypeClass = GroupBo.class;
        roleMemberIdName = KimConstants.PrimaryKeyConstants.GROUP_ID;
    } else if (MemberType.ROLE.getCode().equals(memberTypeCode)) {
        roleMemberTypeClass = RoleBo.class;
        roleMemberIdName = KimConstants.PrimaryKeyConstants.ROLE_ID;
    }
    // NOTE(review): an unrecognized memberTypeCode leaves roleMemberTypeClass null and the
    // lookup below will fail; preserved from the original — confirm whether callers can
    // ever pass an unknown code before tightening this.
    Map<String, String> criteria = new HashMap<String, String>();
    criteria.put(roleMemberIdName, memberId);
    return getBusinessObjectService().findByPrimaryKey(roleMemberTypeClass, criteria);
}
/**
 * Returns true when the current user is modifying their own record, or when they hold
 * the KIM "Modify Entity" permission.
 *
 * NOTE(review): the original comment claimed "users should not be able to edit
 * themselves", but the first clause grants access precisely when the two principal ids
 * are equal — confirm which behavior is intended.
 * NOTE(review): the permission qualifier passes currentUserPrincipalId as PRINCIPAL_ID;
 * toModifyPrincipalId looks like the more natural qualifier — confirm against the
 * permission definition.
 *
 * @see org.kuali.rice.kim.service.impl.UiDocumentServiceImpl#canModifyEntity(java.lang.String, java.lang.String)
 */
@Override
public boolean canModifyEntity( String currentUserPrincipalId, String toModifyPrincipalId ){
    return (StringUtils.isNotBlank(currentUserPrincipalId) && StringUtils.isNotBlank(toModifyPrincipalId) &&
            currentUserPrincipalId.equals(toModifyPrincipalId)) ||
            getPermissionService().isAuthorized(
                    currentUserPrincipalId,
                    KimConstants.NAMESPACE_CODE,
                    KimConstants.PermissionNames.MODIFY_ENTITY,
                    Collections.singletonMap(KimConstants.AttributeConstants.PRINCIPAL_ID, currentUserPrincipalId));
}
/**
 * Builds the list of {@link RoleMemberBo}s to persist from the members held on a
 * role maintenance document, reconciling each against the role's existing
 * (original) members.
 *
 * For each document member:
 * - If an ORIGINAL member exists with the same role/member/type that is currently
 *   INACTIVE and whose unique attributes clash with the new member, the new member
 *   is "reactivating" it: the new member takes over the original's id (the
 *   previously assigned id is remembered in {@code newRoleMemberIdAssigned}).
 * - If an original member shares the (possibly just-reassigned) id, its version
 *   number is carried over for optimistic locking and it is kept as the source of
 *   the original attribute details.
 *
 * @param identityManagementRoleDocument the role maintenance document being saved
 * @param origRoleMembers the role's currently persisted members; may be null
 * @return the reconciled members ready for persistence (never null)
 */
protected List<RoleMemberBo> getRoleMembers(IdentityManagementRoleDocument identityManagementRoleDocument, List<RoleMemberBo> origRoleMembers){
    List<RoleMemberBo> roleMembers = new ArrayList<RoleMemberBo>();
    RoleMemberBo newRoleMember;
    RoleMemberBo origRoleMemberImplTemp;
    List<RoleMemberAttributeDataBo> origAttributes;
    boolean activatingInactive = false;
    String newRoleMemberIdAssigned = "";
    // Refresh the document's KimType before resolving the type service from it.
    identityManagementRoleDocument.setKimType(KimApiServiceLocator.getKimTypeInfoService().getKimType(identityManagementRoleDocument.getRoleTypeId()));
    KimTypeService kimTypeService = KimFrameworkServiceLocator.getKimTypeService(identityManagementRoleDocument.getKimType());
    if(CollectionUtils.isNotEmpty(identityManagementRoleDocument.getMembers())){
        for(KimDocumentRoleMember documentRoleMember: identityManagementRoleDocument.getMembers()){
            origRoleMemberImplTemp = null;
            newRoleMember = new RoleMemberBo();
            KimCommonUtilsInternal.copyProperties(newRoleMember, documentRoleMember);
            newRoleMember.setRoleId(identityManagementRoleDocument.getRoleId());
            if(ObjectUtils.isNotNull(origRoleMembers)){
                for(RoleMemberBo origRoleMemberImpl: origRoleMembers){
                    // Reactivation case: same role/member/type, original inactive now,
                    // and unique-attribute validation reports a clash (non-empty result).
                    if((origRoleMemberImpl.getRoleId()!=null && StringUtils.equals(origRoleMemberImpl.getRoleId(), newRoleMember.getRoleId())) &&
                            (origRoleMemberImpl.getMemberId()!=null && StringUtils.equals(origRoleMemberImpl.getMemberId(), newRoleMember.getMemberId())) &&
                            (origRoleMemberImpl.getType()!=null && org.apache.commons.lang.ObjectUtils.equals(origRoleMemberImpl.getType(), newRoleMember.getType())) &&
                            !origRoleMemberImpl.isActive(new Timestamp(System.currentTimeMillis())) &&
                            !kimTypeService.validateUniqueAttributes(identityManagementRoleDocument.getKimType().getId(),
                                    documentRoleMember.getQualifierAsMap(), origRoleMemberImpl.getAttributes()).isEmpty()) {
                        //TODO: verify if you want to add && newRoleMember.isActive() condition to if...
                        newRoleMemberIdAssigned = newRoleMember.getId();
                        newRoleMember.setId(origRoleMemberImpl.getId());
                        activatingInactive = true;
                    }
                    // Id match (possibly via the reassignment above): carry over the
                    // version number and remember the original for attribute reuse.
                    if(origRoleMemberImpl.getId()!=null && StringUtils.equals(origRoleMemberImpl.getId(), newRoleMember.getId())){
                        newRoleMember.setVersionNumber(origRoleMemberImpl.getVersionNumber());
                        origRoleMemberImplTemp = origRoleMemberImpl;
                    }
                }
            }
            // NOTE(review): the guard checks getAttributes() but the value read is
            // getAttributeDetails() — confirm the two stay in sync on RoleMemberBo.
            origAttributes = (origRoleMemberImplTemp==null || origRoleMemberImplTemp.getAttributes()==null)?
                    new ArrayList<RoleMemberAttributeDataBo>():origRoleMemberImplTemp.getAttributeDetails();
            newRoleMember.setActiveFromDateValue(documentRoleMember.getActiveFromDate());
            newRoleMember.setActiveToDateValue(documentRoleMember.getActiveToDate());
            newRoleMember.setAttributeDetails(getRoleMemberAttributeData(documentRoleMember.getQualifiers(), origAttributes, activatingInactive, newRoleMemberIdAssigned));
            newRoleMember.setRoleRspActions(getRoleMemberResponsibilityActions(documentRoleMember, origRoleMemberImplTemp, activatingInactive, newRoleMemberIdAssigned));
            roleMembers.add(newRoleMember);
            // Reset the per-member reactivation flag before the next iteration.
            activatingInactive = false;
        }
    }
    return roleMembers;
}
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.drm;
import android.media.DeniedByServerException;
import android.media.MediaCryptoException;
import android.media.MediaDrm;
import android.media.MediaDrmException;
import android.media.NotProvisionedException;
import android.os.Handler;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
 * Used to obtain keys for decrypting protected media streams. See {@link android.media.MediaDrm}.
 *
 * @param <T> The type of {@link ExoMediaCrypto} produced by {@link #createMediaCrypto(byte[])}.
 */
public interface ExoMediaDrm<T extends ExoMediaCrypto> {

    /**
     * @see MediaDrm#EVENT_KEY_REQUIRED
     */
    @SuppressWarnings("InlinedApi")
    int EVENT_KEY_REQUIRED = MediaDrm.EVENT_KEY_REQUIRED;

    /**
     * @see MediaDrm#EVENT_KEY_EXPIRED
     */
    @SuppressWarnings("InlinedApi")
    int EVENT_KEY_EXPIRED = MediaDrm.EVENT_KEY_EXPIRED;

    /**
     * @see MediaDrm#EVENT_PROVISION_REQUIRED
     */
    @SuppressWarnings("InlinedApi")
    int EVENT_PROVISION_REQUIRED = MediaDrm.EVENT_PROVISION_REQUIRED;

    /**
     * @see MediaDrm#KEY_TYPE_STREAMING
     */
    @SuppressWarnings("InlinedApi")
    int KEY_TYPE_STREAMING = MediaDrm.KEY_TYPE_STREAMING;

    /**
     * @see MediaDrm#KEY_TYPE_OFFLINE
     */
    @SuppressWarnings("InlinedApi")
    int KEY_TYPE_OFFLINE = MediaDrm.KEY_TYPE_OFFLINE;

    /**
     * @see MediaDrm#KEY_TYPE_RELEASE
     */
    @SuppressWarnings("InlinedApi")
    int KEY_TYPE_RELEASE = MediaDrm.KEY_TYPE_RELEASE;

    /**
     * Listener for DRM events. Mirrors {@link android.media.MediaDrm.OnEventListener}.
     *
     * @see android.media.MediaDrm.OnEventListener
     */
    interface OnEventListener<T extends ExoMediaCrypto> {
        /**
         * Called when an event occurs that requires the app to be notified
         *
         * @param mediaDrm The {@link ExoMediaDrm} object on which the event occurred.
         * @param sessionId The DRM session ID on which the event occurred.
         * @param event Indicates the event type.
         * @param extra A secondary error code.
         * @param data Optional byte array of data that may be associated with the event.
         */
        void onEvent(
                ExoMediaDrm<? extends T> mediaDrm,
                byte[] sessionId,
                int event,
                int extra,
                @Nullable byte[] data);
    }

    /**
     * Listener for key-status changes. Mirrors
     * {@link android.media.MediaDrm.OnKeyStatusChangeListener}.
     *
     * @see android.media.MediaDrm.OnKeyStatusChangeListener
     */
    interface OnKeyStatusChangeListener<T extends ExoMediaCrypto> {
        /**
         * Called when the keys in a session change status, such as when the license is renewed or
         * expires.
         *
         * @param mediaDrm The {@link ExoMediaDrm} object on which the event occurred.
         * @param sessionId The DRM session ID on which the event occurred.
         * @param exoKeyInformation A list of {@link KeyStatus} that contains key ID and status.
         * @param hasNewUsableKey Whether a new key became usable.
         */
        void onKeyStatusChange(
                ExoMediaDrm<? extends T> mediaDrm,
                byte[] sessionId,
                List<KeyStatus> exoKeyInformation,
                boolean hasNewUsableKey);
    }

    /**
     * Immutable (key id, status code) pair.
     *
     * @see android.media.MediaDrm.KeyStatus
     */
    final class KeyStatus {
        /** Status code for the key; semantics as in {@link android.media.MediaDrm.KeyStatus}. */
        private final int statusCode;
        /** Raw key id. Stored and returned without defensive copying. */
        private final byte[] keyId;
        public KeyStatus(int statusCode, byte[] keyId) {
            this.statusCode = statusCode;
            this.keyId = keyId;
        }
        /** Returns the status code for the key. */
        public int getStatusCode() {
            return statusCode;
        }
        /** Returns the key id. Note: exposes the internal array; callers must not mutate it. */
        public byte[] getKeyId() {
            return keyId;
        }
    }

    /**
     * Opaque key request payload plus the URL of the license server to send it to.
     *
     * @see android.media.MediaDrm.KeyRequest
     */
    final class KeyRequest {
        /** Opaque request payload to post to the license server. No defensive copy is made. */
        private final byte[] data;
        /** Default license server URL supplied by the DRM engine. */
        private final String licenseServerUrl;
        public KeyRequest(byte[] data, String licenseServerUrl) {
            this.data = data;
            this.licenseServerUrl = licenseServerUrl;
        }
        /** Returns the request payload. Note: exposes the internal array; callers must not mutate it. */
        public byte[] getData() {
            return data;
        }
        /** Returns the license server URL for this request. */
        public String getLicenseServerUrl() {
            return licenseServerUrl;
        }
    }

    /**
     * Opaque provisioning request payload plus the default provisioning server URL.
     *
     * @see android.media.MediaDrm.ProvisionRequest
     */
    final class ProvisionRequest {
        /** Opaque request payload to post to the provisioning server. No defensive copy is made. */
        private final byte[] data;
        /** Default provisioning server URL supplied by the DRM engine. */
        private final String defaultUrl;
        public ProvisionRequest(byte[] data, String defaultUrl) {
            this.data = data;
            this.defaultUrl = defaultUrl;
        }
        /** Returns the request payload. Note: exposes the internal array; callers must not mutate it. */
        public byte[] getData() {
            return data;
        }
        /** Returns the default provisioning server URL. */
        public String getDefaultUrl() {
            return defaultUrl;
        }
    }

    /**
     * @see MediaDrm#setOnEventListener(MediaDrm.OnEventListener)
     */
    void setOnEventListener(OnEventListener<? super T> listener);

    /**
     * @see MediaDrm#setOnKeyStatusChangeListener(MediaDrm.OnKeyStatusChangeListener, Handler)
     */
    void setOnKeyStatusChangeListener(OnKeyStatusChangeListener<? super T> listener);

    /**
     * @see MediaDrm#openSession()
     */
    byte[] openSession() throws MediaDrmException;

    /**
     * @see MediaDrm#closeSession(byte[])
     */
    void closeSession(byte[] sessionId);

    /**
     * Generates a key request.
     *
     * @param scope If {@code keyType} is {@link #KEY_TYPE_STREAMING} or {@link #KEY_TYPE_OFFLINE},
     *     the session id that the keys will be provided to. If {@code keyType} is {@link
     *     #KEY_TYPE_RELEASE}, the keySetId of the keys to release.
     * @param schemeDatas If key type is {@link #KEY_TYPE_STREAMING} or {@link #KEY_TYPE_OFFLINE}, a
     *     list of {@link SchemeData} instances extracted from the media. Null otherwise.
     * @param keyType The type of the request. Either {@link #KEY_TYPE_STREAMING} to acquire keys for
     *     streaming, {@link #KEY_TYPE_OFFLINE} to acquire keys for offline usage, or {@link
     *     #KEY_TYPE_RELEASE} to release acquired keys. Releasing keys invalidates them for all
     *     sessions.
     * @param optionalParameters Are included in the key request message to allow a client application
     *     to provide additional message parameters to the server. This may be {@code null} if no
     *     additional parameters are to be sent.
     * @return The generated key request.
     * @see MediaDrm#getKeyRequest(byte[], byte[], String, int, HashMap)
     */
    KeyRequest getKeyRequest(
            byte[] scope,
            @Nullable List<SchemeData> schemeDatas,
            int keyType,
            @Nullable HashMap<String, String> optionalParameters)
            throws NotProvisionedException;

    /** @see MediaDrm#provideKeyResponse(byte[], byte[]) */
    byte[] provideKeyResponse(byte[] scope, byte[] response)
            throws NotProvisionedException, DeniedByServerException;

    /**
     * @see MediaDrm#getProvisionRequest()
     */
    ProvisionRequest getProvisionRequest();

    /**
     * @see MediaDrm#provideProvisionResponse(byte[])
     */
    void provideProvisionResponse(byte[] response) throws DeniedByServerException;

    /**
     * @see MediaDrm#queryKeyStatus(byte[])
     */
    Map<String, String> queryKeyStatus(byte[] sessionId);

    /**
     * @see MediaDrm#release()
     */
    void release();

    /**
     * @see MediaDrm#restoreKeys(byte[], byte[])
     */
    void restoreKeys(byte[] sessionId, byte[] keySetId);

    /**
     * @see MediaDrm#getPropertyString(String)
     */
    String getPropertyString(String propertyName);

    /**
     * @see MediaDrm#getPropertyByteArray(String)
     */
    byte[] getPropertyByteArray(String propertyName);

    /**
     * @see MediaDrm#setPropertyString(String, String)
     */
    void setPropertyString(String propertyName, String value);

    /**
     * @see MediaDrm#setPropertyByteArray(String, byte[])
     */
    void setPropertyByteArray(String propertyName, byte[] value);

    /**
     * @see android.media.MediaCrypto#MediaCrypto(UUID, byte[])
     * @param sessionId The DRM session ID.
     * @return An object extends {@link ExoMediaCrypto}, using opaque crypto scheme specific data.
     * @throws MediaCryptoException If the instance can't be created.
     */
    T createMediaCrypto(byte[] sessionId) throws MediaCryptoException;
}
| |
package woods.log.timber;
import android.support.annotation.NonNull;
import java.util.ArrayList;
import java.util.List;
/**
 * Static logging facade that fans each call out to all planted {@link Tree}s.
 *
 * This code is modified copy from JakeWharton's timber project.
 * You can find the original code at Github. Url: "https://github.com/JakeWharton/timber"
 * Changes:
 * - Remove class DebugLog
 * - Draw interface 'Plant' out of class Timber
 * - Move getStackTrace() method to class Tools
 * - Re-code 'TREE_OF_SOULS'
 *
 * Fixes in this revision:
 * - {@code forestAsArray} now starts as an empty array instead of {@code null};
 *   previously any logging call issued before the first {@link #plant(Tree)}
 *   threw a NullPointerException on {@code forestAsArray.length}.
 * - The {@code w(Throwable, ...)} delegate now forwards the throwable to
 *   {@code probe} (it passed {@code null}, unlike every other throwable overload).
 * - {@code forestAsArray} is republished inside the {@code synchronized (Forest)}
 *   block in plant/uproot, so concurrent mutations cannot publish a stale snapshot.
 */
public class Timber {

    /** All planted trees; all structural mutation is guarded by synchronizing on this list. */
    private static final List<Tree> Forest = new ArrayList<>();

    /** Shared empty array; also the {@code toArray} type token. */
    private static final Tree[] TREE_ARRAY_EMPTY = new Tree[0];

    /** Lock-free snapshot of {@link #Forest} used on the hot logging path. Never null. */
    private static volatile Tree[] forestAsArray = TREE_ARRAY_EMPTY;

    /** One-shot, per-thread tag set via {@link #tag(String)}. */
    private static ThreadLocal<String> Tags = new ThreadLocal<>();

    /** Per-thread logging context captured by {@link #probe(Level, Throwable)}. */
    private static ThreadLocal<Milieu> Milieus = new ThreadLocal<>();

    /**
     * Reads the forest snapshot ONCE and, when it is non-empty, captures the
     * logging context via {@link #probe(Level, Throwable)} — exactly the guard
     * every delegate method previously inlined. Reading once (instead of
     * length-check then re-read) also removes a race with concurrent uproot.
     *
     * @param level severity of the pending call
     * @param e throwable being logged, may be null
     * @return the snapshot to dispatch to (possibly empty, never null)
     */
    private static Tree[] treesForDispatch(Level level, Throwable e) {
        Tree[] forest = forestAsArray;
        if (forest.length > 0) {
            probe(level, e);
        }
        return forest;
    }

    /**
     * A {@link Tree} that delegates to all planted trees in the {@linkplain #Forest forest}.
     */
    private static final Tree TREE_OF_SOULS = new Tree() {
        @Override
        public void v(@NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.V, null)) {
                tree.v(message, args);
            }
        }

        @Override
        public void v(@NonNull Throwable e, @NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.V, e)) {
                tree.v(e, message, args);
            }
        }

        @Override
        public void d(@NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.D, null)) {
                tree.d(message, args);
            }
        }

        @Override
        public void d(@NonNull Throwable e, @NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.D, e)) {
                tree.d(e, message, args);
            }
        }

        @Override
        public void i(@NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.I, null)) {
                tree.i(message, args);
            }
        }

        @Override
        public void i(@NonNull Throwable e, @NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.I, e)) {
                tree.i(e, message, args);
            }
        }

        @Override
        public void w(@NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.W, null)) {
                tree.w(message, args);
            }
        }

        @Override
        public void w(@NonNull Throwable e, @NonNull String message, Object... args) {
            // Bug fix: previously probed with null instead of forwarding e.
            for (Tree tree : treesForDispatch(Level.W, e)) {
                tree.w(e, message, args);
            }
        }

        @Override
        public void e(@NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.E, null)) {
                tree.e(message, args);
            }
        }

        @Override
        public void e(@NonNull Throwable e, @NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.E, e)) {
                tree.e(e, message, args);
            }
        }

        @Override
        public void wtf(@NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.A, null)) {
                tree.wtf(message, args);
            }
        }

        @Override
        public void wtf(@NonNull Throwable e, @NonNull String message, Object... args) {
            for (Tree tree : treesForDispatch(Level.A, e)) {
                tree.wtf(e, message, args);
            }
        }

        @Override
        public void plant() {
            throw new AssertionError("Plant 'soul of tree'?");
        }

        @Override
        public void uproot() {
            throw new AssertionError("Uproot 'Tree of soul'?");
        }

        @Override
        public void pin(@NonNull Spec spec) {
            // The tree of souls carries no configuration of its own; ignore.
        }
    };

    private Timber() {
        throw new AssertionError("No instances for 'Timber'.");
    }

    /**
     * Log verbose message with optional format args.
     */
    public static void v(@NonNull String message, Object... args) {
        astree().v(message, args);
    }

    /**
     * Log verbose exception and message with optional format args.
     */
    public static void v(@NonNull Throwable e, @NonNull String message, Object... args) {
        astree().v(e, message, args);
    }

    /**
     * Log debug message with optional format args.
     */
    public static void d(@NonNull String message, Object... args) {
        astree().d(message, args);
    }

    /**
     * Log debug exception and message with optional format args.
     */
    public static void d(@NonNull Throwable e, @NonNull String message, Object... args) {
        astree().d(e, message, args);
    }

    /**
     * Log an info message with optional format args.
     */
    public static void i(@NonNull String message, Object... args) {
        astree().i(message, args);
    }

    /**
     * Log an info exception and message with optional format args.
     */
    public static void i(@NonNull Throwable e, @NonNull String message, Object... args) {
        astree().i(e, message, args);
    }

    /**
     * Log warning message with optional format args.
     */
    public static void w(@NonNull String message, Object... args) {
        astree().w(message, args);
    }

    /**
     * Log warning exception and message with optional format args.
     */
    public static void w(@NonNull Throwable e, @NonNull String message, Object... args) {
        astree().w(e, message, args);
    }

    /**
     * Log an error message with optional format args.
     */
    public static void e(@NonNull String message, Object... args) {
        astree().e(message, args);
    }

    /**
     * Log an error exception and message with optional format args.
     */
    public static void e(@NonNull Throwable e, @NonNull String message, Object... args) {
        astree().e(e, message, args);
    }

    /**
     * Log an assert message with optional format args. A synthetic
     * {@link AssertionError} is attached so the call site is captured.
     */
    public static void wtf(@NonNull String message, Object... args) {
        astree().wtf(new AssertionError("Assertion Hit."), message, args);
    }

    /**
     * Log an assert exception and message with optional format args.
     */
    public static void wtf(@NonNull Throwable e, @NonNull String message, Object... args) {
        astree().wtf(e, message, args);
    }

    /**
     * Set one-time tag for use on the next logging call.
     */
    public static Tree tag(@NonNull String tag) {
        Tags.set(tag);
        return astree();
    }

    /**
     * Capture the milieu (tag, level, throwable) for the logging call about to
     * be dispatched on this thread.
     *
     * @param level severity of the pending call
     * @param e throwable being logged, may be null
     */
    public static void probe(Level level, Throwable e) {
        Milieu milieu = new Milieu(Tags.get());
        if (Milieus.get() == null) {
            // First log on this thread: install the crash handler so pending
            // messages are flushed on an uncaught exception.
            Timber.supervise();
        }
        milieu.bind(level, e);
        Milieus.set(milieu);
    }

    /**
     * Returns the milieu captured by the most recent {@link #probe(Level, Throwable)}
     * on this thread.
     *
     * @throws AssertionError if probe was never called on this thread
     */
    public static Milieu get() {
        Milieu milieu = Milieus.get();
        if (milieu == null) {
            throw new AssertionError("probe() have not been called before logging.");
        }
        return milieu;
    }

    /**
     * A view into Timber's planted trees as tree itself. This can be used for injecting logger
     * instance rather than using static methods or to facilitate testing.
     */
    public static Tree astree() {
        return TREE_OF_SOULS;
    }

    /**
     * Create builder to initialize woods context.
     */
    public static WoodsBuilder builder() {
        return new WoodsBuilder();
    }

    /**
     * Adds new logging trees.
     *
     * @throws AssertionError if {@code tree} is the internal tree of souls
     */
    public static void plant(@NonNull Tree tree) {
        if (tree == TREE_OF_SOULS) {
            throw new AssertionError("Cannot plant 'TREE_OF_SOULS'.");
        }
        // Tell tree to get ready first, then add it to the forest; this order
        // must be preserved so a tree never receives logs before plant().
        tree.plant();
        synchronized (Forest) {
            Forest.add(tree);
            // Publish the new snapshot while still holding the lock so
            // concurrent plant/uproot calls cannot publish stale arrays.
            forestAsArray = Forest.toArray(TREE_ARRAY_EMPTY);
        }
    }

    /**
     * Remove planted tree.
     *
     * @throws AssertionError if the tree was never planted
     */
    public static void uproot(@NonNull Tree tree) {
        // Remove from the forest (and republish the snapshot) before notifying
        // the tree, so it receives no logs after uproot().
        synchronized (Forest) {
            if (!Forest.remove(tree)) {
                throw new AssertionError("Cannot uproot tree which is not planted: " + tree);
            }
            forestAsArray = Forest.toArray(TREE_ARRAY_EMPTY);
        }
        tree.uproot();
    }

    /**
     * Remove all planted trees.
     */
    public static void uprootall() {
        Tree[] trees;
        synchronized (Forest) {
            trees = Forest.toArray(TREE_ARRAY_EMPTY);
        }
        for (Tree tree : trees) {
            uproot(tree);
        }
    }

    /**
     * Installs an uncaught-exception handler that logs the failure and uproots
     * (flushes) all trees before delegating to the previously installed handler.
     */
    public static void supervise() {
        WoodsUncaughtExceptionHandler handler = new WoodsUncaughtExceptionHandler();
        handler.setDefaultHandler(Thread.getDefaultUncaughtExceptionHandler());
        Thread.setDefaultUncaughtExceptionHandler(handler);
    }

    /** Logs uncaught exceptions through Timber, then chains to the previous handler. */
    static class WoodsUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler {

        /** Previously installed handler to delegate to; may be null. */
        private Thread.UncaughtExceptionHandler defaultHandler = null;

        void setDefaultHandler(Thread.UncaughtExceptionHandler handler) {
            defaultHandler = handler;
        }

        @Override
        public void uncaughtException(Thread thread, Throwable e) {
            if (e != null) {
                // Use a constant message: the old code passed e.getMessage() as the
                // message argument, which could be null or contain format
                // specifiers and break the logging call itself.
                Timber.e(e, "Uncaught exception");
                try {
                    // Give asynchronous trees a moment to flush before uprooting.
                    Thread.sleep(1200);
                } catch (InterruptedException e1) {
                    Thread.currentThread().interrupt(); // preserve interrupt status
                    Timber.w("Timber messages may not been write to file.");
                }
                Timber.uprootall();
                if (defaultHandler != null) {
                    defaultHandler.uncaughtException(thread, e);
                }
            }
        }
    }
}
| |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.step04;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.stunner.core.graph.processing.layout.OrientedEdgeImpl;
import org.kie.workbench.common.stunner.core.graph.processing.layout.ReorderedGraph;
import org.kie.workbench.common.stunner.core.graph.processing.layout.Vertex;
import org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.GraphLayer;
import org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.LayeredGraph;
import org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.OrientedEdge;
import org.mockito.InOrder;
import org.mockito.junit.MockitoJUnitRunner;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.step04.DefaultVertexPositioning.DEFAULT_LAYER_HORIZONTAL_PADDING;
import static org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.step04.DefaultVertexPositioning.DEFAULT_LAYER_VERTICAL_PADDING;
import static org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.step04.DefaultVertexPositioning.DEFAULT_VERTEX_SPACE;
import static org.kie.workbench.common.stunner.core.graph.processing.layout.sugiyama.step04.VertexPositioning.DEFAULT_VERTEX_WIDTH;
import static org.mockito.Mockito.doCallRealMethod;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code DefaultVertexPositioning} (Sugiyama step 4).
 *
 * Uses a Mockito spy so individual collaborator methods can be stubbed while the
 * method under test runs for real. The stubbing and {@link InOrder} verification
 * below are order-sensitive; keep statement order intact when editing.
 */
@RunWith(MockitoJUnitRunner.class)
public class DefaultVertexPositioningTest {

    private DefaultVertexPositioning tested;

    @Before
    public void setup() {
        tested = spy(new DefaultVertexPositioning());
    }

    /** TopDown arrangement must distribute layer 0 first (at the top padding), then layer 1 at the returned y. */
    @Test
    public void testArrangeVertices() {
        final GraphLayer layer1 = mock(GraphLayer.class);
        final GraphLayer layer2 = mock(GraphLayer.class);
        final List<GraphLayer> layers = Arrays.asList(layer1, layer2);
        final ReorderedGraph graph = mock(ReorderedGraph.class);
        // Raw-typed mocks: generics are erased at runtime, so the raw HashMap suffices here.
        final HashMap hash = mock(HashMap.class);
        final HashMap layersStartX = mock(HashMap.class);
        final int largestWidth = 100;
        final int newY = 17;
        doReturn(hash).when(tested).createHashForLayersWidth();
        doReturn(largestWidth).when(tested).calculateLayersWidth(layers, hash);
        doReturn(layersStartX).when(tested).getLayersStartX(layers.size(), hash, largestWidth);
        doReturn(newY).when(tested).distributeVertices(layers,
                                                       layersStartX,
                                                       DEFAULT_LAYER_VERTICAL_PADDING,
                                                       0,
                                                       graph);
        doReturn(newY).when(tested).distributeVertices(layers,
                                                       layersStartX,
                                                       newY,
                                                       1,
                                                       graph);
        tested.arrangeVertices(layers,
                               LayerArrangement.TopDown,
                               graph);
        final InOrder inOrder = inOrder(tested);
        inOrder.verify(tested).distributeVertices(layers,
                                                  layersStartX,
                                                  DEFAULT_LAYER_VERTICAL_PADDING,
                                                  0,
                                                  graph);
        inOrder.verify(tested).distributeVertices(layers,
                                                  layersStartX,
                                                  newY,
                                                  1,
                                                  graph);
    }

    /** BottomUp arrangement must distribute the LAST layer first, then walk back to layer 0. */
    @Test
    public void testArrangeVerticesBottomUp() {
        final GraphLayer layer1 = mock(GraphLayer.class);
        final GraphLayer layer2 = mock(GraphLayer.class);
        final List<GraphLayer> layers = Arrays.asList(layer1, layer2);
        final ReorderedGraph graph = mock(ReorderedGraph.class);
        final HashMap hash = mock(HashMap.class);
        final HashMap layersStartX = mock(HashMap.class);
        final int largestWidth = 100;
        final int newY = 17;
        doReturn(hash).when(tested).createHashForLayersWidth();
        doReturn(largestWidth).when(tested).calculateLayersWidth(layers, hash);
        doReturn(layersStartX).when(tested).getLayersStartX(layers.size(), hash, largestWidth);
        doReturn(newY).when(tested).distributeVertices(layers,
                                                       layersStartX,
                                                       newY,
                                                       0,
                                                       graph);
        doReturn(newY).when(tested).distributeVertices(layers,
                                                       layersStartX,
                                                       DEFAULT_LAYER_VERTICAL_PADDING,
                                                       1,
                                                       graph);
        tested.arrangeVertices(layers,
                               LayerArrangement.BottomUp,
                               graph);
        final InOrder inOrder = inOrder(tested);
        inOrder.verify(tested).distributeVertices(layers,
                                                  layersStartX,
                                                  DEFAULT_LAYER_VERTICAL_PADDING,
                                                  1,
                                                  graph);
        inOrder.verify(tested).distributeVertices(layers,
                                                  layersStartX,
                                                  newY,
                                                  0,
                                                  graph);
    }

    /** Verifies the positioning pipeline runs its phases in the required order. */
    @Test
    public void testCalculateVerticesPositions() {
        final DefaultVertexPositioning tested = mock(DefaultVertexPositioning.class);
        final LayeredGraph graph = mock(LayeredGraph.class);
        final List<OrientedEdge> edges = mock(List.class);
        final LayerArrangement arrangement = LayerArrangement.BottomUp;
        final Set<Vertex> vertices = mock(Set.class);
        when(tested.getVertices(graph)).thenReturn(vertices);
        final List<GraphLayer> layers = mock(List.class);
        when(graph.getLayers()).thenReturn(layers);
        when(graph.getEdges()).thenReturn(edges);
        // Only the entry point runs for real; every phase it calls stays mocked.
        doCallRealMethod().when(tested).calculateVerticesPositions(graph, arrangement);
        tested.calculateVerticesPositions(graph, arrangement);
        final InOrder inOrder = inOrder(tested);
        inOrder.verify(tested).deReverseEdges(graph);
        inOrder.verify(tested).getVertices(graph);
        inOrder.verify(tested).removeVirtualVertices(edges, vertices);
        inOrder.verify(tested).removeVirtualVerticesFromLayers(layers, vertices);
        inOrder.verify(tested).arrangeVertices(layers, arrangement, graph);
    }

    /** getVertices must flatten all layers into one set (here: 3 vertices + an empty layer). */
    @Test
    public void testGetVertices() {
        final LayeredGraph graph = new LayeredGraph();
        final GraphLayer layer1 = mock(GraphLayer.class);
        final GraphLayer layer2 = mock(GraphLayer.class);
        final List<Vertex> vertices1 = new ArrayList<>();
        final List<Vertex> vertices2 = new ArrayList<>();
        final Vertex v1 = mock(Vertex.class);
        final Vertex v2 = mock(Vertex.class);
        final Vertex v3 = mock(Vertex.class);
        vertices1.add(v1);
        vertices1.add(v2);
        vertices1.add(v3);
        when(layer1.getVertices()).thenReturn(vertices1);
        when(layer2.getVertices()).thenReturn(vertices2);
        graph.getLayers().add(layer1);
        graph.getLayers().add(layer2);
        final Set<Vertex> actual = tested.getVertices(graph);
        assertTrue(actual.contains(v1));
        assertTrue(actual.contains(v2));
        assertTrue(actual.contains(v3));
        assertEquals(3, actual.size());
    }

    /** deReverseEdges must clear the reversed flag on every edge, regardless of initial state. */
    @Test
    public void testDeReverseEdges() {
        final LayeredGraph graph = new LayeredGraph();
        final OrientedEdgeImpl e1 = new OrientedEdgeImpl("1", "2", true);
        final OrientedEdgeImpl e2 = new OrientedEdgeImpl("2", "3", false);
        final OrientedEdgeImpl e3 = new OrientedEdgeImpl("2", "4", true);
        graph.addEdge(e1);
        graph.addEdge(e2);
        graph.addEdge(e3);
        tested.deReverseEdges(graph);
        assertFalse(e1.isReversed());
        assertFalse(e2.isReversed());
        assertFalse(e3.isReversed());
    }

    /** Each layer is centered against the widest one: startX = (largest - width) / 2 + padding. */
    @Test
    public void testGetLayersStartX() {
        final int largestWidth = 600;
        final int layersCount = 3;
        final HashMap<Integer, Integer> layersWidth = new HashMap<>();
        layersWidth.put(0, 200);
        layersWidth.put(1, 600);
        layersWidth.put(2, 300);
        final HashMap<Integer, Integer> startX = tested.getLayersStartX(layersCount, layersWidth, largestWidth);
        assertEquals(200 + DEFAULT_LAYER_HORIZONTAL_PADDING, (int) startX.get(0));
        assertEquals(0 + DEFAULT_LAYER_HORIZONTAL_PADDING, (int) startX.get(1));
        assertEquals(150 + DEFAULT_LAYER_HORIZONTAL_PADDING, (int) startX.get(2));
    }

    /** calculateLayersWidth must fill the width map per layer and return the largest width. */
    @Test
    public void testGetLargestWidth() {
        final GraphLayer layer1 = createGraphLayer(2);
        final int expectedSize1 = getExpectSize(2);
        final GraphLayer layer2 = createGraphLayer(4);
        final int expectedSize2 = getExpectSize(4);
        final GraphLayer layer3 = createGraphLayer(1);
        final int expectedSize3 = getExpectSize(1);
        final List<GraphLayer> layers = Arrays.asList(layer1, layer2, layer3);
        final HashMap<Integer, Integer> layersWidth = new HashMap<>();
        final int largest = tested.calculateLayersWidth(layers, layersWidth);
        assertEquals(expectedSize2, largest);
        assertEquals((int) layersWidth.get(0), expectedSize1);
        assertEquals((int) layersWidth.get(1), expectedSize2);
        assertEquals((int) layersWidth.get(2), expectedSize3);
    }

    /** Expected layer width: n vertices plus (n - 1) inter-vertex gaps. */
    private int getExpectSize(final int totalOfVertices) {
        return (totalOfVertices * DEFAULT_VERTEX_WIDTH) + ((totalOfVertices - 1) * DEFAULT_VERTEX_SPACE);
    }

    /** Builds a mock layer whose vertex list only reports the given size. */
    private GraphLayer createGraphLayer(final int verticesSize) {
        final GraphLayer layer = mock(GraphLayer.class);
        final List<Vertex> vertices = mock(List.class);
        when(vertices.size()).thenReturn(verticesSize);
        when(layer.getVertices()).thenReturn(vertices);
        return layer;
    }
}
| |
/* Copyright 2007 Ben Gunter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.stripes.validation;
import java.beans.PropertyDescriptor;
import java.lang.annotation.Annotation;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import net.sourceforge.stripes.config.Configuration;
import net.sourceforge.stripes.controller.ParameterName;
import net.sourceforge.stripes.exception.StripesRuntimeException;
import net.sourceforge.stripes.util.Log;
import net.sourceforge.stripes.util.ReflectUtil;
/**
* An implementation of {@link ValidationMetadataProvider} that scans classes and their superclasses
* for properties annotated with {@link Validate} and/or {@link ValidateNestedProperties} and
* exposes the validation metadata specified by those annotations. When searching for annotations,
* this implementation looks first at the property's read method (getter), then its write method
* (setter), and finally at the field itself.
*
* @author Ben Gunter, Freddy Daoud
* @since Stripes 1.5
*/
public class DefaultValidationMetadataProvider implements ValidationMetadataProvider {
private static final Log log = Log.getInstance(DefaultValidationMetadataProvider.class);
private Configuration configuration;
/** Map class -> field -> validation meta data */
private final Map<Class<?>, Map<String, ValidationMetadata>> cache = new ConcurrentHashMap<Class<?>, Map<String, ValidationMetadata>>();
/**
 * Currently does nothing except store a reference to {@code configuration},
 * which is later exposed via {@link #getConfiguration()}.
 */
public void init(Configuration configuration) throws Exception {
    this.configuration = configuration;
}
/**
 * Get the {@link Configuration} object that was passed into {@link #init(Configuration)}.
 * Returns null if {@link #init(Configuration)} has not been called yet.
 */
public Configuration getConfiguration() {
    return configuration;
}
/**
 * Returns the validation metadata for all (possibly nested) properties of
 * {@code beanType}, loading and caching it on first request.
 *
 * Note: the cache is a ConcurrentHashMap, so a race between two first requests
 * may compute the metadata twice; the computation is idempotent, so the last
 * write simply wins.
 *
 * @param beanType the class whose validation annotations should be inspected
 * @return a map of property name to {@link ValidationMetadata}
 */
public Map<String, ValidationMetadata> getValidationMetadata(Class<?> beanType) {
    Map<String, ValidationMetadata> cached = cache.get(beanType);
    if (cached != null) {
        return cached;
    }
    Map<String, ValidationMetadata> loaded = loadForClass(beanType);
    logDebugMessageForConfiguredValidations(beanType, loaded);
    cache.put(beanType, loaded);
    return loaded;
}
public ValidationMetadata getValidationMetadata(Class<?> beanType, ParameterName field) {
return getValidationMetadata(beanType).get(field.getStrippedName());
}
/**
* Get validation information for all the properties and nested properties of the given class.
* The {@link Validate} and/or {@link ValidateNestedProperties} annotations may be applied to
* the property's read method, write method, or field declaration. If a property has a
* {@link ValidateNestedProperties} annotation, then the nested properties named in its
* {@link Validate} annotations will be included as well.
*
* @param beanType a class
* @return A map of (possibly nested) property names to {@link ValidationMetadata} for the
* property.
* @throws StripesRuntimeException if conflicts are found in the validation annotations
*/
protected Map<String, ValidationMetadata> loadForClass(Class<?> beanType) {
Map<String, ValidationMetadata> meta = new HashMap<String, ValidationMetadata>();
@SuppressWarnings("unchecked")
Map<String, AnnotationInfo> annotationInfoMap =
getAnnotationInfoMap(beanType, Validate.class, ValidateNestedProperties.class);
for (String propertyName : annotationInfoMap.keySet()) {
AnnotationInfo annotationInfo = annotationInfoMap.get(propertyName);
// get the @Validate and/or @ValidateNestedProperties
Validate simple = annotationInfo.getAnnotation(Validate.class);
ValidateNestedProperties nested = annotationInfo.getAnnotation(ValidateNestedProperties.class);
Class<?> clazz = annotationInfo.getTargetClass();
// add to allow list if @Validate present
if (simple != null) {
if (simple.field() == null || "".equals(simple.field())) {
meta.put(propertyName, new ValidationMetadata(propertyName, simple));
}
else {
log.warn("Field name present in @Validate but should be omitted: ",
clazz, ", property ", propertyName, ", given field name ",
simple.field());
}
}
// add all sub-properties referenced in @ValidateNestedProperties
if (nested != null) {
Validate[] validates = nested.value();
if (validates != null) {
for (Validate validate : validates) {
if (validate.field() != null && !"".equals(validate.field())) {
String fullName = propertyName + '.' + validate.field();
if (meta.containsKey(fullName)) {
log.warn("More than one nested @Validate with same field name: "
+ validate.field() + " on property " + propertyName);
}
meta.put(fullName, new ValidationMetadata(fullName, validate));
}
else {
log.warn("Field name missing from nested @Validate: ", clazz,
", property ", propertyName);
}
}
}
}
}
return Collections.unmodifiableMap(meta);
}
/**
* Looks at a class's properties, searching for the specified annotations on the properties
* (field, getter method, or setter method). An exception is thrown if annotations are found
* in more than one of those three places.
*
* @param beanType the class on which to look for annotations.
* @param annotationClasses the classes of the annotations for which to look for.
* @return a map of property names to AnnotationInfo objects, which contain the class on which
* the annotations were found (if any), and the annotation objects that correspond to the
* annotation classes.
*/
protected Map<String, AnnotationInfo> getAnnotationInfoMap(Class<?> beanType,
Class<? extends Annotation>... annotationClasses)
{
Map<String, AnnotationInfo> annotationInfoMap = new HashMap<String, AnnotationInfo>();
Set<String> seen = new HashSet<String>();
try {
for (Class<?> clazz = beanType; clazz != null; clazz = clazz.getSuperclass()) {
List<PropertyDescriptor> pds = new ArrayList<PropertyDescriptor>(
Arrays.asList(ReflectUtil.getPropertyDescriptors(clazz)));
// Also look at public fields
Field[] publicFields = clazz.getFields();
for (Field field : publicFields) {
pds.add(new PropertyDescriptor(field.getName(), null, null));
}
for (PropertyDescriptor pd : pds) {
String propertyName = pd.getName();
Method accessor = pd.getReadMethod();
Method mutator = pd.getWriteMethod();
Field field = null;
try {
field = clazz.getDeclaredField(propertyName);
}
catch (NoSuchFieldException e) {
}
// this method throws an exception if there are conflicts
AnnotationInfo annotationInfo = getAnnotationInfo(clazz, propertyName,
new PropertyWrapper[] {
new PropertyWrapper(accessor),
new PropertyWrapper(mutator),
new PropertyWrapper(field),
},
annotationClasses);
// after the conflict check, stop processing fields we've already seen
if (seen.contains(propertyName))
continue;
if (annotationInfo.atLeastOneAnnotationFound()) {
annotationInfoMap.put(propertyName, annotationInfo);
seen.add(propertyName);
}
}
}
}
catch (RuntimeException e) {
log.error(e, "Failure checking @Validate annotations ", getClass().getName());
throw e;
}
catch (Exception e) {
log.error(e, "Failure checking @Validate annotations ", getClass().getName());
StripesRuntimeException sre = new StripesRuntimeException(e.getMessage(), e);
sre.setStackTrace(e.getStackTrace());
throw sre;
}
return annotationInfoMap;
}
/**
* Looks at a class's properties, searching for the specified annotations on the given property
* objects. An exception is thrown if annotations are found in more than one of the specified
* property accessors (normally field, getter method, and setter method).
*
* @param clazz the class on which to look for annotations.
* @param propertyName the name of the property.
* @param propertyWrappers the property accessors.
* @param annotationClasses the classes of the annotations for which to look for.
* @return an AnnotationInfo object, which contains the class on which the annotations were found
* (if any), and the annotation objects that correspond to the annotation classes.
*/
protected AnnotationInfo getAnnotationInfo(Class<?> clazz, String propertyName,
PropertyWrapper[] propertyWrappers, Class<? extends Annotation>... annotationClasses)
{
AnnotationInfo annotationInfo = new AnnotationInfo(clazz);
Map<PropertyWrapper, Map<Class<? extends Annotation>, Annotation>> map =
new HashMap<PropertyWrapper, Map<Class<? extends Annotation>, Annotation>>();
for (PropertyWrapper property : propertyWrappers) {
Map<Class<? extends Annotation>, Annotation> annotationMap =
new HashMap<Class<? extends Annotation>, Annotation>();
for (Class<? extends Annotation> annotationClass : annotationClasses) {
Annotation annotation = findAnnotation(clazz, property, annotationClass);
if (annotation != null) {
annotationMap.put(annotationClass, annotation);
}
}
if (!annotationMap.isEmpty()) {
map.put(property, annotationMap);
}
}
// must be 0 or 1
if (map.size() > 1) {
StringBuilder buf = new StringBuilder(
"There are conflicting @Validate and/or @ValidateNestedProperties annotations in ")
.append(clazz)
.append(". The following elements are improperly annotated for the '")
.append(propertyName)
.append("' property:\n");
for (PropertyWrapper property : map.keySet()) {
Map<Class<? extends Annotation>, Annotation> annotationMap = map.get(property);
buf.append("--> ").append(property.getType()).append(' ')
.append(property.getName()).append(" is annotated with ");
for (Class<?> cls : annotationMap.keySet()) {
buf.append('@').append(cls.getSimpleName()).append(' ');
}
buf.append('\n');
}
throw new StripesRuntimeException(buf.toString());
}
if (!map.isEmpty()) {
annotationInfo.setAnnotationMap(map.entrySet().iterator().next().getValue());
}
return annotationInfo;
}
/**
* Returns an annotation (or <code>null</code> if none is found) for the given property
* accessor of a class. The property object must not be <code>null</code>, must be declared on
* the class, must be public if it is a method, and must not be static if it is a field, for it
* to be considered eligible to having the annotation.
*
* @param clazz the class on which to look for the annotation.
* @param property the property accessor.
* @param annotationClass the class of the annotation to look for.
* @return the annotation object, or <code>null</code> if no annotation was found.
*/
protected Annotation findAnnotation(Class<?> clazz, PropertyWrapper property,
Class<? extends Annotation> annotationClass)
{
AccessibleObject accessible = property.getAccessibleObject();
if (accessible != null
&& property.getDeclaringClass().equals(clazz)
&& ( (accessible.getClass().equals(Method.class) && Modifier.isPublic(property.getModifiers()))
|| (accessible.getClass().equals(Field.class) && !Modifier.isStatic(property.getModifiers()))
))
{
return accessible.getAnnotation(annotationClass);
}
return null;
}
/**
* Prints out a pretty debug message showing what validations got configured.
*/
protected void logDebugMessageForConfiguredValidations(Class<?> beanType, Map<String, ValidationMetadata> meta) {
StringBuilder builder = new StringBuilder(128);
for (Map.Entry<String, ValidationMetadata> entry : meta.entrySet()) {
if (builder.length() > 0) {
builder.append(", ");
}
builder.append(entry.getKey());
builder.append("->");
builder.append(entry.getValue());
}
log.debug("Loaded validations for ActionBean ", beanType.getSimpleName(), ": ",
builder.length() > 0 ? builder : "<none>");
}
/**
* Contains the class on which the annotations were found (if any), and the annotation objects
* that correspond to the annotation classes.
*/
protected class AnnotationInfo {
private Class<?> targetClass;
private Map<Class<? extends Annotation>, Annotation> annotationMap;
public AnnotationInfo(Class<?> targetClass) {
this.targetClass = targetClass;
}
public Class<?> getTargetClass() {
return targetClass;
}
public void setAnnotationMap(Map<Class<? extends Annotation>, Annotation> annotationMap) {
this.annotationMap = annotationMap;
}
@SuppressWarnings("unchecked")
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
return (T) annotationMap.get(annotationClass);
}
public boolean atLeastOneAnnotationFound() {
return !(annotationMap == null || annotationMap.isEmpty());
}
}
/**
* For some reason, methods common to both the Field and Method classes are not in their parent
* class, AccessibleObject, so this class works around that limitation.
*/
protected class PropertyWrapper {
private Field field;
private Method method;
private String type;
public PropertyWrapper(Field field) {
this.field = field;
this.type = "Field";
}
public PropertyWrapper(Method method) {
this.method = method;
this.type = "Method";
}
public AccessibleObject getAccessibleObject() {
return field != null ? field : method;
}
public String getName() {
return field != null ? field.getName() : method.getName();
}
public Class<?> getDeclaringClass() {
return field != null ? field.getDeclaringClass() : method.getDeclaringClass();
}
public int getModifiers() {
return field != null ? field.getModifiers() : method.getModifiers();
}
public String getType() {
return type;
}
}
}
| |
package de.micromata.opengis.kml.v_2_2_0;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import de.micromata.opengis.kml.v_2_2_0.annotations.Obvious;
/**
* <LineStyle>
* <p>
* Specifies the drawing style (color, color mode, and line width) for all line geometry.
* Line geometry includes the outlines of outlined polygons and the extruded "tether"
* of Placemark icons (if extrusion is enabled).
* </p>
*
* Syntax:
* <pre><strong><LineStyle id="ID"></strong>
* <!-- inherited from <em>ColorStyle</em> -->
* <color>ffffffff</color> <!-- kml:color -->
* <colorMode>normal</colorMode> <!-- colorModeEnum: normal <em>or</em> random -->
*
* <!-- specific to LineStyle -->
* <width>1</width> <!-- float -->
* <strong></LineStyle></strong></pre>
*
* Extends:
* @see: <ColorStyle>
*
* Contained By:
* @see: <Style>
*
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "LineStyleType", propOrder = {
"width",
"lineStyleSimpleExtension",
"lineStyleObjectExtension"
})
@XmlRootElement(name = "LineStyle", namespace = "http://www.opengis.net/kml/2.2")
public class LineStyle
extends ColorStyle
implements Cloneable
{
/**
* <width>
* <p>
* Width of the line, in pixels.
* </p>
*
*
*
*/
@XmlElement(defaultValue = "1.0")
protected double width;
/**
* <Object>
* <p>
* This is an abstract base class and cannot be used directly in a KML file. It provides
* the id attribute, which allows unique identification of a KML element, and the targetId
* attribute, which is used to reference objects that have already been loaded into
* Google Earth. The id attribute must be assigned if the <Update> mechanism is to
* be used.
* </p>
*
* Syntax:
* <pre><!-- abstract element; do not create --><strong>
* <!-- <em>Object</em> id="ID" targetId="NCName" -->
* <!-- /<em>Object</em>> --></strong></pre>
*
*
*
*/
@XmlElement(name = "LineStyleSimpleExtensionGroup")
protected List<Object> lineStyleSimpleExtension;
/**
* <Object>
* <p>
* This is an abstract base class and cannot be used directly in a KML file. It provides
* the id attribute, which allows unique identification of a KML element, and the targetId
* attribute, which is used to reference objects that have already been loaded into
* Google Earth. The id attribute must be assigned if the <Update> mechanism is to
* be used.
* </p>
*
* Syntax:
* <pre><!-- abstract element; do not create --><strong>
* <!-- <em>Object</em> id="ID" targetId="NCName" -->
* <!-- /<em>Object</em>> --></strong></pre>
*
*
*
*/
@XmlElement(name = "LineStyleObjectExtensionGroup")
protected List<AbstractObject> lineStyleObjectExtension;
public LineStyle() {
super();
}
/**
* @see width
*
* @return
* possible object is
* {@link Double}
*
*/
public double getWidth() {
return width;
}
/**
* @see width
*
* @param value
* allowed object is
* {@link Double}
*
*/
public void setWidth(double value) {
this.width = value;
}
/**
* @see lineStyleSimpleExtension
*
*/
public List<Object> getLineStyleSimpleExtension() {
if (lineStyleSimpleExtension == null) {
lineStyleSimpleExtension = new ArrayList<Object>();
}
return this.lineStyleSimpleExtension;
}
/**
* @see lineStyleObjectExtension
*
*/
public List<AbstractObject> getLineStyleObjectExtension() {
if (lineStyleObjectExtension == null) {
lineStyleObjectExtension = new ArrayList<AbstractObject>();
}
return this.lineStyleObjectExtension;
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
long temp;
temp = Double.doubleToLongBits(width);
result = ((prime*result)+((int)(temp^(temp >>>(32)))));
result = ((prime*result)+((lineStyleSimpleExtension == null)? 0 :lineStyleSimpleExtension.hashCode()));
result = ((prime*result)+((lineStyleObjectExtension == null)? 0 :lineStyleObjectExtension.hashCode()));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (super.equals(obj) == false) {
return false;
}
if ((obj instanceof LineStyle) == false) {
return false;
}
LineStyle other = ((LineStyle) obj);
if (width!= other.width) {
return false;
}
if (lineStyleSimpleExtension == null) {
if (other.lineStyleSimpleExtension!= null) {
return false;
}
} else {
if (lineStyleSimpleExtension.equals(other.lineStyleSimpleExtension) == false) {
return false;
}
}
if (lineStyleObjectExtension == null) {
if (other.lineStyleObjectExtension!= null) {
return false;
}
} else {
if (lineStyleObjectExtension.equals(other.lineStyleObjectExtension) == false) {
return false;
}
}
return true;
}
/**
* @see lineStyleSimpleExtension
*
* @param lineStyleSimpleExtension
*/
public void setLineStyleSimpleExtension(final List<Object> lineStyleSimpleExtension) {
this.lineStyleSimpleExtension = lineStyleSimpleExtension;
}
/**
* add a value to the lineStyleSimpleExtension property collection
*
* @param lineStyleSimpleExtension
* Objects of the following type are allowed in the list: {@code <}{@link Object}{@code>}{@link JAXBElement}{@code <}{@link byte[]}{@code >}{@link JAXBElement}{@code <}{@link Float}{@code>}{@link JAXBElement}{@code <}{@link Float}{@code>}
* @return
* <tt>true</tt> (as general contract of <tt>Collection.add</tt>).
*/
public LineStyle addToLineStyleSimpleExtension(final Object lineStyleSimpleExtension) {
this.getLineStyleSimpleExtension().add(lineStyleSimpleExtension);
return this;
}
/**
* @see lineStyleObjectExtension
*
* @param lineStyleObjectExtension
*/
public void setLineStyleObjectExtension(final List<AbstractObject> lineStyleObjectExtension) {
this.lineStyleObjectExtension = lineStyleObjectExtension;
}
/**
* add a value to the lineStyleObjectExtension property collection
*
* @param lineStyleObjectExtension
* Objects of the following type are allowed in the list: {@link AbstractObject}
* @return
* <tt>true</tt> (as general contract of <tt>Collection.add</tt>).
*/
public LineStyle addToLineStyleObjectExtension(final AbstractObject lineStyleObjectExtension) {
this.getLineStyleObjectExtension().add(lineStyleObjectExtension);
return this;
}
/**
* @see objectSimpleExtension
*
*/
@Obvious
@Override
public void setObjectSimpleExtension(final List<Object> objectSimpleExtension) {
super.setObjectSimpleExtension(objectSimpleExtension);
}
@Obvious
@Override
public LineStyle addToObjectSimpleExtension(final Object objectSimpleExtension) {
super.getObjectSimpleExtension().add(objectSimpleExtension);
return this;
}
/**
* @see subStyleSimpleExtension
*
*/
@Obvious
@Override
public void setSubStyleSimpleExtension(final List<Object> subStyleSimpleExtension) {
super.setSubStyleSimpleExtension(subStyleSimpleExtension);
}
@Obvious
@Override
public LineStyle addToSubStyleSimpleExtension(final Object subStyleSimpleExtension) {
super.getSubStyleSimpleExtension().add(subStyleSimpleExtension);
return this;
}
/**
* @see subStyleObjectExtension
*
*/
@Obvious
@Override
public void setSubStyleObjectExtension(final List<AbstractObject> subStyleObjectExtension) {
super.setSubStyleObjectExtension(subStyleObjectExtension);
}
@Obvious
@Override
public LineStyle addToSubStyleObjectExtension(final AbstractObject subStyleObjectExtension) {
super.getSubStyleObjectExtension().add(subStyleObjectExtension);
return this;
}
/**
* @see colorStyleSimpleExtension
*
*/
@Obvious
@Override
public void setColorStyleSimpleExtension(final List<Object> colorStyleSimpleExtension) {
super.setColorStyleSimpleExtension(colorStyleSimpleExtension);
}
@Obvious
@Override
public LineStyle addToColorStyleSimpleExtension(final Object colorStyleSimpleExtension) {
super.getColorStyleSimpleExtension().add(colorStyleSimpleExtension);
return this;
}
/**
* @see colorStyleObjectExtension
*
*/
@Obvious
@Override
public void setColorStyleObjectExtension(final List<AbstractObject> colorStyleObjectExtension) {
super.setColorStyleObjectExtension(colorStyleObjectExtension);
}
@Obvious
@Override
public LineStyle addToColorStyleObjectExtension(final AbstractObject colorStyleObjectExtension) {
super.getColorStyleObjectExtension().add(colorStyleObjectExtension);
return this;
}
/**
* fluent setter
* @see #setWidth(double)
*
* @param width
* required parameter
*/
public LineStyle withWidth(final double width) {
this.setWidth(width);
return this;
}
/**
* fluent setter
* @see #setLineStyleSimpleExtension(List<Object>)
*
* @param lineStyleSimpleExtension
* required parameter
*/
public LineStyle withLineStyleSimpleExtension(final List<Object> lineStyleSimpleExtension) {
this.setLineStyleSimpleExtension(lineStyleSimpleExtension);
return this;
}
/**
* fluent setter
* @see #setLineStyleObjectExtension(List<AbstractObject>)
*
* @param lineStyleObjectExtension
* required parameter
*/
public LineStyle withLineStyleObjectExtension(final List<AbstractObject> lineStyleObjectExtension) {
this.setLineStyleObjectExtension(lineStyleObjectExtension);
return this;
}
@Obvious
@Override
public LineStyle withObjectSimpleExtension(final List<Object> objectSimpleExtension) {
super.withObjectSimpleExtension(objectSimpleExtension);
return this;
}
@Obvious
@Override
public LineStyle withId(final String id) {
super.withId(id);
return this;
}
@Obvious
@Override
public LineStyle withTargetId(final String targetId) {
super.withTargetId(targetId);
return this;
}
@Obvious
@Override
public LineStyle withSubStyleSimpleExtension(final List<Object> subStyleSimpleExtension) {
super.withSubStyleSimpleExtension(subStyleSimpleExtension);
return this;
}
@Obvious
@Override
public LineStyle withSubStyleObjectExtension(final List<AbstractObject> subStyleObjectExtension) {
super.withSubStyleObjectExtension(subStyleObjectExtension);
return this;
}
@Obvious
@Override
public LineStyle withColor(final String color) {
super.withColor(color);
return this;
}
@Obvious
@Override
public LineStyle withColorMode(final ColorMode colorMode) {
super.withColorMode(colorMode);
return this;
}
@Obvious
@Override
public LineStyle withColorStyleSimpleExtension(final List<Object> colorStyleSimpleExtension) {
super.withColorStyleSimpleExtension(colorStyleSimpleExtension);
return this;
}
@Obvious
@Override
public LineStyle withColorStyleObjectExtension(final List<AbstractObject> colorStyleObjectExtension) {
super.withColorStyleObjectExtension(colorStyleObjectExtension);
return this;
}
@Override
public LineStyle clone() {
LineStyle copy;
copy = ((LineStyle) super.clone());
copy.lineStyleSimpleExtension = new ArrayList<Object>((getLineStyleSimpleExtension().size()));
for (Object iter: lineStyleSimpleExtension) {
copy.lineStyleSimpleExtension.add(iter);
}
copy.lineStyleObjectExtension = new ArrayList<AbstractObject>((getLineStyleObjectExtension().size()));
for (AbstractObject iter: lineStyleObjectExtension) {
copy.lineStyleObjectExtension.add(iter.clone());
}
return copy;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport.netty;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.ReleasablePagedBytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.math.MathUtils;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.netty.NettyUtils;
import org.elasticsearch.common.netty.OpenChannelsHandler;
import org.elasticsearch.common.netty.ReleaseChannelFutureListener;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.PortsRange;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.KeyedLock;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import org.elasticsearch.transport.support.TransportStatus;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.*;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.channel.socket.nio.NioWorkerPool;
import org.jboss.netty.channel.socket.oio.OioClientSocketChannelFactory;
import org.jboss.netty.channel.socket.oio.OioServerSocketChannelFactory;
import org.jboss.netty.util.HashedWheelTimer;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.channels.CancelledKeyException;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.elasticsearch.common.network.NetworkService.TcpSettings.*;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.transport.NetworkExceptionHelper.isCloseConnectionException;
import static org.elasticsearch.common.transport.NetworkExceptionHelper.isConnectException;
import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap;
import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory;
/**
* There are 4 types of connections per node, low/med/high/ping. Low if for batch oriented APIs (like recovery or
* batch) with high payload that will cause regular request. (like search or single index) to take
* longer. Med is for the typical search / single doc index. And High for things like cluster state. Ping is reserved for
* sending out ping requests to other nodes.
*/
public class NettyTransport extends AbstractLifecycleComponent<Transport> implements Transport {
    static {
        // install ES-specific Netty customizations before any channel is created
        NettyUtils.setup();
    }

    // Thread-name prefixes used when creating the Netty worker/boss thread factories.
    public static final String HTTP_SERVER_WORKER_THREAD_NAME_PREFIX = "http_server_worker";
    public static final String HTTP_SERVER_BOSS_THREAD_NAME_PREFIX = "http_server_boss";
    public static final String TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX = "transport_client_worker";
    public static final String TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX = "transport_client_boss";

    // Setting keys for worker count and the number of channels opened per node and type.
    public static final String WORKER_COUNT = "transport.netty.worker_count";
    public static final String CONNECTIONS_PER_NODE_RECOVERY = "transport.connections_per_node.recovery";
    public static final String CONNECTIONS_PER_NODE_BULK = "transport.connections_per_node.bulk";
    public static final String CONNECTIONS_PER_NODE_REG = "transport.connections_per_node.reg";
    public static final String CONNECTIONS_PER_NODE_STATE = "transport.connections_per_node.state";
    public static final String CONNECTIONS_PER_NODE_PING = "transport.connections_per_node.ping";
    public static final String PING_SCHEDULE = "transport.ping_schedule"; // the scheduled internal ping interval setting
    public static final TimeValue DEFAULT_PING_SCHEDULE = TimeValue.timeValueMillis(-1); // the default ping schedule, defaults to disabled (-1)
    public static final String DEFAULT_PORT_RANGE = "9300-9400";
    public static final String DEFAULT_PROFILE = "default";

    protected final NetworkService networkService;
    protected final Version version;
    // true when the blocking (OIO) client transport is configured instead of NIO
    protected final boolean blockingClient;
    protected final TimeValue connectTimeout;
    protected final ByteSizeValue maxCumulationBufferCapacity;
    protected final int maxCompositeBufferComponents;
    // whether outgoing messages are compressed
    protected final boolean compress;
    protected final ReceiveBufferSizePredictorFactory receiveBufferSizePredictorFactory;
    protected final int workerCount;
    protected final ByteSizeValue receivePredictorMin;
    protected final ByteSizeValue receivePredictorMax;
    // per-type channel counts resolved from settings in the constructor
    protected final int connectionsPerNodeRecovery;
    protected final int connectionsPerNodeBulk;
    protected final int connectionsPerNodeReg;
    protected final int connectionsPerNodeState;
    protected final int connectionsPerNodePing;
    private final TimeValue pingSchedule;
    protected final BigArrays bigArrays;
    protected final ThreadPool threadPool;
    protected volatile OpenChannelsHandler serverOpenChannels;
    protected volatile ClientBootstrap clientBootstrap;
    // node id to actual channel
    protected final ConcurrentMap<DiscoveryNode, NodeChannels> connectedNodes = newConcurrentMap();
    // per-profile server bootstraps, bound channels, and bound addresses
    protected final Map<String, ServerBootstrap> serverBootstraps = newConcurrentMap();
    protected final Map<String, Channel> serverChannels = newConcurrentMap();
    protected final Map<String, BoundTransportAddress> profileBoundAddresses = newConcurrentMap();
    protected volatile TransportServiceAdapter transportServiceAdapter;
    protected volatile BoundTransportAddress boundAddress;
    // serializes connect/disconnect per node id
    protected final KeyedLock<String> connectionLock = new KeyedLock<>();
    protected final NamedWriteableRegistry namedWriteableRegistry;

    // this lock is here to make sure we close this transport and disconnect all the client nodes
    // connections while no connect operations is going on... (this might help with 100% CPU when stopping the transport?)
    private final ReadWriteLock globalLock = new ReentrantReadWriteLock();

    // package visibility for tests
    final ScheduledPing scheduledPing;
    /**
     * Resolves all transport settings. Most values follow a fallback chain: a
     * "transport.netty.*" key overrides the generic "transport.*" / tcp key, which in turn
     * overrides the hard-coded default.
     */
    @Inject
    public NettyTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, Version version, NamedWriteableRegistry namedWriteableRegistry) {
        super(settings);
        this.threadPool = threadPool;
        this.networkService = networkService;
        this.bigArrays = bigArrays;
        this.version = version;

        // propagate the epoll-bug workaround flag to Netty via a system property
        if (settings.getAsBoolean("netty.epollBugWorkaround", false)) {
            System.setProperty("org.jboss.netty.epollBugWorkaround", "true");
        }

        // defaults to 2x processors worker threads
        this.workerCount = settings.getAsInt(WORKER_COUNT, EsExecutors.boundedNumberOfProcessors(settings) * 2);
        // blocking (OIO) client: netty-specific key -> tcp client key -> generic tcp key -> false
        this.blockingClient = settings.getAsBoolean("transport.netty.transport.tcp.blocking_client", settings.getAsBoolean(TCP_BLOCKING_CLIENT, settings.getAsBoolean(TCP_BLOCKING, false)));
        this.connectTimeout = this.settings.getAsTime("transport.netty.connect_timeout", settings.getAsTime("transport.tcp.connect_timeout", settings.getAsTime(TCP_CONNECT_TIMEOUT, TCP_DEFAULT_CONNECT_TIMEOUT)));
        this.maxCumulationBufferCapacity = this.settings.getAsBytesSize("transport.netty.max_cumulation_buffer_capacity", null);
        this.maxCompositeBufferComponents = this.settings.getAsInt("transport.netty.max_composite_buffer_components", -1);
        this.compress = settings.getAsBoolean(TransportSettings.TRANSPORT_TCP_COMPRESS, false);

        // channel counts per connection type (see class javadoc: low/med/high/ping)
        this.connectionsPerNodeRecovery = this.settings.getAsInt("transport.netty.connections_per_node.recovery", settings.getAsInt(CONNECTIONS_PER_NODE_RECOVERY, 2));
        this.connectionsPerNodeBulk = this.settings.getAsInt("transport.netty.connections_per_node.bulk", settings.getAsInt(CONNECTIONS_PER_NODE_BULK, 3));
        this.connectionsPerNodeReg = this.settings.getAsInt("transport.netty.connections_per_node.reg", settings.getAsInt(CONNECTIONS_PER_NODE_REG, 6));
        this.connectionsPerNodeState = this.settings.getAsInt("transport.netty.connections_per_node.high", settings.getAsInt(CONNECTIONS_PER_NODE_STATE, 1));
        this.connectionsPerNodePing = this.settings.getAsInt("transport.netty.connections_per_node.ping", settings.getAsInt(CONNECTIONS_PER_NODE_PING, 1));

        // we want to have at least 1 for reg/state/ping
        if (this.connectionsPerNodeReg == 0) {
            throw new IllegalArgumentException("can't set [connection_per_node.reg] to 0");
        }
        if (this.connectionsPerNodePing == 0) {
            throw new IllegalArgumentException("can't set [connection_per_node.ping] to 0");
        }
        if (this.connectionsPerNodeState == 0) {
            throw new IllegalArgumentException("can't set [connection_per_node.state] to 0");
        }

        // default receive predictor: 512KB, or ~30% of direct memory split across workers
        // (clamped to [64KB, 512KB]) when a direct-memory limit is known
        long defaultReceiverPredictor = 512 * 1024;
        if (JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes() > 0) {
            // we can guess a better default...
            long l = (long) ((0.3 * JvmInfo.jvmInfo().getMem().getDirectMemoryMax().bytes()) / workerCount);
            defaultReceiverPredictor = Math.min(defaultReceiverPredictor, Math.max(l, 64 * 1024));
        }

        // See AdaptiveReceiveBufferSizePredictor#DEFAULT_XXX for default values in netty..., we can use higher ones for us, even fixed one
        this.receivePredictorMin = this.settings.getAsBytesSize("transport.netty.receive_predictor_min", this.settings.getAsBytesSize("transport.netty.receive_predictor_size", new ByteSizeValue(defaultReceiverPredictor)));
        this.receivePredictorMax = this.settings.getAsBytesSize("transport.netty.receive_predictor_max", this.settings.getAsBytesSize("transport.netty.receive_predictor_size", new ByteSizeValue(defaultReceiverPredictor)));
        if (receivePredictorMax.bytes() == receivePredictorMin.bytes()) {
            // min == max: a fixed-size buffer predictor is cheaper than an adaptive one
            receiveBufferSizePredictorFactory = new FixedReceiveBufferSizePredictorFactory((int) receivePredictorMax.bytes());
        } else {
            // adaptive predictor args are (minimum, initial, maximum); initial starts at min
            receiveBufferSizePredictorFactory = new AdaptiveReceiveBufferSizePredictorFactory((int) receivePredictorMin.bytes(), (int) receivePredictorMin.bytes(), (int) receivePredictorMax.bytes());
        }

        this.scheduledPing = new ScheduledPing();
        this.pingSchedule = settings.getAsTime(PING_SCHEDULE, DEFAULT_PING_SCHEDULE);
        // a non-positive schedule (the default, -1) disables the periodic keep-alive ping
        if (pingSchedule.millis() > 0) {
            threadPool.schedule(pingSchedule, ThreadPool.Names.GENERIC, scheduledPing);
        }

        this.namedWriteableRegistry = namedWriteableRegistry;
    }
/**
 * Returns the node settings this transport was constructed with.
 */
public Settings settings() {
    return settings;
}
/**
 * Wires in the adapter used to notify the transport service of requests,
 * responses and node connect/disconnect events.
 */
@Override
public void transportServiceAdapter(TransportServiceAdapter service) {
    transportServiceAdapter = service;
}
/** Package-private accessor for the currently wired transport-service adapter. */
TransportServiceAdapter transportServiceAdapter() {
    return this.transportServiceAdapter;
}
/** Package-private accessor for the node's thread pool. */
ThreadPool threadPool() {
    return this.threadPool;
}
/**
 * Starts the transport: always creates the client bootstrap; when acting as a
 * server ({@code network.server=true}) it additionally builds and binds one
 * server bootstrap per configured profile (a synthetic "default" profile is
 * injected when absent) and computes the published address. On any failure the
 * partially started resources are torn down via {@link #doStop()}.
 */
@Override
protected void doStart() {
boolean success = false;
try {
clientBootstrap = createClientBootstrap();
if (settings.getAsBoolean("network.server", true)) {
final OpenChannelsHandler openChannels = new OpenChannelsHandler(logger);
this.serverOpenChannels = openChannels;
// extract default profile first and create standard bootstrap
Map<String, Settings> profiles = settings.getGroups("transport.profiles", true);
if (!profiles.containsKey(DEFAULT_PROFILE)) {
// settings.getGroups may return an immutable map, so copy before inserting the default
profiles = Maps.newHashMap(profiles);
profiles.put(DEFAULT_PROFILE, Settings.EMPTY);
}
Settings fallbackSettings = createFallbackSettings();
Settings defaultSettings = profiles.get(DEFAULT_PROFILE);
// loop through all profiles and start them up, special handling for default one
for (Map.Entry<String, Settings> entry : profiles.entrySet()) {
Settings profileSettings = entry.getValue();
String name = entry.getKey();
if (!Strings.hasLength(name)) {
// unnamed profiles cannot be addressed later, so they are ignored
logger.info("transport profile configured without a name. skipping profile with settings [{}]", profileSettings.toDelimitedString(','));
continue;
} else if (DEFAULT_PROFILE.equals(name)) {
// the default profile falls back to transport.tcp.port / the default port range
profileSettings = settingsBuilder()
.put(profileSettings)
.put("port", profileSettings.get("port", this.settings.get("transport.tcp.port", DEFAULT_PORT_RANGE)))
.build();
} else if (profileSettings.get("port") == null) {
// if profile does not have a port, skip it
logger.info("No port configured for profile [{}], not binding", name);
continue;
}
// merge fallback settings with default settings with profile settings so we have complete settings with default values
Settings mergedSettings = settingsBuilder()
.put(fallbackSettings)
.put(defaultSettings)
.put(profileSettings)
.build();
createServerBootstrap(name, mergedSettings);
bindServerBootstrap(name, mergedSettings);
}
// the published address is derived from the DEFAULT profile's actual bound port
InetSocketAddress boundAddress = (InetSocketAddress) serverChannels.get(DEFAULT_PROFILE).getLocalAddress();
int publishPort = settings.getAsInt("transport.netty.publish_port", settings.getAsInt("transport.publish_port", boundAddress.getPort()));
String publishHost = settings.get("transport.netty.publish_host", settings.get("transport.publish_host", settings.get("transport.host")));
InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
this.boundAddress = new BoundTransportAddress(new InetSocketTransportAddress(boundAddress), new InetSocketTransportAddress(publishAddress));
}
success = true;
} finally {
// roll back whatever was started if anything above threw
if (success == false) {
doStop();
}
}
}
/**
 * Returns an immutable snapshot of the bound addresses of all non-default profiles.
 */
@Override
public Map<String, BoundTransportAddress> profileBoundAddresses() {
    // copy so callers can never observe (or cause) concurrent mutation
    final Map<String, BoundTransportAddress> snapshot = ImmutableMap.copyOf(profileBoundAddresses);
    return snapshot;
}
/**
 * Resolves the publish host via the network service and pairs it with the
 * given port, wrapping any resolution failure in a {@link BindTransportException}.
 */
private InetSocketAddress createPublishAddress(String publishHost, int publishPort) {
    try {
        final InetAddress host = networkService.resolvePublishHostAddress(publishHost);
        return new InetSocketAddress(host, publishPort);
    } catch (Exception e) {
        throw new BindTransportException("Failed to resolve publish address", e);
    }
}
/**
 * Creates and configures the shared client bootstrap used for all outbound
 * connections: OIO (blocking) or NIO channel factory depending on
 * {@code blockingClient}, then TCP options (no-delay, keep-alive, buffer
 * sizes, reuse-address) resolved netty-key-first with generic-key fallback.
 */
private ClientBootstrap createClientBootstrap() {
if (blockingClient) {
clientBootstrap = new ClientBootstrap(new OioClientSocketChannelFactory(Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX))));
} else {
int bossCount = settings.getAsInt("transport.netty.boss_count", 1);
clientBootstrap = new ClientBootstrap(new NioClientSocketChannelFactory(
Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX)),
bossCount,
new NioWorkerPool(Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX)), workerCount),
new HashedWheelTimer(daemonThreadFactory(settings, "transport_client_timer"))));
}
clientBootstrap.setPipelineFactory(configureClientChannelPipelineFactory());
clientBootstrap.setOption("connectTimeoutMillis", connectTimeout.millis());
// the literal value "default" means: leave the OS/netty default untouched
String tcpNoDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true"));
if (!"default".equals(tcpNoDelay)) {
clientBootstrap.setOption("tcpNoDelay", Booleans.parseBoolean(tcpNoDelay, null));
}
String tcpKeepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true"));
if (!"default".equals(tcpKeepAlive)) {
clientBootstrap.setOption("keepAlive", Booleans.parseBoolean(tcpKeepAlive, null));
}
// buffer sizes <= 0 mean "keep the system default"
ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE));
if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) {
clientBootstrap.setOption("sendBufferSize", tcpSendBufferSize.bytes());
}
ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE));
if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) {
clientBootstrap.setOption("receiveBufferSize", tcpReceiveBufferSize.bytes());
}
clientBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress()));
clientBootstrap.setOption("reuseAddress", reuseAddress);
return clientBootstrap;
}
/**
 * Builds the fallback settings that every profile inherits: bind/publish host,
 * TCP no-delay/keep-alive, reuse-address and buffer sizes, each resolved from
 * the netty-prefixed key first, then the generic transport key. Keys whose
 * resolved value is {@code null} are simply omitted.
 */
private Settings createFallbackSettings() {
    final Settings.Builder builder = settingsBuilder();

    final String bindHost = settings.get("transport.netty.bind_host", settings.get("transport.bind_host", settings.get("transport.host")));
    if (bindHost != null) {
        builder.put("bind_host", bindHost);
    }

    final String publishHost = settings.get("transport.netty.publish_host", settings.get("transport.publish_host", settings.get("transport.host")));
    if (publishHost != null) {
        builder.put("publish_host", publishHost);
    }

    final String noDelay = settings.get("transport.netty.tcp_no_delay", settings.get(TCP_NO_DELAY, "true"));
    if (noDelay != null) {
        builder.put("tcp_no_delay", noDelay);
    }

    final String keepAlive = settings.get("transport.netty.tcp_keep_alive", settings.get(TCP_KEEP_ALIVE, "true"));
    if (keepAlive != null) {
        builder.put("tcp_keep_alive", keepAlive);
    }

    // reuse_address always has a concrete default, so it is put unconditionally
    final boolean reuseAddress = settings.getAsBoolean("transport.netty.reuse_address", settings.getAsBoolean(TCP_REUSE_ADDRESS, NetworkUtils.defaultReuseAddress()));
    builder.put("reuse_address", reuseAddress);

    final ByteSizeValue sendBufferSize = settings.getAsBytesSize("transport.netty.tcp_send_buffer_size", settings.getAsBytesSize(TCP_SEND_BUFFER_SIZE, TCP_DEFAULT_SEND_BUFFER_SIZE));
    if (sendBufferSize != null) {
        builder.put("tcp_send_buffer_size", sendBufferSize);
    }

    final ByteSizeValue receiveBufferSize = settings.getAsBytesSize("transport.netty.tcp_receive_buffer_size", settings.getAsBytesSize(TCP_RECEIVE_BUFFER_SIZE, TCP_DEFAULT_RECEIVE_BUFFER_SIZE));
    if (receiveBufferSize != null) {
        builder.put("tcp_receive_buffer_size", receiveBufferSize);
    }

    return builder.build();
}
/**
 * Binds the already-created server bootstrap for the given profile. Iterates
 * the configured port range until a bind succeeds; only the last bind failure
 * is preserved and reported. Non-default profiles also record their bound and
 * publish addresses in {@code profileBoundAddresses}.
 *
 * @param name     profile name
 * @param settings fully merged profile settings (fallback + default + profile)
 */
private void bindServerBootstrap(final String name, final Settings settings) {
// Bind and start to accept incoming connections.
InetAddress hostAddressX;
String bindHost = settings.get("bind_host");
try {
hostAddressX = networkService.resolveBindHostAddress(bindHost);
} catch (IOException e) {
throw new BindTransportException("Failed to resolve host [" + bindHost + "]", e);
}
// effectively-final copy so the anonymous callback below can capture it
final InetAddress hostAddress = hostAddressX;
String port = settings.get("port");
PortsRange portsRange = new PortsRange(port);
final AtomicReference<Exception> lastException = new AtomicReference<>();
boolean success = portsRange.iterate(new PortsRange.PortCallback() {
@Override
public boolean onPortNumber(int portNumber) {
try {
// first successful bind wins; its channel is remembered per profile
serverChannels.put(name, serverBootstraps.get(name).bind(new InetSocketAddress(hostAddress, portNumber)));
} catch (Exception e) {
// keep only the most recent failure for the final error message
lastException.set(e);
return false;
}
return true;
}
});
if (!success) {
throw new BindTransportException("Failed to bind to [" + port + "]", lastException.get());
}
// the default profile's address is published via doStart(); only extra profiles are recorded here
if (!DEFAULT_PROFILE.equals(name)) {
InetSocketAddress boundAddress = (InetSocketAddress) serverChannels.get(name).getLocalAddress();
int publishPort = settings.getAsInt("publish_port", boundAddress.getPort());
String publishHost = settings.get("publish_host", boundAddress.getHostString());
InetSocketAddress publishAddress = createPublishAddress(publishHost, publishPort);
profileBoundAddresses.put(name, new BoundTransportAddress(new InetSocketTransportAddress(boundAddress), new InetSocketTransportAddress(publishAddress)));
}
logger.debug("Bound profile [{}] to address [{}]", name, serverChannels.get(name).getLocalAddress());
}
/**
 * Creates (but does not bind) a server bootstrap for one profile: OIO or NIO
 * channel factory depending on the blocking-server setting, then per-child
 * TCP options from the merged profile settings. The bootstrap is stored in
 * {@code serverBootstraps} keyed by profile name.
 */
private void createServerBootstrap(String name, Settings settings) {
boolean blockingServer = settings.getAsBoolean("transport.tcp.blocking_server", this.settings.getAsBoolean(TCP_BLOCKING_SERVER, this.settings.getAsBoolean(TCP_BLOCKING, false)));
String port = settings.get("port");
String bindHost = settings.get("bind_host");
String publishHost = settings.get("publish_host");
String tcpNoDelay = settings.get("tcp_no_delay");
String tcpKeepAlive = settings.get("tcp_keep_alive");
boolean reuseAddress = settings.getAsBoolean("reuse_address", NetworkUtils.defaultReuseAddress());
ByteSizeValue tcpSendBufferSize = settings.getAsBytesSize("tcp_send_buffer_size", TCP_DEFAULT_SEND_BUFFER_SIZE);
ByteSizeValue tcpReceiveBufferSize = settings.getAsBytesSize("tcp_receive_buffer_size", TCP_DEFAULT_RECEIVE_BUFFER_SIZE);
logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]",
name, workerCount, port, bindHost, publishHost, compress, connectTimeout, connectionsPerNodeRecovery, connectionsPerNodeBulk, connectionsPerNodeReg, connectionsPerNodeState, connectionsPerNodePing, receivePredictorMin, receivePredictorMax);
// NOTE(review): these are the HTTP server thread-name prefixes, so transport server threads
// end up named like http_server_boss/http_server_worker — looks like a copy/paste bug; fixing
// it requires transport-specific prefix constants declared elsewhere in this class.
final ThreadFactory bossFactory = daemonThreadFactory(this.settings, HTTP_SERVER_BOSS_THREAD_NAME_PREFIX, name);
final ThreadFactory workerFactory = daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX, name);
ServerBootstrap serverBootstrap;
if (blockingServer) {
serverBootstrap = new ServerBootstrap(new OioServerSocketChannelFactory(
Executors.newCachedThreadPool(bossFactory),
Executors.newCachedThreadPool(workerFactory)
));
} else {
serverBootstrap = new ServerBootstrap(new NioServerSocketChannelFactory(
Executors.newCachedThreadPool(bossFactory),
Executors.newCachedThreadPool(workerFactory),
workerCount));
}
serverBootstrap.setPipelineFactory(configureServerChannelPipelineFactory(name, settings));
// "default" means: do not override the OS/netty default for that option
if (!"default".equals(tcpNoDelay)) {
serverBootstrap.setOption("child.tcpNoDelay", Booleans.parseBoolean(tcpNoDelay, null));
}
if (!"default".equals(tcpKeepAlive)) {
serverBootstrap.setOption("child.keepAlive", Booleans.parseBoolean(tcpKeepAlive, null));
}
if (tcpSendBufferSize != null && tcpSendBufferSize.bytes() > 0) {
serverBootstrap.setOption("child.sendBufferSize", tcpSendBufferSize.bytes());
}
if (tcpReceiveBufferSize != null && tcpReceiveBufferSize.bytes() > 0) {
serverBootstrap.setOption("child.receiveBufferSize", tcpReceiveBufferSize.bytes());
}
// set on both the server channel and each accepted child channel
serverBootstrap.setOption("receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
serverBootstrap.setOption("child.receiveBufferSizePredictorFactory", receiveBufferSizePredictorFactory);
serverBootstrap.setOption("reuseAddress", reuseAddress);
serverBootstrap.setOption("child.reuseAddress", reuseAddress);
serverBootstraps.put(name, serverBootstrap);
}
/**
 * Stops the transport: closes node connections, server channels, open accepted
 * channels and bootstrap resources, in that order, under the global write lock.
 * The actual shutdown runs on a generic-pool thread (never an IO thread) and
 * this method waits for it up to 30 seconds.
 */
@Override
protected void doStop() {
final CountDownLatch latch = new CountDownLatch(1);
// make sure we run it on another thread than a possible IO handler thread
threadPool.generic().execute(new Runnable() {
@Override
public void run() {
globalLock.writeLock().lock();
try {
// first pass: close all currently connected node channels
for (Iterator<NodeChannels> it = connectedNodes.values().iterator(); it.hasNext(); ) {
NodeChannels nodeChannels = it.next();
it.remove();
nodeChannels.close();
}
Iterator<Map.Entry<String, Channel>> serverChannelIterator = serverChannels.entrySet().iterator();
while (serverChannelIterator.hasNext()) {
Map.Entry<String, Channel> serverChannelEntry = serverChannelIterator.next();
String name = serverChannelEntry.getKey();
Channel serverChannel = serverChannelEntry.getValue();
try {
serverChannel.close().awaitUninterruptibly();
} catch (Throwable t) {
logger.debug("Error closing serverChannel for profile [{}]", t, name);
}
serverChannelIterator.remove();
}
if (serverOpenChannels != null) {
serverOpenChannels.close();
serverOpenChannels = null;
}
Iterator<Map.Entry<String, ServerBootstrap>> serverBootstrapIterator = serverBootstraps.entrySet().iterator();
while (serverBootstrapIterator.hasNext()) {
Map.Entry<String, ServerBootstrap> serverBootstrapEntry = serverBootstrapIterator.next();
String name = serverBootstrapEntry.getKey();
ServerBootstrap serverBootstrap = serverBootstrapEntry.getValue();
try {
serverBootstrap.releaseExternalResources();
} catch (Throwable t) {
logger.debug("Error closing serverBootstrap for profile [{}]", t, name);
}
serverBootstrapIterator.remove();
}
// second pass over connectedNodes — presumably to sweep connections that were
// established concurrently while the server side was shutting down (TODO confirm)
for (Iterator<NodeChannels> it = connectedNodes.values().iterator(); it.hasNext(); ) {
NodeChannels nodeChannels = it.next();
it.remove();
nodeChannels.close();
}
if (clientBootstrap != null) {
clientBootstrap.releaseExternalResources();
clientBootstrap = null;
}
} finally {
globalLock.writeLock().unlock();
latch.countDown();
}
}
});
try {
// bounded wait so a stuck shutdown cannot hang the caller forever
latch.await(30, TimeUnit.SECONDS);
} catch (InterruptedException e) {
// ignore
}
}
@Override
protected void doClose() {
// intentionally a no-op: all resources are released in doStop()
}
/**
 * Parses a textual address into transport addresses. Three accepted shapes:
 * {@code host[port-spec,...]} (explicit port list/ranges in brackets),
 * {@code host:port} (single port), and bare {@code host} (expanded over the
 * configured default port range). Port specs may themselves be ranges, each
 * yielding one address per port.
 */
@Override
public TransportAddress[] addressesFromString(String address) throws Exception {
int index = address.indexOf('[');
if (index != -1) {
String host = address.substring(0, index);
// NOTE(review): a '[' without a matching ']' makes indexOf(']') return -1 and this
// substring throw StringIndexOutOfBoundsException — consider a clearer error.
Set<String> ports = Strings.commaDelimitedListToSet(address.substring(index + 1, address.indexOf(']')));
List<TransportAddress> addresses = Lists.newArrayList();
for (String port : ports) {
// each entry may be a single port or a range like "9300-9305"
int[] iPorts = new PortsRange(port).ports();
for (int iPort : iPorts) {
addresses.add(new InetSocketTransportAddress(host, iPort));
}
}
return addresses.toArray(new TransportAddress[addresses.size()]);
} else {
index = address.lastIndexOf(':');
if (index == -1) {
// no port given: expand over the configured default port range
List<TransportAddress> addresses = Lists.newArrayList();
String defaultPort = settings.get("transport.profiles.default.port", settings.get("transport.netty.port", this.settings.get("transport.tcp.port", DEFAULT_PORT_RANGE)));
int[] iPorts = new PortsRange(defaultPort).ports();
for (int iPort : iPorts) {
addresses.add(new InetSocketTransportAddress(address, iPort));
}
return addresses.toArray(new TransportAddress[addresses.size()]);
} else {
// host:port — lastIndexOf keeps any earlier colons as part of the host
String host = address.substring(0, index);
int port = Integer.parseInt(address.substring(index + 1));
return new TransportAddress[]{new InetSocketTransportAddress(host, port)};
}
}
}
/**
 * This transport only understands {@link InetSocketTransportAddress}.
 */
@Override
public boolean addressSupported(Class<? extends TransportAddress> address) {
    // Class has identity equals, so reference comparison is equivalent
    return address == InetSocketTransportAddress.class;
}
/** Returns the bound/publish address pair computed in doStart(), or null before start. */
@Override
public BoundTransportAddress boundAddress() {
    return boundAddress;
}
/**
 * Central exception handler for transport channels. Classifies the cause and
 * reacts: close-connection / connect / cancelled-key exceptions are logged at
 * trace and the channel is closed (triggering node disconnect); an HTTP request
 * on the transport port gets the explanatory message written back before
 * closing; anything else is logged as a warning and the channel closed.
 */
protected void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
if (!lifecycle.started()) {
// ignore
return;
}
if (isCloseConnectionException(e.getCause())) {
logger.trace("close connection exception caught on transport layer [{}], disconnecting from relevant node", e.getCause(), ctx.getChannel());
// close the channel, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
} else if (isConnectException(e.getCause())) {
logger.trace("connect exception caught on transport layer [{}]", e.getCause(), ctx.getChannel());
// close the channel as safe measure, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
} else if (e.getCause() instanceof CancelledKeyException) {
logger.trace("cancelled key exception caught on transport layer [{}], disconnecting from relevant node", e.getCause(), ctx.getChannel());
// close the channel as safe measure, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
} else if (e.getCause() instanceof SizeHeaderFrameDecoder.HttpOnTransportException) {
// in case we are able to return data, serialize the exception content and sent it back to the client
if (ctx.getChannel().isOpen()) {
ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(e.getCause().getMessage().getBytes(Charsets.UTF_8));
ChannelFuture channelFuture = ctx.getChannel().write(buffer);
// close only after the explanatory message has been flushed
channelFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
future.getChannel().close();
}
});
}
} else {
logger.warn("exception caught on transport layer [{}], closing connection", e.getCause(), ctx.getChannel());
// close the channel, which will cause a node to be disconnected if relevant
ctx.getChannel().close();
disconnectFromNodeChannel(ctx.getChannel(), e.getCause());
}
}
/** Wraps a raw socket address into this transport's address type. */
TransportAddress wrapAddress(SocketAddress socketAddress) {
    final InetSocketAddress inetAddress = (InetSocketAddress) socketAddress;
    return new InetSocketTransportAddress(inetAddress);
}
/**
 * Number of currently open server-side (accepted) channels; 0 when the
 * server side is not running.
 */
@Override
public long serverOpen() {
    // read the volatile-ish field once to avoid a null race between check and use
    final OpenChannelsHandler handler = serverOpenChannels;
    if (handler == null) {
        return 0;
    }
    return handler.numberOfOpenChannels();
}
/**
 * Serializes and sends a request to a node over the channel matching the
 * request type. Layout: reserved header space, action name, then the request
 * body (optionally compressed). {@code BytesTransportRequest} bodies are sent
 * as a separate pre-serialized buffer so they are never re-compressed. The
 * paged output bytes are released by a listener once the write completes, or
 * eagerly in the finally block if the listener was never attached.
 */
@Override
public void sendRequest(final DiscoveryNode node, final long requestId, final String action, final TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
Channel targetChannel = nodeChannel(node, options);
if (compress) {
options.withCompress(true);
}
byte status = 0;
status = TransportStatus.setRequest(status);
ReleasableBytesStreamOutput bStream = new ReleasableBytesStreamOutput(bigArrays);
boolean addedReleaseListener = false;
try {
// reserve room for the frame header; it is filled in by NettyHeader.writeHeader below
bStream.skip(NettyHeader.HEADER_SIZE);
StreamOutput stream = bStream;
// only compress if asked, and, the request is not bytes, since then only
// the header part is compressed, and the "body" can't be extracted as compressed
if (options.compress() && (!(request instanceof BytesTransportRequest))) {
status = TransportStatus.setCompress(status);
stream = CompressorFactory.defaultCompressor().streamOutput(stream);
}
// we pick the smallest of the 2, to support both backward and forward compatibility
// note, this is the only place we need to do this, since from here on, we use the serialized version
// as the version to use also when the node receiving this request will send the response with
Version version = Version.smallest(this.version, node.version());
stream.setVersion(version);
stream.writeString(action);
ReleasablePagedBytesReference bytes;
ChannelBuffer buffer;
// it might be nice to somehow generalize this optimization, maybe a smart "paged" bytes output
// that create paged channel buffers, but its tricky to know when to do it (where this option is
// more explicit).
if (request instanceof BytesTransportRequest) {
BytesTransportRequest bRequest = (BytesTransportRequest) request;
assert node.version().equals(bRequest.version());
// write only the thin part; the body buffer is appended below without copying
bRequest.writeThin(stream);
stream.close();
bytes = bStream.bytes();
ChannelBuffer headerBuffer = bytes.toChannelBuffer();
ChannelBuffer contentBuffer = bRequest.bytes().toChannelBuffer();
buffer = ChannelBuffers.wrappedBuffer(NettyUtils.DEFAULT_GATHERING, headerBuffer, contentBuffer);
} else {
request.writeTo(stream);
stream.close();
bytes = bStream.bytes();
buffer = bytes.toChannelBuffer();
}
NettyHeader.writeHeader(buffer, requestId, status, version);
ChannelFuture future = targetChannel.write(buffer);
// the listener releases the paged bytes once netty is done with the buffer
ReleaseChannelFutureListener listener = new ReleaseChannelFutureListener(bytes);
future.addListener(listener);
addedReleaseListener = true;
transportServiceAdapter.onRequestSent(node, requestId, action, request, options);
} finally {
// if we failed before handing ownership to the listener, release here to avoid a leak
if (!addedReleaseListener) {
Releasables.close(bStream.bytes());
}
}
}
/** True when an active channel set exists for the given node. */
@Override
public boolean nodeConnected(DiscoveryNode node) {
    return connectedNodes.get(node) != null;
}
/**
 * Opens a lightweight connection: one channel shared by all request types.
 */
@Override
public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException {
    connectToNode(node, /* light */ true);
}
/**
 * Opens a full connection: dedicated channel pools per request type.
 */
@Override
public void connectToNode(DiscoveryNode node) {
    connectToNode(node, /* light */ false);
}
/**
 * Connects to a node, either light (single shared channel) or full (one pool
 * per request type). No-op if already connected. Concurrency: holds the
 * global read lock (so doStop cannot run concurrently) plus a per-node lock
 * (so at most one connect per node), and re-checks lifecycle under the locks.
 *
 * @throws ConnectTransportException on any connection failure (wrapping the cause)
 * @throws IllegalStateException if the transport is not started
 */
public void connectToNode(DiscoveryNode node, boolean light) {
if (!lifecycle.started()) {
throw new IllegalStateException("can't add nodes to a stopped transport");
}
if (node == null) {
throw new ConnectTransportException(null, "can't connect to a null node");
}
globalLock.readLock().lock();
try {
connectionLock.acquire(node.id());
try {
// re-check under the lock: doStop may have raced us before we acquired it
if (!lifecycle.started()) {
throw new IllegalStateException("can't add nodes to a stopped transport");
}
NodeChannels nodeChannels = connectedNodes.get(node);
if (nodeChannels != null) {
// already connected — nothing to do
return;
}
try {
if (light) {
nodeChannels = connectToChannelsLight(node);
} else {
nodeChannels = new NodeChannels(new Channel[connectionsPerNodeRecovery], new Channel[connectionsPerNodeBulk], new Channel[connectionsPerNodeReg], new Channel[connectionsPerNodeState], new Channel[connectionsPerNodePing]);
try {
connectToChannels(nodeChannels, node);
} catch (Throwable e) {
// close whatever channels were opened before the failure
logger.trace("failed to connect to [{}], cleaning dangling connections", e, node);
nodeChannels.close();
throw e;
}
}
// we acquire a connection lock, so no way there is an existing connection
nodeChannels.start();
connectedNodes.put(node, nodeChannels);
if (logger.isDebugEnabled()) {
logger.debug("connected to node [{}]", node);
}
transportServiceAdapter.raiseNodeConnected(node);
} catch (ConnectTransportException e) {
// already the right exception type — propagate untouched
throw e;
} catch (Exception e) {
throw new ConnectTransportException(node, "general node connection failure", e);
}
} finally {
connectionLock.release(node.id());
}
} finally {
globalLock.readLock().unlock();
}
}
/**
 * Opens a single channel to the node and shares it across all five request
 * types (recovery/bulk/reg/state/ping).
 *
 * @throws ConnectTransportException when the connect does not succeed within
 *         1.5x the configured connect timeout
 */
protected NodeChannels connectToChannelsLight(DiscoveryNode node) {
    final InetSocketAddress remoteAddress = ((InetSocketTransportAddress) node.address()).address();
    final ChannelFuture connectFuture = clientBootstrap.connect(remoteAddress);
    // allow some slack beyond the configured timeout before giving up
    connectFuture.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
    if (!connectFuture.isSuccess()) {
        throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectFuture.getCause());
    }
    final Channel channel = connectFuture.getChannel();
    channel.getCloseFuture().addListener(new ChannelCloseListener(node));
    // the same one-element array backs every channel type
    final Channel[] shared = new Channel[]{channel};
    return new NodeChannels(shared, shared, shared, shared, shared);
}
/**
 * Opens all per-type channels to a node: first issues every connect
 * asynchronously, then awaits each with 1.5x the connect timeout. Empty
 * recovery/bulk pools fall back to sharing the bulk/reg pools. On failure
 * every pending future is cancelled and every opened channel closed before
 * rethrowing.
 */
protected void connectToChannels(NodeChannels nodeChannels, DiscoveryNode node) {
ChannelFuture[] connectRecovery = new ChannelFuture[nodeChannels.recovery.length];
ChannelFuture[] connectBulk = new ChannelFuture[nodeChannels.bulk.length];
ChannelFuture[] connectReg = new ChannelFuture[nodeChannels.reg.length];
ChannelFuture[] connectState = new ChannelFuture[nodeChannels.state.length];
ChannelFuture[] connectPing = new ChannelFuture[nodeChannels.ping.length];
InetSocketAddress address = ((InetSocketTransportAddress) node.address()).address();
// kick off all connects first so they proceed in parallel...
for (int i = 0; i < connectRecovery.length; i++) {
connectRecovery[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectBulk.length; i++) {
connectBulk[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectReg.length; i++) {
connectReg[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectState.length; i++) {
connectState[i] = clientBootstrap.connect(address);
}
for (int i = 0; i < connectPing.length; i++) {
connectPing[i] = clientBootstrap.connect(address);
}
// ...then await them, pool by pool
try {
for (int i = 0; i < connectRecovery.length; i++) {
connectRecovery[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectRecovery[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectRecovery[i].getCause());
}
nodeChannels.recovery[i] = connectRecovery[i].getChannel();
nodeChannels.recovery[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectBulk.length; i++) {
connectBulk[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectBulk[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectBulk[i].getCause());
}
nodeChannels.bulk[i] = connectBulk[i].getChannel();
nodeChannels.bulk[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectReg.length; i++) {
connectReg[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectReg[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectReg[i].getCause());
}
nodeChannels.reg[i] = connectReg[i].getChannel();
nodeChannels.reg[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectState.length; i++) {
connectState[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectState[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectState[i].getCause());
}
nodeChannels.state[i] = connectState[i].getChannel();
nodeChannels.state[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
for (int i = 0; i < connectPing.length; i++) {
connectPing[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5));
if (!connectPing[i].isSuccess()) {
throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectPing[i].getCause());
}
nodeChannels.ping[i] = connectPing[i].getChannel();
nodeChannels.ping[i].getCloseFuture().addListener(new ChannelCloseListener(node));
}
// pools configured with size 0 borrow from the next-best pool
if (nodeChannels.recovery.length == 0) {
if (nodeChannels.bulk.length > 0) {
nodeChannels.recovery = nodeChannels.bulk;
} else {
nodeChannels.recovery = nodeChannels.reg;
}
}
if (nodeChannels.bulk.length == 0) {
nodeChannels.bulk = nodeChannels.reg;
}
} catch (RuntimeException e) {
// clean the futures
for (ChannelFuture future : ImmutableList.<ChannelFuture>builder().add(connectRecovery).add(connectBulk).add(connectReg).add(connectState).add(connectPing).build()) {
future.cancel();
if (future.getChannel() != null && future.getChannel().isOpen()) {
try {
future.getChannel().close();
} catch (Exception e1) {
// ignore
}
}
}
throw e;
}
}
/**
 * Explicitly disconnects from a node: removes and closes its channel set
 * under the per-node lock and notifies the transport-service adapter.
 * No-op if the node was not connected.
 */
@Override
public void disconnectFromNode(DiscoveryNode node) {
connectionLock.acquire(node.id());
try {
NodeChannels nodeChannels = connectedNodes.remove(node);
if (nodeChannels != null) {
try {
logger.debug("disconnecting from [{}] due to explicit disconnect call", node);
nodeChannels.close();
} finally {
// raise the event even if closing the channels threw
logger.trace("disconnected from [{}] due to explicit disconnect call", node);
transportServiceAdapter.raiseNodeDisconnected(node);
}
}
} finally {
connectionLock.release(node.id());
}
}
/**
 * Disconnects from a node, but only if the given channel belongs to that
 * node's channel set. Performs a cheap lock-free check first, then re-checks
 * under the per-node lock before removing and closing the channels.
 *
 * @param reason human-readable reason, used only for logging
 * @return true if a disconnect was actually performed
 */
protected boolean disconnectFromNode(DiscoveryNode node, Channel channel, String reason) {
// this might be called multiple times from all the node channels, so do a lightweight
// check outside of the lock
NodeChannels nodeChannels = connectedNodes.get(node);
if (nodeChannels != null && nodeChannels.hasChannel(channel)) {
connectionLock.acquire(node.id());
try {
nodeChannels = connectedNodes.get(node);
// check again within the connection lock, if its still applicable to remove it
if (nodeChannels != null && nodeChannels.hasChannel(channel)) {
connectedNodes.remove(node);
try {
logger.debug("disconnecting from [{}], {}", node, reason);
nodeChannels.close();
} finally {
// notify listeners even if closing failed
logger.trace("disconnected from [{}], {}", node, reason);
transportServiceAdapter.raiseNodeDisconnected(node);
}
return true;
}
} finally {
connectionLock.release(node.id());
}
}
return false;
}
/**
 * Disconnects from whichever connected node owns the given channel, if any.
 * The scan runs asynchronously on a generic-pool thread because this is
 * typically invoked from a netty IO thread, which must not block on the
 * connection locks taken by the disconnect.
 */
protected void disconnectFromNodeChannel(final Channel channel, final Throwable failure) {
threadPool().generic().execute(new Runnable() {
@Override
public void run() {
for (DiscoveryNode node : connectedNodes.keySet()) {
if (disconnectFromNode(node, channel, ExceptionsHelper.detailedMessage(failure))) {
// if we managed to find this channel and disconnect from it, then break, no need to check on
// the rest of the nodes
break;
}
}
}
});
}
/**
 * Picks the channel to use for a request to the given node, based on the
 * request type in {@code options}.
 *
 * @throws NodeNotConnectedException if no channel set exists for the node
 */
protected Channel nodeChannel(DiscoveryNode node, TransportRequestOptions options) throws ConnectTransportException {
    final NodeChannels channels = connectedNodes.get(node);
    if (channels == null) {
        throw new NodeNotConnectedException(node, "Node not connected");
    }
    return channels.channel(options.type());
}
/**
 * Factory for outbound (client) channel pipelines; overridable by subclasses
 * that need extra handlers.
 */
public ChannelPipelineFactory configureClientChannelPipelineFactory() {
    return new ClientChannelPipelineFactory(this);
}
/**
 * Builds the pipeline for outbound connections: a size-header frame decoder
 * followed by the message dispatcher.
 */
protected static class ClientChannelPipelineFactory implements ChannelPipelineFactory {

    protected final NettyTransport nettyTransport;

    public ClientChannelPipelineFactory(NettyTransport nettyTransport) {
        this.nettyTransport = nettyTransport;
    }

    @Override
    public ChannelPipeline getPipeline() throws Exception {
        final ChannelPipeline pipeline = Channels.pipeline();
        final SizeHeaderFrameDecoder frameDecoder = new SizeHeaderFrameDecoder();
        final ByteSizeValue maxCapacity = nettyTransport.maxCumulationBufferCapacity;
        if (maxCapacity != null) {
            // clamp to the int range the decoder accepts
            frameDecoder.setMaxCumulationBufferCapacity((int) Math.min(maxCapacity.bytes(), (long) Integer.MAX_VALUE));
        }
        if (nettyTransport.maxCompositeBufferComponents != -1) {
            frameDecoder.setMaxCumulationBufferComponents(nettyTransport.maxCompositeBufferComponents);
        }
        pipeline.addLast("size", frameDecoder);
        // using a dot as a prefix means, this cannot come from any settings parsed
        pipeline.addLast("dispatcher", new MessageChannelHandler(nettyTransport, nettyTransport.logger, ".client"));
        return pipeline;
    }
}
public ChannelPipelineFactory configureServerChannelPipelineFactory(String name, Settings settings) {
return new ServerChannelPipelineFactory(this, name, settings);
}
protected static class ServerChannelPipelineFactory implements ChannelPipelineFactory {
protected final NettyTransport nettyTransport;
protected final String name;
protected final Settings settings;
public ServerChannelPipelineFactory(NettyTransport nettyTransport, String name, Settings settings) {
this.nettyTransport = nettyTransport;
this.name = name;
this.settings = settings;
}
@Override
public ChannelPipeline getPipeline() throws Exception {
ChannelPipeline channelPipeline = Channels.pipeline();
channelPipeline.addLast("openChannels", nettyTransport.serverOpenChannels);
SizeHeaderFrameDecoder sizeHeader = new SizeHeaderFrameDecoder();
if (nettyTransport.maxCumulationBufferCapacity != null) {
if (nettyTransport.maxCumulationBufferCapacity.bytes() > Integer.MAX_VALUE) {
sizeHeader.setMaxCumulationBufferCapacity(Integer.MAX_VALUE);
} else {
sizeHeader.setMaxCumulationBufferCapacity((int) nettyTransport.maxCumulationBufferCapacity.bytes());
}
}
if (nettyTransport.maxCompositeBufferComponents != -1) {
sizeHeader.setMaxCumulationBufferComponents(nettyTransport.maxCompositeBufferComponents);
}
channelPipeline.addLast("size", sizeHeader);
channelPipeline.addLast("dispatcher", new MessageChannelHandler(nettyTransport, nettyTransport.logger, name));
return channelPipeline;
}
}
protected class ChannelCloseListener implements ChannelFutureListener {
private final DiscoveryNode node;
private ChannelCloseListener(DiscoveryNode node) {
this.node = node;
}
@Override
public void operationComplete(final ChannelFuture future) throws Exception {
NodeChannels nodeChannels = connectedNodes.get(node);
if (nodeChannels != null && nodeChannels.hasChannel(future.getChannel())) {
threadPool().generic().execute(new Runnable() {
@Override
public void run() {
disconnectFromNode(node, future.getChannel(), "channel closed event");
}
});
}
}
}
public static class NodeChannels {
ImmutableList<Channel> allChannels = ImmutableList.of();
private Channel[] recovery;
private final AtomicInteger recoveryCounter = new AtomicInteger();
private Channel[] bulk;
private final AtomicInteger bulkCounter = new AtomicInteger();
private Channel[] reg;
private final AtomicInteger regCounter = new AtomicInteger();
private Channel[] state;
private final AtomicInteger stateCounter = new AtomicInteger();
private Channel[] ping;
private final AtomicInteger pingCounter = new AtomicInteger();
public NodeChannels(Channel[] recovery, Channel[] bulk, Channel[] reg, Channel[] state, Channel[] ping) {
this.recovery = recovery;
this.bulk = bulk;
this.reg = reg;
this.state = state;
this.ping = ping;
}
public void start() {
this.allChannels = ImmutableList.<Channel>builder().add(recovery).add(bulk).add(reg).add(state).add(ping).build();
}
public boolean hasChannel(Channel channel) {
for (Channel channel1 : allChannels) {
if (channel.equals(channel1)) {
return true;
}
}
return false;
}
public Channel channel(TransportRequestOptions.Type type) {
if (type == TransportRequestOptions.Type.REG) {
return reg[MathUtils.mod(regCounter.incrementAndGet(), reg.length)];
} else if (type == TransportRequestOptions.Type.STATE) {
return state[MathUtils.mod(stateCounter.incrementAndGet(), state.length)];
} else if (type == TransportRequestOptions.Type.PING) {
return ping[MathUtils.mod(pingCounter.incrementAndGet(), ping.length)];
} else if (type == TransportRequestOptions.Type.BULK) {
return bulk[MathUtils.mod(bulkCounter.incrementAndGet(), bulk.length)];
} else if (type == TransportRequestOptions.Type.RECOVERY) {
return recovery[MathUtils.mod(recoveryCounter.incrementAndGet(), recovery.length)];
} else {
throw new IllegalArgumentException("no type channel for [" + type + "]");
}
}
public synchronized void close() {
List<ChannelFuture> futures = new ArrayList<>();
for (Channel channel : allChannels) {
try {
if (channel != null && channel.isOpen()) {
futures.add(channel.close());
}
} catch (Exception e) {
//ignore
}
}
for (ChannelFuture future : futures) {
future.awaitUninterruptibly();
}
}
}
class ScheduledPing extends AbstractRunnable {
final CounterMetric successfulPings = new CounterMetric();
final CounterMetric failedPings = new CounterMetric();
@Override
protected void doRun() throws Exception {
if (lifecycle.stoppedOrClosed()) {
return;
}
for (Map.Entry<DiscoveryNode, NodeChannels> entry : connectedNodes.entrySet()) {
DiscoveryNode node = entry.getKey();
NodeChannels channels = entry.getValue();
for (Channel channel : channels.allChannels) {
try {
ChannelFuture future = channel.write(NettyHeader.pingHeader());
future.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) throws Exception {
successfulPings.inc();
}
});
} catch (Throwable t) {
if (channel.isOpen()) {
logger.debug("[{}] failed to send ping transport message", t, node);
failedPings.inc();
} else {
logger.trace("[{}] failed to send ping transport message (channel closed)", t, node);
}
}
}
}
threadPool.schedule(pingSchedule, ThreadPool.Names.GENERIC, this);
}
@Override
public void onFailure(Throwable t) {
if (lifecycle.stoppedOrClosed()) {
logger.trace("[{}] failed to send ping transport message", t);
} else {
logger.warn("[{}] failed to send ping transport message", t);
}
}
}
}
| |
/*
* Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ebi.spot.goci.service.junidecode;
/**
 * Character map for Unicode characters with codepoint U+7Axx.
 *
 * <p>Index {@code 0xNN} holds the ASCII transliteration (with a trailing space)
 * used by Junidecode for codepoint {@code U+7ANN}; {@code "[?] "} marks
 * codepoints without a transliteration.
 *
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X7a {

    // Generated transliteration table: exactly 256 entries, one per low byte of
    // the codepoint. The string values are behavior-bearing data — do not edit.
    public static final String[] map = new String[]{
        "Xi ", // 0x00
        "Kao ", // 0x01
        "Lang ", // 0x02
        "Fu ", // 0x03
        "Ze ", // 0x04
        "Shui ", // 0x05
        "Lu ", // 0x06
        "Kun ", // 0x07
        "Gan ", // 0x08
        "Geng ", // 0x09
        "Ti ", // 0x0a
        "Cheng ", // 0x0b
        "Tu ", // 0x0c
        "Shao ", // 0x0d
        "Shui ", // 0x0e
        "Ya ", // 0x0f
        "Lun ", // 0x10
        "Lu ", // 0x11
        "Gu ", // 0x12
        "Zuo ", // 0x13
        "Ren ", // 0x14
        "Zhun ", // 0x15
        "Bang ", // 0x16
        "Bai ", // 0x17
        "Ji ", // 0x18
        "Zhi ", // 0x19
        "Zhi ", // 0x1a
        "Kun ", // 0x1b
        "Leng ", // 0x1c
        "Peng ", // 0x1d
        "Ke ", // 0x1e
        "Bing ", // 0x1f
        "Chou ", // 0x20
        "Zu ", // 0x21
        "Yu ", // 0x22
        "Su ", // 0x23
        "Lue ", // 0x24
        "[?] ", // 0x25
        "Yi ", // 0x26
        "Xi ", // 0x27
        "Bian ", // 0x28
        "Ji ", // 0x29
        "Fu ", // 0x2a
        "Bi ", // 0x2b
        "Nuo ", // 0x2c
        "Jie ", // 0x2d
        "Zhong ", // 0x2e
        "Zong ", // 0x2f
        "Xu ", // 0x30
        "Cheng ", // 0x31
        "Dao ", // 0x32
        "Wen ", // 0x33
        "Lian ", // 0x34
        "Zi ", // 0x35
        "Yu ", // 0x36
        "Ji ", // 0x37
        "Xu ", // 0x38
        "Zhen ", // 0x39
        "Zhi ", // 0x3a
        "Dao ", // 0x3b
        "Jia ", // 0x3c
        "Ji ", // 0x3d
        "Gao ", // 0x3e
        "Gao ", // 0x3f
        "Gu ", // 0x40
        "Rong ", // 0x41
        "Sui ", // 0x42
        "You ", // 0x43
        "Ji ", // 0x44
        "Kang ", // 0x45
        "Mu ", // 0x46
        "Shan ", // 0x47
        "Men ", // 0x48
        "Zhi ", // 0x49
        "Ji ", // 0x4a
        "Lu ", // 0x4b
        "Su ", // 0x4c
        "Ji ", // 0x4d
        "Ying ", // 0x4e
        "Wen ", // 0x4f
        "Qiu ", // 0x50
        "Se ", // 0x51
        "[?] ", // 0x52
        "Yi ", // 0x53
        "Huang ", // 0x54
        "Qie ", // 0x55
        "Ji ", // 0x56
        "Sui ", // 0x57
        "Xiao ", // 0x58
        "Pu ", // 0x59
        "Jiao ", // 0x5a
        "Zhuo ", // 0x5b
        "Tong ", // 0x5c
        "Sai ", // 0x5d
        "Lu ", // 0x5e
        "Sui ", // 0x5f
        "Nong ", // 0x60
        "Se ", // 0x61
        "Hui ", // 0x62
        "Rang ", // 0x63
        "Nuo ", // 0x64
        "Yu ", // 0x65
        "Bin ", // 0x66
        "Ji ", // 0x67
        "Tui ", // 0x68
        "Wen ", // 0x69
        "Cheng ", // 0x6a
        "Huo ", // 0x6b
        "Gong ", // 0x6c
        "Lu ", // 0x6d
        "Biao ", // 0x6e
        "[?] ", // 0x6f
        "Rang ", // 0x70
        "Zhuo ", // 0x71
        "Li ", // 0x72
        "Zan ", // 0x73
        "Xue ", // 0x74
        "Wa ", // 0x75
        "Jiu ", // 0x76
        "Qiong ", // 0x77
        "Xi ", // 0x78
        "Qiong ", // 0x79
        "Kong ", // 0x7a
        "Yu ", // 0x7b
        "Sen ", // 0x7c
        "Jing ", // 0x7d
        "Yao ", // 0x7e
        "Chuan ", // 0x7f
        "Zhun ", // 0x80
        "Tu ", // 0x81
        "Lao ", // 0x82
        "Qie ", // 0x83
        "Zhai ", // 0x84
        "Yao ", // 0x85
        "Bian ", // 0x86
        "Bao ", // 0x87
        "Yao ", // 0x88
        "Bing ", // 0x89
        "Wa ", // 0x8a
        "Zhu ", // 0x8b
        "Jiao ", // 0x8c
        "Qiao ", // 0x8d
        "Diao ", // 0x8e
        "Wu ", // 0x8f
        "Gui ", // 0x90
        "Yao ", // 0x91
        "Zhi ", // 0x92
        "Chuang ", // 0x93
        "Yao ", // 0x94
        "Tiao ", // 0x95
        "Jiao ", // 0x96
        "Chuang ", // 0x97
        "Jiong ", // 0x98
        "Xiao ", // 0x99
        "Cheng ", // 0x9a
        "Kou ", // 0x9b
        "Cuan ", // 0x9c
        "Wo ", // 0x9d
        "Dan ", // 0x9e
        "Ku ", // 0x9f
        "Ke ", // 0xa0
        "Zhui ", // 0xa1
        "Xu ", // 0xa2
        "Su ", // 0xa3
        "Guan ", // 0xa4
        "Kui ", // 0xa5
        "Dou ", // 0xa6
        "[?] ", // 0xa7
        "Yin ", // 0xa8
        "Wo ", // 0xa9
        "Wa ", // 0xaa
        "Ya ", // 0xab
        "Yu ", // 0xac
        "Ju ", // 0xad
        "Qiong ", // 0xae
        "Yao ", // 0xaf
        "Yao ", // 0xb0
        "Tiao ", // 0xb1
        "Chao ", // 0xb2
        "Yu ", // 0xb3
        "Tian ", // 0xb4
        "Diao ", // 0xb5
        "Ju ", // 0xb6
        "Liao ", // 0xb7
        "Xi ", // 0xb8
        "Wu ", // 0xb9
        "Kui ", // 0xba
        "Chuang ", // 0xbb
        "Zhao ", // 0xbc
        "[?] ", // 0xbd
        "Kuan ", // 0xbe
        "Long ", // 0xbf
        "Cheng ", // 0xc0
        "Cui ", // 0xc1
        "Piao ", // 0xc2
        "Zao ", // 0xc3
        "Cuan ", // 0xc4
        "Qiao ", // 0xc5
        "Qiong ", // 0xc6
        "Dou ", // 0xc7
        "Zao ", // 0xc8
        "Long ", // 0xc9
        "Qie ", // 0xca
        "Li ", // 0xcb
        "Chu ", // 0xcc
        "Shi ", // 0xcd
        "Fou ", // 0xce
        "Qian ", // 0xcf
        "Chu ", // 0xd0
        "Hong ", // 0xd1
        "Qi ", // 0xd2
        "Qian ", // 0xd3
        "Gong ", // 0xd4
        "Shi ", // 0xd5
        "Shu ", // 0xd6
        "Miao ", // 0xd7
        "Ju ", // 0xd8
        "Zhan ", // 0xd9
        "Zhu ", // 0xda
        "Ling ", // 0xdb
        "Long ", // 0xdc
        "Bing ", // 0xdd
        "Jing ", // 0xde
        "Jing ", // 0xdf
        "Zhang ", // 0xe0
        "Yi ", // 0xe1
        "Si ", // 0xe2
        "Jun ", // 0xe3
        "Hong ", // 0xe4
        "Tong ", // 0xe5
        "Song ", // 0xe6
        "Jing ", // 0xe7
        "Diao ", // 0xe8
        "Yi ", // 0xe9
        "Shu ", // 0xea
        "Jing ", // 0xeb
        "Qu ", // 0xec
        "Jie ", // 0xed
        "Ping ", // 0xee
        "Duan ", // 0xef
        "Shao ", // 0xf0
        "Zhuan ", // 0xf1
        "Ceng ", // 0xf2
        "Deng ", // 0xf3
        "Cui ", // 0xf4
        "Huai ", // 0xf5
        "Jing ", // 0xf6
        "Kan ", // 0xf7
        "Jing ", // 0xf8
        "Zhu ", // 0xf9
        "Zhu ", // 0xfa
        "Le ", // 0xfb
        "Peng ", // 0xfc
        "Yu ", // 0xfd
        "Chi ", // 0xfe
        "Gan " // 0xff
    };
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.operators;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeutils.base.array.BytePrimitiveArraySerializer;
import org.apache.flink.api.connector.source.ReaderOutput;
import org.apache.flink.api.connector.source.SourceEvent;
import org.apache.flink.api.connector.source.SourceReader;
import org.apache.flink.api.connector.source.SourceReaderContext;
import org.apache.flink.api.connector.source.SourceSplit;
import org.apache.flink.core.io.InputStatus;
import org.apache.flink.core.io.SimpleVersionedSerializer;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.runtime.operators.coordination.OperatorEvent;
import org.apache.flink.runtime.operators.coordination.OperatorEventGateway;
import org.apache.flink.runtime.operators.coordination.OperatorEventHandler;
import org.apache.flink.runtime.source.event.AddSplitEvent;
import org.apache.flink.runtime.source.event.ReaderRegistrationEvent;
import org.apache.flink.runtime.source.event.SourceEventWrapper;
import org.apache.flink.runtime.state.StateInitializationContext;
import org.apache.flink.runtime.state.StateSnapshotContext;
import org.apache.flink.streaming.api.operators.source.TimestampsAndWatermarks;
import org.apache.flink.streaming.api.operators.util.SimpleVersionedListState;
import org.apache.flink.streaming.runtime.io.PushingAsyncDataInput;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService;
import org.apache.flink.util.CollectionUtil;
import org.apache.flink.util.FlinkRuntimeException;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * Base source operator only used for integrating the source reader which is proposed by FLIP-27. It implements
 * the interface of {@link PushingAsyncDataInput} to be naturally compatible with one input processing in runtime
 * stack.
 *
 * <p><b>Important Note on Serialization:</b> The SourceOperator inherits the {@link java.io.Serializable}
 * interface from the StreamOperator, but is in fact NOT serializable. The operator must only be instantiated
 * in the StreamTask from its factory.
 *
 * @param <OUT> The output type of the operator.
 */
@Internal
@SuppressWarnings("serial")
public class SourceOperator<OUT, SplitT extends SourceSplit>
        extends AbstractStreamOperator<OUT>
        implements OperatorEventHandler, PushingAsyncDataInput<OUT> {

    private static final long serialVersionUID = 1405537676017904695L;

    // Package private for unit test.
    static final ListStateDescriptor<byte[]> SPLITS_STATE_DESC =
            new ListStateDescriptor<>("SourceReaderState", BytePrimitiveArraySerializer.INSTANCE);

    /** The factory for the source reader. This is a workaround, because currently the SourceReader
     * must be lazily initialized, which is mainly because the metrics groups that the reader relies on is
     * lazily initialized. */
    private final Function<SourceReaderContext, SourceReader<OUT, SplitT>> readerFactory;

    /** The serializer for the splits, applied to the split types before storing them in the reader state. */
    private final SimpleVersionedSerializer<SplitT> splitSerializer;

    /** The event gateway through which this operator talks to its coordinator. */
    private final OperatorEventGateway operatorEventGateway;

    /** The factory for timestamps and watermark generators. */
    private final WatermarkStrategy<OUT> watermarkStrategy;

    // ---- lazily initialized fields (these fields are the "hot" fields) ----

    /** The source reader that does most of the work. */
    private SourceReader<OUT, SplitT> sourceReader;

    /** The reader output created on the first {@link #emitNext} call; non-null afterwards. */
    private ReaderOutput<OUT> currentMainOutput;

    /** The output handed to the last {@link #emitNext} call; used only to assert the output never changes. */
    private DataOutput<OUT> lastInvokedOutput;

    /** The state that holds the currently assigned splits. */
    private ListState<SplitT> readerState;

    /** The event time and watermarking logic. Ideally this would be eagerly passed into this operator,
     * but we currently need to instantiate this lazily, because the metric groups exist only later. */
    private TimestampsAndWatermarks<OUT> eventTimeLogic;

    public SourceOperator(
            Function<SourceReaderContext, SourceReader<OUT, SplitT>> readerFactory,
            OperatorEventGateway operatorEventGateway,
            SimpleVersionedSerializer<SplitT> splitSerializer,
            WatermarkStrategy<OUT> watermarkStrategy,
            ProcessingTimeService timeService) {
        this.readerFactory = checkNotNull(readerFactory);
        this.operatorEventGateway = checkNotNull(operatorEventGateway);
        this.splitSerializer = checkNotNull(splitSerializer);
        this.watermarkStrategy = checkNotNull(watermarkStrategy);
        this.processingTimeService = timeService;
    }

    /**
     * Creates the reader, restores its splits from state, starts it, registers it with
     * the coordinator, and starts periodic watermark emission.
     */
    @Override
    public void open() throws Exception {
        final MetricGroup metricGroup = getMetricGroup();

        final SourceReaderContext context = new SourceReaderContext() {
            @Override
            public MetricGroup metricGroup() {
                return metricGroup;
            }

            @Override
            public void sendSourceEventToCoordinator(SourceEvent event) {
                operatorEventGateway.sendEventToCoordinator(new SourceEventWrapper(event));
            }
        };

        // in the future when we support both batch and streaming modes for the source operator,
        // and when this one is migrated to the "eager initialization" operator (StreamOperatorV2),
        // then we should evaluate this during operator construction.
        eventTimeLogic = TimestampsAndWatermarks.createStreamingEventTimeLogic(
                watermarkStrategy,
                metricGroup,
                getProcessingTimeService(),
                getExecutionConfig().getAutoWatermarkInterval());

        sourceReader = readerFactory.apply(context);

        // restore the state if necessary.
        final List<SplitT> splits = CollectionUtil.iterableToList(readerState.get());
        if (!splits.isEmpty()) {
            sourceReader.addSplits(splits);
        }

        // Start the reader.
        sourceReader.start();
        // Register the reader to the coordinator.
        registerReader();

        eventTimeLogic.startPeriodicWatermarkEmits();
    }

    @Override
    public void close() throws Exception {
        // NOTE(review): this assumes open() ran, so sourceReader and eventTimeLogic
        // are non-null — confirm the task lifecycle guarantees open() before close().
        sourceReader.close();
        eventTimeLogic.stopPeriodicWatermarkEmits();
        super.close();
    }

    @Override
    public InputStatus emitNext(DataOutput<OUT> output) throws Exception {
        // guarding an assumptions we currently make due to the fact that certain classes
        // assume a constant output
        assert lastInvokedOutput == output || lastInvokedOutput == null;

        // short circuit the common case (every invocation except the first)
        if (currentMainOutput != null) {
            return sourceReader.pollNext(currentMainOutput);
        }

        // this creates a batch or streaming output based on the runtime mode
        currentMainOutput = eventTimeLogic.createMainOutput(output);
        lastInvokedOutput = output;
        return sourceReader.pollNext(currentMainOutput);
    }

    /** Snapshots the reader's current splits into the raw operator state. */
    @Override
    public void snapshotState(StateSnapshotContext context) throws Exception {
        LOG.debug("Taking a snapshot for checkpoint {}", context.getCheckpointId());
        readerState.update(sourceReader.snapshotState());
    }

    @Override
    public CompletableFuture<?> getAvailableFuture() {
        return sourceReader.isAvailable();
    }

    @Override
    public void initializeState(StateInitializationContext context) throws Exception {
        super.initializeState(context);
        final ListState<byte[]> rawState = context.getOperatorStateStore().getListState(SPLITS_STATE_DESC);
        readerState = new SimpleVersionedListState<>(rawState, splitSerializer);
    }

    /**
     * Dispatches coordinator events: split assignments go to the reader, wrapped
     * source events are unwrapped and forwarded; anything else is a bug.
     */
    @Override // implements OperatorEventHandler; annotation was missing before
    @SuppressWarnings("unchecked")
    public void handleOperatorEvent(OperatorEvent event) {
        if (event instanceof AddSplitEvent) {
            try {
                sourceReader.addSplits(((AddSplitEvent<SplitT>) event).splits(splitSerializer));
            } catch (IOException e) {
                throw new FlinkRuntimeException("Failed to deserialize the splits.", e);
            }
        } else if (event instanceof SourceEventWrapper) {
            sourceReader.handleSourceEvents(((SourceEventWrapper) event).getSourceEvent());
        } else {
            throw new IllegalStateException("Received unexpected operator event " + event);
        }
    }

    /** Announces this subtask's reader to the coordinator. */
    private void registerReader() {
        operatorEventGateway.sendEventToCoordinator(new ReaderRegistrationEvent(
                getRuntimeContext().getIndexOfThisSubtask(),
                "UNKNOWN_LOCATION"));
    }

    // --------------- methods for unit tests ------------

    @VisibleForTesting
    public SourceReader<OUT, SplitT> getSourceReader() {
        return sourceReader;
    }

    @VisibleForTesting
    ListState<SplitT> getReaderState() {
        return readerState;
    }
}
| |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2010 Oracle. All rights reserved.
*
* $Id: TruncateTest.java,v 1.33 2010/01/04 15:50:58 cwl Exp $
*/
package com.sleepycat.je;
import java.io.File;
import com.sleepycat.bind.tuple.IntegerBinding;
import com.sleepycat.je.config.EnvironmentParams;
import com.sleepycat.je.dbi.DatabaseImpl;
import com.sleepycat.je.dbi.DbTree;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.util.DualTestCase;
import com.sleepycat.je.util.TestUtils;
/**
* Basic database operations, excluding configuration testing.
*/
public class TruncateTest extends DualTestCase {
private static final int NUM_RECS = 257;
private static final String DB_NAME = "testDb";
private File envHome;
private Environment env;
    public TruncateTest() {
        // Resolve the per-run test output directory from the build's system property.
        envHome = new File(System.getProperty(TestUtils.DEST_DIR));
    }
    @Override
    public void setUp()
        throws Exception {
        // Start from a clean slate: remove any log files left by an earlier run.
        super.setUp();
        TestUtils.removeLogFiles("Setup", envHome, false);
    }
    @Override
    public void tearDown()
        throws Exception {
        // Best-effort close: a test that already closed the environment is fine.
        if (env != null) {
            try {
                /* Close in case we hit an exception and didn't close. */
                close(env);
            } catch (Exception e) {
                /* Ok if already closed */
            }
        }
        env = null; // for JUNIT, to reduce memory usage when run in a suite.
        super.tearDown();
        TestUtils.removeLogFiles("TearDown", envHome, false);
    }
    /**
     * Insert, truncate, and re-insert all inside one transaction, then abort:
     * the abort undoes the whole sequence, so no records remain.
     */
    public void testEnvTruncateAbort()
        throws Throwable {
        doTruncateAndAdd(true, // transactional
                         256, // step1 num records
                         false, // step2 autocommit
                         150, // step3 num records
                         true, // step4 abort
                         0); // step5 num records
    }
    /**
     * Insert, truncate, and re-insert inside one transaction, then commit:
     * only the 150 re-inserted records survive.
     */
    public void testEnvTruncateCommit()
        throws Throwable {
        doTruncateAndAdd(true, // transactional
                         256, // step1 num records
                         false, // step2 autocommit
                         150, // step3 num records
                         false, // step4 abort
                         150); // step5 num records
    }
    /**
     * Commit the initial insert first, then truncate with an auto-commit txn
     * and re-insert 150 records, which remain.
     */
    public void testEnvTruncateAutocommit()
        throws Throwable {
        doTruncateAndAdd(true, // transactional
                         256, // step1 num records
                         true, // step2 autocommit
                         150, // step3 num records
                         false, // step4 abort
                         150); // step5 num records
    }
    /** Truncate a database that was never populated, then add 150 records. */
    public void testEnvTruncateNoFirstInsert()
        throws Throwable {
        doTruncateAndAdd(true, // transactional
                         0, // step1 num records
                         false, // step2 autocommit
                         150, // step3 num records
                         false, // step4 abort
                         150); // step5 num records
    }
    /** Same insert/truncate/re-insert flow in a non-transactional environment. */
    public void testNoTxnEnvTruncateCommit()
        throws Throwable {
        doTruncateAndAdd(false, // transactional
                         256, // step1 num records
                         false, // step2 autocommit
                         150, // step3 num records
                         false, // step4 abort
                         150); // step5 num records
    }
    /** Truncate inside an explicit txn and commit: the database ends up empty. */
    public void testTruncateCommit()
        throws Throwable {
        doTruncate(false, false);
    }
    /** Truncate with an auto-commit txn: the database ends up empty. */
    public void testTruncateCommitAutoTxn()
        throws Throwable {
        doTruncate(false, true);
    }
    /** Truncate inside an explicit txn and abort: all original records remain. */
    public void testTruncateAbort()
        throws Throwable {
        doTruncate(true, false);
    }
    /*
     * SR 10386, 11252. This used to deadlock, because the truncate did not
     * use an AutoTxn on the new mapLN, and the put operations conflicted with
     * the held write lock.
     */
    public void testWriteAfterTruncate()
        throws Throwable {
        try {
            Database myDb = initEnvAndDb(true);
            myDb.close();
            // Truncate an empty database inside an explicit txn; count must be 0.
            Transaction txn = env.beginTransaction(null, null);
            long truncateCount = env.truncateDatabase(txn, DB_NAME, true);
            assertEquals(0, truncateCount);
            txn.commit();
            close(env);
        } catch (Throwable t) {
            t.printStackTrace();
            throw t;
        }
    }
    /** Truncating an empty deferred-write database must succeed with a count of 0. */
    public void testTruncateEmptyDeferredWriteDatabase()
        throws Throwable {
        try {
            EnvironmentConfig envConfig = TestUtils.initEnvConfig();
            envConfig.setTransactional(false);
            envConfig.setConfigParam
                (EnvironmentParams.ENV_CHECK_LEAKS.getName(), "false");
            envConfig.setAllowCreate(true);
            env = create(envHome, envConfig);
            DatabaseConfig dbConfig = new DatabaseConfig();
            dbConfig.setTransactional(false);
            dbConfig.setSortedDuplicates(true);
            dbConfig.setAllowCreate(true);
            dbConfig.setDeferredWrite(true); // the case under test: deferred-write mode
            Database myDb = env.openDatabase(null, DB_NAME, dbConfig);
            myDb.close();
            long truncateCount;
            truncateCount = env.truncateDatabase(null, DB_NAME, true);
            assertEquals(0, truncateCount);
        } catch (Throwable T) {
            T.printStackTrace();
            throw T;
        }
    }
    /** Truncate must work in an environment with locking disabled. */
    public void testTruncateNoLocking()
        throws Throwable {
        try {
            EnvironmentConfig envConfig = TestUtils.initEnvConfig();
            envConfig.setTransactional(false);
            envConfig.setConfigParam
                (EnvironmentConfig.ENV_IS_LOCKING, "false"); // the case under test
            envConfig.setAllowCreate(true);
            env = create(envHome, envConfig);
            DatabaseConfig dbConfig = new DatabaseConfig();
            dbConfig.setTransactional(false);
            dbConfig.setAllowCreate(true);
            Database myDb = env.openDatabase(null, DB_NAME, dbConfig);
            myDb.put(null, new DatabaseEntry(new byte[0]),
                     new DatabaseEntry(new byte[0]));
            myDb.close();
            long truncateCount;
            truncateCount = env.truncateDatabase(null, DB_NAME, true);
            assertEquals(1, truncateCount);
        } catch (Throwable T) {
            T.printStackTrace();
            throw T;
        }
    }
    /**
     * 1. Populate a database.
     * 2. Truncate.
     * 3. Commit or abort.
     * 4. Check that database has the right amount of records.
     */
    private void doTruncate(boolean abort, boolean useAutoTxn)
        throws Throwable {
        try {
            // An auto-commit truncate cannot be aborted; with an explicit txn,
            // an abort restores all NUM_RECS pre-truncate records.
            int numRecsAfterTruncate =
                useAutoTxn ? 0 : ((abort) ? NUM_RECS : 0);
            Database myDb = initEnvAndDb(true);
            DatabaseEntry key = new DatabaseEntry();
            DatabaseEntry data = new DatabaseEntry();
            /* Populate database. */
            for (int i = NUM_RECS; i > 0; i--) {
                key.setData(TestUtils.getTestArray(i));
                data.setData(TestUtils.getTestArray(i));
                assertEquals(OperationStatus.SUCCESS,
                             myDb.put(null, key, data));
            }
            /* Truncate, check the count, commit. */
            myDb.close();
            long truncateCount = 0;
            if (useAutoTxn) {
                truncateCount = env.truncateDatabase(null, DB_NAME, true);
            } else {
                Transaction txn = env.beginTransaction(null, null);
                truncateCount = env.truncateDatabase(txn, DB_NAME, true);
                if (abort) {
                    txn.abort();
                } else {
                    txn.commit();
                }
            }
            // truncateDatabase reports the pre-truncate record count regardless
            // of whether the txn is later aborted.
            assertEquals(NUM_RECS, truncateCount);
            /* Do a cursor read, make sure there's the right amount of data. */
            DatabaseConfig dbConfig = new DatabaseConfig();
            dbConfig.setSortedDuplicates(true);
            dbConfig.setTransactional(true);
            myDb = env.openDatabase(null, DB_NAME, dbConfig);
            int count = 0;
            Transaction txn = null;
            // Replicated test runs wrap the read in a transaction.
            if (DualTestCase.isReplicatedTest(getClass())) {
                txn = env.beginTransaction(null, null);
            }
            Cursor cursor = myDb.openCursor(txn, null);
            while (cursor.getNext(key, data, LockMode.DEFAULT) ==
                   OperationStatus.SUCCESS) {
                count++;
            }
            assertEquals(numRecsAfterTruncate, count);
            cursor.close();
            if (txn != null) {
                txn.commit();
            }
            /* Recover the database. */
            myDb.close();
            close(env);
            myDb = initEnvAndDb(true);
            /* Check data after recovery. */
            count = 0;
            if (DualTestCase.isReplicatedTest(getClass())) {
                txn = env.beginTransaction(null, null);
            }
            cursor = myDb.openCursor(txn, null);
            while (cursor.getNext(key, data, LockMode.DEFAULT) ==
                   OperationStatus.SUCCESS) {
                count++;
            }
            assertEquals(numRecsAfterTruncate, count);
            cursor.close();
            if (txn != null) {
                txn.commit();
            }
            myDb.close();
            close(env);
        } catch (Throwable t) {
            t.printStackTrace();
            throw t;
        }
    }
    /**
     * This method can be configured to execute a number of these steps:
     * 1. Populate a database with 0 or N records.
     * 2. Truncate.
     * 3. Add more records.
     * 4. Abort or commit.
     * 5. Check that database has the right amount of records.
     */
    private void doTruncateAndAdd(boolean transactional,
                                  int step1NumRecs,
                                  boolean step2AutoCommit,
                                  int step3NumRecs,
                                  boolean step4Abort,
                                  int step5NumRecs)
        throws Throwable {
        String databaseName = "testdb";
        try {
            /* Use enough records to force a split. */
            EnvironmentConfig envConfig = TestUtils.initEnvConfig();
            envConfig.setTransactional(transactional);
            envConfig.setAllowCreate(true);
            envConfig.setConfigParam(EnvironmentParams.NODE_MAX.getName(),
                                     "6");
            env = create(envHome, envConfig);
            /* Make a db and open it. */
            DatabaseConfig dbConfig = new DatabaseConfig();
            dbConfig.setTransactional(transactional);
            dbConfig.setAllowCreate(true);
            Database myDb = env.openDatabase(null, databaseName, dbConfig);
            DatabaseEntry key = new DatabaseEntry();
            DatabaseEntry data = new DatabaseEntry();
            /* Populate database with step1NumRecs. */
            Transaction txn = null;
            if (transactional) {
                txn = env.beginTransaction(null, null);
            }
            for (int i = 0; i < step1NumRecs; i++) {
                IntegerBinding.intToEntry(i, key);
                IntegerBinding.intToEntry(i, data);
                assertEquals(OperationStatus.SUCCESS,
                             myDb.put(txn, key, data));
            }
            myDb.close();
            /* Truncate, possibly using auto-commit (commit the insert txn first). */
            if (step2AutoCommit && transactional) {
                txn.commit();
                txn = null;
            }
            /*
             * Before truncate, there should be two databases in the system:
             * the testDb database, and the FileSummary database.
             */
            countLNs(2, 2);
            long truncateCount = env.truncateDatabase(txn, databaseName, true);
            assertEquals(step1NumRecs, truncateCount);
            /*
             * The naming tree should always have two entries now, the
             * mapping tree might have 2 or 3, depending on abort.
             */
            if (step2AutoCommit || !transactional) {
                countLNs(2, 2);
            } else {
                countLNs(2, 3);
            }
            /* Add more records. */
            myDb = env.openDatabase(txn, databaseName, dbConfig);
            checkCount(myDb, txn, 0);
            for (int i = 0; i < step3NumRecs; i++) {
                IntegerBinding.intToEntry(i, key);
                IntegerBinding.intToEntry(i, data);
                assertEquals(OperationStatus.SUCCESS,
                             myDb.put(txn, key, data));
            }
            checkCount(myDb, txn, step3NumRecs);
            myDb.close();
            if (txn != null) {
                if (step4Abort) {
                    txn.abort();
                } else {
                    txn.commit();
                }
            }
            /* Now the mapping tree should only have two entries. */
            countLNs(2, 2);
            /* Do a cursor read, make sure there's the right amount of data. */
            myDb = env.openDatabase(null, databaseName, dbConfig);
            checkCount(myDb, null, step5NumRecs);
            myDb.close();
            close(env);
            /* Check data after recovery. */
            env = create(envHome, envConfig);
            myDb = env.openDatabase(null, databaseName, dbConfig);
            checkCount(myDb, null, step5NumRecs);
            myDb.close();
            close(env);
        } catch (Throwable t) {
            t.printStackTrace();
            throw t;
        }
    }
    /**
     * Test that truncateDatabase and removeDatabase can be called after
     * replaying an LN in that database during recovery. This is to test a fix
     * to a bug where truncateDatabase caused a hang because DbTree.releaseDb
     * was not called by RecoveryUtilizationTracker. [#16329]
     */
    public void testTruncateAfterRecovery()
        throws Throwable {
        DatabaseEntry key = new DatabaseEntry(new byte[10]);
        DatabaseEntry data = new DatabaseEntry(new byte[10]);
        Database db = initEnvAndDb(true);
        EnvironmentImpl envImpl = DbInternal.getEnvironmentImpl(env);
        /* Write a single record for recovery. */
        OperationStatus status = db.put(null, key, data);
        assertSame(OperationStatus.SUCCESS, status);
        /* Close without a checkpoint and run recovery. */
        db.close();
        envImpl.abnormalClose();
        // Drop the stale handles so initEnvAndDb creates a fresh environment.
        envImpl = null;
        env = null;
        db = initEnvAndDb(true);
        /* Ensure that truncateDatabase does not hang. */
        db.close();
        long truncateCount = env.truncateDatabase(null, DB_NAME, true);
        assertEquals(1, truncateCount);
        /* removeDatabase should also work. */
        env.removeDatabase(null, DB_NAME);
        assertTrue(!env.getDatabaseNames().contains(DB_NAME));
        close(env);
        env = null;
    }
    /**
     * Set up the environment and db.
     *
     * @param isTransactional whether both environment and database are transactional
     * @return the open database; the environment is stored in the {@code env} field
     */
    private Database initEnvAndDb(boolean isTransactional)
        throws DatabaseException {
        EnvironmentConfig envConfig = TestUtils.initEnvConfig();
        envConfig.setTransactional(isTransactional);
        // Leak checking disabled — presumably because some tests close
        // abnormally (see testTruncateAfterRecovery); confirm if changing.
        envConfig.setConfigParam
            (EnvironmentParams.ENV_CHECK_LEAKS.getName(), "false");
        // A small node max forces tree splits even with few records.
        envConfig.setConfigParam(EnvironmentParams.NODE_MAX.getName(), "6");
        envConfig.setAllowCreate(true);
        env = create(envHome, envConfig);
        /* Make a db and open it. */
        DatabaseConfig dbConfig = new DatabaseConfig();
        dbConfig.setTransactional(isTransactional);
        dbConfig.setSortedDuplicates(true);
        dbConfig.setAllowCreate(true);
        Database myDb = env.openDatabase(null, DB_NAME, dbConfig);
        return myDb;
    }
private void checkCount(Database db, Transaction txn, int expectedCount)
throws DatabaseException {
Cursor cursor = db.openCursor(txn, null);
int count = 0;
DatabaseEntry key = new DatabaseEntry();
DatabaseEntry data = new DatabaseEntry();
while (cursor.getNext(key, data, null) == OperationStatus.SUCCESS) {
count++;
}
assertEquals(expectedCount, count);
cursor.close();
}
    /**
     * Use stats to count the number of LNs in the id and name mapping
     * trees. It's not possible to use Cursor, and stats are easier to use
     * than CursorImpl. This relies on the fact that the stats actually
     * correctly account for deleted entries.
     *
     * @param expectNameLNs expected leaf-node count in the naming tree
     * @param expectMapLNs expected leaf-node count in the id mapping tree
     */
    private void countLNs(int expectNameLNs,
                          int expectMapLNs)
        throws DatabaseException {
        EnvironmentImpl envImpl = DbInternal.getEnvironmentImpl(env);
        /* check number of LNs in the id mapping tree. */
        DatabaseImpl mapDbImpl =
            envImpl.getDbTree().getDb(DbTree.ID_DB_ID);
        // mapDbImpl.getTree().dump();
        BtreeStats mapStats =
            (BtreeStats) mapDbImpl.stat(new StatsConfig());
        assertEquals(expectMapLNs,
                     (mapStats.getLeafNodeCount()));
        /* check number of LNs in the naming tree. */
        DatabaseImpl nameDbImpl =
            envImpl.getDbTree().getDb(DbTree.NAME_DB_ID);
        BtreeStats nameStats =
            (BtreeStats) nameDbImpl.stat(new StatsConfig());
        assertEquals(expectNameLNs,
                     (nameStats.getLeafNodeCount()));
    }
}
| |
// GyroITG3200 I2C device class file
// Based on InvenSense ITG-3200 datasheet rev. 1.4, 3/30/2010 (PS-ITG-3200A-00-01.4)
// Original work by 7/31/2011 by Jeff Rowberg <jeff@rowberg.net>
// Java implementation for First Robotics Competition Team 2521 using WPILibj
// 1/27/2015 by Joe Bussell <joe dot bussell at gmail dot com>
// Updates should (hopefully) always be available at https://github.com/bussell
//
// Changelog:
// 2011-07-31 - initial release
// 2015-01-30 - Java FRC revision
/* ============================================
GyroITG3200 device library code is placed under the MIT license
Copyright (c) 2011 by Jeff Rowberg
Copyright (c) 2015 Joe Bussell
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
===============================================
*/
package com.team2576.lib.sensors;
import java.util.Arrays;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.PIDSource;
import edu.wpi.first.wpilibj.SensorBase;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.livewindow.LiveWindowSendable;
import edu.wpi.first.wpilibj.tables.ITable;
import edu.wpi.first.wpilibj.I2C;
/**
* @author Joe Bussell Team 2521 Mentor
* With thanks to the c++ version authors at: https://github.com/jrowberg/i2cdevlib/tree/master/Arduino/ITG3200
*
*/
public class GyroITG3200 extends SensorBase implements PIDSource, LiveWindowSendable
{
// 7-bit I2C slave address of the gyro; set once by the constructor.
int devAddr;
// Scratch buffer. NOTE(review): appears unused — the read methods below
// allocate their own local buffers; confirm before relying on it.
byte buffer[] = new byte[7];
// When true, every register write is read back and verified on the bus.
public static final boolean DEBUG = true;
// WPILib I2C handle used for all bus traffic.
I2C m_i2c;
/** Default constructor, uses default I2C address.
 * @param port the roboRIO I2C port the gyro is wired to
 * @see ITG3200_DEFAULT_ADDRESS
 */
public GyroITG3200( I2C.Port port )
{
    // Delegate to the explicit-address constructor so the I2C and
    // LiveWindow setup lives in exactly one place (the original
    // duplicated it in both constructors).
    this( port, (byte) ITG3200_DEFAULT_ADDRESS );
}
/** Specific address constructor.
 * @param port the roboRIO I2C port the gyro is wired to
 * @param address I2C address (7-bit slave address)
 * @see ITG3200_DEFAULT_ADDRESS
 * @see ITG3200_ADDRESS_AD0_LOW
 * @see ITG3200_ADDRESS_AD0_HIGH
 */
public GyroITG3200( I2C.Port port, byte address )
{
devAddr = address;
m_i2c = new I2C( port, address );
// TODO: This report is incorrect. Need to create instance for I2C ITG3200 Gyro
//UsageReporting.report( tResourceType.kResourceType_I2C, tInstances.?? );
LiveWindow.addSensor( "ITG3200_Gyro_I2C", port.getValue(), this );
}
/** Power on and prepare for general usage.
 * This will activate the gyroscope, so be sure to adjust the power settings
 * after you call this method if you want it to enter standby mode, or another
 * less demanding mode of operation. This also sets the gyroscope to use the
 * X-axis gyro for a clock source. Note that it doesn't have any delays in the
 * routine, which means you might want to add ~50ms to be safe if you happen
 * to need to read gyro data immediately after initialization. The data will
 * flow in either case, but the first reports may have higher error offsets.
 */
public void initialize()
{
if ( !testConnection() )
{
// Connection check failed: report it and fall through (no retry here).
DriverStation.reportError( "Test connection failed!", false );
}
setFullScaleRange( ITG3200_FULLSCALE_2000 ); // 0x03 is the only documented operating range
setClockSource( ITG3200_CLOCK_PLL_XGYRO ); // PLL with X-gyro reference
setIntDeviceReadyEnabled( true );
setIntDataReadyEnabled( true );
}
/** Verify the I2C connection.
 * Reads the WHO_AM_I register and compares it against the fixed
 * ITG-3200 device identity.
 * @return true if the expected device ID (0x34) was read back
 */
public boolean testConnection()
{
    final byte expectedId = 0b110100; // 0x34, fixed ITG-3200 identity
    return getDeviceID() == expectedId;
}
/**
 * Read-modify-write a single bit of a device register.
 *
 * @param register register address
 * @param bit bit number (0-7) to change
 * @param value true to set the bit, false to clear it
 */
private void writeBit( int register, byte bit, boolean value )
{
byte[] buf = new byte[1];
ReadI2CBuffer( register, 1, buf);
// Set or clear only the requested bit, preserving all the others.
byte newValue = (byte) ( value ? (buf[0] | (1 << bit ))
: ( buf[0] & ~(1 << bit ) ) );
writeI2CBuffer( register, newValue );
if ( DEBUG )
{
// Read back and verify that the write actually landed on the device.
ReadI2CBuffer( register, 1, buf);
if ( newValue != buf[0] )
{
System.out.println( "Expected " + newValue + " seeing " + buf[0] );
}
}
}
/**
 * Write {@code value} into the {@code numBits}-wide field of {@code original}
 * whose most-significant bit is {@code bit} (datasheet-style bit/length
 * addressing), preserving every bit outside the field.
 *
 * @param original the byte being updated
 * @param bit bit position (0-7) of the field's most-significant bit
 * @param numBits width of the field in bits (0-7)
 * @param value non-negative value to store; must fit in numBits bits
 * @return original with the field replaced by value
 * @throws IllegalArgumentException if any argument is out of range
 */
public static byte updateByte( byte original, int bit, int numBits, byte value )
{
    if ( numBits > 7 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes. \n Value: "
            + GetBinaryString(value)
            + "\n Number bits: " + numBits );
    }
    if ( bit > 7 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes. \n Value: "
            + GetBinaryString(value)
            + "\n Bit: " + bit );
    }
    if ( bit < numBits - 1 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes. \n Value: "
            + GetBinaryString(value)
            + "\n Bit: " + bit
            + "\n Number bits: " + numBits );
    }
    // A numBits-wide field holds at most (2^numBits - 1). The original test
    // used `value > 2^numBits`, an off-by-one that let value == 2^numBits
    // through and silently corrupted the bit just above the field.
    if ( value >= (1 << numBits) )
    {
        throw new IllegalArgumentException( "Cannot encode a number this big using the number of bits requested \n Value: "
            + GetBinaryString(value)
            + "\n Number bits: " + numBits );
    }
    if ( bit < 0 || numBits < 0 || value < 0 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes. "
            + "\n All inputs should be greater than 0. "
            + "\n Value: " + GetBinaryString(value)
            + "\n Bit: " + bit
            + "\n Number bits: " + numBits );
    }
    byte mask = getMask( bit, numBits );                 // 1s everywhere OUTSIDE the field
    byte maskedOriginal = (byte) ( ( original & mask ) & 0xFF );          // clear the field
    byte shiftedValue = (byte) ( (value << (1 + bit - numBits) ) & 0xFF ); // align value into the field
    return (byte) ( ( shiftedValue | maskedOriginal ) & 0xFF );
}
/** Render a byte as a fixed-width, zero-padded 8-character binary string (MSB first). */
public static String GetBinaryString( byte value )
{
    StringBuilder bits = new StringBuilder(8);
    for ( int i = 7; i >= 0; i-- )
    {
        bits.append( ( value >> i ) & 1 );
    }
    return bits.toString();
}
/**
 * Write one byte to a device register; when DEBUG is set, read it back
 * and report a mismatch to the DriverStation.
 *
 * @param registerAddress register to write
 * @param data byte value to write
 * @return the value returned by I2C.write(). NOTE(review): WPILib's
 * I2C.write() returns true on transfer ABORT, so confirm the intended
 * polarity before treating this as "success".
 */
public boolean writeI2CBuffer(int registerAddress, int data)
{
boolean retVal = false;
try
{
retVal = m_i2c.write( registerAddress, data );
if ( DEBUG )
{
// Read-back verification of the write.
byte[] buf = new byte[1];
ReadI2CBuffer( registerAddress, 1, buf);
if ( data != buf[0] )
{
DriverStation.reportError( "Expected " + data + "\nseeing " + buf[0] + "\n", false );
}
}
}
catch (Throwable t)
{
// Report and swallow: a failed write must not crash the robot loop.
DriverStation.reportError("ERROR Unhandled exception: " + t.toString() + " at " + Arrays.toString(t.getStackTrace()), false);
}
return retVal;
}
//
// I2Cdev::writeBits(devAddr, ITG3200_RA_WHO_AM_I, ITG3200_DEVID_BIT, ITG3200_DEVID_LENGTH, id);
/**
 * Read-modify-write a multi-bit field of a device register: read the
 * current byte, splice in {@code value} via updateByte(), write it back.
 *
 * @param register register address
 * @param bit bit position (0-7) of the field's most-significant bit
 * @param numBits width of the field in bits
 * @param value right-aligned value to store in the field
 */
private void writeBits( int register, int bit, int numBits, byte value )
{
try
{
byte[] rawData = new byte[1];
ReadI2CBuffer( register, 1, rawData );
byte newValue = updateByte( rawData[0], bit, numBits, value );
writeI2CBuffer( register, newValue );
}
catch (Throwable t)
{
// Report and swallow: bus errors must not crash the robot loop.
DriverStation.reportError("ERROR Unhandled exception: " + t.toString() + " at " + Arrays.toString(t.getStackTrace()), false);
}
}
/**
 * Read one register and test a single bit.
 *
 * @param register register address to read
 * @param bit bit NUMBER (0-7) to test, same convention as writeBit()
 * @return true if that bit of the register is set
 */
private boolean readBit( int register, byte bit )
{
    byte buf[] = new byte[1];
    ReadI2CBuffer( register, 1, buf);
    // Was `buf[0] & bit`, which used the bit NUMBER as a mask: always false
    // for bit 0 (e.g. RAW_RDY_EN) and wrong for bit 7 (ACTL). Build the
    // mask the same way writeBit() does.
    return ( buf[0] & (1 << bit) ) != 0;
}
// Extract the numBits-wide field whose most-significant bit is `bit` from
// `bitField`, returning it right-aligned (datasheet bit/length addressing).
private static byte getBits( byte bitField, int bit, int numBits )
{
    if ( numBits > 7 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes."
            + "\n Number bits: " + numBits );
    }
    if ( bit > 7 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes. "
            + "\n Bit: " + bit );
    }
    if ( bit < numBits - 1 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes. "
            + "\n Bit: " + bit
            + "\n Number bits: " + numBits );
    }
    if ( bit < 0 || numBits < 0 )
    {
        throw new IllegalArgumentException( "This routine is intended to use 8-bit bytes. "
            + "\n All inputs should be greater than 0. "
            + "\n Bit: " + bit
            + "\n Number bits: " + numBits );
    }
    // ~getMask() has 1s only INSIDE the field; clear everything outside it.
    byte mask = (byte)( ~getMask( bit, numBits ) & 0xFF );
    byte maskedInput = (byte) ( ( bitField & mask) & 0xFF );
    // Mask to 8 bits BEFORE shifting: a byte with bit 7 set promotes to a
    // negative int, and the old unmasked `maskedInput >>> n` dragged
    // sign-extension bits into the result (e.g. field {bit=7, numBits=1}
    // came back as 0xFF instead of 1).
    return (byte) ( ( ( maskedInput & 0xFF ) >>> ( 1 + bit - numBits ) ) & 0xFF );
}
// Build a mask with 1s in every bit position OUTSIDE the field described by
// (bit, numBits); the field spans bit positions [bit - numBits + 1 .. bit].
private static byte getMask(int bit, int numBits)
{
    int outside = 0;
    for ( int i = 0; i <= 7; i++ )
    {
        boolean inField = ( i <= bit ) && ( i > bit - numBits );
        if ( !inField )
        {
            outside |= ( 1 << i );
        }
    }
    return (byte) ( outside & 0xFF );
}
// Read a single device register and return its current raw value.
private byte getRegisterByte( int register )
{
    byte[] raw = new byte[1];
    ReadI2CBuffer( register, 1, raw );
    return raw[0];
}
/** Get specified bits from the specified register.
 * Reads the register, then extracts the field whose most-significant bit
 * is {@code bit} and whose width is {@code numBits}, right-aligned.
 * Example: from 0b10110100, bit 3 with 6 requested bits is not valid;
 * bit 5 with 6 bits yields 0b00110100.
 */
private byte getRegisterBits( int register, int bit, int numBits )
{
    return getBits( getRegisterByte( register ), bit, numBits );
}
// WHO_AM_I register
/** Get Device ID.
 * This register is used to verify the identity of the device (0b110100).
 * @return Device ID (should be 0x34, 52 dec, 64 oct)
 * @see ITG3200_RA_WHO_AM_I
 * @see ITG3200_DEVID_BIT
 * @see ITG3200_DEVID_LENGTH
 */
public byte getDeviceID()
{
return getRegisterBits( ITG3200_RA_WHO_AM_I, ITG3200_DEVID_BIT, ITG3200_DEVID_LENGTH );
}
/** Set Device ID.
 * Write a new ID into the WHO_AM_I register (no idea why this should ever be
 * necessary though).
 * @param id New device ID to set.
 * @see getDeviceID()
 * @see ITG3200_RA_WHO_AM_I
 * @see ITG3200_DEVID_BIT
 * @see ITG3200_DEVID_LENGTH
 */
public void setDeviceID(byte id)
{
writeBits( ITG3200_RA_WHO_AM_I, ITG3200_DEVID_BIT, ITG3200_DEVID_LENGTH, id );
}
// SMPLRT_DIV register
/** Get sample rate.
 * This register determines the sample rate of the ITG-3200 gyros. The gyros'
 * outputs are sampled internally at either 1kHz or 8kHz, determined by the
 * DLPF_CFG setting (see register 22). This sampling is then filtered digitally
 * and delivered into the sensor registers after the number of cycles determined
 * by this register. The sample rate is given by the following formula:
 *
 * F_sample = F_internal / (divider+1), where F_internal is either 1kHz or 8kHz
 *
 * As an example, if the internal sampling is at 1kHz, then setting this
 * register to 7 would give the following:
 *
 * F_sample = 1kHz / (7 + 1) = 125Hz, or 8ms per sample
 *
 * @return Current sample rate (the raw divider byte, not Hz)
 * @see setDLPFBandwidth()
 * @see ITG3200_RA_SMPLRT_DIV
 */
public byte getRate()
{
return getRegisterByte( ITG3200_RA_SMPLRT_DIV );
}
/** Set sample rate.
 * @param rate New sample rate (raw divider byte; see getRate() for the formula)
 * @see getRate()
 * @see setDLPFBandwidth()
 * @see ITG3200_RA_SMPLRT_DIV
 */
public void setRate(byte rate)
{
writeI2CBuffer( ITG3200_RA_SMPLRT_DIV, rate );
}
// DLPF_FS register
/** Full-scale range.
 * The FS_SEL parameter allows setting the full-scale range of the gyro sensors,
 * as described in the table below. The power-on-reset value of FS_SEL is 00h.
 * Set to 03h for proper operation.
 *
 * 0 = Reserved
 * 1 = Reserved
 * 2 = Reserved
 * 3 = +/- 2000 degrees/sec
 *
 * @return Current full-scale range setting
 * @see ITG3200_FULLSCALE_2000
 * @see ITG3200_RA_DLPF_FS
 * @see ITG3200_DF_FS_SEL_BIT
 * @see ITG3200_DF_FS_SEL_LENGTH
 */
public byte getFullScaleRange()
{
return getRegisterBits( ITG3200_RA_DLPF_FS, ITG3200_DF_FS_SEL_BIT, ITG3200_DF_FS_SEL_LENGTH );
}
/** Set full-scale range setting.
 * @param range New full-scale range value
 * @see getFullScaleRange()
 * @see ITG3200_FULLSCALE_2000
 * @see ITG3200_RA_DLPF_FS
 * @see ITG3200_DF_FS_SEL_BIT
 * @see ITG3200_DF_FS_SEL_LENGTH
 */
public void setFullScaleRange(byte range)
{
writeBits( ITG3200_RA_DLPF_FS, ITG3200_DF_FS_SEL_BIT, ITG3200_DF_FS_SEL_LENGTH, range );
}
/** Get digital low-pass filter bandwidth.
 * The DLPF_CFG parameter sets the digital low pass filter configuration. It
 * also determines the internal sampling rate used by the device as shown in
 * the table below.
 *
 * DLPF_CFG | Low-Pass Filter Bandwidth | Internal Sample Rate
 * ---------+---------------------------+---------------------
 * 0        | 256Hz                     | 8kHz
 * 1        | 188Hz                     | 1kHz
 * 2        | 98Hz                      | 1kHz
 * 3        | 42Hz                      | 1kHz
 * 4        | 20Hz                      | 1kHz
 * 5        | 10Hz                      | 1kHz
 * 6        | 5Hz                       | 1kHz
 * 7        | Reserved                  | Reserved
 *
 * @return DLFP bandwidth setting
 * @see ITG3200_RA_DLPF_FS
 * @see ITG3200_DF_DLPF_CFG_BIT
 * @see ITG3200_DF_DLPF_CFG_LENGTH
 */
public byte getDLPFBandwidth()
{
return getRegisterBits( ITG3200_RA_DLPF_FS, ITG3200_DF_DLPF_CFG_BIT, ITG3200_DF_DLPF_CFG_LENGTH );
}
/** Set digital low-pass filter bandwidth.
 * @param bandwidth New DLFP bandwidth setting
 * @see getDLPFBandwidth()
 * @see ITG3200_DLPF_BW_256
 * @see ITG3200_RA_DLPF_FS
 * @see ITG3200_DF_DLPF_CFG_BIT
 * @see ITG3200_DF_DLPF_CFG_LENGTH
 */
public void setDLPFBandwidth(byte bandwidth)
{
writeBits( ITG3200_RA_DLPF_FS, ITG3200_DF_DLPF_CFG_BIT, ITG3200_DF_DLPF_CFG_LENGTH, bandwidth );
}
// INT_CFG register
/** Get interrupt logic level mode.
 * Will be set 0 for active-high, 1 for active-low.
 * @return Current interrupt mode (0=active-high, 1=active-low)
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_ACTL_BIT
 */
public boolean getInterruptMode()
{
return readBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_ACTL_BIT );
}
/** Set interrupt logic level mode.
 * @param mode New interrupt mode (false=active-high, true=active-low)
 * @see getInterruptMode()
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_ACTL_BIT
 */
public void setInterruptMode(boolean mode)
{
writeBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_ACTL_BIT, mode );
}
/** Get interrupt drive mode.
 * Will be set 0 for push-pull, 1 for open-drain.
 * @return Current interrupt drive mode (0=push-pull, 1=open-drain)
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_OPEN_BIT
 */
public boolean getInterruptDrive()
{
return readBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_OPEN_BIT );
}
/** Set interrupt drive mode.
 * @param drive New interrupt drive mode (false=push-pull, true=open-drain)
 * @see getInterruptDrive()
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_OPEN_BIT
 */
public void setInterruptDrive(boolean drive)
{
writeBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_OPEN_BIT, drive );
}
/** Get interrupt latch mode.
 * Will be set 0 for 50us-pulse, 1 for latch-until-int-cleared.
 * @return Current latch mode (0=50us-pulse, 1=latch-until-int-cleared)
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_LATCH_INT_EN_BIT
 */
public boolean getInterruptLatch()
{
return readBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_LATCH_INT_EN_BIT );
}
/** Set interrupt latch mode.
 * @param latch New latch mode (false=50us-pulse, true=latch-until-int-cleared)
 * @see getInterruptLatch()
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_LATCH_INT_EN_BIT
 */
public void setInterruptLatch(boolean latch)
{
writeBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_LATCH_INT_EN_BIT, latch );
}
/** Get interrupt latch clear mode.
 * Will be set 0 for status-read-only, 1 for any-register-read.
 * @return Current latch clear mode (0=status-read-only, 1=any-register-read)
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_INT_ANYRD_2CLEAR_BIT
 */
public boolean getInterruptLatchClear()
{
return readBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_INT_ANYRD_2CLEAR_BIT );
}
/** Set interrupt latch clear mode.
 * @param clear New latch clear mode (false=status-read-only, true=any-register-read)
 * @see getInterruptLatchClear()
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_INT_ANYRD_2CLEAR_BIT
 */
public void setInterruptLatchClear(boolean clear)
{
writeBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_INT_ANYRD_2CLEAR_BIT, clear );
}
/** Get "device ready" interrupt enabled setting.
 * Will be set 0 for disabled, 1 for enabled.
 * @return Current interrupt enabled setting
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_ITG_RDY_EN_BIT
 */
public boolean getIntDeviceReadyEnabled()
{
return readBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_ITG_RDY_EN_BIT );
}
/** Set "device ready" interrupt enabled setting.
 * @param enabled New interrupt enabled setting
 * @see getIntDeviceReadyEnabled()
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_ITG_RDY_EN_BIT
 */
public void setIntDeviceReadyEnabled( boolean enabled )
{
writeBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_ITG_RDY_EN_BIT, enabled );
}
/** Get "data ready" interrupt enabled setting.
 * Will be set 0 for disabled, 1 for enabled.
 * @return Current interrupt enabled setting
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_RAW_RDY_EN_BIT
 */
public boolean getIntDataReadyEnabled()
{
return readBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_RAW_RDY_EN_BIT );
}
/** Set "data ready" interrupt enabled setting.
 * @param enabled New interrupt enabled setting
 * @see getIntDataReadyEnabled()
 * @see ITG3200_RA_INT_CFG
 * @see ITG3200_INTCFG_RAW_RDY_EN_BIT
 */
public void setIntDataReadyEnabled( boolean enabled )
{
writeBit( ITG3200_RA_INT_CFG, ITG3200_INTCFG_RAW_RDY_EN_BIT, enabled );
}
// INT_STATUS register
/** Get Device Ready interrupt status.
 * The ITG_RDY interrupt indicates that the PLL is ready and gyroscopic data can
 * be read.
 * @return Device Ready interrupt status
 * @see ITG3200_RA_INT_STATUS
 * @see ITG3200_INTSTAT_ITG_RDY_BIT
 */
public boolean getIntDeviceReadyStatus()
{
return readBit( ITG3200_RA_INT_STATUS, ITG3200_INTSTAT_ITG_RDY_BIT );
}
/** Get Data Ready interrupt status.
 * In normal use, the RAW_DATA_RDY interrupt is used to determine when new
 * sensor data is available in and of the sensor registers (27 to 32).
 * @return Data Ready interrupt status
 * @see ITG3200_RA_INT_STATUS
 * @see ITG3200_INTSTAT_RAW_DATA_READY_BIT
 */
public boolean getIntDataReadyStatus()
{
return readBit( ITG3200_RA_INT_STATUS, ITG3200_INTSTAT_RAW_DATA_READY_BIT );
}
// TEMP_OUT_* registers
/** Get current internal temperature.
 * Reads TEMP_OUT_H/TEMP_OUT_L and combines them big-endian.
 * @return Temperature reading in 16-bit 2's complement format (raw counts)
 * @see ITG3200_RA_TEMP_OUT_H
 */
public short getTemperature()
{
    byte[] buf = new byte[2];
    ReadI2CBuffer( ITG3200_RA_TEMP_OUT_H, 2, buf);
    // Mask the low byte: the old `| (short)buf[1]` sign-extended it, so any
    // low byte >= 0x80 wiped out the high byte with 1-bits.
    return (short) ( (buf[0] << 8) | (buf[1] & 0xFF) );
}
// GYRO_*OUT_* registers
/** Simple value holder for one sample of all three gyro axes (raw counts). */
public static class AllAxes
{
    public short XAxis;
    public short YAxis;
    public short ZAxis;
}
/** Get 3-axis gyroscope readings.
 * Reads the six GYRO_*OUT registers in one burst and combines each
 * high/low pair big-endian.
 * @return the three raw 16-bit 2's complement axis readings
 * @see ITG3200_RA_GYRO_XOUT_H
 */
public AllAxes getRotation()
{
    AllAxes data = new AllAxes();
    byte[] buffer = new byte[6];
    ReadI2CBuffer( ITG3200_RA_GYRO_XOUT_H, 6, buffer);
    // Mask each low byte: the old `| buffer[n]` sign-extended it, so any
    // low byte >= 0x80 corrupted the high byte of the reading.
    data.XAxis = (short) ( (buffer[0] << 8) | (buffer[1] & 0xFF) );
    data.YAxis = (short) ( (buffer[2] << 8) | (buffer[3] & 0xFF) );
    data.ZAxis = (short) ( (buffer[4] << 8) | (buffer[5] & 0xFF) );
    return data;
}
/**
 * Read {@code count} bytes starting at {@code registerAddress} into
 * {@code buffer}. Bus errors are reported to the DriverStation and
 * swallowed; in that case the buffer may be left partially filled.
 *
 * @param registerAddress first register to read
 * @param count number of bytes to read (buffer must be at least this long)
 * @param buffer destination for the bytes read
 */
public void ReadI2CBuffer( int registerAddress, int count, byte[] buffer )
{
try
{
m_i2c.read( registerAddress, count, buffer );
}
catch (Throwable t)
{
// Report and swallow: a failed read must not crash the robot loop.
DriverStation.reportError("ERROR Unhandled exception in I2C Read: " + t.toString() + " at " + Arrays.toString(t.getStackTrace()), false);
}
}
/**
 * Read {@code count} bytes starting at {@code register} and combine the
 * first two big-endian into a signed 16-bit value.
 *
 * @param register first register to read (high byte)
 * @param count number of bytes to read; must be at least 2
 * @return 16-bit 2's complement value built from the first two bytes
 */
public short ReadShortFromRegister( byte register, int count )
{
    byte[] buffer = new byte[count];
    ReadI2CBuffer( register, count, buffer );
    // Mask the low byte: the old `| buffer[1]` sign-extended it, so any low
    // byte >= 0x80 overwrote the high byte with 1-bits.
    return (short) ( (buffer[0] << 8) | (buffer[1] & 0xFF) );
}
/** Get X-axis gyroscope reading.
 * @return X-axis rotation measurement in 16-bit 2's complement format
 * @see ITG3200_RA_GYRO_XOUT_H
 */
public short getRotationX()
{
return ReadShortFromRegister( ITG3200_RA_GYRO_XOUT_H, 2 );
}
/** Get Y-axis gyroscope reading.
 * @return Y-axis rotation measurement in 16-bit 2's complement format
 * @see ITG3200_RA_GYRO_YOUT_H
 */
public short getRotationY()
{
return ReadShortFromRegister( ITG3200_RA_GYRO_YOUT_H, 2 );
}
/** Get Z-axis gyroscope reading.
 * @return Z-axis rotation measurement in 16-bit 2's complement format
 * @see ITG3200_RA_GYRO_ZOUT_H
 */
public short getRotationZ()
{
return ReadShortFromRegister( ITG3200_RA_GYRO_ZOUT_H, 2 );
}
// PWR_MGM register
/** Trigger a full device reset.
 * A small delay of ~50ms may be desirable after triggering a reset.
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_H_RESET_BIT
 */
public void reset()
{
writeBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_H_RESET_BIT, true );
}
/** Get sleep mode status.
 * Setting the SLEEP bit in the register puts the device into very low power
 * sleep mode. In this mode, only the serial interface and internal registers
 * remain active, allowing for a very low standby current. Clearing this bit
 * puts the device back into normal mode. To save power, the individual standby
 * selections for each of the gyros should be used if any gyro axis is not used
 * by the application.
 * @return Current sleep mode enabled status
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_SLEEP_BIT
 */
public boolean getSleepEnabled()
{
return readBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_SLEEP_BIT );
}
/** Set sleep mode status.
 * @param enabled New sleep mode enabled status
 * @see getSleepEnabled()
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_SLEEP_BIT
 */
public void setSleepEnabled( boolean enabled )
{
writeBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_SLEEP_BIT, enabled );
}
/** Get X-axis standby enabled status.
 * If enabled, the X-axis will not gather or report data (or use power).
 * @return Current X-axis standby enabled status
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_STBY_XG_BIT
 */
public boolean getStandbyXEnabled(){
return readBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_STBY_XG_BIT );
}
/** Set X-axis standby enabled status.
 * @param enabled New X-axis standby enabled status
 * @see getStandbyXEnabled()
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_STBY_XG_BIT
 */
public void setStandbyXEnabled( boolean enabled )
{
writeBit(ITG3200_RA_PWR_MGM, ITG3200_PWR_STBY_XG_BIT, enabled );
}
/** Get Y-axis standby enabled status.
 * If enabled, the Y-axis will not gather or report data (or use power).
 * @return Current Y-axis standby enabled status
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_STBY_YG_BIT
 */
public boolean getStandbyYEnabled()
{
return readBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_STBY_YG_BIT );
}
/** Set Y-axis standby enabled status.
 * @param enabled New Y-axis standby enabled status
 * @see getStandbyYEnabled()
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_STBY_YG_BIT
 */
public void setStandbyYEnabled( boolean enabled )
{
writeBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_STBY_YG_BIT, enabled );
}
/** Get Z-axis standby enabled status.
 * If enabled, the Z-axis will not gather or report data (or use power).
 * @return Current Z-axis standby enabled status
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_STBY_ZG_BIT
 */
public boolean getStandbyZEnabled()
{
return readBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_STBY_ZG_BIT );
}
/** Set Z-axis standby enabled status.
 * @param enabled New Z-axis standby enabled status
 * @see getStandbyZEnabled()
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_STBY_ZG_BIT
 */
public void setStandbyZEnabled( boolean enabled )
{
writeBit( ITG3200_RA_PWR_MGM, ITG3200_PWR_STBY_ZG_BIT, enabled );
}
/** Get clock source setting.
 * @return Current clock source setting (CLK_SEL, 0-7; see setClockSource())
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_CLK_SEL_BIT
 * @see ITG3200_PWR_CLK_SEL_LENGTH
 */
public byte getClockSource()
{
    // Extract the 3-bit CLK_SEL field the same way the other field getters
    // do. The old code AND-ed the raw register with the bit NUMBER (2),
    // which returned only bit 1 of the register instead of the field value.
    return getRegisterBits( ITG3200_RA_PWR_MGM, ITG3200_PWR_CLK_SEL_BIT, ITG3200_PWR_CLK_SEL_LENGTH );
}
/** Set clock source setting.
 * On power up, the ITG-3200 defaults to the internal oscillator. It is highly recommended that the device is configured to use one of the gyros (or an external clock) as the clock reference, due to the improved stability.
 *
 * The CLK_SEL setting determines the device clock source as follows:
 *
 * CLK_SEL | Clock Source
 * --------+--------------------------------------
 * 0       | Internal oscillator
 * 1       | PLL with X Gyro reference
 * 2       | PLL with Y Gyro reference
 * 3       | PLL with Z Gyro reference
 * 4       | PLL with external 32.768kHz reference
 * 5       | PLL with external 19.2MHz reference
 * 6       | Reserved
 * 7       | Reserved
 *
 * @param source New clock source setting
 * @see getClockSource()
 * @see ITG3200_RA_PWR_MGM
 * @see ITG3200_PWR_CLK_SEL_BIT
 * @see ITG3200_PWR_CLK_SEL_LENGTH
 */
public void setClockSource( byte source )
{
writeBits( ITG3200_RA_PWR_MGM, ITG3200_PWR_CLK_SEL_BIT, ITG3200_PWR_CLK_SEL_LENGTH, source );
}
// LiveWindow table; null until initTable() is called.
private ITable m_table;
/**
 * {@inheritDoc}
 */
@Override
public void initTable( ITable subtable )
{
m_table = subtable;
updateTable(); // publish an initial sample immediately
}
/**
 * {@inheritDoc}
 */
@Override
public ITable getTable()
{
return m_table;
}
/**
 * {@inheritDoc}
 */
@Override
public void updateTable()
{
if (m_table != null)
{
m_table.putNumber("GyroX", getRotationX());
m_table.putNumber("GyroY", getRotationY());
m_table.putNumber("GyroZ", getRotationZ());
m_table.putNumber("GyroPIDValue", pidGet());
}
}
/* (non-Javadoc)
 * @see edu.wpi.first.wpilibj.Sendable#getSmartDashboardType()
 */
@Override
public String getSmartDashboardType()
{
return "Gyro";
}
/* (non-Javadoc)
 * No-op: this sensor has no special live-window test mode.
 * @see edu.wpi.first.wpilibj.livewindow.LiveWindowSendable#startLiveWindowMode()
 */
@Override
public void startLiveWindowMode()
{
}
/* (non-Javadoc)
 * No-op: this sensor has no special live-window test mode.
 * @see edu.wpi.first.wpilibj.livewindow.LiveWindowSendable#stopLiveWindowMode()
 */
@Override
public void stopLiveWindowMode()
{
}
/* (non-Javadoc)
 * @see edu.wpi.first.wpilibj.PIDSource#pidGet()
 */
@Override
public double pidGet()
{
// TODO We likely want to return one of the axes based on a setup option.
// NOTE(review): Math.cbrt of the sum of squares is NOT the vector
// magnitude (that would be Math.sqrt) — confirm the intended PID scaling
// before changing it, since existing tuning may depend on this value.
AllAxes var = getRotation();
double result = Math.cbrt( var.XAxis * var.XAxis + var.YAxis * var.YAxis + var.ZAxis * var.ZAxis );
return result;
// return 0;
}
// ---- I2C addresses ----
public static final byte ITG3200_ADDRESS_AD0_LOW = 0x68; // address pin low (GND), default for SparkFun IMU Digital Combo board
public static final byte ITG3200_ADDRESS_AD0_HIGH = 0x69; // address pin high (VCC), default for SparkFun ITG-3200 Breakout board
// NOTE(review): name is missing its final 'S' ("ADDRES"); kept as-is because
// renaming a public constant would break callers. 0xD2 == 0x69 << 1 —
// presumably the 8-bit (pre-shifted) bus address; confirm before use.
public static final int ITG3200_SPARKFUN_ADDRES = 0xD2;
public static final int ITG3200_DEFAULT_ADDRESS = ITG3200_ADDRESS_AD0_LOW; // ITG3200_ADDRESS_AD0_HIGH;
// ---- Register addresses (RA_*) ----
public static final byte ITG3200_RA_WHO_AM_I = 0x00;
public static final byte ITG3200_RA_SMPLRT_DIV = 0x15;
public static final byte ITG3200_RA_DLPF_FS = 0x16;
public static final byte ITG3200_RA_INT_CFG = 0x17;
public static final byte ITG3200_RA_INT_STATUS = 0x1A;
public static final byte ITG3200_RA_TEMP_OUT_H = 0x1B;
public static final byte ITG3200_RA_TEMP_OUT_L = 0x1C;
public static final byte ITG3200_RA_GYRO_XOUT_H = 0x1D;
public static final byte ITG3200_RA_GYRO_XOUT_L = 0x1E;
public static final byte ITG3200_RA_GYRO_YOUT_H = 0x1F;
public static final byte ITG3200_RA_GYRO_YOUT_L = 0x20;
public static final byte ITG3200_RA_GYRO_ZOUT_H = 0x21;
public static final byte ITG3200_RA_GYRO_ZOUT_L = 0x22;
public static final byte ITG3200_RA_PWR_MGM = 0x3E;
// ---- Bit-field positions (MSB of field) and lengths ----
public static final short ITG3200_DEVID_BIT = 6;
public static final short ITG3200_DEVID_LENGTH = 6;
public static final short ITG3200_DF_FS_SEL_BIT = 4;
public static final short ITG3200_DF_FS_SEL_LENGTH = 2;
public static final short ITG3200_DF_DLPF_CFG_BIT = 2;
public static final short ITG3200_DF_DLPF_CFG_LENGTH = 3;
// ---- DLPF_FS field values ----
public static final byte ITG3200_FULLSCALE_2000 = 0x03;
public static final byte ITG3200_DLPF_BW_256 = 0x00;
public static final byte ITG3200_DLPF_BW_188 = 0x01;
public static final byte ITG3200_DLPF_BW_98 = 0x02;
public static final byte ITG3200_DLPF_BW_42 = 0x03;
public static final byte ITG3200_DLPF_BW_20 = 0x04;
public static final byte ITG3200_DLPF_BW_10 = 0x05;
public static final byte ITG3200_DLPF_BW_5 = 0x06;
// ---- INT_CFG bit numbers and field values ----
public static final byte ITG3200_INTCFG_ACTL_BIT = 7;
public static final byte ITG3200_INTCFG_OPEN_BIT = 6;
public static final byte ITG3200_INTCFG_LATCH_INT_EN_BIT = 5;
public static final byte ITG3200_INTCFG_INT_ANYRD_2CLEAR_BIT = 4;
public static final byte ITG3200_INTCFG_ITG_RDY_EN_BIT = 2;
public static final byte ITG3200_INTCFG_RAW_RDY_EN_BIT = 0;
public static final byte ITG3200_INTMODE_ACTIVEHIGH = 0x00;
public static final byte ITG3200_INTMODE_ACTIVELOW = 0x01;
public static final byte ITG3200_INTDRV_PUSHPULL = 0x00;
public static final byte ITG3200_INTDRV_OPENDRAIN = 0x01;
public static final byte ITG3200_INTLATCH_50USPULSE = 0x00;
public static final byte ITG3200_INTLATCH_WAITCLEAR = 0x01;
public static final byte ITG3200_INTCLEAR_STATUSREAD = 0x00;
public static final byte ITG3200_INTCLEAR_ANYREAD = 0x01;
// ---- INT_STATUS bit numbers ----
public static final byte ITG3200_INTSTAT_ITG_RDY_BIT = 2;
public static final byte ITG3200_INTSTAT_RAW_DATA_READY_BIT = 0;
// ---- PWR_MGM bit numbers and CLK_SEL field values ----
public static final byte ITG3200_PWR_H_RESET_BIT = 7;
public static final byte ITG3200_PWR_SLEEP_BIT = 6;
public static final byte ITG3200_PWR_STBY_XG_BIT = 5;
public static final byte ITG3200_PWR_STBY_YG_BIT = 4;
public static final byte ITG3200_PWR_STBY_ZG_BIT = 3;
public static final byte ITG3200_PWR_CLK_SEL_BIT = 2;
public static final byte ITG3200_PWR_CLK_SEL_LENGTH = 3;
public static final byte ITG3200_CLOCK_INTERNAL = 0x00;
public static final byte ITG3200_CLOCK_PLL_XGYRO = 0x01;
public static final byte ITG3200_CLOCK_PLL_YGYRO = 0x02;
public static final byte ITG3200_CLOCK_PLL_ZGYRO = 0x03;
public static final byte ITG3200_CLOCK_PLL_EXT32K = 0x04;
public static final byte ITG3200_CLOCK_PLL_EXT19M = 0x05;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Collection;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridNearAtomicAbstractUpdateRequest;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.ForceRebalanceExchangeTask;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionDemandMessage;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionExchangeId;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionSupplyMessage;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsExchangeFuture;
import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPreloaderAssignments;
import org.apache.ignite.internal.util.future.GridCompoundFuture;
import org.jetbrains.annotations.Nullable;
/**
* Cache preloader that is responsible for loading cache entries either from remote
* nodes (for distributed cache) or anywhere else at cache startup.
*/
/**
 * Cache preloader that is responsible for loading cache entries either from remote
 * nodes (for distributed cache) or anywhere else at cache startup.
 * <p>
 * Implementations drive the rebalancing lifecycle: generating partition assignments
 * after a topology exchange, exchanging supply/demand messages with other nodes, and
 * exposing futures that track preloader startup and rebalance completion.
 */
public interface GridCachePreloader {
    /**
     * Starts preloading.
     *
     * @throws IgniteCheckedException If start failed.
     */
    public void start() throws IgniteCheckedException;
    /**
     * Kernal stop callback. Invoked when the node's kernal is being stopped so the
     * preloader can release its resources.
     */
    public void onKernalStop();
    /**
     * Client reconnected callback.
     */
    public void onReconnected();
    /**
     * Callback by exchange manager when initial partition exchange is complete.
     *
     * @param err Error, if any happened on initial exchange; {@code null} on success.
     */
    public void onInitialExchangeComplete(@Nullable Throwable err);
    /**
     * @param rebTopVer Previous rebalance topology version or {@code NONE} if there is no info.
     * @param exchFut Completed exchange future.
     * @return {@code True} if rebalance should be started (previous will be interrupted).
     */
    public boolean rebalanceRequired(AffinityTopologyVersion rebTopVer, GridDhtPartitionsExchangeFuture exchFut);
    /**
     * @param exchId Exchange ID.
     * @param exchFut Completed exchange future. Can be {@code null} if forced or reassigned generation occurs.
     * @return Partition assignments which will be requested from supplier nodes, or {@code null}.
     */
    @Nullable public GridDhtPreloaderAssignments generateAssignments(GridDhtPartitionExchangeId exchId,
        @Nullable GridDhtPartitionsExchangeFuture exchFut);
    /**
     * Adds assignments to preloader.
     *
     * @param assignments Assignments to add.
     * @param forcePreload {@code True} if preload requested by {@link ForceRebalanceExchangeTask}.
     * @param rebalanceId Rebalance id created by exchange thread.
     * @param next Runnable responsible for cache rebalancing chain.
     * @param forcedRebFut External future for forced rebalance.
     * @return Rebalancing runnable.
     */
    public Runnable addAssignments(GridDhtPreloaderAssignments assignments,
        boolean forcePreload,
        long rebalanceId,
        Runnable next,
        @Nullable GridCompoundFuture<Boolean, Boolean> forcedRebFut);
    /**
     * @return Future which will complete when preloader is safe to use.
     */
    public IgniteInternalFuture<Object> startFuture();
    /**
     * @return Future which will complete when preloading is finished.
     */
    public IgniteInternalFuture<?> syncFuture();
    /**
     * @return Future which will complete when preloading finishes on current topology.
     *     Future result is {@code true} in case rebalancing successfully finished at current topology,
     *     and {@code false} in case rebalancing was cancelled or finished with missed partitions and
     *     will be restarted at current or pending topology.
     */
    public IgniteInternalFuture<Boolean> rebalanceFuture();
    /**
     * @return {@code true} if there is no need to force keys preloading
     * (e.g. rebalancing has been completed).
     */
    public boolean needForceKeys();
    /**
     * Requests that preloader sends the request for the key.
     *
     * @param cctx Cache context.
     * @param keys Keys to request.
     * @param topVer Topology version, {@code -1} if not required.
     * @return Future to complete when all keys are preloaded.
     */
    public GridDhtFuture<Object> request(GridCacheContext cctx,
        Collection<KeyCacheObject> keys,
        AffinityTopologyVersion topVer);
    /**
     * Requests that preloader sends the request for the key.
     *
     * @param cctx Cache context.
     * @param req Message with keys to request.
     * @param topVer Topology version, {@code -1} if not required.
     * @return Future to complete when all keys are preloaded.
     */
    public GridDhtFuture<Object> request(GridCacheContext cctx,
        GridNearAtomicAbstractUpdateRequest req,
        AffinityTopologyVersion topVer);
    /**
     * Forces the rebalance process to run.
     *
     * @return Future tracking completion of the forced rebalance.
     */
    public IgniteInternalFuture<Boolean> forceRebalance();
    /**
     * Unwinds undeploys.
     */
    public void unwindUndeploys();
    /**
     * Handles Supply message.
     *
     * @param idx Index.
     * @param id Node Id.
     * @param s Supply message.
     */
    public void handleSupplyMessage(int idx, UUID id, final GridDhtPartitionSupplyMessage s);
    /**
     * Handles Demand message.
     *
     * @param idx Index.
     * @param id Node Id.
     * @param d Demand message.
     */
    public void handleDemandMessage(int idx, UUID id, GridDhtPartitionDemandMessage d);
    /**
     * Topology change callback.
     *
     * @param lastFut Last exchange future.
     */
    public void onTopologyChanged(GridDhtPartitionsExchangeFuture lastFut);
    /**
     * Dumps debug information.
     */
    public void dumpDebugInfo();
    /**
     * Pause preloader.
     */
    public void pause();
    /**
     * Resume preloader.
     */
    public void resume();
    /**
     * Rebalance timeout for supply and demand messages in milliseconds.
     * <p>
     * The {@link IgniteConfiguration#getRebalanceTimeout()} will be used by default. If an Ignite's configuration value
     * is not provided then the {@link CacheConfiguration#getRebalanceTimeout()} will be used instead.
     *
     * @return Rebalance message timeout in milliseconds.
     */
    public long timeout();
    /**
     * The number of batches generated by supply node at rebalancing procedure start.
     * <p>
     * The {@link IgniteConfiguration#getRebalanceBatchesPrefetchCount()} will be used by default. If an Ignite's
     * configuration value is not provided then the {@link CacheConfiguration#getRebalanceBatchesPrefetchCount()}
     * will be used instead.
     *
     * @return The number of batches prefetch count.
     */
    public long batchesPrefetchCount();
    /**
     * Time in milliseconds to wait between rebalance messages to avoid overloading of CPU or network.
     * <p>
     * The {@link IgniteConfiguration#getRebalanceThrottle()} will be used by default. If an Ignite's
     * configuration value is not provided then the {@link CacheConfiguration#getRebalanceThrottle()}
     * will be used instead.
     *
     * @return Time in milliseconds to wait between rebalance messages, {@code 0} to disable throttling.
     */
    public long throttle();
    /**
     * The supply message size in bytes to be loaded within a single rebalance batch.
     * <p>
     * The {@link IgniteConfiguration#getRebalanceBatchSize()} will be used by default. If an Ignite's
     * configuration value is not provided then the {@link CacheConfiguration#getRebalanceBatchSize()}
     * will be used instead.
     *
     * @return Rebalance message size in bytes.
     */
    public int batchSize();
}
| |
package com.planet_ink.coffee_mud.CharClasses;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings({"unchecked","rawtypes"})
public class Barbarian extends StdCharClass
{
	@Override public String ID(){return "Barbarian";}
	// Localized once at class-load time; CharClass objects are shared, so this is effectively a constant.
	private final static String localizedStaticName = CMLib.lang().L("Barbarian");
	@Override public String name() { return localizedStaticName; }
	@Override public String baseClass(){return "Fighter";}
	@Override public int getBonusPracLevel(){return -1;}
	@Override public int getBonusAttackLevel(){return 0;}
	@Override public int getAttackAttribute(){return CharStats.STAT_STRENGTH;}
	@Override public int getLevelsPerBonusDamage(){ return 30;}
	@Override public int getPracsFirstLevel(){return 3;}
	@Override public int getTrainsFirstLevel(){return 4;}
	@Override public String getMovementFormula(){return "13*((@x2<@x3)/18)"; }
	@Override public String getHitPointsFormula(){return "((@x6<@x7)/2)+(2*(1?7))"; }
	@Override public String getManaFormula(){return "((@x4<@x5)/8)+(1*(1?2))"; }
	@Override public int allowedArmorLevel(){return CharClass.ARMOR_NONMETAL;}

	/**
	 * Construct this char class, raising the maximum trainable
	 * strength and constitution by 4 over the racial baseline.
	 */
	public Barbarian()
	{
		super();
		maxStatAdj[CharStats.STAT_STRENGTH]=4;
		maxStatAdj[CharStats.STAT_CONSTITUTION]=4;
	}

	/**
	 * Registers the per-level ability mappings for this class with the
	 * ability mapper.  The boolean flag marks abilities auto-gained at
	 * that level; others must be trained/qualified for.
	 */
	@Override
	public void initializeClass()
	{
		super.initializeClass();
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_Write",25,true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Axe",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_BluntWeapon",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_EdgedWeapon",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_FlailedWeapon",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Hammer",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Natural",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Polearm",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Ranged",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Sword",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Armor",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Specialization_Shield",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_Recall",50,true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_Swim",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Skill_WildernessLore",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),1,"Fighter_Charge",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),2,"Fighter_Kick",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),3,"Skill_Parry",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),3,"Skill_TwoWeaponFighting",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),4,"Skill_Bash",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),5,"Fighter_SmokeSignals",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),5,"Scalp",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),6,"Fighter_Cleave",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),6,"Fighter_Battlecry",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),7,"Skill_Dodge",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),7,"Skill_Disarm",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),8,"Fighter_Berzerk",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),8,"Fighter_Rescue",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),9,"Skill_Attack2",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),9,"Fighter_ArmorTweaking",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),10,"Fighter_Spring",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),10,"Apothecary",0,"ANTIDOTES",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),11,"Skill_Dirt",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),11,"Fighter_JungleTactics",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),12,"Fighter_Intimidate",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),12,"Fighter_SwampTactics",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),13,"Fighter_Warcry",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),13,"Fighter_DesertTactics",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),14,"Fighter_ImprovedThrowing",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),14,"Fighter_MountainTactics",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),15,"Skill_Climb",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),15,"Fighter_WeaponBreak",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),16,"Fighter_Sweep",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),16,"Fighter_Rallycry",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),17,"Skill_MountedCombat",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),17,"Fighter_HillsTactics",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),18,"Fighter_Endurance",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),19,"Skill_IdentifyPoison",true,CMParms.parseSemicolons("Apothecary",true));
		CMLib.ableMapper().addCharAbilityMapping(ID(),20,"Skill_AttackHalf",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),20,"Scrapping",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),21,"Fighter_Roll",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),22,"Fighter_ForestTactics",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),23,"Fighter_BullRush",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),24,"Fighter_Fragmentation",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),24,"Fighter_PlainsTactics",false);
		CMLib.ableMapper().addCharAbilityMapping(ID(),25,"Fighter_Stonebody",true);
		CMLib.ableMapper().addCharAbilityMapping(ID(),30,"Fighter_Shrug",true);
	}

	@Override public int availabilityCode(){return Area.THEME_FANTASY;}

	@Override
	public String getOtherBonusDesc()
	{
		return "Damage reduction 1pt/5 levels. A 1%/level resistance to Enchantments. Receives bonus conquest and duel experience.";
	}

	/**
	 * Awards the class's bonus conquest/duel experience via the Fighter helpers.
	 */
	@Override
	public void executeMsg(Environmental host, CMMsg msg)
	{
		super.executeMsg(host,msg);
		Fighter.conquestExperience(this,host,msg);
		Fighter.duelExperience(this, host, msg);
	}

	private final String[] raceRequiredList=new String[]{"All"};
	@Override public String[] getRequiredRaceList(){ return raceRequiredList; }

	private final Pair<String,Integer>[] minimumStatRequirements=new Pair[]{
		new Pair<String,Integer>("Strength",Integer.valueOf(9)),
		new Pair<String,Integer>("Constitution",Integer.valueOf(9))
	};
	@Override public Pair<String,Integer>[] getMinimumStatRequirements() { return minimumStatRequirements; }

	/**
	 * Applies the class's passive defenses to incoming messages:
	 * weapon damage is reduced by classLevel/5 (with a percentage floor
	 * that grows past level 33), and malicious Enchantment-domain
	 * abilities are resisted classLevel% of the time.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if(!(myHost instanceof MOB))
			return super.okMessage(myHost,msg);
		final MOB myChar=(MOB)myHost;
		if((msg.amITarget(myChar))
		&&(msg.tool() instanceof Weapon)
		&&(msg.targetMinor()==CMMsg.TYP_DAMAGE))
		{
			// Flat reduction of 1 point per 5 class levels, but never less
			// than 10% of the damage (+0.25%/level above level 33).
			final int classLevel=myChar.charStats().getClassLevel(this);
			int recovery=(classLevel/5);
			final double minPct=.10+((classLevel>33)?((classLevel-30)*.0025):0);
			final int minAmount=(int)Math.round(CMath.mul(msg.value(), minPct));
			if(recovery < minAmount)
				recovery=minAmount;
			msg.setValue(msg.value()-recovery);
		}
		else
		if((msg.amITarget(myChar))
		&&(CMath.bset(msg.targetMajor(),CMMsg.MASK_MALICIOUS))
		&&(msg.tool() instanceof Ability)
		&&((((Ability)msg.tool()).classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_ENCHANTMENT))
		{
			// classLevel% chance to fully resist a hostile enchantment.
			if(CMLib.dice().rollPercentage()<=myChar.charStats().getClassLevel(this))
			{
				myChar.location().show(myChar,null,msg.source(),CMMsg.MSG_OK_ACTION,L("<S-NAME> resist(s) the @x1 attack from <O-NAMESELF>!",msg.tool().name()));
				return false;
			}
		}
		return super.okMessage(myChar,msg);
	}

	/**
	 * Grants abilities to NPCs (mobs without player stats) that they
	 * qualify for by level but would not gain through normal play.
	 */
	@Override
	public void grantAbilities(MOB mob, boolean isBorrowedClass)
	{
		super.grantAbilities(mob,isBorrowedClass);
		if(mob.playerStats()==null)
		{
			final List<AbilityMapper.AbilityMapping> V=CMLib.ableMapper().getUpToLevelListings(ID(),
																	mob.charStats().getClassLevel(ID()),
																	false,
																	false);
			for(final AbilityMapper.AbilityMapping able : V)
			{
				final Ability A=CMClass.getAbility(able.abilityID);
				if((A!=null)
				&&(!CMLib.ableMapper().getAllQualified(ID(),true,A.ID()))
				&&(!CMLib.ableMapper().getDefaultGain(ID(),true,A.ID())))
					giveMobAbility(mob,A,CMLib.ableMapper().getDefaultProficiency(ID(),true,A.ID()),CMLib.ableMapper().getDefaultParm(ID(),true,A.ID()),isBorrowedClass);
			}
		}
	}

	/**
	 * Starting equipment: a single shortsword.  The outfit list is built
	 * lazily and cached; an empty list is returned if the weapon class
	 * cannot be loaded.
	 */
	@Override
	public List<Item> outfit(MOB myChar)
	{
		if(outfitChoices==null)
		{
			final Weapon w=CMClass.getWeapon("Shortsword");
			if(w == null)
				return new Vector<Item>();
			// Use a typed Vector rather than the raw type the original used.
			outfitChoices=new Vector<Item>();
			outfitChoices.add(w);
		}
		return outfitChoices;
	}
}
| |
/*
* The Gemma project.
*
* Copyright (c) 2006-2007 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.persistence.service.expression.experiment;
import gemma.gsec.SecurityService;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import ubic.gemma.core.expression.experiment.ExpressionExperimentSetValueObjectHelper;
import ubic.gemma.model.analysis.expression.ExpressionExperimentSet;
import ubic.gemma.model.expression.experiment.*;
import ubic.gemma.model.genome.Taxon;
import ubic.gemma.persistence.service.AbstractVoEnabledService;
import ubic.gemma.persistence.service.analysis.expression.ExpressionExperimentSetDao;
import ubic.gemma.persistence.service.genome.taxon.TaxonService;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
/**
* Spring Service base class for <code>ubic.gemma.model.analysis.expression.ExpressionExperimentSetService</code>,
* provides access to all services and entities referenced by this service.
*
* @see ExpressionExperimentSetService
*/
@Service
public class ExpressionExperimentSetServiceImpl
        extends AbstractVoEnabledService<ExpressionExperimentSet, ExpressionExperimentSetValueObject>
        implements ExpressionExperimentSetService {

    private final ExpressionExperimentSetDao expressionExperimentSetDao;

    private SecurityService securityService;
    private ExpressionExperimentService expressionExperimentService;
    private TaxonService taxonService;
    private ExpressionExperimentSetValueObjectHelper expressionExperimentValueObjectHelper;

    @Autowired
    public ExpressionExperimentSetServiceImpl( ExpressionExperimentSetDao expressionExperimentSetDao ) {
        super( expressionExperimentSetDao );
        this.expressionExperimentSetDao = expressionExperimentSetDao;
    }

    @Autowired
    public void setSecurityService( SecurityService securityService ) {
        this.securityService = securityService;
    }

    @Autowired
    public void setExpressionExperimentService( ExpressionExperimentService expressionExperimentService ) {
        this.expressionExperimentService = expressionExperimentService;
    }

    @Autowired
    public void setTaxonService( TaxonService taxonService ) {
        this.taxonService = taxonService;
    }

    @Autowired
    public void setExpressionExperimentValueObjectHelper(
            ExpressionExperimentSetValueObjectHelper expressionExperimentValueObjectHelper ) {
        this.expressionExperimentValueObjectHelper = expressionExperimentValueObjectHelper;
    }

    /**
     * Create a new persistent experiment set from a value object.
     * <p>
     * The set name must be unique; the taxon is taken from the VO if given, otherwise inferred from the member
     * experiments (which must all share one taxon). Visibility is set from the VO's isPublic flag.
     *
     * @throws IllegalArgumentException if a set with the same name exists or the taxon cannot be determined
     */
    @Override
    @Transactional
    public ExpressionExperimentSet createFromValueObject( ExpressionExperimentSetValueObject eesvo ) {
        /*
         * Sanity check: name must be unique. A null result from findByName is treated as a failure as well —
         * NOTE(review): presumably the DAO never returns null here; confirm, since the error message would be
         * misleading in that case.
         */
        Collection<ExpressionExperimentSet> dups = this.findByName( eesvo.getName() );
        if ( dups == null || !dups.isEmpty() ) {
            throw new IllegalArgumentException(
                    "Sorry, there is already a set with that name (" + eesvo.getName() + ")" );
        }

        ExpressionExperimentSet newSet = ExpressionExperimentSet.Factory.newInstance();
        newSet.setName( eesvo.getName() );
        newSet.setDescription( eesvo.getDescription() );

        Collection<? extends BioAssaySet> datasetsAnalyzed = expressionExperimentService
                .load( eesvo.getExpressionExperimentIds() );
        newSet.getExperiments().addAll( datasetsAnalyzed );

        if ( eesvo.getTaxonId() != null )
            newSet.setTaxon( taxonService.load( eesvo.getTaxonId() ) );
        else {
            /*
             * Figure out the taxon from the experiments. Mustn't be heterogeneous.
             */
            Taxon taxon = null;
            for ( BioAssaySet bioAssaySet : newSet.getExperiments() ) {
                Taxon eeTaxon = this.getTaxonForSet( bioAssaySet );
                /*
                 * this can be null (e.g. experiment with no samples).
                 */
                if ( taxon == null ) {
                    taxon = eeTaxon;
                } else {
                    assert eeTaxon != null;
                    if ( !eeTaxon.equals( taxon ) ) {
                        throw new UnsupportedOperationException( "EESets with mixed taxa are not supported" );
                    }
                }
            }
            if ( taxon == null ) {
                throw new IllegalStateException( "Could not determine taxon for new EEset" );
            }
            newSet.setTaxon( taxon );
        }

        if ( newSet.getTaxon() == null ) {
            throw new IllegalArgumentException( "Unable to determine the taxon for the EESet" );
        }

        ExpressionExperimentSet newEESet = this.create( newSet );

        // make groups private by default
        if ( eesvo.getIsPublic() ) {
            securityService.makePublic( newEESet );
        } else {
            securityService.makePrivate( newEESet );
        }
        return newEESet;
    }

    /**
     * Remove the persistent set identified by the value object's id.
     */
    @Override
    @Transactional
    public void deleteDatabaseEntity( ExpressionExperimentSetValueObject eesvo ) {
        try {
            this.remove( this.load( eesvo.getId() ) );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentSet> find( BioAssaySet bioAssaySet ) {
        return this.expressionExperimentSetDao.find( bioAssaySet );
    }

    // readOnly transaction added for consistency with the other read-only finders in this class.
    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentSet> findByName( String name ) {
        return this.expressionExperimentSetDao.findByName( name );
    }

    /**
     * @return ids of all sets that contain the given bioAssaySet.
     */
    @Override
    @Transactional(readOnly = true)
    public Collection<Long> findIds( BioAssaySet bioAssaySet ) {
        Collection<Long> ids = new ArrayList<>();
        Collection<ExpressionExperimentSet> eesets = this.expressionExperimentSetDao.find( bioAssaySet );
        for ( ExpressionExperimentSet eeset : eesets ) {
            ids.add( eeset.getId() );
        }
        return ids;
    }

    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperiment> getExperimentsInSet( Long id ) {
        return this.expressionExperimentSetDao.getExperimentsInSet( id );
    }

    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentDetailsValueObject> getExperimentValueObjectsInSet( Long id ) {
        return this.expressionExperimentSetDao.getExperimentValueObjectsInSet( id );
    }

    /**
     * Instantiate non-persistent experiment set with description = "Automatically generated for ## EEs.". Mostly for
     * use in Gene2GenePopulationServiceImpl.intializeNewAnalysis(Collection, Taxon, Collection,
     * String, int). By convention, these sets should not be modifiable.
     */
    @Override
    public ExpressionExperimentSet initAutomaticallyGeneratedExperimentSet(
            Collection<ExpressionExperiment> expressionExperiments, Taxon taxon ) {
        ExpressionExperimentSet eeSet;
        eeSet = ExpressionExperimentSet.Factory.newInstance();
        eeSet.setTaxon( taxon );
        eeSet.setName( this.getMasterSetName( taxon ) );
        eeSet.setDescription(
                String.format( ExpressionExperimentSetService.AUTOMATICALLY_GENERATED_EXPERIMENT_GROUP_DESCRIPTION,
                        String.valueOf( expressionExperiments.size() ) ) );
        eeSet.getExperiments().addAll( expressionExperiments );
        return eeSet;
    }

    /**
     * Determines if set was automatically generated by matching the description to that used in
     * ubic.gemma.core.analysis.expression
     * .coexpression.ExpressionExperimentSetService.AUTOMATICALLY_GENERATED_EXPERIMENT_GROUP_DESCRIPTION
     *
     * @return true if the set was automatically generated, false otherwise
     */
    @Override
    @Transactional(readOnly = true)
    public boolean isAutomaticallyGenerated( String experimentSetDescription ) {
        String regexDesc = String
                .format( ExpressionExperimentSetService.AUTOMATICALLY_GENERATED_EXPERIMENT_GROUP_DESCRIPTION, ".*" );
        return experimentSetDescription.matches( regexDesc );
    }

    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentSet> loadAllExperimentSetsWithTaxon() {
        return this.expressionExperimentSetDao.loadAllExperimentSetsWithTaxon();
    }

    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentSetValueObject> loadAllExperimentSetValueObjects( boolean loadEEIds ) {
        return this.expressionExperimentSetDao.loadAllValueObjects( loadEEIds );
    }

    // NOTE(review): identical to loadAllExperimentSetValueObjects; presumably filtering to "my" sets happens
    // via security interceptors on the DAO — confirm.
    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentSetValueObject> loadMySetValueObjects( boolean loadEEIds ) {
        return this.expressionExperimentSetDao.loadAllValueObjects( loadEEIds );
    }

    @Override
    @Transactional(readOnly = true)
    public ExpressionExperimentSetValueObject loadValueObjectById( Long id, boolean loadEEIds ) {
        return this.expressionExperimentSetDao.loadValueObject( id, loadEEIds );
    }

    /**
     * Update a persistent set from a value object (looked up via the VO helper).
     */
    @Override
    @Transactional
    public void updateDatabaseEntity( ExpressionExperimentSetValueObject eesvo ) {
        try {
            ExpressionExperimentSet eeset = expressionExperimentValueObjectHelper.convertToEntity( eesvo );
            if ( eeset == null ) {
                throw new IllegalArgumentException( "Cannot update null set" );
            }
            this.update( eeset );
        } catch ( Exception e ) {
            throw new RuntimeException( e );
        }
    }

    /**
     * update the members of the experiment set with the given ids
     *
     * @param groupId set to update
     * @param eeIds new set member ids
     */
    @Override
    @Transactional
    public void updateDatabaseEntityMembers( Long groupId, Collection<Long> eeIds ) {
        if ( eeIds.isEmpty() ) {
            throw new IllegalArgumentException( "No expression experiment ids provided. Cannot save an empty set." );
        }
        ExpressionExperimentSet eeSet = this.load( groupId );
        if ( eeSet == null ) {
            throw new IllegalArgumentException( "No experiment set with id=" + groupId + " could be loaded. "
                    + "Either it does not exist or you do not have permission to view it." );
        }
        // check that new member ids are valid
        Collection<ExpressionExperiment> newExperiments = expressionExperimentService.load( eeIds );
        if ( newExperiments.isEmpty() ) {
            throw new IllegalArgumentException(
                    "None of the experiment ids were valid (out of " + eeIds.size() + " provided)" );
        }
        if ( newExperiments.size() < eeIds.size() ) {
            throw new IllegalArgumentException(
                    "Some of the experiment ids were invalid: only found " + newExperiments.size() + " out of " + eeIds
                            .size() + " provided)" );
        }
        assert newExperiments.size() == eeIds.size();
        Collection<BioAssaySet> basColl = new HashSet<>();
        for ( ExpressionExperiment experiment : newExperiments ) {
            Taxon eeTaxon = this.getTaxonForSet( experiment );
            // make sure experiments being added are from the right taxon
            if ( eeTaxon == null || !eeTaxon.equals( eeSet.getTaxon() ) ) {
                throw new IllegalArgumentException(
                        experiment + " is of the wrong taxon to add to eeset. EESet taxon is " + eeSet.getTaxon() );
            }
            basColl.add( experiment );
        }
        eeSet.getExperiments().clear();
        eeSet.getExperiments().addAll( basColl );
        this.update( eeSet );
    }

    /**
     * Update only the name and description of the set; the name is only replaced if a non-empty one is supplied.
     *
     * @return the refreshed value object
     */
    @Override
    @Transactional
    public ExpressionExperimentSetValueObject updateDatabaseEntityNameDesc( ExpressionExperimentSetValueObject eeSetVO,
            boolean loadEEIds ) {
        Long groupId = eeSetVO.getId();
        ExpressionExperimentSet eeSet = this.load( groupId );
        if ( eeSet == null ) {
            throw new IllegalArgumentException( "No experiment set with id=" + groupId + " could be loaded" );
        }
        eeSet.setDescription( eeSetVO.getDescription() );
        if ( eeSetVO.getName() != null && eeSetVO.getName().length() > 0 )
            eeSet.setName( eeSetVO.getName() );
        this.update( eeSet );
        return this.loadValueObjectById( eeSet.getId(), loadEEIds );
    }

    @Override
    @Transactional(readOnly = true)
    public ExpressionExperimentSetValueObject loadValueObjectById( Long id ) {
        return this.expressionExperimentSetDao.loadValueObject( id, false );
    }

    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentSetValueObject> loadValueObjectsByIds( Collection<Long> eeSetIds ) {
        return this.expressionExperimentSetDao.loadValueObjects( eeSetIds, false );
    }

    @Override
    @Transactional(readOnly = true)
    public void thaw( ExpressionExperimentSet expressionExperimentSet ) {
        this.expressionExperimentSetDao.thaw( expressionExperimentSet );
    }

    @SuppressWarnings("unchecked")
    @Override
    @Transactional(readOnly = true)
    public Collection<ExpressionExperimentSet> load( Collection<Long> ids ) {
        return this.expressionExperimentSetDao.load( ids );
    }

    /**
     * Update an existing set after validating that it has an id, a non-blank name, and that every member
     * experiment matches the set's taxon.
     *
     * @see ExpressionExperimentSetService#update(ExpressionExperimentSet)
     */
    @Override
    @Transactional
    public void update( final ExpressionExperimentSet expressionExperimentSet ) {
        if ( expressionExperimentSet == null ) {
            throw new IllegalArgumentException( "Cannot update null set" );
        }
        if ( expressionExperimentSet.getId() == null || expressionExperimentSet.getId() < 0 ) {
            throw new IllegalArgumentException(
                    "Can only update an existing eeset (passed id=" + expressionExperimentSet.getId() + ")" );
        }
        // validated once here; a second, identical blank-name check later in the method was redundant and removed.
        if ( StringUtils.isBlank( expressionExperimentSet.getName() ) ) {
            throw new IllegalArgumentException( "You must provide a name" );
        }

        // make sure potentially new experiment members are of the right taxon
        Taxon groupTaxon = expressionExperimentSet.getTaxon();
        Taxon eeTaxon;
        for ( BioAssaySet ee : expressionExperimentSet.getExperiments() ) {
            eeTaxon = this.getTaxonForSet( ee );
            assert eeTaxon != null;
            if ( !eeTaxon.equals( groupTaxon ) ) {
                throw new IllegalArgumentException(
                        "Failed to add experiments of wrong taxa (" + ee + ") to eeset. " + "EESet taxon is "
                                + groupTaxon + ", experiment was " + eeTaxon );
            }
        }

        this.expressionExperimentSetDao.update( expressionExperimentSet );
    }

    private String getMasterSetName( Taxon taxon ) {
        return "Master set for " + taxon.getCommonName();
    }

    /**
     * @return the taxon of the given experiment, or null if it cannot be determined
     *         (can happen if the experiment has no samples).
     */
    private Taxon getTaxonForSet( BioAssaySet experiment ) {
        Taxon eeTaxon = expressionExperimentService.getTaxon( experiment );
        if ( eeTaxon == null ) {
            // can happen if the experiment has no samples.
            return null;
        }
        return eeTaxon;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Anatoly F. Bondarenko
*/
/**
* Created on 18.02.2005
*/
package org.apache.harmony.jpda.tests.jdwp.ReferenceType;
import org.apache.harmony.jpda.tests.framework.jdwp.CommandPacket;
import org.apache.harmony.jpda.tests.framework.jdwp.JDWPCommands;
import org.apache.harmony.jpda.tests.framework.jdwp.ReplyPacket;
import org.apache.harmony.jpda.tests.jdwp.share.JDWPSyncTestCase;
import org.apache.harmony.jpda.tests.share.JPDADebuggeeSynchronizer;
/**
* JDWP Unit test for ReferenceType.Methods command.
*/
public class MethodsTest extends JDWPSyncTestCase {
    // Test outcome markers used by the testMethodsXXX methods.
    static final int testStatusPassed = 0;
    static final int testStatusFailed = -1;
    // Human-readable name of the JDWP command under test, used in log output.
    static final String thisCommandName = "ReferenceType.Methods command";
    // JNI-style signature of the debuggee class whose methods are inspected.
    static final String debuggeeSignature = "Lorg/apache/harmony/jpda/tests/jdwp/ReferenceType/MethodsDebuggee;";
protected String getDebuggeeClassName() {
return "org.apache.harmony.jpda.tests.jdwp.ReferenceType.MethodsDebuggee";
}
/**
* This testcase exercises ReferenceType.Methods command.
* <BR>The test starts MethodsDebuggee class, requests referenceTypeId
* for this class by VirtualMachine.ClassesBySignature command, then
* performs ReferenceType.Methods command and checks that returned
* list of methods corresponds to expected list of methods with expected attributes.
*/
public void testMethods001() {
String thisTestName = "testMethods001";
logWriter.println("==> " + thisTestName + " for " + thisCommandName + ": START...");
int testStatus = testStatusPassed;
synchronizer.receiveMessage(JPDADebuggeeSynchronizer.SGNL_READY);
long refTypeID = getClassIDBySignature(debuggeeSignature);
logWriter.println("=> Debuggee class = " + getDebuggeeClassName());
logWriter.println("=> referenceTypeID for Debuggee class = " + refTypeID);
logWriter.println("=> CHECK: send " + thisCommandName + " and check reply...");
CommandPacket methodsCommand = new CommandPacket(
JDWPCommands.ReferenceTypeCommandSet.CommandSetID,
JDWPCommands.ReferenceTypeCommandSet.MethodsCommand);
methodsCommand.setNextValueAsReferenceTypeID(refTypeID);
ReplyPacket methodsReply = debuggeeWrapper.vmMirror.performCommand(methodsCommand);
methodsCommand = null;
checkReplyPacket(methodsReply, thisCommandName);
int returnedMethodsNumber = methodsReply.getNextValueAsInt();
logWriter.println("=> Returned methods number = " + returnedMethodsNumber);
String methodNames[] = {
"staticTestMethod",
"objectTestMethod",
"run",
"main",
"<init>"
};
String methodSignatures[] = {
"(J)I",
"(Ljava/lang/Object;)Ljava/lang/Object;",
"()V",
"([Ljava/lang/String;)V",
"()V"
};
int methodModifiers[] = {
0x8,
0x0,
0x1,
0x9,
0x1
};
boolean methodFound[] = {
false,
false,
false,
false,
false
};
int expectedMetodsNumber = methodNames.length;
int methodSyntheticFlag = 0xf0000000;
String failMessage = "";
logWriter.println("=> CHECK for all expected methods...");
for (int i = 0; i < returnedMethodsNumber; i++) {
long returnedMethodID = methodsReply.getNextValueAsMethodID();
String returnedMethodName = methodsReply.getNextValueAsString();
String returnedMethodSignature = methodsReply.getNextValueAsString();
int returnedMethodModifiers = methodsReply.getNextValueAsInt();
logWriter.println("\n=> Method ID = " + returnedMethodID);
logWriter.println("=> Method name = " + returnedMethodName);
logWriter.println("=> Method signature = " + returnedMethodSignature);
logWriter.println("=> Method modifiers = 0x" + Integer.toHexString(returnedMethodModifiers));
if ( (returnedMethodModifiers & methodSyntheticFlag) == methodSyntheticFlag ) {
continue; // do not check synthetic methods
}
int k = 0;
for (; k < expectedMetodsNumber; k++) {
if ( ! methodNames[k].equals(returnedMethodName)) {
continue;
}
if ( methodFound[k] ) {
logWriter.println("\n## FAILURE: The method is found out repeatedly in the list");
logWriter.println("## Method name = " + returnedMethodName);
testStatus = testStatusFailed;
failMessage = failMessage +
"The method '" + returnedMethodName +
"' is found repeatedly in the list;\n";
break;
}
methodFound[k] = true;
if ( ! methodSignatures[k].equals(returnedMethodSignature) ) {
logWriter.println("\n## FAILURE: Unexpected method signature is returned:");
logWriter.println("## Method name = " + returnedMethodName);
logWriter.println("## Expected signature = " + methodSignatures[k]);
logWriter.println("## Returned signature = " + returnedMethodSignature);
testStatus = testStatusFailed;
failMessage = failMessage +
"Unexpected signature is returned for method: " +
returnedMethodName +
", Expected: " + methodSignatures[k] +
", Returned: " + returnedMethodSignature + ";\n";
}
if ( methodModifiers[k] != returnedMethodModifiers ) {
logWriter.println("\n## FAILURE: Unexpected method modifiers are returned:");
logWriter.println("## Method name = " + returnedMethodName);
logWriter.println
("## Expected modifiers = 0x" + Integer.toHexString(methodModifiers[k]));
logWriter.println
("## Returned modifiers = 0x" + Integer.toHexString(returnedMethodModifiers));
testStatus = testStatusFailed;
failMessage = failMessage +
"Unexpected modifiers are returned for method: " +
returnedMethodName +
", Expected: 0x" + Integer.toHexString(methodModifiers[k]) +
", Returned: 0x" + Integer.toHexString(returnedMethodModifiers) + ";\n";
}
break;
}
if ( k == expectedMetodsNumber ) {
// returned method is not found out in the list of expected methods
logWriter.println("\n## FAILURE: It is found out unexpected returned method:");
logWriter.println("## Method name = " + returnedMethodName);
logWriter.println("## Method signature = " + returnedMethodSignature);
logWriter.println
("## Method modifiers = 0x" + Integer.toHexString(returnedMethodModifiers));
testStatus = testStatusFailed;
failMessage = failMessage +
"Unexpected returned method is found:" +
", name = " + returnedMethodName +
", signature = " + returnedMethodSignature +
", modifiers = 0x" + Integer.toHexString(returnedMethodModifiers) + ";\n";
}
}
for (int k=0; k < expectedMetodsNumber; k++) {
if ( ! methodFound[k] ) {
logWriter.println
("\n## FAILURE: Expected method is NOT found out in the list of retuned methods:");
logWriter.println("## Method name = " + methodNames[k]);
testStatus = testStatusFailed;
failMessage = failMessage +
"Expected method is NOT found in the list of retuned methods:" +
" name = " + methodNames[k];
}
}
if ( testStatus == testStatusPassed ) {
logWriter.println
("=> CHECK PASSED: All expected methods are found out and have expected attributes");
}
synchronizer.sendMessage(JPDADebuggeeSynchronizer.SGNL_CONTINUE);
logWriter.println("==> " + thisTestName + " for " + thisCommandName + ": FINISH");
if (testStatus == testStatusFailed) {
fail(failMessage);
}
assertAllDataRead(methodsReply);
}
public static void main(String[] args) {
junit.textui.TestRunner.run(MethodsTest.class);
}
}
| |
package org.ema.activities.student;
import android.app.Activity;
import android.app.FragmentManager;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.media.MediaPlayer;
import android.os.Handler;
import android.os.Vibrator;
import android.speech.tts.TextToSpeech;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Menu;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.ema.R;
import org.ema.dialogs.EndGameDialog;
import org.ema.dialogs.FinishGameDialog;
import org.ema.dialogs.NoWordsRegisteredDialog;
import org.ema.entities.AppConfiguration;
import org.ema.entities.Challenge;
import org.ema.entities.PlayStats;
import org.ema.entities.Student;
import org.ema.services.Speaker;
import io.realm.Realm;
/**
 * Word-scramble game screen for a student: shows a picture, a set of letter
 * buttons built from the scrambled word, and checks the typed answer against
 * the target word, tracking points and progress in {@link PlayStats}.
 */
public class StudentGameActivity extends Activity implements View.OnClickListener, View.OnTouchListener {

    private static final String TAG = "StudentGameActivity";

    /** Maximum number of letter buttons placed on a single keyboard row. */
    private static final int MAX_BUTTONS_PER_ROW = 9;

    private Vibrator vibrator;
    /** Request code used when verifying TTS voice data in onActivityResult. */
    private final int CHECK_CODE = 0x1;
    /** The word the student currently has to spell. */
    private String word = "";
    private List<Challenge> wordList;
    private Random generator = new Random();
    /** Letters typed so far for the current word. */
    private String answerString = "";
    private EditText answerText;
    private LinearLayout scrambledLayout;
    private MediaPlayer correctSound = null;
    private MediaPlayer wrongSound = null;
    private Button nextButton;
    private ImageButton clearButton;
    private Student student;
    /** Index of the next challenge to serve from wordList. */
    private int counter = 0;
    private static int points = 0;
    private PlayStats activityStats;
    /** Whether the target word is shown as a visual hint. */
    private boolean help;
    private Realm realm;
    private Speaker speaker;
    private static double progress = 0;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        realm = Realm.getDefaultInstance();
        super.onCreate(savedInstanceState);
        //start speaker
        speaker = new Speaker(getApplicationContext());
        setContentView(R.layout.activity_student_game);
        vibrator = (Vibrator) getSystemService(VIBRATOR_SERVICE);
        Intent intent = getIntent();
        String studentId = intent.getStringExtra("studentId");
        // NOTE(review): findFirst() may return null if the id is unknown;
        // the calls below assume the caller always passes a valid studentId.
        student = realm.where(Student.class).equalTo("id", studentId).findFirst();
        Log.i(TAG,"Student "+ student.getNickname()+", ID = "+studentId );
        Log.i(TAG,"With "+student.getChallenges().size()+" challenges.");
        nextButton = (Button) findViewById(R.id.nextButton);
        nextButton.setOnClickListener(this);
        clearButton = (ImageButton) findViewById(R.id.backspaceBtn);
        clearButton.setOnClickListener(this);
        answerText = (EditText) findViewById(R.id.awnser);
        AppConfiguration config = realm.where(AppConfiguration.class).findFirst();
        if(config.getSound()){
            correctSound = MediaPlayer.create(getApplicationContext(), R.raw.correct);
            wrongSound = MediaPlayer.create(getApplicationContext(), R.raw.wrongsound);
        }
        //set space button
        Button spaceButton = (Button)findViewById(R.id.spaceBtn);
        spaceButton.setOnTouchListener(this);
        spaceButton.setOnClickListener(this);
        //---
        this.help = config.getShowWord();
        this.wordList = student.getChallenges();
        if(wordList.size() > 0){
            startWord();
            TextView progressText = (TextView) findViewById(R.id.progressValue);
            if(progressText != null){
                progressText.setText((int)(progress)+"/"+wordList.size());
            }
            // Stats for this play session; persisted when the game ends.
            Log.d(TAG, "creating activity stats...");
            activityStats = new PlayStats();
            activityStats.setUserId(student.getId());
            activityStats.setStart(new Date());
            activityStats.setTotalPoints(0);
            Log.d(TAG, "created.");
        }else{
            //alert informing that the student does not have any words registered for him.
            NoWordsRegisteredDialog wordsRegisteredDialog = new NoWordsRegisteredDialog();
            FragmentManager fm = getFragmentManager();
            wordsRegisteredDialog.show(fm,"no words");
        }
    }

    @Override
    protected void onDestroy(){
        super.onDestroy();
        if(speaker != null) {
            speaker.destroy();
        }
        realm.close();
    }

    @Override
    protected void onPause(){
        super.onPause();
        if(speaker != null){
            speaker.destroy();
        }
    }

    /**
     * Method to setup the screen to type the word: fetches the next challenge,
     * shows its image (and optionally the word as a hint) and rebuilds the
     * letter keyboard from the scrambled word. When no challenge is left the
     * session stats are persisted and the end-game dialog is shown.
     */
    public void startWord(){
        clearButton.setEnabled(Boolean.FALSE);
        Challenge challenge = getNewWord();
        if(challenge == null){
            //End of the game.
            realm = Realm.getDefaultInstance();
            EndGameDialog endGameDialog = new EndGameDialog();
            FragmentManager fm = getFragmentManager();
            realm.executeTransaction(new Realm.Transaction() {
                @Override
                public void execute(Realm realm) {
                    activityStats.setEnd(new Date());
                    realm.insertOrUpdate(activityStats);
                }
            });
            endGameDialog.show(fm,"end");
        }else {
            word = challenge.getText();
            TextView helpText = (TextView) findViewById(R.id.tip);
            // Show the word as a hint only when the configuration enables it.
            helpText.setText(help ? word.toUpperCase() : "");
            String scrambledWord = scramble(word);
            scrambledLayout = (LinearLayout) findViewById(R.id.keyboardLayout);
            setImageView(challenge.getImage(), word, challenge.getImageRotation());
            // One button per distinct character of the scrambled word.
            Set<Character> letters = new HashSet<>();
            for (int i = 0; i < scrambledWord.length(); i++) {
                letters.add(scrambledWord.charAt(i));
            }
            buildLetterKeyboard(letters);
        }
    }

    /**
     * Fills {@code scrambledLayout} with one button per distinct letter,
     * wrapping to a new row after {@link #MAX_BUTTONS_PER_ROW} characters.
     * Whitespace characters count toward row wrapping but get no button
     * (the dedicated space bar handles them).
     */
    private void buildLetterKeyboard(Set<Character> letters) {
        LinearLayout row = newKeyboardRow();
        int seen = 0;
        for (Character c : letters) {
            String label = Character.toString(c).toUpperCase();
            if (!label.trim().isEmpty()) {
                row.addView(newLetterButton(c, label));
            }
            seen++;
            if (seen % MAX_BUTTONS_PER_ROW == 0) {
                scrambledLayout.addView(row);
                row = newKeyboardRow();
            }
        }
        // Attach the (possibly partial) last row; skip a trailing empty one.
        if (row.getChildCount() > 0) {
            scrambledLayout.addView(row);
        }
    }

    /** Creates an empty horizontal row for the letter keyboard. */
    private LinearLayout newKeyboardRow() {
        LinearLayout row = new LinearLayout(this);
        row.setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        row.setOrientation(LinearLayout.HORIZONTAL);
        return row;
    }

    /** Creates one letter button wired to this activity's listeners. */
    private Button newLetterButton(Character c, String label) {
        Button button = new Button(this);
        // NOTE(review): the character code doubles as the view id; assumes
        // letter codes never collide with other ids used in this layout.
        button.setId(c);
        button.setLayoutParams(new LinearLayout.LayoutParams(getButtonWidthBasedOnScreenDensity(), ViewGroup.LayoutParams.WRAP_CONTENT));
        button.setText(label);
        button.setTextSize(12);
        button.setOnTouchListener(this);
        button.setOnClickListener(this);
        return button;
    }

    /**
     * Method called to speak the current word of the activity.
     * @param view the view that triggered the action (unused)
     */
    public void speak(View view){
        speaker.speak(word);
    }

    /**
     * Method to process the actions of the buttons: backspace, "next word"
     * and the individual letter/space buttons.
     * @param v the clicked view
     */
    public void onClick(View v){
        if(clearButton.getId() == v.getId()){ //delete letter
            handleBackspace();
        }else if(nextButton.getId() == v.getId()){ // next word
            handleNextWord();
        }else{
            handleLetter((TextView) findViewById(v.getId()));
        }
    }

    /** Removes the last typed character, disabling backspace when empty. */
    private void handleBackspace() {
        if(answerText.getText().toString().length() == 0){
            clearButton.setEnabled(Boolean.FALSE);
        }else {
            answerText.setText(answerText.getText().toString().substring(0, answerText.getText().toString().length() - 1));
            answerString = answerString.substring(0, answerString.length() - 1);
        }
    }

    /** Skips to the next word, resetting the answer field and progress UI. */
    private void handleNextWord() {
        clearButton.setEnabled(Boolean.FALSE);
        scrambledLayout.removeAllViews();
        answerText.setText("");
        answerString = "";
        answerText.setTextColor(Color.rgb(0, 0, 0));
        ProgressBar progressBar = (ProgressBar) findViewById(R.id.progressBar);
        progressBar.setProgress(getActivityProgress(1));
        TextView progressText = (TextView) findViewById(R.id.progressValue);
        progressText.setText((int)(progress)+"/"+wordList.size());
        startWord();
    }

    /**
     * Appends the clicked letter (or a space) to the answer and, once the
     * answer reaches the word's length, checks it (case-insensitively).
     */
    private void handleLetter(TextView clicked) {
        clearButton.setEnabled(Boolean.TRUE);
        if(clicked.getText().toString().trim().isEmpty()){
            answerText.setText(answerText.getText().toString() + " ");
            answerString += " ";
        }else{
            answerText.setText(answerText.getText().toString() + clicked.getText());
            answerString += clicked.getText();
        }
        try {
            if(answerString.length() == word.length()){
                if(answerString.equalsIgnoreCase(word)){
                    onCorrectAnswer();
                }else {
                    onWrongAnswer();
                }
            }
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    /** Rewards a correct answer and schedules the next word after a delay. */
    private void onCorrectAnswer() {
        clearButton.setEnabled(Boolean.FALSE);
        answerText.setTextColor(Color.rgb(33, 196, 18));
        if(correctSound != null){
            correctSound.start();
        }
        activityStats.addPoint(1);
        ProgressBar progressBar = (ProgressBar) findViewById(R.id.progressBar);
        progressBar.setProgress(getActivityProgress(1));
        TextView progressText = (TextView) findViewById(R.id.progressValue);
        progressText.setText((int)(progress)+"/"+wordList.size());
        points++;
        new Handler().postDelayed(new Runnable()
        {
            @Override
            public void run()
            {
                scrambledLayout.removeAllViews();
                answerText.setText("");
                answerString = "";
                answerText.setTextColor(Color.rgb(0, 0, 0));
                startWord();
            }
        }, 1200);
    }

    /** Signals a wrong answer and clears the attempt after a delay. */
    private void onWrongAnswer() {
        clearButton.setEnabled(Boolean.FALSE);
        answerText.setTextColor(Color.rgb(255, 0, 0));
        if(wrongSound != null){
            wrongSound.start();
        }
        vibrator.vibrate(1500);
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                answerString = "";
                answerText.setText("");
                answerText.setTextColor(Color.rgb(0, 0, 0));
                //TODO add
            }
        }, 1200);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_scrabble, menu);
        return true;
    }

    /**
     * Returns the next challenge in registration order, or null when the
     * list is exhausted (signalling the end of the game).
     */
    public Challenge getNewWord(){
        if(wordList.size() > counter){
            Challenge temp = wordList.get(counter);
            counter++;
            return temp;
        }
        else{
            return null;
        }
    }

    /**
     * Scramble the String: returns a random permutation of the input that
     * differs from the input whenever such a permutation exists.
     *
     * FIX: the previous version discarded the result of its retry call
     * ({@code scramble(word);}) — and recursed on the {@code word} field
     * instead of the parameter — so an unshuffled word could be returned.
     * Now the shuffle is retried in a loop until it differs, guarded against
     * words whose characters are all identical (for which no permutation can
     * ever differ and the loop would never terminate).
     */
    public String scramble(String wordToScramble){
        if(wordToScramble.length() <= 1){
            return wordToScramble;
        }
        // If every character equals the first, no permutation can differ.
        boolean canDiffer = false;
        for (int i = 1; i < wordToScramble.length(); i++) {
            if (wordToScramble.charAt(i) != wordToScramble.charAt(0)) {
                canDiffer = true;
                break;
            }
        }
        if (!canDiffer) {
            return wordToScramble;
        }
        String scrambled;
        do {
            scrambled = shuffleOnce(wordToScramble);
        } while (scrambled.equals(wordToScramble));
        return scrambled;
    }

    /** Produces one random permutation of {@code source}. */
    private String shuffleOnce(String source) {
        StringBuilder result = new StringBuilder(source.length());
        boolean[] used = new boolean[source.length()];
        // Pick still-unused positions at random until all are consumed.
        while (result.length() < source.length()) {
            int idx = generator.nextInt(source.length());
            if (!used[idx]) {
                used[idx] = true;
                result.append(source.charAt(idx));
            }
        }
        return result.toString();
    }

    @Override
    public boolean onTouch(View v, MotionEvent motion) {
        // Tint the pressed key while the finger is down.
        TextView touched = (TextView) findViewById(v.getId());
        if(motion.getAction() == MotionEvent.ACTION_DOWN){
            touched.setTextColor(Color.rgb(0, 189, 252));
        }
        else if(motion.getAction() == MotionEvent.ACTION_UP){
            touched.setTextColor(Color.rgb(0, 0, 0));
        }
        return false;
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if(requestCode == CHECK_CODE){
            if(resultCode == TextToSpeech.Engine.CHECK_VOICE_DATA_PASS){
                // Voice data available: (re)create the speaker.
                if(speaker == null){
                    speaker = new Speaker(this);
                }else{
                    speaker.destroy();
                    speaker = new Speaker(this);
                }
            }else {
                // Voice data missing: send the user to install TTS data.
                Intent install = new Intent();
                install.setAction(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA);
                startActivity(install);
            }
        }
    }

    /**
     * Sets the image path and the content description.
     * @param image raw image bytes; ignored when null
     * @param contentDescription word used to build the accessibility text
     * @param rotation rotation to apply to the image, in degrees
     */
    private void setImageView(byte[] image, String contentDescription, int rotation) {
        if (image != null) {
            Bitmap bitmap = BitmapFactory.decodeByteArray(image, 0, image.length);
            ImageView imageView = (ImageView) findViewById(R.id.scrambbleImage);
            String prefix = getString(R.string.imageOf);
            imageView.setContentDescription(prefix+contentDescription);
            imageView.setImageBitmap(bitmap);
            imageView.setRotation(rotation);
        }
    }

    /** Persists the session stats and shows the finish-game dialog. */
    public void onFinish(View view){
        FinishGameDialog dialog = new FinishGameDialog();
        FragmentManager fm = getFragmentManager();
        realm.executeTransaction(new Realm.Transaction() {
            @Override
            public void execute(Realm realm) {
                activityStats.setEnd(new Date());
                realm.insertOrUpdate(activityStats);
            }
        });
        progress = 0;
        dialog.show(fm, "finish the game");
    }

    public static int getPoints() {
        return points;
    }

    public static void setPoints(int points) {
        StudentGameActivity.points = points;
    }

    /**
     * Advances the progress counter by {@code add} words and returns the
     * overall completion as a percentage of the word list.
     */
    public int getActivityProgress(int add){
        progress = progress + add;
        double result = progress / wordList.size();
        return (int) (result * 100);
    }

    /** Converts a dp size to pixels using the current display density. */
    public int getDpValue(int size){
        float scale = getResources().getDisplayMetrics().density;
        int dpAsPixels = (int) (size*scale + 0.5f);
        return dpAsPixels;
    }

    /** Derives a letter-button width from the screen's density (dpi). */
    public int getButtonWidthBasedOnScreenDensity(){
        DisplayMetrics metrics = getResources().getDisplayMetrics();
        int densityDpi = metrics.densityDpi;
        return (int) (( densityDpi / 8 ) * 1.7) ;
    }
}
| |
package com.vmware.vim25;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for OvfParseDescriptorResult complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="OvfParseDescriptorResult">
* <complexContent>
* <extension base="{urn:vim25}DynamicData">
* <sequence>
* <element name="eula" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="network" type="{urn:vim25}OvfNetworkInfo" maxOccurs="unbounded" minOccurs="0"/>
* <element name="ipAllocationScheme" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="ipProtocols" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
* <element name="property" type="{urn:vim25}VAppPropertyInfo" maxOccurs="unbounded" minOccurs="0"/>
* <element name="productInfo" type="{urn:vim25}VAppProductInfo" minOccurs="0"/>
* <element name="annotation" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="approximateDownloadSize" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="approximateFlatDeploymentSize" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="approximateSparseDeploymentSize" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="defaultEntityName" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="virtualApp" type="{http://www.w3.org/2001/XMLSchema}boolean"/>
* <element name="deploymentOption" type="{urn:vim25}OvfDeploymentOption" maxOccurs="unbounded" minOccurs="0"/>
* <element name="defaultDeploymentOption" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="entityName" type="{urn:vim25}KeyValue" maxOccurs="unbounded" minOccurs="0"/>
* <element name="annotatedOst" type="{urn:vim25}OvfConsumerOstNode" minOccurs="0"/>
* <element name="error" type="{urn:vim25}LocalizedMethodFault" maxOccurs="unbounded" minOccurs="0"/>
* <element name="warning" type="{urn:vim25}LocalizedMethodFault" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated binding for the vim25 "OvfParseDescriptorResult"
// complex type (see the schema fragment in the class Javadoc above). The
// field names and propOrder are bound to the XML schema — do not hand-edit.
// Per JAXB convention, list-typed getters lazily create and return the LIVE
// backing list (mutations are reflected in this object), which is why list
// properties have no setters.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "OvfParseDescriptorResult", propOrder = {
    "eula",
    "network",
    "ipAllocationScheme",
    "ipProtocols",
    "property",
    "productInfo",
    "approximateDownloadSize",
    "approximateFlatDeploymentSize",
    "approximateSparseDeploymentSize",
    "defaultEntityName",
    "virtualApp",
    "deploymentOption",
    "defaultDeploymentOption",
    "entityName",
    "annotatedOst",
    "error",
    "warning"
})
public class OvfParseDescriptorResult
    extends DynamicData
{

    protected List<String> eula;
    protected List<OvfNetworkInfo> network;
    protected List<String> ipAllocationScheme;
    protected List<String> ipProtocols;
    protected List<VAppPropertyInfo> property;
    protected VAppProductInfo productInfo;
    @XmlElement(required = true)
    protected String annotation;
    protected Long approximateDownloadSize;
    protected Long approximateFlatDeploymentSize;
    protected Long approximateSparseDeploymentSize;
    @XmlElement(required = true)
    protected String defaultEntityName;
    protected boolean virtualApp;
    protected List<OvfDeploymentOption> deploymentOption;
    @XmlElement(required = true)
    protected String defaultDeploymentOption;
    protected List<KeyValue> entityName;
    protected OvfConsumerOstNode annotatedOst;
    protected List<LocalizedMethodFault> error;
    protected List<LocalizedMethodFault> warning;

    /**
     * Returns the live {@code eula} list, creating it on first access;
     * modifications to the returned list are reflected in this object.
     */
    public List<String> getEula() {
        if (eula == null) {
            eula = new ArrayList<String>();
        }
        return this.eula;
    }

    /** Returns the live {@code network} list, creating it on first access. */
    public List<OvfNetworkInfo> getNetwork() {
        if (network == null) {
            network = new ArrayList<OvfNetworkInfo>();
        }
        return this.network;
    }

    /** Returns the live {@code ipAllocationScheme} list, creating it on first access. */
    public List<String> getIpAllocationScheme() {
        if (ipAllocationScheme == null) {
            ipAllocationScheme = new ArrayList<String>();
        }
        return this.ipAllocationScheme;
    }

    /** Returns the live {@code ipProtocols} list, creating it on first access. */
    public List<String> getIpProtocols() {
        if (ipProtocols == null) {
            ipProtocols = new ArrayList<String>();
        }
        return this.ipProtocols;
    }

    /** Returns the live {@code property} list, creating it on first access. */
    public List<VAppPropertyInfo> getProperty() {
        if (property == null) {
            property = new ArrayList<VAppPropertyInfo>();
        }
        return this.property;
    }

    /** @return the {@code productInfo} value; may be null when unset. */
    public VAppProductInfo getProductInfo() {
        return productInfo;
    }

    /** @param value the new {@code productInfo} value. */
    public void setProductInfo(VAppProductInfo value) {
        this.productInfo = value;
    }

    /** @return the {@code annotation} value (required element in the schema). */
    public String getAnnotation() {
        return annotation;
    }

    /** @param value the new {@code annotation} value. */
    public void setAnnotation(String value) {
        this.annotation = value;
    }

    /** @return the {@code approximateDownloadSize} value; may be null when unset. */
    public Long getApproximateDownloadSize() {
        return approximateDownloadSize;
    }

    /** @param value the new {@code approximateDownloadSize} value. */
    public void setApproximateDownloadSize(Long value) {
        this.approximateDownloadSize = value;
    }

    /** @return the {@code approximateFlatDeploymentSize} value; may be null when unset. */
    public Long getApproximateFlatDeploymentSize() {
        return approximateFlatDeploymentSize;
    }

    /** @param value the new {@code approximateFlatDeploymentSize} value. */
    public void setApproximateFlatDeploymentSize(Long value) {
        this.approximateFlatDeploymentSize = value;
    }

    /** @return the {@code approximateSparseDeploymentSize} value; may be null when unset. */
    public Long getApproximateSparseDeploymentSize() {
        return approximateSparseDeploymentSize;
    }

    /** @param value the new {@code approximateSparseDeploymentSize} value. */
    public void setApproximateSparseDeploymentSize(Long value) {
        this.approximateSparseDeploymentSize = value;
    }

    /** @return the {@code defaultEntityName} value (required element in the schema). */
    public String getDefaultEntityName() {
        return defaultEntityName;
    }

    /** @param value the new {@code defaultEntityName} value. */
    public void setDefaultEntityName(String value) {
        this.defaultEntityName = value;
    }

    /** @return the {@code virtualApp} flag. */
    public boolean isVirtualApp() {
        return virtualApp;
    }

    /** @param value the new {@code virtualApp} flag. */
    public void setVirtualApp(boolean value) {
        this.virtualApp = value;
    }

    /** Returns the live {@code deploymentOption} list, creating it on first access. */
    public List<OvfDeploymentOption> getDeploymentOption() {
        if (deploymentOption == null) {
            deploymentOption = new ArrayList<OvfDeploymentOption>();
        }
        return this.deploymentOption;
    }

    /** @return the {@code defaultDeploymentOption} value (required element in the schema). */
    public String getDefaultDeploymentOption() {
        return defaultDeploymentOption;
    }

    /** @param value the new {@code defaultDeploymentOption} value. */
    public void setDefaultDeploymentOption(String value) {
        this.defaultDeploymentOption = value;
    }

    /** Returns the live {@code entityName} list, creating it on first access. */
    public List<KeyValue> getEntityName() {
        if (entityName == null) {
            entityName = new ArrayList<KeyValue>();
        }
        return this.entityName;
    }

    /** @return the {@code annotatedOst} value; may be null when unset. */
    public OvfConsumerOstNode getAnnotatedOst() {
        return annotatedOst;
    }

    /** @param value the new {@code annotatedOst} value. */
    public void setAnnotatedOst(OvfConsumerOstNode value) {
        this.annotatedOst = value;
    }

    /** Returns the live {@code error} list, creating it on first access. */
    public List<LocalizedMethodFault> getError() {
        if (error == null) {
            error = new ArrayList<LocalizedMethodFault>();
        }
        return this.error;
    }

    /** Returns the live {@code warning} list, creating it on first access. */
    public List<LocalizedMethodFault> getWarning() {
        if (warning == null) {
            warning = new ArrayList<LocalizedMethodFault>();
        }
        return this.warning;
    }

}
| |
package org.drip.analytics.holset;
/*
* -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*/
/*
* GENERATED on Fri Jan 11 19:54:06 EST 2013 ---- DO NOT DELETE
*/
/*!
* Copyright (C) 2013 Lakshmi Krishnamurthy
* Copyright (C) 2012 Lakshmi Krishnamurthy
* Copyright (C) 2011 Lakshmi Krishnamurthy
*
* This file is part of CreditAnalytics, a free-software/open-source library for
* fixed income analysts and developers - http://www.credit-trader.org
*
* CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special focus
* towards the needs of the bonds and credit products community.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * CLFHoliday provides the static holiday calendar for the CLF location
 * (per the header above, a generated file — do not edit the date data by
 * hand). It implements {@link org.drip.analytics.holset.LocationHoliday}
 * by exposing the location code and a {@code Locale} populated with fixed
 * holiday dates from 1998 through 2061 plus the standard weekend.
 */
public class CLFHoliday implements org.drip.analytics.holset.LocationHoliday {
	/**
	 * Empty CLFHoliday constructor.
	 */
	public CLFHoliday()
	{
	}

	/**
	 * Returns the holiday location code.
	 *
	 * @return the literal location code "CLF"
	 */
	public java.lang.String getHolidayLoc()
	{
		return "CLF";
	}

	/**
	 * Builds and returns the holiday set for this location: one static
	 * holiday per line below, followed by the standard weekend.
	 *
	 * <p>NOTE(review): entries from 1998-2009 carry descriptive holiday
	 * names; from 2010 onward the generator emitted the currency code
	 * "CLP" as the description. A few data oddities are flagged inline.
	 *
	 * @return the populated {@link org.drip.analytics.holiday.Locale}
	 */
	public org.drip.analytics.holiday.Locale getHolidaySet()
	{
		org.drip.analytics.holiday.Locale lh = new
			org.drip.analytics.holiday.Locale();
		lh.addStaticHoliday ("01-JAN-1998", "New Years Day");
		lh.addStaticHoliday ("10-APR-1998", "Good Friday");
		lh.addStaticHoliday ("01-MAY-1998", "Labour Day");
		lh.addStaticHoliday ("21-MAY-1998", "Battle of Iquique Day");
		lh.addStaticHoliday ("11-JUN-1998", "Corpus Christi");
		lh.addStaticHoliday ("29-JUN-1998", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("11-SEP-1998", "Liberation Day");
		lh.addStaticHoliday ("18-SEP-1998", "Independence Day");
		lh.addStaticHoliday ("12-OCT-1998", "Day of the Race");
		lh.addStaticHoliday ("08-DEC-1998", "Immaculate Conception");
		lh.addStaticHoliday ("25-DEC-1998", "Christmas Day");
		lh.addStaticHoliday ("01-JAN-1999", "New Years Day");
		lh.addStaticHoliday ("02-APR-1999", "Good Friday");
		lh.addStaticHoliday ("21-MAY-1999", "Battle of Iquique Day");
		lh.addStaticHoliday ("03-JUN-1999", "Corpus Christi");
		lh.addStaticHoliday ("29-JUN-1999", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("12-OCT-1999", "Day of the Race");
		lh.addStaticHoliday ("01-NOV-1999", "All Saints Day");
		lh.addStaticHoliday ("08-DEC-1999", "Immaculate Conception");
		lh.addStaticHoliday ("21-APR-2000", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2000", "Labour Day");
		lh.addStaticHoliday ("22-JUN-2000", "Corpus Christi");
		lh.addStaticHoliday ("29-JUN-2000", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("15-AUG-2000", "Assumption Day");
		lh.addStaticHoliday ("11-SEP-2000", "Liberation Day");
		lh.addStaticHoliday ("18-SEP-2000", "Independence Day");
		lh.addStaticHoliday ("19-SEP-2000", "Armed Forces Day");
		lh.addStaticHoliday ("12-OCT-2000", "Day of the Race");
		lh.addStaticHoliday ("01-NOV-2000", "All Saints Day");
		lh.addStaticHoliday ("08-DEC-2000", "Immaculate Conception");
		lh.addStaticHoliday ("25-DEC-2000", "Christmas Day");
		lh.addStaticHoliday ("01-JAN-2001", "New Years Day");
		lh.addStaticHoliday ("13-APR-2001", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2001", "Labour Day");
		lh.addStaticHoliday ("21-MAY-2001", "Battle of Iquique Day");
		lh.addStaticHoliday ("14-JUN-2001", "Corpus Christi");
		lh.addStaticHoliday ("29-JUN-2001", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("15-AUG-2001", "Assumption Day");
		lh.addStaticHoliday ("11-SEP-2001", "Liberation Day");
		lh.addStaticHoliday ("18-SEP-2001", "Independence Day");
		lh.addStaticHoliday ("19-SEP-2001", "Armed Forces Day");
		lh.addStaticHoliday ("12-OCT-2001", "Day of the Race");
		lh.addStaticHoliday ("01-NOV-2001", "All Saints Day");
		lh.addStaticHoliday ("25-DEC-2001", "Christmas Day");
		lh.addStaticHoliday ("01-JAN-2002", "New Years Day");
		lh.addStaticHoliday ("29-MAR-2002", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2002", "Labour Day");
		lh.addStaticHoliday ("21-MAY-2002", "Battle of Iquique Day");
		lh.addStaticHoliday ("30-MAY-2002", "Corpus Christi");
		lh.addStaticHoliday ("15-AUG-2002", "Assumption Day");
		lh.addStaticHoliday ("11-SEP-2002", "Liberation Day");
		lh.addStaticHoliday ("18-SEP-2002", "Independence Day");
		lh.addStaticHoliday ("19-SEP-2002", "Armed Forces Day");
		lh.addStaticHoliday ("01-NOV-2002", "All Saints Day");
		lh.addStaticHoliday ("25-DEC-2002", "Christmas Day");
		lh.addStaticHoliday ("01-JAN-2003", "New Years Day");
		lh.addStaticHoliday ("18-APR-2003", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2003", "Labour Day");
		lh.addStaticHoliday ("21-MAY-2003", "Battle of Iquique Day");
		lh.addStaticHoliday ("19-JUN-2003", "Corpus Christi");
		lh.addStaticHoliday ("15-AUG-2003", "Assumption Day");
		lh.addStaticHoliday ("11-SEP-2003", "Liberation Day");
		lh.addStaticHoliday ("18-SEP-2003", "Independence Day");
		lh.addStaticHoliday ("19-SEP-2003", "Armed Forces Day");
		lh.addStaticHoliday ("08-DEC-2003", "Immaculate Conception");
		lh.addStaticHoliday ("25-DEC-2003", "Christmas Day");
		lh.addStaticHoliday ("01-JAN-2004", "New Years Day");
		lh.addStaticHoliday ("09-APR-2004", "Good Friday");
		lh.addStaticHoliday ("21-MAY-2004", "Battle of Iquique Day");
		lh.addStaticHoliday ("10-JUN-2004", "Corpus Christi");
		lh.addStaticHoliday ("29-JUN-2004", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("12-OCT-2004", "Day of the Race");
		lh.addStaticHoliday ("01-NOV-2004", "All Saints Day");
		lh.addStaticHoliday ("08-DEC-2004", "Immaculate Conception");
		lh.addStaticHoliday ("25-MAR-2005", "Good Friday");
		lh.addStaticHoliday ("26-MAY-2005", "Corpus Christi");
		lh.addStaticHoliday ("29-JUN-2005", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("15-AUG-2005", "Assumption Day");
		lh.addStaticHoliday ("19-SEP-2005", "Armed Forces Day");
		lh.addStaticHoliday ("12-OCT-2005", "Day of the Race");
		lh.addStaticHoliday ("01-NOV-2005", "All Saints Day");
		lh.addStaticHoliday ("08-DEC-2005", "Immaculate Conception");
		lh.addStaticHoliday ("14-APR-2006", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2006", "Labour Day");
		lh.addStaticHoliday ("15-JUN-2006", "Corpus Christi");
		lh.addStaticHoliday ("29-JUN-2006", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("15-AUG-2006", "Assumption Day");
		lh.addStaticHoliday ("11-SEP-2006", "Liberation Day");
		lh.addStaticHoliday ("18-SEP-2006", "Independence Day");
		lh.addStaticHoliday ("19-SEP-2006", "Armed Forces Day");
		lh.addStaticHoliday ("12-OCT-2006", "Day of the Race");
		lh.addStaticHoliday ("01-NOV-2006", "All Saints Day");
		lh.addStaticHoliday ("08-DEC-2006", "Immaculate Conception");
		lh.addStaticHoliday ("25-DEC-2006", "Christmas Day");
		lh.addStaticHoliday ("01-JAN-2007", "New Years Day");
		lh.addStaticHoliday ("06-APR-2007", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2007", "Labour Day");
		lh.addStaticHoliday ("21-MAY-2007", "Battle of Iquique Day");
		lh.addStaticHoliday ("02-JUL-2007", "Saints Peter and Paul Day");
		lh.addStaticHoliday ("16-JUL-2007", "Virgen Del Carmen");
		lh.addStaticHoliday ("15-AUG-2007", "Assumption Day");
		lh.addStaticHoliday ("17-SEP-2007", "One off holiday");
		lh.addStaticHoliday ("18-SEP-2007", "Independence Day");
		lh.addStaticHoliday ("19-SEP-2007", "Armed Forces Day");
		lh.addStaticHoliday ("15-OCT-2007", "Day of the Race");
		lh.addStaticHoliday ("01-NOV-2007", "All Saints Day");
		lh.addStaticHoliday ("25-DEC-2007", "Christmas Day");
		lh.addStaticHoliday ("31-DEC-2007", "Bank Holiday");
		lh.addStaticHoliday ("01-JAN-2008", "New Years Day");
		lh.addStaticHoliday ("21-MAR-2008", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2008", "Labour Day");
		lh.addStaticHoliday ("21-MAY-2008", "Battle of Iquique Day");
		lh.addStaticHoliday ("15-AUG-2008", "Assumption Day");
		lh.addStaticHoliday ("18-SEP-2008", "Independence Day");
		lh.addStaticHoliday ("19-SEP-2008", "Armed Forces Day");
		lh.addStaticHoliday ("31-OCT-2008", "National Evangelical Date");
		lh.addStaticHoliday ("08-DEC-2008", "Immaculate Conception");
		lh.addStaticHoliday ("25-DEC-2008", "Christmas Day");
		lh.addStaticHoliday ("31-DEC-2008", "Day before New Year");
		lh.addStaticHoliday ("01-JAN-2009", "New Years Day");
		lh.addStaticHoliday ("10-APR-2009", "Good Friday");
		lh.addStaticHoliday ("01-MAY-2009", "Labour Day");
		lh.addStaticHoliday ("21-MAY-2009", "Battle of Iquique Day");
		lh.addStaticHoliday ("29-JUN-2009", "Saints Peter and Paul Day");
		// NOTE(review): "Birgen" below is spelled "Virgen" in the 2007 entry —
		// looks like a generator-source typo; description text only, dates unaffected.
		lh.addStaticHoliday ("16-JUL-2009", "Birgen Del Carmen Day");
		lh.addStaticHoliday ("18-SEP-2009", "Independence Day");
		lh.addStaticHoliday ("12-OCT-2009", "Day of the Race");
		lh.addStaticHoliday ("31-OCT-2009", "National Evangelical Date");
		lh.addStaticHoliday ("08-DEC-2009", "Immaculate Conception");
		lh.addStaticHoliday ("25-DEC-2009", "Christmas Day");
		lh.addStaticHoliday ("31-DEC-2009", "New Years Eve Bank Holiday");
		// From 2010 onward the generator emits "CLP" as the description
		// instead of a holiday name (one descriptive exception in 2012, below).
		lh.addStaticHoliday ("01-JAN-2010", "CLP");
		lh.addStaticHoliday ("02-APR-2010", "CLP");
		lh.addStaticHoliday ("21-MAY-2010", "CLP");
		lh.addStaticHoliday ("28-JUN-2010", "CLP");
		lh.addStaticHoliday ("16-JUL-2010", "CLP");
		lh.addStaticHoliday ("17-SEP-2010", "CLP");
		lh.addStaticHoliday ("20-SEP-2010", "CLP");
		lh.addStaticHoliday ("11-OCT-2010", "CLP");
		lh.addStaticHoliday ("01-NOV-2010", "CLP");
		lh.addStaticHoliday ("08-DEC-2010", "CLP");
		lh.addStaticHoliday ("31-DEC-2010", "CLP");
		lh.addStaticHoliday ("22-APR-2011", "CLP");
		lh.addStaticHoliday ("27-JUN-2011", "CLP");
		lh.addStaticHoliday ("15-AUG-2011", "CLP");
		lh.addStaticHoliday ("19-SEP-2011", "CLP");
		lh.addStaticHoliday ("10-OCT-2011", "CLP");
		lh.addStaticHoliday ("31-OCT-2011", "CLP");
		lh.addStaticHoliday ("01-NOV-2011", "CLP");
		lh.addStaticHoliday ("08-DEC-2011", "CLP");
		lh.addStaticHoliday ("06-APR-2012", "CLP");
		lh.addStaticHoliday ("01-MAY-2012", "CLP");
		lh.addStaticHoliday ("21-MAY-2012", "CLP");
		lh.addStaticHoliday ("02-JUL-2012", "CLP");
		lh.addStaticHoliday ("16-JUL-2012", "CLP");
		lh.addStaticHoliday ("15-AUG-2012", "CLP");
		// NOTE(review): lone descriptive entry in the post-2009 range; also,
		// 17-SEP is labeled "Independence Day" while 18-SEP (the usual date)
		// is "CLP" — presumably a generator-source quirk; verify upstream.
		lh.addStaticHoliday ("17-SEP-2012", "Independence Day");
		lh.addStaticHoliday ("18-SEP-2012", "CLP");
		lh.addStaticHoliday ("19-SEP-2012", "CLP");
		lh.addStaticHoliday ("15-OCT-2012", "CLP");
		lh.addStaticHoliday ("01-NOV-2012", "CLP");
		lh.addStaticHoliday ("02-NOV-2012", "CLP");
		lh.addStaticHoliday ("25-DEC-2012", "CLP");
		lh.addStaticHoliday ("31-DEC-2012", "CLP");
		lh.addStaticHoliday ("01-JAN-2013", "CLP");
		lh.addStaticHoliday ("29-MAR-2013", "CLP");
		lh.addStaticHoliday ("01-MAY-2013", "CLP");
		lh.addStaticHoliday ("21-MAY-2013", "CLP");
		lh.addStaticHoliday ("16-JUL-2013", "CLP");
		lh.addStaticHoliday ("15-AUG-2013", "CLP");
		lh.addStaticHoliday ("18-SEP-2013", "CLP");
		lh.addStaticHoliday ("19-SEP-2013", "CLP");
		lh.addStaticHoliday ("31-OCT-2013", "CLP");
		lh.addStaticHoliday ("01-NOV-2013", "CLP");
		lh.addStaticHoliday ("25-DEC-2013", "CLP");
		lh.addStaticHoliday ("31-DEC-2013", "CLP");
		lh.addStaticHoliday ("01-JAN-2014", "CLP");
		lh.addStaticHoliday ("18-APR-2014", "CLP");
		lh.addStaticHoliday ("01-MAY-2014", "CLP");
		lh.addStaticHoliday ("21-MAY-2014", "CLP");
		lh.addStaticHoliday ("16-JUL-2014", "CLP");
		lh.addStaticHoliday ("15-AUG-2014", "CLP");
		lh.addStaticHoliday ("18-SEP-2014", "CLP");
		lh.addStaticHoliday ("19-SEP-2014", "CLP");
		lh.addStaticHoliday ("31-OCT-2014", "CLP");
		lh.addStaticHoliday ("08-DEC-2014", "CLP");
		lh.addStaticHoliday ("25-DEC-2014", "CLP");
		lh.addStaticHoliday ("31-DEC-2014", "CLP");
		lh.addStaticHoliday ("01-JAN-2015", "CLP");
		lh.addStaticHoliday ("03-APR-2015", "CLP");
		lh.addStaticHoliday ("01-MAY-2015", "CLP");
		lh.addStaticHoliday ("21-MAY-2015", "CLP");
		lh.addStaticHoliday ("29-JUN-2015", "CLP");
		lh.addStaticHoliday ("16-JUL-2015", "CLP");
		lh.addStaticHoliday ("18-SEP-2015", "CLP");
		lh.addStaticHoliday ("12-OCT-2015", "CLP");
		lh.addStaticHoliday ("08-DEC-2015", "CLP");
		lh.addStaticHoliday ("25-DEC-2015", "CLP");
		lh.addStaticHoliday ("31-DEC-2015", "CLP");
		lh.addStaticHoliday ("01-JAN-2016", "CLP");
		lh.addStaticHoliday ("25-MAR-2016", "CLP");
		lh.addStaticHoliday ("27-JUN-2016", "CLP");
		lh.addStaticHoliday ("15-AUG-2016", "CLP");
		lh.addStaticHoliday ("19-SEP-2016", "CLP");
		lh.addStaticHoliday ("10-OCT-2016", "CLP");
		lh.addStaticHoliday ("31-OCT-2016", "CLP");
		lh.addStaticHoliday ("01-NOV-2016", "CLP");
		lh.addStaticHoliday ("08-DEC-2016", "CLP");
		lh.addStaticHoliday ("14-APR-2017", "CLP");
		lh.addStaticHoliday ("01-MAY-2017", "CLP");
		lh.addStaticHoliday ("26-JUN-2017", "CLP");
		lh.addStaticHoliday ("15-AUG-2017", "CLP");
		lh.addStaticHoliday ("18-SEP-2017", "CLP");
		lh.addStaticHoliday ("19-SEP-2017", "CLP");
		lh.addStaticHoliday ("09-OCT-2017", "CLP");
		lh.addStaticHoliday ("27-OCT-2017", "CLP");
		lh.addStaticHoliday ("01-NOV-2017", "CLP");
		lh.addStaticHoliday ("08-DEC-2017", "CLP");
		lh.addStaticHoliday ("25-DEC-2017", "CLP");
		lh.addStaticHoliday ("01-JAN-2018", "CLP");
		lh.addStaticHoliday ("30-MAR-2018", "CLP");
		lh.addStaticHoliday ("01-MAY-2018", "CLP");
		lh.addStaticHoliday ("21-MAY-2018", "CLP");
		lh.addStaticHoliday ("02-JUL-2018", "CLP");
		lh.addStaticHoliday ("16-JUL-2018", "CLP");
		lh.addStaticHoliday ("15-AUG-2018", "CLP");
		lh.addStaticHoliday ("18-SEP-2018", "CLP");
		lh.addStaticHoliday ("19-SEP-2018", "CLP");
		lh.addStaticHoliday ("15-OCT-2018", "CLP");
		lh.addStaticHoliday ("01-NOV-2018", "CLP");
		lh.addStaticHoliday ("02-NOV-2018", "CLP");
		lh.addStaticHoliday ("25-DEC-2018", "CLP");
		lh.addStaticHoliday ("31-DEC-2018", "CLP");
		lh.addStaticHoliday ("01-JAN-2019", "CLP");
		lh.addStaticHoliday ("19-APR-2019", "CLP");
		lh.addStaticHoliday ("01-MAY-2019", "CLP");
		lh.addStaticHoliday ("21-MAY-2019", "CLP");
		lh.addStaticHoliday ("16-JUL-2019", "CLP");
		lh.addStaticHoliday ("15-AUG-2019", "CLP");
		lh.addStaticHoliday ("18-SEP-2019", "CLP");
		lh.addStaticHoliday ("19-SEP-2019", "CLP");
		lh.addStaticHoliday ("31-OCT-2019", "CLP");
		lh.addStaticHoliday ("01-NOV-2019", "CLP");
		lh.addStaticHoliday ("25-DEC-2019", "CLP");
		lh.addStaticHoliday ("31-DEC-2019", "CLP");
		lh.addStaticHoliday ("01-JAN-2020", "CLP");
		lh.addStaticHoliday ("10-APR-2020", "CLP");
		lh.addStaticHoliday ("01-MAY-2020", "CLP");
		lh.addStaticHoliday ("21-MAY-2020", "CLP");
		lh.addStaticHoliday ("29-JUN-2020", "CLP");
		lh.addStaticHoliday ("16-JUL-2020", "CLP");
		lh.addStaticHoliday ("18-SEP-2020", "CLP");
		lh.addStaticHoliday ("12-OCT-2020", "CLP");
		lh.addStaticHoliday ("08-DEC-2020", "CLP");
		lh.addStaticHoliday ("25-DEC-2020", "CLP");
		lh.addStaticHoliday ("31-DEC-2020", "CLP");
		lh.addStaticHoliday ("01-JAN-2021", "CLP");
		lh.addStaticHoliday ("02-APR-2021", "CLP");
		lh.addStaticHoliday ("21-MAY-2021", "CLP");
		lh.addStaticHoliday ("28-JUN-2021", "CLP");
		lh.addStaticHoliday ("16-JUL-2021", "CLP");
		lh.addStaticHoliday ("12-OCT-2021", "CLP");
		lh.addStaticHoliday ("01-NOV-2021", "CLP");
		lh.addStaticHoliday ("08-DEC-2021", "CLP");
		lh.addStaticHoliday ("31-DEC-2021", "CLP");
		lh.addStaticHoliday ("15-APR-2022", "CLP");
		lh.addStaticHoliday ("27-JUN-2022", "CLP");
		lh.addStaticHoliday ("15-AUG-2022", "CLP");
		lh.addStaticHoliday ("19-SEP-2022", "CLP");
		lh.addStaticHoliday ("10-OCT-2022", "CLP");
		lh.addStaticHoliday ("31-OCT-2022", "CLP");
		lh.addStaticHoliday ("01-NOV-2022", "CLP");
		lh.addStaticHoliday ("08-DEC-2022", "CLP");
		lh.addStaticHoliday ("07-APR-2023", "CLP");
		lh.addStaticHoliday ("01-MAY-2023", "CLP");
		lh.addStaticHoliday ("26-JUN-2023", "CLP");
		lh.addStaticHoliday ("15-AUG-2023", "CLP");
		lh.addStaticHoliday ("18-SEP-2023", "CLP");
		lh.addStaticHoliday ("19-SEP-2023", "CLP");
		lh.addStaticHoliday ("09-OCT-2023", "CLP");
		lh.addStaticHoliday ("27-OCT-2023", "CLP");
		lh.addStaticHoliday ("01-NOV-2023", "CLP");
		lh.addStaticHoliday ("08-DEC-2023", "CLP");
		lh.addStaticHoliday ("25-DEC-2023", "CLP");
		lh.addStaticHoliday ("01-JAN-2024", "CLP");
		lh.addStaticHoliday ("29-MAR-2024", "CLP");
		lh.addStaticHoliday ("01-MAY-2024", "CLP");
		lh.addStaticHoliday ("21-MAY-2024", "CLP");
		lh.addStaticHoliday ("16-JUL-2024", "CLP");
		lh.addStaticHoliday ("15-AUG-2024", "CLP");
		lh.addStaticHoliday ("18-SEP-2024", "CLP");
		lh.addStaticHoliday ("19-SEP-2024", "CLP");
		lh.addStaticHoliday ("31-OCT-2024", "CLP");
		lh.addStaticHoliday ("01-NOV-2024", "CLP");
		lh.addStaticHoliday ("25-DEC-2024", "CLP");
		lh.addStaticHoliday ("31-DEC-2024", "CLP");
		lh.addStaticHoliday ("01-JAN-2025", "CLP");
		lh.addStaticHoliday ("18-APR-2025", "CLP");
		lh.addStaticHoliday ("01-MAY-2025", "CLP");
		lh.addStaticHoliday ("21-MAY-2025", "CLP");
		lh.addStaticHoliday ("16-JUL-2025", "CLP");
		lh.addStaticHoliday ("15-AUG-2025", "CLP");
		lh.addStaticHoliday ("18-SEP-2025", "CLP");
		lh.addStaticHoliday ("19-SEP-2025", "CLP");
		lh.addStaticHoliday ("31-OCT-2025", "CLP");
		lh.addStaticHoliday ("08-DEC-2025", "CLP");
		lh.addStaticHoliday ("25-DEC-2025", "CLP");
		lh.addStaticHoliday ("31-DEC-2025", "CLP");
		lh.addStaticHoliday ("01-JAN-2026", "CLP");
		lh.addStaticHoliday ("03-APR-2026", "CLP");
		lh.addStaticHoliday ("01-MAY-2026", "CLP");
		lh.addStaticHoliday ("21-MAY-2026", "CLP");
		lh.addStaticHoliday ("29-JUN-2026", "CLP");
		lh.addStaticHoliday ("16-JUL-2026", "CLP");
		lh.addStaticHoliday ("18-SEP-2026", "CLP");
		lh.addStaticHoliday ("12-OCT-2026", "CLP");
		lh.addStaticHoliday ("08-DEC-2026", "CLP");
		lh.addStaticHoliday ("25-DEC-2026", "CLP");
		lh.addStaticHoliday ("31-DEC-2026", "CLP");
		lh.addStaticHoliday ("01-JAN-2027", "CLP");
		lh.addStaticHoliday ("26-MAR-2027", "CLP");
		lh.addStaticHoliday ("21-MAY-2027", "CLP");
		lh.addStaticHoliday ("28-JUN-2027", "CLP");
		lh.addStaticHoliday ("16-JUL-2027", "CLP");
		lh.addStaticHoliday ("11-OCT-2027", "CLP");
		lh.addStaticHoliday ("01-NOV-2027", "CLP");
		lh.addStaticHoliday ("08-DEC-2027", "CLP");
		lh.addStaticHoliday ("31-DEC-2027", "CLP");
		lh.addStaticHoliday ("14-APR-2028", "CLP");
		lh.addStaticHoliday ("01-MAY-2028", "CLP");
		lh.addStaticHoliday ("26-JUN-2028", "CLP");
		lh.addStaticHoliday ("15-AUG-2028", "CLP");
		lh.addStaticHoliday ("18-SEP-2028", "CLP");
		lh.addStaticHoliday ("19-SEP-2028", "CLP");
		lh.addStaticHoliday ("09-OCT-2028", "CLP");
		lh.addStaticHoliday ("27-OCT-2028", "CLP");
		lh.addStaticHoliday ("01-NOV-2028", "CLP");
		lh.addStaticHoliday ("08-DEC-2028", "CLP");
		lh.addStaticHoliday ("25-DEC-2028", "CLP");
		lh.addStaticHoliday ("01-JAN-2029", "CLP");
		lh.addStaticHoliday ("30-MAR-2029", "CLP");
		lh.addStaticHoliday ("01-MAY-2029", "CLP");
		lh.addStaticHoliday ("21-MAY-2029", "CLP");
		lh.addStaticHoliday ("02-JUL-2029", "CLP");
		lh.addStaticHoliday ("16-JUL-2029", "CLP");
		lh.addStaticHoliday ("15-AUG-2029", "CLP");
		lh.addStaticHoliday ("18-SEP-2029", "CLP");
		lh.addStaticHoliday ("19-SEP-2029", "CLP");
		lh.addStaticHoliday ("15-OCT-2029", "CLP");
		lh.addStaticHoliday ("01-NOV-2029", "CLP");
		lh.addStaticHoliday ("02-NOV-2029", "CLP");
		lh.addStaticHoliday ("25-DEC-2029", "CLP");
		lh.addStaticHoliday ("31-DEC-2029", "CLP");
		lh.addStaticHoliday ("01-JAN-2030", "CLP");
		lh.addStaticHoliday ("19-APR-2030", "CLP");
		lh.addStaticHoliday ("01-MAY-2030", "CLP");
		lh.addStaticHoliday ("21-MAY-2030", "CLP");
		lh.addStaticHoliday ("16-JUL-2030", "CLP");
		lh.addStaticHoliday ("15-AUG-2030", "CLP");
		lh.addStaticHoliday ("18-SEP-2030", "CLP");
		lh.addStaticHoliday ("19-SEP-2030", "CLP");
		lh.addStaticHoliday ("31-OCT-2030", "CLP");
		lh.addStaticHoliday ("01-NOV-2030", "CLP");
		lh.addStaticHoliday ("25-DEC-2030", "CLP");
		lh.addStaticHoliday ("31-DEC-2030", "CLP");
		lh.addStaticHoliday ("01-JAN-2031", "CLP");
		lh.addStaticHoliday ("11-APR-2031", "CLP");
		lh.addStaticHoliday ("01-MAY-2031", "CLP");
		lh.addStaticHoliday ("21-MAY-2031", "CLP");
		lh.addStaticHoliday ("16-JUL-2031", "CLP");
		lh.addStaticHoliday ("15-AUG-2031", "CLP");
		lh.addStaticHoliday ("18-SEP-2031", "CLP");
		lh.addStaticHoliday ("19-SEP-2031", "CLP");
		lh.addStaticHoliday ("31-OCT-2031", "CLP");
		lh.addStaticHoliday ("08-DEC-2031", "CLP");
		lh.addStaticHoliday ("25-DEC-2031", "CLP");
		lh.addStaticHoliday ("31-DEC-2031", "CLP");
		lh.addStaticHoliday ("01-JAN-2032", "CLP");
		lh.addStaticHoliday ("26-MAR-2032", "CLP");
		lh.addStaticHoliday ("21-MAY-2032", "CLP");
		lh.addStaticHoliday ("28-JUN-2032", "CLP");
		lh.addStaticHoliday ("16-JUL-2032", "CLP");
		lh.addStaticHoliday ("11-OCT-2032", "CLP");
		lh.addStaticHoliday ("01-NOV-2032", "CLP");
		lh.addStaticHoliday ("08-DEC-2032", "CLP");
		lh.addStaticHoliday ("31-DEC-2032", "CLP");
		lh.addStaticHoliday ("15-APR-2033", "CLP");
		lh.addStaticHoliday ("27-JUN-2033", "CLP");
		lh.addStaticHoliday ("15-AUG-2033", "CLP");
		lh.addStaticHoliday ("19-SEP-2033", "CLP");
		lh.addStaticHoliday ("10-OCT-2033", "CLP");
		lh.addStaticHoliday ("31-OCT-2033", "CLP");
		lh.addStaticHoliday ("01-NOV-2033", "CLP");
		lh.addStaticHoliday ("08-DEC-2033", "CLP");
		lh.addStaticHoliday ("07-APR-2034", "CLP");
		lh.addStaticHoliday ("01-MAY-2034", "CLP");
		lh.addStaticHoliday ("26-JUN-2034", "CLP");
		lh.addStaticHoliday ("15-AUG-2034", "CLP");
		lh.addStaticHoliday ("18-SEP-2034", "CLP");
		lh.addStaticHoliday ("19-SEP-2034", "CLP");
		lh.addStaticHoliday ("09-OCT-2034", "CLP");
		lh.addStaticHoliday ("27-OCT-2034", "CLP");
		lh.addStaticHoliday ("01-NOV-2034", "CLP");
		lh.addStaticHoliday ("08-DEC-2034", "CLP");
		lh.addStaticHoliday ("25-DEC-2034", "CLP");
		lh.addStaticHoliday ("01-JAN-2035", "CLP");
		lh.addStaticHoliday ("23-MAR-2035", "CLP");
		lh.addStaticHoliday ("01-MAY-2035", "CLP");
		lh.addStaticHoliday ("21-MAY-2035", "CLP");
		lh.addStaticHoliday ("02-JUL-2035", "CLP");
		lh.addStaticHoliday ("16-JUL-2035", "CLP");
		lh.addStaticHoliday ("15-AUG-2035", "CLP");
		lh.addStaticHoliday ("18-SEP-2035", "CLP");
		lh.addStaticHoliday ("19-SEP-2035", "CLP");
		lh.addStaticHoliday ("15-OCT-2035", "CLP");
		lh.addStaticHoliday ("01-NOV-2035", "CLP");
		lh.addStaticHoliday ("02-NOV-2035", "CLP");
		lh.addStaticHoliday ("25-DEC-2035", "CLP");
		lh.addStaticHoliday ("31-DEC-2035", "CLP");
		lh.addStaticHoliday ("01-JAN-2036", "CLP");
		lh.addStaticHoliday ("11-APR-2036", "CLP");
		lh.addStaticHoliday ("01-MAY-2036", "CLP");
		lh.addStaticHoliday ("21-MAY-2036", "CLP");
		lh.addStaticHoliday ("16-JUL-2036", "CLP");
		lh.addStaticHoliday ("15-AUG-2036", "CLP");
		lh.addStaticHoliday ("18-SEP-2036", "CLP");
		lh.addStaticHoliday ("19-SEP-2036", "CLP");
		lh.addStaticHoliday ("31-OCT-2036", "CLP");
		lh.addStaticHoliday ("08-DEC-2036", "CLP");
		lh.addStaticHoliday ("25-DEC-2036", "CLP");
		lh.addStaticHoliday ("31-DEC-2036", "CLP");
		lh.addStaticHoliday ("01-JAN-2037", "CLP");
		lh.addStaticHoliday ("03-APR-2037", "CLP");
		lh.addStaticHoliday ("01-MAY-2037", "CLP");
		lh.addStaticHoliday ("21-MAY-2037", "CLP");
		lh.addStaticHoliday ("29-JUN-2037", "CLP");
		lh.addStaticHoliday ("16-JUL-2037", "CLP");
		lh.addStaticHoliday ("18-SEP-2037", "CLP");
		lh.addStaticHoliday ("12-OCT-2037", "CLP");
		lh.addStaticHoliday ("08-DEC-2037", "CLP");
		lh.addStaticHoliday ("25-DEC-2037", "CLP");
		lh.addStaticHoliday ("31-DEC-2037", "CLP");
		lh.addStaticHoliday ("01-JAN-2038", "CLP");
		lh.addStaticHoliday ("23-APR-2038", "CLP");
		lh.addStaticHoliday ("21-MAY-2038", "CLP");
		lh.addStaticHoliday ("28-JUN-2038", "CLP");
		lh.addStaticHoliday ("16-JUL-2038", "CLP");
		lh.addStaticHoliday ("11-OCT-2038", "CLP");
		lh.addStaticHoliday ("01-NOV-2038", "CLP");
		lh.addStaticHoliday ("08-DEC-2038", "CLP");
		lh.addStaticHoliday ("31-DEC-2038", "CLP");
		lh.addStaticHoliday ("08-APR-2039", "CLP");
		lh.addStaticHoliday ("27-JUN-2039", "CLP");
		lh.addStaticHoliday ("15-AUG-2039", "CLP");
		lh.addStaticHoliday ("19-SEP-2039", "CLP");
		lh.addStaticHoliday ("10-OCT-2039", "CLP");
		lh.addStaticHoliday ("31-OCT-2039", "CLP");
		lh.addStaticHoliday ("01-NOV-2039", "CLP");
		lh.addStaticHoliday ("08-DEC-2039", "CLP");
		lh.addStaticHoliday ("30-MAR-2040", "CLP");
		lh.addStaticHoliday ("01-MAY-2040", "CLP");
		lh.addStaticHoliday ("21-MAY-2040", "CLP");
		lh.addStaticHoliday ("02-JUL-2040", "CLP");
		lh.addStaticHoliday ("16-JUL-2040", "CLP");
		lh.addStaticHoliday ("15-AUG-2040", "CLP");
		lh.addStaticHoliday ("18-SEP-2040", "CLP");
		lh.addStaticHoliday ("19-SEP-2040", "CLP");
		lh.addStaticHoliday ("15-OCT-2040", "CLP");
		lh.addStaticHoliday ("01-NOV-2040", "CLP");
		lh.addStaticHoliday ("02-NOV-2040", "CLP");
		lh.addStaticHoliday ("25-DEC-2040", "CLP");
		lh.addStaticHoliday ("31-DEC-2040", "CLP");
		lh.addStaticHoliday ("01-JAN-2041", "CLP");
		lh.addStaticHoliday ("19-APR-2041", "CLP");
		lh.addStaticHoliday ("01-MAY-2041", "CLP");
		lh.addStaticHoliday ("21-MAY-2041", "CLP");
		lh.addStaticHoliday ("16-JUL-2041", "CLP");
		lh.addStaticHoliday ("15-AUG-2041", "CLP");
		lh.addStaticHoliday ("18-SEP-2041", "CLP");
		lh.addStaticHoliday ("19-SEP-2041", "CLP");
		lh.addStaticHoliday ("31-OCT-2041", "CLP");
		lh.addStaticHoliday ("01-NOV-2041", "CLP");
		lh.addStaticHoliday ("25-DEC-2041", "CLP");
		lh.addStaticHoliday ("31-DEC-2041", "CLP");
		lh.addStaticHoliday ("01-JAN-2042", "CLP");
		lh.addStaticHoliday ("04-APR-2042", "CLP");
		lh.addStaticHoliday ("01-MAY-2042", "CLP");
		lh.addStaticHoliday ("21-MAY-2042", "CLP");
		lh.addStaticHoliday ("16-JUL-2042", "CLP");
		lh.addStaticHoliday ("15-AUG-2042", "CLP");
		lh.addStaticHoliday ("18-SEP-2042", "CLP");
		lh.addStaticHoliday ("19-SEP-2042", "CLP");
		lh.addStaticHoliday ("31-OCT-2042", "CLP");
		lh.addStaticHoliday ("08-DEC-2042", "CLP");
		lh.addStaticHoliday ("25-DEC-2042", "CLP");
		lh.addStaticHoliday ("31-DEC-2042", "CLP");
		lh.addStaticHoliday ("01-JAN-2043", "CLP");
		lh.addStaticHoliday ("27-MAR-2043", "CLP");
		lh.addStaticHoliday ("01-MAY-2043", "CLP");
		lh.addStaticHoliday ("21-MAY-2043", "CLP");
		lh.addStaticHoliday ("29-JUN-2043", "CLP");
		lh.addStaticHoliday ("16-JUL-2043", "CLP");
		lh.addStaticHoliday ("18-SEP-2043", "CLP");
		lh.addStaticHoliday ("12-OCT-2043", "CLP");
		lh.addStaticHoliday ("08-DEC-2043", "CLP");
		lh.addStaticHoliday ("25-DEC-2043", "CLP");
		lh.addStaticHoliday ("31-DEC-2043", "CLP");
		lh.addStaticHoliday ("01-JAN-2044", "CLP");
		lh.addStaticHoliday ("15-APR-2044", "CLP");
		lh.addStaticHoliday ("27-JUN-2044", "CLP");
		lh.addStaticHoliday ("15-AUG-2044", "CLP");
		lh.addStaticHoliday ("19-SEP-2044", "CLP");
		lh.addStaticHoliday ("10-OCT-2044", "CLP");
		lh.addStaticHoliday ("31-OCT-2044", "CLP");
		lh.addStaticHoliday ("01-NOV-2044", "CLP");
		lh.addStaticHoliday ("08-DEC-2044", "CLP");
		lh.addStaticHoliday ("07-APR-2045", "CLP");
		lh.addStaticHoliday ("01-MAY-2045", "CLP");
		lh.addStaticHoliday ("26-JUN-2045", "CLP");
		lh.addStaticHoliday ("15-AUG-2045", "CLP");
		lh.addStaticHoliday ("18-SEP-2045", "CLP");
		lh.addStaticHoliday ("19-SEP-2045", "CLP");
		lh.addStaticHoliday ("09-OCT-2045", "CLP");
		lh.addStaticHoliday ("27-OCT-2045", "CLP");
		lh.addStaticHoliday ("01-NOV-2045", "CLP");
		lh.addStaticHoliday ("08-DEC-2045", "CLP");
		lh.addStaticHoliday ("25-DEC-2045", "CLP");
		lh.addStaticHoliday ("01-JAN-2046", "CLP");
		lh.addStaticHoliday ("23-MAR-2046", "CLP");
		lh.addStaticHoliday ("01-MAY-2046", "CLP");
		lh.addStaticHoliday ("21-MAY-2046", "CLP");
		lh.addStaticHoliday ("02-JUL-2046", "CLP");
		lh.addStaticHoliday ("16-JUL-2046", "CLP");
		lh.addStaticHoliday ("15-AUG-2046", "CLP");
		lh.addStaticHoliday ("18-SEP-2046", "CLP");
		lh.addStaticHoliday ("19-SEP-2046", "CLP");
		lh.addStaticHoliday ("15-OCT-2046", "CLP");
		lh.addStaticHoliday ("01-NOV-2046", "CLP");
		lh.addStaticHoliday ("02-NOV-2046", "CLP");
		lh.addStaticHoliday ("25-DEC-2046", "CLP");
		lh.addStaticHoliday ("31-DEC-2046", "CLP");
		lh.addStaticHoliday ("01-JAN-2047", "CLP");
		lh.addStaticHoliday ("12-APR-2047", "CLP");
		lh.addStaticHoliday ("01-MAY-2047", "CLP");
		lh.addStaticHoliday ("21-MAY-2047", "CLP");
		lh.addStaticHoliday ("16-JUL-2047", "CLP");
		lh.addStaticHoliday ("15-AUG-2047", "CLP");
		lh.addStaticHoliday ("18-SEP-2047", "CLP");
		lh.addStaticHoliday ("19-SEP-2047", "CLP");
		lh.addStaticHoliday ("31-OCT-2047", "CLP");
		lh.addStaticHoliday ("01-NOV-2047", "CLP");
		lh.addStaticHoliday ("25-DEC-2047", "CLP");
		lh.addStaticHoliday ("31-DEC-2047", "CLP");
		lh.addStaticHoliday ("01-JAN-2048", "CLP");
		lh.addStaticHoliday ("03-APR-2048", "CLP");
		lh.addStaticHoliday ("01-MAY-2048", "CLP");
		lh.addStaticHoliday ("21-MAY-2048", "CLP");
		lh.addStaticHoliday ("29-JUN-2048", "CLP");
		lh.addStaticHoliday ("16-JUL-2048", "CLP");
		lh.addStaticHoliday ("18-SEP-2048", "CLP");
		lh.addStaticHoliday ("12-OCT-2048", "CLP");
		lh.addStaticHoliday ("08-DEC-2048", "CLP");
		lh.addStaticHoliday ("25-DEC-2048", "CLP");
		lh.addStaticHoliday ("31-DEC-2048", "CLP");
		lh.addStaticHoliday ("01-JAN-2049", "CLP");
		lh.addStaticHoliday ("16-APR-2049", "CLP");
		lh.addStaticHoliday ("21-MAY-2049", "CLP");
		lh.addStaticHoliday ("28-JUN-2049", "CLP");
		lh.addStaticHoliday ("16-JUL-2049", "CLP");
		lh.addStaticHoliday ("11-OCT-2049", "CLP");
		lh.addStaticHoliday ("01-NOV-2049", "CLP");
		lh.addStaticHoliday ("08-DEC-2049", "CLP");
		lh.addStaticHoliday ("31-DEC-2049", "CLP");
		lh.addStaticHoliday ("08-APR-2050", "CLP");
		lh.addStaticHoliday ("27-JUN-2050", "CLP");
		lh.addStaticHoliday ("15-AUG-2050", "CLP");
		lh.addStaticHoliday ("19-SEP-2050", "CLP");
		lh.addStaticHoliday ("10-OCT-2050", "CLP");
		lh.addStaticHoliday ("31-OCT-2050", "CLP");
		lh.addStaticHoliday ("01-NOV-2050", "CLP");
		lh.addStaticHoliday ("08-DEC-2050", "CLP");
		lh.addStaticHoliday ("31-MAR-2051", "CLP");
		lh.addStaticHoliday ("01-MAY-2051", "CLP");
		lh.addStaticHoliday ("26-JUN-2051", "CLP");
		lh.addStaticHoliday ("15-AUG-2051", "CLP");
		lh.addStaticHoliday ("18-SEP-2051", "CLP");
		lh.addStaticHoliday ("19-SEP-2051", "CLP");
		lh.addStaticHoliday ("09-OCT-2051", "CLP");
		lh.addStaticHoliday ("27-OCT-2051", "CLP");
		lh.addStaticHoliday ("01-NOV-2051", "CLP");
		lh.addStaticHoliday ("08-DEC-2051", "CLP");
		lh.addStaticHoliday ("25-DEC-2051", "CLP");
		lh.addStaticHoliday ("01-JAN-2052", "CLP");
		lh.addStaticHoliday ("19-APR-2052", "CLP");
		lh.addStaticHoliday ("01-MAY-2052", "CLP");
		lh.addStaticHoliday ("21-MAY-2052", "CLP");
		lh.addStaticHoliday ("16-JUL-2052", "CLP");
		lh.addStaticHoliday ("15-AUG-2052", "CLP");
		lh.addStaticHoliday ("18-SEP-2052", "CLP");
		lh.addStaticHoliday ("19-SEP-2052", "CLP");
		lh.addStaticHoliday ("31-OCT-2052", "CLP");
		lh.addStaticHoliday ("01-NOV-2052", "CLP");
		lh.addStaticHoliday ("25-DEC-2052", "CLP");
		lh.addStaticHoliday ("31-DEC-2052", "CLP");
		lh.addStaticHoliday ("01-JAN-2053", "CLP");
		lh.addStaticHoliday ("04-APR-2053", "CLP");
		lh.addStaticHoliday ("01-MAY-2053", "CLP");
		lh.addStaticHoliday ("21-MAY-2053", "CLP");
		lh.addStaticHoliday ("16-JUL-2053", "CLP");
		lh.addStaticHoliday ("15-AUG-2053", "CLP");
		lh.addStaticHoliday ("18-SEP-2053", "CLP");
		lh.addStaticHoliday ("19-SEP-2053", "CLP");
		lh.addStaticHoliday ("31-OCT-2053", "CLP");
		lh.addStaticHoliday ("08-DEC-2053", "CLP");
		lh.addStaticHoliday ("25-DEC-2053", "CLP");
		lh.addStaticHoliday ("31-DEC-2053", "CLP");
		lh.addStaticHoliday ("01-JAN-2054", "CLP");
		lh.addStaticHoliday ("27-MAR-2054", "CLP");
		lh.addStaticHoliday ("01-MAY-2054", "CLP");
		lh.addStaticHoliday ("21-MAY-2054", "CLP");
		lh.addStaticHoliday ("29-JUN-2054", "CLP");
		lh.addStaticHoliday ("16-JUL-2054", "CLP");
		lh.addStaticHoliday ("18-SEP-2054", "CLP");
		lh.addStaticHoliday ("12-OCT-2054", "CLP");
		lh.addStaticHoliday ("08-DEC-2054", "CLP");
		lh.addStaticHoliday ("25-DEC-2054", "CLP");
		lh.addStaticHoliday ("31-DEC-2054", "CLP");
		lh.addStaticHoliday ("01-JAN-2055", "CLP");
		lh.addStaticHoliday ("16-APR-2055", "CLP");
		lh.addStaticHoliday ("21-MAY-2055", "CLP");
		lh.addStaticHoliday ("28-JUN-2055", "CLP");
		lh.addStaticHoliday ("16-JUL-2055", "CLP");
		lh.addStaticHoliday ("11-OCT-2055", "CLP");
		lh.addStaticHoliday ("01-NOV-2055", "CLP");
		lh.addStaticHoliday ("08-DEC-2055", "CLP");
		lh.addStaticHoliday ("31-DEC-2055", "CLP");
		lh.addStaticHoliday ("31-MAR-2056", "CLP");
		lh.addStaticHoliday ("01-MAY-2056", "CLP");
		lh.addStaticHoliday ("26-JUN-2056", "CLP");
		lh.addStaticHoliday ("15-AUG-2056", "CLP");
		lh.addStaticHoliday ("18-SEP-2056", "CLP");
		lh.addStaticHoliday ("19-SEP-2056", "CLP");
		lh.addStaticHoliday ("09-OCT-2056", "CLP");
		lh.addStaticHoliday ("27-OCT-2056", "CLP");
		lh.addStaticHoliday ("01-NOV-2056", "CLP");
		lh.addStaticHoliday ("08-DEC-2056", "CLP");
		lh.addStaticHoliday ("25-DEC-2056", "CLP");
		lh.addStaticHoliday ("01-JAN-2057", "CLP");
		lh.addStaticHoliday ("20-APR-2057", "CLP");
		lh.addStaticHoliday ("01-MAY-2057", "CLP");
		lh.addStaticHoliday ("21-MAY-2057", "CLP");
		lh.addStaticHoliday ("02-JUL-2057", "CLP");
		lh.addStaticHoliday ("16-JUL-2057", "CLP");
		lh.addStaticHoliday ("15-AUG-2057", "CLP");
		lh.addStaticHoliday ("18-SEP-2057", "CLP");
		lh.addStaticHoliday ("19-SEP-2057", "CLP");
		lh.addStaticHoliday ("15-OCT-2057", "CLP");
		lh.addStaticHoliday ("01-NOV-2057", "CLP");
		lh.addStaticHoliday ("02-NOV-2057", "CLP");
		lh.addStaticHoliday ("25-DEC-2057", "CLP");
		lh.addStaticHoliday ("31-DEC-2057", "CLP");
		lh.addStaticHoliday ("01-JAN-2058", "CLP");
		lh.addStaticHoliday ("12-APR-2058", "CLP");
		lh.addStaticHoliday ("01-MAY-2058", "CLP");
		lh.addStaticHoliday ("21-MAY-2058", "CLP");
		lh.addStaticHoliday ("16-JUL-2058", "CLP");
		lh.addStaticHoliday ("15-AUG-2058", "CLP");
		lh.addStaticHoliday ("18-SEP-2058", "CLP");
		lh.addStaticHoliday ("19-SEP-2058", "CLP");
		lh.addStaticHoliday ("31-OCT-2058", "CLP");
		lh.addStaticHoliday ("01-NOV-2058", "CLP");
		lh.addStaticHoliday ("25-DEC-2058", "CLP");
		lh.addStaticHoliday ("31-DEC-2058", "CLP");
		lh.addStaticHoliday ("01-JAN-2059", "CLP");
		lh.addStaticHoliday ("28-MAR-2059", "CLP");
		lh.addStaticHoliday ("01-MAY-2059", "CLP");
		lh.addStaticHoliday ("21-MAY-2059", "CLP");
		lh.addStaticHoliday ("16-JUL-2059", "CLP");
		lh.addStaticHoliday ("15-AUG-2059", "CLP");
		lh.addStaticHoliday ("18-SEP-2059", "CLP");
		lh.addStaticHoliday ("19-SEP-2059", "CLP");
		lh.addStaticHoliday ("31-OCT-2059", "CLP");
		lh.addStaticHoliday ("08-DEC-2059", "CLP");
		lh.addStaticHoliday ("25-DEC-2059", "CLP");
		lh.addStaticHoliday ("31-DEC-2059", "CLP");
		lh.addStaticHoliday ("01-JAN-2060", "CLP");
		lh.addStaticHoliday ("16-APR-2060", "CLP");
		lh.addStaticHoliday ("21-MAY-2060", "CLP");
		lh.addStaticHoliday ("28-JUN-2060", "CLP");
		lh.addStaticHoliday ("16-JUL-2060", "CLP");
		lh.addStaticHoliday ("11-OCT-2060", "CLP");
		lh.addStaticHoliday ("01-NOV-2060", "CLP");
		lh.addStaticHoliday ("08-DEC-2060", "CLP");
		lh.addStaticHoliday ("31-DEC-2060", "CLP");
		lh.addStaticHoliday ("08-APR-2061", "CLP");
		lh.addStaticHoliday ("27-JUN-2061", "CLP");
		lh.addStaticHoliday ("15-AUG-2061", "CLP");
		lh.addStaticHoliday ("19-SEP-2061", "CLP");
		lh.addStaticHoliday ("10-OCT-2061", "CLP");
		lh.addStaticHoliday ("31-OCT-2061", "CLP");
		lh.addStaticHoliday ("08-DEC-2061", "CLP");
		// Saturdays and Sundays are non-business days for this location.
		lh.addStandardWeekend();
		return lh;
	}
}
| |
package com.oic.vdd.manager;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import com.facebook.AccessToken;
import com.facebook.GraphRequest;
import com.facebook.GraphResponse;
import com.facebook.HttpMethod;
import com.facebook.login.LoginManager;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.oic.vdd.models.Page;
import com.oic.vdd.models.Video;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import rx.Observable;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
/**
* Created by khacpham on 12/23/15.
*/
/**
 * Singleton facade around the Facebook SDK: login, session queries, and Graph
 * API fetches for the current user's videos and liked pages.
 *
 * <p>All Graph fetches are exposed as RxJava {@link Observable}s that
 * subscribe on the IO scheduler and deliver results on the Android main
 * thread.</p>
 *
 * Created by khacpham on 12/23/15.
 */
public class FacebookMng {

    /** Debug log tag (kept identical to the original literal). */
    private static final String TAG = "TAG";

    /** Graph API fields requested for every video query. */
    private static final String VIDEO_FIELDS = "id,description,source,format,from";

    /** Graph API fields requested for the liked-page query. */
    private static final String PAGE_FIELDS = "id,name,cover,category,created_time,picture.type(large)";

    /** Read permissions requested at login. */
    public static String[] PERMISSION = {
            "user_videos",
            "user_likes",
    };

    private static FacebookMng _instance;

    /** Application context; never an Activity, so the singleton cannot leak one. */
    private final Context context;

    private FacebookMng(Context context) {
        // Hold only the application context to avoid leaking an Activity
        // through the process-wide singleton.
        this.context = context == null ? null : context.getApplicationContext();
    }

    /**
     * Returns the process-wide instance, creating it on first use.
     *
     * <p>NOTE(review): lazy init is not thread-safe; assumed to be called
     * from the main thread only — confirm with callers.</p>
     */
    public static FacebookMng getInstance(Context context) {
        if (_instance == null) {
            _instance = new FacebookMng(context);
        }
        return _instance;
    }

    /** Starts the Facebook read-permission login flow for {@link #PERMISSION}. */
    public void login(Activity context) {
        LoginManager.getInstance().logInWithReadPermissions(context, Arrays.asList(PERMISSION));
    }

    /**
     * Facebook user id of the current session.
     * NOTE(review): throws NPE when no session exists — callers appear to
     * check {@link #isLoggedin()} first; confirm.
     */
    public String getUserId() {
        return AccessToken.getCurrentAccessToken().getUserId();
    }

    /**
     * Fetches the current user's display name from {@code /me} (Graph v2.5).
     *
     * @return observable emitting the name once, or an error when the
     *         response cannot be parsed.
     */
    public Observable<String> getUserName() {
        Observable<String> rxObj = Observable.create(new Observable.OnSubscribe<String>() {
            @Override
            public void call(final Subscriber<? super String> subscriber) {
                Bundle params = new Bundle();
                params.putString("limit", "999");
                /* make the API call */
                GraphRequest request = new GraphRequest(
                        AccessToken.getCurrentAccessToken(),
                        "/me",
                        params,
                        HttpMethod.GET,
                        new GraphRequest.Callback() {
                            public void onCompleted(GraphResponse response) {
                                // valueOf guards against a null raw response crashing Log.e.
                                Log.e(TAG, String.valueOf(response.getRawResponse()));
                                try {
                                    String name = response.getJSONObject().get("name") + "";
                                    subscriber.onNext(name);
                                    subscriber.onCompleted();
                                } catch (Exception e) {
                                    // Covers JSONException and an NPE when the Graph call
                                    // failed and getJSONObject() is null.
                                    e.printStackTrace();
                                    subscriber.onError(e);
                                }
                            }
                        },
                        "v2.5");
                request.executeAsync();
            }
        });
        return rxObj
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread());
    }

    /**
     * Raw access token of the current session.
     * NOTE(review): throws NPE when no session exists — see {@link #getUserId()}.
     */
    public String getAccessToken() {
        return AccessToken.getCurrentAccessToken().getToken();
    }

    /**
     * Granted permissions of the current session, or an empty set when there
     * is no session. (Replaces the previous catch-NullPointerException idiom
     * with an explicit null check.)
     */
    public Set<String> getPermission() {
        AccessToken token = AccessToken.getCurrentAccessToken();
        return token == null ? new HashSet<String>() : token.getPermissions();
    }

    /** @return true when a non-empty, non-expired access token is present. */
    public boolean isLoggedin() {
        AccessToken accessToken = AccessToken.getCurrentAccessToken();
        return accessToken != null
                && !TextUtils.isEmpty(accessToken.getToken())
                && !accessToken.isExpired();
    }

    /**
     * Downloads the current user's large profile picture synchronously.
     *
     * <p>Blocking network call — must not run on the main thread (Android
     * would throw NetworkOnMainThreadException); presumably invoked from a
     * worker thread — confirm with callers.</p>
     *
     * @return the decoded bitmap, or {@code null} on any I/O failure
     *         (matching the original behavior).
     */
    public Bitmap getPhotoFacebook() {
        final String nomimg = "https://graph.facebook.com/" + getUserId() + "/picture?type=large";
        HttpURLConnection connection = null;
        InputStream inputStream = null;
        try {
            URL imageURL = new URL(nomimg);
            connection = (HttpURLConnection) imageURL.openConnection();
            connection.setDoInput(true);
            connection.setInstanceFollowRedirects(true);
            connection.connect();
            inputStream = connection.getInputStream();
            return BitmapFactory.decodeStream(inputStream);
        } catch (IOException e) {
            // MalformedURLException is an IOException, so this also covers
            // the (previously NPE-prone) bad-URL case.
            e.printStackTrace();
            return null;
        } finally {
            // Always release the stream and connection; the original leaked both.
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException ignored) {
                    // Best-effort close; nothing useful to do here.
                }
            }
            if (connection != null) {
                connection.disconnect();
            }
        }
    }

    /**
     * Shared Graph v2.2 GET that deserializes the {@code data} array of the
     * response with Gson. Factored out of the four former copy-pasted
     * fetch methods.
     *
     * @param graphPath versioned Graph path, e.g. {@code "v2.2/me/videos"}
     * @param fields    comma-separated value for the {@code fields} parameter
     * @param listType  Gson token describing the concrete list type
     * @param logTag    prefix for the debug log line
     */
    private <T> Observable<List<T>> fetchGraphList(final String graphPath, final String fields,
                                                   final TypeToken<List<T>> listType, final String logTag) {
        Observable<List<T>> rxObj = Observable.create(new Observable.OnSubscribe<List<T>>() {
            @Override
            public void call(final Subscriber<? super List<T>> subscriber) {
                Bundle params = new Bundle();
                params.putString("fields", fields);
                params.putString("limit", "999");
                /* make the API call */
                GraphRequest request = new GraphRequest(
                        AccessToken.getCurrentAccessToken(),
                        graphPath,
                        params,
                        HttpMethod.GET,
                        new GraphRequest.Callback() {
                            public void onCompleted(GraphResponse response) {
                                // valueOf guards against a null raw response crashing Log.e.
                                Log.e(TAG, logTag + String.valueOf(response.getRawResponse()));
                                try {
                                    JSONArray data = response.getJSONObject().getJSONArray("data");
                                    List<T> items = new Gson().fromJson(data.toString(), listType.getType());
                                    subscriber.onNext(items);
                                    subscriber.onCompleted();
                                } catch (Exception e) {
                                    // JSONException, or NPE when the Graph call failed
                                    // and getJSONObject() is null.
                                    e.printStackTrace();
                                    subscriber.onError(e);
                                }
                            }
                        },
                        "v2.2");
                request.setVersion("v2.2");
                request.executeAsync();
            }
        });
        return rxObj
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread());
    }

    /** Videos visible on the current user's profile ({@code me/videos}). */
    public Observable<List<Video>> getVideoUser() {
        return fetchGraphList("v2.2/me/videos", VIDEO_FIELDS,
                new TypeToken<List<Video>>() {}, "getVideoUser:");
    }

    /**
     * Videos of the given page.
     *
     * @param pageId Graph id of the page
     */
    public Observable<List<Video>> getVideoPage(final String pageId) {
        return fetchGraphList(String.format("v2.2/%s/videos", pageId), VIDEO_FIELDS,
                new TypeToken<List<Video>>() {}, "getVideoPage:");
    }

    /** Videos the current user uploaded ({@code me/videos/uploaded}). */
    public Observable<List<Video>> getUploadedVideos() {
        return fetchGraphList("v2.2/me/videos/uploaded", VIDEO_FIELDS,
                new TypeToken<List<Video>>() {}, "getUploadedVideos:");
    }

    /** Pages the current user has liked ({@code me/likes}). */
    public Observable<List<Page>> getLikePage() {
        return fetchGraphList("v2.2/me/likes", PAGE_FIELDS,
                new TypeToken<List<Page>>() {}, "getLikePage:");
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Represents the output of a <code>BatchWriteItem</code> operation.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dynamodb-2012-08-10/BatchWriteItem" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class BatchWriteItemResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* A map of tables and requests against those tables that were not processed. The <code>UnprocessedItems</code>
* value is in the same form as <code>RequestItems</code>, so you can provide this value directly to a subsequent
* <code>BatchGetItem</code> operation. For more information, see <code>RequestItems</code> in the Request
* Parameters section.
* </p>
* <p>
* Each <code>UnprocessedItems</code> entry consists of a table name and, for that table, a list of operations to
* perform (<code>DeleteRequest</code> or <code>PutRequest</code>).
* </p>
* <ul>
* <li>
* <p>
* <code>DeleteRequest</code> - Perform a <code>DeleteItem</code> operation on the specified item. The item to be
* deleted is identified by a <code>Key</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Key</code> - A map of primary key attribute values that uniquely identify the item. Each entry in this map
* consists of an attribute name and an attribute value.
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <code>PutRequest</code> - Perform a <code>PutItem</code> operation on the specified item. The item to be put is
* identified by an <code>Item</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Item</code> - A map of attributes and their values. Each entry in this map consists of an attribute name
* and an attribute value. Attribute values must not be null; string and binary type attributes must have lengths
* greater than zero; and set type attributes must not be empty. Requests that contain empty values will be rejected
* with a <code>ValidationException</code> exception.
* </p>
* <p>
* If you specify any attributes that are part of an index key, then the data types for those attributes must match
* those of the schema in the table's attribute definition.
* </p>
* </li>
* </ul>
* </li>
* </ul>
* <p>
* If there are no unprocessed items remaining, the response contains an empty <code>UnprocessedItems</code> map.
* </p>
*/
private java.util.Map<String, java.util.List<WriteRequest>> unprocessedItems;
/**
* <p>
* A list of tables that were processed by <code>BatchWriteItem</code> and, for each table, information about any
* item collections that were affected by individual <code>DeleteItem</code> or <code>PutItem</code> operations.
* </p>
* <p>
* Each entry consists of the following subelements:
* </p>
* <ul>
* <li>
* <p>
* <code>ItemCollectionKey</code> - The partition key value of the item collection. This is the same as the
* partition key value of the item.
* </p>
* </li>
* <li>
* <p>
* <code>SizeEstimateRangeGB</code> - An estimate of item collection size, expressed in GB. This is a two-element
* array containing a lower bound and an upper bound for the estimate. The estimate includes the size of all the
* items in the table, plus the size of all attributes projected into all of the local secondary indexes on the
* table. Use this estimate to measure whether a local secondary index is approaching its size limit.
* </p>
* <p>
* The estimate is subject to change over time; therefore, do not rely on the precision or accuracy of the estimate.
* </p>
* </li>
* </ul>
*/
private java.util.Map<String, java.util.List<ItemCollectionMetrics>> itemCollectionMetrics;
/**
* <p>
* The capacity units consumed by the entire <code>BatchWriteItem</code> operation.
* </p>
* <p>
* Each element consists of:
* </p>
* <ul>
* <li>
* <p>
* <code>TableName</code> - The table that consumed the provisioned throughput.
* </p>
* </li>
* <li>
* <p>
* <code>CapacityUnits</code> - The total number of capacity units consumed.
* </p>
* </li>
* </ul>
*/
private java.util.List<ConsumedCapacity> consumedCapacity;
/**
* <p>
* A map of tables and requests against those tables that were not processed. The <code>UnprocessedItems</code>
* value is in the same form as <code>RequestItems</code>, so you can provide this value directly to a subsequent
* <code>BatchGetItem</code> operation. For more information, see <code>RequestItems</code> in the Request
* Parameters section.
* </p>
* <p>
* Each <code>UnprocessedItems</code> entry consists of a table name and, for that table, a list of operations to
* perform (<code>DeleteRequest</code> or <code>PutRequest</code>).
* </p>
* <ul>
* <li>
* <p>
* <code>DeleteRequest</code> - Perform a <code>DeleteItem</code> operation on the specified item. The item to be
* deleted is identified by a <code>Key</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Key</code> - A map of primary key attribute values that uniquely identify the item. Each entry in this map
* consists of an attribute name and an attribute value.
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <code>PutRequest</code> - Perform a <code>PutItem</code> operation on the specified item. The item to be put is
* identified by an <code>Item</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Item</code> - A map of attributes and their values. Each entry in this map consists of an attribute name
* and an attribute value. Attribute values must not be null; string and binary type attributes must have lengths
* greater than zero; and set type attributes must not be empty. Requests that contain empty values will be rejected
* with a <code>ValidationException</code> exception.
* </p>
* <p>
* If you specify any attributes that are part of an index key, then the data types for those attributes must match
* those of the schema in the table's attribute definition.
* </p>
* </li>
* </ul>
* </li>
* </ul>
* <p>
* If there are no unprocessed items remaining, the response contains an empty <code>UnprocessedItems</code> map.
* </p>
*
* @return A map of tables and requests against those tables that were not processed. The
* <code>UnprocessedItems</code> value is in the same form as <code>RequestItems</code>, so you can provide
* this value directly to a subsequent <code>BatchGetItem</code> operation. For more information, see
* <code>RequestItems</code> in the Request Parameters section.</p>
* <p>
* Each <code>UnprocessedItems</code> entry consists of a table name and, for that table, a list of
* operations to perform (<code>DeleteRequest</code> or <code>PutRequest</code>).
* </p>
* <ul>
* <li>
* <p>
* <code>DeleteRequest</code> - Perform a <code>DeleteItem</code> operation on the specified item. The item
* to be deleted is identified by a <code>Key</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Key</code> - A map of primary key attribute values that uniquely identify the item. Each entry in
* this map consists of an attribute name and an attribute value.
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <code>PutRequest</code> - Perform a <code>PutItem</code> operation on the specified item. The item to be
* put is identified by an <code>Item</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Item</code> - A map of attributes and their values. Each entry in this map consists of an attribute
* name and an attribute value. Attribute values must not be null; string and binary type attributes must
* have lengths greater than zero; and set type attributes must not be empty. Requests that contain empty
* values will be rejected with a <code>ValidationException</code> exception.
* </p>
* <p>
* If you specify any attributes that are part of an index key, then the data types for those attributes
* must match those of the schema in the table's attribute definition.
* </p>
* </li>
* </ul>
* </li>
* </ul>
* <p>
* If there are no unprocessed items remaining, the response contains an empty <code>UnprocessedItems</code>
* map.
*/
public java.util.Map<String, java.util.List<WriteRequest>> getUnprocessedItems() {
    // Plain accessor; may return null when nothing was left unprocessed and
    // no entries were ever set.
    return this.unprocessedItems;
}
/**
* <p>
* A map of tables and requests against those tables that were not processed. The <code>UnprocessedItems</code>
* value is in the same form as <code>RequestItems</code>, so you can provide this value directly to a subsequent
* <code>BatchGetItem</code> operation. For more information, see <code>RequestItems</code> in the Request
* Parameters section.
* </p>
* <p>
* Each <code>UnprocessedItems</code> entry consists of a table name and, for that table, a list of operations to
* perform (<code>DeleteRequest</code> or <code>PutRequest</code>).
* </p>
* <ul>
* <li>
* <p>
* <code>DeleteRequest</code> - Perform a <code>DeleteItem</code> operation on the specified item. The item to be
* deleted is identified by a <code>Key</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Key</code> - A map of primary key attribute values that uniquely identify the item. Each entry in this map
* consists of an attribute name and an attribute value.
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <code>PutRequest</code> - Perform a <code>PutItem</code> operation on the specified item. The item to be put is
* identified by an <code>Item</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Item</code> - A map of attributes and their values. Each entry in this map consists of an attribute name
* and an attribute value. Attribute values must not be null; string and binary type attributes must have lengths
* greater than zero; and set type attributes must not be empty. Requests that contain empty values will be rejected
* with a <code>ValidationException</code> exception.
* </p>
* <p>
* If you specify any attributes that are part of an index key, then the data types for those attributes must match
* those of the schema in the table's attribute definition.
* </p>
* </li>
* </ul>
* </li>
* </ul>
* <p>
* If there are no unprocessed items remaining, the response contains an empty <code>UnprocessedItems</code> map.
* </p>
*
* @param unprocessedItems
* A map of tables and requests against those tables that were not processed. The
* <code>UnprocessedItems</code> value is in the same form as <code>RequestItems</code>, so you can provide
* this value directly to a subsequent <code>BatchGetItem</code> operation. For more information, see
* <code>RequestItems</code> in the Request Parameters section.</p>
* <p>
* Each <code>UnprocessedItems</code> entry consists of a table name and, for that table, a list of
* operations to perform (<code>DeleteRequest</code> or <code>PutRequest</code>).
* </p>
* <ul>
* <li>
* <p>
* <code>DeleteRequest</code> - Perform a <code>DeleteItem</code> operation on the specified item. The item
* to be deleted is identified by a <code>Key</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Key</code> - A map of primary key attribute values that uniquely identify the item. Each entry in
* this map consists of an attribute name and an attribute value.
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <code>PutRequest</code> - Perform a <code>PutItem</code> operation on the specified item. The item to be
* put is identified by an <code>Item</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Item</code> - A map of attributes and their values. Each entry in this map consists of an attribute
* name and an attribute value. Attribute values must not be null; string and binary type attributes must
* have lengths greater than zero; and set type attributes must not be empty. Requests that contain empty
* values will be rejected with a <code>ValidationException</code> exception.
* </p>
* <p>
* If you specify any attributes that are part of an index key, then the data types for those attributes must
* match those of the schema in the table's attribute definition.
* </p>
* </li>
* </ul>
* </li>
* </ul>
* <p>
* If there are no unprocessed items remaining, the response contains an empty <code>UnprocessedItems</code>
* map.
*/
public void setUnprocessedItems(final java.util.Map<String, java.util.List<WriteRequest>> unprocessedItems) {
    // Stores the caller-supplied map as-is (no defensive copy, matching the
    // generated-SDK convention for this class).
    this.unprocessedItems = unprocessedItems;
}
/**
* <p>
* A map of tables and requests against those tables that were not processed. The <code>UnprocessedItems</code>
* value is in the same form as <code>RequestItems</code>, so you can provide this value directly to a subsequent
* <code>BatchGetItem</code> operation. For more information, see <code>RequestItems</code> in the Request
* Parameters section.
* </p>
* <p>
* Each <code>UnprocessedItems</code> entry consists of a table name and, for that table, a list of operations to
* perform (<code>DeleteRequest</code> or <code>PutRequest</code>).
* </p>
* <ul>
* <li>
* <p>
* <code>DeleteRequest</code> - Perform a <code>DeleteItem</code> operation on the specified item. The item to be
* deleted is identified by a <code>Key</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Key</code> - A map of primary key attribute values that uniquely identify the item. Each entry in this map
* consists of an attribute name and an attribute value.
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <code>PutRequest</code> - Perform a <code>PutItem</code> operation on the specified item. The item to be put is
* identified by an <code>Item</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Item</code> - A map of attributes and their values. Each entry in this map consists of an attribute name
* and an attribute value. Attribute values must not be null; string and binary type attributes must have lengths
* greater than zero; and set type attributes must not be empty. Requests that contain empty values will be rejected
* with a <code>ValidationException</code> exception.
* </p>
* <p>
* If you specify any attributes that are part of an index key, then the data types for those attributes must match
* those of the schema in the table's attribute definition.
* </p>
* </li>
* </ul>
* </li>
* </ul>
* <p>
* If there are no unprocessed items remaining, the response contains an empty <code>UnprocessedItems</code> map.
* </p>
*
* @param unprocessedItems
* A map of tables and requests against those tables that were not processed. The
* <code>UnprocessedItems</code> value is in the same form as <code>RequestItems</code>, so you can provide
* this value directly to a subsequent <code>BatchGetItem</code> operation. For more information, see
* <code>RequestItems</code> in the Request Parameters section.</p>
* <p>
* Each <code>UnprocessedItems</code> entry consists of a table name and, for that table, a list of
* operations to perform (<code>DeleteRequest</code> or <code>PutRequest</code>).
* </p>
* <ul>
* <li>
* <p>
* <code>DeleteRequest</code> - Perform a <code>DeleteItem</code> operation on the specified item. The item
* to be deleted is identified by a <code>Key</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Key</code> - A map of primary key attribute values that uniquely identify the item. Each entry in
* this map consists of an attribute name and an attribute value.
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <code>PutRequest</code> - Perform a <code>PutItem</code> operation on the specified item. The item to be
* put is identified by an <code>Item</code> subelement:
* </p>
* <ul>
* <li>
* <p>
* <code>Item</code> - A map of attributes and their values. Each entry in this map consists of an attribute
* name and an attribute value. Attribute values must not be null; string and binary type attributes must
* have lengths greater than zero; and set type attributes must not be empty. Requests that contain empty
* values will be rejected with a <code>ValidationException</code> exception.
* </p>
* <p>
* If you specify any attributes that are part of an index key, then the data types for those attributes must
* match those of the schema in the table's attribute definition.
* </p>
* </li>
* </ul>
* </li>
* </ul>
* <p>
* If there are no unprocessed items remaining, the response contains an empty <code>UnprocessedItems</code>
* map.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public BatchWriteItemResult withUnprocessedItems(final java.util.Map<String, java.util.List<WriteRequest>> unprocessedItems) {
    // Fluent variant: delegate to the setter, then return this for chaining.
    this.setUnprocessedItems(unprocessedItems);
    return this;
}
/**
 * Add a single entry to the UnprocessedItems map, creating the map lazily on
 * first use.
 *
 * @param key   table name for the entry
 * @param value write requests that were not processed for that table
 * @return a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException if an entry with the same key was already added
 */
public BatchWriteItemResult addUnprocessedItemsEntry(String key, java.util.List<WriteRequest> value) {
    if (null == this.unprocessedItems) {
        this.unprocessedItems = new java.util.HashMap<String, java.util.List<WriteRequest>>();
    }
    if (this.unprocessedItems.containsKey(key))
        // String concatenation (instead of key.toString()) keeps this an
        // IllegalArgumentException rather than an NPE when key is null.
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    this.unprocessedItems.put(key, value);
    return this;
}
/**
* Removes all the entries added into UnprocessedItems.
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public BatchWriteItemResult clearUnprocessedItemsEntries() {
    // Drop the whole map; a later addUnprocessedItemsEntry call recreates it.
    unprocessedItems = null;
    return this;
}
/**
* <p>
* A list of tables that were processed by <code>BatchWriteItem</code> and, for each table, information about any
* item collections that were affected by individual <code>DeleteItem</code> or <code>PutItem</code> operations.
* </p>
* <p>
* Each entry consists of the following subelements:
* </p>
* <ul>
* <li>
* <p>
* <code>ItemCollectionKey</code> - The partition key value of the item collection. This is the same as the
* partition key value of the item.
* </p>
* </li>
* <li>
* <p>
* <code>SizeEstimateRangeGB</code> - An estimate of item collection size, expressed in GB. This is a two-element
* array containing a lower bound and an upper bound for the estimate. The estimate includes the size of all the
* items in the table, plus the size of all attributes projected into all of the local secondary indexes on the
* table. Use this estimate to measure whether a local secondary index is approaching its size limit.
* </p>
* <p>
* The estimate is subject to change over time; therefore, do not rely on the precision or accuracy of the estimate.
* </p>
* </li>
* </ul>
*
* @return A list of tables that were processed by <code>BatchWriteItem</code> and, for each table, information
* about any item collections that were affected by individual <code>DeleteItem</code> or
* <code>PutItem</code> operations.</p>
* <p>
* Each entry consists of the following subelements:
* </p>
* <ul>
* <li>
* <p>
* <code>ItemCollectionKey</code> - The partition key value of the item collection. This is the same as the
* partition key value of the item.
* </p>
* </li>
* <li>
* <p>
* <code>SizeEstimateRangeGB</code> - An estimate of item collection size, expressed in GB. This is a
* two-element array containing a lower bound and an upper bound for the estimate. The estimate includes the
* size of all the items in the table, plus the size of all attributes projected into all of the local
* secondary indexes on the table. Use this estimate to measure whether a local secondary index is
* approaching its size limit.
* </p>
* <p>
* The estimate is subject to change over time; therefore, do not rely on the precision or accuracy of the
* estimate.
* </p>
* </li>
*/
public java.util.Map<String, java.util.List<ItemCollectionMetrics>> getItemCollectionMetrics() {
    // Plain accessor; may return null when no metrics were set.
    return this.itemCollectionMetrics;
}
/**
* <p>
* A list of tables that were processed by <code>BatchWriteItem</code> and, for each table, information about any
* item collections that were affected by individual <code>DeleteItem</code> or <code>PutItem</code> operations.
* </p>
* <p>
* Each entry consists of the following subelements:
* </p>
* <ul>
* <li>
* <p>
* <code>ItemCollectionKey</code> - The partition key value of the item collection. This is the same as the
* partition key value of the item.
* </p>
* </li>
* <li>
* <p>
* <code>SizeEstimateRangeGB</code> - An estimate of item collection size, expressed in GB. This is a two-element
* array containing a lower bound and an upper bound for the estimate. The estimate includes the size of all the
* items in the table, plus the size of all attributes projected into all of the local secondary indexes on the
* table. Use this estimate to measure whether a local secondary index is approaching its size limit.
* </p>
* <p>
* The estimate is subject to change over time; therefore, do not rely on the precision or accuracy of the estimate.
* </p>
* </li>
* </ul>
*
* @param itemCollectionMetrics
* A list of tables that were processed by <code>BatchWriteItem</code> and, for each table, information about
* any item collections that were affected by individual <code>DeleteItem</code> or <code>PutItem</code>
* operations.</p>
* <p>
* Each entry consists of the following subelements:
* </p>
* <ul>
* <li>
* <p>
* <code>ItemCollectionKey</code> - The partition key value of the item collection. This is the same as the
* partition key value of the item.
* </p>
* </li>
* <li>
* <p>
* <code>SizeEstimateRangeGB</code> - An estimate of item collection size, expressed in GB. This is a
* two-element array containing a lower bound and an upper bound for the estimate. The estimate includes the
* size of all the items in the table, plus the size of all attributes projected into all of the local
* secondary indexes on the table. Use this estimate to measure whether a local secondary index is
* approaching its size limit.
* </p>
* <p>
* The estimate is subject to change over time; therefore, do not rely on the precision or accuracy of the
* estimate.
* </p>
* </li>
*/
public void setItemCollectionMetrics(java.util.Map<String, java.util.List<ItemCollectionMetrics>> itemCollectionMetrics) {
    // Stores the caller's map reference as-is (no defensive copy); passing null clears the property.
    this.itemCollectionMetrics = itemCollectionMetrics;
}
/**
 * <p>
 * A list of tables that were processed by <code>BatchWriteItem</code> and, for each table, information about any
 * item collections that were affected by individual <code>DeleteItem</code> or <code>PutItem</code> operations.
 * Each entry carries an <code>ItemCollectionKey</code> (the partition key value of the item collection, which is
 * the same as the partition key value of the item) and a <code>SizeEstimateRangeGB</code> two-element array holding
 * a lower and an upper bound, in GB, for the item collection size. The estimate includes all items in the table
 * plus all attributes projected into the local secondary indexes, and is subject to change over time; do not rely
 * on its precision or accuracy.
 * </p>
 *
 * @param itemCollectionMetrics
 *        per-table item collection metrics to store on this result
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public BatchWriteItemResult withItemCollectionMetrics(java.util.Map<String, java.util.List<ItemCollectionMetrics>> itemCollectionMetrics) {
    this.setItemCollectionMetrics(itemCollectionMetrics);
    return this;
}
/**
 * Adds a single entry to the ItemCollectionMetrics map, creating the backing map lazily on first use.
 *
 * @param key the table name the metrics belong to
 * @param value the item collection metrics recorded for that table
 * @return Returns a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException if an entry for {@code key} already exists
 */
public BatchWriteItemResult addItemCollectionMetricsEntry(String key, java.util.List<ItemCollectionMetrics> value) {
    if (null == this.itemCollectionMetrics) {
        this.itemCollectionMetrics = new java.util.HashMap<String, java.util.List<ItemCollectionMetrics>>();
    }
    if (this.itemCollectionMetrics.containsKey(key)) {
        // Plain concatenation renders a null key as "null"; the previous key.toString() threw NPE instead
        // of the intended IllegalArgumentException when a duplicate null key was supplied.
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    }
    this.itemCollectionMetrics.put(key, value);
    return this;
}
/**
 * Removes all the entries added into ItemCollectionMetrics.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public BatchWriteItemResult clearItemCollectionMetricsEntries() {
    // Drops the whole backing map rather than clearing it; the property reads as null afterwards.
    this.itemCollectionMetrics = null;
    return this;
}
/**
 * <p>
 * The capacity units consumed by the entire <code>BatchWriteItem</code> operation. Each element consists of
 * <code>TableName</code> (the table that consumed the provisioned throughput) and <code>CapacityUnits</code>
 * (the total number of capacity units consumed).
 * </p>
 *
 * @return The capacity units consumed by the entire <code>BatchWriteItem</code> operation, one element per table.
 */
public java.util.List<ConsumedCapacity> getConsumedCapacity() {
    return this.consumedCapacity;
}
/**
 * <p>
 * The capacity units consumed by the entire <code>BatchWriteItem</code> operation. Each element consists of
 * <code>TableName</code> (the table that consumed the provisioned throughput) and <code>CapacityUnits</code>
 * (the total number of capacity units consumed).
 * </p>
 *
 * @param consumedCapacity
 *        the per-table consumed capacity; a defensive copy is taken, and null clears the property
 */
public void setConsumedCapacity(java.util.Collection<ConsumedCapacity> consumedCapacity) {
    // Snapshot the caller's collection so later external mutation cannot affect this result.
    this.consumedCapacity = (consumedCapacity == null) ? null : new java.util.ArrayList<ConsumedCapacity>(consumedCapacity);
}
/**
 * <p>
 * The capacity units consumed by the entire <code>BatchWriteItem</code> operation. Each element consists of
 * <code>TableName</code> (the table that consumed the provisioned throughput) and <code>CapacityUnits</code>
 * (the total number of capacity units consumed).
 * </p>
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setConsumedCapacity(java.util.Collection)} or {@link #withConsumedCapacity(java.util.Collection)} if you
 * want to override the existing values.
 * </p>
 *
 * @param consumedCapacity
 *        consumed-capacity entries to append to this result
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public BatchWriteItemResult withConsumedCapacity(ConsumedCapacity... consumedCapacity) {
    if (this.consumedCapacity == null) {
        // Presize the backing list for exactly the elements about to be appended.
        setConsumedCapacity(new java.util.ArrayList<ConsumedCapacity>(consumedCapacity.length));
    }
    java.util.Collections.addAll(this.consumedCapacity, consumedCapacity);
    return this;
}
/**
 * <p>
 * The capacity units consumed by the entire <code>BatchWriteItem</code> operation. Each element consists of
 * <code>TableName</code> (the table that consumed the provisioned throughput) and <code>CapacityUnits</code>
 * (the total number of capacity units consumed).
 * </p>
 *
 * @param consumedCapacity
 *        the per-table consumed capacity; replaces any previously stored values
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public BatchWriteItemResult withConsumedCapacity(java.util.Collection<ConsumedCapacity> consumedCapacity) {
    this.setConsumedCapacity(consumedCapacity);
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Each present field is appended conditionally; the first two always carry a trailing comma,
    // matching the generated-SDK output format exactly.
    StringBuilder builder = new StringBuilder("{");
    if (getUnprocessedItems() != null) {
        builder.append("UnprocessedItems: ").append(getUnprocessedItems()).append(",");
    }
    if (getItemCollectionMetrics() != null) {
        builder.append("ItemCollectionMetrics: ").append(getItemCollectionMetrics()).append(",");
    }
    if (getConsumedCapacity() != null) {
        builder.append("ConsumedCapacity: ").append(getConsumedCapacity());
    }
    return builder.append("}").toString();
}
/**
 * Compares this result to another object field by field (unprocessed items, item collection metrics,
 * consumed capacity), treating two null fields as equal.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so the explicit null check is folded in here.
    if (!(obj instanceof BatchWriteItemResult)) {
        return false;
    }
    BatchWriteItemResult other = (BatchWriteItemResult) obj;
    // Objects.equals reproduces the original null-xor + equals pairing for each field,
    // preserving the original argument order (other's value dispatches equals()).
    return java.util.Objects.equals(other.getUnprocessedItems(), this.getUnprocessedItems())
            && java.util.Objects.equals(other.getItemCollectionMetrics(), this.getItemCollectionMetrics())
            && java.util.Objects.equals(other.getConsumedCapacity(), this.getConsumedCapacity());
}
/**
 * Hashes the same three fields as {@link #equals(Object)}. Objects.hash computes the identical
 * 31-based accumulation (seed 1, null contributes 0) that the original manual loop produced.
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(getUnprocessedItems(), getItemCollectionMetrics(), getConsumedCapacity());
}
/**
 * Creates a shallow copy of this result via {@link Object#clone()}.
 *
 * @return a shallow clone of this instance
 */
@Override
public BatchWriteItemResult clone() {
    try {
        return (BatchWriteItemResult) super.clone();
    } catch (CloneNotSupportedException e) {
        // Defensive: presumably the SDK base class implements Cloneable, making this unreachable
        // in practice — TODO confirm against the superclass declaration.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
}
| |
/*
Copyright (c) 2014, Cisco Systems
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.cisco.dvbu.ps.jdbcdecorator;
import java.util.Properties;
import java.sql.DatabaseMetaData;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.ResultSet;
import java.sql.RowIdLifetime;
/**
 * Pass-through decorator for {@link DatabaseMetaData}: every call is forwarded to the metadata
 * object of the wrapped {@link Connection}. Subclasses can override individual methods to adjust
 * what a driver reports without re-implementing the whole (very large) interface.
 *
 * <p>Fix over the original: the JDBC 4.1 (Java 7+) methods {@code getPseudoColumns} and
 * {@code generatedKeyAlwaysReturned} were missing, so the class was not a complete concrete
 * {@code DatabaseMetaData} implementation on modern JDKs; the {@code java.sql.Wrapper} methods
 * also used raw {@code Class} types and are now properly generic.</p>
 */
public class MetaDataProvider implements DatabaseMetaData {

    /** The decorated connection; kept protected so subclasses in the decorator hierarchy can reach it. */
    protected Connection conn;

    /** Delegate metadata object, obtained once from the connection at construction time. */
    private final DatabaseMetaData dbmd;

    /**
     * Creates a provider that forwards every call to {@code conn.getMetaData()}.
     *
     * @param conn the open connection whose metadata is being decorated
     * @throws SQLException if the connection cannot produce its metadata
     */
    public MetaDataProvider(Connection conn) throws SQLException {
        this.conn = conn;
        this.dbmd = conn.getMetaData();
    }

    // ------------------------------------------------------------------------------
    // Pure delegation: every method below forwards to the wrapped DatabaseMetaData.
    // ------------------------------------------------------------------------------

    // --- General source information -----------------------------------------------

    @Override public boolean allProceduresAreCallable() throws SQLException { return dbmd.allProceduresAreCallable(); }
    @Override public boolean allTablesAreSelectable() throws SQLException { return dbmd.allTablesAreSelectable(); }
    @Override public String getURL() throws SQLException { return dbmd.getURL(); }
    @Override public String getUserName() throws SQLException { return dbmd.getUserName(); }
    @Override public boolean isReadOnly() throws SQLException { return dbmd.isReadOnly(); }
    @Override public boolean nullsAreSortedHigh() throws SQLException { return dbmd.nullsAreSortedHigh(); }
    @Override public boolean nullsAreSortedLow() throws SQLException { return dbmd.nullsAreSortedLow(); }
    @Override public boolean nullsAreSortedAtStart() throws SQLException { return dbmd.nullsAreSortedAtStart(); }
    @Override public boolean nullsAreSortedAtEnd() throws SQLException { return dbmd.nullsAreSortedAtEnd(); }
    @Override public String getDatabaseProductName() throws SQLException { return dbmd.getDatabaseProductName(); }
    @Override public String getDatabaseProductVersion() throws SQLException { return dbmd.getDatabaseProductVersion(); }
    @Override public String getDriverName() throws SQLException { return dbmd.getDriverName(); }
    @Override public String getDriverVersion() throws SQLException { return dbmd.getDriverVersion(); }
    @Override public int getDriverMajorVersion() { return dbmd.getDriverMajorVersion(); }
    @Override public int getDriverMinorVersion() { return dbmd.getDriverMinorVersion(); }
    @Override public boolean usesLocalFiles() throws SQLException { return dbmd.usesLocalFiles(); }
    @Override public boolean usesLocalFilePerTable() throws SQLException { return dbmd.usesLocalFilePerTable(); }

    // --- Identifier handling and lexical elements ----------------------------------

    @Override public boolean supportsMixedCaseIdentifiers() throws SQLException { return dbmd.supportsMixedCaseIdentifiers(); }
    @Override public boolean storesUpperCaseIdentifiers() throws SQLException { return dbmd.storesUpperCaseIdentifiers(); }
    @Override public boolean storesLowerCaseIdentifiers() throws SQLException { return dbmd.storesLowerCaseIdentifiers(); }
    @Override public boolean storesMixedCaseIdentifiers() throws SQLException { return dbmd.storesMixedCaseIdentifiers(); }
    @Override public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { return dbmd.supportsMixedCaseQuotedIdentifiers(); }
    @Override public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { return dbmd.storesUpperCaseQuotedIdentifiers(); }
    @Override public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { return dbmd.storesLowerCaseQuotedIdentifiers(); }
    @Override public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { return dbmd.storesMixedCaseQuotedIdentifiers(); }
    @Override public String getIdentifierQuoteString() throws SQLException { return dbmd.getIdentifierQuoteString(); }
    @Override public String getSQLKeywords() throws SQLException { return dbmd.getSQLKeywords(); }
    @Override public String getNumericFunctions() throws SQLException { return dbmd.getNumericFunctions(); }
    @Override public String getStringFunctions() throws SQLException { return dbmd.getStringFunctions(); }
    @Override public String getSystemFunctions() throws SQLException { return dbmd.getSystemFunctions(); }
    @Override public String getTimeDateFunctions() throws SQLException { return dbmd.getTimeDateFunctions(); }
    @Override public String getSearchStringEscape() throws SQLException { return dbmd.getSearchStringEscape(); }
    @Override public String getExtraNameCharacters() throws SQLException { return dbmd.getExtraNameCharacters(); }

    // --- SQL feature support --------------------------------------------------------

    @Override public boolean supportsAlterTableWithAddColumn() throws SQLException { return dbmd.supportsAlterTableWithAddColumn(); }
    @Override public boolean supportsAlterTableWithDropColumn() throws SQLException { return dbmd.supportsAlterTableWithDropColumn(); }
    @Override public boolean supportsColumnAliasing() throws SQLException { return dbmd.supportsColumnAliasing(); }
    @Override public boolean nullPlusNonNullIsNull() throws SQLException { return dbmd.nullPlusNonNullIsNull(); }
    @Override public boolean supportsConvert() throws SQLException { return dbmd.supportsConvert(); }
    @Override public boolean supportsConvert(int fromType, int toType) throws SQLException { return dbmd.supportsConvert(fromType, toType); }
    @Override public boolean supportsTableCorrelationNames() throws SQLException { return dbmd.supportsTableCorrelationNames(); }
    @Override public boolean supportsDifferentTableCorrelationNames() throws SQLException { return dbmd.supportsDifferentTableCorrelationNames(); }
    @Override public boolean supportsExpressionsInOrderBy() throws SQLException { return dbmd.supportsExpressionsInOrderBy(); }
    @Override public boolean supportsOrderByUnrelated() throws SQLException { return dbmd.supportsOrderByUnrelated(); }
    @Override public boolean supportsGroupBy() throws SQLException { return dbmd.supportsGroupBy(); }
    @Override public boolean supportsGroupByUnrelated() throws SQLException { return dbmd.supportsGroupByUnrelated(); }
    @Override public boolean supportsGroupByBeyondSelect() throws SQLException { return dbmd.supportsGroupByBeyondSelect(); }
    @Override public boolean supportsLikeEscapeClause() throws SQLException { return dbmd.supportsLikeEscapeClause(); }
    @Override public boolean supportsMultipleResultSets() throws SQLException { return dbmd.supportsMultipleResultSets(); }
    @Override public boolean supportsMultipleTransactions() throws SQLException { return dbmd.supportsMultipleTransactions(); }
    @Override public boolean supportsNonNullableColumns() throws SQLException { return dbmd.supportsNonNullableColumns(); }
    @Override public boolean supportsMinimumSQLGrammar() throws SQLException { return dbmd.supportsMinimumSQLGrammar(); }
    @Override public boolean supportsCoreSQLGrammar() throws SQLException { return dbmd.supportsCoreSQLGrammar(); }
    @Override public boolean supportsExtendedSQLGrammar() throws SQLException { return dbmd.supportsExtendedSQLGrammar(); }
    @Override public boolean supportsANSI92EntryLevelSQL() throws SQLException { return dbmd.supportsANSI92EntryLevelSQL(); }
    @Override public boolean supportsANSI92IntermediateSQL() throws SQLException { return dbmd.supportsANSI92IntermediateSQL(); }
    @Override public boolean supportsANSI92FullSQL() throws SQLException { return dbmd.supportsANSI92FullSQL(); }
    @Override public boolean supportsIntegrityEnhancementFacility() throws SQLException { return dbmd.supportsIntegrityEnhancementFacility(); }
    @Override public boolean supportsOuterJoins() throws SQLException { return dbmd.supportsOuterJoins(); }
    @Override public boolean supportsFullOuterJoins() throws SQLException { return dbmd.supportsFullOuterJoins(); }
    @Override public boolean supportsLimitedOuterJoins() throws SQLException { return dbmd.supportsLimitedOuterJoins(); }

    // --- Schema / catalog terminology and support -----------------------------------

    @Override public String getSchemaTerm() throws SQLException { return dbmd.getSchemaTerm(); }
    @Override public String getProcedureTerm() throws SQLException { return dbmd.getProcedureTerm(); }
    @Override public String getCatalogTerm() throws SQLException { return dbmd.getCatalogTerm(); }
    @Override public boolean isCatalogAtStart() throws SQLException { return dbmd.isCatalogAtStart(); }
    @Override public String getCatalogSeparator() throws SQLException { return dbmd.getCatalogSeparator(); }
    @Override public boolean supportsSchemasInDataManipulation() throws SQLException { return dbmd.supportsSchemasInDataManipulation(); }
    @Override public boolean supportsSchemasInProcedureCalls() throws SQLException { return dbmd.supportsSchemasInProcedureCalls(); }
    @Override public boolean supportsSchemasInTableDefinitions() throws SQLException { return dbmd.supportsSchemasInTableDefinitions(); }
    @Override public boolean supportsSchemasInIndexDefinitions() throws SQLException { return dbmd.supportsSchemasInIndexDefinitions(); }
    @Override public boolean supportsSchemasInPrivilegeDefinitions() throws SQLException { return dbmd.supportsSchemasInPrivilegeDefinitions(); }
    @Override public boolean supportsCatalogsInDataManipulation() throws SQLException { return dbmd.supportsCatalogsInDataManipulation(); }
    @Override public boolean supportsCatalogsInProcedureCalls() throws SQLException { return dbmd.supportsCatalogsInProcedureCalls(); }
    @Override public boolean supportsCatalogsInTableDefinitions() throws SQLException { return dbmd.supportsCatalogsInTableDefinitions(); }
    @Override public boolean supportsCatalogsInIndexDefinitions() throws SQLException { return dbmd.supportsCatalogsInIndexDefinitions(); }
    @Override public boolean supportsCatalogsInPrivilegeDefinitions() throws SQLException { return dbmd.supportsCatalogsInPrivilegeDefinitions(); }
    @Override public boolean supportsPositionedDelete() throws SQLException { return dbmd.supportsPositionedDelete(); }
    @Override public boolean supportsPositionedUpdate() throws SQLException { return dbmd.supportsPositionedUpdate(); }
    @Override public boolean supportsSelectForUpdate() throws SQLException { return dbmd.supportsSelectForUpdate(); }
    @Override public boolean supportsStoredProcedures() throws SQLException { return dbmd.supportsStoredProcedures(); }
    @Override public boolean supportsSubqueriesInComparisons() throws SQLException { return dbmd.supportsSubqueriesInComparisons(); }
    @Override public boolean supportsSubqueriesInExists() throws SQLException { return dbmd.supportsSubqueriesInExists(); }
    @Override public boolean supportsSubqueriesInIns() throws SQLException { return dbmd.supportsSubqueriesInIns(); }
    @Override public boolean supportsSubqueriesInQuantifieds() throws SQLException { return dbmd.supportsSubqueriesInQuantifieds(); }
    @Override public boolean supportsCorrelatedSubqueries() throws SQLException { return dbmd.supportsCorrelatedSubqueries(); }
    @Override public boolean supportsUnion() throws SQLException { return dbmd.supportsUnion(); }
    @Override public boolean supportsUnionAll() throws SQLException { return dbmd.supportsUnionAll(); }
    @Override public boolean supportsOpenCursorsAcrossCommit() throws SQLException { return dbmd.supportsOpenCursorsAcrossCommit(); }
    @Override public boolean supportsOpenCursorsAcrossRollback() throws SQLException { return dbmd.supportsOpenCursorsAcrossRollback(); }
    @Override public boolean supportsOpenStatementsAcrossCommit() throws SQLException { return dbmd.supportsOpenStatementsAcrossCommit(); }
    @Override public boolean supportsOpenStatementsAcrossRollback() throws SQLException { return dbmd.supportsOpenStatementsAcrossRollback(); }

    // --- Limits ---------------------------------------------------------------------

    @Override public int getMaxBinaryLiteralLength() throws SQLException { return dbmd.getMaxBinaryLiteralLength(); }
    @Override public int getMaxCharLiteralLength() throws SQLException { return dbmd.getMaxCharLiteralLength(); }
    @Override public int getMaxColumnNameLength() throws SQLException { return dbmd.getMaxColumnNameLength(); }
    @Override public int getMaxColumnsInGroupBy() throws SQLException { return dbmd.getMaxColumnsInGroupBy(); }
    @Override public int getMaxColumnsInIndex() throws SQLException { return dbmd.getMaxColumnsInIndex(); }
    @Override public int getMaxColumnsInOrderBy() throws SQLException { return dbmd.getMaxColumnsInOrderBy(); }
    @Override public int getMaxColumnsInSelect() throws SQLException { return dbmd.getMaxColumnsInSelect(); }
    @Override public int getMaxColumnsInTable() throws SQLException { return dbmd.getMaxColumnsInTable(); }
    @Override public int getMaxConnections() throws SQLException { return dbmd.getMaxConnections(); }
    @Override public int getMaxCursorNameLength() throws SQLException { return dbmd.getMaxCursorNameLength(); }
    @Override public int getMaxIndexLength() throws SQLException { return dbmd.getMaxIndexLength(); }
    @Override public int getMaxSchemaNameLength() throws SQLException { return dbmd.getMaxSchemaNameLength(); }
    @Override public int getMaxProcedureNameLength() throws SQLException { return dbmd.getMaxProcedureNameLength(); }
    @Override public int getMaxCatalogNameLength() throws SQLException { return dbmd.getMaxCatalogNameLength(); }
    @Override public int getMaxRowSize() throws SQLException { return dbmd.getMaxRowSize(); }
    @Override public boolean doesMaxRowSizeIncludeBlobs() throws SQLException { return dbmd.doesMaxRowSizeIncludeBlobs(); }
    @Override public int getMaxStatementLength() throws SQLException { return dbmd.getMaxStatementLength(); }
    @Override public int getMaxStatements() throws SQLException { return dbmd.getMaxStatements(); }
    @Override public int getMaxTableNameLength() throws SQLException { return dbmd.getMaxTableNameLength(); }
    @Override public int getMaxTablesInSelect() throws SQLException { return dbmd.getMaxTablesInSelect(); }
    @Override public int getMaxUserNameLength() throws SQLException { return dbmd.getMaxUserNameLength(); }

    // --- Transactions ---------------------------------------------------------------

    @Override public int getDefaultTransactionIsolation() throws SQLException { return dbmd.getDefaultTransactionIsolation(); }
    @Override public boolean supportsTransactions() throws SQLException { return dbmd.supportsTransactions(); }
    @Override public boolean supportsTransactionIsolationLevel(int level) throws SQLException { return dbmd.supportsTransactionIsolationLevel(level); }
    @Override public boolean supportsDataDefinitionAndDataManipulationTransactions() throws SQLException { return dbmd.supportsDataDefinitionAndDataManipulationTransactions(); }
    @Override public boolean supportsDataManipulationTransactionsOnly() throws SQLException { return dbmd.supportsDataManipulationTransactionsOnly(); }
    @Override public boolean dataDefinitionCausesTransactionCommit() throws SQLException { return dbmd.dataDefinitionCausesTransactionCommit(); }
    @Override public boolean dataDefinitionIgnoredInTransactions() throws SQLException { return dbmd.dataDefinitionIgnoredInTransactions(); }

    // --- Catalog queries (ResultSet-returning) --------------------------------------

    @Override public ResultSet getProcedures(String catalog, String schemaPattern, String procedureNamePattern) throws SQLException { return dbmd.getProcedures(catalog, schemaPattern, procedureNamePattern); }
    @Override public ResultSet getProcedureColumns(String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) throws SQLException { return dbmd.getProcedureColumns(catalog, schemaPattern, procedureNamePattern, columnNamePattern); }
    @Override public ResultSet getTables(String catalog, String schemaPattern, String tableNamePattern, String[] types) throws SQLException { return dbmd.getTables(catalog, schemaPattern, tableNamePattern, types); }
    @Override public ResultSet getSchemas() throws SQLException { return dbmd.getSchemas(); }
    @Override public ResultSet getCatalogs() throws SQLException { return dbmd.getCatalogs(); }
    @Override public ResultSet getTableTypes() throws SQLException { return dbmd.getTableTypes(); }
    @Override public ResultSet getColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { return dbmd.getColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern); }
    @Override public ResultSet getColumnPrivileges(String catalog, String schema, String table, String columnNamePattern) throws SQLException { return dbmd.getColumnPrivileges(catalog, schema, table, columnNamePattern); }
    @Override public ResultSet getTablePrivileges(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { return dbmd.getTablePrivileges(catalog, schemaPattern, tableNamePattern); }
    @Override public ResultSet getBestRowIdentifier(String catalog, String schema, String table, int scope, boolean nullable) throws SQLException { return dbmd.getBestRowIdentifier(catalog, schema, table, scope, nullable); }
    @Override public ResultSet getVersionColumns(String catalog, String schema, String table) throws SQLException { return dbmd.getVersionColumns(catalog, schema, table); }
    @Override public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { return dbmd.getPrimaryKeys(catalog, schema, table); }
    @Override public ResultSet getImportedKeys(String catalog, String schema, String table) throws SQLException { return dbmd.getImportedKeys(catalog, schema, table); }
    @Override public ResultSet getExportedKeys(String catalog, String schema, String table) throws SQLException { return dbmd.getExportedKeys(catalog, schema, table); }
    @Override public ResultSet getCrossReference(String parentCatalog, String parentSchema, String parentTable, String foreignCatalog, String foreignSchema, String foreignTable) throws SQLException { return dbmd.getCrossReference(parentCatalog, parentSchema, parentTable, foreignCatalog, foreignSchema, foreignTable); }
    @Override public ResultSet getTypeInfo() throws SQLException { return dbmd.getTypeInfo(); }
    @Override public ResultSet getIndexInfo(String catalog, String schema, String table, boolean unique, boolean approximate) throws SQLException { return dbmd.getIndexInfo(catalog, schema, table, unique, approximate); }

    // --- ResultSet capabilities -----------------------------------------------------

    @Override public boolean supportsResultSetType(int type) throws SQLException { return dbmd.supportsResultSetType(type); }
    @Override public boolean supportsResultSetConcurrency(int type, int concurrency) throws SQLException { return dbmd.supportsResultSetConcurrency(type, concurrency); }
    @Override public boolean ownUpdatesAreVisible(int type) throws SQLException { return dbmd.ownUpdatesAreVisible(type); }
    @Override public boolean ownDeletesAreVisible(int type) throws SQLException { return dbmd.ownDeletesAreVisible(type); }
    @Override public boolean ownInsertsAreVisible(int type) throws SQLException { return dbmd.ownInsertsAreVisible(type); }
    @Override public boolean othersUpdatesAreVisible(int type) throws SQLException { return dbmd.othersUpdatesAreVisible(type); }
    @Override public boolean othersDeletesAreVisible(int type) throws SQLException { return dbmd.othersDeletesAreVisible(type); }
    @Override public boolean othersInsertsAreVisible(int type) throws SQLException { return dbmd.othersInsertsAreVisible(type); }
    @Override public boolean updatesAreDetected(int type) throws SQLException { return dbmd.updatesAreDetected(type); }
    @Override public boolean deletesAreDetected(int type) throws SQLException { return dbmd.deletesAreDetected(type); }
    @Override public boolean insertsAreDetected(int type) throws SQLException { return dbmd.insertsAreDetected(type); }
    @Override public boolean supportsBatchUpdates() throws SQLException { return dbmd.supportsBatchUpdates(); }

    // --- JDBC 3.0 / 4.0 features ----------------------------------------------------

    @Override public ResultSet getUDTs(String catalog, String schemaPattern, String typeNamePattern, int[] types) throws SQLException { return dbmd.getUDTs(catalog, schemaPattern, typeNamePattern, types); }
    @Override public Connection getConnection() throws SQLException { return dbmd.getConnection(); }
    @Override public boolean supportsSavepoints() throws SQLException { return dbmd.supportsSavepoints(); }
    @Override public boolean supportsNamedParameters() throws SQLException { return dbmd.supportsNamedParameters(); }
    @Override public boolean supportsMultipleOpenResults() throws SQLException { return dbmd.supportsMultipleOpenResults(); }
    @Override public boolean supportsGetGeneratedKeys() throws SQLException { return dbmd.supportsGetGeneratedKeys(); }
    @Override public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) throws SQLException { return dbmd.getSuperTypes(catalog, schemaPattern, typeNamePattern); }
    @Override public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) throws SQLException { return dbmd.getSuperTables(catalog, schemaPattern, tableNamePattern); }
    @Override public ResultSet getAttributes(String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) throws SQLException { return dbmd.getAttributes(catalog, schemaPattern, typeNamePattern, attributeNamePattern); }
    @Override public boolean supportsResultSetHoldability(int holdability) throws SQLException { return dbmd.supportsResultSetHoldability(holdability); }
    @Override public int getResultSetHoldability() throws SQLException { return dbmd.getResultSetHoldability(); }
    @Override public int getDatabaseMajorVersion() throws SQLException { return dbmd.getDatabaseMajorVersion(); }
    @Override public int getDatabaseMinorVersion() throws SQLException { return dbmd.getDatabaseMinorVersion(); }
    @Override public int getJDBCMajorVersion() throws SQLException { return dbmd.getJDBCMajorVersion(); }
    @Override public int getJDBCMinorVersion() throws SQLException { return dbmd.getJDBCMinorVersion(); }
    @Override public int getSQLStateType() throws SQLException { return dbmd.getSQLStateType(); }
    @Override public boolean locatorsUpdateCopy() throws SQLException { return dbmd.locatorsUpdateCopy(); }
    @Override public boolean supportsStatementPooling() throws SQLException { return dbmd.supportsStatementPooling(); }
    @Override public RowIdLifetime getRowIdLifetime() throws SQLException { return dbmd.getRowIdLifetime(); }
    @Override public ResultSet getSchemas(String catalog, String schemaPattern) throws SQLException { return dbmd.getSchemas(catalog, schemaPattern); }
    @Override public boolean supportsStoredFunctionsUsingCallSyntax() throws SQLException { return dbmd.supportsStoredFunctionsUsingCallSyntax(); }
    @Override public boolean autoCommitFailureClosesAllResultSets() throws SQLException { return dbmd.autoCommitFailureClosesAllResultSets(); }
    @Override public ResultSet getClientInfoProperties() throws SQLException { return dbmd.getClientInfoProperties(); }
    @Override public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) throws SQLException { return dbmd.getFunctions(catalog, schemaPattern, functionNamePattern); }
    @Override public ResultSet getFunctionColumns(String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) throws SQLException { return dbmd.getFunctionColumns(catalog, schemaPattern, functionNamePattern, columnNamePattern); }

    // --- JDBC 4.1 (Java 7+) ---------------------------------------------------------
    // These two methods were missing from the original class, leaving it an incomplete
    // concrete DatabaseMetaData implementation (a compile error) on Java 7 and later.

    @Override public ResultSet getPseudoColumns(String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) throws SQLException { return dbmd.getPseudoColumns(catalog, schemaPattern, tableNamePattern, columnNamePattern); }
    @Override public boolean generatedKeyAlwaysReturned() throws SQLException { return dbmd.generatedKeyAlwaysReturned(); }

    // --- java.sql.Wrapper ------------------------------------------------------------
    // Properly generic signatures replace the original raw-typed declarations; the
    // behavior is the same pure delegation to the wrapped metadata object.

    @Override public <T> T unwrap(Class<T> iface) throws SQLException { return dbmd.unwrap(iface); }
    @Override public boolean isWrapperFor(Class<?> iface) throws SQLException { return dbmd.isWrapperFor(iface); }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors;
import java.util.Collection;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.IPhysicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractUnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.WriteResultOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.HashPartitionExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.HashPartitionMergeExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.RangePartitionExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.RangePartitionMergeExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.physical.SortMergeExchangePOperator;
import org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn;
import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
/**
 * Visitor that collects every {@link LogicalVariable} <em>used</em> (read) by a single
 * logical operator into a caller-supplied collection. Expressions are asked for their
 * used variables; nested plans are traversed via {@link VariableUtilities}; for an
 * {@code ExchangeOperator} the used variables come from the attached physical operator.
 * Some visit methods guard against duplicates, others append unconditionally — callers
 * that need a set semantics should pass a {@code Set}.
 */
public class UsedVariableVisitor implements ILogicalOperatorVisitor<Void, Void> {

    /** Destination collection; set once at construction and never reassigned. */
    private final Collection<LogicalVariable> usedVariables;

    /**
     * Construct a visitor.
     *
     * @param usedVariables collection that receives every used variable found
     */
    public UsedVariableVisitor(Collection<LogicalVariable> usedVariables) {
        this.usedVariables = usedVariables;
    }

    @Override
    public Void visitAggregateOperator(AggregateOperator op, Void arg) {
        for (Mutable<ILogicalExpression> exprRef : op.getExpressions()) {
            exprRef.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitAssignOperator(AssignOperator op, Void arg) {
        for (Mutable<ILogicalExpression> exprRef : op.getExpressions()) {
            exprRef.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitDataScanOperator(DataSourceScanOperator op, Void arg) {
        // A data scan produces variables; it only *uses* its optional filtering expressions.
        if (op.getAdditionalFilteringExpressions() != null) {
            for (Mutable<ILogicalExpression> e : op.getAdditionalFilteringExpressions()) {
                e.getValue().getUsedVariables(usedVariables);
            }
        }
        return null;
    }

    @Override
    public Void visitDistinctOperator(DistinctOperator op, Void arg) {
        for (Mutable<ILogicalExpression> eRef : op.getExpressions()) {
            eRef.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, Void arg) {
        // does not use any variable
        return null;
    }

    @Override
    public Void visitExchangeOperator(ExchangeOperator op, Void arg) throws AlgebricksException {
        // Used variables depend on the physical operator.
        if (op.getPhysicalOperator() != null) {
            IPhysicalOperator physOp = op.getPhysicalOperator();
            switch (physOp.getOperatorTag()) {
                case BROADCAST_EXCHANGE:
                case ONE_TO_ONE_EXCHANGE:
                case RANDOM_MERGE_EXCHANGE: {
                    // No variables used.
                    break;
                }
                case HASH_PARTITION_EXCHANGE: {
                    HashPartitionExchangePOperator concreteOp = (HashPartitionExchangePOperator) physOp;
                    usedVariables.addAll(concreteOp.getHashFields());
                    break;
                }
                case HASH_PARTITION_MERGE_EXCHANGE: {
                    HashPartitionMergeExchangePOperator concreteOp = (HashPartitionMergeExchangePOperator) physOp;
                    usedVariables.addAll(concreteOp.getPartitionFields());
                    for (OrderColumn orderCol : concreteOp.getOrderColumns()) {
                        usedVariables.add(orderCol.getColumn());
                    }
                    break;
                }
                case SORT_MERGE_EXCHANGE: {
                    SortMergeExchangePOperator concreteOp = (SortMergeExchangePOperator) physOp;
                    for (OrderColumn orderCol : concreteOp.getSortColumns()) {
                        usedVariables.add(orderCol.getColumn());
                    }
                    break;
                }
                case RANGE_PARTITION_EXCHANGE: {
                    RangePartitionExchangePOperator concreteOp = (RangePartitionExchangePOperator) physOp;
                    for (OrderColumn partCol : concreteOp.getPartitioningFields()) {
                        usedVariables.add(partCol.getColumn());
                    }
                    break;
                }
                case RANGE_PARTITION_MERGE_EXCHANGE: {
                    RangePartitionMergeExchangePOperator concreteOp = (RangePartitionMergeExchangePOperator) physOp;
                    for (OrderColumn partCol : concreteOp.getPartitioningFields()) {
                        usedVariables.add(partCol.getColumn());
                    }
                    break;
                }
                case RANDOM_PARTITION_EXCHANGE: {
                    break;
                }
                default: {
                    // Fail loudly on a tag this visitor does not know, rather than silently dropping variables.
                    throw new AlgebricksException("Unhandled physical operator tag '" + physOp.getOperatorTag() + "'.");
                }
            }
        }
        return null;
    }

    @Override
    public Void visitGroupByOperator(GroupByOperator op, Void arg) throws AlgebricksException {
        // Nested plans first, then group-by and decoration expressions.
        for (ILogicalPlan p : op.getNestedPlans()) {
            for (Mutable<ILogicalOperator> r : p.getRoots()) {
                VariableUtilities.getUsedVariablesInDescendantsAndSelf(r.getValue(), usedVariables);
            }
        }
        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> g : op.getGroupByList()) {
            g.second.getValue().getUsedVariables(usedVariables);
        }
        for (Pair<LogicalVariable, Mutable<ILogicalExpression>> g : op.getDecorList()) {
            g.second.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitInnerJoinOperator(InnerJoinOperator op, Void arg) {
        op.getCondition().getValue().getUsedVariables(usedVariables);
        return null;
    }

    @Override
    public Void visitLeftOuterJoinOperator(LeftOuterJoinOperator op, Void arg) {
        op.getCondition().getValue().getUsedVariables(usedVariables);
        return null;
    }

    @Override
    public Void visitLimitOperator(LimitOperator op, Void arg) {
        op.getMaxObjects().getValue().getUsedVariables(usedVariables);
        // The offset expression is optional.
        ILogicalExpression offsetExpr = op.getOffset().getValue();
        if (offsetExpr != null) {
            offsetExpr.getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) {
        // does not use any variable
        return null;
    }

    @Override
    public Void visitOrderOperator(OrderOperator op, Void arg) {
        for (Pair<IOrder, Mutable<ILogicalExpression>> oe : op.getOrderExpressions()) {
            oe.second.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitProjectOperator(ProjectOperator op, Void arg) {
        // A project "uses" exactly the variables it keeps; avoid duplicates.
        List<LogicalVariable> parameterVariables = op.getVariables();
        for (LogicalVariable v : parameterVariables) {
            if (!usedVariables.contains(v)) {
                usedVariables.add(v);
            }
        }
        return null;
    }

    @Override
    public Void visitRunningAggregateOperator(RunningAggregateOperator op, Void arg) {
        for (Mutable<ILogicalExpression> exprRef : op.getExpressions()) {
            exprRef.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitScriptOperator(ScriptOperator op, Void arg) {
        List<LogicalVariable> parameterVariables = op.getInputVariables();
        for (LogicalVariable v : parameterVariables) {
            if (!usedVariables.contains(v)) {
                usedVariables.add(v);
            }
        }
        return null;
    }

    @Override
    public Void visitSelectOperator(SelectOperator op, Void arg) {
        op.getCondition().getValue().getUsedVariables(usedVariables);
        return null;
    }

    @Override
    public Void visitSubplanOperator(SubplanOperator op, Void arg) throws AlgebricksException {
        for (ILogicalPlan p : op.getNestedPlans()) {
            for (Mutable<ILogicalOperator> r : p.getRoots()) {
                VariableUtilities.getUsedVariablesInDescendantsAndSelf(r.getValue(), usedVariables);
            }
        }
        return null;
    }

    @Override
    public Void visitUnionOperator(UnionAllOperator op, Void arg) {
        // Only the two input-side variables of each mapping are used; the third is produced.
        for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> m : op.getVariableMappings()) {
            if (!usedVariables.contains(m.first)) {
                usedVariables.add(m.first);
            }
            if (!usedVariables.contains(m.second)) {
                usedVariables.add(m.second);
            }
        }
        return null;
    }

    @Override
    public Void visitIntersectOperator(IntersectOperator op, Void arg) throws AlgebricksException {
        for (int i = 0; i < op.getNumInput(); i++) {
            for (LogicalVariable var : op.getInputVariables(i)) {
                if (!usedVariables.contains(var)) {
                    usedVariables.add(var);
                }
            }
        }
        return null;
    }

    @Override
    public Void visitUnnestMapOperator(UnnestMapOperator op, Void arg) {
        getUsedVarsFromExprAndFilterExpr(op);
        return null;
    }

    @Override
    public Void visitLeftOuterUnnestMapOperator(LeftOuterUnnestMapOperator op, Void arg) throws AlgebricksException {
        getUsedVarsFromExprAndFilterExpr(op);
        return null;
    }

    /** Shared helper for the two unnest-map variants: main expression plus optional filters. */
    private void getUsedVarsFromExprAndFilterExpr(AbstractUnnestMapOperator op) {
        op.getExpressionRef().getValue().getUsedVariables(usedVariables);
        if (op.getAdditionalFilteringExpressions() != null) {
            for (Mutable<ILogicalExpression> e : op.getAdditionalFilteringExpressions()) {
                e.getValue().getUsedVariables(usedVariables);
            }
        }
    }

    @Override
    public Void visitUnnestOperator(UnnestOperator op, Void arg) {
        op.getExpressionRef().getValue().getUsedVariables(usedVariables);
        return null;
    }

    @Override
    public Void visitWriteOperator(WriteOperator op, Void arg) {
        for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
            expr.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitDistributeResultOperator(DistributeResultOperator op, Void arg) {
        for (Mutable<ILogicalExpression> expr : op.getExpressions()) {
            expr.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitWriteResultOperator(WriteResultOperator op, Void arg) {
        op.getPayloadExpression().getValue().getUsedVariables(usedVariables);
        for (Mutable<ILogicalExpression> e : op.getKeyExpressions()) {
            e.getValue().getUsedVariables(usedVariables);
        }
        if (op.getAdditionalFilteringExpressions() != null) {
            for (Mutable<ILogicalExpression> e : op.getAdditionalFilteringExpressions()) {
                e.getValue().getUsedVariables(usedVariables);
            }
        }
        return null;
    }

    @Override
    public Void visitInsertDeleteUpsertOperator(InsertDeleteUpsertOperator op, Void arg) {
        // 1. The record variable
        op.getPayloadExpression().getValue().getUsedVariables(usedVariables);
        // 2. The primary key variables
        for (Mutable<ILogicalExpression> e : op.getPrimaryKeyExpressions()) {
            e.getValue().getUsedVariables(usedVariables);
        }
        // 3. The filters variables
        if (op.getAdditionalFilteringExpressions() != null) {
            for (Mutable<ILogicalExpression> e : op.getAdditionalFilteringExpressions()) {
                e.getValue().getUsedVariables(usedVariables);
            }
        }
        // 4. The Other variables (Not key, Not payload, and Not Filter)
        if (op.getAdditionalNonFilteringExpressions() != null) {
            for (Mutable<ILogicalExpression> e : op.getAdditionalNonFilteringExpressions()) {
                e.getValue().getUsedVariables(usedVariables);
            }
        }
        return null;
    }

    @Override
    public Void visitIndexInsertDeleteUpsertOperator(IndexInsertDeleteUpsertOperator op, Void arg) {
        for (Mutable<ILogicalExpression> e : op.getPrimaryKeyExpressions()) {
            e.getValue().getUsedVariables(usedVariables);
        }
        for (Mutable<ILogicalExpression> e : op.getSecondaryKeyExpressions()) {
            e.getValue().getUsedVariables(usedVariables);
        }
        if (op.getFilterExpression() != null) {
            op.getFilterExpression().getValue().getUsedVariables(usedVariables);
        }
        if (op.getAdditionalFilteringExpressions() != null) {
            for (Mutable<ILogicalExpression> e : op.getAdditionalFilteringExpressions()) {
                e.getValue().getUsedVariables(usedVariables);
            }
        }
        // Upsert-only: expressions referring to the previous record version.
        if (op.getPrevAdditionalFilteringExpression() != null) {
            op.getPrevAdditionalFilteringExpression().getValue().getUsedVariables(usedVariables);
        }
        if (op.getPrevSecondaryKeyExprs() != null) {
            for (Mutable<ILogicalExpression> e : op.getPrevSecondaryKeyExprs()) {
                e.getValue().getUsedVariables(usedVariables);
            }
        }
        return null;
    }

    @Override
    public Void visitTokenizeOperator(TokenizeOperator op, Void arg) {
        for (Mutable<ILogicalExpression> e : op.getPrimaryKeyExpressions()) {
            e.getValue().getUsedVariables(usedVariables);
        }
        for (Mutable<ILogicalExpression> e : op.getSecondaryKeyExpressions()) {
            e.getValue().getUsedVariables(usedVariables);
        }
        return null;
    }

    @Override
    public Void visitSinkOperator(SinkOperator op, Void arg) {
        return null;
    }

    @Override
    public Void visitReplicateOperator(ReplicateOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitSplitOperator(SplitOperator op, Void arg) throws AlgebricksException {
        op.getBranchingExpression().getValue().getUsedVariables(usedVariables);
        return null;
    }

    @Override
    public Void visitMaterializeOperator(MaterializeOperator op, Void arg) throws AlgebricksException {
        return null;
    }

    @Override
    public Void visitDelegateOperator(DelegateOperator op, Void arg) throws AlgebricksException {
        op.getDelegate().getUsedVariables(usedVariables);
        return null;
    }

    @Override
    public Void visitLeftOuterUnnestOperator(LeftOuterUnnestOperator op, Void arg) throws AlgebricksException {
        op.getExpressionRef().getValue().getUsedVariables(usedVariables);
        return null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.builder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assume.assumeFalse;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.SystemUtils;
import org.junit.After;
import org.junit.Test;
/**
* Unit tests for {@link org.apache.commons.lang3.builder.ToStringBuilder}.
*
* @version $Id: ToStringBuilderTest.java 1533566 2013-10-18 17:21:23Z sebb $
*/
public class ToStringBuilderTest {
    /** Shared non-null target object used by the builder tests. */
    private final Integer base = Integer.valueOf(5);
    /** Expected Object.toString()-style prefix ("className@identityHex") for {@link #base}. */
    private final String baseStr = base.getClass().getName() + "@" + Integer.toHexString(System.identityHashCode(base));
/*
* All tests should leave the registry empty.
*/
    // Post-condition shared by every test: the ToStringStyle registry must be empty
    // (validateNullToStringStyleRegistry is defined elsewhere in this class).
    @After
    public void after(){
        validateNullToStringStyleRegistry();
    }
//-----------------------------------------------------------------------
@Test
public void testConstructorEx1() {
assertEquals("<null>", new ToStringBuilder(null).toString());
}
    /** Null object with null style renders {@code <null>}; a non-null object with null style must not throw. */
    @Test
    public void testConstructorEx2() {
        assertEquals("<null>", new ToStringBuilder(null, null).toString());
        new ToStringBuilder(this.base, null).toString();
    }
    /** Three-arg constructor tolerates null object, null style, and null StringBuffer. */
    @Test
    public void testConstructorEx3() {
        assertEquals("<null>", new ToStringBuilder(null, null, null).toString());
        new ToStringBuilder(this.base, null, null).toString();
        new ToStringBuilder(this.base, ToStringStyle.DEFAULT_STYLE, null).toString();
    }
    /** Round-trips the process-wide default style, restoring it afterwards. */
    @Test
    public void testGetSetDefault() {
        try {
            ToStringBuilder.setDefaultStyle(ToStringStyle.NO_FIELD_NAMES_STYLE);
            assertSame(ToStringStyle.NO_FIELD_NAMES_STYLE, ToStringBuilder.getDefaultStyle());
        } finally {
            // reset for other tests
            ToStringBuilder.setDefaultStyle(ToStringStyle.DEFAULT_STYLE);
        }
    }
    /** Setting a null default style is rejected with IllegalArgumentException. */
    @Test(expected=IllegalArgumentException.class)
    public void testSetDefaultEx() {
        ToStringBuilder.setDefaultStyle(null);
    }
    /** A builder with no appended fields renders the bare "className@hex[]" form. */
    @Test
    public void testBlank() {
        assertEquals(baseStr + "[]", new ToStringBuilder(base).toString());
    }
    /**
     * Test wrapper for int primitive.
     */
    @Test
    public void testReflectionInteger() {
        assertEquals(baseStr + "[value=5]", ToStringBuilder.reflectionToString(base));
    }
/**
* Test wrapper for char primitive.
*/
@Test
public void testReflectionCharacter() {
final Character c = new Character('A');
assertEquals(this.toBaseString(c) + "[value=A]", ToStringBuilder.reflectionToString(c));
}
/**
* Test wrapper for char boolean.
*/
@Test
public void testReflectionBoolean() {
Boolean b;
b = Boolean.TRUE;
assertEquals(this.toBaseString(b) + "[value=true]", ToStringBuilder.reflectionToString(b));
b = Boolean.FALSE;
assertEquals(this.toBaseString(b) + "[value=false]", ToStringBuilder.reflectionToString(b));
}
/**
* Create the same toString() as Object.toString().
* @param o the object to create the string for.
* @return a String in the Object.toString format.
*/
private String toBaseString(final Object o) {
return o.getClass().getName() + "@" + Integer.toHexString(System.identityHashCode(o));
}
// Reflection Array tests
//
// Note on the following line of code repeated in the reflection array tests.
//
// assertReflectionArray("<null>", array);
//
// The expected value is not baseStr + "[<null>]" since array==null and is typed as Object.
// The null array does not carry array type information.
    // If we added a primitive array type constructor and a pile of associated methods,
    // then the declared array type information could be carried forward. IMHO, null is null.
//
// Gary Gregory - 2003-03-12 - ggregory@seagullsw.com
//
    /**
     * Asserts that every reflectionToString overload renders {@code actual} as {@code expected}.
     * A null {@code actual} is skipped entirely (see note above: null carries no array type).
     */
    public void assertReflectionArray(final String expected, final Object actual) {
        if (actual == null) {
            // Until ToStringBuilder supports null objects.
            return;
        }
        assertEquals(expected, ToStringBuilder.reflectionToString(actual));
        assertEquals(expected, ToStringBuilder.reflectionToString(actual, null));
        assertEquals(expected, ToStringBuilder.reflectionToString(actual, null, true));
        assertEquals(expected, ToStringBuilder.reflectionToString(actual, null, false));
    }
@Test
public void testReflectionObjectArray() {
Object[] array = new Object[] { null, base, new int[] { 3, 6 } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{<null>,5,{3,6}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionLongArray() {
long[] array = new long[] { 1, 2, -3, 4 };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{1,2,-3,4}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionIntArray() {
int[] array = new int[] { 1, 2, -3, 4 };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{1,2,-3,4}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionShortArray() {
short[] array = new short[] { 1, 2, -3, 4 };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{1,2,-3,4}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionyteArray() {
byte[] array = new byte[] { 1, 2, -3, 4 };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{1,2,-3,4}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionCharArray() {
char[] array = new char[] { 'A', '2', '_', 'D' };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{A,2,_,D}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionDoubleArray() {
double[] array = new double[] { 1.0, 2.9876, -3.00001, 4.3 };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{1.0,2.9876,-3.00001,4.3}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionFloatArray() {
float[] array = new float[] { 1.0f, 2.9876f, -3.00001f, 4.3f };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{1.0,2.9876,-3.00001,4.3}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionBooleanArray() {
boolean[] array = new boolean[] { true, false, false };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{true,false,false}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
// Reflection Array Array tests
@Test
public void testReflectionFloatArrayArray() {
float[][] array = new float[][] { { 1.0f, 2.29686f }, null, { Float.NaN } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{1.0,2.29686},<null>,{NaN}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionLongArrayArray() {
long[][] array = new long[][] { { 1, 2 }, null, { 5 } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{1,2},<null>,{5}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionIntArrayArray() {
int[][] array = new int[][] { { 1, 2 }, null, { 5 } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{1,2},<null>,{5}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionhortArrayArray() {
short[][] array = new short[][] { { 1, 2 }, null, { 5 } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{1,2},<null>,{5}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionByteArrayArray() {
byte[][] array = new byte[][] { { 1, 2 }, null, { 5 } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{1,2},<null>,{5}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionCharArrayArray() {
char[][] array = new char[][] { { 'A', 'B' }, null, { 'p' } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{A,B},<null>,{p}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionDoubleArrayArray() {
double[][] array = new double[][] { { 1.0, 2.29686 }, null, { Double.NaN } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{1.0,2.29686},<null>,{NaN}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
@Test
public void testReflectionBooleanArrayArray() {
boolean[][] array = new boolean[][] { { true, false }, null, { false } };
final String baseString = this.toBaseString(array);
assertEquals(baseString + "[{{true,false},<null>,{false}}]", ToStringBuilder.reflectionToString(array));
assertEquals(baseString + "[{{true,false},<null>,{false}}]", ToStringBuilder.reflectionToString(array));
array = null;
assertReflectionArray("<null>", array);
}
// Reflection hierarchy tests
    /**
     * Reflection rendering of an ArrayList, with and without transient fields.
     * The expected strings depend on ArrayList's private field layout, hence the
     * JDK vendor/version assumptions below.
     */
    @Test
    public void testReflectionHierarchyArrayList() {
        // note, the test data depends on the internal representation of the ArrayList, which may differ between JDK versions and vendors
        // representation different for IBM JDK 1.6.0, LANG-727
        assumeFalse("IBM Corporation".equals(SystemUtils.JAVA_VENDOR) && "1.6".equals(SystemUtils.JAVA_SPECIFICATION_VERSION));
        assumeFalse("Oracle Corporation".equals(SystemUtils.JAVA_VENDOR) && "1.6".compareTo(SystemUtils.JAVA_SPECIFICATION_VERSION) < 0);
        final List<Object> list = new ArrayList<Object>();
        final String baseString = this.toBaseString(list);
        final String expectedWithTransients = baseString + "[elementData={<null>,<null>,<null>,<null>,<null>,<null>,<null>,<null>,<null>,<null>},size=0,modCount=0]";
        final String toStringWithTransients = ToStringBuilder.reflectionToString(list, null, true);
        // The if-guard is redundant with assertEquals but kept byte-identical to the original.
        if (!expectedWithTransients.equals(toStringWithTransients)) {
            assertEquals(expectedWithTransients, toStringWithTransients);
        }
        final String expectedWithoutTransients = baseString + "[size=0]";
        final String toStringWithoutTransients = ToStringBuilder.reflectionToString(list, null, false);
        if (!expectedWithoutTransients.equals(toStringWithoutTransients)) {
            assertEquals(expectedWithoutTransients, toStringWithoutTransients);
        }
    }
@Test
public void testReflectionHierarchy() {
final ReflectionTestFixtureA baseA = new ReflectionTestFixtureA();
String baseString = this.toBaseString(baseA);
assertEquals(baseString + "[a=a]", ToStringBuilder.reflectionToString(baseA));
assertEquals(baseString + "[a=a]", ToStringBuilder.reflectionToString(baseA, null));
assertEquals(baseString + "[a=a]", ToStringBuilder.reflectionToString(baseA, null, false));
assertEquals(baseString + "[a=a,transientA=t]", ToStringBuilder.reflectionToString(baseA, null, true));
assertEquals(baseString + "[a=a]", ToStringBuilder.reflectionToString(baseA, null, false, null));
assertEquals(baseString + "[a=a]", ToStringBuilder.reflectionToString(baseA, null, false, Object.class));
assertEquals(baseString + "[a=a]", ToStringBuilder.reflectionToString(baseA, null, false, ReflectionTestFixtureA.class));
final ReflectionTestFixtureB baseB = new ReflectionTestFixtureB();
baseString = this.toBaseString(baseB);
assertEquals(baseString + "[b=b,a=a]", ToStringBuilder.reflectionToString(baseB));
assertEquals(baseString + "[b=b,a=a]", ToStringBuilder.reflectionToString(baseB));
assertEquals(baseString + "[b=b,a=a]", ToStringBuilder.reflectionToString(baseB, null));
assertEquals(baseString + "[b=b,a=a]", ToStringBuilder.reflectionToString(baseB, null, false));
assertEquals(baseString + "[b=b,transientB=t,a=a,transientA=t]", ToStringBuilder.reflectionToString(baseB, null, true));
assertEquals(baseString + "[b=b,a=a]", ToStringBuilder.reflectionToString(baseB, null, false, null));
assertEquals(baseString + "[b=b,a=a]", ToStringBuilder.reflectionToString(baseB, null, false, Object.class));
assertEquals(baseString + "[b=b,a=a]", ToStringBuilder.reflectionToString(baseB, null, false, ReflectionTestFixtureA.class));
assertEquals(baseString + "[b=b]", ToStringBuilder.reflectionToString(baseB, null, false, ReflectionTestFixtureB.class));
}
    /** Fixture: base class with one regular and one transient field. */
    static class ReflectionTestFixtureA {
        @SuppressWarnings("unused")
        private final char a='a';
        @SuppressWarnings("unused")
        private transient char transientA='t';
    }
    /** Fixture: subclass adding its own regular and transient field to {@link ReflectionTestFixtureA}. */
    static class ReflectionTestFixtureB extends ReflectionTestFixtureA {
        @SuppressWarnings("unused")
        private final char b='b';
        @SuppressWarnings("unused")
        private transient char transientB='t';
    }
@Test
public void testInnerClassReflection() {
final Outer outer = new Outer();
assertEquals(toBaseString(outer) + "[inner=" + toBaseString(outer.inner) + "[]]", outer.toString());
}
    /** Fixture: outer class holding a non-static inner instance; both render via reflection. */
    static class Outer {
        Inner inner = new Inner();
        class Inner {
            @Override
            public String toString() {
                return ToStringBuilder.reflectionToString(this);
            }
        }
        @Override
        public String toString() {
            return ToStringBuilder.reflectionToString(this);
        }
    }
// Reflection cycle tests
/**
* Test an array element pointing to its container.
*/
@Test
public void testReflectionArrayCycle() throws Exception {
final Object[] objects = new Object[1];
objects[0] = objects;
assertEquals(
this.toBaseString(objects) + "[{" + this.toBaseString(objects) + "}]",
ToStringBuilder.reflectionToString(objects));
}
/**
* Test an array element pointing to its container.
*/
@Test
public void testReflectionArrayCycleLevel2() throws Exception {
final Object[] objects = new Object[1];
final Object[] objectsLevel2 = new Object[1];
objects[0] = objectsLevel2;
objectsLevel2[0] = objects;
assertEquals(
this.toBaseString(objects) + "[{{" + this.toBaseString(objects) + "}}]",
ToStringBuilder.reflectionToString(objects));
assertEquals(
this.toBaseString(objectsLevel2) + "[{{" + this.toBaseString(objectsLevel2) + "}}]",
ToStringBuilder.reflectionToString(objectsLevel2));
}
@Test
public void testReflectionArrayArrayCycle() throws Exception {
final Object[][] objects = new Object[2][2];
objects[0][0] = objects;
objects[0][1] = objects;
objects[1][0] = objects;
objects[1][1] = objects;
final String basicToString = this.toBaseString(objects);
assertEquals(
basicToString
+ "[{{"
+ basicToString
+ ","
+ basicToString
+ "},{"
+ basicToString
+ ","
+ basicToString
+ "}}]",
ToStringBuilder.reflectionToString(objects));
}
    /**
     * A reflection test fixture: one half of a two-object reference cycle (points at B).
     */
    static class ReflectionTestCycleA {
        ReflectionTestCycleB b;
        @Override
        public String toString() {
            return ToStringBuilder.reflectionToString(this);
        }
    }
    /**
     * A reflection test fixture: the other half of the cycle (points back at A).
     */
    static class ReflectionTestCycleB {
        ReflectionTestCycleA a;
        @Override
        public String toString() {
            return ToStringBuilder.reflectionToString(this);
        }
    }
    /**
     * A reflection test fixture: a single mutable Object field, settable to the
     * fixture itself to form a self-cycle.
     */
    static class SimpleReflectionTestFixture {
        Object o;
        public SimpleReflectionTestFixture() {
        }
        public SimpleReflectionTestFixture(final Object o) {
            this.o = o;
        }
        @Override
        public String toString() {
            return ToStringBuilder.reflectionToString(this);
        }
    }
    /** Fixture whose only field is a reference to itself, assigned in the constructor. */
    private static class SelfInstanceVarReflectionTestFixture {
        @SuppressWarnings("unused")
        private final SelfInstanceVarReflectionTestFixture typeIsSelf;
        public SelfInstanceVarReflectionTestFixture() {
            this.typeIsSelf = this;
        }
        @Override
        public String toString() {
            return ToStringBuilder.reflectionToString(this);
        }
    }
    /**
     * A reflection test fixture with a self-referencing field followed by an
     * ordinary field, used to verify the field separator is still emitted after
     * a cycle is detected.
     */
    private static class SelfInstanceTwoVarsReflectionTestFixture {
        @SuppressWarnings("unused") // read reflectively by ReflectionToStringBuilder
        private final SelfInstanceTwoVarsReflectionTestFixture typeIsSelf;
        private final String otherType = "The Other Type";
        public SelfInstanceTwoVarsReflectionTestFixture() {
            this.typeIsSelf = this;
        }
        public String getOtherType(){
            return this.otherType;
        }
        @Override
        public String toString() {
            return ToStringBuilder.reflectionToString(this);
        }
    }
    /**
     * Test an Object pointing to itself, the simplest test.
     *
     * @throws Exception
     */
    @Test
    public void testSimpleReflectionObjectCycle() throws Exception {
        final SimpleReflectionTestFixture simple = new SimpleReflectionTestFixture();
        simple.o = simple;
        // The self-reference is rendered as the identity string, not recursed into.
        assertEquals(this.toBaseString(simple) + "[o=" + this.toBaseString(simple) + "]", simple.toString());
    }
    /**
     * Test a class that defines an ivar pointing to itself.
     *
     * @throws Exception
     */
    @Test
    public void testSelfInstanceVarReflectionObjectCycle() throws Exception {
        final SelfInstanceVarReflectionTestFixture test = new SelfInstanceVarReflectionTestFixture();
        // The self-referencing field renders as the instance's identity string.
        assertEquals(this.toBaseString(test) + "[typeIsSelf=" + this.toBaseString(test) + "]", test.toString());
    }
/**
* Test a class that defines an ivar pointing to itself. This test was
* created to show that handling cyclical object resulted in a missing endFieldSeparator call.
*
* @throws Exception
*/
@Test
public void testSelfInstanceTwoVarsReflectionObjectCycle() throws Exception {
final SelfInstanceTwoVarsReflectionTestFixture test = new SelfInstanceTwoVarsReflectionTestFixture();
assertEquals(this.toBaseString(test) + "[typeIsSelf=" + this.toBaseString(test) + ",otherType=" + test.getOtherType().toString() + "]", test.toString());
}
    /**
     * Test Objects pointing to each other.
     *
     * @throws Exception
     */
    @Test
    public void testReflectionObjectCycle() throws Exception {
        final ReflectionTestCycleA a = new ReflectionTestCycleA();
        final ReflectionTestCycleB b = new ReflectionTestCycleB();
        a.b = b;
        b.a = a;
        // a renders b in full, but b's back-reference to a collapses to a's identity string.
        assertEquals(
            this.toBaseString(a) + "[b=" + this.toBaseString(b) + "[a=" + this.toBaseString(a) + "]]",
            a.toString());
    }
    /**
     * Test a nasty combination of arrays and Objects pointing to each other.
     * objects[0] -> SimpleReflectionTestFixture[ o -> objects ]
     *
     * @throws Exception
     */
    @Test
    public void testReflectionArrayAndObjectCycle() throws Exception {
        final Object[] objects = new Object[1];
        final SimpleReflectionTestFixture simple = new SimpleReflectionTestFixture(objects);
        objects[0] = simple;
        // Starting from the array: the fixture is expanded, its back-reference collapses.
        assertEquals(
            this.toBaseString(objects)
                + "[{"
                + this.toBaseString(simple)
                + "[o="
                + this.toBaseString(objects)
                + "]"
                + "}]",
            ToStringBuilder.reflectionToString(objects));
        // Starting from the fixture: the array is expanded, the back-reference collapses.
        assertEquals(
            this.toBaseString(simple)
                + "[o={"
                + this.toBaseString(simple)
                + "}]",
            ToStringBuilder.reflectionToString(simple));
    }
    // Asserts that the style registry was released back to null after use;
    // a non-null registry here would indicate a cleanup leak.
    void validateNullToStringStyleRegistry() {
        final Map<Object, Object> registry = ToStringStyle.getRegistry();
        assertNull("Expected null, actual: "+registry, registry);
    }
// End: Reflection cycle tests
    /** Tests appendSuper() with empty, null-content, combined, and null inputs. */
    @Test
    public void testAppendSuper() {
        assertEquals(baseStr + "[]", new ToStringBuilder(base).appendSuper("Integer@8888[]").toString());
        assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).appendSuper("Integer@8888[<null>]").toString());
        assertEquals(baseStr + "[a=hello]", new ToStringBuilder(base).appendSuper("Integer@8888[]").append("a", "hello").toString());
        assertEquals(baseStr + "[<null>,a=hello]", new ToStringBuilder(base).appendSuper("Integer@8888[<null>]").append("a", "hello").toString());
        // A null superToString is ignored entirely.
        assertEquals(baseStr + "[a=hello]", new ToStringBuilder(base).appendSuper(null).append("a", "hello").toString());
    }
    /** Tests appendToString(); mirrors testAppendSuper() for the appendToString API. */
    @Test
    public void testAppendToString() {
        assertEquals(baseStr + "[]", new ToStringBuilder(base).appendToString("Integer@8888[]").toString());
        assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).appendToString("Integer@8888[<null>]").toString());
        assertEquals(baseStr + "[a=hello]", new ToStringBuilder(base).appendToString("Integer@8888[]").append("a", "hello").toString());
        assertEquals(baseStr + "[<null>,a=hello]", new ToStringBuilder(base).appendToString("Integer@8888[<null>]").append("a", "hello").toString());
        // A null toString argument is ignored entirely.
        assertEquals(baseStr + "[a=hello]", new ToStringBuilder(base).appendToString(null).append("a", "hello").toString());
    }
    /**
     * Tests append(Object) and append(String, Object), including the fullDetail
     * flag and summary output for empty collections, maps, and arrays.
     */
    @Test
    public void testObject() {
        final Integer i3 = Integer.valueOf(3);
        final Integer i4 = Integer.valueOf(4);
        assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) null).toString());
        assertEquals(baseStr + "[3]", new ToStringBuilder(base).append(i3).toString());
        assertEquals(baseStr + "[a=<null>]", new ToStringBuilder(base).append("a", (Object) null).toString());
        assertEquals(baseStr + "[a=3]", new ToStringBuilder(base).append("a", i3).toString());
        assertEquals(baseStr + "[a=3,b=4]", new ToStringBuilder(base).append("a", i3).append("b", i4).toString());
        // fullDetail == false renders a summary (<Type> or <size=n>) instead of the value.
        assertEquals(baseStr + "[a=<Integer>]", new ToStringBuilder(base).append("a", i3, false).toString());
        assertEquals(baseStr + "[a=<size=0>]", new ToStringBuilder(base).append("a", new ArrayList<Object>(), false).toString());
        assertEquals(baseStr + "[a=[]]", new ToStringBuilder(base).append("a", new ArrayList<Object>(), true).toString());
        assertEquals(baseStr + "[a=<size=0>]", new ToStringBuilder(base).append("a", new HashMap<Object, Object>(), false).toString());
        assertEquals(baseStr + "[a={}]", new ToStringBuilder(base).append("a", new HashMap<Object, Object>(), true).toString());
        assertEquals(baseStr + "[a=<size=0>]", new ToStringBuilder(base).append("a", (Object) new String[0], false).toString());
        assertEquals(baseStr + "[a={}]", new ToStringBuilder(base).append("a", (Object) new String[0], true).toString());
    }
    /** Mirrors testObject() but terminates the chain with build() instead of toString(). */
    @Test
    public void testObjectBuild() {
        final Integer i3 = Integer.valueOf(3);
        final Integer i4 = Integer.valueOf(4);
        assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) null).build());
        assertEquals(baseStr + "[3]", new ToStringBuilder(base).append(i3).build());
        assertEquals(baseStr + "[a=<null>]", new ToStringBuilder(base).append("a", (Object) null).build());
        assertEquals(baseStr + "[a=3]", new ToStringBuilder(base).append("a", i3).build());
        assertEquals(baseStr + "[a=3,b=4]", new ToStringBuilder(base).append("a", i3).append("b", i4).build());
        // fullDetail == false renders a summary (<Type> or <size=n>) instead of the value.
        assertEquals(baseStr + "[a=<Integer>]", new ToStringBuilder(base).append("a", i3, false).build());
        assertEquals(baseStr + "[a=<size=0>]", new ToStringBuilder(base).append("a", new ArrayList<Object>(), false).build());
        assertEquals(baseStr + "[a=[]]", new ToStringBuilder(base).append("a", new ArrayList<Object>(), true).build());
        assertEquals(baseStr + "[a=<size=0>]", new ToStringBuilder(base).append("a", new HashMap<Object, Object>(), false).build());
        assertEquals(baseStr + "[a={}]", new ToStringBuilder(base).append("a", new HashMap<Object, Object>(), true).build());
        assertEquals(baseStr + "[a=<size=0>]", new ToStringBuilder(base).append("a", (Object) new String[0], false).build());
        assertEquals(baseStr + "[a={}]", new ToStringBuilder(base).append("a", (Object) new String[0], true).build());
    }
@Test
public void testLong() {
assertEquals(baseStr + "[3]", new ToStringBuilder(base).append(3L).toString());
assertEquals(baseStr + "[a=3]", new ToStringBuilder(base).append("a", 3L).toString());
assertEquals(baseStr + "[a=3,b=4]", new ToStringBuilder(base).append("a", 3L).append("b", 4L).toString());
}
    /** Tests append(int) and append(String, int). */
    @SuppressWarnings("cast") // cast is not really needed, keep for consistency
    @Test
    public void testInt() {
        assertEquals(baseStr + "[3]", new ToStringBuilder(base).append((int) 3).toString());
        assertEquals(baseStr + "[a=3]", new ToStringBuilder(base).append("a", (int) 3).toString());
        assertEquals(baseStr + "[a=3,b=4]", new ToStringBuilder(base).append("a", (int) 3).append("b", (int) 4).toString());
    }
@Test
public void testShort() {
assertEquals(baseStr + "[3]", new ToStringBuilder(base).append((short) 3).toString());
assertEquals(baseStr + "[a=3]", new ToStringBuilder(base).append("a", (short) 3).toString());
assertEquals(baseStr + "[a=3,b=4]", new ToStringBuilder(base).append("a", (short) 3).append("b", (short) 4).toString());
}
@Test
public void testChar() {
assertEquals(baseStr + "[A]", new ToStringBuilder(base).append((char) 65).toString());
assertEquals(baseStr + "[a=A]", new ToStringBuilder(base).append("a", (char) 65).toString());
assertEquals(baseStr + "[a=A,b=B]", new ToStringBuilder(base).append("a", (char) 65).append("b", (char) 66).toString());
}
@Test
public void testByte() {
assertEquals(baseStr + "[3]", new ToStringBuilder(base).append((byte) 3).toString());
assertEquals(baseStr + "[a=3]", new ToStringBuilder(base).append("a", (byte) 3).toString());
assertEquals(baseStr + "[a=3,b=4]", new ToStringBuilder(base).append("a", (byte) 3).append("b", (byte) 4).toString());
}
    /** Tests append(double) and append(String, double). */
    @SuppressWarnings("cast") // casts are redundant; kept deliberately for consistency
    @Test
    public void testDouble() {
        assertEquals(baseStr + "[3.2]", new ToStringBuilder(base).append((double) 3.2).toString());
        assertEquals(baseStr + "[a=3.2]", new ToStringBuilder(base).append("a", (double) 3.2).toString());
        assertEquals(baseStr + "[a=3.2,b=4.3]", new ToStringBuilder(base).append("a", (double) 3.2).append("b", (double) 4.3).toString());
    }
@Test
public void testFloat() {
assertEquals(baseStr + "[3.2]", new ToStringBuilder(base).append((float) 3.2).toString());
assertEquals(baseStr + "[a=3.2]", new ToStringBuilder(base).append("a", (float) 3.2).toString());
assertEquals(baseStr + "[a=3.2,b=4.3]", new ToStringBuilder(base).append("a", (float) 3.2).append("b", (float) 4.3).toString());
}
    /** Tests append(boolean) and append(String, boolean). */
    @Test
    public void testBoolean() {
        assertEquals(baseStr + "[true]", new ToStringBuilder(base).append(true).toString());
        assertEquals(baseStr + "[a=true]", new ToStringBuilder(base).append("a", true).toString());
        assertEquals(baseStr + "[a=true,b=false]", new ToStringBuilder(base).append("a", true).append("b", false).toString());
    }
@Test
public void testObjectArray() {
Object[] array = new Object[] {null, base, new int[] {3, 6}};
assertEquals(baseStr + "[{<null>,5,{3,6}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{<null>,5,{3,6}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testLongArray() {
long[] array = new long[] {1, 2, -3, 4};
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testIntArray() {
int[] array = new int[] {1, 2, -3, 4};
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testShortArray() {
short[] array = new short[] {1, 2, -3, 4};
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testByteArray() {
byte[] array = new byte[] {1, 2, -3, 4};
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{1,2,-3,4}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testCharArray() {
char[] array = new char[] {'A', '2', '_', 'D'};
assertEquals(baseStr + "[{A,2,_,D}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{A,2,_,D}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testDoubleArray() {
double[] array = new double[] {1.0, 2.9876, -3.00001, 4.3};
assertEquals(baseStr + "[{1.0,2.9876,-3.00001,4.3}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{1.0,2.9876,-3.00001,4.3}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testFloatArray() {
float[] array = new float[] {1.0f, 2.9876f, -3.00001f, 4.3f};
assertEquals(baseStr + "[{1.0,2.9876,-3.00001,4.3}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{1.0,2.9876,-3.00001,4.3}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testBooleanArray() {
boolean[] array = new boolean[] {true, false, false};
assertEquals(baseStr + "[{true,false,false}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{true,false,false}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testLongArrayArray() {
long[][] array = new long[][] {{1, 2}, null, {5}};
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testIntArrayArray() {
int[][] array = new int[][] {{1, 2}, null, {5}};
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testShortArrayArray() {
short[][] array = new short[][] {{1, 2}, null, {5}};
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testByteArrayArray() {
byte[][] array = new byte[][] {{1, 2}, null, {5}};
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{1,2},<null>,{5}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testCharArrayArray() {
char[][] array = new char[][] {{'A', 'B'}, null, {'p'}};
assertEquals(baseStr + "[{{A,B},<null>,{p}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{A,B},<null>,{p}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testDoubleArrayArray() {
double[][] array = new double[][] {{1.0, 2.29686}, null, {Double.NaN}};
assertEquals(baseStr + "[{{1.0,2.29686},<null>,{NaN}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{1.0,2.29686},<null>,{NaN}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testFloatArrayArray() {
float[][] array = new float[][] {{1.0f, 2.29686f}, null, {Float.NaN}};
assertEquals(baseStr + "[{{1.0,2.29686},<null>,{NaN}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{1.0,2.29686},<null>,{NaN}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
@Test
public void testBooleanArrayArray() {
boolean[][] array = new boolean[][] {{true, false}, null, {false}};
assertEquals(baseStr + "[{{true,false},<null>,{false}}]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[{{true,false},<null>,{false}}]", new ToStringBuilder(base).append((Object) array).toString());
array = null;
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append(array).toString());
assertEquals(baseStr + "[<null>]", new ToStringBuilder(base).append((Object) array).toString());
}
    /** Tests a two-object cycle rendered through non-reflective append(Object). */
    @Test
    public void testObjectCycle() {
        final ObjectCycle a = new ObjectCycle();
        final ObjectCycle b = new ObjectCycle();
        a.obj = b;
        b.obj = a;
        // b is expanded inside a, and b's back-reference to a collapses to a's identity string.
        final String expected = toBaseString(a) + "[" + toBaseString(b) + "[" + toBaseString(a) + "]]";
        assertEquals(expected, a.toString());
    }
    /** Test fixture: holds one reference and renders it via append(Object), enabling cycles. */
    static class ObjectCycle {
        Object obj;
        @Override
        public String toString() {
            return new ToStringBuilder(this).append(obj).toString();
        }
    }
@Test
public void testSimpleReflectionStatics() {
final SimpleReflectionStaticFieldsFixture instance1 = new SimpleReflectionStaticFieldsFixture();
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345]",
ReflectionToStringBuilder.toString(instance1, null, false, true, SimpleReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345]",
ReflectionToStringBuilder.toString(instance1, null, true, true, SimpleReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345]",
this.toStringWithStatics(instance1, null, SimpleReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345]",
this.toStringWithStatics(instance1, null, SimpleReflectionStaticFieldsFixture.class));
}
/**
* Tests ReflectionToStringBuilder.toString() for statics.
*/
@Test
public void testReflectionStatics() {
final ReflectionStaticFieldsFixture instance1 = new ReflectionStaticFieldsFixture();
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345,instanceString=instanceString,instanceInt=67890]",
ReflectionToStringBuilder.toString(instance1, null, false, true, ReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345,staticTransientString=staticTransientString,staticTransientInt=54321,instanceString=instanceString,instanceInt=67890,transientString=transientString,transientInt=98765]",
ReflectionToStringBuilder.toString(instance1, null, true, true, ReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345,instanceString=instanceString,instanceInt=67890]",
this.toStringWithStatics(instance1, null, ReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString=staticString,staticInt=12345,instanceString=instanceString,instanceInt=67890]",
this.toStringWithStatics(instance1, null, ReflectionStaticFieldsFixture.class));
}
/**
* Tests ReflectionToStringBuilder.toString() for statics.
*/
@Test
public void testInheritedReflectionStatics() {
final InheritedReflectionStaticFieldsFixture instance1 = new InheritedReflectionStaticFieldsFixture();
assertEquals(
this.toBaseString(instance1) + "[staticString2=staticString2,staticInt2=67890]",
ReflectionToStringBuilder.toString(instance1, null, false, true, InheritedReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString2=staticString2,staticInt2=67890,staticString=staticString,staticInt=12345]",
ReflectionToStringBuilder.toString(instance1, null, false, true, SimpleReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString2=staticString2,staticInt2=67890,staticString=staticString,staticInt=12345]",
this.toStringWithStatics(instance1, null, SimpleReflectionStaticFieldsFixture.class));
assertEquals(
this.toBaseString(instance1) + "[staticString2=staticString2,staticInt2=67890,staticString=staticString,staticInt=12345]",
this.toStringWithStatics(instance1, null, SimpleReflectionStaticFieldsFixture.class));
}
    /**
     * <p>This method uses reflection to build a suitable
     * <code>toString</code> value which includes static fields.</p>
     *
     * <p>It uses <code>AccessibleObject.setAccessible</code> to gain access to private
     * fields. This means that it will throw a security exception if run
     * under a security manager, if the permissions are not set up correctly.
     * It is also not as efficient as testing explicitly. </p>
     *
     * <p>Transient fields are not output.</p>
     *
     * <p>Superclass fields will be appended up to and including the specified superclass.
     * A null superclass is treated as <code>java.lang.Object</code>.</p>
     *
     * <p>If the style is <code>null</code>, the default
     * <code>ToStringStyle</code> is used.</p>
     *
     * @param <T> the type of the object being output
     * @param object the Object to be output
     * @param style the style of the <code>toString</code> to create,
     *  may be <code>null</code>
     * @param reflectUpToClass the superclass to reflect up to (inclusive),
     *  may be <code>null</code>
     * @return the String result
     * @throws IllegalArgumentException if the Object is <code>null</code>
     */
    public <T> String toStringWithStatics(final T object, final ToStringStyle style, final Class<? super T> reflectUpToClass) {
        return ReflectionToStringBuilder.toString(object, style, false, true, reflectUpToClass);
    }
    /**
     * Tests ReflectionToStringBuilder setUpToClass().
     */
    @Test
    public void test_setUpToClass_valid() {
        final Integer val = Integer.valueOf(5);
        final ReflectionToStringBuilder test = new ReflectionToStringBuilder(val);
        // Number is a superclass of Integer, so this must be accepted without throwing.
        test.setUpToClass(Number.class);
        test.toString();
    }
    /**
     * Tests ReflectionToStringBuilder setUpToClass().
     */
    @Test(expected=IllegalArgumentException.class)
    public void test_setUpToClass_invalid() {
        final Integer val = Integer.valueOf(5);
        final ReflectionToStringBuilder test = new ReflectionToStringBuilder(val);
        try {
            // String is not a superclass of Integer: expected to throw.
            test.setUpToClass(String.class);
        } finally {
            // Still render so registry/state cleanup paths run before the exception propagates.
            test.toString();
        }
    }
/**
* Tests ReflectionToStringBuilder.toString() for statics.
*/
class ReflectionStaticFieldsFixture {
static final String staticString = "staticString";
static final int staticInt = 12345;
static final transient String staticTransientString = "staticTransientString";
static final transient int staticTransientInt = 54321;
String instanceString = "instanceString";
int instanceInt = 67890;
transient String transientString = "transientString";
transient int transientInt = 98765;
}
/**
* Test fixture for ReflectionToStringBuilder.toString() for statics.
*/
class SimpleReflectionStaticFieldsFixture {
static final String staticString = "staticString";
static final int staticInt = 12345;
}
    /**
     * Test fixture for ReflectionToStringBuilder.toString() for statics:
     * adds its own statics on top of the inherited ones.
     */
    class InheritedReflectionStaticFieldsFixture extends SimpleReflectionStaticFieldsFixture {
        static final String staticString2 = "staticString2";
        static final int staticInt2 = 67890;
    }
    /** A null target object renders as the null-text token, not an exception. */
    @Test
    public void testReflectionNull() {
        assertEquals("<null>", ReflectionToStringBuilder.toString(null));
    }
    /**
     * Points out failure to print anything from appendToString methods using MULTI_LINE_STYLE.
     * See issue LANG-372.
     */
    class MultiLineTestObject {
        Integer i = Integer.valueOf(31337);
        @Override
        public String toString() {
            return new ToStringBuilder(this).append("testInt", i).toString();
        }
    }
@Test
public void testAppendToStringUsingMultiLineStyle() {
final MultiLineTestObject obj = new MultiLineTestObject();
final ToStringBuilder testBuilder = new ToStringBuilder(this, ToStringStyle.MULTI_LINE_STYLE)
.appendToString(obj.toString());
assertEquals(testBuilder.toString().indexOf("testInt=31337"), -1);
}
}
| |
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.core.graph.transformation;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import com.facebook.buck.core.graph.transformation.executor.DepsAwareExecutor;
import com.facebook.buck.core.graph.transformation.executor.DepsAwareTask;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.util.MoreSuppliers;
import com.facebook.buck.util.function.ThrowingSupplier;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.Futures;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
/**
* Transformation engine that transforms supplied {@link ComputeKey} into {@link ComputeResult} via
* {@link GraphTransformer}. This engine is able to asynchronously run graph based computation,
* reusing results when possible. Note that the computation dependency graph must be an acyclic
* graph.
*
* <p>This engine is able to deal with dependencies in the computation graph by having Transformer
* request dependent results of other transformations through {@link
* GraphTransformer#discoverPreliminaryDeps(com.facebook.buck.core.graph.transformation.ComputeKey)}
* and {@link GraphTransformer#discoverDeps(ComputeKey, TransformationEnvironment)}. The engine
* guarantees that all dependencies are completed before performing the transformation.
*
* <p>This engine allows for multiple stages of transformations via different {@link
* GraphTransformer}s. These are specified during construction of the engine by supplying multiple
* {@link GraphTransformationStage}s. Different stages are allowed to depend on other stages, as
* long as the computation nodes do not form a cyclic dependency.
*
* <p>{@link GraphTransformer#discoverPreliminaryDeps(ComputeKey)} will be ran first to discover a
* set of dependencies based only on the {@link ComputeKey}. The keys are then computed, and the
* results passed via the {@link TransformationEnvironment} to {@link
* GraphTransformer#discoverDeps(ComputeKey, TransformationEnvironment)} for a second stage of
* dependency discovery, where the dependency discovery can use the dependencies previously
* specified. The results of dependencies returned via both {@link
* GraphTransformer#discoverPreliminaryDeps(ComputeKey)} and {@link
* GraphTransformer#discoverDeps(ComputeKey, TransformationEnvironment)} will be available for the
* {@link GraphTransformer#transform(ComputeKey, TransformationEnvironment)} operation.
*
* <p>Transformations also should never block waiting for each other in any manner. If required to
* wait, the transformation must declare it through {@link
* GraphTransformer#discoverPreliminaryDeps(ComputeKey)} or {@link
* GraphTransformer#discoverDeps(ComputeKey, TransformationEnvironment)}
*
* <p>The transformation is incremental, so cached portions of the transformation will be used
* whenever possible based on {@code ComputeKey.equals()}. Therefore, {@link ComputeKey} should be
* immutable, and have deterministic equals. For future perspective, we want to have {@link
* ComputeKey} be serializable, so that we can eventually send keys to be computed remotely.
*
* <p>A custom cache can be supplied to the engine to cache the computation as desired.
*
* <p>Transformations will be applied asynchronously, so independent transformations can be executed
* in parallel. It is therefore important that transformations are thread safe.
*
* <p>By using all callback based operations and queue based operations, this engine will also
* reduce stack usage, eliminating stack overflow for large graph computations, provided that the
* {@link GraphTransformer} itself does not stack overflow within its {@link
* GraphTransformer#discoverPreliminaryDeps(ComputeKey)}, {@link
* GraphTransformer#discoverDeps(ComputeKey, TransformationEnvironment)}, and {@link
* GraphTransformer#transform(ComputeKey, TransformationEnvironment)} methods.
*/
public final class DefaultGraphTransformationEngine implements GraphTransformationEngine {
private static final Logger LOG = Logger.get(DefaultGraphTransformationEngine.class);
@VisibleForTesting final GraphTransformationEngineImpl<?> impl;
  /**
   * Constructs a {@link DefaultGraphTransformationEngine} with the given transformations.
   *
   * @param stages all of the available transformation stages this engine can execute
   * @param estimatedNumOps the estimated number of operations this engine will execute given a
   *     computation, to reserve the size of its computation index
   * @param executor the custom {@link DepsAwareExecutor} the engine uses to execute tasks
   */
  @SuppressWarnings("unchecked")
  public DefaultGraphTransformationEngine(
      ImmutableList<GraphTransformationStage<?, ?>> stages,
      int estimatedNumOps,
      DepsAwareExecutor<? super ComputeResult, ?> executor) {
    this.impl =
        new GraphTransformationEngineImpl<>(
            TransformationStageMap.from(stages),
            estimatedNumOps,
            // Safe narrowing: an executor accepting "? super ComputeResult" can run
            // every ComputeResult task this engine submits.
            (DepsAwareExecutor<ComputeResult, ?>) executor);
  }
  @Override
  public void close() {
    // Delegates shutdown to the internal engine implementation.
    impl.close();
  }
  /** Asynchronously computes the result for {@code key}, delegating to the internal engine. */
  @Override
  public final <KeyType extends ComputeKey<ResultType>, ResultType extends ComputeResult>
      Future<ResultType> compute(KeyType key) {
    return impl.compute(key);
  }
  /** Blocking variant of {@link #compute(ComputeKey)}. */
  @Override
  public final <KeyType extends ComputeKey<ResultType>, ResultType extends ComputeResult>
      ResultType computeUnchecked(KeyType key) {
    // Futures.getUnchecked blocks and rethrows execution failures as unchecked exceptions.
    return Futures.getUnchecked(compute(key));
  }
  /** Schedules computation of every key, returning one future per key. */
  @Override
  public final <KeyType extends ComputeKey<ResultType>, ResultType extends ComputeResult>
      ImmutableMap<KeyType, Future<ResultType>> computeAll(Set<KeyType> keys) {
    // NOTE(review): parallelStream only parallelizes task *submission* here; the
    // computations themselves run on the DepsAwareExecutor — confirm it's needed.
    return keys.parallelStream().collect(toImmutableMap(key -> key, this::compute));
  }
@Override
public final <KeyType extends ComputeKey<ResultType>, ResultType extends ComputeResult>
ImmutableMap<KeyType, ResultType> computeAllUnchecked(Set<KeyType> keys) {
return ImmutableMap.copyOf(Maps.transformValues(computeAll(keys), Futures::getUnchecked));
}
/**
* Internal implementation of the {@link DefaultGraphTransformationEngine} to hide some type
* parameters
*
* <p>Internally, on the first request, Transformation schedules the requested key to be
* completed, and stores the pending computation in a map with key of ComputeKey. Subsequent
* requests for the same ComputeKey will reuse the stored pending computation.
*
* <p>Due to memory overhead of the pending task, upon completion, the pending task is deleted
* from the stored map to allow it to be garbage collected. The raw result will be put into the
* result cache. Subsequent requests will reuse the raw result from the cache directly.
*/
@VisibleForTesting
class GraphTransformationEngineImpl<TaskType extends DepsAwareTask<ComputeResult, TaskType>> {
private final TransformationStageMap transformationStageMap;
private final DepsAwareExecutor<ComputeResult, TaskType> executor;
@VisibleForTesting
final ConcurrentHashMap<ComputeKey<? extends ComputeResult>, TaskType> computationIndex;
/**
* @param transformationStageMap a map of the key types to the transformation stages
* @param estimatedNumOps the estimated number of operations this engine will execute given a
* computation, to reserve the size of its computation index
* @param executor the custom {@link Executor} the engine uses to execute tasks
*/
private GraphTransformationEngineImpl(
TransformationStageMap transformationStageMap,
int estimatedNumOps,
DepsAwareExecutor<ComputeResult, TaskType> executor) {
this.transformationStageMap = transformationStageMap;
this.computationIndex = new ConcurrentHashMap<>(estimatedNumOps);
this.executor = executor;
}
public void close() {
executor.close();
}
@SuppressWarnings("unchecked")
private <UResultType extends ComputeResult, UKeyType extends ComputeKey<UResultType>>
Future<UResultType> compute(UKeyType key) {
LOG.verbose("Attempting to load from cache for key: %s", key);
GraphTransformationStage<ComputeKey<? extends ComputeResult>, ? extends ComputeResult> stage =
transformationStageMap.get(key);
Optional<? extends ComputeResult> result = stage.getCache().get(key);
if (result.isPresent()) {
return CompletableFuture.completedFuture((UResultType) result.get());
}
TaskType task = convertKeyToTask(key, stage);
return (Future<UResultType>) executor.submit(task);
}
private TaskType convertKeyToTask(
ComputeKey<? extends ComputeResult> key,
GraphTransformationStage<ComputeKey<? extends ComputeResult>, ? extends ComputeResult>
stage) {
return computationIndex.computeIfAbsent(
key,
mapKey -> {
// recheck the resultCache in event that the cache got populated while we were waiting
// to access the computationIndex.
Optional<? extends ComputeResult> cachedResult = stage.getCache().get(key);
if (cachedResult.isPresent()) {
return executor.createTask(
() -> {
computationIndex.remove(key);
return cachedResult.get();
});
}
ImmutableMap.Builder<ComputeKey<?>, Future<ComputeResult>> depResults =
ImmutableMap.builder();
ThrowingSupplier<ImmutableSet<TaskType>, Exception> preliminaryDepsSupplier =
MoreSuppliers.memoize(
() -> computePreliminaryDepForKey(key, stage, depResults), Exception.class);
ThrowingSupplier<ImmutableSet<TaskType>, Exception> depsSupplier =
MoreSuppliers.memoize(
() -> computeDepsForKey(stage, key, depResults), Exception.class);
return executor.createThrowingTask(
() -> computeForKey(key, stage, collectDeps(depResults.build())),
preliminaryDepsSupplier,
depsSupplier);
});
}
private ComputeResult computeForKey(
ComputeKey<? extends ComputeResult> key,
GraphTransformationStage<ComputeKey<?>, ? extends ComputeResult> stage,
ImmutableMap<ComputeKey<?>, ComputeResult> depResults)
throws Exception {
ComputeResult result = stage.transform(key, new DefaultTransformationEnvironment(depResults));
computationIndex.remove(key);
return result;
}
private ImmutableSet<TaskType> computePreliminaryDepForKey(
ComputeKey<? extends ComputeResult> key,
GraphTransformationStage<ComputeKey<? extends ComputeResult>, ? extends ComputeResult>
stage,
ImmutableMap.Builder<ComputeKey<?>, Future<ComputeResult>> depResults)
throws Exception {
ImmutableSet<? extends ComputeKey<?>> preliminaryDepKeys =
stage.getTransformer().discoverPreliminaryDeps(key);
ImmutableSet.Builder<TaskType> preliminaryDepWorkBuilder =
ImmutableSet.builderWithExpectedSize(preliminaryDepKeys.size());
preliminaryDepKeys.forEach(
preliminaryDepKey -> {
GraphTransformationStage<ComputeKey<? extends ComputeResult>, ? extends ComputeResult>
depStage = transformationStageMap.get(preliminaryDepKey);
TaskType task = convertKeyToTask(preliminaryDepKey, depStage);
depResults.put(preliminaryDepKey, task.getResultFuture());
preliminaryDepWorkBuilder.add(task);
});
return preliminaryDepWorkBuilder.build();
}
private ImmutableSet<TaskType> computeDepsForKey(
GraphTransformationStage<ComputeKey<? extends ComputeResult>, ? extends ComputeResult>
stage,
ComputeKey<? extends ComputeResult> key,
ImmutableMap.Builder<ComputeKey<?>, Future<ComputeResult>> depResults)
throws Exception {
ImmutableSet<? extends ComputeKey<? extends ComputeResult>> depKeys =
stage
.getTransformer()
.discoverDeps(
key, new DefaultTransformationEnvironment(collectDeps(depResults.build())));
// task that executes secondary deps, depending on the initial deps
ImmutableSet.Builder<TaskType> depWorkBuilder =
ImmutableSet.builderWithExpectedSize(depKeys.size());
for (ComputeKey<? extends ComputeResult> depKey : depKeys) {
TaskType task = convertKeyToTask(depKey, transformationStageMap.get(depKey));
depResults.put(depKey, task.getResultFuture());
depWorkBuilder.add(task);
}
return depWorkBuilder.build();
}
private ImmutableMap<ComputeKey<?>, ComputeResult> collectDeps(
ImmutableMap<ComputeKey<?>, Future<ComputeResult>> deps) {
return ImmutableMap.copyOf(
Maps.transformValues(
deps,
futureRes -> {
Preconditions.checkState(futureRes.isDone());
return Futures.getUnchecked(futureRes);
}));
}
}
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings.bluetooth;
import android.bluetooth.BluetoothAdapter;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.os.Handler;
import android.os.SystemProperties;
import android.preference.Preference;
import android.text.format.DateUtils;
import com.android.settings.R;
import android.text.format.Time;
import android.util.Log;
/**
 * BluetoothDiscoverableEnabler is a helper to manage the "Discoverable"
 * checkbox. It sets/unsets discoverability and keeps track of how much time
 * until the discoverability is automatically turned off.
 */
final class BluetoothDiscoverableEnabler implements Preference.OnPreferenceClickListener {
    private static final String TAG = "BluetoothDiscoverableEnabler";

    /** Debug system property that overrides the configured discoverable timeout. */
    private static final String SYSTEM_PROPERTY_DISCOVERABLE_TIMEOUT =
            "debug.bt.discoverable_time";

    // Supported timeout values, in seconds.
    private static final int DISCOVERABLE_TIMEOUT_TWO_MINUTES = 120;
    private static final int DISCOVERABLE_TIMEOUT_FIVE_MINUTES = 300;
    private static final int DISCOVERABLE_TIMEOUT_ONE_HOUR = 3600;
    static final int DISCOVERABLE_TIMEOUT_NEVER = 0;

    // Bluetooth advanced settings screen was replaced with action bar items.
    // Use the same preference key for discoverable timeout as the old ListPreference.
    private static final String KEY_DISCOVERABLE_TIMEOUT = "bt_discoverable_timeout";

    // Persisted values for the timeout preference.
    private static final String VALUE_DISCOVERABLE_TIMEOUT_TWO_MINUTES = "twomin";
    private static final String VALUE_DISCOVERABLE_TIMEOUT_FIVE_MINUTES = "fivemin";
    private static final String VALUE_DISCOVERABLE_TIMEOUT_ONE_HOUR = "onehour";
    private static final String VALUE_DISCOVERABLE_TIMEOUT_NEVER = "never";

    static final int DEFAULT_DISCOVERABLE_TIMEOUT = DISCOVERABLE_TIMEOUT_TWO_MINUTES;

    private final Context mContext;
    private final Handler mUiHandler;
    private final Preference mDiscoveryPreference;
    private final LocalBluetoothAdapter mLocalAdapter;
    private final SharedPreferences mSharedPreferences;

    private boolean mDiscoverable;
    private int mNumberOfPairedDevices;
    /** Cached timeout in seconds; -1 until first resolved by getDiscoverableTimeout(). */
    private int mTimeoutSecs = -1;

    /** Listens for scan-mode changes broadcast by the adapter. */
    private final BroadcastReceiver mReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            if (BluetoothAdapter.ACTION_SCAN_MODE_CHANGED.equals(intent.getAction())) {
                int mode = intent.getIntExtra(BluetoothAdapter.EXTRA_SCAN_MODE,
                        BluetoothAdapter.ERROR);
                if (mode != BluetoothAdapter.ERROR) {
                    handleModeChanged(mode);
                }
            }
        }
    };

    private final Runnable mUpdateCountdownSummaryRunnable = new Runnable() {
        public void run() {
            updateCountdownSummary();
        }
    };

    BluetoothDiscoverableEnabler(Context context, LocalBluetoothAdapter adapter,
            Preference discoveryPreference) {
        mContext = context;
        mUiHandler = new Handler();
        mLocalAdapter = adapter;
        mDiscoveryPreference = discoveryPreference;
        mSharedPreferences = discoveryPreference.getSharedPreferences();
        // The timeout choice is persisted manually; see setDiscoverableTimeout().
        discoveryPreference.setPersistent(false);
    }

    /** Starts listening for scan-mode changes and syncs the UI with the current mode. */
    public void resume() {
        if (mLocalAdapter == null) {
            return;
        }
        IntentFilter filter = new IntentFilter(BluetoothAdapter.ACTION_SCAN_MODE_CHANGED);
        mContext.registerReceiver(mReceiver, filter);
        mDiscoveryPreference.setOnPreferenceClickListener(this);
        handleModeChanged(mLocalAdapter.getScanMode());
    }

    /** Stops the countdown updates and unregisters the scan-mode receiver. */
    public void pause() {
        if (mLocalAdapter == null) {
            return;
        }
        mUiHandler.removeCallbacks(mUpdateCountdownSummaryRunnable);
        mContext.unregisterReceiver(mReceiver);
        mDiscoveryPreference.setOnPreferenceClickListener(null);
    }

    public boolean onPreferenceClick(Preference preference) {
        // toggle discoverability
        mDiscoverable = !mDiscoverable;
        setEnabled(mDiscoverable);
        return true;
    }

    /** Turns discoverability on (with the configured timeout) or off. */
    private void setEnabled(boolean enable) {
        if (enable) {
            int timeout = getDiscoverableTimeout();
            long endTimestamp = System.currentTimeMillis() + timeout * 1000L;
            LocalBluetoothPreferences.persistDiscoverableEndTimestamp(mContext, endTimestamp);
            mLocalAdapter.setScanMode(BluetoothAdapter.SCAN_MODE_CONNECTABLE_DISCOVERABLE, timeout);
            updateCountdownSummary();
            // Fixed log message: original concatenated "...enable" and "timeout..." with no
            // separator, producing e.g. "enabled = truetimeout = 120".
            Log.d(TAG, "setEnabled(): enabled = " + enable + ", timeout = " + timeout);
            if (timeout > 0) {
                // Schedule an alarm to turn discoverability back off at the deadline.
                BluetoothDiscoverableTimeoutReceiver.setDiscoverableAlarm(mContext, endTimestamp);
            }
        } else {
            mLocalAdapter.setScanMode(BluetoothAdapter.SCAN_MODE_CONNECTABLE);
            BluetoothDiscoverableTimeoutReceiver.cancelDiscoverableAlarm(mContext);
        }
    }

    /** Updates the preference summary with the remaining time (or "always" for no timeout). */
    private void updateTimerDisplay(int timeout) {
        if (getDiscoverableTimeout() == DISCOVERABLE_TIMEOUT_NEVER) {
            mDiscoveryPreference.setSummary(R.string.bluetooth_is_discoverable_always);
        } else {
            String textTimeout = formatTimeRemaining(timeout);
            mDiscoveryPreference.setSummary(mContext.getString(R.string.bluetooth_is_discoverable,
                    textTimeout));
        }
    }

    /** Formats seconds as "m:ss", e.g. 120 becomes "2:00". */
    private static String formatTimeRemaining(int timeout) {
        StringBuilder sb = new StringBuilder(6);    // "mmm:ss"
        int min = timeout / 60;
        sb.append(min).append(':');
        int sec = timeout - (min * 60);
        if (sec < 10) {
            sb.append('0');
        }
        sb.append(sec);
        return sb.toString();
    }

    /** Persists the timeout choice at the given list index and (re)enables discoverability. */
    void setDiscoverableTimeout(int index) {
        String timeoutValue;
        switch (index) {
            case 0:
            default:
                mTimeoutSecs = DISCOVERABLE_TIMEOUT_TWO_MINUTES;
                timeoutValue = VALUE_DISCOVERABLE_TIMEOUT_TWO_MINUTES;
                break;
            case 1:
                mTimeoutSecs = DISCOVERABLE_TIMEOUT_FIVE_MINUTES;
                timeoutValue = VALUE_DISCOVERABLE_TIMEOUT_FIVE_MINUTES;
                break;
            case 2:
                mTimeoutSecs = DISCOVERABLE_TIMEOUT_ONE_HOUR;
                timeoutValue = VALUE_DISCOVERABLE_TIMEOUT_ONE_HOUR;
                break;
            case 3:
                mTimeoutSecs = DISCOVERABLE_TIMEOUT_NEVER;
                timeoutValue = VALUE_DISCOVERABLE_TIMEOUT_NEVER;
                break;
        }
        mSharedPreferences.edit().putString(KEY_DISCOVERABLE_TIMEOUT, timeoutValue).apply();
        setEnabled(true); // enable discovery and reset timer
    }

    /**
     * Resolves the timeout in seconds: cached value, then the debug system property,
     * then the persisted preference (defaulting to two minutes).
     */
    private int getDiscoverableTimeout() {
        if (mTimeoutSecs != -1) {
            return mTimeoutSecs;
        }
        int timeout = SystemProperties.getInt(SYSTEM_PROPERTY_DISCOVERABLE_TIMEOUT, -1);
        if (timeout < 0) {
            String timeoutValue = mSharedPreferences.getString(KEY_DISCOVERABLE_TIMEOUT,
                    VALUE_DISCOVERABLE_TIMEOUT_TWO_MINUTES);
            if (timeoutValue.equals(VALUE_DISCOVERABLE_TIMEOUT_NEVER)) {
                timeout = DISCOVERABLE_TIMEOUT_NEVER;
            } else if (timeoutValue.equals(VALUE_DISCOVERABLE_TIMEOUT_ONE_HOUR)) {
                timeout = DISCOVERABLE_TIMEOUT_ONE_HOUR;
            } else if (timeoutValue.equals(VALUE_DISCOVERABLE_TIMEOUT_FIVE_MINUTES)) {
                timeout = DISCOVERABLE_TIMEOUT_FIVE_MINUTES;
            } else {
                timeout = DISCOVERABLE_TIMEOUT_TWO_MINUTES;
            }
        }
        mTimeoutSecs = timeout;
        return timeout;
    }

    /** Returns the list index (0-3) corresponding to the current timeout. */
    int getDiscoverableTimeoutIndex() {
        int timeout = getDiscoverableTimeout();
        switch (timeout) {
            case DISCOVERABLE_TIMEOUT_TWO_MINUTES:
            default:
                return 0;
            case DISCOVERABLE_TIMEOUT_FIVE_MINUTES:
                return 1;
            case DISCOVERABLE_TIMEOUT_ONE_HOUR:
                return 2;
            case DISCOVERABLE_TIMEOUT_NEVER:
                return 3;
        }
    }

    void setNumberOfPairedDevices(int pairedDevices) {
        mNumberOfPairedDevices = pairedDevices;
        handleModeChanged(mLocalAdapter.getScanMode());
    }

    /** Updates discoverability state and the preference summary for a new scan mode. */
    void handleModeChanged(int mode) {
        Log.d(TAG, "handleModeChanged(): mode = " + mode);
        if (mode == BluetoothAdapter.SCAN_MODE_CONNECTABLE_DISCOVERABLE) {
            mDiscoverable = true;
            updateCountdownSummary();
        } else {
            mDiscoverable = false;
            setSummaryNotDiscoverable();
        }
    }

    private void setSummaryNotDiscoverable() {
        if (mNumberOfPairedDevices != 0) {
            mDiscoveryPreference.setSummary(R.string.bluetooth_only_visible_to_paired_devices);
        } else {
            mDiscoveryPreference.setSummary(R.string.bluetooth_not_visible_to_other_devices);
        }
    }

    /** Refreshes the remaining-time summary and reschedules itself once a second. */
    private void updateCountdownSummary() {
        int mode = mLocalAdapter.getScanMode();
        if (mode != BluetoothAdapter.SCAN_MODE_CONNECTABLE_DISCOVERABLE) {
            return;
        }
        long currentTimestamp = System.currentTimeMillis();
        long endTimestamp = LocalBluetoothPreferences.getDiscoverableEndTimestamp(mContext);
        if (currentTimestamp > endTimestamp) {
            // We're still in discoverable mode, but maybe there isn't a timeout.
            updateTimerDisplay(0);
            return;
        }
        int timeLeft = (int) ((endTimestamp - currentTimestamp) / 1000L);
        updateTimerDisplay(timeLeft);
        synchronized (this) {
            mUiHandler.removeCallbacks(mUpdateCountdownSummaryRunnable);
            mUiHandler.postDelayed(mUpdateCountdownSummaryRunnable, 1000);
        }
    }
}
| |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.mdm.agent.services;
import android.app.admin.DevicePolicyManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.media.AudioManager;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.util.Log;
import android.widget.Toast;

import java.util.ArrayList;
import java.util.Map;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import org.wso2.mdm.agent.AlertActivity;
import org.wso2.mdm.agent.R;
import org.wso2.mdm.agent.ServerDetails;
import org.wso2.mdm.agent.api.ApplicationManager;
import org.wso2.mdm.agent.api.DeviceInfo;
import org.wso2.mdm.agent.api.GPSTracker;
import org.wso2.mdm.agent.api.WiFiConfig;
import org.wso2.mdm.agent.beans.DeviceAppInfo;
import org.wso2.mdm.agent.factory.DeviceStateFactory;
import org.wso2.mdm.agent.interfaces.DeviceState;
import org.wso2.mdm.agent.utils.CommonUtils;
import org.wso2.mdm.agent.utils.Constants;
import org.wso2.mdm.agent.utils.Preference;
/** Handles device-management operations dispatched to the agent and reports their results. */
public class Operation {
    // Context the handler was created with; used for services, resources and UI.
    private Context context;
    private DevicePolicyManager devicePolicyManager;
    // Installed-application manager (install/uninstall/list, web-app bookmarks).
    private ApplicationManager appList;
    private Resources resources;
    // Accumulates per-operation results; returned from doTask().
    private BuildResultPayload resultBuilder;
    private static final String TAG = "Operation Handler";
    // JSON tag names used when reporting memory information.
    private static final String MEMORY_INFO_TAG_TOTAL = "total";
    private static final String MEMORY_INFO_TAG_AVAILABLE = "available";
    private static final String MEMORY_INFO_TAG_INTERNAL = "internal_memory";
    private static final String MEMORY_INFO_TAG_EXTERNAL = "external_memory";
    // JSON tag names used when reporting battery information.
    private static final String BATTERY_INFO_TAG_LEVEL = "level";
    private static final String BATTERY_INFO_TAG = "battery";
    // JSON tag names used when reporting location information.
    private static final String LOCATION_INFO_TAG_LONGITUDE = "longitude";
    private static final String LOCATION_INFO_TAG_LATITUDE = "latitude";
    private static final String LOCATION_INFO_TAG = "location_obj";
    private static final String NETWORK_OPERATOR_TAG = "operator";
    // JSON tag names used when reporting installed applications.
    private static final String APP_INFO_TAG_NAME = "name";
    private static final String APP_INFO_TAG_PACKAGE = "package";
    private static final String APP_INFO_TAG_ICON = "icon";
    // Milliseconds to wait after showing the wipe toast before wiping (see wipeDevice()).
    private static final int PRE_WIPE_WAIT_TIME = 4000;
    private static final int ACTIVATION_REQUEST = 47;
    private static final int DEFAULT_PASSWORD_LENGTH = 0;
    // Ring-stream volume used when muting the device (see muteDevice()).
    private static final int DEFAULT_VOLUME = 0;
    private static final int DEFAULT_FLAG = 0;
    private static final int DEFAULT_PASSWORD_MIN_LENGTH = 3;
    private static final long DAY_MILLISECONDS_MULTIPLIER = 24 * 60 * 60 * 1000;
    // NOTE(review): not referenced by any handler visible here - confirm usage elsewhere.
    private Map<String, String> bundleParams;
/**
 * Creates an operation handler bound to the given context.
 *
 * @param context - Context used to resolve system services, resources and UI.
 */
public Operation(Context context) {
    Context appContext = context.getApplicationContext();
    this.context = context;
    this.resources = context.getResources();
    this.devicePolicyManager =
            (DevicePolicyManager) context.getSystemService(Context.DEVICE_POLICY_SERVICE);
    this.appList = new ApplicationManager(appContext);
    this.resultBuilder = new BuildResultPayload(appContext);
}
/**
 * Executes device management operations on the device.
 *
 * @param operationCode - Device management operation code.
 * @param operationData - Operation data if required (ex: WIFI configuration).
 * @return Operation/Policy status list.
 */
public JSONArray doTask(String operationCode, String operationData) {
    switch (operationCode) {
        case Constants.OPERATION_DEVICE_INFO:
            getDeviceInfo(operationCode);
            break;
        case Constants.OPERATION_DEVICE_LOCATION:
            getLocationInfo(operationCode);
            break;
        case Constants.OPERATION_GET_APPLICATION_LIST:
            getApplicationList(operationCode);
            break;
        case Constants.OPERATION_LOCK_DEVICE:
            lockDevice(operationCode);
            break;
        case Constants.OPERATION_WIPE_DATA:
            wipeDevice(operationCode, operationData);
            break;
        case Constants.OPERATION_CLEAR_PASSWORD:
            clearPassword(operationCode);
            break;
        case Constants.OPERATION_NOTIFICATION:
            displayNotification(operationCode);
            break;
        case Constants.OPERATION_WIFI:
            configureWifi(operationCode, operationData);
            break;
        case Constants.OPERATION_DISABLE_CAMERA:
            disableCamera(operationCode, operationData);
            break;
        // Single installs and bundle installs share one handler, which branches on the
        // operation code internally; the two cases previously duplicated the same body.
        case Constants.OPERATION_INSTALL_APPLICATION:
        case Constants.OPERATION_INSTALL_APPLICATION_BUNDLE:
            installAppBundle(operationCode, operationData);
            break;
        case Constants.OPERATION_UNINSTALL_APPLICATION:
            uninstallApplication(operationCode, operationData);
            break;
        case Constants.OPERATION_ENCRYPT_STORAGE:
            encryptStorage(operationCode, operationData);
            break;
        case Constants.OPERATION_RING:
            ringDevice(operationCode);
            break;
        case Constants.OPERATION_MUTE:
            muteDevice(operationCode);
            break;
        case Constants.OPERATION_WEBCLIP:
            createWebClip(operationCode, operationData);
            break;
        case Constants.OPERATION_PASSWORD_POLICY:
            setPasswordPolicy(operationCode, operationData);
            break;
        case Constants.OPERATION_INSTALL_GOOGLE_APP:
            installGooglePlayApp(operationCode, operationData);
            break;
        case Constants.OPERATION_CHANGE_LOCK_CODE:
            changeLockCode(operationCode, operationData);
            break;
        case Constants.OPERATION_POLICY_BUNDLE:
            setPolicyBundle(operationCode);
            break;
        case Constants.OPERATION_ENTERPRISE_WIPE_DATA:
            enterpriseWipe(operationCode);
            break;
        case Constants.OPERATION_BLACKLIST_APPS:
            blacklistApps(operationCode, operationData);
            break;
        default:
            // Typo fixed ("receieved") and the offending code included for diagnosis.
            Log.e(TAG, "Invalid operation code received: " + operationCode);
            break;
    }
    return resultBuilder.getResultPayload();
}
/**
 * Retrieve device information (battery, memory, location and network operator).
 * @param code - Operation code.
 */
public void getDeviceInfo(String code) {
    DeviceInfo deviceInfo = new DeviceInfo(context.getApplicationContext());
    // Device-state implementation is chosen per SDK level by the factory.
    DeviceState phoneState = DeviceStateFactory.getDeviceState(context.getApplicationContext(),
                                                               deviceInfo.getSdkVersion());
    GPSTracker gps = new GPSTracker(context.getApplicationContext());
    JSONObject result = new JSONObject();
    JSONObject batteryInfo = new JSONObject();
    JSONObject internalMemoryInfo = new JSONObject();
    JSONObject externalMemoryInfo = new JSONObject();
    JSONObject locationInfo = new JSONObject();
    double latitude;
    double longitude;
    try {
        latitude = gps.getLatitude();
        longitude = gps.getLongitude();
        int batteryLevel = (int) Math.floor(phoneState.getBatteryLevel());
        batteryInfo.put(BATTERY_INFO_TAG_LEVEL, batteryLevel);
        internalMemoryInfo.put(MEMORY_INFO_TAG_TOTAL, phoneState.getTotalInternalMemorySize());
        internalMemoryInfo.put(MEMORY_INFO_TAG_AVAILABLE,
                               phoneState.getAvailableInternalMemorySize());
        externalMemoryInfo.put(MEMORY_INFO_TAG_TOTAL, phoneState.getTotalExternalMemorySize());
        externalMemoryInfo.put(MEMORY_INFO_TAG_AVAILABLE,
                               phoneState.getAvailableExternalMemorySize());
        locationInfo.put(LOCATION_INFO_TAG_LATITUDE, latitude);
        locationInfo.put(LOCATION_INFO_TAG_LONGITUDE, longitude);
        result.put(BATTERY_INFO_TAG, batteryInfo);
        result.put(MEMORY_INFO_TAG_INTERNAL, internalMemoryInfo);
        result.put(MEMORY_INFO_TAG_EXTERNAL, externalMemoryInfo);
        // A (0, 0) reading is treated as "no location fix"; coordinates are omitted then.
        if (latitude != 0 && longitude != 0) {
            result.put(LOCATION_INFO_TAG, locationInfo);
        }
        result.put(NETWORK_OPERATOR_TAG, deviceInfo.getNetworkOperatorName());
        resultBuilder.build(code, result);
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
}
/**
 * Retrieve the device's current location (latitude and longitude).
 * @param code - Operation code.
 */
public void getLocationInfo(String code) {
    GPSTracker tracker = new GPSTracker(context);
    JSONObject location = new JSONObject();
    try {
        // Query the fix in the same order as before: latitude first, then longitude.
        location.put(LOCATION_INFO_TAG_LATITUDE, tracker.getLatitude());
        location.put(LOCATION_INFO_TAG_LONGITUDE, tracker.getLongitude());
        resultBuilder.build(code, location);
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
}
/**
 * Retrieve the list of applications installed on the device.
 * @param code - Operation code.
 */
public void getApplicationList(String code) {
    JSONArray result = new JSONArray();
    // Report name (URL-encoded), package and icon for every installed app, in order.
    for (DeviceAppInfo appInfo : appList.getInstalledApps()) {
        try {
            JSONObject app = new JSONObject();
            app.put(APP_INFO_TAG_NAME, Uri.encode(appInfo.getAppname()));
            app.put(APP_INFO_TAG_PACKAGE, appInfo.getPackagename());
            app.put(APP_INFO_TAG_ICON, appInfo.getIcon());
            result.put(app);
        } catch (JSONException e) {
            Log.e(TAG, "Invalid JSON format." + e);
        }
    }
    resultBuilder.build(code, result);
}
/**
 * Lock the device immediately via the device policy manager.
 * @param code - Operation code.
 */
public void lockDevice(String code) {
    resultBuilder.build(code);
    devicePolicyManager.lockNow();
}
/**
 * Ring the device: raise the ring stream to full volume and play the default ringtone.
 *
 * <p>Bug fix: this handler previously called {@code devicePolicyManager.lockNow()} —
 * a copy-paste of {@link #lockDevice(String)} — and never rang the device at all.
 *
 * @param code - Operation code.
 */
public void ringDevice(String code) {
    resultBuilder.build(code);
    AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    // Make sure the ring stream is audible before playing.
    audioManager.setRingerMode(AudioManager.RINGER_MODE_NORMAL);
    audioManager.setStreamVolume(AudioManager.STREAM_RING,
                                 audioManager.getStreamMaxVolume(AudioManager.STREAM_RING),
                                 DEFAULT_FLAG);
    Ringtone ringtone = RingtoneManager.getRingtone(context,
            RingtoneManager.getDefaultUri(RingtoneManager.TYPE_RINGTONE));
    if (ringtone != null) {
        ringtone.play();
    }
}
/**
 * Wipe the device after verifying the supplied PIN against the stored one.
 *
 * @param code - Operation code.
 * @param data - Data required by the operation (PIN).
 */
public void wipeDevice(String code, String data) {
    String savedPin =
            Preference.getString(context,
                                 resources.getString(R.string.shared_pref_pin));
    try {
        JSONObject wipeKey = new JSONObject(data);
        String inputPin = (String) wipeKey.get(resources.getString(R.string.shared_pref_pin));
        // Evaluate the PIN match once instead of repeating the comparison twice.
        boolean pinMatches = inputPin.trim().equals(savedPin.trim());
        String status;
        if (pinMatches) {
            status = resources.getString(R.string.shared_pref_default_status);
        } else {
            status = resources.getString(R.string.shared_pref_false_status);
        }
        // Report the outcome before wiping, while the agent can still respond.
        resultBuilder.build(code, status);
        if (pinMatches) {
            Toast.makeText(context, resources.getString(R.string.toast_message_wipe),
                           Toast.LENGTH_LONG).show();
            try {
                // Give the toast a moment to render before the device is wiped.
                Thread.sleep(PRE_WIPE_WAIT_TIME);
            } catch (InterruptedException e) {
                Log.e(TAG, "Wipe pause interrupted :" + e.toString());
                // Restore the interrupt flag for callers further up the stack.
                Thread.currentThread().interrupt();
            }
            devicePolicyManager.wipeData(ACTIVATION_REQUEST);
        } else {
            Toast.makeText(context, resources.getString(R.string.toast_message_wipe_failed),
                           Toast.LENGTH_LONG).show();
        }
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
}
/**
 * Clear device password.
 * @param code - Operation code.
 */
public void clearPassword(String code) {
    ComponentName demoDeviceAdmin = new ComponentName(context, AgentDeviceAdminReceiver.class);
    resultBuilder.build(code);
    // Relax the quality and minimum-length requirements first so the reset below is allowed.
    devicePolicyManager.setPasswordQuality(demoDeviceAdmin,
            DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED);
    devicePolicyManager.setPasswordMinimumLength(demoDeviceAdmin, DEFAULT_PASSWORD_LENGTH);
    // Reset the password to the app's default string, then lock so it takes effect.
    devicePolicyManager.resetPassword(resources.getString(R.string.shared_pref_default_string),
            DevicePolicyManager.RESET_PASSWORD_REQUIRE_ENTRY);
    devicePolicyManager.lockNow();
    devicePolicyManager.setPasswordQuality(demoDeviceAdmin,
            DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED);
}
/**
 * Display notification.
 * @param code - Operation code.
 */
public void displayNotification(String code) {
    String notification = null;
    try {
        // NOTE(review): this parses the operation *code* as JSON, while every other
        // handler parses the operation data payload. doTask() only passes the code here,
        // so this likely needs the data parameter instead - verify against the caller.
        JSONObject inputData = new JSONObject(code);
        // NOTE(review): JSONObject.get() throws on a missing key and its toString() is
        // never null, so the "!= null" half of this condition is always true.
        if (inputData.get(resources.getString(R.string.intent_extra_notification)).toString() !=
            null &&
            !inputData.get(resources.getString(R.string.intent_extra_notification)).toString()
                      .isEmpty()) {
            notification =
                    inputData.get(resources.getString(R.string.intent_extra_notification))
                             .toString();
        }
        resultBuilder.build(code);
        if (notification != null) {
            // Show the message in a dedicated full-screen alert activity.
            Intent intent = new Intent(context, AlertActivity.class);
            intent.putExtra(resources.getString(R.string.intent_extra_message), notification);
            intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP |
                            Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(intent);
        }
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
}
/**
 * Configure a WEP WIFI profile on the device.
 *
 * @param code - Operation code.
 * @param data - Data required (SSID, password).
 */
public void configureWifi(String code, String data) {
    String ssid = null;
    String password = null;
    try {
        JSONObject wifiData = new JSONObject(data);
        String ssidKey = resources.getString(R.string.intent_extra_ssid);
        String passwordKey = resources.getString(R.string.intent_extra_password);
        if (!wifiData.isNull(ssidKey)) {
            ssid = (String) wifiData.get(ssidKey);
        }
        if (!wifiData.isNull(passwordKey)) {
            password = (String) wifiData.get(passwordKey);
        }
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format " + e.toString());
    }
    // Attempt the configuration even on parse failure, matching prior behaviour.
    WiFiConfig config = new WiFiConfig(context.getApplicationContext());
    boolean saved = config.saveWEPConfig(ssid, password);
    String status = saved
            ? resources.getString(R.string.shared_pref_default_status)
            : resources.getString(R.string.shared_pref_false_status);
    resultBuilder.build(code, status);
}
/**
 * Disable/Enable the device camera.
 *
 * @param code - Operation code.
 * @param data - Data required (camera enable/disable switch).
 */
public void disableCamera(String code, String data) {
    // Camera stays enabled (disabled == false) when the switch is absent.
    boolean disableCamera = false;
    try {
        JSONObject inputData = new JSONObject(data);
        String functionKey = resources.getString(R.string.intent_extra_function);
        if (!inputData.isNull(functionKey)) {
            // Resolve the requested switch once instead of re-reading the payload
            // and the resource key in every branch.
            String function = inputData.get(functionKey).toString();
            if (function.equalsIgnoreCase(resources.getString(R.string.intent_extra_enable))) {
                disableCamera = false;
            } else if (function.equalsIgnoreCase(
                    resources.getString(R.string.intent_extra_disable))) {
                disableCamera = true;
            } else {
                // Fall back to a boolean literal ("true"/"false") in the payload.
                disableCamera = Boolean.parseBoolean(function);
            }
        }
        ComponentName cameraAdmin = new ComponentName(context, AgentDeviceAdminReceiver.class);
        resultBuilder.build(code);
        devicePolicyManager.setCameraDisabled(cameraAdmin, disableCamera);
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
}
/**
 * Install an application or an application bundle.
 *
 * @param code - Operation code (single install or bundle install).
 * @param data - Data required (single app object, or an array of app objects).
 */
public void installAppBundle(String code, String data) {
    try {
        resultBuilder.build(code);
        if (code.equals(Constants.OPERATION_INSTALL_APPLICATION)) {
            installApplication(new JSONObject(data), code);
        } else if (code.equals(Constants.OPERATION_INSTALL_APPLICATION_BUNDLE)) {
            // getJSONObject already returns a JSONObject; the previous cast and the
            // redundant null pre-initialization of the array were removed.
            JSONArray apps = new JSONArray(data);
            for (int i = 0; i < apps.length(); i++) {
                installApplication(apps.getJSONObject(i), code);
            }
        }
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
}
/**
 * Uninstall an application from the device.
 *
 * @param code - Operation code.
 * @param data - Data required (app package name).
 */
public void uninstallApplication(String code, String data) {
    try {
        JSONObject appData = new JSONObject(data);
        String packageName =
                (String) appData.get(resources.getString(R.string.intent_extra_identity));
        resultBuilder.build(code);
        appList.uninstallApplication(packageName);
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
}
/**
 * Encrypt/Decrypt device storage.
 * @param code - Operation code.
 * @param data - Data required (encryption enable/disable switch).
 */
public void encryptStorage(String code, String data) {
    // Defaults to encrypting when the switch is absent or the payload is unparseable.
    boolean doEncrypt = true;
    try {
        JSONObject encryptData = new JSONObject(data);
        if (!encryptData.isNull(resources.getString(R.string.intent_extra_function)) &&
            encryptData.get(resources.getString(R.string.intent_extra_function)).toString()
                       .equalsIgnoreCase(resources.getString(R.string.intent_extra_encrypt))) {
            doEncrypt = true;
        } else if (!encryptData.isNull(resources.getString(R.string.intent_extra_function)) &&
                   encryptData.get(resources.getString(R.string.intent_extra_function))
                              .toString()
                              .equalsIgnoreCase(
                                      resources.getString(R.string.intent_extra_decrypt))) {
            doEncrypt = false;
        } else if (!encryptData.isNull(resources.getString(R.string.intent_extra_function))) {
            // Fall back to a boolean literal ("true"/"false") in the payload.
            doEncrypt =
                    Boolean.parseBoolean(
                            encryptData.get(resources.getString(R.string.intent_extra_function))
                                       .toString());
        }
    } catch (JSONException e) {
        Log.e(TAG, "Invalid JSON format." + e);
    }
    ComponentName admin = new ComponentName(context, AgentDeviceAdminReceiver.class);
    // Encrypt only when supported and currently inactive; the user must confirm via
    // the ACTION_START_ENCRYPTION system activity.
    if (doEncrypt &&
        devicePolicyManager.getStorageEncryptionStatus() != DevicePolicyManager.ENCRYPTION_STATUS_UNSUPPORTED &&
        (devicePolicyManager.getStorageEncryptionStatus() == DevicePolicyManager.ENCRYPTION_STATUS_INACTIVE)) {
        devicePolicyManager.setStorageEncryption(admin, doEncrypt);
        Intent intent = new Intent(DevicePolicyManager.ACTION_START_ENCRYPTION);
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(intent);
    // Decrypt only when encryption is currently active or in progress.
    } else if (!doEncrypt &&
               devicePolicyManager.getStorageEncryptionStatus() != DevicePolicyManager.ENCRYPTION_STATUS_UNSUPPORTED &&
               (devicePolicyManager.getStorageEncryptionStatus() == DevicePolicyManager.ENCRYPTION_STATUS_ACTIVE ||
                devicePolicyManager.getStorageEncryptionStatus() == DevicePolicyManager.ENCRYPTION_STATUS_ACTIVATING)) {
        devicePolicyManager.setStorageEncryption(admin, doEncrypt);
    }
    String status;
    // Status is re-queried here (after setStorageEncryption): success means only that
    // encryption is supported on this device.
    if (devicePolicyManager.getStorageEncryptionStatus() !=
        DevicePolicyManager.ENCRYPTION_STATUS_UNSUPPORTED) {
        status = resources.getString(R.string.shared_pref_default_status);
    } else {
        status = resources.getString(R.string.shared_pref_false_status);
    }
    resultBuilder.build(code, status);
}
/**
 * Mute the device by setting the ring stream volume to zero.
 * @param code - Operation code.
 */
private void muteDevice(String code) {
    resultBuilder.build(code);
    AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    // DEFAULT_VOLUME is 0, i.e. silence the ring stream; DEFAULT_FLAG requests no UI.
    audioManager.setStreamVolume(AudioManager.STREAM_RING, DEFAULT_VOLUME, DEFAULT_FLAG);
}
/**
* Create web clip (Web app shortcut on device home screen).
* @param code - Operation code.
* @param data - Data required(Web app data).
*/
public void createWebClip(String code, String data) {
String appUrl = null;
String title = null;
try {
JSONObject webClipData = new JSONObject(code);
appUrl = (String) webClipData.get(resources.getString(R.string.intent_extra_identity));
title = (String) webClipData.get(resources.getString(R.string.intent_extra_title));
} catch (JSONException e) {
Log.e(TAG, "Invalid JSON format. " + e);
}
resultBuilder.build(data);
if(appUrl!=null && title!=null){
appList.createWebAppBookmark(appUrl, title);
}
}
    /**
     * Set device password policy.
     *
     * Reads each supported policy attribute from the JSON payload and, when the
     * attribute is present, applies it through {@link DevicePolicyManager}. The
     * two quality flags (require-alphanumeric, allow-simple) may arrive either
     * as strings or as JSON booleans, so both forms are handled. Attributes that
     * are absent leave the corresponding device policy untouched.
     * @param code - Operation code.
     * @param data - Data required (Password policy parameters).
     */
    public void setPasswordPolicy(String code, String data) {
        ComponentName demoDeviceAdmin = new ComponentName(context, AgentDeviceAdminReceiver.class);
        int attempts, length, history, specialChars;
        String alphanumeric, complex;
        boolean isAlphanumeric, isComplex;
        long timout; // NOTE(review): misspelling of 'timeout'; kept unchanged in this doc-only pass.
        // Acknowledge the operation up front; policies are applied best-effort below.
        resultBuilder.build(code);
        try {
            JSONObject policyData = new JSONObject(data);
            // Maximum failed unlock attempts before the device is wiped.
            if (!policyData
                    .isNull(resources.getString(R.string.policy_password_max_failed_attempts)) &&
                    policyData.get(resources.getString(R.string.policy_password_max_failed_attempts)) !=
                            null) {
                attempts =
                        Integer.parseInt((String) policyData.get(resources.getString(
                                R.string.policy_password_max_failed_attempts)));
                devicePolicyManager.setMaximumFailedPasswordsForWipe(demoDeviceAdmin, attempts);
            }
            // Minimum password length.
            if (!policyData.isNull(resources.getString(R.string.policy_password_min_length)) &&
                    policyData.get(resources.getString(R.string.policy_password_min_length)) != null) {
                length =
                        Integer.parseInt((String) policyData
                                .get(resources.getString(R.string.policy_password_min_length)));
                devicePolicyManager.setPasswordMinimumLength(demoDeviceAdmin, length);
            }
            // How many previous passwords the user is forbidden from reusing.
            if (!policyData.isNull(resources.getString(R.string.policy_password_pin_history)) &&
                    policyData.get(resources.getString(R.string.policy_password_pin_history)) != null) {
                history =
                        Integer.parseInt((String) policyData
                                .get(resources.getString(R.string.policy_password_pin_history)));
                devicePolicyManager.setPasswordHistoryLength(demoDeviceAdmin, history);
            }
            // Minimum number of symbol (special) characters required.
            if (!policyData
                    .isNull(resources.getString(R.string.policy_password_min_complex_chars)) &&
                    policyData.get(resources.getString(R.string.policy_password_min_complex_chars)) !=
                            null) {
                specialChars =
                        Integer.parseInt((String) policyData.get(resources.getString(
                                R.string.policy_password_min_complex_chars)));
                devicePolicyManager.setPasswordMinimumSymbols(demoDeviceAdmin, specialChars);
            }
            // Require an alphanumeric password. Accepts either the string form
            // (compared against the shared-pref "true" status string) or a boolean.
            if (!policyData
                    .isNull(resources.getString(R.string.policy_password_require_alphanumeric)) &&
                    policyData
                            .get(resources.getString(R.string.policy_password_require_alphanumeric)) !=
                            null) {
                if (policyData.get(resources.getString(
                        R.string.policy_password_require_alphanumeric)) instanceof String) {
                    alphanumeric =
                            (String) policyData.get(resources.getString(
                                    R.string.policy_password_require_alphanumeric));
                    if (alphanumeric
                            .equals(resources.getString(R.string.shared_pref_default_status))) {
                        devicePolicyManager.setPasswordQuality(demoDeviceAdmin,
                                DevicePolicyManager.PASSWORD_QUALITY_ALPHANUMERIC);
                    }
                } else if (policyData.get(resources.getString(
                        R.string.policy_password_require_alphanumeric)) instanceof Boolean) {
                    isAlphanumeric =
                            policyData.getBoolean(resources.getString(
                                    R.string.policy_password_require_alphanumeric));
                    if (isAlphanumeric) {
                        devicePolicyManager.setPasswordQuality(demoDeviceAdmin,
                                DevicePolicyManager.PASSWORD_QUALITY_ALPHANUMERIC);
                    }
                }
            }
            // "Allow simple" flag: when simple passwords are NOT allowed, demand
            // COMPLEX quality. Again accepts string or boolean form.
            if (!policyData.isNull(resources.getString(R.string.policy_password_allow_simple)) &&
                    policyData.get(resources.getString(R.string.policy_password_allow_simple)) !=
                            null) {
                if (policyData.get(resources.getString(
                        R.string.policy_password_allow_simple)) instanceof String) {
                    complex =
                            (String) policyData.get(resources.getString(
                                    R.string.policy_password_allow_simple));
                    if (!complex.equals(resources.getString(R.string.shared_pref_default_status))) {
                        devicePolicyManager.setPasswordQuality(demoDeviceAdmin,
                                DevicePolicyManager.PASSWORD_QUALITY_COMPLEX);
                    }
                } else if (policyData.get(resources.getString(
                        R.string.policy_password_allow_simple)) instanceof Boolean) {
                    isComplex =
                            policyData.getBoolean(
                                    resources.getString(R.string.policy_password_allow_simple));
                    if (!isComplex) {
                        devicePolicyManager.setPasswordQuality(demoDeviceAdmin,
                                DevicePolicyManager.PASSWORD_QUALITY_COMPLEX);
                    }
                }
            }
            // Password expiration: payload is in days, the API takes milliseconds.
            if (!policyData.isNull(resources.getString(R.string.policy_password_pin_age_in_days)) &&
                    policyData.get(resources.getString(R.string.policy_password_pin_age_in_days)) !=
                            null) {
                int daysOfExp =
                        Integer.parseInt((String) policyData.get(resources.getString(
                                R.string.policy_password_pin_age_in_days)));
                timout = (long) (daysOfExp * DAY_MILLISECONDS_MULTIPLIER);
                devicePolicyManager.setPasswordExpirationTimeout(demoDeviceAdmin, timout);
            }
        } catch (JSONException e) {
            Log.e(TAG, "Invalid JSON format." + e);
        }
    }
/**
* Install google play applications.
* @param code - Operation code.
* @param data - Data required(App data).
*/
public void installGooglePlayApp(String code, String data) {
String packageName = null;
try {
JSONObject appData = new JSONObject(data);
packageName = (String) appData.get(resources.getString(R.string.intent_extra_package));
} catch (JSONException e) {
Log.e(TAG, "Invalid JSON format." + e);
}
resultBuilder.build(code);
triggerGooglePlayApp(packageName);
}
/**
* Open Google Play store application with an application given.
* @param packageName - Application package name.
*/
public void triggerGooglePlayApp(String packageName) {
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
intent.setData(Uri.parse(Constants.GOOGLE_PLAY_APP_URI + packageName));
context.startActivity(intent);
}
/**
* Change device lock code.
* @param code - Operation code.
* @param data - Data required(Lock code).
*/
public void changeLockCode(String code, String data) {
ComponentName demoDeviceAdmin = new ComponentName(context, AgentDeviceAdminReceiver.class);
devicePolicyManager.setPasswordMinimumLength(demoDeviceAdmin, DEFAULT_PASSWORD_MIN_LENGTH);
String password = null;
try {
JSONObject lockData = new JSONObject(data);
if (!lockData.isNull(resources.getString(R.string.intent_extra_password))) {
password =
(String) lockData.get(resources.getString(R.string.intent_extra_password));
}
resultBuilder.build(code);
if (password!=null && !password.isEmpty()) {
devicePolicyManager.resetPassword(password,
DevicePolicyManager.RESET_PASSWORD_REQUIRE_ENTRY);
devicePolicyManager.lockNow();
}
} catch (JSONException e) {
Log.e(TAG, "Invalid JSON format." + e);
}
}
/**
* Set policy bundle.
* @param code - Operation code.
*/
public void setPolicyBundle(String code) {
try {
resultBuilder.build(code, new JSONObject(bundleParams.toString()));
} catch (JSONException e) {
Log.e(TAG, "Invalid JSON format." + e);
}
}
/**
* Enterprise wipe the device.
* @param code - Operation code.
*/
public void enterpriseWipe(String code) {
resultBuilder.build(code);
CommonUtils.clearAppData(context);
Intent intent = new Intent(context, ServerDetails.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
/**
* Blacklisting apps.
* @param code - Operation code.
* @param data - Data required(Application data).
*/
public void blacklistApps(String code, String data) {
ArrayList<DeviceAppInfo> apps = appList.getInstalledApps();
JSONArray appList = new JSONArray();
String identity = null;
try {
JSONObject resultApp = new JSONObject(data);
if (!resultApp.isNull(resources.getString(R.string.intent_extra_data))) {
resultApp =
(JSONObject) resultApp.get(resources.getString(R.string.intent_extra_data));
}
identity = (String) resultApp.get(resources.getString(R.string.intent_extra_identity));
} catch (JSONException e) {
Log.e(TAG, "Invalid JSON format." + e);
}
for (DeviceAppInfo app : apps) {
JSONObject result = new JSONObject();
try {
result.put(resources.getString(R.string.intent_extra_name), app.getAppname());
result.put(resources.getString(R.string.intent_extra_package),
app.getPackagename());
if (identity.trim().equals(app.getPackagename())) {
result.put(resources.getString(R.string.intent_extra_not_violated), false);
result.put(resources.getString(R.string.intent_extra_package),
app.getPackagename());
} else {
result.put(resources.getString(R.string.intent_extra_not_violated), true);
}
} catch (JSONException e) {
Log.e(TAG, "Invalid JSON format." + e);
}
appList.put(result);
}
resultBuilder.build(code, appList);
}
/**
* Install an Application
*/
private void installApplication(JSONObject data, String code) {
String appUrl = null;
String type = null;
String os = null;
try {
JSONObject applicationData = data;
appUrl = (String) applicationData
.get(resources.getString(R.string.intent_extra_identity));
if (!applicationData.isNull(resources.getString(R.string.intent_extra_type))) {
type = (String) applicationData
.get(resources.getString(R.string.intent_extra_type));
}
if (!applicationData.isNull(resources.getString(R.string.intent_extra_platform_id))) {
os = (String) applicationData
.get(resources.getString(R.string.intent_extra_platform_id));
} else if (!applicationData.isNull(resources.getString(R.string.intent_extra_os))) {
os = (String) applicationData.get(resources.getString(R.string.intent_extra_os));
}
if (type != null && type.equalsIgnoreCase(resources.getString(R.string.intent_extra_enterprise))) {
if (os != null) {
if (os.equalsIgnoreCase(resources.getString(R.string.intent_extra_android))) {
appList.installApp(appUrl);
}
} else {
appList.installApp(appUrl);
}
} else if (type!= null && type.equalsIgnoreCase(resources.getString(R.string.intent_extra_market))) {
if (os != null) {
if (os.equalsIgnoreCase(resources.getString(R.string.intent_extra_android))) {
triggerGooglePlayApp(appUrl);
}
} else {
triggerGooglePlayApp(appUrl);
}
} else {
if (os != null) {
if (os.equalsIgnoreCase(resources.getString(R.string.intent_extra_android))) {
appList.installApp(appUrl);
}
} else {
appList.installApp(appUrl);
}
}
} catch (JSONException e) {
Log.e(TAG, "Invalid JSON format." + e);
}
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/featurestore_service.proto
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Request message for [FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1.FeaturestoreService.BatchCreateFeatures].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.BatchCreateFeaturesRequest}
*/
public final class BatchCreateFeaturesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.BatchCreateFeaturesRequest)
BatchCreateFeaturesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use BatchCreateFeaturesRequest.newBuilder() to construct.
private BatchCreateFeaturesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BatchCreateFeaturesRequest() {
parent_ = "";
requests_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BatchCreateFeaturesRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private BatchCreateFeaturesRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
parent_ = s;
break;
}
case 18:
{
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
requests_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.CreateFeatureRequest>();
mutable_bitField0_ |= 0x00000001;
}
requests_.add(
input.readMessage(
com.google.cloud.aiplatform.v1.CreateFeatureRequest.parser(),
extensionRegistry));
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
requests_ = java.util.Collections.unmodifiableList(requests_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchCreateFeaturesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchCreateFeaturesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest.class,
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
private volatile java.lang.Object parent_;
/**
*
*
* <pre>
* Required. The resource name of the EntityType to create the batch of Features under.
* Format:
* `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The resource name of the EntityType to create the batch of Features under.
* Format:
* `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REQUESTS_FIELD_NUMBER = 2;
private java.util.List<com.google.cloud.aiplatform.v1.CreateFeatureRequest> requests_;
/**
*
*
* <pre>
* Required. The request message specifying the Features to create. All Features must be
* created under the same parent EntityType. The `parent` field in each child
* request message can be omitted. If `parent` is set in a child request, then
* the value must match the `parent` value in this request message.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.CreateFeatureRequest requests = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.CreateFeatureRequest> getRequestsList() {
return requests_;
}
/**
*
*
* <pre>
* Required. The request message specifying the Features to create. All Features must be
* created under the same parent EntityType. The `parent` field in each child
* request message can be omitted. If `parent` is set in a child request, then
* the value must match the `parent` value in this request message.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.CreateFeatureRequest requests = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.CreateFeatureRequestOrBuilder>
getRequestsOrBuilderList() {
return requests_;
}
/**
*
*
* <pre>
* Required. The request message specifying the Features to create. All Features must be
* created under the same parent EntityType. The `parent` field in each child
* request message can be omitted. If `parent` is set in a child request, then
* the value must match the `parent` value in this request message.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.CreateFeatureRequest requests = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public int getRequestsCount() {
return requests_.size();
}
/**
*
*
* <pre>
* Required. The request message specifying the Features to create. All Features must be
* created under the same parent EntityType. The `parent` field in each child
* request message can be omitted. If `parent` is set in a child request, then
* the value must match the `parent` value in this request message.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.CreateFeatureRequest requests = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.CreateFeatureRequest getRequests(int index) {
return requests_.get(index);
}
/**
*
*
* <pre>
* Required. The request message specifying the Features to create. All Features must be
* created under the same parent EntityType. The `parent` field in each child
* request message can be omitted. If `parent` is set in a child request, then
* the value must match the `parent` value in this request message.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.CreateFeatureRequest requests = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.CreateFeatureRequestOrBuilder getRequestsOrBuilder(
int index) {
return requests_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
for (int i = 0; i < requests_.size(); i++) {
output.writeMessage(2, requests_.get(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
for (int i = 0; i < requests_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, requests_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest other =
(com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getRequestsList().equals(other.getRequestsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (getRequestsCount() > 0) {
hash = (37 * hash) + REQUESTS_FIELD_NUMBER;
hash = (53 * hash) + getRequestsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [FeaturestoreService.BatchCreateFeatures][google.cloud.aiplatform.v1.FeaturestoreService.BatchCreateFeatures].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.BatchCreateFeaturesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.BatchCreateFeaturesRequest)
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchCreateFeaturesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchCreateFeaturesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest.class,
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRequestsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
parent_ = "";
if (requestsBuilder_ == null) {
requests_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
requestsBuilder_.clear();
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.FeaturestoreServiceProto
.internal_static_google_cloud_aiplatform_v1_BatchCreateFeaturesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest build() {
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest buildPartial() {
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest result =
new com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest(this);
int from_bitField0_ = bitField0_;
result.parent_ = parent_;
if (requestsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
requests_ = java.util.Collections.unmodifiableList(requests_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.requests_ = requests_;
} else {
result.requests_ = requestsBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest other) {
if (other == com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
onChanged();
}
if (requestsBuilder_ == null) {
if (!other.requests_.isEmpty()) {
if (requests_.isEmpty()) {
requests_ = other.requests_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRequestsIsMutable();
requests_.addAll(other.requests_);
}
onChanged();
}
} else {
if (!other.requests_.isEmpty()) {
if (requestsBuilder_.isEmpty()) {
requestsBuilder_.dispose();
requestsBuilder_ = null;
requests_ = other.requests_;
bitField0_ = (bitField0_ & ~0x00000001);
requestsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getRequestsFieldBuilder()
: null;
} else {
requestsBuilder_.addAllMessages(other.requests_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
// ---- Accessors for the `parent` field (proto field 1) ---------------------
// parent_ holds either a String or a ByteString; each getter converts to its
// preferred representation on first use and caches the result back.

/**
 * Required. The resource name of the EntityType to create the batch of Features under.
 * Format:
 * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Decode the ByteString form once and cache the String for later calls.
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  parent_ = decoded;
  return decoded;
}

/**
 * Required. The resource name of the EntityType to create the batch of Features under
 * (same format as {@link #getParent()}), as UTF-8 encoded bytes.
 *
 * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }</code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the String form once and cache the ByteString for later calls.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded;
  return encoded;
}

/**
 * Sets the required parent EntityType resource name (see {@link #getParent()} for the format).
 *
 * @param value The parent to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  java.util.Objects.requireNonNull(value);
  parent_ = value;
  onChanged();
  return this;
}

/**
 * Resets the parent EntityType resource name to its default (empty) value.
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  parent_ = getDefaultInstance().getParent();
  onChanged();
  return this;
}

/**
 * Sets the required parent EntityType resource name from UTF-8 bytes
 * (see {@link #getParent()} for the format).
 *
 * @param value The bytes for parent to set; must not be null and must be valid UTF-8.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  parent_ = value;
  onChanged();
  return this;
}
// Repeated `requests` field.  Defaults to the shared immutable empty list;
// replaced by a private ArrayList on first mutation.
private java.util.List<com.google.cloud.aiplatform.v1.CreateFeatureRequest> requests_ =
java.util.Collections.emptyList();
// Copies requests_ into a fresh ArrayList the first time a mutation is
// requested (bit 0x00000001 records that the copy has been made), so the
// shared immutable default list is never modified.
private void ensureRequestsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
requests_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.CreateFeatureRequest>(requests_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created nested-builder support; once constructed it owns the element
// list and requests_ is set to null (see getRequestsFieldBuilder).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.CreateFeatureRequest,
com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder,
com.google.cloud.aiplatform.v1.CreateFeatureRequestOrBuilder>
requestsBuilder_;
// ---- Accessors for the repeated `requests` field (proto field 2) ----------
// Until getRequestsFieldBuilder() is first called the elements live in
// requests_; afterwards requestsBuilder_ is the single source of truth.
// Field contract: Required. The request messages specifying the Features to
// create.  All Features must be created under the same parent EntityType; the
// `parent` field in each child request may be omitted, but when set it must
// match the `parent` value of this request message.

/**
 * Required. The request messages specifying the Features to create. All Features must be
 * created under the same parent EntityType; a child request's `parent`, when set, must match
 * this request's `parent`.
 *
 * <code>repeated .google.cloud.aiplatform.v1.CreateFeatureRequest requests = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return an unmodifiable view of the current request list
 */
public java.util.List<com.google.cloud.aiplatform.v1.CreateFeatureRequest> getRequestsList() {
  if (requestsBuilder_ != null) {
    return requestsBuilder_.getMessageList();
  }
  return java.util.Collections.unmodifiableList(requests_);
}

/**
 * Returns the number of elements currently in the {@code requests} field.
 */
public int getRequestsCount() {
  return requestsBuilder_ != null ? requestsBuilder_.getCount() : requests_.size();
}

/**
 * Returns the {@code requests} element at {@code index}.
 */
public com.google.cloud.aiplatform.v1.CreateFeatureRequest getRequests(int index) {
  return requestsBuilder_ != null ? requestsBuilder_.getMessage(index) : requests_.get(index);
}

/**
 * Replaces the {@code requests} element at {@code index} with {@code value}.
 *
 * @return This builder for chaining.
 */
public Builder setRequests(
    int index, com.google.cloud.aiplatform.v1.CreateFeatureRequest value) {
  if (requestsBuilder_ != null) {
    requestsBuilder_.setMessage(index, value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureRequestsIsMutable();
  requests_.set(index, value);
  onChanged();
  return this;
}

/**
 * Replaces the {@code requests} element at {@code index} with the message built
 * from {@code builderForValue}.
 *
 * @return This builder for chaining.
 */
public Builder setRequests(
    int index, com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder builderForValue) {
  com.google.cloud.aiplatform.v1.CreateFeatureRequest built = builderForValue.build();
  if (requestsBuilder_ != null) {
    requestsBuilder_.setMessage(index, built);
    return this;
  }
  ensureRequestsIsMutable();
  requests_.set(index, built);
  onChanged();
  return this;
}

/**
 * Appends {@code value} to the {@code requests} field.
 *
 * @return This builder for chaining.
 */
public Builder addRequests(com.google.cloud.aiplatform.v1.CreateFeatureRequest value) {
  if (requestsBuilder_ != null) {
    requestsBuilder_.addMessage(value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureRequestsIsMutable();
  requests_.add(value);
  onChanged();
  return this;
}

/**
 * Inserts {@code value} into the {@code requests} field at {@code index}.
 *
 * @return This builder for chaining.
 */
public Builder addRequests(
    int index, com.google.cloud.aiplatform.v1.CreateFeatureRequest value) {
  if (requestsBuilder_ != null) {
    requestsBuilder_.addMessage(index, value);
    return this;
  }
  java.util.Objects.requireNonNull(value);
  ensureRequestsIsMutable();
  requests_.add(index, value);
  onChanged();
  return this;
}

/**
 * Appends the message built from {@code builderForValue} to the {@code requests} field.
 *
 * @return This builder for chaining.
 */
public Builder addRequests(
    com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder builderForValue) {
  com.google.cloud.aiplatform.v1.CreateFeatureRequest built = builderForValue.build();
  if (requestsBuilder_ != null) {
    requestsBuilder_.addMessage(built);
    return this;
  }
  ensureRequestsIsMutable();
  requests_.add(built);
  onChanged();
  return this;
}

/**
 * Inserts the message built from {@code builderForValue} into the {@code requests}
 * field at {@code index}.
 *
 * @return This builder for chaining.
 */
public Builder addRequests(
    int index, com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder builderForValue) {
  com.google.cloud.aiplatform.v1.CreateFeatureRequest built = builderForValue.build();
  if (requestsBuilder_ != null) {
    requestsBuilder_.addMessage(index, built);
    return this;
  }
  ensureRequestsIsMutable();
  requests_.add(index, built);
  onChanged();
  return this;
}

/**
 * Appends all of {@code values} to the {@code requests} field.
 *
 * @return This builder for chaining.
 */
public Builder addAllRequests(
    java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.CreateFeatureRequest> values) {
  if (requestsBuilder_ != null) {
    requestsBuilder_.addAllMessages(values);
    return this;
  }
  ensureRequestsIsMutable();
  com.google.protobuf.AbstractMessageLite.Builder.addAll(values, requests_);
  onChanged();
  return this;
}

/**
 * Removes every element from the {@code requests} field.
 *
 * @return This builder for chaining.
 */
public Builder clearRequests() {
  if (requestsBuilder_ != null) {
    requestsBuilder_.clear();
    return this;
  }
  requests_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 * Removes the {@code requests} element at {@code index}.
 *
 * @return This builder for chaining.
 */
public Builder removeRequests(int index) {
  if (requestsBuilder_ != null) {
    requestsBuilder_.remove(index);
    return this;
  }
  ensureRequestsIsMutable();
  requests_.remove(index);
  onChanged();
  return this;
}

/**
 * Returns a mutable sub-builder for the {@code requests} element at {@code index};
 * forces creation of the repeated-field builder.
 */
public com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder getRequestsBuilder(
    int index) {
  return getRequestsFieldBuilder().getBuilder(index);
}

/**
 * Returns a read-only view (message or builder) of the {@code requests} element
 * at {@code index}.
 */
public com.google.cloud.aiplatform.v1.CreateFeatureRequestOrBuilder getRequestsOrBuilder(
    int index) {
  return requestsBuilder_ != null
      ? requestsBuilder_.getMessageOrBuilder(index)
      : requests_.get(index);
}

/**
 * Returns a read-only view of the whole {@code requests} field as messages or builders.
 */
public java.util.List<? extends com.google.cloud.aiplatform.v1.CreateFeatureRequestOrBuilder>
    getRequestsOrBuilderList() {
  if (requestsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(requests_);
  }
  return requestsBuilder_.getMessageOrBuilderList();
}

/**
 * Appends a default-valued element to the {@code requests} field and returns its
 * mutable sub-builder.
 */
public com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder addRequestsBuilder() {
  return getRequestsFieldBuilder()
      .addBuilder(com.google.cloud.aiplatform.v1.CreateFeatureRequest.getDefaultInstance());
}

/**
 * Inserts a default-valued element into the {@code requests} field at {@code index}
 * and returns its mutable sub-builder.
 */
public com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder addRequestsBuilder(
    int index) {
  return getRequestsFieldBuilder()
      .addBuilder(
          index, com.google.cloud.aiplatform.v1.CreateFeatureRequest.getDefaultInstance());
}

/**
 * Returns the list of mutable sub-builders for the {@code requests} field.
 */
public java.util.List<com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder>
    getRequestsBuilderList() {
  return getRequestsFieldBuilder().getBuilderList();
}

/**
 * Lazily creates the repeated-field builder.  After creation the builder owns
 * the element list, so requests_ is released.
 */
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.aiplatform.v1.CreateFeatureRequest,
        com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder,
        com.google.cloud.aiplatform.v1.CreateFeatureRequestOrBuilder>
    getRequestsFieldBuilder() {
  if (requestsBuilder_ == null) {
    requestsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.CreateFeatureRequest,
            com.google.cloud.aiplatform.v1.CreateFeatureRequest.Builder,
            com.google.cloud.aiplatform.v1.CreateFeatureRequestOrBuilder>(
            requests_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    requests_ = null;  // the field builder is now the single source of truth
  }
  return requestsBuilder_;
}
// Unknown-field handling is delegated unchanged to GeneratedMessageV3.Builder;
// declared final so the wire-compatibility behavior cannot be overridden.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.BatchCreateFeaturesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.BatchCreateFeaturesRequest)
// Singleton all-fields-unset instance shared by every user of this message type.
private static final com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest();
}
// Returns the shared immutable default instance of BatchCreateFeaturesRequest.
public static com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless shared parser for this message type.  parsePartialFrom delegates
// to the (input, registry) constructor; per the mergeFrom logic above, a
// thrown InvalidProtocolBufferException can carry the partially-read message.
private static final com.google.protobuf.Parser<BatchCreateFeaturesRequest> PARSER =
new com.google.protobuf.AbstractParser<BatchCreateFeaturesRequest>() {
@java.lang.Override
public BatchCreateFeaturesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new BatchCreateFeaturesRequest(input, extensionRegistry);
}
};
// Static accessor for the shared parser.
public static com.google.protobuf.Parser<BatchCreateFeaturesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BatchCreateFeaturesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.BatchCreateFeaturesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.07 at 08:01:35 PM IST
//
package com.mozu.qbintegration.model.qbmodel.allgen;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{}PayeeEntityRef"/>
* <element ref="{}APAccountRef" minOccurs="0"/>
* <element ref="{}TxnDate" minOccurs="0"/>
* <element ref="{}BankAccountRef"/>
* <choice>
* <element ref="{}IsToBePrinted"/>
* <element name="RefNumber">
* <simpleType>
* <restriction base="{}STRTYPE">
* <maxLength value="11"/>
* </restriction>
* </simpleType>
* </element>
* </choice>
* <element name="Memo" minOccurs="0">
* <simpleType>
* <restriction base="{}STRTYPE">
* <maxLength value="4095"/>
* </restriction>
* </simpleType>
* </element>
* <element ref="{}ExchangeRate" minOccurs="0"/>
* <element ref="{}ExternalGUID" minOccurs="0"/>
* <element ref="{}AppliedToTxnAdd" maxOccurs="unbounded"/>
* </sequence>
* <attribute name="defMacro" type="{}MACROTYPE" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "payeeEntityRef",
    "apAccountRef",
    "txnDate",
    "bankAccountRef",
    "isToBePrinted",
    "refNumber",
    "memo",
    "exchangeRate",
    "externalGUID",
    "appliedToTxnAdd"
})
@XmlRootElement(name = "BillPaymentCheckAdd")
public class BillPaymentCheckAdd {

    @XmlElement(name = "PayeeEntityRef", required = true)
    protected PayeeEntityRef payeeEntityRef;
    @XmlElement(name = "APAccountRef")
    protected APAccountRef apAccountRef;
    @XmlElement(name = "TxnDate")
    protected String txnDate;
    @XmlElement(name = "BankAccountRef", required = true)
    protected BankAccountRef bankAccountRef;
    @XmlElement(name = "IsToBePrinted")
    protected String isToBePrinted;
    @XmlElement(name = "RefNumber")
    protected String refNumber;
    @XmlElement(name = "Memo")
    protected String memo;
    @XmlElement(name = "ExchangeRate")
    protected String exchangeRate;
    @XmlElement(name = "ExternalGUID")
    protected String externalGUID;
    @XmlElement(name = "AppliedToTxnAdd", required = true)
    protected List<AppliedToTxnAdd> appliedToTxnAdd;
    @XmlAttribute(name = "defMacro")
    protected String defMacro;

    /** @return the required payee reference, or {@code null} if not yet set */
    public PayeeEntityRef getPayeeEntityRef() {
        return payeeEntityRef;
    }

    /** @param value the {@link PayeeEntityRef} to set */
    public void setPayeeEntityRef(PayeeEntityRef value) {
        this.payeeEntityRef = value;
    }

    /** @return the optional AP account reference, or {@code null} */
    public APAccountRef getAPAccountRef() {
        return apAccountRef;
    }

    /** @param value the {@link APAccountRef} to set */
    public void setAPAccountRef(APAccountRef value) {
        this.apAccountRef = value;
    }

    /** @return the optional transaction date, or {@code null} */
    public String getTxnDate() {
        return txnDate;
    }

    /** @param value the transaction date to set */
    public void setTxnDate(String value) {
        this.txnDate = value;
    }

    /** @return the required bank account reference, or {@code null} if not yet set */
    public BankAccountRef getBankAccountRef() {
        return bankAccountRef;
    }

    /** @param value the {@link BankAccountRef} to set */
    public void setBankAccountRef(BankAccountRef value) {
        this.bankAccountRef = value;
    }

    /** @return the is-to-be-printed flag (schema choice with RefNumber), or {@code null} */
    public String getIsToBePrinted() {
        return isToBePrinted;
    }

    /** @param value the is-to-be-printed flag to set */
    public void setIsToBePrinted(String value) {
        this.isToBePrinted = value;
    }

    /** @return the reference number (schema max length 11), or {@code null} */
    public String getRefNumber() {
        return refNumber;
    }

    /** @param value the reference number to set */
    public void setRefNumber(String value) {
        this.refNumber = value;
    }

    /** @return the memo text (schema max length 4095), or {@code null} */
    public String getMemo() {
        return memo;
    }

    /** @param value the memo text to set */
    public void setMemo(String value) {
        this.memo = value;
    }

    /** @return the optional exchange rate, or {@code null} */
    public String getExchangeRate() {
        return exchangeRate;
    }

    /** @param value the exchange rate to set */
    public void setExchangeRate(String value) {
        this.exchangeRate = value;
    }

    /** @return the optional external GUID, or {@code null} */
    public String getExternalGUID() {
        return externalGUID;
    }

    /** @param value the external GUID to set */
    public void setExternalGUID(String value) {
        this.externalGUID = value;
    }

    /**
     * Returns the live backing list of {@link AppliedToTxnAdd} entries; changes made
     * to the returned list are reflected directly in this JAXB object, which is why
     * no setter exists.  The list is created lazily on first access and is never
     * {@code null}.
     *
     * @return the live list of applied-to-transaction entries
     */
    public List<AppliedToTxnAdd> getAppliedToTxnAdd() {
        if (appliedToTxnAdd == null) {
            appliedToTxnAdd = new ArrayList<AppliedToTxnAdd>();
        }
        return this.appliedToTxnAdd;
    }

    /** @return the {@code defMacro} attribute, or {@code null} */
    public String getDefMacro() {
        return defMacro;
    }

    /** @param value the {@code defMacro} attribute to set */
    public void setDefMacro(String value) {
        this.defMacro = value;
    }
}
| |
package tie.hackathon.travelguide;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.Html;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.miguelcatalan.materialsearchview.MaterialSearchView;
import com.squareup.picasso.Picasso;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.net.HttpURLConnection;
import java.net.URL;
import Util.Constants;
import Util.Utils;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Activity that lists shopping items for the current city.  A search term is
 * submitted through the toolbar search view or the "go" button; results are
 * fetched from the travel-guide API in the background and rendered in a list,
 * with each row linking out to the product page.
 */
public class ShoppingCurrentCity extends AppCompatActivity {

    private static final String TAG = "ShoppingCurrentCity";

    @BindView(R.id.pb) ProgressBar pb;
    @BindView(R.id.music_list) ListView lv;
    @BindView(R.id.query) EditText q;
    @BindView(R.id.go) Button ok;

    private MaterialSearchView searchView;
    // Current search term sent to the shopping endpoint; "bags" is the initial query.
    private String item = "bags";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_shopping_currentcity);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        ButterKnife.bind(this);
        setTitle("Shopping");

        // Kick off the initial fetch for the default search term.
        new Book_RetrieveFeed().execute();

        searchView = (MaterialSearchView) findViewById(R.id.search_view);
        searchView.setOnQueryTextListener(new MaterialSearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                pb.setVisibility(View.VISIBLE);
                try {
                    item = query;
                    new Book_RetrieveFeed().execute();
                } catch (Exception ex) {
                    showConnectionErrorDialog();
                }
                return false;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                // Search only runs on submit, not on every keystroke.
                return false;
            }
        });
        searchView.setOnSearchViewListener(new MaterialSearchView.SearchViewListener() {
            @Override
            public void onSearchViewShown() {
                // No-op: nothing to prepare when the search view opens.
            }

            @Override
            public void onSearchViewClosed() {
                // No-op: the list keeps showing the last results.
            }
        });

        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setHomeButtonEnabled(true);
    }

    /** Handles the "go" button: searches for the text typed into the query box. */
    @OnClick(R.id.go) void onClick() {
        pb.setVisibility(View.VISIBLE);
        try {
            item = q.getText().toString();
            new Book_RetrieveFeed().execute();
        } catch (Exception ex) {
            Log.e(TAG, "Cannot start shopping feed fetch", ex);
            showConnectionErrorDialog();
        }
    }

    /** Shows a single "can't connect" dialog with a dismiss button (was duplicated inline). */
    private void showConnectionErrorDialog() {
        AlertDialog alertDialog = new AlertDialog.Builder(ShoppingCurrentCity.this).create();
        alertDialog.setTitle("Can't connect.");
        alertDialog.setMessage("We cannot connect to the internet right now. Please try again later.");
        alertDialog.setButton(AlertDialog.BUTTON_NEUTRAL, "OK",
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.dismiss();
                    }
                });
        alertDialog.show();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.search_menu, menu);
        MenuItem item = menu.findItem(R.id.action_search);
        searchView.setMenuItem(item);
        return true;
    }

    @Override
    public void onBackPressed() {
        // Close an open search view first; leave the screen only on a second back press.
        if (searchView.isSearchOpen()) {
            searchView.closeSearch();
        } else {
            super.onBackPressed();
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (item.getItemId() == android.R.id.home) {
            finish();
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Background task that downloads the shopping feed for the current search
     * term and binds the parsed results to the list view.  Returns null from
     * doInBackground on any network failure.
     */
    private class Book_RetrieveFeed extends AsyncTask<String, Void, String> {

        @Override
        protected String doInBackground(String... urls) {
            HttpURLConnection con = null;
            try {
                // Spaces must be encoded before the term is appended to the URL.
                String uri = Constants.apilink + "online-shopping.php?string=" + item;
                uri = uri.replace(" ", "+");
                URL url = new URL(uri);
                con = (HttpURLConnection) url.openConnection();
                return Utils.readStream(con.getInputStream());
            } catch (Exception ex) {
                Log.e(TAG, "Could not fetch shopping feed", ex);
                return null;
            } finally {
                // Always release the connection; it was previously leaked.
                if (con != null) {
                    con.disconnect();
                }
            }
        }

        @Override
        protected void onPostExecute(String result) {
            // Hide the spinner on every outcome so it can never get stuck visible.
            pb.setVisibility(View.GONE);
            if (result == null) {
                // Network failure already logged in doInBackground; previously this
                // fed the literal string "null" into the JSON parser.
                return;
            }
            try {
                JSONObject feed = new JSONObject(result);
                JSONArray feedItems = feed.getJSONArray("results");
                if (feedItems.length() == 0) {
                    Utils.hideKeyboard(ShoppingCurrentCity.this);
                    Snackbar.make(pb, "No results found", Snackbar.LENGTH_LONG)
                            .setAction("Action", null).show();
                }
                lv.setAdapter(new Shop_adapter(ShoppingCurrentCity.this, feedItems));
            } catch (Exception ex) {
                Log.e(TAG, "Malformed shopping feed response", ex);
            }
        }
    }

    /**
     * Adapter rendering one shopping result per row: name, price ("value" field,
     * in Rs) and thumbnail, with a click-through to the product URL.
     */
    public class Shop_adapter extends BaseAdapter {

        final Context context;
        final JSONArray FeedItems;
        private LayoutInflater inflater = null;

        public Shop_adapter(Context context, JSONArray FeedItems) {
            this.context = context;
            this.FeedItems = FeedItems;
            inflater = (LayoutInflater) context
                    .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        }

        @Override
        public int getCount() {
            return FeedItems.length();
        }

        @Override
        public Object getItem(int position) {
            try {
                return FeedItems.getJSONObject(position);
            } catch (JSONException ex) {
                Log.e(TAG, "Bad feed item at " + position, ex);
            }
            return null;
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(final int position, View convertView, ViewGroup parent) {
            View vi = convertView;
            if (vi == null) {
                vi = inflater.inflate(R.layout.shop_listitem, null);
            }
            TextView Title = (TextView) vi.findViewById(R.id.VideoTitle);
            TextView Description = (TextView) vi.findViewById(R.id.VideoDescription);
            ImageView iv = (ImageView) vi.findViewById(R.id.VideoThumbnail);
            try {
                String x = FeedItems.getJSONObject(position).getString("name");
                x = Html.fromHtml(x).toString();
                Title.setText(x);
                String DescriptionText = FeedItems.getJSONObject(position).getString("value");
                DescriptionText = Html.fromHtml(DescriptionText).toString();
                Description.setText(DescriptionText + " Rs");
                Picasso.with(context).load(FeedItems.getJSONObject(position).getString("image")).into(iv);
            } catch (JSONException ex) {
                Log.e(TAG, "Bad feed item at " + position, ex);
            }
            vi.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    try {
                        Intent browserIntent = new Intent(Intent.ACTION_VIEW,
                                Uri.parse(FeedItems.getJSONObject(position).getString("url")));
                        context.startActivity(browserIntent);
                    } catch (JSONException e1) {
                        // Row has no usable URL; previously a null intent was passed to
                        // startActivity, which would crash.
                        Log.e(TAG, "Missing url for shopping item", e1);
                    }
                }
            });
            return vi;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.spanner;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.argThat;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.cloud.Timestamp;
import com.google.cloud.spanner.ErrorCode;
import com.google.cloud.spanner.Key;
import com.google.cloud.spanner.KeyRange;
import com.google.cloud.spanner.KeySet;
import com.google.cloud.spanner.Mutation;
import com.google.cloud.spanner.ReadOnlyTransaction;
import com.google.cloud.spanner.ResultSets;
import com.google.cloud.spanner.SpannerExceptionFactory;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.Struct;
import com.google.cloud.spanner.Type;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.beam.sdk.Pipeline.PipelineExecutionException;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO.BatchFn;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO.BatchableMutationFilterFn;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO.FailureMode;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO.GatherBundleAndSortFn;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO.WriteGrouped;
import org.apache.beam.sdk.io.gcp.spanner.SpannerIO.WriteToSpannerFn;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn.FinishBundleContext;
import org.apache.beam.sdk.transforms.DoFn.ProcessContext;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.util.Sleeper;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
import org.joda.time.Duration;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.Captor;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
/**
* Unit tests for {@link SpannerIO}.
*
* <p>Note that because batching and sorting work on Bundles, and the TestPipeline does not bundle
* small numbers of elements, the batching and sorting DoFns need to be unit tested outside of the
* pipeline.
*/
@RunWith(JUnit4.class)
public class SpannerIOWriteTest implements Serializable {
  // Cells mutated per key in the test schema; drives the batching-limit arithmetic below.
  private static final long CELLS_PER_KEY = 7;

  @Rule public transient TestPipeline pipeline = TestPipeline.create();
  @Rule public transient ExpectedException thrown = ExpectedException.none();

  // Captors for arguments handed to the mocked Spanner client and DoFn output calls.
  @Captor public transient ArgumentCaptor<Iterable<Mutation>> mutationBatchesCaptor;
  @Captor public transient ArgumentCaptor<Iterable<MutationGroup>> mutationGroupListCaptor;
  @Captor public transient ArgumentCaptor<MutationGroup> mutationGroupCaptor;
  @Captor public transient ArgumentCaptor<List<KV<byte[], byte[]>>> byteArrayKvListCaptor;

  // Fake Spanner service shared by most tests; initialized in setUp().
  private FakeServiceFactory serviceFactory;
  /**
   * Wires up a fake Spanner client: captures all batches sent to writeAtLeastOnce and
   * stubs the schema queries to describe a single table "tEsT" with an INT64 "key".
   */
  @Before
  @SuppressWarnings("unchecked")
  public void setUp() throws Exception {
    MockitoAnnotations.initMocks(this);
    serviceFactory = new FakeServiceFactory();
    ReadOnlyTransaction tx = mock(ReadOnlyTransaction.class);
    when(serviceFactory.mockDatabaseClient().readOnlyTransaction()).thenReturn(tx);
    // Capture batches sent to writeAtLeastOnce.
    when(serviceFactory.mockDatabaseClient().writeAtLeastOnce(mutationBatchesCaptor.capture()))
        .thenReturn(null);
    // Simplest schema: a table with int64 key
    preparePkMetadata(tx, Arrays.asList(pkMetadata("tEsT", "key", "ASC")));
    prepareColumnMetadata(tx, Arrays.asList(columnMetadata("tEsT", "key", "INT64", CELLS_PER_KEY)));
  }
  /** Builds the schema used by these tests: table "tEsT" keyed by an ascending INT64 "key". */
  private SpannerSchema getSchema() {
    return SpannerSchema.builder()
        .addColumn("tEsT", "key", "INT64", CELLS_PER_KEY)
        .addKeyPart("tEsT", "key", false)  // false = ascending ordering
        .build();
  }
private static Struct columnMetadata(
String tableName, String columnName, String type, long cellsMutated) {
return Struct.newBuilder()
.set("table_name")
.to(tableName)
.set("column_name")
.to(columnName)
.set("spanner_type")
.to(type)
.set("cells_mutated")
.to(cellsMutated)
.build();
}
private static Struct pkMetadata(String tableName, String columnName, String ordering) {
return Struct.newBuilder()
.set("table_name")
.to(tableName)
.set("column_name")
.to(columnName)
.set("column_ordering")
.to(ordering)
.build();
}
private void prepareColumnMetadata(ReadOnlyTransaction tx, List<Struct> rows) {
Type type =
Type.struct(
Type.StructField.of("table_name", Type.string()),
Type.StructField.of("column_name", Type.string()),
Type.StructField.of("spanner_type", Type.string()),
Type.StructField.of("cells_mutated", Type.int64()));
when(tx.executeQuery(
argThat(
new ArgumentMatcher<Statement>() {
@Override
public boolean matches(Statement argument) {
if (!(argument instanceof Statement)) {
return false;
}
Statement st = (Statement) argument;
return st.getSql().contains("information_schema.columns");
}
})))
.thenReturn(ResultSets.forRows(type, rows));
}
private void preparePkMetadata(ReadOnlyTransaction tx, List<Struct> rows) {
Type type =
Type.struct(
Type.StructField.of("table_name", Type.string()),
Type.StructField.of("column_name", Type.string()),
Type.StructField.of("column_ordering", Type.string()));
when(tx.executeQuery(
argThat(
new ArgumentMatcher<Statement>() {
@Override
public boolean matches(Statement argument) {
if (!(argument instanceof Statement)) {
return false;
}
Statement st = (Statement) argument;
return st.getSql().contains("information_schema.index_columns");
}
})))
.thenReturn(ResultSets.forRows(type, rows));
}
@Test
public void emptyTransform() throws Exception {
SpannerIO.Write write = SpannerIO.write();
thrown.expect(NullPointerException.class);
thrown.expectMessage("requires instance id to be set with");
write.expand(null);
}
@Test
public void emptyInstanceId() throws Exception {
SpannerIO.Write write = SpannerIO.write().withDatabaseId("123");
thrown.expect(NullPointerException.class);
thrown.expectMessage("requires instance id to be set with");
write.expand(null);
}
@Test
public void emptyDatabaseId() throws Exception {
SpannerIO.Write write = SpannerIO.write().withInstanceId("123");
thrown.expect(NullPointerException.class);
thrown.expectMessage("requires database id to be set with");
write.expand(null);
}
  /** A single mutation flows through the pipeline and is written as a one-element batch. */
  @Test
  public void singleMutationPipeline() throws Exception {
    Mutation mutation = m(2L);
    PCollection<Mutation> mutations = pipeline.apply(Create.of(mutation));

    mutations.apply(
        SpannerIO.write()
            .withProjectId("test-project")
            .withInstanceId("test-instance")
            .withDatabaseId("test-database")
            .withServiceFactory(serviceFactory));
    pipeline.run();

    verifyBatches(batch(m(2L)));
  }
  /** A single MutationGroup written via grouped() keeps its mutations in one batch. */
  @Test
  public void singleMutationGroupPipeline() throws Exception {
    PCollection<MutationGroup> mutations =
        pipeline.apply(Create.<MutationGroup>of(g(m(1L), m(2L), m(3L))));
    mutations.apply(
        SpannerIO.write()
            .withProjectId("test-project")
            .withInstanceId("test-instance")
            .withDatabaseId("test-database")
            .withServiceFactory(serviceFactory)
            .grouped());
    pipeline.run();

    verifyBatches(batch(m(1L), m(2L), m(3L)));
  }
private void verifyBatches(Iterable<Mutation>... batches) {
for (Iterable<Mutation> b : batches) {
verify(serviceFactory.mockDatabaseClient(), times(1)).writeAtLeastOnce(mutationsInNoOrder(b));
}
}
  /** With a 1-byte batch limit, each group is written on its own and the schema is never read. */
  @Test
  public void noBatching() throws Exception {

    // This test uses a different mock/fake because it explicitly does not want to populate the
    // Spanner schema.
    FakeServiceFactory fakeServiceFactory = new FakeServiceFactory();
    ReadOnlyTransaction tx = mock(ReadOnlyTransaction.class);
    when(fakeServiceFactory.mockDatabaseClient().readOnlyTransaction()).thenReturn(tx);

    // Capture batches sent to writeAtLeastOnce.
    when(fakeServiceFactory.mockDatabaseClient().writeAtLeastOnce(mutationBatchesCaptor.capture()))
        .thenReturn(null);

    PCollection<MutationGroup> mutations = pipeline.apply(Create.of(g(m(1L)), g(m(2L))));
    mutations.apply(
        SpannerIO.write()
            .withProjectId("test-project")
            .withInstanceId("test-instance")
            .withDatabaseId("test-database")
            .withServiceFactory(fakeServiceFactory)
            .withBatchSizeBytes(1)
            .grouped());
    pipeline.run();

    verify(fakeServiceFactory.mockDatabaseClient(), times(1))
        .writeAtLeastOnce(mutationsInNoOrder(batch(m(1L))));
    verify(fakeServiceFactory.mockDatabaseClient(), times(1))
        .writeAtLeastOnce(mutationsInNoOrder(batch(m(2L))));
    // If no batching then the DB schema is never read.
    verify(tx, never()).executeQuery(any());
  }
  /** Streaming input: elements arriving in the same processing-time pane end up batched together. */
  @Test
  public void streamingWrites() throws Exception {
    TestStream<Mutation> testStream =
        TestStream.create(SerializableCoder.of(Mutation.class))
            .addElements(m(1L), m(2L))
            .advanceProcessingTime(Duration.standardMinutes(1))
            .addElements(m(3L), m(4L))
            .advanceProcessingTime(Duration.standardMinutes(1))
            .addElements(m(5L), m(6L))
            .advanceWatermarkToInfinity();
    pipeline
        .apply(testStream)
        .apply(
            SpannerIO.write()
                .withProjectId("test-project")
                .withInstanceId("test-instance")
                .withDatabaseId("test-database")
                .withServiceFactory(serviceFactory));
    pipeline.run();

    verifyBatches(batch(m(1L), m(2L)), batch(m(3L), m(4L)), batch(m(5L), m(6L)));
  }
  /** With a grouping factor set, streamed mutations are sorted by key before batching. */
  @Test
  public void streamingWritesWithGrouping() throws Exception {

    // verify that grouping/sorting occurs when set.
    TestStream<Mutation> testStream =
        TestStream.create(SerializableCoder.of(Mutation.class))
            .addElements(m(1L), m(5L), m(2L), m(4L), m(3L), m(6L))
            .advanceWatermarkToInfinity();
    pipeline
        .apply(testStream)
        .apply(
            SpannerIO.write()
                .withProjectId("test-project")
                .withInstanceId("test-instance")
                .withDatabaseId("test-database")
                .withServiceFactory(serviceFactory)
                .withGroupingFactor(40)
                .withMaxNumRows(2));
    pipeline.run();

    // Output should be batches of sorted mutations.
    verifyBatches(batch(m(1L), m(2L)), batch(m(3L), m(4L)), batch(m(5L), m(6L)));
  }
  /** Without a grouping factor, batches are built in arrival order (no sorting). */
  @Test
  public void streamingWritesNoGrouping() throws Exception {

    // verify that grouping/sorting does not occur - batches should be created in received order.
    TestStream<Mutation> testStream =
        TestStream.create(SerializableCoder.of(Mutation.class))
            .addElements(m(1L), m(5L), m(2L), m(4L), m(3L), m(6L))
            .advanceWatermarkToInfinity();

    // verify that grouping/sorting does not occur when not set.
    pipeline
        .apply(testStream)
        .apply(
            SpannerIO.write()
                .withProjectId("test-project")
                .withInstanceId("test-instance")
                .withDatabaseId("test-database")
                .withServiceFactory(serviceFactory)
                .withMaxNumRows(2));
    pipeline.run();

    verifyBatches(batch(m(1L), m(5L)), batch(m(2L), m(4L)), batch(m(3L), m(6L)));
  }
  /**
   * With REPORT_FAILURES, failing writes are retried individually and then emitted on the
   * failed-mutations output instead of failing the pipeline.
   */
  @Test
  public void reportFailures() throws Exception {
    MutationGroup[] mutationGroups = new MutationGroup[10];
    for (int i = 0; i < mutationGroups.length; i++) {
      mutationGroups[i] = g(m((long) i));
    }

    List<MutationGroup> mutationGroupList = Arrays.asList(mutationGroups);

    // Every write attempt fails with a non-retryable ALREADY_EXISTS error.
    when(serviceFactory.mockDatabaseClient().writeAtLeastOnce(any()))
        .thenAnswer(
            invocationOnMock -> {
              Preconditions.checkNotNull(invocationOnMock.getArguments()[0]);
              throw SpannerExceptionFactory.newSpannerException(ErrorCode.ALREADY_EXISTS, "oops");
            });

    SpannerWriteResult result =
        pipeline
            .apply(Create.of(mutationGroupList))
            .apply(
                SpannerIO.write()
                    .withProjectId("test-project")
                    .withInstanceId("test-instance")
                    .withDatabaseId("test-database")
                    .withServiceFactory(serviceFactory)
                    .withBatchSizeBytes(0)
                    .withFailureMode(SpannerIO.FailureMode.REPORT_FAILURES)
                    .grouped());

    // All 10 groups should surface on the failure output.
    PAssert.that(result.getFailedMutations())
        .satisfies(
            m -> {
              assertEquals(mutationGroups.length, Iterables.size(m));
              return null;
            });
    PAssert.that(result.getFailedMutations()).containsInAnyOrder(mutationGroupList);
    pipeline.run().waitUntilFinish();

    // writeAtLeastOnce called once for the batch of mutations
    // (which as they are unbatched = each mutation group) then again for the individual retry.
    verify(serviceFactory.mockDatabaseClient(), times(20)).writeAtLeastOnce(any());
  }
  /** DEADLINE_EXCEEDED errors are retried (with backoff sleeps) until the write succeeds. */
  @Test
  public void deadlineExceededRetries() throws InterruptedException {
    List<Mutation> mutationList = Arrays.asList(m((long) 1));

    // mock sleeper so that it does not actually sleep.
    WriteToSpannerFn.sleeper = Mockito.mock(Sleeper.class);

    // respond with 2 timeouts and a success.
    when(serviceFactory.mockDatabaseClient().writeAtLeastOnce(any()))
        .thenThrow(
            SpannerExceptionFactory.newSpannerException(
                ErrorCode.DEADLINE_EXCEEDED, "simulated Timeout 1"))
        .thenThrow(
            SpannerExceptionFactory.newSpannerException(
                ErrorCode.DEADLINE_EXCEEDED, "simulated Timeout 2"))
        .thenReturn(Timestamp.now());

    SpannerWriteResult result =
        pipeline
            .apply(Create.of(mutationList))
            .apply(
                SpannerIO.write()
                    .withProjectId("test-project")
                    .withInstanceId("test-instance")
                    .withDatabaseId("test-database")
                    .withServiceFactory(serviceFactory)
                    .withBatchSizeBytes(0)
                    .withFailureMode(SpannerIO.FailureMode.REPORT_FAILURES));

    // all success, so verify no errors
    PAssert.that(result.getFailedMutations())
        .satisfies(
            m -> {
              assertEquals(0, Iterables.size(m));
              return null;
            });
    pipeline.run().waitUntilFinish();

    // 2 calls to sleeper
    verify(WriteToSpannerFn.sleeper, times(2)).sleep(anyLong());
    // 3 write attempts for the single mutationGroup.
    verify(serviceFactory.mockDatabaseClient(), times(3)).writeAtLeastOnce(any());
  }
  /**
   * When every attempt times out, the write gives up after exhausting the cumulative backoff
   * budget and reports the mutation on the failure output.
   */
  @Test
  public void deadlineExceededFailsAfterRetries() throws InterruptedException {
    List<Mutation> mutationList = Arrays.asList(m((long) 1));

    // mock sleeper so that it does not actually sleep.
    WriteToSpannerFn.sleeper = Mockito.mock(Sleeper.class);

    // respond with all timeouts.
    when(serviceFactory.mockDatabaseClient().writeAtLeastOnce(any()))
        .thenThrow(
            SpannerExceptionFactory.newSpannerException(
                ErrorCode.DEADLINE_EXCEEDED, "simulated Timeout"));

    SpannerWriteResult result =
        pipeline
            .apply(Create.of(mutationList))
            .apply(
                SpannerIO.write()
                    .withProjectId("test-project")
                    .withInstanceId("test-instance")
                    .withDatabaseId("test-database")
                    .withServiceFactory(serviceFactory)
                    .withBatchSizeBytes(0)
                    .withMaxCumulativeBackoff(Duration.standardHours(2))
                    .withFailureMode(SpannerIO.FailureMode.REPORT_FAILURES));

    // One error
    PAssert.that(result.getFailedMutations())
        .satisfies(
            m -> {
              assertEquals(1, Iterables.size(m));
              return null;
            });
    pipeline.run().waitUntilFinish();

    // Due to jitter in backoff algorithm, we cannot test for an exact number of retries,
    // but there will be more than 16 (normally 18).
    int numSleeps = Mockito.mockingDetails(WriteToSpannerFn.sleeper).getInvocations().size();
    assertTrue(String.format("Should be least 16 sleeps, got %d", numSleeps), numSleeps > 16);
    long totalSleep =
        Mockito.mockingDetails(WriteToSpannerFn.sleeper).getInvocations().stream()
            .mapToLong(i -> i.getArgument(0))
            .reduce(0L, Long::sum);

    // Total sleep should be greater than 2x maxCumulativeBackoff: 120m,
    // because the batch is repeated individually due REPORT_FAILURES.
    assertTrue(
        String.format("Should be least 7200s of sleep, got %d", totalSleep),
        totalSleep >= Duration.standardHours(2).getMillis());

    // Number of write attempts should be numSleeps + 2 write attempts:
    // 1 batch attempt, numSleeps/2 batch retries,
    // then 1 individual attempt + numSleeps/2 individual retries
    verify(serviceFactory.mockDatabaseClient(), times(numSleeps + 2)).writeAtLeastOnce(any());
  }
  /** ABORTED "schema changed" errors are retried immediately (no backoff sleep) until success. */
  @Test
  public void retryOnSchemaChangeException() throws InterruptedException {
    List<Mutation> mutationList = Arrays.asList(m((long) 1));
    String errString =
        "Transaction aborted. "
            + "Database schema probably changed during transaction, retry may succeed.";

    // mock sleeper so that it does not actually sleep.
    WriteToSpannerFn.sleeper = Mockito.mock(Sleeper.class);

    // respond with 2 aborted-transaction errors and a success.
    when(serviceFactory.mockDatabaseClient().writeAtLeastOnce(any()))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString))
        .thenReturn(Timestamp.now());

    SpannerWriteResult result =
        pipeline
            .apply(Create.of(mutationList))
            .apply(
                SpannerIO.write()
                    .withProjectId("test-project")
                    .withInstanceId("test-instance")
                    .withDatabaseId("test-database")
                    .withServiceFactory(serviceFactory)
                    .withBatchSizeBytes(0)
                    .withFailureMode(FailureMode.FAIL_FAST));

    // all success, so verify no errors
    PAssert.that(result.getFailedMutations())
        .satisfies(
            m -> {
              assertEquals(0, Iterables.size(m));
              return null;
            });
    pipeline.run().waitUntilFinish();

    // 0 calls to sleeper
    verify(WriteToSpannerFn.sleeper, times(0)).sleep(anyLong());
    // 3 write attempts for the single mutationGroup.
    verify(serviceFactory.mockDatabaseClient(), times(3)).writeAtLeastOnce(any());
  }
  /**
   * Repeated ABORTED "schema changed" errors eventually exhaust the retry limit and fail the
   * pipeline with a PipelineExecutionException.
   */
  @Test
  public void retryMaxOnSchemaChangeException() throws InterruptedException {
    List<Mutation> mutationList = Arrays.asList(m((long) 1));
    String errString =
        "Transaction aborted. "
            + "Database schema probably changed during transaction, retry may succeed.";

    // mock sleeper so that it does not actually sleep.
    WriteToSpannerFn.sleeper = Mockito.mock(Sleeper.class);

    // Respond with Aborted transaction
    when(serviceFactory.mockDatabaseClient().writeAtLeastOnce(any()))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString));

    // When spanner aborts transaction for more than 5 time, pipeline execution stops with
    // PipelineExecutionException
    thrown.expect(PipelineExecutionException.class);
    thrown.expectMessage(errString);

    SpannerWriteResult result =
        pipeline
            .apply(Create.of(mutationList))
            .apply(
                SpannerIO.write()
                    .withProjectId("test-project")
                    .withInstanceId("test-instance")
                    .withDatabaseId("test-database")
                    .withServiceFactory(serviceFactory)
                    .withBatchSizeBytes(0)
                    .withFailureMode(FailureMode.FAIL_FAST));

    // One error
    PAssert.that(result.getFailedMutations())
        .satisfies(
            m -> {
              assertEquals(1, Iterables.size(m));
              return null;
            });
    pipeline.run().waitUntilFinish();

    // 0 calls to sleeper
    verify(WriteToSpannerFn.sleeper, times(0)).sleep(anyLong());
    // 5 write attempts for the single mutationGroup.
    verify(serviceFactory.mockDatabaseClient(), times(5)).writeAtLeastOnce(any());
  }
  /**
   * Interleaved ABORTED and DEADLINE_EXCEEDED errors: aborts retry without sleeping, timeouts
   * retry with a backoff sleep, and the write eventually succeeds.
   */
  @Test
  public void retryOnAbortedAndDeadlineExceeded() throws InterruptedException {
    List<Mutation> mutationList = Arrays.asList(m((long) 1));
    String errString =
        "Transaction aborted. "
            + "Database schema probably changed during transaction, retry may succeed.";

    // mock sleeper so that it does not actually sleep.
    WriteToSpannerFn.sleeper = Mockito.mock(Sleeper.class);

    // Respond with (1) Aborted transaction a couple of times (2) deadline exceeded
    // (3) Aborted transaction 3 times (4) deadline exceeded and finally return success.
    when(serviceFactory.mockDatabaseClient().writeAtLeastOnce(any()))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString))
        .thenThrow(
            SpannerExceptionFactory.newSpannerException(
                ErrorCode.DEADLINE_EXCEEDED, "simulated Timeout 1"))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString))
        .thenThrow(SpannerExceptionFactory.newSpannerException(ErrorCode.ABORTED, errString))
        .thenThrow(
            SpannerExceptionFactory.newSpannerException(
                ErrorCode.DEADLINE_EXCEEDED, "simulated Timeout 2"))
        .thenReturn(Timestamp.now());

    SpannerWriteResult result =
        pipeline
            .apply(Create.of(mutationList))
            .apply(
                SpannerIO.write()
                    .withProjectId("test-project")
                    .withInstanceId("test-instance")
                    .withDatabaseId("test-database")
                    .withServiceFactory(serviceFactory)
                    .withBatchSizeBytes(0)
                    .withFailureMode(FailureMode.FAIL_FAST));

    // Zero error
    PAssert.that(result.getFailedMutations())
        .satisfies(
            m -> {
              assertEquals(0, Iterables.size(m));
              return null;
            });
    pipeline.run().waitUntilFinish();

    // 2 calls to sleeper
    verify(WriteToSpannerFn.sleeper, times(2)).sleep(anyLong());
    // 8 write attempts for the single mutationGroup.
    verify(serviceFactory.mockDatabaseClient(), times(8)).writeAtLeastOnce(any());
  }
  /** Display data for Write exposes all 7 configured items, with a DEFAULT grouping fallback. */
  @Test
  public void displayDataWrite() throws Exception {
    SpannerIO.Write write =
        SpannerIO.write()
            .withProjectId("test-project")
            .withInstanceId("test-instance")
            .withDatabaseId("test-database")
            .withBatchSizeBytes(123)
            .withMaxNumMutations(456)
            .withMaxNumRows(789)
            .withGroupingFactor(100);

    DisplayData data = DisplayData.from(write);
    assertThat(data.items(), hasSize(7));
    assertThat(data, hasDisplayItem("projectId", "test-project"));
    assertThat(data, hasDisplayItem("instanceId", "test-instance"));
    assertThat(data, hasDisplayItem("databaseId", "test-database"));
    assertThat(data, hasDisplayItem("batchSizeBytes", 123));
    assertThat(data, hasDisplayItem("maxNumMutations", 456));
    assertThat(data, hasDisplayItem("maxNumRows", 789));
    assertThat(data, hasDisplayItem("groupingFactor", "100"));

    // check for default grouping value
    write =
        SpannerIO.write()
            .withProjectId("test-project")
            .withInstanceId("test-instance")
            .withDatabaseId("test-database");

    data = DisplayData.from(write);
    assertThat(data.items(), hasSize(7));
    assertThat(data, hasDisplayItem("groupingFactor", "DEFAULT"));
  }
  /** Display data for WriteGrouped mirrors Write's, including the DEFAULT grouping fallback. */
  @Test
  public void displayDataWriteGrouped() throws Exception {
    SpannerIO.WriteGrouped writeGrouped =
        SpannerIO.write()
            .withProjectId("test-project")
            .withInstanceId("test-instance")
            .withDatabaseId("test-database")
            .withBatchSizeBytes(123)
            .withMaxNumMutations(456)
            .withMaxNumRows(789)
            .withGroupingFactor(100)
            .grouped();

    DisplayData data = DisplayData.from(writeGrouped);
    assertThat(data.items(), hasSize(7));
    assertThat(data, hasDisplayItem("projectId", "test-project"));
    assertThat(data, hasDisplayItem("instanceId", "test-instance"));
    assertThat(data, hasDisplayItem("databaseId", "test-database"));
    assertThat(data, hasDisplayItem("batchSizeBytes", 123));
    assertThat(data, hasDisplayItem("maxNumMutations", 456));
    assertThat(data, hasDisplayItem("maxNumRows", 789));
    assertThat(data, hasDisplayItem("groupingFactor", "100"));

    // check for default grouping value
    writeGrouped =
        SpannerIO.write()
            .withProjectId("test-project")
            .withInstanceId("test-instance")
            .withDatabaseId("test-database")
            .grouped();

    data = DisplayData.from(writeGrouped);
    assertThat(data.items(), hasSize(7));
    assertThat(data, hasDisplayItem("groupingFactor", "DEFAULT"));
  }
  /**
   * BatchableMutationFilterFn splits input on the mutated-cell limit (3 keys' worth here):
   * small point writes/deletes pass through as batchable; oversized groups and non-point
   * deletes go to the unbatchable side output.
   */
  @Test
  public void testBatchableMutationFilterFn_cells() {
    Mutation all = Mutation.delete("test", KeySet.all());
    Mutation prefix = Mutation.delete("test", KeySet.prefixRange(Key.of(1L)));
    Mutation range =
        Mutation.delete(
            "test", KeySet.range(KeyRange.openOpen(Key.of(1L), Key.newBuilder().build())));
    MutationGroup[] mutationGroups =
        new MutationGroup[] {
          g(m(1L)),
          g(m(2L), m(3L)),
          g(m(2L), m(3L), m(4L), m(5L)), // not batchable - too big.
          g(del(1L)),
          g(del(5L, 6L)), // not point delete.
          g(all),
          g(prefix),
          g(range)
        };

    BatchableMutationFilterFn testFn =
        new BatchableMutationFilterFn(null, null, 10000000, 3 * CELLS_PER_KEY, 1000);
    ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
    when(mockProcessContext.sideInput(any())).thenReturn(getSchema());

    // Capture the outputs.
    doNothing().when(mockProcessContext).output(mutationGroupCaptor.capture());
    doNothing().when(mockProcessContext).output(any(), mutationGroupListCaptor.capture());

    // Process all elements.
    for (MutationGroup m : mutationGroups) {
      when(mockProcessContext.element()).thenReturn(m);
      testFn.processElement(mockProcessContext);
    }

    // Verify captured batchable elements.
    assertThat(
        mutationGroupCaptor.getAllValues(),
        containsInAnyOrder(g(m(1L)), g(m(2L), m(3L)), g(del(1L))));

    // Verify captured unbatchable mutations
    Iterable<MutationGroup> unbatchableMutations =
        Iterables.concat(mutationGroupListCaptor.getAllValues());
    assertThat(
        unbatchableMutations,
        containsInAnyOrder(
            g(m(2L), m(3L), m(4L), m(5L)), // not batchable - too big.
            g(del(5L, 6L)), // not point delete.
            g(all),
            g(prefix),
            g(range)));
  }
  /**
   * Same filter as {@code _cells} but limited by byte size: the threshold is three single-key
   * mutations' worth of bytes, so a four-mutation group is routed to the unbatchable output.
   */
  @Test
  public void testBatchableMutationFilterFn_size() {
    Mutation all = Mutation.delete("test", KeySet.all());
    Mutation prefix = Mutation.delete("test", KeySet.prefixRange(Key.of(1L)));
    Mutation range =
        Mutation.delete(
            "test", KeySet.range(KeyRange.openOpen(Key.of(1L), Key.newBuilder().build())));
    MutationGroup[] mutationGroups =
        new MutationGroup[] {
          g(m(1L)),
          g(m(2L), m(3L)),
          g(m(1L), m(3L), m(4L), m(5L)), // not batchable - too big.
          g(del(1L)),
          g(del(5L, 6L)), // not point delete.
          g(all),
          g(prefix),
          g(range)
        };

    long mutationSize = MutationSizeEstimator.sizeOf(m(1L));
    BatchableMutationFilterFn testFn =
        new BatchableMutationFilterFn(null, null, mutationSize * 3, 1000, 1000);
    ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
    when(mockProcessContext.sideInput(any())).thenReturn(getSchema());

    // Capture the outputs.
    doNothing().when(mockProcessContext).output(mutationGroupCaptor.capture());
    doNothing().when(mockProcessContext).output(any(), mutationGroupListCaptor.capture());

    // Process all elements.
    for (MutationGroup m : mutationGroups) {
      when(mockProcessContext.element()).thenReturn(m);
      testFn.processElement(mockProcessContext);
    }

    // Verify captured batchable elements.
    assertThat(
        mutationGroupCaptor.getAllValues(),
        containsInAnyOrder(g(m(1L)), g(m(2L), m(3L)), g(del(1L))));

    // Verify captured unbatchable mutations
    Iterable<MutationGroup> unbatchableMutations =
        Iterables.concat(mutationGroupListCaptor.getAllValues());
    assertThat(
        unbatchableMutations,
        containsInAnyOrder(
            g(m(1L), m(3L), m(4L), m(5L)), // not batchable - too big.
            g(del(5L, 6L)), // not point delete.
            g(all),
            g(prefix),
            g(range)));
  }
@Test
public void testBatchableMutationFilterFn_rows() {
Mutation all = Mutation.delete("test", KeySet.all());
Mutation prefix = Mutation.delete("test", KeySet.prefixRange(Key.of(1L)));
Mutation range =
Mutation.delete(
"test", KeySet.range(KeyRange.openOpen(Key.of(1L), Key.newBuilder().build())));
MutationGroup[] mutationGroups =
new MutationGroup[] {
g(m(1L)),
g(m(2L), m(3L)),
g(m(1L), m(3L), m(4L), m(5L)), // not batchable - too many rows.
g(del(1L)),
g(del(5L, 6L)), // not point delete.
g(all),
g(prefix),
g(range)
};
long mutationSize = MutationSizeEstimator.sizeOf(m(1L));
BatchableMutationFilterFn testFn = new BatchableMutationFilterFn(null, null, 1000, 1000, 3);
ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
when(mockProcessContext.sideInput(any())).thenReturn(getSchema());
// Capture the outputs.
doNothing().when(mockProcessContext).output(mutationGroupCaptor.capture());
doNothing().when(mockProcessContext).output(any(), mutationGroupListCaptor.capture());
// Process all elements.
for (MutationGroup m : mutationGroups) {
when(mockProcessContext.element()).thenReturn(m);
testFn.processElement(mockProcessContext);
}
// Verify captured batchable elements.
assertThat(
mutationGroupCaptor.getAllValues(),
containsInAnyOrder(g(m(1L)), g(m(2L), m(3L)), g(del(1L))));
// Verify captured unbatchable mutations
Iterable<MutationGroup> unbatchableMutations =
Iterables.concat(mutationGroupListCaptor.getAllValues());
assertThat(
unbatchableMutations,
containsInAnyOrder(
g(m(1L), m(3L), m(4L), m(5L)), // not batchable - too many rows.
g(del(5L, 6L)), // not point delete.
g(all),
g(prefix),
g(range)));
}
  /** With all limits set to 0 (batching disabled), every group goes to the unbatchable output. */
  @Test
  public void testBatchableMutationFilterFn_batchingDisabled() {
    MutationGroup[] mutationGroups =
        new MutationGroup[] {g(m(1L)), g(m(2L)), g(del(1L)), g(del(5L, 6L))};

    BatchableMutationFilterFn testFn = new BatchableMutationFilterFn(null, null, 0, 0, 0);
    ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
    when(mockProcessContext.sideInput(any())).thenReturn(getSchema());

    // Capture the outputs.
    doNothing().when(mockProcessContext).output(mutationGroupCaptor.capture());
    doNothing().when(mockProcessContext).output(any(), mutationGroupListCaptor.capture());

    // Process all elements.
    for (MutationGroup m : mutationGroups) {
      when(mockProcessContext.element()).thenReturn(m);
      testFn.processElement(mockProcessContext);
    }

    // Verify captured batchable elements.
    assertTrue(mutationGroupCaptor.getAllValues().isEmpty());

    // Verify captured unbatchable mutations
    Iterable<MutationGroup> unbatchableMutations =
        Iterables.concat(mutationGroupListCaptor.getAllValues());
    assertThat(unbatchableMutations, containsInAnyOrder(mutationGroups));
  }
  /**
   * GatherBundleAndSortFn holds all elements of a bundle and emits them once, key-sorted,
   * at finishBundle (no mid-bundle output when limits are not exceeded).
   */
  @Test
  public void testGatherBundleAndSortFn() throws Exception {
    GatherBundleAndSortFn testFn = new GatherBundleAndSortFn(10000000, 10, 1000, 100, null);

    ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
    FinishBundleContext mockFinishBundleContext = Mockito.mock(FinishBundleContext.class);
    when(mockProcessContext.sideInput(any())).thenReturn(getSchema());

    // Capture the outputs.
    doNothing().when(mockProcessContext).output(byteArrayKvListCaptor.capture());
    // Capture the outputs.
    doNothing().when(mockFinishBundleContext).output(byteArrayKvListCaptor.capture(), any(), any());

    MutationGroup[] mutationGroups =
        new MutationGroup[] {
          g(m(4L)), g(m(1L)), g(m(5L), m(6L), m(7L), m(8L), m(9L)), g(del(2L)), g(m(3L))
        };

    // Process all elements as one bundle.
    testFn.startBundle();
    for (MutationGroup m : mutationGroups) {
      when(mockProcessContext.element()).thenReturn(m);
      testFn.processElement(mockProcessContext);
    }
    testFn.finishBundle(mockFinishBundleContext);

    // Nothing emitted during the bundle; a single sorted list emitted at finish.
    verify(mockProcessContext, never()).output(any());
    verify(mockFinishBundleContext, times(1)).output(any(), any(), any());

    // Verify sorted output... first decode it...
    List<MutationGroup> sorted =
        byteArrayKvListCaptor.getValue().stream()
            .map(kv -> WriteGrouped.decode(kv.getValue()))
            .collect(Collectors.toList());
    assertThat(
        sorted,
        contains(g(m(1L)), g(del(2L)), g(m(3L)), g(m(4L)), g(m(5L), m(6L), m(7L), m(8L), m(9L))));
  }
  /**
   * When a bundle exceeds the group limit (3 mutations here), GatherBundleAndSortFn flushes a
   * sorted group mid-bundle and emits the remainder at finishBundle — 4 sorted groups total.
   */
  @Test
  public void testGatherBundleAndSortFn_flushOversizedBundle() throws Exception {

    // Setup class to bundle every 3 mutations
    GatherBundleAndSortFn testFn =
        new GatherBundleAndSortFn(10000000, CELLS_PER_KEY, 1000, 3, null);

    ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
    FinishBundleContext mockFinishBundleContext = Mockito.mock(FinishBundleContext.class);
    when(mockProcessContext.sideInput(any())).thenReturn(getSchema());

    // Capture the outputs.
    doNothing().when(mockProcessContext).output(byteArrayKvListCaptor.capture());
    // Capture the outputs.
    doNothing().when(mockFinishBundleContext).output(byteArrayKvListCaptor.capture(), any(), any());

    MutationGroup[] mutationGroups =
        new MutationGroup[] {
          g(m(4L)),
          g(m(1L)),
          // end group
          g(m(5L), m(6L), m(7L), m(8L), m(9L)),
          // end group
          g(m(10L)),
          g(m(3L)),
          g(m(11L)),
          // end group.
          g(m(2L))
        };

    // Process all elements as one bundle.
    testFn.startBundle();
    for (MutationGroup m : mutationGroups) {
      when(mockProcessContext.element()).thenReturn(m);
      testFn.processElement(mockProcessContext);
    }
    testFn.finishBundle(mockFinishBundleContext);

    // 3 mid-bundle flushes plus 1 final flush at finishBundle.
    verify(mockProcessContext, times(3)).output(any());
    verify(mockFinishBundleContext, times(1)).output(any(), any(), any());

    // verify sorted output... needs decoding...
    List<List<KV<byte[], byte[]>>> kvGroups = byteArrayKvListCaptor.getAllValues();
    assertEquals(4, kvGroups.size());

    // decode list of lists of KV to a list of lists of MutationGroup.
    List<List<MutationGroup>> mgListGroups =
        kvGroups.stream()
            .map(
                l ->
                    l.stream()
                        .map(kv -> WriteGrouped.decode(kv.getValue()))
                        .collect(Collectors.toList()))
            .collect(Collectors.toList());

    // verify contents of 4 sorted groups.
    assertThat(mgListGroups.get(0), contains(g(m(1L)), g(m(4L))));
    assertThat(mgListGroups.get(1), contains(g(m(5L), m(6L), m(7L), m(8L), m(9L))));
    assertThat(mgListGroups.get(2), contains(g(m(3L)), g(m(10L)), g(m(11L))));
    assertThat(mgListGroups.get(3), contains(g(m(2L))));
  }
@Test
public void testBatchFn_cells() throws Exception {
// Setup class to bundle every 3 mutations (3xCELLS_PER_KEY cell mutations)
BatchFn testFn = new BatchFn(10000000, 3 * CELLS_PER_KEY, 1000, null);
ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
when(mockProcessContext.sideInput(any())).thenReturn(getSchema());
// Capture the outputs.
doNothing().when(mockProcessContext).output(mutationGroupListCaptor.capture());
List<MutationGroup> mutationGroups =
Arrays.asList(
g(m(1L)),
g(m(4L)),
g(m(5L), m(6L), m(7L), m(8L), m(9L)),
g(m(3L)),
g(m(10L)),
g(m(11L)),
g(m(2L)));
List<KV<byte[], byte[]>> encodedInput =
mutationGroups.stream()
.map(mg -> KV.of((byte[]) null, WriteGrouped.encode(mg)))
.collect(Collectors.toList());
// Process elements.
when(mockProcessContext.element()).thenReturn(encodedInput);
testFn.processElement(mockProcessContext);
verify(mockProcessContext, times(4)).output(any());
List<Iterable<MutationGroup>> batches = mutationGroupListCaptor.getAllValues();
assertEquals(4, batches.size());
// verify contents of 4 batches.
assertThat(batches.get(0), contains(g(m(1L)), g(m(4L))));
assertThat(batches.get(1), contains(g(m(5L), m(6L), m(7L), m(8L), m(9L))));
assertThat(batches.get(2), contains(g(m(3L)), g(m(10L)), g(m(11L))));
assertThat(batches.get(3), contains(g(m(2L))));
}
  @Test
  public void testBatchFn_size() throws Exception {
    long mutationSize = MutationSizeEstimator.sizeOf(m(1L));
    // Batch once the estimated byte size of 3 single-key mutations accumulates;
    // the cell and row limits are set too high to trigger.
    BatchFn testFn = new BatchFn(mutationSize * 3, 1000, 1000, null);
    ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
    when(mockProcessContext.sideInput(any())).thenReturn(getSchema());
    // Capture the outputs.
    doNothing().when(mockProcessContext).output(mutationGroupListCaptor.capture());
    List<MutationGroup> mutationGroups =
        Arrays.asList(
            g(m(1L)),
            g(m(4L)),
            g(m(5L), m(6L), m(7L), m(8L), m(9L)),
            g(m(3L)),
            g(m(10L)),
            g(m(11L)),
            g(m(2L)));
    // The fn consumes pre-encoded KV pairs.
    List<KV<byte[], byte[]>> encodedInput =
        mutationGroups.stream()
            .map(mg -> KV.of((byte[]) null, WriteGrouped.encode(mg)))
            .collect(Collectors.toList());
    // Process elements.
    when(mockProcessContext.element()).thenReturn(encodedInput);
    testFn.processElement(mockProcessContext);
    // The byte budget splits the input into exactly four batches.
    verify(mockProcessContext, times(4)).output(any());
    List<Iterable<MutationGroup>> batches = mutationGroupListCaptor.getAllValues();
    assertEquals(4, batches.size());
    // verify contents of 4 batches.
    assertThat(batches.get(0), contains(g(m(1L)), g(m(4L))));
    assertThat(batches.get(1), contains(g(m(5L), m(6L), m(7L), m(8L), m(9L))));
    assertThat(batches.get(2), contains(g(m(3L)), g(m(10L)), g(m(11L))));
    assertThat(batches.get(3), contains(g(m(2L))));
  }
  @Test
  public void testBatchFn_rows() throws Exception {
    // Batch once 3 rows accumulate; the byte and cell limits are set too high to
    // trigger. (The old comment was copy-pasted from the cells test.)
    BatchFn testFn = new BatchFn(10000000, 1000, 3, null);
    ProcessContext mockProcessContext = Mockito.mock(ProcessContext.class);
    when(mockProcessContext.sideInput(any())).thenReturn(getSchema());
    // Capture the outputs.
    doNothing().when(mockProcessContext).output(mutationGroupListCaptor.capture());
    List<MutationGroup> mutationGroups =
        Arrays.asList(
            g(m(1L)),
            g(m(4L)),
            g(m(5L), m(6L), m(7L), m(8L), m(9L)),
            g(m(3L)),
            g(m(10L)),
            g(m(11L)),
            g(m(2L)));
    // The fn consumes pre-encoded KV pairs.
    List<KV<byte[], byte[]>> encodedInput =
        mutationGroups.stream()
            .map(mg -> KV.of((byte[]) null, WriteGrouped.encode(mg)))
            .collect(Collectors.toList());
    // Process elements.
    when(mockProcessContext.element()).thenReturn(encodedInput);
    testFn.processElement(mockProcessContext);
    // The row budget splits the input into exactly four batches.
    verify(mockProcessContext, times(4)).output(any());
    List<Iterable<MutationGroup>> batches = mutationGroupListCaptor.getAllValues();
    assertEquals(4, batches.size());
    // verify contents of 4 batches.
    assertThat(batches.get(0), contains(g(m(1L)), g(m(4L))));
    assertThat(batches.get(1), contains(g(m(5L), m(6L), m(7L), m(8L), m(9L))));
    assertThat(batches.get(2), contains(g(m(3L)), g(m(10L)), g(m(11L))));
    assertThat(batches.get(3), contains(g(m(2L))));
  }
  /** Shorthand: builds a MutationGroup from a primary mutation plus optional extras. */
  private static MutationGroup g(Mutation m, Mutation... other) {
    return MutationGroup.create(m, other);
  }
  /** Shorthand: insert-or-update mutation on table "test" with the given key value. */
  private static Mutation m(Long key) {
    return Mutation.newInsertOrUpdateBuilder("test").set("key").to(key).build();
  }
  /** Shorthand: wraps the given mutations as a batch (an Iterable) for verifications. */
  private static Iterable<Mutation> batch(Mutation... m) {
    return Arrays.asList(m);
  }
  /** Shorthand: delete mutation for the given point keys on table "test". */
  private static Mutation del(Long... keys) {
    KeySet.Builder builder = KeySet.newBuilder();
    for (Long key : keys) {
      builder.addKey(Key.of(key));
    }
    return Mutation.delete("test", builder.build());
  }
  /** Shorthand: delete mutation covering the closed key range [start, end] on table "test". */
  private static Mutation delRange(Long start, Long end) {
    return Mutation.delete("test", KeySet.range(KeyRange.closedClosed(Key.of(start), Key.of(end))));
  }
private static Iterable<Mutation> mutationsInNoOrder(Iterable<Mutation> expected) {
final ImmutableSet<Mutation> mutations = ImmutableSet.copyOf(expected);
return argThat(
new ArgumentMatcher<Iterable<Mutation>>() {
@Override
public boolean matches(Iterable<Mutation> argument) {
if (!(argument instanceof Iterable)) {
return false;
}
ImmutableSet<Mutation> actual = ImmutableSet.copyOf((Iterable) argument);
return actual.equals(mutations);
}
@Override
public String toString() {
return "Iterable must match " + mutations;
}
});
}
private Iterable<Mutation> iterableOfSize(final int size) {
return argThat(
new ArgumentMatcher<Iterable<Mutation>>() {
@Override
public boolean matches(Iterable<Mutation> argument) {
return argument instanceof Iterable && Iterables.size((Iterable<?>) argument) == size;
}
@Override
public String toString() {
return "The size of the iterable must equal " + size;
}
});
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package laberintodecretaalpha;
import java.awt.Graphics;
import java.awt.Toolkit;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import laberintodecretaalpha.Control.EstadosJuego;
import sun.audio.AudioPlayer;
import sun.audio.AudioStream;
/**
*
* @author M!ke
*/
/**
 * Pause window for the game. A lazily created singleton: obtain it via
 * {@link #Singleton()} rather than the constructor.
 *
 * <p>Most UI wiring is NetBeans Form Editor generated code (see the GEN markers)
 * and should be changed through the form designer, not by hand.
 */
public class VentanaPausa extends javax.swing.JFrame {

    /** Shared instance; created and configured on the first call to Singleton(). */
    public static VentanaPausa instancia = null;

    /** Returns the singleton, initializing icons and window position on first use. */
    public static VentanaPausa Singleton()
    {
        if(instancia == null)
        {
            instancia = new VentanaPausa();
            // NOTE(review): the constructor already ran initComponents(); running it
            // again here rebuilds the UI — presumably harmless, but confirm.
            instancia.initComponents();
            instancia.setLocationRelativeTo(null);// Center the window on screen
            instancia.labelJugar.setIcon(ImageIconDic.singleton().getImageIcon("pausa"));
            instancia.fondo.setIcon(ImageIconDic.singleton().getImageIcon("ventanaPausa"));
            instancia.repaint();
        }
        return instancia;
    }

    /**
     * NOTE(review): does not call super.paint(g) and repaints the panel through its
     * own Graphics — presumably to control repainting during the game loop, but it
     * bypasses normal Swing painting; confirm this is intentional.
     */
    @Override
    public void paint(Graphics g)
    {
        jPanel1.paintAll(jPanel1.getGraphics());
    }

    /** Builds the frame and applies the window icon. */
    public VentanaPausa() {
        initComponents();
        setIcon();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jPanel1 = new javax.swing.JPanel();
        labelJugar = new javax.swing.JLabel();
        fondo = new javax.swing.JLabel();
        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setTitle("Laberinto de Creta");
        setResizable(false);
        getContentPane().setLayout(new org.netbeans.lib.awtextra.AbsoluteLayout());
        jPanel1.setLayout(null);
        labelJugar.setPreferredSize(new java.awt.Dimension(144, 57));
        labelJugar.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseExited(java.awt.event.MouseEvent evt) {
                labelJugarMouseExited(evt);
            }
            public void mousePressed(java.awt.event.MouseEvent evt) {
                labelJugarMousePressed(evt);
            }
        });
        labelJugar.addMouseMotionListener(new java.awt.event.MouseMotionAdapter() {
            public void mouseMoved(java.awt.event.MouseEvent evt) {
                labelJugarMouseMoved(evt);
            }
        });
        jPanel1.add(labelJugar);
        labelJugar.setBounds(80, 120, 236, 57);
        labelJugar.getAccessibleContext().setAccessibleDescription("jugar");
        fondo.setText("jLabel1");
        jPanel1.add(fondo);
        fondo.setBounds(0, 0, 400, 300);
        getContentPane().add(jPanel1, new org.netbeans.lib.awtextra.AbsoluteConstraints(0, 0, 400, 300));
        pack();
    }// </editor-fold>//GEN-END:initComponents

    // Hovering over the resume button swaps in its highlighted icon.
    private void labelJugarMouseMoved(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_labelJugarMouseMoved
        instancia.labelJugar.setIcon(ImageIconDic.singleton().getImageIcon("pausaHover"));
    }//GEN-LAST:event_labelJugarMouseMoved

    // Resume button: plays a click sound (when music is enabled), restores whichever
    // level was active before the pause, then hides this window.
    private void labelJugarMousePressed(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_labelJugarMousePressed
        String botonJugarAudio = "load.wav";
        InputStream in = null;
        try {
            in = new FileInputStream(botonJugarAudio);
        } catch (FileNotFoundException ex) {
            Logger.getLogger(VentanaPausa.class.getName()).log(Level.SEVERE, null, ex);
        }
        // NOTE(review): 'in' is never closed, and if the file was missing it is still
        // null here, so new AudioStream(null) will fail. sun.audio is an internal JDK
        // API removed in newer releases; consider javax.sound.sampled instead.
        AudioStream nivel1 = null;
        try {
            nivel1 = new AudioStream(in);
        } catch (IOException ex) {
            Logger.getLogger(VentanaPausa.class.getName()).log(Level.SEVERE, null, ex);
        }
        if(VentanaConfiguracion.prenderMusica)
        {
            AudioPlayer.player.start(nivel1);
        }
        // Restore the paused level (one branch per level).
        if(Control.Singleton().getEstadoPasado() == EstadosJuego.Nivel1)
        {
            try {
                try {
                    Control.Singleton().setEstadoJuego(Control.EstadosJuego.Nivel1);
                } catch (InterruptedException ex) {
                    Logger.getLogger(VentanaPausa.class.getName()).log(Level.SEVERE, null, ex);
                }
            } catch (IOException ex) {
                System.out.println(ex);
            }
        }
        if(Control.Singleton().getEstadoPasado() == EstadosJuego.Nivel2)
        {
            try {
                try {
                    Control.Singleton().setEstadoJuego(Control.EstadosJuego.Nivel2);
                } catch (InterruptedException ex) {
                    Logger.getLogger(VentanaPausa.class.getName()).log(Level.SEVERE, null, ex);
                }
            } catch (IOException ex) {
                Logger.getLogger(VentanaPausa.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        if(Control.Singleton().getEstadoPasado() == EstadosJuego.Nivel3)
        {
            try {
                try {
                    Control.Singleton().setEstadoJuego(Control.EstadosJuego.Nivel3);
                } catch (InterruptedException ex) {
                    Logger.getLogger(VentanaPausa.class.getName()).log(Level.SEVERE, null, ex);
                }
            } catch (IOException ex) {
                Logger.getLogger(VentanaPausa.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        VentanaPausa.Singleton().setVisible(false);
    }//GEN-LAST:event_labelJugarMousePressed

    // Leaving the resume button restores its normal (non-hover) icon.
    private void labelJugarMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_labelJugarMouseExited
        instancia.labelJugar.setIcon(ImageIconDic.singleton().getImageIcon("pausa"));
    }//GEN-LAST:event_labelJugarMouseExited

    /**
     * Standalone entry point (NetBeans harness): shows this window on the EDT.
     *
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(VentanaPausa.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>
        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            @Override
            public void run() {
                new VentanaPausa().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JLabel fondo;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JLabel labelJugar;
    // End of variables declaration//GEN-END:variables

    /** Loads and applies the application window icon from the classpath. */
    private void setIcon() {
        setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource("icono/icono2.gif")));
    }
}
| |
package com.siondream.libgdxjam.ecs.systems;
import com.badlogic.ashley.core.Engine;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.EntityListener;
import com.badlogic.ashley.core.Family;
import com.badlogic.ashley.systems.IteratingSystem;
import com.badlogic.ashley.utils.ImmutableArray;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Fixture;
import com.badlogic.gdx.physics.box2d.RayCastCallback;
import com.badlogic.gdx.physics.box2d.World;
import com.badlogic.gdx.utils.Logger;
import com.badlogic.gdx.utils.ObjectMap;
import com.badlogic.gdx.utils.ObjectSet;
import com.siondream.libgdxjam.Env;
import com.siondream.libgdxjam.ecs.Mappers;
import com.siondream.libgdxjam.ecs.components.ObservableComponent;
import com.siondream.libgdxjam.ecs.components.ObserverComponent;
/**
 * Decides which "observable" entities each "observer" entity can currently see.
 *
 * <p>Each frame, every observer is tested against every observable: first a cheap
 * distance/field-of-view check ({@link #inFov}), then a Box2D raycast to account for
 * occluders. Results are cached in {@link #vision} and queried via
 * {@link #canSee(Entity, Entity)}. Also draws debug shapes (positions and FOV cones).
 */
public class VisionSystem extends IteratingSystem
    implements EntityListener, DebugRenderer {
    // observer entity -> set of observables currently visible to it.
    private ObjectMap<Entity, ObjectSet<Entity>> vision = new ObjectMap<Entity, ObjectSet<Entity>>();
    // Reused for every raycast to avoid per-query allocation.
    private VisionCallback callback = new VisionCallback();
    // Scratch vectors; safe to reuse because the system runs single-threaded.
    private Vector2 toObservable = new Vector2();
    private Vector2 tmp1 = new Vector2();
    private Vector2 tmp2 = new Vector2();
    private Logger logger = new Logger(
        VisionSystem.class.getSimpleName(),
        Env.LOG_LEVEL
    );
    // Live view of all entities carrying an ObservableComponent.
    private ImmutableArray<Entity> observables;
    private World world;

    public VisionSystem(World world) {
        // Iterate over every entity carrying an ObserverComponent.
        super(Family.all(ObserverComponent.class).get());
        logger.info("initialize");
        this.world = world;
    }

    @Override
    public void addedToEngine(Engine engine) {
        super.addedToEngine(engine);
        observables = engine.getEntitiesFor(
            Family.all(ObservableComponent.class).get()
        );
        // Track observer add/remove so the vision map stays in sync.
        engine.addEntityListener(getFamily(), this);
    }

    @Override
    public void removedFromEngine(Engine engine) {
        super.removedFromEngine(engine);
        engine.removeEntityListener(this);
    }

    /** Allocates an empty visibility set for a newly added observer. */
    @Override
    public void entityAdded(Entity entity) {
        vision.put(entity, new ObjectSet<Entity>());
    }

    @Override
    public void entityRemoved(Entity entity) {
        vision.remove(entity);
    }

    /** Returns whether {@code observer} had line of sight to {@code observable} last update. */
    public boolean canSee(Entity observer, Entity observable) {
        ObjectSet<Entity> targets = vision.get(observer);
        if (targets == null) {
            return false;
        }
        return targets.contains(observable);
    }

    @Override
    protected void processEntity(Entity observer, float deltaTime) {
        updateVision(observer);
    }

    /** Re-evaluates this observer against every observable entity. */
    private void updateVision(Entity observer) {
        for (Entity observable : observables) {
            updateVision(observer, observable);
        }
    }

    private void updateVision(Entity observer, Entity observable) {
        // Cheap cone test first; only raycast when the target is inside the FOV.
        if (!inFov(observer, observable)) {
            removeFromVision(observer, observable);
            return;
        }
        raycast(observer, observable);
    }

    /**
     * Distance + angle test: true when the target lies within the observer's view
     * distance and within fovAngle degrees of the observer's facing angle.
     */
    private boolean inFov(Entity entity, Entity target) {
        ObserverComponent observer = Mappers.observer.get(entity);
        ObservableComponent observable = Mappers.observable.get(target);
        // Zero positions are treated as "not placed yet" and never visible.
        // NOTE(review): this also rejects entities legitimately at the origin.
        // Distance test uses squared distance to avoid a sqrt.
        if (observer.position.isZero() ||
            observable.position.isZero() ||
            observer.position.dst2(observable.position) >
            observer.distance * observer.distance) {
            return false;
        }
        toObservable.set(observable.position);
        toObservable.sub(observer.position);
        float toObservableAngle = toObservable.angle();
        // Smallest absolute heading difference, accounting for 360-degree wrap-around.
        float angleDifference = Math.abs(toObservableAngle - observer.angle);
        angleDifference = Math.min(angleDifference, 360.0f - angleDifference);
        // NOTE(review): accepts up to +/- fovAngle here, while render() draws a cone
        // of +/- fovAngle/2 — one of the two presumably should use the half angle.
        if (angleDifference > observer.fovAngle) {
            return false;
        }
        return true;
    }

    /** Raycasts observer -> observable and updates the vision map with the outcome. */
    private void raycast(Entity entity, Entity target) {
        ObserverComponent observer = Mappers.observer.get(entity);
        ObservableComponent observable = Mappers.observable.get(target);
        callback.prepare(entity, target);
        world.rayCast(
            callback,
            observer.position,
            observable.position
        );
        if (callback.canSee()) {
            addToVision(entity, target);
        }
        else {
            removeFromVision(entity, target);
        }
    }

    private void addToVision(Entity observer, Entity observable) {
        // ObjectSet.add returns true only on a state change, so this logs transitions.
        if (vision.get(observer).add(observable)) {
            logger.info("Can now see");
        }
    }

    private void removeFromVision(Entity observer, Entity observable) {
        if (vision.get(observer).remove(observable)) {
            logger.info("Can't see anymore");
        }
    }

    /**
     * Box2D raycast callback that records where the observable was hit and whether
     * any other fixture blocked the ray first. Reused across raycasts via prepare().
     */
    private class VisionCallback implements RayCastCallback {
        private Entity observer;
        private Entity observable;
        // Ray fraction of the last blocking fixture reported (MAX_VALUE = none).
        private float minFraction;
        // Ray fraction at which the observable itself was hit (MAX_VALUE = never).
        private float observableFraction;

        public void prepare(Entity observer, Entity observable) {
            this.observer = observer;
            this.observable = observable;
            this.minFraction = Float.MAX_VALUE;
            this.observableFraction = Float.MAX_VALUE;
        }

        // Visible iff the observable was actually hit and nothing closer blocked it.
        public boolean canSee() {
            return observableFraction < 1.0f &&
                observableFraction <= minFraction;
        }

        @Override
        public float reportRayFixture(Fixture fixture,
                                      Vector2 point,
                                      Vector2 normal,
                                      float fraction) {
            // Bodies are expected to carry their Entity as user data.
            Object data = fixture.getBody().getUserData();
            // Per the Box2D RayCastCallback convention: return -1 to ignore this
            // fixture, 0 to terminate the ray, or 'fraction' to clip the ray here.
            if (data == observer) {
                return -1;
            }
            minFraction = fraction;
            if (data == observable) {
                observableFraction = fraction;
                return fraction;
            }
            // Any other fixture blocks sight; stop the raycast immediately.
            return 0;
        }
    }

    /** Debug rendering: observable/observer markers plus each observer's FOV cone. */
    @Override
    public void render(ShapeRenderer shapeRenderer) {
        // Red squares: observables.
        shapeRenderer.begin(ShapeType.Filled);
        shapeRenderer.setColor(Color.RED);
        for (Entity entity : observables) {
            ObservableComponent observable = Mappers.observable.get(entity);
            shapeRenderer.rect(
                observable.position.x,
                observable.position.y,
                0.1f,
                0.1f
            );
        }
        // Blue squares: observers.
        shapeRenderer.setColor(Color.BLUE);
        for (Entity entity : getEntities()) {
            ObserverComponent observer = Mappers.observer.get(entity);
            shapeRenderer.rect(
                observer.position.x,
                observer.position.y,
                0.1f,
                0.1f
            );
        }
        shapeRenderer.end();
        // Green triangles: FOV cones (apex at the observer, +/- half the FOV angle).
        shapeRenderer.begin(ShapeType.Line);
        shapeRenderer.setColor(Color.GREEN);
        for (Entity entity : getEntities()) {
            ObserverComponent observer = Mappers.observer.get(entity);
            float halfFov = observer.fovAngle * 0.5f;
            tmp1.set(observer.distance, 0.0f);
            tmp1.rotate(observer.angle);
            tmp1.rotate(halfFov);
            tmp1.add(observer.position);
            tmp2.set(observer.distance, 0.0f);
            tmp2.rotate(observer.angle);
            tmp2.rotate(-halfFov);
            tmp2.add(observer.position);
            shapeRenderer.triangle(
                observer.position.x, observer.position.y,
                tmp1.x, tmp1.y,
                tmp2.x, tmp2.y
            );
        }
        shapeRenderer.end();
    }
}
| |
//Copyright 2008 Cyrus Najmabadi
//
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
package org.metasyntactic.providers;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static org.metasyntactic.utilities.CollectionUtilities.isEmpty;
import static org.metasyntactic.utilities.StringUtilities.isNullOrEmpty;
import java.io.File;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.commons.collections.map.MultiValueMap;
import org.metasyntactic.Constants;
import org.metasyntactic.NowPlayingApplication;
import org.metasyntactic.NowPlayingModel;
import org.metasyntactic.activities.R;
import org.metasyntactic.caches.UserLocationCache;
import org.metasyntactic.data.FavoriteTheater;
import org.metasyntactic.data.Location;
import org.metasyntactic.data.Movie;
import org.metasyntactic.data.Performance;
import org.metasyntactic.data.Theater;
import org.metasyntactic.protobuf.NowPlaying;
import org.metasyntactic.threading.ThreadingUtilities;
import org.metasyntactic.time.Days;
import org.metasyntactic.time.Hours;
import org.metasyntactic.utilities.DateUtilities;
import org.metasyntactic.utilities.ExceptionUtilities;
import org.metasyntactic.utilities.FileUtilities;
import org.metasyntactic.utilities.LogUtilities;
import org.metasyntactic.utilities.NetworkUtilities;
import android.content.Intent;
import com.google.protobuf.InvalidProtocolBufferException;
public class DataProvider {
public enum State {
Started, Updating, Finished
}
  // Handed to ThreadingUtilities in update(); exact locking semantics live there.
  private final Object lock = new Object();
  private final NowPlayingModel model;
  // Cached lookup results; null until a lookup is published or after onLowMemory().
  private List<Movie> movies;
  private List<Theater> theaters;
  // theater name -> time that theater's listings were last synchronized.
  private Map<String, Date> synchronizationData;
  // theater name -> (movie title -> performances at that theater).
  private Map<String, Map<String, List<Performance>>> performances;
  // Set elsewhere (not in this chunk) to make background work bail out early.
  private boolean shutdown;
  private State state;

  /** Creates a provider bound to the given model; starts with empty caches. */
  public DataProvider(final NowPlayingModel model) {
    this.model = model;
    performances = new HashMap<String, Map<String, List<Performance>>>();
    state = State.Started;
  }
public void onLowMemory() {
movies = null;
theaters = null;
synchronizationData = null;
performances = new HashMap<String, Map<String, List<Performance>>>();
}
  /**
   * Kicks off an asynchronous refresh of movie/theater data. The current lists are
   * captured up front so the background worker operates on a stable snapshot.
   */
  public void update() {
    final List<Movie> localMovies = getMovies();
    final List<Theater> localTheaters = getTheaters();
    final Runnable runnable = new Runnable() {
      public void run() {
        updateBackgroundEntryPoint(localMovies, localTheaters);
      }
    };
    state = State.Updating;
    // Named background thread; 'lock' and the visible flag are interpreted by
    // ThreadingUtilities (semantics not visible in this chunk).
    ThreadingUtilities.performOnBackgroundThread("Update Provider", runnable, lock, true/* visible */);
  }
private static boolean isUpToDate() {
final Date lastLookupDate = getLastLookupDate();
if (lastLookupDate == null) {
return false;
}
final int days = Days.daysBetween(lastLookupDate, new Date());
if (days != 0) {
return false;
}
// same date. make sure it's been at least 12 hours
final int hours = Hours.hoursBetween(lastLookupDate, new Date());
if (hours > 8) {
return false;
}
return true;
}
  /**
   * Background-thread entry point: runs the full refresh, then (on the main thread)
   * broadcasts that local data was downloaded and refreshes the model's secondary
   * caches.
   */
  private void updateBackgroundEntryPoint(final Iterable<Movie> currentMovies, final List<Theater> currentTheaters) {
    updateBackgroundEntryPointWorker(currentMovies, currentTheaters);
    ThreadingUtilities.performOnMainThread(new Runnable() {
      public void run() {
        NowPlayingApplication.getApplication().sendBroadcast(
            new Intent(NowPlayingApplication.NOW_PLAYING_LOCAL_DATA_DOWNLOADED));
        model.updateSecondaryCaches();
      }
    });
  }
private void broadcastUpdate(final int id) {
ThreadingUtilities.performOnMainThread(new Runnable() {
public void run() {
final String message = NowPlayingApplication.getApplication().getResources().getString(id);
final Intent intent = new Intent(NowPlayingApplication.NOW_PLAYING_LOCAL_DATA_DOWNLOAD_PROGRESS).putExtra(
"message", message);
NowPlayingApplication.getApplication().sendBroadcast(intent);
}
});
}
  /**
   * Performs the actual refresh: resolve the user's location, download listings for
   * it, merge in recent-but-stale data for theaters that dropped out of the result,
   * re-lookup favorite theaters that are missing, then publish and persist the
   * result. Bails out early when data is fresh or {@link #shutdown} is set.
   */
  private void updateBackgroundEntryPointWorker(final Iterable<Movie> currentMovies, final List<Theater> currentTheaters) {
    if (isUpToDate()) {
      return;
    }
    if (shutdown) {
      return;
    }
    // LogUtilities.i("DEBUG", "Started downloadUserLocation trace");
    // Debug.startMethodTracing("downloadUserLocation", 50000000);
    long start = System.currentTimeMillis();
    broadcastUpdate(R.string.finding_location);
    final Location location = UserLocationCache.downloadUserAddressLocationBackgroundEntryPoint(model.getUserAddress());
    LogUtilities.logTime(DataProvider.class, "Get User Location", start);
    // Debug.stopMethodTracing();
    // LogUtilities.i("DEBUG", "Stopped downloadUserLocation trace");
    if (location == null) {
      // this should be impossible. we only update if the user has entered a
      // valid location
      return;
    }
    start = System.currentTimeMillis();
    if (shutdown) {
      return;
    }
    // Main download: no theater-name filter (null) means "everything near here".
    final LookupResult result = lookupLocation(location, null);
    LogUtilities.logTime(DataProvider.class, "Lookup Location", start);
    if (result == null || isEmpty(result.getMovies()) || isEmpty(result.getTheaters())) {
      return;
    }
    start = System.currentTimeMillis();
    if (shutdown) {
      return;
    }
    addMissingData(result, location, currentMovies, currentTheaters);
    LogUtilities.logTime(DataProvider.class, "Add missing data", start);
    start = System.currentTimeMillis();
    broadcastUpdate(R.string.finding_favorites);
    if (shutdown) {
      return;
    }
    lookupMissingFavorites(result);
    LogUtilities.logTime(DataProvider.class, "Lookup Missing Theaters", start);
    reportResult(result);
    saveResult(result);
  }
  /**
   * Re-surfaces recent-but-stale data for theaters that dropped out of a fresh
   * lookup, so a theater the user cares about doesn't silently disappear just
   * because it reported no showtimes this time.
   */
  private void addMissingData(final LookupResult result, final Location location, final Iterable<Movie> currentMovies,
      final List<Theater> currentTheaters) {
    // Ok. so if:
    // a) the user is doing their main search
    // b) we do not find data for a theater that should be showing up
    // c) they're close enough to their last search
    // then we want to give them the old information we have for that
    // theater *as well as* a warning to let them know that it may be
    // out of date.
    //
    // This is to deal with the case where the user is confused because
    // a theater they care about has been filtered out because it didn't
    // report showtimes.
    final Collection<String> existingMovieTitles = new LinkedHashSet<String>();
    for (final Movie movie : result.getMovies()) {
      existingMovieTitles.add(movie.getCanonicalTitle());
    }
    // Theaters we knew about previously that are absent from the fresh result.
    final Collection<Theater> missingTheaters = new LinkedHashSet<Theater>(currentTheaters);
    missingTheaters.removeAll(result.getTheaters());
    for (final Theater theater : missingTheaters) {
      // 50 distance units — units depend on Location.distanceTo (presumably
      // miles or km; confirm against that implementation).
      if (theater.getLocation().distanceTo(location) > 50) {
        // Not close enough. Consider this a brand new search in a new
        // location. Don't include this old theaters.
        continue;
      }
      // no showtime information available. fallback to anything we've
      // stored (but warn the user).
      final Map<String, List<Performance>> oldPerformances = lookupTheaterPerformances(theater);
      if (isEmpty(oldPerformances)) {
        continue;
      }
      final Date syncDate = synchronizationDateForTheater(theater);
      if (syncDate == null) {
        continue;
      }
      // Data older than four weeks is too stale to resurface at all.
      if (Math.abs(syncDate.getTime() - new Date().getTime()) > Constants.FOUR_WEEKS) {
        continue;
      }
      result.getPerformances().put(theater.getName(), oldPerformances);
      result.getSynchronizationData().put(theater.getName(), syncDate);
      result.getTheaters().add(theater);
      // The resurrected listings may reference movies missing from the result too.
      addMissingMovies(oldPerformances, result, existingMovieTitles, currentMovies);
    }
  }
  /**
   * For favorite theaters absent from the main lookup, re-queries each favorite's
   * originating location (restricted to those theater names) and merges the
   * returned theaters, performances, and any movies they reference into the result.
   */
  @SuppressWarnings("unchecked")
  private void lookupMissingFavorites(final LookupResult lookupResult) {
    if (lookupResult == null) {
      return;
    }
    final Collection<FavoriteTheater> favoriteTheaters = model.getFavoriteTheaters();
    if (favoriteTheaters.isEmpty()) {
      return;
    }
    // Group missing favorites by the location they were originally found from,
    // so each location needs only one extra lookup. (MultiValueMap is raw-typed,
    // hence the class-level unchecked suppression.)
    final MultiValueMap locationToMissingTheaterNames = new MultiValueMap();
    for (final FavoriteTheater favorite : favoriteTheaters) {
      if (!lookupResult.containsFavorite(favorite)) {
        locationToMissingTheaterNames.put(favorite.getOriginatingLocation(), favorite.getName());
      }
    }
    final Collection<String> movieTitles = new LinkedHashSet<String>();
    for (final Movie movie : lookupResult.getMovies()) {
      movieTitles.add(movie.getCanonicalTitle());
    }
    for (final Location location : (Set<Location>) locationToMissingTheaterNames.keySet()) {
      final Collection<String> theaterNames = locationToMissingTheaterNames.getCollection(location);
      final LookupResult favoritesLookupResult = lookupLocation(location, theaterNames);
      if (favoritesLookupResult == null) {
        continue;
      }
      lookupResult.getTheaters().addAll(favoritesLookupResult.getTheaters());
      lookupResult.getPerformances().putAll(favoritesLookupResult.getPerformances());
      // the theater may refer to movies that we don't know about.
      for (final Map.Entry<String, Map<String, List<Performance>>> stringMapEntry : favoritesLookupResult
          .getPerformances().entrySet()) {
        addMissingMovies(stringMapEntry.getValue(), lookupResult, movieTitles, favoritesLookupResult.getMovies());
      }
    }
  }
private static void addMissingMovies(final Map<String, List<Performance>> performances, final LookupResult result,
final Collection<String> existingMovieTitles, final Iterable<Movie> currentMovies) {
if (isEmpty(performances)) {
return;
}
for (final String movieTitle : performances.keySet()) {
if (!existingMovieTitles.contains(movieTitle)) {
existingMovieTitles.add(movieTitle);
for (final Movie movie : currentMovies) {
if (movie.getCanonicalTitle().equals(movieTitle)) {
result.getMovies().add(movie);
break;
}
}
}
}
}
  /** Publishes a finished lookup result on the main thread. */
  private void reportResult(final LookupResult result) {
    ThreadingUtilities.performOnMainThread(new Runnable() {
      public void run() {
        reportResultOnMainThread(result);
      }
    });
  }
  /**
   * Installs the lookup result into this provider's cache fields and triggers an
   * application-wide refresh. Must run on the main thread (see reportResult).
   */
  private void reportResultOnMainThread(final LookupResult result) {
    movies = result.getMovies();
    theaters = result.getTheaters();
    synchronizationData = result.getSynchronizationData();
    performances = result.getPerformances();
    NowPlayingApplication.refresh(true);
  }
  /**
   * Downloads and parses theater listings for {@code location}, optionally
   * restricted to {@code theaterNames}. Returns null when the location has no
   * postal code, the download or protobuf parse fails, or a shutdown is requested.
   */
  private LookupResult lookupLocation(final Location location, final Collection<String> theaterNames) {
    if (isNullOrEmpty(location.getPostalCode())) {
      return null;
    }
    // Fall back to the device locale's country when the location has none.
    final String country = isNullOrEmpty(location.getCountry()) ? Locale.getDefault().getCountry() : location
        .getCountry();
    // Days-ahead offset for the search date, clamped to [0, 7].
    int days = Days.daysBetween(DateUtilities.getToday(), model.getSearchDate());
    days = min(max(days, 0), 7);
    // Latitude/longitude are sent as integer microdegrees (degrees * 1e6).
    // NOTE(review): query parameters are concatenated without URL-encoding;
    // postal codes containing spaces (e.g. UK/Canada) may need encoding — confirm
    // what the server accepts.
    final String address = "http://" + NowPlayingApplication.host + ".appspot.com/LookupTheaterListings2?country="
        + country + "&postalcode=" + location.getPostalCode() + "&language=" + Locale.getDefault().getLanguage()
        + "&day=" + days + "&format=pb" + "&latitude=" + (int) (location.getLatitude() * 1000000) + "&longitude="
        + (int) (location.getLongitude() * 1000000) + "&device=android";
    final byte[] data = NetworkUtilities.download(address, true);
    if (data == null) {
      return null;
    }
    broadcastUpdate(R.string.searching_location);
    final NowPlaying.TheaterListingsProto theaterListings;
    try {
      // LogUtilities.i("DEBUG", "Started parse from trace");
      // Debug.startMethodTracing("parse_from", 50000000);
      final long start = System.currentTimeMillis();
      theaterListings = NowPlaying.TheaterListingsProto.parseFrom(data);
      LogUtilities.i("DEBUG", "Parsing took: " + (System.currentTimeMillis() - start));
      // Debug.stopMethodTracing();
      // LogUtilities.i("DEBUG", "Stopped parse from trace");
    } catch (final InvalidProtocolBufferException e) {
      ExceptionUtilities.log(DataProvider.class, "lookupLocation", e);
      return null;
    }
    // LogUtilities.i("DEBUG", "Started processListings trace");
    // Debug.startMethodTracing("processListings", 50000000);
    // Debug.stopMethodTracing();
    // LogUtilities.i("DEBUG", "Stopped processListings trace");
    if (shutdown) {
      return null;
    }
    return processTheaterListings(theaterListings, location, theaterNames);
  }
  // Release dates arrive as "yyyy-MM-dd". NOTE(review): SimpleDateFormat is not
  // thread-safe; this is only safe if processMovies never runs concurrently.
  private final DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
/**
 * Converts the downloaded movie protos into {@code Movie} objects keyed by
 * the server-assigned identifier.
 *
 * @param movies movie protos from the listings response
 * @return identifier-to-movie map, or null when shutdown was requested
 */
private Map<String, Movie> processMovies(final Iterable<NowPlaying.MovieProto> movies) {
  final Map<String, Movie> movieIdToMovieMap = new HashMap<String, Movie>();
  for (final NowPlaying.MovieProto movieProto : movies) {
    if (shutdown) {
      return null;
    }
    final String identifier = movieProto.getIdentifier();
    final String title = movieProto.getTitle();
    final String rating = movieProto.getRawRating();
    final int length = movieProto.getLength();
    final String synopsis = movieProto.getDescription();
    // Genres arrive as "A/B/C" with underscores standing in for spaces.
    final List<String> genres = Arrays.asList(movieProto.getGenre().replace('_', ' ').split("/"));
    final List<String> directors = movieProto.getDirectorList();
    final List<String> cast = movieProto.getCastList();
    final String releaseDateString = movieProto.getReleaseDate();
    Date releaseDate = null;
    if (releaseDateString != null && releaseDateString.length() == 10) {
      try {
        releaseDate = formatter.parse(releaseDateString);
      } catch (final ParseException e) {
        // One malformed date from the server must not abort the entire
        // update (previously this threw RuntimeException); log it and
        // leave the release date unknown.
        ExceptionUtilities.log(DataProvider.class, "processMovies", e);
      }
    }
    String imdbAddress = "";
    if (!isNullOrEmpty(movieProto.getIMDbUrl())) {
      imdbAddress = "http://www.imdb.com/title/" + movieProto.getIMDbUrl();
    }
    final String poster = "";
    final Movie movie = new Movie(identifier, title, rating, length, imdbAddress, releaseDate, poster, synopsis, "",
        directors, cast, genres);
    movieIdToMovieMap.put(identifier, movie);
  }
  return movieIdToMovieMap;
}
/**
 * Groups the downloaded showtimes by canonical movie title.
 *
 * @param movieAndShowtimesList showtime protos for a single theater
 * @param movieIdToMovieMap movies keyed by server identifier, used to
 *     resolve each showtime entry to a canonical title
 * @return map from canonical movie title to that movie's performances
 */
private Map<String, List<Performance>> processMovieAndShowtimesList(
    final Iterable<NowPlaying.TheaterListingsProto.TheaterAndMovieShowtimesProto.MovieAndShowtimesProto> movieAndShowtimesList,
    final Map<String, Movie> movieIdToMovieMap) {
  final Map<String, List<Performance>> result = new HashMap<String, List<Performance>>();
  for (final NowPlaying.TheaterListingsProto.TheaterAndMovieShowtimesProto.MovieAndShowtimesProto movieAndShowtimes : movieAndShowtimesList) {
    if (shutdown) {
      break;
    }
    final String movieId = movieAndShowtimes.getMovieIdentifier();
    final Movie movie = movieIdToMovieMap.get(movieId);
    if (movie == null) {
      // The server listed showtimes for a movie absent from its own movie
      // list; previously this dereferenced null. Skip the orphaned entry.
      continue;
    }
    final String movieTitle = movie.getCanonicalTitle();
    final List<Performance> localPerformances = new ArrayList<Performance>();
    final List<String> times = processTimes(movieAndShowtimes.getShowtimes().getShowtimesList());
    final List<NowPlaying.ShowtimeProto> showtimes = movieAndShowtimes.getShowtimes().getShowtimesList();
    for (int i = 0; i < showtimes.size(); i++) {
      if (shutdown) {
        break;
      }
      final String time = times.get(i);
      if (time == null) {
        continue;
      }
      String url = showtimes.get(i).getUrl();
      if (url != null && url.startsWith("tid=")) {
        // Rewrite bare Fandango ticket ids into full affiliate purchase URLs.
        url = "http://www.fandango.com/redirect.aspx?" + url + "&a=11584&source=google";
      }
      localPerformances.add(new Performance(time, url));
    }
    result.put(movieTitle, localPerformances);
  }
  return result;
}
/**
 * Extracts the raw time string from each showtime proto, preserving order.
 *
 * @param showtimes showtime protos to read
 * @return one time string per proto, in iteration order
 */
private static List<String> processTimes(final Iterable<NowPlaying.ShowtimeProto> showtimes) {
  final List<String> result = new ArrayList<String>();
  for (final NowPlaying.ShowtimeProto showtime : showtimes) {
    result.add(showtime.getTime());
  }
  return result;
}
// Converts a single theater proto into a Theater plus its per-movie
// performance map, appending results into the caller-supplied collections.
// Skipped entirely when the theater has no name or fails the optional
// theaterNames filter.
private void processTheaterAndMovieShowtimes(
final NowPlaying.TheaterListingsProto.TheaterAndMovieShowtimesProto theaterAndMovieShowtimes,
final Collection<Theater> theaters, final Map<String, Map<String, List<Performance>>> performances,
final Map<String, Date> synchronizationData, final Location originatingLocation,
final Collection<String> theaterNames, final Map<String, Movie> movieIdToMovieMap) {
final NowPlaying.TheaterProto theater = theaterAndMovieShowtimes.getTheater();
final String name = theater.getName();
if (isNullOrEmpty(name)) {
return;
}
// When a theaterNames filter is supplied, only those theaters are refreshed.
if (theaterNames != null && !theaterNames.contains(name)) {
return;
}
final String identifier = theater.getIdentifier();
final String address = theater.getStreetAddress();
final String city = theater.getCity();
final String localState = theater.getState();
final String postalCode = theater.getPostalCode();
final String country = theater.getCountry();
final String phone = theater.getPhone();
final double latitude = theater.getLatitude();
final double longitude = theater.getLongitude();
final List<NowPlaying.TheaterListingsProto.TheaterAndMovieShowtimesProto.MovieAndShowtimesProto> movieAndShowtimesList = theaterAndMovieShowtimes
.getMovieAndShowtimesList();
if (shutdown) {
return;
}
Map<String, List<Performance>> movieToShowtimesMap = processMovieAndShowtimesList(movieAndShowtimesList,
movieIdToMovieMap);
synchronizationData.put(name, DateUtilities.getToday());
if (movieToShowtimesMap.isEmpty()) {
// no showtime information available. fallback to anything we've
// stored (but warn the user).
final File performancesFile = getPerformancesFile(name);
final Map<String, List<Performance>> oldPerformances = FileUtilities.readStringToListOfPersistables(
Performance.reader, performancesFile);
if (!oldPerformances.isEmpty()) {
movieToShowtimesMap = oldPerformances;
// Keep the old sync date so the UI can warn that this data is stale.
synchronizationData.put(name, synchronizationDateForTheater(name));
}
}
final Location location = new Location(latitude, longitude, address, city, localState, postalCode, country);
performances.put(name, movieToShowtimesMap);
theaters.add(new Theater(identifier, name, address, phone, location, originatingLocation, new HashSet<String>(
movieToShowtimesMap.keySet())));
}
/**
 * Converts every theater proto into theaters, performances and sync dates,
 * packaged as a {@code LookupResult}. The movies field is left null; it is
 * filled in later by {@code processTheaterListings}.
 *
 * @return the aggregated result, or null when shutdown was requested mid-loop
 */
private LookupResult processTheaterAndMovieShowtimes(
    final Iterable<NowPlaying.TheaterListingsProto.TheaterAndMovieShowtimesProto> theaterAndMovieShowtimes,
    final Location originatingLocation, final Collection<String> theaterNames,
    final Map<String, Movie> movieIdToMovieMap) {
  final List<Theater> theaterList = new ArrayList<Theater>();
  final Map<String, Map<String, List<Performance>>> performanceMap =
      new HashMap<String, Map<String, List<Performance>>>();
  final Map<String, Date> syncDates = new HashMap<String, Date>();
  for (final NowPlaying.TheaterListingsProto.TheaterAndMovieShowtimesProto proto : theaterAndMovieShowtimes) {
    if (shutdown) {
      return null;
    }
    processTheaterAndMovieShowtimes(proto, theaterList, performanceMap, syncDates,
        originatingLocation, theaterNames, movieIdToMovieMap);
  }
  return new LookupResult(null, theaterList, performanceMap, syncDates);
}
/**
 * Converts a parsed theater-listings proto into a complete
 * {@code LookupResult} (movies, theaters, performances, sync dates).
 *
 * @return the result, or null on shutdown or when a helper aborted
 */
private LookupResult processTheaterListings(final NowPlaying.TheaterListingsProto element,
    final Location originatingLocation, final Collection<String> theaterNames) {
  final List<NowPlaying.MovieProto> movieProtos = element.getMoviesList();
  final List<NowPlaying.TheaterListingsProto.TheaterAndMovieShowtimesProto> theaterAndMovieShowtimes = element
      .getTheaterAndMovieShowtimesList();
  final Map<String, Movie> movieIdToMovieMap = processMovies(movieProtos);
  // Helpers return null when shutdown is requested; check the results
  // directly instead of only the flag so a flag flip between checks can
  // never lead to a null dereference below.
  if (shutdown || movieIdToMovieMap == null) {
    return null;
  }
  final LookupResult result = processTheaterAndMovieShowtimes(theaterAndMovieShowtimes, originatingLocation,
      theaterNames, movieIdToMovieMap);
  if (shutdown || result == null) {
    return null;
  }
  final List<Movie> localMovies = new ArrayList<Movie>(movieIdToMovieMap.values());
  return new LookupResult(localMovies, result.getTheaters(), result.getPerformances(),
      result.getSynchronizationData());
}
// Backing store for the downloaded movie list.
private static File getMoviesFile() {
return new File(NowPlayingApplication.dataDirectory, "Movies");
}
// Backing store for the downloaded theater list.
private static File getTheatersFile() {
return new File(NowPlayingApplication.dataDirectory, "Theaters");
}
// Backing store for the per-theater synchronization dates.
private static File getSynchronizationFile() {
return new File(NowPlayingApplication.dataDirectory, "Synchronization");
}
// Marker file whose modification time records the last successful lookup.
private static File getLastLookupDateFile() {
return new File(NowPlayingApplication.dataDirectory, "lastLookupDate");
}
// Returns the epoch (data considered maximally stale) when no lookup has run.
private static Date getLastLookupDate() {
final File file = getLastLookupDateFile();
if (!file.exists()) {
return new Date(0);
}
return new Date(file.lastModified());
}
// Touches the marker file; its mtime becomes the last-lookup timestamp.
private static void setLastLookupDate() {
FileUtilities.writeString("", getLastLookupDateFile());
}
// Reads the persisted movie list from disk; empty list when absent.
private static List<Movie> loadMovies() {
final List<Movie> result = FileUtilities.readPersistableList(Movie.reader, getMoviesFile());
if (result == null) {
return Collections.emptyList();
}
// hack. ensure no duplicates
final Map<String, Movie> map = new HashMap<String, Movie>();
for (final Movie movie : result) {
map.put(movie.getIdentifier(), movie);
}
return new ArrayList<Movie>(map.values());
}
// Lazily loads and caches the movie list.
// NOTE(review): unsynchronized lazy init -- assumes single-threaded access; confirm.
public List<Movie> getMovies() {
if (movies == null) {
movies = loadMovies();
}
return movies;
}
// Reads the theater-name -> last-sync-date map from disk; empty when absent.
private static Map<String, Date> loadSynchronizationData() {
final Map<String, Date> result = FileUtilities.readStringToDateMap(getSynchronizationFile());
if (result == null) {
return Collections.emptyMap();
}
return result;
}
// Lazily loads and caches the synchronization data.
private Map<String, Date> getSynchronizationData() {
if (synchronizationData == null) {
synchronizationData = loadSynchronizationData();
}
return synchronizationData;
}
// Performance file for a theater under an arbitrary parent folder; the
// theater name is sanitized for use as a file name.
private static File getPerformancesFile(final File parentFolder, final String theaterName) {
return new File(parentFolder, FileUtilities.sanitizeFileName(theaterName));
}
// Performance file for a theater in the standard performances directory.
private static File getPerformancesFile(final String theaterName) {
return getPerformancesFile(NowPlayingApplication.performancesDirectory, theaterName);
}
/**
 * Persists a completed lookup to disk: movies, theaters, sync data and the
 * per-theater performance files, broadcasting progress as it goes.
 */
private void saveResult(final LookupResult result) {
  long start = System.currentTimeMillis();
  broadcastUpdate(R.string.downloading_movie_information);
  FileUtilities.writePersistableCollection(result.getMovies(), getMoviesFile());
  LogUtilities.logTime(DataProvider.class, "Saving Movies", start);
  start = System.currentTimeMillis();
  broadcastUpdate(R.string.downloading_theater_information);
  FileUtilities.writePersistableCollection(result.getTheaters(), getTheatersFile());
  LogUtilities.logTime(DataProvider.class, "Saving Theaters", start);
  start = System.currentTimeMillis();
  FileUtilities.writeStringToDateMap(result.getSynchronizationData(), getSynchronizationFile());
  LogUtilities.logTime(DataProvider.class, "Saving Sync Data", start);
  start = System.currentTimeMillis();
  // Write performances into a temp directory first and then swap it into
  // place so readers never observe a half-written directory.
  final File tempFolder = new File(NowPlayingApplication.tempDirectory, "DPT" + Math.random());
  if (!tempFolder.mkdirs()) {
    // Previously the result was silently ignored; a failed mkdirs means
    // every performance write below would be lost without a trace.
    LogUtilities.i("DEBUG", "Could not create temp directory " + tempFolder);
  }
  broadcastUpdate(R.string.downloading_local_performances);
  for (final Map.Entry<String, Map<String, List<Performance>>> entry : result.getPerformances().entrySet()) {
    final Map<String, List<Performance>> value = entry.getValue();
    FileUtilities.writeStringToListOfPersistables(value, getPerformancesFile(tempFolder, entry.getKey()));
  }
  NowPlayingApplication.deleteDirectory(NowPlayingApplication.performancesDirectory);
  if (!tempFolder.renameTo(NowPlayingApplication.performancesDirectory)) {
    LogUtilities.i("DEBUG", "Could not move performances into "
        + NowPlayingApplication.performancesDirectory);
  }
  LogUtilities.logTime(DataProvider.class, "Saving Performances", start);
  // this has to happen last: the marker file's mtime declares the whole
  // result fully saved.
  setLastLookupDate();
}
// Returns the per-movie performance map for a theater, reading it from disk
// (and caching it in the performances map) on first access.
// NOTE(review): a null read result is cached as null, so a missing file is
// re-read on every call -- presumably acceptable; confirm.
private Map<String, List<Performance>> lookupTheaterPerformances(final Theater theater) {
Map<String, List<Performance>> theaterPerformances = performances.get(theater.getName());
if (theaterPerformances == null) {
theaterPerformances = FileUtilities.readStringToListOfPersistables(Performance.reader,
getPerformancesFile(theater.getName()));
performances.put(theater.getName(), theaterPerformances);
}
return theaterPerformances;
}
// Returns the showtimes for a movie at a theater; empty list when unknown.
public List<Performance> getPerformancesForMovieInTheater(final Movie movie, final Theater theater) {
final Map<String, List<Performance>> theaterPerformances = lookupTheaterPerformances(theater);
if (theaterPerformances != null) {
final List<Performance> result = theaterPerformances.get(movie.getCanonicalTitle());
if (result != null) {
return result;
}
}
return Collections.emptyList();
}
// Reads the persisted theater list from disk; empty list when absent.
private static List<Theater> loadTheaters() {
final List<Theater> result = FileUtilities.readPersistableList(Theater.reader, getTheatersFile());
if (result == null) {
return Collections.emptyList();
}
return result;
}
// Lazily loads and caches the theater list.
public List<Theater> getTheaters() {
if (theaters == null) {
theaters = loadTheaters();
}
return theaters;
}
// Last time listings for the named theater were successfully refreshed.
public Date synchronizationDateForTheater(final String theaterName) {
return getSynchronizationData().get(theaterName);
}
public Date synchronizationDateForTheater(final Theater theater) {
return synchronizationDateForTheater(theater.getName());
}
// Asks any in-flight lookup to stop at its next checkpoint.
public void shutdown() {
shutdown = true;
}
// Deletes the last-lookup marker so the next update runs unconditionally.
public static void markOutOfDate() {
getLastLookupDateFile().delete();
}
public State getState() {
return state;
}
// A theater is stale when its sync data is from a day other than today.
// Theaters with no recorded sync date are reported as not stale.
public boolean isStale(final Theater theater) {
final Date date = synchronizationDateForTheater(theater);
if (date == null) {
return false;
}
return !DateUtilities.isToday(date);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.protocol.stomp.v11;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.activemq.artemis.core.protocol.stomp.ActiveMQStompException;
import org.apache.activemq.artemis.core.protocol.stomp.FrameEventListener;
import org.apache.activemq.artemis.core.protocol.stomp.SimpleBytes;
import org.apache.activemq.artemis.core.protocol.stomp.Stomp;
import org.apache.activemq.artemis.core.protocol.stomp.StompConnection;
import org.apache.activemq.artemis.core.protocol.stomp.StompDecoder;
import org.apache.activemq.artemis.core.protocol.stomp.StompFrame;
import org.apache.activemq.artemis.core.protocol.stomp.VersionedStompFrameHandler;
import org.apache.activemq.artemis.core.server.ActiveMQServerLogger;
import static org.apache.activemq.artemis.core.protocol.stomp.ActiveMQStompProtocolMessageBundle.BUNDLE;
public class StompFrameHandlerV11 extends VersionedStompFrameHandler implements FrameEventListener
{
// STOMP 1.1 escape character used when encoding/decoding header text.
protected static final char ESC_CHAR = '\\';
// Created on CONNECT when the client negotiates heart-beating; null otherwise.
private HeartBeater heartBeater;
/**
 * Registers this handler as a frame-event listener on the connection and
 * installs a STOMP 1.1 decoder.
 */
public StompFrameHandlerV11(StompConnection connection)
{
super(connection);
connection.addStompEventListener(this);
decoder = new StompDecoderV11();
decoder.init();
}
/**
 * Handles a CONNECT frame: validates the credentials and, on success,
 * replies with a CONNECTED frame carrying version, session, server and
 * (when negotiated) heart-beat headers. On failure an ERROR frame is
 * returned instead.
 */
@Override
public StompFrame onConnect(StompFrame frame)
{
StompFrame response = null;
Map<String, String> headers = frame.getHeadersMap();
String login = headers.get(Stomp.Headers.Connect.LOGIN);
String passcode = headers.get(Stomp.Headers.Connect.PASSCODE);
String clientID = headers.get(Stomp.Headers.Connect.CLIENT_ID);
String requestID = headers.get(Stomp.Headers.Connect.REQUEST_ID);
try
{
if (connection.validateUser(login, passcode))
{
connection.setClientID(clientID);
connection.setValid(true);
response = this.createStompFrame(Stomp.Responses.CONNECTED);
// version
response.addHeader(Stomp.Headers.Connected.VERSION,
connection.getVersion());
// session
response.addHeader(Stomp.Headers.Connected.SESSION, connection
.getID().toString());
// server
response.addHeader(Stomp.Headers.Connected.SERVER,
connection.getActiveMQServerName());
if (requestID != null)
{
response.addHeader(Stomp.Headers.Connected.RESPONSE_ID,
requestID);
}
// heart-beat. We need to start after connected frame has been sent.
// otherwise the client may receive heart-beat before it receives
// connected frame.
String heartBeat = headers.get(Stomp.Headers.Connect.HEART_BEAT);
if (heartBeat != null)
{
handleHeartBeat(heartBeat);
if (heartBeater == null)
{
response.addHeader(Stomp.Headers.Connected.HEART_BEAT, "0,0");
}
else
{
response.addHeader(Stomp.Headers.Connected.HEART_BEAT, heartBeater.getServerHeartBeatValue());
}
}
}
else
{
// not valid
response = new StompFrame(Stomp.Responses.ERROR, true);
response.addHeader(Stomp.Headers.Error.MESSAGE, "Failed to connect");
response.setBody("The login account is not valid.");
}
}
catch (ActiveMQStompException e)
{
response = e.getFrame();
}
return response;
}
//ping parameters, hard-code for now
//the server can support min 20 milliseconds and receive ping at 100 milliseconds (20,100)
/**
 * Parses the CONNECT frame's "heart-beat" header ("cx,cy", milliseconds)
 * and, when either side requests heart-beating, creates the HeartBeater.
 *
 * @throws ActiveMQStompException when the header is malformed or non-numeric
 */
private void handleHeartBeat(String heartBeatHeader) throws ActiveMQStompException
{
   String[] params = heartBeatHeader.split(",");
   if (params.length != 2)
   {
      throw new ActiveMQStompException("Incorrect heartbeat header " + heartBeatHeader);
   }
   long minPingInterval;
   long minAcceptInterval;
   try
   {
      //client ping
      minPingInterval = Long.parseLong(params[0]);
      //client receive ping
      minAcceptInterval = Long.parseLong(params[1]);
   }
   catch (NumberFormatException e)
   {
      // Previously a non-numeric value escaped as an unchecked exception;
      // report it as a protocol error like the other validation above.
      throw new ActiveMQStompException("Incorrect heartbeat header " + heartBeatHeader);
   }
   if ((minPingInterval != 0) || (minAcceptInterval != 0))
   {
      heartBeater = new HeartBeater(minPingInterval, minAcceptInterval);
   }
}
/**
 * Handles DISCONNECT: stops the heart-beat thread (if any) and waits for it
 * to finish before the connection is torn down.
 */
@Override
public StompFrame onDisconnect(StompFrame frame)
{
   if (this.heartBeater != null)
   {
      heartBeater.shutdown();
      try
      {
         heartBeater.join();
      }
      catch (InterruptedException e)
      {
         ActiveMQServerLogger.LOGGER.errorOnStompHeartBeat(e);
         // Preserve the interrupt status for callers further up the stack
         // (previously the interrupt was silently swallowed).
         Thread.currentThread().interrupt();
      }
   }
   return null;
}
/**
 * Handles UNSUBSCRIBE. STOMP 1.1 identifies the subscription solely by its
 * "id" header; an error frame is returned when it is missing or the
 * unsubscribe fails, null on success.
 */
@Override
public StompFrame onUnsubscribe(StompFrame request)
{
   String id = request.getHeader(Stomp.Headers.Unsubscribe.ID);
   if (id == null)
   {
      return BUNDLE.needSubscriptionID().getFrame();
   }
   String durableSubscriberName = request.getHeader(Stomp.Headers.Unsubscribe.DURABLE_SUBSCRIBER_NAME);
   try
   {
      connection.unsubscribe(id, durableSubscriberName);
   }
   catch (ActiveMQStompException e)
   {
      return e.getFrame();
   }
   return null;
}
/**
 * Handles ACK. Requires a "subscription" header; transactional ACKs are not
 * supported and fall through to a plain acknowledge with a warning. Returns
 * an error frame on failure, null on success.
 */
@Override
public StompFrame onAck(StompFrame request)
{
   String messageID = request.getHeader(Stomp.Headers.Ack.MESSAGE_ID);
   String txID = request.getHeader(Stomp.Headers.TRANSACTION);
   String subscriptionID = request.getHeader(Stomp.Headers.Ack.SUBSCRIPTION);
   if (txID != null)
   {
      // Transactional ACK is unsupported; warn and acknowledge outside the tx.
      ActiveMQServerLogger.LOGGER.stompTXAckNorSupported();
   }
   if (subscriptionID == null)
   {
      return BUNDLE.needSubscriptionID().getFrame();
   }
   try
   {
      connection.acknowledge(messageID, subscriptionID);
   }
   catch (ActiveMQStompException e)
   {
      return e.getFrame();
   }
   return null;
}
// The STOMP command is treated as CONNECT for a not-yet-authenticated client.
@Override
public StompFrame onStomp(StompFrame request)
{
if (!connection.isValid())
{
return onConnect(request);
}
return null;
}
@Override
public StompFrame onNack(StompFrame request)
{
//this eventually means discard the message (it will never be redelivered again).
//we can consider supporting redeliver to a different sub.
return onAck(request);
}
// Called after each reply frame is written back to the client.
@Override
public void replySent(StompFrame reply)
{
if (reply.getCommand().equals(Stomp.Responses.CONNECTED))
{
//kick off the pinger
startHeartBeat();
}
if (reply.needsDisconnect())
{
connection.disconnect(false);
}
else
{
//update ping: any outgoing frame counts as server traffic
if (heartBeater != null)
{
heartBeater.pinged();
}
}
}
// Starts the heart-beat thread, if heart-beating was negotiated on CONNECT.
private void startHeartBeat()
{
if (heartBeater != null)
{
heartBeater.start();
}
}
// A server-to-client ping is an empty STOMP frame flagged as a ping.
public StompFrame createPingFrame()
{
StompFrame frame = new StompFrame(Stomp.Commands.STOMP);
frame.setPing(true);
return frame;
}
//server heart beat
//algorithm:
//(a) server ping: if server hasn't sent any frame within serverPing
//interval, send a ping.
//(b) accept ping: if server hasn't received any frame within
// 2*serverAcceptPing, disconnect!
private class HeartBeater extends Thread
{
// Floors applied to the client-requested intervals, in milliseconds.
private static final int MIN_SERVER_PING = 500;
private static final int MIN_CLIENT_PING = 500;
// Interval within which the server must send a frame (0 = disabled).
long serverPing = 0;
// Interval within which the client promised to send a frame (0 = disabled).
long serverAcceptPing = 0;
volatile boolean shutdown = false;
// Time the server last sent any frame to the client.
AtomicLong lastPingTime = new AtomicLong(0);
// Time the server last received any frame from the client.
AtomicLong lastAccepted = new AtomicLong(0);
StompFrame pingFrame;
public HeartBeater(long clientPing, long clientAcceptPing)
{
if (clientPing != 0)
{
serverAcceptPing = clientPing > MIN_CLIENT_PING ? clientPing : MIN_CLIENT_PING;
}
if (clientAcceptPing != 0)
{
serverPing = clientAcceptPing > MIN_SERVER_PING ? clientAcceptPing : MIN_SERVER_PING;
}
}
// Flags the run loop to exit and wakes it from its timed wait.
public synchronized void shutdown()
{
shutdown = true;
this.notify();
}
// "sx,sy" value advertised to the client in the CONNECTED frame.
public String getServerHeartBeatValue()
{
return String.valueOf(serverPing) + "," + String.valueOf(serverAcceptPing);
}
// Records that the server just sent a frame (any frame counts as a ping).
public void pinged()
{
lastPingTime.set(System.currentTimeMillis());
}
@Override
public void run()
{
lastAccepted.set(System.currentTimeMillis());
pingFrame = createPingFrame();
synchronized (this)
{
while (!shutdown)
{
long dur1 = 0;
long dur2 = 0;
if (serverPing != 0)
{
// (a) send a ping if we have been silent for a full serverPing interval.
dur1 = System.currentTimeMillis() - lastPingTime.get();
if (dur1 >= serverPing)
{
lastPingTime.set(System.currentTimeMillis());
connection.ping(pingFrame);
dur1 = 0;
}
}
if (serverAcceptPing != 0)
{
// (b) drop the connection if the client has been silent too long.
dur2 = System.currentTimeMillis() - lastAccepted.get();
if (dur2 > (2 * serverAcceptPing))
{
connection.disconnect(false);
shutdown = true;
break;
}
}
// Sleep until the earlier of the two timers' next deadlines.
long waitTime1 = 0;
long waitTime2 = 0;
if (serverPing > 0)
{
waitTime1 = serverPing - dur1;
}
if (serverAcceptPing > 0)
{
waitTime2 = serverAcceptPing * 2 - dur2;
}
long waitTime = 10L;
if ((waitTime1 > 0) && (waitTime2 > 0))
{
waitTime = Math.min(waitTime1, waitTime2);
}
else if (waitTime1 > 0)
{
waitTime = waitTime1;
}
else if (waitTime2 > 0)
{
waitTime = waitTime2;
}
try
{
this.wait(waitTime);
}
catch (InterruptedException e)
{
}
}
}
}
// Records that the server just received a frame from the client.
public void pingAccepted()
{
this.lastAccepted.set(System.currentTimeMillis());
}
}
// Called whenever a complete frame arrives from the client; any inbound
// traffic counts as a client heart-beat.
@Override
public void requestAccepted(StompFrame request)
{
if (heartBeater != null)
{
heartBeater.pingAccepted();
}
}
@Override
public StompFrame createStompFrame(String command)
{
return new StompFrameV11(command);
}
// Carries over partially-decoded buffer state from the handler that was in
// place before this version was negotiated.
@Override
public void initDecoder(VersionedStompFrameHandler existingHandler)
{
decoder.init(existingHandler.getDecoder());
}
// STOMP 1.1 decoder: adds backslash escaping of header text and tolerance
// for heart-beat EOLs between frames.
protected class StompDecoderV11 extends StompDecoder
{
// True while the previous header byte was an unconsumed escape character.
protected boolean isEscaping = false;
// Accumulates the bytes of the header name/value currently being decoded.
protected SimpleBytes holder = new SimpleBytes(1024);
@Override
public void init(StompDecoder decoder)
{
this.data = decoder.data;
this.workingBuffer = decoder.workingBuffer;
this.pos = decoder.pos;
this.command = decoder.command;
}
@Override
public void init()
{
super.init();
isEscaping = false;
holder.reset();
}
// Identifies the frame command by inspecting the first distinguishing bytes
// in the working buffer, after skipping any heart-beat EOLs that precede it.
// Returns false when more bytes are needed before a decision can be made.
@Override
protected boolean parseCommand() throws ActiveMQStompException
{
int offset = 0;
boolean nextChar = false;
//check for ping
// Some badly behaved STOMP clients add a \n *after* the terminating NUL char at the end of the
// STOMP frame this can manifest as an extra \n at the beginning when the
// next STOMP frame is read - we need to deal with this.
// Besides, Stomp 1.2 allows for extra EOLs after NULL (i.e.
// either "[\r]\n"s or "\n"s)
while (true)
{
if (workingBuffer[offset] == NEW_LINE)
{
if (heartBeater != null)
{
//client ping
heartBeater.pingAccepted();
}
nextChar = false;
}
else if (workingBuffer[offset] == CR)
{
// A CR is only legal when followed by a LF; two in a row is an error.
if (nextChar) throw BUNDLE.invalidTwoCRs();
nextChar = true;
}
else
{
break;
}
offset++;
if (offset == data) return false; //no more bytes
}
if (nextChar)
{
// A trailing CR with no following LF is malformed.
throw BUNDLE.badCRs();
}
//if some EOLs have been processed, drop those bytes before parsing command
if (offset > 0)
{
System.arraycopy(workingBuffer, offset, workingBuffer, 0, data - offset);
data = data - offset;
offset = 0;
}
if (data < 4)
{
// Need at least four bytes to identify the command
// - up to 3 bytes for the command name + potentially another byte for a leading \n
return false;
}
byte b = workingBuffer[offset];
switch (b)
{
case A:
{
if (workingBuffer[offset + 1] == StompDecoder.B)
{
if (!tryIncrement(offset + COMMAND_ABORT_LENGTH + eolLen))
{
return false;
}
// ABORT
command = COMMAND_ABORT;
}
else
{
if (!tryIncrement(offset + COMMAND_ACK_LENGTH + eolLen))
{
return false;
}
// ACK
command = COMMAND_ACK;
}
break;
}
case B:
{
if (!tryIncrement(offset + COMMAND_BEGIN_LENGTH + eolLen))
{
return false;
}
// BEGIN
command = COMMAND_BEGIN;
break;
}
case C:
{
if (workingBuffer[offset + 2] == M)
{
if (!tryIncrement(offset + COMMAND_COMMIT_LENGTH + eolLen))
{
return false;
}
// COMMIT
command = COMMAND_COMMIT;
}
/**** added by meddy, 27 april 2011, handle header parser for reply to websocket protocol ****/
else if (workingBuffer[offset + 7] == E)
{
if (!tryIncrement(offset + COMMAND_CONNECTED_LENGTH + eolLen))
{
return false;
}
// CONNECTED
command = COMMAND_CONNECTED;
}
/**** end ****/
else
{
if (!tryIncrement(offset + COMMAND_CONNECT_LENGTH + eolLen))
{
return false;
}
// CONNECT
command = COMMAND_CONNECT;
}
break;
}
case D:
{
if (!tryIncrement(offset + COMMAND_DISCONNECT_LENGTH + eolLen))
{
return false;
}
// DISCONNECT
command = COMMAND_DISCONNECT;
break;
}
case R:
{
if (!tryIncrement(offset + COMMAND_RECEIPT_LENGTH + eolLen))
{
return false;
}
// RECEIPT
command = COMMAND_RECEIPT;
break;
}
/**** added by meddy, 27 april 2011, handle header parser for reply to websocket protocol ****/
case E:
{
if (!tryIncrement(offset + COMMAND_ERROR_LENGTH + eolLen))
{
return false;
}
// ERROR
command = COMMAND_ERROR;
break;
}
case M:
{
if (!tryIncrement(offset + COMMAND_MESSAGE_LENGTH + eolLen))
{
return false;
}
// MESSAGE
command = COMMAND_MESSAGE;
break;
}
/**** end ****/
case S:
{
if (workingBuffer[offset + 1] == E)
{
if (!tryIncrement(offset + COMMAND_SEND_LENGTH + eolLen))
{
return false;
}
// SEND
command = COMMAND_SEND;
}
else if (workingBuffer[offset + 1] == U)
{
if (!tryIncrement(offset + COMMAND_SUBSCRIBE_LENGTH + eolLen))
{
return false;
}
// SUBSCRIBE
command = COMMAND_SUBSCRIBE;
}
else
{
if (!tryIncrement(offset + StompDecoder.COMMAND_STOMP_LENGTH + eolLen))
{
return false;
}
// STOMP
command = COMMAND_STOMP;
}
break;
}
case U:
{
if (!tryIncrement(offset + COMMAND_UNSUBSCRIBE_LENGTH + eolLen))
{
return false;
}
// UNSUBSCRIBE
command = COMMAND_UNSUBSCRIBE;
break;
}
case N:
{
if (!tryIncrement(offset + COMMAND_NACK_LENGTH + eolLen))
{
return false;
}
//NACK
command = COMMAND_NACK;
break;
}
default:
{
throwInvalid();
}
}
checkEol();
return true;
}
// Every command line must be terminated by a newline.
protected void checkEol() throws ActiveMQStompException
{
if (workingBuffer[pos - 1] != NEW_LINE)
{
throwInvalid();
}
}
// Decodes headers byte-by-byte, applying the STOMP 1.1 backslash escapes
// (\\ -> backslash, \n -> newline, \c -> colon). Returns false when the
// buffer ran out before the blank line that terminates the header section.
@Override
protected boolean parseHeaders() throws ActiveMQStompException
{
outer:
while (true)
{
byte b = workingBuffer[pos++];
switch (b)
{
//escaping
case ESC_CHAR:
{
if (isEscaping)
{
//this is a backslash
holder.append(b);
isEscaping = false;
}
else
{
//begin escaping
isEscaping = true;
}
break;
}
case HEADER_SEPARATOR:
{
if (inHeaderName)
{
headerName = holder.getString();
holder.reset();
inHeaderName = false;
headerValueWhitespace = true;
}
// NOTE(review): a raw (unescaped) ':' inside a header value is dropped
// here rather than appended -- confirm this leniency is intended.
whiteSpaceOnly = false;
break;
}
case StompDecoder.LN:
{
if (isEscaping)
{
// "\n" escape sequence -> literal newline byte
holder.append(StompDecoder.NEW_LINE);
isEscaping = false;
}
else
{
holder.append(b);
}
break;
}
case StompDecoder.c:
{
if (isEscaping)
{
// "\c" escape sequence -> literal colon byte
holder.append(StompDecoder.HEADER_SEPARATOR);
isEscaping = false;
}
else
{
holder.append(b);
}
break;
}
case StompDecoder.NEW_LINE:
{
if (whiteSpaceOnly)
{
// Headers are terminated by a blank line
readingHeaders = false;
break outer;
}
String headerValue = holder.getString();
holder.reset();
headers.put(headerName, headerValue);
if (headerName.equals(CONTENT_LENGTH_HEADER_NAME))
{
contentLength = Integer.parseInt(headerValue);
}
if (headerName.equals(CONTENT_TYPE_HEADER_NAME))
{
contentType = headerValue;
}
whiteSpaceOnly = true;
inHeaderName = true;
headerValueWhitespace = false;
break;
}
default:
{
whiteSpaceOnly = false;
headerValueWhitespace = false;
holder.append(b);
}
}
if (pos == data)
{
// Run out of data
return false;
}
}
return true;
}
// Extracts the frame body. When a content-length header was seen, reads
// exactly that many bytes and drains up to the terminating NUL; otherwise
// scans for the NUL. Returns the completed frame, or null when more bytes
// are needed.
// NOTE(review): unlike parseCommand/parseHeaders this method carries no
// @Override annotation -- confirm it matches the superclass signature.
protected StompFrame parseBody() throws ActiveMQStompException
{
byte[] content = null;
if (contentLength != -1)
{
if (pos + contentLength + 1 > data)
{
// Need more bytes
}
else
{
content = new byte[contentLength];
System.arraycopy(workingBuffer, pos, content, 0, contentLength);
pos += contentLength;
//drain all the rest
if (bodyStart == -1)
{
bodyStart = pos;
}
while (pos < data)
{
if (workingBuffer[pos++] == 0)
{
break;
}
}
}
}
else
{
// Need to scan for terminating NUL
if (bodyStart == -1)
{
bodyStart = pos;
}
while (pos < data)
{
if (workingBuffer[pos++] == 0)
{
content = new byte[pos - bodyStart - 1];
System.arraycopy(workingBuffer, bodyStart, content, 0, content.length);
break;
}
}
}
if (content != null)
{
if (data > pos)
{
// Tolerate one optional trailing EOL after the NUL, then shift any
// bytes of the next frame to the front of the buffer.
if (workingBuffer[pos] == NEW_LINE) pos++;
if (data > pos)
// More data still in the buffer from the next packet
System.arraycopy(workingBuffer, pos, workingBuffer, 0, data - pos);
}
data = data - pos;
// reset
StompFrame ret = new StompFrameV11(command, headers, content);
init();
return ret;
}
else
{
return null;
}
}
}
}
| |
/*
 * @(#)GlacierMetadata.java 0.7.0 2013 May 9
 *
 * Copyright (c) 2013 Amherst Robots, Inc.
 * All rights reserved.
*
* See LICENSE file accompanying this file.
*/
package com.vrane.metaGlacierSDK;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.CookiePolicy;
import java.net.CookieStore;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.prefs.Preferences;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
/**
* This abstract class is the base class with the detailed-level information
* about the metadata provider.
 * It also handles low-level HTTP calls so that classes extending it do not
 * need to do so.
*
* @author K Z Win
*/
public abstract class GlacierMetadata {
// Parameter/column names understood by the metadata web API.
final static String API_KEY_VAULT_USER = "user";
final static String API_KEY_VAULT = "vault";
final static String API_KEY_COMPUTER_ID = "computer_id";
final static String API_ARCHIVE_COLUMN_DESCRIPTION
= "description";
final static String SYSTEM_USERNAME
= System.getProperty("user.name");
final static String BASE_URL = "https://ssl.vrane.com/bin/api/";
// Sign-in/session state shared across all GlacierMetadata instances.
private static boolean sign_in_error = false;
private static long time_of_last_signin_attempt = Long.MIN_VALUE;
private static String session_pass;
private static long session_expired_time = Long.MIN_VALUE;
private static ObjectMapper mapper;
private static CookieStore jar;
private static boolean initialiased = false;
private static CookieManager _COOKIE_MANAGER;
private static MetadataProviderCredentials mpc;
private final static String API_KEY_REGION = "region";
private final static long SESSION_DURATION = 86400100; // in ms
private final static Preferences P
= Preferences.userNodeForPackage(GlacierMetadata.class);
private final static String API_COUNT = "api call counter";
private final static String API_COUNT_LAST_RESET
= "api call counter reset time";
// NOTE(review): API_LOGIN_PASSWORD and API_SESSION_KEY share the value
// "pass" -- presumably the server reuses the same field name; confirm.
private final static String API_LOGIN_PASSWORD = "pass";
private final static String API_LOGIN_USER = "email";
private final static String API_SESSION_KEY = "pass";
private final static String GLACIER_BASE_URL = BASE_URL + "aws/glacier/";
private final static String LOGIN_URL = BASE_URL + "login2";
private final static Logger LGR
= Logger.getLogger(GlacierMetadata.class.getName());
// Raw JSON text of the most recent API response.
private String json_text = "";
// Payload of the most recent response when it is a JSON array.
private ArrayList<Object> arrayPayload;
// AWS region this instance operates on; null for region-less subclasses.
private String region;
// Payload of the most recent response when it is a JSON object.
private Map hashPayload;
/**
 * This constructor is used in a sub-object creation by calling
 * super(region)
 *
 * @param _region is a string such as 'us-west-2', 'eu-west-1'
 */
protected GlacierMetadata(final String _region){
region = _region;
init();
}
/**
 * Default constructor to be used by subclasses not wanting to specify
 * region. In this sdk, it is used by <code>Search</code> class for
 * searching archives in all available regions.
 */
protected GlacierMetadata(){
init();
}
// One-time, process-wide installation of the shared cookie manager so the
// session cookie from sign-in is replayed on subsequent requests.
// NOTE(review): the check-then-set on 'initialiased' is not thread-safe;
// assumed to run on a single thread -- confirm.
private void init(){
if (initialiased) {
return;
}
_COOKIE_MANAGER = new CookieManager(jar,
CookiePolicy.ACCEPT_ORIGINAL_SERVER);
CookieHandler.setDefault(_COOKIE_MANAGER);
initialiased = true;
}
/**
 * Gets the AWS region string.
 *
 * @return string such as 'us-west-2' and 'us-east-1' indicating AWS regions,
 *         or null when this instance was constructed without a region
 */
public String getRegion(){
return region;
}
/**
 * Sets the credentials required to sign-in the metadata server.
 * Stored statically, so they apply to all instances.
 *
 * @see <code>MedataProviderCredentials</code>
 * @param m interface with account credential
 */
public static void setCredentials(final MetadataProviderCredentials m){
mpc = m;
}
/**
* Signs-in to the metadata account and obtain a session.
* On subsequent call a session cookie is sent (behind the scene by the
* cookie manager) until the session expiration time.
* The last time when the application set the credentials is also checked;
* so a new session cookie will be retrieved if the credentials change
* before the session expiration
*
* @return true on successful sign-in
* @throws APIException
*/
private boolean signin() throws SDKException, SignInException{
final long current_time = System.currentTimeMillis();
HashMap<String, String> signinData = new HashMap<>();
if (session_pass != null && session_expired_time > current_time
&& time_of_last_signin_attempt > mpc.lastMPCSet()) {
return true;
}
LGR.info("signing-in to the metadata account");
if (sign_in_error && time_of_last_signin_attempt > mpc.lastMPCSet()){
LGR.warning("error at last sign-in; not using these credentials");
return false;
}
time_of_last_signin_attempt = current_time;
signinData.put(API_LOGIN_USER, mpc.getMPCUser());
signinData.put(API_LOGIN_PASSWORD, mpc.getMPCPass());
String session_data = post(LOGIN_URL, signinData);
sign_in_error = true;
SignInException e2 = null;
try {
final ArrayList<String> session_array = (ArrayList<String>)
getJSONMapper().readValue(session_data, ArrayList.class);
session_pass = session_array.get(0);
sign_in_error = false;
session_expired_time = current_time + SESSION_DURATION;
return true;
} catch (JsonMappingException e) {
LGR.log(Level.SEVERE, null, e);
LGR.log(Level.INFO,
"Failed to sign in. Please check your credentials {0}",
session_data);
e2 = new SignInException("Failed to sign-in");
} catch (IOException ex) {
LGR.log(Level.SEVERE, null, ex);
}
if (e2 != null) {
throw e2;
}
return false;
}
    /**
     * Gets the flag whether the last sign-in was successful.
     *
     * @return true if the most recent sign-in attempt failed
     */
    public static boolean failedToSignIn() {
        return sign_in_error;
    }
/**
* This sets up a global json related object once per JVM.
*/
private ObjectMapper getJSONMapper(){
if (mapper == null) {
mapper = new ObjectMapper(); // can reuse, share globally
}
return mapper;
}
/**
* Does the actual HTTP post operation.
* By this time, the actual post data to be sent over the wire is in the
* form of a <code>HashMap<String, String></code>
*
* @param urlString fully qualified url string constructed in subclasses.
* @param postData in <code>HashMap</code>
* @return raw string of the response
* @throws SDKException
*/
protected String post(final String urlString,
final HashMap<String, String> postData) throws SDKException
{
String response = "";
final String cont = jsonPostContent(postData);
final URL url;
final HttpURLConnection connection;
PrintWriter out;
final BufferedReader in;
String line;
try {
url = new URL(urlString);
} catch (MalformedURLException ex) {
LGR.log(Level.INFO, null, ex);
throw new SDKException("cannot parse url");
}
try {
connection = (HttpURLConnection) url.openConnection();
} catch (IOException ex) {
LGR.log(Level.INFO, null, ex);
throw new SDKException("cannot create http connection");
}
try {
connection.setRequestMethod("POST");
} catch (ProtocolException ex) {
LGR.log(Level.INFO, null, ex);
throw new SDKException("'POST' method not acceptable as a request");
}
connection.setRequestProperty("Content-Length",
String.valueOf(cont.length()));
connection.setRequestProperty("Content-type", "application/json");
connection.setDoOutput(true);
try {
out = new PrintWriter(connection.getOutputStream());
} catch (IOException ex) {
LGR.log(Level.INFO, null, ex);
LGR.severe("Retrying in 20 s");
try {
Thread.sleep(20000);
} catch (InterruptedException ex1) {
LGR.log(Level.SEVERE, null, ex1);
throw new SDKException("interrputed");
}
try {
out = new PrintWriter(connection.getOutputStream());
} catch (IOException ex1) {
LGR.log(Level.SEVERE, null, ex1);
throw new SDKException("error saving metadata on 2nd attempt");
}
}
out.println(cont);
out.close();
try {
in = new BufferedReader(new InputStreamReader
(connection.getInputStream()));
} catch (IOException ex) {
LGR.log(Level.SEVERE, null, ex);
throw new SDKException("IO error creating input stream");
}
try {
while ((line = in.readLine()) != null){
response += line;
}
} catch (IOException ex) {
LGR.log(Level.SEVERE, null, ex);
throw new SDKException("IO error while receiving data from API server");
}
try {
in.close();
} catch (IOException ex) {
LGR.log(Level.INFO, null, ex);
throw new SDKException("IO error while closing input stream");
}
LGR.log(Level.INFO, "content length is {0}",
connection.getContentLength());
return response;
}
private String jsonPostContent(final HashMap kv){
Writer json = new StringWriter();
try {
getJSONMapper().writeValue(json, kv);
} catch (IOException ex) {
LGR.log(Level.SEVERE, null, ex);
return null;
}
return json.toString();
}
    /**
     * Increases global counter of API calls by 1.
     *
     * This counter is saved to the "Preferences" database every time the method
     * is called and hence it persists between multiple JVM sessions.
     */
    private static synchronized void incrementCounter(){
        P.putInt(API_COUNT, 1 + P.getInt(API_COUNT, 0));
    }
    /**
     * Gets the number of http requests to metadata server.
     * This information is retrieved from the local java preferences DB.
     *
     * @return the total count of the API call since last reset
     */
    public static int getAPIcounter(){
        return P.getInt(API_COUNT, 0);
    }
/**
* Gets the time when counter reset was last done.
* This information is retrieved from the local preferences DB.
*
* @return the epoch in millisecond when the API count was reset.
*/
public static long getAPICounterResetTime(){
long counter_reset_time
= P.getLong(API_COUNT_LAST_RESET, 0);
if (counter_reset_time == 0) {
final long now = System.currentTimeMillis();
counter_reset_time = now;
P.putLong(API_COUNT_LAST_RESET, now);
}
return counter_reset_time;
}
    /**
     * Resets the API call counter.
     * This is the number of http requests to the server; the reset timestamp
     * is recorded and the counter entry removed from the preferences DB.
     */
    public static synchronized void resetCounter(){
        P.putLong(API_COUNT_LAST_RESET, System.currentTimeMillis());
        P.remove(API_COUNT);
    }
    /**
     * Gets the array form of the payload.
     * Depending on the API call, Metadata provider can return either an array
     * of objects or a hash object. This method to be used when the expected
     * data is an array. See also <code>getHashPayload</code>
     *
     * @return parsed json data in <code>ArrayList</code>; may be null if the
     *         last response was not an array
     */
    protected ArrayList getArrayPayload(){
        return arrayPayload;
    }
/**
* This private method returns an array of error strings.
* Used internally by this class only.
*
* @param rawResponse
* @return <code>ArrayList</code> of errors
* @throws JsonMappingException
* @throws JsonProcessingException
* @throws IOException
*/
private ArrayList<String> getError(final String rawResponse)
throws JsonMappingException, JsonProcessingException, IOException{
final Map<String, ArrayList<String>> map = getJSONMapper()
.readValue(rawResponse, Map.class);
final ArrayList<String> errors = map.get("error");
if (null == errors) {
hashPayload = map;
}
return errors;
}
/**
* Mediates between the Java data providers in this sdk and the
* json data returned by the API.
* Its major work is in parsing the json data from API and doing whatever
* appropriate for the data. It first looks for true or false answer from
* API in the form of 1 byte string "1" and "0". If it recognizes such
* strings, then it returns <code>true</code> or <code>false</code>. If they
* are not found, it tries to parse the json data as a hash. If this
* succeeds, it looks for
* "error" as a key in this hash. If this error is found then it returns
* false. If the "error" key is not found, then it returns "true". The hash
* data is available in <code>getHashPayload</code> method. If the hash
* parsing fails, it tries to parse an array and if this succeeds, then it
* returns true and the array data is available in
* <code>getArrayPayload</code> method.
*
* @param uri string representing the metadata endpoint
* @param d <code>HashMap</code> data
* @return true if successful
* @throws APIException
* @throws SDKException
*/
protected boolean performOperation(final String uri, final HashMap d)
throws SDKException, APIException, SignInException{
if (!signin()) {
return false;
}
d.put(API_SESSION_KEY, session_pass);
if (region != null){
d.put(API_KEY_REGION, region);
}
json_text = post(GLACIER_BASE_URL + uri, d);
incrementCounter();
switch (json_text) {
case "1":
LGR.info("received string '1' from metadata provider");
return true;
case "0":
LGR.info("received string '0' from metadata provider");
return false;
}
LGR.info("checking for error information");
LGR.fine(json_text);
boolean api_error = false;
try{
ArrayList<String> errors = getError(json_text);
if (hashPayload != null) {
return true;
}
for (final String s: errors){
LGR.warning(s);
}
api_error = true;
} catch (JsonMappingException ex) {
//LGR.log(Level.SEVERE, null, ex);
//Intentional no logging
} catch (Exception ex) {
LGR.log(Level.SEVERE, null, ex);
}
if (api_error) {
throw new APIException("Metadata internal error");
}
LGR.log(Level.INFO,
"parsing array data structure from string of size {0}",
json_text.length());
try {
arrayPayload = getJSONMapper()
.readValue(json_text, ArrayList.class);
return true;
} catch (JsonParseException ex) {
LGR.log(Level.SEVERE, null, ex);
} catch (JsonMappingException ex) {
LGR.log(Level.SEVERE, null, ex);
} catch (IOException ex) {
LGR.log(Level.SEVERE, null, ex);
} catch (Exception ex){
LGR.log(Level.SEVERE, null, ex);
}
return false;
}
    /**
     * Gets raw string response from the server.
     *
     * @return the raw response string of the most recent API call
     */
    protected String getResponseContent(){
        return json_text;
    }
    /**
     * Gets the hash form of the payload.
     * Depending on the API call, Metadata provider can return either an array
     * of objects or a hash object.
     * Use this method when the expected data is a hash.
     *
     * @return parsed json data in <code>Map</code> structure; may be null if
     *         the last response was not a hash
     */
    protected Map getHashPayload(){
        return hashPayload;
    }
}
| |
/**
* Copyright 2011-2013 FoundationDB, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* The original from which this derives bore the following: */
/*
Derby - Class org.apache.derby.impl.sql.compile.CreateTableNode
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.foundationdb.sql.parser;
import com.foundationdb.sql.StandardException;
import java.util.Properties;
/**
* A CreateTableNode is the root of a QueryTree that represents a CREATE TABLE or DECLARE GLOBAL TEMPORARY TABLE
* statement.
*
*/
public class CreateTableNode extends DDLStatementNode
{
    // Table categories (mirroring the Derby table descriptor types).
    public static final int BASE_TABLE_TYPE = 0;
    public static final int SYSTEM_TABLE_TYPE = 1;
    public static final int VIEW_TYPE = 2;
    public static final int GLOBAL_TEMPORARY_TABLE_TYPE = 3;
    public static final int SYNONYM_TYPE = 4;
    public static final int VTI_TYPE = 5;
    // Lock granularity markers: row-level ('R') or table-level ('T') locking.
    public static final char ROW_LOCK_GRANULARITY = 'R';
    public static final char TABLE_LOCK_GRANULARITY = 'T';
    public static final char DEFAULT_LOCK_GRANULARITY = ROW_LOCK_GRANULARITY;
    private char lockGranularity;
    private boolean onCommitDeleteRows; //If true, on commit delete rows else on commit preserve rows of temporary table.
    private boolean onRollbackDeleteRows; //If true, on rollback delete rows from temp table if it was logically modified in that UOW. true is the only supported value
    private Properties properties;
    private TableElementList tableElementList;
    protected int tableType; //persistent table or global temporary table
    private ResultColumnList resultColumns; //optional column list for CREATE TABLE ... AS
    private CursorNode queryExpression; //source query for CREATE TABLE ... AS
    private boolean withData; //true for CREATE TABLE ... AS <query> WITH DATA
    private ExistenceCheck existenceCheck; //IF [NOT] EXISTS handling
    private StorageFormatNode storageFormat;
    private String createAsQuery; //raw SQL text of the AS <query> clause
    /**
     * Initializer for a CreateTableNode for a base table
     *
     * @param newObjectName The name of the new object being created (ie base table)
     * @param tableElementList The elements of the table: columns,
     *        constraints, etc.
     * @param properties The optional list of properties associated with
     *        the table.
     * @param lockGranularity The lock granularity.
     * @param existenceCheck The IF [NOT] EXISTS behavior.
     * @param storageFormat The storage format to use.
     *
     * @exception StandardException Thrown on error
     */
    public void init(Object newObjectName,
                     Object tableElementList,
                     Object properties,
                     Object lockGranularity,
                     Object existenceCheck,
                     Object storageFormat)
            throws StandardException {
        tableType = BASE_TABLE_TYPE;
        this.lockGranularity = ((Character)lockGranularity).charValue();
        implicitCreateSchema = true;
        assert (this.lockGranularity == TABLE_LOCK_GRANULARITY ||
                this.lockGranularity == ROW_LOCK_GRANULARITY);
        initAndCheck(newObjectName);
        this.tableElementList = (TableElementList)tableElementList;
        this.properties = (Properties)properties;
        this.existenceCheck = (ExistenceCheck)existenceCheck;
        this.storageFormat = (StorageFormatNode)storageFormat;
    }
    /**
     * Initializer for a CreateTableNode for a global temporary table
     *
     * @param newObjectName The name of the new object being declared (ie temporary table)
     * @param tableElementList The elements of the table: columns,
     *        constraints, etc.
     * @param onCommitDeleteRows If true, on commit delete rows else on commit preserve rows of temporary table.
     * @param onRollbackDeleteRows If true, on rollback, delete rows from temp tables which were logically modified. true is the only supported value
     * @param existenceCheck The IF [NOT] EXISTS behavior.
     *
     * @exception StandardException Thrown on error
     */
    public void init(Object newObjectName,
                     Object tableElementList,
                     Object onCommitDeleteRows,
                     Object onRollbackDeleteRows,
                     Object existenceCheck)
            throws StandardException {
        tableType = GLOBAL_TEMPORARY_TABLE_TYPE;
        // Temporary tables live (implicitly or explicitly) in the SESSION schema.
        newObjectName = tempTableSchemaNameCheck(newObjectName);
        this.onCommitDeleteRows = ((Boolean)onCommitDeleteRows).booleanValue();
        this.onRollbackDeleteRows = ((Boolean)onRollbackDeleteRows).booleanValue();
        initAndCheck(newObjectName);
        this.tableElementList = (TableElementList)tableElementList;
        this.existenceCheck = (ExistenceCheck)existenceCheck;
        assert this.onRollbackDeleteRows;
    }
    /**
     * Initializer for a CreateTableNode for a base table create from a query
     *
     * @param newObjectName The name of the new object being created
     *        (ie base table).
     * @param resultColumns The optional column list.
     * @param queryExpression The query expression for the table.
     * @param c The IF [NOT] EXISTS behavior.
     * @param createAsQuery select query inside sql query
     * @param isCreateAs used only to distinguish this constructors
     * @param isCreateAsAgain used only to distinguish this constructor
     */
    public void init(Object newObjectName,
                     Object resultColumns,
                     Object queryExpression,
                     Object c,
                     Object createAsQuery,
                     Object isCreateAs,
                     Object isCreateAsAgain) throws StandardException {
        this.createAsQuery = (String)createAsQuery;
        tableType = BASE_TABLE_TYPE;
        lockGranularity = DEFAULT_LOCK_GRANULARITY;
        implicitCreateSchema = true;
        initAndCheck(newObjectName);
        this.resultColumns = (ResultColumnList)resultColumns;
        this.queryExpression = (CursorNode)queryExpression;
        this.existenceCheck = (ExistenceCheck) c;
    }
    /**
     * Fill this node with a deep copy of the given node.
     * NOTE(review): assumes copyNode handles null children -- confirm.
     */
    public void copyFrom(QueryTreeNode node) throws StandardException {
        super.copyFrom(node);
        CreateTableNode other = (CreateTableNode)node;
        this.lockGranularity = other.lockGranularity;
        this.onCommitDeleteRows = other.onCommitDeleteRows;
        this.onRollbackDeleteRows = other.onRollbackDeleteRows;
        this.properties = other.properties; // TODO: Clone?
        this.tableElementList = (TableElementList)
            getNodeFactory().copyNode(other.tableElementList, getParserContext());
        this.tableType = other.tableType;
        this.resultColumns = (ResultColumnList)
            getNodeFactory().copyNode(other.resultColumns, getParserContext());
        this.queryExpression = (CursorNode)
            getNodeFactory().copyNode(other.queryExpression, getParserContext());
        this.withData = other.withData;
        this.existenceCheck = other.existenceCheck;
        this.storageFormat = (StorageFormatNode)getNodeFactory().copyNode(other.storageFormat,
                                                                          getParserContext());
    }
    /**
     * If no schema name specified for global temporary table, SESSION is the implicit schema.
     * Otherwise, make sure the specified schema name for global temporary table is SESSION.
     * @param objectName The name of the new object being declared (ie temporary table)
     */
    private Object tempTableSchemaNameCheck(Object objectName) throws StandardException {
        TableName tempTableName = (TableName)objectName;
        if (tempTableName != null) {
            if (tempTableName.getSchemaName() == null)
                tempTableName.setSchemaName("SESSION"); //If no schema specified, SESSION is the implicit schema.
            else if (!"SESSION".equals(tempTableName.getSchemaName()))
                throw new StandardException("Must specify SESSION schema");
        }
        return(tempTableName);
    }
    /**
     * Convert this object to a String. See comments in QueryTreeNode.java
     * for how this should be done for tree printing.
     *
     * @return This object as a String
     */
    public String toString() {
        String tempString = "";
        if (tableType == GLOBAL_TEMPORARY_TABLE_TYPE) {
            tempString = tempString + "onCommitDeleteRows: " + "\n" + onCommitDeleteRows + "\n";
            tempString = tempString + "onRollbackDeleteRows: " + "\n" + onRollbackDeleteRows + "\n";
        }
        else
            tempString = tempString +
                (properties != null ? "properties: " + "\n" + properties + "\n" : "") +
                "withData: " + withData + "\n" +
                "lockGranularity: " + lockGranularity + "\n";
        tempString += "existenceCheck: " + existenceCheck + "\n";
        return super.toString() + tempString;
    }
    /** @return the table elements (columns, constraints, etc.) */
    public TableElementList getTableElementList() {
        return tableElementList;
    }
    /** @return the source query of CREATE TABLE ... AS, if any */
    public CursorNode getQueryExpression() {
        return queryExpression;
    }
    /** @return the raw SQL text of the AS query, if any */
    public String getCreateAsQuery() { return createAsQuery;}
    /** @return true when CREATE TABLE ... AS ... WITH DATA */
    public boolean isWithData() {
        return withData;
    }
    /** Marks this node as CREATE TABLE ... AS ... WITH DATA. */
    public void markWithData() {
        withData = true;
    }
    /** @return the IF [NOT] EXISTS behavior */
    public ExistenceCheck getExistenceCheck()
    {
        return existenceCheck;
    }
    /** @return the storage format clause, if any */
    public StorageFormatNode getStorageFormat()
    {
        return storageFormat;
    }
    /** @return the optional column list for CREATE TABLE ... AS */
    public ResultColumnList getResultColumns() { return resultColumns; }
    /**
     * Prints the sub-nodes of this object. See QueryTreeNode.java for
     * how tree printing is supposed to work.
     * @param depth The depth to indent the sub-nodes
     */
    public void printSubNodes(int depth) {
        if (tableElementList != null) {
            printLabel(depth, "tableElementList: ");
            tableElementList.treePrint(depth + 1);
        }
        if (queryExpression != null) {
            printLabel(depth, "queryExpression: ");
            queryExpression.treePrint(depth + 1);
        }
        if (storageFormat != null) {
            printLabel(depth, "storageFormat: ");
            storageFormat.treePrint(depth + 1);
        }
        if(resultColumns != null) {
            printLabel(depth, "resultColumns: ");
            resultColumns.treePrint(depth + 1);
        }
    }
    /** @return the statement keyword(s) this node represents */
    public String statementToString() {
        if (tableType == GLOBAL_TEMPORARY_TABLE_TYPE)
            return "DECLARE GLOBAL TEMPORARY TABLE";
        else
            return "CREATE TABLE";
    }
    /**
     * Accept the visitor for all visitable children of this node.
     * NOTE(review): only storageFormat is replaced by the visitor's return
     * value; the other children are visited in place -- confirm intended.
     *
     * @param v the visitor
     *
     * @exception StandardException on error
     */
    void acceptChildren(Visitor v) throws StandardException {
        super.acceptChildren(v);
        if (tableElementList != null) {
            tableElementList.accept(v);
        }
        if (queryExpression != null) {
            queryExpression.accept(v);
        }
        if (storageFormat != null) {
            storageFormat = (StorageFormatNode)storageFormat.accept(v);
        }
    }
}
| |
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.analytics.model.pnl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.Clock;
import org.threeten.bp.LocalDate;
import org.threeten.bp.Period;
import org.threeten.bp.ZonedDateTime;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.analytics.financial.schedule.HolidayDateRemovalFunction;
import com.opengamma.analytics.financial.schedule.Schedule;
import com.opengamma.analytics.financial.schedule.ScheduleCalculatorFactory;
import com.opengamma.analytics.financial.schedule.TimeSeriesSamplingFunction;
import com.opengamma.analytics.financial.schedule.TimeSeriesSamplingFunctionFactory;
import com.opengamma.analytics.financial.timeseries.util.TimeSeriesDifferenceOperator;
import com.opengamma.core.config.ConfigSource;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeries;
import com.opengamma.core.position.Position;
import com.opengamma.core.security.Security;
import com.opengamma.core.value.MarketDataRequirementNames;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.AbstractFunction;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.OpenGammaExecutionContext;
import com.opengamma.financial.analytics.DoubleLabelledMatrix1D;
import com.opengamma.financial.analytics.fixedincome.InterestRateInstrumentType;
import com.opengamma.financial.analytics.ircurve.FixedIncomeStripWithSecurity;
import com.opengamma.financial.analytics.ircurve.InterpolatedYieldCurveSpecificationWithSecurities;
import com.opengamma.financial.analytics.ircurve.StripInstrumentType;
import com.opengamma.financial.analytics.ircurve.calcconfig.ConfigDBCurveCalculationConfigSource;
import com.opengamma.financial.analytics.ircurve.calcconfig.MultiCurveCalculationConfig;
import com.opengamma.financial.analytics.model.curve.interestrate.FXImpliedYieldCurveFunction;
import com.opengamma.financial.analytics.timeseries.DateConstraint;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesBundle;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunctionUtils;
import com.opengamma.financial.convention.calendar.Calendar;
import com.opengamma.financial.convention.calendar.MondayToFridayCalendar;
import com.opengamma.financial.currency.CurrencyPairs;
import com.opengamma.financial.security.FinancialSecurity;
import com.opengamma.financial.security.FinancialSecurityUtils;
import com.opengamma.financial.security.future.InterestRateFutureSecurity;
import com.opengamma.financial.security.option.IRFutureOptionSecurity;
import com.opengamma.financial.security.swap.SwapSecurity;
import com.opengamma.financial.sensitivities.SecurityEntryData;
import com.opengamma.id.ExternalId;
import com.opengamma.id.UniqueId;
import com.opengamma.timeseries.DoubleTimeSeries;
import com.opengamma.timeseries.date.DateDoubleTimeSeries;
import com.opengamma.util.money.Currency;
import com.opengamma.util.money.UnorderedCurrencyPair;
/**
*
*/
public class YieldCurveNodePnLFunction extends AbstractFunction.NonCompiledInvoker {
  /** The logger */
  private static final Logger s_logger = LoggerFactory.getLogger(YieldCurveNodePnLFunction.class);
  // Please see http://jira.opengamma.com/browse/PLAT-2330 for information about this constant.
  /**
   * Property name of the contribution to the P&L (e.g. yield curve, FX rate)
   *
   * @deprecated Use {@link ValuePropertyNames#PROPERTY_PNL_CONTRIBUTIONS} instead
   */
  @Deprecated
  public static final String PROPERTY_PNL_CONTRIBUTIONS = ValuePropertyNames.PROPERTY_PNL_CONTRIBUTIONS;
  /** Removes holidays from schedule */
  private static final HolidayDateRemovalFunction HOLIDAY_REMOVER = HolidayDateRemovalFunction.getInstance();
  /** A calendar containing only weekends */
  private static final Calendar WEEKEND_CALENDAR = new MondayToFridayCalendar("Weekend");
  /** Calculates the first difference of a time series */
  private static final TimeSeriesDifferenceOperator DIFFERENCE = new TimeSeriesDifferenceOperator();
  /** Source of multi-curve calculation configurations; populated in {@link #init}. */
  private ConfigDBCurveCalculationConfigSource _curveCalculationConfigSource;
  /** Caches the curve calculation configuration source from the compilation context. */
  @Override
  public void init(final FunctionCompilationContext context) {
    _curveCalculationConfigSource = ConfigDBCurveCalculationConfigSource.init(context, this);
  }
  /**
   * Computes the yield-curve-node P&amp;L series for a position: for each
   * yield curve in the calculation configuration, combines the node
   * sensitivities with the curve's historical time series, converts through
   * an FX series when a different output currency was requested, sums the
   * per-curve series and scales by the position quantity.
   */
  @Override
  public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) {
    final Position position = target.getPosition();
    // NOTE(review): configSource is never used below -- candidate for removal.
    final ConfigSource configSource = OpenGammaExecutionContext.getConfigSource(executionContext);
    final Clock snapshotClock = executionContext.getValuationClock();
    final LocalDate now = ZonedDateTime.now(snapshotClock).toLocalDate();
    final Currency currency = FinancialSecurityUtils.getCurrency(position.getSecurity());
    final String currencyString = currency.getCode();
    final ValueRequirement desiredValue = desiredValues.iterator().next();
    final ValueProperties constraints = desiredValue.getConstraints();
    // Output currency: explicit CURRENCY constraint when present, else the
    // security's own currency.
    final String desiredCurrency;
    final Set<String> currencies = desiredValue.getConstraints().getValues(ValuePropertyNames.CURRENCY);
    if (currencies != null && !currencies.isEmpty()) {
      desiredCurrency = desiredValue.getConstraint(ValuePropertyNames.CURRENCY);
    } else {
      desiredCurrency = currencyString;
    }
    final String curveCalculationConfigName = desiredValue.getConstraint(ValuePropertyNames.CURVE_CALCULATION_CONFIG);
    final Set<String> yieldCurveNames = constraints.getValues(ValuePropertyNames.CURVE);
    final Period samplingPeriod = getSamplingPeriod(desiredValue.getConstraint(ValuePropertyNames.SAMPLING_PERIOD));
    final LocalDate startDate = now.minus(samplingPeriod);
    final Schedule scheduleCalculator = getScheduleCalculator(desiredValue.getConstraint(ValuePropertyNames.SCHEDULE_CALCULATOR));
    final TimeSeriesSamplingFunction samplingFunction = getSamplingFunction(desiredValue.getConstraint(ValuePropertyNames.SAMPLING_FUNCTION));
    final LocalDate[] schedule = HOLIDAY_REMOVER.getStrippedSchedule(scheduleCalculator.getSchedule(startDate, now, true, false), WEEKEND_CALENDAR); //REVIEW emcleod should "fromEnd" be hard-coded?
    DoubleTimeSeries<?> result = null;
    final MultiCurveCalculationConfig curveCalculationConfig = _curveCalculationConfigSource.getConfig(curveCalculationConfigName);
    // When converting currency, resolve the single FX series supplied in the
    // inputs and work out the quote direction relative to the desired currency.
    DoubleTimeSeries<?> fxSeries = null;
    boolean isInverse = true;
    if (!desiredCurrency.equals(currencyString)) {
      if (inputs.getValue(ValueRequirementNames.HISTORICAL_FX_TIME_SERIES) != null) {
        final Map<UnorderedCurrencyPair, DoubleTimeSeries<?>> allFXSeries = (Map<UnorderedCurrencyPair, DoubleTimeSeries<?>>) inputs
            .getValue(ValueRequirementNames.HISTORICAL_FX_TIME_SERIES);
        final CurrencyPairs currencyPairs = OpenGammaExecutionContext.getCurrencyPairsSource(executionContext).getCurrencyPairs(CurrencyPairs.DEFAULT_CURRENCY_PAIRS);
        if (desiredCurrency.equals(currencyPairs.getCurrencyPair(Currency.of(desiredCurrency), currency).getCounter().getCode())) {
          isInverse = false;
        }
        if (allFXSeries.size() != 1) {
          throw new OpenGammaRuntimeException("Have more than one FX series; should not happen");
        }
        final Map.Entry<UnorderedCurrencyPair, DoubleTimeSeries<?>> entry = Iterables.getOnlyElement(allFXSeries.entrySet());
        if (!UnorderedCurrencyPair.of(Currency.of(desiredCurrency), currency).equals(entry.getKey())) {
          throw new OpenGammaRuntimeException("Could not get FX series for currency pair " + desiredCurrency + ", " + currencyString);
        }
        fxSeries = entry.getValue();
      } else {
        throw new OpenGammaRuntimeException("Could not get FX series for currency pair " + desiredCurrency + ", " + currencyString);
      }
    }
    // Accumulate the P&L series curve by curve.
    for (final String yieldCurveName : yieldCurveNames) {
      final ValueRequirement ycnsRequirement = getYCNSRequirement(currencyString, curveCalculationConfigName, yieldCurveName, target, constraints);
      final Object ycnsObject = inputs.getValue(ycnsRequirement);
      if (ycnsObject == null) {
        throw new OpenGammaRuntimeException("Could not get yield curve node sensitivities; " + ycnsRequirement);
      }
      final DoubleLabelledMatrix1D ycns = (DoubleLabelledMatrix1D) ycnsObject;
      final ValueRequirement ychtsRequirement = getYCHTSRequirement(currency, yieldCurveName, samplingPeriod.toString());
      final Object ychtsObject = inputs.getValue(ychtsRequirement);
      if (ychtsObject == null) {
        throw new OpenGammaRuntimeException("Could not get yield curve historical time series; " + ychtsRequirement);
      }
      final HistoricalTimeSeriesBundle ychts = (HistoricalTimeSeriesBundle) ychtsObject;
      final DoubleTimeSeries<?> pnLSeries;
      // FX-implied curves have no curve specification; use the simpler overload.
      if (curveCalculationConfig.getCalculationMethod().equals(FXImpliedYieldCurveFunction.FX_IMPLIED)) {
        pnLSeries = getPnLSeries(ycns, ychts, schedule, samplingFunction);
      } else {
        final ValueRequirement curveSpecRequirement = getCurveSpecRequirement(currency, yieldCurveName);
        final Object curveSpecObject = inputs.getValue(curveSpecRequirement);
        if (curveSpecObject == null) {
          throw new OpenGammaRuntimeException("Could not get curve specification; " + curveSpecRequirement);
        }
        final InterpolatedYieldCurveSpecificationWithSecurities curveSpec = (InterpolatedYieldCurveSpecificationWithSecurities) curveSpecObject;
        pnLSeries = getPnLSeries(curveSpec, ycns, ychts, schedule, samplingFunction, fxSeries, isInverse);
      }
      if (result == null) {
        result = pnLSeries;
      } else {
        result = result.add(pnLSeries);
      }
    }
    if (result == null) {
      throw new OpenGammaRuntimeException("Could not get any values for security " + position.getSecurity());
    }
    // Scale the per-unit series by the position size.
    result = result.multiply(position.getQuantity().doubleValue());
    final ValueSpecification resultSpec = new ValueSpecification(ValueRequirementNames.PNL_SERIES, target.toSpecification(), desiredValue.getConstraints());
    return Sets.newHashSet(new ComputedValue(resultSpec, result));
  }
/**
 * This function applies to positions in fixed-income instruments: external-sensitivities
 * securities, the supported swap types and any other financial security recognised as a
 * fixed-income instrument type. Interest rate futures and options on them are excluded.
 */
@Override
public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
  final Security security = target.getPosition().getSecurity();
  // Rate futures and their options are priced by other functions.
  if (security instanceof InterestRateFutureSecurity || security instanceof IRFutureOptionSecurity) {
    return false;
  }
  // Externally supplied sensitivities are always supported.
  if (security.getSecurityType().equals(SecurityEntryData.EXTERNAL_SENSITIVITIES_SECURITY_TYPE)) {
    return true;
  }
  if (!(security instanceof FinancialSecurity)) {
    return false;
  }
  if (!(security instanceof SwapSecurity)) {
    return InterestRateInstrumentType.isFixedIncomeInstrumentType((FinancialSecurity) security);
  }
  try {
    final InterestRateInstrumentType swapType = InterestRateInstrumentType.getInstrumentTypeFromSecurity((SwapSecurity) security);
    return swapType == InterestRateInstrumentType.SWAP_FIXED_IBOR
        || swapType == InterestRateInstrumentType.SWAP_FIXED_IBOR_WITH_SPREAD
        || swapType == InterestRateInstrumentType.SWAP_IBOR_IBOR
        || swapType == InterestRateInstrumentType.SWAP_FIXED_OIS
        || swapType == InterestRateInstrumentType.ZERO_COUPON_INFLATION_SWAP;
  } catch (final OpenGammaRuntimeException ogre) {
    // Swap type could not be classified - cannot price it, so not applicable.
    return false;
  }
}
/**
 * Builds the input requirements for the P&amp;L series calculation: per-curve node
 * sensitivities, yield curve historical time series, the curve specification for
 * non-FX-implied configurations, and an optional currency conversion series.
 * Returns {@code null} when the constraints are not yet uniquely specified so the
 * graph builder can retry with tighter constraints.
 *
 * @param context The compilation context
 * @param target The position target
 * @param desiredValue The requested P&amp;L series value
 * @return The set of requirements, or null if the constraints are incomplete
 */
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) {
final Position position = target.getPosition();
final ValueProperties constraints = desiredValue.getConstraints();
// Exactly one curve calculation configuration must be named.
final Set<String> curveCalculationConfigNames = constraints.getValues(ValuePropertyNames.CURVE_CALCULATION_CONFIG);
if (curveCalculationConfigNames == null || curveCalculationConfigNames.size() != 1) {
return null;
}
final String curveCalculationConfigName = curveCalculationConfigNames.iterator().next();
final MultiCurveCalculationConfig curveCalculationConfig = _curveCalculationConfigSource.getConfig(curveCalculationConfigName);
if (curveCalculationConfig == null) {
s_logger.error("Could not find curve calculation configuration named " + curveCalculationConfigName);
return null;
}
// Sampling period, schedule calculator and sampling function must each be uniquely constrained.
// The schedule and sampling function values are only validated here; presumably they are read again at execution time.
final Set<String> periodNames = constraints.getValues(ValuePropertyNames.SAMPLING_PERIOD);
if (periodNames == null || periodNames.size() != 1) {
return null;
}
final String samplingPeriod = periodNames.iterator().next();
final Set<String> scheduleNames = constraints.getValues(ValuePropertyNames.SCHEDULE_CALCULATOR);
if (scheduleNames == null || scheduleNames.size() != 1) {
return null;
}
final Set<String> samplingFunctionNames = constraints.getValues(ValuePropertyNames.SAMPLING_FUNCTION);
if (samplingFunctionNames == null || samplingFunctionNames.size() != 1) {
return null;
}
final String[] yieldCurveNames = curveCalculationConfig.getYieldCurveNames();
if (yieldCurveNames.length == 0) {
s_logger.error("Curve calculation configuration called {} did not contain any yield curve names", curveCalculationConfigName);
return null;
}
final Set<ValueRequirement> requirements = new HashSet<>();
final Currency currency = FinancialSecurityUtils.getCurrency(position.getSecurity());
final String currencyString = currency.getCode();
for (final String yieldCurveName : yieldCurveNames) {
// Node sensitivities and node market value history are needed for every curve.
requirements.add(getYCNSRequirement(currencyString, curveCalculationConfigName, yieldCurveName, target, constraints));
requirements.add(getYCHTSRequirement(currency, yieldCurveName, samplingPeriod));
// FX-implied curves have no interpolated curve specification to request.
if (!curveCalculationConfig.getCalculationMethod().equals(FXImpliedYieldCurveFunction.FX_IMPLIED)) {
requirements.add(getCurveSpecRequirement(currency, yieldCurveName));
}
}
// A single requested result currency that differs from the security currency
// additionally needs an FX conversion time series.
final Set<String> resultCurrencies = constraints.getValues(ValuePropertyNames.CURRENCY);
if (resultCurrencies != null && resultCurrencies.size() == 1) {
final ValueRequirement ccyConversionTSRequirement = getCurrencyConversionTSRequirement(position, currencyString, resultCurrencies);
if (ccyConversionTSRequirement != null) {
requirements.add(ccyConversionTSRequirement);
}
}
return requirements;
}
/**
 * Builds the historical FX time series requirement used to convert the P&L series into the
 * requested result currency. Returns {@code null} when the result currency already matches the
 * security currency, i.e. no conversion is needed.
 *
 * @param position The position
 * @param currencyString The security currency code
 * @param resultCurrencies The requested result currencies; must contain exactly one element
 * @return The FX series requirement, or null if no conversion is required
 */
protected ValueRequirement getCurrencyConversionTSRequirement(final Position position, final String currencyString, final Set<String> resultCurrencies) {
  final String resultCurrency = Iterables.getOnlyElement(resultCurrencies);
  if (resultCurrency.equals(currencyString)) {
    // Result currency matches the security currency - no FX series needed.
    return null;
  }
  final ValueProperties properties = ValueProperties.builder().with(ValuePropertyNames.CURRENCY, resultCurrencies).get();
  final ComputationTargetSpecification targetSpec = ComputationTargetSpecification.of(position.getSecurity());
  return new ValueRequirement(ValueRequirementNames.HISTORICAL_FX_TIME_SERIES, targetSpec, properties);
}
/**
 * Declares the maximal result: a P&amp;L series whose optional properties are wildcards until
 * the inputs (and hence the concrete curve names) are resolved.
 *
 * @param context The compilation context
 * @param target The position target
 * @return The single P&amp;L series value specification
 */
@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) {
final ValueProperties properties = createValueProperties().withAny(ValuePropertyNames.CURRENCY).withAny(ValuePropertyNames.CURVE_CALCULATION_CONFIG).withAny(ValuePropertyNames.CURVE)
.withAny(ValuePropertyNames.SAMPLING_PERIOD).withAny(ValuePropertyNames.SCHEDULE_CALCULATOR).withAny(ValuePropertyNames.SAMPLING_FUNCTION)
.with(ValuePropertyNames.PROPERTY_PNL_CONTRIBUTIONS, ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES).get();
return Sets.newHashSet(new ValueSpecification(ValueRequirementNames.PNL_SERIES, target.toSpecification(), properties));
}
/**
 * Resolves the declared results once the inputs are known: the curve names are collected from
 * the yield curve node sensitivity inputs and substituted for the curve wildcard.
 *
 * @param context The compilation context
 * @param target The position target
 * @param inputs The resolved inputs
 * @return The P&L series specification with concrete curve names, or null if no curve names
 *         could be found among the inputs
 */
@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) {
  final Set<String> curveNames = new HashSet<>();
  for (final Map.Entry<ValueSpecification, ValueRequirement> entry : inputs.entrySet()) {
    if (entry.getKey().getValueName().equals(ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES)) {
      curveNames.add(entry.getValue().getConstraint(ValuePropertyNames.CURVE));
    }
  }
  if (curveNames.isEmpty()) {
    // Parameterized logging (and fixed "Curves names" typo in the message).
    s_logger.error("Curve names not specified in any of {}", inputs);
    return null;
  }
  final ValueProperties properties = createValueProperties().withAny(ValuePropertyNames.CURRENCY).withAny(ValuePropertyNames.CURVE_CALCULATION_CONFIG)
      .with(ValuePropertyNames.CURVE, curveNames).withAny(ValuePropertyNames.SAMPLING_PERIOD).withAny(ValuePropertyNames.SCHEDULE_CALCULATOR).withAny(ValuePropertyNames.SAMPLING_FUNCTION)
      .with(ValuePropertyNames.PROPERTY_PNL_CONTRIBUTIONS, ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES).get();
  return Sets.newHashSet(new ValueSpecification(ValueRequirementNames.PNL_SERIES, target.toSpecification(), properties));
}
/** This function operates on positions. */
@Override
public ComputationTargetType getTargetType() {
return ComputationTargetType.POSITION;
}
/** All declared requirements must be satisfied; any missing input aborts the calculation. */
@Override
public boolean canHandleMissingRequirements() {
return false;
}
/**
 * Creates the result properties for the P&amp;L series. The values are copied from the desired
 * value's constraints rather than computed here.
 * <p>
 * NOTE(review): the {@code curveNames} and {@code curveCalculationConfig} parameters are not
 * read; the corresponding properties come from {@code desiredValue}. Confirm whether overriding
 * implementations rely on them before removing.
 *
 * @param desiredValue The desired value whose constraints supply the property values
 * @param currency The currency
 * @param curveNames The curve names (currently unused)
 * @param curveCalculationConfig The curve calculation configuration (currently unused)
 * @return The result properties
 */
protected ValueProperties getResultProperties(final ValueRequirement desiredValue, final String currency, final String[] curveNames, final String curveCalculationConfig) {
return createValueProperties().with(ValuePropertyNames.CURRENCY, currency)
.with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, desiredValue.getConstraint(ValuePropertyNames.CURVE_CALCULATION_CONFIG))
.with(ValuePropertyNames.CURVE, desiredValue.getConstraints().getValues(ValuePropertyNames.CURVE))
.with(ValuePropertyNames.SAMPLING_PERIOD, desiredValue.getConstraint(ValuePropertyNames.SAMPLING_PERIOD))
.with(ValuePropertyNames.SCHEDULE_CALCULATOR, desiredValue.getConstraint(ValuePropertyNames.SCHEDULE_CALCULATOR))
.with(ValuePropertyNames.SAMPLING_FUNCTION, desiredValue.getConstraint(ValuePropertyNames.SAMPLING_FUNCTION))
.with(ValuePropertyNames.PROPERTY_PNL_CONTRIBUTIONS, ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES).get();
}
/** Parses the sampling period name (e.g. "P2Y") into a {@code Period}. */
private Period getSamplingPeriod(final String samplingPeriodName) {
return Period.parse(samplingPeriodName);
}
/** Looks up the named schedule calculator from the factory. */
private Schedule getScheduleCalculator(final String scheduleCalculatorName) {
return ScheduleCalculatorFactory.getScheduleCalculator(scheduleCalculatorName);
}
/** Looks up the named time series sampling function from the factory. */
private TimeSeriesSamplingFunction getSamplingFunction(final String samplingFunctionName) {
return TimeSeriesSamplingFunctionFactory.getFunction(samplingFunctionName);
}
/**
 * Computes the P&L series for an interpolated curve: each node's historical market value series
 * is sampled on the schedule, optionally converted with the FX series, differenced and scaled by
 * the node sensitivity, then the per-node series are summed.
 * <p>
 * Cleanup: removed the intermediate strip-type list and the dead {@code index < 0} check - the
 * index was a post-incremented counter and could never be negative, and the list merely mirrored
 * the iteration order of the sorted strip set, which {@code stripsArray} already provides.
 *
 * @param spec The curve specification supplying the strips, in sorted order
 * @param curveSensitivities The node sensitivities, aligned with the strip order
 * @param timeSeriesBundle The bundle of historical market value series per node
 * @param schedule The sampling schedule
 * @param samplingFunction The sampling function
 * @param fxSeries Optional FX conversion series; may be null
 * @param isInverse Whether to divide (true) or multiply (false) by the FX series
 * @return The summed P&L series, or null if there are no sensitivities
 * @throws OpenGammaRuntimeException if a node has no historical series or it is empty
 */
private DoubleTimeSeries<?> getPnLSeries(final InterpolatedYieldCurveSpecificationWithSecurities spec, final DoubleLabelledMatrix1D curveSensitivities,
    final HistoricalTimeSeriesBundle timeSeriesBundle, final LocalDate[] schedule, final TimeSeriesSamplingFunction samplingFunction, final DoubleTimeSeries<?> fxSeries,
    final boolean isInverse) {
  DoubleTimeSeries<?> pnlSeries = null;
  final int n = curveSensitivities.size();
  final double[] values = curveSensitivities.getValues();
  final SortedSet<FixedIncomeStripWithSecurity> strips = (SortedSet<FixedIncomeStripWithSecurity>) spec.getStrips();
  final FixedIncomeStripWithSecurity[] stripsArray = strips.toArray(new FixedIncomeStripWithSecurity[] {});
  for (int i = 0; i < n; i++) {
    final FixedIncomeStripWithSecurity strip = stripsArray[i];
    final ExternalId id = strip.getSecurityIdentifier();
    double sensitivity = values[i];
    if (strip.getInstrumentType() == StripInstrumentType.FUTURE) {
      // TODO Temporary fix as sensitivity is to rate, but historical time series is to price (= 1 - rate)
      sensitivity *= -1;
    }
    final HistoricalTimeSeries dbNodeTimeSeries = timeSeriesBundle.get(MarketDataRequirementNames.MARKET_VALUE, id);
    if (dbNodeTimeSeries == null) {
      throw new OpenGammaRuntimeException("Could not get historical time series for " + id);
    }
    if (dbNodeTimeSeries.getTimeSeries().isEmpty()) {
      throw new OpenGammaRuntimeException("Time series " + id + " is empty");
    }
    DateDoubleTimeSeries<?> nodeTimeSeries = samplingFunction.getSampledTimeSeries(dbNodeTimeSeries.getTimeSeries(), schedule);
    if (fxSeries != null) {
      nodeTimeSeries = isInverse ? nodeTimeSeries.divide(fxSeries) : nodeTimeSeries.multiply(fxSeries);
    }
    // Difference the sampled levels to obtain day-on-day changes before scaling.
    nodeTimeSeries = DIFFERENCE.evaluate(nodeTimeSeries);
    if (pnlSeries == null) {
      pnlSeries = nodeTimeSeries.multiply(sensitivity);
    } else {
      pnlSeries = pnlSeries.add(nodeTimeSeries.multiply(sensitivity));
    }
  }
  return pnlSeries;
}
/**
 * Computes the P&L series for a curve whose sensitivity labels are external identifiers: each
 * node's market value history is sampled on the schedule, differenced and scaled by the node
 * sensitivity, then the per-node series are summed.
 *
 * @param curveSensitivities The labelled node sensitivities; labels must be {@code ExternalId}s
 * @param timeSeriesBundle The bundle of historical market value series per node
 * @param schedule The sampling schedule
 * @param samplingFunction The sampling function
 * @return The summed P&L series, or null if the sensitivity matrix is empty
 * @throws OpenGammaRuntimeException if a node has no historical series
 */
private DoubleTimeSeries<?> getPnLSeries(final DoubleLabelledMatrix1D curveSensitivities, final HistoricalTimeSeriesBundle timeSeriesBundle, final LocalDate[] schedule,
    final TimeSeriesSamplingFunction samplingFunction) {
  DoubleTimeSeries<?> pnlSeries = null;
  final Object[] labels = curveSensitivities.getLabels();
  final double[] values = curveSensitivities.getValues();
  for (int i = 0; i < labels.length; i++) {
    final ExternalId id = (ExternalId) labels[i];
    final HistoricalTimeSeries dbNodeTimeSeries = timeSeriesBundle.get(MarketDataRequirementNames.MARKET_VALUE, id);
    if (dbNodeTimeSeries == null) {
      // Message previously read "Could not identifier / price series pair" - missing verb.
      throw new OpenGammaRuntimeException("Could not get identifier / price series pair for " + id);
    }
    DateDoubleTimeSeries<?> nodeTimeSeries = samplingFunction.getSampledTimeSeries(dbNodeTimeSeries.getTimeSeries(), schedule);
    nodeTimeSeries = DIFFERENCE.evaluate(nodeTimeSeries);
    if (pnlSeries == null) {
      pnlSeries = nodeTimeSeries.multiply(values[i]);
    } else {
      pnlSeries = pnlSeries.add(nodeTimeSeries.multiply(values[i]));
    }
  }
  return pnlSeries;
}
/**
 * Given a yield curve name, returns the yield curve node sensitivities requirement for that name.
 * <p>
 * NOTE(review): {@code desiredValueProperties} is not read in this implementation; the
 * properties are rebuilt from the other arguments. Confirm whether overriding implementations
 * use it before removing.
 *
 * @param currencyString The currency
 * @param curveCalculationConfigName The curve calculation configuration
 * @param yieldCurveName The yield curve name
 * @param target The target
 * @param desiredValueProperties The properties of the desired value (currently unused)
 * @return The yield curve node sensitivities requirement for the yield curve name
 */
protected ValueRequirement getYCNSRequirement(final String currencyString, final String curveCalculationConfigName, final String yieldCurveName, final ComputationTarget target,
final ValueProperties desiredValueProperties) {
final UniqueId uniqueId = target.getPosition().getSecurity().getUniqueId();
final ValueProperties properties = ValueProperties.builder().with(ValuePropertyNames.CURRENCY, currencyString).with(ValuePropertyNames.CURVE_CURRENCY, currencyString)
.with(ValuePropertyNames.CURVE, yieldCurveName).with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfigName).get();
return new ValueRequirement(ValueRequirementNames.YIELD_CURVE_NODE_SENSITIVITIES, ComputationTargetType.SECURITY, uniqueId, properties);
}
/**
 * Builds the yield curve historical time series requirement, covering the sampling period up to
 * the valuation time (both ends inclusive).
 *
 * @param currency The curve currency
 * @param yieldCurveName The yield curve name
 * @param samplingPeriod The sampling period string used to offset the start date
 * @return The historical time series requirement
 */
private ValueRequirement getYCHTSRequirement(final Currency currency, final String yieldCurveName, final String samplingPeriod) {
return HistoricalTimeSeriesFunctionUtils.createYCHTSRequirement(currency, yieldCurveName, MarketDataRequirementNames.MARKET_VALUE, null,
DateConstraint.VALUATION_TIME.minus(samplingPeriod), true, DateConstraint.VALUATION_TIME, true);
}
/**
 * Builds the interpolated yield curve specification requirement for a named curve.
 *
 * @param currency The curve currency
 * @param yieldCurveName The yield curve name
 * @return The curve specification requirement
 */
private ValueRequirement getCurveSpecRequirement(final Currency currency, final String yieldCurveName) {
final ValueProperties properties = ValueProperties.builder().with(ValuePropertyNames.CURVE, yieldCurveName).get();
return new ValueRequirement(ValueRequirementNames.YIELD_CURVE_SPEC, ComputationTargetSpecification.of(currency), properties);
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kms.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kms-2014-11-01/RetireGrant" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RetireGrantRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* Token that identifies the grant to be retired.
* </p>
*/
private String grantToken;
/**
* <p>
* The Amazon Resource Name of the CMK associated with the grant. Example:
* </p>
* <ul>
* <li>
* <p>
* arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab
* </p>
* </li>
* </ul>
*/
private String keyId;
/**
* <p>
* Unique identifier of the grant to retire. The grant ID is returned in the response to a <code>CreateGrant</code>
* operation.
* </p>
* <ul>
* <li>
* <p>
* Grant ID Example - 0123456789012345678901234567890123456789012345678901234567890123
* </p>
* </li>
* </ul>
*/
private String grantId;
/**
* <p>
* Token that identifies the grant to be retired.
* </p>
*
* @param grantToken
* Token that identifies the grant to be retired.
*/
public void setGrantToken(String grantToken) {
this.grantToken = grantToken;
}
/**
* <p>
* Token that identifies the grant to be retired.
* </p>
*
* @return Token that identifies the grant to be retired.
*/
public String getGrantToken() {
return this.grantToken;
}
/**
* <p>
* Token that identifies the grant to be retired.
* </p>
*
* @param grantToken
* Token that identifies the grant to be retired.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public RetireGrantRequest withGrantToken(String grantToken) {
setGrantToken(grantToken);
return this;
}
/**
* <p>
* The Amazon Resource Name of the CMK associated with the grant. Example:
* </p>
* <ul>
* <li>
* <p>
* arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab
* </p>
* </li>
* </ul>
*
* @param keyId
* The Amazon Resource Name of the CMK associated with the grant. Example:</p>
* <ul>
* <li>
* <p>
* arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab
* </p>
* </li>
*/
public void setKeyId(String keyId) {
this.keyId = keyId;
}
/**
* <p>
* The Amazon Resource Name of the CMK associated with the grant. Example:
* </p>
* <ul>
* <li>
* <p>
* arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab
* </p>
* </li>
* </ul>
*
* @return The Amazon Resource Name of the CMK associated with the grant. Example:</p>
* <ul>
* <li>
* <p>
* arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab
* </p>
* </li>
*/
public String getKeyId() {
return this.keyId;
}
/**
* <p>
* The Amazon Resource Name of the CMK associated with the grant. Example:
* </p>
* <ul>
* <li>
* <p>
* arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab
* </p>
* </li>
* </ul>
*
* @param keyId
* The Amazon Resource Name of the CMK associated with the grant. Example:</p>
* <ul>
* <li>
* <p>
* arn:aws:kms:us-east-2:444455556666:key/1234abcd-12ab-34cd-56ef-1234567890ab
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public RetireGrantRequest withKeyId(String keyId) {
setKeyId(keyId);
return this;
}
/**
* <p>
* Unique identifier of the grant to retire. The grant ID is returned in the response to a <code>CreateGrant</code>
* operation.
* </p>
* <ul>
* <li>
* <p>
* Grant ID Example - 0123456789012345678901234567890123456789012345678901234567890123
* </p>
* </li>
* </ul>
*
* @param grantId
* Unique identifier of the grant to retire. The grant ID is returned in the response to a
* <code>CreateGrant</code> operation.</p>
* <ul>
* <li>
* <p>
* Grant ID Example - 0123456789012345678901234567890123456789012345678901234567890123
* </p>
* </li>
*/
public void setGrantId(String grantId) {
this.grantId = grantId;
}
/**
* <p>
* Unique identifier of the grant to retire. The grant ID is returned in the response to a <code>CreateGrant</code>
* operation.
* </p>
* <ul>
* <li>
* <p>
* Grant ID Example - 0123456789012345678901234567890123456789012345678901234567890123
* </p>
* </li>
* </ul>
*
* @return Unique identifier of the grant to retire. The grant ID is returned in the response to a
* <code>CreateGrant</code> operation.</p>
* <ul>
* <li>
* <p>
* Grant ID Example - 0123456789012345678901234567890123456789012345678901234567890123
* </p>
* </li>
*/
public String getGrantId() {
return this.grantId;
}
/**
* <p>
* Unique identifier of the grant to retire. The grant ID is returned in the response to a <code>CreateGrant</code>
* operation.
* </p>
* <ul>
* <li>
* <p>
* Grant ID Example - 0123456789012345678901234567890123456789012345678901234567890123
* </p>
* </li>
* </ul>
*
* @param grantId
* Unique identifier of the grant to retire. The grant ID is returned in the response to a
* <code>CreateGrant</code> operation.</p>
* <ul>
* <li>
* <p>
* Grant ID Example - 0123456789012345678901234567890123456789012345678901234567890123
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public RetireGrantRequest withGrantId(String grantId) {
setGrantId(grantId);
return this;
}
/**
* Returns a string representation of this object; useful for testing and debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getGrantToken() != null)
sb.append("GrantToken: ").append(getGrantToken()).append(",");
if (getKeyId() != null)
sb.append("KeyId: ").append(getKeyId()).append(",");
if (getGrantId() != null)
sb.append("GrantId: ").append(getGrantId());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof RetireGrantRequest == false)
return false;
RetireGrantRequest other = (RetireGrantRequest) obj;
if (other.getGrantToken() == null ^ this.getGrantToken() == null)
return false;
if (other.getGrantToken() != null && other.getGrantToken().equals(this.getGrantToken()) == false)
return false;
if (other.getKeyId() == null ^ this.getKeyId() == null)
return false;
if (other.getKeyId() != null && other.getKeyId().equals(this.getKeyId()) == false)
return false;
if (other.getGrantId() == null ^ this.getGrantId() == null)
return false;
if (other.getGrantId() != null && other.getGrantId().equals(this.getGrantId()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getGrantToken() == null) ? 0 : getGrantToken().hashCode());
hashCode = prime * hashCode + ((getKeyId() == null) ? 0 : getKeyId().hashCode());
hashCode = prime * hashCode + ((getGrantId() == null) ? 0 : getGrantId().hashCode());
return hashCode;
}
@Override
public RetireGrantRequest clone() {
return (RetireGrantRequest) super.clone();
}
}
| |
package io.sniffy.nio;
import io.sniffy.Sniffy;
import io.sniffy.Spy;
import io.sniffy.socket.BaseSocketTest;
import io.sniffy.socket.SnifferSocketImplFactory;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.Pipe;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicReference;
import static io.sniffy.Threads.*;
import static org.junit.Assert.*;
/**
 * Integration tests for Sniffy's NIO instrumentation: selector-driven non-blocking I/O,
 * traffic accounting for blocking SocketChannel operations, and Pipe channels.
 */
public class NioSniffySocketTest extends BaseSocketTest {

    /**
     * Drives a non-blocking SocketChannel through a Selector (connect -> write -> read)
     * against the echo server and verifies that the full response is received.
     */
    @Test
    public void testSelectionKeys() throws Exception {
        SnifferSocketImplFactory.uninstall();
        SnifferSocketImplFactory.install();
        SniffySelectorProviderModule.initialize();
        SniffySelectorProvider.uninstall();
        SniffySelectorProvider.install();
        try {
            ByteBuffer responseBuffer = ByteBuffer.allocate(BaseSocketTest.RESPONSE.length);
            Selector selector = Selector.open();
            SocketChannel socketChannel = SocketChannel.open();
            socketChannel.configureBlocking(false);
            socketChannel.connect(new InetSocketAddress(BaseSocketTest.localhost, echoServerRule.getBoundPort()));
            socketChannel.register(selector, SelectionKey.OP_CONNECT);
            selectorLoop:
            while (true) {
                // Wait for an event on one of the registered channels
                selector.select();
                // Iterate over the set of keys for which events are available
                Iterator<SelectionKey> selectedKeys = selector.selectedKeys().iterator();
                while (selectedKeys.hasNext()) {
                    SelectionKey key = selectedKeys.next();
                    selectedKeys.remove();
                    if (!key.isValid()) {
                        continue;
                    }
                    // Check what event is available and deal with it
                    if (key.isConnectable()) {
                        SocketChannel channel = (SocketChannel) key.channel();
                        // Finish the connection. If the connection operation failed
                        // this will raise an IOException.
                        try {
                            channel.finishConnect();
                        } catch (IOException e) {
                            // Cancel the channel's registration with our selector
                            e.printStackTrace();
                            key.cancel();
                            break selectorLoop;
                        }
                        // Register an interest in writing on this channel
                        key.interestOps(0);
                        channel.register(selector, SelectionKey.OP_WRITE);
                    } else if (key.isReadable()) {
                        SocketChannel channel = (SocketChannel) key.channel();
                        // Attempt to read off the channel
                        int numRead;
                        try {
                            numRead = channel.read(responseBuffer);
                        } catch (IOException e) {
                            // The remote forcibly closed the connection, cancel
                            // the selection key and close the channel.
                            key.cancel();
                            channel.close();
                            break selectorLoop;
                        }
                        if (!responseBuffer.hasRemaining()) {
                            // Entire response consumed
                            key.channel().close();
                            key.cancel();
                            break selectorLoop;
                        }
                        if (numRead == -1) {
                            // Remote entity shut the socket down cleanly. Do the
                            // same from our end and cancel the channel.
                            key.channel().close();
                            key.cancel();
                            break selectorLoop;
                        }
                    } else if (key.isWritable()) {
                        SocketChannel channel = (SocketChannel) key.channel();
                        ByteBuffer requestBuffer = ByteBuffer.wrap(BaseSocketTest.REQUEST);
                        while (requestBuffer.remaining() > 0) {
                            channel.write(requestBuffer);
                        }
                        key.interestOps(0);
                        channel.register(selector, SelectionKey.OP_READ);
                    }
                }
            }
            Assert.assertArrayEquals(BaseSocketTest.RESPONSE, responseBuffer.array());
        } finally {
            SnifferSocketImplFactory.uninstall();
            SniffySelectorProvider.uninstall();
        }
    }

    /**
     * Verifies that Sniffy accounts bytes up/down per thread: one socket exchange on the
     * current thread and one on another thread, checked via CURRENT, OTHERS and ANY views.
     */
    @Test
    public void testInstall() throws Exception {
        SnifferSocketImplFactory.uninstall();
        SnifferSocketImplFactory.install();
        SniffySelectorProviderModule.initialize();
        SniffySelectorProvider.uninstall();
        SniffySelectorProvider.install();
        try {
            try (Spy<?> s = Sniffy.spy()) {
                performSocketOperation();
                Thread thread = new Thread(this::performSocketOperation);
                thread.start();
                thread.join();
                // Current thread socket operations
                assertEquals(1, (long) s.getSocketOperations(CURRENT, true).entrySet().size());
                s.getSocketOperations(CURRENT, true).values().stream().findAny().ifPresent((socketStats) -> {
                    Assert.assertEquals(BaseSocketTest.REQUEST.length, socketStats.bytesUp.intValue());
                    Assert.assertEquals(BaseSocketTest.RESPONSE.length, socketStats.bytesDown.intValue());
                });
                // Other threads socket operations
                assertEquals(1, s.getSocketOperations(OTHERS, true).entrySet().stream().count());
                s.getSocketOperations(OTHERS, true).values().stream().findAny().ifPresent((socketStats) -> {
                    Assert.assertEquals(BaseSocketTest.REQUEST.length, socketStats.bytesUp.intValue());
                    Assert.assertEquals(BaseSocketTest.RESPONSE.length, socketStats.bytesDown.intValue());
                });
                // Any threads socket operations
                assertEquals(2, s.getSocketOperations(ANY, true).entrySet().stream().count());
                s.getSocketOperations(OTHERS, true).values().stream().forEach((socketStats) -> {
                    Assert.assertEquals(BaseSocketTest.REQUEST.length, socketStats.bytesUp.intValue());
                    Assert.assertEquals(BaseSocketTest.RESPONSE.length, socketStats.bytesDown.intValue());
                });
            }
        } finally {
            SnifferSocketImplFactory.uninstall();
            SniffySelectorProvider.uninstall();
        }
    }

    /**
     * Performs a single blocking request/response exchange with the echo server and verifies
     * the bytes on both ends.
     */
    @Override
    protected void performSocketOperation() {
        try {
            SocketChannel client = SocketChannel.open(new InetSocketAddress(BaseSocketTest.localhost, echoServerRule.getBoundPort()));
            ByteBuffer requestBuffer = ByteBuffer.wrap(BaseSocketTest.REQUEST);
            ByteBuffer responseBuffer = ByteBuffer.allocate(BaseSocketTest.RESPONSE.length);
            client.write(requestBuffer);
            requestBuffer.clear();
            client.read(responseBuffer);
            client.close();
            echoServerRule.joinThreads();
            Assert.assertArrayEquals(BaseSocketTest.REQUEST, echoServerRule.pollReceivedData());
            Assert.assertArrayEquals(BaseSocketTest.RESPONSE, responseBuffer.array());
        } catch (IOException e) {
            fail(e.getMessage());
        }
    }

    /**
     * Verifies that Pipe channels still work with the Sniffy selector provider installed:
     * bytes written to the sink on one thread are read from the source on another.
     */
    @Test
    public void testPipe() {
        try {
            SniffySelectorProviderModule.initialize();
            SniffySelectorProvider.uninstall();
            SniffySelectorProvider.install();
            Pipe pipe = Pipe.open();
            Pipe.SourceChannel source = pipe.source();
            Pipe.SinkChannel sink = pipe.sink();
            final ByteBuffer targetBuffer = ByteBuffer.allocate(5);
            final AtomicReference<Exception> exceptionHolder = new AtomicReference<>();
            Thread sourceThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        source.read(targetBuffer);
                    } catch (IOException e) {
                        exceptionHolder.set(e);
                    }
                }
            });
            Thread sinkThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        sink.write(ByteBuffer.wrap(new byte[]{1, 2, 3, 5, 8}));
                    } catch (IOException e) {
                        exceptionHolder.set(e);
                    }
                }
            });
            sourceThread.start();
            sinkThread.start();
            sourceThread.join();
            sinkThread.join();
            // Fail if either pipe thread hit an I/O error; previously this was never checked.
            assertNull(exceptionHolder.get());
            assertArrayEquals(new byte[]{1, 2, 3, 5, 8}, targetBuffer.array());
        } catch (Exception e) {
            // Previously any exception here was silently swallowed, letting the test pass
            // vacuously. Surface it instead.
            throw new AssertionError(e);
        } finally {
            SniffySelectorProvider.uninstall();
        }
    }
}
| |
/*
* Copyright (c) 2014. Real Time Genomics Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rtg.reader;
import static com.rtg.util.StringUtils.LS;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import com.rtg.mode.DNA;
import com.rtg.mode.DNAFastaSymbolTable;
import com.rtg.mode.DnaUtils;
import com.rtg.mode.Residue;
import com.rtg.mode.SequenceType;
import com.rtg.util.PortableRandom;
import com.rtg.util.TestUtils;
import com.rtg.util.bytecompression.MultiByteArray;
import com.rtg.util.bytecompression.MultiByteArrayTest;
import com.rtg.util.cli.CommandLine;
import com.rtg.util.diagnostic.Diagnostic;
import com.rtg.util.intervals.LongRange;
import com.rtg.util.io.FileUtils;
import com.rtg.util.test.FileHelper;
/**
*/
public class CompressedMemorySequencesReaderTest extends AbstractSequencesReaderTest {
/**
 * Creates the compressed in-memory reader under test for the shared
 * AbstractSequencesReaderTest cases.
 *
 * @param dir SDF directory to read
 * @param region restriction on the sequences to load
 * @return the reader under test
 * @throws IOException if the directory cannot be read
 */
@Override
protected SequencesReader createSequencesReader(final File dir, LongRange region) throws IOException {
return CompressedMemorySequencesReader.createSequencesReader(dir, true, false, region, false);
}
/**
 * Checks the per-sequence checksum against known values: one assertion per
 * single-residue sequence 0..4, plus a longer mixed sequence.
 */
public void testCRC() {
    final byte[] expectedSingles = {(byte) 0x8D, (byte) 0x1B, (byte) 0xA1, (byte) 0x37, (byte) 0x94};
    for (int residue = 0; residue < expectedSingles.length; residue++) {
        assertEquals(expectedSingles[residue], crc(new byte[] {(byte) residue}));
    }
    assertEquals((byte) 0xBB, crc(new byte[] {4, 0, 1, 3, 2, 3, 1, 4, 0, 2, 3, 3, 3}));
}
/**
 * Builds a single-sequence reader over {@code data}, asserts its integrity and
 * returns the reader's checksum for that sequence.
 */
private byte crc(final byte[] data) {
final CompressedMemorySequencesReader reader = new CompressedMemorySequencesReader(
new byte[][] {data}, new String[] {"test"}, new long[] {data.length},
data.length, data.length, SequenceType.DNA);
assertTrue(reader.integrity());
return (byte) reader.getChecksum(0);
}
/** Verifies that ArrayNames returns the label stored at the requested index. */
public void testPrereadNames() {
    final String[] labels = {"blah", "fah", "rah"};
    final ArrayNames pn = new ArrayNames(labels);
    assertEquals("fah", pn.name(1));
}
/**
 * Subclass that exercises the protected CompressedMemorySequencesReader constructor.
 * NOTE(review): the constructor also piggy-backs unrelated sanity checks on ArrayNames
 * and MultiByteArray - consider moving those assertions into their own test.
 */
private final class MyMemorySequencesReader extends CompressedMemorySequencesReader {
MyMemorySequencesReader(final byte[][] data, final String[] labels, final long[] counts,
final int min, final int max, final SequenceType type) {
super(data, labels, counts, min, max, type);
// ArrayNames returns the label stored at the given index.
final ArrayNames arr = new ArrayNames(new String[]{"someFile"});
assertEquals("someFile", arr.name(0));
// A freshly allocated MultiByteArray is zero-filled.
final MultiByteArray mba = new MultiByteArray(10L);
assertEquals(0, MultiByteArrayTest.get1(mba, 1));
}
}
/**
 * Exercises the protected CompressedMemorySequencesReader constructor (via the
 * MyMemorySequencesReader subclass) with a single 35-base DNA sequence.
 */
public void testArrayPrereadNames() {
Diagnostic.setLogStream();
final String seqString = "acgtcacgtcacgtcacgtcacgtcacgtcacgtc";
new MyMemorySequencesReader(new byte[][] {DnaUtils.encodeArray(seqString.getBytes())},
new String[] {"seq1"}, new long[] {35}, 35, 35, SequenceType.DNA);
}
/**
 * Writes three FASTA sequences to an SDF and checks that iterating over the
 * memory-backed reader returns the expected names, lengths and encoded residues
 * (0=N, 1=A, 2=C, 3=G, 4=T encoding per DnaUtils).
 */
public void testRoll() throws Exception {
final InputStream fqis = createStream(">123456789012345678901\nacgtgtgtgtcttagggctcactggtcatgca\n>bob the buuilder\ntagttcagcatcgatca\n>hobos r us\naccccaccccacaaacccaa");
final FastaSequenceDataSource ds = new FastaSequenceDataSource(fqis, new DNAFastaSymbolTable());
final SequencesWriter sw = new SequencesWriter(ds, mDir, 20, PrereadType.UNKNOWN, false);
sw.processSequences();
try (SequencesReader dsr = SequencesReaderFactory.createMemorySequencesReader(mDir, true, LongRange.NONE)) {
final SequencesIterator it = dsr.iterator();
assertTrue(((CompressedMemorySequencesReader) dsr).integrity());
assertEquals(mDir, dsr.path());
assertTrue(it.nextSequence());
// NOTE(review): the 21-char label comes back as 19 chars - presumably a name-length limit; confirm.
assertEquals("1234567890123456789", it.currentName());
assertEquals(32, it.currentLength());
SequencesWriterTest.checkEquals(it, new byte[]{1, 2, 3, 4, 3, 4, 3, 4, 3, 4, 2, 4, 4, 1, 3, 3, 3, 2, 4, 2, 1, 2, 4, 3, 3, 4, 2, 1, 4, 3, 2, 1});
assertTrue(it.nextSequence());
// Names are truncated at the first whitespace.
assertEquals("bob", it.currentName());
assertEquals(17, it.currentLength());
SequencesWriterTest.checkEquals(it, new byte[]{4, 1, 3, 4, 4, 2, 1, 3, 2, 1, 4, 2, 3, 1, 4, 2, 1});
assertTrue(it.nextSequence());
assertEquals("hobos", it.currentName());
assertEquals(20, it.currentLength());
SequencesWriterTest.checkEquals(it, new byte[]{1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 1, 1});
}
}
/**
 * Checks that SDF metadata (comment, command line, details) survives a write/read round trip
 * and is preserved by {@code copy()}. Uses the global CommandLine state, restored in finally.
 */
public void testInfo() throws IOException {
//set a command line
CommandLine.setCommandArgs("aksfj", "-d", "djfk siduf");
try {
final InputStream fqis = createStream(">123456789012345678901\nacgtgtgtgtcttagggctcactggtcatgca\n>bob-the-builder\ntagttcagcatcgatca\n>hobos r us\naccccaccccacaaacccaa");
final FastaSequenceDataSource ds = new FastaSequenceDataSource(fqis, new DNAFastaSymbolTable());
final SequencesWriter sw = new SequencesWriter(ds, mDir, 20, PrereadType.UNKNOWN, false);
sw.setComment("wejksfd boier sakrjoieje");
sw.processSequences();
final CompressedMemorySequencesReader msr = (CompressedMemorySequencesReader) SequencesReaderFactory.createMemorySequencesReader(mDir, true, LongRange.NONE);
checkDetails(msr);
// A copy must be a distinct instance with identical metadata.
final CompressedMemorySequencesReader msr2 = (CompressedMemorySequencesReader) msr.copy();
assertTrue(msr2 != msr);
checkDetails(msr2);
assertEquals("wejksfd boier sakrjoieje", msr.comment());
assertEquals("wejksfd boier sakrjoieje", msr2.comment());
// Arguments containing spaces are quoted when the command line is reconstructed.
assertEquals("aksfj -d \"djfk siduf\"", msr.commandLine());
assertEquals("aksfj -d \"djfk siduf\"", msr2.commandLine());
} finally {
// Always clear the global command-line state so other tests are unaffected.
CommandLine.clearCommandArgs();
}
}
/**
 * Asserts the full set of metadata details for the three-sequence SDF written by testInfo:
 * info string contents, lengths, residue counts, histograms and seek behaviour.
 *
 * @param msr reader under test (original or a copy)
 * @throws IOException if reading fails
 */
private void checkDetails(final CompressedMemorySequencesReader msr) throws IOException {
assertTrue(msr.integrity());
final SequencesIterator it = msr.iterator();
final StringBuilder sb = new StringBuilder();
msr.infoString(sb);
TestUtils.containsAll(sb.toString(),
"Memory Usage\tbytes\tlength",
// "24\t69\tSeqData", TODO: uncomment this after long read FastScorer done.
"3\t3\tSeqChecksums",
"114\t3\tNames",
"32\t4\tPositions"
);
assertEquals(mDir, msr.path());
assertEquals(3, msr.numberSequences());
// Seek directly to the middle sequence and verify its identity and content prefix.
it.seek(1L);
assertEquals("bob-the-builder", it.currentName());
assertEquals(1L, it.currentSequenceId());
assertEquals(17, it.currentLength());
assertEquals(17L + 20L, msr.lengthBetween(1L, 3L));
final byte[] read = new byte[100];
assertEquals(17, it.readCurrent(read));
assertEquals(DNA.T.ordinal(), read[0]);
assertEquals(DNA.A.ordinal(), read[1]);
assertEquals(DNA.G.ordinal(), read[2]);
assertTrue(msr.getSdfId().available());
//this doesnt work because on some platforms (eg MacOSX) the canonical directory and directory are not necessarily the same
//assertEquals(msr.directory().hashCode(), msr.hashCode());
assertEquals(SequenceType.DNA, msr.type());
assertEquals(32L + 17L + 20L, msr.totalLength());
assertEquals(17L, msr.minLength());
assertEquals(32L, msr.maxLength());
final long[] counts = msr.residueCounts();
assertEquals(5, counts.length);
assertEquals(18L, counts[DNA.A.ordinal()]);
assertEquals(0L, counts[DNA.N.ordinal()]);
// Idiom fix: assertTrue instead of assertEquals(true, ...).
assertTrue(msr.hasHistogram());
final long[] hist = msr.histogram();
assertEquals(3L, hist[0]);
assertEquals(0L, hist[1]);
assertEquals(0L, msr.nBlockCount());
assertEquals(0L, msr.longestNBlock());
assertNotNull(msr.posHistogram());
assertEquals(0.0, msr.globalQualityAverage());
assertNotNull(msr.positionQualityAverage());
assertEquals(PrereadArm.UNKNOWN, msr.getArm());
assertEquals(PrereadType.UNKNOWN, msr.getPrereadType());
// seek beyond last sequence
try {
it.seek(3L);
fail();
} catch (IllegalArgumentException e) {
//expected
}
// must leave it positioned at a valid sequence
it.seek(2L);
assertEquals(2L, it.currentSequenceId());
}
/**
 * Regression test around buffer boundaries: a large random SDF whose data spans multiple
 * files (2 MiB size limit) must still pass its checksum verification.
 */
public void testFencePost() throws IOException {
final File tmpDir = FileUtils.createTempDir("cmsrt", "fencepost");
try {
randomSDF(tmpDir, 256, 8200, 2L * 1024 * 1024);
final CompressedMemorySequencesReader fencepostReader =
(CompressedMemorySequencesReader) CompressedMemorySequencesReader.createSequencesReader(tmpDir, true, false, LongRange.NONE);
assertTrue(fencepostReader.checkChecksums());
} finally {
assertTrue(FileHelper.deleteAll(tmpDir));
}
}
/**
 * Writes an SDF of pseudo-random DNA sequences for boundary testing.
 *
 * @param dir target SDF directory
 * @param seqLength length of every sequence
 * @param seqNum number of sequences to write
 * @param sizeLimit per-file size limit, forcing the data to roll over multiple files
 * @throws IOException if writing fails
 */
private static void randomSDF(final File dir, final int seqLength, final int seqNum, final long sizeLimit) throws IOException {
final PortableRandom rand = new PortableRandom(1); // fixed seed -> reproducible content
final Residue[] bases = {DNA.A, DNA.C, DNA.G, DNA.T};
final SdfWriter writer = new SdfWriter(dir, sizeLimit, PrereadType.UNKNOWN, false, true, false, SequenceType.DNA);
// Fix: close the writer in finally so a failure mid-write does not leak the resource.
try {
final byte[] buf = new byte[seqLength];
for (int i = 0; i < seqNum; ++i) {
writer.startSequence("random " + i);
for (int j = 0; j < seqLength; ++j) {
buf[j] = (byte) bases[rand.nextInt(4)].ordinal();
}
writer.write(buf, null, buf.length);
writer.endSequence();
}
} finally {
writer.close();
}
}
// Six-read FASTQ fixture; the trailing comments give each read's length, cumulative start
// offset and the data file it lands in when written with a small per-file size limit.
static final String POSITIONS_FASTQ = "@r0" + LS + "ACGTACGTACGTACGT" + LS //16 0 file0
+ "+" + LS + "ZZZZZZZZZZZZZZZZ" + LS
+ "@r1" + LS + "ACGTACGTACGTACGT" + LS //16 16 file0
+ "+" + LS + "XXXXXXXXXXXXXXXX" + LS
+ "@r2" + LS + "ACGTACGTACGTACGTACGT" + LS //20 32 file1
+ "+" + LS + "BBBBBBBBBBBBBBBBBBBB" + LS
+ "@r3" + LS + "ACGTACGTACGTACGTACGT" + LS //20 52 file2
+ "+" + LS + "YYYYYYYYYYYYYYYYYYYY" + LS
+ "@r4" + LS + "ACGTACGTACGT" + LS //12 72 file3
+ "+" + LS + "DDDDDDDDDDDD" + LS
+ "@r5" + LS + "ACGTAC" + LS //6 84 file4
+ "+" + LS + "EEEEEE" + LS;
/** A reader restricted to the range [3, 5) must expose exactly reads r3 and r4. */
public void testPartialFastq() throws Exception {
final SequencesReader writtenReader = ReaderTestUtils.getReaderDNAFastq(POSITIONS_FASTQ, mDir, false);
writtenReader.close();
final LongRange partialRange = new LongRange(3, 5);
final SequencesIterator it =
new CompressedMemorySequencesReader(mDir, new IndexFile(mDir), 5, true, false, partialRange).iterator();
it.nextSequence();
checkFastq(it, "r3", "ACGTACGTACGTACGTACGT");
it.nextSequence();
checkFastq(it, "r4", "ACGTACGTACGT");
}
/**
 * A requested range whose end exceeds the number of sequences must not throw:
 * the reader clips the range back to the available sequences (with a warning).
 */
public void testOverEndFastq() throws Exception {
final SequencesReader reader = ReaderTestUtils.getReaderDNAFastq(POSITIONS_FASTQ, mDir, false);
reader.close();
new CompressedMemorySequencesReader(mDir, new IndexFile(mDir), 5, true, false, new LongRange(3, 8));
// Now this is OK rather than Exception, since our std behaviour is to warn and clip end that are too high back to the available num seqs
}
/**
 * Asserts the iterator's current sequence has the given name and nucleotide string.
 *
 * @param it iterator positioned at the sequence to check
 * @param name expected sequence name
 * @param read expected nucleotides as a string
 * @throws IOException if reading fails
 */
void checkFastq(SequencesIterator it, String name, String read) throws IOException {
final byte[] readBytes = new byte[it.currentLength()];
it.readCurrent(readBytes);
assertEquals(name, it.currentName());
assertEquals(read, DnaUtils.bytesToSequenceIncCG(readBytes));
// Quality values are read but not asserted — presumably just checking it doesn't throw; TODO confirm.
it.readCurrentQuality(readBytes);
}
// Single-read FASTQ fixture with distinct quality characters, used to verify offset reads.
static final String SMALL_FASTQ = "@r0" + LS + "ACGTACGTACGTACGT" + LS //16 0 file0
+ "+" + LS + "ABCDEFGHIJKLMNOP" + LS;
/** readQuality with a start offset and length must return exactly that quality slice. */
public void testRanges() throws Exception {
final SequencesReader writtenReader = ReaderTestUtils.getReaderDNAFastq(SMALL_FASTQ, mDir, 20, false);
writtenReader.close();
final CompressedMemorySequencesReader rangeReader =
new CompressedMemorySequencesReader(mDir, new IndexFile(mDir), 5, true, false, LongRange.NONE);
final byte[] qualities = new byte[6];
// Read 6 quality values starting at position 4 of read 0.
rangeReader.readQuality(0, qualities, 4, 6);
final String expected = "EFGHIJ";
for (int pos = 0; pos < qualities.length; ++pos) {
// Stored qualities are Phred values, i.e. ASCII minus '!'.
assertEquals(expected.charAt(pos) - '!', qualities[pos]);
}
}
/** A range ending exactly at the last sequence must yield that sequence and then stop. */
public void testFastqEnd() throws Exception {
final SequencesReader writtenReader = ReaderTestUtils.getReaderDNAFastq(POSITIONS_FASTQ, mDir, 20, false);
writtenReader.close();
final SequencesIterator it =
new CompressedMemorySequencesReader(mDir, new IndexFile(mDir), 5, true, false, new LongRange(5, 6)).iterator();
it.nextSequence();
checkFastq(it, "r5", "ACGTAC");
// No further sequences after the last one in range.
assertFalse(it.nextSequence());
}
/**
 * equals() contract: reflexive, null-safe, directory-sensitive, and type-safe.
 */
@Override
public void testEquals() throws IOException {
final File dir = FileHelper.createTempDirectory(mDir);
final File otherDir = FileHelper.createTempDirectory(mDir);
ReaderTestUtils.getReaderDNAFastq("", dir, false).close();
ReaderTestUtils.getReaderDNAFastq("", otherDir, false).close();
final CompressedMemorySequencesReader cmsr = new CompressedMemorySequencesReader(dir, new IndexFile(dir), 5, true, false, new LongRange(0, 0));
// Fix: the second reader must use otherDir's index file (was copy-pasted as dir's).
final CompressedMemorySequencesReader other = new CompressedMemorySequencesReader(otherDir, new IndexFile(otherDir), 5, true, false, new LongRange(0, 0));
assertTrue(cmsr.equals(cmsr));
assertFalse(cmsr.equals(null));
assertFalse(cmsr.equals(other));
assertFalse(cmsr.equals("FDSA"));
}
/** An empty FASTQ source must produce a reader reporting zero sequences. */
public void testEmptyFastq() throws IOException {
ReaderTestUtils.getReaderDNAFastq("", mDir, false).close();
final CompressedMemorySequencesReader emptyReader =
new CompressedMemorySequencesReader(mDir, new IndexFile(mDir), 5, true, false, new LongRange(0, 0));
assertEquals(0, emptyReader.numberSequences());
}
/** An empty FASTA source must produce a reader reporting zero sequences. */
public void testEmptyFasta() throws IOException {
ReaderTestUtils.getReaderDNA("", mDir, new SdfId(0L)).close();
final CompressedMemorySequencesReader emptyReader =
new CompressedMemorySequencesReader(mDir, new IndexFile(mDir), 5, true, false, new LongRange(0, 0));
assertEquals(0, emptyReader.numberSequences());
}
/** A purely in-memory reader (no backing directory) has no README to expose. */
public void testReadMeNoDirectory() throws IOException {
final byte[][] data = {DnaUtils.encodeArray("acgtcacgtcacgtcacgtcacgtcacgtcacgtc".getBytes())};
final CompressedMemorySequencesReader inMemory =
new CompressedMemorySequencesReader(data, new String[] {"seq1"}, new long[] {35}, 35, 35, SequenceType.DNA);
assertNull(inMemory.getReadMe());
}
// FASTQ fixture mixing normal reads (seq1, seq3) with zero-length reads (seq2, seq4),
// used to verify that empty sequences round-trip with empty quality arrays.
private static final String FASTQ = ""
+ "@seq1\n"
+ "AGAGGGTTTCAGA\n"
+ "+\n"
+ "@BBDFFDFHHHHH\n"
+ "@seq2\n"
+ "\n"
+ "+\n"
+ "\n"
+ "@seq3\n"
+ "GTTTCAAACCTGC\n"
+ "+\n"
+ "C@CFFFFFHHHHH\n"
+ "@seq4\n"
+ "\n"
+ "+\n"
+ "\n"
;
/**
 * Empty reads inside a FASTQ file must be preserved: all four names are kept, the empty
 * reads have zero length and zero-length quality arrays, and non-empty qualities decode
 * to the expected Phred values.
 */
public void testEmptySequenceInFastq() throws IOException {
final File fqFile = File.createTempFile("test", ".fq", mDir);
FileUtils.stringToFile(FASTQ, fqFile);
final FastqSequenceDataSource fqSource = new FastqSequenceDataSource(new ByteArrayInputStream(FASTQ.getBytes(StandardCharsets.US_ASCII)), QualityFormat.SANGER);
final SequencesWriter sw = new SequencesWriter(fqSource, null, PrereadType.UNKNOWN, true);
sw.setSdfId(new SdfId(0));
// In-memory processing path (no SDF directory written).
final SequencesReader reader = sw.processSequencesInMemory(fqFile, true, new SimpleNames(), null, LongRange.NONE);
assertTrue(reader.hasQualityData());
assertEquals(4, reader.numberSequences());
assertEquals("seq1", reader.name(0));
assertEquals("seq2", reader.name(1));
assertEquals("seq3", reader.name(2));
assertEquals("seq4", reader.name(3));
assertEquals(13, reader.length(0));
assertEquals(0, reader.length(1));
assertEquals(13, reader.length(2));
assertEquals(0, reader.length(3));
assertEquals(13, reader.readQuality(0).length);
assertEquals(0, reader.readQuality(1).length);
assertEquals(13, reader.readQuality(2).length);
assertEquals(0, reader.readQuality(3).length);
// Sanger qualities: ASCII minus '!' ('@'=31, 'B'=33 ... here for seq3's "C@CFFFFFHHHHH").
assertEquals("[34, 31, 34, 37, 37, 37, 37, 37, 39, 39, 39, 39, 39]", Arrays.toString(reader.readQuality(2)));
}
}
| |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.net.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.net.CronetException;
import org.chromium.net.UrlRequest;
import org.chromium.net.UrlResponseInfo;
import org.chromium.net.impl.ImplVersion;
import java.net.Proxy;
import java.nio.ByteBuffer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* Test functionality of {@link FakeCronetEngine}.
*/
@RunWith(AndroidJUnit4.class)
public class FakeCronetEngineTest {
// Instrumentation context used to build the engine.
Context mContext;
// Engine under test, freshly built for every test in setUp().
FakeCronetEngine mFakeCronetEngine;
// No-op callback used where a UrlRequest.Callback is required.
UrlRequest.Callback mCallback;
// Single-thread executor for request callbacks.
ExecutorService mExecutor;
@Before
public void setUp() {
mContext = InstrumentationRegistry.getTargetContext();
mFakeCronetEngine =
(FakeCronetEngine) new FakeCronetProvider(mContext).createBuilder().build();
// All callback methods intentionally empty — these tests never run real requests.
mCallback = new UrlRequest.Callback() {
@Override
public void onRedirectReceived(
UrlRequest request, UrlResponseInfo info, String newLocationUrl) {}
@Override
public void onResponseStarted(UrlRequest request, UrlResponseInfo info) {}
@Override
public void onReadCompleted(
UrlRequest request, UrlResponseInfo info, ByteBuffer byteBuffer) {}
@Override
public void onSucceeded(UrlRequest request, UrlResponseInfo info) {}
@Override
public void onFailed(UrlRequest request, UrlResponseInfo info, CronetException error) {}
@Override
public void onCanceled(UrlRequest request, UrlResponseInfo info) {}
};
mExecutor = Executors.newSingleThreadExecutor();
}
// After shutdown, request-building APIs must reject use with IllegalStateException.
@Test
@SmallTest
public void testShutdownEngineThrowsExceptionWhenApiCalled() {
mFakeCronetEngine.shutdown();
try {
mFakeCronetEngine.newUrlRequestBuilder("", mCallback, mExecutor).build();
fail("newUrlRequestBuilder API not checked for shutdown engine.");
} catch (IllegalStateException e) {
assertEquals(
"This instance of CronetEngine has been shutdown and can no longer be used.",
e.getMessage());
}
}
@Test
@SmallTest
public void testShutdownEngineThrowsExceptionWhenBidirectionalStreamApiCalled() {
mFakeCronetEngine.shutdown();
try {
mFakeCronetEngine.newBidirectionalStreamBuilder("", null, null);
fail("newBidirectionalStreamBuilder API not checked for shutdown engine.");
} catch (IllegalStateException e) {
assertEquals(
"This instance of CronetEngine has been shutdown and can no longer be used.",
e.getMessage());
}
}
// The fake engine does not implement bidirectional streams at all.
@Test
@SmallTest
public void testExceptionForNewBidirectionalStreamApi() {
try {
mFakeCronetEngine.newBidirectionalStreamBuilder("", null, null);
fail("newBidirectionalStreamBuilder API should not be available.");
} catch (UnsupportedOperationException e) {
assertEquals("The bidirectional stream API is not supported by the Fake implementation "
+ "of CronetEngine.",
e.getMessage());
}
}
@Test
@SmallTest
public void testExceptionForOpenConnectionApi() {
try {
mFakeCronetEngine.openConnection(null);
fail("openConnection API should not be available.");
} catch (Exception e) {
assertEquals("The openConnection API is not supported by the Fake implementation of "
+ "CronetEngine.",
e.getMessage());
}
}
@Test
@SmallTest
public void testExceptionForOpenConnectionApiWithProxy() {
try {
mFakeCronetEngine.openConnection(null, Proxy.NO_PROXY);
fail("openConnection API should not be available.");
} catch (Exception e) {
assertEquals("The openConnection API is not supported by the Fake implementation of "
+ "CronetEngine.",
e.getMessage());
}
}
@Test
@SmallTest
public void testExceptionForCreateStreamHandlerFactoryApi() {
try {
mFakeCronetEngine.createURLStreamHandlerFactory();
fail("createURLStreamHandlerFactory API should not be available.");
} catch (UnsupportedOperationException e) {
assertEquals(
"The URLStreamHandlerFactory API is not supported by the Fake implementation of"
+ " CronetEngine.",
e.getMessage());
}
}
@Test
@SmallTest
public void testGetVersionString() {
assertEquals("FakeCronet/" + ImplVersion.getCronetVersionWithLastChange(),
mFakeCronetEngine.getVersionString());
}
// The net-log and metrics APIs below are no-ops on the fake engine; the tests only
// verify that calling them does not throw.
@Test
@SmallTest
public void testStartNetLogToFile() {
mFakeCronetEngine.startNetLogToFile("", false);
}
@Test
@SmallTest
public void testStartNetLogToDisk() {
mFakeCronetEngine.startNetLogToDisk("", false, 0);
}
@Test
@SmallTest
public void testStopNetLog() {
mFakeCronetEngine.stopNetLog();
}
@Test
@SmallTest
public void testGetGlobalMetricsDeltas() {
assertTrue(mFakeCronetEngine.getGlobalMetricsDeltas().length == 0);
}
@Test
@SmallTest
public void testGetEffectiveConnectionType() {
assertEquals(FakeCronetEngine.EFFECTIVE_CONNECTION_TYPE_UNKNOWN,
mFakeCronetEngine.getEffectiveConnectionType());
}
@Test
@SmallTest
public void testGetHttpRttMs() {
assertEquals(FakeCronetEngine.CONNECTION_METRIC_UNKNOWN, mFakeCronetEngine.getHttpRttMs());
}
@Test
@SmallTest
public void testGetTransportRttMs() {
assertEquals(
FakeCronetEngine.CONNECTION_METRIC_UNKNOWN, mFakeCronetEngine.getTransportRttMs());
}
@Test
@SmallTest
public void testGetDownstreamThroughputKbps() {
assertEquals(FakeCronetEngine.CONNECTION_METRIC_UNKNOWN,
mFakeCronetEngine.getDownstreamThroughputKbps());
}
@Test
@SmallTest
public void testConfigureNetworkQualityEstimatorForTesting() {
mFakeCronetEngine.configureNetworkQualityEstimatorForTesting(false, false, false);
}
// Listener registration APIs accept (and ignore) null listeners without throwing.
@Test
@SmallTest
public void testAddRttListener() {
mFakeCronetEngine.addRttListener(null);
}
@Test
@SmallTest
public void testRemoveRttListener() {
mFakeCronetEngine.removeRttListener(null);
}
@Test
@SmallTest
public void testAddThroughputListener() {
mFakeCronetEngine.addThroughputListener(null);
}
@Test
@SmallTest
public void testRemoveThroughputListener() {
mFakeCronetEngine.removeThroughputListener(null);
}
@Test
@SmallTest
public void testAddRequestFinishedListener() {
mFakeCronetEngine.addRequestFinishedListener(null);
}
@Test
@SmallTest
public void testRemoveRequestFinishedListener() {
mFakeCronetEngine.removeRequestFinishedListener(null);
}
// Request accounting: shutdown is refused while a request is in flight.
@Test
@SmallTest
public void testShutdownBlockedWhenRequestCountNotZero() {
// Start a request and verify the engine can't be shutdown.
assertTrue(mFakeCronetEngine.startRequest());
try {
mFakeCronetEngine.shutdown();
fail("Shutdown not checked for active requests.");
} catch (IllegalStateException e) {
assertEquals("Cannot shutdown with active requests.", e.getMessage());
}
// Finish the request and verify the engine can be shutdown.
mFakeCronetEngine.onRequestDestroyed();
mFakeCronetEngine.shutdown();
}
@Test
@SmallTest
public void testCantStartRequestAfterEngineShutdown() {
mFakeCronetEngine.shutdown();
assertFalse(mFakeCronetEngine.startRequest());
}
@Test
@SmallTest
public void testCantDecrementOnceShutdown() {
mFakeCronetEngine.shutdown();
try {
mFakeCronetEngine.onRequestDestroyed();
fail("onRequestDestroyed not checked for shutdown engine");
} catch (IllegalStateException e) {
assertEquals("This instance of CronetEngine was shutdown. All requests must have been "
+ "complete.",
e.getMessage());
}
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.compiler.server;
import com.intellij.ProjectTopics;
import com.intellij.compiler.CompilerWorkspaceConfiguration;
import com.intellij.compiler.impl.javaCompiler.javac.JavacConfiguration;
import com.intellij.compiler.server.impl.BuildProcessClasspathManager;
import com.intellij.execution.ExecutionAdapter;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.ExecutionManager;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.configurations.RunProfile;
import com.intellij.execution.process.*;
import com.intellij.execution.ui.RunContentDescriptor;
import com.intellij.ide.DataManager;
import com.intellij.ide.PowerSaveMode;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.compiler.CompileContext;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl;
import com.intellij.openapi.fileTypes.impl.FileTypeManagerImpl;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectCoreUtil;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ProjectManagerAdapter;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.JavaSdkType;
import com.intellij.openapi.projectRoots.JavaSdkVersion;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.impl.JavaAwareProjectJdkTableImpl;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.ShutDownTracker;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.events.VFileEvent;
import com.intellij.openapi.vfs.newvfs.impl.FileNameCache;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.util.Alarm;
import com.intellij.util.Function;
import com.intellij.util.SmartList;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.concurrency.SequentialTaskExecutor;
import com.intellij.util.containers.IntArrayList;
import com.intellij.util.io.storage.HeavyProcessLatch;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.net.NetUtils;
import gnu.trove.THashSet;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.handler.codec.protobuf.ProtobufDecoder;
import io.netty.handler.codec.protobuf.ProtobufEncoder;
import io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder;
import io.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.ide.PooledThreadExecutor;
import org.jetbrains.io.ChannelRegistrar;
import org.jetbrains.io.NettyUtil;
import org.jetbrains.jps.api.*;
import org.jetbrains.jps.cmdline.BuildMain;
import org.jetbrains.jps.cmdline.ClasspathBootstrap;
import org.jetbrains.jps.incremental.Utils;
import org.jetbrains.jps.model.serialization.JpsGlobalLoader;
import javax.tools.JavaCompiler;
import javax.tools.ToolProvider;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.Charset;
import java.util.*;
import java.util.List;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.jetbrains.jps.api.CmdlineRemoteProto.Message.ControllerMessage.ParametersMessage.TargetTypeBuildScope;
/**
* @author Eugene Zhuravlev
* Date: 9/6/11
*/
public class BuildManager implements ApplicationComponent{
// Marker key: when set on a process, automake is allowed while that process is active.
public static final Key<Boolean> ALLOW_AUTOMAKE = Key.create("_allow_automake_when_process_is_active_");
private static final Key<String> FORCE_MODEL_LOADING_PARAMETER = Key.create(BuildParametersKeys.FORCE_MODEL_LOADING);
private static final Logger LOG = Logger.getInstance("#com.intellij.compiler.server.BuildManager");
private static final String COMPILER_PROCESS_JDK_PROPERTY = "compiler.process.jdk";
public static final String SYSTEM_ROOT = "compile-server";
public static final String TEMP_DIR_NAME = "_temp_";
// Default debounce delays; both can be overridden via Registry keys (see the periodic tasks below).
private static final int MAKE_TRIGGER_DELAY = 300 /*300 ms*/;
private static final int DOCUMENT_SAVE_TRIGGER_DELAY = 1500 /*1.5 sec*/;
private final boolean IS_UNIT_TEST_MODE;
private static final String IWS_EXTENSION = ".iws";
private static final String IPR_EXTENSION = ".ipr";
private static final String IDEA_PROJECT_DIR_PATTERN = "/.idea/";
// Filters out project-configuration files (.iws/.ipr/.idea) from change notifications;
// case sensitivity of the comparison matches the file system's.
private static final Function<String, Boolean> PATH_FILTER =
SystemInfo.isFileSystemCaseSensitive?
new Function<String, Boolean>() {
@Override
public Boolean fun(String s) {
return !(s.contains(IDEA_PROJECT_DIR_PATTERN) || s.endsWith(IWS_EXTENSION) || s.endsWith(IPR_EXTENSION));
}
} :
new Function<String, Boolean>() {
@Override
public Boolean fun(String s) {
return !(StringUtil.endsWithIgnoreCase(s, IWS_EXTENSION) || StringUtil.endsWithIgnoreCase(s, IPR_EXTENSION) || StringUtil.containsIgnoreCase(s, IDEA_PROJECT_DIR_PATTERN));
}
};
private final File mySystemDirectory;
private final ProjectManager myProjectManager;
// In-flight automake builds, keyed by their request futures (also guards cancellation).
private final Map<RequestFuture, Project> myAutomakeFutures = Collections.synchronizedMap(new HashMap<RequestFuture, Project>());
// Builds currently running, keyed by project path.
private final Map<String, RequestFuture> myBuildsInProgress = Collections.synchronizedMap(new HashMap<String, RequestFuture>());
private final BuildProcessClasspathManager myClasspathManager = new BuildProcessClasspathManager();
// Serializes all file-change notifications so they are delivered in arrival order.
private final SequentialTaskExecutor myRequestsProcessor = new SequentialTaskExecutor(PooledThreadExecutor.INSTANCE);
// Per-project accumulated changed/deleted paths; also used as a lock for delta updates.
private final Map<String, ProjectData> myProjectDataMap = Collections.synchronizedMap(new HashMap<String, ProjectData>());
// Debounced task that triggers an automatic make after a quiet period.
private final BuildManagerPeriodicTask myAutoMakeTask = new BuildManagerPeriodicTask() {
@Override
protected int getDelay() {
return Registry.intValue("compiler.automake.trigger.delay", MAKE_TRIGGER_DELAY);
}
@Override
protected void runTask() {
runAutoMake();
}
};
// Debounced task that saves all documents (on the EDT) when automake is applicable.
private final BuildManagerPeriodicTask myDocumentSaveTask = new BuildManagerPeriodicTask() {
@Override
protected int getDelay() {
return Registry.intValue("compiler.document.save.trigger.delay", DOCUMENT_SAVE_TRIGGER_DELAY);
}
private final Semaphore mySemaphore = new Semaphore();
private final Runnable mySaveDocsRunnable = new Runnable() {
@Override
public void run() {
try {
((FileDocumentManagerImpl)FileDocumentManager.getInstance()).saveAllDocuments(false);
}
finally {
mySemaphore.up();
}
}
};
@Override
public void runTask() {
if (shouldSaveDocuments()) {
// Block this background task until the EDT save completes.
mySemaphore.down();
ApplicationManager.getApplication().invokeLater(mySaveDocsRunnable, ModalityState.NON_MODAL);
mySemaphore.waitFor();
}
}
private boolean shouldSaveDocuments() {
final Project contextProject = getCurrentContextProject();
return contextProject != null && canStartAutoMake(contextProject);
}
};
private final ChannelRegistrar myChannelRegistrar = new ChannelRegistrar();
private final BuildMessageDispatcher myMessageDispatcher = new BuildMessageDispatcher();
// Port the build-process server listens on; -1 until listening starts.
private volatile int myListenPort = -1;
@Nullable
private final Charset mySystemCharset;
/**
 * Wires the build manager into the IDE: resolves the system directory, registers a project
 * watcher, subscribes to VFS changes (to schedule automake), document changes (to schedule
 * project save) and a shutdown hook that stops the build-process server socket.
 *
 * @param projectManager injected project manager used to enumerate open projects
 */
public BuildManager(final ProjectManager projectManager) {
final Application application = ApplicationManager.getApplication();
IS_UNIT_TEST_MODE = application.isUnitTestMode();
myProjectManager = projectManager;
mySystemCharset = CharsetToolkit.getDefaultSystemCharset();
final String systemPath = PathManager.getSystemPath();
File system = new File(systemPath);
try {
// Canonicalize so path comparisons are stable; fall back to the raw path on failure.
system = system.getCanonicalFile();
}
catch (IOException e) {
LOG.info(e);
}
mySystemDirectory = system;
projectManager.addProjectManagerListener(new ProjectWatcher());
final MessageBusConnection conn = application.getMessageBus().connect();
conn.subscribe(VirtualFileManager.VFS_CHANGES, new BulkFileListener.Adapter() {
@Override
public void after(@NotNull List<? extends VFileEvent> events) {
if (shouldTriggerMake(events)) {
scheduleAutoMake();
}
}
// An automake is warranted only when some event touches content of an open project
// (and power-save mode is off).
private boolean shouldTriggerMake(List<? extends VFileEvent> events) {
if (PowerSaveMode.isEnabled()) {
return false;
}
Project project = null;
ProjectFileIndex fileIndex = null;
for (VFileEvent event : events) {
final VirtualFile eventFile = event.getFile();
if (eventFile == null || ProjectCoreUtil.isProjectOrWorkspaceFile(eventFile)) {
continue;
}
if (project == null) {
// lazy init
project = getCurrentContextProject();
if (project == null) {
return false;
}
fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
}
if (fileIndex.isInContent(eventFile)) {
return true;
}
}
return false;
}
});
EditorFactory.getInstance().getEventMulticaster().addDocumentListener(new DocumentAdapter() {
@Override
public void documentChanged(DocumentEvent e) {
scheduleProjectSave();
}
});
ShutDownTracker.getInstance().registerShutdownTask(new Runnable() {
@Override
public void run() {
stopListening();
}
});
}
/**
 * Returns the currently open projects that are valid build targets
 * (initialized, non-default, not disposed).
 */
private List<Project> getOpenProjects() {
final Project[] openProjects = myProjectManager.getOpenProjects();
if (openProjects.length == 0) {
return Collections.emptyList();
}
final List<Project> validProjects = new SmartList<Project>();
for (final Project candidate : openProjects) {
if (isValidProject(candidate)) {
validProjects.add(candidate);
}
}
return validProjects;
}
/** A project is a valid build target when it exists, is initialized, non-default and not disposed. */
private static boolean isValidProject(@Nullable Project project) {
if (project == null || project.isDisposed() || project.isDefault()) {
return false;
}
return project.isInitialized();
}
/** Application-level singleton accessor for the build manager component. */
public static BuildManager getInstance() {
final Application application = ApplicationManager.getApplication();
return application.getComponent(BuildManager.class);
}
/** Records the given paths as changed and forwards the delta to any running builds. */
public void notifyFilesChanged(final Collection<File> paths) {
doNotify(paths, false);
}
/** Records the given paths as deleted and forwards the delta to any running builds. */
public void notifyFilesDeleted(Collection<File> paths) {
doNotify(paths, true);
}
/** Runs the command on the sequential request processor, preserving submission order. */
public void runCommand(Runnable command) {
myRequestsProcessor.submit(command);
}
/**
 * Asynchronously records changed or deleted paths for every tracked project and, for builds
 * currently in progress, pushes an FS_EVENT message with the accumulated delta over the
 * build-process channel.
 *
 * @param paths files that changed or were deleted
 * @param notifyDeletion true to record as deletions, false as modifications
 */
private void doNotify(final Collection<File> paths, final boolean notifyDeletion) {
// ensure events processed in the order they arrived
runCommand(new Runnable() {
@Override
public void run() {
// Normalize to system-independent paths and drop project-configuration files.
final List<String> filtered = new ArrayList<String>(paths.size());
for (File file : paths) {
final String path = FileUtil.toSystemIndependentName(file.getPath());
if (PATH_FILTER.fun(path)) {
filtered.add(path);
}
}
if (filtered.isEmpty()) {
return;
}
// myProjectDataMap doubles as the lock protecting each ProjectData's delta state.
synchronized (myProjectDataMap) {
if (IS_UNIT_TEST_MODE) {
if (notifyDeletion) {
LOG.info("Registering deleted paths: " + filtered);
}
else {
LOG.info("Registering changed paths: " + filtered);
}
}
for (Map.Entry<String, ProjectData> entry : myProjectDataMap.entrySet()) {
final ProjectData data = entry.getValue();
if (notifyDeletion) {
data.addDeleted(filtered);
}
else {
data.addChanged(filtered);
}
// If a build is running for this project, stream the delta to it immediately.
final RequestFuture future = myBuildsInProgress.get(entry.getKey());
if (future != null && !future.isCancelled() && !future.isDone()) {
final UUID sessionId = future.getRequestID();
final Channel channel = myMessageDispatcher.getConnectedChannel(sessionId);
if (channel != null) {
final CmdlineRemoteProto.Message.ControllerMessage message =
CmdlineRemoteProto.Message.ControllerMessage.newBuilder().setType(
CmdlineRemoteProto.Message.ControllerMessage.Type.FS_EVENT).setFsEvent(data.createNextEvent()).build();
channel.writeAndFlush(CmdlineProtoUtil.toMessage(sessionId, message));
}
}
}
}
}
});
}
/** Flags the compile scope so the build process fully loads the project model for this build. */
public static void forceModelLoading(CompileContext context) {
context.getCompileScope().putUserData(FORCE_MODEL_LOADING_PARAMETER, Boolean.TRUE.toString());
}
/**
 * Discards any accumulated changed/deleted-file deltas for the project and schedules a
 * fresh automatic make.
 *
 * @param project project whose recorded state should be dropped
 */
public void clearState(Project project) {
final String projectPath = getProjectPath(project);
synchronized (myProjectDataMap) {
final ProjectData projectData = myProjectDataMap.get(projectPath);
if (projectData != null) {
projectData.dropChanges();
}
}
scheduleAutoMake();
}
/**
 * Whether the project needs a full rescan: true when no state has been recorded yet, or
 * the recorded state is flagged as needing a rescan.
 */
public boolean rescanRequired(Project project) {
synchronized (myProjectDataMap) {
final ProjectData projectData = myProjectDataMap.get(getProjectPath(project));
return projectData == null || projectData.myNeedRescan;
}
}
/**
 * Returns the paths recorded as changed since the last compilation, or null when the
 * project is untracked or needs a full rescan (in which case the delta is meaningless).
 */
@Nullable
public List<String> getFilesChangedSinceLastCompilation(Project project) {
synchronized (myProjectDataMap) {
final ProjectData projectData = myProjectDataMap.get(getProjectPath(project));
if (projectData == null || projectData.myNeedRescan) {
return null;
}
return convertToStringPaths(projectData.myChanged);
}
}
/** Unpacks interned paths back into plain strings, preserving iteration order. */
private static List<String> convertToStringPaths(final Collection<InternedPath> interned) {
final List<String> result = new ArrayList<String>(interned.size());
for (final InternedPath internedPath : interned) {
result.add(internedPath.getValue());
}
return result;
}
/**
 * Returns the filesystem path identifying the project (its presentable URL with the
 * protocol stripped), or null when the project has no presentable URL.
 */
@Nullable
private static String getProjectPath(final Project project) {
final String presentableUrl = project.getPresentableUrl();
return presentableUrl == null ? null : VirtualFileManager.extractPath(presentableUrl);
}
/**
 * Arms the automatic-make task to run after its delay, unless running in unit-test mode or
 * power-save mode is enabled.
 */
public void scheduleAutoMake() {
  if (IS_UNIT_TEST_MODE || PowerSaveMode.isEnabled()) {
    return;
  }
  myAutoMakeTask.schedule();
}
/**
 * Arms the deferred document-save task, unless running in unit-test mode or power-save
 * mode is enabled.
 */
private void scheduleProjectSave() {
  if (IS_UNIT_TEST_MODE || PowerSaveMode.isEnabled()) {
    return;
  }
  myDocumentSaveTask.schedule();
}
/**
 * Runs an automatic make for the project of the current context, if one can be determined
 * and automake is currently allowed for it. Blocks until the scheduled build finishes.
 */
private void runAutoMake() {
  final Project project = getCurrentContextProject();
  if (project == null || !canStartAutoMake(project)) {
    return;
  }
  final AutoMakeMessageHandler handler = new AutoMakeMessageHandler(project);
  final List<TargetTypeBuildScope> scopes = CmdlineProtoUtil.createAllModulesScopes(false);
  final RequestFuture future = scheduleBuild(
    project, false, true, false, scopes, Collections.<String>emptyList(), Collections.<String, String>emptyMap(), handler
  );
  if (future == null) {
    return;
  }
  // track the future so cancelAutoMakeTasks() can cancel it while it is in flight
  myAutomakeFutures.put(future, project);
  try {
    future.waitFor();
  }
  finally {
    myAutomakeFutures.remove(future);
  }
}
/**
 * Decides whether an automatic make may start for the given project right now: the project
 * must not be disposed, must be configured for out-of-process build with "make project on
 * save" enabled, and (unless the configuration explicitly allows it) must not have a running
 * process that forbids automake.
 *
 * @param project the project to check
 * @return {@code true} when an automatic make may be started
 */
private static boolean canStartAutoMake(@NotNull Project project) {
  if (project.isDisposed()) {
    return false;
  }
  final CompilerWorkspaceConfiguration config = CompilerWorkspaceConfiguration.getInstance(project);
  final boolean configuredForAutomake = config.useOutOfProcessBuild() && config.MAKE_PROJECT_ON_SAVE;
  if (!configuredForAutomake) {
    return false;
  }
  return config.allowAutoMakeWhileRunningApplication() || !hasRunningProcess(project);
}
/**
 * Determines the project of the currently active context.
 *
 * @return the current context project, or {@code null} if it cannot be determined
 */
@Nullable
private Project getCurrentContextProject() {
// no explicit window: getContextProject() falls back to the currently active window
return getContextProject(null);
}
/**
 * Determines the project associated with the given window, or with the active window when
 * {@code window} is {@code null}.
 * <p>
 * With exactly one open project, that project is returned directly. Otherwise the topmost
 * ancestor of the window is inspected: an {@link IdeFrame} yields its project, with the
 * component's data context as a fallback lookup.
 *
 * @param window the window to resolve against, or {@code null} to use the active window
 * @return the resolved project if it is valid, otherwise {@code null}
 */
@Nullable
private Project getContextProject(@Nullable Window window) {
  final List<Project> openProjects = getOpenProjects();
  if (openProjects.isEmpty()) {
    return null;
  }
  if (openProjects.size() == 1) {
    return openProjects.get(0);
  }
  if (window == null) {
    window = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow();
    if (window == null) {
      return null;
    }
  }
  // climb to the topmost ancestor of the window
  Component top = window;
  for (Container parent = top.getParent(); parent != null; parent = top.getParent()) {
    top = parent;
  }
  Project project = top instanceof IdeFrame ? ((IdeFrame)top).getProject() : null;
  if (project == null) {
    project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(top));
  }
  return isValidProject(project) ? project : null;
}
/**
 * Checks whether the project currently has an active (not yet terminated) process whose
 * handler does not explicitly permit automake while it runs.
 *
 * @param project the project to inspect
 * @return {@code true} if at least one such active process exists
 */
private static boolean hasRunningProcess(Project project) {
  for (final RunContentDescriptor descriptor : ExecutionManager.getInstance(project).getContentManager().getAllDescriptors()) {
    final ProcessHandler processHandler = descriptor.getProcessHandler();
    if (processHandler == null || processHandler.isProcessTerminated()) {
      continue;
    }
    if (!ALLOW_AUTOMAKE.get(processHandler, Boolean.FALSE)) {
      // active process that forbids automake
      return true;
    }
  }
  return false;
}
/**
 * Cancels all in-flight automake build futures registered for the given project.
 *
 * @param project the project whose automake builds should be cancelled
 * @return the futures that were cancelled; callers may wait on them for completion
 */
public Collection<RequestFuture> cancelAutoMakeTasks(Project project) {
  final Collection<RequestFuture> cancelled = new SmartList<RequestFuture>();
  synchronized (myAutomakeFutures) {
    for (final Map.Entry<RequestFuture, Project> entry : myAutomakeFutures.entrySet()) {
      if (!entry.getValue().equals(project)) {
        continue;
      }
      final RequestFuture future = entry.getKey();
      future.cancel(false);
      cancelled.add(future);
    }
  }
  return cancelled;
}
/**
 * Schedules a build session for the given project.
 * <p>
 * The client's message handler is wrapped so that build started/finished notifications are
 * also published on the application message bus ({@code BuildManagerListener.TOPIC}). The
 * request is queued on the same command queue that processes file-system events, so the build
 * sees all changes that happened before this call; the build process itself is launched on
 * the project's sequential task queue.
 *
 * @param project project to build
 * @param isRebuild whether a full rebuild (drops accumulated FS change data) is requested
 * @param isMake whether this is an incremental make (explicit paths are ignored)
 * @param onlyCheckUpToDate when {@code true}, only an up-to-date check is performed
 * @param scopes build target scopes to pass to the build process
 * @param paths explicit file paths to compile (ignored for rebuild and make)
 * @param userData additional key-value options for the builder
 * @param messageHandler receiver of builder messages; an {@code AutoMakeMessageHandler}
 *                       marks the session as an automake
 * @return a future that completes when the session terminates, or {@code null} when the
 *         listening server could not be started or scheduling failed
 */
@Nullable
public RequestFuture scheduleBuild(
final Project project, final boolean isRebuild, final boolean isMake,
final boolean onlyCheckUpToDate, final List<TargetTypeBuildScope> scopes,
final Collection<String> paths,
final Map<String, String> userData, final DefaultMessageHandler messageHandler) {
final String projectPath = getProjectPath(project);
final UUID sessionId = UUID.randomUUID();
final boolean isAutomake = messageHandler instanceof AutoMakeMessageHandler;
// wrap the client handler to additionally broadcast build started/finished on the message bus
final BuilderMessageHandler handler = new MessageHandlerWrapper(messageHandler) {
@Override
public void buildStarted(UUID sessionId) {
super.buildStarted(sessionId);
try {
ApplicationManager.getApplication().getMessageBus().syncPublisher(BuildManagerListener.TOPIC).buildStarted(project, sessionId, isAutomake);
}
catch (Throwable e) {
LOG.error(e);
}
}
@Override
public void sessionTerminated(UUID sessionId) {
try {
super.sessionTerminated(sessionId);
}
finally {
try {
ApplicationManager.getApplication().getMessageBus().syncPublisher(BuildManagerListener.TOPIC).buildFinished(project, sessionId, isAutomake);
}
catch (Throwable e) {
LOG.error(e);
}
}
}
};
// ensure server is listening (started lazily, double-checked under "this")
if (myListenPort < 0) {
try {
synchronized (this) {
if (myListenPort < 0) {
myListenPort = startListening();
}
}
}
catch (Exception e) {
handler.handleFailure(sessionId, CmdlineProtoUtil.createFailure(e.getMessage(), null));
handler.sessionTerminated(sessionId);
return null;
}
}
try {
final RequestFuture<BuilderMessageHandler> future = new RequestFuture<BuilderMessageHandler>(handler, sessionId, new RequestFuture.CancelAction<BuilderMessageHandler>() {
@Override
public void cancel(RequestFuture<BuilderMessageHandler> future) throws Exception {
myMessageDispatcher.cancelSession(future.getRequestID());
}
});
// by using the same queue that processes events we ensure that
// the build will be aware of all events that have happened before this request
runCommand(new Runnable() {
@Override
public void run() {
if (future.isCancelled() || project.isDisposed()) {
handler.sessionTerminated(sessionId);
future.setDone();
return;
}
final CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings globals =
CmdlineRemoteProto.Message.ControllerMessage.GlobalSettings.newBuilder()
.setGlobalOptionsPath(PathManager.getOptionsPath())
.build();
CmdlineRemoteProto.Message.ControllerMessage.FSEvent currentFSChanges;
final SequentialTaskExecutor projectTaskQueue;
// snapshot and drain the accumulated FS-change data under the data-map lock
synchronized (myProjectDataMap) {
ProjectData data = myProjectDataMap.get(projectPath);
if (data == null) {
data = new ProjectData(new SequentialTaskExecutor(PooledThreadExecutor.INSTANCE));
myProjectDataMap.put(projectPath, data);
}
if (isRebuild) {
data.dropChanges();
}
if (IS_UNIT_TEST_MODE) {
LOG.info("Scheduling build for " +
projectPath +
"; CHANGED: " +
new HashSet<String>(convertToStringPaths(data.myChanged)) +
"; DELETED: " +
new HashSet<String>(convertToStringPaths(data.myDeleted)));
}
// a pending rescan means the delta is unreliable, so no FS event is sent in that case
currentFSChanges = data.getAndResetRescanFlag() ? null : data.createNextEvent();
projectTaskQueue = data.taskQueue;
}
final CmdlineRemoteProto.Message.ControllerMessage params;
if (isRebuild) {
params = CmdlineProtoUtil.createBuildRequest(projectPath, scopes, Collections.<String>emptyList(), userData, globals, null);
}
else if (onlyCheckUpToDate) {
params = CmdlineProtoUtil.createUpToDateCheckRequest(projectPath, scopes, paths, userData, globals, currentFSChanges);
}
else {
params = CmdlineProtoUtil.createBuildRequest(projectPath, scopes, isMake ? Collections.<String>emptyList() : paths,
userData, globals, currentFSChanges);
}
// ensure the future is completed when the session terminates, whatever the outcome
myMessageDispatcher.registerBuildMessageHandler(sessionId, new MessageHandlerWrapper(handler) {
@Override
public void sessionTerminated(UUID sessionId) {
try {
super.sessionTerminated(sessionId);
}
finally {
future.setDone();
}
}
}, params);
try {
// launch and babysit the build process on the project's sequential task queue
projectTaskQueue.submit(new Runnable() {
@Override
public void run() {
Throwable execFailure = null;
try {
if (project.isDisposed()) {
return;
}
myBuildsInProgress.put(projectPath, future);
final OSProcessHandler processHandler = launchBuildProcess(project, myListenPort, sessionId);
final StringBuilder stdErrOutput = new StringBuilder();
processHandler.addProcessListener(new ProcessAdapter() {
@Override
public void onTextAvailable(ProcessEvent event, Key outputType) {
// re-translate builder's output to idea.log
final String text = event.getText();
if (!StringUtil.isEmptyOrSpaces(text)) {
LOG.info("BUILDER_PROCESS [" + outputType.toString() + "]: " + text.trim());
if (stdErrOutput.length() < 1024 && ProcessOutputTypes.STDERR.equals(outputType)) {
stdErrOutput.append(text);
}
}
}
});
processHandler.startNotify();
final boolean terminated = processHandler.waitFor();
if (terminated) {
final int exitValue = processHandler.getProcess().exitValue();
if (exitValue != 0) {
final StringBuilder msg = new StringBuilder();
msg.append("Abnormal build process termination: ");
if (stdErrOutput.length() > 0) {
msg.append("\n").append(stdErrOutput);
}
else {
msg.append("unknown error");
}
handler.handleFailure(sessionId, CmdlineProtoUtil.createFailure(msg.toString(), null));
}
}
else {
handler.handleFailure(sessionId, CmdlineProtoUtil.createFailure("Disconnected from build process", null));
}
}
catch (Throwable e) {
execFailure = e;
}
finally {
myBuildsInProgress.remove(projectPath);
if (myMessageDispatcher.getAssociatedChannel(sessionId) == null) {
// either the connection has never been established (process not started or execution failed), or no messages were sent from the launched process.
// in this case the session cannot be unregistered by the message dispatcher
final BuilderMessageHandler unregistered = myMessageDispatcher.unregisterBuildMessageHandler(sessionId);
if (unregistered != null) {
if (execFailure != null) {
unregistered.handleFailure(sessionId, CmdlineProtoUtil.createFailure(execFailure.getMessage(), execFailure));
}
unregistered.sessionTerminated(sessionId);
}
}
}
}
});
}
catch (Throwable e) {
// submission failed: unregister the session and report the failure to the client handler
final BuilderMessageHandler unregistered = myMessageDispatcher.unregisterBuildMessageHandler(sessionId);
if (unregistered != null) {
unregistered.handleFailure(sessionId, CmdlineProtoUtil.createFailure(e.getMessage(), e));
unregistered.sessionTerminated(sessionId);
}
}
}
});
return future;
}
catch (Throwable e) {
handler.handleFailure(sessionId, CmdlineProtoUtil.createFailure(e.getMessage(), e));
handler.sessionTerminated(sessionId);
}
return null;
}
@Override
public void initComponent() {
// nothing to do on component init: the listening server is started lazily in scheduleBuild()
}
@Override
public void disposeComponent() {
// close the server channel and everything tracked by the registrar on shutdown
stopListening();
}
/**
 * @return the unique name identifying this application component
 */
@NotNull
@Override
public String getComponentName() {
return "com.intellij.compiler.server.BuildManager";
}
/**
 * Assembles the command line for and launches the external build process for the project.
 * <p>
 * JDK selection: the newest JDK used by the project or its modules (minor version breaking
 * ties), required to be at least 1.6, falling back to the IDE's internal JDK otherwise.
 * Presence of a java compiler / tools.jar is validated before launch. The registry key
 * {@code COMPILER_PROCESS_JDK_PROPERTY} can force a specific JDK home instead.
 *
 * @param project the project the build process serves
 * @param port the local port this build manager listens on; passed to the process
 * @param sessionId id of the build session, passed to the process on its command line
 * @return a process handler that destroys the whole process tree when terminated
 * @throws ExecutionException if no usable compiler is found or the process cannot be created
 */
private OSProcessHandler launchBuildProcess(Project project, final int port, final UUID sessionId) throws ExecutionException {
final String compilerPath;
final String vmExecutablePath;
JavaSdkVersion sdkVersion = null;
final String forcedCompiledJdkHome = Registry.stringValue(COMPILER_PROCESS_JDK_PROPERTY);
if (StringUtil.isEmptyOrSpaces(forcedCompiledJdkHome)) {
// choosing sdk with which the build process should be run
Sdk projectJdk = null;
int sdkMinorVersion = 0;
final Set<Sdk> candidates = new HashSet<Sdk>();
final Sdk defaultSdk = ProjectRootManager.getInstance(project).getProjectSdk();
if (defaultSdk != null && defaultSdk.getSdkType() instanceof JavaSdkType) {
candidates.add(defaultSdk);
}
for (Module module : ModuleManager.getInstance(project).getModules()) {
final Sdk sdk = ModuleRootManager.getInstance(module).getSdk();
if (sdk != null && sdk.getSdkType() instanceof JavaSdkType) {
candidates.add(sdk);
}
}
// now select the latest version from the sdks that are used in the project, but not older than the internal sdk version
final JavaSdk javaSdkType = JavaSdk.getInstance();
for (Sdk candidate : candidates) {
final String vs = candidate.getVersionString();
if (vs != null) {
final JavaSdkVersion candidateVersion = javaSdkType.getVersion(vs);
if (candidateVersion != null) {
final int candidateMinorVersion = getMinorVersion(vs);
if (projectJdk == null) {
sdkVersion = candidateVersion;
sdkMinorVersion = candidateMinorVersion;
projectJdk = candidate;
}
else {
final int result = candidateVersion.compareTo(sdkVersion);
if (result > 0 || (result == 0 && candidateMinorVersion > sdkMinorVersion)) {
sdkVersion = candidateVersion;
sdkMinorVersion = candidateMinorVersion;
projectJdk = candidate;
}
}
}
}
}
final Sdk internalJdk = JavaAwareProjectJdkTableImpl.getInstanceEx().getInternalJdk();
// fall back to the IDE's own JDK when no suitable (>= 1.6) JDK was found in the project
if (projectJdk == null || sdkVersion == null || !sdkVersion.isAtLeast(JavaSdkVersion.JDK_1_6)) {
projectJdk = internalJdk;
}
// validate tools.jar presence
final JavaSdkType projectJdkType = (JavaSdkType)projectJdk.getSdkType();
if (projectJdk.equals(internalJdk)) {
// important: because internal JDK can be either JDK or JRE,
// this is the most universal way to obtain tools.jar path in this particular case
final JavaCompiler systemCompiler = ToolProvider.getSystemJavaCompiler();
if (systemCompiler == null) {
throw new ExecutionException("No system java compiler is provided by the JRE. Make sure tools.jar is present in IntelliJ IDEA classpath.");
}
compilerPath = ClasspathBootstrap.getResourcePath(systemCompiler.getClass());
}
else {
compilerPath = projectJdkType.getToolsPath(projectJdk);
if (compilerPath == null) {
throw new ExecutionException("Cannot determine path to 'tools.jar' library for " + projectJdk.getName() + " (" + projectJdk.getHomePath() + ")");
}
}
vmExecutablePath = projectJdkType.getVMExecutablePath(projectJdk);
}
else {
// a JDK home was forced via the registry: use its tools.jar and java binary directly
compilerPath = new File(forcedCompiledJdkHome, "lib/tools.jar").getAbsolutePath();
vmExecutablePath = new File(forcedCompiledJdkHome, "bin/java").getAbsolutePath();
}
final CompilerWorkspaceConfiguration config = CompilerWorkspaceConfiguration.getInstance(project);
final GeneralCommandLine cmdLine = new GeneralCommandLine();
cmdLine.setExePath(vmExecutablePath);
//cmdLine.addParameter("-XX:MaxPermSize=150m");
//cmdLine.addParameter("-XX:ReservedCodeCacheSize=64m");
final int heapSize = config.getProcessHeapSize(JavacConfiguration.getOptions(project, JavacConfiguration.class).MAXIMUM_HEAP_SIZE);
cmdLine.addParameter("-Xmx" + heapSize + "m");
if (SystemInfo.isMac && sdkVersion != null && JavaSdkVersion.JDK_1_6.equals(sdkVersion) && Registry.is("compiler.process.32bit.vm.on.mac")) {
// unfortunately -d32 is supported on jdk 1.6 only
cmdLine.addParameter("-d32");
}
cmdLine.addParameter("-Djava.awt.headless=true");
cmdLine.addParameter("-Djava.endorsed.dirs=\"\""); // turn off all jre customizations for predictable behaviour
if (IS_UNIT_TEST_MODE) {
cmdLine.addParameter("-Dtest.mode=true");
}
cmdLine.addParameter("-Djdt.compiler.useSingleThread=true"); // always run eclipse compiler in single-threaded mode
final String shouldGenerateIndex = System.getProperty(GlobalOptions.GENERATE_CLASSPATH_INDEX_OPTION);
if (shouldGenerateIndex != null) {
cmdLine.addParameter("-D"+ GlobalOptions.GENERATE_CLASSPATH_INDEX_OPTION +"=" + shouldGenerateIndex);
}
cmdLine.addParameter("-D"+ GlobalOptions.COMPILE_PARALLEL_OPTION +"=" + Boolean.toString(config.PARALLEL_COMPILATION));
cmdLine.addParameter("-D"+ GlobalOptions.REBUILD_ON_DEPENDENCY_CHANGE_OPTION + "=" + Boolean.toString(config.REBUILD_ON_DEPENDENCY_CHANGE));
if (Boolean.TRUE.equals(Boolean.valueOf(System.getProperty("java.net.preferIPv4Stack", "false")))) {
cmdLine.addParameter("-Djava.net.preferIPv4Stack=true");
}
// pass user-configured additional VM options through, watching for the profiling flag
boolean isProfilingMode = false;
final String additionalOptions = config.COMPILER_PROCESS_ADDITIONAL_VM_OPTIONS;
if (!StringUtil.isEmpty(additionalOptions)) {
final StringTokenizer tokenizer = new StringTokenizer(additionalOptions, " ", false);
while (tokenizer.hasMoreTokens()) {
final String option = tokenizer.nextToken();
if ("-Dprofiling.mode=true".equals(option)) {
isProfilingMode = true;
}
cmdLine.addParameter(option);
}
}
// debugging
final int debugPort = Registry.intValue("compiler.process.debug.port");
if (debugPort > 0) {
cmdLine.addParameter("-XX:+HeapDumpOnOutOfMemoryError");
cmdLine.addParameter("-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=" + debugPort);
}
if (!Registry.is("compiler.process.use.memory.temp.cache")) {
cmdLine.addParameter("-D"+ GlobalOptions.USE_MEMORY_TEMP_CACHE_OPTION + "=false");
}
if (Registry.is("compiler.process.use.external.javac")) {
cmdLine.addParameter("-D"+ GlobalOptions.USE_EXTERNAL_JAVAC_OPTION);
}
// javac's VM should use the same default locale that IDEA uses in order for javac to print messages in 'correct' language
if (mySystemCharset != null) {
cmdLine.setCharset(mySystemCharset);
cmdLine.addParameter("-D" + CharsetToolkit.FILE_ENCODING_PROPERTY + "=" + mySystemCharset.name());
}
cmdLine.addParameter("-D" + JpsGlobalLoader.FILE_TYPES_COMPONENT_NAME_KEY + "=" + FileTypeManagerImpl.getFileTypeComponentName());
for (String name : new String[]{"user.language", "user.country", "user.region", PathManager.PROPERTY_HOME_PATH}) {
final String value = System.getProperty(name);
if (value != null) {
cmdLine.addParameter("-D" + name + "=" + value);
}
}
cmdLine.addParameter("-D" + GlobalOptions.LOG_DIR_OPTION + "=" + FileUtil.toSystemIndependentName(getBuildLogDirectory().getAbsolutePath()));
final File workDirectory = getBuildSystemDirectory();
workDirectory.mkdirs();
cmdLine.addParameter("-Djava.io.tmpdir=" + FileUtil.toSystemIndependentName(workDirectory.getPath()) + "/" + TEMP_DIR_NAME);
// let plugins contribute extra VM arguments
for (BuildProcessParametersProvider provider : project.getExtensions(BuildProcessParametersProvider.EP_NAME)) {
final List<String> args = provider.getVMArguments();
cmdLine.addParameters(args);
}
@SuppressWarnings("UnnecessaryFullyQualifiedName")
final Class<?> launcherClass = org.jetbrains.jps.cmdline.Launcher.class;
// the Launcher bootstraps the real build main class using the build classpath passed as an argument
final List<String> launcherCp = new ArrayList<String>();
launcherCp.add(ClasspathBootstrap.getResourcePath(launcherClass));
launcherCp.add(compilerPath);
ClasspathBootstrap.appendJavaCompilerClasspath(launcherCp);
cmdLine.addParameter("-classpath");
cmdLine.addParameter(classpathToString(launcherCp));
cmdLine.addParameter(launcherClass.getName());
final List<String> cp = ClasspathBootstrap.getBuildProcessApplicationClasspath(true);
cp.addAll(myClasspathManager.getBuildProcessPluginsClasspath(project));
if (isProfilingMode) {
cp.add(new File(workDirectory, "yjp-controller-api-redist.jar").getPath());
cmdLine.addParameter("-agentlib:yjpagent=disablej2ee,disablealloc,delay=10000,sessionname=ExternalBuild");
}
cmdLine.addParameter(classpathToString(cp));
// positional arguments handed to BuildMain: host, port, session id, working directory
cmdLine.addParameter(BuildMain.class.getName());
cmdLine.addParameter("127.0.0.1");
cmdLine.addParameter(Integer.toString(port));
cmdLine.addParameter(sessionId.toString());
cmdLine.addParameter(FileUtil.toSystemIndependentName(workDirectory.getPath()));
cmdLine.setWorkDirectory(workDirectory);
final Process process = cmdLine.createProcess();
return new OSProcessHandler(process, null, mySystemCharset) {
@Override
protected boolean shouldDestroyProcessRecursively() {
// the build process may spawn children; kill the whole tree on termination
return true;
}
};
}
/**
 * @return the root directory under the IDE system directory where build data is stored
 */
public File getBuildSystemDirectory() {
return new File(mySystemDirectory, SYSTEM_ROOT);
}
/**
 * @return the directory where the external build process writes its log files
 */
public File getBuildLogDirectory() {
return new File(PathManager.getLogPath(), "build-log");
}
/**
 * Resolves the per-project data storage directory under the build system root.
 *
 * @param project the project to resolve the directory for
 * @return the project's storage root, or {@code null} if the project path cannot be determined
 */
@Nullable
public File getProjectSystemDirectory(Project project) {
  final String path = getProjectPath(project);
  if (path == null) {
    return null;
  }
  return Utils.getDataStorageRoot(getBuildSystemDirectory(), path);
}
/**
 * Extracts the numeric "update" part from a JDK version string: the maximal run of digits
 * immediately following the last underscore (e.g. {@code "1.6.0_45"} yields {@code 45}).
 *
 * @param vs the version string to parse
 * @return the parsed update number, or {@code 0} when absent or unparsable
 */
private static int getMinorVersion(String vs) {
  final int underscoreIndex = vs.lastIndexOf('_');
  if (underscoreIndex < 0) {
    return 0;
  }
  // take the maximal run of digits directly after the underscore
  int end = underscoreIndex + 1;
  while (end < vs.length() && Character.isDigit(vs.charAt(end))) {
    end++;
  }
  if (end == underscoreIndex + 1) {
    return 0;
  }
  try {
    return Integer.parseInt(vs.substring(underscoreIndex + 1, end));
  }
  catch (NumberFormatException ignored) {
    // digit run too long to fit an int; treat as "no minor version"
    return 0;
  }
}
/**
 * Closes all channels tracked by the channel registrar, including the server socket channel.
 */
public void stopListening() {
myChannelRegistrar.close();
}
/**
 * Starts a Netty server bound to an OS-chosen port on the loopback interface. Every accepted
 * channel gets a protobuf varint32-framed pipeline that decodes incoming
 * {@code CmdlineRemoteProto.Message}s and hands them to the message dispatcher.
 *
 * @return the port the server socket is actually bound to
 * @throws Exception if the bootstrap setup or binding fails
 */
private int startListening() throws Exception {
final ServerBootstrap bootstrap = NettyUtil.nioServerBootstrap(new NioEventLoopGroup(1, PooledThreadExecutor.INSTANCE));
bootstrap.childHandler(new ChannelInitializer() {
@Override
protected void initChannel(Channel channel) throws Exception {
channel.pipeline().addLast(myChannelRegistrar,
new ProtobufVarint32FrameDecoder(),
new ProtobufDecoder(CmdlineRemoteProto.Message.getDefaultInstance()),
new ProtobufVarint32LengthFieldPrepender(),
new ProtobufEncoder(),
myMessageDispatcher);
}
});
// port 0 lets the OS pick a free port; register the channel so stopListening() can close it
Channel serverChannel = bootstrap.bind(NetUtils.getLoopbackAddress(), 0).syncUninterruptibly().channel();
myChannelRegistrar.add(serverChannel);
return ((InetSocketAddress)serverChannel.localAddress()).getPort();
}
/**
 * Test-only: drops all recorded change data for the given project.
 */
@TestOnly
public void stopWatchingProject(Project project) {
myProjectDataMap.remove(getProjectPath(project));
}
/**
 * Joins classpath entries into a single platform classpath string, converting each entry to
 * its canonical path form.
 *
 * @param cp the classpath entries
 * @return the canonicalized entries separated by {@link File#pathSeparator}
 */
private static String classpathToString(List<String> cp) {
  final StringBuilder result = new StringBuilder();
  for (final String entry : cp) {
    final String canonical = FileUtil.toCanonicalPath(entry);
    if (result.length() > 0) {
      result.append(File.pathSeparator);
    }
    result.append(canonical);
  }
  return result.toString();
}
/**
 * A debounced, self-rescheduling periodic task. {@link #schedule()} (re)arms an alarm with a
 * delay of at least 100 ms; when the alarm fires, {@link #runTask()} is executed on a pooled
 * thread — unless a "heavy" operation is in progress or the previous run has not finished,
 * in which case the task simply re-arms itself and tries again later.
 */
private static abstract class BuildManagerPeriodicTask implements Runnable {
private final Alarm myAlarm = new Alarm(Alarm.ThreadToUse.SHARED_THREAD);
// guards against overlapping executions of runTask()
private final AtomicBoolean myInProgress = new AtomicBoolean(false);
private final Runnable myTaskRunnable = new Runnable() {
@Override
public void run() {
try {
runTask();
}
finally {
myInProgress.set(false);
}
}
};
/** (Re)schedules this task, cancelling any pending request first. */
public final void schedule() {
myAlarm.cancelAllRequests();
final int delay = Math.max(100, getDelay());
myAlarm.addRequest(this, delay);
}
/** @return the delay in milliseconds before the task should run (floored at 100 ms) */
protected abstract int getDelay();
/** The actual work; executed on a pooled thread. */
protected abstract void runTask();
@Override
public final void run() {
if (!HeavyProcessLatch.INSTANCE.isRunning() && !myInProgress.getAndSet(true)) {
try {
ApplicationManager.getApplication().executeOnPooledThread(myTaskRunnable);
}
catch (RejectedExecutionException ignored) {
// we were shut down
myInProgress.set(false);
}
catch (Throwable e) {
myInProgress.set(false);
throw new RuntimeException(e);
}
}
else {
// busy right now: re-arm and try again after the configured delay
schedule();
}
}
}
/**
 * Tracks project lifecycle to keep the build manager's bookkeeping current: subscribes each
 * opened project to root changes (clearing cached change data) and process termination
 * (scheduling an automake), cancels running automakes when a project closes, and drops
 * per-project data and message-bus connections afterwards.
 */
private class ProjectWatcher extends ProjectManagerAdapter {
private final Map<Project, MessageBusConnection> myConnections = new HashMap<Project, MessageBusConnection>();
@Override
public void projectOpened(final Project project) {
final MessageBusConnection conn = project.getMessageBus().connect();
myConnections.put(project, conn);
// any roots change invalidates the accumulated FS deltas for the project
conn.subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootAdapter() {
@Override
public void rootsChanged(final ModuleRootEvent event) {
final Object source = event.getSource();
if (source instanceof Project) {
clearState((Project)source);
}
}
});
conn.subscribe(ExecutionManager.EXECUTION_TOPIC, new ExecutionAdapter() {
@Override
public void processTerminated(@NotNull RunProfile runProfile, @NotNull ProcessHandler handler) {
scheduleAutoMake();
}
});
final String projectPath = getProjectPath(project);
Disposer.register(project, new Disposable() {
@Override
public void dispose() {
myProjectDataMap.remove(projectPath);
}
});
StartupManager.getInstance(project).registerPostStartupActivity(new Runnable() {
@Override
public void run() {
scheduleAutoMake(); // run automake after project opened
}
});
}
@Override
public boolean canCloseProject(Project project) {
// stop automake builds before the project is allowed to close
cancelAutoMakeTasks(project);
return super.canCloseProject(project);
}
@Override
public void projectClosing(Project project) {
// give cancelled automake sessions a moment to wind down
for (RequestFuture future : cancelAutoMakeTasks(project)) {
future.waitFor(500, TimeUnit.MILLISECONDS);
}
}
@Override
public void projectClosed(Project project) {
myProjectDataMap.remove(getProjectPath(project));
final MessageBusConnection conn = myConnections.remove(project);
if (conn != null) {
conn.disconnect();
}
}
}
/**
 * Per-project bookkeeping of file changes between builds. While {@code myNeedRescan} is set
 * (the initial state, and after {@link #dropChanges()}), change/delete notifications are
 * ignored because a full rescan will supersede them. {@link #createNextEvent()} drains the
 * accumulated sets into an {@code FSEvent} carrying a monotonically increasing ordinal.
 */
private static class ProjectData {
// serializes build launches for this project
final SequentialTaskExecutor taskQueue;
private final Set<InternedPath> myChanged = new THashSet<InternedPath>();
private final Set<InternedPath> myDeleted = new THashSet<InternedPath>();
private long myNextEventOrdinal = 0L;
private boolean myNeedRescan = true;
private ProjectData(SequentialTaskExecutor taskQueue) {
this.taskQueue = taskQueue;
}
/** Records changed paths; a path marked changed is no longer considered deleted. */
public void addChanged(Collection<String> paths) {
if (!myNeedRescan) {
for (String path : paths) {
final InternedPath _path = InternedPath.create(path);
myDeleted.remove(_path);
myChanged.add(_path);
}
}
}
/** Records deleted paths; a path marked deleted is no longer considered changed. */
public void addDeleted(Collection<String> paths) {
if (!myNeedRescan) {
for (String path : paths) {
final InternedPath _path = InternedPath.create(path);
myChanged.remove(_path);
myDeleted.add(_path);
}
}
}
/** Drains the accumulated changed/deleted sets into a new FS event with the next ordinal. */
public CmdlineRemoteProto.Message.ControllerMessage.FSEvent createNextEvent() {
final CmdlineRemoteProto.Message.ControllerMessage.FSEvent.Builder builder =
CmdlineRemoteProto.Message.ControllerMessage.FSEvent.newBuilder();
builder.setOrdinal(++myNextEventOrdinal);
for (InternedPath path : myChanged) {
builder.addChangedPaths(path.getValue());
}
myChanged.clear();
for (InternedPath path : myDeleted) {
builder.addDeletedPaths(path.getValue());
}
myDeleted.clear();
return builder.build();
}
/** @return whether a rescan was pending; clears the flag as a side effect */
public boolean getAndResetRescanFlag() {
final boolean rescan = myNeedRescan;
myNeedRescan = false;
return rescan;
}
/** Invalidates all accumulated data and requests a full rescan. */
public void dropChanges() {
myNeedRescan = true;
myNextEventOrdinal = 0L;
myChanged.clear();
myDeleted.clear();
}
}
/**
 * A memory-efficient representation of a filesystem path: each path element is interned
 * through {@link FileNameCache} and stored as an int id. Equality and hashing are based on
 * the interned element sequence.
 */
private static abstract class InternedPath {
  protected final int[] myPath;

  /**
   * @param path assuming system-independent path with forward slashes
   */
  protected InternedPath(String path) {
    final IntArrayList list = new IntArrayList();
    final StringTokenizer tokenizer = new StringTokenizer(path, "/", false);
    while(tokenizer.hasMoreTokens()) {
      final String element = tokenizer.nextToken();
      list.add(FileNameCache.storeName(element));
    }
    myPath = list.toArray();
  }

  /**
   * @return the path restored to its string form
   */
  public abstract String getValue();

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    // paths are equal iff their interned element sequences match
    return Arrays.equals(myPath, ((InternedPath)o).myPath);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(myPath);
  }

  /**
   * Chooses the concrete implementation by path style: paths rooted at "/" become
   * {@link XInternedPath}, everything else {@link WinInternedPath}.
   */
  public static InternedPath create(String path) {
    return path.startsWith("/")? new XInternedPath(path) : new WinInternedPath(path);
  }
}
/**
 * Interned path for windows-style (not rooted at "/") paths.
 */
private static class WinInternedPath extends InternedPath {
  private WinInternedPath(String path) {
    super(path);
  }

  @Override
  public String getValue() {
    if (myPath.length == 1) {
      final String name = FileNameCache.getVFileName(myPath[0]);
      // handle case of windows drive letter
      return name.length() == 2 && name.endsWith(":")? name + "/" : name;
    }
    // join the remaining elements with "/" separators
    final StringBuilder result = new StringBuilder();
    String separator = "";
    for (final int element : myPath) {
      result.append(separator).append(FileNameCache.getVFileName(element));
      separator = "/";
    }
    return result.toString();
  }
}
/**
 * Interned path for unix-style absolute paths (rooted at "/").
 */
private static class XInternedPath extends InternedPath {
  private XInternedPath(String path) {
    super(path);
  }

  @Override
  public String getValue() {
    if (myPath.length == 0) {
      // the root itself
      return "/";
    }
    final StringBuilder result = new StringBuilder();
    for (final int element : myPath) {
      result.append("/").append(FileNameCache.getVFileName(element));
    }
    return result.toString();
  }
}
}
| |
/*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.client.widget.escalator;
import com.google.gwt.animation.client.AnimationScheduler;
import com.google.gwt.animation.client.AnimationScheduler.AnimationSupportDetector;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style.Display;
import com.google.gwt.dom.client.Style.Overflow;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.dom.client.Style.Visibility;
import com.google.gwt.event.shared.EventHandler;
import com.google.gwt.event.shared.GwtEvent;
import com.google.gwt.event.shared.HandlerManager;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Timer;
import com.vaadin.client.BrowserInfo;
import com.vaadin.client.DeferredWorker;
import com.vaadin.client.WidgetUtil;
import com.vaadin.client.widget.grid.events.ScrollEvent;
import com.vaadin.client.widget.grid.events.ScrollHandler;
/**
* An element-like bundle representing a configurable and visual scrollbar in
* one axis.
*
* @since 7.4
* @author Vaadin Ltd
* @see VerticalScrollbarBundle
* @see HorizontalScrollbarBundle
*/
public abstract class ScrollbarBundle implements DeferredWorker {
// whether the browser natively supports requestAnimationFrame (detected once at class load);
// selects the scroll-event scheduling strategy in ScrollEventFirer.scheduleEvent()
private static final boolean SUPPORTS_REQUEST_ANIMATION_FRAME = new AnimationSupportDetector()
.isNativelySupported();
/**
 * Coalesces bursts of scroll notifications into a single {@link ScrollEvent}. The event is
 * fired asynchronously: via {@code requestAnimationFrame} when natively supported, otherwise
 * via a deferred command.
 */
private class ScrollEventFirer {
private final ScheduledCommand fireEventCommand = () -> {
/*
* Some kind of native-scroll-event related asynchronous problem
* occurs here (at least on desktops) where the internal bookkeeping
* isn't up to date with the real scroll position. The weird thing
* is, that happens only once, and if you drag scrollbar fast
* enough. After it has failed once, it never fails again.
*
* Theory: the user drags the scrollbar, and this command is
* executed before the browser has a chance to fire a scroll event
* (which normally would correct this situation). This would explain
* why slow scrolling doesn't trigger the problem, while fast
* scrolling does.
*
* To make absolutely sure that we have the latest scroll position,
* let's update the internal value.
*
* This might lead to a slight performance hit (on my computer it
* was never more than 3ms on either of Chrome 38 or Firefox 31). It
* also _slightly_ counteracts the purpose of the internal
* bookkeeping. But since getScrollPos is called 3 times (on one
* direction) per scroll loop, it's still better to have take this
* small penalty than removing it altogether.
*/
updateScrollPosFromDom();
getHandlerManager().fireEvent(new ScrollEvent());
isBeingFired = false;
};
// true while an event has been scheduled but not yet fired
private boolean isBeingFired;
/** Schedules a single coalesced scroll event to fire asynchronously, unless one is pending. */
public void scheduleEvent() {
if (!isBeingFired) {
/*
* We'll gather all the scroll events, and only fire once, once
* everything has calmed down.
*/
if (SUPPORTS_REQUEST_ANIMATION_FRAME) {
// Chrome MUST use this as deferred commands will sometimes
// be run with a 300+ ms delay when scrolling.
AnimationScheduler.get().requestAnimationFrame(
timestamp -> fireEventCommand.execute());
} else {
// Does not support requestAnimationFrame and the fallback
// uses a delay of 16ms, we stick to the old deferred
// command which uses a delay of 0ms
Scheduler.get().scheduleDeferred(fireEventCommand);
}
isBeingFired = true;
}
}
}
/**
* The orientation of the scrollbar: {@link #VERTICAL} scrolls along the y-axis,
* {@link #HORIZONTAL} along the x-axis.
*/
public enum Direction {
VERTICAL, HORIZONTAL;
}
/**
 * Temporarily bulks the scrollbar up to the fake OS X size and makes it visible; after
 * {@link #TEMPORARY_RESIZE_DELAY} milliseconds it is shrunk back to 1px thickness and hidden.
 */
private class TemporaryResizer {
private static final int TEMPORARY_RESIZE_DELAY = 1000;
// shrinks and hides the scrollbar once the delay has elapsed
private final Timer timer = new Timer() {
@Override
public void run() {
internalSetScrollbarThickness(1);
root.getStyle().setVisibility(Visibility.HIDDEN);
}
};
/** (Re)shows the scrollbar at the fake size and (re)starts the hide countdown. */
public void show() {
internalSetScrollbarThickness(OSX_INVISIBLE_SCROLLBAR_FAKE_SIZE_PX);
root.getStyle().setVisibility(Visibility.VISIBLE);
timer.schedule(TEMPORARY_RESIZE_DELAY);
}
}
/**
* A means to listen to when the scrollbar handle in a
* {@link ScrollbarBundle} either appears or is removed.
*/
public interface VisibilityHandler extends EventHandler {
/**
* This method is called whenever the scrollbar handle's visibility is
* changed in a {@link ScrollbarBundle}.
*
* @param event
*            the {@link VisibilityChangeEvent} carrying the new visibility state
*/
void visibilityChanged(VisibilityChangeEvent event);
}
/**
 * Event fired by a {@link ScrollbarBundle} when its scroll handle appears or disappears.
 */
public static class VisibilityChangeEvent
extends GwtEvent<VisibilityHandler> {
public static final Type<VisibilityHandler> TYPE = new Type<ScrollbarBundle.VisibilityHandler>() {
@Override
public String toString() {
return "VisibilityChangeEvent";
}
};
private final boolean isScrollerVisible;
// constructor is private: events are created only by the enclosing bundle
private VisibilityChangeEvent(boolean isScrollerVisible) {
this.isScrollerVisible = isScrollerVisible;
}
/**
* Checks whether the scroll handle is currently visible or not.
*
* @return <code>true</code> if the scroll handle is currently visible.
*         <code>false</code> if not.
*/
public boolean isScrollerVisible() {
return isScrollerVisible;
}
@Override
public Type<VisibilityHandler> getAssociatedType() {
return TYPE;
}
@Override
protected void dispatch(VisibilityHandler handler) {
handler.visibilityChanged(this);
}
}
/**
 * The fake pixel thickness used to emulate OSX's invisible scrollbars.
 * <p>
 * Touch devices don't show a scrollbar at all, so the scrollbar size is
 * irrelevant in their case. There doesn't seem to be any other popular
 * platforms that has scrollbars similar to OSX. Thus, this behavior is
 * tailored for OSX only, until additional platforms start behaving this
 * way.
 */
private static final int OSX_INVISIBLE_SCROLLBAR_FAKE_SIZE_PX = 13;
/**
 * A representation of a single vertical scrollbar.
 *
 * @see VerticalScrollbarBundle#getElement()
 */
public static final class VerticalScrollbarBundle extends ScrollbarBundle {

    @Override
    public Direction getDirection() {
        return Direction.VERTICAL;
    }

    @Override
    public void setStylePrimaryName(String primaryStyleName) {
        super.setStylePrimaryName(primaryStyleName);
        root.addClassName(primaryStyleName + "-scroller-vertical");
    }

    @Override
    protected void internalSetScrollPos(int px) {
        root.setScrollTop(px);
    }

    @Override
    protected int internalGetScrollPos() {
        return root.getScrollTop();
    }

    // For a vertical bar, the scrollable size is the height.
    @Override
    protected void internalSetScrollSize(double px) {
        scrollSizeElement.getStyle().setHeight(px, Unit.PX);
    }

    @Override
    protected String internalGetScrollSize() {
        return scrollSizeElement.getStyle().getHeight();
    }

    @Override
    protected void internalSetOffsetSize(double px) {
        root.getStyle().setHeight(px, Unit.PX);
    }

    @Override
    public String internalGetOffsetSize() {
        return root.getStyle().getHeight();
    }

    // Thickness of a vertical bar is its width. The root is kept at zero
    // width; the native scrollbar occupies the right padding.
    @Override
    protected void internalSetScrollbarThickness(double px) {
        root.getStyle().setPaddingRight(px, Unit.PX);
        root.getStyle().setWidth(0, Unit.PX);
        scrollSizeElement.getStyle().setWidth(px, Unit.PX);
    }

    @Override
    protected String internalGetScrollbarThickness() {
        return scrollSizeElement.getStyle().getWidth();
    }

    @Override
    protected void internalForceScrollbar(boolean enable) {
        if (!enable) {
            root.getStyle().clearOverflowY();
        } else {
            root.getStyle().setOverflowY(Overflow.SCROLL);
        }
    }
}
/**
 * A representation of a single horizontal scrollbar.
 *
 * @see HorizontalScrollbarBundle#getElement()
 */
public static final class HorizontalScrollbarBundle
        extends ScrollbarBundle {

    @Override
    public Direction getDirection() {
        return Direction.HORIZONTAL;
    }

    @Override
    public void setStylePrimaryName(String primaryStyleName) {
        super.setStylePrimaryName(primaryStyleName);
        root.addClassName(primaryStyleName + "-scroller-horizontal");
    }

    @Override
    protected void internalSetScrollPos(int px) {
        root.setScrollLeft(px);
    }

    @Override
    protected int internalGetScrollPos() {
        return root.getScrollLeft();
    }

    // For a horizontal bar, the scrollable size is the width.
    @Override
    protected void internalSetScrollSize(double px) {
        scrollSizeElement.getStyle().setWidth(px, Unit.PX);
    }

    @Override
    protected String internalGetScrollSize() {
        return scrollSizeElement.getStyle().getWidth();
    }

    @Override
    protected void internalSetOffsetSize(double px) {
        root.getStyle().setWidth(px, Unit.PX);
    }

    @Override
    public String internalGetOffsetSize() {
        return root.getStyle().getWidth();
    }

    // Thickness of a horizontal bar is its height. The root is kept at
    // zero height; the native scrollbar occupies the bottom padding.
    @Override
    protected void internalSetScrollbarThickness(double px) {
        root.getStyle().setPaddingBottom(px, Unit.PX);
        root.getStyle().setHeight(0, Unit.PX);
        scrollSizeElement.getStyle().setHeight(px, Unit.PX);
    }

    @Override
    protected String internalGetScrollbarThickness() {
        return scrollSizeElement.getStyle().getHeight();
    }

    @Override
    protected void internalForceScrollbar(boolean enable) {
        if (!enable) {
            root.getStyle().clearOverflowX();
        } else {
            root.getStyle().setOverflowX(Overflow.SCROLL);
        }
    }
}
/** The scrollable root element; its native scrollbar is what the user sees. */
protected final Element root = DOM.createDiv();
/** Child element sized to create the scrollable range inside {@link #root}. */
protected final Element scrollSizeElement = DOM.createDiv();
// True when thickness was set to 0 and the bar is emulated as "invisible"
// (shown only briefly while scrolling, OSX-style).
protected boolean isInvisibleScrollbar = false;
// Cached scroll position, kept in sync with the DOM scroll offset.
private double scrollPos = 0;
// Largest allowed scroll position: max(0, scrollSize - offsetSize).
private double maxScrollPos = 0;
// Last observed handle visibility, used to fire change events only on edges.
private boolean scrollHandleIsVisible = false;
// While true, all scroll position changes are refused.
private boolean isLocked = false;
/** @deprecated access via {@link #getHandlerManager()} instead. */
@Deprecated
private HandlerManager handlerManager;
// Shows/hides the emulated invisible scrollbar with a delay.
private TemporaryResizer invisibleScrollbarTemporaryResizer = new TemporaryResizer();
private final ScrollEventFirer scrollEventFirer = new ScrollEventFirer();
// Temporary handlers used by the step-by-step IE8 resize workarounds.
private HandlerRegistration scrollSizeTemporaryScrollHandler;
private HandlerRegistration offsetSizeTemporaryScrollHandler;
// Non-null while a programmatic scroll is pending; tracks "work pending".
private HandlerRegistration scrollInProgress;
/**
 * Builds the two-element DOM structure (scroll size element inside the
 * root), starts hidden, and removes the root from the tab order.
 */
private ScrollbarBundle() {
    root.appendChild(scrollSizeElement);
    root.getStyle().setDisplay(Display.NONE);
    root.setTabIndex(-1);
}
/**
 * Retrieves the scroll size as a CSS dimension string (e.g. "100px") from
 * {@link #scrollSizeElement} in the axis this scrollbar represents.
 *
 * @return the CSS dimension string, possibly empty
 */
protected abstract String internalGetScrollSize();
/**
 * Sets the primary style name; the root element gets
 * {@code <primaryStyleName>-scroller} as its class name.
 *
 * @param primaryStyleName
 *            The primary style name to use
 */
public void setStylePrimaryName(String primaryStyleName) {
    final String rootClassName = primaryStyleName + "-scroller";
    root.setClassName(rootClassName);
}
/**
 * Gets the root element of this scrollbar-composition.
 *
 * @return the root element
 */
public final Element getElement() {
    return this.root;
}
/**
 * Modifies the scroll position of this scrollbar by a number of pixels.
 * <p>
 * <em>Note:</em> Even though {@code double} values are used, they are
 * currently only used as integers as large {@code int} (or small but fast
 * {@code long}). This means, all values are truncated to zero decimal
 * places.
 *
 * @param delta
 *            the delta in pixels to change the scroll position by
 */
public final void setScrollPosByDelta(double delta) {
    if (delta == 0) {
        return;
    }
    setScrollPos(getScrollPos() + delta);
}
/**
 * Modifies {@link #root root's} dimensions in the axis the scrollbar is
 * representing (height for a vertical bar, width for a horizontal one).
 *
 * @param px
 *            the new size of {@link #root} in the dimension this scrollbar
 *            is representing
 */
protected abstract void internalSetOffsetSize(double px);
/**
 * Sets the length of the scrollbar.
 * <p>
 * If the new size would make the scroll handle disappear while the bar is
 * scrolled, the scroll position is reset to 0 first and the resize is
 * deferred until the resulting scroll event fires (IE8 workaround).
 *
 * @param px
 *            the length of the scrollbar in pixels
 */
public final void setOffsetSize(final double px) {
    /*
     * This needs to be made step-by-step because IE8 flat-out refuses to
     * fire a scroll event when the scroll size becomes smaller than the
     * offset size. All other browser need to suffer alongside.
     */
    boolean newOffsetSizeIsGreaterThanScrollSize = px > getScrollSize();
    boolean offsetSizeBecomesGreaterThanScrollSize = showsScrollHandle()
            && newOffsetSizeIsGreaterThanScrollSize;
    if (offsetSizeBecomesGreaterThanScrollSize && getScrollPos() != 0) {
        // Replace any still-pending deferred resize with this one.
        if (offsetSizeTemporaryScrollHandler != null) {
            offsetSizeTemporaryScrollHandler.removeHandler();
        }
        // must be a field because Java insists.
        offsetSizeTemporaryScrollHandler = addScrollHandler(
                event -> setOffsetSizeNow(px));
        setScrollPos(0);
    } else {
        setOffsetSizeNow(px);
    }
}
/**
 * Applies a new offset size immediately: resizes the root, re-clamps the
 * scroll position, updates the forced-scrollbar CSS and fires a visibility
 * change if the handle appeared/disappeared. Also disposes of the
 * temporary scroll handler that may have deferred this call.
 */
private void setOffsetSizeNow(double px) {
    internalSetOffsetSize(Math.max(0, px));
    recalculateMaxScrollPos();
    forceScrollbar(showsScrollHandle());
    fireVisibilityChangeIfNeeded();
    if (offsetSizeTemporaryScrollHandler != null) {
        offsetSizeTemporaryScrollHandler.removeHandler();
        offsetSizeTemporaryScrollHandler = null;
    }
}
/**
 * Force the scrollbar to be visible with CSS. In practice, this means to
 * set either <code>overflow-x</code> or <code>overflow-y</code> to "
 * <code>scroll</code>" in the scrollbar's direction.
 * <p>
 * This method is an IE8 workaround, since it doesn't always show scrollbars
 * with <code>overflow: auto</code> enabled.
 * <p>
 * Firefox on the other hand loses pending scroll events when the scrollbar
 * is hidden, so the event must be fired manually.
 * <p>
 * When IE8 support is dropped, this should really be simplified.
 */
protected void forceScrollbar(boolean enable) {
    if (!enable) {
        if (BrowserInfo.get().isFirefox()) {
            /*
             * This is related to the Firefox workaround in setScrollSize
             * for setScrollPos(0)
             */
            scrollEventFirer.scheduleEvent();
        }
        root.getStyle().setDisplay(Display.NONE);
    } else {
        root.getStyle().clearDisplay();
    }
    internalForceScrollbar(enable);
}
/**
 * Sets or clears <code>overflow: scroll</code> on {@link #root} in the
 * axis this scrollbar represents.
 *
 * @param enable
 *            <code>true</code> to force the native scrollbar visible,
 *            <code>false</code> to clear the override
 */
protected abstract void internalForceScrollbar(boolean enable);
/**
 * Gets the length of the scrollbar.
 *
 * @return the length of the scrollbar in pixels
 */
public double getOffsetSize() {
    final String cssSize = internalGetOffsetSize();
    return parseCssDimensionToPixels(cssSize);
}
/**
 * Retrieves the offset size as a CSS dimension string (e.g. "100px") from
 * {@link #root} in the axis this scrollbar represents.
 *
 * @return the CSS dimension string, possibly empty
 */
public abstract String internalGetOffsetSize();
/**
 * Sets the scroll position of the scrollbar in the axis the scrollbar is
 * representing. The value is clamped to [0, maxScrollPos] and truncated to
 * an integer. No-op when this bundle is {@link #isLocked() locked}.
 * <p>
 * <em>Note:</em> Even though {@code double} values are used, they are
 * currently only used as integers as large {@code int} (or small but fast
 * {@code long}). This means, all values are truncated to zero decimal
 * places.
 *
 * @param px
 *            the new scroll position in pixels
 */
public final void setScrollPos(double px) {
    if (isLocked()) {
        return;
    }
    double oldScrollPos = scrollPos;
    scrollPos = Math.max(0, Math.min(maxScrollPos, truncate(px)));
    if (!WidgetUtil.pixelValuesEqual(oldScrollPos, scrollPos)) {
        if (scrollInProgress == null) {
            // Only used for tracking that there is "workPending";
            // the handler removes itself on the first scroll event.
            scrollInProgress = addScrollHandler(event -> {
                scrollInProgress.removeHandler();
                scrollInProgress = null;
            });
        }
        if (isInvisibleScrollbar) {
            // OSX-style bars are shown briefly while scrolling.
            invisibleScrollbarTemporaryResizer.show();
        }
        /*
         * This is where the value needs to be converted into an integer no
         * matter how we flip it, since GWT expects an integer value.
         * There's no point making a JSNI method that accepts doubles as the
         * scroll position, since the browsers themselves don't support such
         * large numbers (as of today, 25.3.2014). This double-ranged is
         * only facilitating future virtual scrollbars.
         */
        internalSetScrollPos(toInt32(scrollPos));
    }
}
/**
 * Should be called whenever this bundle is attached to the DOM (typically,
 * from the onLoad of the containing widget). Used to ensure the DOM scroll
 * position is maintained when detaching and reattaching the bundle, since
 * the DOM forgets the offset while detached but {@link #scrollPos} does not.
 *
 * @since 7.4.1
 */
public void onLoad() {
    internalSetScrollPos(toInt32(scrollPos));
}
/**
 * Truncates a double such that no decimal places are retained, rounding
 * toward zero.
 * <p>
 * E.g. {@code trunc(2.3d) == 2.0d} and {@code trunc(-2.3d) == -2.0d}.
 *
 * @param num
 *            the double value to be truncated
 * @return the {@code num} value without any decimal digits
 */
private static double truncate(double num) {
    return num > 0 ? Math.floor(num) : Math.ceil(num);
}
/**
 * Modifies the element's scroll position (scrollTop or scrollLeft).
 * <p>
 * <em>Note:</em> The parameter here is a type of integer (instead of a
 * double) by design. The browsers internally convert all double values into
 * an integer value. To make this fact explicit, this API has chosen to
 * force integers already at this level.
 *
 * @param px
 *            integer pixel value to scroll to
 */
protected abstract void internalSetScrollPos(int px);
/**
 * Gets the scroll position of the scrollbar in the axis the scrollbar is
 * representing.
 *
 * @return the current scroll position in pixels
 */
public final double getScrollPos() {
    // Sanity check: the cached value must mirror the DOM's scroll offset.
    assert internalGetScrollPos() == toInt32(
            scrollPos) : "calculated scroll position (" + scrollPos
                    + ") did not match the DOM element scroll position ("
                    + internalGetScrollPos() + ")";
    return scrollPos;
}
/**
 * Retrieves the element's scroll position (scrollTop or scrollLeft).
 * <p>
 * <em>Note:</em> The return value here is a type of integer (instead of a
 * double) by design. The browsers internally convert all double values into
 * an integer value. To make this fact explicit, this API has chosen to
 * force integers already at this level.
 *
 * @return integer pixel value of the scroll position
 */
protected abstract int internalGetScrollPos();
/**
 * Modifies {@link #scrollSizeElement scrollSizeElement's} dimensions in
 * such a way that the scrollbar is able to scroll a certain number of
 * pixels in the axis it is representing.
 *
 * @param px
 *            the new size of {@link #scrollSizeElement} in the dimension
 *            this scrollbar is representing
 */
protected abstract void internalSetScrollSize(double px);
/**
 * Sets the amount of pixels the scrollbar needs to be able to scroll
 * through. If the handle would disappear while the bar is scrolled, the
 * scroll position is reset to 0 first and (except on Firefox) the resize
 * is deferred until the resulting scroll event fires.
 *
 * @param px
 *            the number of pixels the scrollbar should be able to scroll
 *            through
 */
public final void setScrollSize(final double px) {
    /*
     * This needs to be made step-by-step because IE8 flat-out refuses to
     * fire a scroll event when the scroll size becomes smaller than the
     * offset size. All other browser need to suffer alongside.
     *
     * This really should be changed to not use any temporary scroll
     * handlers at all once IE8 support is dropped, like now done only for
     * Firefox.
     */
    boolean newScrollSizeIsSmallerThanOffsetSize = px <= getOffsetSize();
    boolean scrollSizeBecomesSmallerThanOffsetSize = showsScrollHandle()
            && newScrollSizeIsSmallerThanOffsetSize;
    if (scrollSizeBecomesSmallerThanOffsetSize && getScrollPos() != 0) {
        /*
         * For whatever reason, Firefox loses the scroll event in this case
         * and the onscroll handler is never called (happens when reducing
         * size from 1000 items to 1 while being scrolled a bit down, see
         * #19802). Based on the comment above, only IE8 should really use
         * 'delayedSizeSet'
         */
        boolean delayedSizeSet = !BrowserInfo.get().isFirefox();
        if (delayedSizeSet) {
            // Replace any still-pending deferred resize with this one.
            if (scrollSizeTemporaryScrollHandler != null) {
                scrollSizeTemporaryScrollHandler.removeHandler();
            }
            scrollSizeTemporaryScrollHandler = addScrollHandler(
                    event -> setScrollSizeNow(px));
        }
        setScrollPos(0);
        if (!delayedSizeSet) {
            // Firefox path: apply immediately since no event will come.
            setScrollSizeNow(px);
        }
    } else {
        setScrollSizeNow(px);
    }
}
/**
 * Applies a new scroll size immediately: resizes the scroll size element,
 * re-clamps the scroll position, updates the forced-scrollbar CSS and
 * fires a visibility change if the handle appeared/disappeared. Also
 * disposes of the temporary scroll handler that may have deferred this
 * call.
 */
private void setScrollSizeNow(double px) {
    internalSetScrollSize(Math.max(0, px));
    recalculateMaxScrollPos();
    forceScrollbar(showsScrollHandle());
    fireVisibilityChangeIfNeeded();
    if (scrollSizeTemporaryScrollHandler != null) {
        scrollSizeTemporaryScrollHandler.removeHandler();
        scrollSizeTemporaryScrollHandler = null;
    }
}
/**
 * Gets the amount of pixels the scrollbar needs to be able to scroll
 * through.
 *
 * @return the number of pixels the scrollbar should be able to scroll
 *         through
 */
public double getScrollSize() {
    final String cssSize = internalGetScrollSize();
    return parseCssDimensionToPixels(cssSize);
}
/**
 * Modifies {@link #scrollSizeElement scrollSizeElement's} dimensions in the
 * opposite axis to what the scrollbar is representing, i.e. sets the
 * scrollbar's thickness.
 *
 * @param px
 *            the dimension that {@link #scrollSizeElement} should take in
 *            the opposite axis to what the scrollbar is representing
 */
protected abstract void internalSetScrollbarThickness(double px);
/**
 * Sets the scrollbar's thickness.
 * <p>
 * If the thickness is set to 0, the scrollbar will be treated as an
 * "invisible" scrollbar. This means, the DOM structure will be given a
 * non-zero size, but {@link #getScrollbarThickness()} will still return the
 * value 0.
 *
 * @param px
 *            the scrollbar's thickness in pixels
 */
public final void setScrollbarThickness(double px) {
    isInvisibleScrollbar = (px == 0);
    if (!isInvisibleScrollbar) {
        // Regular scrollbar: no emulation hooks needed.
        Event.sinkEvents(root, 0);
        Event.setEventListener(root, null);
        root.getStyle().clearVisibility();
    } else {
        // "Invisible" bar: keep it hidden, but flash it while scrolling.
        Event.sinkEvents(root, Event.ONSCROLL);
        Event.setEventListener(root,
                event -> invisibleScrollbarTemporaryResizer.show());
        root.getStyle().setVisibility(Visibility.HIDDEN);
    }
    // The DOM always gets at least 1px so scrolling remains functional.
    internalSetScrollbarThickness(Math.max(1d, px));
}
/**
 * Gets the scrollbar's thickness as defined in the DOM, as a CSS dimension
 * string (e.g. "13px").
 *
 * @return the scrollbar's thickness as defined in the DOM, in pixels
 */
protected abstract String internalGetScrollbarThickness();
/**
 * Gets the scrollbar's thickness.
 * <p>
 * This value will differ from the value in the DOM, if the thickness was
 * set to 0 with {@link #setScrollbarThickness(double)}, as the scrollbar is
 * then treated as "invisible."
 *
 * @return the scrollbar's thickness in pixels
 */
public final double getScrollbarThickness() {
    if (isInvisibleScrollbar) {
        // Emulated invisible bar reports 0 even though the DOM is >= 1px.
        return 0;
    }
    return parseCssDimensionToPixels(internalGetScrollbarThickness());
}
/**
 * Checks whether the scrollbar's handle is visible.
 * <p>
 * In other words, this method checks whether the contents is larger than
 * can visually fit in the element.
 *
 * @return <code>true</code> if the scrollbar's handle is visible
 */
public boolean showsScrollHandle() {
    final double hiddenContent = getScrollSize() - getOffsetSize();
    return hiddenContent > WidgetUtil.PIXEL_EPSILON;
}
/**
 * Recomputes the maximum scroll position from the current scroll and
 * offset sizes, then re-clamps the current position against it.
 */
public void recalculateMaxScrollPos() {
    maxScrollPos = Math.max(0, getScrollSize() - getOffsetSize());
    // make sure that the correct max scroll position is maintained.
    setScrollPos(scrollPos);
}
/**
 * This is a method that JSNI can call to synchronize the object state from
 * the DOM. If unlocked, the cached position adopts the DOM value and a
 * scroll event is scheduled; if locked, the DOM is forced back to the
 * cached position instead.
 */
private final void updateScrollPosFromDom() {
    /*
     * TODO: this method probably shouldn't be called from Escalator's JSNI,
     * but probably could be handled internally by this listening to its own
     * element. Would clean up the code quite a bit. Needs further
     * investigation.
     */
    int newScrollPos = internalGetScrollPos();
    if (!isLocked()) {
        scrollPos = newScrollPos;
        scrollEventFirer.scheduleEvent();
    } else if (scrollPos != newScrollPos) {
        // we need to actually undo the setting of the scroll.
        internalSetScrollPos(toInt32(scrollPos));
    }
}
/**
 * Returns the handler manager, creating it lazily on first use.
 *
 * @return the (possibly just-created) handler manager
 */
protected HandlerManager getHandlerManager() {
    HandlerManager manager = handlerManager;
    if (manager == null) {
        manager = new HandlerManager(this);
        handlerManager = manager;
    }
    return manager;
}
/**
 * Adds handler for the scrollbar handle visibility.
 *
 * @param handler
 *            the {@link VisibilityHandler} to add
 * @return {@link HandlerRegistration} used to remove the handler
 */
public HandlerRegistration addVisibilityHandler(
        final VisibilityHandler handler) {
    final HandlerManager manager = getHandlerManager();
    return manager.addHandler(VisibilityChangeEvent.TYPE, handler);
}
/**
 * Compares the handle's current visibility with the last observed state
 * and fires a {@link VisibilityChangeEvent} only when it changed.
 */
private void fireVisibilityChangeIfNeeded() {
    final boolean visibleNow = showsScrollHandle();
    final boolean changed = visibleNow != scrollHandleIsVisible;
    scrollHandleIsVisible = visibleNow;
    if (changed) {
        getHandlerManager()
                .fireEvent(new VisibilityChangeEvent(visibleNow));
    }
}
/**
 * Converts a double into an integer by JavaScript's terms (round, then
 * coerce to a signed 32-bit integer with {@code | 0}).
 * <p>
 * Implementation copied from {@link Element#toInt32(double)}.
 *
 * @param val
 *            the double value to convert into an integer
 * @return the double value converted to an integer
 */
private static native int toInt32(double val)
/*-{
    return Math.round(val) | 0;
}-*/;
/**
 * Locks or unlocks the scrollbar bundle.
 * <p>
 * A locked scrollbar bundle will refuse to scroll, both programmatically
 * and via user-triggered events.
 *
 * @param isLocked
 *            <code>true</code> to lock, <code>false</code> to unlock
 */
public void setLocked(boolean isLocked) {
    this.isLocked = isLocked;
}
/**
 * Checks whether the scrollbar bundle is locked or not.
 *
 * @return <code>true</code> if the scrollbar bundle is locked
 */
public boolean isLocked() {
    return this.isLocked;
}
/**
 * Returns the scroll direction of this scrollbar bundle
 * ({@code VERTICAL} or {@code HORIZONTAL}).
 *
 * @return the scroll direction of this scrollbar bundle
 */
public abstract Direction getDirection();
/**
 * Adds a scroll handler to the scrollbar bundle.
 *
 * @param handler
 *            the handler to add
 * @return the registration object for the handler registration
 */
public HandlerRegistration addScrollHandler(final ScrollHandler handler) {
    final HandlerManager manager = getHandlerManager();
    return manager.addHandler(ScrollEvent.TYPE, handler);
}
/**
 * Parses a CSS pixel dimension string (e.g. "13px") into a double; an
 * empty string parses as 0.
 */
private static double parseCssDimensionToPixels(String size) {
    /*
     * Sizes of elements are calculated from CSS rather than
     * element.getOffset*() because those values are 0 whenever display:
     * none. Because we know that all elements have populated
     * CSS-dimensions, it's better to do it that way.
     *
     * Another solution would be to make the elements visible while
     * measuring and then re-hide them, but that would cause unnecessary
     * reflows that would probably kill the performance dead.
     */
    if (size.isEmpty()) {
        return 0;
    }
    assert size.endsWith("px") : "Can't parse CSS dimension \"" + size
            + "\"";
    return Double.parseDouble(size.substring(0, size.length() - 2));
}
/**
 * Reports whether any deferred scroll-related work is still outstanding.
 */
@Override
public boolean isWorkPending() {
    // scrollEventFirer.isBeingFired must be included since it might use
    // requestAnimationFrame, which is not checked automatically.
    return scrollInProgress != null
            || scrollSizeTemporaryScrollHandler != null
            || offsetSizeTemporaryScrollHandler != null
            || scrollEventFirer.isBeingFired;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.managedgrafana.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * A structure containing information about the user authentication methods used by the workspace.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/grafana-2020-08-18/AuthenticationDescription" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AuthenticationDescription implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * A structure containing information about how this workspace works with Amazon Web Services SSO.
     * </p>
     */
    private AwsSsoAuthentication awsSso;

    /**
     * <p>
     * Specifies whether this workspace uses Amazon Web Services SSO, SAML, or both methods to authenticate users to use
     * the Grafana console in the Amazon Managed Grafana workspace.
     * </p>
     */
    private java.util.List<String> providers;

    /**
     * <p>
     * A structure containing information about how this workspace works with SAML, including what attributes within the
     * assertion are to be mapped to user information in the workspace.
     * </p>
     */
    private SamlAuthentication saml;

    /**
     * Sets the Amazon Web Services SSO configuration for this workspace.
     *
     * @param awsSso
     *        A structure containing information about how this workspace works with Amazon Web Services SSO.
     */
    public void setAwsSso(AwsSsoAuthentication awsSso) {
        this.awsSso = awsSso;
    }

    /**
     * Returns the Amazon Web Services SSO configuration for this workspace.
     *
     * @return A structure containing information about how this workspace works with Amazon Web Services SSO.
     */
    public AwsSsoAuthentication getAwsSso() {
        return this.awsSso;
    }

    /**
     * Fluent variant of {@link #setAwsSso(AwsSsoAuthentication)}.
     *
     * @param awsSso
     *        A structure containing information about how this workspace works with Amazon Web Services SSO.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthenticationDescription withAwsSso(AwsSsoAuthentication awsSso) {
        setAwsSso(awsSso);
        return this;
    }

    /**
     * Returns the authentication providers (Amazon Web Services SSO, SAML, or both) used to authenticate users to the
     * Grafana console in the Amazon Managed Grafana workspace.
     *
     * @return the list of provider names, or {@code null} if none have been set.
     * @see AuthenticationProviderTypes
     */
    public java.util.List<String> getProviders() {
        return providers;
    }

    /**
     * Sets the authentication providers used by this workspace. The given collection is defensively copied.
     *
     * @param providers
     *        the provider names, or {@code null} to clear the list.
     * @see AuthenticationProviderTypes
     */
    public void setProviders(java.util.Collection<String> providers) {
        if (providers == null) {
            this.providers = null;
            return;
        }
        this.providers = new java.util.ArrayList<String>(providers);
    }

    /**
     * Appends the given provider names to the existing list (if any).
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setProviders(java.util.Collection)} or {@link #withProviders(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param providers
     *        the provider names to append.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see AuthenticationProviderTypes
     */
    public AuthenticationDescription withProviders(String... providers) {
        if (this.providers == null) {
            setProviders(new java.util.ArrayList<String>(providers.length));
        }
        for (String ele : providers) {
            this.providers.add(ele);
        }
        return this;
    }

    /**
     * Replaces the provider list with the given collection.
     *
     * @param providers
     *        the provider names, or {@code null} to clear the list.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see AuthenticationProviderTypes
     */
    public AuthenticationDescription withProviders(java.util.Collection<String> providers) {
        setProviders(providers);
        return this;
    }

    /**
     * Appends the given provider types (as their string representations) to the existing list (if any).
     *
     * @param providers
     *        the provider types to append.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see AuthenticationProviderTypes
     */
    public AuthenticationDescription withProviders(AuthenticationProviderTypes... providers) {
        java.util.ArrayList<String> providersCopy = new java.util.ArrayList<String>(providers.length);
        for (AuthenticationProviderTypes value : providers) {
            providersCopy.add(value.toString());
        }
        if (getProviders() == null) {
            setProviders(providersCopy);
        } else {
            getProviders().addAll(providersCopy);
        }
        return this;
    }

    /**
     * Sets the SAML configuration for this workspace.
     *
     * @param saml
     *        A structure containing information about how this workspace works with SAML, including what attributes
     *        within the assertion are to be mapped to user information in the workspace.
     */
    public void setSaml(SamlAuthentication saml) {
        this.saml = saml;
    }

    /**
     * Returns the SAML configuration for this workspace.
     *
     * @return A structure containing information about how this workspace works with SAML, including what attributes
     *         within the assertion are to be mapped to user information in the workspace.
     */
    public SamlAuthentication getSaml() {
        return this.saml;
    }

    /**
     * Fluent variant of {@link #setSaml(SamlAuthentication)}.
     *
     * @param saml
     *        A structure containing information about how this workspace works with SAML, including what attributes
     *        within the assertion are to be mapped to user information in the workspace.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthenticationDescription withSaml(SamlAuthentication saml) {
        setSaml(saml);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAwsSso() != null)
            sb.append("AwsSso: ").append(getAwsSso()).append(",");
        if (getProviders() != null)
            sb.append("Providers: ").append(getProviders()).append(",");
        if (getSaml() != null)
            sb.append("Saml: ").append(getSaml());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof AuthenticationDescription)) {
            return false;
        }
        AuthenticationDescription other = (AuthenticationDescription) obj;
        // Objects.equals collapses the generated XOR-null + equals pattern.
        return java.util.Objects.equals(getAwsSso(), other.getAwsSso())
                && java.util.Objects.equals(getProviders(), other.getProviders())
                && java.util.Objects.equals(getSaml(), other.getSaml());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the exact same value as the generated
        // prime-31 accumulation over the three members, in the same order.
        return java.util.Objects.hash(getAwsSso(), getProviders(), getSaml());
    }

    @Override
    public AuthenticationDescription clone() {
        try {
            return (AuthenticationDescription) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.managedgrafana.model.transform.AuthenticationDescriptionMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
package com.silkimen.cordovahttp;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import javax.net.ssl.SSLException;
import com.silkimen.http.HttpBodyDecoder;
import com.silkimen.http.HttpRequest;
import com.silkimen.http.HttpRequest.HttpRequestException;
import com.silkimen.http.JsonUtils;
import com.silkimen.http.TLSConfiguration;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.util.Base64;
import android.util.Log;
abstract class CordovaHttpBase implements Runnable {
/** Logcat tag shared by all request implementations. */
protected static final String TAG = "Cordova-Plugin-HTTP";
// HTTP verb (e.g. "GET", "POST") and target URL of this request.
protected String method;
protected String url;
// How the request body is encoded; "none" means no body serialization.
protected String serializer = "none";
// Expected response body handling (e.g. text vs. binary); consumed by subclasses.
protected String responseType;
// Request body payload; its type depends on the configured serializer.
protected Object data;
// Request headers as a JSON object of name/value pairs.
protected JSONObject headers;
// Connection and read timeouts in milliseconds.
protected int connectTimeout;
protected int readTimeout;
// Whether HTTP redirects are followed automatically.
protected boolean followRedirects;
// TLS settings (trust store, client certs) applied to the connection.
protected TLSConfiguration tlsConfiguration;
// Callback used to report success/failure back to the JavaScript side.
protected CordovaObservableCallbackContext callbackContext;
/**
 * Creates a request with a body.
 *
 * @param method          HTTP verb (e.g. "GET", "POST")
 * @param url             target URL
 * @param serializer      body serializer name ("none" for raw)
 * @param data            request body payload, interpreted per serializer
 * @param headers         request headers as JSON name/value pairs
 * @param connectTimeout  connect timeout in milliseconds
 * @param readTimeout     read timeout in milliseconds
 * @param followRedirects whether to follow HTTP redirects automatically
 * @param responseType    expected response handling (e.g. text/binary)
 * @param tlsConfiguration TLS trust/client-cert settings
 * @param callbackContext callback for reporting the result to JavaScript
 */
public CordovaHttpBase(String method, String url, String serializer, Object data, JSONObject headers, int connectTimeout,
    int readTimeout, boolean followRedirects, String responseType, TLSConfiguration tlsConfiguration,
    CordovaObservableCallbackContext callbackContext) {
  this.method = method;
  this.url = url;
  this.serializer = serializer;
  this.data = data;
  this.headers = headers;
  this.connectTimeout = connectTimeout;
  this.readTimeout = readTimeout;
  this.followRedirects = followRedirects;
  this.responseType = responseType;
  this.tlsConfiguration = tlsConfiguration;
  this.callbackContext = callbackContext;
}
/**
 * Creates a body-less request (e.g. GET/HEAD). Delegates to the full
 * constructor with the field defaults ({@code serializer = "none"},
 * {@code data = null}), avoiding the previously duplicated assignment
 * list.
 *
 * @param method          HTTP verb (e.g. "GET", "HEAD")
 * @param url             target URL
 * @param headers         request headers as JSON name/value pairs
 * @param connectTimeout  connect timeout in milliseconds
 * @param readTimeout     read timeout in milliseconds
 * @param followRedirects whether to follow HTTP redirects automatically
 * @param responseType    expected response handling (e.g. text/binary)
 * @param tlsConfiguration TLS trust/client-cert settings
 * @param callbackContext callback for reporting the result to JavaScript
 */
public CordovaHttpBase(String method, String url, JSONObject headers, int connectTimeout, int readTimeout, boolean followRedirects,
    String responseType, TLSConfiguration tlsConfiguration, CordovaObservableCallbackContext callbackContext) {
  this(method, url, "none", null, headers, connectTimeout, readTimeout, followRedirects, responseType, tlsConfiguration,
      callbackContext);
}
/**
 * Executes the HTTP request described by this instance and reports the
 * outcome through the callback context.
 * <p>
 * Network-layer failures are mapped to negative status codes: -2 for TLS
 * failures, -3 for unresolvable hosts, -4 for timeouts and -1 for any
 * other request error. Thread interruption is reported via
 * {@code setAborted()}.
 */
@Override
public void run() {
  CordovaHttpResponse response = new CordovaHttpResponse();
  HttpRequest request = null;

  try {
    request = this.createRequest();
    this.prepareRequest(request);
    this.sendBody(request);
    this.processResponse(request, response);
    // NOTE(review): disconnect() only runs on success; on failure the
    // connection is left to the underlying library. Confirm whether it
    // should move to a finally block.
    request.disconnect();
  } catch (HttpRequestException e) {
    Throwable cause = e.getCause();
    // Defensive: don't assume the wrapped cause or its message exist.
    String message = cause == null ? null : cause.getMessage();

    if (cause instanceof SSLException) {
      response.setStatus(-2);
      response.setErrorMessage("TLS connection could not be established: " + e.getMessage());
      Log.w(TAG, "TLS connection could not be established", e);
    } else if (cause instanceof UnknownHostException) {
      response.setStatus(-3);
      response.setErrorMessage("Host could not be resolved: " + e.getMessage());
      Log.w(TAG, "Host could not be resolved", e);
    } else if (cause instanceof SocketTimeoutException) {
      response.setStatus(-4);
      response.setErrorMessage("Request timed out: " + e.getMessage());
      Log.w(TAG, "Request timed out", e);
    } else if (cause instanceof InterruptedIOException && "thread interrupted".equalsIgnoreCase(message)) {
      // equalsIgnoreCase is null-safe and locale-independent; the previous
      // message.toLowerCase() threw an NPE when the cause had no message.
      this.setAborted(request, response);
    } else {
      response.setStatus(-1);
      response.setErrorMessage("There was an error with the request: " + message);
      Log.w(TAG, "Generic request error", e);
    }
  } catch (InterruptedException ie) {
    this.setAborted(request, response);
  } catch (Exception e) {
    response.setStatus(-1);
    response.setErrorMessage(e.getMessage());
    Log.e(TAG, "An unexpected error occured", e);
  }

  try {
    if (response.hasFailed()) {
      this.callbackContext.error(response.toJSON());
    } else {
      this.callbackContext.success(response.toJSON());
    }
  } catch (JSONException e) {
    Log.e(TAG, "An unexpected error occured while creating HTTP response object", e);
  }
}
protected HttpRequest createRequest() throws JSONException {
return new HttpRequest(this.url, this.method);
}
protected void prepareRequest(HttpRequest request) throws JSONException, IOException {
request.followRedirects(this.followRedirects);
request.connectTimeout(this.connectTimeout);
request.readTimeout(this.readTimeout);
request.acceptCharset("UTF-8");
request.uncompress(true);
if (this.tlsConfiguration.getHostnameVerifier() != null) {
request.setHostnameVerifier(this.tlsConfiguration.getHostnameVerifier());
}
request.setSSLSocketFactory(this.tlsConfiguration.getTLSSocketFactory());
// setup content type before applying headers, so user can override it
this.setContentType(request);
request.headers(JsonUtils.getStringMap(this.headers));
}
protected void setContentType(HttpRequest request) {
if ("json".equals(this.serializer)) {
request.contentType("application/json", "UTF-8");
} else if ("utf8".equals(this.serializer)) {
request.contentType("text/plain", "UTF-8");
} else if ("raw".equals(this.serializer)) {
request.contentType("application/octet-stream");
} else if ("urlencoded".equals(this.serializer)) {
// intentionally left blank, because content type is set in HttpRequest.form()
} else if ("multipart".equals(this.serializer)) {
// intentionally left blank, because content type is set in HttpRequest.part()
}
}
protected void sendBody(HttpRequest request) throws Exception {
if (this.data == null) {
return;
}
if ("json".equals(this.serializer)) {
request.send(this.data.toString());
} else if ("utf8".equals(this.serializer)) {
request.send(((JSONObject) this.data).getString("text"));
} else if ("raw".equals(this.serializer)) {
request.send(Base64.decode((String)this.data, Base64.DEFAULT));
} else if ("urlencoded".equals(this.serializer)) {
request.form(JsonUtils.getObjectMap((JSONObject) this.data));
} else if ("multipart".equals(this.serializer)) {
JSONArray buffers = ((JSONObject) this.data).getJSONArray("buffers");
JSONArray names = ((JSONObject) this.data).getJSONArray("names");
JSONArray fileNames = ((JSONObject) this.data).getJSONArray("fileNames");
JSONArray types = ((JSONObject) this.data).getJSONArray("types");
for (int i = 0; i < buffers.length(); ++i) {
byte[] bytes = Base64.decode(buffers.getString(i), Base64.DEFAULT);
String name = names.getString(i);
if (fileNames.isNull(i)) {
request.part(name, new String(bytes, "UTF-8"));
} else {
request.part(name, fileNames.getString(i), types.getString(i), new ByteArrayInputStream(bytes));
}
}
// prevent sending malformed empty multipart requests (#372)
if (buffers.length() == 0) {
request.contentType("multipart/form-data; boundary=00content0boundary00");
request.send("\r\n--00content0boundary00--\r\n");
}
}
}
protected void processResponse(HttpRequest request, CordovaHttpResponse response) throws Exception {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
request.receive(outputStream);
response.setStatus(request.code());
response.setUrl(request.url().toString());
response.setHeaders(request.headers());
if (request.code() >= 200 && request.code() < 300) {
if ("text".equals(this.responseType) || "json".equals(this.responseType)) {
String decoded = HttpBodyDecoder.decodeBody(outputStream.toByteArray(), request.charset());
response.setBody(decoded);
} else {
response.setData(outputStream.toByteArray());
}
} else {
response.setErrorMessage(HttpBodyDecoder.decodeBody(outputStream.toByteArray(), request.charset()));
}
}
protected void setAborted(HttpRequest request, CordovaHttpResponse response) {
response.setStatus(-8);
response.setErrorMessage("Request was aborted");
if (request != null) {
try {
request.disconnect();
} catch(Exception any){
Log.w(TAG, "Failed to close aborted request", any);
}
}
Log.i(TAG, "Request was aborted");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.csv;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import org.apache.camel.Exchange;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.spi.DataFormatName;
import org.apache.camel.spi.annotations.Dataformat;
import org.apache.camel.support.service.ServiceSupport;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.QuoteMode;
/**
* CSV Data format.
* <p/>
* By default, columns are autogenerated in the resulting CSV. Subsequent
* messages use the previously created columns with new fields being added at
* the end of the line. Thus, field order is the same from message to message.
* Autogeneration can be disabled. In this case, only the fields defined in
* csvConfig are written on the output.
*/
@Dataformat("csv")
public class CsvDataFormat extends ServiceSupport implements DataFormat, DataFormatName {
    // CSV format options
    private CSVFormat format = CSVFormat.DEFAULT;
    private boolean commentMarkerDisabled;
    private Character commentMarker;
    private Character delimiter;
    private boolean escapeDisabled;
    private Character escape;
    private boolean headerDisabled;
    private String[] header;
    private Boolean allowMissingColumnNames;
    private Boolean ignoreEmptyLines;
    private Boolean ignoreSurroundingSpaces;
    private boolean nullStringDisabled;
    private String nullString;
    private boolean quoteDisabled;
    private Character quote;
    private QuoteMode quoteMode;
    private boolean recordSeparatorDisabled;
    private String recordSeparator;
    private Boolean skipHeaderRecord;
    private Boolean trim;
    private Boolean ignoreHeaderCase;
    private Boolean trailingDelimiter;
    // Unmarshal options
    private boolean lazyLoad;
    private boolean useMaps;
    private boolean useOrderedMaps;
    private CsvRecordConverter<?> recordConverter;
    private CsvMarshallerFactory marshallerFactory = CsvMarshallerFactory.DEFAULT;

    private volatile CsvMarshaller marshaller;
    private volatile CsvUnmarshaller unmarshaller;

    public CsvDataFormat() {
    }

    public CsvDataFormat(CSVFormat format) {
        setFormat(format);
    }

    @Override
    public String getDataFormatName() {
        return "csv";
    }

    @Override
    public void marshal(Exchange exchange, Object object, OutputStream outputStream) throws Exception {
        marshaller.marshal(exchange, object, outputStream);
    }

    @Override
    public Object unmarshal(Exchange exchange, InputStream inputStream) throws Exception {
        return unmarshaller.unmarshal(exchange, inputStream);
    }

    @Override
    protected void doStart() throws Exception {
        marshaller = marshallerFactory.create(getActiveFormat(), this);
        unmarshaller = CsvUnmarshaller.create(getActiveFormat(), this);
    }

    @Override
    protected void doStop() throws Exception {
        // noop
    }

    /**
     * Computes the effective {@link CSVFormat}: the base {@link #format} with
     * every explicitly configured option (or "disabled" flag) applied on top.
     * {@code CSVFormat} is immutable, so each {@code with*} call returns a new
     * instance and the base format is never mutated.
     */
    CSVFormat getActiveFormat() {
        CSVFormat answer = format;

        if (commentMarkerDisabled) {
            answer = answer.withCommentMarker(null); // null disables the comment marker
        } else if (commentMarker != null) {
            answer = answer.withCommentMarker(commentMarker);
        }

        if (delimiter != null) {
            answer = answer.withDelimiter(delimiter);
        }

        if (escapeDisabled) {
            answer = answer.withEscape(null); // null disables the escape
        } else if (escape != null) {
            answer = answer.withEscape(escape);
        }

        if (headerDisabled) {
            answer = answer.withHeader((String[]) null); // null disables the header
        } else if (header != null) {
            answer = answer.withHeader(header);
        }

        if (allowMissingColumnNames != null) {
            answer = answer.withAllowMissingColumnNames(allowMissingColumnNames);
        }

        if (ignoreEmptyLines != null) {
            answer = answer.withIgnoreEmptyLines(ignoreEmptyLines);
        }

        if (ignoreSurroundingSpaces != null) {
            answer = answer.withIgnoreSurroundingSpaces(ignoreSurroundingSpaces);
        }

        if (nullStringDisabled) {
            answer = answer.withNullString(null); // null disables the null string replacement
        } else if (nullString != null) {
            answer = answer.withNullString(nullString);
        }

        if (quoteDisabled) {
            answer = answer.withQuote(null); // null disables quotes
        } else if (quote != null) {
            answer = answer.withQuote(quote);
        }

        if (quoteMode != null) {
            answer = answer.withQuoteMode(quoteMode);
        }

        if (recordSeparatorDisabled) {
            answer = answer.withRecordSeparator(null); // null disables the record separator
        } else if (recordSeparator != null) {
            answer = answer.withRecordSeparator(recordSeparator);
        }

        if (skipHeaderRecord != null) {
            answer = answer.withSkipHeaderRecord(skipHeaderRecord);
        }

        if (trim != null) {
            answer = answer.withTrim(trim);
        }

        if (ignoreHeaderCase != null) {
            answer = answer.withIgnoreHeaderCase(ignoreHeaderCase);
        }

        if (trailingDelimiter != null) {
            answer = answer.withTrailingDelimiter(trailingDelimiter);
        }

        return answer;
    }

    //region Getters/Setters

    /**
     * Gets the CSV format before applying any changes.
     * It cannot be {@code null}, the default one is {@link org.apache.commons.csv.CSVFormat#DEFAULT}.
     *
     * @return CSV format
     */
    public CSVFormat getFormat() {
        return format;
    }

    /**
     * Sets the CSV format before applying any changes.
     * If {@code null}, then {@link org.apache.commons.csv.CSVFormat#DEFAULT} is used instead.
     *
     * @param format CSV format
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat
     * @see org.apache.commons.csv.CSVFormat#DEFAULT
     */
    public CsvDataFormat setFormat(CSVFormat format) {
        this.format = (format == null) ? CSVFormat.DEFAULT : format;
        return this;
    }

    /**
     * Sets the {@link CsvMarshaller} factory.
     * If {@code null}, then {@link CsvMarshallerFactory#DEFAULT} is used instead.
     *
     * @param marshallerFactory marshaller factory, {@code null} for the default one
     * @return Current {@code CsvDataFormat}, fluent API
     */
    public CsvDataFormat setMarshallerFactory(CsvMarshallerFactory marshallerFactory) {
        this.marshallerFactory = (marshallerFactory == null) ? CsvMarshallerFactory.DEFAULT : marshallerFactory;
        return this;
    }

    /**
     * Returns the used {@link CsvMarshallerFactory}.
     *
     * @return never {@code null}.
     */
    public CsvMarshallerFactory getMarshallerFactory() {
        return marshallerFactory;
    }

    /**
     * Sets the CSV format by name before applying any changes.
     *
     * @param name CSV format name (DEFAULT, RFC4180, EXCEL, TDF or MYSQL); {@code null} resets to DEFAULT
     * @return Current {@code CsvDataFormat}, fluent API
     * @throws IllegalArgumentException if the name is not a supported format
     * @see #setFormat(org.apache.commons.csv.CSVFormat)
     * @see org.apache.commons.csv.CSVFormat
     */
    public CsvDataFormat setFormatName(String name) {
        if (name == null) {
            setFormat(null);
        } else if ("DEFAULT".equals(name)) {
            setFormat(CSVFormat.DEFAULT);
        } else if ("RFC4180".equals(name)) {
            setFormat(CSVFormat.RFC4180);
        } else if ("EXCEL".equals(name)) {
            setFormat(CSVFormat.EXCEL);
        } else if ("TDF".equals(name)) {
            setFormat(CSVFormat.TDF);
        } else if ("MYSQL".equals(name)) {
            setFormat(CSVFormat.MYSQL);
        } else {
            // include the offending name so misconfiguration is diagnosable
            throw new IllegalArgumentException("Unsupported format: " + name);
        }
        return this;
    }

    /**
     * Indicates whether or not the comment markers are disabled.
     *
     * @return {@code true} if the comment markers are disabled, {@code false} otherwise
     */
    public boolean isCommentMarkerDisabled() {
        return commentMarkerDisabled;
    }

    /**
     * Sets whether or not the comment markers are disabled.
     *
     * @param commentMarkerDisabled {@code true} if the comment markers are disabled, {@code false} otherwise
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withCommentMarker(java.lang.Character)
     */
    public CsvDataFormat setCommentMarkerDisabled(boolean commentMarkerDisabled) {
        this.commentMarkerDisabled = commentMarkerDisabled;
        return this;
    }

    /**
     * Gets the comment marker.
     * If {@code null} then the default one of the format used.
     *
     * @return Comment marker
     */
    public Character getCommentMarker() {
        return commentMarker;
    }

    /**
     * Sets the comment marker to use.
     * If {@code null} then the default one of the format used.
     *
     * @param commentMarker Comment marker
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withCommentMarker(Character)
     */
    public CsvDataFormat setCommentMarker(Character commentMarker) {
        this.commentMarker = commentMarker;
        return this;
    }

    /**
     * Gets the delimiter.
     * If {@code null} then the default one of the format used.
     *
     * @return Delimiter
     */
    public Character getDelimiter() {
        return delimiter;
    }

    /**
     * Sets the delimiter.
     * If {@code null} then the default one of the format used.
     *
     * @param delimiter Delimiter
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withDelimiter(char)
     */
    public CsvDataFormat setDelimiter(Character delimiter) {
        this.delimiter = delimiter;
        return this;
    }

    /**
     * Indicates whether or not the escaping is disabled.
     *
     * @return {@code true} if the escaping is disabled, {@code false} otherwise
     */
    public boolean isEscapeDisabled() {
        return escapeDisabled;
    }

    /**
     * Sets whether or not the escaping is disabled.
     *
     * @param escapeDisabled {@code true} if the escaping is disabled, {@code false} otherwise
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withEscape(Character)
     */
    public CsvDataFormat setEscapeDisabled(boolean escapeDisabled) {
        this.escapeDisabled = escapeDisabled;
        return this;
    }

    /**
     * Gets the escape character.
     * If {@code null} then the default one of the format used.
     *
     * @return Escape character
     */
    public Character getEscape() {
        return escape;
    }

    /**
     * Sets the escape character.
     * If {@code null} then the default one of the format used.
     *
     * @param escape Escape character
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withEscape(Character)
     */
    public CsvDataFormat setEscape(Character escape) {
        this.escape = escape;
        return this;
    }

    /**
     * Indicates whether or not the headers are disabled.
     *
     * @return {@code true} if the headers are disabled, {@code false} otherwise
     */
    public boolean isHeaderDisabled() {
        return headerDisabled;
    }

    /**
     * Sets whether or not the headers are disabled.
     *
     * @param headerDisabled {@code true} if the headers are disabled, {@code false} otherwise
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withHeader(String...)
     */
    public CsvDataFormat setHeaderDisabled(boolean headerDisabled) {
        this.headerDisabled = headerDisabled;
        return this;
    }

    /**
     * Gets the header.
     * If {@code null} then the default one of the format used. If empty then it will be automatically handled.
     * <p>
     * The returned array is a defensive copy; mutating it does not affect this data format.
     * </p>
     *
     * @return Header
     */
    public String[] getHeader() {
        // defensive copy so callers cannot mutate our internal state
        return (header == null) ? null : Arrays.copyOf(header, header.length);
    }

    /**
     * Sets the header.
     * If {@code null} then the default one of the format used. If empty then it will be automatically handled.
     *
     * @param header Header (a defensive copy is stored)
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withHeader(String...)
     */
    public CsvDataFormat setHeader(String[] header) {
        // null must be accepted: it means "use the format's default header"
        // (Arrays.copyOf(null, ...) would throw NullPointerException)
        this.header = (header == null) ? null : Arrays.copyOf(header, header.length);
        return this;
    }

    /**
     * Indicates whether or not missing column names are allowed.
     * If {@code null} then the default value of the format used.
     *
     * @return Whether or not missing column names are allowed
     */
    public Boolean getAllowMissingColumnNames() {
        return allowMissingColumnNames;
    }

    /**
     * Sets whether or not missing column names are allowed.
     * If {@code null} then the default value of the format used.
     *
     * @param allowMissingColumnNames Whether or not missing column names are allowed
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withAllowMissingColumnNames(boolean)
     */
    public CsvDataFormat setAllowMissingColumnNames(Boolean allowMissingColumnNames) {
        this.allowMissingColumnNames = allowMissingColumnNames;
        return this;
    }

    /**
     * Indicates whether or not empty lines must be ignored.
     * If {@code null} then the default value of the format used.
     *
     * @return Whether or not empty lines must be ignored
     */
    public Boolean getIgnoreEmptyLines() {
        return ignoreEmptyLines;
    }

    /**
     * Sets whether or not empty lines must be ignored.
     * If {@code null} then the default value of the format used.
     *
     * @param ignoreEmptyLines Whether or not empty lines must be ignored
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withIgnoreEmptyLines(boolean)
     */
    public CsvDataFormat setIgnoreEmptyLines(Boolean ignoreEmptyLines) {
        this.ignoreEmptyLines = ignoreEmptyLines;
        return this;
    }

    /**
     * Indicates whether or not surrounding spaces must be ignored.
     * If {@code null} then the default value of the format used.
     *
     * @return Whether or not surrounding spaces must be ignored
     */
    public Boolean getIgnoreSurroundingSpaces() {
        return ignoreSurroundingSpaces;
    }

    /**
     * Sets whether or not surrounding spaces must be ignored.
     * If {@code null} then the default value of the format used.
     *
     * @param ignoreSurroundingSpaces Whether or not surrounding spaces must be ignored
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withIgnoreSurroundingSpaces(boolean)
     */
    public CsvDataFormat setIgnoreSurroundingSpaces(Boolean ignoreSurroundingSpaces) {
        this.ignoreSurroundingSpaces = ignoreSurroundingSpaces;
        return this;
    }

    /**
     * Indicates whether or not the null string replacement is disabled.
     *
     * @return {@code true} if the null string replacement is disabled, {@code false} otherwise
     */
    public boolean isNullStringDisabled() {
        return nullStringDisabled;
    }

    /**
     * Sets whether or not the null string replacement is disabled.
     *
     * @param nullStringDisabled {@code true} if the null string replacement is disabled, {@code false} otherwise
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withNullString(String)
     */
    public CsvDataFormat setNullStringDisabled(boolean nullStringDisabled) {
        this.nullStringDisabled = nullStringDisabled;
        return this;
    }

    /**
     * Gets the null string replacement.
     * If {@code null} then the default one of the format used.
     *
     * @return Null string replacement
     */
    public String getNullString() {
        return nullString;
    }

    /**
     * Sets the null string replacement.
     * If {@code null} then the default one of the format used.
     *
     * @param nullString Null string replacement
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withNullString(String)
     */
    public CsvDataFormat setNullString(String nullString) {
        this.nullString = nullString;
        return this;
    }

    /**
     * Indicates whether or not quotes are disabled.
     *
     * @return {@code true} if quotes are disabled, {@code false} otherwise
     */
    public boolean isQuoteDisabled() {
        return quoteDisabled;
    }

    /**
     * Sets whether or not quotes are disabled
     *
     * @param quoteDisabled {@code true} if quotes are disabled, {@code false} otherwise
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withQuote(Character)
     */
    public CsvDataFormat setQuoteDisabled(boolean quoteDisabled) {
        this.quoteDisabled = quoteDisabled;
        return this;
    }

    /**
     * Gets the quote character.
     * If {@code null} then the default one of the format used.
     *
     * @return Quote character
     */
    public Character getQuote() {
        return quote;
    }

    /**
     * Sets the quote character.
     * If {@code null} then the default one of the format used.
     *
     * @param quote Quote character
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withQuote(Character)
     */
    public CsvDataFormat setQuote(Character quote) {
        this.quote = quote;
        return this;
    }

    /**
     * Gets the quote mode.
     * If {@code null} then the default one of the format used.
     *
     * @return Quote mode
     */
    public QuoteMode getQuoteMode() {
        return quoteMode;
    }

    /**
     * Sets the quote mode.
     * If {@code null} then the default one of the format used.
     *
     * @param quoteMode Quote mode
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withQuoteMode(org.apache.commons.csv.QuoteMode)
     */
    public CsvDataFormat setQuoteMode(QuoteMode quoteMode) {
        this.quoteMode = quoteMode;
        return this;
    }

    /**
     * Indicates whether or not the record separator is disabled.
     *
     * @return {@code true} if the record separator disabled, {@code false} otherwise
     */
    public boolean isRecordSeparatorDisabled() {
        return recordSeparatorDisabled;
    }

    /**
     * Sets whether or not the record separator is disabled.
     *
     * @param recordSeparatorDisabled {@code true} if the record separator disabled, {@code false} otherwise
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withRecordSeparator(String)
     */
    public CsvDataFormat setRecordSeparatorDisabled(boolean recordSeparatorDisabled) {
        this.recordSeparatorDisabled = recordSeparatorDisabled;
        return this;
    }

    /**
     * Gets the record separator.
     * If {@code null} then the default one of the format used.
     *
     * @return Record separator
     */
    public String getRecordSeparator() {
        return recordSeparator;
    }

    /**
     * Sets the record separator.
     * If {@code null} then the default one of the format used.
     *
     * @param recordSeparator Record separator
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withRecordSeparator(String)
     */
    public CsvDataFormat setRecordSeparator(String recordSeparator) {
        this.recordSeparator = recordSeparator;
        return this;
    }

    /**
     * Indicates whether or not header record must be skipped.
     * If {@code null} then the default value of the format used.
     *
     * @return Whether or not header record must be skipped
     */
    public Boolean getSkipHeaderRecord() {
        return skipHeaderRecord;
    }

    /**
     * Sets whether or not header record must be skipped.
     * If {@code null} then the default value of the format used.
     *
     * @param skipHeaderRecord Whether or not header record must be skipped
     * @return Current {@code CsvDataFormat}, fluent API
     * @see org.apache.commons.csv.CSVFormat#withSkipHeaderRecord(boolean)
     */
    public CsvDataFormat setSkipHeaderRecord(Boolean skipHeaderRecord) {
        this.skipHeaderRecord = skipHeaderRecord;
        return this;
    }

    /**
     * Indicates whether or not the unmarshalling should lazily load the records.
     *
     * @return {@code true} for lazy loading, {@code false} otherwise
     */
    public boolean isLazyLoad() {
        return lazyLoad;
    }

    /**
     * Indicates whether or not the unmarshalling should lazily load the records.
     *
     * @param lazyLoad {@code true} for lazy loading, {@code false} otherwise
     * @return Current {@code CsvDataFormat}, fluent API
     */
    public CsvDataFormat setLazyLoad(boolean lazyLoad) {
        this.lazyLoad = lazyLoad;
        return this;
    }

    /**
     * Indicates whether or not the unmarshalling should produce maps instead of lists.
     *
     * @return {@code true} for maps, {@code false} for lists
     */
    public boolean isUseMaps() {
        return useMaps;
    }

    /**
     * Sets whether or not the unmarshalling should produce maps instead of lists.
     *
     * @param useMaps {@code true} for maps, {@code false} for lists
     * @return Current {@code CsvDataFormat}, fluent API
     */
    public CsvDataFormat setUseMaps(boolean useMaps) {
        this.useMaps = useMaps;
        return this;
    }

    /**
     * Indicates whether or not the unmarshalling should produce ordered maps instead of lists.
     *
     * @return {@code true} for maps, {@code false} for lists
     */
    public boolean isUseOrderedMaps() {
        return useOrderedMaps;
    }

    /**
     * Sets whether or not the unmarshalling should produce ordered maps instead of lists.
     *
     * @param useOrderedMaps {@code true} for maps, {@code false} for lists
     * @return Current {@code CsvDataFormat}, fluent API
     */
    public CsvDataFormat setUseOrderedMaps(boolean useOrderedMaps) {
        this.useOrderedMaps = useOrderedMaps;
        return this;
    }

    /**
     * Gets the record converter to use. If {@code null} then it will use {@link CsvDataFormat#isUseMaps()} for finding
     * the proper converter.
     *
     * @return Record converter to use
     */
    public CsvRecordConverter<?> getRecordConverter() {
        return recordConverter;
    }

    /**
     * Sets the record converter to use. If {@code null} then it will use {@link CsvDataFormat#isUseMaps()} for finding
     * the proper converter.
     *
     * @param recordConverter Record converter to use
     * @return Current {@code CsvDataFormat}, fluent API
     */
    public CsvDataFormat setRecordConverter(CsvRecordConverter<?> recordConverter) {
        this.recordConverter = recordConverter;
        return this;
    }
    //endregion

    /**
     * Sets whether or not to trim leading and trailing blanks.
     * <p>
     * If {@code null} then the default value of the format used.
     * </p>
     *
     * @param trim whether or not to trim leading and trailing blanks.
     *            <code>null</code> value allowed.
     * @return Current {@code CsvDataFormat}, fluent API.
     */
    public CsvDataFormat setTrim(Boolean trim) {
        this.trim = trim;
        return this;
    }

    /**
     * Indicates whether or not to trim leading and trailing blanks.
     *
     * @return {@link Boolean#TRUE} if leading and trailing blanks should be
     *         trimmed. {@link Boolean#FALSE} otherwise. Could return
     *         <code>null</code> if value has NOT been set.
     */
    public Boolean getTrim() {
        return trim;
    }

    /**
     * Sets whether or not to ignore case when accessing header names.
     * <p>
     * If {@code null} then the default value of the format used.
     * </p>
     *
     * @param ignoreHeaderCase whether or not to ignore case when accessing header names.
     *            <code>null</code> value allowed.
     * @return Current {@code CsvDataFormat}, fluent API.
     */
    public CsvDataFormat setIgnoreHeaderCase(Boolean ignoreHeaderCase) {
        this.ignoreHeaderCase = ignoreHeaderCase;
        return this;
    }

    /**
     * Indicates whether or not to ignore case when accessing header names.
     *
     * @return {@link Boolean#TRUE} if case should be ignored when accessing
     *         header name. {@link Boolean#FALSE} otherwise. Could return
     *         <code>null</code> if value has NOT been set.
     */
    public Boolean getIgnoreHeaderCase() {
        return ignoreHeaderCase;
    }

    /**
     * Sets whether or not to add a trailing delimiter.
     * <p>
     * If {@code null} then the default value of the format used.
     * </p>
     *
     * @param trailingDelimiter whether or not to add a trailing delimiter.
     * @return Current {@code CsvDataFormat}, fluent API.
     */
    public CsvDataFormat setTrailingDelimiter(Boolean trailingDelimiter) {
        this.trailingDelimiter = trailingDelimiter;
        return this;
    }

    /**
     * Indicates whether or not to add a trailing delimiter.
     *
     * @return {@link Boolean#TRUE} if a trailing delimiter should be added.
     *         {@link Boolean#FALSE} otherwise. Could return <code>null</code>
     *         if value has NOT been set.
     */
    public Boolean getTrailingDelimiter() {
        return trailingDelimiter;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.nextreports.designer.wizrep;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.io.File;
import java.util.List;
import javax.swing.*;
import javax.swing.event.ListSelectionListener;
import javax.swing.event.ListSelectionEvent;
import org.jdesktop.swingx.JXList;
import ro.nextreports.designer.Globals;
import ro.nextreports.designer.template.report.TemplateFileFilter;
import ro.nextreports.designer.template.report.TemplatePreviewPanel;
import ro.nextreports.designer.template.report.action.ApplyTemplateAction;
import ro.nextreports.designer.template.report.action.CreateTemplateAction;
import ro.nextreports.designer.ui.wizard.WizardPanel;
import ro.nextreports.designer.util.I18NSupport;
import ro.nextreports.designer.util.ImageUtil;
/**
* Created by IntelliJ IDEA.
* User: mihai.panaitescu
* Date: Oct 10, 2008
* Time: 1:28:01 PM
*/
public class SelectTemplateWizardPanel extends WizardPanel {
// Shows the file name of the currently selected template (read-only).
private JTextField templateText;
// Opens the template chooser; its action is installed in onDisplay().
private JButton templateButton;
// Launches the "create new template" dialog.
private JButton createTemplateButton;
// Preferred size for the template text field.
private Dimension dim = new Dimension(200, 20);
// Common size for the small tool buttons.
private Dimension buttonDim = new Dimension(20, 20);
// Preferred size for the default-template list scroll pane.
private Dimension scrDim = new Dimension(200, 150);
// Template file chosen by the user; consumed by validateFinish().
private File selectedTemplate;
private CreateTemplateAction templateAction = new CreateTemplateAction();
// List of bundled/default templates, backed by defTemplateModel.
private JXList defTemplateList = new JXList();
private DefaultListModel defTemplateModel = new DefaultListModel();
// Renders a preview of the template selected in the list.
private TemplatePreviewPanel previewPanel = new TemplatePreviewPanel();
public SelectTemplateWizardPanel() {
super();
banner.setTitle(I18NSupport.getString("wizard.panel.step",5,5) + I18NSupport.getString("wizard.panel.seltemplate.title"));
banner.setSubtitle(I18NSupport.getString("wizard.panel.seltemplate.subtitle"));
init();
}
/**
 * Called when the panel is shown: focuses the default-template list (when it
 * has entries) and installs a fresh "apply template" action on the chooser
 * button, so a selection made in the dialog is reflected in the text field.
 */
public void onDisplay() {
    if (!defTemplateModel.isEmpty()) {
        defTemplateList.requestFocus();
    }

    JDialog owner = (JDialog) context.getAttribute(WizardConstants.MAIN_FRAME);
    ApplyTemplateAction applyAction = new ApplyTemplateAction(owner, false, true) {
        protected void selection() {
            // remember the chosen file and surface its name in the UI
            selectedTemplate = getSelectedFile();
            templateText.setText(selectedTemplate.getName());
        }
    };
    templateButton.setAction(applyAction);
}
/**
 * Is there a next panel? This is the last wizard step, so never.
 *
 * @return always {@code false}
 */
public boolean hasNext() {
    return false;
}
/**
 * Called to validate the panel before moving to the next panel.
 * No validation is needed here (there is no next panel, see {@link #hasNext()}).
 *
 * @param messages a List of messages to be displayed.
 * @return always {@code true}
 */
public boolean validateNext(List<String> messages) {
    return true;
}
/**
 * Get the next panel to go to.
 *
 * @return always {@code null}; this is the final wizard step
 */
public WizardPanel getNextPanel() {
    return null;
}
/**
 * Can this panel finish the wizard? As the last step, it always can.
 *
 * @return always {@code true}
 */
public boolean canFinish() {
    return true;
}
/**
 * Called to validate the panel before finishing the wizard. Opens the report
 * built from the selected template and always accepts the finish.
 *
 * @param messages a List of messages to be displayed.
 * @return always {@code true}
 */
public boolean validateFinish(List<String> messages) {
    // NOTE(review): selectedTemplate may still be null if the user never picked
    // a template — presumably WizardUtil.openReport tolerates that; verify.
    WizardUtil.openReport(context, selectedTemplate);
    return true;
}
    /**
     * Handle finishing the wizard. No-op: the actual work (opening the
     * report) is done in validateFinish().
     */
    public void onFinish() {
    }
    /**
     * Builds the panel UI: a scrollable list of default templates with a
     * live preview beside it, and a lower row holding the selected template
     * name (read-only), a browse button and a create-template button.
     */
    private void init() {
        setLayout(new BorderLayout());
        templateText = new JTextField();
        JScrollPane scr = new JScrollPane();
        scr.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
        scr.setMinimumSize(scrDim);
        scr.setPreferredSize(scrDim);
        scr.getViewport().add(defTemplateList, null);
        defTemplateList.setModel(defTemplateModel);
        defTemplateList.addListSelectionListener(new ListSelectionListener() {
            public void valueChanged(ListSelectionEvent e) {
                // Index 0 is the "none" entry: preview only real templates.
                if (defTemplateList.getSelectedIndex() > 0) {
                    previewPanel.setFileTemplate(getSelectedTemplateFile());
                } else {
                    previewPanel.setFileTemplate(null);
                }
                // Keep selectedTemplate / templateText in sync with the list.
                chooseTemplate();
            }
        });
        populateDefTemplates();
        JPanel qPanel = new JPanel(new GridBagLayout());
        JLabel defTemplateLabel =new JLabel(I18NSupport.getString("wizard.panel.seltemplate.default.label"));
        JLabel templateLabel =new JLabel(I18NSupport.getString("wizard.panel.seltemplate.label"));
        templateText.setPreferredSize(dim);
        // Read-only: the value is set via list selection or the file actions.
        templateText.setEditable(false);
        templateButton = new JButton();
        templateButton.setPreferredSize(buttonDim);
        templateButton.setMaximumSize(buttonDim);
        templateButton.setMinimumSize(buttonDim);
        createTemplateButton = new JButton();
        createTemplateButton.setIcon(ImageUtil.getImageIcon("template_create"));
        createTemplateButton.setToolTipText(I18NSupport.getString("create.template"));
        createTemplateButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // Run the create-template dialog; on save, refresh the list
                // and adopt the newly created file as the selection.
                templateAction.actionPerformed(e);
                selectedTemplate = templateAction.getSavedFile();
                if (selectedTemplate != null) {
                    populateDefTemplates();
                    templateText.setText(selectedTemplate.getName());
                }
            }
        });
        createTemplateButton.setPreferredSize(buttonDim);
        createTemplateButton.setMaximumSize(buttonDim);
        createTemplateButton.setMinimumSize(buttonDim);
        JPanel lowPanel = new JPanel(new GridBagLayout());
        lowPanel.add(templateLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE,
                new Insets(5, 0, 5, 0), 0, 0));
        lowPanel.add(templateText, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE,
                new Insets(5, 5, 5, 0), 0, 0));
        lowPanel.add(templateButton, new GridBagConstraints(2, 0, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE,
                new Insets(5, 5, 5, 0), 0, 0));
        lowPanel.add(createTemplateButton, new GridBagConstraints(3, 0, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE,
                new Insets(5, 5, 5, 5), 0, 0));
        // Filler label soaks up the remaining horizontal space.
        lowPanel.add(new JLabel(""), new GridBagConstraints(4, 0, 1, 2, 1.0, 1.0,
                GridBagConstraints.WEST, GridBagConstraints.BOTH,
                new Insets(5, 5, 5, 5), 0, 0));
        qPanel.add(defTemplateLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0,
                GridBagConstraints.WEST, GridBagConstraints.NONE,
                new Insets(5, 5, 5, 5), 0, 0));
        qPanel.add(scr, new GridBagConstraints(0, 1, 1, 1, 0.0, 1.0,
                GridBagConstraints.WEST, GridBagConstraints.VERTICAL,
                new Insets(5, 5, 5, 0), 0, 0));
        qPanel.add(previewPanel, new GridBagConstraints(1, 1, 1, 1, 0.0, 1.0,
                GridBagConstraints.WEST, GridBagConstraints.VERTICAL,
                new Insets(5, 5, 5, 0), 0, 0));
        qPanel.add(lowPanel, new GridBagConstraints(0, 2, 2, 1, 1.0, 1.0,
                GridBagConstraints.WEST, GridBagConstraints.BOTH,
                new Insets(5, 5, 5, 5), 0, 0));
        add(qPanel, BorderLayout.CENTER);
    }
private void chooseTemplate() {
if (defTemplateList.getSelectedIndex() > 0) {
selectedTemplate = getSelectedTemplateFile();
templateText.setText(selectedTemplate.getName());
} else {
templateText.setText("");
selectedTemplate = null;
}
}
private void populateDefTemplates() {
defTemplateModel.clear();
defTemplateModel.addElement(I18NSupport.getString("wizard.panel.seltemplate.none"));
File[] files = new File(Globals.USER_DATA_DIR + "/templates").listFiles();
for (File f : files) {
if (f.getName().endsWith(TemplateFileFilter.TEMPLATE_FILE_EXT)) {
defTemplateModel.addElement(f.getName().substring(0, f.getName().indexOf(TemplateFileFilter.TEMPLATE_FILE_EXT)));
}
}
if (defTemplateModel.size() > 0) {
defTemplateList.setSelectedIndex(0);
}
}
private File getSelectedTemplateFile() {
String template = (String)defTemplateList.getSelectedValue() + TemplateFileFilter.TEMPLATE_FILE_EXT;
return new File(Globals.USER_DATA_DIR + "/templates" + File.separator + template);
}
}
| |
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: Dec 20, 2012
*
*******************************************************************************/
package org.oscm.billingservice.business.calculation.revenue.setup;
import java.math.BigDecimal;
import org.oscm.billingservice.setup.BillingIntegrationTestBase;
import org.oscm.billingservice.setup.CustomerData;
import org.oscm.billingservice.setup.IntegrationTestSetup;
import org.oscm.billingservice.setup.TestData;
import org.oscm.billingservice.setup.TestOrganizationSetup;
import org.oscm.billingservice.setup.VOPriceModelFactory;
import org.oscm.billingservice.setup.VOPriceModelFactory.TestPriceModel;
import org.oscm.billingservice.setup.VOServiceFactory;
import org.oscm.billingservice.setup.VOServiceFactory.TestService;
import org.oscm.billingservice.setup.VendorData;
import org.oscm.test.DateTimeHandling;
import org.oscm.test.ReflectiveClone;
import org.oscm.internal.types.enumtypes.UserRoleType;
import org.oscm.internal.vo.VOOrganization;
import org.oscm.internal.vo.VOPriceModel;
import org.oscm.internal.vo.VORoleDefinition;
import org.oscm.internal.vo.VOServiceDetails;
import org.oscm.internal.vo.VOSubscriptionDetails;
import org.oscm.internal.vo.VOUser;
import org.oscm.internal.vo.VOUserDetails;
/**
* @author baumann
*/
public class BugSetup extends IntegrationTestSetup {
    /**
     * Bug 10339 fixture: subscribes the customer admin to a per-unit/month
     * service with roles and parameters (cut-off day 1, May 2013), adds a
     * second user with the GUEST role on June 10, then modifies the
     * HAS_OPTIONS parameter to "2" on June 15. The final subscription state
     * is cached for the billing tests.
     */
    public void createBug10339() throws Exception {
        long supplierKey = basicSetup.getSupplierAdminKey();
        long customerAdminKey = basicSetup.getCustomerAdminKey();
        VOUser customerAdmin = basicSetup.getCustomerAdmin();
        VOUserDetails user = basicSetup.getCustomerUser1();
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-05-01 12:00:00");
        setCutOffDay(supplierKey, 1);
        VOServiceDetails service = serviceSetup
                .createPublishAndActivateMarketableService(supplierKey,
                        "Bug10339", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES_PARS,
                        technicalService, supplierMarketplace);
        login("2013-06-01 08:00:00", customerAdminKey, ROLE_ORGANIZATION_ADMIN);
        VORoleDefinition role = VOServiceFactory.getRole(service, "USER");
        VOSubscriptionDetails subscription = subscrSetup.subscribeToService(
                "Bug10339", service, customerAdmin, role);
        subscription = subscrSetup.addUser("2013-06-10 20:00:00",
                subscription.getSubscriptionId(), user,
                VOServiceFactory.getRole(service, "GUEST"));
        subscription = subscrSetup.modifyParameter("2013-06-15 12:00:00",
                subscription, "HAS_OPTIONS", "2");
        BillingIntegrationTestBase.addToCache(subscription);
        resetCutOffDay(supplierKey);
    }
    /**
     * Bug 10267 fixture (free variant): subscribes the customer to a
     * free-of-charge service on 2013-03-07 (cut-off day 1) and caches the
     * subscription. No further lifecycle events.
     */
    public void createBug10267_free() throws Exception {
        // create subscription
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-03-07 07:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "Bug10267_free",
                        TestService.EXAMPLE, TestPriceModel.FREE,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "Bug10267_free", serviceDetails, basicSetup.getCustomerUser1(),
                role);
        BillingIntegrationTestBase.addToCache(subDetails);
        resetCutOffDay(basicSetup.getSupplierAdminKey());
    }
    /**
     * Bug 10267 fixture: subscribes to a chargeable per-unit/month service
     * on 2013-03-07 (cut-off day 1), then upgrades the subscription to a
     * compatible free-of-charge service on 2013-03-10. Both states are
     * cached for the billing tests.
     */
    public void createBug10267() throws Exception {
        // create subscription
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-03-07 07:00:00");
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "createBug10267_1",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup
                .subscribeToService("Bug10267", serviceDetails,
                        basicSetup.getCustomerUser1(), role);
        BillingIntegrationTestBase.addToCache(subDetails);
        // upgrade to free of charge
        VOServiceDetails freeService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "createBug10267_2",
                        TestService.EXAMPLE, TestPriceModel.FREE,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, freeService);
        BillingIntegrationTestBase
                .setDateFactoryInstance("2013-03-10 07:00:00");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, freeService);
        BillingIntegrationTestBase.addToCache(upgradedSubDetails);
        resetCutOffDay(basicSetup.getSupplierAdminKey());
    }
    /**
     * Bug 10091 month scenario: per-unit/month subscription started
     * 2013-03-07 (cut-off day 10), upgraded to a free service on
     * 2013-03-09, and terminated on 2013-03-11 via the upgraded
     * subscription's id. Both subscription states are registered for the
     * test lookup under "BUG10091_PERUNIT_MONTH".
     */
    public void createMonthScenarioBug10091() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-03-07 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10091_PERUNIT_MONTH", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 10);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10091_PERUNIT_MONTH", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // create upgraded service
        VOServiceDetails freeService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10091_MONTH_FREE_SERVICE", TestService.EXAMPLE,
                        TestPriceModel.FREE, technicalService,
                        supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, freeService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-03-09 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, freeService);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-03-11 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup
                .unsubscribeToService(upgradedSubDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_PERUNIT_MONTH", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_PERUNIT_MONTH", upgradedSubDetails);
    }
    /**
     * Bug 10091 week scenario: per-unit/week subscription started
     * 2013-04-08 (cut-off day 10), upgraded to a free service on
     * 2013-04-09, terminated on 2013-04-15. Both subscription states are
     * registered for the test lookup under "BUG10091_PERUNIT_WEEK".
     */
    public void createWeekScenarioBug10091() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-08 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10091_PERUNIT_WEEK", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 10);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10091_PERUNIT_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // create upgraded service
        VOServiceDetails freeService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10091_WEEK_FREE_SERVICE", TestService.EXAMPLE,
                        TestPriceModel.FREE, technicalService,
                        supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, freeService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-04-09 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, freeService);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-04-15 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        // NOTE(review): terminates via the pre-upgrade handle, unlike the
        // month variant which uses upgradedSubDetails -- presumably the
        // subscription id is unchanged by the upgrade; confirm.
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_PERUNIT_WEEK", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_PERUNIT_WEEK", upgradedSubDetails);
    }
    /**
     * Bug 10091 week scenario with free period: per-unit/week subscription
     * with a free period (FREEP_2 price model) started 2013-04-08 (cut-off
     * day 10), upgraded to a free service on 2013-04-09 20:00, terminated
     * on 2013-04-15. Registered under "BUG10091_PU_WEEK_FREEP".
     */
    public void createWeekScenarioBug10091_Freep() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-08 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10091_PU_WEEK_FREEP", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS_FREEP_2,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 10);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10091_PU_WEEK_FREEP", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // create upgraded service
        VOServiceDetails freeService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10091_WEEK_FREE_SERVIC", TestService.EXAMPLE,
                        TestPriceModel.FREE, technicalService,
                        supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, freeService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-04-09 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, freeService);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-04-15 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_PU_WEEK_FREEP", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_PU_WEEK_FREEP", upgradedSubDetails);
    }
    /**
     * Bug 10269 week scenario: per-unit/week subscription with free period
     * started 2013-04-08 (cut-off day 11), upgraded on 2013-04-09 20:00 to
     * a PAID per-unit/week service (despite the "FREE" in its id),
     * terminated on 2013-04-15. Registered under
     * "BUG10269_PERUNIT_WEEK_FREEP".
     */
    public void createWeekScenarioBug10269_Freep() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-08 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10269_PERUNIT_WEEK_FREEP", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 11);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10269_PERUNIT_WEEK_FREEP", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // create upgraded service
        VOServiceDetails paidService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10269_2_WEEK_FREE_SERVICE", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, paidService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-04-09 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, paidService);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-04-15 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10269_PERUNIT_WEEK_FREEP", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10269_PERUNIT_WEEK_FREEP", upgradedSubDetails);
    }
    /**
     * Bug 10269 "rata" week scenario: subscription started 2013-04-08
     * (cut-off day 11), upgraded on 2013-04-09 20:00 to a pro-rata weekly
     * service, terminated on 2013-04-15. Registered under
     * "BUG10269_RATA_WEEK_FREEP".
     * NOTE(review): the initial price model is PERUNIT despite the "RATA"
     * subscription id -- only the upgrade target is pro rata; confirm this
     * is intentional.
     */
    public void createWeekScenarioBug10269_Rata_Freep() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-08 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10269_RATA_WEEK_FREEP", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 11);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10269_RATA_WEEK_FREEP", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // create upgraded service
        VOServiceDetails paidService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10269_3_WEEK_FREE_SERVICE", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, paidService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-04-09 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, paidService);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-04-15 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10269_RATA_WEEK_FREEP", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10269_RATA_WEEK_FREEP", upgradedSubDetails);
    }
    /**
     * Bug 10269 "rata" week scenario, variant 2: subscription started
     * 2013-04-08 (cut-off day 1), upgraded on 2013-04-10 20:00 to a
     * pro-rata weekly service, terminated on 2013-04-14. Registered under
     * "BUG10269_2_RATA_WEEK_FREEP".
     */
    public void createWeekScenarioBug10269_2_Rata_Freep() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-08 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10269_2_RATA_WEEK_FREEP", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10269_2_RATA_WEEK_FREEP", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // create upgraded service
        VOServiceDetails paidService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10269_2-2_WEEK_FREE_SERVICE", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, paidService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-04-10 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, paidService);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-04-14 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10269_2_RATA_WEEK_FREEP", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10269_2_RATA_WEEK_FREEP", upgradedSubDetails);
    }
    /**
     * Bug 10091 pro-rata week scenario: pro-rata weekly subscription
     * started 2013-04-08 (cut-off day 10), upgraded to a free service on
     * 2013-04-09, terminated on 2013-04-15. Registered under
     * "BUG10091_RATA_WEEK".
     */
    public void createWeekScenarioBug10091_Rata() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-08 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "BUG10091_RATA_WEEK",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 10);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10091_RATA_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // create upgraded service
        VOServiceDetails freeService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10091_WEEK_FREE_SERVICE_Rata", TestService.EXAMPLE,
                        TestPriceModel.FREE, technicalService,
                        supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, freeService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-04-09 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, freeService);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-04-15 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_RATA_WEEK", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10091_RATA_WEEK", upgradedSubDetails);
    }
    /**
     * Bug 10133 week scenario: per-unit/week subscription started
     * 2013-04-30 (cut-off day 3) and terminated the next day,
     * 2013-05-01. Registered under "BUG10133_PERUNIT_WEEK".
     */
    public void createWeekScenarioBug10133() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-30 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10133_PERUNIT_WEEK", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10133_PERUNIT_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10133_PERUNIT_WEEK", subDetails);
    }
    /**
     * Bug 10133 pro-rata week scenario: pro-rata weekly subscription
     * started 2013-04-30 (cut-off day 3) and terminated the next day,
     * 2013-05-01. Registered under "BUG10133_RATA_WEEK".
     */
    public void createWeekScenarioBug10133_Rata() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-30 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "BUG10133_RATA_WEEK",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10133_RATA_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10133_RATA_WEEK", subDetails);
    }
    /**
     * Bug 10133 week scenario, variant 2: per-unit/week subscription
     * started 2013-04-28 (cut-off day 3) and terminated on 2013-05-01.
     * Registered under "BUG10133_2_PERUNIT_WEEK".
     */
    public void createWeekScenarioBug10133_2() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-28 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10133_2_PERUNIT_WEEK", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10133_2_PERUNIT_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10133_2_PERUNIT_WEEK", subDetails);
    }
    /**
     * Bug 10221 week scenario with free period: per-unit/week subscription
     * with a free period started 2013-04-28 (cut-off day 3). The
     * subscription is deliberately left active (no unsubscribe) after the
     * clock is advanced to 2013-05-01. Registered under
     * "BUG10221_FREE_PERIOD_UNIT_WEEK".
     */
    public void createWeekScenarioBug10221_with_free_period() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-28 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10221_FREE_PERIOD_UNIT_WEEK", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10221_FREE_PERIOD_UNIT_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        // subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10221_FREE_PERIOD_UNIT_WEEK", subDetails);
    }
    /**
     * Bug 10221 week scenario with free period and events: per-unit/week
     * subscription with a free period started 2013-04-28 (cut-off day 3).
     * Records a FILE_DOWNLOAD event (multiplier 2) three days in and a
     * FILE_UPLOAD event (multiplier 2) six days in, then terminates the
     * subscription on 2013-05-04. Registered under
     * "BUG10221_FREE_UNIT_WEEK_EVENT".
     */
    public void createWeekBug10221_free_period_and_event() throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-28 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10221_FREE_UNIT_WEEK_EVENT", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_EVENTS_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10221_FREE_UNIT_WEEK_EVENT", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // record one event after 3 days
        container.login(basicSetup.getSupplierAdminKey(), ROLE_SERVICE_MANAGER,
                ROLE_TECHNOLOGY_MANAGER);
        subscrSetup.recordEventForSubscription(subDetails, usageStartTime
                + DateTimeHandling.daysToMillis(3), "FILE_DOWNLOAD", 2);
        // record other event after 6 days
        subscrSetup.recordEventForSubscription(subDetails, usageStartTime
                + DateTimeHandling.daysToMillis(6), "FILE_UPLOAD", 2);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-04 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10221_FREE_UNIT_WEEK_EVENT", subDetails);
    }
    /**
     * Bug 10221 pro-rata week scenario with free period: pro-rata weekly
     * subscription with a free period started 2013-04-28 (cut-off day 3).
     * The subscription is deliberately left active (no unsubscribe) after
     * the clock is advanced to 2013-05-01. Registered under
     * "BUG10221_FREE_PER_RATA_WEEK".
     */
    public void createWeekScenarioBug10221_with_free_period_Rata()
            throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-28 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10221_FREE_PER_RATA_WEEK", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES_PARS_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10221_FREE_PER_RATA_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        // subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10221_FREE_PER_RATA_WEEK", subDetails);
    }
    /**
     * Bug 10221 week scenario with free period, variant 2: like the base
     * variant but with cut-off day 4. Started 2013-04-28, left active
     * after the clock is advanced to 2013-05-01. Registered under
     * "BUG10221_FREE_PERIOD_UNIT_WEEK_2".
     */
    public void createWeekScenarioBug10221_with_free_period_2()
            throws Exception {
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-28 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10221_FREE_PERIOD_UNIT_WEEK_2",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 4);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10221_FREE_PERIOD_UNIT_WEEK_2", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        // subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10221_FREE_PERIOD_UNIT_WEEK_2", subDetails);
    }
public void createWeekScenarioBug10221_with_free_period_3()
throws Exception {
long usageStartTime = DateTimeHandling
.calculateMillis("2013-04-29 07:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
VOServiceDetails serviceDetails = serviceSetup
.createPublishAndActivateMarketableService(
basicSetup.getSupplierAdminKey(),
"BUG10221_FREE_PERIOD_UNIT_WEEK_3",
TestService.EXAMPLE,
TestPriceModel.EXAMPLE_PERUNIT_WEEK_FREEP,
technicalService, supplierMarketplace);
setCutOffDay(basicSetup.getSupplierAdminKey(), 4);
VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
"ADMIN");
container.login(basicSetup.getCustomerAdminKey(),
ROLE_ORGANIZATION_ADMIN);
VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
"BUG10221_FREE_PERIOD_UNIT_WEEK_3", serviceDetails,
basicSetup.getCustomerUser1(), role);
long usageEndTime = DateTimeHandling
.calculateMillis("2013-05-01 10:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
// subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
resetCutOffDay(basicSetup.getSupplierAdminKey());
BillingIntegrationTestBase.updateSubscriptionListForTests(
"BUG10221_FREE_PERIOD_UNIT_WEEK_3", subDetails);
}
    /**
     * Billing scenario for bug 10235: subscribe to a per unit/week service
     * with role prices and a free period, assign a second user with the USER
     * role during the free period and deassign him again one day later. The
     * subscription remains active at the end of the scenario.
     */
    public void createWeekScenarioBug10235_with_free_period() throws Exception {
        // Freeze the test clock at the subscription start time.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-29 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10235_FREE_PERIOD_UNIT_WEEK", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 4);
        // Customer admin subscribes; user 1 gets the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10235_FREE_PERIOD_UNIT_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // Second user is assigned with the USER role one day later.
        long userAssignedTime = DateTimeHandling
                .calculateMillis("2013-04-30 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
        VORoleDefinition changedRole = VOServiceFactory.getRole(serviceDetails,
                "USER");
        subscrSetup.addUser(basicSetup.getSecondCustomerUser1(), changedRole,
                subDetails.getSubscriptionId());
        // Second user is deassigned again one day after the assignment.
        long userDeassignedTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userDeassignedTime);
        subscrSetup.revokeUser(basicSetup.getSecondCustomerUser1(),
                subDetails.getSubscriptionId());
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        // The subscription is intentionally left active (no unsubscribe):
        // subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10235_FREE_PERIOD_UNIT_WEEK", subDetails);
    }
public void createWeekScenarioBug10235_with_free_period_2()
throws Exception {
long usageStartTime = DateTimeHandling
.calculateMillis("2013-04-29 07:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
VOServiceDetails serviceDetails = serviceSetup
.createPublishAndActivateMarketableService(
basicSetup.getSupplierAdminKey(),
"BUG10235_FREE_PERIOD_UNIT_WEEK_2",
TestService.EXAMPLE,
TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_FREEP_3,
technicalService, supplierMarketplace);
setCutOffDay(basicSetup.getSupplierAdminKey(), 4);
VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
"ADMIN");
container.login(basicSetup.getCustomerAdminKey(),
ROLE_ORGANIZATION_ADMIN);
VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
"BUG10235_FREE_PERIOD_UNIT_WEEK_2", serviceDetails,
basicSetup.getCustomerUser1(), role);
// user is assigned
long userAssignedTime = DateTimeHandling
.calculateMillis("2013-04-30 10:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
VORoleDefinition changedRole = VOServiceFactory.getRole(serviceDetails,
"USER");
subscrSetup.addUser(basicSetup.getSecondCustomerUser1(), changedRole,
subDetails.getSubscriptionId());
// user is deassigned
long userDeassignedTime = DateTimeHandling
.calculateMillis("2013-05-02 10:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(userDeassignedTime);
subscrSetup.revokeUser(basicSetup.getSecondCustomerUser1(),
subDetails.getSubscriptionId());
long usageEndTime = DateTimeHandling
.calculateMillis("2013-05-02 10:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
// subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
resetCutOffDay(basicSetup.getSupplierAdminKey());
BillingIntegrationTestBase.updateSubscriptionListForTests(
"BUG10235_FREE_PERIOD_UNIT_WEEK_2", subDetails);
}
    /**
     * Billing scenario for bug 10235 (pro rata variant): subscribe to a pro
     * rata/week service with role and parameter prices plus a free period;
     * a second user is assigned with the USER role and deassigned one day
     * later. The subscription remains active at the end.
     */
    public void createWeekScenarioBug10235_with_free_period_Rata()
            throws Exception {
        // Freeze the test clock at the subscription start time.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-29 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10235_FREE_PERIOD_RATA_WEEK", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_RATA_WEEK_ROLES_PARS_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 4);
        // Customer admin subscribes; user 1 gets the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10235_FREE_PERIOD_RATA_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // Second user is assigned with the USER role one day later.
        long userAssignedTime = DateTimeHandling
                .calculateMillis("2013-04-30 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
        VORoleDefinition changedRole = VOServiceFactory.getRole(serviceDetails,
                "USER");
        subscrSetup.addUser(basicSetup.getSecondCustomerUser1(), changedRole,
                subDetails.getSubscriptionId());
        // Second user is deassigned again one day after the assignment.
        long userDeassignedTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userDeassignedTime);
        subscrSetup.revokeUser(basicSetup.getSecondCustomerUser1(),
                subDetails.getSubscriptionId());
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-01 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        // The subscription is intentionally left active (no unsubscribe):
        // subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10235_FREE_PERIOD_RATA_WEEK", subDetails);
    }
    /**
     * Billing scenario: per unit/week service with stepped user prices, role
     * prices and a free period. Five additional users (2..6) are assigned
     * with the USER role shortly after subscribing, user 2 is deassigned one
     * day later, and the subscription is terminated after about one week.
     */
    public void createWeek_free_period_stepPriceUser() throws Exception {
        // Freeze the test clock at the subscription start time.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-04-28 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "FREE_STEPPED_USER_WEEK",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_STEPPED_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
        // Customer admin subscribes; user 1 gets the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "FREE_STEPPED_USER_WEEK", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // 2. user is assigned
        long userAssignedTime = DateTimeHandling
                .calculateMillis("2013-04-28 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
        VORoleDefinition changedRole = VOServiceFactory.getRole(serviceDetails,
                "USER");
        subscrSetup.addUser(basicSetup.getCustomerUser2(), changedRole,
                subDetails.getSubscriptionId());
        // 3. user is assigned
        // NOTE(review): users 3..6 reuse the same timestamp and role; the
        // repeated recalculation/reassignment below is redundant but kept to
        // preserve the recorded call sequence exactly.
        userAssignedTime = DateTimeHandling
                .calculateMillis("2013-04-28 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
        changedRole = VOServiceFactory.getRole(serviceDetails, "USER");
        subscrSetup.addUser(basicSetup.getCustomerUser3(), changedRole,
                subDetails.getSubscriptionId());
        // 4. user is assigned
        userAssignedTime = DateTimeHandling
                .calculateMillis("2013-04-28 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
        changedRole = VOServiceFactory.getRole(serviceDetails, "USER");
        subscrSetup.addUser(basicSetup.getCustomerUser4(), changedRole,
                subDetails.getSubscriptionId());
        // 5. user is assigned
        userAssignedTime = DateTimeHandling
                .calculateMillis("2013-04-28 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
        changedRole = VOServiceFactory.getRole(serviceDetails, "USER");
        subscrSetup.addUser(basicSetup.getCustomerUser5(), changedRole,
                subDetails.getSubscriptionId());
        // 6. user is assigned
        userAssignedTime = DateTimeHandling
                .calculateMillis("2013-04-28 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userAssignedTime);
        changedRole = VOServiceFactory.getRole(serviceDetails, "USER");
        subscrSetup.addUser(basicSetup.getCustomerUser6(), changedRole,
                subDetails.getSubscriptionId());
        // 2. user is deassigned one day after assignment
        long userDeassignedTime = DateTimeHandling
                .calculateMillis("2013-04-29 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(userDeassignedTime);
        subscrSetup.revokeUser(basicSetup.getCustomerUser2(),
                subDetails.getSubscriptionId());
        // Terminate the subscription at the end of the usage period.
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-05-04 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "FREE_STEPPED_USER_WEEK", subDetails);
    }
    /**
     * Bug 10249: Subscribe to a service with priced events. One event has
     * stepped prices. The subscription starts 3.5 days before the billing
     * period (2012-12-01) and ends 10 days after it; three events are
     * recorded in between.
     */
    public void createMonthScenarioBug10249_perUnit_steppedEvents()
            throws Exception {
        // Start 3.5 days before the billing period start.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2012-12-01 00:00:00")
                - DateTimeHandling.daysToMillis(3.5);
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10249_PER_UNIT_MONTH_EVENTS", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_STEPPED_EVENTS,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        // Customer admin subscribes; user 1 gets the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10249_PER_UNIT_MONTH_EVENTS", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // record an event after 8 days
        container.login(basicSetup.getSupplierAdminKey(), ROLE_SERVICE_MANAGER,
                ROLE_TECHNOLOGY_MANAGER);
        subscrSetup.recordEventForSubscription(subDetails, usageStartTime
                + DateTimeHandling.daysToMillis(8), "FILE_DOWNLOAD", 75);
        // record another event after 10 days
        subscrSetup.recordEventForSubscription(subDetails, usageStartTime
                + DateTimeHandling.daysToMillis(10), "FILE_UPLOAD", 13);
        // record a third event, also after 10 days — NOTE(review): the
        // original comment said "after 12 days" but the code uses an offset
        // of 10; confirm which offset was intended.
        subscrSetup.recordEventForSubscription(subDetails, usageStartTime
                + DateTimeHandling.daysToMillis(10), "FOLDER_NEW", 1);
        // Unsubscribe 10 days after the billing period start.
        long usageEndTime = DateTimeHandling
                .calculateMillis("2012-12-01 00:00:00")
                + DateTimeHandling.daysToMillis(10);
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10249_PER_UNIT_MONTH_EVENTS", subDetails);
    }
public void createWeekScenarioRolChangeWithFreeP() throws Exception {
long usageStartTime = DateTimeHandling
.calculateMillis("2013-02-28 07:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
VOServiceDetails serviceDetails = serviceSetup
.createPublishAndActivateMarketableService(
basicSetup.getSupplierAdminKey(),
"ROLCHANGE_WEEK_FREEP", TestService.EXAMPLE,
TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_FREEP_2,
technicalService, supplierMarketplace);
setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
"ADMIN");
container.login(basicSetup.getCustomerAdminKey(),
ROLE_ORGANIZATION_ADMIN);
VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
"ROLCHANGE_WEEK_FREEP", serviceDetails,
basicSetup.getCustomerUser1(), role);
BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
.calculateMillis("2013-03-01 00:00:00"));
subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses()
.get(0), VOServiceFactory.getRole(serviceDetails, "USER"),
subDetails.getSubscriptionId());
long usageEndTime = DateTimeHandling
.calculateMillis("2013-03-03 07:00:00");
BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
resetCutOffDay(basicSetup.getSupplierAdminKey());
BillingIntegrationTestBase.updateSubscriptionListForTests(
"ROLCHANGE_WEEK_FREEP", subDetails);
}
    /**
     * Change parameter two times in free period. Price model is per unit/week,
     * charged week overlaps billing period.
     */
    public void createWeekScenarioBug10265_ParChangeWithFreeP()
            throws Exception {
        // Freeze the test clock at the subscription start time.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-02-28 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        // NOTE(review): "RARCHANGE_WEEK_FREEP" looks like a typo of
        // "PARCHANGE..." but it is the persisted scenario key referenced by
        // the expected billing results — do not rename without migrating them.
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "RARCHANGE_WEEK_FREEP", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS_FREEP,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        // Customer admin subscribes; user 1 gets the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(basicSetup.getCustomerAdminKey(),
                ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "RARCHANGE_WEEK_FREEP", serviceDetails,
                basicSetup.getCustomerUser1(), role);
        // Two parameter changes, one hour apart, both in the free period.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-03-01 00:00:00"));
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-03-01 00:00:00"),
                "MAX_FOLDER_NUMBER", "7");
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-03-01 01:00:00"),
                "MAX_FOLDER_NUMBER", "3");
        // Terminate the subscription at the end of the usage period.
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-03-03 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "RARCHANGE_WEEK_FREEP", subDetails);
    }
    /**
     * Upgrade a service with a free period to another service with a free
     * period. Both services have a per unit/week price model. Change a
     * parameter in both free periods. Charged week overlaps billing period.
     */
    public void createWeekScenarioBug10265_UpgradeAndParChange()
            throws Exception {
        // Freeze the test clock at the subscription start time.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-23 00:00:00"));
        // Base service; the trailing numeric argument is passed through to
        // the service setup (presumably the free period length in days —
        // TODO confirm against createPublishAndActivateMarketableService).
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10265_UPG_PARCHG", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS2, 3,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                basicSetup.getCustomerAdminKey(), "BUG10265_UPG_PARCHG",
                serviceDetails, basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "ADMIN"));
        // Parameter change in the first service's free period.
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-25 00:00:00"),
                "MAX_FOLDER_NUMBER", "3");
        // Upgrade the subscription
        VOServiceDetails perUnitService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10265_UPG_PARCHG_SERVICE2", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS3, 2,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails,
                perUnitService);
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-28 00:00:00"));
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .copyParametersAndUpgradeSubscription(
                        basicSetup.getCustomerAdminKey(), subDetails,
                        perUnitService);
        // Three parameter changes on the upgraded subscription.
        upgradedSubDetails = subscrSetup.modifyParameterForSubscription(
                upgradedSubDetails,
                DateTimeHandling.calculateMillis("2013-03-01 00:00:00"),
                "MAX_FOLDER_NUMBER", "5");
        upgradedSubDetails = subscrSetup.modifyParameterForSubscription(
                upgradedSubDetails,
                DateTimeHandling.calculateMillis("2013-03-01 07:00:00"),
                "MAX_FOLDER_NUMBER", "7");
        upgradedSubDetails = subscrSetup.modifyParameterForSubscription(
                upgradedSubDetails,
                DateTimeHandling.calculateMillis("2013-03-02 12:00:00"),
                "MAX_FOLDER_NUMBER", "4");
        // Terminate the upgraded subscription at the end of usage.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-03-03 07:00:00"));
        subscrSetup
                .unsubscribeToService(upgradedSubDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        // Both subscription states are recorded under the same scenario key.
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10265_UPG_PARCHG", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10265_UPG_PARCHG", upgradedSubDetails);
    }
    /**
     * Upgrade a service with a free period to another service without a free
     * period. Both services have a per unit/week price model. Change a
     * parameter in the free period and after it. Charged week overlaps billing
     * period.
     */
    public void createWeekScenarioBug10265_UpgradeAndParChange2()
            throws Exception {
        // Freeze the test clock at the subscription start time.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-23 00:00:00"));
        // Base service; the trailing numeric argument (3) is passed through
        // to the service setup (presumably the free period length in days —
        // TODO confirm).
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10265_UPG_PARCHG2", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS2, 3,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                basicSetup.getCustomerAdminKey(), "BUG10265_UPG_PARCHG2",
                serviceDetails, basicSetup.getCustomerUser1(),
                VOServiceFactory.getRole(serviceDetails, "ADMIN"));
        // Parameter change in the first service's free period.
        subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
                DateTimeHandling.calculateMillis("2013-02-25 00:00:00"),
                "MAX_FOLDER_NUMBER", "3");
        // Upgrade the subscription (target service has no free period: 0).
        VOServiceDetails perUnitService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10265_UPG_PARCHG2_SERVICE2", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES_PARS3, 0,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails,
                perUnitService);
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-02-28 00:00:00"));
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .copyParametersAndUpgradeSubscription(
                        basicSetup.getCustomerAdminKey(), subDetails,
                        perUnitService);
        // Three parameter changes on the upgraded subscription.
        upgradedSubDetails = subscrSetup.modifyParameterForSubscription(
                upgradedSubDetails,
                DateTimeHandling.calculateMillis("2013-03-01 00:00:00"),
                "MAX_FOLDER_NUMBER", "5");
        upgradedSubDetails = subscrSetup.modifyParameterForSubscription(
                upgradedSubDetails,
                DateTimeHandling.calculateMillis("2013-03-01 07:00:00"),
                "MAX_FOLDER_NUMBER", "7");
        upgradedSubDetails = subscrSetup.modifyParameterForSubscription(
                upgradedSubDetails,
                DateTimeHandling.calculateMillis("2013-03-02 12:00:00"),
                "MAX_FOLDER_NUMBER", "4");
        // Terminate the upgraded subscription at the end of usage.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-03-03 07:00:00"));
        subscrSetup
                .unsubscribeToService(upgradedSubDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        // Both subscription states are recorded under the same scenario key.
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10265_UPG_PARCHG2", subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10265_UPG_PARCHG2", upgradedSubDetails);
    }
    /**
     * Billing scenario: per unit/week service with role prices. A dedicated
     * customer is created because its payment types are removed (suspending
     * the subscription) and later restored (resuming it). While resumed, the
     * subscription's price model period fee is changed before unsubscribing.
     */
    public void createWeekSuspend() throws Exception {
        // Freeze the test clock at the subscription start time.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-06-08 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "SCENARIO_WEEK_SUSP",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                        technicalService, supplierMarketplace);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        // Create own customer, because the customer's payment types are changed
        // later on
        String customerAdminId = "ScenarioSuspWeekCustomerAdmin";
        VOOrganization customer = orgSetup.registerCustomer(
                "ScenarioWeekSuspCustomer",
                TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
                customerAdminId, supplierMarketplace.getMarketplaceId(),
                basicSetup.getSupplierOrgID());
        VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
        // The customer admin subscribes with the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "SCENARIO_WEEK_SUSP", serviceDetails, customerAdmin, role);
        // suspend/resume subscription several times by removing/restoring the
        // customer's payment types
        long suspResTime = DateTimeHandling
                .calculateMillis("2013-06-09 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(suspResTime);
        container.login(basicSetup.getSupplierAdminKey(), ROLE_SERVICE_MANAGER,
                ROLE_TECHNOLOGY_MANAGER);
        paymentSetup.deleteCustomerPaymentTypes(customer);
        // Resume again 8 days later by restoring the payment types.
        long reassignResTime = DateTimeHandling
                .calculateMillis("2013-06-17 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(reassignResTime);
        paymentSetup.reassignCustomerPaymentTypes(customer);
        // price model change!!
        VOPriceModel newSubPriceModel = VOPriceModelFactory
                .modifyPriceModelPeriodFee(subDetails.getPriceModel(),
                        new BigDecimal("200.00"));
        subscrSetup.savePriceModelForSubscription(
                basicSetup.getSupplierAdminKey(), subDetails, newSubPriceModel,
                customer);
        // Terminate the subscription at the end of the usage period.
        long usageEndTime = DateTimeHandling
                .calculateMillis("2013-06-18 10:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageEndTime);
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "SCENARIO_WEEK_SUSP", subDetails);
        BillingIntegrationTestBase.updateCustomerListForTests(
                "SCENARIO_WEEK_SUSP", customer);
    }
    /**
     * Billing scenario for bug 10301: a dedicated customer subscribes to a
     * free service, changes the user's service role, upgrades to a per
     * unit/month priced service and finally renames the subscription.
     */
    public void createBug10301() throws Exception {
        // Freeze the test clock at the subscription start time.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-05-05 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        // Create own customer
        String customerAdminId = "BUGxxCustomer";
        VOOrganization customer = orgSetup.registerCustomer("BUGxxCustomerOrg",
                TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
                customerAdminId, supplierMarketplace.getMarketplaceId(),
                basicSetup.getSupplierOrgID());
        VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
        // Publish and activate a free service, then subscribe to it.
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "BUG10301",
                        TestService.EXAMPLE, TestPriceModel.FREE,
                        technicalService, supplierMarketplace);
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10301", serviceDetails, customerAdmin, role);
        // role change
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-05-14 10:00:00"));
        subDetails = subscrSetup.modifyUserRole(subDetails.getUsageLicenses()
                .get(0), VOServiceFactory.getRole(serviceDetails, "USER"),
                subDetails.getSubscriptionId());
        // create upgraded service
        VOServiceDetails paidService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "BUG10301_Upgrade",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, paidService);
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-05-15 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, paidService);
        // NOTE(review): this registerCompatibleServices call repeats the one
        // above with identical arguments — confirm whether it is intentional.
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, paidService);
        // change subscription ID
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-05-16 00:00:00"));
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        upgradedSubDetails.setSubscriptionId("BUG10301" + "_SubID2");
        upgradedSubDetails = subscrSetup.modifySubscription(upgradedSubDetails,
                null);
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests("BUG10301",
                subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests("BUG10301",
                upgradedSubDetails);
        BillingIntegrationTestBase.updateCustomerListForTests("BUG10301",
                customer);
    }
    /**
     * Billing scenario for bug 10302: per unit/day price model. A dedicated
     * customer subscribes with the GUEST role; the subscription is suspended
     * and resumed on the same day by deleting and restoring the customer's
     * payment types, then terminated.
     */
    public void createPerUnitDayBug10302() throws Exception {
        // Freeze the test clock at the subscription start time.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-03-01 00:00:00"));
        setCutOffDay(basicSetup.getSupplierAdminKey(), 7);
        // Create an own customer
        String customerAdminId = "Bug10302PUnitDayCustomer";
        VOOrganization customer = orgSetup.registerCustomer(
                "Bug10302PUnitDayCustomerOrg",
                TestOrganizationSetup.ORGANIZATION_DOMICILE_UK,
                customerAdminId, supplierMarketplace.getMarketplaceId(),
                basicSetup.getSupplierOrgID());
        VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(),
                        "BUG10302_PERUNIT_DAY", TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PICT04_PERUNIT_DAY,
                        technicalService, supplierMarketplace);
        // The customer admin subscribes with the GUEST service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "GUEST");
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10302_PERUNIT_DAY", serviceDetails, customerAdmin, role);
        // Suspend and resume the subscription by deleting the customer's
        // payment types
        container.login(basicSetup.getSupplierAdminKey(),
                UserRoleType.SERVICE_MANAGER.name());
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-03-07 12:00:00"));
        paymentSetup.deleteCustomerPaymentTypes(customer);
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-03-07 18:00:00"));
        paymentSetup.reassignCustomerPaymentTypes(customer);
        // Re-read the subscription to pick up the state changed by the
        // suspend/resume cycle.
        subDetails = subscrSetup.getSubscriptionDetails(customerAdmin.getKey(),
                subDetails.getSubscriptionId());
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-03-11 00:00:00"));
        subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests(
                "BUG10302_PERUNIT_DAY", subDetails);
        BillingIntegrationTestBase.updateCustomerListForTests(
                "BUG10302_PERUNIT_DAY", customer);
    }
    /**
     * Billing scenario for bug 10303: a dedicated customer subscribes to a
     * per unit/month priced service, upgrades it, has the subscription
     * suspended/resumed via the customer's payment types, and finally renames
     * the upgraded subscription.
     */
    public void createBug10303() throws Exception {
        // Freeze the test clock at the subscription start time.
        long usageStartTime = DateTimeHandling
                .calculateMillis("2013-05-05 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageStartTime);
        setCutOffDay(basicSetup.getSupplierAdminKey(), 1);
        // Create own customer
        String customerAdminId = "BUG10303Customer";
        VOOrganization customer = orgSetup.registerCustomer("BUGxxCustomerOrg",
                TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
                customerAdminId, supplierMarketplace.getMarketplaceId(),
                basicSetup.getSupplierOrgID());
        VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
        VOServiceDetails serviceDetails = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "BUG10303",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
                        technicalService, supplierMarketplace);
        // The customer admin subscribes with the ADMIN service role.
        VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
                "ADMIN");
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
                "BUG10303", serviceDetails, customerAdmin, role);
        // create upgraded service
        VOServiceDetails paidService = serviceSetup
                .createPublishAndActivateMarketableService(
                        basicSetup.getSupplierAdminKey(), "BUG10303_Upgrade",
                        TestService.EXAMPLE,
                        TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
                        technicalService, supplierMarketplace);
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, paidService);
        // NOTE(review): non-padded day in "2013-05-8" below — confirm
        // DateTimeHandling.calculateMillis parses it as 2013-05-08.
        long usageUpgradeTime = DateTimeHandling
                .calculateMillis("2013-05-8 20:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(usageUpgradeTime);
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        VOSubscriptionDetails upgradedSubDetails = subscrSetup
                .upgradeSubscription(subDetails, paidService);
        // NOTE(review): this registerCompatibleServices call repeats the one
        // above with identical arguments — confirm whether it is intentional.
        serviceSetup.registerCompatibleServices(
                basicSetup.getSupplierAdminKey(), serviceDetails, paidService);
        // // suspend/resume subscription several times by removing/restoring
        // // customer's payment types
        long suspResTime = DateTimeHandling
                .calculateMillis("2013-05-11 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(suspResTime);
        container.login(basicSetup.getSupplierAdminKey(), ROLE_SERVICE_MANAGER,
                ROLE_TECHNOLOGY_MANAGER);
        paymentSetup.deleteCustomerPaymentTypes(customer);
        long reassignResTime = DateTimeHandling
                .calculateMillis("2013-05-12 07:00:00");
        BillingIntegrationTestBase.setDateFactoryInstance(reassignResTime);
        paymentSetup.reassignCustomerPaymentTypes(customer);
        // Re-read the upgraded subscription after the suspend/resume cycle.
        upgradedSubDetails = subscrSetup.getSubscriptionDetails(
                customerAdmin.getKey(), upgradedSubDetails.getSubscriptionId());
        // change subscription ID
        // NOTE(review): the "BUG10301" prefix below looks like a copy-paste
        // from createBug10301 (this is the BUG10303 scenario) — confirm
        // before changing, since expected billing results may reference it.
        BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
                .calculateMillis("2013-05-14 00:00:00"));
        container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
        upgradedSubDetails.setSubscriptionId("BUG10301" + "_SubID2");
        upgradedSubDetails = subscrSetup.modifySubscription(upgradedSubDetails,
                null);
        resetCutOffDay(basicSetup.getSupplierAdminKey());
        BillingIntegrationTestBase.updateSubscriptionListForTests("BUG10303",
                subDetails);
        BillingIntegrationTestBase.updateSubscriptionListForTests("BUG10303",
                upgradedSubDetails);
        BillingIntegrationTestBase.updateCustomerListForTests("BUG10303",
                customer);
    }
/**
 * Creates billing test data for bug 10361: a subscription with a
 * per-unit/day price model (EXAMPLE_FP_PUDAY_ROLES_PAR_EVENTS) that is
 * active across 2013-03-31 -- presumably the daylight-saving-time switch
 * referenced by the method name; TODO confirm the relevant timezone.
 * Billing cut-off day is 7.
 *
 * @throws Exception if any setup step fails
 */
public void createDayBug10361_DaylightSavingTime() throws Exception {
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-03-15 00:00:00"));
    setCutOffDay(basicSetup.getSupplierAdminKey(), 7);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(), "BUG10361_DAYLSAV",
                    TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_FP_PUDAY_ROLES_PAR_EVENTS,
                    technicalService, supplierMarketplace);
    VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
            "ADMIN");
    container.login(basicSetup.getCustomerAdminKey(),
            ROLE_ORGANIZATION_ADMIN);
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            "BUG10361_DAYLSAV", serviceDetails,
            basicSetup.getCustomerUser1(), role);
    // Terminate on the (presumed) DST switch day
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-03-31 00:00:00"));
    subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register the subscription so the billing-result tests can look it up
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10361_DAYLSAV", subDetails);
}
/**
 * Creates billing test data for bug 10361 (variant with a parameter
 * change): same per-unit/day priced service as
 * {@code createDayBug10361_DaylightSavingTime}, but a subscription
 * parameter is modified at 2013-03-31 01:00:00, i.e. around the presumed
 * daylight-saving-time switch -- TODO confirm the relevant timezone.
 * Billing cut-off day is 7.
 *
 * @throws Exception if any setup step fails
 */
public void createDayBug10361_DaylSav_ParChange() throws Exception {
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-03-16 00:00:00"));
    setCutOffDay(basicSetup.getSupplierAdminKey(), 7);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10361_DLS_PAR_CHANGE", TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_FP_PUDAY_ROLES_PAR_EVENTS,
                    technicalService, supplierMarketplace);
    VORoleDefinition role = VOServiceFactory.getRole(serviceDetails,
            "ADMIN");
    container.login(basicSetup.getCustomerAdminKey(),
            ROLE_ORGANIZATION_ADMIN);
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            "BUG10361_DLS_PAR_CHANGE", serviceDetails,
            basicSetup.getCustomerUser1(), role);
    // Change a service parameter shortly after the (presumed) DST switch
    subDetails = subscrSetup.modifyParameterForSubscription(subDetails,
            DateTimeHandling.calculateMillis("2013-03-31 01:00:00"),
            "MAX_FOLDER_NUMBER", "4");
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-04-01 00:00:00"));
    subscrSetup.unsubscribeToService(subDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register the subscription so the billing-result tests can look it up
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10361_DLS_PAR_CHANGE", subDetails);
}
/**
 * Creates billing test data for bug 10404: a per-unit/week subscription is
 * upgraded to a pro-rata/week service, a usage-license role is changed, and
 * the upgraded subscription is then suspended and resumed by removing and
 * restoring the customer's payment types. Billing cut-off day is 10.
 *
 * @throws Exception if any setup step fails
 */
public void createBug10404_suspendUpgradedProRataService() throws Exception {
    setDateFactory("2013-08-07 10:00:00");
    setCutOffDay(basicSetup.getSupplierAdminKey(), 10);
    // Create an own customer, because the customer's payment types
    // are changed later on
    String customerAdminId = "Bug10404Customer";
    VOOrganization customer = orgSetup.registerCustomer(
            "Bug10404CustomerOrg",
            TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
            customerAdminId, supplierMarketplace.getMarketplaceId(),
            basicSetup.getSupplierOrgID());
    VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10404_UPGR_SUS_SERVICE", TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    // subscribe
    container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            "BUG10404_UPGR_SUS", serviceDetails, customerAdmin,
            VOServiceFactory.getRole(serviceDetails, "GUEST"));
    // upgrade service
    VOServiceDetails upgradedServiceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "Bug10404_Upgrade_Service", TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_RATA_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    serviceSetup.registerCompatibleServices(
            basicSetup.getSupplierAdminKey(), serviceDetails,
            upgradedServiceDetails);
    setDateFactory("2013-08-08 13:00:00");
    VOSubscriptionDetails upgradedSubDetails = subscrSetup
            .upgradeSubscription(customerAdmin.getKey(), subDetails,
                    upgradedServiceDetails);
    // Switch the first usage license from GUEST to ADMIN
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-08-09 00:00:00"));
    upgradedSubDetails = subscrSetup.modifyUserRole(upgradedSubDetails
            .getUsageLicenses().get(0), VOServiceFactory.getRole(
            upgradedServiceDetails, "ADMIN"), upgradedSubDetails
            .getSubscriptionId());
    // Suspend and resume the subscription by deleting the customer's
    // payment types
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-08-09 18:00:00"));
    paymentSetup.deleteCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    // Re-read the subscription to pick up the changed state
    upgradedSubDetails = subscrSetup.getSubscriptionDetails(
            customerAdmin.getKey(), subDetails.getSubscriptionId());
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-08-14 20:00:00"));
    paymentSetup.reassignCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    upgradedSubDetails = subscrSetup.getSubscriptionDetails(
            customerAdmin.getKey(), subDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register subscriptions and customer so the billing-result tests can
    // look them up under the scenario id
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10404_UPGR_SUS", subDetails);
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10404_UPGR_SUS", upgradedSubDetails);
    BillingIntegrationTestBase.updateCustomerListForTests(
            "BUG10404_UPGR_SUS", customer);
}
/**
 * Creates billing test data for bug 10404: a per-unit/week subscription is
 * first expired and later upgraded to a pro-rata/week service, then
 * terminated. Billing cut-off day is 10.
 *
 * @throws Exception if any setup step fails
 */
public void createBug10404_upgradeExpiredSubscription() throws Exception {
    setDateFactory("2013-08-07 10:00:00");
    setCutOffDay(basicSetup.getSupplierAdminKey(), 10);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10404_UPGR_EXP_SUB", TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    // subscribe
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            basicSetup.getCustomerAdminKey(), "BUG10404_UPGR_EXP_SUB",
            serviceDetails, basicSetup.getCustomerUser1(),
            VOServiceFactory.getRole(serviceDetails, "ADMIN"));
    // expire the subscription
    setDateFactory("2013-08-12 13:00:00");
    subscrSetup.expireSubscription(subDetails, basicSetup.getCustomer());
    // Re-read the subscription to pick up the changed state
    subDetails = subscrSetup.getSubscriptionDetails(
            basicSetup.getCustomerAdminKey(),
            subDetails.getSubscriptionId());
    // upgrade service
    VOServiceDetails upgradedServiceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "Bug10404_UpgradeExp_Service", TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_RATA_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    serviceSetup.registerCompatibleServices(
            basicSetup.getSupplierAdminKey(), serviceDetails,
            upgradedServiceDetails);
    // Upgrade the (expired) subscription
    setDateFactory("2013-08-20 10:00:00");
    VOSubscriptionDetails upgradedSubDetails = subscrSetup
            .upgradeSubscription(basicSetup.getCustomerAdminKey(),
                    subDetails, upgradedServiceDetails);
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-08-30 15:20:00"));
    subscrSetup.unsubscribeToService(basicSetup.getCustomerAdminKey(),
            upgradedSubDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register both subscription versions for the billing-result tests
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10404_UPGR_EXP_SUB", subDetails);
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10404_UPGR_EXP_SUB", upgradedSubDetails);
}
/**
 * Creates billing test data for bug 10404: a per-unit/week subscription is
 * suspended (payment types deleted), expired while suspended, resumed
 * (payment types reassigned), upgraded to a pro-rata/week service and
 * finally terminated. Billing cut-off day is 10.
 *
 * @throws Exception if any setup step fails
 */
public void createBug10404_expireSuspendedSubscription() throws Exception {
    setDateFactory("2013-08-07 10:00:00");
    setCutOffDay(basicSetup.getSupplierAdminKey(), 10);
    // Create an own customer, because the customer's payment types
    // are changed later on
    String customerAdminId = "Bug10404ExpSusCustomer";
    VOOrganization customer = orgSetup.registerCustomer(
            "Bug10404ExpSusCustomerOrg",
            TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
            customerAdminId, supplierMarketplace.getMarketplaceId(),
            basicSetup.getSupplierOrgID());
    VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10404_EXP_SUS_SUB_SERVICE", TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    // subscribe
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            customerAdmin.getKey(), "BUG10404_EXP_SUS_SUB", serviceDetails,
            customerAdmin,
            VOServiceFactory.getRole(serviceDetails, "ADMIN"));
    // Suspend the subscription
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-08-12 13:00:00"));
    paymentSetup.deleteCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    subDetails = subscrSetup.getSubscriptionDetails(customerAdmin.getKey(),
            subDetails.getSubscriptionId());
    // expire the subscription
    setDateFactory("2013-08-13 10:00:00");
    subscrSetup.expireSubscription(subDetails, customer);
    subDetails = subscrSetup.getSubscriptionDetails(customerAdmin.getKey(),
            subDetails.getSubscriptionId());
    // Reassign the customer's payment types
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-08-18 18:00:00"));
    paymentSetup.reassignCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    subDetails = subscrSetup.getSubscriptionDetails(customerAdmin.getKey(),
            subDetails.getSubscriptionId());
    // upgrade service
    VOServiceDetails upgradedServiceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "Bug10404_SusExpUpgr_Service", TestService.EXAMPLE,
                    TestPriceModel.EXAMPLE_RATA_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    serviceSetup.registerCompatibleServices(
            basicSetup.getSupplierAdminKey(), serviceDetails,
            upgradedServiceDetails);
    setDateFactory("2013-08-20 10:00:00");
    VOSubscriptionDetails upgradedSubDetails = subscrSetup
            .upgradeSubscription(customerAdmin.getKey(), subDetails,
                    upgradedServiceDetails);
    BillingIntegrationTestBase.setDateFactoryInstance(DateTimeHandling
            .calculateMillis("2013-08-30 15:20:00"));
    subscrSetup.unsubscribeToService(customerAdmin.getKey(),
            upgradedSubDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register both subscription versions for the billing-result tests
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10404_EXP_SUS_SUB", subDetails);
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10404_EXP_SUS_SUB", upgradedSubDetails);
}
/**
 * Creates billing test data for bug 10476: a per-unit/week subscription is
 * suspended by deleting the customer's payment types and then upgraded to a
 * free service. Because the target service is free, the upgrade reactivates
 * the subscription. Payment types are later restored and the subscription
 * is terminated. Billing cut-off day is 3.
 *
 * @throws Exception if any setup step fails
 */
public void createBug10476_upgradeSuspendedSubscription() throws Exception {
    setDateFactory("2013-08-02 10:00:00");
    // Create own customer, because the customer's payment types
    // are changed later on
    String customerAdminId = "Bug10476UpgrSusCustomerAdmin";
    VOOrganization customer = orgSetup.registerCustomer(
            "Bug10476UpgrSusCustomer",
            TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
            customerAdminId, supplierMarketplace.getMarketplaceId(),
            basicSetup.getSupplierOrgID());
    VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10476_UPGRSUS_SERVICE", TestService.EXAMPLE2,
                    TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
    // subscribe
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            customerAdmin.getKey(), "BUG10476_UPGRSUS", serviceDetails,
            customerAdmin,
            VOServiceFactory.getRole(serviceDetails, "GUEST"));
    // Delete customer payment types -> suspends the subscription
    setDateFactory("2013-08-03 00:00:00");
    paymentSetup.deleteCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    subDetails = subscrSetup.getSubscriptionDetails(customerAdmin.getKey(),
            subDetails.getSubscriptionId());
    // Create upgraded service
    setDateFactory("2013-08-04 12:00:00");
    container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
    VOServiceDetails upgradedServiceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10476_UPGRSUS_FREE_SERVICE", TestService.EXAMPLE2,
                    TestPriceModel.FREE, technicalService,
                    supplierMarketplace);
    serviceSetup.registerCompatibleServices(
            basicSetup.getSupplierAdminKey(), serviceDetails,
            upgradedServiceDetails);
    // Upgrade subscription -> subscription is activated because
    // it is free!!
    setDateFactory("2013-08-10 23:10:00");
    VOSubscriptionDetails upgradedSubDetails = subscrSetup
            .upgradeSubscription(customerAdmin.getKey(), subDetails,
                    upgradedServiceDetails);
    setDateFactory("2013-08-11 15:00:00");
    paymentSetup.reassignCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    upgradedSubDetails = subscrSetup.getSubscriptionDetails(
            customerAdmin.getKey(), upgradedSubDetails.getSubscriptionId());
    // Terminate subscription
    setDateFactory("2013-08-15 08:12:00");
    subscrSetup
            .unsubscribeToService(upgradedSubDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register subscriptions and customer for the billing-result tests
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10476_UPGRSUS", subDetails);
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10476_UPGRSUS", upgradedSubDetails);
    BillingIntegrationTestBase.updateCustomerListForTests(
            "BUG10476_UPGRSUS", customer);
}
/**
 * Same as createBug10476_upgradeSuspendedSubscription(), but suspend and
 * upgrade happen in the same billing time unit (suspend on 2013-08-03,
 * upgrade on 2013-08-04 instead of 2013-08-10). Billing cut-off day is 3.
 *
 * @throws Exception if any setup step fails
 */
public void createBug10476_upgradeSuspendedSubscription2() throws Exception {
    setDateFactory("2013-08-02 10:00:00");
    // Create own customer, because the customer's payment types
    // are changed later on
    String customerAdminId = "Bug10476UpgrSus2CustomerAdmin";
    VOOrganization customer = orgSetup.registerCustomer(
            "Bug10476UpgrSus2Customer",
            TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
            customerAdminId, supplierMarketplace.getMarketplaceId(),
            basicSetup.getSupplierOrgID());
    VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10476_UPGRSUS2_SERVICE", TestService.EXAMPLE2,
                    TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
    // subscribe
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            customerAdmin.getKey(), "BUG10476_UPGRSUS2", serviceDetails,
            customerAdmin,
            VOServiceFactory.getRole(serviceDetails, "GUEST"));
    // Delete customer payment types -> suspends the subscription
    setDateFactory("2013-08-03 00:00:00");
    paymentSetup.deleteCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    subDetails = subscrSetup.getSubscriptionDetails(customerAdmin.getKey(),
            subDetails.getSubscriptionId());
    // Create upgraded service
    setDateFactory("2013-08-04 12:00:00");
    container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
    VOServiceDetails upgradedServiceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG10476_UPGRSUS2_FREE_SERVICE", TestService.EXAMPLE2,
                    TestPriceModel.FREE, technicalService,
                    supplierMarketplace);
    serviceSetup.registerCompatibleServices(
            basicSetup.getSupplierAdminKey(), serviceDetails,
            upgradedServiceDetails);
    // Upgrade subscription -> subscription is activated because
    // it is free!!
    setDateFactory("2013-08-04 23:10:00");
    VOSubscriptionDetails upgradedSubDetails = subscrSetup
            .upgradeSubscription(customerAdmin.getKey(), subDetails,
                    upgradedServiceDetails);
    setDateFactory("2013-08-11 15:00:00");
    paymentSetup.reassignCustomerPaymentTypes(
            basicSetup.getSupplierAdminKey(), customer);
    upgradedSubDetails = subscrSetup.getSubscriptionDetails(
            customerAdmin.getKey(), upgradedSubDetails.getSubscriptionId());
    // Terminate subscription
    setDateFactory("2013-08-15 08:12:00");
    subscrSetup
            .unsubscribeToService(upgradedSubDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register subscriptions and customer for the billing-result tests
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10476_UPGRSUS2", subDetails);
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG10476_UPGRSUS2", upgradedSubDetails);
    BillingIntegrationTestBase.updateCustomerListForTests(
            "BUG10476_UPGRSUS2", customer);
}
/**
 * Creates billing test data for bug 11021: upgrade a suspended subscription
 * to a free service. Unlike the bug 10476 scenarios, the subscription is
 * suspended by deleting the customer's billing contacts (not payment
 * types). Billing cut-off day is 3.
 *
 * @throws Exception if any setup step fails
 */
public void createBug11021_upgradeSuspendedSubscription() throws Exception {
    setDateFactory("2013-08-02 10:00:00");
    // Create own customer, because the customer's payment types
    // are changed later on
    String customerAdminId = "Bug11021UpgrSusCustomerAdmin";
    VOOrganization customer = orgSetup.registerCustomer(
            "Bug11021UpgrSusCustomer",
            TestOrganizationSetup.ORGANIZATION_DOMICILE_DE,
            customerAdminId, supplierMarketplace.getMarketplaceId(),
            basicSetup.getSupplierOrgID());
    VOUser customerAdmin = orgSetup.getUser(customerAdminId, true);
    VOServiceDetails serviceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG11021_UPGRSUS_SERVICE", TestService.EXAMPLE2,
                    TestPriceModel.EXAMPLE_PERUNIT_WEEK_ROLES,
                    technicalService, supplierMarketplace);
    setCutOffDay(basicSetup.getSupplierAdminKey(), 3);
    // subscribe
    VOSubscriptionDetails subDetails = subscrSetup.subscribeToService(
            customerAdmin.getKey(), "BUG11021_UPGRSUS", serviceDetails,
            customerAdmin,
            VOServiceFactory.getRole(serviceDetails, "GUEST"));
    // Delete the customer's billing contacts -> subscription is suspended
    setDateFactory("2013-08-03 00:00:00");
    subDetails = deleteBillingContactsAndUpdateSub(customerAdmin.getKey(),
            subDetails);
    // Create upgraded service
    setDateFactory("2013-08-04 12:00:00");
    container.login(customerAdmin.getKey(), ROLE_ORGANIZATION_ADMIN);
    VOServiceDetails upgradedServiceDetails = serviceSetup
            .createPublishAndActivateMarketableService(
                    basicSetup.getSupplierAdminKey(),
                    "BUG11021_UPGRSUS_FREE_SERVICE", TestService.EXAMPLE2,
                    TestPriceModel.FREE, technicalService,
                    supplierMarketplace);
    serviceSetup.registerCompatibleServices(
            basicSetup.getSupplierAdminKey(), serviceDetails,
            upgradedServiceDetails);
    // Upgrade subscription -> subscription is activated because
    // it is free!!
    setDateFactory("2013-08-10 23:10:00");
    VOSubscriptionDetails upgradedSubDetails = subscrSetup
            .upgradeSubscription(customerAdmin.getKey(), subDetails,
                    upgradedServiceDetails);
    // Terminate subscription
    setDateFactory("2013-08-15 08:12:00");
    subscrSetup
            .unsubscribeToService(upgradedSubDetails.getSubscriptionId());
    resetCutOffDay(basicSetup.getSupplierAdminKey());
    // Register subscriptions and customer for the billing-result tests
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG11021_UPGRSUS", subDetails);
    BillingIntegrationTestBase.updateSubscriptionListForTests(
            "BUG11021_UPGRSUS", upgradedSubDetails);
    BillingIntegrationTestBase.updateCustomerListForTests(
            "BUG11021_UPGRSUS", customer);
}
/**
 * Creates billing test data for bug 11822: the purchase order number of a
 * per-unit/month subscription is changed mid-period, then the subscription
 * is terminated. Uses a freshly created supplier/customer pair and a
 * billing cut-off day of 1.
 *
 * @throws Exception if any setup step fails
 */
public void bug11822_changePurchaseOrderNumber() throws Exception {
    VendorData supplierData = setupNewSupplier("2015-05-01 08:00:00");
    setCutOffDay(supplierData.getAdminKey(), 1);
    CustomerData customerData = registerCustomer(supplierData);
    VOServiceDetails serviceDetails = createPublishActivateService(
            supplierData, TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
            "BUG11822_PERUNIT_MONTH_SERVICE");
    VOSubscriptionDetails subDetails = subscribe(
            customerData.getAdminUser(), "BUG11822_CHANGE_PON",
            "My subscription 4711", serviceDetails,
            DateTimeHandling.calculateMillis("2015-06-01 00:00:00"),
            "ADMIN");
    // Change Purchase Order Number
    // Work on a clone so the original details object stays untouched
    VOSubscriptionDetails modifiedSubDetails = (VOSubscriptionDetails) (ReflectiveClone
            .clone(subDetails));
    modifiedSubDetails
            .setPurchaseOrderNumber("My new subscription reference 8888");
    modifiedSubDetails = modifySubscription(customerData.getAdminUser(),
            modifiedSubDetails, null,
            DateTimeHandling.calculateMillis("2015-06-30 00:00:00"));
    unsubscribe(customerData.getAdminUser(),
            modifiedSubDetails.getSubscriptionId(), "2015-07-01 00:00:00");
    resetCutOffDay(supplierData.getAdminKey());
    // Cache the vendor data for the billing-result tests
    cacheTestData("BUG11822_CHANGE_PON", new TestData(supplierData));
}
/**
 * Creates billing test data for bug 11822 (upgrade variant): a
 * per-unit/month subscription is upgraded to a compatible service and the
 * purchase order number is changed afterwards. Billing cut-off day is 1.
 *
 * @throws Exception if any setup step fails
 */
public void bug11822_changePurchaseOrderNumber_afterUpgrade()
        throws Exception {
    VendorData supplierData = setupNewSupplier("2015-05-01 08:00:00");
    setCutOffDay(supplierData.getAdminKey(), 1);
    CustomerData customerData = registerCustomer(supplierData);
    VOServiceDetails serviceDetails = createPublishActivateService(
            supplierData, TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
            "BUG11822_PERUNIT_MONTH_SERVICE2");
    VOSubscriptionDetails subDetails = subscribe(
            customerData.getAdminUser(),
            "BUG11822_CHANGE_PON_AFTER_UPGRADE", "My subscription 4712",
            serviceDetails,
            DateTimeHandling.calculateMillis("2015-06-01 00:00:00"),
            "ADMIN");
    // create upgrade service
    VOServiceDetails upgradeService = createAndRegisterCompatibleService(
            supplierData, TestPriceModel.EXAMPLE_PERUNIT_MONTH_ROLES,
            serviceDetails, "BUG11822_PERUNIT_MONTH_UPGRADE_SERVICE");
    // upgrade subscription
    VOSubscriptionDetails upgradedSubDetails = upgrade(
            customerData.getAdminUser(), subDetails, upgradeService,
            DateTimeHandling.calculateMillis("2015-06-16 00:00:00"));
    // Change Purchase Order Number
    // Work on a clone so the original details object stays untouched
    VOSubscriptionDetails modifiedSubDetails = (VOSubscriptionDetails) (ReflectiveClone
            .clone(upgradedSubDetails));
    modifiedSubDetails
            .setPurchaseOrderNumber("My new subscription reference 1313");
    modifySubscription(customerData.getAdminUser(), modifiedSubDetails,
            null, DateTimeHandling.calculateMillis("2015-06-30 00:00:00"));
    resetCutOffDay(supplierData.getAdminKey());
    // Cache the vendor data for the billing-result tests
    cacheTestData("BUG11822_CHANGE_PON_AFTER_UPGRADE", new TestData(
            supplierData));
}
}
| |
package enderamm.item;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.entity.Entity;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.Vec3;
import org.lwjgl.util.vector.Vector3f;
import org.lwjgl.util.vector.Vector4f;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
/**
 * Mutable 3-component double-precision vector used for entity/tile-entity
 * positions and general vector math. Unless noted otherwise, operations
 * mutate this instance and return {@code this} for chaining; call
 * {@link #copy()} first when the original value must be preserved.
 */
public class EAVector3 {
    public double x;
    public double y;
    public double z;

    /** Creates the zero vector (0, 0, 0). */
    public EAVector3() {
    }

    /** Creates a vector with the given components. */
    public EAVector3(double d, double d1, double d2) {
        this.x = d;
        this.y = d1;
        this.z = d2;
    }

    /** Copy constructor. */
    public EAVector3(EAVector3 vec) {
        this.x = vec.x;
        this.y = vec.y;
        this.z = vec.z;
    }

    /** Creates a vector from a Minecraft {@link Vec3}. */
    public EAVector3(Vec3 vec) {
        this.x = vec.xCoord;
        this.y = vec.yCoord;
        this.z = vec.zCoord;
    }

    /** @return an independent copy of this vector */
    public EAVector3 copy() {
        return new EAVector3(this);
    }

    /** @return the entity's position (its origin, not its center) */
    public static EAVector3 fromEntity(Entity e) {
        return new EAVector3(e.posX, e.posY, e.posZ);
    }

    /** @return the entity's position shifted to half its height (its center) */
    public static EAVector3 fromEntityCenter(Entity e) {
        return new EAVector3(e.posX, e.posY - e.yOffset + e.height / 2.0F, e.posZ);
    }

    /** @return the tile entity's block coordinates */
    public static EAVector3 fromTileEntity(TileEntity e) {
        return new EAVector3(e.xCoord, e.yCoord, e.zCoord);
    }

    /** @return the center point of the tile entity's block */
    public static EAVector3 fromTileEntityCenter(TileEntity e) {
        return new EAVector3(e.xCoord + 0.5D, e.yCoord + 0.5D, e.zCoord + 0.5D);
    }

    /** @deprecated use {@link #EAVector3(Vec3)} directly */
    @Deprecated
    public static EAVector3 fromVec3D(Vec3 vec) {
        return new EAVector3(vec);
    }

    /** Sets all three components; returns this vector. */
    public EAVector3 set(double d, double d1, double d2) {
        this.x = d;
        this.y = d1;
        this.z = d2;
        return this;
    }

    /** Copies all components from {@code vec}; returns this vector. */
    public EAVector3 set(EAVector3 vec) {
        this.x = vec.x;
        this.y = vec.y;
        this.z = vec.z;
        return this;
    }

    /**
     * Dot product with {@code vec}. Results marginally outside [-1, 1] are
     * clamped to exactly +/-1 so that callers such as {@link #angle(EAVector3)}
     * can pass the result to {@code Math.acos} without getting NaN from
     * floating-point rounding of normalized vectors.
     */
    public double dotProduct(EAVector3 vec) {
        double d = vec.x * this.x + vec.y * this.y + vec.z * this.z;
        if ((d > 1.0D) && (d < 1.00001D))
            d = 1.0D;
        else if ((d < -1.0D) && (d > -1.00001D))
            d = -1.0D;
        return d;
    }

    /** Dot product with the vector (d, d1, d2); no clamping applied. */
    public double dotProduct(double d, double d1, double d2) {
        return d * this.x + d1 * this.y + d2 * this.z;
    }

    /** In-place cross product: this = this x vec. */
    public EAVector3 crossProduct(EAVector3 vec) {
        double d = this.y * vec.z - this.z * vec.y;
        double d1 = this.z * vec.x - this.x * vec.z;
        double d2 = this.x * vec.y - this.y * vec.x;
        this.x = d;
        this.y = d1;
        this.z = d2;
        return this;
    }

    /** Adds the given components in place; returns this vector. */
    public EAVector3 add(double d, double d1, double d2) {
        this.x += d;
        this.y += d1;
        this.z += d2;
        return this;
    }

    /** Adds {@code vec} in place; returns this vector. */
    public EAVector3 add(EAVector3 vec) {
        this.x += vec.x;
        this.y += vec.y;
        this.z += vec.z;
        return this;
    }

    /** Subtracts {@code vec} in place; returns this vector. */
    public EAVector3 subtract(EAVector3 vec) {
        this.x -= vec.x;
        this.y -= vec.y;
        this.z -= vec.z;
        return this;
    }

    /** Scales all components by {@code d} in place; returns this vector. */
    public EAVector3 multiply(double d) {
        this.x *= d;
        this.y *= d;
        this.z *= d;
        return this;
    }

    /** Component-wise multiplication in place; returns this vector. */
    public EAVector3 multiply(EAVector3 f) {
        this.x *= f.x;
        this.y *= f.y;
        this.z *= f.z;
        return this;
    }

    /** Component-wise multiplication by (fx, fy, fz) in place. */
    public EAVector3 multiply(double fx, double fy, double fz) {
        this.x *= fx;
        this.y *= fy;
        this.z *= fz;
        return this;
    }

    /** @return the Euclidean length of this vector */
    public double mag() {
        return Math.sqrt(this.x * this.x + this.y * this.y + this.z * this.z);
    }

    /** @return the squared length (avoids the sqrt of {@link #mag()}) */
    public double magSquared() {
        return this.x * this.x + this.y * this.y + this.z * this.z;
    }

    /** Scales this vector to unit length; the zero vector is left unchanged. */
    public EAVector3 normalize() {
        double d = mag();
        if (d != 0.0D) {
            multiply(1.0D / d);
        }
        return this;
    }

    /** Human-readable form with components rounded to 4 significant digits. */
    @Override
    public String toString() {
        MathContext cont = new MathContext(4, RoundingMode.HALF_UP);
        return "Vector: " + new BigDecimal(this.x, cont) + ", "
                + new BigDecimal(this.y, cont) + ", "
                + new BigDecimal(this.z, cont);
    }

    /** Replaces this vector with one perpendicular to its original value. */
    public EAVector3 perpendicular() {
        if (this.z == 0.0D) {
            return zCrossProduct();
        }
        return xCrossProduct();
    }

    /** In-place cross product with the X unit vector: this = this x (1,0,0). */
    public EAVector3 xCrossProduct() {
        double d = this.z;
        double d1 = -this.y;
        this.x = 0.0D;
        this.y = d;
        this.z = d1;
        return this;
    }

    /** In-place cross product with the Z unit vector: this = this x (0,0,1). */
    public EAVector3 zCrossProduct() {
        double d = this.y;
        double d1 = -this.x;
        this.x = d;
        this.y = d1;
        this.z = 0.0D;
        return this;
    }

    /** In-place cross product with the Y unit vector: this = this x (0,1,0). */
    public EAVector3 yCrossProduct() {
        double d = -this.z;
        double d1 = this.x;
        this.x = d;
        this.y = 0.0D;
        this.z = d1;
        return this;
    }

    /** @return a new Minecraft {@link Vec3} with the same components */
    public Vec3 toVec3D() {
        return Vec3.createVectorHelper(this.x, this.y, this.z);
    }

    /** @return the angle in radians between this vector and {@code vec} */
    public double angle(EAVector3 vec) {
        return Math.acos(copy().normalize().dotProduct(vec.copy().normalize()));
    }

    /** @return true when all three components are exactly zero */
    public boolean isZero() {
        return (this.x == 0.0D) && (this.y == 0.0D) && (this.z == 0.0D);
    }

    /**
     * Returns true when this vector lies on a single coordinate axis, i.e.
     * at most one component is non-zero (the zero vector is axial).
     * <p>
     * Fixed: the previous implementation ignored the x component
     * ({@code y == 0 || z == 0}) and reported vectors such as (1, 0, 5)
     * as axial.
     */
    public boolean isAxial() {
        if (this.x == 0.0D) {
            return (this.y == 0.0D) || (this.z == 0.0D);
        }
        return (this.y == 0.0D) && (this.z == 0.0D);
    }

    /** Client-side helper: this vector as an LWJGL {@link Vector3f}. */
    @SideOnly(Side.CLIENT)
    public Vector3f vector3f() {
        return new Vector3f((float) this.x, (float) this.y, (float) this.z);
    }

    /** Client-side helper: this vector as a homogeneous {@link Vector4f} (w = 1). */
    @SideOnly(Side.CLIENT)
    public Vector4f vector4f() {
        return new Vector4f((float) this.x, (float) this.y, (float) this.z,
                1.0F);
    }

    /** Negates all components in place; returns this vector. */
    public EAVector3 negate() {
        this.x = (-this.x);
        this.y = (-this.y);
        this.z = (-this.z);
        return this;
    }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2015 the original author or authors.
*/
package org.assertj.core.api;
import static org.assertj.core.error.ShouldBeAfter.shouldBeAfter;
import static org.assertj.core.error.ShouldBeAfterOrEqualsTo.shouldBeAfterOrEqualsTo;
import static org.assertj.core.error.ShouldBeBefore.shouldBeBefore;
import static org.assertj.core.error.ShouldBeBeforeOrEqualsTo.shouldBeBeforeOrEqualsTo;
import static org.assertj.core.error.ShouldBeEqualIgnoringNanos.shouldBeEqualIgnoringNanos;
import static org.assertj.core.error.ShouldBeEqualIgnoringSeconds.shouldBeEqualIgnoringSeconds;
import static org.assertj.core.error.ShouldHaveSameHourAs.shouldHaveSameHourAs;
import java.time.LocalTime;
import org.assertj.core.internal.Failures;
import org.assertj.core.internal.Objects;
/**
* Assertions for {@link LocalTime} type from new Date & Time API introduced in Java 8.
*/
public abstract class AbstractLocalTimeAssert<S extends AbstractLocalTimeAssert<S>>
extends AbstractAssert<S, LocalTime> {
/** Error message used when the {@code LocalTime} to compare against is {@code null}. */
public static final String NULL_LOCAL_TIME_PARAMETER_MESSAGE = "The LocalTime to compare actual with should not be null";
/**
 * Creates a new <code>{@link org.assertj.core.api.AbstractLocalTimeAssert}</code>.
 *
 * @param actual the actual value to verify
 * @param selfType the "self type" used by {@code myself} for fluent chaining
 */
protected AbstractLocalTimeAssert(LocalTime actual, Class<?> selfType) {
    super(actual, selfType);
}
/** Returns the actual {@code LocalTime} under assertion; visible for tests. */
protected LocalTime getActual() {
    return actual;
}
/**
 * Verifies that the actual {@code LocalTime} is <b>strictly</b> before the given one.
 * <p>
 * Example :
 * <pre><code class='java'> assertThat(parse("12:00:00")).isBefore(parse("13:00:00"));</code></pre>
 *
 * @param other the given {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if other {@code LocalTime} is {@code null}.
 * @throws AssertionError if the actual {@code LocalTime} is not strictly before the given one.
 */
public S isBefore(LocalTime other) {
  Objects.instance().assertNotNull(info, actual);
  assertLocalTimeParameterIsNotNull(other);
  if (actual.isBefore(other)) {
    return myself;
  }
  throw Failures.instance().failure(info, shouldBeBefore(actual, other));
}
/**
 * Same assertion as {@link #isBefore(LocalTime)}, with the expected value given as a String that
 * must follow <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>; it is converted with {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // you can express expected LocalTime as String (AssertJ taking care of the conversion)
 * assertThat(parse("12:59")).isBefore("13:00");</code></pre>
 *
 * @param localTimeAsString String representing a {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is not strictly before the {@link LocalTime} built
 *           from given String.
 */
public S isBefore(String localTimeAsString) {
  assertLocalTimeAsStringParameterIsNotNull(localTimeAsString);
  LocalTime other = LocalTime.parse(localTimeAsString);
  return isBefore(other);
}
/**
 * Verifies that the actual {@code LocalTime} is before or equals to the given one.
 * <p>
 * Example :
 * <pre><code class='java'> assertThat(parse("12:00:00")).isBeforeOrEqualTo(parse("12:00:00"))
 *                              .isBeforeOrEqualTo(parse("12:00:01"));</code></pre>
 *
 * @param other the given {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if other {@code LocalTime} is {@code null}.
 * @throws AssertionError if the actual {@code LocalTime} is not before or equals to the given one.
 */
public S isBeforeOrEqualTo(LocalTime other) {
  Objects.instance().assertNotNull(info, actual);
  assertLocalTimeParameterIsNotNull(other);
  // "before or equal" holds exactly when actual is not after other
  if (!actual.isAfter(other)) {
    return myself;
  }
  throw Failures.instance().failure(info, shouldBeBeforeOrEqualsTo(actual, other));
}
/**
 * Same assertion as {@link #isBeforeOrEqualTo(LocalTime)}, with the expected value given as a String
 * that must follow <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>; it is converted with {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // you can express expected LocalTime as String (AssertJ taking care of the conversion)
 * assertThat(parse("12:00:00")).isBeforeOrEqualTo("12:00:00")
 *                              .isBeforeOrEqualTo("13:00:00");</code></pre>
 *
 * @param localTimeAsString String representing a {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is not before or equals to the {@link LocalTime} built from
 *           given String.
 */
public S isBeforeOrEqualTo(String localTimeAsString) {
  assertLocalTimeAsStringParameterIsNotNull(localTimeAsString);
  LocalTime other = LocalTime.parse(localTimeAsString);
  return isBeforeOrEqualTo(other);
}
/**
 * Verifies that the actual {@code LocalTime} is after or equals to the given one.
 * <p>
 * Example :
 * <pre><code class='java'> assertThat(parse("13:00:00")).isAfterOrEqualTo(parse("13:00:00"))
 *                              .isAfterOrEqualTo(parse("12:00:00"));</code></pre>
 *
 * @param other the given {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if other {@code LocalTime} is {@code null}.
 * @throws AssertionError if the actual {@code LocalTime} is not after or equals to the given one.
 */
public S isAfterOrEqualTo(LocalTime other) {
  Objects.instance().assertNotNull(info, actual);
  assertLocalTimeParameterIsNotNull(other);
  // "after or equal" holds exactly when actual is not before other
  if (!actual.isBefore(other)) {
    return myself;
  }
  throw Failures.instance().failure(info, shouldBeAfterOrEqualsTo(actual, other));
}
/**
 * Same assertion as {@link #isAfterOrEqualTo(LocalTime)}, with the expected value given as a
 * String in <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>, converted via {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // expected LocalTime expressed as a String (AssertJ performs the conversion)
 * assertThat(parse("13:00:00")).isAfterOrEqualTo("13:00:00")
 *                              .isAfterOrEqualTo("12:00:00");</code></pre>
 *
 * @param localTimeAsString String representing a {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is strictly before the parsed {@link LocalTime}.
 */
public S isAfterOrEqualTo(String localTimeAsString) {
  assertLocalTimeAsStringParameterIsNotNull(localTimeAsString);
  final LocalTime other = LocalTime.parse(localTimeAsString);
  return isAfterOrEqualTo(other);
}
/**
 * Verifies that the actual {@code LocalTime} is <b>strictly</b> after the given one.
 * <p>
 * Example :
 * <pre><code class='java'> assertThat(parse("13:00:00")).isAfter(parse("12:00:00"));</code></pre>
 *
 * @param other the given {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if other {@code LocalTime} is {@code null}.
 * @throws AssertionError if the actual {@code LocalTime} is not strictly after the given one.
 */
public S isAfter(LocalTime other) {
  Objects.instance().assertNotNull(info, actual);
  assertLocalTimeParameterIsNotNull(other);
  // LocalTime.isAfter(other) is defined as compareTo(other) > 0; anything <= 0 fails.
  if (actual.compareTo(other) <= 0) {
    throw Failures.instance().failure(info, shouldBeAfter(actual, other));
  }
  return myself;
}
/**
 * Same assertion as {@link #isAfter(LocalTime)}, with the expected value given as a String in
 * <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>, converted via {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // expected LocalTime expressed as a String (AssertJ performs the conversion)
 * assertThat(parse("13:00:00")).isAfter("12:00:00");</code></pre>
 *
 * @param localTimeAsString String representing a {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is not strictly after the parsed {@link LocalTime}.
 */
public S isAfter(String localTimeAsString) {
  assertLocalTimeAsStringParameterIsNotNull(localTimeAsString);
  final LocalTime other = LocalTime.parse(localTimeAsString);
  return isAfter(other);
}
/**
 * Same assertion as {@link #isEqualTo(Object)} (where Object is expected to be {@link LocalTime}),
 * with the expected value given as a String in <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>, converted via {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // expected LocalTime expressed as a String (AssertJ performs the conversion)
 * assertThat(parse("13:00:00")).isEqualTo("13:00:00");</code></pre>
 *
 * @param localTimeAsString String representing a {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is not equal to the parsed {@link LocalTime}.
 */
public S isEqualTo(String localTimeAsString) {
  assertLocalTimeAsStringParameterIsNotNull(localTimeAsString);
  final LocalTime expected = LocalTime.parse(localTimeAsString);
  return isEqualTo(expected);
}
/**
 * Same assertion as {@link #isNotEqualTo(Object)} (where Object is expected to be {@link LocalTime}),
 * with the expected value given as a String in <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>, converted via {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // expected LocalTime expressed as a String (AssertJ performs the conversion)
 * assertThat(parse("13:00:00")).isNotEqualTo("12:00:00");</code></pre>
 *
 * @param localTimeAsString String representing a {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is equal to the parsed {@link LocalTime}.
 */
public S isNotEqualTo(String localTimeAsString) {
  assertLocalTimeAsStringParameterIsNotNull(localTimeAsString);
  final LocalTime unexpected = LocalTime.parse(localTimeAsString);
  return isNotEqualTo(unexpected);
}
/**
 * Same assertion as {@link #isIn(Object...)} (where Objects are expected to be {@link LocalTime}),
 * with the expected values given as Strings in <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>, each converted via {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // expected LocalTimes expressed as Strings (AssertJ performs the conversion)
 * assertThat(parse("13:00:00")).isIn("12:00:00", "13:00:00");</code></pre>
 *
 * @param localTimesAsString String array representing {@link LocalTime}s.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is not among the parsed {@link LocalTime}s.
 */
public S isIn(String... localTimesAsString) {
  checkIsNotNullAndNotEmpty(localTimesAsString);
  final Object[] expected = convertToLocalTimeArray(localTimesAsString);
  return isIn(expected);
}
/**
 * Same assertion as {@link #isNotIn(Object...)} (where Objects are expected to be {@link LocalTime}),
 * with the expected values given as Strings in <a href=
 * "http://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html#ISO_LOCAL_TIME"
 * >ISO LocalTime format</a>, each converted via {@link LocalTime#parse(CharSequence)}.
 * <p>
 * Example :
 * <pre><code class='java'> // expected LocalTimes expressed as Strings (AssertJ performs the conversion)
 * assertThat(parse("13:00:00")).isNotIn("12:00:00", "14:00:00");</code></pre>
 *
 * @param localTimesAsString Array of String representing a {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if given String is null or can't be converted to a {@link LocalTime}.
 * @throws AssertionError if the actual {@code LocalTime} is among the parsed {@link LocalTime}s.
 */
public S isNotIn(String... localTimesAsString) {
  checkIsNotNullAndNotEmpty(localTimesAsString);
  final Object[] unexpected = convertToLocalTimeArray(localTimesAsString);
  return isNotIn(unexpected);
}
// Parses every ISO-formatted String into a LocalTime; propagates DateTimeParseException on bad input.
private static Object[] convertToLocalTimeArray(String... localTimesAsString) {
  final int count = localTimesAsString.length;
  final LocalTime[] parsed = new LocalTime[count];
  for (int i = 0; i < count; i++) {
    parsed[i] = LocalTime.parse(localTimesAsString[i]);
  }
  return parsed;
}
// Guards the varargs-based isIn/isNotIn variants against null or empty expected arrays.
private void checkIsNotNullAndNotEmpty(Object[] values) {
  if (values == null) {
    throw new IllegalArgumentException("The given LocalTime array should not be null");
  }
  if (values.length == 0) {
    throw new IllegalArgumentException("The given LocalTime array should not be empty");
  }
}
/**
 * Checks that the String representation used to build the expected {@link LocalTime} is not null,
 * otherwise throws an {@link IllegalArgumentException} with an explicit message.
 *
 * @param localTimeAsString String representing the {@link LocalTime} to compare actual with
 * @throws IllegalArgumentException with an explicit message if the given {@link String} is null
 */
private static void assertLocalTimeAsStringParameterIsNotNull(String localTimeAsString) {
  if (localTimeAsString != null) {
    return;
  }
  throw new IllegalArgumentException("The String representing the LocalTime to compare actual with should not be null");
}
/**
 * Checks that the {@link LocalTime} the actual value is compared against is not null, otherwise
 * throws an {@link IllegalArgumentException} with an explicit message.
 *
 * @param other the {@link LocalTime} to check
 * @throws IllegalArgumentException with an explicit message if the given {@link LocalTime} is null
 */
private static void assertLocalTimeParameterIsNotNull(LocalTime other) {
  if (other != null) {
    return;
  }
  throw new IllegalArgumentException("The LocalTime to compare actual with should not be null");
}
/**
 * Verifies that actual and given {@code LocalTime} have the same hour, minute and second;
 * the nanosecond field is ignored in the comparison.
 * <p>
 * Beware: two times only 1ns apart can still fail this assertion when they fall into
 * different seconds, e.g. 23:00:<b>01.000000000</b> vs 23:00:<b>00.999999999</b>.
 * <p>
 * Code example :
 * <pre><code class='java'> // successful assertions
 * LocalTime localTime1 = LocalTime.of(12, 0, 1, 0);
 * LocalTime localTime2 = LocalTime.of(12, 0, 1, 456);
 * assertThat(localTime1).isEqualToIgnoringNanos(localTime2);
 *
 * // failing assertions (even if time difference is only 1ns)
 * LocalTime localTimeA = LocalTime.of(12, 0, 1, 0);
 * LocalTime localTimeB = LocalTime.of(12, 0, 0, 999999999);
 * assertThat(localTimeA).isEqualToIgnoringNanos(localTimeB);</code></pre>
 *
 * @param other the given {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if other {@code LocalTime} is {@code null}.
 * @throws AssertionError if the two times differ in hour, minute or second.
 */
public S isEqualToIgnoringNanos(LocalTime other) {
  Objects.instance().assertNotNull(info, actual);
  assertLocalTimeParameterIsNotNull(other);
  final boolean equalUpToSeconds = areEqualIgnoringNanos(actual, other);
  if (!equalUpToSeconds) {
    throw Failures.instance().failure(info, shouldBeEqualIgnoringNanos(actual, other));
  }
  return myself;
}
/**
 * Verifies that actual and given {@link LocalTime} have the same hour and minute; the second
 * and nanosecond fields are ignored in the comparison.
 * <p>
 * Beware: two times only 1s apart can still fail this assertion when they fall into different
 * minutes, e.g. 23:<b>01:00</b>.000 vs 23:<b>00:59</b>.000.
 * <p>
 * Code example :
 * <pre><code class='java'> // successful assertions
 * LocalTime localTime1 = LocalTime.of(23, 50, 0, 0);
 * LocalTime localTime2 = LocalTime.of(23, 50, 10, 456);
 * assertThat(localTime1).isEqualToIgnoringSeconds(localTime2);
 *
 * // failing assertions (even if time difference is only 1ms)
 * LocalTime localTimeA = LocalTime.of(23, 50, 00, 000);
 * LocalTime localTimeB = LocalTime.of(23, 49, 59, 999);
 * assertThat(localTimeA).isEqualToIgnoringSeconds(localTimeB);</code></pre>
 *
 * @param other the given {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if other {@code LocalTime} is {@code null}.
 * @throws AssertionError if the two times differ in hour or minute.
 */
public S isEqualToIgnoringSeconds(LocalTime other) {
  Objects.instance().assertNotNull(info, actual);
  assertLocalTimeParameterIsNotNull(other);
  final boolean equalUpToMinutes = areEqualIgnoringSeconds(actual, other);
  if (!equalUpToMinutes) {
    throw Failures.instance().failure(info, shouldBeEqualIgnoringSeconds(actual, other));
  }
  return myself;
}
/**
 * Verifies that actual and given {@code LocalTime} have the same hour; minute, second and
 * nanosecond fields are ignored in the comparison.
 * <p>
 * Beware: two times only 1s apart can still fail this assertion when they fall into different
 * hours, e.g. <b>01:00</b>:00.000 vs <b>00:59:59</b>.000.
 * <p>
 * Code example :
 * <pre><code class='java'> // successful assertions
 * LocalTime localTime1 = LocalTime.of(23, 50, 0, 0);
 * LocalTime localTime2 = LocalTime.of(23, 00, 2, 7);
 * assertThat(localTime1).hasSameHourAs(localTime2);
 *
 * // failing assertions (even if time difference is only 1ms)
 * LocalTime localTimeA = LocalTime.of(01, 00, 00, 000);
 * LocalTime localTimeB = LocalTime.of(00, 59, 59, 999);
 * assertThat(localTimeA).hasSameHourAs(localTimeB);</code></pre>
 *
 * @param other the given {@link LocalTime}.
 * @return this assertion object.
 * @throws AssertionError if the actual {@code LocalTime} is {@code null}.
 * @throws IllegalArgumentException if other {@code LocalTime} is {@code null}.
 * @throws AssertionError if the two times differ in hour.
 */
public S hasSameHourAs(LocalTime other) {
  Objects.instance().assertNotNull(info, actual);
  assertLocalTimeParameterIsNotNull(other);
  final boolean sameHour = haveSameHourField(actual, other);
  if (!sameHour) {
    throw Failures.instance().failure(info, shouldHaveSameHourAs(actual, other));
  }
  return myself;
}
/**
 * Returns true if both localtime have the same hour, minute and second fields, false otherwise
 * (nanosecond fields are ignored).
 *
 * @param actual the actual localtime. expected not be null
 * @param other the other localtime. expected not be null
 * @return true if both localtime have the same hour, minute and second fields, false otherwise.
 */
private static boolean areEqualIgnoringNanos(LocalTime actual, LocalTime other) {
  // Delegates the hour+minute comparison, then adds the seconds check on top.
  return areEqualIgnoringSeconds(actual, other) && actual.getSecond() == other.getSecond();
}
/**
 * Returns true if both localtime have the same hour and minute fields, false otherwise
 * (second and nanosecond fields are ignored).
 *
 * @param actual the actual localtime. expected not be null
 * @param other the other localtime. expected not be null
 * @return true if both localtime have the same hour and minute fields, false otherwise.
 */
private static boolean areEqualIgnoringSeconds(LocalTime actual, LocalTime other) {
  // Delegates the hour comparison, then adds the minutes check on top.
  return haveSameHourField(actual, other) && actual.getMinute() == other.getMinute();
}
// True when the hour-of-day fields match; all finer-grained fields are ignored.
private static boolean haveSameHourField(LocalTime actual, LocalTime other) {
  return other.getHour() == actual.getHour();
}
}
| |
/*
* Copyright 2007-2018 Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.numeric;
import com.intellij.codeInspection.*;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.SmartList;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.PsiReplacementUtil;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.MethodCallUtils;
import com.siyeh.ig.psiutils.TypeUtils;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.List;
import static com.siyeh.ig.numeric.UnaryPlusInspection.ConvertDoubleUnaryToPrefixOperationFix;
/**
 * Reports unary minus expressions whose effect can be folded into the surrounding expression
 * (e.g. {@code a + -b} or {@code a += -b}) as well as double unary minuses (e.g. {@code -(-b)}),
 * and offers quick fixes that simplify them.
 */
public final class UnnecessaryUnaryMinusInspection extends LocalInspectionTool {
  @Override
  public @NotNull PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    return new UnnecessaryUnaryMinusVisitor(holder, isOnTheFly);
  }

  /** Folds the unary minus into the parent operator, e.g. {@code a + -b} -> {@code a - b}, {@code a += -b} -> {@code a -= b}. */
  private static class ReplaceParentOperatorFix extends InspectionGadgetsFix {
    @Override
    @NotNull
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("unnecessary.unary.minus.quickfix");
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      // The descriptor points at the '-' token; its parent is the unary minus expression itself.
      final PsiElement element = descriptor.getPsiElement();
      final PsiPrefixExpression prefixExpression = (PsiPrefixExpression)element.getParent();
      final PsiExpression operand = prefixExpression.getOperand();
      if (operand == null) {
        return;
      }
      final PsiExpression parentExpression = (PsiExpression)prefixExpression.getParent();
      final CommentTracker commentTracker = new CommentTracker();
      @NonNls final StringBuilder newExpression = new StringBuilder();
      if (parentExpression instanceof PsiAssignmentExpression) {
        // Compound assignment case: rebuild "lhs OP operand" with the operator flipped.
        final PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)parentExpression;
        final PsiExpression lhs = assignmentExpression.getLExpression();
        newExpression.append(commentTracker.text(lhs));
        final IElementType tokenType = assignmentExpression.getOperationTokenType();
        if (tokenType.equals(JavaTokenType.PLUSEQ)) {
          newExpression.append("-=");
        }
        else {
          newExpression.append("+=");
        }
        newExpression.append(commentTracker.text(operand));
      }
      else if (parentExpression instanceof PsiPolyadicExpression) {
        // Polyadic case: copy all children in order, flipping the '+'/'-' token that
        // directly precedes the unary-minus operand and dropping the unary minus itself.
        final PsiPolyadicExpression polyadicExpression = (PsiPolyadicExpression)parentExpression;
        int lastOperatorIndex = -1;
        IElementType lastOperator = null;
        for (PsiElement child = polyadicExpression.getFirstChild(); child != null; child = child.getNextSibling()) {
          if (child == prefixExpression) {
            if (lastOperatorIndex == -1) {
              // The unary minus is the first operand; there is no preceding operator to flip.
              return;
            }
            // Flip the previously recorded operator in the buffer, then append the bare operand.
            newExpression.replace(lastOperatorIndex, lastOperatorIndex + 1, lastOperator == JavaTokenType.PLUS ? "-" : "+");
            newExpression.append(commentTracker.text(operand));
            continue;
          }
          if (PsiUtil.isJavaToken(child, JavaTokenType.PLUS)) {
            lastOperatorIndex = newExpression.length();
            lastOperator = JavaTokenType.PLUS;
          }
          else if (PsiUtil.isJavaToken(child, JavaTokenType.MINUS)) {
            lastOperatorIndex = newExpression.length();
            lastOperator = JavaTokenType.MINUS;
          }
          newExpression.append(commentTracker.text(child));
        }
        if (lastOperatorIndex == -1) {
          return;
        }
      }
      PsiReplacementUtil.replaceExpression(parentExpression, newExpression.toString(), commentTracker);
    }
  }

  /** Removes a nested pair of unary minuses, e.g. {@code -(-x)} -> {@code x}. */
  private static class RemoveDoubleUnaryMinusFix extends InspectionGadgetsFix {
    // Whether the highlighted '-' is the outer (left) minus of the pair; see addRemoveDoubleUnaryMinusFix.
    private final boolean myMinusOnTheLeft;

    private RemoveDoubleUnaryMinusFix(boolean minusOnTheLeft) {
      myMinusOnTheLeft = minusOnTheLeft;
    }

    @Override
    public @NotNull String getFamilyName() {
      return InspectionGadgetsBundle.message("unnecessary.unary.minus.remove.quickfix");
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      final PsiPrefixExpression prefixExpr = ObjectUtils.tryCast(descriptor.getPsiElement().getParent(), PsiPrefixExpression.class);
      if (prefixExpr == null) {
        return;
      }
      final PsiExpression oldExpr;
      final PsiExpression operand;
      if (myMinusOnTheLeft) {
        // Highlighted minus is the outer one: the inner minus sits (possibly parenthesized) in its operand.
        final PsiPrefixExpression child = ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprDown(prefixExpr.getOperand()),
                                                              PsiPrefixExpression.class);
        if (child == null) {
          return;
        }
        oldExpr = prefixExpr;
        operand = child.getOperand();
      }
      else {
        // Highlighted minus is the inner one: the outer minus sits (possibly parenthesized) above it.
        final PsiPrefixExpression parent = ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprUp(prefixExpr.getParent()),
                                                               PsiPrefixExpression.class);
        if (parent == null) {
          return;
        }
        oldExpr = parent;
        operand = prefixExpr.getOperand();
      }
      if (operand == null) {
        return;
      }
      // Replace the whole double-minus expression with the innermost operand's text.
      PsiReplacementUtil.replaceExpression(oldExpr, operand.getText());
    }
  }

  private static class UnnecessaryUnaryMinusVisitor extends BaseInspectionVisitor {
    private final ProblemsHolder myProblemsHolder;
    private final boolean myOnTheFly;

    private UnnecessaryUnaryMinusVisitor(@NotNull ProblemsHolder problemsHolder, boolean onTheFly) {
      myProblemsHolder = problemsHolder;
      myOnTheFly = onTheFly;
    }

    @Override
    public void visitPrefixExpression(PsiPrefixExpression prefixExpr) {
      super.visitPrefixExpression(prefixExpr);
      // Only unary-minus expressions (as recognized by the shared helper) are of interest.
      if (!ConvertDoubleUnaryToPrefixOperationFix.isDesiredPrefixExpression(prefixExpr, false)) {
        return;
      }
      final PsiExpression operand = prefixExpr.getOperand();
      if (operand == null) {
        return;
      }
      final List<LocalQuickFix> fixes = new SmartList<>();
      addReplaceParentOperatorFix(fixes, prefixExpr);
      if (myOnTheFly) {
        // These additional fixes are only offered interactively, not in batch mode.
        LocalQuickFix decrementFix = ConvertDoubleUnaryToPrefixOperationFix.createFix(prefixExpr);
        if (decrementFix != null) {
          fixes.add(decrementFix);
        }
        addRemoveDoubleUnaryMinusFix(fixes, prefixExpr);
      }
      if (!fixes.isEmpty()) {
        // Highlight only the '-' token, not the whole expression.
        myProblemsHolder.registerProblem(prefixExpr.getOperationSign(),
                                         InspectionGadgetsBundle.message("unnecessary.unary.minus.problem.descriptor"),
                                         ProblemHighlightType.LIKE_UNUSED_SYMBOL, fixes.toArray(LocalQuickFix[]::new));
      }
    }

    /** Registers {@link ReplaceParentOperatorFix} when the unary minus can be folded into a parent '+'/'-'/'+='. */
    private static void addReplaceParentOperatorFix(@NotNull List<LocalQuickFix> fixes, @NotNull PsiPrefixExpression prefixExpr) {
      final PsiElement parent = prefixExpr.getParent();
      if (parent instanceof PsiPolyadicExpression) {
        final PsiPolyadicExpression polyadicExpression = (PsiPolyadicExpression)parent;
        // In String concatenation '+' is not arithmetic; flipping it would change behavior.
        if (ExpressionUtils.hasType(polyadicExpression, CommonClassNames.JAVA_LANG_STRING)) {
          return;
        }
        final PsiJavaToken token = polyadicExpression.getTokenBeforeOperand(prefixExpr);
        if (token == null) {
          return;
        }
        final IElementType tokenType = token.getTokenType();
        if (!JavaTokenType.PLUS.equals(tokenType) && !JavaTokenType.MINUS.equals(tokenType)) {
          return;
        }
        fixes.add(new ReplaceParentOperatorFix());
      }
      else if (parent instanceof PsiAssignmentExpression) {
        final PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)parent;
        if (ExpressionUtils.hasType(assignmentExpression, CommonClassNames.JAVA_LANG_STRING)) {
          return;
        }
        final IElementType assignmentTokenType = assignmentExpression.getOperationTokenType();
        if (!JavaTokenType.PLUSEQ.equals(assignmentTokenType)) {
          return;
        }
        final PsiExpression rhs = assignmentExpression.getRExpression();
        if (!prefixExpr.equals(rhs)) {
          // don't warn on broken code.
          return;
        }
        fixes.add(new ReplaceParentOperatorFix());
      }
    }

    /** Registers {@link RemoveDoubleUnaryMinusFix} when the reported minus forms a double minus with a neighbor. */
    private static void addRemoveDoubleUnaryMinusFix(@NotNull List<LocalQuickFix> fixes,
                                                     @NotNull PsiPrefixExpression prefixExpr) {
      final PsiElement parent = PsiUtil.skipParenthesizedExprUp(prefixExpr.getParent());
      final PsiExpression operandExpr;
      final PsiExpression expr;
      final boolean minusOnTheLeft;
      final PsiExpression operand = PsiUtil.skipParenthesizedExprDown(prefixExpr.getOperand());
      if (operand == null) {
        return;
      }
      if (parent instanceof PsiPrefixExpression &&
          ConvertDoubleUnaryToPrefixOperationFix.isDesiredPrefixExpression((PsiPrefixExpression)parent, false)) {
        // The reported minus is the inner one of -(-x).
        operandExpr = prefixExpr.getOperand();
        expr = (PsiExpression)parent;
        minusOnTheLeft = false;
      }
      else if (operand instanceof PsiPrefixExpression &&
               ConvertDoubleUnaryToPrefixOperationFix.isDesiredPrefixExpression((PsiPrefixExpression)operand, false)) {
        // The reported minus is the outer one of -(-x).
        operandExpr = ((PsiPrefixExpression)operand).getOperand();
        expr = prefixExpr;
        minusOnTheLeft = true;
      }
      else {
        return;
      }
      if (operandExpr == null) {
        return;
      }
      final PsiType type = operandExpr.getType();
      // NOTE(review): skips the fix when dropping the minus pair would change the promoted type of an
      // argument in a surrounding method call — presumably to avoid altering overload resolution; confirm.
      if (TypeUtils.unaryNumericPromotion(type) != type && MethodCallUtils.isNecessaryForSurroundingMethodCall(expr, operandExpr)) {
        return;
      }
      fixes.add(new RemoveDoubleUnaryMinusFix(minusOnTheLeft));
    }
  }
}
| |
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.ThreadedIndexingAndSearchingTestCase;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.apache.lucene.util.NamedThreadFactory;
import org.apache.lucene.util._TestUtil;
@SuppressCodecs({ "SimpleText", "Memory", "Direct" })
public class TestSearcherManager extends ThreadedIndexingAndSearchingTestCase {
// Set to true by the SearcherFactory in doAfterWriter; checked in doClose to prove warming ran.
boolean warmCalled;
// Pruning policy applied to lifetimeMGR after refreshes; configured in testSearcherManager.
private SearcherLifetimeManager.Pruner pruner;
public void testSearcherManager() throws Exception {
  // Nightly runs prune by a random age (1..20); normal runs always prune searchers older than 1.
  final int pruneAge = TEST_NIGHTLY ? _TestUtil.nextInt(random(), 1, 20) : 1;
  pruner = new SearcherLifetimeManager.PruneByAge(pruneAge);
  runTest("TestSearcherManager");
}
@Override
protected IndexSearcher getFinalSearcher() throws Exception {
  // A non-NRT manager only sees committed segments, so flush pending changes first.
  if (isNRT == false) {
    writer.commit();
  }
  // Either the refresh happened here, or the searcher was already up to date.
  final boolean refreshed = mgr.maybeRefresh();
  assertTrue(refreshed || mgr.isSearcherCurrent());
  return mgr.acquire();
}
// Manager under test; created in doAfterWriter (NRT or directory-based, chosen randomly).
private SearcherManager mgr;
// Keeps old searchers alive so tests can simulate follow-on queries against past points in time.
private SearcherLifetimeManager lifetimeMGR;
// Tokens of searchers recorded in lifetimeMGR; guarded by synchronized(pastSearchers).
private final List<Long> pastSearchers = new ArrayList<Long>();
// Whether mgr was opened in near-real-time mode (directly on the writer).
private boolean isNRT;
@Override
protected void doAfterWriter(final ExecutorService es) throws Exception {
  // Factory that "warms" every new searcher with a real query and records that warming happened.
  final SearcherFactory warmingFactory = new SearcherFactory() {
    @Override
    public IndexSearcher newSearcher(IndexReader r) throws IOException {
      final IndexSearcher searcher = new IndexSearcher(r, es);
      TestSearcherManager.this.warmCalled = true;
      searcher.search(new TermQuery(new Term("body", "united")), 10);
      return searcher;
    }
  };
  final boolean useNRT = random().nextBoolean();
  if (useNRT) {
    // TODO: can we randomize the applyAllDeletes? But
    // somehow for final searcher we must apply
    // deletes...
    mgr = new SearcherManager(writer, true, warmingFactory);
    isNRT = true;
  } else {
    // SearcherManager needs to see empty commit:
    writer.commit();
    mgr = new SearcherManager(dir, warmingFactory);
    isNRT = false;
    assertMergedSegmentsWarmed = false;
  }
  lifetimeMGR = new SearcherLifetimeManager();
}
@Override
protected void doSearching(ExecutorService es, final long stopTime) throws Exception {
  // Background thread that repeatedly commits and refreshes the manager until stopTime,
  // pruning old searchers from the lifetime manager after each successful refresh.
  Thread reopenThread = new Thread() {
    @Override
    public void run() {
      try {
        if (VERBOSE) {
          System.out.println("[" + Thread.currentThread().getName() + "]: launch reopen thread");
        }
        while(System.currentTimeMillis() < stopTime) {
          Thread.sleep(_TestUtil.nextInt(random(), 1, 100));
          writer.commit();
          Thread.sleep(_TestUtil.nextInt(random(), 1, 5));
          boolean block = random().nextBoolean();
          if (block) {
            // Blocking refresh always completes, so always prune afterwards.
            mgr.maybeRefreshBlocking();
            lifetimeMGR.prune(pruner);
          } else if (mgr.maybeRefresh()) {
            // Non-blocking refresh may be skipped; prune only when it actually ran.
            lifetimeMGR.prune(pruner);
          }
        }
      } catch (Throwable t) {
        if (VERBOSE) {
          System.out.println("TEST: reopen thread hit exc");
          t.printStackTrace(System.out);
        }
        // Flag the whole run as failed; the base test class checks this flag.
        failed.set(true);
        throw new RuntimeException(t);
      }
    }
  };
  reopenThread.setDaemon(true);
  reopenThread.start();
  // Drive the search threads until stopTime, then wait for the reopener to finish.
  runSearchThreads(stopTime);
  reopenThread.join();
}
@Override
protected IndexSearcher getCurrentSearcher() throws Exception {
  // Occasionally (1 in 10) refresh inline from a search thread.
  if (random().nextInt(10) == 7) {
    // NOTE: not best practice to call maybeReopen
    // synchronous to your search threads, but still we
    // test as apps will presumably do this for
    // simplicity:
    if (mgr.maybeRefresh()) {
      lifetimeMGR.prune(pruner);
    }
  }
  IndexSearcher s = null;
  synchronized(pastSearchers) {
    while (pastSearchers.size() != 0 && random().nextDouble() < 0.25) {
      // 1/4 of the time pull an old searcher, ie, simulate
      // a user doing a follow-on action on a previous
      // search (drilling down/up, clicking next/prev page,
      // etc.)
      final Long token = pastSearchers.get(random().nextInt(pastSearchers.size()));
      s = lifetimeMGR.acquire(token);
      if (s == null) {
        // Searcher was pruned
        pastSearchers.remove(token);
      } else {
        break;
      }
    }
  }
  if (s == null) {
    // No (live) past searcher was chosen: take the current one and remember its
    // token so later calls can simulate follow-on searches against it.
    s = mgr.acquire();
    if (s.getIndexReader().numDocs() != 0) {
      Long token = lifetimeMGR.record(s);
      synchronized(pastSearchers) {
        if (!pastSearchers.contains(token)) {
          pastSearchers.add(token);
        }
      }
    }
  }
  return s;
}
@Override
protected void releaseSearcher(IndexSearcher s) throws Exception {
  // Searchers from both managers are released the same way: by dropping the
  // reader reference that was taken at acquire time.
  final IndexReader reader = s.getIndexReader();
  reader.decRef();
}
@Override
protected void doClose() throws Exception {
  // The warming factory must have been exercised at least once during the run.
  assertTrue(warmCalled);
  if (VERBOSE) {
    System.out.println("TEST: now close SearcherManager");
  }
  mgr.close();
  lifetimeMGR.close();
}
public void testIntermediateClose() throws IOException, InterruptedException {
  Directory dir = newDirectory();
  // Test can deadlock if we use SMS:
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
      TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler()));
  writer.addDocument(new Document());
  writer.commit();
  // Latches that park the refreshing thread inside the factory while we close the manager.
  final CountDownLatch awaitEnterWarm = new CountDownLatch(1);
  final CountDownLatch awaitClose = new CountDownLatch(1);
  final AtomicBoolean triedReopen = new AtomicBoolean(false);
  final ExecutorService es = random().nextBoolean() ? null : Executors.newCachedThreadPool(new NamedThreadFactory("testIntermediateClose"));
  final SearcherFactory factory = new SearcherFactory() {
    @Override
    public IndexSearcher newSearcher(IndexReader r) {
      try {
        // Block only once the background refresh has started; the initial
        // searcher created by the constructor must not wait.
        if (triedReopen.get()) {
          awaitEnterWarm.countDown();
          awaitClose.await();
        }
      } catch (InterruptedException e) {
        //
      }
      return new IndexSearcher(r, es);
    }
  };
  final SearcherManager searcherManager = random().nextBoolean()
      ? new SearcherManager(dir, factory)
      : new SearcherManager(writer, random().nextBoolean(), factory);
  if (VERBOSE) {
    System.out.println("sm created");
  }
  IndexSearcher searcher = searcherManager.acquire();
  try {
    assertEquals(1, searcher.getIndexReader().numDocs());
  } finally {
    searcherManager.release(searcher);
  }
  writer.addDocument(new Document());
  writer.commit();
  final AtomicBoolean success = new AtomicBoolean(false);
  final Throwable[] exc = new Throwable[1];
  // Refresh on a separate thread; it will park inside the factory above.
  Thread thread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        triedReopen.set(true);
        if (VERBOSE) {
          System.out.println("NOW call maybeReopen");
        }
        searcherManager.maybeRefresh();
        success.set(true);
      } catch (AlreadyClosedException e) {
        // expected
      } catch (Throwable e) {
        if (VERBOSE) {
          System.out.println("FAIL: unexpected exc");
          e.printStackTrace(System.out);
        }
        exc[0] = e;
        // use success as the barrier here to make sure we see the write
        success.set(false);
      }
    }
  });
  thread.start();
  if (VERBOSE) {
    System.out.println("THREAD started");
  }
  // Wait until the refresh thread is parked inside the factory, then close underneath it.
  awaitEnterWarm.await();
  if (VERBOSE) {
    System.out.println("NOW call close");
  }
  searcherManager.close();
  awaitClose.countDown();
  thread.join();
  // Acquiring from a closed manager must fail.
  try {
    searcherManager.acquire();
    fail("already closed");
  } catch (AlreadyClosedException ex) {
    // expected
  }
  assertFalse(success.get());
  assertTrue(triedReopen.get());
  assertNull("" + exc[0], exc[0]);
  writer.close();
  dir.close();
  if (es != null) {
    es.shutdown();
    es.awaitTermination(1, TimeUnit.SECONDS);
  }
}
public void testCloseTwice() throws Exception {
  // Closeable's contract requires close() to be idempotent; verify that a
  // SearcherManager tolerates being closed more than once.
  Directory directory = newDirectory();
  new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
  SearcherManager manager = new SearcherManager(directory, null);
  manager.close();
  manager.close(); // second close must be a no-op, not an error
  directory.close();
}
public void testReferenceDecrementIllegally() throws Exception {
  // Verifies that SearcherManager detects a reader whose refCount was
  // decremented outside of the manager's acquire/release protocol.
  Directory directory = newDirectory();
  IndexWriter writer = new IndexWriter(directory, newIndexWriterConfig(
      TEST_VERSION_CURRENT, new MockAnalyzer(random())).setMergeScheduler(new ConcurrentMergeScheduler()));
  SearcherManager manager = new SearcherManager(writer, false, new SearcherFactory());
  writer.addDocument(new Document());
  writer.commit();
  manager.maybeRefreshBlocking();

  // Balanced acquire/release pairs are legal.
  IndexSearcher first = manager.acquire();
  IndexSearcher second = manager.acquire();
  manager.release(first);
  manager.release(second);

  // Now corrupt the refCount behind the manager's back ...
  IndexSearcher tampered = manager.acquire();
  tampered.getIndexReader().decRef();
  manager.release(tampered);

  // ... which the manager must detect on the next acquire.
  try {
    manager.acquire();
    fail("acquire should have thrown an IllegalStateException since we modified the refCount outside of the manager");
  } catch (IllegalStateException expected) {
    // expected: the externally-decremented refCount was noticed
  }
  // manager.close(); -- already closed as a side effect of the corruption
  writer.close();
  directory.close();
}
/**
 * Verifies SearcherManager behavior after close(): releasing a searcher that
 * was acquired before close() must still succeed, while acquire() and
 * maybeRefresh() must throw AlreadyClosedException.
 *
 * <p>Fix: the original try/catch blocks would pass silently if no exception
 * was thrown at all; explicit fail(...) calls now pin the expected behavior.
 */
public void testEnsureOpen() throws Exception {
  Directory dir = newDirectory();
  new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
  SearcherManager sm = new SearcherManager(dir, null);
  IndexSearcher s = sm.acquire();
  sm.close();
  // releasing a searcher obtained before close() must still succeed
  sm.release(s);
  try {
    sm.acquire();
    fail("acquire() should have thrown AlreadyClosedException on a closed manager");
  } catch (AlreadyClosedException e) {
    // expected
  }
  try {
    sm.maybeRefresh();
    fail("maybeRefresh() should have thrown AlreadyClosedException on a closed manager");
  } catch (AlreadyClosedException e) {
    // expected
  }
  dir.close();
}
public void testListenerCalled() throws Exception {
  // A RefreshListener's afterRefresh(true) must fire only when an actual
  // refresh happened, not merely on commit.
  Directory directory = newDirectory();
  IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
  final AtomicBoolean refreshed = new AtomicBoolean(false);
  SearcherManager manager = new SearcherManager(writer, false, new SearcherFactory());
  manager.addListener(new ReferenceManager.RefreshListener() {
    @Override
    public void beforeRefresh() {
      // nothing to prepare
    }
    @Override
    public void afterRefresh(boolean didRefresh) {
      if (didRefresh) {
        refreshed.set(true);
      }
    }
  });
  writer.addDocument(new Document());
  writer.commit();
  // committing alone must not trigger the listener
  assertFalse(refreshed.get());
  manager.maybeRefreshBlocking();
  assertTrue(refreshed.get());
  manager.close();
  writer.close();
  directory.close();
}
/**
 * A SearcherFactory must return a searcher over the reader it is handed;
 * returning a searcher over an unrelated reader is illegal, and both
 * SearcherManager constructors must reject it with IllegalStateException.
 *
 * <p>Fix: the original try/catch blocks passed silently when no exception was
 * thrown; fail("should have hit exception") now makes the expectation real.
 */
public void testEvilSearcherFactory() throws Exception {
  final Random random = random();
  final Directory dir = newDirectory();
  final RandomIndexWriter w = new RandomIndexWriter(random, dir);
  w.commit();

  final IndexReader other = DirectoryReader.open(dir);

  final SearcherFactory theEvilOne = new SearcherFactory() {
    @Override
    public IndexSearcher newSearcher(IndexReader ignored) {
      // deliberately ignores the given reader and wraps an unrelated one
      return LuceneTestCase.newSearcher(other);
    }
  };

  try {
    new SearcherManager(dir, theEvilOne);
    fail("should have hit exception");
  } catch (IllegalStateException ise) {
    // expected
  }
  try {
    new SearcherManager(w.w, random.nextBoolean(), theEvilOne);
    fail("should have hit exception");
  } catch (IllegalStateException ise) {
    // expected
  }
  w.close();
  other.close();
  dir.close();
}
/**
 * Ensures maybeRefreshBlocking() releases the refresh lock when it returns,
 * so other threads can subsequently obtain it.
 *
 * <p>Fix: corrected the assertion-message typo "failde" -> "failed".
 */
public void testMaybeRefreshBlockingLock() throws Exception {
  // make sure that maybeRefreshBlocking releases the lock, otherwise other
  // threads cannot obtain it.
  final Directory dir = newDirectory();
  final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  w.close();

  final SearcherManager sm = new SearcherManager(dir, null);

  Thread t = new Thread() {
    @Override
    public void run() {
      try {
        // this used to not release the lock, preventing other threads from obtaining it.
        sm.maybeRefreshBlocking();
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  };
  t.start();
  t.join();

  // if maybeRefreshBlocking didn't release the lock, this will fail.
  assertTrue("failed to obtain the refreshLock!", sm.maybeRefresh());
  sm.close();
  dir.close();
}
}
| |
// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.integration.git;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.acceptance.PushOneCommit.FILE_CONTENT;
import static com.google.gerrit.acceptance.PushOneCommit.FILE_NAME;
import static com.google.gerrit.testing.GerritJUnit.assertThrows;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.gerrit.acceptance.GerritConfig;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.StandaloneSiteTest;
import com.google.gerrit.acceptance.UseSsh;
import com.google.gerrit.common.RawInputUtil;
import com.google.gerrit.extensions.api.GerritApi;
import com.google.gerrit.extensions.common.ChangeInput;
import com.google.gerrit.extensions.common.CommitInfo;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.Project;
import com.google.inject.Inject;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
import org.junit.Test;
@NoHttpd
@UseSsh
public class UploadArchiveIT extends StandaloneSiteTest {
private static final String[] SSH_KEYGEN_CMD =
new String[] {"ssh-keygen", "-t", "rsa", "-q", "-P", "", "-f"};
private static final String GIT_SSH_COMMAND =
"ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -o 'IdentitiesOnly yes' -i";
private static final String ARCHIVE = "archive";
@Inject private GerritApi gApi;
private String sshDestination;
private String identityPath;
private Project.NameKey project;
private CommitInfo commit;
@Test
@GerritConfig(name = "download.archive", value = "off")
public void archiveFeatureOff() throws Exception {
try (ServerContext ctx = startServer()) {
setUpTestHarness(ctx);
assertArchiveNotPermitted();
}
}
@Test
@GerritConfig(
name = "download.archive",
values = {"tar", "tbz2", "tgz", "txz"})
public void zipFormatDisabled() throws Exception {
try (ServerContext ctx = startServer()) {
setUpTestHarness(ctx);
assertArchiveNotPermitted();
}
}
@Test
public void verifyUploadArchiveFormats() throws Exception {
try (ServerContext ctx = startServer()) {
setUpTestHarness(ctx);
setUpChange();
for (String f : Arrays.asList("zip", "tar", "tar.gz", "tar.bz2", "tar.xz")) {
verifyUploadArchive(f);
}
}
}
private void verifyUploadArchive(String format) throws Exception {
Path outputPath = sitePaths.data_dir.resolve(ARCHIVE);
execute(
cmd(format, commit.commit),
sitePaths.data_dir.toFile(),
ImmutableMap.of("GIT_SSH_COMMAND", GIT_SSH_COMMAND + identityPath),
outputPath);
try (InputStream fi = Files.newInputStream(outputPath);
InputStream bi = new BufferedInputStream(fi);
ArchiveInputStream archive = archiveStreamForFormat(bi, format)) {
assertEntries(archive);
}
}
private ArchiveInputStream archiveStreamForFormat(InputStream bi, String format)
throws IOException {
switch (format) {
case "zip":
return new ZipArchiveInputStream(bi);
case "tar":
return new TarArchiveInputStream(bi);
case "tar.gz":
return new TarArchiveInputStream(new GzipCompressorInputStream(bi));
case "tar.bz2":
return new TarArchiveInputStream(new BZip2CompressorInputStream(bi));
case "tar.xz":
return new TarArchiveInputStream(new XZCompressorInputStream(bi));
default:
throw new IllegalArgumentException("Unknown archive format: " + format);
}
}
private void setUpTestHarness(ServerContext ctx) throws RestApiException, Exception {
ctx.getInjector().injectMembers(this);
project = new Project.NameKey("upload-archive-project-test");
gApi.projects().create(project.get());
setUpAuthentication();
sshDestination =
String.format(
"ssh://%s@%s:%s/%s",
"admin",
ctx.getServer().getSshdAddress().getHostName(),
ctx.getServer().getSshdAddress().getPort(),
project.get());
identityPath = sitePaths.data_dir.resolve(String.format("id_rsa_%s", "admin")).toString();
}
private void setUpAuthentication() throws Exception {
execute(
ImmutableList.<String>builder()
.add(SSH_KEYGEN_CMD)
.add(String.format("id_rsa_%s", "admin"))
.build());
gApi.accounts()
.id("admin")
.addSshKey(
new String(
java.nio.file.Files.readAllBytes(
sitePaths.data_dir.resolve(String.format("id_rsa_%s.pub", "admin"))),
UTF_8));
}
private ImmutableList<String> cmd(String format, String commit) {
return ImmutableList.<String>builder()
.add("git")
.add("archive")
.add("-f=" + format)
.add("--prefix=" + commit + "/")
.add("--remote=" + sshDestination)
.add(commit)
.add(FILE_NAME)
.build();
}
private String execute(ImmutableList<String> cmd) throws Exception {
return execute(cmd, sitePaths.data_dir.toFile(), ImmutableMap.of());
}
private void assertArchiveNotPermitted() {
IOException exception =
assertThrows(
IOException.class,
() ->
execute(
cmd("zip", "master"),
sitePaths.data_dir.toFile(),
ImmutableMap.of("GIT_SSH_COMMAND", GIT_SSH_COMMAND + identityPath)));
assertThat(exception)
.hasMessageThat()
.contains("fatal: upload-archive not permitted for format zip");
}
private void setUpChange() throws Exception {
ChangeInput in = new ChangeInput(project.get(), "master", "Test change");
in.newBranch = true;
String changeId = gApi.changes().create(in).info().changeId;
gApi.changes().id(changeId).edit().modifyFile(FILE_NAME, RawInputUtil.create(FILE_CONTENT));
gApi.changes().id(changeId).edit().publish();
commit = gApi.changes().id(changeId).current().commit(false);
}
private void assertEntries(ArchiveInputStream o) throws IOException {
Set<String> entryNames = new TreeSet<>();
ArchiveEntry e;
while ((e = o.getNextEntry()) != null) {
entryNames.add(e.getName());
}
assertThat(entryNames)
.containsExactly(
String.format("%s/", commit.commit), String.format("%s/%s", commit.commit, FILE_NAME))
.inOrder();
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2022 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.vertica.model;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ext.generic.model.GenericCatalog;
import org.jkiss.dbeaver.ext.generic.model.GenericDataSource;
import org.jkiss.dbeaver.ext.generic.model.GenericSchema;
import org.jkiss.dbeaver.ext.generic.model.GenericTableBase;
import org.jkiss.dbeaver.model.DBPObjectStatisticsCollector;
import org.jkiss.dbeaver.model.DBPSystemObject;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.exec.DBCException;
import org.jkiss.dbeaver.model.exec.DBCSession;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement;
import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectLookupCache;
import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCStructLookupCache;
import org.jkiss.dbeaver.model.meta.Association;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.DBSEntity;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.utils.ArrayUtils;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * VerticaSchema
 *
 * <p>Generic schema extended with Vertica-specific children — projections,
 * flex tables and user-defined functions — plus per-table storage statistics
 * read from v_monitor.column_storage.
 *
 * <p>Fix: UDFCache filtered v_catalog.user_functions on "schema_schema",
 * which is not a column of that system view; the correct column is
 * "schema_name".
 */
public class VerticaSchema extends GenericSchema implements DBPSystemObject, DBPObjectStatisticsCollector
{
    private static final Log log = Log.getLog(VerticaSchema.class);

    // Set once collectObjectStatistics() has run for this schema.
    private boolean hasStatistics;

    // Built-in Vertica schemas reported as system objects by isSystem().
    private static final String[] SYSTEM_SCHEMAS = {
        "v_catalog",
        "v_internal",
        "v_monitor",
        "v_txtindex",
    };

    final ProjectionCache projectionCache = new ProjectionCache();
    final UDFCache udfCache = new UDFCache();

    public VerticaSchema(GenericDataSource dataSource, GenericCatalog catalog, String schemaName) {
        super(dataSource, catalog, schemaName);
    }

    public ProjectionCache getProjectionCache() {
        return projectionCache;
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<VerticaTable> getPhysicalTables(DBRProgressMonitor monitor) throws DBException {
        return (List<VerticaTable>) super.getPhysicalTables(monitor);
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<VerticaTable> getTables(DBRProgressMonitor monitor) throws DBException {
        return (List<VerticaTable>) super.getTables(monitor);
    }

    @Override
    public Collection<? extends DBSObject> getChildren(@NotNull DBRProgressMonitor monitor) throws DBException {
        List<DBSObject> children = new ArrayList<>(getTables(monitor));
        //children.addAll(getProjections(monitor));
        return children;
    }

    /*
    @Override
    public DBSObject getChild(@NotNull DBRProgressMonitor monitor, @NotNull String childName)
        throws DBException
    {
        DBSObject child = getTable(monitor, childName);
        if (child == null) {
            child = getProjection(monitor, childName);
        }
        return child;
    }
    */

    /* Reference query kept for future use: reads tables and views with comments.
    JDBCPreparedStatement dbStat = session.prepareStatement(
        "SELECT tv.*,c.comment as REMARKS FROM (\n" +
        "SELECT NULL as TABLE_CAT, t.table_schema as TABLE_SCHEM, t.table_name as TABLE_NAME, (CASE t.is_flextable WHEN true THEN 'FLEXTABLE' ELSE 'TABLE' END) as TABLE_TYPE, NULL as TYPE_CAT,\n" +
        "\tt.owner_name, t.table_definition as DEFINITION \n" +
        "FROM v_catalog.tables t\n" +
        "UNION ALL\n" +
        "SELECT NULL as TABLE_CAT, v.table_schema as TABLE_SCHEM, v.table_name as TABLE_NAME, 'VIEW' as TABLE_TYPE, NULL as TYPE_CAT,\n" +
        "\tv.owner_name, v.view_definition as DEFINITION \n" +
        "FROM v_catalog.views v) tv\n" +
        "LEFT OUTER JOIN v_catalog.comments c ON c.object_type = tv.TABLE_TYPE AND c.object_schema = tv.table_schem AND c.object_name = tv.table_name \n" +
        "WHERE tv.table_schem=?" +
        (object == null && objectName == null ? "" : " AND tv.table_name LIKE ?") + "\n" +
        "ORDER BY 2, 3");
    dbStat.setString(1, owner.getName());
    if (object != null || objectName != null) {
        dbStat.setString(2, object != null ? object.getName() : objectName);
    }
    return dbStat;
    */

    /** Returns the subset of this schema's tables that are Vertica flex tables. */
    @Association
    public List<VerticaFlexTable> getFlexTables(DBRProgressMonitor monitor) throws DBException {
        List<? extends GenericTableBase> tables = getTables(monitor);
        if (tables != null) {
            List<VerticaFlexTable> filtered = new ArrayList<>();
            for (GenericTableBase table : tables) {
                if (table instanceof VerticaFlexTable) {
                    filtered.add((VerticaFlexTable) table);
                }
            }
            return filtered;
        }
        return null;
    }

    @Association
    public Collection<VerticaProjection> getProjections(DBRProgressMonitor monitor) throws DBException {
        return projectionCache.getAllObjects(monitor, this);
    }

    @Association
    public VerticaProjection getProjection(DBRProgressMonitor monitor, String name) throws DBException {
        return projectionCache.getObject(monitor, this, name);
    }

    @Association
    public Collection<VerticaUDF> getUserDefinedFunctions(DBRProgressMonitor monitor) throws DBException {
        return udfCache.getAllObjects(monitor, this);
    }

    @Override
    public boolean isSystem() {
        return ArrayUtils.contains(SYSTEM_SCHEMAS, getName());
    }

    @Override
    public boolean isStatisticsCollected() {
        return hasStatistics;
    }

    @Override
    public synchronized DBSObject refreshObject(@NotNull DBRProgressMonitor monitor) throws DBException {
        hasStatistics = false;
        return super.refreshObject(monitor);
    }

    void resetStatistics() {
        hasStatistics = false;
    }

    /**
     * Reads per-table used_bytes from v_monitor.column_storage and pushes each
     * row into the matching VerticaTable.
     */
    @Override
    public void collectObjectStatistics(DBRProgressMonitor monitor, boolean totalSizeOnly, boolean forceRefresh) throws DBException {
        try (DBCSession session = DBUtils.openMetaSession(monitor, this, "Read relation statistics")) {
            try (JDBCPreparedStatement dbStat = ((JDBCSession) session).prepareStatement(
                "SELECT anchor_table_name as table_name,\n" +
                "SUM(used_bytes) as used_bytes\n" +
                "FROM v_monitor.column_storage cs\n" +
                "WHERE cs.anchor_table_schema = ?\n" +
                "GROUP BY anchor_table_id, anchor_table_name\n" +
                "ORDER BY anchor_table_name"))
            {
                dbStat.setString(1, getName());
                try (JDBCResultSet dbResult = dbStat.executeQuery()) {
                    while (dbResult.next()) {
                        String tableName = dbResult.getString("table_name");
                        GenericTableBase table = getTable(monitor, tableName);
                        if (table instanceof VerticaTable) {
                            ((VerticaTable) table).fetchStatistics(dbResult);
                        }
                    }
                }
            } catch (SQLException e) {
                throw new DBCException("Error reading schema relation statistics", e);
            }
        } finally {
            // Marked as collected even when the query fails, so a broken
            // statistics query is not retried on every refresh.
            // NOTE(review): confirm this is intentional rather than a bug.
            hasStatistics = true;
        }
    }

    @NotNull
    @Override
    public Class<? extends DBSEntity> getPrimaryChildType(@Nullable DBRProgressMonitor monitor) throws DBException {
        return VerticaTable.class;
    }

    /** Cache of this schema's projections and their columns (v_catalog.projections). */
    public class ProjectionCache extends JDBCStructLookupCache<VerticaSchema, VerticaProjection, VerticaProjectionColumn> {
        ProjectionCache()
        {
            super("projection_name");
            setListOrderComparator(DBUtils.nameComparatorIgnoreCase());
        }

        @NotNull
        @Override
        public JDBCStatement prepareLookupStatement(@NotNull JDBCSession session, @NotNull VerticaSchema schema, @Nullable VerticaProjection object, @Nullable String objectName) throws SQLException {
            final JDBCPreparedStatement dbStat = session.prepareStatement(
                "SELECT p.*,c.comment FROM v_catalog.projections p\n" +
                "LEFT OUTER JOIN v_catalog.comments c ON c.object_type = 'PROJECTION' AND c.object_schema = p.projection_schema AND c.object_name = p.projection_name\n" +
                "WHERE p.projection_schema=?" +
                (object == null && objectName == null ? "" : " AND projection_name=?")
            );
            dbStat.setString(1, schema.getName());
            if (object != null || objectName != null) {
                dbStat.setString(2, object != null ? object.getName() : objectName);
            }
            return dbStat;
        }

        @Override
        protected VerticaProjection fetchObject(@NotNull JDBCSession session, @NotNull VerticaSchema owner, @NotNull JDBCResultSet dbResult)
            throws SQLException, DBException
        {
            return new VerticaProjection(VerticaSchema.this, dbResult);
        }

        @Override
        protected JDBCStatement prepareChildrenStatement(@NotNull JDBCSession session, @NotNull VerticaSchema owner, @Nullable VerticaProjection forTable)
            throws SQLException
        {
            String sql = ("SELECT pc.*,c.comment FROM v_catalog.projection_columns pc\n" +
                "LEFT OUTER JOIN v_catalog.comments c ON c.object_id = pc.column_id\n" +
                "WHERE pc.projection_id=?\n" +
                "ORDER BY pc.column_position");
            JDBCPreparedStatement dbStat = session.prepareStatement(sql);
            dbStat.setLong(1, forTable.getObjectId());
            return dbStat;
        }

        @Override
        protected VerticaProjectionColumn fetchChild(@NotNull JDBCSession session, @NotNull VerticaSchema owner, @NotNull VerticaProjection table, @NotNull JDBCResultSet dbResult)
            throws SQLException, DBException
        {
            return new VerticaProjectionColumn(table, dbResult);
        }
    }

    /** Cache of this schema's user-defined functions (v_catalog.user_functions). */
    public class UDFCache extends JDBCObjectLookupCache<VerticaSchema, VerticaUDF> {
        UDFCache()
        {
            super();
            setListOrderComparator(DBUtils.nameComparatorIgnoreCase());
        }

        @NotNull
        @Override
        public JDBCStatement prepareLookupStatement(@NotNull JDBCSession session, @NotNull VerticaSchema schema, @Nullable VerticaUDF object, @Nullable String objectName) throws SQLException {
            // FIX: the schema filter column in v_catalog.user_functions is
            // "schema_name"; the previous "schema_schema" does not exist there.
            final JDBCPreparedStatement dbStat = session.prepareStatement(
                "SELECT * FROM v_catalog.user_functions WHERE schema_name=?" +
                (object == null && objectName == null ? "" : " AND function_name=?")
            );
            dbStat.setString(1, schema.getName());
            if (object != null || objectName != null) {
                dbStat.setString(2, object != null ? object.getName() : objectName);
            }
            return dbStat;
        }

        @Override
        protected VerticaUDF fetchObject(@NotNull JDBCSession session, @NotNull VerticaSchema owner, @NotNull JDBCResultSet dbResult)
            throws SQLException, DBException
        {
            return new VerticaUDF(VerticaSchema.this, dbResult);
        }
    }
}
| |
/*
Copyright 2012 by Uday Kamath, Sean Luke, and George Mason University
Licensed under the Academic Free License version 3.0
See the file "LICENSE" for more information
*/
package ec.gp.breed;
import ec.*;
import ec.util.*;
import ec.gp.*;
import ec.gp.koza.*;
import java.util.*;
/**
* SizeFairCrossover works similarly to one written in the paper
* "Size Fair and Homologous Tree Genetic Programming Crossovers" by Langdon (1998).
* <p>SizeFairCrossover tries <i>tries</i> times to find a tree
* that has at least one fair size node based on size fair or homologous
* implementation. If it cannot
* find a valid tree in <i>tries</i> times, it gives up and simply
* copies the individual.
* <p>This pipeline typically produces up to 2 new individuals (the two newly-
* swapped individuals) per produce(...) call. If the system only
* needs a single individual, the pipeline will throw one of the
* new individuals away. The user can also have the pipeline always
* throw away the second new individual instead of adding it to the population.
* In this case, the pipeline will only typically
* produce 1 new individual per produce(...) call.
<p><b>Typical Number of Individuals Produced Per <tt>produce(...)</tt> call</b><br>
2 * minimum typical number of individuals produced by each source, unless tossSecondParent
is set, in which case it's simply the minimum typical number.
<p><b>Number of Sources</b><br>
2
<p><b>Parameters</b><br>
<table>
<tr><td valign=top><i>base</i>.<tt>tries</tt><br>
<font size=-1>int >= 1</font></td>
<td valign=top>(number of times to try finding valid pairs of nodes)</td></tr>
<tr><td valign=top><i>base</i>.<tt>maxdepth</tt><br>
<font size=-1>int >= 1</font></td>
<td valign=top>(maximum valid depth of a crossed-over subtree)</td></tr>
<tr><td valign=top><i>base</i>.<tt>tree.0</tt><br>
<font size=-1>0 < int < (num trees in individuals), if exists</font></td>
<td valign=top>(first tree for the crossover; if parameter doesn't exist, tree is picked at random)</td></tr>
<tr><td valign=top><i>base</i>.<tt>tree.1</tt><br>
<font size=-1>0 < int < (num trees in individuals), if exists</font></td>
<td valign=top>(second tree for the crossover; if parameter doesn't exist, tree is picked at random. This tree <b>must</b> have the same GPTreeConstraints as <tt>tree.0</tt>, if <tt>tree.0</tt> is defined.)</td></tr>
<tr><td valign=top><i>base</i>.<tt>ns.</tt><i>n</i><br>
<font size=-1>classname, inherits and != GPNodeSelector,<br>
or String <tt>same</tt></font></td>
<td valign=top>(GPNodeSelector for parent <i>n</i> (n is 0 or 1) If, for <tt>ns.1</tt> the value is <tt>same</tt>, then <tt>ns.1</tt> a copy of whatever <tt>ns.0</tt> is. Note that the default version has no <i>n</i>)</td></tr>
<tr><td valign=top><i>base</i>.<tt>toss</tt><br>
<font size=-1>bool = <tt>true</tt> or <tt>false</tt> (default)</font></td>
<td valign=top>(after crossing over with the first new individual, should its second sibling individual be thrown away instead of adding it to the population?)</td></tr>
<tr><td valign=top><i>base</i>.<tt>homologous</tt><br>
<font size=-1>bool = <tt>true</tt> or <tt>false</tt> (default)</font></td>
<td valign=top>(Is the implementation homologous (as opposed to size-fair)?)</td></tr>
</table>
<p><b>Default Base</b><br>
gp.breed.size-fair
<p><b>Parameter bases</b><br>
<table>
<tr><td valign=top><i>base</i>.<tt>ns.</tt><i>n</i><br>
<td>nodeselect<i>n</i> (<i>n</i> is 0 or 1)</td></tr>
</table>
* @author Uday Kamath and Sean Luke
* @version 1.0
*/
public class SizeFairCrossoverPipeline extends GPBreedingPipeline
{
private static final long serialVersionUID = 1;
public static final String P_NUM_TRIES = "tries";
public static final String P_MAXDEPTH = "maxdepth";
public static final String P_SIZEFAIR = "size-fair";
public static final String P_TOSS = "toss";
public static final String P_HOMOLOGOUS = "homologous";
public static final int INDS_PRODUCED = 2;
public static final int NUM_SOURCES = 2;
/** How the pipeline selects a node from individual 1 */
public GPNodeSelector nodeselect1;
/** How the pipeline selects a node from individual 2 */
public GPNodeSelector nodeselect2;
/** Is the first tree fixed? If not, this is -1 */
public int tree1;
/** Is the second tree fixed? If not, this is -1 */
public int tree2;
/** How many times the pipeline attempts to pick nodes until it gives up. */
public int numTries;
/**
* The deepest tree the pipeline is allowed to form. Single terminal trees
* are depth 1.
*/
public int maxDepth;
/** Should the pipeline discard the second parent after crossing over? */
public boolean tossSecondParent;
/** Temporary holding place for parents */
public GPIndividual parents[];
public boolean homologous;
/** Constructs the pipeline, allocating scratch space for the two selected parents. */
public SizeFairCrossoverPipeline()
    {
    parents = new GPIndividual[2];
    }
/** Returns this pipeline's default parameter base: gp.breed.size-fair. */
public Parameter defaultBase()
    {
    return GPBreedDefaults.base().push(P_SIZEFAIR);
    }
/** This pipeline draws from exactly two sources, one per parent. */
public int numSources()
    {
    return NUM_SOURCES;
    }
/**
 * Clones the pipeline, deep-cloning the two node selectors (they hold
 * internal caches) and the parents scratch array.
 */
public Object clone()
    {
    SizeFairCrossoverPipeline c = (SizeFairCrossoverPipeline) (super.clone());
    // deep-cloned stuff
    c.nodeselect1 = (GPNodeSelector) (nodeselect1.clone());
    c.nodeselect2 = (GPNodeSelector) (nodeselect2.clone());
    c.parents = (GPIndividual[]) parents.clone();
    return c;
    }
/**
 * Loads the pipeline's configuration from the parameter database: the two
 * node selectors, number of tries, maximum depth, optional fixed tree
 * indices, the toss flag, and the homologous flag.
 */
public void setup(final EvolutionState state, final Parameter base)
    {
    super.setup(state, base);

    Parameter def = defaultBase();

    // node selector for parent 0
    Parameter p = base.push(P_NODESELECTOR).push("0");
    Parameter d = def.push(P_NODESELECTOR).push("0");
    nodeselect1 = (GPNodeSelector) (state.parameters.getInstanceForParameter(p, d, GPNodeSelector.class));
    nodeselect1.setup(state, p);

    // node selector for parent 1; the string "same" means "clone selector 0"
    p = base.push(P_NODESELECTOR).push("1");
    d = def.push(P_NODESELECTOR).push("1");
    if (state.parameters.exists(p, d) && state.parameters.getString(p, d).equals(V_SAME))
        {
        // can't just copy it this time; the selectors
        // use internal caches. So we have to clone it no matter what
        nodeselect2 = (GPNodeSelector) (nodeselect1.clone());
        }
    else
        {
        nodeselect2 = (GPNodeSelector) (state.parameters.getInstanceForParameter(p, d, GPNodeSelector.class));
        nodeselect2.setup(state, p);
        }

    // number of attempts to find a compatible pair of crossover points (>= 1)
    numTries = state.parameters.getInt(base.push(P_NUM_TRIES), def.push(P_NUM_TRIES), 1);
    if (numTries == 0)
        state.output.fatal("GPCrossover Pipeline has an invalid number of tries (it must be >= 1).",
            base.push(P_NUM_TRIES), def.push(P_NUM_TRIES));

    // deepest tree allowed after crossover (a single terminal is depth 1)
    maxDepth = state.parameters.getInt(base.push(P_MAXDEPTH), def.push(P_MAXDEPTH), 1);
    if (maxDepth == 0)
        state.output.fatal("GPCrossover Pipeline has an invalid maximum depth (it must be >= 1).",
            base.push(P_MAXDEPTH), def.push(P_MAXDEPTH));

    // optional fixed tree indices; TREE_UNFIXED means pick at random
    tree1 = TREE_UNFIXED;
    if (state.parameters.exists(base.push(P_TREE).push("" + 0), def.push(P_TREE).push("" + 0)))
        {
        tree1 = state.parameters.getInt(base.push(P_TREE).push("" + 0), def.push(P_TREE).push("" + 0), 0);
        if (tree1 == -1)
            state.output.fatal("Tree fixed value, if defined, must be >= 0");
        }
    tree2 = TREE_UNFIXED;
    if (state.parameters.exists(base.push(P_TREE).push("" + 1), def.push(P_TREE).push("" + 1)))
        {
        tree2 = state.parameters.getInt(base.push(P_TREE).push("" + 1), def.push(P_TREE).push("" + 1), 0);
        if (tree2 == -1)
            state.output.fatal("Tree fixed value, if defined, must be >= 0");
        }

    tossSecondParent = state.parameters.getBoolean(base.push(P_TOSS), def.push(P_TOSS), false);

    // homologous (vs size-fair) crossover; defaults to false when the parameter is absent
    if(state.parameters.exists(base.push(P_HOMOLOGOUS), null))
        {
        //get the parameter
        homologous = state.parameters.getBoolean(base.push(P_HOMOLOGOUS), null, false);
        }
    }
/**
 * Returns the typical number of individuals produced per produce(...) call:
 * two children per crossover unless the second one is discarded.
 */
public int typicalIndsProduced()
    {
    if (tossSecondParent)
        return minChildProduction();
    return minChildProduction() * 2;
    }
/** Returns true if inner1 can feasibly be swapped into inner2's position. */
public boolean verifyPoints(final GPInitializer initializer,
    final GPNode inner1, final GPNode inner2)
    {
    // inner1 must be type-compatible with inner2's slot, and the swapped
    // subtree must not push the tree past maxDepth.
    return inner1.swapCompatibleWith(initializer, inner2)
        && inner1.depth() + inner2.atDepth() <= maxDepth;
    }
public int produce(final int min, final int max, final int start,
final int subpopulation, final Individual[] inds,
final EvolutionState state, final int thread)
{
// how many individuals should we make?
int n = typicalIndsProduced();
if (n < min)
n = min;
if (n > max)
n = max;
// should we bother?
if (!state.random[thread].nextBoolean(likelihood))
return reproduce(n, start, subpopulation, inds, state, thread, true); // DO produce children from source -- we've not done so already
GPInitializer initializer = ((GPInitializer) state.initializer);
for (int q = start; q < n + start; /* no increment */) // keep on going until we're filled up
{
// grab two individuals from our sources
if (sources[0] == sources[1]) // grab from the same source
sources[0].produce(2, 2, 0, subpopulation, parents, state, thread);
else // grab from different sources
{
sources[0].produce(1, 1, 0, subpopulation, parents, state, thread);
sources[1].produce(1, 1, 1, subpopulation, parents, state, thread);
}
// at this point, parents[] contains our two selected individuals
// are our tree values valid?
if (tree1 != TREE_UNFIXED && (tree1 < 0 || tree1 >= parents[0].trees.length))
// uh oh
state.output.fatal("GP Crossover Pipeline attempted to fix tree.0 to a value which was out of bounds of the array of the individual's trees. Check the pipeline's fixed tree values -- they may be negative or greater than the number of trees in an individual");
if (tree2 != TREE_UNFIXED && (tree2 < 0 || tree2 >= parents[1].trees.length))
// uh oh
state.output.fatal("GP Crossover Pipeline attempted to fix tree.1 to a value which was out of bounds of the array of the individual's trees. Check the pipeline's fixed tree values -- they may be negative or greater than the number of trees in an individual");
int t1 = 0;
int t2 = 0;
if (tree1 == TREE_UNFIXED || tree2 == TREE_UNFIXED)
{
do
// pick random trees -- their GPTreeConstraints must be the same
{
if (tree1 == TREE_UNFIXED)
if (parents[0].trees.length > 1)
t1 = state.random[thread].nextInt(parents[0].trees.length);
else
t1 = 0;
else
t1 = tree1;
if (tree2 == TREE_UNFIXED)
if (parents[1].trees.length > 1)
t2 = state.random[thread].nextInt(parents[1].trees.length);
else
t2 = 0;
else
t2 = tree2;
}
while (parents[0].trees[t1].constraints(initializer) != parents[1].trees[t2].constraints(initializer));
}
else
{
t1 = tree1;
t2 = tree2;
// make sure the constraints are okay
if (parents[0].trees[t1].constraints(initializer) != parents[1].trees[t2].constraints(initializer)) // uh oh
state.output.fatal("GP Crossover Pipeline's two tree choices are both specified by the user -- but their GPTreeConstraints are not the same");
}
boolean res1 = false;
boolean res2 = false;
GPTree tree2 = parents[1].trees[t2];
// pick some nodes
GPNode p1 = null;
GPNode p2 = null;
// lets walk on parent2 all nodes to get subtrees for each node, doing it once for O(N) and not O(N^2)
// because depth etc are computed and not stored
ArrayList nodeToSubtrees = new ArrayList();
// also HashMap for size to List() of nodes in that size for O(1) lookup
HashMap sizeToNodes = new HashMap();
this.traverseTreeForDepth(tree2.child, nodeToSubtrees, sizeToNodes);
// sort the ArrayList with comparator that sorts by subtrees
Collections.sort(nodeToSubtrees, new Comparator()
{
public int compare(Object o1, Object o2)
{
NodeInfo node1 = (NodeInfo)o1;
NodeInfo node2 = (NodeInfo)o2;
int comparison = 0;
if (node1.numberOfSubTreesBeneath > node2.numberOfSubTreesBeneath)
comparison = 1;
else if (node1.numberOfSubTreesBeneath < node2.numberOfSubTreesBeneath)
comparison = -1;
else if (node1.numberOfSubTreesBeneath == node2.numberOfSubTreesBeneath)
comparison = 0;
return comparison;
}
});
for (int x = 0; x < numTries; x++)
{
// pick a node in individual 1
p1 = nodeselect1.pickNode(state, subpopulation, thread, parents[0], parents[0].trees[t1]);
// now lets find "similar" in parent 2
p2 = findFairSizeNode(nodeToSubtrees, sizeToNodes, p1, tree2, state, thread);
// check for depth and swap-compatibility limits
res1 = verifyPoints(initializer, p2, p1); // p2 can fill p1's spot -- order is important!
if (n - (q - start) < 2 || tossSecondParent)
res2 = true;
else
res2 = verifyPoints(initializer, p1, p2); // p1 can fill p2's spot -- order is important!
// did we get something that had both nodes verified?
// we reject if EITHER of them is invalid. This is what lil-gp
// does.
// Koza only has numTries set to 1, so it's compatible as well.
if (res1 && res2)
break;
}
// at this point, res1 AND res2 are valid, OR
// either res1 OR res2 is valid and we ran out of tries, OR
// neither res1 nor res2 is valid and we rand out of tries.
// So now we will transfer to a tree which has res1 or res2
// valid, otherwise it'll just get replicated. This is
// compatible with both Koza and lil-gp.
// at this point I could check to see if my sources were breeding
// pipelines -- but I'm too lazy to write that code (it's a little
// complicated) to just swap one individual over or both over,
// -- it might still entail some copying. Perhaps in the future.
// It would make things faster perhaps, not requiring all that
// cloning.
// Create some new individuals based on the old ones -- since
// GPTree doesn't deep-clone, this should be just fine. Perhaps we
// should change this to proto off of the main species prototype,
// but
// we have to then copy so much stuff over; it's not worth it.
GPIndividual j1 = (GPIndividual) (parents[0].lightClone());
GPIndividual j2 = null;
if (n - (q - start) >= 2 && !tossSecondParent)
j2 = (GPIndividual) (parents[1].lightClone());
// Fill in various tree information that didn't get filled in there
j1.trees = new GPTree[parents[0].trees.length];
if (n - (q - start) >= 2 && !tossSecondParent)
j2.trees = new GPTree[parents[1].trees.length];
// at this point, p1 or p2, or both, may be null.
// If not, swap one in. Else just copy the parent.
for (int x = 0; x < j1.trees.length; x++)
{
if (x == t1 && res1) // we've got a tree with a kicking cross
// position!
{
j1.trees[x] = (GPTree) (parents[0].trees[x].lightClone());
j1.trees[x].owner = j1;
j1.trees[x].child = parents[0].trees[x].child.cloneReplacing(p2, p1);
j1.trees[x].child.parent = j1.trees[x];
j1.trees[x].child.argposition = 0;
j1.evaluated = false;
} // it's changed
else
{
j1.trees[x] = (GPTree) (parents[0].trees[x].lightClone());
j1.trees[x].owner = j1;
j1.trees[x].child = (GPNode) (parents[0].trees[x].child.clone());
j1.trees[x].child.parent = j1.trees[x];
j1.trees[x].child.argposition = 0;
}
}
if (n - (q - start) >= 2 && !tossSecondParent)
for (int x = 0; x < j2.trees.length; x++)
{
if (x == t2 && res2) // we've got a tree with a kicking
// cross position!
{
j2.trees[x] = (GPTree) (parents[1].trees[x].lightClone());
j2.trees[x].owner = j2;
j2.trees[x].child = parents[1].trees[x].child.cloneReplacing(p1, p2);
j2.trees[x].child.parent = j2.trees[x];
j2.trees[x].child.argposition = 0;
j2.evaluated = false;
} // it's changed
else
{
j2.trees[x] = (GPTree) (parents[1].trees[x].lightClone());
j2.trees[x].owner = j2;
j2.trees[x].child = (GPNode) (parents[1].trees[x].child.clone());
j2.trees[x].child.parent = j2.trees[x];
j2.trees[x].child.argposition = 0;
}
}
// add the individuals to the population
inds[q] = j1;
q++;
if (q < n + start && !tossSecondParent)
{
inds[q] = j2;
q++;
}
}
return n;
}
/**
 * Finds a crossover point in parent 2's tree that is "size fair" with respect
 * to the node already chosen in parent 1, following Langdon's size-fair
 * crossover: candidate subtree sizes are weighted so that the expected size
 * change of the offspring is zero. When {@code homologous} is set, among the
 * nodes of the chosen size the one structurally closest to
 * {@code parent1SelectedNode} (fewest ancestor hops before a swap-compatible
 * ancestor pair is found) is preferred.
 *
 * @param nodeToSubtrees precomputed {@code NodeInfo} stats (subtree counts) for every node of parent 2's tree
 * @param sizeToNodes map from subtree count to a {@code LinkedList} of {@code NodeInfo} with that count, for O(1) lookup
 * @param parent1SelectedNode node already selected in parent 1
 * @param tree2 tree of parent 2 being searched
 * @param state evolution state, used for its per-thread MersenneTwister random object
 * @param thread thread number indexing {@code state.random}
 * @return the selected node in parent 2's tree
 */
protected GPNode findFairSizeNode(ArrayList nodeToSubtrees,
HashMap sizeToNodes,
GPNode parent1SelectedNode,
GPTree tree2,
EvolutionState state,
int thread)
{
GPNode selectedNode = null;
// number of nonterminal nodes (subtrees) beneath parent 1's chosen node
int parent1SubTrees = parent1SelectedNode.numNodes(GPNode.NODESEARCH_NONTERMINALS);
// the maximum subtree count we will accept in the mate: 2*n + 1 (0 if n is 0)
int maxmatesublen = (parent1SubTrees == 0) ? 0 : 2 * parent1SubTrees + 1;
// availability flag per candidate size: mateSizeAvailable[s] == true iff
// some node in parent 2's tree has exactly s subtrees beneath it
boolean[] mateSizeAvailable = new boolean[maxmatesublen + 1];
// initialize to false (Java already zero-initializes boolean arrays, so this
// loop — which also skips the last slot — is redundant but harmless)
for (int i = 0; i < maxmatesublen; i++)
mateSizeAvailable[i] = false;
// mark the sizes that actually occur in parent 2's tree
for (int i = 0; i < nodeToSubtrees.size(); i++)
{
NodeInfo nodeInfo = (NodeInfo)nodeToSubtrees.get(i);
// get the subtree count for this node
int subtree = nodeInfo.numberOfSubTreesBeneath;
if (subtree <= maxmatesublen)
mateSizeAvailable[subtree] = true;
}
// Tally available sizes smaller ("negatives") and larger ("positives") than
// parent 1's size, with their summed absolute size deltas, so we can choose
// a mate size whose mean size change is zero where possible.
int countOfPositives = 0;
int countOfNegatives = 0;
int sumOfPositives = 0;
int sumOfNegatives = 0;
int l;
for (l = 1; l < parent1SubTrees; l++)
if (mateSizeAvailable[l])
{
countOfNegatives++;
sumOfNegatives += parent1SubTrees - l;
}
for (l = parent1SubTrees + 1; l <= maxmatesublen; l++)
if (mateSizeAvailable[l])
{
countOfPositives++;
sumOfPositives += l - parent1SubTrees;
}
// if either side is missing entirely we cannot balance, so fall back
int mateSublengthSelected = 0;
if (sumOfPositives == 0 || sumOfNegatives == 0)
{
// use parent 1's own size when the mate has nodes of that size
if(mateSizeAvailable[parent1SubTrees])
{
mateSublengthSelected = parent1SubTrees;
}
// else mateSublengthSelected stays 0
}
else
{
// probability of picking the same size; 0.0 when that size is absent
double pzero = (mateSizeAvailable[parent1SubTrees]) ? 1.0 / parent1SubTrees : 0.0;
// per-size probability for sizes larger than parent 1's
double ppositive = (1.0 - pzero) / (countOfPositives + ((double) (countOfNegatives * sumOfPositives) / (sumOfNegatives)));
// per-size probability for sizes smaller than parent 1's
double pnegative = (1.0 - pzero) / (countOfNegatives + ((double) (countOfPositives * sumOfNegatives) / (sumOfPositives)));
// total probability, computed only as a floating-point sanity check
double total = countOfNegatives * pnegative + pzero + countOfPositives * ppositive;
// putting an assert for floating point calculations, similar to what langdon does
// assert(total<1.01&&total>.99);
// build a roulette wheel over the available sizes:
// sizes below parent 1's get pnegative, the equal size gets pzero,
// sizes above get ppositive
RouletteWheelSelector wheel = new RouletteWheelSelector(maxmatesublen);
for (l = 1; l < parent1SubTrees; l++)
if (mateSizeAvailable[l])
wheel.add(pnegative, l);
if (mateSizeAvailable[parent1SubTrees])
wheel.add(pzero, parent1SubTrees);
for (l = parent1SubTrees + 1; l <= maxmatesublen; l++)
if (mateSizeAvailable[l])
wheel.add(ppositive, l);
// spin the wheel to pick the mate subtree size
mateSublengthSelected = wheel.roulette(state, thread);
}
// size chosen; now pick among the (possibly many) nodes of that size
LinkedList listOfNodes = (LinkedList)(sizeToNodes.get(new Integer(mateSublengthSelected)));
if(listOfNodes == null)
{
// NOTE(review): this only logs — the get(chosenNode) below would then throw
// a NullPointerException; presumably this bucket is always present when the
// wheel selected its size. Verify against the fallback (size 0) path.
System.err.println("Nodes for tree length " + mateSublengthSelected + " is null, indicates some serious error");
}
// index into listOfNodes of the node we will return
int chosenNode = 0;
// size-fair (non-homologous): choose uniformly at random from the bucket
if(!homologous)
{
chosenNode = state.random[thread].nextInt(listOfNodes.size());
}
// homologous: prefer the node "closest" to parent 1's node
else
{
if(listOfNodes.size() > 1)
{
GPInitializer initializer = ((GPInitializer) state.initializer);
int currentMinDistance = Integer.MAX_VALUE;
for(int i =0 ; i< listOfNodes.size(); i++)
{
// candidate GP node from the bucket
GPNode selectedMateNode = ((NodeInfo)listOfNodes.get(i)).node;
// walk both ancestries in lockstep to measure divergence
GPNode currentMateNode = selectedMateNode;
GPNode currentParent1Node = parent1SelectedNode;
// set once the two current ancestors are swap-compatible
boolean foundAMatchInAncestor = false;
int distance =0;
// climb while both parents are still GPNodes (i.e. not the GPTree root holder)
while(currentMateNode.parent != null &&
currentMateNode.parent instanceof GPNode &&
currentParent1Node.parent != null &&
currentParent1Node.parent instanceof GPNode &&
!foundAMatchInAncestor)
{
GPNode parent1 = (GPNode)currentParent1Node.parent;
GPNode parent2 = (GPNode)currentMateNode.parent;
// stop as soon as the two ancestors are swap-compatible
if(parent1.swapCompatibleWith(initializer, parent2))
{
foundAMatchInAncestor = true;
break;
}
else
{
// go one level up on both sides
currentMateNode = parent2;
currentParent1Node = parent1;
// each level climbed adds one to the divergence distance
distance = distance +1;
}
}
// keep the candidate with the smallest divergence distance
if(distance < currentMinDistance)
{
currentMinDistance = distance;
chosenNode = i;
}
}
}
// else only one node in the bucket — index 0, no choice to make
}
NodeInfo nodeInfoSelected = (NodeInfo)listOfNodes.get(chosenNode);
selectedNode = nodeInfoSelected.node;
return selectedNode;
}
/**
 * Recursively walks the tree rooted at {@code node}, creating one
 * {@code NodeInfo} per node (caching its nonterminal/subtree count) so that
 * depth and subtree statistics are computed only once and can be reused later.
 * Each {@code NodeInfo} is appended to {@code nodeToDepth} (pre-order) and
 * also bucketed into {@code sizeToNodes} keyed by its subtree count.
 *
 * @param node root of the (sub)tree to traverse
 * @param nodeToDepth output list, receives a {@code NodeInfo} for every node visited
 * @param sizeToNodes output map from subtree count (Integer) to a LinkedList of NodeInfo with that count
 */
public void traverseTreeForDepth(GPNode node,
ArrayList nodeToDepth,
HashMap sizeToNodes)
{
NodeInfo nodeInfo = new NodeInfo(node, node.numNodes(GPNode.NODESEARCH_NONTERMINALS));
nodeToDepth.add(nodeInfo);
// box the key once; Integer.valueOf replaces the deprecated new Integer(int)
// and uses the JDK's cached boxed values for small counts
Integer size = Integer.valueOf(nodeInfo.numberOfSubTreesBeneath);
// fetch (or lazily create) the bucket of nodes sharing this subtree count
LinkedList listForSize = (LinkedList)(sizeToNodes.get(size));
if (listForSize == null)
{
listForSize = new LinkedList();
sizeToNodes.put(size, listForSize);
}
// add it to the bucket no matter what
listForSize.add(nodeInfo);
// recurse into the children (loop body simply doesn't run for leaves,
// so no explicit length check is needed)
GPNode[] children = node.children;
for (int i = 0; i < children.length; i++)
{
traverseTreeForDepth(children[i], nodeToDepth, sizeToNodes);
}
}
/**
 * Inner class implementing a quick roulette-wheel selection over candidate
 * subtree lengths. (probability, length) pairs are appended via {@link #add};
 * {@link #roulette} then converts the probabilities in place into a cumulative
 * distribution and binary-chops a uniform random draw into it.
 *
 * NOTE(review): roulette() mutates probability[] into cumulative sums, so each
 * selector instance is single-use — presumably never spun twice; verify callers.
 */
class RouletteWheelSelector
{
// candidate lengths, parallel to probability[]
int[] length;
// per-entry probabilities on add(); cumulative sums after roulette() runs
double[] probability;
// number of entries added so far (also the logical size of both arrays)
int currentIndex = 0;
// largest length added, used for a range sanity check in roulette()
int maxLength = 0;
/**
 * Construct a wheel with capacity for {@code size} entries.
 *
 * @param size maximum number of (probability, length) entries
 */
RouletteWheelSelector(int size)
{
length = new int[size];
probability =new double[size];
}
/**
 * Append one slot to the wheel.
 *
 * @param currentProbability probability mass for this slot
 * @param currentLength the length this slot selects
 */
public void add(double currentProbability, int currentLength)
{
length[currentIndex] = currentLength;
probability[currentIndex] = currentProbability;
currentIndex = currentIndex +1;
if(currentLength > maxLength) maxLength = currentLength;
}
/**
 * Spin the wheel: draw uniformly in [0, total mass) and return the length
 * of the slot the draw falls into.
 *
 * @param state evolution state supplying the per-thread random object
 * @param thread thread number indexing {@code state.random}
 * @return the selected length
 */
public int roulette(EvolutionState state, int thread)
{
int winner = 0;
int selectedLength = 0;
// accumulate: turn probability[] into a running cumulative sum in place
for (int i = 1; i < currentIndex; i++)
{
probability[i] += probability[i-1];
}
int bot = 0; // binary chop search
int top = currentIndex - 1;
// scale the draw by the total mass so probabilities need not sum to 1
double f = state.random[thread].nextDouble() * probability[top];
// at most 20 chop iterations — sufficient for up to 2^20 entries
for(int loop =0; loop< 20; loop++)
{
int index = (top + bot) / 2;
if (index > 0 && f < probability[index - 1])
top = index - 1;
else if (f > probability[index])
bot = index + 1;
else
{
// landing exactly on a boundary counts for the next slot (if any)
if (f == probability[index] && index + 1 < currentIndex)
winner = index + 1;
else
winner = index;
break;
}
}
// check for bounds; Output.fatal normally aborts the run, so the
// assignments after each fatal() are defensive defaults only
if(winner < 0 || winner >= currentIndex)
{
state.output.fatal("roulette() method winner " + winner + " out of range 0..." + (currentIndex-1));
winner=0; //safe default
}
if(length[winner] < 1 || length[winner] > maxLength)
{
state.output.fatal("roulette() method " + length[winner] + " is out of range 1..." + maxLength);
// range is specified on creation
return maxLength; //safe default
}
selectedLength = length[winner];
return selectedLength;
}
}
/**
 * Lightweight pairing of a tree node with the number of subtrees
 * (nonterminal nodes) beneath it, cached once so later lookups are O(1).
 */
class NodeInfo
{
// cached count of subtrees beneath the wrapped node
int numberOfSubTreesBeneath;
// the tree node this record describes
GPNode node;
/**
 * Construct an info record for the given node.
 *
 * @param node the node being described
 * @param numberOfSubtrees count of subtrees beneath it
 */
public NodeInfo(GPNode node, int numberOfSubtrees)
{
this.numberOfSubTreesBeneath = numberOfSubtrees;
this.node = node;
}
/** Replaces the cached subtree count. */
public void setSubtrees(int totalSubtrees)
{
numberOfSubTreesBeneath = totalSubtrees;
}
/** @return the cached subtree count */
public int getSubtrees()
{
return numberOfSubTreesBeneath;
}
/** @return the wrapped node */
public GPNode getNode()
{
return node;
}
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigateway.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * <p>
 * Request to list information about a resource.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetResourceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The <a>RestApi</a> identifier for the resource. */
    private String restApiId;

    /** The identifier for the <a>Resource</a> resource. */
    private String resourceId;

    /**
     * A query parameter naming resources to embed in the returned <a>Resource</a> representation, as a
     * comma-separated list of strings. Currently only the embedded <a>Method</a> resources can be retrieved this
     * way, so the value must be a single-valued list containing the <code>"methods"</code> string. For example,
     * <code>GET /restapis/{restapi_id}/resources/{resource_id}?embed=methods</code>.
     */
    private java.util.List<String> embed;

    /**
     * Sets the <a>RestApi</a> identifier for the resource.
     *
     * @param restApiId
     *        The <a>RestApi</a> identifier for the resource.
     */
    public void setRestApiId(String restApiId) {
        this.restApiId = restApiId;
    }

    /**
     * Returns the <a>RestApi</a> identifier for the resource.
     *
     * @return The <a>RestApi</a> identifier for the resource.
     */
    public String getRestApiId() {
        return restApiId;
    }

    /**
     * Fluent variant of {@link #setRestApiId(String)}.
     *
     * @param restApiId
     *        The <a>RestApi</a> identifier for the resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetResourceRequest withRestApiId(String restApiId) {
        this.restApiId = restApiId;
        return this;
    }

    /**
     * Sets the identifier for the <a>Resource</a> resource.
     *
     * @param resourceId
     *        The identifier for the <a>Resource</a> resource.
     */
    public void setResourceId(String resourceId) {
        this.resourceId = resourceId;
    }

    /**
     * Returns the identifier for the <a>Resource</a> resource.
     *
     * @return The identifier for the <a>Resource</a> resource.
     */
    public String getResourceId() {
        return resourceId;
    }

    /**
     * Fluent variant of {@link #setResourceId(String)}.
     *
     * @param resourceId
     *        The identifier for the <a>Resource</a> resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetResourceRequest withResourceId(String resourceId) {
        this.resourceId = resourceId;
        return this;
    }

    /**
     * Returns the list of resources to embed in the returned <a>Resource</a> representation. Currently only
     * retrieval of the embedded <a>Method</a> resources is supported, so the value must be a single-valued list
     * containing the <code>"methods"</code> string.
     *
     * @return The list of resources to embed, or <code>null</code> if none was set.
     */
    public java.util.List<String> getEmbed() {
        return embed;
    }

    /**
     * Replaces the list of resources to embed in the returned <a>Resource</a> representation. A defensive copy of
     * the given collection is stored; passing <code>null</code> clears the list. Currently only retrieval of the
     * embedded <a>Method</a> resources is supported, so the value must be a single-valued list containing the
     * <code>"methods"</code> string.
     *
     * @param embed
     *        The list of resources to embed, e.g.
     *        <code>GET /restapis/{restapi_id}/resources/{resource_id}?embed=methods</code>.
     */
    public void setEmbed(java.util.Collection<String> embed) {
        this.embed = (embed == null) ? null : new java.util.ArrayList<String>(embed);
    }

    /**
     * Appends the given values to the embed list, creating it if absent.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEmbed(java.util.Collection)} or {@link #withEmbed(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param embed
     *        The values to append to the embed list.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetResourceRequest withEmbed(String... embed) {
        if (this.embed == null) {
            this.embed = new java.util.ArrayList<String>(embed.length);
        }
        java.util.Collections.addAll(this.embed, embed);
        return this;
    }

    /**
     * Fluent variant of {@link #setEmbed(java.util.Collection)} — replaces (not appends to) the embed list.
     *
     * @param embed
     *        The list of resources to embed.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetResourceRequest withEmbed(java.util.Collection<String> embed) {
        setEmbed(embed);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getRestApiId() != null) {
            sb.append("RestApiId: ").append(getRestApiId()).append(",");
        }
        if (getResourceId() != null) {
            sb.append("ResourceId: ").append(getResourceId()).append(",");
        }
        if (getEmbed() != null) {
            sb.append("Embed: ").append(getEmbed());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof GetResourceRequest)) {
            return false;
        }
        GetResourceRequest other = (GetResourceRequest) obj;
        // java.util.Objects.equals handles the null/null and null/non-null cases
        // exactly like the generated xor-plus-equals pattern
        return java.util.Objects.equals(getRestApiId(), other.getRestApiId())
                && java.util.Objects.equals(getResourceId(), other.getResourceId())
                && java.util.Objects.equals(getEmbed(), other.getEmbed());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the generated prime-multiply pattern, so values are identical
        return java.util.Objects.hash(getRestApiId(), getResourceId(), getEmbed());
    }

    @Override
    public GetResourceRequest clone() {
        return (GetResourceRequest) super.clone();
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/genomics/v1alpha2/pipelines.proto
package com.google.genomics.v1a;
public interface RunPipelineArgsOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.genomics.v1alpha2.RunPipelineArgs)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Required. The project in which to run the pipeline. The caller must have
* WRITER access to all Google Cloud services and resources (e.g. Google
* Compute Engine) that will be used.
* </pre>
*
* <code>optional string project_id = 1;</code>
*/
java.lang.String getProjectId();
/**
* <pre>
* Required. The project in which to run the pipeline. The caller must have
* WRITER access to all Google Cloud services and resources (e.g. Google
* Compute Engine) that will be used.
* </pre>
*
* <code>optional string project_id = 1;</code>
*/
com.google.protobuf.ByteString
getProjectIdBytes();
/**
* <pre>
* Pipeline input arguments; keys are defined in the pipeline documentation.
* All input parameters that do not have default values must be specified.
* If parameters with defaults are specified here, the defaults will be
* overridden.
* </pre>
*
* <code>map<string, string> inputs = 2;</code>
*/
int getInputsCount();
/**
* <pre>
* Pipeline input arguments; keys are defined in the pipeline documentation.
* All input parameters that do not have default values must be specified.
* If parameters with defaults are specified here, the defaults will be
* overridden.
* </pre>
*
* <code>map<string, string> inputs = 2;</code>
*/
boolean containsInputs(
java.lang.String key);
/**
* Use {@link #getInputsMap()} instead.
*/
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String>
getInputs();
/**
* <pre>
* Pipeline input arguments; keys are defined in the pipeline documentation.
* All input parameters that do not have default values must be specified.
* If parameters with defaults are specified here, the defaults will be
* overridden.
* </pre>
*
* <code>map<string, string> inputs = 2;</code>
*/
java.util.Map<java.lang.String, java.lang.String>
getInputsMap();
/**
* <pre>
* Pipeline input arguments; keys are defined in the pipeline documentation.
* All input parameters that do not have default values must be specified.
* If parameters with defaults are specified here, the defaults will be
* overridden.
* </pre>
*
* <code>map<string, string> inputs = 2;</code>
*/
java.lang.String getInputsOrDefault(
java.lang.String key,
java.lang.String defaultValue);
/**
* <pre>
* Pipeline input arguments; keys are defined in the pipeline documentation.
* All input parameters that do not have default values must be specified.
* If parameters with defaults are specified here, the defaults will be
* overridden.
* </pre>
*
* <code>map<string, string> inputs = 2;</code>
*/
java.lang.String getInputsOrThrow(
java.lang.String key);
/**
* <pre>
* Pipeline output arguments; keys are defined in the pipeline
* documentation. All output parameters without default values
* must be specified. If parameters with defaults are specified
* here, the defaults will be overridden.
* </pre>
*
* <code>map<string, string> outputs = 3;</code>
*/
int getOutputsCount();
/**
* <pre>
* Pipeline output arguments; keys are defined in the pipeline
* documentation. All output parameters without default values
* must be specified. If parameters with defaults are specified
* here, the defaults will be overridden.
* </pre>
*
* <code>map<string, string> outputs = 3;</code>
*/
boolean containsOutputs(
java.lang.String key);
/**
* Use {@link #getOutputsMap()} instead.
*/
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String>
getOutputs();
/**
* <pre>
* Pipeline output arguments; keys are defined in the pipeline
* documentation. All output parameters without default values
* must be specified. If parameters with defaults are specified
* here, the defaults will be overridden.
* </pre>
*
* <code>map<string, string> outputs = 3;</code>
*/
java.util.Map<java.lang.String, java.lang.String>
getOutputsMap();
/**
* <pre>
* Pipeline output arguments; keys are defined in the pipeline
* documentation. All output parameters without default values
* must be specified. If parameters with defaults are specified
* here, the defaults will be overridden.
* </pre>
*
* <code>map<string, string> outputs = 3;</code>
*/
java.lang.String getOutputsOrDefault(
java.lang.String key,
java.lang.String defaultValue);
/**
* <pre>
* Pipeline output arguments; keys are defined in the pipeline
* documentation. All output parameters without default values
* must be specified. If parameters with defaults are specified
* here, the defaults will be overridden.
* </pre>
*
* <code>map<string, string> outputs = 3;</code>
*/
java.lang.String getOutputsOrThrow(
java.lang.String key);
/**
* <pre>
* The Google Cloud Service Account that will be used to access data and
* services. By default, the compute service account associated with
* `projectId` is used.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.ServiceAccount service_account = 4;</code>
*/
boolean hasServiceAccount();
/**
* <pre>
* The Google Cloud Service Account that will be used to access data and
* services. By default, the compute service account associated with
* `projectId` is used.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.ServiceAccount service_account = 4;</code>
*/
com.google.genomics.v1a.ServiceAccount getServiceAccount();
/**
* <pre>
* The Google Cloud Service Account that will be used to access data and
* services. By default, the compute service account associated with
* `projectId` is used.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.ServiceAccount service_account = 4;</code>
*/
com.google.genomics.v1a.ServiceAccountOrBuilder getServiceAccountOrBuilder();
/**
* <pre>
* This field is deprecated. Use `labels` instead. Client-specified pipeline
* operation identifier.
* </pre>
*
* <code>optional string client_id = 5;</code>
*/
java.lang.String getClientId();
/**
* <pre>
* This field is deprecated. Use `labels` instead. Client-specified pipeline
* operation identifier.
* </pre>
*
* <code>optional string client_id = 5;</code>
*/
com.google.protobuf.ByteString
getClientIdBytes();
/**
* <pre>
* Specifies resource requirements/overrides for the pipeline run.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.PipelineResources resources = 6;</code>
*/
boolean hasResources();
/**
* <pre>
* Specifies resource requirements/overrides for the pipeline run.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.PipelineResources resources = 6;</code>
*/
com.google.genomics.v1a.PipelineResources getResources();
/**
* <pre>
* Specifies resource requirements/overrides for the pipeline run.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.PipelineResources resources = 6;</code>
*/
com.google.genomics.v1a.PipelineResourcesOrBuilder getResourcesOrBuilder();
/**
* <pre>
* Required. Logging options. Used by the service to communicate results
* to the user.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.LoggingOptions logging = 7;</code>
*/
boolean hasLogging();
/**
* <pre>
* Required. Logging options. Used by the service to communicate results
* to the user.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.LoggingOptions logging = 7;</code>
*/
com.google.genomics.v1a.LoggingOptions getLogging();
/**
* <pre>
* Required. Logging options. Used by the service to communicate results
* to the user.
* </pre>
*
* <code>optional .google.genomics.v1alpha2.LoggingOptions logging = 7;</code>
*/
com.google.genomics.v1a.LoggingOptionsOrBuilder getLoggingOrBuilder();
/**
* <pre>
* How long to keep the VM up after a failure (for example docker command
* failed, copying input or output files failed, etc). While the VM is up, one
* can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day.
* </pre>
*
* <code>optional .google.protobuf.Duration keep_vm_alive_on_failure_duration = 8;</code>
*/
boolean hasKeepVmAliveOnFailureDuration();
/**
* <pre>
* How long to keep the VM up after a failure (for example docker command
* failed, copying input or output files failed, etc). While the VM is up, one
* can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day.
* </pre>
*
* <code>optional .google.protobuf.Duration keep_vm_alive_on_failure_duration = 8;</code>
*/
com.google.protobuf.Duration getKeepVmAliveOnFailureDuration();
/**
* <pre>
* How long to keep the VM up after a failure (for example docker command
* failed, copying input or output files failed, etc). While the VM is up, one
* can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day.
* </pre>
*
* <code>optional .google.protobuf.Duration keep_vm_alive_on_failure_duration = 8;</code>
*/
com.google.protobuf.DurationOrBuilder getKeepVmAliveOnFailureDurationOrBuilder();
/**
* <pre>
* Labels to apply to this pipeline run. Labels will also be applied to
* compute resources (VM, disks) created by this pipeline run. When listing
* operations, operations can be [filtered by labels]
* [google.longrunning.ListOperationsRequest.filter].
* Label keys may not be empty; label values may be empty. Non-empty labels
* must be 1-63 characters long, and comply with [RFC1035]
* (https://www.ietf.org/rfc/rfc1035.txt).
* Specifically, the name must be 1-63 characters long and match the regular
* expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be
* a dash, lowercase letter, or digit, except the last character, which cannot
* be a dash.
* </pre>
*
* <code>map<string, string> labels = 9;</code>
*/
int getLabelsCount();
/**
* <pre>
* Labels to apply to this pipeline run. Labels will also be applied to
* compute resources (VM, disks) created by this pipeline run. When listing
* operations, operations can [filtered by labels]
* [google.longrunning.ListOperationsRequest.filter].
* Label keys may not be empty; label values may be empty. Non-empty labels
* must be 1-63 characters long, and comply with [RFC1035]
* (https://www.ietf.org/rfc/rfc1035.txt).
* Specifically, the name must be 1-63 characters long and match the regular
* expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be
* a dash, lowercase letter, or digit, except the last character, which cannot
* be a dash.
* </pre>
*
* <code>map<string, string> labels = 9;</code>
*/
boolean containsLabels(
java.lang.String key);
/**
* Use {@link #getLabelsMap()} instead.
*/
@java.lang.Deprecated
java.util.Map<java.lang.String, java.lang.String>
getLabels();
/**
* <pre>
* Labels to apply to this pipeline run. Labels will also be applied to
* compute resources (VM, disks) created by this pipeline run. When listing
* operations, operations can [filtered by labels]
* [google.longrunning.ListOperationsRequest.filter].
* Label keys may not be empty; label values may be empty. Non-empty labels
* must be 1-63 characters long, and comply with [RFC1035]
* (https://www.ietf.org/rfc/rfc1035.txt).
* Specifically, the name must be 1-63 characters long and match the regular
* expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be
* a dash, lowercase letter, or digit, except the last character, which cannot
* be a dash.
* </pre>
*
* <code>map<string, string> labels = 9;</code>
*/
java.util.Map<java.lang.String, java.lang.String>
getLabelsMap();
/**
* <pre>
* Labels to apply to this pipeline run. Labels will also be applied to
* compute resources (VM, disks) created by this pipeline run. When listing
* operations, operations can [filtered by labels]
* [google.longrunning.ListOperationsRequest.filter].
* Label keys may not be empty; label values may be empty. Non-empty labels
* must be 1-63 characters long, and comply with [RFC1035]
* (https://www.ietf.org/rfc/rfc1035.txt).
* Specifically, the name must be 1-63 characters long and match the regular
* expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be
* a dash, lowercase letter, or digit, except the last character, which cannot
* be a dash.
* </pre>
*
* <code>map<string, string> labels = 9;</code>
*/
java.lang.String getLabelsOrDefault(
java.lang.String key,
java.lang.String defaultValue);
/**
* <pre>
* Labels to apply to this pipeline run. Labels will also be applied to
* compute resources (VM, disks) created by this pipeline run. When listing
* operations, operations can [filtered by labels]
* [google.longrunning.ListOperationsRequest.filter].
* Label keys may not be empty; label values may be empty. Non-empty labels
* must be 1-63 characters long, and comply with [RFC1035]
* (https://www.ietf.org/rfc/rfc1035.txt).
* Specifically, the name must be 1-63 characters long and match the regular
* expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be
* a dash, lowercase letter, or digit, except the last character, which cannot
* be a dash.
* </pre>
*
* <code>map<string, string> labels = 9;</code>
*/
java.lang.String getLabelsOrThrow(
java.lang.String key);
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.syslog;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.logical.FormatPluginConfig;
import org.apache.drill.common.types.TypeProtos;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.test.ClusterTest;
import org.apache.drill.test.BaseDirTestWatcher;
import org.apache.drill.exec.physical.rowSet.RowSet;
import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.rowSet.RowSetComparison;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.ClassRule;
/**
 * End-to-end tests for the Drill syslog format plugin. Each test runs a SQL
 * query against an RFC 5424 syslog sample file served from the classpath
 * ("cp" storage plugin) and compares the resulting row set — schema and
 * data — against a hand-built expected row set.
 * <p>
 * Two plugin configurations are registered in {@link #defineSyslogPlugin()}:
 * "sample" for {@code *.syslog} files (structured data kept nested) and
 * "flat" for {@code *.syslog1} files (structured data flattened into
 * top-level {@code structured_data_*} columns).
 */
public class TestSyslogFormat extends ClusterTest {

  // Provides the temporary working directories for the embedded Drillbit.
  @ClassRule
  public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();

  /** Starts a single-fragment embedded cluster, then registers the syslog format configs. */
  @BeforeClass
  public static void setup() throws Exception {
    ClusterTest.startCluster(ClusterFixture.builder(dirTestWatcher).maxParallelization(1));
    defineSyslogPlugin();
  }

  /**
   * Registers two syslog format configurations on the classpath ("cp") storage
   * plugin: "sample" bound to the {@code syslog} extension (third config
   * argument null, i.e. default structured-data handling) and "flat" bound to
   * {@code syslog1} with the flatten flag set to {@code true}.
   *
   * @throws ExecutionSetupException if the cluster rejects the format definitions
   */
  private static void defineSyslogPlugin() throws ExecutionSetupException {
    Map<String, FormatPluginConfig> formats = new HashMap<>();
    formats.put("sample", new SyslogFormatConfig(
        Collections.singletonList("syslog"), null, null));
    formats.put("flat", new SyslogFormatConfig(
        Collections.singletonList("syslog1"), null, true));
    // Define a temporary plugin for the "cp" storage plugin.
    cluster.defineFormats("cp", formats);
  }

  /**
   * Projects only scalar (non-map) columns from a {@code *.syslog} file and
   * verifies both the column types (all OPTIONAL) and the parsed values,
   * including the structured-data text rendering on the last two rows.
   */
  @Test
  public void testNonComplexFields() throws RpcException {
    String sql = "SELECT event_date," +
        "severity_code," +
        "severity," +
        "facility_code," +
        "facility," +
        "ip," +
        "process_id," +
        "message_id," +
        "structured_data_text " +
        "FROM cp.`syslog/logs.syslog`";
    RowSet results = client.queryBuilder().sql(sql).rowSet();
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("event_date", TypeProtos.MinorType.TIMESTAMP, TypeProtos.DataMode.OPTIONAL)
        .add("severity_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("severity", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("facility_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("facility", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("ip", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("process_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("message_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_text", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .buildSchema();
    // Timestamps are epoch millis; e.g. 1065910455003L is 2003-10-11T22:14:15.003Z.
    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow(1065910455003L, 2, "CRIT", 4, "AUTH", "mymachine.example.com", null, "", "")
        .addRow(482196050520L, 2, "CRIT", 4, "AUTH", "mymachine.example.com", null, "", "")
        .addRow(482196050520L, 2, "CRIT", 4, "AUTH", "mymachine.example.com", null, "", "")
        .addRow(1065910455003L, 2, "CRIT", 4, "AUTH", "mymachine.example.com", null, "", "")
        .addRow(1061727255000L, 2, "CRIT", 4, "AUTH", "mymachine.example.com", null, "", "")
        .addRow(1061727255000L, 5, "NOTICE", 20, "LOCAL4", "192.0.2.1", "8710", "", "")
        .addRow(1065910455003L, 5, "NOTICE", 20, "LOCAL4", "mymachine.example.com", null, "", "{examplePriority@32473=[class=high], exampleSDID@32473=[iut=3, eventSource=Application, eventID=1011]}")
        .addRow(1065910455003L, 5, "NOTICE", 20, "LOCAL4", "mymachine.example.com", null, "", "{examplePriority@32473=[class=high], exampleSDID@32473=[iut=3, eventSource=Application, eventID=1011]}")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(results);
  }

  /**
   * {@code SELECT *} against a file without structured data; checks the
   * column order the reader produces for a wildcard projection.
   */
  @Test
  public void testStarQuery() throws RpcException {
    String sql = "SELECT * FROM cp.`syslog/logs1.syslog`";
    RowSet results = client.queryBuilder().sql(sql).rowSet();
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("event_date", TypeProtos.MinorType.TIMESTAMP, TypeProtos.DataMode.OPTIONAL)
        .add("severity_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("facility_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("severity", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("facility", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("ip", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("app_name", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("message_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("message", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("process_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .buildSchema();
    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow(1065910455003L, 2, 4, "CRIT", "AUTH", "mymachine.example.com", "su", "ID47", "BOM'su root' failed for lonvick on /dev/pts/8", null)
        .addRow(482196050520L, 2, 4, "CRIT", "AUTH", "mymachine.example.com", "su", "ID47", "BOM'su root' failed for lonvick on /dev/pts/8", null)
        .addRow(482196050520L, 2, 4, "CRIT", "AUTH", "mymachine.example.com", "su", "ID47", "BOM'su root' failed for lonvick on /dev/pts/8", null)
        .addRow(1065910455003L, 2, 4, "CRIT", "AUTH", "mymachine.example.com", "su", "ID47", "BOM'su root' failed for lonvick on /dev/pts/8", null)
        .addRow(1061727255000L, 2, 4, "CRIT", "AUTH", "mymachine.example.com", "su", "ID47", "BOM'su root' failed for lonvick on /dev/pts/8", null)
        .addRow(1061727255000L, 5, 20, "NOTICE", "LOCAL4", "192.0.2.1", "myproc", null, "%% It's time to make the do-nuts.", "8710")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(results);
  }

  /**
   * Selects the implicit {@code _raw} column and verifies each input line is
   * returned verbatim, i.e. unparsed RFC 5424 text including the PRI header.
   */
  @Test
  public void testRawQuery() throws RpcException {
    String sql = "SELECT _raw FROM cp.`syslog/logs.syslog`";
    RowSet results = client.queryBuilder().sql(sql).rowSet();
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("_raw", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .buildSchema();
    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow("<34>1 2003-10-11T22:14:15.003Z mymachine.example.com su - ID47 - BOM'su root' failed for lonvick on /dev/pts/8")
        .addRow("<34>1 1985-04-12T19:20:50.52-04:00 mymachine.example.com su - ID47 - BOM'su root' failed for lonvick on /dev/pts/8")
        .addRow("<34>1 1985-04-12T23:20:50.52Z mymachine.example.com su - ID47 - BOM'su root' failed for lonvick on /dev/pts/8")
        .addRow("<34>1 2003-10-11T22:14:15.003Z mymachine.example.com su - ID47 - BOM'su root' failed for lonvick on /dev/pts/8")
        .addRow("<34>1 2003-08-24T05:14:15.000003-07:00 mymachine.example.com su - ID47 - BOM'su root' failed for lonvick on /dev/pts/8")
        .addRow("<165>1 2003-08-24T05:14:15.000003-07:00 192.0.2.1 myproc 8710 - - %% It's time to make the do-nuts.")
        .addRow("<165>1 2003-10-11T22:14:15.003Z mymachine.example.com evntslog - ID47 [exampleSDID@32473 iut=\"3\" eventSource=\"Application\" eventID=\"1011\"][examplePriority@32473 class=\"high\"]")
        .addRow("<165>1 2003-10-11T22:14:15.003Z mymachine.example.com evntslog - ID47 [exampleSDID@32473 iut=\"3\" eventSource=\"Application\" eventID=\"1011\"][examplePriority@32473 class=\"high\"] - and thats a wrap!")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(results);
  }

  /**
   * Accesses individual keys of the nested {@code structured_data} map through
   * a subquery, aliasing each map member to a top-level VARCHAR column.
   */
  @Test
  public void testStructuredDataQuery() throws RpcException {
    String sql = "SELECT syslog_data.`structured_data`.`UserAgent` AS UserAgent, " +
        "syslog_data.`structured_data`.`UserHostAddress` AS UserHostAddress," +
        "syslog_data.`structured_data`.`BrowserSession` AS BrowserSession," +
        "syslog_data.`structured_data`.`Realm` AS Realm," +
        "syslog_data.`structured_data`.`Appliance` AS Appliance," +
        "syslog_data.`structured_data`.`Company` AS Company," +
        "syslog_data.`structured_data`.`UserID` AS UserID," +
        "syslog_data.`structured_data`.`PEN` AS PEN," +
        "syslog_data.`structured_data`.`HostName` AS HostName," +
        "syslog_data.`structured_data`.`Category` AS Category," +
        "syslog_data.`structured_data`.`Priority` AS Priority " +
        "FROM (" +
        "SELECT structured_data " +
        "FROM cp.`syslog/test.syslog`" +
        ") AS syslog_data";
    RowSet results = client.queryBuilder().sql(sql).rowSet();
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("UserAgent", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("UserHostAddress", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("BrowserSession", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("Realm", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("Appliance", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("Company", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("UserID", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("PEN", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("HostName", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("Category", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("Priority", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .buildSchema();
    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132",
            "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation",
            "Tester2", "27389", "192.168.2.132", "AUDIT", "4")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(results);
  }

  /**
   * Wildcard query against the "flat" config ({@code *.syslog1}): structured
   * data is exposed both as raw text and as flattened
   * {@code structured_data_<key>} columns.
   */
  @Test
  public void testStarFlattenedStructuredDataQuery() throws RpcException {
    String sql = "SELECT * FROM cp.`syslog/test.syslog1`";
    RowSet results = client.queryBuilder().sql(sql).rowSet();
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("event_date", TypeProtos.MinorType.TIMESTAMP, TypeProtos.DataMode.OPTIONAL)
        .add("severity_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("facility_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("severity", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("facility", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("ip", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("app_name", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("process_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("message_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_text", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_UserAgent", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_UserHostAddress", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_BrowserSession", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Realm", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Appliance", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Company", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_UserID", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_PEN", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_HostName", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Category", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Priority", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("message", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .buildSchema();
    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow(1438811939693L, 6, 10, "INFO", "AUTHPRIV", "192.168.2.132", "SecureAuth0", "23108", "ID52020",
            "{SecureAuth@27389=[UserAgent=Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko, UserHostAddress=192.168.2.132, BrowserSession=0gvhdi5udjuqtweprbgoxilc, Realm=SecureAuth0, Appliance=secureauthqa.gosecureauth.com, Company=SecureAuth Corporation, UserID=Tester2, PEN=27389, HostName=192.168.2.132, Category=AUDIT, Priority=4]}",
            "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132",
            "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation",
            "Tester2", "27389", "192.168.2.132", "AUDIT", "4", "Found the user for retrieving user's profile")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(results);
  }

  /**
   * Same data as {@link #testStarFlattenedStructuredDataQuery()} but with an
   * explicit column list instead of a wildcard.
   */
  @Test
  public void testExplicitFlattenedStructuredDataQuery() throws RpcException {
    String sql = "SELECT event_date," +
        "severity_code," +
        "facility_code," +
        "severity," +
        "facility," +
        "ip," +
        "app_name," +
        "process_id," +
        "message_id," +
        "structured_data_text," +
        "structured_data_UserAgent," +
        "structured_data_UserHostAddress," +
        "structured_data_BrowserSession," +
        "structured_data_Realm," +
        "structured_data_Appliance," +
        "structured_data_Company," +
        "structured_data_UserID," +
        "structured_data_PEN," +
        "structured_data_HostName," +
        "structured_data_Category," +
        "structured_data_Priority," +
        "message " +
        "FROM cp.`syslog/test.syslog1`";
    RowSet results = client.queryBuilder().sql(sql).rowSet();
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("event_date", TypeProtos.MinorType.TIMESTAMP, TypeProtos.DataMode.OPTIONAL)
        .add("severity_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("facility_code", TypeProtos.MinorType.INT, TypeProtos.DataMode.OPTIONAL)
        .add("severity", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("facility", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("ip", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("app_name", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("process_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("message_id", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_text", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_UserAgent", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_UserHostAddress", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_BrowserSession", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Realm", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Appliance", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Company", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_UserID", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_PEN", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_HostName", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Category", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("structured_data_Priority", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .add("message", TypeProtos.MinorType.VARCHAR, TypeProtos.DataMode.OPTIONAL)
        .buildSchema();
    // NOTE(review): message_id here is expected to be "" while the wildcard
    // test above expects "ID52020" for the same file — looks like explicit
    // projection drops the value; confirm whether this asymmetry is intended.
    RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
        .addRow(1438811939693L, 6, 10, "INFO", "AUTHPRIV", "192.168.2.132", "SecureAuth0", "23108", "",
            "{SecureAuth@27389=[UserAgent=Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko, UserHostAddress=192.168.2.132, BrowserSession=0gvhdi5udjuqtweprbgoxilc, Realm=SecureAuth0, Appliance=secureauthqa.gosecureauth.com, Company=SecureAuth Corporation, UserID=Tester2, PEN=27389, HostName=192.168.2.132, Category=AUDIT, Priority=4]}",
            "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132",
            "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation",
            "Tester2", "27389", "192.168.2.132", "AUDIT", "4", "Found the user for retrieving user's profile")
        .build();
    new RowSetComparison(expected).verifyAndClearAll(results);
  }
}
| |
/*
* Copyright 2007 Yusuke Yamamoto
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package twitter4j.internal.json;
import twitter4j.*;
import javax.annotation.Generated;
import java.util.Date;
/**
 * A data class representing one single status of a user.
 * <p>
 * Lazy implementation of {@link twitter4j.Status}: the raw HTTP response is
 * stored as-is and only parsed into a concrete {@code Status} on first access
 * by {@link #getTarget()}, after which the result is cached. Every interface
 * method simply delegates to that parsed target.
 * <p>
 * NOTE(review): the lazy initialization in {@code getTarget()} is not
 * synchronized, so this class appears to assume single-threaded access —
 * confirm against the callers before sharing instances across threads.
 *
 * @author Yusuke Yamamoto - yusuke at mac.com
 */
@Generated(
        value = "generate-lazy-objects.sh",
        comments = "This is Tool Generated Code. DO NOT EDIT",
        date = "2011-07-13"
)
final class LazyStatus implements twitter4j.Status {
    // Raw, unparsed HTTP response; consumed at most once by getTarget().
    private twitter4j.internal.http.HttpResponse res;
    // Factory that turns the raw response into a concrete Status.
    private z_T4JInternalFactory factory;
    // Cached parse result; null until the first accessor is invoked.
    private Status target = null;

    LazyStatus(twitter4j.internal.http.HttpResponse res, z_T4JInternalFactory factory) {
        this.res = res;
        this.factory = factory;
    }

    /**
     * Parses the stored HTTP response on first call and caches the result.
     * A parse failure ({@link TwitterException}) is rethrown unchecked as a
     * {@link TwitterRuntimeException}, since the Status interface methods
     * cannot declare checked exceptions.
     *
     * @return the lazily created delegate Status
     */
    private Status getTarget() {
        if (target == null) {
            try {
                target = factory.createStatus(res);
            } catch (TwitterException e) {
                throw new TwitterRuntimeException(e);
            }
        }
        return target;
    }

    /**
     * Return the created_at
     *
     * @return created_at
     * @since Twitter4J 1.1.0
     */
    public Date getCreatedAt() {
        return getTarget().getCreatedAt();
    }

    /**
     * Returns the id of the status
     *
     * @return the id
     */
    public long getId() {
        return getTarget().getId();
    }

    /**
     * Returns the text of the status
     *
     * @return the text
     */
    public String getText() {
        return getTarget().getText();
    }

    /**
     * Returns the source
     *
     * @return the source
     * @since Twitter4J 1.0.4
     */
    public String getSource() {
        return getTarget().getSource();
    }

    /**
     * Test if the status is truncated
     *
     * @return true if truncated
     * @since Twitter4J 1.0.4
     */
    public boolean isTruncated() {
        return getTarget().isTruncated();
    }

    /**
     * Returns the in_reply_tostatus_id
     *
     * @return the in_reply_tostatus_id
     * @since Twitter4J 1.0.4
     */
    public long getInReplyToStatusId() {
        return getTarget().getInReplyToStatusId();
    }

    /**
     * Returns the in_reply_user_id
     *
     * @return the in_reply_user_id
     * @since Twitter4J 1.0.4
     */
    public long getInReplyToUserId() {
        return getTarget().getInReplyToUserId();
    }

    /**
     * Returns the in_reply_to_screen_name
     *
     * @return the in_reply_to_screen_name
     * @since Twitter4J 2.0.4
     */
    public String getInReplyToScreenName() {
        return getTarget().getInReplyToScreenName();
    }

    /**
     * Returns The location that this tweet refers to if available.
     *
     * @return returns The location that this tweet refers to if available (can be null)
     * @since Twitter4J 2.1.0
     */
    public GeoLocation getGeoLocation() {
        return getTarget().getGeoLocation();
    }

    /**
     * Returns the place attached to this status
     *
     * @return The place attached to this status
     * @since Twitter4J 2.1.1
     */
    public Place getPlace() {
        return getTarget().getPlace();
    }

    /**
     * Test if the status is favorited
     *
     * @return true if favorited
     * @since Twitter4J 1.0.4
     */
    public boolean isFavorited() {
        return getTarget().isFavorited();
    }

    /**
     * Return the user associated with the status.<br>
     * This can be null if the instance if from User.getStatus().
     *
     * @return the user
     */
    public User getUser() {
        return getTarget().getUser();
    }

    /**
     * @since Twitter4J 2.0.10
     */
    public boolean isRetweet() {
        return getTarget().isRetweet();
    }

    /**
     * @since Twitter4J 2.1.0
     */
    public Status getRetweetedStatus() {
        return getTarget().getRetweetedStatus();
    }

    /**
     * Returns an array of contributors, or null if no contributor is associated with this status.
     *
     * @since Twitter4J 2.2.3
     */
    public long[] getContributors() {
        return getTarget().getContributors();
    }

    /**
     * Returns the number of times this tweet has been retweeted, or -1 when the tweet was
     * created before this feature was enabled.
     *
     * @return the retweet count.
     */
    public long getRetweetCount() {
        return getTarget().getRetweetCount();
    }

    /**
     * Returns true if the authenticating user has retweeted this tweet, or false when the tweet was
     * created before this feature was enabled.
     *
     * @return whether the authenticating user has retweeted this tweet.
     * @since Twitter4J 2.1.4
     */
    public boolean isRetweetedByMe() {
        return getTarget().isRetweetedByMe();
    }

    /**
     * Returns the authenticating user's retweet's id of this tweet, or -1L when the tweet was created
     * before this feature was enabled.
     *
     * @return the authenticating user's retweet's id of this tweet
     * @since Twitter4J 3.0.1
     */
    public long getCurrentUserRetweetId() {
        return getTarget().getCurrentUserRetweetId();
    }

    @Override
    public boolean isPossiblySensitive() {
        return getTarget().isPossiblySensitive();
    }

    /**
     * Returns an array of user mentions in the tweet, or null if no users were mentioned.
     *
     * @return An array of user mention entities in the tweet.
     * @since Twitter4J 2.1.9
     */
    public UserMentionEntity[] getUserMentionEntities() {
        return getTarget().getUserMentionEntities();
    }

    /**
     * Returns an array if URLEntity mentioned in the tweet, or null if no URLs were mentioned.
     *
     * @return An array of URLEntity mentioned in the tweet.
     * @since Twitter4J 2.1.9
     */
    public URLEntity[] getURLEntities() {
        return getTarget().getURLEntities();
    }

    /**
     * Returns an array if hashtag mentioned in the tweet, or null if no hashtag were mentioned.
     *
     * @return An array of Hashtag mentioned in the tweet.
     * @since Twitter4J 2.1.9
     */
    public HashtagEntity[] getHashtagEntities() {
        return getTarget().getHashtagEntities();
    }

    /**
     * Returns an array of MediaEntities if medias are available in the tweet, or null if no media is included in the tweet.
     *
     * @return an array of MediaEntities.
     * @since Twitter4J 2.2.3
     */
    public MediaEntity[] getMediaEntities() {
        return getTarget().getMediaEntities();
    }

    /** Delegates to the parsed status; forces parsing on first call. */
    public RateLimitStatus getRateLimitStatus() {
        return getTarget().getRateLimitStatus();
    }

    /** Delegates to the parsed status; forces parsing on first call. */
    public int getAccessLevel() {
        return getTarget().getAccessLevel();
    }

    // Note: the parameter shadows the cached field of the same name; it is the
    // comparison operand, not the lazily parsed delegate.
    public int compareTo(Status target) {
        return getTarget().compareTo(target);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Status)) return false;
        // Equality is defined by the parsed delegate, so any Status (lazy or
        // not) with the same underlying data compares equal.
        return getTarget().equals(o);
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): both delegate to the parsed target.
        return getTarget().hashCode();
    }

    @Override
    public String toString() {
        return "LazyStatus{" +
                "target=" + getTarget() +
                "}";
    }
}
| |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
// Portions copyright Hiroshi Ito. Licensed under Apache 2.0 license
package com.gs.fw.common.mithra.databasetype;
import com.gs.fw.common.mithra.MithraManagerProvider;
import com.gs.fw.common.mithra.MithraObjectPortal;
import com.gs.fw.common.mithra.attribute.Attribute;
import com.gs.fw.common.mithra.attribute.update.AttributeUpdateWrapper;
import com.gs.fw.common.mithra.bulkloader.BcpBulkLoader;
import com.gs.fw.common.mithra.bulkloader.BulkLoader;
import com.gs.fw.common.mithra.bulkloader.BulkLoaderException;
import com.gs.fw.common.mithra.finder.MapperStackImpl;
import com.gs.fw.common.mithra.finder.SqlQuery;
import com.gs.fw.common.mithra.tempobject.TupleTempContext;
import com.gs.fw.common.mithra.util.ColumnInfo;
import com.gs.fw.common.mithra.util.MithraFastList;
import com.gs.fw.common.mithra.util.MithraTimestamp;
import com.gs.fw.common.mithra.util.TableColumnInfo;
import com.gs.fw.common.mithra.util.Time;
import org.eclipse.collections.impl.map.mutable.primitive.IntIntHashMap;
import org.eclipse.collections.impl.map.mutable.primitive.IntObjectHashMap;
import org.eclipse.collections.impl.set.mutable.primitive.IntHashSet;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.TimeZone;
public class SybaseDatabaseType extends AbstractDatabaseType
{
private static Logger logger;
private static final String BULK_INSERT_TYPE_KEY = "com.gs.fw.common.mithra.databasetype.SybaseDatabaseType.bulkInsertMethod";
protected static final String JDBC_SYBASE_TDS = "jdbc:sybase:tds:";
private static final char[] SYBASE_LIKE_META_CHARS = {'=', '%', '_', '[', ']'};
public static final String STATE_CONNECTION_CLOSED = "JZ0C0";
public static final String STATE_CONNECTION_CLOSED2 = "JZ0C1";
public static final String STATE_EMPTY_QUERY = "JZ0S4";
public static final String STATE_INCORRECT_ESCAPE_SEQUENCE = "JZ0S8";
// --- SQL state codes as reported by the Sybase jConnect / jTDS drivers ---
public static final String STATE_INPUT_PARAMETER_NOT_SET = "JZ0SA";
public static final String STATE_UNEXPECTED_INPUT_PARAMETER = "JZ0SF";
public static final String STATE_UNSUPPORTED_SQL_TYPE = "JZ0SM";
public static final String STATE_IO_READ_TIMEOUT = "JZ0I1";
public static final String STATE_IO_WRITE_TIMEOUT = "JZ0I2";
public static final String STATE_READ_TIMEOUT = "JZ0T3";
public static final String STATE_WRITE_TIMEOUT = "JZ0T4";
public static final String STATE_ILLEGAL_TYPE_CONVERSION = "JZ0TC";
public static final String STATE_INVALID_COLUMN_NAME = "S0022";
public static final String STATE_USER_NAME_TOO_LONG = "JZ001";
public static final String STATE_PASSWORD_TOO_LONG = "JZ002";
public static final String STATE_INCORRECT_URL_FORMAT = "JZ003";
public static final String STATE_IO_EXCEPTION = "JZ006";
public static final String STATE_BATCH_ERROR ="JZ0BE";
public static final String STATE_METADATA_NOT_FOUND = "JZ0SJ";
public static final String JTDS_IO_ERROR = "08S01";
// --- Sybase server error codes (SQLException.getErrorCode()) ---
public static final int CODE_OBJECT_NOT_FOUND = 208; // this seems to happen spuriously or on a bad connection
public static final int CODE_RUN_OUT_OF_LOCKS = 1204;
public static final int CODE_DEADLOCK = 1205;
public static final int CODE_DUPLICATE = 2601;
public static final int CODE_SCHEMA_CHANGE = 540;
public static final int CODE_REQUEST_TIMEOUT = 13468;
public static final int CODE_CONN_TIMEOUT = 13507;
public static final int SESSION_ACQUIRE_LOCK_TIMEOUT = 12205;
public static final int TABLE_ACQUIRE_LOCK_TIMEOUT = 12207;
// use only JDK and trove collections in this class for generator dependency reasons
// states/codes that isTimedOutWithoutRecursion() treats as timeouts (populated in a static block below)
public static final HashSet<String> TIMEOUT_STATE;
public static final IntHashSet TIMEOUT_ERROR;
// Sybase type names whose DDL rendering takes a (length) suffix, e.g. varchar(20)
private static final HashSet<String> typesWithLength;
// native Sybase user type code (syscolumns.usertype) -> java.sql.Types constant / type name;
// both are populated together via registerSybaseType()
private static final IntIntHashMap sybaseToJDBCTypes = new IntIntHashMap();
private static final IntObjectHashMap<String> sybaseTypeNames = new IntObjectHashMap<String>();
// NOTE(review): these two fields are static yet are reassigned from the 3-arg
// instance constructor below, so the last-constructed instance wins for ALL
// instances. The shipped singletons use identical values, so this is currently
// benign -- confirm before constructing additional instances with other values.
private static int maxClauses = 420;
private static int maxSearchableArguments = 102;
private static final String UNION = " union ";
private static final String UNION_ALL = " union all ";
private static final SybaseDatabaseType instance = new SybaseDatabaseType();
private static final SybaseDatabaseType sybase15Instance = new SybaseDatabaseType(1, 420, 102);
public static final String TIMESTAMP_FORMAT = "MMM dd yyyy HH:mm:ss:SSS";
public static final String DATE_FORMAT = "MMM dd yyyy";
// these values were arrived at using a series of test, by inserting into tables of 1, 8, 16, and 30 columns
private static final double OPTIMAL_INSERT_PARAMETERS = 160;
private static final int MAX_UNIONS_IN_INSERT = 60;
// parallel degree applied in configureConnection(); <= 0 leaves the server default
private int maxParallelDegree = -1;
// when true, bulk loads always use the file based bcp loader (see createBulkLoader)
private boolean forceFile = false;
// sql type name -> java type name, used by the code generator (see getJavaTypeFromSql)
private static final Map<String, String> sqlToJavaTypes;
private static final HashSet<String> numericSybaseTypes;
// per-thread reusable calendar for timestamp binding (see getCalendarInstance);
// raw ThreadLocal, so a cast is required at the use site
private static final ThreadLocal calendarInstance = new ThreadLocal();
private int maxUnions = 1;
// reflectively-resolved JtdsBcpBulkLoader constructor; null means jTDS bulk loading is unavailable
private Constructor<?> bulkConstructor;
static
{
// Register every native Sybase user type code (syscolumns.usertype) with its
// Sybase type name (for DDL) and its closest java.sql.Types constant. Several
// codes are "nullable" variants of the same base type (e.g. 13 = intn) and map
// to the same name/JDBC type as the base.
// http://jtds.sourceforge.net/typemap.html defines some of the mappings below.
registerSybaseType(1, "char", Types.CHAR);
registerSybaseType(2, "varchar", Types.VARCHAR); // varchar
registerSybaseType(3, "binary", Types.BINARY); // binary
registerSybaseType(4, "varbinary", Types.VARBINARY); // varbinay
registerSybaseType(5, "tinyint", Types.TINYINT); // tinyint
registerSybaseType(6, "smallint", Types.SMALLINT); // smallint
registerSybaseType(7, "int", Types.INTEGER); // int
registerSybaseType(8, "float", Types.FLOAT); // float
registerSybaseType(10, "numeric", Types.NUMERIC); // numeric
registerSybaseType(11, "money", Types.DECIMAL); // money
registerSybaseType(12, "datetime", Types.TIMESTAMP); // datetime
registerSybaseType(13, "int", Types.INTEGER); // intn
registerSybaseType(14, "float", Types.FLOAT); // floatn
registerSybaseType(15, "datetime", Types.TIMESTAMP); // datetimn
registerSybaseType(16, "bit", Types.BOOLEAN); // bit
registerSybaseType(17, "money", Types.DECIMAL); // moneyn
registerSybaseType(18, "sysname", 0); // sysname has no JDBC equivalent; 0 is a placeholder
registerSybaseType(19, "text", Types.CLOB); // text
registerSybaseType(20, "image", Types.BLOB); // image
registerSybaseType(21, "smallmoney", Types.DECIMAL); // smallmoney
registerSybaseType(22, "smalldatetime", Types.TIMESTAMP); // smalldatetime
registerSybaseType(23, "real", Types.REAL); // real
registerSybaseType(24, "nchar", Types.CHAR); // nchar
registerSybaseType(25, "nvarchar", Types.VARCHAR); // nvarchar
registerSybaseType(26, "decimal", Types.DECIMAL); // decimal
registerSybaseType(27, "decimal", Types.DECIMAL); // decimaln
registerSybaseType(28, "numeric", Types.NUMERIC); // numericn
registerSybaseType(34, "unichar", Types.CHAR); // unichar
registerSybaseType(35, "univarchar", Types.VARCHAR); // univarchar
registerSybaseType(37, "date", Types.DATE); // date
registerSybaseType(38, "time", Types.TIME); // time
registerSybaseType(39, "date", Types.DATE); // daten
registerSybaseType(40, "time", Types.TIME); // timen
registerSybaseType(43, "bigint", Types.BIGINT); // bigint
registerSybaseType(80, "timestamp", Types.TIMESTAMP); // timestamp
}
/**
 * Records the mapping from a native Sybase user type code to both its Sybase
 * type name (used for DDL generation) and the corresponding
 * {@code java.sql.Types} constant.
 *
 * @param sybaseType     native Sybase user type code (syscolumns.usertype)
 * @param sybaseTypeName Sybase type name for this code
 * @param javaSqlType    matching java.sql.Types constant
 */
protected static void registerSybaseType(int sybaseType, String sybaseTypeName, int javaSqlType)
{
    sybaseTypeNames.put(sybaseType, sybaseTypeName);
    sybaseToJDBCTypes.put(sybaseType, javaSqlType);
}
static
{
// Sybase type names considered numeric by isColumnTypeNumeric().
numericSybaseTypes = new HashSet<String>();
numericSybaseTypes.add("int");
numericSybaseTypes.add("smallint");
numericSybaseTypes.add("tinyint");
numericSybaseTypes.add("float");
numericSybaseTypes.add("numeric");
numericSybaseTypes.add("bit");
numericSybaseTypes.add("binary");
numericSybaseTypes.add("varbinary");
numericSybaseTypes.add("decimal");
numericSybaseTypes.add("real");
numericSybaseTypes.add("bigint");
}
static
{
// SQL type name -> Java type name used by the code generator (getJavaTypeFromSql).
// "not implemented" entries mark money types that the generator does not support.
sqlToJavaTypes = new HashMap<String, String>();
sqlToJavaTypes.put("integer", "int");
sqlToJavaTypes.put("smallint", "short");
sqlToJavaTypes.put("tinyint", "short");
sqlToJavaTypes.put("float", "float");
sqlToJavaTypes.put("double precision", "double");
sqlToJavaTypes.put("double precis", "double");
sqlToJavaTypes.put("smallmoney", "not implemented");
sqlToJavaTypes.put("money", "not implemented");
sqlToJavaTypes.put("char", "char");
sqlToJavaTypes.put("varchar", "String");
sqlToJavaTypes.put("text", "String");
sqlToJavaTypes.put("image", "byte[]");
sqlToJavaTypes.put("datetime", "Timestamp");
sqlToJavaTypes.put("smalldatetime", "Timestamp");
sqlToJavaTypes.put("timestamp", "Timestamp");
sqlToJavaTypes.put("bit", "boolean");
sqlToJavaTypes.put("binary", "byte[]");
sqlToJavaTypes.put("varbinary", "byte[]");
sqlToJavaTypes.put("decimal", "BigDecimal");
sqlToJavaTypes.put("real", "float");
sqlToJavaTypes.put("date", "Timestamp");
sqlToJavaTypes.put("time", "Time");
sqlToJavaTypes.put("bigint", "long");
}
static
{
// Type names that take a (length) suffix when rendered in DDL (createTableStatement).
typesWithLength = new HashSet<String>();
typesWithLength.add("char");
typesWithLength.add("nchar");
typesWithLength.add("varchar");
typesWithLength.add("nvarchar");
typesWithLength.add("binary");
typesWithLength.add("varbinary");
typesWithLength.add("unichar");
typesWithLength.add("univarchar");
}
static
{
// SQL states and error codes that isTimedOutWithoutRecursion() classifies as timeouts.
TIMEOUT_STATE = new HashSet<String>();
TIMEOUT_STATE.add(STATE_IO_READ_TIMEOUT);
TIMEOUT_STATE.add(STATE_IO_WRITE_TIMEOUT);
TIMEOUT_STATE.add(STATE_READ_TIMEOUT);
TIMEOUT_STATE.add(STATE_WRITE_TIMEOUT);
TIMEOUT_ERROR = new IntHashSet();
TIMEOUT_ERROR.add(CODE_CONN_TIMEOUT);
TIMEOUT_ERROR.add(CODE_REQUEST_TIMEOUT);
TIMEOUT_ERROR.add(SESSION_ACQUIRE_LOCK_TIMEOUT);
TIMEOUT_ERROR.add(TABLE_ACQUIRE_LOCK_TIMEOUT);
}
/** Extendable Singleton */
protected SybaseDatabaseType()
{
// The BULK_INSERT_TYPE_KEY system property selects the bulk-load mechanism:
// "file" forces the file based bcp loader; anything else tries jTDS bcp.
String bulkType = System.getProperty(BULK_INSERT_TYPE_KEY, "jtds");
if (bulkType.equalsIgnoreCase("file"))
{
forceFile = true;
}
try
{
// The jTDS bulk loader is an optional dependency, so it is resolved reflectively.
Class<?> aClass = Class.forName("com.gs.fw.common.mithra.bulkloader.JtdsBcpBulkLoader");
this.bulkConstructor = aClass.getDeclaredConstructor(String.class, String.class, String.class, Integer.TYPE, SybaseDatabaseType.class, Boolean.TYPE);
}
catch (Exception e)
{
// Deliberate best-effort: without the class, hasBulkInsert() simply reports false
// (unless forceFile is set). Not an error condition, hence info level.
getLogger().info("Sybase bulk loader not found. Bulk loading disabled.");
}
}
/**
* Singleton constructor variant that overrides the maximum number of unions
* used when generating select statements.
*/
protected SybaseDatabaseType(int maxUnions)
{
this();
this.maxUnions = maxUnions;
}
/**
* Constructor variant that also overrides maxClauses and maxSearchableArguments.
* NOTE(review): those two fields are STATIC, so this constructor mutates state
* shared by every instance; the last instance constructed wins. The shipped
* singletons pass values identical to the defaults, so this is currently
* benign -- confirm before creating instances with different values.
*/
protected SybaseDatabaseType(int maxUnions, int maxClauses, int maxSearchableArguments)
{
this();
this.maxUnions = maxUnions;
this.maxClauses = maxClauses;
this.maxSearchableArguments = maxSearchableArguments;
}
// Lazily resolves the logger. NOTE(review): unsynchronized lazy init -- under a
// race the logger may be fetched twice, which is harmless since LoggerFactory
// returns the same named logger.
private static Logger getLogger()
{
if(logger == null)
{
logger = LoggerFactory.getLogger(SybaseDatabaseType.class.getName());
}
return logger;
}
/** Forces all subsequent bulk loads through the file based bcp loader. */
public void setForceFileBcp(boolean forceFile)
{
this.forceFile = forceFile;
}
/** @return the shared default instance (single union per generated select) */
public static SybaseDatabaseType getInstance()
{
return instance;
}
/** @return the shared Sybase 15 instance */
public static SybaseDatabaseType getSybase15Instance()
{
return sybase15Instance;
}
/** Sybase supports "select top N ..." row limiting; see getSelect(). */
public boolean hasTopQuery()
{
return true;
}
/**
* Builds the full select statement for a query, repeating the shared
* select/from prefix once per union and appending each union's where clause.
*
* @param columns         comma separated column list
* @param query           the analyzed query (supplies from/where/order-by and union count)
* @param groupBy         group-by column list, or null for none
* @param isInTransaction when true, per-table holdlock/noholdlock hints are emitted
* @param rowCount        when positive, a "top rowCount+1" clause is added; the extra
*                        row presumably lets the caller detect that the limit was
*                        exceeded -- confirm against callers
* @return the complete SQL select statement
*/
public String getSelect(String columns, SqlQuery query, String groupBy, boolean isInTransaction, int rowCount)
{
StringBuilder selectWithoutWhere = new StringBuilder("select ");
String union = UNION_ALL;
if (query.requiresDistinct())
{
selectWithoutWhere.append(" distinct ");
}
if (rowCount > 0)
{
selectWithoutWhere.append(" top ").append(rowCount+1).append(' ');
}
selectWithoutWhere.append(columns).append(" from ");
if (isInTransaction)
{
query.appendFromClauseWithPerTableLocking(selectWithoutWhere, "holdlock", "noholdlock");
}
else
{
query.appendFromClause(selectWithoutWhere);
}
int numberOfUnions = query.getNumberOfUnions();
StringBuffer result = new StringBuffer();
// plain "union" deduplicates across branches; "union all" is the cheaper default
if (numberOfUnions > 1 && query.requiresUnionWithoutAll())
{
union = UNION;
}
for (int i = 0; i < numberOfUnions; i++)
{
if (i > 0)
{
result.append(union);
}
// each union branch shares the same select/from prefix, differing only in its where clause
result.append(selectWithoutWhere);
String whereClause = query.getWhereClauseAsString(i);
if (whereClause != null && whereClause.length() > 0)
{
result.append(" where ").append(whereClause);
}
}
if (groupBy != null)
{
result.append(" group by ").append(groupBy);
}
String orderBy = query.getOrderByClause();
if (orderBy != null && orderBy.length() > 0)
{
result.append(" order by ").append(orderBy);
}
return result.toString();
}
/**
 * Builds a simple select with an explicit Sybase lock hint.
 *
 * @param columns     comma separated column list
 * @param fromClause  from clause (without the "from" keyword)
 * @param whereClause where clause (without the "where" keyword), or null for none
 * @param lock        true emits "holdlock" (keep shared locks for the
 *                    transaction), false emits "noholdlock"
 * @return the assembled SQL select statement
 */
public String getSelect(String columns, String fromClause, String whereClause, boolean lock)
{
    StringBuilder sql = new StringBuilder("select ");
    sql.append(columns).append(" from ").append(fromClause);
    sql.append(lock ? " holdlock" : " noholdlock");
    if (whereClause != null)
    {
        sql.append(" where ").append(whereClause);
    }
    return sql.toString();
}
/**
* Builds a delete statement, optionally limited to the first rowCount rows
* (Sybase "delete top N"). Unions are not supported for deletes, so only the
* where clause of union 0 is used.
*/
public String getDelete(SqlQuery query, int rowCount)
{
StringBuilder deleteClause = new StringBuilder("delete ");
if(rowCount > 0)
{
deleteClause.append(" top ").append(rowCount).append(' ');
}
String tableName = query.getTableName(query.getAnalyzedOperation().getAnalyzedOperation().getResultObjectPortal(), MapperStackImpl.EMPTY_MAPPER_STACK_IMPL);
deleteClause.append(tableName);
deleteClause.append(" from ").append(query.getFromClauseAsString());
String where = query.getWhereClauseAsString(0); // zero is the union number is which is disabled for deletes.
if (where.trim().length() > 0)
{
deleteClause.append(" where ").append(where);
}
return deleteClause.toString();
}
/**
* Decides whether a failed statement is worth retrying: deadlocks, schema
* changes, closed connections, deadlocks surfaced through a batch error, and
* jTDS reporting the server closed the connection.
*/
protected boolean isRetriableWithoutRecursion(SQLException sqlException)
{
String state = sqlException.getSQLState();
int code = sqlException.getErrorCode();
boolean retriable = (code == CODE_DEADLOCK || code == CODE_SCHEMA_CHANGE || STATE_CONNECTION_CLOSED.equals(state));
if (!retriable && STATE_BATCH_ERROR.equals(state))
{
// batch errors do not carry the deadlock code, so match on the server message text
retriable = sqlException.getMessage().indexOf("encountered a deadlock situation. Please re-run your command.") >= 0;
}
if (!retriable)
{
retriable = JTDS_IO_ERROR.equals(state) && sqlException.getMessage().contains("DB server closed connection");
}
return retriable;
}
/** True when the exception's SQL state or error code is in the timeout sets built above. */
protected boolean isTimedOutWithoutRecursion(SQLException sqlException)
{
return (TIMEOUT_STATE.contains(sqlException.getSQLState()) || TIMEOUT_ERROR.contains(sqlException.getErrorCode()));
}
/** True when the exception is Sybase's duplicate-key error (2601). */
public boolean violatesUniqueIndexWithoutRecursion(SQLException sqlException)
{
return CODE_DUPLICATE == sqlException.getErrorCode();
}
/**
 * Computes how many statements to put in one JDBC batch so the total number of
 * bound parameters stays near the empirically optimal value
 * (OPTIMAL_INSERT_PARAMETERS, measured at 160).
 *
 * @param parametersPerStatement number of bind parameters in a single statement
 * @return batch size, always at least 1. The original integer division
 *         returned 0 for statements with more than 160 parameters, which would
 *         break batching loops; getMultiInsertBatchSize applies the same floor.
 */
public int getMaxPreparedStatementBatchCount(int parametersPerStatement)
{
    int batchCount = (int) (OPTIMAL_INSERT_PARAMETERS / parametersPerStatement);
    return batchCount < 1 ? 1 : batchCount;
}
/**
 * Sets Sybase's per-connection "rowcount" limit, which caps the number of rows
 * affected or returned by subsequent statements on this connection.
 *
 * @param con      the connection to configure
 * @param rowcount the row limit; 0 means unlimited (see setInfiniteRowCount)
 * @throws SQLException if the statement fails
 */
public void setRowCount(Connection con, int rowcount) throws SQLException
{
    PreparedStatement stm = con.prepareStatement("set rowcount ?");
    try
    {
        stm.setInt(1, rowcount);
        stm.executeUpdate();
    }
    finally
    {
        // the original leaked the statement if setInt/executeUpdate threw
        stm.close();
    }
}
/**
* Resets the connection's rowcount limit to unlimited (0). Failures are logged
* rather than thrown because the connection is typically being returned to a
* pool at this point.
*/
public void setInfiniteRowCount(Connection con)
{
try
{
this.setRowCount(con, 0);
}
catch (SQLException e)
{
getLogger().error("Could not reset row count! This is very bad, as the connection will now be foobared in the pool", e);
}
}
/** @return the maximum number of where-clause terms to generate per statement */
public int getMaxClauses()
{
// if (tempThreshold < maxClauses) return tempThreshold + 10;
return maxClauses;
}
/** Sybase supports "set rowcount"; see setRowCount(). */
public boolean hasSetRowCount()
{
return true;
}
// --- SQL type names used when generating DDL and conversion expressions ---
@Override
public String getIndexableSqlDataTypeForBoolean()
{
// NOTE(review): tinyint rather than bit, presumably because Sybase restricts
// indexing/nullability of bit columns -- confirm
return "tinyint";
}
/** Renders an int-to-string conversion; char(11) fits any 32-bit int including the sign. */
@Override
public String getConversionFunctionIntegerToString(String expression)
{
return "convert(char(11), "+expression+")";
}
/** Renders a string-to-int conversion. */
@Override
public String getConversionFunctionStringToInteger(String expression)
{
return "convert(int, "+expression+")";
}
public String getSqlDataTypeForBoolean()
{
return "bit";
}
public String getSqlDataTypeForTimestamp()
{
return "datetime";
}
public String getSqlDataTypeForTime()
{
return "time";
}
public String getSqlDataTypeForTinyInt()
{
return "tinyint";
}
public String getSqlDataTypeForVarBinary()
{
// image rather than varbinary: Sybase varbinary has a small maximum length
return "image";
}
public String getSqlDataTypeForByte()
{
return "tinyint";
}
public String getSqlDataTypeForChar()
{
return "char(1)";
}
public String getSqlDataTypeForDateTime()
{
return "datetime";
}
public String getSqlDataTypeForDouble()
{
return "double precision";
}
public String getSqlDataTypeForFloat()
{
return "float";
}
public String getSqlDataTypeForInt()
{
return "integer";
}
public String getSqlDataTypeForLong()
{
return "bigint";
}
public String getSqlDataTypeForShortJava()
{
return "smallint";
}
public String getSqlDataTypeForString()
{
return "varchar";
}
public String getSqlDataTypeForBigDecimal()
{
return "numeric";
}
/**
 * Builds the fully qualified name for a table. A schema that already contains
 * a dot (e.g. "mydb.myowner") is used verbatim as the prefix; otherwise the
 * default "dbo" owner is inserted ("mydb.dbo.table"). A null schema yields the
 * bare table name.
 *
 * @param schema    database name, "database.owner", or null
 * @param tableName the unqualified table name
 * @return the qualified name
 */
public String getFullyQualifiedTableName(String schema, String tableName)
{
    if (schema == null)
    {
        return tableName;
    }
    String prefix = schema.indexOf('.') > 0 ? schema + '.' : schema + ".dbo.";
    return prefix + tableName;
}
/**
 * Builds a fully qualified name that always uses the "dbo" owner, stripping
 * any owner already present in the schema ("mydb.myowner" becomes
 * "mydb.dbo.table"). A null schema yields the bare table name. Used for
 * system tables (syscolumns, sysindexes, ...), which live under dbo.
 *
 * @param schema    database name, "database.owner", or null
 * @param tableName the unqualified table name
 * @return the dbo-qualified name
 */
public String getFullyQualifiedDboTableName(String schema, String tableName)
{
    if (schema == null)
    {
        return tableName;
    }
    int dot = schema.indexOf('.');
    String catalog = dot > 0 ? schema.substring(0, dot) : schema;
    return catalog + ".dbo." + tableName;
}
/**
* Maps a SQL type name to the Java type name used by the code generator.
* numeric columns pick int/long/double based on precision and scale, and char
* columns wider than one character become String.
*
* NOTE(review): precision and decimal are auto-unboxed for "numeric"/"char"
* inputs, so passing null for those would NPE -- callers presumably always
* supply them for such columns; confirm.
*
* @param sql       SQL type name (lower case, as found in sqlToJavaTypes)
* @param precision column precision (digits)
* @param decimal   column scale (digits after the decimal point)
* @return the Java type name, or null for an unknown SQL type
*/
public String getJavaTypeFromSql(String sql, Integer precision, Integer decimal)
{
String javaType = sqlToJavaTypes.get(sql);
if (sql.equals("numeric"))
{
if (decimal != 0)
{
javaType = "double";
}
else if (precision <= 8)
{
// up to 8 digits always fits an int
javaType = "int";
}
else
{
javaType = "long";
}
}
if("char".equals(sql))
{
if(precision > 1)
{
javaType = "String";
}
}
return javaType;
}
/**
* @deprecated use {@code createBulkLoader(user, password, hostName, port)}; the
* connection parameter is unused and is closed immediately to avoid leaking it.
*/
@Deprecated
public BulkLoader createBulkLoader(Connection connection, String user, String password, String hostName, int port) throws BulkLoaderException
{
try
{
if (connection != null)
{
getLogger().error("Using deprecated method createBulkLoader with a connection object can lead to pool exhaustion!");
connection.close();
connection = null;
}
}
catch (SQLException e)
{
getLogger().error("could not close connection", e);
}
return createBulkLoader(user, password, hostName, port);
}
/** Creates a bulk loader with data-model mismatches treated as fatal. */
public BulkLoader createBulkLoader(String user, String password, String hostName, int port)
{
return createBulkLoader(user, password, hostName, port, true);
}
/**
* Creates a bulk loader: the file based bcp loader when no host name is given
* or file loading is forced, otherwise the reflectively loaded jTDS bcp loader.
*
* @return a bulk loader, or null when jTDS bulk loading is unavailable
*/
public BulkLoader createBulkLoader(String user, String password, String hostName, int port, boolean dataModelMismatchIsFatal)
{
if (hostName == null || forceFile)
{
return new BcpBulkLoader(this, user, password);
}
else if (bulkConstructor != null)
{
try
{
return (BulkLoader) bulkConstructor.newInstance(user, password, hostName, port, this, dataModelMismatchIsFatal);
}
catch (Exception e)
{
throw new RuntimeException("Could not instantiate bulk loader", e);
}
}
return null;
}
/**
* <p>Overridden to create the table metadata by hand rather than using the JDBC
* <code>DatabaseMetadata.getColumns()</code> method. This is because the Sybase driver fails
* when the connection is an XA connection unless you allow transactional DDL in tempdb.</p>
*
* @return the table's column metadata, or null when the table has no columns
*         (e.g. it does not exist)
*/
public TableColumnInfo getTableColumnInfo(Connection connection, String schema, String table) throws SQLException
{
// default to the connection's current catalog when no schema is given
if (schema == null || schema.length() == 0)
{
schema = connection.getCatalog();
}
PreparedStatement stmt = connection.prepareStatement("SELECT name,colid,length,usertype,prec,scale,status FROM "+getFullyQualifiedDboTableName(schema, "syscolumns")+" WHERE id=OBJECT_ID(?)");
ResultSet results = null;
ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
try
{
String objectName = getFullyQualifiedTableName(schema , table);
stmt.setString(1, objectName);
results = stmt.executeQuery();
while (results.next())
{
String name = results.getString("name");
int ordinalPosition = results.getInt("colid");
int size = results.getInt("length");
// translate the native user type to the JDBC type constant
int type = sybaseToJDBCTypes.get(results.getInt("usertype"));
int precision = results.getInt("prec");
int scale = results.getInt("scale");
// http://www.sybase.com/detail?id=205883#syscol - How to Read syscolumns.status
boolean nullable = (results.getInt("status") & 8) != 0;
columns.add(new ColumnInfo(name, type, size, precision, scale, ordinalPosition, nullable));
}
}
finally
{
closeResultSet(results, "Ignoring error whilst closing ResultSet that was used to query the DatabaseInfo");
closeStatement(stmt, "Ignoring error whilst closing PreparedStatement that was used to query the DatabaseInfo");
}
Collections.sort(columns);
return columns.isEmpty() ? null : new TableColumnInfo(null, schema, table, columns.toArray(new ColumnInfo[columns.size()]));
}
/**
* Gets the <code>DatabaseInfo</code> for a particular connection.
* @param connection The connection to lookup the database information from.
* @return The database information.
* @throws SQLException if there was a problem looking up the database information.
*/
public DatabaseInfo getDatabaseInfo(Connection connection) throws SQLException
{
PreparedStatement statement = connection.prepareStatement("select @@servername, db_name()");
ResultSet results = null;
DatabaseInfo info = null;
try
{
results = statement.executeQuery();
if (!results.next())
{
throw new SQLException("Query for database name and server name returned zero rows!");
}
// column 1: server name, column 2: current database name
info = new DatabaseInfo(results.getString(1), results.getString(2));
}
finally
{
closeResultSet(results, "Ignoring error whilst closing ResultSet that was used to query the DatabaseInfo");
closeStatement(statement, "Ignoring error whilst closing PreparedStatement that was used to query the DatabaseInfo");
}
return info;
}
/** Multi-row inserts are generated as "insert ... select ... union all select ...". */
protected boolean hasSelectUnionMultiInsert()
{
return true;
}
/** Sybase does not support the multi-row "values (...),(...)" insert form. */
protected boolean hasValuesMultiInsert()
{
return false;
}
/**
* Number of rows per multi-insert statement: keeps the total parameter count
* near OPTIMAL_INSERT_PARAMETERS, floored at 1 and capped at MAX_UNIONS_IN_INSERT.
*/
public int getMultiInsertBatchSize(int columnsToInsert)
{
int result = (int) Math.round(OPTIMAL_INSERT_PARAMETERS / (double) columnsToInsert);
if (result == 0) result = 1;
if (result > MAX_UNIONS_IN_INSERT)
{
result = MAX_UNIONS_IN_INSERT;
}
return result;
}
/**
* @return the statement selecting the last identity value generated on this
*         connection; the table name is not needed on Sybase
*/
public String getLastIdentitySql(String tableName)
{
return "select @@identity";
}
/**
* Builds the statement that toggles explicit inserts into an identity column.
* NOTE(review): no space is emitted between the table name and onOff, so
* callers presumably pass " on"/" off" with a leading space -- confirm before
* relying on this from new call sites.
*/
public String getAllowInsertIntoIdentityStatementFor(String tableName, String onOff)
{
return "SET IDENTITY_INSERT "+ tableName + onOff;
}
/**
 * Extracts the server host name from a Sybase/jTDS data source via reflection
 * (the DataSource interface itself does not expose it).
 *
 * @param ds the data source
 * @return the value of the data source's getServerName() method, or null when
 *         the method does not exist or cannot be invoked
 */
public String getHostnameFromDataSource(DataSource ds)
{
    try
    {
        // explicit casts avoid the ambiguous-varargs warning from passing a bare null
        Method method = ds.getClass().getMethod("getServerName", (Class<?>[]) null);
        return (String) method.invoke(ds, (Object[]) null);
    }
    catch (NoSuchMethodException e)
    {
        // not a Sybase-style data source; fall through and report unknown
    }
    catch (InvocationTargetException e)
    {
        // the getter itself failed; treat the host as unknown
    }
    catch (IllegalAccessException e)
    {
        // getter not accessible; treat the host as unknown
    }
    return null;
}
/**
 * Extracts the server port from a Sybase/jTDS data source via reflection (the
 * DataSource interface itself does not expose it).
 *
 * @param ds the data source
 * @return the value of the data source's getPortNumber() method, or 0 when the
 *         method does not exist or cannot be invoked
 */
public int getPortFromDataSource(DataSource ds)
{
    try
    {
        // explicit casts avoid the ambiguous-varargs warning from passing a bare null
        Method method = ds.getClass().getMethod("getPortNumber", (Class<?>[]) null);
        return (Integer) method.invoke(ds, (Object[]) null);
    }
    catch (NoSuchMethodException e)
    {
        // not a Sybase-style data source; fall through and report unknown
    }
    catch (InvocationTargetException e)
    {
        // the getter itself failed; treat the port as unknown
    }
    catch (IllegalAccessException e)
    {
        // getter not accessible; treat the port as unknown
    }
    return 0;
}
/**
* Parses the host out of a jTDS URL of the form
* jdbc:sybase:Tds:host:port/db. Returns null for non-matching URLs.
* NOTE(review): a matching prefix with fewer tokens than expected would throw
* NoSuchElementException -- presumably URLs are pre-validated; confirm.
*/
public String getHostnameFromUrl(String url)
{
if (url.toLowerCase().startsWith(JDBC_SYBASE_TDS))
{
StringTokenizer stok = new StringTokenizer(url, ":/?&");
stok.nextToken(); // jdbc
stok.nextToken(); // sybase
stok.nextToken(); // Tds
return stok.nextToken();
}
return null;
}
/**
* Parses the port out of a jTDS URL of the form jdbc:sybase:Tds:host:port/db.
* Returns 0 for non-matching URLs or an unparseable port (which is logged).
*/
public int getPortFromUrl(String url)
{
if (url.toLowerCase().startsWith(JDBC_SYBASE_TDS))
{
StringTokenizer stok = new StringTokenizer(url, ":/?&");
stok.nextToken(); // jdbc
stok.nextToken(); // sybase
stok.nextToken(); // Tds
stok.nextToken(); // server
String portString = stok.nextToken();
try
{
return Integer.parseInt(portString);
}
catch (NumberFormatException e)
{
getLogger().error("Could not parse port number in url "+url);
}
}
return 0;
}
/**
* Reads a timestamp using the shared per-thread UTC calendar, then converts it
* for the requested time zone via MithraTimestamp.
*/
@Override
public Timestamp getTimestampFromResultSet(ResultSet rs, int pos, TimeZone timeZone) throws SQLException
{
Calendar c = getCalendarInstance();
Timestamp timestamp = rs.getTimestamp(pos, c);
return MithraTimestamp.zConvertTimeForReadingWithUtcCalendar(timestamp, timeZone);
}
/**
* Returns this thread's reusable SingleInstanceGregorianCalendar, creating it
* on first use. The raw (ungenerified) ThreadLocal requires the cast.
*/
protected Calendar getCalendarInstance()
{
SingleInstanceGregorianCalendar c = (SingleInstanceGregorianCalendar) calendarInstance.get();
if (c == null)
{
c = new SingleInstanceGregorianCalendar();
calendarInstance.set(c);
}
return c;
}
/** Exposes the user-type -> JDBC-type map to subclasses. */
protected static IntIntHashMap getSybaseToJDBCTypes()
{
return sybaseToJDBCTypes;
}
/**
* A mutable, allocation-free calendar used when binding/reading timestamps.
* Fields recorded via set() are interpreted as a UTC date-time through Joda's
* ISOChronology in getTimeInMillis().
*
* NOTE(review): clone() deliberately violates the Cloneable contract -- it
* resets the fields and returns {@code this}, presumably because the JDBC
* driver clones the supplied calendar and this keeps it operating on the same
* instance; confirm against the drivers in use before changing.
*/
public static class SingleInstanceGregorianCalendar extends GregorianCalendar
{
protected static final ISOChronology ISO_CHRONOLOGY_UTC = ISOChronology.getInstance(DateTimeZone.UTC);
// the individual date-time fields recorded by set()
private int year;
private int month;
private int dayOfMonth;
private int hourOfDay;
private int minute;
private int second;
private int millis;
public Object clone()
{
// reset all fields and reuse this very instance (see class comment)
year = month = dayOfMonth = hourOfDay = minute = second = millis = 0;
return this;
}
public long getTimeInMillis()
{
try
{
// Calendar months are 0-based; ISOChronology expects 1-based
return ISO_CHRONOLOGY_UTC.getDateTimeMillis(year, month + 1, dayOfMonth, hourOfDay, minute, second, millis);
}
catch (IllegalArgumentException e)
{
// NOTE(review): retries with hour+1, presumably to step over an invalid
// (nonexistent) time -- confirm; the first failure is only logged
getLogger().warn(e.getMessage());
return ISO_CHRONOLOGY_UTC.getDateTimeMillis(year, month + 1, dayOfMonth, hourOfDay + 1, minute, second, millis);
}
}
/** Records a field value; only the fields needed for timestamp conversion are supported. */
public void set(int field, int value)
{
switch(field)
{
case Calendar.YEAR:
this.year = value;
break;
case Calendar.MONTH:
this.month = value;
break;
case Calendar.DAY_OF_MONTH:
this.dayOfMonth = value;
break;
case Calendar.HOUR_OF_DAY:
this.hourOfDay = value;
break;
case Calendar.MINUTE:
this.minute = value;
break;
case Calendar.SECOND:
this.second = value;
break;
case Calendar.MILLISECOND:
this.millis = value;
break;
default:
// any other Calendar field would be silently lost; fail fast instead
throw new RuntimeException("unexpected set method for field "+field);
}
}
}
/**
* Renders the column-list portion of a "create table" statement for an existing
* table by reading its column metadata; see createTableStatement for the format.
*/
public String getCreateTableStatement(Connection connection, String schema, String tableName) throws SQLException
{
return createTableStatement(getColumnInfoList(connection, schema, tableName));
}
/**
* Reads column metadata for a table directly from syscolumns/systypes.
* Identity columns are skipped (they cannot be populated explicitly) and
* nullability is derived from the status bit mask.
*
* NOTE(review): the object name is concatenated into the SQL rather than
* bound as a parameter -- schema/table are presumably trusted, internally
* generated identifiers; confirm before exposing to external input.
*
* @throws IllegalArgumentException if schema is null
* @throws SQLException if a column's user type cannot be resolved or the query fails
*/
public List<ColumnInfo> getColumnInfoList(Connection connection, String schema, String tableName) throws SQLException
{
List<ColumnInfo> columnInfoList = new ArrayList<ColumnInfo>();
if (schema == null)
{
throw new IllegalArgumentException("schema must not be null");
}
// join syscolumns to systypes, preferring the base (< 100) type for user defined types,
// and ignoring the alias names that shadow base types
String sql =
"SELECT c.colid, c.name, t.usertype, c.status, c.length, c.prec, c.scale " +
"FROM "+getFullyQualifiedDboTableName(schema, "syscolumns")+" c, "+getFullyQualifiedDboTableName(schema,"systypes")+" t " +
"WHERE id=OBJECT_ID('"+getFullyQualifiedTableName(schema,tableName)+"') " +
"and ( (c.usertype = t.usertype and c.usertype < 100) or " +
"(c.type = t.type and c.usertype > 100 and t.usertype < 100 and t.name not in ('longsysname', 'nchar', 'nvarchar', 'sysname'))) "+
"order by c.colid";
ResultSet results = null;
Statement stmt = connection.createStatement();
try
{
results = stmt.executeQuery(sql);
while(results.next())
{
int columnId = results.getInt(1);
String columnName = results.getString(2);
int userType = results.getInt(3);
String columnType = sybaseTypeNames.get(userType);
if (columnType == null)
{
throw new SQLException("could not resolve column type for column "+ columnName +" in table "+ getFullyQualifiedTableName(schema, tableName)
+" got a user type of "+ userType);
}
int status = results.getInt(4);
// status bit 128 marks identity columns (see syscolumns.status documentation)
if ((status & 128) != 0)
{
// identity column, skip it
getLogger().warn("Skipping identity column "+columnName+" in table "+tableName);
continue;
}
// status bit 8 marks nullable columns
boolean nullable = (status & 8) != 0;
ColumnInfo columnInformation = new ColumnInfo(columnName, userType, results.getInt(5),
results.getInt(6), results.getInt(7), 0, nullable);
columnInfoList.add(columnInformation);
}
}
finally
{
closeResultSet(results, "Ignoring error whilst closing ResultSet that was used to query the DatabaseInfo");
closeStatement(stmt, "Ignoring error whilst closing PreparedStatement that was used to query the DatabaseInfo");
}
return columnInfoList;
}
/**
 * Renders the column-list portion of a "create table" statement, e.g.
 * "(col1 int not null,col2 varchar(20) null)". Character/binary types get a
 * (length) suffix, numeric/decimal types get (precision,scale), and every
 * column carries an explicit null / not null constraint.
 *
 * @param columnInformationMap the columns, in the order they should appear
 * @return the parenthesized column definition list
 */
public String createTableStatement(List<ColumnInfo> columnInformationMap)
{
    StringBuilder ddl = new StringBuilder(32);
    ddl.append('(');
    for (int i = 0; i < columnInformationMap.size(); i++)
    {
        if (i > 0)
        {
            ddl.append(',');
        }
        ColumnInfo info = columnInformationMap.get(i);
        String sybaseType = getColumnType(info.getType());
        ddl.append(info.getName()).append(' ').append(sybaseType);
        if (typesWithLength.contains(sybaseType))
        {
            ddl.append('(').append(info.getSize()).append(')');
        }
        else if ("numeric".equals(sybaseType) || "decimal".equals(sybaseType))
        {
            ddl.append('(').append(info.getPrecision()).append(',');
            ddl.append(info.getScale()).append(')');
        }
        ddl.append(info.isNullable() ? " null" : " not null");
    }
    ddl.append(')');
    return ddl.toString();
}
/**
 * Quietly closes a JDBC statement: a close failure is logged at warn level
 * with the supplied message instead of being propagated. Null is a no-op.
 */
private void closeStatement(Statement stmt, String msg)
{
    if (stmt == null)
    {
        return;
    }
    try
    {
        stmt.close();
    }
    catch (SQLException e)
    {
        getLogger().warn(msg, e);
    }
}
/**
 * Quietly closes a JDBC result set: a close failure is logged at warn level
 * with the supplied message instead of being propagated. Null is a no-op.
 */
private void closeResultSet(ResultSet results, String msg)
{
    if (results == null)
    {
        return;
    }
    try
    {
        results.close();
    }
    catch (SQLException e)
    {
        getLogger().warn(msg, e);
    }
}
/** Sets the parallel degree applied to each connection; <= 0 leaves the server default. */
public void setMaxParallelDegree(int maxParallelDegree)
{
this.maxParallelDegree = maxParallelDegree;
}
/**
* Applies per-connection session settings: optional parallel degree and
* quoted_identifier (so generated SQL may quote identifiers).
*/
public void configureConnection(Connection con) throws SQLException
{
if (maxParallelDegree > 0)
{
fullyExecute(con, "set parallel_degree "+maxParallelDegree);
}
fullyExecute(con, "set quoted_identifier on");
}
/** Sybase supports per-table lock hints (holdlock/noholdlock); see getPerTableLock(). */
public boolean hasPerTableLock()
{
return true;
}
/**
 * Returns the Sybase per-table lock hint: "holdlock" keeps shared locks until
 * the end of the transaction, "noholdlock" releases them immediately.
 */
public String getPerTableLock(boolean lock)
{
    return lock ? "holdlock" : "noholdlock";
}
/** @return the schema prefix for shared temp tables */
public String getTempDbSchemaName()
{
return "tempdb."; // see getFullyQualifiedTableName for explanation of extra dot: we don't want
}
/** Bulk insert is available when the jTDS loader was found or file bcp is forced. */
public boolean hasBulkInsert()
{
return bulkConstructor != null || forceFile;
}
/** Sybase requires an explicit " null " on nullable columns in DDL. */
public String getNullableColumnConstraintString()
{
return " null ";
}
/**
* Classifies an exception as a dead connection: closed-connection states, IO
* failure, missing metadata, lock exhaustion, or the spurious object-not-found
* error (see CODE_OBJECT_NOT_FOUND).
*/
public boolean isConnectionDeadWithoutRecursion(SQLException e)
{
String state = e.getSQLState();
int code = e.getErrorCode();
return (STATE_CONNECTION_CLOSED.equals(state) ||
STATE_CONNECTION_CLOSED2.equals(state) ||
STATE_IO_EXCEPTION.equals(state) ||
STATE_METADATA_NOT_FOUND.equals(state) ||
code == CODE_RUN_OUT_OF_LOCKS ||
code == CODE_OBJECT_NOT_FOUND
);
}
/** Large in-clauses are rewritten as between clauses where possible. */
public boolean generateBetweenClauseForLargeInClause()
{
return true;
}
/** Non-shared (connection private) temp tables use Sybase's # prefix. */
public String getTableNameForNonSharedTempTable(String nominalName)
{
return "#"+nominalName;
}
public String getSqlPrefixForNonSharedTempTableCreation(String nominalTableName)
{
return "create table #"+nominalTableName;
}
/** All-pages locking is requested for temp tables. */
public String getSqlPostfixForNonSharedTempTableCreation()
{
return " lock allpages";
}
public String getSqlPostfixForSharedTempTableCreation()
{
return " lock allpages";
}
/** @return the maximum number of searchable arguments per statement */
public int getMaxSearchableArguments()
{
return maxSearchableArguments;
}
/** @return the maximum number of unions per generated select (1 pre-Sybase-15) */
public int getMaxUnionCount()
{
return maxUnions;
}
/** Renders a modulo expression using Sybase's % operator. */
public String getModFunction(String fullyQualifiedLeftHandExpression, int divisor)
{
return "("+fullyQualifiedLeftHandExpression+" % "+divisor+")";
}
/**
* Appends the "create table" preamble for a non-shared temp table and returns
* the name to use for it. Inside a transaction this falls back to a shared
* temp table (see inline comment for the Sybase restriction).
*/
public String appendNonSharedTempTableCreatePreamble(StringBuilder sb, String tempTableName)
{
if (MithraManagerProvider.getMithraManager().isInTransaction())
{
// we use shared temp tables in sybase because of this error:
// 'CREATE TABLE' command is not allowed within a multi-statement transaction in the 'tempdb' database.
// (SQL code: 2762 SQL State: ZZZZZ)
// google "sybase Error 2762" for more details.
return appendSharedTempTableCreatePreamble(sb, tempTableName);
}
sb.append("create table #").append(tempTableName);
return "#"+tempTableName;
}
/**
* Appends the "create table" preamble for a shared temp table (created in
* tempdb) and returns the qualified name to use for it.
*/
public String appendSharedTempTableCreatePreamble(StringBuilder sb, String nominalTableName)
{
String tableName = this.getTempDbSchemaName() + "." + nominalTableName;
sb.append("create table ").append(tableName);
return tableName;
}
@Override
public boolean createTempTableAllowedInTransaction()
{
return false; // see comment in appendNonSharedTempTableCreatePreamble
}
public boolean dropTableAllowedInTransaction()
{
return false;
}
//todo: rezaem: fix this for sybase. for millis = 999, Sybase rounds to 996 instead of 1000. maybe others
/**
* Disabled experiment compensating for Sybase's 1/300-second datetime
* granularity. The method is intentionally a no-op; the commented-out code
* below records the attempted per-last-digit millisecond adjustments and is
* kept for reference until the todo above is resolved.
*/
public void xsetTimestamp(PreparedStatement ps, int index, Timestamp timestamp, boolean forceAsString) throws SQLException
{
// if (forceAsString)
// {
// super.setTimestamp(ps, index, timestamp, forceAsString, asdfsd);
// }
// else
// {
// long time = timestamp.getTime();
// int lastDigit = (int) (time % 10);
// switch(lastDigit)
// {
// case 0:
// case 3:
// case 6:
// ps.setTimestamp(index, timestamp);
// break;
// case 1:
// ps.setTimestamp(index, new Timestamp(time - 1));
// break;
// case 2:
// ps.setTimestamp(index, new Timestamp(time + 1));
// break;
// case 4:
// ps.setTimestamp(index, new Timestamp(time - 1));
// break;
// case 5:
// ps.setTimestamp(index, new Timestamp(time + 1));
// break;
// case 7:
// ps.setTimestamp(index, new Timestamp(time - 1));
// break;
// case 8:
// ps.setTimestamp(index, new Timestamp(time - 2));
// break;
// case 9:
// ps.setTimestamp(index, new Timestamp(time + 1));
// break;
// }
// }
}
protected Time createOrReturnTimeWithAnyRequiredRounding(Time time)
{
// This behaviour is encapsulated in a protected method so that subclasses (e.g. IQ) can override it
return time.createOrReturnTimeWithRoundingForSybaseJConnectCompatibility();
}
/** Binds a time value after applying any rounding required for jConnect compatibility. */
@Override
public void setTime(PreparedStatement ps, int index, Time time) throws SQLException
{
super.setTime(ps, index, this.createOrReturnTimeWithAnyRequiredRounding(time));
}
/** @return default numeric precision used when none is specified */
public int getDefaultPrecision()
{
return 18;
}
/** @return the maximum numeric precision Sybase supports */
public int getMaxPrecision()
{
return 38;
}
/** Delete-via-insert-and-join is disabled (-1 threshold) on Sybase. */
public int getDeleteViaInsertAndJoinThreshold()
{
return -1;
}
/**
 * Builds a Sybase substring() SQL expression over the given string expression.
 * The start index is zero-based (SQL substring is one-based, hence start+1);
 * a negative end means "to the end of the string" and is rendered with a
 * length of Integer.MAX_VALUE.
 */
public String createSubstringExpression(String stringExpression, int start, int end)
{
    int length = end < 0 ? Integer.MAX_VALUE : end - start;
    return "substring(" + stringExpression + "," + (start + 1) + "," + length + ")";
}
/**
* Detects jConnect's killed-connection condition by the exception class name
* appearing in the message (the driver class is not referenced directly to
* avoid a hard dependency).
*/
@Override
public boolean isKilledConnectionException(Exception e)
{
String message = e.getMessage();
return message != null && message.contains("SybConnectionDeadException");
}
/**
* Computes how full the transaction log (syslogs) is, as a percentage of the
* log segment's allocated size. The size query differs between pre-15 and
* 15+ servers (data_pgs vs data_pages, and page-size normalization), so the
* server version is checked first via @@version_as_integer.
*
* NOTE(review): if the log-segment size query returns 0 the division yields
* Infinity/NaN rather than an error -- presumably the log segment always has
* allocated pages on a configured server; confirm.
*/
@Override
public double getSysLogPercentFull(Connection connection, String schemaName) throws SQLException
{
Statement statement = null;
ResultSet resultSet = null;
try
{
statement = connection.createStatement();
resultSet = statement.executeQuery("select @@version_as_integer");
resultSet.next();
int version = resultSet.getInt(1);
resultSet.close();
// reduce "database.owner" to just the database name
if (schemaName != null && schemaName.indexOf('.') > 0)
{
schemaName = schemaName.substring(0, schemaName.indexOf('.'));
}
String syssegmentsFullyQualifiedName = getFullyQualifiedDboTableName(schemaName, "syssegments");
String syslogsFullyQualifiedName = getFullyQualifiedDboTableName(schemaName, "syslogs");
String sysindexesFullyQualifiedName = getFullyQualifiedDboTableName(schemaName, "sysindexes");
String dbId = schemaName == null ? "" : "'"+schemaName+"'";
if (version >= 15000) // Sybase 15 or greater
{
// total log segment size in MB, normalized by the server page size
String sql = "select sum(a.size)/(power(2,20)/@@maxpagesize) " +
"from master..sysusages a," + syssegmentsFullyQualifiedName + " b " +
"where a.dbid=db_id(" + dbId + ") " +
"and b.name='logsegment' " +
"and (a.segmap & power(2,b.segment)) != 0 ";
getLogger().debug(sql);
resultSet = statement.executeQuery(sql);
resultSet.next();
double total = resultSet.getDouble(1);
resultSet.close();
// pages currently used by syslogs, in the same MB units
sql = "select data_pages(db_id(" + dbId + "),i.id)/(power(2,20)/@@maxpagesize) " +
"from " + sysindexesFullyQualifiedName + " i " +
"where i.id=object_id('" + syslogsFullyQualifiedName + "')";
getLogger().debug(sql);
resultSet = statement.executeQuery(sql);
resultSet.next();
return resultSet.getDouble(1) * 100.0 / total;
}
else
{
// pre-15: sizes are in pages; data_pgs takes the doampg column
String sql = "select sum(a.size) " +
"from master..sysusages a," + syssegmentsFullyQualifiedName + " b " +
"where a.dbid=db_id(" + dbId + ") " +
"and b.name='logsegment' " +
"and (a.segmap & power(2,b.segment)) != 0 ";
getLogger().debug(sql);
resultSet = statement.executeQuery(sql);
resultSet.next();
double total = resultSet.getDouble(1);
resultSet.close();
sql = "select data_pgs(i.id,i.doampg) " +
"from " + sysindexesFullyQualifiedName + " i " +
"where i.id=object_id('" + syslogsFullyQualifiedName + "')";
getLogger().debug(sql);
resultSet = statement.executeQuery(sql);
resultSet.next();
return resultSet.getDouble(1) * 100.0 / total;
}
}
finally
{
closeResultSet(resultSet, "Error when closing result set used to determine syslog percent.");
closeStatement(statement, "Error when closing statement used to determine syslog percent.");
}
}
/**
* @return the "update statistics" statement for the table, or null when inside
*         a transaction (where Sybase disallows it)
*/
public String getUpdateTableStatisticsSql(String tableName)
{
if (!MithraManagerProvider.getMithraManager().isInTransaction())
{
return "update statistics " + tableName;
}
return null;
}
/** True when the given native user type maps to a numeric Sybase type name. */
public static boolean isColumnTypeNumeric(int userType)
{
return numericSybaseTypes.contains(getColumnType(userType));
}
/** @return the Sybase type name for a native user type code, or null if unregistered */
public static String getColumnType(int userType)
{
return sybaseTypeNames.get(userType);
}
@Override
protected char[] getLikeMetaChars()
{
return SYBASE_LIKE_META_CHARS;
}
@Override
public String getSqlExpressionForDateYear(String columnName)
{
return "datepart(year, " + columnName + ")";
}
/**
 * Builds the Sybase SQL expression extracting the month from a date column.
 *
 * @param columnName column to extract the month from
 * @return a {@code datepart(month, ...)} expression for the column
 */
@Override
public String getSqlExpressionForDateMonth(String columnName)
{
    StringBuilder expression = new StringBuilder("datepart(month, ");
    expression.append(columnName).append(")");
    return expression.toString();
}
/**
 * Builds the Sybase SQL expression extracting the day-of-month from a date column.
 *
 * @param columnName column to extract the day from
 * @return a {@code datepart(day, ...)} expression for the column
 */
@Override
public String getSqlExpressionForDateDayOfMonth(String columnName)
{
    StringBuilder expression = new StringBuilder("datepart(day, ");
    expression.append(columnName).append(")");
    return expression.toString();
}
/**
 * Appends a multi-update statement that sets the same values on every row
 * matched by joining the target table to a temp table of primary keys.
 * Each wrapper contributes its own complete SET fragment.
 */
@Override
public void setMultiUpdateViaJoinQuery(
        Object source,
        List updates,
        Attribute[] prototypeArray,
        MithraFastList<Attribute> nullAttributes,
        int pkAttributeCount,
        TupleTempContext tempContext,
        MithraObjectPortal mithraObjectPortal,
        String fullyQualifiedTableNameGenericSource,
        StringBuilder builder)
{
    // Open the "update <table>" clause for the join-based update.
    this.startUpdateViaJoinQuery(fullyQualifiedTableNameGenericSource, builder);
    builder.append(" set ");
    // One comma-separated SET fragment per attribute update.
    int updateCount = updates.size();
    for (int index = 0; index < updateCount; index++)
    {
        if (index > 0)
        {
            builder.append(", ");
        }
        AttributeUpdateWrapper updateWrapper = (AttributeUpdateWrapper) updates.get(index);
        builder.append(updateWrapper.getSetAttributeSql());
    }
    // Join against the temp table that carries the primary keys to update.
    this.appendTempTableJoin(source, prototypeArray, nullAttributes, pkAttributeCount, tempContext, mithraObjectPortal, fullyQualifiedTableNameGenericSource, builder);
}
/**
 * Appends a batch-update statement that sets per-row values taken from the
 * temp table: each updated column is assigned from the temp-table column
 * {@code t1.c<pkAttributeCount + i>} that follows the primary-key columns.
 */
@Override
public void setBatchUpdateViaJoinQuery(
        Object source,
        List updates,
        Attribute[] prototypeArray,
        MithraFastList<Attribute> nullAttributes,
        int pkAttributeCount,
        TupleTempContext tempContext,
        MithraObjectPortal mithraObjectPortal,
        String fullyQualifiedTableNameGenericSource,
        StringBuilder builder)
{
    // Open the "update <table>" clause for the join-based update.
    this.startUpdateViaJoinQuery(fullyQualifiedTableNameGenericSource, builder);
    builder.append(" set ");
    int updateCount = updates.size();
    for (int index = 0; index < updateCount; index++)
    {
        if (index > 0)
        {
            builder.append(", ");
        }
        AttributeUpdateWrapper updateWrapper = (AttributeUpdateWrapper) updates.get(index);
        // Value columns in the temp table start right after the PK columns.
        builder.append(updateWrapper.getAttribute().getColumnName()).append(" = t1.c");
        builder.append(pkAttributeCount + index);
    }
    // Join against the temp table that carries both keys and new values.
    this.appendTempTableJoin(source, prototypeArray, nullAttributes, pkAttributeCount, tempContext, mithraObjectPortal, fullyQualifiedTableNameGenericSource, builder);
}
/**
 * JDBC type used for nullable boolean columns on Sybase.
 * Sybase's native BIT type cannot be null, so TINYINT is used instead.
 */
@Override
public int getNullableBooleanJavaSqlType()
{
    final int sqlType = Types.TINYINT;
    return sqlType;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.MalformedInputException;
import java.nio.charset.StandardCharsets;
import java.text.CharacterIterator;
import java.text.StringCharacterIterator;
import java.util.Arrays;
import org.apache.avro.reflect.Stringable;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/** This class stores text using standard UTF8 encoding. It provides methods
 * to serialize, deserialize, and compare texts at byte level. The type of
 * length is integer and is serialized using zero-compressed format. <p>In
 * addition, it provides methods for string traversal without converting the
 * byte array to a string. <p>Also includes utilities for
 * serializing/deserialing a string, coding/decoding a string, checking if a
 * byte array contains valid UTF8 code, calculating the length of an encoded
 * string.
 */
@Stringable
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Text extends BinaryComparable
    implements WritableComparable<BinaryComparable> {

  // Per-thread UTF-8 encoder. Strict (REPORT) by default so malformed input
  // surfaces as CharacterCodingException; encode(..., true) temporarily
  // switches it to REPLACE and restores REPORT afterwards.
  private static final ThreadLocal<CharsetEncoder> ENCODER_FACTORY =
    new ThreadLocal<CharsetEncoder>() {
      @Override
      protected CharsetEncoder initialValue() {
        return StandardCharsets.UTF_8.newEncoder().
               onMalformedInput(CodingErrorAction.REPORT).
               onUnmappableCharacter(CodingErrorAction.REPORT);
      }
    };

  // Per-thread UTF-8 decoder, strict by default for the same reason.
  private static final ThreadLocal<CharsetDecoder> DECODER_FACTORY =
    new ThreadLocal<CharsetDecoder>() {
      @Override
      protected CharsetDecoder initialValue() {
        return StandardCharsets.UTF_8.newDecoder().
               onMalformedInput(CodingErrorAction.REPORT).
               onUnmappableCharacter(CodingErrorAction.REPORT);
      }
    };

  private static final byte[] EMPTY_BYTES = new byte[0];

  // Backing buffer; may be longer than 'length' (capacity is retained by
  // clear() and set(), so bytes beyond 'length' can hold stale data).
  private byte[] bytes = EMPTY_BYTES;
  // Number of valid bytes in 'bytes'.
  private int length = 0;
  // Cached String length (UTF-16 code units) of the text; -1 means
  // "not computed yet" and is recomputed lazily by getTextLength().
  private int textLength = -1;

  /**
   * Construct an empty text string.
   */
  public Text() {
  }

  /**
   * Construct from a string.
   */
  public Text(String string) {
    set(string);
  }

  /**
   * Construct from another text.
   */
  public Text(Text utf8) {
    set(utf8);
  }

  /**
   * Construct from a byte array.
   */
  public Text(byte[] utf8) {
    set(utf8);
  }

  /**
   * Get a copy of the bytes that is exactly the length of the data.
   * See {@link #getBytes()} for faster access to the underlying array.
   */
  public byte[] copyBytes() {
    return Arrays.copyOf(bytes, length);
  }

  /**
   * Returns the raw bytes; however, only data up to {@link #getLength()} is
   * valid. Please use {@link #copyBytes()} if you
   * need the returned array to be precisely the length of the data.
   */
  @Override
  public byte[] getBytes() {
    return bytes;
  }

  /**
   * Returns the number of bytes in the byte array.
   */
  @Override
  public int getLength() {
    return length;
  }

  /**
   * Returns the length of this text. The length is equal to the number of
   * Unicode code units in the text.
   */
  public int getTextLength() {
    if (textLength < 0) {
      // Lazily decode once and cache; invalidated by any mutation.
      textLength = toString().length();
    }
    return textLength;
  }

  /**
   * Returns the Unicode Scalar Value (32-bit integer value)
   * for the character at <code>position</code>. Note that this
   * method avoids using the converter or doing String instantiation
   * @return the Unicode scalar value at position or -1
   * if the position is invalid or points to a
   * trailing byte
   */
  public int charAt(int position) {
    // FIX: the check was 'position > this.length', which allowed
    // charAt(length). That position is past the last valid byte, so it would
    // either read stale data beyond 'length' (the backing array can be longer
    // than the valid data) or underflow in bytesToCodePoint when the array is
    // exactly 'length' bytes. Positions >= length are invalid per the Javadoc.
    if (position >= this.length) return -1; // too long
    if (position < 0) return -1; // duh.
    ByteBuffer bb = (ByteBuffer)ByteBuffer.wrap(bytes).position(position);
    return bytesToCodePoint(bb.slice());
  }

  public int find(String what) {
    return find(what, 0);
  }

  /**
   * Finds any occurrence of <code>what</code> in the backing
   * buffer, starting as position <code>start</code>. The starting
   * position is measured in bytes and the return value is in
   * terms of byte position in the buffer. The backing buffer is
   * not converted to a string for this operation.
   * @return byte position of the first occurrence of the search
   * string in the UTF-8 buffer or -1 if not found
   */
  public int find(String what, int start) {
    try {
      ByteBuffer src = ByteBuffer.wrap(this.bytes, 0, this.length);
      ByteBuffer tgt = encode(what);
      byte b = tgt.get();
      src.position(start);

      // Naive byte-level search: scan for the first target byte, then try to
      // match the remainder; on mismatch, reset both buffers via mark/reset
      // and continue from the next source byte.
      while (src.hasRemaining()) {
        if (b == src.get()) { // matching first byte
          src.mark(); // save position in loop
          tgt.mark(); // save position in target
          boolean found = true;
          int pos = src.position()-1;
          while (tgt.hasRemaining()) {
            if (!src.hasRemaining()) { // src expired first
              tgt.reset();
              src.reset();
              found = false;
              break;
            }
            if (!(tgt.get() == src.get())) {
              tgt.reset();
              src.reset();
              found = false;
              break; // no match
            }
          }
          if (found) return pos;
        }
      }
      return -1; // not found
    } catch (CharacterCodingException e) {
      // Should not occur: encode(what) uses REPLACE mode and never reports.
      throw new RuntimeException("Should not have happened", e);
    }
  }

  /**
   * Set to contain the contents of a string.
   */
  public void set(String string) {
    try {
      ByteBuffer bb = encode(string, true);
      bytes = bb.array();
      length = bb.limit();
      textLength = string.length();
    } catch (CharacterCodingException e) {
      // Should not occur: REPLACE mode never reports malformed input.
      throw new RuntimeException("Should not have happened", e);
    }
  }

  /**
   * Set to a utf8 byte array.
   */
  public void set(byte[] utf8) {
    set(utf8, 0, utf8.length);
  }

  /**
   * Copy a text.
   */
  public void set(Text other) {
    set(other.getBytes(), 0, other.getLength());
    // Safe to share the cached text length (or -1) since the data matches.
    this.textLength = other.textLength;
  }

  /**
   * Set the Text to range of bytes.
   *
   * @param utf8 the data to copy from
   * @param start the first position of the new string
   * @param len the number of bytes of the new string
   */
  public void set(byte[] utf8, int start, int len) {
    ensureCapacity(len);
    System.arraycopy(utf8, start, bytes, 0, len);
    this.length = len;
    this.textLength = -1;
  }

  /**
   * Append a range of bytes to the end of the given text.
   *
   * @param utf8 the data to copy from
   * @param start the first position to append from utf8
   * @param len the number of bytes to append
   */
  public void append(byte[] utf8, int start, int len) {
    byte[] original = bytes;
    // Grow by at least 50% to amortize repeated appends.
    int capacity = Math.max(length + len, length + (length >> 1));
    if (ensureCapacity(capacity)) {
      // ensureCapacity allocated a fresh array; carry over the existing data.
      System.arraycopy(original, 0, bytes, 0, length);
    }
    System.arraycopy(utf8, start, bytes, length, len);
    length += len;
    textLength = -1;
  }

  /**
   * Clear the string to empty.
   *
   * <em>Note</em>: For performance reasons, this call does not clear the
   * underlying byte array that is retrievable via {@link #getBytes()}.
   * In order to free the byte-array memory, call {@link #set(byte[])}
   * with an empty byte array (For example, <code>new byte[0]</code>).
   */
  public void clear() {
    length = 0;
    textLength = -1;
  }

  /**
   * Sets the capacity of this Text object to <em>at least</em>
   * <code>capacity</code> bytes. If the current buffer is longer, then the
   * capacity and existing content of the buffer are unchanged. If
   * <code>capacity</code> is larger than the current capacity, the Text
   * object's capacity is increased to match and any existing data is lost.
   *
   * @param capacity the number of bytes we need
   * @return true if the internal array was resized or false otherwise
   */
  private boolean ensureCapacity(final int capacity) {
    if (bytes.length < capacity) {
      // Note: the new array does NOT preserve old content; callers that need
      // it (e.g. append) must copy from the previous array themselves.
      bytes = new byte[capacity];
      return true;
    }
    return false;
  }

  @Override
  public String toString() {
    try {
      return decode(bytes, 0, length);
    } catch (CharacterCodingException e) {
      // Should not occur: decode(..., true) uses REPLACE mode.
      throw new RuntimeException("Should not have happened", e);
    }
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    int newLength = WritableUtils.readVInt(in);
    readWithKnownLength(in, newLength);
  }

  /**
   * Deserialize like {@link #readFields(DataInput)}, rejecting lengths that
   * are negative or not strictly below {@code maxLength}.
   */
  public void readFields(DataInput in, int maxLength) throws IOException {
    int newLength = WritableUtils.readVInt(in);
    if (newLength < 0) {
      throw new IOException("tried to deserialize " + newLength +
          " bytes of data!  newLength must be non-negative.");
    } else if (newLength >= maxLength) {
      throw new IOException("tried to deserialize " + newLength +
          " bytes of data, but maxLength = " + maxLength);
    }
    readWithKnownLength(in, newLength);
  }

  /**
   * Skips over one Text in the input.
   */
  public static void skip(DataInput in) throws IOException {
    int length = WritableUtils.readVInt(in);
    WritableUtils.skipFully(in, length);
  }

  /**
   * Read a Text object whose length is already known.
   * This allows creating Text from a stream which uses a different serialization
   * format.
   */
  public void readWithKnownLength(DataInput in, int len) throws IOException {
    ensureCapacity(len);
    in.readFully(bytes, 0, len);
    length = len;
    textLength = -1;
  }

  /**
   * Serialize. Write this object to out length uses zero-compressed encoding.
   *
   * @see Writable#write(DataOutput)
   */
  @Override
  public void write(DataOutput out) throws IOException {
    WritableUtils.writeVInt(out, length);
    out.write(bytes, 0, length);
  }

  /**
   * Serialize like {@link #write(DataOutput)}, but refuse to emit more than
   * {@code maxLength} bytes.
   */
  public void write(DataOutput out, int maxLength) throws IOException {
    if (length > maxLength) {
      throw new IOException("data was too long to write!  Expected " +
          "less than or equal to " + maxLength + " bytes, but got " +
          length + " bytes.");
    }
    WritableUtils.writeVInt(out, length);
    out.write(bytes, 0, length);
  }

  /**
   * Returns true iff <code>o</code> is a Text with the same length and same
   * contents.
   */
  @Override
  public boolean equals(Object o) {
    // Byte-level comparison is delegated to BinaryComparable.
    if (o instanceof Text)
      return super.equals(o);
    return false;
  }

  @Override
  public int hashCode() {
    return super.hashCode();
  }

  /** A WritableComparator optimized for Text keys. */
  public static class Comparator extends WritableComparator {
    public Comparator() {
      super(Text.class);
    }

    @Override
    public int compare(byte[] b1, int s1, int l1,
                       byte[] b2, int s2, int l2) {
      // Skip each record's vint length header, then compare the raw bytes.
      int n1 = WritableUtils.decodeVIntSize(b1[s1]);
      int n2 = WritableUtils.decodeVIntSize(b2[s2]);
      return compareBytes(b1, s1 + n1, l1 - n1, b2, s2 + n2, l2 - n2);
    }
  }

  static {
    // register this comparator
    WritableComparator.define(Text.class, new Comparator());
  }

  /// STATIC UTILITIES FROM HERE DOWN

  /**
   * Converts the provided byte array to a String using the
   * UTF-8 encoding. If the input is malformed,
   * replace by a default value.
   */
  public static String decode(byte[] utf8) throws CharacterCodingException {
    return decode(ByteBuffer.wrap(utf8), true);
  }

  public static String decode(byte[] utf8, int start, int length)
      throws CharacterCodingException {
    return decode(ByteBuffer.wrap(utf8, start, length), true);
  }

  /**
   * Converts the provided byte array to a String using the
   * UTF-8 encoding. If <code>replace</code> is true, then
   * malformed input is replaced with the
   * substitution character, which is U+FFFD. Otherwise the
   * method throws a MalformedInputException.
   */
  public static String decode(byte[] utf8, int start, int length, boolean replace)
      throws CharacterCodingException {
    return decode(ByteBuffer.wrap(utf8, start, length), replace);
  }

  private static String decode(ByteBuffer utf8, boolean replace)
      throws CharacterCodingException {
    CharsetDecoder decoder = DECODER_FACTORY.get();
    if (replace) {
      decoder.onMalformedInput(
          java.nio.charset.CodingErrorAction.REPLACE);
      decoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
    }
    String str = decoder.decode(utf8).toString();
    // set decoder back to its default value: REPORT
    if (replace) {
      decoder.onMalformedInput(CodingErrorAction.REPORT);
      decoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    }
    return str;
  }

  /**
   * Converts the provided String to bytes using the
   * UTF-8 encoding. If the input is malformed,
   * invalid chars are replaced by a default value.
   * @return ByteBuffer: bytes stores at ByteBuffer.array()
   *                     and length is ByteBuffer.limit()
   */
  public static ByteBuffer encode(String string)
      throws CharacterCodingException {
    return encode(string, true);
  }

  /**
   * Converts the provided String to bytes using the
   * UTF-8 encoding. If <code>replace</code> is true, then
   * malformed input is replaced with the
   * substitution character, which is U+FFFD. Otherwise the
   * method throws a MalformedInputException.
   * @return ByteBuffer: bytes stores at ByteBuffer.array()
   *                     and length is ByteBuffer.limit()
   */
  public static ByteBuffer encode(String string, boolean replace)
      throws CharacterCodingException {
    CharsetEncoder encoder = ENCODER_FACTORY.get();
    if (replace) {
      encoder.onMalformedInput(CodingErrorAction.REPLACE);
      encoder.onUnmappableCharacter(CodingErrorAction.REPLACE);
    }
    ByteBuffer bytes =
        encoder.encode(CharBuffer.wrap(string.toCharArray()));
    if (replace) {
      // Restore the shared (thread-local) encoder to strict mode.
      encoder.onMalformedInput(CodingErrorAction.REPORT);
      encoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    }
    return bytes;
  }

  static final public int DEFAULT_MAX_LEN = 1024 * 1024;

  /** Read a UTF8 encoded string from in
   */
  public static String readString(DataInput in) throws IOException {
    return readString(in, Integer.MAX_VALUE);
  }

  /** Read a UTF8 encoded string with a maximum size
   */
  public static String readString(DataInput in, int maxLength)
      throws IOException {
    int length = WritableUtils.readVIntInRange(in, 0, maxLength);
    byte [] bytes = new byte[length];
    in.readFully(bytes, 0, length);
    return decode(bytes);
  }

  /**
   * Write a UTF8 encoded string to out.
   */
  public static int writeString(DataOutput out, String s) throws IOException {
    ByteBuffer bytes = encode(s);
    int length = bytes.limit();
    WritableUtils.writeVInt(out, length);
    out.write(bytes.array(), 0, length);
    return length;
  }

  /**
   * Write a UTF8 encoded string with a maximum size to out.
   */
  public static int writeString(DataOutput out, String s, int maxLength)
      throws IOException {
    ByteBuffer bytes = encode(s);
    int length = bytes.limit();
    if (length > maxLength) {
      throw new IOException("string was too long to write!  Expected " +
          "less than or equal to " + maxLength + " bytes, but got " +
          length + " bytes.");
    }
    WritableUtils.writeVInt(out, length);
    out.write(bytes.array(), 0, length);
    return length;
  }

  ////// states for validateUTF8

  private static final int LEAD_BYTE = 0;

  private static final int TRAIL_BYTE_1 = 1;

  private static final int TRAIL_BYTE = 2;

  /**
   * Check if a byte array contains valid UTF-8.
   *
   * @param utf8 byte array
   * @throws MalformedInputException if the byte array contains invalid UTF-8
   */
  public static void validateUTF8(byte[] utf8) throws MalformedInputException {
    validateUTF8(utf8, 0, utf8.length);
  }

  /**
   * Check to see if a byte array is valid UTF-8.
   *
   * @param utf8 the array of bytes
   * @param start the offset of the first byte in the array
   * @param len the length of the byte sequence
   * @throws MalformedInputException if the byte array contains invalid bytes
   */
  public static void validateUTF8(byte[] utf8, int start, int len)
      throws MalformedInputException {
    // State machine over lead/trail bytes; the TRAIL_BYTE_1 state additionally
    // rejects overlong encodings and surrogate/out-of-range code points.
    int count = start;
    int leadByte = 0;
    int length = 0;
    int state = LEAD_BYTE;
    while (count < start+len) {
      int aByte = utf8[count] & 0xFF;

      switch (state) {
      case LEAD_BYTE:
        leadByte = aByte;
        length = bytesFromUTF8[aByte];

        switch (length) {
        case 0: // check for ASCII
          if (leadByte > 0x7F)
            throw new MalformedInputException(count);
          break;
        case 1:
          if (leadByte < 0xC2 || leadByte > 0xDF)
            throw new MalformedInputException(count);
          state = TRAIL_BYTE_1;
          break;
        case 2:
          if (leadByte < 0xE0 || leadByte > 0xEF)
            throw new MalformedInputException(count);
          state = TRAIL_BYTE_1;
          break;
        case 3:
          if (leadByte < 0xF0 || leadByte > 0xF4)
            throw new MalformedInputException(count);
          state = TRAIL_BYTE_1;
          break;
        default:
          // too long! Longest valid UTF-8 is 4 bytes (lead + three)
          // or if < 0 we got a trail byte in the lead byte position
          throw new MalformedInputException(count);
        } // switch (length)
        break;

      case TRAIL_BYTE_1:
        if (leadByte == 0xF0 && aByte < 0x90)
          throw new MalformedInputException(count);
        if (leadByte == 0xF4 && aByte > 0x8F)
          throw new MalformedInputException(count);
        if (leadByte == 0xE0 && aByte < 0xA0)
          throw new MalformedInputException(count);
        if (leadByte == 0xED && aByte > 0x9F)
          throw new MalformedInputException(count);
        // falls through to regular trail-byte test!!
      case TRAIL_BYTE:
        if (aByte < 0x80 || aByte > 0xBF)
          throw new MalformedInputException(count);
        if (--length == 0) {
          state = LEAD_BYTE;
        } else {
          state = TRAIL_BYTE;
        }
        break;
      default:
        break;
      } // switch (state)
      count++;
    }
  }

  /**
   * Magic numbers for UTF-8. These are the number of bytes
   * that <em>follow</em> a given lead byte. Trailing bytes
   * have the value -1. The values 4 and 5 are presented in
   * this table, even though valid UTF-8 cannot include the
   * five and six byte sequences.
   */
  static final int[] bytesFromUTF8 =
  { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0,
    // trail bytes
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3,
    3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5 };

  /**
   * Returns the next code point at the current position in
   * the buffer. The buffer's position will be incremented.
   * Any mark set on this buffer will be changed by this method!
   */
  public static int bytesToCodePoint(ByteBuffer bytes) {
    bytes.mark();
    byte b = bytes.get();
    bytes.reset();
    int extraBytesToRead = bytesFromUTF8[(b & 0xFF)];
    if (extraBytesToRead < 0) return -1; // trailing byte!
    int ch = 0;

    // Deliberate fall-through: each case accumulates one byte, then the
    // encoding offset for the sequence length is subtracted at the end.
    switch (extraBytesToRead) {
    case 5: ch += (bytes.get() & 0xFF); ch <<= 6; /* remember, illegal UTF-8 */
    case 4: ch += (bytes.get() & 0xFF); ch <<= 6; /* remember, illegal UTF-8 */
    case 3: ch += (bytes.get() & 0xFF); ch <<= 6;
    case 2: ch += (bytes.get() & 0xFF); ch <<= 6;
    case 1: ch += (bytes.get() & 0xFF); ch <<= 6;
    case 0: ch += (bytes.get() & 0xFF);
    }
    ch -= offsetsFromUTF8[extraBytesToRead];

    return ch;
  }

  // Per-sequence-length correction subtracted in bytesToCodePoint; cancels the
  // lead-byte marker bits accumulated by the fall-through switch above.
  static final int offsetsFromUTF8[] =
  { 0x00000000, 0x00003080,
    0x000E2080, 0x03C82080, 0xFA082080, 0x82082080 };

  /**
   * For the given string, returns the number of UTF-8 bytes
   * required to encode the string.
   * @param string text to encode
   * @return number of UTF-8 bytes required to encode
   */
  public static int utf8Length(String string) {
    CharacterIterator iter = new StringCharacterIterator(string);
    char ch = iter.first();
    int size = 0;
    while (ch != CharacterIterator.DONE) {
      if ((ch >= 0xD800) && (ch < 0xDC00)) {
        // surrogate pair?
        char trail = iter.next();
        if ((trail > 0xDBFF) && (trail < 0xE000)) {
          // valid pair
          size += 4;
        } else {
          // invalid pair
          size += 3;
          iter.previous(); // rewind one
        }
      } else if (ch < 0x80) {
        size++;
      } else if (ch < 0x800) {
        size += 2;
      } else {
        // ch < 0x10000, that is, the largest char value
        size += 3;
      }
      ch = iter.next();
    }
    return size;
  }
}
| |
package eu.modernmt.persistence.cassandra;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.exceptions.CodecNotFoundException;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import eu.modernmt.model.Memory;
import eu.modernmt.persistence.MemoryDAO;
import eu.modernmt.persistence.PersistenceException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
 * Created by andrea on 08/03/17.
 * <p>
 * DAO that performs CRUD operations on {@link Memory} objects against a
 * Cassandra database, using a {@link CassandraConnection} supplied by the
 * caller.
 */
public class CassandraMemoryDAO implements MemoryDAO {
    private CassandraConnection connection;

    /**
     * Creates a DAO bound to the given Cassandra connection.
     *
     * @param connection the connection used for every Memory CRUD operation
     */
    public CassandraMemoryDAO(CassandraConnection connection) {
        this.connection = connection;
    }

    /**
     * Fetches a single Memory by its ID.
     *
     * @param id the ID the Memory was stored with
     * @return the matching Memory, or null when no row has that ID
     * @throws PersistenceException
     */
    @Override
    public Memory retrieve(long id) throws PersistenceException {
        BuiltStatement query = QueryBuilder.select()
                .from(CassandraDatabase.MEMORIES_TABLE)
                .where(QueryBuilder.eq("id", id));
        ResultSet rows = CassandraUtils.checkedExecute(connection, query);
        return read(rows.one());
    }

    /**
     * Converts a row of the memories table into a Memory object.
     *
     * @param row a row fetched from the memories table (may be null)
     * @return the Memory built from the row, or null when the row is null
     * @throws PersistenceException
     */
    private static Memory read(Row row) throws PersistenceException {
        if (row == null)
            return null;

        try {
            // Pull the two persisted columns and rebuild the domain object.
            long memoryId = row.getLong("id");
            String memoryName = row.getString("name");
            return new Memory(memoryId, memoryName);
        } catch (IllegalArgumentException e) {
            throw new PersistenceException("code name not valid for this object", e);
        } catch (CodecNotFoundException e) {
            throw new PersistenceException("there is no registered codec to convert the underlying CQL type to a long", e);
        }
    }

    /**
     * Fetches every Memory whose ID appears in the given collection.
     *
     * @param ids the IDs of the Memories to fetch
     * @return a map from ID to the Memory stored under it
     * @throws PersistenceException
     */
    @Override
    public Map<Long, Memory> retrieve(Collection<Long> ids) throws PersistenceException {
        Map<Long, Memory> memoriesById = new HashMap<>(ids.size());

        // Nothing requested: skip the round-trip entirely.
        if (ids.isEmpty())
            return memoriesById;

        ArrayList<Long> idList = new ArrayList<>(ids);
        BuiltStatement query = QueryBuilder
                .select()
                .from(CassandraDatabase.MEMORIES_TABLE)
                .where(QueryBuilder.in("id", idList));

        ResultSet rows = CassandraUtils.checkedExecute(connection, query);

        // Materialize each returned row into the result map.
        while (!rows.isExhausted()) {
            Memory memory = read(rows.one());
            memoriesById.put(memory.getId(), memory);
        }
        return memoriesById;
    }

    /**
     * Fetches every Memory stored in the memories table.
     *
     * @return all Memory objects currently in the DB
     * @throws PersistenceException
     */
    @Override
    public Collection<Memory> retrieveAll() throws PersistenceException {
        BuiltStatement query = QueryBuilder.select()
                .from(CassandraDatabase.MEMORIES_TABLE)
                .where();
        ResultSet rows = CassandraUtils.checkedExecute(connection, query);

        ArrayList<Memory> memories = new ArrayList<>();
        for (Row row : rows.all())
            memories.add(read(row));
        return memories;
    }

    /**
     * Stores a Memory under a new, sequentially generated ID.
     *
     * @param memory the Memory to store
     * @return the same Memory, updated with its newly assigned ID
     * @throws PersistenceException if the row could not be inserted
     */
    @Override
    public Memory store(Memory memory) throws PersistenceException {
        return store(memory, false);
    }

    /**
     * Stores a Memory, either under its own ID or under a freshly
     * generated one.
     *
     * @param memory  the Memory to store
     * @param forceId when true, keep the Memory's current ID (and advance the
     *                ID counter past it); when false, generate the next ID
     * @return the stored Memory (ID updated when forceId is false)
     * @throws PersistenceException if the row could not be inserted
     */
    @Override
    public Memory store(Memory memory, boolean forceId) throws PersistenceException {
        final long id;
        if (forceId) {
            id = memory.getId();
            // Keep the sequence ahead of any explicitly supplied ID.
            CassandraIdGenerator.advanceCounter(connection, CassandraDatabase.MEMORIES_TABLE_ID, id);
        } else {
            id = CassandraIdGenerator.generate(connection, CassandraDatabase.MEMORIES_TABLE_ID);
        }

        String[] columnNames = {"id", "name"};
        Object[] columnValues = {id, memory.getName()};
        BuiltStatement insert = QueryBuilder
                .insertInto(CassandraDatabase.MEMORIES_TABLE)
                .values(columnNames, columnValues)
                .ifNotExists();

        // ifNotExists makes the insert conditional; wasApplied reports success.
        if (!CassandraUtils.checkedExecute(connection, insert).wasApplied())
            throw new PersistenceException("Unable to insert memory into Cassandra Database: " + memory);

        memory.setId(id);
        return memory;
    }

    /**
     * Overwrites the row with the same ID as the given Memory.
     * If no such row exists, nothing is changed.
     *
     * @param memory the Memory whose stored row should be replaced
     * @return the same Memory when a row was overwritten, or null when no
     *         row with that ID existed
     * @throws PersistenceException
     */
    @Override
    public Memory update(Memory memory) throws PersistenceException {
        BuiltStatement updateQuery = QueryBuilder.update(CassandraDatabase.MEMORIES_TABLE).
                with(QueryBuilder.set("name", memory.getName())).
                where(QueryBuilder.eq("id", memory.getId())).
                ifExists();

        ResultSet result = CassandraUtils.checkedExecute(connection, updateQuery);
        return result.wasApplied() ? memory : null;
    }

    /**
     * Deletes the Memory stored under the given ID.
     *
     * @param id the ID of the Memory to delete
     * @return true when a row was deleted, false when no row had that ID
     * @throws PersistenceException
     */
    @Override
    public boolean delete(long id) throws PersistenceException {
        BuiltStatement deleteQuery = QueryBuilder.delete().
                from(CassandraDatabase.MEMORIES_TABLE).
                where(QueryBuilder.eq("id", id)).
                ifExists();

        ResultSet result = CassandraUtils.checkedExecute(connection, deleteQuery);
        return result.wasApplied();
    }
}
| |
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.intents;
import apple.NSObject;
import apple.eventkit.EKRecurrenceRule;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSDateComponents;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.protocol.NSCopying;
import apple.foundation.protocol.NSSecureCoding;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * Generated MOE (Multi-OS Engine) binding for the iOS Intents framework
 * class INDateComponentsRange: a span of time described by a start and an
 * end NSDateComponents, optionally with an INRecurrenceRule, plus a bridge
 * to EventKit's EKRecurrenceRule. Every {@code native} method trampolines
 * into the Objective-C runtime via NatJ. This file is machine generated;
 * do not edit by hand.
 */
@Generated
@Library("Intents")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class INDateComponentsRange extends NSObject implements NSCopying, NSSecureCoding {
static {
// Register this class's native bindings with the NatJ runtime on load.
NatJ.register();
}
// Wraps an existing Objective-C peer; used by the runtime, not by callers.
@Generated
protected INDateComponentsRange(Pointer peer) {
super(peer);
}
//
// Boilerplate NSObject class methods mirrored by the binding generator.
//
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
@Generated
@Owned
@Selector("alloc")
public static native INDateComponentsRange alloc();
@Owned
@Generated
@Selector("allocWithZone:")
public static native INDateComponentsRange allocWithZone(VoidPtr zone);
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
@Generated
@Selector("description")
public static native String description_static();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
@Generated
@Owned
@Selector("new")
public static native INDateComponentsRange new_objc();
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
@Generated
@Selector("superclass")
public static native Class superclass_static();
// NSSecureCoding: whether instances may be securely archived/unarchived.
@Generated
@Selector("supportsSecureCoding")
public static native boolean supportsSecureCoding();
@Generated
@Selector("version")
@NInt
public static native long version_static();
//
// NSCopying / NSCoding conformance.
//
@Generated
@Owned
@Selector("copyWithZone:")
@MappedReturn(ObjCObjectMapper.class)
public native Object copyWithZone(VoidPtr zone);
@Generated
@Selector("encodeWithCoder:")
public native void encodeWithCoder(NSCoder coder);
// End of the range. NOTE(review): presumably may be nil for open-ended
// ranges -- confirm against Apple's INDateComponentsRange documentation.
@Generated
@Selector("endDateComponents")
public native NSDateComponents endDateComponents();
@Generated
@Selector("init")
public native INDateComponentsRange init();
@Generated
@Selector("initWithCoder:")
public native INDateComponentsRange initWithCoder(NSCoder coder);
// Initialize a range from explicit start and end components.
@Generated
@Selector("initWithStartDateComponents:endDateComponents:")
public native INDateComponentsRange initWithStartDateComponentsEndDateComponents(
NSDateComponents startDateComponents, NSDateComponents endDateComponents);
// Start of the range.
@Generated
@Selector("startDateComponents")
public native NSDateComponents startDateComponents();
// Instance-side bridge to the static supportsSecureCoding() class method,
// required by the NatJ protocol-method mapping.
@Generated
@ProtocolClassMethod("supportsSecureCoding")
public boolean _supportsSecureCoding() {
return supportsSecureCoding();
}
//
// EventKit interop and recurrence support.
//
@Generated
@Selector("EKRecurrenceRule")
public native EKRecurrenceRule EKRecurrenceRule();
@Generated
@Selector("initWithEKRecurrenceRule:")
public native INDateComponentsRange initWithEKRecurrenceRule(EKRecurrenceRule recurrenceRule);
@Generated
@Selector("initWithStartDateComponents:endDateComponents:recurrenceRule:")
public native INDateComponentsRange initWithStartDateComponentsEndDateComponentsRecurrenceRule(
NSDateComponents startDateComponents, NSDateComponents endDateComponents, INRecurrenceRule recurrenceRule);
@Generated
@Selector("recurrenceRule")
public native INRecurrenceRule recurrenceRule();
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.gen;
import com.facebook.presto.byteCode.Block;
import com.facebook.presto.byteCode.ByteCodeNode;
import com.facebook.presto.byteCode.ClassDefinition;
import com.facebook.presto.byteCode.Scope;
import com.facebook.presto.byteCode.MethodDefinition;
import com.facebook.presto.byteCode.Parameter;
import com.facebook.presto.byteCode.Variable;
import com.facebook.presto.byteCode.control.ForLoop;
import com.facebook.presto.byteCode.control.IfStatement;
import com.facebook.presto.byteCode.instruction.LabelNode;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.operator.CursorProcessor;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.PageBuilder;
import com.facebook.presto.spi.RecordCursor;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.relational.CallExpression;
import com.facebook.presto.sql.relational.ConstantExpression;
import com.facebook.presto.sql.relational.InputReferenceExpression;
import com.facebook.presto.sql.relational.RowExpression;
import com.facebook.presto.sql.relational.RowExpressionVisitor;
import com.google.common.primitives.Primitives;
import java.util.List;
import static com.facebook.presto.byteCode.Access.PUBLIC;
import static com.facebook.presto.byteCode.Access.a;
import static com.facebook.presto.byteCode.Parameter.arg;
import static com.facebook.presto.byteCode.OpCode.NOP;
import static com.facebook.presto.byteCode.ParameterizedType.type;
import static com.facebook.presto.sql.gen.ByteCodeUtils.generateWrite;
import static java.lang.String.format;
/**
 * Compiles a query's filter and projection {@link RowExpression}s into the
 * body of a generated {@link CursorProcessor} class: a process() driver
 * loop, a filter() method, and one project_N() method per projection.
 */
public class CursorProcessorCompiler
implements BodyCompiler<CursorProcessor>
{
private final Metadata metadata;
public CursorProcessorCompiler(Metadata metadata)
{
this.metadata = metadata;
}
/**
 * Generate every method of the processor class: process(), filter(), and
 * project_0 .. project_(N-1), one per projection expression.
 */
@Override
public void generateMethods(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, RowExpression filter, List<RowExpression> projections)
{
generateProcessMethod(classDefinition, projections.size());
generateFilterMethod(classDefinition, callSiteBinder, filter);
for (int i = 0; i < projections.size(); i++) {
generateProjectMethod(classDefinition, callSiteBinder, "project_" + i, projections.get(i));
}
}
/**
 * Generate the driver method
 * {@code int process(session, cursor, count, pageBuilder)}: advance the
 * cursor up to {@code count} positions (stopping early when the page
 * builder is full), apply filter(), and for passing rows write each
 * projection into the corresponding block builder. Returns the number of
 * positions examined.
 */
private void generateProcessMethod(ClassDefinition classDefinition, int projections)
{
Parameter session = arg("session", ConnectorSession.class);
Parameter cursor = arg("cursor", RecordCursor.class);
Parameter count = arg("count", int.class);
Parameter pageBuilder = arg("pageBuilder", PageBuilder.class);
MethodDefinition method = classDefinition.declareMethod(a(PUBLIC), "process", type(int.class), session, cursor, count, pageBuilder);
Scope scope = method.getScope();
Variable completedPositionsVariable = scope.declareVariable(int.class, "completedPositions");
method.getBody()
.comment("int completedPositions = 0;")
.putVariable(completedPositionsVariable, 0);
//
// for loop body
//
LabelNode done = new LabelNode("done");
ForLoop forLoop = new ForLoop()
.initialize(NOP)
.condition(new Block()
.comment("completedPositions < count")
.getVariable(completedPositionsVariable)
.getVariable(count)
.invokeStatic(CompilerOperations.class, "lessThan", boolean.class, int.class, int.class)
)
.update(new Block()
.comment("completedPositions++")
.incrementVariable(completedPositionsVariable, (byte) 1)
);
Block forLoopBody = new Block()
.comment("if (pageBuilder.isFull()) break;")
.append(new Block()
.getVariable(pageBuilder)
.invokeVirtual(PageBuilder.class, "isFull", boolean.class)
.ifTrueGoto(done))
.comment("if (!cursor.advanceNextPosition()) break;")
.append(new Block()
.getVariable(cursor)
.invokeInterface(RecordCursor.class, "advanceNextPosition", boolean.class)
.ifFalseGoto(done));
forLoop.body(forLoopBody);
// if (filter(cursor))
IfStatement ifStatement = new IfStatement();
ifStatement.condition()
.append(method.getThis())
.getVariable(session)
.getVariable(cursor)
.invokeVirtual(classDefinition.getType(), "filter", type(boolean.class), type(ConnectorSession.class), type(RecordCursor.class));
// pageBuilder.declarePosition();
ifStatement.ifTrue()
.getVariable(pageBuilder)
.invokeVirtual(PageBuilder.class, "declarePosition", void.class);
// Emit, for each projection i:
//   this.project_<i>(session, cursor, pageBuilder.getBlockBuilder(<i>));
for (int projectionIndex = 0; projectionIndex < projections; projectionIndex++) {
ifStatement.ifTrue()
.append(method.getThis())
.getVariable(session)
.getVariable(cursor);
// pageBuilder.getBlockBuilder(projectionIndex)
ifStatement.ifTrue()
.getVariable(pageBuilder)
.push(projectionIndex)
.invokeVirtual(PageBuilder.class, "getBlockBuilder", BlockBuilder.class, int.class);
// project_<i>(session, cursor, blockBuilder)
ifStatement.ifTrue()
.invokeVirtual(classDefinition.getType(),
"project_" + projectionIndex,
type(void.class),
type(ConnectorSession.class),
type(RecordCursor.class),
type(BlockBuilder.class));
}
forLoopBody.append(ifStatement);
method.getBody()
.append(forLoop)
.visitLabel(done)
.comment("return completedPositions;")
.getVariable(completedPositionsVariable)
.retInt();
}
/**
 * Generate {@code boolean filter(session, cursor)}: evaluate the compiled
 * filter expression against the current cursor row, treating a SQL NULL
 * result (wasNull == true) as false.
 */
private void generateFilterMethod(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, RowExpression filter)
{
Parameter session = arg("session", ConnectorSession.class);
Parameter cursor = arg("cursor", RecordCursor.class);
MethodDefinition method = classDefinition.declareMethod(a(PUBLIC), "filter", type(boolean.class), session, cursor);
method.comment("Filter: %s", filter);
Scope scope = method.getScope();
Variable wasNullVariable = scope.declareVariable(type(boolean.class), "wasNull");
ByteCodeExpressionVisitor visitor = new ByteCodeExpressionVisitor(callSiteBinder, fieldReferenceCompiler(cursor, wasNullVariable), metadata.getFunctionRegistry());
LabelNode end = new LabelNode("end");
method.getBody()
.comment("boolean wasNull = false;")
.putVariable(wasNullVariable, false)
.comment("evaluate filter: " + filter)
.append(filter.accept(visitor, scope))
.comment("if (wasNull) return false;")
.getVariable(wasNullVariable)
.ifFalseGoto(end)
// wasNull: discard the expression value and substitute false.
.pop(boolean.class)
.push(false)
.visitLabel(end)
.retBoolean();
}
/**
 * Generate {@code void <methodName>(session, cursor, output)}: evaluate
 * the projection expression against the current cursor row and append the
 * result (honoring wasNull) to the output block builder.
 */
private void generateProjectMethod(ClassDefinition classDefinition, CallSiteBinder callSiteBinder, String methodName, RowExpression projection)
{
Parameter session = arg("session", ConnectorSession.class);
Parameter cursor = arg("cursor", RecordCursor.class);
Parameter output = arg("output", BlockBuilder.class);
MethodDefinition method = classDefinition.declareMethod(a(PUBLIC), methodName, type(void.class), session, cursor, output);
method.comment("Projection: %s", projection.toString());
Scope scope = method.getScope();
Variable wasNullVariable = scope.declareVariable(type(boolean.class), "wasNull");
Block body = method.getBody()
.comment("boolean wasNull = false;")
.putVariable(wasNullVariable, false);
ByteCodeExpressionVisitor visitor = new ByteCodeExpressionVisitor(callSiteBinder, fieldReferenceCompiler(cursor, wasNullVariable), metadata.getFunctionRegistry());
body.getVariable(output)
.comment("evaluate projection: " + projection.toString())
.append(projection.accept(visitor, scope))
.append(generateWrite(callSiteBinder, scope, wasNullVariable, projection.getType()))
.ret();
}
/**
 * Visitor that compiles input (field) references into RecordCursor reads:
 * if cursor.isNull(field) set wasNull and push the Java default value,
 * otherwise push cursor.get<Type>(field). Calls and constants are handled
 * elsewhere and are unsupported here.
 */
private RowExpressionVisitor<Scope, ByteCodeNode> fieldReferenceCompiler(final Variable cursorVariable, final Variable wasNullVariable)
{
return new RowExpressionVisitor<Scope, ByteCodeNode>()
{
@Override
public ByteCodeNode visitInputReference(InputReferenceExpression node, Scope scope)
{
int field = node.getField();
Type type = node.getType();
Class<?> javaType = type.getJavaType();
IfStatement ifStatement = new IfStatement();
ifStatement.condition()
.setDescription(format("cursor.get%s(%d)", type, field))
.getVariable(cursorVariable)
.push(field)
.invokeInterface(RecordCursor.class, "isNull", boolean.class, int.class);
ifStatement.ifTrue()
.putVariable(wasNullVariable, true)
.pushJavaDefault(javaType);
ifStatement.ifFalse()
.getVariable(cursorVariable)
.push(field)
.invokeInterface(RecordCursor.class, "get" + Primitives.wrap(javaType).getSimpleName(), javaType, int.class);
return ifStatement;
}
@Override
public ByteCodeNode visitCall(CallExpression call, Scope scope)
{
throw new UnsupportedOperationException("not yet implemented");
}
@Override
public ByteCodeNode visitConstant(ConstantExpression literal, Scope scope)
{
throw new UnsupportedOperationException("not yet implemented");
}
};
}
}
| |
/*
* Copyright (C) 2015 University of Oregon
*
* You may distribute under the terms of either the GNU General Public
* License or the Apache License, as specified in the LICENSE file.
*
* For more information, see the LICENSE file.
*/
package vnmr.ui;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.swing.SwingUtilities;
import org.w3c.dom.NamedNodeMap;
import vnmr.templates.ProtocolBuilder;
import vnmr.templates.SQActionRenderer;
import vnmr.templates.SQBuild;
import vnmr.templates.SQNode;
import vnmr.templates.SQNodeList;
import vnmr.templates.VElement;
import vnmr.util.DebugOutput;
import vnmr.util.FileChangeEvent;
import vnmr.util.FileListener;
import vnmr.util.FileWatcher;
import vnmr.util.Messages;
import vnmr.util.QuotedStringTokenizer;
import vnmr.util.Util;
import vnmr.util.VnmrParameter;
import static vnmr.templates.ProtocolBuilder.*;
import static vnmr.ui.StudyQueue.*;
import static vnmr.util.FileChangeEvent.*;
public class SQUpdater implements FileListener {
/** Queued-experiment info records (not referenced in the code visible here). */
private List<QInfo> m_qInfos;
/** Root directory where acquired study data is archived. */
private String m_archiveDir;
/** Automation run directory (contains doneQ, enterQ.macdir, etc.). */
private String m_autodir;
/** Experiment directory holding the current Study Queue (EXPLIST, tmpstudy). */
private String m_curSQExp;
/** SQ info directory; assumed to be <curSQExp>/tmpstudy/info -- see the
 * commented-out assignment in updateStatsBuildUI(). */
private String m_sqInfoDir;
/** Directory holding the current sample's data, under the archive dir. */
private String m_dataDir;
/** Current sample name (subdirectory under the archive directory). */
private String m_sample;
//private String m_sampleNumber;
/** Watched path: <curSQExp>/EXPLIST. */
private String m_queueWatchPath;
/** Watched path: <curSQExp>/tmpstudy/enterSQ. */
private String m_queueWatchPath2;
/** Watched path in the automation directory (typically <autodir>/doneQ). */
private String m_autoWatchPath;
/** Watched path: <dataDir>/dirinfo/macdir/ACQlist for the current sample. */
private String m_dataWatchPath;
/** The Study Queue GUI component this updater refreshes. */
private StudyQueue m_studyQueue;
/** Vnmr command sent to request a full SQ refresh ("" means none). */
private String m_updateCmd = "";
/** Update mode: "auto", "bgstudy", "submit", "more", "build", or "active". */
private String m_mode;
/** Active "loc" data path currently being watched in build/active modes. */
private String m_activeLocdir;
/** Single shared watcher; replaced whenever a new updater is constructed. */
static private FileWatcher m_currentFileWatcher;
/**
 * Construct an updater for "bgstudy" mode: watch the ACQlist file of the
 * given sample's data directory and refresh the SQ when it changes.
 *
 * @param sq The Study Queue to keep updated.
 * @param mode The update mode (callers pass "bgstudy").
 * @param svfdir Directory containing per-sample data directories.
 * @param sample Sample subdirectory name under svfdir.
 */
public SQUpdater(StudyQueue sq, String mode, String svfdir, String sample) {
m_studyQueue = sq;
m_mode = mode;
m_dataDir = svfdir + "/" + sample;
m_dataWatchPath = m_dataDir + "/dirinfo/macdir/ACQlist";
String[] paths = {m_dataWatchPath};
// Only one watcher at a time: stop any previous one before starting ours.
quitWatching();
m_currentFileWatcher = new FileWatcher(this, paths);
m_currentFileWatcher.start();
}
/**
 * Construct an updater as with the 5-argument constructor, additionally
 * setting the Vnmr command used to request a full SQ refresh.
 *
 * @param sq The Study Queue to keep updated.
 * @param mode The update mode ("auto", "more", "submit", ...).
 * @param studydir Experiment directory holding the current SQ.
 * @param autodir Automation run directory.
 * @param svfdir Data archive directory.
 * @param cmd Vnmr command to send when a full refresh is needed.
 */
public SQUpdater(StudyQueue sq, String mode, String studydir,
String autodir, String svfdir, String cmd) {
this(sq, mode, studydir, autodir, svfdir);
m_updateCmd = cmd;
}
/**
 * Construct an updater from a raw argument vector, where args[0] is the
 * mode. Only "build" and "active" modes are handled here; any other mode
 * leaves the updater constructed but not watching anything.
 *
 * @param sq The Study Queue to keep updated.
 * @param args Mode followed by mode-specific arguments.
 */
public SQUpdater(StudyQueue sq, String[] args) {
    m_studyQueue = sq;
    m_mode = args[0];
    if ("build".equalsIgnoreCase(m_mode)) {
        initBuild(args);
    } else if ("active".equalsIgnoreCase(m_mode)) {
        initActive(args);
    }
}
/**
 * Initialize "build" mode: remember the directories and sample from the
 * argument vector, then watch the sample's ACQlist and the automation
 * doneQ file, and do an initial (isInitial=true) stats update.
 *
 * @param args {mode, curSQExp, autodir, archiveDir, sample, updateCmd}.
 */
private void initBuild(String[] args) {
m_curSQExp = args[1];
m_autodir = args[2];
m_archiveDir = args[3];
m_sample = args[4];
m_updateCmd = args[5];
ArrayList<String> alPaths = new ArrayList<String>();
m_autoWatchPath = m_autodir + "/doneQ";
//m_queueWatchPath = m_curSQExp + "/EXPLIST";
//m_queueWatchPath2 = m_curSQExp + "/tmpstudy/enterSQ";
//m_dataDir = getDataDir(m_autodir);
m_dataWatchPath = m_archiveDir + "/" + m_sample + "/dirinfo/macdir/ACQlist";
alPaths.add(m_dataWatchPath);
alPaths.add(m_autoWatchPath);
String[] paths = alPaths.toArray(new String[0]);
// Only one watcher at a time: stop any previous one before starting ours.
quitWatching();
m_currentFileWatcher = new FileWatcher(this, paths);
m_currentFileWatcher.start();
updateStatsBuild(true);
}
/**
 * Initialize "active" mode: remember the directories and sample from the
 * argument vector, then watch the automation doneQ, the automation
 * cpdoneQ, and the sample's ACQlist for changes.
 *
 * @param args {mode, curSQExp, autodir, archiveDir, sample, updateCmd}.
 */
private void initActive(String[] args) {
    m_curSQExp = args[1];
    m_autodir = args[2];
    m_archiveDir = args[3];
    m_sample = args[4];
    m_updateCmd = args[5];
    ArrayList<String> alPaths = new ArrayList<String>();
    // m_autoWatchPath = m_autodir + "/enterQ.macdir/fidlog";
    m_autoWatchPath = m_autodir + "/doneQ";
    // Bug fix: the "/" separator was missing before "enterQ.macdir", so
    // the watcher monitored a nonexistent path like
    // ".../vnmrsysenterQ.macdir/cpdoneQ" and never saw cpdoneQ changes.
    String autoWatchPath2 = m_autodir + "/enterQ.macdir/cpdoneQ";
    m_dataWatchPath = m_archiveDir + "/" + m_sample + "/dirinfo/macdir/ACQlist";
    alPaths.add(m_autoWatchPath);
    alPaths.add(autoWatchPath2);
    alPaths.add(m_dataWatchPath);
    String[] paths = alPaths.toArray(new String[0]);
    // Only one watcher at a time: stop any previous one before starting ours.
    quitWatching();
    m_currentFileWatcher = new FileWatcher(this, paths);
    m_currentFileWatcher.start();
    //updateStatsActive();
}
/**
 * Construct an updater for "auto", "build", or queue-watching ("submit",
 * "more", ...) modes. The set of files watched depends on the mode.
 *
 * @param sq The Study Queue to keep updated.
 * @param mode The update mode.
 * @param studydir Experiment directory holding the current SQ.
 * @param autodir Automation run directory.
 * @param svfdir Data archive directory.
 */
public SQUpdater(StudyQueue sq, String mode, String studydir,
String autodir, String svfdir) {
m_studyQueue = sq;
m_mode = mode;
m_curSQExp = studydir;
m_autodir = autodir;
m_archiveDir = svfdir;
ArrayList<String> alPaths = new ArrayList<String>();
m_autoWatchPath = m_autodir + "/doneQ";
m_queueWatchPath = m_curSQExp + "/EXPLIST";
m_queueWatchPath2 = m_curSQExp + "/tmpstudy/enterSQ";
// getDataDir is defined elsewhere in this file; presumably it derives the
// current sample's data directory from the automation run.
m_dataDir = getDataDir(m_autodir);
m_dataWatchPath = m_archiveDir + "/" + m_dataDir + "/dirinfo/macdir/ACQlist";
if ("auto".equals(mode)) {
// Auto mode only needs the doneQ file.
//alPaths.add(m_dataWatchPath);
alPaths.add(m_autoWatchPath);
} else if ("build".equals(mode)) {
alPaths.add(m_dataWatchPath);
alPaths.add(m_autoWatchPath);
} else {
// "submit", "more", etc.: watch the queue files as well.
alPaths.add(m_autoWatchPath);
alPaths.add(m_queueWatchPath);
alPaths.add(m_queueWatchPath2);
alPaths.add(m_dataWatchPath);
}
String[] paths = alPaths.toArray(new String[0]);
// Only one watcher at a time: stop any previous one before starting ours.
quitWatching();
m_currentFileWatcher = new FileWatcher(this, paths);
m_currentFileWatcher.start();
}
/**
 * Dispatch an SQWatch-style command. "off" stops watching; the other
 * recognized commands construct a new updater (which installs a fresh file
 * watcher). Unrecognized commands are silently accepted.
 *
 * @param sq The Study Queue the new updater should refresh.
 * @param args Command name followed by its arguments.
 * @return False for an empty argument list or a recognized command with a
 * bad argument count; true otherwise.
 */
static public boolean processCommand(StudyQueue sq, String[] args) {
    if (args.length < 1) {
        return false;
    }
    String cmd = args[0];
    if (cmd.equalsIgnoreCase("off")) {
        quitWatching();
    } else if (cmd.equalsIgnoreCase("bgstudy")) {
        new SQUpdater(sq, cmd, args[1], args[2]);
    } else if (cmd.equalsIgnoreCase("build") || cmd.equalsIgnoreCase("active")) {
        new SQUpdater(sq, args);
    } else if (cmd.equalsIgnoreCase("auto")
            || cmd.equalsIgnoreCase("more")
            || cmd.equalsIgnoreCase("submit")) {
        if (args.length == 4) {
            new SQUpdater(sq, cmd, args[1], args[2], args[3]);
        } else if (args.length == 5) {
            new SQUpdater(sq, cmd, args[1], args[2], args[3], args[4]);
        } else {
            return false;
        }
    }
    return true;
}
/**
 * Stop the shared file watcher, if one is running. Called before a new
 * watcher is installed and when watching is turned "off".
 */
private static void quitWatching() {
if (m_currentFileWatcher != null) {
//Util.sendToVnmr("write('line3','QUIT WATCHING')");
m_currentFileWatcher.quit();
}
}
/**
 * FileListener callback: a watched file changed. Ignores pure "touch"
 * events (except in "auto" mode), re-points the ACQlist watch at the
 * current sample's directory when the automation doneQ changes, and then
 * dispatches to the mode-specific updateStats* handler. "build" and
 * "active" modes skip the initial notification fired when watching starts.
 *
 * @param event Describes which path changed and how.
 */
@Override
public void fileChanged(FileChangeEvent event) {
int type = event.getEventType();
String changedPath = event.getPath();
// NB: TOUCHED ==> no change in file contents
if (type != FILE_TOUCHED || m_mode.equals("auto")) {
Messages.postDebug("SQUpdater", "FILE CHANGED: " + changedPath);
// When the doneQ changes, the current sample may have changed;
// move the ACQlist watch to the new sample's data directory.
if ((m_mode.equals("submit")
|| m_mode.equals("more")
//|| m_mode.equals("auto")
|| m_mode.equals("build"))
&& changedPath.equals(m_autoWatchPath))
{
String dataDir = getDataDir(m_autodir);
if (dataDir != null && dataDir.length() > 0 && !dataDir.equals(m_dataDir)) {
m_currentFileWatcher.removePath(m_dataWatchPath);
Messages.postDebug("SQUpdater","Removed " + m_dataWatchPath);
m_dataDir = dataDir;
m_dataWatchPath = m_archiveDir + "/" + m_dataDir
+ "/dirinfo/macdir/ACQlist";
m_currentFileWatcher.addPath(m_dataWatchPath);
Messages.postDebug("SQUpdater","Added " + m_dataWatchPath);
}
}
// Dispatch on mode. FILE_INITIAL is the notification sent when a
// watch first starts; some modes treat it as already up to date.
if (m_mode.equals("auto") && type != FILE_INITIAL) {
updateStatsAuto();
} else if (m_mode.equals("bgstudy")) {
updateStatsBgstudy();
} else if (m_mode.equals("submit")) {
updateStatsSubmit();
} else if (m_mode.equals("more")) {
updateStatsMore();
} else if (m_mode.equals("build") && type != FILE_INITIAL) {
updateStatsBuild(false);
} else if (m_mode.equals("active") && type != FILE_INITIAL) {
updateStatsActive();
}
}
}
/**
 * Handle a change in "auto" mode: send the configured update command to
 * Vnmr; chempack performs the actual automation-run display update.
 */
public void updateStatsAuto() {
// Have chempack update the automation run display
Util.sendToVnmr(m_updateCmd);
// Run this in the Event Thread because it updates the GUI
// SwingUtilities.invokeLater(new Runnable() {
// public void run() { updateStatsAutoUI(); } }
// );
}
/**
 * GUI-side update for "auto" mode. Intentionally empty: the display work
 * is done by chempack in response to the command sent by updateStatsAuto().
 */
public void updateStatsAutoUI() {
// (It's all been done by chempack)
}
/**
 * Handle a change in "bgstudy" mode. The UI update is queued on the Swing
 * Event Dispatch Thread twice with a 2-second pause in between, as a
 * workaround for a race with the data files still being written.
 */
public void updateStatsBgstudy() {
    // Run this in the Event Thread because it updates the GUI
    SwingUtilities.invokeLater(new Runnable() {
        public void run() { updateStatsBgstudyUI(); } }
    );
    // Workaround to avoid race condition
    try {
        Thread.sleep(2000);
    } catch (InterruptedException ie) {
        // Bug fix: don't swallow the interrupt -- restore the thread's
        // interrupted status so the watcher thread can see it was asked
        // to stop.
        Thread.currentThread().interrupt();
    }
    // Run this in the Event Thread because it updates the GUI
    SwingUtilities.invokeLater(new Runnable() {
        public void run() { updateStatsBgstudyUI(); } }
    );
}
/**
 * GUI-side update for "bgstudy" mode: for every action node visible in the
 * SQ, set its status from the ACQlist-derived stats, and for completed
 * nodes replace the title with the FID name from parliblist. Must run on
 * the Swing Event Dispatch Thread.
 */
public void updateStatsBgstudyUI() {
Messages.postDebug("SQUpdater","updateStatsBgstudyUI");
List<String> nodes = getNodeIdsFromSQ(ProtocolBuilder.ACTIONS);
// getExpStats/getFidNames are defined elsewhere in this file; presumably
// they map experiment titles to statuses / FID names -- confirm there.
Map<String, String> expStats = getExpStats(new File(m_dataWatchPath));
File f = new File(m_dataDir + "/dirinfo/parlib/parliblist");
Map<String, String> fidNames = getFidNames(f);
// boolean done = nodes.size() > 0;
ProtocolBuilder mgr = m_studyQueue.getMgr();
for (String node : nodes) {
VElement vnode = mgr.getElement(node);
String title = mgr.getAttribute(vnode, ATTR_TITLE);
String stat = getValueFromTitle(title, expStats);
if (stat != null) {
mgr.setAttribute(vnode, ATTR_STATUS, stat);
if (stat.equals(SQ_COMPLETED)) {
String fidname = getValueFromTitle(title, fidNames);
if (fidname != null) {
mgr.setAttribute(vnode, ATTR_TITLE, fidname);
}
}
}
// // See if this node still may change
// stat = m_studyQueue.getAttribute(node, ATTR_STATUS);
// if (stat != null) {
// if (stat.equalsIgnoreCase(READY)
// || stat.equalsIgnoreCase(QUEUED)
// || stat.equalsIgnoreCase(EXECUTING)
// )
// {
// done = false;
// }
// }
}
// // Clean up if we're done
// // Keep getting false positives on this
// // ... check how many updates we've done?
// if (done) {
// Util.sendToVnmr("SQWatch('off')");
// Util.sendToVnmr("xmhaha('refreshSQ')");
// }
}
/**
 * Collect the IDs of all Study Queue elements of the given node type.
 *
 * @param nodeType Node type selector (e.g. ProtocolBuilder.ACTIONS).
 * @return The element IDs, in SQ order.
 */
private List<String> getNodeIdsFromSQ(int nodeType) {
    List<String> ids = new ArrayList<String>();
    for (VElement element : m_studyQueue.getMgr().getElements(null, nodeType)) {
        ids.add(element.getAttribute(ProtocolBuilder.ATTR_ID));
    }
    return ids;
}
/**
 * Look up the attribute map of each of the given SQ nodes.
 *
 * @param nodeIds IDs of the nodes of interest.
 * @return One attribute map per ID, in iteration order.
 */
private ArrayList<NamedNodeMap>
        getNodeAttributesFromSQ(Iterable<String> nodeIds) {
    ArrayList<NamedNodeMap> attrMaps = new ArrayList<NamedNodeMap>();
    for (String id : nodeIds) {
        attrMaps.add(getNodeAttributesFromSQ(id));
    }
    return attrMaps;
}
/**
 * Build an SQNode snapshot for each of the given SQ nodes.
 *
 * @param nodeIds IDs of the nodes of interest.
 * @return One SQNode per ID, in iteration order.
 */
private SQNodeList getNodeInfoFromSQ(Iterable<String> nodeIds) {
    SQNodeList nodes = new SQNodeList();
    for (String id : nodeIds) {
        nodes.add(getNodeInfoFromSQ(id));
    }
    return nodes;
}
/**
 * Fetch the raw DOM attribute map of one SQ element.
 *
 * @param nodeId ID of the element to look up.
 * @return The element's attributes.
 */
private NamedNodeMap getNodeAttributesFromSQ(String nodeId) {
    return m_studyQueue.getMgr().getElement(nodeId).getAttributes();
}
/**
 * Snapshot one SQ element's standard attributes into an SQNode.
 *
 * @param nodeId ID of the element to look up.
 * @return A populated SQNode, or null if the element does not exist.
 */
private SQNode getNodeInfoFromSQ(String nodeId) {
    ProtocolBuilder mgr = m_studyQueue.getMgr();
    VElement element = mgr.getElement(nodeId);
    if (element == null) {
        return null;
    }
    SQNode info = new SQNode();
    for (String attr : SQNode.getStandardAttributes()) {
        info.setAttr(attr, mgr.getAttribute(element, attr));
    }
    return info;
}
/**
 * Debug aid: dump every attribute of every listed SQ node to the
 * Messages debug log.
 *
 * @param ids IDs of the nodes to dump.
 */
private void printAllNodeAttributes(List<String> ids) {
    List<NamedNodeMap> attrMaps = getNodeAttributesFromSQ(ids);
    for (int idx = 0; idx < ids.size(); idx++) {
        Messages.postDebug("\nNode " + ids.get(idx) + ":");
        NamedNodeMap attrs = attrMaps.get(idx);
        for (int k = 0; k < attrs.getLength(); k++) {
            org.w3c.dom.Node attr = attrs.item(k);
            Messages.postDebug(" " + attr.getNodeName() + "=" + attr.getNodeValue());
        }
    }
}
/**
 * Copy the listed attributes from a source node onto the SQ element with
 * the given ID.
 *
 * @param id ID of the SQ element to modify.
 * @param srcNode Node supplying the new attribute values.
 * @param changed Names of the attributes to copy.
 * @return True if the element exists and was updated; false otherwise.
 */
private boolean updateNodeInSQ(String id,
        SQNode srcNode,
        List<String> changed) {
    ProtocolBuilder mgr = m_studyQueue.getMgr();
    VElement target = mgr.getElement(id);
    if (target == null) {
        return false;
    }
    for (String attr : changed) {
        mgr.setAttribute(target, attr, srcNode.getAttr(attr));
    }
    return true;
}
/**
 * Set properties in studies "prop" file for the given SQ node.
 * A typical location:
 * <BR><CODE>
 * vnmrsys/studies/exp2/tmpstudy/info/n002/prop
 * </CODE>
 * @param cursqexp Experiment directory for the SQ.
 * @param id The ID of the SQ node.
 * @param node The node to write about.
 */
private void writePropFile(String cursqexp,
        String id, SQNode node) {
    // The attributes that are written into the "props" file
    // (e.g., studies/exp2/tmpstudy/info/n002/prop), in the order
    // they should appear. Order is important, because macros
    // may read the value of a particular line number without checking
    // the key.
    // In xmaction it is noted:
    // "$id=sqval[1] $type=sqval[2] $status=sqval[3] - fixed position"
    // (ID is added to the front of the list after reading the prop file.)
    final String[] propAttributes = {
        ATTR_TYPE,
        ATTR_STATUS,
        ATTR_LOCK,
        ATTR_TITLE,
        ATTR_EXP,
        ATTR_TIME,
        ATTR_MACRO,
        ATTR_DATA,
    };
    String path = cursqexp + "/tmpstudy/info/" + id;
    File filePath = new File(path, "/prop");
    new File(path).mkdirs();
    // try-with-resources replaces the old finally block, which relied on
    // catching the NullPointerException from out.close() when the
    // PrintWriter constructor had thrown.
    try (PrintWriter out = new PrintWriter(filePath)) {
        for (String name : propAttributes) {
            out.println(name + " " + node.getAttr(name));
        }
    } catch (FileNotFoundException e) {
        Messages.postDebug("Could not write file "
                           + filePath.getPath());
    }
}
/**
 * Handle a change in "submit" mode by queuing the UI update on the Swing
 * Event Dispatch Thread.
 */
public void updateStatsSubmit() {
// Run this in the Event Thread because it updates the GUI
SwingUtilities.invokeLater(new Runnable() {
public void run() { updateStatsSubmitUI(); } }
);
}
/**
 * GUI-side update for "submit" mode: map each queued title to its SQ node
 * (via EXPLIST/enterSQ) and set the node's status from the ACQlist stats.
 * Titles with no recorded status become "ready"; "active" is shown as
 * "executing". Must run on the Swing Event Dispatch Thread.
 */
public void updateStatsSubmitUI() {
File explist = new File(m_curSQExp + "/EXPLIST");
File enterSQ = new File(m_curSQExp + "/tmpstudy/enterSQ");
// getTitleToIdMap/getExpStats/getDataDir are defined elsewhere in this
// file -- confirm their exact contracts there.
Map<String, String> titleToIdMap = getTitleToIdMap(explist, enterSQ);
String curSampleDir = getDataDir(m_autodir);
File acqlist = new File(m_archiveDir + "/" + curSampleDir
+ "/dirinfo/macdir/ACQlist");
Map<String, String> expStats = getExpStats(acqlist);
Set<String> titles = titleToIdMap.keySet();
for (String title : titles) {
String node = titleToIdMap.get(title);
String status = expStats.get(title);
if (status == null) {
status = SQ_READY;
} else if (status.equals(SQ_ACTIVE)) {
status = SQ_EXECUTING;
}
m_studyQueue.processCommand(SET + " " + node + " "
+ ATTR_STATUS + " " + status);
}
// Set the parent title to show the current sample number
// m_studyQueue.processCommand(SET + " tmpstudy " + ATTR_TITLE
// + " \"Sample " + m_sampleNumber + "\"");
// m_studyQueue.processCommand(SET + " tmpstudy " + ATTR_TOOLTEXT
// + " \"\"");
}
/**
 * Handle a change in "more" mode by queuing the UI update on the Swing
 * Event Dispatch Thread.
 */
public void updateStatsMore() {
// Run this in the Event Thread because it updates the GUI
SwingUtilities.invokeLater(new Runnable() {
public void run() { updateStatsMoreUI(); } }
);
}
/**
 * GUI-side update for "more" mode: pair the i-th node listed in enterSQ
 * with the i-th status from ACQlist (positional match, unlike the
 * title-keyed match in submit mode). Nodes beyond the status list become
 * "ready"; "active" is shown as "executing". Must run on the Swing Event
 * Dispatch Thread.
 */
public void updateStatsMoreUI() {
List<String> stats = null;
String curSampleDir = getDataDir(m_autodir);
File acqlist = new File(m_archiveDir + "/" + curSampleDir
+ "/dirinfo/macdir/ACQlist");
stats = getStatList(acqlist);
File enterSQ = new File(m_curSQExp + "/tmpstudy/enterSQ");
List<String> nodes = getNodeList(enterSQ);
int len = nodes.size();
int statsLen = stats.size();
for (int i = 0; i < len; i++) {
String node = nodes.get(i);
String status = i < statsLen ? stats.get(i) : SQ_READY;
if (status.equals(SQ_ACTIVE)) {
status = SQ_EXECUTING;
}
m_studyQueue.processCommand(SET + " " + node + " "
+ ATTR_STATUS + " " + status);
}
}
/**
 * Recompute the expected SQ node list for the current sample (via SQBuild),
 * re-point the watcher at the newly active "loc" FID file if it changed,
 * and queue the node-list reconciliation on the Swing Event Dispatch
 * Thread.
 *
 * @param isInitial True for the first update after construction; passed
 * through to updateStatsBuildUI, which is more lenient then.
 */
public void updateStatsBuild(boolean isInitial) {
String archsamp = m_archiveDir + "/" + m_sample;
String samplename = new File(archsamp).getName();
List<String> filelist = new ArrayList<String>();
filelist.add("fidlog");
filelist.add("ACQlist");
filelist.add("explist");
filelist.add("EXPLIST");
filelist.add("LONGLIST");
SQBuild builder = new SQBuild(archsamp, samplename, m_autodir,
filelist, false);
String locdir = builder.getActiveLocdir();
if (locdir != null) {
locdir += "/current.fid/fid";
if (!locdir.equals(m_activeLocdir)) {
Messages.postDebug("SQActive","New active locdir: " + locdir);
// NOTE(review): on the first change m_activeLocdir is still null
// here; assumes FileWatcher.removePath(null) is a no-op -- confirm.
m_currentFileWatcher.removePath(m_activeLocdir);
m_activeLocdir = locdir;
m_currentFileWatcher.addPath(m_activeLocdir);
}
}
final SQNodeList nodeList = builder.getNodeList();
final boolean initial = isInitial;
// Run this in the Event Thread because it updates the GUI
SwingUtilities.invokeLater(new Runnable() {
public void run() {
updateStatsBuildUI(initial, nodeList, m_curSQExp);
} }
);
}
/**
 * Handle a change in "active" mode: if the current sample is unchanged,
 * reconcile the node list as in build mode; if a new sample has started,
 * remember it and ask Vnmr for a full SQ rebuild instead.
 */
public void updateStatsActive() {
// Check if sample has changed:
//String archsamp = m_archiveDir + "/" + m_sample;
// NOTE(review): this first result is immediately overwritten below; the
// call may be kept only for its side effects, or may be dead code --
// confirm against getLastSampleFromFidlog's definition.
String newSample = getLastSampleFromFidlog(m_autodir);
newSample = getDataDir(m_autodir);
//String newArchdir = getArchDirFromGlobal(m_autodir, m_archiveDir);
Messages.postDebug("SQUpdateActive",
"updateStatsActive: m_sample=" + m_sample);
Messages.postDebug("SQUpdateActive",
"................. newSample=" + newSample);
if (newSample.equals(m_sample)) {
updateStatsBuild(false);
Messages.postDebug("SQUpdateActive", "SAME SAMPLE");
} else {
m_sample = newSample;
Util.sendToVnmr(m_updateCmd);
Messages.postDebug("SQUpdateActive", "NEW SAMPLE");
}
}
/**
 * Reconcile the freshly calculated node list against the action nodes
 * currently visible in the Study Queue. If the counts disagree (and this
 * is not the initial pass) punt and ask Vnmr for a full SQ rebuild;
 * otherwise patch each visible node whose attributes differ and rewrite
 * its prop file. Must run on the Swing Event Dispatch Thread.
 *
 * @param isInitial True on the first update after construction.
 * @param nodeList The calculated (expected) list of SQ nodes.
 * @param cursqexp Experiment directory holding the SQ (for prop files).
 */
public void updateStatsBuildUI(boolean isInitial,
SQNodeList nodeList, String cursqexp) {
if (DebugOutput.isSetFor("AllSQUpdater")) {
Messages.postDebug("--------------------- Calc node list:");
SQBuild.printNodeList(nodeList);
Messages.postDebug("--------------------- End calc nodes");
}
// TODO: All of updateStatsBuildUI needs cleaning up.
// Get list of action nodes visible in the SQ
List<String> visibleIds = getNodeIdsFromSQ(ProtocolBuilder.ACTIONS);
// A leading SampleInfo node is not part of the calculated list; drop it.
String sampleInfoId = removeSampleInfoNode(visibleIds);
if (DebugOutput.isSetFor("AllSQUpdater") && sampleInfoId != null) {
Messages.postDebug("SampleInfo node ignored");
}
if (!isInitial && nodeList.size() != visibleIds.size()) {
// Counts disagree: punt and request a full rebuild from Vnmr.
Messages.postDebug("SQUpdater", "calc nodes: " + nodeList.size()
+ ", vis nodes: " + visibleIds.size());
Util.sendToVnmr(m_updateCmd);
if (DebugOutput.isSetFor("AllSQUpdater")) {
Messages.postDebug(visibleIds.size() + " nodes visible in SQ; "
+ nodeList.size() + " calculated");
// Print out the visibleNodes
Messages.postDebug("--------------------- Visible node list:");
SQNodeList visibleNodes = getNodeInfoFromSQ(visibleIds);
for (SQNode node : visibleNodes) {
Messages.postDebug(node.toString());
}
Messages.postDebug("--------------------- End visible nodes");
}
} else {
// Get all attributes from the visible SQ nodes
// Build list of the visible SQ Nodes
SQNodeList visibleNodes = getNodeInfoFromSQ(visibleIds);
if (DebugOutput.isSetFor("AllSQUpdater")) {
Messages.postDebug("--------------------- Visible node attrs:");
printAllNodeAttributes(visibleIds);
Messages.postDebug("--------------------- End visible attrs");
}
// Find differences between nodeList and SQ
boolean isDiff = false;
if (DebugOutput.isSetFor("AllSQUpdater")) {
Messages.postDebug("--------------------- Node diffs:");
}
// Positional comparison: the i-th calculated node corresponds to the
// i-th visible node (counts are known equal on this branch).
for (int i = 0; i < nodeList.size(); i++) {
SQNode node = nodeList.get(i);
SQNode vnode = visibleNodes.get(i);
String diffs = vnode.diff(node);
if (diffs.length() > 0) {
isDiff = true;
if (DebugOutput.isSetFor("AllSQUpdater")) {
Messages.postDebug("Node " + i + " ("
+ visibleIds.get(i) + ") differs:");
Messages.postDebug(diffs);
}
// // TODO: May not need to check this stuff
// String vstat = vnode.getStatus();
// String cstat = node.getStatus();
// Boolean completed = (cstat.equals(COMPLETED)
// && (vstat.equals(EXECUTING)
// || vstat.equals(ACTIVE)));
// String vtitle = vnode.getTitle().trim();
// // Displayed title may have other stuff after true title
// String[] titleTokens = vtitle.split(" +", 2);
// if (node.getTitle().contains(titleTokens[0]) || completed) {
// Set some SQ node attributes to match calculated node
List<String> changed = vnode.update(node);
if (changed.size() > 0) {
String nodeId = visibleIds.get(i);
updateNodeInSQ(nodeId, vnode, changed);
writePropFile(cursqexp, nodeId, vnode);
}
// }
}
}
if (DebugOutput.isSetFor("AllSQUpdater")) {
Messages.postDebug("--------------------- End node diffs");
}
if (!isDiff) {
if (DebugOutput.isSetFor("SQUpdater")) {
Messages.postDebug("NO DIFFERENCES");
}
// } else {
// m_sqInfoDir = m_curSQExp + "/tmpstudy/info";
}
}
}
private String removeSampleInfoNode(List<String> ids) {
String id = null;
if (ids.size() > 0) {
ProtocolBuilder mgr = m_studyQueue.getMgr();
VElement vnode = mgr.getElement(ids.get(0));
String title = mgr.getAttribute(vnode, ATTR_TITLE);
if (title.contains("SampleInfo")) {
id = ids.remove(0);
}
}
return id;
}
private String getValueFromTitle(String title, Map<String,String> map) {
String value = null;
Collection<String> keys = map.keySet();
for (String key : keys) {
// NB: title attribute may have other stuff after the true title
if (title.startsWith(key)) {
// title is good enough match to the key
value = map.get(key);
}
}
return value;
}
private List<String> getStatList(File acqlist) {
List<String> list = new ArrayList<String>();
BufferedReader in = null;
// Look for Title and node IDs in EXPLIST file
try {
in = new BufferedReader(new FileReader(acqlist));
String line;
while ((line = in.readLine()) != null) {
String[] toks = line.split(" +");
int ntoks = toks.length;
if (ntoks == 5) {
list.add(toks[4]);
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
// Any "Active" nodes before the last one are errors
boolean gotActive = false;
for (int i = list.size() - 1; i >= 0; --i) {
if ("Active".equalsIgnoreCase(list.get(i))) {
if (gotActive) {
list.set(i, "Error");
} else {
gotActive = true;
}
}
}
return list;
}
private List<String> getNodeList(File enterSQ) {
List<String> list = new ArrayList<String>();
BufferedReader in = null;
// Look for Title and node IDs in enterSQ file
try {
in = new BufferedReader(new FileReader(enterSQ));
String line;
while ((line = in.readLine()) != null) {
String[] toks = line.split(" +");
int ntoks = toks.length;
if (ntoks >= 2) {
String title = toks[1];
if (!title.equals("parent") && !title.equals("SampInfo")) {
list.add(toks[0]); // The node ID
}
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
return list;
}
private String getLastSampleFromFidlog(String autodir) {
String sample = "";
String path = autodir + "/enterQ.macdir/fidlog";
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(path));
String line;
while ((line = in.readLine()) != null) {
QuotedStringTokenizer toker = new QuotedStringTokenizer(line);
if (toker.countTokens() == 2) {
toker.nextToken();
sample = toker.nextToken();
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
sample = new File(sample).getParent();
return sample;
}
private String getArchDirFromGlobal(String autodir, String archdir) {
String globalPath = autodir + "/enterQ.macdir/currentsampleglobal";
Map<String, VnmrParameter> pmap;
String[] pars = {"archivedir"};
pmap = VnmrParameter.readParameters(globalPath, pars);
if (pmap != null) {
archdir = pmap.get("archivedir").getStringValue();
}
return archdir;
}
/**
* Read all the info in the "doneQ" in the given "autodir".
* @return The name of the current sample directory in "svfdir".
*/
private String getDataDir(String autodir) {
String curSampleDir = "";
File doneQ = new File(autodir + "/doneQ");
m_qInfos = getQFileInfo(doneQ);
int nInfos = m_qInfos.size();
if (nInfos > 0) {
QInfo curInfo = m_qInfos.get(m_qInfos.size() - 1);
//m_sampleNumber = curInfo.sampleNumber;
curSampleDir = curInfo.sampleDir;
Messages.postDebug("SQUpdater", "LAST AUTO STATUS = "
+ curInfo.status);
}
return curSampleDir;
}
/**
* Construct a map of experiment stats, keyed by title.
* @param acqlist Path to the "ACQlist" file under "dirinfo".
* @return The map.
*/
static public Map<String, String> getExpStats(File acqlist) {
Map<String, String> map = new TreeMap<String, String>();
if (acqlist == null) {
return map;
}
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(acqlist));
String line;
while ((line = in.readLine()) != null) {
String[] toks = line.split(" +");
int ntoks = toks.length;
if (ntoks == 5) {
// title = toks[0]
String stat = toks[4];
if (stat.equalsIgnoreCase("Active")) {
stat = "Executing";
}
map.put(toks[0], stat);
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
return map;
}
/**
* Construct a map of experiment fid names, keyed by title.
* @param parliblist Path to the "parliblist" file under "dirinfo".
* @return The map.
*/
static public Map<String, String> getFidNames(File parliblist) {
Map<String, String> map = new TreeMap<String, String>();
if (parliblist == null) {
return map;
}
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(parliblist));
String line;
while ((line = in.readLine()) != null) {
String[] toks = line.split("[: ]+", 2);
int ntoks = toks.length;
if (ntoks == 2) {
// title = toks[0]
int idx = toks[1].lastIndexOf("/");
String fidname = toks[1].substring(idx + 1);
map.put(toks[0], fidname);
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
return map;
}
static public Map<String, String> getTitleToIdMap(File explist,
File enterSQ) {
Map<String, String> map = new TreeMap<String, String>();
BufferedReader in = null;
// Look for Title and node IDs in EXPLIST file
try {
in = new BufferedReader(new FileReader(explist));
String line;
while ((line = in.readLine()) != null) {
String[] toks = line.split(" +");
int ntoks = toks.length;
if (ntoks == 5 && !toks[3].startsWith("xx")) {
map.put(toks[0], toks[3]);
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
if (map.size() == 0) {
// Nothing found in EXPLIST - try enterSQ
try {
in = new BufferedReader(new FileReader(enterSQ));
String line;
while ((line = in.readLine()) != null) {
String[] toks = line.split(" +");
int ntoks = toks.length;
if (ntoks == 4) {
String title = toks[3];
int idx = title.lastIndexOf("/");
if (idx >= 0) {
title = title.substring(idx + 1);
}
map.put(toks[0], title);
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
}
return map;
}
/**
* Get info for all the entries in a xxxxQ file.
* (Currently we only use the last entry and only the "sampleDir".)
* @param qPath The path to the xxxxQ file to read.
* @return A list containing the QInfo for each element in the file.
*/
static private List<QInfo> getQFileInfo(File qPath) {
List<QInfo> infos = new ArrayList<QInfo>();
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(qPath));
String line;
QInfo info = new QInfo();
while ((line = in.readLine()) != null) {
if (line.startsWith("----------")) {
if (!(info.sampleNumber == QInfo.NO_SAMPLE_NUMBER)) {
infos.add(info);
info = new QInfo();
}
}
String[] toks = line.split(":", 2);
int ntoks = toks.length;
if (ntoks == 2) {
String key = toks[0].trim();
String value = toks[1].trim();
if (key.equalsIgnoreCase("SAMPLE#")) {
info.sampleNumber = QInfo.NULL_SAMPLE_NUMBER;
try {
info.sampleNumber = Integer.parseInt(value);
} catch (NumberFormatException nfe) {}
} else if (key.equalsIgnoreCase("USER")) {
info.user = value;
} else if (key.equalsIgnoreCase("MACRO")) {
info.macro = value;
} else if (key.equalsIgnoreCase("SOLVENT")) {
info.solvent = value;
} else if (key.equalsIgnoreCase("TEXT")) {
info.text = value;
} else if (key.equalsIgnoreCase("SampleDir")) {
info.sampleDir = value;
} else if (key.equalsIgnoreCase("USERDIR")) {
info.userdir = value;
} else if (key.equalsIgnoreCase("DATA")) {
info.data = value;
} else if (key.equalsIgnoreCase("STATUS")) {
info.status = value;
}
}
}
} catch (FileNotFoundException e) {
} catch (IOException e) {
} finally {
try {
in.close();
} catch (Exception e) {}
}
return infos;
}
    /**
     * Value holder for one entry in a xxxxQ (e.g. doneQ) file, as parsed
     * by getQFileInfo(). Fields are left null until the corresponding
     * key is seen in the file.
     */
    static class QInfo {
        /** sampleNumber value meaning "no SAMPLE# line seen yet". */
        public static final int NO_SAMPLE_NUMBER = -1;
        /** sampleNumber value meaning "SAMPLE# line present but unparsable". */
        public static final int NULL_SAMPLE_NUMBER = 0;
        // Parsed "SAMPLE#" value; NO_SAMPLE_NUMBER until one is seen
        public int sampleNumber = NO_SAMPLE_NUMBER;
        // Value of the "USER" line
        public String user;
        // Value of the "MACRO" line
        public String macro;
        // Value of the "SOLVENT" line
        public String solvent;
        // Value of the "TEXT" line
        public String text;
        // Value of the "SampleDir" line
        public String sampleDir;
        // Value of the "USERDIR" line
        public String userdir;
        // Value of the "DATA" line
        public String data;
        // Value of the "STATUS" line
        public String status;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static java.util.Collections.singletonList;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.fail;
import static org.apache.jackrabbit.oak.api.Type.BINARIES;
import static org.apache.jackrabbit.oak.api.Type.BINARY;
import static org.apache.jackrabbit.oak.api.Type.STRING;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.segment.ListRecord.LEVEL_SIZE;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import com.google.common.base.Charsets;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.memory.MemoryStore;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.EmptyHook;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
 * Tests for the segment-level record types -- block, list, string,
 * stream, map and node records -- written through a {@link SegmentWriter}
 * backed by a {@link FileStore} in a temporary folder.
 */
public class RecordTest {
    @Rule
    public TemporaryFolder folder = new TemporaryFolder();
    private static final String HELLO_WORLD = "Hello, World!";
    // Small binary fixture: the UTF-8 bytes of HELLO_WORLD
    private final byte[] bytes = HELLO_WORLD.getBytes(Charsets.UTF_8);
    private FileStore store;
    private SegmentWriter writer;
    // Fixed seed keeps the "random" test data reproducible across runs
    private final Random random = new Random(0xcafefaceL);
    /** Creates a fresh file store and writer in the temporary folder. */
    @Before
    public void setup() throws IOException {
        store = fileStoreBuilder(folder.getRoot()).build();
        writer = store.getWriter();
    }
    @After
    public void tearDown() {
        store.close();
    }
    /** A block record must read back exactly the bytes written. */
    @Test
    public void testBlockRecord() throws IOException {
        RecordId blockId = writer.writeBlock(bytes, 0, bytes.length);
        BlockRecord block = new BlockRecord(blockId, bytes.length);
        // Check reading with all valid positions and lengths
        for (int n = 1; n < bytes.length; n++) {
            for (int i = 0; i + n <= bytes.length; i++) {
                // Clobber the target range first, so a passing check
                // proves the data really came from the block record
                Arrays.fill(bytes, i, i + n, (byte) '.');
                assertEquals(n, block.read(i, bytes, i, n));
                assertEquals(HELLO_WORLD, new String(bytes, Charsets.UTF_8));
            }
        }
        // Check reading with a too long length
        byte[] large = new byte[bytes.length * 2];
        assertEquals(bytes.length, block.read(0, large, 0, large.length));
        assertEquals(HELLO_WORLD, new String(large, 0, bytes.length, Charsets.UTF_8));
    }
    /**
     * List records must preserve size and entries at and around the
     * level-size boundaries: 1, LEVEL_SIZE, LEVEL_SIZE^2 and one past each.
     */
    @Test
    public void testListRecord() throws IOException {
        RecordId blockId = writer.writeBlock(bytes, 0, bytes.length);
        ListRecord one = writeList(1, blockId);
        ListRecord level1 = writeList(LEVEL_SIZE, blockId);
        ListRecord level1p = writeList(LEVEL_SIZE + 1, blockId);
        ListRecord level2 = writeList(LEVEL_SIZE * LEVEL_SIZE, blockId);
        ListRecord level2p = writeList(LEVEL_SIZE * LEVEL_SIZE + 1, blockId);
        assertEquals(1, one.size());
        assertEquals(blockId, one.getEntry(0));
        assertEquals(LEVEL_SIZE, level1.size());
        assertEquals(blockId, level1.getEntry(0));
        assertEquals(blockId, level1.getEntry(LEVEL_SIZE - 1));
        assertEquals(LEVEL_SIZE + 1, level1p.size());
        assertEquals(blockId, level1p.getEntry(0));
        assertEquals(blockId, level1p.getEntry(LEVEL_SIZE));
        assertEquals(LEVEL_SIZE * LEVEL_SIZE, level2.size());
        assertEquals(blockId, level2.getEntry(0));
        assertEquals(blockId, level2.getEntry(LEVEL_SIZE * LEVEL_SIZE - 1));
        assertEquals(LEVEL_SIZE * LEVEL_SIZE + 1, level2p.size());
        assertEquals(blockId, level2p.getEntry(0));
        assertEquals(blockId, level2p.getEntry(LEVEL_SIZE * LEVEL_SIZE));
        // Iteration order must agree with indexed access
        int count = 0;
        for (RecordId entry : level2p.getEntries()) {
            assertEquals(blockId, entry);
            assertEquals(blockId, level2p.getEntry(count));
            count++;
        }
        assertEquals(LEVEL_SIZE * LEVEL_SIZE + 1, count);
    }
    /** Writes a list record holding {@code size} copies of the given ID. */
    private ListRecord writeList(int size, RecordId id) throws IOException {
        List<RecordId> list = Collections.nCopies(size, id);
        return new ListRecord(writer.writeList(list), size);
    }
    /** Regression test for OAK-1184: many segment references in one list. */
    @Test
    public void testListWithLotsOfReferences() throws IOException { // OAK-1184
        List<RecordId> list = newArrayList();
        for (int i = 0; i < 1000; i++) {
            list.add(new RecordId(store.newBulkSegmentId(), 0));
        }
        writer.writeList(list);
    }
    /**
     * Stream records must round-trip random data of sizes at and around
     * the record-format and segment-size boundaries.
     */
    @Test
    public void testStreamRecord() throws IOException {
        checkRandomStreamRecord(0);
        checkRandomStreamRecord(1);
        checkRandomStreamRecord(0x79);
        checkRandomStreamRecord(0x80);
        checkRandomStreamRecord(0x4079);
        checkRandomStreamRecord(0x4080);
        checkRandomStreamRecord(SegmentWriter.BLOCK_SIZE);
        checkRandomStreamRecord(SegmentWriter.BLOCK_SIZE + 1);
        checkRandomStreamRecord(Segment.MAX_SEGMENT_SIZE);
        checkRandomStreamRecord(Segment.MAX_SEGMENT_SIZE + 1);
        checkRandomStreamRecord(Segment.MAX_SEGMENT_SIZE * 2);
        checkRandomStreamRecord(Segment.MAX_SEGMENT_SIZE * 2 + 1);
    }
    /** Writes {@code size} random bytes as a stream and verifies readback. */
    private void checkRandomStreamRecord(int size) throws IOException {
        byte[] source = new byte[size];
        random.nextBytes(source);
        Blob value = writer.writeStream(new ByteArrayInputStream(source));
        InputStream stream = value.getNewStream();
        try {
            byte[] b = new byte[349]; // prime number
            int offset = 0;
            for (int n = stream.read(b); n != -1; n = stream.read(b)) {
                for (int i = 0; i < n; i++) {
                    assertEquals(source[offset + i], b[i]);
                }
                offset += n;
            }
            assertEquals(offset, size);
            assertEquals(-1, stream.read());
        } finally {
            stream.close();
        }
    }
    /** String records: empty, tiny, and multi-segment large strings. */
    @Test
    public void testStringRecord() throws IOException {
        RecordId empty = writer.writeString("");
        RecordId space = writer.writeString(" ");
        RecordId hello = writer.writeString("Hello, World!");
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < 2 * Segment.MAX_SEGMENT_SIZE + 1000; i++) {
            builder.append((char) ('0' + i % 10));
        }
        RecordId large = writer.writeString(builder.toString());
        // NOTE(review): 'segment' is unused, but the call forces the large
        // string's segment to load -- confirm whether that is intentional
        Segment segment = large.getSegmentId().getSegment();
        assertEquals("", store.getReader().readString(empty));
        assertEquals(" ", store.getReader().readString(space));
        assertEquals("Hello, World!", store.getReader().readString(hello));
        assertEquals(builder.toString(), store.getReader().readString(large));
    }
    /** Map records: empty, small, large, and incremental modification. */
    @Test
    public void testMapRecord() throws IOException {
        RecordId blockId = writer.writeBlock(bytes, 0, bytes.length);
        MapRecord zero = writer.writeMap(
                null, ImmutableMap.<String, RecordId>of());
        MapRecord one = writer.writeMap(
                null, ImmutableMap.of("one", blockId));
        MapRecord two = writer.writeMap(
                null, ImmutableMap.of("one", blockId, "two", blockId));
        Map<String, RecordId> map = newHashMap();
        for (int i = 0; i < 1000; i++) {
            map.put("key" + i, blockId);
        }
        MapRecord many = writer.writeMap(null, map);
        Iterator<MapEntry> iterator;
        // Empty map: no entries, empty iteration
        assertEquals(0, zero.size());
        assertNull(zero.getEntry("one"));
        iterator = zero.getEntries().iterator();
        assertFalse(iterator.hasNext());
        // Singleton map
        assertEquals(1, one.size());
        assertEquals(blockId, one.getEntry("one").getValue());
        assertNull(one.getEntry("two"));
        iterator = one.getEntries().iterator();
        assertTrue(iterator.hasNext());
        assertEquals("one", iterator.next().getName());
        assertFalse(iterator.hasNext());
        // Two-entry map
        assertEquals(2, two.size());
        assertEquals(blockId, two.getEntry("one").getValue());
        assertEquals(blockId, two.getEntry("two").getValue());
        assertNull(two.getEntry("three"));
        iterator = two.getEntries().iterator();
        assertTrue(iterator.hasNext());
        iterator.next();
        assertTrue(iterator.hasNext());
        iterator.next();
        assertFalse(iterator.hasNext());
        // Large map: every key reachable by lookup and by iteration
        assertEquals(1000, many.size());
        iterator = many.getEntries().iterator();
        for (int i = 0; i < 1000; i++) {
            assertTrue(iterator.hasNext());
            assertEquals(blockId, iterator.next().getValue());
            assertEquals(blockId, many.getEntry("key" + i).getValue());
        }
        assertFalse(iterator.hasNext());
        assertNull(many.getEntry("foo"));
        // Modify: remove "key0", add "key1000"; size stays at 1000
        Map<String, RecordId> changes = newHashMap();
        changes.put("key0", null);
        changes.put("key1000", blockId);
        MapRecord modified = writer.writeMap(many, changes);
        assertEquals(1000, modified.size());
        iterator = modified.getEntries().iterator();
        for (int i = 1; i <= 1000; i++) {
            assertTrue(iterator.hasNext());
            assertEquals(blockId, iterator.next().getValue());
            assertEquals(blockId, modified.getEntry("key" + i).getValue());
        }
        assertFalse(iterator.hasNext());
        // NOTE(review): probably meant modified.getEntry("foo"), since
        // 'many' was already checked above -- confirm
        assertNull(many.getEntry("foo"));
    }
    /** Removing a key that was never present must yield an empty map. */
    @Test
    public void testMapRemoveNonExisting() throws IOException {
        RecordId blockId = writer.writeBlock(bytes, 0, bytes.length);
        Map<String, RecordId> changes = newHashMap();
        changes.put("one", null);
        MapRecord zero = writer.writeMap(null, changes);
        assertEquals(0, zero.size());
    }
    /**
     * Worst-case hashing: every key is built so its String hashCode is
     * '\u1000' (c0 * 31 + c1), forcing maximal bucket collisions.
     */
    @Test
    public void testWorstCaseMap() throws IOException {
        RecordId blockId = writer.writeBlock(bytes, 0, bytes.length);
        Map<String, RecordId> map = newHashMap();
        char[] key = new char[2];
        for (int i = 0; i <= MapRecord.BUCKETS_PER_LEVEL; i++) {
            key[0] = (char) ('A' + i);
            key[1] = (char) ('\u1000' - key[0] * 31);
            map.put(new String(key), blockId);
        }
        MapRecord bad = writer.writeMap(null, map);
        assertEquals(map.size(), bad.size());
        Iterator<MapEntry> iterator = bad.getEntries().iterator();
        for (int i = 0; i < map.size(); i++) {
            assertTrue(iterator.hasNext());
            assertEquals('\u1000', iterator.next().getName().hashCode());
        }
        assertFalse(iterator.hasNext());
    }
    /** The empty node state must round-trip unchanged. */
    @Test
    public void testEmptyNode() throws IOException {
        NodeState before = EMPTY_NODE;
        NodeState after = writer.writeNode(before);
        assertEquals(before, after);
    }
    /** A node with a few scalar properties must round-trip unchanged. */
    @Test
    public void testSimpleNode() throws IOException {
        NodeState before = EMPTY_NODE.builder()
                .setProperty("foo", "abc")
                .setProperty("bar", 123)
                .setProperty("baz", Math.PI)
                .getNodeState();
        NodeState after = writer.writeNode(before);
        assertEquals(before, after);
    }
    /** A 1000-level-deep node tree must round-trip unchanged. */
    @Test
    public void testDeepNode() throws IOException {
        NodeBuilder root = EMPTY_NODE.builder();
        NodeBuilder builder = root;
        for (int i = 0; i < 1000; i++) {
            builder = builder.child("test");
        }
        NodeState before = builder.getNodeState();
        NodeState after = writer.writeNode(before);
        assertEquals(before, after);
    }
    /** Bulk child-node removal must round-trip through the map records. */
    @Test
    public void testManyMapDeletes() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        for (int i = 0; i < 1000; i++) {
            builder.child("test" + i);
        }
        NodeState before = writer.writeNode(builder.getNodeState());
        assertEquals(builder.getNodeState(), before);
        builder = before.builder();
        for (int i = 0; i < 900; i++) {
            builder.getChildNode("test" + i).remove();
        }
        NodeState after = writer.writeNode(builder.getNodeState());
        assertEquals(builder.getNodeState(), after);
    }
    /** Regression test for OAK-1374: cross-segment binary references. */
    @Test
    public void testMultiValuedBinaryPropertyAcrossSegments()
            throws IOException {
        // biggest possible inlined value record
        byte[] data = new byte[Segment.MEDIUM_LIMIT - 1];
        random.nextBytes(data);
        // create enough copies of the value to fill a full segment
        List<Blob> blobs = newArrayList();
        while (blobs.size() * data.length < Segment.MAX_SEGMENT_SIZE) {
            blobs.add(writer.writeStream(new ByteArrayInputStream(data)));
        }
        // write a simple node that'll now be stored in a separate segment
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setProperty("test", blobs, BINARIES);
        NodeState state = writer.writeNode(builder.getNodeState());
        // all the blobs should still be accessible, even if they're
        // referenced from another segment
        for (Blob blob : state.getProperty("test").getValue(BINARIES)) {
            try {
                blob.getNewStream().close();
            } catch (IllegalStateException e) {
                fail("OAK-1374");
            }
        }
    }
    /** Regression test for OAK-4307: references to external blobs. */
    @Test
    public void testBinaryPropertyFromExternalSegmentStore() throws IOException, CommitFailedException {
        byte[] data = new byte[Segment.MEDIUM_LIMIT + 1];
        random.nextBytes(data);
        // Create the blob in a separate, memory-backed segment store
        SegmentNodeStore extStore = SegmentNodeStoreBuilders.builder(new MemoryStore()).build();
        NodeBuilder extRootBuilder = extStore.getRoot().builder();
        Blob extBlob = extRootBuilder.createBlob(new ByteArrayInputStream(data));
        extRootBuilder.setProperty("binary", extBlob, BINARY);
        extStore.merge(extRootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        PropertyState extPropertyState = extStore.getRoot().getProperty("binary");
        // Write a node in this store referencing the external blob
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setProperty(extPropertyState);
        NodeState state = writer.writeNode(builder.getNodeState());
        try {
            InputStream is = state.getProperty("binary").getValue(BINARY).getNewStream();
            is.read();
            is.close();
        } catch (SegmentNotFoundException e) {
            fail("OAK-4307 SegmentWriter saves references to external blobs");
        }
    }
    /** jcr:primaryType set as a plain STRING must survive a write. */
    @Test
    public void testStringPrimaryType() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setProperty("jcr:primaryType", "foo", STRING);
        NodeState state = writer.writeNode(builder.getNodeState());
        assertNotNull(state.getProperty("jcr:primaryType"));
    }
    /** jcr:mixinTypes set as plain STRINGS must survive a write. */
    @Test
    public void testStringMixinTypes() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setProperty("jcr:mixinTypes", singletonList("foo"), STRINGS);
        NodeState state = writer.writeNode(builder.getNodeState());
        assertNotNull(state.getProperty("jcr:mixinTypes"));
    }
    /** A write with a "cancelled" supplier must return null. */
    @Test
    public void testCancel() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        SegmentBufferWriter bufferWriter = new SegmentBufferWriter(store, store.getTracker(),
                store.getReader(), "test", 0);
        // Suppliers.ofInstance(true) signals "cancelled" on every check
        NodeState state = writer.writeNode(builder.getNodeState(), bufferWriter, Suppliers.ofInstance(true));
        assertNull(state);
    }
}
| |
/*
* Copyright 2016 Davide Maestroni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dm.jrt.core.channel;
import com.github.dm.jrt.core.common.RoutineException;
import com.github.dm.jrt.core.util.UnitDuration;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Interface defining the basic communication channel with the routine invocation.
* <p>
* Channel instances are used to transfer data to and from the code executed inside the routine
* invocation.
* <p>
* Created by davide-maestroni on 09/09/2014.
*
* @param <IN> the input data type.
* @param <OUT> the output data type.
*/
public interface Channel<IN, OUT> extends Iterator<OUT>, Iterable<OUT> {
    /**
     * Closes the channel and aborts the transfer of data, thus aborting the routine invocation.
     * <br>
     * An instance of {@link com.github.dm.jrt.core.channel.AbortException AbortException} will be
     * passed as the abortion reason.
     * <br>
     * If a delay has been set through the dedicated methods, the abortion will be accordingly
     * postponed.
     * <p>
     * Note that, in case the channel is already closed, the method invocation will have no effect.
     *
     * @return whether the channel status changed as a result of the call.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     */
    boolean abort();
    /**
     * Closes the channel and aborts the transfer of data, thus aborting the routine invocation
     * and causing the specified throwable to be passed as the abortion reason.
     * <br>
     * The throwable, unless it extends the base
     * {@link com.github.dm.jrt.core.common.RoutineException RoutineException}, will be wrapped as
     * the cause of an {@link com.github.dm.jrt.core.channel.AbortException AbortException}
     * instance.
     * <br>
     * If a delay has been set through the dedicated methods, the abortion will be accordingly
     * postponed.
     * <p>
     * Note that, in case the channel is already closed, the method invocation will have no effect.
     *
     * @param reason the throwable object identifying the reason of the invocation abortion.
     * @return whether the channel status changed as a result of the call.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     */
    boolean abort(@Nullable Throwable reason);
    /**
     * Tells the channel to delay the following operations by the specified time duration.
     * <br>
     * In case of read operations the value will represent the maximum allowed time before the
     * operation completes.
     * <p>
     * Note that closing and abortion commands will be delayed as well. Note, however, that a
     * delayed abortion will not prevent the invocation from completing, as pending input data do.
     * <p>
     * Note that the implementing class should ensure that calls of this method from different
     * threads will not interfere with each other.
     *
     * @param delay    the delay value.
     * @param timeUnit the delay time unit.
     * @return this channel.
     * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
     * @throws java.lang.IllegalArgumentException             if the specified delay is negative.
     */
    @NotNull
    Channel<IN, OUT> after(long delay, @NotNull TimeUnit timeUnit);
    /**
     * Tells the channel to delay the following operations by the specified time duration.
     * <br>
     * In case of read operations the value will represent the maximum allowed time before the
     * operation completes.
     * <p>
     * Note that closing and abortion commands will be delayed as well. Note, however, that a
     * delayed abortion will not prevent the invocation from completing, as pending input data do.
     * <p>
     * Note that the implementing class should ensure that calls of this method from different
     * threads will not interfere with each other.
     *
     * @param delay the delay.
     * @return this channel.
     * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
     */
    @NotNull
    Channel<IN, OUT> after(@NotNull UnitDuration delay);
    /**
     * Consumes all the results, waiting at most the set delay for the routine to complete.
     * <p>
     * Note that this method invocation will block the calling thread until the routine invocation
     * completes or is aborted, or the timeout elapses.
     *
     * @return the list of results.
     * @throws com.github.dm.jrt.core.channel.OutputTimeoutException if the channel is set to throw
     *                                                               an exception when the timeout
     *                                                               elapses.
     * @throws com.github.dm.jrt.core.common.RoutineException        if the execution has been
     *                                                               aborted.
     * @throws java.lang.IllegalStateException                       if this channel is already
     *                                                               bound to a consumer or another
     *                                                               channel.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     * @see #eventuallyAbort()
     * @see #eventuallyAbort(Throwable)
     * @see #eventuallyContinue()
     * @see #eventuallyFail()
     */
    @NotNull
    List<OUT> all();
    /**
     * Consumes all the results, waiting at most the set delay for the routine to complete, and
     * puts them into the specified collection.
     * <p>
     * Note that this method invocation will block the calling thread until the routine invocation
     * completes or is aborted, or the timeout elapses.
     *
     * @param results the collection to fill.
     * @return this channel.
     * @throws com.github.dm.jrt.core.channel.OutputTimeoutException if the channel is set to throw
     *                                                               an exception when the timeout
     *                                                               elapses.
     * @throws com.github.dm.jrt.core.common.RoutineException        if the execution has been
     *                                                               aborted.
     * @throws java.lang.IllegalStateException                       if this channel is already
     *                                                               bound to a consumer or another
     *                                                               channel.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     * @see #eventuallyAbort()
     * @see #eventuallyAbort(Throwable)
     * @see #eventuallyContinue()
     * @see #eventuallyFail()
     */
    @NotNull
    Channel<IN, OUT> allInto(@NotNull Collection<? super OUT> results);
    /**
     * Binds this channel to the specified one.
     * <br>
     * After this method exits, all the output will be passed only to the specified input channel.
     * Attempting to read through the dedicated methods will cause an
     * {@link java.lang.IllegalStateException} to be thrown.
     * <br>
     * If a delay has been set through the dedicated methods, the transfer of data will be
     * accordingly postponed.
     *
     * @param channel the input channel.
     * @param <AFTER> the channel output type.
     * @return the passed channel.
     * @throws java.lang.IllegalStateException if this channel is already bound.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     */
    @NotNull
    <AFTER> Channel<? super OUT, AFTER> bind(@NotNull Channel<? super OUT, AFTER> channel);
    /**
     * Binds this channel to the specified consumer.
     * <br>
     * After this method exits, all the output will be passed only to the consumer. Attempting to
     * read through the dedicated methods will cause an {@link java.lang.IllegalStateException} to
     * be thrown.
     * <br>
     * If a delay has been set through the dedicated methods, the transfer of data will be
     * accordingly postponed.
     * <p>
     * Note that the consumer methods may be called on the runner thread.
     *
     * @param consumer the consumer instance.
     * @return this channel.
     * @throws java.lang.IllegalStateException if this channel is already bound.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     */
    @NotNull
    Channel<IN, OUT> bind(@NotNull ChannelConsumer<? super OUT> consumer);
    /**
     * Closes this channel and completes the invocation.
     * <br>
     * After the channel is closed, attempting to pass additional input data through the dedicated
     * methods will cause an {@link java.lang.IllegalStateException} to be thrown.
     * <br>
     * If a delay has been set through the dedicated methods, the closing command will be
     * accordingly postponed.
     * <p>
     * Note that, even if calling this method is not strictly mandatory, some invocation
     * implementations may rely on the completion notification to produce their results. So, it's
     * always advisable to close the channel as soon as all the input data has been passed.
     *
     * @return this channel.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     */
    @NotNull
    Channel<IN, OUT> close();
    /**
     * Tells the channel to abort the invocation execution in case, after a read method is invoked,
     * no result is available before the timeout has elapsed.
     * <p>
     * By default an
     * {@link com.github.dm.jrt.core.channel.OutputTimeoutException OutputTimeoutException} will be
     * thrown.
     * <p>
     * Note that the implementing class should ensure that calls of this method from different
     * threads will not interfere with each other.
     *
     * @return this channel.
     * @see #after(UnitDuration)
     * @see #after(long, TimeUnit)
     * @see #now()
     * @see #eventuallyAbort(Throwable)
     * @see #eventuallyContinue()
     * @see #eventuallyFail()
     */
    @NotNull
    Channel<IN, OUT> eventuallyAbort();
/**
* Tells the channel to abort the invocation execution in case, after a read method is invoked,
* no result is available before the timeout has elapsed.
* <p>
* By default an
* {@link com.github.dm.jrt.core.channel.OutputTimeoutException OutputTimeoutException}
* exception will be thrown.
* <p>
* Note that the implementing class should ensure that calls of this method from different
* threads will not interfere with each others.
*
* @param reason the throwable object identifying the reason of the invocation abortion.
* @return this channel.
* @see #after(UnitDuration)
* @see #after(long, TimeUnit)
* @see #now()
* @see #eventuallyAbort()
* @see #eventuallyContinue()
* @see #eventuallyFail()
*/
@NotNull
Channel<IN, OUT> eventuallyAbort(@Nullable Throwable reason);
/**
* Tells the channel to break the invocation execution in case, after a read method is
* invoked, no result is available before the timeout has elapsed.
* <p>
* By default an
* {@link com.github.dm.jrt.core.channel.OutputTimeoutException OutputTimeoutException}
* exception will be thrown.
* <p>
* Note that the implementing class should ensure that calls of this method from different
* threads will not interfere with each others.
*
* @return this channel.
* @see #after(UnitDuration)
* @see #after(long, TimeUnit)
* @see #now()
* @see #eventuallyAbort()
* @see #eventuallyAbort(Throwable)
* @see #eventuallyFail()
*/
@NotNull
Channel<IN, OUT> eventuallyContinue();
/**
* Tells the channel to throw an
* {@link com.github.dm.jrt.core.channel.OutputTimeoutException OutputTimeoutException} in case,
* after a read method is invoked, no result is available before the timeout has elapsed.
* <p>
* This is the default behavior.
* <p>
* Note that the implementing class should ensure that calls of this method from different
* threads will not interfere with each others.
*
* @return this channel.
* @see #after(UnitDuration)
* @see #after(long, TimeUnit)
* @see #now()
* @see #eventuallyAbort()
* @see #eventuallyAbort(Throwable)
* @see #eventuallyContinue()
*/
@NotNull
Channel<IN, OUT> eventuallyFail();
/**
 * Returns an iterator whose lifetime cannot exceed the set delay.
 *
 * @return the iterator instance.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 * @see #eventuallyAbort(Throwable)
 * @see #eventuallyContinue()
 * @see #eventuallyFail()
 */
@NotNull
Iterator<OUT> expiringIterator();
/**
 * Checks if the invocation has completed, waiting at the maximum for the set delay.
 * <p>
 * Note that this method invocation will block the calling thread until the routine invocation
 * completes or is aborted, or the timeout elapses.
 *
 * @return whether the routine execution has completed.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 */
boolean getComplete();
/**
 * Gets the invocation error or abort exception, if the invocation is aborted, waiting at the
 * maximum for the set delay.
 * <p>
 * Note that this method invocation will block the calling thread until the routine invocation
 * completes or is aborted, or the timeout elapses.
 *
 * @return the invocation error or null.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 */
@Nullable
RoutineException getError();
/**
 * Checks if more results are available by waiting at the maximum for the set timeout.
 * <p>
 * Note that this method invocation will block the calling thread until a new output is
 * available, the routine invocation completes or is aborted, or the timeout elapses.
 *
 * @return whether at least one result is available.
 * @throws com.github.dm.jrt.core.channel.OutputTimeoutException if the channel is set to throw
 *                                                               an exception when the timeout
 *                                                               elapses.
 * @throws com.github.dm.jrt.core.common.RoutineException        if the execution has been
 *                                                               aborted.
 * @throws java.lang.IllegalStateException                       if this channel is already
 *                                                               bound to a consumer or another
 *                                                               channel.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 * @see #eventuallyAbort()
 * @see #eventuallyAbort(Throwable)
 * @see #eventuallyContinue()
 * @see #eventuallyFail()
 */
boolean hasNext();
/**
 * Consumes the first available result by waiting at the maximum for the set timeout.
 * <p>
 * Note that this method invocation will block the calling thread until a new output is
 * available, the routine invocation completes or is aborted, or the timeout elapses.
 *
 * @return the first available result.
 * @throws com.github.dm.jrt.core.channel.OutputTimeoutException if the channel is set to throw
 *                                                               an exception when the timeout
 *                                                               elapses.
 * @throws com.github.dm.jrt.core.common.RoutineException        if the execution has been
 *                                                               aborted.
 * @throws java.lang.IllegalStateException                       if this channel is already
 *                                                               bound to a consumer or another
 *                                                               channel.
 * @throws java.util.NoSuchElementException                      if no output is available (it
 *                                                               might be thrown also in case
 *                                                               the read timeout elapses and no
 *                                                               timeout exception is set to be
 *                                                               thrown).
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 * @see #eventuallyAbort()
 * @see #eventuallyAbort(Throwable)
 * @see #eventuallyContinue()
 * @see #eventuallyFail()
 */
OUT next();
/**
 * Returns the number of input data stored in the channel.
 *
 * @return the input data size.
 */
int inputCount();
/**
 * Checks if this channel is bound to a consumer or another channel.
 *
 * @return whether the channel is bound.
 * @see #bind bind(Channel)
 * @see #bind(ChannelConsumer)
 */
boolean isBound();
/**
 * Checks if the channel is empty, that is, no data are stored in it.
 *
 * @return whether the channel is empty.
 */
boolean isEmpty();
/**
 * Checks if the channel is open, that is, more data are expected to be passed to it.
 *
 * @return whether the channel is open.
 */
boolean isOpen();
/**
 * Consumes the first {@code count} available results by waiting at the maximum for the set
 * timeout.
 * <p>
 * Note that this method invocation will block the calling thread until {@code count} new
 * outputs are available, the routine invocation completes or is aborted, or the timeout elapses.
 *
 * @param count the number of outputs to read.
 * @return the first {@code count} available results.
 * @throws com.github.dm.jrt.core.channel.OutputTimeoutException if the channel is set to throw
 *                                                               an exception when the timeout
 *                                                               elapses.
 * @throws com.github.dm.jrt.core.common.RoutineException        if the execution has been
 *                                                               aborted.
 * @throws java.lang.IllegalStateException                       if this channel is already
 *                                                               bound to a consumer or another
 *                                                               channel.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 * @see #eventuallyAbort()
 * @see #eventuallyAbort(Throwable)
 * @see #eventuallyContinue()
 * @see #eventuallyFail()
 */
@NotNull
List<OUT> next(int count);
/**
 * Consumes the first available result by waiting at the maximum for the set timeout.
 * <br>
 * If the timeout elapses and the channel is not configured to throw an exception or abort the
 * invocation, the specified alternative output is returned.
 * <p>
 * Note that this method invocation will block the calling thread until a new output is
 * available, the routine invocation completes or is aborted, or the timeout elapses.
 *
 * @param output the default output to return.
 * @return the first available result.
 * @throws com.github.dm.jrt.core.channel.OutputTimeoutException if the channel is set to throw
 *                                                               an exception when the timeout
 *                                                               elapses.
 * @throws com.github.dm.jrt.core.common.RoutineException        if the execution has been
 *                                                               aborted.
 * @throws java.lang.IllegalStateException                       if this channel is already
 *                                                               bound to a consumer or another
 *                                                               channel.
 * @throws java.util.NoSuchElementException                      if no output is available (it
 *                                                               might be thrown also in case
 *                                                               the read timeout elapses and no
 *                                                               timeout exception is set to be
 *                                                               thrown).
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 * @see #eventuallyAbort()
 * @see #eventuallyAbort(Throwable)
 * @see #eventuallyContinue()
 * @see #eventuallyFail()
 */
OUT nextOrElse(OUT output);
/**
 * Tells the channel to not wait for results to be available.
 * <p>
 * By default the timeout is set to 0 to avoid unexpected deadlocks.
 * <p>
 * Note that the implementing class should ensure that calls of this method from different
 * threads will not interfere with each other.
 *
 * @return this channel.
 */
@NotNull
Channel<IN, OUT> now();
/**
 * Returns the number of output data stored in the channel.
 *
 * @return the output data size.
 */
int outputCount();
/**
 * Passes the data returned by the specified channel to this one.
 * <br>
 * If a delay has been set through the dedicated methods, the transfer of data will be
 * accordingly postponed.
 * <p>
 * Note that the passed channel will be bound as a result of the call, thus effectively
 * preventing any other consumer from getting data from it.
 *
 * @param channel the channel.
 * @return this channel.
 * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
 * @throws java.lang.IllegalStateException                if this channel is already closed.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 */
@NotNull
Channel<IN, OUT> pass(@Nullable Channel<?, ? extends IN> channel);
/**
 * Passes the data returned by the specified iterable to this channel.
 * <br>
 * If a delay has been set through the dedicated methods, the transfer of data will be
 * accordingly postponed.
 *
 * @param inputs the iterable returning the input data.
 * @return this channel.
 * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
 * @throws java.lang.IllegalStateException                if this channel is already closed.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 */
@NotNull
Channel<IN, OUT> pass(@Nullable Iterable<? extends IN> inputs);
/**
 * Passes the specified input to this channel.
 * <br>
 * If a delay has been set through the dedicated methods, the transfer of data will be
 * accordingly postponed.
 *
 * @param input the input.
 * @return this channel.
 * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
 * @throws java.lang.IllegalStateException                if this channel is already closed.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 */
@NotNull
Channel<IN, OUT> pass(@Nullable IN input);
/**
 * Passes the specified input data to this channel.
 * <br>
 * If a delay has been set through the dedicated methods, the transfer of data will be
 * accordingly postponed.
 *
 * @param inputs the input data.
 * @return this channel.
 * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
 * @throws java.lang.IllegalStateException                if this channel is already closed.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 */
@NotNull
Channel<IN, OUT> pass(@Nullable IN... inputs);
/**
 * Returns the total number of data stored in the channel.
 *
 * @return the data size.
 */
int size();
/**
 * Skips the first {@code count} available results by waiting at the maximum for the set
 * timeout.
 * <p>
 * Note that this method invocation will block the calling thread until {@code count} new
 * outputs are available, the routine invocation completes or is aborted, or the timeout elapses.
 *
 * @param count the number of outputs to skip.
 * @return this channel.
 * @throws com.github.dm.jrt.core.channel.OutputTimeoutException if the channel is set to throw
 *                                                               an exception when the timeout
 *                                                               elapses.
 * @throws com.github.dm.jrt.core.common.RoutineException        if the execution has been
 *                                                               aborted.
 * @throws java.lang.IllegalStateException                       if this channel is already
 *                                                               bound to a consumer or another
 *                                                               channel.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 * @see #eventuallyAbort()
 * @see #eventuallyAbort(Throwable)
 * @see #eventuallyContinue()
 * @see #eventuallyFail()
 */
@NotNull
Channel<IN, OUT> skipNext(int count);
/**
 * Tells the channel to deliver the passed input data in the same order as they are passed to
 * the channel.
 * <p>
 * By default no particular order is applied.
 * <p>
 * Note that the implementing class should ensure that calls of this method from different
 * threads will not interfere with each other.
 *
 * @return this channel.
 * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
 * @see #unsorted()
 */
@NotNull
Channel<IN, OUT> sorted();
/**
 * Throws the invocation error or abort exception, if the invocation is aborted, waiting at the
 * maximum for the set delay.
 * <p>
 * Note that this method invocation will block the calling thread until the routine invocation
 * completes or is aborted, or the timeout elapses.
 *
 * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
 * @see #after(UnitDuration)
 * @see #after(long, TimeUnit)
 * @see #now()
 */
void throwError();
/**
 * Tells the channel to not sort the passed input data.
 * <p>
 * Note that only the inputs passed with a 0 delay will be delivered in the same order as they
 * are passed to the channel, while the others will be delivered as soon as the dedicated runner
 * handles the specific execution.
 * <p>
 * This is the default behavior.
 * <p>
 * Note that the implementing class should ensure that calls of this method from different
 * threads will not interfere with each other.
 *
 * @return this channel.
 * @throws com.github.dm.jrt.core.common.RoutineException if the execution has been aborted.
 * @see #sorted()
 */
@NotNull
Channel<IN, OUT> unsorted();
}
| |
package flink.graphs;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.test.util.JavaProgramTestBase;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
@RunWith(Parameterized.class)
public class TestGraphOperations extends JavaProgramTestBase {

    /** Number of parameterized programs; must match the cases handled in {@link GraphProgs}. */
    private static final int NUM_PROGRAMS = 10;

    /** Id of the program to run, read from the parameterized configuration. */
    private int curProgId = config.getInteger("ProgramId", -1);

    /** Temporary path the program under test writes its output to. */
    private String resultPath;

    /** Expected output, compared line-by-line against the content of {@code resultPath}. */
    private String expectedResult;

    public TestGraphOperations(Configuration config) {
        super(config);
    }

    @Override
    protected void preSubmit() throws Exception {
        resultPath = getTempDirPath("result");
    }

    @Override
    protected void testProgram() throws Exception {
        expectedResult = GraphProgs.runProgram(curProgId, resultPath);
    }

    @Override
    protected void postSubmit() throws Exception {
        compareResultsByLinesInMemory(expectedResult, resultPath);
    }

    /**
     * Builds one configuration per program id, so that each case runs as a separate
     * parameterized test instance.
     */
    @Parameters
    public static Collection<Object[]> getConfigurations() throws FileNotFoundException, IOException {
        LinkedList<Configuration> tConfigs = new LinkedList<Configuration>();
        for (int i = 1; i <= NUM_PROGRAMS; i++) {
            Configuration config = new Configuration();
            config.setInteger("ProgramId", i);
            tConfigs.add(config);
        }
        return toParameterList(tConfigs);
    }

    /** Executes the graph operation identified by the program id and returns the expected result. */
    private static class GraphProgs {

        @SuppressWarnings("serial")
        public static String runProgram(int progId, String resultPath) throws Exception {
            switch (progId) {
                case 1: {
                    /*
                     * Test getUndirected()
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.getUndirected().getEdges().writeAsCsv(resultPath);
                    env.execute();
                    return "1,2,12\n" + "2,1,12\n" +
                            "1,3,13\n" + "3,1,13\n" +
                            "2,3,23\n" + "3,2,23\n" +
                            "3,4,34\n" + "4,3,34\n" +
                            "3,5,35\n" + "5,3,35\n" +
                            "4,5,45\n" + "5,4,45\n" +
                            "5,1,51\n" + "1,5,51\n";
                }
                case 2: {
                    /*
                     * Test reverse()
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.reverse().getEdges().writeAsCsv(resultPath);
                    env.execute();
                    return "2,1,12\n" +
                            "3,1,13\n" +
                            "3,2,23\n" +
                            "4,3,34\n" +
                            "5,3,35\n" +
                            "5,4,45\n" +
                            "1,5,51\n";
                }
                case 3: {
                    /*
                     * Test subgraph: keep vertices with value > 2 and edges with value > 34
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.subgraph(new FilterFunction<Vertex<Long, Long>>() {
                        @Override
                        public boolean filter(Vertex<Long, Long> vertex) throws Exception {
                            return (vertex.getValue() > 2);
                        }
                    },
                    new FilterFunction<Edge<Long, Long>>() {
                        @Override
                        public boolean filter(Edge<Long, Long> edge) throws Exception {
                            return (edge.getValue() > 34);
                        }
                    }).getEdges().writeAsCsv(resultPath);
                    env.execute();
                    return "3,5,35\n" +
                            "4,5,45\n";
                }
                case 4: {
                    /*
                     * Test filterOnVertices: keep vertices with value > 2
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.filterOnVertices(new FilterFunction<Vertex<Long, Long>>() {
                        @Override
                        public boolean filter(Vertex<Long, Long> vertex) throws Exception {
                            return (vertex.getValue() > 2);
                        }
                    }).getEdges().writeAsCsv(resultPath);
                    env.execute();
                    return "3,4,34\n" +
                            "3,5,35\n" +
                            "4,5,45\n";
                }
                case 5: {
                    /*
                     * Test filterOnEdges: keep edges with value > 34
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.filterOnEdges(new FilterFunction<Edge<Long, Long>>() {
                        @Override
                        public boolean filter(Edge<Long, Long> edge) throws Exception {
                            return (edge.getValue() > 34);
                        }
                    }).getEdges().writeAsCsv(resultPath);
                    env.execute();
                    return "3,5,35\n" +
                            "4,5,45\n" +
                            "5,1,51\n";
                }
                case 6: {
                    /*
                     * Test numberOfVertices()
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.numberOfVertices().writeAsText(resultPath);
                    env.execute();
                    return "5";
                }
                case 7: {
                    /*
                     * Test numberOfEdges()
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.numberOfEdges().writeAsText(resultPath);
                    env.execute();
                    return "7";
                }
                case 8: {
                    /*
                     * Test getVertexIds()
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.getVertexIds().writeAsText(resultPath);
                    env.execute();
                    return "1\n2\n3\n4\n5\n";
                }
                case 9: {
                    /*
                     * Test getEdgeIds()
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    graph.getEdgeIds().writeAsCsv(resultPath);
                    env.execute();
                    return "1,2\n" + "1,3\n" +
                            "2,3\n" + "3,4\n" +
                            "3,5\n" + "4,5\n" +
                            "5,1\n";
                }
                case 10: {
                    /*
                     * Test union(): add one extra vertex and edge, then merge with the base graph
                     */
                    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
                    Graph<Long, Long, Long> graph = Graph.fromDataSet(TestGraphUtils.getLongLongVertexData(env),
                            TestGraphUtils.getLongLongEdgeData(env), env);
                    List<Vertex<Long, Long>> vertices = new ArrayList<Vertex<Long, Long>>();
                    List<Edge<Long, Long>> edges = new ArrayList<Edge<Long, Long>>();
                    vertices.add(new Vertex<Long, Long>(6L, 6L));
                    edges.add(new Edge<Long, Long>(6L, 1L, 61L));
                    graph = graph.union(Graph.fromCollection(vertices, edges, env));
                    graph.getEdges().writeAsCsv(resultPath);
                    env.execute();
                    return "1,2,12\n" +
                            "1,3,13\n" +
                            "2,3,23\n" +
                            "3,4,34\n" +
                            "3,5,35\n" +
                            "4,5,45\n" +
                            "5,1,51\n" +
                            "6,1,61\n";
                }
                default:
                    throw new IllegalArgumentException("Invalid program id");
            }
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.impl;
import com.intellij.conversion.*;
import com.intellij.conversion.impl.ConversionContextImpl;
import com.intellij.conversion.impl.ConversionRunner;
import com.intellij.conversion.impl.ProjectConversionUtil;
import com.intellij.conversion.impl.ui.ConvertProjectDialog;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.PathUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.graph.*;
import com.intellij.util.xmlb.XmlSerializer;
import com.intellij.util.xmlb.annotations.MapAnnotation;
import com.intellij.util.xmlb.annotations.Tag;
import com.intellij.util.xmlb.annotations.XCollection;
import org.jdom.Document;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
 * Default {@link ConversionService} implementation: detects whether project configuration files
 * need to be converted, runs the registered {@link ConverterProvider converters} in dependency
 * order, and caches the result (applied converter ids plus project-file timestamps) so that
 * unchanged projects are not re-checked.
 *
 * @author nik
 */
public class ConversionServiceImpl extends ConversionService {
  private static final Logger LOG = Logger.getInstance(ConversionServiceImpl.class);

  /** Converts without any UI feedback, using a no-op listener. */
  @NotNull
  @Override
  public ConversionResult convertSilently(@NotNull String projectPath) {
    return convertSilently(projectPath, new ConversionListener() {
      @Override
      public void conversionNeeded() {
      }
      @Override
      public void successfullyConverted(File backupDir) {
      }
      @Override
      public void error(String message) {
      }
      @Override
      public void cannotWriteToFiles(List<File> readonlyFiles) {
      }
    });
  }

  /**
   * Converts the project headlessly: checks whether conversion is needed, refuses to proceed if
   * any affected file is read-only, backs the affected files up, runs every needed converter
   * (pre-process, process, post-process), saves the modified files and caches the result.
   * Progress and failures are reported through the listener; errors yield
   * {@code ERROR_OCCURRED} rather than an exception.
   */
  @NotNull
  @Override
  public ConversionResult convertSilently(@NotNull String projectPath, @NotNull ConversionListener listener) {
    try {
      if (!isConversionNeeded(projectPath)) {
        return ConversionResultImpl.CONVERSION_NOT_NEEDED;
      }
      listener.conversionNeeded();
      ConversionContextImpl context = new ConversionContextImpl(projectPath);
      final List<ConversionRunner> runners = getConversionRunners(context);
      // Collect every file any runner may touch, so they can be checked and backed up together.
      Set<File> affectedFiles = new HashSet<>();
      for (ConversionRunner runner : runners) {
        affectedFiles.addAll(runner.getAffectedFiles());
      }
      final List<File> readOnlyFiles = ConversionRunner.getReadOnlyFiles(affectedFiles);
      if (!readOnlyFiles.isEmpty()) {
        listener.cannotWriteToFiles(readOnlyFiles);
        return ConversionResultImpl.ERROR_OCCURRED;
      }
      // Back up before mutating anything, so the user can restore the pre-conversion state.
      final File backupDir = ProjectConversionUtil.backupFiles(affectedFiles, context.getProjectBaseDir());
      List<ConversionRunner> usedRunners = new ArrayList<>();
      for (ConversionRunner runner : runners) {
        if (runner.isConversionNeeded()) {
          runner.preProcess();
          runner.process();
          runner.postProcess();
          usedRunners.add(runner);
        }
      }
      context.saveFiles(affectedFiles, usedRunners);
      listener.successfullyConverted(backupDir);
      saveConversionResult(context);
      return new ConversionResultImpl(runners);
    }
    catch (CannotConvertException | IOException e) {
      listener.error(e.getMessage());
    }
    return ConversionResultImpl.ERROR_OCCURRED;
  }

  /**
   * Interactive conversion: if needed (and not headless), shows the conversion dialog and caches
   * the result when the user completes it.
   */
  @NotNull
  @Override
  public ConversionResult convert(@NotNull String projectPath) {
    try {
      if (!new File(projectPath).exists() || ApplicationManager.getApplication().isHeadlessEnvironment() || !isConversionNeeded(projectPath)) {
        return ConversionResultImpl.CONVERSION_NOT_NEEDED;
      }
      final ConversionContextImpl context = new ConversionContextImpl(projectPath);
      final List<ConversionRunner> converters = getConversionRunners(context);
      ConvertProjectDialog dialog = new ConvertProjectDialog(context, converters);
      dialog.show();
      if (dialog.isConverted()) {
        saveConversionResult(context);
        return new ConversionResultImpl(converters);
      }
      return ConversionResultImpl.CONVERSION_CANCELED;
    }
    catch (CannotConvertException e) {
      LOG.info(e);
      Messages.showErrorDialog(IdeBundle.message("error.cannot.convert.project", e.getMessage()),
                               IdeBundle.message("title.cannot.convert.project"));
      return ConversionResultImpl.ERROR_OCCURRED;
    }
  }

  /**
   * Returns the sorted runners that actually have to run: a runner is kept if its own check says
   * conversion is needed, or if one of its declared preceding converters is already scheduled to
   * run (so dependents run after their prerequisites).
   */
  private static List<ConversionRunner> getConversionRunners(ConversionContextImpl context) throws CannotConvertException {
    final List<ConversionRunner> converters = getSortedConverters(context);
    final Iterator<ConversionRunner> iterator = converters.iterator();
    Set<String> convertersToRunIds = new HashSet<>();
    while (iterator.hasNext()) {
      ConversionRunner runner = iterator.next();
      boolean conversionNeeded = runner.isConversionNeeded();
      if (!conversionNeeded) {
        // Run anyway if a converter this one depends on is going to run.
        for (String id : runner.getProvider().getPrecedingConverterIds()) {
          if (convertersToRunIds.contains(id)) {
            conversionNeeded = true;
            break;
          }
        }
      }
      if (conversionNeeded) {
        convertersToRunIds.add(runner.getProvider().getId());
      }
      else {
        iterator.remove();
      }
    }
    return converters;
  }

  /**
   * Checks whether any converter still has work to do for the project. If none does, the cached
   * result is refreshed so later checks can short-circuit. Returns {@code false} on failure.
   */
  public static boolean isConversionNeeded(String projectPath) {
    try {
      final ConversionContextImpl context = new ConversionContextImpl(projectPath);
      final List<ConversionRunner> runners = getSortedConverters(context);
      if (runners.isEmpty()) {
        return false;
      }
      for (ConversionRunner runner : runners) {
        if (runner.isConversionNeeded()) {
          return true;
        }
      }
      saveConversionResult(context);
    }
    catch (CannotConvertException e) {
      LOG.info("Cannot check whether conversion of project files is needed or not, conversion won't be performed", e);
    }
    return false;
  }

  /**
   * Builds the runner list, skipping converters recorded as already applied — but only if no
   * project file is new or modified since the cached timestamps were taken.
   */
  private static List<ConversionRunner> getSortedConverters(final ConversionContextImpl context) {
    final CachedConversionResult conversionResult = loadCachedConversionResult(context.getProjectFile());
    final Map<String, Long> oldMap = conversionResult.myProjectFilesTimestamps;
    Map<String, Long> newMap = getProjectFilesMap(context);
    boolean changed = false;
    LOG.debug("Checking project files");
    // NOTE(review): only new and modified files are detected here; files deleted since the last
    // run do not mark the project as changed.
    for (Map.Entry<String, Long> entry : newMap.entrySet()) {
      final String path = entry.getKey();
      final Long oldValue = oldMap.get(path);
      if (oldValue == null) {
        LOG.debug(" new file: " + path);
        changed = true;
      }
      else if (!entry.getValue().equals(oldValue)) {
        LOG.debug(" changed file: " + path);
        changed = true;
      }
    }
    final Set<String> performedConversionIds;
    if (changed) {
      // Something changed: distrust the cache and re-run every converter's check.
      performedConversionIds = Collections.emptySet();
      LOG.debug("Project files were modified.");
    }
    else {
      performedConversionIds = conversionResult.myAppliedConverters;
      LOG.debug("Project files are up to date. Applied converters: " + performedConversionIds);
    }
    return createConversionRunners(context, performedConversionIds);
  }

  /** Maps the absolute path of every existing project file to its last-modified timestamp. */
  private static Map<String, Long> getProjectFilesMap(ConversionContextImpl context) {
    final Map<String, Long> map = new HashMap<>();
    for (File file : context.getAllProjectFiles()) {
      if (file.exists()) {
        map.put(file.getAbsolutePath(), file.lastModified());
      }
    }
    return map;
  }

  /**
   * Creates a runner for every provider not yet applied and sorts the runners topologically by
   * the providers' preceding-converter dependencies; a cycle is logged as an error.
   */
  private static List<ConversionRunner> createConversionRunners(ConversionContextImpl context, final Set<String> performedConversionIds) {
    List<ConversionRunner> runners = new ArrayList<>();
    final ConverterProvider[] providers = ConverterProvider.EP_NAME.getExtensions();
    for (ConverterProvider provider : providers) {
      if (!performedConversionIds.contains(provider.getId())) {
        runners.add(new ConversionRunner(provider, context));
      }
    }
    final Graph<ConverterProvider> graph = GraphGenerator.generate(CachingSemiGraph.cache(new ConverterProvidersGraph(providers)));
    final DFSTBuilder<ConverterProvider> builder = new DFSTBuilder<>(graph);
    if (!builder.isAcyclic()) {
      final Pair<ConverterProvider,ConverterProvider> pair = builder.getCircularDependency();
      LOG.error("cyclic dependencies between converters: " + pair.getFirst().getId() + " and " + pair.getSecond().getId());
    }
    final Comparator<ConverterProvider> comparator = builder.comparator();
    Collections.sort(runners, (o1, o2) -> comparator.compare(o1.getProvider(), o2.getProvider()));
    return runners;
  }

  @Override
  public void saveConversionResult(@NotNull String projectPath) {
    try {
      saveConversionResult(new ConversionContextImpl(projectPath));
    }
    catch (CannotConvertException e) {
      LOG.info(e);
    }
  }

  /**
   * Persists the conversion cache (applied converter ids + file timestamps) as XML next to the
   * system directory. Note: every registered provider is recorded as applied, not only the ones
   * that actually ran.
   */
  private static void saveConversionResult(ConversionContextImpl context) {
    final CachedConversionResult conversionResult = new CachedConversionResult();
    for (ConverterProvider provider : ConverterProvider.EP_NAME.getExtensions()) {
      conversionResult.myAppliedConverters.add(provider.getId());
    }
    conversionResult.myProjectFilesTimestamps = getProjectFilesMap(context);
    final File infoFile = getConversionInfoFile(context.getProjectFile());
    FileUtil.createParentDirs(infoFile);
    try {
      JDOMUtil.writeDocument(new Document(XmlSerializer.serialize(conversionResult)), infoFile, SystemProperties.getLineSeparator());
    }
    catch (IOException e) {
      LOG.info(e);
    }
  }

  /**
   * Loads the cached conversion result for the project; any failure (missing or corrupt file)
   * degrades to an empty result, which simply forces a full re-check.
   */
  @NotNull
  private static CachedConversionResult loadCachedConversionResult(File projectFile) {
    try {
      final File infoFile = getConversionInfoFile(projectFile);
      if (!infoFile.exists()) {
        return new CachedConversionResult();
      }
      return XmlSerializer.deserialize(JDOMUtil.load(infoFile), CachedConversionResult.class);
    }
    catch (Exception e) {
      LOG.info(e);
      return new CachedConversionResult();
    }
  }

  /** Cache-file location: name derived from the project file name plus a path-hash suffix. */
  private static File getConversionInfoFile(@NotNull File projectFile) {
    String dirName = PathUtil.suggestFileName(projectFile.getName() + Integer.toHexString(projectFile.getAbsolutePath().hashCode()));
    return new File(PathManager.getSystemPath() + File.separator + "conversion" + File.separator + dirName + ".xml");
  }

  /**
   * Converts a single module file after asking the user for confirmation; the old file is backed
   * up first and the user is notified of the backup location on success.
   */
  @Override
  @NotNull
  public ConversionResult convertModule(@NotNull final Project project, @NotNull final File moduleFile) {
    final String url = project.getPresentableUrl();
    assert url != null : project;
    final String projectPath = FileUtil.toSystemDependentName(url);
    if (!isConversionNeeded(projectPath, moduleFile)) {
      return ConversionResultImpl.CONVERSION_NOT_NEEDED;
    }
    final int res = Messages.showYesNoDialog(project, IdeBundle.message("message.module.file.has.an.older.format.do.you.want.to.convert.it"),
                                             IdeBundle.message("dialog.title.convert.module"), Messages.getQuestionIcon());
    if (res != Messages.YES) {
      return ConversionResultImpl.CONVERSION_CANCELED;
    }
    if (!moduleFile.canWrite()) {
      Messages.showErrorDialog(project, IdeBundle.message("error.message.cannot.modify.file.0", moduleFile.getAbsolutePath()),
                               IdeBundle.message("dialog.title.convert.module"));
      return ConversionResultImpl.ERROR_OCCURRED;
    }
    try {
      ConversionContextImpl context = new ConversionContextImpl(projectPath);
      final List<ConversionRunner> runners = createConversionRunners(context, Collections.emptySet());
      final File backupFile = ProjectConversionUtil.backupFile(moduleFile);
      List<ConversionRunner> usedRunners = new ArrayList<>();
      for (ConversionRunner runner : runners) {
        if (runner.isModuleConversionNeeded(moduleFile)) {
          runner.convertModule(moduleFile);
          usedRunners.add(runner);
        }
      }
      context.saveFiles(Collections.singletonList(moduleFile), usedRunners);
      Messages.showInfoMessage(project, IdeBundle.message("message.your.module.was.successfully.converted.br.old.version.was.saved.to.0", backupFile.getAbsolutePath()),
                               IdeBundle.message("dialog.title.convert.module"));
      return new ConversionResultImpl(runners);
    }
    catch (CannotConvertException e) {
      LOG.info(e);
      Messages.showErrorDialog(IdeBundle.message("error.cannot.load.project", e.getMessage()), "Cannot Convert Module");
      return ConversionResultImpl.ERROR_OCCURRED;
    }
    catch (IOException e) {
      LOG.info(e);
      return ConversionResultImpl.ERROR_OCCURRED;
    }
  }

  /** Checks whether any converter wants to convert the given module file; false on failure. */
  private static boolean isConversionNeeded(String projectPath, File moduleFile) {
    try {
      ConversionContextImpl context = new ConversionContextImpl(projectPath);
      final List<ConversionRunner> runners = createConversionRunners(context, Collections.emptySet());
      for (ConversionRunner runner : runners) {
        if (runner.isModuleConversionNeeded(moduleFile)) {
          return true;
        }
      }
      return false;
    }
    catch (CannotConvertException e) {
      LOG.info(e);
      return false;
    }
  }

  /** Serialized form of the conversion cache written by {@link #saveConversionResult}. */
  @Tag("conversion")
  public static class CachedConversionResult {
    // Ids of converter providers recorded as already applied.
    @Tag("applied-converters")
    @XCollection(elementName = "converter", valueAttributeName = "id")
    public Set<String> myAppliedConverters = new HashSet<>();

    // Absolute path -> last-modified timestamp of each project file at save time.
    @Tag("project-files")
    @MapAnnotation(surroundWithTag = false, surroundKeyWithTag = false, surroundValueWithTag = false, entryTagName = "file",
      keyAttributeName = "path", valueAttributeName = "timestamp")
    public Map<String, Long> myProjectFilesTimestamps = new HashMap<>();
  }

  /**
   * Dependency graph over converter providers: the incoming edges of a provider are the
   * providers listed as its preceding converters.
   */
  private static class ConverterProvidersGraph implements InboundSemiGraph<ConverterProvider> {
    private final ConverterProvider[] myProviders;

    public ConverterProvidersGraph(ConverterProvider[] providers) {
      myProviders = providers;
    }

    @Override
    public Collection<ConverterProvider> getNodes() {
      return Arrays.asList(myProviders);
    }

    @Override
    public Iterator<ConverterProvider> getIn(ConverterProvider n) {
      List<ConverterProvider> preceding = new ArrayList<>();
      for (String id : n.getPrecedingConverterIds()) {
        for (ConverterProvider provider : myProviders) {
          if (provider.getId().equals(id)) {
            preceding.add(provider);
          }
        }
      }
      return preceding.iterator();
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.