answer
stringlengths 17
10.2M
|
|---|
package io.github.romatroskin.altrader.bot;
import eu.verdelhan.ta4j.*;
import eu.verdelhan.ta4j.analysis.criteria.AverageProfitableTradesCriterion;
import eu.verdelhan.ta4j.analysis.criteria.RewardRiskRatioCriterion;
import eu.verdelhan.ta4j.analysis.criteria.TotalProfitCriterion;
import eu.verdelhan.ta4j.analysis.criteria.VersusBuyAndHoldCriterion;
import io.github.romatroskin.altrader.strategies.*;
import org.joda.time.DateTime;
import org.knowm.xchange.ExchangeFactory;
import org.knowm.xchange.ExchangeSpecification;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.poloniex.PoloniexExchange;
import org.knowm.xchange.poloniex.dto.marketdata.PoloniexChartData;
import org.knowm.xchange.poloniex.dto.marketdata.PoloniexMarketData;
import org.knowm.xchange.poloniex.dto.marketdata.PoloniexTicker;
import org.knowm.xchange.poloniex.service.PoloniexMarketDataService;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.fusesource.jansi.Ansi.Color.BLUE;
import static org.fusesource.jansi.Ansi.ansi;
import static org.knowm.xchange.dto.Order.OrderType.ASK;
import static org.knowm.xchange.dto.Order.OrderType.BID;
/**
 * Registry of runnable Poloniex trading-bot strategies.
 *
 * Each enum constant binds a strategy implementation class and, on {@code start},
 * bootstraps that strategy from historical chart data and then polls the ticker
 * on a fixed delay, placing real BUY/SELL limit orders when the strategy signals
 * entry/exit. {@code stop()} shuts down the per-constant scheduler.
 *
 * NOTE(review): this enum references {@code lookBackDaysInSeconds}, {@code period},
 * {@code MAX_TICK_COUNT} and {@code DELAY}, which are not declared in the visible
 * portion of this file — presumably enum fields/constants defined elsewhere; confirm.
 */
public enum BotStrategiesFactory implements BotTaskRunner {
    // Commodity Channel Index correction strategy.
    cci_correction(1) {
        @Override
        public void start(CurrencyPair pair, ExchangeSpecification spec) throws Exception {
            this.runStrategy(pair, spec, CCICorrectionStrategy.class);
        }
    },
    // Global extrema (recent highs/lows) strategy.
    global_extrema(1) {
        @Override
        public void start(CurrencyPair pair, ExchangeSpecification spec) throws Exception {
            this.runStrategy(pair, spec, GlobalExtremaStrategy.class);
        }
    },
    // Moving-momentum (trend-following) strategy.
    moving_momentum(1) {
        @Override
        public void start(CurrencyPair pair, ExchangeSpecification spec) throws Exception {
            this.runStrategy(pair, spec, MovingMomentumStrategy.class);
        }
    },
    // Larry Connors' RSI(2) mean-reversion strategy.
    rsi2(1) {
        @Override
        public void start(CurrencyPair pair, ExchangeSpecification spec) throws Exception {
            this.runStrategy(pair, spec, RSI2Strategy.class);
        }
    },
    // Ichimoku cloud strategy.
    ichimoku(1) {
        @Override
        public void start(CurrencyPair pair, ExchangeSpecification spec) throws Exception {
            this.runStrategy(pair, spec, IchimokuCloudTradingStrategy.class);
        }
    };

    /**
     * Bootstraps and schedules a strategy for the given currency pair.
     *
     * The strategy class must expose a {@code static Strategy buildStrategy(TimeSeries)}
     * factory method (invoked reflectively below). Historical chart data seeds the
     * time series, then a polling task appends a new tick per cycle and trades on
     * the strategy's enter/exit signals.
     *
     * @param pair          currency pair to trade (counter = quote, base = asset)
     * @param spec          exchange credentials/endpoint specification
     * @param strategyClazz strategy class providing {@code buildStrategy(TimeSeries)}
     * @throws Exception on exchange, reflection, or scheduling failure
     */
    <T> void runStrategy(CurrencyPair pair, ExchangeSpecification spec, Class<T> strategyClazz) throws Exception {
        // Current time in epoch seconds (Poloniex chart API expects seconds).
        final long now = new Date().getTime() / 1000;
        System.out.println(ansi().fg(BLUE).a(String.format("======== Strategy: %s", toString())).fgDefault());
        final PoloniexExchange poloniexExchange = (PoloniexExchange) ExchangeFactory.INSTANCE.createExchange(spec);
        final PoloniexMarketDataService marketDataService = (PoloniexMarketDataService)
                poloniexExchange.getMarketDataService();
        // Seed the series with historical candles covering the look-back window.
        final PoloniexChartData[] chartData = marketDataService.
                getPoloniexChartData(pair, now - lookBackDaysInSeconds, now, period);
        final List<Tick> tickList = Arrays.stream(chartData).map(chart -> new Tick(new DateTime(chart.getDate()),
                chart.getOpen().doubleValue(), chart.getHigh().doubleValue(), chart.getLow().doubleValue(),
                chart.getClose().doubleValue(), chart.getVolume().doubleValue())).collect(Collectors.toList());
        final TimeSeries series = new TimeSeries(tickList);
        series.setMaximumTickCount(MAX_TICK_COUNT);
        // Strategies are expected to expose a static buildStrategy(TimeSeries) factory.
        final Method buildStrategyMethod = strategyClazz.getMethod("buildStrategy", TimeSeries.class);
        Strategy strategy = (Strategy) buildStrategyMethod.invoke(null, series);
        final TradingRecord tradingRecord = new TradingRecord();
        final Runnable runnable = () -> {
            try {
                final PoloniexTicker ticker = marketDataService.getPoloniexTicker(pair);
                final PoloniexMarketData marketData = ticker.getPoloniexMarketData();
                // NOTE(review): the synthetic tick maps open=lowestAsk and close=highestBid
                // (not true OHLC from a candle) — presumably intentional to trade on the
                // current spread; confirm against the strategy assumptions.
                final Tick newTick = new Tick(DateTime.now(), marketData.getLowestAsk().doubleValue(),
                        marketData.getHigh24hr().doubleValue(), marketData.getLow24hr().doubleValue(),
                        marketData.getHighestBid().doubleValue(), marketData.getBaseVolume().doubleValue());
                series.addTick(newTick);
                System.out.println(ansi().fgBlue().a(String.format("[== New Tick || %3$s || time: %1$td/%1$tm/%1$tY %1$tH:%1$tM:%1$tS, close price: %2$.8f ==]",
                        newTick.getEndTime().toGregorianCalendar(), newTick.getClosePrice().toDouble(), ticker.getCurrencyPair().base.getDisplayName()))
                        .fgDefault());
                final Balance counterBalance = poloniexExchange.getAccountService()
                        .getAccountInfo().getWallet().getBalance(pair.counter);
                final Balance baseBalance = poloniexExchange.getAccountService()
                        .getAccountInfo().getWallet().getBalance(pair.base);
                final int endIndex = series.getEnd();
                System.out.println(String.format("+ Available Balance %1$s: %2$.8f", pair.counter.getDisplayName(), counterBalance.getAvailable().doubleValue()));
                System.out.println(String.format("+ Available Balance %1$s: %2$.8f", pair.base.getDisplayName(), baseBalance.getAvailable().doubleValue()));
                if(strategy.shouldEnter(endIndex)) {
                    // Spend ~33% of available counter balance (minus a small reserve),
                    // converted to base units at the current close price.
                    // NOTE(review): BigDecimal.valueOf(0.0001f) widens a float literal to
                    // double (0.00009999999747...); new BigDecimal("0.0001") would be exact.
                    Decimal buyAmount = Decimal.valueOf(counterBalance.getAvailable().subtract(BigDecimal.valueOf(0.0001f))
                            .multiply(BigDecimal.valueOf(0.33f)).doubleValue()).dividedBy(newTick.getClosePrice());
                    boolean entered = tradingRecord.enter(endIndex, newTick.getClosePrice(), buyAmount);
                    if (entered) {
                        Order entry = tradingRecord.getLastEntry();
                        System.out.println(String.format("[== Entered with %4$s on %1$d (price=%2$.8f, amount=%3$.8f) ==]", entry.getIndex(),
                                entry.getPrice().toDouble(), entry.getAmount().toDouble(), pair.base.getDisplayName()));
                        // Place the real BUY limit order on the exchange.
                        LimitOrder order = new LimitOrder.Builder(BID, pair).tradableAmount(
                                BigDecimal.valueOf(buyAmount.toDouble())).limitPrice(BigDecimal.valueOf(entry.getPrice().toDouble())).build();
                        String orderInfo = poloniexExchange.getTradeService().placeLimitOrder(order);
                        System.out.println(ansi().fgGreen().a(String.format("[== Placed order BUY #%1$s, %3$s, price=%2$.8f ==]", orderInfo,
                                order.getLimitPrice().doubleValue(), pair.base.getDisplayName())).fgDefault());
                    }
                } else if(strategy.shouldExit(endIndex)) {
                    // Only exit if there is an open (entered, not yet closed) trade.
                    if(tradingRecord.getLastEntry() != null && (tradingRecord.getLastTrade() == null || tradingRecord.getLastTrade().isOpened())) {
                        Decimal sellTotal = newTick.getClosePrice().multipliedBy(tradingRecord.getLastEntry().getAmount());
                        Decimal buyTotal = tradingRecord.getLastEntry().getPrice().multipliedBy(tradingRecord.getLastEntry().getAmount());
                        System.out.println(String.format("[== Should exit with %1$s, was bought %2$.8f without fee, want to sell %3$.8f, amount %4$.8f ==]",
                                pair.base.getDisplayName(), buyTotal.toDouble(), sellTotal.toDouble(), tradingRecord.getLastEntry().getAmount().toDouble()));
                        // Require at least ~1% profit over the buy total before selling.
                        if(sellTotal.isGreaterThan(buyTotal.plus(buyTotal.multipliedBy(Decimal.valueOf(0.01f))))) {
                            // Sell at most what is actually available in the base balance.
                            Decimal sellAmount = tradingRecord.getLastEntry().getAmount().isLessThanOrEqual(Decimal.valueOf(baseBalance.getAvailable().doubleValue()))
                                    ? tradingRecord.getLastEntry().getAmount() : Decimal.valueOf(baseBalance.getAvailable().doubleValue());
                            boolean exited = tradingRecord.exit(endIndex, newTick.getClosePrice(), sellAmount);
                            if (exited) {
                                Order exit = tradingRecord.getLastExit();
                                System.out.println(String.format("[== Exited with %4$s on %1$d (price=%2$.8f, amount=%3$.8f) ==]", exit.getIndex(),
                                        exit.getPrice().toDouble(), exit.getAmount().toDouble(), pair.base.getDisplayName()));
                                // Place the real SELL limit order on the exchange.
                                LimitOrder order = new LimitOrder.Builder(ASK, pair).tradableAmount(
                                        BigDecimal.valueOf(sellAmount.toDouble())).limitPrice(BigDecimal.valueOf(exit.getPrice().toDouble())).build();
                                String orderInfo = poloniexExchange.getTradeService().placeLimitOrder(order);
                                System.out.println(ansi().fgRed().a(String.format("[== Placed order SELL #%1$s, %3$s, price=%2$.8f ==]", orderInfo,
                                        order.getLimitPrice().doubleValue(), pair.base.getDisplayName())).fgDefault());
                            }
                        }
                    } else {
                        System.out.println(String.format("[== Should exit with %1$s, but no open trades ==]", pair.base.getDisplayName()));
                    }
                }
                // Periodic performance report over the recorded (closed) trades.
                if(tradingRecord.getTradeCount() > 0) {
                    System.out.println(String.format("+ %2$s Trades: %1$d", tradingRecord.getTradeCount(), pair.base.getDisplayName()));
                    AnalysisCriterion profitTradesRatio = new AverageProfitableTradesCriterion();
                    System.out.println(String.format("+ %2$s Profitable trades ratio: %1$.8f",
                            profitTradesRatio.calculate(series, tradingRecord), pair.base.getDisplayName()));
                    AnalysisCriterion rewardRiskRatio = new RewardRiskRatioCriterion();
                    System.out.println(String.format("+ %2$s Reward-risk ratio: %1$.8f",
                            rewardRiskRatio.calculate(series, tradingRecord), pair.base.getDisplayName()));
                    AnalysisCriterion vsBuyAndHold = new VersusBuyAndHoldCriterion(new TotalProfitCriterion());
                    System.out.println(String.format("+ %2$s Our profit vs buy-and-hold profit: %1$.8f",
                            vsBuyAndHold.calculate(series, tradingRecord), pair.base.getDisplayName()));
                    System.out.println(String.format("== %1$s ================================", pair.base.getDisplayName()));
                    tradingRecord.getTrades().forEach(trade -> {
                        double buyPrice = trade.getEntry().getPrice().toDouble();
                        double sellPrice = trade.getExit().getPrice().toDouble();
                        System.out.println(ansi().fgDefault()
                                // red for losing trades, green for winners
                                .a(sellPrice <= buyPrice ? ansi().fgRed() : ansi().fgGreen())
                                .a(String.format("BOUGHT: %1$.8f || SOLD: %2$.8f", buyPrice, sellPrice))
                                .fgDefault()
                        );
                    }
                    );
                    System.out.println("================================================================");
                }
            } catch (Exception e) {
                // Keep the polling task alive across transient exchange/network errors.
                e.printStackTrace();
            }
        };
        // Poll the market every DELAY seconds, starting immediately.
        this.scheduler.scheduleWithFixedDelay(runnable, 0, DELAY, TimeUnit.SECONDS);
    }

    // Per-constant scheduler driving the polling task; shut down via stop().
    ScheduledExecutorService scheduler;

    BotStrategiesFactory(int threadsCount) {
        this.scheduler = Executors.newScheduledThreadPool(threadsCount);
    }

    @Override
    public void stop() {
        this.scheduler.shutdown();
    }
}
|
package me.nithanim.gw2api.v2.api.achievements;
/**
 * A single daily-achievement entry from the Guild Wars 2 API v2
 * {@code /achievements/daily} endpoint.
 *
 * Accessors, equality, and string form are generated by Lombok. Fields
 * default to {@code -1} to mean "not populated" when constructed via the
 * no-args constructor (e.g. before JSON binding).
 */
@lombok.NoArgsConstructor
@lombok.AllArgsConstructor
@lombok.Getter
@lombok.EqualsAndHashCode
@lombok.ToString
public class DailyAchievement {
    // Achievement id; -1 when unset.
    private int id = -1;
    // Minimum character level the daily applies to; -1 when unset.
    private int levelMin = -1;
    // Maximum character level the daily applies to; -1 when unset.
    private int levelMax = -1;

    /** Game-mode category of a daily achievement. */
    // Note: nested enums are implicitly static; the redundant `static`
    // modifier has been dropped.
    public enum Type {
        PVE, PVP, WVW, FRACTALS, SPECIAL;
    }
}
|
package net.floodlightcontroller.statistics;
import com.google.common.primitives.UnsignedLong;
import com.google.common.util.concurrent.ListenableFuture;
import javafx.util.Pair;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.internal.IOFSwitchService;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.core.types.NodePortTuple;
import net.floodlightcontroller.debugcounter.IDebugCounter;
import net.floodlightcontroller.debugcounter.IDebugCounterService;
import net.floodlightcontroller.debugcounter.IDebugCounterService.MetaData;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.statistics.web.SwitchStatisticsWebRoutable;
import net.floodlightcontroller.threadpool.IThreadPoolService;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.match.Match;
import org.projectfloodlight.openflow.protocol.ver13.OFMeterSerializerVer13;
import org.projectfloodlight.openflow.types.DatapathId;
import org.projectfloodlight.openflow.types.OFGroup;
import org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.types.TableId;
import org.projectfloodlight.openflow.types.U64;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.Thread.State;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
/**
 * Floodlight module that periodically collects port, flow, and
 * port-description statistics from all connected switches and caches the
 * latest results for consumers (REST API, other modules).
 *
 * Thread-safety: the cached stats maps are written by scheduled collector
 * threads and read concurrently by API callers, so they are
 * {@link ConcurrentHashMap}s. The previous plain {@code HashMap}s were a
 * data race (e.g. {@code getFlowStats(DatapathId)} iterates the key set
 * while {@code FlowStatsCollector.run()} clears and repopulates the map).
 */
public class StatisticsCollector implements IFloodlightModule, IStatisticsService {
    private static final Logger log = LoggerFactory.getLogger(StatisticsCollector.class);

    private static IOFSwitchService switchService;
    private static IThreadPoolService threadPoolService;
    private static IRestApiService restApiService;
    protected IDebugCounterService debugCounterService;
    private IDebugCounter counterPacketOut;

    private static boolean isEnabled = false;

    private static int portStatsInterval = 10; /* could be set by REST API, so not final */
    private static int flowStatsInterval = 11;
    private static ScheduledFuture<?> portStatsCollector;
    private static ScheduledFuture<?> flowStatsCollector;
    private static ScheduledFuture<?> portDescCollector;

    private static final long BITS_PER_BYTE = 8;
    private static final long MILLIS_PER_SEC = 1000;

    private static final String INTERVAL_PORT_STATS_STR = "collectionIntervalPortStatsSeconds";
    private static final String ENABLED_STR = "enable";

    /* Concurrent maps: written by the scheduled collector threads below and
     * read by IStatisticsService callers (REST) without external locking.
     * A plain HashMap under concurrent read/write is undefined behavior
     * (lost updates, ConcurrentModificationException, resize livelock). */
    private static final Map<NodePortTuple, SwitchPortBandwidth> portStats = new ConcurrentHashMap<NodePortTuple, SwitchPortBandwidth>();
    private static final Map<NodePortTuple, SwitchPortBandwidth> tentativePortStats = new ConcurrentHashMap<NodePortTuple, SwitchPortBandwidth>();
    private static final Map<Pair<Match, DatapathId>, FlowRuleStats> flowStats = new ConcurrentHashMap<Pair<Match, DatapathId>, FlowRuleStats>();
    private static final Map<NodePortTuple, PortDesc> portDesc = new ConcurrentHashMap<NodePortTuple, PortDesc>();

    /**
     * Run periodically to collect all port statistics. This only collects
     * bandwidth stats right now, but it could be expanded to record other
     * information as well. The difference between the most recent and the
     * current RX/TX bytes is used to determine the "elapsed" bytes. A
     * timestamp is saved each time stats results are saved to compute the
     * bits per second over the elapsed time. There isn't a better way to
     * compute the precise bandwidth unless the switch were to include a
     * timestamp in the stats reply message, which would be nice but isn't
     * likely to happen. It would be even better if the switch recorded
     * bandwidth and reported bandwidth directly.
     *
     * Stats are not reported unless at least two iterations have occurred
     * for a single switch's reply. This must happen to compare the byte
     * counts and to get an elapsed time.
     *
     * @author Ryan Izard, ryan.izard@bigswitch.com, rizard@g.clemson.edu
     *
     */
    protected class PortStatsCollector implements Runnable {
        @Override
        public void run() {
            Map<DatapathId, List<OFStatsReply>> replies = getSwitchStatistics(switchService.getAllSwitchDpids(), OFStatsType.PORT);
            for (Entry<DatapathId, List<OFStatsReply>> e : replies.entrySet()) {
                for (OFStatsReply r : e.getValue()) {
                    OFPortStatsReply psr = (OFPortStatsReply) r;
                    for (OFPortStatsEntry pse : psr.getEntries()) {
                        NodePortTuple npt = new NodePortTuple(e.getKey(), pse.getPortNo());
                        SwitchPortBandwidth spb;
                        if (portStats.containsKey(npt) || tentativePortStats.containsKey(npt)) {
                            if (portStats.containsKey(npt)) { /* update */
                                spb = portStats.get(npt);
                            } else if (tentativePortStats.containsKey(npt)) { /* finish */
                                spb = tentativePortStats.get(npt);
                                tentativePortStats.remove(npt);
                            } else {
                                log.error("Inconsistent state between tentative and official port stats lists.");
                                return;
                            }
                            /* Get counted bytes over the elapsed period. Check for counter overflow. */
                            U64 rxBytesCounted;
                            U64 txBytesCounted;
                            if (spb.getPriorByteValueRx().compareTo(pse.getRxBytes()) > 0) { /* overflow */
                                U64 upper = U64.NO_MASK.subtract(spb.getPriorByteValueRx());
                                U64 lower = pse.getRxBytes();
                                rxBytesCounted = upper.add(lower);
                            } else {
                                rxBytesCounted = pse.getRxBytes().subtract(spb.getPriorByteValueRx());
                            }
                            if (spb.getPriorByteValueTx().compareTo(pse.getTxBytes()) > 0) { /* overflow */
                                U64 upper = U64.NO_MASK.subtract(spb.getPriorByteValueTx());
                                U64 lower = pse.getTxBytes();
                                txBytesCounted = upper.add(lower);
                            } else {
                                txBytesCounted = pse.getTxBytes().subtract(spb.getPriorByteValueTx());
                            }
                            long speed = getSpeed(npt);
                            /* Elapsed seconds since the prior sample was taken. */
                            double timeDifSec = ((System.nanoTime() - spb.getStartTime_ns()) * 1.0 / 1000000) / MILLIS_PER_SEC;
                            portStats.put(npt, SwitchPortBandwidth.of(npt.getNodeId(), npt.getPortId(),
                                    U64.ofRaw(speed),
                                    U64.ofRaw(Math.round((rxBytesCounted.getValue() * BITS_PER_BYTE) / timeDifSec)),
                                    U64.ofRaw(Math.round((txBytesCounted.getValue() * BITS_PER_BYTE) / timeDifSec)),
                                    pse.getRxBytes(), pse.getTxBytes())
                            );
                        } else { /* initialize */
                            tentativePortStats.put(npt, SwitchPortBandwidth.of(npt.getNodeId(), npt.getPortId(), U64.ZERO, U64.ZERO, U64.ZERO, pse.getRxBytes(), pse.getTxBytes()));
                        }
                    }
                }
            }
        }

        /**
         * Best-effort current port speed in the units reported by the switch.
         * Returns 0 when the switch/port is unknown (e.g. disconnected) or the
         * OpenFlow version does not expose a speed.
         */
        protected long getSpeed(NodePortTuple npt) {
            IOFSwitch sw = switchService.getSwitch(npt.getNodeId());
            long speed = 0;
            if (sw == null) return speed; /* could have disconnected; we'll assume zero-speed then */
            if (sw.getPort(npt.getPortId()) == null) return speed;
            /* getCurrSpeed() should handle different OpenFlow Version */
            OFVersion detectedVersion = sw.getOFFactory().getVersion();
            switch (detectedVersion) {
            case OF_10:
                log.debug("Port speed statistics not supported in OpenFlow 1.0");
                break;
            case OF_11:
            case OF_12:
            case OF_13:
                speed = sw.getPort(npt.getPortId()).getCurrSpeed();
                break;
            case OF_14:
            case OF_15:
                /* OF1.4/1.5 moved speed into the ethernet port-desc property. */
                for (OFPortDescProp p : sw.getPort(npt.getPortId()).getProperties()) {
                    if (p.getType() == 0) { /* OpenFlow 1.4 and OpenFlow 1.5 will return zero */
                        speed = ((OFPortDescPropEthernet) p).getCurrSpeed();
                    }
                }
                break;
            default:
                break;
            }
            return speed;
        }
    }

    /**
     * Run periodically to collect all flow statistics from every switch.
     */
    protected class FlowStatsCollector implements Runnable {
        @Override
        public void run() {
            flowStats.clear(); // to clear expired flows
            Map<DatapathId, List<OFStatsReply>> replies = getSwitchStatistics(switchService.getAllSwitchDpids(), OFStatsType.FLOW);
            for (Entry<DatapathId, List<OFStatsReply>> e : replies.entrySet()) {
                IOFSwitch sw = switchService.getSwitch(e.getKey());
                for (OFStatsReply r : e.getValue()) {
                    OFFlowStatsReply psr = (OFFlowStatsReply) r;
                    for (OFFlowStatsEntry pse : psr.getEntries()) {
                        if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_15) == 0) {
                            log.warn("Flow Stats not supported in OpenFlow 1.5.");
                        } else {
                            Pair<Match, DatapathId> pair = new Pair<Match, DatapathId>(pse.getMatch(), e.getKey());
                            flowStats.put(pair, FlowRuleStats.of(
                                    e.getKey(),
                                    pse.getByteCount(),
                                    pse.getPacketCount(),
                                    pse.getPriority(),
                                    pse.getHardTimeout(),
                                    pse.getIdleTimeout(),
                                    pse.getDurationSec()));
                        }
                    }
                }
            }
        }
    }

    /**
     * Run periodically to collect port description from every switch and port, so it is possible to know its state and configuration.
     * Used in Load balancer to determine if a port is enabled.
     */
    private class PortDescCollector implements Runnable {
        @Override
        public void run() {
            Map<DatapathId, List<OFStatsReply>> replies = getSwitchStatistics(switchService.getAllSwitchDpids(), OFStatsType.PORT_DESC);
            for (Entry<DatapathId, List<OFStatsReply>> e : replies.entrySet()) {
                for (OFStatsReply r : e.getValue()) {
                    OFPortDescStatsReply psr = (OFPortDescStatsReply) r;
                    for (OFPortDesc pse : psr.getEntries()) {
                        NodePortTuple npt = new NodePortTuple(e.getKey(), pse.getPortNo());
                        portDesc.put(npt, PortDesc.of(e.getKey(),
                                pse.getPortNo(),
                                pse.getName(),
                                pse.getState(),
                                pse.getConfig(),
                                pse.isEnabled()));
                    }
                }
            }
        }
    }

    /**
     * Single thread for collecting switch statistics and
     * containing the reply.
     *
     * @author Ryan Izard, ryan.izard@bigswitch.com, rizard@g.clemson.edu
     *
     */
    private class GetStatisticsThread extends Thread {
        private List<OFStatsReply> statsReply;
        private DatapathId switchId;
        private OFStatsType statType;

        public GetStatisticsThread(DatapathId switchId, OFStatsType statType) {
            this.switchId = switchId;
            this.statType = statType;
            this.statsReply = null;
        }

        /** Reply list, or null if the switch has not answered (yet). */
        public List<OFStatsReply> getStatisticsReply() {
            return statsReply;
        }

        public DatapathId getSwitchId() {
            return switchId;
        }

        @Override
        public void run() {
            statsReply = getSwitchStatistics(switchId, statType);
        }
    }

    /*
     * IFloodlightModule implementation
     */
    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleServices() {
        Collection<Class<? extends IFloodlightService>> l =
                new ArrayList<Class<? extends IFloodlightService>>();
        l.add(IStatisticsService.class);
        return l;
    }

    @Override
    public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
        Map<Class<? extends IFloodlightService>, IFloodlightService> m =
                new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
        m.put(IStatisticsService.class, this);
        return m;
    }

    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
        Collection<Class<? extends IFloodlightService>> l =
                new ArrayList<Class<? extends IFloodlightService>>();
        l.add(IOFSwitchService.class);
        l.add(IThreadPoolService.class);
        l.add(IRestApiService.class);
        l.add(IDebugCounterService.class);
        return l;
    }

    /**
     * Resolves service dependencies and reads the module configuration
     * ("enable" flag and port-stats collection interval). Bad values fall
     * back to the compiled-in defaults.
     */
    @Override
    public void init(FloodlightModuleContext context)
            throws FloodlightModuleException {
        switchService = context.getServiceImpl(IOFSwitchService.class);
        threadPoolService = context.getServiceImpl(IThreadPoolService.class);
        restApiService = context.getServiceImpl(IRestApiService.class);
        debugCounterService = context.getServiceImpl(IDebugCounterService.class);

        Map<String, String> config = context.getConfigParams(this);
        if (config.containsKey(ENABLED_STR)) {
            try {
                isEnabled = Boolean.parseBoolean(config.get(ENABLED_STR).trim());
            } catch (Exception e) {
                log.error("Could not parse '{}'. Using default of {}", ENABLED_STR, isEnabled);
            }
        }
        log.info("Statistics collection {}", isEnabled ? "enabled" : "disabled");

        if (config.containsKey(INTERVAL_PORT_STATS_STR)) {
            try {
                portStatsInterval = Integer.parseInt(config.get(INTERVAL_PORT_STATS_STR).trim());
            } catch (Exception e) {
                log.error("Could not parse '{}'. Using default of {}", INTERVAL_PORT_STATS_STR, portStatsInterval);
            }
        }
        log.info("Port statistics collection interval set to {}s", portStatsInterval);
    }

    /** Registers REST routes and debug counters, and starts collection if enabled. */
    @Override
    public void startUp(FloodlightModuleContext context)
            throws FloodlightModuleException {
        restApiService.addRestletRoutable(new SwitchStatisticsWebRoutable());
        debugCounterService.registerModule("statistics");
        if (isEnabled) {
            startStatisticsCollection();
        }
        counterPacketOut = debugCounterService.registerCounter("statistics", "packet-outs-written", "Packet outs written by the StatisticsCollector", MetaData.WARN);
    }

    /*
     * IStatisticsService implementation
     */
    /**
     * Changes the port-stats polling period. Note: takes effect for the join
     * timeout immediately, but the scheduled task keeps its original rate
     * until collection is restarted.
     */
    @Override
    public String setPortStatsPeriod(int period) {
        portStatsInterval = period;
        return "{\"status\" : \"Port period changed to " + period + "\"}";
    }

    /** Changes the flow-stats polling period (applied on collection restart). */
    @Override
    public String setFlowStatsPeriod(int period) {
        flowStatsInterval = period;
        return "{\"status\" : \"Flow period changed to " + period + "\"}";
    }

    /** Read-only view of the latest port descriptions, keyed by switch+port. */
    @Override
    public Map<NodePortTuple, PortDesc> getPortDesc() {
        return Collections.unmodifiableMap(portDesc);
    }

    /** Latest port description for a specific switch port, or null if unknown. */
    @Override
    public PortDesc getPortDesc(DatapathId dpid, OFPort port) {
        return portDesc.get(new NodePortTuple(dpid, port));
    }

    /** Read-only view of the latest flow stats, keyed by (match, switch). */
    @Override
    public Map<Pair<Match, DatapathId>, FlowRuleStats> getFlowStats() {
        return Collections.unmodifiableMap(flowStats);
    }

    /** Latest flow stats restricted to a single switch. */
    @Override
    public Set<FlowRuleStats> getFlowStats(DatapathId dpid) {
        Set<FlowRuleStats> frs = new HashSet<FlowRuleStats>();
        /* ConcurrentHashMap iteration is weakly consistent, so this is safe
         * even while FlowStatsCollector clears/repopulates the map. */
        for (Pair<Match, DatapathId> pair : flowStats.keySet()) {
            if (pair.getValue().equals(dpid))
                frs.add(flowStats.get(pair));
        }
        return frs;
    }

    /** Latest bandwidth sample for a specific switch port, or null if unknown. */
    @Override
    public SwitchPortBandwidth getBandwidthConsumption(DatapathId dpid, OFPort p) {
        return portStats.get(new NodePortTuple(dpid, p));
    }

    /** Read-only view of the latest bandwidth samples for all known ports. */
    @Override
    public Map<NodePortTuple, SwitchPortBandwidth> getBandwidthConsumption() {
        return Collections.unmodifiableMap(portStats);
    }

    /** Enables or disables collection; no-op if the state is unchanged. */
    @Override
    public synchronized void collectStatistics(boolean collect) {
        if (collect && !isEnabled) {
            startStatisticsCollection();
            isEnabled = true;
        } else if (!collect && isEnabled) {
            stopStatisticsCollection();
            isEnabled = false;
        }
        /* otherwise, state is not changing; no-op */
    }

    @Override
    public boolean isStatisticsCollectionEnabled() {
        return isEnabled;
    }

    /*
     * Helper functions
     */
    /**
     * Start all stats threads.
     */
    private void startStatisticsCollection() {
        portStatsCollector = threadPoolService.getScheduledExecutor().scheduleAtFixedRate(new PortStatsCollector(), portStatsInterval, portStatsInterval, TimeUnit.SECONDS);
        tentativePortStats.clear(); /* must clear out, otherwise might have huge BW result if present and wait a long time before re-enabling stats */
        flowStatsCollector = threadPoolService.getScheduledExecutor().scheduleAtFixedRate(new FlowStatsCollector(), flowStatsInterval, flowStatsInterval, TimeUnit.SECONDS);
        portDescCollector = threadPoolService.getScheduledExecutor().scheduleAtFixedRate(new PortDescCollector(), portStatsInterval, portStatsInterval, TimeUnit.SECONDS);
        log.warn("Statistics collection thread(s) started");
    }

    /**
     * Stop all stats threads.
     */
    private void stopStatisticsCollection() {
        if (!portStatsCollector.cancel(false) || !flowStatsCollector.cancel(false) || !portDescCollector.cancel(false)) {
            log.error("Could not cancel port/flow stats threads");
        } else {
            log.warn("Statistics collection thread(s) stopped");
        }
    }

    /**
     * Retrieve the statistics from all switches in parallel.
     * @param dpids
     * @param statsType
     * @return map of switch to its replies; switches that did not answer in time are omitted
     */
    private Map<DatapathId, List<OFStatsReply>> getSwitchStatistics(Set<DatapathId> dpids, OFStatsType statsType) {
        HashMap<DatapathId, List<OFStatsReply>> model = new HashMap<DatapathId, List<OFStatsReply>>();

        List<GetStatisticsThread> activeThreads = new ArrayList<GetStatisticsThread>(dpids.size());
        List<GetStatisticsThread> pendingRemovalThreads = new ArrayList<GetStatisticsThread>();
        GetStatisticsThread t;
        for (DatapathId d : dpids) {
            t = new GetStatisticsThread(d, statsType);
            activeThreads.add(t);
            t.start();
        }

        /* Join all the threads after the timeout. Set a hard timeout
         * of portStatsInterval seconds (1s poll cycles) for the threads to
         * finish. If the thread has not finished the switch has not replied
         * yet and therefore we won't add the switch's stats to the reply.
         */
        for (int iSleepCycles = 0; iSleepCycles < portStatsInterval; iSleepCycles++) {
            for (GetStatisticsThread curThread : activeThreads) {
                if (curThread.getState() == State.TERMINATED) {
                    model.put(curThread.getSwitchId(), curThread.getStatisticsReply());
                    pendingRemovalThreads.add(curThread);
                }
            }

            /* remove the threads that have completed the queries to the switches */
            for (GetStatisticsThread curThread : pendingRemovalThreads) {
                activeThreads.remove(curThread);
            }
            /* clear the list so we don't try to double remove them */
            pendingRemovalThreads.clear();

            /* if we are done finish early */
            if (activeThreads.isEmpty()) {
                break;
            }

            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                log.error("Interrupted while waiting for statistics", e);
                /* Restore the interrupt flag (previously swallowed) and stop
                 * waiting; callers get whatever replies arrived so far. */
                Thread.currentThread().interrupt();
                break;
            }
        }

        return model;
    }

    /**
     * Get statistics from a switch.
     * @param switchId
     * @param statsType
     * @return the replies, or null if the switch is unknown, the stats type is
     *         unsupported for its OF version, or the request timed out
     */
    @SuppressWarnings("unchecked")
    protected List<OFStatsReply> getSwitchStatistics(DatapathId switchId, OFStatsType statsType) {
        IOFSwitch sw = switchService.getSwitch(switchId);
        ListenableFuture<?> future;
        List<OFStatsReply> values = null;
        Match match;
        if (sw != null) {
            OFStatsRequest<?> req = null;
            switch (statsType) {
            case FLOW:
                match = sw.getOFFactory().buildMatch().build();
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_11) >= 0) {
                    req = sw.getOFFactory().buildFlowStatsRequest()
                            .setMatch(match)
                            .setOutPort(OFPort.ANY)
                            .setOutGroup(OFGroup.ANY)
                            .setTableId(TableId.ALL)
                            .build();
                } else {
                    /* OF1.0 has no group concept. */
                    req = sw.getOFFactory().buildFlowStatsRequest()
                            .setMatch(match)
                            .setOutPort(OFPort.ANY)
                            .setTableId(TableId.ALL)
                            .build();
                }
                break;
            case AGGREGATE:
                match = sw.getOFFactory().buildMatch().build();
                req = sw.getOFFactory().buildAggregateStatsRequest()
                        .setMatch(match)
                        .setOutPort(OFPort.ANY)
                        .setTableId(TableId.ALL)
                        .build();
                break;
            case PORT:
                req = sw.getOFFactory().buildPortStatsRequest()
                        .setPortNo(OFPort.ANY)
                        .build();
                break;
            case QUEUE:
                req = sw.getOFFactory().buildQueueStatsRequest()
                        .setPortNo(OFPort.ANY)
                        .setQueueId(UnsignedLong.MAX_VALUE.longValue())
                        .build();
                break;
            case DESC:
                req = sw.getOFFactory().buildDescStatsRequest()
                        .build();
                break;
            case GROUP:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                    req = sw.getOFFactory().buildGroupStatsRequest()
                            .build();
                }
                break;
            case METER:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                    req = sw.getOFFactory().buildMeterStatsRequest()
                            .setMeterId(OFMeterSerializerVer13.ALL_VAL)
                            .build();
                }
                break;
            case GROUP_DESC:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                    req = sw.getOFFactory().buildGroupDescStatsRequest()
                            .build();
                }
                break;
            case GROUP_FEATURES:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                    req = sw.getOFFactory().buildGroupFeaturesStatsRequest()
                            .build();
                }
                break;
            case METER_CONFIG:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                    req = sw.getOFFactory().buildMeterConfigStatsRequest()
                            .build();
                }
                break;
            case METER_FEATURES:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                    req = sw.getOFFactory().buildMeterFeaturesStatsRequest()
                            .build();
                }
                break;
            case TABLE:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                    req = sw.getOFFactory().buildTableStatsRequest()
                            .build();
                }
                break;
            case TABLE_FEATURES:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_10) > 0) {
                    req = sw.getOFFactory().buildTableFeaturesStatsRequest()
                            .build();
                }
                break;
            case PORT_DESC:
                if (sw.getOFFactory().getVersion().compareTo(OFVersion.OF_13) >= 0) {
                    req = sw.getOFFactory().buildPortDescStatsRequest()
                            .build();
                }
                break;
            case EXPERIMENTER:
            default:
                log.error("Stats Request Type {} not implemented yet", statsType.name());
                break;
            }

            try {
                if (req != null) {
                    future = sw.writeStatsRequest(req);
                    /* Wait at most half the collection interval for the reply. */
                    values = (List<OFStatsReply>) future.get(portStatsInterval * 1000 / 2, TimeUnit.MILLISECONDS);
                }
            } catch (Exception e) {
                log.error("Failure retrieving statistics from switch {}. {}", sw, e);
            }
        }
        return values;
    }
}
|
package net.glowstone.net.message.play.game;
import com.flowpowered.networking.Message;
import lombok.Data;
import net.glowstone.entity.meta.profile.PlayerProfile;
import net.glowstone.util.TextMessage;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
@Data
public final class UserListItemMessage implements Message {
private final Action action;
private final List<Entry> entries;
public UserListItemMessage(Action action, List<Entry> entries) {
this.action = action;
this.entries = entries;
for (Entry entry : entries) {
if (entry.action != action) {
throw new IllegalArgumentException("Entries must be " + action + ", not " + entry.action);
}
}
}
public UserListItemMessage(Action action, Entry entry) {
this(action, Arrays.asList(entry));
}
// add
public static Entry add(PlayerProfile profile) {
return add(profile, 0, 0, null);
}
public static Entry add(PlayerProfile profile, int gameMode, int ping, TextMessage displayName) {
return new Entry(profile.getUniqueId(), profile, gameMode, ping, displayName, Action.ADD_PLAYER);
}
public static UserListItemMessage addOne(PlayerProfile profile) {
return new UserListItemMessage(Action.ADD_PLAYER, add(profile));
}
// gamemode
public static Entry gameMode(UUID uuid, int gameMode) {
return new Entry(uuid, null, gameMode, 0, null, Action.UPDATE_GAMEMODE);
}
public static UserListItemMessage gameModeOne(UUID uuid, int gameMode) {
return new UserListItemMessage(Action.UPDATE_GAMEMODE, gameMode(uuid, gameMode));
}
// latency
public static Entry latency(UUID uuid, int ping) {
return new Entry(uuid, null, 0, ping, null, Action.UPDATE_LATENCY);
}
public static UserListItemMessage latencyOne(UUID uuid, int ping) {
return new UserListItemMessage(Action.UPDATE_LATENCY, latency(uuid, ping));
}
// display name
public static Entry displayName(UUID uuid, TextMessage displayName) {
return new Entry(uuid, null, 0, 0, displayName, Action.UPDATE_DISPLAY_NAME);
}
public static UserListItemMessage displayNameOne(UUID uuid, TextMessage displayName) {
return new UserListItemMessage(Action.UPDATE_DISPLAY_NAME, displayName(uuid, displayName));
}
// remove
public static Entry remove(UUID uuid) {
return new Entry(uuid, null, 0, 0, null, Action.REMOVE_PLAYER);
}
public static UserListItemMessage removeOne(UUID uuid) {
return new UserListItemMessage(Action.REMOVE_PLAYER, remove(uuid));
}
// inner classes
/** The tab-list operation a message performs; all of its entries must share it. */
public enum Action {
ADD_PLAYER, // add a new row (uses profile, gameMode, ping, displayName)
UPDATE_GAMEMODE, // change an existing row's game mode
UPDATE_LATENCY, // change an existing row's ping
UPDATE_DISPLAY_NAME, // change an existing row's display name
REMOVE_PLAYER // remove a row (only the UUID is used)
}
@Data
public static final class Entry {
/** UUID of the player this entry targets; required for every action. */
public final UUID uuid;
/** Full profile; non-null only for ADD_PLAYER entries (see the static factories). */
public final PlayerProfile profile;
/** Game mode id; meaningful for ADD_PLAYER and UPDATE_GAMEMODE, 0 otherwise. */
public final int gameMode;
/** Latency in milliseconds; meaningful for ADD_PLAYER and UPDATE_LATENCY, 0 otherwise. */
public final int ping;
/** Custom tab-list display name; may be null. */
public final TextMessage displayName;
/** Action this entry belongs to; must match the enclosing message's action. */
private final Action action;
}
}
|
package net.nunnerycode.bukkit.mythicdrops.managers;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import net.nunnerycode.bukkit.mythicdrops.MythicDrops;
import net.nunnerycode.bukkit.mythicdrops.api.items.CustomItem;
import net.nunnerycode.bukkit.mythicdrops.api.items.ItemGenerationReason;
import net.nunnerycode.bukkit.mythicdrops.api.items.MythicEnchantment;
import net.nunnerycode.bukkit.mythicdrops.api.items.MythicItemStack;
import net.nunnerycode.bukkit.mythicdrops.api.tiers.Tier;
import net.nunnerycode.bukkit.mythicdrops.events.CustomItemGenerationEvent;
import net.nunnerycode.bukkit.mythicdrops.events.PreCustomItemGenerationEvent;
import net.nunnerycode.bukkit.mythicdrops.events.PreRandomItemGenerationEvent;
import net.nunnerycode.bukkit.mythicdrops.events.RandomItemGenerationEvent;
import net.nunnerycode.bukkit.mythicdrops.tiers.DefaultTier;
import net.nunnerycode.bukkit.mythicdrops.utils.ItemStackUtils;
import net.nunnerycode.bukkit.mythicdrops.utils.RandomRangeUtils;
import org.apache.commons.lang.math.RandomUtils;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Color;
import org.bukkit.Material;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.enchantments.EnchantmentWrapper;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.LeatherArmorMeta;
import org.bukkit.material.MaterialData;
/**
 * Creates the ItemStacks handed out by MythicDrops, firing the plugin's
 * pre/post generation events so listeners can veto or alter each drop.
 */
public class DropManager {

    private final MythicDrops plugin;

    public DropManager(final MythicDrops plugin) {
        this.plugin = plugin;
    }

    /**
     * Materializes a {@link CustomItem} into an ItemStack.
     *
     * @param customItem item template to materialize
     * @param reason     why the item is being generated
     * @return the generated ItemStack, or null if a listener cancelled generation
     */
    public ItemStack generateItemStackFromCustomItem(CustomItem customItem, ItemGenerationReason reason) {
        PreCustomItemGenerationEvent preEvent = new PreCustomItemGenerationEvent(reason, customItem);
        Bukkit.getPluginManager().callEvent(preEvent);
        if (preEvent.isCancelled()) {
            return null;
        }
        // A listener may have substituted a different CustomItem.
        ItemStack is = preEvent.getCustomItem().toItemStack();
        CustomItemGenerationEvent event = new CustomItemGenerationEvent(reason, is);
        Bukkit.getPluginManager().callEvent(event);
        return is;
    }

    /**
     * Generates a random item from a randomly chosen tier.
     *
     * @param reason why the item is being generated
     * @return the generated ItemStack, or null when no tier could be chosen
     *         within 10 attempts or construction failed
     */
    public ItemStack generateItemStack(ItemGenerationReason reason) {
        Tier t = getPlugin().getTierManager().getRandomTierWithChance();
        int attempts = 0;
        // Chance-based selection can come up empty; retry a bounded number of times.
        while (t == null && attempts < 10) {
            t = getPlugin().getTierManager().getRandomTierWithChance();
            attempts++;
        }
        if (t == null) {
            return null;
        }
        try {
            return constructItemStackFromTier(t, reason);
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Builds an item of the given tier using a randomly chosen MaterialData
     * registered for that tier.
     *
     * @param tier   tier to build from; must not be null
     * @param reason why the item is being generated
     * @return the generated ItemStack
     * @throws IllegalArgumentException if tier is null or the chosen MaterialData is AIR
     * @throws NullPointerException     if the tier has no MaterialData or generation failed
     */
    public ItemStack constructItemStackFromTier(Tier tier, ItemGenerationReason reason) throws
            IllegalArgumentException, NullPointerException {
        if (tier == null) {
            throw new IllegalArgumentException("Tier is null");
        }
        Set<MaterialData> materialDataSet = getPlugin().getItemManager().getMaterialDataSetForTier(tier);
        if (materialDataSet.isEmpty()) {
            throw new NullPointerException("Tier " + tier.getTierName() + " has no MaterialData to choose from");
        }
        // Pick one MaterialData uniformly at random from the tier's set.
        MaterialData materialData = materialDataSet.toArray(new MaterialData[materialDataSet.size()])
                [RandomUtils.nextInt(materialDataSet.size())];
        if (materialData == null) {
            throw new NullPointerException("Randomly chosen MaterialData is null");
        }
        if (materialData.getItemTypeId() == 0
                || materialData.getItemType() == Material.AIR) {
            throw new IllegalArgumentException("MaterialData cannot be null or AIR");
        }
        try {
            return constructItemStackFromTierAndMaterialData(tier, materialData, reason);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
            throw new NullPointerException("Generated ItemStack is null");
        }
    }

    /**
     * Builds a fully decorated item (durability, enchantments, name, lore,
     * leather color) from a tier and material.
     *
     * @param tier         tier to build from; must not be null or CUSTOM_ITEM
     * @param materialData material to build from; must not be null or AIR
     * @param reason       why the item is being generated
     * @return the generated ItemStack, or null if a listener cancelled generation
     * @throws IllegalArgumentException on any invalid argument (see above)
     */
    public ItemStack constructItemStackFromTierAndMaterialData(Tier tier, MaterialData materialData,
                                                               ItemGenerationReason reason) throws IllegalArgumentException {
        MythicItemStack is;
        Tier t = tier;
        MaterialData md = materialData;
        if (t == null) {
            throw new IllegalArgumentException("Tier is null");
        }
        if (md == null) {
            throw new IllegalArgumentException("MaterialData is null");
        }
        if (t.equals(DefaultTier.CUSTOM_ITEM)) {
            throw new IllegalArgumentException("Tier cannot be CUSTOM_ITEM when using this method");
        }
        if (md.getItemTypeId() == 0
                || md.getItemType() == Material.AIR) {
            throw new IllegalArgumentException("MaterialData cannot be AIR");
        }
        PreRandomItemGenerationEvent preEvent = new PreRandomItemGenerationEvent(reason, t, md);
        Bukkit.getPluginManager().callEvent(preEvent);
        if (preEvent.isCancelled()) {
            return null;
        }
        // Listeners may swap in a different tier and/or material.
        md = preEvent.getMaterialData();
        t = preEvent.getTier();
        is = new MythicItemStack(md);
        // Commands always yield pristine items; other reasons get random durability
        // within the tier's configured percentage range.
        if (reason != null && reason != ItemGenerationReason.COMMAND) {
            is.setDurability(ItemStackUtils.getAcceptableDurability(md.getItemType(),
                    ItemStackUtils.getDurabilityForMaterial(md.getItemType(), t.getMinimumDurabilityPercentage(),
                            t.getMaximumDurabilityPercentage())));
        }
        addBaseEnchantments(is, t);
        addBonusEnchantments(is, t);
        ItemMeta im = is.getItemMeta();
        im.setDisplayName(getPlugin().getNameManager().randomFormattedName(
                is, t));
        // NOTE(review): lore is sourced from the pre-event `tier` while the display
        // name uses the possibly event-modified `t` — confirm this split is intended.
        generateLore(tier, is, t, md, im);
        if (im instanceof LeatherArmorMeta) {
            // Leather armor gets a uniformly random dye color.
            ((LeatherArmorMeta) im).setColor(Color.fromRGB(RandomUtils.nextInt(256), RandomUtils.nextInt(256),
                    RandomUtils.nextInt(256)));
        }
        is.setItemMeta(im);
        RandomItemGenerationEvent event = new RandomItemGenerationEvent(reason, t, is);
        Bukkit.getPluginManager().callEvent(event);
        return is;
    }

    /**
     * Applies the tier's guaranteed (base) enchantments to the item.
     * NOTE(review): when safe enchantments are enabled but an enchantment cannot be
     * applied to this item type, it is silently skipped — confirm that is intended.
     */
    private void addBaseEnchantments(MythicItemStack is, Tier t) {
        for (MythicEnchantment me : t.getBaseEnchantments()) {
            if (me.getEnchantment() == null) {
                continue;
            }
            if (t.isSafeBaseEnchantments() && me.getEnchantment().canEnchantItem(is)) {
                EnchantmentWrapper enchantmentWrapper = new EnchantmentWrapper(me.getEnchantment().getId());
                // Clamp the configured range to the enchantment's own legal range.
                int minimumLevel = Math.max(me.getMinimumLevel(), enchantmentWrapper.getStartLevel());
                int maximumLevel = Math.min(me.getMaximumLevel(), enchantmentWrapper.getMaxLevel());
                if (t.isAllowHighBaseEnchantments()) {
                    is.addEnchantment(me.getEnchantment(), (int) RandomRangeUtils.randomRangeLongInclusive(minimumLevel,
                            maximumLevel));
                } else {
                    is.addEnchantment(me.getEnchantment(), getAcceptableEnchantmentLevel(me.getEnchantment(),
                            (int) RandomRangeUtils.randomRangeLongInclusive(minimumLevel, maximumLevel)));
                }
            } else if (!t.isSafeBaseEnchantments()) {
                is.addUnsafeEnchantment(me.getEnchantment(),
                        (int) RandomRangeUtils.randomRangeLongInclusive(me.getMinimumLevel(), me.getMaximumLevel()));
            }
        }
    }

    /** Clamps the requested level into the enchantment's [startLevel, maxLevel] range. */
    private int getAcceptableEnchantmentLevel(Enchantment ench, int level) {
        EnchantmentWrapper ew = new EnchantmentWrapper(ench.getId());
        return Math.max(Math.min(level, ew.getMaxLevel()), ew.getStartLevel());
    }

    /**
     * Applies a random number of the tier's bonus enchantments.
     *
     * BUGFIX 1: the inner branch was inverted ({@code !isSafeBonusEnchantments()}
     * nested inside {@code isSafeBonusEnchantments()}), which made the safe path
     * unreachable and always applied bonus enchantments unsafely.
     * BUGFIX 2: the candidate pool was only populated when safe mode was on, so
     * tiers with unsafe bonus enchantments never received any bonus enchantment.
     */
    private void addBonusEnchantments(MythicItemStack is, Tier t) {
        if (t.getMaximumAmountOfBonusEnchantments() <= 0) {
            return;
        }
        int bonusCount = (int) RandomRangeUtils
                .randomRangeLongInclusive(t.getMinimumAmountOfBonusEnchantments(),
                        t.getMaximumAmountOfBonusEnchantments());
        // Candidate pool: in safe mode, only enchantments valid for this item type;
        // in unsafe mode, every registered enchantment.
        List<Enchantment> applicable = new ArrayList<Enchantment>();
        for (Enchantment e : Enchantment.values()) {
            if (!t.isSafeBonusEnchantments() || e.canEnchantItem(is)) {
                applicable.add(e);
            }
        }
        // Restrict the tier's configured bonus enchantments to the applicable pool.
        List<MythicEnchantment> actual = new ArrayList<MythicEnchantment>();
        for (MythicEnchantment te : t.getBonusEnchantments()) {
            if (te.getEnchantment() == null) {
                continue;
            }
            if (applicable.contains(te.getEnchantment())) {
                actual.add(te);
            }
        }
        for (int i = 0; i < bonusCount; i++) {
            if (actual.isEmpty()) {
                break;
            }
            MythicEnchantment ench = actual.get((int) RandomRangeUtils.randomRangeLongExclusive(0, actual.size()));
            int lev = (int) RandomRangeUtils
                    .randomRangeLongInclusive(ench.getMinimumLevel(), ench.getMaximumLevel());
            int level = lev <= 0 ? 1 : Math.abs(lev);
            if (t.isSafeBonusEnchantments()) {
                is.addEnchantment(ench.getEnchantment(),
                        getAcceptableEnchantmentLevel(ench.getEnchantment(), level));
            } else {
                is.addUnsafeEnchantment(ench.getEnchantment(), level);
            }
        }
    }

    /**
     * Builds the item's lore: the tier's lore lines (or the global lore format as a
     * fallback), with placeholders substituted and color codes translated, plus
     * optional random lore lines.
     */
    private void generateLore(Tier tier, MythicItemStack is, Tier t, MaterialData md, ItemMeta im) {
        List<String> toolTips = (!tier.getTierLore().isEmpty()) ? tier.getTierLore() : getPlugin().getSettingsManager
                ().getLoreFormat();
        List<String> tt = new ArrayList<String>();
        String itemType = getPlugin().getNameManager().getItemTypeName(md);
        String tName = t.getTierDisplayName();
        String baseMaterial = getPlugin().getNameManager().getMinecraftMaterialName(is.getType());
        String mythicMaterial = getPlugin().getNameManager().getMythicMaterialName(is.getData());
        String enchantmentString = getPlugin().getNameManager().getEnchantmentTypeName(is);
        for (String s : toolTips) {
            String s1 = s;
            // Substitute the supported lore placeholders.
            if (s1.contains("%itemtype%")) {
                s1 = s1.replace("%itemtype%", itemType);
            }
            if (s1.contains("%basematerial%")) {
                s1 = s1.replace("%basematerial%", baseMaterial);
            }
            if (s1.contains("%tiername%")) {
                s1 = s1.replace("%tiername%", tName);
            }
            if (s1.contains("%mythicmaterial%")) {
                s1 = s1.replace("%mythicmaterial%", mythicMaterial);
            }
            if (s1.contains("%enchantment%")) {
                s1 = s1.replace("%enchantment%", enchantmentString);
            }
            tt.add(ChatColor.translateAlternateColorCodes('&', s1));
        }
        // Optionally append random lore lines, gated by the configured chance.
        if (getPlugin().getSettingsManager().isRandomLoreEnabled() &&
                RandomRangeUtils.randomRangeDoubleExclusive(0.0, 1.0) <= getPlugin().getSettingsManager()
                        .getRandomLoreChance()) {
            tt.addAll(getPlugin().getNameManager().randomLore(md.getItemType(), t,
                    ItemStackUtils.getHighestEnchantment(is)));
        }
        im.setLore(tt);
    }

    public MythicDrops getPlugin() {
        return plugin;
    }

    /**
     * Builds an item for a specific material, choosing a random tier that is
     * registered for that material.
     *
     * @param matData material to build from; must not be null or AIR
     * @param reason  why the item is being generated
     * @return the generated ItemStack
     * @throws IllegalArgumentException if matData is null or AIR
     * @throws NullPointerException     if no tier could be chosen or generation failed
     */
    public ItemStack constructItemStackFromMaterialData(MaterialData matData, ItemGenerationReason reason) throws IllegalArgumentException, NullPointerException {
        // BUGFIX: validate matData before using it — it was previously dereferenced
        // (getTiersForMaterialData) ahead of its own null check.
        if (matData == null) {
            throw new IllegalArgumentException("MaterialData cannot be null");
        }
        if (matData.getItemTypeId() == 0
                || matData.getItemType() == Material.AIR) {
            throw new IllegalArgumentException("MaterialData cannot be AIR");
        }
        Tier tier = getPlugin().getTierManager().getRandomTierFromSetWithChance(
                new HashSet<Tier>(getPlugin().getItemManager().getTiersForMaterialData(matData)));
        if (tier == null) {
            throw new NullPointerException("Randomly chosen Tier is null");
        }
        try {
            return constructItemStackFromTierAndMaterialData(tier, matData, reason);
        } catch (IllegalArgumentException e) {
            e.printStackTrace();
            throw new NullPointerException("Generated ItemStack is null");
        }
    }
}
|
package org.suren.autotest.web.framework.data;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Component;
import org.suren.autotest.web.framework.util.CommonNumberUtil;
import org.suren.autotest.web.framework.util.IDCardUtil;
import org.suren.autotest.web.framework.util.RandomUtils;
import org.suren.autotest.web.framework.util.StringUtils;
/**
 * Default {@link DynamicData} implementation that expands simple placeholders
 * in a source string:
 * <ul>
 *   <li>{@code ${now}} - current epoch milliseconds</li>
 *   <li>{@code ${now <pattern>}} - current date, pattern being
 *       {@code yyyy-MM-dd} or {@code yyyy-MM-dd HH:mm:ss}</li>
 *   <li>{@code ${id_card}}, {@code ${email}}, {@code ${phone}},
 *       {@code ${postcode}} - randomly generated values</li>
 *   <li>{@code ${random-N}} - a random int derived from N</li>
 * </ul>
 *
 * @author suren
 * @date 2017-01-04 12:33:01
 */
@Component
public class SimpleDynamicData implements DynamicData
{
    /** Date patterns accepted by the "${now <pattern>}" placeholder. */
    private List<String> formatList = new ArrayList<String>();
    /** Opening token of the random-number placeholder, e.g. "${random-5}". */
    private String random = "${random-";

    public SimpleDynamicData()
    {
        formatList.add("yyyy-MM-dd");
        formatList.add("yyyy-MM-dd HH:mm:ss");
    }

    /**
     * Expands every supported placeholder in the given string.
     *
     * @param orginData raw value, possibly containing placeholders
     * @return the value with all recognized placeholders replaced
     */
    @Override
    public String getValue(final String orginData)
    {
        String value = orginData;
        value = value.replace("${now}", String.valueOf(System.currentTimeMillis()));
        for(String format : formatList)
        {
            String param = "${now " + format + "}";
            if(value.contains(param))
            {
                value = value.replace(param, new SimpleDateFormat(format).format(new Date()));
            }
        }
        if(value.contains("${id_card}"))
        {
            value = value.replace("${id_card}", IDCardUtil.generate());
        }
        if(value.contains("${email}"))
        {
            value = value.replace("${email}", StringUtils.email());
        }
        if(value.contains("${phone}"))
        {
            value = value.replace("${phone}", CommonNumberUtil.phoneNumber());
        }
        if(value.contains("${postcode}"))
        {
            value = value.replace("${postcode}", CommonNumberUtil.postCode());
        }
        if(value.contains(random))
        {
            value = parseRandomParam(value);
        }
        return value;
    }

    /**
     * Expands every "${random-N}" placeholder with a random int
     * (RandomUtils.nextInt(N) — presumably a value in [0, N); confirm).
     *
     * BUGFIX: a placeholder without a closing brace previously crashed with
     * StringIndexOutOfBoundsException (substring end index -1); such malformed
     * placeholders are now left untouched.
     */
    private String parseRandomParam(String randomParam)
    {
        int index;
        while((index = randomParam.indexOf(random)) != -1)
        {
            int numIndex = index + random.length();
            int numEndIndex = randomParam.indexOf("}", numIndex);
            if(numEndIndex < 0)
            {
                // No closing brace: stop expanding rather than crash.
                break;
            }
            int num = Integer.parseInt(randomParam.substring(numIndex, numEndIndex));
            num = RandomUtils.nextInt(num);
            randomParam = randomParam.substring(0, index) + num + randomParam.substring(numEndIndex + 1);
        }
        return randomParam;
    }

    /** Identifier under which this implementation is registered. */
    @Override
    public String getType()
    {
        return "simple";
    }

    /** This implementation needs no external data; intentionally a no-op. */
    @Override
    public void setData(Map<String, Object> data)
    {
    }
}
|
package org.zalando.nakadi.repository.zookeeper;
import org.apache.curator.RetryPolicy;
import org.apache.curator.ensemble.EnsembleProvider;
import org.apache.curator.ensemble.exhibitor.DefaultExhibitorRestClient;
import org.apache.curator.ensemble.exhibitor.ExhibitorRestClient;
import org.apache.curator.ensemble.exhibitor.Exhibitors;
import org.apache.curator.ensemble.fixed.FixedEnsembleProvider;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import java.util.Arrays;
import java.util.Collection;
/**
 * Owns the Curator/ZooKeeper client used by the application. The ensemble is
 * resolved either from an Exhibitor cluster (when exhibitor addresses are
 * configured) or from a fixed broker list.
 */
public class ZooKeeperHolder {

    private static final int EXHIBITOR_RETRY_TIME = 1000;
    private static final int EXHIBITOR_RETRY_MAX = 3;
    private static final int EXHIBITOR_POLLING_MS = 300000;

    private final String zookeeperBrokers;
    private final String zookeeperKafkaNamespace;
    private final String exhibitorAddresses;
    private final Integer exhibitorPort;

    private CuratorFramework zooKeeper;

    public ZooKeeperHolder(final String zookeeperBrokers,
                           final String zookeeperKafkaNamespace,
                           final String exhibitorAddresses,
                           final Integer exhibitorPort) throws Exception {
        this.zookeeperBrokers = zookeeperBrokers;
        this.zookeeperKafkaNamespace = zookeeperKafkaNamespace;
        this.exhibitorAddresses = exhibitorAddresses;
        this.exhibitorPort = exhibitorPort;
        initExhibitor();
    }

    /** Builds, configures and starts the Curator client. */
    private void initExhibitor() throws Exception {
        final RetryPolicy backoff = new ExponentialBackoffRetry(EXHIBITOR_RETRY_TIME, EXHIBITOR_RETRY_MAX);
        final EnsembleProvider provider = createEnsembleProvider(backoff);
        zooKeeper = CuratorFrameworkFactory.builder()
                .ensembleProvider(provider)
                .retryPolicy(backoff)
                .build();
        zooKeeper.start();
    }

    /**
     * Chooses the ensemble source: Exhibitor-backed discovery when addresses are
     * configured, otherwise the fixed broker connection string.
     */
    private EnsembleProvider createEnsembleProvider(final RetryPolicy backoff) throws Exception {
        if (exhibitorAddresses == null) {
            return new FixedEnsembleProvider(zookeeperBrokers + zookeeperKafkaNamespace);
        }
        final Collection<String> exhibitorHosts = Arrays.asList(exhibitorAddresses.split("\\s*,\\s*"));
        final Exhibitors exhibitors = new Exhibitors(exhibitorHosts, exhibitorPort,
                () -> zookeeperBrokers + zookeeperKafkaNamespace);
        final ExhibitorEnsembleProvider provider = new ExhibitorEnsembleProvider(exhibitors,
                new DefaultExhibitorRestClient(), "/exhibitor/v1/cluster/list", EXHIBITOR_POLLING_MS, backoff);
        // Resolve the initial ensemble before the client is built.
        provider.pollForInitialEnsemble();
        return provider;
    }

    public CuratorFramework get() {
        return zooKeeper;
    }

    /** Exhibitor provider that appends the Kafka namespace to the connection string. */
    private class ExhibitorEnsembleProvider extends org.apache.curator.ensemble.exhibitor.ExhibitorEnsembleProvider {
        public ExhibitorEnsembleProvider(final Exhibitors exhibitors, final ExhibitorRestClient restClient,
                                         final String restUriPath, final int pollingMs, final RetryPolicy retryPolicy) {
            super(exhibitors, restClient, restUriPath, pollingMs, retryPolicy);
        }

        @Override
        public String getConnectionString() {
            return super.getConnectionString() + zookeeperKafkaNamespace;
        }
    }
}
|
package uk.ac.ebi.subs.api.validators;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;
import org.springframework.validation.Errors;
import org.springframework.validation.Validator;
import uk.ac.ebi.subs.api.services.SubmissionStatusService;
import uk.ac.ebi.subs.api.services.ValidationResultService;
import uk.ac.ebi.subs.data.status.StatusDescription;
import uk.ac.ebi.subs.data.status.SubmissionStatusEnum;
import uk.ac.ebi.subs.repository.model.Submission;
import uk.ac.ebi.subs.repository.model.SubmissionStatus;
import uk.ac.ebi.subs.repository.repos.SubmissionRepository;
import uk.ac.ebi.subs.repository.repos.status.SubmissionStatusRepository;
import java.util.Map;
/**
 * Validates requested {@link SubmissionStatus} changes: the status must be
 * non-blank, the current status must be changeable, and the target status must
 * be among the legal transitions for the owning submission.
 */
@Component
@RequiredArgsConstructor
public class SubmissionStatusValidator implements Validator {

    @NonNull
    private Map<String, StatusDescription> submissionStatusDescriptionMap;
    @NonNull
    private SubmissionStatusRepository submissionStatusRepository;
    @NonNull
    private ValidationResultService validationResultService;
    @NonNull
    private SubmissionRepository submissionRepository;
    @NonNull
    private SubmissionStatusService submissionStatusService;

    @Override
    public boolean supports(Class<?> clazz) {
        return SubmissionStatus.class.equals(clazz);
    }

    @Override
    public void validate(Object target, Errors errors) {
        // Cast is safe: supports() restricts targets to SubmissionStatus.
        SubmissionStatus submissionStatus = (SubmissionStatus) target;

        // The status field itself must be present and non-blank.
        SubsApiErrors.rejectIfEmptyOrWhitespace(errors, "status");
        if (errors.hasErrors()) {
            return;
        }

        // A locked status cannot transition at all.
        if (!submissionStatusService.isSubmissionStatusChangeable(submissionStatus)) {
            SubsApiErrors.resource_locked.addError(errors, "status");
            return;
        }

        // The requested status must be a legal transition for the owning submission.
        Submission submission = submissionRepository.findBySubmissionStatusId(submissionStatus.getId());
        boolean allowed = submissionStatusService
                .getAvailableStatusNames(submission, submissionStatusDescriptionMap)
                .contains(submissionStatus.getStatus());
        if (!allowed) {
            SubsApiErrors.invalid.addError(errors, "status");
        }
    }
}
|
package yahoofinance.quotes.query1v7;
import com.fasterxml.jackson.databind.JsonNode;
import yahoofinance.Stock;
import yahoofinance.Utils;
import yahoofinance.quotes.stock.StockDividend;
import yahoofinance.quotes.stock.StockQuote;
import yahoofinance.quotes.stock.StockStats;
import java.math.BigDecimal;
import java.util.TimeZone;
/**
 * Parses responses of the Yahoo Finance "query1" v7 quote endpoint into
 * {@link Stock} objects. Fields missing from the response are left null.
 *
 * @author Stijn Strickx
 */
public class StockQuotesQuery1V7Request extends QuotesRequest<Stock> {

    private static final BigDecimal ONE_HUNDRED = new BigDecimal(100);

    public StockQuotesQuery1V7Request(String symbols) {
        super(symbols);
    }

    @Override
    protected Stock parseJson(JsonNode node) {
        Stock stock = new Stock(node.get("symbol").asText());
        // Prefer the long company name, falling back to the short one.
        if (node.has("longName")) {
            stock.setName(node.get("longName").asText());
        } else {
            stock.setName(getStringValue(node, "shortName"));
        }
        stock.setCurrency(getStringValue(node, "currency"));
        stock.setStockExchange(getStringValue(node, "fullExchangeName"));
        stock.setQuote(this.getQuote(node));
        stock.setStats(this.getStats(node));
        stock.setDividend(this.getDividend(node));
        return stock;
    }

    /** Returns the field's text value, or null when the node lacks the field. */
    private String getStringValue(JsonNode node, String field) {
        return node.has(field) ? node.get(field).asText() : null;
    }

    /** Extracts price and volume data into a {@link StockQuote}. */
    private StockQuote getQuote(JsonNode node) {
        StockQuote result = new StockQuote(node.get("symbol").asText());
        // Current trade / order book
        result.setPrice(Utils.getBigDecimal(getStringValue(node, "regularMarketPrice")));
        result.setAsk(Utils.getBigDecimal(getStringValue(node, "ask")));
        result.setAskSize(Utils.getLong(getStringValue(node, "askSize")));
        result.setBid(Utils.getBigDecimal(getStringValue(node, "bid")));
        result.setBidSize(Utils.getLong(getStringValue(node, "bidSize")));
        // Session data
        result.setOpen(Utils.getBigDecimal(getStringValue(node, "regularMarketOpen")));
        result.setPreviousClose(Utils.getBigDecimal(getStringValue(node, "regularMarketPreviousClose")));
        result.setDayHigh(Utils.getBigDecimal(getStringValue(node, "regularMarketDayHigh")));
        result.setDayLow(Utils.getBigDecimal(getStringValue(node, "regularMarketDayLow")));
        result.setTimeZone(TimeZone.getTimeZone(getStringValue(node, "exchangeTimezoneName")));
        if (node.has("regularMarketTime")) {
            result.setLastTradeTime(Utils.unixToCalendar(node.get("regularMarketTime").asLong()));
        }
        // Longer-term ranges, averages, and volumes
        result.setYearHigh(Utils.getBigDecimal(getStringValue(node, "fiftyTwoWeekHigh")));
        result.setYearLow(Utils.getBigDecimal(getStringValue(node, "fiftyTwoWeekLow")));
        result.setPriceAvg50(Utils.getBigDecimal(getStringValue(node, "fiftyDayAverage")));
        result.setPriceAvg200(Utils.getBigDecimal(getStringValue(node, "twoHundredDayAverage")));
        result.setVolume(Utils.getLong(getStringValue(node, "regularMarketVolume")));
        result.setAvgVolume(Utils.getLong(getStringValue(node, "averageDailyVolume3Month")));
        return result;
    }

    /** Extracts the fundamental statistics exposed by the v7 endpoint. */
    private StockStats getStats(JsonNode node) {
        StockStats result = new StockStats(getStringValue(node, "symbol"));
        result.setMarketCap(Utils.getBigDecimal(getStringValue(node, "marketCap")));
        result.setSharesOutstanding(Utils.getLong(getStringValue(node, "sharesOutstanding")));
        result.setEps(Utils.getBigDecimal(getStringValue(node, "epsTrailingTwelveMonths")));
        result.setPe(Utils.getBigDecimal(getStringValue(node, "trailingPE")));
        result.setEpsEstimateCurrentYear(Utils.getBigDecimal(getStringValue(node, "epsForward")));
        result.setPriceBook(Utils.getBigDecimal(getStringValue(node, "priceToBook")));
        result.setBookValuePerShare(Utils.getBigDecimal(getStringValue(node, "bookValue")));
        if (node.has("earningsTimestamp")) {
            result.setEarningsAnnouncement(Utils.unixToCalendar(node.get("earningsTimestamp").asLong()));
        }
        return result;
    }

    /**
     * Extracts dividend data; when no dividend date is present the returned
     * {@link StockDividend} carries only the symbol.
     */
    private StockDividend getDividend(JsonNode node) {
        StockDividend result = new StockDividend(getStringValue(node, "symbol"));
        if (!node.has("dividendDate")) {
            return result;
        }
        result.setPayDate(Utils.unixToCalendar(node.get("dividendDate").asLong()));
        result.setAnnualYield(Utils.getBigDecimal(getStringValue(node, "trailingAnnualDividendRate")));
        BigDecimal yield = Utils.getBigDecimal(getStringValue(node, "trailingAnnualDividendYield"));
        if (yield != null) {
            // Scale the reported yield by 100 to expose it as a percentage.
            result.setAnnualYieldPercent(yield.multiply(ONE_HUNDRED));
        }
        return result;
    }
}
|
package controllers;
import components.TokenExpiredResult;
import exception.*;
import models.AbstractMessage;
import models.Activity;
import models.User;
import models.UserActivityRelation;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import play.mvc.Controller;
import play.mvc.Result;
import utilities.Converter;
import utilities.Loggy;
import java.util.Map;
/**
 * Admin-only endpoints for moderating activities. Every action authenticates
 * via a token form field and verifies admin access before proceeding.
 *
 * All actions answer 200 on success, 200 with a token-expired payload when the
 * token has expired, and 400 on any other failure.
 */
public class AdminController extends Controller {

    public static final String TAG = AdminController.class.getName();

    /** Accepts the activity identified by the activityId form field. */
    public static Result accept() {
        try {
            Map<String, String[]> formData = request().body().asFormUrlEncoded();
            User user = requireAdmin(formData);
            Activity activity = requireActivity(formData);
            if (!DBCommander.acceptActivity(user, activity)) throw new NullPointerException();
            return ok();
        } catch (TokenExpiredException e) {
            return ok(TokenExpiredResult.get());
        } catch (Exception e) {
            Loggy.e(TAG, "accept", e);
        }
        // CONSISTENCY FIX: was badRequest("") — now matches the other endpoints.
        return badRequest();
    }

    /** Rejects the activity identified by the activityId form field. */
    public static Result reject() {
        try {
            Map<String, String[]> formData = request().body().asFormUrlEncoded();
            User user = requireAdmin(formData);
            Activity activity = requireActivity(formData);
            if (!DBCommander.rejectActivity(user, activity)) throw new NullPointerException();
            return ok();
        } catch (TokenExpiredException e) {
            return ok(TokenExpiredResult.get());
        } catch (Exception e) {
            Loggy.e(TAG, "reject", e);
        }
        return badRequest();
    }

    /** Deletes the activity identified by the activityId form field. */
    public static Result delete() {
        try {
            Map<String, String[]> formData = request().body().asFormUrlEncoded();
            requireAdmin(formData);
            Long activityId = requireActivityId(formData);
            if (!ExtraCommander.deleteActivity(activityId)) throw new NullPointerException();
            return ok();
        } catch (TokenExpiredException e) {
            return ok(TokenExpiredResult.get());
        } catch (Exception e) {
            Loggy.e(TAG, "delete", e);
        }
        return badRequest();
    }

    /** Re-prioritizes activities from a JSON bundle form field. */
    public static Result prioritize() {
        try {
            Map<String, String[]> formData = request().body().asFormUrlEncoded();
            requireAdmin(formData);
            JSONArray bundle = (JSONArray) JSONValue.parse(formData.get(AbstractMessage.BUNDLE)[0]);
            for (Object obj : bundle) {
                // Placeholder until priority updates are implemented.
                Activity activity = new Activity((JSONObject) obj);
                /**
                 * TODO: update activity priority settings
                 * */
            }
            return ok();
        } catch (TokenExpiredException e) {
            return ok(TokenExpiredResult.get());
        } catch (Exception e) {
            Loggy.e(TAG, "prioritize", e);
        }
        return badRequest();
    }

    /**
     * Resolves the requesting user from the token form field and verifies admin
     * access. Shared by all endpoints (extracted from four identical copies).
     *
     * @return the authenticated admin user
     * @throws Exception UserNotFoundException when the token does not resolve,
     *                   AccessDeniedException when the user is not an admin,
     *                   TokenExpiredException propagated from the token lookup
     */
    private static User requireAdmin(Map<String, String[]> formData) throws Exception {
        String token = formData.get(User.TOKEN)[0];
        Long userId = DBCommander.queryUserId(token);
        if (userId == null) throw new UserNotFoundException();
        User user = DBCommander.queryUser(userId);
        if (user == null) throw new UserNotFoundException();
        if (!DBCommander.validateAdminAccess(user)) throw new AccessDeniedException();
        return user;
    }

    /** Parses and validates the activityId form field. */
    private static Long requireActivityId(Map<String, String[]> formData) throws Exception {
        Long activityId = Converter.toLong(formData.get(UserActivityRelation.ACTIVITY_ID)[0]);
        if (activityId == null) throw new InvalidQueryParamsException();
        return activityId;
    }

    /** Loads the activity referenced by the activityId form field. */
    private static Activity requireActivity(Map<String, String[]> formData) throws Exception {
        Activity activity = DBCommander.queryActivity(requireActivityId(formData));
        if (activity == null) throw new ActivityNotFoundException();
        return activity;
    }
}
|
package org.jboss.as.test.integration.messaging.jms.external;
import static org.jboss.as.controller.client.helpers.ClientConstants.ADD;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.jboss.shrinkwrap.api.ShrinkWrap.create;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.jboss.as.arquillian.api.ServerSetup;
import org.jboss.as.controller.client.helpers.Operations;
import org.jboss.as.test.integration.common.HttpRequest;
import org.jboss.as.test.integration.common.jms.JMSOperations;
import org.jboss.as.test.integration.common.jms.JMSOperationsProvider;
import org.jboss.as.test.shared.ServerReload;
import org.jboss.as.test.shared.SnapshotRestoreSetupTask;
import org.jboss.as.test.shared.TimeoutUtil;
import org.jboss.as.test.shared.util.AssumeTestGroupUtil;
import org.jboss.dmr.ModelNode;
import org.jboss.logging.Logger;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jgroups.util.StackType;
import org.jgroups.util.Util;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(Arquillian.class)
@RunAsClient
@ServerSetup(DiscoveryGroupExternalMessagingDeploymentTestCase.SetupTask.class)
public class DiscoveryGroupExternalMessagingDeploymentTestCase {
public static final boolean SKIP = AccessController.doPrivileged((PrivilegedAction<Boolean>) () -> {
return Util.checkForWindows() && (Util.getIpStackType() == StackType.IPv6);
});
public static final String QUEUE_LOOKUP = "java:/jms/DependentMessagingDeploymentTestCase/myQueue";
public static final String TOPIC_LOOKUP = "java:/jms/DependentMessagingDeploymentTestCase/myTopic";
public static final String REMOTE_PCF = "remote-artemis";
private static final String QUEUE_NAME = "myQueue";
private static final String TOPIC_NAME = "myTopic";
private static final String DISCOVERY_GROUP_NAME = "dg1";
private static final String MULTICAST_SOCKET_BINDING = "messaging-group";
private static final String TESTSUITE_MCAST = System.getProperty("mcast", "230.0.0.4");
@ArquillianResource
private URL url;
static class SetupTask extends SnapshotRestoreSetupTask {
private static final Logger logger = Logger.getLogger(DiscoveryGroupExternalMessagingDeploymentTestCase.SetupTask.class);
// Configures the server so the deployment talks to the (embedded) broker as if it
// were external, discovered over multicast. Operation order matters: socket
// binding and discovery group must exist before the pooled connection factory
// that references them, and a reload applies everything at the end.
@Override
public void doSetup(org.jboss.as.arquillian.container.ManagementClient managementClient, String s) throws Exception {
// [WFCI-32] Skip entirely on Windows + IPv6; multicast is broken in that CI environment.
if(SKIP) {
logger.info("We are running on Windows with IPV6 stack");
logger.info("[WFCI-32] Disable on Windows+IPv6 until CI environment is fixed");
return;
}
JMSOperations ops = JMSOperationsProvider.getInstance(managementClient.getControllerClient());
// Server-side destinations backing the external lookups.
ops.createJmsQueue(QUEUE_NAME, "/queue/" + QUEUE_NAME);
ops.createJmsTopic(TOPIC_NAME, "/topic/" + TOPIC_NAME);
// Multicast socket binding + client-side discovery group for the new PCF.
execute(managementClient, addMulticastSocketBinding(MULTICAST_SOCKET_BINDING, TESTSUITE_MCAST, "${jboss.messaging.group.port:45700}"), true);
execute(managementClient, addClientDiscoveryGroup(DISCOVERY_GROUP_NAME, MULTICAST_SOCKET_BINDING), true);
// Remove the default in-VM pooled connection factory; it will be replaced below.
ModelNode op = Operations.createRemoveOperation(getInitialPooledConnectionFactoryAddress());
execute(managementClient, op, true);
// Broker-side broadcast/discovery groups and cluster connection over the same binding.
execute(managementClient, createBroadcastGroupWithSocketBinding(ops.getServerAddress(), "bg-group1", MULTICAST_SOCKET_BINDING, "http-connector"), true);
execute(managementClient, createDiscoveryGroupWithSocketBinding(ops.getServerAddress(), "dg-group1", MULTICAST_SOCKET_BINDING), true);
execute(managementClient, createClusterConnection(ops.getServerAddress(), "my-cluster", "jms", "http-connector", "dg-group1"), true);
// Discovery-based XA pooled connection factory replacing the default one.
op = Operations.createAddOperation(getPooledConnectionFactoryAddress());
op.get("transaction").set("xa");
op.get("entries").add("java:/JmsXA java:jboss/DefaultJMSConnectionFactory");
op.get("discovery-group").set(DISCOVERY_GROUP_NAME);
execute(managementClient, op, true);
// External topic/queue resources bound to the JNDI names the deployment looks up.
op = Operations.createAddOperation(getClientTopicAddress());
op.get("entries").add(TOPIC_LOOKUP);
op.get("entries").add("/topic/myAwesomeClientTopic");
execute(managementClient, op, true);
op = Operations.createAddOperation(getClientQueueAddress());
op.get("entries").add(QUEUE_LOOKUP);
// NOTE(review): queue entry uses a "/topic/" prefix — looks copy-pasted from the
// topic block above; confirm whether "/queue/myAwesomeClientQueue" was intended.
op.get("entries").add("/topic/myAwesomeClientQueue");
execute(managementClient, op, true);
ServerReload.executeReloadAndWaitForCompletion(managementClient.getControllerClient());
}
/**
 * Executes a management operation and asserts the expected outcome.
 *
 * @param managementClient client used to talk to the running server
 * @param op the management operation to execute
 * @param expectSuccess whether the operation is expected to succeed
 * @return the "result" node on success, the "failure-description" node otherwise
 * @throws IOException on a communication failure with the server
 */
private ModelNode execute(final org.jboss.as.arquillian.container.ManagementClient managementClient, final ModelNode op, final boolean expectSuccess) throws IOException {
    final ModelNode response = managementClient.getControllerClient().execute(op);
    final String outcome = response.get("outcome").asString();
    if (!expectSuccess) {
        assertEquals("failed", outcome);
        return response.get("failure-description");
    }
    assertEquals(response.toString(), "success", outcome);
    return response.get("result");
}
/** Management address of the remote pooled connection factory under test. */
ModelNode getPooledConnectionFactoryAddress() {
    final ModelNode addr = new ModelNode();
    addr.add("subsystem", "messaging-activemq");
    addr.add("pooled-connection-factory", REMOTE_PCF);
    return addr;
}
/** Management address of the external (client-side) JMS topic. */
ModelNode getClientTopicAddress() {
    final ModelNode addr = new ModelNode();
    addr.add("subsystem", "messaging-activemq");
    addr.add("external-jms-topic", TOPIC_NAME);
    return addr;
}
/** Management address of the external (client-side) JMS queue. */
ModelNode getClientQueueAddress() {
    final ModelNode addr = new ModelNode();
    addr.add("subsystem", "messaging-activemq");
    addr.add("external-jms-queue", QUEUE_NAME);
    return addr;
}
/** Management address of the default in-server pooled connection factory. */
ModelNode getInitialPooledConnectionFactoryAddress() {
    final ModelNode addr = new ModelNode();
    addr.add("subsystem", "messaging-activemq");
    addr.add("server", "default");
    addr.add("pooled-connection-factory", "activemq-ra");
    return addr;
}
/**
 * Builds the add operation for a client-side discovery group backed by the
 * given socket binding.
 * Consistency fix: the operation is now built with
 * {@code Operations.createAddOperation} like the other helpers in this class,
 * instead of assembling OP/OP_ADDR by hand.
 *
 * @param name          discovery group name
 * @param socketBinding multicast socket binding the group listens on
 * @return the "add" management operation
 */
ModelNode addClientDiscoveryGroup(String name, String socketBinding) {
    ModelNode address = new ModelNode();
    address.add("subsystem", "messaging-activemq");
    address.add("discovery-group", name);
    ModelNode add = Operations.createAddOperation(address);
    add.get("socket-binding").set(socketBinding);
    // Generous, CI-adjusted timeout so slow machines still discover the server.
    add.get("initial-wait-timeout").set(TimeoutUtil.adjust(30000));
    return add;
}
/**
 * Builds the add operation for a multicast socket binding in the
 * standard-sockets group.
 * Consistency fix: built via {@code Operations.createAddOperation} like the
 * other helpers, instead of hand-assembling OP/OP_ADDR.
 *
 * @param bindingName      name of the new socket binding
 * @param multicastAddress multicast group address
 * @param multicastPort    multicast port (may be an expression such as
 *                         "${jboss.messaging.group.port:45700}")
 * @return the "add" management operation
 */
ModelNode addMulticastSocketBinding(String bindingName, String multicastAddress, String multicastPort) {
    ModelNode address = new ModelNode();
    address.add("socket-binding-group", "standard-sockets");
    address.add("socket-binding", bindingName);
    ModelNode socketBindingOp = Operations.createAddOperation(address);
    socketBindingOp.get("multicast-address").set(multicastAddress);
    socketBindingOp.get("multicast-port").set(multicastPort);
    return socketBindingOp;
}
/**
 * Creates the add operation for a server-side discovery group bound to a
 * socket binding.
 */
ModelNode createDiscoveryGroupWithSocketBinding(ModelNode serverAddress, String discoveryGroupName, String socketBinding) throws Exception {
    final ModelNode groupAddress = serverAddress.clone();
    groupAddress.add("discovery-group", discoveryGroupName);
    final ModelNode addOp = Operations.createAddOperation(groupAddress);
    addOp.get("socket-binding").set(socketBinding);
    return addOp;
}
/**
 * Creates the add operation for a server-side broadcast group bound to a
 * socket binding and announcing the given connector.
 */
ModelNode createBroadcastGroupWithSocketBinding(ModelNode serverAddress, String broadcastGroupName, String socketBinding, String connector) throws Exception {
    final ModelNode groupAddress = serverAddress.clone();
    groupAddress.add("broadcast-group", broadcastGroupName);
    final ModelNode addOp = Operations.createAddOperation(groupAddress);
    addOp.get("socket-binding").set(socketBinding);
    addOp.get("connectors").add(connector);
    return addOp;
}
/**
 * Creates the add operation for a cluster connection that matches the given
 * address prefix, uses the given connector, and discovers members through the
 * given discovery group.
 */
ModelNode createClusterConnection(ModelNode serverAddress, String name, String address, String connector, String discoveryGroup) throws Exception {
    final ModelNode connectionAddress = serverAddress.clone();
    connectionAddress.add("cluster-connection", name);
    final ModelNode addOp = Operations.createAddOperation(connectionAddress);
    addOp.get("cluster-connection-address").set(address);
    addOp.get("connector-name").set(connector);
    addOp.get("discovery-group").set(discoveryGroup);
    return addOp;
}
}
/**
 * Test deployment: the driver servlet plus the queue/topic MDBs. When the
 * test is skipped, a minimal placeholder archive is deployed instead.
 */
@Deployment
public static WebArchive createArchive() {
    final WebArchive war = create(WebArchive.class, "ClientMessagingDeploymentTestCase.war");
    if (SKIP) {
        return war.addAsWebResource(new StringAsset("Root file"), "root-file.txt");
    }
    return war
            .addClass(MessagingServlet.class)
            .addClasses(QueueMDB.class, TopicMDB.class)
            .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml");
}
// This test cannot run under a security manager; assume it is disabled
// (the whole class is skipped otherwise).
@BeforeClass
public static void skipSecurityManager() {
    AssumeTestGroupUtil.assumeSecurityManagerDisabled();
}
// Skip every test method on Windows+IPv6 CI hosts (see WFCI-32).
@Before
public void before() {
    Assume.assumeFalse("[WFCI-32] Disable on Windows+IPv6 until CI environment is fixed", SKIP);
}
// Round-trips a message through the external (client) queue.
@Test
public void testSendMessageInClientQueue() throws Exception {
    sendAndReceiveMessage(true);
}
// Round-trips a message through the external (client) topic.
@Test
public void testSendMessageInClientTopic() throws Exception {
    sendAndReceiveMessage(false);
}
/**
 * Drives the deployed servlet to send a random text to either the queue or
 * the topic, and asserts that the MDB echoes the exact same text back.
 *
 * @param sendToQueue true to target the queue, false to target the topic
 */
private void sendAndReceiveMessage(boolean sendToQueue) throws Exception {
    final String destination = sendToQueue ? "queue" : "topic";
    final String text = UUID.randomUUID().toString();
    final String query = "ClientMessagingDeploymentTestCase?destination=" + destination + "&text=" + text;
    final URL servletUrl = new URL(this.url.toExternalForm() + query);
    final String reply = HttpRequest.get(servletUrl.toExternalForm(), TimeoutUtil.adjust(10), TimeUnit.SECONDS);
    assertNotNull(reply);
    assertEquals(text, reply);
}
}
|
package de.akquinet.engineering.vaadinator.example.address.ui.std.presenter;
import java.util.Map;
import de.akquinet.engineering.vaadinator.example.address.service.AddressService;
import de.akquinet.engineering.vaadinator.example.address.ui.presenter.SubviewCapablePresenter;
import de.akquinet.engineering.vaadinator.example.address.ui.std.view.AddressListView;
/**
 * Variant of {@code AddressListPresenterImpl} that overrides the initial data
 * load: the view is refreshed with {@code null} instead of model data
 * (presumably to clear/defer loading -- confirm against the view contract).
 */
public class AddressListPresenterImplEx extends AddressListPresenterImpl {
    public AddressListPresenterImplEx(Map<String, Object> context, AddressListView view,
            PresenterFactory presenterFactory, AddressService service,
            SubviewCapablePresenter subviewCapablePresenter) {
        super(context, view, presenterFactory, service, subviewCapablePresenter);
    }

    // Passes null rather than fetched data; the view decides how to render that.
    @Override
    protected void loadFromModel() {
        getView().setOrRefreshData(null);
    }

    // Register the observer first so UI initialization can call back into us.
    @Override
    public void startPresenting() {
        getView().setObserver(this);
        getView().initializeUi();
    }
}
|
package web;
import java.io.IOException;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.table.DefaultTableModel;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.select.Elements;
public class Window extends javax.swing.JFrame {

    /**
     * Keeps the background scan loop running; cleared to stop it.
     * Fix: declared volatile -- it is written from the Swing event thread in
     * cambioDatos() and read from the scan thread, so without volatile the
     * scan thread was not guaranteed to ever see the stop request.
     */
    private static volatile boolean escanear = true;
    /** Change threshold (percent) above which a warning is printed. */
    private static final float PORCENTAJE = 0.05f;
    /** Milliseconds to wait between scans. */
    private static final long ESPERA = 3000;
    /** Table model the scraped data is loaded into. */
    private static DefaultTableModel model = new DefaultTableModel();
    private static Document document;
    // Scraped fragments: data table, its headers, and raw rows.
    // NOTE(review): "datos" is assigned in iniElementos() but never read.
    private static Elements tabla, headersWeb, datos;
    private static String[] modelHeaders;
    // Current and previous scan snapshots, compared to detect big moves.
    private static String[][] modelData, oldModelData;

    /**
     * Creates the main window.
     */
    public Window() {
        initComponents();
        this.setTitle("Informacion Relativa al Mercado.");
        this.setResizable(false);
        this.setLocationRelativeTo(null);
    }

    /**
     * Cleans a header element so only its text remains (no markup).
     * @param element element to process.
     * @return the header name as plain text.
     */
    private static String limpiezaHeaders(Element element) {
        String header = element.toString();
        header = header.substring(header.indexOf('>') + 1);
        header = header.substring(0, header.indexOf('<'));
        return header;
    }

    /**
     * Converts the header Elements into a String[] usable directly as the
     * column headers of the JTable model.
     * @param elements elements to process.
     * @return header array for the table model.
     */
    private static String[] getHeaders(Elements elements) {
        ArrayList<String> headersTmp = new ArrayList<>();
        String[] headers;
        // size()-1 drops the trailing "related information" column.
        for (int i = 0; i < elements.size() - 1; i++) {
            headersTmp.add(limpiezaHeaders(elements.get(i)));
        }
        headers = new String[headersTmp.size()];
        for (int i = 0; i < headersTmp.size(); i++) {
            headers[i] = headersTmp.get(i);
        }
        return headers;
    }

    /**
     * Checks whether any HTML tag is still left in the string.
     * @param string string to check.
     * @return true if there are still tags to remove.
     */
    private static boolean tagsLeft(String string) {
        return string.contains("</") || string.contains(">");
    }

    /**
     * Strips one tag layer from the given string.
     * @param string string to process.
     * @return the string with one less tag.
     */
    private static String quitarTag(String string) {
        try {
            string = string.substring(string.indexOf('>') + 1, string.lastIndexOf('<'));
        } catch (StringIndexOutOfBoundsException ex) {
            // No closing tag left: keep everything after the last '>'.
            string = string.substring(string.indexOf('>') + 1);
        }
        return string;
    }

    /**
     * Converts a node to the plain text of interest by stripping all tags.
     * @param node node to extract the information from.
     * @return the cleaned-up text.
     */
    private static String convertNode(Node node) {
        String nodo = node.toString();
        while (tagsLeft(nodo)) {
            nodo = quitarTag(nodo);
        }
        return nodo;
    }

    /**
     * Removes one HTML layer (outermost tag pair) from the string.
     * @param string string to process.
     * @return the string without that layer.
     */
    private static String quitarLayerNormal(String string) {
        string = string.substring(string.indexOf('>') + 1, string.lastIndexOf('<'));
        return string;
    }

    /**
     * Explicit removal of an &lt;img /&gt; tag. Some values do not contain it,
     * so it has to be looked for explicitly.
     * @param string string to check.
     * @return the string without the image tag, if it contained one.
     */
    private static String quitarLayerImg(String string) {
        if (string.contains("<img")) {
            string = string.substring(string.indexOf('>') + 1, string.lastIndexOf('<'));
        }
        return string;
    }

    /**
     * Checks whether the value is going up or down. A flat value counts as down.
     * @param string string to check.
     * @return true if the value goes up, false otherwise.
     */
    private static boolean getTipo(String string) {
        return string.contains("\"Sube\"");
    }

    /**
     * Converts a single row element into the String[5] used by the table:
     * [0..1] plain cells, [2] price+time, [3] signed change + percent, [4] volume.
     * The markup is too irregular to automate, so the slicing is done by hand.
     * @param element element to convert.
     * @return converted row.
     */
    private static String[] convertElement(Element element) {
        String[] datos = new String[5];
        try {
            for (int i = 0; i < 2; i++) {
                datos[i] = convertNode(element.childNode(i));
            }
            datos[2] = convertNode(element.childNode(2).unwrap()) + "; " + convertNode(element.childNode(4)) + "h";
            // Manual, case-specific trimming: the markup varies too much to
            // factor these cuts into reusable helpers.
            String s = quitarLayerNormal(element.childNode(5).toString());
            s = quitarLayerNormal(s);
            boolean sube = getTipo(s);
            s = quitarLayerImg(s);
            String cambioValor = s.substring(s.indexOf('>') + 1, s.indexOf('>') + 5);
            String porcentaje = s.substring(s.lastIndexOf('>') + 1).trim();
            // The page structures 0% differently; the slot is only empty then.
            if (porcentaje.isEmpty()) porcentaje = "(0,00%)";
            if (sube) datos[3] = "+" + cambioValor + "; " + porcentaje;
            else datos[3] = "-" + cambioValor + "; " + porcentaje;
            datos[4] = convertNode(element.childNode(6));
        } catch (IndexOutOfBoundsException ex) {
            System.out.println("ArrayIndex en convertElement(Element element) capturado: " + ex.getLocalizedMessage());
        }
        return datos;
    }

    /**
     * Converts the scraped table into the String[][] used as JTable data.
     * NOTE(review): childNodeSize() counts all nodes while child(i) indexes
     * only element children -- mismatches end up in convertElement's catch.
     * @param elements elements holding the raw rows.
     * @return converted data matrix.
     */
    private static String[][] getData(Elements elements) {
        int size = elements.get(0).childNodeSize();
        String[][] datos = new String[size][];
        for (int i = 0; i < size; i++) {
            datos[i] = convertElement(elements.get(0).child(i));
        }
        return datos;
    }

    /**
     * Shapes the scraped fragments into table headers/data, keeping the
     * previous snapshot in oldModelData for comparison, and installs the model.
     * NOTE(review): this runs on the scan thread but touches Swing
     * (jTable.setModel); should be marshalled onto the EDT -- confirm.
     */
    private static void adecuacionDatos() {
        modelHeaders = getHeaders(headersWeb);
        if (oldModelData == null) { // First iteration of the program.
            modelData = getData(tabla);
            oldModelData = modelData;
        } else {
            oldModelData = modelData;
            modelData = getData(tabla);
        }
        model = new DefaultTableModel(modelData, modelHeaders);
        Window.jTable.setModel(model);
    }

    /**
     * Fetches the page and selects the elements needed for parsing.
     */
    private static void iniElementos(String url) {
        try {
            document = Jsoup.connect(url).get(); // Whole document.
            tabla = document.select("table tbody"); // The table with the data of interest.
            // Remove surrounding filler so the table is ready for processing.
            for (int i = 0; i < 4; i++) {
                tabla.remove(tabla.get(0));
            }
            headersWeb = document.select("thead tr th");
            datos = document.select("tr");
        } catch (IOException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Full scrape-and-convert cycle.
     * @param url URL to connect to.
     */
    private static void tratamiento(String url) {
        iniElementos(url);
        adecuacionDatos();
    }

    /**
     * Computes the absolute threshold above which the user must be warned.
     * @param porcentaje percentage configured by the user.
     * @param valor value being checked.
     * @return threshold value.
     */
    private static long getLimite(float porcentaje, long valor) {
        return (long) (valor * porcentaje / 100);
    }

    /**
     * Custom parser for numbers formatted with '.' as thousands separator.
     */
    private static long parseLong(String string) {
        String parse = "";
        try {
            while (string.contains(".")) {
                parse += string.substring(0, string.indexOf('.'));
                string = string.substring(string.indexOf('.') + 1);
            }
            parse += string; // Last chunk, and numbers without any dot.
        } catch (NullPointerException ex) {
            System.out.println("Problema con el parseo custom del Long. " + ex.getLocalizedMessage());
        }
        return Long.parseLong(parse);
    }

    /**
     * Checks whether a change exceeds the configured threshold, in either
     * direction (the threshold is mirrored for negative changes).
     * @param cambio change between the old and the new scan.
     * @param limite threshold that triggers a warning.
     * @return true if the change reaches or exceeds the threshold.
     */
    private static boolean checkLimite(long cambio, long limite) {
        boolean isOver;
        if (cambio >= 0) isOver = cambio >= limite;
        else {
            limite -= limite * 2; // Negate the threshold for downward moves.
            isOver = cambio <= limite;
        }
        return isOver;
    }

    /**
     * Compares the previous snapshot with the new one for the given row range
     * and prints a warning for every value that moved more than PORCENTAJE.
     * The threshold is computed from the new value.
     * @param datosViejos previous scan snapshot.
     * @param datosNuevos new scan snapshot.
     * @param comienzoHilo first row (inclusive) handled by this worker.
     * @param limiteHilo last row (exclusive) handled by this worker.
     */
    private static void comparacion(String[][] datosViejos, String[][] datosNuevos, int comienzoHilo, int limiteHilo) {
        long valorNuevo, valorViejo, cambio, cambioLimite;
        for (int indiceEmpresa = comienzoHilo; indiceEmpresa < limiteHilo; indiceEmpresa++) {
            valorNuevo = parseLong(datosNuevos[indiceEmpresa][4]);
            valorViejo = parseLong(datosViejos[indiceEmpresa][4]);
            cambioLimite = getLimite(PORCENTAJE, valorNuevo);
            cambio = valorNuevo - valorViejo;
            boolean over = checkLimite(cambio, cambioLimite);
            if (over) System.out.printf("¡ATENCION! ¡Cambio que ha sobrepasado el limite! Empresa %S. Valor anterior: %d. Cambio de: %d. Valor actual: %d", datosNuevos[indiceEmpresa][0], valorViejo, cambio, valorNuevo);
        }
    }

    /**
     * Scan loop: repeatedly scrapes the URL, compares the new snapshot against
     * the previous one on four worker threads, then sleeps. Runs until
     * {@code escanear} is cleared. synchronized so only one loop is active.
     */
    private synchronized static void escanear(String url) {
        escanear = true;
        while (escanear) {
            try {
                tratamiento(url);
                System.out.println("Escaneo realizado sobre: " + url);
                // NOTE(review): the fixed 0..100 ranges assume the table
                // always has at least 100 rows -- confirm against the page.
                Thread t1 = new Thread(() -> comparacion(oldModelData, modelData, 0, 25));
                Thread t2 = new Thread(() -> comparacion(oldModelData, modelData, 25, 50));
                Thread t3 = new Thread(() -> comparacion(oldModelData, modelData, 50, 75));
                Thread t4 = new Thread(() -> comparacion(oldModelData, modelData, 75, 100));
                t1.start();
                t2.start();
                t3.start();
                t4.start();
                // Fixed: t1 was never joined, so the next scan could start
                // (and swap the snapshots) while t1 was still comparing.
                t1.join();
                t2.join();
                t3.join();
                t4.join();
                Thread.sleep(ESPERA);
            } catch (InterruptedException ex) {
                System.out.println("Problema con Thread.sleep en escanear(String url). " + ex.getLocalizedMessage());
            }
            if (!escanear) System.out.println("Escaneo sobre :" + url + " parado.");
        }
    }

    /**
     * Switches the data source, updates the title label, and starts a new
     * scan thread (the old one stops once it observes escanear == false and
     * releases the escanear() lock).
     * @param url URL to take the data from.
     */
    private static void cambioDatos(String url) {
        escanear = false;
        tratamiento(url);
        switch (url) {
            case "https://es.finance.yahoo.com/actives?e=mc":
                Window.jLabelTitulo.setText("Valores mas Activos");
                break;
            case "https://es.finance.yahoo.com/gainers?e=mc":
                Window.jLabelTitulo.setText("Mayores Subidas de Precio");
                break;
            case "https://es.finance.yahoo.com/losers?e=mc":
                Window.jLabelTitulo.setText("Bajan de Precio");
                break;
            default:
                break;
        }
        Runnable r = () -> escanear(url);
        new Thread(r).start();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        buttonGroupDatos = new javax.swing.ButtonGroup();
        jLabelTitulo = new javax.swing.JLabel();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTable = new javax.swing.JTable();
        jMenuBar1 = new javax.swing.JMenuBar();
        jMenuFile = new javax.swing.JMenu();
        jMenuItem1 = new javax.swing.JMenuItem();
        jMenuValores = new javax.swing.JMenu();
        jRadioButtonMenuItemActivos = new javax.swing.JRadioButtonMenuItem();
        jRadioButtonMenuItemSubidas = new javax.swing.JRadioButtonMenuItem();
        jRadioButtonMenuItemBajadas = new javax.swing.JRadioButtonMenuItem();
        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        jLabelTitulo.setFont(new java.awt.Font("sansserif", 1, 18)); // NOI18N
        jLabelTitulo.setText("Valores mas Activos");
        jTable.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {
            },
            new String [] {
            }
        ));
        jTable.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
        jTable.setShowHorizontalLines(true);
        jScrollPane1.setViewportView(jTable);
        jMenuFile.setText("File");
        jMenuItem1.setText("Salir");
        jMenuItem1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jMenuItem1ActionPerformed(evt);
            }
        });
        jMenuFile.add(jMenuItem1);
        jMenuBar1.add(jMenuFile);
        jMenuValores.setText("Valores");
        buttonGroupDatos.add(jRadioButtonMenuItemActivos);
        jRadioButtonMenuItemActivos.setSelected(true);
        jRadioButtonMenuItemActivos.setText("Valores mas Activos");
        jRadioButtonMenuItemActivos.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jRadioButtonMenuItemActivosActionPerformed(evt);
            }
        });
        jMenuValores.add(jRadioButtonMenuItemActivos);
        buttonGroupDatos.add(jRadioButtonMenuItemSubidas);
        jRadioButtonMenuItemSubidas.setText("Mayores Subidas de Precio");
        jRadioButtonMenuItemSubidas.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jRadioButtonMenuItemSubidasActionPerformed(evt);
            }
        });
        jMenuValores.add(jRadioButtonMenuItemSubidas);
        buttonGroupDatos.add(jRadioButtonMenuItemBajadas);
        jRadioButtonMenuItemBajadas.setText("Bajan de Precio");
        jRadioButtonMenuItemBajadas.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jRadioButtonMenuItemBajadasActionPerformed(evt);
            }
        });
        jMenuValores.add(jRadioButtonMenuItemBajadas);
        jMenuBar1.add(jMenuValores);
        setJMenuBar(jMenuBar1);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 608, Short.MAX_VALUE)
                .addContainerGap())
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(jLabelTitulo)
                .addGap(217, 217, 217))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(12, 12, 12)
                .addComponent(jLabelTitulo)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 394, Short.MAX_VALUE)
                .addContainerGap())
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void jMenuItem1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem1ActionPerformed
        System.exit(0);
    }//GEN-LAST:event_jMenuItem1ActionPerformed

    private void jRadioButtonMenuItemActivosActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButtonMenuItemActivosActionPerformed
        cambioDatos("https://es.finance.yahoo.com/actives?e=mc");
    }//GEN-LAST:event_jRadioButtonMenuItemActivosActionPerformed

    private void jRadioButtonMenuItemSubidasActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButtonMenuItemSubidasActionPerformed
        cambioDatos("https://es.finance.yahoo.com/gainers?e=mc");
    }//GEN-LAST:event_jRadioButtonMenuItemSubidasActionPerformed

    private void jRadioButtonMenuItemBajadasActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jRadioButtonMenuItemBajadasActionPerformed
        cambioDatos("https://es.finance.yahoo.com/losers?e=mc");
    }//GEN-LAST:event_jRadioButtonMenuItemBajadasActionPerformed

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(Window.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(Window.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(Window.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(Window.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new Window().setVisible(true);
                // NOTE(review): this performs network I/O on the event
                // dispatch thread and will freeze the UI while it runs.
                tratamiento("https://es.finance.yahoo.com/actives?e=mc");
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.ButtonGroup buttonGroupDatos;
    private static javax.swing.JLabel jLabelTitulo;
    private javax.swing.JMenuBar jMenuBar1;
    private javax.swing.JMenu jMenuFile;
    private javax.swing.JMenuItem jMenuItem1;
    private javax.swing.JMenu jMenuValores;
    private javax.swing.JRadioButtonMenuItem jRadioButtonMenuItemActivos;
    private javax.swing.JRadioButtonMenuItem jRadioButtonMenuItemBajadas;
    private javax.swing.JRadioButtonMenuItem jRadioButtonMenuItemSubidas;
    private javax.swing.JScrollPane jScrollPane1;
    private static javax.swing.JTable jTable;
    // End of variables declaration//GEN-END:variables
}
|
package net.java.sip.communicator.plugin.desktoputil;
import java.awt.*;
import java.security.cert.*;
import java.util.Arrays;
import javax.swing.*;
import org.jitsi.service.resources.*;
/**
 * Frame for showing information about a certificate.
 */
public class ViewCertificateFrame
    extends SIPCommFrame
{
    /**
     * Serial version UID.
     */
    private static final long serialVersionUID = 0L;

    /**
     * The resource service.
     */
    private final ResourceManagementService R = DesktopUtilActivator.getResources();

    /**
     * The maximum width that we allow message dialogs to have.
     */
    private static final int MAX_MSG_PANE_WIDTH = 600;

    /**
     * The maximum height that we allow message dialogs to have.
     */
    private static final int MAX_MSG_PANE_HEIGHT = 800;

    /**
     * The certificates to show.
     */
    Certificate[] certs;

    /**
     * A text that describes why the verification failed.
     */
    String message;

    /**
     * The certificate panel.
     */
    TransparentPanel certPanel;

    /**
     * This dialog content pane.
     */
    TransparentPanel contentPane;

    /**
     * Creates the dialog.
     *
     * @param certs the certificates list
     * @param title The title of the dialog; when null the resource
     * <tt>service.gui.CERT_DIALOG_TITLE</tt> is loaded.
     * @param message A text that describes why the verification failed.
     */
    public ViewCertificateFrame(Certificate[] certs,
                                String title, String message)
    {
        super(false);
        setTitle(title != null ? title :
            R.getI18NString("service.gui.CERT_DIALOG_TITLE"));
        this.certs = certs;
        this.message = message;
        setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
        init();
        setLocationRelativeTo(getParent());
    }

    /**
     * Inits the dialog initial display.
     */
    private void init()
    {
        this.getContentPane().setLayout(new BorderLayout());
        contentPane =
            new TransparentPanel(new BorderLayout(5, 5));
        TransparentPanel northPanel =
            new TransparentPanel(new BorderLayout(5, 5));
        northPanel.setBorder(BorderFactory.createEmptyBorder(10, 5, 5, 5));
        JLabel imgLabel = new JLabel(
            R.getImage("service.gui.icons.CERTIFICATE_WARNING"));
        imgLabel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
        northPanel.add(imgLabel, BorderLayout.WEST);
        StyledHTMLEditorPane descriptionPane = new StyledHTMLEditorPane();
        descriptionPane.setOpaque(false);
        descriptionPane.setEditable(false);
        descriptionPane.setContentType("text/html");
        descriptionPane.setText(message);
        // Lay the pane out at maximum size first so the preferred height can
        // be measured for the fixed width.
        descriptionPane.setSize(
            new Dimension(MAX_MSG_PANE_WIDTH, MAX_MSG_PANE_HEIGHT));
        int height = descriptionPane.getPreferredSize().height;
        descriptionPane.setPreferredSize(
            new Dimension(MAX_MSG_PANE_WIDTH, height));
        northPanel.add(descriptionPane, BorderLayout.CENTER);
        contentPane.add(northPanel, BorderLayout.NORTH);
        certPanel = new TransparentPanel(new BorderLayout());
        contentPane.add(certPanel, BorderLayout.CENTER);
        this.getContentPane().add(contentPane, BorderLayout.CENTER);
        Component certInfoPane;
        if (certs[0] instanceof X509Certificate)
        {
            // Fixed: the previous code cast the whole array to
            // X509Certificate[], which throws ClassCastException whenever the
            // caller passes a Certificate[] that merely *contains* X.509
            // certificates. Cast the elements individually instead.
            X509Certificate[] x509Certs = new X509Certificate[certs.length];
            for (int i = 0; i < certs.length; i++)
            {
                x509Certs[i] = (X509Certificate) certs[i];
            }
            certInfoPane = new X509CertificatePanel(Arrays.asList(x509Certs));
        }
        else
        {
            JTextArea textArea = new JTextArea();
            textArea.setOpaque(false);
            textArea.setEditable(false);
            // for now shows only the first certificate from the chain
            textArea.setText(certs[0].toString());
            certInfoPane = textArea;
        }
        final JScrollPane certScroll = new JScrollPane(certInfoPane);
        certScroll.setPreferredSize(new Dimension(300, 600));
        certPanel.add(certScroll, BorderLayout.CENTER);
        // Scroll back to the top once the pane has been laid out.
        SwingUtilities.invokeLater(new Runnable()
        {
            public void run()
            {
                certScroll.getVerticalScrollBar().setValue(0);
            }
        });
        setPreferredSize(null);
        pack();
    }
}
|
package net.sf.mzmine.modules.normalization.rtnormalizer;
import java.util.Arrays;
import java.util.Vector;
import java.util.logging.Logger;
import net.sf.mzmine.data.Peak;
import net.sf.mzmine.data.PeakList;
import net.sf.mzmine.data.PeakListRow;
import net.sf.mzmine.data.impl.SimplePeakList;
import net.sf.mzmine.io.RawDataFile;
import net.sf.mzmine.main.MZmineCore;
import net.sf.mzmine.project.MZmineProject;
import net.sf.mzmine.taskcontrol.Task;
/**
 * Task that prepares retention-time normalization across several peak lists:
 * it locates "standard" rows (peaks present in every list within the
 * configured m/z and RT tolerances, all above the minimum height) to use as
 * anchors. The normalization itself is still TODO; currently the task only
 * collects the standards and registers the (so far empty) normalized lists.
 */
class RTNormalizerTask implements Task {
    private Logger logger = Logger.getLogger(this.getClass().getName());
    private PeakList[] originalPeakLists;
    private TaskStatus status = TaskStatus.WAITING;
    private String errorMessage;
    // Processed rows counter
    private int processedRows, totalRows;
    // User-configured parameters.
    private String suffix;
    private float mzTolerance, rtTolerance, minHeight;
    private boolean removeOriginal;

    public RTNormalizerTask(PeakList[] peakLists,
            RTNormalizerParameters parameters) {
        this.originalPeakLists = peakLists;
        suffix = (String) parameters.getParameterValue(RTNormalizerParameters.suffix);
        mzTolerance = (Float) parameters.getParameterValue(RTNormalizerParameters.MZTolerance);
        rtTolerance = (Float) parameters.getParameterValue(RTNormalizerParameters.RTTolerance);
        minHeight = (Float) parameters.getParameterValue(RTNormalizerParameters.minHeight);
        removeOriginal = (Boolean) parameters.getParameterValue(RTNormalizerParameters.autoRemove);
    }

    public void cancel() {
        status = TaskStatus.CANCELED;
    }

    public String getErrorMessage() {
        return errorMessage;
    }

    public float getFinishedPercentage() {
        if (totalRows == 0)
            return 0f;
        return (float) processedRows / (float) totalRows;
    }

    public TaskStatus getStatus() {
        return status;
    }

    public String getTaskDescription() {
        return "Retention time normalization of "
                + Arrays.toString(originalPeakLists);
    }

    public void run() {
        status = TaskStatus.PROCESSING;
        // First we need to find standards by iterating through first peak list.
        // NOTE(review): the first list is also counted again in the loop below,
        // presumably covering both the standards pass and the (TODO)
        // normalization pass -- confirm once normalization is implemented.
        totalRows = originalPeakLists[0].getNumberOfRows();
        // Create new peak lists
        SimplePeakList normalizedPeakLists[] = new SimplePeakList[originalPeakLists.length];
        for (int i = 0; i < originalPeakLists.length; i++) {
            normalizedPeakLists[i] = new SimplePeakList(originalPeakLists[i]
                    + " " + suffix);
            // Add all data files from original peak lists
            for (RawDataFile file : originalPeakLists[i].getRawDataFiles())
                normalizedPeakLists[i].addRawDataFile(file);
            // Remember how many rows we need to normalize
            totalRows += originalPeakLists[i].getNumberOfRows();
        }
        // goodStandards Vector contains identified standard rows, represented
        // by arrays. Each array has same length as originalPeakLists array.
        // Array items represent particular standard peak in each PeakList
        Vector<PeakListRow[]> goodStandards = new Vector<PeakListRow[]>();
        // Iterate the first peaklist
        standardIteration: for (PeakListRow candidate : originalPeakLists[0].getRows()) {
            // Fixed: cancel() previously had no effect once run() had started;
            // honor the cancellation request between rows.
            if (status == TaskStatus.CANCELED)
                return;
            processedRows++;
            // Check that all peaks of this row have proper height
            for (Peak p : candidate.getPeaks()) {
                if (p.getHeight() < minHeight)
                    continue standardIteration;
            }
            PeakListRow goodStandardCandidate[] = new PeakListRow[originalPeakLists.length];
            goodStandardCandidate[0] = candidate;
            float candidateMZ = candidate.getAverageMZ();
            float candidateRT = candidate.getAverageRT();
            // Find matching rows in remaining peaklists
            for (int i = 1; i < originalPeakLists.length; i++) {
                PeakListRow matchingRows[] = originalPeakLists[i].getRowsInsideScanAndMZRange(
                        candidateRT - rtTolerance, candidateRT + rtTolerance,
                        candidateMZ - mzTolerance, candidateMZ + mzTolerance);
                // If we have not found exactly 1 matching peak, move to next
                // standard candidate
                if (matchingRows.length != 1)
                    continue standardIteration;
                // Check that all peaks of this row have proper height
                for (Peak p : matchingRows[0].getPeaks()) {
                    if (p.getHeight() < minHeight)
                        continue standardIteration;
                }
                // Save reference to matching peak in this peak list
                goodStandardCandidate[i] = matchingRows[0];
            }
            // If we found a match of same peak in all peaklists, mark it as a
            // good standard
            goodStandards.add(goodStandardCandidate);
            logger.finest("Found a good standard for RT normalization: "
                    + candidate);
        }
        // TODO RT normalization
        // Honor cancellation before touching the project.
        if (status == TaskStatus.CANCELED)
            return;
        // Add new peaklists to the project
        MZmineProject currentProject = MZmineCore.getCurrentProject();
        for (int i = 0; i < originalPeakLists.length; i++) {
            currentProject.addPeakList(normalizedPeakLists[i]);
            // Remove the original peaklists if requested
            if (removeOriginal)
                currentProject.removePeakList(originalPeakLists[i]);
        }
        status = TaskStatus.FINISHED;
    }
}
|
package Engine;
import java.io.Serializable;
import Constants.EngineConstants;
import Constants.VisualConstants;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.geom.*;
public class Tank extends GameEntity implements Serializable,MovementInterface, TransformInterface, Drawable {
// Remaining hit points; presumably 0 means destroyed -- TODO confirm.
protected double life;
// Energy reserve; starts full at 100.
private double energy = 100;
// The cannon mounted on this tank (created alongside the tank).
protected Cannon cannon;
// Unique, immutable id assigned from the instance counter below.
private final int tank_id;
// Not serialized; presumably re-created when needed -- TODO confirm.
protected transient TankCapsule tankCapsule;
// Display name of the tank and the player/author who owns it.
private String name, author;
// Current rotation and movement state flags.
private int rotate_state, move_state;
//the id of the tank will be the current number of instanced tank classes
private static int staticId;
/**
 * This block executes once when the class is loaded
 */
static {
    staticId = 0;
}
public Tank(double xPos, double yPos,String playerName) {
super(staticId,xPos, yPos);
this.tank_id = staticId++;
this.life = 100;
this.author = playerName;
width = (int)VisualConstants.TANK_WIDTH;
height = (int)VisualConstants.TANK_HEIGHT;
cannon = new Cannon(staticId, xPos, yPos, this);
damage = EngineConstants.DAMAGE;
angle = EngineConstants.ANGLE;
speed = EngineConstants.TANK_SPEED;
life = EngineConstants.LIFE;
}
public Tank(String name, String author) {
super(0,0,0);
Rectangle tankRect = new Rectangle();
Rectangle otherTank = new Rectangle();
this.name = name;
this.author = author;
x = (int)(Math.random()*1000%VisualConstants.ENGINE_WIDTH);
y = (int)(Math.random()*1000%VisualConstants.ENGINE_HEIGHT);
tankRect.x = (int) x;
tankRect.y = (int) y;
synchronized (this) {
for (int i = 0; i < GameEntity.entityList.size(); i++) {
if(GameEntity.entityList.get(i) instanceof Tank){
Tank tank = (Tank) GameEntity.entityList.get(i);
otherTank.x = (int)tank.getX();
otherTank.y = (int)tank.getY();
if(!isInsideArena() || tankRect.intersects(otherTank)){
x = (int) (Math.random() * 1000) % VisualConstants.ENGINE_WIDTH - 10 - VisualConstants.TANK_WIDTH;
y = (int) (Math.random() * 1000) % VisualConstants.ENGINE_HEIGHT - 10 - VisualConstants.TANK_HEIGHT;
i = 0;
}
}
}
//cannon = new Cannon(staticId, x, y);
}
//super(staticId,xPos, yPos);
//this.x = xPos;
//this.y = yPos;
this.tank_id = staticId++;
this.life = 100;
width = (int)VisualConstants.TANK_WIDTH;
height = (int)VisualConstants.TANK_HEIGHT;
cannon = new Cannon(staticId, x, y, this);
damage = EngineConstants.DAMAGE;
angle = EngineConstants.ANGLE;
speed = EngineConstants.TANK_SPEED;
life = EngineConstants.LIFE;
}
public Point getCenter(){
Point center = new Point((int)(this.x+VisualConstants.TANK_WIDTH/2),(int)(this.y+VisualConstants.TANK_HEIGHT/2));
return center;
}
/**
* Gets the id of the tan
* @return a integer value representing the id of the tank.
*/
public int getId(){
return tank_id;
}
/**
* Gets the life of the Tank.
*
* @return
*/
public double getLife() {
return life;
}
/**
* Sets the life of the Tank at 'lfe', if the 'lfe' argument is not lower or
* equal then 0.
*
* @param lfe a double value representing a Tanks life
*/
public void setLife(double lfe) {
if (lfe >= 0) {
life = lfe;
}
}
@Override
public void rotate(double degrees){
angle = (angle + degrees)%360;
rotateCannon(degrees);
}
/**
* Rotates the cannon of the tank by 'degrees' reported to the cannon's rotation angle
* @param degrees a double value representing the rotation value
*/
public void rotateCannon(double degrees){
if(rotate_state < Constants.EngineConstants.ROTATE_LIMIT){
rotate_state++;
cannon.rotate(degrees);
}
}
@Override
public void moveUp() {
setY(getY()-1);
}
@Override
public void moveDown() {
setY(getY()+1);
}
@Override
public void moveLeft() {
setX(getX()-1);
}
@Override
public void moveRight() {
setX(getX()+1);
}
public void janitor(){
resetStates();
rotateCannon(0.1);
rotateCannon(-0.1);
rotate_state -=2;
}
private void resetStates(){
rotate_state = move_state = 0;
}
/**
*
* @param p - Point representing a tank.
* @return Value which specifies whether the tank is inside the arena or not.
*/
public boolean isInsideArena(Point p){
return p.x-10 > 0 && p.y-10 > 0 && p.x+VisualConstants.TANK_WIDTH+10 < VisualConstants.ENGINE_WIDTH && p.y+VisualConstants.TANK_HEIGHT+30 < VisualConstants.ENGINE_HEIGHT;
}
/**
*
* @return Value which specifies whether the tank is inside the arena or not.
*/
public final boolean isInsideArena(){
return this.x-10 > 0 && this.y-10 > 0 && this.x+VisualConstants.TANK_WIDTH+10 < VisualConstants.ENGINE_WIDTH && this.y + VisualConstants.TANK_HEIGHT+30 < VisualConstants.ENGINE_HEIGHT;
}
/**
* Move the tank forward reported to it's current orientation angle.
*/
public void moveFront(){
if(move_state >= Constants.EngineConstants.MOVE_LIMIT)
return;
move_state++;
double origX = x, origY = y;
double s = Math.sin(angle * Math.PI / 180.0);
double c = Math.cos(angle * Math.PI / 180.0);
x += c * speed;
y += s * speed;
if (!isInsideArena()) {
x = origX;
y = origY;
tankCapsule.hitArenaWall();
} else {
//we store the angle of the cannon in cangle
double cangle = cannon.getAngle();
//set the cannon rotaton to the tank rotation
cannon.setAngle(angle);
//then move the cannon front
cannon.moveFront();
//then we restore the cannon to it's former angle
cannon.setAngle(cangle);
}
}
/**
* Shoots a bullet.
*
* @return a Bullet object representing a bullet shoot by the tank, or a null object if the tank is unable to fire.
*
*/
public Bullet fire() {
if(energy < 100)
return null;
energy = 0;
return cannon.fire();
}
public void restoreEnergy(){
if(energy < 100)
energy += EngineConstants.ENERGY_RESTORE_RATE;
if(energy > 100)
energy = 100;
}
public void setTankCapsule(TankCapsule tankCapsule){
this.tankCapsule = tankCapsule;
}
public void hitByBullet(){
this.life -= 10;
this.tankCapsule.gotHitByBullet();
}
public boolean inTheGame(){
return life > 0;
}
@Override
public void draw(Graphics g) {
Graphics2D g2 = (Graphics2D)g;
AffineTransform at = g2.getTransform();
g2.rotate(Math.toRadians(90), x+10, y+10);
g2.rotate(Math.toRadians(angle), x+10, y+10);
g2.drawImage(EngineConstants.TANK_SPRITE, (int)x, (int)y, null);
g2.setTransform(at);
cannon.draw(g);
//draw health bar
g2.setColor(Color.RED);
g2.fillRect((int)x-2, (int)y-10,(int)VisualConstants.HEALTH_BAR_WIDTH, (int)VisualConstants.HEALTH_BAR_HEIGHT);
g2.setColor(Color.decode("#0FB81A")); //this is green
g2.fillRect((int)x-2, (int)y-10, (int) (life/100*VisualConstants.HEALTH_BAR_WIDTH), (int)VisualConstants.HEALTH_BAR_HEIGHT);
//end
//draw energy bar
g2.setColor(Color.decode("#804000")); //brownish
g2.fillRect((int)x-2, (int)(y-10-VisualConstants.HEALTH_BAR_HEIGHT), (int)(VisualConstants.HEALTH_BAR_WIDTH), (int)VisualConstants.HEALTH_BAR_HEIGHT);
g2.setColor(Color.decode("#ff9933")); //orange
g2.fillRect((int)x-2, (int)(y-10-VisualConstants.HEALTH_BAR_HEIGHT), (int)(energy/100*VisualConstants.HEALTH_BAR_WIDTH), (int)VisualConstants.HEALTH_BAR_HEIGHT);
//end
//draw name and author
g2.setColor(Color.BLACK);
Font f = g2.getFont();
g2.setFont(g2.getFont().deriveFont(10f));
g2.drawString(this.name, (int)(this.x+Constants.VisualConstants.TANK_WIDTH + 3), (int)(this.y+Constants.VisualConstants.TANK_HEIGHT/2-4));
g2.drawString(this.author, (int)(this.x+Constants.VisualConstants.TANK_WIDTH + 3), (int)(this.y+Constants.VisualConstants.TANK_HEIGHT/2+4));
g2.setFont(f);
//end
}
@Override
public String toString() {
return "Tank{" + " playerName=" + author + ", life=" + life + ", cannon=" +
cannon + ", width=" + width + ", height=" + height + '}' + " " + super.toString();
}
}
|
package settings;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.math.BigDecimal;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import network.Peer;
/**
 * Singleton configuration holder backed by a settings.json file.
 * The constructor supports one level of indirection: if the loaded file
 * contains a "settingspath" entry, the file at that path is loaded instead.
 * All getters fall back to the DEFAULT_* constants when a key is absent.
 * Numeric JSON values are assumed to be parsed as Long by json-simple.
 */
public class Settings {
    //NETWORK
    private static final int DEFAULT_MIN_CONNECTIONS = 5;
    private static final int DEFAULT_MAX_CONNECTIONS = 20;
    private static final int DEFAULT_CONNECTION_TIMEOUT = 60000;
    private static final int DEFAULT_PING_INTERVAL = 30000;
    //RPC
    private static final int DEFAULT_RPC_PORT = 9085;
    private static final String DEFAULT_RPC_ALLOWED = "127.0.0.1";
    private static final boolean DEFAULT_RPC_ENABLED = true;
    //GUI CONSOLE
    private static final boolean DEFAULT_GUI_CONSOLE_ENABLED = true;
    //WEB
    private static final int DEFAULT_WEB_PORT = 9083;
    private static final String DEFAULT_WEB_ALLOWED = "127.0.0.1";
    private static final boolean DEFAULT_WEB_ENABLED = true;
    //GUI
    private static final boolean DEFAULT_GUI_ENABLED = true;
    //SETTINGS.JSON FILE
    private static final String DEFAULT_SETTINGS_PATH = "settings.json";
    //DATA
    private static final String DEFAULT_DATA_DIR = "data";
    private static final String DEFAULT_WALLET_DIR = "wallet";
    private static final boolean DEFAULT_GENERATOR_KEY_CACHING = false;
    private static final boolean DEFAULT_CHECKPOINTING = true;
    private static final boolean DEFAULT_SOUND_RECEIVE_COIN = true;
    private static final boolean DEFAULT_SOUND_MESSAGE = true;
    private static final boolean DEFAULT_SOUND_NEW_TRANSACTION = true;
    private static final boolean DEFAULT_BLOCKEXPLORER_BOOST = false;
    private static final int DEFAULT_MAX_BYTE_PER_FEE = 512;
    private static final BigDecimal DEFAULT_BIG_FEE = new BigDecimal(1000);
    private static final String DEFAULT_BIG_FEE_MESSAGE = "Do you really want to set such a large fee?\nThese coins will go to the forgers.";
    private static Settings instance;
    // Parsed contents of the (possibly redirected) settings file.
    private JSONObject settingsJSON;
    // Path of the settings file that was actually loaded.
    private String currentSettingsPath;
    // Lazily-created singleton accessor (not thread-safe).
    public static Settings getInstance()
    {
        if(instance == null)
        {
            instance = new Settings();
        }
        return instance;
    }
    // Drops the singleton so the next getInstance() re-reads the file.
    public static void FreeInstance()
    {
        if(instance != null)
        {
            instance = null;
        }
    }
    /**
     * Loads settings.json; if it contains "settingspath", loads that file
     * instead (one redirection at most, enforced by alreadyPassed &lt; 2).
     * Any failure terminates the process.
     */
    private Settings()
    {
        BufferedReader reader;
        int alreadyPassed = 0;
        String settingsFilePath = "settings.json";
        try
        {
            while(alreadyPassed<2)
            {
                //OPEN FILE
                File file = new File(settingsFilePath);
                currentSettingsPath = settingsFilePath;
                //CREATE FILE IF IT DOESNT EXIST
                if(!file.exists())
                {
                    file.createNewFile();
                }
                //READ SETTINGS FILE
                // NOTE(review): the reader is not closed if an exception is
                // thrown before close(); moot in practice because the catch
                // block exits the process.
                reader = new BufferedReader(new FileReader(file));
                String line;
                String jsonString = "";
                //READ LINE
                while ((line = reader.readLine()) != null)
                {
                    jsonString += line;
                }
                //CLOSE
                reader.close();
                //CREATE JSON OBJECT
                // NOTE(review): JSONValue.parse returns null for an empty or
                // invalid file, so containsKey below then throws NPE and the
                // process exits — confirm that is the intended behavior.
                this.settingsJSON = (JSONObject) JSONValue.parse(jsonString);
                alreadyPassed++;
                if(this.settingsJSON.containsKey("settingspath"))
                {
                    settingsFilePath = (String) this.settingsJSON.get("settingspath");
                }
                else
                {
                    alreadyPassed ++;
                }
            }
        }
        catch(Exception e)
        {
            //STOP
            // NOTE(review): exits with status 0 on failure — a non-zero code
            // would better signal the error to callers; confirm.
            System.out.println("ERROR reading settings.json. closing");
            System.exit(0);
        }
    }
    // Exposes the raw parsed JSON (the live object, not a copy).
    public JSONObject Dump()
    {
        return settingsJSON;
    }
    // Path of the settings file that was actually loaded (after redirection).
    public String getCurrentSettingsPath()
    {
        return currentSettingsPath;
    }
    /**
     * Builds Peer objects from the "knownpeers" JSON array, skipping the
     * local host address. Returns an empty list on any error.
     */
    public List<Peer> getKnownPeers()
    {
        try
        {
            //GET PEERS FROM JSON
            JSONArray peersArray = (JSONArray) this.settingsJSON.get("knownpeers");
            //CREATE LIST WITH PEERS
            List<Peer> peers = new ArrayList<Peer>();
            for(int i=0; i<peersArray.size(); i++)
            {
                InetAddress address = InetAddress.getByName((String) peersArray.get(i));
                //CHECK IF SOCKET IS NOT LOCALHOST
                if(!address.equals(InetAddress.getLocalHost()))
                {
                    //CREATE PEER
                    Peer peer = new Peer(address);
                    //ADD TO LIST
                    peers.add(peer);
                }
            }
            //RETURN
            return peers;
        }
        catch(Exception e)
        {
            //RETURN EMPTY LIST
            return new ArrayList<Peer>();
        }
    }
    public int getMaxConnections()
    {
        if(this.settingsJSON.containsKey("maxconnections"))
        {
            return ((Long) this.settingsJSON.get("maxconnections")).intValue();
        }
        return DEFAULT_MAX_CONNECTIONS;
    }
    public int getMinConnections()
    {
        if(this.settingsJSON.containsKey("minconnections"))
        {
            return ((Long) this.settingsJSON.get("minconnections")).intValue();
        }
        return DEFAULT_MIN_CONNECTIONS;
    }
    public int getConnectionTimeout()
    {
        if(this.settingsJSON.containsKey("connectiontimeout"))
        {
            return ((Long) this.settingsJSON.get("connectiontimeout")).intValue();
        }
        return DEFAULT_CONNECTION_TIMEOUT;
    }
    public int getRpcPort()
    {
        if(this.settingsJSON.containsKey("rpcport"))
        {
            return ((Long) this.settingsJSON.get("rpcport")).intValue();
        }
        return DEFAULT_RPC_PORT;
    }
    /**
     * Hosts allowed to use the RPC API; falls back to the default
     * (semicolon-separated) list, or an empty array on error.
     */
    public String[] getRpcAllowed()
    {
        try
        {
            if(this.settingsJSON.containsKey("rpcallowed"))
            {
                //GET PEERS FROM JSON
                JSONArray allowedArray = (JSONArray) this.settingsJSON.get("rpcallowed");
                //CREATE LIST WITH PEERS
                String[] allowed = new String[allowedArray.size()];
                for(int i=0; i<allowedArray.size(); i++)
                {
                    allowed[i] = (String) allowedArray.get(i);
                }
                //RETURN
                return allowed;
            }
            //RETURN
            return DEFAULT_RPC_ALLOWED.split(";");
        }
        catch(Exception e)
        {
            //RETURN EMPTY LIST
            return new String[0];
        }
    }
    public boolean isRpcEnabled()
    {
        if(this.settingsJSON.containsKey("rpcenabled"))
        {
            return ((Boolean) this.settingsJSON.get("rpcenabled")).booleanValue();
        }
        return DEFAULT_RPC_ENABLED;
    }
    public int getWebPort()
    {
        if(this.settingsJSON.containsKey("webport"))
        {
            return ((Long) this.settingsJSON.get("webport")).intValue();
        }
        return DEFAULT_WEB_PORT;
    }
    public boolean isGuiConsoleEnabled()
    {
        if(this.settingsJSON.containsKey("guiconsoleenabled"))
        {
            return ((Boolean) this.settingsJSON.get("guiconsoleenabled")).booleanValue();
        }
        return DEFAULT_GUI_CONSOLE_ENABLED;
    }
    /**
     * Hosts allowed to use the web API; same fallback behavior as
     * getRpcAllowed().
     */
    public String[] getWebAllowed()
    {
        try
        {
            if(this.settingsJSON.containsKey("weballowed"))
            {
                //GET PEERS FROM JSON
                JSONArray allowedArray = (JSONArray) this.settingsJSON.get("weballowed");
                //CREATE LIST WITH PEERS
                String[] allowed = new String[allowedArray.size()];
                for(int i=0; i<allowedArray.size(); i++)
                {
                    allowed[i] = (String) allowedArray.get(i);
                }
                //RETURN
                return allowed;
            }
            //RETURN
            return DEFAULT_WEB_ALLOWED.split(";");
        }
        catch(Exception e)
        {
            //RETURN EMPTY LIST
            return new String[0];
        }
    }
    public boolean isWebEnabled()
    {
        if(this.settingsJSON.containsKey("webenabled"))
        {
            return ((Boolean) this.settingsJSON.get("webenabled")).booleanValue();
        }
        return DEFAULT_WEB_ENABLED;
    }
    public String getWalletDir()
    {
        if(this.settingsJSON.containsKey("walletdir"))
        {
            return (String) this.settingsJSON.get("walletdir");
        }
        return DEFAULT_WALLET_DIR;
    }
    public String getDataDir()
    {
        if(this.settingsJSON.containsKey("datadir"))
        {
            return (String) this.settingsJSON.get("datadir");
        }
        return DEFAULT_DATA_DIR;
    }
    // Configured redirection target, not necessarily the loaded path
    // (see getCurrentSettingsPath()).
    public String getSettingsPath()
    {
        if(this.settingsJSON.containsKey("settingspath"))
        {
            return (String) this.settingsJSON.get("settingspath");
        }
        return DEFAULT_SETTINGS_PATH;
    }
    public int getPingInterval()
    {
        if(this.settingsJSON.containsKey("pinginterval"))
        {
            return ((Long) this.settingsJSON.get("pinginterval")).intValue();
        }
        return DEFAULT_PING_INTERVAL;
    }
    public boolean isGeneratorKeyCachingEnabled()
    {
        if(this.settingsJSON.containsKey("generatorkeycaching"))
        {
            return ((Boolean) this.settingsJSON.get("generatorkeycaching")).booleanValue();
        }
        return DEFAULT_GENERATOR_KEY_CACHING;
    }
    public boolean isCheckpointingEnabled()
    {
        if(this.settingsJSON.containsKey("checkpoint"))
        {
            return ((Boolean) this.settingsJSON.get("checkpoint")).booleanValue();
        }
        return DEFAULT_CHECKPOINTING;
    }
    public boolean isSoundReceivePaymentEnabled()
    {
        if(this.settingsJSON.containsKey("soundreceivepayment"))
        {
            return ((Boolean) this.settingsJSON.get("soundreceivepayment")).booleanValue();
        }
        return DEFAULT_SOUND_RECEIVE_COIN;
    }
    public boolean isSoundReceiveMessageEnabled()
    {
        if(this.settingsJSON.containsKey("soundreceivemessage"))
        {
            return ((Boolean) this.settingsJSON.get("soundreceivemessage")).booleanValue();
        }
        return DEFAULT_SOUND_MESSAGE;
    }
    public boolean isSoundNewTransactionEnabled()
    {
        if(this.settingsJSON.containsKey("soundnewtransaction"))
        {
            return ((Boolean) this.settingsJSON.get("soundnewtransaction")).booleanValue();
        }
        return DEFAULT_SOUND_NEW_TRANSACTION;
    }
    public int getMaxBytePerFee()
    {
        if(this.settingsJSON.containsKey("maxbyteperfee"))
        {
            return ((Long) this.settingsJSON.get("maxbyteperfee")).intValue();
        }
        return DEFAULT_MAX_BYTE_PER_FEE;
    }
    // Threshold above which the "big fee" warning is shown; not configurable.
    public BigDecimal getBigFee()
    {
        return DEFAULT_BIG_FEE;
    }
    public String getBigFeeMessage()
    {
        return DEFAULT_BIG_FEE_MESSAGE;
    }
    public boolean isGuiEnabled()
    {
        if(this.settingsJSON.containsKey("guienabled"))
        {
            return ((Boolean) this.settingsJSON.get("guienabled")).booleanValue();
        }
        return DEFAULT_GUI_ENABLED;
    }
    public boolean isBlockExplorerBoost()
    {
        if(this.settingsJSON.containsKey("blockexplorerboost"))
        {
            return ((Boolean) this.settingsJSON.get("blockexplorerboost")).booleanValue();
        }
        return DEFAULT_BLOCKEXPLORER_BOOST;
    }
}
|
package settings;
import java.io.File;
import java.io.InputStream;
import java.math.BigDecimal;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.logging.Logger;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
import controller.Controller;
import lang.Lang;
import network.Peer;
import ntp.NTP;
/**
 * Singleton configuration holder for the Qora node. Reads settings.json
 * (honoring an optional "userpath" redirection, one level at most) and a
 * separate peers.json, and can fall back to a peer list fetched from GitHub
 * when the local lists are empty or the node has been offline. Getters fall
 * back to the DEFAULT_* constants when a key is absent; numeric JSON values
 * are assumed to be parsed as Long by json-simple.
 */
public class Settings {
    //NETWORK
    private static final int DEFAULT_MIN_CONNECTIONS = 10;
    private static final int DEFAULT_MAX_CONNECTIONS = 50;
    private static final int DEFAULT_MAX_RECEIVE_PEERS = 20;
    private static final int DEFAULT_MAX_SENT_PEERS = 20;
    private static final int DEFAULT_CONNECTION_TIMEOUT = 10000;
    private static final int DEFAULT_PING_INTERVAL = 30000;
    private static final boolean DEFAULT_TRYING_CONNECT_TO_BAD_PEERS = true;
    private static final String[] DEFAULT_PEERS = { };
    //TESTNET
    public static final long DEFAULT_MAINNET_STAMP = 1400247274336L; // QORA RELEASE
    // -1 means "not resolved yet"; see getGenesisStamp().
    private long genesisStamp = -1;
    //RPC
    private static final int DEFAULT_RPC_PORT = 9085;
    private static final String DEFAULT_RPC_ALLOWED = "127.0.0.1";
    private static final boolean DEFAULT_RPC_ENABLED = true;
    //GUI CONSOLE
    private static final boolean DEFAULT_GUI_CONSOLE_ENABLED = true;
    //WEB
    private static final int DEFAULT_WEB_PORT = 9090;
    private static final String DEFAULT_WEB_ALLOWED = "127.0.0.1";
    private static final boolean DEFAULT_WEB_ENABLED = true;
    //GUI
    private static final boolean DEFAULT_GUI_ENABLED = true;
    //DATA
    private static final String DEFAULT_DATA_DIR = "data";
    private static final String DEFAULT_WALLET_DIR = "wallet";
    private static final boolean DEFAULT_GENERATOR_KEY_CACHING = false;
    private static final boolean DEFAULT_CHECKPOINTING = true;
    private static final boolean DEFAULT_SOUND_RECEIVE_COIN = true;
    private static final boolean DEFAULT_SOUND_MESSAGE = true;
    private static final boolean DEFAULT_SOUND_NEW_TRANSACTION = true;
    private static final int DEFAULT_MAX_BYTE_PER_FEE = 512;
    private static final boolean ALLOW_FEE_LESS_REQUIRED = false;
    private static final BigDecimal DEFAULT_BIG_FEE = new BigDecimal(1000);
    //DATE FORMAT
    private static final String DEFAULT_TIME_ZONE = "";
    private static final String DEFAULT_TIME_FORMAT = "";
    private static final boolean DEFAULT_NS_UPDATE = false;
    private static final boolean DEFAULT_FORGING_ENABLED = true;
    public static String DEFAULT_LANGUAGE = "en.json";
    private static Settings instance;
    // Parsed settings.json (from the resolved user path).
    private JSONObject settingsJSON;
    // Parsed peers.json, or an empty object when the file does not exist.
    private JSONObject peersJSON;
    // Directory prefix from "userpath", normalized to end with a separator.
    private String userPath = "";
    private InetAddress localAddress;
    // Cache for the GitHub-fetched peer list, refreshed at most daily.
    List<Peer> cacheInternetPeers;
    long timeLoadInternetPeers;
    // Lazily-created singleton accessor (not thread-safe).
    public static Settings getInstance()
    {
        if(instance == null)
        {
            instance = new Settings();
        }
        return instance;
    }
    // Drops the singleton so the next getInstance() re-reads the files.
    public static void FreeInstance()
    {
        if(instance != null)
        {
            instance = null;
        }
    }
    /**
     * Loads settings.json (following a "userpath" redirection once at most)
     * and then peers.json from the resolved user path. Any failure while
     * reading settings.json, or while reading an existing peers.json,
     * terminates the process.
     */
    private Settings()
    {
        this.localAddress = this.getCurrentIp();
        int alreadyPassed = 0;
        File file = new File("");
        try
        {
            while(alreadyPassed<2)
            {
                //OPEN FILE
                file = new File(this.userPath + "settings.json");
                //CREATE FILE IF IT DOESNT EXIST
                if(!file.exists())
                {
                    file.createNewFile();
                }
                //READ SETTINS JSON FILE
                List<String> lines = Files.readLines(file, Charsets.UTF_8);
                String jsonString = "";
                for(String line : lines){
                    jsonString += line;
                }
                //CREATE JSON OBJECT
                // NOTE(review): JSONValue.parse returns null for an empty or
                // invalid file, so containsKey below then throws NPE and the
                // process exits — confirm that is the intended behavior.
                this.settingsJSON = (JSONObject) JSONValue.parse(jsonString);
                alreadyPassed++;
                if(this.settingsJSON.containsKey("userpath"))
                {
                    this.userPath = (String) this.settingsJSON.get("userpath");
                    // Normalize: ensure the path ends with a separator.
                    if (!(this.userPath.endsWith("\\") || this.userPath.endsWith("/")))
                    {
                        this.userPath += "/";
                    }
                }
                else
                {
                    alreadyPassed ++;
                }
            }
        }
        catch(Exception e)
        {
            //STOP
            System.out.println("Error while reading/creating settings.json " + file.getAbsolutePath());
            e.printStackTrace();
            System.exit(0);
        }
        //TRY READ PEERS.JSON
        try
        {
            //OPEN FILE
            file = new File(this.getPeersPath());
            //CREATE FILE IF IT DOESNT EXIST
            if(file.exists())
            {
                //READ PEERS FILE
                List<String> lines = Files.readLines(file, Charsets.UTF_8);
                String jsonString = "";
                for(String line : lines){
                    jsonString += line;
                }
                //CREATE JSON OBJECT
                this.peersJSON = (JSONObject) JSONValue.parse(jsonString);
            } else {
                this.peersJSON = new JSONObject();
            }
        }
        catch(Exception e)
        {
            //STOP
            System.out.println("Error while reading peers.json " + file.getAbsolutePath());
            e.printStackTrace();
            System.exit(0);
        }
    }
    // Exposes a shallow clone of the parsed settings JSON.
    public JSONObject Dump()
    {
        return (JSONObject) settingsJSON.clone();
    }
    public String getSettingsPath()
    {
        return this.userPath + "settings.json";
    }
    public String getPeersPath()
    {
        return this.userPath + "peers.json";
    }
    public String getWalletDir()
    {
        return this.getUserPath() + DEFAULT_WALLET_DIR;
    }
    public String getDataDir()
    {
        return this.getUserPath() + DEFAULT_DATA_DIR;
    }
    public String getLangDir()
    {
        return this.getUserPath() + "languages";
    }
    public String getUserPath()
    {
        return this.userPath;
    }
    // "knownpeers" array from peers.json, or an empty array when absent.
    public JSONArray getPeersJson()
    {
        if(this.peersJSON != null && this.peersJSON.containsKey("knownpeers")) {
            return (JSONArray) this.peersJSON.get("knownpeers");
        } else {
            return new JSONArray();
        }
    }
    /**
     * Merges the "knownpeers" arrays from settings.json and peers.json
     * (deduplicated), converts them to Peer objects, and falls back to the
     * internet-hosted list when the result is empty or the node has been
     * offline for more than five minutes. Returns an empty list on error.
     */
    @SuppressWarnings("unchecked")
    public List<Peer> getKnownPeers()
    {
        try {
            boolean loadPeersFromInternet = (
                Controller.getInstance().getToOfflineTime() != 0L
                &&
                NTP.getTime() - Controller.getInstance().getToOfflineTime() > 5*60*1000
            );
            List<Peer> knownPeers = new ArrayList<Peer>();
            JSONArray peersArray = new JSONArray();
            try {
                JSONArray peersArraySettings = (JSONArray) this.settingsJSON.get("knownpeers");
                if(peersArraySettings != null)
                {
                    for (Object peer : peersArraySettings) {
                        if(!peersArray.contains(peer)) {
                            peersArray.add(peer);
                        }
                    }
                }
            } catch (Exception e) {
                Logger.getGlobal().info("Error with loading knownpeers from settings.json.");
            }
            try {
                JSONArray peersArrayPeers = (JSONArray) this.peersJSON.get("knownpeers");
                if(peersArrayPeers != null)
                {
                    for (Object peer : peersArrayPeers) {
                        if(!peersArray.contains(peer)) {
                            peersArray.add(peer);
                        }
                    }
                }
            } catch (Exception e) {
                Logger.getGlobal().info("Error with loading knownpeers from peers.json.");
            }
            knownPeers = getKnownPeersFromJSONArray(peersArray);
            if(knownPeers.size() == 0 || loadPeersFromInternet)
            {
                knownPeers = getKnownPeersFromInternet();
            }
            return knownPeers;
        } catch (Exception e) {
            Logger.getGlobal().info("Error in getKnownPeers().");
            return new ArrayList<Peer>();
        }
    }
    /**
     * Fetches the known-peer list from the project's GitHub repository,
     * caching the result for 24 hours. On error the (possibly stale or
     * empty) cache is returned.
     */
    public List<Peer> getKnownPeersFromInternet()
    {
        try {
            if(this.cacheInternetPeers == null) {
                this.cacheInternetPeers = new ArrayList<Peer>();
            }
            if(this.cacheInternetPeers.size() == 0 || NTP.getTime() - this.timeLoadInternetPeers > 24*60*60*1000 )
            {
                this.timeLoadInternetPeers = NTP.getTime();
                URL u = new URL("https://raw.githubusercontent.com/Qoracoin/Qora/master/Qora/peers.json");
                InputStream in = u.openStream();
                // NOTE(review): the stream is not closed and IOUtils.toString
                // uses the platform default charset here — confirm acceptable.
                String stringInternetSettings = IOUtils.toString( in );
                JSONObject internetSettingsJSON = (JSONObject) JSONValue.parse(stringInternetSettings);
                JSONArray peersArray = (JSONArray) internetSettingsJSON.get("knownpeers");
                if(peersArray != null) {
                    this.cacheInternetPeers = getKnownPeersFromJSONArray(peersArray);
                }
            }
            Logger.getGlobal().info(Lang.getInstance().translate("Peers loaded from Internet : ") + this.cacheInternetPeers.size());
            return this.cacheInternetPeers;
        } catch (Exception e) {
            //RETURN EMPTY LIST
            Logger.getGlobal().info(Lang.getInstance().translate("Peers loaded from Internet with errors : ") + this.cacheInternetPeers.size());
            return this.cacheInternetPeers;
        }
    }
    /**
     * Converts a JSON array of host strings to Peer objects, skipping the
     * local address and logging unresolvable entries. An empty input array
     * is first filled from DEFAULT_PEERS (which mutates the argument).
     */
    @SuppressWarnings("unchecked")
    public List<Peer> getKnownPeersFromJSONArray(JSONArray peersArray)
    {
        try
        {
            //GET PEERS FROM JSON
            if(peersArray.isEmpty())
                peersArray.addAll(Arrays.asList(DEFAULT_PEERS));
            //CREATE LIST WITH PEERS
            List<Peer> peers = new ArrayList<Peer>();
            for(int i=0; i<peersArray.size(); i++)
            {
                try
                {
                    InetAddress address = InetAddress.getByName((String) peersArray.get(i));
                    if(!this.isLocalAddress(address))
                    {
                        //CREATE PEER
                        Peer peer = new Peer(address);
                        //ADD TO LIST
                        peers.add(peer);
                    }
                }catch(Exception e)
                {
                    Logger.getGlobal().info((String) peersArray.get(i) + " - invalid peer address!");
                }
            }
            //RETURN
            return peers;
        }
        catch(Exception e)
        {
            //RETURN EMPTY LIST
            return new ArrayList<Peer>();
        }
    }
    public void setGenesisStamp(long testNetStamp) {
        this.genesisStamp = testNetStamp;
    }
    // The node is on testnet whenever the genesis stamp differs from mainnet's.
    public boolean isTestnet () {
        return this.getGenesisStamp() != DEFAULT_MAINNET_STAMP;
    }
    /**
     * Lazily resolves the genesis timestamp: "testnetstamp" of "now" or 0
     * means the current wall clock; otherwise the configured value; absent
     * means the mainnet release stamp.
     */
    public long getGenesisStamp() {
        if(this.genesisStamp == -1) {
            if(this.settingsJSON.containsKey("testnetstamp"))
            {
                if(this.settingsJSON.get("testnetstamp").toString().equals("now") ||
                    ((Long) this.settingsJSON.get("testnetstamp")).longValue() == 0) {
                    this.genesisStamp = System.currentTimeMillis();
                } else {
                    this.genesisStamp = ((Long) this.settingsJSON.get("testnetstamp")).longValue();
                }
            } else {
                this.genesisStamp = DEFAULT_MAINNET_STAMP;
            }
        }
        return this.genesisStamp;
    }
    public int getMaxConnections()
    {
        if(this.settingsJSON.containsKey("maxconnections"))
        {
            return ((Long) this.settingsJSON.get("maxconnections")).intValue();
        }
        return DEFAULT_MAX_CONNECTIONS;
    }
    public int getMaxReceivePeers()
    {
        if(this.settingsJSON.containsKey("maxreceivepeers"))
        {
            return ((Long) this.settingsJSON.get("maxreceivepeers")).intValue();
        }
        return DEFAULT_MAX_RECEIVE_PEERS;
    }
    public int getMaxSentPeers()
    {
        if(this.settingsJSON.containsKey("maxsentpeers"))
        {
            return ((Long) this.settingsJSON.get("maxsentpeers")).intValue();
        }
        return DEFAULT_MAX_SENT_PEERS;
    }
    public int getMinConnections()
    {
        if(this.settingsJSON.containsKey("minconnections"))
        {
            return ((Long) this.settingsJSON.get("minconnections")).intValue();
        }
        return DEFAULT_MIN_CONNECTIONS;
    }
    public int getConnectionTimeout()
    {
        if(this.settingsJSON.containsKey("connectiontimeout"))
        {
            return ((Long) this.settingsJSON.get("connectiontimeout")).intValue();
        }
        return DEFAULT_CONNECTION_TIMEOUT;
    }
    public boolean isTryingConnectToBadPeers()
    {
        if(this.settingsJSON.containsKey("tryingconnecttobadpeers"))
        {
            return ((Boolean) this.settingsJSON.get("tryingconnecttobadpeers")).booleanValue();
        }
        return DEFAULT_TRYING_CONNECT_TO_BAD_PEERS;
    }
    public int getRpcPort()
    {
        if(this.settingsJSON.containsKey("rpcport"))
        {
            return ((Long) this.settingsJSON.get("rpcport")).intValue();
        }
        return DEFAULT_RPC_PORT;
    }
    /**
     * Hosts allowed to use the RPC API; falls back to the default
     * (semicolon-separated) list, or an empty array on error.
     */
    public String[] getRpcAllowed()
    {
        try
        {
            if(this.settingsJSON.containsKey("rpcallowed"))
            {
                //GET PEERS FROM JSON
                JSONArray allowedArray = (JSONArray) this.settingsJSON.get("rpcallowed");
                //CREATE LIST WITH PEERS
                String[] allowed = new String[allowedArray.size()];
                for(int i=0; i<allowedArray.size(); i++)
                {
                    allowed[i] = (String) allowedArray.get(i);
                }
                //RETURN
                return allowed;
            }
            //RETURN
            return DEFAULT_RPC_ALLOWED.split(";");
        }
        catch(Exception e)
        {
            //RETURN EMPTY LIST
            return new String[0];
        }
    }
    public boolean isRpcEnabled()
    {
        if(this.settingsJSON.containsKey("rpcenabled"))
        {
            return ((Boolean) this.settingsJSON.get("rpcenabled")).booleanValue();
        }
        return DEFAULT_RPC_ENABLED;
    }
    public int getWebPort()
    {
        if(this.settingsJSON.containsKey("webport"))
        {
            return ((Long) this.settingsJSON.get("webport")).intValue();
        }
        return DEFAULT_WEB_PORT;
    }
    public boolean isGuiConsoleEnabled()
    {
        if(this.settingsJSON.containsKey("guiconsoleenabled"))
        {
            return ((Boolean) this.settingsJSON.get("guiconsoleenabled")).booleanValue();
        }
        return DEFAULT_GUI_CONSOLE_ENABLED;
    }
    /**
     * Hosts allowed to use the web API; same fallback behavior as
     * getRpcAllowed().
     */
    public String[] getWebAllowed()
    {
        try
        {
            if(this.settingsJSON.containsKey("weballowed"))
            {
                //GET PEERS FROM JSON
                JSONArray allowedArray = (JSONArray) this.settingsJSON.get("weballowed");
                //CREATE LIST WITH PEERS
                String[] allowed = new String[allowedArray.size()];
                for(int i=0; i<allowedArray.size(); i++)
                {
                    allowed[i] = (String) allowedArray.get(i);
                }
                //RETURN
                return allowed;
            }
            //RETURN
            return DEFAULT_WEB_ALLOWED.split(";");
        }
        catch(Exception e)
        {
            //RETURN EMPTY LIST
            return new String[0];
        }
    }
    public boolean isWebEnabled()
    {
        if(this.settingsJSON.containsKey("webenabled"))
        {
            return ((Boolean) this.settingsJSON.get("webenabled")).booleanValue();
        }
        return DEFAULT_WEB_ENABLED;
    }
    // Whether the local name-storage should be updated ("nsupdate" key).
    public boolean updateNameStorage()
    {
        if(this.settingsJSON.containsKey("nsupdate"))
        {
            return ((Boolean) this.settingsJSON.get("nsupdate")).booleanValue();
        }
        return DEFAULT_NS_UPDATE;
    }
    // Unlike the other boolean getters, tolerates a malformed "forging" value
    // by logging and falling back to the default.
    public boolean isForgingEnabled()
    {
        try {
            if(this.settingsJSON.containsKey("forging"))
            {
                return ((Boolean) this.settingsJSON.get("forging")).booleanValue();
            }
        } catch (Exception e) {
            System.err.println("Bad Settings.json content for parameter forging " + ExceptionUtils.getStackTrace(e));
        }
        return DEFAULT_FORGING_ENABLED;
    }
    public int getPingInterval()
    {
        if(this.settingsJSON.containsKey("pinginterval"))
        {
            return ((Long) this.settingsJSON.get("pinginterval")).intValue();
        }
        return DEFAULT_PING_INTERVAL;
    }
    public boolean isGeneratorKeyCachingEnabled()
    {
        if(this.settingsJSON.containsKey("generatorkeycaching"))
        {
            return ((Boolean) this.settingsJSON.get("generatorkeycaching")).booleanValue();
        }
        return DEFAULT_GENERATOR_KEY_CACHING;
    }
    public boolean isCheckpointingEnabled()
    {
        if(this.settingsJSON.containsKey("checkpoint"))
        {
            return ((Boolean) this.settingsJSON.get("checkpoint")).booleanValue();
        }
        return DEFAULT_CHECKPOINTING;
    }
    public boolean isSoundReceivePaymentEnabled()
    {
        if(this.settingsJSON.containsKey("soundreceivepayment"))
        {
            return ((Boolean) this.settingsJSON.get("soundreceivepayment")).booleanValue();
        }
        return DEFAULT_SOUND_RECEIVE_COIN;
    }
    public boolean isSoundReceiveMessageEnabled()
    {
        if(this.settingsJSON.containsKey("soundreceivemessage"))
        {
            return ((Boolean) this.settingsJSON.get("soundreceivemessage")).booleanValue();
        }
        return DEFAULT_SOUND_MESSAGE;
    }
    public boolean isSoundNewTransactionEnabled()
    {
        if(this.settingsJSON.containsKey("soundnewtransaction"))
        {
            return ((Boolean) this.settingsJSON.get("soundnewtransaction")).booleanValue();
        }
        return DEFAULT_SOUND_NEW_TRANSACTION;
    }
    public int getMaxBytePerFee()
    {
        if(this.settingsJSON.containsKey("maxbyteperfee"))
        {
            return ((Long) this.settingsJSON.get("maxbyteperfee")).intValue();
        }
        return DEFAULT_MAX_BYTE_PER_FEE;
    }
    public boolean isAllowFeeLessRequired()
    {
        if(this.settingsJSON.containsKey("allowfeelessrequired"))
        {
            return ((Boolean) this.settingsJSON.get("allowfeelessrequired")).booleanValue();
        }
        return ALLOW_FEE_LESS_REQUIRED;
    }
    // Threshold above which the "big fee" warning is shown; not configurable.
    public BigDecimal getBigFee()
    {
        return DEFAULT_BIG_FEE;
    }
    /**
     * The GUI is forced on when no wallet database exists yet, forced off by
     * the "nogui" system property, and otherwise follows "guienabled".
     */
    public boolean isGuiEnabled()
    {
        if(!Controller.getInstance().doesWalletDatabaseExists())
        {
            return true;
        }
        if(System.getProperty("nogui") != null)
        {
            return false;
        }
        if(this.settingsJSON.containsKey("guienabled"))
        {
            return ((Boolean) this.settingsJSON.get("guienabled")).booleanValue();
        }
        return DEFAULT_GUI_ENABLED;
    }
    public String getTimeZone()
    {
        if(this.settingsJSON.containsKey("timezone")) {
            return (String) this.settingsJSON.get("timezone");
        }
        return DEFAULT_TIME_ZONE;
    }
    public String getTimeFormat()
    {
        if(this.settingsJSON.containsKey("timeformat")) {
            return (String) this.settingsJSON.get("timeformat");
        }
        return DEFAULT_TIME_FORMAT;
    }
    // System-tray icon toggle; defaults to enabled (no DEFAULT_ constant).
    public boolean isSysTrayEnabled() {
        if(this.settingsJSON.containsKey("systray"))
        {
            return ((Boolean) this.settingsJSON.get("systray")).booleanValue();
        }
        return true;
    }
    // True only when the given address equals the address detected at startup.
    public boolean isLocalAddress(InetAddress address) {
        try {
            if(this.localAddress == null) {
                return false;
            } else {
                return address.equals(this.localAddress);
            }
        } catch (Exception e) {
            return false;
        }
    }
    /**
     * Returns the first non-loopback, non-link-local IPv4 address found on
     * any network interface, or null when none exists or lookup fails.
     */
    public InetAddress getCurrentIp() {
        try {
            Enumeration<NetworkInterface> networkInterfaces = NetworkInterface
                    .getNetworkInterfaces();
            while (networkInterfaces.hasMoreElements()) {
                NetworkInterface ni = (NetworkInterface) networkInterfaces
                        .nextElement();
                Enumeration<InetAddress> nias = ni.getInetAddresses();
                while(nias.hasMoreElements()) {
                    InetAddress ia= (InetAddress) nias.nextElement();
                    if (!ia.isLinkLocalAddress()
                            && !ia.isLoopbackAddress()
                            && ia instanceof Inet4Address) {
                        return ia;
                    }
                }
            }
        } catch (SocketException e) {
            System.out.println("unable to get current IP " + e.getMessage());
        }
        return null;
    }
    // Language file name, e.g. "en.json".
    public String getLang()
    {
        if(this.settingsJSON.containsKey("lang"))
        {
            return ((String) this.settingsJSON.get("lang").toString());
        }
        return DEFAULT_LANGUAGE;
    }
}
|
package org.mtransit.parser.ca_edmonton_ets_bus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.regex.Pattern;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MDirectionType;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
public class EdmontonETSBusAgencyTools extends DefaultAgencyTools {
public static void main(String[] args) {
if (args == null || args.length == 0) {
args = new String[3];
args[0] = "input/gtfs.zip";
args[1] = "../../mtransitapps/ca-edmonton-ets-bus-android/res/raw/";
args[2] = ""; // files-prefix
}
new EdmontonETSBusAgencyTools().start(args);
}
	private HashSet<String> serviceIds; // service IDs in use (filled by start()); null disables the exclude* filtering below
@Override
public void start(String[] args) {
System.out.printf("\nGenerating ETS bus data...");
long start = System.currentTimeMillis();
this.serviceIds = extractUsefulServiceIds(args, this);
super.start(args);
System.out.printf("\nGenerating ETS bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
}
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
if (this.serviceIds != null) {
return excludeUselessCalendar(gCalendar, this.serviceIds);
}
return super.excludeCalendar(gCalendar);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
if (this.serviceIds != null) {
return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
return super.excludeCalendarDate(gCalendarDates);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
if (this.serviceIds != null) {
return excludeUselessTrip(gTrip, this.serviceIds);
}
return super.excludeTrip(gTrip);
}
	@Override
	public Integer getAgencyRouteType() {
		// This parser only produces bus routes.
		return MAgency.ROUTE_TYPE_BUS;
	}
@Override
public long getRouteId(GRoute gRoute) {
return Long.parseLong(gRoute.getRouteShortName()); // using route short name as route ID
}
	// ---------------------------------------------------------------------
	// Destination / head-sign name fragments used by setTripHeadsign() and the
	// manual route-trip specs below. Abbreviations mirror the agency's signage
	// (e.g. "WEM" = West Edmonton Mall, "Ft" = Fort, "RAH" = Royal Alex Hosp).
	// ---------------------------------------------------------------------
	private static final String SLASH = " / ";
	private static final String FORT = "Ft";
	private static final String _AVE = " Ave";
	private static final String _ST = " St";
	private static final String EDMONTON = "Edm";
	private static final String EDM_GARRISON = EDMONTON + " Garrison";
	private static final String WEST_EDM_MALL = "WEM"; // "West " + EDMONTON + " Mall";
	private static final String LEWIS_FARMS = "Lewis Farms";
	private static final String WEM_LEWIS_FARMS = WEST_EDM_MALL + SLASH + LEWIS_FARMS;
	private static final String CAPILANO = "Capilano"; // "Capilano Transit Ctr"
	private static final String CLAREVIEW = "Clareview";
	private static final String CROMDALE = "Cromdale";
	private static final String JASPER_PLACE = "Jasper Pl";
	private static final String COLISEUM = "Coliseum";
	private static final String WESTMOUNT = "Westmount";
	private static final String UNIVERSITY = "University";
	private static final String MILL_WOODS = "Mill Woods";
	private static final String SOUTHGATE = "Southgate";
	private static final String NORTHGATE = "Northgate";
	private static final String ABBOTTSFIELD = "Abbottsfield";
	private static final String EAUX_CLAIRES = "Eaux Claires";
	private static final String _82_ST_132_AVE = "82" + _ST + " / 132" + _AVE;
	private static final String _82_ST_132_AVE_EAUX_CLAIRES = _82_ST_132_AVE + SLASH + EAUX_CLAIRES;
	private static final String DOWNTOWN = "Downtown";
	private static final String DOWNTOWN_NORTHGATE = DOWNTOWN + SLASH + NORTHGATE;
	private static final String MILLGATE = "Millgate";
	private static final String MILLGATE_DOWNTOWN = MILLGATE + SLASH + DOWNTOWN;
	private static final String DOWNTOWN_JASPER_PLACE = DOWNTOWN + SLASH + JASPER_PLACE;
	private static final String GOV_CTR = "Gov Ctr";
	private static final String MAC_EWAN = "MacEwan";
	private static final String MAC_EWAN_GOV_CTR = MAC_EWAN + SLASH + GOV_CTR;
	private static final String CASTLE_DOWNS = "Castle Downs";
	private static final String CENTURY_PK = "Century Pk";
	private static final String YELLOWBIRD = "Yellowbird";
	private static final String YELLOWBIRD_CENTURY_PK = YELLOWBIRD + SLASH + CENTURY_PK;
	private static final String MILL_WOODS_CENTURY_PK = MILL_WOODS + SLASH + CENTURY_PK;
	private static final String S_CAMPUS = "S Campus";
	private static final String FT_EDM = FORT + " " + EDMONTON;
	private static final String S_CAMPUS_FT_EDM = S_CAMPUS + SLASH + FT_EDM;
	private static final String LEGER = "Leger";
	private static final String BRANDER_GDNS = "Brander Gdns";
	private static final String MEADOWS = "Meadows";
	private static final String BLACKMUD_CRK = "Blackmud Crk";
	private static final String BLACKBURN = "Blackburn";
	private static final String ALLARD = "Allard";
	private static final String HARRY_AINLAY_LP = "Harry Ainlay Lp";
	private static final String TWIN_BROOKS = "Twin Brooks";
	private static final String RUTHERFORD = "Rutherford";
	private static final String RUTHERFORD_BLACKBURN = RUTHERFORD + SLASH + BLACKBURN;
	private static final String SOUTHWOOD = "Southwood";
	private static final String S_EDM_COMMON = "S " + EDMONTON + " Common";
	private static final String PARKALLEN = "Parkallen";
	private static final String WINDSOR_PK = "Windsor Pk";
	private static final String PARKALLEN_WINDSOR_PK = PARKALLEN + SLASH + WINDSOR_PK;
	private static final String KNOTTWOOD = "Knottwood";
	private static final String BELVEDERE = "Belvedere";
	private static final String BONNIE_DOON = "Bonnie Doon";
	private static final String LAUREL = "Laurel";
	private static final String PLYPOW = "Plypow";
	private static final String TAMARACK = "Tamarack";
	private static final String BRECKENRIDGE_GRNS = "Breckenridge Grns";
	private static final String WESTRIDGE = "Westridge";
	private static final String LESSARD = "Lessard";
	private static final String LESSARD_WEST_EDM_MALL = LESSARD + SLASH + WEST_EDM_MALL;
	private static final String CAMERON_HTS = "Cameron Hts";
	private static final String LYMBURN = "Lymburn";
	private static final String WEDGEWOOD_HTS = "Wedgewood Hts";
	private static final String THE_GRANGE = "The Grange";
	private static final String RIO_TERRACE = "Rio Terrace";
	private static final String HAMPTONS = "Hamptons";
	private static final String WESTVIEW_VLG = "Westview Vlg";
	private static final String MISTATIM_IND = "Mistatim Ind";
	private static final String STADIUM = "Stadium";
	private static final String LAGO_LINDO = "Lago Lindo";
	private static final String MONTROSE = "Montrose";
	private static final String KINGSWAY_RAH = "Kingsway RAH";
	private static final String KING_EDWARD_PK = "King Edward Pk";
	private static final String RAPPERSWILL = "Rapperswill";
	private static final String OXFORD = "Oxford";
	private static final String _100_ST_160_AVE = "100" + _ST + " / 160" + _AVE;
	private static final String _95_ST_132_AVE = "95" + _ST + " / 132" + _AVE;
	private static final String CANOSSA = "Canossa";
	private static final String CHAMBERY = "Chambery";
	private static final String KERNOHAN = "Kernohan";
	private static final String LONDONDERRY = "Londonderry";
	private static final String EVERGREEN = "Evergreen";
	private static final String FRASER = "Fraser";
	private static final String FT_SASKATCHEWAN = FORT + " Saskatchewan";
	private static final String SPRUCE_GRV = "Spruce Grv";
	private static final String MC_CONACHIE = "McConachie";
	private static final String SCHONSEE = "Schonsee";
	private static final String BRINTNELL = "Brintnell";
	private static final String KLARVATTEN = "Klarvatten";
	private static final String RIVERDALE = "Riverdale";
	private static final String GOLD_BAR = "Gold Bar";
	private static final String JASPER_GATES = "Jasper Gates";
	private static final String SOUTH_PARK_CTR = "South Park Ctr";
	private static final String NORTHLANDS = "Northlands";
	private static final String HAWKS_RDG = "Hawks Rdg";
	private static final String WINTERBURN = "Winterburn";
	private static final String WINTERBURN_IND = WINTERBURN + " Ind";
	private static final String HOLYROOD = "Holyrood";
	private static final String STRATHCONA_IND = "Strathcona Ind";
	private static final String RITCHIE = "Ritchie";
	private static final String AMBLESIDE = "Ambleside";
	private static final String WINDERMERE = "Windermere";
	private static final String _104_ST_82_AVE = "104" + _ST + " / 82" + _AVE;
	private static final String BELGRAVIA = "Belgravia";
	private static final String ROSENTHAL = "Rosenthal";
	private static final String CHAPPELLE = "Chappelle";
	private static final String ORCHARDS = "Orchards";
	private static final String QUARRY_RDG = "Quarry Rdg";
	private static final String HOLLICK_KENYON = "Hollick Kenyon";
	private static final String EDM_WASTE_MGT_CTR = EDMONTON + " Waste Mgt Ctr";
	private static final String VLY_ZOO = "Vly Zoo";
	private static final String _84_ST_111_AVE = "84" + _ST + " / 111" + _AVE;
	private static final String VLY_ZOO_FT_EDM = VLY_ZOO + SLASH + FT_EDM;
	private static final String ALL_WEATHER_WINDOWS = "All Weather Windows";
	private static final String AIRPORT = "Int Airport";
@Override
public String getRouteLongName(GRoute gRoute) {
String gRouteLongName = gRoute.getRouteLongName();
gRouteLongName = CleanUtils.cleanStreetTypes(gRouteLongName);
return CleanUtils.cleanLabel(gRouteLongName);
}
	private static final String AGENCY_COLOR_BLUE = "2D3092"; // BLUE (from Wikipedia SVG)
	private static final String AGENCY_COLOR = AGENCY_COLOR_BLUE;
	@Override
	public String getAgencyColor() {
		// Agency-wide fallback color (hex RGB, no leading '#').
		return AGENCY_COLOR;
	}
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
if (ALL_ROUTE_TRIPS2.containsKey(routeId)) {
return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
if (ts1.getTripId() == 3801l) { // 38 East to Southgate
if ("4938".equals(ts1GStop.getStopCode()) && "4519".equals(ts2GStop.getStopCode())) {
return -1;
} else if ("4519".equals(ts1GStop.getStopCode()) && "4938".equals(ts2GStop.getStopCode())) {
return +1;
}
} else if (ts1.getTripId() == 3802l) { // 38 West to Leger
if ("4320".equals(ts1GStop.getStopCode()) && "4373".equals(ts2GStop.getStopCode())) {
return -1;
} else if ("4373".equals(ts1GStop.getStopCode()) && "4320".equals(ts2GStop.getStopCode())) {
return +1;
}
}
return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
	@Override
	public int compare(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
		// No late-stage ordering overrides; rely on the default comparison.
		return super.compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
	}
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.id)) {
return; // split
}
if (ALL_ROUTE_TRIPS.containsKey(mRoute.id)) {
return; // split
}
if (mRoute.id == 167l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("7496".equals(firstStopId) && "6447".equals(lastStopId)) {
mTrip.setHeadsignString(_100_ST_160_AVE, MDirectionType.NORTH.intValue());
return;
} else if ("6039".equals(firstStopId) && "7060".equals(lastStopId)) {
mTrip.setHeadsignString(_95_ST_132_AVE, MDirectionType.SOUTH.intValue());
return;
}
} else if (mRoute.id == 597l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("5208".equals(firstStopId) && "8740".equals(lastStopId)) {
mTrip.setHeadsignString(ALL_WEATHER_WINDOWS, MDirectionType.WEST.intValue());
return;
}
} else if (mRoute.id == 697l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("4810".equals(firstStopId) && "2703".equals(lastStopId)) {
mTrip.setHeadsignString(S_CAMPUS_FT_EDM, MDirectionType.NORTH.intValue());
return;
}
} else if (mRoute.id == 725l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("1111".equals(firstStopId) && "2002".equals(lastStopId)) {
mTrip.setHeadsignString(UNIVERSITY, MDirectionType.SOUTH.intValue());
return;
}
} else if (mRoute.id == 738l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("4815".equals(firstStopId) && "2709".equals(lastStopId)) {
mTrip.setHeadsignString(LEGER, MDirectionType.SOUTH.intValue());
return;
}
} else if (mRoute.id == 739l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("2745".equals(firstStopId) && "2002".equals(lastStopId)) {
mTrip.setHeadsignString(UNIVERSITY, MDirectionType.NORTH.intValue());
return;
}
} else if (mRoute.id == 757l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if (("6369".equals(firstStopId) || "5201".equals(firstStopId)) && "2002".equals(lastStopId)) {
mTrip.setHeadsignString(UNIVERSITY, MDirectionType.SOUTH.intValue());
return;
}
} else if (mRoute.id == 837l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("1814".equals(firstStopId) && "1110".equals(lastStopId)) {
mTrip.setHeadsignString(KINGSWAY_RAH, MDirectionType.WEST.intValue());
return;
}
} else if (mRoute.id == 853l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("7496".equals(firstStopId) && "7008".equals(lastStopId)) {
mTrip.setHeadsignString(NORTHGATE, MDirectionType.WEST.intValue());
return;
}
} else if (mRoute.id == 926l) {
String firstStopId = SplitUtils.getFirstStopId(mRoute, gtfs, gTrip);
String lastStopId = SplitUtils.getLastStopId(mRoute, gtfs, gTrip);
if ("2001".equals(firstStopId) && "5006".equals(lastStopId)) {
mTrip.setHeadsignString(WEST_EDM_MALL, MDirectionType.WEST.intValue());
return;
}
}
System.out.printf("\n%s: Unexpected trip %s.\n", mRoute.id, gTrip);
System.exit(-1);
}
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.id)) {
return ALL_ROUTE_TRIPS2.get(mRoute.id).getAllTrips();
}
if (ALL_ROUTE_TRIPS.containsKey(mRoute.id)) {
return ALL_ROUTE_TRIPS.get(mRoute.id).getAllTrips();
}
return super.splitTrip(mRoute, gTrip, gtfs);
}
	// Manual trip-split definitions (legacy from/to style): for each route, the
	// two directions' head-signs plus the stop codes delimiting each direction.
	private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS;
	static {
		HashMap<Long, RouteTripSpec> map = new HashMap<Long, RouteTripSpec>();
		// Route 10 uses raw direction IDs 0/1 instead of MDirectionType values.
		map.put(10l, new RouteTripSpec(10l,
				0, MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
				1, MTrip.HEADSIGN_TYPE_STRING, COLISEUM)
				.addALLFromTo(0, "1203", "7101")
				.addALLFromTo(1, "7101", "1203")
		);
		map.put(13l, new RouteTripSpec(13l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addALLFromTo(MDirectionType.NORTH.intValue(), "7011", "6005")
				.addALLFromTo(MDirectionType.SOUTH.intValue(), "6005", "7011")
		);
		map.put(17l, new RouteTripSpec(17l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
				.addALLFromTo(MDirectionType.NORTH.intValue(), "4203", "2206")
				.addALLFromTo(MDirectionType.SOUTH.intValue(), "2206", "4203")
		);
		map.put(23l, new RouteTripSpec(23l,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
				.addALLFromTo(MDirectionType.WEST.intValue(), "3217", "5001")
				.addALLFromTo(MDirectionType.EAST.intValue(), "5001", "3217")
		);
		map.put(24l, new RouteTripSpec(24l,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
				.addALLFromTo(MDirectionType.WEST.intValue(), "4201", "4806")
				.addALLFromTo(MDirectionType.EAST.intValue(), "4806", "4201")
		);
		map.put(31l, new RouteTripSpec(31l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER)
				.addALLFromTo(MDirectionType.NORTH.intValue(), "4813", "2208")
				.addALLFromTo(MDirectionType.SOUTH.intValue(), "2208", "4813")
		);
		map.put(32l, new RouteTripSpec(32l,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRANDER_GDNS,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM)
				.addALLFromTo(MDirectionType.WEST.intValue(), "2705", "4025")
				.addALLFromTo(MDirectionType.EAST.intValue(), "4025", "2705")
				.addBothFromTo(MDirectionType.WEST.intValue(), "2705", "2705") // 4025
		);
		map.put(34l, new RouteTripSpec(34l,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
				.addALLFromTo(MDirectionType.WEST.intValue(), "2209", "4809")
				.addALLFromTo(MDirectionType.EAST.intValue(), "4809", "2209")
		);
		map.put(35l, new RouteTripSpec(35l,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
				.addALLFromTo(MDirectionType.WEST.intValue(), "4215", "4812")
				.addALLFromTo(MDirectionType.EAST.intValue(), "4812", "4215")
		);
		map.put(36l, new RouteTripSpec(36l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
				.addALLFromTo(MDirectionType.NORTH.intValue(), "4211", "2703")
				.addALLFromTo(MDirectionType.SOUTH.intValue(), "2703", "4211")
		);
		map.put(37l, new RouteTripSpec(37l,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
				.addALLFromTo(MDirectionType.WEST.intValue(), "4215", "4802")
				.addALLFromTo(MDirectionType.EAST.intValue(), "4802", "4215")
		);
		map.put(38l, new RouteTripSpec(38l,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
				.addALLFromTo(MDirectionType.WEST.intValue(), "2207", "4805")
				.addALLFromTo(MDirectionType.EAST.intValue(), "4805", "2207")
		);
		map.put(39l, new RouteTripSpec(39l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD)
				.addALLFromTo(MDirectionType.NORTH.intValue(), "9242", "4213")
				.addALLFromTo(MDirectionType.SOUTH.intValue(), "4213", "9242")
		);
		ALL_ROUTE_TRIPS = map;
	}
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
static {
HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>();
map2.put(1l, new RouteTripSpec(1l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5009", "5302", "5110", "1346", "2591", "2301" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2301", "2267", "1620", "5101", "5301", "5009" }))
.compileBothTripSort());
map2.put(2l, new RouteTripSpec(2l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD_WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5723", "5008", "5437", "1336",
"1256", //
"1408", "1561", "1454", "7902" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "7902", "1561", "1407",
"1266", //
"1620", "5185", "5003", "5723" }))
.compileBothTripSort());
map2.put(3l, new RouteTripSpec(3l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CROMDALE,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5106", "5928", "1279", "1360", "1256", "1147" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1147", "1346", "1775", "1846", "1669", "5389", "5106" }))
.compileBothTripSort());
map2.put(4l, new RouteTripSpec(4l,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEM_LEWIS_FARMS,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2306", "2159", "2891", "2001",
"2660", //
"2702",
"5006", //
"5003", "8601" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "8601", "5006", "2714", "2002",
"2834", //
"2065", //
"2196", //
"2159", //
"2593", //
"2549", //
"2447", "2306" }))
.compileBothTripSort());
map2.put(5l, new RouteTripSpec(5l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5210", "1083", "1336", "1188",
"1051", //
"1268", "1202" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1202", "1328", "1620", "5210" }))
.compileBothTripSort());
map2.put(6l, new RouteTripSpec(6l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2203", "2270", "2109", "2102", "3006", "3215" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3215", "3010", "2109", "2266", "2888",
"2630", // ?
"2203" }))
.compileBothTripSort());
map2.put(7l, new RouteTripSpec(7l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5108", "1881", "1829", "1542", "2659", "2891", "2002" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2002", "2860", "2824", "1457", "1989", "1808", "5108" }))
.compileBothTripSort());
map2.put(8l, new RouteTripSpec(8l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3207",
"3008", // ?
"2108", "2551", "1457",
"1989", // ?
"1106",
"1476", //
"1201", "1001" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1001", "1208",
"1075", // ?
"1112",
"1557", // ?
"1542", "2549", "2103", "3207" }))
.compileBothTripSort());
map2.put(9l, new RouteTripSpec(9l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2218", "2875", "1591", "1108", "1476",
"7465", //
"7001", //
"7016",
"7448", //
"6317" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "6317",
"6536", //
"7001", "1532", "1104", "1142", "2690", "2218" }))
.compileBothTripSort());
map2.put(11l, new RouteTripSpec(11l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "7007", "7186", "7106" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "7106", "7572", "7007" }))
.compileBothTripSort());
map2.put(12l, new RouteTripSpec(12l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "1251", "1529", "1109", "1533", "1550", "1434", "1435", "1553", "1032", "7003" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "7003", "6369", "1109", "1113", "1251" }))
.compileBothTripSort());
map2.put(14l, new RouteTripSpec(14l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN_JASPER_PLACE,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5011", "5024",
/*-"5103", -*/
"5112",
"5103", //
"5293", //
"1999" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1123", "5103", "5855", "5011" }))
.compileBothTripSort());
map2.put(15l, new RouteTripSpec(15l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3208", "2117", "2551", "1457", "1532", "1476", "6317" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "6317", "1532", "1542", "2188", "2118", "3208" }))
.compileBothTripSort());
map2.put(16l, new RouteTripSpec(16l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "1310", "7011", "6314", "6075", "6576", "6009" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "6009", "6124", "6317",
"7011", //
"7003", "1310" }))
.compileBothTripSort());
map2.put(30l, new RouteTripSpec(30l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_CENTURY_PK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3217", "4211", "4811", "4597", "4153", "2704" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2704", "4021", "4494", "4811", "4803", "4202", "3217" }))
.compileBothTripSort());
map2.put(33l, new RouteTripSpec(33l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5001", "4021", "4040", "2973", "2205", "2215", "2118", "3713" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3713", "2117", "2205", "2984", "4021", "4153", "5001" }))
.compileBothTripSort());
map2.put(40l, new RouteTripSpec(40l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, YELLOWBIRD_CENTURY_PK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "4290", "4206", "", "4480", "4474", "2211" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2211", "4052", "4475", "4205", "4290" }))
.compileBothTripSort());
map2.put(41l, new RouteTripSpec(41l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "4208", "4168", "2213" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2213", "4486", "4208" }))
.compileBothTripSort());
map2.put(42l, new RouteTripSpec(42l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "4209", "4070", "2217" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2217", "4342", "4209" }))
.compileBothTripSort());
map2.put(43l, new RouteTripSpec(43l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "4214", "4156", "2973", "2711" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2711", "2511", "4378", "4214" }))
.compileBothTripSort());
map2.put(44l, new RouteTripSpec(44l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "4265",
"4233", //
"4204", "4210", "4362", "2204" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2204", "4198", "4204",
"4348", //
"4265" }))
.compileBothTripSort());
map2.put(45l, new RouteTripSpec(45l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "4207", "4588", "2214" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2214", "2888", "4198", "4207" }))
.compileBothTripSort());
map2.put(46l, new RouteTripSpec(46l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY_LP,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, YELLOWBIRD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "4290", "4209", "4307" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "4490", "4208", "4290" }))
.compileBothTripSort());
map2.put(47l, new RouteTripSpec(47l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ALLARD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "9301", "9163", "4206" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "4206", "9164", "9301" }))
.compileBothTripSort());
map2.put(48l, new RouteTripSpec(48l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKBURN)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "9226",
"4002", //
"4204" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "4204",
"9551", //
"9226" }))
.compileBothTripSort());
map2.put(49l, new RouteTripSpec(49l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKMUD_CRK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "9756", "9542", "4210" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "4210",
"4105", //
"9756" }))
.compileBothTripSort());
map2.put(50l, new RouteTripSpec(50l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2210", "4277", "2517", "2957", "2710" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2710", "2510", "2924", "4474", "2210" }))
.compileBothTripSort());
map2.put(51l, new RouteTripSpec(51l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKALLEN)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2795", "2861", "2001" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2001", "2889", "2795" })) //
.compileBothTripSort());
map2.put(52l, new RouteTripSpec(52l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] {
"2212",
"2849",
"2632", //
"2290", //
"1425",
"1728", //
"1991", //
"1308", "1777", "1262" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] {
"1262", "1305",
"1792", //
"1629", //
"1993", //
"1425", //
"1567", //
"2768", //
"2821",
"2665", //
"2212" }))
.compileBothTripSort());
map2.put(53l, new RouteTripSpec(53l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2216", "2973", "2712" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2712", "2511", "2216" }))
.compileBothTripSort());
map2.put(54l, new RouteTripSpec(54l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2710", "2001" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2001", "2710" }))
.compileBothTripSort());
map2.put(55l, new RouteTripSpec(55l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2202", "2830", "2709" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2709", "2966", "2202" }))
.compileBothTripSort());
map2.put(57l, new RouteTripSpec(57l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2002", "2860", "2824", "1383", "1358" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1358", "1383", "2659", "2891", "2002" }))
.compileBothTripSort());
map2.put(59l, new RouteTripSpec(59l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_EDM_COMMON)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "3440", "3003", "3209" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3209", "3007", "3440" }))
.compileBothTripSort());
map2.put(60l, new RouteTripSpec(60l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3210", "3585", "2104", "2101", "1780", "1989" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1824", "1780", "2104", "3233", "3210" }))
.compileBothTripSort());
map2.put(61l, new RouteTripSpec(61l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3211", "3585", "2105", "2104", "1780", "1989" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1824", "1780", "2105", "3529", "3211" }))
.compileBothTripSort());
map2.put(62l, new RouteTripSpec(62l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3154", "3161", "3203", "3212", "1780", "1804", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1824", "1804", "1780", "3203", "3139", "3154" }))
.compileBothTripSort());
map2.put(63l, new RouteTripSpec(63l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3143", "3067", "3204", "3212", "1383", "1358" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1358", "1383", "3204", "3080", "3143" }))
.compileBothTripSort());
map2.put(64l, new RouteTripSpec(64l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3026", "3006",
"3599", //
"3001", "3208", "2111", "1358", "1383" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1383", "1358", "2112", "3208", "3009", "3026" }))
.compileBothTripSort());
map2.put(65l, new RouteTripSpec(65l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3023", "3006", "3001", "3208", "2111", "1989" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1824", "2112", "3208", "3009", "3023" }))
.compileBothTripSort());
map2.put(66l, new RouteTripSpec(66l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3213", "3011", "3224", "2101", "2105", "1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1358", "2101", "3371", "3011", "3003", "3213" })) //
.compileBothTripSort());
map2.put(67l, new RouteTripSpec(67l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "3206", "3952", "3957", "3708" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3708", "3957", "3950", "3311", "3116", "3206" }))
.compileBothTripSort());
map2.put(68l, new RouteTripSpec(68l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(), Arrays.asList(new String[] { "3202", "3399", "3586", "2107", "2110", "1989" }))
.addTripSort(MDirectionType.SOUTH.intValue(), Arrays.asList(new String[] { "1824", "2107", "3230", "3584", "3202" }))
.compileBothTripSort());
map2.put(69l, new RouteTripSpec(69l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(), Arrays.asList(new String[] { "3214", "3702", "2110", "2107", "1989" }))
.addTripSort(MDirectionType.SOUTH.intValue(), Arrays.asList(new String[] { "1824", "2110", "3710", "3214" }))
.compileBothTripSort());
		// Route 70: Downtown <-> Mill Woods.
		// NOTE(review): NORTH trip-sort visits "2659" twice ("2659", "2824", "2659") —
		// presumably an intentional revisit of the same stop on a loop segment,
		// but it could also be a copy/paste slip; verify against the GTFS stop_times for route 70.
		map2.put(70l, new RouteTripSpec(70l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "3201", "3012", "3093", "2685", "2840", "2659", "2824", "2659", "1780" })) //
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "1780", "2840", "2245", "3497", "3004", "3201" }))
				.compileBothTripSort());
map2.put(71l, new RouteTripSpec(71l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3216", "3224", "2111", "1153", "1303" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1303", "1993", "2103", "3370", "3216" }))
.compileBothTripSort());
		// Route 72: Millgate/Downtown <-> Mill Woods.
		// NOTE(review): NORTH trip-sort lists "2106" twice in a row — adjacent duplicates
		// add nothing to a stop-ordering list and one is likely a typo for another stop ID;
		// confirm against the GTFS stop_times for route 72.
		map2.put(72l, new RouteTripSpec(72l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE_DOWNTOWN,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "3206", "3255", "3796", "3491", "2106", "2106", "2110", "1989" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "1989", "2110", "2106", "3355", "3748", "3185", "3206" }))
				.compileBothTripSort());
map2.put(73l, new RouteTripSpec(73l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2203", "2888", "2102", "3002", "3205" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3205", "3010", "2109", "2203" }))
.compileBothTripSort());
map2.put(74l, new RouteTripSpec(74l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2204", "4202","3671", "3107", "3559", "3209" })) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3209", "3007", "3430", "3110", "4202", "4212", "2204" }))
.compileBothTripSort());
map2.put(78l, new RouteTripSpec(78l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "4205", "3675", "9384", "9725", "3215" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3215", "9147", "9387", "3865", "4205" }))
.compileBothTripSort());
map2.put(79l, new RouteTripSpec(79l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "4207", "3319", "9260", "9139", "3214" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3214", "9287", "9671", "3513", "4207" }))
.compileBothTripSort());
map2.put(80l, new RouteTripSpec(80l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2218", "2769", "2826", "2551", "2599", "2223", "2305" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2305", "2152", "2264", "2188", "2622", "2837", "2888", "2630", "2218" })) //
.compileBothTripSort());
map2.put(81l, new RouteTripSpec(81l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3216", "2106", "2338",
"2697", "2659","2824", //
"1246", "1383", "1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1358", "1383", "2835", //
"2878", "2659", "2840", //
"2385", "2106", "2104", "3216" }))
.compileBothTripSort());
map2.put(82l, new RouteTripSpec(82l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3212", "2339", "2551", "1383", "1358" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1358", "1383", "2255", "2528", "3212" }))
.compileBothTripSort());
map2.put(83l, new RouteTripSpec(83l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1358", "1950", "2196", "2188", "2805",
"2362", "2536", //
"2943", "2286", //
"2693", "3706" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3706", "2693",
"2357", "2148", //
"2804", "2551", "2196","1457", "1763", "1358" })) //
.compileBothTripSort());
map2.put(84l, new RouteTripSpec(84l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2111", "2303" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2303", "2112" }))
.compileBothTripSort());
map2.put(85l, new RouteTripSpec(85l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1358", "2073", "2386", "2985", "2550", "2059", "2307" })) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2307", "2442", "1358" }))
.compileBothTripSort());
map2.put(86l, new RouteTripSpec(86l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1358", "2073", "2302" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2302", "2442", "1358" }))
.compileBothTripSort());
map2.put(87l, new RouteTripSpec(87l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2106", "2338", "2824", "1383", "1358" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1358", "1383", "2285", "2385", "2106" }))
.compileBothTripSort());
map2.put(88l, new RouteTripSpec(88l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1680", "1336", "2274", "2449", "2307" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2307", "2298", "2267", "1718" }))
.compileBothTripSort());
map2.put(89l, new RouteTripSpec(89l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TAMARACK)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3691",
"3608", "3610", "3192", "3193",//
"3979", "3613", "3711" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "3711", "3851", "3605", "3691" })) //
.compileBothTripSort());
map2.put(90l, new RouteTripSpec(90l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1824", "2255", "3707" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "3709", "2551", "1989" }))
.compileBothTripSort());
map2.put(91l, new RouteTripSpec(91l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2307",
"2425", //
"1371" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "1371", "1131", "2307" }))
.compileBothTripSort());
map2.put(92l, new RouteTripSpec(92l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PLYPOW,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2118", "2876", "22330"})) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { /*-"2876"-*//* + */"22330"/* + */, /* + */"22196"/* + */, "2118" })) //
.compileBothTripSort());
map2.put(94l, new RouteTripSpec(94l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "2002", "2860", "2447", "2274", "2449", "2303" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2303", "2298", "2591", "2159", "2891", "2002" }))
.compileBothTripSort());
map2.put(95l, new RouteTripSpec(95l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAUREL,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "3213", "3303", "3761"})) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { /*-"3303"-*//* + */"3761"/* + */, /* + */"3620"/* + */, "3213" })) //
.compileBothTripSort());
map2.put(96l, new RouteTripSpec(96l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2110", "2433", "2196" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2196", "2074", "2110" })) //
.compileBothTripSort());
map2.put(99l, new RouteTripSpec(99l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2304", "1206", "7211" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "7211", "1207", "2304" }))
.compileBothTripSort());
map2.put(100l, new RouteTripSpec(100l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1243", "1812", "5449", "5001", "5010", "8610" })) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "8610", "5001", "5054", "1083", "1256", "1243" })) //
.compileBothTripSort());
map2.put(101l, new RouteTripSpec(101l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD_HTS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5968", "5908", "5821", "5002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5002", "5979", "5660", "5968"})) //
.compileBothTripSort());
map2.put(102l, new RouteTripSpec(102l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5828", "5725", "5004" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "5004", "5755", "5828" }))
.compileBothTripSort());
map2.put(103l, new RouteTripSpec(103l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAMERON_HTS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5752", "5695", "5821", "5002" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5002", "5979", "5623", "5752" }))
.compileBothTripSort());
map2.put(104l, new RouteTripSpec(104l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { /*-"5755",-*/"5828", /* + */"5725"/* + */, "5821", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2706", /*-"5725"-,*//* + */"5755"/* + */,/* + */"5828"/* + */})) //
.compileBothTripSort());
map2.put(105l, new RouteTripSpec(105l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5017", "5932", "5733", "5821", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2706", /* "-5932-", *//* + */"5634"/* + */,/* + */"5017"/* + */})) //
.compileBothTripSort());
map2.put(106l, new RouteTripSpec(106l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5733", "5722", "5004", "5007", "2713", "2001" })) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2001", "2701", "5004","5699", "5733" })) //
.compileBothTripSort());
map2.put(107l, new RouteTripSpec(107l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTRIDGE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5747", "5657", "5005" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5005", "5877", "5747" }))
.compileBothTripSort());
map2.put(108l, new RouteTripSpec(108l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRECKENRIDGE_GRNS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "8670", "8279", "8608" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "8608", "8999", "8670" }))
.compileBothTripSort());
map2.put(109l, new RouteTripSpec(109l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5012", "5874", "5366", "5111", "5250", "5344", "1496" })) //
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1571", "5132", "5111", "5903", "5012" }))
.compileBothTripSort());
		// Route 110: Westridge <-> West Edmonton Mall.
		// NOTE(review): WEST trip-sort lists "5811" twice in a row — an adjacent duplicate
		// is almost certainly unintended (duplicate has no effect on ordering, or one entry
		// is a typo for a different stop); verify against the GTFS stop_times for route 110.
		map2.put(110l, new RouteTripSpec(110l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTRIDGE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "5005", "5877", "5747" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "5747", "5811", "5811", "5005" }))
				.compileBothTripSort());
map2.put(111l, new RouteTripSpec(111l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN_JASPER_PLACE,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5001", "5795", "5109", "1620" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1620", "5104", "5846", "5001" }))
.compileBothTripSort());
map2.put(112l, new RouteTripSpec(112l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5013", "5433", "5344", "1910",
"1824", //
"1542", "2122", "2302" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2302", "2497", "1457",
"1989", //
"1878", "5132", "5038", "5013" }))
.compileBothTripSort());
map2.put(113l, new RouteTripSpec(113l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5001", "5069", "5104" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5104", "5151", "5001" }))
.compileBothTripSort());
map2.put(114l, new RouteTripSpec(114l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTVIEW_VLG)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "8846", "8941", "5105" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "5105", "8904", "8849", "8846" }))
.compileBothTripSort());
map2.put(115l, new RouteTripSpec(115l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5013", "5433", "5344", "5209", "", "6333", "7011" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "7011", "6369", "5203", "5132", "5038", "5013" }))
.compileBothTripSort());
map2.put(117l, new RouteTripSpec(117l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HAMPTONS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEM_LEWIS_FARMS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5010", "8607", "8135", "8106" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "8106", "8033", /*-"8605",-*/"8607", /* + */"8605"/* + */, "5010" })) //
.compileBothTripSort());
map2.put(118l, new RouteTripSpec(118l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIO_TERRACE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5174", "5302", "5103" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5103", "5301", "5174" }))
.compileBothTripSort());
map2.put(119l, new RouteTripSpec(119l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_GRANGE)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "8583", "8097", "8033", "8607" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "8607", "8135", "8097", "8046", "8583" }))
.compileBothTripSort());
map2.put(120l, new RouteTripSpec(120l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STADIUM,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5110", "1242", "1083", "1336", "1407" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1407", "1328", "1620", "1746", "5110" }))
.compileBothTripSort());
map2.put(121l, new RouteTripSpec(121l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5205", "5215", "6345", "6646", "7011" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "7011", "6183", "6371", "5404", "5205" }))
.compileBothTripSort());
map2.put(122l, new RouteTripSpec(122l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5012", "8389", "5928", "5330", "5207" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "5207", "5143", "5389", "8952", "5012" }))
.compileBothTripSort());
map2.put(123l, new RouteTripSpec(123l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5105", "8691", "5648", "5374", "5205" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "5205", "5692", "5635", "8684", "5105" }))
.compileBothTripSort());
map2.put(124l, new RouteTripSpec(124l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MISTATIM_IND)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "6727", "6844", "5207" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "5207", "6345", "6727" }))
.compileBothTripSort());
		// Route 125: Downtown <-> Jasper Place.
		// NOTE(review): EAST trip-sort lists "1113" twice in a row — adjacent duplicates
		// are redundant in a stop-ordering list; one of them may be a typo for another
		// stop ID. Verify against the GTFS stop_times for route 125.
		map2.put(125l, new RouteTripSpec(125l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "5101", "5448", "5202", "1113", "1113", "1107", "1251" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "1251", "1107", "5209", "5150", "5101" }))
				.compileBothTripSort());
map2.put(126l, new RouteTripSpec(126l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5013", "8882", "8590", "5928", "5208" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5208", "5389", "8500", "8952", "5013" }))
.compileBothTripSort());
map2.put(127l, new RouteTripSpec(127l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, // 7205
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) // 5204
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5204", "1110", "1401", "1209", "1205", "7205" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "7205", "1209", "1402", "1110", "1105", "5204" }))
.compileBothTripSort());
map2.put(128l, new RouteTripSpec(128l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2002",
"2960", //
"5206", "6333",
"6435", //
"6553", //
"6136", //
"6006", //
"6002",
"6077", //
"6047"
}))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "6047",
"6078", //
"6006",
"6137", //
"6568", //
"6435", //
"6366", //
"6369", "5201",
"2515", //
"2002"
}))
.compileBothTripSort());
		// Route 129: Westmount <-> West Edmonton Mall.
		// NOTE(review): NORTH trip-sort lists "8740" twice in a row — the adjacent
		// duplicate is redundant for ordering and likely a typo for a different stop;
		// verify against the GTFS stop_times for route 129.
		map2.put(129l, new RouteTripSpec(129l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "5012", "8740", "8740", "5960", "5208" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "5208", "5936", "8740", "5012" }))
				.compileBothTripSort());
map2.put(130l, new RouteTripSpec(130l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "2002", "1700", "1107", "1532", "1476", "7002" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "7002", "1532", "1111", "1855", "2002" }))
.compileBothTripSort());
map2.put(133l, new RouteTripSpec(133l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "8606", "5001", "2701" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2701", "5010", "8606" }))
.compileBothTripSort());
map2.put(134l, new RouteTripSpec(134l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "1251", "1237", "7002" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "7002", "1372", "1251" }))
.compileBothTripSort());
map2.put(136l, new RouteTripSpec(136l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HAMPTONS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "8583", "8089", "8033", "8602", "5010" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5010", "8609", "8135", "8177", "8583" }))
.compileBothTripSort());
map2.put(137l, new RouteTripSpec(137l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5010", "8882", "6850", "7011" , "7002", "7908" })) //
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "7908", "7011", "6118", "8861", "5010" }))
.compileBothTripSort());
		// Routes 138-151. The /*-"x"-*/ and /* + */ comment markers record stops removed
		// from / added to the source GTFS ordering when these lists were hand-tuned.
		map2.put(138l, new RouteTripSpec(138l, // TODO not exactly: same loop for the 2 trips
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD_HTS)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { /*-"5627"-*//* + */"5968"/* + */, /* + */"5888"/* + */, /* + */"5789"/* + */, //
								"5983", "5747", "2707" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "2707", "5747", "5719",
								"5627", "5858", "5968"})) //
				.compileBothTripSort());
		map2.put(139l, new RouteTripSpec(139l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HAMPTONS)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "8457", "8106", "8033", "2707" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "2707", "8135", "8457", "8460" }))
				.compileBothTripSort());
		map2.put(140l, new RouteTripSpec(140l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN_NORTHGATE)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "1251", "1040", "7003", "7010", "7377" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7377", "7003", "1380", "1251" }))
				.compileBothTripSort());
		map2.put(141l, new RouteTripSpec(141l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "1204", "1561", "1003" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "1003", "1031", "1204" }))
				.compileBothTripSort());
		map2.put(142l, new RouteTripSpec(142l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "1207", "1521", "1001" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "1001", "1367", "1207" }))
				.compileBothTripSort());
		map2.put(143l, new RouteTripSpec(143l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MONTROSE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY_RAH)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "1111", "1476", "1441", "1205", "1260" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "1260", "1213", "1278", "1075", "1111" }))
				.compileBothTripSort());
		map2.put(145l, new RouteTripSpec(145l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _82_ST_132_AVE_EAUX_CLAIRES)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7496", "6315", "7377", "7388"})) //
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { /*-"7377",-*//* + */"7388"/* + */, /* + */"7483"/* + */, "6315", "6317", "7358", "7165" })) //
				.compileBothTripSort());
		map2.put(149l, new RouteTripSpec(149l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "6308", "7736", "7113", "7904" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7904", "7153", "7959", "6308" }))
				.compileBothTripSort());
		map2.put(150l, new RouteTripSpec(150l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "5007", "5107", "5207", "6333", "7011", "7010", "6303" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6303", "7011", "6369", "5203", "5102", "5007" }))
				.compileBothTripSort());
		map2.put(
				151l, // TODO not perfect but close enough
				new RouteTripSpec(151l, MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS,
						MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KING_EDWARD_PK)
						.addTripSort(
								MDirectionType.NORTH.intValue(),
								Arrays.asList(new String[] { "2253", "2432", "1251", "1591", "1966", "1262", "1346", "1128", "1237", "1043", "6496", "6421",
										"6571", "6328", "6222", "6132", "6333", "6553", "6020", "6487", "6251", "6004" }))
						.addTripSort(
								MDirectionType.SOUTH.intValue(),
								Arrays.asList(new String[] { "6004", "6426", "6020", "6224", "6234", "6349", "6542", "6434", "6568", "6366", "6292", "6123",
										"6383", "6116", "6266", "6496", "6280", "1372", "1064", "1966", "1262", "1243", "1142", "1251", "2079", "2253" }))
						.compileBothTripSort());
		// Routes 152-162. 161 and 162 share most of their Castle Downs <-> MacEwan/Gov Ctr
		// stop sequence; both carry the same TODO about seasonal trip variants.
		map2.put(152l, new RouteTripSpec(152l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7003", "7074", "7208" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7208", "7221", "7003" }))
				.compileBothTripSort());
		map2.put(153l, new RouteTripSpec(153l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7008", "7143", "7204" })) //
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7204", "7043", "7008" })) //
				.compileBothTripSort());
		map2.put(154l, new RouteTripSpec(154l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7009", "7592", "7202" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7202", "7123", "7009" }))
				.compileBothTripSort());
		map2.put(155l, new RouteTripSpec(155l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RAPPERSWILL)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "6823", "6416", "6313" })) //
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "6313", "6078", "6823" })) //
				.compileBothTripSort());
		map2.put(160l, new RouteTripSpec(160l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "1304", "1820", "6348", "6243", "6835", "6676" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6835", "6676", "6442", "6594", "1304" }))
				.compileBothTripSort());
		map2.put(
				161l,
				new RouteTripSpec(161l, // like 162 // TODO not perfect, 2 different trip ending, different trip summer/winter
						MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS,
						MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN_GOV_CTR)
						.addTripSort(
								MDirectionType.NORTH.intValue(),
								Arrays.asList(new String[] { "1309", "1711", "1035", "1903", "1871",
										"1824", "1829", "1983", "1680", "1783", "1820", "1707",
										"1845", "1271", "1571", "1253", "1555",//
										"7009", "6580", "6007" }))
						.addTripSort(MDirectionType.SOUTH.intValue(),
								Arrays.asList(new String[] { "6007", "6396", "7009", "7003", //
										"1221", "1280",
										"1721", "1496", "1673", "1622", "1740",
										"1756", "1655", "1868", "1837", "1718", "1626", "1703",
										"1850", "1989", "1643", "1964", "1309" })) //
						.compileBothTripSort());
		map2.put(162l, new RouteTripSpec(162l, // like 161 // TODO not perfect, 2 different trip ending, different trip summer/winter
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN_GOV_CTR)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] {
								// "1309", "1829", "1820", //
								"1309", "1711", "1035", "1903", "1871", "1824", "1829",
								"1983", "1680", "1783", "1820", "1707",
								"1845", "1271", "1571", "1253", "1555",//
								"6311", "6033", "6008" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6008", "6340", "6311", "6310",//
								"1221", "1280",
								"1721", "1496", "1673", "1622", "1740",
								"1756", "1655", "1868", "1837", "1718", "1626", "1703",
								"1850", "1989", "1643", "1964", "1309"
						}))
				.compileBothTripSort());
		// Routes 163-180: north-side feeders (Chambery, Canossa, Eaux Claires, Northgate)
		// plus the 180 Abbottsfield <-> Downtown crosstown.
		map2.put(163l, new RouteTripSpec(163l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CHAMBERY,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] {
								"6312", "7463", "7748", //
								"7381", "6194", "6767"})) //
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] {
								"6767", "6598", "6854", "6147", "6362", //
								"6074", "6076", "6236", "7482", "6312" })) //
				.compileBothTripSort());
		map2.put(164l, new RouteTripSpec(164l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CANOSSA,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7015", "6001", "6166" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6166", "6582", "6077", "6236", "6021",
								"6080", "6225", "6010", "7015" })) //
				.compileBothTripSort());
		map2.put(165l, new RouteTripSpec(165l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.id,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.id)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7496", "6130", "6522", "6011", "6127" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6074", "6010", "6396", "6579", "7299" }))
				.compileBothTripSort());
		map2.put(168l, new RouteTripSpec(168l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7011", "6243", "6619", "6835",
								"6725",
								"6003", "6305" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6305", "6011",
								"6228", //
								"6698", //
								"6725", //
								"6256", //
								"6566", //
								"6261", //
								"6114", //
								"6676", "6853", "6442", "7011" }))
				.compileBothTripSort());
		map2.put(169l, new RouteTripSpec(169l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.id,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.id)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7015", "6001", "6166", "6194" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6194", "6456", "6010", "7015" }))
				.compileBothTripSort());
		map2.put(180l, new RouteTripSpec(180l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "1824", "6304", "7736", "7456", "7207", "7642", "1002"
						}))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "1002", "7639", "7203", "7384", "7959",
								"6304", //
								"6317",
								"6594", //
								"1850", //
								"1989"
						}))
				.compileBothTripSort());
		// Routes 181-188: northeast feeders (Clareview, Belvedere, Fraser, Londonderry).
		map2.put(181l, new RouteTripSpec(181l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7206",
								"7650", //
								"7186", //
								"7384", "7241", "7604", "7901" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7901", "7414", "7400", "7456",
								"7164", //
								"7479", //
								"7650", //
								"7265", //
								/*-"7186",-*///
								"7206" }))
				.compileBothTripSort());
		map2.put(182l, new RouteTripSpec(182l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7003", "7186", "7104", "7470" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7470", "7105", "7572", "7003" }))
				.compileBothTripSort());
		map2.put(183l, new RouteTripSpec(183l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "1002", "7668", "7885", "7102" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7102", "7983", "7729", "1002" }))
				.compileBothTripSort());
		map2.put(184l, new RouteTripSpec(184l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EVERGREEN,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7903", "7262", "7128" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7128", "7262", "7903" }))
				.compileBothTripSort());
		map2.put(185l, new RouteTripSpec(185l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "1002",
								"7954", //
								"7102" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7102",
								"7744", //
								"1002" }))
				.compileBothTripSort());
		map2.put(186l, new RouteTripSpec(186l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7358", "7286", "7206", "7104", "7470" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7470", "7105", "7205", "7120", "7011" }))
				.compileBothTripSort());
		map2.put(187l, new RouteTripSpec(187l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KERNOHAN,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7230", "7103", "7756", "7943" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7943", "7103", "7102", "7185" }))
				.compileBothTripSort());
		map2.put(188l, new RouteTripSpec(188l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "6309", "7230", "7186", "7907", "7729" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7668", "7904", "7549", "7185", "7188", "6309" }))
				.compileBothTripSort());
		// Routes 190-199: far-northeast suburbs. Most stop IDs in this file are 4 digits;
		// a few 5-digit "77xxx" entries below are flagged for review.
		map2.put(190l, new RouteTripSpec(190l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "6306", "7763", "7803", "7054", "7906" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7906", "7384", "7815", "7674", "6306" }))
				.compileBothTripSort());
		map2.put(191l, new RouteTripSpec(191l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KLARVATTEN,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "6307",
								"7865", //
								"7827" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] {
								"7827", //
								"7825", //
								"7434",
								"7795", //
								"7779", "6307" }))
				.compileBothTripSort());
		map2.put(192l, new RouteTripSpec(192l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRINTNELL,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7909",
								"7512", //
								"7984" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7984",
								"7603", //
								"7909" }))
				.compileBothTripSort());
		map2.put(193l, new RouteTripSpec(193l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRINTNELL,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7910",
								"7992", //
								"7414" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7414",
								"77280", // NOTE(review): 5-digit ID among 4-digit IDs — confirm not a typo
								"7910" }))
				.compileBothTripSort());
		map2.put(194l, new RouteTripSpec(194l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SCHONSEE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "6308", "7677", "7919" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7919", "7674", "6308" }))
				.compileBothTripSort());
		map2.put(195l, new RouteTripSpec(195l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_CONACHIE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7907",
								"7879", //
								"7308" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7308",
								"77436", // NOTE(review): 5-digit ID among 4-digit IDs — confirm not a typo
								"7907" }))
				.compileBothTripSort());
		map2.put(197l, new RouteTripSpec(197l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SPRUCE_GRV)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "8737", "8785", "8761", "5415",
								"1595", //
								"1223", "1850", "1479" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "1570", "1679", "1227",
								"1187", //
								"5389", "8730", "8743", "8737" }))
				.compileBothTripSort());
		map2.put(198l, new RouteTripSpec(198l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_SASKATCHEWAN,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7908",
								"77175", // NOTE(review): 5-digit ID among 4-digit IDs — confirm not a typo
								"7405" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7405", "7926", "7908" }))
				.compileBothTripSort());
		map2.put(199l, new RouteTripSpec(199l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_GARRISON,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "6316",
								"7873", //
								"7895" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7895", "7873", "6316" }))
				.compileBothTripSort());
		// 300-series community routes (301-304).
		map2.put(301l, new RouteTripSpec(301l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "4204", "4065", "4547", "4186", "2211" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "2211", "4275", "4543", "4443", "4204" }))
				.compileBothTripSort());
		map2.put(302l, new RouteTripSpec(302l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EVERGREEN,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "7103",
								"7689",//
								"7262", "7654", "7128" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "7128", "7654", "7591",
								"7855",//
								"7103" }))
				.compileBothTripSort());
		map2.put(303l, new RouteTripSpec(303l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MISTATIM_IND)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7011", "6688", "6732", "6680", "6183", "6345", "6727" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "6727", "6844", "6646", "6732", "6755",
								"6926",//
								"6563",//
								"6688", "7011" }))
				.compileBothTripSort());
		map2.put(304l, new RouteTripSpec(304l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_PARK_CTR)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "4569",
								"2076",//
								"2218" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "2218", "2888",
								"4183",//
								"4569" }))
				.compileBothTripSort());
map2.put(305l, new RouteTripSpec(305l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_GATES)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "5668", "5082", "5528", "", "5208", "5214" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "5214", "1481", "1861", "5205", "5055", "5335", "5668" }))
.compileBothTripSort());
		// Routes 306-317: southeast/central community routes plus the 317 Winterburn
		// loop (see its TODO — both directions share stops in different orders).
		map2.put(306l, new RouteTripSpec(306l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "2196", "2447", "2805", "2693", "3706" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "3706", "2693", "2804", "2551", "2196" }))
				.compileBothTripSort());
		map2.put(307l, new RouteTripSpec(307l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOLD_BAR,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "2196", "2304", "2012" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "2012", "2305", "2196" }))
				.compileBothTripSort());
		map2.put(308l, new RouteTripSpec(308l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERDALE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] {
								"1123",
								"1280", //
								"1549", //
								"1893"
						}))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] {
								"1893",
								"1510", //
								"1953", //
								"1914", //
								"1254",
								"1498", //
								"1120", //
								"1262", "1123"
						}))
				.compileBothTripSort());
		map2.put(309l, new RouteTripSpec(309l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY_RAH,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERDALE)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "1254", "1620", "1960", "1746", "1978", "1104", "1366" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "1366", "1455", "1834", "1746", "1141", "1256", "1893", "1254" }))
				.compileBothTripSort());
		map2.put(310l, new RouteTripSpec(310l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIO_TERRACE)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "5174", "5302", "5383", "5105" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "5105", "5491", "5301", "5174" }))
				.compileBothTripSort());
		map2.put(311l, new RouteTripSpec(311l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "5011", "5222", "5836", "5105" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "5105", "5851", "5325", "5011" }))
				.compileBothTripSort());
		map2.put(312l, new RouteTripSpec(312l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "7008", "7754", "7944" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "7944", "7754", "7008" }))
				.compileBothTripSort());
		map2.put(313l, new RouteTripSpec(313l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKALLEN_WINDSOR_PK)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "2795", "2689", "2002" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "2002", "2971", "2001", "2690", "2795" }))
				.compileBothTripSort());
		map2.put(315l, new RouteTripSpec(315l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINTERBURN_IND,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "8602", "8061", "8989", "8727" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "8727", "8989", "8066", "8602" }))
				.compileBothTripSort());
		map2.put(316l, new RouteTripSpec(316l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HAWKS_RDG,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "8603", "6824", "6408", "6709" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "6709", "6617", "6825", "8603" }))
				.compileBothTripSort());
		map2.put(317l, new RouteTripSpec(317l, // TODO better (same stops in both trips in different orders)
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINTERBURN)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] {
								"8989", "8967", "8943", "8975", "8927", "8163", "8846", "8975", "8945",
								"8941", "5105"
						}))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] {
								"5105", "8904",
								"8694", "8927", "8163", "8846", "8975", "8927", "8163", "8955", "8938", "8989"
						}))
				.compileBothTripSort());
		// Routes 318-331: southside/southwest community routes; 331 serves the Chappelle
		// 9xxx-series stops.
		map2.put(318l, new RouteTripSpec(318l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "1208", "1070", "1001", "1491", "1002" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "1002", "1340", "1208" }))
				.compileBothTripSort());
		map2.put(321l, new RouteTripSpec(321l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA_IND)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "3733", "3744", "2106" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "2106",
								"3481", //
								"3733" }))
				.compileBothTripSort());
		map2.put(322l, new RouteTripSpec(322l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLYROOD,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "2808", "2585", "2841",
								"2246", //
						}))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] {
								"2246", //
								"2613", "2808" }))
				.compileBothTripSort());
		map2.put(323l, new RouteTripSpec(323l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RITCHIE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "2419", "2313", "2808" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "2808",
								"2294", //
								"2419" }))
				.compileBothTripSort());
		map2.put(324l, new RouteTripSpec(324l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMBLESIDE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "9092", "9630", "4201" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "4201", "9635", "9092" }))
				.compileBothTripSort());
		map2.put(325l, new RouteTripSpec(325l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDERMERE)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "9632", "9526", "4801" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "4801",
								"4938", //
								"9632" }))
				.compileBothTripSort());
		map2.put(327l, new RouteTripSpec(327l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, _104_ST_82_AVE,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELGRAVIA)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "2765", "2680", "2821" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "2821",
								"2648", //
								"2765" }))
				.compileBothTripSort());
		map2.put(330l, new RouteTripSpec(330l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "4811", "4597", "4153", "2704", }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "2704", "4021", "4494", "4811" }))
				.compileBothTripSort());
		map2.put(331l, new RouteTripSpec(331l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CHAPPELLE)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "9285",
								"9270", //
								"9271", //
								"9272", //
								"9366", //
								"9281", //
								"9382", //
								"4216" }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "4216",
								"9044", //
								"9187", //
								"9273", //
								"9274", //
								"9368", //
								"9263", //
								"9264", //
								"9265", //
								"9285" }))
				.compileBothTripSort());
		// Routes 333-339: Lewis Farms / Leger / Century Park community routes.
		map2.put(333l, new RouteTripSpec(333l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSENTHAL)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "8167",
								"8852", //
								"8604" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "8604",
								"8168", //
								"8167" }))
				.compileBothTripSort());
		map2.put(334l, new RouteTripSpec(334l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "4809",
								"4626", //
								"4215" }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "4215",
								"4642", //
								"4809" }))
				.compileBothTripSort());
		map2.put(336l, new RouteTripSpec(336l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "4810", "4455", "4069", "2208", }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "2208", "4167", "4129", "4810" }))
				.compileBothTripSort());
		map2.put(337l, new RouteTripSpec(337l,
				MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
				MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER)
				.addTripSort(MDirectionType.EAST.intValue(),
						Arrays.asList(new String[] { "4802", "4117", "4110", "4215", }))
				.addTripSort(MDirectionType.WEST.intValue(),
						Arrays.asList(new String[] { "4215", "4941", "4856", "4802" }))
				.compileBothTripSort());
		map2.put(338l, new RouteTripSpec(338l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKBURN)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "9226", "4201", "4813", "4597", "4034", "2207", }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "2207", "4034", "4042", "4805", "4204", "9226" }))
				.compileBothTripSort());
		map2.put(339l, new RouteTripSpec(339l,
				MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
				MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD_BLACKBURN)
				.addTripSort(MDirectionType.NORTH.intValue(),
						Arrays.asList(new String[] { "9251", "9685", "4213", }))
				.addTripSort(MDirectionType.SOUTH.intValue(),
						Arrays.asList(new String[] { "4213", "9756", "9251" }))
				.compileBothTripSort());
map2.put(340l, new RouteTripSpec(340l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3217", "3482", "2102", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2102",
"3448", //
"3217" }))
.compileBothTripSort());
map2.put(347l, new RouteTripSpec(347l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ALLARD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "9717", "9685", "4213", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "4213", "9666", "9717" }))
.compileBothTripSort());
map2.put(360l, new RouteTripSpec(360l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORCHARDS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "9306",
"9050", //
"4216", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "4216",
"9051", //
"9306" }))
.compileBothTripSort());
map2.put(361l, new RouteTripSpec(361l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3210", "3585", "2105", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2105", "3529", "3210" }))
.compileBothTripSort());
map2.put(362l, new RouteTripSpec(362l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3082",
"3149", //
"3211", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "3211", "3009", "3082" }))
.compileBothTripSort());
map2.put(363l, new RouteTripSpec(363l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3066", "3003", "3215", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "3215",
"3174", //
"3066" }))
.compileBothTripSort());
map2.put(370l, new RouteTripSpec(370l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "3206", "3957", "3796", "2106", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "2106", "3748", "3950", "3206" }))
.compileBothTripSort());
map2.put(380l, new RouteTripSpec(380l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUARRY_RDG,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "7903",
"7587", //
"7213" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "7213",
"77358", //
"7903" }))
.compileBothTripSort());
map2.put(381l, new RouteTripSpec(381l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLLICK_KENYON,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "7905",
"7982", //
"7151", }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "7151",
"7808", //
"7905" }))
.compileBothTripSort());
map2.put(577l, new RouteTripSpec(577l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, _84_ST_111_AVE)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1408",
"1094", //
"1371" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "1371",
"1180", //
"1408" }))
.compileBothTripSort());
map2.put(589l, new RouteTripSpec(589l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_WASTE_MGT_CTR,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "1211", "7700",
"7701", //
}))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] {
"7700", //
"7701", "1211" }))
.compileBothTripSort());
map2.put(595l, new RouteTripSpec(595l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_EDM)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "4476",
"2978", //
"2706" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2706",
"22160", //
"4476" }))
.compileBothTripSort());
map2.put(596l, new RouteTripSpec(596l,
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, S_CAMPUS_FT_EDM,
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VLY_ZOO_FT_EDM)
.addTripSort(MDirectionType.EAST.intValue(),
Arrays.asList(new String[] { "5015", "4476", "2706" }))
.addTripSort(MDirectionType.WEST.intValue(),
Arrays.asList(new String[] { "2706", "4476", "5015" }))
.compileBothTripSort());
map2.put(747l, new RouteTripSpec(747l,
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK,
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AIRPORT)
.addTripSort(MDirectionType.NORTH.intValue(),
Arrays.asList(new String[] { "9747", "4216" }))
.addTripSort(MDirectionType.SOUTH.intValue(),
Arrays.asList(new String[] { "4216", "9747" }))
.compileBothTripSort()
.addBothFromTo(MDirectionType.SOUTH.intValue(), "4216", "4216")
.addBothFromTo(MDirectionType.NORTH.intValue(), "9747", "9747"));
ALL_ROUTE_TRIPS2 = map2;
}
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
	// Prefer the newer spec table, then the legacy table, before deferring to the default splitter.
	RouteTripSpec spec2 = ALL_ROUTE_TRIPS2.get(mRoute.id);
	if (spec2 != null) {
		return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, spec2);
	}
	RouteTripSpec spec = ALL_ROUTE_TRIPS.get(mRoute.id);
	if (spec != null) {
		return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, spec);
	}
	return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
@Override
public String cleanTripHeadsign(String tripHeadsign) {
	// Headsigns only need the shared label cleanup pass.
	final String cleanedHeadsign = CleanUtils.cleanLabel(tripHeadsign);
	return cleanedHeadsign;
}
// Abbreviates "transit center"/"transit centre" to "TC" in stop names.
// Groups 2 and 4 capture the surrounding non-word characters (or line
// edges) so the replacement preserves the original spacing/punctuation.
private static final Pattern TRANSIT_CENTER = Pattern.compile("((^|\\W){1}(transit center|transit centre)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String TRANSIT_CENTER_REPLACEMENT = "$2TC$4";
// Abbreviates "international" to "Int"; same capture-group layout as TRANSIT_CENTER.
private static final Pattern INTERNATIONAL = Pattern.compile("((^|\\W){1}(international)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String INTERNATIONAL_REPLACEMENT = "$2Int$4";
@Override
public String cleanStopName(String gStopName) {
	// Apply the agency-specific abbreviations first, then the shared cleanup passes.
	String cleaned = TRANSIT_CENTER.matcher(gStopName).replaceAll(TRANSIT_CENTER_REPLACEMENT);
	cleaned = INTERNATIONAL.matcher(cleaned).replaceAll(INTERNATIONAL_REPLACEMENT);
	cleaned = CleanUtils.cleanStreetTypes(cleaned);
	cleaned = CleanUtils.cleanNumbers(cleaned);
	return CleanUtils.cleanLabel(cleaned);
}
}
|
package org.apache.jmeter.protocol.ldap.sampler;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.BasicAttribute;
import javax.naming.directory.BasicAttributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.ModificationItem;
import org.apache.jmeter.config.Argument;
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.config.ConfigTestElement;
import org.apache.jmeter.config.LoginConfig;
import org.apache.jmeter.protocol.ldap.config.LdapConfig;
import org.apache.jmeter.samplers.AbstractSampler;
import org.apache.jmeter.samplers.Entry;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.testelement.property.BooleanProperty;
import org.apache.jmeter.testelement.property.PropertyIterator;
import org.apache.jmeter.testelement.property.StringProperty;
import org.apache.jmeter.testelement.property.TestElementProperty;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
* Ldap Sampler class is main class for the LDAP test. This will control all
* the test available in the LDAP Test.
*
* @author T.Elanjchezhiyan(chezhiyan@siptech.co.in) - Sip Technologies and
* Exports Ltd.
* Created Apr 29 2003 11:00 AM
* @version $Revision$ Last updated: $Date$
*/
public class LDAPSampler extends AbstractSampler
{
transient private static Logger log = LoggingManager.getLoggerForClass();
public final static String SERVERNAME = "servername";
public final static String PORT = "port";
public final static String ROOTDN = "rootdn";
public final static String TEST = "test";
public final static String ADD = "add";
public final static String MODIFY = "modify";
public final static String DELETE = "delete";
public final static String SEARCHBASE = "search";
public final static String SEARCHFILTER = "searchfilter";
public final static String USER_DEFINED = "user_defined";
public final static String ARGUMENTS = "arguments";
public final static String BASE_ENTRY_DN = "base_entry_dn";
//For In build test case using this counter
//create the new entry in the server
public static int counter=0;
private boolean searchFoundEntries;//TODO turn into parameter?
public LDAPSampler()
{
}
public void addCustomTestElement(TestElement element)
{
if(element instanceof LdapConfig || element instanceof LoginConfig)
{
mergeIn(element);
}
}
/**
* Gets the username attribute of the LDAP object.
*
* @return the username
*/
public String getUsername()
{
return getPropertyAsString(ConfigTestElement.USERNAME);
}
/**
* Gets the password attribute of the LDAP object.
*
* @return the password
*/
public String getPassword()
{
return getPropertyAsString(ConfigTestElement.PASSWORD);
}
/**
* Sets the Servername attribute of the ServerConfig object.
*
* @param servername the new servername value
*/
public void setServername(String servername)
{
setProperty(new StringProperty(SERVERNAME, servername));
}
/**
* Sets the Port attribute of the ServerConfig object.
*
* @param port the new Port value
*/
public void setPort(String port)
{
setProperty(new StringProperty(PORT, port));
}
/**
* Gets the servername attribute of the LDAPSampler object.
*
* @return the Servername value
*/
public String getServername()
{
return getPropertyAsString(SERVERNAME);
}
/**
* Gets the Port attribute of the LDAPSampler object.
*
* @return the Port value
*/
public String getPort()
{
return getPropertyAsString(PORT);
}
/**
* Sets the Rootdn attribute of the LDAPSampler object.
*
* @param rootdn the new rootdn value
*/
public void setRootdn(String newRootdn)
{
this.setProperty(ROOTDN,newRootdn);
}
/**
* Gets the Rootdn attribute of the LDAPSampler object.
*
* @return the Rootdn value
*/
public String getRootdn()
{
return getPropertyAsString(ROOTDN);
}
/**
* Sets the Test attribute of the LdapConfig object.
*
* @param newTest the new test value(Add,Modify,Delete and search)
*/
public void setTest(String newTest)
{
this.setProperty(TEST,newTest);
}
/**
* Gets the test attribute of the LDAPSampler object.
*
* @return the test value (Add, Modify, Delete and search)
*/
public String getTest()
{
return getPropertyAsString(TEST);
}
/**
* Sets the UserDefinedTest attribute of the LDAPSampler object.
*
* @param value the new UserDefinedTest value
*/
public void setUserDefinedTest(boolean value)
{
setProperty(new BooleanProperty(USER_DEFINED, value));
}
/**
* Gets the UserDefinedTest attribute of the LDAPSampler object.
*
* @return the test value true or false. If true it will do the
* UserDefinedTest else our own inbuild test case.
*/
public boolean getUserDefinedTest()
{
return getPropertyAsBoolean(USER_DEFINED);
}
/**
* Sets the Base Entry DN attribute of the LDAPSampler object.
*
* @param newbaseentry the new Base entry DN value
*/
public void setBaseEntryDN(String newbaseentry)
{
setProperty(new StringProperty(BASE_ENTRY_DN, newbaseentry));
}
/**
* Gets the BaseEntryDN attribute of the LDAPSampler object.
*
* @return the Base entry DN value
*/
public String getBaseEntryDN()
{
return getPropertyAsString(BASE_ENTRY_DN);
}
/**
* Sets the Arguments attribute of the LdapConfig object. This will
* collect values from the table for user defined test case.
*
* @param value the arguments
*/
public void setArguments(Arguments value)
{
setProperty(new TestElementProperty(ARGUMENTS, value));
}
/**
* Gets the Arguments attribute of the LdapConfig object.
*
* @return the arguments. User defined test case.
*/
public Arguments getArguments()
{
return (Arguments) getProperty(ARGUMENTS).getObjectValue();
}
/**
* Collect all the value from the table (Arguments), using this create the
* basicAttributes. This will create the Basic Attributes for the User
* defined TestCase for Add Test.
*
* @return the BasicAttributes
*/
public BasicAttributes getUserAttributes()
{
BasicAttribute basicattribute = new BasicAttribute("objectclass");
basicattribute.add("top");
basicattribute.add("person");
basicattribute.add("organizationalPerson");
basicattribute.add("inetOrgPerson");
BasicAttributes attrs = new BasicAttributes(true);
attrs.put(basicattribute);
BasicAttribute attr;
PropertyIterator iter = getArguments().iterator();
while (iter.hasNext())
{
Argument item = (Argument) iter.next().getObjectValue();
attr = getBasicAttribute( item.getName(),item.getValue());
attrs.put(attr);
}
return attrs;
}
/**
* Collect all the value from the table (Arguments), using this create the
* basicAttributes. This will create the Basic Attributes for the User
* defined TestCase for Modify test.
*
* @return the BasicAttributes
*/
public ModificationItem[] getUserModAttributes()
{
ModificationItem[] mods =
new ModificationItem[getArguments().getArguments().size()];
BasicAttribute attr;
PropertyIterator iter = getArguments().iterator();
int count =0;
while (iter.hasNext())
{
Argument item = (Argument) iter.next().getObjectValue();
attr = getBasicAttribute( item.getName(),item.getValue());
mods[count] =
new ModificationItem(DirContext.REPLACE_ATTRIBUTE, attr);
count=+1;
}
return mods;
}
/**
* This will create the Basic Attributes for the Inbuilt TestCase for
* Modify test.
*
* @return the BasicAttributes
*/
public ModificationItem[] getModificationItem()
{
ModificationItem[] mods = new ModificationItem[2];
// replace (update) attribute
Attribute mod0 = new BasicAttribute("userpassword",
"secret");
// add mobile phone number attribute
Attribute mod1 = new BasicAttribute("mobile",
"123-456-1234");
mods[0] = new ModificationItem(DirContext.REPLACE_ATTRIBUTE, mod0);
mods[1] = new ModificationItem(DirContext.ADD_ATTRIBUTE, mod1);
return mods;
}
/**
* This will create the Basic Attributes for the In build TestCase for Add
* Test.
*
* @return the BasicAttributes
*/
public BasicAttributes getBasicAttributes()
{
BasicAttributes basicattributes = new BasicAttributes();
BasicAttribute basicattribute = new BasicAttribute("objectclass");
basicattribute.add("top");
basicattribute.add("person");
basicattribute.add("organizationalPerson");
basicattribute.add("inetOrgPerson");
basicattributes.put(basicattribute);
String s1 = "User";
String s3 = "Test";
String s5 = "user";
String s6 = "test";
counter+=1;
basicattributes.put(new BasicAttribute("givenname", s1));
basicattributes.put(new BasicAttribute("sn", s3));
basicattributes.put(new BasicAttribute("cn","TestUser"+counter));
basicattributes.put(new BasicAttribute("uid", s5));
basicattributes.put(new BasicAttribute("userpassword", s6));
setProperty(new StringProperty(ADD,"cn=TestUser"+counter));
return basicattributes;
}
/**
* This will create the Basic Attribute for the given name value pair.
*
* @return the BasicAttribute
*/
public BasicAttribute getBasicAttribute(String name, String value)
{
BasicAttribute attr = new BasicAttribute(name,value);
return attr;
}
/**
* Returns a formatted string label describing this sampler
*
* @return a formatted string label describing this sampler
*/
public String getLabel()
{
return (
"ldap:
+ this.getServername()
+ ":"
+ getPort()
+ "/"
+ this.getRootdn());
}
/**
* This will do the add test for the User defined TestCase as well as
* inbuilt test case.
*
* @return executed time for the give test case
*/
public long addTest(LdapClient ldap)
throws NamingException
{
long start = 0L;
long end = 0L;
if (getPropertyAsBoolean(USER_DEFINED))
{
start = System.currentTimeMillis();
ldap.createTest(
getUserAttributes(),
getPropertyAsString(BASE_ENTRY_DN));
end = System.currentTimeMillis();
}
else
{
start = System.currentTimeMillis();
ldap.createTest(getBasicAttributes(), getPropertyAsString(ADD));
end = System.currentTimeMillis();
ldap.deleteTest(getPropertyAsString(ADD));
}
return (end - start);
}
/**
* This will do the delete test for the User defined TestCase as well as
* inbuilt test case.
*
* @return executed time for the give test case
*/
public long deleteTest(LdapClient ldap)
throws NamingException
{
long start = 0L;
long end = 0L;
if (!getPropertyAsBoolean(USER_DEFINED))
{
ldap.createTest(getBasicAttributes(), getPropertyAsString(ADD));
setProperty(new StringProperty(DELETE, getPropertyAsString(ADD)));
}
start = System.currentTimeMillis();
ldap.deleteTest(getPropertyAsString(DELETE));
end = System.currentTimeMillis();
return (end - start);
}
/**
* This will do the search test for the User defined TestCase as well as
* inbuilt test case.
*
* @return executed time for the give test case
*/
public long searchTest(LdapClient ldap)
throws NamingException
{
long start = 0L;
long end = 0L;
if (!getPropertyAsBoolean(USER_DEFINED))
{
ldap.createTest(getBasicAttributes(), getPropertyAsString(ADD));
setProperty(
new StringProperty(SEARCHBASE, getPropertyAsString(ADD)));
setProperty(
new StringProperty(SEARCHFILTER, getPropertyAsString(ADD)));
}
start = System.currentTimeMillis();
searchFoundEntries = ldap.searchTest(
getPropertyAsString(SEARCHBASE),
getPropertyAsString(SEARCHFILTER));
end = System.currentTimeMillis();
if (!getPropertyAsBoolean(USER_DEFINED))
{
ldap.deleteTest(getPropertyAsString(ADD));
}
return (end - start);
}
/**
* This will do the search test for the User defined TestCase as well as
* inbuilt test case.
*
* @return executed time for the give test case
*/
public long modifyTest(LdapClient ldap)
throws NamingException
{
long start = 0L;
long end = 0L;
if (getPropertyAsBoolean(USER_DEFINED))
{
start = System.currentTimeMillis();
ldap.modifyTest(
getUserModAttributes(),
getPropertyAsString(BASE_ENTRY_DN));
end = System.currentTimeMillis();
}
else
{
ldap.createTest(getBasicAttributes(), getPropertyAsString(ADD));
setProperty(new StringProperty(MODIFY, getPropertyAsString(ADD)));
start = System.currentTimeMillis();
ldap.modifyTest(getModificationItem(), getPropertyAsString(MODIFY));
end = System.currentTimeMillis();
ldap.deleteTest(getPropertyAsString(ADD));
}
return (end - start);
}
public SampleResult sample(Entry e)
{
SampleResult res = new SampleResult();
boolean isSuccessful = false;
res.setSampleLabel(getLabel());
res.setSamplerData(getPropertyAsString(TEST));//TODO improve this
long time=0L;
LdapClient ldap = new LdapClient();
try
{
ldap.connect(
getServername(),
getPort(),
getRootdn(),
getUsername(),
getPassword());
if (getPropertyAsString(TEST).equals("add"))
{
time = addTest(ldap);
}
else if (getPropertyAsString(TEST).equals("delete"))
{
time = deleteTest(ldap);
}
else if (getPropertyAsString(TEST).equals("modify"))
{
time = modifyTest(ldap);
}
else if (getPropertyAsString(TEST).equals("search"))
{
time = searchTest(ldap);
}
//TODO - needs more work ...
if (getPropertyAsString(TEST).equals("search")
&& !searchFoundEntries )
{
res.setResponseCode("201");//TODO is this a sensible number?
res.setResponseMessage("OK - no results");
res.setResponseData("successful - no results".getBytes());
} else {
res.setResponseCode("200");
res.setResponseMessage("OK");
res.setResponseData("successful".getBytes());
}
res.setDataType(SampleResult.TEXT);
isSuccessful = true;
ldap.disconnect();
}
catch (Exception ex)
{
log.error("Ldap client - ",ex);
res.setResponseCode("500");//TODO distinguish errors better
res.setResponseMessage(ex.toString());
ldap.disconnect();
isSuccessful = false;
time = 0L;
}
// Calculate response time
res.setTime(time);
// Set if we were successful or not
res.setSuccessful(isSuccessful);
return res;
}
}
|
package model.component.visual;
import api.ISerializable;
import javafx.animation.Animation;
import javafx.beans.property.SimpleObjectProperty;
import utility.TwoProperty;
import voogasalad.util.spriteanimation.animation.AnimationContainer;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
* This component contains the animated sprite
*
* @author Rhondu Smithwick, Anirudh Jonnavithula
*/
@SuppressWarnings("serial")
public class AnimatedSprite extends Sprite {
    /** Fallback sprite sheet used by the no-arg constructor. */
    private static final String DEFAULT_IMAGE = "resources/spriteSheets/ryuBlue.gif";
    /** Fallback animation properties bundle paired with the default sheet. */
    private static final String DEFAULT_BUNDLE = "spriteProperties/aniryu";
    /** Serializable pair holding the bundle path and the default animation name. */
    private final TwoProperty<String, String> twoProperty = new TwoProperty<>("BundlePath", DEFAULT_BUNDLE, "DefaultAnimation", "");
    /** Rebuilt from the bundle path whenever it changes; see reInitializeContainer(). */
    private CustomAnimationContainer container = new CustomAnimationContainer(DEFAULT_BUNDLE);
    private String currentAnimationName = "";
    /** Transient: JavaFX Animation instances are recreated, not serialized. */
    private transient Animation currentAnimation;
    public AnimatedSprite () {
        this(DEFAULT_IMAGE, DEFAULT_BUNDLE);
    }
    /**
     * Construct with no default animation selected.
     *
     * @param imagePath String path to the sprite sheet image
     * @param bundlePath String path to the animation properties bundle
     */
    public AnimatedSprite (String imagePath, String bundlePath) { // TODO: place default in resource file
        super(imagePath);
        setBundlePath(bundlePath);
    }
    /**
     * Construct with explicit image dimensions.
     *
     * @param imagePath String path to the sprite sheet image
     * @param imageWidth width of image
     * @param imageHeight height of image
     * @param bundlePath String path to the animation properties bundle
     */
    public AnimatedSprite (String imagePath, double imageWidth, double imageHeight, String bundlePath) {
        super(imagePath, imageWidth, imageHeight);
        setBundlePath(bundlePath);
    }
    /**
     * Construct with a default animation, which starts playing immediately.
     * NOTE(review): this calls overridable methods from a constructor;
     * confirm no subclass relies on fully-initialized state here.
     *
     * @param imagePath String path to the sprite sheet image
     * @param imageWidth width of image
     * @param imageHeight height of image
     * @param bundlePath String path to the animation properties bundle
     * @param defaultAnimation name of the animation to play by default
     */
    public AnimatedSprite (String imagePath, double imageWidth, double imageHeight, String bundlePath, String defaultAnimation) {
        this(imagePath, imageWidth, imageHeight, bundlePath);
        setDefaultAnimation(defaultAnimation);
        createAndPlayAnimation(currentAnimationName);
    }
    /** @return the names of all animations available in the current bundle */
    public Collection<String> getAnimationNames () {
        return getContainer().getAnimationNames();
    }
    /** @return true if the current bundle defines the named animation */
    public boolean hasAnimation (String animationName) {
        return getContainer().hasAnimation(animationName);
    }
    /** @return a new Animation for this sprite's image view */
    public Animation createAnimation (String animationName) {
        return getContainer().createAnimation(getImageView(), animationName);
    }
    /**
     * Plays the named animation, stopping any other animation in progress.
     * Requests to replay the animation already running are ignored unless it
     * is the default animation. When the animation finishes, playback falls
     * back to the default animation.
     *
     * @param animationName name of the animation to play
     * @return the animation now playing (may be the previous one if the
     *         request was ignored)
     */
    public Animation createAndPlayAnimation (String animationName) {
        boolean validAnimation = !animationName.equals(currentAnimationName) || currentAnimationName.equals(getDefaultAnimation());
        if (validAnimation) {
            if (currentAnimation != null) {
                currentAnimation.stop();
            }
            currentAnimationName = animationName;
            currentAnimation = createAnimation(animationName);
            // Chain back to the default animation once this one completes.
            currentAnimation.setOnFinished(e -> createAndPlayAnimation(getDefaultAnimation()));
            currentAnimation.play();
        }
        return currentAnimation;
    }
    public SimpleObjectProperty<String> bundlePathProperty () {
        return twoProperty.property1();
    }
    public String getBundlePath () {
        return bundlePathProperty().get();
    }
    /**
     * Sets the bundle path and rebuilds the animation container from it.
     *
     * @param bundlePath String path to the animation properties bundle
     */
    public void setBundlePath (String bundlePath) {
        bundlePathProperty().set(bundlePath);
        reInitializeContainer();
    }
    public SimpleObjectProperty<String> defaultAnimationProperty () {
        return twoProperty.property2();
    }
    public String getDefaultAnimation () {
        return defaultAnimationProperty().get();
    }
    public void setDefaultAnimation (String defaultAnimation) {
        defaultAnimationProperty().set(defaultAnimation);
    }
    public AnimationContainer getContainer () {
        return container;
    }
    /** Recreates the container so it reflects the current bundle path. */
    private void reInitializeContainer () {
        this.container = new CustomAnimationContainer(getBundlePath());
    }
    @Override
    public List<SimpleObjectProperty<?>> getProperties () {
        return Arrays.asList(defaultAnimationProperty(), bundlePathProperty(), imagePathProperty(), imageWidthProperty(), imageHeightProperty());
    }
    @Override
    public void update() {
        super.update();
        // Re-applying the current values refreshes derived state
        // (rebuilds the container from the bundle path).
        setBundlePath(getBundlePath());
        setDefaultAnimation(getDefaultAnimation());
    }
    /** AnimationContainer marked serializable for this project's save system. */
    public static class CustomAnimationContainer extends AnimationContainer implements ISerializable {
        public CustomAnimationContainer (String bundlePath) {
            super(bundlePath);
        }
    }
}
|
package com.dmdirc;
import com.dmdirc.commandparser.CommandType;
import com.dmdirc.commandparser.parsers.QueryCommandParser;
import com.dmdirc.events.AppErrorEvent;
import com.dmdirc.events.QueryActionEvent;
import com.dmdirc.events.QueryClosedEvent;
import com.dmdirc.events.QueryMessageEvent;
import com.dmdirc.events.QueryNickchangeEvent;
import com.dmdirc.events.QueryQuitEvent;
import com.dmdirc.events.QuerySelfActionEvent;
import com.dmdirc.events.QuerySelfMessageEvent;
import com.dmdirc.interfaces.CommandController;
import com.dmdirc.interfaces.Connection;
import com.dmdirc.interfaces.PrivateChat;
import com.dmdirc.interfaces.User;
import com.dmdirc.logger.ErrorLevel;
import com.dmdirc.parser.common.CallbackManager;
import com.dmdirc.parser.common.CallbackNotFoundException;
import com.dmdirc.parser.common.CompositionState;
import com.dmdirc.parser.interfaces.ClientInfo;
import com.dmdirc.parser.interfaces.Parser;
import com.dmdirc.parser.interfaces.callbacks.CompositionStateChangeListener;
import com.dmdirc.parser.interfaces.callbacks.NickChangeListener;
import com.dmdirc.parser.interfaces.callbacks.PrivateActionListener;
import com.dmdirc.parser.interfaces.callbacks.PrivateMessageListener;
import com.dmdirc.parser.interfaces.callbacks.QuitListener;
import com.dmdirc.ui.core.components.WindowComponent;
import com.dmdirc.ui.input.TabCompleterFactory;
import com.dmdirc.ui.messages.BackBufferFactory;
import com.dmdirc.ui.messages.sink.MessageSinkManager;
import com.dmdirc.util.EventUtils;
import com.dmdirc.util.URLBuilder;
import java.awt.Toolkit;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Optional;
/**
* The Query class represents the client's view of a query with another user. It handles callbacks
* for query events from the parser, maintains the corresponding QueryWindow, and handles user input
* for the query.
*/
public class Query extends MessageTarget implements PrivateActionListener,
PrivateMessageListener, NickChangeListener, QuitListener,
CompositionStateChangeListener, PrivateChat {
/** The Server this Query is on. */
private final Server server;
/** The user associated with this query. */
private final User user;
/**
 * Creates a new query associated with the given user on the given server.
 *
 * @param newServer the server this query exists on
 * @param user the remote user this query is with
 * @param tabCompleterFactory used to build this window's tab completer
 * @param commandController controller providing command information
 * @param messageSinkManager sink manager used to despatch messages
 * @param urlBuilder URL builder for this window
 * @param backBufferFactory factory for this window's back buffer
 */
public Query(
final Server newServer,
final User user,
final TabCompleterFactory tabCompleterFactory,
final CommandController commandController,
final MessageSinkManager messageSinkManager,
final URLBuilder urlBuilder,
final BackBufferFactory backBufferFactory) {
// The window's name and title both start as the partner's nickname.
super(newServer, "query",
user.getNickname(),
user.getNickname(),
newServer.getConfigManager(),
backBufferFactory,
new QueryCommandParser(newServer, commandController, newServer.getEventBus()),
tabCompleterFactory.getTabCompleter(newServer.getTabCompleter(),
newServer.getConfigManager(),
CommandType.TYPE_QUERY, CommandType.TYPE_CHAT),
messageSinkManager,
urlBuilder,
newServer.getEventBus(),
Arrays.asList(
WindowComponent.TEXTAREA.getIdentifier(),
WindowComponent.INPUTFIELD.getIdentifier()));
this.server = newServer;
this.user = user;
updateTitle();
}
@Override
public void sendLine(final String line) {
    // Lines without an explicit target go to the query partner.
    final String target = getNickname();
    sendLine(line, target);
}
@Override
public void sendLine(final String line, final String target) {
    // Refuse (audibly) to send while disconnected.
    if (server.getState() != ServerState.CONNECTED) {
        Toolkit.getDefaultToolkit().beep();
        return;
    }
    for (final String part : splitLine(line)) {
        if (part.isEmpty()) {
            continue;
        }
        server.getParser().get().sendMessage(target, part);
        final String format = EventUtils.postDisplayable(getEventBus(),
                new QuerySelfMessageEvent(this, server.getLocalUser().get(), part),
                "querySelfMessage");
        doNotification(format, server.getLocalUser().get(), part);
    }
}
@Override
protected boolean processNotificationArg(final Object arg, final List<Object> args) {
    // Non-User arguments are handled by the parent class.
    if (!(arg instanceof User)) {
        return super.processNotificationArg(arg, args);
    }
    // Expand a User into its nickname/username/hostname triple.
    final User userArg = (User) arg;
    args.add(userArg.getNickname());
    args.add(userArg.getUsername());
    args.add(userArg.getHostname());
    return true;
}
@Override
public int getMaxLineLength() {
    // No connection means no meaningful limit; report -1.
    if (server.getState() != ServerState.CONNECTED) {
        return -1;
    }
    return server.getParser().get().getMaxLength("PRIVMSG", getHost());
}
@Override
public void sendAction(final String action) {
    // Refuse (audibly) to send while disconnected.
    if (server.getState() != ServerState.CONNECTED) {
        Toolkit.getDefaultToolkit().beep();
        return;
    }
    final int maxLineLength = server.getParser().get().getMaxLength("PRIVMSG", getHost());
    // The "+ 2" accounts for the CTCP framing around the action text.
    if (action.length() + 2 > maxLineLength) {
        addLine("actionTooLong", action.length());
        return;
    }
    server.getParser().get().sendAction(getNickname(), action);
    final String format = EventUtils.postDisplayable(getEventBus(),
            new QuerySelfActionEvent(this, server.getLocalUser().get(), action),
            "querySelfAction");
    doNotification(format, server.getLocalUser().get(), action);
}
@Override
public void onPrivateMessage(final Parser parser, final Date date,
final String message, final String host) {
    // Split "nick!ident@host" into its three display components.
    final String[] hostmask = server.parseHostmask(host);
    final String format = EventUtils.postDisplayable(getEventBus(),
            new QueryMessageEvent(this, server.getLocalUser().get(), message), "queryMessage");
    addLine(format, hostmask[0], hostmask[1], hostmask[2], message);
}
@Override
public void onPrivateAction(final Parser parser, final Date date,
final String message, final String host) {
    // Split "nick!ident@host" into its three display components.
    final String[] hostmask = server.parseHostmask(host);
    final String format = EventUtils.postDisplayable(getEventBus(),
            new QueryActionEvent(this, server.getLocalUser().get(), message), "queryAction");
    addLine(format, hostmask[0], hostmask[1], hostmask[2], message);
}
/**
 * Refreshes the window title to match the query partner's nickname.
 */
private void updateTitle() {
    final String title = getNickname();
    setTitle(title);
}
    /**
     * Reregisters query callbacks. Called when reconnecting to the server.
     */
    public void reregister() {
        final CallbackManager callbackManager = server.getParser().get().getCallbackManager();
        final String nick = getNickname();
        try {
            // Message, action and composition callbacks are keyed on the remote nickname.
            callbackManager.addCallback(PrivateActionListener.class, this, nick);
            callbackManager.addCallback(PrivateMessageListener.class, this, nick);
            callbackManager.addCallback(CompositionStateChangeListener.class, this, nick);
            // Quit and nick-change are registered globally; onQuit/onNickChanged
            // filter on the nickname themselves.
            callbackManager.addCallback(QuitListener.class, this);
            callbackManager.addCallback(NickChangeListener.class, this);
        } catch (CallbackNotFoundException ex) {
            getEventBus().publishAsync(new AppErrorEvent(ErrorLevel.HIGH, ex,
                    "Unable to get query events", ex.getMessage()));
        }
    }
    @Override
    public void onNickChanged(final Parser parser, final Date date,
            final ClientInfo client, final String oldNick) {
        // Only react to nick changes of the user this query is with.
        if (oldNick.equals(getNickname())) {
            final CallbackManager callbackManager = server.getParser().get().getCallbackManager();
            // Drop the callbacks keyed on the old nickname before re-adding
            // them under the new one.
            callbackManager.delCallback(PrivateActionListener.class, this);
            callbackManager.delCallback(PrivateMessageListener.class, this);
            callbackManager.delCallback(CompositionStateChangeListener.class, this);
            try {
                callbackManager.addCallback(PrivateActionListener.class, this, client.getNickname());
                callbackManager.
                        addCallback(PrivateMessageListener.class, this, client.getNickname());
                callbackManager.addCallback(CompositionStateChangeListener.class, this, client.
                        getNickname());
            } catch (CallbackNotFoundException ex) {
                getEventBus().publishAsync(
                        new AppErrorEvent(ErrorLevel.HIGH, ex, "Unable to get query events",
                                ex.getMessage()));
            }
            final String format = EventUtils.postDisplayable(getEventBus(),
                    new QueryNickchangeEvent(this, oldNick),
                    "queryNickChanged");
            // Let the server re-key its query lookup before we announce the change.
            server.updateQuery(this, oldNick, client.getNickname());
            addLine(format, oldNick, client.getUsername(),
                    client.getHostname(), client.getNickname());
            user.setNickname(client.getNickname());
            updateTitle();
            setName(client.getNickname());
        }
    }
@Override
public void onQuit(final Parser parser, final Date date,
final ClientInfo client, final String reason) {
if (client.getNickname().equals(getNickname())) {
final String format = EventUtils.postDisplayable(getEventBus(),
new QueryQuitEvent(this, reason),
reason.isEmpty() ? "queryQuit" : "queryQuitReason");
addLine(format, client.getNickname(), client.getUsername(), client.getHostname(),
reason);
}
}
@Override
public void onCompositionStateChanged(final Parser parser, final Date date,
final CompositionState state, final String host) {
if (state == CompositionState.TYPING) {
addComponent(WindowComponent.TYPING_INDICATOR.getIdentifier());
} else {
removeComponent(WindowComponent.TYPING_INDICATOR.getIdentifier());
}
}
    @Override
    public Optional<Connection> getConnection() {
        // A query always belongs to exactly one server connection.
        return Optional.of(server);
    }
    @Override
    public void close() {
        super.close();
        // Remove any callbacks or listeners, if the parser still exists
        server.getParser().map(Parser::getCallbackManager).ifPresent(cm -> cm.delAllCallback(this));
        // Trigger action for the window closing
        getEventBus().publishAsync(new QueryClosedEvent(this));
        // Inform any parents that the window is closing
        server.delQuery(this);
    }
@Override
public String getHost() {
// TODO: Icky, IRC specific. Kill with fire.
return user.getNickname() + '!' + user.getUsername() + '@' + user.getHostname();
}
    @Override
    public String getNickname() {
        // The remote user's current nickname.
        return user.getNickname();
    }
    @Override
    public void setCompositionState(final CompositionState state) {
        // NOTE(review): assumes the parser is present; will throw if called
        // while disconnected — confirm callers guard on connection state.
        server.getParser().get().setCompositionState(getHost(), state);
    }
    @Override
    public User getUser() {
        // The remote user this query is with.
        return user;
    }
}
|
package net.openhft.chronicle.wire;
import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.core.Jvm;
import net.openhft.chronicle.threads.NamedThreadFactory;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.nio.BufferUnderflowException;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Exercises string interning in {@code BinaryWire}: repeated reads of the same
 * payload must yield strings equal to the originally-interned representatives,
 * and wires must behave correctly when used from many threads (one wire per
 * thread), while a deliberately-shared wire demonstrates the failure mode.
 */
public final class BinaryWireStringInternerTest extends WireTestCommon {
    // Number of distinct test strings generated per run.
    private static final int DATA_SET_SIZE = 1_000;
    // Fixed seed; the constant's name indicates it was chosen so the generated
    // strings avoid interner collisions, keeping the test deterministic.
    private static final long SEED_WITHOUT_COLLISIONS = 0x982374EADL;
    private final Random random = new Random(SEED_WITHOUT_COLLISIONS);
    private final String[] testData = new String[DATA_SET_SIZE];
    // Interned representatives captured in createTestData(); later reads are
    // compared against these for equality.
    private final String[] internedStrings = new String[DATA_SET_SIZE];
    @SuppressWarnings("rawtypes")
    private final Bytes heapBytes = Bytes.allocateElasticOnHeap(4096);
    private final BinaryWire wire = BinaryWire.binaryOnly(heapBytes);
    // Formats a failure message identifying the iteration and offending string.
    private static String message(final int index, final String inputData) {
        return String.format("At index %d for string %s",
                index, inputData);
    }
    // Builds a random lowercase ASCII string of the given length.
    private static String makeString(final int length, final Random random) {
        final StringBuilder builder = new StringBuilder();
        for (int i = 0; i < length; i++) {
            builder.append((char) ('a' + random.nextInt('z' - 'a')));
        }
        return builder.toString();
    }
    @Before
    public void createTestData() throws Exception {
        // Strings of length 32..281; written once and read back so the
        // interned instances are captured before the tests run.
        for (int i = 0; i < DATA_SET_SIZE; i++) {
            testData[i] = makeString(random.nextInt(250) + 32, random);
        }
        for (int i = 0; i < DATA_SET_SIZE; i++) {
            wire.getFixedBinaryValueOut(true).text(testData[i]);
            internedStrings[i] = wire.read().text();
        }
        wire.clear();
    }
    @Test
    public void shouldInternExistingStringsAlright() throws Exception {
        final List<RuntimeException> capturedExceptions = new CopyOnWriteArrayList<>();
        final ExecutorService executorService = Executors.newFixedThreadPool(
                Runtime.getRuntime().availableProcessors(),
                new NamedThreadFactory("test"));
        // Background writers each use their own wire (fresh wire per thread via
        // the supplier) while the main thread re-reads known data below.
        for (int i = 0; i < (Jvm.isArm() ? 12 : 200); i++) {
            executorService.submit(new BinaryTextReaderWriter(capturedExceptions::add, () -> BinaryWire.binaryOnly(Bytes.allocateElasticOnHeap(4096))));
        }
        for (int i = 0; i < 50000; i++) {
            wire.clear();
            final int dataPointIndex = random.nextInt(DATA_SET_SIZE);
            wire.getFixedBinaryValueOut(true).text(testData[dataPointIndex]);
            final String inputData = wire.read().text();
            // Round-tripped text must equal the representative interned earlier.
            assertEquals(message(i, inputData), internedStrings[dataPointIndex], inputData);
        }
        executorService.shutdown();
        assertTrue("jobs did not complete in time", executorService.awaitTermination(15L, TimeUnit.SECONDS));
        assertTrue(capturedExceptions.isEmpty());
    }
    @Test
    public void multipleThreadsUsingBinaryWiresShouldNotCauseProblems() throws Exception {
        final List<RuntimeException> capturedExceptions = new CopyOnWriteArrayList<>();
        final ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
        // One wire per task (supplier creates a fresh wire), so no sharing occurs.
        for (int i = 0; i < (Jvm.isArm() ? 4 : 200); i++) {
            executorService.submit(new BinaryTextReaderWriter(capturedExceptions::add, () -> BinaryWire.binaryOnly(Bytes.allocateElasticOnHeap(4096))));
        }
        executorService.shutdown();
        assertTrue("jobs did not complete in time", executorService.awaitTermination(5L, TimeUnit.SECONDS));
        assertTrue(capturedExceptions.isEmpty());
    }
    @Ignore("used to demonstrate errors that can occur when buffers are shared between threads")
    @Test
    public void multipleThreadsSharingBinaryWireShouldCauseProblems() throws Exception {
        final List<RuntimeException> capturedExceptions = new CopyOnWriteArrayList<>();
        final ExecutorService executorService = Executors.newFixedThreadPool(
                Runtime.getRuntime().availableProcessors(),
                new NamedThreadFactory("test"));
        // Deliberately share ONE mutable wire across all tasks to provoke races.
        final BinaryWire sharedMutableWire = BinaryWire.binaryOnly(Bytes.allocateElasticOnHeap(4096));
        for (int i = 0; i < 1_000; i++) {
            executorService.submit(new BinaryTextReaderWriter(capturedExceptions::add, () -> sharedMutableWire));
        }
        executorService.shutdown();
        assertTrue("jobs did not complete in time", executorService.awaitTermination(15L, TimeUnit.SECONDS));
        capturedExceptions.stream().filter(e -> e instanceof BufferUnderflowException).forEach(RuntimeException::printStackTrace);
        assertTrue(capturedExceptions.isEmpty());
    }
    /**
     * Task that writes 2000 random strings to a (usually thread-local) wire and
     * reads them back, reporting any null read or runtime failure through the
     * supplied consumer rather than throwing.
     */
    private static final class BinaryTextReaderWriter implements Runnable {
        private final Supplier<BinaryWire> binaryWireSupplier;
        // Thread-local so each worker thread gets its own wire from the supplier.
        private final ThreadLocal<BinaryWire> wire;
        private final Random random = new Random(System.nanoTime());
        private final Consumer<RuntimeException> exceptionConsumer;
        private BinaryTextReaderWriter(final Consumer<RuntimeException> exceptionConsumer,
                                       final Supplier<BinaryWire> binaryWireSupplier) throws IOException {
            this.exceptionConsumer = exceptionConsumer;
            this.binaryWireSupplier = binaryWireSupplier;
            wire = ThreadLocal.withInitial(
                    this.binaryWireSupplier);
        }
        @Override
        public void run() {
            try {
                for (int i = 0; i < 2_000; i++) {
                    wire.get().getFixedBinaryValueOut(true).text(makeString(250, random));
                }
                for (int i = 0; i < 2_000; i++) {
                    if (wire.get().read().text() == null) {
                        exceptionConsumer.accept(new IllegalStateException("text was null"));
                    }
                }
            } catch (RuntimeException e) {
                // Collected, not rethrown: the tests assert on the collection.
                exceptionConsumer.accept(e);
            }
        }
    }
}
|
package com.dmdirc;
import com.dmdirc.actions.ActionManager;
import com.dmdirc.actions.CoreActionType;
import com.dmdirc.commandparser.CommandManager;
import com.dmdirc.commandparser.CommandType;
import com.dmdirc.logger.ErrorLevel;
import com.dmdirc.logger.Logger;
import com.dmdirc.parser.interfaces.ClientInfo;
import com.dmdirc.parser.interfaces.Parser;
import com.dmdirc.parser.common.CallbackManager;
import com.dmdirc.parser.common.CallbackNotFoundException;
import com.dmdirc.parser.interfaces.callbacks.NickChangeListener;
import com.dmdirc.parser.interfaces.callbacks.PrivateActionListener;
import com.dmdirc.parser.interfaces.callbacks.PrivateMessageListener;
import com.dmdirc.parser.interfaces.callbacks.QuitListener;
import com.dmdirc.ui.WindowManager;
import com.dmdirc.ui.input.TabCompleter;
import com.dmdirc.ui.input.TabCompletionType;
import com.dmdirc.ui.interfaces.InputWindow;
import com.dmdirc.ui.interfaces.QueryWindow;
import java.awt.Toolkit;
import java.io.Serializable;
/**
 * The Query class represents the client's view of a query with another user.
 * It handles callbacks for query events from the parser, maintains the
 * corresponding QueryWindow, and handles user input for the query.
 * @author chris
 */
public class Query extends MessageTarget implements PrivateActionListener,
        PrivateMessageListener, NickChangeListener, QuitListener, Serializable {
    /**
     * A version number for this class. It should be changed whenever the class
     * structure is changed (or anything else that would prevent serialized
     * objects being unserialized with the new class).
     */
    private static final long serialVersionUID = 1;
    /** The Server this Query is on. Nulled when the window closes. */
    private Server server;
    /** The QueryWindow used for this Query. Nulled when the window closes. */
    private QueryWindow window;
    /** The full host and nickname of the client associated with this Query. */
    private String host, nickname;
    /** The tab completer for the query window. */
    private final TabCompleter tabCompleter;
    /**
     * Creates a new instance of Query.
     *
     * @param newHost host of the remove client
     * @param newServer The server object that this Query belongs to
     */
    public Query(final Server newServer, final String newHost) {
        super("query", newServer.getParser().parseHostmask(newHost)[0],
                newServer.getConfigManager());
        this.server = newServer;
        this.host = newHost;
        // Nickname is the first component of the parsed hostmask.
        this.nickname = server.getParser().parseHostmask(host)[0];
        window = Main.getUI().getQuery(this);
        WindowManager.addWindow(server.getFrame(), window);
        ActionManager.processEvent(CoreActionType.QUERY_OPENED, null, this);
        // Window is only shown immediately unless the user opted to hide queries.
        if (!server.getConfigManager().getOptionBool("general", "hidequeries")) {
            window.open();
        }
        // Query-specific completions on top of the server's completer.
        tabCompleter = new TabCompleter(server.getTabCompleter());
        tabCompleter.addEntries(TabCompletionType.COMMAND,
                CommandManager.getCommandNames(CommandType.TYPE_QUERY));
        tabCompleter.addEntries(TabCompletionType.COMMAND,
                CommandManager.getCommandNames(CommandType.TYPE_CHAT));
        window.getInputHandler().setTabCompleter(tabCompleter);
        reregister();
        updateTitle();
    }
    /**
     * Shows this query's window.
     */
    public void show() {
        window.open();
    }
    /** {@inheritDoc} */
    @Override
    public InputWindow getFrame() {
        return window;
    }
    /**
     * Returns the tab completer for this query.
     *
     * @return This query's tab completer
     */
    public TabCompleter getTabCompleter() {
        return tabCompleter;
    }
    /** {@inheritDoc} */
    @Override
    public void sendLine(final String line) {
        if (server.getState() != ServerState.CONNECTED) {
            // Not connected: audible feedback instead of silently dropping input.
            Toolkit.getDefaultToolkit().beep();
            return;
        }
        final ClientInfo client = server.getParser().getLocalClient();
        // Long lines are split to fit the protocol limit; empty parts skipped.
        for (String part : splitLine(window.getTranscoder().encode(line))) {
            if (!part.isEmpty()) {
                server.getParser().sendMessage(getNickname(), part);
                final StringBuffer buff = new StringBuffer("querySelfMessage");
                ActionManager.processEvent(CoreActionType.QUERY_SELF_MESSAGE, buff, this, part);
                addLine(buff, client.getNickname(), client.getUsername(),
                        client.getHostname(), part);
            }
        }
    }
    /** {@inheritDoc} */
    @Override
    public int getMaxLineLength() {
        // -1 signals "no limit known" while disconnected.
        return server.getState() == ServerState.CONNECTED ? server.getParser()
                .getMaxLength("PRIVMSG", host) : -1;
    }
    /**
     * Sends a private action to the remote user.
     *
     * @param action action text to send
     */
    @Override
    public void sendAction(final String action) {
        if (server.getState() != ServerState.CONNECTED) {
            Toolkit.getDefaultToolkit().beep();
            return;
        }
        final ClientInfo client = server.getParser().getLocalClient();
        final int maxLineLength = server.getParser().getMaxLength("PRIVMSG", host);
        // Actions cannot be split; over-length input is refused with a message.
        if (maxLineLength >= action.length() + 2) {
            server.getParser().sendAction(getNickname(),
                    window.getTranscoder().encode(action));
            final StringBuffer buff = new StringBuffer("querySelfAction");
            ActionManager.processEvent(CoreActionType.QUERY_SELF_ACTION, buff, this, action);
            addLine(buff, client.getNickname(), client.getUsername(),
                    client.getHostname(), window.getTranscoder().encode(action));
        } else {
            addLine("actionTooLong", action.length());
        }
    }
    /**
     * Handles a private message event from the parser.
     *
     * @param parser Parser receiving the event
     * @param message message received
     * @param remoteHost remote user host
     */
    @Override
    public void onPrivateMessage(final Parser parser, final String message,
            final String remoteHost) {
        // NOTE(review): parses the stored 'host' field rather than the
        // 'remoteHost' parameter — confirm this is intentional (the stored
        // host may be stale relative to the event's hostmask).
        final String[] parts = parser.parseHostmask(host);
        final StringBuffer buff = new StringBuffer("queryMessage");
        ActionManager.processEvent(CoreActionType.QUERY_MESSAGE, buff, this, message);
        addLine(buff, parts[0], parts[1], parts[2], message);
    }
    /**
     * Handles a private action event from the parser.
     *
     * @param parser Parser receiving the event
     * @param message message received
     * @param remoteHost remote host
     */
    @Override
    public void onPrivateAction(final Parser parser, final String message,
            final String remoteHost) {
        // NOTE(review): uses the stored 'host' field, not 'remoteHost' — see
        // the matching note on onPrivateMessage.
        final String[] parts = parser.parseHostmask(host);
        final StringBuffer buff = new StringBuffer("queryAction");
        ActionManager.processEvent(CoreActionType.QUERY_ACTION, buff, this, message);
        addLine(buff, parts[0], parts[1], parts[2], message);
    }
    /**
     * Updates the QueryWindow's title.
     */
    private void updateTitle() {
        window.setTitle(getNickname());
    }
    /**
     * Reregisters query callbacks. Called when reconnecting to the server.
     */
    public void reregister() {
        final CallbackManager<?> callbackManager = server.getParser().getCallbackManager();
        final String nick = getNickname();
        try {
            // Message/action callbacks are keyed on the remote nickname; quit
            // and nick-change are global and filtered in the handlers below.
            callbackManager.addCallback(PrivateActionListener.class, this, nick);
            callbackManager.addCallback(PrivateMessageListener.class, this, nick);
            callbackManager.addCallback(QuitListener.class, this);
            callbackManager.addCallback(NickChangeListener.class, this);
        } catch (CallbackNotFoundException ex) {
            Logger.appError(ErrorLevel.HIGH, "Unable to get query events", ex);
        }
    }
    /** {@inheritDoc} */
    @Override
    public void onNickChanged(final Parser tParser, final ClientInfo cClient,
            final String sOldNick) {
        // Only react to nick changes of the user this query is with.
        if (sOldNick.equals(getNickname())) {
            final CallbackManager<?> callbackManager = server.getParser().getCallbackManager();
            // Re-key the per-nickname callbacks under the new nickname.
            callbackManager.delCallback(PrivateActionListener.class, this);
            callbackManager.delCallback(PrivateMessageListener.class, this);
            try {
                callbackManager.addCallback(PrivateActionListener.class, this, cClient.getNickname());
                callbackManager.addCallback(PrivateMessageListener.class, this, cClient.getNickname());
            } catch (CallbackNotFoundException ex) {
                Logger.appError(ErrorLevel.HIGH, "Unable to get query events", ex);
            }
            final StringBuffer format = new StringBuffer("queryNickChanged");
            ActionManager.processEvent(CoreActionType.QUERY_NICKCHANGE, format, this, sOldNick);
            // Keep the server's tab completer in sync with the new nickname.
            server.getTabCompleter().removeEntry(TabCompletionType.QUERY_NICK, sOldNick);
            server.getTabCompleter().addEntry(TabCompletionType.QUERY_NICK, cClient.getNickname());
            addLine(format, sOldNick, cClient.getUsername(),
                    cClient.getHostname(), cClient.getNickname());
            host = cClient.getNickname() + "!" + cClient.getUsername() + "@" + cClient.getHostname();
            nickname = cClient.getNickname();
            updateTitle();
            setName(cClient.getNickname());
        }
    }
    /** {@inheritDoc} */
    @Override
    public void onQuit(final Parser tParser, final ClientInfo cClient,
            final String sReason) {
        // Global callback: only handle quits from this query's user.
        if (cClient.getNickname().equals(getNickname())) {
            final StringBuffer format = new StringBuffer(sReason.isEmpty()
                    ? "queryQuit" : "queryQuitReason");
            ActionManager.processEvent(CoreActionType.QUERY_QUIT, format, this, sReason);
            addLine(format, cClient.getNickname(),
                    cClient.getUsername(), cClient.getHostname(), sReason);
        }
    }
    /**
     * Returns the Server associated with this query.
     *
     * @return associated Server
     */
    @Override
    public Server getServer() {
        return server;
    }
    /** {@inheritDoc} */
    @Override
    public void windowClosing() {
        // 1: Make the window non-visible
        window.setVisible(false);
        // 2: Remove any callbacks or listeners
        if (server != null && server.getParser() != null) {
            server.getParser().getCallbackManager().delAllCallback(this);
        }
        // 3: Trigger any actions neccessary
        // 4: Trigger action for the window closing
        ActionManager.processEvent(CoreActionType.QUERY_CLOSED, null, this);
        // 5: Inform any parents that the window is closing
        if (server != null) {
            server.delQuery(this);
        }
        // 6: Remove the window from the window manager
        WindowManager.removeWindow(window);
        // 7: Remove any references to the window and parents
        window = null;
        server = null;
    }
    /**
     * Returns the host that this query is with.
     *
     * @return The full host that this query is with
     */
    public String getHost() {
        return host;
    }
    /**
     * Returns the current nickname of the user that this query is with.
     *
     * @return The nickname of this query's user
     */
    public String getNickname() {
        return nickname;
    }
    /** {@inheritDoc} */
    @Override
    public void activateFrame() {
        // Window may already have been torn down by windowClosing().
        if (window == null) {
            return;
        }
        if (!window.isVisible()) {
            show();
        }
        super.activateFrame();
    }
}
|
package net.runelite.deob.deobfuscators.arithmetic;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import net.runelite.deob.ClassFile;
import net.runelite.deob.ClassGroup;
import net.runelite.deob.Deobfuscator;
import net.runelite.deob.Method;
import net.runelite.deob.attributes.Code;
import net.runelite.deob.attributes.code.Instruction;
import net.runelite.deob.attributes.code.Instructions;
import net.runelite.deob.attributes.code.instructions.LDC_W;
import org.junit.Assert;
import org.junit.Test;
/**
 * Fixture loaded (as a separately-compiled class-file resource) by ModArithTest.
 * The large multiplier literals (e.g. 1928543073, 1611704481) are the
 * obfuscation artefacts the deobfuscators are expected to eliminate, so the
 * exact constants here are significant — do not "simplify" them.
 */
class TestClass
{
	private static int dummy(Object... args) { return 0; }
	private static final int var = 42;
	private static int field1051 = -1611704481;
	private int field2701;
	public void test()
	{
		// Encoded comparisons/assignments using paired multiplicative constants;
		// after deobfuscation no "big" constant should remain (see checkConstants).
		if (-1 != this.field1051 * 1928543073)
		{
			dummy(this.field1051 * 1928543073);
			this.field1051 = dummy() * 1611704481;
		}
		if (field2701 * 1550405721 > 30000)
		{
			field2701 += -1868498967 * var;
		}
	}
}
public class ModArithTest
{
	/**
	 * Asserts that no large (obfuscation-sized) integer constant remains in any
	 * LDC_W instruction of the given class.
	 *
	 * @param cf class file to inspect
	 */
	private void checkConstants(ClassFile cf)
	{
		for (Method m : cf.getMethods().getMethods())
		{
			Code code = m.getCode();
			Instructions instructions = code.getInstructions();
			for (Instruction i : instructions.getInstructions())
			{
				if (i instanceof LDC_W)
				{
					LDC_W ldc = (LDC_W) i;
					Assert.assertFalse(DMath.isBig(ldc.getConstantAsInt()));
				}
			}
		}
	}

	/**
	 * Loads the obfuscated TestClass resource, runs the modular-arithmetic and
	 * multiplication deobfuscators, and verifies no big constants survive.
	 */
	@Test
	public void test() throws IOException
	{
		// Fix: the resource stream was never closed; try-with-resources
		// guarantees it is released even on assertion failure.
		try (InputStream in = this.getClass().getClassLoader().getResourceAsStream("net/runelite/deob/deobfuscators/arithmetic/TestClass.class"))
		{
			Assert.assertNotNull(in);

			ClassGroup group = new ClassGroup();
			ClassFile cf = new ClassFile(group, new DataInputStream(in));
			group.addClass(cf);

			ModArith d1 = new ModArith();
			d1.run(group);
			d1.runOnce();

			Deobfuscator d2 = new MultiplicationDeobfuscator();
			d2.run(group);

			this.checkConstants(cf);
		}
	}
}
|
//package com.edsand;
import javax.swing.ImageIcon;
/**
 * Circular doubly-linked list of Imagen nodes. {@code inicio} is the head and
 * {@code fin} the tail; the tail's "sig" always points back to the head and the
 * head's "ant" back to the tail.
 */
class Lista {
    Imagen inicio, fin;

    public Lista() {
        inicio = fin = null;
    }

    /**
     * Appends an image at the tail, keeping the circle closed in both directions.
     *
     * @param imagen node to append
     */
    public void meter(Imagen imagen) {
        if (inicio == null) {
            // First element is both head and tail, linked to itself.
            inicio = fin = imagen;
            fin.setSig(inicio);
            inicio.setAnt(fin);
        }
        else {
            fin.setSig(imagen);
            fin.getSig().setAnt(fin);
            fin = fin.getSig();
            // Re-close the circle: tail forward to head, head back to tail.
            fin.setSig(inicio);
            inicio.setAnt(fin);
        }
    }

    /**
     * Removes the tail element (no-op with a message when the list is empty).
     */
    public void sacar() {
        if (inicio == null) System.out.println("Lista vacia!");
        else if (inicio == fin) inicio = fin = null;
        else {
            fin = fin.getAnt();
            fin.setSig(inicio);
            // Bug fix: the head's backward link must be updated too, otherwise
            // inicio.getAnt() would still reference the removed node and
            // paso_der() would resurrect it.
            inicio.setAnt(fin);
        }
    }

    /**
     * Rotates the list one step left: the head moves to the tail position.
     */
    public void paso_izq() {
        Imagen tmp = inicio;
        inicio = inicio.getSig();
        meter(tmp);
    }

    /**
     * Rotates the list one step right: the tail moves to the head position.
     */
    public void paso_der() {
        Imagen tmp = fin;
        fin = fin.getAnt();
        inicio.setAnt(tmp);
        inicio = tmp;
    }

    /**
     * @return the current head of the list (null when empty)
     */
    public Imagen getInicio() {
        return inicio;
    }
}
|
package org.jsmart.zerocode.core.httpclient;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.util.EntityUtils;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
/**
 * Tests for BasicHttpClient's form-urlencoded request building and query-param
 * URL handling.
 *
 * NOTE(review): several assertions depend on the iteration order of keys
 * coming out of a HashMap-backed JSON body (e.g. "Company=...&worthInBillion=...&age=...").
 * That order is deterministic for a given JDK but not guaranteed by contract —
 * confirm these pass on the supported JDK range.
 */
public class BasicHttpClientTest {
    private BasicHttpClient basicHttpClient;
    private Map<String, Object> header;
    @Before
    public void setUp() {
        basicHttpClient = new BasicHttpClient();
        header = new HashMap<String, Object>();
        // Default header for all tests; individual tests re-put the same value.
        header.put("Content-Type", "application/x-www-form-urlencoded");
    }
    @Test
    public void createRequestBuilder() throws IOException {
        header.put("Content-Type", "application/x-www-form-urlencoded");
        String reqBodyAsString = "{\"Company\":\"Amazon\",\"age\":30,\"worthInBillion\":999.999}";
        RequestBuilder requestBuilder = basicHttpClient.createRequestBuilder("/api/v1/founder", "POST", header, reqBodyAsString);
        String nameValuePairString = EntityUtils.toString(requestBuilder.getEntity(), "UTF-8");
        assertThat(requestBuilder.getMethod(), is("POST"));
        // JSON body fields become form name/value pairs.
        assertThat(nameValuePairString, is("Company=Amazon&worthInBillion=999.999&age=30"));
    }
    @Test
    public void createRequestBuilder_spaceInKeyValue() throws IOException {
        header.put("Content-Type", "application/x-www-form-urlencoded");
        String reqBodyAsString = "{\"Name\":\"Larry Pg\",\"Company\":\"Amazon\",\"Title\":\"CEO\"}";
        RequestBuilder requestBuilder = basicHttpClient.createRequestBuilder("/api/v1/founder", "POST", header, reqBodyAsString);
        String nameValuePairString = EntityUtils.toString(requestBuilder.getEntity(), "UTF-8");
        // Spaces in values are form-encoded as '+'.
        assertThat(nameValuePairString, is("Company=Amazon&Title=CEO&Name=Larry+Pg"));
    }
    @Test
    public void createRequestBuilder_frontSlash() throws IOException {
        String reqBodyAsString = "{\"state/region\":\"singapore north\",\"Company\":\"Amazon\",\"Title\":\"CEO\"}";
        RequestBuilder requestBuilder = basicHttpClient.createRequestBuilder("/api/v1/founder", "POST", header, reqBodyAsString);
        String nameValuePairString = EntityUtils.toString(requestBuilder.getEntity(), "UTF-8");
        // '/' in a key is percent-encoded as %2F.
        assertThat(nameValuePairString, is("Company=Amazon&Title=CEO&state%2Fregion=singapore+north"));
    }
    @Test
    public void test_queryParamEncodedChar() throws IOException {
        Map<String, Object> queryParamsMap = new HashMap<>();
        queryParamsMap.put("q1", "value1");
        queryParamsMap.put("q2", "value2");
        queryParamsMap.put("state/region", "London UK");
        // Query params are appended with the same form-style encoding.
        String effectiveUrl = basicHttpClient.handleUrlAndQueryParams("http://abc.com", queryParamsMap);
        assertThat(effectiveUrl, is("http://abc.com?q1=value1&q2=value2&state%2Fregion=London+UK"));
    }
    @Test
    public void createRequestBuilder_jsonValue() throws IOException {
        header.put("Content-Type", "application/x-www-form-urlencoded");
        String reqBodyAsString = "{\n" +
                "    \"Company\": \"Amazon\",\n" +
                "    \"addresses\": {\n" +
                "        \"city\": \"NewYork\",\n" +
                "        \"type\": \"HeadOffice\"\n" +
                "    }\n" +
                "}";
        RequestBuilder requestBuilder = basicHttpClient.createRequestBuilder("/api/v1/founder", "POST", header, reqBodyAsString);
        String nameValuePairString = EntityUtils.toString(requestBuilder.getEntity(), "UTF-8");
        assertThat(requestBuilder.getMethod(), is("POST"));
        //On the server side: address={city=NewYork, type=HeadOffice}
        // Nested JSON objects are stringified and then percent-encoded wholesale.
        assertThat(nameValuePairString, is("Company=Amazon&addresses=%7Bcity%3DNewYork%2C+type%3DHeadOffice%7D"));
    }
}
|
package org.nextrtc.signalingserver.performance;
import org.eclipse.jetty.websocket.api.Session;
import org.eclipse.jetty.websocket.client.WebSocketClient;
import org.junit.Test;
import java.net.URI;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.IntStream;
import static java.util.stream.Collectors.toList;
//@Ignore
/**
 * Load test for the signaling server: opens thousands of websocket sessions
 * against a locally-running instance (see the commented-out @Ignore above —
 * this is not meant for regular CI runs).
 */
public class PerformanceTest {
    // Pool used to open batches of 100 sessions concurrently.
    private ExecutorService service = Executors.newFixedThreadPool(4);
    private URI uri = uri();

    /**
     * Builds the signaling endpoint URI, rethrowing any syntax problem unchecked.
     */
    private URI uri() {
        try {
            return new URI("ws://localhost:8080/signaling");
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Opens 30 batches of 100 sessions (3000 total), keeps them alive for a
     * while, then stops every client.
     */
    @Test
    public void shouldBeAbleToOpen3000Sessions() throws Exception {
        // given
        List<Future<WebSocketClient>> clients = new ArrayList<>();
        for (int i = 0; i < 30; i++)
            clients.add(service.submit(this::open100Sessions));
        // when
        List<WebSocketClient> webClients = clients.stream().map(f -> doTry(f::get)).collect(toList());
        // then
        Thread.sleep(100000);
        webClients.forEach(w -> {
            try {
                w.stop();
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
    }

    @Test
    public void scenario1_meshConversationWith100Participant() throws Exception {
        // given
        // when
        // then
    }

    /**
     * Starts one client and opens exactly 100 sessions on it.
     */
    private WebSocketClient open100Sessions() {
        WebSocketClient client = new WebSocketClient();
        MockedClient socket = new MockedClient();
        try {
            List<Future<Session>> sessions = new LinkedList<>();
            client.start();
            // Bug fix: range(1, 100) only iterated 99 times, so the "3000
            // sessions" test actually opened 2970. Use 0..99 for a true 100.
            IntStream.range(0, 100).forEach(i -> doTry(() -> sessions.add(client.connect(socket, uri))));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return client;
    }

    /**
     * Executes the supplier, wrapping any checked exception as unchecked.
     */
    private <T> T doTry(Except<T> supplier) {
        try {
            return supplier.execute();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Supplier variant whose execute() may throw checked exceptions. */
    interface Except<T> {
        T execute() throws Exception;
    }
}
|
package uk.co.sleonard.unison.output.tests;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.List;
import java.util.Vector;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import uk.co.sleonard.unison.gui.UNISoNException;
import uk.co.sleonard.unison.output.PajekNetworkFile;
import uk.co.sleonard.unison.output.Relationship;
/**
 * Tests for {@link PajekNetworkFile}: link creation (directed/undirected),
 * filename handling and Pajek-format output.
 */
public class PajekNetworkFileTest {
	private PajekNetworkFile file;
	/**
	 * Setup: fresh file instance per test.
	 */
	@Before
	public void setUp() throws Exception {
		this.file = new PajekNetworkFile();
	}
	/**
	 * After. (Intentionally empty; nothing to clean up.)
	 */
	@After
	public void tearDown() throws Exception {
	}
	/**
	 * test getPreviewPanel
	 * Ignored by Error (No X11 DISPLAY) in Travis CI
	 */
	@Ignore
	@Test
	public void testGetPreviewPanel() {
		try {
			assertNotNull(this.file.getPreviewPanel());
		} catch (UNISoNException e) {
			fail("ERROR : " + e.getMessage());
		}
	}
	/**
	 * test addRelationship: adding the same pair twice should reuse/update the
	 * relationship (output only inspected manually via stdout here).
	 */
	@Test
	public void testAddRelationship() {
		final List<Relationship> links = new Vector<Relationship>();
		Relationship link = this.file.addRelationship("Alf", "Bob", links);
		System.out.println("Link1: " + link);
		link = this.file.addRelationship("Alf", "Bob", links);
		System.out.println("Link2: " + link);
	}
	/**
	 * test createDirectedLinks: input pairs must not be consumed/mutated.
	 */
	@Test
	public void testCreateDirectedLinks() {
		final Vector<Vector<String>> nodePairs = generateNodePairs();
		this.file.createDirectedLinks(nodePairs);
		assertEquals(2, nodePairs.size());
	}
	/**
	 * test createUndirectedLinks: input pairs must not be consumed/mutated.
	 */
	@Test
	public void testCreateUndirectedLinks() {
		final Vector<Vector<String>> nodePairs = generateNodePairs();
		this.file.createUndirectedLinks(nodePairs);
		assertEquals(2, nodePairs.size());
	}
	/**
	 * Test getFilename.
	 * NOTE(review): depends on testSaveToFile() being invoked first to set the
	 * filename — the coupling is explicit (direct call) but makes this test
	 * re-run the save logic.
	 */
	@Test
	public void testGetFilename() {
		String expected = "UnitTest.net";
		testSaveToFile();
		assertEquals(expected, this.file.getFilename());
	}
	/**
	 * Test getFileSuffix
	 */
	@Test
	public void testGetFileSuffix() {
		String expected = ".net";
		assertEquals(expected, this.file.getFileSuffix());
	}
	/**
	 * test saveToFile: saving with and without the suffix must both work.
	 * NOTE(review): the nodePairs map built here is never passed to the file
	 * object (and duplicate keys overwrite earlier entries) — it appears to be
	 * leftover setup; confirm whether saveToFile was meant to receive it.
	 */
	@Test
	public void testSaveToFile() {
		final HashMap<String, String> nodePairs = new HashMap<String, String>();
		nodePairs.put("Alf", "Bertie");
		nodePairs.put("Bertie", "Charlie");
		nodePairs.put("Charlie", "Bertie");
		nodePairs.put("Bertie", "Charlie");
		nodePairs.put("Derek", "");
		this.file.saveToFile("UnitTest");
		this.file.saveToFile("UnitTest.net");
	}
	/**
	 * test writeData: output must contain the Pajek section headers for
	 * vertices, edges and arcs.
	 */
	@Test
	public void testWriteData() {
		final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
		final Vector<Vector<String>> nodePairs = generateNodePairs();
		this.file.createDirectedLinks(nodePairs);
		this.file.writeData(new PrintStream(outContent));
		assertTrue(outContent.toString().contains("*Vertices"));
		this.file.createUndirectedLinks(nodePairs);
		this.file.writeData(new PrintStream(outContent));
		assertTrue(outContent.toString().contains("*Edges"));
		assertTrue(outContent.toString().contains("*Arcs"));
	}
	/**
	 * Generate Vector<Vector<String>> with test data.
	 * @return Vector<Vector<String>> filled (two identical 4-name vectors).
	 */
	private Vector<Vector<String>> generateNodePairs() {
		final Vector<Vector<String>> nodePairs = new Vector<>();
		Vector<String> vector = new Vector<>();
		vector.addElement("Alf");
		vector.addElement("Bob");
		vector.addElement("Carl");
		vector.addElement("Carol");
		nodePairs.addElement(new Vector<String>(vector));
		nodePairs.addElement(new Vector<String>(vector));
		return nodePairs;
	}
}
|
package org.biojava.bio.structure.align.ce;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import org.biojava.bio.structure.Atom;
import org.biojava.bio.structure.Group;
import org.biojava.bio.structure.Structure;
import org.biojava.bio.structure.align.model.AFPChain;
/** A class to wrap some of the structure.gui classes using Reflection,
 * so that the core module does not need a hard dependency on the optional
 * GUI module.
 *
 * @author Andreas Prlic
 *
 */
public class GuiWrapper {

    /** Package that contains the optional GUI classes. */
    static final String guiPackage = "org.biojava.bio.structure.gui";

    static final String strucAlignmentDisplay = "org.biojava.bio.structure.align.gui.StructureAlignmentDisplay";
    static final String displayAFP = "org.biojava.bio.structure.align.gui.DisplayAFP" ;
    static final String alignmentGUI = "org.biojava.bio.structure.align.gui.AlignmentGui";
    static final String strucAligJmol = "org.biojava.bio.structure.align.gui.jmol.StructureAlignmentJmol";

    /**
     * Checks whether the optional structure-gui module is available on the
     * classpath.
     *
     * @return true if the GUI classes can be loaded, false otherwise
     */
    public static boolean isGuiModuleInstalled(){
        try {
            // only the side effect (attempting to load the class) matters here
            Class.forName(displayAFP);
        } catch (ClassNotFoundException ex){
            return false;
        }
        return true;
    }

    /**
     * Reflective proxy for {@code StructureAlignmentDisplay.display(...)}.
     *
     * @return the StructureAlignmentJmol instance created by the GUI module
     * @throws ClassNotFoundException if the GUI module is not installed
     */
    public static Object display(AFPChain afpChain, Atom[] ca1, Atom[] ca2,
            List<Group> hetatms1, List<Group> nucs1, List<Group> hetatms2,
            List<Group> nucs2)
            throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, IllegalAccessException{
        Class<?> c = Class.forName(strucAlignmentDisplay);
        Method display = c.getMethod("display", AFPChain.class, Atom[].class,
                Atom[].class, List.class, List.class, List.class, List.class);
        // static method: receiver argument is null
        return display.invoke(null, afpChain, ca1, ca2, hetatms1, nucs1, hetatms2, nucs2);
    }

    /**
     * Reflective proxy for {@code DisplayAFP.showAlignmentImage(...)}.
     *
     * @param jmol a StructureAlignmentJmol instance (typed as Object to avoid
     *             a compile-time dependency on the GUI module)
     */
    public static void showAlignmentImage(AFPChain afpChain, Atom[] ca1,
            Atom[] ca2, Object jmol)
            throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, IllegalAccessException{
        Class<?> structureAlignmentJmol = Class.forName(strucAligJmol);
        Class<?> c = Class.forName(displayAFP);
        Method show = c.getMethod("showAlignmentImage", AFPChain.class, Atom[].class, Atom[].class, structureAlignmentJmol);
        show.invoke(null, afpChain, ca1, ca2, jmol);
    }

    /**
     * Reflective proxy for {@code AlignmentGui.getInstance()}.
     */
    public static void showAlignmentGUI()
            throws ClassNotFoundException, NoSuchMethodException, InvocationTargetException, IllegalAccessException {
        // proxy for AlignmentGui.getInstance();
        Class<?> c = Class.forName(alignmentGUI);
        // no-arg lookup/invoke avoids the ambiguous varargs null of
        // getMethod("getInstance", null) / m.invoke(c, null)
        Method m = c.getMethod("getInstance");
        m.invoke(null);
    }

    /**
     * Reflective proxy for {@code DisplayAFP.getAlignedStructure(...)}.
     */
    public static Structure getAlignedStructure(Atom[] ca1, Atom[] ca2)
            throws ClassNotFoundException, NoSuchMethodException,
            InvocationTargetException, IllegalAccessException{
        // fail fast with ClassNotFoundException if the GUI module is missing
        Class.forName(strucAligJmol);
        Class<?> c = Class.forName(displayAFP);
        Method show = c.getMethod("getAlignedStructure", Atom[].class, Atom[].class);
        return (Structure) show.invoke(null, ca1, ca2);
    }

    /**
     * Reflective proxy for {@code StructureAlignmentDisplay.prepareGroupsForDisplay(...)}.
     */
    public static Group[] prepareGroupsForDisplay(AFPChain afpChain, Atom[] ca1,
            Atom[] ca2)
            throws ClassNotFoundException, NoSuchMethodException,
            InvocationTargetException, IllegalAccessException{
        Class<?> c = Class.forName(strucAlignmentDisplay);
        Method display = c.getMethod("prepareGroupsForDisplay", AFPChain.class, Atom[].class,
                Atom[].class);
        Object groups = display.invoke(null, afpChain, ca1, ca2);
        return (Group[]) groups;
    }

    /**
     * Reflective proxy for {@code DisplayAFP.getAtomArray(...)}.
     */
    public static Atom[] getAtomArray(Atom[] ca, List<Group> hetatoms, List<Group> nucs)
            throws ClassNotFoundException, NoSuchMethodException,
            InvocationTargetException, IllegalAccessException{
        // fail fast with ClassNotFoundException if the GUI module is missing
        Class.forName(strucAligJmol);
        Class<?> c = Class.forName(displayAFP);
        Method show = c.getMethod("getAtomArray", Atom[].class, List.class, List.class);
        return (Atom[]) show.invoke(null, ca, hetatoms, nucs);
    }
}
|
package net.openid.message;
import net.openid.association.DiffieHellmanSession;
import net.openid.association.AssociationSessionType;
import net.openid.association.AssociationException;
import java.util.List;
import java.util.Arrays;
/**
 * The OpenID Association Request message.
 * <p>
 * Handles OpenID 2.0 and OpenID 1.x messages.
 *
 * @see AssociationSessionType
 * @author Marius Scurtescu, Johnny Bufu
 */
public class AssociationRequest extends Message
{
    /** Value of the openid.mode parameter identifying association requests. */
    public static final String MODE_ASSOC = "associate";

    // parameters every association request must carry
    protected final static List requiredFields = Arrays.asList( new String[] {
        "openid.mode",
        "openid.session_type",
    });

    // parameters whose presence depends on protocol version / session type
    protected final static List optionalFields = Arrays.asList( new String[] {
        "openid.ns",                 // not in v1 messages
        "openid.assoc_type",         // can be missing in v1
        "openid.dh_modulus",
        "openid.dh_gen",
        "openid.dh_consumer_public"
    });

    /**
     * The Diffie-Hellman session containing the cryptographic data needed for
     * encrypting the MAC key exchange.
     * <p>
     * Null for no-encryption sessions.
     */
    private DiffieHellmanSession _dhSess;

    /**
     * Creates an Association Request message with the
     * specified association type and "no-encryption" session.
     * <p>
     * The supplied type must be one of the "no-encryption" types, otherwise
     * a DiffieHellman session is required.
     *
     * @see #AssociationRequest(AssociationSessionType, DiffieHellmanSession)
     */
    protected AssociationRequest(AssociationSessionType type)
    {
        this(type, null);
    }

    /**
     * Constructs an AssociationRequest message with the
     * specified association type and Diffie-Hellman session.
     *
     * @param dhSess Diffie-Hellman session to be used for this association;
     *               if null, a "no-encryption" session is created.
     */
    protected AssociationRequest(AssociationSessionType type,
                                 DiffieHellmanSession dhSess)
    {
        // openid.ns is only present in OpenID 2.0 messages
        if (type.isVersion2())
            set("openid.ns", OPENID2_NS);

        set("openid.mode", MODE_ASSOC);
        set("openid.session_type", type.getSessionType());
        set("openid.assoc_type", type.getAssociationType());

        _dhSess = dhSess;

        if (dhSess != null )
        {
            set("openid.dh_modulus", _dhSess.getModulus());
            set("openid.dh_gen", _dhSess.getGenerator());
            set("openid.dh_consumer_public", _dhSess.getPublicKey());
        }
    }

    /**
     * Constructs an AssociationRequest message from a parameter list.
     * <p>
     * Useful for processing incoming messages.
     */
    protected AssociationRequest(ParameterList params)
    {
        super(params);
    }

    /**
     * Creates an association request with a "no-encryption" session.
     *
     * @throws MessageException if the type requires a DH session or the
     *                          resulting message is invalid
     */
    public static AssociationRequest createAssociationRequest(
            AssociationSessionType type) throws MessageException
    {
        return createAssociationRequest(type, null);
    }

    /**
     * Creates an association request for the given type / DH session pair.
     *
     * @throws MessageException if the type and DH session do not match, or
     *                          the resulting message is invalid
     */
    public static AssociationRequest createAssociationRequest(
            AssociationSessionType type, DiffieHellmanSession dhSess)
            throws MessageException
    {
        AssociationRequest req = new AssociationRequest(type, dhSess);

        // make sure the association / session type matches the dhSess
        if ( type == null ||
             (dhSess == null && type.getHAlgorithm() != null) ||
             (dhSess != null && ! dhSess.getType().equals(type) ) )
            throw new MessageException(
                    "Invalid association / session combination specified: " +
                    type + ", DH session: " + dhSess);

        if ( !req.isValid() ) throw new MessageException(
                "Invalid set of parameters for the requested message type");

        return req;
    }

    /**
     * Creates an association request from an incoming parameter list.
     *
     * @throws MessageException if the parameters do not form a valid request
     */
    public static AssociationRequest createAssociationRequest(
            ParameterList params) throws MessageException
    {
        AssociationRequest req = new AssociationRequest(params);

        if ( !req.isValid() ) throw new MessageException(
                "Invalid set of parameters for the requested message type");

        return req;
    }

    public List getRequiredFields()
    {
        return requiredFields;
    }

    /**
     * Returns true for OpenID 2.0 messages, false otherwise.
     */
    public boolean isVersion2()
    {
        return hasParameter("openid.ns") &&
               OPENID2_NS.equals(getParameterValue("openid.ns"));
    }

    /**
     * Gets the association type parameter of the message.
     */
    private String getAssociationType()
    {
        return getParameterValue("openid.assoc_type");
    }

    /**
     * Gets the session type parameter of the message.
     */
    private String getSessionType()
    {
        return getParameterValue("openid.session_type");
    }

    /**
     * Gets the association / session type of the association request.
     *
     * @throws AssociationException
     */
    public AssociationSessionType getType() throws AssociationException
    {
        return AssociationSessionType.create(
                getSessionType(), getAssociationType(), ! isVersion2() );
    }

    /**
     * Gets the Diffie-Hellman session.
     * Null for no-encryption association requests.
     */
    public DiffieHellmanSession getDHSess()
    {
        return _dhSess;
    }

    /**
     * Gets the Diffie-Hellman modulus parameter of the message, or null for
     * messages with no-encryption sessions.
     */
    public String getDhModulus()
    {
        return getParameterValue("openid.dh_modulus");
    }

    /**
     * Gets the Diffie-Hellman generator parameter of the message, or null for
     * messages with no-encryption sessions.
     */
    public String getDhGen()
    {
        return getParameterValue("openid.dh_gen");
    }

    /**
     * Gets the Relying Party's (consumer) Diffie-Hellman public key, or null
     * for messages with no-encryption sessions.
     */
    public String getPublicKey()
    {
        return getParameterValue("openid.dh_consumer_public");
    }

    /**
     * Checks if the message is a valid OpenID Association Request.
     *
     * @return True if all validation checks passed, false otherwise.
     */
    public boolean isValid()
    {
        // basic checks
        if (! super.isValid()) return false;

        // association / session type checks
        // (includes most of the compatibility stuff)
        AssociationSessionType type;
        try
        {
            // throws exception for invalid session / association types
            type = getType();

            // make sure compatibility mode is the same for type and message
            if (type.isVersion2() != isVersion2())
                return false;

        } catch (AssociationException e) {
            return false;
        }

        // additional compatibility checks
        if (! isVersion2() && getSessionType() == null)
            return false; // sess_type cannot be omitted in v1 requests

        // DH session parameters
        if ( type.getHAlgorithm() != null) // DH session
        {
            // all three DH parameters are mandatory for encrypted sessions
            if (getDhGen() == null || getDhModulus() == null ||
                getPublicKey() == null)
                return false;
        }
        else // no-enc session
        {
            // no DH parameters may be present for plain-text sessions
            if (getDhGen() != null || getDhModulus() != null ||
                getPublicKey() != null)
                return false;
        }

        return true;
    }
}
|
package net.openid.message;
import net.openid.association.DiffieHellmanSession;
import net.openid.association.AssociationSessionType;
import net.openid.association.AssociationException;
import java.util.List;
import java.util.Arrays;
import org.apache.log4j.Logger;
/**
 * The OpenID Association Request message.
 * <p>
 * Handles OpenID 2.0 and OpenID 1.x messages.
 *
 * @see AssociationSessionType
 * @author Marius Scurtescu, Johnny Bufu
 */
public class AssociationRequest extends Message
{
    private static Logger _log = Logger.getLogger(AssociationRequest.class);
    // cache the debug flag so hot paths can skip message construction cheaply
    private static final boolean DEBUG = _log.isDebugEnabled();

    /** Value of the openid.mode parameter identifying association requests. */
    public static final String MODE_ASSOC = "associate";

    // parameters every association request must carry
    protected final static List requiredFields = Arrays.asList( new String[] {
        "openid.mode",
        "openid.session_type",
    });

    // parameters whose presence depends on protocol version / session type
    protected final static List optionalFields = Arrays.asList( new String[] {
        "openid.ns",                 // not in v1 messages
        "openid.assoc_type",         // can be missing in v1
        "openid.dh_modulus",
        "openid.dh_gen",
        "openid.dh_consumer_public"
    });

    /**
     * The Diffie-Hellman session containing the cryptographic data needed for
     * encrypting the MAC key exchange.
     * <p>
     * Null for no-encryption sessions.
     */
    private DiffieHellmanSession _dhSess;

    /**
     * Creates an Association Request message with the
     * specified association type and "no-encryption" session.
     * <p>
     * The supplied type must be one of the "no-encryption" types, otherwise
     * a DiffieHellman session is required.
     *
     * @see #AssociationRequest(AssociationSessionType, DiffieHellmanSession)
     */
    protected AssociationRequest(AssociationSessionType type)
    {
        this(type, null);
    }

    /**
     * Constructs an AssociationRequest message with the
     * specified association type and Diffie-Hellman session.
     *
     * @param dhSess Diffie-Hellman session to be used for this association;
     *               if null, a "no-encryption" session is created.
     */
    protected AssociationRequest(AssociationSessionType type,
                                 DiffieHellmanSession dhSess)
    {
        if (DEBUG)
            _log.debug("Creating association request, type: " + type +
                       ", DH session: " + dhSess);

        // openid.ns is only present in OpenID 2.0 messages
        if (type.isVersion2())
            set("openid.ns", OPENID2_NS);

        set("openid.mode", MODE_ASSOC);
        set("openid.session_type", type.getSessionType());
        set("openid.assoc_type", type.getAssociationType());

        _dhSess = dhSess;

        if (dhSess != null )
        {
            set("openid.dh_consumer_public", _dhSess.getPublicKey());

            // dh_gen / dh_modulus may be omitted when the defaults are used;
            // getDhGen() / getDhModulus() restore them on the reading side
            if (! Long.toString(DiffieHellmanSession.DEFAULT_GENERATOR)
                    .equals(_dhSess.getGenerator()))
                set("openid.dh_gen", _dhSess.getGenerator());

            if (! DiffieHellmanSession.DEFAULT_MODULUS_HEX
                    .equals(_dhSess.getModulus()))
                set("openid.dh_modulus", _dhSess.getModulus());
        }
    }

    /**
     * Constructs an AssociationRequest message from a parameter list.
     * <p>
     * Useful for processing incoming messages.
     */
    protected AssociationRequest(ParameterList params)
    {
        super(params);
    }

    /**
     * Creates an association request with a "no-encryption" session.
     *
     * @throws MessageException if the type requires a DH session or the
     *                          resulting message is invalid
     */
    public static AssociationRequest createAssociationRequest(
            AssociationSessionType type) throws MessageException
    {
        return createAssociationRequest(type, null);
    }

    /**
     * Creates an association request for the given type / DH session pair.
     *
     * @throws MessageException if the type and DH session do not match, or
     *                          the resulting message is invalid
     */
    public static AssociationRequest createAssociationRequest(
            AssociationSessionType type, DiffieHellmanSession dhSess)
            throws MessageException
    {
        AssociationRequest req = new AssociationRequest(type, dhSess);

        // make sure the association / session type matches the dhSess
        if ( type == null ||
             (dhSess == null && type.getHAlgorithm() != null) ||
             (dhSess != null && ! dhSess.getType().equals(type) ) )
            throw new MessageException(
                    "Invalid association / session combination specified: " +
                    type + ", DH session: " + dhSess);

        if ( !req.isValid() ) throw new MessageException(
                "Invalid set of parameters for the requested message type");

        if (DEBUG) _log.debug("Created association request:\n"
                              + req.keyValueFormEncoding());

        return req;
    }

    /**
     * Creates an association request from an incoming parameter list.
     *
     * @throws MessageException if the parameters do not form a valid request
     */
    public static AssociationRequest createAssociationRequest(
            ParameterList params) throws MessageException
    {
        AssociationRequest req = new AssociationRequest(params);

        if ( !req.isValid() ) throw new MessageException(
                "Invalid set of parameters for the requested message type");

        if (DEBUG)
            _log.debug("Created association request from message parameters:\n"
                       + req.keyValueFormEncoding());

        return req;
    }

    public List getRequiredFields()
    {
        return requiredFields;
    }

    /**
     * Returns true for OpenID 2.0 messages, false otherwise.
     */
    public boolean isVersion2()
    {
        return hasParameter("openid.ns") &&
               OPENID2_NS.equals(getParameterValue("openid.ns"));
    }

    /**
     * Gets the association type parameter of the message.
     */
    private String getAssociationType()
    {
        return getParameterValue("openid.assoc_type");
    }

    /**
     * Gets the session type parameter of the message.
     */
    private String getSessionType()
    {
        return getParameterValue("openid.session_type");
    }

    /**
     * Gets the association / session type of the association request.
     *
     * @throws AssociationException
     */
    public AssociationSessionType getType() throws AssociationException
    {
        return AssociationSessionType.create(
                getSessionType(), getAssociationType(), ! isVersion2() );
    }

    /**
     * Gets the Diffie-Hellman session.
     * Null for no-encryption association requests.
     */
    public DiffieHellmanSession getDHSess()
    {
        return _dhSess;
    }

    /**
     * Gets the Diffie-Hellman modulus parameter of the message, or null for
     * messages with no-encryption sessions.
     * <p>
     * Falls back to the default modulus when a DH public key is present but
     * the modulus parameter was omitted.
     */
    public String getDhModulus()
    {
        String modulus = getParameterValue("openid.dh_modulus");

        return modulus != null ?
                modulus : hasParameter("openid.dh_consumer_public") ?
                DiffieHellmanSession.DEFAULT_MODULUS_HEX : null;
    }

    /**
     * Gets the Diffie-Hellman generator parameter of the message, or null for
     * messages with no-encryption sessions.
     * <p>
     * Falls back to the default generator when a DH public key is present but
     * the generator parameter was omitted.
     */
    public String getDhGen()
    {
        String gen = getParameterValue("openid.dh_gen");

        return gen != null ?
                gen : hasParameter("openid.dh_consumer_public") ?
                Long.toString(DiffieHellmanSession.DEFAULT_GENERATOR) : null;
    }

    /**
     * Gets the Relying Party's (consumer) Diffie-Hellman public key, or null
     * for messages with no-encryption sessions.
     */
    public String getDhPublicKey()
    {
        return getParameterValue("openid.dh_consumer_public");
    }

    /**
     * Checks if the message is a valid OpenID Association Request.
     *
     * @return True if all validation checks passed, false otherwise.
     */
    public boolean isValid()
    {
        // basic checks
        if (! super.isValid()) return false;

        // association / session type checks
        // (includes most of the compatibility stuff)
        AssociationSessionType type;
        try
        {
            // throws exception for invalid session / association types
            type = getType();

            // make sure compatibility mode is the same for type and message
            if (type.isVersion2() != isVersion2())
            {
                _log.warn("Protocol version mismatch between association " +
                          "session type: " + type +
                          " and AssociationRequest message type.");
                return false;
            }

        } catch (AssociationException e) {
            _log.error("Error verifying association request validity.", e);
            return false;
        }

        // additional compatibility checks
        if (! isVersion2() && getSessionType() == null)
        {
            _log.warn("sess_type cannot be omitted in OpenID1 association requests");
            return false;
        }

        // DH session parameters
        if ( type.getHAlgorithm() != null && getDhPublicKey() == null)
        {
            _log.warn("DH consumer public key not specified.");
            return false;
        }

        // no-encryption session: no DH parameters may be present.
        // (fixed: the original condition lacked parentheses around the
        // disjunction, so every message carrying a DH public key — i.e. every
        // valid DH request — was rejected)
        if (type.getHAlgorithm() == null &&
            (getDhGen() != null || getDhModulus() != null ||
             getDhPublicKey() != null))
        {
            _log.warn("No-encryption session, but DH parameters specified.");
            return false;
        }

        return true;
    }
}
|
package com.github.maybeec.oomph.task.cli.core.impl;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.github.maybeec.oomph.task.cli.core.CommandLineUtil;
/**
*
* @author sholzer (18.10.2016)
*/
public class CLIUtilTest {
public CommandLineUtil test;
public static final String resources = "test/resources/";
public static final String linuxResources = resources + "linux/";
public static final String winResources = resources + "win/";
public static final String executeOnLinux = linuxResources + "folder";
public static final String executeOnWin = winResources + "folder";
@BeforeClass
public static void beforeClass() {
new File(linuxResources).mkdirs();
new File(winResources).mkdirs();
afterClass();
}
/**
* @throws java.lang.Exception
* @author sholzer (18.10.2016)
*/
@Before
public void setUp() throws Exception {
test = new CommandLineUtilImpl();
}
/**
* Test method for
* {@link com.github.maybeec.oomph.task.cli.core.impl.CommandLineUtilImpl#execute(java.lang.Iterable)}.
* @throws Exception
*/
@Test
public void testExecuteOnLinux() throws Exception {
if (OsUtil.isWindows()) {
System.out.println("
return;
}
List<String> commands = new LinkedList<String>();
commands.add("folder");
test.execute(linuxResources, "mkdir", commands);
File footxt = new File(executeOnLinux);
assertTrue("file not created", footxt.exists());
}
@Test
public void testExecuteOnWin() throws Exception {
if (!OsUtil.isWindows()) {
System.out.println("
return;
}
List<String> commands = new LinkedList<String>();
commands.add("folder");
test.execute(new File(winResources).getAbsolutePath(), "mkdir", commands);
File folder = new File(executeOnWin);
assertTrue("file not created", folder.exists());
}
@Test
public void testWinWhiteSpacePath() throws Exception {
if (!OsUtil.isWindows()) {
System.out.println("
return;
}
Path target = Paths.get(winResources + "new folder");
Files.createDirectories(target);
}
@AfterClass
public static void afterClass() {
delFile(executeOnLinux);
delFile(executeOnWin);
}
public static void delFile(String path) {
File testExecuteOnLinux = new File(path);
if (testExecuteOnLinux.exists()) {
testExecuteOnLinux.delete();
}
}
}
|
package openblocks.common.entity;
import java.util.HashMap;
import net.minecraft.entity.EntityAgeable;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.ai.*;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.passive.EntityTameable;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.World;
import openblocks.api.IMutant;
import openblocks.api.IMutantDefinition;
import openblocks.api.MutantRegistry;
import com.google.common.base.Objects;
import com.google.common.io.ByteArrayDataInput;
import com.google.common.io.ByteArrayDataOutput;
import cpw.mods.fml.common.registry.IEntityAdditionalSpawnData;
/**
 * A tameable "mutant" creature whose six body parts (head, body, arms, wings,
 * legs, tail) are each described by the class of another living entity.
 * Part classes are resolved to {@link IMutantDefinition}s through
 * {@link MutantRegistry}, and are persisted both to NBT and to the
 * client-spawn packet as entity-registry id strings.
 */
public class EntityMutant extends EntityTameable implements IEntityAdditionalSpawnData, IMutant {
// Source entity class for each body part; null/unknown classes map to "" ids.
private Class<? extends EntityLivingBase> head;
private Class<? extends EntityLivingBase> body;
private Class<? extends EntityLivingBase> arms;
private Class<? extends EntityLivingBase> wings;
private Class<? extends EntityLivingBase> legs;
private Class<? extends EntityLivingBase> tail;
public EntityMutant(World world) {
super(world);
setSize(0.6F, 1.8F);
getNavigator().setAvoidsWater(true);
// AI tasks: lower numbers run with higher priority; order below is intentional
this.tasks.addTask(0, new EntityAISwimming(this));
this.tasks.addTask(1, new EntityAIAttackOnCollide(this, 1.0D, true));
this.tasks.addTask(2, new EntityAIFollowOwner(this, 1.0D, 10.0F, 2.0F));
this.tasks.addTask(3, new EntityAIPanic(this, 1.25D));
this.tasks.addTask(4, new EntityAIWander(this, 1.0D));
this.tasks.addTask(5, new EntityAIWatchClosest(this, EntityPlayer.class, 6.0F));
this.tasks.addTask(6, new EntityAILookIdle(this));
// target selection: defend/assist the owner first, then retaliate
this.targetTasks.addTask(0, new EntityAIOwnerHurtByTarget(this));
this.targetTasks.addTask(1, new EntityAIOwnerHurtTarget(this));
this.targetTasks.addTask(3, new EntityAIHurtByTarget(this, true));
// mutants spawn already tamed
setTamed(true);
}
@Override
public boolean isAIEnabled() {
return true;
}
@Override
protected void applyEntityAttributes() {
super.applyEntityAttributes();
getEntityAttribute(SharedMonsterAttributes.maxHealth).setAttribute(10.0D);
getEntityAttribute(SharedMonsterAttributes.movementSpeed).setAttribute(0.25D);
}
@Override
public EntityAgeable createChild(EntityAgeable entityageable) {
// NOTE(review): the child does not inherit any part traits from its parents
return new EntityMutant(worldObj);
}
@Override
public void writeSpawnData(ByteArrayDataOutput data) {
// spawn-packet field order must match readSpawnData() exactly
data.writeUTF(getEntityIdForClass(head));
data.writeUTF(getEntityIdForClass(body));
data.writeUTF(getEntityIdForClass(arms));
data.writeUTF(getEntityIdForClass(wings));
data.writeUTF(getEntityIdForClass(legs));
data.writeUTF(getEntityIdForClass(tail));
}
@Override
public void readSpawnData(ByteArrayDataInput data) {
// reads fields in the same order writeSpawnData() emitted them
head = getEntityClassForId(data.readUTF());
body = getEntityClassForId(data.readUTF());
arms = getEntityClassForId(data.readUTF());
wings = getEntityClassForId(data.readUTF());
legs = getEntityClassForId(data.readUTF());
tail = getEntityClassForId(data.readUTF());
}
// Maps an entity class to its registry id string; unknown classes become "".
private String getEntityIdForClass(Class<? extends EntityLivingBase> klazz) {
return Objects.firstNonNull((String)EntityList.classToStringMapping.get(klazz), "");
}
// Reverse lookup: registry id string to entity class (null if unknown).
@SuppressWarnings("unchecked")
private Class<? extends EntityLivingBase> getEntityClassForId(String id) {
return (Class<? extends EntityLivingBase>)EntityList.stringToClassMapping.get(id);
}
@Override
public IMutantDefinition getBody() {
return MutantRegistry.getDefinition(body);
}
@Override
public IMutantDefinition getHead() {
return MutantRegistry.getDefinition(head);
}
@Override
public IMutantDefinition getArms() {
return MutantRegistry.getDefinition(arms);
}
@Override
public IMutantDefinition getWings() {
return MutantRegistry.getDefinition(wings);
}
@Override
public IMutantDefinition getLegs() {
return MutantRegistry.getDefinition(legs);
}
@Override
public IMutantDefinition getTail() {
return MutantRegistry.getDefinition(tail);
}
@Override
public int getLegHeight() {
return getLegs().getLegHeight();
}
@Override
public int getBodyHeight() {
return getBody().getBodyHeight();
}
@Override
public float getArmSwingProgress(float scale) {
return getSwingProgress(scale);
}
@Override
public int getNumberOfLegs() {
return getLegs().getNumberOfLegs();
}
// NOTE(review): the dnas argument is currently ignored - every part is
// hard-coded to EntityCreeper; presumably a placeholder. TODO confirm.
public void setTraitsFromMap(HashMap<String, Integer> dnas) {
head = EntityCreeper.class;
body = EntityCreeper.class;
arms = EntityCreeper.class;
legs = EntityCreeper.class;
wings = EntityCreeper.class;
tail = EntityCreeper.class;
}
@Override
public void writeEntityToNBT(NBTTagCompound tag) {
super.writeEntityToNBT(tag);
// persist each part as its entity-registry id string
tag.setString("head", getEntityIdForClass(head));
tag.setString("body", getEntityIdForClass(body));
tag.setString("arms", getEntityIdForClass(arms));
tag.setString("legs", getEntityIdForClass(legs));
tag.setString("wings", getEntityIdForClass(wings));
tag.setString("tail", getEntityIdForClass(tail));
}
@Override
public void readEntityFromNBT(NBTTagCompound tag) {
super.readEntityFromNBT(tag);
head = getEntityClassForId(tag.getString("head"));
body = getEntityClassForId(tag.getString("body"));
arms = getEntityClassForId(tag.getString("arms"));
legs = getEntityClassForId(tag.getString("legs"));
wings = getEntityClassForId(tag.getString("wings"));
tail = getEntityClassForId(tag.getString("tail"));
}
}
|
// This file is part of the "OPeNDAP Web Coverage Service Project."
// Authors:
// Haibo Liu <haibo@iri.columbia.edu>
// Nathan David Potter <ndp@opendap.org>
// Benno Blumenthal <benno@iri.columbia.edu>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// You can contact OPeNDAP, Inc. at PO Box 112, Saunderstown, RI. 02874-0112.
package opendap.semantics.IRISail;
import opendap.xml.Transformer;
import org.jdom.output.XMLOutputter;
import org.openrdf.model.*;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.query.*;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.slf4j.Logger;
import javax.xml.transform.stream.StreamSource;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.*;
/**
* This class is used to populate the repository. A particular URL is only imported once. Bad URLs
* are skipped. The string vector <code>imports</code> tracks all documents that are imported
* into the repository. The string hashset <code> urlsToBeIgnored</code> is used to track bad urls
* that are skipped. The method <code>importReferencedRdfDocs</code> repeatedly calls method
* <code>findNeededRDFDocuments</code> and <code>addNeededDocuments</code> until no new needed
* RDF documents are found.
* The method <code>findNeededRDFDocuments</code> queries the repository and passes those RDF
* docuemts to <code>addNeededDocuments</code> which in turn adds them into the repository.
*
*
*
*/
public class RdfImporter {
private Logger log;
private HashSet<String> urlsToBeIgnored;
private Vector<String> imports;
private String localResourceDir;
/**
 * Creates an importer that can resolve local copies of transform resources
 * from the given directory.
 *
 * @param resourceDir directory holding local resource copies
 */
public RdfImporter(String resourceDir) {
    this.localResourceDir = resourceDir;
    this.urlsToBeIgnored = new HashSet<String>();
    this.imports = new Vector<String>();
    this.log = org.slf4j.LoggerFactory.getLogger(this.getClass());
}
/** Forgets all previously imported documents and all blacklisted URLs. */
public void reset() {
    imports.clear();
    urlsToBeIgnored.clear();
}
/**
 * Returns the local resource directory expressed as a {@code file:} URL,
 * adding the scheme prefix only when it is not already present.
 */
public String getLocalResourceDirUrl(){
    return localResourceDir.startsWith("file:")
            ? localResourceDir
            : "file:" + localResourceDir;
}
/**
 * Find and import all needed RDF documents into the repository, repeating
 * the discover/import cycle until a discovery round finds nothing new.
 *
 * @param repository      repository to populate
 * @param doNotImportUrls URLs to blacklist up front (may be null)
 * @return true if at least one document was added to the repository
 */
public boolean importReferencedRdfDocs(Repository repository, Vector<String> doNotImportUrls) {

    if (doNotImportUrls != null)
        urlsToBeIgnored.addAll(doNotImportUrls);

    boolean changed = false;
    Vector<String> pending = new Vector<String>();

    // re-query after every import round; stop once nothing new is discovered
    for (findNeededRDFDocuments(repository, pending);
         !pending.isEmpty();
         findNeededRDFDocuments(repository, pending)) {

        // |= (not ||) so the import always runs regardless of prior rounds
        changed |= addNeededRDFDocuments(repository, pending);
        pending.clear();
    }

    return changed;
}
/**
* Find all RDF documents that are referenced by existing documents in the repository.
*
* @param repository
* @param rdfDocs
*/
private void findNeededRDFDocuments(Repository repository, Vector<String> rdfDocs) {
TupleQueryResult result = null;
List<String> bindingNames;
RepositoryConnection con = null;
try {
con = repository.getConnection();
String queryString = "(SELECT doc "
+ "FROM {doc} rdf:type {rdfcache:"+Terms.startingPointType +"} "
+ "union "
+ "SELECT doc "
+ "FROM {tp} rdf:type {rdfcache:"+Terms.startingPointType +"}; rdfcache:"+Terms.dependsOnContext+" {doc}) "
+ "MINUS "
+ "SELECT doc "
+ "FROM CONTEXT "+"rdfcache:"+Terms.cacheContext+" {doc} rdfcache:"+Terms.lastModifiedContext+" {lastmod} "
+ "USING NAMESPACE "
+ "rdfcache = <" + Terms.rdfCacheNamespace + ">";
log.debug("Query for NeededRDFDocuments: " + queryString);
TupleQuery tupleQuery = con.prepareTupleQuery(QueryLanguage.SERQL,
queryString);
result = tupleQuery.evaluate();
while (result.hasNext()) {
BindingSet bindingSet = result.next();
Value firstValue = bindingSet.getValue("doc");
String doc = firstValue.stringValue();
if (!rdfDocs.contains(doc) && !imports.contains(doc)
&& !urlsToBeIgnored.contains(doc)
&& doc.startsWith("http:
rdfDocs.add(doc);
log.debug("Adding to rdfDocs: " + doc);
}
}
} catch (QueryEvaluationException e) {
log.error("Caught an QueryEvaluationException! Msg: "
+ e.getMessage());
} catch (RepositoryException e) {
log.error("Caught RepositoryException! Msg: " + e.getMessage());
} catch (MalformedQueryException e) {
log.error("Caught MalformedQueryException! Msg: " + e.getMessage());
}
finally {
if (result != null) {
try {
result.close();
} catch (QueryEvaluationException e) {
log.error("Caught a QueryEvaluationException! Msg: "
+ e.getMessage());
}
}
try {
con.close();
} catch (RepositoryException e) {
log.error("Caught a RepositoryException! in findNeededRDFDocuments() Msg: "
+ e.getMessage());
}
}
log.info("Number of needed files identified: "
+ rdfDocs.size());
}
/**
* Add each of the RDF documents whose URL's are in the passed Vector to the Repository.
*
* @param repository
* @param rdfDocs-holds RDF documents to import
* @return true if one or more RDF document is added into the repository
*
*/
private boolean addNeededRDFDocuments(Repository repository, Vector<String> rdfDocs) {
long inferStartTime, inferEndTime;
inferStartTime = new Date().getTime();
String documentURL;
RepositoryConnection con = null;
int skipCount;
String contentType;
HttpURLConnection httpConnection = null;
InputStream importIS = null;
boolean addedDocument = false;
try {
con = repository.getConnection();
log.debug("addNeededRDFDocuments(): rdfDocs.size=" + rdfDocs.size());
skipCount = 0;
while (!rdfDocs.isEmpty()) {
documentURL = rdfDocs.remove(0);
try {
log.debug("addNeededRDFDocuments(): Checking import URL: " + documentURL);
if (urlsToBeIgnored.contains(documentURL)) {
log.error("addNeededRDFDocuments(): Previous server error, Skipping " + documentURL);
} else {
URL myurl = new URL(documentURL);
int rsCode;
httpConnection = (HttpURLConnection) myurl.openConnection();
log.debug("addNeededRDFDocuments(): Connected to import URL: " + documentURL);
rsCode = httpConnection.getResponseCode();
contentType = httpConnection.getContentType();
log.debug("addNeededRDFDocuments(): Got HTTP status code: " + rsCode);
log.debug("addNeededRDFDocuments(): Got Content Type: " + contentType);
if (rsCode == -1) {
log.error("addNeededRDFDocuments(): Unable to get an HTTP status code for resource "
+ documentURL + " WILL NOT IMPORT!");
urlsToBeIgnored.add(documentURL);
} else if (rsCode != 200) {
log.error("addNeededRDFDocuments(): Error! HTTP status code " + rsCode + " Skipping documentURL " + documentURL);
urlsToBeIgnored.add(documentURL);
} else {
log.debug("addNeededRDFDocuments(): Import URL appears valid ( " + documentURL + " )");
String transformToRdfUrl = RepositoryOps.getUrlForTransformToRdf(repository, documentURL);
if (transformToRdfUrl != null){
log.info("addNeededRDFDocuments(): Transforming " + documentURL +" with "+ transformToRdfUrl);
if(Terms.localResources.containsKey(transformToRdfUrl)){
transformToRdfUrl = getLocalResourceDirUrl() + Terms.localResources.get(transformToRdfUrl);
log.debug("addNeededRDFDocuments(): Transform URL has local code based copy: "+transformToRdfUrl);
}
Transformer t = new Transformer(transformToRdfUrl);
InputStream inStream = t.transform(documentURL);
log.info("addNeededRDFDocuments(): Finished transforming RDFa " + documentURL);
importUrl(con, documentURL, contentType, inStream);
addedDocument = true;
} else if (documentURL.endsWith(".xsd")) {
// XML Schema Document has known transform.
transformToRdfUrl = getLocalResourceDirUrl() + "xsl/xsd2owl.xsl";
log.info("addNeededRDFDocuments(): Transforming Schema Document'" + documentURL +"' with '"+ transformToRdfUrl);
Transformer t = new Transformer(transformToRdfUrl);
InputStream inStream = t.transform(documentURL);
log.info("addNeededRDFDocuments(): Finished transforming Xml Schema Document: '" + documentURL+"'");
importUrl(con, documentURL, contentType, inStream);
addedDocument = true;
} else if(documentURL.endsWith(".owl") || documentURL.endsWith(".rdf")) {
// OWL is RDF and so is the repository - no transform needed.
importIS = httpConnection.getInputStream();
importUrl(con, documentURL, contentType, importIS);
addedDocument = true;
} else if ((contentType != null) &&
(contentType.equalsIgnoreCase("text/plain") ||
contentType.equalsIgnoreCase("text/xml") ||
contentType.equalsIgnoreCase("application/xml") ||
contentType.equalsIgnoreCase("application/rdf+xml"))
) {
importUrl(con, documentURL, contentType, importIS);
log.info("addNeededRDFDocuments(): Imported non owl/xsd from " + documentURL);
addedDocument = true;
} else {
log.warn("addNeededRDFDocuments(): SKIPPING Import URL '" + documentURL + "' It does not appear to reference a " +
"document that I know how to process.");
urlsToBeIgnored.add(documentURL); //skip this file
skipCount++;
}
log.info("addNeededRDFDocuments(): Total non owl/xsd files skipped: " + skipCount);
}
} // while (!rdfDocs.isEmpty()
} catch (Exception e) {
log.error("addNeededRDFDocuments(): Caught " + e.getClass().getName() + " Message: " + e.getMessage());
if (documentURL != null){
log.warn("addNeededRDFDocuments(): SKIPPING Import URL '"+ documentURL +"' Because bad things happened when we tried to get it.");
urlsToBeIgnored.add(documentURL); //skip this file
}
} finally {
if (importIS != null)
try {
importIS.close();
} catch (IOException e) {
log.error("addNeededRDFDocuments(): Caught " + e.getClass().getName() + " Message: " + e.getMessage());
}
if (httpConnection != null)
httpConnection.disconnect();
}
}
}
catch (RepositoryException e) {
log.error("addNeededRDFDocuments(): Caught " + e.getClass().getName() + " Message: " + e.getMessage());
}
finally {
if (con != null) {
try {
con.close();
} catch (RepositoryException e) {
log.error("addNeededRDFDocuments(): Caught an RepositoryException! in addNeededRDFDocuments() Msg: "
+ e.getMessage());
}
}
inferEndTime = new Date().getTime();
double inferTime = (inferEndTime - inferStartTime) / 1000.0;
log.debug("addNeededRDFDocuments(): Import takes " + inferTime + " seconds");
}
return addedDocument;
}
/**
 * Adds an individual RDF document into the repository.
 * @param con connection to the repository
 * @param importURL URL of the RDF document to import
 * @param contentType content type of the RDF document
 * @param importIS input stream of the RDF document
 * @throws IOException
 * @throws RDFParseException
 * @throws RepositoryException
 */
private void importUrl(RepositoryConnection con, String importURL, String contentType, InputStream importIS) throws IOException, RDFParseException, RepositoryException {
    if (this.imports.contains(importURL)) {
        // Guard: every URL is imported at most once per run.
        log.error("Import URL '" + importURL + "' already has been imported! SKIPPING!");
        return;
    }
    log.info("Importing URL " + importURL);
    final ValueFactory valueFactory = con.getValueFactory();
    // The document's own URL doubles as the context URI it is loaded under.
    final URI contextUri = new URIImpl(importURL);
    con.add(importIS, importURL, RDFFormat.RDFXML, (Resource) contextUri);
    // Record bookkeeping metadata (last-modified time and content type) for the new context.
    RepositoryOps.setLTMODContext(importURL, con, valueFactory);
    RepositoryOps.setContentTypeContext(importURL, contentType, con, valueFactory);
    log.info("Finished importing URL " + importURL);
    this.imports.add(importURL);
}
// Ad-hoc smoke test: fetches the WCS 1.1 master schema over HTTP, runs it through
// the xsd2owl transform, and dumps the resulting document to stdout.
public static void main(String[] args) throws Exception {
    StreamSource httpSource = new StreamSource("http://schemas.opengis.net/wcs/1.1/wcsAll.xsd");
    // NOTE(review): hard-coded developer-machine path — this main() only works on the
    // original author's workstation; confirm whether it is still needed.
    StreamSource fileSource = new StreamSource("file:/Users/ndp/OPeNDAP/Projects/Hyrax/swdev/trunk/olfs/resources/WCS/xsl/xsd2owl.xsl");
    StreamSource transform = fileSource;
    StreamSource document = httpSource;
    XMLOutputter xmlo = new XMLOutputter();
    xmlo.output(Transformer.getTransformedDocument(document,transform),System.out);
}
}
|
package com.tesora.dve.common;
import org.apache.log4j.Logger;
/**
 * Static facade over a per-thread {@link PEContext}, used to accumulate
 * diagnostic key/value frames that can be dumped into the debug log.
 * All state lives in a {@link ThreadLocal}; the class itself is stateless
 * apart from the global enable flag.
 */
public class PEThreadContext {

    // Fixed: the original logged under PEContext.class, which looks like a
    // copy/paste slip from that class; the logger now carries this class's
    // own category so log4j filtering works as expected.
    private static final Logger LOGGER = Logger.getLogger(PEThreadContext.class);

    // NOTE(review): read from multiple threads without volatile/synchronization;
    // callers appear to set this once at startup via setEnabled() — confirm.
    private static boolean enabled = false;

    private static final ThreadLocal<PEContext> THREAD_CONTEXT = new ThreadLocal<PEContext>() {
        @Override
        protected PEContext initialValue() {
            // Threads that never saw setEnabled(true) get the no-op context,
            // so the tracing calls below cost almost nothing when disabled.
            return enabled ? new PEContext() : PEContext.NO_OP_CONTEXT;
        }
    };

    /** Pushes a new frame named after the given class; returns {@link Ref} for chaining. */
    public static Ref pushFrame(Class<?> clazz) {
        context().push(clazz);
        return REF;
    }

    /** Pushes a new frame with the given name; returns {@link Ref} for chaining. */
    public static Ref pushFrame(String name) {
        context().push(name);
        return REF;
    }

    /** Pops the most recently pushed frame. */
    public static void popFrame() {
        context().pop();
    }

    /** Returns the value stored under {@code key} in the current thread's context. */
    public static String get(String key) {
        return context().get(key);
    }

    /** Stores {@code key=value} in the current frame; returns {@link Ref} for chaining. */
    public static Ref put(String key, Object value) {
        context().put(key, value);
        return REF;
    }

    /** Removes {@code key} from the current thread's context. */
    public static void remove(String key) {
        context().remove(key);
    }

    /** Renders the current thread's whole context as a string. */
    public static String asString() {
        return context().toString();
    }

    /** Dumps the current context at DEBUG level, if tracing and DEBUG are both on. */
    public static void logDebug() {
        if (enabled && LOGGER.isDebugEnabled())
            LOGGER.debug(asString());
    }

    /** Returns a copy of the current thread's context, suitable for handing to another thread. */
    public static PEContext copy() {
        return context().copy();
    }

    private static PEContext context() {
        return THREAD_CONTEXT.get();
    }

    /**
     * Installs a context copied from another thread. If the source thread differs
     * from the current one, a "ThreadSwitch" frame records the hand-off.
     */
    public static void inherit(PEContext context) {
        THREAD_CONTEXT.set(context);
        if (!Thread.currentThread().getName().equals(context.getSourceThread()))
            pushFrame("ThreadSwitch")
                    .put("from", context.getSourceThread())
                    .put("to", Thread.currentThread().getName());
    }

    /** Discards the current thread's context entirely. */
    public static void clear() {
        THREAD_CONTEXT.remove();
    }

    /*
     * This is just syntactic sugar to allow chaining push/put calls.
     *
     * For example: PEThreadContext.push("foo").put("a", "1")
     */
    private static final Ref REF = new Ref();

    public static final class Ref {
        public Ref put(String key, Object value) {
            return PEThreadContext.put(key, value);
        }

        public Ref pushFrame(String name) {
            return PEThreadContext.pushFrame(name);
        }

        public void logDebug() {
            PEThreadContext.logDebug();
        }
    }

    /**
     * Globally enables or disables context tracking for threads created/reset
     * afterwards, and resets the calling thread's context immediately.
     * DEBUG-level logging force-enables tracking regardless of the argument.
     */
    public static void setEnabled(boolean enable) {
        // Use a local instead of reassigning the parameter (clearer, same behavior).
        final boolean effective = enable || LOGGER.isDebugEnabled();
        PEThreadContext.enabled = effective;
        THREAD_CONTEXT.set(effective ? new PEContext() : PEContext.NO_OP_CONTEXT);
    }
}
|
package org.appwork.utils.locale;
/**
* @author thomas
*
*/
public enum APPWORKUTILS implements Translate {
    ABSTRACTDIALOG_BUTTON_OK("Ok"),
    ABSTRACTDIALOG_BUTTON_CANCEL("Cancel"),
    ABSTRACTDIALOG_STYLE_SHOW_DO_NOT_DISPLAY_AGAIN("Don't show this again"),
    DIALOG_CONFIRMDIALOG_TITLE("Please confirm!"),
    DIALOG_INPUT_TITLE("Please enter!"),
    DIALOG_PASSWORD_TITLE("Please enter!"),
    DIALOG_MESSAGE_TITLE("Message"),
    DIALOG_SLIDER_TITLE("Please enter!"),
    LOGINDIALOG_LABEL_USERNAME("Login"),
    LOGINDIALOG_LABEL_PASSWORD("Password"),
    LOGINDIALOG_LABEL_PASSWORD_REPEAT("Repeat Password"),
    LOGINDIALOG_BUTTON_REGISTER("Create new User"),
    LOGINDIALOG_CHECKBOX_REMEMBER("Remember"),
    LOGINDIALOG_BUTTON_LOGIN("Login"),
    PASSWORDDIALOG_PASSWORDCHANGE_OLDPASSWORD("Old Password:"),
    PASSWORDDIALOG_PASSWORDCHANGE_NEWPASSWORD_REPEAT("Confirm Password:"),
    PASSWORDDIALOG_PASSWORDCHANGE_NEWPASSWORD("New Password:"),
    SEARCHDIALOG_BUTTON_FIND("Find"),
    SEARCHDIALOG_CHECKBOX_CASESENSITIVE("Case sensitive"),
    SEARCHDIALOG_CHECKBOX_REGULAREXPRESSION("Regular Expressions"),
    TIMERDIALOG_MESSAGE_COUNTDOWN_STARTING("Countdown starting..."),
    TIMERDIALOG_TOOLTIP_TIMERLABEL("This dialog has a countdown and closes after a few seconds. Click to cancel the countdown"),
    DIALOG_FILECHOOSER_TOOLTIP_UPFOLDER("Switch to parent folder"),
    DIALOG_FILECHOOSER_TOOLTIP_HOMEFOLDER("Switch to Home"),
    DIALOG_FILECHOOSER_TOOLTIP_NEWFOLDER("Create new folder"),
    DIALOG_FILECHOOSER_TOOLTIP_DETAILS("Switch to detailed view"),
    DIALOG_FILECHOOSER_TOOLTIP_LIST("Switch to list view"),
    EXTTABLE_SEARCH_DIALOG_TITLE("Search table"),
    DIALOG_ERROR_TITLE("Error Occured");
    // ENDOFENUMS

    /**
     * Renders every entry as a "APPWORKUTILS:::NAME = translation" line,
     * padded so the '=' signs line up, prefixed with the active locale.
     *
     * @return the aligned listing of all translations
     */
    public static String list() {
        final StringBuilder sb = new StringBuilder();
        // Two appends instead of concatenation inside append().
        sb.append("# APPWORK UTILS Locale: ").append(Loc.getLocale());
        int max = 0;
        for (final APPWORKUTILS entry : APPWORKUTILS.values()) {
            max = Math.max(entry.name().length(), max);
        }
        for (final APPWORKUTILS entry : APPWORKUTILS.values()) {
            sb.append("\r\nAPPWORKUTILS:::");
            sb.append(entry.name());
            sb.append(" ");
            // Pad every name out to the longest one so the '=' column aligns.
            for (int i = entry.name().length(); i < max; i++) {
                sb.append(" ");
            }
            sb.append(" = ");
            sb.append(Loc.L("APPWORKUTILS:::" + entry.name(), entry.defaultTranslation).replace("\r", "\\r").replace("\n", "\\n"));
        }
        return sb.toString();
    }

    /** Drops every entry's cached translation, forcing a fresh lookup on next use. */
    public static void reset() {
        for (final APPWORKUTILS rsm : APPWORKUTILS.values()) {
            rsm.cache = null;
        }
    }

    /**
     * Stores the numbers of wildcards (<code>%s</code>) in this string.
     */
    private final int wildCardCount;

    /**
     * Stores the DefaultTranslation.
     */
    private final String defaultTranslation;

    /**
     * Stores the translated value or <code>null</code>, if it wasn't translated
     * yet.
     */
    private String cache = null;

    private APPWORKUTILS(final String defaultString) {
        // Chain to the two-arg constructor so the assignment logic lives in one place.
        this(defaultString, 0);
    }

    private APPWORKUTILS(final String defaultString, final int wildCards) {
        this.defaultTranslation = defaultString;
        this.wildCardCount = wildCards;
    }

    /** @return the untranslated default text for this entry */
    public String getDefaultTranslation() {
        return this.defaultTranslation;
    }

    /** @return how many <code>%s</code> wildcards this entry's text expects */
    public int getWildCardCount() {
        return this.wildCardCount;
    }

    /** @return the translated text with no wildcard substitution */
    public String s() {
        // Call s(null) directly rather than going through the deprecated toString().
        return this.s((Object[]) null);
    }

    /*
     * (non-Javadoc)
     *
     * @see org.appwork.utils.locale.Translate#toString(java.lang.Object[])
     */
    public String s(final Object... args) {
        if (args != null && args.length > 0) {
            // Formatted lookups bypass the cache since the result depends on args.
            return Loc.LF("APPWORKUTILS:::" + this.name(), this.defaultTranslation, args);
        } else {
            if (this.cache != null) { return this.cache; }
            this.cache = Loc.L("APPWORKUTILS:::" + this.name(), this.defaultTranslation);
            return this.cache;
        }
    }

    @Override
    @Deprecated
    public String toString() {
        return this.s((Object[]) null);
    }
}
|
package org.barbon.mangaget.scrape;
import android.os.AsyncTask;
import android.net.http.AndroidHttpClient;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.util.EntityUtils;
import org.apache.http.client.methods.HttpGet;
/**
 * Asynchronous HTTP downloader built on Android's AsyncTask. Content can be
 * delivered either into a file ({@link FileDownloadTarget}) or into an
 * in-memory stream ({@link StringDownloadTarget}); progress is reported to an
 * {@link OnDownloadProgress} listener on the UI thread.
 *
 * NOTE(review): the Target/Task/Destination classes are non-static inner
 * classes, so each instance keeps a hidden reference to its Downloader.
 */
public class Downloader {
    /**
     * FilterInputStream that counts how many bytes have passed through it,
     * used to report download progress against the Content-Length.
     */
    protected static class CountingInputStream extends FilterInputStream {
        // Running total of bytes read so far; -1 reads (EOF) are not counted.
        private long byteCount = 0;
        public CountingInputStream(InputStream in) {
            super(in);
        }
        /** @return number of bytes read through this stream so far */
        public long getCount() {
            return byteCount;
        }
        @Override
        public int read(byte[] buffer) throws IOException {
            int size = in.read(buffer);
            if (size != -1)
                byteCount += size;
            return size;
        }
        @Override
        public int read() throws IOException {
            int ch = in.read();
            if (ch != -1)
                byteCount += 1;
            return ch;
        }
        @Override
        public int read(byte[] buffer, int offset, int count)
            throws IOException {
            int size = in.read(buffer, offset, count);
            if (size != -1)
                byteCount += size;
            return size;
        }
    }
    /** Callbacks invoked on the UI thread as the download progresses. */
    public interface OnDownloadProgress {
        public void downloadStarted();
        // total may be -1 when the server did not send a Content-Length.
        public void downloadProgress(long downloaded, long total);
        public void downloadComplete(boolean success);
    }
    /** Strategy for where downloaded bytes go; driven from the background thread. */
    public interface DownloadTarget {
        public void startDownload(InputStream stream, String encoding)
            throws Exception;
        // Returns the chunk size read, or -1 at end of stream.
        public long downloadChunk()
            throws Exception;
        public void completeDownload(boolean success)
            throws Exception;
    }
    /** No-op convenience base so callers can override only the callbacks they need. */
    public static class OnDownloadProgressAdapter
        implements OnDownloadProgress {
        @Override public void downloadStarted() { }
        @Override public void downloadProgress(long downloaded, long total) { }
        @Override public void downloadComplete(boolean success) { }
    }
    /** Buffers the whole response in memory and exposes it via destination.stream. */
    public class StringDownloadTarget implements DownloadTarget {
        private static final int BUFFER_SIZE = 1024;
        private DownloadDestination destination;
        private InputStream in;
        private ByteArrayOutputStream out;
        private byte[] buffer = new byte[BUFFER_SIZE];
        public StringDownloadTarget(DownloadDestination _destination) {
            destination = _destination;
        }
        @Override
        public void startDownload(InputStream stream, String encoding) {
            destination.charSet = encoding;
            in = stream;
            out = new ByteArrayOutputStream();
        }
        @Override
        public long downloadChunk() throws IOException {
            int size = in.read(buffer);
            if (size != -1)
                out.write(buffer, 0, size);
            return size;
        }
        @Override
        public void completeDownload(boolean success) {
            // Result is published regardless of success; callers should check the
            // downloadComplete(success) flag before trusting the stream contents.
            destination.stream = new ByteArrayInputStream(out.toByteArray());
        }
    }
    /** Streams the response into the file named by destination.path. */
    public class FileDownloadTarget implements DownloadTarget {
        private static final int BUFFER_SIZE = 1024;
        private DownloadDestination destination;
        private InputStream in;
        private OutputStream out;
        private byte[] buffer = new byte[BUFFER_SIZE];
        public FileDownloadTarget(DownloadDestination _destination) {
            destination = _destination;
        }
        @Override
        public void startDownload(InputStream stream, String encoding)
            throws IOException {
            destination.charSet = encoding;
            in = stream;
            out = new FileOutputStream(destination.path);
        }
        @Override
        public long downloadChunk() throws IOException {
            int size = in.read(buffer);
            if (size != -1)
                out.write(buffer, 0, size);
            return size;
        }
        @Override
        public void completeDownload(boolean success) throws IOException {
            // NOTE(review): partially-written file is left on disk when success
            // is false — confirm whether callers expect cleanup here.
            out.close();
        }
    }
    /**
     * Background task: opens the connection, then pumps chunks through the
     * DownloadTarget, publishing progress after each chunk.
     */
    private class DownloadTask extends AsyncTask<String, Long, Boolean> {
        private OnDownloadProgress progressListener;
        private DownloadTarget downloadTarget;
        private CountingInputStream byteCounter;
        public DownloadTask(OnDownloadProgress listener,
                            DownloadTarget target) {
            progressListener = listener;
            downloadTarget = target;
        }
        @Override
        public void onPreExecute() {
            // nothing to do
        }
        // NOTE(review): no @Override here (unlike the other lifecycle methods);
        // the signature does match AsyncTask.doInBackground — confirm and annotate.
        public Boolean doInBackground(String... params) {
            // values[0] == 0 signals "started" to onProgressUpdate.
            publishProgress(0L);
            AndroidHttpClient client =
                AndroidHttpClient.newInstance("MangaGet/1.0");
            long totalSize = -1;
            try {
                // params[0] is the URL to fetch.
                HttpResponse response = client.execute(new HttpGet(params[0]));
                HttpEntity entity = response.getEntity();
                InputStream content = entity.getContent();
                totalSize = entity.getContentLength();
                byteCounter = new CountingInputStream(content);
                // NOTE(review): EntityUtils.getContentCharSet is deprecated in
                // later HttpClient versions; also, if startDownload throws,
                // completeDownload is never invoked — confirm intended.
                downloadTarget.startDownload(
                    byteCounter, EntityUtils.getContentCharSet(entity));
            }
            catch (Exception e) {
                client.close();
                return false;
            }
            try {
                for (;;) {
                    long size = downloadTarget.downloadChunk();
                    if (size == -1)
                        break;
                    // values[0] == 1 signals a progress tick.
                    publishProgress(1L, byteCounter.getCount(), totalSize);
                }
            }
            catch(Exception e) {
                client.close();
                return false;
            }
            client.close();
            return true;
        }
        @Override
        public void onProgressUpdate(Long... values) {
            // TODO abort on exception
            if (values[0] == 0)
                progressListener.downloadStarted();
            else
                progressListener.downloadProgress(values[1], values[2]);
        }
        @Override
        public void onPostExecute(Boolean result) {
            progressListener.downloadComplete(result);
        }
    }
    /**
     * Handle returned to the caller immediately; its fields are filled in
     * asynchronously as the download completes.
     */
    public class DownloadDestination {
        File path;
        InputStream stream;
        String charSet;
        String baseUrl;
        public DownloadDestination() {
        }
        public DownloadDestination(File _path) {
            path = _path;
        }
    }
    /** Starts an asynchronous download of url into the given file. */
    public DownloadDestination requestDownload(
            String url, OnDownloadProgress listener, File path) {
        DownloadDestination destination = new DownloadDestination(path);
        DownloadTarget target = new FileDownloadTarget(destination);
        DownloadTask task = new DownloadTask(listener, target);
        task.execute(url);
        return destination;
    }
    /** Starts an asynchronous download of url into an in-memory stream. */
    public DownloadDestination requestDownload(
            String url, OnDownloadProgress listener) {
        DownloadDestination destination = new DownloadDestination();
        DownloadTarget target = new StringDownloadTarget(destination);
        DownloadTask task = new DownloadTask(listener, target);
        task.execute(url);
        return destination;
    }
}
|
package org.biojava.bio.seq.io;
import java.io.*;
import java.util.*;
import org.biojava.utils.*;
import org.biojava.bio.*;
import org.biojava.bio.symbol.*;
import org.biojava.bio.seq.*;
/**
* Encapsulate state used while reading data from a specific
* Genbank file.
*
* @author Thomas Down
* @author Greg Cox
*/
class GenbankContext implements org.biojava.utils.ParseErrorListener, org.biojava.utils.ParseErrorSource
{
    // Parser states: which section of the Genbank record is currently being read.
    private final static int HEADER = 1;
    private final static int FEATURES = 2;
    private final static int SEQUENCE = 3;
    // NOTE(review): VERSION_LENGTH is never referenced in this class — confirm before removing.
    private final static int VERSION_LENGTH = 11;
    // Width of the fixed tag column at the start of header lines.
    private final static int TAG_LENGTH = 12;

    private int status;
    private SymbolTokenization symParser;
    private StreamParser streamParser;
    // NOTE(review): 'symbols' and 'accession' are initialized/declared but never
    // read anywhere in this class; they look like dead state — confirm.
    private List symbols;
    private String accession;
    // Tag currently being accumulated, and its text; flushed by saveSeqAnno().
    private String headerTag = "";
    private StringBuffer headerTagText = new StringBuffer();
    private SeqIOListener listener;
    private Vector mListeners = new Vector();
    // When true, sequence data lines are skipped entirely.
    private boolean elideSymbols;

    /**
     * Constructor that takes the listener and the Symbol parser from the
     * GenbankFormat.
     *
     * @param theSymbolParser Symbol parser to use in processing the file
     * @param theListener Listener to notify when field has been processed
     */
    protected GenbankContext(SymbolTokenization theSymbolParser,
                             SeqIOListener theListener)
    {
        this.symbols = new ArrayList();
        this.status = HEADER;
        this.listener = theListener;
        this.symParser = theSymbolParser;
        this.streamParser = symParser.parseStream(listener);
        // If the listener itself emits parse errors, forward them through this context.
        if (this.listener instanceof ParseErrorSource)
            ((ParseErrorSource)(this.listener)).addParseErrorListener(this);
    }

    /**
     * This method determines the behaviour when a bad line is processed.
     * Some options are to log the error, throw an exception, ignore it
     * completely, or pass the event through.
     * <P>
     * This method should be overwritten when different behavior is desired.
     *
     * @param theEvent The event that contains the bad line and token.
     */
    public void BadLineParsed(org.biojava.utils.ParseErrorEvent theEvent)
    {
        notifyParseErrorEvent(theEvent);
    }

    /**
     * Dispatches a raw file line based on the current section (header,
     * features, sequence) and the section-transition tags.
     */
    protected void processLine(String line)
        throws ParseException, IllegalSymbolException
    {
        if (line.startsWith(GenbankFormat.FEATURE_TAG))
        {
            status = FEATURES;
            this.saveSeqAnno();
        }
        else if (line.startsWith(GenbankFormat.START_SEQUENCE_TAG))
        {
            status = SEQUENCE;
            this.saveSeqAnno();
            // Additional commit to push the final feature off the stack
            headerTag = line;
            this.saveSeqAnno();
        }
        else if (line.startsWith(GenbankFormat.END_SEQUENCE_TAG))
        {
            streamParser.close();
        }
        else if (status == FEATURES)
        {
            processFeatureLine(line);
        }
        else if (status == SEQUENCE && !elideSymbols)
        {
            processSeqLine(line, streamParser);
        }
        else if (status == HEADER)
        {
            processHeaderLine(line);
        }
    }

    /**
     * Adds a parse error listener to the list of listeners if it isn't already
     * included.
     *
     * @param theListener Listener to be added.
     */
    public synchronized void addParseErrorListener(
        ParseErrorListener theListener)
    {
        if (!mListeners.contains(theListener))
        {
            mListeners.addElement(theListener);
        }
    }

    /**
     * Removes a parse error listener from the list of listeners if it is
     * included.
     *
     * @param theListener Listener to be removed.
     */
    public synchronized void removeParseErrorListener(
        ParseErrorListener theListener)
    {
        if (mListeners.contains(theListener))
        {
            mListeners.removeElement(theListener);
        }
    }

    /**
     * Passes the event on to all the listeners registered for ParseErrorEvents.
     *
     * @param theEvent The event to be handed to the listeners.
     */
    protected void notifyParseErrorEvent(ParseErrorEvent theEvent)
    {
        Vector listeners;
        // Clone under lock so listeners can (un)register during notification.
        synchronized(this)
        {
            listeners = (Vector) mListeners.clone();
        }
        int lnrCount = listeners.size();
        for (int index = 0; index < lnrCount; index++)
        {
            ParseErrorListener client =
                (ParseErrorListener)listeners.elementAt(index);
            client.BadLineParsed(theEvent);
        }
    }

    /**
     * Feeds one sequence-data line into the symbol stream parser, skipping
     * the leading position numbers and whitespace between base runs.
     * '.' and '~' gap characters are normalized to '-'.
     */
    private void processSeqLine(String line, StreamParser theParser)
        throws IllegalSymbolException
    {
        char[] cline = line.toCharArray();
        int parseStart = 0;
        int parseEnd = 0;
        while (parseStart < cline.length)
        {
            while ((parseStart < cline.length) &&
                   ((cline[parseStart] == ' ') ||
                    (Character.isDigit(cline[parseStart]))))
            {
                // Read past leading spaces and numbers
                ++parseStart;
            }
            if (parseStart >= cline.length)
            {
                break;
            }
            parseEnd = parseStart + 1;
            while ((parseEnd < cline.length && cline[parseEnd] != ' '))
            {
                if (cline[parseEnd] == '.' || cline[parseEnd] == '~') {
                    cline[parseEnd] = '-';
                }
                ++parseEnd;
            }
            // Got a segment of read sequence data
            theParser.characters(cline, parseStart, parseEnd - parseStart);
            parseStart = parseEnd;
        }
    }

    /**
     * Private method to process a line assuming it's a feature line. A
     * feature line is defined to be a line between the FEATURE tag and the
     * ORIGIN tag. The BASE COUNT line is processed here.
     *
     * @throws ParseException Thrown when an error occurs parsing the file
     * @param line The line to be processed
     */
    private void processFeatureLine(String line)
        throws ParseException
    {
        // Check the line is really a feature line
        if (line.startsWith(GenbankFormat.FEATURE_LINE_PREFIX))
        {
            this.saveSeqAnno();
            // Flag value as a feature line for GenbankProcessor. By a
            // strange coincidence, this happens to be the same as the EMBL
            // value
            headerTag = GenbankFormat.FEATURE_FLAG;
            headerTagText = new StringBuffer(line.substring(5));
        }
        else
        {
            // Otherwise, process it as a header line
            processHeaderLine(line);
        }
    }

    /**
     * Private method to process a line assuming it's a header line. A header
     * line is defined to be a line before the FEATURE tag appears in the file.
     *
     * @throws ParseException Thrown when an error occurs parsing the file
     * @param line The line to be processed
     */
    private void processHeaderLine(String line)
        throws ParseException
    {
        if (line.startsWith(GenbankFormat.LOCUS_TAG))
        {
            // Genbank changed the format of the Locus line for release 127.
            // The new format is incompatible with the old.
            if (this.isLocusLinePre127(line))
            {
                this.parseLocusLinePre127(line);
            }
            else
            {
                this.parseLocusLinePost127(line);
            }
        }
        else if (line.startsWith(GenbankFormat.VERSION_TAG))
        {
            // VERSION line is a special case because it contains both
            // the VERSION field and the GI number
            this.saveSeqAnno();
            StringTokenizer lineTokens = new StringTokenizer(line);
            headerTag = lineTokens.nextToken();
            headerTagText = new StringBuffer(lineTokens.nextToken());
            if (lineTokens.hasMoreTokens()) {
                String nextToken = lineTokens.nextToken();
                if (nextToken.startsWith(GenbankFormat.GI_TAG))
                {
                    this.saveSeqAnno();
                    headerTag = GenbankFormat.GI_TAG; // Possibly should be UID?
                    headerTagText =
                        new StringBuffer(nextToken.substring(3));
                }
            }
        }
        else if (hasHeaderTag(line))
        {	// line has a header tag
            this.saveSeqAnno();
            headerTag = line.substring(0, TAG_LENGTH).trim();
            headerTagText = new StringBuffer(line.substring(TAG_LENGTH));
        }
        // gbpri1.seq (Release 125.0) has a line which is not
        // TAG_LENGTH long.  Patch offered by Ron Kuhn (rkuhn@cellomics.com)
        else if (line.length() >= TAG_LENGTH)
        {	// keep appending tag text value
            headerTagText.append(" " + line.substring(TAG_LENGTH));
        }
    }

    /**
     * Checks which version of the locus line format is used. The algorithm
     * switches on the size of the line; <75 means pre-127, otherwise it's 127.
     *
     * @param theLine the line to check the format of.
     * @return TRUE if the line is in Genbank release 126 or earlier format.
     * FALSE otherwise
     */
    private boolean isLocusLinePre127(String theLine)
    {
        return (theLine.length() < 75);
    }

    /**
     * Parses the locus line assuming it is in pre release 127 format.
     *
     * @param theLine Locus line to parse.
     * @throws ParseException If the line is too short.
     */
    private void parseLocusLinePre127(String theLine)
        throws ParseException
    {
        // Short lines fall back to whitespace tokenizing (name only);
        // full-width lines are parsed by fixed column positions.
        if (theLine.length() < 73)
        {
            StringTokenizer locusTokens = new StringTokenizer(theLine);
            //Locus Tag
            locusTokens.nextToken();
            if (locusTokens.hasMoreTokens()) {
                saveSeqAnno2(GenbankFormat.LOCUS_TAG, locusTokens.nextToken());
            }
            else {
                throw new ParseException("LOCUS line too short [" + theLine + "]");
            }
        }
        else {
            saveSeqAnno2(GenbankFormat.LOCUS_TAG, theLine.substring(12, 22));
            saveSeqAnno2(GenbankFormat.SIZE_TAG, theLine.substring(22, 29));
            saveSeqAnno2(GenbankFormat.STRAND_NUMBER_TAG, theLine.substring(33, 35));
            saveSeqAnno2(GenbankFormat.TYPE_TAG, theLine.substring(36, 41));
            saveSeqAnno2(GenbankFormat.CIRCULAR_TAG, theLine.substring(42, 52));
            saveSeqAnno2(GenbankFormat.DIVISION_TAG, theLine.substring(52, 55));
            saveSeqAnno2(GenbankFormat.DATE_TAG, theLine.substring(62, 73));
        }
    }

    /**
     * Parses the locus line assuming it is in post release 127 format.
     * Will also handle the case where the strand tag is optional. That
     * is the only tag that is supported as optional. Awaiting a response from
     * NCBI if it is or if their data is incorrectly formatted.
     *
     * @param theLine Locus line to parse.
     * @throws ParseException If the line is too short.
     */
    private void parseLocusLinePost127(String theLine)
        throws ParseException
    {
        if (theLine.length() < 79)
        {
            throw new ParseException("LOCUS line too short [" + theLine + "]");
        }
        StringTokenizer locusTokens = new StringTokenizer(theLine);
        if ((locusTokens.countTokens() == 8) || (locusTokens.countTokens() == 7))
        {
            // While Genbank 127 documentation doesn't allow the strand tag to
            // be optional, some files don't have it.  A key assumption here is
            // that this is the only tag that is optional.  The parser will
            // generate incorrect data if this assumption is violated.
            boolean includedStrandTag = (locusTokens.countTokens() == 8);
            // LOCUS tag; not stored
            locusTokens.nextToken();
            // Locus name
            saveSeqAnno2(GenbankFormat.LOCUS_TAG, locusTokens.nextToken());
            // Sequence length
            saveSeqAnno2(GenbankFormat.SIZE_TAG, locusTokens.nextToken());
            // "bp"; not stored
            locusTokens.nextToken();
            // Strand information
            // Both the strand and type are in the same token.  The strand
            // information is an optional part, so this is a bit hairy
            // Some files do not have a strand token.  While this is not allowed
            // by Genbank's documentation, we will treat this as an optional
            // token.  It is the only optional token this parser will allow.
            if (includedStrandTag)
            {
                String strandString = locusTokens.nextToken();
                StringTokenizer strandTokens = new StringTokenizer(strandString, "-");
                if (strandTokens.countTokens() > 1)
                {
                    saveSeqAnno2(GenbankFormat.STRAND_NUMBER_TAG, strandTokens.nextToken());
                }
                saveSeqAnno2(GenbankFormat.TYPE_TAG, strandTokens.nextToken());
            }
            // Circularity
            saveSeqAnno2(GenbankFormat.CIRCULAR_TAG, locusTokens.nextToken());
            // Division code
            saveSeqAnno2(GenbankFormat.DIVISION_TAG, locusTokens.nextToken());
            // Date in dd-MMM-yyyy format
            saveSeqAnno2(GenbankFormat.DATE_TAG, locusTokens.nextToken());
        }
        else
        {
            //Try using currently specified positions for tokens
            saveSeqAnno2(GenbankFormat.LOCUS_TAG, theLine.substring(12, 28));
            saveSeqAnno2(GenbankFormat.SIZE_TAG, theLine.substring(29, 40));
            saveSeqAnno2(GenbankFormat.STRAND_NUMBER_TAG, theLine.substring(44, 46));
            saveSeqAnno2(GenbankFormat.TYPE_TAG, theLine.substring(47, 53));
            saveSeqAnno2(GenbankFormat.CIRCULAR_TAG, theLine.substring(55, 63));
            saveSeqAnno2(GenbankFormat.DIVISION_TAG, theLine.substring(64, 67));
            saveSeqAnno2(GenbankFormat.DATE_TAG, theLine.substring(68, 79));
        }
    }

    /**
     * Passes the tag and the text to the listener.
     *
     * @throws ParseException Thrown when an error occurs parsing the file
     */
    private void saveSeqAnno()
        throws ParseException
    {
        if (! headerTag.equals(""))
        {	// save tag and its text
            listener.addSequenceProperty(headerTag, headerTagText.substring(0));
            headerTag = "";
            headerTagText = new StringBuffer("");
        }
    }

    /**
     * Private method to process a header tag and associated value.
     *
     * @param tag The tag to add
     * @param value The value of the associated tag
     * @throws ParseException Thrown when an error occurs parsing the file
     */
    private void saveSeqAnno2(String tag, String value)
        throws ParseException
    {
        value = value.trim(); // strip whitespace
        if (value.length() > 0) {
            this.saveSeqAnno();
            headerTag = tag;
            headerTagText = new StringBuffer(value);
        }
    }

    /**
     * @return does the line contain a header tag.
     * Yes, if any of the leading TAG_LENGTH characters aren't a space
     */
    private boolean hasHeaderTag(String line)
    {
        boolean isHeaderTag = false;
        char [] lar = line.toCharArray();
        int len = Math.min(lar.length, TAG_LENGTH); // handles empty lines better
        // len is already <= lar.length, so the extra bound check was redundant.
        for (int i = 0; i < len; i++)
        {
            if (lar[i] != ' ')
            {
                isHeaderTag = true;
                break;
            }
        }
        return isHeaderTag;
    }

    /** @return true when sequence-data lines are being skipped */
    public boolean getElideSymbols()
    {
        return elideSymbols;
    }

    /** Enables or disables skipping of sequence-data lines. */
    public void setElideSymbols(boolean elideSymbols)
    {
        this.elideSymbols = elideSymbols;
    }
}
|
package org.bootstrapjsp.tags.core.misc;
import org.bootstrapjsp.dialect.Html;
import org.bootstrapjsp.facet.ContextFacet;
import org.bootstrapjsp.facet.MoldFacet;
import org.bootstrapjsp.tags.html.Div;
import org.tldgen.annotations.Attribute;
import org.tldgen.annotations.Tag;
/**
 * Provide contextual feedback messages for typical user actions with the
 * handful of available and flexible alert messages.
 * <p>Renders: {@code <div class="alert">...</div>}</p>
 */
@Tag(dynamicAttributes=true)
public class Alert extends Div {

    /**
     * Builds a {@code div} carrying the "alert" base class and a contextual
     * facet so variants such as alert-success/alert-danger can be applied.
     */
    public Alert() {
        super("alert");
        super.addFacet(new ContextFacet("alert", null));
    }

    /**
     * Makes this alert dismissable. When enabled, a close button is prepended
     * to the alert body and the 'alert-dismissable' class is added so that
     * Bootstrap wires up the dismiss behaviour; when disabled, nothing changes.
     */
    @Attribute(rtexprvalue=true)
    public void setDismissable(boolean dismissable) {
        if (!dismissable) {
            return;
        }
        final Button closer = new Button();
        closer.getFacet(MoldFacet.class).setValue("close");
        closer.setDismiss("alert");
        super.appendChild(closer, BEFORE_BODY);
        super.setAttribute(Html.CLASS_ATTRIBUTE, "alert-dismissable");
    }
}
|
package org.bouncycastle.mail.smime;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.security.cert.X509Certificate;
import java.security.cert.CertificateParsingException;
import javax.mail.MessagingException;
import javax.mail.Part;
import javax.mail.internet.MimeBodyPart;
import org.bouncycastle.asn1.cms.IssuerAndSerialNumber;
import org.bouncycastle.cms.CMSTypedStream;
import org.bouncycastle.jce.PrincipalUtil;
import org.bouncycastle.mail.smime.util.SharedFileInputStream;
public class SMIMEUtil
{
    /**
     * Decide whether the body part must be canonicalised before processing:
     * anything other than "binary" content-transfer-encoding requires it.
     *
     * @param bodyPart the part to inspect
     * @param defaultContentTransferEncoding encoding assumed when none is declared
     */
    static boolean isCanonicalisationRequired(
        Part bodyPart,
        String defaultContentTransferEncoding)
        throws MessagingException
    {
        if (bodyPart instanceof MimeBodyPart)
        {
            MimeBodyPart mimePart = (MimeBodyPart)bodyPart;
            String[] cte = mimePart.getHeader("Content-Transfer-Encoding");
            String contentTransferEncoding;

            if (cte == null)
            {
                contentTransferEncoding = defaultContentTransferEncoding;
            }
            else
            {
                // Only the first declared encoding is considered.
                contentTransferEncoding = cte[0];
            }

            return !contentTransferEncoding.equalsIgnoreCase("binary");
        }
        else
        {
            return !defaultContentTransferEncoding.equalsIgnoreCase("binary");
        }
    }

    /**
     * return the MimeBodyPart described in the raw bytes provided in content
     */
    public static MimeBodyPart toMimeBodyPart(
        byte[] content)
        throws SMIMEException
    {
        return toMimeBodyPart(new ByteArrayInputStream(content));
    }

    /**
     * return the MimeBodyPart described in the input stream content
     */
    public static MimeBodyPart toMimeBodyPart(
        InputStream content)
        throws SMIMEException
    {
        try
        {
            return new MimeBodyPart(content);
        }
        catch (MessagingException e)
        {
            throw new SMIMEException("exception creating body part.", e);
        }
    }

    /**
     * return the MimeBodyPart described in {@link CMSTypedStream} content.
     * <p>
     * The content is spooled to a temporary file which is deleted when the
     * resulting part is written out. Fixed: the streams are now closed (and the
     * temporary file removed) even when copying or part construction fails,
     * where the original leaked both on any exception.
     */
    public static MimeBodyPart toMimeBodyPart(
        CMSTypedStream content)
        throws SMIMEException
    {
        File tmp = null;

        try
        {
            tmp = File.createTempFile("bcMail", ".mime");

            OutputStream out = new FileOutputStream(tmp);
            try
            {
                InputStream in = content.getContentStream();
                try
                {
                    byte[] buf = new byte[10000];
                    int len;

                    while ((len = in.read(buf, 0, buf.length)) > 0)
                    {
                        out.write(buf, 0, len);
                    }
                }
                finally
                {
                    in.close();
                }
            }
            finally
            {
                out.close();
            }

            return new FileMimeBodyPart(tmp);
        }
        catch (IOException e)
        {
            // don't leave an orphaned spool file behind on failure
            if (tmp != null)
            {
                tmp.delete();
            }
            throw new SMIMEException("can't create temporary file: " + e, e);
        }
        catch (MessagingException e)
        {
            if (tmp != null)
            {
                tmp.delete();
            }
            throw new SMIMEException("can't create part: " + e, e);
        }
    }

    /**
     * Return a CMS IssuerAndSerialNumber structure for the passed in X.509 certificate.
     *
     * @param cert the X.509 certificate to get the issuer and serial number for.
     * @return an IssuerAndSerialNumber structure representing the certificate.
     */
    public static IssuerAndSerialNumber createIssuerAndSerialNumberFor(
        X509Certificate cert)
        throws CertificateParsingException
    {
        try
        {
            return new IssuerAndSerialNumber(PrincipalUtil.getIssuerX509Principal(cert), cert.getSerialNumber());
        }
        catch (Exception e)
        {
            throw new CertificateParsingException("exception extracting issuer and serial number: " + e);
        }
    }

    /**
     * MimeBodyPart backed by a temporary file; the file is deleted after the
     * part has been written out once.
     */
    private static class FileMimeBodyPart
        extends MimeBodyPart
    {
        private final File _file;

        public FileMimeBodyPart(
            File file)
            throws MessagingException, IOException
        {
            super(new SharedFileInputStream(file.getCanonicalPath()));

            _file = file;
        }

        public void writeTo(
            OutputStream out)
            throws IOException, MessagingException
        {
            super.writeTo(out);

            // NOTE(review): one-shot by design — the backing stream is closed
            // and the spool file deleted here, so a second writeTo will fail.
            contentStream.close();
            _file.delete();
        }
    }
}
|
package org.broad.igv.ui.panel;
import org.apache.log4j.Logger;
import org.broad.igv.PreferenceManager;
import org.broad.igv.track.Track;
import org.broad.igv.track.TrackClickEvent;
import org.broad.igv.track.TrackGroup;
import org.broad.igv.ui.IGV;
import org.broad.igv.ui.UIConstants;
import org.broad.igv.ui.dnd.AbstractGhostDropManager;
import org.broad.igv.ui.dnd.GhostDropEvent;
import org.broad.igv.ui.dnd.GhostDropListener;
import org.broad.igv.ui.dnd.GhostGlassPane;
import org.broad.igv.ui.util.UIUtilities;
import org.jdesktop.layout.GroupLayout;
import javax.swing.*;
import javax.swing.event.MouseInputAdapter;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.util.*;
import java.util.List;
/**
 * Left-hand panel that renders the names of the tracks shown in the adjacent
 * data panel.  Besides painting, it implements track and group selection, the
 * name-panel popup menu, tooltips, and drag-and-drop reordering of tracks
 * within and between name panels.
 *
 * @author jrobinso
 */
public class TrackNamePanel extends TrackPanelComponent implements Paintable {

    private static Logger log = Logger.getLogger(TrackNamePanel.class);

    // Vertical extent of each visible group, rebuilt on every paint; used to
    // map a mouse y-coordinate back to the TrackGroup drawn there.
    List<GroupExtent> groupExtents = new ArrayList();

    // Image shown on the drag-and-drop glass pane; created lazily per drag.
    BufferedImage dndImage = null;

    // Group currently selected for popup/drag actions (grouped mode only).
    TrackGroup selectedGroup = null;

    boolean showGroupNames = true;

    boolean showSampleNamesWhenGrouped = false;

    public TrackNamePanel(TrackPanel trackPanel) {
        super(trackPanel);
        init();
    }

    Collection<TrackGroup> getGroups() {
        return getTrackPanel().getGroups();
    }

    /** A panel is "grouped" when its TrackPanel holds more than one group. */
    private boolean isGrouped() {
        return getGroups().size() > 1;
    }

    @Override
    public void paintComponent(Graphics g) {
        super.paintComponent(g);
        // Mouseable regions are regenerated on every paint; drop stale ones.
        removeMousableRegions();
        Rectangle visibleRect = getVisibleRect();
        paintImpl(g, visibleRect);
    }

    /**
     * Offscreen (snapshot/export) rendering: fill the background explicitly
     * since there is no Swing paint cycle to do it.
     */
    public void paintOffscreen(Graphics2D g, Rectangle rect) {
        g.setColor(Color.white);
        g.fill(rect);
        paintImpl(g, rect);
        super.paintBorder(g);
    }

    /**
     * Shared painting implementation: walks the visible groups top to bottom,
     * drawing group separators/borders and delegating track-name rendering to
     * printTrackNames.  Also rebuilds groupExtents as a side effect.
     */
    private void paintImpl(Graphics g, Rectangle visibleRect) {
        // Get available tracks
        Collection<TrackGroup> groups = getGroups();
        boolean isGrouped = groups.size() > 1;
        if (!groups.isEmpty()) {
            final Graphics2D graphics2D = (Graphics2D) g.create();
            graphics2D.setColor(Color.BLACK);
            final Graphics2D greyGraphics = (Graphics2D) g.create();
            greyGraphics.setColor(UIConstants.ZOOMED_OUT_COLOR);
            int regionY = 0;
            groupExtents.clear();
            Rectangle clipRect = g.getClipBounds();
            for (Iterator<TrackGroup> groupIter = groups.iterator(); groupIter.hasNext(); ) {
                TrackGroup group = groupIter.next();
                // Everything below the clip rectangle is off screen; stop.
                if (regionY > clipRect.getMaxY()) {
                    break;
                }
                if (group.isVisible()) {
                    if (isGrouped) {
                        // Grey separator band above each group.
                        if (regionY + UIConstants.groupGap >= visibleRect.y && regionY < visibleRect.getMaxY()) {
                            greyGraphics.fillRect(0, regionY + 1, getWidth(), UIConstants.groupGap - 1);
                        }
                        regionY += UIConstants.groupGap;
                    }
                    if (group.isDrawBorder() && regionY + UIConstants.groupGap >= visibleRect.y &&
                            regionY < visibleRect.getMaxY()) {
                        g.drawLine(0, regionY - 1, getWidth(), regionY - 1);
                    }
                    int h = group.getHeight();
                    Rectangle groupRect = new Rectangle(visibleRect.x, regionY, visibleRect.width, h);
                    Rectangle displayableRect = getDisplayableRect(groupRect, visibleRect);
                    // printTrackNames returns the y position after the group.
                    regionY = printTrackNames(group, displayableRect, clipRect, graphics2D, 0, regionY);
                    if (isGrouped) {
                        groupExtents.add(new GroupExtent(group, groupRect.y, groupRect.y + groupRect.height));
                        if (showGroupNames) {
                            group.renderName(graphics2D, displayableRect, group == selectedGroup);
                        }
                    }
                    if (group.isDrawBorder()) {
                        g.drawLine(0, regionY, getWidth(), regionY);
                    }
                }
            }
        }
    }

    /**
     * Return the intersection of the track rectangle with the visible
     * rectangle, falling back to the whole track rectangle when the
     * intersection is too short (<= 15 px) to be useful for name rendering.
     * Returns null only if visibleRect is null.
     */
    private Rectangle getDisplayableRect(Rectangle trackRectangle, Rectangle visibleRect) {
        Rectangle rect = null;
        if (visibleRect != null) {
            Rectangle intersectedRect = trackRectangle.intersection(visibleRect);
            if (intersectedRect.height > 15) {
                rect = intersectedRect;
            } else {
                rect = new Rectangle(trackRectangle);
            }
        }
        return rect;
    }

    /**
     * Render the names of the visible tracks in a group, registering a
     * mouseable region for each, and return the y coordinate just below the
     * last track drawn.
     */
    private int printTrackNames(TrackGroup group, Rectangle visibleRect, Rectangle clipRect,
                                Graphics2D graphics2D, int regionX, int regionY) {
        // Copy the track list to guard against concurrent modification during paint.
        List<Track> tmp = new ArrayList(group.getTracks());
        final Color backgroundColor = PreferenceManager.getInstance().getAsColor(PreferenceManager.BACKGROUND_COLOR);
        graphics2D.setBackground(backgroundColor);
        graphics2D.clearRect(visibleRect.x, visibleRect.y, visibleRect.width, visibleRect.height);
        for (Track track : tmp) {
            if (track == null) continue;
            track.setY(regionY);
            int trackHeight = track.getHeight();
            if (track.isVisible()) {
                if (regionY + trackHeight >= clipRect.y && regionY < clipRect.getMaxY()) {
                    int width = getWidth();
                    int height = track.getHeight();
                    Rectangle region = new Rectangle(regionX, regionY, width, height);
                    addMousableRegion(new MouseableRegion(region, track));
                    // Individual names are hidden in grouped mode unless the
                    // user enabled "Show sample names".
                    if (!isGrouped() || showSampleNamesWhenGrouped) {
                        Rectangle rect = new Rectangle(regionX, regionY, width, height);
                        if (track.isSelected()) {
                            // Selected tracks get a grey highlight background.
                            graphics2D.setBackground(Color.LIGHT_GRAY);
                            graphics2D.clearRect(rect.x, rect.y, rect.width, rect.height);
                        } else {
                            graphics2D.setBackground(backgroundColor);
                        }
                        graphics2D.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING,
                                RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
                        track.renderName(graphics2D, rect, visibleRect);
                    }
                }
                regionY += trackHeight;
            }
        }
        return regionY;
    }

    /** One-time component setup: border, layout, mouse and DnD listeners. */
    private void init() {
        setBorder(javax.swing.BorderFactory.createLineBorder(Color.black));
        setBackground(new java.awt.Color(255, 255, 255));
        GroupLayout dataTrackNamePanelLayout = new org.jdesktop.layout.GroupLayout(this);
        setLayout(dataTrackNamePanelLayout);
        dataTrackNamePanelLayout.setHorizontalGroup(
                dataTrackNamePanelLayout.createParallelGroup(GroupLayout.LEADING).add(0, 148, Short.MAX_VALUE));
        dataTrackNamePanelLayout.setVerticalGroup(
                dataTrackNamePanelLayout.createParallelGroup(GroupLayout.LEADING).add(0, 528, Short.MAX_VALUE));
        NamePanelMouseAdapter mouseAdapter = new NamePanelMouseAdapter();
        addMouseListener(mouseAdapter);
        addMouseMotionListener(mouseAdapter);
        DropListener dndListener = new DropListener(this);
        addGhostDropListener(dndListener);
    }

    /**
     * Open the popup menu for a click event, adding the grouped-mode-only
     * check box items ("Show group names", "Show sample names").
     */
    @Override
    protected void openPopupMenu(TrackClickEvent te) {
        ArrayList<Component> extraItems = null;
        if (isGrouped()) {
            extraItems = new ArrayList();
            final JMenuItem item = new JCheckBoxMenuItem("Show group names");
            item.setSelected(showGroupNames);
            item.addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    showGroupNames = item.isSelected();
                    repaint();
                }
            });
            extraItems.add(item);
            final JMenuItem item2 = new JCheckBoxMenuItem("Show sample names");
            item2.setSelected(showSampleNamesWhenGrouped);
            item2.addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent e) {
                    showSampleNamesWhenGrouped = item2.isSelected();
                    repaint();
                }
            });
            extraItems.add(item2);
        }
        super.openPopupMenu(te, extraItems);
    }

    /**
     * Return the tooltip text of the first mouseable region containing
     * (x, y), or "" if the point is outside every region.
     */
    public String getTooltipTextForLocation(int x, int y) {
        List<MouseableRegion> mouseableRegions = TrackNamePanel.this.getMouseRegions();
        for (MouseableRegion mouseableRegion : mouseableRegions) {
            if (mouseableRegion.containsPoint(x, y)) {
                return mouseableRegion.getText();
            }
        }
        return "";
    }

    /** Create the 2-pixel-high blue bar painted on the glass pane during a drag. */
    private synchronized void createDnDImage() {
        dndImage = new BufferedImage(getWidth(), 2, BufferedImage.TYPE_INT_ARGB);
        Graphics g = dndImage.getGraphics();
        g.setColor(Color.blue);
        g.drawLine(1, 0, getWidth() - 2, 0);
        g.drawLine(1, 1, getWidth() - 2, 1);
    }

    /**
     * Shift-click, used to select a range of tracks.  Delegates the actual
     * range logic to IGV.shiftSelectTracks with the first track under the
     * click point.
     *
     * @param e the originating mouse event.
     */
    protected void shiftSelectTracks(MouseEvent e) {
        for (MouseableRegion mouseRegion : mouseRegions) {
            if (mouseRegion.containsPoint(e.getX(), e.getY())) {
                Collection<Track> clickedTracks = mouseRegion.getTracks();
                if (clickedTracks != null && clickedTracks.size() > 0) {
                    Track t = clickedTracks.iterator().next();
                    IGV.getInstance().shiftSelectTracks(t);
                }
                return;
            }
        }
    }

    /** Return the group whose extent contains y, or null (e.g. ungrouped mode). */
    private TrackGroup getGroup(int y) {
        for (GroupExtent ge : groupExtents) {
            if (ge.contains(y)) {
                return ge.group;
            }
        }
        return null;
    }

    /**
     * Mouse adapter for the track name panel. Supports multiple selection,
     * popup menu, and drag & drop within or between name panels.
     */
    class NamePanelMouseAdapter extends MouseInputAdapter {

        boolean isDragging = false;

        // Tracks being dragged; populated when a drag gesture starts.
        List<Track> dragTracks = new ArrayList();

        Point dragStart = null;

        /**
         * Mouse down. Track selection logic goes here.
         */
        @Override
        public void mousePressed(MouseEvent e) {
            if (log.isDebugEnabled()) {
                log.debug("Enter mousePressed");
            }
            dragStart = e.getPoint();
            requestFocus();
            grabFocus();
            boolean isGrouped = isGrouped();
            if (e.isPopupTrigger()) {
                // Popup trigger: (re)select the clicked group or track(s)
                // before showing the menu.
                if (isGrouped) {
                    clearTrackSelections();
                    TrackGroup g = getGroup(e.getY());
                    if (null == g || g == selectedGroup) {
                        selectedGroup = null;
                    } else {
                        selectGroup(g);
                    }
                } else if (!isTrackSelected(e)) {
                    clearTrackSelections();
                    selectTracks(e);
                }
                TrackClickEvent te = new TrackClickEvent(e, null);
                openPopupMenu(te);
            } // meta (mac) or control: toggle selection
            else if (e.getButton() == MouseEvent.BUTTON1) {
                if (isGrouped) {
                    // Left click in grouped mode toggles group selection.
                    clearTrackSelections();
                    TrackGroup g = getGroup(e.getY());
                    if (g == selectedGroup) {
                        selectedGroup = null;
                    } else {
                        selectGroup(getGroup(e.getY()));
                    }
                } else {
                    if (e.isMetaDown() || e.isControlDown()) {
                        toggleTrackSelections(e);
                    } else if (e.isShiftDown()) {
                        shiftSelectTracks(e);
                    } else if (!isTrackSelected(e)) {
                        clearTrackSelections();
                        selectTracks(e);
                    }
                }
            } else {
                if (isGrouped) {
                    // NOTE(review): intentionally empty — other buttons do not
                    // change selection in grouped mode; confirm this is by design.
                } else if (!isTrackSelected(e)) {
                    clearTrackSelections();
                    selectTracks(e);
                }
            }
            IGV.getInstance().repaintNamePanels();
        }

        /**
         * Mouse up: finish an in-progress drag (hand off to the glass pane /
         * drop listeners), or show the popup, or finalize a plain selection.
         */
        public void mouseReleased(MouseEvent e) {
            if (log.isDebugEnabled()) {
                log.debug("Enter mouseReleased");
            }
            if (isDragging) {
                Component c = e.getComponent();
                IGV.getInstance().endDnD();
                GhostGlassPane glassPane = IGV.getInstance().getDnDGlassPane();
                // Convert the release point into glass pane coordinates.
                Point p = (Point) e.getPoint().clone();
                SwingUtilities.convertPointToScreen(p, c);
                Point eventPoint = (Point) p.clone();
                SwingUtilities.convertPointFromScreen(p, glassPane);
                glassPane.setPoint(p);
                glassPane.setVisible(false);
                glassPane.setImage(null);
                fireGhostDropEvent(new GhostDropEvent(dragStart, eventPoint, dragTracks));
                if (selectedGroup != null) {
                    // Dragging a whole group: move it to the gap under the cursor.
                    int idx = getGroupGapNumber(e.getY());
                    TrackPanel dataTrackView = (TrackPanel) getParent();
                    dataTrackView.moveGroup(selectedGroup, idx);
                    dataTrackView.repaint();
                }
                selectedGroup = null;
            }
            if (e.isPopupTrigger()) {
                TrackClickEvent te = new TrackClickEvent(e, null);
                openPopupMenu(te);
            } else {
                if (!isDragging && !e.isMetaDown() && !e.isControlDown() &&
                        !e.isShiftDown()) {
                    clearTrackSelections();
                    selectTracks(e);
                    IGV.getInstance().repaintNamePanels();
                }
            }
            isDragging = false;
            dragTracks.clear();
            dndImage = null;
        }

        /**
         * Mouse drag: starts a DnD gesture once the cursor has moved far
         * enough (> 5 px), then tracks the cursor on the glass pane.
         */
        public void mouseDragged(MouseEvent e) {
            Component c = e.getComponent();
            if (e.isPopupTrigger()) {
                return;
            }
            if (!isDragging) {
                if (dragStart == null) {
                    dragStart = e.getPoint();
                    return;
                } else if (e.getPoint().distance(dragStart) < 5) {
                    // Ignore jitter below the drag threshold.
                    return;
                }
                dragStart.x = getWidth() / 2;
                IGV.getInstance().startDnD();
                if (dndImage == null) {
                    createDnDImage();
                }
                IGV.getInstance().getDnDGlassPane().setImage(dndImage);
                isDragging = true;
                dragTracks.clear();
                dragTracks.addAll(IGV.getInstance().getSelectedTracks());
                if (getGroups().size() > 0) {
                    selectedGroup = getGroup(e.getY());
                } else {
                    selectedGroup = null;
                }
                // Code below paints target component on the dndImage. It needs modified to paint some representation
                // of the selectect tracks, probably the track names printed as a list.
            }
            if (isDragging) {
                final GhostGlassPane glassPane = IGV.getInstance().getDnDGlassPane();
                Point p = (Point) e.getPoint().clone();
                p.x = getWidth() / 2;
                SwingUtilities.convertPointToScreen(p, c);
                SwingUtilities.convertPointFromScreen(p, glassPane);
                glassPane.setPoint(p);
                UIUtilities.invokeOnEventThread(new Runnable() {
                    public void run() {
                        Rectangle bounds = new Rectangle(getBounds());
                        // Repaint well beyond the panel so the ghost image is
                        // visible over the whole window height.
                        bounds.height = 10000;
                        glassPane.paintImmediately(bounds);
                    }
                });
            }
        }

        @Override
        public void mouseMoved(MouseEvent e) {
            int x = e.getX();
            int y = e.getY();
            setToolTipText(getTooltipTextForLocation(x, y));
        }

        /**
         * Mouse was clicked. Delegate single-click action to the track(s) clicked on. We won't know if this
         * is a double click or not until the double-click interval has passed, so defer the action with a
         * TimerTask. If a second click arrives it will be canceled.
         *
         * @param e the originating mouse event.
         */
        @Override
        public void mouseClicked(final MouseEvent e) {
            // If this is the second click of a double click, cancel the scheduled single click task.
            if (e.getClickCount() > 1) {
                clickScheduler.cancelClickTask();
                return;
            }
            TimerTask clickTask = new TimerTask() {
                @Override
                public void run() {
                    for (MouseableRegion mouseRegion : mouseRegions) {
                        if (mouseRegion.containsPoint(e.getX(), e.getY())) {
                            for (Track t : mouseRegion.getTracks()) {
                                t.handleNameClick(e);
                            }
                            return;
                        }
                    }
                }
            };
            clickScheduler.scheduleClickTask(clickTask);
        }

        // Notify every registered drop listener (static, shared across panels).
        protected void fireGhostDropEvent(GhostDropEvent evt) {
            Iterator it = TrackNamePanel.dropListeners.iterator();
            while (it.hasNext()) {
                ((GhostDropListener) it.next()).ghostDropped(evt);
            }
        }
    }

    /**
     * Receives ghost-drop events for this panel; decides whether dropped
     * tracks land in this panel or should be removed from it because they
     * were dropped into another panel.
     */
    class DropListener extends AbstractGhostDropManager {

        TrackNamePanel panel;

        public DropListener(TrackNamePanel target) {
            super(target);
            this.panel = target;
        }

        public void ghostDropped(GhostDropEvent e) {
            Point startPoint = e.getStartLocation();
            Point dropPoint = getTranslatedPoint(e.getDropLocation());
            Rectangle bounds = component.getVisibleRect();
            // Was the drop inside this panel's visible vertical range?
            boolean isInTarget = dropPoint.y > bounds.y && dropPoint.y < bounds.getMaxY();
            if (isInTarget) {
                tracksDropped(startPoint, dropPoint, e.getTracks());
                e.removeTracksFromSource();
                e.setTracksDropped(true);
            } else {
                // NOTE(review): getParent() here resolves against the enclosing
                // TrackNamePanel (inner class) — confirm that is intended.
                TrackPanel view = ((TrackPanel) getParent());
                if (e.isTracksDropped()) {
                    view.removeTracks(e.getTracks());
                } else {
                    // Defer removal until we are sure the tracks are dropped in another panel
                    e.addSourcePanel(view);
                }
            }
        }

        /**
         * Insert the dropped tracks before or after the track under the drop
         * point, depending on which half of that track's row was hit.
         */
        void tracksDropped(Point startPoint, Point dropPoint, List<Track> tracks) {
            // This cast is horrid but we can't fix everything at once.
            TrackPanel view = ((TrackPanel) getParent());
            List<MouseableRegion> regions = getMouseRegions();
            // Find the regions containing the startPoint and point
            boolean before = true;
            MouseableRegion dropRegion = null;
            MouseableRegion startRegion = null;
            for (MouseableRegion region : regions) {
                if (region.containsPoint(dropPoint.x, dropPoint.y)) {
                    dropRegion = region;
                    Rectangle bnds = dropRegion.getBounds();
                    int dy1 = (dropPoint.y - bnds.y);
                    int dy2 = bnds.height - dy1;
                    // Upper half => insert before, lower half => insert after.
                    before = dy1 < dy2;
                }
                if (region.containsPoint(startPoint.x, startPoint.y)) {
                    startRegion = region;
                }
                if (dropRegion != null && startRegion != null) {
                    break;
                }
            }
            Track dropTrack = null;
            if (dropRegion != null) {
                Iterator<Track> tmp = dropRegion.getTracks().iterator();
                if (tmp.hasNext()) {
                    dropTrack = tmp.next();
                }
            }
            view.moveSelectedTracksTo(tracks, dropTrack, before);
        }
    }

    /** Mark a group as selected and select every track it contains. */
    private void selectGroup(TrackGroup group) {
        selectedGroup = group;
        if (selectedGroup != null) {
            for (Track t : selectedGroup.getTracks()) {
                t.setSelected(true);
            }
        }
    }

    /** Vertical pixel range occupied by one group in the panel. */
    class GroupExtent {

        TrackGroup group;
        int minY;
        int maxY;

        GroupExtent(TrackGroup group, int minY, int maxY) {
            this.group = group;
            this.maxY = maxY;
            this.minY = minY;
        }

        // Half-open on the top: y == minY is NOT inside, y == maxY is.
        boolean contains(int y) {
            return y > minY && y <= maxY;
        }

        boolean isAfter(int y) {
            return minY > y;
        }
    }

    /**
     * Return the index of the first group gap below y, i.e. the insertion
     * index used when a dragged group is dropped at y.
     */
    int getGroupGapNumber(int y) {
        for (int i = 0; i < groupExtents.size(); i++) {
            if (groupExtents.get(i).isAfter(y)) {
                return i;
            }
        }
        return groupExtents.size();
    }

    // Track D&D support follows
    // TODO -- this use of a static is really bad, bugs and memory leaks waiting to happen. Redesign this.
    static List<DropListener> dropListeners = new ArrayList();

    private static void addGhostDropListener(DropListener listener) {
        if (listener != null) {
            dropListeners.add(listener);
        }
    }

    /** Unregister all drop listeners attached to the given panel. */
    public static void removeDropListenerFor(TrackNamePanel panel) {
        List<DropListener> removeThese = new ArrayList();
        for (DropListener dl : dropListeners) {
            if (dl.panel == panel) {
                removeThese.add(dl);
            }
        }
        dropListeners.removeAll(removeThese);
    }
}
|
package org.ensembl.healthcheck;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.logging.Handler;
import java.util.logging.Level;
import org.ensembl.healthcheck.testcase.EnsTestCase;
import org.ensembl.healthcheck.util.ConnectionPool;
import org.ensembl.healthcheck.util.LogFormatter;
import org.ensembl.healthcheck.util.MyStreamHandler;
import org.ensembl.healthcheck.util.Utils;
/**
* TestRunner optimised for outputting results to HTML.
*/
public class WebTestRunner extends TestRunner implements Reporter {
private boolean debug = false;
private String configFile = "web.properties";
private long startTime;
private static String TIMINGS_FILE = "timings.txt";
/**
* Main run method.
*
* @param args Command-line arguments.
*/
private void run(String[] args) {
deleteTimingsFile();
ReportManager.setReporter(this);
parseCommandLine(args);
setupLogging();
Utils.readPropertiesFileIntoSystem(PROPERTIES_FILE);
Utils.readPropertiesFileIntoSystem(configFile);
parseProperties();
groupsToRun = getGroupsFromProperties();
List databaseRegexps = getDatabasesFromProperties();
outputLevel = setOutputLevelFromProperties();
TestRegistry testRegistry = new TestRegistry();
DatabaseRegistry databaseRegistry = new DatabaseRegistry(databaseRegexps, null, null);
if (databaseRegistry.getAll().length == 0) {
logger.warning("Warning: no database names matched any of the database regexps given");
}
runAllTests(databaseRegistry, testRegistry, false);
printOutput();
ConnectionPool.closeAll();
} // run
/**
* Command-line entry point.
*
* @param args Command line args.
*/
public static void main(String[] args) {
new WebTestRunner().run(args);
} // main
private void parseCommandLine(String[] args) {
for (int i = 0; i < args.length; i++) {
if (args[i].equals("-h")) {
printUsage();
System.exit(0);
} else if (args[i].equals("-config")) {
configFile = args[++i];
} else if (args[i].equals("-debug")) {
debug = true;
logger.finest("Running in debug mode");
}
}
} // parseCommandLine
private void printUsage() {
System.out.println("\nUsage: WebTestRunner {options} \n");
System.out.println("Options:");
System.out.println(" -config <file> Properties file to use instead of web.properties");
System.out.println(" -h This message.");
System.out.println(" -debug Print debugging info");
System.out.println();
System.out.println("All configuration information is read from the files database.properties and web.properties. ");
System.out.println("web.properties should contain the following properties:");
System.out.println(" webtestrunner.groups= A comma-separated list of the groups, or individual tests, to run");
System.out.println(" webtestrunner.databases= A comma-separated list of database regexps to match");
System.out.println(" webtestrunner.file= The name of the output file to write to ");
System.out.println(" webtestrunner.outputlevel= How much output to write. Should be one of all, info, warning, correct or problem");
}
private void setupLogging() {
// stop parent logger getting the message
logger.setUseParentHandlers(false);
Handler myHandler = new MyStreamHandler(System.out, new LogFormatter());
logger.addHandler(myHandler);
logger.setLevel(Level.WARNING);
if (debug) {
logger.setLevel(Level.FINEST);
}
} // setupLogging
private void deleteTimingsFile() {
(new File(TIMINGS_FILE)).delete();
}
// Implementation of Reporter interface
/**
* Called when a message is to be stored in the report manager.
*
* @param reportLine The message to store.
*/
public void message(ReportLine reportLine) {
}
/**
* Called just before a test case is run.
*
* @param testCase The test case about to be run.
* @param dbre The database which testCase is to be run on, or null of no/several databases.
*/
public void startTestCase(EnsTestCase testCase, DatabaseRegistryEntry dbre) {
startTime = System.currentTimeMillis();
}
/**
* Should be called just after a test case has been run.
*
* @param testCase The test case that was run.
* @param result The result of testCase.
* @param dbre The database which testCase was run on, or null of no/several databases.
*/
public void finishTestCase(EnsTestCase testCase, boolean result, DatabaseRegistryEntry dbre) {
long duration = System.currentTimeMillis() - startTime;
String str = duration + "\t" + dbre.getName() + "\t" + testCase.getShortTestName() + "\t" + Utils.formatTimeString(duration);
Utils.writeStringToFile(TIMINGS_FILE, str, true, true);
}
private void parseProperties() {
if (System.getProperty("webtestrunner.groups") == null) {
System.err.println("No tests or groups specified in " + configFile);
System.exit(1);
}
if (System.getProperty("webtestrunner.databases") == null) {
System.err.println("No databases specified in " + configFile);
System.exit(1);
}
if (System.getProperty("webtestrunner.file") == null) {
System.err.println("No output file specified in " + configFile);
System.exit(1);
}
}
/**
* Get a list of test groups by parsing the appropriate property.
*
* @return the list of group or test names.
*/
private List getGroupsFromProperties() {
String[] groups = System.getProperty("webtestrunner.groups").split(",");
return Arrays.asList(groups);
}
/**
* Get a list of databases by parsing the appropriate property.
*
* @return The list of database names or patterns.
*/
private List getDatabasesFromProperties() {
String[] dbs = System.getProperty("webtestrunner.databases").split(",");
return Arrays.asList(dbs);
}
private int setOutputLevelFromProperties() {
String lstr = System.getProperty("webtestrunner.outputlevel").toLowerCase();
if (lstr.equals("all")) {
outputLevel = ReportLine.ALL;
} else if (lstr.equals("none")) {
outputLevel = ReportLine.NONE;
} else if (lstr.equals("problem")) {
outputLevel = ReportLine.PROBLEM;
} else if (lstr.equals("correct")) {
outputLevel = ReportLine.CORRECT;
} else if (lstr.equals("warning")) {
outputLevel = ReportLine.WARNING;
} else if (lstr.equals("info")) {
outputLevel = ReportLine.INFO;
} else {
System.err.println("Output level " + lstr + " not recognised; using 'all'");
}
return outputLevel;
}
/**
* Print formatted output held in outputBuffer to file specified in System property file.
*/
private void printOutput() {
String file = System.getProperty("webtestrunner.file");
try {
PrintWriter pw = new PrintWriter(new FileOutputStream(file));
printHeader(pw);
printNavigation(pw);
printExecutiveSummary(pw);
printSummaryByDatabase(pw);
printSummaryByTest(pw);
printReportsByDatabase(pw);
printReportsByTest(pw);
printFooter(pw);
pw.close();
} catch (Exception e) {
System.err.println("Error writing to " + file);
e.printStackTrace();
}
}
private void printHeader(PrintWriter pw) {
print(pw, "<html>");
print(pw, "<head>");
print(pw, "<style type=\"text/css\" media=\"all\">");
print(pw, "@import url(http:
print(pw, "@import url(http:
print(pw, "#page ul li { list-style-type:none; list-style-image: none; margin-left: -2em }");
print(pw, "</style>");
print(pw, "<title>" + System.getProperty("webtestrunner.title") + "</title>");
print(pw, "</head>");
print(pw, "<body>");
print(pw, "<div id='page'><div id='i1'><div id='i2'><div class='sptop'> </div>");
print(pw, "<div id='release'>" + System.getProperty("webtestrunner.title") + "</div>");
print(pw, "<hr>");
}
private void printReportsByDatabase(PrintWriter pw) {
print(pw, "<h2>Detailed reports by database</h2>");
Map reportsByDB = ReportManager.getAllReportsByDatabase();
TreeSet dbs = new TreeSet(reportsByDB.keySet());
Iterator it = dbs.iterator();
while (it.hasNext()) {
String database = (String) it.next();
String link = "<a name=\"" + database + "\">";
print(pw, "<h3 class='boxed'>" + link + database + "</a></h3>");
print(pw, "<p>");
List reports = (List) reportsByDB.get(database);
Iterator it2 = reports.iterator();
String lastTest = "";
while (it2.hasNext()) {
ReportLine line = (ReportLine) it2.next();
String test = line.getShortTestCaseName();
if (!lastTest.equals("") && !test.equals(lastTest)) {
print(pw, "</p><p>");
}
lastTest = test;
String linkTarget = "<a name=\"" + database + ":" + test + "\"></a> ";
String s = linkTarget + getFontForReport(line) + "<strong>" + test + ": </strong>" + line.getMessage() + "</font>" + "<br>";
print(pw, s);
} // while it2
print(pw, "</p>");
} // while it
print(pw, "<hr>");
}
private void printReportsByTest(PrintWriter pw) {
print(pw, "<h2>Detailed reports by test case</h2>");
Map reportsByTC = ReportManager.getAllReportsByTestCase();
TreeSet dbs = new TreeSet(reportsByTC.keySet());
Iterator it = dbs.iterator();
while (it.hasNext()) {
String test = (String) it.next();
String link = "<a name=\"" + test + "\">";
print(pw, "<h3 class='boxed'>" + link + test + "</a></h3>");
print(pw, "<p>");
List reports = (List) reportsByTC.get(test);
Iterator it2 = reports.iterator();
String lastDB = "";
while (it2.hasNext()) {
ReportLine line = (ReportLine) it2.next();
String database = line.getDatabaseName();
if (!lastDB.equals("") && !database.equals(lastDB)) {
print(pw, "</p><p>");
}
lastDB = database;
String linkTarget = "<a name=\"" + line.getShortTestCaseName() + ":" + database + "\"></a> ";
String s = linkTarget + getFontForReport(line) + "<strong>" + database + ": </strong>" + line.getMessage() + "</font>" + "<br>";
print(pw, s);
} // while it2
print(pw, "</p>");
} // while it
print(pw, "<hr>");
}
private String getFontForReport(ReportLine line) {
String s1 = "";
switch (line.getLevel()) {
case (ReportLine.PROBLEM):
s1 = "<font color='red'>";
break;
case (ReportLine.WARNING):
s1 = "<font color='black'>";
break;
case (ReportLine.INFO):
s1 = "<font color='grey'>";
break;
case (ReportLine.CORRECT):
s1 = "<font color='green'>";
break;
default:
s1 = "<font color='black'>";
}
return s1;
}
private void printFooter(PrintWriter pw) {
long runTime = System.currentTimeMillis() - startTime;
String runStr = Utils.formatTimeString(runTime);
print(pw, "<p>Test run was started at " + new Date(startTime).toString() + " and finished at " + new Date().toString() + "<br>");
print(pw, " Run time " + runStr + "</p>");
print(pw, "<h4>Configuration used:</h4>");
print(pw, "<pre>");
print(pw, "Tests/groups run: " + System.getProperty("webtestrunner.groups") + "<br>");
print(pw, "Database host: " + System.getProperty("host") + ":" + System.getProperty("port") + "<br>");
print(pw, "Database names: " + System.getProperty("webtestrunner.databases") + "<br>");
print(pw, "Output file: " + System.getProperty("webtestrunner.file") + "<br>");
print(pw, "Output level: " + System.getProperty("webtestrunner.outputlevel") + "<br>");
print(pw, "</pre>");
print(pw, "</div>");
print(pw, "</body>");
print(pw, "</html>");
print(pw, "<hr>");
}
private void print(PrintWriter pw, String s) {
pw.write(s + "\n");
}
private void printSummaryByDatabase(PrintWriter pw) {
print(pw, "<h2>Summary by database</h2>");
print(pw, "<p><table class='ss'>");
print(pw, "<tr><th>Database</th><th>Passed</th><th>Failed</th></tr>");
Map reportsByDB = ReportManager.getAllReportsByDatabase();
TreeSet databases = new TreeSet(reportsByDB.keySet());
Iterator it = databases.iterator();
while (it.hasNext()) {
String database = (String) it.next();
String link = "<a href=\"#" + database + "\">";
int[] passesAndFails = ReportManager.countPassesAndFailsDatabase(database);
String s = (passesAndFails[1] == 0) ? passFont() : failFont();
String[] t = { link + s + database + "</font></a>", passFont() + passesAndFails[0] + "</font>",
failFont() + passesAndFails[1] + "</font>" };
printTableLine(pw, t);
}
print(pw, "</table></p>");
print(pw, "<hr>");
}
private void printSummaryByTest(PrintWriter pw) {
print(pw, "<h2>Summary by test</h2>");
print(pw, "<p><table class='ss'>");
print(pw, "<tr><th>Test</th><th>Passed</th><th>Failed</th></tr>");
Map reports = ReportManager.getAllReportsByTestCase();
TreeSet tests = new TreeSet(reports.keySet());
Iterator it = tests.iterator();
while (it.hasNext()) {
String test = (String) it.next();
String link = "<a href=\"#" + test + "\">";
int[] passesAndFails = ReportManager.countPassesAndFailsTest(test);
String s = (passesAndFails[1] == 0) ? passFont() : failFont();
String[] t = { link + s + test + "</font></a>", passFont() + passesAndFails[0] + "</font>", failFont() + passesAndFails[1] + "</font>" };
printTableLine(pw, t);
}
print(pw, "</table></p>");
print(pw, "<hr>");
}
private void printExecutiveSummary(PrintWriter pw) {
print(pw, "<h2>Summary</h2>");
int[] result = ReportManager.countPassesAndFailsAll();
StringBuffer s = new StringBuffer();
s.append("<p><strong>");
s.append(passFont() + result[0] + "</font> tests passed and ");
s.append(failFont() + result[1] + "</font> failed out of a total of ");
s.append((result[0] + result[1]) + " tests run.</strong></p>");
print(pw, s.toString());
print(pw, "<hr>");
}
private void printTableLine(PrintWriter pw, String[] s) {
pw.write("<tr>");
for (int i = 0; i < s.length; i++) {
pw.write("<td>" + s[i] + "</td>");
}
pw.write("</tr>\n");
}
private void printNavigation(PrintWriter pw) {
print(pw, "<div id='related'><div id='related-box'>");
print(pw, "<h2>Results by database</h2>");
print(pw, "<ul>");
Map reportsByDB = ReportManager.getAllReportsByDatabase();
TreeSet databases = new TreeSet(reportsByDB.keySet());
Iterator it = databases.iterator();
while (it.hasNext()) {
String database = (String) it.next();
String link = "<a href=\"#" + database + "\">";
print(pw, "<li>" + link + database + "</a></li>");
}
print(pw, "</ul>");
print(pw, "<h2>Results by test</h2>");
print(pw, "<ul>");
Map reports = ReportManager.getAllReportsByTestCase();
TreeSet tests = new TreeSet(reports.keySet());
it = tests.iterator();
while (it.hasNext()) {
String test = (String) it.next();
String name = test.substring(test.lastIndexOf('.') + 1);
String link = "<a href=\"#" + test + "\">";
print(pw, "<li>" + link + name + "</a></li>");
}
print(pw, "</ul>");
print(pw, "</div></div>");
}
private String passFont() {
return "<font color='green'>";
}
private String failFont() {
return "<font color='red'>";
}
} // WebTestRunner
|
package org.ensembl.healthcheck;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.logging.Handler;
import java.util.logging.Level;
import org.ensembl.healthcheck.testcase.EnsTestCase;
import org.ensembl.healthcheck.util.ConnectionPool;
import org.ensembl.healthcheck.util.LogFormatter;
import org.ensembl.healthcheck.util.MyStreamHandler;
import org.ensembl.healthcheck.util.Utils;
/**
* TestRunner optimised for outputting results to HTML.
*/
public class WebTestRunner extends TestRunner implements Reporter {
private boolean debug = false;
private String configFile = "web.properties";
private long testStartTime, appStartTime;
private static String TIMINGS_FILE = "timings.txt";
    /**
     * Main run method: reads configuration from the properties files, runs
     * every matching test and writes the HTML report.
     *
     * @param args
     *          Command-line arguments.
     */
    private void run(String[] args) {
        // NOTE(review): timings-file deletion is disabled in this revision —
        // confirm whether appending across runs is intended.
        // deleteTimingsFile();
        appStartTime = System.currentTimeMillis();
        ReportManager.setReporter(this);
        parseCommandLine(args);
        setupLogging();
        Utils.readPropertiesFileIntoSystem(PROPERTIES_FILE);
        Utils.readPropertiesFileIntoSystem(configFile);
        parseProperties();
        groupsToRun = getGroupsFromProperties();
        List databaseRegexps = getDatabasesFromProperties();
        outputLevel = setOutputLevelFromProperties();
        TestRegistry testRegistry = new TestRegistry();
        DatabaseRegistry databaseRegistry = new DatabaseRegistry(databaseRegexps, null, null);
        if (databaseRegistry.getAll().length == 0) {
            logger.warning("Warning: no database names matched any of the database regexps given");
        }
        runAllTests(databaseRegistry, testRegistry, false);
        printOutput();
        ConnectionPool.closeAll();
    } // run
/**
* Command-line entry point.
*
* @param args
* Command line args.
*/
public static void main(String[] args) {
new WebTestRunner().run(args);
} // main
private void parseCommandLine(String[] args) {
for (int i = 0; i < args.length; i++) {
if (args[i].equals("-h")) {
printUsage();
System.exit(0);
} else if (args[i].equals("-config")) {
configFile = args[++i];
} else if (args[i].equals("-debug")) {
debug = true;
logger.finest("Running in debug mode");
}
}
} // parseCommandLine
private void printUsage() {
System.out.println("\nUsage: WebTestRunner {options} \n");
System.out.println("Options:");
System.out.println(" -config <file> Properties file to use instead of web.properties");
System.out.println(" -h This message.");
System.out.println(" -debug Print debugging info");
System.out.println();
System.out.println("All configuration information is read from the files database.properties and web.properties. ");
System.out.println("web.properties should contain the following properties:");
System.out.println(" webtestrunner.groups= A comma-separated list of the groups, or individual tests, to run");
System.out.println(" webtestrunner.databases= A comma-separated list of database regexps to match");
System.out.println(" webtestrunner.file= The name of the output file to write to ");
System.out
.println(" webtestrunner.outputlevel= How much output to write. Should be one of all, info, warning, correct or problem");
}
private void setupLogging() {
// stop parent logger getting the message
logger.setUseParentHandlers(false);
Handler myHandler = new MyStreamHandler(System.out, new LogFormatter());
logger.addHandler(myHandler);
logger.setLevel(Level.WARNING);
if (debug) {
logger.setLevel(Level.FINEST);
}
} // setupLogging
// Implementation of Reporter interface
/**
* Called when a message is to be stored in the report manager.
*
* @param reportLine
* The message to store.
*/
public void message(ReportLine reportLine) {
}
/**
* Called just before a test case is run.
*
* @param testCase
* The test case about to be run.
* @param dbre
* The database which testCase is to be run on, or null of no/several
* databases.
*/
public void startTestCase(EnsTestCase testCase, DatabaseRegistryEntry dbre) {
testStartTime = System.currentTimeMillis();
}
/**
* Should be called just after a test case has been run.
*
* @param testCase
* The test case that was run.
* @param result
* The result of testCase.
* @param dbre
* The database which testCase was run on, or null of no/several
* databases.
*/
public void finishTestCase(EnsTestCase testCase, boolean result, DatabaseRegistryEntry dbre) {
long duration = System.currentTimeMillis() - testStartTime;
String str = duration + "\t";
if (dbre != null) {
str += dbre.getName() + "\t";
}
str += testCase.getShortTestName() + "\t";
str += Utils.formatTimeString(duration);
Utils.writeStringToFile(TIMINGS_FILE, str, true, true);
}
private void parseProperties() {
if (System.getProperty("webtestrunner.groups") == null) {
System.err.println("No tests or groups specified in " + configFile);
System.exit(1);
}
if (System.getProperty("webtestrunner.databases") == null) {
System.err.println("No databases specified in " + configFile);
System.exit(1);
}
if (System.getProperty("webtestrunner.file") == null) {
System.err.println("No output file specified in " + configFile);
System.exit(1);
}
}
/**
* Get a list of test groups by parsing the appropriate property.
*
* @return the list of group or test names.
*/
private List getGroupsFromProperties() {
String[] groups = System.getProperty("webtestrunner.groups").split(",");
return Arrays.asList(groups);
}
/**
* Get a list of databases by parsing the appropriate property.
*
* @return The list of database names or patterns.
*/
private List getDatabasesFromProperties() {
String[] dbs = System.getProperty("webtestrunner.databases").split(",");
return Arrays.asList(dbs);
}
private int setOutputLevelFromProperties() {
String lstr = System.getProperty("webtestrunner.outputlevel").toLowerCase();
if (lstr.equals("all")) {
outputLevel = ReportLine.ALL;
} else if (lstr.equals("none")) {
outputLevel = ReportLine.NONE;
} else if (lstr.equals("problem")) {
outputLevel = ReportLine.PROBLEM;
} else if (lstr.equals("correct")) {
outputLevel = ReportLine.CORRECT;
} else if (lstr.equals("warning")) {
outputLevel = ReportLine.WARNING;
} else if (lstr.equals("info")) {
outputLevel = ReportLine.INFO;
} else {
System.err.println("Output level " + lstr + " not recognised; using 'all'");
}
return outputLevel;
}
/**
* Print formatted output held in outputBuffer to file specified in System
* property file.
*/
private void printOutput() {
String file = System.getProperty("webtestrunner.file");
try {
PrintWriter pw = new PrintWriter(new FileOutputStream(file));
printHeader(pw);
printNavigation(pw);
printExecutiveSummary(pw);
printSummaryByDatabase(pw);
printSummaryByTest(pw);
printReportsByDatabase(pw);
printReportsByTest(pw);
printFooter(pw);
pw.close();
} catch (Exception e) {
System.err.println("Error writing to " + file);
e.printStackTrace();
}
}
private void printHeader(PrintWriter pw) {
print(pw, "<html>");
print(pw, "<head>");
print(pw, "<style type=\"text/css\" media=\"all\">");
print(pw, "@import url(http:
print(pw, "@import url(http:
print(pw, "#page ul li { list-style-type:none; list-style-image: none; margin-left: -2em }");
print(pw, "</style>");
print(pw, "<title>" + System.getProperty("webtestrunner.title") + "</title>");
print(pw, "</head>");
print(pw, "<body>");
print(pw, "<div id='page'><div id='i1'><div id='i2'><div class='sptop'> </div>");
print(pw, "<div id='release'>" + System.getProperty("webtestrunner.title") + "</div>");
print(pw, "<hr>");
}
private void printReportsByDatabase(PrintWriter pw) {
print(pw, "<h2>Detailed reports by database</h2>");
Map reportsByDB = ReportManager.getAllReportsByDatabase(outputLevel);
TreeSet dbs = new TreeSet(reportsByDB.keySet());
Iterator it = dbs.iterator();
while (it.hasNext()) {
String database = (String) it.next();
List reports = (List) reportsByDB.get(database);
Iterator it2 = reports.iterator();
if (!reports.isEmpty()) {
String link = "<a name=\"" + database + "\">";
print(pw, "<h3 class='boxed'>" + link + database + "</a></h3>");
print(pw, "<p>");
String lastTest = "";
while (it2.hasNext()) {
ReportLine line = (ReportLine) it2.next();
String test = line.getShortTestCaseName();
if (!lastTest.equals("") && !test.equals(lastTest)) {
print(pw, "</p><p>");
}
lastTest = test;
String linkTarget = "<a name=\"" + database + ":" + test + "\"></a> ";
String s = linkTarget + getFontForReport(line) + "<strong>" + test + ": </strong>" + line.getMessage() + "</font>"
+ "<br>";
print(pw, s);
} // while it2
print(pw, "</p>");
}
} // while it
print(pw, "<hr>");
}
private void printReportsByTest(PrintWriter pw) {
print(pw, "<h2>Detailed reports by test case</h2>");
Map reportsByTC = ReportManager.getAllReportsByTestCase(outputLevel);
TreeSet dbs = new TreeSet(reportsByTC.keySet());
Iterator it = dbs.iterator();
while (it.hasNext()) {
String test = (String) it.next();
List reports = (List) reportsByTC.get(test);
Iterator it2 = reports.iterator();
if (!reports.isEmpty()) {
String link = "<a name=\"" + test + "\">";
print(pw, "<h3 class='boxed'>" + link + test + "</a></h3>");
print(pw, "<p>");
String lastDB = "";
while (it2.hasNext()) {
ReportLine line = (ReportLine) it2.next();
String database = line.getDatabaseName();
if (!lastDB.equals("") && !database.equals(lastDB)) {
print(pw, "</p><p>");
}
lastDB = database;
String linkTarget = "<a name=\"" + line.getShortTestCaseName() + ":" + database + "\"></a> ";
String s = linkTarget + getFontForReport(line) + "<strong>" + database + ": </strong>" + line.getMessage() + "</font>"
+ "<br>";
print(pw, s);
} // while it2
print(pw, "</p>");
}
} // while it
print(pw, "<hr>");
}
private String getFontForReport(ReportLine line) {
String s1 = "";
switch (line.getLevel()) {
case (ReportLine.PROBLEM):
s1 = "<font color='red'>";
break;
case (ReportLine.WARNING):
s1 = "<font color='black'>";
break;
case (ReportLine.INFO):
s1 = "<font color='grey'>";
break;
case (ReportLine.CORRECT):
s1 = "<font color='green'>";
break;
default:
s1 = "<font color='black'>";
}
return s1;
}
private void printFooter(PrintWriter pw) {
long runTime = System.currentTimeMillis() - appStartTime;
String runStr = Utils.formatTimeString(runTime);
print(pw, "<p>Test run was started at " + new Date(appStartTime).toString() + " and finished at " + new Date().toString()
+ "<br>");
print(pw, " Run time " + runStr + "</p>");
print(pw, "<h4>Configuration used:</h4>");
print(pw, "<pre>");
print(pw, "Tests/groups run: " + System.getProperty("webtestrunner.groups") + "<br>");
print(pw, "Database host: " + System.getProperty("host") + ":" + System.getProperty("port") + "<br>");
print(pw, "Database names: " + System.getProperty("webtestrunner.databases") + "<br>");
print(pw, "Output file: " + System.getProperty("webtestrunner.file") + "<br>");
print(pw, "Output level: " + System.getProperty("webtestrunner.outputlevel") + "<br>");
print(pw, "</pre>");
print(pw, "</div>");
print(pw, "</body>");
print(pw, "</html>");
print(pw, "<hr>");
}
private void print(PrintWriter pw, String s) {
pw.write(s + "\n");
}
private void printSummaryByDatabase(PrintWriter pw) {
print(pw, "<h2>Summary by database</h2>");
print(pw, "<p><table class='ss'>");
print(pw, "<tr><th>Database</th><th>Passed</th><th>Failed</th></tr>");
Map reportsByDB = ReportManager.getAllReportsByDatabase();
TreeSet databases = new TreeSet(reportsByDB.keySet());
Iterator it = databases.iterator();
while (it.hasNext()) {
String database = (String) it.next();
String link = "<a href=\"#" + database + "\">";
int[] passesAndFails = ReportManager.countPassesAndFailsDatabase(database);
String s = (passesAndFails[1] == 0) ? passFont() : failFont();
String[] t = { link + s + database + "</font></a>", passFont() + passesAndFails[0] + "</font>",
failFont() + passesAndFails[1] + "</font>" };
printTableLine(pw, t);
}
print(pw, "</table></p>");
print(pw, "<hr>");
}
private void printSummaryByTest(PrintWriter pw) {
print(pw, "<h2>Summary by test</h2>");
print(pw, "<p><table class='ss'>");
print(pw, "<tr><th>Test</th><th>Passed</th><th>Failed</th></tr>");
Map reports = ReportManager.getAllReportsByTestCase();
TreeSet tests = new TreeSet(reports.keySet());
Iterator it = tests.iterator();
while (it.hasNext()) {
String test = (String) it.next();
String link = "<a href=\"#" + test + "\">";
int[] passesAndFails = ReportManager.countPassesAndFailsTest(test);
String s = (passesAndFails[1] == 0) ? passFont() : failFont();
String[] t = { link + s + test + "</font></a>", passFont() + passesAndFails[0] + "</font>",
failFont() + passesAndFails[1] + "</font>" };
printTableLine(pw, t);
}
print(pw, "</table></p>");
print(pw, "<hr>");
}
private void printExecutiveSummary(PrintWriter pw) {
print(pw, "<h2>Summary</h2>");
int[] result = ReportManager.countPassesAndFailsAll();
StringBuffer s = new StringBuffer();
s.append("<p><strong>");
s.append(passFont() + result[0] + "</font> tests passed and ");
s.append(failFont() + result[1] + "</font> failed out of a total of ");
s.append((result[0] + result[1]) + " tests run.</strong></p>");
print(pw, s.toString());
print(pw, "<hr>");
}
private void printTableLine(PrintWriter pw, String[] s) {
pw.write("<tr>");
for (int i = 0; i < s.length; i++) {
pw.write("<td>" + s[i] + "</td>");
}
pw.write("</tr>\n");
}
private void printNavigation(PrintWriter pw) {
print(pw, "<div id='related'><div id='related-box'>");
print(pw, "<h2>Results by database</h2>");
print(pw, "<ul>");
Map reportsByDB = ReportManager.getAllReportsByDatabase();
TreeSet databases = new TreeSet(reportsByDB.keySet());
Iterator it = databases.iterator();
while (it.hasNext()) {
String database = (String) it.next();
if (database.length() > 27) {
database = "<font size=-1>" + database + "</font>";
}
String link = "<a href=\"#" + database + "\">";
print(pw, "<li>" + link + database + "</a></li>");
}
print(pw, "</ul>");
print(pw, "<h2>Results by test</h2>");
print(pw, "<ul>");
Map reports = ReportManager.getAllReportsByTestCase();
TreeSet tests = new TreeSet(reports.keySet());
it = tests.iterator();
while (it.hasNext()) {
String test = (String) it.next();
String name = test.substring(test.lastIndexOf('.') + 1);
if (name.length() > 27) {
name = "<font size=-1>" + name + "</font>";
}
String link = "<a href=\"#" + test + "\">";
print(pw, "<li>" + link + name + "</a></li>");
}
print(pw, "</ul>");
print(pw, "</div></div>");
}
private String passFont() {
return "<font color='green' size=-1>";
}
private String failFont() {
return "<font color='red' size=-1>";
}
} // WebTestRunner
|
package org.exist.xquery;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.exist.xquery.value.AtomicValue;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.SequenceIterator;
import org.exist.xquery.value.StringValue;
/**
 * Node constructor for attribute nodes.
 *
 * An attribute value is assembled from an ordered mix of literal string
 * fragments and enclosed expressions held in {@link #contents}.
 *
 * @author wolf
 */
public class AttributeConstructor extends NodeConstructor {

    String qname;
    List contents = new ArrayList(5);
    boolean isNamespaceDecl = false;

    public AttributeConstructor(XQueryContext context, String name) {
        super(context);
        // Attributes named "xmlns" or "xmlns:*" declare namespaces.
        isNamespaceDecl = name.startsWith("xmlns");
        this.qname = name;
    }

    /** Appends a literal string fragment to the attribute value. */
    public void addValue(String value) {
        contents.add(value);
    }

    /** Appends an enclosed expression; rejected for namespace declarations. */
    public void addEnclosedExpr(Expression expr) throws XPathException {
        if (isNamespaceDecl)
            throw new XPathException("enclosed expressions are not allowed in namespace " +
                "declaration attributes");
        contents.add(expr);
    }

    /** @return the qualified name of the attribute being constructed */
    public String getQName() {
        return qname;
    }

    /** @return true if this attribute is an xmlns namespace declaration */
    public boolean isNamespaceDeclaration() {
        return isNamespaceDecl;
    }

    /* (non-Javadoc)
     * @see org.exist.xquery.Expression#eval(org.exist.xquery.StaticContext, org.exist.dom.DocumentSet, org.exist.xquery.value.Sequence, org.exist.xquery.value.Item)
     */
    public Sequence eval(
        Sequence contextSequence,
        Item contextItem)
        throws XPathException {
        // Concatenate literal fragments and evaluated enclosed expressions in order.
        StringBuffer value = new StringBuffer();
        Iterator parts = contents.iterator();
        while (parts.hasNext()) {
            Object part = parts.next();
            if (part instanceof Expression)
                evalEnclosedExpr(((Expression) part).eval(contextSequence, contextItem), value);
            else
                value.append(part);
        }
        StringValue result = new StringValue(value.toString());
        result.expand();
        return result;
    }

    /** Atomizes each item of the sequence into buf, space-separated. */
    private void evalEnclosedExpr(Sequence seq, StringBuffer buf) throws XPathException {
        SequenceIterator items = seq.iterate();
        while (items.hasNext()) {
            AtomicValue atomic = items.nextItem().atomize();
            buf.append(atomic.getStringValue());
            if (items.hasNext())
                buf.append(' ');
        }
    }

    /**
     * If this is a namespace declaration attribute, return
     * the single string value of the attribute.
     *
     * @return the first literal fragment, or "" when there is none
     */
    public String getLiteralValue() {
        return contents.isEmpty() ? "" : (String) contents.get(0);
    }

    /* (non-Javadoc)
     * @see org.exist.xquery.Expression#pprint()
     */
    public String pprint() {
        StringBuffer buf = new StringBuffer();
        buf.append(qname).append("=\"");
        Iterator parts = contents.iterator();
        while (parts.hasNext()) {
            Object part = parts.next();
            if (part instanceof Expression)
                buf.append(((Expression) part).pprint());
            else
                buf.append(part);
        }
        buf.append('"');
        return buf.toString();
    }

    /* (non-Javadoc)
     * @see org.exist.xquery.NodeConstructor#resetState()
     */
    public void resetState() {
        // Only enclosed expressions carry resettable state; literals are skipped.
        Iterator parts = contents.iterator();
        while (parts.hasNext()) {
            Object part = parts.next();
            if (part instanceof Expression)
                ((Expression) part).resetState();
        }
    }
}
|
package org.first.team2620.subsystems;
import org.first.team2620.RobotMap;
/**
 * Controls the robot's pyramid-climbing sequence.
 *
 * climb() spawns two worker threads: one drives the left/right hand conveyors
 * based on the hook limit switches exposed by RobotMap, the other raises and
 * lowers the leg between levels. overRideClimb() aborts both threads.
 *
 * Fields shared between the caller and the worker threads are volatile so
 * writes (in particular the End_ abort flag) are visible across threads.
 *
 * @author frc2620
 */
public class Climber {
    // Levels completed so far; written by the conveyor thread, read by the leg thread.
    private volatile int LevelCount_ = 0;
    private int StopClimbLevel_ = 3;
    // Conveyor power for each hand; the sign flips each time both hands re-hook.
    private volatile double LHClimbPower_ = RobotMap.ClimbPower;
    private volatile double RHClimbPower_ = -RobotMap.ClimbPower;
    private double LegPower_ = RobotMap.LegPower;
    // Whether each hand's conveyor should currently be driven.
    private volatile boolean LHClimb_ = true;
    private volatile boolean RHClimb_ = true;
    private Thread ConveyorsThread_ = null;
    private Thread LegThread_ = null;
    // Abort flag set by overRideClimb(), possibly from another thread.
    private volatile boolean End_ = false;
    // Guards against starting the climb twice.
    private boolean Climbing_ = false;

    /**
     * Starts the climb. Only the first call has any effect; subsequent calls
     * are ignored while Climbing_ is set.
     */
    public void climb()
    {
        if(Climbing_ == false)
        {
            Climbing_ = true;
            // Conveyors Movement Thread
            ConveyorsThread_ = new Thread(new Runnable() {
                public void run() {
                    // NOTE(review): this loop uses <= while the leg thread uses <;
                    // preserved as-is -- confirm the asymmetry is intentional.
                    // NOTE(review): both loops spin without sleeping; left unchanged.
                    while(LevelCount_ <= StopClimbLevel_ && End_ == false)
                    {
                        if(RobotMap.LHTopHooked.get() && RobotMap.RHTopHooked.get()) // Both are on top hook
                        {
                            LHClimb_ = true;
                            RHClimb_ = true;
                            // Reverse both conveyors and advance to the next level.
                            LHClimbPower_ *= -1;
                            RHClimbPower_ *= -1;
                            LevelCount_ += 1;
                        }
                        else
                        {
                            if(RobotMap.LHMiddleHooked.get() && RobotMap.RHMiddleHooked.get()) // Both holding on middle
                            {
                                LHClimb_ = true;
                                RHClimb_ = true;
                                if(LevelCount_ == StopClimbLevel_)
                                {
                                    // Final level reached: stop both conveyors.
                                    LHClimb_ = false;
                                    RHClimb_ = false;
                                }
                                else
                                {
                                    LHClimbPower_ *= -1;
                                    RHClimbPower_ *= -1;
                                }
                            }
                            else // Middle of climbing, lets keep this bad boy level
                            {
                                // Level each side out at top
                                if(RobotMap.LHTopHooked.get() && (RobotMap.RHTopHooked.get() == false)) // Left hand is ready to stop, right hand keep climbing to top
                                {
                                    LHClimb_ = false;
                                }
                                else if(RobotMap.RHTopHooked.get() && (RobotMap.LHTopHooked.get() == false)) // Right hand is ready to stop, left hand keep climbing to top
                                {
                                    RHClimb_ = false;
                                }
                                // Level each side out at middle
                                if(RobotMap.LHMiddleHooked.get() && (RobotMap.RHMiddleHooked.get() == false)) // Left hand is ready to stop, right hand keep climbing to top
                                {
                                    LHClimb_ = false;
                                }
                                else if(RobotMap.RHMiddleHooked.get() && (RobotMap.LHMiddleHooked.get() == false)) // Right hand is ready to stop, left hand keep climbing to top
                                {
                                    RHClimb_ = false;
                                }
                            }
                        }
                        // Apply the computed drive state to the conveyor motors.
                        if(LHClimb_) {
                            RobotMap.LHConveyor.set(LHClimbPower_);
                        } else {
                            RobotMap.LHConveyor.set(0);
                        }
                        if(RHClimb_) {
                            RobotMap.RHConveyor.set(RHClimbPower_);
                        } else {
                            RobotMap.RHConveyor.set(0);
                        }
                    }
                    // Just to make sure we stop climbing
                    RobotMap.LHConveyor.set(0);
                    RobotMap.RHConveyor.set(0);
                }
            });
            // Leg Movement Thread
            LegThread_ = new Thread(new Runnable() {
                public void run() {
                    boolean bringDown = false;
                    boolean bringUp = false;
                    while(LevelCount_ < StopClimbLevel_ && End_ == false)
                    {
                        if(RobotMap.LHMiddleHooked.get() && RobotMap.RHMiddleHooked.get()) // Both holding on middle, bring leg down
                        {
                            bringDown = true;
                        }
                        if(RobotMap.LHTopHooked.get() && RobotMap.RHTopHooked.get()) // Both are set on top, bring leg up to get out of corners way
                        {
                            bringUp = true;
                        }
                        if(bringDown)
                        {
                            if(RobotMap.LegDown.get() == false)
                            {
                                RobotMap.Leg.set(LegPower_);
                            }
                            else
                            {
                                RobotMap.Leg.set(0);
                                bringDown = false;
                            }
                        }
                        if(bringUp)
                        {
                            if(RobotMap.LegUp.get() == false)
                            {
                                RobotMap.Leg.set(-LegPower_);
                            }
                            else
                            {
                                RobotMap.Leg.set(0);
                                // BUGFIX: was 'bringDown = false' (copy/paste), which
                                // left bringUp latched forever once the leg topped out.
                                bringUp = false;
                            }
                        }
                    }
                    // Bring leg all the way up after it climbs all the way
                    while(RobotMap.LegUp.get() == false && End_ == false)
                    {
                        RobotMap.Leg.set(-LegPower_);
                    }
                    RobotMap.Leg.set(0);
                }
            });
            ConveyorsThread_.start();
            LegThread_.start();
        }
    }

    /**
     * Aborts the climb: both worker threads observe End_, exit their loops
     * and zero their motors.
     */
    public void overRideClimb()
    {
        End_ = true;
    }
}
|
package org.jamocha.dn.compiler.ecblocks;
import java.util.HashSet;
import java.util.Set;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.jamocha.dn.ConstructCache.Defrule.ECSetRule;
import org.jamocha.languages.common.SingleFactVariable;
import com.atlassian.fugue.Either;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Sets;
/**
 * Compiler-side view of a rule: bundles the original {@link ECSetRule} with
 * the filters, fact variables and existential proxies derived from it.
 *
 * @author Fabian Ohler <fabian.ohler1@rwth-aachen.de>
 */
@RequiredArgsConstructor
@Getter
public class Rule {
// The source rule from the construct cache.
final ECSetRule original;
// Filters associated with this rule; populated externally after construction.
final Set<Filter> filters = new HashSet<>();
// Fact variables of the rule; copied from the original in the convenience constructor.
final Set<SingleFactVariable> factvariables;
// Bidirectional mapping between filter instances and their existential proxies.
final BiMap<Filter.FilterInstance, ExistentialProxy> existentialProxies = HashBiMap.create();
// Tagged self-reference: Either.left for a Rule, Either.right for an ExistentialProxy.
final Either<Rule, ExistentialProxy> either;
public Rule(final ECSetRule original) {
this.original = original;
this.factvariables = Sets.newHashSet(original.getFactVariables());
// This instance is the 'rule' (left) case of the Either.
this.either = Either.left(this);
}
@Override
public String toString() {
// Identity hash suffix distinguishes multiple instances wrapping the same named rule.
return this.original.getParent().getName() + "@" + Integer.toHexString(System.identityHashCode(this));
}
}
|
package org.opennms.mock.snmp;
import static org.junit.Assert.*;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.snmp4j.CommunityTarget;
import org.snmp4j.PDU;
import org.snmp4j.SNMP4JSettings;
import org.snmp4j.ScopedPDU;
import org.snmp4j.Snmp;
import org.snmp4j.TransportMapping;
import org.snmp4j.UserTarget;
import org.snmp4j.event.ResponseEvent;
import org.snmp4j.mp.MPv3;
import org.snmp4j.mp.SnmpConstants;
import org.snmp4j.security.AuthMD5;
import org.snmp4j.security.PrivDES;
import org.snmp4j.security.SecurityLevel;
import org.snmp4j.security.SecurityModels;
import org.snmp4j.security.SecurityProtocols;
import org.snmp4j.security.USM;
import org.snmp4j.security.UsmUser;
import org.snmp4j.smi.Integer32;
import org.snmp4j.smi.OID;
import org.snmp4j.smi.OctetString;
import org.snmp4j.smi.SMIConstants;
import org.snmp4j.smi.UdpAddress;
import org.snmp4j.smi.Variable;
import org.snmp4j.smi.VariableBinding;
import org.snmp4j.transport.DefaultUdpTransportMapping;
/**
 * Integration test driving a MockSnmpAgent loaded with the penrose LLDP MIB
 * snapshot. Parameterized so every test runs once per SNMP protocol version
 * (v1, v2c, v3).
 */
@RunWith(Parameterized.class)
public class LLDPMibTest {
/** SNMP versions the whole test class is repeated for. */
@Parameters
public static Collection<Object[]> versions() {
return Arrays.asList(new Object[][] {
{ SnmpConstants.version1 },
{ SnmpConstants.version2c },
{ SnmpConstants.version3 },
});
}
// Agent under test; started fresh for each test in setUp().
private MockSnmpAgent m_agent;
// Client-side USM registered globally in setUp(); also used to clear engine times.
private USM m_usm;
// Varbinds queued via request(); sent and verified together by requestAndVerifyResponse().
private ArrayList<AnticipatedRequest> m_requestedVarbinds;
// SNMP version for this parameterized run.
private int m_version;
private long m_timeout = -1; // -1 means use the default
public LLDPMibTest(int version) {
m_version = version;
}
/**
 * One queued varbind: the OID (and optional value) to send, plus the
 * OID/syntax/value expected back in the response.
 */
private class AnticipatedRequest {
private String m_requestedOid;
private Variable m_requestedValue;
private String m_expectedOid;
private int m_expectedSyntax;
private Variable m_expectedValue;
public AnticipatedRequest(String requestedOid, Variable requestedValue) {
m_requestedOid = requestedOid;
m_requestedValue = requestedValue;
}
/** Records what the agent is expected to return for this request. */
public void andExpect(String expectedOid, int expectedSyntax, Variable expectedValue) {
m_expectedOid = expectedOid;
m_expectedSyntax = expectedSyntax;
m_expectedValue = expectedValue;
}
/** Builds the varbind to put in the outgoing PDU. */
public VariableBinding getRequestVarbind() {
OID oid = new OID(m_requestedOid);
if (m_requestedValue != null) {
return new VariableBinding(oid, m_requestedValue);
} else {
return new VariableBinding(oid);
}
}
/** Asserts the response varbind matches the expected OID, syntax and value. */
public void verify(VariableBinding vb) {
assertNotNull("variable binding should not be null", vb);
Variable val = vb.getVariable();
assertNotNull("variable should not be null", val);
assertEquals("OID (value: " + val + ")", new OID(m_expectedOid), vb.getOid());
assertEquals("syntax", m_expectedSyntax, vb.getSyntax());
assertEquals("value", m_expectedValue, val);
}
}
@Before
public void setUp() throws Exception {
// Create a global USM that all client calls will use
SNMP4JSettings.setEnterpriseID(5813);
m_usm = new USM(SecurityProtocols.getInstance(), new OctetString(MPv3.createLocalEngineID()), 0);
SecurityModels.getInstance().addSecurityModel(m_usm);
// Port 0 lets the agent pick a free UDP port (read back via m_agent.getPort()).
m_agent = MockSnmpAgent.createAgentAndRun(classPathResource("penrose-lldp-mib.properties"), "127.0.0.1/0");
m_requestedVarbinds = new ArrayList<AnticipatedRequest>();
}
@After
public void tearDown() throws Exception {
if (m_agent != null) {
m_agent.shutDownAndWait();
}
}
/** Queues a varbind (OID plus optional SET value) for the next request. */
public AnticipatedRequest request(String requestedOid, Variable requestedValue) {
AnticipatedRequest r = new AnticipatedRequest(requestedOid, requestedValue);
m_requestedVarbinds.add(r);
return r;
}
/** Queues a value-less varbind (for GET/GETNEXT) for the next request. */
public AnticipatedRequest request(String requestOid) {
return request(requestOid, null);
}
/** Clears the queued varbinds after a request/verify round-trip. */
public void reset() {
m_requestedVarbinds.clear();
}
/**
 * Make sure that we can setUp() and tearDown() the agent.
 * @throws InterruptedException
 */
@Test
public void testAgentSetup() {
assertNotNull("agent should be non-null", m_agent);
}
/**
 * Test that we can setUp() and tearDown() twice to ensure that the
 * MockSnmpAgent tears itself down properly. In particular, we want to make
 * sure that the UDP listener gets torn down so listening port is free for
 * later instances of the agent.
 *
 * @throws Exception
 */
@Test
public void testSetUpTearDownTwice() throws Exception {
// don't need the first setUp(), since it's already been done by JUnit
tearDown();
setUp();
// don't need the second tearDown(), since it will be done by JUnit
}
@Test
public void testGetNext() throws Exception {
request(".1.0.8802.1.1.2.1.3.1").andExpect(".1.0.8802.1.1.2.1.3.1.0", SMIConstants.SYNTAX_INTEGER32, new Integer32(4));
doGetNext();
}
@Test
public void testGetNextMultipleVarbinds() throws Exception {
request(".1.0.8802.1.1.2.1.3.1").andExpect(".1.0.8802.1.1.2.1.3.1.0", SMIConstants.SYNTAX_INTEGER32, new Integer32(4));
doGetNext();
// Bump the agent's engine boots mid-test; a v3 client must cope with the change.
m_agent.getUsm().setEngineBoots(15);
byte[] hexString = new byte[] { (byte)0x80, (byte)0x71, (byte)0x1F, (byte)0x8F, (byte)0xAF, (byte)0xC0 };
request(".1.0.8802.1.1.2.1.3.1").andExpect(".1.0.8802.1.1.2.1.3.1.0", SMIConstants.SYNTAX_INTEGER32, new Integer32(4));
request(".1.0.8802.1.1.2.1.3.2").andExpect(".1.0.8802.1.1.2.1.3.2.0", SMIConstants.SYNTAX_OCTET_STRING, new OctetString(hexString));
request(".1.0.8802.1.1.2.1.3.3").andExpect(".1.0.8802.1.1.2.1.3.3.0", SMIConstants.SYNTAX_OCTET_STRING, new OctetString("penrose-mx480".getBytes()));
doGetNext();
// This statement breaks the internal state of the SNMP4J agent
// m_agent.getUsm().setLocalEngine(m_agent.getUsm().getLocalEngineID(), 15, 200);
// Make each side forget the other's engine time so the next v3 request
// has to resynchronise from scratch.
m_agent.getUsm().removeEngineTime(m_usm.getLocalEngineID());
m_usm.removeEngineTime(m_agent.getUsm().getLocalEngineID());
request(".1.0.8802.1.1.2.1.3.1").andExpect(".1.0.8802.1.1.2.1.3.1.0", SMIConstants.SYNTAX_INTEGER32, new Integer32(4));
doGetNext();
}
/** Sends the queued varbinds as a GETNEXT for this run's SNMP version. */
private void doGetNext() throws Exception {
requestAndVerifyResponse(PDU.GETNEXT, m_version);
}
/**
 * Builds a PDU from the queued varbinds, sends it, and verifies every
 * response varbind against its anticipated values. Clears the queue at the end.
 */
private void requestAndVerifyResponse(int pduType, int version) throws Exception {
PDU pdu = createPDU(version);
for(AnticipatedRequest a : m_requestedVarbinds) {
pdu.add(a.getRequestVarbind());
}
pdu.setType(pduType);
PDU response = sendRequest(pdu, version);
assertNotNull("request timed out", response);
System.err.println("Response is: "+response);
// A REPORT PDU indicates a v3 USM error rather than a real answer.
assertTrue("unexpected report pdu: " + ((VariableBinding)response.getVariableBindings().get(0)).getOid(), response.getType() != PDU.REPORT);
assertEquals("Unexpected number of varbinds returned.", m_requestedVarbinds.size(), response.getVariableBindings().size());
for(int i = 0; i < m_requestedVarbinds.size(); i++) {
AnticipatedRequest a = m_requestedVarbinds.get(i);
VariableBinding vb = response.get(i);
a.verify(vb);
}
reset();
}
/** v3 needs a ScopedPDU; v1/v2c use a plain PDU. */
private PDU createPDU(int version) {
if (version == SnmpConstants.version3) {
return new ScopedPDU();
} else {
return new PDU();
}
}
/** Dispatches to the v3 or v1/v2c send path. */
private PDU sendRequest(PDU pdu, int version) throws Exception {
if (version == SnmpConstants.version3) {
return sendRequestV3(pdu);
} else {
return sendRequestV1V2(pdu, version);
}
}
/** Sends a PDU with community 'public' over a fresh UDP transport; closes it afterwards. */
private PDU sendRequestV1V2(PDU pdu, int version) throws Exception {
PDU response;
CommunityTarget target = new CommunityTarget();
target.setCommunity(new OctetString("public"));
target.setAddress(new UdpAddress(InetAddress.getByName("127.0.0.1"), m_agent.getPort()));
target.setVersion(version);
if (m_timeout > 0) {
target.setTimeout(m_timeout);
}
TransportMapping<UdpAddress> transport = null;
try {
transport = new DefaultUdpTransportMapping();
Snmp snmp = new Snmp(transport);
transport.listen();
ResponseEvent e = snmp.send(pdu, target);
response = e.getResponse();
} finally {
if (transport != null) {
transport.close();
}
}
return response;
}
/** Sends a v3 PDU as user 'opennmsUser' with MD5 auth + DES privacy. */
private PDU sendRequestV3(PDU pdu) throws IOException {
PDU response;
OctetString userId = new OctetString("opennmsUser");
OctetString pw = new OctetString("0p3nNMSv3");
UserTarget target = new UserTarget();
target.setSecurityLevel(SecurityLevel.AUTH_PRIV);
target.setSecurityName(userId);
target.setAddress(new UdpAddress(InetAddress.getByName("127.0.0.1"), m_agent.getPort()));
target.setVersion(SnmpConstants.version3);
if (m_timeout > 0) {
target.setTimeout(m_timeout);
} else {
target.setTimeout(5000);
}
TransportMapping<UdpAddress> transport = null;
try {
// Fresh USM per request so engine discovery starts from a clean slate.
USM usm = new USM(SecurityProtocols.getInstance(), new OctetString(MPv3.createLocalEngineID()), 0);
SecurityModels.getInstance().addSecurityModel(usm);
transport = new DefaultUdpTransportMapping();
Snmp snmp = new Snmp(transport);
UsmUser user = new UsmUser(userId, AuthMD5.ID, pw, PrivDES.ID, pw);
snmp.getUSM().addUser(userId, user);
transport.listen();
ResponseEvent e = snmp.send(pdu, target);
response = e.getResponse();
} finally {
if (transport != null) {
transport.close();
}
}
return response;
}
/** Resolves a classpath resource (the MIB snapshot) to a URL. */
private URL classPathResource(String path) {
return getClass().getClassLoader().getResource(path);
}
}
|
package org.jgroups.auth;
import org.jgroups.Message;
import org.jgroups.annotations.Property;
import org.jgroups.util.Util;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
/**
 * <p>
 * The FixedMemberShipToken object predefines a list of IP addresses and Ports that can join the group.
 * </p>
 * <p>
 * Configuration parameters for this example are shown below:
 * </p>
 * <ul>
 * <li>fixed_members_value (required) = List of IP addresses & ports (optionally) - ports must be seperated by a '/' e.g. 127.0.0.1/1010*127.0.0.1/4567</li>
 * <li>fixed_members_seperator (required) = The seperator used between IP addresses - e.g. *</li>
 * </ul>
 * @author Chris Mills (millsy@jboss.com)
 */
public class FixedMembershipToken extends AuthToken {
    // Allowed members as "host:port" entries; built by setMemberList().
    private List<String> memberList=null;
    private String token="emptyToken";
    private String fixed_members_seperator=",";

    public FixedMembershipToken() {
    }

    public String getName() {
        return "org.jgroups.auth.FixedMembershipToken";
    }

    /**
     * Accepts the sender if its address appears in the configured member list,
     * matching either the full "host:port" form or the bare host part.
     *
     * @param token the token presented by the joining member
     * @param msg the join message whose source address is checked
     * @return true if the sender is in the member list
     */
    public boolean authenticate(AuthToken token, Message msg) {
        // instanceof already rejects null, so no separate null check is needed.
        if((token instanceof FixedMembershipToken) && (this.memberList != null)) {
            String sourceAddressWithPort=msg.getSrc().toString();
            // BUGFIX: an address without a ':' separator used to produce
            // substring(0, -1) and throw StringIndexOutOfBoundsException;
            // fall back to the whole address instead.
            int portSeparator=sourceAddressWithPort.indexOf(":");
            String sourceAddressWithoutPort=(portSeparator >= 0)
                ? sourceAddressWithPort.substring(0, portSeparator)
                : sourceAddressWithPort;
            if(log.isDebugEnabled()) {
                log.debug("AUTHToken received from " + sourceAddressWithPort);
            }
            if((this.memberList.contains(sourceAddressWithPort)) || (this.memberList.contains(sourceAddressWithoutPort))) {
                // validated
                if(log.isDebugEnabled()) {
                    log.debug("FixedMembershipToken match");
                }
                return true;
            }
            return false;
        }
        if(log.isWarnEnabled()) {
            log.warn("Invalid AuthToken instance - wrong type or null");
        }
        return false;
    }

    /**
     * Splits the configured value on the separator and normalises each entry
     * from "host/port" to "host:port".
     */
    @Property(name="fixed_members_value")
    public void setMemberList(String list) {
        memberList=new ArrayList<String>();
        StringTokenizer memberListTokenizer=new StringTokenizer(list, fixed_members_seperator);
        while(memberListTokenizer.hasMoreTokens()) {
            memberList.add(memberListTokenizer.nextToken().replace('/', ':'));
        }
    }

    /**
     * Required to serialize the object to pass across the wire
     * @param out
     * @throws java.io.IOException
     */
    public void writeTo(DataOutputStream out) throws IOException {
        if(log.isDebugEnabled()) {
            // BUGFIX: message previously said "SimpleToken" (copy/paste from another token class).
            log.debug("FixedMembershipToken writeTo()");
        }
        Util.writeString(this.token, out);
    }

    public void readFrom(DataInputStream in) throws IOException, IllegalAccessException, InstantiationException {
        if(log.isDebugEnabled()) {
            // BUGFIX: message previously said "SimpleToken" (copy/paste from another token class).
            log.debug("FixedMembershipToken readFrom()");
        }
        this.token=Util.readString(in);
    }
}
|
package org.mapyrus.dataset;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.mapyrus.Argument;
import org.mapyrus.MapyrusException;
import org.mapyrus.MapyrusMessages;
/**
 * Parses OGC Well Known Binary (WKB) geometry structures read from a database
 * into similar geometry structure used by Mapyrus.
 *
 * The Mapyrus representation filled in here is a flat double array of the
 * form: [geometry-type, point-count, op, x, y, op, x, y, ...] where op is
 * Argument.MOVETO or Argument.LINETO (as written by parseGeometry() below).
 */
public class WKBGeometryParser
{
	/*
	 * Possible byte order of geometries.
	 * These are fixed values from the WKB specification, so they are final.
	 */
	private static final byte BIG_ENDIAN = 0;
	private static final byte LITTLE_ENDIAN = 1;

	/*
	 * Geometry type codes defined by the OGC WKB specification.
	 */
	private static final int WKB_POINT = 1;
	private static final int WKB_LINESTRING = 2;
	private static final int WKB_POLYGON = 3;
	private static final int WKB_MULTIPOINT = 4;
	private static final int WKB_MULTILINESTRING = 5;
	private static final int WKB_MULTIPOLYGON = 6;
	private static final int WKB_GEOMETRY_COLLECTION = 7;

	/**
	 * Append a single byte as two zero-padded hex digits.
	 * @param sb buffer to append to.
	 * @param b byte value (only the low 8 bits are used).
	 */
	private static void appendHexByte(StringBuilder sb, int b)
	{
		String hex = Integer.toHexString(b & 255);
		if (hex.length() == 1)
			sb.append("0");
		sb.append(hex);
	}

	/**
	 * Convert bytes in ByteBuffer to hex digits, for use in error messages.
	 * @param b first byte of buffer.
	 * @param byteBuffer remaining bytes in buffer.
	 * @return first few bytes of buffer as a hex string.
	 */
	private static String convertToHexDigits(int b, ByteBuffer byteBuffer)
	{
		StringBuilder sb = new StringBuilder();
		sb.append("0x");
		appendHexByte(sb, b);

		/*
		 * Include up to six more bytes of context, then "..." if the
		 * buffer holds even more.
		 */
		for (int i = 0; i < 6; i++)
		{
			if (byteBuffer.hasRemaining())
				appendHexByte(sb, byteBuffer.get());
		}
		if (byteBuffer.hasRemaining())
			sb.append("...");
		return(sb.toString());
	}

	/**
	 * Parse a geometry from WKB buffer.
	 * Called recursively to parse geometries made up of multiple parts.
	 * @param byteBuffer buffer containg WKB geometry.
	 * @param geometry double array to fill with in Mapyrus geometry format.
	 * @param geometryIndex index at which to start filling geometry array.
	 * @return number of elements filled in geometry array.
	 * @throws MapyrusException if the buffer does not contain valid WKB.
	 */
	private static int parseGeometry(ByteBuffer byteBuffer,
		double []geometry, int geometryIndex) throws MapyrusException
	{
		int index = geometryIndex;
		int nPoints, nLines, nRings, nPolygons, nGeometries;

		/*
		 * If buffer is not long enough to hold the shortest geometry (a point:
		 * 1 byte order + 4 byte type + two 8 byte doubles) then the blob
		 * cannot possibly hold a valid geometry.
		 */
		if (byteBuffer.remaining() < 1 + 4 + 8 + 8)
		{
			String s = "";
			if (byteBuffer.hasRemaining())
				s = convertToHexDigits(byteBuffer.get(), byteBuffer);
			throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_OGC_WKB) +
				": " + s);
		}

		/*
		 * Parse byte order of this geometry and configure the buffer so the
		 * following multi-byte reads decode correctly.
		 */
		byte order = byteBuffer.get();
		if (order == BIG_ENDIAN)
			byteBuffer.order(ByteOrder.BIG_ENDIAN);
		else if (order == LITTLE_ENDIAN)
			byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
		else
		{
			throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_OGC_WKB) +
				": " + convertToHexDigits(order, byteBuffer));
		}

		/*
		 * Find type of geometry in buffer, then extract it from buffer.
		 */
		int wkbType = byteBuffer.getInt();
		if (wkbType == WKB_POINT)
		{
			geometry[index++] = Argument.GEOMETRY_POINT;
			geometry[index++] = 1;
			geometry[index++] = Argument.MOVETO;
			geometry[index++] = byteBuffer.getDouble();
			geometry[index++] = byteBuffer.getDouble();
		}
		else if (wkbType == WKB_LINESTRING)
		{
			geometry[index++] = Argument.GEOMETRY_LINESTRING;
			geometry[index++] = nPoints = byteBuffer.getInt();

			/*
			 * First coordinate starts the path, the rest extend it.
			 */
			int operation = Argument.MOVETO;
			for (int i = 0; i < nPoints; i++)
			{
				geometry[index++] = operation;
				geometry[index++] = byteBuffer.getDouble();
				geometry[index++] = byteBuffer.getDouble();
				operation = Argument.LINETO;
			}
		}
		else if (wkbType == WKB_POLYGON)
		{
			geometry[index++] = Argument.GEOMETRY_POLYGON;

			/*
			 * Total point count across all rings is not known until the rings
			 * have been read, so reserve a slot and back-patch it afterwards.
			 */
			int nPointsIndex = index++;
			int totalPoints = 0;
			nRings = byteBuffer.getInt();
			for (int j = 0; j < nRings; j++)
			{
				nPoints = byteBuffer.getInt();
				totalPoints += nPoints;

				/*
				 * Each ring begins with a MOVETO to separate it from
				 * the previous ring.
				 */
				int operation = Argument.MOVETO;
				for (int i = 0; i < nPoints; i++)
				{
					geometry[index++] = operation;
					geometry[index++] = byteBuffer.getDouble();
					geometry[index++] = byteBuffer.getDouble();
					operation = Argument.LINETO;
				}
			}
			geometry[nPointsIndex] = totalPoints;
		}
		else if (wkbType == WKB_MULTIPOINT)
		{
			geometry[index++] = Argument.GEOMETRY_MULTIPOINT;
			geometry[index++] = nPoints = byteBuffer.getInt();

			/*
			 * Each element of a multi-geometry is itself a complete WKB
			 * geometry (with its own byte order marker), so recurse.
			 */
			for (int i = 0; i < nPoints; i++)
			{
				int nEls = parseGeometry(byteBuffer, geometry, index);
				index += nEls;
			}
		}
		else if (wkbType == WKB_MULTILINESTRING)
		{
			geometry[index++] = Argument.GEOMETRY_MULTILINESTRING;
			geometry[index++] = nLines = byteBuffer.getInt();
			for (int i = 0; i < nLines; i++)
			{
				int nEls = parseGeometry(byteBuffer, geometry, index);
				index += nEls;
			}
		}
		else if (wkbType == WKB_MULTIPOLYGON)
		{
			geometry[index++] = Argument.GEOMETRY_MULTIPOLYGON;
			geometry[index++] = nPolygons = byteBuffer.getInt();
			for (int i = 0; i < nPolygons; i++)
			{
				int nEls = parseGeometry(byteBuffer, geometry, index);
				index += nEls;
			}
		}
		else if (wkbType == WKB_GEOMETRY_COLLECTION)
		{
			geometry[index++] = Argument.GEOMETRY_COLLECTION;
			geometry[index++] = nGeometries = byteBuffer.getInt();
			for (int i = 0; i < nGeometries; i++)
			{
				int nEls = parseGeometry(byteBuffer, geometry, index);
				index += nEls;
			}
		}
		else
		{
			throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_OGC_WKB) +
				": " + convertToHexDigits(wkbType, byteBuffer));
		}

		/*
		 * Return number of slots filled by this geometry.
		 */
		return(index - geometryIndex);
	}

	/**
	 * Parse WKB geometry into geometry used by Mapyrus.
	 * @param b byte array containing geometry.
	 * @return double array containing geometry in Mapyrus format.
	 * @throws MapyrusException if the byte array does not contain valid WKB.
	 */
	public static double []parse(byte []b) throws MapyrusException
	{
		/*
		 * Each coordinate in the WKB input occupies 8 bytes and expands to at
		 * most 2 array slots (operation code plus value), plus small per-part
		 * headers, so this sizing is always sufficient.
		 */
		double []retval = new double[(b.length + 7) / 8 * 2];
		ByteBuffer byteBuffer = ByteBuffer.wrap(b);
		parseGeometry(byteBuffer, retval, 0);
		return(retval);
	}
}
|
package org.myrobotlab.service;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.ServiceType;
import org.myrobotlab.framework.interfaces.Attachable;
import org.myrobotlab.framework.interfaces.ServiceInterface;
import org.myrobotlab.logging.Level;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.Logging;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.service.interfaces.I2CControl;
import org.myrobotlab.service.interfaces.I2CController;
import org.myrobotlab.service.interfaces.VoltageSensorControl;
import org.slf4j.Logger;
//import com.pi4j.io.i2c.I2CBus;
public class AdafruitIna219 extends Service implements I2CControl, VoltageSensorControl {
private static final long serialVersionUID = 1L;
public final static Logger log = LoggerFactory.getLogger(AdafruitIna219.class);
transient public I2CController controller;
public static final byte INA219_SHUNTVOLTAGE = 0x01;
public static final byte INA219_BUSVOLTAGE = 0x02;
public List<String> deviceAddressList = Arrays.asList("0x40", "0x41", "0x42", "0x43", "0x44", "0x45", "0x46", "0x47", "0x48", "0x49", "0x4A", "0x4B", "0x4C", "0x4D", "0x4E",
"0x4F");
public String deviceAddress = "0x40";
public List<String> deviceBusList = Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7");
public String deviceBus = "1";
public int busVoltage = 0;
public double shuntVoltage = 0;
public double current = 0.0;
public double power = 0.0;
// TODO Add methods to calibrate
// Currently only supports setting the shunt resistance to a different
// value than the default, in case it has been exchanged to measure
// a different range of current
public double shuntResistance = 0.1; // expressed in Ohms
public int scaleRange = 32; // 32V = bus full-scale range
public int pga = 8; // 320 mV = shunt full-scale range
public List<String> controllers;
public String controllerName;
public boolean isAttached = false;
public static void main(String[] args) {
LoggingFactory.init(Level.INFO);
try {
AdafruitIna219 adafruitINA219 = (AdafruitIna219) Runtime.start("AdafruitIna219", "AdafruitIna219");
Runtime.start("gui", "SwingGui");
Runtime.start("webgui", "WebGui");
byte msb = (byte) 0x83;
byte lsb = (byte) 0x00;
double test = (double) ((((int) msb) << 8 | (int) lsb & 0xff)) * .01;
log.info(String.format("msb = %s, lsb = %s, test = %s", msb, lsb, test));
// (((int)(readbuffer[0] & 0xff) << 5)) | ((int)(readbuffer[1] >>
} catch (Exception e) {
Logging.logError(e);
}
}
public AdafruitIna219(String n) {
super(n);
refreshControllers();
subscribe(Runtime.getInstance().getName(), "registered", this.getName(), "onRegistered");
}
public void onRegistered(ServiceInterface s) {
refreshControllers();
broadcastState();
}
/*
* Refresh the list of running services that can be selected in the GUI
*/
public List<String> refreshControllers() {
controllers = Runtime.getServiceNamesFromInterface(I2CController.class);
return controllers;
}
@Override
public void setDeviceBus(String deviceBus) {
if (isAttached) {
log.error(String.format("Already attached to %s, use detach(%s) first", this.controllerName));
return;
}
this.deviceBus = deviceBus;
broadcastState();
}
@Override
public void setDeviceAddress(String deviceAddress) {
if (isAttached) {
log.error(String.format("Already attached to %s, use detach(%s) first", this.controllerName));
return;
}
this.deviceAddress = deviceAddress;
broadcastState();
}
/**
* This method sets the shunt resistance in ohms Default value is .1 Ohms (
* R100 )
*/
// @Override
public void setShuntResistance(double shuntResistance) {
this.shuntResistance = shuntResistance;
}
// @Override
public double getShuntResistance() {
return shuntResistance;
}
/**
* This method reads and returns the power in milliWatts
*/
public void refresh() {
power = getPower();
broadcastState();
}
// @Override
public double getPower() {
power = getBusVoltage() * getCurrent() / 1000;
return power;
}
/**
* This method reads and returns the shunt current in milliAmperes
*/
// @Override
public double getCurrent() {
current = getShuntVoltage() / shuntResistance;
return current;
}
/**
* This method reads and returns the shunt Voltage in milliVolts
*/
// @Override
public double getShuntVoltage() {
byte[] writebuffer = { INA219_SHUNTVOLTAGE };
byte[] readbuffer = { 0x0, 0x0 };
controller.i2cWrite(this, Integer.parseInt(deviceBus), Integer.decode(deviceAddress), writebuffer, writebuffer.length);
controller.i2cRead(this, Integer.parseInt(deviceBus), Integer.decode(deviceAddress), readbuffer, readbuffer.length);
// log.info(String.format("getShuntVoltage x%02X x%02X", readbuffer[0],
// readbuffer[1]));
// The shuntVoltage is signed so the MSB can have sign bits, that needs
// to remain
shuntVoltage = (double) ((((int) readbuffer[0]) << 8 | (int) readbuffer[1] & 0xff)) * .01;
return shuntVoltage;
}
/**
* This method reads and returns the bus Voltage in milliVolts
*/
// @Override
public double getBusVoltage() {
byte[] writebuffer = { INA219_BUSVOLTAGE };
byte[] readbuffer = { 0x0, 0x0 };
controller.i2cWrite(this, Integer.parseInt(deviceBus), Integer.decode(deviceAddress), writebuffer, writebuffer.length);
controller.i2cRead(this, Integer.parseInt(deviceBus), Integer.decode(deviceAddress), readbuffer, readbuffer.length);
// A bit tricky conversion. The LSB needs to be right shifted 3 bits, so
// the MSB needs to be left shifted (8-3) = 5 bits
// And bytes are signed in Java so first a mask of 0xff needs to be
// applied to the MSB to remove the sign
int rawBusVoltage = (((int) readbuffer[0] & 0xff) << 8 | (int) readbuffer[1] & 0xff) >> 3;
log.debug(String.format("Busvoltage high byte = %s, low byte = %s, rawBusVoltagee = %s", readbuffer[0], readbuffer[1], rawBusVoltage));
// LSB = 4mV, so multiply wit 4 to get the volatage in mV
busVoltage = rawBusVoltage * 4;
return busVoltage;
}
/**
* This static method returns all the details of the class without it having
* to be constructed. It has description, categories, dependencies, and peer
* definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(AdafruitIna219.class.getCanonicalName());
meta.addDescription("measures voltage and current of a circuit");
meta.setLicenseApache();
meta.addCategory("shield", "sensor", "i2c");
meta.setSponsor("Mats");
return meta;
}
// This section contains all the new attach logic
@Override
public void attach(String service) throws Exception {
attach((Attachable) Runtime.getService(service));
}
@Override
public void attach(Attachable service) throws Exception {
if (I2CController.class.isAssignableFrom(service.getClass())) {
attachI2CController((I2CController) service);
return;
}
}
public void attach(String controllerName, String deviceBus, String deviceAddress) {
attach((I2CController) Runtime.getService(controllerName), deviceBus, deviceAddress);
}
public void attach(I2CController controller, String deviceBus, String deviceAddress) {
if (isAttached && this.controller != controller) {
log.error(String.format("Already attached to %s, use detach(%s) first", this.controllerName, controller.getName()));
}
controllerName = controller.getName();
log.info(String.format("%s attach %s", getName(), controllerName));
this.deviceBus = deviceBus;
this.deviceAddress = deviceAddress;
attachI2CController(controller);
isAttached = true;
broadcastState();
}
public void attachI2CController(I2CController controller) {
if (isAttached(controller))
return;
if (this.controllerName != controller.getName()) {
log.error(String.format("Trying to attached to %s, but already attached to (%s)", controller.getName(), this.controllerName));
return;
}
this.controller = controller;
isAttached = true;
controller.attachI2CControl(this);
log.info(String.format("Attached %s device on bus: %s address %s", controllerName, deviceBus, deviceAddress));
broadcastState();
}
// This section contains all the new detach logic
// TODO: This default code could be in Attachable
@Override
public void detach(String service) {
detach((Attachable) Runtime.getService(service));
}
@Override
public void detach(Attachable service) {
if (I2CController.class.isAssignableFrom(service.getClass())) {
detachI2CController((I2CController) service);
return;
}
}
@Override
public void detachI2CController(I2CController controller) {
if (!isAttached(controller))
return;
controller.detachI2CControl(this);
isAttached = false;
broadcastState();
}
// This section contains all the methods used to query / show all attached
// methods
/**
* Returns all the currently attached services
*/
@Override
public Set<String> getAttached() {
HashSet<String> ret = new HashSet<String>();
if (controller != null && isAttached) {
ret.add(controller.getName());
}
return ret;
}
@Override
public String getDeviceBus() {
return this.deviceBus;
}
@Override
public String getDeviceAddress() {
return this.deviceAddress;
}
@Override
public boolean isAttached(Attachable instance) {
if (controller != null && controller.getName().equals(instance.getName())) {
return isAttached;
}
;
return false;
}
}
|
package org.pentaho.di.job.entry;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.CheckResultSourceInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.SQLStatement;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.job.JobEntryType;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceHolderInterface;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.w3c.dom.Node;
/**
 * Base class for job entries: holds name, description, type and repository id,
 * and delegates VariableSpace behaviour to an internal Variables instance.
 */
public class JobEntryBase implements Cloneable, VariableSpace, CheckResultSourceInterface, ResourceHolderInterface
{
    /** Name of the job entry, shown in the GUI and stored in XML/repository. */
    private String name;

    /** Free-form description of the job entry. */
    private String description;

    /** Plugin id when this entry is provided by a plugin, otherwise null. */
    private String pluginID = null;

    /**
     * Id as defined in the xml or annotation.
     */
    private String configId;

    /** True when the entry was modified since it was last loaded or saved. */
    private boolean changed;

    /** Built-in entry type; NONE for plugin-provided entries. */
    private JobEntryType type;

    /** Repository id of this entry; -1 when the entry has not been saved. */
    private long id;

    /** Backing store for the VariableSpace delegation methods below. */
    protected VariableSpace variables = new Variables();

    public JobEntryBase()
    {
        name = null;
        description = null;
        type = JobEntryType.NONE;
    }

    public JobEntryBase(String name, String description)
    {
        setName(name);
        setDescription(description);
        setID(-1L);
        type = JobEntryType.NONE;
    }

    /** Copy constructor: copies name, description, type and repository id. */
    public JobEntryBase(JobEntryBase jeb)
    {
        setName(jeb.getName());
        setDescription(jeb.getDescription());
        setJobEntryType(jeb.getJobEntryType());
        setID(jeb.getID());
    }

    /** Resets name, description and the changed flag. */
    public void clear()
    {
        name = null;
        description = null;
        changed = false;
    }

    public void setID(long id)
    {
        this.id = id;
    }

    public long getID()
    {
        return id;
    }

    public void setJobEntryType(JobEntryType type)
    {
        this.type = type;
    }

    public String getPluginID()
    {
        return this.pluginID;
    }

    /**
     * Support for CheckResultSourceInterface
     */
    public String getTypeId()
    {
        return getTypeCode();
    }

    public JobEntryType getJobEntryType()
    {
        return type;
    }

    /**
     * @return the plugin id for plugin entries, otherwise the built-in
     *         type's code.
     */
    public String getTypeCode()
    {
        if (this.pluginID != null)
            return this.pluginID;
        return type.toString();// JobEntryInterface.typeCode[type];
    }

    public static final String getTypeCode(JobEntryType type)
    {
        return type.toString();
    }

    public String getTypeDesc()
    {
        return type.getDescription();
    }

    public static final String getTypeDesc(JobEntryType type)
    {
        return type.getDescription();
    }

    public void setName(String name)
    {
        this.name = name;
    }

    public void setPluginID(String pid)
    {
        this.pluginID = pid;
    }

    public String getName()
    {
        return name;
    }

    public void setDescription(String Description)
    {
        this.description = Description;
    }

    public String getDescription()
    {
        return description;
    }

    public void setChanged()
    {
        setChanged(true);
    }

    public void setChanged(boolean ch)
    {
        changed = ch;
    }

    public boolean hasChanged()
    {
        return changed;
    }

    public boolean isStart()
    {
        return false;
    }

    public boolean isDummy()
    {
        return false;
    }

    public boolean isEvaluation()
    {
        return getJobEntryType() == JobEntryType.EVAL;
    }

    public boolean isJob()
    {
        return getJobEntryType() == JobEntryType.JOB;
    }

    public boolean isMail()
    {
        return getJobEntryType() == JobEntryType.MAIL;
    }

    public boolean isShell()
    {
        // BUG FIX: previously compared against JobEntryType.MAIL (copy/paste
        // from isMail()), so shell entries were never recognized as such.
        return getJobEntryType() == JobEntryType.SHELL;
    }

    public boolean isSpecial()
    {
        return getJobEntryType() == JobEntryType.SPECIAL;
    }

    public boolean isTransformation()
    {
        return getJobEntryType() == JobEntryType.TRANS;
    }

    public boolean isFTP()
    {
        return getJobEntryType() == JobEntryType.FTP;
    }

    public boolean isSFTP()
    {
        return getJobEntryType() == JobEntryType.SFTP;
    }

    public boolean isHTTP()
    {
        return getJobEntryType() == JobEntryType.HTTP;
    }

    // Add here for the new types?

    /**
     * Serializes the base attributes (name, description, type) as XML tag
     * values for embedding in the job XML.
     */
    public String getXML()
    {
        StringBuilder retval = new StringBuilder();
        retval.append(" ").append(XMLHandler.addTagValue("name", getName()));
        retval.append(" ").append(XMLHandler.addTagValue("description", getDescription()));
        if (type != JobEntryType.NONE)
            retval.append(" ").append(XMLHandler.addTagValue("type", getTypeCode()));
        if (pluginID != null)
            // NOTE(review): if both a non-NONE type and a pluginID are set this
            // writes a second "type" tag; confirm the duplication is intentional.
            retval.append(" ").append(XMLHandler.addTagValue("type", pluginID));
        return retval.toString();
    }

    /**
     * Loads the base attributes (name, description, type) from an XML node.
     * @param entrynode the XML node to read from
     * @param databases list of defined databases (unused here, for subclasses)
     * @throws KettleXMLException if the node cannot be parsed
     */
    public void loadXML(Node entrynode, List<DatabaseMeta> databases)
        throws KettleXMLException
    {
        try
        {
            setName(XMLHandler.getTagValue(entrynode, "name"));
            setDescription(XMLHandler.getTagValue(entrynode, "description"));
            String stype = XMLHandler.getTagValue(entrynode, "type");
            setJobEntryType(JobEntryCopy.getType(stype));
        }
        catch(Exception e)
        {
            throw new KettleXMLException("Unable to load base info for job entry", e);
        }
    }

    /** Hook for subclasses; the base implementation does nothing. */
    public void parseRepositoryObjects(Repository rep) throws KettleException
    {
    }

    /**
     * Inserts the base attributes into the repository and stores the new id.
     * @param rep the repository to save to
     * @param id_job id of the job this entry belongs to
     * @throws KettleException if the repository insert fails
     */
    public void saveRep(Repository rep, long id_job)
        throws KettleException
    {
        try
        {
            setID(rep.insertJobEntry(id_job, getName(), getDescription(), getTypeCode()));
        }
        catch(KettleDatabaseException dbe)
        {
            throw new KettleException("Unable to save job entry base information to the repository for id_job=" + id_job, dbe);
        }
    }

    /**
     * Loads the base attributes (name, description, type) from the repository.
     * @param rep the repository to read from
     * @param id_jobentry id of the job entry row
     * @param databases list of defined databases (unused here, for subclasses)
     * @throws KettleException if the repository read fails
     */
    public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases)
        throws KettleException
    {
        try
        {
            RowMetaAndData r = rep.getJobEntry(id_jobentry);
            if (r != null)
            {
                setName(r.getString("NAME", null));
                setDescription(r.getString("DESCRIPTION", null));
                // The type is stored as a foreign key; resolve its code.
                int id_jobentry_type = (int) r.getInteger("ID_JOBENTRY_TYPE", 0);
                RowMetaAndData jetrow = rep.getJobEntryType(id_jobentry_type);
                if (jetrow != null)
                {
                    type = JobEntryCopy.getType(jetrow.getString("CODE", null));
                }
            }
        }
        catch(KettleDatabaseException dbe)
        {
            throw new KettleException("Unable to load base job entry information from the repository for id_jobentry="+id_jobentry, dbe);
        }
    }

    /** Shallow clone; returns null if cloning is unsupported (it never is). */
    public Object clone()
    {
        JobEntryBase je;
        try
        {
            je = (JobEntryBase) super.clone();
        }
        catch(CloneNotSupportedException cnse)
        {
            return null;
        }
        return je;
    }

    public String toString()
    {
        return name;
    }

    /**
     * @return true if a reset of the number of errors is required before execution.
     */
    public boolean resetErrorsBeforeExecution()
    {
        return true;
    }

    /**
     * check whether or not this job entry evaluates.
     * @return true if the job entry evaluates
     */
    public boolean evaluates()
    {
        return false;
    }

    public boolean isUnconditional()
    {
        return true;
    }

    public List<SQLStatement> getSQLStatements(Repository repository) throws KettleException
    {
        return getSQLStatements(repository, null);
    }

    /** Base implementation has no SQL to run; subclasses override. */
    public List<SQLStatement> getSQLStatements(Repository repository, VariableSpace space) throws KettleException
    {
        return new ArrayList<SQLStatement>();
    }

    public String getFilename()
    {
        return null;
    }

    public String getRealFilename()
    {
        return null;
    }

    /**
     * This method returns all the database connections that are used by the job entry.
     * @return an array of database connections meta-data.
     * Return an empty array if no connections are used.
     */
    public DatabaseMeta[] getUsedDatabaseConnections()
    {
        return new DatabaseMeta[] {};
    }

    // ---- VariableSpace implementation, delegated to the variables field ----

    public void copyVariablesFrom(VariableSpace space)
    {
        variables.copyVariablesFrom(space);
    }

    public String environmentSubstitute(String aString)
    {
        return variables.environmentSubstitute(aString);
    }

    public String[] environmentSubstitute(String aString[])
    {
        return variables.environmentSubstitute(aString);
    }

    public VariableSpace getParentVariableSpace()
    {
        return variables.getParentVariableSpace();
    }

    public String getVariable(String variableName, String defaultValue)
    {
        return variables.getVariable(variableName, defaultValue);
    }

    public String getVariable(String variableName)
    {
        return variables.getVariable(variableName);
    }

    /**
     * Substitutes the given expression and converts it to a boolean.
     * @param variableName expression to substitute and convert
     * @param defaultValue returned when the expression or its value is empty
     */
    public boolean getBooleanValueOfVariable(String variableName, boolean defaultValue) {
        if (!Const.isEmpty(variableName)) {
            String value = environmentSubstitute(variableName);
            if (!Const.isEmpty(value)) {
                return ValueMeta.convertStringToBoolean(value);
            }
        }
        return defaultValue;
    }

    public void initializeVariablesFrom(VariableSpace parent)
    {
        variables.initializeVariablesFrom(parent);
    }

    public String[] listVariables()
    {
        return variables.listVariables();
    }

    public void setVariable(String variableName, String variableValue)
    {
        variables.setVariable(variableName, variableValue);
    }

    public void shareVariablesWith(VariableSpace space)
    {
        // Intentionally replaces (not copies) the backing space so changes
        // are shared both ways with the caller.
        variables = space;
    }

    public void injectVariables(Map<String, String> prop)
    {
        variables.injectVariables(prop);
    }

    /**
     * Support for overrides not having to put in a check method
     * @param remarks CheckResults from checking the job entry
     * @param jobMeta JobMeta information letting threading back to the JobMeta possible
     */
    public void check(List<CheckResultInterface> remarks, JobMeta jobMeta) {
    }

    /**
     * Get a list of all the resource dependencies that the step is depending on.
     *
     * @return a list of all the resource dependencies that the step is depending on
     */
    public List<ResourceReference> getResourceDependencies(JobMeta jobMeta)
    {
        return new ArrayList<ResourceReference>(5); // default: return an empty resource dependency list. Lower the initial capacity
    }

    public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface) throws KettleException {
        return null;
    }

    public String getConfigId()
    {
        return configId;
    }

    public void setConfigId(String configId)
    {
        this.configId = configId;
    }

    /**
     * This returns the expected name for the dialog that edits a job entry.
     * The expected name is in the org.pentaho.di.ui tree and has a class name
     * that is the name of the job entry with 'Dialog' added to the end.
     *
     * e.g. if the job entry is org.pentaho.di.job.entries.zipfile.JobEntryZipFile
     * the dialog would be org.pentaho.di.ui.job.entries.zipfile.JobEntryZipFileDialog
     *
     * If the dialog class for a job entry does not match this pattern it should
     * override this method and return the appropriate class name
     *
     * @return full class name of the dialog
     */
    public String getDialogClassName()
    {
        String className = getClass().getCanonicalName();
        className = className.replaceFirst("\\.di\\.", ".di.ui.");
        className += "Dialog";
        return className;
    }

    public String getHolderType() {
        return "JOBENTRY"; //$NON-NLS-1$
    }

    protected VariableSpace getVariables()
    {
        return variables;
    }
}
|
package org.pentaho.di.job.entry;
import java.util.List;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.changed.ChangedFlagInterface;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.gui.GUIPositionInterface;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.logging.LogWriter;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.core.xml.XMLInterface;
import org.pentaho.di.job.JobEntryLoader;
import org.pentaho.di.job.JobEntryType;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.JobPlugin;
import org.pentaho.di.repository.Repository;
import org.w3c.dom.Node;
/**
* This class describes the fact that a single JobEntry can be used multiple
* times in the same Job. Therefor it contains a link to a JobEntry, a position,
* a number, etc.
*
* @author Matt
* @since 01-10-2003
*
*/
public class JobEntryCopy implements Cloneable, XMLInterface, GUIPositionInterface, ChangedFlagInterface
{
/** The job entry being wrapped by this copy. */
private JobEntryInterface entry;
private int nr; // Copy nr. 0 is the base copy...
/** True when this copy is selected in the graphical editor. */
private boolean selected;
/** Position of this copy on the job canvas; may be null until set. */
private Point location;
/** True when this copy launches in parallel with the next entries. */
private boolean parallel;
/** True when this copy is drawn on the canvas. */
private boolean draw;
/** Repository id of this copy; -1 when not (yet) saved. */
private long id;
public JobEntryCopy()
{
    clear();
}

/**
 * @deprecated Log is no longer required.
 * @param log unused
 */
public JobEntryCopy(LogWriter log)
{
    clear();
}

/**
 * Wraps the given job entry.
 * NOTE(review): unlike the other constructors this one does not call
 * clear(), so id stays 0 (not -1) and location stays null — confirm
 * this asymmetry is intentional.
 */
public JobEntryCopy(LogWriter log, JobEntryInterface entry)
{
    this.entry = entry;
}
/**
 * Serializes this entry copy as an XML &lt;entry&gt; element: the wrapped
 * entry's own XML followed by the GUI metadata (parallel flag, drawn flag,
 * copy number and canvas location).
 */
public String getXML()
{
    StringBuffer xml = new StringBuffer();
    xml.append(" <entry>").append(Const.CR)
       .append(entry.getXML())
       .append(" ").append(XMLHandler.addTagValue("parallel", parallel))
       .append(" ").append(XMLHandler.addTagValue("draw", draw))
       .append(" ").append(XMLHandler.addTagValue("nr", nr))
       .append(" ").append(XMLHandler.addTagValue("xloc", location.x))
       .append(" ").append(XMLHandler.addTagValue("yloc", location.y))
       .append(" </entry>").append(Const.CR);
    return xml.toString();
}
/**
 * Constructs a job entry copy from its XML &lt;entry&gt; node: resolves the
 * plugin from the "type" tag, instantiates the entry, loads its attributes
 * and then the GUI metadata (nr, parallel, draw, location).
 * @throws KettleXMLException if the node cannot be parsed
 */
public JobEntryCopy(Node entrynode, List<DatabaseMeta> databases, Repository rep)
    throws KettleXMLException
{
    try
    {
        String stype = XMLHandler.getTagValue(entrynode, "type");
        JobPlugin jobPlugin = JobEntryLoader.getInstance().findJobEntriesWithID(stype);
        if (jobPlugin == null)
            System.out.println("null jobPlugin for " + stype);
        // Get an empty JobEntry of the appropriate class...
        entry = JobEntryLoader.getInstance().getJobEntryClass(jobPlugin);
        if (entry != null)
        {
            // System.out.println("New JobEntryInterface built of type:
            // "+entry.getTypeDesc());
            entry.loadXML(entrynode, databases, rep);
            // Handle GUI information: nr & location?
            setNr(Const.toInt(XMLHandler.getTagValue(entrynode, "nr"), 0));
            setParallel("Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "parallel")));
            setDrawn("Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "draw")));
            int x = Const.toInt(XMLHandler.getTagValue(entrynode, "xloc"), 0);
            int y = Const.toInt(XMLHandler.getTagValue(entrynode, "yloc"), 0);
            setLocation(x, y);
        }
    } catch (Exception e)
    {
        String message = "Unable to read Job Entry copy info from XML node : " + e.toString();
        LogWriter log = LogWriter.getInstance();
        log.logError(toString(), message);
        log.logError(toString(), Const.getStackTracker(e));
        throw new KettleXMLException(message, e);
    }
}
/**
 * Load the chef graphical entry from repository We load type, name &
 * description if no entry can be found.
 *
 * @param log
 *            the logging channel
 * @param rep
 *            the Repository
 * @param id_job
 *            The job ID
 * @param id_jobentry_copy
 *            The jobentry copy ID
 * @param jobentries
 *            A list with all jobentries; reused as a cache and appended to
 *            when a new entry has to be loaded
 * @param databases
 *            A list with all defined databases
 */
public JobEntryCopy(LogWriter log, Repository rep, long id_job, long id_jobentry_copy,
    List<JobEntryInterface> jobentries, List<DatabaseMeta> databases) throws KettleException
{
    try
    {
        setID(id_jobentry_copy);
        // Handle GUI information: nr, location, ...
        RowMetaAndData r = rep.getJobEntryCopy(id_jobentry_copy);
        if (r != null)
        {
            // These are the jobentry_copy fields...
            // System.out.println("JobEntryCopy = "+r);
            long id_jobentry = r.getInteger("ID_JOBENTRY", 0);
            long id_jobentry_type = r.getInteger("ID_JOBENTRY_TYPE", 0);
            setNr((int) r.getInteger("NR", 0));
            int locx = (int) r.getInteger("GUI_LOCATION_X", 0);
            int locy = (int) r.getInteger("GUI_LOCATION_Y", 0);
            boolean isdrawn = r.getBoolean("GUI_DRAW", false);
            boolean isparallel = r.getBoolean("PARALLEL", false);
            // Do we have the jobentry already?
            entry = JobMeta.findJobEntry(jobentries, id_jobentry);
            if (entry == null)
            {
                // What type of jobentry do we load now?
                // Get the jobentry type code
                RowMetaAndData rt = rep.getJobEntryType(id_jobentry_type);
                if (rt != null)
                {
                    String jet_code = rt.getString("CODE", null);
                    JobEntryLoader jobLoader = JobEntryLoader.getInstance();
                    JobPlugin jobPlugin = jobLoader.findJobEntriesWithID(jet_code);
                    if (jobPlugin != null)
                    {
                        entry = jobLoader.getJobEntryClass(jobPlugin);
                        // Load the attributes for that jobentry
                        entry.loadRep(rep, id_jobentry, databases);
                        // Cache it so later copies of the same entry reuse it.
                        jobentries.add(entry);
                    } else
                    {
                        throw new KettleException(
                            "JobEntryLoader was unable to find Job Entry Plugin with description ["
                                + jet_code + "].");
                    }
                } else
                {
                    throw new KettleException("Unable to find Job Entry Type with id=" + id_jobentry_type
                        + " in the repository");
                }
            }
            // Apply the GUI metadata read above.
            setLocation(locx, locy);
            setDrawn(isdrawn);
            setParallel(isparallel);
        }
    } catch (KettleDatabaseException dbe)
    {
        throw new KettleException("Unable to load job entry copy from repository with id_jobentry_copy="
            + id_jobentry_copy, dbe);
    }
}
/**
 * Saves this copy to the repository. The wrapped entry itself is saved at
 * most once (only when no entry with the same name exists for this job);
 * the copy row is then linked to it by id.
 * @param rep the repository to save to
 * @param id_job id of the job this copy belongs to
 * @throws KettleException if any repository operation fails
 */
public void saveRep(Repository rep, long id_job) throws KettleException
{
    try
    {
        /*
         * --1-- Save the JobEntryCopy details... --2-- If we don't find a
         * id_jobentry, save the jobentry (meaning: only once)
         */
        // See if an entry with the same name is already available...
        long id_jobentry = rep.getJobEntryID(getName(), id_job);
        if (id_jobentry <= 0)
        {
            entry.saveRep(rep, id_job);
            id_jobentry = entry.getID();
        }
        // OK, the entry is saved.
        // Get the entry type...
        long id_jobentry_type = rep.getJobEntryTypeID(entry.getTypeCode());
        // Oops, not found: update the repository!
        if (id_jobentry_type < 0)
        {
            rep.updateJobEntryTypes();
            // Try again!
            id_jobentry_type = rep.getJobEntryTypeID(entry.getTypeCode());
        }
        // Save the entry copy..
        setID(rep.insertJobEntryCopy(id_job, id_jobentry, id_jobentry_type, getNr(), getLocation().x,
            getLocation().y, isDrawn(), isParallel()));
    } catch (KettleDatabaseException dbe)
    {
        throw new KettleException("Unable to save job entry copy to the repository, id_job=" + id_job,
            dbe);
    }
}
/**
 * Resets this copy to the state of a freshly constructed, unsaved object:
 * no wrapped entry, no location, copy number 0, sequential, id -1.
 */
public void clear()
{
    entry = null;
    location = null;
    nr = 0;
    parallel = false;
    setID(-1L);
}
/**
 * Shallow clone: the wrapped entry is SHARED with the original, and the
 * clone's repository id is reset to -1 (unsaved). Use clone_deep() to also
 * clone the wrapped entry.
 */
public Object clone()
{
    JobEntryCopy ge = new JobEntryCopy();
    ge.replaceMeta(this);
    ge.setID(-1L);
    return ge;
}

/**
 * Copies all fields from the given copy into this one. The entry reference
 * is shared; only the location Point is duplicated.
 */
public void replaceMeta(JobEntryCopy jobEntryCopy)
{
    entry = jobEntryCopy.entry;
    nr = jobEntryCopy.nr; // Copy nr. 0 is the base copy...
    selected = jobEntryCopy.selected;
    if (jobEntryCopy.location != null)
        location = new Point(jobEntryCopy.location.x, jobEntryCopy.location.y);
    parallel = jobEntryCopy.parallel;
    draw = jobEntryCopy.draw;
    id = jobEntryCopy.id;
}

/**
 * Deep clone: like clone(), but the wrapped entry is cloned as well.
 */
public Object clone_deep()
{
    JobEntryCopy ge = (JobEntryCopy) clone();
    // Copy underlying object as well...
    ge.entry = (JobEntryInterface) entry.clone();
    return ge;
}
/** Sets the repository id of this copy (-1 means unsaved). */
public void setID(long id)
{
    this.id = id;
}
/**
 * Two copies are equal when their entry names match (case-insensitively) and
 * they have the same copy number.
 *
 * Bug fix: the previous version cast {@code o} without a type check and threw
 * a ClassCastException when compared against a non-JobEntryCopy object.
 * NOTE(review): hashCode() is not overridden to match this equals(); existing
 * callers may rely on identity hashing, so that is left unchanged — confirm
 * before using JobEntryCopy as a hash key.
 */
public boolean equals(Object o)
{
    if (o == null)
        return false;
    if (!(o instanceof JobEntryCopy))
        return false;
    JobEntryCopy je = (JobEntryCopy) o;
    return je.entry.getName().equalsIgnoreCase(entry.getName()) && je.getNr() == getNr();
}
// Returns the repository ID of this copy (-1 when unsaved).
public long getID()
{
    return id;
}
// Replaces the underlying job entry this copy points at.
public void setEntry(JobEntryInterface je)
{
    entry = je;
}
// Returns the underlying job entry (shared, not a copy).
public JobEntryInterface getEntry()
{
    return entry;
}
// Delegates to the underlying entry's type.
public JobEntryType getJobEntryType()
{
    return entry.getJobEntryType();
}
/**
 * @return entry in JobEntryInterface.typeCode[] for native jobs,
 *         entry.getTypeCode() for plugins
 */
public String getTypeDesc()
{
    // NONE signals a plugin entry, which carries its own type code.
    boolean isPlugin = getJobEntryType() == JobEntryType.NONE;
    return isPlugin ? entry.getTypeCode() : getTypeDesc(entry);
}
// Moves the copy to (x, y), clamping negative coordinates to 0 and flagging
// a change only when the position actually differs.
public void setLocation(int x, int y)
{
    Point candidate = new Point(Math.max(x, 0), Math.max(y, 0));
    if (!candidate.equals(location))
        setChanged();
    location = candidate;
}
// Moves the copy to loc, flagging a change only on a real move.
// A null loc is still assigned (clearing the location) without flagging a change.
public void setLocation(Point loc)
{
    boolean moved = loc != null && !loc.equals(location);
    if (moved)
        setChanged();
    location = loc;
}
// Returns the copy's graphical location (may be null after clear()).
public Point getLocation()
{
    return location;
}
// Marks this copy as changed.
public void setChanged()
{
    setChanged(true);
}
// The changed flag is stored on the underlying entry, not on the copy itself.
public void setChanged(boolean ch)
{
    entry.setChanged(ch);
}
// Reads the changed flag from the underlying entry.
public boolean hasChanged()
{
    return entry.hasChanged();
}
// Returns the copy number (0 is the base copy).
public int getNr()
{
    return nr;
}
// Sets the copy number.
public void setNr(int n)
{
    nr = n;
}
// Marks this copy as launching its next entries in parallel.
public void setParallel()
{
    setParallel(true);
}
// Sets the parallel flag.
public void setParallel(boolean p)
{
    parallel = p;
}
// Whether this copy is drawn on the job graph.
public boolean isDrawn()
{
    return draw;
}
// Marks this copy as drawn on the job graph.
public void setDrawn()
{
    setDrawn(true);
}
// Sets the drawn flag.
public void setDrawn(boolean d)
{
    draw = d;
}
// Whether this copy launches its next entries in parallel.
public boolean isParallel()
{
    return parallel;
}
// Resolves a job entry type from a string: the short type code takes priority
// over the long description; unknown or null input yields NONE.
public static final JobEntryType getType(String dsc)
{
    if (dsc == null)
        return JobEntryType.NONE;
    JobEntryType[] all = JobEntryType.values();
    // First pass: match the (short) type code.
    for (JobEntryType candidate : all)
    {
        if (candidate.getTypeCode().equalsIgnoreCase(dsc))
            return candidate;
    }
    // Second pass: fall back to the long description.
    for (JobEntryType candidate : all)
    {
        if (candidate.getDescription().equalsIgnoreCase(dsc))
            return candidate;
    }
    return JobEntryType.NONE;
}
// Returns the description of the type at ordinal ty; out-of-range (including 0)
// falls back to NONE, matching the original bounds check.
public static final String getTypeDesc(int ty)
{
    JobEntryType[] all = JobEntryType.values();
    boolean inRange = ty > 0 && ty < all.length;
    return (inRange ? all[ty] : JobEntryType.NONE).toString();
}
// Returns the type description of the given entry.
public static final String getTypeDesc(JobEntryInterface ty)
{
    return ty.getJobEntryType().toString();
}
// Sets the UI selection state of this copy.
public void setSelected(boolean sel)
{
    selected = sel;
}
// Toggles the UI selection state.
public void flipSelected()
{
    selected = !selected;
}
// Whether this copy is selected in the UI.
public boolean isSelected()
{
    return selected;
}
// The description lives on the underlying entry, shared between all copies.
public void setDescription(String description)
{
    entry.setDescription(description);
}
// Returns the underlying entry's description.
public String getDescription()
{
    return entry.getDescription();
}
// Whether the underlying entry is the job's start entry.
public boolean isStart()
{
    return entry.isStart();
}
// Whether the underlying entry is a dummy (no-op) entry.
public boolean isDummy()
{
    return entry.isDummy();
}
// Whether this copy runs a transformation.
public boolean isTransformation()
{
    return getJobEntryType() == JobEntryType.TRANSFORMATION;
}
// Whether this copy runs a (sub-)job.
public boolean isJob()
{
    return getJobEntryType() == JobEntryType.JOB;
}
// Whether the underlying entry evaluates a condition; false when no entry is set.
public boolean evaluates()
{
    return entry != null && entry.evaluates();
}
// Whether the underlying entry follows all outgoing hops unconditionally;
// defaults to true when no entry is set.
public boolean isUnconditional()
{
    return entry == null || entry.isUnconditional();
}
// Whether this copy is an evaluation entry.
public boolean isEvaluation()
{
    return getJobEntryType() == JobEntryType.EVALUATION;
}
// Whether this copy is a mail entry.
public boolean isMail()
{
    return getJobEntryType() == JobEntryType.MAIL;
}
/**
 * Whether this copy is a SQL entry.
 *
 * Bug fix: this previously compared against {@code JobEntryType.MAIL}
 * (copy/paste error from isMail()), so it could never identify a SQL entry
 * and wrongly returned true for mail entries.
 */
public boolean isSQL()
{
    return getJobEntryType() == JobEntryType.SQL;
}
// Whether this copy is a special entry (e.g. START/DUMMY).
public boolean isSpecial()
{
    return getJobEntryType() == JobEntryType.SPECIAL;
}
// Display form: "<entry name>.<copy number>".
public String toString()
{
    return entry.getName() + "." + getNr();
}
// The name lives on the underlying entry, shared between all copies.
public String getName()
{
    return entry.getName();
}
// Renames the underlying entry (affects all copies of it).
public void setName(String name)
{
    entry.setName(name);
}
// Whether error counters should be reset before this entry executes.
public boolean resetErrorsBeforeExecution()
{
    return entry.resetErrorsBeforeExecution();
}
}
|
package org.vitrivr.cineast.api;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.JsonValue;
import gnu.trove.map.hash.TObjectDoubleHashMap;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vitrivr.cineast.core.config.Config;
import org.vitrivr.cineast.core.config.QueryConfig;
import org.vitrivr.cineast.core.data.QueryContainer;
import org.vitrivr.cineast.core.data.StringDoublePair;
import org.vitrivr.cineast.core.db.*;
import org.vitrivr.cineast.core.db.ShotLookup.ShotDescriptor;
import org.vitrivr.cineast.core.util.ContinousRetrievalLogic;
import org.vitrivr.cineast.core.util.LogHelper;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.Reader;
import java.net.Socket;
import java.sql.ResultSet;
import java.util.*;
/**
 * Handles connection to and from the Client As the name of the class suggests,
 * communication is done via JSON-Objects.
 *
 * One thread serves exactly one request: it reads a single JSON object from
 * the client, dispatches on its "queryType" field, streams partial results to
 * the client while retrieval runs, and finally writes {@code _return} and
 * closes the connection.
 */
public class JSONAPIThread extends Thread {
    // Only set when constructed from a socket; may stay null for direct reader/printer use.
    private Socket socket = null;
    private Reader reader;
    private PrintStream printer;
    private static Logger LOGGER = LogManager.getLogger();

    /** Reads the JSON query from {@code reader}, writes all results to {@code printer}. */
    public JSONAPIThread(Reader reader, PrintStream printer) {
        this.printer = printer;
        this.reader = reader;
    }

    /** Binds the thread to a client socket; the socket is closed when the request finishes. */
    public JSONAPIThread(Socket socket) throws IOException {
        this(new InputStreamReader(socket.getInputStream()), new PrintStream(socket.getOutputStream()));
        this.socket = socket;
    }

    @Override
    public void run() {
        long startTime = System.currentTimeMillis();
        /*
         * _return will get sent to the client at the end with a toString() call
         */
        JsonObject _return = new JsonObject();
        try {
            JsonObject clientJSON = JsonObject.readFrom(reader);
            switch (clientJSON.get("queryType").asString()) {
            /*
             * Input: id: ID of a video
             *
             * Output: Information about the video - path, name etc. List of
             * all shots belonging to the video and their startframe and
             * endframe
             */
            case "video": {
                JsonObject queryObject = clientJSON.get("query").asObject();
                // String category = queryObject.get("category").asString();
                String shotId = queryObject.get("shotid").asString();
                ShotLookup sl = new ShotLookup();
                ShotDescriptor shot = sl.lookUpShot(shotId);
                //List<ShotDescriptor> allShots = sl.lookUpVideo(shot.getVideoId());
                //Send metadata
                VideoLookup vl = new VideoLookup();
                VideoLookup.VideoDescriptor descriptor = vl.lookUpVideo(shot.getVideoId());
                JsonObject resultobj = JSONEncoder.encodeVideo(descriptor);
                // vl.close();
                this.printer.print(resultobj.toString());
                this.printer.print(',');
                String id = descriptor.getVideoId();
                //send shots
                DBSelector selector = new ADAMproSelector();
                // rset and i belong to the disabled shot-listing loop below; kept for when it is re-enabled.
                ResultSet rset;
                // rset = selector.select("SELECT id, startframe, endframe FROM cineast.shots WHERE video = " + id);
                int i = 0;
                // while (rset.next()) {
                // ShotLookup.ShotDescriptor desc= sl.lookUpShot(rset.getInt(1));
                // resultobj = JSONEncoder.encodeShot(rset.getInt(1), desc.getVideoId(), desc.getStartFrame(), desc.getEndFrame());
                // this.printer.print(resultobj.toString());
                // this.printer.print(',');
                // printer.flush();
                // if(i % 20 == 0){
                // Thread.sleep(100);
                sl.close();
                vl.close();
                selector.close();
                break;
            }
            /*
             * Input: sets of positive and negative shot ids plus categories.
             * Output: results re-ranked by summed (positive) minus (negative) scores.
             */
            case "relevanceFeedback": {
                JsonObject queryObject = clientJSON.get("query").asObject();
                JsonArray categories = queryObject.get("categories").asArray();
                JsonArray parr = queryObject.get("positive").asArray();
                JsonArray narr = queryObject.get("negative").asArray();
                HashSet<String> shotids = new HashSet<>();
                HashSet<String> videoids = new HashSet<>();
                List<StringDoublePair> result;
                TObjectDoubleHashMap<String> map;
                //String resultCacheName = clientJSON.get("resultname") == null ? null : clientJSON.get("resultname").asString();
                QueryConfig qconf = Config.getQueryConfig();
                for (JsonValue category : categories) {
                    map = new TObjectDoubleHashMap<>();
                    // Positive examples add their scores to the accumulator.
                    for (JsonValue _el : parr) {
                        String _shotid = _el.asString();
                        result = ContinousRetrievalLogic.retrieve(_shotid, category.asString(), qconf);
                        for (StringDoublePair pair : result) {
                            if (Double.isInfinite(pair.value) || Double.isNaN(pair.value)) {
                                continue;
                            }
                            if (map.contains(pair.key)) {
                                map.put(pair.key, map.get(pair.key) + pair.value);
                                continue;
                            }
                            map.put(pair.key, pair.value);
                        }
                    }
                    // Negative examples subtract their scores.
                    for (JsonValue _el : narr) {
                        String _shotid = _el.asString();
                        result = ContinousRetrievalLogic.retrieve(_shotid, category.asString(), qconf);
                        for (StringDoublePair pair : result) {
                            if (Double.isInfinite(pair.value) || Double.isNaN(pair.value)) {
                                continue;
                            }
                            if (map.contains(pair.key)) {
                                map.put(pair.key, map.get(pair.key) - pair.value);
                                continue;
                            }
                            map.put(pair.key, -pair.value);
                        }
                    }
                    // Take positive score values & put together the definite
                    // list
                    List<StringDoublePair> list = new ArrayList<>(map.size());
                    // FIX: Trove's no-arg keys() returns Object[]; casting that to String[]
                    // throws ClassCastException at runtime. Use the typed overload instead.
                    String[] keys = map.keys(new String[map.size()]);
                    for (String key : keys) {
                        double val = map.get(key);
                        if (val > 0) {
                            list.add(new StringDoublePair(key, val));
                        }
                    }
                    Collections.sort(list, StringDoublePair.COMPARATOR);
                    int MAX_RESULTS = Config.getRetrieverConfig().getMaxResults();
                    if (list.size() > MAX_RESULTS) {
                        list = list.subList(0, MAX_RESULTS);
                    }
                    videoids = JSONUtils.printVideosBatched(printer, list, videoids);
                    shotids = JSONUtils.printShotsBatched(printer, list, shotids);
                    JSONUtils.printResultsBatched(printer, list, category.asString(), 1);
                }
                String resultName = DBResultCache.newCachedResult(shotids);
                JSONUtils.printResultName(printer, resultName);
                break;
            }
            /*
             * Input: Multiple QueryContainers A QueryContainer can contain
             * an id
             *
             * Output: A sorted list of movie sequences
             */
            case "multiSketch": {
                JsonArray queryArray = clientJSON.get("query").asArray();
                HashSet<String> shotids = new HashSet<>();
                HashSet<String> videoids = new HashSet<>();
                String resultCacheName = clientJSON.get("resultname") == null ? null : clientJSON.get("resultname").asString();
                if (resultCacheName != null && resultCacheName.equalsIgnoreCase("null")) {
                    resultCacheName = null;
                }
                QueryConfig qconf = Config.getQueryConfig();
                DBResultCache.createIfNecessary(resultCacheName);
                int index = 1;
                for (Iterator<JsonValue> it = queryArray.iterator(); it.hasNext(); ++index) {
                    JsonObject query = it.next().asObject();
                    for (JsonValue category : query.get("categories").asArray()) {
                        List<StringDoublePair> result;
                        if (query.get("id") != null) {
                            // Query-by-id: use the referenced shot as example.
                            String id = query.get("id").asString();
                            result = ContinousRetrievalLogic.retrieve(id, category.asString(), qconf);
                        } else {
                            // Query-by-sketch: build a QueryContainer from the JSON payload.
                            QueryContainer qc = JSONUtils.queryContainerFromJSON(query);
                            result = ContinousRetrievalLogic.retrieve(qc, category.asString(), qconf);
                        }
                        videoids = JSONUtils.printVideosBatched(printer, result, videoids);
                        shotids = JSONUtils.printShotsBatched(printer, result, shotids);
                        JSONUtils.printResultsBatched(printer, result, category.asString(), index);
                    }
                }
                String resultName = DBResultCache.newCachedResult(shotids);
                JSONUtils.printResultName(printer, resultName);
                break;
            }
            /*
             * Input: array of weighted QueryContainers with categories.
             * Output: per-category score-fused, sorted result lists.
             */
            case "query": {
                JsonArray queryArray = clientJSON.get("query").asArray();
                HashSet<String> shotids = new HashSet<>();
                HashSet<String> videoids = new HashSet<>();
                String resultCacheName = clientJSON.get("resultname") == null ? null : clientJSON.get("resultname").asString();
                if (resultCacheName != null && resultCacheName.equalsIgnoreCase("null")) {
                    resultCacheName = null;
                }
                QueryConfig qconf = Config.getQueryConfig();
                DBResultCache.createIfNecessary(resultCacheName);
                // Group the containers by the categories they should be evaluated against.
                HashMap<String, ArrayList<QueryContainer>> categoryMap = new HashMap<>();
                for (JsonValue jval : queryArray) {
                    JsonObject jobj = jval.asObject();
                    QueryContainer qc = JSONUtils.queryContainerFromJSON(jobj);
                    if (qc.getWeight() == 0f || jobj.get("categories") == null) {
                        continue;
                    }
                    for (JsonValue c : jobj.get("categories").asArray()) {
                        String category = c.asString();
                        if (!categoryMap.containsKey(category)) {
                            categoryMap.put(category, new ArrayList<QueryContainer>());
                        }
                        categoryMap.get(category).add(qc);
                    }
                }
                Set<String> categories = categoryMap.keySet();
                List<StringDoublePair> result;
                for (String category : categories) {
                    TObjectDoubleHashMap<String> map = new TObjectDoubleHashMap<>();
                    for (QueryContainer qc : categoryMap.get(category)) {
                        float weight = qc.getWeight() > 0f ? 1f : -1f; //TODO better normalisation
                        if (qc.hasId()) {
                            result = ContinousRetrievalLogic.retrieve(qc.getId(), category, qconf);
                        } else {
                            result = ContinousRetrievalLogic.retrieve(qc, category, qconf);
                        }
                        for (StringDoublePair pair : result) {
                            if (Double.isInfinite(pair.value) || Double.isNaN(pair.value)) {
                                continue;
                            }
                            if (map.contains(pair.key)) {
                                map.put(pair.key, map.get(pair.key) + pair.value * weight);
                                continue;
                            }
                            map.put(pair.key, pair.value * weight);
                        }
                        // NOTE(review): this sort/trim/print block runs once PER QueryContainer
                        // (inside the qc loop) over the cumulative map, so intermediate result
                        // batches are emitted repeatedly. This looks like a misplaced brace, but
                        // the behavior is preserved here — confirm intent before moving it out.
                        List<StringDoublePair> list = new ArrayList<>(map.size());
                        Set<String> keys = map.keySet();
                        for (String key : keys) {
                            double val = map.get(key);
                            if (val > 0) {
                                list.add(new StringDoublePair(key, val));
                            }
                        }
                        Collections.sort(list, StringDoublePair.COMPARATOR);
                        int MAX_RESULTS = Config.getRetrieverConfig().getMaxResults();
                        if (list.size() > MAX_RESULTS) {
                            list = list.subList(0, MAX_RESULTS);
                        }
                        videoids = JSONUtils.printVideosBatched(printer, list, videoids);
                        shotids = JSONUtils.printShotsBatched(printer, list, shotids);
                        JSONUtils.printResultsBatched(printer, list, category, 1);
                    }
                }
                break;
            }
            /*
             * Input: List of shotids
             * Output: Information about neighboring shots
             */
            case "context": {
                LOGGER.debug("Context API call starting");
                JsonObject query = clientJSON.get("query").asObject();
                JsonArray shotidlist = query.get("shotidlist").asArray();
                int limit = query.get("limit") == null ? 1 : query.get("limit").asInt();
                DBSelector selector = new ADAMproSelector();
                ShotLookup sl = new ShotLookup();
                ShotLookup.ShotDescriptor descriptor;
                this.printer.print('[');
                // PreparedStatement select = selector.createPreparedStatement("(select id, startframe, endframe from cineast.shots WHERE video=? AND startframe<? ORDER BY startframe desc LIMIT ?)UNION(select id, startframe, endframe from cineast.shots WHERE video=? AND endframe>? ORDER BY startframe asc LIMIT ?)");
                JsonObject batch = new JsonObject();
                batch.add("type", "batch");
                batch.add("inner", "shot");
                JsonArray array = new JsonArray();
                // NOTE(review): the neighbor lookup below is entirely commented out, so the
                // batch is currently emitted empty for every requested shot id.
                for (int i = 0; i < shotidlist.size(); ++i) {
                    // JsonValue val = shotidlist.get(i);
                    // String shotid = val.asString();
                    // descriptor = sl.lookUpShot(shotid);
                    // select.setInt(1, descriptor.getVideoId());
                    // select.setInt(2, descriptor.getStartFrame());
                    // select.setInt(3, limit);
                    // select.setInt(4, descriptor.getVideoId());
                    // select.setInt(5, descriptor.getEndFrame());
                    // select.setInt(6, limit);
                    // ResultSet rset = select.executeQuery();
                    // while(rset != null && rset.next()){
                    // array.add(JSONEncoder.encodeShot(rset.getLong(1), descriptor.getVideoId(), rset.getLong(2), rset.getLong(3), false));
                }
                batch.add("array", array);
                printer.println(batch.toString());
                this.printer.print(']');
                this.printer.flush();
                this.printer.close();
                selector.close();
                sl.close();
                LOGGER.debug("Context API call ending");
                break;
            }
            case "getConcepts": {
                LOGGER.debug("Concepts API call starting");
                //DBSelector selector = Config.getDatabaseConfig().newSelector();
                String[] concepts = new String[]{"fruit, cars"}; //TODO Select distinct concepts from the concept-table
                JsonArray jsonConcepts = new JsonArray();
                for (String c : concepts) {
                    jsonConcepts.add(c);
                }
                _return.set("concepts", jsonConcepts);
                //selector.close()
                LOGGER.debug("Concepts API call ending");
                break;
            }
            case "getLabels": {
                //TODO garter snake & grass snake are the same thing... How does that work out in the end?
                String[] labels = new String[]{"mayan dog, egyptian dog, garter snake, grass snake"}; //TODO Select labels
                JsonArray jsonLabels = new JsonArray();
                for (String c : labels) {
                    jsonLabels.add(c);
                }
                _return.set("labels", jsonLabels);
                //selector.close();
                break;
            }
            default: {
                LOGGER.warn("queryType {} is unknown", clientJSON.get("queryType").asString());
            }
            }
        } catch (IOException e) {
            LOGGER.error(LogHelper.getStackTrace(e));
        } catch (Exception e) {
            LOGGER.error(e.getMessage() + " | " + e.toString() + "\n");
            e.printStackTrace();
        } finally {
            try {
                LOGGER.debug("Finished API request in {} ms", (System.currentTimeMillis() - startTime));
                this.printer.print(_return.toString());
                /*
                 * Cleanup
                 */
                this.printer.flush();
                this.printer.close();
                this.reader.close();
                if (this.socket != null) {
                    this.socket.close();
                }
            } catch (Exception e) {
                LOGGER.error(e.getMessage() + " | " + e.toString() + "\n");
                e.printStackTrace();
            }
        }
    }
}
|
package peergos.server.storage;
import io.prometheus.client.*;
import peergos.server.*;
import peergos.server.corenode.*;
import peergos.server.sql.*;
import peergos.server.util.*;
import peergos.shared.*;
import peergos.shared.cbor.*;
import peergos.shared.crypto.asymmetric.*;
import peergos.shared.crypto.hash.*;
import peergos.shared.io.ipfs.cid.*;
import peergos.shared.storage.*;
import peergos.shared.io.ipfs.multihash.*;
import peergos.shared.util.*;
import java.io.*;
import java.nio.file.*;
import java.sql.*;
import java.time.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.*;
public class S3BlockStorage implements DeletableContentAddressedStorage {
private static final Logger LOG = Logger.getGlobal();
private static final Histogram readTimerLog = Histogram.build()
.labelNames("filesize")
.name("block_read_seconds")
.help("Time to read a block from immutable storage")
.exponentialBuckets(0.01, 2, 16)
.register();
private static final Histogram writeTimerLog = Histogram.build()
.labelNames("filesize")
.name("s3_block_write_seconds")
.help("Time to write a block to immutable storage")
.exponentialBuckets(0.01, 2, 16)
.register();
private static final Counter nonLocalGets = Counter.build()
.name("p2p_block_gets")
.help("Number of block gets which fell back to p2p retrieval")
.register();
private final Multihash id;
private final String region, bucket, folder, regionEndpoint, host;
private final String accessKeyId, secretKey;
private final BlockStoreProperties props;
private final TransactionStore transactions;
private final Hasher hasher;
private final ContentAddressedStorage p2pFallback;
public S3BlockStorage(S3Config config,
Multihash id,
BlockStoreProperties props,
TransactionStore transactions,
Hasher hasher,
ContentAddressedStorage p2pFallback) {
this.id = id;
this.region = config.region;
this.bucket = config.bucket;
this.folder = config.path.isEmpty() || config.path.endsWith("/") ? config.path : config.path + "/";
this.regionEndpoint = config.regionEndpoint;
this.host = config.getHost();
this.accessKeyId = config.accessKey;
this.secretKey = config.secretKey;
LOG.info("Using S3 Block Storage at " + config.regionEndpoint + ", bucket " + config.bucket + ", path: " + config.path);
this.props = props;
this.transactions = transactions;
this.hasher = hasher;
this.p2pFallback = p2pFallback;
}
@Override
public ContentAddressedStorage directToOrigin() {
return this;
}
private static String hashToKey(Multihash hash) {
return DirectS3BlockStore.hashToKey(hash);
}
private Multihash keyToHash(String key) {
return DirectS3BlockStore.keyToHash(key.substring(folder.length()));
}
@Override
public CompletableFuture<BlockStoreProperties> blockStoreProperties() {
return Futures.of(props);
}
@Override
public CompletableFuture<List<PresignedUrl>> authReads(List<Multihash> blocks) {
if (blocks.size() > 50)
throw new IllegalStateException("Too many reads to auth!");
List<PresignedUrl> res = new ArrayList<>();
for (Multihash block : blocks) {
String s3Key = hashToKey(block);
res.add(S3Request.preSignGet(s3Key, Optional.of(600), ZonedDateTime.now(), host, region, accessKeyId, secretKey));
}
return Futures.of(res);
}
@Override
public CompletableFuture<List<PresignedUrl>> authWrites(PublicKeyHash owner,
PublicKeyHash writerHash,
List<byte[]> signedHashes,
List<Integer> blockSizes,
boolean isRaw,
TransactionId tid) {
try {
if (signedHashes.size() > 50)
throw new IllegalStateException("Too many writes to auth!");
if (blockSizes.size() != signedHashes.size())
throw new IllegalStateException("Number of sizes doesn't match number of signed hashes!");
PublicSigningKey writer = getSigningKey(writerHash).get().get();
List<Pair<Multihash, Integer>> blockProps = new ArrayList<>();
for (int i=0; i < signedHashes.size(); i++) {
Cid.Codec codec = isRaw ? Cid.Codec.Raw : Cid.Codec.DagCbor;
Cid cid = new Cid(1, codec, Multihash.Type.sha2_256, writer.unsignMessage(signedHashes.get(i)));
blockProps.add(new Pair<>(cid, blockSizes.get(i)));
}
List<PresignedUrl> res = new ArrayList<>();
for (Pair<Multihash, Integer> props : blockProps) {
if (props.left.type != Multihash.Type.sha2_256)
throw new IllegalStateException("Can only pre-auth writes of sha256 hashed blocks!");
transactions.addBlock(props.left, tid, owner);
String s3Key = hashToKey(props.left);
String contentSha256 = ArrayOps.bytesToHex(props.left.getHash());
Map<String, String> extraHeaders = new LinkedHashMap<>();
extraHeaders.put("Content-Type", "application/octet-stream");
res.add(S3Request.preSignPut(s3Key, props.right, contentSha256, false,
ZonedDateTime.now(), host, extraHeaders, region, accessKeyId, secretKey));
}
return Futures.of(res);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public CompletableFuture<Optional<CborObject>> get(Multihash hash) {
if (hash instanceof Cid && ((Cid) hash).codec == Cid.Codec.Raw)
throw new IllegalStateException("Need to call getRaw if cid is not cbor!");
return getRaw(hash).thenApply(opt -> opt.map(CborObject::fromByteArray));
}
@Override
public CompletableFuture<Optional<byte[]>> getRaw(Multihash hash) {
String path = folder + hashToKey(hash);
PresignedUrl getUrl = S3Request.preSignGet(path, Optional.of(600),
ZonedDateTime.now(), host, region, accessKeyId, secretKey);
Histogram.Timer readTimer = readTimerLog.labels("read").startTimer();
try {
return Futures.of(Optional.of(HttpUtil.get(getUrl)));
} catch (IOException e) {
String msg = e.getMessage();
boolean notFound = msg.startsWith("<?xml version=\"1.0\" encoding=\"UTF-8\"?><Error><Code>NoSuchKey</Code>");
if (! notFound) {
LOG.warning("S3 error reading " + path);
LOG.log(Level.WARNING, msg, e);
}
nonLocalGets.inc();
return p2pFallback.getRaw(hash);
} finally {
readTimer.observeDuration();
}
}
@Override
public CompletableFuture<List<Multihash>> mirror(PublicKeyHash owner,
Optional<Multihash> existing,
Optional<Multihash> updated,
TransactionId tid) {
if (updated.isEmpty())
return Futures.of(Collections.emptyList());
Multihash newRoot = updated.get();
if (existing.equals(updated))
return Futures.of(Collections.singletonList(newRoot));
boolean isRaw = (newRoot instanceof Cid) && ((Cid) newRoot).codec == Cid.Codec.Raw;
Optional<byte[]> newVal = p2pFallback.getRaw(newRoot).join();
if (newVal.isEmpty())
throw new IllegalStateException("Couldn't retrieve block: " + newRoot);
byte[] newBlock = newVal.get();
put(newBlock, isRaw, tid, owner);
if (isRaw)
return Futures.of(Collections.singletonList(newRoot));
List<Multihash> newLinks = CborObject.fromByteArray(newBlock).links();
List<Multihash> existingLinks = existing.map(h -> get(h).join())
.flatMap(copt -> copt.map(CborObject::links))
.orElse(Collections.emptyList());
for (int i=0; i < newLinks.size(); i++) {
Optional<Multihash> existingLink = i < existingLinks.size() ?
Optional.of(existingLinks.get(i)) :
Optional.empty();
Optional<Multihash> updatedLink = Optional.of(newLinks.get(i));
mirror(owner, existingLink, updatedLink, tid);
}
return Futures.of(Collections.singletonList(newRoot));
}
@Override
public CompletableFuture<List<Multihash>> pinUpdate(PublicKeyHash owner, Multihash existing, Multihash updated) {
return Futures.of(Collections.singletonList(updated));
}
@Override
public CompletableFuture<List<Multihash>> recursivePin(PublicKeyHash owner, Multihash hash) {
return Futures.of(Collections.singletonList(hash));
}
@Override
public CompletableFuture<List<Multihash>> recursiveUnpin(PublicKeyHash owner, Multihash hash) {
return Futures.of(Collections.singletonList(hash));
}
@Override
public CompletableFuture<List<byte[]>> getChampLookup(PublicKeyHash owner, Multihash root, byte[] champKey) {
return getChampLookup(root, champKey, hasher);
}
@Override
public CompletableFuture<Boolean> gc() {
return Futures.errored(new IllegalStateException("S3 doesn't implement GC!"));
}
@Override
public List<Multihash> getOpenTransactionBlocks() {
return transactions.getOpenTransactionBlocks();
}
private void collectGarbage(JdbcIpnsAndSocial pointers) {
GarbageCollector.collect(this, pointers, this::savePointerSnapshot);
}
private CompletableFuture<Boolean> savePointerSnapshot(Stream<Map.Entry<PublicKeyHash, byte[]>> pointers) {
// Save pointers snapshot to file
Path pointerSnapshotFile = Paths.get("pointers-snapshot-" + LocalDateTime.now() + ".txt");
pointers.forEach(entry -> {
try {
Files.write(pointerSnapshotFile, (entry.getKey() + ":" +
ArrayOps.bytesToHex(entry.getValue()) + "\n").getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
return Futures.of(true);
}
@Override
public CompletableFuture<Optional<Integer>> getSize(Multihash hash) {
return getSize(hash, 3, 100);
}
private CompletableFuture<Optional<Integer>> getSize(Multihash hash, int retries, long sleepMillis) {
if (hash.isIdentity()) // Identity hashes are not actually stored explicitly
return Futures.of(Optional.of(0));
Histogram.Timer readTimer = readTimerLog.labels("size").startTimer();
try {
PresignedUrl headUrl = S3Request.preSignHead(folder + hashToKey(hash), Optional.of(60),
ZonedDateTime.now(), host, region, accessKeyId, secretKey);
Map<String, List<String>> headRes = HttpUtil.head(headUrl);
long size = Long.parseLong(headRes.get("Content-Length").get(0));
return Futures.of(Optional.of((int)size));
} catch (Exception e) {
if (e.getMessage().contains("HTTP 503")) {
LOG.info("Sleeping for "+sleepMillis+" because of http 503 from S3 (you are being rate limited) getting size of " + hash + " ...");
try {Thread.sleep(sleepMillis);} catch (InterruptedException f) {}
if (retries <= 0)
throw new RuntimeException(e);
return getSize(hash, retries - 1, sleepMillis * 2);
} else {
LOG.log(Level.SEVERE, e.getMessage(), e);
return Futures.of(Optional.empty());
}
} finally {
readTimer.observeDuration();
}
}
public boolean contains(Multihash hash) {
try {
PresignedUrl headUrl = S3Request.preSignHead(folder + hashToKey(hash), Optional.of(60),
ZonedDateTime.now(), host, region, accessKeyId, secretKey);
Map<String, List<String>> headRes = HttpUtil.head(headUrl);
return true;
} catch (Exception e) {
return false;
}
}
@Override
public CompletableFuture<Multihash> id() {
return Futures.of(id);
}
@Override
public CompletableFuture<TransactionId> startTransaction(PublicKeyHash owner) {
return CompletableFuture.completedFuture(transactions.startTransaction(owner));
}
@Override
public CompletableFuture<Boolean> closeTransaction(PublicKeyHash owner, TransactionId tid) {
transactions.closeTransaction(owner, tid);
return CompletableFuture.completedFuture(true);
}
@Override
public CompletableFuture<List<Multihash>> put(PublicKeyHash owner,
PublicKeyHash writer,
List<byte[]> signedHashes,
List<byte[]> blocks,
TransactionId tid) {
return put(owner, blocks, false, tid);
}
@Override
public CompletableFuture<List<Multihash>> putRaw(PublicKeyHash owner,
PublicKeyHash writer,
List<byte[]> signedHashes,
List<byte[]> blocks,
TransactionId tid,
ProgressConsumer<Long> progressConsumer) {
return put(owner, blocks, true, tid);
}
private CompletableFuture<List<Multihash>> put(PublicKeyHash owner,
List<byte[]> blocks,
boolean isRaw,
TransactionId tid) {
return CompletableFuture.completedFuture(blocks.stream()
.map(b -> put(b, isRaw, tid, owner))
.collect(Collectors.toList()));
}
/** Must be atomic relative to reads of the same key
*
* @param data
*/
public Multihash put(byte[] data, boolean isRaw, TransactionId tid, PublicKeyHash owner) {
Histogram.Timer writeTimer = writeTimerLog.labels("write").startTimer();
Multihash hash = new Multihash(Multihash.Type.sha2_256, Hash.sha256(data));
Cid cid = new Cid(1, isRaw ? Cid.Codec.Raw : Cid.Codec.DagCbor, hash.type, hash.getHash());
String key = hashToKey(cid);
try {
transactions.addBlock(cid, tid, owner);
String s3Key = folder + key;
Map<String, String> extraHeaders = new TreeMap<>();
extraHeaders.put("Content-Type", "application/octet-stream");
boolean hashContent = true;
String contentHash = hashContent ? ArrayOps.bytesToHex(hash.getHash()) : "UNSIGNED-PAYLOAD";
PresignedUrl putUrl = S3Request.preSignPut(s3Key, data.length, contentHash, false,
ZonedDateTime.now(), host, extraHeaders, region, accessKeyId, secretKey);
HttpUtil.put(putUrl, data);
return cid;
} catch (IOException e) {
LOG.log(Level.SEVERE, e.getMessage(), e);
throw new RuntimeException(e.getMessage(), e);
} finally {
writeTimer.observeDuration();
}
}
public Stream<Multihash> getAllBlockHashes() {
// todo make this actually streaming
return getFiles(Long.MAX_VALUE).stream();
}
private List<Multihash> getFiles(long maxReturned) {
List<Multihash> results = new ArrayList<>();
applyToAll(obj -> {
try {
results.add(keyToHash(obj.key));
} catch (Exception e) {
LOG.warning("Couldn't parse S3 key to Cid: " + obj.key);
}
}, maxReturned);
return results;
}
private List<String> getFilenames(long maxReturned) {
List<String> results = new ArrayList<>();
applyToAll(obj -> results.add(obj.key), maxReturned);
return results;
}
private void applyToAll(Consumer<S3Request.ObjectMetadata> processor, long maxObjects) {
try {
Optional<String> continuationToken = Optional.empty();
S3Request.ListObjectsReply result;
long processedObjects = 0;
do {
result = S3Request.listObjects(folder, 1_000, continuationToken,
ZonedDateTime.now(), host, region, accessKeyId, secretKey, url -> {
try {
return HttpUtil.get(url);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
for (S3Request.ObjectMetadata objectSummary : result.objects) {
if (objectSummary.key.endsWith("/")) {
LOG.fine(" - " + objectSummary.key + " " + "(directory)");
continue;
}
processor.accept(objectSummary);
processedObjects++;
if (processedObjects >= maxObjects)
return;
}
LOG.log(Level.FINE, "Next Continuation Token : " + result.continuationToken);
continuationToken = result.continuationToken;
} while (result.isTruncated);
} catch (Exception e) {
LOG.log(Level.SEVERE, e.getMessage(), e);
}
}
public void delete(Multihash hash) {
try {
PresignedUrl delUrl = S3Request.preSignDelete(folder + hashToKey(hash), ZonedDateTime.now(), host,
region, accessKeyId, secretKey);
HttpUtil.delete(delUrl);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public void bulkDelete(List<Multihash> hash) {
try {
List<String> keys = hash.stream()
.map(h -> folder + hashToKey(h))
.collect(Collectors.toList());
S3Request.bulkDelete(keys, ZonedDateTime.now(), host, region, accessKeyId, secretKey,
b -> ArrayOps.bytesToHex(Hash.sha256(b)),
(url, body) -> {
try {
return HttpUtil.post(url, body);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static void main(String[] args) throws Exception {
System.out.println("Performing GC on block store...");
Args a = Args.parse(args);
Crypto crypto = Main.initCrypto();
Hasher hasher = crypto.hasher;
S3Config config = S3Config.build(a);
boolean usePostgres = a.getBoolean("use-postgres", false);
SqlSupplier sqlCommands = usePostgres ?
new PostgresCommands() :
new SqliteCommands();
Supplier<Connection> database = Main.getDBConnector(a, "mutable-pointers-file");
Supplier<Connection> transactionsDb = Main.getDBConnector(a, "transactions-sql-file");
TransactionStore transactions = JdbcTransactionStore.build(transactionsDb, sqlCommands);
S3BlockStorage s3 = new S3BlockStorage(config, Cid.decode(a.getArg("ipfs.id")),
BlockStoreProperties.empty(), transactions, hasher, new RAMStorage(hasher));
JdbcIpnsAndSocial rawPointers = new JdbcIpnsAndSocial(database, sqlCommands);
s3.collectGarbage(rawPointers);
}
/**
 * Smoke-tests access to a bucket: list, read, write and delete one object.
 * Use this method to verify credentials and configuration against a live bucket.
 */
public static void test(String[] args) throws Exception {
    Crypto crypto = Main.initCrypto();
    Hasher hasher = crypto.hasher;
    S3Config cfg = S3Config.build(Args.parse(args));
    System.out.println("Testing S3 bucket: " + cfg.bucket + " in region " + cfg.region + " with base dir: " + cfg.path);
    Multihash nodeId = new Multihash(Multihash.Type.sha2_256, RAMStorage.hash("S3Storage".getBytes()));
    // An ephemeral sqlite transaction store is enough for a connectivity test.
    TransactionStore txns = JdbcTransactionStore.build(Main.buildEphemeralSqlite(), new SqliteCommands());
    S3BlockStorage store = new S3BlockStorage(cfg, nodeId, BlockStoreProperties.empty(), txns, hasher, new RAMStorage(hasher));
    System.out.println("***** Testing ls and read");
    System.out.println("Testing ls...");
    List<Multihash> listed = store.getFiles(1000);
    System.out.println("Success! found " + listed.size());
    System.out.println("Testing read...");
    byte[] blob = store.getRaw(listed.get(0)).join().get();
    System.out.println("Success: read blob of size " + blob.length);
    System.out.println("Testing write...");
    byte[] payload = new byte[10 * 1024];
    new Random().nextBytes(payload);
    PublicKeyHash owner = PublicKeyHash.NULL;
    TransactionId tid = store.startTransaction(owner).join();
    Multihash written = store.put(payload, true, tid, owner);
    System.out.println("Success!");
    System.out.println("Testing delete...");
    store.delete(written);
    System.out.println("Success!");
}
@Override
public String toString() {
    // Identify this store by its bucket and key prefix.
    return String.format("S3BlockStore[%s:%s]", bucket, folder);
}
}
|
package org.bouncycastle.jsse.provider;
import java.security.AlgorithmParameters;
import java.security.GeneralSecurityException;
import java.security.cert.CertPathValidatorException;
import java.security.cert.Certificate;
import java.security.cert.CertificateParsingException;
import java.security.cert.PKIXCertPathChecker;
import java.security.cert.X509Certificate;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.bouncycastle.asn1.edec.EdECObjectIdentifiers;
import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers;
import org.bouncycastle.asn1.x509.KeyPurposeId;
import org.bouncycastle.asn1.x9.X9ObjectIdentifiers;
import org.bouncycastle.jcajce.util.JcaJceHelper;
import org.bouncycastle.jsse.java.security.BCAlgorithmConstraints;
import org.bouncycastle.jsse.java.security.BCCryptoPrimitive;
import org.bouncycastle.util.Arrays;
/**
 * A {@link PKIXCertPathChecker} that enforces the configured
 * {@link BCAlgorithmConstraints} over a certification path: every issued
 * certificate's signature algorithm must be permitted for its issuer's public
 * key, and (via the static helpers) the end-entity certificate must support the
 * ExtendedKeyUsage and KeyUsage required for the TLS role being negotiated.
 *
 * <p>Checking is reverse-order only (trust anchor towards end entity): each call
 * to {@link #check(Certificate, Collection)} validates the certificate against
 * the previously-seen certificate as its issuer, then remembers it.
 */
class ProvAlgorithmChecker
    extends PKIXCertPathChecker
{
    // KeyUsage bit positions (RFC 5280, 4.2.1.3) relevant to TLS authentication.
    static final int KU_DIGITAL_SIGNATURE = 0;
    static final int KU_KEY_ENCIPHERMENT = 2;
    static final int KU_KEY_AGREEMENT = 4;

    // OID -> canonical JCA signature algorithm name, for OIDs where the
    // certificate's own getSigAlgName() may not return the expected name.
    private static final Map<String, String> sigAlgNames = createSigAlgNames();
    // OIDs whose AlgorithmIdentifier parameters field is expected to be absent
    // (a DER NULL encoding is tolerated and treated as "no parameters").
    private static final Set<String> sigAlgNoParams = createSigAlgNoParams();

    // DER encoding of an ASN.1 NULL value (tag 0x05, length 0).
    private static final byte[] DER_NULL_ENCODING = new byte[]{ 0x05, 0x00 };

    /** Builds the immutable OID-to-name map used by {@link #getSigAlgName(X509Certificate)}. */
    private static Map<String, String> createSigAlgNames()
    {
        Map<String, String> names = new HashMap<String, String>();

        // TODO[jsse] We may need more mappings (from sigAlgOID) here for SunJSSE compatibility (e.g. RSASSA-PSS?)
        names.put(EdECObjectIdentifiers.id_Ed25519.getId(), "Ed25519");
        names.put(EdECObjectIdentifiers.id_Ed448.getId(), "Ed448");
        names.put(OIWObjectIdentifiers.dsaWithSHA1.getId(), "SHA1withDSA");
        names.put(X9ObjectIdentifiers.id_dsa_with_sha1.getId(), "SHA1withDSA");

        return Collections.unmodifiableMap(names);
    }

    /** Builds the immutable set of signature OIDs that carry no algorithm parameters. */
    private static Set<String> createSigAlgNoParams()
    {
        Set<String> noParams = new HashSet<String>();

        noParams.add(OIWObjectIdentifiers.dsaWithSHA1.getId());
        noParams.add(X9ObjectIdentifiers.id_dsa_with_sha1.getId());

        return Collections.unmodifiableSet(noParams);
    }

    private final JcaJceHelper helper;
    private final BCAlgorithmConstraints algorithmConstraints;

    // The certificate seen in the previous check() call; acts as the issuer of
    // the next certificate in reverse-order checking. Null before the anchor.
    private X509Certificate issuerCert;

    /**
     * @param helper provider-aware factory for AlgorithmParameters instances
     * @param algorithmConstraints the constraints to enforce; must not be null
     */
    ProvAlgorithmChecker(JcaJceHelper helper, BCAlgorithmConstraints algorithmConstraints)
    {
        if (null == helper)
        {
            throw new NullPointerException("'helper' cannot be null");
        }
        if (null == algorithmConstraints)
        {
            throw new NullPointerException("'algorithmConstraints' cannot be null");
        }

        this.helper = helper;
        this.algorithmConstraints = algorithmConstraints;

        this.issuerCert = null;
    }

    @Override
    public void init(boolean forward) throws CertPathValidatorException
    {
        // Only reverse (anchor-first) checking is supported; reset issuer state.
        if (forward)
        {
            throw new CertPathValidatorException("forward checking not supported");
        }

        this.issuerCert = null;
    }

    @Override
    public boolean isForwardCheckingSupported()
    {
        return false;
    }

    @Override
    public Set<String> getSupportedExtensions()
    {
        // This checker does not resolve any critical extensions itself.
        return null;
    }

    @Override
    public void check(Certificate cert, Collection<String> unresolvedCritExts) throws CertPathValidatorException
    {
        if (!(cert instanceof X509Certificate))
        {
            throw new CertPathValidatorException("checker can only be used for X.509 certificates");
        }

        X509Certificate subjectCert = (X509Certificate)cert;

        if (null == issuerCert)
        {
            // First certificate (trust anchor side): nothing to verify yet.
            // NOTE: This would be redundant with the 'taCert' check in 'checkCertPathExtras'
            //checkIssued(helper, algorithmConstraints, subjectCert);
        }
        else
        {
            checkIssuedBy(helper, algorithmConstraints, subjectCert, issuerCert);
        }

        // Remember this certificate as the issuer for the next call.
        this.issuerCert = subjectCert;
    }

    /**
     * Performs the extra checks around an already-validated path: the anchor's
     * signature over its immediate child, and the end-entity EKU/KU constraints.
     */
    static void checkCertPathExtras(JcaJceHelper helper, BCAlgorithmConstraints algorithmConstraints,
        X509Certificate[] chain, KeyPurposeId ekuOID, int kuBit) throws CertPathValidatorException
    {
        // Chain is end-entity first; the last element is the trust anchor.
        X509Certificate taCert = chain[chain.length - 1];

        if (chain.length > 1)
        {
            checkIssuedBy(helper, algorithmConstraints, chain[chain.length - 2], taCert);
        }

        X509Certificate eeCert = chain[0];

        checkEndEntity(helper, algorithmConstraints, eeCert, ekuOID, kuBit);
    }

    /**
     * Checks a full chain: finds the first untrusted certificate from the anchor
     * end, verifies each issued certificate against its issuer, then applies the
     * end-entity EKU/KU checks.
     */
    static void checkChain(JcaJceHelper helper, BCAlgorithmConstraints algorithmConstraints,
        Set<X509Certificate> trustedCerts, X509Certificate[] chain, KeyPurposeId ekuOID, int kuBit)
        throws CertPathValidatorException
    {
        // Walk back from the end of the chain past any trusted certificates;
        // taPos ends up at the first untrusted position (the effective anchor).
        int taPos = chain.length;
        while (taPos > 0 && trustedCerts.contains(chain[taPos - 1]))
        {
            --taPos;
        }

        if (taPos < chain.length)
        {
            X509Certificate taCert = chain[taPos];

            if (taPos > 0)
            {
                checkIssuedBy(helper, algorithmConstraints, chain[taPos - 1], taCert);
            }
        }
        else
        {
            // No trusted certificate found: check the last certificate's own
            // signature algorithm (without an issuer key).
            checkIssued(helper, algorithmConstraints, chain[taPos - 1]);
        }

        // Reverse-check the untrusted portion of the chain towards the end entity.
        ProvAlgorithmChecker algorithmChecker = new ProvAlgorithmChecker(helper, algorithmConstraints);
        algorithmChecker.init(false);

        for (int i = taPos - 1; i >= 0; --i)
        {
            algorithmChecker.check(chain[i]);
        }

        X509Certificate eeCert = chain[0];

        checkEndEntity(helper, algorithmConstraints, eeCert, ekuOID, kuBit);
    }

    /**
     * Validates the end-entity certificate's ExtendedKeyUsage (if required),
     * KeyUsage bit (if required), and that its public key is permitted for the
     * corresponding crypto primitives.
     */
    private static void checkEndEntity(JcaJceHelper helper, BCAlgorithmConstraints algorithmConstraints,
        X509Certificate eeCert, KeyPurposeId ekuOID, int kuBit) throws CertPathValidatorException
    {
        if (null != ekuOID)
        {
            if (!supportsExtendedKeyUsage(eeCert, ekuOID))
            {
                throw new CertPathValidatorException(
                    "Certificate doesn't support '" + getExtendedKeyUsageName(ekuOID) + "' ExtendedKeyUsage");
            }
        }

        if (kuBit >= 0)
        {
            if (!supportsKeyUsage(eeCert, kuBit))
            {
                throw new CertPathValidatorException(
                    "Certificate doesn't support '" + getKeyUsageName(kuBit) + "' KeyUsage");
            }

            if (!algorithmConstraints.permits(getKeyUsagePrimitives(kuBit), eeCert.getPublicKey()))
            {
                throw new CertPathValidatorException(
                    "Public key not permitted for '" + getKeyUsageName(kuBit) + "' KeyUsage");
            }
        }
    }

    /** Checks a certificate's signature algorithm (name and parameters) alone. */
    private static void checkIssued(JcaJceHelper helper, BCAlgorithmConstraints algorithmConstraints,
        X509Certificate cert) throws CertPathValidatorException
    {
        String sigAlgName = getSigAlgName(cert);
        AlgorithmParameters sigAlgParams = getSigAlgParams(helper, cert);

        if (!algorithmConstraints.permits(JsseUtils.SIGNATURE_CRYPTO_PRIMITIVES_BC, sigAlgName, sigAlgParams))
        {
            throw new CertPathValidatorException();
        }
    }

    /** Checks a certificate's signature algorithm against its issuer's public key. */
    private static void checkIssuedBy(JcaJceHelper helper, BCAlgorithmConstraints algorithmConstraints,
        X509Certificate subjectCert, X509Certificate issuerCert) throws CertPathValidatorException
    {
        String sigAlgName = getSigAlgName(subjectCert);
        AlgorithmParameters sigAlgParams = getSigAlgParams(helper, subjectCert);

        if (!algorithmConstraints.permits(JsseUtils.SIGNATURE_CRYPTO_PRIMITIVES_BC, sigAlgName,
            issuerCert.getPublicKey(), sigAlgParams))
        {
            throw new CertPathValidatorException();
        }
    }

    /** Returns a human-readable name for an EKU OID (for error messages). */
    private static String getExtendedKeyUsageName(KeyPurposeId ekuOID)
    {
        if (KeyPurposeId.id_kp_clientAuth.equals(ekuOID))
        {
            return "clientAuth";
        }
        if (KeyPurposeId.id_kp_serverAuth.equals(ekuOID))
        {
            return "serverAuth";
        }
        return "(" + ekuOID + ")";
    }

    /** Returns a human-readable name for a KeyUsage bit (for error messages). */
    private static String getKeyUsageName(int kuBit)
    {
        switch (kuBit)
        {
        case KU_DIGITAL_SIGNATURE:
            return "digitalSignature";
        case KU_KEY_ENCIPHERMENT:
            return "keyEncipherment";
        case KU_KEY_AGREEMENT:
            return "keyAgreement";
        default:
            return "(" + kuBit + ")";
        }
    }

    /** Maps a KeyUsage bit to the crypto primitives it must be permitted for. */
    private static Set<BCCryptoPrimitive> getKeyUsagePrimitives(int kuBit)
    {
        switch (kuBit)
        {
        case KU_KEY_AGREEMENT:
            return JsseUtils.KEY_AGREEMENT_CRYPTO_PRIMITIVES_BC;
        case KU_KEY_ENCIPHERMENT:
            return JsseUtils.KEY_ENCAPSULATION_CRYPTO_PRIMITIVES_BC;
        default:
            return JsseUtils.SIGNATURE_CRYPTO_PRIMITIVES_BC;
        }
    }

    /** Canonical signature algorithm name: mapped OID name first, else the cert's own. */
    private static String getSigAlgName(X509Certificate cert)
    {
        String sigAlgName = sigAlgNames.get(cert.getSigAlgOID());
        if (null != sigAlgName)
        {
            return sigAlgName;
        }
        return cert.getSigAlgName();
    }

    /**
     * Decodes the certificate's signature AlgorithmIdentifier parameters, or
     * returns null when absent (a DER NULL for a no-parameter algorithm counts
     * as absent). Throws when present parameters fail to decode.
     */
    private static AlgorithmParameters getSigAlgParams(JcaJceHelper helper, X509Certificate cert)
        throws CertPathValidatorException
    {
        byte[] encoded = cert.getSigAlgParams();
        if (null == encoded)
        {
            return null;
        }

        String sigAlgOID = cert.getSigAlgOID();
        if (sigAlgNoParams.contains(sigAlgOID) && Arrays.areEqual(DER_NULL_ENCODING, encoded))
        {
            return null;
        }

        AlgorithmParameters sigAlgParams;
        try
        {
            sigAlgParams = helper.createAlgorithmParameters(sigAlgOID);
        }
        catch (GeneralSecurityException e)
        {
            // TODO[jsse] Consider requiring 'encoded' to be a DER NULL encoding here
            return null;
        }

        try
        {
            sigAlgParams.init(encoded);
        }
        catch (Exception e)
        {
            throw new CertPathValidatorException(e);
        }

        return sigAlgParams;
    }

    /** True when the cert's EKU extension permits the given purpose (or parsing says "any"). */
    static boolean supportsExtendedKeyUsage(X509Certificate cert, KeyPurposeId ekuOID)
    {
        try
        {
            return supportsExtendedKeyUsage(cert.getExtendedKeyUsage(), ekuOID);
        }
        catch (CertificateParsingException e)
        {
            return false;
        }
    }

    /** An absent EKU extension, the exact OID, or anyExtendedKeyUsage all qualify. */
    static boolean supportsExtendedKeyUsage(List<String> eku, KeyPurposeId ekuOID)
    {
        return null == eku
            || eku.contains(ekuOID.getId())
            || eku.contains(KeyPurposeId.anyExtendedKeyUsage.getId());
    }

    /** True when the cert's KeyUsage extension permits the given bit. */
    static boolean supportsKeyUsage(X509Certificate cert, int kuBit)
    {
        return supportsKeyUsage(cert.getKeyUsage(), kuBit);
    }

    /** An absent KeyUsage extension permits everything; otherwise the bit must be set. */
    static boolean supportsKeyUsage(boolean[] ku, int kuBit)
    {
        return null == ku || (ku.length > kuBit && ku[kuBit]);
    }
}
|
package com.marklogic.contentpump;
import java.io.IOException;
import java.util.HashMap;
import java.util.Set;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
public class AggregateXMLReader<VALUEIN> extends AbstractRecordReader<VALUEIN> {
private static String DEFAULT_NS = null;
private int currDepth = 0;
protected XMLStreamReader xmlSR;
protected String recordName;
protected String recordNamespace;
private int recordDepth = Integer.MAX_VALUE;
private StringBuilder buffer;
protected String idName;
private boolean skippingRecord = false;
protected String currentId = null;
private boolean keepGoing = true;
protected HashMap<String, String> nameSpaces = new HashMap<String, String>();
protected boolean startOfRecord = true;
protected boolean hasNext = true;
private boolean newDoc = false;
protected boolean useAutomaticId = false;
protected String mode;
protected IdGenerator idGen;
public AggregateXMLReader() {
}
@Override
public void close() throws IOException {
if (xmlSR != null) {
try {
xmlSR.close();
} catch (XMLStreamException e) {
e.printStackTrace();
}
}
}
@Override
public float getProgress() throws IOException, InterruptedException {
return hasNext ? 0 : 1;
}
@Override
public void initialize(InputSplit inSplit, TaskAttemptContext context)
throws IOException, InterruptedException {
Configuration conf = context.getConfiguration();
Path file = ((FileSplit) inSplit).getPath();
initCommonConfigurations(conf, file);
FileSystem fs = file.getFileSystem(context.getConfiguration());
FSDataInputStream fileIn = fs.open(file);
XMLInputFactory f = XMLInputFactory.newInstance();
try {
xmlSR = f.createXMLStreamReader(fileIn);
} catch (XMLStreamException e) {
e.printStackTrace();
}
idName = conf.get(ConfigConstants.CONF_AGGREGATE_URI_ID);
if(idName == null) {
useAutomaticId = true;
}
recordName = conf.get(ConfigConstants.CONF_AGGREGATE_RECORD_ELEMENT);
recordNamespace = conf
.get(ConfigConstants.CONF_AGGREGATE_RECORD_NAMESPACE);
mode = conf.get(ConfigConstants.CONF_MODE);
if (mode == null) {
idGen = new LocalIdGenerator(context.getTaskAttemptID().toString());
} else if (mode.equals(ConfigConstants.MODE_LOCAL)) {
idGen = new LocalIdGenerator(file.getName());
}
}
private void write(String str) {
// TODO
if (skippingRecord) {
return;
}
if (buffer == null) {
buffer = new StringBuilder();
}
if (recordDepth <= currDepth) {
buffer.append(str);
}
}
protected void copyNameSpaceDecl(){
if (recordDepth < currDepth) {
return;
}
int stop = xmlSR.getNamespaceCount();
if (stop > 0) {
String nsDeclPrefix, nsDeclUri;
LOG.debug("checking namespace declarations");
for (int i = 0; i < stop; i++) {
nsDeclPrefix = xmlSR.getNamespacePrefix(i);
nsDeclUri = xmlSR.getNamespaceURI(i);
nameSpaces.put(nsDeclPrefix, nsDeclUri);
}
}
}
private void processStartElement() throws XMLStreamException {
String name = xmlSR.getLocalName();
String namespace = xmlSR.getNamespaceURI();
String prefix = xmlSR.getPrefix();
int attrCount = xmlSR.getAttributeCount();
boolean isNewRootStart = false;
currDepth++;
if (recordName == null) {
recordName = name;
if (recordNamespace == null) {
recordNamespace = namespace;
}
recordDepth = currDepth;
isNewRootStart = true;
newDoc = true;
if (useAutomaticId) {
setKey(idGen.incrementAndGet());
}
} else {
// record element name may not nest
if (name.equals(recordName)
&& ((recordNamespace == null && namespace == null)
|| (recordNamespace != null && recordNamespace
.equals(namespace)))) {
recordDepth = currDepth;
isNewRootStart = true;
newDoc = true;
if (useAutomaticId) {
setKey(idGen.incrementAndGet());
}
}
}
copyNameSpaceDecl();
StringBuilder sb = new StringBuilder();
sb.append("<");
if (prefix != null && !prefix.equals("")) {
sb.append(prefix + ":" + name);
} else {
sb.append(name);
}
// add namespaces declared into the new root element
if (isNewRootStart && namespace != null) {
Set<String> keys = nameSpaces.keySet();
for (String k : keys) {
String v = nameSpaces.get(k);
if (DEFAULT_NS == k) {
sb.append(" xmlns=\"" + v + "\"");
} else {
sb.append(" xmlns:" + k + "=\"" + v + "\"");
}
}
}
for (int i = 0; i < attrCount; i++) {
String aPrefix = xmlSR.getAttributePrefix(i);
String aName = xmlSR.getAttributeLocalName(i);
String aValue = Utilities.escapeXml(xmlSR.getAttributeValue(i));
sb.append(" " + (null == aPrefix ? "" : aPrefix + ":") + aName
+ "=\"" + aValue + "\"");
if (!useAutomaticId && newDoc && ("@" + aName).equals(idName)) {
currentId = aValue;
setKey(aValue);
}
}
sb.append(">");
// allow for repeated idName elements: first one wins
// NOTE: idName is namespace-insensitive
if (!useAutomaticId && newDoc && name.equals(idName)) {
if (xmlSR.next() != XMLStreamConstants.CHARACTERS) {
throw new XMLStreamException("badly formed xml or " + idName
+ " is not a simple node: at" + xmlSR.getLocation());
}
String newId = xmlSR.getText();
currentId = newId;
sb.append(newId);
setKey(newId);
// advance to the END_ELEMENT
if (xmlSR.next() != XMLStreamConstants.END_ELEMENT) {
throw new XMLStreamException(
"badly formed xml: no END_TAG after id text"
+ xmlSR.getLocation());
}
sb.append("</");
sb.append(idName);
sb.append(">");
currDepth
}
// TODO
// if the startId is still defined, and the uri has been found,
// we should skip as much of this work as possible
// this avoids OutOfMemory errors, too
// if (skippingRecord) {
// LOG.debug("skipping record");
// return;
write(sb.toString());
}
/**
*
* @return false when the record end-element is found; true when keep going
* @throws XMLStreamException
*/
@SuppressWarnings("unchecked")
private boolean processEndElement() throws XMLStreamException {
// TODO
if (skippingRecord) {
return false;
}
String name = xmlSR.getLocalName();
String prefix = xmlSR.getPrefix();
if (!useAutomaticId && null == currentId && newDoc) {
throw new XMLStreamException("end of record element " + name
+ " with no id found: " + ConfigConstants.AGGREGATE_URI_ID
+ "=" + idName);
}
StringBuilder sb = new StringBuilder();
sb.append("</");
if (prefix != null && prefix != "") {
sb.append(prefix + ":" + name);
} else {
sb.append(name);
}
sb.append(">");
write(sb.toString());
if (recordDepth != currDepth || !newDoc) {
// not the end of the record: go look for more nodes
currDepth
return true;
}
if (value instanceof Text) {
((Text) value).set(buffer.toString());
} else if (value instanceof ContentWithFileNameWritable) {
VALUEIN realValue = ((ContentWithFileNameWritable<VALUEIN>) value)
.getValue();
if (realValue instanceof Text) {
((Text) realValue).set(buffer.toString());
} else {
throw new XMLStreamException("Expects Text in aggregate XML");
}
} else {
throw new XMLStreamException("Expects Text in aggregate XML");
}
currentId = null;
newDoc = false;
// reset buffer
buffer.setLength(0);
currDepth
// end of new record
return false;
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
if (xmlSR == null) {
hasNext = false;
return false;
}
try {
while (xmlSR.hasNext()) {
int eventType;
eventType = xmlSR.next();
switch (eventType) {
case XMLStreamConstants.START_ELEMENT:
if (startOfRecord) {
// this is the start of the root, only copy namespaces
copyNameSpaceDecl();
startOfRecord = false;
continue;
}
processStartElement();
break;
case XMLStreamConstants.CHARACTERS:
write(Utilities.escapeXml(xmlSR.getText()));
break;
case XMLStreamConstants.CDATA:
write("<![CDATA[");
write(xmlSR.getText());
write("]]>");
break;
case XMLStreamConstants.SPACE:
write(xmlSR.getText());
break;
case XMLStreamConstants.ENTITY_REFERENCE:
write("&");
write(xmlSR.getLocalName());
write(";");
break;
case XMLStreamConstants.DTD:
write("<!DOCTYPE");
write(xmlSR.getText());
write(">");
break;
case XMLStreamConstants.PROCESSING_INSTRUCTION:
write("<?");
write(xmlSR.getPIData());
write("?>");
break;
case XMLStreamConstants.COMMENT:
write("<!
write(xmlSR.getText());
write("
break;
case XMLStreamConstants.END_ELEMENT:
keepGoing = processEndElement();
if (!keepGoing) {
keepGoing = true;
return true;
}
break;
case XMLStreamConstants.START_DOCUMENT:
throw new XMLStreamException(
"unexpected start of document within record!\n"
+ "recordName = " + recordName
+ ", recordNamespace = " + recordNamespace
+ " at " + xmlSR.getLocation());
case XMLStreamConstants.END_DOCUMENT:
if (currentId != null) {
throw new XMLStreamException(
"end of document before end of current record!\n"
+ "recordName = " + recordName
+ ", recordNamespace = " + recordNamespace
+ " at " + xmlSR.getLocation());
} else {
hasNext = false;
break;
}
default:
throw new XMLStreamException("UNIMPLEMENTED: " + eventType);
}
}
} catch (XMLStreamException e) {
LOG.warn(e.getClass().getSimpleName() + " at "
+ xmlSR.getLocation());// .getPositionDescription());
if (e.getMessage().contains("quotation or apostrophe")
/* && !config.isFatalErrors() */) {
// messed-up attribute? skip it?
LOG.warn("attribute error: " + e.getMessage());
// all we can do is ignore it, apparently
} else {
LOG.error(e.toString());
throw new IOException(e.toString());
}
}
return false;
}
}
|
package com.gmail.zahusek.protocol;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import net.minecraft.util.com.mojang.authlib.GameProfile;
import net.minecraft.util.io.netty.channel.Channel;
import net.minecraft.util.io.netty.channel.ChannelDuplexHandler;
import net.minecraft.util.io.netty.channel.ChannelFuture;
import net.minecraft.util.io.netty.channel.ChannelHandlerContext;
import net.minecraft.util.io.netty.channel.ChannelInboundHandlerAdapter;
import net.minecraft.util.io.netty.channel.ChannelInitializer;
import net.minecraft.util.io.netty.channel.ChannelPipeline;
import net.minecraft.util.io.netty.channel.ChannelPromise;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerLoginEvent;
import org.bukkit.event.server.PluginDisableEvent;
import org.bukkit.plugin.Plugin;
import pl.bukkit.team.protocols.Reflection.*;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
/**
 * Reflection-based Minecraft packet interception for Bukkit plugins.
 *
 * <p>Injects a {@link ChannelDuplexHandler} ("{@code tiny-<plugin>-<n>}") into
 * each player's Netty pipeline (before {@code packet_handler}) so registered
 * {@link PacketListener}s can observe, rewrite, or cancel inbound and outbound
 * packets. A handler on the server's listen channels injects connections as
 * they are accepted; a Bukkit login listener covers any channel missed that way.
 * All NMS/OBC access goes through the {@code Reflection} helpers so the class
 * is not compiled against a specific server version.
 */
public class TinyProtocol {
	// Monotonic counter making each instance's pipeline handler name unique.
	private static final AtomicInteger ID = new AtomicInteger(0);

	// Reflection accessors: CraftPlayer -> EntityPlayer -> PlayerConnection
	// -> NetworkManager -> Netty Channel.
	private static final MethodInvoker getPlayerHandle = Reflection.getMethod(
			"{obc}.entity.CraftPlayer", "getHandle");
	private static final FieldAccessor<Object> getConnection = Reflection
			.getField("{nms}.EntityPlayer", "playerConnection", Object.class);
	private static final FieldAccessor<Object> getManager = Reflection
			.getField("{nms}.PlayerConnection", "networkManager", Object.class);
	private static final FieldAccessor<Channel> getChannel = Reflection
			.getField("{nms}.NetworkManager", Channel.class, 0);

	// Accessors for the server's connection bookkeeping (network manager list).
	private static final Class<Object> minecraftServerClass = Reflection
			.getUntypedClass("{nms}.MinecraftServer");
	private static final Class<Object> serverConnectionClass = Reflection
			.getUntypedClass("{nms}.ServerConnection");
	private static final FieldAccessor<Object> getMinecraftServer = Reflection
			.getField("{obc}.CraftServer", minecraftServerClass, 0);
	private static final FieldAccessor<Object> getServerConnection = Reflection
			.getField(minecraftServerClass, serverConnectionClass, 0);
	private static final MethodInvoker getNetworkMarkers = Reflection
			.getTypedMethod(serverConnectionClass, null, List.class,
					serverConnectionClass);

	// Login-start packet access, used to associate a channel with a player name
	// before the Player entity exists.
	private static final Class<?> PACKET_LOGIN_IN_START = Reflection
			.getMinecraftClass("PacketLoginInStart");
	private static final FieldAccessor<GameProfile> getGameProfile = Reflection
			.getField(PACKET_LOGIN_IN_START, GameProfile.class, 0);

	// Player name -> channel cache (weak values so channels can be collected).
	private Map<String, Channel> channelLookup = new MapMaker().weakValues()
			.makeMap();
	private Listener listener;

	// Channels explicitly uninjected; the login listener skips re-injecting them.
	private Set<Channel> uninjectedChannels = Collections
			.newSetFromMap(new MapMaker().weakKeys()
					.<Channel, Boolean> makeMap());

	// The server's live network manager list; synchronized on during injection.
	private List<Object> networkManagers;

	// Server listen channels we added our accept-handler to, plus the handlers.
	private List<Channel> serverChannels = Lists.newArrayList();
	private ChannelInboundHandlerAdapter serverChannelHandler;
	private ChannelInitializer<Channel> beginInitProtocol;
	private ChannelInitializer<Channel> endInitProtocol;

	// Unique pipeline name for this instance's interceptor.
	private String handlerName;

	private List<PacketListener> packetListeners = new ArrayList<PacketListener>();

	protected volatile boolean closed;
	protected Plugin plugin;

	/**
	 * Installs the interception machinery: Bukkit listeners, the server-channel
	 * handler, and injection of all currently online players.
	 */
	public TinyProtocol(Plugin plugin) {
		this.plugin = plugin;
		this.handlerName = getHandlerName();
		registerBukkitEvents();
		registerChannelHandler();
		registerPlayers(plugin);
	}

	/**
	 * Builds the three handlers used to inject newly accepted connections:
	 * serverChannelHandler sees each accepted child channel and adds
	 * beginInitProtocol, which defers to endInitProtocol to perform the actual
	 * injection once the child pipeline is being initialized.
	 */
	private void createServerChannelHandler() {
		endInitProtocol = new ChannelInitializer<Channel>() {
			@Override
			protected void initChannel(Channel channel) throws Exception {
				try {
					// Guard against concurrent shutdown while injecting.
					synchronized (networkManagers) {
						if (!closed) {
							injectChannelInternal(channel);
						}
					}
				} catch (Exception e) {
					plugin.getLogger().log(Level.SEVERE,
							"Cannot inject incomming channel " + channel, e);
				}
			}
		};
		beginInitProtocol = new ChannelInitializer<Channel>() {
			@Override
			protected void initChannel(Channel channel) throws Exception {
				channel.pipeline().addLast(endInitProtocol);
			}
		};
		serverChannelHandler = new ChannelInboundHandlerAdapter() {
			@Override
			public void channelRead(ChannelHandlerContext ctx, Object msg)
					throws Exception {
				// msg is an accepted child channel on a server listen channel.
				Channel channel = (Channel) msg;
				channel.pipeline().addFirst(beginInitProtocol);
				ctx.fireChannelRead(msg);
			}
		};
	}

	/** Registers login (re-inject if needed) and plugin-disable (close) hooks. */
	private void registerBukkitEvents() {
		listener = new Listener() {
			@EventHandler(priority = EventPriority.LOWEST)
			public final void onPlayerLogin(PlayerLoginEvent e) {
				if (closed)
					return;
				Channel channel = getChannel(e.getPlayer());
				// Don't re-inject channels that were explicitly uninjected.
				if (!uninjectedChannels.contains(channel)) {
					injectPlayer(e.getPlayer());
				}
			}

			@EventHandler
			public final void onPluginDisable(PluginDisableEvent e) {
				if (e.getPlugin().equals(plugin)) {
					close();
				}
			}
		};
		plugin.getServer().getPluginManager().registerEvents(listener, plugin);
	}

	/**
	 * Finds the server's listen channels (via the ServerConnection's List
	 * fields) and prepends our accept-handler to each.
	 */
	@SuppressWarnings("unchecked")
	private void registerChannelHandler() {
		Object mcServer = getMinecraftServer.get(Bukkit.getServer());
		Object serverConnection = getServerConnection.get(mcServer);
		boolean looking = true;
		networkManagers = (List<Object>) getNetworkMarkers.invoke(null,
				serverConnection);
		createServerChannelHandler();
		// Probe successive List fields until one holding ChannelFutures is found.
		for (int i = 0; looking; i++) {
			List<Object> list = Reflection.getField(
					serverConnection.getClass(), List.class, i).get(
					serverConnection);
			for (Object item : list) {
				if (!ChannelFuture.class.isInstance(item))
					break;
				Channel serverChannel = ((ChannelFuture) item).channel();
				serverChannels.add(serverChannel);
				serverChannel.pipeline().addFirst(serverChannelHandler);
				looking = false;
			}
		}
	}

	/** Removes the accept-handler from every listen channel (on its event loop). */
	private void unregisterChannelHandler() {
		if (serverChannelHandler == null)
			return;
		for (Channel serverChannel : serverChannels) {
			final ChannelPipeline pipeline = serverChannel.pipeline();
			// Remove channel handler
			serverChannel.eventLoop().execute(new Runnable() {
				@Override
				public void run() {
					try {
						pipeline.remove(serverChannelHandler);
					} catch (NoSuchElementException e) {
						// That's fine
					}
				}
			});
		}
	}

	/** Injects every player currently online (used at startup/reload). */
	private void registerPlayers(Plugin plugin) {
		for (Player player : plugin.getServer().getOnlinePlayers()) {
			injectPlayer(player);
		}
	}

	/** Sends a raw NMS packet to a player. */
	public void sendPacket(Player player, Object packet) {
		sendPacket(getChannel(player), packet);
	}

	public void sendPacket(Channel channel, Object packet) {
		channel.pipeline().writeAndFlush(packet);
	}

	/** Simulates receiving a raw NMS packet from a player. */
	public void receivePacket(Player player, Object packet) {
		receivePacket(getChannel(player), packet);
	}

	public void receivePacket(Channel channel, Object packet) {
		// Fire from the encoder's position so the packet flows inbound normally.
		channel.pipeline().context("encoder").fireChannelRead(packet);
	}

	/** Unique per-instance pipeline handler name. */
	protected String getHandlerName() {
		return "tiny-" + plugin.getName() + "-" + ID.incrementAndGet();
	}

	/** Injects the player's channel and binds the interceptor to the player. */
	public void injectPlayer(Player player) {
		injectChannelInternal(getChannel(player)).player = player;
	}

	public void injectChannel(Channel channel) {
		injectChannelInternal(channel);
	}

	/**
	 * Idempotently adds a PacketInterceptor before "packet_handler" and returns
	 * it (the existing one when already injected).
	 */
	private PacketInterceptor injectChannelInternal(Channel channel) {
		try {
			PacketInterceptor interceptor = (PacketInterceptor) channel
					.pipeline().get(handlerName);
			if (interceptor == null) {
				interceptor = new PacketInterceptor();
				channel.pipeline().addBefore("packet_handler", handlerName,
						interceptor);
				uninjectedChannels.remove(channel);
			}
			return interceptor;
		} catch (IllegalArgumentException e) {
			// Raced with another injector: the handler now exists, fetch it.
			return (PacketInterceptor) channel.pipeline().get(handlerName);
		}
	}

	/** Resolves (and caches) the Netty channel behind a player's connection. */
	public Channel getChannel(Player player) {
		Channel channel = channelLookup.get(player.getName());
		if (channel == null) {
			Object connection = getConnection.get(getPlayerHandle
					.invoke(player));
			Object manager = getManager.get(connection);
			channelLookup.put(player.getName(),
					channel = getChannel.get(manager));
		}
		return channel;
	}

	public void uninjectPlayer(Player player) {
		uninjectChannel(getChannel(player));
	}

	/** Removes our interceptor from the channel (on its event loop). */
	public void uninjectChannel(final Channel channel) {
		if (!closed) {
			// Remember it so the login listener won't re-inject it.
			uninjectedChannels.add(channel);
		}
		channel.eventLoop().execute(new Runnable() {
			@Override
			public void run() {
				channel.pipeline().remove(handlerName);
			}
		});
	}

	public boolean hasInjected(Player player) {
		return hasInjected(getChannel(player));
	}

	public boolean hasInjected(Channel channel) {
		return channel.pipeline().get(handlerName) != null;
	}

	/** Tears everything down: uninjects players, unregisters listeners/handlers. */
	public final void close() {
		if (!closed) {
			closed = true;
			// Remove our handlers
			for (Player player : plugin.getServer().getOnlinePlayers()) {
				uninjectPlayer(player);
			}
			// Clean up Bukkit
			HandlerList.unregisterAll(listener);
			unregisterChannelHandler();
		}
	}

	/**
	 * The per-connection pipeline handler: routes every inbound/outbound packet
	 * through the registered listeners; a listener returning null drops it.
	 */
	private final class PacketInterceptor extends ChannelDuplexHandler {
		// Set once the Player entity is known (see injectPlayer).
		public volatile Player player;

		@Override
		public void channelRead(ChannelHandlerContext ctx, Object msg)
				throws Exception {
			final Channel channel = ctx.channel();
			// Map the login name to this channel as early as possible.
			handleLoginStart(channel, msg);
			try {
				for (PacketListener listener : packetListeners) {
					msg = listener.onPacketInAsync(player, channel, msg);
				}
			} catch (Exception e) {
				plugin.getLogger().log(Level.SEVERE,
						"Error in onPacketInAsync().", e);
			}
			if (msg != null) {
				super.channelRead(ctx, msg);
			}
		}

		@Override
		public void write(ChannelHandlerContext ctx, Object msg,
				ChannelPromise promise) throws Exception {
			try {
				for (PacketListener listener : packetListeners) {
					msg = listener.onPacketOutAsync(player, ctx.channel(), msg);
				}
			} catch (Exception e) {
				plugin.getLogger().log(Level.SEVERE,
						"Error in onPacketOutAsync().", e);
			}
			if (msg != null) {
				super.write(ctx, msg, promise);
			}
		}

		/** Caches the channel under the logging-in profile's name. */
		private void handleLoginStart(Channel channel, Object packet) {
			if (PACKET_LOGIN_IN_START.isInstance(packet)) {
				GameProfile profile = getGameProfile.get(packet);
				channelLookup.put(profile.getName(), channel);
			}
		}
	}

	public Plugin getPlugin() {
		return this.plugin;
	}

	/** Adds a listener; note the listener list itself is not synchronized. */
	public void registerListener(PacketListener listener) {
		this.packetListeners.add(listener);
	}

	public void unregisterListener(PacketListener listener) {
		this.packetListeners.remove(listener);
	}

	/** Callbacks invoked on Netty I/O threads; return null to drop the packet. */
	public interface PacketListener {
		public Object onPacketOutAsync(Player reciever, Channel channel,
				Object packet);

		public Object onPacketInAsync(Player sender, Channel channel,
				Object packet);
	}
}
|
package project.v_trainning;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import java.util.Locale;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.Engine;
import android.speech.tts.TextToSpeech.OnInitListener;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;
import project.database.DataBase_vTrainning;
import android.os.Bundle;
import android.os.SystemClock;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.TextView;
import android.widget.Toast;
import android.os.AsyncTask;
import android.os.Looper;
import android.location.Criteria;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.LocationSource;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
//import com.google.android.gms.maps.SupportMapFragment;
//import com.google.android.gms.maps.model.MarkerOptions;
/**
* This class performs all the tasks about the training, and displays all training data for example distance, duration,
* type of training, and the route on a map
* @author Various
* @version 1.0
*/
public class TrainningActivity extends Activity implements LocationSource{
/***
 * Mode and file names used for the two SharedPreferences stores
 * (settings vs. training state).
 * */
final int mode = Activity.MODE_PRIVATE;
final String MYPREFS_SETTINGS = "MyPreferencesSettings";
final String MYPREFS_TRAINING = "MyPreferencesTrainning";
SharedPreferences myPreferences, myPreferencesRecover,myPreferencesRecoverTrainning;
// True once the user has completed the settings screen; Start is refused otherwise.
boolean isActivatedAjustes=false;
// On-screen widgets: activity name, distance, speed, control buttons, chronometer.
TextView txtActividad;
TextView txtDistancia;
TextView txtVelocidad;
Button btnTrainActStart,btnTrainActPause,btnTrainActStop;
Chronometer crono;
/*
 * Containers that accumulate the values returned by the GPS
 * (per-fix times, speeds and distances).
 */
public Vector<Long> Tiempo;
public Vector<Double> Velocidades;
public Vector<Double> Distancia;
public int index=0;
/***
 * It indicates if the training mode is active.
 * */
boolean isTrainingActive=false; // added by Marlon
// Map location-source plumbing and raw GPS state.
private OnLocationChangedListener mListener;
private LocationManager locationManager;
private LocationManager lm;
private LocationProvider locationProvider;
private Marker marker;
// All fixes recorded during the session (persisted as the route on Stop).
private List<Location> list = new ArrayList<Location>();
private Iterator iterador;
private Iterator it;
private Location locationAnt;
private Location loc;
Location locationPost;
Location locationInicial;
Location locationAux;
LocationListener myLocationListener;
private GoogleMap mMap;
private Boolean havelocation=false;
// flag: true until the first fix of a session has been stored.
private Boolean flag=true;
private Boolean flag2 = true;
private Boolean gpsIsEnabled;
// Speed bookkeeping: instantaneous (m/s and km/h), session average, accumulator.
private Double velocidad;
private Double velocidadKM = 0.0;
private Double velocidadFinal;
private Double velocidadesAux;
// Distance bookkeeping in metres / kilometres / current lap.
private Float distancia=(float) 0;
private Float distanciaKM = (float) 0;
private Float distanciaLap = (float) 0;
private DecimalFormat df = new DecimalFormat("0.00");
//private SimpleDateFormat dfo = new SimpleDateFormat("00:00");
// User profile recovered from preferences (age, activity, weight).
private Integer edad;
private String actividad;
//private Bundle recuperacionpeso; // weight recovery param
private Double peso; // recovered weight parameter (kg)
//private Double kg=90.0; // fallback weight in kg if the user provides none
private Double calorias =0.0;
private long minutos;
private String nombre;
// Heart-rate figures used in the final computation.
private double frecMaxCad=0.0;
private int frecBasal=120;
private double frecOptima=0.0;
private String proveedor;
String locationContext = Context.LOCATION_SERVICE;
String ritmo;
String ritmoresumen;
// Session state machine: "activo" while training, "inactivo" otherwise.
String estado ="inactivo";
String duracel;
String chronoText4;
Integer repeat = 1;
///// TextToSpeech engine state.
private TextToSpeech tts;
private static int TTS_DATA_CHECK = 1;
private boolean isTTSInitialized = false;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_trainning);
    // Containers for the per-fix speed/distance/time samples.
    Velocidades = new Vector<Double>();
    Distancia = new Vector<Double>();
    Tiempo = new Vector<Long>();
    // Create the object that manages locations.
    // BUGFIX: the 'locationManager' field was never assigned, so the
    // GPS-enabled check below could never run and startListener() was never
    // invoked. Keep both fields pointing at the same system service.
    locationManager = (LocationManager) getSystemService(locationContext);
    lm = locationManager;
    myLocationListener = new MyLocationListener();
    lm.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, myLocationListener);
    createWidget();
    setUpMapIfNeeded();
    speakBeggin();
    confirmTTSData();
    if (locationManager != null) {
        gpsIsEnabled = locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER);
        if (gpsIsEnabled) {
            startListener();
        } else {
            // GPS is disabled: warn the user.
            Toast.makeText(this, getResources().getString(R.string.msgNoGPS), Toast.LENGTH_LONG).show();
        }
    }
}
@Override
protected void onPause() {
    super.onPause();
    // Persist the running/paused flag before leaving the foreground.
    savePreferences();
}
@Override
protected void onResume() {
    super.onResume();
    // Reload user settings and greet the user every time we return.
    showSavedPreferencesSettings();
    Toast.makeText(getApplicationContext(), R.string.msgInicio, Toast.LENGTH_LONG).show();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Populate the action bar with this activity's menu entries.
    getMenuInflater().inflate(R.menu.trainning, menu);
    return true;
}
/**
 * Intercepts the hardware back button and asks the user to confirm exiting.
 */
@Override
public void onBackPressed() {
    final String title = getResources().getString(R.string.title_activity_trainning);
    final String message = getResources().getString(R.string.msgSalir);
    questionMessage(title, message,
            getResources().getString(R.string.btnOk),
            getResources().getString(R.string.btnCancel), false);
}
/**
 * Dispatches the options-menu selection to the matching navigation action.
 *
 * @param item the menu item chosen by the user
 */
public boolean onOptionsItemSelected(MenuItem item) {
    final int selectedId = item.getItemId();
    if (selectedId == R.id.menuOpAjustes) {
        // Open the settings screen.
        launchSettingActivity(findViewById(R.id.btnTrainActAjust));
        return true;
    }
    if (selectedId == R.id.menuOpResumen) {
        // Open the results/summary screen.
        launchResultsActivity(findViewById(R.id.btnTrainActResum));
        return true;
    }
    if (selectedId == R.id.menuOpCreditos) {
        launchAboutActivity(findViewById(R.id.btnTrainActTrain));
        return true;
    }
    if (selectedId == R.id.menuOpSalir) {
        // Confirm before exiting the app.
        questionMessage(getResources().getString(R.string.title_activity_trainning),
                getResources().getString(R.string.msgSalir),
                getResources().getString(R.string.btnOk),
                getResources().getString(R.string.btnCancel), false);
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/**
 * Looks up the training widgets and wires the Start, Pause and Stop buttons:
 * Start resumes the chronometer from its displayed value and marks the
 * session active, Pause freezes it, Stop computes the final results and
 * opens the results screen.
 */
private void createWidget(){
    txtActividad=(TextView)findViewById(R.id.tViewTrainActAct);
    txtDistancia=(TextView)findViewById(R.id.tViewTrainActDist);
    txtVelocidad=(TextView)findViewById(R.id.tViewTrainActVeloc);
    //etxtPulso=(EditText)findViewById(R.id.eTextPulso);
    crono = (Chronometer)findViewById(R.id.chronometer1);
    btnTrainActStart=(Button)findViewById(R.id.btnTrainActStart);
    btnTrainActPause=(Button)findViewById(R.id.btnTrainActPause);
    btnTrainActStop=(Button)findViewById(R.id.btnTrainActStop);
    // Dim/enable the buttons to reflect whether a session is in progress.
    if (!isTrainingActive){
        btnTrainActStart.setAlpha(1);
        btnTrainActStart.setEnabled(true);
        btnTrainActPause.setAlpha((float) 0.5);
        btnTrainActPause.setEnabled(false);
        btnTrainActStop.setAlpha((float) 0.5);
        btnTrainActStop.setEnabled(false);
    }else{
        btnTrainActStart.setAlpha((float) 0.5);
        btnTrainActStart.setEnabled(false);
        btnTrainActPause.setAlpha(1);
        btnTrainActPause.setEnabled(true);
        btnTrainActStop.setAlpha(1);
        btnTrainActStop.setEnabled(true);
    }
    btnTrainActStart.setOnClickListener(new View.OnClickListener() {
        /**
         * Start the training measurements (refused while settings are incomplete).
         */
        public void onClick(View view) {
            speakUserLocale(getResources().getString(R.string.msgStart));
            confirmTTSData();
            if (isActivatedAjustes){
                // Re-base the chronometer so it resumes from the time it displays.
                int stoppedMilliseconds = 0;
                String chronoText = crono.getText().toString();
                String array[] = chronoText.split(":"); // split the digits at each ':'
                if (array.length == 2) {
                    // "MM:SS" format -> milliseconds
                    stoppedMilliseconds = Integer.parseInt(array[0]) * 60 * 1000
                    + Integer.parseInt(array[1]) * 1000;
                } else if (array.length == 3) {
                    // "HH:MM:SS" format -> milliseconds
                    stoppedMilliseconds = Integer.parseInt(array[0]) * 60 * 60 * 1000
                    + Integer.parseInt(array[1]) * 60 * 1000
                    + Integer.parseInt(array[2]) * 1000;
                }
                crono.setBase(SystemClock.elapsedRealtime() - stoppedMilliseconds);
                //elapsedMillisLapInicial = (SystemClock.elapsedRealtime() - crono.getBase())/1000;
                float tiempo = (SystemClock.elapsedRealtime() - stoppedMilliseconds); // NOTE(review): computed but never read
                estado = "activo"; // pressing Start switches the session state to active
                //etxtPulso.setActivated(false);
                isTrainingActive=true;
                //speakUserLocale();
                crono.start(); // start the chronometer
                btnTrainActStart.setAlpha((float) 0.5);
                btnTrainActStart.setEnabled(false);
                btnTrainActPause.setAlpha(1);
                btnTrainActPause.setEnabled(true);
                btnTrainActStop.setAlpha(1);
                btnTrainActStop.setEnabled(true);
            }else{
                // Settings incomplete: refuse to start and tell the user.
                Toast.makeText(getApplicationContext(), R.string.msgNoAjustes, Toast.LENGTH_LONG).show();
            }
        }
    });
    btnTrainActPause.setOnClickListener(new View.OnClickListener() {
        /**
         * Pause the training measurements.
         */
        public void onClick(View view) {
            speakUserLocale(getResources().getString(R.string.msgPause));
            confirmTTSData();
            //etxtPulso.setActivated(true);
            isTrainingActive=false;
            crono.stop();
            btnTrainActStart.setAlpha(1);
            btnTrainActStart.setEnabled(true);
            btnTrainActPause.setAlpha((float) 0.5);
            btnTrainActPause.setEnabled(false);
            btnTrainActStop.setAlpha((float) 0.5);
            btnTrainActStop.setEnabled(false);
        }
    });
    btnTrainActStop.setOnClickListener(new View.OnClickListener() {
        /**
         * Stop the training measurements and make the results computations.
         */
        public void onClick(View view) {
            // Compute the final summary parameters (also persists the session).
            speakUserLocale(getResources().getString(R.string.msgStop));
            confirmTTSData();
            calculofinal();
            // Legacy persistence code kept for reference; calculofinal() now saves.
            //SQLiteDatabase db = data.getWritableDatabase();
            // Date is now stored as a formatted String.
            //mitime = fechaActusql.getTime();
            //String mitime = java.text.DateFormat.getDateTimeInstance().format(Calendar.getInstance().getTime());
            // Save the finished training to the DB.
            //data.guardarEntrenamiento(id, duracel, distanciaKM, velocidadfinal, ritmoresumen, caloriastotal, mitime, elapsedMillis);
            //db.close();
            btnTrainActStart.setAlpha(1);
            btnTrainActStart.setEnabled(true);
            btnTrainActPause.setAlpha((float) 0.5);
            btnTrainActPause.setEnabled(false);
            btnTrainActStop.setAlpha((float) 0.5);
            btnTrainActStop.setEnabled(false);
            crono.setBase(SystemClock.elapsedRealtime());
            estado = "inactivo"; // back to inactive; stop the clock
            crono.stop();
            //crono.setBase(0);
            txtDistancia.setText("");
            txtVelocidad.setText("");
            showResults();
            // Legacy preference reset on save, kept for reference.
            //misPreferencias = getSharedPreferences(MYPREFS, mode);
            //miEditor = misPreferencias.edit();
            //miEditor.putFloat("calorias", 0);
            //miEditor.putInt("distancia", 0);
            //miEditor.putLong("duracion", 0);
            //miEditor.commit();
            //tarea tat = new tarea(MainActivity.this);
            // tat.execute();
        }
    });
}
/**
 * Opens the settings (Ajustes) screen.
 * @param view the widget that triggered the navigation
 * */
public void launchSettingActivity(View view){
    final Intent settingsIntent = new Intent(TrainningActivity.this, AjustesActivity.class);
    startActivity(settingsIntent);
}
/**
 * Opens the summary (Resumen) screen.
 * @param view the widget that triggered the navigation
 * */
public void launchResumeActivity(View view){
    final Intent resumeIntent = new Intent(TrainningActivity.this, ResumenActivity.class);
    startActivity(resumeIntent);
}
/**
 * Opens the results screen.
 * @param view the widget that triggered the navigation
 * */
public void launchResultsActivity(View view){
    final Intent resultsIntent = new Intent(TrainningActivity.this, ResultsActivity.class);
    startActivity(resultsIntent);
}
/**
 * Opens the about/credits screen.
 * @param view the widget that triggered the navigation
 * */
public void launchAboutActivity(View view){
    final Intent aboutIntent = new Intent(TrainningActivity.this, AboutActivity.class);
    startActivity(aboutIntent);
}
/**
 * Navigates to the results screen once a session has been stopped.
 * */
private void showResults(){
    final Intent intent = new Intent(TrainningActivity.this, ResultsActivity.class);
    startActivity(intent);
}
/**
 * Persists whether the chronometer is currently running, so the state
 * survives the activity being paused.
 */
private void savePreferences() {
    myPreferences = getSharedPreferences(MYPREFS_TRAINING, mode);
    final Editor editor = myPreferences.edit();
    editor.putBoolean("estadoTimer", isTrainingActive);
    editor.commit();
}
/**
 * Restores the user settings (name, activity, weight, age, basal heart
 * rate) and the persisted training state from the two preference stores.
 * Non-numeric or non-positive values fall back to safe defaults.
 */
private void showSavedPreferencesSettings() {
    myPreferencesRecover = getSharedPreferences(MYPREFS_SETTINGS, mode);
    // getString never returns null here because a non-null default is supplied,
    // so the redundant .toString() calls of the original were dropped.
    nombre = myPreferencesRecover.getString("nombre", "");
    txtActividad.setText(myPreferencesRecover.getString("nombre_actividad", ""));
    isActivatedAjustes = Boolean.valueOf(myPreferencesRecover.getString("completo", ""));
    String temp = myPreferencesRecover.getString("peso", "60.0");
    peso = isPosDouble(temp) ? Double.valueOf(temp) : 0.0;
    temp = myPreferencesRecover.getString("edad", "20");
    edad = isPosInteger(temp) ? Integer.valueOf(temp) : 0;
    temp = myPreferencesRecover.getString("frecuencia_basal", "120");
    frecBasal = isPosInteger(temp) ? Integer.valueOf(temp) : 120;
    // Training-session state store.
    myPreferencesRecoverTrainning = getSharedPreferences(MYPREFS_TRAINING, mode);
    isTrainingActive = myPreferencesRecoverTrainning.getBoolean("estadoTimer", false);
    velocidadFinal = Double.valueOf(myPreferencesRecoverTrainning.getString("velocidadFinal", "0.0"));
}
@Override
public void activate(OnLocationChangedListener listener) {
    // The map registers itself here to receive our location fixes.
    mListener = listener;
}
@Override
public void deactivate() {
    // The map no longer wants our fixes.
    mListener = null;
}
//SE HACE UN PROPIO LOCATION LISTENER
private class MyLocationListener implements LocationListener
{
@Override
public void onLocationChanged(Location location) //cambio de lugar de localizacin
{
if( mListener != null )
{
mListener.onLocationChanged(location);
mMap.animateCamera(CameraUpdateFactory.newLatLng(new LatLng(location.getLatitude(), location.getLongitude())));
//enter.setEnabled(true); //activamos el botn de inicio al tener ya conexin gps para realizar la actividad
if(estado == "activo")
//enter.setEnabled(false);
if(estado == "activo"){
if(flag){
locationInicial = location;
flag = false;
}
list.add(location);
//nos centrar la posicin en el mapa
CameraPosition newCameraPosition = new CameraPosition.Builder()
.target(new LatLng(location.getLatitude(), location.getLongitude()))
.zoom(15)
.bearing(0)
.tilt(0)
.build();
mMap.animateCamera(CameraUpdateFactory.newCameraPosition(newCameraPosition));
//end
//calculo de la velocidad actual
velocidad = (double) location.getSpeed();
velocidadKM = (velocidad / 1000) * 3600;
txtVelocidad.setText(String.valueOf((Math.round(velocidadKM*10)/10)));
velocidadesAux=0.0;
Velocidades.add(velocidadKM);
//System.out.println("Velocidad Size:"+Velocidades.size());
//long t=Long.valueOf((crono.getText().toString().substring(0,crono.getText().toString().indexOf(":"))));
if(Long.valueOf((crono.getText().toString().substring(0,crono.getText().toString().indexOf(":"))))>0){
if(multiplo(Long.valueOf((crono.getText().toString().substring(0,crono.getText().toString().indexOf(":")))), 2)){
if(unaVez){
System.out.println("siiii");
speakUserLocale("Animo!!! vamos!!!");
confirmTTSData();
unaVez=false;
}else{
}
}else{
unaVez=true;
}
}
for(int i=0;i<Velocidades.size();i++){
//System.out.println("....l."+i+":"+Velocidades.get(i));
velocidadesAux+=Velocidades.get(i);
}
velocidadFinal=velocidadesAux/(Velocidades.size());
myPreferences = getSharedPreferences(MYPREFS_TRAINING, mode);
Editor myEditor = myPreferences.edit();
myEditor.putString("velocidadFinal", velocidadFinal.toString());
myEditor.commit();
//llamamos a la funcin que nos calcula la distancia
distancefunc(location);
//ALTITUD
//altitud = (int) location.getAltitude();//RETORNA UN DOUBLE
//llamamos a la tarea asncrona
task task = new task();
task.execute();
}
}else{
Toast.makeText(TrainningActivity.this, R.string.msgWarning, Toast.LENGTH_LONG).show();
}
}
//forzar usuario a que encienda gps antes de iniciar app
/**
| * If GPS is disabled force the user to activate it.
* @param arg0
*/
@Override
public void onProviderDisabled(String arg0) {
// Auto-generated method stub
Toast.makeText(getApplicationContext(), R.string.msgNoGPS, Toast.LENGTH_LONG).show();
Intent intent = new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS);
startActivity(intent);
}
@Override
public void onProviderEnabled(String arg0) {
// Auto-generated method stub
}
@Override
public void onStatusChanged(String proveedor, int status, Bundle extras) {
// Auto-generated method stub
}
}
/**
 * Accumulates the distance between the previous and the current GPS fix
 * into the session total ('distancia') and the current lap ('distanciaLap'),
 * and refreshes the kilometre total.
 * @param locationDis
 * The actual location.
 * @author Daniel Aparicio
 */
public void distancefunc (Location locationDis){
    // First call of a session: measure from the session's starting fix.
    if (distancia == 0) {
        locationAux = locationInicial;
    }
    // BUGFIX: compare strings with equals() instead of reference equality (==).
    if ("activo".equals(estado)) {
        float distanciaAux = locationAux.distanceTo(locationDis);
        distancia = distancia + distanciaAux;
        distanciaLap = distanciaLap + distanciaAux;
    }
    distanciaKM = (distancia / 1000);
    // Next call measures from this fix.
    locationAux = locationDis;
}
/**
 * AsyncTask whose only job is to refresh the distance TextView on the UI
 * thread once a new fix has been processed.
 * @author Jorge Zambrano
 */
private class task extends AsyncTask<Void, Void, Void> {
    @Override
    protected Void doInBackground(Void... arg0) {
        // No background work needed; the update happens in onPostExecute.
        return null;
    }

    protected void onPostExecute(Void result) {
        // Show the accumulated distance formatted to two decimals.
        txtDistancia.setText(String.valueOf(df.format(distanciaKM)));
        System.out.println(distanciaKM);
    }
}
/**
 * Configures the map: enable the "my location" layer but hide the built-in
 * recenter button (the camera is driven programmatically).
 */
private void setUpMap()
{
    mMap.setMyLocationEnabled(true);
    mMap.getUiSettings().setMyLocationButtonEnabled(false);
}
/**
 * Lazily obtains the GoogleMap from the map fragment and initialises it.
 * Does nothing if the map was already set up.
 */
@SuppressLint("NewApi")
private void setUpMapIfNeeded() {
    // Map not initialised yet?
    if (mMap == null)
    {
        // Try to obtain the map from the fragment.
        mMap = ((MapFragment) getFragmentManager().findFragmentById(R.id.fragment1)).getMap();
        // BUGFIX: guard BOTH calls — getMap() may return null (e.g. Play
        // Services unavailable); the original called setLocationSource on a
        // possibly-null mMap and would throw a NullPointerException.
        if (mMap != null)
        {
            setUpMap();
            // Register this activity as the map's location source.
            mMap.setLocationSource(this);
        }
    }
}
/**
 * Starts a background thread that registers a fine-accuracy location
 * listener with the best available provider.
 */
private void startListener()
{
    // Create a new Thread and start it.
    new Thread()
    {
        /** Register the location listener from its own looper thread. */
        public void run()
        {
            // Init a message looper, required for Android callbacks on this thread.
            Looper.prepare();
            // Request the best possible accuracy.
            Criteria c = new Criteria();
            c.setAccuracy(Criteria.ACCURACY_FINE);
            // BUGFIX: use the initialised manager 'lm'; the 'locationManager'
            // field is never assigned in this class and dereferencing it here
            // would throw a NullPointerException.
            proveedor = lm.getBestProvider(c, true);
            lm.requestLocationUpdates(proveedor, 0, 10F, myLocationListener);
            locationProvider = lm.getProvider(proveedor);
            // At this point we know GPS is on.
            Toast.makeText(TrainningActivity.this, getResources().getString(R.string.msgGPSok), Toast.LENGTH_LONG).show();
        }
    }
    .start();
}
/**
 * Shows a non-cancelable alert informing the user that GPS is unavailable
 * or not working correctly.
 */
public void avisoNoConexionGPS(){
    final AlertDialog.Builder builder = new AlertDialog.Builder(TrainningActivity.this);
    builder.setTitle(getResources().getString(R.string.msgWarning));
    builder.setMessage(getResources().getString(R.string.msgNoGPS))
            .setCancelable(false)
            .setPositiveButton(getResources().getString(R.string.btnOk),
                    new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    // Nothing to do: the dialog simply closes.
                }
            });
    builder.create().show();
}
/**
 * Finishes this Activity and terminates the whole app process.
 */
public void salir(){
    finish();
    // Hard-exit the process as the original did.
    System.exit(0);
}
/**
 * Shows a yes/no confirmation dialog whose positive button exits the app.
 *
 * @param title dialog title
 * @param text dialog message
 * @param nameButton1 label of the confirm (exit) button
 * @param nameButton2 label of the cancel button
 * @param cancelable whether the dialog may be dismissed with Back
 */
private void questionMessage(String title,String text, String nameButton1, String nameButton2, boolean cancelable){
    final AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(this);
    dialogBuilder.setTitle(title)
            .setMessage(text)
            .setCancelable(cancelable)
            .setPositiveButton(nameButton1, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    // Confirmed: leave the application.
                    salir();
                }
            })
            .setNegativeButton(nameButton2, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int id) {
                    // Cancelled: stay in the activity.
                }
            });
    dialogBuilder.create().show();
}
/**
 * Calculates the final training results (calories, averages, totals),
 * persists the session and every recorded GPS point to the database, and
 * precomputes the speed samples used by the results graph.
 * @author Sandra Ruiz
 */
public void calculofinal(){
    Toast.makeText(TrainningActivity.this, getResources().getString(R.string.msgCalculo), Toast.LENGTH_LONG).show();
    double basal=frecBasal;
    actividad=txtActividad.getText().toString();
    // Estimated maximum heart rate from age; appears to be the Tanaka
    // formula (208 - 0.7*age) — TODO confirm intended model.
    frecMaxCad=208-(0.7*edad);
    frecOptima=((frecMaxCad-frecBasal)/2)+80;
    // Recover the running average speed persisted by the location listener.
    myPreferencesRecoverTrainning=getSharedPreferences(MYPREFS_TRAINING,mode);
    velocidadFinal=Double.valueOf(myPreferencesRecoverTrainning.getString("velocidadFinal", "0.0"));
    // Minutes = the chronometer text up to the first ':'.
    minutos=Long.valueOf((crono.getText().toString().substring(0,crono.getText().toString().indexOf(":"))));
    // Calories by activity type: cycling uses speed-banded factors,
    // running uses distance * weight.
    if(actividad.equalsIgnoreCase(getResources().getString(R.string.ActivCycling))){
        if(velocidadFinal>=0 && velocidadFinal<=16){
            calorias=0.049*peso*2.2*minutos;
        }else {
            calorias=0.071*peso*2.2*minutos;
        }
    }else if(actividad.equalsIgnoreCase(getResources().getString(R.string.ActivRunning))){
        // Distance truncated to three decimals via substring of its String form.
        calorias=1.03*peso*Double.valueOf(String.valueOf(distanciaKM).substring(0, String.valueOf(distanciaKM).indexOf(".")+4));
    }
    // Session timestamp as a locale-formatted date/time string.
    String mitime = java.text.DateFormat.getDateTimeInstance().format(Calendar.getInstance().getTime());
    double distanciafinal=Double.valueOf(String.valueOf(distanciaKM).substring(0, String.valueOf(distanciaKM).indexOf(".")+4));
    savedDataBaseSesion(nombre,mitime,basal, 60, calorias, velocidadFinal, distanciafinal, minutos+"");
    // Persist every recorded route point (longitude/latitude/altitude).
    it=list.listIterator();
    while(it.hasNext()){
        loc = new Location((Location) it.next());
        savedDataBaseGPS(nombre,loc.getLatitude(),loc.getLongitude(),loc.getAltitude(),0.0,0.0,0.0);
    }
    calculaVelocidades();
}
/**
 * Saves the finished training session in the database.
 * @param name user name
 * @param date the training date
 * @param basal_sistolica systolic heart rate
 * @param basal_diastolica diastolic heart rate
 * @param calories burned calories
 * @param average_speed training average speed
 * @param total_distance total training distance
 * @param total_time total training time
 */
private void savedDataBaseSesion(String name,String date,double basal_sistolica, double basal_diastolica,double calories, double average_speed,double total_distance, String total_time){
    final DataBase_vTrainning db = new DataBase_vTrainning(this, "DBvTrainning", null, 1);
    // Insert the session row; the boolean success flag is not acted upon.
    db.setSesion(name, date, basal_sistolica, basal_diastolica, calories, average_speed, total_distance, total_time);
    db.closeDataBase();
}
/**
 * Saves one GPS route point for later route reconstruction.
 * @param name user name the point belongs to
 * @param latitude latitude of the fix
 * @param longitude longitude of the fix
 * @param GPS_height altitude of the fix
 * @param partial_speed speed at the fix
 * @param partial_distance distance at the fix
 * @param partial_time time at the fix
 */
private void savedDataBaseGPS(String name,double latitude,double longitude,double GPS_height,double partial_speed,double partial_distance,double partial_time){
    final DataBase_vTrainning db = new DataBase_vTrainning(this, "DBvTrainning", null, 1);
    // Insert the route point.
    db.setGPS(name, latitude, longitude, GPS_height, partial_speed, partial_distance, partial_time);
    db.closeDataBase();
}
/**
 * Samples nine roughly evenly spaced average speeds from the recorded
 * per-fix speeds and persists them as "velocidadPromedio0..8" for the
 * results graph.
 */
private void calculaVelocidades(){
    int i,j,k,l;
    double velProm;
    Double [] promedios= new Double[9];
    i=Velocidades.size();
    // Snapshot the recorded speeds into an array.
    Double [] veloc=new Double[i];
    for (j=0; j<i; j++)
        veloc[j]= Velocidades.get(j);
    // Window half-width. NOTE(review): i/16 is integer division, so
    // Math.round is a no-op here; presumably Math.round(i/16.0) was intended.
    k=Math.round(i/16);
    if (k==0) k=1;
    promedios[0]=veloc[0];
    // For each of the remaining 8 samples, average the speeds inside a
    // window of +/-k around position i*2*k (clamped to the valid range).
    for (i=1;i<9;i++){
        velProm=0;
        l=i*2*k;
        for (j=(l-k);j<=(l+k);j++){
            if (j<Velocidades.size() && j>=0)
                velProm=velProm+veloc[j];
        }
        // NOTE(review): j here is its value AFTER the loop (l+k+1); the full
        // window divides by 2*k, a truncated trailing window divides by k.
        if (j<=Velocidades.size()){
            promedios[i]=velProm/(2*k);
        }else promedios[i]=velProm/(k);
    }
    // Persist all nine samples for the results screen.
    myPreferences = getSharedPreferences(MYPREFS_TRAINING, mode);
    Editor myEditor = myPreferences.edit();
    myEditor.putString("velocidadPromedio0", String.valueOf(promedios[0]));
    myEditor.putString("velocidadPromedio1", String.valueOf(promedios[1]));
    myEditor.putString("velocidadPromedio2", String.valueOf(promedios[2]));
    myEditor.putString("velocidadPromedio3", String.valueOf(promedios[3]));
    myEditor.putString("velocidadPromedio4", String.valueOf(promedios[4]));
    myEditor.putString("velocidadPromedio5", String.valueOf(promedios[5]));
    myEditor.putString("velocidadPromedio6", String.valueOf(promedios[6]));
    myEditor.putString("velocidadPromedio7", String.valueOf(promedios[7]));
    myEditor.putString("velocidadPromedio8", String.valueOf(promedios[8]));
    myEditor.commit();
}
/**
 * Verify if an input string is a positive integer.
 * @param cadena candidate numeric string
 * @return true if cadena is an integer number greater than zero; false if not.
 */
private boolean isPosInteger(String cadena){
    try{
        // Idiom fix: return the comparison directly instead of the
        // if-true-else-false ladder of the original.
        return Integer.parseInt(cadena) > 0;
    }catch (NumberFormatException e){
        // Not parseable as an integer at all.
        return false;
    }
}
/**
 * Verify if an input string is a positive double.
 * @param cadena candidate numeric string
 * @return true if cadena is a double number greater than zero; false if not.
 */
private boolean isPosDouble(String cadena){
    try{
        // Idiom fix: return the comparison directly instead of the
        // if-true-else-false ladder of the original.
        return Double.parseDouble(cadena) > 0;
    }catch (NumberFormatException e){
        // Not parseable as a double at all.
        return false;
    }
}
/**
 * Launches the platform check for installed text-to-speech voice data;
 * the result is delivered to onActivityResult.
 */
private void confirmTTSData() {
    final Intent checkIntent = new Intent(Engine.ACTION_CHECK_TTS_DATA);
    startActivityForResult(checkIntent, TTS_DATA_CHECK);
}
/**
 * Receives the TTS voice-data check result: initialise TTS when voice
 * data is present, otherwise trigger its installation.
 */
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode != TTS_DATA_CHECK) {
        return;
    }
    if (resultCode == Engine.CHECK_VOICE_DATA_PASS) {
        // Voice data exists.
        initializeTTS();
    } else {
        // Voice data missing: ask the system to install it.
        startActivity(new Intent(Engine.ACTION_INSTALL_TTS_DATA));
    }
}
/**
 * Creates the TextToSpeech engine and records whether it initialised
 * successfully in isTTSInitialized.
 */
private void initializeTTS() {
    tts = new TextToSpeech(this, new OnInitListener() {
        public void onInit(int status) {
            // A failed initialisation simply leaves speech disabled.
            isTTSInitialized = (status == TextToSpeech.SUCCESS);
        }
    });
}
/**
 * Transforms the received text to synthesized speech, using the device's
 * current locale when the engine supports it.
 * @param message text to speak
 */
private void speakUserLocale(String message) {
    if (!isTTSInitialized) {
        return;
    }
    // Determine the user's locale and apply it when available.
    final Locale locale = this.getResources().getConfiguration().locale;
    if (tts.isLanguageAvailable(locale) >= 0) {
        tts.setLanguage(locale);
    }
    tts.setPitch(0.8f);
    tts.setSpeechRate(1.1f);
    tts.speak(message, TextToSpeech.QUEUE_ADD, null);
}
/**
 * Issues an empty utterance at startup — presumably to warm up the TTS
 * engine with the user's locale (TODO confirm intent).
 */
private void speakBeggin() {
    if (!isTTSInitialized) {
        return;
    }
    final Locale locale = this.getResources().getConfiguration().locale;
    if (tts.isLanguageAvailable(locale) >= 0) {
        tts.setLanguage(locale);
    }
    tts.setPitch(0.8f);
    tts.setSpeechRate(1.1f);
    tts.speak("", TextToSpeech.QUEUE_ADD, null);
}
@Override
public void onDestroy() {
    // Release the TTS engine before the activity goes away.
    if (tts != null) {
        tts.stop();
        tts.shutdown();
    }
    super.onDestroy();
}
// Latch so the periodic encouragement is spoken only once per even minute.
boolean unaVez=false;
/**
 * Verify if a number is a multiple of another one.
 * @param num input number
 * @param multiplo candidate divisor
 */
public boolean multiplo(long num, int multiplo){
    return num % multiplo == 0;
}
}
|
package residence.gui;
import gui.Building.Building;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.List;
import javax.swing.JPanel;
import javax.swing.Timer;
/**
 * Black animation canvas that draws four orange circles and listens for
 * mouse clicks (building hit-testing is currently disabled).
 */
public class ApartmentAnimationPanel extends JPanel implements MouseListener {
    private List<Building> apartments = new ArrayList<>();
    static final int XCOOR = 0;
    static final int YCOOR = 0;
    private final int WINDOWX = 800;
    private final int WINDOWY = 400;

    /** Creates the black canvas and registers for mouse events. */
    public ApartmentAnimationPanel() {
        setVisible(true);
        this.setBackground(Color.BLACK);
        addMouseListener(this);
    }

    /** Advertise the intended canvas size instead of hard-coding it in paint. */
    @Override
    public Dimension getPreferredSize() {
        return new Dimension(WINDOWX, WINDOWY);
    }

    /** Click handling is currently disabled (building lookup not implemented). */
    public void mouseClicked(MouseEvent me) {
    }

    @Override
    public void mouseEntered(MouseEvent arg0) {
    }

    @Override
    public void mouseExited(MouseEvent arg0) {
    }

    @Override
    public void mousePressed(MouseEvent arg0) {
    }

    @Override
    public void mouseReleased(MouseEvent arg0) {
    }

    public void paintComponent(Graphics g) {
        // BUGFIX: honour the Swing paint contract by calling super first, and
        // fill the component's ACTUAL size instead of a hard-coded 800x400
        // area that left stale pixels when the panel was resized larger.
        super.paintComponent(g);
        Graphics2D g2 = (Graphics2D) g;
        g2.setColor(getBackground());
        g2.fillRect(XCOOR, YCOOR, getWidth(), getHeight());
        g2.setColor(Color.orange);
        g.fillOval(50, 50, 100, 100);
        g.fillOval(200, 50, 100, 100);
        g.fillOval(50, 200, 100, 100);
        g.fillOval(200, 200, 100, 100);
    }
}
|
package seedu.addressbook.data.person;
import seedu.addressbook.data.exception.IllegalValueException;
/**
 * Represents a Person's address in the address book, parsed into its four
 * comma-separated components: block, street, unit and postal code.
 * Guarantees: immutable; is valid as declared in {@link #isValidAddress(String)}
 */
public class Address {
    public static final String EXAMPLE = "123, some street";
    public static final String MESSAGE_ADDRESS_CONSTRAINTS = "Person addresses can be in any format";
    public static final String ADDRESS_VALIDATION_REGEX = ".+";
    private static final String SEPARATOR = ", ";
    // Positions of each component in the "block, street, unit, postal code" form.
    private static final int BLOCK_INDEX = 0;
    private static final int STREET_INDEX = 1;
    private static final int UNIT_INDEX = 2;
    private static final int POSTAL_CODE_INDEX = 3;

    public final String value;
    private Block blockNumber;
    private Street streetName;
    private Unit unitNumber;
    private PostalCode postalCodeNumber;
    private boolean isPrivate;

    /** Reassembles the full address string from its parsed components. */
    private String getAddress(){
        return blockNumber.get() + SEPARATOR + streetName.get() + SEPARATOR +
                unitNumber.get() + SEPARATOR + postalCodeNumber.get();
    }

    /**
     * Validates and parses the given address string.
     *
     * @throws IllegalValueException if the given address string is invalid or
     *         does not contain all four comma-separated components.
     */
    public Address(String address, boolean isPrivate) throws IllegalValueException {
        String trimmedAddress = address.trim();
        this.isPrivate = isPrivate;
        if (!isValidAddress(trimmedAddress)) {
            throw new IllegalValueException(MESSAGE_ADDRESS_CONSTRAINTS);
        }
        // BUGFIX: the final field 'value' was never assigned (the assignment
        // was commented out), breaking compilation and toString/equals/hashCode.
        this.value = trimmedAddress;
        String[] addressComponents = trimmedAddress.split(SEPARATOR);
        // BUGFIX: reject addresses with fewer than four components up front
        // instead of failing later with an ArrayIndexOutOfBoundsException.
        if (addressComponents.length <= POSTAL_CODE_INDEX) {
            throw new IllegalValueException(MESSAGE_ADDRESS_CONSTRAINTS);
        }
        blockNumber = new Block(addressComponents[BLOCK_INDEX]);
        streetName = new Street(addressComponents[STREET_INDEX]);
        unitNumber = new Unit(addressComponents[UNIT_INDEX]);
        postalCodeNumber = new PostalCode(addressComponents[POSTAL_CODE_INDEX]);
    }

    /**
     * Returns true if a given string is a valid person address.
     */
    public static boolean isValidAddress(String test) {
        return test.matches(ADDRESS_VALIDATION_REGEX);
    }

    @Override
    public String toString() {
        return value;
    }

    @Override
    public boolean equals(Object other) {
        return other == this // short circuit if same object
                || (other instanceof Address // instanceof handles nulls
                && this.value.equals(((Address) other).value)); // state check
    }

    @Override
    public int hashCode() {
        return value.hashCode();
    }

    public boolean isPrivate() {
        return isPrivate;
    }
}
|
package sg.edu.cs2103aug2015_w13_2j;
import static org.junit.Assert.*;
import java.util.*;
import org.junit.Test;
import sg.edu.cs2103aug2015_w13_2j.TaskInterface.Label;
/**Test cases for the Logic Component
*
* @author Nguyen Tuong Van
*
*/
public class LogicTest {
private Logic logicComponent;
@Test
public void testAdd() {
    // An added task must be retrievable at index 0 under its own name.
    logicComponent = new Logic();
    Task newTask = new Task("First");
    logicComponent.addTask(newTask);
    assertEquals("First", logicComponent.getTask(0).getName());
}
@Test
public void testDelete(){
    // Deleting the first of two tasks (looked up by name) must shift the
    // remaining one to index 0.
    logicComponent = new Logic();
    logicComponent.addTask(new Task("first test task"));
    logicComponent.addTask(new Task("second test task"));
    logicComponent.deleteTask(logicComponent.findTaskByName("first test task"));
    assertEquals(logicComponent.getAllTasks().get(0).getName(), "second test task");
}
@Test
public void testDetermineType(){
    // Type classification: no dates -> FLOAT, a deadline -> DUE,
    // a start date as well -> EVENT.
    logicComponent = new Logic();
    Task newTask = new Task("first test task");
    logicComponent.determineType(newTask);
    assertTrue(newTask.getDeadline() == null);
    assertEquals(newTask.getType(), "FLOAT");
    newTask.setDeadline(new Date());
    logicComponent.determineType(newTask);
    assertEquals(newTask.getType(), "DUE");
    newTask.setStart(new Date());
    logicComponent.determineType(newTask);
    assertEquals(newTask.getType(), "EVENT");
}
@Test
public void testEdit(){
logicComponent = new Logic();
Task original = new Task("first test task");
assertEquals(original.getStatus(), "ONGOING");
logicComponent.addTask(original);
assertEquals(original.getType(), "FLOAT");
Task newTask = new Task("first test task");
newTask.setDeadline(new Date());
logicComponent.determineType(newTask);
logicComponent.editTask("first test task", newTask);
assertEquals(original.getType(), "DUE");
assertTrue(logicComponent.getDeadlines().contains(original));
assertEquals(logicComponent.getFloats().contains(original), false);
newTask.setStart(new Date());
logicComponent.determineType(newTask);
logicComponent.editTask("first test task", newTask);
assertEquals(original.getType(), "EVENT");
newTask.setName("I have changed");
logicComponent.editTask("first test task", newTask);
assertEquals(original.getName(), newTask.getName());
}
@Test
public void testStatus(){
logicComponent = new Logic();
Task original = new Task("second test task");
logicComponent.addTask(original);
assertEquals(original.getStatus(), "ONGOING");
original.markCompleted();
assertEquals(original.getStatus(), "COMPLETED");
assertTrue(logicComponent.getAllTasks().size() == 1);
assertTrue(logicComponent.viewCompleted().size() == 1);
assertTrue(logicComponent.getFloats().size() == 1);
assertTrue(logicComponent.getEvents().isEmpty());
assertTrue(logicComponent.getDeadlines().isEmpty());
logicComponent.deleteTask(original);
assertEquals(original.getStatus(), "DELETED");
assertTrue(logicComponent.getAllTasks().isEmpty());
assertTrue(logicComponent.viewCompleted().isEmpty());
assertTrue(logicComponent.getFloats().isEmpty());
assertTrue(logicComponent.getEvents().isEmpty());
assertTrue(logicComponent.getDeadlines().isEmpty());
}
@Test
public void testSortByDeadline(){
logicComponent = new Logic();
Task one = new Task("ONE");
logicComponent.addTask(one);
Task two = new Task("TWO");
logicComponent.addTask(two);
Task seven = new Task("SEVEN");
logicComponent.addTask(seven);
System.out.println("Sorting all floats by name");
ArrayList<Task> list = logicComponent.sortByDeadline();
for(int i = 0; i< list.size(); i++){
System.out.println(list.get(i).getName() + " " + list.get(i).getType());
}
System.out.println("Seven becomes deadline");
seven.setDeadline(new Date());
assertTrue(seven.getStart() == null && seven.getDeadline() != null);
System.out.println("Seven's deadline = " + seven.getDeadline());
logicComponent.determineType(seven);
//logicComponent.editTask("SEVEN", seven);
//seven.setTypeDeadline();
System.out.println("Seven's type = " + seven.getType());
list = logicComponent.sortByDeadline();
for(int i = 0; i< list.size(); i++){
System.out.println(list.get(i).getName() +" " + list.get(i).getType());
}
System.out.println("Even more Seven ");
Task newSeven = new Task("SEVEN SEVEN");
newSeven.setDeadline(new Date());
newSeven.setStart(new Date());
logicComponent.editTask("SEVEN", newSeven);
list = logicComponent.sortByDeadline();
for(int i = 0; i< list.size(); i++){
System.out.println(list.get(i).getName() +" " + list.get(i).getType());
}
System.out.println("TWO's gonna get in front!");
Task newTwo = new Task("I AM THE NEW TWO");
newTwo.setDeadline(new Date());
newTwo.setStart(new Date());
logicComponent.editTask("TWO", newTwo);
Task five = new Task("FIVE");
logicComponent.addTask(five);
Task six = new Task("SIX");
six.setDeadline(new Date());
logicComponent.addTask(six);
list = logicComponent.sortByDeadline();
for(int i = 0; i< list.size(); i++){
System.out.println(list.get(i).getName() + " " +list.get(i).getType());
}
}
}
|
package org.nakedobjects.viewer.skylark;
import org.nakedobjects.utility.ToString;
public class SimpleInternalDrag extends InternalDrag {
    private final Location location;
    // TODO replace Location with Offset
    private final Location offset;
    private final View view;

    /**
     * Creates a new drag event. The source view has its pickup(), and then,
     * exited() methods called on it. The view returned by the pickup method
     * becomes this event overlay view, which is moved continuously so that it
     * tracks the pointer,
     *
     * @param view
     *            the view over which the pointer was when this event started
     * @param location
     *            the location within the viewer (the Frame/Applet/Window etc)
     */
    public SimpleInternalDrag(View view, Location location) {
        this.view = view;
        this.location = new Location(location);
        offset = view.getAbsoluteLocation();
        adjustForPaddingAndRebase();
    }

    public SimpleInternalDrag(View view, Offset off) {
        this.view = view;
        location = new Location();
        offset = new Location(off.getDeltaX(), off.getDeltaY());
        adjustForPaddingAndRebase();
    }

    /**
     * Shared tail of both constructors (addresses the old "combine the two
     * constructors" TODO): widens the offset by the difference between the
     * container's and the target view's padding, then rebases the pointer
     * location so it is relative to the view.
     */
    private void adjustForPaddingAndRebase() {
        Padding targetPadding = view.getPadding();
        Padding containerPadding = view.getView().getPadding();
        offset.add(containerPadding.getLeft() - targetPadding.getLeft(),
                containerPadding.getTop() - targetPadding.getTop());
        location.subtract(offset);
    }

    protected void cancel(Viewer viewer) {
        view.dragCancel(this);
    }

    protected void drag(Viewer viewer, Location location, int mods) {
        // Overwrite with the new absolute pointer position, then rebase it
        // against the cached offset so getLocation() stays view-relative.
        this.location.x = location.x;
        this.location.y = location.y;
        this.location.subtract(offset);
        view.drag(this);
    }

    protected void end(Viewer viewer) {
        view.dragTo(this);
    }

    /**
     * Gets the location of the pointer relative to the view.
     */
    public Location getLocation() {
        return new Location(location);
    }

    public View getOverlay() {
        // This drag provides no overlay view.
        return null;
    }

    protected void start(Viewer viewer) {}

    public String toString() {
        ToString s = new ToString(this, super.toString());
        s.append("location", location);
        s.append("relative", getLocation());
        return s.toString();
    }
}
|
package hex.kmeans;
import hex.ModelBuilder;
import hex.schemas.KMeansV2;
import hex.schemas.ModelBuilderSchema;
import water.*;
import water.H2O.H2OCountedCompleter;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.ArrayUtils;
import water.util.Log;
import water.util.RandomUtils;
import java.util.ArrayList;
import java.util.Random;
public class KMeans extends ModelBuilder<KMeansModel,KMeansModel.KMeansParameters,KMeansModel.KMeansOutput> {
// Strategies for choosing the initial cluster centers.
public enum Initialization {
    None, PlusPlus, Furthest
}
// Number of categorical columns (they are swapped to the front of the frame)
private int _ncats;
// Number of reinitialization attempts for preventing empty clusters;
// transient: per-run scratch state, not part of the serialized builder.
transient private int reinit_attempts;

// Called from an http request
public KMeans( KMeansModel.KMeansParameters parms) {
    super(Key.make("KMeansModel"),"K-means",parms,parms._max_iters/*work is max iterations*/);
}
// REST schema describing this builder's parameters.
public ModelBuilderSchema schema() { return new KMeansV2(); }

/** Start the KMeans training Job on an F/J thread. */
@Override public Job train() {
    return start(new KMeansDriver());
}
private class KMeansDriver extends H2OCountedCompleter<KMeansDriver> {
@Override protected void compute2() {
Frame fr = null;
KMeansModel model = null;
try {
// Fetch & read-lock source frame
Value val = DKV.get(_parms._src);
if( val == null ) throw new IllegalArgumentException("Missing frame "+_parms._src);
fr = val.get();
fr.read_lock(_key);
if ( fr.numRows() < _parms._K) throw new IllegalArgumentException("Cannot make " + _parms._K + " clusters out of " + fr.numRows() + " rows.");
// Sort columns, so the categoricals are all up front. They use a
// different distance metric than numeric columns.
Vec vecs[] = fr.vecs();
final int N = vecs.length; // Feature count
int ncats=0, len=N;
while( ncats != len ) {
while( vecs[ncats].isEnum() ) ncats++;
while( len > 0 && !vecs[len-1].isEnum() ) len
if( ncats < len-1 ) fr.swap(ncats,len-1);
}
_ncats = ncats;
// The model to be built
model = new KMeansModel(dest(), fr, _parms, new KMeansModel.KMeansOutput(), ncats);
model.delete_and_lock(_key);
// means are used to impute NAs
double[] means = new double[N];
for( int i = 0; i < N; i++ )
means[i] = vecs[i].mean();
// mults & means for normalization
double[] mults = null;
if( _parms._normalize ) {
mults = new double[N];
for( int i = 0; i < N; i++ ) {
double sigma = vecs[i].sigma();
mults[i] = normalize(sigma) ? 1.0 / sigma : 1.0;
}
}
// Initialize clusters
Random rand = water.util.RandomUtils.getRNG(_parms._seed - 1);
double clusters[][]; // Normalized cluster centers
if( _parms._init == Initialization.None ) {
// Initialize all clusters to random rows
clusters = model._output._clusters = new double[_parms._K][fr.numCols()];
for( double[] cluster : clusters )
randomRow(vecs, rand, cluster, means, mults);
} else {
clusters = new double[1][vecs.length];
// Initialize first cluster to random row
randomRow(vecs, rand, clusters[0], means, mults);
while( model._output._iters < 5 ) {
// Sum squares distances to clusters
SumSqr sqr = new SumSqr(clusters,means,mults,_ncats).doAll(vecs);
// Sample with probability inverse to square distance
Sampler sampler = new Sampler(clusters, means, mults, _ncats, sqr._sqr, _parms._K * 3, _parms._seed).doAll(vecs);
clusters = ArrayUtils.append(clusters,sampler._sampled);
// Fill in sample clusters into the model
if( !isRunning() ) return; // Stopped/cancelled
model._output._clusters = denormalize(clusters, ncats, means, mults);
model._output._mse = sqr._sqr/fr.numRows();
model._output._iters++; // One iteration done
// This doesn't count towards model building (we didn't account these iterations as work to be done during construction)
// update(1); // One unit of work
model.update(_key); // Early version of model is visible
}
// Recluster down to K normalized clusters
clusters = recluster(clusters, rand);
}
model._output._iters = 0; // Reset iteration count
// Run the main KMeans Clustering loop
// Stop after enough iterations
LOOP:
for( ; model._output._iters < _parms._max_iters; model._output._iters++ ) {
if( !isRunning() ) return; // Stopped/cancelled
Lloyds task = new Lloyds(clusters,means,mults,_ncats, _parms._K).doAll(vecs);
// Pick the max categorical level for clusters' center
max_cats(task._cMeans,task._cats);
// Handle the case where some clusters go dry. Rescue only 1 cluster
// per iteration ('cause we only tracked the 1 worst row)
boolean badrow=false;
for( int clu=0; clu<_parms._K; clu++ ) {
if (task._rows[clu] == 0) {
// If we see 2 or more bad rows, just re-run Lloyds to get the
// next-worst row. We don't count this as an iteration, because
// we're not really adjusting the centers, we're trying to get
// some centers *at-all*.
if (badrow) {
Log.warn("KMeans: Re-running Lloyds to re-init another cluster");
model._output._iters--; // Do not count against iterations
if (reinit_attempts++ < _parms._K) {
continue LOOP; // Rerun Lloyds, and assign points to centroids
} else {
reinit_attempts = 0;
break; //give up and accept empty cluster
}
}
long row = task._worst_row;
Log.warn("KMeans: Re-initializing cluster " + clu + " to row " + row);
data(clusters[clu] = task._cMeans[clu], vecs, row, means, mults);
task._rows[clu] = 1;
badrow = true;
}
}
// Fill in the model; denormalized centers
model._output._clusters = denormalize(task._cMeans, ncats, means, mults);
model._output._rows = task._rows;
model._output._mses = task._cSqr;
double ssq = 0; // sum squared error
for( int i=0; i<_parms._K; i++ ) {
ssq += model._output._mses[i]; // sum squared error all clusters
model._output._mses[i] /= task._rows[i]; // mse per-cluster
}
model._output._mse = ssq/fr.numRows(); // mse total
model.update(_key); // Update model in K/V store
update(1); // One unit of work
// Compute change in clusters centers
double sum=0;
for( int clu=0; clu<_parms._K; clu++ )
sum += distance(clusters[clu],task._cMeans[clu],ncats);
sum /= N; // Average change per feature
Log.info("KMeans: Change in cluster centers="+sum);
if( sum < 1e-6 ) break; // Model appears to be stable
clusters = task._cMeans; // Update cluster centers
StringBuilder sb = new StringBuilder();
sb.append("KMeans: iter: ").append(model._output._iters).append(", MSE=").append(model._output._mse);
for( int i=0; i<_parms._K; i++ )
sb.append(", ").append(task._cSqr[i]).append("/").append(task._rows[i]);
Log.info(sb);
}
} catch( Throwable t ) {
t.printStackTrace();
cancel2(t);
throw t;
} finally {
if( model != null ) model.unlock(_key);
if( fr != null ) fr.unlock(_key);
done(); // Job done!
}
tryComplete();
}
}
// Initial sum-of-square-distance to nearest cluster
private static class SumSqr extends MRTask<SumSqr> {
    // IN
    double[][] _clusters;
    double[] _means, _mults; // Normalization
    final int _ncats;
    // OUT
    double _sqr;

    SumSqr( double[][] clusters, double[] means, double[] mults, int ncats ) {
        _clusters = clusters;
        _means = means;
        _mults = mults;
        _ncats = ncats;
    }

    @Override public void map(Chunk[] cs) {
        double[] values = new double[cs.length];
        ClusterDist cd = new ClusterDist();
        // Accumulate each row's squared distance to its nearest cluster.
        for( int row = 0; row < cs[0].len(); row++ ) {
            data(values, cs, row, _means, _mults);
            _sqr += minSqr(_clusters, values, _ncats, cd);
        }
        // Drop the large inputs once the chunk is processed; only _sqr is
        // needed from here on. NOTE(review): presumably this keeps them from
        // being shipped back with the result — confirm against MRTask semantics.
        _means = _mults = null;
        _clusters = null;
    }

    @Override public void reduce(SumSqr other) { _sqr += other._sqr; }
}
// Sample rows with increasing probability the farther they are from any
// cluster.
private static class Sampler extends MRTask<Sampler> {
    // IN
    double[][] _clusters;
    double[] _means, _mults; // Normalization
    final int _ncats;
    final double _sqr;           // Min-square-error
    final double _probability;   // Odds to select this point
    final long _seed;
    // OUT
    double[][] _sampled;         // New clusters

    Sampler( double[][] clusters, double[] means, double[] mults, int ncats, double sqr, double prob, long seed ) {
        _clusters = clusters;
        _means = means;
        _mults = mults;
        _ncats = ncats;
        _sqr = sqr;
        _probability = prob;
        _seed = seed;
    }

    @Override public void map(Chunk[] cs) {
        double[] values = new double[cs.length];
        ArrayList<double[]> list = new ArrayList<>();
        // Seed per-chunk (seed + chunk start row) so sampling is deterministic
        // for a fixed seed regardless of chunk scheduling.
        Random rand = RandomUtils.getRNG(_seed + cs[0].start());
        ClusterDist cd = new ClusterDist();
        for( int row = 0; row < cs[0].len(); row++ ) {
            data(values, cs, row, _means, _mults);
            double sqr = minSqr(_clusters, values, _ncats, cd);
            // Keep a row with probability proportional to its squared distance
            // from the current centers (scaled by the global error _sqr).
            if( _probability * sqr > rand.nextDouble() * _sqr )
                list.add(values.clone());
        }
        _sampled = new double[list.size()][];
        list.toArray(_sampled);
        // Drop the large inputs; only _sampled is needed from here on.
        _clusters = null;
        _means = _mults = null;
    }

    @Override public void reduce(Sampler other) {
        _sampled = ArrayUtils.append(_sampled, other._sampled);
    }
}
// A Lloyd's pass:
//   Find nearest cluster for every point;
//   Compute new mean/center & variance & rows for each cluster;
//   Compute distance between clusters
//   Compute total sqr distance
private static class Lloyds extends MRTask<Lloyds> {
    // IN
    double[][] _clusters;
    double[] _means, _mults; // Normalization
    final int _ncats, _K;
    // OUT
    double[][] _cMeans;     // Means for each cluster
    long[][][] _cats;       // Histogram of cat levels
    double[] _cSqr;         // Sum of squares for each cluster
    long[] _rows;           // Rows per cluster
    long _worst_row;        // Row with max err
    double _worst_err;      // Max-err-row's max-err

    Lloyds( double[][] clusters, double[] means, double[] mults, int ncats, int K ) {
        _clusters = clusters;
        _means = means;
        _mults = mults;
        _ncats = ncats;
        _K = K;
    }

    @Override public void map(Chunk[] cs) {
        int N = cs.length;
        assert _clusters[0].length==N;
        _cMeans = new double[_K][N];
        _cSqr = new double[_K];
        _rows = new long[_K];
        // Space for cat histograms, one counter array per (cluster, cat column)
        _cats = new long[_K][_ncats][];
        for( int clu=0; clu<_K; clu++ )
            for( int col=0; col<_ncats; col++ )
                _cats[clu][col] = new long[cs[col].vec().cardinality()];
        _worst_err = 0;

        // Find closest cluster for each row
        double[] values = new double[N];
        ClusterDist cd = new ClusterDist();
        for( int row = 0; row < cs[0].len(); row++ ) {
            data(values, cs, row, _means, _mults);
            closest(_clusters, values, _ncats, cd);
            int clu = cd._cluster;
            assert clu != -1;     // No broken rows
            _cSqr[clu] += cd._dist;
            // Add values and increment counter for chosen cluster
            for( int col = 0; col < _ncats; col++ )
                _cats[clu][col][(int)values[col]]++; // Histogram the cats
            for( int col = _ncats; col < N; col++ )
                _cMeans[clu][col] += values[col];
            _rows[clu]++;
            // Track worst row (globally addressed via the chunk start offset)
            if( cd._dist > _worst_err) { _worst_err = cd._dist; _worst_row = cs[0].start()+row; }
        }
        // Scale back down to local mean
        for( int clu = 0; clu < _K; clu++ )
            if( _rows[clu] != 0 ) ArrayUtils.div(_cMeans[clu],_rows[clu]);
        // Drop the large inputs; only the OUT fields are needed from here on.
        _clusters = null;
        _means = _mults = null;
    }

    @Override public void reduce(Lloyds mr) {
        for( int clu = 0; clu < _K; clu++ ) {
            long ra = _rows[clu];
            long rb = mr._rows[clu];
            double[] ma = _cMeans[clu];
            double[] mb = mr._cMeans[clu];
            for( int c = 0; c < ma.length; c++ ) // Recursive mean
                if( ra+rb > 0 ) ma[c] = (ma[c] * ra + mb[c] * rb) / (ra + rb);
        }
        ArrayUtils.add(_cats, mr._cats);
        ArrayUtils.add(_cSqr, mr._cSqr);
        ArrayUtils.add(_rows, mr._rows);
        // track global worst-row
        if( _worst_err < mr._worst_err) { _worst_err = mr._worst_err; _worst_row = mr._worst_row; }
    }
}
// A pair result: nearest cluster, and the square distance
private static final class ClusterDist { int _cluster; double _dist; }

// Convenience overloads over closest(...): return only the squared distance
// to the nearest of the (first 'count') clusters.
private static double minSqr(double[][] clusters, double[] point, int ncats, ClusterDist cd) {
    return closest(clusters, point, ncats, cd, clusters.length)._dist;
}

private static double minSqr(double[][] clusters, double[] point, int ncats, ClusterDist cd, int count) {
    return closest(clusters,point,ncats,cd,count)._dist;
}

// Nearest cluster over the full set of centers.
private static ClusterDist closest(double[][] clusters, double[] point, int ncats, ClusterDist cd) {
    return closest(clusters, point, ncats, cd, clusters.length);
}
private static double distance(double[] cluster, double[] point, int ncats) {
double sqr = 0; // Sum of dimensional distances
int pts = point.length; // Count of valid points
// Categorical columns first. Only equals/unequals matters (i.e., distance is either 0 or 1).
for(int column = 0; column < ncats; column++) {
double d = point[column];
if( Double.isNaN(d) ) pts
else if( d != cluster[column] )
sqr += 1.0; // Manhatten distance
}
// Numeric column distance
for( int column = ncats; column < cluster.length; column++ ) {
double d = point[column];
if( Double.isNaN(d) ) pts--; // Do not count
else {
double delta = d - cluster[column];
sqr += delta * delta;
}
}
// Scale distance by ratio of valid dimensions to all dimensions - since
// we did not add any error term for the missing point, the sum of errors
// is small - ratio up "as if" the missing error term is equal to the
// average of other error terms. Same math another way:
// double avg_dist = sqr / pts; // average distance per feature/column/dimension
// sqr = sqr * point.length; // Total dist is average*#dimensions
if( 0 < pts && pts < point.length )
sqr *= point.length / pts;
return sqr;
}
/** Return both nearest of N cluster/centroids, and the square-distance. */
private static ClusterDist closest(double[][] clusters, double[] point, int ncats, ClusterDist cd, int count) {
    // Only the first 'count' centers are considered, which lets the k-means++
    // style initialization search an incrementally growing center set.
    int min = -1;
    double minSqr = Double.MAX_VALUE;
    for( int cluster = 0; cluster < count; cluster++ ) {
        double sqr = distance(clusters[cluster],point,ncats);
        if( sqr < minSqr ) {      // Record nearest cluster
            min = cluster;
            minSqr = sqr;
        }
    }
    cd._cluster = min;            // Record nearest cluster
    cd._dist = minSqr;            // Record square-distance
    return cd;                    // Return for flow-coding
}
// For KMeansModel scoring; just the closest cluster index.
// Deduplicated: delegate to the ClusterDist-based search above rather than
// repeating the nearest-cluster loop a third time.
static int closest(double[][] clusters, double[] point, int ncats) {
    return closest(clusters, point, ncats, new ClusterDist(), clusters.length)._cluster;
}
// KMeans++ re-clustering: reduce the over-sampled candidate center set down
// to exactly _parms._K centers, starting from the first candidate.
private double[][] recluster(double[][] points, Random rand) {
    double[][] res = new double[_parms._K][];
    res[0] = points[0];
    int count = 1;
    ClusterDist cd = new ClusterDist();
    switch( _parms._init ) {
    case None:
        break;
    case PlusPlus: { // k-means++
        // Pick each next center with probability proportional to its squared
        // distance from the centers chosen so far.
        while( count < res.length ) {
            double sum = 0;
            for (double[] point1 : points) sum += minSqr(res, point1, _ncats, cd, count);

            for (double[] point : points) {
                if (minSqr(res, point, _ncats, cd, count) >= rand.nextDouble() * sum) {
                    res[count++] = point;
                    break;
                }
            }
        }
        break;
    }
    case Furthest: { // Takes cluster further from any already chosen ones
        while( count < res.length ) {
            double max = 0;
            int index = 0;
            for( int i = 0; i < points.length; i++ ) {
                double sqr = minSqr(res, points[i], _ncats, cd, count);
                if( sqr > max ) {
                    max = sqr;
                    index = i;
                }
            }
            res[count++] = points[index];
        }
        break;
    }
    default: throw H2O.fail();
    }
    return res;
}
// Load a uniformly random row of the frame into 'cluster', imputing and
// normalizing each column via means/mults.
private void randomRow(Vec[] vecs, Random rand, double[] cluster, double[] means, double[] mults) {
    long row = Math.max(0, (long) (rand.nextDouble() * vecs[0].length()) - 1);
    data(cluster, vecs, row, means, mults);
}

// Whether a column should actually be scaled by 1/sigma.
private static boolean normalize(double sigma) {
    // TODO unify handling of constant columns
    // Near-zero sigma marks an (almost) constant column: skip scaling to
    // avoid dividing by ~0.
    return sigma > 1e-6;
}
// Pick most common cat level for each cluster_centers' cat columns
// (replaces the running histogram counts with the argmax level, in place).
private static double[][] max_cats(double[][] clusters, long[][][] cats) {
    int K = cats.length;
    int ncats = cats[0].length;
    for( int clu = 0; clu < K; clu++ )
        for( int col = 0; col < ncats; col++ ) // Cats use max level for cluster center
            clusters[clu][col] = ArrayUtils.maxIndex(cats[clu][col]);
    return clusters;
}
/**
 * Undo normalization on a copy of the cluster centers: numeric columns are
 * scaled back by 1/mult and shifted by the column mean; categorical columns
 * (the first ncats) are left untouched. The input array is not modified.
 */
private static double[][] denormalize(double[][] clusters, int ncats, double[] means, double[] mults) {
    int K = clusters.length;
    int N = clusters[0].length;
    double[][] value = new double[K][];
    for( int clu = 0; clu < K; clu++ ) {
        value[clu] = clusters[clu].clone();
        if( mults != null ) {  // Reverse normalization, numeric columns only
            for( int col = ncats; col < N; col++ )
                value[clu][col] = value[clu][col] / mults[col] + means[col];
        }
    }
    return value;
}
// Load row 'row' of the frame into 'values', imputing NAs and normalizing
// per column (see data(double,...) below).
private static void data(double[] values, Vec[] vecs, long row, double[] means, double[] mults) {
    for( int i = 0; i < values.length; i++ ) {
        double d = vecs[i].at(row);
        values[i] = data(d, i, means, mults, vecs[i].cardinality());
    }
}

// Chunk-local variant of the loader above, used inside MRTask maps.
private static void data(double[] values, Chunk[] chks, int row, double[] means, double[] mults) {
    for( int i = 0; i < values.length; i++ ) {
        double d = chks[i].at0(row);
        values[i] = data(d, i, means, mults, chks[i].vec().cardinality());
    }
}
/**
 * Takes mean if NaN, normalize if requested.
 * A cardinality of -1 marks a numeric column; anything else is categorical,
 * where an NA falls back to the rounded column mean, capped at the last
 * valid level (cardinality-1).
 */
private static double data(double d, int i, double[] means, double[] mults, int cardinality) {
    if(cardinality == -1) {
        if( Double.isNaN(d) )
            d = means[i];
        if( mults != null ) {
            d -= means[i];
            d *= mults[i];
        }
    } else {
        // TODO: If NaN, then replace with majority class?
        if(Double.isNaN(d))
            d = Math.min(Math.round(means[i]), cardinality-1);
    }
    return d;
}
}
|
package main;
import android.util.*;
import android.app.Application;
import android.content.res.Configuration;
/**
 * Application subclass whose overrides currently only log lifecycle events;
 * no app-wide state is initialized yet (see the TODO-style comment in
 * onCreate about creating threads).
 */
public class HermesApplication extends Application
{
    private static final String TAG = "HermesApp";

    public HermesApplication(){
        super();
        // Constructor-time logging only; real initialization belongs in onCreate().
        Log.i(TAG, "Application object was constructed");
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        // No app-specific handling; defer entirely to the framework.
        super.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreate() {
        super.onCreate();
        Log.i(TAG, "Application onCreate");
        //Create some threads
    }

    @Override
    public void onLowMemory() {
        super.onLowMemory();
        Log.w(TAG, "Application has low memory");
    }

    @Override
    public void onTerminate() {
        // Log before delegating so the message is emitted even if super tears down.
        Log.i(TAG, "Application will Terminate");
        super.onTerminate();
    }
}
|
package org.archive.wayback.util.flatfile;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.RandomAccessFile;
import java.util.Iterator;
import org.archive.wayback.util.CloseableIterator;
/**
* Subclass of File, which allows binary searching, returning Iterators
* that allow scanning forwards and backwards thru the (sorted) file starting
* from a particular prefix.
*
* @author brad
* @version $Date$, $Revision$
*/
public class FlatFile {
// NOTE(review): serialVersionUID is vestigial — per the class comment this
// used to be a File subclass, but FlatFile no longer implements Serializable.
private static final long serialVersionUID = 6174187801001601557L;
// Offset of the most recent successful prefix match (see findKeyOffset callers).
private long lastMatchOffset;
// Backing file; null until set via a constructor or setPath().
private File file = null;

public FlatFile() {
}
/**
 * Creates a FlatFile for the file named child inside directory parent.
 *
 * @param parent parent directory
 * @param child file name within parent
 */
public FlatFile(File parent, String child) {
    file = new File(parent,child);
}
/**
 * @param path absolute or relative path of the flat file
 */
public FlatFile(String path) {
    file = new File(path);
}
/**
 * @param path to set; replaces any previously configured file
 */
public void setPath(String path) {
    file = new File(path);
}
/**
 * @return current String path, or null if none has been set
 */
public String getPath() {
    return (file == null) ? null : file.getAbsolutePath();
}
/**
 * Binary search thru RandomAccessFile argument to locate the first line
 * prefixed by key argument. As a side effect, the RandomAccessFile's
 * position is also set to the start of the first matching line.
 *
 * @param fh file to search; lines must be in sorted order
 * @param key
 * @return long offset where first record prefixed with key is found
 * @throws IOException
 */
public long findKeyOffset(RandomAccessFile fh, String key) throws IOException {
    int blockSize = 8192;
    long fileSize = fh.length();
    long min = 0;
    long max = (long) fileSize / blockSize;
    long mid;
    String line;
    // Phase 1: binary search over fixed-size blocks, comparing the key
    // against the first complete line inside each block.
    // NOTE(review): readLine() can return null at EOF here, which would NPE
    // in compareTo — confirm inputs always contain data past the probe point.
    while (max - min > 1) {
        mid = min + (long)((max - min) / 2);
        fh.seek(mid * blockSize);
        if(mid > 0) line = fh.readLine(); // probably a partial line
        line = fh.readLine();
        if (key.compareTo(line) > 0) {
            min = mid;
        } else {
            max = mid;
        }
    }
    // Phase 2: linear scan inside the located block for the first line >= key.
    min = min * blockSize;
    fh.seek(min);
    if(min > 0) line = fh.readLine(); // skip the (probably partial) first line
    while(true) {
        min = fh.getFilePointer();
        line = fh.readLine();
        if(line == null) break;
        if(line.compareTo(key) >= 0) break;
    }
    // Leave the file positioned at the start of the first matching line.
    fh.seek(min);
    return min;
}
/**
 * @return Returns the lastMatchOffset recorded by the last prefix search.
 */
public long getLastMatchOffset() {
    return lastMatchOffset;
}
/**
 * @return Iterator returning one String object for each line in the file.
 * @throws IOException
 */
public CloseableIterator<String> getSequentialIterator() throws IOException {
    BufferedReader br = new BufferedReader(new FileReader(file));
    return new RecordIterator(br);
}
/**
 * @param prefix
 * @return Iterator for records beginning with prefix
 * @throws IOException
 */
public Iterator<String> getRecordIterator(final String prefix) throws IOException {
    RandomAccessFile raf = new RandomAccessFile(file,"r");
    try {
        long offset = findKeyOffset(raf,prefix);
        lastMatchOffset = offset;
        // The reader wraps raf's file descriptor; closing the iterator's
        // reader releases the underlying handle.
        BufferedReader br = new BufferedReader(new FileReader(raf.getFD()));
        return new RecordIterator(br);
    } catch (IOException e) {
        raf.close(); // FIX: don't leak the file handle if the search fails
        throw e;
    }
}
/**
 *
 * @param prefix
 * @return ReverseRecordIterator positioned to return the first line BEFORE
 * prefix at the first call to readPrevLine().
 * @throws IOException
 */
public ReverseRecordIterator getReverseRecordIterator(final String prefix)
    throws IOException {
    RandomAccessFile raf = new RandomAccessFile(file,"r");
    try {
        long offset = findKeyOffset(raf,prefix);
        if(offset < 1) {
            // Match at (or before) the start of file: nothing precedes it.
            raf.close();
            return new ReverseRecordIterator(null);
        }
        raf.seek(raf.getFilePointer()-1);
        lastMatchOffset = offset - 1;
        return new ReverseRecordIterator(new ReverseBufferedReader(raf));
    } catch (IOException e) {
        raf.close(); // FIX: don't leak the file handle if the search fails
        throw e;
    }
}
/**
 * Writes one line per String produced by the iterator to this file,
 * replacing any previous content.
 *
 * FIX: the loop previously printed the File object itself and never called
 * itr.next(), so it looped forever (writing the path repeatedly) for any
 * non-empty iterator. The writer is now also closed on the error path.
 *
 * @param itr source of lines to store
 * @throws IOException
 */
public void store(Iterator<String> itr) throws IOException {
    PrintWriter pw = new PrintWriter(file);
    try {
        while(itr.hasNext()) {
            pw.println(itr.next());
        }
    } finally {
        pw.close();
    }
}
// Print command-line usage to stderr and terminate with exit status 3.
private static void USAGE() {
    System.err.println("Usage: PREFIX FILE1 [FILE2] ...");
    System.exit(3);
}
/**
 * Command-line driver: for each file argument, print all lines starting
 * with the given prefix (prefixing each with the file name when more than
 * one file is given).
 *
 * @param args PREFIX FILE1 [FILE2] ...
 */
public static void main(String[] args) {
    if(args.length < 2) {
        USAGE();
    }
    String prefix = args[0];
    for(int i=1; i < args.length; i++) {
        FlatFile ff = new FlatFile(args[i]);
        RecordIterator ri;
        try {
            ri = (RecordIterator) ff.getRecordIterator(prefix);
            while(ri.hasNext()) {
                String line = (String) ri.next();
                // The iterator starts at the first line >= prefix; stop as
                // soon as lines no longer carry the prefix.
                if(!line.startsWith(prefix)) {
                    break;
                }
                if(args.length > 2) {
                    System.out.println(args[i] + " " + line);
                } else {
                    System.out.println(line);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
package com.github.podd.impl;
import info.aduna.iteration.Iterations;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import org.openrdf.OpenRDFException;
import org.openrdf.model.Literal;
import org.openrdf.model.Model;
import org.openrdf.model.Namespace;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.model.impl.LinkedHashModel;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.model.util.Namespaces;
import org.openrdf.model.vocabulary.OWL;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.repository.Repository;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.RepositoryResult;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.Rio;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLException;
import org.semanticweb.owlapi.model.OWLOntologyID;
import org.semanticweb.owlapi.rio.RioMemoryTripleSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.clarkparsia.owlapi.explanation.GlassBoxExplanation;
import com.github.podd.api.DanglingObjectPolicy;
import com.github.podd.api.DataReferenceVerificationPolicy;
import com.github.podd.api.MetadataPolicy;
import com.github.podd.api.PoddArtifactManager;
import com.github.podd.api.PoddOWLManager;
import com.github.podd.api.PoddRepositoryManager;
import com.github.podd.api.PoddSchemaManager;
import com.github.podd.api.PoddSesameManager;
import com.github.podd.api.UpdatePolicy;
import com.github.podd.api.file.DataReference;
import com.github.podd.api.file.DataReferenceManager;
import com.github.podd.api.file.PoddDataRepositoryManager;
import com.github.podd.api.purl.PoddPurlManager;
import com.github.podd.api.purl.PoddPurlReference;
import com.github.podd.exception.ArtifactModifyException;
import com.github.podd.exception.DeleteArtifactException;
import com.github.podd.exception.DisconnectedObjectException;
import com.github.podd.exception.DuplicateArtifactIRIException;
import com.github.podd.exception.EmptyOntologyException;
import com.github.podd.exception.FileReferenceVerificationFailureException;
import com.github.podd.exception.InconsistentOntologyException;
import com.github.podd.exception.OntologyNotInProfileException;
import com.github.podd.exception.PoddException;
import com.github.podd.exception.PoddRuntimeException;
import com.github.podd.exception.PublishArtifactException;
import com.github.podd.exception.PublishedArtifactModifyException;
import com.github.podd.exception.PurlProcessorNotHandledException;
import com.github.podd.exception.UnmanagedArtifactIRIException;
import com.github.podd.exception.UnmanagedArtifactVersionException;
import com.github.podd.exception.UnmanagedSchemaIRIException;
import com.github.podd.utils.InferredOWLOntologyID;
import com.github.podd.utils.OntologyUtils;
import com.github.podd.utils.PODD;
import com.github.podd.utils.PoddObjectLabel;
import com.github.podd.utils.RdfUtility;
/**
* Implementation of the PODD Artifact Manager API, to manage the lifecycle for PODD Artifacts.
*
* @author Peter Ansell p_ansell@yahoo.com
*
*/
public class PoddArtifactManagerImpl implements PoddArtifactManager
{
static
{
    // One-time installation of GlassBox explanation support, done when this
    // class is first loaded.
    GlassBoxExplanation.setup();
}

private final Logger log = LoggerFactory.getLogger(this.getClass());

// Collaborating managers. NOTE(review): none are initialized here — they are
// presumably injected via setters elsewhere; confirm all are set before use.
private DataReferenceManager dataReferenceManager;
private PoddDataRepositoryManager dataRepositoryManager;
private PoddOWLManager owlManager;
private PoddPurlManager purlManager;
private PoddSchemaManager schemaManager;
private PoddRepositoryManager repositoryManager;
private PoddSesameManager sesameManager;

public PoddArtifactManagerImpl()
{
}
@Override
public InferredOWLOntologyID attachDataReference(final InferredOWLOntologyID artifactId, final URI objectUri,
final DataReference dataReference, final DataReferenceVerificationPolicy dataReferenceVerificationPolicy)
throws OpenRDFException, PoddException, IOException, OWLException
{
return this.attachDataReferences(artifactId, dataReference.toRDF(), dataReferenceVerificationPolicy);
}
    /**
     * Attaches the data references contained in the given model to the specified artifact
     * version, creating a new artifact version via
     * {@code updateArtifact(..., UpdatePolicy.MERGE_WITH_EXISTING, ...)}.
     *
     * @param ontologyId
     *            The artifact version to update; both ontology and version IRI must be set.
     * @param model
     *            RDF statements describing one or more data references.
     * @param dataReferenceVerificationPolicy
     *            Whether the references must be verified against their remote repositories.
     * @return The ontology ID of the updated artifact version.
     */
    @Override
    public InferredOWLOntologyID attachDataReferences(final InferredOWLOntologyID ontologyId, final Model model,
            final DataReferenceVerificationPolicy dataReferenceVerificationPolicy) throws OpenRDFException,
            IOException, OWLException, PoddException
    {
        // Strip any inferred-version statements the caller may have included; these are
        // managed internally and must not be asserted by clients.
        model.removeAll(model.filter(null, PODD.PODD_BASE_INFERRED_VERSION, null));
        // Collect the URIs of all data-reference objects present in the supplied model.
        final Set<Resource> fileReferences =
                model.filter(null, RDF.TYPE, PODD.PODD_BASE_DATA_REFERENCE_TYPE).subjects();
        final Collection<URI> fileReferenceObjects = new ArrayList<URI>(fileReferences.size());
        for(final Resource nextFileReference : fileReferences)
        {
            if(nextFileReference instanceof URI)
            {
                fileReferenceObjects.add((URI)nextFileReference);
            }
            else
            {
                // Blank-node references cannot be matched against existing objects, so the
                // update below will create new file reference objects for them instead.
                this.log.warn("Will not be updating file reference for blank node reference, will instead be creating a new file reference for it.");
            }
        }
        // NOTE(review): the exported artifact is merged with the incoming model here, but the
        // merged result is never used afterwards — updateArtifact receives the original model.
        // Either dead code or retained for the exportArtifact validation side effects; confirm
        // before removing.
        final Model exportArtifact = this.exportArtifact(ontologyId, false);
        exportArtifact.addAll(model);
        final Model resultModel =
                this.updateArtifact(ontologyId.getOntologyIRI().toOpenRDFURI(), ontologyId.getVersionIRI()
                        .toOpenRDFURI(), fileReferenceObjects, model, UpdatePolicy.MERGE_WITH_EXISTING,
                        DanglingObjectPolicy.REPORT, dataReferenceVerificationPolicy);
        // The update returns RDF describing the new artifact version; convert to an ontology ID.
        return OntologyUtils.modelToOntologyIDs(resultModel, true, false).get(0);
    }
    /**
     * Deletes the given artifact from the permanent repository. When the given ID carries a
     * version IRI, only that matching version is deleted; otherwise all known versions of the
     * artifact are deleted. Published artifacts cannot be deleted.
     *
     * @param artifactId
     *            The artifact (optionally with a specific version) to delete.
     * @return True if at least one version was found and deleted, false otherwise.
     * @throws PoddException
     *             Including DeleteArtifactException when the artifact is published or a
     *             repository error occurs.
     */
    @Override
    public boolean deleteArtifact(final InferredOWLOntologyID artifactId) throws PoddException
    {
        if(artifactId.getOntologyIRI() == null)
        {
            throw new PoddRuntimeException("Ontology IRI cannot be null");
        }
        RepositoryConnection connection = null;
        try
        {
            // Published artifacts are immutable and may never be deleted.
            if(this.isPublished(artifactId))
            {
                throw new DeleteArtifactException("Published Artifacts cannot be deleted", artifactId);
            }
            final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(artifactId);
            connection = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
            connection.begin();
            // Start from every known version of this artifact...
            List<InferredOWLOntologyID> requestedArtifactIds =
                    this.getSesameManager().getAllOntologyVersions(artifactId.getOntologyIRI(), connection,
                            this.getRepositoryManager().getArtifactManagementGraph());
            // ...but when a specific version IRI was requested, narrow the deletion to just
            // that single matching version (if present).
            if(artifactId.getVersionIRI() != null)
            {
                final IRI requestedVersionIRI = artifactId.getVersionIRI();
                for(final InferredOWLOntologyID nextVersion : new ArrayList<InferredOWLOntologyID>(requestedArtifactIds))
                {
                    if(requestedVersionIRI.equals(nextVersion.getVersionIRI()))
                    {
                        requestedArtifactIds = Arrays.asList(nextVersion);
                    }
                }
            }
            this.getSesameManager().deleteOntologies(requestedArtifactIds, connection,
                    this.getRepositoryManager().getArtifactManagementGraph());
            connection.commit();
            // - ensure deleted ontologies are removed from the
            // OWLOntologyManager's cache
            for(final InferredOWLOntologyID deletedOntologyId : requestedArtifactIds)
            {
                this.getOWLManager().removeCache(deletedOntologyId.getBaseOWLOntologyID());
                this.getOWLManager().removeCache(deletedOntologyId.getInferredOWLOntologyID());
            }
            return !requestedArtifactIds.isEmpty();
        }
        catch(final OpenRDFException | OWLException e)
        {
            // Roll back the partially applied deletion before reporting the failure.
            try
            {
                if(connection != null && connection.isActive())
                {
                    connection.rollback();
                }
            }
            catch(final RepositoryException e1)
            {
                this.log.error("Found error rolling back repository connection", e1);
            }
            throw new DeleteArtifactException("Repository exception occurred", e, artifactId);
        }
        finally
        {
            try
            {
                if(connection != null && connection.isOpen())
                {
                    connection.close();
                }
            }
            catch(final RepositoryException e)
            {
                throw new DeleteArtifactException("Repository exception occurred", e, artifactId);
            }
        }
    }
    /**
     * Deletes a single object from within an artifact, producing a new artifact version. The
     * object's parent link and all links referring to the object are removed; child objects
     * left disconnected are either reported as an error or force-deleted depending on
     * {@code cascade}.
     *
     * @param artifactUri
     *            The artifact containing the object.
     * @param versionUri
     *            The artifact version (currently unresolved here; the current managed version
     *            is looked up from the artifact URI — TODO confirm intended).
     * @param objectUri
     *            The object to delete.
     * @param cascade
     *            When true, objects orphaned by the deletion are also deleted; when false,
     *            orphans cause the update to fail.
     * @return The ontology ID of the updated artifact version.
     */
    @Override
    public InferredOWLOntologyID deleteObject(final URI artifactUri, final URI versionUri, final URI objectUri,
            final boolean cascade) throws PoddException, OpenRDFException, IOException, OWLException
    {
        // check if the specified artifact URI refers to a managed artifact
        InferredOWLOntologyID artifactID = null;
        try
        {
            artifactID = this.getArtifact(IRI.create(artifactUri));
        }
        catch(final UnmanagedArtifactIRIException e)
        {
            this.log.error("This artifact is unmanaged. [{}]", artifactUri);
            throw e;
        }
        // Published artifacts are immutable.
        if(this.isPublished(artifactID))
        {
            throw new PublishedArtifactModifyException("Attempting to modify a Published Artifact", artifactID);
        }
        this.log.debug("deleteObject ({}) from artifact {} with cascade={}", objectUri, artifactUri, cascade);
        final URI objectToDelete = objectUri;
        final Collection<URI> objectsToUpdate = new ArrayList<URI>();
        objectsToUpdate.add(objectToDelete);
        // "fragments" accumulates the replacement statements for every object touched by this
        // deletion; it is applied below with UpdatePolicy.REPLACE_EXISTING.
        final Model fragments = new LinkedHashModel();
        final Model artifactModel = this.exportArtifact(artifactID, false);
        // - find the objectToDelete's parent and remove parent-child link
        final Model parentDetails = this.getParentDetails(artifactID, objectToDelete);
        if(parentDetails.subjects().size() != 1)
        {
            this.log.error("Object {} cannot be deleted. (No parent)", objectUri, artifactUri);
            throw new ArtifactModifyException("Object cannot be deleted. (No parent)", artifactID, objectToDelete);
        }
        final Resource parent = parentDetails.subjects().iterator().next();
        fragments.addAll(artifactModel.filter(parent, null, null));
        fragments.remove(parent, null, objectToDelete);
        objectsToUpdate.add((URI)parent);
        // - remove any refersToLinks
        final Model referenceLinks = this.getReferenceLinks(artifactID, objectToDelete);
        final Set<Resource> referrers = referenceLinks.subjects();
        for(final Resource referrer : referrers)
        {
            // Re-state each referrer without its link to the deleted object.
            final Model referrerStatements = artifactModel.filter(referrer, null, null);
            referrerStatements.remove(referrer, null, objectToDelete);
            fragments.addAll(referrerStatements);
            objectsToUpdate.add((URI)referrer);
        }
        DanglingObjectPolicy danglingObjectPolicy = DanglingObjectPolicy.REPORT;
        if(cascade)
        {
            danglingObjectPolicy = DanglingObjectPolicy.FORCE_CLEAN;
        }
        this.updateArtifact(artifactID.getOntologyIRI().toOpenRDFURI(), artifactID.getVersionIRI().toOpenRDFURI(),
                objectsToUpdate, fragments, UpdatePolicy.REPLACE_EXISTING, danglingObjectPolicy,
                DataReferenceVerificationPolicy.DO_NOT_VERIFY);
        // Re-resolve to pick up the new version created by the update.
        return this.getArtifact(artifactID.getOntologyIRI());
    }
@Override
public Model exportArtifact(final InferredOWLOntologyID ontologyId, final boolean includeInferred)
throws OpenRDFException, PoddException, IOException
{
if(ontologyId.getOntologyIRI() == null || ontologyId.getVersionIRI() == null)
{
throw new PoddRuntimeException("Ontology IRI and Version IRI cannot be null");
}
if(includeInferred && ontologyId.getInferredOntologyIRI() == null)
{
throw new PoddRuntimeException("Inferred Ontology IRI cannot be null");
}
List<URI> contexts;
if(includeInferred)
{
contexts =
Arrays.asList(ontologyId.getVersionIRI().toOpenRDFURI(), ontologyId.getInferredOntologyIRI()
.toOpenRDFURI());
}
else
{
contexts = Arrays.asList(ontologyId.getVersionIRI().toOpenRDFURI());
}
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyId);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final RepositoryResult<Statement> statements =
conn.getStatements(null, null, null, includeInferred, contexts.toArray(new Resource[] {}));
final Model model = new LinkedHashModel(Iterations.asList(statements));
final RepositoryResult<Namespace> namespaces = conn.getNamespaces();
for(final Namespace nextNs : Iterations.asSet(namespaces))
{
model.setNamespace(nextNs);
}
return model;
}
finally
{
if(conn != null)
{
conn.close();
}
}
}
@Override
public void exportArtifact(final InferredOWLOntologyID ontologyId, final OutputStream outputStream,
final RDFFormat format, final boolean includeInferred) throws OpenRDFException, PoddException, IOException
{
final Model model = this.exportArtifact(ontologyId, includeInferred);
Rio.write(model, outputStream, format);
}
@Override
public void exportObjectMetadata(final URI objectType, final OutputStream outputStream, final RDFFormat format,
final boolean includeDoNotDisplayProperties, final MetadataPolicy containsPropertyPolicy,
final InferredOWLOntologyID artifactID) throws OpenRDFException, PoddException, IOException
{
RepositoryConnection conn = null;
try
{
Set<InferredOWLOntologyID> schemaImports;
if(artifactID != null)
{
schemaImports = this.getSchemaImports(artifactID);
}
else
{
// If they don't have an artifact yet, we return the set of current schema
// ontologies
schemaImports = this.getSchemaManager().getCurrentSchemaOntologies();
}
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final URI[] contexts =
this.sesameManager.versionAndSchemaContexts(artifactID, conn,
this.repositoryManager.getSchemaManagementGraph());
Model model;
if(containsPropertyPolicy == MetadataPolicy.ONLY_CONTAINS)
{
model = this.sesameManager.getObjectTypeContainsMetadata(objectType, conn, contexts);
}
else
{
model =
this.sesameManager.getObjectTypeMetadata(objectType, includeDoNotDisplayProperties,
containsPropertyPolicy, conn, contexts);
}
Rio.write(model, outputStream, format);
}
finally
{
if(conn != null)
{
conn.close();
}
}
}
@Override
public Model fillMissingData(final InferredOWLOntologyID ontologyID, final Model inputModel)
throws OpenRDFException, UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyID);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final URI[] contexts =
this.getSesameManager().versionAndSchemaContexts(ontologyID, conn,
this.getRepositoryManager().getSchemaManagementGraph());
return this.getSesameManager().fillMissingLabels(inputModel, conn, contexts);
}
catch(final OpenRDFException e)
{
try
{
if(conn != null && conn.isActive())
{
conn.rollback();
}
}
catch(final RepositoryException e1)
{
this.log.error("Found error rolling back repository connection", e1);
}
throw e;
}
finally
{
try
{
if(conn != null && conn.isOpen())
{
conn.close();
}
}
catch(final RepositoryException e)
{
throw e;
}
}
}
@Override
public InferredOWLOntologyID getArtifact(final IRI artifactIRI) throws UnmanagedArtifactIRIException,
UnmanagedSchemaIRIException
{
try
{
return this.getArtifact(artifactIRI, null);
}
catch(final UnmanagedArtifactVersionException e)
{
this.log.error("Null artifact version not recognised, this should not happen");
return null;
}
}
    /**
     * Resolves the artifact with the given ontology IRI, optionally at a specific version.
     * When the given version IRI is not found, the current version is returned instead —
     * unless the requested version IRI belongs to a different artifact, in which case an
     * {@link UnmanagedArtifactVersionException} is thrown.
     *
     * @param artifactIRI
     *            The ontology IRI of the artifact.
     * @param versionIRI
     *            The requested version IRI, or null for the current version.
     * @return The resolved ontology ID, or null if nothing was found.
     */
    @Override
    public InferredOWLOntologyID getArtifact(final IRI artifactIRI, final IRI versionIRI)
        throws UnmanagedArtifactIRIException, UnmanagedArtifactVersionException, UnmanagedSchemaIRIException
    {
        RepositoryConnection repositoryConnection = null;
        try
        {
            final Set<InferredOWLOntologyID> schemaImports =
                    this.getSchemaImports(new InferredOWLOntologyID(artifactIRI, versionIRI, null));
            repositoryConnection = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
            InferredOWLOntologyID result = null;
            // First try to resolve the specific version, when one was requested.
            if(versionIRI != null)
            {
                result =
                        this.getSesameManager().getOntologyVersion(versionIRI, repositoryConnection,
                                this.getRepositoryManager().getArtifactManagementGraph());
            }
            // Fall back to the current version when no version was requested or the requested
            // version was not found.
            if(result == null)
            {
                result =
                        this.getSesameManager().getCurrentArtifactVersion(artifactIRI, repositoryConnection,
                                this.getRepositoryManager().getArtifactManagementGraph());
            }
            if(result != null)
            {
                // If the result that was returned contained a different
                // artifact IRI then throw an
                // exception early instead of returning inconsistent results
                if(versionIRI != null && !result.getVersionIRI().equals(versionIRI))
                {
                    throw new UnmanagedArtifactVersionException(artifactIRI, result.getVersionIRI(), versionIRI,
                            "Artifact IRI and Version IRI combination did not match");
                }
            }
            return result;
        }
        catch(final OpenRDFException e)
        {
            throw new UnmanagedArtifactIRIException(artifactIRI, e);
        }
        finally
        {
            if(repositoryConnection != null)
            {
                try
                {
                    repositoryConnection.close();
                }
                catch(final RepositoryException e)
                {
                    // Closing failures are logged rather than masking a primary exception.
                    this.log.error("Failed to close repository connection", e);
                }
            }
        }
    }
/*
* (non-Javadoc)
*
* Wraps PoddSesameManager.getChildObjects()
*
* @see com.github.podd.api.PoddArtifactManager#getChildObjects()
*/
@Override
public Set<URI> getChildObjects(final InferredOWLOntologyID ontologyID, final URI objectUri)
throws OpenRDFException, UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyID);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final URI[] contexts =
this.getSesameManager().versionAndSchemaContexts(ontologyID, conn,
this.getRepositoryManager().getSchemaManagementGraph());
return this.getSesameManager().getChildObjects(objectUri, conn, contexts);
}
finally
{
if(conn != null)
{
conn.close();
}
}
}
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#getFileReferenceManager()
     */
    @Override
    public DataReferenceManager getDataReferenceManager()
    {
        // May be null when no data reference manager has been configured.
        return this.dataReferenceManager;
    }
@Override
public Set<DataReference> getFileReferences(final InferredOWLOntologyID artifactId)
{
// TODO Auto-generated method stub
return null;
}
@Override
public Set<DataReference> getFileReferences(final InferredOWLOntologyID artifactId, final String alias)
{
// TODO Auto-generated method stub
return null;
}
@Override
public Set<DataReference> getFileReferences(final InferredOWLOntologyID artifactId, final URI objectUri)
{
// TODO Auto-generated method stub
return null;
}
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#getFileRepositoryManager()
     */
    @Override
    public PoddDataRepositoryManager getFileRepositoryManager()
    {
        // May be null when no data repository manager has been configured.
        return this.dataRepositoryManager;
    }
/*
* (non-Javadoc)
*
* Wraps PoddSesameManager.getObjectDetailsForDisplay()
*
* @see com.github.podd.api.PoddArtifactManager#getObjectDetailsForDisplay()
*/
@Override
public Model getObjectDetailsForDisplay(final InferredOWLOntologyID ontologyID, final URI objectUri)
throws OpenRDFException, UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyID);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
return this.getSesameManager().getObjectDetailsForDisplay(ontologyID, objectUri, conn,
this.getRepositoryManager().getSchemaManagementGraph());
}
finally
{
if(conn != null)
{
conn.close();
}
}
}
@Override
public PoddObjectLabel getObjectLabel(final InferredOWLOntologyID ontologyID, final URI objectUri)
throws OpenRDFException, UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyID);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
return this.getSesameManager().getObjectLabel(ontologyID, objectUri, conn,
this.getRepositoryManager().getSchemaManagementGraph());
}
finally
{
if(conn != null)
{
conn.close();
}
}
}
/*
* (non-Javadoc)
*
* @see com.github.podd.api.PoddArtifactManager#getObjectTypes(com.github.podd .utils.
* InferredOWLOntologyID, org.openrdf.model.URI)
*/
@Override
public List<PoddObjectLabel> getObjectTypes(final InferredOWLOntologyID artifactId, final URI objectUri)
throws OpenRDFException, UnmanagedSchemaIRIException
{
final List<PoddObjectLabel> results = new ArrayList<PoddObjectLabel>();
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(artifactId);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final List<URI> typesList =
this.getSesameManager().getObjectTypes(artifactId, objectUri, conn,
this.getRepositoryManager().getSchemaManagementGraph());
for(final URI objectType : typesList)
{
results.add(this.getSesameManager().getObjectLabel(artifactId, objectType, conn,
this.getRepositoryManager().getSchemaManagementGraph()));
}
}
finally
{
if(conn != null)
{
conn.close();
}
}
return results;
}
/*
* (non-Javadoc)
*
* Wraps PoddSesameManager.getOrderedProperties()
*
* @see com.github.podd.api.PoddArtifactManager#getOrderedProperties()
*/
@Override
public List<URI> getOrderedProperties(final InferredOWLOntologyID ontologyID, final URI objectUri,
final boolean excludeContainsProperties) throws OpenRDFException, UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyID);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final URI[] contexts =
this.getSesameManager().versionAndSchemaContexts(ontologyID, conn,
this.getRepositoryManager().getSchemaManagementGraph());
return this.getSesameManager().getWeightedProperties(objectUri, excludeContainsProperties, conn, contexts);
}
finally
{
if(conn != null)
{
conn.close();
}
}
}
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#getOWLManager()
     */
    @Override
    public PoddOWLManager getOWLManager()
    {
        // May be null when no OWL manager has been configured.
        return this.owlManager;
    }
/*
* (non-Javadoc)
*
* @see com.github.podd.api.PoddArtifactManager#getParentDetails(com.github.podd .utils.
* InferredOWLOntologyID, org.openrdf.model.URI)
*/
@Override
public Model getParentDetails(final InferredOWLOntologyID ontologyID, final URI objectUri) throws OpenRDFException,
UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyID);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final URI[] contexts =
this.getSesameManager().versionAndSchemaContexts(ontologyID, conn,
this.getRepositoryManager().getSchemaManagementGraph());
return this.getSesameManager().getParentDetails(objectUri, conn, contexts);
}
catch(final Throwable e)
{
try
{
if(conn != null && conn.isActive())
{
conn.rollback();
}
}
catch(final RepositoryException e1)
{
this.log.error("Found error rolling back repository connection", e1);
}
throw e;
}
finally
{
try
{
if(conn != null && conn.isOpen())
{
conn.close();
}
}
catch(final RepositoryException e)
{
throw e;
}
}
}
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#getPurlManager()
     */
    @Override
    public PoddPurlManager getPurlManager()
    {
        // May be null when no PURL manager has been configured; callers must handle this.
        return this.purlManager;
    }
public Model getReferenceLinks(final InferredOWLOntologyID ontologyID, final URI objectUri)
throws OpenRDFException, UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyID);
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final URI[] contexts =
this.getSesameManager().versionAndSchemaContexts(ontologyID, conn,
this.getRepositoryManager().getSchemaManagementGraph());
return this.getSesameManager().getReferringObjectDetails(objectUri, conn, contexts);
}
finally
{
try
{
if(conn != null && conn.isOpen())
{
conn.close();
}
}
catch(final RepositoryException e)
{
throw e;
}
}
}
    /**
     * @return the repository manager configured for this instance; may be null if not yet set.
     */
    @Override
    public PoddRepositoryManager getRepositoryManager()
    {
        return this.repositoryManager;
    }
@Override
public Set<InferredOWLOntologyID> getSchemaImports(final InferredOWLOntologyID artifactID) throws OpenRDFException,
UnmanagedSchemaIRIException
{
Objects.requireNonNull(
artifactID,
"Cannot get schema imports without an artifact reference. May need to try PoddSchemaManager.getCurrentSchemaOntologies instead.");
final Set<InferredOWLOntologyID> results = new LinkedHashSet<InferredOWLOntologyID>();
RepositoryConnection conn = null;
try
{
conn = this.getRepositoryManager().getManagementRepository().getConnection();
final Set<URI> directImports =
this.getSesameManager().getDirectImports(artifactID.getOntologyIRI(), conn,
this.getRepositoryManager().getArtifactManagementGraph());
for(final URI nextDirectImport : directImports)
{
results.add(this.getSchemaManager().getSchemaOntologyVersion(IRI.create(nextDirectImport)));
}
}
finally
{
if(conn != null)
{
conn.close();
}
}
return results;
}
    /**
     * @return the schema manager configured for this instance; may be null if not yet set.
     */
    @Override
    public PoddSchemaManager getSchemaManager()
    {
        return this.schemaManager;
    }
    
    /**
     * @return the Sesame manager configured for this instance; may be null if not yet set.
     */
    @Override
    public PoddSesameManager getSesameManager()
    {
        return this.sesameManager;
    }
@Override
public List<PoddObjectLabel> getTopObjectLabels(final List<InferredOWLOntologyID> artifacts)
throws OpenRDFException, UnmanagedSchemaIRIException
{
final List<PoddObjectLabel> results = new ArrayList<PoddObjectLabel>();
RepositoryConnection conn = null;
for(final InferredOWLOntologyID artifactId : artifacts)
{
try
{
final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(artifactId);
// TODO: Should be a simple way to avoid creating multiple
// connections here
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
final URI objectIRI = this.getSesameManager().getTopObjectIRI(artifactId, conn);
results.add(this.getSesameManager().getObjectLabel(artifactId, objectIRI, conn,
this.getRepositoryManager().getSchemaManagementGraph()));
}
finally
{
if(conn != null)
{
conn.close();
}
}
}
return results;
}
/**
* Helper method to cache schema ontologies in memory before loading statements into OWLAPI
*/
private void handleCacheSchemasInMemory(final RepositoryConnection managementRepositoryConnection,
final RepositoryConnection tempRepositoryConnection, final URI tempContext) throws OpenRDFException,
OWLException, IOException, PoddException
{
final Set<URI> importedSchemas =
this.getSesameManager().getDirectImports(tempRepositoryConnection, tempContext);
final Set<InferredOWLOntologyID> importedSchemaOntologies = new HashSet<>();
for(final URI importedSchemaIRI : importedSchemas)
{
importedSchemaOntologies.add(this.getSesameManager().getSchemaVersion(IRI.create(importedSchemaIRI),
managementRepositoryConnection, this.getRepositoryManager().getSchemaManagementGraph()));
}
this.getOWLManager().cacheSchemaOntologies(importedSchemaOntologies, managementRepositoryConnection,
this.getRepositoryManager().getSchemaManagementGraph());
}
/**
* Checks for dangling objects that are not linked to the artifact and deletes them if
* <i>force</i> is true.
*
* @param artifactID
* @param repositoryConnection
* @param context
* @param force
* If true, deletes any dangling objects. If false, throws a
* DisconnectedObjectException if any dangling objects are found.
* @throws RepositoryException
* @throws DisconnectedObjectException
*/
private void handleDanglingObjects(final IRI artifactID, final RepositoryConnection repositoryConnection,
final URI context, final DanglingObjectPolicy policy) throws RepositoryException,
DisconnectedObjectException
{
// Short-circuit if they wanted to ignore dangling objects
if(policy == DanglingObjectPolicy.IGNORE)
{
this.log.info("Not checking for dangling objects for artifact: {}", artifactID);
return;
}
final Set<URI> danglingObjects =
RdfUtility.findDisconnectedNodes(artifactID.toOpenRDFURI(), repositoryConnection, context);
if(!danglingObjects.isEmpty())
{
if(policy.equals(DanglingObjectPolicy.REPORT))
{
this.log.error("Found {} dangling object(s) (reporting). \n {}", danglingObjects.size(),
danglingObjects);
throw new DisconnectedObjectException(danglingObjects, "Update leads to disconnected PODD objects");
}
else if(policy.equals(DanglingObjectPolicy.FORCE_CLEAN))
{
this.log.info("Found {} dangling object(s) (force cleaning). \n {}", danglingObjects.size(),
danglingObjects);
for(final URI danglingObject : danglingObjects)
{
repositoryConnection.remove(danglingObject, null, null, context);
repositoryConnection.remove(null, null, (Value)danglingObject, context);
}
}
}
}
/**
* Helper method to handle File References in a newly loaded/updated set of statements.
*
* TODO: Optionally remove invalid file references or mark them as invalid using RDF
* statements/OWL Classes
*
* @param repositoryConnection
* @param context
* @param policy
* If true, verifies that DataReference objects are accessible from their respective
* remote File Repositories
*
* @throws OpenRDFException
* @throws PoddException
*/
private void handleFileReferences(final RepositoryConnection repositoryConnection,
final DataReferenceVerificationPolicy policy, final URI... contexts) throws OpenRDFException, PoddException
{
if(DataReferenceVerificationPolicy.VERIFY == policy)
{
if(this.getDataReferenceManager() == null)
{
this.log.error("Could not verify data references as the manager was not initialised.");
}
else
{
this.log.debug("Extracting data references");
final Set<DataReference> fileReferenceResults =
this.getDataReferenceManager().extractDataReferences(repositoryConnection, contexts);
this.log.debug("Handling File reference validation");
try
{
this.dataRepositoryManager.verifyDataReferences(fileReferenceResults);
}
catch(final FileReferenceVerificationFailureException e)
{
this.log.warn("From " + fileReferenceResults.size() + " file references, "
+ e.getValidationFailures().size() + " failed validation.");
throw e;
}
}
}
}
/**
* Helper method to handle File References in a newly loaded/updated set of statements
*/
private Set<PoddPurlReference> handlePurls(final RepositoryConnection repositoryConnection, final URI context)
throws PurlProcessorNotHandledException, OpenRDFException
{
if(this.getPurlManager() == null)
{
return Collections.emptySet();
}
this.log.debug("Handling Purl generation");
final Set<PoddPurlReference> purlResults =
this.getPurlManager().extractPurlReferences(repositoryConnection, context);
this.getPurlManager().convertTemporaryUris(purlResults, repositoryConnection, context);
return purlResults;
}
/**
* Helper method to check schema ontology imports and update use of ontology IRIs to version
* IRIs.
*/
private void handleSchemaImports(final URI ontologyIRI, final RepositoryConnection managementRepositoryConnection,
final RepositoryConnection tempRepositoryConnection, final URI tempContext) throws OpenRDFException,
UnmanagedSchemaIRIException
{
final Set<URI> importedSchemas =
this.getSesameManager().getDirectImports(tempRepositoryConnection, tempContext);
for(final URI importedSchemaIRI : importedSchemas)
{
final InferredOWLOntologyID schemaOntologyID =
this.getSesameManager().getSchemaVersion(IRI.create(importedSchemaIRI),
managementRepositoryConnection, this.getRepositoryManager().getSchemaManagementGraph());
// Always replace with the version IRI
if(!importedSchemaIRI.equals(schemaOntologyID.getVersionIRI()))
{
// modify import to be a specific version of the schema
this.log.debug("Updating import to version <{}>", schemaOntologyID.getVersionIRI());
tempRepositoryConnection.remove(ontologyIRI, OWL.IMPORTS, importedSchemaIRI, tempContext);
tempRepositoryConnection.add(ontologyIRI, OWL.IMPORTS, schemaOntologyID.getVersionIRI().toOpenRDFURI(),
tempContext);
}
}
}
/**
* This helper method checks for statements with the given property and having a date-time value
* with the year 1970 and updates their date-time with the given {@link Value}.
*
* @param repositoryConnection
* @param propertyUri
* @param newTimestamp
* @param context
* @throws OpenRDFException
*/
private void handleTimestamps(final RepositoryConnection repositoryConnection, final URI propertyUri,
final Value newTimestamp, final URI context) throws OpenRDFException
{
final List<Statement> statements =
Iterations.asList(repositoryConnection.getStatements(null, propertyUri, null, false, context));
for(final Statement s : statements)
{
final Value object = s.getObject();
if(object instanceof Literal)
{
final int year = ((Literal)object).calendarValue().getYear();
if(year == 1970)
{
repositoryConnection.remove(s, context);
repositoryConnection.add(s.getSubject(), s.getPredicate(), newTimestamp, context);
}
}
}
}
public String incrementVersion(final String oldVersion)
{
final char versionSeparatorChar = ':';
final int positionVersionSeparator = oldVersion.lastIndexOf(versionSeparatorChar);
if(positionVersionSeparator > 1)
{
final String prefix = oldVersion.substring(0, positionVersionSeparator);
final String version = oldVersion.substring(positionVersionSeparator + 1);
try
{
int versionInt = Integer.parseInt(version);
versionInt++;
return prefix + versionSeparatorChar + versionInt;
}
catch(final NumberFormatException e)
{
return oldVersion.concat("1");
}
}
return oldVersion.concat("1");
}
@Override
public boolean isPublished(final InferredOWLOntologyID ontologyId) throws OpenRDFException
{
RepositoryConnection conn = null;
try
{
conn = this.repositoryManager.getManagementRepository().getConnection();
return this.getSesameManager().isPublished(ontologyId, conn,
this.getRepositoryManager().getArtifactManagementGraph());
}
finally
{
if(conn != null && conn.isOpen())
{
conn.close();
}
}
}
private List<InferredOWLOntologyID> listArtifacts(final boolean published, final boolean unpublished)
throws OpenRDFException
{
if(!published && !unpublished)
{
throw new IllegalArgumentException("Cannot choose to exclude both published and unpublished artifacts");
}
final List<InferredOWLOntologyID> results = new ArrayList<InferredOWLOntologyID>();
RepositoryConnection conn = null;
try
{
conn = this.getRepositoryManager().getManagementRepository().getConnection();
final Collection<InferredOWLOntologyID> ontologies =
this.getSesameManager().getOntologies(true, conn,
this.getRepositoryManager().getArtifactManagementGraph());
for(final InferredOWLOntologyID nextOntology : ontologies)
{
final boolean isPublished =
this.getSesameManager().isPublished(nextOntology, conn,
this.getRepositoryManager().getArtifactManagementGraph());
if(isPublished)
{
if(published)
{
results.add(nextOntology);
}
}
else if(unpublished)
{
results.add(nextOntology);
}
}
}
finally
{
if(conn != null && conn.isOpen())
{
conn.close();
}
}
return results;
}
    /**
     * @return all managed artifacts that have been published.
     */
    @Override
    public List<InferredOWLOntologyID> listPublishedArtifacts() throws OpenRDFException
    {
        return this.listArtifacts(true, false);
    }
    
    /**
     * @return all managed artifacts that have not (yet) been published.
     */
    @Override
    public List<InferredOWLOntologyID> listUnpublishedArtifacts() throws OpenRDFException
    {
        return this.listArtifacts(false, true);
    }
    /**
     * Loads an artifact with the default policies: dangling objects are reported as errors
     * and data references are not verified.
     *
     * @param inputStream
     *            RDF serialisation of the artifact.
     * @param format
     *            The serialisation format of the input.
     * @return The ontology ID of the newly loaded artifact version.
     */
    @Override
    public InferredOWLOntologyID loadArtifact(final InputStream inputStream, final RDFFormat format)
        throws OpenRDFException, PoddException, IOException, OWLException
    {
        return this.loadArtifact(inputStream, format, DanglingObjectPolicy.REPORT,
                DataReferenceVerificationPolicy.DO_NOT_VERIFY);
    }
    /**
     * Loads a new artifact: parses the stream into a uniquely-named context of a
     * temporary repository, strips privileged publication-status assertions,
     * resolves PURLs, assigns a version IRI, then verifies/infers the ontology
     * via OWLAPI and stores it in the permanent repository. Both the permanent
     * and management transactions are committed together, and rolled back
     * together on any failure.
     *
     * @param inputStream
     *            RDF serialisation of the artifact; must not be null.
     * @param format
     *            RDF format of the stream; defaults to RDF/XML when null.
     * @param danglingObjectPolicy
     *            Policy for objects not connected to the artifact's top object.
     * @param dataReferenceVerificationPolicy
     *            Whether external data references are verified after inference.
     * @return The {@link InferredOWLOntologyID} of the newly managed artifact.
     */
    @Override
    public InferredOWLOntologyID loadArtifact(final InputStream inputStream, RDFFormat format,
            final DanglingObjectPolicy danglingObjectPolicy,
            final DataReferenceVerificationPolicy dataReferenceVerificationPolicy) throws OpenRDFException,
        PoddException, IOException, OWLException
    {
        if(inputStream == null)
        {
            throw new NullPointerException("Input stream must not be null");
        }
        if(format == null)
        {
            // Default to RDF/XML when the caller did not specify a format
            format = RDFFormat.RDFXML;
        }
        // Parse into a unique context so concurrent uploads into a shared
        // temporary repository cannot collide
        final URI randomContext = ValueFactoryImpl.getInstance().createURI("urn:uuid:" + UUID.randomUUID().toString());
        final Model model = Rio.parse(inputStream, "", format, randomContext);
        final List<InferredOWLOntologyID> ontologyIDs = OntologyUtils.modelToOntologyIDs(model);
        if(ontologyIDs.isEmpty())
        {
            throw new EmptyOntologyException(null, "Loaded ontology is empty");
        }
        else if(ontologyIDs.size() > 1)
        {
            // Only the first ontology is processed below; warn about the rest
            this.log.warn("Found multiple ontologies when we were only expecting a single ontology: {}", ontologyIDs);
        }
        // FIXME: This method only works if the imports are already in a
        // repository somewhere, need
        // to fix the Sesame manager to look for imports in Models also
        final Set<InferredOWLOntologyID> schemaImports = this.getSchemaImports(ontologyIDs.get(0));
        // connection to the temporary repository that the artifact RDF triples
        // will be stored while they are initially parsed by OWLAPI.
        Repository tempRepository = null;
        RepositoryConnection temporaryRepositoryConnection = null;
        RepositoryConnection permanentRepositoryConnection = null;
        RepositoryConnection managementRepositoryConnection = null;
        InferredOWLOntologyID inferredOWLOntologyID = null;
        try
        {
            tempRepository = this.repositoryManager.getNewTemporaryRepository(schemaImports);
            temporaryRepositoryConnection = tempRepository.getConnection();
            // Load the artifact RDF triples into a random context in the temp
            // repository, which may be shared between different uploads
            temporaryRepositoryConnection.add(model, randomContext);
            // Remove any assertions that the user has made about publication status, as this
            // information is a privileged operation that must be done through the designated API
            // method
            temporaryRepositoryConnection.remove((Resource)null, PODD.PODD_BASE_HAS_PUBLICATION_STATUS, (Resource)null,
                    randomContext);
            this.handlePurls(temporaryRepositoryConnection, randomContext);
            final Repository managementRepository = this.getRepositoryManager().getManagementRepository();
            managementRepositoryConnection = managementRepository.getConnection();
            managementRepositoryConnection.begin();
            final Repository permanentRepository = this.getRepositoryManager().getPermanentRepository(schemaImports);
            permanentRepositoryConnection = permanentRepository.getConnection();
            permanentRepositoryConnection.begin();
            // Set a Version IRI for this artifact
            /*
             * Version information need not be available in uploaded artifacts (any existing values
             * are ignored).
             *
             * For a new artifact, a Version IRI is created based on the Ontology IRI while for a
             * new version of a managed artifact, the most recent version is incremented.
             */
            final IRI ontologyIRI =
                    this.getSesameManager().getOntologyIRI(temporaryRepositoryConnection, randomContext);
            if(ontologyIRI == null)
            {
                throw new EmptyOntologyException(null, "Loaded ontology is empty");
            }
            // check for managed version from artifact graph
            OWLOntologyID currentManagedArtifactID = null;
            try
            {
                currentManagedArtifactID =
                        this.getSesameManager().getCurrentArtifactVersion(ontologyIRI, managementRepositoryConnection,
                                this.getRepositoryManager().getArtifactManagementGraph());
                if(currentManagedArtifactID != null)
                {
                    // Re-uploading an already managed artifact must go through update, not load
                    throw new DuplicateArtifactIRIException(ontologyIRI, "This artifact is already managed");
                }
            }
            catch(final UnmanagedArtifactIRIException e)
            {
                // ignore. indicates a new artifact is being uploaded
                this.log.info("This is an unmanaged artifact IRI {}", ontologyIRI);
            }
            IRI newVersionIRI = null;
            if(currentManagedArtifactID == null || currentManagedArtifactID.getVersionIRI() == null)
            {
                newVersionIRI = IRI.create(ontologyIRI.toString() + ":version:1");
            }
            if(newVersionIRI != null)
            {
                // set version IRI in temporary repository
                this.log.info("Setting version IRI to <{}>", newVersionIRI);
            }
            // NOTE(review): newVersionIRI is always non-null at this point, because a
            // managed artifact triggers DuplicateArtifactIRIException above; if that
            // invariant ever changes, the add() below would throw a NullPointerException.
            temporaryRepositoryConnection.remove(ontologyIRI.toOpenRDFURI(), PODD.OWL_VERSION_IRI, null, randomContext);
            temporaryRepositoryConnection.add(ontologyIRI.toOpenRDFURI(), PODD.OWL_VERSION_IRI,
                    newVersionIRI.toOpenRDFURI(), randomContext);
            // check and update statements with default timestamp values
            final Value now = PODD.VF.createLiteral(new Date());
            this.handleTimestamps(temporaryRepositoryConnection, PODD.PODD_BASE_CREATED_AT, now, randomContext);
            this.handleTimestamps(temporaryRepositoryConnection, PODD.PODD_BASE_LAST_MODIFIED, now, randomContext);
            this.handleDanglingObjects(ontologyIRI, temporaryRepositoryConnection, randomContext, danglingObjectPolicy);
            // check and ensure schema ontology imports are for version IRIs
            this.handleSchemaImports(ontologyIRI.toOpenRDFURI(), managementRepositoryConnection,
                    temporaryRepositoryConnection, randomContext);
            // ensure schema ontologies are cached in memory before loading
            // statements into OWLAPI
            this.handleCacheSchemasInMemory(managementRepositoryConnection, temporaryRepositoryConnection,
                    randomContext);
            inferredOWLOntologyID =
                    this.loadInferStoreArtifact(temporaryRepositoryConnection, permanentRepositoryConnection,
                            managementRepositoryConnection, randomContext, dataReferenceVerificationPolicy, false);
            permanentRepositoryConnection.commit();
            managementRepositoryConnection.commit();
            return inferredOWLOntologyID;
        }
        catch(final Exception e)
        {
            // Roll back every transaction that is still active before rethrowing
            if(temporaryRepositoryConnection != null && temporaryRepositoryConnection.isActive())
            {
                temporaryRepositoryConnection.rollback();
            }
            if(permanentRepositoryConnection != null && permanentRepositoryConnection.isActive())
            {
                permanentRepositoryConnection.rollback();
            }
            if(managementRepositoryConnection != null && managementRepositoryConnection.isActive())
            {
                managementRepositoryConnection.rollback();
            }
            throw e;
        }
        finally
        {
            // Nested try/finally blocks guarantee that each cleanup step runs
            // even if an earlier one throws
            try
            {
                // release resources
                if(inferredOWLOntologyID != null)
                {
                    try
                    {
                        this.getOWLManager().removeCache(inferredOWLOntologyID.getBaseOWLOntologyID());
                    }
                    finally
                    {
                        this.getOWLManager().removeCache(inferredOWLOntologyID.getInferredOWLOntologyID());
                    }
                }
            }
            finally
            {
                try
                {
                    if(managementRepositoryConnection != null && managementRepositoryConnection.isOpen())
                    {
                        managementRepositoryConnection.close();
                    }
                }
                catch(final RepositoryException e)
                {
                    this.log.error("Found exception closing management repository connection", e);
                }
                finally
                {
                    try
                    {
                        if(permanentRepositoryConnection != null && permanentRepositoryConnection.isOpen())
                        {
                            permanentRepositoryConnection.close();
                        }
                    }
                    catch(final RepositoryException e)
                    {
                        this.log.error("Found exception closing permanent repository connection", e);
                    }
                    finally
                    {
                        try
                        {
                            if(temporaryRepositoryConnection != null && temporaryRepositoryConnection.isOpen())
                            {
                                temporaryRepositoryConnection.close();
                            }
                        }
                        catch(final RepositoryException e)
                        {
                            this.log.error("Found exception closing temporary repository connection", e);
                        }
                        finally
                        {
                            if(tempRepository != null)
                            {
                                tempRepository.shutDown();
                            }
                        }
                    }
                }
            }
        }
    }
    /**
     * Helper method to load the artifact into OWLAPI from a temporary location, perform reasoning
     * and store in permanent repository.
     *
     * @param tempRepositoryConnection
     *            Connection holding the concrete artifact statements in {@code tempContext}.
     * @param permanentRepositoryConnection
     *            Connection the inferred artifact is stored into (transaction managed by caller).
     * @param managementRepositoryConnection
     *            Connection used to record the new managed artifact version.
     * @param tempContext
     *            Context in the temporary repository containing the artifact statements.
     * @param fileReferencePolicy
     *            Whether external data references are verified after inference.
     * @param asynchronousInferences
     *            NOTE(review): not referenced in this method body — confirm whether it
     *            is still needed or should be removed from the signature.
     * @return The inferred ontology ID of the stored artifact.
     */
    private InferredOWLOntologyID loadInferStoreArtifact(final RepositoryConnection tempRepositoryConnection,
            final RepositoryConnection permanentRepositoryConnection,
            final RepositoryConnection managementRepositoryConnection, final URI tempContext,
            final DataReferenceVerificationPolicy fileReferencePolicy, final boolean asynchronousInferences)
        throws OpenRDFException, OWLException, IOException, PoddException, OntologyNotInProfileException,
        InconsistentOntologyException
    {
        // load into OWLAPI
        this.log.debug("Loading podd artifact from temp repository: {}", tempContext);
        final List<Statement> statements =
                Iterations.asList(tempRepositoryConnection.getStatements(null, null, null, true, tempContext));
        // Wrap the statements (and namespaces) as an OWLAPI triple source
        final RioMemoryTripleSource owlSource =
                new RioMemoryTripleSource(statements.iterator(), Namespaces.asMap(Iterations
                        .asSet(tempRepositoryConnection.getNamespaces())));
        final InferredOWLOntologyID inferredOWLOntologyID =
                this.getOWLManager().loadAndInfer(owlSource, permanentRepositoryConnection, null);
        // Check file references after inferencing to accurately identify
        // the parent object
        this.handleFileReferences(permanentRepositoryConnection, fileReferencePolicy, inferredOWLOntologyID
                .getVersionIRI().toOpenRDFURI(), inferredOWLOntologyID.getInferredOntologyIRI().toOpenRDFURI());
        // Record the new version in the artifact management graph
        this.getSesameManager().updateManagedPoddArtifactVersion(inferredOWLOntologyID, true,
                managementRepositoryConnection, this.getRepositoryManager().getArtifactManagementGraph());
        return inferredOWLOntologyID;
    }
    /**
     * Marks the current version of the given artifact as published. Publication can
     * only be performed on the most recent version, and only once per artifact.
     *
     * @param ontologyId
     *            The artifact version to publish; its version IRI must be set and
     *            must match the current managed version.
     * @return The ontology ID of the published artifact.
     * @throws PublishArtifactException
     *             If no version was specified, the artifact was already published,
     *             or the given version is not the current version.
     */
    @Override
    public InferredOWLOntologyID publishArtifact(final InferredOWLOntologyID ontologyId) throws OpenRDFException,
        PublishArtifactException, UnmanagedArtifactIRIException, UnmanagedSchemaIRIException
    {
        final IRI ontologyIRI = ontologyId.getOntologyIRI();
        final IRI versionIRI = ontologyId.getVersionIRI();
        if(versionIRI == null)
        {
            throw new PublishArtifactException("Could not publish artifact as version was not specified.", ontologyId);
        }
        Repository repository = null;
        RepositoryConnection repositoryConnection = null;
        try
        {
            final Set<InferredOWLOntologyID> currentSchemaImports = this.getSchemaImports(ontologyId);
            repository = this.getRepositoryManager().getPermanentRepository(currentSchemaImports);
            repositoryConnection = repository.getConnection();
            repositoryConnection.begin();
            if(this.getSesameManager().isPublished(ontologyId, repositoryConnection,
                    this.getRepositoryManager().getArtifactManagementGraph()))
            {
                // Cannot publish multiple versions of a single artifact
                throw new PublishArtifactException("Could not publish artifact as a version was already published",
                        ontologyId);
            }
            final InferredOWLOntologyID currentVersion =
                    this.getSesameManager().getCurrentArtifactVersion(ontologyIRI, repositoryConnection,
                            this.getRepositoryManager().getArtifactManagementGraph());
            if(!currentVersion.getVersionIRI().equals(versionIRI))
            {
                // User must make the given artifact version the current version
                // manually before
                // publishing, to ensure that work from the current version is
                // not lost accidentally
                throw new PublishArtifactException(
                        "Could not publish artifact as it was not the most current version.", ontologyId);
            }
            final InferredOWLOntologyID published =
                    this.getSesameManager().setPublished(true, currentVersion, repositoryConnection,
                            this.getRepositoryManager().getArtifactManagementGraph());
            repositoryConnection.commit();
            return published;
        }
        catch(final Throwable e)
        {
            // Roll back the transaction if it is still active before rethrowing
            if(repositoryConnection != null && repositoryConnection.isActive())
            {
                repositoryConnection.rollback();
            }
            throw e;
        }
        finally
        {
            // release resources
            try
            {
                if(repositoryConnection != null && repositoryConnection.isOpen())
                {
                    repositoryConnection.close();
                }
            }
            catch(final RepositoryException e)
            {
                this.log.error("Found exception closing repository connection", e);
            }
        }
    }
/*
* (non-Javadoc)
*
* @see com.github.podd.api.PoddArtifactManager#searchForOntologyLabels(org.
* semanticweb.owlapi.model. OWLOntologyID, java.lang.String, org.openrdf.model.URI[])
*/
@Override
public Model searchForOntologyLabels(final InferredOWLOntologyID ontologyID, final String searchTerm,
final URI[] searchTypes) throws OpenRDFException, UnmanagedSchemaIRIException
{
RepositoryConnection conn = null;
try
{
Set<InferredOWLOntologyID> schemaImports;
if(ontologyID != null)
{
schemaImports = this.getSchemaImports(ontologyID);
}
else
{
schemaImports = this.getSchemaManager().getCurrentSchemaOntologies();
}
conn = this.getRepositoryManager().getPermanentRepository(schemaImports).getConnection();
// FIXME: Cannot use contexts like this for a federated method
final URI[] contexts =
this.getSesameManager().versionAndInferredAndSchemaContexts(ontologyID, conn,
this.getRepositoryManager().getSchemaManagementGraph());
return this.getSesameManager().searchOntologyLabels(searchTerm, searchTypes, 1000, 0, conn, contexts);
}
catch(final Throwable e)
{
try
{
if(conn != null && conn.isActive())
{
conn.rollback();
}
}
catch(final RepositoryException e1)
{
this.log.error("Found error rolling back repository connection", e1);
}
throw e;
}
finally
{
try
{
if(conn != null && conn.isOpen())
{
conn.close();
}
}
catch(final RepositoryException e)
{
throw e;
}
}
}
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#setDataReferenceManager(com.github
     * .podd.api.file.DataReferenceManager)
     */
    @Override
    public void setDataReferenceManager(final DataReferenceManager fileManager)
    {
        // Dependency injection of the manager used to extract data references
        this.dataReferenceManager = fileManager;
    }
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#setDataRepositoryManager(com.
     * github.podd.api.file.PoddDataRepositoryManager)
     */
    @Override
    public void setDataRepositoryManager(final PoddDataRepositoryManager dataRepositoryManager)
    {
        // Dependency injection of the manager for external data repositories
        this.dataRepositoryManager = dataRepositoryManager;
    }
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#setOwlManager(com.github.podd
     * .api.PoddOWLManager)
     */
    @Override
    public void setOwlManager(final PoddOWLManager owlManager)
    {
        // Dependency injection of the OWLAPI manager used for loading/inference
        this.owlManager = owlManager;
    }
    /*
     * (non-Javadoc)
     *
     * @see com.github.podd.api.PoddArtifactManager#setPurlManager(com.github.podd
     * .api.purl.PoddPurlManager )
     */
    @Override
    public void setPurlManager(final PoddPurlManager purlManager)
    {
        // Dependency injection of the manager that converts temporary URIs to PURLs
        this.purlManager = purlManager;
    }
    /**
     * Dependency injection of the repository manager providing the management,
     * permanent and temporary RDF repositories.
     */
    @Override
    public void setRepositoryManager(final PoddRepositoryManager repositoryManager)
    {
        this.repositoryManager = repositoryManager;
    }
    /**
     * Dependency injection of the manager responsible for schema ontologies.
     */
    @Override
    public void setSchemaManager(final PoddSchemaManager schemaManager)
    {
        this.schemaManager = schemaManager;
    }
    /**
     * Dependency injection of the manager that performs Sesame repository queries.
     */
    @Override
    public void setSesameManager(final PoddSesameManager sesameManager)
    {
        this.sesameManager = sesameManager;
    }
/*
* (non-Javadoc)
*
* @see com.github.podd.api.PoddArtifactManager#updateArtifact(org.openrdf.model .URI,
* java.io.InputStream, org.openrdf.rio.RDFFormat)
*/
@Override
public Model updateArtifact(final URI artifactUri, final URI versionUri, final Collection<URI> objectUris,
final InputStream inputStream, RDFFormat format, final UpdatePolicy updatePolicy,
final DanglingObjectPolicy danglingObjectAction, final DataReferenceVerificationPolicy fileReferenceAction)
throws OpenRDFException, IOException, OWLException, PoddException
{
if(inputStream == null)
{
throw new NullPointerException("Input stream must not be null");
}
if(format == null)
{
format = RDFFormat.RDFXML;
}
final Model model = Rio.parse(inputStream, "", format);
return this.updateArtifact(artifactUri, versionUri, objectUris, model, updatePolicy, danglingObjectAction,
fileReferenceAction);
}
    /**
     * Internal updateArtifact() method which takes a {@link Model} containing the modified triples
     * instead of an InputStream.
     *
     * Copies the artifact's concrete statements into a temporary repository, applies the
     * update according to the {@link UpdatePolicy}, increments the version IRI, re-runs
     * inference and stores the result permanently. The management and permanent
     * transactions are committed together and rolled back together on failure.
     *
     * @param artifactUri
     *            The URI of the managed artifact to update; must be managed.
     * @param versionUri
     *            The version the update is based on; must be the current version.
     * @param objectUris
     *            Objects to replace for REPLACE_EXISTING, or empty to derive them
     *            from the subjects of the supplied statements.
     * @param model
     *            The modified triples; must not be null.
     * @param updatePolicy
     *            REPLACE_EXISTING or MERGE_WITH_EXISTING (REPLACE_ALL is not implemented).
     * @param danglingObjectAction
     *            Policy for objects left unconnected by the update.
     * @param fileReferenceAction
     *            Whether data references are verified after the update.
     * @return A {@link Model} containing the new ontology ID and any
     *         temp-URI-to-PURL replacement statements.
     */
    protected Model updateArtifact(final URI artifactUri, final URI versionUri, final Collection<URI> objectUris,
            final Model model, final UpdatePolicy updatePolicy, final DanglingObjectPolicy danglingObjectAction,
            final DataReferenceVerificationPolicy fileReferenceAction) throws OpenRDFException, IOException,
        OWLException, PoddException
    {
        if(model == null)
        {
            throw new NullPointerException("Input Model must not be null");
        }
        // check if the specified artifact URI refers to a managed artifact
        InferredOWLOntologyID artifactID = null;
        try
        {
            artifactID = this.getArtifact(IRI.create(artifactUri));
        }
        catch(final UnmanagedArtifactIRIException e)
        {
            this.log.error("This artifact is unmanaged. [{}]", artifactUri);
            throw e;
        }
        // check if updating from the most current version of the artifact
        try
        {
            artifactID = this.getArtifact(IRI.create(versionUri));
        }
        catch(final UnmanagedArtifactIRIException e)
        {
            // if the version IRI is not the most current, it is unmanaged
            final String message =
                    "Attempting to update from an invalid version of an artifact [" + versionUri
                            + "]. The current version is [" + artifactID.getVersionIRI().toString() + "]";
            this.log.error(message);
            // TODO: UpdatePolicy.MERGE_WITH_EXISTING and
            // UpdatePolicy.REPLACE_ALL should be fine to
            // go on in most cases
            throw new UnmanagedArtifactVersionException(artifactID.getOntologyIRI(), artifactID.getVersionIRI(),
                    IRI.create(versionUri), message, e);
            // FIXME - handle this conflict intelligently instead of rejecting
            // the update.
        }
        final Set<InferredOWLOntologyID> currentSchemaImports = this.getSchemaImports(artifactID);
        final Repository tempRepository = this.getRepositoryManager().getNewTemporaryRepository(currentSchemaImports);
        RepositoryConnection tempRepositoryConnection = null;
        RepositoryConnection permanentRepositoryConnection = null;
        RepositoryConnection managementRepositoryConnection = null;
        InferredOWLOntologyID inferredOWLOntologyID = null;
        try
        {
            // create a temporary in-memory repository
            tempRepositoryConnection = tempRepository.getConnection();
            tempRepositoryConnection.begin();
            managementRepositoryConnection = this.getRepositoryManager().getManagementRepository().getConnection();
            permanentRepositoryConnection =
                    this.getRepositoryManager().getPermanentRepository(currentSchemaImports).getConnection();
            permanentRepositoryConnection.begin();
            // load and copy the artifact's concrete statements to the temporary
            // store
            final RepositoryResult<Statement> repoResult =
                    permanentRepositoryConnection.getStatements(null, null, null, false, artifactID.getVersionIRI()
                            .toOpenRDFURI());
            final URI tempContext = artifactID.getVersionIRI().toOpenRDFURI();
            tempRepositoryConnection.add(repoResult, tempContext);
            // update the artifact statements
            if(UpdatePolicy.REPLACE_ALL == updatePolicy)
            {
                throw new PoddRuntimeException("TODO: Implement support for UpdatePolicy.REPLACE_ALL");
            }
            else if(UpdatePolicy.REPLACE_EXISTING == updatePolicy)
            {
                // create an intermediate context and add "edit" statements to
                final URI intContext = PODD.VF.createURI("urn:intermediate:", UUID.randomUUID().toString());
                tempRepositoryConnection.add(model, intContext);
                final Collection<URI> replaceableObjects = new ArrayList<URI>(objectUris);
                // If they did not send a list, we create one ourselves.
                if(replaceableObjects.isEmpty())
                {
                    // get all Subjects in "edit" statements
                    final RepositoryResult<Statement> statements =
                            tempRepositoryConnection.getStatements(null, null, null, false, intContext);
                    final List<Statement> allEditStatements = Iterations.addAll(statements, new ArrayList<Statement>());
                    // remove all references to these Subjects in "main" context
                    for(final Statement statement : allEditStatements)
                    {
                        if(statement.getSubject() instanceof URI)
                        {
                            replaceableObjects.add((URI)statement.getSubject());
                        }
                        else
                        {
                            // We do not support replacing objects that are not
                            // referenced using
                            // URIs, so they must stay for REPLACE_EXISTING
                            // To remove blank node subject statements, replace
                            // the entire object
                            // using REPLACE_ALL
                        }
                    }
                }
                for(final URI nextReplaceableObject : replaceableObjects)
                {
                    tempRepositoryConnection.remove(nextReplaceableObject, null, null, tempContext);
                }
                // copy the "edit" statements from intermediate context into our
                // "main" context
                tempRepositoryConnection.add(
                        tempRepositoryConnection.getStatements(null, null, null, false, intContext), tempContext);
            }
            else if(UpdatePolicy.MERGE_WITH_EXISTING == updatePolicy)
            {
                tempRepositoryConnection.add(model, tempContext);
            }
            else
            {
                throw new PoddRuntimeException("Did not recognise the UpdatePolicy: " + updatePolicy);
            }
            // check and update statements with default timestamp values
            final Value now = PODD.VF.createLiteral(new Date());
            this.handleTimestamps(tempRepositoryConnection, PODD.PODD_BASE_CREATED_AT, now, tempContext);
            this.handleTimestamps(tempRepositoryConnection, PODD.PODD_BASE_LAST_MODIFIED, now, tempContext);
            this.handleDanglingObjects(artifactID.getOntologyIRI(), tempRepositoryConnection, tempContext,
                    danglingObjectAction);
            // Remove any assertions that the user has made about publication
            // status, as this
            // information is a privileged operation that must be done through
            // the designated API
            // method
            tempRepositoryConnection.remove((Resource)null, PODD.PODD_BASE_HAS_PUBLICATION_STATUS, (Resource)null,
                    tempContext);
            final Set<PoddPurlReference> purls = this.handlePurls(tempRepositoryConnection, tempContext);
            final Model resultsModel = new LinkedHashModel();
            // add (temp-object-URI :replacedTempUriWith PURL) statements to Model
            // NOTE: Using nested loops is rather inefficient, but these collections are not
            // expected to have more than a handful of elements
            for(final URI objectUri : objectUris)
            {
                for(final PoddPurlReference purl : purls)
                {
                    final URI tempUri = purl.getTemporaryURI();
                    if(objectUri.equals(tempUri))
                    {
                        resultsModel.add(objectUri, PODD.PODD_REPLACED_TEMP_URI_WITH, purl.getPurlURI());
                        break; // out of inner loop
                    }
                }
            }
            // increment the version
            final OWLOntologyID currentManagedArtifactID =
                    this.getSesameManager().getCurrentArtifactVersion(IRI.create(artifactUri),
                            permanentRepositoryConnection, this.getRepositoryManager().getArtifactManagementGraph());
            final URI newVersionIRI =
                    PODD.VF.createURI(this.incrementVersion(currentManagedArtifactID.getVersionIRI().toString()));
            // set version IRI in temporary repository
            this.log.info("Setting version IRI to <{}>", newVersionIRI);
            tempRepositoryConnection.remove(artifactID.getOntologyIRI().toOpenRDFURI(), PODD.OWL_VERSION_IRI, null,
                    tempContext);
            tempRepositoryConnection.add(artifactID.getOntologyIRI().toOpenRDFURI(), PODD.OWL_VERSION_IRI,
                    newVersionIRI, tempContext);
            // check and ensure schema ontology imports are for version IRIs
            this.handleSchemaImports(artifactID.getOntologyIRI().toOpenRDFURI(), managementRepositoryConnection,
                    tempRepositoryConnection, tempContext);
            // ensure schema ontologies are cached in memory before loading
            // statements into OWLAPI
            this.handleCacheSchemasInMemory(managementRepositoryConnection, tempRepositoryConnection, tempContext);
            inferredOWLOntologyID =
                    this.loadInferStoreArtifact(tempRepositoryConnection, permanentRepositoryConnection,
                            managementRepositoryConnection, tempContext, fileReferenceAction, false);
            permanentRepositoryConnection.commit();
            managementRepositoryConnection.commit();
            // The temporary changes are deliberately discarded once the permanent
            // stores have committed
            tempRepositoryConnection.rollback();
            return OntologyUtils.ontologyIDsToModel(Arrays.asList(inferredOWLOntologyID), resultsModel);
        }
        catch(final Exception e)
        {
            // Roll back every transaction that is still active before rethrowing
            if(managementRepositoryConnection != null && managementRepositoryConnection.isActive())
            {
                managementRepositoryConnection.rollback();
            }
            if(permanentRepositoryConnection != null && permanentRepositoryConnection.isActive())
            {
                permanentRepositoryConnection.rollback();
            }
            if(tempRepositoryConnection != null && tempRepositoryConnection.isActive())
            {
                tempRepositoryConnection.rollback();
            }
            throw e;
        }
        finally
        {
            if(managementRepositoryConnection != null && managementRepositoryConnection.isOpen())
            {
                try
                {
                    managementRepositoryConnection.close();
                }
                catch(final RepositoryException e)
                {
                    this.log.error("Found exception closing repository connection", e);
                }
            }
            if(permanentRepositoryConnection != null && permanentRepositoryConnection.isOpen())
            {
                try
                {
                    permanentRepositoryConnection.close();
                }
                catch(final RepositoryException e)
                {
                    this.log.error("Found exception closing repository connection", e);
                }
            }
            // release resources
            // NOTE(review): unlike loadArtifact(), these two removeCache calls are not
            // nested in try/finally, so a failure in the first would skip the second —
            // confirm whether that is intentional.
            if(inferredOWLOntologyID != null)
            {
                this.getOWLManager().removeCache(inferredOWLOntologyID.getBaseOWLOntologyID());
                this.getOWLManager().removeCache(inferredOWLOntologyID.getInferredOWLOntologyID());
            }
            if(tempRepositoryConnection != null && tempRepositoryConnection.isOpen())
            {
                try
                {
                    tempRepositoryConnection.close();
                }
                catch(final RepositoryException e)
                {
                    this.log.error("Found exception closing repository connection", e);
                }
            }
            tempRepository.shutDown();
        }
    }
@Override
public InferredOWLOntologyID updateSchemaImports(final InferredOWLOntologyID artifactId,
final Set<? extends OWLOntologyID> oldSchemaOntologyIds,
final Set<? extends OWLOntologyID> newSchemaOntologyIds) throws OpenRDFException, PoddException,
IOException, OWLException
{
if(artifactId == null)
{
throw new IllegalArgumentException("Artifact was null");
}
RepositoryConnection managementRepositoryConnection = null;
RepositoryConnection permanentRepositoryConnection = null;
RepositoryConnection tempRepositoryConnection = null;
Repository tempRepository = null;
try
{
managementRepositoryConnection = this.repositoryManager.getManagementRepository().getConnection();
managementRepositoryConnection.begin();
permanentRepositoryConnection =
this.repositoryManager.getPermanentRepository(newSchemaOntologyIds).getConnection();
permanentRepositoryConnection.begin();
final InferredOWLOntologyID artifactVersion =
this.sesameManager.getCurrentArtifactVersion(artifactId.getOntologyIRI(),
managementRepositoryConnection, this.repositoryManager.getArtifactManagementGraph());
if(!artifactVersion.getVersionIRI().equals(artifactId.getVersionIRI()))
{
throw new UnmanagedArtifactVersionException(artifactId.getOntologyIRI(),
artifactVersion.getVersionIRI(), artifactId.getVersionIRI(),
"Cannot update schema imports for artifact as the specified version was not found.");
}
this.log.info("Starting exporting artifact to RDF: {}", artifactVersion);
// Export the artifact without including the old inferred triples, and they will be
// regenerated using the new schema ontologies
final Model model = this.exportArtifact(artifactVersion, false);
this.log.info("Finished exporting artifact to RDF: {}", artifactVersion);
tempRepository = this.repositoryManager.getNewTemporaryRepository(newSchemaOntologyIds);
tempRepositoryConnection = tempRepository.getConnection();
tempRepositoryConnection.begin();
// Bump the version identifier to a new value
final IRI newVersionIRI = IRI.create(this.incrementVersion(artifactVersion.getVersionIRI().toString()));
tempRepositoryConnection.add(model, newVersionIRI.toOpenRDFURI());
tempRepositoryConnection.remove(artifactVersion.getOntologyIRI().toOpenRDFURI(), OWL.VERSIONIRI, null);
tempRepositoryConnection.add(artifactVersion.getOntologyIRI().toOpenRDFURI(), OWL.VERSIONIRI,
newVersionIRI.toOpenRDFURI(), newVersionIRI.toOpenRDFURI());
for(final OWLOntologyID nextOldSchemaOntologyID : oldSchemaOntologyIds)
{
// Remove both a generic import and a version specific import,
// so this method can be
// used to bump generic imports to version specific imports
// after they are imported,
// if necessary.
tempRepositoryConnection.remove(artifactVersion.getOntologyIRI().toOpenRDFURI(), OWL.IMPORTS,
nextOldSchemaOntologyID.getOntologyIRI().toOpenRDFURI());
tempRepositoryConnection.remove(artifactVersion.getOntologyIRI().toOpenRDFURI(), OWL.IMPORTS,
nextOldSchemaOntologyID.getVersionIRI().toOpenRDFURI());
}
this.log.info("Started caching schema ontologies for artifact migration: {}", artifactVersion);
// Even if the old version of the artifact did not import this
// schema, we include it now
// as it may be required by the others
for(final OWLOntologyID nextNewSchemaOntologyID : newSchemaOntologyIds)
{
// Add import to the specific version
tempRepositoryConnection.add(artifactVersion.getOntologyIRI().toOpenRDFURI(), OWL.IMPORTS,
nextNewSchemaOntologyID.getVersionIRI().toOpenRDFURI(), newVersionIRI.toOpenRDFURI());
}
this.log.info("Started caching schema ontologies: {}", newSchemaOntologyIds);
this.getOWLManager().cacheSchemaOntologies(newSchemaOntologyIds, managementRepositoryConnection,
this.getRepositoryManager().getSchemaManagementGraph());
this.log.info("Finished caching schema ontology: {}", newSchemaOntologyIds);
this.log.info("Finished caching schema ontologies for artifact migration: {}", artifactVersion);
tempRepositoryConnection.commit();
this.log.info("Starting reload of artifact to Repository: {}", artifactVersion);
// If the following does not succeed, then it throws an exception and we rollback
// permanentRepositoryConnection
final InferredOWLOntologyID result =
this.loadInferStoreArtifact(tempRepositoryConnection, permanentRepositoryConnection,
managementRepositoryConnection, newVersionIRI.toOpenRDFURI(),
DataReferenceVerificationPolicy.DO_NOT_VERIFY, false);
this.log.info("Completed reload of artifact to Repository: {}", artifactVersion);
permanentRepositoryConnection.commit();
managementRepositoryConnection.commit();
return result;
}
catch(final Throwable e)
{
if(managementRepositoryConnection != null)
{
managementRepositoryConnection.rollback();
}
if(permanentRepositoryConnection != null)
{
permanentRepositoryConnection.rollback();
}
if(tempRepositoryConnection != null)
{
tempRepositoryConnection.rollback();
}
throw e;
}
finally
{
if(managementRepositoryConnection != null)
{
managementRepositoryConnection.close();
}
if(permanentRepositoryConnection != null)
{
permanentRepositoryConnection.close();
}
if(tempRepositoryConnection != null)
{
tempRepositoryConnection.close();
}
if(tempRepository != null)
{
tempRepository.shutDown();
}
}
}
}
|
package edu.cornell.mannlib.vitro.webapp.filters;
import java.io.IOException;
import java.util.List;
import java.util.regex.Pattern;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.RequestIdentifiers;
import edu.cornell.mannlib.vitro.webapp.auth.policy.ServletPolicyList;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess;
import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.FactoryID;
import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess.ModelID;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.WebappDaoFactoryFiltering;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.filters.HideFromDisplayByPolicyFilter;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.filter.LanguageFilteringRDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.filter.LanguageFilteringUtils;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
/**
* This sets up several objects in the Request scope for each incoming HTTP
* request. This is done in a Filter so that controllers and JSPs get the same
* setup.
*
* This code configures the WebappDaoFactory for each request.
*/
public class RequestModelsPrep implements Filter {
	private final static Log log = LogFactory.getLog(RequestModelsPrep.class);

	/**
	 * The filter will be applied to all incoming urls; this is a list of URI
	 * patterns to skip. These are matched against the requestURI sans query
	 * parameters, e.g. "/vitro/index.jsp" "/vitro/themes/enhanced/css/edit.css"
	 */
	private final static Pattern[] skipPatterns = {
			Pattern.compile(".*\\.(gif|GIF|jpg|jpeg)$"),
			Pattern.compile(".*\\.css$"), Pattern.compile(".*\\.js$"),
			Pattern.compile("/.*/themes/.*/site_icons/.*"),
			Pattern.compile("/.*/images/.*") };

	// Cached from the FilterConfig at init() time; read on every request.
	private ServletContext ctx;
	private ConfigurationProperties props;
	private String defaultNamespace;
@Override
public void init(FilterConfig fc) throws ServletException {
ctx = fc.getServletContext();
props = ConfigurationProperties.getBean(ctx);
defaultNamespace = props.getProperty("Vitro.defaultNamespace");
}
@Override
public void doFilter(ServletRequest request, ServletResponse response,
FilterChain filterChain) throws IOException, ServletException {
HttpServletRequest req = (HttpServletRequest) request;
HttpServletResponse resp = (HttpServletResponse) response;
// If we're not authorized for this request, skip the chain and
// redirect.
if (!ModelSwitcher.authorizedForSpecialModel(req)) {
VitroHttpServlet.redirectUnauthorizedRequest(req, resp);
return;
}
if (!thisRequestNeedsModels(req) || modelsAreAlreadySetUp(req)) {
filterChain.doFilter(req, resp);
} else {
RDFService rdfService = RDFServiceUtils.getRDFServiceFactory(ctx)
.getShortTermRDFService();
try {
setUpTheRequestModels(rdfService, req);
filterChain.doFilter(req, resp);
} finally {
rdfService.close();
}
}
}
private boolean thisRequestNeedsModels(HttpServletRequest req) {
String requestURI = req.getRequestURI();
for (Pattern skipPattern : skipPatterns) {
if (skipPattern.matcher(requestURI).matches()) {
log.debug("request matched skipPattern '" + skipPattern
+ "', skipping RequestModelsPrep");
return false;
}
}
return true;
}
private boolean modelsAreAlreadySetUp(HttpServletRequest req) {
String attributeName = RequestModelsPrep.class.getName() + "-setup";
if (req.getAttribute(attributeName) != null) {
return true;
} else {
req.setAttribute(attributeName, Boolean.TRUE);
return false;
}
}
private void setUpTheRequestModels(RDFService rawRdfService,
HttpServletRequest req) {
VitroRequest vreq = new VitroRequest(req);
vreq.setUnfilteredRDFService(rawRdfService);
List<String> langs = getPreferredLanguages(req);
RDFService rdfService = addLanguageAwareness(langs, rawRdfService);
vreq.setRDFService(rdfService);
Dataset dataset = new RDFServiceDataset(rdfService);
vreq.setDataset(dataset);
WebappDaoFactoryConfig config = createWadfConfig(langs, req);
ModelAccess.on(vreq).setJenaOntModel(
ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM,
dataset.getDefaultModel()));
addLanguageAwarenessToRequestModel(req, ModelID.DISPLAY);
addLanguageAwarenessToRequestModel(req, ModelID.APPLICATION_METADATA);
addLanguageAwarenessToRequestModel(req, ModelID.UNION_TBOX);
addLanguageAwarenessToRequestModel(req, ModelID.UNION_FULL);
addLanguageAwarenessToRequestModel(req, ModelID.BASE_TBOX);
addLanguageAwarenessToRequestModel(req, ModelID.BASE_FULL);
WebappDaoFactory unfilteredWadf = new WebappDaoFactorySDB(rdfService,
ModelAccess.on(vreq).getUnionOntModelSelector(), config);
ModelAccess.on(vreq).setWebappDaoFactory(FactoryID.UNFILTERED_UNION,
unfilteredWadf);
WebappDaoFactory unfilteredAssertionsWadf = new WebappDaoFactorySDB(
rdfService, ModelAccess.on(vreq).getBaseOntModelSelector(),
config, SDBDatasetMode.ASSERTIONS_ONLY);
ModelAccess.on(vreq).setWebappDaoFactory(FactoryID.BASE,
unfilteredAssertionsWadf);
ModelAccess.on(vreq).setWebappDaoFactory(FactoryID.UNFILTERED_BASE,
unfilteredAssertionsWadf);
WebappDaoFactory wadf = new WebappDaoFactorySDB(rdfService, ModelAccess
.on(vreq).getUnionOntModelSelector(), config);
// Do model switching and replace the WebappDaoFactory with
// a different version if requested by parameters
WebappDaoFactory switchedWadf = new ModelSwitcher()
.checkForModelSwitching(vreq, wadf);
HideFromDisplayByPolicyFilter filter = new HideFromDisplayByPolicyFilter(
RequestIdentifiers.getIdBundleForRequest(req),
ServletPolicyList.getPolicies(ctx));
WebappDaoFactoryFiltering filteredWadf = new WebappDaoFactoryFiltering(
switchedWadf, filter);
ModelAccess.on(vreq).setWebappDaoFactory(FactoryID.UNION, filteredWadf);
}
private WebappDaoFactoryConfig createWadfConfig(List<String> langs, HttpServletRequest req) {
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(defaultNamespace);
config.setPreferredLanguages(langs);
config.setUnderlyingStoreReasoned(isStoreReasoned(req));
return config;
}
private List<String> getPreferredLanguages(HttpServletRequest req) {
log.debug("Accept-Language: " + req.getHeader("Accept-Language"));
return LanguageFilteringUtils.localesToLanguages(req.getLocales());
}
/**
* Language awareness is enabled unless they explicitly disable it.
*/
private Boolean isLanguageAwarenessEnabled() {
return Boolean.valueOf(props.getProperty("RDFService.languageFilter",
"true"));
}
private RDFService addLanguageAwareness(List<String> langs,
RDFService rawRDFService) {
if (isLanguageAwarenessEnabled()) {
return new LanguageFilteringRDFService(rawRDFService, langs);
} else {
return rawRDFService;
}
}
private void addLanguageAwarenessToRequestModel(HttpServletRequest req, ModelID id) {
if (isLanguageAwarenessEnabled()) {
OntModel unaware = ModelAccess.on(req.getSession()).getOntModel(id);
OntModel aware = LanguageFilteringUtils
.wrapOntModelInALanguageFilter(unaware, req);
ModelAccess.on(req).setOntModel(id, aware);
}
}
private boolean isStoreReasoned(ServletRequest req) {
String isStoreReasoned = ConfigurationProperties.getBean(req).getProperty(
"VitroConnection.DataSource.isStoreReasoned", "true");
return ("true".equals(isStoreReasoned));
}
@Override
public void destroy() {
// Nothing to destroy
}
}
|
package com.alwaysallthetime.adnlib;
import android.os.AsyncTask;
import android.os.Build;
import com.alwaysallthetime.adnlib.data.Annotatable;
import com.alwaysallthetime.adnlib.data.Channel;
import com.alwaysallthetime.adnlib.data.Entities;
import com.alwaysallthetime.adnlib.data.File;
import com.alwaysallthetime.adnlib.data.Message;
import com.alwaysallthetime.adnlib.data.Post;
import com.alwaysallthetime.adnlib.data.PrivateMessage;
import com.alwaysallthetime.adnlib.data.StreamMarker;
import com.alwaysallthetime.adnlib.data.StreamMarkerList;
import com.alwaysallthetime.adnlib.data.User;
import com.alwaysallthetime.adnlib.request.AppDotNetApiFileUploadRequest;
import com.alwaysallthetime.adnlib.request.AppDotNetApiImageUploadRequest;
import com.alwaysallthetime.adnlib.request.AppDotNetApiJsonRequest;
import com.alwaysallthetime.adnlib.request.AppDotNetApiRequest;
import com.alwaysallthetime.adnlib.request.AppDotNetOAuthRequest;
import com.alwaysallthetime.adnlib.request.AppDotNetRequest;
import com.alwaysallthetime.adnlib.response.AccessTokenResponseHandler;
import com.alwaysallthetime.adnlib.response.ChannelListResponseHandler;
import com.alwaysallthetime.adnlib.response.ChannelResponseHandler;
import com.alwaysallthetime.adnlib.response.ConfigurationResponseHandler;
import com.alwaysallthetime.adnlib.response.CountResponseHandler;
import com.alwaysallthetime.adnlib.response.FileListResponseHandler;
import com.alwaysallthetime.adnlib.response.FileResponseHandler;
import com.alwaysallthetime.adnlib.response.LoginResponseHandler;
import com.alwaysallthetime.adnlib.response.MessageListResponseHandler;
import com.alwaysallthetime.adnlib.response.MessageResponseHandler;
import com.alwaysallthetime.adnlib.response.PlaceListResponseHandler;
import com.alwaysallthetime.adnlib.response.PlaceResponseHandler;
import com.alwaysallthetime.adnlib.response.PostListResponseHandler;
import com.alwaysallthetime.adnlib.response.PostResponseHandler;
import com.alwaysallthetime.adnlib.response.StreamMarkerListResponseHandler;
import com.alwaysallthetime.adnlib.response.StreamMarkerResponseHandler;
import com.alwaysallthetime.adnlib.response.TokenResponseHandler;
import com.alwaysallthetime.adnlib.response.UserListResponseHandler;
import com.alwaysallthetime.adnlib.response.UserResponseHandler;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import javax.net.ssl.SSLSocketFactory;
public class AppDotNetClient {
    // HTTP method names used when building API requests.
    public static final String METHOD_DELETE = "DELETE";
    public static final String METHOD_GET = "GET";
    public static final String METHOD_POST = "POST";
    public static final String METHOD_PUT = "PUT";

    protected static final int ID_LENGTH = 10; // max string length of object ID including delimiter

    // App.net REST endpoint path segments.
    protected static final String ENDPOINT_USERS = "users";
    protected static final String ENDPOINT_POSTS = "posts";
    protected static final String ENDPOINT_STARS = "stars";
    protected static final String ENDPOINT_MENTIONS = "mentions";
    protected static final String ENDPOINT_CHANNELS = "channels";
    protected static final String ENDPOINT_MESSAGES = "messages";
    protected static final String ENDPOINT_PLACES = "places";
    protected static final String ENDPOINT_FILES = "files";
    protected static final String ENDPOINT_CONFIGURATION = "config";

    // "Bearer <token>" authorization header value; null until setToken() is called.
    protected String authHeader;
    // Language tag built from the default locale; presumably sent as an
    // Accept-Language-style header by the request classes — TODO confirm.
    protected String languageHeader;
    // OAuth password-grant parameters; non-null only when the client was
    // constructed with a client ID and password grant secret.
    protected List<NameValuePair> authParams;
    // Optional custom socket factory for HTTPS connections; may be null.
    protected SSLSocketFactory sslSocketFactory;
public AppDotNetClient() {
final Locale locale = Locale.getDefault();
languageHeader = String.format("%s-%s", locale.getLanguage(), locale.getCountry());
}
    /**
     * Creates a client pre-authenticated with an existing access token.
     *
     * @param token App.net OAuth access token
     */
    public AppDotNetClient(String token) {
        this();
        setToken(token);
    }

    /**
     * Creates a client configured for the OAuth password grant flow; call
     * authenticateWithPassword() to obtain a token.
     *
     * @param clientId            the application's client ID
     * @param passwordGrantSecret the application's password grant secret
     */
    public AppDotNetClient(String clientId, String passwordGrantSecret) {
        this();
        authParams = new ArrayList<NameValuePair>(3);
        authParams.add(new BasicNameValuePair("client_id", clientId));
        authParams.add(new BasicNameValuePair("password_grant_secret", passwordGrantSecret));
        authParams.add(new BasicNameValuePair("grant_type", "password"));
    }
    /** Sets the access token; subsequent requests send "Bearer &lt;token&gt;". */
    public void setToken(String token) {
        authHeader = "Bearer " + token;
    }

    /** Returns true once a token has been set via setToken(). */
    public boolean hasToken() {
        return authHeader != null;
    }

    /** Installs a custom SSLSocketFactory for HTTPS connections. */
    public void setSslSocketFactory(SSLSocketFactory sslSocketFactory) {
        this.sslSocketFactory = sslSocketFactory;
    }
    /*
     * OAUTH
     */

    /**
     * Exchanges a username/password for an access token using the OAuth
     * password grant flow. Requires the client to have been constructed with
     * a client ID and password grant secret.
     *
     * @param scope requested scopes, or null to omit the scope parameter
     * @throws IllegalStateException if this client was not constructed for
     *         the password grant flow
     */
    public void authenticateWithPassword(String username, String password, String scope, LoginResponseHandler responseHandler) {
        if (authParams == null)
            throw new IllegalStateException("client must be constructed with client ID and password grant secret");
        final List<NameValuePair> params = getAuthenticationParams(username, password, scope);
        // AccessTokenResponseHandler stores the token on this client before
        // delegating to the caller's LoginResponseHandler.
        final AccessTokenResponseHandler tokenResponseHandler = new AccessTokenResponseHandler(this, responseHandler);
        execute(new AppDotNetOAuthRequest(tokenResponseHandler, params, "access_token"));
    }

    /** Combines the stored grant params with the user's credentials and optional scope. */
    protected List<NameValuePair> getAuthenticationParams(String username, String password, String scope) {
        final List<NameValuePair> params = new ArrayList<NameValuePair>(authParams.size() + 3);
        params.addAll(authParams);
        params.add(new BasicNameValuePair("username", username));
        params.add(new BasicNameValuePair("password", password));
        if (scope != null)
            params.add(new BasicNameValuePair("scope", scope));
        return params;
    }
    /*
     * USER
     */

    /** Retrieves a user by ID (GET /users/{id}). */
    public void retrieveUser(String userId, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, userId));
    }

    public void retrieveUser(String userId, UserResponseHandler responseHandler) {
        retrieveUser(userId, null, responseHandler);
    }

    /** Retrieves the authenticated user (uses the special "me" ID). */
    public void retrieveCurrentUser(QueryParameters queryParameters, UserResponseHandler responseHandler) {
        retrieveUser("me", queryParameters, responseHandler);
    }

    public void retrieveCurrentUser(UserResponseHandler responseHandler) {
        retrieveCurrentUser(null, responseHandler);
    }

    /** Updates the authenticated user's profile (PUT /users/me). */
    public void updateCurrentUser(User user, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiJsonRequest(responseHandler, METHOD_PUT, user, queryParameters, ENDPOINT_USERS, "me"));
    }

    public void updateCurrentUser(User user, UserResponseHandler responseHandler) {
        updateCurrentUser(user, null, responseHandler);
    }

    /**
     * Uploads a new avatar image for the authenticated user.
     *
     * @param image  raw image bytes
     * @param offset start offset within the image array
     * @param count  number of bytes to upload from the offset
     */
    public void updateAvatar(byte[] image, int offset, int count, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiImageUploadRequest(responseHandler, "avatar", image, offset, count, queryParameters,
                ENDPOINT_USERS, "me/avatar"));
    }

    public void updateAvatar(byte[] image, int offset, int count, UserResponseHandler responseHandler) {
        updateAvatar(image, offset, count, null, responseHandler);
    }

    /** Uploads the entire image array as the new avatar. */
    public void updateAvatar(byte[] image, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        updateAvatar(image, 0, image.length, queryParameters, responseHandler);
    }

    public void updateAvatar(byte[] image, UserResponseHandler responseHandler) {
        updateAvatar(image, null, responseHandler);
    }

    /** Uploads a new cover image for the authenticated user; see updateAvatar for params. */
    public void updateCover(byte[] image, int offset, int count, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiImageUploadRequest(responseHandler, "cover", image, offset, count, queryParameters,
                ENDPOINT_USERS, "me/cover"));
    }

    public void updateCover(byte[] image, int offset, int count, UserResponseHandler responseHandler) {
        updateCover(image, offset, count, null, responseHandler);
    }

    public void updateCover(byte[] image, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        updateCover(image, 0, image.length, queryParameters, responseHandler);
    }

    public void updateCover(byte[] image, UserResponseHandler responseHandler) {
        updateCover(image, null, responseHandler);
    }
    // User relationship actions: each POSTs (create) or DELETEs (undo) the
    // corresponding sub-resource of /users/{id}.

    /** Follows a user (POST /users/{id}/follow). */
    public void followUser(String userId, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_POST, queryParameters, ENDPOINT_USERS, userId, "follow"));
    }

    public void followUser(String userId, UserResponseHandler responseHandler) {
        followUser(userId, null, responseHandler);
    }

    /** Unfollows a user (DELETE /users/{id}/follow). */
    public void unfollowUser(String userId, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_USERS, userId, "follow"));
    }

    public void unfollowUser(String userId, UserResponseHandler responseHandler) {
        unfollowUser(userId, null, responseHandler);
    }

    /** Mutes a user (POST /users/{id}/mute). */
    public void muteUser(String userId, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_POST, queryParameters, ENDPOINT_USERS, userId, "mute"));
    }

    public void muteUser(String userId, UserResponseHandler responseHandler) {
        muteUser(userId, null, responseHandler);
    }

    /** Unmutes a user (DELETE /users/{id}/mute). */
    public void unmuteUser(String userId, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_USERS, userId, "mute"));
    }

    public void unmuteUser(String userId, UserResponseHandler responseHandler) {
        unmuteUser(userId, null, responseHandler);
    }

    /** Blocks a user (POST /users/{id}/block). */
    public void blockUser(String userId, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_POST, queryParameters, ENDPOINT_USERS, userId, "block"));
    }

    public void blockUser(String userId, UserResponseHandler responseHandler) {
        blockUser(userId, null, responseHandler);
    }

    /** Unblocks a user (DELETE /users/{id}/block). */
    public void unblockUser(String userId, QueryParameters queryParameters, UserResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_USERS, userId, "block"));
    }

    public void unblockUser(String userId, UserResponseHandler responseHandler) {
        unblockUser(userId, null, responseHandler);
    }
    /**
     * Retrieves multiple users by a comma-separated ID string via the "ids"
     * query parameter. Note: mutates the caller-supplied queryParameters.
     */
    protected void retrieveUsers(String userIds, QueryParameters queryParameters, UserListResponseHandler responseHandler) {
        if (queryParameters == null)
            queryParameters = new QueryParameters();
        queryParameters.put("ids", userIds);
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS));
    }

    /** Retrieves multiple users by their ID strings. */
    public void retrieveUsersById(List<String> userIds, QueryParameters queryParameters, UserListResponseHandler responseHandler) {
        retrieveUsers(getIdString(userIds), queryParameters, responseHandler);
    }

    public void retrieveUsersById(List<String> userIds, UserListResponseHandler responseHandler) {
        retrieveUsersById(userIds, null, responseHandler);
    }

    /** Retrieves multiple users, taking the IDs from the given User objects. */
    public void retrieveUsers(List<User> users, QueryParameters queryParameters, UserListResponseHandler responseHandler) {
        retrieveUsers(getObjectIdString(users), queryParameters, responseHandler);
    }

    public void retrieveUsers(List<User> users, UserListResponseHandler responseHandler) {
        retrieveUsers(users, null, responseHandler);
    }

    /**
     * Searches for users matching the query (GET /users/search?q=...).
     * Note: mutates the caller-supplied queryParameters.
     */
    public void retrieveUsersWithSearchQuery(String query, QueryParameters queryParameters, UserListResponseHandler responseHandler) {
        if (queryParameters == null)
            queryParameters = new QueryParameters();
        queryParameters.put("q", query);
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, "search"));
    }

    public void retrieveUsersWithSearchQuery(String query, UserListResponseHandler responseHandler) {
        retrieveUsersWithSearchQuery(query, null, responseHandler);
    }
    /*
     * POST - CREATION
     */

    /** Creates a new post (POST /posts). */
    public void createPost(Post post, QueryParameters queryParameters, PostResponseHandler responseHandler) {
        execute(new AppDotNetApiJsonRequest(responseHandler, post, queryParameters, ENDPOINT_POSTS));
    }

    public void createPost(Post post, PostResponseHandler responseHandler) {
        createPost(post, null, responseHandler);
    }

    /*
     * POST - RETRIEVAL BY ID
     */

    /** Retrieves a single post by ID (GET /posts/{id}). */
    public void retrievePost(String postId, QueryParameters queryParameters, PostResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_POSTS, postId));
    }

    public void retrievePost(String postId, PostResponseHandler responseHandler) {
        retrievePost(postId, null, responseHandler);
    }

    public void retrievePosts(List<Post> posts, PostListResponseHandler responseHandler) {
        retrievePosts(posts, null, responseHandler);
    }

    /** Retrieves multiple posts, taking the IDs from the given Post objects. */
    public void retrievePosts(List<Post> posts, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePosts(getObjectIdString(posts), queryParameters, responseHandler);
    }

    public void retrievePostsById(List<String> postIds, PostListResponseHandler responseHandler) {
        retrievePostsById(postIds, null, responseHandler);
    }

    /** Retrieves multiple posts by their ID strings. */
    public void retrievePostsById(List<String> postIds, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePosts(getIdString(postIds), queryParameters, responseHandler);
    }

    /**
     * Retrieves multiple posts by a comma-separated ID string via the "ids"
     * query parameter. Note: mutates the caller-supplied queryParameters.
     */
    protected void retrievePosts(String postIds, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        if(queryParameters == null)
            queryParameters = new QueryParameters();
        queryParameters.put("ids", postIds);
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_POSTS));
    }
    /*
     * POST - RETRIEVAL BY USER
     */
    // All overloads funnel into retrievePostsForUser(String, ...), which hits
    // GET /users/{id}/posts; "me" selects the authenticated user.

    public void retrievePostsForUser(User user, PostListResponseHandler responseHandler) {
        retrievePostsForUser(user.getId(), null, responseHandler);
    }

    public void retrievePostsForUser(User user, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePostsForUser(user.getId(), queryParameters, responseHandler);
    }

    public void retrievePostsForUserId(String userId, PostListResponseHandler responseHandler) {
        retrievePostsForUser(userId, null, responseHandler);
    }

    public void retrievePostsForUserId(String userId, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePostsForUser(userId, queryParameters, responseHandler);
    }

    public void retrievePostsForCurrentUser(PostListResponseHandler responseHandler) {
        retrievePostsForUser("me", null, responseHandler);
    }

    public void retrievePostsForCurrentUser(QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePostsForUser("me", queryParameters, responseHandler);
    }

    /** Retrieves the posts created by the given user (GET /users/{id}/posts). */
    protected void retrievePostsForUser(String userString, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, userString, ENDPOINT_POSTS));
    }
    /*
     * POST - RETRIEVE STARS
     */
    // All overloads funnel into retrieveStarredPostsForUser(String, ...),
    // which hits GET /users/{id}/stars; "me" selects the authenticated user.

    public void retrieveStarredPostsForUser(User user, PostListResponseHandler responseHandler) {
        retrieveStarredPostsForUser(user.getId(), null, responseHandler);
    }

    public void retrieveStarredPostsForUser(User user, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrieveStarredPostsForUser(user.getId(), queryParameters, responseHandler);
    }

    public void retrieveStarredPostsForUserId(String userId, PostListResponseHandler responseHandler) {
        retrieveStarredPostsForUser(userId, null, responseHandler);
    }

    public void retrieveStarredPostsForUserId(String userId, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrieveStarredPostsForUser(userId, queryParameters, responseHandler);
    }

    public void retrieveStarredPostsForCurrentUser(PostListResponseHandler responseHandler) {
        retrieveStarredPostsForUser("me", null, responseHandler);
    }

    public void retrieveStarredPostsForCurrentUser(QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrieveStarredPostsForUser("me", queryParameters, responseHandler);
    }

    /** Retrieves the posts starred by the given user (GET /users/{id}/stars). */
    protected void retrieveStarredPostsForUser(String userString, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, userString, ENDPOINT_STARS));
    }
    /*
     * POST - RETRIEVE MENTIONS
     */
    // All overloads funnel into retrievePostsMentioningUser(String, ...),
    // which hits GET /users/{id}/mentions; "me" selects the authenticated user.

    public void retrievePostsMentioningUser(User user, PostListResponseHandler responseHandler) {
        retrievePostsMentioningUser(user.getId(), null, responseHandler);
    }

    public void retrievePostsMentioningUser(User user, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePostsMentioningUser(user.getId(), queryParameters, responseHandler);
    }

    public void retrievePostsMentioningUserWithId(String userId, PostListResponseHandler responseHandler) {
        retrievePostsMentioningUser(userId, null, responseHandler);
    }

    public void retrievePostsMentioningUserWithId(String userId, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePostsMentioningUser(userId, queryParameters, responseHandler);
    }

    public void retrievePostsMentioningCurrentUser(PostListResponseHandler responseHandler) {
        retrievePostsMentioningUser("me", null, responseHandler);
    }

    public void retrievePostsMentioningCurrentUser(QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePostsMentioningUser("me", queryParameters, responseHandler);
    }

    /** Retrieves the posts mentioning the given user (GET /users/{id}/mentions). */
    protected void retrievePostsMentioningUser(String userString, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, userString, ENDPOINT_MENTIONS));
    }
    /*
     * POST - RETRIEVE BY HASHTAG
     */

    public void retrievePostsWithHashtag(Entities.Hashtag hashtag, PostListResponseHandler responseHandler) {
        retrievePostsWithHashtag(hashtag.getName(), null, responseHandler);
    }

    public void retrievePostsWithHashtag(Entities.Hashtag hashtag, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrievePostsWithHashtag(hashtag.getName(), queryParameters, responseHandler);
    }

    public void retrievePostsWithHashtag(String hashtag, PostListResponseHandler responseHandler) {
        retrievePostsWithHashtag(hashtag, null, responseHandler);
    }

    /**
     * Retrieves posts tagged with the given hashtag (GET /posts/tag/{hashtag}).
     *
     * @param hashtag the tag name without the leading '#'
     */
    public void retrievePostsWithHashtag(String hashtag, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_POSTS, "tag", hashtag));
    }

    /*
     * POST - RETRIEVE REPLIES
     */

    public void retrieveRepliesToPost(Post post, PostListResponseHandler responseHandler) {
        retrieveRepliesToPost(post.getId(), null, responseHandler);
    }

    public void retrieveRepliesToPost(Post post, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        retrieveRepliesToPost(post.getId(), queryParameters, responseHandler);
    }

    public void retrieveRepliesToPost(String postId, PostListResponseHandler responseHandler) {
        retrieveRepliesToPost(postId, null, responseHandler);
    }

    /** Retrieves the replies to a post (GET /posts/{id}/replies). */
    public void retrieveRepliesToPost(String postId, QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_POSTS, postId, "replies"));
    }
    /*
     * POST - DELETION
     */
    // Note: this section comment originally sat between the two overloads;
    // moved above the primary overload for clarity.

    /** Deletes a post owned by the authenticated user (DELETE /posts/{id}). */
    public void deletePost(String postId, QueryParameters queryParameters, PostResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_POSTS, postId));
    }

    public void deletePost(String postId, PostResponseHandler responseHandler) {
        deletePost(postId, null, responseHandler);
    }
    /*
     * POST - REPOST
     */

    /** Reposts a post (POST /posts/{id}/repost). */
    public void repostPost(String postId, QueryParameters queryParameters, PostResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_POST, queryParameters, ENDPOINT_POSTS, postId, "repost"));
    }

    public void repostPost(String postId, PostResponseHandler responseHandler) {
        repostPost(postId, null, responseHandler);
    }

    /** Removes a repost (DELETE /posts/{id}/repost). */
    public void unrepostPost(String postId, QueryParameters queryParameters, PostResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_POSTS, postId, "repost"));
    }

    public void unrepostPost(String postId, PostResponseHandler responseHandler) {
        unrepostPost(postId, null, responseHandler);
    }

    /*
     * POST - STAR
     */

    /** Stars a post (POST /posts/{id}/star). */
    public void starPost(String postId, QueryParameters queryParameters, PostResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_POST, queryParameters, ENDPOINT_POSTS, postId, "star"));
    }

    public void starPost(String postId, PostResponseHandler responseHandler) {
        starPost(postId, null, responseHandler);
    }

    /** Removes a star (DELETE /posts/{id}/star). */
    public void unstarPost(String postId, QueryParameters queryParameters, PostResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_POSTS, postId, "star"));
    }

    public void unstarPost(String postId, PostResponseHandler responseHandler) {
        unstarPost(postId, null, responseHandler);
    }
    /*
     * POST - PERSONALIZED STREAM
     */

    public void retrievePersonalizedStream(PostListResponseHandler responseHandler) {
        retrievePersonalizedStream(null, responseHandler);
    }

    /** Retrieves the authenticated user's personalized stream (GET /posts/stream). */
    public void retrievePersonalizedStream(QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_POSTS, "stream"));
    }

    /*
     * POST - UNIFIED STREAM
     */

    public void retrieveUnifiedStream(PostListResponseHandler responseHandler) {
        retrieveUnifiedStream(null, responseHandler);
    }

    /** Retrieves the authenticated user's unified stream (GET /posts/stream/unified). */
    public void retrieveUnifiedStream(QueryParameters queryParameters, PostListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_POSTS, "stream", "unified"));
    }
    /*
     * CHANNEL
     */

    /** Retrieves channels the authenticated user is subscribed to (GET /channels). */
    public void retrieveCurrentUserSubscribedChannels(QueryParameters queryParameters, ChannelListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_CHANNELS));
    }

    public void retrieveCurrentUserSubscribedChannels(ChannelListResponseHandler responseHandler) {
        retrieveCurrentUserSubscribedChannels(null, responseHandler);
    }

    /** Creates a new channel (POST /channels). */
    public void createChannel(Channel channel, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        execute(new AppDotNetApiJsonRequest(responseHandler, channel, queryParameters, ENDPOINT_CHANNELS));
    }

    public void createChannel(Channel channel, ChannelResponseHandler responseHandler) {
        createChannel(channel, null, responseHandler);
    }

    /** Retrieves a channel by ID (GET /channels/{id}). */
    public void retrieveChannel(String channelId, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_CHANNELS, channelId));
    }

    public void retrieveChannel(String channelId, ChannelResponseHandler responseHandler) {
        retrieveChannel(channelId, null, responseHandler);
    }

    /**
     * Retrieves multiple channels by a comma-separated ID string via the
     * "ids" query parameter. Note: mutates the caller-supplied queryParameters.
     */
    protected void retrieveChannels(String channelIds, QueryParameters queryParameters, ChannelListResponseHandler responseHandler) {
        if (queryParameters == null)
            queryParameters = new QueryParameters();
        queryParameters.put("ids", channelIds);
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_CHANNELS));
    }

    public void retrieveChannelsById(List<String> channelIds, QueryParameters queryParameters, ChannelListResponseHandler responseHandler) {
        retrieveChannels(getIdString(channelIds), queryParameters, responseHandler);
    }

    public void retrieveChannelsById(List<String> channelIds, ChannelListResponseHandler responseHandler) {
        retrieveChannelsById(channelIds, null, responseHandler);
    }

    /** Retrieves multiple channels, taking the IDs from the given Channel objects. */
    public void retrieveChannels(List<Channel> channels, QueryParameters queryParameters, ChannelListResponseHandler responseHandler) {
        retrieveChannels(getObjectIdString(channels), queryParameters, responseHandler);
    }

    public void retrieveChannels(List<Channel> channels, ChannelListResponseHandler responseHandler) {
        retrieveChannels(channels, null, responseHandler);
    }

    /**
     * Searches channels (GET /channels/search). Despite the name, this
     * overload hits the search endpoint; query terms come from queryParameters.
     */
    public void retrieveChannels(QueryParameters queryParameters, ChannelListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_CHANNELS, "search"));
    }

    public void retrieveChannelsWithSearchQuery(String query, ChannelListResponseHandler responseHandler) {
        retrieveChannelsWithSearchQuery(query, null, responseHandler);
    }

    /**
     * Searches channels matching the query (adds "q" to the parameters).
     * Note: mutates the caller-supplied queryParameters.
     */
    public void retrieveChannelsWithSearchQuery(String query, QueryParameters queryParameters, ChannelListResponseHandler responseHandler) {
        if (queryParameters == null)
            queryParameters = new QueryParameters();
        queryParameters.put("q", query);
        retrieveChannels(queryParameters, responseHandler);
    }

    /** Retrieves channels created by the authenticated user (GET /users/me/channels). */
    public void retrieveCurrentUserChannels(QueryParameters queryParameters, ChannelListResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, "me", ENDPOINT_CHANNELS));
    }

    public void retrieveCurrentUserChannels(ChannelListResponseHandler responseHandler) {
        retrieveCurrentUserChannels(null, responseHandler);
    }

    /** Retrieves the count of unread private-message channels (GET /users/me/channels/pm/num_unread). */
    public void retrieveUnreadPrivateMessageChannelCount(CountResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, null, ENDPOINT_USERS, "me", ENDPOINT_CHANNELS, "pm/num_unread"));
    }

    /** Updates a channel; the target ID is taken from the channel itself (PUT /channels/{id}). */
    public void updateChannel(Channel channel, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        execute(new AppDotNetApiJsonRequest(responseHandler, METHOD_PUT, channel, queryParameters, ENDPOINT_CHANNELS, channel.getId()));
    }

    public void updateChannel(Channel channel, ChannelResponseHandler responseHandler) {
        updateChannel(channel, null, responseHandler);
    }
    // Channel subscription and mute actions: POST creates, DELETE undoes the
    // corresponding sub-resource of /channels/{id}.

    /** Subscribes to a channel (POST /channels/{id}/subscribe). */
    public void subscribeChannel(String channelId, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_POST, queryParameters, ENDPOINT_CHANNELS, channelId, "subscribe"));
    }

    public void subscribeChannel(String channelId, ChannelResponseHandler responseHandler) {
        subscribeChannel(channelId, null, responseHandler);
    }

    public void subscribeChannel(Channel channel, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        subscribeChannel(channel.getId(), queryParameters, responseHandler);
    }

    public void subscribeChannel(Channel channel, ChannelResponseHandler responseHandler) {
        subscribeChannel(channel, null, responseHandler);
    }

    /** Unsubscribes from a channel (DELETE /channels/{id}/subscribe). */
    public void unsubscribeChannel(String channelId, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_CHANNELS, channelId, "subscribe"));
    }

    public void unsubscribeChannel(String channelId, ChannelResponseHandler responseHandler) {
        unsubscribeChannel(channelId, null, responseHandler);
    }

    public void unsubscribeChannel(Channel channel, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        unsubscribeChannel(channel.getId(), queryParameters, responseHandler);
    }

    public void unsubscribeChannel(Channel channel, ChannelResponseHandler responseHandler) {
        unsubscribeChannel(channel, null, responseHandler);
    }

    /** Mutes a channel (POST /channels/{id}/mute). */
    public void muteChannel(String channelId, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_POST, queryParameters, ENDPOINT_CHANNELS, channelId, "mute"));
    }

    public void muteChannel(String channelId, ChannelResponseHandler responseHandler) {
        muteChannel(channelId, null, responseHandler);
    }

    public void muteChannel(Channel channel, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        muteChannel(channel.getId(), queryParameters, responseHandler);
    }

    public void muteChannel(Channel channel, ChannelResponseHandler responseHandler) {
        muteChannel(channel, null, responseHandler);
    }

    /** Unmutes a channel (DELETE /channels/{id}/mute). */
    public void unmuteChannel(String channelId, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_CHANNELS, channelId, "mute"));
    }

    public void unmuteChannel(String channelId, ChannelResponseHandler responseHandler) {
        unmuteChannel(channelId, null, responseHandler);
    }

    public void unmuteChannel(Channel channel, QueryParameters queryParameters, ChannelResponseHandler responseHandler) {
        unmuteChannel(channel.getId(), queryParameters, responseHandler);
    }

    public void unmuteChannel(Channel channel, ChannelResponseHandler responseHandler) {
        unmuteChannel(channel, null, responseHandler);
    }
/*
* MESSAGE
*/
/**
 * Retrieves the messages in a channel (GET /channels/{channelId}/messages).
 *
 * @param channelId       the id of the channel whose messages are fetched
 * @param queryParameters optional paging/filter parameters, may be null
 * @param responseHandler callback receiving the message list
 */
public void retrieveMessagesInChannel(String channelId, QueryParameters queryParameters, MessageListResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_CHANNELS, channelId, ENDPOINT_MESSAGES));
}
/** Convenience overload with no query parameters. */
public void retrieveMessagesInChannel(String channelId, MessageListResponseHandler responseHandler) {
retrieveMessagesInChannel(channelId, null, responseHandler);
}
/** Convenience overload taking a {@link Channel} instead of an id. */
public void retrieveMessagesInChannel(Channel channel, QueryParameters queryParameters, MessageListResponseHandler responseHandler) {
retrieveMessagesInChannel(channel.getId(), queryParameters, responseHandler);
}
/** Convenience overload taking a {@link Channel} and no query parameters. */
public void retrieveMessagesInChannel(Channel channel, MessageListResponseHandler responseHandler) {
retrieveMessagesInChannel(channel, null, responseHandler);
}
/**
 * Creates a message in a channel by POSTing the message as JSON
 * (POST /channels/{channelId}/messages).
 *
 * @param channelId       the id of the channel to post into
 * @param message         the message body to send
 * @param queryParameters optional query parameters, may be null
 * @param responseHandler callback receiving the created message
 */
public void createMessage(String channelId, Message message, QueryParameters queryParameters, MessageResponseHandler responseHandler) {
execute(new AppDotNetApiJsonRequest(responseHandler, message, queryParameters, ENDPOINT_CHANNELS, channelId, ENDPOINT_MESSAGES));
}
/** Convenience overload with no query parameters. */
public void createMessage(String channelId, Message message, MessageResponseHandler responseHandler) {
createMessage(channelId, message, null, responseHandler);
}
/** Convenience overload taking a {@link Channel} instead of an id. */
public void createMessage(Channel channel, Message message, QueryParameters queryParameters, MessageResponseHandler responseHandler) {
createMessage(channel.getId(), message, queryParameters, responseHandler);
}
/** Convenience overload taking a {@link Channel} and no query parameters. */
public void createMessage(Channel channel, Message message, MessageResponseHandler responseHandler) {
createMessage(channel, message, null, responseHandler);
}
/**
 * Creates a private message by posting to the special "pm" channel id; the
 * server resolves (or creates) the actual channel from the message's
 * destinations.
 *
 * @param message         the private message; must carry at least one destination
 * @param queryParameters optional query parameters, may be null
 * @param responseHandler callback receiving the created message
 * @throws IllegalArgumentException if the message has no destinations
 */
public void createPrivateMessage(PrivateMessage message, QueryParameters queryParameters, MessageResponseHandler responseHandler) {
    // Fail fast locally: the "pm" endpoint cannot route a message without
    // destinations, so also reject an empty list (not just null) before
    // wasting a network round-trip. Assumes getDestinations() returns a
    // Collection — TODO confirm against PrivateMessage.
    if (message.getDestinations() == null || message.getDestinations().isEmpty())
        throw new IllegalArgumentException("private message must specify destinations");
    createMessage("pm", message, queryParameters, responseHandler);
}
/** Convenience overload with no query parameters. */
public void createPrivateMessage(PrivateMessage message, MessageResponseHandler responseHandler) {
    createPrivateMessage(message, null, responseHandler);
}
/**
 * Retrieves a single message (GET /channels/{channelId}/messages/{messageId}).
 *
 * @param channelId       the id of the channel containing the message
 * @param messageId       the id of the message to fetch
 * @param queryParameters optional query parameters, may be null
 * @param responseHandler callback receiving the message
 */
public void retrieveMessage(String channelId, String messageId, QueryParameters queryParameters, MessageResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_CHANNELS, channelId, ENDPOINT_MESSAGES, messageId));
}
/** Convenience overload with no query parameters. */
public void retrieveMessage(String channelId, String messageId, MessageResponseHandler responseHandler) {
retrieveMessage(channelId, messageId, null, responseHandler);
}
/** Convenience overload taking a {@link Channel} instead of an id. */
public void retrieveMessage(Channel channel, String messageId, QueryParameters queryParameters, MessageResponseHandler responseHandler) {
retrieveMessage(channel.getId(), messageId, queryParameters, responseHandler);
}
/** Convenience overload taking a {@link Channel} and no query parameters. */
public void retrieveMessage(Channel channel, String messageId, MessageResponseHandler responseHandler) {
retrieveMessage(channel, messageId, null, responseHandler);
}
/**
 * Shared implementation for fetching messages by a pre-joined id string:
 * adds the ids as the "ids" query parameter and issues
 * GET /channels/messages.
 * <p>
 * NOTE: when {@code queryParameters} is non-null it is mutated in place
 * (the "ids" entry is added to the caller's object).
 *
 * @param messageIds      comma-separated message ids
 * @param queryParameters optional query parameters, may be null
 * @param responseHandler callback receiving the message list
 */
protected void retrieveMessages(String messageIds, QueryParameters queryParameters, MessageListResponseHandler responseHandler) {
if (queryParameters == null)
queryParameters = new QueryParameters();
queryParameters.put("ids", messageIds);
execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_CHANNELS, ENDPOINT_MESSAGES));
}
/**
 * Retrieves multiple messages by their ids in a single request.
 *
 * @param messageIds      the message ids to fetch
 * @param queryParameters optional query parameters, may be null
 * @param responseHandler callback receiving the message list
 */
public void retrieveMessagesById(List<String> messageIds, QueryParameters queryParameters, MessageListResponseHandler responseHandler) {
retrieveMessages(getIdString(messageIds), queryParameters, responseHandler);
}
/** Convenience overload with no query parameters. */
public void retrieveMessagesById(List<String> messageIds, MessageListResponseHandler responseHandler) {
retrieveMessagesById(messageIds, null, responseHandler);
}
/** Re-fetches the given messages, extracting their ids. */
public void retrieveMessages(List<Message> messages, QueryParameters queryParameters, MessageListResponseHandler responseHandler) {
retrieveMessages(getObjectIdString(messages), queryParameters, responseHandler);
}
/** Convenience overload with no query parameters. */
public void retrieveMessages(List<Message> messages, MessageListResponseHandler responseHandler) {
retrieveMessages(messages, null, responseHandler);
}
/**
 * Retrieves the authenticated user's messages (GET /users/me/messages).
 *
 * @param queryParameters optional paging/filter parameters, may be null
 * @param responseHandler callback receiving the message list
 */
public void retrieveCurrentUserMessages(QueryParameters queryParameters, MessageListResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, "me", ENDPOINT_MESSAGES));
}
/** Convenience overload with no query parameters. */
public void retrieveCurrentUserMessages(MessageListResponseHandler responseHandler) {
retrieveCurrentUserMessages(null, responseHandler);
}
/**
 * Deletes a message (DELETE /channels/{channelId}/messages/{messageId}).
 *
 * @param channelId       the id of the channel containing the message
 * @param messageId       the id of the message to delete
 * @param queryParameters optional query parameters, may be null
 * @param responseHandler callback receiving the deleted message
 */
public void deleteMessage(String channelId, String messageId, QueryParameters queryParameters, MessageResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, queryParameters, ENDPOINT_CHANNELS, channelId, ENDPOINT_MESSAGES, messageId));
}
/** Convenience overload with no query parameters. */
public void deleteMessage(String channelId, String messageId, MessageResponseHandler responseHandler) {
deleteMessage(channelId, messageId, null, responseHandler);
}
/** Convenience overload deriving channel and message ids from the message itself. */
public void deleteMessage(Message message, QueryParameters queryParameters, MessageResponseHandler responseHandler) {
deleteMessage(message.getChannelId(), message.getId(), queryParameters, responseHandler);
}
/** Convenience overload taking only the message and no query parameters. */
public void deleteMessage(Message message, MessageResponseHandler responseHandler) {
deleteMessage(message, null, responseHandler);
}
/*
 * PLACE
 */
/**
 * Retrieves a place by its Factual id (GET /places/{factualId}).
 *
 * @param factualId       the Factual id of the place
 * @param responseHandler callback receiving the place
 */
public void retrievePlace(String factualId, PlaceResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, null, ENDPOINT_PLACES, factualId));
}
/**
 * Searches for places (GET /places/search) using the given search parameters.
 *
 * @param queryParameters search parameters (e.g. location/query terms)
 * @param responseHandler callback receiving the matching places
 */
public void retrievePlacesWithSearchQuery(PlaceQueryParameters queryParameters, PlaceListResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_PLACES, "search"));
}
/*
 * STREAM MARKER
 */
/**
 * Updates a single stream marker by POSTing it as JSON to /posts/marker.
 *
 * @param streamMarker    the marker to update
 * @param responseHandler callback receiving the updated marker
 */
public void updateStreamMarker(StreamMarker streamMarker, StreamMarkerResponseHandler responseHandler) {
execute(new AppDotNetApiJsonRequest(responseHandler, streamMarker, null, ENDPOINT_POSTS, "marker"));
}
/**
 * Updates several stream markers in one request (same endpoint, list payload).
 *
 * @param streamMarkers   the markers to update
 * @param responseHandler callback receiving the updated markers
 */
public void updateStreamMarkers(StreamMarkerList streamMarkers, StreamMarkerListResponseHandler responseHandler) {
execute(new AppDotNetApiJsonRequest(responseHandler, streamMarkers, null, ENDPOINT_POSTS, "marker"));
}
/*
 * FILE
 */
/** Convenience overload with no query parameters. */
public void retrieveCurrentUserFiles(FileListResponseHandler responseHandler) {
retrieveCurrentUserFiles(null, responseHandler);
}
/**
 * Retrieves the authenticated user's files (GET /users/me/files).
 *
 * @param queryParameters optional paging/filter parameters, may be null
 * @param responseHandler callback receiving the file list
 */
public void retrieveCurrentUserFiles(QueryParameters queryParameters, FileListResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_USERS, "me", ENDPOINT_FILES));
}
/** Re-fetches the given files (by their ids) with no query parameters. */
public void retrieveFiles(List<File> files, FileListResponseHandler responseHandler) {
retrieveFiles(files, null, responseHandler);
}
/** Re-fetches the given files, extracting their ids. */
public void retrieveFiles(List<File> files, QueryParameters queryParameters, FileListResponseHandler responseHandler) {
retrieveFiles(getObjectIdString(files), queryParameters, responseHandler);
}
/** Convenience overload with no query parameters. */
public void retrieveFilesById(List<String> fileIds, FileListResponseHandler responseHandler) {
retrieveFilesById(fileIds, null, responseHandler);
}
/** Retrieves multiple files by their ids in a single request. */
public void retrieveFilesById(List<String> fileIds, QueryParameters queryParameters, FileListResponseHandler responseHandler) {
retrieveFiles(getIdString(fileIds), queryParameters, responseHandler);
}
/**
 * Shared implementation for fetching files by a pre-joined id string: adds
 * the ids as the "ids" query parameter and issues GET /files.
 * <p>
 * NOTE: when {@code queryParameters} is non-null it is mutated in place.
 *
 * @param fileIds         comma-separated file ids
 * @param queryParameters optional query parameters, may be null
 * @param responseHandler callback receiving the file list
 */
protected void retrieveFiles(String fileIds, QueryParameters queryParameters, FileListResponseHandler responseHandler) {
if (queryParameters == null)
queryParameters = new QueryParameters();
queryParameters.put("ids", fileIds);
execute(new AppDotNetApiRequest(responseHandler, queryParameters, ENDPOINT_FILES));
}
/**
 * Uploads a new file (multipart POST /files).
 *
 * @param file            file metadata
 * @param fileData        raw file contents
 * @param mimeType        MIME type of the contents
 * @param responseHandler callback receiving the created file
 */
public void createFile(File file, byte[] fileData, String mimeType, FileResponseHandler responseHandler) {
execute(new AppDotNetApiFileUploadRequest(responseHandler, file, fileData, mimeType, ENDPOINT_FILES));
}
/**
 * Deletes a file (DELETE /files/{fileId}).
 *
 * @param fileId          the id of the file to delete
 * @param responseHandler callback receiving the deleted file
 */
public void deleteFile(String fileId, FileResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, METHOD_DELETE, null, ENDPOINT_FILES, fileId));
}
/*
 * TOKEN
 */
/**
 * Retrieves metadata about the current access token (GET /token).
 *
 * @param responseHandler callback receiving the token details
 */
public void retrieveCurrentToken(TokenResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, null, "token"));
}
/*
 * CONFIGURATION
 */
/**
 * Retrieves the server-side API configuration (GET /config).
 *
 * @param responseHandler callback receiving the configuration
 */
public void retrieveConfiguration(ConfigurationResponseHandler responseHandler) {
execute(new AppDotNetApiRequest(responseHandler, null, ENDPOINT_CONFIGURATION));
}
/*
 * MISC
 */
/**
 * Joins a list of ids into a single comma-separated string suitable for an
 * "ids" query parameter.
 *
 * @param ids the ids to join; may be empty
 * @return the ids separated by commas, or "" for an empty list
 */
protected String getIdString(List<String> ids) {
    // Guard the empty case explicitly: without it, substring(0, -1) on an
    // empty buffer throws StringIndexOutOfBoundsException.
    if (ids.isEmpty())
        return "";
    final StringBuilder buffer = new StringBuilder(ids.size() * ID_LENGTH);
    for (final String id : ids) {
        buffer.append(id);
        buffer.append(',');
    }
    // Drop the trailing comma appended by the loop.
    return buffer.substring(0, buffer.length() - 1);
}
/**
 * Extracts the id of every object in the list and joins them into the
 * comma-separated form produced by {@link #getIdString(List)}.
 *
 * @param objects the annotatable objects whose ids are collected
 * @return the ids separated by commas
 */
protected String getObjectIdString(List<? extends Annotatable> objects) {
    final ArrayList<String> collected = new ArrayList<String>(objects.size());
    for (int i = 0; i < objects.size(); i++) {
        collected.add(objects.get(i).getId());
    }
    return getIdString(collected);
}
/**
 * Runs the given request on a background {@code AppDotNetClientTask}.
 *
 * @param request the request to execute
 * @throws IllegalStateException if the request requires authentication but
 *         no token has been set on this client
 */
protected void execute(AppDotNetRequest request) {
if (request.isAuthenticated() && !hasToken()) {
throw new IllegalStateException("authentication token not set");
}
final AppDotNetClientTask task = new AppDotNetClientTask(authHeader, languageHeader, sslSocketFactory);
// AsyncTask was changed in Honeycomb to execute in serial by default, at which time
// executeOnExecutor was added to specify parallel execution.
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, request);
} else {
task.execute(request);
}
}
}
|
package fr.tvbarthel.apps.adaptilo.engine;
import android.content.Context;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Message;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
/**
 * Engine used to proceed clap detection.
 * <p>
 * Records from the microphone with a {@link MediaRecorder} and polls
 * {@link MediaRecorder#getMaxAmplitude()} on a background thread; a sudden
 * amplitude jump above {@link #AMPLITUDE_THRESHOLD} is reported as a clap on
 * the UI thread through a {@link ClapListener}.
 */
public class ClapEngine {
    /**
     * Log cat.
     */
    private static final String TAG = ClapEngine.class.getSimpleName();
    /**
     * Name of the temp file used while recording.
     */
    private static final String RECORDER_FILE_NAME = "clap.3gp";
    /**
     * Delay in milli seconds between two getMaxAmplitude.
     */
    private static final int TICKING_TIME_IN_MILLI = 200;
    /**
     * Minimum amplitude delta between two ticks interpreted as a clap.
     */
    private static final int AMPLITUDE_THRESHOLD = 18000;
    /**
     * MediaRecorder used to detected clap from microphone.
     */
    private MediaRecorder mMediaRecorder;
    /**
     * Temp file used while recording.
     */
    private File mTempFile;
    /**
     * True while the engine is running. Volatile: written on the caller's
     * thread (start/stop) and read by the detection thread.
     */
    private volatile boolean mRecording;
    /**
     * True when the engine is paused. Volatile for the same reason.
     */
    private volatile boolean mIsPaused;
    /**
     * Last max amplitude recorded, or -1 before the first sample.
     */
    private int mLastMaxAmplitude;
    /**
     * Thread used to perform clap detection off the ui thread.
     */
    private Thread mThread;
    /**
     * Handler used to catch clap event from non ui Thread which perform the detection.
     */
    private ClapHandler mClapHandler;

    /**
     * @param context  used to resolve the external files directory for the temp file
     * @param listener notified (weakly referenced) when a clap is detected
     */
    public ClapEngine(Context context, ClapListener listener) {
        mTempFile = new File(context.getExternalFilesDir(null), RECORDER_FILE_NAME);
        mMediaRecorder = new MediaRecorder();
        initMediaRecorder();
        initThread();
        //Handler attach to the ui thread.
        mClapHandler = new ClapHandler(listener);
        mLastMaxAmplitude = -1;
    }

    /**
     * Start the Clap Engine: begins recording and launches the detection
     * thread. Safe to call again after {@link #stop()}.
     */
    public void start() {
        if (mMediaRecorder == null) {
            mMediaRecorder = new MediaRecorder();
            initMediaRecorder();
        }
        if (!mRecording) {
            mMediaRecorder.start();
            mRecording = true;
        }
        if (mThread == null) {
            initThread();
        }
        // A Thread instance may only be started once: guard against a second
        // call to start() throwing IllegalThreadStateException.
        if (!mThread.isAlive()) {
            mThread.start();
        }
    }

    /**
     * Resume the Clap Engine.
     */
    public void resume() {
        mIsPaused = false;
    }

    /**
     * Pause the Clap Engine (recording continues, detection is skipped).
     */
    public void pause() {
        mIsPaused = true;
    }

    /**
     * Stop the Clap Engine and release the recorder. Safe to call even if the
     * engine was never started or was already stopped.
     */
    public void stop() {
        // Flip the flag first so the detection thread's loop exits.
        mRecording = false;
        if (mMediaRecorder != null) {
            mMediaRecorder.stop();
            mMediaRecorder.reset();
            mMediaRecorder.release();
            mMediaRecorder = null;
        }
        mTempFile.delete();
        if (mThread != null) {
            mThread.interrupt();
            mThread = null;
        }
        mIsPaused = true;
    }

    /**
     * Used to initialize the media recorder.
     */
    private void initMediaRecorder() {
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        mMediaRecorder.setOutputFile(mTempFile.getAbsolutePath());
        try {
            mMediaRecorder.prepare();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Used to initialize the Thread on which clap processing while be achieved.
     */
    private void initThread() {
        mThread = new Thread() {
            @Override
            public void run() {
                super.run();
                while (mRecording) {
                    // Capture the field locally: stop() may null it out from
                    // another thread between the loop check and the call.
                    final MediaRecorder recorder = mMediaRecorder;
                    if (recorder == null) {
                        return;
                    }
                    if (!mIsPaused) {
                        processNewAmplitude(recorder.getMaxAmplitude());
                    }
                    try {
                        sleep(TICKING_TIME_IN_MILLI);
                    } catch (InterruptedException e) {
                        // stop() interrupts this thread: restore the
                        // interrupt status and exit instead of spinning on.
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        };
    }

    /**
     * Process the max amplitude since the last call in order to detect if the user clap his hands.
     *
     * @param newAmplitude new amplitude from {@link android.media.MediaRecorder#getMaxAmplitude()}
     */
    private void processNewAmplitude(int newAmplitude) {
        if (mLastMaxAmplitude == -1) {
            // First sample: just establish the baseline.
            mLastMaxAmplitude = newAmplitude;
        } else {
            int delta = Math.abs(mLastMaxAmplitude - newAmplitude);
            if (delta > AMPLITUDE_THRESHOLD) {
                //send message to the ui thread
                Message message = mClapHandler.obtainMessage(ClapHandler.CLAP_DETECTED);
                message.sendToTarget();
                mLastMaxAmplitude = 0;
            } else {
                mLastMaxAmplitude = newAmplitude;
            }
        }
    }

    /**
     * Handler which will be attached to the ui thread in order to encapsulate event propagation and
     * potential ui modification
     */
    private static class ClapHandler extends Handler {
        /**
         * "What" used when clap detected event is catch.
         */
        protected static final int CLAP_DETECTED = 0x00000010;
        /**
         * Listener used to catch clap event. Weakly referenced so the handler
         * does not keep its owner alive.
         */
        private final WeakReference<ClapListener> mListener;

        public ClapHandler(ClapListener listener) {
            mListener = new WeakReference<ClapListener>(listener);
        }

        @Override
        public void handleMessage(Message msg) {
            super.handleMessage(msg);
            switch (msg.what) {
                case CLAP_DETECTED:
                    //propagate clap event on ui thread; the listener may have
                    //been garbage collected, so check the weak reference.
                    final ClapListener listener = mListener.get();
                    if (listener != null) {
                        listener.onClapDetected();
                    }
                    break;
            }
        }
    }
}
|
package com.lb.auto_fit_textview;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.os.Build;
import android.text.Layout.Alignment;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.util.AttributeSet;
import android.util.SparseIntArray;
import android.util.TypedValue;
import android.widget.TextView;
/**
 * A {@link TextView} that automatically shrinks (or grows, up to the size set
 * via {@link #setTextSize}) its font so the text fits the available space,
 * using a binary search over integer pixel sizes.
 */
public class AutoResizeTextView extends TextView
{
    /** Sentinel meaning "no maximum line count". */
    private static final int NO_LINE_LIMIT = -1;
    /** Reusable rect describing the space the text must fit into. */
    private final RectF _availableSpaceRect = new RectF();
    /** Cache of text length -> computed size, see {@link #setEnableSizeCache}. */
    private final SparseIntArray _textCachedSizes = new SparseIntArray();
    private final SizeTester _sizeTester;
    private float _maxTextSize;
    private float _spacingMult = 1.0f;
    private float _spacingAdd = 0.0f;
    private float _minTextSize;
    private int _widthLimit;
    private int _maxLines;
    private boolean _enableSizeCache = true;
    // Guards adjustTextSize() against calls made from the superclass
    // constructor before this class's fields are ready.
    private boolean _initiallized = false;
    private TextPaint paint;

    private interface SizeTester
    {
        /**
         * @param suggestedSize
         *            Size of text to be tested
         * @param availableSpace
         *            available space in which text must fit
         * @return an integer < 0 if after applying {@code suggestedSize} to
         *         text, it takes less space than {@code availableSpace}, > 0
         *         otherwise
         */
        public int onTestSize(int suggestedSize, RectF availableSpace);
    }

    public AutoResizeTextView(final Context context)
    {
        this(context, null, 0);
    }

    public AutoResizeTextView(final Context context, final AttributeSet attrs)
    {
        this(context, attrs, 0);
    }

    public AutoResizeTextView(final Context context, final AttributeSet attrs, final int defStyle)
    {
        super(context, attrs, defStyle);
        // using the minimal recommended font size
        _minTextSize = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_SP, 12, getResources().getDisplayMetrics());
        _maxTextSize = getTextSize();
        if (_maxLines == 0)
            // no value was assigned during construction
            _maxLines = NO_LINE_LIMIT;
        // prepare size tester:
        _sizeTester = new SizeTester()
        {
            final RectF textRect = new RectF();

            @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
            @Override
            public int onTestSize(final int suggestedSize, final RectF availableSPace)
            {
                paint.setTextSize(suggestedSize);
                final String text = getText().toString();
                final boolean singleline = getMaxLines() == 1;
                if (singleline)
                {
                    // Single line: width is the measured text, height one line.
                    textRect.bottom = paint.getFontSpacing();
                    textRect.right = paint.measureText(text);
                }
                else
                {
                    final StaticLayout layout = new StaticLayout(text, paint, _widthLimit, Alignment.ALIGN_NORMAL, _spacingMult, _spacingAdd, true);
                    // return early if we have more lines than allowed
                    if (getMaxLines() != NO_LINE_LIMIT && layout.getLineCount() > getMaxLines())
                        return 1;
                    textRect.bottom = layout.getHeight();
                    int maxWidth = -1;
                    for (int i = 0; i < layout.getLineCount(); i++)
                        if (maxWidth < layout.getLineWidth(i))
                            maxWidth = (int) layout.getLineWidth(i);
                    textRect.right = maxWidth;
                }
                textRect.offsetTo(0, 0);
                if (availableSPace.contains(textRect))
                    // may be too small, don't worry we will find the best match
                    return -1;
                // else, too big
                return 1;
            }
        };
        _initiallized = true;
    }

    @Override
    public void setTypeface(Typeface tf)
    {
        // Keep a private TextPaint so size testing never mutates the view's
        // own paint mid-layout.
        paint = new TextPaint(getPaint());
        paint.setTypeface(tf);
        super.setTypeface(tf);
    }

    @Override
    public void setTextSize(final float size)
    {
        _maxTextSize = size;
        _textCachedSizes.clear();
        adjustTextSize();
    }

    @Override
    public void setMaxLines(final int maxlines)
    {
        super.setMaxLines(maxlines);
        _maxLines = maxlines;
        reAdjust();
    }

    @Override
    public int getMaxLines()
    {
        return _maxLines;
    }

    @Override
    public void setSingleLine()
    {
        super.setSingleLine();
        _maxLines = 1;
        reAdjust();
    }

    @Override
    public void setSingleLine(final boolean singleLine)
    {
        super.setSingleLine(singleLine);
        if (singleLine)
            _maxLines = 1;
        else
            _maxLines = NO_LINE_LIMIT;
        reAdjust();
    }

    @Override
    public void setLines(final int lines)
    {
        super.setLines(lines);
        _maxLines = lines;
        reAdjust();
    }

    @Override
    public void setTextSize(final int unit, final float size)
    {
        final Context c = getContext();
        Resources r;
        if (c == null)
            r = Resources.getSystem();
        else
            r = c.getResources();
        _maxTextSize = TypedValue.applyDimension(unit, size, r.getDisplayMetrics());
        _textCachedSizes.clear();
        adjustTextSize();
    }

    @Override
    public void setLineSpacing(final float add, final float mult)
    {
        super.setLineSpacing(add, mult);
        _spacingMult = mult;
        _spacingAdd = add;
    }

    /**
     * Set the lower text size limit and invalidate the view
     *
     * @param minTextSize the minimum text size, in pixels
     */
    public void setMinTextSize(final float minTextSize)
    {
        _minTextSize = minTextSize;
        reAdjust();
    }

    private void reAdjust()
    {
        adjustTextSize();
    }

    /**
     * Recomputes the best-fitting text size for the current measured bounds
     * and applies it via the superclass (bypassing this class's override).
     */
    private void adjustTextSize()
    {
        if (!_initiallized)
            return;
        final int startSize = (int) _minTextSize;
        final int heightLimit = getMeasuredHeight() - getCompoundPaddingBottom() - getCompoundPaddingTop();
        _widthLimit = getMeasuredWidth() - getCompoundPaddingLeft() - getCompoundPaddingRight();
        if (_widthLimit <= 0)
            return;
        _availableSpaceRect.right = _widthLimit;
        _availableSpaceRect.bottom = heightLimit;
        super.setTextSize(TypedValue.COMPLEX_UNIT_PX, efficientTextSizeSearch(startSize, (int) _maxTextSize, _sizeTester, _availableSpaceRect));
    }

    /**
     * Enables or disables size caching, enabling it will improve performance
     * where you are animating a value inside TextView. This stores the font
     * size against getText().length() Be careful though while enabling it as 0
     * takes more space than 1 on some fonts and so on.
     *
     * @param enable
     *            enable font size caching
     */
    public void setEnableSizeCache(final boolean enable)
    {
        _enableSizeCache = enable;
        _textCachedSizes.clear();
        adjustTextSize();
    }

    /**
     * Looks the size up in the cache (keyed by text length) before falling
     * back to the binary search.
     */
    private int efficientTextSizeSearch(final int start, final int end, final SizeTester sizeTester, final RectF availableSpace)
    {
        if (!_enableSizeCache)
            return binarySearch(start, end, sizeTester, availableSpace);
        final String text = getText().toString();
        final int key = text == null ? 0 : text.length();
        int size = _textCachedSizes.get(key);
        if (size != 0)
            return size;
        size = binarySearch(start, end, sizeTester, availableSpace);
        _textCachedSizes.put(key, size);
        return size;
    }

    /**
     * Binary search for the largest size in [start, end) that still fits the
     * available space, as judged by {@code sizeTester}.
     */
    private int binarySearch(final int start, final int end, final SizeTester sizeTester, final RectF availableSpace)
    {
        int lastBest = start;
        int lo = start;
        int hi = end - 1;
        while (lo <= hi)
        {
            final int mid = (lo + hi) >>> 1;
            final int midValCmp = sizeTester.onTestSize(mid, availableSpace);
            if (midValCmp < 0)
            {
                // mid fits: remember it as the best verified size so far and
                // try larger sizes. (The original recorded `lo` here, which
                // can undershoot the true best.)
                lastBest = mid;
                lo = mid + 1;
            }
            else if (midValCmp > 0)
            {
                // mid is too big: only sizes below mid can fit. Do NOT record
                // mid - 1 as "best" — it has not been tested. (The original
                // did, and could return a size that does not fit.)
                hi = mid - 1;
            }
            else
                return mid;
        }
        // make sure to return last best
        // this is what should always be returned
        return lastBest;
    }

    @Override
    protected void onTextChanged(final CharSequence text, final int start, final int before, final int after)
    {
        super.onTextChanged(text, start, before, after);
        reAdjust();
    }

    @Override
    protected void onSizeChanged(final int width, final int height, final int oldwidth, final int oldheight)
    {
        // The cached sizes were computed for the old bounds.
        _textCachedSizes.clear();
        super.onSizeChanged(width, height, oldwidth, oldheight);
        if (width != oldwidth || height != oldheight)
            reAdjust();
    }
}
|
package com.rehivetech.beeeon.household.device;
import android.content.Context;
import android.support.annotation.Nullable;
import com.rehivetech.beeeon.IOrderIdentifier;
import com.rehivetech.beeeon.IconResourceType;
import com.rehivetech.beeeon.household.device.values.BaseValue;
import com.rehivetech.beeeon.household.device.values.EnumValue;
import java.util.List;
public final class Module implements IOrderIdentifier {
public static final String ID_SEPARATOR = "
/**
* Properties inherited from device's specification table.
*/
private final String mId;
private final ModuleType mType; // type defines what BaseValue should be created and also allows searching/comparing by type + offset
private final int mOffset;
private final Integer mSort;
private final int mGroupRes;
private final int mNameRes;
private final boolean mIsActuator;
// private final Constraints mConstraints; // FIXME: implement later
// private final Values mValues; // this is not needed, as needed is BaseValue property
// private final Rules mRules; // FIXME: implement later
private final Device mDevice; // parent device
private final BaseValue mValue;
public static Module createUnknownModule(Device device, String id) {
return new Module(device, id, ModuleType.TYPE_UNKNOWN.getTypeId(), 0, null, null, null, false);
}
public Module(Device device, String id, int typeId, int offset, Integer sort, Integer groupRes, Integer nameRes, boolean isActuator) {
mDevice = device;
mId = id;
mSort = sort;
mGroupRes = groupRes != null ? groupRes : 0;
mNameRes = nameRes != null ? nameRes : 0;
mIsActuator = isActuator;
mOffset = offset;
mType = ModuleType.fromTypeId(typeId);
if (mType.getValueClass() == EnumValue.class) {
throw new IllegalArgumentException("ValueClass received from ModuleType is EnumValue, but constructor was called without enumValues.");
}
mValue = BaseValue.createFromModuleType(mType);
}
public Module(Device device, String id, int typeId, int offset, Integer sort, Integer groupRes, Integer nameRes, boolean isActuator, List<EnumValue.Item> enumValues) {
mDevice = device;
mId = id;
mSort = sort;
mGroupRes = groupRes != null ? groupRes : 0;
mNameRes = nameRes != null ? nameRes : 0;
mIsActuator = isActuator;
mOffset = offset;
mType = ModuleType.fromTypeId(typeId);
if (mType.getValueClass() != EnumValue.class) {
throw new IllegalArgumentException("ValueClass received from ModuleType is not EnumValue, but constructor was called with enumValues.");
}
mValue = new EnumValue(enumValues);
}
/**
* Represents settings of module which could be saved to server
*/
public enum SaveModule {
SAVE_NAME, // change name of module
SAVE_LOCATION, // change location of mDevice
SAVE_VISIBILITY, // change visibility of module
SAVE_REFRESH, // change refresh interval of mDevice
SAVE_VALUE, // change value of actor module
SAVE_INITIALIZED,
}
public ModuleType getType() {
return mType;
}
public BaseValue getValue() {
return mValue;
}
public void setValue(String value) {
mValue.setValue(value);
}
/**
* Get resource for human readable string representing type of this module
*
* @return
*/
public int getTypeStringResource() {
return mType.getStringResource();
}
public int getIconResource(){
return getIconResource(IconResourceType.DARK);
}
public int getIconResource(IconResourceType type){
return mIsActuator ? mValue.getActorIconResource(type) : mValue.getIconResource(type);
}
public Device getDevice() {
return mDevice;
}
/**
* Get unique identifier of module (address of mDevice + raw type id containing offset)
*
* @return id
*/
public String getId() {
if (mDevice == null)
throw new RuntimeException("Module's mDevice is null!");
return mDevice.getAddress() + ID_SEPARATOR + mId;
}
public int getOffset() {
return mOffset;
}
/**
* @param context
* @return name of group
*/
public String getGroupName(Context context) {
return mGroupRes > 0 ? context.getString(mGroupRes) : "";
}
/**
* @param context
* @param withGroup
* @return name of module, optionally prefixed with name of group
*/
public String getName(Context context, boolean withGroup) {
String group = mGroupRes > 0 ? context.getString(mGroupRes) : "";
String name = mNameRes > 0 ? context.getString(mNameRes) : "";
return withGroup ? String.format("%s %s", group, name).trim() : name;
}
/**
* @param context
* @return name of module
*/
public String getName(Context context) {
return getName(context, false);
}
/**
* @return true if module should be visible to the user at this moment
*/
public boolean isVisible() {
// FIXME: real check based on rules
return true;
}
public boolean isActuator() {
return mIsActuator;
}
@Nullable
@Override
public Integer getSort() {
return mSort;
}
}
|
package nl.wietmazairac.bimql.set.attribute;
import org.bimserver.models.store.IfcEngine;
/**
 * Generated helper that applies a string-valued attribute change to an
 * {@link IfcEngine} instance, dispatching on the attribute name.
 */
public class SetAttributeSubIfcEngine {
    // Target object whose attribute will be modified (cast to IfcEngine on use).
    private Object object;
    // Name of the attribute to modify.
    private String attributeName;
    // New value for the attribute, supplied as a string.
    private String attributeNewValue;

    /** Creates an empty instance; populate via the setters before use. */
    public SetAttributeSubIfcEngine() {
    }

    /** Creates a fully-initialized instance. */
    public SetAttributeSubIfcEngine(Object object, String attributeName, String attributeNewValue) {
        this.object = object;
        this.attributeName = attributeName;
        this.attributeNewValue = attributeNewValue;
    }

    public Object getObject() {
        return object;
    }

    public String getAttributeName() {
        return attributeName;
    }

    public String getAttributeNewValue() {
        return attributeNewValue;
    }

    public void setObject(Object object) {
        this.object = object;
    }

    public void setAttributeName(String attributeName) {
        this.attributeName = attributeName;
    }

    public void setAttributeNewValue(String attributeNewValue) {
        this.attributeNewValue = attributeNewValue;
    }

    /**
     * Applies {@link #attributeNewValue} to the attribute named by
     * {@link #attributeName} on the target {@link IfcEngine}. Unknown names
     * (and "Settings", which had no generated setter) are silently ignored.
     */
    public void setAttribute() {
        if (attributeName.equals("ClassName")) {
            ((IfcEngine) object).setClassName(attributeNewValue);
        } else if (attributeName.equals("Settings")) {
            // No generated setter for Settings: intentionally a no-op.
        } else if (attributeName.equals("Name")) {
            ((IfcEngine) object).setName(attributeNewValue);
        } else if (attributeName.equals("Enabled")) {
            ((IfcEngine) object).setEnabled(Boolean.parseBoolean(attributeNewValue));
        }
    }
}
|
package com.braintreepayments.api;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentManager;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.VisibleForTesting;
import com.braintreepayments.api.exceptions.ErrorWithResponse;
import com.braintreepayments.api.exceptions.GoogleApiClientException;
import com.braintreepayments.api.exceptions.InvalidArgumentException;
import com.braintreepayments.api.interfaces.BraintreeCancelListener;
import com.braintreepayments.api.interfaces.BraintreeErrorListener;
import com.braintreepayments.api.interfaces.BraintreeListener;
import com.braintreepayments.api.interfaces.ConfigurationFetchedErrorListener;
import com.braintreepayments.api.interfaces.ConfigurationListener;
import com.braintreepayments.api.interfaces.HttpResponseCallback;
import com.braintreepayments.api.interfaces.PaymentMethodCreatedListener;
import com.braintreepayments.api.interfaces.PaymentMethodsUpdatedListener;
import com.braintreepayments.api.interfaces.QueuedCallback;
import com.braintreepayments.api.internal.BraintreeHttpClient;
import com.braintreepayments.api.models.ClientKey;
import com.braintreepayments.api.models.ClientToken;
import com.braintreepayments.api.models.Configuration;
import com.braintreepayments.api.models.PaymentMethod;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.GoogleApiClient.ConnectionCallbacks;
import com.google.android.gms.common.api.GoogleApiClient.OnConnectionFailedListener;
import com.google.android.gms.wallet.Wallet;
import com.google.android.gms.wallet.WalletConstants;
import org.json.JSONException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
/**
* Core Braintree class that handles network requests and managing callbacks.
*/
public class BraintreeFragment extends Fragment {
/** Arguments-bundle key holding the serialized client token JSON. */
public static final String EXTRA_CLIENT_TOKEN = "com.braintreepayments.api.EXTRA_CLIENT_TOKEN";
/** Arguments-bundle key holding the raw client key string. */
public static final String EXTRA_CLIENT_KEY = "com.braintreepayments.api.EXTRA_CLIENT_KEY";
/** Arguments-bundle key recording the integration type ("custom" or "dropin"). */
public static final String EXTRA_INTEGRATION_TYPE = "com.braintreepayments.api.EXTRA_INTEGRATION_TYPE";
/** Fragment tag used to find or attach this fragment in the FragmentManager. */
public static final String TAG = "com.braintreepayments.api.BraintreeFragment";
// Application context, captured in onCreate().
private Context mContext;
// Exactly one of mClientKey / mClientToken is populated in onCreate(),
// depending on which extra newInstance() stored.
private ClientKey mClientKey;
private ClientToken mClientToken;
private Configuration mConfiguration;
@VisibleForTesting
protected BraintreeHttpClient mHttpClient;
protected GoogleApiClient mGoogleApiClient;
// Callbacks queued until a matching listener is registered; flushed from
// onResume() and addListener().
private Queue<QueuedCallback> mCallbackQueue = new ArrayDeque<>();
private List<PaymentMethod> mCachedPaymentMethods = new ArrayList<>();
private boolean mHasFetchedPaymentMethods = false;
// Listener references, populated by addListener().
private ConfigurationListener mConfigurationListener;
private ConfigurationFetchedErrorListener mConfigurationErrorListener;
private BraintreeCancelListener mCancelListener;
private PaymentMethodsUpdatedListener mPaymentMethodsUpdatedListener;
private PaymentMethodCreatedListener mPaymentMethodCreatedListener;
private BraintreeErrorListener mErrorListener;
@VisibleForTesting
protected String mIntegrationType;
/** Required public no-arg constructor for framework re-instantiation. */
public BraintreeFragment() {}
/**
 * Create a new instance of {@link BraintreeFragment} using the client token and add it to the
 * {@link Activity}'s {@link FragmentManager}. If a fragment with {@link #TAG}
 * is already attached, that existing instance is returned instead.
 *
 * @param activity The {@link Activity} to add the {@link Fragment} to.
 * @param clientKeyOrToken The client key or token to use.
 * @return {@link BraintreeFragment}
 * @throws InvalidArgumentException If the client token is not valid json or cannot be parsed.
 */
public static BraintreeFragment newInstance(Activity activity, String clientKeyOrToken)
throws InvalidArgumentException {
FragmentManager fm = activity.getFragmentManager();
String integrationType = "custom";
try {
// Detect drop-in integration reflectively so this class has no hard
// dependency on the drop-in activity class.
if (Class.forName("com.braintreepayments.api.BraintreePaymentActivity")
.isInstance(activity)) {
integrationType = "dropin";
}
} catch (ClassNotFoundException ignored) {}
BraintreeFragment braintreeFragment = (BraintreeFragment) fm.findFragmentByTag(TAG);
if (braintreeFragment == null) {
braintreeFragment = new BraintreeFragment();
Bundle bundle = new Bundle();
// Try the input as a client key first; on failure fall back to
// parsing it as a client token. Only if both fail is it invalid.
try {
bundle.putString(EXTRA_CLIENT_KEY, ClientKey.fromString(clientKeyOrToken).clientKeyString());
} catch (InvalidArgumentException e) {
try {
bundle.putString(EXTRA_CLIENT_TOKEN,
ClientToken.fromString(clientKeyOrToken).toJson());
} catch (JSONException e1) {
throw new InvalidArgumentException("Client key or client token was invalid.");
}
}
bundle.putString(EXTRA_INTEGRATION_TYPE, integrationType);
braintreeFragment.setArguments(bundle);
fm.beginTransaction().add(braintreeFragment, TAG).commit();
}
return braintreeFragment;
}
/**
 * Initializes the fragment from its arguments: parses the client key or
 * token stored by {@link #newInstance}, builds the HTTP client, records an
 * analytics event and kicks off the configuration fetch. The fragment is
 * retained across configuration changes.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRetainInstance(true);
mContext = getActivity().getApplicationContext();
mIntegrationType = getArguments().getString(EXTRA_INTEGRATION_TYPE);
try {
if (getArguments().containsKey(EXTRA_CLIENT_KEY)) {
mClientKey = ClientKey.fromString(getArguments().getString(EXTRA_CLIENT_KEY));
if (mHttpClient == null) {
mHttpClient = new BraintreeHttpClient(mClientKey);
}
sendAnalyticsEvent("started.client-key");
} else {
mClientToken = ClientToken.fromString(getArguments().getString(EXTRA_CLIENT_TOKEN));
if (mHttpClient == null) {
mHttpClient = new BraintreeHttpClient(mClientToken);
}
sendAnalyticsEvent("started.client-token");
}
} catch (InvalidArgumentException | JSONException ignored) {
// already checked in BraintreeFragment.newInstance
}
fetchConfiguration();
}
/**
 * Re-registers the hosting {@link Activity} as a listener (if it implements
 * {@link BraintreeListener}), flushes any callbacks queued while no listener
 * was attached, and reconnects the Google API client if one exists.
 */
@Override
public void onResume() {
super.onResume();
if (getActivity() instanceof BraintreeListener) {
addListener((BraintreeListener) getActivity());
}
flushCallbacks();
if (mGoogleApiClient != null) {
mGoogleApiClient.connect();
}
}
/**
 * Flushes pending analytics, detaches the host activity as a listener and
 * disconnects the Google API client while the fragment is in the background.
 */
@Override
public void onPause() {
    super.onPause();
    // Push batched analytics events before going to the background.
    AnalyticsManager.flushEvents(this);
    final Activity host = getActivity();
    if (host instanceof BraintreeListener) {
        removeListener((BraintreeListener) host);
    }
    if (mGoogleApiClient != null) {
        mGoogleApiClient.disconnect();
    }
}
/**
 * Dispatches activity results back to the payment flow that launched the
 * activity. Cancellations are additionally reported to the cancel listener
 * (queued until one is attached).
 */
@Override
public void onActivityResult(final int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == Activity.RESULT_CANCELED) {
        postOrQueueCallback(new QueuedCallback() {
            @Override
            public boolean shouldRun() {
                return mCancelListener != null;
            }
            @Override
            public void run() {
                mCancelListener.onCancel(requestCode);
            }
        });
    }
    // Route to the matching payment flow; a cancelled result is still routed,
    // matching the original switch behavior.
    if (requestCode == PayPal.PAYPAL_AUTHORIZATION_REQUEST_CODE) {
        PayPal.onActivityResult(this, resultCode, data);
    } else if (requestCode == Venmo.VENMO_REQUEST_CODE) {
        Venmo.onActivityResult(this, resultCode, data);
    } else if (requestCode == ThreeDSecure.THREE_D_SECURE_REQUEST_CODE) {
        ThreeDSecure.onActivityResult(this, resultCode, data);
    }
}
/**
 * Adds a listener. The listener is registered for every listener interface it
 * implements; any callbacks queued while no suitable listener was attached are
 * replayed afterwards.
 *
 * @param listener the listener to add.
 */
public <T extends BraintreeListener> void addListener(T listener) {
    // Each check is independent; a single listener may implement several interfaces.
    if (listener instanceof BraintreeErrorListener) {
        mErrorListener = (BraintreeErrorListener) listener;
    }
    if (listener instanceof PaymentMethodCreatedListener) {
        mPaymentMethodCreatedListener = (PaymentMethodCreatedListener) listener;
    }
    if (listener instanceof PaymentMethodsUpdatedListener) {
        mPaymentMethodsUpdatedListener = (PaymentMethodsUpdatedListener) listener;
    }
    if (listener instanceof BraintreeCancelListener) {
        mCancelListener = (BraintreeCancelListener) listener;
    }
    if (listener instanceof ConfigurationFetchedErrorListener) {
        mConfigurationErrorListener = (ConfigurationFetchedErrorListener) listener;
    }
    if (listener instanceof ConfigurationListener) {
        mConfigurationListener = (ConfigurationListener) listener;
    }
    flushCallbacks();
}
/**
 * Removes a previously added listener, clearing every listener slot whose
 * interface the given listener implements.
 *
 * @param listener the listener to remove.
 */
public <T extends BraintreeListener> void removeListener(T listener) {
    // Mirror of addListener: clear each slot the listener could occupy.
    if (listener instanceof BraintreeErrorListener) {
        mErrorListener = null;
    }
    if (listener instanceof PaymentMethodCreatedListener) {
        mPaymentMethodCreatedListener = null;
    }
    if (listener instanceof PaymentMethodsUpdatedListener) {
        mPaymentMethodsUpdatedListener = null;
    }
    if (listener instanceof BraintreeCancelListener) {
        mCancelListener = null;
    }
    if (listener instanceof ConfigurationFetchedErrorListener) {
        mConfigurationErrorListener = null;
    }
    if (listener instanceof ConfigurationListener) {
        mConfigurationListener = null;
    }
}
/**
 * Records an analytics event tagged with the current integration type.
 *
 * @param eventFragment the event-specific name fragment.
 */
protected void sendAnalyticsEvent(final String eventFragment) {
    AnalyticsManager.sendRequest(this, mIntegrationType, eventFragment);
}
/**
 * Caches the newly created payment method (most recent first) and notifies the
 * created-listener — immediately if attached, otherwise once one attaches.
 *
 * @param paymentMethod the payment method that was created.
 */
protected void postCallback(final PaymentMethod paymentMethod) {
    mCachedPaymentMethods.add(0, paymentMethod);
    postOrQueueCallback(new QueuedCallback() {
        @Override
        public boolean shouldRun() {
            return mPaymentMethodCreatedListener != null;
        }
        @Override
        public void run() {
            mPaymentMethodCreatedListener.onPaymentMethodCreated(paymentMethod);
        }
    });
}
/**
 * Replaces the payment method cache with the fetched list and notifies the
 * updated-listener — immediately if attached, otherwise once one attaches.
 *
 * @param paymentMethodList the current list of payment methods.
 */
protected void postCallback(final List<PaymentMethod> paymentMethodList) {
    mCachedPaymentMethods = paymentMethodList;
    mHasFetchedPaymentMethods = true;
    postOrQueueCallback(new QueuedCallback() {
        @Override
        public boolean shouldRun() {
            return mPaymentMethodsUpdatedListener != null;
        }
        @Override
        public void run() {
            mPaymentMethodsUpdatedListener.onPaymentMethodsUpdated(paymentMethodList);
        }
    });
}
/**
 * Reports an error to the error listener, distinguishing recoverable
 * ({@code ErrorWithResponse}) from unrecoverable errors.
 *
 * @param error the error to report.
 */
protected void postCallback(final Throwable error) {
    postOrQueueCallback(new QueuedCallback() {
        @Override
        public boolean shouldRun() {
            return mErrorListener != null;
        }
        @Override
        public void run() {
            if (error instanceof ErrorWithResponse) {
                mErrorListener.onRecoverableError((ErrorWithResponse) error);
            } else {
                mErrorListener.onUnrecoverableError(error);
            }
        }
    });
}
/**
 * Runs the callback immediately when its precondition holds, otherwise queues
 * it until {@link #flushCallbacks()} is called (e.g. when a listener attaches).
 *
 * @param callback the callback to run or queue.
 */
@VisibleForTesting
protected void postOrQueueCallback(QueuedCallback callback) {
    if (!callback.shouldRun()) {
        mCallbackQueue.add(callback);
    } else {
        callback.run();
    }
}
/**
 * Runs every queued callback whose precondition now holds and removes it from
 * the queue; callbacks that still cannot run remain queued for a later flush.
 */
@VisibleForTesting
protected void flushCallbacks() {
    // Iterate over a snapshot so a running callback may safely enqueue new callbacks.
    Queue<QueuedCallback> queue = new ArrayDeque<>();
    queue.addAll(mCallbackQueue);
    for (QueuedCallback callback : queue) {
        if (callback.shouldRun()) {
            callback.run();
            mCallbackQueue.remove(callback);
        }
    }
}
/**
 * Fetches the Braintree configuration for the current client key or client
 * token. On success the configuration is cached, the HTTP client's base URL is
 * updated and the configuration listener is notified; on failure (network or
 * JSON parse) both the generic error listener and the configuration error
 * listener are notified.
 */
@VisibleForTesting
protected void fetchConfiguration() {
    String configUrl;
    // Exactly one of client key / client token is set (enforced in onCreate).
    if (mClientKey != null) {
        configUrl = mClientKey.getConfigUrl();
    } else {
        configUrl = mClientToken.getConfigUrl();
    }
    configUrl = Uri.parse(configUrl)
            .buildUpon()
            .appendQueryParameter("configVersion", "3")
            .build()
            .toString();
    getHttpClient().get(configUrl, new HttpResponseCallback() {
        @Override
        public void success(String responseBody) {
            try {
                mConfiguration = Configuration.fromJson(responseBody);
                getHttpClient().setBaseUrl(mConfiguration.getClientApiUrl());
                postOrQueueCallback(new QueuedCallback() {
                    @Override
                    public boolean shouldRun() {
                        return mConfigurationListener != null;
                    }
                    @Override
                    public void run() {
                        mConfigurationListener.onConfigurationFetched(getConfiguration());
                    }
                });
                // Callbacks waiting on the configuration can run now.
                flushCallbacks();
            } catch (final JSONException e) {
                postCallback(e);
                postOrQueueCallback(new QueuedCallback() {
                    @Override
                    public boolean shouldRun() {
                        return mConfigurationErrorListener != null;
                    }
                    @Override
                    public void run() {
                        mConfigurationErrorListener.onConfigurationError(e);
                    }
                });
            }
        }
        @Override
        public void failure(final Exception exception) {
            postCallback(exception);
            postOrQueueCallback(new QueuedCallback() {
                @Override
                public boolean shouldRun() {
                    return mConfigurationErrorListener != null;
                }
                @Override
                public void run() {
                    mConfigurationErrorListener.onConfigurationError(exception);
                }
            });
        }
    });
}
/**
 * Invokes the given listener as soon as the configuration is available — right
 * away if it has already been fetched, otherwise after the fetch completes.
 *
 * @param listener the listener to invoke with the configuration.
 */
protected void waitForConfiguration(final ConfigurationListener listener) {
    postOrQueueCallback(new QueuedCallback() {
        @Override
        public boolean shouldRun() {
            return getConfiguration() != null;
        }
        @Override
        public void run() {
            listener.onConfigurationFetched(getConfiguration());
        }
    });
}
/** @return the application context captured in {@code onCreate}. */
protected Context getApplicationContext() {
    return mContext;
}
/** @return the parsed client key, or {@code null} when a client token is used instead. */
protected ClientKey getClientKey() {
    return mClientKey;
}
/** @return the parsed client token, or {@code null} when a client key is used instead. */
protected ClientToken getClientToken() {
    return mClientToken;
}
/** @return the fetched configuration, or {@code null} until the fetch succeeds. */
protected Configuration getConfiguration() {
    return mConfiguration;
}
/** @return the HTTP client used for Braintree API calls. */
protected BraintreeHttpClient getHttpClient() {
    return mHttpClient;
}
/** @return whether the payment method list has been fetched at least once. */
protected boolean hasFetchedPaymentMethods() {
    return mHasFetchedPaymentMethods;
}
/** @return an unmodifiable view of the cached payment methods (most recent first). */
protected List<PaymentMethod> getCachedPaymentMethods() {
    return Collections.unmodifiableList(mCachedPaymentMethods);
}
/**
 * Obtain an instance of a {@link GoogleApiClient} that is connected or connecting to be used
 * for Android Pay. This instance will be automatically disconnected in
 * {@link Fragment#onPause()} and automatically connected in {@link Fragment#onResume()}.
 *
 * Connection failed and connection suspended errors will be sent to
 * {@link BraintreeErrorListener#onUnrecoverableError(Throwable)}.
 *
 * @return {@link GoogleApiClient}.
 */
public GoogleApiClient getGoogleApiClient() {
    if (mGoogleApiClient == null) {
        mGoogleApiClient = new GoogleApiClient.Builder(getActivity())
                .addApi(Wallet.API, new Wallet.WalletOptions.Builder()
                        .setEnvironment(AndroidPay.getEnvironment(getConfiguration().getAndroidPay()))
                        .setTheme(WalletConstants.THEME_LIGHT)
                        .build())
                .build();
    }
    // NOTE(review): these callbacks are registered on every call that finds the client
    // disconnected, so repeated calls may register duplicate listeners — confirm intended.
    if (!mGoogleApiClient.isConnected() && !mGoogleApiClient.isConnecting()) {
        mGoogleApiClient.registerConnectionCallbacks(new ConnectionCallbacks() {
            @Override
            public void onConnected(Bundle bundle) {}
            @Override
            public void onConnectionSuspended(int i) {
                postCallback(new GoogleApiClientException("Connection suspended: " + i));
            }
        });
        mGoogleApiClient.registerConnectionFailedListener(new OnConnectionFailedListener() {
            @Override
            public void onConnectionFailed(ConnectionResult connectionResult) {
                postCallback(new GoogleApiClientException("Connection failed: " + connectionResult.getErrorCode()));
            }
        });
        mGoogleApiClient.connect();
    }
    return mGoogleApiClient;
}
}
|
import java.util.ArrayList;
import java.util.Scanner;
/**
* @author Liam Carter-Condon
*
*/
/**
 * Homework one: part one reports two people's heights in feet/inches and
 * centimeters; part two computes a simple monthly credit-card statement.
 *
 * @author Liam Carter-Condon
 */
public class homeworkOne {
    private static Scanner in = new Scanner(System.in);
    /**
     * Runs part one, pauses two seconds, then runs part two.
     *
     * @param args no arguments
     */
    public static void main(String[] args) {
        partOne();
        // Sleep instead of the original busy-wait loop, which burned a full CPU core.
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        partTwo();
    }
    /** Reads two names and their heights (inches), prints feet/inches, cm and the difference. */
    private static void partOne() {
        System.out.println("Part One, Homework One");
        // Separator line (reconstructed: the original string literal was truncated mid-line).
        System.out.println("----------------------");
        System.out.println("Give me two names, delimited with a semicolon");
        // store input as names, splitting along semicolons (optionally followed by a space)
        String[] names = in.nextLine().split("; ?");
        System.out.println("Give me their respective heights, delimited with a space");
        // store height inputs in an arraylist for easier accessing;
        // autoboxing replaces the deprecated new Integer(...) constructor
        ArrayList<Integer> heights = new ArrayList<Integer>();
        heights.add(in.nextInt());
        heights.add(in.nextInt());
        // a 2D arraylist for feet/inches: [person][0]=feet, [person][1]=inches
        ArrayList<ArrayList<Integer>> feetInches = new ArrayList<ArrayList<Integer>>();
        // parallel list storing the centimeter conversion
        ArrayList<Double> centimeters = new ArrayList<Double>();
        for (Integer i : heights) {
            ArrayList<Integer> temp = new ArrayList<Integer>();
            temp.add(i / 12);
            temp.add(i % 12);
            feetInches.add(temp);
            centimeters.add(i * 2.54);
        }
        for (int i = 0; i < names.length; i++) {
            Integer feet = feetInches.get(i).get(0);
            Integer inches = feetInches.get(i).get(1);
            System.out.println(names[i] + " is " +
                    // singular/plural wording chosen via ternaries
                    (feet == 0 ? "" : feet + (feet == 1 ? " foot" : " feet") + " and ") +
                    inches + (inches == 1 ? " inch" : " inches") + " tall (" +
                    centimeters.get(i) + " cm).");
        }
        int difference = Math.abs(heights.get(0) - heights.get(1));
        System.out.println("The height difference between them is " + difference + " inches (" +
                difference * 2.54 + " cm).");
    }
    /** Reads a payment and principal, then prints interest, principal applied and new balance. */
    private static void partTwo() {
        System.out.println("Part Two, Homework One");
        // Separator line (reconstructed: the original string literal was truncated mid-line).
        System.out.println("----------------------");
        final double APR = 7.49;
        System.out.println("Enter this month's payment (dollars.cents):");
        double payment = in.nextDouble();
        System.out.println("Enter the principal owed (dollars.cents)");
        double owed = in.nextDouble();
        System.out.printf("Previous Balance:\t\t$" + owed + "\n");
        System.out.printf("Payment:\t\t\t$" + payment + "\n");
        System.out.printf("Interest Paid:\t\t\t$%.2f\n", (owed * APR / 12.0 / 100.0));
        System.out.printf("Amount applied to principal:\t$%.2f\n", (payment - (owed * APR / 12.0 / 100.0)));
        System.out.printf("New Balance:\t\t\t$%.2f\n", (owed - (payment - (owed * APR / 12.0 / 100.0))));
        in.close(); // closes System.in; nothing is read after this point
    }
}
|
package ru.job4j;
import java.util.Calendar;
public class UserM {
private String name;
private int children;
private Calendar birthday;
public UserM(String name, int children, Calendar birthday) {
this.name = name;
this.children = children;
this.birthday = birthday;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
UserM userM = (UserM) o;
if (children != userM.children) return false;
if (name != null ? !name.equals(userM.name) : userM.name != null) return false;
return birthday != null ? birthday.equals(userM.birthday) : userM.birthday == null;
}
public String getName() {
return name;
}
public int getChildren() {
return children;
}
public Calendar getBirthday() {
return birthday;
}
public void setName(String name) {
this.name = name;
}
public void setChildren(int children) {
this.children = children;
}
public void setBirthday(Calendar birthday) {
this.birthday = birthday;
}
}
|
package org.opendaylight.controller.cluster.datastore.node.utils;
import com.google.common.base.Splitter;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
/**
 * Parses the string form of a YANG {@code AugmentationIdentifier}
 * (e.g. {@code AugmentationIdentifier{childNames=[...]}}) back into a
 * {@link YangInstanceIdentifier.AugmentationIdentifier}.
 */
public class AugmentationIdentifierGenerator {
    // Matches the literal "AugmentationIdentifier{childNames=[...]}" shape;
    // group 1 captures the comma-separated child QName list.
    private static final Pattern PATTERN = Pattern.compile("AugmentationIdentifier\\Q{\\EchildNames=\\Q[\\E(.*)\\Q]}\\E");
    private static final Splitter COMMA_SPLITTER = Splitter.on(',').trimResults();
    private final String id;          // the raw input string
    private final Matcher matcher;    // matcher over id; its groups are reused by getPathArgument()
    private final boolean doesMatch;  // match result, computed eagerly in the constructor
    public AugmentationIdentifierGenerator(String id) {
        this.id = id;
        matcher = PATTERN.matcher(this.id);
        doesMatch = matcher.matches();
    }
    /**
     * @return whether the input has the augmentation-identifier shape; callers
     *     should check this before invoking {@link #getPathArgument()}.
     */
    public boolean matches() {
        return doesMatch;
    }
    /**
     * Builds the augmentation identifier from the matched child names.
     * NOTE(review): assumes {@link #matches()} returned {@code true}; otherwise
     * {@code matcher.group(1)} throws {@code IllegalStateException} — confirm callers check first.
     */
    public YangInstanceIdentifier.AugmentationIdentifier getPathArgument() {
        final String childQNames = matcher.group(1);
        final Set<QName> childNames = new HashSet<>();
        for (String name : COMMA_SPLITTER.split(childQNames)) {
            childNames.add(QNameFactory.create(name));
        }
        return new YangInstanceIdentifier.AugmentationIdentifier(childNames);
    }
}
|
package net.ssehub.easy.instantiation.core.model.templateModel;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import net.ssehub.easy.basics.logger.EASyLoggerFactory;
import net.ssehub.easy.basics.logger.EASyLoggerFactory.EASyLogger;
import net.ssehub.easy.basics.modelManagement.AvailableModels;
import net.ssehub.easy.basics.modelManagement.IndentationConfiguration;
import net.ssehub.easy.basics.modelManagement.ModelImport;
import net.ssehub.easy.basics.modelManagement.ModelInfo;
import net.ssehub.easy.basics.modelManagement.ModelManagementException;
import net.ssehub.easy.instantiation.core.Bundle;
import net.ssehub.easy.instantiation.core.model.artifactModel.ArtifactTypes;
import net.ssehub.easy.instantiation.core.model.artifactModel.IArtifact;
import net.ssehub.easy.instantiation.core.model.common.Compound;
import net.ssehub.easy.instantiation.core.model.common.ExecutionVisitor;
import net.ssehub.easy.instantiation.core.model.common.IResolvableModel;
import net.ssehub.easy.instantiation.core.model.common.ITerminatable;
import net.ssehub.easy.instantiation.core.model.common.ModelCallExpression;
import net.ssehub.easy.instantiation.core.model.common.Typedef;
import net.ssehub.easy.instantiation.core.model.common.VilException;
import net.ssehub.easy.instantiation.core.model.expressions.AbstractCallExpression;
import net.ssehub.easy.instantiation.core.model.expressions.CallArgument;
import net.ssehub.easy.instantiation.core.model.expressions.ConstantExpression;
import net.ssehub.easy.instantiation.core.model.expressions.Expression;
import net.ssehub.easy.instantiation.core.model.expressions.ExpressionParserRegistry;
import net.ssehub.easy.instantiation.core.model.expressions.IExpressionParser;
import net.ssehub.easy.instantiation.core.model.expressions.ResolvableOperationCallExpression;
import net.ssehub.easy.instantiation.core.model.expressions.StringReplacer;
import net.ssehub.easy.instantiation.core.model.templateModel.ContentStatement.LineEndType;
import net.ssehub.easy.instantiation.core.model.expressions.ExpressionParserRegistry.ILanguage;
import net.ssehub.easy.instantiation.core.model.expressions.IArgumentProvider;
import net.ssehub.easy.instantiation.core.model.vilTypes.Collection;
import net.ssehub.easy.instantiation.core.model.vilTypes.ITypedModel;
import net.ssehub.easy.instantiation.core.model.vilTypes.StringValueHelper;
import net.ssehub.easy.instantiation.core.model.vilTypes.TypeRegistry;
import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.EnumValue;
import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.IvmlElement;
import net.ssehub.easy.instantiation.core.model.vilTypes.configuration.IvmlTypes;
import net.ssehub.easy.varModel.model.values.ContainerValue;
import net.ssehub.easy.varModel.model.values.NullValue;
/**
* Implements the execution of the template language.
*
* @author Holger Eichelberger
*/
public class TemplateLangExecution extends ExecutionVisitor<Template, Def, VariableDeclaration, Resolver>
implements ITemplateLangVisitor, ITerminatable {
/** Registration handle identifying the VTL language for the expression parser registry. */
public static final ILanguage<Resolver> LANGUAGE = new ILanguage<Resolver>() {
    @Override
    public String getName() {
        return "VTL";
    }
};
/**
 * The name of the default main template (called {@value}).
 */
public static final String DEFAULT_MAIN_TEMPLATE = "main";
/**
 * Denotes the default name of the configuration parameter (may be overwritten by user values).
 */
public static final String PARAM_CONFIG = "config";
/**
 * Denotes the default name of the target artifact parameter (may be overwritten by user values).
 */
public static final String PARAM_TARGET = "target";
/**
 * Denotes a prefix for internal parameter which must be set in addition.
 */
public static final String INTERNAL_PARAM_PREFIX = "$$";
/**
 * Denotes the default name of the configuration parameter always containing the source configuration.
 */
public static final String PARAM_CONFIG_SURE = INTERNAL_PARAM_PREFIX + PARAM_CONFIG;
/**
 * Denotes the default name of the target artifact parameter always containing the target artifact.
 */
public static final String PARAM_TARGET_SURE = INTERNAL_PARAM_PREFIX + PARAM_TARGET;
// Java extensions registered globally for all VTL executions.
private static final List<JavaExtension> DEFAULT_EXTENSIONS = new ArrayList<JavaExtension>();
// Internal marker for "empty content" that is later stripped out again.
private static final String EMPTY_CONTENT = "\0\1\0"; // don't use for anything else
private RuntimeEnvironment environment;              // runtime variable/indentation environment
private Writer mainOut;                              // the raw output writer passed in by the caller
private PrintWriter out;                             // print wrapper around mainOut used for emission
private String mainName;                             // name of the def to execute as entry point
private ITracer tracer;                              // receives execution trace events (also for testing)
private boolean stop = false;                        // set on terminate; loops/blocks check it to abort
private int contentNestingLevel;                     // nesting depth of content statements being evaluated
private int lastContentNestingLevel = -1;            // nesting level of the previously emitted content
private boolean lastContentFormatted = false;        // whether the last content was explicitly indented
private Stack<String> defContentStack = new Stack<String>(); // per-def content accumulation buffers
private ContentStatement lastContent = null;         // last content statement, for line-end handling
private LineEndType lastContentLineEndType = LineEndType.DEFAULT; // line-end type of the last content
/**
 * Creates a new evaluation visitor executing the {@link #DEFAULT_MAIN_TEMPLATE}.
 *
 * @param tracer the tracer instance for testing
 * @param out where to put the output to
 * @param parameter the top-level parameter for the script to be executed
 */
public TemplateLangExecution(ITracer tracer, Writer out, Map<String, Object> parameter) {
    this(tracer, out, DEFAULT_MAIN_TEMPLATE, parameter);
}
/**
 * Creates a new evaluation visitor.
 *
 * @param tracer the tracer instance for testing
 * @param out where to put the output to
 * @param mainName the name of the sub-template to be executed
 * @param parameter the top-level parameter for the script to be executed
 */
public TemplateLangExecution(ITracer tracer, Writer out, String mainName, Map<String, Object> parameter) {
    super(new RuntimeEnvironment(), tracer, parameter);
    this.environment = (RuntimeEnvironment) getRuntimeEnvironment();
    this.mainOut = out;
    this.out = new PrintWriter(out);
    this.mainName = mainName;
    this.tracer = tracer;
    // Suppress automatic artifact storing while this execution owns the parameters.
    enableArtifactAutoStoreOnParameters(false);
}
/**
 * Creates a new execution visitor for import expression evaluation. Output is
 * discarded via a no-op writer and tracing is disabled.
 *
 * @param environment the runtime environment to be used for expression evaluation
 */
TemplateLangExecution(RuntimeEnvironment environment) {
    super(environment, NoTracer.INSTANCE, new HashMap<String, Object>());
    this.environment = environment;
    this.out = new PrintWriter(new Writer() {
        @Override
        public void close() throws IOException {
        }
        @Override
        public void flush() throws IOException {
        }
        @Override
        public void write(char[] arg0, int arg1, int arg2) throws IOException {
        }
    });
    this.mainName = DEFAULT_MAIN_TEMPLATE;
    this.tracer = NoTracer.INSTANCE;
}
/**
 * Releases this execution; re-enables automatic artifact storing that was
 * disabled in the constructor before delegating to the superclass.
 */
@Override
public void release(boolean releaseDefault) {
    enableArtifactAutoStoreOnParameters(true);
    super.release(releaseDefault);
}
/**
 * Register a default Java extension.
 *
 * @param extension the extension to be registered (ignored if <b>null</b>)
 */
public static void registerDefaultExtension(Class<?> extension) {
    if (null != extension) {
        EASyLogger logger = EASyLoggerFactory.INSTANCE.getLogger(TemplateLangExecution.class, Bundle.ID);
        try {
            DEFAULT_EXTENSIONS.add(new JavaExtension(extension, TypeRegistry.DEFAULT));
            logger.info("registered default VTL extension " + extension.getName());
        } catch (VilException e) {
            // registration failures are logged rather than propagated
            logger.exception(e);
        }
    }
}
/**
 * Returns the number of default extensions.
 *
 * @return the number of default extensions
 */
public static int getDefaultExtensionCount() {
    return DEFAULT_EXTENSIONS.size();
}
/**
 * Returns the specified default extension.
 *
 * @param index the 0-based index of the default extension
 * @return the default extension
 * @throws IndexOutOfBoundsException in case that
 * <code>index < 0 || index >={@link #getDefaultExtensionCount()}</code>
 */
public static JavaExtension getDefaultExtension(int index) {
    return DEFAULT_EXTENSIONS.get(index);
}
/**
 * Executes a template: locates the main def by {@code mainName}, matching
 * parameter count and assignable parameter types, then runs it.
 *
 * @param template the template to execute
 * @return the execution result of the main def
 * @throws VilException if no suitable main def exists or execution fails
 */
@Override
public Object visitTemplate(Template template) throws VilException {
    environment.switchContext(template); // initial context, assumption that method is only called from outside
    tracer.visitTemplate(template);
    visitModelHeader(template);
    Def main = null;
    // Search for the first def named mainName whose signature matches the template parameters.
    for (int d = 0; null == main && d < template.getDefCount(); d++) {
        Def def = template.getDef(d);
        if (def.getName().equals(mainName)) {
            if (template.getParameterCount() == def.getParameterCount()) {
                main = def;
                // Every template parameter type must be assignable to the def's parameter type.
                for (int p = 0; null != main && p < template.getParameterCount(); p++) {
                    if (!template.getParameter(p).getType().isAssignableFrom(def.getParameter(p).getType())) {
                        main = null;
                    }
                }
            }
        }
    }
    if (null == main) {
        throw new VilException("no '" + mainName + "' template found with suitable parameters",
            VilException.ID_RUNTIME_STARTRULE);
    }
    Object result = executeMain(template, main);
    tracer.visitedTemplate(template);
    return result;
}
/**
 * Executes a def: pushes a fresh content buffer, runs the body (placeholders
 * are skipped), appends a trailing line end when required, and finally either
 * emits the accumulated content to the output (top level) or appends it to the
 * caller's buffer (nested call).
 *
 * @param def the def to execute
 * @return the execution result of the def body
 * @throws VilException in case of evaluation problems
 */
@Override
public Object visitDef(Def def) throws VilException {
    defContentStack.push("");
    Object result;
    if (def.isPlaceholder()) {
        result = null;
    } else {
        tracer.visitDef(def, environment);
        result = visitTemplateBlock(def); // increases indentation
        // if top-level, print last line ending by default else not
        if (null != lastContent && lastContent.needsLineEnd(0 == contentNestingLevel)) {
            appendContent(getLineEnd());
        }
        lastContent = null; // handled, reset
        tracer.visitedDef(def, environment, result);
    }
    String content = defContentStack.pop();
    if (0 == contentNestingLevel) { // top level - compose or emit
        if (defContentStack.isEmpty()) {
            out.print(content);
        } else {
            appendContent(content);
        }
    } // nested content mode - composed from return
    return result;
}
/**
 * Increases the current indentation level depending on the
 * specified <code>element</code>. This information is required for
 * content formatting. Blocks already adjust indentation themselves,
 * so only non-block elements are counted here.
 *
 * @param element the element to be considered
 */
private void increaseIndentation(ITemplateElement element) {
    if (!element.isBlock()) {
        environment.increaseIndentation();
    }
}
/**
 * Decreases the current indentation level depending on the
 * specified <code>element</code>. This information is required for
 * content formatting. Counterpart of {@link #increaseIndentation(ITemplateElement)}.
 *
 * @param element the element to be considered
 */
private void decreaseIndentation(ITemplateElement element) {
    if (!element.isBlock()) {
        environment.decreaseIndentation();
    }
}
/**
 * Executes the elements of a template block in order within an increased
 * indentation level, stopping early when an element fails its condition check
 * or {@code stop} was requested.
 *
 * @param block the block to execute
 * @return the value of the last executed element, or <b>null</b> on failure
 * @throws VilException in case of evaluation problems
 */
@Override
public Object visitTemplateBlock(TemplateBlock block) throws VilException {
    boolean ok = true;
    // A block with a non-void inferred type returns its last element's value.
    boolean returns = !TypeRegistry.voidType().isSame(block.inferType());
    Object value = null;
    environment.increaseIndentation();
    int count = block.getBodyElementCount();
    for (int e = 0; ok && !stop && e < count; e++) {
        ITemplateElement elt = block.getBodyElement(e);
        value = elt.accept(this);
        // The last element of a returning block is not condition-checked.
        if ((!returns || (returns && e + 1 < count)) && mayFail(elt)) {
            ok = checkConditionResult(value, block, ConditionTest.DONT_CARE);
        }
        if (!ok) {
            tracer.failedAt(block.getBodyElement(e));
            value = null;
        }
    }
    environment.decreaseIndentation();
    return value;
}
/**
 * Executes an if/else alternative: evaluates the condition and runs the
 * matching branch with adjusted indentation. When no else branch exists but
 * the if branch is a content statement, empty content is emulated so that
 * content composition does not fail.
 *
 * @param alternative the alternative statement to execute
 * @return the value of the executed branch (possibly content-corrected)
 * @throws VilException in case of evaluation problems
 */
@Override
public Object visitAlternative(AlternativeStatement alternative) throws VilException {
    Object value = null;
    Expression cond = alternative.getCondition();
    Object condValue = cond.accept(this);
    if (checkConditionResult(condValue, cond, ConditionTest.DONT_CARE)) {
        ITemplateElement ifStmt = alternative.getIfStatement();
        increaseIndentation(ifStmt);
        tracer.visitAlternative(true);
        value = ifStmt.accept(this);
        decreaseIndentation(ifStmt);
        value = checkContentStatement(value, null, alternative.getIfStatement());
    } else {
        if (null != alternative.getElseStatement()) {
            ITemplateElement elseStmt = alternative.getElseStatement();
            increaseIndentation(elseStmt);
            tracer.visitAlternative(false);
            value = elseStmt.accept(this);
            decreaseIndentation(elseStmt);
            value = checkContentStatement(value, null, alternative.getElseStatement());
        } else {
            // there is no else - check content for if-part
            boolean isIfContentStatement = isContentStatement(alternative.getIfStatement());
            if (isIfContentStatement) { // pretend there is one, otherwise content evaluation fails
                if (!defContentStack.isEmpty()) { // emulate visitContentStatement(""), pass through content
                    value = defContentStack.peek();
                } else {
                    value = "";
                }
            }
            value = checkContentStatement(value, null, isIfContentStatement);
        }
    }
    return value;
}
/**
 * Evaluates the given expression and turns its result into a separator string.
 *
 * @param expression the expression (may be <b>null</b>)
 * @return the separator string, or <b>null</b> if no expression was given
 * @throws VilException in case of evaluation problems
 */
private String getSeparatorFromExpression(Expression expression) throws VilException {
    if (null == expression) {
        return null;
    }
    return StringValueHelper.getStringValue(expression.accept(this), null);
}
/**
 * Executes a loop statement over a container: binds the iterator variable for
 * each element, runs the loop body with adjusted indentation and emits the
 * optional separator between elements and final separator after the last one.
 *
 * @param loop the loop statement to execute
 * @return the (possibly content-corrected) result of the last body execution
 * @throws VilException if the container expression does not yield a collection
 */
@Override
public Object visitLoop(LoopStatement loop) throws VilException {
    Object object;
    Expression expr = loop.getContainerExpression();
    object = convertToContainer(expr, expr.accept(this), "loop");
    String separator = getSeparatorFromExpression(loop.getSeparatorExpression());
    String finalSeparator = getSeparatorFromExpression(loop.getFinalSeparatorExpression());
    Object bodyResult = NullValue.VALUE;
    if (object instanceof Collection<?>) {
        VariableDeclaration iterVar = loop.getIteratorVariable();
        environment.pushLevel(); // scope for the iterator variable
        Collection<?> collection = (Collection<?>) object;
        Iterator<?> iter = collection.iterator();
        tracer.visitLoop(iterVar);
        while (iter.hasNext() && !stop) {
            Object value = iter.next();
            environment.addValue(iterVar, value);
            tracer.valueDefined(iterVar, null, value);
            ITemplateElement loopStmt = loop.getLoopStatement();
            increaseIndentation(loopStmt);
            bodyResult = loopStmt.accept(this);
            decreaseIndentation(loopStmt);
            // separator goes between elements, finalSeparator after the last one
            if (null != separator && iter.hasNext()) {
                appendContent(separator);
            }
            if (null != finalSeparator && !iter.hasNext()) {
                appendContent(finalSeparator);
            }
        }
        tracer.visitedLoop(iterVar);
        environment.popLevel();
    } else {
        if (null != object) {
            throw new VilException("loop must iterate over collection", VilException.ID_SEMANTIC);
        }
    }
    return checkContentStatement(bodyResult, NullValue.VALUE, loop.getLoopStatement());
}
/**
 * Checks the current value for the need of correction with respect to the last content statement.
 * The no-value marker is compared by identity, not by {@code equals}.
 *
 * @param currentValue the current value
 * @param noValue the object indicating no value in this context
 * @param check the template element/statement to check for a content statement
 * @return <code>currentValue</code> or {@link #EMPTY_CONTENT}
 */
private Object checkContentStatement(Object currentValue, Object noValue, ITemplateElement check) {
    return checkContentStatement(currentValue, noValue, isContentStatement(check));
}
/**
 * Checks the current value for the need of correction with respect to the last
 * content statement: a content statement that produced the no-value marker
 * (identity comparison) is replaced by {@link #EMPTY_CONTENT}.
 *
 * @param currentValue the current value
 * @param noValue the object indicating no value in this context
 * @param isContentStatement whether the relevant element is a content statement
 * @return <code>currentValue</code> or {@link #EMPTY_CONTENT}
 */
private Object checkContentStatement(Object currentValue, Object noValue, boolean isContentStatement) {
    if (isContentStatement && currentValue == noValue) {
        return EMPTY_CONTENT;
    }
    return currentValue;
}
/**
 * Returns whether the template element or the last statement within is a content statement.
 *
 * @param elt the element (may be <b>null</b>)
 * @return <code>true</code> for content statement, <code>false</code> else
 *     (also if <code>elt</code> is <b>null</b>)
 */
private static final boolean isContentStatement(ITemplateElement elt) {
    if (null == elt) {
        return false;
    }
    return elt.endsWithContentStatement();
}
/**
 * Appends the given text to the top buffer of {@link #defContentStack}.
 *
 * @param string the string to add to the current content.
 */
private void appendContent(String string) {
    String buffer = defContentStack.pop();
    defContentStack.push(null == buffer ? string : buffer + string);
}
/**
 * Executes a switch statement: evaluates the selector once, binds it to the
 * implicit variable, then runs the first alternative whose condition equals
 * the selector (or the default alternative).
 *
 * @param swtch the switch statement to execute
 * @return the value of the taken alternative, or <b>null</b> if none matched
 * @throws VilException in case of evaluation problems
 */
@Override
public Object visitSwitch(SwitchStatement swtch) throws VilException {
    Object value;
    environment.pushLevel(); // scope for the implicit selector variable
    Object select = swtch.getSwitchExpression().accept(this);
    environment.addValue(swtch.getImplicitVariable(), select);
    int found = -1;
    value = null;
    // currently no indentation as content is not allowed in switch
    for (int a = 0; found < 0 && a < swtch.getAlternativeCount(); a++) {
        SwitchStatement.Alternative alt = swtch.getAlternative(a);
        boolean take = alt.isDefault();
        if (!take) {
            Expression cond = alt.getCondition();
            Object condValue = cond.accept(this);
            take = equals(condValue, select);
        }
        if (take) {
            value = alt.getValue().accept(this);
            found = a;
        }
    }
    // currently no indentation as content is not allowed in switch
    tracer.visitedSwitch(select, found, value);
    environment.popLevel();
    return value;
}
/**
 * Checks for equality among the given <code>condValue</code> (condition value) and the
 * given <code>exprValue</code>, whereby <code>exprValue</code> may be an {@link IvmlElement} and, thus,
 * implicitly casted to the right value. This method is intended where common object values need to
 * be compared and no VIL/VTL casts may happen.
 *
 * @param condValue the condition value (must not be <b>null</b>; it is dereferenced directly)
 * @param exprValue the expression value
 * @return <code>true</code> if the values are equal <code>false</code> else
 */
private boolean equals(Object condValue, Object exprValue) {
    boolean result = condValue.equals(exprValue);
    // Fall back to comparing against the IVML element's typed value, selected
    // by the runtime type of the condition value.
    if (!result && exprValue instanceof IvmlElement) {
        IvmlElement iElt = (IvmlElement) exprValue;
        if (condValue instanceof String) {
            result = condValue.equals(iElt.getStringValue());
        } else if (condValue instanceof Integer) {
            result = condValue.equals(iElt.getIntegerValue());
        } else if (condValue instanceof Boolean) {
            result = condValue.equals(iElt.getBooleanValue());
        } else if (condValue instanceof Double) {
            result = condValue.equals(iElt.getRealValue());
        } else if (condValue instanceof EnumValue) {
            result = condValue.equals(iElt.getEnumValue());
        }
    }
    return result;
}
/**
 * Evaluates a content (text) statement: emits pending line ends, cleans
 * empty-content markers, removes/inserts indentation as configured and appends
 * the result to the current def-content buffer.
 *
 * @param cnt the content statement to evaluate
 * @return the accumulated buffer content (or <b>null</b> if the statement produced none)
 * @throws VilException if the indentation expression does not yield an integer
 */
@Override
public Object visitContentStatement(ContentStatement cnt) throws VilException {
    contentNestingLevel++;
    lastContentFormatted = false;
    if (null != lastContent && lastContent.needsLineEnd(true)) { // if sequence, print line ending by default
        appendContent(getLineEnd());
    }
    lastContent = null; // handled, reset
    String content = (String) cnt.getContent().accept(this);
    if (null != content) {
        // search for \r\n, \r, \n followed by indentation*step whitespaces or tabs +1
        content = cleanLineEnd(content, true);
    }
    if (null != content) {
        int indentation = environment.getIndentation();
        if (indentation > 0) {
            int indent = indentation + getAdditionalIndentation();
            content = IndentationUtils.removeIndentation(content, indent, getTabEmulation());
        }
        int forced = 0;
        if (null != cnt.getIndentExpression()) {
            Object val = cnt.getIndentExpression().accept(this);
            if (val instanceof Integer) {
                forced = ((Integer) val).intValue();
                if (forced > 0) { // precondition of insertIndentation
                    content = IndentationUtils.insertIndentation(content, forced, contentNestingLevel > 1);
                    lastContentFormatted = true;
                }
            } else {
                throw new VilException("indentation value is no integer", VilException.ID_SEMANTIC);
            }
        }
        String topContent = defContentStack.pop();
        if (0 == topContent.length()) {
            topContent = content;
        } else {
            topContent = IndentationUtils.appendWithLastIndentation(topContent, content,
                contentNestingLevel == 1 || lastContentNestingLevel == contentNestingLevel);
        }
        defContentStack.push(topContent);
        content = topContent; // replace by all for end of def/return
    }
    lastContentNestingLevel = contentNestingLevel;
    contentNestingLevel--; // fix: restores the truncated decrement balancing the ++ at method entry
    lastContent = cnt;
    lastContentLineEndType = cnt.getLineEndType();
    return content;
}
@Override
protected boolean lastContentReplaceEmptyLine() {
boolean result = lastContentLineEndType == LineEndType.NO_LINE_END;
lastContentLineEndType = null;
return result;
}
/**
* Removes the empty line marker {@link #EMPTY_CONTENT} and considers surrounding indentation for removal.
*
* @param content the content to be considered
* @param includeIndentation whether indentation shall be taken into account
* @return the modified content
*/
private static String cleanLineEnd(String content, boolean includeIndentation) {
int pos;
do {
pos = content.indexOf(EMPTY_CONTENT);
if (pos >= 0) {
int end = pos + EMPTY_CONTENT.length();
if (includeIndentation) {
while (end < content.length() && IndentationUtils.isLineEnd(content.charAt(end))) {
end++;
}
}
int start = pos;
if (start > 0) {
start
if (includeIndentation) {
while (start >= 0 && IndentationUtils.isIndentationChar(content.charAt(start))) {
start
}
}
if (start < 0) {
start = 0;
}
}
content = content.substring(0, start) + content.substring(end);
}
} while (pos >= 0);
return content;
}
@Override
protected String appendInCompositeExpression(String s1, Expression e1, Object v1, String s2, Expression e2) {
String result;
boolean format = false;
boolean clear = false;
boolean isS1 = false;
IndentationConfiguration config = environment.getIndentationConfiguration();
if (e1 instanceof ConstantExpression && e2 instanceof TemplateCallExpression) {
// do formatting only in presence of a ${template call} and if not explicitly formatted
format = !lastContentFormatted;
if (format) {
if (s1.length() > 0) {
// avoid in-place indentation
char last = s1.charAt(s1.length() - 1);
format = IndentationUtils.isLineEnd(last) || IndentationUtils.isIndentationChar(last);
}
}
} else if (e2 instanceof InContentExpression) {
// an in-content expression leading to an empty information shall be wiped out including prepared indent
clear = null != config && s2.isEmpty();
} else if (e1 instanceof InContentExpression) {
// if the last in-content expression led to an empty string and the following is an indentation, leave
// out the indentation
if (v1.toString().isEmpty() && IndentationUtils.isIndentationString(s2)) {
format = false;
isS1 = true;
}
}
String hint = e2.getFormattingHint();
boolean clearLE = false;
if (null != hint) {
if ("e".equals(hint)) {
if (s2.length() == 0) {
clear = true;
clearLE = true;
} else {
format = true;
}
} else if ("<".equals(hint)) {
s1 = IndentationUtils.removeLastIndentation(s1, false);
clear = false;
format = false;
isS1 = false;
}
}
if (format) {
int indentation = environment.getIndentation();
indentation -= getIndentationStep(); // we are within/among expressions, one step out
if (indentation > 0) {
int indent = indentation + getAdditionalIndentation();
if (IndentationUtils.allLinesStartWith(s2, indent)) {
s2 = IndentationUtils.removeIndentation(s2, indent, getTabEmulation());
}
}
result = IndentationUtils.appendWithLastIndentation(s1, s2, false);
} else if (clear) {
result = IndentationUtils.removeLastIndentation(s1, clearLE);
} else if (isS1) {
result = s1;
} else {
result = super.appendInCompositeExpression(s1, e1, v1, s2, e2);
}
return result;
}
/**
* Returns the additional indentation as configured.
*
* @return the additional indentation or {@code 0} if no configuration is present
*/
private int getAdditionalIndentation() {
IndentationConfiguration icfg = environment.getIndentationConfiguration();
return null == icfg ? 0 : icfg.getAdditional();
}
private int getTabEmulation() {
IndentationConfiguration icfg = environment.getIndentationConfiguration();
return null == icfg ? 0 : icfg.getTabEmulation();
}
private int getIndentationStep() {
IndentationConfiguration icfg = environment.getIndentationConfiguration();
return null == icfg ? 0 : icfg.getIndentationStep();
}
/**
* Returns the current line end based on the formatting configuration of the actual context model.
*
* @return the actual line end (fallback is the Java line end)
*/
private String getLineEnd() {
FormattingConfiguration cfg = null;
ITypedModel model = environment.getContextModel();
if (model instanceof Template) {
Template template = (Template) model;
cfg = template.getFormattingConfiguration();
}
return FormattingConfiguration.getLineEnding(cfg);
}
@Override
public Object visitConstantExpression(ConstantExpression cst) throws VilException {
Object result = cst.getValue();
// we have to care for $name and ${} but only in strings
if (result instanceof String) {
result = StringReplacer.substitute(result.toString(), new Resolver(environment),
getExpressionParser(), this, null);
}
return result;
}
@Override
public Object visitJavaExtension(JavaExtension ext) throws VilException {
// operations shall already be resolvable and handled by expressions
return null;
}
@Override
public Object visitTemplateCallExpression(TemplateCallExpression call) throws VilException {
Object result;
if (stop) {
result = null;
} else {
int indentation = environment.getIndentation();
environment.setIndentationSteps(1); // reset to template level
result = visitModelCallExpression(call);
environment.setIndentation(indentation);
}
return result;
}
@Override
protected Object executeModelCall(Def def) throws VilException {
return def.accept(this);
}
@Override
protected ModelCallExpression<VariableDeclaration, Template, Def> createModelCall(Template model, Def operation,
CallArgument... arguments) throws VilException {
return new TemplateCallExpression(model, operation, arguments);
}
@Override
protected Def dynamicDispatch(Def operation, Object[] args, IArgumentProvider argumentProvider,
boolean enableParentScope) {
return AbstractCallExpression.dynamicDispatch(operation, args, Def.class, environment.getTypeRegistry(),
argumentProvider, enableParentScope ? environment.getMostSpecificContextModel() : null);
}
@Override
protected void handleParameterInSequence(IResolvableModel<VariableDeclaration, Template> model,
Map<String, VariableDeclaration> varMap) throws VilException {
if (model.getParameterCount() >= 2) {
// check default sequence instead, config, target
boolean ok = IvmlTypes.configurationType().isAssignableFrom(model.getParameter(0).getType());
ok &= ArtifactTypes.artifactType().isAssignableFrom(model.getParameter(1).getType());
if (ok) {
assignModelParameter(model, model);
for (int p = 0; p < 2; p++) {
varMap.remove(model.getParameter(p).getName());
}
}
}
// remove anyway
varMap.remove(PARAM_CONFIG_SURE);
varMap.remove(PARAM_TARGET_SURE);
}
@Override
protected void assignModelParameter(IResolvableModel<VariableDeclaration, Template> targetModel,
IResolvableModel<VariableDeclaration, Template> srcModel) throws VilException {
// take sure values
if (srcModel.getParameterCount() >= 1) {
setModelArgument(srcModel.getParameter(0), getParameter(PARAM_CONFIG_SURE));
}
if (srcModel.getParameterCount() >= 2) {
setModelArgument(srcModel.getParameter(1), getParameter(PARAM_TARGET_SURE));
}
evaluateModelParameter(targetModel, srcModel, 2);
}
@Override
protected IExpressionParser<Resolver> getExpressionParser() {
return ExpressionParserRegistry.getExpressionParser(LANGUAGE);
}
@Override
public Object visitResolvableOperationCallExpression(ResolvableOperationCallExpression ex) throws VilException {
Object result;
Object val = environment.getValue(ex.getVariable());
int indentation = environment.getIndentation();
environment.setIndentationSteps(1); // reset to template level
if (val instanceof Def) {
Def def = (Def) val;
result = proceedModelCall(def, (Template) environment.getContextModel(), ex,
ex.isPlaceholder(), false);
} else {
result = super.visitResolvableOperationCallExpression(ex);
}
environment.setIndentation(indentation);
return result;
}
@Override
public Object visitTypedef(Typedef typedef) throws VilException {
return null; // typedefs are processed during parsing
}
@Override
public Object visitWhile(WhileStatement stmt) throws VilException {
Expression condition = stmt.getConditionExpression();
boolean executeLoop = false;
environment.pushLevel();
Object bodyResult = NullValue.VALUE;
do {
Object conditionResult = condition.accept(this);
executeLoop = (conditionResult instanceof Boolean && (Boolean) conditionResult);
if (executeLoop) {
tracer.visitWhileBody();
ITemplateElement loopStmt = stmt.getLoopStatement();
increaseIndentation(loopStmt);
bodyResult = loopStmt.accept(this);
if (null == bodyResult) {
executeLoop = false;
}
decreaseIndentation(loopStmt);
tracer.visitedWhileBody();
}
} while (executeLoop && !stop);
environment.popLevel();
return checkContentStatement(bodyResult, NullValue.VALUE, stmt.getLoopStatement());
}
@Override
public void stop() {
stop = true;
}
@Override
public Object visitFlush(FlushStatement stmt) throws VilException {
tracer.visitFlush();
storeContent();
tracer.visitedFlush();
return null;
}
/**
* Explicitly stores the concatenated results of the content statements to the target artifact.
*
* @throws VilException if storing the content fails for some reason
*/
public void storeContent() throws VilException {
Object tgt = getParameter(TemplateLangExecution.PARAM_TARGET_SURE);
if (null == tgt) {
tgt = getParameter(TemplateLangExecution.PARAM_TARGET);
}
if (tgt instanceof IArtifact && mainOut instanceof StringWriter) {
storeContent((IArtifact) tgt, (StringWriter) mainOut);
}
}
/**
* Explicitly stores the concatenated results of the content statements to the target artifact.
*
* @param target the target artifact
* @param out the output writer to take the produced content from
* @throws VilException if storing the content fails for some reason
*/
public static void storeContent(IArtifact target, StringWriter out) throws VilException {
String tmp = out.toString();
if (tmp.length() > 0) {
target.getText().setText(tmp);
}
target.store();
}
@Override
public Object visitContentAlternativeExpression(ContentAlternativeExpression ex) throws VilException {
Object result = null;
Object cond = ex.getCondition().accept(this);
if (Boolean.TRUE.equals(cond)) {
result = evaluateContentExpression(ex.thenEx());
} else if (Boolean.FALSE.equals(cond)) {
if (ex.getElseExpressionsCount() > 0) {
result = evaluateContentExpression(ex.elseEx());
} else {
result = "";
}
}
return result;
}
/**
* Evaluates a separator expression.
*
* @param ex the expression (may be <b>null</b>)
* @param deflt the default value to be used as result if <code>ex</code> is <b>null</b>
* @return the separator, <code>deflt</code> if <code>ex</code> is <b>null</b> or the evaluation leads to undefined,
* the separator string else
* @throws VilException in case that the evaluation fails
*/
private String evaluateSeparator(Expression ex, String deflt) throws VilException {
String separator = deflt;
if (null != ex) {
Object tmp = ex.accept(this);
if (null == tmp) {
separator = deflt;
} else {
separator = tmp.toString();
}
}
return separator;
}
@Override
public Object visitContentLoopExpression(ContentLoopExpression ex) throws VilException {
String result = null;
String separator = evaluateSeparator(ex.getSeparator(), "");
String endSeparator = evaluateSeparator(ex.getEndSeparator(), "");
Object init = ex.getInit().accept(this);
Iterator<?> iter;
if (init instanceof Collection) {
iter = ((Collection<?>) init).iterator();
} else if (init instanceof java.util.Collection) {
iter = ((java.util.Collection<?>) init).iterator();
} else if (init instanceof ContainerValue) {
iter = ((ContainerValue) init).iterator();
} else {
iter = null;
}
if (null != iter) {
result = "";
environment.pushLevel();
VariableDeclaration decl = ex.getIterator();
environment.addValue(decl, null);
while (iter.hasNext()) {
environment.setValue(decl, iter.next());
String tmp = evaluateContentExpression(ex);
if (null == tmp) {
break;
}
if (result.length() == 0) {
result = tmp;
} else {
result = result + separator + tmp;
}
}
environment.popLevel();
if (result.length() > 0) {
result = result + endSeparator;
}
}
return result;
}
@Override
public Object visitContentVarDeclExpression(ContentVarDeclExpression ex) throws VilException {
VariableDeclaration var = ex.getVariable();
Object value;
if (null != var.getExpression()) {
value = var.getExpression().accept(this);
} else {
value = null; // shall not occur due to required syntax in StringParser
}
environment.addValue(var, value);
return ""; // replace me by nothing
}
@Override
public Object visitContentImportExpression(ContentImportExpression ex) throws VilException {
try {
Template current = (Template) environment.getContextModel();
AvailableModels<Template> available = TemplateModel.INSTANCE.availableModels();
ModelInfo<Template> currentInfo = available.getModelInfo(current);
URI baseUri = null;
if (null != currentInfo) {
baseUri = currentInfo.getLocation();
}
if (null == baseUri) {
baseUri = getFallbackBaseURI();
}
Template resolved = TemplateModel.INSTANCE.resolve(ex.getTemplate(), ex.getVersionRestriction(),
baseUri, environment);
if (null != resolved) {
ModelImport<Template> imp = new ModelImport<Template>(ex.getTemplate());
imp.setResolved(resolved);
current.addRuntimeImport(imp);
}
} catch (ModelManagementException e) {
throw new VilException(e);
}
return ""; // replace me by nothing
}
/**
* Returns the base URI for template resolution in this execution if there is no template to obtain it from.
*
* @return the base URI
*/
protected URI getFallbackBaseURI() {
return null;
}
@Override
public Object visitCompound(Compound compound) throws VilException {
return null; // declaration is not executed
}
}
|
package org.nuxeo.ecm.platform.pictures.tiles.service;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.nuxeo.ecm.core.api.Blob;
import org.nuxeo.ecm.core.api.ClientException;
import org.nuxeo.ecm.core.api.DocumentModel;
import org.nuxeo.ecm.core.api.impl.blob.StringBlob;
import org.nuxeo.ecm.platform.picture.api.ImagingService;
import org.nuxeo.ecm.platform.picture.api.MetadataConstants;
import org.nuxeo.ecm.platform.picture.api.adapters.PictureResourceAdapter;
import org.nuxeo.ecm.platform.pictures.tiles.gwt.client.TilingPreviewConstant;
import org.nuxeo.ecm.platform.preview.adapter.AbstractPreviewer;
import org.nuxeo.ecm.platform.preview.adapter.ImagePreviewer;
import org.nuxeo.ecm.platform.preview.adapter.MimeTypePreviewer;
import org.nuxeo.ecm.platform.preview.api.PreviewException;
import org.nuxeo.runtime.api.Framework;
/**
* @author Alexandre Russel
*
*/
public class TiledImagePreviewer extends AbstractPreviewer implements
MimeTypePreviewer {
private static final Log log = LogFactory.getLog(TiledImagePreviewer.class);
protected static final String ORIGINAL_VIEW_NAME = "Original";
public List<Blob> getPreview(Blob blob, DocumentModel dm) throws PreviewException {
if(useTiling(blob, dm)) {
List<Blob> blobResults = new ArrayList<Blob>();
String htmlFile = getString().replace("$repoId$", dm.getRepositoryName());
htmlFile = htmlFile.replace("$docId$", dm.getId());
htmlFile = htmlFile.replace("$tileWidth$", "" + 200);
htmlFile = htmlFile.replace("$tileHeight$", "" + 200);
htmlFile = htmlFile.replace("$maxTiles$", "" + 2);
Blob mainBlob = new StringBlob(htmlFile);
mainBlob.setFilename("index.html");
mainBlob.setMimeType("text/html");
blob.setFilename("image");
blobResults.add(mainBlob);
blobResults.add(blob);
return blobResults;
}
return new ImagePreviewer().getPreview(blob, dm);
}
protected boolean useTiling(Blob blob, DocumentModel dm) {
Long width = Long.valueOf(0);
Long height = Long.valueOf(0);
if ("Picture".equals(dm.getType())) {
try {
PictureResourceAdapter adapter = dm.getAdapter(PictureResourceAdapter.class);
String xpath = adapter.getViewXPath(ORIGINAL_VIEW_NAME);
if (xpath == null) {
xpath = adapter.getFirstViewXPath();
}
width = (Long) dm.getPropertyValue(xpath + "width");
height = (Long) dm.getPropertyValue(xpath + "height");
} catch (ClientException e) {
log.error("Failed to get picture dimensions", e);
}
} else {
ImagingService imagingService = Framework.getLocalService(ImagingService.class);
if (imagingService != null) {
try {
Map<String, Object> imageMetadata = imagingService.getImageMetadata(blob.getStream());
width = ((Integer) imageMetadata.get(MetadataConstants.META_WIDTH)).longValue();
height = ((Integer) imageMetadata.get(MetadataConstants.META_HEIGHT)).longValue();
} catch (IOException e) {
log.error("Failed to get picture dimensions", e);
}
}
}
Integer widthThreshold = Integer.valueOf(PictureTilingComponent.getEnvValue("WidthThreshold", "1200"));
Integer heightThreshold = Integer.valueOf(PictureTilingComponent.getEnvValue("HeightThreshold", "1200"));
return width > widthThreshold || height > heightThreshold;
}
private String getString() {
StringWriter writer = new StringWriter();
writer.write("<html><head></head><body>");
writer.write("<script type=\"text/javascript\">");
writer.write("var serverSetting = {");
writer.write("repoId : '$repoId$' ,");
writer.write("docId : '$docId$'");
writer.write("};");
writer.write("</script>");
writer.write("<script type=\"text/javascript\"");
writer.write("src=\"/nuxeo/org.nuxeo.ecm.platform.pictures.tiles.gwt.TilingPreview/org.nuxeo.ecm.platform.pictures.tiles.gwt.TilingPreview.nocache.js\">");
writer.write("</script>");
appendPreviewSettings(writer);
writer.write("<div id=\"display\"></div>");
writer.write("</body></html>");
return writer.toString();
}
private void appendPreviewSettings(StringWriter sb) {
sb.append("<script type=\"text/javascript\">");
sb.append("var previewSettings = { ");
sb.append("imageOnly: \"true\", ");
sb.append("multiImageAnnotation: \"true\", ");
sb.append("xPointerFilterPath: \"" + TilingPreviewConstant.ORG_NUXEO_ECM_PLATFORM_PICTURES_TILES_GWT_CLIENT_XPOINTER_FILTER + "\", ");
sb.append("pointerAdapter: \"" + TilingPreviewConstant.ORG_NUXEO_ECM_PLATFORM_PICTURES_TILES_GWT_CLIENT_POINTER_ADAPTER + "\", ");
sb.append("annotationDecoratorFunction: \"" + TilingPreviewConstant.ORG_NUXEO_ECM_PLATFORM_PICTURES_TILES_GWT_CLIENT_UPDATE_ANNOTATED_DOCUMENT + "\"");
sb.append("}");
sb.append("</script>");
}
}
|
package gov.nih.nci.cacore.workbench.portal.application;
import gov.nih.nci.cacore.workbench.portal.viewer.SemanticIntegrationViewer;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import javax.swing.JButton;
import javax.swing.JEditorPane;
import javax.swing.JInternalFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.text.html.HTMLEditorKit;
import org.apache.log4j.Logger;
public abstract class ExternalWorkflowApplicationComponent extends JInternalFrame {
private static final Logger log = Logger.getLogger(ExternalWorkflowApplicationComponent.class);
private static final long serialVersionUID = 1L;
private Color defaultBackgroundColor = Color.WHITE;
/*
* Primary Panel definitions
*/
private JPanel mainPanel = null;
protected JPanel contentPanel = null;
private JPanel buttonPanel = null;
private JSplitPane mainSplitPane = null;
public ExternalWorkflowApplicationComponent() {
//setSize(600, 700);
setMaximizable(true);
setIconifiable(false);
setClosable(true);
setResizable(true);
}
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
protected JPanel getButtonPanel(JButton[] buttons) {
if (buttonPanel == null) {
buttonPanel = new JPanel();
buttonPanel.setLayout(new GridBagLayout());
int gridy = 0;
for (JButton button:buttons){
log.debug("* * * adding button: "+button.getText()+" for gridy: "+gridy);
Dimension minimumSize = new Dimension(151,31);
button.setMinimumSize(minimumSize);
button.setPreferredSize(minimumSize);
buttonPanel.add(button, getButtonContraints(gridy++));
}
}
return buttonPanel;
}
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
private GridBagConstraints getButtonContraints(int gridy) {
GridBagConstraints gridBagConstraints = new GridBagConstraints();
gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = gridy;
gridBagConstraints.anchor = java.awt.GridBagConstraints.CENTER;
return gridBagConstraints;
}
protected JSplitPane getMainSplitPanel(String description,JButton[] buttons){
if (mainSplitPane == null){
JScrollPane descriptionScrollPane = new JScrollPane(getTextPane(description));
JScrollPane buttonPanelScrollPane = new JScrollPane(getButtonPanel(buttons));
//Create a split pane with the two scroll panes in it.
mainSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT,
descriptionScrollPane, buttonPanelScrollPane);
mainSplitPane.setOneTouchExpandable(false);
mainSplitPane.setOneTouchExpandable(false);
mainSplitPane.setDividerLocation(650);
mainSplitPane.setDividerSize(5);
}
return mainSplitPane;
}
protected abstract JPanel getContentPanel();
/**
* This method initializes jPanel
*
* @return javax.swing.JPanel
*/
protected JPanel getMainPanel() {
if (mainPanel == null) {
GridBagConstraints gridBagConstraints10 = new GridBagConstraints();
gridBagConstraints10.fill = GridBagConstraints.BOTH;
gridBagConstraints10.weighty = 1.0;
gridBagConstraints10.weightx = 1.0;
mainPanel = new JPanel();
mainPanel.setLayout(new GridBagLayout());
mainPanel.add(getContentPanel(), gridBagConstraints10);
}
return mainPanel;
}
private JEditorPane getTextPane(String text){
JEditorPane textPane = new JEditorPane();
textPane.setEditable(false);
textPane.setEnabled(true);
textPane.setMargin(new Insets(10, 10, 10, 10));
HTMLEditorKit htmlEditorKit = new HTMLEditorKit();
//htmlEditorKit.setStyleSheet();
textPane.setEditorKit(htmlEditorKit);
textPane.setBackground(defaultBackgroundColor);
textPane.setText(text);
return textPane;
}
}
|
package com.yelli;
import java.util.Date;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.Handler;
import android.provider.Settings;
import android.provider.Settings.SettingNotFoundException;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
import android.widget.Toast;
import com.gc.materialdesign.views.ButtonRectangle;
import com.gc.materialdesign.widgets.ProgressDialog;
import com.tripadvisor.seekbar.CircularClockSeekBar;
import com.tripadvisor.seekbar.CircularClockSeekBar.OnSeekBarChangeListener;
import com.yelli.background.BackgroundService;
import com.yelli.background.ServiceResultReceiver;
import com.yelli.background.ServiceResultReceiver.Receiver;
public class YelliActivity extends ActionBarActivity implements
OnClickListener, Receiver {
private ButtonRectangle trackButton;
private ButtonRectangle shareButton;
private ButtonRectangle createNewButton;
private CircularClockSeekBar seekBar;
private TextView seekBarText;
private TextView helpText;
private String TAG = getClass().getSimpleName();
private String shareLink;
private long timeLimit = 0;
private ProgressDialog pdia;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.yelli_main);
trackButton = (ButtonRectangle) findViewById(R.id.submitButton);
shareButton = (ButtonRectangle) findViewById(R.id.shareButton);
createNewButton = (ButtonRectangle) findViewById(R.id.createNewButton);
seekBar = (CircularClockSeekBar) findViewById(R.id.seekBar);
seekBarText = (TextView) findViewById(R.id.seekBarText);
helpText = (TextView) findViewById(R.id.helpText);
trackButton.setOnClickListener(this);
shareButton.setOnClickListener(this);
createNewButton.setOnClickListener(this);
long currentlyRunningTrackTimelimit = getSharedPreferences(
Utils.SHARED_PREF_NAME, Context.MODE_PRIVATE).getLong(
Utils.TIME_LIMIT_PREF, 0);
if (Utils.isCurrentlyBeingTracked(currentlyRunningTrackTimelimit)) {
showTrackId(getSharedPreferences(Utils.SHARED_PREF_NAME,
MODE_PRIVATE).getString(Utils.TRACKING_ID_PREF, ""));
}
// Calendar calendar = Calendar.getInstance();
// DateTime minTime = new DateTime(calendar.get(Calendar.YEAR),
// calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH),
// calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE));
// calendar.add(Calendar.HOUR, 2);
// DateTime maxTime = new DateTime(calendar.get(Calendar.YEAR),
// calendar.get(Calendar.MONTH), calendar.get(Calendar.DAY_OF_MONTH),
// calendar.get(Calendar.HOUR_OF_DAY), calendar.get(Calendar.MINUTE));
seekBar.setMaxProgress(120);
seekBar.setSeekBarChangeListener(new OnSeekBarChangeListener() {
@Override
public void onStopTrackingTouch(CircularClockSeekBar seekBar) {
// TODO Auto-generated method stub
}
@Override
public void onStartTrackingTouch(CircularClockSeekBar seekBar) {
// TODO Auto-generated method stub
}
@Override
public void onProgressChanged(CircularClockSeekBar seekBar,
int progress, boolean fromUser) {
if (progress == 120) {
progress = 0;
}
if (progress < 60) {
seekBarText.setText("Allow tracking for the next "
+ progress + " mins");
} else {
int mins = progress - 60;
seekBarText
.setText("Allow tracking for the next 1 hour and "
+ mins + " mins");
}
timeLimit = progress * 60000;
Log.d(TAG, "Date chosen : " + new Date(timeLimit).toString());
}
@Override
public void onAnimationComplete(CircularClockSeekBar seekBar) {
}
});
}
private void showTrackId(String trackId) {
seekBarText.setVisibility(View.GONE);
seekBar.setVisibility(View.GONE);
trackButton.setVisibility(View.GONE);
findViewById(R.id.shareButtonLayout).setVisibility(View.VISIBLE);
shareLink = Utils.yelliWebPath + "?trackId=" + trackId;
helpText.setText("Sharing the link will let the chosen ones to see where you are");
}
private void createNewTrackId() {
seekBarText.setVisibility(View.VISIBLE);
seekBar.setVisibility(View.VISIBLE);
trackButton.setVisibility(View.VISIBLE);
findViewById(R.id.shareButtonLayout).setVisibility(View.GONE);
shareLink = "";
helpText.setText("Set the time till which you wish to be tracked by the people chosen by yourself");
}
@Override
public void onClick(View v) {
if (v.getId() == R.id.shareButton) {
shareIntent(shareLink);
} else if (v.getId() == R.id.submitButton) {
if (timeLimit < (10 * 60000)) {
Toast.makeText(this,
"Please select a time period more than 10 minutes ",
Toast.LENGTH_SHORT).show();
return;
}
int locationMode = 0;
try {
locationMode = getLocationMode();
} catch (SettingNotFoundException e) {
e.printStackTrace();
}
if (locationMode != 3) {
Toast.makeText(this, "Please enable GPS", Toast.LENGTH_SHORT).show();
startActivity(new Intent(
Settings.ACTION_LOCATION_SOURCE_SETTINGS));
return;
}
pdia = new ProgressDialog(this, "Connecting");
pdia.setCancelable(false);
pdia.show();
// create intent and create tracking id
ServiceResultReceiver receiver = new ServiceResultReceiver(
new Handler());
receiver.setReceiver(this);
Intent intent = new Intent(Intent.ACTION_SYNC, null, this,
BackgroundService.class);
intent.putExtra("fromActivity", true);
intent.putExtra("timelimit", timeLimit);
intent.putExtra("receiver", receiver);
startService(intent);
} else if (v.getId() == R.id.createNewButton) {
createNewTrackId();
}
}
private void shareIntent(String link) {
Intent share = new Intent(android.content.Intent.ACTION_SEND);
share.setType("text/plain");
share.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
// Add data to the intent, the receiving app will decide
// what to do with it.
share.putExtra(Intent.EXTRA_SUBJECT, "Yelli");
share.putExtra(Intent.EXTRA_TEXT, link);
startActivity(Intent.createChooser(share, "Share link!"));
}
@Override
public void onReceiveResult(int resultCode, Bundle resultData) {
String errorMessage = resultData.getString("error");
pdia.cancel();
if (errorMessage == null || errorMessage.equals("")) {
String trackId = resultData.getString("trackId");
showTrackId(trackId);
} else {
// error. do something
Toast.makeText(this, errorMessage, Toast.LENGTH_SHORT).show();
}
}
@SuppressLint("InlinedApi")
private int getLocationMode() throws SettingNotFoundException {
int apiLevel = Integer.valueOf(android.os.Build.VERSION.SDK_INT);
if (apiLevel < 19) {
LocationManager manager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);
boolean isGpsEnabled = manager
.isProviderEnabled(LocationManager.GPS_PROVIDER);
if (isGpsEnabled) {
return 3;
} else {
return 0;
}
} else {
return Settings.Secure.getInt(getContentResolver(),
Settings.Secure.LOCATION_MODE);
}
}
}
|
//$HeadURL$
package org.deegree.feature.persistence.simplesql;
import static java.lang.System.currentTimeMillis;
import static org.deegree.feature.persistence.query.Query.QueryHint.HINT_SCALE;
import static org.slf4j.LoggerFactory.getLogger;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;
import javax.xml.namespace.QName;
import org.deegree.commons.annotations.LoggingNotes;
import org.deegree.commons.config.DeegreeWorkspace;
import org.deegree.commons.config.ResourceInitException;
import org.deegree.commons.jdbc.ConnectionManager;
import org.deegree.commons.jdbc.ResultSetIterator;
import org.deegree.commons.jdbc.ConnectionManager.Type;
import org.deegree.commons.utils.Pair;
import org.deegree.cs.CRSUtils;
import org.deegree.cs.coordinatesystems.ICRS;
import org.deegree.cs.exceptions.TransformationException;
import org.deegree.cs.exceptions.UnknownCRSException;
import org.deegree.cs.persistence.CRSManager;
import org.deegree.feature.Feature;
import org.deegree.feature.GenericFeature;
import org.deegree.feature.persistence.FeatureStore;
import org.deegree.feature.persistence.FeatureStoreException;
import org.deegree.feature.persistence.FeatureStoreTransaction;
import org.deegree.feature.persistence.lock.LockManager;
import org.deegree.feature.persistence.query.CombinedResultSet;
import org.deegree.feature.persistence.query.FeatureResultSet;
import org.deegree.feature.persistence.query.FilteredFeatureResultSet;
import org.deegree.feature.persistence.query.IteratorResultSet;
import org.deegree.feature.persistence.query.Query;
import org.deegree.feature.property.GenericProperty;
import org.deegree.feature.property.Property;
import org.deegree.feature.property.SimpleProperty;
import org.deegree.feature.types.ApplicationSchema;
import org.deegree.feature.types.FeatureType;
import org.deegree.feature.types.GenericFeatureType;
import org.deegree.feature.types.property.GeometryPropertyType;
import org.deegree.feature.types.property.PropertyType;
import org.deegree.feature.types.property.SimplePropertyType;
import org.deegree.feature.utils.DBUtils;
import org.deegree.filter.FilterEvaluationException;
import org.deegree.geometry.Envelope;
import org.deegree.geometry.Geometry;
import org.deegree.geometry.GeometryFactory;
import org.deegree.geometry.GeometryTransformer;
import org.deegree.geometry.io.WKBReader;
import org.deegree.geometry.io.WKTReader;
import org.deegree.geometry.io.WKTWriter;
import org.deegree.gml.GMLObject;
import org.slf4j.Logger;
import com.vividsolutions.jts.io.ParseException;
/**
* {@link FeatureStore} implementation that is backed by an SQL database and configured by providing an SQL statement /
* an SQL connection.
*
* @see FeatureStore
*
* @author <a href="mailto:schmitz@lat-lon.de">Andreas Schmitz</a>
* @author last edited by: $Author$
*
* @version $Revision$, $Date$
*/
@LoggingNotes(info = "logs problems when connecting to the DB/getting data from the DB", debug = "logs the SQL statements sent to the SQL server", trace = "logs stack traces")
public class SimpleSQLFeatureStore implements FeatureStore {
static final Logger LOG = getLogger( SimpleSQLFeatureStore.class );
private final QName ftName;
private boolean available = false;
private String connId;
ICRS crs;
private ApplicationSchema schema;
private GeometryFactory fac = new GeometryFactory();
GenericFeatureType featureType;
private String bbox;
private GeometryTransformer transformer;
private TreeMap<Integer, String> lods;
private Pair<Long, Envelope> cachedEnvelope = new Pair<Long, Envelope>();
private DeegreeWorkspace workspace;
/**
* @param connId
* @param crs
* @param sql
* @param ftLocalName
* @param ftNamespace
* @param ftPrefix
* @param bbox
* @param lods
*/
public SimpleSQLFeatureStore( String connId, String crs, String sql, String ftLocalName, String ftNamespace,
String ftPrefix, String bbox, List<Pair<Integer, String>> lods ) {
this.connId = connId;
sql = sql.trim();
if ( sql.endsWith( ";" ) ) {
sql = sql.substring( 0, sql.length() - 1 );
}
this.bbox = bbox;
// TODO allow null namespaces / empty prefix
// NOTE: verify that the WFS code for dealing with that (e.g. repairing unqualified names) works with that first
ftLocalName = ( ftLocalName != null && !ftLocalName.isEmpty() ) ? ftLocalName : "Feature";
ftNamespace = ( ftNamespace != null && !ftNamespace.isEmpty() ) ? ftNamespace : "http:
ftPrefix = ( ftPrefix != null && !ftPrefix.isEmpty() ) ? ftPrefix : "app";
this.ftName = new QName( ftNamespace, ftLocalName, ftPrefix );
try {
this.crs = CRSManager.lookup( crs );
transformer = new GeometryTransformer( this.crs );
} catch ( IllegalArgumentException e ) {
LOG.error( "The invalid crs '{}' was specified for the simple SQL data store.", crs );
LOG.trace( "Stack trace:", e );
} catch ( UnknownCRSException e ) {
LOG.error( "The invalid crs '{}' was specified for the simple SQL data store.", crs );
LOG.trace( "Stack trace:", e );
}
this.lods = new TreeMap<Integer, String>();
this.lods.put( -1, sql );
for ( Pair<Integer, String> p : lods ) {
this.lods.put( p.first, p.second );
}
}
public FeatureStoreTransaction acquireTransaction()
throws FeatureStoreException {
throw new FeatureStoreException( "Transactions are not implemented for the simple SQL datastore." );
}
public void destroy() {
// nothing to do
}
public Envelope getEnvelope( QName ftName ) {
synchronized ( cachedEnvelope ) {
long current = currentTimeMillis();
if ( cachedEnvelope.first != null && ( current - cachedEnvelope.first ) < 1000 ) {
return cachedEnvelope.second;
}
ResultSet set = null;
PreparedStatement stmt = null;
Connection conn = null;
try {
conn = workspace.getSubsystemManager( ConnectionManager.class ).get( connId );
stmt = conn.prepareStatement( bbox );
LOG.debug( "Getting bbox with query '{}'.", stmt );
stmt.execute();
set = stmt.getResultSet();
if ( set.next() ) {
String bboxString = set.getString( "bbox" );
if ( bboxString == null ) {
LOG.info( "Could not determine envelope of database table, using world bbox instead." );
return fac.createEnvelope( -180, -90, 180, 90, CRSUtils.EPSG_4326 );
}
Geometry g = new WKTReader( crs ).read( bboxString );
cachedEnvelope.first = current;
cachedEnvelope.second = g.getEnvelope();
return cachedEnvelope.second;
}
} catch ( SQLException e ) {
LOG.info( "BBox could not be read: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
available = false;
return null;
} catch ( ParseException e ) {
LOG.info( "BBox could not be read: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
available = false;
return null;
} finally {
if ( set != null ) {
try {
set.close();
} catch ( SQLException e ) {
LOG.info( "A DB error occurred: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
}
if ( stmt != null ) {
try {
stmt.close();
} catch ( SQLException e ) {
LOG.info( "A DB error occurred: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
}
if ( conn != null ) {
try {
conn.close();
} catch ( SQLException e ) {
LOG.info( "A DB error occurred: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
}
}
return null;
}
}
public LockManager getLockManager()
throws FeatureStoreException {
throw new FeatureStoreException( "Transactions are not implemented for the simple SQL datastore." );
}
public GMLObject getObjectById( String id )
throws FeatureStoreException {
throw new FeatureStoreException( "Getting objects by id is not implemented for the simple SQL datastore." );
}
public ApplicationSchema getSchema() {
return schema;
}
/**
* @return the feature type (it can have only one)
*/
public GenericFeatureType getFeatureType() {
return featureType;
}
public void init( DeegreeWorkspace workspace )
throws ResourceInitException {
this.workspace = workspace;
featureType = DBUtils.determineFeatureType( ftName, connId, lods.values().iterator().next() );
if ( featureType == null ) {
available = false;
} else {
schema = new ApplicationSchema( new FeatureType[] { featureType }, null, null, null );
available = true;
}
}
@Override
public boolean isAvailable() {
return available;
}
public FeatureResultSet query( Query query )
throws FeatureStoreException, FilterEvaluationException {
return query( new Query[] { query } );
}
public FeatureResultSet query( final Query[] queries )
throws FeatureStoreException, FilterEvaluationException {
PreparedStatement stmt = null;
Connection conn = null;
FeatureResultSet set = null;
try {
LinkedList<FeatureResultSet> list = new LinkedList<FeatureResultSet>();
for ( final Query q : queries ) {
Envelope bbox = q.getPrefilterBBox();
if ( bbox == null ) {
bbox = getEnvelope( ftName );
}
Object scaleHint = q.getHint( HINT_SCALE );
int scale = -1;
if ( scaleHint != null ) {
scale = (Integer) scaleHint;
}
String sql = null;
for ( Integer i : lods.keySet() ) {
if ( i <= scale ) {
LOG.debug( "Considering use of LOD with scale {}.", i );
sql = lods.get( i );
}
}
ConnectionManager mgr = workspace.getSubsystemManager( ConnectionManager.class );
conn = mgr.get( connId );
Type connType = mgr.getType( connId );
if ( q.getMaxFeatures() > 0 && connType == Type.PostgreSQL ) {
sql += " limit " + q.getMaxFeatures();
}
stmt = conn.prepareStatement( sql );
try {
bbox = transformer.transform( bbox );
} catch ( UnknownCRSException e ) {
LOG.info( "Bounding box could not be transformed: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
} catch ( TransformationException e ) {
LOG.info( "Bounding box could not be transformed: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
}
int parameterCount = stmt.getParameterMetaData().getParameterCount();
if ( parameterCount == 0 ) {
LOG.info( "No parameter for the bbox was found, requesting without bbox!" );
} else if ( parameterCount > 1 ) {
LOG.warn( "Too many parameters specified ({}), cannot go further!", parameterCount );
return null;
}
stmt.setString( 1, WKTWriter.write( bbox ) );
LOG.debug( "Statement to fetch features was '{}'.", connType == Type.Oracle ? sql : stmt );
stmt.execute();
set = new IteratorResultSet( new ResultSetIterator<Feature>( stmt.getResultSet(), conn, stmt ) {
@Override
protected Feature createElement( ResultSet rs )
throws SQLException {
LinkedList<Property> props = new LinkedList<Property>();
for ( PropertyType pt : featureType.getPropertyDeclarations() ) {
if ( pt instanceof GeometryPropertyType ) {
byte[] bs = rs.getBytes( pt.getName().getLocalPart() );
if ( bs != null ) {
try {
Geometry geom = WKBReader.read( bs, crs );
props.add( new GenericProperty( pt, geom ) );
} catch ( ParseException e ) {
LOG.info( "WKB from the DB could not be parsed: '{}'.", e.getLocalizedMessage() );
LOG.info( "For PostGIS users: you have to select the geometry field 'asbinary(geometry)'." );
LOG.trace( "Stack trace:", e );
}
}
} else {
Object obj = rs.getObject( pt.getName().getLocalPart() );
if ( obj != null ) {
SimplePropertyType spt = (SimplePropertyType) pt;
props.add( new SimpleProperty( spt, "" + obj, spt.getPrimitiveType() ) );
}
}
}
return new GenericFeature( featureType, null, props, null, null );
}
} );
if ( q.getFilter() != null ) {
set = new FilteredFeatureResultSet( set, q.getFilter() );
}
list.add( set );
}
return new CombinedResultSet( list.iterator() );
} catch ( SQLException e ) {
LOG.info( "Data store could not be accessed: '{}'.", e.getLocalizedMessage() );
LOG.trace( "Stack trace:", e );
available = false;
throw new FeatureStoreException( "Data store could not be accessed." );
}
}
public int queryHits( Query query )
throws FeatureStoreException, FilterEvaluationException {
// TODO
return query( query ).toCollection().size();
}
public int queryHits( Query[] queries )
throws FeatureStoreException, FilterEvaluationException {
// TODO
return query( queries ).toCollection().size();
}
/**
* Returns the CRS of the geometry column.
*
* @return the CRS of the geometry column, never <code>null</code>
*/
public ICRS getStorageCRS() {
return crs;
}
}
|
package pl.edu.icm.coansys.disambiguation.author.pig.extractor;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultDataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.tools.pigstats.PigStatusReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import java.util.concurrent.ConcurrentHashMap;
import pl.edu.icm.coansys.commons.java.StackTraceExtractor;
import pl.edu.icm.coansys.disambiguation.author.features.extractors.DisambiguationExtractorFactory;
import pl.edu.icm.coansys.disambiguation.author.features.extractors.indicators.DisambiguationExtractor;
import pl.edu.icm.coansys.disambiguation.author.features.extractors.indicators.DisambiguationExtractorAuthor;
import pl.edu.icm.coansys.disambiguation.author.features.extractors.indicators.DisambiguationExtractorDocument;
import pl.edu.icm.coansys.disambiguation.features.FeatureInfo;
import pl.edu.icm.coansys.models.DocumentProtos.Author;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentMetadata;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentWrapper;
/**
 * Pig UDF that extracts disambiguation features for each contributor (author) of a document.
 * Takes a serialized {@code DocumentWrapper} and emits one tuple per author containing the
 * document key, contributor id, normalized surname, a feature map and the raw lowercased surname.
 *
 * @author pdendek
 * @author mwos
 */
@SuppressWarnings({ "unchecked" })
public class EXTRACT_CONTRIBDATA_GIVENDATA extends EvalFunc<DataBag> {

    private static final Logger logger = LoggerFactory
            .getLogger(EXTRACT_CONTRIBDATA_GIVENDATA.class);

    // extractors operating on whole-document data, parallel to des4DocNameOrId
    private List<DisambiguationExtractorDocument> des4Doc = new ArrayList<DisambiguationExtractorDocument>();
    // extractors operating on per-author data, parallel to des4AuthorNameOrId
    private List<DisambiguationExtractorAuthor> des4Author = new ArrayList<DisambiguationExtractorAuthor>();
    private List<String> des4DocNameOrId = new ArrayList<String>(),
            des4AuthorNameOrId = new ArrayList<String>();

    @Parameter(names = { "-lang", "-language" }, description = "Filter metadata by language", converter = LangConverter.class)
    private String language = null; // null means all

    @Parameter(names = "-skipEmptyFeatures", arity = 1, description = "Skip contributor's features, when feature bag is empty (no data for feature).")
    private boolean skipEmptyFeatures = false;

    @Parameter(names = "-useIdsForExtractors", arity = 1, description = "Use short ids for extractors (features) names in temporary sequance files.")
    private boolean useIdsForExtractors = false;

    @Parameter(names = "-returnNull", arity = 1, description = "Return null data bag after processing. Use only for debuging.")
    private boolean returnNull = false;

    @Parameter(names = { "-featureinfo", "-featureInfo" }, required = true, description = "Features description - model for calculating affinity and contributors clustering.")
    private String featureinfo = null;

    private DisambiguationExtractorFactory extrFactory = new DisambiguationExtractorFactory();

    @Override
    public Schema outputSchema(Schema p_input) {
        try {
            return Schema.generateNestedSchema(DataType.BAG);
        } catch (FrontendException e) {
            logger.error("Error in creating output schema:", e);
            throw new IllegalStateException(e);
        }
    }

    /**
     * Instantiates the extractors described by the given feature-info string and
     * partitions them into document-based and author-based extractors.
     *
     * @param featureInfo feature model description, must not be null or empty
     * @throws ClassNotFoundException when an extractor's indicator superclass is unknown
     */
    private void setDisambiguationExtractor(String featureInfo)
            throws InstantiationException, IllegalAccessException,
            ClassNotFoundException {
        if (featureInfo == null || featureInfo.isEmpty()) {
            throw new IllegalArgumentException("FeatureInfo model is required");
        }
        // parse the validated argument (was: parsing the field while validating the parameter)
        List<FeatureInfo> features = FeatureInfo
                .parseFeatureInfoString(featureInfo);
        // Get indicators names. Indicators (super classes of extractors) says about
        // extractor kind: document or author data dependent
        String extractorDocClassName = new DisambiguationExtractorDocument()
                .getClass().getSimpleName();
        String extractorAuthorClassName = new DisambiguationExtractorAuthor()
                .getClass().getSimpleName();
        DisambiguationExtractor extractor;
        String currentClassNameOrId;
        // iterate through all given extractors, create them
        for (int i = 0; i < features.size(); i++) {
            extractor = extrFactory.create(features.get(i));
            // Get indicator of this extractor. Note that super class of the
            // extractor may be other extractor, not directly indicator. So we
            // need to "climb up" the inheritance tree.
            Class<DisambiguationExtractor> superClass = (Class<DisambiguationExtractor>) extractor
                    .getClass();
            while (superClass.getSimpleName().startsWith("EX_")) {
                superClass = (Class<DisambiguationExtractor>) superClass
                        .getSuperclass();
            }
            String currentIndicatorName = superClass.getSimpleName();
            if (useIdsForExtractors) {
                currentClassNameOrId = extrFactory.toExId(extractor.getClass()
                        .getSimpleName());
            } else {
                currentClassNameOrId = extractor.getClass().getSimpleName();
            }
            if (currentIndicatorName.equals(extractorDocClassName)) {
                des4Doc.add((DisambiguationExtractorDocument) extractor);
                des4DocNameOrId.add(currentClassNameOrId);
            } else if (currentIndicatorName.equals(extractorAuthorClassName)) {
                des4Author.add((DisambiguationExtractorAuthor) extractor);
                des4AuthorNameOrId.add(currentClassNameOrId);
            } else {
                String m = "Cannot create extractor: "
                        + extractor.getClass().getSimpleName()
                        + ". Its superclass: " + currentIndicatorName
                        + " does not match to any superclass.";
                logger.error(m);
                throw new ClassNotFoundException(m);
            }
        }
    }

    /**
     * @param params space-separated command line style options, parsed by JCommander
     */
    public EXTRACT_CONTRIBDATA_GIVENDATA(String params)
            throws InstantiationException, IllegalAccessException,
            ClassNotFoundException {
        String[] argv = params.split(" ");
        new JCommander(this, argv);
        setDisambiguationExtractor(featureinfo);
    }

    // cache of configured UDF instances, keyed by their parameter string
    static ConcurrentHashMap<String, EXTRACT_CONTRIBDATA_GIVENDATA> extractors = new ConcurrentHashMap<>();

    /**
     * Returns a cached instance configured with the given parameters, creating it on first use.
     */
    public static synchronized EXTRACT_CONTRIBDATA_GIVENDATA get_EXTRACT_CONTRIBDATA_GIVENDATA(String params)
            throws InstantiationException, IllegalAccessException, ClassNotFoundException {
        // BUGFIX: was extractors.contains(params), which on ConcurrentHashMap tests *values*
        // (legacy alias of containsValue), so the cache never hit and a fresh instance was
        // created and stored on every call
        if (!extractors.containsKey(params)) {
            extractors.put(params, new EXTRACT_CONTRIBDATA_GIVENDATA(params));
        }
        return extractors.get(params);
    }

    /**
     * @return the effective (non-default) configuration options, for debugging
     */
    public Map<String, Object> debugComponents() {
        HashMap<String, Object> ret = new HashMap<String, Object>();
        if (language != null) {
            ret.put("-lang", language);
        }
        if (skipEmptyFeatures) {
            ret.put("-skipEmptyFeatures", skipEmptyFeatures);
        }
        if (useIdsForExtractors) {
            ret.put("-useIdsForExtractors", useIdsForExtractors);
        }
        if (returnNull) {
            ret.put("-returnNull", returnNull);
        }
        if (featureinfo != null) {
            ret.put("-featureinfo", featureinfo);
        }
        return ret;
    }

    @Override
    public DataBag exec(Tuple input) throws IOException {
        initializePigReporterWithZeroes();
        if (input == null || input.size() == 0) {
            return null;
        }
        try {
            DataByteArray dba = (DataByteArray) input.get(0);
            DocumentWrapper dw = DocumentWrapper.parseFrom(dba.get());
            dba = null;
            // metadata
            DocumentMetadata dm = dw.getDocumentMetadata();
            String docKey = dm.getKey();
            dw = null;
            // result bag with tuples, which describe each contributor
            DataBag ret = new DefaultDataBag();
            Collection<Author> authors = dm.getBasicMetadata().getAuthorList();
            reportAuthors(authors);
            if (authors.isEmpty()) {
                // returning empty bag
                return ret;
            }
            Map<String, DataBag> finalAuthorMap;
            // taking from document metadata data universal for all contribs
            Map<String, DataBag> documentMap = extractDocBasedFeatures(dm);
            // creating disambiguation extractor only for normalizer
            DisambiguationExtractor extractor = new DisambiguationExtractor();
            // bag making tuples (one tuple for one contributor from document)
            // with replicated metadata for
            int i = -1;
            for (Author a : authors) {
                i++;
                // here we have sure that Object = Integer
                Object normalizedSname = extractor.normalizeExtracted(a.getSurname());
                // additional code for keeping information about the real surname for debug reasons
                String rawNormalizedSname = a.getSurname().toLowerCase();
                // pig status reporter
                reportSname(a.getSurname(), normalizedSname);
                String cId = UUID.nameUUIDFromBytes(a.toByteArray()).toString();
                // taking from document metadata data specific for each contrib
                finalAuthorMap = extractAuthBasedFeatures(dm, documentMap, i);
                Object[] to = new Object[] { docKey, cId, normalizedSname,
                        finalAuthorMap, rawNormalizedSname };
                Tuple t = TupleFactory.getInstance()
                        .newTuple(Arrays.asList(to));
                ret.add(t);
            }
            if (returnNull) {
                return null;
            }
            return ret;
        } catch (Exception e) {
            logger.error("Error in processing input row:", e);
            throw new IOException("Caught exception processing input row:\n"
                    + StackTraceExtractor.getStackTrace(e));
        }
    }

    /**
     * Extends the document-level feature map with features extracted for a single author.
     *
     * @param dm document metadata
     * @param initialMap document-level features shared by all contributors (not modified)
     * @param authorIndex index of the author within the document's author list
     * @return a fresh map containing document features plus the author's own features
     */
    private Map<String, DataBag> extractAuthBasedFeatures(DocumentMetadata dm,
            Map<String, DataBag> initialMap, int authorIndex) {
        Map<String, DataBag> finalAuthorMap = new HashMap<String, DataBag>(
                initialMap);
        // in arrays we are storing DataBags from extractors
        DataBag extractedAuthorObj;
        for (int j = 0; j < des4Author.size(); j++) {
            extractedAuthorObj = des4Author.get(j).extract(dm, authorIndex,
                    language);
            // adding to map extractor name and features' data
            reportAuthorDataExistence(extractedAuthorObj, j);
            if (extractedAuthorObj == null
                    || (extractedAuthorObj.size() == 0 && skipEmptyFeatures)) {
                continue;
            }
            finalAuthorMap.put(des4AuthorNameOrId.get(j), extractedAuthorObj);
        }
        return finalAuthorMap;
    }

    /**
     * Extracts features that depend only on the document (shared by all contributors).
     *
     * @param dm document metadata
     * @return map from extractor name (or short id) to the extracted feature bag
     */
    private Map<String, DataBag> extractDocBasedFeatures(DocumentMetadata dm) {
        Map<String, DataBag> map = new HashMap<String, DataBag>();
        // in arrays we are storing DataBags from extractors
        DataBag extractedDocObj;
        for (int i = 0; i < des4Doc.size(); i++) {
            extractedDocObj = des4Doc.get(i).extract(dm, language);
            // monit to pig status reporter
            reportDocumentDataExistence(extractedDocObj, i);
            // adding to map extractor name and features' data
            if (extractedDocObj == null
                    || (extractedDocObj.size() == 0 && skipEmptyFeatures)) {
                continue;
            }
            map.put(des4DocNameOrId.get(i), extractedDocObj);
        }
        return map;
    }

    // Pig Status Reporter staff:
    private PigStatusReporter myreporter = null;

    private Counter counters4Doc[][], counters4Author[][],
            counterNormalizedSname[], counterOriginalSname[], countersExist;

    // counter group / index constants
    static class REPORTER_CONST {
        public static final String CONTRIB_EX = "Contrib_Existing";
        public static final String CONTRIB_MS = "Contrib_Missing";
        public static final String DOC_EX = "Doc_Existing";
        public static final String DOC_MS = "Doc_Missing";
        public static final int MISS = 0;
        public static final int EXIST = 1;
    }

    // cannot be run in constructor, have to take instance of reporter in each
    // exec(...) call
    private void initializePigReporterWithZeroes() {
        // instance of reporter may change in each exec(...) run
        myreporter = PigStatusReporter.getInstance();
        counters4Doc = new Counter[des4Doc.size()][2];
        counters4Author = new Counter[des4Author.size()][2];
        counterNormalizedSname = new Counter[2];
        counterOriginalSname = new Counter[2];
        // probe: when the reporter yields no counters (e.g. local mode), reporting is disabled
        countersExist = myreporter.getCounter("unused", "unused");
        if (countersExist == null) {
            return;
        }
        for (int i = 0; i < des4Doc.size(); i++) {
            counters4Doc[i][REPORTER_CONST.MISS] = myreporter.getCounter(
                    REPORTER_CONST.DOC_MS, des4Doc.get(i).getClass()
                            .getSimpleName());
            counters4Doc[i][REPORTER_CONST.EXIST] = myreporter.getCounter(
                    REPORTER_CONST.DOC_EX, des4Doc.get(i).getClass()
                            .getSimpleName());
            // increment(0) makes the counters visible even when never hit
            counters4Doc[i][REPORTER_CONST.MISS].increment(0);
            counters4Doc[i][REPORTER_CONST.EXIST].increment(0);
        }
        for (int i = 0; i < des4Author.size(); i++) {
            counters4Author[i][REPORTER_CONST.MISS] = myreporter.getCounter(
                    REPORTER_CONST.CONTRIB_MS, des4Author.get(i).getClass()
                            .getSimpleName());
            counters4Author[i][REPORTER_CONST.EXIST] = myreporter.getCounter(
                    REPORTER_CONST.CONTRIB_EX, des4Author.get(i).getClass()
                            .getSimpleName());
            counters4Author[i][REPORTER_CONST.MISS].increment(0);
            counters4Author[i][REPORTER_CONST.EXIST].increment(0);
        }
        counterNormalizedSname[REPORTER_CONST.MISS] = myreporter.getCounter(
                REPORTER_CONST.CONTRIB_MS, "Normalized sname");
        counterNormalizedSname[REPORTER_CONST.EXIST] = myreporter.getCounter(
                REPORTER_CONST.CONTRIB_EX, "Normalized sname");
        counterOriginalSname[REPORTER_CONST.MISS] = myreporter.getCounter(
                REPORTER_CONST.CONTRIB_MS, "Original sname");
        counterOriginalSname[REPORTER_CONST.EXIST] = myreporter.getCounter(
                REPORTER_CONST.CONTRIB_EX, "Original sname");
        counterNormalizedSname[REPORTER_CONST.MISS].increment(0);
        counterNormalizedSname[REPORTER_CONST.EXIST].increment(0);
        counterOriginalSname[REPORTER_CONST.MISS].increment(0);
        counterOriginalSname[REPORTER_CONST.EXIST].increment(0);
    }

    // bump MISS/EXIST counter for the j-th author-based extractor
    private void reportAuthorDataExistence(DataBag extractedAuthorObj, int j) {
        if (countersExist == null) {
            return;
        }
        if (extractedAuthorObj == null || extractedAuthorObj.size() == 0) {
            counters4Author[j][REPORTER_CONST.MISS].increment(1);
        } else {
            counters4Author[j][REPORTER_CONST.EXIST].increment(1);
        }
    }

    // bump MISS/EXIST counter for the i-th document-based extractor
    private void reportDocumentDataExistence(DataBag extractedDocObj, int i) {
        if (countersExist == null) {
            return;
        }
        if (extractedDocObj == null || extractedDocObj.size() == 0) {
            counters4Doc[i][REPORTER_CONST.MISS].increment(1);
        } else {
            counters4Doc[i][REPORTER_CONST.EXIST].increment(1);
        }
    }

    // record whether original and normalized surnames were present for a contributor
    private void reportSname(Object orgSname, Object normSname) {
        if (countersExist == null) {
            return;
        }
        if (normSname == null || normSname.toString().isEmpty()) {
            counterNormalizedSname[REPORTER_CONST.MISS].increment(1);
        } else {
            counterNormalizedSname[REPORTER_CONST.EXIST].increment(1);
        }
        if (orgSname == null || orgSname.toString().isEmpty()) {
            counterOriginalSname[REPORTER_CONST.MISS].increment(1);
        } else {
            counterOriginalSname[REPORTER_CONST.EXIST].increment(1);
        }
    }

    // record whether the document had any authors at all
    private void reportAuthors(Collection<Author> authors) {
        if (countersExist == null) {
            return;
        }
        myreporter.getCounter(REPORTER_CONST.DOC_MS,
                "Any author (unprocessed documents)").increment(
                authors.isEmpty() ? 1 : 0);
        myreporter.getCounter(REPORTER_CONST.DOC_EX,
                "Any author (processed documents)").increment(
                authors.isEmpty() ? 0 : 1);
    }
}
|
package cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.impl;
import com.google.common.collect.Table;
import cz.cuni.mff.odcleanstore.conflictresolution.*;
import cz.cuni.mff.odcleanstore.conflictresolution.exceptions.ConflictResolutionException;
import cz.cuni.mff.odcleanstore.conflictresolution.exceptions.ResolutionFunctionNotRegisteredException;
import cz.cuni.mff.odcleanstore.conflictresolution.impl.CRContextImpl;
import cz.cuni.mff.odcleanstore.conflictresolution.impl.ConflictResolutionPolicyImpl;
import cz.cuni.mff.odcleanstore.conflictresolution.impl.ResolutionStrategyImpl;
import cz.cuni.mff.odcleanstore.conflictresolution.impl.ResolvedStatementFactoryImpl;
import cz.cuni.mff.odcleanstore.conflictresolution.impl.util.CRUtils;
import cz.cuni.mff.odcleanstore.conflictresolution.impl.util.EmptyMetadataModel;
import cz.cuni.mff.odcleanstore.conflictresolution.resolution.AllResolution;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.ResourceDescription;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.ResourceDescriptionConflictResolver;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.UriMapping;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.urimapping.AlternativeUriNavigator;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.urimapping.EmptyUriMappingIterable;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.urimapping.UriMappingIterable;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.urimapping.UriMappingIterableImpl;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.util.ClusterIterator;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.util.ODCSFusionToolCRUtils;
import cz.cuni.mff.odcleanstore.fusiontool.conflictresolution.util.StatementMapper;
import cz.cuni.mff.odcleanstore.vocabulary.ODCS;
import org.openrdf.model.*;
import org.openrdf.model.impl.ValueFactoryImpl;
import org.openrdf.util.iterators.Iterators;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Conflict resolver operating on whole resource descriptions (see {@link ResourceDescriptionConflictResolver}).
 */
public class ResourceDescriptionConflictResolverImpl implements ResourceDescriptionConflictResolver {
    private static final Logger LOG = LoggerFactory.getLogger(ResourceDescriptionConflictResolverImpl.class);

    /** Default conflict resolution strategy. */
    public static final ResolutionStrategy DEFAULT_RESOLUTION_STRATEGY = new ResolutionStrategyImpl(
            AllResolution.getName(),
            EnumCardinality.MANYVALUED,
            EnumAggregationErrorStrategy.RETURN_ALL);

    /** Default prefix of graph names where resolved quads are placed. */
    public static final String DEFAULT_RESOLVED_GRAPHS_URI_PREFIX = ODCS.NAMESPACE + "cr/";

    private static final ValueFactory VF = ValueFactoryImpl.getInstance();

    private static final SortedListModelFactory SORTED_LIST_MODEL_FACTORY = new SortedListModelFactory();

    // metadata available to resolution functions; constructor substitutes an empty model when null is given
    private final Model metadataModel;

    // mapping of URIs to canonical URIs; constructor substitutes an empty mapping when null is given
    private final UriMapping uriMapping;

    // resolution policy with URI mapping applied (see getEffectiveResolutionPolicy)
    private final ConflictResolutionPolicy effectiveResolutionPolicy;

    private final ResolutionFunctionRegistry resolutionFunctionRegistry;

    // factory producing ResolvedStatements with the configured graph-name prefix
    private final ResolvedStatementFactoryImpl resolvedStatementFactory;

    // canonical URIs of properties whose values are structured descriptions resolved transitively
    private final Set<URI> resourceDescriptionProperties;

    // lookup of mutually dependent properties derived from the effective resolution policy
    private final AlternativeUriNavigator dependentPropertyMapping;
/**
 * Creates a new instance with the given settings.
 * @param resolutionFunctionRegistry registry for obtaining conflict resolution function implementations
 * @param conflictResolutionPolicy conflict resolution parameters
 * @param uriMapping mapping of URIs to their canonical URI (based on owl:sameAs links); may be null
 * @param metadata additional metadata for use by resolution functions (e.g. source quality etc.); may be null
 * @param resolvedGraphsURIPrefix prefix of graph names where resolved quads are placed; may be null
 * @param resourceDescriptionProperties resource description properties, i.e. properties whose values are structured attributes
 *        which should be resolved transitively
 */
public ResourceDescriptionConflictResolverImpl(
        ResolutionFunctionRegistry resolutionFunctionRegistry,
        ConflictResolutionPolicy conflictResolutionPolicy,
        UriMapping uriMapping,
        Model metadata,
        String resolvedGraphsURIPrefix,
        Set<URI> resourceDescriptionProperties) {
    this.resolutionFunctionRegistry = resolutionFunctionRegistry;
    // normalize the null-capable arguments first so every later use sees a non-null value;
    // the original dereferenced the raw uriMapping parameter below, NPE-ing when it was null
    this.uriMapping = uriMapping != null
            ? uriMapping
            : EmptyUriMappingIterable.getInstance();
    this.metadataModel = metadata != null
            ? metadata
            : new EmptyMetadataModel();
    this.effectiveResolutionPolicy = getEffectiveResolutionPolicy(conflictResolutionPolicy, this.uriMapping);
    this.dependentPropertyMapping = new AlternativeUriNavigator(
            getDependentPropertyMapping(effectiveResolutionPolicy, this.uriMapping));
    this.resolvedStatementFactory = resolvedGraphsURIPrefix != null
            ? new ResolvedStatementFactoryImpl(resolvedGraphsURIPrefix)
            : new ResolvedStatementFactoryImpl(DEFAULT_RESOLVED_GRAPHS_URI_PREFIX);
    this.resourceDescriptionProperties = new HashSet<>();
    for (URI property : resourceDescriptionProperties) {
        // store the canonical form so lookups during resolution match mapped statements
        this.resourceDescriptionProperties.add((URI) this.uriMapping.mapResource(property));
    }
}
/**
 * Apply conflict resolution process to the given resource description and return result.
 * @param resourceDescription container of quads that make up the description of the respective statement
 *      (i.e. quads that are relevant for the conflict resolution process)
 * @return collection of quads derived from the input with resolved
 *      conflicts, (F-)quality estimate and provenance information.
 * @throws ConflictResolutionException error during the conflict resolution process
 * @see ResolvedStatement
 */
@Override
public Collection<ResolvedStatement> resolveConflicts(ResourceDescription resourceDescription) throws ConflictResolutionException {
    long started = logStarted(resourceDescription.getDescribingStatements().size());
    // group the description quads by canonical subject and property
    ConflictClustersMap clusterMap = ConflictClustersMap.fromCollection(resourceDescription.getDescribingStatements(), uriMapping);
    Collection<ResolvedStatement> resolved = createResultCollection(resourceDescription.getDescribingStatements().size());
    Resource canonicalSubject = uriMapping.mapResource(resourceDescription.getResource());
    resolveResource(clusterMap.getResourceStatementsMap(canonicalSubject), canonicalSubject, resolved);
    logFinished(started, resolved);
    return resolved;
}
/**
 * Resolve conflicts in statements contained in {@code conflictClustersMap} for the given {@code canonicalResource}.
 * Independent properties are resolved one conflict cluster at a time; mutually dependent properties
 * (per {@link #getDependentProperties}) are resolved together as a group.
 * @param statementsToResolveByProperty statements to be resolved as a map canonical property -> (unmapped) statements with the property
 * @param expectedSubject canonical subject the resolved statements are attached to (via addToResult)
 * @param result collection where the resolved result is added to
 * @throws ConflictResolutionException error during the conflict resolution process
 */
private void resolveResource(
        Map<URI, List<Statement>> statementsToResolveByProperty,
        Resource expectedSubject,
        Collection<ResolvedStatement> result) throws ConflictResolutionException {
    // tracks properties already handled, because a dependent-property group resolves
    // several map keys in one step and those must not be resolved again
    Set<URI> resolvedProperties = new HashSet<>();
    Set<URI> canonicalProperties = statementsToResolveByProperty.keySet();
    for (URI canonicalProperty : canonicalProperties) {
        if (resolvedProperties.contains(canonicalProperty)) {
            continue;
        }
        List<URI> dependentProperties = getDependentProperties(canonicalProperty);
        if (dependentProperties == null) {
            // independent property: resolve its single conflict cluster
            List<Statement> conflictClusterStatements = statementsToResolveByProperty.get(canonicalProperty);
            Model conflictClusterModel = createMappedModel(conflictClusterStatements);
            // NOTE(review): the cluster model is passed as both the statements and the context
            // (third) argument here, whereas resolveResourceDependentProperties passes all mapped
            // conflicting statements as context - confirm this asymmetry is intended
            Collection<ResolvedStatement> resolvedStatements = resolveConflictCluster(
                    conflictClusterModel, canonicalProperty, conflictClusterModel);
            addToResult(resolvedStatements, result, expectedSubject);
            resolvedProperties.add(canonicalProperty);
        } else {
            // dependent properties: resolve the whole group together and mark all of them done
            List<ResolvedStatement> resolvedStatements = resolveResourceDependentProperties(
                    statementsToResolveByProperty,
                    dependentProperties);
            addToResult(resolvedStatements, result, expectedSubject);
            resolvedProperties.addAll(dependentProperties);
        }
    }
}
// Copies the given resolved statements into {@code result}, rewriting each statement's
// subject to {@code expectedSubject} on the fly via SubjectMappingIterator.
private void addToResult(Collection<ResolvedStatement> resolvedStatements, Collection<ResolvedStatement> result, Resource expectedSubject) {
    Iterators.addAll(
            new SubjectMappingIterator(resolvedStatements.iterator(), expectedSubject, resolvedStatementFactory),
            result);
}
// Returns a view of the given resolved statements with every subject rewritten to
// {@code expectedSubject}.
private Iterator<ResolvedStatement> mapSubject(Collection<ResolvedStatement> resolvedStatements, Resource expectedSubject) {
    Iterator<ResolvedStatement> source = resolvedStatements.iterator();
    return new SubjectMappingIterator(source, expectedSubject, resolvedStatementFactory);
}
// TODO: refactor + move ?
// (triples from the same graph should go together even if the same resource URI is used in multiple graphs)
// FIXME: !!!! DO NOT SELECT BEST SUBJECT, BUT COMBINATION OF SUBJECT AND GRAPH
/**
 * Resolves conflicts in {@code statementsToResolveByProperty} for a set of mutually dependent properties.
 * This method <b>doesn't strictly require statements to share the same subject or map to the same canonical subject</b> but it
 * treats the input triples as though they do map to the same canonical subject.
 * Only the statements belonging to the single best-quality (not-mapped) subject are returned.
 * @param statementsToResolveByProperty statements to be resolved as a map canonical property -> (unmapped) statements with the property
 * @param dependentProperties list of mutually dependent properties to be resolved
 * @return resolved statements of the best-quality subject; empty if there were no statements to resolve
 * @throws ConflictResolutionException CR error
 */
private ArrayList<ResolvedStatement> resolveResourceDependentProperties(
Map<URI, List<Statement>> statementsToResolveByProperty,
List<URI> dependentProperties) throws ConflictResolutionException {
// Step 1: resolve conflicts for each (non-canonical) subject and property.
// Table rows are keyed by the NOT-mapped subject, columns by the canonical property.
Table<Resource, URI, Collection<ResolvedStatement>> conflictClustersTable = ODCSFusionToolCRUtils.newHashTable();
for (URI property : dependentProperties) {
List<Statement> conflictingStatements = statementsToResolveByProperty.get(property);
if (conflictingStatements == null) {
continue;
}
// Mapped copies are used only for quality calculation in resolveConflictCluster()
Collection<Statement> mappedConflictingStatements = new StatementMapper(uriMapping, VF).mapStatements(conflictingStatements);
// NOTE(review): ClusterIterator presumably groups consecutive statements with equal subjects,
// which assumes conflictingStatements are subject-sorted — confirm against its contract.
ClusterIterator<Statement> subjectClusterIterator = new ClusterIterator<>(conflictingStatements, StatementBySubjectComparator.getInstance());
while (subjectClusterIterator.hasNext()) {
List<Statement> statements = subjectClusterIterator.next();
Resource notMappedSubject = statements.get(0).getSubject();
Model conflictClusterModel = createMappedModel(statements);
Collection<ResolvedStatement> resolvedConflictCluster = resolveConflictCluster(
conflictClusterModel, property, mappedConflictingStatements);
conflictClustersTable.put(notMappedSubject, property, resolvedConflictCluster);
}
}
// Step 2: Choose the best subject by aggregate quality, averaged over ALL dependent
// properties (a missing property contributes 0 and thus penalizes the subject)
Resource bestSubject = null;
double bestSubjectQuality = -1;
for (Resource notMappedSubject : conflictClustersTable.rowKeySet()) {
double aggregateQualitySum = 0;
for (URI property : dependentProperties) {
aggregateQualitySum += aggregateConflictClusterQuality(conflictClustersTable.get(notMappedSubject, property));
}
double notMappedSubjectQuality = aggregateQualitySum / dependentProperties.size();
if (notMappedSubjectQuality > bestSubjectQuality) {
bestSubject = notMappedSubject;
bestSubjectQuality = notMappedSubjectQuality;
}
}
// Step 3: Add statements for the best subject (only) to the result
ArrayList<ResolvedStatement> result = new ArrayList<>();
if (bestSubject != null) {
Map<URI, Collection<ResolvedStatement>> selectedStatements = conflictClustersTable.row(bestSubject);
for (Collection<ResolvedStatement> resolvedStatements : selectedStatements.values()) {
result.addAll(resolvedStatements);
}
}
return result;
}
/**
 * Resolves conflicts in {@code conflictClusterToResolve}. This method expects that <b>all statements in
 * {@code conflictClusterToResolve} share the same subject and property</b> (no further mapping is performed).
 * @param conflictClusterToResolve statements to be resolved;
 *      subjects and predicate in these triples must be the same for all triples
 * @param property canonical property for the conflict cluster
 * @param conflictingMappedStatements conflicting statements to be considered during quality calculation
 * @return resolved statements produced by the conflict resolution function
 * @throws ConflictResolutionException CR error
 */
private Collection<ResolvedStatement> resolveConflictCluster(
        Model conflictClusterToResolve,
        URI property,
        Collection<Statement> conflictingMappedStatements) throws ConflictResolutionException {
    if (conflictClusterToResolve.isEmpty()) {
        return Collections.emptyList();
    }
    // Prefer a property-specific strategy, falling back to the policy default
    ResolutionStrategy strategy = effectiveResolutionPolicy.getPropertyResolutionStrategies().get(property);
    if (strategy == null) {
        strategy = effectiveResolutionPolicy.getDefaultResolutionStrategy();
    }
    ResolutionFunction resolutionFunction = getResolutionFunction(strategy);
    CRContext context = new CRContextImpl(conflictingMappedStatements, metadataModel, strategy, resolvedStatementFactory);
    // FIXME: resolution functions generally assume that the model is spog-sorted;
    // while this works now, it can be easily broken in the future
    return resolutionFunction.resolve(conflictClusterToResolve, context);
}
/**
 * Applies the URI mapping to the given statements and builds a model from the mapped result.
 * @param statements statements to map
 * @return model created from mapped {@code statements}
 */
private Model createMappedModel(Iterable<Statement> statements) {
    Iterator<Statement> mappedStatements = new StatementMappingIterator(statements.iterator(), uriMapping, VF);
    return SORTED_LIST_MODEL_FACTORY.fromUnorderedIterator(mappedStatements);
}
/**
 * Builds the effective conflict resolution policy: fills in defaults for the default strategy,
 * fills per-property strategy defaults, and maps property URIs to their canonical form.
 * @param conflictResolutionPolicy user-supplied policy; may be {@code null}
 * @param uriMapping canonical URI mapping
 * @return fully-populated effective policy
 */
private static ConflictResolutionPolicy getEffectiveResolutionPolicy(ConflictResolutionPolicy conflictResolutionPolicy, UriMapping uriMapping) {
    ResolutionStrategy defaultStrategy = DEFAULT_RESOLUTION_STRATEGY;
    if (conflictResolutionPolicy != null && conflictResolutionPolicy.getDefaultResolutionStrategy() != null) {
        defaultStrategy = CRUtils.fillResolutionStrategyDefaults(
                conflictResolutionPolicy.getDefaultResolutionStrategy(),
                DEFAULT_RESOLUTION_STRATEGY);
    }
    Map<URI, ResolutionStrategy> propertyStrategies = new HashMap<>();
    if (conflictResolutionPolicy != null && conflictResolutionPolicy.getPropertyResolutionStrategies() != null) {
        for (Map.Entry<URI, ResolutionStrategy> entry : conflictResolutionPolicy.getPropertyResolutionStrategies().entrySet()) {
            URI canonicalProperty = (URI) uriMapping.mapResource(entry.getKey());
            ResolutionStrategy filledStrategy = CRUtils.fillResolutionStrategyDefaults(entry.getValue(), defaultStrategy);
            propertyStrategies.put(canonicalProperty, filledStrategy);
        }
    }
    ConflictResolutionPolicyImpl effectivePolicy = new ConflictResolutionPolicyImpl();
    effectivePolicy.setDefaultResolutionStrategy(defaultStrategy);
    effectivePolicy.setPropertyResolutionStrategy(propertyStrategies);
    return effectivePolicy;
}
/**
 * Builds a mapping between mutually dependent properties declared via
 * {@code ResolutionStrategy.getDependsOn()}, with both sides mapped to canonical URIs.
 * @param effectiveResolutionPolicy effective conflict resolution policy (not null)
 * @param uriMapping canonical URI mapping (not null)
 * @return mapping linking each dependent property to the property it depends on
 */
private static UriMappingIterable getDependentPropertyMapping(ConflictResolutionPolicy effectiveResolutionPolicy, UriMapping uriMapping) {
    checkNotNull(effectiveResolutionPolicy);
    checkNotNull(uriMapping);
    UriMappingIterableImpl dependentPropertyMapping = new UriMappingIterableImpl();
    // Iterate over entries instead of keySet()+get() to avoid a redundant map lookup per property
    for (Map.Entry<URI, ResolutionStrategy> entry : effectiveResolutionPolicy.getPropertyResolutionStrategies().entrySet()) {
        ResolutionStrategy resolutionStrategy = entry.getValue();
        if (resolutionStrategy.getDependsOn() != null) {
            dependentPropertyMapping.addLink(
                    (URI) uriMapping.mapResource(entry.getKey()),
                    (URI) uriMapping.mapResource(resolutionStrategy.getDependsOn()));
        }
    }
    return dependentPropertyMapping;
}
/**
 * Looks up the resolution function registered under the strategy's function name.
 * @throws ResolutionFunctionNotRegisteredException when no function with that name is registered
 */
protected ResolutionFunction getResolutionFunction(ResolutionStrategy resolutionStrategy) throws ResolutionFunctionNotRegisteredException {
    String functionName = resolutionStrategy.getResolutionFunctionName();
    return resolutionFunctionRegistry.get(functionName);
}
/**
 * Creates the collection used to accumulate resolved statements.
 * Presized to half the input size as a capacity heuristic.
 */
protected Collection<ResolvedStatement> createResultCollection(int inputSize) {
    int expectedSize = inputSize / 2;
    return new ArrayList<>(expectedSize);
}
/**
 * Computes the aggregate quality of a resolved conflict cluster as the arithmetic mean
 * of the member statements' qualities.
 * @return average quality, or 0 when {@code resolvedStatements} is null or empty
 */
private double aggregateConflictClusterQuality(Collection<ResolvedStatement> resolvedStatements) {
    if (resolvedStatements == null || resolvedStatements.isEmpty()) {
        return 0d;
    }
    double qualitySum = 0d;
    for (ResolvedStatement statement : resolvedStatements) {
        qualitySum += statement.getQuality();
    }
    return qualitySum / resolvedStatements.size();
}
/**
 * Returns the list of properties mutually dependent with {@code property},
 * or {@code null} when the property has no registered dependencies
 * (callers rely on the {@code null} to mean "no dependent properties").
 */
private List<URI> getDependentProperties(URI property) {
    boolean hasDependencies = dependentPropertyMapping.hasAlternativeUris(property);
    return hasDependencies ? dependentPropertyMapping.listAlternativeUris(property) : null;
}
/**
 * Logs the start of conflict resolution.
 * @return current time in milliseconds when debug logging is enabled, 0 otherwise
 *      (the returned value is only meaningful as input to {@code logFinished})
 */
private long logStarted(int inputStatementCount) {
    if (!LOG.isDebugEnabled()) {
        return 0;
    }
    LOG.debug("Resolving conflicts among {} quads.", inputStatementCount);
    return System.currentTimeMillis();
}
/** Logs the elapsed time and result size of a finished conflict resolution run (debug level only). */
private void logFinished(long startTime, Collection<ResolvedStatement> result) {
    if (LOG.isDebugEnabled()) {
        long elapsedMillis = System.currentTimeMillis() - startTime;
        LOG.debug("Conflict resolution executed in {} ms, resolved to {} quads", elapsedMillis, result.size());
    }
}
/**
 * Comparator ordering statements by subject only (delegates to {@code CRUtils.compareValues}).
 * Stateless singleton; obtain via {@link #getInstance()}.
 */
private static class StatementBySubjectComparator implements Comparator<Statement> {
    private static final Comparator<Statement> INSTANCE = new StatementBySubjectComparator();

    private StatementBySubjectComparator() {
        // singleton
    }

    public static Comparator<Statement> getInstance() {
        return INSTANCE;
    }

    @Override
    public int compare(Statement first, Statement second) {
        return CRUtils.compareValues(first.getSubject(), second.getSubject());
    }
}
}
|
package io.quarkus.hibernate.reactive.panache.common.runtime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import javax.enterprise.inject.spi.Bean;
import javax.persistence.LockModeType;
import org.hibernate.internal.util.LockModeConverter;
import org.hibernate.reactive.mutiny.Mutiny;
import org.hibernate.reactive.mutiny.Mutiny.Session;
import io.quarkus.arc.Arc;
import io.quarkus.panache.common.Parameters;
import io.quarkus.panache.common.Sort;
import io.quarkus.panache.hibernate.common.runtime.PanacheJpaUtil;
import io.smallrye.mutiny.Multi;
import io.smallrye.mutiny.Uni;
import io.vertx.core.Vertx;
/**
 * Base class implementing the reactive Panache entity/repository operations on top of
 * Hibernate Reactive's {@code Mutiny.Session}. Subclasses supply the concrete
 * {@code PanacheQueryType} via the abstract factory methods.
 * All session access goes through {@link #getSession()}, which ensures execution on the
 * Vert.x event loop.
 */
public abstract class AbstractJpaOperations<PanacheQueryType> {
// FIXME: make it configurable?
// Maximum time to wait for a task delegated to the Vert.x event loop.
static final long TIMEOUT_MS = 5000;
// Runs the given task on the Vert.x event loop and blocks the calling thread until it
// completes (or TIMEOUT_MS elapses). Any failure is rethrown as a RuntimeException.
private static void executeInVertxEventLoop(Runnable runnable) {
Vertx vertx = Arc.container().instance(Vertx.class).get();
// this needs to be sync
CompletableFuture<Void> cf = new CompletableFuture<>();
vertx.runOnContext(v -> {
try {
runnable.run();
cf.complete(null);
} catch (Throwable t) {
cf.completeExceptionally(t);
}
});
try {
cf.get(TIMEOUT_MS, TimeUnit.MILLISECONDS);
} catch (InterruptedException | ExecutionException | TimeoutException e) {
throw new RuntimeException(e);
}
}
// Resolves the request-scoped Mutiny.Session from the Arc CDI container.
private static Session lookupSessionFromArc() {
return Arc.container().instance(Session.class).get();
}
// Factory for the concrete Panache query implementation; paramsArrayOrMap is either
// Object[] (positional) or Map<String, Object> (named parameters).
protected abstract PanacheQueryType createPanacheQuery(Uni<Mutiny.Session> session, String query, String orderBy,
Object paramsArrayOrMap);
// Materializes the query results as a list.
protected abstract Uni<List<?>> list(PanacheQueryType query);
// Streams the query results.
protected abstract Multi<?> stream(PanacheQueryType query);
// Instance methods
/** Persists the entity using the current session. */
public Uni<Void> persist(Object entity) {
return persist(getSession(), entity);
}
/**
 * Persists the entity with the given session, skipping entities the session
 * already tracks (persisting twice would be an error).
 */
public Uni<Void> persist(Uni<Mutiny.Session> sessionUni, Object entity) {
return sessionUni.chain(session -> {
if (!session.contains(entity)) {
return session.persist(entity);
}
return Uni.createFrom().nullItem();
});
}
/** Persists all entities in the iterable. */
public Uni<Void> persist(Iterable<?> entities) {
return persist(StreamSupport.stream(entities.spliterator(), false));
}
/** Persists the first entity plus any additional entities (varargs convenience). */
public Uni<Void> persist(Object firstEntity, Object... entities) {
List<Object> array = new ArrayList<>(entities.length + 1);
array.add(firstEntity);
for (Object entity : entities) {
array.add(entity);
}
return persist(array.stream());
}
/** Persists every entity in the stream with a single shared session, combining the Unis. */
public Uni<Void> persist(Stream<?> entities) {
Uni<Mutiny.Session> session = getSession();
List<Uni<Void>> uniList = entities.map(entity -> persist(session, entity)).collect(Collectors.toList());
return Uni.combine().all().unis(uniList).discardItems();
// this should work, but doesn't
// return Multi.createFrom().items(entities)
// .map(entity -> persist(session, entity))
// .onItem().ignoreAsUni();
}
/** Removes the entity via the current session. */
public Uni<Void> delete(Object entity) {
return getSession().chain(session -> session.remove(entity));
}
/**
 * Returns whether the entity is tracked by the request-scoped session.
 * Returns false when no session exists in the current request context.
 */
public boolean isPersistent(Object entity) {
// only attempt to look up the request context session if it's already there: do not
// run the producer method otherwise, before we know which thread we're on
Session requestSession = isInRequestContext(Mutiny.Session.class) ? lookupSessionFromArc()
: null;
if (requestSession != null) {
return requestSession.contains(entity);
} else {
return false;
}
}
/** Flushes pending changes of the current session to the database. */
public Uni<Void> flush() {
return getSession().chain(Session::flush);
}
// Private stuff
/**
 * Obtains the Mutiny session, making sure the lookup happens on the Vert.x event loop;
 * off-loop callers get a Uni whose subscription is shifted onto the event loop.
 */
public static Uni<Mutiny.Session> getSession() {
// Always check if we're running on the event loop: if not,
// we need to delegate the execution of all tasks on it.
if (io.vertx.core.Context.isOnEventLoopThread()) {
return Uni.createFrom().item(lookupSessionFromArc());
} else {
// FIXME: we may need context propagation
final Executor executor = AbstractJpaOperations::executeInVertxEventLoop;
return Uni.createFrom().item(AbstractJpaOperations::lookupSessionFromArc)
.runSubscriptionOn(executor);
}
}
// Returns true when a bean of the given class already has an instance in the active
// request context (without triggering its producer).
private static boolean isInRequestContext(Class<?> klass) {
Set<Bean<?>> beans = Arc.container().beanManager().getBeans(klass);
if (beans.isEmpty())
return false;
return Arc.container().requestContext().get(beans.iterator().next()) != null;
}
/** Binds positional parameters (JPA positional parameters are 1-based). */
public static Mutiny.Query<?> bindParameters(Mutiny.Query<?> query, Object[] params) {
if (params == null || params.length == 0)
return query;
for (int i = 0; i < params.length; i++) {
query.setParameter(i + 1, params[i]);
}
return query;
}
/** Binds named parameters from the map. */
public static Mutiny.Query<?> bindParameters(Mutiny.Query<?> query, Map<String, Object> params) {
if (params == null || params.size() == 0)
return query;
for (Entry<String, Object> entry : params.entrySet()) {
query.setParameter(entry.getKey(), entry.getValue());
}
return query;
}
/** Returns the number of positional parameters (0 for null). */
public int paramCount(Object[] params) {
return params != null ? params.length : 0;
}
/** Returns the number of named parameters (0 for null). */
public int paramCount(Map<String, Object> params) {
return params != null ? params.size() : 0;
}
// Queries
/** Finds an entity by its identifier. */
public Uni<?> findById(Class<?> entityClass, Object id) {
return getSession().chain(session -> session.find(entityClass, id));
}
/** Finds an entity by its identifier, acquiring the given lock mode. */
public Uni<?> findById(Class<?> entityClass, Object id, LockModeType lockModeType) {
return getSession()
.chain(session -> session.find(entityClass, id, LockModeConverter.convertToLockMode(lockModeType)));
}
/** Finds entities matching the (possibly shortened HQL) query with positional parameters. */
public PanacheQueryType find(Class<?> entityClass, String query, Object... params) {
return find(entityClass, query, null, params);
}
/**
 * Finds entities matching the query with a sort and positional parameters.
 * Queries starting with '#' are treated as named queries and validated.
 */
public PanacheQueryType find(Class<?> entityClass, String query, Sort sort, Object... params) {
String findQuery = PanacheJpaUtil.createFindQuery(entityClass, query, paramCount(params));
Uni<Mutiny.Session> session = getSession();
// FIXME: check for duplicate ORDER BY clause?
if (PanacheJpaUtil.isNamedQuery(query)) {
String namedQuery = query.substring(1);
NamedQueryUtil.checkNamedQuery(entityClass, namedQuery);
return createPanacheQuery(session, query, PanacheJpaUtil.toOrderBy(sort), params);
}
return createPanacheQuery(session, findQuery, PanacheJpaUtil.toOrderBy(sort), params);
}
/** Finds entities matching the query with named parameters. */
public PanacheQueryType find(Class<?> entityClass, String query, Map<String, Object> params) {
return find(entityClass, query, null, params);
}
/**
 * Finds entities matching the query with a sort and named parameters.
 * Queries starting with '#' are treated as named queries and validated.
 */
public PanacheQueryType find(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
String findQuery = PanacheJpaUtil.createFindQuery(entityClass, query, paramCount(params));
Uni<Mutiny.Session> session = getSession();
// FIXME: check for duplicate ORDER BY clause?
if (PanacheJpaUtil.isNamedQuery(query)) {
String namedQuery = query.substring(1);
NamedQueryUtil.checkNamedQuery(entityClass, namedQuery);
return createPanacheQuery(session, query, PanacheJpaUtil.toOrderBy(sort), params);
}
return createPanacheQuery(session, findQuery, PanacheJpaUtil.toOrderBy(sort), params);
}
/** Finds entities matching the query with Panache {@code Parameters}. */
public PanacheQueryType find(Class<?> entityClass, String query, Parameters params) {
return find(entityClass, query, null, params);
}
/** Finds entities matching the query with a sort and Panache {@code Parameters}. */
public PanacheQueryType find(Class<?> entityClass, String query, Sort sort, Parameters params) {
return find(entityClass, query, sort, params.map());
}
/** Lists entities matching the query with positional parameters. */
public Uni<List<?>> list(Class<?> entityClass, String query, Object... params) {
return list(find(entityClass, query, params));
}
/** Lists entities matching the query with a sort and positional parameters. */
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Object... params) {
return list(find(entityClass, query, sort, params));
}
/** Lists entities matching the query with named parameters. */
public Uni<List<?>> list(Class<?> entityClass, String query, Map<String, Object> params) {
return list(find(entityClass, query, params));
}
/** Lists entities matching the query with a sort and named parameters. */
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return list(find(entityClass, query, sort, params));
}
/** Lists entities matching the query with Panache {@code Parameters}. */
public Uni<List<?>> list(Class<?> entityClass, String query, Parameters params) {
return list(find(entityClass, query, params));
}
/** Lists entities matching the query with a sort and Panache {@code Parameters}. */
public Uni<List<?>> list(Class<?> entityClass, String query, Sort sort, Parameters params) {
return list(find(entityClass, query, sort, params));
}
/** Streams entities matching the query with positional parameters. */
public Multi<?> stream(Class<?> entityClass, String query, Object... params) {
return stream(find(entityClass, query, params));
}
/** Streams entities matching the query with a sort and positional parameters. */
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Object... params) {
return stream(find(entityClass, query, sort, params));
}
/** Streams entities matching the query with named parameters. */
public Multi<?> stream(Class<?> entityClass, String query, Map<String, Object> params) {
return stream(find(entityClass, query, params));
}
/** Streams entities matching the query with a sort and named parameters. */
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Map<String, Object> params) {
return stream(find(entityClass, query, sort, params));
}
/** Streams entities matching the query with Panache {@code Parameters}. */
public Multi<?> stream(Class<?> entityClass, String query, Parameters params) {
return stream(find(entityClass, query, params));
}
/** Streams entities matching the query with a sort and Panache {@code Parameters}. */
public Multi<?> stream(Class<?> entityClass, String query, Sort sort, Parameters params) {
return stream(find(entityClass, query, sort, params));
}
/** Builds a query selecting all entities of the given type. */
public PanacheQueryType findAll(Class<?> entityClass) {
String query = "FROM " + PanacheJpaUtil.getEntityName(entityClass);
Uni<Mutiny.Session> session = getSession();
return createPanacheQuery(session, query, null, null);
}
/** Builds a sorted query selecting all entities of the given type. */
public PanacheQueryType findAll(Class<?> entityClass, Sort sort) {
String query = "FROM " + PanacheJpaUtil.getEntityName(entityClass);
Uni<Mutiny.Session> session = getSession();
return createPanacheQuery(session, query, PanacheJpaUtil.toOrderBy(sort), null);
}
/** Lists all entities of the given type. */
public Uni<List<?>> listAll(Class<?> entityClass) {
return list(findAll(entityClass));
}
/** Lists all entities of the given type with a sort. */
public Uni<List<?>> listAll(Class<?> entityClass, Sort sort) {
return list(findAll(entityClass, sort));
}
/** Streams all entities of the given type. */
public Multi<?> streamAll(Class<?> entityClass) {
return stream(findAll(entityClass));
}
/** Streams all entities of the given type with a sort. */
public Multi<?> streamAll(Class<?> entityClass, Sort sort) {
return stream(findAll(entityClass, sort));
}
/** Counts all entities of the given type. */
@SuppressWarnings({ "rawtypes", "unchecked" })
public Uni<Long> count(Class<?> entityClass) {
return (Uni) getSession()
.chain(session -> session.createQuery("SELECT COUNT(*) FROM " + PanacheJpaUtil.getEntityName(entityClass))
.getSingleResult());
}
/** Counts entities matching the query with positional parameters. */
@SuppressWarnings({ "unchecked", "rawtypes" })
public Uni<Long> count(Class<?> entityClass, String query, Object... params) {
return (Uni) getSession().chain(session -> bindParameters(
session.createQuery(PanacheJpaUtil.createCountQuery(entityClass, query, paramCount(params))),
params).getSingleResult());
}
/** Counts entities matching the query with named parameters. */
@SuppressWarnings({ "rawtypes", "unchecked" })
public Uni<Long> count(Class<?> entityClass, String query, Map<String, Object> params) {
return (Uni) getSession().chain(session -> bindParameters(
session.createQuery(PanacheJpaUtil.createCountQuery(entityClass, query, paramCount(params))),
params).getSingleResult());
}
/** Counts entities matching the query with Panache {@code Parameters}. */
public Uni<Long> count(Class<?> entityClass, String query, Parameters params) {
return count(entityClass, query, params.map());
}
/** Returns whether any entity of the given type exists. */
public Uni<Boolean> exists(Class<?> entityClass) {
return count(entityClass).map(c -> c > 0);
}
/** Returns whether any entity matches the query (positional parameters). */
public Uni<Boolean> exists(Class<?> entityClass, String query, Object... params) {
return count(entityClass, query, params).map(c -> c > 0);
}
/** Returns whether any entity matches the query (named parameters). */
public Uni<Boolean> exists(Class<?> entityClass, String query, Map<String, Object> params) {
return count(entityClass, query, params).map(c -> c > 0);
}
/** Returns whether any entity matches the query (Panache {@code Parameters}). */
public Uni<Boolean> exists(Class<?> entityClass, String query, Parameters params) {
return count(entityClass, query, params).map(c -> c > 0);
}
/** Bulk-deletes all entities of the given type; returns the number of rows deleted. */
public Uni<Long> deleteAll(Class<?> entityClass) {
return getSession().chain(
session -> session.createQuery("DELETE FROM " + PanacheJpaUtil.getEntityName(entityClass)).executeUpdate()
.map(Integer::longValue));
}
/** Deletes the entity with the given id; returns true when an entity was found and removed. */
public Uni<Boolean> deleteById(Class<?> entityClass, Object id) {
// Impl note : we load the entity then delete it because it's the only implementation generic enough for any model,
// and correct in all cases (composite key, graph of entities, ...). HQL cannot be directly used for these reasons.
return findById(entityClass, id)
.chain(entity -> {
if (entity == null) {
return Uni.createFrom().item(false);
}
return getSession().chain(session -> session.remove(entity).map(v -> true));
});
}
/** Bulk-deletes entities matching the query (positional parameters); returns rows deleted. */
public Uni<Long> delete(Class<?> entityClass, String query, Object... params) {
return getSession().chain(session -> bindParameters(
session.createQuery(PanacheJpaUtil.createDeleteQuery(entityClass, query, paramCount(params))), params)
.executeUpdate().map(Integer::longValue));
}
/** Bulk-deletes entities matching the query (named parameters); returns rows deleted. */
public Uni<Long> delete(Class<?> entityClass, String query, Map<String, Object> params) {
return getSession().chain(session -> bindParameters(
session.createQuery(PanacheJpaUtil.createDeleteQuery(entityClass, query, paramCount(params))), params)
.executeUpdate().map(Integer::longValue));
}
/** Bulk-deletes entities matching the query (Panache {@code Parameters}); returns rows deleted. */
public Uni<Long> delete(Class<?> entityClass, String query, Parameters params) {
return delete(entityClass, query, params.map());
}
/** Exception thrown when bytecode enhancement did not replace this method in a subclass. */
public IllegalStateException implementationInjectionMissing() {
return new IllegalStateException(
"This method is normally automatically overridden in subclasses: did you forget to annotate your entity with @Entity?");
}
/** Executes an update/delete HQL statement with positional parameters; returns rows affected. */
public static Uni<Integer> executeUpdate(String query, Object... params) {
return getSession().chain(session -> {
Mutiny.Query<?> jpaQuery = session.createQuery(query);
bindParameters(jpaQuery, params);
return jpaQuery.executeUpdate();
});
}
/** Executes an update/delete HQL statement with named parameters; returns rows affected. */
public static Uni<Integer> executeUpdate(String query, Map<String, Object> params) {
return getSession().chain(session -> {
Mutiny.Query<?> jpaQuery = session.createQuery(query);
bindParameters(jpaQuery, params);
return jpaQuery.executeUpdate();
});
}
/** Executes an entity-scoped update (positional parameters); returns rows affected. */
public Uni<Integer> executeUpdate(Class<?> entityClass, String query, Object... params) {
String updateQuery = PanacheJpaUtil.createUpdateQuery(entityClass, query, paramCount(params));
return executeUpdate(updateQuery, params);
}
/** Executes an entity-scoped update (named parameters); returns rows affected. */
public Uni<Integer> executeUpdate(Class<?> entityClass, String query, Map<String, Object> params) {
String updateQuery = PanacheJpaUtil.createUpdateQuery(entityClass, query, paramCount(params));
return executeUpdate(updateQuery, params);
}
/** Updates entities matching the query (named parameters); returns rows affected. */
public Uni<Integer> update(Class<?> entityClass, String query, Map<String, Object> params) {
return executeUpdate(entityClass, query, params);
}
/** Updates entities matching the query (Panache {@code Parameters}); returns rows affected. */
public Uni<Integer> update(Class<?> entityClass, String query, Parameters params) {
return update(entityClass, query, params.map());
}
/** Updates entities matching the query (positional parameters); returns rows affected. */
public Uni<Integer> update(Class<?> entityClass, String query, Object... params) {
return executeUpdate(entityClass, query, params);
}
}
|
package org.opendaylight.controller.cluster.datastore;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import akka.actor.ActorRef;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.actor.Address;
import akka.actor.AddressFromURIString;
import akka.cluster.Cluster;
import akka.cluster.Member;
import akka.dispatch.Futures;
import akka.pattern.Patterns;
import akka.testkit.javadsl.TestKit;
import com.google.common.base.Stopwatch;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Uninterruptibles;
import com.typesafe.config.ConfigFactory;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.stubbing.Answer;
import org.opendaylight.controller.cluster.access.client.RequestTimeoutException;
import org.opendaylight.controller.cluster.access.concepts.TransactionIdentifier;
import org.opendaylight.controller.cluster.databroker.ClientBackedDataStore;
import org.opendaylight.controller.cluster.databroker.ConcurrentDOMDataBroker;
import org.opendaylight.controller.cluster.databroker.TestClientBackedDataStore;
import org.opendaylight.controller.cluster.datastore.DatastoreContext.Builder;
import org.opendaylight.controller.cluster.datastore.TestShard.RequestFrontendMetadata;
import org.opendaylight.controller.cluster.datastore.TestShard.StartDropMessages;
import org.opendaylight.controller.cluster.datastore.TestShard.StopDropMessages;
import org.opendaylight.controller.cluster.datastore.exceptions.NoShardLeaderException;
import org.opendaylight.controller.cluster.datastore.exceptions.ShardLeaderNotRespondingException;
import org.opendaylight.controller.cluster.datastore.messages.CommitTransactionReply;
import org.opendaylight.controller.cluster.datastore.messages.ForwardedReadyTransaction;
import org.opendaylight.controller.cluster.datastore.messages.GetShardDataTree;
import org.opendaylight.controller.cluster.datastore.messages.ReadyLocalTransaction;
import org.opendaylight.controller.cluster.datastore.messages.ReadyTransactionReply;
import org.opendaylight.controller.cluster.datastore.modification.MergeModification;
import org.opendaylight.controller.cluster.datastore.modification.WriteModification;
import org.opendaylight.controller.cluster.datastore.persisted.FrontendClientMetadata;
import org.opendaylight.controller.cluster.datastore.persisted.FrontendShardDataTreeSnapshotMetadata;
import org.opendaylight.controller.cluster.datastore.persisted.MetadataShardDataTreeSnapshot;
import org.opendaylight.controller.cluster.datastore.persisted.ShardSnapshotState;
import org.opendaylight.controller.cluster.datastore.utils.UnsignedLongBitmap;
import org.opendaylight.controller.cluster.raft.base.messages.TimeoutNow;
import org.opendaylight.controller.cluster.raft.client.messages.GetOnDemandRaftState;
import org.opendaylight.controller.cluster.raft.client.messages.OnDemandRaftState;
import org.opendaylight.controller.cluster.raft.client.messages.Shutdown;
import org.opendaylight.controller.cluster.raft.messages.AppendEntries;
import org.opendaylight.controller.cluster.raft.messages.RequestVote;
import org.opendaylight.controller.cluster.raft.persisted.ApplyJournalEntries;
import org.opendaylight.controller.cluster.raft.persisted.Snapshot;
import org.opendaylight.controller.cluster.raft.policy.DisableElectionsRaftPolicy;
import org.opendaylight.controller.cluster.raft.utils.InMemoryJournal;
import org.opendaylight.controller.cluster.raft.utils.InMemorySnapshotStore;
import org.opendaylight.controller.md.cluster.datastore.model.CarsModel;
import org.opendaylight.controller.md.cluster.datastore.model.PeopleModel;
import org.opendaylight.controller.md.cluster.datastore.model.SchemaContextHelper;
import org.opendaylight.controller.md.cluster.datastore.model.TestModel;
import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
import org.opendaylight.mdsal.common.api.TransactionCommitFailedException;
import org.opendaylight.mdsal.dom.api.DOMDataTreeWriteTransaction;
import org.opendaylight.mdsal.dom.api.DOMTransactionChain;
import org.opendaylight.mdsal.dom.api.DOMTransactionChainListener;
import org.opendaylight.mdsal.dom.spi.store.DOMStore;
import org.opendaylight.mdsal.dom.spi.store.DOMStoreReadTransaction;
import org.opendaylight.mdsal.dom.spi.store.DOMStoreReadWriteTransaction;
import org.opendaylight.mdsal.dom.spi.store.DOMStoreThreePhaseCommitCohort;
import org.opendaylight.mdsal.dom.spi.store.DOMStoreTransactionChain;
import org.opendaylight.mdsal.dom.spi.store.DOMStoreWriteTransaction;
import org.opendaylight.yangtools.yang.common.Uint64;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
import org.opendaylight.yangtools.yang.data.api.schema.MapEntryNode;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.SystemMapNode;
import org.opendaylight.yangtools.yang.data.api.schema.builder.CollectionNodeBuilder;
import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTree;
import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeConfiguration;
import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeModification;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.impl.ImmutableContainerNodeBuilder;
import org.opendaylight.yangtools.yang.data.impl.schema.tree.InMemoryDataTreeFactory;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import scala.collection.Set;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
/**
* End-to-end distributed data store tests that exercise remote shards and transactions.
*
* @author Thomas Pantelis
*/
@RunWith(Parameterized.class)
public class DistributedDataStoreRemotingIntegrationTest extends AbstractTest {
// Parameterized-test data: each entry pairs a datastore implementation class
// with the commit timeout (in seconds) used by the IntegrationTestKit for that run.
@Parameters(name = "{0}")
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] {
{ TestDistributedDataStore.class, 7}, { TestClientBackedDataStore.class, 12 }
});
}
// Datastore implementation under test (injected by the Parameterized runner)
@Parameter(0)
public Class<? extends AbstractDataStore> testParameter;
// Commit timeout (seconds) matching the implementation under test
@Parameter(1)
public int commitTimeout;
// Shard name sets used when initializing datastores
private static final String[] CARS_AND_PEOPLE = {"cars", "people"};
private static final String[] CARS = {"cars"};
// Akka addresses of the two cluster members used as join targets
private static final Address MEMBER_1_ADDRESS = AddressFromURIString.parse(
"akka://cluster-test@127.0.0.1:2558");
private static final Address MEMBER_2_ADDRESS = AddressFromURIString.parse(
"akka://cluster-test@127.0.0.1:2559");
// Module-shard configuration resources for the various member/shard combinations
private static final String MODULE_SHARDS_CARS_ONLY_1_2 = "module-shards-cars-member-1-and-2.conf";
private static final String MODULE_SHARDS_CARS_PEOPLE_1_2 = "module-shards-member1-and-2.conf";
private static final String MODULE_SHARDS_CARS_PEOPLE_1_2_3 = "module-shards-member1-and-2-and-3.conf";
private static final String MODULE_SHARDS_CARS_1_2_3 = "module-shards-cars-member-1-and-2-and-3.conf";
// Actor systems for the leader member and the two follower members
private ActorSystem leaderSystem;
private ActorSystem followerSystem;
private ActorSystem follower2System;
// Leader uses default raft policy; followers disable elections so the leader stays stable
private final DatastoreContext.Builder leaderDatastoreContextBuilder =
DatastoreContext.newBuilder().shardHeartbeatIntervalInMillis(100).shardElectionTimeoutFactor(2);
private final DatastoreContext.Builder followerDatastoreContextBuilder =
DatastoreContext.newBuilder().shardHeartbeatIntervalInMillis(100).shardElectionTimeoutFactor(5)
.customRaftPolicyImplementation(DisableElectionsRaftPolicy.class.getName());
// Pre-allocated transaction identifiers available to tests
private final TransactionIdentifier tx1 = nextTransactionId();
private final TransactionIdentifier tx2 = nextTransactionId();
// Datastores and test kits created by initDatastores(); closed in tearDown()
private AbstractDataStore followerDistributedDataStore;
private AbstractDataStore leaderDistributedDataStore;
private IntegrationTestKit followerTestKit;
private IntegrationTestKit leaderTestKit;
// Creates three actor systems (one per simulated cluster member) and joins them all
// to member 1's address, which acts as the cluster seed. Clears persisted state first
// so each test starts from an empty journal and snapshot store.
@Before
public void setUp() {
InMemoryJournal.clear();
InMemorySnapshotStore.clear();
leaderSystem = ActorSystem.create("cluster-test", ConfigFactory.load().getConfig("Member1"));
Cluster.get(leaderSystem).join(MEMBER_1_ADDRESS);
followerSystem = ActorSystem.create("cluster-test", ConfigFactory.load().getConfig("Member2"));
Cluster.get(followerSystem).join(MEMBER_1_ADDRESS);
follower2System = ActorSystem.create("cluster-test", ConfigFactory.load().getConfig("Member3"));
Cluster.get(follower2System).join(MEMBER_1_ADDRESS);
}
@After
public void tearDown() {
    // Close both datastore frontends if they were created. The original code closed
    // leaderDistributedDataStore inside the follower's null check, leaking the follower store;
    // each store is now closed under its own guard.
    if (followerDistributedDataStore != null) {
        followerDistributedDataStore.close();
    }
    if (leaderDistributedDataStore != null) {
        leaderDistributedDataStore.close();
    }

    // Shut down all three actor systems, waiting for termination (true).
    TestKit.shutdownActorSystem(leaderSystem, true);
    TestKit.shutdownActorSystem(followerSystem, true);
    TestKit.shutdownActorSystem(follower2System, true);

    // Clear in-memory persistence so the next test starts clean.
    InMemoryJournal.clear();
    InMemorySnapshotStore.clear();
}
// Convenience wrapper: sets up leader/follower datastores hosting only the cars shard on members 1 and 2.
private void initDatastoresWithCars(final String type) throws Exception {
    initDatastores(type, MODULE_SHARDS_CARS_ONLY_1_2, CARS);
}
// Convenience wrapper: sets up leader/follower datastores hosting the cars and people shards on members 1 and 2.
private void initDatastoresWithCarsAndPeople(final String type) throws Exception {
    initDatastores(type, MODULE_SHARDS_CARS_PEOPLE_1_2, CARS_AND_PEOPLE);
}
// Sets up the datastores with the default leader/follower context builders declared as fields.
private void initDatastores(final String type, final String moduleShardsConfig, final String[] shards)
        throws Exception {
    initDatastores(type, moduleShardsConfig, shards, leaderDatastoreContextBuilder,
        followerDatastoreContextBuilder);
}
/**
 * Creates the leader and follower datastore frontends, then blocks until the leader's shards
 * have elected a leader and both members see each other as up. The leader store is created
 * first so its shards can win the initial election (followers have elections disabled).
 */
private void initDatastores(final String type, final String moduleShardsConfig, final String[] shards,
        final DatastoreContext.Builder leaderBuilder, final DatastoreContext.Builder followerBuilder)
        throws Exception {
    leaderTestKit = new IntegrationTestKit(leaderSystem, leaderBuilder, commitTimeout);

    leaderDistributedDataStore = leaderTestKit.setupAbstractDataStore(
        testParameter, type, moduleShardsConfig, false, shards);

    followerTestKit = new IntegrationTestKit(followerSystem, followerBuilder, commitTimeout);
    followerDistributedDataStore = followerTestKit.setupAbstractDataStore(
        testParameter, type, moduleShardsConfig, false, shards);

    // Wait for shard leadership and for cluster membership to converge before the test proceeds.
    leaderTestKit.waitUntilLeader(leaderDistributedDataStore.getActorUtils(), shards);
    leaderTestKit.waitForMembersUp("member-2");
    followerTestKit.waitForMembersUp("member-1");
}
// Reads the car list via the given transaction and asserts it equals exactly the supplied entries.
private static void verifyCars(final DOMStoreReadTransaction readTx, final MapEntryNode... entries)
        throws Exception {
    final Optional<NormalizedNode> actual = readTx.read(CarsModel.CAR_LIST_PATH).get(5, TimeUnit.SECONDS);
    assertTrue("isPresent", actual.isPresent());

    // Build the expected map node from the supplied entries and compare it wholesale.
    final CollectionNodeBuilder<MapEntryNode, SystemMapNode> expected =
            ImmutableNodes.mapNodeBuilder(CarsModel.CAR_QNAME);
    for (final MapEntryNode entry : entries) {
        expected.withChild(entry);
    }

    assertEquals("Car list node", expected.build(), actual.get());
}
// Asserts that reading the given path through the transaction yields exactly the expected node.
private static void verifyNode(final DOMStoreReadTransaction readTx, final YangInstanceIdentifier path,
        final NormalizedNode expNode) throws Exception {
    assertEquals(Optional.of(expNode), readTx.read(path).get(5, TimeUnit.SECONDS));
}
// Asserts that the given path exists according to the transaction's exists() check.
private static void verifyExists(final DOMStoreReadTransaction readTx, final YangInstanceIdentifier path)
        throws Exception {
    assertEquals("exists", Boolean.TRUE, readTx.exists(path).get(5, TimeUnit.SECONDS));
}
/**
 * Writes and deletes car entries through the follower's write-only transactions, verifies the data
 * replicates to the leader, then restarts member-2 as a single node to verify persistence/recovery.
 */
@Test
public void testWriteTransactionWithSingleShard() throws Exception {
    final String testName = "testWriteTransactionWithSingleShard";
    initDatastoresWithCars(testName);

    final String followerCarShardName = "member-2-shard-cars-" + testName;

    // Write the cars container/list and merge two car entries via the follower, then commit.
    DOMStoreWriteTransaction writeTx = followerDistributedDataStore.newWriteOnlyTransaction();
    assertNotNull("newWriteOnlyTransaction returned null", writeTx);

    writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    writeTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());

    final MapEntryNode car1 = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
    final YangInstanceIdentifier car1Path = CarsModel.newCarPath("optima");
    writeTx.merge(car1Path, car1);

    final MapEntryNode car2 = CarsModel.newCarEntry("sportage", Uint64.valueOf(25000));
    final YangInstanceIdentifier car2Path = CarsModel.newCarPath("sportage");
    writeTx.merge(car2Path, car2);

    followerTestKit.doCommit(writeTx.ready());

    // Both members must see both cars after replication.
    verifyCars(followerDistributedDataStore.newReadOnlyTransaction(), car1, car2);
    verifyCars(leaderDistributedDataStore.newReadOnlyTransaction(), car1, car2);

    // Test delete
    writeTx = followerDistributedDataStore.newWriteOnlyTransaction();
    writeTx.delete(car1Path);
    followerTestKit.doCommit(writeTx.ready());

    verifyExists(followerDistributedDataStore.newReadOnlyTransaction(), car2Path);
    verifyCars(followerDistributedDataStore.newReadOnlyTransaction(), car2);
    verifyCars(leaderDistributedDataStore.newReadOnlyTransaction(), car2);

    // Re-instate the follower member 2 as a single-node to verify replication and recovery.

    // The following is a bit tricky. Before we reinstate the follower we need to ensure it has persisted and
    // applied and all the log entries from the leader. Since we've verified the car data above we know that
    // all the transactions have been applied on the leader so we first read and capture its lastAppliedIndex.
    final AtomicLong leaderLastAppliedIndex = new AtomicLong();
    IntegrationTestKit.verifyShardState(leaderDistributedDataStore, CARS[0],
        state -> leaderLastAppliedIndex.set(state.getLastApplied()));

    // Now we need to make sure the follower has persisted the leader's lastAppliedIndex via ApplyJournalEntries.
    // However we don't know exactly how many ApplyJournalEntries messages there will be as it can differ between
    // the tell-based and ask-based front-ends. For ask-based there will be exactly 2 ApplyJournalEntries but
    // tell-based persists additional payloads which could be replicated and applied in a batch resulting in
    // either 2 or 3 ApplyJournalEntries. To handle this we read the follower's persisted ApplyJournalEntries
    // until we find the one that encompasses the leader's lastAppliedIndex.
    Stopwatch sw = Stopwatch.createStarted();
    boolean done = false;
    while (!done) {
        final List<ApplyJournalEntries> entries = InMemoryJournal.get(followerCarShardName,
            ApplyJournalEntries.class);
        for (ApplyJournalEntries aje: entries) {
            if (aje.getToIndex() >= leaderLastAppliedIndex.get()) {
                done = true;
                break;
            }
        }

        // Fail the test (via the timed assert) rather than loop forever if the entry never shows up.
        assertTrue("Follower did not persist ApplyJournalEntries containing leader's lastAppliedIndex "
            + leaderLastAppliedIndex + ". Entries persisted: " + entries, sw.elapsed(TimeUnit.SECONDS) <= 5);

        Uninterruptibles.sleepUninterruptibly(50, TimeUnit.MILLISECONDS);
    }

    // Shut down both systems and restart member-2 standalone; its recovered journal must contain car2.
    TestKit.shutdownActorSystem(leaderSystem, true);
    TestKit.shutdownActorSystem(followerSystem, true);

    final ActorSystem newSystem = newActorSystem("reinstated-member2", "Member2");

    try (AbstractDataStore member2Datastore = new IntegrationTestKit(newSystem, leaderDatastoreContextBuilder,
            commitTimeout)
            .setupAbstractDataStore(testParameter, testName, "module-shards-member2", true, CARS)) {
        verifyCars(member2Datastore.newReadOnlyTransaction(), car2);
    }
}
/**
 * Commits several single-write transactions on one chain in quick succession and verifies the
 * leader's frontend metadata eventually reflects the purged transactions (no range-set leak).
 */
@Test
public void testSingleTransactionsWritesInQuickSuccession() throws Exception {
    initDatastoresWithCars("testSingleTransactionsWritesInQuickSuccession");

    final DOMStoreTransactionChain txChain = followerDistributedDataStore.createTransactionChain();

    // Seed the cars container and list.
    DOMStoreWriteTransaction writeTx = txChain.newWriteOnlyTransaction();
    writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    writeTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
    followerTestKit.doCommit(writeTx.ready());

    // Alternate a committed write with a read-only transaction on the same chain for each car.
    int numCars = 5;
    for (int i = 0; i < numCars; i++) {
        writeTx = txChain.newWriteOnlyTransaction();
        writeTx.write(CarsModel.newCarPath("car" + i), CarsModel.newCarEntry("car" + i, Uint64.valueOf(20000)));

        followerTestKit.doCommit(writeTx.ready());

        try (var tx = txChain.newReadOnlyTransaction()) {
            tx.read(CarsModel.BASE_PATH).get();
        }
    }

    // wait to let the shard catch up with purged
    await("Range set leak test").atMost(5, TimeUnit.SECONDS)
            .pollInterval(500, TimeUnit.MILLISECONDS)
            .untilAsserted(() -> {
                // Query the local cars shard for its frontend metadata and check the purged-transaction
                // bookkeeping, which differs between the tell-based and ask-based frontends.
                final var localShard = leaderDistributedDataStore.getActorUtils().findLocalShard("cars")
                    .orElseThrow();
                final var frontendMetadata =
                    (FrontendShardDataTreeSnapshotMetadata) leaderDistributedDataStore.getActorUtils()
                        .executeOperation(localShard, new RequestFrontendMetadata());

                final var clientMeta = frontendMetadata.getClients().get(0);
                if (leaderDistributedDataStore.getActorUtils().getDatastoreContext().isUseTellBasedProtocol()) {
                    assertTellClientMetadata(clientMeta, numCars * 2);
                } else {
                    assertAskClientMetadata(clientMeta);
                }
            });

    // All written cars must be readable back through the chain.
    try (var tx = txChain.newReadOnlyTransaction()) {
        final var body = tx.read(CarsModel.CAR_LIST_PATH).get(5, TimeUnit.SECONDS).orElseThrow().body();
        assertThat(body, instanceOf(Collection.class));
        assertEquals(numCars, ((Collection<?>) body).size());
    }
}
// Asserts the expectations for the ask-based frontend: it tracks no history metadata at all.
private void assertAskClientMetadata(final FrontendClientMetadata clientMeta) {
    // ask based should track no metadata
    assertEquals(List.of(), clientMeta.getCurrentHistories());
}
// Asserts the expectations for the tell-based frontend: history 1 should show no closed
// transactions and a contiguous purged range [0..lastPurged].
private void assertTellClientMetadata(final FrontendClientMetadata clientMeta, final long lastPurged) {
    // Scan for the history with id 1; if absent the loop stops at the last element.
    // NOTE(review): iterator.next() before hasNext() assumes at least one history is present.
    final var iterator = clientMeta.getCurrentHistories().iterator();
    var metadata = iterator.next();
    while (iterator.hasNext() && metadata.getHistoryId() != 1) {
        metadata = iterator.next();
    }

    // FIXME: CONTROLLER-1991: remove this assumption
    // The unconditional assumeTrue(false) deliberately skips the assertions below until that
    // issue is resolved; the code after it is currently unreachable.
    assumeTrue(false);

    assertEquals(UnsignedLongBitmap.of(), metadata.getClosedTransactions());
    assertEquals("[[0.." + lastPurged + "]]", metadata.getPurgedTransactions().ranges().toString());
}
/**
 * Opens and immediately closes (without committing) several write transactions on a chain and
 * verifies the leader's frontend metadata still purges them (no metadata leak on close).
 * Only runs for the client-backed (tell-based) datastore; see the CONTROLLER-2016 note below.
 */
@Test
public void testCloseTransactionMetadataLeak() throws Exception {
    // FIXME: CONTROLLER-2016: ask-based frontend triggers this:
    //
    //         member-2-datastore-testCloseTransactionMetadataLeak-fe-0-chn-1-txn-1-0 is not ready yet
    //         at org.opendaylight.controller.cluster.datastore.TransactionChainProxy$Allocated.checkReady()
    //         at org.opendaylight.controller.cluster.datastore.TransactionChainProxy.newReadOnlyTransaction()
    assumeTrue(testParameter.isAssignableFrom(ClientBackedDataStore.class));

    initDatastoresWithCars("testCloseTransactionMetadataLeak");

    final DOMStoreTransactionChain txChain = followerDistributedDataStore.createTransactionChain();

    // Seed the cars container and list.
    DOMStoreWriteTransaction writeTx = txChain.newWriteOnlyTransaction();
    writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    writeTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
    followerTestKit.doCommit(writeTx.ready());

    // Open a write transaction and close it unused, interleaved with read-only transactions.
    int numCars = 5;
    for (int i = 0; i < numCars; i++) {
        writeTx = txChain.newWriteOnlyTransaction();
        writeTx.close();

        try (var tx = txChain.newReadOnlyTransaction()) {
            tx.read(CarsModel.BASE_PATH).get();
        }
    }

    // One final committed write so there is a known end state.
    writeTx = txChain.newWriteOnlyTransaction();
    writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    writeTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
    followerTestKit.doCommit(writeTx.ready());

    // wait to let the shard catch up with purged
    await("Close transaction purge leak test.").atMost(5, TimeUnit.SECONDS)
            .pollInterval(500, TimeUnit.MILLISECONDS)
            .untilAsserted(() -> {
                // Fetch the cars shard's frontend metadata and verify purge bookkeeping per frontend type.
                final var localShard = leaderDistributedDataStore.getActorUtils().findLocalShard("cars")
                    .orElseThrow();
                final var frontendMetadata =
                        (FrontendShardDataTreeSnapshotMetadata) leaderDistributedDataStore.getActorUtils()
                                .executeOperation(localShard, new RequestFrontendMetadata());

                final var clientMeta = frontendMetadata.getClients().get(0);
                if (leaderDistributedDataStore.getActorUtils().getDatastoreContext().isUseTellBasedProtocol()) {
                    assertTellClientMetadata(clientMeta, numCars * 2 + 1);
                } else {
                    assertAskClientMetadata(clientMeta);
                }
            });

    // The car list must be empty-compatible: only the re-written (empty) list exists... verify count.
    // NOTE(review): the final write replaces the car list, so the expected size being numCars
    // relies on merge/write semantics of the chain — confirmed by the original assertion below.
    try (var tx = txChain.newReadOnlyTransaction()) {
        final var body = tx.read(CarsModel.CAR_LIST_PATH).get(5, TimeUnit.SECONDS).orElseThrow().body();
        assertThat(body, instanceOf(Collection.class));
        assertEquals(numCars, ((Collection<?>) body).size());
    }
}
@Test
public void testReadWriteTransactionWithSingleShard() throws Exception {
    initDatastoresWithCars("testReadWriteTransactionWithSingleShard");

    final DOMStoreReadWriteTransaction rwTx = followerDistributedDataStore.newReadWriteTransaction();
    assertNotNull("newReadWriteTransaction returned null", rwTx);

    // Seed the cars container and list, then merge two entries, checking each is visible
    // to this same transaction before committing.
    rwTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    rwTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());

    final MapEntryNode optimaEntry = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
    rwTx.merge(CarsModel.newCarPath("optima"), optimaEntry);
    verifyCars(rwTx, optimaEntry);

    final YangInstanceIdentifier sportagePath = CarsModel.newCarPath("sportage");
    final MapEntryNode sportageEntry = CarsModel.newCarEntry("sportage", Uint64.valueOf(25000));
    rwTx.merge(sportagePath, sportageEntry);
    verifyExists(rwTx, sportagePath);

    followerTestKit.doCommit(rwTx.ready());

    // After commit, a fresh read-only transaction must see both cars.
    verifyCars(followerDistributedDataStore.newReadOnlyTransaction(), optimaEntry, sportageEntry);
}
@Test
public void testWriteTransactionWithMultipleShards() throws Exception {
    initDatastoresWithCarsAndPeople("testWriteTransactionWithMultipleShards");

    final DOMStoreWriteTransaction writeTx = followerDistributedDataStore.newWriteOnlyTransaction();
    assertNotNull("newWriteOnlyTransaction returned null", writeTx);

    // Write the top-level containers of both shards in a single transaction.
    final YangInstanceIdentifier carsRoot = CarsModel.BASE_PATH;
    final NormalizedNode emptyCars = CarsModel.emptyContainer();
    writeTx.write(carsRoot, emptyCars);

    final YangInstanceIdentifier peopleRoot = PeopleModel.BASE_PATH;
    final NormalizedNode emptyPeople = PeopleModel.emptyContainer();
    writeTx.write(peopleRoot, emptyPeople);

    followerTestKit.doCommit(writeTx.ready());

    // Both shard roots must be readable after the cross-shard commit.
    final DOMStoreReadTransaction verifyTx = followerDistributedDataStore.newReadOnlyTransaction();
    verifyNode(verifyTx, carsRoot, emptyCars);
    verifyNode(verifyTx, peopleRoot, emptyPeople);
}
@Test
public void testReadWriteTransactionWithMultipleShards() throws Exception {
    initDatastoresWithCarsAndPeople("testReadWriteTransactionWithMultipleShards");

    final DOMStoreReadWriteTransaction rwTx = followerDistributedDataStore.newReadWriteTransaction();
    assertNotNull("newReadWriteTransaction returned null", rwTx);

    // Write the top-level containers of both shards through one read-write transaction.
    final YangInstanceIdentifier carsRoot = CarsModel.BASE_PATH;
    final NormalizedNode emptyCars = CarsModel.emptyContainer();
    rwTx.write(carsRoot, emptyCars);

    final YangInstanceIdentifier peopleRoot = PeopleModel.BASE_PATH;
    final NormalizedNode emptyPeople = PeopleModel.emptyContainer();
    rwTx.write(peopleRoot, emptyPeople);

    followerTestKit.doCommit(rwTx.ready());

    // Both shard roots must be readable after the cross-shard commit.
    final DOMStoreReadTransaction verifyTx = followerDistributedDataStore.newReadOnlyTransaction();
    verifyNode(verifyTx, carsRoot, emptyCars);
    verifyNode(verifyTx, peopleRoot, emptyPeople);
}
/**
 * Exercises a transaction chain against a single shard: a write-only tx, a read-only tx that
 * sees the uncommitted chain state, and a read-write tx, committed in order.
 */
@Test
public void testTransactionChainWithSingleShard() throws Exception {
    initDatastoresWithCars("testTransactionChainWithSingleShard");

    final DOMStoreTransactionChain txChain = followerDistributedDataStore.createTransactionChain();

    // Add the top-level cars container with write-only.
    final DOMStoreWriteTransaction writeTx = txChain.newWriteOnlyTransaction();
    assertNotNull("newWriteOnlyTransaction returned null", writeTx);

    writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());

    // Ready the write-only tx but commit later, after the chain's subsequent txs are prepared.
    final DOMStoreThreePhaseCommitCohort writeTxReady = writeTx.ready();

    // Verify the top-level cars container with read-only.
    verifyNode(txChain.newReadOnlyTransaction(), CarsModel.BASE_PATH, CarsModel.emptyContainer());

    // Perform car operations with read-write.
    final DOMStoreReadWriteTransaction rwTx = txChain.newReadWriteTransaction();

    verifyNode(rwTx, CarsModel.BASE_PATH, CarsModel.emptyContainer());

    rwTx.merge(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());

    final MapEntryNode car1 = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
    final YangInstanceIdentifier car1Path = CarsModel.newCarPath("optima");
    rwTx.write(car1Path, car1);

    verifyExists(rwTx, car1Path);

    verifyCars(rwTx, car1);

    // Replace car1 with car2 within the same read-write tx.
    final MapEntryNode car2 = CarsModel.newCarEntry("sportage", Uint64.valueOf(25000));
    rwTx.merge(CarsModel.newCarPath("sportage"), car2);
    rwTx.delete(car1Path);

    // Commit in chain order: the earlier write-only cohort first, then the read-write tx.
    followerTestKit.doCommit(writeTxReady);

    followerTestKit.doCommit(rwTx.ready());

    txChain.close();

    verifyCars(followerDistributedDataStore.newReadOnlyTransaction(), car2);
}
/**
 * Exercises a transaction chain spanning the cars and people shards: seeds both shards,
 * writes a car and a person via a read-write tx, deletes the person via a later write-only tx,
 * and verifies the final state after committing both cohorts in order.
 */
@Test
public void testTransactionChainWithMultipleShards() throws Exception {
    initDatastoresWithCarsAndPeople("testTransactionChainWithMultipleShards");

    final DOMStoreTransactionChain txChain = followerDistributedDataStore.createTransactionChain();

    DOMStoreWriteTransaction writeTx = txChain.newWriteOnlyTransaction();
    assertNotNull("newWriteOnlyTransaction returned null", writeTx);

    // Seed both shards' containers and lists in one transaction.
    writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    writeTx.write(PeopleModel.BASE_PATH, PeopleModel.emptyContainer());

    writeTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
    writeTx.write(PeopleModel.PERSON_LIST_PATH, PeopleModel.newPersonMapNode());

    followerTestKit.doCommit(writeTx.ready());

    // Write a car and a person through a read-write tx and read both back pre-commit.
    final DOMStoreReadWriteTransaction readWriteTx = txChain.newReadWriteTransaction();

    final MapEntryNode car = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
    final YangInstanceIdentifier carPath = CarsModel.newCarPath("optima");
    readWriteTx.write(carPath, car);

    final MapEntryNode person = PeopleModel.newPersonEntry("jack");
    final YangInstanceIdentifier personPath = PeopleModel.newPersonPath("jack");
    readWriteTx.merge(personPath, person);

    assertEquals(Optional.of(car), readWriteTx.read(carPath).get(5, TimeUnit.SECONDS));
    assertEquals(Optional.of(person), readWriteTx.read(personPath).get(5, TimeUnit.SECONDS));

    final DOMStoreThreePhaseCommitCohort cohort2 = readWriteTx.ready();

    // A later write-only tx on the same chain deletes the person again.
    writeTx = txChain.newWriteOnlyTransaction();

    writeTx.delete(personPath);

    final DOMStoreThreePhaseCommitCohort cohort3 = writeTx.ready();

    // Commit in chain order.
    followerTestKit.doCommit(cohort2);
    followerTestKit.doCommit(cohort3);

    txChain.close();

    // Final state: the car exists, the person does not.
    final DOMStoreReadTransaction readTx = followerDistributedDataStore.newReadOnlyTransaction();
    verifyCars(readTx, car);

    assertEquals(Optional.empty(), readTx.read(personPath).get(5, TimeUnit.SECONDS));
}
/**
 * Verifies that committing invalid data through a broker transaction chain fails the commit
 * with TransactionCommitFailedException and notifies the chain listener (single shard).
 */
@Test
public void testChainedTransactionFailureWithSingleShard() throws Exception {
    initDatastoresWithCars("testChainedTransactionFailureWithSingleShard");

    // Wrap the follower store in a DOM broker so we can use the listener-based chain API.
    final ConcurrentDOMDataBroker broker = new ConcurrentDOMDataBroker(
            ImmutableMap.<LogicalDatastoreType, DOMStore>builder().put(
                    LogicalDatastoreType.CONFIGURATION, followerDistributedDataStore).build(),
                    MoreExecutors.directExecutor());

    final DOMTransactionChainListener listener = mock(DOMTransactionChainListener.class);
    final DOMTransactionChain txChain = broker.createTransactionChain(listener);

    final DOMDataTreeWriteTransaction writeTx = txChain.newWriteOnlyTransaction();

    // A container carrying a leaf that is not part of the cars schema, so commit validation fails.
    final ContainerNode invalidData = ImmutableContainerNodeBuilder.create().withNodeIdentifier(
            new YangInstanceIdentifier.NodeIdentifier(CarsModel.BASE_QNAME))
                .withChild(ImmutableNodes.leafNode(TestModel.JUNK_QNAME, "junk")).build();

    writeTx.merge(LogicalDatastoreType.CONFIGURATION, CarsModel.BASE_PATH, invalidData);

    // The commit future must fail with a TransactionCommitFailedException cause.
    final var ex = assertThrows(ExecutionException.class, () -> writeTx.commit().get(5, TimeUnit.SECONDS))
        .getCause();
    assertThat(ex, instanceOf(TransactionCommitFailedException.class));

    // The chain listener must be notified of the failed transaction.
    verify(listener, timeout(5000)).onTransactionChainFailed(eq(txChain), eq(writeTx), any(Throwable.class));

    txChain.close();
    broker.close();
}
/**
 * Same as the single-shard chained-failure test but with a valid put to the people shard
 * alongside the invalid merge to the cars shard, proving a cross-shard commit fails atomically.
 */
@Test
public void testChainedTransactionFailureWithMultipleShards() throws Exception {
    initDatastoresWithCarsAndPeople("testChainedTransactionFailureWithMultipleShards");

    // Wrap the follower store in a DOM broker so we can use the listener-based chain API.
    final ConcurrentDOMDataBroker broker = new ConcurrentDOMDataBroker(
            ImmutableMap.<LogicalDatastoreType, DOMStore>builder().put(
                    LogicalDatastoreType.CONFIGURATION, followerDistributedDataStore).build(),
                    MoreExecutors.directExecutor());

    final DOMTransactionChainListener listener = mock(DOMTransactionChainListener.class);
    final DOMTransactionChain txChain = broker.createTransactionChain(listener);

    final DOMDataTreeWriteTransaction writeTx = txChain.newWriteOnlyTransaction();

    writeTx.put(LogicalDatastoreType.CONFIGURATION, PeopleModel.BASE_PATH, PeopleModel.emptyContainer());

    // A container carrying a leaf that is not part of the cars schema, so commit validation fails.
    final ContainerNode invalidData = ImmutableContainerNodeBuilder.create().withNodeIdentifier(
            new YangInstanceIdentifier.NodeIdentifier(CarsModel.BASE_QNAME))
                .withChild(ImmutableNodes.leafNode(TestModel.JUNK_QNAME, "junk")).build();

    // Note that merge will validate the data and fail but put succeeds b/c deep validation is not
    // done for put for performance reasons.
    writeTx.merge(LogicalDatastoreType.CONFIGURATION, CarsModel.BASE_PATH, invalidData);

    // The commit future must fail with a TransactionCommitFailedException cause.
    final var ex = assertThrows(ExecutionException.class, () -> writeTx.commit().get(5, TimeUnit.SECONDS))
        .getCause();
    assertThat(ex, instanceOf(TransactionCommitFailedException.class));

    // The chain listener must be notified of the failed transaction.
    verify(listener, timeout(5000)).onTransactionChainFailed(eq(txChain), eq(writeTx), any(Throwable.class));

    txChain.close();
    broker.close();
}
/**
 * Verifies transactions keep working across a leadership change: commits via the follower,
 * takes the original leader down, re-enables elections on the follower so it becomes leader,
 * rejoins member-1 as a follower, and commits again.
 */
@Test
public void testSingleShardTransactionsWithLeaderChanges() throws Exception {
    followerDatastoreContextBuilder.backendAlivenessTimerIntervalInSeconds(2);
    final String testName = "testSingleShardTransactionsWithLeaderChanges";
    initDatastoresWithCars(testName);

    final String followerCarShardName = "member-2-shard-cars-" + testName;
    // Latch fires once the follower persists one ApplyJournalEntries, i.e. the commit is applied.
    InMemoryJournal.addWriteMessagesCompleteLatch(followerCarShardName, 1, ApplyJournalEntries.class);

    // Write top-level car container from the follower so it uses a remote Tx.
    DOMStoreWriteTransaction writeTx = followerDistributedDataStore.newWriteOnlyTransaction();

    writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    writeTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());

    followerTestKit.doCommit(writeTx.ready());

    InMemoryJournal.waitForWriteMessagesComplete(followerCarShardName);

    // Switch the leader to the follower
    sendDatastoreContextUpdate(followerDistributedDataStore, followerDatastoreContextBuilder
            .shardElectionTimeoutFactor(1).customRaftPolicyImplementation(null));

    TestKit.shutdownActorSystem(leaderSystem, true);
    Cluster.get(followerSystem).leave(MEMBER_1_ADDRESS);

    followerTestKit.waitUntilNoLeader(followerDistributedDataStore.getActorUtils(), CARS);

    // Bring member-1 back, joining via the follower (now the cluster seed for it).
    leaderSystem = ActorSystem.create("cluster-test", ConfigFactory.load().getConfig("Member1"));
    Cluster.get(leaderSystem).join(MEMBER_2_ADDRESS);

    final DatastoreContext.Builder newMember1Builder = DatastoreContext.newBuilder()
            .shardHeartbeatIntervalInMillis(100).shardElectionTimeoutFactor(5);
    IntegrationTestKit newMember1TestKit = new IntegrationTestKit(leaderSystem, newMember1Builder, commitTimeout);

    try (AbstractDataStore ds =
            newMember1TestKit.setupAbstractDataStore(
                    testParameter, testName, MODULE_SHARDS_CARS_ONLY_1_2, false, CARS)) {

        followerTestKit.waitUntilLeader(followerDistributedDataStore.getActorUtils(), CARS);

        // Write a car entry to the new leader - should switch to local Tx
        writeTx = followerDistributedDataStore.newWriteOnlyTransaction();

        MapEntryNode car1 = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
        YangInstanceIdentifier car1Path = CarsModel.newCarPath("optima");
        writeTx.merge(car1Path, car1);

        followerTestKit.doCommit(writeTx.ready());

        verifyCars(followerDistributedDataStore.newReadOnlyTransaction(), car1);
    }
}
/**
 * Sends ReadyLocalTransaction messages directly to the follower's cars shard actor and verifies
 * they are forwarded to the leader: first with immediate commit, then with an explicit
 * three-phase commit driven through ThreePhaseCommitCohortProxy.
 */
@SuppressWarnings("unchecked")
@Test
public void testReadyLocalTransactionForwardedToLeader() throws Exception {
    initDatastoresWithCars("testReadyLocalTransactionForwardedToLeader");
    followerTestKit.waitUntilLeader(followerDistributedDataStore.getActorUtils(), "cars");

    final Optional<ActorRef> carsFollowerShard =
            followerDistributedDataStore.getActorUtils().findLocalShard("cars");
    assertTrue("Cars follower shard found", carsFollowerShard.isPresent());

    // Build modifications against a standalone in-memory data tree, mirroring a local frontend.
    final DataTree dataTree = new InMemoryDataTreeFactory().create(
        DataTreeConfiguration.DEFAULT_OPERATIONAL, SchemaContextHelper.full());

    // Send a tx with immediate commit.
    DataTreeModification modification = dataTree.takeSnapshot().newModification();
    new WriteModification(CarsModel.BASE_PATH, CarsModel.emptyContainer()).apply(modification);
    new MergeModification(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode()).apply(modification);

    final MapEntryNode car1 = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
    new WriteModification(CarsModel.newCarPath("optima"), car1).apply(modification);
    modification.ready();

    ReadyLocalTransaction readyLocal = new ReadyLocalTransaction(tx1 , modification, true, Optional.empty());

    carsFollowerShard.get().tell(readyLocal, followerTestKit.getRef());
    Object resp = followerTestKit.expectMsgClass(Object.class);
    if (resp instanceof akka.actor.Status.Failure) {
        throw new AssertionError("Unexpected failure response", ((akka.actor.Status.Failure)resp).cause());
    }

    // Immediate commit replies with CommitTransactionReply and the data lands on the leader.
    assertEquals("Response type", CommitTransactionReply.class, resp.getClass());

    verifyCars(leaderDistributedDataStore.newReadOnlyTransaction(), car1);

    // Send another tx without immediate commit.
    modification = dataTree.takeSnapshot().newModification();
    MapEntryNode car2 = CarsModel.newCarEntry("sportage", Uint64.valueOf(30000));
    new WriteModification(CarsModel.newCarPath("sportage"), car2).apply(modification);
    modification.ready();

    readyLocal = new ReadyLocalTransaction(tx2 , modification, false, Optional.empty());

    carsFollowerShard.get().tell(readyLocal, followerTestKit.getRef());
    resp = followerTestKit.expectMsgClass(Object.class);
    if (resp instanceof akka.actor.Status.Failure) {
        throw new AssertionError("Unexpected failure response", ((akka.actor.Status.Failure)resp).cause());
    }

    // Non-immediate commit replies with ReadyTransactionReply carrying the cohort actor path.
    assertEquals("Response type", ReadyTransactionReply.class, resp.getClass());

    // Drive the three-phase commit explicitly against the returned cohort.
    final ActorSelection txActor = leaderDistributedDataStore.getActorUtils().actorSelection(
        ((ReadyTransactionReply)resp).getCohortPath());

    ThreePhaseCommitCohortProxy cohort = new ThreePhaseCommitCohortProxy(leaderDistributedDataStore.getActorUtils(),
        List.of(new ThreePhaseCommitCohortProxy.CohortInfo(Futures.successful(txActor),
            () -> DataStoreVersions.CURRENT_VERSION)), tx2);
    cohort.canCommit().get(5, TimeUnit.SECONDS);
    cohort.preCommit().get(5, TimeUnit.SECONDS);
    cohort.commit().get(5, TimeUnit.SECONDS);

    verifyCars(leaderDistributedDataStore.newReadOnlyTransaction(), car1, car2);
}
/**
 * Sends ForwardedReadyTransaction messages directly to the follower's cars shard actor and
 * verifies forwarding to the leader, using the shard's own data tree (obtained via
 * GetShardDataTree) to build the modifications — first immediate commit, then three-phase.
 */
@SuppressWarnings("unchecked")
@Test
public void testForwardedReadyTransactionForwardedToLeader() throws Exception {
    initDatastoresWithCars("testForwardedReadyTransactionForwardedToLeader");
    followerTestKit.waitUntilLeader(followerDistributedDataStore.getActorUtils(), "cars");

    final Optional<ActorRef> carsFollowerShard =
            followerDistributedDataStore.getActorUtils().findLocalShard("cars");
    assertTrue("Cars follower shard found", carsFollowerShard.isPresent());

    // Ask the shard actor for its backing data tree so modifications match its schema/state.
    carsFollowerShard.get().tell(GetShardDataTree.INSTANCE, followerTestKit.getRef());
    final DataTree dataTree = followerTestKit.expectMsgClass(DataTree.class);

    // Send a tx with immediate commit.
    DataTreeModification modification = dataTree.takeSnapshot().newModification();
    new WriteModification(CarsModel.BASE_PATH, CarsModel.emptyContainer()).apply(modification);
    new MergeModification(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode()).apply(modification);

    final MapEntryNode car1 = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
    new WriteModification(CarsModel.newCarPath("optima"), car1).apply(modification);

    ForwardedReadyTransaction forwardedReady = new ForwardedReadyTransaction(tx1, DataStoreVersions.CURRENT_VERSION,
        new ReadWriteShardDataTreeTransaction(mock(ShardDataTreeTransactionParent.class), tx1, modification),
        true, Optional.empty());

    carsFollowerShard.get().tell(forwardedReady, followerTestKit.getRef());
    Object resp = followerTestKit.expectMsgClass(Object.class);
    if (resp instanceof akka.actor.Status.Failure) {
        throw new AssertionError("Unexpected failure response", ((akka.actor.Status.Failure)resp).cause());
    }

    // Immediate commit replies with CommitTransactionReply and the data lands on the leader.
    assertEquals("Response type", CommitTransactionReply.class, resp.getClass());

    verifyCars(leaderDistributedDataStore.newReadOnlyTransaction(), car1);

    // Send another tx without immediate commit.
    modification = dataTree.takeSnapshot().newModification();
    MapEntryNode car2 = CarsModel.newCarEntry("sportage", Uint64.valueOf(30000));
    new WriteModification(CarsModel.newCarPath("sportage"), car2).apply(modification);

    forwardedReady = new ForwardedReadyTransaction(tx2, DataStoreVersions.CURRENT_VERSION,
        new ReadWriteShardDataTreeTransaction(mock(ShardDataTreeTransactionParent.class), tx2, modification),
        false, Optional.empty());

    carsFollowerShard.get().tell(forwardedReady, followerTestKit.getRef());
    resp = followerTestKit.expectMsgClass(Object.class);
    if (resp instanceof akka.actor.Status.Failure) {
        throw new AssertionError("Unexpected failure response", ((akka.actor.Status.Failure)resp).cause());
    }

    // Non-immediate commit replies with ReadyTransactionReply carrying the cohort actor path.
    assertEquals("Response type", ReadyTransactionReply.class, resp.getClass());

    // Drive the three-phase commit explicitly against the returned cohort.
    ActorSelection txActor = leaderDistributedDataStore.getActorUtils().actorSelection(
        ((ReadyTransactionReply)resp).getCohortPath());

    final ThreePhaseCommitCohortProxy cohort = new ThreePhaseCommitCohortProxy(
        leaderDistributedDataStore.getActorUtils(), List.of(
            new ThreePhaseCommitCohortProxy.CohortInfo(Futures.successful(txActor),
                () -> DataStoreVersions.CURRENT_VERSION)), tx2);
    cohort.canCommit().get(5, TimeUnit.SECONDS);
    cohort.preCommit().get(5, TimeUnit.SECONDS);
    cohort.commit().get(5, TimeUnit.SECONDS);

    verifyCars(leaderDistributedDataStore.newReadOnlyTransaction(), car1, car2);
}
@Test
public void testTransactionForwardedToLeaderAfterRetry() throws Exception {
followerDatastoreContextBuilder.shardBatchedModificationCount(2);
leaderDatastoreContextBuilder.shardBatchedModificationCount(2);
initDatastoresWithCarsAndPeople("testTransactionForwardedToLeaderAfterRetry");
// Do an initial write to get the primary shard info cached.
final DOMStoreWriteTransaction initialWriteTx = followerDistributedDataStore.newWriteOnlyTransaction();
initialWriteTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
initialWriteTx.write(PeopleModel.BASE_PATH, PeopleModel.emptyContainer());
followerTestKit.doCommit(initialWriteTx.ready());
// Wait for the commit to be replicated to the follower.
MemberNode.verifyRaftState(followerDistributedDataStore, "cars",
raftState -> assertEquals("getLastApplied", 1, raftState.getLastApplied()));
MemberNode.verifyRaftState(followerDistributedDataStore, "people",
raftState -> assertEquals("getLastApplied", 1, raftState.getLastApplied()));
// Prepare, ready and canCommit a WO tx that writes to 2 shards. This will become the current tx in
// the leader shard.
final DOMStoreWriteTransaction writeTx1 = followerDistributedDataStore.newWriteOnlyTransaction();
writeTx1.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
writeTx1.write(PeopleModel.BASE_PATH, PeopleModel.emptyContainer());
final DOMStoreThreePhaseCommitCohort writeTx1Cohort = writeTx1.ready();
final ListenableFuture<Boolean> writeTx1CanCommit = writeTx1Cohort.canCommit();
writeTx1CanCommit.get(5, TimeUnit.SECONDS);
// Prepare and ready another WO tx that writes to 2 shards but don't canCommit yet. This will be queued
// in the leader shard.
final DOMStoreWriteTransaction writeTx2 = followerDistributedDataStore.newWriteOnlyTransaction();
final LinkedList<MapEntryNode> cars = new LinkedList<>();
int carIndex = 1;
cars.add(CarsModel.newCarEntry("car" + carIndex, Uint64.valueOf(carIndex)));
writeTx2.write(CarsModel.newCarPath("car" + carIndex), cars.getLast());
carIndex++;
NormalizedNode people = ImmutableNodes.mapNodeBuilder(PeopleModel.PERSON_QNAME)
.withChild(PeopleModel.newPersonEntry("Dude")).build();
writeTx2.write(PeopleModel.PERSON_LIST_PATH, people);
final DOMStoreThreePhaseCommitCohort writeTx2Cohort = writeTx2.ready();
// Prepare another WO that writes to a single shard and thus will be directly committed on ready. This
// tx writes 5 cars so 2 BatchedModidifications messages will be sent initially and cached in the
// leader shard (with shardBatchedModificationCount set to 2). The 3rd BatchedModidifications will be
// sent on ready.
final DOMStoreWriteTransaction writeTx3 = followerDistributedDataStore.newWriteOnlyTransaction();
for (int i = 1; i <= 5; i++, carIndex++) {
cars.add(CarsModel.newCarEntry("car" + carIndex, Uint64.valueOf(carIndex)));
writeTx3.write(CarsModel.newCarPath("car" + carIndex), cars.getLast());
}
// Prepare another WO that writes to a single shard. This will send a single BatchedModidifications
// message on ready.
final DOMStoreWriteTransaction writeTx4 = followerDistributedDataStore.newWriteOnlyTransaction();
cars.add(CarsModel.newCarEntry("car" + carIndex, Uint64.valueOf(carIndex)));
writeTx4.write(CarsModel.newCarPath("car" + carIndex), cars.getLast());
carIndex++;
// Prepare a RW tx that will create a tx actor and send a ForwardedReadyTransaciton message to the
// leader shard on ready.
final DOMStoreReadWriteTransaction readWriteTx = followerDistributedDataStore.newReadWriteTransaction();
cars.add(CarsModel.newCarEntry("car" + carIndex, Uint64.valueOf(carIndex)));
readWriteTx.write(CarsModel.newCarPath("car" + carIndex), cars.getLast());
// FIXME: CONTROLLER-2017: ClientBackedDataStore reports only 4 transactions
assumeTrue(DistributedDataStore.class.isAssignableFrom(testParameter));
IntegrationTestKit.verifyShardStats(leaderDistributedDataStore, "cars",
stats -> assertEquals("getReadWriteTransactionCount", 5, stats.getReadWriteTransactionCount()));
// Disable elections on the leader so it switches to follower.
sendDatastoreContextUpdate(leaderDistributedDataStore, leaderDatastoreContextBuilder
.customRaftPolicyImplementation(DisableElectionsRaftPolicy.class.getName())
.shardElectionTimeoutFactor(10));
leaderTestKit.waitUntilNoLeader(leaderDistributedDataStore.getActorUtils(), "cars");
// Submit all tx's - the messages should get queued for retry.
final ListenableFuture<Boolean> writeTx2CanCommit = writeTx2Cohort.canCommit();
final DOMStoreThreePhaseCommitCohort writeTx3Cohort = writeTx3.ready();
final DOMStoreThreePhaseCommitCohort writeTx4Cohort = writeTx4.ready();
final DOMStoreThreePhaseCommitCohort rwTxCohort = readWriteTx.ready();
// Enable elections on the other follower so it becomes the leader, at which point the
// tx's should get forwarded from the previous leader to the new leader to complete the commits.
sendDatastoreContextUpdate(followerDistributedDataStore, followerDatastoreContextBuilder
.customRaftPolicyImplementation(null).shardElectionTimeoutFactor(1));
IntegrationTestKit.findLocalShard(followerDistributedDataStore.getActorUtils(), "cars")
.tell(TimeoutNow.INSTANCE, ActorRef.noSender());
IntegrationTestKit.findLocalShard(followerDistributedDataStore.getActorUtils(), "people")
.tell(TimeoutNow.INSTANCE, ActorRef.noSender());
followerTestKit.doCommit(writeTx1CanCommit, writeTx1Cohort);
followerTestKit.doCommit(writeTx2CanCommit, writeTx2Cohort);
followerTestKit.doCommit(writeTx3Cohort);
followerTestKit.doCommit(writeTx4Cohort);
followerTestKit.doCommit(rwTxCohort);
DOMStoreReadTransaction readTx = leaderDistributedDataStore.newReadOnlyTransaction();
verifyCars(readTx, cars.toArray(new MapEntryNode[cars.size()]));
verifyNode(readTx, PeopleModel.PERSON_LIST_PATH, people);
}
@Test
public void testLeadershipTransferOnShutdown() throws Exception {
// FIXME: remove when test passes also for ClientBackedDataStore
assumeTrue(DistributedDataStore.class.isAssignableFrom(testParameter));
leaderDatastoreContextBuilder.shardBatchedModificationCount(1);
followerDatastoreContextBuilder.shardElectionTimeoutFactor(10).customRaftPolicyImplementation(null);
final String testName = "testLeadershipTransferOnShutdown";
initDatastores(testName, MODULE_SHARDS_CARS_PEOPLE_1_2_3, CARS_AND_PEOPLE);
final IntegrationTestKit follower2TestKit = new IntegrationTestKit(follower2System,
DatastoreContext.newBuilderFrom(followerDatastoreContextBuilder.build()).operationTimeoutInMillis(500),
commitTimeout);
try (AbstractDataStore follower2DistributedDataStore = follower2TestKit.setupAbstractDataStore(
testParameter, testName, MODULE_SHARDS_CARS_PEOPLE_1_2_3, false)) {
followerTestKit.waitForMembersUp("member-3");
follower2TestKit.waitForMembersUp("member-1", "member-2");
// Create and submit a couple tx's so they're pending.
DOMStoreWriteTransaction writeTx = followerDistributedDataStore.newWriteOnlyTransaction();
writeTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
writeTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
writeTx.write(PeopleModel.BASE_PATH, PeopleModel.emptyContainer());
final DOMStoreThreePhaseCommitCohort cohort1 = writeTx.ready();
IntegrationTestKit.verifyShardStats(leaderDistributedDataStore, "cars",
stats -> assertEquals("getTxCohortCacheSize", 1, stats.getTxCohortCacheSize()));
writeTx = followerDistributedDataStore.newWriteOnlyTransaction();
final MapEntryNode car = CarsModel.newCarEntry("optima", Uint64.valueOf(20000));
writeTx.write(CarsModel.newCarPath("optima"), car);
final DOMStoreThreePhaseCommitCohort cohort2 = writeTx.ready();
IntegrationTestKit.verifyShardStats(leaderDistributedDataStore, "cars",
stats -> assertEquals("getTxCohortCacheSize", 2, stats.getTxCohortCacheSize()));
// Gracefully stop the leader via a Shutdown message.
sendDatastoreContextUpdate(leaderDistributedDataStore, leaderDatastoreContextBuilder
.shardElectionTimeoutFactor(100));
final FiniteDuration duration = FiniteDuration.create(5, TimeUnit.SECONDS);
final Future<ActorRef> future = leaderDistributedDataStore.getActorUtils().findLocalShardAsync("cars");
final ActorRef leaderActor = Await.result(future, duration);
final Future<Boolean> stopFuture = Patterns.gracefulStop(leaderActor, duration, Shutdown.INSTANCE);
// Commit the 2 transactions. They should finish and succeed.
followerTestKit.doCommit(cohort1);
followerTestKit.doCommit(cohort2);
// Wait for the leader actor stopped.
final Boolean stopped = Await.result(stopFuture, duration);
assertEquals("Stopped", Boolean.TRUE, stopped);
// Verify leadership was transferred by reading the committed data from the other nodes.
verifyCars(followerDistributedDataStore.newReadOnlyTransaction(), car);
verifyCars(follower2DistributedDataStore.newReadOnlyTransaction(), car);
}
}
// Exercises the IsolatedLeader state: a tx readied before isolation stays pending and later
// commits, a tx readied while isolated fails with NoShardLeaderException, and a tx readied
// after the follower is reinstated succeeds.
@Test
public void testTransactionWithIsolatedLeader() throws Exception {
// FIXME: CONTROLLER-2018: remove when test passes also for ClientBackedDataStore
assumeTrue(DistributedDataStore.class.isAssignableFrom(testParameter));
// Set the isolated leader check interval high so we can control the switch to IsolatedLeader.
leaderDatastoreContextBuilder.shardIsolatedLeaderCheckIntervalInMillis(10000000);
final String testName = "testTransactionWithIsolatedLeader";
initDatastoresWithCars(testName);
// Tx that is submitted after the follower is stopped but before the leader transitions to IsolatedLeader.
final DOMStoreWriteTransaction preIsolatedLeaderWriteTx = leaderDistributedDataStore.newWriteOnlyTransaction();
preIsolatedLeaderWriteTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
// Tx that is submitted after the leader transitions to IsolatedLeader.
final DOMStoreWriteTransaction noShardLeaderWriteTx = leaderDistributedDataStore.newWriteOnlyTransaction();
noShardLeaderWriteTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
// Tx that is submitted after the follower is reinstated.
final DOMStoreWriteTransaction successWriteTx = leaderDistributedDataStore.newWriteOnlyTransaction();
successWriteTx.merge(CarsModel.BASE_PATH, CarsModel.emptyContainer());
// Stop the follower
followerTestKit.watch(followerDistributedDataStore.getActorUtils().getShardManager());
followerDistributedDataStore.close();
followerTestKit.expectTerminated(followerDistributedDataStore.getActorUtils().getShardManager());
// Submit the preIsolatedLeaderWriteTx so it's pending
final DOMStoreThreePhaseCommitCohort preIsolatedLeaderTxCohort = preIsolatedLeaderWriteTx.ready();
// Change the isolated leader check interval low so it changes to IsolatedLeader.
sendDatastoreContextUpdate(leaderDistributedDataStore, leaderDatastoreContextBuilder
.shardIsolatedLeaderCheckIntervalInMillis(200));
MemberNode.verifyRaftState(leaderDistributedDataStore, "cars",
raftState -> assertEquals("getRaftState", "IsolatedLeader", raftState.getRaftState()));
// Committing while isolated must fail with NoShardLeaderException as the root cause.
final var ex = assertThrows(ExecutionException.class,
() -> leaderTestKit.doCommit(noShardLeaderWriteTx.ready()));
assertEquals(NoShardLeaderException.class, Throwables.getRootCause(ex).getClass());
// Stretch the election timeout so the leader retains leadership once the follower returns.
sendDatastoreContextUpdate(leaderDistributedDataStore, leaderDatastoreContextBuilder
.shardElectionTimeoutFactor(100));
final DOMStoreThreePhaseCommitCohort successTxCohort = successWriteTx.ready();
// Reinstate the follower; both the pending and the new tx should now commit.
followerDistributedDataStore = followerTestKit.setupAbstractDataStore(
testParameter, testName, MODULE_SHARDS_CARS_ONLY_1_2, false, CARS);
leaderTestKit.doCommit(preIsolatedLeaderTxCohort);
leaderTestKit.doCommit(successTxCohort);
}
// Verifies that a commit attempted after the shard leader's actor system is shut down fails with
// an appropriate exception rather than hanging indefinitely.
@Test
public void testTransactionWithShardLeaderNotResponding() throws Exception {
followerDatastoreContextBuilder.frontendRequestTimeoutInSeconds(2);
followerDatastoreContextBuilder.shardElectionTimeoutFactor(50);
initDatastoresWithCars("testTransactionWithShardLeaderNotResponding");
// Do an initial read to get the primary shard info cached.
final DOMStoreReadTransaction readTx = followerDistributedDataStore.newReadOnlyTransaction();
readTx.read(CarsModel.BASE_PATH).get(5, TimeUnit.SECONDS);
// Shutdown the leader and try to create a new tx.
TestKit.shutdownActorSystem(leaderSystem, true);
// Shorten the operation timeout so the failure surfaces quickly.
followerDatastoreContextBuilder.operationTimeoutInMillis(50).shardElectionTimeoutFactor(1);
sendDatastoreContextUpdate(followerDistributedDataStore, followerDatastoreContextBuilder);
final DOMStoreReadWriteTransaction rwTx = followerDistributedDataStore.newReadWriteTransaction();
rwTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
final var ex = assertThrows(ExecutionException.class, () -> followerTestKit.doCommit(rwTx.ready()));
final String msg = "Unexpected exception: " + Throwables.getStackTraceAsString(ex.getCause());
// The expected failure differs between the ask-based and tell-based frontends.
if (DistributedDataStore.class.isAssignableFrom(testParameter)) {
assertTrue(msg, Throwables.getRootCause(ex) instanceof NoShardLeaderException
|| ex.getCause() instanceof ShardLeaderNotRespondingException);
} else {
assertThat(msg, Throwables.getRootCause(ex), instanceOf(RequestTimeoutException.class));
}
}
// Verifies that tx creation fails appropriately when no shard leader exists: the leader system is
// shut down and member-1 leaves the cluster, so no new leader can be elected.
@Test
public void testTransactionWithCreateTxFailureDueToNoLeader() throws Exception {
followerDatastoreContextBuilder.frontendRequestTimeoutInSeconds(2);
initDatastoresWithCars("testTransactionWithCreateTxFailureDueToNoLeader");
// Do an initial read to get the primary shard info cached.
final DOMStoreReadTransaction readTx = followerDistributedDataStore.newReadOnlyTransaction();
readTx.read(CarsModel.BASE_PATH).get(5, TimeUnit.SECONDS);
// Shutdown the leader and try to create a new tx.
TestKit.shutdownActorSystem(leaderSystem, true);
Cluster.get(followerSystem).leave(MEMBER_1_ADDRESS);
// Give the cluster a moment to process the leave before reconfiguring timeouts.
Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
sendDatastoreContextUpdate(followerDistributedDataStore, followerDatastoreContextBuilder
.operationTimeoutInMillis(10).shardElectionTimeoutFactor(1).customRaftPolicyImplementation(null));
final DOMStoreReadWriteTransaction rwTx = followerDistributedDataStore.newReadWriteTransaction();
rwTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
final var ex = assertThrows(ExecutionException.class, () -> followerTestKit.doCommit(rwTx.ready()));
final String msg = "Unexpected exception: " + Throwables.getStackTraceAsString(ex.getCause());
// The expected failure differs between the ask-based and tell-based frontends.
if (DistributedDataStore.class.isAssignableFrom(testParameter)) {
assertThat(msg, Throwables.getRootCause(ex), instanceOf(NoShardLeaderException.class));
} else {
assertThat(msg, Throwables.getRootCause(ex), instanceOf(RequestTimeoutException.class));
}
}
// Verifies that tx creation is retried after the cached leader (member-1) goes away: with a third
// member available a new leader is elected and the commit eventually succeeds.
@Test
public void testTransactionRetryWithInitialAskTimeoutExOnCreateTx() throws Exception {
followerDatastoreContextBuilder.backendAlivenessTimerIntervalInSeconds(2);
String testName = "testTransactionRetryWithInitialAskTimeoutExOnCreateTx";
initDatastores(testName, MODULE_SHARDS_CARS_1_2_3, CARS);
final DatastoreContext.Builder follower2DatastoreContextBuilder = DatastoreContext.newBuilder()
.shardHeartbeatIntervalInMillis(100).shardElectionTimeoutFactor(10);
final IntegrationTestKit follower2TestKit = new IntegrationTestKit(
follower2System, follower2DatastoreContextBuilder, commitTimeout);
try (AbstractDataStore ds =
follower2TestKit.setupAbstractDataStore(
testParameter, testName, MODULE_SHARDS_CARS_1_2_3, false, CARS)) {
followerTestKit.waitForMembersUp("member-1", "member-3");
follower2TestKit.waitForMembersUp("member-1", "member-2");
// Do an initial read to get the primary shard info cached.
final DOMStoreReadTransaction readTx = followerDistributedDataStore.newReadOnlyTransaction();
readTx.read(CarsModel.BASE_PATH).get(5, TimeUnit.SECONDS);
// Shutdown the leader and try to create a new tx.
TestKit.shutdownActorSystem(leaderSystem, true);
Cluster.get(followerSystem).leave(MEMBER_1_ADDRESS);
sendDatastoreContextUpdate(followerDistributedDataStore, followerDatastoreContextBuilder
.operationTimeoutInMillis(500).shardElectionTimeoutFactor(5).customRaftPolicyImplementation(null));
final DOMStoreReadWriteTransaction rwTx = followerDistributedDataStore.newReadWriteTransaction();
rwTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
// The commit should succeed after retrying against the newly elected leader.
followerTestKit.doCommit(rwTx.ready());
}
}
/**
 * Verifies that spurious RequestVote messages from a semi-reachable member (one that cannot
 * receive messages but can still send them and turn candidate) do not bump the current term on,
 * or drop, the existing leader and the healthy follower.
 */
@Test
public void testSemiReachableCandidateNotDroppingLeader() throws Exception {
    final String testName = "testSemiReachableCandidateNotDroppingLeader";
    initDatastores(testName, MODULE_SHARDS_CARS_1_2_3, CARS);
    final DatastoreContext.Builder follower2DatastoreContextBuilder = DatastoreContext.newBuilder()
            .shardHeartbeatIntervalInMillis(100).shardElectionTimeoutFactor(10);
    final IntegrationTestKit follower2TestKit = new IntegrationTestKit(
            follower2System, follower2DatastoreContextBuilder, commitTimeout);
    // try-with-resources so the datastore is closed even if an assertion below fails
    // (the original leaked it on failure).
    try (AbstractDataStore ds2 = follower2TestKit.setupAbstractDataStore(
            testParameter, testName, MODULE_SHARDS_CARS_1_2_3, false, CARS)) {
        followerTestKit.waitForMembersUp("member-1", "member-3");
        follower2TestKit.waitForMembersUp("member-1", "member-2");
        // behavior is controlled by akka.coordinated-shutdown.run-by-actor-system-terminate configuration option
        TestKit.shutdownActorSystem(follower2System, true);
        final ActorRef cars = leaderDistributedDataStore.getActorUtils().findLocalShard("cars").get();
        final OnDemandRaftState initialState = (OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
                .executeOperation(cars, GetOnDemandRaftState.INSTANCE);
        final Cluster leaderCluster = Cluster.get(leaderSystem);
        final Cluster followerCluster = Cluster.get(followerSystem);
        final Cluster follower2Cluster = Cluster.get(follower2System);
        final Member follower2Member = follower2Cluster.readView().self();
        // Wait until both surviving members see member-3 as unreachable.
        await().atMost(10, TimeUnit.SECONDS)
                .until(() -> containsUnreachable(leaderCluster, follower2Member));
        await().atMost(10, TimeUnit.SECONDS)
                .until(() -> containsUnreachable(followerCluster, follower2Member));
        final ActorRef followerCars = followerDistributedDataStore.getActorUtils().findLocalShard("cars").get();
        // to simulate a follower not being able to receive messages, but still being able to send messages and
        // becoming candidate, we can just send a couple of RequestVotes to both leader and follower.
        cars.tell(new RequestVote(initialState.getCurrentTerm() + 1, "member-3-shard-cars", -1, -1), null);
        followerCars.tell(new RequestVote(initialState.getCurrentTerm() + 1, "member-3-shard-cars", -1, -1), null);
        cars.tell(new RequestVote(initialState.getCurrentTerm() + 3, "member-3-shard-cars", -1, -1), null);
        followerCars.tell(new RequestVote(initialState.getCurrentTerm() + 3, "member-3-shard-cars", -1, -1), null);
        final OnDemandRaftState stateAfter = (OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
                .executeOperation(cars, GetOnDemandRaftState.INSTANCE);
        // Fix: query the follower's own shard actor. The original passed the leader's "cars" ref
        // here, which made the follower-side assertion a duplicate of the leader-side one.
        final OnDemandRaftState followerState = (OnDemandRaftState) followerDistributedDataStore.getActorUtils()
                .executeOperation(followerCars, GetOnDemandRaftState.INSTANCE);
        // Neither node should have granted the vote or advanced its term.
        assertEquals(initialState.getCurrentTerm(), stateAfter.getCurrentTerm());
        assertEquals(initialState.getCurrentTerm(), followerState.getCurrentTerm());
    }
}
/**
 * Tells whether the given member is currently marked unreachable in the given cluster's view.
 * Note: unreachableMembers() returns scala.collection.immutable.Set; the scala.collection.Set
 * import is used deliberately, see https://bugs.eclipse.org/bugs/show_bug.cgi?id=468276#c32.
 */
private static Boolean containsUnreachable(final Cluster cluster, final Member member) {
    final Set<Member> unreachable = cluster.readView().unreachableMembers();
    return unreachable.contains(member);
}
// Verifies snapshot installation: the leader starts from a saved snapshot while the follower has
// no data, so the leader must install a snapshot to sync the follower.
@Test
public void testInstallSnapshot() throws Exception {
final String testName = "testInstallSnapshot";
final String leaderCarShardName = "member-1-shard-cars-" + testName;
final String followerCarShardName = "member-2-shard-cars-" + testName;
// Setup a saved snapshot on the leader. The follower will startup with no data and the leader should
// install a snapshot to sync the follower.
DataTree tree = new InMemoryDataTreeFactory().create(DataTreeConfiguration.DEFAULT_CONFIGURATION,
SchemaContextHelper.full());
final ContainerNode carsNode = CarsModel.newCarsNode(
CarsModel.newCarsMapNode(CarsModel.newCarEntry("optima", Uint64.valueOf(20000))));
AbstractShardTest.writeToStore(tree, CarsModel.BASE_PATH, carsNode);
// Snapshot the whole data tree root for the saved snapshot state.
final NormalizedNode snapshotRoot = AbstractShardTest.readStore(tree, YangInstanceIdentifier.empty());
final Snapshot initialSnapshot = Snapshot.create(
new ShardSnapshotState(new MetadataShardDataTreeSnapshot(snapshotRoot)),
Collections.emptyList(), 5, 1, 5, 1, 1, null, null);
InMemorySnapshotStore.addSnapshot(leaderCarShardName, initialSnapshot);
// Latches let us wait for each shard to persist its own snapshot after startup.
InMemorySnapshotStore.addSnapshotSavedLatch(leaderCarShardName);
InMemorySnapshotStore.addSnapshotSavedLatch(followerCarShardName);
initDatastoresWithCars(testName);
// The leader must serve the data recovered from the snapshot.
assertEquals(Optional.of(carsNode), leaderDistributedDataStore.newReadOnlyTransaction().read(
CarsModel.BASE_PATH).get(5, TimeUnit.SECONDS));
// Both leader and follower should have persisted a snapshot equivalent to the initial one.
verifySnapshot(InMemorySnapshotStore.waitForSavedSnapshot(leaderCarShardName, Snapshot.class),
initialSnapshot, snapshotRoot);
verifySnapshot(InMemorySnapshotStore.waitForSavedSnapshot(followerCarShardName, Snapshot.class),
initialSnapshot, snapshotRoot);
}
/**
 * Verifies read/write message slicing in the tell-based protocol: with a tiny maximum slice size,
 * a large write followed by a read through the same transaction must round-trip intact.
 */
@Test
public void testReadWriteMessageSlicing() throws Exception {
    // The slicing is only implemented for tell-based protocol
    assumeTrue(ClientBackedDataStore.class.isAssignableFrom(testParameter));
    // Force even modest payloads to be sliced into multiple messages.
    leaderDatastoreContextBuilder.maximumMessageSliceSize(100);
    followerDatastoreContextBuilder.maximumMessageSliceSize(100);
    // Fix: use the actual test method name (was "testLargeReadReplySlicing") so shard/actor
    // names in logs line up with this test.
    initDatastoresWithCars("testReadWriteMessageSlicing");
    final DOMStoreReadWriteTransaction rwTx = followerDistributedDataStore.newReadWriteTransaction();
    final NormalizedNode carsNode = CarsModel.create();
    rwTx.write(CarsModel.BASE_PATH, carsNode);
    // Reading back through the same tx exercises the sliced read-reply path.
    verifyNode(rwTx, CarsModel.BASE_PATH, carsNode);
}
/**
 * Verifies that a commit left in flight while both followers drop AppendEntries (and one sends a
 * spurious RequestVote with a higher term) still completes once message dropping stops.
 */
@SuppressWarnings("IllegalCatch")
@Test
public void testRaftCallbackDuringLeadershipDrop() throws Exception {
    final String testName = "testRaftCallbackDuringLeadershipDrop";
    initDatastores(testName, MODULE_SHARDS_CARS_1_2_3, CARS);
    final ExecutorService executor = Executors.newSingleThreadExecutor();
    final IntegrationTestKit follower2TestKit = new IntegrationTestKit(follower2System,
            DatastoreContext.newBuilderFrom(followerDatastoreContextBuilder.build()).operationTimeoutInMillis(500)
                    .shardLeaderElectionTimeoutInSeconds(3600),
            commitTimeout);
    final DOMStoreWriteTransaction initialWriteTx = leaderDistributedDataStore.newWriteOnlyTransaction();
    initialWriteTx.write(CarsModel.BASE_PATH, CarsModel.emptyContainer());
    leaderTestKit.doCommit(initialWriteTx.ready());
    // Fix: shut the executor down in a finally block so a failed assertion or timeout above
    // cannot leak the commit thread (the original only shut it down on success).
    try {
        try (AbstractDataStore follower2DistributedDataStore = follower2TestKit.setupAbstractDataStore(
                testParameter, testName, MODULE_SHARDS_CARS_1_2_3, false)) {
            final ActorRef member3Cars = ((LocalShardStore) follower2DistributedDataStore).getLocalShards()
                    .getLocalShards().get("cars").getActor();
            final ActorRef member2Cars = ((LocalShardStore) followerDistributedDataStore).getLocalShards()
                    .getLocalShards().get("cars").getActor();
            // Cut replication to both followers so the commit below stalls.
            member2Cars.tell(new StartDropMessages(AppendEntries.class), null);
            member3Cars.tell(new StartDropMessages(AppendEntries.class), null);
            final DOMStoreWriteTransaction newTx = leaderDistributedDataStore.newWriteOnlyTransaction();
            newTx.write(CarsModel.CAR_LIST_PATH, CarsModel.newCarMapNode());
            final AtomicBoolean submitDone = new AtomicBoolean(false);
            // Commit on a separate thread since it will block until replication resumes.
            executor.submit(() -> {
                try {
                    leaderTestKit.doCommit(newTx.ready());
                    submitDone.set(true);
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            });
            final ActorRef leaderCars = ((LocalShardStore) leaderDistributedDataStore).getLocalShards()
                    .getLocalShards().get("cars").getActor();
            // Wait until the stalled entry has been appended to the leader's log.
            await().atMost(10, TimeUnit.SECONDS)
                    .until(() -> ((OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
                            .executeOperation(leaderCars, GetOnDemandRaftState.INSTANCE)).getLastIndex() >= 1);
            final OnDemandRaftState raftState = (OnDemandRaftState) leaderDistributedDataStore.getActorUtils()
                    .executeOperation(leaderCars, GetOnDemandRaftState.INSTANCE);
            // Simulate a follower not receiving heartbeats but still being able to send messages,
            // i.e. a RequestVote with a new term (switching to candidate after election timeout).
            leaderCars.tell(new RequestVote(raftState.getCurrentTerm() + 1,
                    "member-3-shard-cars-testRaftCallbackDuringLeadershipDrop", -1,
                    -1), member3Cars);
            // Restore replication; the stalled commit should now finish.
            member2Cars.tell(new StopDropMessages(AppendEntries.class), null);
            member3Cars.tell(new StopDropMessages(AppendEntries.class), null);
            await("Is tx stuck in COMMIT_PENDING")
                    .atMost(10, TimeUnit.SECONDS).untilAtomic(submitDone, equalTo(true));
        }
    } finally {
        executor.shutdownNow();
    }
}
// Verifies that overwriting the data tree root triggers a real snapshot on both members, while
// ordinary writes only advance the in-memory ("fake") snapshot index.
@Test
public void testSnapshotOnRootOverwrite() throws Exception {
// FIXME: ClientBackedDatastore does not have stable indexes/term, the snapshot index seems to fluctuate
assumeTrue(DistributedDataStore.class.isAssignableFrom(testParameter));
final String testName = "testSnapshotOnRootOverwrite";
final String[] shards = {"cars", "default"};
initDatastores(testName, "module-shards-default-cars-member1-and-2.conf", shards,
leaderDatastoreContextBuilder.snapshotOnRootOverwrite(true),
followerDatastoreContextBuilder.snapshotOnRootOverwrite(true));
leaderTestKit.waitForMembersUp("member-2");
// A container rooted at the schema context represents a whole-root overwrite.
final ContainerNode rootNode = ImmutableContainerNodeBuilder.create()
.withNodeIdentifier(YangInstanceIdentifier.NodeIdentifier.create(SchemaContext.NAME))
.withChild(CarsModel.create())
.build();
// First root overwrite: snapshot expected at index 1 on both members.
leaderTestKit.testWriteTransaction(leaderDistributedDataStore, YangInstanceIdentifier.empty(), rootNode);
IntegrationTestKit.verifyShardState(leaderDistributedDataStore, "cars",
state -> assertEquals(1, state.getSnapshotIndex()));
IntegrationTestKit.verifyShardState(followerDistributedDataStore, "cars",
state -> assertEquals(1, state.getSnapshotIndex()));
verifySnapshot("member-1-shard-cars-testSnapshotOnRootOverwrite", 1);
verifySnapshot("member-2-shard-cars-testSnapshotOnRootOverwrite", 1);
// Ten ordinary (non-root) writes follow.
for (int i = 0; i < 10; i++) {
leaderTestKit.testWriteTransaction(leaderDistributedDataStore, CarsModel.newCarPath("car " + i),
CarsModel.newCarEntry("car " + i, Uint64.ONE));
}
// fake snapshot causes the snapshotIndex to move
IntegrationTestKit.verifyShardState(leaderDistributedDataStore, "cars",
state -> assertEquals(10, state.getSnapshotIndex()));
IntegrationTestKit.verifyShardState(followerDistributedDataStore, "cars",
state -> assertEquals(10, state.getSnapshotIndex()));
// however the real snapshot still has not changed and was taken at index 1
verifySnapshot("member-1-shard-cars-testSnapshotOnRootOverwrite", 1);
verifySnapshot("member-2-shard-cars-testSnapshotOnRootOverwrite", 1);
// root overwrite so expect a snapshot
leaderTestKit.testWriteTransaction(leaderDistributedDataStore, YangInstanceIdentifier.empty(), rootNode);
// this was a real snapshot so everything should be in it(1(DisableTrackingPayload) + 1 + 10 + 1)
IntegrationTestKit.verifyShardState(leaderDistributedDataStore, "cars",
state -> assertEquals(12, state.getSnapshotIndex()));
IntegrationTestKit.verifyShardState(followerDistributedDataStore, "cars",
state -> assertEquals(12, state.getSnapshotIndex()));
verifySnapshot("member-1-shard-cars-testSnapshotOnRootOverwrite", 12);
verifySnapshot("member-2-shard-cars-testSnapshotOnRootOverwrite", 12);
}
/**
 * Polls until exactly one persisted snapshot exists for the given persistence id and its
 * last-applied index matches the expectation. Snapshot persistence is asynchronous, hence
 * the bounded await.
 */
private void verifySnapshot(final String persistenceId, final long lastAppliedIndex) {
    await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> {
        final List<Snapshot> snapshots = InMemorySnapshotStore.getSnapshots(persistenceId, Snapshot.class);
        assertEquals(1, snapshots.size());
        assertEquals(lastAppliedIndex, snapshots.get(0).getLastAppliedIndex());
    });
}
/**
 * Asserts that the actual snapshot carries the same raft bookkeeping fields as the expected one
 * and that its shard state unwraps to the expected data tree root.
 */
private static void verifySnapshot(final Snapshot actual, final Snapshot expected,
        final NormalizedNode expRoot) {
    // Raft bookkeeping fields first...
    assertEquals("Snapshot getLastAppliedTerm", expected.getLastAppliedTerm(), actual.getLastAppliedTerm());
    assertEquals("Snapshot getLastAppliedIndex", expected.getLastAppliedIndex(), actual.getLastAppliedIndex());
    assertEquals("Snapshot getLastTerm", expected.getLastTerm(), actual.getLastTerm());
    assertEquals("Snapshot getLastIndex", expected.getLastIndex(), actual.getLastIndex());
    // ...then unwrap the shard state and compare the data tree root node.
    assertEquals("Snapshot state type", ShardSnapshotState.class, actual.getState().getClass());
    final MetadataShardDataTreeSnapshot dataTreeSnapshot =
            (MetadataShardDataTreeSnapshot) ((ShardSnapshotState) actual.getState()).getSnapshot();
    assertEquals("Snapshot root node", expRoot, dataTreeSnapshot.getRootNode().get());
}
/**
 * Pushes an updated DatastoreContext into the given datastore by feeding it a mocked
 * DatastoreContextFactory whose base and per-shard contexts are both built from the supplied
 * builder's current settings.
 */
private static void sendDatastoreContextUpdate(final AbstractDataStore dataStore, final Builder builder) {
    // Snapshot the builder's state so later mutations by the caller do not leak into this update.
    final Builder frozenBuilder = DatastoreContext.newBuilderFrom(builder.build());
    final Answer<DatastoreContext> buildContext = invocation -> frozenBuilder.build();
    final DatastoreContextFactory mockContextFactory = mock(DatastoreContextFactory.class);
    doAnswer(buildContext).when(mockContextFactory).getBaseDatastoreContext();
    doAnswer(buildContext).when(mockContextFactory).getShardDatastoreContext(anyString());
    dataStore.onDatastoreContextUpdated(mockContextFactory);
}
}
|
package warGame.JUnitTest;
import junit.framework.*;
/**
 * Aggregates all model-level unit tests into a single JUnit suite and provides a command-line
 * entry point for running them with the text-based runner.
 */
public class TestRunner {

    /**
     * Builds the composite suite containing every model test case.
     *
     * @return the aggregated test suite
     */
    public static Test testSuite() {
        // Renamed from "ModelSuite": locals use lowerCamelCase.
        final TestSuite modelSuite = new TestSuite();
        modelSuite.addTestSuite(MapModelTest1.class);
        modelSuite.addTestSuite(MapModelTest2.class);
        modelSuite.addTestSuite(MapModelTest3.class);
        modelSuite.addTestSuite(MapModelTest4.class);
        modelSuite.addTestSuite(CharacterModelTest1.class);
        modelSuite.addTestSuite(CharacterModelTest2.class);
        modelSuite.addTestSuite(CharacterModelTest3.class);
        modelSuite.addTestSuite(CharacterModelTest4.class);
        modelSuite.addTestSuite(CharacterModelTest5.class);
        modelSuite.addTestSuite(CharacterModelTest6.class);
        modelSuite.addTestSuite(CharacterModelTest7.class);
        modelSuite.addTestSuite(CharacterModelTest8.class);
        modelSuite.addTestSuite(ItemModelTest1.class);
        modelSuite.addTestSuite(ItemModelTest2.class);
        modelSuite.addTestSuite(ItemModelTest3.class);
        modelSuite.addTestSuite(StartModelTest1.class);
        modelSuite.addTestSuite(StartModelTest2.class);
        modelSuite.addTestSuite(CampaignModelTest1.class);
        return modelSuite;
    }

    /**
     * Runs the suite using JUnit's text UI runner.
     *
     * @param args ignored
     */
    public static void main(final String[] args) {
        // "String[] args" replaces the C-style "String args[]" declaration.
        junit.textui.TestRunner.run(testSuite());
    }
}
|
package org.xwiki.wiki.internal.descriptor;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.test.junit5.mockito.InjectMockComponents;
import org.xwiki.test.junit5.mockito.MockComponent;
import org.xwiki.wiki.descriptor.WikiDescriptor;
import org.xwiki.wiki.descriptor.WikiDescriptorManager;
import org.xwiki.wiki.internal.descriptor.builder.WikiDescriptorBuilder;
import org.xwiki.wiki.internal.descriptor.document.WikiDescriptorDocumentHelper;
import org.xwiki.wiki.internal.manager.WikiDescriptorCache;
import org.xwiki.wiki.manager.WikiManagerException;
import org.xwiki.wiki.properties.WikiPropertyGroup;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.test.MockitoOldcore;
import com.xpn.xwiki.test.junit5.mockito.InjectMockitoOldcore;
import com.xpn.xwiki.test.junit5.mockito.OldcoreTest;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Unit tests for {@link org.xwiki.wiki.internal.descriptor.DefaultWikiDescriptorManager}.
*
* @version $Id$
* @since 6.0M1
*/
@OldcoreTest
class DefaultWikiDescriptorManagerTest
{
// Component under test; the @MockComponent fields below are injected into it.
@InjectMockComponents
private DefaultWikiDescriptorManager descriptorManager;
// Descriptor cache, stubbed per test; beforeEach() makes getWikiIds() return null by default.
@MockComponent
private WikiDescriptorCache cache;
// Helper resolving wiki ids/aliases to their descriptor documents.
@MockComponent
private WikiDescriptorDocumentHelper descriptorDocumentHelper;
// Builder producing descriptor objects from a document's XObjects.
@MockComponent
private WikiDescriptorBuilder wikiDescriptorBuilder;
// Mocked old-core environment provided by @OldcoreTest.
@InjectMockitoOldcore
private MockitoOldcore oldcore;
// Common stubbing applied before every test.
@BeforeEach
void beforeEach()
{
// Cache is supposed to return null and not empty list by default
when(this.cache.getWikiIds()).thenReturn(null);
}
// getById() with a cache miss but an existing descriptor document: the descriptor is built from
// the document's XObjects, returned, and added to the cache.
@Test
void getByIdWhenNotInCacheButExists() throws Exception
{
// Not in cache
when(this.cache.getFromId("wikiid")).thenReturn(null);
// But exists
XWikiDocument document = mock(XWikiDocument.class);
when(this.descriptorDocumentHelper.getDocumentFromWikiId("wikiid")).thenReturn(document);
when(document.isNew()).thenReturn(false);
// Get all XWiki.XWikiServerClass XObjects to pass to the Wiki Descriptor Builder
List<BaseObject> baseObjects = Arrays.asList(mock(BaseObject.class));
when(document.getXObjects(any(EntityReference.class))).thenReturn(baseObjects);
// Get a Wiki from the Wiki Descriptor Builder
DefaultWikiDescriptor descriptor = new DefaultWikiDescriptor("wikiid", "wikialias");
when(this.wikiDescriptorBuilder.buildDescriptorObject(anyList(), any(XWikiDocument.class)))
.thenReturn(descriptor);
assertEquals(descriptor, this.descriptorManager.getById("wikiid"));
// Verify that calling getById() also sets the descriptor in the cache.
verify(this.cache).add(descriptor);
}
@Test
void getByIdWhenInCache() throws Exception
{
    // When the cache already holds a descriptor for the id, getById() must return it as-is.
    DefaultWikiDescriptor cached = new DefaultWikiDescriptor("wikiid", "wikialias");
    when(this.cache.getFromId("wikiid")).thenReturn(cached);

    assertEquals(cached, this.descriptorManager.getById("wikiid"));
}
@Test
void getByIdWhenNotInCacheAndItDoesntExist() throws Exception
{
    // A "new" document means the wiki descriptor page does not exist.
    XWikiDocument missingDocument = mock(XWikiDocument.class);
    when(missingDocument.isNew()).thenReturn(true);
    when(this.descriptorDocumentHelper.getDocumentFromWikiId("wikiid")).thenReturn(missingDocument);

    assertNull(this.descriptorManager.getById("wikiid"));

    // The miss is recorded with the VOID sentinel so the lookup is not repeated.
    verify(this.cache).addFromId(eq("wikiid"), same(DefaultWikiDescriptor.VOID));
}
// getByAlias() with a cache miss but an existing descriptor document: the descriptor is built
// from the document's XObjects, returned, and added to the cache.
@Test
void getByAliasWhenNotInCacheButExists() throws Exception
{
// Not in cache
when(this.cache.getFromId("wikiid")).thenReturn(null);
// But exists
XWikiDocument document = mock(XWikiDocument.class);
when(this.descriptorDocumentHelper.findXWikiServerClassDocument("wikialias")).thenReturn(document);
when(document.isNew()).thenReturn(false);
// Get all XWiki.XWikiServerClass XObjects to pass to the Wiki Descriptor Builder
List<BaseObject> baseObjects = Arrays.asList(mock(BaseObject.class));
when(document.getXObjects(any(EntityReference.class))).thenReturn(baseObjects);
// Get a Wiki from the Wiki Descriptor Builder
DefaultWikiDescriptor descriptor = new DefaultWikiDescriptor("wikiid", "wikialias");
when(this.wikiDescriptorBuilder.buildDescriptorObject(anyList(), any(XWikiDocument.class)))
.thenReturn(descriptor);
assertEquals(descriptor, this.descriptorManager.getByAlias("wikialias"));
// Verify that calling getByAlias() also sets the descriptor in the cache.
verify(this.cache).add(descriptor);
}
@Test
void getByAliasWhenInCache() throws Exception
{
    // When the cache already holds a descriptor for the alias, getByAlias() must return it as-is.
    DefaultWikiDescriptor cached = new DefaultWikiDescriptor("wikiid", "wikialias");
    when(this.cache.getFromAlias("wikialias")).thenReturn(cached);

    assertEquals(cached, this.descriptorManager.getByAlias("wikialias"));
}
@Test
void getByAliasWhenNotInCacheAndItDoesntExist() throws Exception
{
    // No stubbing: neither the cache nor the document helper knows this alias.
    WikiDescriptor result = this.descriptorManager.getByAlias("wikialias");
    assertNull(result);

    // The miss is recorded with the VOID sentinel so the lookup is not repeated.
    verify(this.cache).addFromAlias(eq("wikialias"), same(DefaultWikiDescriptor.VOID));
}
@Test
void getAll() throws Exception
{
    // Get the XWikiDocuments for the Document References.
    XWikiDocument document1 = mock(XWikiDocument.class);
    XWikiDocument document2 = mock(XWikiDocument.class);
    XWikiDocument maindocument = mock(XWikiDocument.class);
    // wikiid3 is already cached: its document must never be loaded.
    DefaultWikiDescriptor descriptor3 = new DefaultWikiDescriptor("wikiid3", "wikialias3");
    // Three descriptor documents exist in the store.
    when(this.descriptorDocumentHelper.getAllXWikiServerClassDocumentNames()).thenReturn(
        Arrays.asList("XWiki.XWikiServerWikiid1", "XWiki.XWikiServerWikiid2", "XWiki.XWikiServerWikiid3"));
    when(this.descriptorDocumentHelper.getWikiIdFromDocumentFullname("XWiki.XWikiServerWikiid1"))
        .thenReturn("wikiid1");
    when(this.descriptorDocumentHelper.getWikiIdFromDocumentFullname("XWiki.XWikiServerWikiid2"))
        .thenReturn("wikiid2");
    when(this.descriptorDocumentHelper.getWikiIdFromDocumentFullname("XWiki.XWikiServerWikiid3"))
        .thenReturn("wikiid3");
    when(this.cache.getFromId("wikiid3")).thenReturn(descriptor3);
    when(this.descriptorDocumentHelper.getDocumentFromWikiId("wikiid1")).thenReturn(document1);
    when(this.descriptorDocumentHelper.getDocumentFromWikiId("wikiid2")).thenReturn(document2);
    // The main wiki document is new (no stored descriptor), so a virtual descriptor is used.
    when(this.descriptorDocumentHelper.getDocumentFromWikiId("xwiki")).thenReturn(maindocument);
    when(maindocument.isNew()).thenReturn(true);
    // Get all XWiki.XWikiServerClass XObjects to pass to the Wiki Descriptor Builder.
    List<BaseObject> baseObjects = Arrays.asList(mock(BaseObject.class));
    when(document1.getXObjects(any(EntityReference.class))).thenReturn(baseObjects);
    when(document2.getXObjects(any(EntityReference.class))).thenReturn(baseObjects);
    // The builder produces one descriptor per document, in call order.
    DefaultWikiDescriptor descriptor1 = new DefaultWikiDescriptor("wikiid1", "wikialias1");
    DefaultWikiDescriptor descriptor2 = new DefaultWikiDescriptor("wikiid2", "wikialias2");
    when(this.wikiDescriptorBuilder.buildDescriptorObject(anyList(), any(XWikiDocument.class)))
        .thenReturn(descriptor1, descriptor2);
    // 3 stored descriptors + the main wiki descriptor.
    Collection<WikiDescriptor> descriptors = this.descriptorManager.getAll();
    assertEquals(4, descriptors.size());
    // Verify that XWiki.XWikiServerWikiid3 has not been loaded (it was served from the cache).
    verify(this.descriptorDocumentHelper, never()).getDocumentFromWikiId("wikiid3");
    // Verify all descriptors were put in cache except the one that was already there.
    verify(this.cache).add(descriptor1);
    verify(this.cache).add(descriptor2);
    verify(this.cache, never()).add(descriptor3);
}
@Test
void exists() throws Exception
{
    // The existence check is resolved purely against the cached list of wiki ids.
    when(this.cache.getWikiIds()).thenReturn(Arrays.asList("wikiid1"));
    WikiDescriptorManager manager = this.descriptorManager;
    // A cached id is reported as existing...
    assertTrue(manager.exists("wikiid1"));
    // ...while an unknown id is not.
    assertFalse(manager.exists("wikiid2"));
}
@Test
void getMainWikiId()
{
    // The main wiki identifier is the well-known constant "xwiki".
    String mainWikiId = this.descriptorManager.getMainWikiId();
    assertEquals("xwiki", mainWikiId);
}
@Test
void isMainWiki()
{
    // null, the empty string and "xwiki" itself all designate the main wiki.
    for (String wikiId : new String[] {null, "", "xwiki"}) {
        assertTrue(this.descriptorManager.isMainWiki(wikiId));
    }
    // Any other identifier designates a subwiki.
    assertFalse(this.descriptorManager.isMainWiki("notmainwiki"));
}
@Test
void getMainWikiDescriptor() throws Exception
{
    // Put the main wiki descriptor in the cache under its id.
    DefaultWikiDescriptor mainDescriptor = new DefaultWikiDescriptor("xwiki", "xwiki");
    when(this.cache.getFromId("xwiki")).thenReturn(mainDescriptor);
    // getMainWikiDescriptor() resolves it through the regular id lookup.
    assertEquals(mainDescriptor, this.descriptorManager.getMainWikiDescriptor());
}
@Test
void cacheProtection() throws WikiManagerException
{
    // Seed the cache with a descriptor carrying a pretty name and a property group.
    DefaultWikiDescriptor descriptor = new DefaultWikiDescriptor("xwiki", "xwiki");
    descriptor.setPrettyName("pretty name");
    WikiPropertyGroup propertyGroup = new WikiPropertyGroup("group");
    propertyGroup.set("property", "value");
    descriptor.addPropertyGroup(propertyGroup);
    when(this.cache.getFromId("xwiki")).thenReturn(descriptor);
    when(this.cache.getFromAlias("xwiki")).thenReturn(descriptor);
    WikiDescriptorManager wikiDescriptorManager = this.descriptorManager;
    // Modify the descriptor returned by getById() without saving it: the manager must hand out
    // copies, so the cached state is unaffected by the mutation.
    wikiDescriptorManager.getById("xwiki").setPrettyName("changed pretty name");
    assertEquals("pretty name", wikiDescriptorManager.getById("xwiki").getPrettyName());
    wikiDescriptorManager.getById("xwiki").getPropertyGroup("group").set("property", "modified value");
    assertEquals("value", wikiDescriptorManager.getById("xwiki").getPropertyGroup("group").get("property"));
    // Same protection for descriptors obtained through getByAlias().
    wikiDescriptorManager.getByAlias("xwiki").setPrettyName("changed pretty name");
    assertEquals("pretty name", wikiDescriptorManager.getByAlias("xwiki").getPrettyName());
    wikiDescriptorManager.getByAlias("xwiki").getPropertyGroup("group").set("property", "modified value");
    assertEquals("value", wikiDescriptorManager.getByAlias("xwiki").getPropertyGroup("group").get("property"));
    // Same protection for the main wiki descriptor.
    wikiDescriptorManager.getMainWikiDescriptor().setPrettyName("changed pretty name");
    assertEquals("pretty name", wikiDescriptorManager.getMainWikiDescriptor().getPrettyName());
    wikiDescriptorManager.getMainWikiDescriptor().getPropertyGroup("group").set("property", "modified value");
    assertEquals("value", wikiDescriptorManager.getMainWikiDescriptor().getPropertyGroup("group").get("property"));
}
@Test
void getCurrentWikiId()
{
    // Make "wiki" the wiki of the current XWiki context.
    this.oldcore.getXWikiContext().setWikiId("wiki");
    // The manager reports exactly the context wiki id.
    String currentWikiId = this.descriptorManager.getCurrentWikiId();
    assertEquals("wiki", currentWikiId);
}
@Test
void getCurrentWikiDescriptor() throws WikiManagerException
{
    // Seed the cache with the descriptor of wiki "wiki"...
    DefaultWikiDescriptor expected = new DefaultWikiDescriptor("wiki", "wikialias");
    when(this.cache.getFromId("wiki")).thenReturn(expected);
    // ...and make that wiki the current context wiki.
    this.oldcore.getXWikiContext().setWikiId("wiki");
    // The manager resolves the descriptor of the current context wiki.
    assertEquals(expected, this.descriptorManager.getCurrentWikiDescriptor());
}
}
|
package org.apache.isis.viewer.restfulobjects.rendering.eventserializer;
import java.io.IOException;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.apache.isis.applib.annotation.Hidden;
import org.apache.isis.applib.annotation.Programmatic;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.applib.services.publish.EventMetadata;
import org.apache.isis.applib.services.publish.EventPayload;
import org.apache.isis.applib.services.publish.EventSerializer;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.viewer.restfulobjects.applib.JsonRepresentation;
import org.apache.isis.viewer.restfulobjects.applib.util.JsonMapper;
import org.apache.isis.viewer.restfulobjects.rendering.RendererContext;
import org.apache.isis.viewer.restfulobjects.rendering.domainobjects.DomainObjectReprRenderer;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
@Hidden
public class RestfulObjectsSpecEventSerializer implements EventSerializer {

    /** Shared mapper used to turn representations into JSON text. */
    private static final JsonMapper jsonMapper = JsonMapper.instance();

    /** Factory creating the renderer that converts a domain object into its JSON representation. */
    private static final DomainObjectReprRenderer.Factory objectRendererFactory = new DomainObjectReprRenderer.Factory();

    /** Configuration property under which the base URL of the restful viewer is looked up. */
    private static final String BASE_URL_KEY = RestfulObjectsSpecEventSerializer.class.getName() + ".baseUrl";
    private static final String BASE_URL_DEFAULT = "http://localhost:8080/restful/";

    /** Base URL used to build links in the rendered payload; resolved in {@link #init(Map)}. */
    private String baseUrl;

    /**
     * Reads the base URL from the injected properties, falling back to {@link #BASE_URL_DEFAULT}.
     *
     * @param props service configuration properties
     */
    @PostConstruct
    public void init(Map<String, String> props) {
        final String baseUrlFromConfig = props.get(BASE_URL_KEY);
        baseUrl = baseUrlFromConfig != null ? baseUrlFromConfig : BASE_URL_DEFAULT;
    }

    @PreDestroy
    public void shutdown() {
        // Nothing to release.
    }

    /**
     * Serializes the event as a JSON string holding a "metadata" map (guid, user, timestamp) and
     * the payload rendered as a Restful Objects domain-object representation.
     *
     * @return the JSON text of the event
     */
    @Programmatic
    @Override
    public Object serialize(EventMetadata metadata, EventPayload payload) {
        final RendererContext rendererContext = new EventSerializerRendererContext(baseUrl, Where.OBJECT_FORMS);
        final JsonRepresentation payloadRepr = asPayloadRepr(rendererContext, payload);
        final JsonRepresentation eventRepr = asEventRepr(metadata, payloadRepr);
        return jsonFor(eventRepr);
    }

    /** Wraps the payload representation together with the event metadata into one map. */
    JsonRepresentation asEventRepr(EventMetadata metadata, final JsonRepresentation payloadRepr) {
        final JsonRepresentation eventRepr = JsonRepresentation.newMap();
        final JsonRepresentation metadataRepr = JsonRepresentation.newMap();
        eventRepr.mapPut("metadata", metadataRepr);
        metadataRepr.mapPut("guid", metadata.getGuid());
        metadataRepr.mapPut("user", metadata.getUser());
        metadataRepr.mapPut("timestamp", metadata.getTimestamp());
        eventRepr.mapPut("payload", payloadRepr);
        return eventRepr;
    }

    /** Renders the payload object as a Restful Objects domain-object representation. */
    JsonRepresentation asPayloadRepr(final RendererContext rendererContext, EventPayload payload) {
        final DomainObjectReprRenderer renderer =
                (DomainObjectReprRenderer) objectRendererFactory.newRenderer(rendererContext, null, JsonRepresentation.newMap());
        final ObjectAdapter objectAdapter = rendererContext.getAdapterManager().adapterFor(payload);
        renderer.with(objectAdapter).asEventSerialization();
        return renderer.render();
    }

    /** Writes the object as JSON, converting any (checked) serialization failure to unchecked. */
    String jsonFor(final Object object) {
        try {
            return getJsonMapper().write(object);
        } catch (final IOException e) {
            // JsonGenerationException and JsonMappingException (Jackson 1.x) both extend
            // IOException, so this single catch replaces the three identical catch blocks.
            throw new RuntimeException(e);
        }
    }

    JsonMapper getJsonMapper() {
        return jsonMapper;
    }
}
|
package org.opencb.opencga.storage.hadoop.variant.index.sample;
import com.google.common.collect.BiMap;
import htsjdk.variant.vcf.VCFConstants;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.opencb.biodata.models.core.Region;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.commons.datastore.core.ObjectMap;
import org.opencb.opencga.storage.core.exceptions.StorageEngineException;
import org.opencb.opencga.storage.core.metadata.VariantStorageMetadataManager;
import org.opencb.opencga.storage.core.metadata.models.SampleMetadata;
import org.opencb.opencga.storage.core.metadata.models.StudyMetadata;
import org.opencb.opencga.storage.core.variant.VariantStorageEngine;
import org.opencb.opencga.storage.core.variant.VariantStorageOptions;
import org.opencb.opencga.storage.core.variant.adaptors.GenotypeClass;
import org.opencb.opencga.storage.core.variant.adaptors.VariantQueryException;
import org.opencb.opencga.storage.core.variant.adaptors.VariantQueryParam;
import org.opencb.opencga.storage.core.variant.query.VariantQueryUtils;
import org.opencb.opencga.storage.hadoop.variant.AbstractVariantsTableDriver;
import org.opencb.opencga.storage.hadoop.variant.GenomeHelper;
import org.opencb.opencga.storage.hadoop.variant.adaptors.VariantHBaseQueryParser;
import org.opencb.opencga.storage.hadoop.variant.adaptors.phoenix.VariantPhoenixSchema;
import org.opencb.opencga.storage.hadoop.variant.converters.HBaseToVariantConverter;
import org.opencb.opencga.storage.hadoop.variant.converters.VariantRow;
import org.opencb.opencga.storage.hadoop.variant.converters.study.HBaseToStudyEntryConverter;
import org.opencb.opencga.storage.hadoop.variant.index.annotation.mr.VariantTableSampleIndexOrderMapper;
import org.opencb.opencga.storage.hadoop.variant.mr.VariantAlignedInputFormat;
import org.opencb.opencga.storage.hadoop.variant.mr.VariantMapReduceUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
import static org.apache.hadoop.hbase.filter.CompareFilter.CompareOp.EQUAL;
import static org.apache.hadoop.hbase.filter.CompareFilter.CompareOp.NOT_EQUAL;
import static org.apache.phoenix.query.QueryConstants.SEPARATOR_BYTE;
/**
 * MapReduce driver that builds the sample index table from the variants table: one scan over the
 * study's sample/file columns feeds {@link SampleIndexerMapper}, which writes one Put per sample.
 */
public class SampleIndexDriver extends AbstractVariantsTableDriver {
    private static final Logger LOGGER = LoggerFactory.getLogger(SampleIndexDriver.class);

    // Command line parameter names.
    public static final String SAMPLES = "samples";
    public static final String SAMPLE_IDS = "sampleIds";
    public static final String OUTPUT = "output";
    public static final String SECONDARY_ONLY = "secondary-only";
//    public static final String MAIN_ONLY = "main-only";
    public static final String PARTIAL_SCAN_SIZE = "partial-scan-size";
    public static final String MAX_COLUMNS_PER_SCAN = "max-columns-per-scan";

    // Job configuration keys used to hand the pre-computed metadata over to the mapper.
    private static final String SAMPLE_ID_TO_FILE_ID_MAP = "SampleIndexDriver.sampleIdToFileIdMap";
    private static final String MULTI_FILE_SAMPLES = "SampleIndexDriver.multiFileSamples";
    private static final String FIXED_ATTRIBUTES = "SampleIndexDriver.fixedAttributes";
    private static final String PARTIAL_SCAN = "SampleIndexDriver.partial_scan";

    private int study;
    private String outputTable;
    private boolean allSamples;
    private boolean secondaryOnly;
    private boolean mainOnly;
    private boolean hasGenotype;
    private TreeSet<Integer> sampleIds;
    // sampleId -> ids of the files that contain that sample.
    private Map<Integer, List<Integer>> sampleIdToFileIdMap;
    private Set<Integer> multiFileSamples;
    private String region;
    private double partialScanSize;
    private List<String> fixedAttributes;
    private boolean multiScan = false;
    private int maxColumns;

    @Override
    protected String getJobOperationName() {
        return "sample_index";
    }

    @Override
    protected Class<?> getMapperClass() {
        return SampleIndexerMapper.class;
    }

    @Override
    protected Map<String, String> getParams() {
        Map<String, String> params = new LinkedHashMap<>();
        params.put("--" + SAMPLES, "<samples>");
        params.put("--" + SAMPLE_IDS, "<sample-ids>");
        params.put("--" + VariantStorageOptions.STUDY.key(), "<study>");
        params.put("--" + OUTPUT, "<output-table>");
        params.put("--" + SECONDARY_ONLY, "<true|false>");
//        params.put("--" + MAIN_ONLY, "<main-alternate-only>");
        params.put("--" + VariantQueryParam.REGION.key(), "<region>");
        params.put("--" + PARTIAL_SCAN_SIZE, "<samples-per-scan>");
        params.put("--" + MAX_COLUMNS_PER_SCAN, "<max-columns-per-scan>");
        return params;
    }

    @Override
    protected void parseAndValidateParameters() throws IOException {
        super.parseAndValidateParameters();
        study = getStudyId();
        if (study < 0) {
            // No explicit study: only valid when the storage contains exactly one study.
            BiMap<String, Integer> map = getMetadataManager().getStudies();
            if (map.size() == 1) {
                study = map.values().iterator().next();
                setStudyId(study);
            } else {
                throw new IllegalArgumentException("Select one study from " + map.keySet());
            }
        }
        outputTable = getParam(OUTPUT);
        if (outputTable == null || outputTable.isEmpty()) {
            outputTable = getTableNameGenerator().getSampleIndexTableName(study);
        }
        secondaryOnly = Boolean.parseBoolean(getParam(SECONDARY_ONLY, "false"));
//        mainOnly = Boolean.parseBoolean(getParam(MAIN_ONLY, "false"));
        if (secondaryOnly && mainOnly) {
            // Report the option names, not their boolean values ("true and false").
            throw new IllegalArgumentException("Incompatible options " + SECONDARY_ONLY + " and main-only");
        }
        region = getParam(VariantQueryParam.REGION.key());
        // Max number of samples to be processed in each Scan.
        partialScanSize = Integer.parseInt(getParam(PARTIAL_SCAN_SIZE, "1000"));
        maxColumns = Integer.parseInt(getParam(MAX_COLUMNS_PER_SCAN, "4000"));
        String samplesParam = getParam(SAMPLES);
        String sampleIdsStr = getParam(SAMPLE_IDS);
        sampleIds = new TreeSet<>(Integer::compareTo);
        VariantStorageMetadataManager metadataManager = getMetadataManager();
        if (StringUtils.isNotEmpty(samplesParam) && StringUtils.isNotEmpty(sampleIdsStr)) {
            throw new IllegalArgumentException("Incompatible params " + SAMPLES + " and " + SAMPLE_IDS);
        }
        // Samples may be given by name (or ALL) or directly by numeric id.
        if (VariantQueryUtils.ALL.equals(samplesParam)) {
            allSamples = true;
            sampleIds.addAll(metadataManager.getIndexedSamples(study));
        } else if (StringUtils.isNotEmpty(samplesParam)) {
            allSamples = false;
            for (String sample : samplesParam.split(",")) {
                Integer sampleId = metadataManager.getSampleId(getStudyId(), sample);
                if (sampleId == null) {
                    throw VariantQueryException.sampleNotFound(sample, study);
                }
                sampleIds.add(sampleId);
            }
        } else if (StringUtils.isNotEmpty(sampleIdsStr)) {
            allSamples = false;
            for (String sample : sampleIdsStr.split(",")) {
                sampleIds.add(Integer.valueOf(sample));
            }
        }
        if (sampleIds.isEmpty()) {
            throw new IllegalArgumentException("empty samples!");
        }
        // Resolve, per sample, its file ids and whether it is split across multiple files.
        sampleIdToFileIdMap = new HashMap<>();
        multiFileSamples = new HashSet<>();
        for (Integer sampleId : sampleIds) {
            SampleMetadata sampleMetadata = metadataManager.getSampleMetadata(study, sampleId);
            sampleIdToFileIdMap.put(sampleMetadata.getId(), sampleMetadata.getFiles());
            if (sampleMetadata.isMultiFileSample()) {
                multiFileSamples.add(sampleId);
            }
        }
        StudyMetadata studyMetadata = metadataManager.getStudyMetadata(study);
        fixedAttributes = HBaseToVariantConverter.getFixedAttributes(studyMetadata);
        List<String> fixedFormat = HBaseToVariantConverter.getFixedFormat(studyMetadata);
        hasGenotype = fixedFormat.contains(VCFConstants.GENOTYPE_KEY);
        if (hasGenotype) {
            LOGGER.info("Study with genotypes : {}", fixedFormat);
        } else {
            LOGGER.info("Study without genotypes : {}", fixedFormat);
        }
    }

    @Override
    protected Job setupJob(Job job, String archiveTable, String table) throws IOException {
        // Skip study columns whose value is a hom-ref / missing genotype: they never contribute
        // to the sample index.
        FilterList filter = new FilterList(FilterList.Operator.MUST_PASS_ALL,
                new QualifierFilter(EQUAL, new BinaryPrefixComparator(Bytes.toBytes(VariantPhoenixSchema.buildStudyColumnsPrefix(study)))),
                new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'0', '|', '0', SEPARATOR_BYTE})),
                new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'0', '/', '0', SEPARATOR_BYTE})),
                new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'.', '/', '.', SEPARATOR_BYTE})),
                new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'.', '|', '.', SEPARATOR_BYTE})),
                new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'.', SEPARATOR_BYTE}))
        );
        if (secondaryOnly) {
            // Additionally drop genotypes carrying the main alternate.
            filter.addFilter(new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'0', '/', '1', SEPARATOR_BYTE})));
            filter.addFilter(new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'1', '/', '1', SEPARATOR_BYTE})));
            filter.addFilter(new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'1', '/', '2', SEPARATOR_BYTE})));
            filter.addFilter(new ValueFilter(NOT_EQUAL, new BinaryPrefixComparator(new byte[]{'1', SEPARATOR_BYTE})));
        }
        List<Scan> scans;
        if (multiScan) {
            // FIXME: This will fail for large number of samples. Has to be fixed to use filters instead of explicit columns
            double numScans = Math.ceil(sampleIds.size() / partialScanSize);
            int samplesPerScan = (int) Math.ceil(sampleIds.size() / numScans);
            scans = new ArrayList<>((int) numScans);
            for (List<Integer> samplesSubSet : sampleIds.stream().collect(Collectors.groupingBy(s -> s / samplesPerScan)).values()) {
                Scan scan = new Scan();
                if (StringUtils.isNotEmpty(region)) {
                    VariantHBaseQueryParser.addRegionFilter(scan, Region.parseRegion(region));
                }
                scan.setFilter(filter);
                scans.add(scan);
                for (int sample : samplesSubSet) {
                    byte[] sampleColumn = VariantPhoenixSchema.buildSampleColumnKey(study, sample);
                    scan.addColumn(GenomeHelper.COLUMN_FAMILY_BYTES, sampleColumn);
                    for (Integer fileId : sampleIdToFileIdMap.get(sample)) {
                        if (multiFileSamples.contains(sample)) {
                            byte[] sampleFileColumn = VariantPhoenixSchema.buildSampleColumnKey(study, sample, fileId);
                            scan.addColumn(GenomeHelper.COLUMN_FAMILY_BYTES, sampleFileColumn);
                        }
                        byte[] fileColumn = VariantPhoenixSchema.buildFileColumnKey(study, fileId);
                        scan.addColumn(GenomeHelper.COLUMN_FAMILY_BYTES, fileColumn);
                    }
                }
            }
            // TODO: PartialResults may be an interesting feature, but is not available in v1.1.2. See [HBASE-14696] for more information
//            scan.setAllowPartialResults(true);
            if (scans.size() != numScans) {
                throw new IllegalArgumentException("Wrong number of scans. Expected " + numScans + " got " + scans.size());
            }
        } else {
            Scan scan = new Scan();
            if (StringUtils.isNotEmpty(region)) {
                VariantHBaseQueryParser.addRegionFilter(scan, Region.parseRegion(region));
            }
            scan.setFilter(filter);
            // Explicit columns keep the scan narrow; when they would exceed maxColumns, fall back
            // to a full ("partial") scan and let the mapper discard the extra files.
            int approxExpectedNumColumns =
                    sampleIds.size()
                            + sampleIdToFileIdMap.values().stream().flatMap(Collection::stream).collect(Collectors.toSet()).size();
            if (approxExpectedNumColumns < maxColumns) {
                for (Integer sample : sampleIds) {
                    byte[] sampleColumn = VariantPhoenixSchema.buildSampleColumnKey(study, sample);
                    scan.addColumn(GenomeHelper.COLUMN_FAMILY_BYTES, sampleColumn);
                    for (Integer fileId : sampleIdToFileIdMap.get(sample)) {
                        if (multiFileSamples.contains(sample)) {
                            byte[] sampleFileColumn = VariantPhoenixSchema.buildSampleColumnKey(study, sample, fileId);
                            scan.addColumn(GenomeHelper.COLUMN_FAMILY_BYTES, sampleFileColumn);
                        }
                        byte[] fileColumn = VariantPhoenixSchema.buildFileColumnKey(study, fileId);
                        scan.addColumn(GenomeHelper.COLUMN_FAMILY_BYTES, fileColumn);
                    }
                }
            } else {
                job.getConfiguration().setBoolean(PARTIAL_SCAN, true);
            }
            scans = Collections.singletonList(scan);
        }
        VariantMapReduceUtil.configureMapReduceScans(scans, getConf());
        for (int i = 0; i < scans.size(); i++) {
            Scan s = scans.get(i);
            LOGGER.info("scan[{}]= {}", i, s.toJSON(20));
        }
        try {
            VariantMapReduceUtil.initTableMapperJob(job, table, scans, SampleIndexerMapper.class);
            // Wrap the input format so splits are aligned to the sample index batch size.
            Class<? extends InputFormat<?, ?>> delegatedInputFormatClass = job.getInputFormatClass();
            job.setInputFormatClass(VariantAlignedInputFormat.class);
            VariantAlignedInputFormat.setDelegatedInputFormat(job, delegatedInputFormatClass);
            VariantAlignedInputFormat.setBatchSize(job, SampleIndexSchema.BATCH_SIZE);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        VariantMapReduceUtil.setOutputHBaseTable(job, outputTable);
        VariantMapReduceUtil.setNoneReduce(job);
//        job.setSpeculativeExecution(false);
        job.getConfiguration().setInt(MRJobConfig.TASK_TIMEOUT, 20 * 60 * 1000);
        // Serialize the sampleId -> fileIds map as "sample:file_file,sample:file,..." for the mapper.
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<Integer, List<Integer>> entry : sampleIdToFileIdMap.entrySet()) {
            sb.append(entry.getKey()).append(':');
            Iterator<Integer> fileIt = entry.getValue().iterator();
            sb.append(fileIt.next());
            while (fileIt.hasNext()) {
                sb.append('_').append(fileIt.next());
            }
            sb.append(',');
        }
        job.getConfiguration().setBoolean(SampleIndexerMapper.HAS_GENOTYPE, hasGenotype);
        job.getConfiguration().set(SAMPLE_ID_TO_FILE_ID_MAP, sb.toString());
        job.getConfiguration().set(MULTI_FILE_SAMPLES, multiFileSamples.stream().map(Object::toString).collect(Collectors.joining(",")));
        job.getConfiguration().set(FIXED_ATTRIBUTES, String.join(",", fixedAttributes));
        if (allSamples) {
            job.getConfiguration().unset(SAMPLES);
        } else {
            job.getConfiguration().set(SAMPLES, sampleIds.stream().map(Object::toString).collect(Collectors.joining(",")));
        }
        return job;
    }

    @Override
    protected void preExecution() throws IOException, StorageEngineException {
        super.preExecution();
        ObjectMap options = new ObjectMap();
        options.putAll(getParams());
        SampleIndexSchema.createTableIfNeeded(outputTable, getHBaseManager(), options);
    }

    public static void main(String[] args) throws Exception {
        try {
            System.exit(new SampleIndexDriver().privateMain(args, null));
        } catch (Exception e) {
            LOGGER.error("Error executing " + SampleIndexDriver.class, e);
            System.exit(1);
        }
    }

    /**
     * Mapper reading sample/file columns of each variant row and emitting one sample index Put
     * per sample; results are flushed per chromosome/position batch by the superclass.
     */
    public static class SampleIndexerMapper extends VariantTableSampleIndexOrderMapper<ImmutableBytesWritable, Put> {

        private static final String HAS_GENOTYPE = "SampleIndexerMapper.hasGenotype";
        public static final int SAMPLES_TO_COUNT = 2;
        // Small subset of samples for which per-genotype counters are reported.
        private Set<Integer> samplesToCount;
        private VariantFileIndexConverter fileIndexConverter;
        private List<String> fixedAttributes;
        private final Map<Integer, SampleMetadata> samples = new HashMap<>();
        private final Set<Integer> files = new HashSet<>();
        private boolean hasGenotype;
        private final Map<Integer, SampleIndexEntryPutBuilder> samplesMap = new HashMap<>();
        private boolean partialScan;

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Constructed for its side effects on the configuration — TODO confirm it is still needed.
            new GenomeHelper(context.getConfiguration());
            hasGenotype = context.getConfiguration().getBoolean(HAS_GENOTYPE, true);
            fileIndexConverter = new VariantFileIndexConverter();
            int[] sampleIds = context.getConfiguration().getInts(SAMPLES);
            if (sampleIds == null || sampleIds.length == 0) {
                // No explicit sample list (ALL samples): count the first few sample ids.
                samplesToCount = new HashSet<>(SAMPLES_TO_COUNT);
                for (int i = 0; i < SAMPLES_TO_COUNT; i++) {
                    samplesToCount.add(i + 1);
                }
            } else {
                samplesToCount = new HashSet<>(SAMPLES_TO_COUNT);
                for (int i = 0; i < Math.min(sampleIds.length, SAMPLES_TO_COUNT); i++) {
                    samplesToCount.add(sampleIds[i]);
                }
            }
            String[] strings = context.getConfiguration().getStrings(FIXED_ATTRIBUTES);
            if (strings != null) {
                fixedAttributes = Arrays.asList(strings);
            } else {
                fixedAttributes = Collections.emptyList();
            }
            // Decode the "sample:file_file,..." map serialized by the driver.
            String s = context.getConfiguration().get(SAMPLE_ID_TO_FILE_ID_MAP);
            for (String sampleFiles : s.split(",")) {
                if (!sampleFiles.isEmpty()) {
                    String[] sampleFilesSplit = sampleFiles.split(":");
                    Integer sampleId = Integer.valueOf(sampleFilesSplit[0]);
                    String[] files = sampleFilesSplit[1].split("_");
                    List<Integer> fileIds = new ArrayList<>(files.length);
                    for (String file : files) {
                        fileIds.add(Integer.valueOf(file));
                    }
                    this.files.addAll(fileIds);
                    samples.put(sampleId, new SampleMetadata(0, sampleId, null).setFiles(fileIds));
                }
            }
            partialScan = context.getConfiguration().getBoolean(PARTIAL_SCAN, false);
            for (int sampleId : context.getConfiguration().getInts(MULTI_FILE_SAMPLES)) {
                samples.get(sampleId).setSplitData(VariantStorageEngine.SplitData.MULTI);
            }
        }

        @Override
        protected void map(ImmutableBytesWritable key, Result result, Context context) throws IOException, InterruptedException {
            VariantRow variantRow = new VariantRow(result);
            Variant variant = variantRow.getVariant();
            // Get fileIndex for each file
            Map<Integer, Short> fileIndexMap = new HashMap<>();
            variantRow.forEachFile(fileColumn -> {
                if (partialScan && !this.files.contains(fileColumn.getFileId())) {
                    // Discard extra files
                    // Only check map with a Partial Scan.
                    return;
                }
                Map<String, String> fileAttributes = HBaseToStudyEntryConverter.convertFileAttributes(fileColumn.raw(), fixedAttributes);
                short fileIndexValue = fileIndexConverter.createFileIndexValue(variant.getType(), 0, fileAttributes, null);
                fileIndexMap.put(fileColumn.getFileId(), fileIndexValue);
            });
            variantRow.forEachSample(sampleColumn -> {
                int sampleId = sampleColumn.getSampleId();
                SampleMetadata sampleMetadata = samples.get(sampleId);
                if (sampleMetadata == null) {
                    // Discard extra samples
                    return;
                }
                String gt;
                boolean validGt;
                if (hasGenotype) {
                    gt = sampleColumn.getGT();
                    if (gt == null || gt.isEmpty()) {
                        gt = GenotypeClass.NA_GT_VALUE;
                        validGt = true;
                    } else {
                        validGt = SampleIndexSchema.isAnnotatedGenotype(gt);
                    }
                } else {
                    gt = GenotypeClass.NA_GT_VALUE;
                    validGt = true;
                }
                if (validGt) {
                    SampleIndexEntryPutBuilder builder = samplesMap.computeIfAbsent(sampleId,
                            s -> new SampleIndexEntryPutBuilder(s, variant));
                    // Resolve which file(s) this genotype came from, and the position of that file
                    // within the sample's file list (only meaningful for multi-file samples).
                    List<Integer> files;
                    int filePosition;
                    if (sampleMetadata.isMultiFileSample()) {
                        Integer fileId = sampleColumn.getFileId();
                        if (fileId == null) {
                            files = Collections.singletonList(sampleMetadata.getFiles().get(0));
                            filePosition = 0;
                        } else {
                            files = Collections.singletonList(fileId);
                            filePosition = sampleMetadata.getFiles().indexOf(fileId);
                        }
                    } else {
                        files = sampleMetadata.getFiles();
                        filePosition = -1;
                    }
                    // Add fileIndex value for this genotype
                    boolean fileFound = false;
                    for (Integer fileId : files) {
                        Short fileIndex = fileIndexMap.get(fileId);
                        if (fileIndex != null) {
                            fileFound = true;
                            if (filePosition > 0) {
                                fileIndex = VariantFileIndexConverter.setFilePosition(fileIndex, filePosition);
                            }
                            builder.add(gt, new SampleVariantIndexEntry(variant, fileIndex));
                            if (samplesToCount.contains(sampleId)) {
                                switch (gt) {
                                    case "1/1":
                                    case "0/1":
                                    case "1/2":
                                    case "1/3":
                                    case "0|1":
                                    case "1|0":
                                    case "1|1":
                                        context.getCounter(COUNTER_GROUP_NAME, "SAMPLE_" + sampleId + "_" + gt).increment(1);
                                        break;
                                    default:
                                        context.getCounter(COUNTER_GROUP_NAME, "SAMPLE_" + sampleId + "_x/x").increment(1);
                                        break;
                                }
                            }
                        }
                    }
                    if (!fileFound) {
                        throw new IllegalStateException("File " + files + " not found for sample " + sampleId + " in variant " + variant);
                    }
                }
            });
        }

        @Override
        public void flush(Context context, String chromosome, int position) throws IOException, InterruptedException {
            // Emit the accumulated Put for every sample, then reset for the next batch.
            for (SampleIndexEntryPutBuilder builder : samplesMap.values()) {
                Put put = builder.build();
                if (put == null || put.isEmpty()) {
                    context.getCounter(COUNTER_GROUP_NAME, "empty_put").increment(1);
                } else {
                    context.write(new ImmutableBytesWritable(put.getRow()), put);
                }
            }
            samplesMap.clear();
        }
    }
}
|
package org.xwiki.extension.internal;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Provider;
import javax.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.xwiki.component.annotation.Component;
import org.xwiki.configuration.ConfigurationSource;
import org.xwiki.container.Container;
import org.xwiki.extension.ExtensionManagerConfiguration;
import org.xwiki.extension.repository.ExtensionRepositoryId;
/**
 * Default implementation of {@link ExtensionManagerConfiguration}, reading its values from the
 * {@code configurationSource} provider and the container's permanent directory.
 *
 * @version $Id$
 */
@Component
@Singleton
public class DefaultExtensionManagerConfiguration implements ExtensionManagerConfiguration
{
    /**
     * Used to parse repositories entries from the configuration. Expected format:
     * {@code <id>:<type>:<uri>}.
     */
    private static final Pattern REPOSITORYIDPATTERN = Pattern.compile("([^:]+):([^:]+):(.+)");

    /**
     * The type identifier for a maven repository.
     */
    private static final String TYPE_MAVEN = "maven";

    /**
     * The type identifier for a xwiki repository.
     */
    private static final String TYPE_XWIKI = "xwiki";

    /**
     * The default user agent.
     */
    private static final String DEFAULT_USERAGENT = "Extension Manager";

    /**
     * The logger to log.
     */
    @Inject
    private Logger logger;

    /**
     * Used to get work directory.
     */
    @Inject
    private Container container;

    /**
     * The configuration.
     */
    @Inject
    @Named("configurationSource")
    private Provider<ConfigurationSource> configuration;

    // Cache

    /**
     * @see DefaultExtensionManagerConfiguration#getLocalRepository()
     */
    private File localRepository;

    /**
     * @return extension manager home folder, under the container's permanent directory
     */
    public File getHome()
    {
        return new File(this.container.getApplicationContext().getPermanentDirectory(), "extension/");
    }

    @Override
    public File getLocalRepository()
    {
        // Lazily resolved and then cached; the configured path wins over the default location.
        if (this.localRepository == null) {
            String localRepositoryPath = this.configuration.get().getProperty("extension.localRepository");
            if (localRepositoryPath == null) {
                this.localRepository = new File(getHome(), "repository/");
            } else {
                this.localRepository = new File(localRepositoryPath);
            }
        }
        return this.localRepository;
    }

    @Override
    public List<ExtensionRepositoryId> getRepositories()
    {
        List<ExtensionRepositoryId> repositories = new ArrayList<ExtensionRepositoryId>();

        List<String> repositoryStrings =
            this.configuration.get().getProperty("extension.repositories", Collections.<String> emptyList());

        if (repositoryStrings.isEmpty()) {
            // Nothing configured: fall back to the well-known XWiki repositories.
            try {
                repositories.add(new ExtensionRepositoryId("maven-xwiki", TYPE_MAVEN, new URI(
                    "http://nexus.xwiki.org/nexus/content/groups/public")));
                repositories.add(new ExtensionRepositoryId("extensions.xwiki.org", TYPE_XWIKI, new URI(
                    "http://extensions.xwiki.org/xwiki/rest/")));
            } catch (Exception e) {
                // Should never happen since the URIs are hard-coded and valid, but don't hide it
                // completely if it somehow does.
                this.logger.error("Failed to create the default extension repositories", e);
            }
        } else {
            for (String repositoryString : repositoryStrings) {
                if (StringUtils.isNotBlank(repositoryString)) {
                    try {
                        ExtensionRepositoryId extensionRepositoryId = parseRepository(repositoryString);
                        repositories.add(extensionRepositoryId);
                    } catch (Exception e) {
                        // A malformed entry is skipped; the remaining repositories are still used.
                        this.logger.warn("Failed to parse repository [{}] from configuration", repositoryString, e);
                    }
                } else {
                    this.logger.debug("Empty repository id found in the configuration");
                }
            }
        }

        return repositories;
    }

    /**
     * Create a {@link ExtensionRepositoryId} from a string entry.
     *
     * @param repositoryString the repository configuration entry, in {@code <id>:<type>:<uri>} format
     * @return the {@link ExtensionRepositoryId}
     * @throws URISyntaxException Failed to create an {@link URI} object from the configuration entry
     * @throws ExtensionManagerConfigurationException Failed to parse configuration
     */
    private ExtensionRepositoryId parseRepository(String repositoryString) throws URISyntaxException,
        ExtensionManagerConfigurationException
    {
        Matcher matcher = REPOSITORYIDPATTERN.matcher(repositoryString);

        if (matcher.matches()) {
            return new ExtensionRepositoryId(matcher.group(1), matcher.group(2), new URI(matcher.group(3)));
        }

        throw new ExtensionManagerConfigurationException("Don't match repository configuration [" + repositoryString
            + "]");
    }

    @Override
    public String getUserAgent()
    {
        // TODO: add version (need a way to get platform version first)
        return this.configuration.get().getProperty("extension.userAgent", DEFAULT_USERAGENT);
    }
}
|
package org.xwiki.mentions.test.ui;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.xwiki.test.docker.junit5.UITest;
/**
 * All tests of the mentions application UI.
 * <p>
 * Aggregating the suites as {@code @Nested} classes lets every UI test run against a single
 * docker-based XWiki instance started once for this class by {@code @UITest}.
 *
 * @version $Id$
 * @since 12.5RC1
 */
@UITest(
    properties = {
        // Required for filters preferences
        "xwikiDbHbmCommonExtraMappings=notification-filter-preferences.hbm.xml"
    },
    extraJARs = {
        // It's currently not possible to install a JAR contributing a Hibernate mapping file as an Extension. Thus
        // we need to provide the JAR inside WEB-INF/lib
        "org.xwiki.platform:xwiki-platform-notifications-filters-default",
        "org.xwiki.platform:xwiki-platform-eventstream-store-hibernate",
        // The Solr store is not ready yet to be installed as extension
        "org.xwiki.platform:xwiki-platform-eventstream-store-solr"
    }, resolveExtraJARs = true)
public class AllIT
{
    // Inherits all test methods from MentionsIT and runs them inside this suite's environment.
    @Nested
    @DisplayName("Mentions UI")
    class NestedMentionsIT extends MentionsIT
    {
    }
}
|
package eu.diachron.qualitymetrics.accessibility.interlinking;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.mapdb.DB;
import org.mapdb.HTreeMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.sparql.core.Quad;
import com.hp.hpl.jena.vocabulary.RDF;
import de.unibonn.iai.eis.diachron.mapdb.MapDbFactory;
import de.unibonn.iai.eis.diachron.semantics.DQM;
import de.unibonn.iai.eis.diachron.technques.probabilistic.ReservoirSampler;
import de.unibonn.iai.eis.diachron.technques.probabilistic.ResourceBaseURIOracle;
import de.unibonn.iai.eis.luzzu.assessment.QualityMetric;
import de.unibonn.iai.eis.luzzu.datatypes.ProblemList;
import de.unibonn.iai.eis.luzzu.exceptions.ProblemListInitialisationException;
import de.unibonn.iai.eis.luzzu.properties.EnvironmentProperties;
import de.unibonn.iai.eis.luzzu.semantics.vocabularies.QPRO;
import eu.diachron.qualitymetrics.accessibility.availability.helper.ModelParser;
import eu.diachron.qualitymetrics.cache.CachedHTTPResource;
import eu.diachron.qualitymetrics.cache.DiachronCacheManager;
import eu.diachron.qualitymetrics.utilities.HTTPRetriever;
public class EstimatedLinkExternalDataProviders implements QualityMetric {
	/** URI identifying this metric in the DQM vocabulary. */
	private final Resource METRIC_URI = DQM.LinksToExternalDataProvidersMetric;
	final static Logger logger = LoggerFactory.getLogger(EstimatedLinkExternalDataProviders.class);
	/**
	 * Parameter: default size for the reservoir used to sample resources per pay-level domain (PLD)
	 */
	private static int reservoirsize = 5000;
	/**
	 * MapDB database, used to persist the Map containing the instances found to be declared in the dataset
	 */
	private DB mapDB = MapDbFactory.createHeapDB();
	/**
	 * A set that holds all unique PLDs together with a sampled set of resources
	 */
	private HTreeMap<String, ReservoirSampler<String>> mapPLDs = mapDB.createHashMap("estimated-link-external-data-providers").make();
	/**
	 * A set that holds all unique PLDs that return RDF data
	 */
	private Set<String> setPLDsRDF = mapDB.createHashSet("link-external-data-providers-rdf").make();
	/**
	 * Flag guarding {@link #metricValue()} so the (expensive) dereferencing pass runs only once.
	 * (Comment fixed: the previous javadoc described a base-URI oracle that is not present here.)
	 */
	private boolean computed = false;
	// URIs queued for dereferencing whose HTTP responses are not yet available in the cache.
	private Queue<String> notFetchedQueue = new ConcurrentLinkedQueue<String>();
	private DiachronCacheManager dcmgr = DiachronCacheManager.getInstance();
	private HTTPRetriever httpRetriever = new HTTPRetriever();
	// Quads describing quality problems found during assessment; exposed via getQualityProblems().
	private List<Quad> _problemList = new ArrayList<Quad>();
	/**
	 * Processes a single quad making part of the dataset. Determines whether the subject and/or object of the quad
	 * are data-level URIs, if so, extracts their pay-level domain and adds them to the set of TLD URIs.
	 * @param quad Quad to be processed as part of the computation of the metric
	 */
	public void compute(Quad quad) {
		logger.debug("Computing : {} ", quad.asTriple().toString());
		// rdf:type triples point at vocabulary terms, not data-level links, so they are skipped
		if (!(quad.getPredicate().matches(RDF.type.asNode()))){
			String subjectPLD = "";
			String objectPLD = "";
			if (quad.getSubject().isURI()) subjectPLD = ResourceBaseURIOracle.extractPayLevelDomainURI(quad.getSubject().toString());
			if (quad.getObject().isURI()) objectPLD = ResourceBaseURIOracle.extractPayLevelDomainURI(quad.getObject().toString());
			// only sample when subject and object live on different PLDs, i.e. a potential external link
			if (!(subjectPLD.equals(objectPLD))){
				if (quad.getSubject().isURI()) this.addUriToSampler(quad.getSubject().toString());
				if (quad.getObject().isURI()) this.addUriToSampler(quad.getObject().toString());
			}
		}
	}
	/**
	 * Adds a URI to the reservoir sampler of its pay-level domain, creating the sampler on first
	 * sight of that PLD. The sampler is put back into the MapDB map so the mutation is persisted.
	 * @param uri data-level URI to sample
	 */
	private void addUriToSampler(String uri) {
		String pld = ResourceBaseURIOracle.extractPayLevelDomainURI(uri);
		if(pld != null) {
			if (this.mapPLDs.containsKey(pld)){
				ReservoirSampler<String> res = this.mapPLDs.get(pld);
				res.add(uri);
				mapPLDs.put(pld, res);
			} else {
				ReservoirSampler<String> res = new ReservoirSampler<String>(reservoirsize, true);
				res.add(uri);
				mapPLDs.put(pld, res);
			}
		}
	}
	/**
	 * Compute the value of the metric as the ratio between the number of different TLDs found among the data-level
	 * constants of the resource that are different of the resource's TLD and the total number of
	 * data-level constant URIs found in the resource.
	 * @return value of the existence of links to external data providers metric computed on the current resource
	 */
	public double metricValue() {
		if (!computed){
			//remove the base uri from the set because that will not be an "external link"
			String baseURI = EnvironmentProperties.getInstance().getDatasetURI();
			Iterator<String> iterator = mapPLDs.keySet().iterator();
			while (iterator.hasNext()) {
				String element = iterator.next();
				if (element.contains(baseURI)) iterator.remove();
			}
			this.checkForRDFLinks();
			computed = true;
			// NOTE(review): statsLogger is not declared in this class -- presumably inherited or
			// statically imported elsewhere; verify it resolves at compile time.
			statsLogger.info("EstimatedLinkExternalDataProviders. Dataset: {} - # Top Level Domains : {};",
					EnvironmentProperties.getInstance().getDatasetURI(), mapPLDs.size());
		}
		// NOTE(review): despite the javadoc above, this returns the absolute number of distinct
		// external PLDs, not a ratio -- confirm which contract is intended.
		return mapPLDs.size();
	}
	/**
	 * Dereferences the sampled resources of every PLD (via the asynchronous HTTPRetriever and the
	 * Diachron cache) and counts, per PLD, how many of them returned RDF content. A PLD is accepted
	 * into {@link #setPLDsRDF} when more than 50% of its sampled resources return RDF; otherwise a
	 * quality-problem quad is recorded in {@code _problemList}.
	 */
	private void checkForRDFLinks() {
		// temporary MapDB maps: total sampled resources per PLD, and RDF-returning resources per PLD
		HTreeMap<String, Integer> mapPLDtotres = mapDB.createHashMap("tempMapToRes").make();
		HTreeMap<String, Integer> mapPLDtotresRDF = mapDB.createHashMap("tempMapToResRDF").make();
		for(String key : this.mapPLDs.keySet()){
			ReservoirSampler<String> resources = this.mapPLDs.get(key);
			List<String> uriSet = resources.getItems();
			mapPLDtotres.put(key, uriSet.size());
			httpRetriever.addListOfResourceToQueue(uriSet);
			this.notFetchedQueue.addAll(uriSet);
		}
		httpRetriever.start();
		// Busy-waits until every queued URI has a cached HTTP response.
		// NOTE(review): if a URI never receives a response this loop spins forever -- consider a
		// retry limit or timeout.
		while (this.notFetchedQueue.size() > 0){
			String uri = this.notFetchedQueue.poll();
			CachedHTTPResource httpResource = (CachedHTTPResource) dcmgr.getFromCache(DiachronCacheManager.HTTP_RESOURCE_CACHE, uri);
			if (httpResource == null || httpResource.getStatusLines() == null) {
				// response not available yet -- re-queue and retry later
				this.notFetchedQueue.add(uri);
			} else {
				if (ModelParser.hasRDFContent(httpResource)){
					String pld = ResourceBaseURIOracle.extractPayLevelDomainURI(httpResource.getUri());
					if (mapPLDtotresRDF.containsKey(pld)) mapPLDtotresRDF.put(pld, mapPLDtotresRDF.get(pld) + 1);
					else mapPLDtotresRDF.put(pld, 1);
					logger.debug("URI successfully dereferenced: {}. To go: {}", uri, this.notFetchedQueue.size());
				}
			}
		}
		// if more than 50% of the resources in the sampler return RDF, then
		// we assume that PLD domain return RDF data thus adding it to setPLDsRDF
		for(String plds : mapPLDtotres.keySet()){
			Integer iOri = mapPLDtotres.get(plds);
			Integer iRes = mapPLDtotresRDF.get(plds);
			if(iOri != null && iRes != null) {
				double ori = iOri.doubleValue();
				double res = iRes.doubleValue();
				double perc = ((res * 100) / ori);
				if (perc > 50.0) setPLDsRDF.add(plds);
				else {
					// record the PLD as a quality problem: too few of its sampled resources resolved to RDF
					Quad q = new Quad(null, ModelFactory.createDefaultModel().createResource(plds).asNode(), QPRO.exceptionDescription.asNode(), DQM.LowPercentageOfValidPLDResources.asNode());
					this._problemList.add(q);
				}
				logger.debug("OK -> Computation of percentage for PLD: {}. ORI: {}, RES: {}", plds, iOri, iRes);
			} else {
				logger.warn("Computation of percentage for PLD: {} aborted. ORI and/or RES could not be retrieved: ORI: {}, RES: {}", plds, iOri, iRes);
			}
		}
	}
	/** @return the DQM URI identifying this metric */
	public Resource getMetricURI() {
		return this.METRIC_URI;
	}
	/**
	 * @return the quality problems collected during assessment, wrapped in a {@link ProblemList};
	 *         an empty list when no problems were found (or {@code null} if wrapping failed)
	 */
	public ProblemList<?> getQualityProblems() {
		ProblemList<Quad> pl = null;
		try {
			if(this._problemList != null && this._problemList.size() > 0) {
				pl = new ProblemList<Quad>(this._problemList);
			} else {
				pl = new ProblemList<Quad>();
			}
		} catch (ProblemListInitialisationException e) {
			logger.error(e.getMessage());
		}
		return pl;
	}
	// This metric samples resources, so its value is an estimate rather than an exact count.
	@Override
	public boolean isEstimate() {
		return true;
	}
	@Override
	public Resource getAgentURI() {
		return DQM.LuzzuProvenanceAgent;
	}
	/**
	 * Sets the reservoir size parameter
	 * @param reservoirSize Approximation parameter
	 */
	public static void setReservoirSize(int reservoirSize) {
		EstimatedLinkExternalDataProviders.reservoirsize = reservoirSize;
	}
	/*
	 * TODO: Make sure that this method is to be removed...
	private boolean is200AnRDF(CachedHTTPResource resource) {
		if(resource != null && resource.getResponses() != null) {
			for (SerialisableHttpResponse response : resource.getResponses()) {
				if(response != null && response.getHeaders("Content-Type") != null) {
					if (CommonDataStructures.ldContentTypes.contains(response.getHeaders("Content-Type"))) {
						if (response.getHeaders("Content-Type").equals(WebContent.contentTypeTextPlain)){
							Model m = this.tryRead(resource.getUri());
							if (m.size() == 0){
								Quad q = new Quad(null, ModelFactory.createDefaultModel().createResource(resource.getUri()).asNode(), QPRO.exceptionDescription.asNode(), DQM.NoValidRDFDataForExternalLink.asNode());
								this._problemList.add(q);
								resource.setContainsRDF(false);
								return false;
							}
						}
						resource.setContainsRDF(true);
						return true;
					}
				}
			}
		}
		Quad q = new Quad(null, ModelFactory.createDefaultModel().createResource(resource.getUri()).asNode(), QPRO.exceptionDescription.asNode(), DQM.NoValidRDFDataForExternalLink.asNode());
		this._problemList.add(q);
		resource.setContainsRDF(false);
		return false;
	}
	private Model tryRead(String uri) {
		Model m = ModelFactory.createDefaultModel();
		try{
			m = RDFDataMgr.loadModel(uri, Lang.NTRIPLES);
		} catch (RiotException r) {
			Log.debug("Resource could not be parsed:", r.getMessage());
		}
		return m;
	} */
}
|
package org.opentosca.bus.management.plugins.remote.service.impl;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.namespace.QName;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.ProducerTemplate;
import org.apache.commons.io.FilenameUtils;
import org.opentosca.bus.management.header.MBHeader;
import org.opentosca.bus.management.plugins.remote.service.impl.servicehandler.ServiceHandler;
import org.opentosca.bus.management.plugins.remote.service.impl.typeshandler.ArtifactTypesHandler;
import org.opentosca.bus.management.plugins.service.IManagementBusPluginService;
import org.opentosca.bus.management.utils.MBUtils;
import org.opentosca.container.core.common.Settings;
import org.opentosca.container.core.engine.ResolvedArtifacts;
import org.opentosca.container.core.engine.ResolvedArtifacts.ResolvedDeploymentArtifact;
import org.opentosca.container.core.model.csar.id.CSARID;
import org.opentosca.container.core.tosca.convention.Interfaces;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* Management Bus-Plug-in for remoteIAs.<br>
* <br>
*
*
*
* The Plugin gets needed information from the ManagementBus and is responsible to handle "remote
* IAs". Remote IAs are IAs such as scripts that needs to be executed on the host machine. Therefore
* this plugin also is responsible for the uploading of the files and the installation of required
* packages on the target machine (if specified).
*
*
*
* @author Michael Zimmermann - michael.zimmermann@iaas.uni-stuttgart.de
*
*
*/
public class ManagementBusPluginRemoteServiceImpl implements IManagementBusPluginService {
final private static String PLACEHOLDER_TARGET_FILE_PATH = "{TARGET_FILE_PATH}";
final private static String PLACEHOLDER_TARGET_FILE_FOLDER_PATH = "{TARGET_FILE_FOLDER_PATH}";
final private static String PLACEHOLDER_TARGET_FILE_NAME_WITH_EXTENSION = "{TARGET_FILE_NAME_WITH_E}";
final private static String PLACEHOLDER_TARGET_FILE_NAME_WITHOUT_EXTENSION = "{TARGET_FILE_NAME_WITHOUT_E}";
final private static String PLACEHOLDER_DA_NAME_PATH_MAP = "{DA_NAME_PATH_MAP}";
final private static String PLACEHOLDER_DA_INPUT_PARAMETER = "{INPUT_PARAMETER}";
final private static String RUN_SCRIPT_OUTPUT_PARAMETER_NAME = "ScriptResult";
final private static Logger LOG = LoggerFactory.getLogger(ManagementBusPluginRemoteServiceImpl.class);
    /**
     * Handles the invocation of a "remote IA" (e.g. a script) for the operation described by the
     * exchange headers: determines the operating-system NodeTemplate/IA of the target machine,
     * installs the packages required by the ArtifactType, uploads all artifact references of the
     * ArtifactTemplate and finally runs the ArtifactType-specific command, returning the collected
     * output parameters (or a dummy result) in the exchange body.
     *
     * @param exchange the Camel exchange carrying the invocation headers and input parameters
     * @return the same exchange, with the invocation result set as its body on success
     */
    @Override
    public Exchange invoke(final Exchange exchange) {
        final Message message = exchange.getIn();
        // read all relevant invocation information from the message headers
        ManagementBusPluginRemoteServiceImpl.LOG.debug("Management Bus Remote Plugin getting information...");
        final CSARID csarID = message.getHeader(MBHeader.CSARID.toString(), CSARID.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("CsarID: {}", csarID);
        final QName artifactTemplateID = message.getHeader(MBHeader.ARTIFACTTEMPLATEID_QNAME.toString(), QName.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("ArtifactTemplateID: {}", artifactTemplateID);
        String nodeTemplateID = message.getHeader(MBHeader.NODETEMPLATEID_STRING.toString(), String.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("NodeTemplateID: {}", nodeTemplateID);
        final String relationshipTemplateID =
            message.getHeader(MBHeader.RELATIONSHIPTEMPLATEID_STRING.toString(), String.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("RelationshipTemplateID: {}", relationshipTemplateID);
        final QName serviceTemplateID = message.getHeader(MBHeader.SERVICETEMPLATEID_QNAME.toString(), QName.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("ServiceTemplateID: {}", serviceTemplateID);
        final QName nodeTypeID = message.getHeader(MBHeader.NODETYPEID_QNAME.toString(), QName.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("NodeTypeID: {}", nodeTypeID);
        final QName relationshipTypeID = message.getHeader(MBHeader.RELATIONSHIPTYPEID_QNAME.toString(), QName.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("RelationshipTypeID: {}", relationshipTypeID);
        final String interfaceName = message.getHeader(MBHeader.INTERFACENAME_STRING.toString(), String.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("InterfaceName: {}", interfaceName);
        final String operationName = message.getHeader(MBHeader.OPERATIONNAME_STRING.toString(), String.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("OperationName: {}", operationName);
        final URI serviceInstanceID = message.getHeader(MBHeader.SERVICEINSTANCEID_URI.toString(), URI.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("ServiceInstanceID: {}", serviceInstanceID);
        final String nodeInstanceID = message.getHeader(MBHeader.NODEINSTANCEID_STRING.toString(), String.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("NodeInstanceID: {}", nodeInstanceID);
        final String engineIAPublicIP = message.getHeader(MBHeader.ENGINE_IA_PUBLIC_IP.toString(), String.class);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("ENGINE_IA_PUBLIC_IP: {}", engineIAPublicIP);
        // operations bound to a RelationshipTemplate are executed on its source or target NodeTemplate
        if (nodeTemplateID == null && relationshipTemplateID != null) {
            final boolean isBoundToSourceNode =
                ServiceHandler.toscaEngineService.isOperationOfRelationshipBoundToSourceNode(csarID, relationshipTypeID,
                                                                                            interfaceName,
                                                                                            operationName);
            if (isBoundToSourceNode) {
                nodeTemplateID =
                    ServiceHandler.toscaEngineService.getSourceNodeTemplateIDOfRelationshipTemplate(csarID,
                                                                                                    serviceTemplateID,
                                                                                                    relationshipTemplateID);
            } else {
                nodeTemplateID =
                    ServiceHandler.toscaEngineService.getTargetNodeTemplateIDOfRelationshipTemplate(csarID,
                                                                                                    serviceTemplateID,
                                                                                                    relationshipTemplateID);
            }
        }
        // Determine output parameters of the current operation
        final List<String> outputParameters = new LinkedList<>();
        final boolean hasOutputParams =
            ServiceHandler.toscaEngineService.hasOperationOfANodeTypeSpecifiedOutputParams(csarID, nodeTypeID,
                                                                                          interfaceName,
                                                                                          operationName);
        if (hasOutputParams) {
            final Node outputParametersNode =
                ServiceHandler.toscaEngineService.getOutputParametersOfANodeTypeOperation(csarID, nodeTypeID,
                                                                                         interfaceName, operationName);
            if (outputParametersNode != null) {
                final NodeList children = outputParametersNode.getChildNodes();
                for (int i = 0; i < children.getLength(); i++) {
                    final Node child = children.item(i);
                    if (child.getNodeType() == Node.ELEMENT_NODE) {
                        final String name = ((Element) child).getAttribute("name");
                        outputParameters.add(name);
                    }
                }
            }
        }
        for (final String param : outputParameters) {
            ManagementBusPluginRemoteServiceImpl.LOG.debug("Output parameter: {}", param);
        }
        final QName artifactType =
            ServiceHandler.toscaEngineService.getArtifactTypeOfArtifactTemplate(csarID, artifactTemplateID);
        ManagementBusPluginRemoteServiceImpl.LOG.debug("ArtifactType of ArtifactTemplate {} : {}", artifactTemplateID,
                                                       artifactType);
        if (artifactType != null && nodeTemplateID != null) {
            // search operating system ia to upload files and run scripts on
            // target
            // machine
            final String osNodeTemplateID =
                MBUtils.getOperatingSystemNodeTemplateID(csarID, serviceTemplateID, nodeTemplateID);
            if (osNodeTemplateID != null) {
                final QName osNodeTypeID =
                    ServiceHandler.toscaEngineService.getNodeTypeOfNodeTemplate(csarID, serviceTemplateID,
                                                                                osNodeTemplateID);
                if (osNodeTypeID != null) {
                    ManagementBusPluginRemoteServiceImpl.LOG.debug("OperatingSystem-NodeType found: {}", osNodeTypeID);
                    final String osIAName = MBUtils.getOperatingSystemIA(csarID, serviceTemplateID, osNodeTemplateID);
                    if (osIAName != null) {
                        final Object params = message.getBody();
                        // create headers
                        final HashMap<String, Object> headers = new HashMap<>();
                        headers.put(MBHeader.CSARID.toString(), csarID);
                        headers.put(MBHeader.SERVICETEMPLATEID_QNAME.toString(), serviceTemplateID);
                        headers.put(MBHeader.NODETEMPLATEID_STRING.toString(), osNodeTemplateID);
                        headers.put(MBHeader.INTERFACENAME_STRING.toString(),
                                    MBUtils.getInterfaceForOperatingSystemNodeType(csarID, osNodeTypeID));
                        headers.put(MBHeader.SERVICEINSTANCEID_URI.toString(), serviceInstanceID);
                        headers.put(MBHeader.ENGINE_IA_PUBLIC_IP.toString(), engineIAPublicIP);
                        // install packages
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Installing packages...");
                        installPackages(artifactType, headers);
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Packages installed.");
                        // upload files
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Uploading files...");
                        final List<String> artifactReferences =
                            ServiceHandler.toscaEngineService.getArtifactReferenceWithinArtifactTemplate(csarID,
                                                                                                         artifactTemplateID);
                        String fileSource;
                        String targetFilePath = null;
                        String targetFileFolderPath = null;
                        for (final String artifactRef : artifactReferences) {
                            fileSource =
                                Settings.CONTAINER_API + "/csars/" + csarID.getFileName() + "/content/" + artifactRef;
                            targetFilePath = "~/" + csarID.getFileName() + "/" + artifactRef;
                            targetFileFolderPath = FilenameUtils.getFullPathNoEndSeparator(targetFilePath);
                            final String createDirCommand = "sleep 1 && mkdir -p " + targetFileFolderPath;
                            // create directory before uploading file
                            runScript(createDirCommand, headers);
                            // upload file
                            transferFile(csarID, artifactTemplateID, fileSource, targetFilePath, headers);
                        }
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Files uploaded.");
                        // run script
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Running scripts...");
                        // NOTE(review): the placeholders below are resolved against the LAST uploaded
                        // artifact reference only (targetFilePath of the final loop iteration) --
                        // confirm that multi-reference ArtifactTemplates are intended to behave this way.
                        final String fileNameWithE = FilenameUtils.getName(targetFilePath);
                        final String fileNameWithoutE = FilenameUtils.getBaseName(targetFilePath);
                        String artifactTypeSpecificCommand =
                            createArtifcatTypeSpecificCommandString(csarID, artifactType, artifactTemplateID, params);
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Replacing further generic placeholder...");
                        // replace placeholders
                        artifactTypeSpecificCommand =
                            artifactTypeSpecificCommand.replace(ManagementBusPluginRemoteServiceImpl.PLACEHOLDER_TARGET_FILE_PATH,
                                                                targetFilePath);
                        artifactTypeSpecificCommand =
                            artifactTypeSpecificCommand.replace(ManagementBusPluginRemoteServiceImpl.PLACEHOLDER_TARGET_FILE_FOLDER_PATH,
                                                                targetFileFolderPath);
                        artifactTypeSpecificCommand =
                            artifactTypeSpecificCommand.replace(ManagementBusPluginRemoteServiceImpl.PLACEHOLDER_TARGET_FILE_NAME_WITH_EXTENSION,
                                                                fileNameWithE);
                        artifactTypeSpecificCommand =
                            artifactTypeSpecificCommand.replace(ManagementBusPluginRemoteServiceImpl.PLACEHOLDER_TARGET_FILE_NAME_WITHOUT_EXTENSION,
                                                                fileNameWithoutE);
                        artifactTypeSpecificCommand =
                            artifactTypeSpecificCommand.replace(ManagementBusPluginRemoteServiceImpl.PLACEHOLDER_DA_NAME_PATH_MAP,
                                                                "sudo -E "
                                                                    + createDANamePathMapEnvVar(csarID,
                                                                                                serviceTemplateID,
                                                                                                nodeTypeID,
                                                                                                nodeTemplateID)
                                                                    + " CSAR='" + csarID + "' ");
                        artifactTypeSpecificCommand =
                            artifactTypeSpecificCommand.replace(ManagementBusPluginRemoteServiceImpl.PLACEHOLDER_DA_INPUT_PARAMETER,
                                                                createParamsString(params));
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Final command for ArtifactType {} : {}",
                                                                       artifactType, artifactTypeSpecificCommand);
                        final Object result = runScript(artifactTypeSpecificCommand, headers);
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Script execution result: {}", result);
                        ManagementBusPluginRemoteServiceImpl.LOG.debug("Scripts finished.");
                        // create response
                        final Map<String, String> resultMap = new HashMap<>();
                        addOutputParametersToResultMap(resultMap, result, outputParameters);
                        if (resultMap.isEmpty()) {
                            // create dummy response in case there are no output parameters
                            resultMap.put("invocation", "finished");
                        }
                        exchange.getIn().setBody(resultMap);
                    } else {
                        ManagementBusPluginRemoteServiceImpl.LOG.warn("No OperatingSystem-IA found!");
                    }
                } else {
                    ManagementBusPluginRemoteServiceImpl.LOG.warn("No OperatingSystem-NodeType found!");
                }
            } else {
                ManagementBusPluginRemoteServiceImpl.LOG.warn("No OperatingSystem-NodeTemplate found!");
            }
        } else {
            ManagementBusPluginRemoteServiceImpl.LOG.warn("Could not determine ArtifactType of ArtifactTemplate: {}!",
                                                          artifactTemplateID);
        }
        return exchange;
    }
/**
* Check if the output parameters for this remote service operation are returned in the script
* result and add them to the result map.
*
* @param resultMap The result map which is returned for the invocation of the remote service
* operation
* @param result The returned result of the run script operation
* @param outputParameters The output parameters that are expected for the operation
*/
private void addOutputParametersToResultMap(final Map<String, String> resultMap, final Object result,
final List<String> outputParameters) {
ManagementBusPluginRemoteServiceImpl.LOG.debug("Adding output parameters to the response message.");
// process result as HashMap
if (result instanceof HashMap<?, ?>) {
final HashMap<?, ?> resultHashMap = (HashMap<?, ?>) result;
// get ScriptResult part of the response which contains the parameters
if (resultHashMap.containsKey(ManagementBusPluginRemoteServiceImpl.RUN_SCRIPT_OUTPUT_PARAMETER_NAME)) {
final Object scriptResult =
resultHashMap.get(ManagementBusPluginRemoteServiceImpl.RUN_SCRIPT_OUTPUT_PARAMETER_NAME);
if (scriptResult != null) {
final String scriptResultString = scriptResult.toString();
ManagementBusPluginRemoteServiceImpl.LOG.debug("{}: {}",
ManagementBusPluginRemoteServiceImpl.RUN_SCRIPT_OUTPUT_PARAMETER_NAME,
scriptResultString);
// split result on line breaks as every parameter is returned in a separate "echo"
// command
final String[] resultParameters = scriptResultString.split("[\\r\\n]+");
// add each parameter that is defined in the operation and passed back
for (final String resultParameter : resultParameters) {
for (final String outputParameter : outputParameters) {
if (resultParameter.startsWith(outputParameter)) {
final String value = resultParameter.substring(resultParameter.indexOf("=") + 1);
ManagementBusPluginRemoteServiceImpl.LOG.debug("Adding parameter {} with value: {}",
outputParameter, value);
resultMap.put(outputParameter, value);
}
}
}
}
} else {
ManagementBusPluginRemoteServiceImpl.LOG.warn("Result contains no result entry '{}'",
ManagementBusPluginRemoteServiceImpl.RUN_SCRIPT_OUTPUT_PARAMETER_NAME);
}
} else {
ManagementBusPluginRemoteServiceImpl.LOG.warn("Result of type {} not supported. The bus should return a HashMap as result class when it is used as input.",
result.getClass());
}
}
/**
* @param csarID
* @param serviceTemplateID
* @param nodeTypeID
* @param nodeTemplateID
*
* @return mapping with DeploymentArtifact names and their paths.
*/
private String createDANamePathMapEnvVar(final CSARID csarID, final QName serviceTemplateID, final QName nodeTypeID,
final String nodeTemplateID) {
ManagementBusPluginRemoteServiceImpl.LOG.debug("Checking if NodeTemplate {} has DAs...", nodeTemplateID);
final HashMap<String, List<String>> daNameReferenceMapping = new HashMap<>();
final QName nodeTemplateQName = new QName(serviceTemplateID.getNamespaceURI(), nodeTemplateID);
final ResolvedArtifacts resolvedArtifacts =
ServiceHandler.toscaEngineService.getResolvedArtifactsOfNodeTemplate(csarID, nodeTemplateQName);
final List<ResolvedDeploymentArtifact> resolvedDAs = resolvedArtifacts.getDeploymentArtifacts();
List<String> daArtifactReferences;
for (final ResolvedDeploymentArtifact resolvedDA : resolvedDAs) {
daArtifactReferences = resolvedDA.getReferences();
for (final String daArtifactReference : daArtifactReferences) {
ManagementBusPluginRemoteServiceImpl.LOG.debug("Artifact reference for DA: {} found: {} .",
resolvedDA.getName(), daArtifactReference);
List<String> currentValue = daNameReferenceMapping.get(resolvedDA.getName());
if (currentValue == null) {
currentValue = new ArrayList<>();
daNameReferenceMapping.put(resolvedDA.getName(), currentValue);
}
currentValue.add(daArtifactReference);
}
}
final List<QName> nodeTypeImpls =
ServiceHandler.toscaEngineService.getNodeTypeImplementationsOfNodeType(csarID, nodeTypeID);
for (final QName nodeTypeImpl : nodeTypeImpls) {
final List<String> daNames =
ServiceHandler.toscaEngineService.getDeploymentArtifactNamesOfNodeTypeImplementation(csarID,
nodeTypeImpl);
for (final String daName : daNames) {
final QName daArtifactTemplate =
ServiceHandler.toscaEngineService.getArtifactTemplateOfADeploymentArtifactOfANodeTypeImplementation(csarID,
nodeTypeImpl,
daName);
daArtifactReferences =
ServiceHandler.toscaEngineService.getArtifactReferenceWithinArtifactTemplate(csarID,
daArtifactTemplate);
for (final String daArtifactReference : daArtifactReferences) {
ManagementBusPluginRemoteServiceImpl.LOG.debug("Artifact reference for DA: {} found: {} .", daName,
daArtifactReference);
List<String> currentValue = daNameReferenceMapping.get(daName);
if (currentValue == null) {
currentValue = new ArrayList<>();
daNameReferenceMapping.put(daName, currentValue);
}
currentValue.add(daArtifactReference);
}
}
}
String daEnvMap = "";
if (!daNameReferenceMapping.isEmpty()) {
ManagementBusPluginRemoteServiceImpl.LOG.debug("NodeTemplate {} has {} DAs.", nodeTemplateID,
daNameReferenceMapping.size());
daEnvMap += "DAs=\"";
for (final Entry<String, List<String>> da : daNameReferenceMapping.entrySet()) {
final String daName = da.getKey();
final List<String> daRefs = da.getValue();
for (String daRef : daRefs) {
// FIXME / is a brutal assumption
if (!daRef.startsWith("/")) {
daRef = "/" + daRef;
}
daEnvMap += daName + "," + daRef + ";";
}
}
daEnvMap += "\" ";
ManagementBusPluginRemoteServiceImpl.LOG.debug("Created DA-DANamePathMapEnvVar for NodeTemplate {} : {}",
nodeTemplateID, daEnvMap);
}
return daEnvMap;
}
/**
*
* Installs required and specified packages of the specified ArtifactType. Required packages are in
* defined the corresponding *.xml file.
*
* @param artifactType
* @param headers
*/
private void installPackages(final QName artifactType, final HashMap<String, Object> headers) {
final List<String> requiredPackages = ArtifactTypesHandler.getRequiredPackages(artifactType);
String requiredPackagesString = "";
if (!requiredPackages.isEmpty()) {
final HashMap<String, String> inputParamsMap = new HashMap<>();
for (final String requiredPackage : requiredPackages) {
requiredPackagesString += requiredPackage;
requiredPackagesString += " ";
}
inputParamsMap.put(Interfaces.OPENTOSCA_DECLARATIVE_INTERFACE_OPERATINGSYSTEM_PARAMETER_PACKAGENAMES,
requiredPackagesString);
ManagementBusPluginRemoteServiceImpl.LOG.debug("Installing packages: {} for ArtifactType: {} ",
requiredPackages, artifactType);
headers.put(MBHeader.OPERATIONNAME_STRING.toString(),
Interfaces.OPENTOSCA_DECLARATIVE_INTERFACE_OPERATINGSYSTEM_INSTALLPACKAGE);
invokeManagementBusEngine(inputParamsMap, headers);
} else {
ManagementBusPluginRemoteServiceImpl.LOG.debug("ArtifactType: {} needs no packages to install.",
requiredPackages, artifactType);
}
}
/**
*
* For transferring files to the target machine.
*
* @param csarID
* @param artifactTemplate
* @param source
* @param target
* @param headers
*/
private void transferFile(final CSARID csarID, final QName artifactTemplate, final String source,
final String target, final HashMap<String, Object> headers) {
final HashMap<String, String> inputParamsMap = new HashMap<>();
inputParamsMap.put(Interfaces.OPENTOSCA_DECLARATIVE_INTERFACE_OPERATINGSYSTEM_PARAMETER_TARGETABSOLUTPATH,
target);
inputParamsMap.put(Interfaces.OPENTOSCA_DECLARATIVE_INTERFACE_OPERATINGSYSTEM_PARAMETER_SOURCEURLORLOCALPATH,
source);
ManagementBusPluginRemoteServiceImpl.LOG.debug("Uploading file. Source: {} Target: {} ", source, target);
headers.put(MBHeader.OPERATIONNAME_STRING.toString(),
Interfaces.OPENTOSCA_DECLARATIVE_INTERFACE_OPERATINGSYSTEM_TRANSFERFILE);
ManagementBusPluginRemoteServiceImpl.LOG.debug("Invoking ManagementBus for transferFile with the following headers:");
for (final String key : headers.keySet()) {
if (headers.get(key) != null && headers.get(key) instanceof String) {
ManagementBusPluginRemoteServiceImpl.LOG.debug("Header: " + key + " Value: " + headers.get(key));
}
}
invokeManagementBusEngine(inputParamsMap, headers);
}
/**
*
* For running scripts on the target machine. Commands to be executed are defined in the
* corresponding *.xml file.
*
* @param commandsString
* @param headers
*/
private Object runScript(final String commandsString, final HashMap<String, Object> headers) {
final HashMap<String, String> inputParamsMap = new HashMap<>();
inputParamsMap.put(Interfaces.OPENTOSCA_DECLARATIVE_INTERFACE_OPERATINGSYSTEM_PARAMETER_SCRIPT, commandsString);
ManagementBusPluginRemoteServiceImpl.LOG.debug("RunScript: {} ", commandsString);
headers.put(MBHeader.OPERATIONNAME_STRING.toString(),
Interfaces.OPENTOSCA_DECLARATIVE_INTERFACE_OPERATINGSYSTEM_RUNSCRIPT);
ManagementBusPluginRemoteServiceImpl.LOG.debug("Invoking ManagementBus for runScript with the following headers:");
for (final String key : headers.keySet()) {
if (headers.get(key) != null && headers.get(key) instanceof String) {
ManagementBusPluginRemoteServiceImpl.LOG.debug("Header: " + key + " Value: " + headers.get(key));
}
}
return invokeManagementBusEngine(inputParamsMap, headers);
}
/**
 * Creates ArtifactType specific commands that should be executed on the target machine. Commands to
 * be executed are defined in the corresponding *.xml file.
 *
 * @param csarID the CSAR containing the ArtifactTemplate
 * @param artifactType the ArtifactType whose generic commands should be assembled
 * @param artifactTemplateID the ArtifactTemplate whose properties may provide placeholder values
 * @param params input parameters, either a {@code HashMap<String, String>} or a {@link Document}
 *
 * @return the created command string; {@code {{...}}} placeholders are replaced with property
 *         and/or input parameter values, unresolved placeholders are removed
 */
@SuppressWarnings("unchecked")
private String createArtifcatTypeSpecificCommandString(final CSARID csarID, final QName artifactType,
                                                       final QName artifactTemplateID, final Object params) {
    ManagementBusPluginRemoteServiceImpl.LOG.debug("Creating ArtifcatType specific command for artifactType {}:...",
        artifactType);
    // Chain the type-specific commands with "&&" so that a failing command aborts the sequence.
    // String.join produces the same result as the previous append-then-trim loop without the
    // endsWith(" && ") cleanup hack.
    String commandsString = String.join(" && ", ArtifactTypesHandler.getCommands(artifactType));
    ManagementBusPluginRemoteServiceImpl.LOG.debug("Defined generic command for ArtifactType {} : {} ",
        artifactType, commandsString);
    // replace placeholder with data from inputParams and/or instance data
    if (commandsString.contains("{{") && commandsString.contains("}}")) {
        ManagementBusPluginRemoteServiceImpl.LOG.debug("Replacing the placeholder of the generic command with properties data and/or provided input parameter...");
        HashMap<String, String> paramsMap = new HashMap<>();
        if (params instanceof HashMap) {
            paramsMap = (HashMap<String, String>) params;
        } else if (params instanceof Document) {
            final Document paramsDoc = (Document) params;
            paramsMap = MBUtils.docToMap(paramsDoc, true);
        }
        // ArtifactTemplate properties override/extend the provided input parameters.
        final Document propDoc =
            ServiceHandler.toscaEngineService.getPropertiesOfAArtifactTemplate(csarID, artifactTemplateID);
        if (propDoc != null) {
            paramsMap.putAll(MBUtils.docToMap(propDoc, true));
        }
        for (final Entry<String, String> prop : paramsMap.entrySet()) {
            commandsString = commandsString.replace("{{" + prop.getKey() + "}}", prop.getValue());
        }
        // delete not replaced placeholder
        commandsString = commandsString.replaceAll("\\{\\{.*?\\}\\}", "");
        ManagementBusPluginRemoteServiceImpl.LOG.debug("Generic command with replaced placeholder: {}",
            commandsString);
    }
    return commandsString;
}
/**
 * Builds a whitespace separated string of {@code key='value'} pairs from the given parameters.
 *
 * @param params input parameters, either a {@code HashMap<String, String>} or a {@link Document}
 * @return whitespace separated String with parameter keys and values (with a trailing space when
 *         at least one parameter is present)
 */
@SuppressWarnings("unchecked")
private String createParamsString(final Object params) {
    HashMap<String, String> paramsMap = new HashMap<>();
    if (params instanceof HashMap) {
        paramsMap = (HashMap<String, String>) params;
    } else if (params instanceof Document) {
        final Document paramsDoc = (Document) params;
        paramsMap = MBUtils.docToMap(paramsDoc, true);
    }
    // StringBuilder avoids the quadratic cost of repeated String concatenation in the loop.
    // NOTE(review): values containing single quotes are not escaped — assumed safe inputs; verify.
    final StringBuilder paramsString = new StringBuilder();
    for (final Entry<String, String> param : paramsMap.entrySet()) {
        paramsString.append(param.getKey()).append("='").append(param.getValue()).append("' ");
    }
    return paramsString.toString();
}
/**
 * Invokes the Management Bus IA engine with the given parameters and headers.
 *
 * @param paramsMap the input parameters for the invoked operation
 * @param headers the Camel message headers describing the invocation target
 * @return the response object returned by the Management Bus
 */
private Object invokeManagementBusEngine(final HashMap<String, String> paramsMap,
                                         final HashMap<String, Object> headers) {
    ManagementBusPluginRemoteServiceImpl.LOG.debug("Invoking the Management Bus...");
    final ProducerTemplate producer = Activator.camelContext.createProducerTemplate();
    final Object response = producer.requestBodyAndHeaders(
        "bean:org.opentosca.bus.management.service.IManagementBusService?method=invokeIA", paramsMap, headers);
    ManagementBusPluginRemoteServiceImpl.LOG.debug("Invocation finished: {}", response);
    return response;
}
@Override
public List<String> getSupportedTypes() {
    // Convert the registered ArtifactType QNames to their string representation.
    final List<QName> typeQNames = ArtifactTypesHandler.getSupportedTypes();
    final List<String> result = new ArrayList<>(typeQNames.size());
    for (final QName typeQName : typeQNames) {
        result.add(typeQName.toString());
    }
    return result;
}
}
|
// This file is part of OpenTSDB.
// This program is free software: you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 2.1 of the License, or (at your
// option) any later version. This program is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
// General Public License for more details.
package net.opentsdb.core;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import net.opentsdb.meta.Annotation;
import net.opentsdb.query.filter.TagVFilter;
import net.opentsdb.stats.QueryStats;
import net.opentsdb.stats.QueryStats.QueryStat;
import net.opentsdb.uid.UniqueId;
import net.opentsdb.utils.DateTime;
import org.hbase.async.Bytes.ByteMap;
import org.hbase.async.DeleteRequest;
import org.hbase.async.KeyValue;
import org.hbase.async.Scanner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
import com.stumbleupon.async.Callback;
import com.stumbleupon.async.Deferred;
/**
* A class that handles coordinating the various scanners created for each
* salt bucket when salting is enabled. Each scanner stores its results in
* local maps and once everyone has reported in, then the maps are parsed and
* combined into a proper set of spans to return to the {@link TsdbQuery} class.
*
* Note that if one or more of the scanners throws an exception, then that
* exception will be returned to the caller in the deferred. Unfortunately we
* don't have a good way to cancel a scan in progress so the first scanner with
* an error will store it, then we wait for all of the other scanners to
* complete.
*
* Concurrency is important in this class as the scanners are executing
* asynchronously and can modify variables at any time.
*/
public class SaltScanner {
private static final Logger LOG = LoggerFactory.getLogger(SaltScanner.class);
/** This is a map that the caller must supply. We'll fill it with data.
* WARNING: The salted row comparator should be applied to this map. */
private final TreeMap<byte[], Span> spans;
/** The list of pre-configured scanners. One scanner should be created per
* salt bucket. */
private final List<Scanner> scanners;
/** Stores the compacted columns from each scanner as it completes. After all
* scanners are done, we process this into the span map above. */
private final Map<Integer, List<KeyValue>> kv_map =
new ConcurrentHashMap<Integer, List<KeyValue>>();
/** Stores annotations from each scanner as it completes */
private final Map<byte[], List<Annotation>> annotation_map =
Collections.synchronizedMap(
new TreeMap<byte[], List<Annotation>>(new RowKey.SaltCmp()));
/** A deferred to call with the spans on completion */
private final Deferred<TreeMap<byte[], Span>> results =
new Deferred<TreeMap<byte[], Span>>();
/** The metric this scanner set is dealing with. If a row comes in with a
* different metric we toss an exception. This shouldn't happen though. */
private final byte[] metric;
/** The TSDB to which we belong */
private final TSDB tsdb;
/** A stats object associated with the sub query used for storing stats
* about scanner operations. */
private final QueryStats query_stats;
/** Index of the sub query in the main query list */
private final int query_index;
/** A counter used to determine how many scanners are still running */
private AtomicInteger completed_tasks = new AtomicInteger();
/** When the scanning started. We store the scan latency once all scanners
* are done.*/
private long start_time; // milliseconds.
/** Whether or not to delete the queried data */
private final boolean delete;
/** A list of filters to iterate over when processing rows */
private final List<TagVFilter> filters;
/** A holder for storing the first exception thrown by a scanner if something
* goes pear shaped. Make sure to synchronize on this object when checking
* for null or assigning from a scanner's callback. */
private volatile Exception exception;
/**
* Convenience ctor that delegates to the full ctor with deletion disabled
* and no query stats.
* @param tsdb The TSDB to which we belong
* @param metric The metric expected in every scanned row key
* @param scanners One pre-configured scanner per salt bucket
* @param spans The caller-supplied, empty map to fill with results
* @param filters Optional tag value filters to apply while scanning
*/
public SaltScanner(final TSDB tsdb, final byte[] metric,
final List<Scanner> scanners,
final TreeMap<byte[], Span> spans,
final List<TagVFilter> filters) {
this(tsdb, metric, scanners, spans, filters, false, null, 0);
}
/**
* Full ctor that validates all arguments before storing them.
* @param tsdb The TSDB to which we belong
* @param metric The metric expected in every scanned row key
* @param scanners One pre-configured scanner per salt bucket
* @param spans The caller-supplied, empty map to fill with results
* @param filters Optional tag value filters to apply while scanning
* @param delete Whether to issue a delete request for every processed row
* @param query_stats Optional stats object for recording per-scanner metrics
* @param query_index Index of the sub query in the main query list
* @throws IllegalArgumentException if salting is disabled, an argument is
* null or invalid, or the scanner count doesn't match the salt bucket count
*/
public SaltScanner(final TSDB tsdb, final byte[] metric,
final List<Scanner> scanners,
final TreeMap<byte[], Span> spans,
final List<TagVFilter> filters,
final boolean delete,
final QueryStats query_stats,
final int query_index) {
if (Const.SALT_WIDTH() < 1) {
throw new IllegalArgumentException(
"Salting is disabled. Use the regular scanner");
}
if (tsdb == null) {
throw new IllegalArgumentException("The TSDB argument was null.");
}
if (spans == null) {
throw new IllegalArgumentException("Span map cannot be null.");
}
if (!spans.isEmpty()) {
throw new IllegalArgumentException("The span map should be empty.");
}
if (scanners == null || scanners.isEmpty()) {
throw new IllegalArgumentException("Missing or empty scanners list. "
+ "Please provide a list of scanners for each salt.");
}
if (scanners.size() != Const.SALT_BUCKETS()) {
throw new IllegalArgumentException("Not enough or too many scanners " +
scanners.size() + " when the salt bucket count is " +
Const.SALT_BUCKETS());
}
if (metric == null) {
throw new IllegalArgumentException("The metric array was null.");
}
if (metric.length != TSDB.metrics_width()) {
throw new IllegalArgumentException("The metric was too short. It must be "
+ TSDB.metrics_width() + "bytes wide.");
}
this.scanners = scanners;
this.spans = spans;
this.metric = metric;
this.tsdb = tsdb;
this.filters = filters;
this.delete = delete;
this.query_stats = query_stats;
this.query_index = query_index;
}
/**
* Starts all of the scanners asynchronously and returns the data fetched
* once all of the scanners have completed. Note that the result may be an
* exception if one or more of the scanners encountered an exception. The
* first error will be returned, others will be logged.
* @return A deferred to wait on for results.
*/
public Deferred<TreeMap<byte[], Span>> scan() {
start_time = System.currentTimeMillis();
int i = 0;
// Kick off one asynchronous callback chain per salt-bucket scanner.
for (final Scanner scanner: scanners) {
new ScannerCB(scanner, i++).scan();
}
return results;
}
/**
* Called once all of the scanners have reported back in to record our
* latency and merge the results into the spans map. If there was an exception
* stored then we'll return that instead.
*/
private void mergeAndReturnResults() {
final long hbase_time = System.currentTimeMillis();
TsdbQuery.scanlatency.add((int)(hbase_time - start_time));
long rows = 0; // number of compacted rows merged into spans, for debug logging
if (exception != null) {
LOG.error("After all of the scanners finished, at "
+ "least one threw an exception", exception);
results.callback(exception);
return;
}
// Merge sorted spans together
final long merge_start = DateTime.nanoTime();
for (final List<KeyValue> kvs : kv_map.values()) {
if (kvs == null || kvs.isEmpty()) {
LOG.warn("Found a key value list that was null or empty");
continue;
}
for (final KeyValue kv : kvs) {
if (kv == null) {
LOG.warn("Found a key value item that was null");
continue;
}
if (kv.key() == null) {
LOG.warn("A key for a kv was null");
continue;
}
Span datapoints = spans.get(kv.key());
if (datapoints == null) {
datapoints = new Span(tsdb);
spans.put(kv.key(), datapoints);
}
// Attach (and consume) any annotations collected for this row key.
if (annotation_map.containsKey(kv.key())) {
for (final Annotation note: annotation_map.get(kv.key())) {
datapoints.getAnnotations().add(note);
}
annotation_map.remove(kv.key());
}
try {
datapoints.addRow(kv);
rows++;
} catch (RuntimeException e) {
LOG.error("Exception adding row to span", e);
throw e;
}
}
}
kv_map.clear();
// Any remaining annotations belong to rows without data points; still surface them.
for (final byte[] key : annotation_map.keySet()) {
Span datapoints = spans.get(key);
if (datapoints == null) {
datapoints = new Span(tsdb);
spans.put(key, datapoints);
}
for (final Annotation note: annotation_map.get(key)) {
datapoints.getAnnotations().add(note);
}
}
if (query_stats != null) {
query_stats.addStat(query_index, QueryStat.SCANNER_MERGE_TIME,
(DateTime.nanoTime() - merge_start));
}
if (LOG.isDebugEnabled()) {
LOG.debug("Scanning completed in " + (hbase_time - start_time) + " ms, " +
rows + " rows, and stored in " + spans.size() + " spans");
LOG.debug("It took " + (System.currentTimeMillis() - hbase_time) + " ms, "
+ " to merge and sort the rows into a tree map");
}
results.callback(spans);
}
/**
* Scanner callback executed recursively each time we get a set of data
* from storage. This is responsible for determining what columns are
* returned and issuing requests to load leaf objects.
* When the scanner returns a null set of rows, the method initiates the
* final callback.
*/
final class ScannerCB implements Callback<Object,
ArrayList<ArrayList<KeyValue>>> {
private final Scanner scanner;
private final int index;
// Compacted columns accumulated by this scanner, handed off on close(true).
private final List<KeyValue> kvs = new ArrayList<KeyValue>();
// Annotations accumulated by this scanner, keyed on row key.
private final ByteMap<List<Annotation>> annotations =
new ByteMap<List<Annotation>>();
// TSUIDs already known to fail the filters (skip without re-resolving UIDs).
private final Set<String> skips = Collections.newSetFromMap(
new ConcurrentHashMap<String, Boolean>());
// TSUIDs already known to pass the filters.
private final Set<String> keepers = Collections.newSetFromMap(
new ConcurrentHashMap<String, Boolean>());
private long scanner_start = -1;
/** nanosecond timestamps */
private long fetch_start = 0; // reset each time we send an RPC to HBase
private long fetch_time = 0; // cumulation of time waiting on HBase
private long uid_resolve_time = 0; // cumulation of time resolving UIDs
private long uids_resolved = 0;
private long compaction_time = 0; // cumulation of time compacting
private long dps_pre_filter = 0;
private long rows_pre_filter = 0;
private long dps_post_filter = 0;
private long rows_post_filter = 0;
/**
* Ctor that stores the scanner and its salt-bucket index and, when stats
* are enabled, registers the scanner with the query stats object.
* @param scanner The scanner for this callback's salt bucket
* @param index The salt-bucket index of the scanner
*/
public ScannerCB(final Scanner scanner, final int index) {
this.scanner = scanner;
this.index = index;
if (query_stats != null) {
query_stats.addScannerId(query_index, index, scanner.toString());
}
}
/** Error callback that will capture an exception from AsyncHBase and store
* it so we can bubble it up to the caller.
*/
class ErrorCb implements Callback<Object, Exception> {
@Override
public Object call(final Exception e) throws Exception {
LOG.error("Scanner " + scanner + " threw an exception", e);
close(false);
handleException(e);
return null;
}
}
/**
* Starts the scanner and is called recursively to fetch the next set of
* rows from the scanner.
* @return The map of spans if loaded successfully, null if no data was
* found
*/
public Object scan() {
if (scanner_start < 0) {
scanner_start = DateTime.nanoTime();
}
fetch_start = DateTime.nanoTime();
return scanner.nextRows().addCallback(this).addErrback(new ErrorCb());
}
/**
* Iterate through each row of the scanner results, parses out data
* points (and optional meta data).
* @return null if no rows were found, otherwise the TreeMap with spans
*/
@Override
public Object call(final ArrayList<ArrayList<KeyValue>> rows)
throws Exception {
try {
fetch_time += DateTime.nanoTime() - fetch_start;
if (rows == null) {
// Scanner exhausted: finalize this bucket's results.
close(true);
return null;
} else if (exception != null) {
close(false);
// don't need to handleException here as it's already taken care of
// due to the fact that exception was set.
if (LOG.isDebugEnabled()) {
LOG.debug("Closing scanner as there was an exception: " + scanner);
}
return null;
}
// used for UID resolution if a filter is involved
final List<Deferred<Object>> lookups =
filters != null && !filters.isEmpty() ?
new ArrayList<Deferred<Object>>(rows.size()) : null;
rows_pre_filter += rows.size();
for (final ArrayList<KeyValue> row : rows) {
final byte[] key = row.get(0).key();
if (RowKey.rowKeyContainsMetric(metric, key) != 0) {
close(false);
handleException(new IllegalDataException(
"HBase returned a row that doesn't match"
+ " our scanner (" + scanner + ")! " + row + " does not start"
+ " with " + Arrays.toString(metric) + " on scanner " + this));
return null;
}
// calculate estimated data point count. We don't want to deserialize
// the byte arrays so we'll just get a rough estimate of compacted
// columns.
for (final KeyValue kv : row) {
if (kv.qualifier().length % 2 == 0) {
if (kv.qualifier().length == 2 || kv.qualifier().length == 4) {
++dps_pre_filter;
} else {
// for now we'll assume that all compacted columns are of the
// same precision. This is likely incorrect.
if (Internal.inMilliseconds(kv.qualifier())) {
dps_pre_filter += (kv.qualifier().length / 4);
} else {
dps_pre_filter += (kv.qualifier().length / 2);
}
}
} else if (kv.qualifier()[0] == AppendDataPoints.APPEND_COLUMN_PREFIX) {
// with appends we don't have a good rough estimate as the length
// can vary widely with the value length variability. Therefore we
// have to iterate.
int idx = 0;
int qlength = 0;
while (idx < kv.value().length) {
qlength = Internal.getQualifierLength(kv.value(), idx);
idx += qlength + Internal.getValueLengthFromQualifier(kv.value(), idx);
++dps_pre_filter;
}
}
}
// If any filters have made it this far then we need to resolve
// the row key UIDs to their names for string comparison. We'll
// try to avoid the resolution with some sets but we may dupe
// resolve a few times.
// TODO - more efficient resolution
// TODO - byte set instead of a string for the uid may be faster
if (filters != null && !filters.isEmpty()) {
lookups.clear();
final String tsuid =
UniqueId.uidToString(UniqueId.getTSUIDFromKey(key,
TSDB.metrics_width(), Const.TIMESTAMP_BYTES));
if (skips.contains(tsuid)) {
continue;
}
if (!keepers.contains(tsuid)) {
final long uid_start = DateTime.nanoTime();
/** CB to be called after all of the UIDs have been resolved */
class MatchCB implements Callback<Object, ArrayList<Boolean>> {
@Override
public Object call(final ArrayList<Boolean> matches)
throws Exception {
for (final boolean matched : matches) {
if (!matched) {
skips.add(tsuid);
return null;
}
}
// matched all, good data
keepers.add(tsuid);
processRow(key, row);
return null;
}
}
/** Resolves all of the row key UIDs to their strings for filtering */
class GetTagsCB implements
Callback<Deferred<ArrayList<Boolean>>, Map<String, String>> {
@Override
public Deferred<ArrayList<Boolean>> call(
final Map<String, String> tags) throws Exception {
uid_resolve_time += (DateTime.nanoTime() - uid_start);
uids_resolved += tags.size();
final List<Deferred<Boolean>> matches =
new ArrayList<Deferred<Boolean>>(filters.size());
for (final TagVFilter filter : filters) {
matches.add(filter.match(tags));
}
return Deferred.group(matches);
}
}
lookups.add(Tags.getTagsAsync(tsdb, key)
.addCallbackDeferring(new GetTagsCB())
.addBoth(new MatchCB()));
} else {
processRow(key, row);
}
} else {
processRow(key, row);
}
}
// either we need to wait on the UID resolutions or we can go ahead
// if we don't have filters.
if (lookups != null && lookups.size() > 0) {
class GroupCB implements Callback<Object, ArrayList<Object>> {
@Override
public Object call(final ArrayList<Object> group) throws Exception {
return scan();
}
}
return Deferred.group(lookups).addCallback(new GroupCB());
} else {
return scan();
}
} catch (final RuntimeException e) {
LOG.error("Unexpected exception on scanner " + this, e);
close(false);
handleException(e);
return null;
}
}
/**
* Finds or creates the span for this row, compacts it and stores it. Also
* fires off a delete request for the row if told to.
* @param key The row key to use for fetching the span
* @param row The row to add
*/
void processRow(final byte[] key, final ArrayList<KeyValue> row) {
++rows_post_filter;
if (delete) {
final DeleteRequest del = new DeleteRequest(tsdb.dataTable(), key);
tsdb.getClient().delete(del);
}
// calculate estimated data point count. We don't want to deserialize
// the byte arrays so we'll just get a rough estimate of compacted
// columns.
for (final KeyValue kv : row) {
if (kv.qualifier().length % 2 == 0) {
if (kv.qualifier().length == 2 || kv.qualifier().length == 4) {
++dps_post_filter;
} else {
// for now we'll assume that all compacted columns are of the
// same precision. This is likely incorrect.
if (Internal.inMilliseconds(kv.qualifier())) {
dps_post_filter += (kv.qualifier().length / 4);
} else {
dps_post_filter += (kv.qualifier().length / 2);
}
}
} else if (kv.qualifier()[0] == AppendDataPoints.APPEND_COLUMN_PREFIX) {
// with appends we don't have a good rough estimate as the length
// can vary widely with the value length variability. Therefore we
// have to iterate.
int idx = 0;
int qlength = 0;
while (idx < kv.value().length) {
qlength = Internal.getQualifierLength(kv.value(), idx);
idx += qlength + Internal.getValueLengthFromQualifier(kv.value(), idx);
++dps_post_filter;
}
}
}
final KeyValue compacted;
// catch IllegalDataExceptions from compaction so we can close
// the scanner
final long compaction_start = DateTime.nanoTime();
try {
final List<Annotation> notes = Lists.newArrayList();
compacted = tsdb.compact(row, notes);
if (!notes.isEmpty()) {
synchronized (annotations) {
List<Annotation> map_notes = annotations.get(key);
if (map_notes == null) {
annotations.put(key, notes);
} else {
map_notes.addAll(notes);
}
}
}
} catch (IllegalDataException idex) {
compaction_time += (DateTime.nanoTime() - compaction_start);
close(false);
handleException(idex);
return;
}
compaction_time += (DateTime.nanoTime() - compaction_start);
if (compacted != null) { // Can be null if we ignored all KVs.
kvs.add(compacted);
}
}
/**
* Closes the scanner and sets the various stats after filtering
* @param ok Whether or not the scanner closed with an exception or
* closed due to natural causes (e.g. ran out of data or we wanted to stop
* it early)
*/
void close(final boolean ok) {
scanner.close();
if (query_stats != null) {
query_stats.addScannerStat(query_index, index, QueryStat.SCANNER_TIME,
DateTime.nanoTime() - scanner_start);
// Scanner Stats
/* Uncomment when AsyncHBase has this feature:
query_stats.addScannerStat(query_index, index,
QueryStat.ROWS_FROM_STORAGE, scanner.getRowsFetched());
query_stats.addScannerStat(query_index, index,
QueryStat.COLUMNS_FROM_STORAGE, scanner.getColumnsFetched());
query_stats.addScannerStat(query_index, index,
QueryStat.BYTES_FROM_STORAGE, scanner.getBytesFetched()); */
query_stats.addScannerStat(query_index, index,
QueryStat.HBASE_TIME, fetch_time);
query_stats.addScannerStat(query_index, index,
QueryStat.SUCCESSFUL_SCAN, ok ? 1 : 0);
// Post Scan stats
query_stats.addScannerStat(query_index, index,
QueryStat.ROWS_PRE_FILTER, rows_pre_filter);
query_stats.addScannerStat(query_index, index,
QueryStat.DPS_PRE_FILTER, dps_pre_filter);
query_stats.addScannerStat(query_index, index,
QueryStat.ROWS_POST_FILTER, rows_post_filter);
query_stats.addScannerStat(query_index, index,
QueryStat.DPS_POST_FILTER, dps_post_filter);
query_stats.addScannerStat(query_index, index,
QueryStat.SCANNER_UID_TO_STRING_TIME, uid_resolve_time);
query_stats.addScannerStat(query_index, index,
QueryStat.UID_PAIRS_RESOLVED, uids_resolved);
query_stats.addScannerStat(query_index, index,
QueryStat.COMPACTION_TIME, compaction_time);
}
if (ok && exception == null) {
validateAndTriggerCallback(kvs, annotations);
} else {
completed_tasks.incrementAndGet();
}
}
}
/**
* Called each time a scanner completes with valid or empty data.
* @param kvs The compacted columns fetched by the scanner
* @param annotations The annotations fetched by the scanners
*/
private void validateAndTriggerCallback(final List<KeyValue> kvs,
final Map<byte[], List<Annotation>> annotations) {
final int tasks = completed_tasks.incrementAndGet();
if (kvs.size() > 0) {
kv_map.put(tasks, kvs);
}
for (final byte[] key : annotations.keySet()) {
final List<Annotation> notes = annotations.get(key);
if (notes.size() > 0) {
// Optimistic write, expecting unique row keys
// NOTE(review): a duplicate row key across buckets would overwrite an
// earlier list here — assumed not to happen since salt buckets are
// disjoint; confirm.
annotation_map.put(key, notes);
}
}
// Once every salt bucket has reported in, merge and fire the deferred.
if (tasks >= Const.SALT_BUCKETS()) {
try {
mergeAndReturnResults();
} catch (final Exception ex) {
results.callback(ex);
}
}
}
/**
* If one or more of the scanners throws an exception then we should close it
* and pass the exception here so that we can catch and return it to the
* caller. If all of the scanners have finished, this will callback to the
* caller immediately.
* @param e The exception to store.
*/
private void handleException(final Exception e) {
completed_tasks.incrementAndGet();
// make sure only one scanner can set the exception: double-checked on the
// volatile field, with the monitor guarding the assignment.
if (exception == null) {
synchronized (this) {
if (exception == null) {
exception = e;
// fail once and fast on the first scanner to throw an exception
try {
mergeAndReturnResults();
} catch (Exception ex) {
LOG.error("Failed merging and returning results, "
+ "calling back with exception", ex);
results.callback(ex);
}
} else {
// TODO - it would be nice to close and cancel the other scanners but
// for now we have to wait for them to finish and/or throw exceptions.
LOG.error("Another scanner threw an exception", e);
}
}
}
}
}
|
package org.xwiki.test.junit5.mockito;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Parameter;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Optional;
import javax.inject.Named;
import org.apache.commons.lang3.StringUtils;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ExtensionContext.Namespace;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;
import org.junit.jupiter.api.extension.TestInstances;
import org.mockito.MockitoAnnotations;
import org.xwiki.component.annotation.ComponentAnnotationLoader;
import org.xwiki.component.descriptor.ComponentDescriptor;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.component.util.ReflectionUtils;
import org.xwiki.test.mockito.MockitoComponentManager;
import org.xwiki.test.mockito.MockitoComponentMocker;
/**
* JUnit5 extension to help write unit tests for XWiki Components.
* <p>
* For example:
*
* <pre>
* {@code
* @ComponentTest
* @ComponentList({
* Component3Impl.class
* })
* public class MyComponentTest
* {
* @Mock
* private List<String> list;
*
* @MockComponent
* private Component1Role component1;
*
* @InjectMocks
* @InjectMockComponents
* private Component4Impl component4;
*
* @InjectComponentManager
* private MockitoComponentManager componentManager;
*
* @BeforeEach
* public void before(MockitoComponentManager componentManager)
* {
* ...
* }
*
* @Test
* public void test1(MockitoComponentManager componentManager)
* {
* ...
* }
*
* @Test
* public void test2(ComponentManager componentManager)
* {
* ...
* }
* ...
* }
* }
* </pre>
*
* @version $Id$
* @since 10.3RC1
*/
public class MockitoComponentManagerExtension implements BeforeEachCallback, AfterEachCallback, ParameterResolver
{
private static final ComponentAnnotationLoader LOADER = new ComponentAnnotationLoader();
@Override
public void beforeEach(ExtensionContext context) throws Exception
{
    // Initialize every test instance, including nested test classes, when present.
    TestInstances instances = context.getTestInstances().orElse(null);
    if (instances != null) {
        for (Object instance : instances.getAllInstances()) {
            initializeTestInstance(instance, context);
        }
    }
}
/**
 * Initializes a single test instance: creates the Mockito Component Manager, injects it where
 * requested, registers mock components, initializes the component manager and finally injects the
 * components under test and the Mockito mocks.
 *
 * @param testInstance the test class instance to initialize
 * @param context the JUnit extension context, used to store the created component manager
 * @throws Exception in case of an initialization error
 */
private void initializeTestInstance(Object testInstance, ExtensionContext context) throws Exception
{
// Make sure tests don't leak one on another
removeComponentManager(context);
// We initialize the CM in 4 steps:
// - We create an empty instance of it
// - We inject the component manager in the @InjectComponentManager annotated fields
// - We create mocks for all @MockComponent annotations.
// - We initialize the CM. This handles in the following order:
// - @BeforeComponent
// - @BeforeComponent("<testname>")
// - @ComponentList and @AllComponents
// - @AfterComponent
// - @AfterComponent("<testname>")
// - We inject @InjectMockComponents fields
// Note: We handle @MockComponent before @InjectMockComponents to allow the test to have methods annotated with
// @BeforeComponent which can configure mocks defined with @MockComponent annotations, so that when
// @InjectMockComponents component are injected, if they implement Initializable, the test can have prepared
// any component setup so that the call to initialize() will work fine.
// Note: We initialize the CM after handling @MockComponent so that it's possible use mocks injected with
// @MockComponent inside @BeforeComponent and @AfterComponent methods.
// NOTE(review): the loaded component manager is discarded here right after removal — presumably a
// leftover call; verify it has no needed side effect.
loadComponentManager(context);
MockitoComponentManager mcm = new MockitoComponentManager();
saveComponentManager(context, mcm);
// Inject the Mockito Component Manager in all fields annotated with @InjectComponentManager
for (Field field : ReflectionUtils.getAllFields(testInstance.getClass())) {
if (field.isAnnotationPresent(InjectComponentManager.class)) {
ReflectionUtils.setFieldValue(testInstance, field.getName(), mcm);
}
}
// Register a mock component for all fields annotated with @MockComponent
for (Field field : ReflectionUtils.getAllFields(testInstance.getClass())) {
if (field.isAnnotationPresent(MockComponent.class)) {
// Get the hint from the @Named annotation (if any)
Named namedAnnotation = field.getAnnotation(Named.class);
Object mockComponent;
if (namedAnnotation != null) {
mockComponent = mcm.registerMockComponent(field.getGenericType(), namedAnnotation.value());
} else {
mockComponent = mcm.registerMockComponent(field.getGenericType());
}
ReflectionUtils.setFieldValue(testInstance, field.getName(), mockComponent);
}
}
initializeMockitoComponentManager(testInstance, mcm, context);
// Create & register a component instance of all fields annotated with @InjectMockComponents with all its
// @Inject-annotated fields injected with mocks or real implementations.
for (Field field : ReflectionUtils.getAllFields(testInstance.getClass())) {
InjectMockComponents annotation = field.getAnnotation(InjectMockComponents.class);
if (annotation != null) {
processInjectMockComponents(testInstance, field, annotation, mcm);
}
}
// Make sure this is executed last since if we want to combine it with @InjectMockComponents annotation, we
// need the field to be non-null when this line executes or otherwise Mockito will not inject anything...
// Also note that all fields annotated with @InjectMocks will have their fields replaced by all mocks found
// in the test class.
MockitoAnnotations.initMocks(testInstance);
}
/**
 * Creates and registers the component under test for a field annotated with
 * {@code @InjectMockComponents}, with its dependencies replaced by mocks registered in the given
 * component manager, and injects it into the test instance.
 *
 * @param testInstance the test instance whose field is injected
 * @param field the field annotated with {@code @InjectMockComponents}
 * @param annotation the annotation instance (its {@code role} attribute disambiguates several roles)
 * @param mcm the Mockito Component Manager in which mocks and the component are registered
 * @throws Exception if the field type is an interface or the component cannot be created
 */
protected void processInjectMockComponents(Object testInstance, Field field, InjectMockComponents annotation,
    MockitoComponentManager mcm) throws Exception
{
    // Must not be an interface: the field holds the concrete implementation under test.
    if (field.getType().isInterface()) {
        // Bug fix: the field name and annotation name arguments were swapped, producing a message
        // like "The type of the field [InjectMockComponents] annotated with @myField ...".
        throw new Exception(String.format("The type of the field [%s] annotated with @%s cannot be an interface.",
            field.getName(), InjectMockComponents.class.getSimpleName()));
    }
    // Find Component descriptors
    List<ComponentDescriptor<?>> descriptors = LOADER.getComponentsDescriptors(field.getType());
    ComponentDescriptor<?> descriptor = getDescriptor(annotation.role(), descriptors, field);
    MockitoComponentMocker<?> mocker =
        new MockitoComponentMocker<>(mcm, field.getType(), descriptor.getRoleType(), descriptor.getRoleHint());
    mocker.mockComponent(testInstance);
    Object component = mcm.getInstance(descriptor.getRoleType(), descriptor.getRoleHint());
    ReflectionUtils.setFieldValue(testInstance, field.getName(), component);
}
/**
 * To be overridden by extensions if they need to perform additional initializations.
 *
 * @param testInstance the test instance being initialized
 * @param mcm the already created (but not initialized) Mockito Component Manager
 * @param context the extension context
 * @throws Exception if the initialization fails
 */
protected void initializeMockitoComponentManager(Object testInstance, MockitoComponentManager mcm,
    ExtensionContext context)
    throws Exception
{
    // Only initialize when running inside an actual test method (not, e.g., a container context).
    Method currentTestMethod = context.getTestMethod().orElse(null);
    if (currentTestMethod != null) {
        mcm.initializeTest(testInstance, currentTestMethod, mcm);
    }
}
@Override
public void afterEach(ExtensionContext extensionContext)
{
    // Dispose the component manager created for this test, if any was stored.
    MockitoComponentManager componentManager = loadComponentManager(extensionContext);
    if (componentManager != null) {
        componentManager.dispose();
    }
}
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
    throws ParameterResolutionException
{
    // Resolve any parameter that can accept a ComponentManager (or a subtype such as
    // MockitoComponentManager).
    Class<?> parameterType = parameterContext.getParameter().getType();
    return ComponentManager.class.isAssignableFrom(parameterType);
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
    throws ParameterResolutionException
{
    // Hand out the component manager stored for the current test class.
    MockitoComponentManager componentManager = loadComponentManager(extensionContext);
    return componentManager;
}
/**
 * Finds the component descriptor matching the given role class (or the {@code @Named} hint on the
 * field) among the descriptors declared by the component implementation.
 *
 * @param role the role class from the {@code @InjectMockComponents} annotation
 *            ({@code InjectMockComponents.class} itself means "not specified")
 * @param descriptors the candidate descriptors of the field's type
 * @param field the field annotated with {@code @InjectMockComponents}
 * @return the matching descriptor
 * @throws Exception if no descriptor matches or if disambiguation is required
 */
private ComponentDescriptor<?> getDescriptor(Class<?> role, List<ComponentDescriptor<?>> descriptors, Field field)
    throws Exception
{
    // When the role is InjectMockComponents.class it means that no role has been set by the user, see the
    // InjectMockComponents javadoc.
    // For disambiguation we support 2 ways:
    // - specify a role class in the @InjectMockComponents annotation
    // - specify a role string in the @Named annotation
    if (!isRolePresent(role) && !field.isAnnotationPresent(Named.class)) {
        if (descriptors.isEmpty()) {
            // Does not make sense to ask for the descriptor of a class which does not have any associated
            // descriptor.
            throw new Exception(
                String.format("The component under field [%s] is not implementing any role.", field.getName()));
        } else if (descriptors.size() > 1) {
            // Note that we can have several descriptors if the component has one role but several hints. In this
            // case we can just take the first descriptor since it won't matter.
            if (areRolesIdentical(descriptors)) {
                return descriptors.get(0);
            } else {
                // Force user to specify a role in case of several
                throw new Exception(String.format(
                    "The component under field [%s] is implementing several roles ([%s]). "
                        + "Please disambiguate by using the \"role\" parameter of the @%s annotation.",
                    field.getName(),
                    StringUtils.join(descriptors, ','),
                    InjectMockComponents.class.getSimpleName()));
            }
        } else {
            return descriptors.get(0);
        }
    } else {
        for (ComponentDescriptor<?> descriptor : descriptors) {
            if (isRolePresent(role)) {
                Class<?> roleClass = ReflectionUtils.getTypeClass(descriptor.getRoleType());
                if (roleClass.equals(role)) {
                    return descriptor;
                }
            } else {
                String roleHint = field.getAnnotation(Named.class).value();
                if (descriptor.getRoleHint().equals(roleHint)) {
                    return descriptor;
                }
            }
        }
        // Bug fix: the annotation name and field name arguments were swapped, so the message showed
        // the field name inside the "@%s annotation" placeholder and vice versa.
        throw new Exception(String.format(
            "The role type specified in the @%s annotation for field [%s] isn't implemented by the component.",
            InjectMockComponents.class.getSimpleName(), field.getName()));
    }
}
private boolean areRolesIdentical(List<ComponentDescriptor<?>> descriptors)
{
boolean areSame = true;
Type type = null;
for (ComponentDescriptor descriptor : descriptors) {
if (type != null && !type.equals(descriptor.getRoleType())) {
areSame = false;
break;
} else if (type == null) {
type = descriptor.getRoleType();
}
}
return areSame;
}
private boolean isRolePresent(Class<?> role)
{
return !role.equals(InjectMockComponents.class);
}
protected MockitoComponentManager loadComponentManager(ExtensionContext context)
{
ExtensionContext.Store store = getGlobalRootStore(context);
Class<?> testClass = context.getRequiredTestClass();
return store.get(testClass, MockitoComponentManager.class);
}
private void removeComponentManager(ExtensionContext context)
{
ExtensionContext.Store store = getGlobalRootStore(context);
Class<?> testClass = context.getRequiredTestClass();
store.remove(testClass);
}
private void saveComponentManager(ExtensionContext context, MockitoComponentManager componentManager)
{
ExtensionContext.Store store = getGlobalRootStore(context);
Class<?> testClass = context.getRequiredTestClass();
store.put(testClass, componentManager);
}
private static ExtensionContext.Store getGlobalRootStore(ExtensionContext context)
{
return context.getRoot().getStore(Namespace.GLOBAL);
}
}
|
package hex.tree.drf;
import hex.Distribution;
import hex.ModelCategory;
import hex.schemas.DRFV3;
import hex.tree.*;
import hex.tree.DTree.DecidedNode;
import hex.tree.DTree.LeafNode;
import hex.tree.DTree.UndecidedNode;
import water.AutoBuffer;
import water.Job;
import water.Key;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.util.Log;
import water.util.Timer;
import java.util.Arrays;
import java.util.Random;
import static hex.genmodel.GenModel.getPrediction;
import static hex.tree.drf.TreeMeasuresCollector.asSSE;
import static hex.tree.drf.TreeMeasuresCollector.asVotes;
/** Distributed Random Forest (DRF).
 *
 *  Based on "Elements of Statistical Learning, Second Edition" (random forests).
 *  Note: the previous header said "Gradient Boosted Trees" - copied from the GBM source.
 */
public class DRF extends SharedTree<hex.tree.drf.DRFModel, hex.tree.drf.DRFModel.DRFParameters, hex.tree.drf.DRFModel.DRFOutput> {
protected int _mtry;
@Override public ModelCategory[] can_build() {
return new ModelCategory[]{
ModelCategory.Regression,
ModelCategory.Binomial,
ModelCategory.Multinomial,
};
}
@Override public BuilderVisibility builderVisibility() { return BuilderVisibility.Stable; };
// Called from an http request
public DRF( hex.tree.drf.DRFModel.DRFParameters parms) { super("DRF",parms); init(false); }
@Override public DRFV3 schema() { return new DRFV3(); }
/** Start the DRF training Job on an F/J thread.
* @param work
* @param restartTimer*/
@Override public Job<hex.tree.drf.DRFModel> trainModelImpl(long work, boolean restartTimer) {
return start(new DRFDriver(), work, restartTimer);
}
/** Initialize the ModelBuilder, validating all arguments and preparing the
* training frame. This call is expected to be overridden in the subclasses
* and each subclass will start with "super.init();". This call is made
* by the front-end whenever the GUI is clicked, and needs to be fast;
* heavy-weight prep needs to wait for the trainModel() call.
*/
@Override public void init(boolean expensive) {
super.init(expensive);
// Initialize local variables
if (!(0.0 < _parms._sample_rate && _parms._sample_rate <= 1.0))
throw new IllegalArgumentException("Sample rate should be interval [0,1] but it is " + _parms._sample_rate);
if( _parms._mtries < 1 && _parms._mtries != -1 ) error("_mtries", "mtries must be -1 (converted to sqrt(features)), or >= 1 but it is " + _parms._mtries);
if( _train != null ) {
int ncols = _train.numCols();
if( _parms._mtries != -1 && !(1 <= _parms._mtries && _parms._mtries < ncols))
error("_mtries","Computed mtries should be -1 or in interval [1,"+ncols+"] but it is " + _parms._mtries);
}
if (_parms._distribution == Distribution.Family.AUTO) {
if (_nclass == 1) _parms._distribution = Distribution.Family.gaussian;
if (_nclass >= 2) _parms._distribution = Distribution.Family.multinomial;
}
if (expensive) {
_initialPrediction = isClassifier() ? 0 : getInitialValue();
}
if (_parms._sample_rate == 1f && _valid == null)
error("_sample_rate", "Sample rate is 100% and no validation dataset is specified. There are no OOB data to compute out-of-bag error estimation!");
if (hasOffsetCol())
error("_offset_column", "Offsets are not yet supported for DRF.");
if (hasOffsetCol() && isClassifier()) {
error("_offset_column", "Offset is only supported for regression.");
}
}
// A standard DTree with a few more bits. Support for sampling during
// training, and replaying the sample later on the identical dataset to
// e.g. compute OOBEE.
static class DRFTree extends DTree {
final int _mtrys; // Number of columns to choose amongst in splits
final long _seeds[]; // One seed for each chunk, for sampling
final transient Random _rand; // RNG for split decisions & sampling
DRFTree( Frame fr, int ncols, char nbins, char nbins_cats, char nclass, double min_rows, int mtrys, long seed ) {
super(fr._names, ncols, nbins, nbins_cats, nclass, min_rows, seed);
_mtrys = mtrys;
_rand = createRNG(seed);
_seeds = new long[fr.vecs()[0].nChunks()];
for( int i=0; i<_seeds.length; i++ )
_seeds[i] = _rand.nextLong();
}
// Return a deterministic chunk-local RNG. Can be kinda expensive.
public Random rngForChunk( int cidx ) {
long seed = _seeds[cidx];
return createRNG(seed);
}
}
/** Fill work columns:
* - classification: set 1 in the corresponding wrk col according to row response
* - regression: copy response into work column (there is only 1 work column)
*/
private class SetWrkTask extends MRTask<SetWrkTask> {
@Override public void map( Chunk chks[] ) {
Chunk cy = chk_resp(chks);
for( int i=0; i<cy._len; i++ ) {
if( cy.isNA(i) ) continue;
if (isClassifier()) {
int cls = (int)cy.at8(i);
chk_work(chks,cls).set(i,1L);
} else {
float pred = (float) cy.atd(i);
chk_work(chks,0).set(i,pred);
}
}
}
}
private class DRFDriver extends Driver {
// Classification or Regression:
// Tree votes/SSE of individual trees on OOB rows
public transient TreeMeasuresCollector.TreeMeasures _treeMeasuresOnOOB;
// Tree votes/SSE per individual features on permutated OOB rows
public transient TreeMeasuresCollector.TreeMeasures[/*features*/] _treeMeasuresOnSOOB;
// Variable importance beased on tree split decisions
private transient float[/*nfeatures*/] _improvPerVar;
private void initTreeMeasurements() {
_improvPerVar = new float[_ncols];
final int ntrees = _parms._ntrees;
// Preallocate tree votes
if (_model._output.isClassifier()) {
_treeMeasuresOnOOB = new TreeMeasuresCollector.TreeVotes(ntrees);
_treeMeasuresOnSOOB = new TreeMeasuresCollector.TreeVotes[_ncols];
for (int i=0; i<_ncols; i++) _treeMeasuresOnSOOB[i] = new TreeMeasuresCollector.TreeVotes(ntrees);
} else {
_treeMeasuresOnOOB = new TreeMeasuresCollector.TreeSSE(ntrees);
_treeMeasuresOnSOOB = new TreeMeasuresCollector.TreeSSE[_ncols];
for (int i=0; i<_ncols; i++) _treeMeasuresOnSOOB[i] = new TreeMeasuresCollector.TreeSSE(ntrees);
}
}
@Override protected void buildModel() {
// Start with class distribution as null-model
// FIXME: Test/Investigate this
// if( _nclass >= 2 ) {
// for( int c=0; c<_nclass; c++ ) {
// final double init = _model._output._priorClassDist[c];
// new MRTask() {
// @Override public void map(Chunk tree) { for( int i=0; i<tree._len; i++ ) tree.set(i, init); }
// }.doAll(vec_tree(_train,c));
_mtry = (_parms._mtries==-1) ? // classification: mtry=sqrt(_ncols), regression: mtry=_ncols/3
( isClassifier() ? Math.max((int)Math.sqrt(_ncols),1) : Math.max(_ncols/3,1)) : _parms._mtries;
// How many trees was in already in provided checkpointed model
int ntreesFromCheckpoint = _parms.hasCheckpoint() ?
((SharedTreeModel.SharedTreeParameters) _parms._checkpoint.<SharedTreeModel>get()._parms)._ntrees : 0;
if (!(1 <= _mtry && _mtry <= _ncols)) throw new IllegalArgumentException("Computed mtry should be in interval <1,"+_ncols+"> but it is " + _mtry);
// Initialize TreeVotes for classification, MSE arrays for regression
initTreeMeasurements();
// Append number of trees participating in on-the-fly scoring
_train.add("OUT_BAG_TREES", _response.makeZero());
// Prepare working columns
new SetWrkTask().doAll(_train);
// If there was a check point recompute tree_<_> and oob columns based on predictions from previous trees
// but only if OOB validation is requested.
if (_parms.hasCheckpoint()) {
Timer t = new Timer();
// Compute oob votes for each output level
new OOBScorer(_ncols, _nclass, numSpecialCols(), _parms._sample_rate, _model._output._treeKeys).doAll(_train);
Log.info("Reconstructing oob stats from checkpointed model took " + t);
}
// The RNG used to pick split columns
Random rand = createRNG(_parms._seed);
// To be deterministic get random numbers for previous trees and
// put random generator to the same state
for (int i = 0; i < ntreesFromCheckpoint; i++) rand.nextLong();
int tid;
// Prepare tree statistics
// Build trees until we hit the limit
for( tid=0; tid < _ntrees; tid++) { // Building tid-tree
if (tid!=0 || !_parms.hasCheckpoint()) { // do not make initial scoring if model already exist
double training_r2 = doScoringAndSaveModel(false, true, _parms._build_tree_one_node);
if( training_r2 >= _parms._r2_stopping ) {
doScoringAndSaveModel(true, true, _parms._build_tree_one_node);
return; // Stop when approaching round-off error
}
}
// At each iteration build K trees (K = nclass = response column domain size)
// TODO: parallelize more? build more than k trees at each time, we need to care about temporary data
// Idea: launch more DRF at once.
Timer kb_timer = new Timer();
buildNextKTrees(_train,_mtry,_parms._sample_rate,rand,tid);
Log.info((tid+1) + ". tree was built " + kb_timer.toString());
DRF.this.update(1);
if( !isRunning() ) return; // If canceled during building, do not bulkscore
}
doScoringAndSaveModel(true, true, _parms._build_tree_one_node);
}
// Build the next random k-trees representing tid-th tree
private void buildNextKTrees(Frame fr, int mtrys, float sample_rate, Random rand, int tid) {
// We're going to build K (nclass) trees - each focused on correcting
// errors for a single class.
final DTree[] ktrees = new DTree[_nclass];
// Initial set of histograms. All trees; one leaf per tree (the root
// leaf); all columns
DHistogram hcs[][][] = new DHistogram[_nclass][1/*just root leaf*/][_ncols];
// Adjust real bins for the top-levels
int adj_nbins = Math.max(_parms._nbins_top_level,_parms._nbins);
// Use for all k-trees the same seed. NOTE: this is only to make a fair
// view for all k-trees
final double[] _distribution = _model._output._distribution;
long rseed = rand.nextLong();
// Initially setup as-if an empty-split had just happened
for (int k = 0; k < _nclass; k++) {
if (_distribution[k] != 0) { // Ignore missing classes
// The Boolean Optimization
// This optimization assumes the 2nd tree of a 2-class system is the
// inverse of the first (and that the same columns were picked)
if( k==1 && _nclass==2 && _model.binomialOpt()) continue;
ktrees[k] = new DRFTree(fr, _ncols, (char)_parms._nbins, (char)_parms._nbins_cats, (char)_nclass, _parms._min_rows, mtrys, rseed);
new DRFUndecidedNode(ktrees[k], -1, DHistogram.initialHist(fr, _ncols, adj_nbins, _parms._nbins_cats, hcs[k][0])); // The "root" node
}
}
// Sample - mark the lines by putting 'OUT_OF_BAG' into nid(<klass>) vector
Timer t_1 = new Timer();
Sample ss[] = new Sample[_nclass];
for( int k=0; k<_nclass; k++)
if (ktrees[k] != null) ss[k] = new Sample((DRFTree)ktrees[k], sample_rate).dfork(0,new Frame(vec_nids(fr,k),vec_resp(fr)), _parms._build_tree_one_node);
for( int k=0; k<_nclass; k++)
if( ss[k] != null ) ss[k].getResult();
Log.debug("Sampling took: + " + t_1);
int[] leafs = new int[_nclass]; // Define a "working set" of leaf splits, from leafs[i] to tree._len for each tree i
// One Big Loop till the ktrees are of proper depth.
// Adds a layer to the trees each pass.
Timer t_2 = new Timer();
int depth=0;
for( ; depth<_parms._max_depth; depth++ ) {
if( !isRunning() ) return;
hcs = buildLayer(fr, _parms._nbins, _parms._nbins_cats, ktrees, leafs, hcs, true, _parms._build_tree_one_node);
// If we did not make any new splits, then the tree is split-to-death
if( hcs == null ) break;
}
Log.debug("Tree build took: " + t_2);
// Each tree bottomed-out in a DecidedNode; go 1 more level and insert
// LeafNodes to hold predictions.
Timer t_3 = new Timer();
for( int k=0; k<_nclass; k++ ) {
DTree tree = ktrees[k];
if( tree == null ) continue;
int leaf = leafs[k] = tree.len();
for( int nid=0; nid<leaf; nid++ ) {
if( tree.node(nid) instanceof DecidedNode ) {
DecidedNode dn = tree.decided(nid);
if( dn._split._col == -1 ) { // No decision here, no row should have this NID now
if( nid==0 ) { // Handle the trivial non-splitting tree
LeafNode ln = new DRFLeafNode(tree, -1, 0);
ln._pred = (float)(isClassifier() ? _model._output._priorClassDist[k] : _initialPrediction);
}
continue;
}
for( int i=0; i<dn._nids.length; i++ ) {
int cnid = dn._nids[i];
if( cnid == -1 || // Bottomed out (predictors or responses known constant)
tree.node(cnid) instanceof UndecidedNode || // Or chopped off for depth
(tree.node(cnid) instanceof DecidedNode && // Or not possible to split
((DecidedNode)tree.node(cnid))._split.col()==-1) ) {
LeafNode ln = new DRFLeafNode(tree,nid);
ln._pred = (float)dn.pred(i); // Set prediction into the leaf
dn._nids[i] = ln.nid(); // Mark a leaf here
}
}
}
}
} // -- k-trees are done
Log.debug("Nodes propagation: " + t_3);
// Move rows into the final leaf rows
Timer t_4 = new Timer();
CollectPreds cp = new CollectPreds(ktrees,leafs,_model.defaultThreshold()).doAll(fr,_parms._build_tree_one_node);
if (isClassifier()) asVotes(_treeMeasuresOnOOB).append(cp.rightVotes, cp.allRows); // Track right votes over OOB rows for this tree
else /* regression */ asSSE (_treeMeasuresOnOOB).append(cp.sse, cp.allRows);
Log.debug("CollectPreds done: " + t_4);
// Grow the model by K-trees
_model._output.addKTrees(ktrees);
}
// Collect and write predictions into leafs.
private class CollectPreds extends MRTask<CollectPreds> {
final DTree _trees[]; // Read-only, shared (except at the histograms in the Nodes)
double _threshold; // Sum of squares for this tree only
/* @OUT */ long rightVotes; // number of right votes over OOB rows (performed by this tree) represented by DTree[] _trees
/* @OUT */ long allRows; // number of all OOB rows (sampled by this tree)
/* @OUT */ float sse; // Sum of squares for this tree only
CollectPreds(DTree trees[], int leafs[], double threshold) { _trees=trees; _threshold = threshold; }
final boolean importance = true;
@Override public void map( Chunk[] chks ) {
final Chunk y = importance ? chk_resp(chks) : null; // Response
final double[] rpred = importance ? new double[1+_nclass] : null; // Row prediction
final double[] rowdata = importance ? new double[_ncols] : null; // Pre-allocated row data
final Chunk oobt = chk_oobt(chks); // Out-of-bag rows counter over all trees
// Iterate over all rows
for( int row=0; row<oobt._len; row++ ) {
final boolean wasOOBRow = ScoreBuildHistogram.isOOBRow((int)chk_nids(chks,0).at8(row));
// For all tree (i.e., k-classes)
for( int k=0; k<_nclass; k++ ) {
final DTree tree = _trees[k];
if( tree == null ) continue; // Empty class is ignored
final Chunk nids = chk_nids(chks, k); // Node-ids for this tree/class
int nid = (int)nids.at8(row); // Get Node to decide from
// Update only out-of-bag rows
// This is out-of-bag row - but we would like to track on-the-fly prediction for the row
if( wasOOBRow) {
final Chunk ct = chk_tree(chks,k); // k-tree working column holding votes for given row
nid = ScoreBuildHistogram.oob2Nid(nid);
if( tree.node(nid) instanceof UndecidedNode ) // If we bottomed out the tree
nid = tree.node(nid).pid(); // Then take parent's decision
int leafnid;
if( tree.root() instanceof LeafNode ) {
leafnid = 0;
} else {
DecidedNode dn = tree.decided(nid); // Must have a decision point
if (dn._split.col() == -1) // Unable to decide?
dn = tree.decided(tree.node(nid).pid()); // Then take parent's decision
leafnid = dn.ns(chks, row); // Decide down to a leafnode
}
// Setup Tree(i) - on the fly prediction of i-tree for row-th row
// - for classification: cumulative number of votes for this row
// - for regression: cumulative sum of prediction of each tree - has to be normalized by number of trees
double prediction = ((LeafNode) tree.node(leafnid)).pred(); // Prediction for this k-class and this row
if (importance) rpred[1 + k] = (float) prediction; // for both regression and classification
ct.set(row, (float) (ct.atd(row) + prediction));
}
// reset help column for this row and this k-class
nids.set(row, 0);
} /* end of k-trees iteration */
// For this tree this row is out-of-bag - i.e., a tree voted for this row
if (wasOOBRow) oobt.set(row, oobt.atd(row) + 1); // track number of trees
if (importance) {
if (wasOOBRow && !y.isNA(row)) {
if (isClassifier()) {
int treePred = getPrediction(rpred, _model._output._priorClassDist, data_row(chks, row, rowdata), _threshold);
int actuPred = (int) y.at8(row);
if (treePred==actuPred) rightVotes++; // No miss !
} else { // regression
double treePred = rpred[1];
double actuPred = y.atd(row);
sse += (actuPred-treePred)*(actuPred-treePred);
}
allRows++;
}
}
}
}
@Override public void reduce(CollectPreds mrt) {
rightVotes += mrt.rightVotes;
allRows += mrt.allRows;
sse += mrt.sse;
}
}
@Override protected DRFModel makeModel( Key modelKey, DRFModel.DRFParameters parms, double mse_train, double mse_valid ) {
return new DRFModel(modelKey,parms,new DRFModel.DRFOutput(DRF.this,mse_train,mse_valid));
}
}
@Override protected DecidedNode makeDecided( UndecidedNode udn, DHistogram hs[] ) {
return new DRFDecidedNode(udn,hs);
}
// DRF DTree decision node: same as the normal DecidedNode, but
// specifies a decision algorithm given complete histograms on all
// columns. DRF algo: find the lowest error amongst *all* columns.
static class DRFDecidedNode extends DecidedNode {
DRFDecidedNode( UndecidedNode n, DHistogram[] hs ) { super(n,hs); }
@Override public UndecidedNode makeUndecidedNode(DHistogram[] hs ) {
return new DRFUndecidedNode(_tree,_nid,hs);
}
// Find the column with the best split (lowest score). Unlike RF, DRF
// scores on all columns and selects splits on all columns.
@Override public DTree.Split bestCol( UndecidedNode u, DHistogram[] hs ) {
DTree.Split best = new DTree.Split(-1,-1,null,(byte)0,Double.MAX_VALUE,Double.MAX_VALUE,Double.MAX_VALUE,0L,0L,0,0);
if( hs == null ) return best;
for( int i=0; i<u._scoreCols.length; i++ ) {
int col = u._scoreCols[i];
DTree.Split s = hs[col].scoreMSE(col, _tree._min_rows);
if( s == null ) continue;
if( s.se() < best.se() ) best = s;
if( s.se() <= 0 ) break; // No point in looking further!
}
return best;
}
}
// DRF DTree undecided node: same as the normal UndecidedNode, but specifies
// a list of columns to score on now, and then decide over later.
static class DRFUndecidedNode extends UndecidedNode {
DRFUndecidedNode( DTree tree, int pid, DHistogram hs[] ) { super(tree,pid,hs); }
// Randomly select mtry columns to 'score' in following pass over the data.
@Override public int[] scoreCols( DHistogram[] hs ) {
DRFTree tree = (DRFTree)_tree;
int[] cols = new int[hs.length];
int len=0;
// Gather all active columns to choose from.
for( int i=0; i<hs.length; i++ ) {
if( hs[i]==null ) continue; // Ignore not-tracked cols
assert hs[i]._min < hs[i]._maxEx && hs[i].nbins() > 1 : "broken histo range "+hs[i];
cols[len++] = i; // Gather active column
}
int choices = len; // Number of columns I can choose from
assert choices > 0;
// Draw up to mtry columns at random without replacement.
for( int i=0; i<tree._mtrys; i++ ) {
if( len == 0 ) break; // Out of choices!
int idx2 = tree._rand.nextInt(len);
int col = cols[idx2]; // The chosen column
cols[idx2] = cols[--len]; // Compress out of array; do not choose again
cols[len] = col; // Swap chosen in just after 'len'
}
assert choices - len > 0;
return Arrays.copyOfRange(cols, len, choices);
}
}
static class DRFLeafNode extends LeafNode {
DRFLeafNode( DTree tree, int pid ) { super(tree,pid); }
DRFLeafNode( DTree tree, int pid, int nid ) { super(tree, pid, nid); }
// Insert just the predictions: a single byte/short if we are predicting a
// single class, or else the full distribution.
@Override protected AutoBuffer compress(AutoBuffer ab) { assert !Double.isNaN(_pred); return ab.put4f(_pred); }
@Override protected int size() { return 4; }
}
// Deterministic sampling
static class Sample extends MRTask<Sample> {
final DRFTree _tree;
final float _rate;
Sample( DRFTree tree, float rate ) { _tree = tree; _rate = rate; }
@Override public void map( Chunk nids, Chunk ys ) {
Random rand = _tree.rngForChunk(nids.cidx());
for( int row=0; row<nids._len; row++ )
if( rand.nextFloat() >= _rate || Double.isNaN(ys.atd(row)) ) {
nids.set(row, ScoreBuildHistogram.OUT_OF_BAG); // Flag row as being ignored by sampling
}
}
}
// Read the 'tree' columns, do model-specific math and put the results in the
// fs[] array, and return the sum. Dividing any fs[] element by the sum
// turns the results into a probability distribution.
@Override protected double score1( Chunk chks[], double weight, double offset, double fs[/*nclass*/], int row ) {
double sum = 0;
if (_nclass > 2 || (_nclass == 2 && !_model.binomialOpt())) {
for (int k = 0; k < _nclass; k++)
sum += (fs[k+1] = chk_tree(chks, k).atd(row) / chk_oobt(chks).atd(row));
}
else if (_nclass==2 && _model.binomialOpt()) {
fs[1] = chk_tree(chks, 0).atd(row) / chk_oobt(chks).atd(row);
assert(fs[1] >= 0 && fs[1] <= 1);
fs[2] = 1. - fs[1];
}
else { //regression
// average per trees voted for this row (only trees which have row in "out-of-bag"
sum += (fs[0] = chk_tree(chks, 0).atd(row) / chk_oobt(chks).atd(row) );
fs[1] = 0;
}
return sum;
}
}
|
package javaslang.match;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.function.Function;
import javaslang.AssertionsExtensions;
import javaslang.Tuples;
import javaslang.Tuples.Tuple2;
import javaslang.lambda.SerializableFunction;
import org.junit.Test;
public class PatternTest {
@Test
public void shouldNotInstantiable() {
AssertionsExtensions.assertThat(Patterns.class).isNotInstantiable();
}
// -- pattern creation
@Test
public void shouldCreatePatternOfArity1() {
assertThat(Pattern.of(t -> null, Tuples.of(1))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity2() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity3() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity4() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity5() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity6() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity7() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6, 7))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity8() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6, 7, 8))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity9() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6, 7, 8, 9))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity10() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity11() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity12() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))).isNotNull();
}
@Test
public void shouldCreatePatternOfArity13() {
assertThat(Pattern.of(t -> null, Tuples.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).isNotNull();
}
// -- pattern matching
@Test
// DEV-NOTE: implemented to satisfy code coverage
public void shouldComparePrototype() {
assertThat(new Patterns.UnaryPrototype(o -> true).equals(new Object())).isTrue();
}
@Test
public void shouldMatchFunctionWithoutCapturedArgsBySignature() {
final SerializableFunction<Integer, String> function = i -> String.valueOf(i);
final Tuple2<SerializableFunction<Integer, String>, Tuple2<Class<Integer>, Class<String>>> match = Patterns
.Function(Integer.class, String.class)
.apply(function)
.get();
final Tuple2<Class<Integer>, Class<String>> decomposition = match._2;
assertThat(decomposition).isEqualTo(Tuples.of(Integer.class, String.class));
}
@Test
public void shouldMatchFunctionWithCapturedArgsBySignature() {
final Function<Integer, String> f = i -> String.valueOf(i);
final SerializableFunction<Integer, String> function = i -> f.apply(i);
final Tuple2<SerializableFunction<Integer, String>, Tuple2<Class<Integer>, Class<String>>> match = Patterns
.Function(Integer.class, String.class)
.apply(function)
.get();
final Tuple2<Class<Integer>, Class<String>> decomposition = match._2;
assertThat(decomposition).isEqualTo(Tuples.of(Integer.class, String.class));
}
}
|
package hex.tree.gbm;
import hex.ModelCategory;
import hex.schemas.GBMV3;
import hex.tree.*;
import hex.tree.DTree.DecidedNode;
import hex.tree.DTree.LeafNode;
import hex.tree.DTree.UndecidedNode;
import water.*;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
import water.fvec.C0DChunk;
import water.fvec.Chunk;
import water.fvec.Vec;
import water.util.FrameUtils;
import water.util.Log;
import water.util.Timer;
import water.util.ArrayUtils;
/** Gradient Boosted Trees
*
* Based on "Elements of Statistical Learning, Second Edition, page 387"
*/
public class GBM extends SharedTree<GBMModel,GBMModel.GBMParameters,GBMModel.GBMOutput> {
@Override public ModelCategory[] can_build() {
return new ModelCategory[]{
ModelCategory.Regression,
ModelCategory.Binomial,
ModelCategory.Multinomial,
};
}
@Override public BuilderVisibility builderVisibility() { return BuilderVisibility.Stable; }
// Called from an http request
public GBM( GBMModel.GBMParameters parms) { super("GBM",parms); init(false); }
@Override public GBMV3 schema() { return new GBMV3(); }
/** Start the GBM training Job on an F/J thread. */
@Override public Job<GBMModel> trainModel() {
return start(new GBMDriver(), _parms._ntrees/*work for progress bar*/);
}
/** Initialize the ModelBuilder, validating all arguments and preparing the
* training frame. This call is expected to be overridden in the subclasses
* and each subclass will start with "super.init();". This call is made
* by the front-end whenever the GUI is clicked, and needs to be fast;
* heavy-weight prep needs to wait for the trainModel() call.
*
* Validate the learning rate and distribution family. */
@Override public void init(boolean expensive) {
super.init(expensive);
// Initialize response based on given distribution family.
// Regression: initially predict the response mean
// Binomial: just class 0 (class 1 in the exact inverse prediction)
// Multinomial: Class distribution which is not a single value.
// However there is this weird tension on the initial value for
// classification: If you guess 0's (no class is favored over another),
// then with your first GBM tree you'll typically move towards the correct
// answer a little bit (assuming you have decent predictors) - and
// immediately the Confusion Matrix shows good results which gradually
// improve... BUT the Means Squared Error will suck for unbalanced sets,
// even as the CM is good. That's because we want the predictions for the
// common class to be large and positive, and the rare class to be negative
// and instead they start around 0. Guessing initial zero's means the MSE
// is so bad, that the R^2 metric is typically negative (usually it's
// between 0 and 1).
// If instead you guess the mean (reversed through the loss function), then
// the zero-tree GBM model reports an MSE equal to the response variance -
// and an initial R^2 of zero. More trees gradually improves the R^2 as
// expected. However, all the minority classes have large guesses in the
// wrong direction, and it takes a long time (lotsa trees) to correct that
// - so your CM sucks for a long time.
double mean = 0;
if (expensive) {
if (error_count() > 0) {
GBM.this.updateValidationMessages();
throw H2OModelBuilderIllegalArgumentException.makeFromBuilder(GBM.this);
}
mean = responseMean();
_initialPrediction = _nclass == 1 ? mean
: (_nclass == 2 ? -0.5 * Math.log(mean / (1.0 - mean)) : 0.0);
if (_parms._distribution == GBMModel.GBMParameters.Family.AUTO) {
if (_nclass == 1) _parms._distribution = GBMModel.GBMParameters.Family.gaussian;
if (_nclass == 2) _parms._distribution = GBMModel.GBMParameters.Family.bernoulli;
if (_nclass >= 3) _parms._distribution = GBMModel.GBMParameters.Family.multinomial;
}
if (hasOffset() && isClassifier() && _parms._distribution != GBMModel.GBMParameters.Family.bernoulli) {
error("_offset_column", "Offset is only supported for regression or binary classification with the Bernoulli distribution.");
}
if (hasOffset() && _parms._distribution == GBMModel.GBMParameters.Family.bernoulli) {
if (_offset.max() > 1)
error("_offset_column", "Offset cannot be larger than 1 for Bernoulli distribution.");
}
}
switch( _parms._distribution) {
case bernoulli:
if( _nclass != 2 /*&& !couldBeBool(_response)*/)
error("_distribution", "Binomial requires the response to be a 2-class categorical");
else if( _response != null ) {
// Bernoulli: initial prediction is log( mean(y)/(1-mean(y)) )
_initialPrediction = Math.log(mean / (1.0 - mean)); // mean can be weighted mean
}
break;
case multinomial:
if (!isClassifier()) error("_distribution", "Multinomial requires an enum response.");
break;
case gaussian:
if (isClassifier()) error("_distribution", "Gaussian requires the response to be numeric.");
break;
case AUTO:
break;
default:
error("_distribution","Invalid distribution: " + _parms._distribution);
}
if( !(0. < _parms._learn_rate && _parms._learn_rate <= 1.0) )
error("_learn_rate", "learn_rate must be between 0 and 1");
}
  /**
   * Model-building driver for GBM.  Implements the gradient tree-boosting
   * loop of ESL2, Algorithm 10.3 (page 387): compute predictions from the
   * trees built so far, derive pseudo-residuals, fit K new trees to those
   * residuals, and fill the new tree leaves with gamma corrections scaled
   * by the learning rate.
   */
  private class GBMDriver extends Driver {
    @Override protected void buildModel() {
      // For Bernoulli with an offset column there is no closed-form initial
      // value, so search for it with (one step of) Newton-Raphson.
      if (hasOffset() && _parms._distribution == GBMModel.GBMParameters.Family.bernoulli) {
        Log.info("Running Newton-Raphson iteration to find the initial value since offsets are specified.");
        Log.info("Iteration 0: initial value: " + _initialPrediction + " (starting value)");
        double delta;
        int count=0;
        double tol = 1e-4;
        int N=1; //one step is enough - same as R
        //From R GBM vignette:
        //For speed, gbm() does only one step of the Newton-Raphson algorithm
        //rather than iterating to convergence. No appreciable loss of accuracy
        //since the next boosting iteration will simply correct for the prior iterations
        //inadequacy.
        _initialPrediction = 0; // restart the search from zero
        do {
          double newInit = new NewtonRaphson(_initialPrediction).doAll(_train).value();
          delta = Math.abs(_initialPrediction - newInit);
          _initialPrediction = newInit;
          Log.info("Iteration " + ++count + ": initial value: " + _initialPrediction);
        } while (count < N && delta >= tol);
        if (delta > tol) Log.warn("Not fully converged.");
        Log.info("Newton-Raphson iteration ran for " + count + " iteration(s). Final residual: " + delta);
      }
      _model._output._init_f = _initialPrediction; //always write the initial value here (not just for Bernoulli)
      if( _initialPrediction != 0.0 ) { // Only non-zero for regression or bernoulli
        final double init = _initialPrediction;
        // Seed tree-column 0 with the initial prediction for every row.
        new MRTask() {
          @Override
          public void map(Chunk tree) {
            for (int i = 0; i < tree._len; i++) tree.set(i, init);
          }
        }.doAll(vec_tree(_train, 0), _parms._build_tree_one_node); // Only setting tree-column 0
      }
      // Reconstruct the working tree state from the checkpoint
      if( _parms._checkpoint ) {
        Timer t = new Timer();
        new ResidualsCollector(_ncols, _nclass, (hasOffset()?1:0)+(hasWeights()?1:0),_model._output._treeKeys).doAll(_train, _parms._build_tree_one_node);
        Log.info("Reconstructing tree residuals stats from checkpointed model took " + t);
      }
      // Loop over the K trees
      for( int tid=0; tid<_parms._ntrees; tid++) {
        // During first iteration model contains 0 trees, then 1-tree, ...
        // No need to score a checkpoint with no extra trees added
        if( tid!=0 || !_parms._checkpoint ) { // do not make initial scoring if model already exist
          double training_r2 = doScoringAndSaveModel(false, false, _parms._build_tree_one_node);
          if( training_r2 >= _parms._r2_stopping )
            return; // Stop when approaching round-off error
        }
        // ESL2, page 387
        // Step 2a: Compute prediction (prob distribution) from prior tree results:
        // Work <== f(Tree)
        new ComputeProb().doAll(_train, _parms._build_tree_one_node);
        // ESL2, page 387
        // Step 2b i: Compute residuals from the prediction (probability distribution)
        // Work <== f(Work)
        new ComputeRes().doAll(_train, _parms._build_tree_one_node);
        // ESL2, page 387, Step 2b ii, iii, iv
        Timer kb_timer = new Timer();
        buildNextKTrees();
        Log.info((tid+1) + ". tree was built in " + kb_timer.toString());
        GBM.this.update(1);
        if( !isRunning() ) return; // If canceled during building, do not bulkscore
      }
      // Final scoring (skip if job was cancelled)
      doScoringAndSaveModel(true, false, _parms._build_tree_one_node);
    }
    /**
     * Iteration to find a consistent initial value for bernoulli with offsets.
     * One map/reduce pass accumulates the Newton-Raphson numerator (weighted
     * residual sum) and denominator (weighted p*(1-p) sum) over all rows.
     */
    private class NewtonRaphson extends MRTask<NewtonRaphson> {
      double _init;   // current estimate of the initial value (input)
      double _num;    // accumulated numerator: sum of w*(y-p)
      double _denom;  // accumulated denominator: sum of w*p*(1-p)
      // Next Newton-Raphson estimate: init + num/denom.
      public double value() {
        return _init + _num/_denom;
      }
      NewtonRaphson(double init) { _init = init; }
      @Override public void map( Chunk chks[] ) {
        Chunk ys = chk_resp(chks);
        Chunk offset = chk_offset(chks);
        // Constant weight of 1 when no weight column is present.
        Chunk weight = hasWeights() ? chk_weight(chks) : new C0DChunk(1, chks[0]._len);
        for( int row = 0; row < ys._len; row++) {
          double w = weight.atd(row);
          if (ys.isNA(row)) continue; // skip rows with a missing response
          double y = ys.atd(row);
          double o = offset.atd(row);
          double p = 1./(1.+Math.exp(-(o+_init))); // logistic probability at current estimate
          _num += w*(y-p);
          _denom += w*p*(1.-p);
        }
      }
      @Override
      public void reduce(NewtonRaphson mrt) {
        _num += mrt._num;
        _denom += mrt._denom;
      }
    }
    // Compute Prediction from prior tree results.
    // Classification (multinomial): Probability Distribution of log-likelihoods
    // Prob_k = exp(Work_k)/sum_all_K exp(Work_k)
    // Classification (bernoulli): Probability of y = 1 given logit link function
    // Prob_0 = 1/(1 + exp(Work)), Prob_1 = 1/(1 + exp(-Work))
    // Regression: Just prior tree results
    // Work <== f(Tree)
    class ComputeProb extends MRTask<ComputeProb> {
      @Override public void map( Chunk chks[] ) {
        Chunk ys = chk_resp(chks);
        // Constant zero offset when no offset column is present.
        Chunk offset = hasOffset() ? chk_offset(chks) : new C0DChunk(0, chks[0]._len);
        if( _parms._distribution == GBMModel.GBMParameters.Family.bernoulli ) {
          Chunk tr = chk_tree(chks,0);
          Chunk wk = chk_work(chks,0);
          for( int row = 0; row < ys._len; row++)
            // wk.set(row, 1.0f/(1f+Math.exp(-tr.atd(row))) ); // Prob_1
            wk.set(row, 1.0f / (1f + Math.exp(tr.atd(row) + offset.atd(row)))); // Prob_0
        } else if( _nclass > 1 ) { // Classification
          double fs[] = new double[_nclass+1];
          for( int row=0; row<ys._len; row++ ) {
            double weight = hasWeights() ? chk_weight(chks).atd(row) : 1;
            // NOTE(review): offset is passed as literal 0.0 here, so offsets are
            // not applied on the multinomial path (only bernoulli/regression
            // branches read the offset chunk above/below) -- confirm intended.
            double sum = score1(chks, weight,0.0 /*offset*/,fs,row);
            if( Double.isInfinite(sum) ) // Overflow (happens for constant responses)
              for( int k=0; k<_nclass; k++ )
                chk_work(chks,k).set(row,Double.isInfinite(fs[k+1])?1.0f:0.0f);
            else
              for( int k=0; k<_nclass; k++ ) // Save as a probability distribution
                chk_work(chks,k).set(row,(float)(fs[k+1]/sum));
          }
        } else { // Regression
          Chunk tr = chk_tree(chks,0); // Prior tree sums
          Chunk wk = chk_work(chks,0); // Predictions
          for( int row=0; row<ys._len; row++ )
            wk.set(row,(float)(tr.atd(row) + offset.atd(row)));
        }
      }
    }
    // Compute Residuals from Actuals
    // Work <== f(Work)
    class ComputeRes extends MRTask<ComputeRes> {
      @Override public void map( Chunk chks[] ) {
        Chunk ys = chk_resp(chks);
        if( _parms._distribution == GBMModel.GBMParameters.Family.bernoulli ) {
          for(int row = 0; row < ys._len; row++) {
            if( ys.isNA(row) ) continue;
            int y = (int)ys.at8(row); // zero-based response variable
            Chunk wk = chk_work(chks,0);
            // wk.set(row, y-(float)wk.atd(row)); // wk.atd(row) is Prob_1
            wk.set(row, y-1f+(float)wk.atd(row)); // wk.atd(row) is Prob_0
          }
        } else if( _nclass > 1 ) { // Classification
          for( int row=0; row<ys._len; row++ ) {
            if( ys.isNA(row) ) continue;
            int y = (int)ys.at8(row); // zero-based response variable
            // Actual is '1' for class 'y' and '0' for all other classes
            for( int k=0; k<_nclass; k++ ) {
              if( _model._output._distribution[k] != 0 ) {
                Chunk wk = chk_work(chks,k);
                wk.set(row, (y==k?1f:0f)-(float)wk.atd(row) );
              }
            }
          }
        } else { // Regression
          Chunk wk = chk_work(chks,0); // Prediction==>Residuals
          for( int row=0; row<ys._len; row++ )
            wk.set(row, (float)(ys.atd(row)-wk.atd(row)) );
        }
      }
    }
    // Build the next k-trees, which is trying to correct the residual error from
    // the prior trees. From ESL2, page 387. Step 2b ii, iii.
    private void buildNextKTrees() {
      // We're going to build K (nclass) trees - each focused on correcting
      // errors for a single class.
      final DTree[] ktrees = new DTree[_nclass];
      // Initial set of histograms. All trees; one leaf per tree (the root
      // leaf); all columns
      DHistogram hcs[][][] = new DHistogram[_nclass][1/*just root leaf*/][_ncols];
      // Adjust real bins for the top-levels
      int adj_nbins = Math.max(_parms._nbins_top_level,_parms._nbins);
      for( int k=0; k<_nclass; k++ ) {
        // Initially setup as-if an empty-split had just happened
        if( _model._output._distribution[k] != 0 ) {
          // The Boolean Optimization
          // This optimization assumes the 2nd tree of a 2-class system is the
          // inverse of the first. This is false for DRF (and true for GBM) -
          // DRF picks a random different set of columns for the 2nd tree.
          if( k==1 && _nclass==2 ) continue;
          ktrees[k] = new DTree(_train._names,_ncols,(char)_parms._nbins,(char)_parms._nbins_cats, (char)_nclass,_parms._min_rows);
          new GBMUndecidedNode(ktrees[k],-1,DHistogram.initialHist(_train,_ncols,adj_nbins,_parms._nbins_cats,hcs[k][0]) ); // The "root" node
        }
      }
      int[] leafs = new int[_nclass]; // Define a "working set" of leaf splits, from here to tree._len
      // ESL2, page 387. Step 2b ii.
      // One Big Loop till the ktrees are of proper depth.
      // Adds a layer to the trees each pass.
      int depth=0;
      for( ; depth<_parms._max_depth; depth++ ) {
        if( !isRunning() ) return;
        hcs = buildLayer(_train, adj_nbins, _parms._nbins_cats, ktrees, leafs, hcs, false, _parms._build_tree_one_node);
        // If we did not make any new splits, then the tree is split-to-death
        if( hcs == null ) break;
      }
      // Each tree bottomed-out in a DecidedNode; go 1 more level and insert
      // LeafNodes to hold predictions.
      for( int k=0; k<_nclass; k++ ) {
        DTree tree = ktrees[k];
        if( tree == null ) continue;
        int leaf = leafs[k] = tree.len();
        for( int nid=0; nid<leaf; nid++ ) {
          if( tree.node(nid) instanceof DecidedNode ) {
            DecidedNode dn = tree.decided(nid);
            if( dn._split._col == -1 ) { // No decision here, no row should have this NID now
              if( nid==0 ) // Handle the trivial non-splitting tree
                new GBMLeafNode(tree,-1,0);
              continue;
            }
            for( int i=0; i<dn._nids.length; i++ ) {
              int cnid = dn._nids[i];
              if( cnid == -1 || // Bottomed out (predictors or responses known constant)
                  tree.node(cnid) instanceof UndecidedNode || // Or chopped off for depth
                  (tree.node(cnid) instanceof DecidedNode && // Or not possible to split
                   ((DecidedNode)tree.node(cnid))._split.col()==-1) )
                dn._nids[i] = new GBMLeafNode(tree,nid).nid(); // Mark a leaf here
            }
          }
        }
      } // -- k-trees are done
      // ESL2, page 387. Step 2b iii. Compute the gammas, and store them back
      // into the tree leaves. Includes learn_rate.
      // For classification (bernoulli):
      // gamma_i = sum res_i / sum p_i*(1 - p_i) where p_i = y_i - res_i
      // For classification (multinomial):
      // gamma_i_k = (nclass-1)/nclass * (sum res_i / sum (|res_i|*(1-|res_i|)))
      // For regression (gaussian):
      // gamma_i = sum res_i / count(res_i)
      GammaPass gp = new GammaPass(ktrees,leafs,_parms._distribution == GBMModel.GBMParameters.Family.bernoulli).doAll(_train);
      double m1class = _nclass > 1 && _parms._distribution != GBMModel.GBMParameters.Family.bernoulli ? (double)(_nclass-1)/_nclass : 1.0; // K-1/K for multinomial
      for( int k=0; k<_nclass; k++ ) {
        final DTree tree = ktrees[k];
        if( tree == null ) continue;
        for( int i=0; i<tree._len-leafs[k]; i++ ) {
          float gf = (float)(_parms._learn_rate * m1class * gp._rss[k][i] / gp._gss[k][i]);
          if( gp._gss[k][i]==0 ) // Bad split; all corrections sum to zero
            gf = (float)(Math.signum(gp._rss[k][i])*1e4);
          // In the multinomial case, check for very large values (which will get exponentiated later)
          // Note that gss can be *zero* while rss is non-zero - happens when some rows in the same
          // split are perfectly predicted true, and others perfectly predicted false.
          if( _parms._distribution == GBMModel.GBMParameters.Family.multinomial ) {
            if ( gf > 1e4 ) gf = 1e4f; // Cap prediction, will already overflow during Math.exp(gf)
            else if( gf < -1e4 ) gf = -1e4f;
          }
          assert !Float.isNaN(gf) && !Float.isInfinite(gf);
          ((LeafNode) tree.node(leafs[k] + i))._pred = gf;
        }
      }
      // ESL2, page 387. Step 2b iv. Cache the sum of all the trees, plus the
      // new tree, in the 'tree' columns. Also, zap the NIDs for next pass.
      // Tree <== f(Tree)
      // Nids <== 0
      new MRTask() {
        @Override public void map( Chunk chks[] ) {
          // For all tree/klasses
          for( int k=0; k<_nclass; k++ ) {
            final DTree tree = ktrees[k];
            if( tree == null ) continue;
            final Chunk nids = chk_nids(chks,k);
            final Chunk ct = chk_tree(chks,k);
            for( int row=0; row<nids._len; row++ ) {
              int nid = (int)nids.at8(row);
              if( nid < 0 ) continue;
              // Prediction stored in Leaf is cut to float to be deterministic in reconstructing
              // <tree_klazz> fields from tree prediction
              ct.set(row, (float)(ct.atd(row) + ((LeafNode)tree.node(nid))._pred));
              nids.set(row, 0);
            }
          }
        }
      }.doAll(_train);
      // Grow the model by K-trees
      _model._output.addKTrees(ktrees);
    }
    // ESL2, page 387. Step 2b iii.
    // Nids <== f(Nids)
    // One pass over the data that routes every row to its leaf and accumulates
    // the per-leaf numerator (sum of residuals) and denominator of gamma.
    private class GammaPass extends MRTask<GammaPass> {
      final DTree _trees[]; // Read-only, shared (except at the histograms in the Nodes)
      final int _leafs[]; // Number of active leaves (per tree)
      final boolean _isBernoulli;
      // Per leaf: sum(res);
      double _rss[/*tree/klass*/][/*tree-relative node-id*/];
      // Per leaf: multinomial: sum(|res|*1-|res|), gaussian: sum(1), bernoulli: sum((y-res)*(1-y+res))
      double _gss[/*tree/klass*/][/*tree-relative node-id*/];
      GammaPass(DTree trees[], int leafs[], boolean isBernoulli) { _leafs=leafs; _trees=trees; _isBernoulli = isBernoulli; }
      @Override public void map( Chunk[] chks ) {
        _gss = new double[_nclass][];
        _rss = new double[_nclass][];
        final Chunk resp = chk_resp(chks); // Response for this frame
        // For all tree/klasses
        for( int k=0; k<_nclass; k++ ) {
          final DTree tree = _trees[k];
          final int leaf = _leafs[k];
          if( tree == null ) continue; // Empty class is ignored
          // A leaf-biased array of all active Tree leaves.
          final double gs[] = _gss[k] = new double[tree._len-leaf];
          final double rs[] = _rss[k] = new double[tree._len-leaf];
          final Chunk nids = chk_nids(chks,k); // Node-ids for this tree/class
          final Chunk ress = chk_work(chks,k); // Residuals for this tree/class
          // If we have all constant responses, then we do not split even the
          // root and the residuals should be zero.
          if( tree.root() instanceof LeafNode ) continue;
          for( int row=0; row<nids._len; row++ ) { // For all rows
            int nid = (int)nids.at8(row); // Get Node to decide from
            if( nid < 0 ) continue; // Missing response
            if( tree.node(nid) instanceof UndecidedNode ) // If we bottomed out the tree
              nid = tree.node(nid)._pid; // Then take parent's decision
            DecidedNode dn = tree.decided(nid); // Must have a decision point
            if( dn._split._col == -1 ) // Unable to decide?
              dn = tree.decided(dn._pid); // Then take parent's decision
            int leafnid = dn.ns(chks,row); // Decide down to a leafnode
            assert leaf <= leafnid && leafnid < tree._len :
                "leaf: " + leaf + " leafnid: " + leafnid + " tree._len: " + tree._len + "\ndn: " + dn;
            assert tree.node(leafnid) instanceof LeafNode;
            // Note: I can see which leaf/region I end up in, but I do not care for
            // the prediction presented by the tree. For GBM, we compute the
            // sum-of-residuals (and sum/abs/mult residuals) for all rows in the
            // leaf, and get our prediction from that.
            nids.set(row, leafnid);
            assert !ress.isNA(row);
            // Compute numerator (rs) and denominator (gs) of gamma
            double w = hasWeights() ? chk_weight(chks).atd(row) : 1;
            double res = ress.atd(row);
            double ares = Math.abs(res);
            if( _isBernoulli ) {
              double prob = resp.atd(row) - res;
              gs[leafnid-leaf] += w*prob*(1-prob);
            } else
              gs[leafnid-leaf] += w*(_nclass > 1 ? ares*(1-ares) : 1);
            rs[leafnid-leaf] += w*res;
          }
        }
      }
      @Override public void reduce( GammaPass gp ) {
        // Element-wise merge of the per-leaf partial sums from another chunk.
        ArrayUtils.add(_gss,gp._gss);
        ArrayUtils.add(_rss,gp._rss);
      }
    }
    // Wrap the computed output (with train/valid MSE) into a new GBMModel.
    @Override protected GBMModel makeModel( Key modelKey, GBMModel.GBMParameters parms, double mse_train, double mse_valid ) {
      return new GBMModel(modelKey,parms,new GBMModel.GBMOutput(GBM.this,mse_train,mse_valid));
    }
  }
@Override protected DecidedNode makeDecided( UndecidedNode udn, DHistogram hs[] ) {
return new GBMDecidedNode(udn,hs);
}
// GBM DTree decision node: same as the normal DecidedNode, but
// specifies a decision algorithm given complete histograms on all
// columns. GBM algo: find the lowest error amongst *all* columns.
static class GBMDecidedNode extends DecidedNode {
GBMDecidedNode( UndecidedNode n, DHistogram[] hs ) { super(n,hs); }
@Override public UndecidedNode makeUndecidedNode(DHistogram[] hs ) {
return new GBMUndecidedNode(_tree,_nid,hs);
}
// Find the column with the best split (lowest score). Unlike RF, GBM
// scores on all columns and selects splits on all columns.
@Override public DTree.Split bestCol( UndecidedNode u, DHistogram[] hs ) {
DTree.Split best = new DTree.Split(-1,-1,null,(byte)0,Double.MAX_VALUE,Double.MAX_VALUE,Double.MAX_VALUE,0L,0L,0,0);
if( hs == null ) return best;
for( int i=0; i<hs.length; i++ ) {
if( hs[i]==null || hs[i].nbins() <= 1 ) continue;
DTree.Split s = hs[i].scoreMSE(i,_tree._min_rows);
if( s == null ) continue;
if( s.se() < best.se() )
best = s;
if( s.se() <= 0 ) break; // No point in looking further!
}
return best;
}
}
  // GBM DTree undecided node: same as the normal UndecidedNode, but specifies
  // a list of columns to score on now, and then decide over later.
  // GBM algo: use all columns
  static class GBMUndecidedNode extends UndecidedNode {
    GBMUndecidedNode( DTree tree, int pid, DHistogram hs[] ) { super(tree,pid,hs); }
    // Randomly select mtry columns to 'score' in following pass over the data.
    // In GBM, we use all columns (as opposed to RF, which uses a random subset).
    // Returning null presumably signals "score all columns" to the caller --
    // TODO(review): confirm against the UndecidedNode.scoreCols contract.
    @Override public int[] scoreCols( DHistogram[] hs ) { return null; }
  }
  // GBM DTree leaf node: carries the gamma prediction for rows landing here.
  static class GBMLeafNode extends LeafNode {
    GBMLeafNode( DTree tree, int pid ) { super(tree,pid); }
    GBMLeafNode( DTree tree, int pid, int nid ) { super(tree, pid, nid); }
    // Insert just the predictions: a single byte/short if we are predicting a
    // single class, or else the full distribution.
    // Serializes _pred as one 4-byte float; size() must match what compress() writes.
    @Override protected AutoBuffer compress(AutoBuffer ab) { assert !Double.isNaN(_pred); return ab.put4f(_pred); }
    @Override protected int size() { return 4; }
  }
  /**
   * Read the 'tree' columns, do model-specific math and put the results in the
   * fs[] array, and return the sum.  Dividing any fs[] element by the sum
   * turns the results into a probability distribution.
   *
   * NOTE(review): the {@code weight} parameter is not used in this method, and
   * the bernoulli branch does not add {@code offset} into the logit (unlike
   * ComputeProb's bernoulli path, which does) -- confirm this is intended.
   */
  @Override protected double score1( Chunk chks[], double weight, double offset, double fs[/*nclass*/], int row ) {
    if( _parms._distribution == GBMModel.GBMParameters.Family.bernoulli ) {
      // Logistic link: fs[1]=Prob_0, fs[2]=Prob_1; they already sum to 1.
      fs[1] = 1.0/(1.0+Math.exp(chk_tree(chks,0).atd(row)));
      fs[2] = 1.0-fs[1];
      return 1; // f2 = 1.0 - f1; so f1+f2 = 1.0
    }
    if( _nclass == 1 ) // Regression
      return fs[0]=chk_tree(chks,0).atd(row) + offset;
    if( _nclass == 2 ) { // The Boolean Optimization
      // This optimization assumes the 2nd tree of a 2-class system is the
      // inverse of the first. Fill in the missing tree
      fs[1] = Math.exp(chk_tree(chks,0).atd(row));
      fs[2] = 1.0/fs[1];
      return fs[1]+fs[2];
    }
    // Multinomial loss function; sum(exp(data)). Load tree data
    for( int k=0; k<_nclass; k++ )
      fs[k+1]=chk_tree(chks,k).atd(row);
    // Rescale to avoid Infinities; return sum(exp(data))
    return hex.genmodel.GenModel.log_rescale(fs);
  }
}
|
package org.cactoos.list;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
/**
* Test case for {@link Repeat}.
*
* @author Kirill (g4s8.public@gmail.com)
* @version $Id$
* @since 0.1
*/
public final class RepeatTest {

    /**
     * Every element produced by {@link Repeat} must equal the source element:
     * filtering for the element leaves the length unchanged.
     *
     * @throws Exception if failed
     */
    @Test
    public void allSameTest() throws Exception {
        final int total = 42;
        final int item = 11;
        MatcherAssert.assertThat(
            new LengthOfIterable(
                new FilteredIterable<>(
                    new Repeat<>(item, total),
                    input -> input == item
                )
            ).asValue(),
            Matchers.equalTo(total)
        );
    }

    /**
     * A zero-sized {@link Repeat} must produce an empty iterable.
     *
     * @throws Exception if failed
     */
    @Test
    public void emptyTest() throws Exception {
        MatcherAssert.assertThat(
            new LengthOfIterable(new Repeat<>(0, 0)).asValue(),
            Matchers.equalTo(0)
        );
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.