gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.bulatnig.smpp.session.impl;

import org.bulatnig.smpp.net.Connection;
import org.bulatnig.smpp.pdu.CommandId;
import org.bulatnig.smpp.pdu.CommandStatus;
import org.bulatnig.smpp.pdu.Pdu;
import org.bulatnig.smpp.pdu.PduException;
import org.bulatnig.smpp.pdu.impl.EnquireLink;
import org.bulatnig.smpp.pdu.impl.EnquireLinkResp;
import org.bulatnig.smpp.pdu.impl.Unbind;
import org.bulatnig.smpp.session.MessageListener;
import org.bulatnig.smpp.session.Session;
import org.bulatnig.smpp.session.State;
import org.bulatnig.smpp.session.StateListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Asynchronous session implementation.
 *
 * <p>Wraps a {@link Connection} and maintains an SMPP session on top of it:
 * a ping thread sends enquire_link requests when the link has been idle
 * longer than the configured timeout, a read thread dispatches incoming PDUs
 * to the {@link MessageListener}, and a reconnect thread rebinds the session
 * after an I/O or protocol failure.
 *
 * @author Bulat Nigmatullin
 */
public class BasicSession implements Session {

    private static final Logger logger = LoggerFactory.getLogger(BasicSession.class);

    /** Underlying transport; also used as the wait/notify monitor for response waits. */
    private final Connection conn;
    private int smscResponseTimeout = DEFAULT_SMSC_RESPONSE_TIMEOUT;
    private int pingTimeout = DEFAULT_ENQUIRE_LINK_TIMEOUT;
    private int reconnectTimeout = DEFAULT_RECONNECT_TIMEOUT;
    private MessageListener messageListener = new DefaultMessageListener();
    private StateListener stateListener = new DefaultStateListener();
    private PingThread pingThread;
    private ReadThread readThread;
    // Original bind request, kept so ReconnectThread can rebind with the same parameters.
    private Pdu bindPdu;
    private volatile long sequenceNumber = 0;
    // Timestamp of the last PDU received from the SMSC; drives enquire_link pacing.
    private volatile long lastActivity;
    private volatile State state = State.DISCONNECTED;

    public BasicSession(Connection conn) {
        this.conn = conn;
    }

    @Override
    public void setMessageListener(MessageListener messageListener) {
        this.messageListener = messageListener;
    }

    @Override
    public void setStateListener(StateListener stateListener) {
        this.stateListener = stateListener;
    }

    @Override
    public void setSmscResponseTimeout(int timeout) {
        this.smscResponseTimeout = timeout;
    }

    @Override
    public void setEnquireLinkTimeout(int timeout) {
        this.pingTimeout = timeout;
    }

    @Override
    public void setReconnectTimeout(int timeout) {
        this.reconnectTimeout = timeout;
    }

    /**
     * Opens the session with the given bind request.
     *
     * @param pdu bind request to send; retained for later reconnects
     * @return the bind response PDU
     * @throws PduException on PDU serialization/parsing failure
     * @throws IOException  on transport failure
     */
    @Override
    public synchronized Pdu open(Pdu pdu) throws PduException, IOException {
        bindPdu = pdu;
        return open();
    }

    /**
     * Returns the next sequence number, wrapping back to 1 after
     * 2147483647 (the SMPP sequence_number maximum).
     */
    @Override
    public synchronized long nextSequenceNumber() {
        if (sequenceNumber == 2147483647L)
            sequenceNumber = 1;
        else
            sequenceNumber++;
        return sequenceNumber;
    }

    /**
     * Sends a PDU if the session is connected.
     *
     * @return true if the PDU was written; false if the session is not
     *         connected or the write failed (a failed write triggers reconnect)
     */
    @Override
    public synchronized boolean send(Pdu pdu) throws PduException {
        if (State.CONNECTED != state)
            return false;
        try {
            conn.write(pdu);
            return true;
        } catch (IOException e) {
            logger.debug("Send failed.", e);
            reconnect(e);
            return false;
        }
    }

    /** Closes the session; a session in mid-reconnect is simply marked disconnected. */
    @Override
    public synchronized void close() {
        if (State.RECONNECTING == state || closeInternal(null))
            updateState(State.DISCONNECTED);
    }

    /**
     * Establishes the TCP connection, sends the stored bind request and waits
     * for the bind response. A single-shot scheduler force-closes the
     * connection if the response does not arrive within smscResponseTimeout,
     * which unblocks the conn.read() below.
     */
    private synchronized Pdu open() throws PduException, IOException {
        logger.trace("Opening new session...");
        conn.open();
        logger.trace("TCP connection established. Sending bind request.");
        bindPdu.setSequenceNumber(nextSequenceNumber());
        conn.write(bindPdu);
        ScheduledExecutorService es = Executors.newSingleThreadScheduledExecutor();
        es.schedule(new Runnable() {
            @Override
            public void run() {
                logger.warn("Bind response timed out.");
                conn.close();
            }
        }, smscResponseTimeout, TimeUnit.MILLISECONDS);
        logger.trace("Bind request sent. Waiting for bind response.");
        try {
            Pdu bindResp = conn.read();
            es.shutdownNow(); // response arrived in time; cancel the watchdog
            logger.trace("Bind response command status: {}.", bindResp.getCommandStatus());
            if (CommandStatus.ESME_ROK == bindResp.getCommandStatus()) {
                updateLastActivity();
                pingThread = new PingThread();
                pingThread.setName("Ping");
                pingThread.start();
                readThread = new ReadThread();
                Thread t2 = new Thread(readThread);
                t2.setName("Read");
                t2.start();
                updateState(State.CONNECTED);
                logger.trace("Session successfully opened.");
            }
            return bindResp;
        } finally {
            if (!es.isShutdown())
                es.shutdownNow();
        }
    }

    /**
     * Actually close session.
     *
     * Stops the ping thread, attempts a graceful unbind (skipped when the
     * close was caused by an I/O failure or the read thread already stopped),
     * then stops the read thread and closes the transport.
     *
     * @param reason exception, caused session close, or null
     * @return session closed
     */
    private synchronized boolean closeInternal(Exception reason) {
        if (State.DISCONNECTED != state) {
            logger.trace("Closing session...");
            pingThread.stopAndInterrupt();
            pingThread = null;
            if (!(reason instanceof IOException) && readThread.run) {
                try {
                    // Wait on conn for the unbind_resp; ReadThread notifies it.
                    synchronized (conn) {
                        Pdu unbind = new Unbind();
                        unbind.setSequenceNumber(nextSequenceNumber());
                        send(unbind);
                        conn.wait(smscResponseTimeout);
                    }
                } catch (Exception e) {
                    logger.debug("Unbind request send failed.", e);
                }
            }
            readThread.stop();
            readThread = null;
            conn.close();
            logger.trace("Session closed.");
            return true;
        } else {
            logger.trace("Session already closed.");
            return false;
        }
    }

    private void reconnect(Exception reason) {
        // only one thread should do reconnect
        boolean doReconnect = false;
        // NOTE(review): this synchronizes on the mutable 'state' field, which
        // references a shared State enum constant and is reassigned inside the
        // block — concurrent callers may lock different objects. A dedicated
        // final lock object would make the check-and-set reliably atomic;
        // confirm intent before changing.
        synchronized (state) {
            if (State.RECONNECTING != state) {
                doReconnect = true;
                state = State.RECONNECTING;
            }
        }
        if (doReconnect) {
            closeInternal(reason);
            new Thread(new ReconnectThread(reason)).start();
        }
    }

    private void updateLastActivity() {
        lastActivity = System.currentTimeMillis();
    }

    private void updateState(State newState) {
        updateState(newState, null);
    }

    private void updateState(State newState, Exception e) {
        this.state = newState;
        stateListener.changed(newState, e);
    }

    /**
     * Periodically checks link liveness: when no PDU has been received for
     * longer than pingTimeout, sends an enquire_link and waits (on conn) for
     * the response; if lastActivity does not advance, triggers a reconnect.
     */
    private class PingThread extends Thread {

        private volatile boolean run = true;

        @Override
        public void run() {
            logger.trace("Ping thread started.");
            try {
                while (run) {
                    logger.trace("Checking last activity.");
                    try {
                        Thread.sleep(pingTimeout);
                        if (pingTimeout < (System.currentTimeMillis() - lastActivity)) {
                            long prevLastActivity = lastActivity;
                            Pdu enquireLink = new EnquireLink();
                            enquireLink.setSequenceNumber(nextSequenceNumber());
                            send(enquireLink);
                            synchronized (conn) {
                                conn.wait(smscResponseTimeout);
                            }
                            // lastActivity unchanged => no response received.
                            if (run && lastActivity == prevLastActivity) {
                                reconnect(new IOException("Enquire link response not received. Session closed."));
                            }
                        }
                    } catch (InterruptedException e) {
                        // stopAndInterrupt() interrupts the sleep/wait; loop re-checks 'run'.
                        logger.trace("Ping thread interrupted.");
                    }
                }
            } catch (PduException e) {
                if (run) {
                    logger.warn("EnquireLink request failed.", e);
                    run = false;
                    reconnect(e);
                }
            } finally {
                logger.trace("Ping thread stopped.");
            }
        }

        void stopAndInterrupt() {
            run = false;
            interrupt();
        }
    }

    /**
     * Reads incoming PDUs: answers enquire_link, wakes waiters (on conn) for
     * enquire_link_resp/unbind_resp, forwards everything else to the
     * MessageListener. Any parse or I/O failure triggers a reconnect.
     */
    private class ReadThread implements Runnable {

        private volatile boolean run = true;

        @Override
        public void run() {
            logger.trace("Read thread started.");
            try {
                while (run) {
                    Pdu request = conn.read();
                    updateLastActivity();
                    Pdu response;
                    if (CommandId.ENQUIRE_LINK == request.getCommandId()) {
                        response = new EnquireLinkResp();
                        response.setSequenceNumber(request.getSequenceNumber());
                        send(response);
                    } else if (CommandId.ENQUIRE_LINK_RESP == request.getCommandId()) {
                        synchronized (conn) {
                            conn.notifyAll();
                        }
                    } else if (CommandId.UNBIND_RESP == request.getCommandId()) {
                        synchronized (conn) {
                            conn.notifyAll();
                        }
                        stop();
                    } else {
                        messageListener.received(request);
                    }
                }
            } catch (PduException e) {
                if (run) {
                    logger.warn("Incoming message parsing failed.", e);
                    run = false;
                    reconnect(e);
                }
            } catch (IOException e) {
                if (run) {
                    logger.warn("Reading IO failure.", e);
                    run = false;
                    reconnect(e);
                }
            } finally {
                logger.trace("Read thread stopped.");
            }
        }

        void stop() {
            run = false;
        }
    }

    /**
     * Re-opens the session in a loop (sleeping reconnectTimeout between
     * attempts) until a bind succeeds or the session leaves RECONNECTING.
     */
    private class ReconnectThread implements Runnable {

        private final Exception reason;

        private ReconnectThread(Exception reason) {
            this.reason = reason;
        }

        @Override
        public void run() {
            logger.debug("Reconnect started.");
            stateListener.changed(state, reason);
            boolean reconnectSuccessful = false;
            while (!reconnectSuccessful && state == State.RECONNECTING) {
                logger.debug("Reconnecting...");
                try {
                    Pdu bindResponse = open();
                    if (CommandStatus.ESME_ROK == bindResponse.getCommandStatus()) {
                        reconnectSuccessful = true;
                    } else {
                        logger.warn("Reconnect failed. Bind response error code: {}.", bindResponse.getCommandStatus());
                    }
                } catch (Exception e) {
                    logger.warn("Reconnect failed.", e);
                    try {
                        Thread.sleep(reconnectTimeout);
                    } catch (InterruptedException e1) {
                        logger.trace("Reconnect sleep interrupted.", e1);
                    }
                }
            }
            // NOTE(review): sets the field directly, so StateListener is not
            // notified of CONNECTED here (open() already called updateState).
            if (reconnectSuccessful)
                state = State.CONNECTED;
            logger.debug("Reconnect done.");
        }
    }
}
/*****************************************************************************
 * Copyright 2007-2015 DCA-FEEC-UNICAMP
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Contributors:
 *     Patricia Rocha de Toro, Elisa Calhau de Castro, Ricardo Ribeiro Gudwin
 *****************************************************************************/
package model;

import java.util.logging.Logger;
import util.Constants;

/**
 * Finite state machine tracking the contents (apple/M, nut/N, jewel/J) and
 * open/closed status of a {@link Cage}, implemented with the state pattern:
 * each enum constant overrides only the transitions legal in that state, and
 * unhandled events fall through to the no-op defaults.
 *
 * State naming: APPLE = one apple, NUT = one nut, JEWEL = one jewel,
 * MN = apple+nut, MJ = apple+jewel, NJ = nut+jewel, MNJ = all three.
 *
 * @author ecalhau
 */
public class CageFSM {

    private Cage cage;
    static Logger log = Logger.getLogger(CageFSM.class.getCanonicalName());

    private enum State {

        // Cage is open and empty: items can be added, or the cage closed.
        EMPTY_OPENED {
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_APPLE; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NUT; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_JEWEL; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = EMPTY_CLOSED; sendMessage(cfsm); }
        },
        // Cage is closed and empty.
        EMPTY_CLOSED {
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_APPLE; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NUT; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_JEWEL; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = EMPTY_OPENED; sendMessage(cfsm); }
        },
        // Single-item states: removing the item empties the cage; adding
        // another item moves to the corresponding two-item state.
        FULL_CLOSED_APPLE {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = EMPTY_CLOSED; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MN; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MJ; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_APPLE; sendMessage(cfsm); }
        },
        FULL_OPENED_APPLE {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = EMPTY_OPENED; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MN; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MJ; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_APPLE; sendMessage(cfsm); }
        },
        FULL_CLOSED_NUT {
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = EMPTY_CLOSED; sendMessage(cfsm); }
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MN; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NJ; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NUT; sendMessage(cfsm); }
        },
        FULL_OPENED_NUT {
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = EMPTY_OPENED; sendMessage(cfsm); }
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MN; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NJ; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NUT; sendMessage(cfsm); }
        },
        FULL_OPENED_JEWEL {
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = EMPTY_OPENED; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NJ; sendMessage(cfsm); }
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MJ; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_JEWEL; sendMessage(cfsm); }
        },
        FULL_CLOSED_JEWEL {
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = EMPTY_CLOSED; sendMessage(cfsm); }
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MJ; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NJ; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_JEWEL; sendMessage(cfsm); }
        },
        // Two-item states: removing one item drops to the remaining single-item
        // state; adding the third moves to MNJ.
        FULL_OPENED_MN {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NUT; sendMessage(cfsm); }
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_APPLE; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MNJ; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MN; sendMessage(cfsm); }
        },
        FULL_CLOSED_MN {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NUT; sendMessage(cfsm); }
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_APPLE; sendMessage(cfsm); }
            @Override void processADDJewelAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MNJ; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MN; sendMessage(cfsm); }
        },
        FULL_OPENED_MJ {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_JEWEL; sendMessage(cfsm); }
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_APPLE; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MNJ; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MJ; sendMessage(cfsm); }
        },
        FULL_CLOSED_MJ {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_JEWEL; sendMessage(cfsm); }
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_APPLE; sendMessage(cfsm); }
            @Override void processADDNPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MNJ; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MJ; sendMessage(cfsm); }
        },
        FULL_OPENED_NJ {
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_JEWEL; sendMessage(cfsm); }
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NUT; sendMessage(cfsm); }
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MNJ; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NJ; sendMessage(cfsm); }
        },
        FULL_CLOSED_NJ {
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_JEWEL; sendMessage(cfsm); }
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NUT; sendMessage(cfsm); }
            @Override void processADDPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MNJ; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NJ; sendMessage(cfsm); }
        },
        // Three-item states: removing any item drops to the two-item state
        // holding the other two.
        FULL_OPENED_MNJ {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_NJ; sendMessage(cfsm); }
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MJ; sendMessage(cfsm); }
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MN; sendMessage(cfsm); }
            @Override void processCloseAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MNJ; sendMessage(cfsm); }
        },
        FULL_CLOSED_MNJ {
            @Override void processDELPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_NJ; sendMessage(cfsm); }
            @Override void processDELNPFAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MJ; sendMessage(cfsm); }
            @Override void processDELJewelAction(CageFSM cfsm) { cfsm.state = FULL_CLOSED_MN; sendMessage(cfsm); }
            @Override void processOpenAction(CageFSM cfsm) { cfsm.state = FULL_OPENED_MNJ; sendMessage(cfsm); }
        };

        /**
         * Dispatches an event to the current state's handler, then pushes the
         * resulting state (as an int code) into the owning Cage.
         */
        public void exec(FSMEvent.CageFSMEvent ev, CageFSM cfsm) {
            log.info("State is: "+cfsm.getState());
            log.info("Received event: "+ev+" and changed state.");
            switch (ev) {
                case Add_PF_Action:
                    processADDPFAction(cfsm);
                    break;
                case Add_NPF_Action:
                    processADDNPFAction(cfsm);
                    break;
                case Add_J_Action:
                    processADDJewelAction(cfsm);
                    break;
                case Del_PF_Action:
                    processDELPFAction(cfsm);
                    break;
                case Del_NPF_Action:
                    processDELNPFAction(cfsm);
                    break;
                case Del_J_Action:
                    processDELJewelAction(cfsm);
                    break;
                case Open:
                    processOpenAction(cfsm);
                    break;
                case Close:
                    processCloseAction(cfsm);
                    break;
            }
            cfsm.getCage().setStatus(cfsm.getStateToInt());
        }

        /**
         * Override the next methods according to each state
         *
         * Defaults are no-ops (the event is illegal in the current state):
         * they log and report but leave the state unchanged.
         *
         * @param cfsm
         */
        void processADDPFAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }

        void processADDNPFAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }

        void processADDJewelAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }

        void processDELPFAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }

        void processDELNPFAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }

        void processDELJewelAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }

        void processOpenAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }

        void processCloseAction(CageFSM cfsm) { log.info("Do nothing"); sendMessage(cfsm); }
    }

    private CageFSM.State state = CageFSM.State.EMPTY_OPENED; //initial

    public CageFSM(Cage c) {
        this.cage = c;
    }

    public Cage getCage() {
        return cage;
    }

    public CageFSM.State getState() {
        return state;
    }

    // State instance "mapped" to an int code understood by other classes
    // (see util.Constants); returns -1 and logs severe for an unknown state.
    public int getStateToInt() {
        switch (state) {
            case EMPTY_OPENED:
                return Constants.EMPTY_OPENED;
            case EMPTY_CLOSED:
                return Constants.EMPTY_CLOSED;
            case FULL_CLOSED_APPLE:
                return Constants.FULL_CLOSED_APPLE;
            case FULL_OPENED_APPLE:
                return Constants.FULL_OPENED_APPLE;
            case FULL_CLOSED_NUT:
                return Constants.FULL_CLOSED_NUT;
            case FULL_OPENED_NUT:
                return Constants.FULL_OPENED_NUT;
            case FULL_OPENED_JEWEL:
                return Constants.FULL_OPENED_JEWEL;
            case FULL_CLOSED_JEWEL:
                return Constants.FULL_CLOSED_JEWEL;
            case FULL_OPENED_MNJ:
                return Constants.FULL_OPENED_MNJ;
            case FULL_CLOSED_MNJ:
                return Constants.FULL_CLOSED_MNJ;
            case FULL_OPENED_MN:
                return Constants.FULL_OPENED_MN;
            case FULL_CLOSED_MN:
                return Constants.FULL_CLOSED_MN;
            case FULL_OPENED_MJ:
                return Constants.FULL_OPENED_MJ;
            case FULL_CLOSED_MJ:
                return Constants.FULL_CLOSED_MJ;
            case FULL_OPENED_NJ:
                return Constants.FULL_OPENED_NJ;
            case FULL_CLOSED_NJ:
                return Constants.FULL_CLOSED_NJ;
            default:
                log.severe("Error in CageFSM::getStateToInt: state does not exist!!!");
                return -1;
        }
    }

    public void setState(CageFSM.State state) {
        this.state = state;
    }

    /** Entry point for external callers: feed one event into the FSM. */
    public void processEvent(FSMEvent.CageFSMEvent ev) {
        log.info("Event received: " + ev);
        this.state.exec(ev, this);
    }

    // Logs the state reached after a transition (or attempted transition).
    private static void sendMessage(CageFSM cfsm) {
        log.info("Current state is: " + cfsm.getState());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.jdbc.thin; import java.io.Serializable; import java.sql.DriverPropertyInfo; import java.sql.SQLException; import java.util.Arrays; import java.util.Properties; import java.util.StringTokenizer; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.configuration.ClientConnectorConfiguration; import org.apache.ignite.internal.processors.odbc.SqlStateCode; import org.apache.ignite.internal.processors.query.NestedTxMode; import org.apache.ignite.internal.util.HostAndPortRange; import org.apache.ignite.internal.util.typedef.F; import org.jetbrains.annotations.Nullable; /** * Holds JDBC connection properties. */ public class ConnectionPropertiesImpl implements ConnectionProperties, Serializable { /** */ private static final long serialVersionUID = 0L; /** Prefix for property names. */ public static final String PROP_PREFIX = "ignite.jdbc."; /** Default socket buffer size. */ private static final int DFLT_SOCK_BUFFER_SIZE = 64 * 1024; /** Property: schema. */ private static final String PROP_SCHEMA = "schema"; /** Connection URL. */ private String url; /** Addresses. */ private HostAndPortRange [] addrs; /** Schema name. Hidden property. 
Is used to set default schema name part of the URL. */ private StringProperty schema = new StringProperty(PROP_SCHEMA, "Schema name of the connection", "PUBLIC", null, false, null); /** Distributed joins property. */ private BooleanProperty distributedJoins = new BooleanProperty( "distributedJoins", "Enable distributed joins", false, false); /** Enforce join order property. */ private BooleanProperty enforceJoinOrder = new BooleanProperty( "enforceJoinOrder", "Enable enforce join order", false, false); /** Collocated property. */ private BooleanProperty collocated = new BooleanProperty( "collocated", "Enable collocated query", false, false); /** Replicated only property. */ private BooleanProperty replicatedOnly = new BooleanProperty( "replicatedOnly", "Specify if the all queries contain only replicated tables", false, false); /** Auto close server cursor property. */ private BooleanProperty autoCloseServerCursor = new BooleanProperty( "autoCloseServerCursor", "Enable auto close server cursors when last piece of result set is retrieved. " + "If the server-side cursor is already closed, you may get an exception when trying to call " + "`ResultSet.getMetadata()` method.", false, false); /** TCP no delay property. */ private BooleanProperty tcpNoDelay = new BooleanProperty( "tcpNoDelay", "TCP no delay flag", true, false); /** Lazy query execution property. */ private BooleanProperty lazy = new BooleanProperty( "lazy", "Enable lazy query execution", false, false); /** Socket send buffer size property. */ private IntegerProperty socketSendBuffer = new IntegerProperty( "socketSendBuffer", "Socket send buffer size", DFLT_SOCK_BUFFER_SIZE, false, 0, Integer.MAX_VALUE); /** Socket receive buffer size property. */ private IntegerProperty socketReceiveBuffer = new IntegerProperty( "socketReceiveBuffer", "Socket send buffer size", DFLT_SOCK_BUFFER_SIZE, false, 0, Integer.MAX_VALUE); /** Executes update queries on ignite server nodes flag. 
*/ private BooleanProperty skipReducerOnUpdate = new BooleanProperty( "skipReducerOnUpdate", "Enable execution update queries on ignite server nodes", false, false); /** Nested transactions handling strategy. */ private StringProperty nestedTxMode = new StringProperty( "nestedTransactionsMode", "Way to handle nested transactions", NestedTxMode.ERROR.name(), new String[] { NestedTxMode.COMMIT.name(), NestedTxMode.ERROR.name(), NestedTxMode.IGNORE.name() }, false, new PropertyValidator() { private static final long serialVersionUID = 0L; @Override public void validate(String mode) throws SQLException { if (!F.isEmpty(mode)) { try { NestedTxMode.valueOf(mode.toUpperCase()); } catch (IllegalArgumentException e) { throw new SQLException("Invalid nested transactions handling mode, allowed values: " + Arrays.toString(nestedTxMode.choices), SqlStateCode.CLIENT_CONNECTION_FAILED); } } } }); /** SSL: Use SSL connection to Ignite node. */ private StringProperty sslMode = new StringProperty("sslMode", "The SSL mode of the connection", SSL_MODE_DISABLE, new String[] {SSL_MODE_DISABLE, SSL_MODE_REQUIRE}, false, null); /** SSL: Client certificate key store url. */ private StringProperty sslProtocol = new StringProperty("sslProtocol", "SSL protocol name", null, null, false, null); /** SSL: Key algorithm name. */ private StringProperty sslKeyAlgorithm = new StringProperty("sslKeyAlgorithm", "SSL key algorithm name", "SunX509", null, false, null); /** SSL: Client certificate key store url. */ private StringProperty sslClientCertificateKeyStoreUrl = new StringProperty("sslClientCertificateKeyStoreUrl", "Client certificate key store URL", null, null, false, null); /** SSL: Client certificate key store password. */ private StringProperty sslClientCertificateKeyStorePassword = new StringProperty("sslClientCertificateKeyStorePassword", "Client certificate key store password", null, null, false, null); /** SSL: Client certificate key store type. 
*/ private StringProperty sslClientCertificateKeyStoreType = new StringProperty("sslClientCertificateKeyStoreType", "Client certificate key store type", null, null, false, null); /** SSL: Trusted certificate key store url. */ private StringProperty sslTrustCertificateKeyStoreUrl = new StringProperty("sslTrustCertificateKeyStoreUrl", "Trusted certificate key store URL", null, null, false, null); /** SSL Trusted certificate key store password. */ private StringProperty sslTrustCertificateKeyStorePassword = new StringProperty("sslTrustCertificateKeyStorePassword", "Trusted certificate key store password", null, null, false, null); /** SSL: Trusted certificate key store type. */ private StringProperty sslTrustCertificateKeyStoreType = new StringProperty("sslTrustCertificateKeyStoreType", "Trusted certificate key store type", null, null, false, null); /** SSL: Trust all certificates. */ private BooleanProperty sslTrustAll = new BooleanProperty("sslTrustAll", "Trust all certificates", false, false); /** SSL: Custom class name that implements Factory&lt;SSLSocketFactory&gt;. */ private StringProperty sslFactory = new StringProperty("sslFactory", "Custom class name that implements Factory<SSLSocketFactory>", null, null, false, null); /** User name to authenticate the client on the server side. */ private StringProperty user = new StringProperty( "user", "User name to authenticate the client on the server side", null, null, false, null); /** User's password. */ private StringProperty passwd = new StringProperty( "password", "User's password", null, null, false, null); /** Data page scan flag. */ private BooleanProperty dataPageScanEnabled = new BooleanProperty("dataPageScanEnabled", "Whether data page scan for queries is allowed. If not specified, server defines the default behaviour.", null, false); /** affinity awareness flag. 
*/
private BooleanProperty affinityAwareness = new BooleanProperty(
    "affinityAwareness",
    "Whether jdbc thin affinity awareness is enabled.", false, false);

/** Update batch size (the size of internal batches are used for INSERT/UPDATE/DELETE operation). */
private IntegerProperty updateBatchSize = new IntegerProperty("updateBatchSize",
    "Update bach size (the size of internal batches are used for INSERT/UPDATE/DELETE operation). " +
    "Set to 1 to prevent deadlock on update where keys sequence are different " +
    "in several concurrent updates.", null, false, 1, Integer.MAX_VALUE);

/** Properties array. Every property that should be read from the URL/Properties must be listed here. */
private final ConnectionProperty[] propsArray = {
    distributedJoins, enforceJoinOrder, collocated, replicatedOnly, autoCloseServerCursor,
    tcpNoDelay, lazy, socketSendBuffer, socketReceiveBuffer, skipReducerOnUpdate, nestedTxMode,
    sslMode, sslProtocol, sslKeyAlgorithm,
    sslClientCertificateKeyStoreUrl, sslClientCertificateKeyStorePassword, sslClientCertificateKeyStoreType,
    sslTrustCertificateKeyStoreUrl, sslTrustCertificateKeyStorePassword, sslTrustCertificateKeyStoreType,
    sslTrustAll, sslFactory,
    user, passwd,
    dataPageScanEnabled,
    affinityAwareness,
    updateBatchSize
};

/** {@inheritDoc} */
@Override public String getSchema() {
    return schema.value();
}

/** {@inheritDoc} */
@Override public void setSchema(String schema) {
    this.schema.setValue(schema);
}

/** {@inheritDoc} */
@Override public String getUrl() {
    // If an explicit URL was set, return it verbatim; otherwise rebuild one from addresses + schema.
    if (url != null)
        return url;
    else {
        if (F.isEmpty(getAddresses()))
            return null;

        StringBuilder sbUrl = new StringBuilder(JdbcThinUtils.URL_PREFIX);

        HostAndPortRange[] addrs = getAddresses();

        for (int i = 0; i < addrs.length; i++) {
            if (i > 0)
                sbUrl.append(',');

            sbUrl.append(addrs[i].toString());
        }

        if (!F.isEmpty(getSchema()))
            sbUrl.append('/').append(getSchema());

        return sbUrl.toString();
    }
}

/** {@inheritDoc} */
@Override public void setUrl(String url) throws SQLException {
    this.url = url;

    // Setting a URL re-initializes all connection properties from it.
    init(url, new Properties());
}

/** {@inheritDoc} */
@Override public HostAndPortRange[] getAddresses() {
    return addrs;
}

/** {@inheritDoc} */
@Override public void setAddresses(HostAndPortRange[] addrs) {
    this.addrs = addrs;
}

/** {@inheritDoc} */
@Override public boolean isDistributedJoins() {
    return distributedJoins.value();
}

/** {@inheritDoc} */
@Override public void setDistributedJoins(boolean val) {
    distributedJoins.setValue(val);
}

/** {@inheritDoc} */
@Override public boolean isEnforceJoinOrder() {
    return enforceJoinOrder.value();
}

/** {@inheritDoc} */
@Override public void setEnforceJoinOrder(boolean val) {
    enforceJoinOrder.setValue(val);
}

/** {@inheritDoc} */
@Override public boolean isCollocated() {
    return collocated.value();
}

/** {@inheritDoc} */
@Override public void setCollocated(boolean val) {
    collocated.setValue(val);
}

/** {@inheritDoc} */
@Override public boolean isReplicatedOnly() {
    return replicatedOnly.value();
}

/** {@inheritDoc} */
@Override public void setReplicatedOnly(boolean val) {
    replicatedOnly.setValue(val);
}

/** {@inheritDoc} */
@Override public boolean isAutoCloseServerCursor() {
    return autoCloseServerCursor.value();
}

/** {@inheritDoc} */
@Override public void setAutoCloseServerCursor(boolean val) {
    autoCloseServerCursor.setValue(val);
}

/** {@inheritDoc} */
@Override public int getSocketSendBuffer() {
    return socketSendBuffer.value();
}

/** {@inheritDoc} */
@Override public void setSocketSendBuffer(int size) throws SQLException {
    socketSendBuffer.setValue(size);
}

/** {@inheritDoc} */
@Override public int getSocketReceiveBuffer() {
    return socketReceiveBuffer.value();
}

/** {@inheritDoc} */
@Override public void setSocketReceiveBuffer(int size) throws SQLException {
    socketReceiveBuffer.setValue(size);
}

/** {@inheritDoc} */
@Override public boolean isTcpNoDelay() {
    return tcpNoDelay.value();
}

/** {@inheritDoc} */
@Override public void setTcpNoDelay(boolean val) {
    tcpNoDelay.setValue(val);
}

/** {@inheritDoc} */
@Override public boolean isLazy() {
    return lazy.value();
}

/** {@inheritDoc} */
@Override public void setLazy(boolean val) {
    lazy.setValue(val);
}

/** {@inheritDoc} */
@Override public boolean isSkipReducerOnUpdate() {
    return skipReducerOnUpdate.value();
}

/** {@inheritDoc} */
@Override public void setSkipReducerOnUpdate(boolean val) {
    skipReducerOnUpdate.setValue(val);
}

/** {@inheritDoc} */
@Override public String getSslMode() {
    return sslMode.value();
}

/** {@inheritDoc} */
@Override public void setSslMode(String mode) {
    sslMode.setValue(mode);
}

/** {@inheritDoc} */
@Override public String getSslProtocol() {
    return sslProtocol.value();
}

/** {@inheritDoc} */
@Override public void setSslProtocol(String sslProtocol) {
    this.sslProtocol.setValue(sslProtocol);
}

/** {@inheritDoc} */
@Override public String getSslKeyAlgorithm() {
    return sslKeyAlgorithm.value();
}

/** {@inheritDoc} */
@Override public void setSslKeyAlgorithm(String keyAlgorithm) {
    sslKeyAlgorithm.setValue(keyAlgorithm);
}

/** {@inheritDoc} */
@Override public String getSslClientCertificateKeyStoreUrl() {
    return sslClientCertificateKeyStoreUrl.value();
}

/** {@inheritDoc} */
@Override public void setSslClientCertificateKeyStoreUrl(String url) {
    sslClientCertificateKeyStoreUrl.setValue(url);
}

/** {@inheritDoc} */
@Override public String getSslClientCertificateKeyStorePassword() {
    return sslClientCertificateKeyStorePassword.value();
}

/** {@inheritDoc} */
@Override public void setSslClientCertificateKeyStorePassword(String passwd) {
    sslClientCertificateKeyStorePassword.setValue(passwd);
}

/** {@inheritDoc} */
@Override public String getSslClientCertificateKeyStoreType() {
    return sslClientCertificateKeyStoreType.value();
}

/** {@inheritDoc} */
@Override public void setSslClientCertificateKeyStoreType(String ksType) {
    sslClientCertificateKeyStoreType.setValue(ksType);
}

/** {@inheritDoc} */
@Override public String getSslTrustCertificateKeyStoreUrl() {
    return sslTrustCertificateKeyStoreUrl.value();
}

/** {@inheritDoc} */
@Override public void setSslTrustCertificateKeyStoreUrl(String url) {
    sslTrustCertificateKeyStoreUrl.setValue(url);
}

/** {@inheritDoc} */
@Override public String getSslTrustCertificateKeyStorePassword() {
    return sslTrustCertificateKeyStorePassword.value();
}

/** {@inheritDoc} */
@Override public void setSslTrustCertificateKeyStorePassword(String passwd) {
    sslTrustCertificateKeyStorePassword.setValue(passwd);
}

/** {@inheritDoc} */
@Override public String getSslTrustCertificateKeyStoreType() {
    return sslTrustCertificateKeyStoreType.value();
}

/** {@inheritDoc} */
@Override public void setSslTrustCertificateKeyStoreType(String ksType) {
    sslTrustCertificateKeyStoreType.setValue(ksType);
}

/** {@inheritDoc} */
@Override public boolean isSslTrustAll() {
    return sslTrustAll.value();
}

/** {@inheritDoc} */
@Override public void setSslTrustAll(boolean trustAll) {
    this.sslTrustAll.setValue(trustAll);
}

/** {@inheritDoc} */
@Override public String getSslFactory() {
    return sslFactory.value();
}

/** {@inheritDoc} */
@Override public void setSslFactory(String sslFactory) {
    this.sslFactory.setValue(sslFactory);
}

/** {@inheritDoc} */
@Override public String nestedTxMode() {
    return nestedTxMode.value();
}

/** {@inheritDoc} */
@Override public void nestedTxMode(String val) {
    nestedTxMode.setValue(val);
}

/** {@inheritDoc} */
@Override public void setUsername(String name) {
    user.setValue(name);
}

/** {@inheritDoc} */
@Override public String getUsername() {
    return user.value();
}

/** {@inheritDoc} */
@Override public void setPassword(String passwd) {
    this.passwd.setValue(passwd);
}

/** {@inheritDoc} */
@Override public String getPassword() {
    return passwd.value();
}

/** {@inheritDoc} */
@Override public @Nullable Boolean isDataPageScanEnabled() {
    return dataPageScanEnabled.value();
}

/** {@inheritDoc} */
@Override public void setDataPageScanEnabled(@Nullable Boolean dataPageScanEnabled) {
    this.dataPageScanEnabled.setValue(dataPageScanEnabled);
}

/** {@inheritDoc} */
@Override public boolean isAffinityAwareness() {
    return affinityAwareness.value();
}

/** {@inheritDoc} */
@Override public void setAffinityAwareness(boolean affinityAwareness) {
    this.affinityAwareness.setValue(affinityAwareness);
}

/** {@inheritDoc} */
@Override public @Nullable Integer getUpdateBatchSize() {
    return updateBatchSize.value();
}

/** {@inheritDoc} */
@Override public void setUpdateBatchSize(@Nullable Integer updateBatchSize) throws SQLException {
    this.updateBatchSize.setValue(updateBatchSize);
}

/**
 * Initializes all connection properties from a URL and/or environment properties.
 * URL-derived values are merged into a clone of {@code props}, then each registered
 * property reads (and consumes) its value from the merged set.
 *
 * @param url URL connection.
 * @param props Environment properties.
 * @throws SQLException On error.
 */
public void init(String url, Properties props) throws SQLException {
    // Clone so that URL parameters do not leak back into the caller's Properties.
    Properties props0 = (Properties)props.clone();

    if (!F.isEmpty(url))
        parseUrl(url, props0);

    for (ConnectionProperty aPropsArray : propsArray)
        aPropsArray.init(props0);

    // Explicit "user"/"password" entries in the original props override URL-derived credentials.
    if (!F.isEmpty(props.getProperty("user"))) {
        setUsername(props.getProperty("user"));
        setPassword(props.getProperty("password"));
    }
}

/**
 * Validates and parses connection URL.
 *
 * @param url URL.
 * @param props Properties.
 * @throws SQLException On error.
 */
private void parseUrl(String url, Properties props) throws SQLException {
    if (F.isEmpty(url))
        throw new SQLException("URL cannot be null or empty.");

    if (!url.startsWith(JdbcThinUtils.URL_PREFIX))
        throw new SQLException("URL must start with \"" + JdbcThinUtils.URL_PREFIX + "\"");

    String nakedUrl = url.substring(JdbcThinUtils.URL_PREFIX.length()).trim();

    parseUrl0(nakedUrl, props);
}

/**
 * Parse naked URL (i.e. without {@link JdbcThinUtils#URL_PREFIX}).
 * Decides between the semicolon-delimited form ("host;key=val;...") and the
 * query form ("host[/schema]?key=val&...") by which special character occurs first.
 *
 * @param url Naked URL.
 * @param props Properties.
 * @throws SQLException If failed.
 */
private void parseUrl0(String url, Properties props) throws SQLException {
    // Determine mode - semicolon or ampersand.
    int semicolonPos = url.indexOf(";");
    int slashPos = url.indexOf("/");
    int queryPos = url.indexOf("?");

    boolean semicolonMode;

    if (semicolonPos == -1 && slashPos == -1 && queryPos == -1)
        // No special char -> any mode could be used, choose semicolon for simplicity.
        semicolonMode = true;
    else {
        if (semicolonPos != -1) {
            // Use semicolon mode if it appears earlier than slash or query.
            semicolonMode =
                (slashPos == -1 || semicolonPos < slashPos) && (queryPos == -1 || semicolonPos < queryPos);
        }
        else
            // Semicolon is not found.
            semicolonMode = false;
    }

    if (semicolonMode)
        parseUrlWithSemicolon(url, props);
    else
        parseUrlWithQuery(url, props);
}

/**
 * Parse URL in semicolon mode.
 *
 * @param url Naked URL
 * @param props Properties.
 * @throws SQLException If failed.
 */
private void parseUrlWithSemicolon(String url, Properties props) throws SQLException {
    int pathPartEndPos = url.indexOf(';');

    if (pathPartEndPos == -1)
        pathPartEndPos = url.length();

    String pathPart = url.substring(0, pathPartEndPos);

    String paramPart = null;

    if (pathPartEndPos > 0 && pathPartEndPos < url.length())
        paramPart = url.substring(pathPartEndPos + 1, url.length());

    parseEndpoints(pathPart);

    if (!F.isEmpty(paramPart))
        parseParameters(paramPart, props, ";");
}

/**
 * Parse URL in query mode.
 *
 * @param url Naked URL
 * @param props Properties.
 * @throws SQLException If failed.
 */
private void parseUrlWithQuery(String url, Properties props) throws SQLException {
    int pathPartEndPos = url.indexOf('?');

    if (pathPartEndPos == -1)
        pathPartEndPos = url.length();

    String pathPart = url.substring(0, pathPartEndPos);

    String paramPart = null;

    if (pathPartEndPos > 0 && pathPartEndPos < url.length())
        paramPart = url.substring(pathPartEndPos + 1, url.length());

    String[] pathParts = pathPart.split("/");

    parseEndpoints(pathParts[0]);

    // Path may carry at most one extra segment: the schema name.
    if (pathParts.length > 2) {
        throw new SQLException("Invalid URL format (only schema name is allowed in URL path parameter " +
            "'host:port[/schemaName]'): " + this.url, SqlStateCode.CLIENT_CONNECTION_FAILED);
    }

    setSchema(pathParts.length == 2 ? pathParts[1] : null);

    if (!F.isEmpty(paramPart))
        parseParameters(paramPart, props, "&");
}

/**
 * Parse endpoints (comma-separated list of host[:portRangeFrom[..portRangeTo]]).
 *
 * @param endpointStr Endpoint string.
 * @throws SQLException If failed.
 */
private void parseEndpoints(String endpointStr) throws SQLException {
    String[] endpoints = endpointStr.split(",");

    if (endpoints.length > 0)
        addrs = new HostAndPortRange[endpoints.length];

    for (int i = 0; i < endpoints.length; ++i) {
        try {
            addrs[i] = HostAndPortRange.parse(endpoints[i],
                ClientConnectorConfiguration.DFLT_PORT, ClientConnectorConfiguration.DFLT_PORT,
                "Invalid endpoint format (should be \"host[:portRangeFrom[..portRangeTo]]\")");
        }
        catch (IgniteCheckedException e) {
            throw new SQLException(e.getMessage(), SqlStateCode.CLIENT_CONNECTION_FAILED, e);
        }
    }

    if (F.isEmpty(addrs) || F.isEmpty(addrs[0].host()))
        throw new SQLException("Host name is empty", SqlStateCode.CLIENT_CONNECTION_FAILED);
}

/**
 * Validates and parses URL parameters. Values wrapped in braces ({...}) may contain
 * the delimiter character; the brace content is re-joined across tokens.
 *
 * @param paramStr Parameters string.
 * @param props Properties.
 * @param delimChar Delimiter character.
 * @throws SQLException If failed.
 */
private void parseParameters(String paramStr, Properties props, String delimChar) throws SQLException {
    StringTokenizer st = new StringTokenizer(paramStr, delimChar);

    boolean insideBrace = false;

    String key = null;
    String val = null;

    while (st.hasMoreTokens()) {
        String token = st.nextToken();

        if (!insideBrace) {
            int eqSymPos = token.indexOf('=');

            if (eqSymPos < 0) {
                throw new SQLException("Invalid parameter format " +
                    "(should be \"key1=val1" + delimChar + "key2=val2" + delimChar + "...\"): " + token);
            }

            // NOTE(review): indexOf() never returns token.length(), so this branch looks
            // unreachable; the empty-value case is caught by the isEmpty() check below. Confirm.
            if (eqSymPos == token.length())
                throw new SQLException("Invalid parameter format (key and value cannot be empty): " + token);

            key = token.substring(0, eqSymPos);
            val = token.substring(eqSymPos + 1, token.length());

            if (val.startsWith("{")) {
                val = val.substring(1);

                insideBrace = true;
            }
        }
        else
            val += delimChar + token;

        if (val.endsWith("}")) {
            insideBrace = false;

            val = val.substring(0, val.length() - 1);
        }

        if (val.contains("{") || val.contains("}")) {
            throw new SQLException("Braces cannot be escaped in the value. " +
                "Please use the connection Properties for such values. [property=" + key + ']');
        }

        if (!insideBrace) {
            if (key.isEmpty() || val.isEmpty())
                throw new SQLException("Invalid parameter format (key and value cannot be empty): " + token);

            if (PROP_SCHEMA.equalsIgnoreCase(key))
                setSchema(val);
            else
                props.setProperty(PROP_PREFIX + key, val);
        }
    }
}

/**
 * @return Driver's properties info array.
 */
public DriverPropertyInfo[] getDriverPropertyInfo() {
    DriverPropertyInfo[] infos = new DriverPropertyInfo[propsArray.length];

    for (int i = 0; i < propsArray.length; ++i)
        infos[i] = propsArray[i].getDriverPropertyInfo();

    return infos;
}

/**
 * @return Properties set contains connection parameters.
 */
public Properties storeToProperties() {
    Properties props = new Properties();

    for (ConnectionProperty prop : propsArray) {
        if (prop.valueObject() != null)
            props.setProperty(PROP_PREFIX + prop.getName(), prop.valueObject());
    }

    return props;
}

/**
 * Validates the string form of a property value before it is parsed.
 */
private interface PropertyValidator extends Serializable {
    /**
     * @param val String representation of the property value to validate.
     * @throws SQLException On validation fails.
     */
    void validate(String val) throws SQLException;
}

/**
 * Base class for a single typed connection property: holds name, description,
 * default, allowed choices, required flag and an optional validator.
 */
private abstract static class ConnectionProperty implements Serializable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Name. */
    protected String name;

    /** Property description. */
    protected String desc;

    /** Default value. */
    protected Object dfltVal;

    /**
     * An array of possible values if the value may be selected
     * from a particular set of values; otherwise null.
     */
    protected String[] choices;

    /** Required flag. */
    protected boolean required;

    /** Property validator. */
    protected PropertyValidator validator;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param choices Possible values.
     * @param required {@code true} if the property is required.
     */
    ConnectionProperty(String name, String desc, Object dfltVal, String[] choices, boolean required) {
        this.name = name;
        this.desc = desc;
        this.dfltVal = dfltVal;
        this.choices = choices;
        this.required = required;
    }

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param choices Possible values.
     * @param required {@code true} if the property is required.
     * @param validator Property validator.
     */
    ConnectionProperty(String name, String desc, Object dfltVal, String[] choices, boolean required,
        PropertyValidator validator) {
        this.name = name;
        this.desc = desc;
        this.dfltVal = dfltVal;
        this.choices = choices;
        this.required = required;
        this.validator = validator;
    }

    /**
     * @return Default value.
     */
    Object getDfltVal() {
        return dfltVal;
    }

    /**
     * @return Property name.
     */
    String getName() {
        return name;
    }

    /**
     * @return Array of possible values if the value may be selected
     * from a particular set of values; otherwise null
     */
    String[] choices() {
        return choices;
    }

    /**
     * Reads this property's value out of {@code props} (under PROP_PREFIX + name),
     * validates it and initializes the typed value.
     *
     * @param props Properties.
     * @throws SQLException On error.
     */
    void init(Properties props) throws SQLException {
        String strVal = props.getProperty(PROP_PREFIX + name);

        if (required && strVal == null) {
            throw new SQLException("Property '" + name + "' is required but not defined",
                SqlStateCode.CLIENT_CONNECTION_FAILED);
        }

        if (validator != null)
            validator.validate(strVal);

        checkChoices(strVal);

        // NOTE(review): the value was read under PROP_PREFIX + name, but it is removed
        // under the bare name here, so the prefixed entry stays in props — confirm intent.
        props.remove(name);

        init(strVal);
    }

    /**
     * @param strVal Checked value.
     * @throws SQLException On check error.
     */
    protected void checkChoices(String strVal) throws SQLException {
        if (strVal == null)
            return;

        if (choices != null) {
            for (String ch : choices) {
                if (ch.equalsIgnoreCase(strVal))
                    return;
            }

            throw new SQLException("Invalid property value. [name=" + name + ", val=" + strVal
                + ", choices=" + Arrays.toString(choices) + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
        }
    }

    /**
     * @param str String representation of the property value (null means "use default").
     * @throws SQLException on error.
     */
    abstract void init(String str) throws SQLException;

    /**
     * @return String representation of the property value.
     */
    abstract String valueObject();

    /**
     * @return JDBC property info object.
     */
    DriverPropertyInfo getDriverPropertyInfo() {
        DriverPropertyInfo dpi = new DriverPropertyInfo(name, valueObject());

        dpi.choices = choices();
        dpi.required = required;
        dpi.description = desc;

        return dpi;
    }
}

/**
 * Boolean-valued connection property ("true"/"false", case-insensitive).
 */
private static class BooleanProperty extends ConnectionProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /** Bool choices. */
    private static final String[] boolChoices = new String[] {Boolean.TRUE.toString(), Boolean.FALSE.toString()};

    /** Value. */
    private Boolean val;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param required {@code true} if the property is required.
     */
    BooleanProperty(String name, String desc, @Nullable Boolean dfltVal, boolean required) {
        super(name, desc, dfltVal, boolChoices, required);

        val = dfltVal;
    }

    /**
     * @return Property value.
     */
    @Nullable Boolean value() {
        return val;
    }

    /** {@inheritDoc} */
    @Override void init(String str) throws SQLException {
        if (str == null)
            val = (Boolean)dfltVal;
        else {
            if (Boolean.TRUE.toString().equalsIgnoreCase(str))
                val = true;
            else if (Boolean.FALSE.toString().equalsIgnoreCase(str))
                val = false;
            else
                throw new SQLException("Failed to parse boolean property [name=" + name +
                    ", value=" + str + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
        }
    }

    /** {@inheritDoc} */
    @Override String valueObject() {
        if (val == null)
            return null;

        return Boolean.toString(val);
    }

    /**
     * @param val Property value to set.
     */
    void setValue(Boolean val) {
        this.val = val;
    }
}

/**
 * Numeric connection property with an inclusive [min, max] range check.
 */
private abstract static class NumberProperty extends ConnectionProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /** Value. */
    protected Number val;

    /** Allowed value range. */
    private Number[] range;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param required {@code true} if the property is required.
     * @param min Lower bound of allowed range.
     * @param max Upper bound of allowed range.
     */
    NumberProperty(String name, String desc, Number dfltVal, boolean required, Number min, Number max) {
        super(name, desc, dfltVal, null, required);

        val = dfltVal;

        range = new Number[] {min, max};
    }

    /** {@inheritDoc} */
    @Override void init(String str) throws SQLException {
        if (str == null)
            // NOTE(review): (int)dfltVal assumes the default is always an Integer — confirm
            // that no subclass passes another Number type as default.
            val = dfltVal != null ? (int)dfltVal : null;
        else {
            try {
                setValue(parse(str));
            }
            catch (NumberFormatException e) {
                throw new SQLException("Failed to parse int property [name=" + name +
                    ", value=" + str + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
            }
        }
    }

    /**
     * @param str String value.
     * @return Number value.
     * @throws NumberFormatException On parse error.
     */
    protected abstract Number parse(String str) throws NumberFormatException;

    /** {@inheritDoc} */
    @Override String valueObject() {
        return val != null ? String.valueOf(val) : null;
    }

    /**
     * @param val Property value.
     * @throws SQLException On error.
     */
    void setValue(Number val) throws SQLException {
        if (range != null) {
            if (val.doubleValue() < range[0].doubleValue()) {
                throw new SQLException("Property cannot be lower than " + range[0].toString() + " [name=" +
                    name + ", value=" + val.toString() + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
            }

            if (val.doubleValue() > range[1].doubleValue()) {
                throw new SQLException("Property cannot be upper than " + range[1].toString() + " [name=" +
                    name + ", value=" + val.toString() + ']', SqlStateCode.CLIENT_CONNECTION_FAILED);
            }
        }

        this.val = val;
    }
}

/**
 * Integer-valued connection property.
 */
private static class IntegerProperty extends NumberProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param required {@code true} if the property is required.
     * @param min Lower bound of allowed range.
     * @param max Upper bound of allowed range.
     */
    IntegerProperty(String name, String desc, Number dfltVal, boolean required, int min, int max) {
        super(name, desc, dfltVal, required, min, max);
    }

    /** {@inheritDoc} */
    @Override protected Number parse(String str) throws NumberFormatException {
        return Integer.parseInt(str);
    }

    /**
     * @return Property value.
     */
    Integer value() {
        return val != null ? val.intValue() : null;
    }
}

/**
 * String-valued connection property with optional validator.
 */
private static class StringProperty extends ConnectionProperty {
    /** */
    private static final long serialVersionUID = 0L;

    /** Value */
    private String val;

    /**
     * @param name Name.
     * @param desc Description.
     * @param dfltVal Default value.
     * @param choices Possible values.
     * @param required {@code true} if the property is required.
     * @param validator Property value validator.
     */
    StringProperty(String name, String desc, String dfltVal, String[] choices, boolean required,
        PropertyValidator validator) {
        super(name, desc, dfltVal, choices, required, validator);

        val = dfltVal;
    }

    /**
     * @param val Property value.
     */
    void setValue(String val) {
        this.val = val;
    }

    /**
     * @return Property value.
     */
    String value() {
        return val;
    }

    /** {@inheritDoc} */
    @Override void init(String str) throws SQLException {
        if (validator != null)
            validator.validate(str);

        if (str == null)
            val = (String)dfltVal;
        else
            val = str;
    }

    /** {@inheritDoc} */
    @Override String valueObject() {
        return val;
    }
}
}
/* * Copyright LWJGL. All rights reserved. * License terms: http://lwjgl.org/license.php */ package util; import org.joml.Vector3f; import org.lwjgl.BufferUtils; import java.io.*; import java.nio.ByteBuffer; import java.nio.FloatBuffer; import java.util.ArrayList; import java.util.List; import java.util.zip.ZipInputStream; /** * A simple Wavefront obj file loader. * <p> * Does not load material files. * * @author Kai Burjack */ public class WavefrontMeshLoader { public static class Mesh { public FloatBuffer positions; public FloatBuffer normals; public int numVertices; public float boundingSphereRadius; public List<MeshObject> objects = new ArrayList<MeshObject>(); } private static class WavefrontInfo { int numberOfVertices; int numberOfFaces; int numberOfNormals; } public class MeshObject { public String name; public int first; public int count; public Vector3f min = new Vector3f(Float.MAX_VALUE, Float.MAX_VALUE, Float.MAX_VALUE); public Vector3f max = new Vector3f(Float.MIN_VALUE, Float.MIN_VALUE, Float.MIN_VALUE); public String toString() { return name + "(" + min + " " + max + ")"; } } private boolean fourComponentPosition; public WavefrontMeshLoader() { } public boolean isFourComponentPosition() { return fourComponentPosition; } public void setFourComponentPosition(boolean fourComponentPosition) { this.fourComponentPosition = fourComponentPosition; } private static WavefrontInfo getInfo(BufferedReader reader) throws IOException { String line = ""; WavefrontInfo info = new WavefrontInfo(); while (true) { line = reader.readLine(); if (line == null) { break; } if (line.startsWith("v ")) { info.numberOfVertices++; } else if (line.startsWith("f ")) { info.numberOfFaces++; } else if (line.startsWith("vn ")) { info.numberOfNormals++; } } return info; } private static byte[] readSingleFileZip(String zipResource) throws IOException { ZipInputStream zipStream = new ZipInputStream(WavefrontMeshLoader.class.getClassLoader().getResourceAsStream( zipResource)); 
zipStream.getNextEntry(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int read = 0; while ((read = zipStream.read(buffer)) > 0) { baos.write(buffer, 0, read); } zipStream.close(); return baos.toByteArray(); } public Mesh loadMesh(String resource) throws IOException { byte[] arr = readSingleFileZip(resource); WavefrontInfo info = getInfo(new BufferedReader(new InputStreamReader(new ByteArrayInputStream(arr)))); // Allocate buffers for all vertices/normal ByteBuffer positionByteBuffer = BufferUtils.createByteBuffer(3 * info.numberOfVertices * 4); ByteBuffer normalByteBuffer = BufferUtils.createByteBuffer(3 * info.numberOfNormals * 4); FloatBuffer positions = positionByteBuffer.asFloatBuffer(); FloatBuffer normals = normalByteBuffer.asFloatBuffer(); // Allocate buffers for the actual face vertices/normals ByteBuffer positionDataByteBuffer = BufferUtils.createByteBuffer((fourComponentPosition ? 4 : 3) * 3 * info.numberOfFaces * 4); ByteBuffer normalDataByteBuffer = BufferUtils.createByteBuffer(3 * 3 * info.numberOfFaces * 4); FloatBuffer positionData = positionDataByteBuffer.asFloatBuffer(); FloatBuffer normalData = normalDataByteBuffer.asFloatBuffer(); Mesh mesh = new Mesh(); MeshObject object = null; float minX = 1E38f, minY = 1E38f, minZ = 1E38f; float maxX = -1E38f, maxY = -1E38f, maxZ = -1E38f; BufferedReader reader = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(arr))); String line; int faceIndex = 0; Vector3f tmp = new Vector3f(); while ((line = reader.readLine()) != null) { if (line.startsWith("o ")) { String name = line.substring(2); object = new MeshObject(); object.name = name; object.first = faceIndex; mesh.objects.add(object); } else if (line.startsWith("vn ")) { String[] ns = line.split(" +"); float x = Float.parseFloat(ns[1]); float y = Float.parseFloat(ns[2]); float z = Float.parseFloat(ns[3]); normals.put(x).put(y).put(z); } else if (line.startsWith("v ")) { String[] vs = line.split(" 
+"); float x = Float.parseFloat(vs[1]); float y = Float.parseFloat(vs[2]); float z = Float.parseFloat(vs[3]); positions.put(x).put(y).put(z); } else if (line.startsWith("f")) { String[] fs = line.split(" +"); String[] f1 = fs[1].split("/"); String[] f2 = fs[2].split("/"); String[] f3 = fs[3].split("/"); int v1 = Integer.parseInt(f1[0]); int v2 = Integer.parseInt(f2[0]); int v3 = Integer.parseInt(f3[0]); int n1 = Integer.parseInt(f1[2]); int n2 = Integer.parseInt(f2[2]); int n3 = Integer.parseInt(f3[2]); float ver1X = positions.get(3 * (v1 - 1) + 0); float ver1Y = positions.get(3 * (v1 - 1) + 1); float ver1Z = positions.get(3 * (v1 - 1) + 2); minX = minX < ver1X ? minX : ver1X; minY = minY < ver1Y ? minY : ver1Y; minZ = minZ < ver1Z ? minZ : ver1Z; maxX = maxX > ver1X ? maxX : ver1X; maxY = maxY > ver1Y ? maxY : ver1Y; maxZ = maxZ > ver1Z ? maxZ : ver1Z; tmp.set(ver1X, ver1Y, ver1Z); if (object != null) { object.min.min(tmp); object.max.max(tmp); } float ver2X = positions.get(3 * (v2 - 1) + 0); float ver2Y = positions.get(3 * (v2 - 1) + 1); float ver2Z = positions.get(3 * (v2 - 1) + 2); minX = minX < ver2X ? minX : ver2X; minY = minY < ver2Y ? minY : ver2Y; minZ = minZ < ver2Z ? minZ : ver2Z; maxX = maxX > ver2X ? maxX : ver2X; maxY = maxY > ver2Y ? maxY : ver2Y; maxZ = maxZ > ver2Z ? maxZ : ver2Z; tmp.set(ver2X, ver2Y, ver2Z); if (object != null) { object.min.min(tmp); object.max.max(tmp); } float ver3X = positions.get(3 * (v3 - 1) + 0); float ver3Y = positions.get(3 * (v3 - 1) + 1); float ver3Z = positions.get(3 * (v3 - 1) + 2); minX = minX < ver3X ? minX : ver3X; minY = minY < ver3Y ? minY : ver3Y; minZ = minZ < ver3Z ? minZ : ver3Z; maxX = maxX > ver3X ? maxX : ver3X; maxY = maxY > ver3Y ? maxY : ver3Y; maxZ = maxZ > ver3Z ? 
maxZ : ver3Z; tmp.set(ver3X, ver3Y, ver3Z); if (object != null) { object.min.min(tmp); object.max.max(tmp); } positionData.put(ver1X).put(ver1Y).put(ver1Z); if (fourComponentPosition) { positionData.put(1.0f); } positionData.put(ver2X).put(ver2Y).put(ver2Z); if (fourComponentPosition) { positionData.put(1.0f); } positionData.put(ver3X).put(ver3Y).put(ver3Z); if (fourComponentPosition) { positionData.put(1.0f); } float norm1X = normals.get(3 * (n1 - 1) + 0); float norm1Y = normals.get(3 * (n1 - 1) + 1); float norm1Z = normals.get(3 * (n1 - 1) + 2); float norm2X = normals.get(3 * (n2 - 1) + 0); float norm2Y = normals.get(3 * (n2 - 1) + 1); float norm2Z = normals.get(3 * (n2 - 1) + 2); float norm3X = normals.get(3 * (n3 - 1) + 0); float norm3Y = normals.get(3 * (n3 - 1) + 1); float norm3Z = normals.get(3 * (n3 - 1) + 2); normalData.put(norm1X).put(norm1Y).put(norm1Z); normalData.put(norm2X).put(norm2Y).put(norm2Z); normalData.put(norm3X).put(norm3Y).put(norm3Z); faceIndex++; if (object != null) { object.count++; } } } if (mesh.objects.isEmpty()) { object = new MeshObject(); object.count = faceIndex; mesh.objects.add(object); } positionData.flip(); normalData.flip(); mesh.boundingSphereRadius = Math.max(maxX - minX, Math.max(maxY - minY, maxZ - minZ)) * 0.5f; mesh.positions = positionData; mesh.normals = normalData; mesh.numVertices = positionData.limit() / (fourComponentPosition ? 4 : 3); return mesh; } }
/*
 * Copyright 2014 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.04.07 at 04:43:56 PM EDT
//
// NOTE(review): generated JAXB binding class — do not hand-edit logic here;
// regenerate from the S-RAMP v1.0 schema instead.

package org.oasis_open.docs.s_ramp.ns.s_ramp_v1;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;


/**
 * <p>Java class for BaseArtifactType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="BaseArtifactType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element ref="{http://docs.oasis-open.org/s-ramp/ns/s-ramp-v1.0}classifiedBy" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;element ref="{http://docs.oasis-open.org/s-ramp/ns/s-ramp-v1.0}relationship" maxOccurs="unbounded" minOccurs="0"/>
 *         &lt;element ref="{http://docs.oasis-open.org/s-ramp/ns/s-ramp-v1.0}property" maxOccurs="unbounded" minOccurs="0"/>
 *       &lt;/sequence>
 *       &lt;attribute name="artifactType" use="required" type="{http://docs.oasis-open.org/s-ramp/ns/s-ramp-v1.0}baseArtifactEnum" />
 *       &lt;attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="description" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="createdBy" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="version" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="uuid" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="createdTimestamp" use="required" type="{http://www.w3.org/2001/XMLSchema}dateTime" />
 *       &lt;attribute name="lastModifiedTimestamp" use="required" type="{http://www.w3.org/2001/XMLSchema}dateTime" />
 *       &lt;attribute name="lastModifiedBy" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;anyAttribute/>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "classifiedBy",
    "relationship",
    "property"
})
@XmlSeeAlso({
    BaseArtifactType.class
})
public abstract class OriginalBaseArtifactType implements Serializable {

    private static final long serialVersionUID = 4145021817646718347L;

    // Element content (propOrder above): classifications, relationships, properties.
    // All three are lazily initialized by their getters, never set directly.
    @XmlSchemaType(name = "anyURI")
    protected List<String> classifiedBy;
    protected List<Relationship> relationship;
    protected List<Property> property;
    // XML attributes of the artifact; "required" reflects the schema, not runtime
    // enforcement — JAXB will still unmarshal an instance with them missing.
    @XmlAttribute(name = "artifactType", required = true)
    protected BaseArtifactEnum artifactType;
    @XmlAttribute(name = "name", required = true)
    protected String name;
    @XmlAttribute(name = "description")
    protected String description;
    @XmlAttribute(name = "createdBy", required = true)
    protected String createdBy;
    @XmlAttribute(name = "version")
    protected String version;
    @XmlAttribute(name = "uuid", required = true)
    protected String uuid;
    @XmlAttribute(name = "createdTimestamp", required = true)
    @XmlSchemaType(name = "dateTime")
    protected XMLGregorianCalendar createdTimestamp;
    @XmlAttribute(name = "lastModifiedTimestamp", required = true)
    @XmlSchemaType(name = "dateTime")
    protected XMLGregorianCalendar lastModifiedTimestamp;
    @XmlAttribute(name = "lastModifiedBy", required = true)
    protected String lastModifiedBy;
    // Catch-all for schema's <anyAttribute/>: attributes not bound to a typed
    // property above land here, keyed by qualified name.
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Gets the value of the classifiedBy property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the classifiedBy property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getClassifiedBy().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     *
     * @return the live (never-null) list of classification URIs
     */
    public List<String> getClassifiedBy() {
        if (classifiedBy == null) {
            classifiedBy = new ArrayList<String>();
        }
        return this.classifiedBy;
    }

    /**
     * Gets the value of the relationship property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the relationship property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getRelationship().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Relationship }
     *
     * @return the live (never-null) list of relationships
     */
    public List<Relationship> getRelationship() {
        if (relationship == null) {
            relationship = new ArrayList<Relationship>();
        }
        return this.relationship;
    }

    /**
     * Gets the value of the property property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the property property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getProperty().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Property }
     *
     * @return the live (never-null) list of custom properties
     */
    public List<Property> getProperty() {
        if (property == null) {
            property = new ArrayList<Property>();
        }
        return this.property;
    }

    /**
     * Gets the value of the artifactType property.
     *
     * @return
     *     possible object is
     *     {@link BaseArtifactEnum }
     */
    public BaseArtifactEnum getArtifactType() {
        return artifactType;
    }

    /**
     * Sets the value of the artifactType property.
     *
     * @param value
     *     allowed object is
     *     {@link BaseArtifactEnum }
     */
    public void setArtifactType(BaseArtifactEnum value) {
        this.artifactType = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the description property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the value of the description property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setDescription(String value) {
        this.description = value;
    }

    /**
     * Gets the value of the createdBy property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getCreatedBy() {
        return createdBy;
    }

    /**
     * Sets the value of the createdBy property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setCreatedBy(String value) {
        this.createdBy = value;
    }

    /**
     * Gets the value of the version property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getVersion() {
        return version;
    }

    /**
     * Sets the value of the version property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setVersion(String value) {
        this.version = value;
    }

    /**
     * Gets the value of the uuid property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getUuid() {
        return uuid;
    }

    /**
     * Sets the value of the uuid property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setUuid(String value) {
        this.uuid = value;
    }

    /**
     * Gets the value of the createdTimestamp property.
     *
     * @return
     *     possible object is
     *     {@link XMLGregorianCalendar }
     */
    public XMLGregorianCalendar getCreatedTimestamp() {
        return createdTimestamp;
    }

    /**
     * Sets the value of the createdTimestamp property.
     *
     * @param value
     *     allowed object is
     *     {@link XMLGregorianCalendar }
     */
    public void setCreatedTimestamp(XMLGregorianCalendar value) {
        this.createdTimestamp = value;
    }

    /**
     * Gets the value of the lastModifiedTimestamp property.
     *
     * @return
     *     possible object is
     *     {@link XMLGregorianCalendar }
     */
    public XMLGregorianCalendar getLastModifiedTimestamp() {
        return lastModifiedTimestamp;
    }

    /**
     * Sets the value of the lastModifiedTimestamp property.
     *
     * @param value
     *     allowed object is
     *     {@link XMLGregorianCalendar }
     */
    public void setLastModifiedTimestamp(XMLGregorianCalendar value) {
        this.lastModifiedTimestamp = value;
    }

    /**
     * Gets the value of the lastModifiedBy property.
     *
     * @return
     *     possible object is
     *     {@link String }
     */
    public String getLastModifiedBy() {
        return lastModifiedBy;
    }

    /**
     * Sets the value of the lastModifiedBy property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     */
    public void setLastModifiedBy(String value) {
        this.lastModifiedBy = value;
    }

    /**
     * Gets a map that contains attributes that aren't bound to any typed property on this class.
     *
     * <p>
     * the map is keyed by the name of the attribute and
     * the value is the string value of the attribute.
     *
     * the map returned by this method is live, and you can add new attribute
     * by updating the map directly. Because of this design, there's no setter.
     *
     * @return
     *     always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium;

import static com.google.common.base.Joiner.on;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.junit.Assume.assumeFalse;
import static org.openqa.selenium.WaitingConditions.elementValueToEqual;
import static org.openqa.selenium.testing.TestUtilities.getEffectivePlatform;
import static org.openqa.selenium.testing.drivers.Browser.CHROME;
import static org.openqa.selenium.testing.drivers.Browser.FIREFOX;
import static org.openqa.selenium.testing.drivers.Browser.IE;
import static org.openqa.selenium.testing.drivers.Browser.SAFARI;

import org.junit.Test;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.NotYetImplemented;
import org.openqa.selenium.testing.drivers.Browser;

/**
 * End-to-end tests for typing into form controls via {@link WebElement#sendKeys}:
 * key event firing (down/press/up), printable and non-printable keys, modifier
 * chords (shift/control selections, cut &amp; paste), and special input types.
 *
 * <p>Most tests drive pages from the shared test fixtures (javascriptPage,
 * formPage) whose "keyReporter"/"result" elements echo the key events received.
 * Method names are discovered reflectively by JUnit — do not rename them
 * (including the misspelled {@code testChordReveseShiftHomeSelectionDeletes}).
 */
public class TypingTest extends JUnit4TestBase {

  // Asserts that the reporter element recorded both a keydown and a keyup
  // for the given numeric key code (e.g. 37-40 for the arrow keys).
  private static void checkRecordedKeySequence(WebElement element, int expectedKeyCode) {
    assertThat(element.getText().trim()).contains(
        String.format("down: %1$d", expectedKeyCode),
        String.format("up: %1$d", expectedKeyCode));
  }

  // Returns the element's value normalized to \n line endings with trailing
  // whitespace stripped, so assertions are platform-independent.
  private static String getValueText(WebElement el) {
    // Standardize on \n and strip any trailing whitespace.
    return el.getAttribute("value").replace("\r\n", "\n").trim();
  }

  @Test
  public void testShouldFireKeyPressEvents() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("a");

    WebElement result = driver.findElement(By.id("result"));
    assertThat(result.getText()).contains("press:");
  }

  @Test
  public void testShouldFireKeyDownEvents() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("I");

    WebElement result = driver.findElement(By.id("result"));
    assertThat(result.getText()).contains("down:");
  }

  @Test
  public void testShouldFireKeyUpEvents() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("a");

    WebElement result = driver.findElement(By.id("result"));
    assertThat(result.getText()).contains("up:");
  }

  @Test
  public void testShouldTypeLowerCaseLetters() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("abc def");

    assertThat(keyReporter.getAttribute("value")).isEqualTo("abc def");
  }

  @Test
  public void testShouldBeAbleToTypeCapitalLetters() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("ABC DEF");

    assertThat(keyReporter.getAttribute("value")).isEqualTo("ABC DEF");
  }

  @Test
  public void testShouldBeAbleToTypeQuoteMarks() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("\"");

    assertThat(keyReporter.getAttribute("value")).isEqualTo("\"");
  }

  @Test
  public void testShouldBeAbleToTypeTheAtCharacter() {
    // simon: I tend to use a US/UK or AUS keyboard layout with English
    // as my primary language. There are consistent reports that we're
    // not handling i18nised keyboards properly. This test exposes this
    // in a lightweight manner when my keyboard is set to the DE mapping
    // and we're using IE.
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("@");

    assertThat(keyReporter.getAttribute("value")).isEqualTo("@");
  }

  @Test
  public void testShouldBeAbleToMixUpperAndLowerCaseLetters() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys("me@eXample.com");

    assertThat(keyReporter.getAttribute("value")).isEqualTo("me@eXample.com");
  }

  @Test
  public void testArrowKeysShouldNotBePrintable() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    keyReporter.sendKeys(Keys.ARROW_LEFT);

    assertThat(keyReporter.getAttribute("value")).isEqualTo("");
  }

  @Test
  public void testShouldBeAbleToUseArrowKeys() {
    driver.get(pages.javascriptPage);

    WebElement keyReporter = driver.findElement(By.id("keyReporter"));
    // Cursor moves left before typing "s", turning "tet" into "test".
    keyReporter.sendKeys("tet", Keys.ARROW_LEFT, "s");

    assertThat(keyReporter.getAttribute("value")).isEqualTo("test");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testWillSimulateAKeyUpWhenEnteringTextIntoInputElements() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("keyUp"));
    element.sendKeys("I like cheese");

    WebElement result = driver.findElement(By.id("result"));
    assertThat(result.getText()).isEqualTo("I like cheese");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testWillSimulateAKeyDownWhenEnteringTextIntoInputElements() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("keyDown"));
    element.sendKeys("I like cheese");

    WebElement result = driver.findElement(By.id("result"));
    // Because the key down gets the result before the input element is
    // filled, we're a letter short here
    assertThat(result.getText()).isEqualTo("I like chees");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testWillSimulateAKeyPressWhenEnteringTextIntoInputElements() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("keyPress"));
    element.sendKeys("I like cheese");

    WebElement result = driver.findElement(By.id("result"));
    // Because the key down gets the result before the input element is
    // filled, we're a letter short here
    assertThat(result.getText()).isEqualTo("I like chees");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testWillSimulateAKeyUpWhenEnteringTextIntoTextAreas() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("keyUpArea"));
    element.sendKeys("I like cheese");

    WebElement result = driver.findElement(By.id("result"));
    assertThat(result.getText()).isEqualTo("I like cheese");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testWillSimulateAKeyDownWhenEnteringTextIntoTextAreas() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("keyDownArea"));
    element.sendKeys("I like cheese");

    WebElement result = driver.findElement(By.id("result"));
    // Because the key down gets the result before the input element is
    // filled, we're a letter short here
    assertThat(result.getText()).isEqualTo("I like chees");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testWillSimulateAKeyPressWhenEnteringTextIntoTextAreas() {
    driver.get(pages.javascriptPage);
    WebElement element = driver.findElement(By.id("keyPressArea"));
    element.sendKeys("I like cheese");

    WebElement result = driver.findElement(By.id("result"));
    // Because the key down gets the result before the input element is
    // filled, we're a letter short here
    assertThat(result.getText()).isEqualTo("I like chees");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testShouldFireFocusKeyEventsInTheRightOrder() {
    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("theworks"));

    element.sendKeys("a");
    assertThat(result.getText().trim()).isEqualTo("focus keydown keypress keyup");
  }

  @Test
  public void testShouldReportKeyCodeOfArrowKeys() {
    assumeFalse(Browser.detect() == Browser.LEGACY_OPERA &&
                getEffectivePlatform(driver).is(Platform.WINDOWS));

    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("keyReporter"));

    // DOM key codes: down=40, up=38, left=37, right=39.
    element.sendKeys(Keys.ARROW_DOWN);
    checkRecordedKeySequence(result, 40);

    element.sendKeys(Keys.ARROW_UP);
    checkRecordedKeySequence(result, 38);

    element.sendKeys(Keys.ARROW_LEFT);
    checkRecordedKeySequence(result, 37);

    element.sendKeys(Keys.ARROW_RIGHT);
    checkRecordedKeySequence(result, 39);

    // And leave no rubbish/printable keys in the "keyReporter"
    assertThat(element.getAttribute("value")).isEqualTo("");
  }

  @Test
  public void testShouldReportKeyCodeOfArrowKeysUpDownEvents() {
    assumeFalse(Browser.detect() == Browser.LEGACY_OPERA &&
                getEffectivePlatform(driver).is(Platform.WINDOWS));

    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys(Keys.ARROW_DOWN);
    assertThat(result.getText().trim()).contains("down: 40", "up: 40");

    element.sendKeys(Keys.ARROW_UP);
    assertThat(result.getText().trim()).contains("down: 38", "up: 38");

    element.sendKeys(Keys.ARROW_LEFT);
    assertThat(result.getText().trim()).contains("down: 37", "up: 37");

    element.sendKeys(Keys.ARROW_RIGHT);
    assertThat(result.getText().trim()).contains("down: 39", "up: 39");

    // And leave no rubbish/printable keys in the "keyReporter"
    assertThat(element.getAttribute("value")).isEqualTo("");
  }

  @Test
  public void testNumericNonShiftKeys() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    String numericLineCharsNonShifted = "`1234567890-=[]\\;,.'/42";
    element.sendKeys(numericLineCharsNonShifted);

    assertThat(element.getAttribute("value")).isEqualTo(numericLineCharsNonShifted);
  }

  @Test
  @NotYetImplemented(value = FIREFOX, reason = "https://github.com/mozilla/geckodriver/issues/646")
  public void testNumericShiftKeys() {
    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("keyReporter"));

    String numericShiftsEtc = "~!@#$%^&*()_+{}:\"<>?|END~";
    element.sendKeys(numericShiftsEtc);

    assertThat(element.getAttribute("value")).isEqualTo(numericShiftsEtc);
    // 16 is the SHIFT key code; the shifted chars must have released it.
    assertThat(result.getText().trim()).contains(" up: 16");
  }

  @Test
  public void testLowerCaseAlphaKeys() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    String lowerAlphas = "abcdefghijklmnopqrstuvwxyz";
    element.sendKeys(lowerAlphas);

    assertThat(element.getAttribute("value")).isEqualTo(lowerAlphas);
  }

  @Test
  @NotYetImplemented(value = FIREFOX, reason = "https://github.com/mozilla/geckodriver/issues/646")
  public void testUppercaseAlphaKeys() {
    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("keyReporter"));

    String upperAlphas = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    element.sendKeys(upperAlphas);

    assertThat(element.getAttribute("value")).isEqualTo(upperAlphas);
    assertThat(result.getText().trim()).contains(" up: 16");
  }

  @Test
  @NotYetImplemented(value = FIREFOX, reason = "https://github.com/mozilla/geckodriver/issues/646")
  public void testAllPrintableKeys() {
    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("keyReporter"));

    String allPrintable =
        "!\"#$%&'()*+,-./0123456789:;<=>?@ ABCDEFGHIJKLMNO" +
        "PQRSTUVWXYZ [\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
    element.sendKeys(allPrintable);

    assertThat(element.getAttribute("value")).isEqualTo(allPrintable);
    assertThat(result.getText().trim()).contains(" up: 16");
  }

  @Test
  public void testArrowKeysAndPageUpAndDown() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("a" + Keys.LEFT + "b" + Keys.RIGHT +
                     Keys.UP + Keys.DOWN + Keys.PAGE_UP + Keys.PAGE_DOWN + "1");
    assertThat(element.getAttribute("value")).isEqualTo("ba1");
  }

  @Test
  public void testHomeAndEndAndPageUpAndPageDownKeys() {
    assumeFalse("FIXME: macs don't have HOME keys, would PGUP work?",
                getEffectivePlatform(driver).is(Platform.MAC));

    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("abc" + Keys.HOME + "0" + Keys.LEFT + Keys.RIGHT +
                     Keys.PAGE_UP + Keys.PAGE_DOWN + Keys.END + "1" + Keys.HOME +
                     "0" + Keys.PAGE_UP + Keys.END + "111" + Keys.HOME + "00");
    assertThat(element.getAttribute("value")).isEqualTo("0000abc1111");
  }

  @Test
  public void testDeleteAndBackspaceKeys() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("abcdefghi");
    assertThat(element.getAttribute("value")).isEqualTo("abcdefghi");

    // DELETE removes the char at the cursor; BACK_SPACE the one before it.
    element.sendKeys(Keys.LEFT, Keys.LEFT, Keys.DELETE);
    assertThat(element.getAttribute("value")).isEqualTo("abcdefgi");

    element.sendKeys(Keys.LEFT, Keys.LEFT, Keys.BACK_SPACE);
    assertThat(element.getAttribute("value")).isEqualTo("abcdfgi");
  }

  @Test
  @NotYetImplemented(value = FIREFOX)
  public void testSpecialSpaceKeys() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("abcd" + Keys.SPACE + "fgh" + Keys.SPACE + "ij");
    assertThat(element.getAttribute("value")).isEqualTo("abcd fgh ij");
  }

  @Test
  @NotYetImplemented(value = FIREFOX)
  @NotYetImplemented(value = SAFARI, reason = "Enters dot instead of comma")
  @Ignore(value = CHROME, reason = "https://bugs.chromium.org/p/chromedriver/issues/detail?id=3999")
  public void testNumberPadKeys() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("abcd" + Keys.MULTIPLY + Keys.SUBTRACT + Keys.ADD +
                     Keys.DECIMAL + Keys.SEPARATOR + Keys.NUMPAD0 + Keys.NUMPAD9 +
                     Keys.ADD + Keys.SEMICOLON + Keys.EQUALS + Keys.DIVIDE +
                     Keys.NUMPAD3 + "abcd");
    assertThat(element.getAttribute("value")).isEqualTo("abcd*-+.,09+;=/3abcd");
  }

  @Test
  @Ignore(value = IE, reason = "F4 triggers address bar")
  public void testFunctionKeys() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    // F4 is non-printable: only the literal text should remain.
    element.sendKeys("FUNCTION" + Keys.F4 + "-KEYS" + Keys.F4);
    element.sendKeys("" + Keys.F4 + "-TOO" + Keys.F4);
    assertThat(element.getAttribute("value")).isEqualTo("FUNCTION-KEYS-TOO");
  }

  @Test
  @NotYetImplemented(SAFARI)
  public void testShiftSelectionDeletes() {
    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("abcd efgh");
    assertThat(element.getAttribute("value")).isEqualTo("abcd efgh");

    // Shift+Left x3 selects "fgh"; DELETE removes the selection.
    element.sendKeys(Keys.SHIFT, Keys.LEFT, Keys.LEFT, Keys.LEFT);
    element.sendKeys(Keys.DELETE);
    assertThat(element.getAttribute("value")).isEqualTo("abcd e");
  }

  @Test
  @NotYetImplemented(value = FIREFOX, reason = "https://github.com/mozilla/geckodriver/issues/646")
  public void testChordControlHomeShiftEndDelete() {
    assumeFalse("FIXME: macs don't have HOME keys, would PGUP work?",
                getEffectivePlatform(driver).is(Platform.MAC));

    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("!\"#$%&'()*+,-./0123456789:;<=>?@ ABCDEFG");

    element.sendKeys(Keys.HOME);
    // Shift+End selects everything from the caret to end-of-line.
    element.sendKeys("" + Keys.SHIFT + Keys.END);
    assertThat(result.getText()).contains(" up: 16");

    element.sendKeys(Keys.DELETE);
    assertThat(element.getAttribute("value")).isEqualTo("");
  }

  // control-x control-v here for cut & paste tests, these work on windows
  // and linux, but not on the MAC.

  @Test
  @NotYetImplemented(value = FIREFOX, reason = "https://github.com/mozilla/geckodriver/issues/646")
  public void testChordReveseShiftHomeSelectionDeletes() {
    assumeFalse("FIXME: macs don't have HOME keys, would PGUP work?",
                getEffectivePlatform(driver).is(Platform.MAC));

    driver.get(pages.javascriptPage);

    WebElement result = driver.findElement(By.id("result"));
    WebElement element = driver.findElement(By.id("keyReporter"));

    element.sendKeys("done" + Keys.HOME);
    assertThat(element.getAttribute("value")).isEqualTo("done");

    // Implicit-shift chord: "ALL " is typed, then Shift+Home selects it.
    element.sendKeys(Keys.SHIFT + "ALL " + Keys.HOME);
    assertThat(element.getAttribute("value")).isEqualTo("ALL done");

    element.sendKeys(Keys.DELETE);
    assertThat(element.getAttribute("value")).isEqualTo("done");

    element.sendKeys("" + Keys.END + Keys.SHIFT + Keys.HOME);
    assertThat(element.getAttribute("value")).isEqualTo("done");
    // Note: trailing SHIFT up has not occurred yet before the previous assert.
    assertThat(result.getText().trim()).contains(" up: 16");

    element.sendKeys(Keys.DELETE);
    assertThat(element.getAttribute("value")).isEqualTo("");
  }

  @Test
  @Ignore(value = CHROME, reason = "https://bugs.chromium.org/p/chromedriver/issues/detail?id=3999")
  @NotYetImplemented(value = FIREFOX, reason = "https://github.com/mozilla/geckodriver/issues/646")
  public void testChordControlCutAndPaste() {
    assumeFalse("FIXME: macs don't have HOME keys, would PGUP work?",
                getEffectivePlatform(driver).is(Platform.MAC));

    driver.get(pages.javascriptPage);

    WebElement element = driver.findElement(By.id("keyReporter"));
    WebElement result = driver.findElement(By.id("result"));

    String paste = "!\"#$%&'()*+,-./0123456789:;<=>?@ ABCDEFG";
    element.sendKeys(paste);
    assertThat(element.getAttribute("value")).isEqualTo(paste);

    element.sendKeys(Keys.HOME);
    element.sendKeys("" + Keys.SHIFT + Keys.END);
    assertThat(result.getText().trim()).contains(" up: 16");

    // Cut everything, then paste it back.
    element.sendKeys(Keys.CONTROL, "x");
    assertThat(element.getAttribute("value")).isEqualTo("");

    element.sendKeys(Keys.CONTROL, "v");
    wait.until(elementValueToEqual(element, paste));

    // Cut the last 3 letters.
    element.sendKeys("" + Keys.LEFT + Keys.LEFT + Keys.LEFT +
                     Keys.SHIFT + Keys.END);

    element.sendKeys(Keys.CONTROL, "x");
    assertThat(element.getAttribute("value")).isEqualTo(paste.substring(0, paste.length() - 3));

    // Paste the last 3 letters.
    element.sendKeys(Keys.CONTROL, "v");
    assertThat(element.getAttribute("value")).isEqualTo(paste);

    element.sendKeys(Keys.HOME);
    element.sendKeys(Keys.CONTROL, "v");
    element.sendKeys(Keys.CONTROL, "v" + "v");
    element.sendKeys(Keys.CONTROL, "v" + "v" + "v");
    assertThat(element.getAttribute("value")).isEqualTo("EFGEFGEFGEFGEFGEFG" + paste);

    element.sendKeys("" + Keys.END + Keys.SHIFT + Keys.HOME +
                     Keys.NULL + Keys.DELETE);
    assertThat(element.getAttribute("value")).isEqualTo("");
  }

  @Test
  public void testShouldTypeIntoInputElementsThatHaveNoTypeAttribute() {
    driver.get(pages.formPage);

    WebElement element = driver.findElement(By.id("no-type"));

    element.sendKeys("should say cheese");
    assertThat(element.getAttribute("value")).isEqualTo("should say cheese");
  }

  @Test
  public void testShouldNotTypeIntoElementsThatPreventKeyDownEvents() {
    driver.get(pages.javascriptPage);

    WebElement silent = driver.findElement(By.name("suppress"));

    silent.sendKeys("s");
    assertThat(silent.getAttribute("value")).isEqualTo("");
  }

  @Test
  @NotYetImplemented(value = SAFARI, reason = "getText does not normalize spaces")
  public void testGenerateKeyPressEventEvenWhenElementPreventsDefault() {
    driver.get(pages.javascriptPage);

    WebElement silent = driver.findElement(By.name("suppress"));
    WebElement result = driver.findElement(By.id("result"));

    silent.sendKeys("s");
    assertThat(result.getText().trim()).isIn("", "mouseover");
  }

  @Test
  public void testShouldBeAbleToTypeOnAnEmailInputField() {
    driver.get(pages.formPage);
    WebElement email = driver.findElement(By.id("email"));
    email.sendKeys("foobar");
    assertThat(email.getAttribute("value")).isEqualTo("foobar");
  }

  @Test
  public void testShouldBeAbleToTypeOnANumberInputField() {
    driver.get(pages.formPage);
    WebElement email = driver.findElement(By.id("age"));
    email.sendKeys("33");
    assertThat(email.getAttribute("value")).isEqualTo("33");
  }

  @Test
  public void testShouldThrowIllegalArgumentException() {
    driver.get(pages.formPage);
    WebElement email = driver.findElement(By.id("age"));
    // sendKeys(null) is a documented IllegalArgumentException.
    assertThatExceptionOfType(IllegalArgumentException.class)
        .isThrownBy(() -> email.sendKeys((CharSequence[]) null));
  }

  @Test
  public void canSafelyTypeOnElementThatIsRemovedFromTheDomOnKeyPress() {
    driver.get(appServer.whereIs("key_tests/remove_on_keypress.html"));

    WebElement input = driver.findElement(By.id("target"));
    WebElement log = driver.findElement(By.id("log"));

    assertThat(log.getAttribute("value")).isEqualTo("");

    input.sendKeys("b");
    assertThat(getValueText(log)).isEqualTo(on('\n').join(
        "keydown (target)",
        "keyup (target)",
        "keyup (body)"));

    input.sendKeys("a");

    // Some drivers (IE, Firefox) do not always generate the final keyup event since the element
    // is removed from the DOM in response to the keypress (note, this is a product of how events
    // are generated and does not match actual user behavior).
    String expected = String.join("\n",
                                  "keydown (target)",
                                  "keyup (target)",
                                  "keyup (body)",
                                  "keydown (target)",
                                  "a pressed; removing");
    assertThat(getValueText(log)).isIn(expected, expected + "\nkeyup (body)");
  }

  @Test
  public void canClearNumberInputAfterTypingInvalidInput() {
    driver.get(pages.formPage);
    WebElement input = driver.findElement(By.id("age"));
    input.sendKeys("e");
    input.clear();
    input.sendKeys("3");
    assertThat(input.getAttribute("value")).isEqualTo("3");
  }

  @Test
  public void canTypeSingleNewLineCharacterIntoTextArea() {
    driver.get(pages.formPage);
    WebElement element = driver.findElement(By.id("emptyTextArea"));
    element.sendKeys("\n");
    shortWait.until(ExpectedConditions.attributeToBe(element, "value", "\n"));
  }

  @Test
  public void canTypeMultipleNewLineCharactersIntoTextArea() {
    driver.get(pages.formPage);
    WebElement element = driver.findElement(By.id("emptyTextArea"));
    element.sendKeys("\n\n\n");
    shortWait.until(ExpectedConditions.attributeToBe(element, "value", "\n\n\n"));
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.commit; import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE; import static org.junit.Assert.assertEquals; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.concurrent.AbstractExecutorService; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import org.apache.jackrabbit.oak.plugins.observation.Filter; import org.apache.jackrabbit.oak.plugins.observation.FilteringAwareObserver; import org.apache.jackrabbit.oak.plugins.observation.FilteringObserver; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.junit.Test; public class PrefilteringBackgroundObserverTest { private final boolean EXCLUDED = true; private final boolean INCLUDED = false; private List<Runnable> runnableQ; private ExecutorService executor; private CompositeObserver compositeObserver; private List<ContentChanged> received; private FilteringObserver filteringObserver; private CommitInfo includingCommitInfo = new CommitInfo("includingSession", CommitInfo.OAK_UNKNOWN); private CommitInfo excludingCommitInfo = new CommitInfo("excludingSession", CommitInfo.OAK_UNKNOWN); private int 
resetCallCnt; public void init(int queueLength) throws Exception { runnableQ = new LinkedList<Runnable>(); executor = new EnqueuingExecutorService(runnableQ); compositeObserver = new CompositeObserver(); received = new LinkedList<ContentChanged>(); filteringObserver = new FilteringObserver(executor, queueLength, new Filter() { @Override public boolean excludes(NodeState root, CommitInfo info) { if (info == includingCommitInfo) { return false; } else if (info == excludingCommitInfo) { return true; } else if (info.isExternal()) { return false; } throw new IllegalStateException("only supporting include or exclude"); } }, new FilteringAwareObserver() { NodeState previous; @Override public void contentChanged(NodeState before, NodeState after, CommitInfo info) { received.add(new ContentChanged(after, info)); if (previous !=null && previous != before) { resetCallCnt++; } previous = after; } }); compositeObserver.addObserver(filteringObserver); } private final class EnqueuingExecutorService extends AbstractExecutorService { private final List<Runnable> runnableQ; private EnqueuingExecutorService(List<Runnable> runnableQ) { this.runnableQ = runnableQ; } @Override public void execute(Runnable command) { runnableQ.add(command); } @Override public List<Runnable> shutdownNow() { throw new IllegalStateException("nyi"); } @Override public void shutdown() { throw new IllegalStateException("nyi"); } @Override public boolean isTerminated() { throw new IllegalStateException("nyi"); } @Override public boolean isShutdown() { throw new IllegalStateException("nyi"); } @Override public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { throw new IllegalStateException("nyi"); } } class ContentChanged { NodeState root; CommitInfo info; ContentChanged(NodeState root, CommitInfo info) { this.root = root; this.info = info; } } private static void executeRunnables(final List<Runnable> runnableQ, int num) { for(int i=0; i<num; i++) { for (Runnable runnable : new 
ArrayList<Runnable>(runnableQ)) { runnable.run(); } } } private static NodeState p(int k) { return EMPTY_NODE.builder().setProperty("p", k).getNodeState(); } @Test public void testFlipping() throws Exception { final int queueLength = 2000; init(queueLength); // initialize observer with an initial contentChanged // (see ChangeDispatcher#addObserver) { compositeObserver.contentChanged(p(-1), CommitInfo.EMPTY_EXTERNAL); } // Part 1 : first run with filtersEvaluatedMapWithEmptyObservers - empty or null shouldn't matter, it's excluded in both cases for (int k = 0; k < 1000; k++) { CommitInfo info; if (k%2==1) { info = includingCommitInfo; } else { info = excludingCommitInfo; } final NodeState p = p(k); compositeObserver.contentChanged(p, info); if (k%10 == 0) { executeRunnables(runnableQ, 10); } } executeRunnables(runnableQ, 10); assertEquals(500, received.size()); // changed from 501 with OAK-5121 assertEquals(499, resetCallCnt); // changed from 500 with OAK-5121 // Part 2 : run with filtersEvaluatedMapWithNullObservers - empty or null shouldn't matter, it's excluded in both cases received.clear(); resetCallCnt = 0; for (int k = 0; k < 1000; k++) { CommitInfo info; if (k%2==1) { info = includingCommitInfo; } else { info = excludingCommitInfo; } final NodeState p = p(k); compositeObserver.contentChanged(p, info); if (k%10 == 0) { executeRunnables(runnableQ, 10); } } executeRunnables(runnableQ, 10); assertEquals(500, received.size()); assertEquals(500, resetCallCnt); // Part 3 : unlike the method name suggests, this variant tests with the filter disabled, so should receive all events normally received.clear(); resetCallCnt = 0; for (int k = 0; k < 1000; k++) { CommitInfo info; if (k%2==1) { info = includingCommitInfo; } else { info = includingCommitInfo; } final NodeState p = p(k); compositeObserver.contentChanged(p, info); if (k%10 == 0) { executeRunnables(runnableQ, 10); } } executeRunnables(runnableQ, 10); assertEquals(1000, received.size()); assertEquals(0, 
resetCallCnt); } @Test public void testFlipping2() throws Exception { doTestFullQueue(6, new TestPattern(INCLUDED, 1, true, 1, 0), new TestPattern(EXCLUDED, 5, true, 0, 0), new TestPattern(INCLUDED, 2, true, 2, 1), new TestPattern(EXCLUDED, 1, true, 0, 0), new TestPattern(INCLUDED, 2, true, 2, 1)); } @Test public void testQueueNotFull() throws Exception { doTestFullQueue(20, // start: empty queue new TestPattern(EXCLUDED, 1000, false, 0, 0), // here: still empty, just the previousRoot is set to remember above NOOPs new TestPattern(INCLUDED, 5, false, 0, 0), // here: 5 changes are in the queue, the queue fits 20, way to go new TestPattern(EXCLUDED, 500, false, 0, 0), // still 5 in the queue new TestPattern(INCLUDED, 5, false, 0, 0), // now we added 2, queue still not full new TestPattern(EXCLUDED, 0 /* only flush*/, true, 10, 1) ); } @Test public void testIncludeOnQueueFull() throws Exception { doTestFullQueue(7, // start: empty queue new TestPattern(EXCLUDED, 1000, false, 0, 0, 0, 0), // here: still empty, just the previousRoot is set to remember above NOOPs new TestPattern(INCLUDED, 5, false, 0, 0, 0, 6), // here: 1 init and 5 changes are in the queue, the queue fits 7, so queue is almost full new TestPattern(EXCLUDED, 500, false, 0, 0, 6, 6), // still 6 in the queue, of 7 new TestPattern(INCLUDED, 5, false, 0, 0, 6, 7), // now we added 2 (one NOOP and one of those 5), so the queue got full (==7) new TestPattern(EXCLUDED, 0 /* only flush*/, true, 5, 0, 7, 0) ); } @Test public void testExcludeOnQueueFull2() throws Exception { doTestFullQueue(1, // start: empty queue new TestPattern(INCLUDED, 10, false, 0, 0), new TestPattern(EXCLUDED, 0 /* only flush*/, true, 1, 0), new TestPattern(INCLUDED, 10, false, 0, 0), new TestPattern(EXCLUDED, 10, false, 0, 0), new TestPattern(INCLUDED, 10, false, 0, 0), new TestPattern(EXCLUDED, 0 /* only flush*/, true, 1, 0), new TestPattern(INCLUDED, 10, false, 0, 0), new TestPattern(EXCLUDED, 10, false, 0, 0), new TestPattern(EXCLUDED, 
0 /* only flush*/, true, 1, 0), new TestPattern(EXCLUDED, 10, false, 0, 0), new TestPattern(INCLUDED, 10, false, 0, 0), new TestPattern(EXCLUDED, 0 /* only flush*/, true, 1, 0)); } @Test public void testExcludeOnQueueFull1() throws Exception { doTestFullQueue(4, // start: empty queue new TestPattern(EXCLUDED, 1, false, 0, 0, 0, 0), // here: still empty, just the previousRoot is set to remember above NOOP new TestPattern(INCLUDED, 3, false, 0, 0, 0, 4), // here: 3 changes are in the queue, the queue fits 3, so it just got full now new TestPattern(EXCLUDED, 1, false, 0, 0, 4, 4), // still full but it's ignored, so doesn't have any queue length effect new TestPattern(INCLUDED, 3, false, 0, 0, 4, 4), // adding 3 will not work, it will result in an overflow entry new TestPattern(EXCLUDED, 0 /* only flush*/, true, 3, 0, 4, 0), new TestPattern(INCLUDED, 1, false, 0, 0, 0, 1), new TestPattern(EXCLUDED, 0 /* only flush*/, true, 1, 0, 1, 0) ); } class TestPattern { final boolean flush; final boolean excluded; final int numEvents; final int expectedNumEvents; final int expectedNumResetCalls; private int expectedQueueSizeAtStart = -1; private int expectedQueueSizeAtEnd = -1; TestPattern(boolean excluded, int numEvents, boolean flush, int expectedNumEvents, int expectedNumResetCalls) { this.flush = flush; this.excluded = excluded; this.numEvents = numEvents; this.expectedNumEvents = expectedNumEvents; this.expectedNumResetCalls = expectedNumResetCalls; } TestPattern(boolean excluded, int numEvents, boolean flush, int expectedNumEvents, int expectedNumResetCalls, int expectedQueueSizeAtStart, int expectedQueueSizeAtEnd) { this(excluded, numEvents, flush, expectedNumEvents, expectedNumResetCalls); this.expectedQueueSizeAtStart = expectedQueueSizeAtStart; this.expectedQueueSizeAtEnd = expectedQueueSizeAtEnd; } @Override public String toString() { return "excluded="+excluded+", numEvents="+numEvents+", flush="+flush+", expectedNumEvents="+expectedNumEvents+", 
expectedNumResetCalls="+expectedNumResetCalls; } } private void doTestFullQueue(int queueLength, TestPattern... testPatterns) throws Exception { init(queueLength); // initialize observer with an initial contentChanged // (see ChangeDispatcher#addObserver) { compositeObserver.contentChanged(p(-1), CommitInfo.EMPTY_EXTERNAL); } // remove above first event right away executeRunnables(runnableQ, 5); received.clear(); resetCallCnt = 0; int k = 0; int loopCnt = 0; for (TestPattern testPattern : testPatterns) { k++; if (testPattern.expectedQueueSizeAtStart >= 0) { assertEquals("loopCnt="+loopCnt+", queue size mis-match at start", testPattern.expectedQueueSizeAtStart, filteringObserver.getBackgroundObserver().getMBean().getQueueSize()); } for(int i=0; i<testPattern.numEvents; i++) { CommitInfo info; if (!testPattern.excluded) { info = includingCommitInfo; } else { info = excludingCommitInfo; } k++; compositeObserver.contentChanged(p(k), info); } if (testPattern.flush) { executeRunnables(runnableQ, testPattern.numEvents + testPattern.expectedNumEvents + testPattern.expectedNumResetCalls + 10); } assertEquals("loopCnt="+loopCnt, testPattern.expectedNumEvents, received.size()); assertEquals("loopCnt="+loopCnt, testPattern.expectedNumResetCalls, resetCallCnt); received.clear(); resetCallCnt = 0; loopCnt++; if (testPattern.expectedQueueSizeAtEnd >= 0) { assertEquals("loopCnt="+loopCnt+", queue size mis-match at end", testPattern.expectedQueueSizeAtEnd, filteringObserver.getBackgroundObserver().getMBean().getQueueSize()); } } } }
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.dmn.feel.util; import org.kie.dmn.feel.lang.EvaluationContext; import org.kie.dmn.feel.lang.FEELProperty; import org.kie.dmn.feel.lang.ast.InfixOpNode; import org.kie.dmn.feel.runtime.Range; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.math.BigDecimal; import java.math.BigInteger; import java.math.MathContext; import java.time.Duration; import java.time.Period; import java.time.temporal.ChronoField; import java.time.temporal.Temporal; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiPredicate; import java.util.regex.Pattern; import java.util.stream.Stream; public class EvalHelper { public static final Logger LOG = LoggerFactory.getLogger( EvalHelper.class ); private static final Pattern SPACES_PATTERN = Pattern.compile( "[\\s\u00A0]+" ); public static String normalizeVariableName(String name) { return SPACES_PATTERN.matcher( name.trim() ).replaceAll( " " ); } public static BigDecimal getBigDecimalOrNull(Object value) { if ( !(value instanceof Number || value instanceof String) ) { return null; } if ( !BigDecimal.class.isAssignableFrom( value.getClass() ) ) { if ( value 
instanceof Long || value instanceof Integer || value instanceof Short || value instanceof Byte || value instanceof AtomicLong || value instanceof AtomicInteger ) { value = new BigDecimal( ((Number) value).longValue(), MathContext.DECIMAL128 ); } else if ( value instanceof BigInteger ) { value = new BigDecimal( ((BigInteger) value).toString(), MathContext.DECIMAL128 ); } else if ( value instanceof String ) { // we need to remove leading zeros to prevent octal conversion value = new BigDecimal( ((String) value).replaceFirst("^0+(?!$)", ""), MathContext.DECIMAL128 ); } else { value = new BigDecimal( ((Number) value).doubleValue(), MathContext.DECIMAL128 ); } } return (BigDecimal) value; } public static Object coerceNumber(Object value) { if ( value instanceof Number && !(value instanceof BigDecimal) ) { return getBigDecimalOrNull( value ); } else { return value; } } public static Boolean getBooleanOrNull(Object value) { if ( value == null || !(value instanceof Boolean) ) { return null; } return (Boolean) value; } public static String unescapeString(String text) { if ( text == null ) { return null; } if ( text.length() >= 2 && text.startsWith( "\"" ) && text.endsWith( "\"" ) ) { // remove the quotes text = text.substring( 1, text.length() - 1 ); } if ( text.indexOf( '\\' ) >= 0 ) { // might require un-escaping StringBuilder r = new StringBuilder(); for ( int i = 0; i < text.length(); i++ ) { char c = text.charAt( i ); if ( c == '\\' ) { if ( text.length() > i + 1 ) { i++; char cn = text.charAt( i ); switch ( cn ) { case 'b': r.append( '\b' ); break; case 't': r.append( '\t' ); break; case 'n': r.append( '\n' ); break; case 'f': r.append( '\f' ); break; case 'r': r.append( '\r' ); break; case '"': r.append( '"' ); break; case '\'': r.append( '\'' ); break; case '\\': r.append( '\\' ); break; case 'u': { if ( text.length() >= i + 5 ) { // escape unicode String hex = text.substring( i + 1, i + 5 ); char[] chars = Character.toChars( Integer.parseInt( hex, 16 ) ); r.append( 
chars ); i += 4; } else { // not really unicode r.append( "\\" ).append( cn ); } break; } } } } else { r.append( c ); } } text = r.toString(); } return text; } public static Object getValue(Object current, String property) throws IllegalAccessException, InvocationTargetException { if ( current == null ) { return null; } else if ( current instanceof Map ) { current = ((Map) current).get( property ); } else if ( current instanceof Period ) { switch ( property ) { case "years": current = ((Period) current).getYears(); break; case "months": current = ((Period) current).getMonths()%12; break; case "days": current = ((Period) current).getDays()%30; break; default: return null; } } else if ( current instanceof Duration ) { switch ( property ) { case "days": current = ((Duration) current).toDays(); break; case "hours": current = ((Duration) current).toHours()%24; break; case "minutes": current = ((Duration) current).toMinutes()%60; break; case "seconds": current = ((Duration) current).getSeconds()%60; break; default: return null; } } else if ( current instanceof Temporal ) { switch ( property ) { case "year": current = ((Temporal) current).get( ChronoField.YEAR ); break; case "month": current = ((Temporal) current).get( ChronoField.MONTH_OF_YEAR ); break; case "day": current = ((Temporal) current).get( ChronoField.DAY_OF_MONTH ); break; case "hour": current = ((Temporal) current).get( ChronoField.HOUR_OF_DAY ); break; case "minute": current = ((Temporal) current).get( ChronoField.MINUTE_OF_HOUR ); break; case "second": current = ((Temporal) current).get( ChronoField.SECOND_OF_MINUTE ); break; case "time offset": case "timezone": current = Duration.ofSeconds( ((Temporal) current).get( ChronoField.OFFSET_SECONDS ) ); break; default: return null; } } else { Method getter = getGenericAccessor( current.getClass(), property ); if ( getter != null ) { current = getter.invoke( current ); } else { return null; } } return coerceNumber( current ); } /** * FEEL annotated or else Java 
accessor. * @param clazz * @param field * @return */ public static Method getGenericAccessor(Class<?> clazz, String field) { LOG.trace( "getGenericAccessor({}, {})", clazz, field ); return Stream.of( clazz.getMethods() ) .filter( m -> Optional.ofNullable( m.getAnnotation( FEELProperty.class ) ) .map( ann -> ann.value().equals( field ) ) .orElse( false ) ) .findFirst() .orElse( getAccessor( clazz, field ) ); } /** * JavaBean -spec compliant accessor. * @param clazz * @param field * @return */ public static Method getAccessor(Class<?> clazz, String field) { LOG.trace( "getAccessor({}, {})", clazz, field ); try { return clazz.getMethod( "get" + ucFirst( field ) ); } catch ( NoSuchMethodException e ) { try { return clazz.getMethod( field ); } catch ( NoSuchMethodException e1 ) { try { return clazz.getMethod( "is" + ucFirst( field ) ); } catch ( NoSuchMethodException e2 ) { return null; } } } } /** * Inverse of {@link #getAccessor(Class, String)} */ public static Optional<String> propertyFromAccessor(Method accessor) { if ( accessor.getParameterCount() != 0 || accessor.getReturnType().equals( Void.class ) ) { return Optional.empty(); } String methodName = accessor.getName(); if ( methodName.startsWith( "get" ) ) { return Optional.of( lcFirst( methodName.substring( 3, methodName.length() ) ) ); } else if ( methodName.startsWith( "is" ) ) { return Optional.of( lcFirst( methodName.substring( 2, methodName.length() ) ) ); } else { return Optional.of( lcFirst( methodName ) ); } } public static String ucFirst(final String name) { return name.toUpperCase().charAt( 0 ) + name.substring( 1 ); } public static String lcFirst(final String name) { return name.toLowerCase().charAt( 0 ) + name.substring( 1 ); } /** * Compares left and right operands using the given predicate and returns TRUE/FALSE accordingly * * @param left * @param right * @param ctx * @param op * @return */ public static Boolean compare(Object left, Object right, EvaluationContext ctx, BiPredicate<Comparable, 
Comparable> op) { if ( left == null || right == null ) { return null; } else if ( (left instanceof Period && right instanceof Period ) ) { // periods have special compare semantics in FEEL as it ignores "days". Only months and years are compared Period lp = (Period) left; Period rp = (Period) right; Integer l = lp.getYears() * 12 + lp.getMonths(); Integer r = rp.getYears() * 12 + rp.getMonths(); return op.test( l, r ); } else if ( (left instanceof String && right instanceof String) || (left instanceof Number && right instanceof Number) || (left instanceof Boolean && right instanceof Boolean) || (left instanceof Comparable && left.getClass().isAssignableFrom( right.getClass() )) ) { Comparable l = (Comparable) left; Comparable r = (Comparable) right; return op.test( l, r ); } return null; } /** * Compares left and right for equality applying FEEL semantics to specific data types * * @param left * @param right * @param ctx * @return */ public static Boolean isEqual(Object left, Object right, EvaluationContext ctx ) { if ( left == null || right == null ) { return left == right; } // spec defines that "a=[a]", i.e., singleton collections should be treated as the single element // and vice-versa if( left instanceof Collection && !(right instanceof Collection) && ((Collection)left).size() == 1 ) { left = ((Collection)left).toArray()[0]; } else if( right instanceof Collection && !(left instanceof Collection) && ((Collection)right).size()==1 ) { right = ((Collection) right).toArray()[0]; } if( left instanceof Range && right instanceof Range ) { return isEqual( (Range)left, (Range) right ); } else if( left instanceof Iterable && right instanceof Iterable ) { return isEqual( (Iterable)left, (Iterable) right ); } else if( left instanceof Map && right instanceof Map ) { return isEqual( (Map)left, (Map) right ); } return compare( left, right, ctx, (l, r) -> l.compareTo( r ) == 0 ); } private static Boolean isEqual(Range left, Range right) { return left.equals( right ); } 
private static Boolean isEqual(Iterable left, Iterable right) { Iterator li = left.iterator(); Iterator ri = right.iterator(); while( li.hasNext() && ri.hasNext() ) { Object l = li.next(); Object r = ri.next(); if ( !isEqual( l, r ) ) return false; } return li.hasNext() == ri.hasNext(); } private static Boolean isEqual(Map<?,?> left, Map<?,?> right) { if( left.size() != right.size() ) { return false; } for( Map.Entry le : left.entrySet() ) { Object l = le.getValue(); Object r = right.get( le.getKey() ); if ( !isEqual( l, r ) ) return false; } return true; } private static Boolean isEqual(Object l, Object r) { if( l instanceof Iterable && r instanceof Iterable && !isEqual( (Iterable) l, (Iterable) r ) ) { return false; } else if( l instanceof Map && r instanceof Map && !isEqual( (Map) l, (Map) r ) ) { return false; } else if( l != null && r != null && !l.equals( r ) ) { return false; } else if( ( l == null || r == null ) && l != r ) { return false; } return true; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gemstone.gemfire.cache.hdfs.internal.hoplog; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.TreeSet; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Matcher; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.junit.experimental.categories.Category; import com.gemstone.gemfire.cache.Region; import com.gemstone.gemfire.cache.hdfs.HDFSIOException; import com.gemstone.gemfire.cache.hdfs.HDFSStore; import com.gemstone.gemfire.cache.hdfs.HDFSStoreMutator; import com.gemstone.gemfire.cache.hdfs.internal.PersistedEventImpl; import com.gemstone.gemfire.cache.hdfs.internal.SortedHoplogPersistedEvent; import com.gemstone.gemfire.cache.hdfs.internal.hoplog.AbstractHoplogOrganizer.HoplogComparator; import 
com.gemstone.gemfire.cache.hdfs.internal.hoplog.HDFSRegionDirector.HdfsRegionManager; import com.gemstone.gemfire.cache.hdfs.internal.hoplog.HoplogOrganizer.Compactor; import com.gemstone.gemfire.cache.hdfs.internal.hoplog.TieredCompactionJUnitTest.TestHoplog; import com.gemstone.gemfire.cache.hdfs.internal.hoplog.mapreduce.HoplogUtil; import com.gemstone.gemfire.internal.cache.persistence.soplog.TrackedReference; import com.gemstone.gemfire.internal.util.BlobHelper; import com.gemstone.gemfire.test.junit.categories.HoplogTest; import com.gemstone.gemfire.test.junit.categories.IntegrationTest; import dunit.DistributedTestCase; import dunit.DistributedTestCase.ExpectedException; @Category({IntegrationTest.class, HoplogTest.class}) public class HdfsSortedOplogOrganizerJUnitTest extends BaseHoplogTestCase { /** * Tests flush operation */ public void testFlush() throws Exception { int count = 10; int bucketId = (int) System.nanoTime(); HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, bucketId); // flush and create hoplog ArrayList<TestEvent> items = new ArrayList<TestEvent>(); for (int i = 0; i < count; i++) { items.add(new TestEvent(("key-" + i), ("value-" + System.nanoTime()))); } organizer.flush(items.iterator(), count); // check file existence in bucket directory FileStatus[] hoplogs = getBucketHoplogs(getName() + "/" + bucketId, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION); // only one hoplog should exists assertEquals(1, hoplogs.length); assertEquals(count, organizer.sizeEstimate()); assertEquals(0, stats.getActiveReaderCount()); } /** * Tests reads from a set of hoplogs containing both valid and stale KVs */ public void testReopen() throws Exception { int bucketId = (int) System.nanoTime(); HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, bucketId); // flush and create hoplog ArrayList<TestEvent> items = new ArrayList<TestEvent>(); for (int i = 0; i < 100; i++) { items.add(new TestEvent("" + i, 
("1-1"))); } organizer.flush(items.iterator(), items.size()); Hoplog hoplog = organizer.getSortedOplogs().iterator().next().get(); byte[] keyBytes1 = BlobHelper.serializeToBlob("1"); hoplog.close(); for (int i = 0; i < 10; i++) { Path path = new Path(testDataDir, getName() + "/" + bucketId + "/" + hoplog.getFileName()); HFileSortedOplog oplog = new HFileSortedOplog(hdfsStore, path, blockCache, stats, storeStats); oplog.getReader().read(keyBytes1); oplog.close(false); } } /** * Tests reads from a set of hoplogs containing both valid and stale KVs */ public void testRead() throws Exception { doRead(regionManager); } // public void testNewReaderWithNameNodeHA() throws Exception { // deleteMiniClusterDir(); // int nn1port = AvailablePortHelper.getRandomAvailableTCPPort(); // int nn2port = AvailablePortHelper.getRandomAvailableTCPPort(); // // MiniDFSCluster cluster = initMiniHACluster(nn1port, nn2port); // initClientHAConf(nn1port, nn2port); // // HDFSStoreImpl store1 = (HDFSStoreImpl) hsf.create("Store-1"); // regionfactory.setHDFSStoreName(store1.getName()); // Region<Object, Object> region1 = regionfactory.create("region-1"); // HdfsRegionManager regionManager1 = ((LocalRegion)region1).getHdfsRegionManager(); // // HoplogOrganizer<SortedHoplogPersistedEvent> organizer = doRead(regionManager1); // organizer.close(); // // dunit.DistributedTestCase.ExpectedException ex = DistributedTestCase.addExpectedException("java.io.EOFException"); // NameNode nnode2 = cluster.getNameNode(1); // assertTrue(nnode2.isStandbyState()); // cluster.shutdownNameNode(0); // cluster.transitionToActive(1); // assertFalse(nnode2.isStandbyState()); // // organizer = new HdfsSortedOplogOrganizer(regionManager1, 0); // byte[] keyBytes1 = BlobHelper.serializeToBlob("1"); // byte[] keyBytes3 = BlobHelper.serializeToBlob("3"); // byte[] keyBytes4 = BlobHelper.serializeToBlob("4"); // assertEquals("2-1", organizer.read(keyBytes1).getValue()); // assertEquals("3-3", 
organizer.read(keyBytes3).getValue()); // assertEquals("1-4", organizer.read(keyBytes4).getValue()); // ex.remove(); // // region1.destroyRegion(); // store1.destroy(); // cluster.shutdown(); // FileUtils.deleteDirectory(new File("hdfs-test-cluster")); // } // public void testActiveReaderWithNameNodeHA() throws Exception { // deleteMiniClusterDir(); // int nn1port = AvailablePortHelper.getRandomAvailableTCPPort(); // int nn2port = AvailablePortHelper.getRandomAvailableTCPPort(); // // MiniDFSCluster cluster = initMiniHACluster(nn1port, nn2port); // initClientHAConf(nn1port, nn2port); // // HDFSStoreImpl store1 = (HDFSStoreImpl) hsf.create("Store-1"); // regionfactory.setHDFSStoreName(store1.getName()); // Region<Object, Object> region1 = regionfactory.create("region-1"); // HdfsRegionManager regionManager1 = ((LocalRegion)region1).getHdfsRegionManager(); // // HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager1, 0); // ArrayList<TestEvent> items = new ArrayList<TestEvent>(); // for (int i = 100000; i < 101000; i++) { // items.add(new TestEvent(("" + i), (i + " some string " + i))); // } // organizer.flush(items.iterator(), items.size()); // organizer.getSortedOplogs().get(0).get().getReader(); // // dunit.DistributedTestCase.ExpectedException ex = DistributedTestCase.addExpectedException("java.io.EOFException"); // NameNode nnode2 = cluster.getNameNode(1); // assertTrue(nnode2.isStandbyState()); // cluster.shutdownNameNode(0); // cluster.transitionToActive(1); // assertFalse(nnode2.isStandbyState()); // // for (int i = 100000; i < 100500; i++) { // byte[] keyBytes1 = BlobHelper.serializeToBlob("" + i); // assertEquals(i + " some string " + i, organizer.read(keyBytes1).getValue()); // } // ex.remove(); // region1.destroyRegion(); // store1.destroy(); // cluster.shutdown(); // FileUtils.deleteDirectory(new File("hdfs-test-cluster")); // } // public void testFlushWithNameNodeHA() throws Exception { // deleteMiniClusterDir(); // int nn1port 
= AvailablePortHelper.getRandomAvailableTCPPort(); // int nn2port = AvailablePortHelper.getRandomAvailableTCPPort(); // // MiniDFSCluster cluster = initMiniHACluster(nn1port, nn2port); // // initClientHAConf(nn1port, nn2port); // HDFSStoreImpl store1 = (HDFSStoreImpl) hsf.create("Store-1"); // // regionfactory.setHDFSStoreName(store1.getName()); // Region<Object, Object> region1 = regionfactory.create("region-1"); // HdfsRegionManager regionManager1 = ((LocalRegion)region1).getHdfsRegionManager(); // // HoplogOrganizer<SortedHoplogPersistedEvent> organizer = new HdfsSortedOplogOrganizer(regionManager1, 0); // ArrayList<TestEvent> items = new ArrayList<TestEvent>(); // items.add(new TestEvent(("1"), ("1-1"))); // organizer.flush(items.iterator(), items.size()); // // dunit.DistributedTestCase.ExpectedException ex = DistributedTestCase.addExpectedException("java.io.EOFException"); // NameNode nnode2 = cluster.getNameNode(1); // assertTrue(nnode2.isStandbyState()); // cluster.shutdownNameNode(0); // cluster.transitionToActive(1); // assertFalse(nnode2.isStandbyState()); // // items.add(new TestEvent(("4"), ("1-4"))); // organizer.flush(items.iterator(), items.size()); // byte[] keyBytes1 = BlobHelper.serializeToBlob("1"); // byte[] keyBytes4 = BlobHelper.serializeToBlob("4"); // assertEquals("1-1", organizer.read(keyBytes1).getValue()); // assertEquals("1-4", organizer.read(keyBytes4).getValue()); // ex.remove(); // // region1.destroyRegion(); // store1.destroy(); // cluster.shutdown(); // FileUtils.deleteDirectory(new File("hdfs-test-cluster")); // } public HoplogOrganizer<SortedHoplogPersistedEvent> doRead(HdfsRegionManager rm) throws Exception { HoplogOrganizer<SortedHoplogPersistedEvent> organizer = new HdfsSortedOplogOrganizer(rm, 0); // flush and create hoplog ArrayList<TestEvent> items = new ArrayList<TestEvent>(); items.add(new TestEvent(("1"), ("1-1"))); items.add(new TestEvent(("4"), ("1-4"))); organizer.flush(items.iterator(), items.size()); items.clear(); 
items.add(new TestEvent(("1"), ("2-1"))); items.add(new TestEvent(("3"), ("2-3"))); organizer.flush(items.iterator(), items.size()); items.clear(); items.add(new TestEvent(("3"), ("3-3"))); items.add(new TestEvent(("5"), ("3-5"))); organizer.flush(items.iterator(), items.size()); // check file existence in bucket directory FileStatus[] hoplogs = getBucketHoplogs(rm.getStore().getFileSystem(), rm.getRegionFolder() + "/" + 0, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION); // expect 3 files are 3 flushes assertEquals(3, hoplogs.length); byte[] keyBytes1 = BlobHelper.serializeToBlob("1"); byte[] keyBytes3 = BlobHelper.serializeToBlob("3"); byte[] keyBytes4 = BlobHelper.serializeToBlob("4"); // expect key 1 from hoplog 2 assertEquals("2-1", organizer.read(keyBytes1).getValue()); // expect key 3 from hoplog 3 assertEquals("3-3", organizer.read(keyBytes3).getValue()); // expect key 4 from hoplog 1 assertEquals("1-4", organizer.read(keyBytes4).getValue()); return organizer; } /** * Tests bucket organizer initialization during startup. 
Existing hoplogs should identified and * returned */ public void testHoplogIdentification() throws Exception { // create one empty file and one directories in bucket directory Path bucketPath = new Path(testDataDir, getName() + "/0"); FileSystem fs = hdfsStore.getFileSystem(); fs.createNewFile(new Path(bucketPath, "temp_file")); fs.mkdirs(new Path(bucketPath, "temp_dir")); // create 2 hoplogs files each of type flush, minor and major hoplog HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, 0); String[] extensions = { HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.MINOR_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.MINOR_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION}; for (String string : extensions) { Hoplog oplog = organizer.getTmpSortedOplog(null, string); createHoplog(0, oplog); organizer.makeLegitimate(oplog); } // create a temp hoplog Hoplog oplog = organizer.getTmpSortedOplog(null, HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION); createHoplog(0, oplog); // bucket directory should have 6 hoplogs, 1 temp log, 1 misc file and 1 directory FileStatus[] results = fs.listStatus(bucketPath); assertEquals(9, results.length); // only two are hoplogs List<Hoplog> list = organizer.identifyAndLoadSortedOplogs(true); assertEquals(6, list.size()); } public void testExpiryMarkerIdentification() throws Exception { // epxired hoplogs from the list below should be deleted String[] files = { "0-1-1231" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION, "0-2-1232" + AbstractHoplogOrganizer.MAJOR_HOPLOG_EXTENSION, "0-3-1233" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION, "0-4-1234" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION, "0-5-1235" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION }; Path bucketPath = new Path(testDataDir, getName() + "/0"); FileSystem fs = hdfsStore.getFileSystem(); for (String 
file : files) { Hoplog oplog = new HFileSortedOplog(hdfsStore, new Path(bucketPath, file), blockCache, stats, storeStats); createHoplog(10, oplog); } String marker1 = "0-4-1234" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION + AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION; fs.createNewFile(new Path(bucketPath, marker1)); String marker2 = "0-5-1235" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION + AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION; fs.createNewFile(new Path(bucketPath, marker2)); FileStatus[] hoplogs = getBucketHoplogs(getName() + "/0", ""); assertEquals(7, hoplogs.length); HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer( regionManager, 0); FileStatus[] markers = organizer.getExpiryMarkers(); // one hoplog and one exp marker will be deletion targets assertEquals(2, markers.length); for (FileStatus marker : markers) { String name = marker.getPath().getName(); assertTrue(name.equals(marker1) || name.equals(marker2)); } organizer.close(); } public void testExpiredHoplogCleanup() throws Exception { // epxired hoplogs from the list below should be deleted String[] files = { "0-1-0000" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION, "0-1-1111" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION, "0-1-1111" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION + AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION, "0-2-0000" + AbstractHoplogOrganizer.MAJOR_HOPLOG_EXTENSION, "0-2-2222" + AbstractHoplogOrganizer.MAJOR_HOPLOG_EXTENSION, "0-3-0000" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION, "0-3-3333" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION, "0-3-3333" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION + AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION, "0-4-4444" + AbstractHoplogOrganizer.MAJOR_HOPLOG_EXTENSION }; Path bucketPath = new Path(testDataDir, getName() + "/0"); FileSystem fs = hdfsStore.getFileSystem(); for (String file : files) { if (file.endsWith(AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION)) { fs.createNewFile(new 
Path(bucketPath, file)); continue; } Hoplog oplog = new HFileSortedOplog(hdfsStore, new Path(bucketPath, file), blockCache, stats, storeStats); createHoplog(10, oplog); } FileStatus[] hoplogs = getBucketHoplogs(getName() + "/0", ""); assertEquals(9, hoplogs.length); long target = System.currentTimeMillis(); TimeUnit.SECONDS.sleep(1); // all but minor compacted files from below this will not be deleted as it // is after target delete time files = new String[] { "0-4-4444" + AbstractHoplogOrganizer.MAJOR_HOPLOG_EXTENSION + AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION, "0-5-5555" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION + AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION, "0-5-5555" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION, "0-6-6666" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION }; for (String file : files) { if (file.endsWith(AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION)) { fs.createNewFile(new Path(bucketPath, file)); continue; } Hoplog oplog = new HFileSortedOplog(hdfsStore, new Path(bucketPath, file), blockCache, stats, storeStats); createHoplog(10, oplog); } hoplogs = getBucketHoplogs(getName() + "/0", ""); assertEquals(13, hoplogs.length); int hopSize = 0; for (FileStatus file : hoplogs) { if(file.getLen() > hopSize) { hopSize = (int) file.getLen(); } } final AtomicInteger behavior = new AtomicInteger(0); HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, 0) { @Override protected FileStatus[] getExpiryMarkers() throws IOException { if (behavior.get() == 1) { ArrayList<FileStatus> markers = new ArrayList<FileStatus>(); for (FileStatus marker : super.getExpiryMarkers()) { markers.add(marker); } // inject a dummy old expiry marker for major compacted file long age = 2 * HDFSStore.DEFAULT_MAJOR_COMPACTION_INTERVAL_MINS * 60 * 1000; String markerName = "0-2-2222" + AbstractHoplogOrganizer.MAJOR_HOPLOG_EXTENSION + EXPIRED_HOPLOG_EXTENSION; FileStatus marker = new FileStatus(0, false, 1, 1024, 
System.currentTimeMillis() - age, new Path(bucketPath, markerName)); markers.add(marker); return markers.toArray(new FileStatus[markers.size()]); } return super.getExpiryMarkers(); } }; List<FileStatus> list = organizer.getOptimizationTargets(target); assertEquals(6, list.size()); behavior.set(1); list = organizer.getOptimizationTargets(target); assertEquals(8, list.size()); assertEquals(9 * hopSize, stats.getStoreUsageBytes()); int count = organizer.deleteExpiredFiles(list); assertEquals(8, count); assertEquals(5 * hopSize, stats.getStoreUsageBytes()); List<FileStatus> tmp = new ArrayList<FileStatus>(Arrays.asList(hoplogs)); for (Iterator<FileStatus> iter = tmp.iterator(); iter.hasNext();) { hoplogs = getBucketHoplogs(getName() + "/0", ""); FileStatus file = iter.next(); for (FileStatus hoplog : hoplogs) { if(hoplog.getPath().getName().startsWith("0-5-5555")) { fail("this file should have been deleted" + hoplog.getPath().getName()); } if (hoplog.getPath().getName().equals(file.getPath().getName())) { iter.remove(); break; } } } assertEquals(7, tmp.size()); organizer.close(); } public void testAlterPurgeInterval() throws Exception { // epxired hoplogs from the list below should be deleted String[] files = { "0-1-0000" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION, "0-1-1111" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION, "0-2-2222" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION, "0-4-4444" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION }; Path bucketPath = new Path(testDataDir, getName() + "/0"); hdfsStore.getFileSystem(); for (String file : files) { Hoplog oplog = new HFileSortedOplog(hdfsStore, new Path(bucketPath, file), blockCache, stats, storeStats); createHoplog(10, oplog); } FileStatus[] hoplogs = getBucketHoplogs(getName() + "/0", ""); int hopSize = 0; for (FileStatus file : hoplogs) { if(file.getLen() > hopSize) { hopSize = (int) file.getLen(); } } final AtomicInteger behavior = new AtomicInteger(0); HdfsSortedOplogOrganizer organizer = new 
HdfsSortedOplogOrganizer(regionManager, 0) { @Override protected FileStatus[] getExpiryMarkers() throws IOException { if (behavior.get() == 1) { ArrayList<FileStatus> markers = new ArrayList<FileStatus>(); // inject dummy old expiry markers long age = 120 * 1000; // 120 seconds old String markerName = "0-2-2222" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION + EXPIRED_HOPLOG_EXTENSION; FileStatus marker = new FileStatus(0, false, 1, 1024, System.currentTimeMillis() - age, new Path(bucketPath, markerName)); markers.add(marker); markerName = "0-4-4444" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION + EXPIRED_HOPLOG_EXTENSION; marker = new FileStatus(0, false, 1, 1024, System.currentTimeMillis() - age, new Path(bucketPath, markerName)); markers.add(marker); return markers.toArray(new FileStatus[markers.size()]); } return super.getExpiryMarkers(); } }; behavior.set(1); int count = organizer.initiateCleanup(); assertEquals(0, count); HDFSStoreMutator mutator = hdfsStore.createHdfsStoreMutator(); mutator.setPurgeInterval(1); hdfsStore.alter(mutator); count = organizer.initiateCleanup(); assertEquals(4, count); } public void testInUseExpiredHoplogCleanup() throws Exception { Path bucketPath = new Path(testDataDir, getName() + "/0"); FileSystem fs = hdfsStore.getFileSystem(); String[] files = new String[] { "0-1-1231" + AbstractHoplogOrganizer.FLUSH_HOPLOG_EXTENSION, "0-2-1232" + AbstractHoplogOrganizer.MAJOR_HOPLOG_EXTENSION, "0-3-1233" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION, "0-4-1234" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION, "0-5-1235" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION }; for (String file : files) { Hoplog oplog = new HFileSortedOplog(hdfsStore, new Path(bucketPath, file), blockCache, stats, storeStats); createHoplog(10, oplog); } final HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer( regionManager, 0); List<TrackedReference<Hoplog>> hopRefs = organizer.getSortedOplogs(); assertEquals(files.length, hopRefs.size()); 
// this is expiry marker for one of the files that will be compacted below. // While compaction is going on file deletion should not happen files = new String[] { "0-5-1235" + AbstractHoplogOrganizer.MINOR_HOPLOG_EXTENSION + AbstractHoplogOrganizer.EXPIRED_HOPLOG_EXTENSION }; for (String file : files) { fs.createNewFile(new Path(bucketPath, file)); } FileStatus[] hoplogs = getBucketHoplogs(getName() + "/0", ""); assertEquals(hopRefs.size() + files.length, hoplogs.length); TimeUnit.MILLISECONDS.sleep(200); long target = System.currentTimeMillis(); List<FileStatus> list = organizer.getOptimizationTargets(target); assertEquals(2, list.size()); for (TrackedReference<Hoplog> ref : hopRefs) { ref.increment("test"); } fs.delete(new Path(bucketPath, files[0]), false); TimeUnit.MILLISECONDS.sleep(50); organizer.markSortedOplogForDeletion(hopRefs, false); list = organizer.getOptimizationTargets(target); assertEquals(0, list.size()); organizer.close(); } /** * Tests max sequence initialization when file already exists and server starts */ public void testSeqInitialization() throws Exception { // create many hoplogs files HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, 0); String[] extensions = { HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.MINOR_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION, HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION}; for (String string : extensions) { Hoplog oplog = organizer.getTmpSortedOplog(null, string); createHoplog(1, oplog); organizer.makeLegitimate(oplog); } // a organizer should start creating files starting at 6 as five files already existed organizer = new HdfsSortedOplogOrganizer(regionManager, 0); Hoplog oplog = organizer.getTmpSortedOplog(null, HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION); createHoplog(1, oplog); organizer.makeLegitimate(oplog); assertEquals(6, 
HdfsSortedOplogOrganizer.getSequenceNumber(oplog));
    organizer.close();
  }

  /**
   * Tests temp file creation and making file legitimate.
   * A hoplog is first written under a temporary name; {@code makeLegitimate} renames it to
   * its final name, so the tmp path must stop existing and the final path must exist.
   */
  public void testMakeLegitimate() throws Exception {
    HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, 0);

    // create empty tmp hoplog
    Hoplog oplog = organizer.getTmpSortedOplog(null, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION);
    createHoplog(0, oplog);

    // tmp file must be visible on the file system before the rename
    Path hoplogPath = new Path(testDataDir, getName() + "/0/" + oplog.getFileName());
    FileSystem fs = hdfsStore.getFileSystem();
    FileStatus hoplogStatus = fs.getFileStatus(hoplogPath);
    assertNotNull(hoplogStatus);

    organizer.makeLegitimate(oplog);

    // the old tmp path must now be gone; getFileStatus either returns nothing
    // or (on most FileSystem implementations) throws FileNotFoundException
    try {
      hoplogStatus = fs.getFileStatus(hoplogPath);
      assertNull(hoplogStatus);
    } catch (FileNotFoundException e) {
      // tmp file is renamed hence should not exist, exception expected
    }

    // after the rename the hoplog name carries the flush extension and exists on disk
    assertTrue(oplog.getFileName().endsWith(HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION));
    hoplogPath = new Path(testDataDir, getName() + "/0/" + oplog.getFileName());
    hoplogStatus = fs.getFileStatus(hoplogPath);
    assertNotNull(hoplogStatus);
  }

  /**
   * Tests hoplog file name comparator. File names follow the bucket-sequence-timestamp
   * pattern; the comparator must compare the numeric components numerically (not
   * lexically) so that newer hoplogs sort first — see the iteration-order assertions
   * that follow this insertion block.
   */
  public void testHoplogFileComparator() throws IOException {
    String name1 = "bucket1-10-3.hop";
    String name2 = "bucket1-1-20.hop";
    String name3 = "bucket1-30-201.hop";
    String name4 = "bucket1-100-201.hop";

    TreeSet<TrackedReference<Hoplog>> list = new TreeSet<TrackedReference<Hoplog>>(new HoplogComparator());

    // insert soplogs into the list out of expected order
    hdfsStore.getFileSystem();
    list.add(new TrackedReference<Hoplog>(new HFileSortedOplog(hdfsStore, new Path(testDataDir, name2), blockCache, stats, storeStats)));
    list.add(new TrackedReference<Hoplog>(new HFileSortedOplog(hdfsStore, new Path(testDataDir, name4), blockCache, stats, storeStats)));
    list.add(new TrackedReference<Hoplog>(new HFileSortedOplog(hdfsStore, new Path(testDataDir, name1), blockCache, stats, storeStats)));
    list.add(new TrackedReference<Hoplog>(new HFileSortedOplog(hdfsStore, new
Path(testDataDir, name3), blockCache, stats, storeStats))); Iterator<TrackedReference<Hoplog>> iter = list.iterator(); assertEquals(name4, iter.next().get().getFileName()); assertEquals(name3, iter.next().get().getFileName()); assertEquals(name2, iter.next().get().getFileName()); assertEquals(name1, iter.next().get().getFileName()); } /** * Tests clear on a set of hoplogs. */ public void testClear() throws Exception { int bucketId = (int) System.nanoTime(); HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, bucketId); // flush and create hoplog ArrayList<TestEvent> items = new ArrayList<TestEvent>(); items.add(new TestEvent(("1"), ("1-1"))); items.add(new TestEvent(("4"), ("1-4"))); organizer.flush(items.iterator(), items.size()); items.clear(); items.add(new TestEvent(("1"), ("2-1"))); items.add(new TestEvent(("3"), ("2-3"))); organizer.flush(items.iterator(), items.size()); items.clear(); items.add(new TestEvent(("3"), ("3-3"))); items.add(new TestEvent(("5"), ("3-5"))); organizer.flush(items.iterator(), items.size()); // check file existence in bucket directory FileStatus[] hoplogs = getBucketHoplogs(getName() + "/" + bucketId, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION); // expect 3 files are 3 flushes assertEquals(3, hoplogs.length); organizer.clear(); // check that all files are now expired hoplogs = getBucketHoplogs(getName() + "/" + bucketId, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION); FileStatus[] exs = getBucketHoplogs(getName() + "/" + bucketId, HdfsSortedOplogOrganizer.EXPIRED_HOPLOG_EXTENSION); FileStatus[] valids = HdfsSortedOplogOrganizer.filterValidHoplogs(hoplogs, exs); assertEquals(Collections.EMPTY_LIST, Arrays.asList(valids)); assertEquals(0, stats.getActiveFileCount()); assertEquals(0, stats.getInactiveFileCount()); } public void testFixedIntervalMajorCompaction() throws Exception { final AtomicInteger majorCReqCount = new AtomicInteger(0); final Compactor compactor = new AbstractCompactor() { @Override 
public boolean compact(boolean isMajor, boolean isForced) throws IOException { majorCReqCount.incrementAndGet(); return true; } }; HdfsSortedOplogOrganizer organizer = new HdfsSortedOplogOrganizer(regionManager, 0) { @Override public synchronized Compactor getCompactor() { return compactor; } }; regionManager.addOrganizer(0, organizer); System.setProperty(HoplogConfig.JANITOR_INTERVAL_SECS, "1"); HDFSRegionDirector.resetJanitor(); alterMajorCompaction(hdfsStore, true); // create hoplog in the past, 90 seconds before current time organizer.hoplogCreated(getName(), 0, new TestHoplog(hdfsStore, 100, System.currentTimeMillis() - 90000)); TimeUnit.MILLISECONDS.sleep(50); organizer.hoplogCreated(getName(), 0, new TestHoplog(hdfsStore, 100, System.currentTimeMillis() - 90000)); List<TrackedReference<Hoplog>> hoplogs = organizer.getSortedOplogs(); assertEquals(2, hoplogs.size()); for (int i = 0; i < 3; i++) { TimeUnit.SECONDS.sleep(1); assertEquals(0, majorCReqCount.get()); } HDFSStoreMutator mutator = hdfsStore.createHdfsStoreMutator(); mutator.setMajorCompactionInterval(1); hdfsStore.alter(mutator); TimeUnit.SECONDS.sleep(5); assertTrue(3 < majorCReqCount.get()); } public void testCorruptHfileBucketFail() throws Exception { // create a corrupt file FileSystem fs = hdfsStore.getFileSystem(); for (int i = 0; i < 113; i++) { FSDataOutputStream opStream = fs.create(new Path(testDataDir.getName() + "/region-1/" + i + "/1-1-1.hop")); opStream.writeBytes("Some random corrupt file"); opStream.close(); } // create region with store regionfactory.setHDFSStoreName(HDFS_STORE_NAME); Region<Object, Object> region1 = regionfactory.create("region-1"); ExpectedException ex = DistributedTestCase.addExpectedException("CorruptHFileException"); try { region1.get("key"); fail("get should have failed with corrupt file error"); } catch (HDFSIOException e) { // expected } finally { ex.remove(); } region1.destroyRegion(); } public void testMaxOpenReaders() throws Exception { 
System.setProperty("hoplog.bucket.max.open.files", "5"); HoplogOrganizer<? extends PersistedEventImpl> organizer = new HdfsSortedOplogOrganizer(regionManager, 0); ArrayList<TestEvent> items = new ArrayList<TestEvent>(); for (int i = 0; i < 10; i++) { items.clear(); items.add(new TestEvent("" + i, "" + i)); organizer.flush(items.iterator(), items.size()); } HdfsSortedOplogOrganizer bucket = (HdfsSortedOplogOrganizer) organizer; List<TrackedReference<Hoplog>> hoplogs = bucket.getSortedOplogs(); int closedCount = 0 ; for (TrackedReference<Hoplog> hoplog : hoplogs) { HFileSortedOplog hfile = (HFileSortedOplog) hoplog.get(); if (hfile.isClosed()) { closedCount++; } } assertEquals(10, closedCount); assertEquals(10, stats.getActiveFileCount()); assertEquals(0, stats.getActiveReaderCount()); byte[] keyBytes1 = BlobHelper.serializeToBlob("1"); organizer.read(keyBytes1).getValue(); closedCount = 0 ; for (TrackedReference<Hoplog> hoplog : hoplogs) { HFileSortedOplog hfile = (HFileSortedOplog) hoplog.get(); if (hfile.isClosed()) { closedCount++; } } assertEquals(5, closedCount); assertEquals(10, stats.getActiveFileCount()); assertEquals(0, stats.getInactiveFileCount()); assertEquals(5, stats.getActiveReaderCount()); organizer.getCompactor().compact(false, false); assertEquals(1, stats.getActiveFileCount()); assertEquals(0, stats.getActiveReaderCount()); assertEquals(0, stats.getInactiveFileCount()); } public void testConcurrentReadInactiveClose() throws Exception { final HoplogOrganizer<? 
extends PersistedEventImpl> organizer = regionManager.create(0); alterMinorCompaction(hdfsStore, true); ArrayList<TestEvent> items = new ArrayList<TestEvent>(); for (int i = 0; i < 4; i++) { items.clear(); items.add(new TestEvent("" + i, "" + i)); organizer.flush(items.iterator(), items.size()); } final byte[] keyBytes1 = BlobHelper.serializeToBlob("1"); class ReadTask implements Runnable { public void run() { try { organizer.read(keyBytes1); } catch (IOException e) { e.printStackTrace(); } } } ScheduledExecutorService[] readers = new ScheduledExecutorService[10]; for (int i = 0; i < readers.length; i++) { readers[i] = Executors.newSingleThreadScheduledExecutor(); readers[i].scheduleWithFixedDelay(new ReadTask(), 0, 1, TimeUnit.MILLISECONDS); } for (int i = 0; i < 100; i++) { items.clear(); items.add(new TestEvent("" + i, "" + i)); organizer.flush(items.iterator(), items.size()); } for (int i = 0; i < readers.length; i++) { readers[i].shutdown(); readers[i].awaitTermination(1, TimeUnit.SECONDS); TimeUnit.MILLISECONDS.sleep(50); } for (int i = 0; i < 20; i++) { if (stats.getActiveFileCount() < 4) { break; } organizer.getCompactor().compact(false, false); } organizer.performMaintenance(); TimeUnit.SECONDS.sleep(1); assertTrue("" + stats.getActiveFileCount(), stats.getActiveFileCount() <= 4); assertEquals(stats.getActiveReaderCount(), stats.getActiveReaderCount()); assertEquals(0, stats.getInactiveFileCount()); } public void testEmptyBucketCleanup() throws Exception { HdfsSortedOplogOrganizer o = new HdfsSortedOplogOrganizer(regionManager, 0); long target = System.currentTimeMillis(); o.getOptimizationTargets(target); // making sure empty bucket is not causing IO errors. no assertion needed // for this test case. 
} public void testExpiredFilterAtStartup() throws Exception { HdfsSortedOplogOrganizer bucket = new HdfsSortedOplogOrganizer(regionManager, 0); ArrayList<TestEvent> items = new ArrayList<TestEvent>(); items.add(new TestEvent(("1"), ("1-1"))); items.add(new TestEvent(("4"), ("1-4"))); bucket.flush(items.iterator(), items.size()); items.clear(); items.add(new TestEvent(("1"), ("2-1"))); items.add(new TestEvent(("3"), ("2-3"))); bucket.flush(items.iterator(), items.size()); FileStatus[] files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION); assertEquals(2, files.length); files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.EXPIRED_HOPLOG_EXTENSION); assertEquals(0, files.length); HdfsSortedOplogOrganizer bucket2 = new HdfsSortedOplogOrganizer(regionManager, 0); List<TrackedReference<Hoplog>> hoplogs = bucket2.getSortedOplogs(); assertEquals(2, hoplogs.size()); bucket.clear(); files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION); assertEquals(2, files.length); files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.EXPIRED_HOPLOG_EXTENSION); assertEquals(2, files.length); bucket2 = new HdfsSortedOplogOrganizer(regionManager, 0); hoplogs = bucket2.getSortedOplogs(); assertEquals(0, hoplogs.size()); items.clear(); items.add(new TestEvent(("1"), ("2-1"))); items.add(new TestEvent(("3"), ("2-3"))); bucket.flush(items.iterator(), items.size()); bucket2 = new HdfsSortedOplogOrganizer(regionManager, 0); hoplogs = bucket2.getSortedOplogs(); assertEquals(1, hoplogs.size()); bucket.close(); bucket2.close(); } public void testExpireFilterRetartAfterClear() throws Exception { HdfsSortedOplogOrganizer bucket = new HdfsSortedOplogOrganizer(regionManager, 0); ArrayList<TestEvent> items = new ArrayList<TestEvent>(); items.add(new TestEvent(("1"), ("1-1"))); items.add(new TestEvent(("4"), ("1-4"))); bucket.flush(items.iterator(), items.size()); items.clear(); 
items.add(new TestEvent(("1"), ("2-1")));
    items.add(new TestEvent(("3"), ("2-3")));
    bucket.flush(items.iterator(), items.size());

    // two flushes so far -> two flush hoplogs, no expiry markers yet
    FileStatus[] files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION);
    assertEquals(2, files.length);
    files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.EXPIRED_HOPLOG_EXTENSION);
    assertEquals(0, files.length);

    // a fresh organizer over the same bucket must pick up both valid hoplogs
    HdfsSortedOplogOrganizer bucket2 = new HdfsSortedOplogOrganizer(regionManager, 0);
    List<TrackedReference<Hoplog>> hoplogs = bucket2.getSortedOplogs();
    assertEquals(2, hoplogs.size());

    bucket.clear();

    // clear() leaves the hoplog files in place but adds an expiry marker per file
    files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION);
    assertEquals(2, files.length);
    files = getBucketHoplogs(getName() + "/" + 0, HdfsSortedOplogOrganizer.EXPIRED_HOPLOG_EXTENSION);
    assertEquals(2, files.length);

    // a restart after clear must filter out the expired hoplogs entirely
    bucket2 = new HdfsSortedOplogOrganizer(regionManager, 0);
    hoplogs = bucket2.getSortedOplogs();
    assertEquals(0, hoplogs.size());

    bucket.close();
    bucket2.close();
  }

  /**
   * tests maintenance does not fail even if there are no hoplogs
   */
  public void testNoFileJanitor() throws Exception {
    HoplogOrganizer<? extends PersistedEventImpl> organizer;
    organizer = regionManager.create(0);
    // no flushes have happened; maintenance on an empty bucket must be a no-op
    organizer.performMaintenance();
  }

  /**
   * Verifies SORTED_HOPLOG_PATTERN: accepts bucket-seq-ts names with the flush (.hop),
   * minor (.ihop) and major (.chop) extensions, and rejects unknown extensions,
   * tmp/expired suffixes and names with a missing numeric component.
   */
  public void testValidHoplogRegex() {
    String[] valid = {"1-1-1.hop", "1-1-1.ihop", "1-1-1.chop"};
    String[] invalid = {"1-1-1.khop", "1-1-1.hop.tmphop", "1-1-1.hop.ehop", "1-1-.hop", "-1-1.hop"};

    for (String string : valid) {
      Matcher matcher = HdfsSortedOplogOrganizer.SORTED_HOPLOG_PATTERN.matcher(string);
      assertTrue(matcher.matches());
    }

    for (String string : invalid) {
      Matcher matcher = HdfsSortedOplogOrganizer.SORTED_HOPLOG_PATTERN.matcher(string);
      assertFalse(matcher.matches());
    }
  }

  public void testOneHoplogMajorCompaction() throws Exception {
    HoplogOrganizer<?
extends PersistedEventImpl> organizer = new HdfsSortedOplogOrganizer(regionManager, 0); alterMajorCompaction(hdfsStore, true); ArrayList<TestEvent> items = new ArrayList<TestEvent>(); items.add(new TestEvent(("1"), ("1-1"))); organizer.flush(items.iterator(),items.size()); FileStatus[] files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.FLUSH_HOPLOG_EXTENSION); assertEquals(1, files.length); //Minor compaction will not perform on 1 .hop file organizer.getCompactor().compact(false, false); files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.MINOR_HOPLOG_EXTENSION); assertEquals(0, files.length); //Major compaction will perform on 1 .hop file organizer.getCompactor().compact(true, false); files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION); assertEquals(1, files.length); String hoplogName =files[0].getPath().getName(); files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.MINOR_HOPLOG_EXTENSION); assertEquals(0, files.length); organizer.getCompactor().compact(true, false); files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION); assertEquals(1, files.length); assertEquals(hoplogName, files[0].getPath().getName()); //Minor compaction does not convert major compacted file organizer.getCompactor().compact(false, false); files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.MINOR_HOPLOG_EXTENSION); assertEquals(0, files.length); files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.MAJOR_HOPLOG_EXTENSION); assertEquals(1, files.length); assertEquals(hoplogName, files[0].getPath().getName()); files = getBucketHoplogs(getName() + "/0", HdfsSortedOplogOrganizer.EXPIRED_HOPLOG_EXTENSION); assertEquals(1, files.length); assertNotSame(hoplogName + HdfsSortedOplogOrganizer.EXPIRED_HOPLOG_EXTENSION, files[0].getPath().getName() ); } public void testExposeCleanupInterval() throws Exception { FileSystem fs = 
hdfsStore.getFileSystem(); Path cleanUpIntervalPath = new Path(hdfsStore.getHomeDir(), HoplogConfig.CLEAN_UP_INTERVAL_FILE_NAME); assertTrue(fs.exists(cleanUpIntervalPath)); long interval = HDFSStore.DEFAULT_OLD_FILE_CLEANUP_INTERVAL_MINS *60 * 1000; assertEquals(interval, HoplogUtil.readCleanUpIntervalMillis(fs,cleanUpIntervalPath)); } @Override protected void setUp() throws Exception { System.setProperty(HoplogConfig.JANITOR_INTERVAL_SECS, "" + HoplogConfig.JANITOR_INTERVAL_SECS_DEFAULT); super.setUp(); } }
/* * Copyright (c) 2010-2017 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.certification.test; import com.evolveum.midpoint.notifications.api.transports.Message; import com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.CertCampaignTypeUtil; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.util.DebugUtil; import com.evolveum.midpoint.xml.ns._public.common.common_3.*; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.ContextConfiguration; import org.testng.annotations.Test; import java.io.File; import java.util.Collection; import java.util.Date; import java.util.List; import static com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationResponseType.ACCEPT; import static com.evolveum.midpoint.xml.ns._public.common.common_3.AccessCertificationResponseType.NO_RESPONSE; import static com.evolveum.midpoint.xml.ns._public.common.common_3.ActivationStatusType.ENABLED; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertNotNull; import static org.testng.AssertJUnit.assertTrue; /** * Very simple certification test. * Tests just the basic functionality, along with security features. 
* * @author mederly */ @ContextConfiguration(locations = {"classpath:ctx-certification-test-main.xml"}) @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS) public class TestEscalation extends AbstractCertificationTest { private static final File CERT_DEF_FILE = new File(COMMON_DIR, "certification-of-eroot-user-assignments-escalations.xml"); //protected static final String CERT_DEF_OID = "399e117a-baaa-4e59-b845-21bb838cb7bc"; private AccessCertificationDefinitionType certificationDefinition; private String campaignOid; @Override public void initSystem(Task initTask, OperationResult initResult) throws Exception { super.initSystem(initTask, initResult); DebugUtil.setPrettyPrintBeansAs(PrismContext.LANG_YAML); certificationDefinition = repoAddObjectFromFile(CERT_DEF_FILE, AccessCertificationDefinitionType.class, initResult).asObjectable(); importTriggerTask(initResult); } @Test public void test010CreateCampaign() throws Exception { final String TEST_NAME = "test010CreateCampaign"; TestUtil.displayTestTitle(this, TEST_NAME); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); OperationResult result = task.getResult(); // WHEN TestUtil.displayWhen(TEST_NAME); AccessCertificationCampaignType campaign = certificationService.createCampaign(certificationDefinition.getOid(), task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); assertNotNull("Created campaign is null", campaign); campaignOid = campaign.getOid(); campaign = getObject(AccessCertificationCampaignType.class, campaignOid).asObjectable(); display("campaign", campaign); assertSanityAfterCampaignCreate(campaign, certificationDefinition); assertPercentCompleteAll(campaign, 100, 100, 100); // no cases, no problems } @Test public void test013SearchAllCases() throws Exception { final String TEST_NAME = "test013SearchAllCases"; TestUtil.displayTestTitle(this, TEST_NAME); searchWithNoCasesExpected(TEST_NAME); } @SuppressWarnings("SameParameterValue") private void searchWithNoCasesExpected(String TEST_NAME) throws Exception { // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." + TEST_NAME); OperationResult result = task.getResult(); // WHEN TestUtil.displayWhen(TEST_NAME); List<AccessCertificationCaseType> caseList = modelService.searchContainers( AccessCertificationCaseType.class, CertCampaignTypeUtil.createCasesForCampaignQuery(campaignOid, prismContext), null, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); display("caseList", caseList); assertEquals("Unexpected cases in caseList", 0, caseList.size()); } @Test public void test021OpenFirstStage() throws Exception { final String TEST_NAME = "test021OpenFirstStage"; TestUtil.displayTestTitle(this, TEST_NAME); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); // WHEN TestUtil.displayWhen(TEST_NAME); certificationService.openNextStage(campaignOid, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign in stage 1", campaign); assertSanityAfterCampaignStart(campaign, certificationDefinition, 7); checkAllCasesSanity(campaign.getCase()); List<AccessCertificationCaseType> caseList = campaign.getCase(); // no responses -> NO_RESPONSE in all cases assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_COO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_CEO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ORG_EROOT_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_JACK_OID, ROLE_CEO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_JACK_OID, ORG_EROOT_OID, NO_RESPONSE, NO_RESPONSE, null); assertPercentCompleteAll(campaign, 0, 0, 0); assertEquals("Wrong # of triggers", 2, campaign.getTrigger().size()); // completion + timed-action display("dummy transport", dummyTransport); } @Test public void test032SearchAllCases() throws Exception { final String TEST_NAME = "test032SearchAllCases"; TestUtil.displayTestTitle(this, TEST_NAME); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); OperationResult result = task.getResult(); // WHEN TestUtil.displayWhen(TEST_NAME); List<AccessCertificationCaseType> caseList = modelService.searchContainers( AccessCertificationCaseType.class, null, null, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); display("caseList", caseList); checkAllCasesSanity(caseList); } @Test public void test050SearchWorkItems() throws Exception { final String TEST_NAME = "test050SearchWorkItems"; TestUtil.displayTestTitle(this, TEST_NAME); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." + TEST_NAME); OperationResult result = task.getResult(); // WHEN TestUtil.displayWhen(TEST_NAME); List<AccessCertificationWorkItemType> workItems = certificationService.searchOpenWorkItems( CertCampaignTypeUtil.createWorkItemsForCampaignQuery(campaignOid, prismContext), false, null, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); display("workItems", workItems); assertEquals("Wrong number of certification work items", 7, workItems.size()); checkAllWorkItemsSanity(workItems); } @Test public void test100RecordDecision() throws Exception { final String TEST_NAME = "test100RecordDecision"; TestUtil.displayTestTitle(this, TEST_NAME); // GIVEN Task task = taskManager.createTaskInstance(TestCertificationBasic.class.getName() + "." 
+ TEST_NAME); OperationResult result = task.getResult(); List<AccessCertificationCaseType> caseList = queryHelper.searchCases(campaignOid, null, null, result); AccessCertificationCaseType superuserCase = findCase(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID); // WHEN TestUtil.displayWhen(TEST_NAME); AccessCertificationWorkItemType workItem = CertCampaignTypeUtil.findWorkItem(superuserCase, 1, 1, USER_ADMINISTRATOR_OID); long id = superuserCase.asPrismContainerValue().getId(); certificationService.recordDecision(campaignOid, id, workItem.getId(), ACCEPT, "no comment", task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); caseList = queryHelper.searchCases(campaignOid, null, null, result); display("caseList", caseList); checkAllCasesSanity(caseList); superuserCase = findCase(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID); assertEquals("changed case ID", Long.valueOf(id), superuserCase.asPrismContainerValue().getId()); assertSingleDecision(superuserCase, ACCEPT, "no comment", 1, 1, USER_ADMINISTRATOR_OID, ACCEPT, false); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); assertPercentCompleteAll(campaign, Math.round(100.0f/7.0f), Math.round(100.0f/7.0f), Math.round(100.0f/7.0f)); // 1 reviewer per case (always administrator) } @Test public void test110Escalate() throws Exception { final String TEST_NAME = "test110Escalate"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P2D"); // first escalation is at P1D waitForTaskNextRun(TASK_TRIGGER_SCANNER_OID, true, 20000, true); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); List<AccessCertificationCaseType> caseList = queryHelper.searchCases(campaignOid, null, null, result); display("caseList", caseList); checkAllCasesSanity(caseList); AccessCertificationCaseType ceoCase = findCase(caseList, USER_JACK_OID, ROLE_CEO_OID); display("CEO case after escalation", ceoCase); AccessCertificationWorkItemType workItem = CertCampaignTypeUtil.findWorkItem(ceoCase, 1, 1, USER_ADMINISTRATOR_OID); assertObjectRefs("assignees", false, workItem.getAssigneeRef(), USER_JACK_OID, USER_ADMINISTRATOR_OID); assertEquals("Wrong originalAssignee OID", USER_ADMINISTRATOR_OID, workItem.getOriginalAssigneeRef().getOid()); final WorkItemEscalationLevelType NEW_ESCALATION_LEVEL = new WorkItemEscalationLevelType().number(1).name("jack-level"); assertEquals("Wrong escalation info", NEW_ESCALATION_LEVEL, workItem.getEscalationLevel()); assertEquals("Wrong # of events", 1, ceoCase.getEvent().size()); WorkItemEscalationEventType event = (WorkItemEscalationEventType) ceoCase.getEvent().get(0); assertNotNull("No timestamp in event", event.getTimestamp()); assertEquals("Wrong initiatorRef OID", USER_ADMINISTRATOR_OID, event.getInitiatorRef().getOid()); assertEquals("Wrong workItemId", workItem.getId(), event.getWorkItemId()); assertObjectRefs("assigneeBefore", false, event.getAssigneeBefore(), USER_ADMINISTRATOR_OID); assertObjectRefs("delegatedTo", false, event.getDelegatedTo(), USER_JACK_OID); assertEquals("Wrong delegationMethod", WorkItemDelegationMethodType.ADD_ASSIGNEES, event.getDelegationMethod()); assertEquals("Wrong new escalation level", NEW_ESCALATION_LEVEL, event.getNewEscalationLevel()); 
AccessCertificationCaseType superuserCase = findCase(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID); AccessCertificationWorkItemType superuserWorkItem = CertCampaignTypeUtil.findWorkItem(superuserCase, 1, 1, USER_ADMINISTRATOR_OID); //noinspection SimplifiedTestNGAssertion assertEquals("Escalation info present even if it shouldn't be", null, superuserWorkItem.getEscalationLevel()); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); assertPercentCompleteAll(campaign, Math.round(100.0f/7.0f), Math.round(100.0f/7.0f), Math.round(100.0f/7.0f)); // 1 reviewer per case (always administrator) AccessCertificationStageType currentStage = CertCampaignTypeUtil.getCurrentStage(campaign); assertNotNull(currentStage); assertEquals("Wrong new stage escalation level", NEW_ESCALATION_LEVEL, currentStage.getEscalationLevel()); display("campaign after escalation", campaign); assertEquals("Wrong # of triggers", 2, campaign.getTrigger().size()); // completion + timed-action (P3D) display("dummy transport", dummyTransport); List<Message> messages = dummyTransport.getMessages("dummy:simpleReviewerNotifier"); assertEquals("Wrong # of dummy notifications", 3, messages.size()); // original + new approver + deputy of administrator } @Test public void test120EscalateAgain() throws Exception { final String TEST_NAME = "test120EscalateAgain"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P4D"); // second escalation is at P3D waitForTaskNextRun(TASK_TRIGGER_SCANNER_OID, true, 20000, true); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); List<AccessCertificationCaseType> caseList = queryHelper.searchCases(campaignOid, null, null, result); display("caseList", caseList); checkAllCasesSanity(caseList); AccessCertificationCaseType ceoCase = findCase(caseList, USER_JACK_OID, ROLE_CEO_OID); display("CEO case after escalation", ceoCase); AccessCertificationWorkItemType workItem = CertCampaignTypeUtil.findWorkItem(ceoCase, 1, 1, USER_ELAINE_OID); assertNotNull("No work item found", workItem); assertObjectRefs("assignees", false, workItem.getAssigneeRef(), USER_ELAINE_OID); assertEquals("Wrong originalAssignee OID", USER_ADMINISTRATOR_OID, workItem.getOriginalAssigneeRef().getOid()); final WorkItemEscalationLevelType OLD_ESCALATION_LEVEL = new WorkItemEscalationLevelType().number(1).name("jack-level"); final WorkItemEscalationLevelType NEW_ESCALATION_LEVEL = new WorkItemEscalationLevelType().number(2).name("elaine-level"); assertEquals("Wrong escalation info", NEW_ESCALATION_LEVEL, workItem.getEscalationLevel()); assertEquals("Wrong # of events", 2, ceoCase.getEvent().size()); WorkItemEscalationEventType event = (WorkItemEscalationEventType) ceoCase.getEvent().get(1); assertNotNull("No timestamp in event", event.getTimestamp()); assertEquals("Wrong initiatorRef OID", USER_ADMINISTRATOR_OID, event.getInitiatorRef().getOid()); assertEquals("Wrong workItemId", workItem.getId(), event.getWorkItemId()); assertObjectRefs("assigneeBefore", false, event.getAssigneeBefore(), USER_ADMINISTRATOR_OID, USER_JACK_OID); assertObjectRefs("delegatedTo", false, event.getDelegatedTo(), USER_ELAINE_OID); assertEquals("Wrong delegationMethod", 
WorkItemDelegationMethodType.REPLACE_ASSIGNEES, event.getDelegationMethod()); assertEquals("Wrong old escalation level", OLD_ESCALATION_LEVEL, event.getEscalationLevel()); assertEquals("Wrong new escalation level", NEW_ESCALATION_LEVEL, event.getNewEscalationLevel()); AccessCertificationCaseType superuserCase = findCase(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID); AccessCertificationWorkItemType superuserWorkItem = CertCampaignTypeUtil.findWorkItem(superuserCase, 1, 1, USER_ADMINISTRATOR_OID); //noinspection SimplifiedTestNGAssertion assertEquals("Escalation info present even if it shouldn't be", null, superuserWorkItem.getEscalationLevel()); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); assertPercentCompleteAll(campaign, Math.round(100.0f/7.0f), Math.round(100.0f/7.0f), Math.round(100.0f/7.0f)); // 1 reviewer per case (always administrator) AccessCertificationStageType currentStage = CertCampaignTypeUtil.getCurrentStage(campaign); assertNotNull(currentStage); assertEquals("Wrong new stage escalation level", NEW_ESCALATION_LEVEL, currentStage.getEscalationLevel()); display("campaign after escalation", campaign); assertEquals("Wrong # of triggers", 1, campaign.getTrigger().size()); // completion display("dummy transport", dummyTransport); List<Message> messages = dummyTransport.getMessages("dummy:simpleReviewerNotifier"); assertEquals("Wrong # of dummy notifications", 1, messages.size()); // new approver } @Test public void test130Remediation() throws Exception { final String TEST_NAME = "test130Remediation"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P15D"); // stage ends at P14D waitForTaskNextRun(TASK_TRIGGER_SCANNER_OID, true, 20000, true); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign after remediation", campaign); assertStateAndStage(campaign, AccessCertificationCampaignStateType.IN_REMEDIATION, 2); } @Test public void test140Close() throws Exception { final String TEST_NAME = "test140Close"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." + TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P16D"); certificationManager.closeCampaign(campaignOid, true, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign after close", campaign); assertStateAndStage(campaign, AccessCertificationCampaignStateType.CLOSED, 2); assertEquals("Wrong # of triggers", 1, campaign.getTrigger().size()); // reiterate } @Test public void test200AutomaticReiteration() throws Exception { final String TEST_NAME = "test200AutomaticReiteration"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P18D"); // campaign ends at P16D, reiteration scheduled to P17D waitForTaskNextRun(TASK_TRIGGER_SCANNER_OID, true, 20000, true); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign in stage 1", campaign); assertSanityAfterCampaignStart(campaign, certificationDefinition, 7, 2, 2, new Date(clock.currentTimeMillis())); List<AccessCertificationCaseType> caseList = campaign.getCase(); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID, ACCEPT, ACCEPT, null); // from iteration 1 assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_COO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_CEO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ORG_EROOT_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_JACK_OID, ROLE_CEO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_JACK_OID, ORG_EROOT_OID, NO_RESPONSE, NO_RESPONSE, null); // current iteration/stage: // - 6 cases (all except admin->super), 6 work items, no decisions assertPercentCompleteCurrent(campaign, 0, 0, 0); // current stage (all iterations): // - 7 cases, 7 work items; one case is complete/decided, one work item is done assertPercentCompleteCurrentStage(campaign, 14, 14, 14); // current iteration (all stages) // - 6 cases, 6 work items, no decisions assertPercentCompleteCurrentIteration(campaign, 0, 0, 0); // all stages, all iterations // - 7 cases, 1 complete, 1 decided // - 1 of 7 work items done assertPercentCompleteAll(campaign, 14, 14, 14); assertEquals("Wrong # of triggers", 2, 
campaign.getTrigger().size()); // completion + timed-action display("dummy transport", dummyTransport); } @Test public void test300Close() throws Exception { final String TEST_NAME = "test300Close"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." + TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P19D"); // +1 day relative to previous test certificationManager.closeCampaign(campaignOid, true, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign after close", campaign); assertStateAndStage(campaign, AccessCertificationCampaignStateType.CLOSED, 2); assertEquals("Wrong # of triggers", 0, campaign.getTrigger().size()); // no more automated reiterations } @Test public void test310ManualReiteration() throws Exception { final String TEST_NAME = "test310ManualReiteration"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P20D"); // +1 day relative to previous test certificationManager.reiterateCampaign(campaignOid, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign after reiteration", campaign); assertStateStageIteration(campaign, AccessCertificationCampaignStateType.CREATED, 0, 3); // current iteration/stage: // - 6 cases (all except admin->super), 0 work items // - so, these cases are all complete but none is decided // - no work items so they are all OK assertPercentCompleteCurrent(campaign, 100, 0, 100); // current stage (all iterations): // - 6 cases, 0 work items => the same numbers assertPercentCompleteCurrentStage(campaign, 100, 0, 100); // current iteration (all stages) -> the same assertPercentCompleteCurrentIteration(campaign, 100, 0, 100); // all stages, all iterations // - 7 cases, 1 complete, 1 decided // - 1 of 7 work items done assertPercentCompleteAll(campaign, 14, 14, 14); } @Test public void test320OpenFirstStage() throws Exception { final String TEST_NAME = "test320OpenFirstStage"; TestUtil.displayTestTitle(this, TEST_NAME); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); // WHEN TestUtil.displayWhen(TEST_NAME); certificationService.openNextStage(campaignOid, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign in stage 1", campaign); assertSanityAfterCampaignStart(campaign, certificationDefinition, 7, 3, 3, new Date(clock.currentTimeMillis())); List<AccessCertificationCaseType> caseList = campaign.getCase(); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID, ACCEPT, ACCEPT, null); // from iteration 1 assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_COO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ROLE_CEO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_ADMINISTRATOR_OID, ORG_EROOT_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_JACK_OID, ROLE_CEO_OID, NO_RESPONSE, NO_RESPONSE, null); assertCaseOutcome(caseList, USER_JACK_OID, ORG_EROOT_OID, NO_RESPONSE, NO_RESPONSE, null); // current iteration/stage: // - 6 cases (all except admin->super), 6 work items, no decisions assertPercentCompleteCurrent(campaign, 0, 0, 0); // current stage (all iterations): // - 7 cases, 7 work items; one case is complete/decided, one work item is done assertPercentCompleteCurrentStage(campaign, 14, 14, 14); // current iteration (all stages) // - 6 cases, 6 work items, no decisions assertPercentCompleteCurrentIteration(campaign, 0, 0, 0); // all stages, all iterations // - 7 cases, 1 complete, 1 decided // - 1 of 7 work items done assertPercentCompleteAll(campaign, 14, 14, 14); assertEquals("Wrong # of triggers", 2, campaign.getTrigger().size()); // completion + timed-action display("dummy transport", dummyTransport); } @Test public void test400Close() throws Exception { final 
String TEST_NAME = "test300Close"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." + TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P21D"); // +1 day relative to previous test certificationManager.closeCampaign(campaignOid, true, task, result); // THEN TestUtil.displayThen(TEST_NAME); result.computeStatus(); TestUtil.assertSuccess(result); AccessCertificationCampaignType campaign = getCampaignWithCases(campaignOid); display("campaign after close", campaign); assertStateAndStage(campaign, AccessCertificationCampaignStateType.CLOSED, 2); assertEquals("Wrong # of triggers", 0, campaign.getTrigger().size()); // no more automated reiterations } @Test public void test410ManualReiterationUnavailable() throws Exception { final String TEST_NAME = "test410ManualReiterationUnavailable"; TestUtil.displayTestTitle(this, TEST_NAME); login(getUserFromRepo(USER_ADMINISTRATOR_OID)); // GIVEN Task task = taskManager.createTaskInstance(TestEscalation.class.getName() + "." 
+ TEST_NAME); task.setOwner(userAdministrator.asPrismObject()); OperationResult result = task.getResult(); dummyTransport.clearMessages(); // WHEN TestUtil.displayWhen(TEST_NAME); clock.resetOverride(); clock.overrideDuration("P22D"); // +1 day relative to previous test try { certificationManager.reiterateCampaign(campaignOid, task, result); fail("unexpected success"); } catch (IllegalStateException e) { // THEN System.err.println("got expected exception: " + e.getMessage()); e.printStackTrace(); assertTrue("wrong exception message", e.getMessage().contains("maximum number of iterations (3) was reached")); } } @SuppressWarnings("Duplicates") private void checkAllCasesSanity(Collection<AccessCertificationCaseType> caseList) { assertEquals("Wrong number of certification cases", 7, caseList.size()); checkCaseSanity(caseList, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID, userAdministrator); checkCaseSanity(caseList, USER_ADMINISTRATOR_OID, ROLE_COO_OID, userAdministrator); checkCaseSanity(caseList, USER_ADMINISTRATOR_OID, ROLE_CEO_OID, userAdministrator); checkCaseSanity(caseList, USER_ADMINISTRATOR_OID, ORG_EROOT_OID, userAdministrator); checkCaseSanity(caseList, USER_JACK_OID, ROLE_CEO_OID, userJack, ORG_GOVERNOR_OFFICE_OID, ORG_SCUMM_BAR_OID, ENABLED); checkCaseSanity(caseList, USER_JACK_OID, ORG_EROOT_OID, userJack); } @SuppressWarnings("Duplicates") private void checkAllWorkItemsSanity(Collection<AccessCertificationWorkItemType> workItems) { assertEquals("Wrong number of certification work items", 7, workItems.size()); checkWorkItemSanity(workItems, USER_ADMINISTRATOR_OID, ROLE_SUPERUSER_OID, userAdministrator); checkWorkItemSanity(workItems, USER_ADMINISTRATOR_OID, ROLE_COO_OID, userAdministrator); checkWorkItemSanity(workItems, USER_ADMINISTRATOR_OID, ROLE_CEO_OID, userAdministrator); checkWorkItemSanity(workItems, USER_ADMINISTRATOR_OID, ORG_EROOT_OID, userAdministrator); checkWorkItemSanity(workItems, USER_JACK_OID, ROLE_CEO_OID, userJack, 
ORG_GOVERNOR_OFFICE_OID, ORG_SCUMM_BAR_OID, ENABLED); checkWorkItemSanity(workItems, USER_JACK_OID, ORG_EROOT_OID, userJack); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.beans;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.RandomAccessFile;
import java.util.Set;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.geode.cache.CacheFactory;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.management.GemFireProperties;
import org.apache.geode.management.internal.cli.CliUtil;

/**
 * Various Utility Functions to be used by MBeans
 */
public class BeanUtilFuncs {

  /**
   * returns the tail of the log file
   *
   * @param logFile the log file to tail; may be null or an empty path
   * @param numLines number of trailing lines to return
   * @return tail of the log file, or null when there is no log file or it is empty
   */
  public static String tailSystemLog(File logFile, final int numLines) throws IOException {
    // No log file configured (null or empty path) -> nothing to tail.
    if (logFile == null || logFile.equals(new File(""))) {
      return null;
    }
    // Cap each read pass: the result is later sent via DataOutput.writeUTF,
    // which only accepts up to 65535 bytes.
    int maxBuffer = 65500; // DataOutput.writeUTF will only accept 65535 bytes
    long fileLength = logFile.length();
    if (fileLength == 0) {
      return null;
    }
    // Small files are read whole; larger files are scanned backwards in
    // maxBuffer-sized chunks (loop continues in the following statements).
    byte[] buffer = (fileLength > maxBuffer) ?
new byte[maxBuffer] : new byte[(int) fileLength]; int readSize = buffer.length; RandomAccessFile f = new RandomAccessFile(logFile, "r"); int linesRead = 0; long seekPosition = fileLength - readSize; StringBuilder returnStr = new StringBuilder(); StringBuilder workingString = new StringBuilder(); String separator = System.getProperty("line.separator"); while (linesRead < numLines) { f.seek(seekPosition); f.read(buffer, 0, readSize); workingString.insert(0, new String(buffer, 0, readSize)); String[] splits = workingString.toString().split("\\r?\\n"); // The first element may be part of a line, so we don't count that // but we need to save it for the next time around. if (splits.length > 1) { for (int i = splits.length - 1; i > 0; i--) { returnStr.insert(0, separator).insert(0, splits[i]); if (++linesRead >= numLines) { break; } } } if (seekPosition == 0 && linesRead < numLines) { returnStr.insert(0, separator).insert(0, splits[0]); break; } workingString = new StringBuilder(splits[0]); if (readSize < seekPosition) { seekPosition = seekPosition - readSize; } else { readSize = (int) seekPosition; seekPosition = 0; } } f.close(); returnStr.insert(0, separator).insert(0, "SystemLog:"); return returnStr.toString(); } /** * @return tail of log */ public static String tailSystemLog(DistributionConfig sc, final int numLines) throws IOException { File logFile = sc.getLogFile(); if (logFile == null || logFile.equals(new File(""))) { return null; } if (!logFile.isAbsolute()) { logFile = new File(logFile.getAbsolutePath()); } return tailSystemLog(logFile, numLines); } public static DistributedMember getDistributedMemberByNameOrId(String memberNameOrId) { DistributedMember memberFound = null; if (memberNameOrId != null) { InternalCache cache = (InternalCache) CacheFactory.getAnyInstance(); Set<DistributedMember> memberSet = CliUtil.getAllMembers(cache); for (DistributedMember member : memberSet) { if (memberNameOrId.equals(member.getId()) || 
memberNameOrId.equals(member.getName())) { memberFound = member; break; } } } return memberFound; } public static GemFireProperties initGemfireProperties(DistributionConfig config) { String memberGroups = ""; String configFile = null; String includeFile = null; GemFireProperties gemFirePropertyData = new GemFireProperties(); gemFirePropertyData.setMemberName(config.getName()); gemFirePropertyData.setMemberGroups(memberGroups); gemFirePropertyData.setMcastPort(config.getMcastPort()); gemFirePropertyData.setMcastAddress(config.getMcastAddress().getHostAddress()); gemFirePropertyData.setBindAddress(config.getBindAddress()); gemFirePropertyData.setTcpPort(config.getTcpPort()); gemFirePropertyData.setCacheXMLFile(config.getCacheXmlFile().getAbsolutePath()); gemFirePropertyData.setConfigFile(configFile); gemFirePropertyData.setMcastTTL(config.getMcastTtl()); gemFirePropertyData.setServerBindAddress(config.getServerBindAddress()); gemFirePropertyData.setLocators(config.getLocators()); gemFirePropertyData.setStartLocator(config.getStartLocator()); gemFirePropertyData.setLogFile(config.getLogFile().getAbsolutePath()); gemFirePropertyData.setLogLevel(config.getLogLevel()); gemFirePropertyData.setStatisticSamplingEnabled(config.getStatisticSamplingEnabled()); gemFirePropertyData.setStatisticArchiveFile(config.getStatisticArchiveFile().getAbsolutePath()); gemFirePropertyData.setIncludeFile(includeFile); gemFirePropertyData.setAckWaitThreshold(config.getAckWaitThreshold()); gemFirePropertyData.setAckSevereAlertThreshold(config.getAckSevereAlertThreshold()); gemFirePropertyData.setArchiveFileSizeLimit(config.getArchiveFileSizeLimit()); gemFirePropertyData.setArchiveDiskSpaceLimit(config.getArchiveDiskSpaceLimit()); gemFirePropertyData.setLogFileSizeLimit(config.getLogFileSizeLimit()); gemFirePropertyData.setLogDiskSpaceLimit(config.getLogDiskSpaceLimit()); gemFirePropertyData.setClusterSSLEnabled(config.getClusterSSLEnabled()); 
gemFirePropertyData.setClusterSSLCiphers(config.getClusterSSLCiphers()); gemFirePropertyData.setClusterSSLProtocols(config.getClusterSSLProtocols()); gemFirePropertyData .setClusterSSLRequireAuthentication(config.getClusterSSLRequireAuthentication()); gemFirePropertyData.setClusterSSLKeyStore(config.getClusterSSLKeyStore()); gemFirePropertyData.setClusterSSLKeyStoreType(config.getClusterSSLKeyStoreType()); gemFirePropertyData.setClusterSSLKeyStorePassword(config.getClusterSSLKeyStorePassword()); gemFirePropertyData.setClusterSSLTrustStore(config.getClusterSSLTrustStore()); gemFirePropertyData.setClusterSSLTrustStorePassword(config.getClusterSSLTrustStorePassword()); gemFirePropertyData.setServerSSLEnabled(config.getServerSSLEnabled()); gemFirePropertyData.setServerSSLCiphers(config.getServerSSLCiphers()); gemFirePropertyData.setServerSSLProtocols(config.getServerSSLProtocols()); gemFirePropertyData .setServerSSLRequireAuthentication(config.getServerSSLRequireAuthentication()); gemFirePropertyData.setServerSSLKeyStore(config.getServerSSLKeyStore()); gemFirePropertyData.setServerSSLKeyStoreType(config.getServerSSLKeyStoreType()); gemFirePropertyData.setServerSSLKeyStorePassword(config.getServerSSLKeyStorePassword()); gemFirePropertyData.setServerSSLTrustStore(config.getServerSSLTrustStore()); gemFirePropertyData.setServerSSLTrustStorePassword(config.getServerSSLTrustStorePassword()); gemFirePropertyData.setGatewaySSLEnabled(config.getGatewaySSLEnabled()); gemFirePropertyData.setGatewaySSLCiphers(config.getGatewaySSLCiphers()); gemFirePropertyData.setGatewaySSLProtocols(config.getGatewaySSLProtocols()); gemFirePropertyData .setGatewaySSLRequireAuthentication(config.getGatewaySSLRequireAuthentication()); gemFirePropertyData.setGatewaySSLKeyStore(config.getGatewaySSLKeyStore()); gemFirePropertyData.setGatewaySSLKeyStoreType(config.getGatewaySSLKeyStoreType()); gemFirePropertyData.setGatewaySSLKeyStorePassword(config.getGatewaySSLKeyStorePassword()); 
gemFirePropertyData.setGatewaySSLTrustStore(config.getGatewaySSLTrustStore()); gemFirePropertyData.setGatewaySSLTrustStorePassword(config.getGatewaySSLTrustStorePassword()); gemFirePropertyData.setJmxManagerSSLEnabled(config.getJmxManagerSSLEnabled()); gemFirePropertyData.setJmxManagerSSLCiphers(config.getJmxManagerSSLCiphers()); gemFirePropertyData.setJmxManagerSSLProtocols(config.getJmxManagerSSLProtocols()); gemFirePropertyData .setJmxManagerSSLRequireAuthentication(config.getJmxManagerSSLRequireAuthentication()); gemFirePropertyData.setJmxManagerSSLKeyStore(config.getJmxManagerSSLKeyStore()); gemFirePropertyData.setJmxManagerSSLKeyStoreType(config.getJmxManagerSSLKeyStoreType()); gemFirePropertyData.setJmxManagerSSLKeyStorePassword(config.getJmxManagerSSLKeyStorePassword()); gemFirePropertyData.setJmxManagerSSLTrustStore(config.getJmxManagerSSLTrustStore()); gemFirePropertyData .setJmxManagerSSLTrustStorePassword(config.getJmxManagerSSLTrustStorePassword()); gemFirePropertyData.setHttpServiceSSLEnabled(config.getHttpServiceSSLEnabled()); gemFirePropertyData.setHttpServiceSSLCiphers(config.getHttpServiceSSLCiphers()); gemFirePropertyData.setHttpServiceSSLProtocols(config.getHttpServiceSSLProtocols()); gemFirePropertyData .setHttpServiceSSLRequireAuthentication(config.getHttpServiceSSLRequireAuthentication()); gemFirePropertyData.setHttpServiceSSLKeyStore(config.getHttpServiceSSLKeyStore()); gemFirePropertyData.setHttpServiceSSLKeyStoreType(config.getHttpServiceSSLKeyStoreType()); gemFirePropertyData .setHttpServiceSSLKeyStorePassword(config.getHttpServiceSSLKeyStorePassword()); gemFirePropertyData.setHttpServiceSSLTrustStore(config.getHttpServiceSSLTrustStore()); gemFirePropertyData .setHttpServiceSSLTrustStorePassword(config.getHttpServiceSSLTrustStorePassword()); gemFirePropertyData.setSocketLeaseTime(config.getSocketLeaseTime()); gemFirePropertyData.setSocketBufferSize(config.getSocketBufferSize()); 
gemFirePropertyData.setMcastSendBufferSize(config.getMcastSendBufferSize()); gemFirePropertyData.setMcastRecvBufferSize(config.getMcastRecvBufferSize()); gemFirePropertyData.setMcastByteAllowance(config.getMcastFlowControl().getByteAllowance()); gemFirePropertyData .setMcastRechargeThreshold(config.getMcastFlowControl().getRechargeThreshold()); gemFirePropertyData.setMcastRechargeBlockMs(config.getMcastFlowControl().getRechargeBlockMs()); gemFirePropertyData.setUdpFragmentSize(config.getUdpFragmentSize()); gemFirePropertyData.setUdpRecvBufferSize(config.getUdpRecvBufferSize()); gemFirePropertyData.setDisableTcp(config.getDisableTcp()); gemFirePropertyData.setEnableTimeStatistics(config.getEnableTimeStatistics()); gemFirePropertyData .setEnableNetworkPartitionDetection(config.getEnableNetworkPartitionDetection()); gemFirePropertyData.setMemberTimeout(config.getMemberTimeout()); gemFirePropertyData.setMembershipPortRange(config.getMembershipPortRange()); gemFirePropertyData.setConserveSockets(config.getConserveSockets()); gemFirePropertyData.setRoles(config.getRoles()); gemFirePropertyData.setMaxWaitTimeForReconnect(config.getMaxWaitTimeForReconnect()); gemFirePropertyData.setMaxNumReconnectTries(config.getMaxNumReconnectTries()); gemFirePropertyData.setAsyncDistributionTimeout(config.getAsyncDistributionTimeout()); gemFirePropertyData.setAsyncQueueTimeout(config.getAsyncQueueTimeout()); gemFirePropertyData.setAsyncMaxQueueSize(config.getAsyncMaxQueueSize()); gemFirePropertyData.setClientConflation(config.getClientConflation()); gemFirePropertyData.setDurableClientId(config.getDurableClientId()); gemFirePropertyData.setDurableClientTimeout(config.getDurableClientTimeout()); gemFirePropertyData.setSecurityClientAuthInit(config.getSecurityClientAuthInit()); gemFirePropertyData.setSecurityClientAuthenticator(config.getSecurityClientAuthenticator()); gemFirePropertyData.setSecurityClientDHAlgo(config.getSecurityClientDHAlgo()); 
gemFirePropertyData.setSecurityPeerAuthInit(config.getSecurityPeerAuthInit()); gemFirePropertyData.setSecurityPeerAuthenticator(config.getSecurityPeerAuthenticator()); gemFirePropertyData.setSecurityClientAccessor(config.getSecurityClientAccessor()); gemFirePropertyData.setSecurityClientAccessorPP(config.getSecurityClientAccessorPP()); gemFirePropertyData.setSecurityLogLevel(config.getSecurityLogLevel()); gemFirePropertyData.setSecurityLogFile(config.getSecurityLogFile().getAbsolutePath()); gemFirePropertyData.setSecurityPeerMembershipTimeout(config.getSecurityPeerMembershipTimeout()); gemFirePropertyData.setRemoveUnresponsiveClient(config.getRemoveUnresponsiveClient()); gemFirePropertyData.setDeltaPropagation(config.getDeltaPropagation()); gemFirePropertyData.setRedundancyZone(config.getRedundancyZone()); gemFirePropertyData.setEnforceUniqueHost(config.getEnforceUniqueHost()); gemFirePropertyData.setStatisticSampleRate(config.getStatisticSampleRate()); gemFirePropertyData.setUdpSendBufferSize(config.getUdpSendBufferSize()); gemFirePropertyData.setJmxManager(config.getJmxManager()); gemFirePropertyData.setJmxManagerStart(config.getJmxManagerStart()); gemFirePropertyData.setJmxManagerPort(config.getJmxManagerPort()); gemFirePropertyData.setJmxManagerBindAddress(config.getJmxManagerBindAddress()); gemFirePropertyData.setJmxManagerHostnameForClients(config.getJmxManagerHostnameForClients()); gemFirePropertyData.setJmxManagerPasswordFile(config.getJmxManagerPasswordFile()); gemFirePropertyData.setJmxManagerAccessFile(config.getJmxManagerAccessFile()); gemFirePropertyData.setJmxManagerHttpPort(config.getJmxManagerHttpPort()); gemFirePropertyData.setJmxManagerUpdateRate(config.getJmxManagerUpdateRate()); gemFirePropertyData.setHttpServicePort(config.getHttpServicePort()); gemFirePropertyData.setHttpServiceBindAddress(config.getHttpServiceBindAddress()); gemFirePropertyData.setStartDevRestApi(config.getStartDevRestApi()); 
gemFirePropertyData.setSSLCiphers(config.getSSLCiphers()); gemFirePropertyData .setSecurableCommunicationChannel(config.getSecurableCommunicationChannels()); gemFirePropertyData .setSSLWebServiceRequireAuthentication(config.getSSLWebRequireAuthentication()); gemFirePropertyData.setSSLKeyStore(config.getSSLKeyStore()); gemFirePropertyData.setSSLKeyStoreType(config.getSSLKeyStoreType()); gemFirePropertyData.setSSLKeyStorePassword(config.getSSLKeyStorePassword()); gemFirePropertyData.setSSLTrustStore(config.getSSLTrustStore()); gemFirePropertyData.setSSLTrustStorePassword(config.getSSLTrustStorePassword()); gemFirePropertyData.setClusterSSLAlias(config.getClusterSSLAlias()); gemFirePropertyData.setServerSSLAlias(config.getServerSSLAlias()); gemFirePropertyData.setJmxSSLAlias(config.getJMXSSLAlias()); gemFirePropertyData.setGatewaySSLAlias(config.getGatewaySSLAlias()); gemFirePropertyData.setLocatorSSLAlias(config.getLocatorSSLAlias()); gemFirePropertyData.setHttpServiceSSLAlias(config.getHTTPServiceSSLAlias()); gemFirePropertyData.setSSLDefaultAlias(config.getSSLDefaultAlias()); return gemFirePropertyData; } /** * Compresses a given String. It is encoded using ISO-8859-1, So any decompression of the * compressed string should also use ISO-8859-1 * * @param str String to be compressed. 
* @return compressed bytes, or null when {@code str} is null or empty
   */
  public static byte[] compress(String str) throws IOException {
    if (str == null || str.length() == 0) {
      return null;
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // FIX: try-with-resources — the GZIP stream was leaked (left unfinished)
    // if write() threw. Closing also finishes the GZIP trailer, so toByteArray()
    // must run after the try block.
    // NOTE(review): the bytes are UTF-8 encoded, not ISO-8859-1 as the Javadoc
    // above claims; decompress() reads UTF-8 as well.
    try (GZIPOutputStream gzip = new GZIPOutputStream(out)) {
      gzip.write(str.getBytes("UTF-8"));
    }
    return out.toByteArray();
  }

  /**
   * Decompresses bytes produced by {@link #compress(String)}.
   *
   * <p>Note: line terminators in the original string are dropped — decompressed
   * lines are concatenated without separators (pre-existing behavior, preserved).
   *
   * @param bytes bytes to be decompressed
   * @return a decompressed String, or null when {@code bytes} is null or empty
   */
  public static String decompress(byte[] bytes) throws IOException {
    if (bytes == null || bytes.length == 0) {
      return null;
    }
    // FIX: try-with-resources — the GZIP/reader streams were never closed.
    try (BufferedReader bf = new BufferedReader(new InputStreamReader(
        new GZIPInputStream(new ByteArrayInputStream(bytes)), "UTF-8"))) {
      // FIX: StringBuilder instead of repeated String concatenation, which was
      // O(n^2) in the decompressed size.
      StringBuilder outStr = new StringBuilder();
      String line;
      while ((line = bf.readLine()) != null) {
        outStr.append(line);
      }
      return outStr.toString();
    }
  }
}
package org.spongycastle.crypto.engines;

import java.math.BigInteger;
import java.security.SecureRandom;

import org.spongycastle.crypto.CipherParameters;
import org.spongycastle.crypto.DataLengthException;
import org.spongycastle.crypto.Digest;
import org.spongycastle.crypto.params.CramerShoupKeyParameters;
import org.spongycastle.crypto.params.CramerShoupPrivateKeyParameters;
import org.spongycastle.crypto.params.CramerShoupPublicKeyParameters;
import org.spongycastle.crypto.params.ParametersWithRandom;
import org.spongycastle.util.BigIntegers;

/**
 * Essentially the Cramer-Shoup encryption / decryption algorithms according to
 * "A practical public key cryptosystem provably secure against adaptive chosen ciphertext attack." (Crypto 1998)
 */
public class CramerShoupCoreEngine
{
    private static final BigInteger ONE = BigInteger.valueOf(1);

    // Key material (public or private) supplied via init().
    private CramerShoupKeyParameters key;
    // Randomness source; initSecureRandom() leaves this null when initialised for decryption.
    private SecureRandom random;
    // true = engine encrypts, false = engine decrypts.
    private boolean forEncryption;
    // Optional label for "labelled" Cramer-Shoup; when non-null it is mixed into the hash
    // in encryptBlock()/decryptBlock().
    private String label = null;

    /**
     * initialise the CramerShoup engine.
     *
     * @param forEncryption whether this engine should encrypt or decrypt
     * @param param the necessary CramerShoup key parameters.
     * @param label the label for labelled CS as {@link String}
     */
    public void init(boolean forEncryption, CipherParameters param, String label)
    {
        // Delegate key/random setup to the two-argument overload, then record the label.
        init(forEncryption, param);

        this.label = label;
    }

    /**
     * initialise the CramerShoup engine.
     *
     * @param forEncryption whether this engine should encrypt or decrypt
     * @param param the necessary CramerShoup key parameters.
*/ public void init(boolean forEncryption, CipherParameters param) { SecureRandom providedRandom = null; if (param instanceof ParametersWithRandom) { ParametersWithRandom rParam = (ParametersWithRandom)param; key = (CramerShoupKeyParameters)rParam.getParameters(); providedRandom = rParam.getRandom(); } else { key = (CramerShoupKeyParameters)param; } this.random = initSecureRandom(forEncryption, providedRandom); this.forEncryption = forEncryption; } /** * Return the maximum size for an input block to this engine. For Cramer * Shoup this is always one byte less than the key size on encryption, and * the same length as the key size on decryption. * * @return maximum size for an input block. * <p/> * TODO: correct? */ public int getInputBlockSize() { int bitSize = key.getParameters().getP().bitLength(); if (forEncryption) { return (bitSize + 7) / 8 - 1; } else { return (bitSize + 7) / 8; } } /** * Return the maximum size for an output block to this engine. For Cramer * Shoup this is always one byte less than the key size on decryption, and * the same length as the key size on encryption. * * @return maximum size for an output block. * <p/> * TODO: correct? 
 */
public int getOutputBlockSize()
{
    int bitSize = key.getParameters().getP().bitLength();

    if (forEncryption)
    {
        // Encrypting: outputs are group elements mod p, up to the full byte length of p.
        return (bitSize + 7) / 8;
    }
    else
    {
        // Decrypting: recovered plaintexts are strictly below p, hence one byte less.
        return (bitSize + 7) / 8 - 1;
    }
}

// Converts a byte-array message fragment into a BigInteger, rejecting inputs that are
// too large for the group (anything >= p, or over the block size when encrypting).
public BigInteger convertInput(byte[] in, int inOff, int inLen)
{
    if (inLen > (getInputBlockSize() + 1))
    {
        throw new DataLengthException("input too large for Cramer Shoup cipher.");
    }
    else if (inLen == (getInputBlockSize() + 1) && forEncryption)
    {
        throw new DataLengthException("input too large for Cramer Shoup cipher.");
    }

    byte[] block;

    // Only copy when the requested window is not the whole array.
    if (inOff != 0 || inLen != in.length)
    {
        block = new byte[inLen];
        System.arraycopy(in, inOff, block, 0, inLen);
    }
    else
    {
        block = in;
    }

    // Sign-magnitude constructor with signum 1: interpret bytes as an unsigned value.
    BigInteger res = new BigInteger(1, block);
    if (res.compareTo(key.getParameters().getP()) >= 0)
    {
        throw new DataLengthException("input too large for Cramer Shoup cipher.");
    }

    return res;
}

// Converts a BigInteger result back to bytes, normalising the length: BigInteger's
// toByteArray() may prepend a sign byte or drop leading zeros, so pad or trim to the
// expected block size.
public byte[] convertOutput(BigInteger result)
{
    byte[] output = result.toByteArray();

    if (!forEncryption)
    {
        if (output[0] == 0 && output.length > getOutputBlockSize())
        {
            // have ended up with an extra zero byte, copy down.
            byte[] tmp = new byte[output.length - 1];
            System.arraycopy(output, 1, tmp, 0, tmp.length);
            return tmp;
        }
        if (output.length < getOutputBlockSize())
        {
            // have ended up with less bytes than normal, lengthen
            byte[] tmp = new byte[getOutputBlockSize()];
            System.arraycopy(output, 0, tmp, tmp.length - output.length, output.length);
            return tmp;
        }
    }
    else
    {
        if (output[0] == 0)
        {
            // have ended up with an extra zero byte, copy down.
            byte[] tmp = new byte[output.length - 1];
            System.arraycopy(output, 1, tmp, 0, tmp.length);
            return tmp;
        }
    }

    return output;
}

/**
 * Encrypts one block (a group element) under the public key.
 *
 * <p>Computes u1 = g1^r, u2 = g2^r, e = h^r * m, and the validity tag
 * v = c^r * d^(r*a) where a hashes (u1, u2, e[, label]).
 *
 * <p>Returns {@code null} when the engine is not initialised for encryption with a
 * public key, or when the message is not a valid group element (>= p).
 */
public CramerShoupCiphertext encryptBlock(BigInteger input)
{
    CramerShoupCiphertext result = null;

    if (!key.isPrivate() && this.forEncryption && key instanceof CramerShoupPublicKeyParameters)
    {
        CramerShoupPublicKeyParameters pk = (CramerShoupPublicKeyParameters)key;
        BigInteger p = pk.getParameters().getP();
        BigInteger g1 = pk.getParameters().getG1();
        BigInteger g2 = pk.getParameters().getG2();
        BigInteger h = pk.getH();

        if (!isValidMessage(input, p))
        {
            return result;
        }

        // Fresh per-encryption randomness r in [1, p-2].
        BigInteger r = generateRandomElement(p, random);

        BigInteger u1, u2, v, e, a;

        u1 = g1.modPow(r, p);
        u2 = g2.modPow(r, p);
        e = h.modPow(r, p).multiply(input).mod(p);

        // a = Hash(u1 || u2 || e [|| label]) — binds the tag v to the ciphertext.
        Digest digest = pk.getParameters().getH();
        byte[] u1Bytes = u1.toByteArray();
        digest.update(u1Bytes, 0, u1Bytes.length);
        byte[] u2Bytes = u2.toByteArray();
        digest.update(u2Bytes, 0, u2Bytes.length);
        byte[] eBytes = e.toByteArray();
        digest.update(eBytes, 0, eBytes.length);
        if (this.label != null)
        {
            // NOTE(review): label.getBytes() uses the platform default charset — verify
            // both peers agree on the encoding, or this should be pinned (e.g. UTF-8).
            byte[] lBytes = this.label.getBytes();
            digest.update(lBytes, 0, lBytes.length);
        }
        byte[] out = new byte[digest.getDigestSize()];
        digest.doFinal(out, 0);
        a = new BigInteger(1, out);

        v = pk.getC().modPow(r, p).multiply(pk.getD().modPow(r.multiply(a), p)).mod(p);

        result = new CramerShoupCiphertext(u1, u2, e, v);
    }
    return result;
}

/**
 * Decrypts one ciphertext block under the private key.
 *
 * <p>Recomputes the validity tag v from (u1, u2, e[, label]) and rejects the
 * ciphertext (throws) if it does not match — this check is what gives Cramer-Shoup
 * its chosen-ciphertext security. On success returns m = e / u1^z.
 *
 * <p>Returns {@code null} when the engine is not initialised for decryption with a
 * private key.
 *
 * @throws CramerShoupCiphertextException if the ciphertext fails the validity check
 */
public BigInteger decryptBlock(CramerShoupCiphertext input)
    throws CramerShoupCiphertextException
{
    BigInteger result = null;

    if (key.isPrivate() && !this.forEncryption && key instanceof CramerShoupPrivateKeyParameters)
    {
        CramerShoupPrivateKeyParameters sk = (CramerShoupPrivateKeyParameters)key;

        BigInteger p = sk.getParameters().getP();

        // a = Hash(u1 || u2 || e [|| label]) — must mirror encryptBlock exactly.
        Digest digest = sk.getParameters().getH();
        byte[] u1Bytes = input.getU1().toByteArray();
        digest.update(u1Bytes, 0, u1Bytes.length);
        byte[] u2Bytes = input.getU2().toByteArray();
        digest.update(u2Bytes, 0, u2Bytes.length);
        byte[] eBytes = input.getE().toByteArray();
        digest.update(eBytes, 0, eBytes.length);
        if (this.label != null)
        {
            byte[] lBytes = this.label.getBytes();
            digest.update(lBytes, 0, lBytes.length);
        }
        byte[] out = new byte[digest.getDigestSize()];
        digest.doFinal(out, 0);

        BigInteger a = new BigInteger(1, out);
        // Expected tag: u1^(x1 + y1*a) * u2^(x2 + y2*a) mod p.
        BigInteger v = input.u1.modPow(sk.getX1().add(sk.getY1().multiply(a)), p).
            multiply(input.u2.modPow(sk.getX2().add(sk.getY2().multiply(a)), p)).mod(p);

        // check correctness of ciphertext
        if (input.v.equals(v))
        {
            result = input.e.multiply(input.u1.modPow(sk.getZ(), p).modInverse(p)).mod(p);
        }
        else
        {
            throw new CramerShoupCiphertextException("Sorry, that ciphertext is not correct");
        }
    }
    return result;
}

// Uniform random element of [1, p-1] used as the per-encryption exponent r.
private BigInteger generateRandomElement(BigInteger p, SecureRandom random)
{
    return BigIntegers.createRandomInRange(ONE, p.subtract(ONE), random);
}

/**
 * just checking whether the message m is actually less than the group order p
 */
private boolean isValidMessage(BigInteger m, BigInteger p)
{
    return m.compareTo(p) < 0;
}

// Returns a usable RNG only when one is needed (encryption); decryption needs none,
// so the field stays null in that case.
protected SecureRandom initSecureRandom(boolean needed, SecureRandom provided)
{
    return !needed ? null : (provided != null) ? provided : new SecureRandom();
}

/**
 * CS exception for wrong cipher-texts
 */
public static class CramerShoupCiphertextException
    extends Exception
{
    private static final long serialVersionUID = -6360977166495345076L;

    public CramerShoupCiphertextException(String msg)
    {
        super(msg);
    }
}
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.cpp; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; import com.google.common.collect.Streams; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.FailAction; import com.google.devtools.build.lib.actions.MutableActionGraph.ActionConflictException; import com.google.devtools.build.lib.analysis.Allowlist; import com.google.devtools.build.lib.analysis.AnalysisUtils; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.FileProvider; import com.google.devtools.build.lib.analysis.FilesToRunProvider; import com.google.devtools.build.lib.analysis.MakeVariableSupplier.MapBackedMakeVariableSupplier; import com.google.devtools.build.lib.analysis.OutputGroupInfo; import com.google.devtools.build.lib.analysis.RuleConfiguredTargetBuilder; import com.google.devtools.build.lib.analysis.RuleConfiguredTargetFactory; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.RuleErrorConsumer; import com.google.devtools.build.lib.analysis.Runfiles; import com.google.devtools.build.lib.analysis.RunfilesProvider; import 
com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.test.InstrumentedFilesInfo;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.packages.AttributeMap;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.packages.ImplicitOutputsFunction;
import com.google.devtools.build.lib.packages.RawAttributeMapper;
import com.google.devtools.build.lib.packages.TargetUtils;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.rules.cpp.CcCommon.CcFlagsSupplier;
import com.google.devtools.build.lib.rules.cpp.CcCompilationHelper.CompilationInfo;
import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration;
import com.google.devtools.build.lib.rules.cpp.Link.LinkTargetType;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.util.FileTypeSet;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;

/**
 * A ConfiguredTarget for <code>cc_library</code> rules.
 */
public abstract class CcLibrary implements RuleConfiguredTargetFactory {

  /** A string constant for the name of archive library(.a, .lo) output group. */
  public static final String ARCHIVE_LIBRARY_OUTPUT_GROUP_NAME = "archive";

  /** A string constant for the name of dynamic library output group.
   */
  public static final String DYNAMIC_LIBRARY_OUTPUT_GROUP_NAME = "dynamic_library";

  /** A string constant for the name of Windows def file output group. */
  public static final String DEF_FILE_OUTPUT_GROUP_NAME = "def_file";

  /** Name of the allowlist gating implicit-output usage (see maybeAddDeniedImplicitOutputsProvider). */
  public static final String IMPLICIT_OUTPUTS_ALLOWLIST = "allowed_cc_lib_implicit_outputs";

  // Language-specific hooks (validation, headers-checking mode, ...) injected per dialect.
  private final CppSemantics semantics;

  protected CcLibrary(CppSemantics semantics) {
    this.semantics = semantics;
  }

  // These file extensions don't generate object files.
  private static final FileTypeSet NO_OBJECT_GENERATING_FILETYPES =
      FileTypeSet.of(
          CppFileTypes.CPP_HEADER,
          CppFileTypes.ARCHIVE,
          CppFileTypes.PIC_ARCHIVE,
          CppFileTypes.ALWAYS_LINK_LIBRARY,
          CppFileTypes.ALWAYS_LINK_PIC_LIBRARY,
          CppFileTypes.SHARED_LIBRARY,
          CppFileTypes.VERSIONED_SHARED_LIBRARY);

  /**
   * Entry point for analysis of a plain cc_library target: reads the rule's attributes
   * and delegates all real work to {@link #init}.
   */
  @Override
  public ConfiguredTarget create(RuleContext context)
      throws InterruptedException, RuleErrorException, ActionConflictException {
    RuleConfiguredTargetBuilder builder = new RuleConfiguredTargetBuilder(context);
    boolean linkStatic = context.attributes().get("linkstatic", Type.BOOLEAN);
    init(
        semantics,
        context,
        builder,
        /* additionalCopts= */ ImmutableList.of(),
        /* soFilename= */ null,
        context.attributes().get("alwayslink", Type.BOOLEAN),
        /* neverLink= */ false,
        linkStatic,
        /* addDynamicRuntimeInputArtifactsToRunfiles= */ false);
    return builder.build();
  }

  /**
   * Shared implementation of cc_library analysis: validates attributes, sets up
   * compilation and linking helpers, compiles, links, and populates {@code targetBuilder}
   * with providers and output groups. On any reported rule error it bails out early via
   * {@code addEmptyRequiredProviders}.
   */
  public static void init(
      CppSemantics semantics,
      RuleContext ruleContext,
      RuleConfiguredTargetBuilder targetBuilder,
      ImmutableList<String> additionalCopts,
      PathFragment soFilename,
      boolean alwaysLink,
      boolean neverLink,
      boolean linkStatic,
      boolean addDynamicRuntimeInputArtifactsToRunfiles)
      throws RuleErrorException, InterruptedException {
    CcCommon.checkRuleLoadedThroughMacro(ruleContext);
    semantics.validateDeps(ruleContext);
    if (ruleContext.hasErrors()) {
      addEmptyRequiredProviders(targetBuilder);
      return;
    }

    final CcCommon common = new CcCommon(ruleContext);
    common.reportInvalidOptions(ruleContext);

    CcToolchainProvider ccToolchain =
common.getToolchain(); CppConfiguration cppConfiguration = ruleContext.getFragment(CppConfiguration.class); ImmutableMap.Builder<String, String> toolchainMakeVariables = ImmutableMap.builder(); ccToolchain.addGlobalMakeVariables(toolchainMakeVariables); ruleContext.initConfigurationMakeVariableContext( new MapBackedMakeVariableSupplier(toolchainMakeVariables.build()), new CcFlagsSupplier(ruleContext)); FdoContext fdoContext = common.getFdoContext(); FeatureConfiguration featureConfiguration = CcCommon.configureFeaturesOrReportRuleError(ruleContext, ccToolchain, semantics); PrecompiledFiles precompiledFiles = new PrecompiledFiles(ruleContext); semantics.validateAttributes(ruleContext); if (ruleContext.hasErrors()) { addEmptyRequiredProviders(targetBuilder); return; } ImmutableList<TransitiveInfoCollection> deps = ImmutableList.copyOf(ruleContext.getPrerequisites("deps")); if (ruleContext.hasErrors()) { addEmptyRequiredProviders(targetBuilder); return; } Iterable<CcInfo> ccInfosFromDeps = AnalysisUtils.getProviders(deps, CcInfo.PROVIDER); CcCompilationHelper compilationHelper = new CcCompilationHelper( ruleContext, ruleContext, ruleContext.getLabel(), CppHelper.getGrepIncludes(ruleContext), semantics, featureConfiguration, ccToolchain, fdoContext, TargetUtils.getExecutionInfo( ruleContext.getRule(), ruleContext.isAllowTagsPropagation()), /* shouldProcessHeaders= */ true) .fromCommon(common, additionalCopts) .addSources(common.getSources()) .addPrivateHeaders(common.getPrivateHeaders()) .addPublicHeaders(common.getHeaders()) .setCodeCoverageEnabled(CcCompilationHelper.isCodeCoverageEnabled(ruleContext)) .addCcCompilationContexts( Streams.stream(ccInfosFromDeps) .map(CcInfo::getCcCompilationContext) .collect(ImmutableList.toImmutableList())) .addCcCompilationContexts( ImmutableList.of(CcCompilationHelper.getStlCcCompilationContext(ruleContext))) .setHeadersCheckingMode(semantics.determineHeadersCheckingMode(ruleContext)); CcLinkingHelper linkingHelper = new 
CcLinkingHelper( ruleContext, ruleContext.getLabel(), ruleContext, ruleContext, semantics, featureConfiguration, ccToolchain, fdoContext, ruleContext.getConfiguration(), ruleContext.getFragment(CppConfiguration.class), ruleContext.getSymbolGenerator(), TargetUtils.getExecutionInfo( ruleContext.getRule(), ruleContext.isAllowTagsPropagation())) .fromCommon(ruleContext, common) .setGrepIncludes(CppHelper.getGrepIncludes(ruleContext)) .setTestOrTestOnlyTarget(ruleContext.isTestOnlyTarget()) .addLinkopts(common.getLinkopts()) .emitInterfaceSharedLibraries(true) .setAlwayslink(alwaysLink) .setNeverLink(neverLink) .addLinkstamps(ruleContext.getPrerequisites("linkstamp")); Artifact soImplArtifact = null; boolean supportsDynamicLinker = ccToolchain.supportsDynamicLinker(featureConfiguration); // TODO(djasper): This is hacky. We should actually try to figure out whether we generate // ccOutputs. boolean createDynamicLibrary = !linkStatic && supportsDynamicLinker && (appearsToHaveObjectFiles(ruleContext.attributes()) || featureConfiguration.isEnabled(CppRuleClasses.HEADER_MODULE_CODEGEN)); if (soFilename != null) { if (!soFilename.getPathString().endsWith(".so")) { // Sanity check. 
ruleContext.attributeError("outs", "file name must end in '.so'"); } if (createDynamicLibrary) { soImplArtifact = ruleContext.getBinArtifact(soFilename); } } if (ruleContext.getRule().isAttrDefined("textual_hdrs", BuildType.LABEL_LIST)) { compilationHelper.addPublicTextualHeaders( ruleContext.getPrerequisiteArtifacts("textual_hdrs").list()); } if (ruleContext.getRule().isAttrDefined("include_prefix", Type.STRING) && ruleContext.attributes().isAttributeValueExplicitlySpecified("include_prefix")) { compilationHelper.setIncludePrefix( ruleContext.attributes().get("include_prefix", Type.STRING)); } if (ruleContext.getRule().isAttrDefined("strip_include_prefix", Type.STRING) && ruleContext.attributes().isAttributeValueExplicitlySpecified("strip_include_prefix")) { compilationHelper.setStripIncludePrefix( ruleContext.attributes().get("strip_include_prefix", Type.STRING)); } if (common.getLinkopts().contains("-static")) { ruleContext.attributeWarning("linkopts", "Using '-static' here won't work. " + "Did you mean to use 'linkstatic=1' instead?"); } linkingHelper.setShouldCreateDynamicLibrary(createDynamicLibrary); linkingHelper.setLinkerOutputArtifact(soImplArtifact); // If the reason we're not creating a dynamic library is that the toolchain // doesn't support it, then register an action which complains when triggered, // which only happens when some rule explicitly depends on the dynamic library. 
if (!createDynamicLibrary && !supportsDynamicLinker) { ImmutableList.Builder<Artifact> dynamicLibraries = ImmutableList.builder(); dynamicLibraries.add( CppHelper.getLinkedArtifact( ruleContext, ccToolchain, ruleContext.getConfiguration(), LinkTargetType.NODEPS_DYNAMIC_LIBRARY)); if (CppHelper.useInterfaceSharedLibraries( cppConfiguration, ccToolchain, featureConfiguration)) { dynamicLibraries.add( CppHelper.getLinkedArtifact( ruleContext, ccToolchain, ruleContext.getConfiguration(), LinkTargetType.INTERFACE_DYNAMIC_LIBRARY)); } ruleContext.registerAction(new FailAction(ruleContext.getActionOwner(), dynamicLibraries.build(), "Toolchain does not support dynamic linking")); } else if (!createDynamicLibrary && ruleContext.attributes().isConfigurable("srcs")) { // If "srcs" is configurable, the .so output is always declared because the logic that // determines implicit outs doesn't know which value of "srcs" will ultimately get chosen. // Here, where we *do* have the correct value, it may not contain any source files to // generate an .so with. If that's the case, register a fake generating action to prevent // a "no generating action for this artifact" error. 
ImmutableList.Builder<Artifact> dynamicLibraries = ImmutableList.builder(); dynamicLibraries.add( CppHelper.getLinkedArtifact( ruleContext, ccToolchain, ruleContext.getConfiguration(), LinkTargetType.NODEPS_DYNAMIC_LIBRARY)); if (CppHelper.useInterfaceSharedLibraries( cppConfiguration, ccToolchain, featureConfiguration)) { dynamicLibraries.add( CppHelper.getLinkedArtifact( ruleContext, ccToolchain, ruleContext.getConfiguration(), LinkTargetType.INTERFACE_DYNAMIC_LIBRARY)); } ruleContext.registerAction(new FailAction(ruleContext.getActionOwner(), dynamicLibraries.build(), "configurable \"srcs\" triggers an implicit .so output " + "even though there are no sources to compile in this configuration")); } CompilationInfo compilationInfo = compilationHelper.compile(ruleContext::ruleError); CcCompilationOutputs precompiledFilesObjects = CcCompilationOutputs.builder() .addObjectFiles(precompiledFiles.getObjectFiles(/* usePic= */ true)) .addPicObjectFiles(precompiledFiles.getObjectFiles(/* usePic= */ true)) .build(); CcCompilationOutputs ccCompilationOutputs = CcCompilationOutputs.builder() .merge(precompiledFilesObjects) .merge(compilationInfo.getCcCompilationOutputs()) .build(); // Generate .a and .so outputs even without object files to fulfill the rule class // contract wrt. implicit output files, if the contract says so. Behavior here differs // between Bazel and Blaze. 
    CcLinkingOutputs ccLinkingOutputs = CcLinkingOutputs.EMPTY;
    if (ruleContext.getRule().getImplicitOutputsFunction() != ImplicitOutputsFunction.NONE
        || !ccCompilationOutputs.isEmpty()) {
      // On Windows, derive the DLL name suffix and (optionally) generate a .def file
      // describing exported symbols before linking.
      if (featureConfiguration.isEnabled(CppRuleClasses.TARGETS_WINDOWS)) {
        String dllNameSuffix = CppHelper.getDLLHashSuffix(ruleContext, featureConfiguration);
        linkingHelper.setLinkedDLLNameSuffix(dllNameSuffix);

        Artifact generatedDefFile = null;
        Artifact defParser = common.getDefParser();
        if (defParser != null) {
          try {
            generatedDefFile =
                CppHelper.createDefFileActions(
                    ruleContext,
                    defParser,
                    ccCompilationOutputs.getObjectFiles(false),
                    ccToolchain
                        .getFeatures()
                        .getArtifactNameForCategory(
                            ArtifactCategory.DYNAMIC_LIBRARY,
                            ruleContext.getLabel().getName() + dllNameSuffix));
            targetBuilder.addOutputGroup(DEF_FILE_OUTPUT_GROUP_NAME, generatedDefFile);
          } catch (EvalException e) {
            throw ruleContext.throwWithRuleError(e);
          }
        }
        linkingHelper.setDefFile(
            CppHelper.getWindowsDefFileForLinking(
                ruleContext, common.getWinDefFile(), generatedDefFile, featureConfiguration));
      }
      ccLinkingOutputs = linkingHelper.link(ccCompilationOutputs);
    }

    ImmutableSortedMap.Builder<String, NestedSet<Artifact>> outputGroups =
        ImmutableSortedMap.naturalOrder();
    if (!ccLinkingOutputs.isEmpty()) {
      outputGroups.putAll(
          addLinkerOutputArtifacts(
              ruleContext,
              ccToolchain,
              cppConfiguration,
              ruleContext.getConfiguration(),
              ccCompilationOutputs,
              featureConfiguration));
    }

    List<LibraryToLink> precompiledLibraries =
        convertPrecompiledLibrariesToLibraryToLink(
            ruleContext,
            common,
            ruleContext.getFragment(CppConfiguration.class).forcePic(),
            precompiledFiles);

    if (!ccCompilationOutputs.isEmpty()) {
      checkIfLinkOutputsCollidingWithPrecompiledFiles(
          ruleContext, ccLinkingOutputs, precompiledLibraries);
    }

    ImmutableList<LibraryToLink> libraryToLinks =
        createLibrariesToLinkList(
            ccLinkingOutputs.getLibraryToLink(),
            precompiledLibraries,
            ccCompilationOutputs.isEmpty());

    CcLinkingContext ccLinkingContext =
        linkingHelper.buildCcLinkingContextFromLibrariesToLink(
            neverLink ? ImmutableList.of() : libraryToLinks,
            compilationInfo.getCcCompilationContext());
    CcNativeLibraryProvider ccNativeLibraryProvider =
        CppHelper.collectNativeCcLibraries(ruleContext.getPrerequisites("deps"), libraryToLinks);

    /*
     * We always generate a static library, even if there aren't any source files.
     * This keeps things simpler by avoiding special cases when making use of the library.
     * For example, this is needed to ensure that building a library with "bazel build"
     * will also build all of the library's "deps".
     * However, we only generate a dynamic library if there are source files.
     */
    // For now, we don't add the precompiled libraries to the files to build.

    NestedSetBuilder<Artifact> filesBuilder = NestedSetBuilder.stableOrder();
    if (!ccLinkingOutputs.isEmpty()) {
      LibraryToLink artifactsToBuild = ccLinkingOutputs.getLibraryToLink();
      if (artifactsToBuild.getStaticLibrary() != null) {
        filesBuilder.add(artifactsToBuild.getStaticLibrary());
      }
      if (artifactsToBuild.getPicStaticLibrary() != null) {
        filesBuilder.add(artifactsToBuild.getPicStaticLibrary());
      }
      if (!featureConfiguration.isEnabled(CppRuleClasses.TARGETS_WINDOWS)) {
        // Prefer the resolved symlink form of each dynamic/interface library when present.
        if (artifactsToBuild.getResolvedSymlinkDynamicLibrary() != null) {
          filesBuilder.add(artifactsToBuild.getResolvedSymlinkDynamicLibrary());
        } else if (artifactsToBuild.getDynamicLibrary() != null) {
          filesBuilder.add(artifactsToBuild.getDynamicLibrary());
        }
        if (artifactsToBuild.getResolvedSymlinkInterfaceLibrary() != null) {
          filesBuilder.add(artifactsToBuild.getResolvedSymlinkInterfaceLibrary());
        } else if (artifactsToBuild.getInterfaceLibrary() != null) {
          filesBuilder.add(artifactsToBuild.getInterfaceLibrary());
        }
      }
    }

    if (!featureConfiguration.isEnabled(CppRuleClasses.HEADER_MODULE_CODEGEN)) {
      warnAboutEmptyLibraries(ruleContext, ccCompilationOutputs, linkStatic);
    }
    NestedSet<Artifact> filesToBuild = filesBuilder.build();

    // Both PIC and non-PIC objects are instrumented for coverage.
    List<Artifact> instrumentedObjectFiles = new ArrayList<>();
    instrumentedObjectFiles.addAll(compilationInfo.getCcCompilationOutputs().getObjectFiles(false));
    instrumentedObjectFiles.addAll(compilationInfo.getCcCompilationOutputs().getObjectFiles(true));
    InstrumentedFilesInfo instrumentedFilesProvider =
        common.getInstrumentedFilesProvider(
            instrumentedObjectFiles,
            /* withBaselineCoverage= */ true,
            /* virtualToOriginalHeaders= */ NestedSetBuilder.create(Order.STABLE_ORDER));
    CppHelper.maybeAddStaticLinkMarkerProvider(targetBuilder, ruleContext);

    Runfiles.Builder builder = new Runfiles.Builder(ruleContext.getWorkspaceName());
    builder.addDataDeps(ruleContext);
    builder.add(ruleContext, RunfilesProvider.DEFAULT_RUNFILES);
    if (addDynamicRuntimeInputArtifactsToRunfiles) {
      try {
        builder.addTransitiveArtifacts(
            ccToolchain.getDynamicRuntimeLinkInputs(featureConfiguration));
      } catch (EvalException e) {
        throw ruleContext.throwWithRuleError(e);
      }
    }
    Runfiles runfiles = builder.build();
    Runfiles.Builder defaultRunfiles =
        new Runfiles.Builder(ruleContext.getWorkspaceName())
            .merge(runfiles)
            .addArtifacts(LibraryToLink.getDynamicLibrariesForRuntime(!neverLink, libraryToLinks));

    Runfiles.Builder dataRunfiles =
        new Runfiles.Builder(ruleContext.getWorkspaceName())
            .merge(runfiles)
            .addArtifacts(
                LibraryToLink.getDynamicLibrariesForRuntime(
                    /* linkingStatically= */ false, libraryToLinks));

    Map<String, NestedSet<Artifact>> currentOutputGroups =
        CcCompilationHelper.buildOutputGroupsForEmittingCompileProviders(
            compilationInfo.getCcCompilationOutputs(),
            compilationInfo.getCcCompilationContext(),
            ruleContext.getFragment(CppConfiguration.class),
            ccToolchain,
            featureConfiguration,
            ruleContext,
            /* generateHeaderTokensGroup= */ true,
            /* addSelfHeaderTokens= */ true);
    CcStarlarkApiProvider.maybeAdd(ruleContext, targetBuilder);
    targetBuilder
        .setFilesToBuild(filesToBuild)
        .addProvider(ccNativeLibraryProvider)
        .addNativeDeclaredProvider(
            CcInfo.builder()
                .setCcCompilationContext(compilationInfo.getCcCompilationContext())
                .setCcLinkingContext(ccLinkingContext)
                .setCcDebugInfoContext(
                    CppHelper.mergeCcDebugInfoContexts(
                        compilationInfo.getCcCompilationOutputs(), ccInfosFromDeps))
                .build())
        .addOutputGroups(
            CcCommon.mergeOutputGroups(ImmutableList.of(currentOutputGroups, outputGroups.build())))
        .addNativeDeclaredProvider(instrumentedFilesProvider)
        .addProvider(RunfilesProvider.withData(defaultRunfiles.build(), dataRunfiles.build()))
        .addOutputGroup(
            OutputGroupInfo.HIDDEN_TOP_LEVEL,
            collectHiddenTopLevelArtifacts(
                ruleContext, ccToolchain, ccCompilationOutputs, featureConfiguration));
    maybeAddDeniedImplicitOutputsProvider(targetBuilder, ruleContext);
  }

  /**
   * Marks the target with a "denied implicit outputs" provider when it still uses the
   * legacy implicit-outputs function but is not on the allowlist.
   */
  private static void maybeAddDeniedImplicitOutputsProvider(
      RuleConfiguredTargetBuilder targetBuilder, RuleContext ruleContext) {
    if (ruleContext.getRule().getImplicitOutputsFunction() != ImplicitOutputsFunction.NONE
        && !Allowlist.isAvailable(ruleContext, IMPLICIT_OUTPUTS_ALLOWLIST)) {
      targetBuilder.addNativeDeclaredProvider(new DeniedImplicitOutputMarkerProvider());
    }
  }

  /**
   * Collects this target's compilable files plus the HIDDEN_TOP_LEVEL output groups of
   * its deps, so top-level builds also build everything the library depends on.
   */
  private static NestedSet<Artifact> collectHiddenTopLevelArtifacts(
      RuleContext ruleContext,
      CcToolchainProvider toolchain,
      CcCompilationOutputs ccCompilationOutputs,
      FeatureConfiguration featureConfiguration) {
    // Ensure that we build all the dependencies, otherwise users may get confused.
    NestedSetBuilder<Artifact> artifactsToForceBuilder = NestedSetBuilder.stableOrder();
    CppConfiguration cppConfiguration = ruleContext.getFragment(CppConfiguration.class);
    boolean processHeadersInDependencies = cppConfiguration.processHeadersInDependencies();
    boolean usePic = toolchain.usePicForDynamicLibraries(cppConfiguration, featureConfiguration);
    artifactsToForceBuilder.addTransitive(
        ccCompilationOutputs.getFilesToCompile(processHeadersInDependencies, usePic));
    for (OutputGroupInfo dep :
        ruleContext.getPrerequisites("deps", OutputGroupInfo.STARLARK_CONSTRUCTOR)) {
      artifactsToForceBuilder.addTransitive(
          dep.getOutputGroup(OutputGroupInfo.HIDDEN_TOP_LEVEL));
    }
    return artifactsToForceBuilder.build();
  }

  /**
   * Emits attribute warnings for suspicious linkstatic/srcs combinations: a library with
   * no object files that doesn't set linkstatic, or a library that looks source-less at
   * first glance but actually produced something linkable.
   */
  private static void warnAboutEmptyLibraries(RuleContext ruleContext,
      CcCompilationOutputs ccCompilationOutputs,
      boolean linkstaticAttribute) {
    if (ccCompilationOutputs.getObjectFiles(false).isEmpty()
        && ccCompilationOutputs.getObjectFiles(true).isEmpty()) {
      if (!linkstaticAttribute && appearsToHaveObjectFiles(ruleContext.attributes())) {
        ruleContext.attributeWarning("linkstatic",
            "setting 'linkstatic=1' is recommended if there are no object files");
      }
    } else {
      if (!linkstaticAttribute && !appearsToHaveObjectFiles(ruleContext.attributes())) {
        // Pick an example object file (non-PIC preferred) to make the warning concrete.
        Artifact element = Iterables.getFirst(
            ccCompilationOutputs.getObjectFiles(false),
            ccCompilationOutputs.getObjectFiles(true).get(0));
        ruleContext.attributeWarning("srcs",
             "this library appears at first glance to have no object files, "
             + "but on closer inspection it does have something to link, e.g. "
             + element.prettyPrint() + ". "
             + "(You may have used some very confusing rule names in srcs? "
             + "Or the library consists entirely of a linker script?) "
             + "Bazel assumed linkstatic=1, but this may be inappropriate. "
             + "You may need to add an explicit '.cc' file to 'srcs'. "
             + "Alternatively, add 'linkstatic=1' to suppress this warning");
      }
    }
  }

  /**
   * Returns true if the rule (which must be a cc_library rule) appears to have object files.
   * This only looks at the rule itself, not at any other rules (from this package or other
   * packages) that it might reference.
   *
   * <p>In some cases, this may return "true" even though the rule actually has no object files.
   * For example, it will return true for a rule such as
   * <code>cc_library(name = 'foo', srcs = [':bar'])</code> because we can't tell what ':bar' is;
   * it might be a genrule that generates a source file, or it might be a genrule that generates a
   * header file. Likewise,
   * <code>cc_library(name = 'foo', srcs = select({':a': ['foo.cc'], ':b': []}))</code> returns
   * "true" even though the sources *may* be empty. This reflects the fact that there's no way
   * to tell which value "srcs" will take without knowing the rule's configuration.
   *
   * <p>In other cases, this may return "false" even though the rule actually does have object
   * files. For example, it will return false for a rule such as
   * <code>cc_library(name = 'foo', srcs = ['bar.h'])</code> but as in the other example above,
   * we can't tell whether 'bar.h' is a file name or a rule name, and 'bar.h' could in fact be the
   * name of a genrule that generates a source file.
   */
  public static boolean appearsToHaveObjectFiles(AttributeMap rule) {
    if ((rule instanceof RawAttributeMapper) && rule.isConfigurable("srcs")) {
      // Since this method gets called by loading phase logic (e.g. the cc_library implicit outputs
      // function), the attribute mapper may not be able to resolve configurable attributes. When
      // that's the case, there's no way to know which value a configurable "srcs" will take, so
      // we conservatively assume object files are possible.
      return true;
    }

    List<Label> srcs = rule.get("srcs", BuildType.LABEL_LIST);
    if (srcs != null) {
      for (Label srcfile : srcs) {
        /*
         * We cheat a little bit here by looking at the file extension
         * of the Label treated as file name. In general that might
         * not necessarily work, because of the possibility that the
         * user might give a rule a funky name ending in one of these
         * extensions, e.g.
         *    genrule(name = 'foo.h', outs = ['foo.cc'], ...) // Funky rule name!
         *    cc_library(name = 'bar', srcs = ['foo.h']) // This DOES have object files.
         */
        if (!NO_OBJECT_GENERATING_FILETYPES.matches(srcfile.getName())) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Adds linker output artifacts to the given map, to be registered on the configured target as
   * output groups.
   */
  private static Map<String, NestedSet<Artifact>> addLinkerOutputArtifacts(
      RuleContext ruleContext,
      CcToolchainProvider ccToolchain,
      CppConfiguration cppConfiguration,
      BuildConfiguration configuration,
      CcCompilationOutputs ccCompilationOutputs,
      FeatureConfiguration featureConfiguration)
      throws RuleErrorException {

    NestedSetBuilder<Artifact> archiveFile = new NestedSetBuilder<>(Order.STABLE_ORDER);
    NestedSetBuilder<Artifact> dynamicLibrary = new NestedSetBuilder<>(Order.STABLE_ORDER);

    ImmutableSortedMap.Builder<String, NestedSet<Artifact>> outputGroups =
        ImmutableSortedMap.naturalOrder();
    // Rules without these attributes (e.g. non-standard cc rules) get no linker groups.
    if (!ruleContext.attributes().has("alwayslink", Type.BOOLEAN)
        || !ruleContext.attributes().has("linkstatic", Type.BOOLEAN)) {
      return outputGroups.build();
    }

    if (ruleContext.attributes().get("alwayslink", Type.BOOLEAN)) {
      archiveFile.add(
          CppHelper.getLinkedArtifact(
              ruleContext,
              ccToolchain,
              configuration,
              Link.LinkTargetType.ALWAYS_LINK_STATIC_LIBRARY,
              /* linkedArtifactNameSuffix= */ ""));
    } else {
      archiveFile.add(
          CppHelper.getLinkedArtifact(
              ruleContext,
              ccToolchain,
              configuration,
              Link.LinkTargetType.STATIC_LIBRARY,
              /* linkedArtifactNameSuffix= */ ""));
    }

    // A dynamic library group is only produced for linkstatic=0 targets with real outputs.
    if (!ruleContext.attributes().get("linkstatic", Type.BOOLEAN)
        && !ccCompilationOutputs.isEmpty()) {
      dynamicLibrary.add(
          CppHelper.getLinkedArtifact(
              ruleContext,
              ccToolchain,
              configuration,
              Link.LinkTargetType.NODEPS_DYNAMIC_LIBRARY,
              CppHelper.getDLLHashSuffix(ruleContext, featureConfiguration)));

      if (CppHelper.useInterfaceSharedLibraries(
          cppConfiguration, ccToolchain, featureConfiguration)) {
        dynamicLibrary.add(
            CppHelper.getLinkedArtifact(
                ruleContext,
                ccToolchain,
                configuration,
                LinkTargetType.INTERFACE_DYNAMIC_LIBRARY,
                /* linkedArtifactNameSuffix= */ ""));
      }
    }

    outputGroups.put(ARCHIVE_LIBRARY_OUTPUT_GROUP_NAME, archiveFile.build());
    outputGroups.put(DYNAMIC_LIBRARY_OUTPUT_GROUP_NAME, dynamicLibrary.build());
    return outputGroups.build();
  }

  /**
   * Combines precompiled libraries from srcs with this rule's own link output (if any)
   * into the final list of libraries exposed to dependents.
   */
  private static ImmutableList<LibraryToLink> createLibrariesToLinkList(
      @Nullable LibraryToLink outputLibrary,
      List<LibraryToLink> precompiledLibraries,
      boolean ccCompilationOutputsIsEmpty) {
    ImmutableList.Builder<LibraryToLink> librariesToLink = ImmutableList.builder();
    librariesToLink.addAll(precompiledLibraries);

    // For cc_library if it contains precompiled libraries we link them. If it contains normal
    // sources we link them as well, if it doesn't contain normal sources, then we don't do
    // anything else if there were precompiled libraries. However, if there are no precompiled
    // libraries and there are no normal sources, then we use the implicitly created link output
    // files if they exist.
    if (!ccCompilationOutputsIsEmpty
        || (precompiledLibraries.isEmpty()
            && isContentsOfCcLinkingOutputsImplicitlyCreated(
                ccCompilationOutputsIsEmpty, outputLibrary == null))) {
      if (outputLibrary != null) {
        librariesToLink.add(outputLibrary);
      }
    }

    return librariesToLink.build();
  }

  // True when the link output exists purely because of the implicit-outputs contract
  // (no sources compiled, yet a library artifact was still produced).
  private static boolean isContentsOfCcLinkingOutputsImplicitlyCreated(
      boolean ccCompilationOutputsIsEmpty, boolean ccLinkingOutputsIsEmpty) {
    return ccCompilationOutputsIsEmpty && !ccLinkingOutputsIsEmpty;
  }

  /**
   * Indexes artifacts by their library identifier, reporting an attribute error on "srcs"
   * when two artifacts share the same identifier (i.e. would be linked twice).
   */
  private static ImmutableMap<String, Artifact> buildMapIdentifierToArtifact(
      RuleErrorConsumer ruleErrorConsumer, Iterable<Artifact> artifacts) {
    Map<String, Artifact> libraries = new LinkedHashMap<>();
    for (Artifact artifact : artifacts) {
      String identifier = CcLinkingOutputs.libraryIdentifierOf(artifact);
      if (libraries.containsKey(identifier)) {
        ruleErrorConsumer.attributeError(
            "srcs",
            String.format(
                "Trying to link twice a library with the same identifier '%s', files: %s and %s",
                identifier,
                artifact.toDetailString(),
                libraries.get(identifier).toDetailString()));
      }
      libraries.put(identifier, artifact);
    }
    return ImmutableMap.copyOf(libraries);
  }

  /*
   * Add the libraries from srcs, if any. For static/mostly static
   * linking we setup the dynamic libraries if there are no static libraries
   * to choose from. Path to the libraries will be mangled to avoid using
   * absolute path names on the -rpath, but library filenames will be
   * preserved (since some libraries might have SONAME tag) - symlink will
   * be created to the parent directory instead.
   *
   * For compatibility with existing BUILD files, any ".a" or ".lo" files listed in
   * srcs are assumed to be position-independent code, or at least suitable for
   * inclusion in shared libraries, unless they end with ".nopic.a" or ".nopic.lo".
   *
   * Note that some target platforms do not require shared library code to be PIC.
*/ private static List<LibraryToLink> convertPrecompiledLibrariesToLibraryToLink( RuleErrorConsumer ruleErrorConsumer, CcCommon common, boolean forcePic, PrecompiledFiles precompiledFiles) { ImmutableList.Builder<LibraryToLink> librariesToLink = ImmutableList.builder(); Map<String, Artifact> staticLibraries = buildMapIdentifierToArtifact(ruleErrorConsumer, precompiledFiles.getStaticLibraries()); Map<String, Artifact> picStaticLibraries = buildMapIdentifierToArtifact(ruleErrorConsumer, precompiledFiles.getPicStaticLibraries()); Map<String, Artifact> alwayslinkStaticLibraries = buildMapIdentifierToArtifact( ruleErrorConsumer, precompiledFiles.getAlwayslinkStaticLibraries()); Map<String, Artifact> alwayslinkPicStaticLibraries = buildMapIdentifierToArtifact( ruleErrorConsumer, precompiledFiles.getPicAlwayslinkLibraries()); Map<String, Artifact> dynamicLibraries = buildMapIdentifierToArtifact(ruleErrorConsumer, precompiledFiles.getSharedLibraries()); Set<String> identifiersUsed = new HashSet<>(); for (Map.Entry<String, Artifact> staticLibraryEntry : Iterables.concat(staticLibraries.entrySet(), alwayslinkStaticLibraries.entrySet())) { LibraryToLink.Builder libraryToLinkBuilder = LibraryToLink.builder(); String identifier = staticLibraryEntry.getKey(); libraryToLinkBuilder.setLibraryIdentifier(identifier); boolean hasPic = picStaticLibraries.containsKey(identifier); boolean hasAlwaysPic = alwayslinkPicStaticLibraries.containsKey(identifier); if (hasPic || hasAlwaysPic) { Artifact picStaticLibrary = null; if (hasPic) { picStaticLibrary = picStaticLibraries.get(identifier); } else { picStaticLibrary = alwayslinkPicStaticLibraries.get(identifier); } libraryToLinkBuilder.setPicStaticLibrary(picStaticLibrary); } if (!forcePic || !(hasPic || hasAlwaysPic)) { libraryToLinkBuilder.setStaticLibrary(staticLibraryEntry.getValue()); } if (dynamicLibraries.containsKey(identifier)) { Artifact library = dynamicLibraries.get(identifier); Artifact symlink = 
common.getDynamicLibrarySymlink(library, true); libraryToLinkBuilder.setDynamicLibrary(symlink); libraryToLinkBuilder.setResolvedSymlinkDynamicLibrary(library); } libraryToLinkBuilder.setAlwayslink(alwayslinkStaticLibraries.containsKey(identifier)); identifiersUsed.add(identifier); librariesToLink.add(libraryToLinkBuilder.build()); } for (Map.Entry<String, Artifact> picStaticLibraryEntry : Iterables.concat(picStaticLibraries.entrySet(), alwayslinkPicStaticLibraries.entrySet())) { String identifier = picStaticLibraryEntry.getKey(); if (identifiersUsed.contains(identifier)) { continue; } LibraryToLink.Builder libraryToLinkBuilder = LibraryToLink.builder(); libraryToLinkBuilder.setLibraryIdentifier(identifier); libraryToLinkBuilder.setPicStaticLibrary(picStaticLibraryEntry.getValue()); if (dynamicLibraries.containsKey(identifier)) { Artifact library = dynamicLibraries.get(identifier); Artifact symlink = common.getDynamicLibrarySymlink(library, true); libraryToLinkBuilder.setDynamicLibrary(symlink); libraryToLinkBuilder.setResolvedSymlinkDynamicLibrary(library); } libraryToLinkBuilder.setAlwayslink(alwayslinkPicStaticLibraries.containsKey(identifier)); identifiersUsed.add(identifier); librariesToLink.add(libraryToLinkBuilder.build()); } for (Map.Entry<String, Artifact> dynamicLibraryEntry : dynamicLibraries.entrySet()) { String identifier = dynamicLibraryEntry.getKey(); if (identifiersUsed.contains(identifier)) { continue; } LibraryToLink.Builder libraryToLinkBuilder = LibraryToLink.builder(); libraryToLinkBuilder.setLibraryIdentifier(identifier); Artifact library = dynamicLibraryEntry.getValue(); Artifact symlink = common.getDynamicLibrarySymlink(library, true); libraryToLinkBuilder.setDynamicLibrary(symlink); libraryToLinkBuilder.setResolvedSymlinkDynamicLibrary(library); librariesToLink.add(libraryToLinkBuilder.build()); } return librariesToLink.build(); } private static void checkIfLinkOutputsCollidingWithPrecompiledFiles( RuleContext ruleContext, CcLinkingOutputs 
ccLinkingOutputs, List<LibraryToLink> precompiledLibraries) { String identifier = ccLinkingOutputs.getLibraryToLink().getLibraryIdentifier(); for (LibraryToLink precompiledLibrary : precompiledLibraries) { if (identifier.equals(precompiledLibrary.getLibraryIdentifier())) { ruleContext.ruleError( "Can't put library with identifier '" + precompiledLibrary.getLibraryIdentifier() + "' into the srcs of a " + ruleContext.getRuleClassNameForLogging() + " with the same name (" + ruleContext.getRule().getName() + ") which also contains other code or objects to link"); } } } private static void addEmptyRequiredProviders(RuleConfiguredTargetBuilder builder) { builder.addProvider(RunfilesProvider.EMPTY); builder.addProvider(FileProvider.EMPTY); builder.addProvider(FilesToRunProvider.EMPTY); } }
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.dx.dex.file; import com.android.dex.util.ExceptionWithContext; import com.android.dx.util.AnnotatedOutput; import com.android.dx.util.Hex; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Map; import java.util.NoSuchElementException; import java.util.TreeMap; /** * A section of a {@code .dex} file which consists of a sequence of * {@link OffsettedItem} objects, which may each be of a different concrete * class and/or size. * * <b>Note:</b> It is invalid for an item in an instance of this class to * have a larger alignment requirement than the alignment of this instance. 
 */
public final class MixedItemSection extends Section {
    /** How the items of this section are to be sorted on output. */
    static enum SortType {
        /** no sorting */
        NONE,

        /** sort by type only */
        TYPE,

        /** sort in class-major order, with instances sorted per-class */
        INSTANCE;
    };

    /** {@code non-null;} sorter which sorts instances by type */
    private static final Comparator<OffsettedItem> TYPE_SORTER =
        new Comparator<OffsettedItem>() {
        public int compare(OffsettedItem item1, OffsettedItem item2) {
            ItemType type1 = item1.itemType();
            ItemType type2 = item2.itemType();
            return type1.compareTo(type2);
        }
    };

    /** {@code non-null;} the items in this part */
    private final ArrayList<OffsettedItem> items;

    /** {@code non-null;} items that have been explicitly interned */
    private final HashMap<OffsettedItem, OffsettedItem> interns;

    /** {@code non-null;} how to sort the items */
    private final SortType sort;

    /**
     * {@code >= -1;} the current size of this part, in bytes, or {@code -1}
     * if not yet calculated
     */
    private int writeSize;

    /**
     * Constructs an instance. The file offset is initially unknown.
     *
     * @param name {@code null-ok;} the name of this instance, for annotation
     * purposes
     * @param file {@code non-null;} file that this instance is part of
     * @param alignment {@code > 0;} alignment requirement for the final output;
     * must be a power of 2
     * @param sort how the items should be sorted in the final output
     */
    public MixedItemSection(String name, DexFile file, int alignment,
            SortType sort) {
        super(name, file, alignment);

        this.items = new ArrayList<OffsettedItem>(100);
        this.interns = new HashMap<OffsettedItem, OffsettedItem>(100);
        this.sort = sort;
        this.writeSize = -1;
    }

    /** {@inheritDoc} */
    @Override
    public Collection<? extends Item> items() {
        return items;
    }

    /** {@inheritDoc} */
    @Override
    public int writeSize() {
        throwIfNotPrepared();
        return writeSize;
    }

    /** {@inheritDoc} */
    @Override
    public int getAbsoluteItemOffset(Item item) {
        OffsettedItem oi = (OffsettedItem) item;
        return oi.getAbsoluteOffset();
    }

    /**
     * Gets the size of this instance, in items.
     *
     * @return {@code >= 0;} the size
     */
    public int size() {
        return items.size();
    }

    /**
     * Writes the portion of the file header that refers to this instance.
     *
     * @param out {@code non-null;} where to write
     */
    public void writeHeaderPart(AnnotatedOutput out) {
        throwIfNotPrepared();

        if (writeSize == -1) {
            throw new RuntimeException("write size not yet set");
        }

        int sz = writeSize;
        // An empty section reports offset 0 rather than its (meaningless) file offset.
        int offset = (sz == 0) ? 0 : getFileOffset();
        String name = getName();

        if (name == null) {
            name = "<unnamed>";
        }

        // Pad annotation labels out to a fixed column.
        // NOTE(review): assumes name.length() <= 15; a longer section name would make
        // spaceCount negative and throw — confirm all section names are short.
        int spaceCount = 15 - name.length();
        char[] spaceArr = new char[spaceCount];
        Arrays.fill(spaceArr, ' ');
        String spaces = new String(spaceArr);

        if (out.annotates()) {
            out.annotate(4, name + "_size:" + spaces + Hex.u4(sz));
            out.annotate(4, name + "_off: " + spaces + Hex.u4(offset));
        }

        out.writeInt(sz);
        out.writeInt(offset);
    }

    /**
     * Adds an item to this instance. This will in turn tell the given item
     * that it has been added to this instance. It is invalid to add the
     * same item to more than one instance, nor to add the same items
     * multiple times to a single instance.
     *
     * @param item {@code non-null;} the item to add
     */
    public void add(OffsettedItem item) {
        throwIfPrepared();

        try {
            if (item.getAlignment() > getAlignment()) {
                throw new IllegalArgumentException(
                        "incompatible item alignment");
            }
        } catch (NullPointerException ex) {
            // Elucidate the exception.
            throw new NullPointerException("item == null");
        }

        items.add(item);
    }

    /**
     * Interns an item in this instance, returning the interned instance
     * (which may not be the one passed in). This will add the item if no
     * equal item has been added.
     *
     * @param item {@code non-null;} the item to intern
     * @return {@code non-null;} the equivalent interned instance
     */
    public <T extends OffsettedItem> T intern(T item) {
        throwIfPrepared();

        OffsettedItem result = interns.get(item);

        if (result != null) {
            // Unchecked cast is safe: equal items share the same concrete type.
            return (T) result;
        }

        add(item);
        interns.put(item, item);
        return item;
    }

    /**
     * Gets an item which was previously interned.
     *
     * @param item {@code non-null;} the item to look for
     * @return {@code non-null;} the equivalent already-interned instance
     */
    public <T extends OffsettedItem> T get(T item) {
        throwIfNotPrepared();

        OffsettedItem result = interns.get(item);

        if (result != null) {
            return (T) result;
        }

        throw new NoSuchElementException(item.toString());
    }

    /**
     * Writes an index of contents of the items in this instance of the
     * given type. If there are none, this writes nothing. If there are any,
     * then the index is preceded by the given intro string.
     *
     * @param out {@code non-null;} where to write to
     * @param itemType {@code non-null;} the item type of interest
     * @param intro {@code non-null;} the introductory string for non-empty indices
     */
    public void writeIndexAnnotation(AnnotatedOutput out, ItemType itemType,
            String intro) {
        throwIfNotPrepared();

        // TreeMap sorts the index entries by their human-readable label.
        TreeMap<String, OffsettedItem> index =
            new TreeMap<String, OffsettedItem>();

        for (OffsettedItem item : items) {
            if (item.itemType() == itemType) {
                String label = item.toHuman();
                index.put(label, item);
            }
        }

        if (index.size() == 0) {
            return;
        }

        out.annotate(0, intro);

        for (Map.Entry<String, OffsettedItem> entry : index.entrySet()) {
            String label = entry.getKey();
            OffsettedItem item = entry.getValue();
            out.annotate(0, item.offsetString() + ' ' + label + '\n');
        }
    }

    /** {@inheritDoc} */
    @Override
    protected void prepare0() {
        DexFile file = getFile();

        /*
         * It's okay for new items to be added as a result of an
         * addContents() call; we just have to deal with the possibility.
         */

        int i = 0;
        for (;;) {
            // Re-read the size each round: addContents() may have appended items.
            int sz = items.size();
            if (i >= sz) {
                break;
            }

            for (/*i*/; i < sz; i++) {
                OffsettedItem one = items.get(i);
                one.addContents(file);
            }
        }
    }

    /**
     * Places all the items in this instance at particular offsets. This
     * will call {@link OffsettedItem#place} on each item. If an item
     * does not know its write size before the call to {@code place},
     * it is that call which is responsible for setting the write size.
     * This method may only be called once per instance; subsequent calls
     * will throw an exception.
     */
    public void placeItems() {
        throwIfNotPrepared();

        switch (sort) {
            case INSTANCE: {
                Collections.sort(items);
                break;
            }
            case TYPE: {
                Collections.sort(items, TYPE_SORTER);
                break;
            }
        }

        int sz = items.size();
        int outAt = 0;
        for (int i = 0; i < sz; i++) {
            OffsettedItem one = items.get(i);
            try {
                // place() may push the item forward (e.g. for alignment) but never backward.
                int placedAt = one.place(this, outAt);

                if (placedAt < outAt) {
                    throw new RuntimeException("bogus place() result for " +
                            one);
                }

                outAt = placedAt + one.writeSize();
            } catch (RuntimeException ex) {
                throw ExceptionWithContext.withContext(ex,
                        "...while placing " + one);
            }
        }

        writeSize = outAt;
    }

    /** {@inheritDoc} */
    @Override
    protected void writeTo0(AnnotatedOutput out) {
        boolean annotates = out.annotates();
        boolean first = true;
        DexFile file = getFile();
        int at = 0;

        for (OffsettedItem one : items) {
            if (annotates) {
                if (first) {
                    first = false;
                } else {
                    out.annotate(0, "\n");
                }
            }

            // Zero-pad up to the item's alignment boundary before writing it.
            int alignMask = one.getAlignment() - 1;
            int writeAt = (at + alignMask) & ~alignMask;

            if (at != writeAt) {
                out.writeZeroes(writeAt - at);
                at = writeAt;
            }

            one.writeTo(file, out);
            at += one.writeSize();
        }

        if (at != writeSize) {
            throw new RuntimeException("output size mismatch");
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.waveprotocol.wave.client.editor.util;

import java.util.ArrayList;
import java.util.function.Consumer;

import org.waveprotocol.wave.client.editor.EditorContext;
import org.waveprotocol.wave.client.editor.content.misc.CaretAnnotations;
import org.waveprotocol.wave.model.document.MutableAnnotationSet;
import org.waveprotocol.wave.model.document.RangedAnnotation;
import org.waveprotocol.wave.model.document.ReadableAnnotationSet;
import org.waveprotocol.wave.model.document.util.Annotations;
import org.waveprotocol.wave.model.document.util.Range;
import org.waveprotocol.wave.model.document.util.RangedAnnotationImpl;
import org.waveprotocol.wave.model.util.CollectionUtils;
import org.waveprotocol.wave.model.util.Preconditions;
import org.waveprotocol.wave.model.util.ReadableStringSet;
import org.waveprotocol.wave.model.util.ReadableStringSet.Proc;
import org.waveprotocol.wave.model.util.ValueUtils;

// Static helpers for reading, setting and clearing annotations over editor
// selections and document ranges. Collapsed selections are handled through the
// CaretAnnotations overlay rather than the document itself.
public class EditorAnnotationUtil {

  /** Un-constructable utility class. */
  private EditorAnnotationUtil() {}

  /**
   * Finds the first of the given keys that covers the entire selected range, and returns its value.
   *
   * @param editor Contains the non-null selection and document
   * @param keys Keys to look through
   * @return The first value annotation that covers the range at one of the given keys, else null.
   */
  public static String getFirstAnnotationOverSelection(EditorContext editor, String... keys) {
    Range range = Preconditions.checkNotNull(
        editor.getSelectionHelper().getSelectionRange(), "Editor must have selection").asRange();
    return getFirstCoveringAnnotationOverRange(editor.getDocument(), editor.getCaretAnnotations(),
        keys, range.getStart(), range.getEnd());
  }

  /**
   * Looks through a list of annotation keys, finding the first to cover the given range,
   * and return its annotation value. If the range is collapsed, it's assumed that the
   * desired annotation set is stored in the caret parameter.
   *
   * @param doc Document to check for annotations.
   * @param caret Annotations at the current selection.
   * @param keys Keys to look through
   * @param start Start of range to check
   * @param end End of range to check
   * @return The first value annotation that covers the range at one of the given keys, else null.
   */
  public static String getFirstCoveringAnnotationOverRange(MutableAnnotationSet<String> doc,
      CaretAnnotations caret, String[] keys, int start, int end) {
    // iterate through each key:
    for (String key : keys) {
      String value = getAnnotationOverRangeIfFull(doc, caret, key, start, end);
      if (value != null) {
        return value;
      }
    }
    return null; // none found.
  }

  /**
   * Returns an annotation over the selected range only if the entire range has a single annotation.
   * If the annotation changes or if the range is not annotated (annotated with null), returns null.
   *
   * @param editor Editor whose annotations are to be checked, with non-null selection and doc.
   * @param key Key of annotation to retrieve
   */
  public static String getAnnotationOverSelectionIfFull(EditorContext editor, String key) {
    Range range = Preconditions.checkNotNull(
        editor.getSelectionHelper().getSelectionRange(), "Editor must have selection").asRange();
    return getAnnotationOverRangeIfFull(editor.getDocument(), editor.getCaretAnnotations(),
        key, range.getStart(), range.getEnd());
  }

  /**
   * Returns an annotation over a range only if the entire range has a single annotation.
   * If the annotation changes or if the range is not annotated (annotated with null), returns null.
   * If the range is collapsed it's assumed that the desired annotation set is in the caret param.
   *
   * @param doc Document to check for annotations.
   * @param caret Annotations at the current selection.
   * @param key Key of annotation to retrieve
   * @param start Start offset of range.
   * @param end End offset of range.
   */
  public static String getAnnotationOverRangeIfFull(MutableAnnotationSet<String> doc,
      CaretAnnotations caret, String key, int start, int end) {
    if (start == end) {
      // Try to use the information about the cursor, even if it doesn't match
      // where the selection is.
      return caret.getAnnotation(key);
    }

    String currentValue = doc.getAnnotation(start, key);
    // firstAnnotationChange() == -1 means the value is constant over [start, end).
    if (doc.firstAnnotationChange(start, end, key, currentValue) == -1) {
      // no change, fully annotated
      return currentValue;
    }

    // change is found, so return:
    return null;
  }

  /**
   * Sets the annotation key to a particular value over the entire selected range in an editor.
   *
   * @param editor Editor to set the annotation, with non-null selection and doc.
   * @param key Annotation key to set.
   * @param value Annotation value to set key to.
   */
  public static void setAnnotationOverSelection(EditorContext editor, String key, String value) {
    Range range = Preconditions.checkNotNull(
        editor.getSelectionHelper().getSelectionRange(), "Editor must have selection").asRange();
    setAnnotationOverRange(editor.getDocument(), editor.getCaretAnnotations(),
        key, value, range.getStart(), range.getEnd());
  }

  /**
   * Sets the annotation key to a particular value over an entire range.
   * If the range is collapsed it's assumed that the desired annotation set is in the caret param.
   *
   * @param doc Document to set the annotation in.
   * @param caret Collapsed-range annotations.
   * @param key key of annotation to set.
   * @param value value to set annotation to.
   * @param start start of range to set over.
   * @param end end of range to set over.
   */
  public static void setAnnotationOverRange(MutableAnnotationSet<String> doc,
      CaretAnnotations caret, String key, String value, int start, int end) {
    // simple switch depending on whether the range is collapsed:
    if (start == end) {
      caret.setAnnotation(key, value);
    } else {
      doc.setAnnotation(start, end, key, value);
    }
  }

  /**
   * Clears all annotations for a set of keys over the current selected range.
   *
   * @param editor Editor whose annotations are to be cleared, with non-null selection and doc.
   * @param keys List of annotation keys to clear
   * @return true if annotations were actually changed
   */
  public static boolean clearAnnotationsOverSelection(EditorContext editor, String... keys) {
    Range range = Preconditions.checkNotNull(
        editor.getSelectionHelper().getSelectionRange(), "Editor must have selection").asRange();
    return clearAnnotationsOverRange(editor.getDocument(), editor.getCaretAnnotations(),
        keys, range.getStart(), range.getEnd());
  }

  /**
   * Clears all annotations over a particular range in the editor's document.
   * If the range is collapsed it's assumed that the desired annotation set is in the caret param.
   *
   * @param doc Document to check for annotations.
   * @param caret Annotations at the current collapsed range.
   * @param keys List of annotation keys to clear
   * @param start Start offset of range.
   * @param end End offset of range.
   * @return true if annotations were actually changed
   */
  public static <T extends Object> boolean clearAnnotationsOverRange(MutableAnnotationSet<T> doc,
      CaretAnnotations caret, String[] keys, int start, int end) {
    boolean wasRemoved = false;
    if (start == end) {
      // clear from caret annotation if collapsed range
      for (String key : keys) {
        if (caret.getAnnotation(key) != null) {
          caret.setAnnotation(key, null); // remove if present
          wasRemoved = true;
        }
      }
    } else {
      // clear from the entire range
      for (String key : keys) {
        // Only touch the document when some non-null value actually exists in the range.
        if (doc.firstAnnotationChange(start, end, key, null) != -1) {
          doc.setAnnotation(start, end, key, null); // remove if present
          wasRemoved = true;
        }
      }
    }
    return wasRemoved;
  }

  /**
   * Same as {{@link #clearAnnotationsOverRange(MutableAnnotationSet, CaretAnnotations, String[], int, int)}
   * but allowing params keys as ReadableStringSet
   */
  public static <T extends Object> void clearAnnotationsOverRange(MutableAnnotationSet<T> doc,
      CaretAnnotations caret, ReadableStringSet keys, int start, int end) {
    if (start == end) {
      // clear from caret annotation if collapsed range
      keys.each(new Proc(){
        @Override
        public void apply(String key) {
          if (caret.getAnnotation(key) != null) {
            caret.setAnnotation(key, null); // remove if present
          }
        }
      });
    } else {
      // clear from the entire range
      keys.each(new Proc(){
        @Override
        public void apply(String key) {
          if (doc.firstAnnotationChange(start, end, key, null) != -1) {
            doc.setAnnotation(start, end, key, null); // remove if present
          }
        }
      });
    }
  }

  /**
   * Finds the range of an adjacent or containing non-null range of contiguous
   * value for a given annotation key.
   *
   * If there are two ranges (the given location being at their boundary), then
   * prefer the one to the right.
   *
   * If there are no ranges (the key is null on either side of the location),
   * null is returned.
* * @param doc * @param key * @param location * @return the range, or null if none found */ public static <V> Range getEncompassingAnnotationRange( final ReadableAnnotationSet<V> doc, String key, int location) { V value = doc.getAnnotation(location, key); if (value == null && location > 0) { value = doc.getAnnotation(location - 1, key); } if (value == null) { return null; } int start = doc.lastAnnotationChange(0, location, key, value); int end = doc.firstAnnotationChange(location, doc.size(), key, value); assert start < end : "Range should not be collapsed"; return new Range(start, end); } /** * Given the editor state, this examines the current caret annotations and adds any that * can be inferred from the position, given the alignment type. * * @param doc Document to check for annotations. * @param caret Current annotation styles at the caret. * @param keys Keys to supplement over the caret styles. * @param location Location of the caret in the document. * @param leftAlign Whether the annotations come from the left or right. */ public static void supplementAnnotations(final MutableAnnotationSet<String> doc, final CaretAnnotations caret, final ReadableStringSet keys, final int location, final boolean leftAlign) { // by default, everything inherits from the left, so for now, no need! if (leftAlign) { return; } // supplement anything that's missing and different: keys.each(new Proc() { @Override public void apply(String key) { if (!caret.hasAnnotation(key)) { String newValue = Annotations.getAlignedAnnotation(doc, location, key, leftAlign); String oldValue = doc.getAnnotation(location - 1, key); if (!ValueUtils.equal(newValue, oldValue)) { caret.setAnnotation(key, newValue); } } } }); } /** * Set an annotation in the interval, if there is any annotation (with same key) within the interval, add the value to it. 
* * @param doc Document * @param key the annotation key * @param value the annotation value * @param start start location * @param end end location */ public static void setAnnotationWithOverlap(final MutableAnnotationSet<String> doc, String key, String value, int start, int end) { final int[] rRange = { start, end }; doc.rangedAnnotations(start, end, CollectionUtils.newStringSet(key)) .forEach(new Consumer<RangedAnnotation<String>>(){ @Override public void accept(RangedAnnotation<String> anot) { if (!anot.key().equals(key) || anot.value() == null) return; if (rRange[0] >= rRange[1]) return; if (anot.start() <= rRange[0] && anot.end() < rRange[1]) { // Case 1 // // |------ anot -----| // |-------- range ----- // // results: // // anot anot+new // |----|------------|---- range' // int s1 = anot.start(); int e1 = rRange[0]-1; int s2 = rRange[0]; int e2 = anot.end(); if (s1 < e1) { doc.setAnnotation(s1, e1, key, anot.value()); } if (s2 < e2) { doc.setAnnotation(s1, e1, key, anot.value()+","+value); } rRange[0] = e2+1; } else if (rRange[0] <= anot.start() && anot.end() <= rRange[1]) { // Case 2 // // |------ anot -----| // |-------- range ------------------- // // results: // // new anot+new // |-------|-----------------|---- range' // int s1 = rRange[0]; int e1 = anot.start()-1; int s2 = anot.start(); int e2 = anot.end(); if (s1 < e1) { doc.setAnnotation(s1, e1, key, value); } if (s2 < e2) { doc.setAnnotation(s2, e2, key, value+","+anot.value()); } rRange[0] = e2+1; } else if (rRange[0] <= anot.start() && rRange[1] < anot.end()) { // Case 3 // // |-------- anot ------- // |-------- range -----| // // results: // // new anot+new // |-------|------------|---- anot // int s1 = rRange[0]; int e1 = anot.start()-1; int s2 = anot.start(); int e2 = rRange[1]; if (s1 < e1) doc.setAnnotation(s1, s1, key, value); if (s2 < e2) doc.setAnnotation(s2, e2, key, anot.value()+","+value); // the last part of 'anot' will remain cause the // setAnnotation() logic rRange[0] = e2+1; } } }); 
// if there is still a range not overlapped, create annotation if (rRange[0] < rRange[1]) doc.setAnnotation(rRange[0], rRange[1], key, value); } /** * Collect all annotations with same key and containing same value in the provided range * * @param doc Document * @param key the annotation key * @param value the annotation value * @param start start location * @param end end location * @return */ public static Iterable<RangedAnnotation<String>> getAnnotationSpread(final MutableAnnotationSet<String> doc, String key, String value, int start, int end) { final ArrayList<RangedAnnotation<String>> resultSet = new ArrayList<RangedAnnotation<String>>(); doc.rangedAnnotations(start, end, CollectionUtils.newStringSet(key)) .forEach(new Consumer<RangedAnnotation<String>>(){ @Override public void accept(RangedAnnotation<String> anot) { if (anot.key().equals(key) && anot.value() != null && anot.value().contains(value)) { resultSet.add(new RangedAnnotationImpl<String>(anot)); } } }); return resultSet; } }
package timelogger.presentation.ui.manager;

import javax.swing.JPanel;
import java.awt.Dimension;
import javax.swing.SpringLayout;
import javax.swing.JScrollPane;
import javax.swing.border.TitledBorder;
import javax.swing.JLabel;
import javax.swing.JTextField;
import javax.swing.JComboBox;
import javax.swing.JTextArea;
import javax.swing.JButton;
import javax.swing.Box;
import javax.swing.border.MatteBorder;
import timelogger.presentation.UIFacade;
import timelogger.utilities.Constants;
import java.awt.Color;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;

/**
 * Panel for creating tasks. Shows a form ("Dati del task": title, consultant,
 * task number, description) on top, a scrollable list of tasks entered so far
 * ("Task finora inseriti") below it, and Back/Next navigation buttons.
 *
 * NOTE(review): class name violates Java UpperCamelCase convention, but renaming
 * would break callers elsewhere in the project — left as-is.
 */
public class createTaskPanel extends JPanel {

	// Form fields exposed to the rest of the UI layer via getters below.
	private JTextField titolo;              // task title input
	private JComboBox cbxConsulente;        // consultant selection
	// Vertical container shown inside the scroll pane; rows are presumably
	// appended by external code through getVerticalBox() — TODO confirm.
	private Box verticalBox = Box.createVerticalBox();

	/**
	 * Create the panel: builds the whole UI with SpringLayout constraints.
	 * The constraint/add ordering below is significant to the final layout.
	 */
	public createTaskPanel() {
		setPreferredSize(new Dimension(800, 600));
		SpringLayout springLayout = new SpringLayout();
		setLayout(springLayout);

		// --- Top section: task data entry form ---
		JPanel taskDataPanel = new JPanel();
		springLayout.putConstraint(SpringLayout.SOUTH, taskDataPanel, 190, SpringLayout.NORTH, this);
		taskDataPanel.setBorder(new TitledBorder(null, "Dati del task", TitledBorder.LEADING, TitledBorder.TOP, null, null));
		springLayout.putConstraint(SpringLayout.NORTH, taskDataPanel, 20, SpringLayout.NORTH, this);
		springLayout.putConstraint(SpringLayout.WEST, taskDataPanel, 10, SpringLayout.WEST, this);
		springLayout.putConstraint(SpringLayout.EAST, taskDataPanel, -10, SpringLayout.EAST, this);
		add(taskDataPanel);
		SpringLayout sl_taskDataPanel = new SpringLayout();
		taskDataPanel.setLayout(sl_taskDataPanel);

		// Labels stacked along the left edge (Titolo, Consulente, N° task).
		JLabel lblTitolo = new JLabel("Titolo");
		sl_taskDataPanel.putConstraint(SpringLayout.NORTH, lblTitolo, 10, SpringLayout.NORTH, taskDataPanel);
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, lblTitolo, 10, SpringLayout.WEST, taskDataPanel);
		taskDataPanel.add(lblTitolo);

		JLabel lblConsulente = new JLabel("Consulente");
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, lblConsulente, 0, SpringLayout.WEST, lblTitolo);
		sl_taskDataPanel.putConstraint(SpringLayout.SOUTH, lblConsulente, 35, SpringLayout.SOUTH, lblTitolo);
		taskDataPanel.add(lblConsulente);

		JLabel lblNTask = new JLabel("N\u00B0 task");
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, lblNTask, 0, SpringLayout.WEST, lblTitolo);
		sl_taskDataPanel.putConstraint(SpringLayout.SOUTH, lblNTask, 35, SpringLayout.SOUTH, lblConsulente);
		taskDataPanel.add(lblNTask);

		// Inputs aligned in a column to the right of the labels.
		titolo = new JTextField();
		sl_taskDataPanel.putConstraint(SpringLayout.NORTH, titolo, 0, SpringLayout.NORTH, lblTitolo);
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, titolo, 70, SpringLayout.EAST, lblTitolo);
		sl_taskDataPanel.putConstraint(SpringLayout.EAST, titolo, 270, SpringLayout.EAST, lblTitolo);
		taskDataPanel.add(titolo);
		titolo.setColumns(10);

		cbxConsulente = new JComboBox();
		sl_taskDataPanel.putConstraint(SpringLayout.NORTH, cbxConsulente, 0, SpringLayout.NORTH, lblConsulente);
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, cbxConsulente, 0, SpringLayout.WEST, titolo);
		sl_taskDataPanel.putConstraint(SpringLayout.EAST, cbxConsulente, 0, SpringLayout.EAST, titolo);
		taskDataPanel.add(cbxConsulente);

		// Task-number combo is local only: its value is never read here —
		// NOTE(review): likely wired up elsewhere or unfinished; verify.
		JComboBox cbxNumeroTask = new JComboBox();
		sl_taskDataPanel.putConstraint(SpringLayout.NORTH, cbxNumeroTask, -3, SpringLayout.NORTH, lblNTask);
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, cbxNumeroTask, 0, SpringLayout.WEST, titolo);
		sl_taskDataPanel.putConstraint(SpringLayout.EAST, cbxNumeroTask, 154, SpringLayout.WEST, taskDataPanel);
		taskDataPanel.add(cbxNumeroTask);

		// Description area on the right-hand side of the form.
		JTextArea textArea = new JTextArea();
		textArea.setBorder(new MatteBorder(1, 1, 1, 1, Color.LIGHT_GRAY));
		sl_taskDataPanel.putConstraint(SpringLayout.NORTH, textArea, -5, SpringLayout.NORTH, lblConsulente);
		sl_taskDataPanel.putConstraint(SpringLayout.SOUTH, textArea, 10, SpringLayout.SOUTH, cbxNumeroTask);
		sl_taskDataPanel.putConstraint(SpringLayout.EAST, textArea, -10, SpringLayout.EAST, taskDataPanel);
		taskDataPanel.add(textArea);

		JLabel lblDescrizione = new JLabel("Descrizione");
		// textArea's WEST edge is anchored to the label created just above.
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, textArea, 0, SpringLayout.WEST, lblDescrizione);
		sl_taskDataPanel.putConstraint(SpringLayout.NORTH, lblDescrizione, 0, SpringLayout.NORTH, lblTitolo);
		sl_taskDataPanel.putConstraint(SpringLayout.WEST, lblDescrizione, 67, SpringLayout.EAST, titolo);
		taskDataPanel.add(lblDescrizione);

		// Form action buttons. Reset currently has no listener attached.
		JButton btnReset = new JButton("Reset");
		sl_taskDataPanel.putConstraint(SpringLayout.SOUTH, btnReset, -5, SpringLayout.SOUTH, taskDataPanel);
		sl_taskDataPanel.putConstraint(SpringLayout.EAST, btnReset, -10, SpringLayout.EAST, taskDataPanel);
		taskDataPanel.add(btnReset);

		JButton btnInserisci = new JButton("Inserisci");
		btnInserisci.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent arg0) {
				// Delegates to the (currently stubbed) task-creation hook.
				createSingleTask();
			}
		});
		sl_taskDataPanel.putConstraint(SpringLayout.SOUTH, btnInserisci, 0, SpringLayout.SOUTH, btnReset);
		sl_taskDataPanel.putConstraint(SpringLayout.EAST, btnInserisci, -41, SpringLayout.WEST, btnReset);
		taskDataPanel.add(btnInserisci);

		// --- Middle section: list of tasks entered so far ---
		JPanel taskPanel = new JPanel();
		springLayout.putConstraint(SpringLayout.NORTH, taskPanel, 10, SpringLayout.SOUTH, taskDataPanel);
		springLayout.putConstraint(SpringLayout.WEST, taskPanel, 10, SpringLayout.WEST, this);
		springLayout.putConstraint(SpringLayout.SOUTH, taskPanel, -40, SpringLayout.SOUTH, this);
		springLayout.putConstraint(SpringLayout.EAST, taskPanel, -10, SpringLayout.EAST, this);
		taskPanel.setBorder(new TitledBorder(null, "Task finora inseriti", TitledBorder.LEADING, TitledBorder.TOP, null, null));
		add(taskPanel);
		SpringLayout sl_taskPanel = new SpringLayout();
		taskPanel.setLayout(sl_taskPanel);

		// Scroll pane filling taskPanel; also constrained against the outer
		// layout — NOTE(review): the springLayout constraints on scrollPanel
		// target a component added to taskPanel, not to 'this'; they look
		// redundant/ineffective — confirm before removing.
		JScrollPane scrollPanel = new JScrollPane();
		sl_taskPanel.putConstraint(SpringLayout.NORTH, scrollPanel, 0, SpringLayout.NORTH, taskPanel);
		sl_taskPanel.putConstraint(SpringLayout.WEST, scrollPanel, 0, SpringLayout.WEST, taskPanel);
		sl_taskPanel.putConstraint(SpringLayout.SOUTH, scrollPanel, 0, SpringLayout.SOUTH, taskPanel);
		sl_taskPanel.putConstraint(SpringLayout.EAST, scrollPanel, 0, SpringLayout.EAST, taskPanel);
		springLayout.putConstraint(SpringLayout.NORTH, scrollPanel, 232, SpringLayout.NORTH, this);
		springLayout.putConstraint(SpringLayout.WEST, scrollPanel, 10, SpringLayout.WEST, this);
		springLayout.putConstraint(SpringLayout.SOUTH, scrollPanel, -40, SpringLayout.SOUTH, this);
		springLayout.putConstraint(SpringLayout.EAST, scrollPanel, -10, SpringLayout.EAST, this);
		taskPanel.add(scrollPanel);
		scrollPanel.setViewportView(verticalBox);

		// --- Bottom navigation: Back returns to the main window, Next has no
		// listener attached here. ---
		JButton btnBack = new JButton("Back");
		btnBack.addActionListener(new ActionListener() {
			public void actionPerformed(ActionEvent e) {
				indietro();
			}
		});
		btnBack.setPreferredSize(new Dimension(100, 25));
		btnBack.setMinimumSize(new Dimension(100, 25));
		btnBack.setMaximumSize(new Dimension(100, 25));
		springLayout.putConstraint(SpringLayout.NORTH, btnBack, 6, SpringLayout.SOUTH, taskPanel);
		springLayout.putConstraint(SpringLayout.WEST, btnBack, 31, SpringLayout.WEST, this);
		add(btnBack);

		JButton btnNext = new JButton("Next");
		btnNext.setPreferredSize(new Dimension(100, 25));
		btnNext.setMinimumSize(new Dimension(100, 25));
		btnNext.setMaximumSize(new Dimension(100, 25));
		springLayout.putConstraint(SpringLayout.NORTH, btnNext, 6, SpringLayout.SOUTH, taskPanel);
		springLayout.putConstraint(SpringLayout.EAST, btnNext, -10, SpringLayout.EAST, this);
		add(btnNext);
	}

	/** @return the vertical container holding the task rows shown in the scroll pane */
	public Box getVerticalBox() {
		return verticalBox;
	}

	/** "Back": asks the UI facade to navigate to the main window. */
	public void indietro() {
		UIFacade.getInstance().toGuiAction(Constants.getInstance().mainWindowCommand, null);
	}

	/** @return the consultant combo box (populated externally — TODO confirm) */
	public JComboBox getCbxConsulente() {
		return cbxConsulente;
	}

	/**
	 * Hook invoked by the "Inserisci" button. Currently an empty stub — the
	 * commented-out lines below were copied from an invoice panel and do not
	 * match this class's fields (vBoxInvoices, invoicesPanel do not exist here).
	 */
	protected void createSingleTask() {
		//this.
		//UIFacade.getInstance().setAData("taskList", this.vBoxInvoices);
		//UIFacade.getInstance().setAData("invoicesPanel", this.invoicesPanel);
		//ControlFacade.getInstance().setData("DataFatturaInserita", invoiceDateInvoice.getDate());
		//UIFacade.getInstance().toSysAction(Constants.getInstance().operationCreateNewInvoice,this.vBoxInvoices);
	}
}
package com.wb.nextgenlibrary.util.utils;

import java.io.File;
import java.io.FileFilter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import java.util.regex.Pattern;

//import com.crashlytics.android.Crashlytics;
import com.wb.nextgenlibrary.NextGenExperience;

import android.content.Context;
import android.net.Uri;
import android.os.Environment;
import android.util.Log;

/**
 * A wrapper around android.util.Log with support for sensitive logging and logging toggle.
 *
 * Logcat output is gated on debug builds; most methods additionally mirror the
 * message into a date-rolled java.util.logging file on external storage
 * (see init()). Crashlytics forwarding is present but commented out throughout.
 */
public class NextGenLogger {
	static final String LOGGER_NAME = "com.flixster.FlixsterVideoLogger";
	// Keep at most this many daily log files on disk / attach to an email.
	static final int MAX_NUMBER_OF_LOG_FILES = 7;
	// Lazily created by init(); not thread-safe — NOTE(review): concurrent first
	// calls could race on these statics; confirm callers are on one thread.
	private static Logger logger = null;
	private static FileHandler fh = null;
	private static String currentLogFilename = null;

	/**
	 * Builds today's log file path (relative to external storage), e.g.
	 * dir + name + "-yyyy-MM-dd" + ext, using constants from F.
	 */
	private static String getTodaysLogFileName(){
		SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyy-MM-dd");
		Date date = new Date();
		return F.LOGGER_FILE_DIR + F.LOGGER_FILE_NAME + dateFormat.format(date) + F.LOGGER_FILE_EXT;
	}

	/** Lists existing log files in the log directory, matched by filename pattern. */
	private static File[] getExistingLogFiles(){
		File f = new File(Environment.getExternalStorageDirectory() + F.LOGGER_FILE_DIR);
		final Pattern p = Pattern.compile(F.LOGGER_FILE_NAME + ".*" + F.LOGGER_FILE_EXT);
		// search for files matching the pattern of "flixsterUserLog.*.txt";
		// may return null if the directory does not exist or cannot be read.
		File[] flists = f.listFiles(new FileFilter() {
			@Override
			public boolean accept(File file) {
				return p.matcher(file.getName()).matches();
			}
		});
		return flists;
	}

	/** Lists leftover ".lck" lock files created by java.util.logging FileHandler. */
	private static File[] getLCKFiles(){
		File f = new File(Environment.getExternalStorageDirectory() + F.LOGGER_FILE_DIR);
		final Pattern p = Pattern.compile(F.LOGGER_FILE_NAME + ".*" + F.LOGGER_FILE_EXT + ".lck");
		// search for files matching the pattern of "flixsterUserLog.*.txt";
		File[] flists = f.listFiles(new FileFilter() {
			@Override
			public boolean accept(File file) {
				return p.matcher(file.getName()).matches();
			}
		});
		return flists;
	}

	/**
	 * Builds "file://" URIs for up to MAX_NUMBER_OF_LOG_FILES log files to attach
	 * to an email; when more files exist, walks back day by day from today,
	 * stopping once the computed name sorts before the oldest existing file.
	 */
	private static ArrayList<Uri> getLogFileUriListForEmail(){
		ArrayList<Uri> retUris = new ArrayList<Uri>();
		File[] flists = getExistingLogFiles();
		// NOTE(review): getExistingLogFiles() can return null, which would NPE
		// on flists.length here — confirm callers guarantee the directory exists.
		if (flists.length <= MAX_NUMBER_OF_LOG_FILES){
			// attach everything
			for (int i = 0; i < flists.length; i++){
				retUris.add(Uri.parse("file://" + flists[i].getAbsolutePath()));
			}
		} else {
			SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyy-MM-dd");
			Arrays.sort(flists);
			Calendar cal = Calendar.getInstance();
			for (int i = 0; retUris.size() < MAX_NUMBER_OF_LOG_FILES; i++){
				String filename = Environment.getExternalStorageDirectory() + F.LOGGER_FILE_DIR + F.LOGGER_FILE_NAME + dateFormat.format(cal.getTime()) + F.LOGGER_FILE_EXT;
				// Stop once we have walked past the oldest file on disk
				// (lexicographic compare works because of the date format).
				if ((F.LOGGER_FILE_NAME + dateFormat.format(cal.getTime()) + F.LOGGER_FILE_EXT).compareTo(flists[0].getName()) < 0)
					break;
				File file = new File(filename);
				if (file.exists())
					retUris.add(Uri.parse("file://" + filename));
				cal.add(Calendar.DATE, -1);
			}
		}
		return retUris;
	}

	/**
	 * Ensures the logger exists and its FileHandler points at today's file;
	 * rolls the handler over at the first call on a new day and prunes old
	 * log/lock files while doing so.
	 */
	private static void init(){
		if (logger == null){
			// initialize logger object
			logger = Logger.getLogger(LOGGER_NAME);
			logger.setLevel(Level.CONFIG);
			logger.setUseParentHandlers(false); // do not output to console
		}
		if (StringHelper.isEmpty(currentLogFilename) || !getTodaysLogFileName().equals(currentLogFilename)){
			// this is when we need to create new file
			if (fh != null)
				// if file handler exist, remove it as we need to create a new log file with today's date.
				logger.removeHandler(fh);
			File dir = new File(Environment.getExternalStorageDirectory(), F.LOGGER_FILE_DIR);
			if (!dir.exists()) {
				// if the directory does not exist, create one.
				dir.mkdirs();
			} else {
				// directory exists, check and clear up old log files
				File[] logFiles = getExistingLogFiles();
				if (logFiles != null && logFiles.length > MAX_NUMBER_OF_LOG_FILES){
					// clean up only when there are more than 7 log files;
					// sort so the oldest (lexicographically smallest) go first.
					Arrays.sort(logFiles);
					for (int i = 0; i < logFiles.length - MAX_NUMBER_OF_LOG_FILES; i++){
						logFiles[i].delete();
					}
				}
				// Remove stale FileHandler ".lck" files from previous runs.
				File[] lckFiles = getLCKFiles();
				if (lckFiles != null && lckFiles.length > 0){
					for (int i = 0; i < lckFiles.length; i++){
						lckFiles[i].delete();
					}
				}
			}
			try {
				currentLogFilename = getTodaysLogFileName();
				fh = new FileHandler(Environment.getExternalStorageDirectory() + currentLogFilename, false);
			} catch (Exception e) {
				// Best-effort: file logging silently degrades to logcat-only.
				e.printStackTrace();
			}
			if (fh != null){
				fh.setFormatter(new SimpleFormatter());
				logger.addHandler(fh);
			}
		}
	}

	/** Returns the file logger, (re)initializing the daily handler first. */
	private static Logger getLogger() {
		init();
		return logger;
	}

	/* @return If the app is in diagnostic mode which supports logging of all default levels */
	private static boolean isDiagnosticMode() {
		return NextGenExperience.isDiagnosticMode();
	}

	/** A sensitive verbose msg is only logged on dev builds or in diagnostic mode */
	@SuppressWarnings("unused")
	public static void sv(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.v(tag, msg);
		//else
		//	Crashlytics.log(Log.VERBOSE, tag, msg);
		getLogger().log(Level.INFO, tag + "\t" + msg);
	}

	/** A sensitive debug msg is only logged on dev builds or in diagnostic mode */
	// NOTE(review): unlike sv(), sd() and si() do NOT write to the file logger —
	// confirm whether that asymmetry is intentional.
	@SuppressWarnings("unused")
	public static void sd(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.d(tag, msg);
		//else
		//	Crashlytics.log(Log.DEBUG, tag, msg);
	}

	/** A sensitive info msg is only logged on dev builds or in diagnostic mode */
	@SuppressWarnings("unused")
	public static void si(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.i(tag, msg);
		//else
		//	Crashlytics.log(Log.INFO, tag, msg);
	}

	/** Forced logging */
	public static void fd(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.d(tag, msg);
		//else
		//	Crashlytics.log(Log.DEBUG, tag, msg);
	}

	@SuppressWarnings("unused")
	public static void v(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.v(tag, msg);
		//else
		//	Crashlytics.log(Log.VERBOSE, tag, msg);
		getLogger().log(Level.INFO, tag + "\t" + msg);
	}

	@SuppressWarnings("unused")
	public static void v(String tag, String msg, Throwable tr) {
		if (NextGenExperience.isDebugBuild())
			Log.v(tag, msg, tr);
		//else
		//	Crashlytics.log(Log.VERBOSE, tag, msg);
		getLogger().log(Level.INFO, tag + "\t" + msg);
	}

	@SuppressWarnings("unused")
	public static void d(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.d(tag, msg);
		//else
		//	Crashlytics.log(Log.DEBUG, tag, msg);
		getLogger().log(Level.INFO, tag + "\t" + msg);
	}

	@SuppressWarnings("unused")
	public static void d(String tag, String msg, Throwable tr) {
		if (NextGenExperience.isDebugBuild())
			Log.d(tag, msg, tr);
		//else
		//	Crashlytics.log(Log.DEBUG, tag, msg);
		getLogger().log(Level.INFO, tag + "\t" + msg);
	}

	public static void i(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.i(tag, msg);
		//else
		//	Crashlytics.log(Log.INFO, tag, msg);
		getLogger().log(Level.INFO, tag + "\t" + msg);
	}

	public static void i(String tag, String msg, Throwable tr) {
		if (NextGenExperience.isDebugBuild())
			Log.i(tag, msg, tr);
		//else
		//	Crashlytics.log(Log.INFO, tag, msg);
		getLogger().log(Level.INFO, tag + "\t" + msg);
	}

	public static void w(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.w(tag, msg);
		//else
		//	Crashlytics.log(Log.WARN, tag, msg);
		getLogger().log(Level.WARNING, tag + "\t" + msg);
	}

	public static void w(String tag, String msg, Throwable tr) {
		if (NextGenExperience.isDebugBuild())
			Log.w(tag, msg, tr);
		//else
		//	Crashlytics.log(Log.WARN, tag, msg);
		getLogger().log(Level.WARNING, tag + "\t" + msg, tr);
	}

	/** A sensitive warning msg is only logged on dev builds or in diagnostic + admin mode */
	public static void sw(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.w(tag, msg);
		//else
		//	Crashlytics.log(Log.WARN, tag, msg);
		getLogger().log(Level.WARNING, tag + "\t" + msg);
	}

	public static void w(String tag, Throwable tr) {
		if (NextGenExperience.isDebugBuild())
			Log.w(tag, tr);
		//else
		//	Crashlytics.log(Log.WARN, tag, tr != null ? tr.getMessage() : "");
		getLogger().log(Level.WARNING, tag, tr);
	}

	public static void e(String tag, String msg) {
		if (NextGenExperience.isDebugBuild())
			Log.e(tag, msg);
		//else
		//	Crashlytics.log(Log.ERROR, tag, msg);
		getLogger().log(Level.SEVERE, tag + "\t" + msg);
	}

	public static void e(String tag, String msg, Throwable tr) {
		if (NextGenExperience.isDebugBuild())
			Log.e(tag, msg, tr);
		//else
		//	Crashlytics.log(Log.ERROR, tag, msg);
		getLogger().log(Level.SEVERE, tag + "\t" + msg, tr);
	}

	/** Records a button click (user-action tag) to logcat and the log file. */
	public static void logButtonClick(Object context, String buttonDescp){
		String msg = "Button clicked: " + buttonDescp + " in Activity: " + context.getClass().toString();
		if (NextGenExperience.isDebugBuild())
			Log.i(F.TAG_USER_ACTION, msg);
		//else
		//	Crashlytics.log(Log.INFO, F.TAG_USER_ACTION, msg);
		getLogger().log(Level.INFO, F.TAG_USER_ACTION + "\t" + msg);
	}

	/** Records a screen view plus the triggering action to logcat and the log file. */
	public static void logScreensView(Context context, String viewDescp, String action){
		String msg = "View : " + viewDescp + " in Activity: " + context.getClass().toString() + " Action: " + action;
		if (NextGenExperience.isDebugBuild())
			Log.i(F.TAG_USER_ACTION, msg);
		//else
		//	Crashlytics.log(Log.INFO, F.TAG_USER_ACTION, msg);
		getLogger().log(Level.INFO, F.TAG_USER_ACTION + "\t" + msg);
	}

	/** Categories for the "send logs by email" flow. */
	public enum EmailTypeEnum{
		TYPE_FEEDBACK("Feedback"),
		TYPE_REPORT_PROBLEM("Report a Problem");

		String stringVal; // human-readable label for the email subject/UI

		EmailTypeEnum(String val){
			stringVal = val;
		}
	}
}
package com.paduch.showcurrentaddress;

import android.app.PendingIntent;
import android.appwidget.AppWidgetManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.location.Address;
import android.location.Criteria;
import android.location.Geocoder;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.text.format.DateFormat;
import android.util.Log;

import java.security.Provider;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.List;
import java.util.Locale;

import com.paduch.showcurrentaddres.R;
import com.paduch.showcurrentaddres.R.string;

/**
 * Created by marcel on 2014-12-16.
 *
 * Receives location updates, keeps the best fix seen so far, and
 * reverse-geocodes it into a human-readable address for display.
 */
public class LocationListenerClass implements LocationListener {

	// One minute, in milliseconds: the age difference beyond which a new fix
	// is considered "significantly" newer/older in isBetterLocation().
	private static final int PERIOD = 1000 * 60 * 1;

	private double dLatitude = 0;
	private double dLongitude = 0;
	private long mTime = -1;               // epoch millis of the last fix; -1 = none yet
	private List<Address> locName = null;  // last reverse-geocoding result (may be empty)
	private Location bestLocation;         // best fix accepted so far
	private Geocoder mGeocoder;
	LocationManager mLocationManager;
	private String locAccuracy = "";       // user-facing provider label ("GPS", "Non-GPS", ...)
	private String status = "Loading";     // fallback text shown while no address is known
	private Context context;

	public LocationListenerClass(LocationManager mLocationManager, Geocoder mGeocoder, Context context){
		super();
		this.mLocationManager = mLocationManager;
		this.mGeocoder = mGeocoder;
		this.context = context;
	}

	/**
	 * Returns the first two address lines of the last geocoded location, or
	 * the current status text when no address is available.
	 */
	public String getName(){
		// BUGFIX: Geocoder.getFromLocation can return an EMPTY list; the
		// original null-only check let locName.get(0) throw
		// IndexOutOfBoundsException in that case.
		if (locName != null && !locName.isEmpty()) {
			// NOTE(review): getAddressLine(1) may be null for short addresses,
			// which renders as the literal "null" — confirm desired display.
			return locName.get(0).getAddressLine(0) + "," + locName.get(0).getAddressLine(1);
		} else {
			return status;
		}
	}

	public void setStatus(String status){
		this.status = status;
	}

	public String getAccuracy(){
		return locAccuracy;
	}

	/** Maps a provider name to the user-facing accuracy label. */
	public void setAccuracy(String provider){
		if (provider.equals(LocationManager.GPS_PROVIDER)){
			locAccuracy = "GPS";
		} else if (provider.equals(LocationManager.NETWORK_PROVIDER)){
			locAccuracy = "Non-GPS";
		} else if (provider.equals(LocationManager.PASSIVE_PROVIDER)){
			// unclear whether Location can actually report the passive provider here
			locAccuracy = "Unknown";
		} else {
			locAccuracy = "...";
		}
	}

	/**
	 * Seeds state from the last known GPS fix (if the provider is enabled) and
	 * requests a single location update.
	 */
	public void start(){
		String provider = LocationManager.GPS_PROVIDER;
		if (mLocationManager.isProviderEnabled(provider)) {
			Location location = mLocationManager.getLastKnownLocation(provider);
			if (location != null){
				setLongitude(location.getLongitude());
				setLatitude(location.getLatitude());
				mTime = location.getTime();
				updateLocationName();
			}
		} else {
			status = context.getResources().getString(R.string.no_location);
		}
		mLocationManager.requestSingleUpdate(provider, this, null);
		Log.d("listenerStart", "requestUpdates");
	}

	public double getLatitude(){
		return dLatitude;
	}

	public double getLongitude(){
		return dLongitude;
	}

	public void setLatitude(double latitude){
		dLatitude = latitude;
	}

	public void setLongitude(double longitude){
		dLongitude = longitude;
	}

	/**
	 * Formats the time of the last fix as HH:mm:ss (12/24h follows the system
	 * setting); returns "..." when no fix has been received yet.
	 */
	public String getTime(){
		if (mTime == -1){
			return "...";
		} else {
			SimpleDateFormat formatter;
			if (DateFormat.is24HourFormat(context)){
				formatter = new SimpleDateFormat("HH:mm:ss", Locale.getDefault());
			} else {
				formatter = new SimpleDateFormat("hh:mm:ss", Locale.getDefault());
			}
			Calendar calendar = Calendar.getInstance();
			calendar.setTimeInMillis(mTime);
			return formatter.format(calendar.getTime());
		}
	}

	@Override
	public void onLocationChanged(Location location) {
		if (location != null) {
			// Only accept the fix when it improves on the best one seen so far.
			if (isBetterLocation(location, bestLocation)) {
				bestLocation = location;
				setLatitude(location.getLatitude());
				setLongitude(location.getLongitude());
				mTime = location.getTime();
				setAccuracy(location.getProvider());
				updateLocationName();
				Log.d("LocationChanged - provider:", location.getProvider().toString());
			}
		}
	}

	@Override
	public void onStatusChanged(String provider, int status, Bundle extras) {
	}

	@Override
	public void onProviderEnabled(String provider) {
	}

	@Override
	public void onProviderDisabled(String provider) {
	}

	/** Reverse-geocodes the current coordinates; on failure sets the error status. */
	private void updateLocationName() {
		try {
			locName = mGeocoder.getFromLocation(this.getLatitude(), this.getLongitude(), 1);
			// BUGFIX: use String.valueOf to avoid an NPE on the debug log when
			// the geocoder returns null, which would wrongly trip the catch
			// block and overwrite the status with an error message.
			Log.d("queryLocName", String.valueOf(locName));
		} catch (Exception e){
			e.printStackTrace();
			setStatus(context.getResources().getString(R.string.error));
		}
	}

	/**
	 * Decides whether a new fix is better than the current best, weighing
	 * recency (PERIOD threshold), accuracy delta, and provider identity.
	 * Adapted from the Android location-strategies guide.
	 */
	protected boolean isBetterLocation(Location location, Location currentBestLocation) {
		if (currentBestLocation == null) {
			// A new location is always better than no location
			return true;
		}
		// Check whether the new location fix is newer or older
		long timeDelta = location.getTime() - currentBestLocation.getTime();
		boolean isSignificantlyNewer = timeDelta > PERIOD;
		boolean isSignificantlyOlder = timeDelta < -PERIOD;
		boolean isNewer = timeDelta > 0;
		// If it's been more than PERIOD since the current location, use the new
		// location because the user has likely moved
		if (isSignificantlyNewer) {
			return true;
		// If the new location is more than PERIOD older, it must be worse
		} else if (isSignificantlyOlder) {
			return false;
		}
		// Check whether the new location fix is more or less accurate
		int accuracyDelta = (int) (location.getAccuracy() - currentBestLocation.getAccuracy());
		boolean isLessAccurate = accuracyDelta > 0;
		boolean isMoreAccurate = accuracyDelta < 0;
		boolean isSignificantlyLessAccurate = accuracyDelta > 200;
		// Check if the old and new location are from the same provider
		boolean isFromSameProvider = isSameProvider(location.getProvider(), currentBestLocation.getProvider());
		// Determine location quality using a combination of timeliness and accuracy
		if (isMoreAccurate) {
			return true;
		} else if (isNewer && !isLessAccurate) {
			return true;
		} else if (isNewer && !isSignificantlyLessAccurate && isFromSameProvider) {
			return true;
		}
		return false;
	}

	/** Checks whether two providers are the same (null-safe). */
	private boolean isSameProvider(String provider1, String provider2) {
		if (provider1 == null) {
			return provider2 == null;
		}
		return provider1.equals(provider2);
	}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexAction; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction; import 
org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexAction; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.optimize.OptimizeAction; import org.elasticsearch.action.admin.indices.optimize.OptimizeRequest; import org.elasticsearch.action.admin.indices.recovery.RecoveryAction; import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.count.CountAction; import org.elasticsearch.action.count.CountRequest; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.deletebyquery.DeleteByQueryAction; import org.elasticsearch.action.deletebyquery.DeleteByQueryRequest; import org.elasticsearch.action.exists.ExistsAction; import 
org.elasticsearch.action.exists.ExistsRequest; import org.elasticsearch.action.explain.ExplainAction; import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetAction; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.mlt.MoreLikeThisRequest; import org.elasticsearch.action.percolate.MultiPercolateAction; import org.elasticsearch.action.percolate.MultiPercolateRequest; import org.elasticsearch.action.percolate.PercolateAction; import org.elasticsearch.action.percolate.PercolateRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.suggest.SuggestAction; import org.elasticsearch.action.suggest.SuggestRequest; import org.elasticsearch.action.support.QuerySourceBuilder; import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsAction; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.settings.ClusterDynamicSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import 
org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.search.action.SearchServiceTransportAction; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.*; import org.apache.lucene.util.LuceneTestCase.Slow; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.*; import static org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.*; @ClusterScope(scope = Scope.SUITE, numClientNodes = 1) @Slow public class IndicesRequestTests extends ElasticsearchIntegrationTest { private final List<String> indices = new ArrayList<>(); @Override protected int minimumNumberOfShards() { //makes sure that a reduce is always needed when searching return 2; } @Override protected int minimumNumberOfReplicas() { //makes sure that write operations get sent to the replica as well //so we are able to intercept those messages and check them return 1; } @Override protected Settings nodeSettings(int nodeOrdinal) { return ImmutableSettings.settingsBuilder() .put(super.nodeSettings(nodeOrdinal)) .put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, InterceptingTransportService.class.getName()) .build(); } @Before public void setup() { int numIndices = iterations(1, 5); for (int i = 0; i < numIndices; i++) { indices.add("test" + i); } for (String index : indices) { assertAcked(prepareCreate(index).addAlias(new Alias(index + "-alias"))); } ensureGreen(); } @After public void cleanUp() { assertAllRequestsHaveBeenConsumed(); indices.clear(); } @Test public void testGetFieldMappings() { String getFieldMappingsShardAction = GetFieldMappingsAction.NAME + "[index][s]"; 
interceptTransportActions(getFieldMappingsShardAction);
    // Tail of a test whose opening lines are above this chunk: fire a
    // get-field-mappings request via a client node, then verify the intercepted
    // shard-level request carried the same indices as the original request.
    GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest();
    getFieldMappingsRequest.indices(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().getFieldMappings(getFieldMappingsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(getFieldMappingsRequest, getFieldMappingsShardAction);
}

// Analyze resolves to a single shard-level action (the "[s]" suffix, per the
// *ShardAction naming used throughout this class); the intercepted internal
// request must carry the same index as the original request.
@Test
public void testAnalyze() {
    String analyzeShardAction = AnalyzeAction.NAME + "[s]";
    interceptTransportActions(analyzeShardAction);
    AnalyzeRequest analyzeRequest = new AnalyzeRequest(randomIndexOrAlias());
    analyzeRequest.text("text");
    internalCluster().clientNodeClient().admin().indices().analyze(analyzeRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(analyzeRequest, analyzeShardAction);
}

// Index is checked against both the primary action (plain name) and its
// replica variant ("[r]" suffix).
@Test
public void testIndex() {
    String[] indexShardActions = new String[]{IndexAction.NAME, IndexAction.NAME + "[r]"};
    interceptTransportActions(indexShardActions);
    IndexRequest indexRequest = new IndexRequest(randomIndexOrAlias(), "type", "id").source("field", "value");
    internalCluster().clientNodeClient().index(indexRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(indexRequest, indexShardActions);
}

@Test
public void testDelete() {
    String[] deleteShardActions = new String[]{DeleteAction.NAME, DeleteAction.NAME + "[r]"};
    interceptTransportActions(deleteShardActions);
    DeleteRequest deleteRequest = new DeleteRequest(randomIndexOrAlias(), "type", "id");
    internalCluster().clientNodeClient().delete(deleteRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(deleteRequest, deleteShardActions);
}

@Test
public void testUpdate() {
    //update action goes to the primary, index op gets executed locally, then replicated
    String[] updateShardActions = new String[]{UpdateAction.NAME, IndexAction.NAME + "[r]"};
    interceptTransportActions(updateShardActions);
    String indexOrAlias = randomIndexOrAlias();
    // Document is indexed beforehand, so the update hits an existing doc
    // (isCreated == false below).
    client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get();
    UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").doc("field1", "value1");
    UpdateResponse updateResponse = internalCluster().clientNodeClient().update(updateRequest).actionGet();
    assertThat(updateResponse.isCreated(), equalTo(false));
    clearInterceptedActions();
    assertSameIndices(updateRequest, updateShardActions);
}

@Test
public void testUpdateUpsert() {
    //update action goes to the primary, index op gets executed locally, then replicated
    String[] updateShardActions = new String[]{UpdateAction.NAME, IndexAction.NAME + "[r]"};
    interceptTransportActions(updateShardActions);
    String indexOrAlias = randomIndexOrAlias();
    // No prior document: the upsert path creates it (isCreated == true below).
    UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").upsert("field", "value").doc("field1", "value1");
    UpdateResponse updateResponse = internalCluster().clientNodeClient().update(updateRequest).actionGet();
    assertThat(updateResponse.isCreated(), equalTo(true));
    clearInterceptedActions();
    assertSameIndices(updateRequest, updateShardActions);
}

@Test
public void testUpdateDelete() {
    //update action goes to the primary, delete op gets executed locally, then replicated
    String[] updateShardActions = new String[]{UpdateAction.NAME, DeleteAction.NAME + "[r]"};
    interceptTransportActions(updateShardActions);
    String indexOrAlias = randomIndexOrAlias();
    client().prepareIndex(indexOrAlias, "type", "id").setSource("field", "value").get();
    // The script turns the update into a delete op, hence the delete replica
    // action in updateShardActions above.
    UpdateRequest updateRequest = new UpdateRequest(indexOrAlias, "type", "id").script("ctx.op='delete'");
    UpdateResponse updateResponse = internalCluster().clientNodeClient().update(updateRequest).actionGet();
    assertThat(updateResponse.isCreated(), equalTo(false));
    clearInterceptedActions();
    assertSameIndices(updateRequest, updateShardActions);
}

@Test
public void testDeleteByQuery() {
    String[] deleteByQueryShardActions = new String[]{DeleteByQueryAction.NAME + "[s]", DeleteByQueryAction.NAME + "[s][r]"};
    interceptTransportActions(deleteByQueryShardActions);
    DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(randomIndicesOrAliases()).source(new QuerySourceBuilder().setQuery(QueryBuilders.matchAllQuery()));
    internalCluster().clientNodeClient().deleteByQuery(deleteByQueryRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(deleteByQueryRequest, deleteByQueryShardActions);
}

// Bulk fans out per shard, so each intercepted internal request may only carry
// a subset of the indices touched by the original request — hence
// assertIndicesSubset rather than assertSameIndices.
@Test
public void testBulk() {
    String[] bulkShardActions = new String[]{BulkAction.NAME + "[s]", BulkAction.NAME + "[s][r]"};
    interceptTransportActions(bulkShardActions);
    List<String> indices = new ArrayList<>();
    BulkRequest bulkRequest = new BulkRequest();
    int numIndexRequests = iterations(1, 10);
    for (int i = 0; i < numIndexRequests; i++) {
        String indexOrAlias = randomIndexOrAlias();
        bulkRequest.add(new IndexRequest(indexOrAlias, "type", "id").source("field", "value"));
        indices.add(indexOrAlias);
    }
    int numDeleteRequests = iterations(1, 10);
    for (int i = 0; i < numDeleteRequests; i++) {
        String indexOrAlias = randomIndexOrAlias();
        bulkRequest.add(new DeleteRequest(indexOrAlias, "type", "id"));
        indices.add(indexOrAlias);
    }
    int numUpdateRequests = iterations(1, 10);
    for (int i = 0; i < numUpdateRequests; i++) {
        String indexOrAlias = randomIndexOrAlias();
        bulkRequest.add(new UpdateRequest(indexOrAlias, "type", "id").doc("field1", "value1"));
        indices.add(indexOrAlias);
    }
    internalCluster().clientNodeClient().bulk(bulkRequest).actionGet();
    clearInterceptedActions();
    assertIndicesSubset(indices, bulkShardActions);
}

@Test
public void testGet() {
    String getShardAction = GetAction.NAME + "[s]";
    interceptTransportActions(getShardAction);
    GetRequest getRequest = new GetRequest(randomIndexOrAlias(), "type", "id");
    internalCluster().clientNodeClient().get(getRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(getRequest, getShardAction);
}

@Test
public void testExplain() {
    String explainShardAction = ExplainAction.NAME + "[s]";
    interceptTransportActions(explainShardAction);
    ExplainRequest explainRequest = new ExplainRequest(randomIndexOrAlias(), "type", "id").source(new QuerySourceBuilder().setQuery(QueryBuilders.matchAllQuery()));
    internalCluster().clientNodeClient().explain(explainRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(explainRequest, explainShardAction);
}

@Test
public void testTermVector() {
    String termVectorShardAction = TermVectorsAction.NAME + "[s]";
    interceptTransportActions(termVectorShardAction);
    TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "type", "id");
    internalCluster().clientNodeClient().termVectors(termVectorsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(termVectorsRequest, termVectorShardAction);
}

@Test
public void testMultiTermVector() {
    String multiTermVectorsShardAction = MultiTermVectorsAction.NAME + "[shard][s]";
    interceptTransportActions(multiTermVectorsShardAction);
    List<String> indices = new ArrayList<>();
    MultiTermVectorsRequest multiTermVectorsRequest = new MultiTermVectorsRequest();
    int numDocs = iterations(1, 30);
    for (int i = 0; i < numDocs; i++) {
        String indexOrAlias = randomIndexOrAlias();
        multiTermVectorsRequest.add(indexOrAlias, "type", Integer.toString(i));
        indices.add(indexOrAlias);
    }
    internalCluster().clientNodeClient().multiTermVectors(multiTermVectorsRequest).actionGet();
    clearInterceptedActions();
    assertIndicesSubset(indices, multiTermVectorsShardAction);
}

@Test
public void testMultiGet() {
    String multiGetShardAction = MultiGetAction.NAME + "[shard][s]";
    interceptTransportActions(multiGetShardAction);
    List<String> indices = new ArrayList<>();
    MultiGetRequest multiGetRequest = new MultiGetRequest();
    int numDocs = iterations(1, 30);
    for (int i = 0; i < numDocs; i++) {
        String indexOrAlias = randomIndexOrAlias();
        multiGetRequest.add(indexOrAlias, "type", Integer.toString(i));
        indices.add(indexOrAlias);
    }
    internalCluster().clientNodeClient().multiGet(multiGetRequest).actionGet();
    clearInterceptedActions();
    assertIndicesSubset(indices, multiGetShardAction);
}

@Test
public void testCount() {
    String countShardAction = CountAction.NAME + "[s]";
    interceptTransportActions(countShardAction);
    CountRequest countRequest = new CountRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().count(countRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(countRequest, countShardAction);
}

@Test
public void testExists() {
    String existsShardAction = ExistsAction.NAME + "[s]";
    interceptTransportActions(existsShardAction);
    ExistsRequest existsRequest = new ExistsRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().exists(existsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(existsRequest, existsShardAction);
}

@Test
public void testFlush() {
    String flushShardAction = FlushAction.NAME + "[s]";
    interceptTransportActions(flushShardAction);
    FlushRequest flushRequest = new FlushRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().flush(flushRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(flushRequest, flushShardAction);
}

@Test
public void testOptimize() {
    String optimizeShardAction = OptimizeAction.NAME + "[s]";
    interceptTransportActions(optimizeShardAction);
    OptimizeRequest optimizeRequest = new OptimizeRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().optimize(optimizeRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(optimizeRequest, optimizeShardAction);
}

@Test
public void testRefresh() {
    String refreshShardAction = RefreshAction.NAME + "[s]";
    interceptTransportActions(refreshShardAction);
    RefreshRequest refreshRequest = new RefreshRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().refresh(refreshRequest).actionGet();
    clearInterceptedActions();
assertSameIndices(refreshRequest, refreshShardAction);
}

@Test
public void testClearCache() {
    String clearCacheAction = ClearIndicesCacheAction.NAME + "[s]";
    interceptTransportActions(clearCacheAction);
    ClearIndicesCacheRequest clearIndicesCacheRequest = new ClearIndicesCacheRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().clearCache(clearIndicesCacheRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(clearIndicesCacheRequest, clearCacheAction);
}

@Test
public void testRecovery() {
    String recoveryAction = RecoveryAction.NAME + "[s]";
    interceptTransportActions(recoveryAction);
    RecoveryRequest recoveryRequest = new RecoveryRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().recoveries(recoveryRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(recoveryRequest, recoveryAction);
}

@Test
public void testSegments() {
    String segmentsAction = IndicesSegmentsAction.NAME + "[s]";
    interceptTransportActions(segmentsAction);
    IndicesSegmentsRequest segmentsRequest = new IndicesSegmentsRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().segments(segmentsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(segmentsRequest, segmentsAction);
}

@Test
public void testIndicesStats() {
    String indicesStats = IndicesStatsAction.NAME + "[s]";
    interceptTransportActions(indicesStats);
    IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest().indices(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().stats(indicesStatsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(indicesStatsRequest, indicesStats);
}

@Test
public void testSuggest() {
    String suggestAction = SuggestAction.NAME + "[s]";
    interceptTransportActions(suggestAction);
    SuggestRequest suggestRequest = new SuggestRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().suggest(suggestRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(suggestRequest, suggestAction);
}

@Test
public void testValidateQuery() {
    String validateQueryShardAction = ValidateQueryAction.NAME + "[s]";
    interceptTransportActions(validateQueryShardAction);
    ValidateQueryRequest validateQueryRequest = new ValidateQueryRequest(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().validateQuery(validateQueryRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(validateQueryRequest, validateQueryShardAction);
}

@Test
public void testPercolate() {
    String percolateShardAction = PercolateAction.NAME + "[s]";
    interceptTransportActions(percolateShardAction);
    client().prepareIndex("test-get", "type", "1").setSource("field","value").get();
    PercolateRequest percolateRequest = new PercolateRequest().indices(randomIndicesOrAliases()).documentType("type");
    // Randomly exercise both ways of supplying the document: by get-request
    // reference or as an inline source.
    if (randomBoolean()) {
        percolateRequest.getRequest(new GetRequest("test-get", "type", "1"));
    } else {
        percolateRequest.source("\"field\":\"value\"");
    }
    internalCluster().clientNodeClient().percolate(percolateRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(percolateRequest, percolateShardAction);
}

@Test
public void testMultiPercolate() {
    String multiPercolateShardAction = MultiPercolateAction.NAME + "[shard][s]";
    interceptTransportActions(multiPercolateShardAction);
    client().prepareIndex("test-get", "type", "1").setSource("field", "value").get();
    MultiPercolateRequest multiPercolateRequest = new MultiPercolateRequest();
    List<String> indices = new ArrayList<>();
    int numRequests = iterations(1, 30);
    for (int i = 0; i < numRequests; i++) {
        String[] indicesOrAliases = randomIndicesOrAliases();
        Collections.addAll(indices, indicesOrAliases);
        PercolateRequest percolateRequest = new PercolateRequest().indices(indicesOrAliases).documentType("type");
        if (randomBoolean()) {
            percolateRequest.getRequest(new GetRequest("test-get", "type", "1"));
        } else {
            percolateRequest.source("\"field\":\"value\"");
        }
        multiPercolateRequest.add(percolateRequest);
    }
    internalCluster().clientNodeClient().multiPercolate(multiPercolateRequest).actionGet();
    clearInterceptedActions();
    assertIndicesSubset(indices, multiPercolateShardAction);
}

// The following admin actions are master-level (no "[s]" suffix): the original
// action name itself is intercepted and compared.
@Test
public void testOpenIndex() {
    interceptTransportActions(OpenIndexAction.NAME);
    OpenIndexRequest openIndexRequest = new OpenIndexRequest(randomUniqueIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().open(openIndexRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(openIndexRequest, OpenIndexAction.NAME);
}

@Test
public void testCloseIndex() {
    interceptTransportActions(CloseIndexAction.NAME);
    CloseIndexRequest closeIndexRequest = new CloseIndexRequest(randomUniqueIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().close(closeIndexRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(closeIndexRequest, CloseIndexAction.NAME);
}

@Test
public void testDeleteIndex() {
    interceptTransportActions(DeleteIndexAction.NAME);
    String[] randomIndicesOrAliases = randomUniqueIndicesOrAliases();
    DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(randomIndicesOrAliases);
    assertAcked(internalCluster().clientNodeClient().admin().indices().delete(deleteIndexRequest).actionGet());
    clearInterceptedActions();
    assertSameIndices(deleteIndexRequest, DeleteIndexAction.NAME);
}

@Test
public void testGetMappings() {
    interceptTransportActions(GetMappingsAction.NAME);
    GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().getMappings(getMappingsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(getMappingsRequest, GetMappingsAction.NAME);
}

@Test
public void testPutMapping() {
    interceptTransportActions(PutMappingAction.NAME);
    PutMappingRequest putMappingRequest = new PutMappingRequest(randomUniqueIndicesOrAliases()).type("type").source("field", "type=string");
    internalCluster().clientNodeClient().admin().indices().putMapping(putMappingRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(putMappingRequest, PutMappingAction.NAME);
}

@Test
public void testGetSettings() {
    interceptTransportActions(GetSettingsAction.NAME);
    GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(randomIndicesOrAliases());
    internalCluster().clientNodeClient().admin().indices().getSettings(getSettingsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(getSettingsRequest, GetSettingsAction.NAME);
}

@Test
public void testUpdateSettings() {
    interceptTransportActions(UpdateSettingsAction.NAME);
    UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(randomIndicesOrAliases()).settings(ImmutableSettings.builder().put("refresh_interval", -1));
    internalCluster().clientNodeClient().admin().indices().updateSettings(updateSettingsRequest).actionGet();
    clearInterceptedActions();
    assertSameIndices(updateSettingsRequest, UpdateSettingsAction.NAME);
}

// Search tests intercept the per-phase internal search actions and verify each
// phase's requests carry the original indices.
@Test
public void testSearchQueryThenFetch() throws Exception {
    interceptTransportActions(SearchServiceTransportAction.QUERY_ACTION_NAME, SearchServiceTransportAction.FETCH_ID_ACTION_NAME,
            SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH);
    SearchResponse searchResponse = internalCluster().clientNodeClient().search(searchRequest).actionGet();
    assertNoFailures(searchResponse);
    assertThat(searchResponse.getHits().totalHits(), greaterThan(0l));
    clearInterceptedActions();
    assertSameIndices(searchRequest, SearchServiceTransportAction.QUERY_ACTION_NAME, SearchServiceTransportAction.FETCH_ID_ACTION_NAME);
    //free context messages are not necessarily sent, but if they are, check their indices
    assertSameIndicesOptionalRequests(searchRequest, SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
}

@Test
public void testSearchDfsQueryThenFetch() throws Exception {
    interceptTransportActions(SearchServiceTransportAction.DFS_ACTION_NAME, SearchServiceTransportAction.QUERY_ID_ACTION_NAME,
            SearchServiceTransportAction.FETCH_ID_ACTION_NAME, SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH);
    SearchResponse searchResponse = internalCluster().clientNodeClient().search(searchRequest).actionGet();
    assertNoFailures(searchResponse);
    assertThat(searchResponse.getHits().totalHits(), greaterThan(0l));
    clearInterceptedActions();
    assertSameIndices(searchRequest, SearchServiceTransportAction.DFS_ACTION_NAME, SearchServiceTransportAction.QUERY_ID_ACTION_NAME,
            SearchServiceTransportAction.FETCH_ID_ACTION_NAME);
    //free context messages are not necessarily sent, but if they are, check their indices
    assertSameIndicesOptionalRequests(searchRequest, SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
}

@Test
public void testSearchQueryAndFetch() throws Exception {
    interceptTransportActions(SearchServiceTransportAction.QUERY_FETCH_ACTION_NAME,
            SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    SearchRequest
searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_AND_FETCH);
    SearchResponse searchResponse = internalCluster().clientNodeClient().search(searchRequest).actionGet();
    assertNoFailures(searchResponse);
    assertThat(searchResponse.getHits().totalHits(), greaterThan(0l));
    clearInterceptedActions();
    assertSameIndices(searchRequest, SearchServiceTransportAction.QUERY_FETCH_ACTION_NAME);
    //free context messages are not necessarily sent, but if they are, check their indices
    assertSameIndicesOptionalRequests(searchRequest, SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
}

@Test
public void testSearchDfsQueryAndFetch() throws Exception {
    interceptTransportActions(SearchServiceTransportAction.QUERY_QUERY_FETCH_ACTION_NAME,
            SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_AND_FETCH);
    SearchResponse searchResponse = internalCluster().clientNodeClient().search(searchRequest).actionGet();
    assertNoFailures(searchResponse);
    assertThat(searchResponse.getHits().totalHits(), greaterThan(0l));
    clearInterceptedActions();
    assertSameIndices(searchRequest, SearchServiceTransportAction.QUERY_QUERY_FETCH_ACTION_NAME);
    //free context messages are not necessarily sent, but if they are, check their indices
    assertSameIndicesOptionalRequests(searchRequest, SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
}

@Test
public void testSearchScan() throws Exception {
    interceptTransportActions(SearchServiceTransportAction.SCAN_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.SCAN).scroll(new TimeValue(500));
    SearchResponse searchResponse = internalCluster().clientNodeClient().search(searchRequest).actionGet();
    assertNoFailures(searchResponse);
    assertThat(searchResponse.getHits().totalHits(), greaterThan(0l));
    // Release the scroll so its context does not linger past the test.
    client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get();
    clearInterceptedActions();
    assertSameIndices(searchRequest, SearchServiceTransportAction.SCAN_ACTION_NAME);
}

@Test
public void testMoreLikeThis() {
    interceptTransportActions(GetAction.NAME + "[s]", SearchServiceTransportAction.QUERY_ACTION_NAME,
            SearchServiceTransportAction.FETCH_ID_ACTION_NAME, SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
    String[] randomIndicesOrAliases = randomIndicesOrAliases();
    for (int i = 0; i < randomIndicesOrAliases.length; i++) {
        client().prepareIndex(randomIndicesOrAliases[i], "type", "id-" + i).setSource("field", "value").get();
    }
    refresh();
    // Dedicated index (plus alias) holding the source document of the MLT request.
    assertAcked(prepareCreate("test-get").addAlias(new Alias("alias-get")));
    client().prepareIndex("test-get", "type", "1").setSource("field","value").get();
    String indexGet = randomBoolean() ? "test-get" : "alias-get";
    MoreLikeThisRequest moreLikeThisRequest = new MoreLikeThisRequest(indexGet).type("type").id("1")
            .searchIndices(randomIndicesOrAliases());
    internalCluster().clientNodeClient().moreLikeThis(moreLikeThisRequest).actionGet();
    clearInterceptedActions();
    //get might end up being executed locally, only optionally over the transport
    assertSameIndicesOptionalRequests(new String[]{indexGet}, GetAction.NAME + "[s]");
    //query might end up being executed locally as well, only optionally over the transport
    assertSameIndicesOptionalRequests(moreLikeThisRequest.searchIndices(), SearchServiceTransportAction.QUERY_ACTION_NAME);
    //free context messages are not necessarily sent through the transport, but if they are, check their indices
    assertSameIndicesOptionalRequests(moreLikeThisRequest.searchIndices(), SearchServiceTransportAction.FETCH_ID_ACTION_NAME,
            SearchServiceTransportAction.FREE_CONTEXT_ACTION_NAME);
}

// Asserts each intercepted internal request for the given actions carries
// exactly the indices (and indices options) of the original request.
private static void assertSameIndices(IndicesRequest originalRequest, String... actions) {
    assertSameIndices(originalRequest, false, actions);
}

// Same check, but tolerates actions for which no request was intercepted.
private static void assertSameIndicesOptionalRequests(IndicesRequest originalRequest, String... actions) {
    assertSameIndices(originalRequest, true, actions);
}

private static void assertSameIndices(IndicesRequest originalRequest, boolean optional, String... actions) {
    for (String action : actions) {
        List<TransportRequest> requests = consumeTransportRequests(action);
        if (!optional) {
            assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0));
        }
        for (TransportRequest internalRequest : requests) {
            assertThat(internalRequest, instanceOf(IndicesRequest.class));
            assertThat(internalRequest.getClass().getName(), ((IndicesRequest)internalRequest).indices(), equalTo(originalRequest.indices()));
            assertThat(((IndicesRequest)internalRequest).indicesOptions(), equalTo(originalRequest.indicesOptions()));
        }
    }
}

private static void assertSameIndicesOptionalRequests(String[] indices, String... actions) {
    assertSameIndices(indices, true, actions);
}

// Overload comparing against an explicit indices array; note it does not check
// indices options, unlike the IndicesRequest-based overload above.
private static void assertSameIndices(String[] indices, boolean optional, String... actions) {
    for (String action : actions) {
        List<TransportRequest> requests = consumeTransportRequests(action);
        if (!optional) {
            assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0));
        }
        for (TransportRequest internalRequest : requests) {
            assertThat(internalRequest, instanceOf(IndicesRequest.class));
            assertThat(internalRequest.getClass().getName(), ((IndicesRequest)internalRequest).indices(), equalTo(indices));
        }
    }
}

private static void assertIndicesSubset(List<String> indices, String... actions) {
    //indices returned by each bulk shard request need to be a subset of the original indices
    for (String action : actions) {
        List<TransportRequest> requests = consumeTransportRequests(action);
        assertThat("no internal requests intercepted for action [" + action + "]", requests.size(), greaterThan(0));
        for (TransportRequest internalRequest : requests) {
            assertThat(internalRequest, instanceOf(IndicesRequest.class));
            for (String index : ((IndicesRequest) internalRequest).indices()) {
                assertThat(indices, hasItem(index));
            }
        }
    }
}

// Picks one of the test indices, half the time addressed through its alias.
// NOTE(review): relies on the "<index>-alias" naming set up elsewhere in this class.
private String randomIndexOrAlias() {
    String index = randomFrom(indices);
    if (randomBoolean()) {
        return index + "-alias";
    } else {
        return index;
    }
}

// May contain duplicates; the local array shadows the instance field `indices`.
private String[] randomIndicesOrAliases() {
    int count = randomIntBetween(1, indices.size() * 2); //every index has an alias
    String[] indices = new String[count];
    for (int i = 0; i < count; i++) {
        indices[i] = randomIndexOrAlias();
    }
    return indices;
}

// Duplicate-free variant: uniqueness is enforced on the index names, each of
// which is then independently rendered as either index or alias.
private String[] randomUniqueIndicesOrAliases() {
    Set<String> uniqueIndices = new HashSet<>();
    int count = randomIntBetween(1, this.indices.size());
    while (uniqueIndices.size() < count) {
        uniqueIndices.add(randomFrom(this.indices));
    }
    String[] indices = new String[count];
    int i = 0;
    for (String index : uniqueIndices) {
        indices[i++] = randomBoolean() ?
index + "-alias" : index;
    }
    return indices;
}

// Fails if any intercepted request was recorded but never consumed by an
// assertion. NOTE(review): no caller is visible in this chunk; presumably
// invoked from test setup/teardown defined elsewhere in the class.
private static void assertAllRequestsHaveBeenConsumed() {
    Iterable<TransportService> transportServices = internalCluster().getInstances(TransportService.class);
    for (TransportService transportService : transportServices) {
        assertThat(((InterceptingTransportService)transportService).requests.entrySet(), emptyIterable());
    }
}

// Stops interception on every node's transport service.
private static void clearInterceptedActions() {
    Iterable<TransportService> transportServices = internalCluster().getInstances(TransportService.class);
    for (TransportService transportService : transportServices) {
        ((InterceptingTransportService) transportService).clearInterceptedActions();
    }
}

// Starts recording incoming requests for the given action names on every node.
private static void interceptTransportActions(String... actions) {
    Iterable<TransportService> transportServices = internalCluster().getInstances(TransportService.class);
    for (TransportService transportService : transportServices) {
        ((InterceptingTransportService) transportService).interceptTransportActions(actions);
    }
}

// Drains and returns the recorded requests for one action across all nodes.
private static List<TransportRequest> consumeTransportRequests(String action) {
    List<TransportRequest> requests = new ArrayList<>();
    Iterable<TransportService> transportServices = internalCluster().getInstances(TransportService.class);
    for (TransportService transportService : transportServices) {
        List<TransportRequest> transportRequests = ((InterceptingTransportService) transportService).consumeRequests(action);
        if (transportRequests != null) {
            requests.addAll(transportRequests);
        }
    }
    return requests;
}

// TransportService subclass that wraps every registered request handler so
// incoming requests for intercepted actions are recorded before being handled.
// Access to `actions` and `requests` is guarded by synchronizing on this instance.
public static class InterceptingTransportService extends TransportService {

    // Action names currently being intercepted.
    private final Set<String> actions = new HashSet<>();

    // Recorded requests, keyed by action name; entries are removed on consume.
    private final Map<String, List<TransportRequest>> requests = new HashMap<>();

    @Inject
    public InterceptingTransportService(Settings settings, Transport transport, ThreadPool threadPool) {
        super(settings, transport, threadPool);
    }

    // Removes and returns the recorded requests for an action (null if none).
    synchronized List<TransportRequest> consumeRequests(String action) {
        return requests.remove(action);
    }

    synchronized void interceptTransportActions(String... actions) {
        Collections.addAll(this.actions, actions);
    }

    synchronized void clearInterceptedActions() {
        actions.clear();
    }

    // Every handler registration is wrapped so the interceptor sees the request
    // before delegating to the real handler.
    @Override
    public <Request extends TransportRequest> void registerRequestHandler(String action, Class<Request> request, String executor, boolean forceExecution, TransportRequestHandler<Request> handler) {
        super.registerRequestHandler(action, request, executor, forceExecution, new InterceptingRequestHandler(action, handler));
    }

    private class InterceptingRequestHandler implements TransportRequestHandler {

        private final TransportRequestHandler requestHandler;
        private final String action;

        InterceptingRequestHandler(String action, TransportRequestHandler requestHandler) {
            this.requestHandler = requestHandler;
            this.action = action;
        }

        @Override
        public void messageReceived(TransportRequest request, TransportChannel channel) throws Exception {
            // Record the request (if its action is intercepted) under the
            // enclosing service's lock, then always delegate to the real handler.
            synchronized (InterceptingTransportService.this) {
                if (actions.contains(action)) {
                    List<TransportRequest> requestList = requests.get(action);
                    if (requestList == null) {
                        requestList = new ArrayList<>();
                        requestList.add(request);
                        requests.put(action, requestList);
                    } else {
                        requestList.add(request);
                    }
                }
            }
            requestHandler.messageReceived(request, channel);
        }
    }
}
}
package ru.yandex.qatools.matchers.collection;

import org.junit.Test;

import java.util.ArrayList;
import java.util.List;

import static java.util.Arrays.asList;
import static org.hamcrest.MatcherAssert.assertThat;
import static ru.yandex.qatools.matchers.collection.HasSameItemsAsListMatcher.hasSameItemsAsList;

/**
 * Unit tests for {@link HasSameItemsAsListMatcher}.
 *
 * <p>Covers order-insensitive matching (the default), order-sensitive matching
 * via {@code sameSorted()}, duplicate-element handling, empty lists, and custom
 * element comparison through a user-supplied {@link WrapperFactory}.
 *
 * @author lanwen (27.05.13)
 */
public class HasSameItemsAsListMatcherTest {

    @Test
    public void listNotSameOrderButEqual() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("3", "2", "1");
        // without sameSorted() the matcher must ignore element order
        assertThat(actual, hasSameItemsAsList(expected));
    }

    @Test(expected = AssertionError.class)
    public void listNotSameOrderNotEqualWithSortCheck() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("3", "2", "1");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void listNotEqualSortedAndContainsNewItems() throws Exception {
        List<Integer> actual = asList(1, 2, 3, 4);
        List<Integer> expected = asList(3, 2, 1);
        assertThat(actual, hasSameItemsAsList(expected));
    }

    @Test(expected = AssertionError.class)
    public void listNotEqualSortedAndNotContainsSomeItems() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("3", "2", "1", "4");
        assertThat(actual, hasSameItemsAsList(expected));
    }

    @Test(expected = AssertionError.class)
    public void listNotEqualSortedAndContainsNewItemsWithSortCheck() throws Exception {
        List<String> actual = asList("1", "2", "3", "4");
        List<String> expected = asList("3", "2", "1");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void listNotEqualSortedAndNotContainsSomeItemsWithSortCheck() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("3", "2", "1", "4");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void listNotEqualSortedAndNotStartsWithDifferItemWithSortCheck() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("4", "2", "1", "3");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void actualListHasLastItemTwice() throws Exception {
        List<String> actual = asList("1", "2", "3", "3");
        List<String> expected = asList("1", "2", "3");
        assertThat(actual, hasSameItemsAsList(expected));
    }

    @Test(expected = AssertionError.class)
    public void actualListHasItemsTwiceAndNotSameSorted() throws Exception {
        List<String> actual = asList("1", "2", "2", "3", "3");
        List<String> expected = asList("1", "2", "3", "3", "2");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void actualListHasNotLastItemTwice() throws Exception {
        List<String> actual = asList("1", "2", "2", "3");
        List<String> expected = asList("1", "2", "3");
        assertThat(actual, hasSameItemsAsList(expected));
    }

    @Test(expected = AssertionError.class)
    public void expectedListHasLastItemTwice() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("1", "2", "3", "3");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void expectedListHasNotLastItemTwice() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("1", "2", "2", "3");
        assertThat(actual, hasSameItemsAsList(expected));
    }

    @Test(expected = AssertionError.class)
    public void expectedListHasNotLastItemTwiceWithSort() throws Exception {
        List<String> actual = asList("1", "2", "3");
        List<String> expected = asList("1", "2", "2", "3");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void listNotEqualSortedAndStartsWithDifferItemWithSortCheck() throws Exception {
        List<String> actual = asList("4", "1", "2", "3");
        List<String> expected = asList("3", "2", "1");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void listEqualSortedAndStartsWithDifferItemWithSortCheck() throws Exception {
        List<String> actual = asList("1", "2", "3", "4");
        List<String> expected = asList("5", "1", "2", "3", "4");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void emptyListActual() throws Exception {
        assertThat(new ArrayList<String>(),
                hasSameItemsAsList(asList("Abc", "DeF", "gHI")).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void emptyListExpected() throws Exception {
        assertThat(asList("Abc", "DeF", "gHI"),
                hasSameItemsAsList(new ArrayList<String>()).sameSorted());
    }

    @Test(expected = AssertionError.class)
    public void withoutOverrideWrapperGetFail() throws Exception {
        // default wrapper is case-sensitive, so these lists must not match
        List<String> actual = asList("aBc", "DEf", "gHi");
        List<String> expected = asList("Abc", "DeF", "gHI");
        assertThat(actual, hasSameItemsAsList(expected).sameSorted());
    }

    @Test
    public void overrideWrapperToIgnoreCaseGetSuccess() throws Exception {
        List<String> actual = asList("aBc", "DEf", "gHi");
        List<String> expected = asList("Abc", "DeF", "gHI");
        assertThat(actual, hasSameItemsAsList(expected)
                .sameSorted()
                .useWrapperFactory(new WrapperFactory<String>() {
                    @Override
                    public Wrapper<String> newWrapper() {
                        // case-insensitive element comparison
                        return new Wrapper<String>() {
                            @Override
                            public boolean safelyEquals(String actual, String expected) {
                                return actual.toLowerCase().equals(expected.toLowerCase());
                            }

                            @Override
                            public String asString(String obj) {
                                return obj;
                            }
                        };
                    }
                }));
    }

    @Test(expected = IllegalArgumentException.class)
    public void factoryCantBeNull() throws Exception {
        List<String> actual = asList("aBc", "DEf", "gHi");
        List<String> expected = asList("Abc", "DeF", "gHI");
        assertThat(actual, hasSameItemsAsList(expected)
                .sameSorted()
                .useWrapperFactory(null));
    }

    @Test(expected = IllegalStateException.class)
    public void factoryCantProduceNullWrapper() throws Exception {
        List<String> actual = asList("aBc", "DEf", "gHi");
        List<String> expected = asList("Abc", "DeF", "gHI");
        assertThat(actual, hasSameItemsAsList(expected)
                .sameSorted()
                .useWrapperFactory(new NullWrapperFactory()));
    }

    /**
     * Factory that violates the contract by producing a null wrapper; static so it
     * does not capture a hidden reference to the enclosing test instance.
     */
    private static class NullWrapperFactory implements WrapperFactory<String> {
        @Override
        public Wrapper<String> newWrapper() {
            return null;
        }
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.support.model;

import java.io.Serializable;
import java.util.Objects;

/**
 * A summary of a Trusted Advisor check result, including the alert status, last
 * refresh, and number of resources examined.
 */
public class TrustedAdvisorCheckSummary implements Serializable, Cloneable {

    /** The unique identifier for the Trusted Advisor check. */
    private String checkId;

    /** The time of the last refresh of the check. */
    private String timestamp;

    /**
     * The alert status of the check: "ok" (green), "warning" (yellow), "error"
     * (red), or "not_available".
     */
    private String status;

    /** Specifies whether the Trusted Advisor check has flagged resources. */
    private Boolean hasFlaggedResources;

    private TrustedAdvisorResourcesSummary resourcesSummary;

    /**
     * Summary information that relates to the category of the check. Cost
     * Optimizing is the only category that is currently supported.
     */
    private TrustedAdvisorCategorySpecificSummary categorySpecificSummary;

    /**
     * @param checkId
     *        The unique identifier for the Trusted Advisor check.
     */
    public void setCheckId(String checkId) {
        this.checkId = checkId;
    }

    /**
     * @return The unique identifier for the Trusted Advisor check.
     */
    public String getCheckId() {
        return this.checkId;
    }

    /**
     * @param checkId
     *        The unique identifier for the Trusted Advisor check.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TrustedAdvisorCheckSummary withCheckId(String checkId) {
        setCheckId(checkId);
        return this;
    }

    /**
     * @param timestamp
     *        The time of the last refresh of the check.
     */
    public void setTimestamp(String timestamp) {
        this.timestamp = timestamp;
    }

    /**
     * @return The time of the last refresh of the check.
     */
    public String getTimestamp() {
        return this.timestamp;
    }

    /**
     * @param timestamp
     *        The time of the last refresh of the check.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TrustedAdvisorCheckSummary withTimestamp(String timestamp) {
        setTimestamp(timestamp);
        return this;
    }

    /**
     * @param status
     *        The alert status of the check: "ok" (green), "warning" (yellow),
     *        "error" (red), or "not_available".
     */
    public void setStatus(String status) {
        this.status = status;
    }

    /**
     * @return The alert status of the check: "ok" (green), "warning" (yellow),
     *         "error" (red), or "not_available".
     */
    public String getStatus() {
        return this.status;
    }

    /**
     * @param status
     *        The alert status of the check: "ok" (green), "warning" (yellow),
     *        "error" (red), or "not_available".
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TrustedAdvisorCheckSummary withStatus(String status) {
        setStatus(status);
        return this;
    }

    /**
     * @param hasFlaggedResources
     *        Specifies whether the Trusted Advisor check has flagged resources.
     */
    public void setHasFlaggedResources(Boolean hasFlaggedResources) {
        this.hasFlaggedResources = hasFlaggedResources;
    }

    /**
     * @return Specifies whether the Trusted Advisor check has flagged
     *         resources.
     */
    public Boolean getHasFlaggedResources() {
        return this.hasFlaggedResources;
    }

    /**
     * @param hasFlaggedResources
     *        Specifies whether the Trusted Advisor check has flagged resources.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TrustedAdvisorCheckSummary withHasFlaggedResources(
            Boolean hasFlaggedResources) {
        setHasFlaggedResources(hasFlaggedResources);
        return this;
    }

    /**
     * @return Specifies whether the Trusted Advisor check has flagged
     *         resources.
     */
    public Boolean isHasFlaggedResources() {
        return this.hasFlaggedResources;
    }

    /**
     * @param resourcesSummary
     */
    public void setResourcesSummary(
            TrustedAdvisorResourcesSummary resourcesSummary) {
        this.resourcesSummary = resourcesSummary;
    }

    /**
     * @return
     */
    public TrustedAdvisorResourcesSummary getResourcesSummary() {
        return this.resourcesSummary;
    }

    /**
     * @param resourcesSummary
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TrustedAdvisorCheckSummary withResourcesSummary(
            TrustedAdvisorResourcesSummary resourcesSummary) {
        setResourcesSummary(resourcesSummary);
        return this;
    }

    /**
     * @param categorySpecificSummary
     *        Summary information that relates to the category of the check.
     *        Cost Optimizing is the only category that is currently supported.
     */
    public void setCategorySpecificSummary(
            TrustedAdvisorCategorySpecificSummary categorySpecificSummary) {
        this.categorySpecificSummary = categorySpecificSummary;
    }

    /**
     * @return Summary information that relates to the category of the check.
     *         Cost Optimizing is the only category that is currently supported.
     */
    public TrustedAdvisorCategorySpecificSummary getCategorySpecificSummary() {
        return this.categorySpecificSummary;
    }

    /**
     * @param categorySpecificSummary
     *        Summary information that relates to the category of the check.
     *        Cost Optimizing is the only category that is currently supported.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TrustedAdvisorCheckSummary withCategorySpecificSummary(
            TrustedAdvisorCategorySpecificSummary categorySpecificSummary) {
        setCategorySpecificSummary(categorySpecificSummary);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCheckId() != null)
            sb.append("CheckId: " + getCheckId() + ",");
        if (getTimestamp() != null)
            sb.append("Timestamp: " + getTimestamp() + ",");
        if (getStatus() != null)
            sb.append("Status: " + getStatus() + ",");
        if (getHasFlaggedResources() != null)
            sb.append("HasFlaggedResources: " + getHasFlaggedResources() + ",");
        if (getResourcesSummary() != null)
            sb.append("ResourcesSummary: " + getResourcesSummary() + ",");
        if (getCategorySpecificSummary() != null)
            sb.append("CategorySpecificSummary: " + getCategorySpecificSummary());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // also rejects null, so no separate null check is needed
        if (!(obj instanceof TrustedAdvisorCheckSummary))
            return false;
        TrustedAdvisorCheckSummary other = (TrustedAdvisorCheckSummary) obj;
        // Objects.equals is null-safe and equivalent to the generated
        // null-xor-then-equals pattern
        return Objects.equals(getCheckId(), other.getCheckId())
                && Objects.equals(getTimestamp(), other.getTimestamp())
                && Objects.equals(getStatus(), other.getStatus())
                && Objects.equals(getHasFlaggedResources(),
                        other.getHasFlaggedResources())
                && Objects.equals(getResourcesSummary(),
                        other.getResourcesSummary())
                && Objects.equals(getCategorySpecificSummary(),
                        other.getCategorySpecificSummary());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based fold (null -> 0) as the
        // previous hand-rolled implementation, so hash values are unchanged.
        return Objects.hash(getCheckId(), getTimestamp(), getStatus(),
                getHasFlaggedResources(), getResourcesSummary(),
                getCategorySpecificSummary());
    }

    @Override
    public TrustedAdvisorCheckSummary clone() {
        try {
            return (TrustedAdvisorCheckSummary) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/dialogflow/v2/entity_type.proto

package com.google.cloud.dialogflow.v2;

/**
 * The request message for
 * [EntityTypes.BatchDeleteEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntityTypes].
 *
 * Protobuf type {@code google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest}
 */
public final class BatchDeleteEntityTypesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest)
    BatchDeleteEntityTypesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BatchDeleteEntityTypesRequest.newBuilder() to construct.
  private BatchDeleteEntityTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BatchDeleteEntityTypesRequest() {
    parent_ = "";
    entityTypeNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor (old protobuf-java generated style:
  // parsing happens here rather than in a separate builder pass).
  private BatchDeleteEntityTypesRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 10: // field 1 (parent), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 18: // field 2 (entity_type_names), wire type 2
            {
              java.lang.String s = input.readStringRequireUtf8();
              // lazily allocate the mutable list on first occurrence;
              // bit 0x00000002 tracks that it was allocated
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                entityTypeNames_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000002;
              }
              entityTypeNames_.add(s);
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // seal the repeated field and unknown-field set even on failure
      if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
        entityTypeNames_ = entityTypeNames_.getUnmodifiableView();
      }
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2.EntityTypeProto
        .internal_static_google_cloud_dialogflow_v2_BatchDeleteEntityTypesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2.EntityTypeProto
        .internal_static_google_cloud_dialogflow_v2_BatchDeleteEntityTypesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest.class,
            com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest.Builder.class);
  }

  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;
  private volatile java.lang.Object parent_;

  /**
   * Required. The name of the agent to delete all entities types for. Format:
   * `projects/&lt;Project ID&gt;/agent`.
   *
   * <code>string parent = 1;</code>
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s; // cache decoded form
      return s;
    }
  }

  /** Bytes view of {@code parent}. <code>string parent = 1;</code> */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b; // cache encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ENTITY_TYPE_NAMES_FIELD_NUMBER = 2;
  private com.google.protobuf.LazyStringList entityTypeNames_;

  /**
   * Required. The names entity types to delete. All names must point to the
   * same agent as `parent`.
   *
   * <code>repeated string entity_type_names = 2;</code>
   */
  public com.google.protobuf.ProtocolStringList getEntityTypeNamesList() {
    return entityTypeNames_;
  }

  /** Number of entries in {@code entity_type_names}. */
  public int getEntityTypeNamesCount() {
    return entityTypeNames_.size();
  }

  /** Entry {@code index} of {@code entity_type_names}. */
  public java.lang.String getEntityTypeNames(int index) {
    return entityTypeNames_.get(index);
  }

  /** Bytes view of entry {@code index} of {@code entity_type_names}. */
  public com.google.protobuf.ByteString getEntityTypeNamesBytes(int index) {
    return entityTypeNames_.getByteString(index);
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!getParentBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    for (int i = 0; i < entityTypeNames_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, entityTypeNames_.getRaw(i));
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!getParentBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < entityTypeNames_.size(); i++) {
        dataSize += computeStringSizeNoTag(entityTypeNames_.getRaw(i));
      }
      size += dataSize;
      size += 1 * getEntityTypeNamesList().size(); // one tag byte per entry
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest other =
        (com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest) obj;
    boolean result = true;
    result = result && getParent().equals(other.getParent());
    result = result && getEntityTypeNamesList().equals(other.getEntityTypeNamesList());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (getEntityTypeNamesCount() > 0) {
      hash = (37 * hash) + ENTITY_TYPE_NAMES_FIELD_NUMBER;
      hash = (53 * hash) + getEntityTypeNamesList().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- standard generated parse entry points ---

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for the request message for
   * [EntityTypes.BatchDeleteEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntityTypes].
   *
   * Protobuf type {@code google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest)
      com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequestOrBuilder {

    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2.EntityTypeProto
          .internal_static_google_cloud_dialogflow_v2_BatchDeleteEntityTypesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2.EntityTypeProto
          .internal_static_google_cloud_dialogflow_v2_BatchDeleteEntityTypesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest.class,
              com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // no message-typed fields, so nothing to eagerly initialize
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";
      entityTypeNames_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2.EntityTypeProto
          .internal_static_google_cloud_dialogflow_v2_BatchDeleteEntityTypesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest build() {
      com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest buildPartial() {
      com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest result =
          new com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      result.parent_ = parent_;
      // seal the repeated field so the built message shares an immutable view
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        entityTypeNames_ = entityTypeNames_.getUnmodifiableView();
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.entityTypeNames_ = entityTypeNames_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest) {
        return mergeFrom((com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest other) {
      if (other
          == com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (!other.entityTypeNames_.isEmpty()) {
        if (entityTypeNames_.isEmpty()) {
          // adopt the other message's (immutable) list directly
          entityTypeNames_ = other.entityTypeNames_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureEntityTypeNamesIsMutable();
          entityTypeNames_.addAll(other.entityTypeNames_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // merge whatever was successfully parsed, even on failure
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int bitField0_;
private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the agent to delete all entities types for. Format: * `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the agent to delete all entities types for. Format: * `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the agent to delete all entities types for. Format: * `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * * * <pre> * Required. The name of the agent to delete all entities types for. Format: * `projects/&lt;Project ID&gt;/agent`. * </pre> * * <code>string parent = 1;</code> */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * * * <pre> * Required. The name of the agent to delete all entities types for. Format: * `projects/&lt;Project ID&gt;/agent`. 
* </pre> * * <code>string parent = 1;</code> */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } private com.google.protobuf.LazyStringList entityTypeNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; private void ensureEntityTypeNamesIsMutable() { if (!((bitField0_ & 0x00000002) == 0x00000002)) { entityTypeNames_ = new com.google.protobuf.LazyStringArrayList(entityTypeNames_); bitField0_ |= 0x00000002; } } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. * </pre> * * <code>repeated string entity_type_names = 2;</code> */ public com.google.protobuf.ProtocolStringList getEntityTypeNamesList() { return entityTypeNames_.getUnmodifiableView(); } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. * </pre> * * <code>repeated string entity_type_names = 2;</code> */ public int getEntityTypeNamesCount() { return entityTypeNames_.size(); } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. * </pre> * * <code>repeated string entity_type_names = 2;</code> */ public java.lang.String getEntityTypeNames(int index) { return entityTypeNames_.get(index); } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. * </pre> * * <code>repeated string entity_type_names = 2;</code> */ public com.google.protobuf.ByteString getEntityTypeNamesBytes(int index) { return entityTypeNames_.getByteString(index); } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. 
* </pre> * * <code>repeated string entity_type_names = 2;</code> */ public Builder setEntityTypeNames(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureEntityTypeNamesIsMutable(); entityTypeNames_.set(index, value); onChanged(); return this; } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. * </pre> * * <code>repeated string entity_type_names = 2;</code> */ public Builder addEntityTypeNames(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureEntityTypeNamesIsMutable(); entityTypeNames_.add(value); onChanged(); return this; } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. * </pre> * * <code>repeated string entity_type_names = 2;</code> */ public Builder addAllEntityTypeNames(java.lang.Iterable<java.lang.String> values) { ensureEntityTypeNamesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, entityTypeNames_); onChanged(); return this; } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. * </pre> * * <code>repeated string entity_type_names = 2;</code> */ public Builder clearEntityTypeNames() { entityTypeNames_ = com.google.protobuf.LazyStringArrayList.EMPTY; bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The names entity types to delete. All names must point to the * same agent as `parent`. 
* </pre> * * <code>repeated string entity_type_names = 2;</code> */ public Builder addEntityTypeNamesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureEntityTypeNamesIsMutable(); entityTypeNames_.add(value); onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest) private static final com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest(); } public static com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BatchDeleteEntityTypesRequest> PARSER = new com.google.protobuf.AbstractParser<BatchDeleteEntityTypesRequest>() { @java.lang.Override public BatchDeleteEntityTypesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new BatchDeleteEntityTypesRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<BatchDeleteEntityTypesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchDeleteEntityTypesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest 
getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote;

import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.EnvironmentalExecException;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.actions.MetadataProvider;
import com.google.devtools.build.lib.actions.cache.VirtualActionInput;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.remote.Digests.ActionKey;
import com.google.devtools.build.lib.remote.TreeNodeRepository.TreeNode;
import com.google.devtools.build.lib.remote.blobstore.SimpleBlobStore;
import com.google.devtools.build.lib.util.io.FileOutErr;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.remoteexecution.v1test.ActionResult;
import com.google.devtools.remoteexecution.v1test.Command;
import com.google.devtools.remoteexecution.v1test.Digest;
import com.google.devtools.remoteexecution.v1test.Directory;
import com.google.devtools.remoteexecution.v1test.DirectoryNode;
import com.google.devtools.remoteexecution.v1test.FileNode;
import com.google.devtools.remoteexecution.v1test.OutputFile;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;

/**
 * A RemoteActionCache implementation that uses a concurrent map as a distributed storage for files
 * and action output.
 *
 * <p>The thread safety is guaranteed by the underlying map.
 *
 * <p>Note that this class is used from src/tools/remote_worker.
 */
@ThreadSafe
public final class SimpleBlobStoreActionCache implements RemoteActionCache {
  // Stdout/stderr up to this many bytes are embedded directly in the ActionResult proto
  // (see uploadOutErr) instead of being stored as separate blobs.
  private static final int MAX_BLOB_SIZE_FOR_INLINE = 10 * 1024;

  // Backing content-addressed store; keys are digest hashes.
  private final SimpleBlobStore blobStore;

  public SimpleBlobStoreActionCache(SimpleBlobStore blobStore) {
    this.blobStore = blobStore;
  }

  /**
   * Uploads the command, every Directory proto of the Merkle tree rooted at {@code root}, and the
   * content of every input file, so a remote executor can fetch the full action input set.
   */
  @Override
  public void ensureInputsPresent(
      TreeNodeRepository repository, Path execRoot, TreeNode root, Command command)
      throws IOException, InterruptedException {
    repository.computeMerkleDigests(root);
    uploadBlob(command.toByteArray());
    for (Directory directory : repository.treeToDirectories(root)) {
      uploadBlob(directory.toByteArray());
    }
    // TODO(ulfjack): Only upload files that aren't in the CAS yet?
    for (TreeNode leaf : repository.leaves(root)) {
      uploadFileContents(leaf.getActionInput(), execRoot, repository.getInputFileCache());
    }
  }

  /**
   * Recursively materializes the Directory tree identified by {@code rootDigest} under
   * {@code rootLocation}, downloading each file blob from the store.
   */
  public void downloadTree(Digest rootDigest, Path rootLocation)
      throws IOException, InterruptedException {
    Directory directory = Directory.parseFrom(downloadBlob(rootDigest));
    for (FileNode file : directory.getFilesList()) {
      downloadFileContents(
          file.getDigest(), rootLocation.getRelative(file.getName()), file.getIsExecutable());
    }
    for (DirectoryNode child : directory.getDirectoriesList()) {
      downloadTree(child.getDigest(), rootLocation.getRelative(child.getName()));
    }
  }

  /** Uploads the content of {@code file} and returns its digest. */
  private Digest uploadFileContents(Path file) throws IOException, InterruptedException {
    Digest digest = Digests.computeDigest(file);
    try (InputStream in = file.getInputStream()) {
      return uploadStream(digest, in);
    }
  }

  /**
   * Uploads one action input. VirtualActionInputs are serialized from memory; regular inputs are
   * streamed from disk under {@code execRoot}, with the digest taken from the input cache.
   */
  private Digest uploadFileContents(
      ActionInput input, Path execRoot, MetadataProvider inputCache)
      throws IOException, InterruptedException {
    if (input instanceof VirtualActionInput) {
      byte[] blob = ((VirtualActionInput) input).getBytes().toByteArray();
      return uploadBlob(blob, Digests.computeDigest(blob));
    }
    try (InputStream in = execRoot.getRelative(input.getExecPathString()).getInputStream()) {
      return uploadStream(Digests.getDigestFromInputCache(input, inputCache), in);
    }
  }

  /**
   * Downloads all outputs of a cached action result into {@code execRoot} plus stdout/stderr.
   * On a download failure, partially written outputs are deleted so a subsequent local
   * execution starts from a clean state; the original IOException is then rethrown.
   * Output directories are not supported yet.
   */
  @Override
  public void download(ActionResult result, Path execRoot, FileOutErr outErr)
      throws ExecException, IOException, InterruptedException {
    try {
      for (OutputFile file : result.getOutputFilesList()) {
        if (!file.getContent().isEmpty()) {
          // Small outputs may be inlined in the ActionResult itself.
          createFile(
              file.getContent().toByteArray(),
              execRoot.getRelative(file.getPath()),
              file.getIsExecutable());
        } else {
          downloadFileContents(
              file.getDigest(), execRoot.getRelative(file.getPath()), file.getIsExecutable());
        }
      }
      if (!result.getOutputDirectoriesList().isEmpty()) {
        throw new UnsupportedOperationException();
      }
      downloadOutErr(result, outErr);
    } catch (IOException downloadException) {
      try {
        // Delete any (partially) downloaded output files, since any subsequent local execution
        // of this action may expect none of the output files to exist.
        for (OutputFile file : result.getOutputFilesList()) {
          execRoot.getRelative(file.getPath()).delete();
        }
        outErr.getOutputPath().delete();
        outErr.getErrorPath().delete();
      } catch (IOException e) {
        // If deleting of output files failed, we abort the build with a decent error message as
        // any subsequent local execution failure would likely be incomprehensible.
        // We don't propagate the downloadException, as this is a recoverable error and the cause
        // of the build failure is really that we couldn't delete output files.
        throw new EnvironmentalExecException("Failed to delete output files after incomplete "
            + "download. Cannot continue with local execution.", e, true);
      }
      throw downloadException;
    }
  }

  /**
   * Writes the action's stdout/stderr to {@code outErr}, preferring the raw inlined bytes and
   * falling back to fetching the digest-referenced blobs.
   */
  private void downloadOutErr(ActionResult result, FileOutErr outErr)
      throws IOException, InterruptedException {
    if (!result.getStdoutRaw().isEmpty()) {
      result.getStdoutRaw().writeTo(outErr.getOutputStream());
      outErr.getOutputStream().flush();
    } else if (result.hasStdoutDigest()) {
      downloadFileContents(result.getStdoutDigest(), outErr.getOutputPath(), /*executable=*/false);
    }
    if (!result.getStderrRaw().isEmpty()) {
      result.getStderrRaw().writeTo(outErr.getErrorStream());
      outErr.getErrorStream().flush();
    } else if (result.hasStderrDigest()) {
      downloadFileContents(result.getStderrDigest(), outErr.getErrorPath(), /*executable=*/false);
    }
  }

  /**
   * Uploads output files and stdout/stderr for a finished action; when {@code uploadAction} is
   * true, also stores the assembled ActionResult under the action key.
   */
  @Override
  public void upload(
      ActionKey actionKey,
      Path execRoot,
      Collection<Path> files,
      FileOutErr outErr,
      boolean uploadAction)
      throws IOException, InterruptedException {
    ActionResult.Builder result = ActionResult.newBuilder();
    upload(result, execRoot, files);
    if (outErr.getErrorPath().exists()) {
      Digest stderr = uploadFileContents(outErr.getErrorPath());
      result.setStderrDigest(stderr);
    }
    if (outErr.getOutputPath().exists()) {
      Digest stdout = uploadFileContents(outErr.getOutputPath());
      result.setStdoutDigest(stdout);
    }
    if (uploadAction) {
      blobStore.putActionResult(actionKey.getDigest().getHash(), result.build().toByteArray());
    }
  }

  /**
   * Uploads the given output files (missing files are skipped; directories are rejected) and
   * records each one's exec-root-relative path, digest, and executable bit on {@code result}.
   */
  public void upload(ActionResult.Builder result, Path execRoot, Collection<Path> files)
      throws IOException, InterruptedException {
    for (Path file : files) {
      // TODO(ulfjack): Maybe pass in a SpawnResult here, add a list of output files to that, and
      // rely on the local spawn runner to stat the files, instead of statting here.
      if (!file.exists()) {
        continue;
      }
      if (file.isDirectory()) {
        // TODO(olaola): to implement this for a directory, will need to create or pass a
        // TreeNodeRepository to call uploadTree.
        throw new UnsupportedOperationException("Storing a directory is not yet supported.");
      }
      // TODO(olaola): inline small file contents here.
      // First put the file content to cache.
      Digest digest = uploadFileContents(file);
      // Add to protobuf.
      result
          .addOutputFilesBuilder()
          .setPath(file.relativeTo(execRoot).getPathString())
          .setDigest(digest)
          .setIsExecutable(file.isExecutable());
    }
  }

  /**
   * Records stdout/stderr on {@code result}: inlined raw when at most
   * MAX_BLOB_SIZE_FOR_INLINE bytes, otherwise uploaded as a blob and referenced by digest.
   */
  public void uploadOutErr(ActionResult.Builder result, byte[] stdout, byte[] stderr)
      throws IOException, InterruptedException {
    if (stdout.length <= MAX_BLOB_SIZE_FOR_INLINE) {
      result.setStdoutRaw(ByteString.copyFrom(stdout));
    } else if (stdout.length > 0) {
      result.setStdoutDigest(uploadBlob(stdout));
    }
    if (stderr.length <= MAX_BLOB_SIZE_FOR_INLINE) {
      result.setStderrRaw(ByteString.copyFrom(stderr));
    } else if (stderr.length > 0) {
      result.setStderrDigest(uploadBlob(stderr));
    }
  }

  /** Downloads a blob to {@code dest}, creating parent directories and setting the exec bit. */
  private void downloadFileContents(Digest digest, Path dest, boolean executable)
      throws IOException, InterruptedException {
    FileSystemUtils.createDirectoryAndParents(dest.getParentDirectory());
    try (OutputStream out = dest.getOutputStream()) {
      downloadBlob(digest, out);
    }
    dest.setExecutable(executable);
  }

  /** Writes in-memory contents to {@code dest}, creating parent directories as needed. */
  private void createFile(byte[] contents, Path dest, boolean executable) throws IOException {
    FileSystemUtils.createDirectoryAndParents(dest.getParentDirectory());
    try (OutputStream stream = dest.getOutputStream()) {
      stream.write(contents);
    }
    dest.setExecutable(executable);
  }

  /** Uploads a byte array, computing its digest. */
  public Digest uploadBlob(byte[] blob) throws IOException, InterruptedException {
    return uploadBlob(blob, Digests.computeDigest(blob));
  }

  private Digest uploadBlob(byte[] blob, Digest digest) throws IOException, InterruptedException {
    return uploadStream(digest, new ByteArrayInputStream(blob));
  }

  /** Streams {@code in} into the blob store under the digest's hash and returns the digest. */
  public Digest uploadStream(Digest digest, InputStream in)
      throws IOException, InterruptedException {
    blobStore.put(digest.getHash(), digest.getSizeBytes(), in);
    return digest;
  }

  /**
   * Streams the blob for {@code digest} into {@code out}. Empty digests are a no-op; a missing
   * blob raises CacheNotFoundException.
   */
  private void downloadBlob(Digest digest, OutputStream out)
      throws IOException, InterruptedException {
    if (digest.getSizeBytes() == 0) {
      return;
    }
    boolean success = blobStore.get(digest.getHash(), out);
    if (!success) {
      throw new CacheNotFoundException(digest);
    }
  }

  /** Downloads the blob for {@code digest} fully into memory. */
  public byte[] downloadBlob(Digest digest) throws IOException, InterruptedException {
    if (digest.getSizeBytes() == 0) {
      return new byte[0];
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    downloadBlob(digest, out);
    return out.toByteArray();
  }

  /** Returns whether the store already holds a blob with this digest's hash. */
  public boolean containsKey(Digest digest) throws IOException, InterruptedException {
    return blobStore.containsKey(digest.getHash());
  }

  /**
   * Looks up a cached ActionResult for {@code actionKey}; returns null on a cache miss or when
   * the stored bytes fail to parse.
   */
  @Override
  public ActionResult getCachedActionResult(ActionKey actionKey)
      throws IOException, InterruptedException {
    try {
      byte[] data = downloadActionResult(actionKey.getDigest());
      return ActionResult.parseFrom(data);
    } catch (InvalidProtocolBufferException | CacheNotFoundException e) {
      return null;
    }
  }

  private byte[] downloadActionResult(Digest digest) throws IOException, InterruptedException {
    if (digest.getSizeBytes() == 0) {
      return new byte[0];
    }
    // This unconditionally downloads the whole blob into memory!
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    boolean success = blobStore.getActionResult(digest.getHash(), out);
    if (!success) {
      throw new CacheNotFoundException(digest);
    }
    return out.toByteArray();
  }

  /** Stores {@code result} in the action cache under the action key's hash. */
  public void setCachedActionResult(ActionKey actionKey, ActionResult result)
      throws IOException, InterruptedException {
    blobStore.putActionResult(actionKey.getDigest().getHash(), result.toByteArray());
  }

  @Override
  public void close() {
    blobStore.close();
  }
}
package com.planet_ink.coffee_mud.Items.MiscMagic; import com.planet_ink.coffee_mud.Items.Basic.StdItem; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright (c) 2005-2010, Bo Zimmerman All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. The Playing Card This item represents a single card in a deck of 52 without wild cards. The value of the card is set by changing the baseEnvStats().ability() value to the numeric representation of the suit and card value from 2-14. Methods then exist to parse the ability score into usable values and encodings. The card uses bits 0-3 to represent value 2-14, bits 4,5 to represent the suit, and bit 6 to represent whether the card is face-up or face-down. The card with automatically set its own name and display text based on the encoding. */ public class StdPlayingCard extends StdItem implements MiscMagic, PlayingCard { public String ID(){ return "StdPlayingCard";} protected int oldAbility=0; public StdPlayingCard() { super(); name="A card"; displayText="A card lies here."; secretIdentity=""; baseEnvStats().setWeight(0); setBaseValue(0); recoverEnvStats(); } protected boolean abilityImbuesMagic(){return false;} // the encoded suit public int getBitEncodedSuit(){return envStats().ability()&(16+32);} // the encoded value from 2-14 public int getBitEncodedValue(){return envStats().ability()&(1+2+4+8);} // whether the card is face up public boolean isFaceUp(){return (envStats().ability()&64)==64;} // set the card face up by turning on bit 64 public void turnFaceUp(){ baseEnvStats().setAbility(baseEnvStats().ability()|64); recoverEnvStats();} // set the card face down by turning off bits 64 and up. 
public void turnFaceDown(){ baseEnvStats().setAbility(baseEnvStats().ability()&(63)); recoverEnvStats();} // return the suit of this card as a single letter string public String getStringEncodedSuit() { switch(getBitEncodedSuit()) { case 0: return "S"; case 16: return "C"; case 32: return "H"; case 48: return "D"; } return " "; } // return the value of this card as a short string // face cards are only a single letter public String getStringEncodedValue() { switch(getBitEncodedValue()) { case 1: case 14: return "A"; case 11: return "J"; case 12: return "Q"; case 13: return "K"; case 2:case 3:case 4:case 5:case 6:case 7:case 8:case 9:case 10: return ""+getBitEncodedValue(); } return "0"; } // return the english-word representation of the value // passed to this method. Since this method is static, // it may be called as a utility function and does not // necessarily represent THIS card object. public String getCardValueLongDescription(int value) { value=value&(1+2+4+8); switch(value) { case 1: return "ace"; case 2: return "two"; case 3: return "three"; case 4: return "four"; case 5: return "five"; case 6: return "six"; case 7: return "seven"; case 8: return "eight"; case 9: return "nine"; case 10: return "ten"; case 11: return "jack"; case 12: return "queen"; case 13: return "king"; case 14: return "ace"; } return "Unknown"; } // return partial english-word representation of the value // passed to this method. By partial I mean numeric for // number cards and words otherwise. Since this method is static, // it may be called as a utility function and does not // necessarily represent THIS card object. public String getCardValueShortDescription(int value) { value=value&(1+2+4+8); switch(value) { case 1: return "ace"; case 11: return "jack"; case 12: return "queen"; case 13: return "king"; case 14: return "ace"; default: return ""+value; } } // return an english-word, color-coded representation // of the suit passed to this method. 
Since this method is static, // it may be called as a utility function and does not // necessarily represent THIS card object. public String getSuitDescription(int suit) { suit=suit&(16+32); switch(suit) { case 0: return "^pspades^?"; case 16: return "^pclubs^p"; case 32: return "^rhearts^?"; case 48: return "^rdiamonds^?"; } return ""; } // recoverEnvStats() is a kind of event handler // that is called whenever something changes in // the environment of this object. This method // normally causes the object to reevaluate its // state. // In this case, we compare the current card // value with a cached and saved one to determine // if the NAME and DISPLAY TEXT of the card should // be updated. public void recoverEnvStats() { super.recoverEnvStats(); if(oldAbility!=envStats.ability()) { oldAbility=envStats().ability(); String suitStr=getSuitDescription(envStats().ability()); String cardStr=getCardValueShortDescription(envStats().ability()); if((suitStr.length()==0)||(cardStr.length()==0)) { name="A mangled card"; displayText="A mangled playing card lies here."; } else { name="the "+cardStr+" of "+suitStr; displayText="a playing card, "+name+", lies here"; } //CMLib.flags().setGettable(this,false); } } }
/*
 * Copyright (C) 2015-2016 Rinde van Lon, iMinds-DistriNet, KU Leuven
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.rinde.gpem17.eval;

import static com.google.common.base.Preconditions.checkArgument;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import com.github.rinde.logistics.pdptw.mas.comm.AuctionCommModel.AuctionEvent;
import com.github.rinde.logistics.pdptw.mas.comm.Bidder;
import com.github.rinde.rinsim.central.SolverTimeMeasurement;
import com.github.rinde.rinsim.experiment.Experiment.SimArgs;
import com.github.rinde.rinsim.experiment.Experiment.SimulationResult;
import com.github.rinde.rinsim.scenario.Scenario;
import com.github.rinde.rinsim.scenario.gendreau06.Gendreau06ObjectiveFunction;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Files;

/**
 * ResultWriter that appends per-simulation CSV rows enriched with scenario
 * properties read from the Van Lon &amp; Holvoet dataset, and optionally dumps
 * auction/bid computation-time statistics per simulation.
 * @author Rinde van Lon
 */
public class VanLonHolvoetResultWriter extends ResultWriter {
  // Caches parsed .properties files per scenario file to avoid re-reading
  // them for every simulation result.
  final Map<File, Map<String, String>> scenarioPropsCache;
  // Path to the dataset directory containing <problemClass>-<instance>.properties files.
  final String dataset;
  // When false, receive() is a no-op and no intermediate files are written.
  boolean createTmpResultFiles;

  /**
   * @param target destination directory (handled by the superclass).
   * @param objFunc objective function used for the sim outputs.
   * @param datasetPath directory containing the scenario .properties files.
   * @param rt whether this is a real-time experiment (enables time logs).
   * @param createFinalFiles forwarded to the superclass.
   * @param createTmpFiles whether to write per-result intermediate files.
   * @param minimizeIO forwarded to the superclass.
   */
  public VanLonHolvoetResultWriter(File target,
      Gendreau06ObjectiveFunction objFunc, String datasetPath, boolean rt,
      boolean createFinalFiles, boolean createTmpFiles, boolean minimizeIO) {
    super(target, objFunc, rt, createFinalFiles, minimizeIO);
    createTmpResultFiles = createTmpFiles;
    dataset = datasetPath;
    scenarioPropsCache = new LinkedHashMap<>();
  }

  /**
   * Appends the result to a per-configuration CSV (creating it with a header
   * on first use), writes the time log in real-time mode, and dumps bid
   * computation-time measurements. No-op when tmp result files are disabled.
   */
  @Override
  public void receive(SimulationResult result) {
    if (createTmpResultFiles) {
      final String configName = result.getSimArgs().getMasConfig().getName();
      final File targetFile = new File(experimentDirectory, configName + ".csv");
      if (!targetFile.exists()) {
        createCSVWithHeader(targetFile);
      }
      appendSimResult(result, targetFile);
      if (realtime) {
        writeTimeLog(result);
      }
      writeBidComputationTimeMeasurements(result);
    }
  }

  /**
   * Writes two CSV files per simulation under "computation-time-stats": one
   * listing auction events and one listing per-bidder solver time
   * measurements. Skipped when the result object is not a SimResult or when
   * either collection is empty.
   */
  void writeBidComputationTimeMeasurements(SimulationResult result) {
    if (!(result.getResultObject() instanceof SimResult)) {
      return;
    }
    final SimResult info = (SimResult) result.getResultObject();
    if (!info.getAuctionEvents().isEmpty()
        && !info.getTimeMeasurements().isEmpty()) {
      final SimArgs simArgs = result.getSimArgs();
      final Scenario scenario = simArgs.getScenario();

      // Unique id: config-problemClass-instance-seed-repetition.
      final String id = Joiner.on("-").join(
        simArgs.getMasConfig().getName(),
        scenario.getProblemClass().getId(),
        scenario.getProblemInstanceId(),
        simArgs.getRandomSeed(),
        simArgs.getRepetition());

      File statsDir = new File(experimentDirectory, "computation-time-stats");
      statsDir.mkdirs();
      final File auctionsFile = new File(statsDir, id + "-auctions.csv");
      final File compFile = new File(statsDir, id + "-bid-computations.csv");

      StringBuilder auctionContents = new StringBuilder();
      auctionContents.append("auction_start,auction_end,num_bids")
        .append(System.lineSeparator());
      for (AuctionEvent e : info.getAuctionEvents()) {
        Joiner.on(",").appendTo(auctionContents,
          e.getAuctionStartTime(), e.getTime(), e.getNumBids());
        auctionContents.append(System.lineSeparator());
      }

      ImmutableListMultimap<Bidder<?>, SolverTimeMeasurement> measurements =
        info.getTimeMeasurements();
      StringBuilder compContents = new StringBuilder();
      compContents
        .append("bidder_id,comp_start_sim_time,route_length,duration_ns")
        .append(System.lineSeparator());
      // Bidders are identified by their iteration index, not a stable id.
      int bidderId = 0;
      for (Bidder<?> bidder : measurements.keySet()) {
        List<SolverTimeMeasurement> ms = measurements.get(bidder);
        for (SolverTimeMeasurement m : ms) {
          // int available = m.input().getAvailableParcels().size();
          // int total = GlobalStateObjects.allParcels(m.input()).size();
          // int pickedUp = total - available;
          // (available * 2) + pickedUp;
          int routeLength =
            m.input().getVehicles().get(0).getRoute().get().size();
          Joiner.on(",").appendTo(compContents,
            bidderId, m.input().getTime(), routeLength, m.durationNs());
          compContents.append(System.lineSeparator());
        }
        bidderId++;
      }
      try {
        Files.write(auctionContents, auctionsFile, Charsets.UTF_8);
        Files.write(compContents, compFile, Charsets.UTF_8);
      } catch (IOException e1) {
        throw new IllegalStateException(e1);
      }
    }
  }

  /** Appends one CSV row for {@code sr} to {@code destFile}. */
  @Override
  void appendSimResult(SimulationResult sr, File destFile) {
    try {
      String line = appendTo(sr, new StringBuilder()).toString();
      Files.append(line, destFile, Charsets.UTF_8);
    } catch (final IOException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Reads and caches the scenario .properties file {@code f} as a key/value
   * map (lines of the form "key = value").
   */
  Map<String, String> getScenarioProps(File f) {
    if (scenarioPropsCache.containsKey(f)) {
      return scenarioPropsCache.get(f);
    }
    try {
      List<String> propsStrings = Files.readLines(f, Charsets.UTF_8);
      final Map<String, String> properties = Splitter.on("\n")
        .withKeyValueSeparator(" = ")
        .split(Joiner.on("\n").join(propsStrings));
      scenarioPropsCache.put(f, properties);
      return properties;
    } catch (IOException e) {
      throw new IllegalStateException(e);
    }
  }

  /**
   * Builds one CSV row for {@code sr}: scenario identity, scenario properties
   * looked up from the dataset file, seed/repetition, and the simulation
   * outputs, terminated by a line separator.
   */
  StringBuilder appendTo(SimulationResult sr, StringBuilder sb) {
    final String pc = sr.getSimArgs().getScenario().getProblemClass().getId();
    final String id = sr.getSimArgs().getScenario().getProblemInstanceId();
    final String scenarioName = Joiner.on("-").join(pc, id);
    File scenFile = new File(new StringBuilder().append(dataset).append("/")
      .append(scenarioName).append(".properties").toString());
    final Map<String, String> properties = getScenarioProps(scenFile);
    final ImmutableMap.Builder<Enum<?>, Object> map =
      ImmutableMap.<Enum<?>, Object>builder()
        .put(OutputFields.SCENARIO_ID, scenarioName)
        .put(OutputFields.DYNAMISM, properties.get("dynamism_bin"))
        .put(OutputFields.URGENCY, properties.get("urgency"))
        .put(OutputFields.SCALE, properties.get("scale"))
        .put(OutputFields.NUM_ORDERS, properties.get("AddParcelEvent"))
        .put(OutputFields.NUM_VEHICLES, properties.get("AddVehicleEvent"))
        .put(OutputFields.RANDOM_SEED, sr.getSimArgs().getRandomSeed())
        .put(OutputFields.REPETITION, sr.getSimArgs().getRepetition());
    addSimOutputs(map, sr, objectiveFunction);
    return appendValuesTo(sb, map.build(), getFields())
      .append(System.lineSeparator());
  }

  /** The CSV column set for this writer. */
  @Override
  Iterable<Enum<?>> getFields() {
    return ImmutableList.<Enum<?>>copyOf(OutputFields.values());
  }

  /**
   * Appends the values of {@code props} to {@code sb} as a comma-separated
   * list, in the order given by {@code keys}; every key must be present.
   */
  static <T extends Enum<?>> StringBuilder appendValuesTo(StringBuilder sb,
      Map<T, Object> props, Iterable<T> keys) {
    final List<Object> values = new ArrayList<>();
    for (final T p : keys) {
      checkArgument(props.containsKey(p));
      values.add(props.get(p));
    }
    Joiner.on(",").appendTo(sb, values);
    return sb;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.db.partitions;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;

import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.EncodingStats;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.rows.Rows;
import org.apache.cassandra.index.transactions.UpdateTransaction;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.ObjectSizes;
import org.apache.cassandra.utils.btree.BTree;
import org.apache.cassandra.utils.btree.UpdateFunction;
import org.apache.cassandra.utils.concurrent.Locks;
import org.apache.cassandra.utils.concurrent.OpOrder;
import org.apache.cassandra.utils.memory.HeapAllocator;
import org.apache.cassandra.utils.memory.MemtableAllocator;

/**
 * A thread-safe and atomic Partition implementation.
 *
 * Operations (in particular addAll) on this implementation are atomic and
 * isolated (in the sense of ACID). In particular, a caller of addAll is
 * guaranteed that no other thread can see a state where only some, but not
 * all, of the rows have been added.
 *
 * Concurrency strategy: updates are applied optimistically via a
 * compare-and-set on the {@code ref} holder; when CAS contention wastes too
 * much allocation (tracked in {@code wasteTracker}), the partition falls back
 * to pessimistic monitor locking.
 */
public class AtomicBTreePartition extends AbstractBTreePartition
{
    // Baseline heap footprint of an empty partition, used by memtable accounting.
    public static final long EMPTY_SIZE = ObjectSizes.measure(new AtomicBTreePartition(CFMetaData.createFake("keyspace", "table"),
                                                                                       DatabaseDescriptor.getPartitioner().decorateKey(ByteBuffer.allocate(1)),
                                                                                       null));

    // Reserved values for wasteTracker field. These values must not be consecutive (see avoidReservedValues)
    private static final int TRACKER_NEVER_WASTED = 0;
    private static final int TRACKER_PESSIMISTIC_LOCKING = Integer.MAX_VALUE;

    // The granularity with which we track wasted allocation/work; we round up
    private static final int ALLOCATION_GRANULARITY_BYTES = 1024;
    // The number of bytes we have to waste in excess of our acceptable realtime rate of waste (defined below)
    private static final long EXCESS_WASTE_BYTES = 10 * 1024 * 1024L;
    private static final int EXCESS_WASTE_OFFSET = (int) (EXCESS_WASTE_BYTES / ALLOCATION_GRANULARITY_BYTES);
    // Note this is a shift, because dividing a long time and then picking the low 32 bits doesn't give correct rollover behavior
    private static final int CLOCK_SHIFT = 17;
    // CLOCK_GRANULARITY = 1^9ns >> CLOCK_SHIFT == 132us == (1/7.63)ms

    // Atomic field updaters: avoid per-instance Atomic* object overhead for the
    // (very numerous) partition instances.
    private static final AtomicIntegerFieldUpdater<AtomicBTreePartition> wasteTrackerUpdater = AtomicIntegerFieldUpdater.newUpdater(AtomicBTreePartition.class, "wasteTracker");
    private static final AtomicReferenceFieldUpdater<AtomicBTreePartition, Holder> refUpdater = AtomicReferenceFieldUpdater.newUpdater(AtomicBTreePartition.class, Holder.class, "ref");

    /**
     * (clock + allocation) granularity are combined to give us an acceptable (waste) allocation rate that is defined by
     * the passage of real time of ALLOCATION_GRANULARITY_BYTES/CLOCK_GRANULARITY, or in this case 7.63Kb/ms, or 7.45Mb/s
     *
     * in wasteTracker we maintain within EXCESS_WASTE_OFFSET before the current time; whenever we waste bytes
     * we increment the current value if it is within this window, and set it to the min of the window plus our waste
     * otherwise.
     */
    private volatile int wasteTracker = TRACKER_NEVER_WASTED;

    private final MemtableAllocator allocator;
    // Current immutable snapshot of the partition; replaced wholesale via CAS.
    private volatile Holder ref;

    public AtomicBTreePartition(CFMetaData metadata, DecoratedKey partitionKey, MemtableAllocator allocator)
    {
        // involved in potential bug? partition columns may be a subset if we alter columns while it's in memtable
        super(metadata, partitionKey);
        this.allocator = allocator;
        this.ref = EMPTY;
    }

    protected Holder holder()
    {
        return ref;
    }

    protected boolean canHaveShadowedData()
    {
        return true;
    }

    /**
     * Adds a given update to this in-memtable partition.
     *
     * Retries the merge in an optimistic CAS loop; after enough wasted work it
     * escalates to a monitor lock (see updateWastedAllocationTracker).
     *
     * @return an array containing first the difference in size seen after merging the updates, and second the minimum
     * time delta between updates.
     */
    public long[] addAllWithSizeDelta(final PartitionUpdate update, OpOrder.Group writeOp, UpdateTransaction indexer)
    {
        RowUpdater updater = new RowUpdater(this, allocator, writeOp, indexer);
        DeletionInfo inputDeletionInfoCopy = null;
        boolean monitorOwned = false;
        try
        {
            if (usePessimisticLocking())
            {
                Locks.monitorEnterUnsafe(this);
                monitorOwned = true;
            }

            indexer.start();

            while (true)
            {
                // Snapshot the current state; the CAS below only succeeds if it is still current.
                Holder current = ref;
                updater.ref = current;
                updater.reset();

                if (!update.deletionInfo().getPartitionDeletion().isLive())
                    indexer.onPartitionDeletion(update.deletionInfo().getPartitionDeletion());

                if (update.deletionInfo().hasRanges())
                    update.deletionInfo().rangeIterator(false).forEachRemaining(indexer::onRangeTombstone);

                DeletionInfo deletionInfo;
                if (update.deletionInfo().mayModify(current.deletionInfo))
                {
                    // Copy the incoming deletion info on-heap once, so retries don't re-copy.
                    if (inputDeletionInfoCopy == null)
                        inputDeletionInfoCopy = update.deletionInfo().copy(HeapAllocator.instance);

                    deletionInfo = current.deletionInfo.mutableCopy().add(inputDeletionInfoCopy);
                    updater.allocated(deletionInfo.unsharedHeapSize() - current.deletionInfo.unsharedHeapSize());
                }
                else
                {
                    deletionInfo = current.deletionInfo;
                }

                PartitionColumns columns = update.columns().mergeTo(current.columns);
                Row newStatic = update.staticRow();
                Row staticRow = newStatic.isEmpty() ? current.staticRow : (current.staticRow.isEmpty() ? updater.apply(newStatic) : updater.apply(current.staticRow, newStatic));
                Object[] tree = BTree.update(current.tree, update.metadata().comparator, update, update.rowCount(), updater);
                EncodingStats newStats = current.stats.mergeWith(update.stats());

                // tree == null means the updater aborted early (ref changed under us).
                if (tree != null && refUpdater.compareAndSet(this, current, new Holder(columns, tree, deletionInfo, staticRow, newStats)))
                {
                    updater.finish();
                    return new long[]{ updater.dataSize, updater.colUpdateTimeDelta };
                }
                else if (!monitorOwned)
                {
                    // Lost the race: account the wasted allocation and possibly
                    // switch to pessimistic locking before retrying.
                    boolean shouldLock = usePessimisticLocking();
                    if (!shouldLock)
                    {
                        shouldLock = updateWastedAllocationTracker(updater.heapSize);
                    }
                    if (shouldLock)
                    {
                        Locks.monitorEnterUnsafe(this);
                        monitorOwned = true;
                    }
                }
            }
        }
        finally
        {
            indexer.commit();
            if (monitorOwned)
                Locks.monitorExitUnsafe(this);
        }
    }

    public boolean usePessimisticLocking()
    {
        return wasteTracker == TRACKER_PESSIMISTIC_LOCKING;
    }

    /**
     * Update the wasted allocation tracker state based on newly wasted allocation information
     *
     * @param wastedBytes the number of bytes wasted by this thread
     * @return true if the caller should now proceed with pessimistic locking because the waste limit has been reached
     */
    private boolean updateWastedAllocationTracker(long wastedBytes)
    {
        // Early check for huge allocation that exceeds the limit
        if (wastedBytes < EXCESS_WASTE_BYTES)
        {
            // We round up to ensure work < granularity are still accounted for
            int wastedAllocation = ((int) (wastedBytes + ALLOCATION_GRANULARITY_BYTES - 1)) / ALLOCATION_GRANULARITY_BYTES;

            int oldTrackerValue;
            while (TRACKER_PESSIMISTIC_LOCKING != (oldTrackerValue = wasteTracker))
            {
                // Note this time value has an arbitrary offset, but is a constant rate 32 bit counter (that may wrap)
                int time = (int) (System.nanoTime() >>> CLOCK_SHIFT);
                int delta = oldTrackerValue - time;
                // A "never wasted" tracker, or one outside the window (including wrap-around), resets to the window floor.
                if (oldTrackerValue == TRACKER_NEVER_WASTED || delta >= 0 || delta < -EXCESS_WASTE_OFFSET)
                    delta = -EXCESS_WASTE_OFFSET;
                delta += wastedAllocation;
                if (delta >= 0)
                    break;
                if (wasteTrackerUpdater.compareAndSet(this, oldTrackerValue, avoidReservedValues(time + delta)))
                    return false;
            }
        }
        // We have definitely reached our waste limit so set the state if it isn't already
        wasteTrackerUpdater.set(this, TRACKER_PESSIMISTIC_LOCKING);
        // And tell the caller to proceed with pessimistic locking
        return true;
    }

    // Nudge a computed tracker value off the two reserved sentinels (safe because they are non-consecutive).
    private static int avoidReservedValues(int wasteTracker)
    {
        if (wasteTracker == TRACKER_NEVER_WASTED || wasteTracker == TRACKER_PESSIMISTIC_LOCKING)
            return wasteTracker + 1;
        return wasteTracker;
    }

    // the function we provide to the btree utilities to perform any column replacements
    private static final class RowUpdater implements UpdateFunction<Row, Row>
    {
        final AtomicBTreePartition updating;
        final MemtableAllocator allocator;
        final OpOrder.Group writeOp;
        final UpdateTransaction indexer;
        final int nowInSec;
        Holder ref;                      // snapshot this update attempt is based on
        Row.Builder regularBuilder;      // reused builder for non-static rows
        long dataSize;                   // net change in data size for this attempt
        long heapSize;                   // net change in on-heap size for this attempt
        long colUpdateTimeDelta = Long.MAX_VALUE;
        final MemtableAllocator.DataReclaimer reclaimer;
        List<Row> inserted; // TODO: replace with walk of aborted BTree

        private RowUpdater(AtomicBTreePartition updating, MemtableAllocator allocator, OpOrder.Group writeOp, UpdateTransaction indexer)
        {
            this.updating = updating;
            this.allocator = allocator;
            this.writeOp = writeOp;
            this.indexer = indexer;
            this.nowInSec = FBUtilities.nowInSeconds();
            this.reclaimer = allocator.reclaimer();
        }

        private Row.Builder builder(Clustering clustering)
        {
            boolean isStatic = clustering == Clustering.STATIC_CLUSTERING;
            // We know we only insert/update one static per PartitionUpdate, so no point in saving the builder
            if (isStatic)
                return allocator.rowBuilder(writeOp);

            if (regularBuilder == null)
                regularBuilder = allocator.rowBuilder(writeOp);
            return regularBuilder;
        }

        // Insert path: copy the new row into memtable-owned memory and notify the indexer.
        public Row apply(Row insert)
        {
            Row data = Rows.copy(insert, builder(insert.clustering())).build();
            indexer.onInserted(insert);

            this.dataSize += data.dataSize();
            this.heapSize += data.unsharedHeapSizeExcludingData();
            if (inserted == null)
                inserted = new ArrayList<>();
            inserted.add(data);
            return data;
        }

        // Merge path: reconcile an existing row with the update and account the size delta.
        public Row apply(Row existing, Row update)
        {
            Row.Builder builder = builder(existing.clustering());
            colUpdateTimeDelta = Math.min(colUpdateTimeDelta, Rows.merge(existing, update, builder, nowInSec));

            Row reconciled = builder.build();

            indexer.onUpdated(existing, reconciled);

            dataSize += reconciled.dataSize() - existing.dataSize();
            heapSize += reconciled.unsharedHeapSizeExcludingData() - existing.unsharedHeapSizeExcludingData();
            if (inserted == null)
                inserted = new ArrayList<>();
            inserted.add(reconciled);
            discard(existing);

            return reconciled;
        }

        // Roll back bookkeeping from a failed CAS attempt before retrying.
        protected void reset()
        {
            this.dataSize = 0;
            this.heapSize = 0;
            if (inserted != null)
            {
                for (Row row : inserted)
                    abort(row);
                inserted.clear();
            }
            reclaimer.cancel();
        }

        protected void abort(Row abort)
        {
            reclaimer.reclaimImmediately(abort);
        }

        protected void discard(Row discard)
        {
            reclaimer.reclaim(discard);
        }

        // True when another thread has already replaced the holder we started from.
        public boolean abortEarly()
        {
            return updating.ref != ref;
        }

        public void allocated(long heapSize)
        {
            this.heapSize += heapSize;
        }

        // Commit bookkeeping after a successful CAS.
        protected void finish()
        {
            allocator.onHeap().adjust(heapSize, writeOp);
            reclaimer.commit();
        }
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver15; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import io.netty.buffer.ByteBuf; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFBsnTlvIntervalVer15 implements OFBsnTlvInterval { private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvIntervalVer15.class); // version: 1.5 final static byte WIRE_VERSION = 6; final static int LENGTH = 8; private final static long DEFAULT_VALUE = 0x0L; // OF message fields private final long value; // // Immutable default instance final static OFBsnTlvIntervalVer15 DEFAULT = new OFBsnTlvIntervalVer15( 
DEFAULT_VALUE ); // package private constructor - used by readers, builders, and factory OFBsnTlvIntervalVer15(long value) { this.value = U32.normalize(value); } // Accessors for OF message fields @Override public int getType() { return 0x3a; } @Override public long getValue() { return value; } @Override public OFVersion getVersion() { return OFVersion.OF_15; } public OFBsnTlvInterval.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFBsnTlvInterval.Builder { final OFBsnTlvIntervalVer15 parentMessage; // OF message fields private boolean valueSet; private long value; BuilderWithParent(OFBsnTlvIntervalVer15 parentMessage) { this.parentMessage = parentMessage; } @Override public int getType() { return 0x3a; } @Override public long getValue() { return value; } @Override public OFBsnTlvInterval.Builder setValue(long value) { this.value = value; this.valueSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_15; } @Override public OFBsnTlvInterval build() { long value = this.valueSet ? this.value : parentMessage.value; // return new OFBsnTlvIntervalVer15( value ); } } static class Builder implements OFBsnTlvInterval.Builder { // OF message fields private boolean valueSet; private long value; @Override public int getType() { return 0x3a; } @Override public long getValue() { return value; } @Override public OFBsnTlvInterval.Builder setValue(long value) { this.value = value; this.valueSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_15; } // @Override public OFBsnTlvInterval build() { long value = this.valueSet ? 
this.value : DEFAULT_VALUE; return new OFBsnTlvIntervalVer15( value ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFBsnTlvInterval> { @Override public OFBsnTlvInterval readFrom(ByteBuf bb) throws OFParseError { int start = bb.readerIndex(); // fixed value property type == 0x3a short type = bb.readShort(); if(type != (short) 0x3a) throw new OFParseError("Wrong type: Expected=0x3a(0x3a), got="+type); int length = U16.f(bb.readShort()); if(length != 8) throw new OFParseError("Wrong length: Expected=8(8), got="+length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); long value = U32.f(bb.readInt()); OFBsnTlvIntervalVer15 bsnTlvIntervalVer15 = new OFBsnTlvIntervalVer15( value ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", bsnTlvIntervalVer15); return bsnTlvIntervalVer15; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFBsnTlvIntervalVer15Funnel FUNNEL = new OFBsnTlvIntervalVer15Funnel(); static class OFBsnTlvIntervalVer15Funnel implements Funnel<OFBsnTlvIntervalVer15> { private static final long serialVersionUID = 1L; @Override public void funnel(OFBsnTlvIntervalVer15 message, PrimitiveSink sink) { // fixed value property type = 0x3a sink.putShort((short) 0x3a); // fixed value property length = 8 sink.putShort((short) 0x8); sink.putLong(message.value); } } public void writeTo(ByteBuf bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFBsnTlvIntervalVer15> { @Override public void write(ByteBuf bb, OFBsnTlvIntervalVer15 message) { // fixed value property type = 0x3a bb.writeShort((short) 0x3a); // fixed value property length = 8 bb.writeShort((short) 0x8); bb.writeInt(U32.t(message.value)); } } @Override public String toString() { 
StringBuilder b = new StringBuilder("OFBsnTlvIntervalVer15("); b.append("value=").append(value); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnTlvIntervalVer15 other = (OFBsnTlvIntervalVer15) obj; if( value != other.value) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * (int) (value ^ (value >>> 32)); return result; } }
package com.stc.pattysmorestuff.tileentity.furnaces;

import com.stc.pattysmorestuff.blocks.furnaces.BlockObsidianFurnace;
import com.stc.pattysmorestuff.configuration.ConfigPreInit;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.inventory.*;
import net.minecraft.item.*;
import net.minecraft.item.crafting.FurnaceRecipes;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntityLockable;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ITickable;
import net.minecraft.util.NonNullList;
import net.minecraft.util.datafix.DataFixer;
import net.minecraft.util.datafix.FixTypes;
import net.minecraft.util.datafix.walkers.ItemStackDataLists;
import net.minecraft.util.math.MathHelper;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

/**
 * Tile entity for the Obsidian Furnace. Structurally a copy of vanilla
 * 1.12 TileEntityFurnace with cook speed and fuel burn times driven by
 * {@link ConfigPreInit} values. Slot layout: 0 = input, 1 = fuel, 2 = output.
 */
public class TileEntityObsidianFurnace extends TileEntityLockable implements ITickable, ISidedInventory
{
    // Automation slot access per face: top -> input, bottom -> output then fuel, sides -> fuel.
    private static final int[] SLOTS_TOP = new int[] {0};
    private static final int[] SLOTS_BOTTOM = new int[] {2, 1};
    private static final int[] SLOTS_SIDES = new int[] {1};
    /** The ItemStacks that hold the items currently being used in the furnace */
    private NonNullList<ItemStack> furnaceItemStacks = NonNullList.<ItemStack>withSize(3, ItemStack.EMPTY);
    /** The number of ticks that the furnace will keep burning */
    private int furnaceBurnTime;
    /** The number of ticks that a fresh copy of the currently-burning item would keep the furnace burning for */
    private int currentItemBurnTime;
    private int cookTime;       // progress (ticks) on the current input item
    private int totalCookTime;  // ticks required to finish the current input item
    private String furnaceCustomName;

    /**
     * Returns the number of slots in the inventory.
     */
    public int getSizeInventory()
    {
        return this.furnaceItemStacks.size();
    }

    public boolean isEmpty()
    {
        for (ItemStack itemstack : this.furnaceItemStacks)
        {
            if (!itemstack.isEmpty())
            {
                return false;
            }
        }

        return true;
    }

    /**
     * Returns the stack in the given slot.
     */
    public ItemStack getStackInSlot(int index)
    {
        return this.furnaceItemStacks.get(index);
    }

    /**
     * Removes up to a specified number of items from an inventory slot and returns them in a new stack.
     */
    public ItemStack decrStackSize(int index, int count)
    {
        return ItemStackHelper.getAndSplit(this.furnaceItemStacks, index, count);
    }

    /**
     * Removes a stack from the given slot and returns it.
     */
    public ItemStack removeStackFromSlot(int index)
    {
        return ItemStackHelper.getAndRemove(this.furnaceItemStacks, index);
    }

    /**
     * Sets the given item stack to the specified slot in the inventory (can be crafting or armor sections).
     */
    public void setInventorySlotContents(int index, ItemStack stack)
    {
        ItemStack itemstack = (ItemStack)this.furnaceItemStacks.get(index);
        boolean flag = !stack.isEmpty() && stack.isItemEqual(itemstack) && ItemStack.areItemStackTagsEqual(stack, itemstack);
        this.furnaceItemStacks.set(index, stack);

        if (stack.getCount() > this.getInventoryStackLimit())
        {
            stack.setCount(this.getInventoryStackLimit());
        }

        // A genuinely new input item restarts cooking progress.
        if (index == 0 && !flag)
        {
            this.totalCookTime = this.getCookTime(stack);
            this.cookTime = 0;
            this.markDirty();
        }
    }

    /**
     * Get the name of this object. For players this returns their username
     */
    public String getName()
    {
        return this.hasCustomName() ? this.furnaceCustomName : "Obsidian Furnace";
    }

    /**
     * Returns true if this thing is named
     */
    public boolean hasCustomName()
    {
        return this.furnaceCustomName != null && !this.furnaceCustomName.isEmpty();
    }

    public void setCustomInventoryName(String p_145951_1_)
    {
        this.furnaceCustomName = p_145951_1_;
    }

    // Registers the item-list data fixer so saved inventories survive format upgrades.
    public static void registerFixesFurnace(DataFixer fixer)
    {
        fixer.registerWalker(FixTypes.BLOCK_ENTITY, new ItemStackDataLists(TileEntityObsidianFurnace.class, new String[] {"Items"}));
    }

    public void readFromNBT(NBTTagCompound compound)
    {
        super.readFromNBT(compound);
        this.furnaceItemStacks = NonNullList.<ItemStack>withSize(this.getSizeInventory(), ItemStack.EMPTY);
        ItemStackHelper.loadAllItems(compound, this.furnaceItemStacks);
        this.furnaceBurnTime = compound.getInteger("BurnTime");
        this.cookTime = compound.getInteger("CookTime");
        this.totalCookTime = compound.getInteger("CookTimeTotal");
        // Recompute rather than persist: burn time of the current fuel item.
        this.currentItemBurnTime = getItemBurnTime(this.furnaceItemStacks.get(1));

        if (compound.hasKey("CustomName", 8))
        {
            this.furnaceCustomName = compound.getString("CustomName");
        }
    }

    public NBTTagCompound writeToNBT(NBTTagCompound compound)
    {
        super.writeToNBT(compound);
        // NOTE(review): the (short) casts mirror vanilla TileEntityFurnace, but they
        // truncate values above 32767 — verify configured burn/cook times stay below that.
        compound.setInteger("BurnTime", (short)this.furnaceBurnTime);
        compound.setInteger("CookTime", (short)this.cookTime);
        compound.setInteger("CookTimeTotal", (short)this.totalCookTime);
        ItemStackHelper.saveAllItems(compound, this.furnaceItemStacks);

        if (this.hasCustomName())
        {
            compound.setString("CustomName", this.furnaceCustomName);
        }

        return compound;
    }

    /**
     * Returns the maximum stack size for a inventory slot. Seems to always be 64, possibly will be extended.
     */
    public int getInventoryStackLimit()
    {
        return 64;
    }

    /**
     * Furnace isBurning
     */
    public boolean isBurning()
    {
        return this.furnaceBurnTime > 0;
    }

    // Client-side variant: reads burn time through the container field sync (field 0).
    @SideOnly(Side.CLIENT)
    public static boolean isBurning(IInventory inventory)
    {
        return inventory.getField(0) > 0;
    }

    /**
     * Like the old updateEntity(), except more generic.
     * Server-side per-tick logic: consume fuel, advance cooking, decay progress
     * when unlit, and sync the block's lit state.
     */
    public void update()
    {
        boolean flag = this.isBurning();
        boolean flag1 = false;

        if (this.isBurning())
        {
            --this.furnaceBurnTime;
        }

        if (!this.world.isRemote)
        {
            ItemStack itemstack = this.furnaceItemStacks.get(1);

            if (this.isBurning() || !itemstack.isEmpty() && !((ItemStack)this.furnaceItemStacks.get(0)).isEmpty())
            {
                // Ignite from a new fuel item if we can smelt something.
                if (!this.isBurning() && this.canSmelt())
                {
                    this.furnaceBurnTime = getItemBurnTime(itemstack);
                    this.currentItemBurnTime = this.furnaceBurnTime;

                    if (this.isBurning())
                    {
                        flag1 = true;

                        if (!itemstack.isEmpty())
                        {
                            Item item = itemstack.getItem();
                            itemstack.shrink(1);

                            if (itemstack.isEmpty())
                            {
                                // e.g. lava bucket -> empty bucket
                                ItemStack item1 = item.getContainerItem(itemstack);
                                this.furnaceItemStacks.set(1, item1);
                            }
                        }
                    }
                }

                if (this.isBurning() && this.canSmelt())
                {
                    ++this.cookTime;

                    if (this.cookTime == this.totalCookTime)
                    {
                        this.cookTime = 0;
                        this.totalCookTime = this.getCookTime(this.furnaceItemStacks.get(0));
                        this.smeltItem();
                        flag1 = true;
                    }
                }
                else
                {
                    this.cookTime = 0;
                }
            }
            else if (!this.isBurning() && this.cookTime > 0)
            {
                // Unlit: progress decays twice as fast as it accumulated.
                this.cookTime = MathHelper.clamp(this.cookTime - 2, 0, this.totalCookTime);
            }

            if (flag != this.isBurning())
            {
                flag1 = true;
                BlockObsidianFurnace.setState(this.isBurning(), this.world, this.pos);
            }
        }

        if (flag1)
        {
            this.markDirty();
        }
    }

    //Lower the number the faster items cook/smelt (ticks per item, from config)
    public int getCookTime(ItemStack stack)
    {
        return ConfigPreInit.obsidianFurnaceCookSpeed;
    }

    /**
     * Returns true if the furnace can smelt an item, i.e. has a source item, destination stack isn't full, etc.
     */
    private boolean canSmelt()
    {
        if (((ItemStack)this.furnaceItemStacks.get(0)).isEmpty())
        {
            return false;
        }
        else
        {
            ItemStack itemstack = FurnaceRecipes.instance().getSmeltingResult(this.furnaceItemStacks.get(0));

            if (itemstack.isEmpty())
            {
                return false;
            }
            else
            {
                ItemStack itemstack1 = this.furnaceItemStacks.get(2);

                if (itemstack1.isEmpty())
                {
                    return true;
                }
                else if (!itemstack1.isItemEqual(itemstack))
                {
                    return false;
                }
                else if (itemstack1.getCount() + itemstack.getCount() <= this.getInventoryStackLimit() && itemstack1.getCount() + itemstack.getCount() <= itemstack1.getMaxStackSize()) // Forge fix: make furnace respect stack sizes in furnace recipes
                {
                    return true;
                }
                else
                {
                    return itemstack1.getCount() + itemstack.getCount() <= itemstack.getMaxStackSize(); // Forge fix: make furnace respect stack sizes in furnace recipes
                }
            }
        }
    }

    /**
     * Turn one item from the furnace source stack into the appropriate smelted item in the furnace result stack
     */
    public void smeltItem()
    {
        if (this.canSmelt())
        {
            ItemStack itemstack = (ItemStack)this.furnaceItemStacks.get(0);
            ItemStack itemstack1 = FurnaceRecipes.instance().getSmeltingResult(itemstack);
            ItemStack itemstack2 = (ItemStack)this.furnaceItemStacks.get(2);

            if (itemstack2.isEmpty())
            {
                this.furnaceItemStacks.set(2, itemstack1.copy());
            }
            else if (itemstack2.getItem() == itemstack1.getItem())
            {
                itemstack2.grow(itemstack1.getCount());
            }

            // Vanilla special case: smelting a wet sponge fills a bucket in the fuel slot.
            if (itemstack.getItem() == Item.getItemFromBlock(Blocks.SPONGE) && itemstack.getMetadata() == 1 && !((ItemStack)this.furnaceItemStacks.get(1)).isEmpty() && ((ItemStack)this.furnaceItemStacks.get(1)).getItem() == Items.BUCKET)
            {
                this.furnaceItemStacks.set(1, new ItemStack(Items.WATER_BUCKET));
            }

            itemstack.shrink(1);
        }
    }

    /**
     * Returns the number of ticks that the supplied fuel item will keep the furnace burning, or 0 if the item isn't
     * fuel. Vanilla fuel table with the constants replaced by config values;
     * anything unmatched falls through to the Forge fuel registry.
     */
    public static int getItemBurnTime(ItemStack stack)
    {
        if (stack.isEmpty())
        {
            return 0;
        }
        else
        {
            Item item = stack.getItem();

            if (item instanceof net.minecraft.item.ItemBlock && Block.getBlockFromItem(item) != Blocks.AIR)
            {
                Block block = Block.getBlockFromItem(item);

                if (block == Blocks.WOODEN_SLAB)
                {
                    return ConfigPreInit.obsidianFWoodenSlabBurnTime;
                }

                if (block.getDefaultState().getMaterial() == Material.WOOD)
                {
                    return ConfigPreInit.obsidianFWoodMaterialBurnTime;
                }

                if (block == Blocks.COAL_BLOCK)
                {
                    return ConfigPreInit.obsidianFCoalBlockBurnTime;
                }
            }

            if (item instanceof ItemTool && "WOOD".equals(((ItemTool)item).getToolMaterialName())) return ConfigPreInit.obsidianFToolWoodBurnTime;
            if (item instanceof ItemSword && "WOOD".equals(((ItemSword)item).getToolMaterialName())) return ConfigPreInit.obsidianFSwordWoodBurnTime;
            if (item instanceof ItemHoe && "WOOD".equals(((ItemHoe)item).getMaterialName())) return ConfigPreInit.obsidianFHoeWoodBurnTime;
            if (item == Items.STICK) return ConfigPreInit.obsidianFStickBurnTime;
            if (item == Items.COAL) return ConfigPreInit.obsidianFCoalBurnTime;
            if (item == Items.LAVA_BUCKET) return ConfigPreInit.obsidianFLavaBucketBurnTime;
            if (item == Item.getItemFromBlock(Blocks.SAPLING)) return ConfigPreInit.obsidianFSaplingBurnTime;
            if (item == Items.BLAZE_ROD) return ConfigPreInit.obsidianFBlazeRodBurnTime;
            return net.minecraftforge.fml.common.registry.GameRegistry.getFuelValue(stack);
        }
    }

    public static boolean isItemFuel(ItemStack stack)
    {
        /**
         * Returns the number of ticks that the supplied fuel item will keep the furnace burning, or 0 if the item isn't
         * fuel
         */
        return getItemBurnTime(stack) > 0;
    }

    /**
     * Don't rename this method to canInteractWith due to conflicts with Container
     */
    public boolean isUsableByPlayer(EntityPlayer player)
    {
        if (this.world.getTileEntity(this.pos) != this)
        {
            return false;
        }
        else
        {
            // 64.0D = 8 blocks squared, the standard interaction range
            return player.getDistanceSq((double)this.pos.getX() + 0.5D, (double)this.pos.getY() + 0.5D, (double)this.pos.getZ() + 0.5D) <= 64.0D;
        }
    }

    public void openInventory(EntityPlayer player)
    {
    }

    public void closeInventory(EntityPlayer player)
    {
    }

    /**
     * Returns true if automation is allowed to insert the given stack (ignoring stack size) into the given slot. For
     * guis use Slot.isItemValid
     */
    public boolean isItemValidForSlot(int index, ItemStack stack)
    {
        if (index == 2)
        {
            return false;
        }
        else if (index != 1)
        {
            return true;
        }
        else
        {
            // Fuel slot: fuel items, or an empty bucket only while no bucket is already there.
            ItemStack itemstack = (ItemStack)this.furnaceItemStacks.get(1);
            return isItemFuel(stack) || SlotFurnaceFuel.isBucket(stack) && itemstack.getItem() != Items.BUCKET;
        }
    }

    public int[] getSlotsForFace(EnumFacing side)
    {
        if (side == EnumFacing.DOWN)
        {
            return SLOTS_BOTTOM;
        }
        else
        {
            return side == EnumFacing.UP ? SLOTS_TOP : SLOTS_SIDES;
        }
    }

    /**
     * Returns true if automation can insert the given item in the given slot from the given side.
     */
    public boolean canInsertItem(int index, ItemStack itemStackIn, EnumFacing direction)
    {
        return this.isItemValidForSlot(index, itemStackIn);
    }

    /**
     * Returns true if automation can extract the given item in the given slot from the given side.
     */
    public boolean canExtractItem(int index, ItemStack stack, EnumFacing direction)
    {
        // From below, the fuel slot only yields buckets (spent lava buckets), never fuel.
        if (direction == EnumFacing.DOWN && index == 1)
        {
            Item item = stack.getItem();

            if (item != Items.WATER_BUCKET && item != Items.BUCKET)
            {
                return false;
            }
        }

        return true;
    }

    public String getGuiID()
    {
        return "minecraft:furnace";
    }

    public Container createContainer(InventoryPlayer playerInventory, EntityPlayer playerIn)
    {
        return new ContainerFurnace(playerInventory, this);
    }

    // Container field sync: 0 = burn time, 1 = item burn time, 2 = cook time, 3 = total cook time.
    public int getField(int id)
    {
        switch (id)
        {
            case 0:
                return this.furnaceBurnTime;
            case 1:
                return this.currentItemBurnTime;
            case 2:
                return this.cookTime;
            case 3:
                return this.totalCookTime;
            default:
                return 0;
        }
    }

    public void setField(int id, int value)
    {
        switch (id)
        {
            case 0:
                this.furnaceBurnTime = value;
                break;
            case 1:
                this.currentItemBurnTime = value;
                break;
            case 2:
                this.cookTime = value;
                break;
            case 3:
                this.totalCookTime = value;
        }
    }

    public int getFieldCount()
    {
        return 4;
    }

    public void clear()
    {
        this.furnaceItemStacks.clear();
    }

    // Sided item-handler wrappers for the Forge capability system.
    // NOTE(review): handlerSide is built with EnumFacing.WEST but is returned for
    // every horizontal face below — presumably intentional (all sides behave alike); confirm.
    net.minecraftforge.items.IItemHandler handlerTop = new net.minecraftforge.items.wrapper.SidedInvWrapper(this, net.minecraft.util.EnumFacing.UP);
    net.minecraftforge.items.IItemHandler handlerBottom = new net.minecraftforge.items.wrapper.SidedInvWrapper(this, net.minecraft.util.EnumFacing.DOWN);
    net.minecraftforge.items.IItemHandler handlerSide = new net.minecraftforge.items.wrapper.SidedInvWrapper(this, net.minecraft.util.EnumFacing.WEST);

    @SuppressWarnings("unchecked")
    @Override
    public <T> T getCapability(net.minecraftforge.common.capabilities.Capability<T> capability, net.minecraft.util.EnumFacing facing)
    {
        // NOTE(review): no hasCapability override is visible here — verify the
        // superclass (or a Forge patch to it) reports ITEM_HANDLER_CAPABILITY as present.
        if (facing != null && capability == net.minecraftforge.items.CapabilityItemHandler.ITEM_HANDLER_CAPABILITY)
            if (facing == EnumFacing.DOWN)
                return (T) handlerBottom;
            else if (facing == EnumFacing.UP)
                return (T) handlerTop;
            else
                return (T) handlerSide;
        return super.getCapability(capability, facing);
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/v1beta1/table_spec.proto

package com.google.cloud.datacatalog.v1beta1;

/**
 *
 *
 * <pre>
 * Normal BigQuery table spec.
 * </pre>
 *
 * Protobuf type {@code google.cloud.datacatalog.v1beta1.TableSpec}
 */
// NOTE(review): protoc-generated message with a single proto3 string field,
// grouped_entry (field number 1). Do not hand-edit; regenerate from
// table_spec.proto instead. Uses the older GeneratedMessageV3 code style with
// a reflective wire-parsing constructor.
public final class TableSpec extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.TableSpec)
    TableSpecOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use TableSpec.newBuilder() to construct.
  private TableSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Initializes the string field to its proto3 default (empty string).
  private TableSpec() {
    groupedEntry_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TableSpec();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-parsing constructor (legacy protoc output style): decodes the message
  // directly from the stream. Tag 10 (field 1, wire type 2) is grouped_entry;
  // unrecognized fields are preserved in unknownFields rather than dropped.
  private TableSpec(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 10:
            {
              // grouped_entry: string fields are validated as UTF-8 on read.
              java.lang.String s = input.readStringRequireUtf8();

              groupedEntry_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even when parsing failed partway.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datacatalog.v1beta1.TableSpecOuterClass
        .internal_static_google_cloud_datacatalog_v1beta1_TableSpec_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datacatalog.v1beta1.TableSpecOuterClass
        .internal_static_google_cloud_datacatalog_v1beta1_TableSpec_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datacatalog.v1beta1.TableSpec.class,
            com.google.cloud.datacatalog.v1beta1.TableSpec.Builder.class);
  }

  public static final int GROUPED_ENTRY_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; converted lazily and cached by the
  // getters below (standard generated-code idiom for string fields).
  private volatile java.lang.Object groupedEntry_;
  /**
   *
   *
   * <pre>
   * Output only. If the table is a dated shard, i.e., with name pattern `[prefix]YYYYMMDD`,
   * `grouped_entry` is the Data Catalog resource name of the date sharded
   * grouped entry, for example,
   * `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`.
   * Otherwise, `grouped_entry` is empty.
   * </pre>
   *
   * <code>
   * string grouped_entry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The groupedEntry.
   */
  @java.lang.Override
  public java.lang.String getGroupedEntry() {
    java.lang.Object ref = groupedEntry_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 conversion.
      groupedEntry_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Output only. If the table is a dated shard, i.e., with name pattern `[prefix]YYYYMMDD`,
   * `grouped_entry` is the Data Catalog resource name of the date sharded
   * grouped entry, for example,
   * `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`.
   * Otherwise, `grouped_entry` is empty.
   * </pre>
   *
   * <code>
   * string grouped_entry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for groupedEntry.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getGroupedEntryBytes() {
    java.lang.Object ref = groupedEntry_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString, mirroring getGroupedEntry().
      groupedEntry_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized. Trivially true
  // here since proto3 messages with no required fields are always initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: empty string fields are not serialized at all.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupedEntry_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, groupedEntry_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupedEntry_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, groupedEntry_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.TableSpec)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.v1beta1.TableSpec other =
        (com.google.cloud.datacatalog.v1beta1.TableSpec) obj;

    if (!getGroupedEntry().equals(other.getGroupedEntry())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + GROUPED_ENTRY_FIELD_NUMBER;
    hash = (53 * hash) + getGroupedEntry().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.datacatalog.v1beta1.TableSpec prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Normal BigQuery table spec.
   * </pre>
   *
   * Protobuf type {@code google.cloud.datacatalog.v1beta1.TableSpec}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.TableSpec)
      com.google.cloud.datacatalog.v1beta1.TableSpecOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.v1beta1.TableSpecOuterClass
          .internal_static_google_cloud_datacatalog_v1beta1_TableSpec_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.v1beta1.TableSpecOuterClass
          .internal_static_google_cloud_datacatalog_v1beta1_TableSpec_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.v1beta1.TableSpec.class,
              com.google.cloud.datacatalog.v1beta1.TableSpec.Builder.class);
    }

    // Construct using com.google.cloud.datacatalog.v1beta1.TableSpec.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // No repeated/message fields, so nothing to force-initialize here.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      groupedEntry_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.v1beta1.TableSpecOuterClass
          .internal_static_google_cloud_datacatalog_v1beta1_TableSpec_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.TableSpec getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.v1beta1.TableSpec.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.TableSpec build() {
      com.google.cloud.datacatalog.v1beta1.TableSpec result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.v1beta1.TableSpec buildPartial() {
      com.google.cloud.datacatalog.v1beta1.TableSpec result =
          new com.google.cloud.datacatalog.v1beta1.TableSpec(this);
      result.groupedEntry_ = groupedEntry_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datacatalog.v1beta1.TableSpec) {
        return mergeFrom((com.google.cloud.datacatalog.v1beta1.TableSpec) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.TableSpec other) {
      if (other == com.google.cloud.datacatalog.v1beta1.TableSpec.getDefaultInstance())
        return this;
      // Proto3 merge semantics: only non-default (non-empty) fields overwrite.
      if (!other.getGroupedEntry().isEmpty()) {
        groupedEntry_ = other.groupedEntry_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.datacatalog.v1beta1.TableSpec parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.datacatalog.v1beta1.TableSpec) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was successfully parsed, even on failure.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    // Same String/ByteString dual representation as in the message class.
    private java.lang.Object groupedEntry_ = "";
    /**
     *
     *
     * <pre>
     * Output only. If the table is a dated shard, i.e., with name pattern `[prefix]YYYYMMDD`,
     * `grouped_entry` is the Data Catalog resource name of the date sharded
     * grouped entry, for example,
     * `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`.
     * Otherwise, `grouped_entry` is empty.
     * </pre>
     *
     * <code>
     * string grouped_entry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The groupedEntry.
     */
    public java.lang.String getGroupedEntry() {
      java.lang.Object ref = groupedEntry_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        groupedEntry_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. If the table is a dated shard, i.e., with name pattern `[prefix]YYYYMMDD`,
     * `grouped_entry` is the Data Catalog resource name of the date sharded
     * grouped entry, for example,
     * `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`.
     * Otherwise, `grouped_entry` is empty.
     * </pre>
     *
     * <code>
     * string grouped_entry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for groupedEntry.
     */
    public com.google.protobuf.ByteString getGroupedEntryBytes() {
      java.lang.Object ref = groupedEntry_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        groupedEntry_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Output only. If the table is a dated shard, i.e., with name pattern `[prefix]YYYYMMDD`,
     * `grouped_entry` is the Data Catalog resource name of the date sharded
     * grouped entry, for example,
     * `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`.
     * Otherwise, `grouped_entry` is empty.
     * </pre>
     *
     * <code>
     * string grouped_entry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The groupedEntry to set.
     * @return This builder for chaining.
     */
    public Builder setGroupedEntry(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      groupedEntry_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. If the table is a dated shard, i.e., with name pattern `[prefix]YYYYMMDD`,
     * `grouped_entry` is the Data Catalog resource name of the date sharded
     * grouped entry, for example,
     * `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`.
     * Otherwise, `grouped_entry` is empty.
     * </pre>
     *
     * <code>
     * string grouped_entry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearGroupedEntry() {

      groupedEntry_ = getDefaultInstance().getGroupedEntry();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Output only. If the table is a dated shard, i.e., with name pattern `[prefix]YYYYMMDD`,
     * `grouped_entry` is the Data Catalog resource name of the date sharded
     * grouped entry, for example,
     * `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`.
     * Otherwise, `grouped_entry` is empty.
     * </pre>
     *
     * <code>
     * string grouped_entry = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for groupedEntry to set.
     * @return This builder for chaining.
     */
    public Builder setGroupedEntryBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Unlike setGroupedEntry, raw bytes must be explicitly UTF-8 validated.
      checkByteStringIsUtf8(value);

      groupedEntry_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.TableSpec)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.TableSpec)
  private static final com.google.cloud.datacatalog.v1beta1.TableSpec DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.TableSpec();
  }

  public static com.google.cloud.datacatalog.v1beta1.TableSpec getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<TableSpec> PARSER =
      new com.google.protobuf.AbstractParser<TableSpec>() {
        @java.lang.Override
        public TableSpec parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // Delegates to the wire-parsing constructor above.
          return new TableSpec(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<TableSpec> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TableSpec> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.datacatalog.v1beta1.TableSpec getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* --------------------------------------------------------------------------- Open Asset Import Library - Java Binding (jassimp) --------------------------------------------------------------------------- Copyright (c) 2006-2012, assimp team All rights reserved. Redistribution and use of this software in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the assimp team, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission of the assimp team. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
--------------------------------------------------------------------------- */ package org.gearvrf.jassimp; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.ArrayList; import java.util.EnumMap; import java.util.List; import java.util.Map; import java.util.Set; /** * Data structure for a material.<p> * * Depending on the imported scene and scene format, individual properties * might be present or not. A list of all imported properties can be retrieved * via {@link #getProperties()}.<p> * * This class offers <code>getXXX()</code> for all supported properties. These * methods are fail-save, i.e., will return a default value when the * corresponding property is not set. To change the built in default values, * use the <code>setDefaultXXX()</code> methods.<p> * * If your application expects a certain set of properties to be available, * the {@link #hasProperties(Set)} method can be used to check whether all * these properties are actually set. If this check fails, you can still * use this material via the <code>getXXX()</code> methods without special * error handling code as the implementation guarantees to return default * values for missing properties. This check will not work on texture related * properties (i.e., properties starting with <code>TEX_</code>). */ public final class AiMaterial { /** * Enumerates all supported material properties. */ public static enum PropertyKey { /** * Name. */ NAME("?mat.name", String.class), /** * Two-sided flag. */ TWO_SIDED("$mat.twosided", Integer.class), /** * Shading mode. */ SHADING_MODE("$mat.shadingm", AiShadingMode.class), /** * Wireframe flag. */ WIREFRAME("$mat.wireframe", Integer.class), /** * Blend mode. */ BLEND_MODE("$mat.blend", AiBlendMode.class), /** * Opacity. */ OPACITY("$mat.opacity", Float.class), /** * Bump scaling. */ BUMP_SCALING("$mat.bumpscaling", Float.class), /** * Shininess. */ SHININESS("$mat.shininess", Float.class), /** * Reflectivity. 
*/ REFLECTIVITY("$mat.reflectivity", Float.class), /** * Shininess strength. */ SHININESS_STRENGTH("$mat.shinpercent", Float.class), /** * Refract index. */ REFRACTI("$mat.refracti", Float.class), /** * Diffuse color. */ COLOR_DIFFUSE("$clr.diffuse", Object.class), /** * Ambient color. */ COLOR_AMBIENT("$clr.ambient", Object.class), /** * Ambient color. */ COLOR_SPECULAR("$clr.specular", Object.class), /** * Emissive color. */ COLOR_EMISSIVE("$clr.emissive", Object.class), /** * Transparent color. */ COLOR_TRANSPARENT("$clr.transparent", Object.class), /** * Reflective color. */ COLOR_REFLECTIVE("$clr.reflective", Object.class), /** * Global background image. */ GLOBAL_BACKGROUND_IMAGE("?bg.global", String.class), /** * Texture file path. */ TEX_FILE("$tex.file", String.class), /** * Texture uv index. */ TEX_UV_INDEX("$tex.uvwsrc", Integer.class), /** * Texture blend factor. */ TEX_BLEND("$tex.blend", Float.class), /** * Texture operation. */ TEX_OP("$tex.op", AiTextureOp.class), /** * Texture map mode for u axis. */ TEX_MAP_MODE_U("$tex.mapmodeu", AiTextureMapMode.class), /** * Texture map mode for v axis. */ TEX_MAP_MODE_V("$tex.mapmodev", AiTextureMapMode.class), /** * Texture map mode for w axis. */ TEX_MAP_MODE_W("$tex.mapmodew", AiTextureMapMode.class); /** * Constructor. * * @param key key name as used by assimp * @param type key type, used for casts and checks */ private PropertyKey(String key, Class<?> type) { m_key = key; m_type = type; } /** * Key. */ private final String m_key; /** * Type. */ private final Class<?> m_type; } /** * A very primitive RTTI system for the contents of material properties. */ public static enum PropertyType { /** * Array of single-precision (32 Bit) floats. */ FLOAT(0x1), /** * The material property is a string. */ STRING(0x3), /** * Array of (32 Bit) integers. */ INTEGER(0x4), /** * Simple binary buffer, content undefined. Not convertible to anything. 
*/ BUFFER(0x5); /** * Utility method for converting from c/c++ based integer enums to java * enums.<p> * * This method is intended to be used from JNI and my change based on * implementation needs. * * @param rawValue an integer based enum value (as defined by assimp) * @return the enum value corresponding to rawValue */ static PropertyType fromRawValue(int rawValue) { for (PropertyType type : PropertyType.values()) { if (type.m_rawValue == rawValue) { return type; } } throw new IllegalArgumentException("unexptected raw value: " + rawValue); } /** * Constructor. * * @param rawValue maps java enum to c/c++ integer enum values */ private PropertyType(int rawValue) { m_rawValue = rawValue; } /** * The mapped c/c++ integer enum value. */ private final int m_rawValue; } /** * Data structure for a single material property.<p> * * As an user, you'll probably never need to deal with this data structure. * Just use the provided get() family of functions to query material * properties easily. */ public static final class Property { /** * Constructor. * * @param key * @param semantic * @param index * @param type * @param data */ Property(String key, int semantic, int index, int type, Object data) { m_key = key; m_semantic = semantic; m_index = index; m_type = PropertyType.fromRawValue(type); m_data = data; } /** * Constructor. * * @param key * @param semantic * @param index * @param type * @param dataLen */ Property(String key, int semantic, int index, int type, int dataLen) { m_key = key; m_semantic = semantic; m_index = index; m_type = PropertyType.fromRawValue(type); ByteBuffer b = ByteBuffer.allocateDirect(dataLen); b.order(ByteOrder.nativeOrder()); m_data = b; } /** * Returns the key of the property.<p> * * Keys are generally case insensitive. * * @return the key */ public String getKey() { return m_key; } /** * Textures: Specifies their exact usage semantic. * For non-texture properties, this member is always 0 * (or, better-said, #aiTextureType_NONE). 
* * @return the semantic */ public int getSemantic() { return m_semantic; } /** * Textures: Specifies the index of the texture. * For non-texture properties, this member is always 0. * * @return the index */ public int getIndex() { return m_index; } /** * Type information for the property.<p> * * Defines the data layout inside the data buffer. This is used * by the library internally to perform debug checks and to * utilize proper type conversions. * (It's probably a hacky solution, but it works.) * * @return the type */ public PropertyType getType() { return m_type; } /** * Binary buffer to hold the property's value. * The size of the buffer is always mDataLength. * * @return the data */ Object getData() { return m_data; } /** * Key. */ private final String m_key; /** * Semantic. */ private final int m_semantic; /** * Index. */ private final int m_index; /** * Type. */ private final PropertyType m_type; /** * Data. */ private final Object m_data; } /** * Constructor. */ AiMaterial() { /* nothing to do */ } /** * Checks whether the given set of properties is available. * * @param keys the keys to check * @return true if all properties are available, false otherwise */ public boolean hasProperties(Set<PropertyKey> keys) { for (PropertyKey key : keys) { if (null == getProperty(key.m_key)) { return false; } } return true; } /** * Sets a default value.<p> * * The passed in Object must match the type of the key as returned by * the corresponding <code>getXXX()</code> method. 
* * @param key the key * @param defaultValue the new default, may not be null * @throws IllegalArgumentException if defaultValue is null or has a wrong * type */ public void setDefault(PropertyKey key, Object defaultValue) { if (null == defaultValue) { throw new IllegalArgumentException("defaultValue may not be null"); } if (key.m_type != defaultValue.getClass()) { throw new IllegalArgumentException( "defaultValue has wrong type, " + "expected: " + key.m_type + ", found: " + defaultValue.getClass()); } m_defaults.put(key, defaultValue); } // {{ Fail-save Getters /** * Returns the name of the material.<p> * * If missing, defaults to empty string * * @return the name */ public String getName() { return getTyped(PropertyKey.NAME, String.class); } /** * Returns the two-sided flag.<p> * * If missing, defaults to 0 * * @return the two-sided flag */ public int getTwoSided() { return getTyped(PropertyKey.TWO_SIDED, Integer.class); } /** * Returns the shading mode.<p> * * If missing, defaults to {@link AiShadingMode#FLAT} * * @return the shading mode */ public AiShadingMode getShadingMode() { Property p = getProperty(PropertyKey.SHADING_MODE.m_key); if (null == p || null == p.getData()) { return (AiShadingMode) m_defaults.get(PropertyKey.SHADING_MODE); } return AiShadingMode.fromRawValue((Integer) p.getData()); } /** * Returns the wireframe flag.<p> * * If missing, defaults to 0 * * @return the wireframe flag */ public int getWireframe() { return getTyped(PropertyKey.WIREFRAME, Integer.class); } /** * Returns the blend mode.<p> * * If missing, defaults to {@link AiBlendMode#DEFAULT} * * @return the blend mode */ public AiBlendMode getBlendMode() { Property p = getProperty(PropertyKey.BLEND_MODE.m_key); if (null == p || null == p.getData()) { return (AiBlendMode) m_defaults.get(PropertyKey.BLEND_MODE); } return AiBlendMode.fromRawValue((Integer) p.getData()); } /** * Returns the opacity.<p> * * If missing, defaults to 1.0 * * @return the opacity */ public float getOpacity() 
{ return getTyped(PropertyKey.OPACITY, Float.class); } /** * Returns the bump scaling factor.<p> * * If missing, defaults to 1.0 * * @return the bump scaling factor */ public float getBumpScaling() { return getTyped(PropertyKey.BUMP_SCALING, Float.class); } /** * Returns the shininess.<p> * * If missing, defaults to 1.0 * * @return the shininess */ public float getShininess() { return getTyped(PropertyKey.SHININESS, Float.class); } /** * Returns the reflectivity.<p> * * If missing, defaults to 0.0 * * @return the reflectivity */ public float getReflectivity() { return getTyped(PropertyKey.REFLECTIVITY, Float.class); } /** * Returns the shininess strength.<p> * * If missing, defaults to 0.0 * * @return the shininess strength */ public float getShininessStrength() { return getTyped(PropertyKey.SHININESS_STRENGTH, Float.class); } /** * Returns the refract index.<p> * * If missing, defaults to 0.0 * * @return the refract index */ public float getRefractIndex() { return getTyped(PropertyKey.REFRACTI, Float.class); } /** * Returns the diffuse color.<p> * * If missing, defaults to opaque white (1.0, 1.0, 1.0, 1.0)<p> * * This method is part of the wrapped API (see {@link AiWrapperProvider} * for details on wrappers).<p> * * The built-in behavior is to return a {@link AiVector}. 
* * @param wrapperProvider the wrapper provider (used for type inference) * @return the diffuse color */ @SuppressWarnings("unchecked") public <V3, M4, C, N, Q> C getDiffuseColor( AiWrapperProvider<V3, M4, C, N, Q> wrapperProvider) { Property p = getProperty(PropertyKey.COLOR_DIFFUSE.m_key); if (null == p || null == p.getData()) { Object def = m_defaults.get(PropertyKey.COLOR_DIFFUSE); if (def == null) { return (C) Jassimp.wrapColor4(1.0f, 1.0f, 1.0f, 1.0f); } return (C) def; } return (C) p.getData(); } /** * Returns the ambient color.<p> * * If missing, defaults to opaque white (1.0, 1.0, 1.0, 1.0)<p> * * This method is part of the wrapped API (see {@link AiWrapperProvider} * for details on wrappers).<p> * * The built-in behavior is to return a {@link AiVector}. * * @param wrapperProvider the wrapper provider (used for type inference) * @return the ambient color */ @SuppressWarnings("unchecked") public <V3, M4, C, N, Q> C getAmbientColor( AiWrapperProvider<V3, M4, C, N, Q> wrapperProvider) { Property p = getProperty(PropertyKey.COLOR_AMBIENT.m_key); if (null == p || null == p.getData()) { Object def = m_defaults.get(PropertyKey.COLOR_AMBIENT); if (def == null) { return (C) Jassimp.wrapColor4(1.0f, 1.0f, 1.0f, 1.0f); } return (C) def; } return (C) p.getData(); } /** * Returns the specular color.<p> * * If missing, defaults to opaque white (1.0, 1.0, 1.0, 1.0)<p> * * This method is part of the wrapped API (see {@link AiWrapperProvider} * for details on wrappers).<p> * * The built-in behavior is to return a {@link AiVector}. 
* * @param wrapperProvider the wrapper provider (used for type inference) * @return the specular color */ @SuppressWarnings("unchecked") public <V3, M4, C, N, Q> C getSpecularColor( AiWrapperProvider<V3, M4, C, N, Q> wrapperProvider) { Property p = getProperty(PropertyKey.COLOR_SPECULAR.m_key); if (null == p || null == p.getData()) { Object def = m_defaults.get(PropertyKey.COLOR_SPECULAR); if (def == null) { return (C) Jassimp.wrapColor4(1.0f, 1.0f, 1.0f, 1.0f); } return (C) def; } return (C) p.getData(); } /** * Returns the emissive color.<p> * * If missing, defaults to opaque white (1.0, 1.0, 1.0, 1.0)<p> * * This method is part of the wrapped API (see {@link AiWrapperProvider} * for details on wrappers).<p> * * The built-in behavior is to return a {@link AiVector}. * * @param wrapperProvider the wrapper provider (used for type inference) * @return the emissive color */ @SuppressWarnings("unchecked") public <V3, M4, C, N, Q> C getEmissiveColor( AiWrapperProvider<V3, M4, C, N, Q> wrapperProvider) { Property p = getProperty(PropertyKey.COLOR_EMISSIVE.m_key); if (null == p || null == p.getData()) { Object def = m_defaults.get(PropertyKey.COLOR_EMISSIVE); if (def == null) { return (C) Jassimp.wrapColor4(1.0f, 1.0f, 1.0f, 1.0f); } return (C) def; } return (C) p.getData(); } /** * Returns the transparent color.<p> * * If missing, defaults to opaque white (1.0, 1.0, 1.0, 1.0)<p> * * This method is part of the wrapped API (see {@link AiWrapperProvider} * for details on wrappers).<p> * * The built-in behavior is to return a {@link AiVector}. 
* * @param wrapperProvider the wrapper provider (used for type inference) * @return the transparent color */ @SuppressWarnings("unchecked") public <V3, M4, C, N, Q> C getTransparentColor( AiWrapperProvider<V3, M4, C, N, Q> wrapperProvider) { Property p = getProperty(PropertyKey.COLOR_TRANSPARENT.m_key); if (null == p || null == p.getData()) { Object def = m_defaults.get(PropertyKey.COLOR_TRANSPARENT); if (def == null) { return (C) Jassimp.wrapColor4(1.0f, 1.0f, 1.0f, 1.0f); } return (C) def; } return (C) p.getData(); } /** * Returns the reflective color.<p> * * If missing, defaults to opaque white (1.0, 1.0, 1.0, 1.0)<p> * * This method is part of the wrapped API (see {@link AiWrapperProvider} * for details on wrappers).<p> * * The built-in behavior is to return a {@link AiVector}. * * @param wrapperProvider the wrapper provider (used for type inference) * @return the reflective color */ @SuppressWarnings("unchecked") public <V3, M4, C, N, Q> C getReflectiveColor( AiWrapperProvider<V3, M4, C, N, Q> wrapperProvider) { Property p = getProperty(PropertyKey.COLOR_REFLECTIVE.m_key); if (null == p || null == p.getData()) { Object def = m_defaults.get(PropertyKey.COLOR_REFLECTIVE); if (def == null) { return (C) Jassimp.wrapColor4(1.0f, 1.0f, 1.0f, 1.0f); } return (C) def; } return (C) p.getData(); } /** * Returns the global background image.<p> * * If missing, defaults to empty string * * @return the global background image */ public String getGlobalBackgroundImage() { return getTyped(PropertyKey.GLOBAL_BACKGROUND_IMAGE, String.class); } /** * Returns the number of textures of the given type. 
* * @param type the type * @return the number of textures */ public int getNumTextures(AiTextureType type) { return m_numTextures.get(type); } /** * Returns the texture file.<p> * * If missing, defaults to empty string * * @param type the texture type * @param index the index in the texture stack * @return the file * @throws IndexOutOfBoundsException if index is invalid */ public String getTextureFile(AiTextureType type, int index) { checkTexRange(type, index); return getTyped(PropertyKey.TEX_FILE, type, index, String.class); } /** * Returns the index of the UV coordinate set used by the texture.<p> * * If missing, defaults to 0 * * @param type the texture type * @param index the index in the texture stack * @return the UV index * @throws IndexOutOfBoundsException if index is invalid */ public int getTextureUVIndex(AiTextureType type, int index) { checkTexRange(type, index); return getTyped(PropertyKey.TEX_UV_INDEX, type, index, Integer.class); } /** * Returns the blend factor of the texture.<p> * * If missing, defaults to 1.0 * * @param type the texture type * @param index the index in the texture stack * @return the blend factor */ public float getBlendFactor(AiTextureType type, int index) { checkTexRange(type, index); return getTyped(PropertyKey.TEX_BLEND, type, index, Float.class); } /** * Returns the texture operation.<p> * * If missing, defaults to {@link AiTextureOp#ADD} * * @param type the texture type * @param index the index in the texture stack * @return the texture operation */ public AiTextureOp getTextureOp(AiTextureType type, int index) { checkTexRange(type, index); Property p = getProperty(PropertyKey.TEX_OP.m_key); if (null == p || null == p.getData()) { return (AiTextureOp) m_defaults.get(PropertyKey.TEX_OP); } return AiTextureOp.fromRawValue((Integer) p.getData()); } /** * Returns the texture mapping mode for the u axis.<p> * * If missing, defaults to {@link AiTextureMapMode#CLAMP} * * @param type the texture type * @param index the index in the 
texture stack * @return the texture mapping mode */ public AiTextureMapMode getTextureMapModeU(AiTextureType type, int index) { checkTexRange(type, index); Property p = getProperty(PropertyKey.TEX_MAP_MODE_U.m_key); if (null == p || null == p.getData()) { return (AiTextureMapMode) m_defaults.get( PropertyKey.TEX_MAP_MODE_U); } return AiTextureMapMode.fromRawValue((Integer) p.getData()); } /** * Returns the texture mapping mode for the v axis.<p> * * If missing, defaults to {@link AiTextureMapMode#CLAMP} * * @param type the texture type * @param index the index in the texture stack * @return the texture mapping mode */ public AiTextureMapMode getTextureMapModeV(AiTextureType type, int index) { checkTexRange(type, index); Property p = getProperty(PropertyKey.TEX_MAP_MODE_V.m_key); if (null == p || null == p.getData()) { return (AiTextureMapMode) m_defaults.get( PropertyKey.TEX_MAP_MODE_V); } return AiTextureMapMode.fromRawValue((Integer) p.getData()); } /** * Returns the texture mapping mode for the w axis.<p> * * If missing, defaults to {@link AiTextureMapMode#CLAMP} * * @param type the texture type * @param index the index in the texture stack * @return the texture mapping mode */ public AiTextureMapMode getTextureMapModeW(AiTextureType type, int index) { checkTexRange(type, index); Property p = getProperty(PropertyKey.TEX_MAP_MODE_W.m_key); if (null == p || null == p.getData()) { return (AiTextureMapMode) m_defaults.get( PropertyKey.TEX_MAP_MODE_W); } return AiTextureMapMode.fromRawValue((Integer) p.getData()); } /** * Returns all information related to a single texture. 
* * @param type the texture type * @param index the index in the texture stack * @return the texture information */ public AiTextureInfo getTextureInfo(AiTextureType type, int index) { return new AiTextureInfo(type, index, getTextureFile(type, index), getTextureUVIndex(type, index), getBlendFactor(type, index), getTextureOp(type, index), getTextureMapModeW(type, index), getTextureMapModeW(type, index), getTextureMapModeW(type, index)); } // }} // {{ Generic Getters /** * Returns a single property based on its key. * * @param key the key * @return the property or null if the property is not set */ public Property getProperty(String key) { for (Property property : m_properties) { if (property.getKey().equals(key)) { return property; } } return null; } /** * Returns a single property based on its key. * * @param key the key * @param semantic the semantic type (texture type) * @param index the index * @return the property or null if the property is not set */ public Property getProperty(String key, int semantic, int index) { for (Property property : m_properties) { if (property.getKey().equals(key) && property.m_semantic == semantic && property.m_index == index) { return property; } } return null; } /** * Returns all properties of the material. * * @return the list of properties */ public List<Property> getProperties() { return m_properties; } // }} /** * Helper method. Returns typed property data. * * @param <T> type * @param key the key * @param clazz type * @return the data */ private <T> T getTyped(PropertyKey key, Class<T> clazz) { Property p = getProperty(key.m_key); if (null == p || null == p.getData()) { return clazz.cast(m_defaults.get(key)); } return clazz.cast(p.getData()); } /** * Helper method. Returns typed property data. 
* * @param <T> type * @param key the key * @param type the texture type * @param index the texture index * @param clazz type * @return the data */ private <T> T getTyped(PropertyKey key, AiTextureType type, int index, Class<T> clazz) { Property p = getProperty(key.m_key, AiTextureType.toRawValue(type), index); if (null == p || null == p.getData()) { return clazz.cast(m_defaults.get(key)); } return clazz.cast(p.getData()); } /** * Checks that index is valid an throw an exception if not. * * @param type the type * @param index the index to check */ private void checkTexRange(AiTextureType type, int index) { if (index < 0 || index > m_numTextures.get(type)) { throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + m_numTextures.get(type)); } } /** * Defaults for missing properties. */ private Map<PropertyKey, Object> m_defaults = new EnumMap<PropertyKey, Object>(PropertyKey.class); { setDefault(PropertyKey.NAME, ""); setDefault(PropertyKey.TWO_SIDED, 0); setDefault(PropertyKey.SHADING_MODE, AiShadingMode.FLAT); setDefault(PropertyKey.WIREFRAME, 0); setDefault(PropertyKey.BLEND_MODE, AiBlendMode.DEFAULT); setDefault(PropertyKey.OPACITY, 1.0f); setDefault(PropertyKey.BUMP_SCALING, 1.0f); setDefault(PropertyKey.SHININESS, 1.0f); setDefault(PropertyKey.REFLECTIVITY, 0.0f); setDefault(PropertyKey.SHININESS_STRENGTH, 0.0f); setDefault(PropertyKey.REFRACTI, 0.0f); /* bypass null checks for colors */ m_defaults.put(PropertyKey.COLOR_DIFFUSE, null); m_defaults.put(PropertyKey.COLOR_AMBIENT, null); m_defaults.put(PropertyKey.COLOR_SPECULAR, null); m_defaults.put(PropertyKey.COLOR_EMISSIVE, null); m_defaults.put(PropertyKey.COLOR_TRANSPARENT, null); m_defaults.put(PropertyKey.COLOR_REFLECTIVE, null); setDefault(PropertyKey.GLOBAL_BACKGROUND_IMAGE, ""); /* texture related values */ setDefault(PropertyKey.TEX_FILE, ""); setDefault(PropertyKey.TEX_UV_INDEX, 0); setDefault(PropertyKey.TEX_BLEND, 1.0f); setDefault(PropertyKey.TEX_OP, AiTextureOp.ADD); 
setDefault(PropertyKey.TEX_MAP_MODE_U, AiTextureMapMode.CLAMP); setDefault(PropertyKey.TEX_MAP_MODE_V, AiTextureMapMode.CLAMP); setDefault(PropertyKey.TEX_MAP_MODE_W, AiTextureMapMode.CLAMP); /* ensure we have defaults for everything */ for (PropertyKey key : PropertyKey.values()) { if (!m_defaults.containsKey(key)) { throw new IllegalStateException("missing default for: " + key); } } } /** * This method is used by JNI, do not call or modify. * * @param type the type * @param number the number */ @SuppressWarnings("unused") private void setTextureNumber(int type, int number) { m_numTextures.put(AiTextureType.fromRawValue(type), number); } /** * List of properties. */ private final List<Property> m_properties = new ArrayList<Property>(); /** * Number of textures for each type. */ private final Map<AiTextureType, Integer> m_numTextures = new EnumMap<AiTextureType, Integer>(AiTextureType.class); }
package com.github.clans.fab;

import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Outline;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.graphics.Xfermode;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.graphics.drawable.RippleDrawable;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.StateListDrawable;
import android.graphics.drawable.shapes.OvalShape;
import android.graphics.drawable.shapes.Shape;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewOutlineProvider;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.ImageButton;
import android.widget.TextView;

/**
 * Circular image button with an optional drop shadow, label and a
 * determinate/indeterminate progress ring drawn around the circle.
 */
public class FloatingActionButton extends ImageButton {

    /** Button size constants (mapped to fab_size_normal / fab_size_mini dimens). */
    public static final int SIZE_NORMAL = 0;
    public static final int SIZE_MINI = 1;

    // Shadow configuration. Package-private (no modifier) — presumably read
    // by sibling classes in this package; TODO confirm.
    int mFabSize;
    boolean mShowShadow;
    int mShadowColor;
    int mShadowRadius = Util.dpToPx(getContext(), 4f);
    int mShadowXOffset = Util.dpToPx(getContext(), 1f);
    int mShadowYOffset = Util.dpToPx(getContext(), 3f);

    // Used by the Shadow drawable to punch the circle out of the shadow layer.
    private static final Xfermode PORTER_DUFF_CLEAR = new PorterDuffXfermode(PorterDuff.Mode.CLEAR);
    // Indeterminate-spinner tuning constants (ms / ms / degrees).
    private static final long PAUSE_GROWING_TIME = 200;
    private static final double BAR_SPIN_CYCLE_TIME = 500;
    private static final int BAR_MAX_LENGTH = 270;

    // Fill colors for the button states.
    private int mColorNormal;
    private int mColorPressed;
    private int mColorDisabled;
    private int mColorRipple;
    private Drawable mIcon;
    private int mIconSize = Util.dpToPx(getContext(), 24f);
    private Animation mShowAnimation;
    private Animation mHideAnimation;
    private String mLabelText;
    private OnClickListener mClickListener;
    private Drawable mBackgroundDrawable;
    private boolean mUsingElevation;
    private boolean mUsingElevationCompat;

    // Progress
    private boolean mProgressBarEnabled;
    private int mProgressWidth = Util.dpToPx(getContext(), 6f);
    private int mProgressColor;
    private int mProgressBackgroundColor;
    private boolean mShouldUpdateButtonPosition;
    // -1 sentinel: position not captured yet (see saveButtonOriginalPosition).
    private float mOriginalX = -1;
    private float mOriginalY = -1;
    private boolean mButtonPositionSaved;
    private RectF mProgressCircleBounds = new RectF();
    private Paint mBackgroundPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private Paint mProgressPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
    private boolean mProgressIndeterminate;
    private long mLastTimeAnimated;
    private float mSpinSpeed = 195.0f; //The amount of degrees per second
    private long mPausedTimeWithoutGrowing = 0;
    private double mTimeStartGrowing;
    private boolean mBarGrowingFromFront = true;
    private int mBarLength = 16;
    private float mBarExtraLength;
    private float mCurrentProgress;
    private float mTargetProgress;
    private int mProgress;
    private boolean mAnimateProgress;
    // Deferred flags: attribute values applied later in onSizeChanged.
    private boolean mShouldProgressIndeterminate;
    private boolean mShouldSetProgress;
    private int mProgressMax = 100;
    private boolean mShowProgressBackground;

    public FloatingActionButton(Context context) {
        this(context, null);
    }

    public FloatingActionButton(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public FloatingActionButton(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs, defStyleAttr);
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public FloatingActionButton(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        init(context, attrs, defStyleAttr);
    }

    /**
     * Reads all styleable attributes into fields, loads the show/hide
     * animations and applies edit-mode previews. The real background is
     * built later in onSizeChanged (see the commented-out call below).
     */
    private void init(Context context, AttributeSet attrs, int defStyleAttr) {
        TypedArray attr = context.obtainStyledAttributes(attrs, R.styleable.FloatingActionButton, defStyleAttr, 0);
        mColorNormal = attr.getColor(R.styleable.FloatingActionButton_fab_colorNormal, 0xFFDA4336);
        mColorPressed = attr.getColor(R.styleable.FloatingActionButton_fab_colorPressed, 0xFFE75043);
        mColorDisabled = attr.getColor(R.styleable.FloatingActionButton_fab_colorDisabled, 0xFFAAAAAA);
        mColorRipple = attr.getColor(R.styleable.FloatingActionButton_fab_colorRipple, 0x99FFFFFF);
        mShowShadow = attr.getBoolean(R.styleable.FloatingActionButton_fab_showShadow, true);
        mShadowColor = attr.getColor(R.styleable.FloatingActionButton_fab_shadowColor, 0x66000000);
        mShadowRadius = attr.getDimensionPixelSize(R.styleable.FloatingActionButton_fab_shadowRadius, mShadowRadius);
        mShadowXOffset = attr.getDimensionPixelSize(R.styleable.FloatingActionButton_fab_shadowXOffset, mShadowXOffset);
        mShadowYOffset = attr.getDimensionPixelSize(R.styleable.FloatingActionButton_fab_shadowYOffset, mShadowYOffset);
        mFabSize = attr.getInt(R.styleable.FloatingActionButton_fab_size, SIZE_NORMAL);
        mLabelText = attr.getString(R.styleable.FloatingActionButton_fab_label);
        mShouldProgressIndeterminate = attr.getBoolean(R.styleable.FloatingActionButton_fab_progress_indeterminate, false);
        mProgressColor = attr.getColor(R.styleable.FloatingActionButton_fab_progress_color, 0xFF009688);
        mProgressBackgroundColor = attr.getColor(R.styleable.FloatingActionButton_fab_progress_backgroundColor, 0x4D000000);
        mProgressMax = attr.getInt(R.styleable.FloatingActionButton_fab_progress_max, mProgressMax);
        mShowProgressBackground = attr.getBoolean(R.styleable.FloatingActionButton_fab_progress_showBackground, true);
        if (attr.hasValue(R.styleable.FloatingActionButton_fab_progress)) {
            mProgress = attr.getInt(R.styleable.FloatingActionButton_fab_progress, 0);
            mShouldSetProgress = true;
        }
        if (attr.hasValue(R.styleable.FloatingActionButton_fab_elevationCompat)) {
            float elevation = attr.getDimensionPixelOffset(R.styleable.FloatingActionButton_fab_elevationCompat, 0);
            // In edit mode the compat path is skipped so the preview renders.
            if (isInEditMode()) {
                setElevation(elevation);
            } else {
                setElevationCompat(elevation);
            }
        }
        initShowAnimation(attr);
        initHideAnimation(attr);
        attr.recycle();

        if (isInEditMode()) {
            if (mShouldProgressIndeterminate) {
                setIndeterminate(true);
            } else if (mShouldSetProgress) {
                saveButtonOriginalPosition();
                setProgress(mProgress, false);
            }
        }

//        updateBackground();
        setClickable(true);
    }

    private void initShowAnimation(TypedArray attr) {
        int resourceId = attr.getResourceId(R.styleable.FloatingActionButton_fab_showAnimation, R.anim.fab_scale_up);
        mShowAnimation = AnimationUtils.loadAnimation(getContext(), resourceId);
    }

    private void initHideAnimation(TypedArray attr) {
        int resourceId = attr.getResourceId(R.styleable.FloatingActionButton_fab_hideAnimation, R.anim.fab_scale_down);
        mHideAnimation = AnimationUtils.loadAnimation(getContext(), resourceId);
    }

    /** Diameter of the circular button in pixels, based on mFabSize. */
    private int getCircleSize() {
        return getResources().getDimensionPixelSize(mFabSize == SIZE_NORMAL
                ? R.dimen.fab_size_normal
                : R.dimen.fab_size_mini);
    }

    // Total measured size = circle + shadow insets (+ progress ring width).
    private int calculateMeasuredWidth() {
        int width = getCircleSize() + calculateShadowWidth();
        if (mProgressBarEnabled) {
            width += mProgressWidth * 2;
        }
        return width;
    }

    private int calculateMeasuredHeight() {
        int height = getCircleSize() + calculateShadowHeight();
        if (mProgressBarEnabled) {
            height += mProgressWidth * 2;
        }
        return height;
    }

    int calculateShadowWidth() {
        return hasShadow() ? getShadowX() * 2 : 0;
    }

    int calculateShadowHeight() { return hasShadow() ?
getShadowY() * 2 : 0; }

    // Shadow extent on each axis: blur radius plus the absolute offset.
    private int getShadowX() {
        return mShadowRadius + Math.abs(mShadowXOffset);
    }

    private int getShadowY() {
        return mShadowRadius + Math.abs(mShadowYOffset);
    }

    private float calculateCenterX() {
        return (float) (getMeasuredWidth() / 2);
    }

    private float calculateCenterY() {
        return (float) (getMeasuredHeight() / 2);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
//        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Fixed size derived from circle + shadow + progress ring; the
        // measure specs are intentionally ignored.
        setMeasuredDimension(calculateMeasuredWidth(), calculateMeasuredHeight());
    }

    /**
     * Draws the progress ring on top of the button. In indeterminate mode
     * an arc spins and grows/shrinks; in determinate mode the arc animates
     * from the current towards the target progress. Timing is based on
     * SystemClock.uptimeMillis() deltas between frames.
     */
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mProgressBarEnabled) {
            if (mShowProgressBackground) {
                canvas.drawArc(mProgressCircleBounds, 360, 360, false, mBackgroundPaint);
            }

            boolean shouldInvalidate = false;

            if (mProgressIndeterminate) {
                shouldInvalidate = true;

                long deltaTime = SystemClock.uptimeMillis() - mLastTimeAnimated;
                float deltaNormalized = deltaTime * mSpinSpeed / 1000.0f;

                updateProgressLength(deltaTime);

                mCurrentProgress += deltaNormalized;
                if (mCurrentProgress > 360f) {
                    mCurrentProgress -= 360f;
                }
                mLastTimeAnimated = SystemClock.uptimeMillis();

                float from = mCurrentProgress - 90;
                float to = mBarLength + mBarExtraLength;

                // Static preview arc in the layout editor.
                if (isInEditMode()) {
                    from = 0;
                    to = 135;
                }

                canvas.drawArc(mProgressCircleBounds, from, to, false, mProgressPaint);
            } else {
                if (mCurrentProgress != mTargetProgress) {
                    shouldInvalidate = true;
                    float deltaTime = (float) (SystemClock.uptimeMillis() - mLastTimeAnimated) / 1000;
                    float deltaNormalized = deltaTime * mSpinSpeed;

                    if (mCurrentProgress > mTargetProgress) {
                        mCurrentProgress = Math.max(mCurrentProgress - deltaNormalized, mTargetProgress);
                    } else {
                        mCurrentProgress = Math.min(mCurrentProgress + deltaNormalized, mTargetProgress);
                    }
                    mLastTimeAnimated = SystemClock.uptimeMillis();
                }

                canvas.drawArc(mProgressCircleBounds, -90, mCurrentProgress, false, mProgressPaint);
            }

            if (shouldInvalidate) {
                invalidate();
            }
        }
    }

    /**
     * Advances the grow/shrink phase of the indeterminate bar. After a
     * short pause the extra arc length oscillates between 0 and
     * BAR_MAX_LENGTH - mBarLength following a cosine curve; the growth
     * direction flips each spin cycle.
     */
    private void updateProgressLength(long deltaTimeInMillis) {
        if (mPausedTimeWithoutGrowing >= PAUSE_GROWING_TIME) {
            mTimeStartGrowing += deltaTimeInMillis;

            if (mTimeStartGrowing > BAR_SPIN_CYCLE_TIME) {
                mTimeStartGrowing -= BAR_SPIN_CYCLE_TIME;
                mPausedTimeWithoutGrowing = 0;
                mBarGrowingFromFront = !mBarGrowingFromFront;
            }

            float distance = (float) Math.cos((mTimeStartGrowing / BAR_SPIN_CYCLE_TIME + 1) * Math.PI) / 2 + 0.5f;
            float length = BAR_MAX_LENGTH - mBarLength;

            if (mBarGrowingFromFront) {
                mBarExtraLength = distance * length;
            } else {
                float newLength = length * (1 - distance);
                // Keep the arc's leading edge steady while the tail catches up.
                mCurrentProgress += (mBarExtraLength - newLength);
                mBarExtraLength = newLength;
            }
        } else {
            mPausedTimeWithoutGrowing += deltaTimeInMillis;
        }
    }

    /**
     * Applies deferred attribute state once the view has a size, then
     * rebuilds the paints, progress bounds and layered background.
     */
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        saveButtonOriginalPosition();
        if (mShouldProgressIndeterminate) {
            setIndeterminate(true);
            mShouldProgressIndeterminate = false;
        } else if (mShouldSetProgress) {
            setProgress(mProgress, mAnimateProgress);
            mShouldSetProgress = false;
        } else if (mShouldUpdateButtonPosition) {
            updateButtonPosition();
            mShouldUpdateButtonPosition = false;
        }
        super.onSizeChanged(w, h, oldw, oldh);

        setupProgressBounds();
        setupProgressBarPaints();
        updateBackground();
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void setLayoutParams(ViewGroup.LayoutParams params) {
        // In elevation-compat mode the shadow is drawn by this view, so the
        // margins are widened to make room for it.
        if (params instanceof ViewGroup.MarginLayoutParams && mUsingElevationCompat) {
            ((ViewGroup.MarginLayoutParams) params).leftMargin += getShadowX();
            ((ViewGroup.MarginLayoutParams) params).topMargin += getShadowY();
            ((ViewGroup.MarginLayoutParams) params).rightMargin += getShadowX();
            ((ViewGroup.MarginLayoutParams) params).bottomMargin += getShadowY();
        }
        super.setLayoutParams(params);
    }

    /**
     * Rebuilds the layered background: [shadow,] state-dependent fill, icon.
     * Only the icon layer gets an inset here; the fill sizes itself in
     * CircleDrawable.draw.
     */
    void updateBackground() {
        LayerDrawable layerDrawable;
        if (hasShadow()) {
            layerDrawable = new LayerDrawable(new Drawable[]{
                    new Shadow(),
                    createFillDrawable(),
                    getIconDrawable()
            });
        } else {
            layerDrawable = new LayerDrawable(new Drawable[]{
                    createFillDrawable(),
                    getIconDrawable()
            });
        }

        int iconSize = -1;
        if (getIconDrawable() != null) {
            iconSize = Math.max(getIconDrawable().getIntrinsicWidth(), getIconDrawable().getIntrinsicHeight());
        }
        int iconOffset = (getCircleSize() - (iconSize > 0 ? iconSize : mIconSize)) / 2;
        int circleInsetHorizontal = hasShadow() ? mShadowRadius + Math.abs(mShadowXOffset) : 0;
        int circleInsetVertical = hasShadow() ? mShadowRadius + Math.abs(mShadowYOffset) : 0;

        if (mProgressBarEnabled) {
            circleInsetHorizontal += mProgressWidth;
            circleInsetVertical += mProgressWidth;
        }

        /*layerDrawable.setLayerInset(
                mShowShadow ? 1 : 0,
                circleInsetHorizontal,
                circleInsetVertical,
                circleInsetHorizontal,
                circleInsetVertical
        );*/
        layerDrawable.setLayerInset(
                hasShadow() ? 2 : 1,
                circleInsetHorizontal + iconOffset,
                circleInsetVertical + iconOffset,
                circleInsetHorizontal + iconOffset,
                circleInsetVertical + iconOffset
        );
        setBackgroundCompat(layerDrawable);
    }

    protected Drawable getIconDrawable() {
        if (mIcon != null) {
            return mIcon;
        } else {
            return new ColorDrawable(Color.TRANSPARENT);
        }
    }

    /**
     * Builds the state-dependent fill: a StateListDrawable for
     * disabled/pressed/normal, wrapped in a RippleDrawable (and clipped to
     * an oval outline) on Lollipop+.
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private Drawable createFillDrawable() {
        StateListDrawable drawable = new StateListDrawable();
        drawable.addState(new int[]{-android.R.attr.state_enabled}, createCircleDrawable(mColorDisabled));
        drawable.addState(new int[]{android.R.attr.state_pressed}, createCircleDrawable(mColorPressed));
        drawable.addState(new int[]{}, createCircleDrawable(mColorNormal));

        if (Util.hasLollipop()) {
            RippleDrawable ripple = new RippleDrawable(new ColorStateList(new int[][]{{}},
                    new int[]{mColorRipple}), drawable, null);
            setOutlineProvider(new ViewOutlineProvider() {
                @Override
                public void getOutline(View view, Outline outline) {
                    outline.setOval(0, 0, view.getWidth(), view.getHeight());
                }
            });
            setClipToOutline(true);
            mBackgroundDrawable = ripple;
            return ripple;
        }

        mBackgroundDrawable = drawable;
        return drawable;
    }

    private Drawable createCircleDrawable(int color) {
        CircleDrawable shapeDrawable = new CircleDrawable(new OvalShape());
shapeDrawable.getPaint().setColor(color);
        return shapeDrawable;
    }

    @SuppressWarnings("deprecation")
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    private void setBackgroundCompat(Drawable drawable) {
        if (Util.hasJellyBean()) {
            setBackground(drawable);
        } else {
            setBackgroundDrawable(drawable);
        }
    }

    /** Captures the initial x/y once, so progress-mode shifts can be undone. */
    private void saveButtonOriginalPosition() {
        if (!mButtonPositionSaved) {
            if (mOriginalX == -1) {
                mOriginalX = getX();
            }

            if (mOriginalY == -1) {
                mOriginalY = getY();
            }

            mButtonPositionSaved = true;
        }
    }

    /** Shifts the button by the progress ring width, or restores the original position. */
    private void updateButtonPosition() {
        float x;
        float y;
        if (mProgressBarEnabled) {
            x = mOriginalX > getX() ? getX() + mProgressWidth : getX() - mProgressWidth;
            y = mOriginalY > getY() ? getY() + mProgressWidth : getY() - mProgressWidth;
        } else {
            x = mOriginalX;
            y = mOriginalY;
        }
        setX(x);
        setY(y);
    }

    private void setupProgressBarPaints() {
        mBackgroundPaint.setColor(mProgressBackgroundColor);
        mBackgroundPaint.setStyle(Paint.Style.STROKE);
        mBackgroundPaint.setStrokeWidth(mProgressWidth);

        mProgressPaint.setColor(mProgressColor);
        mProgressPaint.setStyle(Paint.Style.STROKE);
        mProgressPaint.setStrokeWidth(mProgressWidth);
    }

    /** Arc bounds: inset by the shadow plus half the stroke width on each side. */
    private void setupProgressBounds() {
        int circleInsetHorizontal = hasShadow() ? getShadowX() : 0;
        int circleInsetVertical = hasShadow() ? getShadowY() : 0;
        mProgressCircleBounds = new RectF(
                circleInsetHorizontal + mProgressWidth / 2,
                circleInsetVertical + mProgressWidth / 2,
                calculateMeasuredWidth() - circleInsetHorizontal - mProgressWidth / 2,
                calculateMeasuredHeight() - circleInsetVertical - mProgressWidth / 2
        );
    }

    Animation getShowAnimation() {
        return mShowAnimation;
    }

    Animation getHideAnimation() {
        return mHideAnimation;
    }

    void playShowAnimation() {
        mHideAnimation.cancel();
        startAnimation(mShowAnimation);
    }

    void playHideAnimation() {
        mShowAnimation.cancel();
        startAnimation(mHideAnimation);
    }

    OnClickListener getOnClickListener() {
        return mClickListener;
    }

    Label getLabelView() {
        return (Label) getTag(R.id.fab_label);
    }

    void setColors(int colorNormal, int colorPressed, int colorRipple) {
        mColorNormal = colorNormal;
        mColorPressed = colorPressed;
        mColorRipple = colorRipple;
    }

    /** Forwards a "pressed" state to the background drawable (ripple on Lollipop+). */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    void onActionDown() {
        if (mBackgroundDrawable instanceof StateListDrawable) {
            StateListDrawable drawable = (StateListDrawable) mBackgroundDrawable;
            drawable.setState(new int[]{android.R.attr.state_enabled, android.R.attr.state_pressed});
        } else if (Util.hasLollipop()) {
            RippleDrawable ripple = (RippleDrawable) mBackgroundDrawable;
            ripple.setState(new int[]{android.R.attr.state_enabled, android.R.attr.state_pressed});
            ripple.setHotspot(calculateCenterX(), calculateCenterY());
            ripple.setVisible(true, true);
        }
    }

    /** Clears the "pressed" state on the background drawable. */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    void onActionUp() {
        if (mBackgroundDrawable instanceof StateListDrawable) {
            StateListDrawable drawable = (StateListDrawable) mBackgroundDrawable;
            drawable.setState(new int[]{android.R.attr.state_enabled});
        } else if (Util.hasLollipop()) {
            RippleDrawable ripple = (RippleDrawable) mBackgroundDrawable;
            ripple.setState(new int[]{android.R.attr.state_enabled});
            ripple.setHotspot(calculateCenterX(), calculateCenterY());
            ripple.setVisible(true, true);
        }
    }

    /**
     * Mirrors press state onto the attached label (if any) and feeds the
     * gesture detector; always delegates to the superclass for the actual
     * click handling.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (mClickListener != null && isEnabled()) {
            Label label = (Label) getTag(R.id.fab_label);
            if (label == null) return super.onTouchEvent(event);

            int action = event.getAction();
            switch (action) {
                case MotionEvent.ACTION_UP:
                    // label is known non-null here; the previous inner
                    // "if (label != null)" check was redundant and removed.
                    label.onActionUp();
                    break;
            }
            mGestureDetector.onTouchEvent(event);
        }
        return super.onTouchEvent(event);
    }

    GestureDetector mGestureDetector = new GestureDetector(getContext(),
            new GestureDetector.SimpleOnGestureListener() {

        @Override
        public boolean onDown(MotionEvent e) {
            Label label = (Label) getTag(R.id.fab_label);
            if (label != null) {
                label.onActionDown();
            }
            onActionDown();
            return super.onDown(e);
        }

        @Override
        public boolean onSingleTapUp(MotionEvent e) {
            Label label = (Label) getTag(R.id.fab_label);
            if (label != null) {
                label.onActionUp();
            }
            onActionUp();
            return super.onSingleTapUp(e);
        }
    });

    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();

        ProgressSavedState ss = new ProgressSavedState(superState);

        ss.mCurrentProgress = this.mCurrentProgress;
        ss.mTargetProgress = this.mTargetProgress;
        ss.mSpinSpeed = this.mSpinSpeed;
        ss.mProgressWidth = this.mProgressWidth;
        ss.mProgressColor = this.mProgressColor;
        ss.mProgressBackgroundColor = this.mProgressBackgroundColor;
        ss.mShouldProgressIndeterminate = this.mProgressIndeterminate;
        ss.mShouldSetProgress = this.mProgressBarEnabled && mProgress > 0 && !this.mProgressIndeterminate;
        ss.mProgress = this.mProgress;
        ss.mAnimateProgress = this.mAnimateProgress;
        ss.mShowProgressBackground = this.mShowProgressBackground;

        return ss;
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        if (!(state instanceof ProgressSavedState)) {
            super.onRestoreInstanceState(state);
            return;
        }

        ProgressSavedState ss = (ProgressSavedState) state;
        super.onRestoreInstanceState(ss.getSuperState());

        this.mCurrentProgress = ss.mCurrentProgress;
        this.mTargetProgress = ss.mTargetProgress;
        this.mSpinSpeed = ss.mSpinSpeed;
        this.mProgressWidth = ss.mProgressWidth;
        this.mProgressColor =
ss.mProgressColor;
        this.mProgressBackgroundColor = ss.mProgressBackgroundColor;
        this.mShouldProgressIndeterminate = ss.mShouldProgressIndeterminate;
        this.mShouldSetProgress = ss.mShouldSetProgress;
        this.mProgress = ss.mProgress;
        this.mAnimateProgress = ss.mAnimateProgress;
        this.mShowProgressBackground = ss.mShowProgressBackground;
        // Reset the animation clock so restored progress animates from now.
        this.mLastTimeAnimated = SystemClock.uptimeMillis();
    }

    /**
     * Oval fill that sizes itself to the circle area inside the shadow
     * (and progress ring) insets on every draw. Non-static inner class:
     * reads the outer button's shadow/progress fields.
     */
    private class CircleDrawable extends ShapeDrawable {

        private int circleInsetHorizontal;
        private int circleInsetVertical;

        private CircleDrawable() {
        }

        private CircleDrawable(Shape s) {
            super(s);
            circleInsetHorizontal = hasShadow() ? mShadowRadius + Math.abs(mShadowXOffset) : 0;
            circleInsetVertical = hasShadow() ? mShadowRadius + Math.abs(mShadowYOffset) : 0;

            if (mProgressBarEnabled) {
                circleInsetHorizontal += mProgressWidth;
                circleInsetVertical += mProgressWidth;
            }
        }

        @Override
        public void draw(Canvas canvas) {
            setBounds(circleInsetHorizontal, circleInsetVertical, calculateMeasuredWidth()
                    - circleInsetHorizontal, calculateMeasuredHeight() - circleInsetVertical);
            super.draw(canvas);
        }
    }

    /**
     * Software-rendered drop shadow: draws a shadowed circle, then punches
     * the circle itself out with a CLEAR xfermode so only the shadow remains.
     */
    private class Shadow extends Drawable {

        private Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        private Paint mErase = new Paint(Paint.ANTI_ALIAS_FLAG);
        private float mRadius;

        private Shadow() {
            this.init();
        }

        private void init() {
            // setShadowLayer requires software rendering.
            setLayerType(LAYER_TYPE_SOFTWARE, null);
            mPaint.setStyle(Paint.Style.FILL);
            mPaint.setColor(mColorNormal);

            mErase.setXfermode(PORTER_DUFF_CLEAR);

            if (!isInEditMode()) {
                mPaint.setShadowLayer(mShadowRadius, mShadowXOffset, mShadowYOffset, mShadowColor);
            }

            mRadius = getCircleSize() / 2;

            if (mProgressBarEnabled && mShowProgressBackground) {
                mRadius += mProgressWidth;
            }
        }

        @Override
        public void draw(Canvas canvas) {
            canvas.drawCircle(calculateCenterX(), calculateCenterY(), mRadius, mPaint);
            canvas.drawCircle(calculateCenterX(), calculateCenterY(), mRadius, mErase);
        }

        @Override
        public void setAlpha(int alpha) {
        }

        @Override
        public void setColorFilter(ColorFilter cf) {
        }

        @Override
        public int getOpacity() {
            // NOTE(review): 0 is PixelFormat.UNKNOWN — presumably
            // PixelFormat.TRANSLUCENT was intended; confirm before changing.
            return 0;
        }
    }

    /**
     * Parcelable snapshot of the progress state. Field order in
     * writeToParcel and the Parcel constructor must match exactly.
     */
    static class ProgressSavedState extends BaseSavedState {

        float mCurrentProgress;
        float mTargetProgress;
        float mSpinSpeed;
        int mProgress;
        int mProgressWidth;
        int mProgressColor;
        int mProgressBackgroundColor;
        boolean mProgressBarEnabled;
        boolean mProgressBarVisibilityChanged;
        boolean mProgressIndeterminate;
        boolean mShouldProgressIndeterminate;
        boolean mShouldSetProgress;
        boolean mAnimateProgress;
        boolean mShowProgressBackground;

        ProgressSavedState(Parcelable superState) {
            super(superState);
        }

        private ProgressSavedState(Parcel in) {
            super(in);
            this.mCurrentProgress = in.readFloat();
            this.mTargetProgress = in.readFloat();
            this.mProgressBarEnabled = in.readInt() != 0;
            this.mSpinSpeed = in.readFloat();
            this.mProgress = in.readInt();
            this.mProgressWidth = in.readInt();
            this.mProgressColor = in.readInt();
            this.mProgressBackgroundColor = in.readInt();
            this.mProgressBarVisibilityChanged = in.readInt() != 0;
            this.mProgressIndeterminate = in.readInt() != 0;
            this.mShouldProgressIndeterminate = in.readInt() != 0;
            this.mShouldSetProgress = in.readInt() != 0;
            this.mAnimateProgress = in.readInt() != 0;
            this.mShowProgressBackground = in.readInt() != 0;
        }

        @Override
        public void writeToParcel(Parcel out, int flags) {
            super.writeToParcel(out, flags);
            out.writeFloat(this.mCurrentProgress);
            out.writeFloat(this.mTargetProgress);
            out.writeInt((mProgressBarEnabled ? 1 : 0));
            out.writeFloat(this.mSpinSpeed);
            out.writeInt(this.mProgress);
            out.writeInt(this.mProgressWidth);
            out.writeInt(this.mProgressColor);
            out.writeInt(this.mProgressBackgroundColor);
            out.writeInt(this.mProgressBarVisibilityChanged ? 1 : 0);
            out.writeInt(this.mProgressIndeterminate ? 1 : 0);
            out.writeInt(this.mShouldProgressIndeterminate ? 1 : 0);
            out.writeInt(this.mShouldSetProgress ? 1 : 0);
            out.writeInt(this.mAnimateProgress ? 1 : 0);
            out.writeInt(this.mShowProgressBackground ? 1 : 0);
        }

        public static final Parcelable.Creator<ProgressSavedState> CREATOR =
                new Parcelable.Creator<ProgressSavedState>() {

                    public ProgressSavedState createFromParcel(Parcel in) {
                        return new ProgressSavedState(in);
                    }

                    public ProgressSavedState[] newArray(int size) {
                        return new ProgressSavedState[size];
                    }
                };
    }

    /* ===== API methods ===== */

    @Override
    public void setImageDrawable(Drawable drawable) {
        if (mIcon != drawable) {
            mIcon = drawable;
            updateBackground();
        }
    }

    @Override
    public void setImageResource(int resId) {
        // NOTE(review): getDrawable returns a new instance each call, so the
        // identity compare below is effectively always true and
        // updateBackground always runs; also Resources.getDrawable(int) is
        // deprecated — confirm before changing either.
        Drawable drawable = getResources().getDrawable(resId);
        if (mIcon != drawable) {
            mIcon = drawable;
            updateBackground();
        }
    }

    @Override
    public void setOnClickListener(final OnClickListener l) {
        super.setOnClickListener(l);
        mClickListener = l;
        // Clicking the label delegates to this button's listener.
        View label = (View) getTag(R.id.fab_label);
        if (label != null) {
            label.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mClickListener != null) {
                        mClickListener.onClick(FloatingActionButton.this);
                    }
                }
            });
        }
    }

    /**
     * Sets the size of the <b>FloatingActionButton</b> and invalidates its layout.
     *
     * @param size size of the <b>FloatingActionButton</b>. Accepted values: SIZE_NORMAL, SIZE_MINI.
*/ public void setButtonSize(int size) { if (size != SIZE_NORMAL && size != SIZE_MINI) { throw new IllegalArgumentException("Use @FabSize constants only!"); } if (mFabSize != size) { mFabSize = size; updateBackground(); } } public int getButtonSize() { return mFabSize; } public void setColorNormal(int color) { if (mColorNormal != color) { mColorNormal = color; updateBackground(); } } public void setColorNormalResId(int colorResId) { setColorNormal(getResources().getColor(colorResId)); } public int getColorNormal() { return mColorNormal; } public void setColorPressed(int color) { if (color != mColorPressed) { mColorPressed = color; updateBackground(); } } public void setColorPressedResId(int colorResId) { setColorPressed(getResources().getColor(colorResId)); } public int getColorPressed() { return mColorPressed; } public void setColorRipple(int color) { if (color != mColorRipple) { mColorRipple = color; updateBackground(); } } public void setColorRippleResId(int colorResId) { setColorRipple(getResources().getColor(colorResId)); } public int getColorRipple() { return mColorRipple; } public void setShowShadow(boolean show) { if (mShowShadow != show) { mShowShadow = show; updateBackground(); } } public boolean hasShadow() { return !mUsingElevation && mShowShadow; } /** * Sets the shadow radius of the <b>FloatingActionButton</b> and invalidates its layout. * * @param dimenResId the resource identifier of the dimension */ public void setShadowRadius(int dimenResId) { int shadowRadius = getResources().getDimensionPixelSize(dimenResId); if (mShadowRadius != shadowRadius) { mShadowRadius = shadowRadius; requestLayout(); updateBackground(); } } /** * Sets the shadow radius of the <b>FloatingActionButton</b> and invalidates its layout. * <p> * Must be specified in density-independent (dp) pixels, which are then converted into actual * pixels (px). 
* * @param shadowRadiusDp shadow radius specified in density-independent (dp) pixels */ public void setShadowRadius(float shadowRadiusDp) { mShadowRadius = Util.dpToPx(getContext(), shadowRadiusDp); requestLayout(); updateBackground(); } public int getShadowRadius() { return mShadowRadius; } /** * Sets the shadow x offset of the <b>FloatingActionButton</b> and invalidates its layout. * * @param dimenResId the resource identifier of the dimension */ public void setShadowXOffset(int dimenResId) { int shadowXOffset = getResources().getDimensionPixelSize(dimenResId); if (mShadowXOffset != shadowXOffset) { mShadowXOffset = shadowXOffset; requestLayout(); updateBackground(); } } /** * Sets the shadow x offset of the <b>FloatingActionButton</b> and invalidates its layout. * <p> * Must be specified in density-independent (dp) pixels, which are then converted into actual * pixels (px). * * @param shadowXOffsetDp shadow radius specified in density-independent (dp) pixels */ public void setShadowXOffset(float shadowXOffsetDp) { mShadowXOffset = Util.dpToPx(getContext(), shadowXOffsetDp); requestLayout(); updateBackground(); } public int getShadowXOffset() { return mShadowXOffset; } /** * Sets the shadow y offset of the <b>FloatingActionButton</b> and invalidates its layout. * * @param dimenResId the resource identifier of the dimension */ public void setShadowYOffset(int dimenResId) { int shadowYOffset = getResources().getDimensionPixelSize(dimenResId); if (mShadowYOffset != shadowYOffset) { mShadowYOffset = shadowYOffset; requestLayout(); updateBackground(); } } /** * Sets the shadow y offset of the <b>FloatingActionButton</b> and invalidates its layout. * <p> * Must be specified in density-independent (dp) pixels, which are then converted into actual * pixels (px). 
* * @param shadowYOffsetDp shadow radius specified in density-independent (dp) pixels */ public void setShadowYOffset(float shadowYOffsetDp) { mShadowYOffset = Util.dpToPx(getContext(), shadowYOffsetDp); requestLayout(); updateBackground(); } public int getShadowYOffset() { return mShadowYOffset; } public void setShadowColorResource(int colorResId) { int shadowColor = getResources().getColor(colorResId); if (mShadowColor != shadowColor) { mShadowColor = shadowColor; updateBackground(); } } public void setShadowColor(int color) { if (mShadowColor != color) { mShadowColor = color; updateBackground(); } } public int getShadowColor() { return mShadowColor; } /** * Checks whether <b>FloatingActionButton</b> is hidden * * @return true if <b>FloatingActionButton</b> is hidden, false otherwise */ public boolean isHidden() { return getVisibility() == INVISIBLE; } /** * Makes the <b>FloatingActionButton</b> to appear and sets its visibility to {@link #VISIBLE} * * @param animate if true - plays "show animation" */ public void show(boolean animate) { if (isHidden()) { if (animate) { playShowAnimation(); } super.setVisibility(VISIBLE); } } /** * Makes the <b>FloatingActionButton</b> to disappear and sets its visibility to {@link #INVISIBLE} * * @param animate if true - plays "hide animation" */ public void hide(boolean animate) { if (!isHidden()) { if (animate) { playHideAnimation(); } super.setVisibility(INVISIBLE); } } public void toggle(boolean animate) { if (isHidden()) { show(animate); } else { hide(animate); } } public void setLabelText(String text) { mLabelText = text; TextView labelView = getLabelView(); if (labelView != null) { labelView.setText(text); } } public String getLabelText() { return mLabelText; } public void setShowAnimation(Animation showAnimation) { mShowAnimation = showAnimation; } public void setHideAnimation(Animation hideAnimation) { mHideAnimation = hideAnimation; } public void setLabelVisibility(int visibility) { Label labelView = getLabelView(); if 
(labelView != null) { labelView.setVisibility(visibility); labelView.setHandleVisibilityChanges(visibility == VISIBLE); } } public int getLabelVisibility() { TextView labelView = getLabelView(); if (labelView != null) { return labelView.getVisibility(); } return -1; } @Override public void setElevation(float elevation) { if (Util.hasLollipop() && elevation > 0) { super.setElevation(elevation); if (!isInEditMode()) { mUsingElevation = true; mShowShadow = false; } updateBackground(); } } /** * Sets the shadow color and radius to mimic the native elevation. * * <p><b>API 21+</b>: Sets the native elevation of this view, in pixels. Updates margins to * make the view hold its position in layout across different platform versions.</p> */ @TargetApi(Build.VERSION_CODES.LOLLIPOP) public void setElevationCompat(float elevation) { mShadowColor = 0x26000000; mShadowRadius = Math.round(elevation / 2); mShadowXOffset = 0; mShadowYOffset = Math.round(mFabSize == SIZE_NORMAL ? elevation : elevation / 2); if (Util.hasLollipop()) { super.setElevation(elevation); mUsingElevationCompat = true; mShowShadow = false; updateBackground(); ViewGroup.LayoutParams layoutParams = getLayoutParams(); if (layoutParams != null) { setLayoutParams(layoutParams); } } else { mShowShadow = true; updateBackground(); } } /** * <p>Change the indeterminate mode for the progress bar. 
In indeterminate * mode, the progress is ignored and the progress bar shows an infinite * animation instead.</p> * * @param indeterminate true to enable the indeterminate mode */ public synchronized void setIndeterminate(boolean indeterminate) { if (!indeterminate) { mCurrentProgress = 0.0f; } mProgressBarEnabled = indeterminate; mShouldUpdateButtonPosition = true; mProgressIndeterminate = indeterminate; mLastTimeAnimated = SystemClock.uptimeMillis(); setupProgressBounds(); saveButtonOriginalPosition(); updateBackground(); } public synchronized void setMax(int max) { mProgressMax = max; } public synchronized int getMax() { return mProgressMax; } public synchronized void setProgress(int progress, boolean animate) { if (mProgressIndeterminate) return; mProgress = progress; mAnimateProgress = animate; if (!mButtonPositionSaved) { mShouldSetProgress = true; return; } mProgressBarEnabled = true; mShouldUpdateButtonPosition = true; setupProgressBounds(); saveButtonOriginalPosition(); updateBackground(); if (progress < 0) { progress = 0; } else if (progress > mProgressMax) { progress = mProgressMax; } if (progress == mTargetProgress) { return; } mTargetProgress = mProgressMax > 0 ? (progress / (float) mProgressMax) * 360 : 0; mLastTimeAnimated = SystemClock.uptimeMillis(); if (!animate) { mCurrentProgress = mTargetProgress; } invalidate(); } public synchronized int getProgress() { return mProgressIndeterminate ? 
0 : mProgress; } public synchronized void hideProgress() { mProgressBarEnabled = false; mShouldUpdateButtonPosition = true; updateBackground(); } public synchronized void setShowProgressBackground(boolean show) { mShowProgressBackground = show; } public synchronized boolean isProgressBackgroundShown() { return mShowProgressBackground; } @Override public void setEnabled(boolean enabled) { super.setEnabled(enabled); Label label = (Label) getTag(R.id.fab_label); if (label != null) { label.setEnabled(enabled); } } @Override public void setVisibility(int visibility) { super.setVisibility(visibility); Label label = (Label) getTag(R.id.fab_label); if (label != null) { label.setVisibility(visibility); } } }
package com.j256.ormlite.jdbc; import java.io.IOException; import java.sql.DriverManager; import java.sql.SQLException; import java.util.Properties; import com.j256.ormlite.db.DatabaseType; import com.j256.ormlite.db.DatabaseTypeUtils; import com.j256.ormlite.logger.Logger; import com.j256.ormlite.logger.LoggerFactory; import com.j256.ormlite.misc.IOUtils; import com.j256.ormlite.support.BaseConnectionSource; import com.j256.ormlite.support.ConnectionSource; import com.j256.ormlite.support.DatabaseConnection; import com.j256.ormlite.support.DatabaseConnectionProxyFactory; /** * Implementation of the ConnectionSource interface that supports what is needed by ORMLite. This is not thread-safe nor * synchronized and under the covers uses a single database connection. For other dataSources, see the * {@link DataSourceConnectionSource} class. * * <p> * <b> NOTE: </b> If you are using the Spring type wiring in Java, {@link #initialize} should be called after all of the * set methods. In Spring XML, init-method="initialize" should be used. * </p> * * @author graywatson */ public class JdbcConnectionSource extends BaseConnectionSource implements ConnectionSource { private static Logger logger = LoggerFactory.getLogger(JdbcConnectionSource.class); private String url; private String username; private String password; protected DatabaseConnection connection; protected DatabaseType databaseType; protected boolean initialized = false; private static DatabaseConnectionProxyFactory connectionProxyFactory; /** * Constructor for Spring type wiring if you are using the set methods. If you are using Spring then your should * use: init-method="initialize" */ public JdbcConnectionSource() { // for spring type wiring } /** * Create a data source for a particular database URL. * * @param url * The database URL which should start jdbc:... * @throws SQLException * If the driver associated with the database driver is not found in the classpath. 
*/ public JdbcConnectionSource(String url) throws SQLException { this(url, null, null, null); } /** * Create a data source for a particular database URL. The databaseType is usually determined from the databaseUrl * so most users should call {@link #JdbcConnectionSource(String)} instead. If, however, you need to force the class * to use a specific DatabaseType then this constructor should be used. * * @param url * The database URL which should start jdbc:... * @param databaseType * Database to associate with this connection source. * @throws SQLException * If the driver associated with the database driver is not found in the classpath. */ public JdbcConnectionSource(String url, DatabaseType databaseType) throws SQLException { this(url, null, null, databaseType); } /** * Create a data source for a particular database URL with username and password permissions. * * @param url * The database URL which should start jdbc:... * @param username * Username for permissions on the database. * @param password * Password for permissions on the database. * @throws SQLException * If the driver associated with the database driver is not found in the classpath. */ public JdbcConnectionSource(String url, String username, String password) throws SQLException { this(url, username, password, null); } /** * Create a data source for a particular database URL with username and password permissions. The databaseType is * usually determined from the databaseUrl so most users should call * {@link #JdbcConnectionSource(String, String, String)} instead. If, however, you need to force the class to use a * specific DatabaseType then this constructor should be used. * * @param url * The database URL which should start jdbc:... * @param username * Username for permissions on the database. * @param password * Password for permissions on the database. * @param databaseType * Database to associate with this connection source. 
* @throws SQLException * If the driver associated with the database driver is not found in the classpath. */ public JdbcConnectionSource(String url, String username, String password, DatabaseType databaseType) throws SQLException { this.url = url; this.username = username; this.password = password; this.databaseType = databaseType; initialize(); } /** * Initialize the class after the setters have been called. If you are using the no-arg constructor and Spring type * wiring, this should be called after all of the set methods. * * @throws SQLException * If the driver associated with the database URL is not found in the classpath. */ public void initialize() throws SQLException { if (initialized) { return; } if (url == null) { throw new SQLException("url was never set on " + getClass().getSimpleName()); } if (databaseType == null) { databaseType = DatabaseTypeUtils.createDatabaseType(url); } databaseType.loadDriver(); databaseType.setDriver(DriverManager.getDriver(url)); initialized = true; } public void close() throws IOException { if (!initialized) { throw new IOException(getClass().getSimpleName() + " was not initialized properly"); } if (connection != null) { connection.close(); logger.debug("closed connection #{}", connection.hashCode()); connection = null; } } public void closeQuietly() { IOUtils.closeQuietly(this); } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } public DatabaseConnection getReadOnlyConnection() throws SQLException { if (!initialized) { throw new SQLException(getClass().getSimpleName() + " was not initialized properly"); } return getReadWriteConnection(); } public DatabaseConnection getReadWriteConnection() throws SQLException { if (!initialized) { throw new SQLException(getClass().getSimpleName() + " was not initialized properly"); } if (connection != null) { if (connection.isClosed()) { throw new SQLException("Connection has already been closed"); } else { return connection; } } connection = 
makeConnection(logger); return connection; } public void releaseConnection(DatabaseConnection connection) throws SQLException { if (!initialized) { throw new SQLException(getClass().getSimpleName() + " was not initialized properly"); } // noop right now } @SuppressWarnings("unused") public boolean saveSpecialConnection(DatabaseConnection connection) throws SQLException { // noop since this is a single connection source return true; } public void clearSpecialConnection(DatabaseConnection connection) { // noop since this is a single connection source } public DatabaseType getDatabaseType() { if (!initialized) { throw new IllegalStateException(getClass().getSimpleName() + " was not initialized properly"); } return databaseType; } public boolean isOpen() { return connection != null; } public boolean isSingleConnection() { return true; } // not required public void setUsername(String username) { this.username = username; } // not required public void setPassword(String password) { this.password = password; } // not required public void setDatabaseType(DatabaseType databaseType) { this.databaseType = databaseType; } /** * Set to enable connection proxying. Set to null to disable. */ public static void setDatabaseConnectionProxyFactory(DatabaseConnectionProxyFactory connectionProxyFactory) { JdbcConnectionSource.connectionProxyFactory = connectionProxyFactory; } /** * Make a connection to the database. * * @param logger * This is here so we can use the right logger associated with the sub-class. 
*/ @SuppressWarnings("resource") protected DatabaseConnection makeConnection(Logger logger) throws SQLException { Properties properties = new Properties(); if (username != null) { properties.setProperty("user", username); } if (password != null) { properties.setProperty("password", password); } DatabaseConnection connection = new JdbcDatabaseConnection(DriverManager.getConnection(url, properties)); // by default auto-commit is set to true connection.setAutoCommit(true); if (connectionProxyFactory != null) { connection = connectionProxyFactory.createProxy(connection); } logger.debug("opened connection to {} got #{}", url, connection.hashCode()); return connection; } }
/* * JBoss, Home of Professional Open Source * Copyright 2012, Red Hat, Inc. and/or its affiliates, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hibernate.validator.test.internal.metadata.provider; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.Target; import java.util.List; import java.util.Map; import javax.validation.Constraint; import javax.validation.ConstraintDeclarationException; import javax.validation.Payload; import javax.validation.Valid; import javax.validation.constraints.NotNull; import javax.validation.groups.ConvertGroup; import javax.validation.groups.Default; import javax.validation.metadata.ConstraintDescriptor; import org.joda.time.DateMidnight; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import org.hibernate.validator.constraints.ScriptAssert; import org.hibernate.validator.internal.engine.DefaultParameterNameProvider; import org.hibernate.validator.internal.engine.valuehandling.UnwrapMode; import org.hibernate.validator.internal.metadata.core.AnnotationProcessingOptionsImpl; import org.hibernate.validator.internal.metadata.core.ConstraintHelper; import org.hibernate.validator.internal.metadata.core.MetaConstraint; import 
org.hibernate.validator.internal.metadata.location.ConstraintLocation; import org.hibernate.validator.internal.metadata.provider.AnnotationMetaDataProvider; import org.hibernate.validator.internal.metadata.raw.BeanConfiguration; import org.hibernate.validator.internal.metadata.raw.ConfigurationSource; import org.hibernate.validator.internal.metadata.raw.ConstrainedElement.ConstrainedElementKind; import org.hibernate.validator.internal.metadata.raw.ConstrainedExecutable; import org.hibernate.validator.internal.metadata.raw.ConstrainedField; import org.hibernate.validator.internal.metadata.raw.ConstrainedType; import org.hibernate.validator.testutil.TestForIssue; import org.hibernate.validator.valuehandling.UnwrapValidatedValue; import static java.lang.annotation.ElementType.TYPE; import static java.lang.annotation.RetentionPolicy.RUNTIME; import static org.fest.assertions.Assertions.assertThat; import static org.hibernate.validator.internal.util.CollectionHelper.newHashMap; import static org.testng.Assert.assertEquals; /** * Unit test for {@link AnnotationMetaDataProvider}. * * @author Gunnar Morling */ public class AnnotationMetaDataProviderTest extends AnnotationMetaDataProviderTestBase { private AnnotationMetaDataProvider provider; @BeforeMethod public void setUpProvider() { provider = new AnnotationMetaDataProvider( new ConstraintHelper(), new DefaultParameterNameProvider(), new AnnotationProcessingOptionsImpl() ); } @Test public void testGetConstructorMetaData() throws Exception { List<BeanConfiguration<? 
super Foo>> beanConfigurations = provider.getBeanConfigurationForHierarchy( Foo.class ); assertThat( beanConfigurations ).hasSize( 2 ); ConstrainedExecutable constructor = findConstrainedConstructor( beanConfigurations, Foo.class, String.class ); assertThat( constructor.getKind() ).isEqualTo( ConstrainedElementKind.CONSTRUCTOR ); assertThat( constructor.isConstrained() ).isTrue(); assertThat( constructor.isCascading() ).isFalse(); assertThat( constructor.getConstraints() ).hasSize( 1 ); MetaConstraint<?> constraint = constructor.getConstraints().iterator().next(); assertThat( constraint.getDescriptor().getAnnotation().annotationType() ).isEqualTo( NotNull.class ); assertThat( constraint.getElementType() ).isEqualTo( ElementType.CONSTRUCTOR ); } @Test public void testGetCrossParameterMetaData() throws Exception { //when List<BeanConfiguration<? super Calendar>> beanConfigurations = provider.getBeanConfigurationForHierarchy( Calendar.class ); ConstrainedExecutable createEvent = findConstrainedMethod( beanConfigurations, Calendar.class, "createEvent", DateMidnight.class, DateMidnight.class ); //then assertThat( createEvent.isConstrained() ).isTrue(); assertThat( createEvent.isCascading() ).isFalse(); assertThat( createEvent.getKind() ).isEqualTo( ConstrainedElementKind.METHOD ); assertThat( createEvent.getConstraints() ).as( "No return value constraints expected" ).isEmpty(); assertThat( createEvent.getCrossParameterConstraints() ).hasSize( 1 ); ConstraintLocation location = createEvent.getLocation(); assertThat( location.getMember() ).isEqualTo( Calendar.class.getMethod( "createEvent", DateMidnight.class, DateMidnight.class ) ); MetaConstraint<?> constraint = createEvent.getCrossParameterConstraints().iterator().next(); assertThat( constraint.getDescriptor() .getAnnotation() .annotationType() ).isEqualTo( ConsistentDateParameters.class ); assertThat( constraint.getLocation().getTypeForValidatorResolution() ).isEqualTo( Object[].class ); } @Test public void 
configurationsHaveAnnotationSource() { for ( BeanConfiguration<? super User> configuration : provider.getBeanConfigurationForHierarchy( User.class ) ) { assertThat( configuration.getSource() ).isEqualTo( ConfigurationSource.ANNOTATION ); } } @Test public void noGroupConversionOnField() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedField field = findConstrainedField( beanConfigurations, User.class, "mail" ); //then assertThat( field.getGroupConversions() ).isEmpty(); } @Test public void singleGroupConversionOnField() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedField field = findConstrainedField( beanConfigurations, User.class, "phone" ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicNumber.class ); assertThat( field.getGroupConversions() ).isEqualTo( expected ); } @Test public void multipleGroupConversionsOnField() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedField field = findConstrainedField( beanConfigurations, User.class, "address" ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicPostal.class ); expected.put( Complete.class, FullPostal.class ); assertThat( field.getGroupConversions() ).isEqualTo( expected ); } @Test(expectedExceptions = ConstraintDeclarationException.class, expectedExceptionsMessageRegExp = "HV000124.*") public void multipleGroupConversionsOnFieldWithSameFromCauseException() { provider.getBeanConfigurationForHierarchy( User2.class ); } @Test public void noGroupConversionOnMethod() throws Exception { //when List<BeanConfiguration<? 
super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable method = findConstrainedMethod( beanConfigurations, User.class, "getMail1" ); //then assertThat( method.getGroupConversions() ).isEmpty(); } @Test public void singleGroupConversionOnMethod() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable method = findConstrainedMethod( beanConfigurations, User.class, "getPhone1" ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicNumber.class ); assertThat( method.getGroupConversions() ).isEqualTo( expected ); } @Test public void multipleGroupConversionsOnMethod() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable method = findConstrainedMethod( beanConfigurations, User.class, "getAddress1" ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicPostal.class ); expected.put( Complete.class, FullPostal.class ); assertThat( method.getGroupConversions() ).isEqualTo( expected ); } @Test public void noGroupConversionOnParameter() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable method = findConstrainedMethod( beanConfigurations, User.class, "setMail1", String.class ); //then assertThat( method.getParameterMetaData( 0 ).getGroupConversions() ).isEmpty(); } @Test public void singleGroupConversionOnParameter() throws Exception { //when List<BeanConfiguration<? 
super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable method = findConstrainedMethod( beanConfigurations, User.class, "setPhone1", PhoneNumber.class ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicNumber.class ); assertThat( method.getParameterMetaData( 0 ).getGroupConversions() ).isEqualTo( expected ); } @Test public void multipleGroupConversionsOnParameter() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable method = findConstrainedMethod( beanConfigurations, User.class, "setAddress1", Address.class ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicPostal.class ); expected.put( Complete.class, FullPostal.class ); assertThat( method.getParameterMetaData( 0 ).getGroupConversions() ).isEqualTo( expected ); } @Test(expectedExceptions = ConstraintDeclarationException.class, expectedExceptionsMessageRegExp = "HV000124.*") public void multipleGroupConversionsOnParameterWithSameFromCauseException() { provider.getBeanConfigurationForHierarchy( User4.class ); } @Test public void singleGroupConversionOnConstructor() throws Exception { //when List<BeanConfiguration<? super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable constructor = findConstrainedConstructor( beanConfigurations, User.class ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicNumber.class ); assertThat( constructor.getGroupConversions() ).isEqualTo( expected ); } @Test public void multipleGroupConversionsOnConstructorParameter() throws Exception { //when List<BeanConfiguration<? 
super User>> beanConfigurations = provider.getBeanConfigurationForHierarchy( User.class ); ConstrainedExecutable constructor = findConstrainedConstructor( beanConfigurations, User.class, Address.class ); //then Map<Class<?>, Class<?>> expected = newHashMap(); expected.put( Default.class, BasicPostal.class ); expected.put( Complete.class, FullPostal.class ); assertThat( constructor.getParameterMetaData( 0 ).getGroupConversions() ).isEqualTo( expected ); } @Test @TestForIssue(jiraKey = "HV-626") public void onlyLocallyDefinedConstraintsAreConsidered() { List<BeanConfiguration<? super Person>> beanConfigurations = provider.getBeanConfigurationForHierarchy( Person.class ); ConstrainedType personType = findConstrainedType( beanConfigurations, Person.class ); assertThat( personType.getConstraints() ).hasSize( 1 ); ConstraintDescriptor<?> constraintInSubType = personType.getConstraints() .iterator() .next() .getDescriptor(); assertThat( constraintInSubType.getAnnotation().annotationType() ).isEqualTo( ScriptAssert.class ); ConstrainedType personBaseType = findConstrainedType( beanConfigurations, PersonBase.class ); assertThat( personBaseType.getConstraints() ).hasSize( 1 ); ConstraintDescriptor<?> constraintInSuperType = personBaseType.getConstraints() .iterator() .next() .getDescriptor(); assertThat( constraintInSuperType.getAnnotation().annotationType() ).isEqualTo( ClassLevelConstraint.class ); } @Test(expectedExceptions = ConstraintDeclarationException.class, expectedExceptionsMessageRegExp = "HV000124.*") public void groupConversionWithSameFromInSingleAndListAnnotationCauseException() { provider.getBeanConfigurationForHierarchy( User3.class ); } @Test @TestForIssue(jiraKey = "HV-819") public void unwrapValidatedValueOnField() throws Exception { List<BeanConfiguration<? 
super GolfPlayer>> beanConfigurations = provider.getBeanConfigurationForHierarchy( GolfPlayer.class );

	ConstrainedField constrainedField = findConstrainedField( beanConfigurations, GolfPlayer.class, "name" );
	assertEquals( constrainedField.unwrapMode(), UnwrapMode.UNWRAP );
}

// HV-925: a field without an explicit @UnwrapValidatedValue annotation defaults to AUTOMATIC.
@Test
@TestForIssue(jiraKey = "HV-925")
public void testAutomaticUnwrapValidatedValueOnField() throws Exception {
	List<BeanConfiguration<? super GolfPlayer>> beanConfigurations = provider.getBeanConfigurationForHierarchy( GolfPlayer.class );

	ConstrainedField constrainedField = findConstrainedField( beanConfigurations, GolfPlayer.class, "nickname" );
	assertEquals( constrainedField.unwrapMode(), UnwrapMode.AUTOMATIC );
}

// HV-819: @UnwrapValidatedValue on a getter maps to UnwrapMode.UNWRAP.
@Test
@TestForIssue(jiraKey = "HV-819")
public void unwrapValidatedValueOnProperty() throws Exception {
	List<BeanConfiguration<? super GolfPlayer>> beanConfigurations = provider.getBeanConfigurationForHierarchy( GolfPlayer.class );

	ConstrainedExecutable constrainedMethod = findConstrainedMethod( beanConfigurations, GolfPlayer.class, "getHandicap" );
	assertEquals( constrainedMethod.unwrapMode(), UnwrapMode.UNWRAP );
}

// HV-925: @UnwrapValidatedValue(false) on a getter forces SKIP_UNWRAP.
@Test
@TestForIssue(jiraKey = "HV-925")
public void testSkipUnwrapValidatedValueOnProperty() throws Exception {
	List<BeanConfiguration<? super GolfPlayer>> beanConfigurations = provider.getBeanConfigurationForHierarchy( GolfPlayer.class );

	ConstrainedExecutable constrainedMethod = findConstrainedMethod( beanConfigurations, GolfPlayer.class, "getScore" );
	assertEquals( constrainedMethod.unwrapMode(), UnwrapMode.SKIP_UNWRAP );
}

// HV-819: @UnwrapValidatedValue on a (non-getter) method return value.
@Test
@TestForIssue(jiraKey = "HV-819")
public void unwrapValidatedValueOnMethod() throws Exception {
	List<BeanConfiguration<? super GolfPlayer>> beanConfigurations = provider.getBeanConfigurationForHierarchy( GolfPlayer.class );

	ConstrainedExecutable constrainedMethod = findConstrainedMethod( beanConfigurations, GolfPlayer.class, "enterTournament" );
	assertEquals( constrainedMethod.unwrapMode(), UnwrapMode.UNWRAP );
}

// HV-819: @UnwrapValidatedValue on a constructor (return value).
@Test
@TestForIssue(jiraKey = "HV-819")
public void unwrapValidatedValueOnConstructor() throws Exception {
	@SuppressWarnings("rawtypes")
	List<BeanConfiguration<? super Wrapper>> beanConfigurations = provider.getBeanConfigurationForHierarchy( Wrapper.class );

	ConstrainedExecutable constrainedConstructor = findConstrainedConstructor( beanConfigurations, Wrapper.class, Object.class );
	assertEquals( constrainedConstructor.unwrapMode(), UnwrapMode.UNWRAP );
}

// HV-819: @UnwrapValidatedValue on a method parameter.
@Test
@TestForIssue(jiraKey = "HV-819")
public void unwrapValidatedValueOnParameter() throws Exception {
	List<BeanConfiguration<? super GolfPlayer>> beanConfigurations = provider.getBeanConfigurationForHierarchy( GolfPlayer.class );

	ConstrainedExecutable constrainedMethod = findConstrainedMethod( beanConfigurations, GolfPlayer.class, "practice", Wrapper.class );
	assertEquals( constrainedMethod.getParameterMetaData( 0 ).unwrapMode(), UnwrapMode.UNWRAP );
}

// ---------------------------------------------------------------------------
// Test fixture types
// ---------------------------------------------------------------------------

// Constructor with constrained return value and constrained parameter.
private static class Foo {

	@NotNull
	public Foo(@NotNull String foo) {
	}
}

// Cross-parameter constraint fixture.
private static class Calendar {

	@ConsistentDateParameters
	public void createEvent(DateMidnight start, DateMidnight end) {
	}
}

public interface Complete extends Default {
}

public interface BasicPostal {
}

public interface FullPostal extends BasicPostal {
}

private interface BasicNumber {
}

private static class Address {
}

private static class PhoneNumber {
}

// Fixture exercising group conversions on fields, getters, setters and constructors.
@SuppressWarnings("unused")
private static class User {

	private final String mail = null;

	@Valid
	@ConvertGroup(from = Default.class, to = BasicNumber.class)
	private final PhoneNumber phone = null;

	@Valid
	@ConvertGroup.List({
			@ConvertGroup(from = Default.class, to = BasicPostal.class),
			@ConvertGroup(from = Complete.class, to = FullPostal.class)
	})
private final Address address = null;

	@Valid
	@ConvertGroup(from = Default.class, to = BasicNumber.class)
	public User() {
	}

	public User(
			@Valid
			@ConvertGroup.List({
					@ConvertGroup(from = Default.class, to = BasicPostal.class),
					@ConvertGroup(from = Complete.class, to = FullPostal.class)
			})
			Address address) {
	}

	public String getMail1() {
		return null;
	}

	public void setMail1(String mail) {
	}

	@Valid
	@ConvertGroup(from = Default.class, to = BasicNumber.class)
	public PhoneNumber getPhone1() {
		return null;
	}

	public void setPhone1(@Valid @ConvertGroup(from = Default.class, to = BasicNumber.class) PhoneNumber phone) {
	}

	@Valid
	@ConvertGroup.List({
			@ConvertGroup(from = Default.class, to = BasicPostal.class),
			@ConvertGroup(from = Complete.class, to = FullPostal.class)
	})
	public Address getAddress1() {
		return null;
	}

	public void setAddress1(
			@Valid
			@ConvertGroup.List({
					@ConvertGroup(from = Default.class, to = BasicPostal.class),
					@ConvertGroup(from = Complete.class, to = FullPostal.class)
			})
			Address address) {
	}
}

// Invalid fixture: two conversions share the same 'from' group (Default) on one field.
private static class User2 {

	@Valid
	@ConvertGroup.List({
			@ConvertGroup(from = Default.class, to = BasicPostal.class),
			@ConvertGroup(from = Default.class, to = FullPostal.class)
	})
	private final Address address = null;
}

// Invalid fixture: same 'from' group used in a single @ConvertGroup and inside a List.
private static class User3 {

	@Valid
	@ConvertGroup(from = Default.class, to = BasicPostal.class)
	@ConvertGroup.List(@ConvertGroup(from = Default.class, to = FullPostal.class))
	private final Address address = null;
}

// Invalid fixture: duplicate 'from' group on a setter parameter.
private static class User4 {

	@SuppressWarnings("unused")
	public void setAddress(
			@Valid
			@ConvertGroup.List({
					@ConvertGroup(from = Default.class, to = BasicPostal.class),
					@ConvertGroup(from = Default.class, to = FullPostal.class)
			})
			Address address) {
	}
}

// Hierarchy fixture for the HV-626 test: each type declares exactly one class-level constraint.
@ClassLevelConstraint("some script")
private static class PersonBase {
}

@ScriptAssert(lang = "javascript", script = "some script")
private static class Person extends PersonBase {
}

// Custom inherited class-level constraint with no validator (declaration-only fixture).
@Target({ TYPE })
@Retention(RUNTIME)
@Constraint(validatedBy = { })
@Documented
@Inherited
public @interface ClassLevelConstraint {

	String message() default "{ClassLevelConstraint.message}";

	Class<?>[] groups() default { };

	Class<? extends Payload>[] payload() default { };

	String value();
}

// Fixture for the @UnwrapValidatedValue / UnwrapMode tests above.
private static class GolfPlayer {

	// no annotation -> UnwrapMode.AUTOMATIC
	private Wrapper<String> nickname;

	// explicit annotation -> UnwrapMode.UNWRAP
	@UnwrapValidatedValue
	private Wrapper<String> name;

	@UnwrapValidatedValue
	public Wrapper<Double> getHandicap() {
		return null;
	}

	// explicit opt-out -> UnwrapMode.SKIP_UNWRAP
	@UnwrapValidatedValue(false)
	public Wrapper<Double> getScore() {
		return null;
	}

	@UnwrapValidatedValue
	public Wrapper<Boolean> enterTournament() {
		return null;
	}

	@SuppressWarnings("unused")
	public void practice(@UnwrapValidatedValue Wrapper<Integer> numberOfBalls) {
	}
}

// Generic wrapper whose constructor return value is unwrapped before validation.
private static class Wrapper<T> {

	@SuppressWarnings("unused")
	public T value;

	@UnwrapValidatedValue
	public Wrapper(T value) {
		this.value = value;
	}
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.filecache; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Random; import javax.security.auth.login.LoginException; import junit.framework.TestCase; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.TaskDistributedCacheManager.CacheFile; import org.apache.hadoop.mapred.DefaultTaskController; import org.apache.hadoop.mapred.JobID; import org.apache.hadoop.mapred.JobLocalizer; import org.apache.hadoop.mapred.TaskController; import org.apache.hadoop.mapred.TaskTracker; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RawLocalFileSystem; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import 
org.apache.hadoop.filecache.TaskDistributedCacheManager;
import org.apache.hadoop.filecache.TrackerDistributedCacheManager;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ReflectionUtils;

/**
 * Tests for {@link TrackerDistributedCacheManager}: localization flow, reference
 * counting, visibility (public vs. private), cache eviction and staleness checks.
 */
public class TestTrackerDistributedCacheManager extends TestCase {

  private static final Log LOG =
    LogFactory.getLog(TestTrackerDistributedCacheManager.class);

  // Root directory for all files this test creates; configurable via test.build.data.
  protected String TEST_ROOT_DIR =
    new File(System.getProperty("test.build.data", "/tmp"),
             TestTrackerDistributedCacheManager.class.getSimpleName())
        .getAbsolutePath();

  protected File ROOT_MAPRED_LOCAL_DIR;
  protected int numLocalDirs = 6;

  private static final int TEST_FILE_SIZE = 4 * 1024; // 4K
  private static final int LOCAL_CACHE_LIMIT = 5 * 1024; //5K
  private static final int LOCAL_CACHE_SUBDIR_LIMIT = 1;
  protected Configuration conf;
  protected Path firstCacheFile;
  protected Path firstCacheFilePublic;
  protected Path secondCacheFile;
  protected Path secondCacheFilePublic;
  private FileSystem fs;

  protected LocalDirAllocator localDirAllocator =
    new LocalDirAllocator("mapred.local.dir");
  protected TaskController taskController;

  /**
   * Creates the test root, six mapred-local dirs, a local-FS configuration,
   * the task controller, and two public + two private temp cache files.
   */
  @Override
  protected void setUp() throws IOException,InterruptedException {

    // Prepare the tests' root dir
    File TEST_ROOT = new File(TEST_ROOT_DIR);
    if (!TEST_ROOT.exists()) {
      TEST_ROOT.mkdirs();
    }

    // Prepare the tests' mapred-local-dir
    ROOT_MAPRED_LOCAL_DIR = new File(TEST_ROOT_DIR, "mapred/local");
    ROOT_MAPRED_LOCAL_DIR.mkdirs();

    String []localDirs = new String[numLocalDirs];
    for (int i = 0; i < numLocalDirs; i++) {
      File localDir = new File(ROOT_MAPRED_LOCAL_DIR, "0_" + i);
      localDirs[i] = localDir.getPath();
      localDir.mkdir();
    }

    conf = new Configuration();
    conf.setStrings("mapred.local.dir", localDirs);
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
    fs = FileSystem.get(conf);
    Class<? extends TaskController> taskControllerClass = conf.getClass(
        "mapred.task.tracker.task-controller", DefaultTaskController.class,
        TaskController.class);
    taskController = (TaskController) ReflectionUtils.newInstance(
        taskControllerClass, conf);

    // setup permissions for mapred local dir
    taskController.setup(localDirAllocator);

    // Create the temporary cache files to be used in the tests.
    firstCacheFile = new Path(TEST_ROOT_DIR, "firstcachefile");
    secondCacheFile = new Path(TEST_ROOT_DIR, "secondcachefile");
    firstCacheFilePublic = new Path(TEST_ROOT_DIR, "firstcachefileOne");
    secondCacheFilePublic = new Path(TEST_ROOT_DIR, "secondcachefileOne");
    createPublicTempFile(firstCacheFilePublic);
    createPublicTempFile(secondCacheFilePublic);
    createPrivateTempFile(firstCacheFile);
    createPrivateTempFile(secondCacheFile);
  }

  // Re-applies a modified configuration to the task controller.
  protected void refreshConf(Configuration conf) throws IOException {
    taskController.setConf(conf);
    taskController.setup(localDirAllocator);
  }

  /**
   * Whether the test can run on the machine
   *
   * @return true if test can run on the machine, false otherwise
   */
  protected boolean canRun() {
    return true;
  }

  /**
   * This is the typical flow for using the DistributedCache classes.
   *
   * @throws IOException
   * @throws LoginException
   */
  public void testManagerFlow() throws IOException, LoginException {
    if (!canRun()) {
      return;
    }

    // ****** Imitate JobClient code
    // Configures a task/job with both a regular file and a "classpath" file.
Configuration subConf = new Configuration(conf);
    String userName = getJobOwnerName();
    subConf.set("user.name", userName);
    JobID jobid = new JobID("jt",1);
    DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf);
    DistributedCache.addFileToClassPath(secondCacheFile, subConf,
                                        FileSystem.get(subConf));
    TrackerDistributedCacheManager.determineTimestamps(subConf);
    TrackerDistributedCacheManager.determineCacheVisibilities(subConf);
    // ****** End of imitating JobClient code

    Path jobFile = new Path(TEST_ROOT_DIR, "job.xml");
    FileOutputStream os = new FileOutputStream(new File(jobFile.toString()));
    subConf.writeXml(os);
    os.close();

    // ****** Imitate TaskRunner code.
    TrackerDistributedCacheManager manager =
      new TrackerDistributedCacheManager(conf, taskController);
    TaskDistributedCacheManager handle =
      manager.newTaskDistributedCacheManager(jobid, subConf);
    assertNull(null, DistributedCache.getLocalCacheFiles(subConf));
    // NOTE(review): workDir is never used below.
    File workDir = new File(new Path(TEST_ROOT_DIR, "workdir").toString());
    handle.setupCache(subConf, TaskTracker.getPublicDistributedCacheDir(),
        TaskTracker.getPrivateDistributedCacheDir(userName));
    JobLocalizer.downloadPrivateCache(subConf);
    // DOESN'T ACTUALLY HAPPEN IN THE TaskRunner (THIS IS A TODO)
//    handle.setupPrivateCache(localDirAllocator, TaskTracker
//        .getPrivateDistributedCacheDir(userName));
//
    // ****** End of imitating TaskRunner code

    Path[] localCacheFiles = DistributedCache.getLocalCacheFiles(subConf);
    assertNotNull(null, localCacheFiles);
    assertEquals(2, localCacheFiles.length);
    Path cachedFirstFile = localCacheFiles[0];
    Path cachedSecondFile = localCacheFiles[1];
    // The localized copy must match in size but live at a different path.
    assertFileLengthEquals(firstCacheFile, cachedFirstFile);
    assertFalse("Paths should be different.",
        firstCacheFile.equals(cachedFirstFile));

    assertEquals(1, handle.getClassPaths().size());
    assertEquals(cachedSecondFile.toString(), handle.getClassPaths().get(0));

    checkFilePermissions(localCacheFiles);

    // Cleanup
    handle.release();
    manager.purgeCache();
    assertFalse(pathToFile(cachedFirstFile).exists());
  }

  /**
   * This DistributedCacheManager fails in localizing firstCacheFile.
   */
  public class FakeTrackerDistributedCacheManager extends
      TrackerDistributedCacheManager {
    public FakeTrackerDistributedCacheManager(Configuration conf)
        throws IOException {
      super(conf, taskController);
    }

    // Throws for firstCacheFilePublic only; all other URIs localize normally.
    @Override
    Path localizePublicCacheObject(Configuration conf, URI cache,
        long confFileStamp, CacheStatus cacheStatus, FileStatus fileStatus,
        boolean isArchive) throws IOException {
      if (cache.equals(firstCacheFilePublic.toUri())) {
        throw new IOException("fake fail");
      }
      return super.localizePublicCacheObject(conf, cache, confFileStamp,
          cacheStatus, fileStatus, isArchive);
    }
  }

  // Verifies reference counts drop to zero after release, and that a failed
  // localization leaves a consistent (uncounted) state for all files of the job.
  public void testReferenceCount() throws IOException, LoginException,
      URISyntaxException, InterruptedException {
    if (!canRun()) {
      return;
    }
    TrackerDistributedCacheManager manager =
      new FakeTrackerDistributedCacheManager(conf);

    String userName = getJobOwnerName();
    // NOTE(review): workDir is never used below.
    File workDir = new File(new Path(TEST_ROOT_DIR, "workdir").toString());

    // Configures a job with a regular file
    Job job1 = new Job(conf);
    Configuration conf1 = job1.getConfiguration();
    conf1.set("user.name", userName);
    DistributedCache.addCacheFile(secondCacheFile.toUri(), conf1);

    TrackerDistributedCacheManager.determineTimestamps(conf1);
    TrackerDistributedCacheManager.determineCacheVisibilities(conf1);

    // Task localizing for first job
    TaskDistributedCacheManager handle = manager
        .newTaskDistributedCacheManager(new JobID("jt", 1), conf1);
    handle.setupCache(conf1, TaskTracker.getPublicDistributedCacheDir(),
        TaskTracker.getPrivateDistributedCacheDir(userName));
    JobLocalizer.downloadPrivateCache(conf1);
    handle.release();
    for (TaskDistributedCacheManager.CacheFile c : handle.getCacheFiles()) {
      assertEquals(0, manager.getReferenceCount(c.getStatus()));
    }

    Path thirdCacheFile = new Path(TEST_ROOT_DIR, "thirdcachefile");
    createPrivateTempFile(thirdCacheFile);

    // Configures another job with three regular files.
    Job job2 = new Job(conf);
    Configuration conf2 = job2.getConfiguration();
    conf2.set("user.name", userName);
    // add a file that would get failed to localize
    DistributedCache.addCacheFile(firstCacheFilePublic.toUri(), conf2);
    // add a file that is already localized by different job
    DistributedCache.addCacheFile(secondCacheFile.toUri(), conf2);
    // add a file that is never localized
    DistributedCache.addCacheFile(thirdCacheFile.toUri(), conf2);

    TrackerDistributedCacheManager.determineTimestamps(conf2);
    TrackerDistributedCacheManager.determineCacheVisibilities(conf2);

    // Task localizing for second job
    // localization for the "firstCacheFile" will fail.
    handle = manager.newTaskDistributedCacheManager(new JobID("jt", 2), conf2);
    Throwable th = null;
    try {
      handle.setupCache(conf2, TaskTracker.getPublicDistributedCacheDir(),
          TaskTracker.getPrivateDistributedCacheDir(userName));
      JobLocalizer.downloadPrivateCache(conf2);
    } catch (IOException e) {
      th = e;
      LOG.info("Exception during setup", e);
    }
    assertNotNull(th);
    assertTrue(th.getMessage().contains("fake fail"));
    handle.release();
    th = null;
    // After the failure, getting a reference count is expected to NPE for at
    // least one file (the one that was never localized).
    for (TaskDistributedCacheManager.CacheFile c : handle.getCacheFiles()) {
      try {
        int refcount = manager.getReferenceCount(c.getStatus());
        LOG.info("checking refcount " + c.uri + " of " + refcount);
        assertEquals(0, refcount);
      } catch (NullPointerException ie) {
        th = ie;
        LOG.info("Exception getting reference count for " + c.uri, ie);
      }
    }
    assertNotNull(th);
    fs.delete(thirdCacheFile, false);
  }

  /**
   * Tests that localization of distributed cache file happens in the desired
   * directory
   * @throws IOException
   * @throws LoginException
   */
  public void testPublicPrivateCache()
  throws IOException, LoginException, InterruptedException {
    if (!canRun()) {
      return;
    }
    checkLocalizedPath(true);
    checkLocalizedPath(false);
  }

  // Appends data as a comma-separated list to buffer (no-op for null/empty).
  private void appendStringArray(StringBuilder buffer, String[] data) {
    if (data != null && data.length != 0) {
      buffer.append(data[0]);
      for(int i=1; i < data.length; i++) {
        buffer.append(',');
buffer.append(data[i]);
      }
    }
  }

  // Appends data as a comma-separated list to buffer (no-op for null/empty).
  private void appendBooleanArray(StringBuilder buffer, boolean[] data) {
    if (data != null && data.length != 0) {
      buffer.append(data[0]);
      for(int i=1; i < data.length; i++) {
        buffer.append(',');
        buffer.append(data[i]);
      }
    }
  }

  // Appends data as a comma-separated list to buffer (no-op for null/empty).
  private void appendLongArray(StringBuilder buffer, long[] data) {
    if (data != null && data.length != 0) {
      buffer.append(data[0]);
      for(int i=1; i < data.length; i++) {
        buffer.append(',');
        buffer.append(data[i]);
      }
    }
  }

  // Appends data as a comma-separated list to buffer (no-op for null/empty).
  private void appendUriArray(StringBuilder buffer, URI[] data) {
    if (data != null && data.length != 0) {
      buffer.append(data[0]);
      for(int i=1; i < data.length; i++) {
        buffer.append(',');
        buffer.append(data[i]);
      }
    }
  }

  // Logs the distributed-cache related state of the given configuration.
  private void dumpState(Configuration conf1) throws IOException {
    StringBuilder buf = new StringBuilder();
    buf.append("\nFiles:");
    appendUriArray(buf, DistributedCache.getCacheFiles(conf1));
    buf.append("\nArchives:");
    appendUriArray(buf, DistributedCache.getCacheArchives(conf1));
    buf.append("\nFile Visible:");
    appendBooleanArray(buf, TrackerDistributedCacheManager.getFileVisibilities
        (conf1));
    buf.append("\nArchive Visible:");
    appendBooleanArray(buf, TrackerDistributedCacheManager.getArchiveVisibilities
        (conf1));
    buf.append("\nFile timestamps:");
    appendLongArray(buf, DistributedCache.getFileTimestamps(conf1));
    buf.append("\nArchive timestamps:");
    appendLongArray(buf, DistributedCache.getArchiveTimestamps(conf1));
    LOG.info("state = " + buf.toString());
  }

  // Localizes one cache file as public or private and asserts it lands under
  // the matching distributed-cache directory with the expected permissions.
  private void checkLocalizedPath(boolean visibility)
  throws IOException, LoginException, InterruptedException {
    TrackerDistributedCacheManager manager =
      new TrackerDistributedCacheManager(conf, taskController);
    String userName = getJobOwnerName();
    // NOTE(review): workDir is never used below.
    File workDir = new File(TEST_ROOT_DIR, "workdir");
    Path cacheFile = new Path(TEST_ROOT_DIR, "fourthcachefile");
    if (visibility) {
      createPublicTempFile(cacheFile);
    } else {
      createPrivateTempFile(cacheFile);
    }

    Configuration conf1 = new Configuration(conf);
    conf1.set("user.name", userName);
    DistributedCache.addCacheFile(cacheFile.toUri(), conf1);
    TrackerDistributedCacheManager.determineTimestamps(conf1);
    TrackerDistributedCacheManager.determineCacheVisibilities(conf1);
    dumpState(conf1);

    // Task localizing for job
    TaskDistributedCacheManager handle = manager
        .newTaskDistributedCacheManager(new JobID("jt", 1), conf1);
    handle.setupCache(conf1, TaskTracker.getPublicDistributedCacheDir(),
        TaskTracker.getPrivateDistributedCacheDir(userName));
    JobLocalizer.downloadPrivateCache(conf1);
    TaskDistributedCacheManager.CacheFile c = handle.getCacheFiles().get(0);
    String distCacheDir;
    if (visibility) {
      distCacheDir = TaskTracker.getPublicDistributedCacheDir();
    } else {
      distCacheDir = TaskTracker.getPrivateDistributedCacheDir(userName);
    }
    Path localizedPath =
      manager.getLocalCache(cacheFile.toUri(), conf1, distCacheDir,
                            fs.getFileStatus(cacheFile), false,
                            c.timestamp, visibility, c);
    assertTrue("Cache file didn't get localized in the expected directory. " +
        "Expected localization to happen within " +
        ROOT_MAPRED_LOCAL_DIR + "/" + distCacheDir +
        ", but was localized at " +
        localizedPath, localizedPath.toString().contains(distCacheDir));
    if (visibility) {
      checkPublicFilePermissions(new Path[]{localizedPath});
    } else {
      checkFilePermissions(new Path[]{localizedPath});
    }
  }

  /**
   * Check proper permissions on the cache files
   *
   * @param localCacheFiles
   * @throws IOException
   */
  protected void checkFilePermissions(Path[] localCacheFiles)
      throws IOException {
    // All the files should have executable permissions on them.
    for (Path p : localCacheFiles) {
      assertTrue("Cache file is not executable!", new File(p
          .toUri().getPath()).canExecute());
    }
  }

  /**
   * Check permissions on the public cache files
   *
   * @param localCacheFiles
   * @throws IOException
   */
  private void checkPublicFilePermissions(Path[] localCacheFiles)
      throws IOException {
    checkPublicFilePermissions(fs, localCacheFiles);
  }

  /**
   * Verify the permissions for a file localized as a public distributed
   * cache file
   * @param fs The Local FileSystem used to get the permissions
   * @param localCacheFiles The list of files whose permissions should be
   * verified.
   * @throws IOException
   */
  public static void checkPublicFilePermissions(FileSystem fs,
      Path[] localCacheFiles) throws IOException {
    // All the files should have read and executable permissions for others
    for (Path p : localCacheFiles) {
      FsPermission perm = fs.getFileStatus(p).getPermission();
      assertTrue("cache file is not readable / executable by owner: perm="
          + perm.getUserAction(), perm.getUserAction()
          .implies(FsAction.READ_EXECUTE));
      assertTrue("cache file is not readable / executable by group: perm="
          + perm.getGroupAction(), perm.getGroupAction()
          .implies(FsAction.READ_EXECUTE));
      assertTrue("cache file is not readable / executable by others: perm="
          + perm.getOtherAction(), perm.getOtherAction()
          .implies(FsAction.READ_EXECUTE));
    }
  }

  /**
   * Verify the ownership for files localized as a public distributed cache
   * file.
   * @param fs The Local FileSystem used to get the ownership
   * @param localCacheFiles THe list of files whose ownership should be
   * verified
   * @param owner The owner of the files
   * @param group The group owner of the files.
* @throws IOException
   */
  public static void checkPublicFileOwnership(FileSystem fs,
      Path[] localCacheFiles, String owner, String group)
      throws IOException {
    for (Path p: localCacheFiles) {
      assertEquals(owner, fs.getFileStatus(p).getOwner());
      assertEquals(group, fs.getFileStatus(p).getGroup());
    }
  }

  // Returns the name of the user running the test (used as the job owner).
  protected String getJobOwnerName() throws IOException {
    return UserGroupInformation.getLoginUser().getUserName();
  }

  /** test delete cache */
  public void testDeleteCache() throws Exception {
    if (!canRun()) {
      return;
    }
    // This test needs mapred.local.dir to be single directory
    // instead of four, because it assumes that both
    // firstcachefile and secondcachefile will be localized on same directory
    // so that second localization triggers deleteCache.
    // If mapred.local.dir is four directories, second localization might not
    // trigger deleteCache, if it is localized in different directory.
    Configuration conf2 = new Configuration(conf);
    conf2.set("mapred.local.dir", ROOT_MAPRED_LOCAL_DIR.toString());
    conf2.setLong("local.cache.size", LOCAL_CACHE_LIMIT);

    refreshConf(conf2);
    TrackerDistributedCacheManager manager =
        new TrackerDistributedCacheManager(conf2, taskController);
    FileSystem localfs = FileSystem.getLocal(conf2);
    // NOTE(review): 'now' is never used below.
    long now = System.currentTimeMillis();
    String userName = getJobOwnerName();
    conf2.set("user.name", userName);

    // We first test the size limit
    FileStatus stat = fs.getFileStatus(firstCacheFilePublic);
    CacheFile cfile1 = new CacheFile(firstCacheFilePublic.toUri(),
        CacheFile.FileType.REGULAR, true,
        stat.getModificationTime(),
        true);
    Path firstLocalCache = manager.getLocalCache(firstCacheFilePublic.toUri(), conf2,
        TaskTracker.getPrivateDistributedCacheDir(userName),
        fs.getFileStatus(firstCacheFilePublic), false,
        fs.getFileStatus(firstCacheFilePublic).getModificationTime(), true,
        cfile1);
    manager.releaseCache(cfile1.getStatus());
    // The code above localized a file of size 4K and then released the cache,
    // which allows it to be deleted once the size limit is exceeded.
    // The code below localizes another cache which is designed to
    // sweep away the first cache.
    stat = fs.getFileStatus(secondCacheFilePublic);
    CacheFile cfile2 = new CacheFile(secondCacheFilePublic.toUri(),
        CacheFile.FileType.REGULAR, true,
        stat.getModificationTime(),
        true);
    assertTrue("DistributedCache currently doesn't have cached file",
        localfs.exists(firstLocalCache));
    Path secondLocalCache = manager.getLocalCache(secondCacheFilePublic.toUri(), conf2,
        TaskTracker.getPrivateDistributedCacheDir(userName),
        fs.getFileStatus(secondCacheFilePublic), false,
        fs.getFileStatus(secondCacheFilePublic).getModificationTime(), true,
        cfile2);
    assertFalse("DistributedCache failed deleting old" +
        " cache when the cache store is full.",
        localfs.exists(firstLocalCache));

    // find the root directory of distributed caches
    Path firstCursor = firstLocalCache;
    Path secondCursor = secondLocalCache;

    while (!firstCursor.equals(secondCursor)) {
      // Debug code, to see what these things look like
      System.err.println("cursors: " + firstCursor);
      System.err.println(" and " + secondCursor);

      firstCursor = firstCursor.getParent();
      secondCursor = secondCursor.getParent();
    }

    System.err.println("The final cursor is " + firstCursor);

    System.err.println("That directory ends up with "
                       + localfs.listStatus(firstCursor).length
                       + " subdirectories");
    Path cachesBase = firstCursor;

    assertFalse
      ("DistributedCache did not delete the gensym'ed distcache "
           + "directory names when it deleted the files they contained "
           + "because they collectively exceeded the size limit.",
       localfs.listStatus(cachesBase).length > 1);

    conf2.setLong("local.cache.size", LOCAL_CACHE_LIMIT * 10);
    conf2.setLong("mapreduce.tasktracker.local.cache.numberdirectories",
        LOCAL_CACHE_SUBDIR_LIMIT);
    manager =
        new TrackerDistributedCacheManager(conf2, taskController);

    // Now we test the number of sub directories limit
    // Create the temporary cache files to be used in the tests.
    Path thirdCacheFile = new Path(TEST_ROOT_DIR, "thirdcachefile");
    Path fourthCacheFile = new Path(TEST_ROOT_DIR, "fourthcachefile");
    // Adding two more small files, so it triggers the number of sub directory
    // limit but does not trigger the file size limit.
    createPrivateTempFile(thirdCacheFile);
    createPrivateTempFile(fourthCacheFile);
    DistributedCache.setCacheFiles(new URI[]{thirdCacheFile.toUri()}, conf2);
    TrackerDistributedCacheManager.determineCacheVisibilities(conf2);
    TrackerDistributedCacheManager.determineTimestamps(conf2);
    stat = fs.getFileStatus(thirdCacheFile);
    CacheFile cfile3 = new CacheFile(thirdCacheFile.toUri(),
            CacheFile.FileType.REGULAR, false,
            stat.getModificationTime(),
            true);
    Path thirdLocalCache = manager.getLocalCache(thirdCacheFile.toUri(), conf2,
        TaskTracker.getPrivateDistributedCacheDir(userName),
        fs.getFileStatus(thirdCacheFile), false,
        fs.getFileStatus(thirdCacheFile).getModificationTime(),
        false, cfile3);
    DistributedCache.setLocalFiles(conf2, thirdLocalCache.toString());
    JobLocalizer.downloadPrivateCache(conf2);
    // Release the third cache so that it can be deleted while sweeping
    manager.releaseCache(cfile3.getStatus());
    // Getting the fourth cache will make the number of sub directories becomes
    // 3 which is greater than 2. So the released cache will be deleted.
    stat = fs.getFileStatus(fourthCacheFile);
    CacheFile cfile4 = new CacheFile(fourthCacheFile.toUri(),
            CacheFile.FileType.REGULAR, false,
            stat.getModificationTime(),
            true);
    assertTrue("DistributedCache currently doesn't have cached file",
        localfs.exists(thirdLocalCache));

    DistributedCache.setCacheFiles(new URI[]{fourthCacheFile.toUri()}, conf2);
    DistributedCache.setLocalFiles(conf2, thirdCacheFile.toUri().toString());
    TrackerDistributedCacheManager.determineCacheVisibilities(conf2);
    TrackerDistributedCacheManager.determineTimestamps(conf2);
    Path fourthLocalCache = manager.getLocalCache(fourthCacheFile.toUri(), conf2,
        TaskTracker.getPrivateDistributedCacheDir(userName),
        fs.getFileStatus(fourthCacheFile), false,
        fs.getFileStatus(fourthCacheFile).getModificationTime(), false, cfile4);
    assertFalse("DistributedCache failed deleting old" +
        " cache when the cache exceeds the number of sub directories limit.",
        localfs.exists(thirdLocalCache));

    assertFalse
      ("DistributedCache did not delete the gensym'ed distcache "
           + "directory names when it deleted the files they contained "
           + "because there were too many.",
       localfs.listStatus(cachesBase).length > LOCAL_CACHE_SUBDIR_LIMIT);

    // Clean up the files created in this test
    new File(thirdCacheFile.toString()).delete();
    new File(fourthCacheFile.toString()).delete();
  }

  // Localizes a file through a non-default ("fakefile") filesystem scheme.
  public void testFileSystemOtherThanDefault() throws Exception {
    if (!canRun()) {
      return;
    }
    TrackerDistributedCacheManager manager =
      new TrackerDistributedCacheManager(conf, taskController);
    // Register the fake scheme backed by the local filesystem implementation.
    conf.set("fs.fakefile.impl", conf.get("fs.file.impl"));
    String userName = getJobOwnerName();
    conf.set("user.name", userName);
    Path fileToCache = new Path("fakefile:///"
        + firstCacheFile.toUri().getPath());
    CacheFile file = new CacheFile(fileToCache.toUri(),
    		                       CacheFile.FileType.REGULAR,
    		                       false, 0, false);
    Path result = manager.getLocalCache(fileToCache.toUri(), conf,
        TaskTracker.getPrivateDistributedCacheDir(userName),
        fs.getFileStatus(firstCacheFile), false,
        System.currentTimeMillis(),
false, file);
    assertNotNull("DistributedCache cached file on non-default filesystem.",
        result);
  }

  // Writes a TEST_FILE_SIZE file of random bytes at p.
  static void createTempFile(Path p) throws IOException {
    createTempFile(p, TEST_FILE_SIZE);
  }

  // Writes a file of 'size' random bytes at p.
  static void createTempFile(Path p, int size) throws IOException {
    File f = new File(p.toString());
    FileOutputStream os = new FileOutputStream(f);
    byte[] toWrite = new byte[size];
    new Random().nextBytes(toWrite);
    os.write(toWrite);
    os.close();
    FileSystem.LOG.info("created: " + p + ", size=" + size);
  }

  // World-readable temp file -> treated as a "public" cache file.
  static void createPublicTempFile(Path p)
  throws IOException, InterruptedException {
    createTempFile(p);
    FileUtil.chmod(p.toString(), "0777",true);
  }

  // Owner/group-only temp file -> treated as a "private" cache file.
  static void createPrivateTempFile(Path p)
  throws IOException, InterruptedException {
    createTempFile(p);
    FileUtil.chmod(p.toString(), "0770",true);
  }

  @Override
  protected void tearDown() throws IOException {
    new File(firstCacheFile.toString()).delete();
    new File(secondCacheFile.toString()).delete();
    new File(firstCacheFilePublic.toString()).delete();
    new File(secondCacheFilePublic.toString()).delete();
    FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
  }

  protected void assertFileLengthEquals(Path a, Path b)
      throws FileNotFoundException {
    assertEquals("File sizes mismatch.",
       pathToFile(a).length(), pathToFile(b).length());
  }

  protected File pathToFile(Path p) {
    return new File(p.toString());
  }

  // Local filesystem whose reported modification times can be shifted forward,
  // used to simulate a cache file changing "on HDFS" after job submission.
  public static class FakeFileSystem extends RawLocalFileSystem {
    private long increment = 0;
    public FakeFileSystem() {
      super();
    }

    public FileStatus getFileStatus(Path p) throws IOException {
      File f = pathToFile(p);
      return new FileStatus(f.length(), f.isDirectory(), 1, 128,
      f.lastModified() + increment, makeQualified(new Path(f.getPath())));
    }

    void advanceClock(long millis) {
      increment += millis;
    }
  }

  // Verifies that a timestamp change after submission fails the same job's
  // re-localization, while a new job localizes the changed file to a new path.
  public void testFreshness() throws Exception {
    if (!canRun()) {
      return;
    }
    Configuration myConf = new Configuration(conf);
    myConf.set("fs.default.name", "refresh:///");
    myConf.setClass("fs.refresh.impl", FakeFileSystem.class, FileSystem.class);
    String userName = getJobOwnerName();

    TrackerDistributedCacheManager manager =
      new TrackerDistributedCacheManager(myConf, taskController);
    // ****** Imitate JobClient code
    // Configures a task/job with both a regular file and a "classpath" file.
    Configuration subConf = new Configuration(myConf);
    subConf.set("user.name", userName);
    DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf);
    TrackerDistributedCacheManager.determineTimestamps(subConf);
    TrackerDistributedCacheManager.determineCacheVisibilities(subConf);
    // ****** End of imitating JobClient code

    // ****** Imitate TaskRunner code.
    TaskDistributedCacheManager handle =
      manager.newTaskDistributedCacheManager(new JobID("jt", 1), subConf);
    assertNull(null, DistributedCache.getLocalCacheFiles(subConf));
    // NOTE(review): workDir is never used below.
    File workDir = new File(new Path(TEST_ROOT_DIR, "workdir").toString());
    handle.setupCache(subConf, TaskTracker.getPublicDistributedCacheDir(),
        TaskTracker.getPrivateDistributedCacheDir(userName));
    //TODO this doesn't really happen in the TaskRunner
//    handle.setupPrivateCache(localDirAllocator, TaskTracker
//        .getPrivateDistributedCacheDir(userName));
    // ****** End of imitating TaskRunner code

    Path[] localCacheFiles = DistributedCache.getLocalCacheFiles(subConf);
    assertNotNull(null, localCacheFiles);
    assertEquals(1, localCacheFiles.length);
    Path cachedFirstFile = localCacheFiles[0];
    assertFileLengthEquals(firstCacheFile, cachedFirstFile);
    assertFalse("Paths should be different.",
        firstCacheFile.equals(cachedFirstFile));
    // release
    handle.release();

    // change the file timestamp
    FileSystem fs = FileSystem.get(myConf);
    ((FakeFileSystem)fs).advanceClock(1);

    // running a task of the same job
    Throwable th = null;
    try {
      handle.setupCache(subConf, TaskTracker.getPublicDistributedCacheDir(),
          TaskTracker.getPrivateDistributedCacheDir(userName));
//      handle.setupPrivateCache(localDirAllocator, TaskTracker
//          .getPrivateDistributedCacheDir(userName));
    } catch (IOException ie) {
      th = ie;
    }
    assertNotNull("Throwable is null", th);
    assertTrue("Exception message does not match",
        th.getMessage().contains("has changed on HDFS since job started"));
    // release
    handle.release();

    // submit another job
    Configuration subConf2 = new Configuration(myConf);
    subConf2.set("user.name", userName);
    DistributedCache.addCacheFile(firstCacheFile.toUri(), subConf2);
    TrackerDistributedCacheManager.determineTimestamps(subConf2);
    TrackerDistributedCacheManager.determineCacheVisibilities(subConf2);

    handle =
      manager.newTaskDistributedCacheManager(new JobID("jt", 2), subConf2);
    handle.setupCache(subConf2, TaskTracker.getPublicDistributedCacheDir(),
        TaskTracker.getPrivateDistributedCacheDir(userName));
    Path[] localCacheFiles2 = DistributedCache.getLocalCacheFiles(subConf2);
    assertNotNull(null, localCacheFiles2);
    assertEquals(1, localCacheFiles2.length);
    Path cachedFirstFile2 = localCacheFiles2[0];
    assertFileLengthEquals(firstCacheFile, cachedFirstFile2);
    assertFalse("Paths should be different.",
        firstCacheFile.equals(cachedFirstFile2));
    // assert that two localizations point to different paths
    assertFalse("two jobs with different timestamps did not localize"
        + " in different paths", cachedFirstFile.equals(cachedFirstFile2));
    // release
    handle.release();
  }

  /**
   * Localize a file. After localization is complete, create a file, "myFile",
   * under the directory where the file is localized and ensure that it has
   * permissions different from what is set by default. Then, localize another
   * file. Verify that "myFile" has the right permissions.
* @throws Exception */ public void testCustomPermissions() throws Exception { if (!canRun()) { return; } String userName = getJobOwnerName(); conf.set("user.name", userName); TrackerDistributedCacheManager manager = new TrackerDistributedCacheManager(conf, taskController); FileSystem localfs = FileSystem.getLocal(conf); long now = System.currentTimeMillis(); Path[] localCache = new Path[2]; FileStatus stat = fs.getFileStatus(firstCacheFile); CacheFile file = new CacheFile(firstCacheFilePublic.toUri(), CacheFile.FileType.REGULAR, true, stat.getModificationTime(), false); localCache[0] = manager.getLocalCache(firstCacheFilePublic.toUri(), conf, TaskTracker.getPrivateDistributedCacheDir(userName), fs.getFileStatus(firstCacheFilePublic), false, fs.getFileStatus(firstCacheFilePublic).getModificationTime(), true, file); FsPermission myPermission = new FsPermission((short)0600); Path myFile = new Path(localCache[0].getParent(), "myfile.txt"); if (FileSystem.create(localfs, myFile, myPermission) == null) { throw new IOException("Could not create " + myFile); } try { stat = fs.getFileStatus(secondCacheFilePublic); file = new CacheFile(secondCacheFilePublic.toUri(), CacheFile.FileType.REGULAR, true, stat.getModificationTime(), false); localCache[1] = manager.getLocalCache(secondCacheFilePublic.toUri(), conf, TaskTracker.getPrivateDistributedCacheDir(userName), fs.getFileStatus(secondCacheFilePublic), false, fs.getFileStatus(secondCacheFilePublic).getModificationTime(), true, file); stat = localfs.getFileStatus(myFile); assertTrue(stat.getPermission().equals(myPermission)); // validate permissions of localized files. checkFilePermissions(localCache); } finally { localfs.delete(myFile, false); } } }
/***************************************************************************** * Copyright (C) Codehaus.org * * ------------------------------------------------------------------------- * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * *****************************************************************************/ package org.codehaus.jparsec; import static org.codehaus.jparsec.util.Checks.checkArgument; import java.io.IOException; import java.nio.CharBuffer; import java.util.Collection; import java.util.List; import java.util.concurrent.atomic.AtomicReference; import org.codehaus.jparsec.annotations.Private; import org.codehaus.jparsec.error.ParserException; import org.codehaus.jparsec.functors.Map; import org.codehaus.jparsec.functors.Map2; import org.codehaus.jparsec.functors.Maps; import org.codehaus.jparsec.util.Checks; /** * Defines grammar and encapsulates parsing logic. * A {@link Parser} takes as input a {@link CharSequence} source and parses it when the * {@link #parse(CharSequence)} method is called. A value of type {@code T} will be returned if * parsing succeeds, or a {@link ParserException} is thrown to indicate parsing error. For example: * <pre> * Parser&lt;String> scanner = Scanners.IDENTIFIER; * assertEquals("foo", scanner.parse("foo")); * </pre> * * <p> {@code Parser}s are immutable and inherently covariant on the type parameter {@code T}. 
 * Because Java generics has no native support for covariant type parameter, a workaround is to use
 * the {@link Parser#cast()} method to explicitly force covariance whenever needed.
 *
 * <p> {@code Parser}s run either on character level to scan the source, or on token level to parse
 * a list of {@link Token} objects returned from another parser. This other parser that returns the
 * list of tokens for token level parsing is hooked up via the {@link #from(Parser)} or
 * {@link #from(Parser, Parser)} method.
 *
 * <p> The following are important naming conventions used throughout the library:
 * <ul>
 * <li> A character level parser object that recognizes a single lexical word is called a scanner.
 * <li> A scanner that translates the recognized lexical word into a token is called a tokenizer.
 * <li> A character level parser object that does lexical analysis and returns a list of
 * {@link Token} is called a lexer.
 * <li> All {@code index} parameters are 0-based indexes in the original source.
 * </ul>
 *
 * @author Ben Yu
 */
public abstract class Parser<T> {

  // Package-private: only the library's own parser implementations may subclass.
  Parser() {}

  /**
   * An atomic mutable reference to {@link Parser}. Is useful to work around circular dependency
   * between parser objects.
   *
   * <p> Example usage: <pre>
   * Parser.Reference&lt;Foo> ref = Parser.newReference();
   * ...
   * Parser&lt;Bar> barParser = barParser(ref.lazy());
   * Parser&lt;Foo> fooParser = fooParser(barParser);
   * ref.set(fooParser);
   * </pre>
   */
  public static final class Reference<T> extends AtomicReference<Parser<T>> {
    private static final long serialVersionUID = -8778697271614979497L;
    private final Parser<T> lazy = new LazyParser<T>(this);

    /**
     * A {@link Parser} that delegates to the parser object referenced by {@code this}
     * during parsing time.
     */
    public Parser<T> lazy() {
      return lazy;
    }
  }

  /** Creates a new instance of {@link Reference}. */
  public static <T> Reference<T> newReference() {
    return new Reference<T>();
  }

  /** A {@link Parser} that executes {@code this}, and returns {@code value} if succeeds. */
  public final <R> Parser<R> retn(R value) {
    return next(Parsers.constant(value));
  }

  /**
   * A {@link Parser} that sequentially executes {@code this} and then {@code parser}.
   * The return value of {@code parser} is preserved.
   */
  public final <R> Parser<R> next(Parser<R> parser) {
    return Parsers.sequence(this, parser);
  }

  /**
   * A {@link Parser} that executes {@code this}, maps the result using {@code map}
   * to another {@code Parser} object to be executed as the next step.
   */
  public final <To> Parser<To> next(Map<? super T, ? extends Parser<? extends To>> map) {
    return new BindNextParser<T, To>(this, map);
  }

  /**
   * A {@link Parser} that sequentially executes {@code this} and then {@code parser},
   * whose return value is ignored.
   */
  public final Parser<T> followedBy(Parser<?> parser) {
    return Parsers.sequence(this, parser, InternalFunctors.<T, Object>firstOfTwo());
  }

  /**
   * A {@link Parser} that succeeds if {@code this} succeeds and the pattern recognized by
   * {@code parser} isn't following.
   */
  public final Parser<T> notFollowedBy(Parser<?> parser) {
    return followedBy(parser.not());
  }

  /**
   * {@code p.many()} is equivalent to {@code p*} in EBNF. The return values are collected and
   * returned in a {@link List}.
   */
  public final Parser<List<T>> many() {
    return atLeast(0);
  }

  /** {@code p.skipMany()} is equivalent to {@code p*} in EBNF. The return values are discarded. */
  public final Parser<Void> skipMany() {
    return skipAtLeast(0);
  }

  /**
   * {@code p.many1()} is equivalent to {@code p+} in EBNF. The return values are collected and
   * returned in a {@link List}.
   */
  public final Parser<List<T>> many1() {
    return atLeast(1);
  }

  /**
   * {@code p.skipMany1()} is equivalent to {@code p+} in EBNF. The return values are discarded.
   */
  public final Parser<Void> skipMany1() {
    return skipAtLeast(1);
  }

  /**
   * A {@link Parser} that runs {@code this} parser greedily for at least {@code min} times.
   * The return values are collected and returned in a {@link List}.
   */
  public final Parser<List<T>> atLeast(int min) {
    return new RepeatAtLeastParser<T>(this, Checks.checkMin(min));
  }

  /**
   * A {@link Parser} that runs {@code this} parser greedily for at least {@code min} times
   * and ignores the return values.
   */
  public final Parser<Void> skipAtLeast(int min) {
    return new SkipAtLeastParser(this, Checks.checkMin(min));
  }

  /**
   * A {@link Parser} that sequentially runs {@code this} for {@code n} times and ignores the
   * return values.
   */
  public final Parser<Void> skipTimes(int n) {
    return skipTimes(n, n);
  }

  /**
   * A {@link Parser} that runs {@code this} for {@code n} times and collects the return values in a
   * {@link List}.
   */
  public final Parser<List<T>> times(int n) {
    return times(n, n);
  }

  /**
   * A {@link Parser} that runs {@code this} parser for at least {@code min} times and up to
   * {@code max} times. The return values are collected and returned in {@link List}.
   */
  public final Parser<List<T>> times(int min, int max) {
    Checks.checkMinMax(min, max);
    return new RepeatTimesParser<T>(this, min, max);
  }

  /**
   * A {@link Parser} that runs {@code this} parser for at least {@code min} times and up to
   * {@code max} times, with all the return values ignored.
   */
  public final Parser<Void> skipTimes(int min, int max) {
    Checks.checkMinMax(min, max);
    return new SkipTimesParser(this, min, max);
  }

  /**
   * A {@link Parser} that runs {@code this} parser and transforms the return value using
   * {@code map}.
   */
  public final <R> Parser<R> map(Map<? super T, ? extends R> map) {
    return new MapParser<T, R>(this, map);
  }

  /**
   * {@code p1.or(p2)} is equivalent to {@code p1 | p2} in EBNF.
   *
   * @param alternative the alternative parser to run if this fails.
   */
  @SuppressWarnings("unchecked")
  public final Parser<T> or(Parser<? extends T> alternative) {
    return Parsers.or(this, alternative);
  }

  /**
   * {@code p.optional()} is equivalent to {@code p?} in EBNF. {@code null} is the result when
   * {@code this} fails with no partial match.
   */
  public final Parser<T> optional() {
    return Parsers.plus(this, Parsers.<T>always());
  }

  /**
   * A {@link Parser} that returns {@code defaultValue} if {@code this} fails with no partial match.
   */
  public final Parser<T> optional(T defaultValue) {
    return Parsers.plus(this, Parsers.constant(defaultValue));
  }

  /** A {@link Parser} that fails if {@code this} succeeds. Any input consumption is undone. */
  public final Parser<?> not() {
    return not(toString());
  }

  /**
   * A {@link Parser} that fails if {@code this} succeeds. Any input consumption is undone.
   *
   * @param unexpected the name of what we don't expect.
   */
  public final Parser<?> not(String unexpected) {
    return peek().ifelse(Parsers.unexpected(unexpected), Parsers.always());
  }

  /** A {@link Parser} that runs {@code this} and undoes any input consumption if succeeds. */
  public final Parser<T> peek() {
    return new PeekParser<T>(this);
  }

  /** A {@link Parser} that undoes any partial match if {@code this} fails. */
  public final Parser<T> atomic() {
    return new AtomicParser<T>(this);
  }

  /**
   * A {@link Parser} that runs {@code this} parser and sets the number of logical steps explicitly
   * to {@code n}. Package-private: logical steps drive the "longest match" alternative selection.
   */
  final Parser<T> step(int n) {
    checkArgument(n >= 0, "step < 0");
    return new StepParser<T>(this, n);
  }

  /**
   * A {@link Parser} that returns {@code true} if {@code this} succeeds, {@code false} otherwise.
   */
  public final Parser<Boolean> succeeds() {
    return ifelse(Parsers.TRUE, Parsers.FALSE);
  }

  /** A {@link Parser} that returns {@code true} if {@code this} fails, {@code false} otherwise. */
  public final Parser<Boolean> fails() {
    return ifelse(Parsers.FALSE, Parsers.TRUE);
  }

  /**
   * A {@link Parser} that runs {@code consequence} if {@code this} succeeds, or
   * {@code alternative} otherwise.
   */
  public final <R> Parser<R> ifelse(
      Parser<? extends R> consequence, Parser<? extends R> alternative) {
    return ifelse(Maps.constant(consequence), alternative);
  }

  /**
   * A {@link Parser} that runs {@code consequence} if {@code this} succeeds, or
   * {@code alternative} otherwise.
   */
  public final <R> Parser<R> ifelse(
      Map<? super T, ? extends Parser<? extends R>> consequence,
      Parser<? extends R> alternative) {
    return new IfElseParser<R, T>(this, consequence, alternative);
  }

  /**
   * A {@link Parser} that reports an error about {@code name} expected, if {@code this}
   * fails with no partial match.
   */
  public final Parser<T> label(String name) {
    return Parsers.plus(this, Parsers.<T>expect(name));
  }

  /**
   * Casts {@code this} to a {@link Parser} of type {@code R}.
   * Use it only if you know the parser actually returns value of type {@code R}.
   */
  @SuppressWarnings("unchecked")
  public final <R> Parser<R> cast() {
    return (Parser<R>) this;
  }

  /**
   * A {@link Parser} that runs {@code this} between {@code before} and {@code after}.
   * The return value of {@code this} is preserved.
   *
   * <p> Equivalent to {@link Parsers#between(Parser, Parser, Parser)}, which preserves the
   * natural order of the parsers in the argument list, but is a bit more verbose.
   */
  public final Parser<T> between(Parser<?> before, Parser<?> after) {
    return before.next(followedBy(after));
  }

  /**
   * A {@link Parser} that runs {@code this} 1 or more times separated by {@code delim}.
   *
   * <p> The return values are collected in a {@link List}.
   */
  public final Parser<List<T>> sepBy1(Parser<?> delim) {
    // step(0): the delimiter alone must not count as progress for alternative selection.
    final Parser<T> afterFirst = delim.step(0).next(this);
    Map<T, Parser<List<T>>> binder = new Map<T, Parser<List<T>>>() {
      public Parser<List<T>> map(T firstValue) {
        return new RepeatAtLeastParser<T>(
            afterFirst, 0, ListFactories.arrayListFactoryWithFirstElement(firstValue));
      }
    };
    return next(binder);
  }

  /**
   * A {@link Parser} that runs {@code this} 0 or more times separated by {@code delim}.
   *
   * <p> The return values are collected in a {@link List}.
   */
  public final Parser<List<T>> sepBy(Parser<?> delim) {
    return Parsers.plus(sepBy1(delim), EmptyListParser.<T>instance());
  }

  /**
   * A {@link Parser} that runs {@code this} for 0 or more times delimited and terminated by
   * {@code delim}.
   *
   * <p> The return values are collected in a {@link List}.
   */
  public final Parser<List<T>> endBy(Parser<?> delim) {
    return followedBy(delim).many();
  }

  /**
   * A {@link Parser} that runs {@code this} for 1 or more times delimited and terminated by
   * {@code delim}.
   *
   * <p> The return values are collected in a {@link List}.
   */
  public final Parser<List<T>> endBy1(Parser<?> delim) {
    return followedBy(delim).many1();
  }

  /**
   * A {@link Parser} that runs {@code this} for 1 or more times separated and optionally
   * terminated by {@code delim}. For example: {@code "foo;foo;foo"} and {@code "foo;foo;"} both
   * matches {@code foo.sepEndBy1(semicolon)}.
   *
   * <p> The return values are collected in a {@link List}.
   */
  public final Parser<List<T>> sepEndBy1(final Parser<?> delim) {
    return next(new Map<T, Parser<List<T>>>() {
      public Parser<List<T>> map(T first) {
        return new DelimitedListParser<T>(
            Parser.this, delim, ListFactories.arrayListFactoryWithFirstElement(first));
      }
    });
  }

  /**
   * A {@link Parser} that runs {@code this} for 0 or more times separated and optionally
   * terminated by {@code delim}. For example: {@code "foo;foo;foo"} and {@code "foo;foo;"} both
   * matches {@code foo.sepEndBy(semicolon)}.
   *
   * <p> The return values are collected in a {@link List}.
   */
  public final Parser<List<T>> sepEndBy(Parser<?> delim) {
    return Parsers.plus(sepEndBy1(delim), EmptyListParser.<T>instance());
  }

  /**
   * A {@link Parser} that runs {@code op} for 0 or more times greedily, then runs {@code this}.
   * The {@link Map} objects returned from {@code op} are applied from right to left to the return
   * value of {@code p}.
   *
   * <p> {@code p.prefix(op)} is equivalent to {@code op* p} in EBNF.
   */
  @SuppressWarnings("unchecked")
  public final Parser<T> prefix(Parser<? extends Map<? super T, ? extends T>> op) {
    return Parsers.sequence(op.many(), this, Parsers.PREFIX_OPERATOR_MAP2);
  }

  /**
   * A {@link Parser} that runs {@code this} and then runs {@code op} for 0 or more times greedily.
   * The {@link Map} objects returned from {@code op} are applied from left to right to the return
   * value of p.
   *
   * <p> {@code p.postfix(op)} is equivalent to {@code p op*} in EBNF.
   */
  @SuppressWarnings("unchecked")
  public final Parser<T> postfix(Parser<? extends Map<? super T, ? extends T>> op) {
    return Parsers.sequence(this, op.many(), Parsers.POSTFIX_OPERATOR_MAP2);
  }

  /**
   * A {@link Parser} that parses non-associative infix operator. Runs {@code this} for the left
   * operand, and then runs {@code op} and {@code this} for the operator and the right operand
   * optionally. The {@link Map2} objects returned from {@code op} are applied to the return values
   * of the two operands, if any.
   *
   * <p> {@code p.infixn(op)} is equivalent to {@code p (op p)?} in EBNF.
   */
  public final Parser<T> infixn(Parser<? extends Map2<? super T, ? super T, ? extends T>> op) {
    return Parsers.infixn(this, op);
  }

  /**
   * A {@link Parser} for left-associative infix operator. Runs {@code this} for the left operand,
   * and then runs {@code op} and {@code this} for the operator and the right operand for 0 or more
   * times greedily. The {@link Map2} objects returned from {@code op} are applied from left to right
   * to the return values of {@code this}, if any.
   * For example: {@code a + b + c + d} is evaluated as {@code (((a + b)+c)+d)}.
   *
   * <p> {@code p.infixl(op)} is equivalent to {@code p (op p)*} in EBNF.
   */
  public final Parser<T> infixl(
      Parser<? extends Map2<? super T, ? super T, ? extends T>> op) {
    // somehow generics doesn't work if we inline the code here.
    return Parsers.infixl(this, op);
  }

  /**
   * A {@link Parser} for right-associative infix operator. Runs {@code this} for the left operand,
   * and then runs {@code op} and {@code this} for the operator and the right operand for 0 or more
   * times greedily. The {@link Map2} objects returned from {@code op} are applied from right to
   * left to the return values of {@code this}, if any.
   * For example: {@code a + b + c + d} is evaluated as {@code a + (b + (c + d))}.
   *
   * <p> {@code p.infixr(op)} is equivalent to {@code p (op p)*} in EBNF.
   */
  public final Parser<T> infixr(Parser<? extends Map2<? super T, ? super T, ? extends T>> op) {
    return Parsers.infixr(this, op);
  }

  /**
   * A {@link Parser} that runs {@code this} and wraps the return value in a {@link Token}.
   *
   * <p> It is normally not necessary to call this method explicitly. {@link #lexer(Parser)} and
   * {@link #from(Parser, Parser)} both do the conversion automatically.
   */
  public final Parser<Token> token() {
    return new ToTokenParser(this);
  }

  /** A {@link Parser} that returns the matched string in the original source. */
  public final Parser<String> source() {
    return new ReturnSourceParser(this);
  }

  /**
   * A {@link Parser} that takes as input the {@link Token} collection returned by {@code lexer},
   * and runs {@code this} to parse the tokens.
   *
   * <p> {@code this} must be a token level parser.
   */
  public final Parser<T> from(Parser<? extends Collection<Token>> lexer) {
    return Parsers.nested(Parsers.tokens(lexer), followedBy(Parsers.EOF));
  }

  /**
   * A {@link Parser} that takes as input the tokens returned by {@code tokenizer}
   * delimited by {@code delim}, and runs {@code this} to parse the tokens.
   *
   * <p> {@code this} must be a token level parser.
   */
  public final Parser<T> from(Parser<?> tokenizer, Parser<Void> delim) {
    return from(tokenizer.lexer(delim));
  }

  /**
   * A {@link Parser} that greedily runs {@code this} repeatedly,
   * and ignores the pattern recognized by {@code delim} before and after each occurrence.
   * The result tokens are wrapped in {@link Token} and are collected and returned in a
   * {@link List}.
   *
   * <p> It is normally not necessary to call this method explicitly.
   * {@link #from(Parser, Parser)} is more convenient for simple uses that just need to connect a
   * token level parser with a lexer that produces the tokens.
   * When more flexible control over the token list is needed, for example,
   * to parse indentation sensitive language, a pre-processor of the token list may be needed.
   *
   * <p> {@code this} must be a tokenizer that returns a token value.
   */
  public Parser<List<Token>> lexer(Parser<?> delim) {
    return delim.optional().next(token().sepEndBy(delim));
  }

  /**
   * Parses a source string.
   *
   * @param source the source string
   * @param moduleName the name of the module, this name appears in error message
   * @param sourceLocator maps an index of char into line and column numbers
   * @return the result
   */
  final T parse(CharSequence source, String moduleName, SourceLocator sourceLocator) {
    return Parsers.parse(source, followedBy(Parsers.EOF), sourceLocator, moduleName);
  }

  /**
   * Parses {@code source}.
   *
   * @param source the source string
   * @param moduleName the name of the module, this name appears in error message
   * @return the result
   */
  public final T parse(CharSequence source, String moduleName) {
    return parse(source, moduleName, new DefaultSourceLocator(source));
  }

  /** Parses {@code source}. */
  public final T parse(CharSequence source) {
    return parse(source, null);
  }

  /** Parses source read from {@code readable}. */
  public final T parse(Readable readable) throws IOException {
    return parse(readable, null);
  }

  /**
   * Parses source read from {@code readable}.
   *
   * @param readable where the source is read from
   * @param moduleName the name of the module, this name appears in error message
   * @return the result
   */
  public final T parse(Readable readable, String moduleName) throws IOException {
    StringBuilder builder = new StringBuilder();
    copy(readable, builder);
    return parse(builder, moduleName);
  }

  /** Copies all content from {@code from} to {@code to}. */
  @Private static void copy(Readable from, Appendable to) throws IOException {
    CharBuffer buf = CharBuffer.allocate(2048);
    for (;;) {
      int r = from.read(buf);
      if (r == -1) break;
      buf.flip();
      to.append(buf, 0, r);
    }
  }

  /** Extracts the (untyped) result stored in {@code ctxt} after a successful run. */
  @SuppressWarnings("unchecked")
  final T getReturn(ParseContext ctxt) {
    return (T) ctxt.result;
  }

  /** Wraps any non-{@link ParserException} in a {@link ParserException} carrying the location. */
  private ParserException asParserException(Throwable e, ParseContext ctxt) {
    if (e instanceof ParserException) return (ParserException) e;
    return new ParserException(
        e, null, ctxt.module, ctxt.locator.locate(ctxt.getIndex()));
  }

  /** Runs {@link #apply}, translating runtime failures into {@link ParserException}. */
  final boolean run(ParseContext ctxt) {
    try {
      return apply(ctxt);
    } catch(RuntimeException e) {
      throw asParserException(e, ctxt);
    }
  }

  /** The single primitive each concrete parser implements; returns whether parsing succeeded. */
  abstract boolean apply(ParseContext ctxt);
}
/* * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ /* * * * * * * ASM: a very small and fast Java bytecode manipulation framework * Copyright (c) 2000-2011 INRIA, France Telecom * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. */ package org.redkale.asm; /** * An {@link AnnotationVisitor} that generates annotations in bytecode form. 
 *
 * @author Eric Bruneton
 * @author Eugene Kuleshov
 */
final class AnnotationWriter extends AnnotationVisitor {

    /**
     * The class writer to which this annotation must be added.
     */
    private final ClassWriter cw;

    /**
     * The number of values in this annotation.
     */
    private int size;

    /**
     * <code>true</code> if values are named, <code>false</code> otherwise. Annotation
     * writers used for annotation default and annotation arrays use unnamed
     * values.
     */
    private final boolean named;

    /**
     * The annotation values in bytecode form. This byte vector only contains
     * the values themselves, i.e. the number of values must be stored as a
     * unsigned short just before these bytes.
     */
    private final ByteVector bv;

    /**
     * The byte vector to be used to store the number of values of this
     * annotation. See {@link #bv}.
     */
    private final ByteVector parent;

    /**
     * Where the number of values of this annotation must be stored in
     * {@link #parent}.
     */
    private final int offset;

    /**
     * Next annotation writer. This field is used to store annotation lists.
     */
    AnnotationWriter next;

    /**
     * Previous annotation writer. This field is used to store annotation lists.
     */
    AnnotationWriter prev;

    // ------------------------------------------------------------------------
    // Constructor
    // ------------------------------------------------------------------------

    /**
     * Constructs a new {@link AnnotationWriter}.
     *
     * @param cw
     *            the class writer to which this annotation must be added.
     * @param named
     *            <code>true</code> if values are named, <code>false</code> otherwise.
     * @param bv
     *            where the annotation values must be stored.
     * @param parent
     *            where the number of annotation values must be stored.
     * @param offset
     *            where in <code>parent</code> the number of annotation values must
     *            be stored.
     */
    AnnotationWriter(final ClassWriter cw, final boolean named,
            final ByteVector bv, final ByteVector parent, final int offset) {
        super(Opcodes.ASM6);
        this.cw = cw;
        this.named = named;
        this.bv = bv;
        this.parent = parent;
        this.offset = offset;
    }

    // ------------------------------------------------------------------------
    // Implementation of the AnnotationVisitor abstract class
    // ------------------------------------------------------------------------

    @Override
    public void visit(final String name, final Object value) {
        ++size;
        if (named) {
            bv.putShort(cw.newUTF8(name));
        }
        // Each branch emits the element_value tag char followed by the value
        // (constant-pool index or inline array), per the class file format.
        if (value instanceof String) {
            bv.put12('s', cw.newUTF8((String) value));
        } else if (value instanceof Byte) {
            bv.put12('B', cw.newInteger(((Byte) value).byteValue()).index);
        } else if (value instanceof Boolean) {
            int v = ((Boolean) value).booleanValue() ? 1 : 0;
            bv.put12('Z', cw.newInteger(v).index);
        } else if (value instanceof Character) {
            bv.put12('C', cw.newInteger(((Character) value).charValue()).index);
        } else if (value instanceof Short) {
            bv.put12('S', cw.newInteger(((Short) value).shortValue()).index);
        } else if (value instanceof Type) {
            bv.put12('c', cw.newUTF8(((Type) value).getDescriptor()));
        } else if (value instanceof byte[]) {
            byte[] v = (byte[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('B', cw.newInteger(v[i]).index);
            }
        } else if (value instanceof boolean[]) {
            boolean[] v = (boolean[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('Z', cw.newInteger(v[i] ? 1 : 0).index);
            }
        } else if (value instanceof short[]) {
            short[] v = (short[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('S', cw.newInteger(v[i]).index);
            }
        } else if (value instanceof char[]) {
            char[] v = (char[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('C', cw.newInteger(v[i]).index);
            }
        } else if (value instanceof int[]) {
            int[] v = (int[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('I', cw.newInteger(v[i]).index);
            }
        } else if (value instanceof long[]) {
            long[] v = (long[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('J', cw.newLong(v[i]).index);
            }
        } else if (value instanceof float[]) {
            float[] v = (float[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('F', cw.newFloat(v[i]).index);
            }
        } else if (value instanceof double[]) {
            double[] v = (double[]) value;
            bv.put12('[', v.length);
            for (int i = 0; i < v.length; i++) {
                bv.put12('D', cw.newDouble(v[i]).index);
            }
        } else {
            // Numeric boxed types and any other constant: the tag char is looked
            // up by constant pool item type.
            Item i = cw.newConstItem(value);
            bv.put12(".s.IFJDCS".charAt(i.type), i.index);
        }
    }

    @Override
    public void visitEnum(final String name, final String desc,
            final String value) {
        ++size;
        if (named) {
            bv.putShort(cw.newUTF8(name));
        }
        bv.put12('e', cw.newUTF8(desc)).putShort(cw.newUTF8(value));
    }

    @Override
    public AnnotationVisitor visitAnnotation(final String name,
            final String desc) {
        ++size;
        if (named) {
            bv.putShort(cw.newUTF8(name));
        }
        // write tag and type, and reserve space for values count
        bv.put12('@', cw.newUTF8(desc)).putShort(0);
        return new AnnotationWriter(cw, true, bv, bv, bv.length - 2);
    }

    @Override
    public AnnotationVisitor visitArray(final String name) {
        ++size;
        if (named) {
            bv.putShort(cw.newUTF8(name));
        }
        // write tag, and reserve space for array size
        bv.put12('[', 0);
        return new AnnotationWriter(cw, false, bv, bv, bv.length - 2);
    }

    @Override
    public void visitEnd() {
        if (parent != null) {
            byte[] data = parent.data;
            // Back-patch the reserved 2-byte slot with the final value count.
            data[offset] = (byte) (size >>> 8);
            data[offset + 1] = (byte) size;
        }
    }

    // ------------------------------------------------------------------------
    // Utility methods
    // ------------------------------------------------------------------------

    /**
     * Returns the size of this annotation writer list.
     *
     * @return the size of this annotation writer list.
     */
    int getSize() {
        int size = 0;
        AnnotationWriter aw = this;
        while (aw != null) {
            size += aw.bv.length;
            aw = aw.next;
        }
        return size;
    }

    /**
     * Puts the annotations of this annotation writer list into the given byte
     * vector.
     *
     * @param out
     *            where the annotations must be put.
     */
    void put(final ByteVector out) {
        // First pass: count the writers, total their sizes and build back links
        // so the list can be emitted in reverse (i.e. original visit) order.
        int n = 0;
        int size = 2;
        AnnotationWriter aw = this;
        AnnotationWriter last = null;
        while (aw != null) {
            ++n;
            size += aw.bv.length;
            aw.visitEnd(); // in case user forgot to call visitEnd
            aw.prev = last;
            last = aw;
            aw = aw.next;
        }
        out.putInt(size);
        out.putShort(n);
        // Second pass: walk backwards via prev links to emit in visit order.
        aw = last;
        while (aw != null) {
            out.putByteArray(aw.bv.data, 0, aw.bv.length);
            aw = aw.prev;
        }
    }

    /**
     * Puts the given annotation lists into the given byte vector.
     *
     * @param panns
     *            an array of annotation writer lists.
     * @param off
     *            index of the first annotation to be written.
     * @param out
     *            where the annotations must be put.
     */
    static void put(final AnnotationWriter[] panns, final int off,
            final ByteVector out) {
        int size = 1 + 2 * (panns.length - off);
        for (int i = off; i < panns.length; ++i) {
            size += panns[i] == null ? 0 : panns[i].getSize();
        }
        out.putInt(size).putByte(panns.length - off);
        for (int i = off; i < panns.length; ++i) {
            AnnotationWriter aw = panns[i];
            AnnotationWriter last = null;
            int n = 0;
            while (aw != null) {
                ++n;
                aw.visitEnd(); // in case user forgot to call visitEnd
                aw.prev = last;
                last = aw;
                aw = aw.next;
            }
            out.putShort(n);
            aw = last;
            while (aw != null) {
                out.putByteArray(aw.bv.data, 0, aw.bv.length);
                aw = aw.prev;
            }
        }
    }

    /**
     * Puts the given type reference and type path into the given bytevector.
     * LOCAL_VARIABLE and RESOURCE_VARIABLE target types are not supported.
     *
     * @param typeRef
     *            a reference to the annotated type. See {@link TypeReference}.
     * @param typePath
     *            the path to the annotated type argument, wildcard bound, array
     *            element type, or static inner type within 'typeRef'. May be
     *            <code>null</code> if the annotation targets 'typeRef' as a whole.
     * @param out
     *            where the type reference and type path must be put.
     */
    static void putTarget(int typeRef, TypePath typePath, ByteVector out) {
        switch (typeRef >>> 24) {
        case 0x00: // CLASS_TYPE_PARAMETER
        case 0x01: // METHOD_TYPE_PARAMETER
        case 0x16: // METHOD_FORMAL_PARAMETER
            out.putShort(typeRef >>> 16);
            break;
        case 0x13: // FIELD
        case 0x14: // METHOD_RETURN
        case 0x15: // METHOD_RECEIVER
            out.putByte(typeRef >>> 24);
            break;
        case 0x47: // CAST
        case 0x48: // CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT
        case 0x49: // METHOD_INVOCATION_TYPE_ARGUMENT
        case 0x4A: // CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT
        case 0x4B: // METHOD_REFERENCE_TYPE_ARGUMENT
            out.putInt(typeRef);
            break;
        // case 0x10: // CLASS_EXTENDS
        // case 0x11: // CLASS_TYPE_PARAMETER_BOUND
        // case 0x12: // METHOD_TYPE_PARAMETER_BOUND
        // case 0x17: // THROWS
        // case 0x42: // EXCEPTION_PARAMETER
        // case 0x43: // INSTANCEOF
        // case 0x44: // NEW
        // case 0x45: // CONSTRUCTOR_REFERENCE
        // case 0x46: // METHOD_REFERENCE
        default:
            out.put12(typeRef >>> 24, (typeRef & 0xFFFF00) >> 8);
            break;
        }
        if (typePath == null) {
            out.putByte(0);
        } else {
            int length = typePath.b[typePath.offset] * 2 + 1;
            out.putByteArray(typePath.b, typePath.offset, length);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.DataOutputStream; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.log4j.Appender; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; /** * A simple logger to handle the task-specific user logs. This class uses the * system property <code>hadoop.log.dir</code>. 
*/
public class TaskLog {
    private static final Log LOG = LogFactory.getLog(TaskLog.class.getName());

    // Root of all per-attempt log directories: ${hadoop.log.dir}/userlogs
    private static final File LOG_DIR = new File(System
            .getProperty("hadoop.log.dir"), "userlogs").getAbsoluteFile();

    // Used for the atomic tmp-index -> index rename in writeToIndexFile.
    static LocalFileSystem localFS = null;
    static {
        try {
            localFS = FileSystem.getLocal(new Configuration());
        } catch (IOException ioe) {
            // NOTE(review): localFS stays null here; a later
            // writeToIndexFile would then NPE — presumably acceptable at
            // startup, but worth confirming.
            LOG.warn("Getting local file system failed.");
        }
        if (!LOG_DIR.exists()) {
            LOG_DIR.mkdirs();
        }
    }

    /**
     * Returns the log file of the given kind under the attempt's own
     * directory (LOG_DIR/&lt;taskid&gt;/&lt;kind&gt;).
     */
    public static File getTaskLogFile(TaskAttemptID taskid, LogName filter) {
        return new File(getBaseDir(taskid.toString()), filter.toString());
    }

    /**
     * Returns the log file of the given kind under the directory recorded in
     * the attempt's index file (which may differ from the attempt's own
     * directory when JVMs are reused), or null if the index cannot be read.
     */
    public static File getRealTaskLogFileLocation(TaskAttemptID taskid,
            LogName filter) {
        LogFileDetail l;
        try {
            l = getTaskLogFileDetail(taskid, filter);
        } catch (IOException ie) {
            LOG.error("getTaskLogFileDetail threw an exception " + ie);
            return null;
        }
        return new File(getBaseDir(l.location), filter.toString());
    }

    // One parsed entry of an index file: where the logs live plus the
    // [start, start+length) slice of the shared file for one attempt.
    private static class LogFileDetail {
        final static String LOCATION = "LOG_DIR:";
        String location;
        long start;
        long length;
    }

    /** Convenience overload of getLogFileDetail for non-cleanup attempts. */
    private static LogFileDetail getTaskLogFileDetail(TaskAttemptID taskid,
            LogName filter) throws IOException {
        return getLogFileDetail(taskid, filter, false);
    }

    /**
     * Reads the attempt's index file and returns the location/offset/length
     * entry for the requested log kind.
     *
     * @throws IOException if the index file is empty or unreadable
     */
    private static LogFileDetail getLogFileDetail(TaskAttemptID taskid,
            LogName filter, boolean isCleanup) throws IOException {
        File indexFile = getIndexFile(taskid.toString(), isCleanup);
        // NOTE(review): fis is not closed if a read below throws; a
        // try/finally would be safer.
        BufferedReader fis = new BufferedReader(new java.io.FileReader(
                indexFile));
        // the format of the index file is
        // LOG_DIR: <the dir where the task logs are really stored>
        // stdout:<start-offset in the stdout file> <length>
        // stderr:<start-offset in the stderr file> <length>
        // syslog:<start-offset in the syslog file> <length>
        LogFileDetail l = new LogFileDetail();
        String str = fis.readLine();
        if (str == null) { // the file doesn't have anything
            throw new IOException("Index file for the log of " + taskid
                    + " doesn't exist.");
        }
        l.location = str.substring(str.indexOf(LogFileDetail.LOCATION)
                + LogFileDetail.LOCATION.length());
        // special cases are the debugout and profile.out files. They are
        // guaranteed
        // to be associated with each task attempt since jvm reuse is disabled
        // when profiling/debugging is enabled
        if (filter.equals(LogName.DEBUGOUT) || filter.equals(LogName.PROFILE)) {
            l.length = new File(getBaseDir(l.location), filter.toString())
                    .length();
            l.start = 0;
            fis.close();
            return l;
        }
        str = fis.readLine();
        while (str != null) {
            // look for the exact line containing the logname
            if (str.contains(filter.toString())) {
                str = str.substring(filter.toString().length() + 1);
                String[] startAndLen = str.split(" ");
                l.start = Long.parseLong(startAndLen[0]);
                l.length = Long.parseLong(startAndLen[1]);
                break;
            }
            str = fis.readLine();
        }
        fis.close();
        return l;
    }

    /** Temporary index file written first, then renamed over the real one. */
    private static File getTmpIndexFile(String taskid) {
        return new File(getBaseDir(taskid), "log.tmp");
    }

    public static File getIndexFile(String taskid) {
        return getIndexFile(taskid, false);
    }

    /** Index file path; cleanup attempts use a separate "log.index.cleanup". */
    public static File getIndexFile(String taskid, boolean isCleanup) {
        if (isCleanup) {
            return new File(getBaseDir(taskid), "log.index.cleanup");
        } else {
            return new File(getBaseDir(taskid), "log.index");
        }
    }

    /** Per-attempt log directory under LOG_DIR. */
    private static File getBaseDir(String taskid) {
        return new File(LOG_DIR, taskid);
    }

    // File lengths captured at the start of the current attempt; the current
    // attempt's slice of each shared log file is [prev, currentLength).
    private static long prevOutLength;
    private static long prevErrLength;
    private static long prevLogLength;

    /**
     * Rewrites the current attempt's index file with the present offsets and
     * lengths of stdout/stderr/syslog inside firstTaskid's log files.
     */
    private static void writeToIndexFile(TaskAttemptID firstTaskid,
            boolean isCleanup) throws IOException {
        // To ensure atomicity of updates to index file, write to temporary
        // index
        // file first and then rename.
        // NOTE(review): the tmp/index paths use currentTaskid while the
        // LOG_DIR line and lengths use firstTaskid — intentional for JVM
        // reuse (logs live under the first attempt's dir), but easy to
        // confuse; confirm before touching.
        File tmpIndexFile = getTmpIndexFile(currentTaskid.toString());
        BufferedOutputStream bos = new BufferedOutputStream(
                new FileOutputStream(tmpIndexFile, false));
        DataOutputStream dos = new DataOutputStream(bos);
        // the format of the index file is
        // LOG_DIR: <the dir where the task logs are really stored>
        // STDOUT: <start-offset in the stdout file> <length>
        // STDERR: <start-offset in the stderr file> <length>
        // SYSLOG: <start-offset in the syslog file> <length>
        dos.writeBytes(LogFileDetail.LOCATION + firstTaskid.toString() + "\n"
                + LogName.STDOUT.toString() + ":");
        dos.writeBytes(Long.toString(prevOutLength) + " ");
        dos.writeBytes(Long
                .toString(getTaskLogFile(firstTaskid, LogName.STDOUT).length()
                        - prevOutLength) + "\n" + LogName.STDERR + ":");
        dos.writeBytes(Long.toString(prevErrLength) + " ");
        dos.writeBytes(Long
                .toString(getTaskLogFile(firstTaskid, LogName.STDERR).length()
                        - prevErrLength) + "\n" + LogName.SYSLOG.toString() + ":");
        dos.writeBytes(Long.toString(prevLogLength) + " ");
        dos.writeBytes(Long
                .toString(getTaskLogFile(firstTaskid, LogName.SYSLOG).length()
                        - prevLogLength) + "\n");
        dos.close();
        File indexFile = getIndexFile(currentTaskid.toString(), isCleanup);
        Path indexFilePath = new Path(indexFile.getAbsolutePath());
        Path tmpIndexFilePath = new Path(tmpIndexFile.getAbsolutePath());
        localFS.rename(tmpIndexFilePath, indexFilePath);
    }

    /** Snapshots current log-file lengths as the new attempt's start offsets. */
    private static void resetPrevLengths(TaskAttemptID firstTaskid) {
        prevOutLength = getTaskLogFile(firstTaskid, LogName.STDOUT).length();
        prevErrLength = getTaskLogFile(firstTaskid, LogName.STDERR).length();
        prevLogLength = getTaskLogFile(firstTaskid, LogName.SYSLOG).length();
    }

    private volatile static TaskAttemptID currentTaskid = null;

    public synchronized static void syncLogs(TaskAttemptID firstTaskid,
            TaskAttemptID taskid) throws IOException {
        syncLogs(firstTaskid, taskid, false);
    }

    /**
     * Flushes stdout/stderr and all TaskLogAppenders, then refreshes the
     * index file so that taskid's slice of firstTaskid's log files is
     * up to date.
     */
    @SuppressWarnings("unchecked")
    public synchronized static void syncLogs(TaskAttemptID firstTaskid,
            TaskAttemptID taskid, boolean isCleanup)
            throws IOException {
        System.out.flush();
        System.err.flush();
        Enumeration<Logger> allLoggers = LogManager.getCurrentLoggers();
        while (allLoggers.hasMoreElements()) {
            Logger l = allLoggers.nextElement();
            Enumeration<Appender> allAppenders = l.getAllAppenders();
            while (allAppenders.hasMoreElements()) {
                Appender a = allAppenders.nextElement();
                if (a instanceof TaskLogAppender) {
                    ((TaskLogAppender) a).flush();
                }
            }
        }
        // NOTE(review): reference (!=) comparison, not equals(); this only
        // detects a *different object*, which may be intended if callers
        // always pass the same instance per attempt — confirm.
        if (currentTaskid != taskid) {
            currentTaskid = taskid;
            resetPrevLengths(firstTaskid);
        }
        writeToIndexFile(firstTaskid, isCleanup);
    }

    /**
     * The filter for userlogs.
     */
    public static enum LogName {
        /** Log on the stdout of the task. */
        STDOUT("stdout"),

        /** Log on the stderr of the task. */
        STDERR("stderr"),

        /** Log on the map-reduce system logs of the task. */
        SYSLOG("syslog"),

        /** The java profiler information. */
        PROFILE("profile.out"),

        /** Log the debug script's stdout */
        DEBUGOUT("debugout");

        private String prefix;

        private LogName(String prefix) {
            this.prefix = prefix;
        }

        @Override
        public String toString() {
            return prefix;
        }
    }

    // Accepts log directories older than the given purge timestamp.
    private static class TaskLogsPurgeFilter implements FileFilter {
        long purgeTimeStamp;

        TaskLogsPurgeFilter(long purgeTimeStamp) {
            this.purgeTimeStamp = purgeTimeStamp;
        }

        public boolean accept(File file) {
            LOG.debug("PurgeFilter - file: " + file + ", mtime: "
                    + file.lastModified() + ", purge: " + purgeTimeStamp);
            return file.lastModified() < purgeTimeStamp;
        }
    }

    /**
     * Purge old user logs.
     *
     * @throws IOException
     */
    public static synchronized void cleanup(int logsRetainHours)
            throws IOException {
        // Purge logs of tasks on this tasktracker if their
        // mtime has exceeded "mapred.task.log.retain" hours
        long purgeTimeStamp = System.currentTimeMillis()
                - (logsRetainHours * 60L * 60 * 1000);
        File[] oldTaskLogs = LOG_DIR.listFiles(new TaskLogsPurgeFilter(
                purgeTimeStamp));
        if (oldTaskLogs != null) {
            for (int i = 0; i < oldTaskLogs.length; ++i) {
                FileUtil.fullyDelete(oldTaskLogs[i]);
            }
        }
    }

    // Streams one attempt's slice of a (possibly shared) log file, resolved
    // through the index file.
    static class Reader extends InputStream {
        private long bytesRemaining;
        private FileInputStream file;

        public Reader(TaskAttemptID taskid, LogName kind, long start, long end)
                throws IOException {
            this(taskid, kind, start, end, false);
        }

        /**
         * Read a log file from start to end positions. The offsets may be
         * negative, in which case they are relative to the end of the file. For
         * example, Reader(taskid, kind, 0, -1) is the entire file and
         * Reader(taskid, kind, -4197, -1) is the last 4196 bytes.
         *
         * @param taskid
         *            the id of the task to read the log file for
         * @param kind
         *            the kind of log to read
         * @param start
         *            the offset to read from (negative is relative to tail)
         * @param end
         *            the offset to read upto (negative is relative to tail)
         * @param isCleanup
         *            whether the attempt is cleanup attempt or not
         * @throws IOException
         */
        public Reader(TaskAttemptID taskid, LogName kind, long start, long end,
                boolean isCleanup) throws IOException {
            // find the right log file
            LogFileDetail fileDetail = getLogFileDetail(taskid, kind,
                    isCleanup);
            // calculate the start and stop
            long size = fileDetail.length;
            // negative offsets are tail-relative; +1 so that -1 means "end"
            if (start < 0) {
                start += size + 1;
            }
            if (end < 0) {
                end += size + 1;
            }
            // clamp to [0, size] before shifting into the shared file
            start = Math.max(0, Math.min(start, size));
            end = Math.max(0, Math.min(end, size));
            start += fileDetail.start;
            end += fileDetail.start;
            bytesRemaining = end - start;
            file = new FileInputStream(new File(
                    getBaseDir(fileDetail.location), kind.toString()));
            // skip upto start
            long pos = 0;
            while (pos < start) {
                long result = file.skip(start - pos);
                if (result < 0) {
                    bytesRemaining = 0;
                    break;
                }
                pos += result;
            }
        }

        @Override
        public int read() throws IOException {
            int result = -1;
            if (bytesRemaining > 0) {
                bytesRemaining -= 1;
                result = file.read();
            }
            return result;
        }

        @Override
        public int read(byte[] buffer, int offset, int length)
                throws IOException {
            // never read past the end of this attempt's slice
            length = (int) Math.min(length, bytesRemaining);
            int bytes = file.read(buffer, offset, length);
            if (bytes > 0) {
                bytesRemaining -= bytes;
            }
            return bytes;
        }

        @Override
        public int available() throws IOException {
            return (int) Math.min(bytesRemaining, file.available());
        }

        @Override
        public void close() throws IOException {
            file.close();
        }
    }

    private static final String bashCommand = "bash";
    private static final String tailCommand = "tail";

    /**
     * Get the desired maximum length of task's logs.
     *
     * @param conf
     *            the job to look in
     * @return the number of bytes to cap the log files at
     */
    public static long getTaskLogLength(JobConf conf) {
        return conf.getLong("mapred.userlog.limit.kb", 100) * 1024;
    }

    /**
     * Wrap a command in a shell to capture stdout and stderr to files. If the
     * tailLength is 0, the entire output will be saved.
     *
     * @param cmd
     *            The command and the arguments that should be run
     * @param stdoutFilename
     *            The filename that stdout should be saved to
     * @param stderrFilename
     *            The filename that stderr should be saved to
     * @param tailLength
     *            The length of the tail to be saved.
     * @return the modified command that should be run
     */
    public static List<String> captureOutAndError(List<String> cmd,
            File stdoutFilename, File stderrFilename, long tailLength)
            throws IOException {
        return captureOutAndError(null, cmd, stdoutFilename, stderrFilename,
                tailLength, null);
    }

    /**
     * Wrap a command in a shell to capture stdout and stderr to files. Setup
     * commands such as setting memory limit can be passed which will be
     * executed before exec. If the tailLength is 0, the entire output will be
     * saved.
     *
     * @param setup
     *            The setup commands for the execed process.
     * @param cmd
     *            The command and the arguments that should be run
     * @param stdoutFilename
     *            The filename that stdout should be saved to
     * @param stderrFilename
     *            The filename that stderr should be saved to
     * @param tailLength
     *            The length of the tail to be saved.
     * @return the modified command that should be run
     */
    public static List<String> captureOutAndError(List<String> setup,
            List<String> cmd, File stdoutFilename, File stderrFilename,
            long tailLength) throws IOException {
        return captureOutAndError(setup, cmd, stdoutFilename, stderrFilename,
                tailLength, null);
    }

    /**
     * Wrap a command in a shell to capture stdout and stderr to files. Setup
     * commands such as setting memory limit can be passed which will be
     * executed before exec. If the tailLength is 0, the entire output will be
     * saved.
     *
     * @param setup
     *            The setup commands for the execed process.
     * @param cmd
     *            The command and the arguments that should be run
     * @param stdoutFilename
     *            The filename that stdout should be saved to
     * @param stderrFilename
     *            The filename that stderr should be saved to
     * @param tailLength
     *            The length of the tail to be saved.
     * @param pidFileName
     *            The name of the pid-file
     * @return the modified command that should be run
     */
    public static List<String> captureOutAndError(List<String> setup,
            List<String> cmd, File stdoutFilename, File stderrFilename,
            long tailLength, String pidFileName) throws IOException {
        String stdout = FileUtil.makeShellPath(stdoutFilename);
        String stderr = FileUtil.makeShellPath(stderrFilename);
        // resulting command is: bash -c '<mergedCmd>'
        List<String> result = new ArrayList<String>(3);
        result.add(bashCommand);
        result.add("-c");
        StringBuffer mergedCmd = new StringBuffer();

        // Spit out the pid to pidFileName
        if (pidFileName != null) {
            mergedCmd.append("echo $$ > ");
            mergedCmd.append(pidFileName);
            mergedCmd.append(" ;");
        }

        if (setup != null && setup.size() > 0) {
            mergedCmd.append(addCommand(setup, false));
            mergedCmd.append(";");
        }
        // with a tail length, pipe the command through tail inside a
        // subshell; otherwise exec it directly with simple redirection
        if (tailLength > 0) {
            mergedCmd.append("(");
        } else {
            mergedCmd.append("exec ");
        }
        mergedCmd.append(addCommand(cmd, true));
        mergedCmd.append(" < /dev/null ");
        if (tailLength > 0) {
            mergedCmd.append(" | ");
            mergedCmd.append(tailCommand);
            mergedCmd.append(" -c ");
            mergedCmd.append(tailLength);
            mergedCmd.append(" >> ");
            mergedCmd.append(stdout);
            // $PIPESTATUS preserves the command's own exit code past tail
            mergedCmd.append(" ; exit $PIPESTATUS ) 2>&1 | ");
            mergedCmd.append(tailCommand);
            mergedCmd.append(" -c ");
            mergedCmd.append(tailLength);
            mergedCmd.append(" >> ");
            mergedCmd.append(stderr);
            mergedCmd.append(" ; exit $PIPESTATUS");
        } else {
            mergedCmd.append(" 1>> ");
            mergedCmd.append(stdout);
            mergedCmd.append(" 2>> ");
            mergedCmd.append(stderr);
        }
        result.add(mergedCmd.toString());
        return result;
    }

    /**
     * Add quotes to each of the command strings and return as a single string
     *
     * @param cmd
     *            The command to be quoted
     * @param isExecutable
     *            makes shell path if the first argument is executable
     * @return returns The quoted string.
     * @throws IOException
     */
    public static String addCommand(List<String> cmd, boolean isExecutable)
            throws IOException {
        StringBuffer command = new StringBuffer();
        for (String s : cmd) {
            command.append('\'');
            if (isExecutable) {
                // the executable name needs to be expressed as a shell path
                // for
                // the
                // shell to find it.
                command.append(FileUtil.makeShellPath(new File(s)));
                isExecutable = false;
            } else {
                command.append(s);
            }
            command.append('\'');
            command.append(" ");
        }
        return command.toString();
    }

    /**
     * Wrap a command in a shell to capture debug script's stdout and stderr to
     * debugout.
     *
     * @param cmd
     *            The command and the arguments that should be run
     * @param debugoutFilename
     *            The filename that stdout and stderr should be saved to.
     * @return the modified command that should be run
     * @throws IOException
     */
    public static List<String> captureDebugOut(List<String> cmd,
            File debugoutFilename) throws IOException {
        String debugout = FileUtil.makeShellPath(debugoutFilename);
        List<String> result = new ArrayList<String>(3);
        result.add(bashCommand);
        result.add("-c");
        StringBuffer mergedCmd = new StringBuffer();
        mergedCmd.append("exec ");
        boolean isExecutable = true;
        for (String s : cmd) {
            if (isExecutable) {
                // the executable name needs to be expressed as a shell path
                // for
                // the
                // shell to find it.
                mergedCmd.append(FileUtil.makeShellPath(new File(s)));
                isExecutable = false;
            } else {
                mergedCmd.append(s);
            }
            mergedCmd.append(" ");
        }
        mergedCmd.append(" < /dev/null ");
        mergedCmd.append(" >");
        mergedCmd.append(debugout);
        mergedCmd.append(" 2>&1 ");
        result.add(mergedCmd.toString());
        return result;
    }
} // TaskLog
/**
 * Copyright Microsoft Corporation
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.microsoft.azure.storage.core;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map.Entry;

import com.microsoft.azure.storage.Constants;
import com.microsoft.azure.storage.StorageCredentials;
import com.microsoft.azure.storage.StorageException;
import com.microsoft.azure.storage.StorageUri;

/**
 * RESERVED FOR INTERNAL USE. A class to help modify paths
 */
public final class PathUtility {
    /**
     * Adds a queryString to an URI.
     * 
     * @param resourceURI
     *            the URI of the resource
     * @param fieldCollection
     *            the key/ values collection to append.
     * @return an appended URI.
     * @throws URISyntaxException
     *             if the resulting URI is invalid.
     * @throws StorageException
     */
    public static URI addToSingleUriQuery(final URI resourceURI, final HashMap<String, String[]> fieldCollection)
            throws URISyntaxException, StorageException {
        if (resourceURI == null) {
            return null;
        }

        final UriQueryBuilder outUri = new UriQueryBuilder();

        // Generate new queryString
        for (final Entry<String, String[]> entry : fieldCollection.entrySet()) {
            // each key may map to several values; add one pair per value
            for (final String val : entry.getValue()) {
                outUri.add(entry.getKey(), val);
            }
        }

        return outUri.addToURI(resourceURI);
    }

    /**
     * Adds a queryString to both the primary and secondary URI of a
     * {@link StorageUri}.
     * 
     * @param resourceURI
     *            the URI of the resource
     * @param queryString
     *            the query string to add
     * @return an appended URI.
     * @throws URISyntaxException
     *             if the resulting URI is invalid.
     * @throws StorageException
     */
    public static StorageUri addToQuery(final StorageUri resourceURI, final String queryString)
            throws URISyntaxException, StorageException {
        return new StorageUri(addToSingleUriQuery(resourceURI.getPrimaryUri(), parseQueryString(queryString)),
                addToSingleUriQuery(resourceURI.getSecondaryUri(), parseQueryString(queryString)));
    }

    /**
     * Adds a queryString to an URI.
     * 
     * @param resourceURI
     *            the URI of the resource
     * @param queryString
     *            the query string to add
     * @return an appended URI.
     * @throws URISyntaxException
     *             if the resulting URI is invalid.
     * @throws StorageException
     */
    public static URI addToQuery(final URI resourceURI, final String queryString) throws URISyntaxException,
            StorageException {
        return addToSingleUriQuery(resourceURI, parseQueryString(queryString));
    }

    /**
     * Appends a path to a list of URIs correctly using "/" as separator.
     * 
     * @param uriList
     *            The base Uri.
     * @param relativeOrAbsoluteUri
     *            The relative or absolute URI.
     * @return The appended Uri.
     * @throws URISyntaxException
     */
    public static StorageUri appendPathToUri(final StorageUri uriList, final String relativeOrAbsoluteUri)
            throws URISyntaxException {
        return appendPathToUri(uriList, relativeOrAbsoluteUri, "/");
    }

    /**
     * Appends a path to a list of URIs correctly using the given separator.
     * 
     * @param uriList
     *            The base Uri.
     * @param relativeOrAbsoluteUri
     *            The relative or absolute URI.
     * @param separator
     *            the separator to use.
     * @return The appended Uri.
     * @throws URISyntaxException
     */
    public static StorageUri appendPathToUri(final StorageUri uriList, final String relativeOrAbsoluteUri,
            final String separator) throws URISyntaxException {
        return new StorageUri(appendPathToSingleUri(uriList.getPrimaryUri(), relativeOrAbsoluteUri, separator),
                appendPathToSingleUri(uriList.getSecondaryUri(), relativeOrAbsoluteUri, separator));
    }

    /**
     * Appends a path to a URI correctly using "/" as separator.
     * 
     * @param uri
     *            The base Uri.
     * @param relativeOrAbsoluteUri
     *            The relative or absolute URI.
     * @return The appended Uri.
     * @throws URISyntaxException
     */
    public static URI appendPathToSingleUri(final URI uri, final String relativeOrAbsoluteUri)
            throws URISyntaxException {
        return appendPathToSingleUri(uri, relativeOrAbsoluteUri, "/");
    }

    /**
     * Appends a path to a URI correctly using the given separator.
     * 
     * @param uri
     *            The base Uri.
     * @param relativeUri
     *            The relative URI.
     * @param separator
     *            the separator to use.
     * @return The appended Uri.
     * @throws URISyntaxException
     *             a valid Uri cannot be constructed
     */
    public static URI appendPathToSingleUri(final URI uri, final String relativeUri, final String separator)
            throws URISyntaxException {
        if (uri == null) {
            return null;
        }

        if (relativeUri == null || relativeUri.isEmpty()) {
            return uri;
        }

        if (uri.getPath().length() == 0 && relativeUri.startsWith(separator)) {
            // NOTE(review): this branch keeps the *raw* (encoded) query and
            // fragment while the branch below uses the decoded ones —
            // presumably deliberate, but the asymmetry is worth confirming.
            return new URI(uri.getScheme(), uri.getAuthority(), relativeUri, uri.getRawQuery(), uri.getRawFragment());
        }

        final StringBuilder pathString = new StringBuilder(uri.getPath());
        // avoid producing a doubled separator when the base already ends
        // with one
        if (uri.getPath().endsWith(separator)) {
            pathString.append(relativeUri);
        }
        else {
            pathString.append(separator);
            pathString.append(relativeUri);
        }

        return new URI(uri.getScheme(), uri.getAuthority(), pathString.toString(), uri.getQuery(), uri.getFragment());
    }

    /**
     * Gets the blob name from the URI.
     * 
     * @param inURI
     *            the resource address
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return the blobs name
     * @throws URISyntaxException
     */
    public static String getBlobNameFromURI(final URI inURI, final boolean usePathStyleUris)
            throws URISyntaxException {
        // the blob name is whatever remains after relativizing against the
        // container URI (with a trailing "/" so the container itself is
        // excluded)
        return Utility.safeRelativize(new URI(getContainerURI(new StorageUri(inURI), usePathStyleUris).getPrimaryUri()
                .toString().concat("/")), inURI);
    }

    /**
     * Gets the canonical path for an object from the credentials.
     * 
     * @param credentials
     *            the credentials to use.
     * @param absolutePath
     *            the Absolute path of the object.
     * @return the canonical path for an object from the credentials
     */
    public static String getCanonicalPathFromCredentials(final StorageCredentials credentials,
            final String absolutePath) {
        final String account = credentials.getAccountName();

        if (account == null) {
            final String errorMessage = SR.CANNOT_CREATE_SAS_FOR_GIVEN_CREDENTIALS;
            throw new IllegalArgumentException(errorMessage);
        }
        // canonical path is "/<account><absolutePath>"
        final StringBuilder builder = new StringBuilder("/");
        builder.append(account);
        builder.append(absolutePath);
        return builder.toString();
    }

    /**
     * Get the container name from address from the URI.
     * 
     * @param resourceAddress
     *            The container Uri.
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return container name from address from the URI.
     * @throws IllegalArgumentException
     */
    public static String getContainerNameFromUri(final URI resourceAddress, final boolean usePathStyleUris) {
        return getResourceNameFromUri(resourceAddress, usePathStyleUris,
                String.format("Invalid blob address '%s', missing container information", resourceAddress));
    }

    /**
     * Gets the file name from the URI.
     * 
     * @param resourceAddress
     *            the file URI
     * @param usePathStyleUris
     *            a value indicating if the address is a path style URI
     * @return the file's name
     */
    public static String getFileNameFromURI(final URI resourceAddress, final boolean usePathStyleUris) {
        // generate an array of the different levels of the path
        final String[] pathSegments = resourceAddress.getRawPath().split("/");

        // usePathStyleUris ? baseuri/accountname/sharename/objectname : accountname.baseuri/sharename/objectname
        final int shareIndex = usePathStyleUris ? 2 : 1;

        if (pathSegments.length - 1 <= shareIndex) {
            // legal file addresses cannot end with or before the sharename
            throw new IllegalArgumentException(String.format("Invalid file address '%s'.", resourceAddress));
        }
        else {
            // in a legal file address the lowest level is the filename
            return pathSegments[pathSegments.length - 1];
        }
    }

    /**
     * Get the name of the lowest level directory from the given directory address.
     * 
     * @param resourceAddress
     *            the directory URI
     * @param usePathStyleUris
     *            a value indicating if the address is a path style URI
     * @return directory name from address from the URI
     */
    public static String getDirectoryNameFromURI(final URI resourceAddress, final boolean usePathStyleUris) {
        // generate an array of the different levels of the path
        final String[] pathSegments = resourceAddress.getRawPath().split("/");

        // usePathStyleUris ? baseuri/accountname/sharename/objectname : accountname.baseuri/sharename/objectname
        final int shareIndex = usePathStyleUris ? 2 : 1;

        if (pathSegments.length - 1 < shareIndex) {
            // if the sharename is missing or too close to the end
            throw new IllegalArgumentException(String.format("Invalid directory address '%s'.", resourceAddress));
        }
        else if (pathSegments.length - 1 == shareIndex) {
            // this is the root directory; it has no name
            return "";
        }
        else {
            // in a legal directory address the lowest level is the directory
            return pathSegments[pathSegments.length - 1];
        }
    }

    /**
     * Get the share name from address from the URI.
     * 
     * @param resourceAddress
     *            The share Uri.
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return share name from address from the URI.
     * @throws IllegalArgumentException
     */
    public static String getShareNameFromUri(final URI resourceAddress, final boolean usePathStyleUris) {
        return getResourceNameFromUri(resourceAddress, usePathStyleUris,
                String.format("Invalid file address '%s', missing share information", resourceAddress));
    }

    /**
     * Get the table name from address from the URI.
     * 
     * @param resourceAddress
     *            The table Uri.
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return table name from address from the URI.
     * @throws IllegalArgumentException
     */
    public static String getTableNameFromUri(final URI resourceAddress, final boolean usePathStyleUris) {
        return getResourceNameFromUri(resourceAddress, usePathStyleUris,
                String.format("Invalid table address '%s', missing table information", resourceAddress));
    }

    /**
     * Get the container, queue or table name from address from the URI.
     * 
     * @param resourceAddress
     *            The queue Uri.
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @param error
     *            the message for the IllegalArgumentException thrown when the
     *            path has too few segments.
     * @return container name from address from the URI.
     * @throws IllegalArgumentException
     */
    private static String getResourceNameFromUri(final URI resourceAddress, final boolean usePathStyleUris,
            final String error) {
        Utility.assertNotNull("resourceAddress", resourceAddress);

        final String[] pathSegments = resourceAddress.getRawPath().split("/");

        // path-style: ["", account, resource]; host-style: ["", resource]
        final int expectedPartsLength = usePathStyleUris ? 3 : 2;

        if (pathSegments.length < expectedPartsLength) {
            throw new IllegalArgumentException(error);
        }

        final String resourceName = usePathStyleUris ? pathSegments[2] : pathSegments[1];

        // trailing "/" is not part of the resource name
        return Utility.trimEnd(resourceName, '/');
    }

    /**
     * Gets the container URI from a blob address
     * 
     * @param blobAddress
     *            the blob address
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return the container URI from a blob address
     * @throws URISyntaxException
     */
    public static StorageUri getContainerURI(final StorageUri blobAddress, final boolean usePathStyleUris)
            throws URISyntaxException {
        // container name is always resolved from the primary URI
        final String containerName = getContainerNameFromUri(blobAddress.getPrimaryUri(), usePathStyleUris);

        final StorageUri containerUri = appendPathToUri(getServiceClientBaseAddress(blobAddress, usePathStyleUris),
                containerName);
        return containerUri;
    }

    /**
     * Gets the share URI from a file address
     * 
     * @param fileAddress
     *            the file address
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return the share URI from a file address
     * @throws URISyntaxException
     */
    public static StorageUri getShareURI(final StorageUri fileAddress, final boolean usePathStyleUris)
            throws URISyntaxException {
        final String shareName = getShareNameFromUri(fileAddress.getPrimaryUri(), usePathStyleUris);

        final StorageUri shareUri = appendPathToUri(getServiceClientBaseAddress(fileAddress, usePathStyleUris),
                shareName);
        return shareUri;
    }

    /**
     * Get the queue name from address from the URI.
     * 
     * @param resourceAddress
     *            The queue Uri.
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return container name from address from the URI.
     * @throws IllegalArgumentException
     */
    public static String getQueueNameFromUri(final URI resourceAddress, final boolean usePathStyleUris) {
        return getResourceNameFromUri(resourceAddress, usePathStyleUris,
                String.format("Invalid queue URI '%s'.", resourceAddress));
    }

    /**
     * Get the service client address from a complete Uri.
     * 
     * @param address
     *            Complete address of the resource.
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return the service client address from a complete Uri.
     * @throws URISyntaxException
     */
    public static String getServiceClientBaseAddress(final URI address, final boolean usePathStyleUris)
            throws URISyntaxException {
        if (address == null) {
            return null;
        }

        if (usePathStyleUris) {
            final String[] pathSegments = address.getRawPath().split("/");

            if (pathSegments.length < 2) {
                final String error = String.format(SR.PATH_STYLE_URI_MISSING_ACCOUNT_INFORMATION);
                throw new IllegalArgumentException(error);
            }

            // path-style base address keeps scheme, authority and the
            // account segment only
            final StringBuilder completeAddress = new StringBuilder(new URI(address.getScheme(),
                    address.getAuthority(), null, null, null).toString());
            completeAddress.append("/");
            completeAddress.append(Utility.trimEnd(pathSegments[1], '/'));

            return completeAddress.toString();
        }
        else {
            // host-style base address is just scheme + authority
            return new URI(address.getScheme(), address.getAuthority(), null, null, null).toString();
        }
    }

    /**
     * Get the service client address from a complete Uri, inferring whether
     * the address is path-style from the primary Uri.
     * 
     * @param addressUri
     *            Complete address of the resource.
     * @return the service client address from a complete Uri.
     * @throws StorageException
     */
    public static StorageUri getServiceClientBaseAddress(final StorageUri addressUri) throws StorageException {
        boolean usePathStyleUris = Utility.determinePathStyleFromUri(addressUri.getPrimaryUri());

        try {
            return getServiceClientBaseAddress(addressUri, usePathStyleUris);
        }
        catch (final URISyntaxException e) {
            // wrap as an unexpected storage error, preserving the cause
            throw Utility.generateNewUnexpectedStorageException(e);
        }
    }

    /**
     * Get the service client address from a complete Uri.
     * 
     * @param address
     *            Complete address of the resource.
     * @param usePathStyleUris
     *            a value indicating if the address is a path style uri.
     * @return the service client address from a complete Uri.
* @throws URISyntaxException
*/
public static StorageUri getServiceClientBaseAddress(final StorageUri addressUri, final boolean usePathStyleUris)
        throws URISyntaxException {
    // Resolve the base address of the primary URI; the secondary endpoint is optional and may be null.
    return new StorageUri(
            new URI(getServiceClientBaseAddress(addressUri.getPrimaryUri(), usePathStyleUris)),
            addressUri.getSecondaryUri() != null
                    ? new URI(getServiceClientBaseAddress(addressUri.getSecondaryUri(), usePathStyleUris))
                    : null);
}

/**
 * Parses a query string into a one to many hashmap.
 *
 * @param parseString
 *            the string to parse
 * @return a HashMap<String, String[]> of the key values.
 * @throws StorageException
 */
public static HashMap<String, String[]> parseQueryString(String parseString) throws StorageException {
    final HashMap<String, String[]> retVals = new HashMap<String, String[]>();
    if (Utility.isNullOrEmpty(parseString)) {
        return retVals;
    }

    // 1. Remove the leading '?' if present.
    final int queryDex = parseString.indexOf("?");
    if (queryDex >= 0 && parseString.length() > 0) {
        parseString = parseString.substring(queryDex + 1);
    }

    // 2. Split name/value pairs on '&' when present, otherwise on ';'.
    final String[] valuePairs = parseString.contains("&") ? parseString.split("&") : parseString.split(";");

    // 3. For each name=value pair, decode the parts and add a map entry.
    for (int m = 0; m < valuePairs.length; m++) {
        final int equalDex = valuePairs[m].indexOf("=");

        // Skip pairs without '=' or with an empty value.
        if (equalDex < 0 || equalDex == valuePairs[m].length() - 1) {
            continue;
        }

        String key = valuePairs[m].substring(0, equalDex);
        String value = valuePairs[m].substring(equalDex + 1);

        key = Utility.safeDecode(key);
        value = Utility.safeDecode(value);

        // 3.1 Add to the map, appending to any existing values for this key.
        final String[] values = retVals.get(key);
        if (values == null) {
            if (!value.equals(Constants.EMPTY_STRING)) {
                retVals.put(key, new String[] { value });
            }
        }
        else if (!value.equals(Constants.EMPTY_STRING)) {
            final String[] newValues = new String[values.length + 1];
            for (int j = 0; j < values.length; j++) {
                newValues[j] = values[j];
            }
            // FIX: write the new value into the last valid slot (newValues.length was
            // out of bounds) and store the grown array back into the map (it was
            // previously built and then discarded, dropping repeated keys).
            newValues[newValues.length - 1] = value;
            retVals.put(key, newValues);
        }
    }

    return retVals;
}

/**
 * Strips the Query and Fragment from the uri.
 *
 * @param inUri
 *            the uri to alter
 * @return the stripped uri.
 * @throws StorageException
 */
public static URI stripSingleURIQueryAndFragment(final URI inUri) throws StorageException {
    if (inUri == null) {
        return null;
    }
    try {
        return new URI(inUri.getScheme(), inUri.getAuthority(), inUri.getPath(), null, null);
    }
    catch (final URISyntaxException e) {
        throw Utility.generateNewUnexpectedStorageException(e);
    }
}

/**
 * Strips the Query and Fragment from the uri.
 *
 * @param inUri
 *            the uri to alter
 * @return the stripped uri.
 * @throws StorageException
 */
public static StorageUri stripURIQueryAndFragment(final StorageUri inUri) throws StorageException {
    return new StorageUri(stripSingleURIQueryAndFragment(inUri.getPrimaryUri()),
            stripSingleURIQueryAndFragment(inUri.getSecondaryUri()));
}

/**
 * Private Default Ctor.
 */
private PathUtility() {
    // No op
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.hadoop.fs.v2; import java.io.BufferedOutputStream; import java.io.Closeable; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.AbstractFileSystem; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileChecksum; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.FsStatus; import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.fs.Options; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.DataChecksum; 
import org.apache.hadoop.util.Progressable; import org.apache.ignite.igfs.IgfsBlockLocation; import org.apache.ignite.igfs.IgfsFile; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.internal.igfs.common.IgfsLogger; import org.apache.ignite.internal.processors.hadoop.igfs.HadoopIgfsEndpoint; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsInputStream; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsOutputStream; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsStreamDelegate; import org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsWrapper; import org.apache.ignite.internal.processors.igfs.IgfsHandshakeResponse; import org.apache.ignite.internal.processors.igfs.IgfsStatus; import org.apache.ignite.internal.processors.igfs.IgfsUtils; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_BATCH_SIZE; import static org.apache.ignite.configuration.FileSystemConfiguration.DFLT_IGFS_LOG_DIR; import static org.apache.ignite.hadoop.fs.v1.IgniteHadoopFileSystem.getFsHadoopUser; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_COLOCATED_WRITES; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_BATCH_SIZE; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_DIR; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_LOG_ENABLED; import static org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_PREFER_LOCAL_WRITES; import static 
org.apache.ignite.internal.processors.hadoop.impl.fs.HadoopParameters.PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH; import static org.apache.ignite.internal.processors.hadoop.impl.igfs.HadoopIgfsUtils.parameter; import static org.apache.ignite.internal.processors.igfs.IgfsEx.IGFS_SCHEME; /** * {@code IGFS} Hadoop 2.x file system driver over file system API. To use * {@code IGFS} as Hadoop file system, you should configure this class * in Hadoop's {@code core-site.xml} as follows: * <pre name="code" class="xml"> * &lt;property&gt; * &lt;name&gt;fs.default.name&lt;/name&gt; * &lt;value&gt;igfs://ipc&lt;/value&gt; * &lt;/property&gt; * * &lt;property&gt; * &lt;name&gt;fs.igfs.impl&lt;/name&gt; * &lt;value&gt;org.apache.ignite.hadoop.fs.v2.IgniteHadoopFileSystem&lt;/value&gt; * &lt;/property&gt; * </pre> * You should also add Ignite JAR and all libraries to Hadoop classpath. To * do this, add following lines to {@code conf/hadoop-env.sh} script in Hadoop * distribution: * <pre name="code" class="bash"> * export IGNITE_HOME=/path/to/Ignite/distribution * export HADOOP_CLASSPATH=$IGNITE_HOME/ignite*.jar * * for f in $IGNITE_HOME/libs/*.jar; do * export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f; * done * </pre> * <h1 class="header">Data vs Clients Nodes</h1> * Hadoop needs to use its FileSystem remotely from client nodes as well as directly on * data nodes. Client nodes are responsible for basic file system operations as well as * accessing data nodes remotely. Usually, client nodes are started together * with {@code job-submitter} or {@code job-scheduler} processes, while data nodes are usually * started together with Hadoop {@code task-tracker} processes. * <p> * For sample client and data node configuration refer to {@code config/hadoop/default-config-client.xml} * and {@code config/hadoop/default-config.xml} configuration files in Ignite installation. */ public class IgniteHadoopFileSystem extends AbstractFileSystem implements Closeable { /** Logger. 
*/ private static final Log LOG = LogFactory.getLog(IgniteHadoopFileSystem.class); /** Ensures that close routine is invoked at most once. */ private final AtomicBoolean closeGuard = new AtomicBoolean(); /** Grid remote client. */ private HadoopIgfsWrapper rmtClient; /** The name of the user this File System created on behalf of. */ private final String user; /** Working directory. */ private IgfsPath workingDir; /** URI. */ private final URI uri; /** Authority. */ private String uriAuthority; /** Client logger. */ private IgfsLogger clientLog; /** Server block size. */ private long grpBlockSize; /** Default replication factor. */ private short dfltReplication; /** Whether custom sequential reads before prefetch value is provided. */ private boolean seqReadsBeforePrefetchOverride; /** Custom-provided sequential reads before prefetch. */ private int seqReadsBeforePrefetch; /** Flag that controls whether file writes should be colocated on data node. */ private boolean colocateFileWrites; /** Prefer local writes. */ private boolean preferLocFileWrites; /** * @param name URI for file system. * @param cfg Configuration. * @throws URISyntaxException if name has invalid syntax. * @throws IOException If initialization failed. */ public IgniteHadoopFileSystem(URI name, Configuration cfg) throws URISyntaxException, IOException { super(HadoopIgfsEndpoint.normalize(name), IGFS_SCHEME, false, -1); uri = name; user = getFsHadoopUser(); try { initialize(name, cfg); } catch (IOException e) { // Close client if exception occurred. 
if (rmtClient != null) rmtClient.close(false); throw e; } workingDir = new IgfsPath("/user/" + user); } /** {@inheritDoc} */ @Override public void checkPath(Path path) { URI uri = path.toUri(); if (uri.isAbsolute()) { if (!F.eq(uri.getScheme(), IGFS_SCHEME)) throw new InvalidPathException("Wrong path scheme [expected=" + IGFS_SCHEME + ", actual=" + uri.getAuthority() + ']'); if (!F.eq(uri.getAuthority(), uriAuthority)) throw new InvalidPathException("Wrong path authority [expected=" + uriAuthority + ", actual=" + uri.getAuthority() + ']'); } } /** * Public setter that can be used by direct users of FS or Visor. * * @param colocateFileWrites Whether all ongoing file writes should be colocated. */ @SuppressWarnings("UnusedDeclaration") public void colocateFileWrites(boolean colocateFileWrites) { this.colocateFileWrites = colocateFileWrites; } /** * Enter busy state. * * @throws IOException If file system is stopped. */ private void enterBusy() throws IOException { if (closeGuard.get()) throw new IOException("File system is stopped."); } /** * Leave busy state. */ private void leaveBusy() { // No-op. } /** * @param name URI passed to constructor. * @param cfg Configuration passed to constructor. * @throws IOException If initialization failed. */ @SuppressWarnings("ConstantConditions") private void initialize(URI name, Configuration cfg) throws IOException { enterBusy(); try { if (rmtClient != null) throw new IOException("File system is already initialized: " + rmtClient); A.notNull(name, "name"); A.notNull(cfg, "cfg"); if (!IGFS_SCHEME.equals(name.getScheme())) throw new IOException("Illegal file system URI [expected=" + IGFS_SCHEME + "://[name]/[optional_path], actual=" + name + ']'); uriAuthority = name.getAuthority(); // Override sequential reads before prefetch if needed. 
seqReadsBeforePrefetch = parameter(cfg, PARAM_IGFS_SEQ_READS_BEFORE_PREFETCH, uriAuthority, 0); if (seqReadsBeforePrefetch > 0) seqReadsBeforePrefetchOverride = true; // In Ignite replication factor is controlled by data cache affinity. // We use replication factor to force the whole file to be stored on local node. dfltReplication = (short)cfg.getInt("dfs.replication", 3); // Get file colocation control flag. colocateFileWrites = parameter(cfg, PARAM_IGFS_COLOCATED_WRITES, uriAuthority, false); preferLocFileWrites = cfg.getBoolean(PARAM_IGFS_PREFER_LOCAL_WRITES, false); // Get log directory. String logDirCfg = parameter(cfg, PARAM_IGFS_LOG_DIR, uriAuthority, DFLT_IGFS_LOG_DIR); File logDirFile = U.resolveIgnitePath(logDirCfg); String logDir = logDirFile != null ? logDirFile.getAbsolutePath() : null; rmtClient = new HadoopIgfsWrapper(uriAuthority, logDir, cfg, LOG, user); // Handshake. IgfsHandshakeResponse handshake = rmtClient.handshake(logDir); grpBlockSize = handshake.blockSize(); Boolean logEnabled = parameter(cfg, PARAM_IGFS_LOG_ENABLED, uriAuthority, false); if (handshake.sampling() != null ? handshake.sampling() : logEnabled) { // Initiate client logger. if (logDir == null) throw new IOException("Failed to resolve log directory: " + logDirCfg); Integer batchSize = parameter(cfg, PARAM_IGFS_LOG_BATCH_SIZE, uriAuthority, DFLT_IGFS_LOG_BATCH_SIZE); clientLog = IgfsLogger.logger(uriAuthority, handshake.igfsName(), logDir, batchSize); } else clientLog = IgfsLogger.disabledLogger(); } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public void close() throws IOException { if (closeGuard.compareAndSet(false, true)) { if (rmtClient == null) return; rmtClient.close(false); if (clientLog.isLogEnabled()) clientLog.close(); // Reset initialized resources. 
rmtClient = null; } } /** {@inheritDoc} */ @Override public URI getUri() { return uri; } /** {@inheritDoc} */ @Override public int getUriDefaultPort() { return -1; } /** {@inheritDoc} */ @Override public FsServerDefaults getServerDefaults() throws IOException { return new FsServerDefaults(grpBlockSize, (int)grpBlockSize, (int)grpBlockSize, dfltReplication, 64 * 1024, false, 0, DataChecksum.Type.NULL); } /** {@inheritDoc} */ @Override public boolean setReplication(Path f, short replication) throws IOException { return false; } /** {@inheritDoc} */ @Override public void setTimes(Path f, long mtime, long atime) throws IOException { if (mtime == -1 && atime == -1) return; rmtClient.setTimes(convert(f), atime, mtime); } /** {@inheritDoc} */ @Override public FsStatus getFsStatus() throws IOException { IgfsStatus status = rmtClient.fsStatus(); return new FsStatus(status.spaceTotal(), status.spaceUsed(), status.spaceTotal() - status.spaceUsed()); } /** {@inheritDoc} */ @Override public void setPermission(Path p, FsPermission perm) throws IOException { enterBusy(); try { A.notNull(p, "p"); if (rmtClient.update(convert(p), permission(perm)) == null) throw new IOException("Failed to set file permission (file not found?)" + " [path=" + p + ", perm=" + perm + ']'); } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public void setOwner(Path p, String usr, String grp) throws IOException { A.notNull(p, "p"); A.notNull(usr, "username"); A.notNull(grp, "grpName"); enterBusy(); try { if (rmtClient.update(convert(p), F.asMap(IgfsUtils.PROP_USER_NAME, usr, IgfsUtils.PROP_GROUP_NAME, grp)) == null) { throw new IOException("Failed to set file permission (file not found?)" + " [path=" + p + ", username=" + usr + ", grpName=" + grp + ']'); } } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public FSDataInputStream open(Path f, int bufSize) throws IOException { A.notNull(f, "f"); enterBusy(); try { IgfsPath path = convert(f); HadoopIgfsStreamDelegate stream = 
seqReadsBeforePrefetchOverride ? rmtClient.open(path, seqReadsBeforePrefetch) : rmtClient.open(path); long logId = -1; if (clientLog.isLogEnabled()) { logId = IgfsLogger.nextId(); clientLog.logOpen(logId, path, bufSize, stream.length()); } if (LOG.isDebugEnabled()) LOG.debug("Opening input stream [thread=" + Thread.currentThread().getName() + ", path=" + path + ", bufSize=" + bufSize + ']'); HadoopIgfsInputStream igfsIn = new HadoopIgfsInputStream(stream, stream.length(), bufSize, LOG, clientLog, logId); if (LOG.isDebugEnabled()) LOG.debug("Opened input stream [path=" + path + ", delegate=" + stream + ']'); return new FSDataInputStream(igfsIn); } finally { leaveBusy(); } } /** {@inheritDoc} */ @SuppressWarnings("deprecation") @Override public FSDataOutputStream createInternal( Path f, EnumSet<CreateFlag> flag, FsPermission perm, int bufSize, short replication, long blockSize, Progressable progress, Options.ChecksumOpt checksumOpt, boolean createParent ) throws IOException { A.notNull(f, "f"); enterBusy(); boolean overwrite = flag.contains(CreateFlag.OVERWRITE); boolean append = flag.contains(CreateFlag.APPEND); boolean create = flag.contains(CreateFlag.CREATE); OutputStream out = null; try { IgfsPath path = convert(f); if (LOG.isDebugEnabled()) LOG.debug("Opening output stream in create [thread=" + Thread.currentThread().getName() + "path=" + path + ", overwrite=" + overwrite + ", bufSize=" + bufSize + ']'); Map<String, String> permMap = F.asMap(IgfsUtils.PROP_PERMISSION, toString(perm), IgfsUtils.PROP_PREFER_LOCAL_WRITES, Boolean.toString(preferLocFileWrites)); // Create stream and close it in the 'finally' section if any sequential operation failed. 
HadoopIgfsStreamDelegate stream; long logId = -1; if (append) { stream = rmtClient.append(path, create, permMap); if (clientLog.isLogEnabled()) { logId = IgfsLogger.nextId(); clientLog.logAppend(logId, path, bufSize); } if (LOG.isDebugEnabled()) LOG.debug("Opened output stream in append [path=" + path + ", delegate=" + stream + ']'); } else { stream = rmtClient.create(path, overwrite, colocateFileWrites, replication, blockSize, permMap); if (clientLog.isLogEnabled()) { logId = IgfsLogger.nextId(); clientLog.logCreate(logId, path, overwrite, bufSize, replication, blockSize); } if (LOG.isDebugEnabled()) LOG.debug("Opened output stream in create [path=" + path + ", delegate=" + stream + ']'); } assert stream != null; HadoopIgfsOutputStream igfsOut = new HadoopIgfsOutputStream(stream, LOG, clientLog, logId); bufSize = Math.max(64 * 1024, bufSize); out = new BufferedOutputStream(igfsOut, bufSize); FSDataOutputStream res = new FSDataOutputStream(out, null, 0); // Mark stream created successfully. out = null; return res; } finally { // Close if failed during stream creation. 
if (out != null) U.closeQuiet(out); leaveBusy(); } } /** {@inheritDoc} */ @Override public boolean supportsSymlinks() { return false; } /** {@inheritDoc} */ @Override public void renameInternal(Path src, Path dst) throws IOException { A.notNull(src, "src"); A.notNull(dst, "dst"); enterBusy(); try { IgfsPath srcPath = convert(src); IgfsPath dstPath = convert(dst); if (clientLog.isLogEnabled()) clientLog.logRename(srcPath, dstPath); rmtClient.rename(srcPath, dstPath); } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public boolean delete(Path f, boolean recursive) throws IOException { A.notNull(f, "f"); enterBusy(); try { IgfsPath path = convert(f); boolean res = rmtClient.delete(path, recursive); if (clientLog.isLogEnabled()) clientLog.logDelete(path, recursive); return res; } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public void setVerifyChecksum(boolean verifyChecksum) throws IOException { // No-op. } /** {@inheritDoc} */ @Override public FileChecksum getFileChecksum(Path f) throws IOException { return null; } /** {@inheritDoc} */ @Override public FileStatus[] listStatus(Path f) throws IOException { A.notNull(f, "f"); enterBusy(); try { IgfsPath path = convert(f); Collection<IgfsFile> list = rmtClient.listFiles(path); if (list == null) throw new FileNotFoundException("File " + f + " does not exist."); List<IgfsFile> files = new ArrayList<>(list); FileStatus[] arr = new FileStatus[files.size()]; for (int i = 0; i < arr.length; i++) arr[i] = convert(files.get(i)); if (clientLog.isLogEnabled()) { String[] fileArr = new String[arr.length]; for (int i = 0; i < arr.length; i++) fileArr[i] = arr[i].getPath().toString(); clientLog.logListDirectory(path, fileArr); } return arr; } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public void mkdir(Path f, FsPermission perm, boolean createParent) throws IOException { A.notNull(f, "f"); enterBusy(); try { IgfsPath path = convert(f); rmtClient.mkdirs(path, permission(perm)); if 
(clientLog.isLogEnabled()) clientLog.logMakeDirectory(path); } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public FileStatus getFileStatus(Path f) throws IOException { A.notNull(f, "f"); enterBusy(); try { IgfsFile info = rmtClient.info(convert(f)); if (info == null) throw new FileNotFoundException("File not found: " + f); return convert(info); } finally { leaveBusy(); } } /** {@inheritDoc} */ @Override public BlockLocation[] getFileBlockLocations(Path path, long start, long len) throws IOException { A.notNull(path, "path"); IgfsPath igfsPath = convert(path); enterBusy(); try { long now = System.currentTimeMillis(); List<IgfsBlockLocation> affinity = new ArrayList<>( rmtClient.affinity(igfsPath, start, len)); BlockLocation[] arr = new BlockLocation[affinity.size()]; for (int i = 0; i < arr.length; i++) arr[i] = convert(affinity.get(i)); if (LOG.isDebugEnabled()) LOG.debug("Fetched file locations [path=" + path + ", fetchTime=" + (System.currentTimeMillis() - now) + ", locations=" + Arrays.asList(arr) + ']'); return arr; } finally { leaveBusy(); } } /** * Convert IGFS path into Hadoop path. * * @param path IGFS path. * @return Hadoop path. */ private Path convert(IgfsPath path) { return new Path(IGFS_SCHEME, uriAuthority, path.toString()); } /** * Convert Hadoop path into IGFS path. * * @param path Hadoop path. * @return IGFS path. */ @Nullable private IgfsPath convert(Path path) { if (path == null) return null; return path.isAbsolute() ? new IgfsPath(path.toUri().getPath()) : new IgfsPath(workingDir, path.toUri().getPath()); } /** * Convert IGFS affinity block location into Hadoop affinity block location. * * @param block IGFS affinity block location. * @return Hadoop affinity block location. 
*/ private BlockLocation convert(IgfsBlockLocation block) { Collection<String> names = block.names(); Collection<String> hosts = block.hosts(); return new BlockLocation( names.toArray(new String[names.size()]) /* hostname:portNumber of data nodes */, hosts.toArray(new String[hosts.size()]) /* hostnames of data nodes */, block.start(), block.length() ) { @Override public String toString() { try { return "BlockLocation [offset=" + getOffset() + ", length=" + getLength() + ", hosts=" + Arrays.asList(getHosts()) + ", names=" + Arrays.asList(getNames()) + ']'; } catch (IOException e) { throw new RuntimeException(e); } } }; } /** * Convert IGFS file information into Hadoop file status. * * @param file IGFS file information. * @return Hadoop file status. */ private FileStatus convert(IgfsFile file) { return new FileStatus( file.length(), file.isDirectory(), dfltReplication, file.groupBlockSize(), file.modificationTime(), file.accessTime(), permission(file), file.property(IgfsUtils.PROP_USER_NAME, user), file.property(IgfsUtils.PROP_GROUP_NAME, "users"), convert(file.path())) { @Override public String toString() { return "FileStatus [path=" + getPath() + ", isDir=" + isDirectory() + ", len=" + getLen() + "]"; } }; } /** * Convert Hadoop permission into IGFS file attribute. * * @param perm Hadoop permission. * @return IGFS attributes. */ private Map<String, String> permission(FsPermission perm) { if (perm == null) perm = FsPermission.getDefault(); return F.asMap(IgfsUtils.PROP_PERMISSION, toString(perm)); } /** * @param perm Permission. * @return String. */ private static String toString(FsPermission perm) { return String.format("%04o", perm.toShort()); } /** * Convert IGFS file attributes into Hadoop permission. * * @param file File info. * @return Hadoop permission. 
*/ private FsPermission permission(IgfsFile file) { String perm = file.property(IgfsUtils.PROP_PERMISSION, null); if (perm == null) return FsPermission.getDefault(); try { return new FsPermission((short)Integer.parseInt(perm, 8)); } catch (NumberFormatException ignore) { return FsPermission.getDefault(); } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(IgniteHadoopFileSystem.class, this); } /** * Returns the user name this File System is created on behalf of. * @return the user name */ public String user() { return user; } }
package fr.adrienbrault.idea.symfony2plugin.tests.util.yaml; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.util.Function; import com.intellij.util.containers.ContainerUtil; import com.jetbrains.php.lang.psi.elements.Parameter; import fr.adrienbrault.idea.symfony2plugin.tests.SymfonyLightCodeInsightFixtureTestCase; import fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper; import fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlPsiElementFactory; import fr.adrienbrault.idea.symfony2plugin.util.yaml.visitor.YamlServiceTag; import fr.adrienbrault.idea.symfony2plugin.util.yaml.visitor.YamlTagVisitor; import org.apache.commons.lang.StringUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.yaml.YAMLFileType; import org.jetbrains.yaml.psi.YAMLCompoundValue; import org.jetbrains.yaml.psi.YAMLFile; import org.jetbrains.yaml.psi.YAMLKeyValue; import org.jetbrains.yaml.psi.YAMLScalar; import org.jetbrains.yaml.psi.impl.YAMLHashImpl; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.function.Predicate; /** * @author Daniel Espendiller <daniel@espendiller.net> */ public class YamlHelperLightTest extends SymfonyLightCodeInsightFixtureTestCase { public void setUp() throws Exception { super.setUp(); myFixture.copyFileToProject("classes.php"); } public String getTestDataPath() { return "src/test/java/fr/adrienbrault/idea/symfony2plugin/tests/util/yaml/fixtures"; } /** * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitTagsOnServiceDefinition */ public void testVisitTagsOnServiceDefinition() { YAMLKeyValue yamlKeyValue = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "foo:\n" + " tags:\n" + " - { name: kernel.event_listener, event: eventName, method: methodName }\n" + " - { name: kernel.event_listener2, event: eventName2, method: methodName2 }\n" ); ListYamlTagVisitor visitor = new ListYamlTagVisitor(); 
YamlHelper.visitTagsOnServiceDefinition(yamlKeyValue, visitor); assertEquals("kernel.event_listener", visitor.getItem(0).getName()); assertEquals("eventName", visitor.getItem(0).getAttribute("event")); assertEquals("methodName", visitor.getItem(0).getAttribute("method")); assertEquals("kernel.event_listener2", visitor.getItem(1).getName()); assertEquals("eventName2", visitor.getItem(1).getAttribute("event")); assertEquals("methodName2", visitor.getItem(1).getAttribute("method")); } /** * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitTagsOnServiceDefinition */ public void testVisitTagsOnServiceDefinitionForSymfony33TagsShortcut() { YAMLKeyValue yamlKeyValue = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "foo:\n" + " tags:\n" + " - kernel.event_listener\n" + " - kernel.event_listener2\n" ); ListYamlTagVisitor visitor = new ListYamlTagVisitor(); YamlHelper.visitTagsOnServiceDefinition(yamlKeyValue, visitor); assertEquals("kernel.event_listener", visitor.getItem(0).getName()); assertEquals("kernel.event_listener", visitor.getItem(0).getAttribute("name")); assertEquals("kernel.event_listener2", visitor.getItem(1).getName()); } /** * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitTagsOnServiceDefinition */ public void testVisitTagsOnServiceDefinitionWithQuote() { YAMLKeyValue yamlKeyValue = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "foo:\n" + " tags:\n" + " - { name: 'kernel.event_listener', event: 'eventName', method: 'methodName' }\n" ); ListYamlTagVisitor visitor = new ListYamlTagVisitor(); YamlHelper.visitTagsOnServiceDefinition(yamlKeyValue, visitor); assertEquals("kernel.event_listener", visitor.getItem().getName()); assertEquals("eventName", visitor.getItem().getAttribute("event")); assertEquals("methodName", visitor.getItem().getAttribute("method")); } /** * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitTagsOnServiceDefinition */ public void 
testVisitTagsOnServiceDefinitionWithDoubleQuote() {
    YAMLKeyValue yamlKeyValue = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "foo:\n" +
        "  tags:\n" +
        "    - { name: \"kernel.event_listener\", event: \"eventName\", method: \"methodName\" }\n"
    );

    ListYamlTagVisitor visitor = new ListYamlTagVisitor();
    YamlHelper.visitTagsOnServiceDefinition(yamlKeyValue, visitor);

    // Double-quoted scalar values must arrive at the visitor with the quotes stripped.
    assertEquals("kernel.event_listener", visitor.getItem().getName());
    assertEquals("eventName", visitor.getItem().getAttribute("event"));
    assertEquals("methodName", visitor.getItem().getAttribute("method"));
}

/**
 * The enclosing service definition must be resolvable both from a caret inside
 * a tag entry and from a caret inside the "class" value.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#findServiceInContext
 */
public void testFindServiceInContext() {
    assertEquals("foo", YamlHelper.findServiceInContext(myFixture.configureByText(YAMLFileType.YML, "" +
        "services:\n" +
        "  foo:\n" +
        "    tags:\n" +
        "      - { name: fo<caret>o}\n"
    ).findElementAt(myFixture.getCaretOffset())).getKeyText());

    assertEquals("foo", YamlHelper.findServiceInContext(myFixture.configureByText(YAMLFileType.YML, "" +
        "services:\n" +
        "  foo:\n" +
        "    class: fo<caret>o"
    ).findElementAt(myFixture.getCaretOffset())).getKeyText());
}

/**
 * All three YAML quoting styles (plain, single-, double-quoted) must yield the
 * same plain string value.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#getYamlKeyValueAsString
 */
public void testGetYamlKeyValueAsString() {
    String[] strings = {
        "{ name: routing.loader, method: foo }",
        "{ name: routing.loader, method: 'foo' }",
        "{ name: routing.loader, method: \"foo\" }",
    };

    for (String s : strings) {
        assertEquals("foo", YamlHelper.getYamlKeyValueAsString(
            YamlPsiElementFactory.createFromText(getProject(), YAMLHashImpl.class, s),
            "method"
        ));
    }
}

/**
 * A key's value may be a single scalar, an inline array, or a block sequence;
 * all three forms are flattened to a string collection.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#getYamlKeyValueStringOrArray
 */
public void testGetYamlKeyValueStringOrArray() {
    // Single scalar value.
    YAMLKeyValue fromText = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "foo:\n" +
        "  tags: 'foo'\n"
    );

    Collection<String> tags = YamlHelper.getYamlKeyValueStringOrArray(fromText,"tags");
    assertTrue(tags.stream().anyMatch("foo"::equalsIgnoreCase));

    // Inline array value.
    YAMLKeyValue fromText2 = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "foo:\n" +
        "  tags: ['bar1', 'bar2']\n"
    );

    Collection<String> tags2 = YamlHelper.getYamlKeyValueStringOrArray(fromText2,"tags");
    assertTrue(tags2.stream().anyMatch("bar1"::equalsIgnoreCase));
    assertTrue(tags2.stream().anyMatch("bar2"::equalsIgnoreCase));

    // Block sequence value.
    YAMLKeyValue fromText3 = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "foo:\n" +
        "  tags:\n" +
        "    - foo1\n" +
        "    - foo2\n"
    );

    Collection<String> tags3 = YamlHelper.getYamlKeyValueStringOrArray(fromText3,"tags");
    assertTrue(tags3.stream().anyMatch("foo1"::equalsIgnoreCase));
    assertTrue(tags3.stream().anyMatch("foo2"::equalsIgnoreCase));
}

/**
 * Tag names are collected from the classic "- { name: ... }" mapping style.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#collectServiceTags
 */
public void testCollectServiceTags() {
    YAMLKeyValue fromText = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "foo:\n" +
        "  tags:\n" +
        "    - { name: routing.loader, method: crossHint }\n" +
        "    - { name: routing.loader1, method: crossHint }\n"
    );

    assertNotNull(fromText);
    assertContainsElements(YamlHelper.collectServiceTags(fromText), "routing.loader", "routing.loader1");
}

/**
 * Symfony 3.3 shortcut: tags given as a bare sequence of names.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#collectServiceTags
 */
public void testCollectServiceTagsForSymfony33TagsShortcut() {
    YAMLKeyValue fromText = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "foo:\n" +
        "  tags:\n" +
        "    - routing.loader_tags_1\n" +
        "    - routing.loader_tags_2\n"
    );

    assertNotNull(fromText);

    Set<String> collection = YamlHelper.collectServiceTags(fromText);
    assertContainsElements(collection, "routing.loader_tags_1");
    assertContainsElements(collection, "routing.loader_tags_2");
}

/**
 * Symfony 3.3 shortcut: tags given as an inline array, quoted or unquoted.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#collectServiceTags
 */
public void testCollectServiceTagsForSymfony33TagsShortcutInline() {
    YAMLKeyValue fromText = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "foo:\n" +
        "  tags: [routing.loader_tags_3, routing.loader_tags_4, 'routing.loader_tags_5']\n"
    );

    assertNotNull(fromText);

    Set<String> collection = YamlHelper.collectServiceTags(fromText);
    assertContainsElements(collection, "routing.loader_tags_3");
    assertContainsElements(collection, "routing.loader_tags_4");
    assertContainsElements(collection, "routing.loader_tags_5");
}

/**
 * Both inline-array and block-sequence notations yield the same element list.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#getYamlArrayOnSequenceOrArrayElements
 */
public void testGetYamlArrayOnSequenceOrArrayElements() {
    String[] strings = {
        "calls: [@foo, @bar] \n",
        "calls:\n  - @foo\n  - @bar\n",
    };

    for (String s : strings) {
        YAMLCompoundValue fromText = YamlPsiElementFactory.createFromText(getProject(), YAMLCompoundValue.class, s);
        assertNotNull(fromText);

        List<PsiElement> elements = YamlHelper.getYamlArrayOnSequenceOrArrayElements(fromText);
        assertNotNull(elements);

        // Join the PSI texts so we can assert containment independent of order.
        String join = StringUtils.join(ContainerUtil.map(elements, new Function<PsiElement, String>() {
            @Override
            public String fun(PsiElement psiElement) {
                return psiElement.getText();
            }
        }), ",");

        assertTrue(join.contains("foo"));
        assertTrue(join.contains("bar"));
    }
}

/**
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#getYamlArrayOnSequenceOrArrayElements
 */
public void testGetYamlArrayOnSequenceOrArrayElementsForArray() {
    YAMLCompoundValue fromText = YamlPsiElementFactory.createFromText(getProject(), YAMLCompoundValue.class, "" +
        "calls: [@foo, @bar] \n"
    );

    assertNotNull(fromText);

    String join = StringUtils.join(ContainerUtil.map(YamlHelper.getYamlArrayOnSequenceOrArrayElements(fromText), new Function<PsiElement, String>() {
        @Override
        public String fun(PsiElement psiElement) {
            return psiElement.getText();
        }
    }), ",");

    assertTrue(join.contains("foo"));
    assertTrue(join.contains("bar"));
}

/**
 * Inserting a scalar under an existing nested key appends it at that level.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#insertKeyIntoFile
 */
public void testInsertKeyIntoFile() {
    YAMLFile yamlFile = (YAMLFile) myFixture.configureByText(YAMLFileType.YML, "" +
        "foo:\n" +
        "  bar:\n" +
        "    car: test"
    );

    YamlHelper.insertKeyIntoFile(yamlFile, "value", "foo", "bar", "apple");

    assertEquals("" +
            "foo:\n" +
            "  bar:\n" +
            "    car: test\n" +
            "    apple: value",
        yamlFile.getText()
    );
}

/**
 * A missing root key path is created from scratch at document level.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#insertKeyIntoFile
 */
public void testInsertKeyIntoFileOnRoot() {
    YAMLFile yamlFile = (YAMLFile) myFixture.configureByText(YAMLFileType.YML, "" +
        "foo:\n" +
        "  bar:\n" +
        "    car: test"
    );

    YamlHelper.insertKeyIntoFile(yamlFile, "value", "car", "bar", "apple");

    assertEquals("" +
            "foo:\n" +
            "  bar:\n" +
            "    car: test\n" +
            "car:\n" +
            "  bar:\n" +
            "    apple: value",
        yamlFile.getText()
    );
}

/**
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#insertKeyIntoFile
 * TODO empty file
 */
public void skipTestInsertKeyIntoEmptyFile() {
    YAMLFile yamlFile = (YAMLFile) myFixture.configureByText(YAMLFileType.YML, "");

    YamlHelper.insertKeyIntoFile(yamlFile, "value", "car", "bar", "apple");

    // NOTE(review): expected text looks copied from testInsertKeyIntoFileOnRoot;
    // method is prefixed "skip" so it never runs — confirm intent before enabling.
    assertEquals("" +
            "foo:\n" +
            "  bar:\n" +
            "    car: test\n" +
            "car:\n" +
            "  bar:\n" +
            "    apple: value",
        yamlFile.getText()
    );
}

/**
 * A whole YAMLKeyValue subtree (including its sequence child) can be inserted
 * under an existing top-level key.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#insertKeyIntoFile
 */
public void testInsertKeyWithArrayValue() {
    YAMLFile yamlFile = (YAMLFile) myFixture.configureByText(YAMLFileType.YML, "" +
        "services:\n" +
        "  foo:\n" +
        "    car: test"
    );

    YAMLKeyValue yamlKeyValue = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "my_service:\n" +
        "  class: foo\n" +
        "  tag:\n" +
        "    - foo\n"
    );

    assertNotNull(yamlKeyValue);

    YamlHelper.insertKeyIntoFile(yamlFile, yamlKeyValue, "services");

    String text = yamlFile.getText();
    assertEquals("services:\n" +
            "  foo:\n" +
            "    car: test\n" +
            "  my_service:\n" +
            "    class: foo\n" +
            "    tag:\n" +
            "      - foo",
        text
    );
}

/**
 * When the target main key does not exist yet it is appended to the document root.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#insertKeyIntoFile
 */
public void testInsertKeyValueWithMissingMainKeyInRoot() {
    YAMLFile yamlFile = (YAMLFile) myFixture.configureByText(YAMLFileType.YML, "foo: foo");

    YAMLKeyValue yamlKeyValue = YamlPsiElementFactory.createFromText(getProject(), YAMLKeyValue.class, "" +
        "my_service:\n" +
        "  class: foo\n" +
        "  tag: foo"
    );

    assertNotNull(yamlKeyValue);

    YamlHelper.insertKeyIntoFile(yamlFile, yamlKeyValue, "services");

    assertEquals("" +
            "foo: foo\n" +
            "services:\n" +
            "  my_service:\n" +
            "    class: foo\n" +
            "    tag: foo",
        yamlFile.getText()
    );
}

/**
 * The visitor reports the owning service class for a scalar inside "calls".
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitServiceCall
 */
public void testVisitServiceCall() {
    myFixture.configureByText(YAMLFileType.YML, "services:\n" +
        "  foobar:\n" +
        "    class: Foo\\Bar\n" +
        "    calls:\n" +
        "      - [ '<caret>' ]\n"
    );

    PsiElement psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    YAMLScalar parent = (YAMLScalar) psiElement.getParent();

    Collection<String> values = new ArrayList<>();
    YamlHelper.visitServiceCall(parent, values::add);

    assertContainsElements(values, "Foo\\Bar");
}

/**
 * Symfony 3.3 named services: the service id itself is the class name.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitServiceCall
 */
public void testVisitServiceCallForNamedServices() {
    myFixture.configureByText(YAMLFileType.YML, "services:\n" +
        "  Foo\\Bar:\n" +
        "    calls:\n" +
        "      - [ '<caret>' ]\n"
    );

    PsiElement psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    YAMLScalar parent = (YAMLScalar) psiElement.getParent();

    Collection<String> values = new ArrayList<>();
    YamlHelper.visitServiceCall(parent, values::add);

    assertContainsElements(values, "Foo\\Bar");
}

/**
 * Class, method and argument index are resolved for a "@service" argument
 * inside a calls entry.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitServiceCallArgument
 */
public void testVisitServiceCallArgument() {
    myFixture.configureByText(YAMLFileType.YML, "services:\n" +
        "  foobar:\n" +
        "    class: Foo\\Bar\n" +
        "    calls:\n" +
        "      - [ 'setBar', [@f<caret>oo] ]\n"
    );

    PsiElement psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    YAMLScalar parent = (YAMLScalar) psiElement.getParent();

    Collection<String> values = new ArrayList<>();

    YamlHelper.visitServiceCallArgument(parent, parameterVisitor ->
        values.add(parameterVisitor.getClassName() + ":" + parameterVisitor.getMethod() + ":" + parameterVisitor.getParameterIndex())
    );

    assertContainsElements(values, "Foo\\Bar:setBar:0");
}

/**
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitServiceCallArgument
 */
public void testVisitServiceCallArgumentAsNamedService() {
    myFixture.configureByText(YAMLFileType.YML, "services:\n" +
        "  Foo\\Bar:\n" +
        "    calls:\n" +
        "      - [ 'setBar', [@f<caret>oo] ]\n"
    );

    PsiElement psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    YAMLScalar parent = (YAMLScalar) psiElement.getParent();

    Collection<String> values = new ArrayList<>();

    YamlHelper.visitServiceCallArgument(parent, parameterVisitor ->
        values.add(parameterVisitor.getClassName() + ":" + parameterVisitor.getMethod() + ":" + parameterVisitor.getParameterIndex())
    );

    assertContainsElements(values, "Foo\\Bar:setBar:0");
}

/**
 * The caret's position maps to the matching PHP method parameter ("arg1" here,
 * from the test fixture class).
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitServiceCallArgumentMethodIndex
 */
public void testVisitServiceCallArgumentMethodIndex() {
    myFixture.configureByText(YAMLFileType.YML, "services:\n" +
        "  foobar:\n" +
        "    class: Foo\\Bar\n" +
        "    calls:\n" +
        "      - [ 'setBar', [@f<caret>oo] ]\n"
    );

    PsiElement psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    YAMLScalar parent = (YAMLScalar) psiElement.getParent();

    Collection<Parameter> parameters = new ArrayList<>();
    YamlHelper.visitServiceCallArgumentMethodIndex(parent, parameters::add);

    assertNotNull(ContainerUtil.find(parameters, parameter -> "arg1".equals(parameter.getName())));
}

/**
 * Second argument position resolves to the second PHP parameter ("arg2").
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#visitServiceCallArgumentMethodIndex
 */
public void testVisitServiceCallArgumentMethodIndexForNamedServices() {
    myFixture.configureByText(YAMLFileType.YML, "services:\n" +
        "  Foo\\Bar:\n" +
        "    calls:\n" +
        "      - [ 'setBar', ['@foo', @f<caret>oo] ]\n"
    );

    PsiElement psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    YAMLScalar parent = (YAMLScalar) psiElement.getParent();

    Collection<Parameter> parameters = new ArrayList<>();
    YamlHelper.visitServiceCallArgumentMethodIndex(parent, parameters::add);

    assertNotNull(ContainerUtil.find(parameters, parameter -> "arg2".equals(parameter.getName())));
}

/**
 * Indent detection: explicit child indent wins; an empty/valueless mapping
 * falls back to the default of 4.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#getIndentSpaceForFile
 */
public void testGetIndentSpaceForFile() {
    assertEquals(2, getIndentForTextContent("parameters:\n  foo: ~"));
    assertEquals(4, getIndentForTextContent("parameters:\n    foo: ~"));
    assertEquals(4, getIndentForTextContent("parameters: ~"));
    assertEquals(4, getIndentForTextContent("parameters:\n" +
        "    # foobar" +
        "    foo: ~"
    ));
}

/**
 * From a tag's "method" value, the owning service class is found — for both
 * classic "class:" definitions and Symfony 3.3 named services.
 *
 * @see fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper#getServiceDefinitionClassFromTagMethod
 */
public void testGetServiceDefinitionClassFromTagMethod() {
    myFixture.configureByText(YAMLFileType.YML, "" +
        "services:\n" +
        "  foobar:\n" +
        "    class: ClassName\\Foo\n" +
        "    tags:\n" +
        "      - { method: cross<caret>Hint }"
    );

    PsiElement psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    assertEquals("ClassName\\Foo", YamlHelper.getServiceDefinitionClassFromTagMethod(psiElement));

    myFixture.configureByText(YAMLFileType.YML, "" +
        "services:\n" +
        "  ClassName\\Foo:\n" +
        "    tags:\n" +
        "      - { method: cross<caret>Hint }"
    );

    psiElement = myFixture.getFile().findElementAt(myFixture.getCaretOffset());
    assertEquals("ClassName\\Foo", YamlHelper.getServiceDefinitionClassFromTagMethod(psiElement));
}

/**
 * Services are found by tag name in both mapping and inline-array tag notation.
 */
public void testGetTaggedServices() {
    PsiFile psiFile = myFixture.configureByText(YAMLFileType.YML, "" +
        "services:\n" +
        "  foobar:\n" +
        "    class: ClassName\\Foo\n" +
        "    tags:\n" +
        "      - { name: crossHint.test_222 }\n" +
        "  foobar2:\n" +
        "    class: ClassName\\Foo\n" +
        "    tags: [ 'test.11' ]\n"
    );

    Collection<YAMLKeyValue> taggedServices1 = YamlHelper.getTaggedServices((YAMLFile) psiFile, "crossHint.test_222");
    assertTrue(taggedServices1.stream().anyMatch(yamlKeyValue -> "foobar".equals(yamlKeyValue.getKey().getText())));

    Collection<YAMLKeyValue> taggedServices2 = YamlHelper.getTaggedServices((YAMLFile) psiFile, "test.11");
    assertTrue(taggedServices2.stream().anyMatch(yamlKeyValue -> "foobar2".equals(yamlKeyValue.getKey().getText())));
}

/**
 * Helper: parse the given text as a dummy yml file and return its detected indent width.
 */
private int getIndentForTextContent(@NotNull String content) {
    return YamlHelper.getIndentSpaceForFile((YAMLFile) YamlPsiElementFactory.createDummyFile(
        getProject(),
        "foo.yml",
        content
    ));
}

/**
 * Test double: records every visited service tag so assertions can inspect them in order.
 */
private static class ListYamlTagVisitor implements YamlTagVisitor {

    // Visited tags in visit order.
    private List<YamlServiceTag> items = new ArrayList<YamlServiceTag>();

    @Override
    public void visit(@NotNull YamlServiceTag args) {
        items.add(args);
    }

    public YamlServiceTag getItem(int pos) {
        return items.get(pos);
    }

    // Convenience accessor for the first (often only) visited tag.
    public YamlServiceTag getItem() {
        return items.get(0);
    }
}
}
/* * Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.groestlcoin.examples; import com.google.groestlcoin.core.AbstractPeerEventListener; import com.google.groestlcoin.core.NetworkParameters; import com.google.groestlcoin.core.Peer; import com.google.groestlcoin.core.PeerGroup; import com.google.groestlcoin.discovery.DnsDiscovery; import com.google.groestlcoin.params.MainNetParams; import com.google.groestlcoin.utils.BriefLogFormatter; import com.google.common.collect.Lists; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.table.AbstractTableModel; import javax.swing.table.TableCellRenderer; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.util.HashMap; import java.util.List; /** * Shows connected peers in a table view, so you can watch as they come and go. 
*/
public class PeerMonitor {
    private NetworkParameters params;
    private PeerGroup peerGroup;
    private PeerTableModel peerTableModel;
    private PeerTableRenderer peerTableRenderer;

    // Peer -> resolved hostname. Populated asynchronously by lookupReverseDNS()
    // worker threads, read by the Swing table model; every access is guarded by
    // synchronized (reverseDnsLookups).
    private final HashMap<Peer, String> reverseDnsLookups = new HashMap<Peer, String>();

    public static void main(String[] args) throws Exception {
        BriefLogFormatter.init();
        new PeerMonitor();
    }

    public PeerMonitor() {
        setupNetwork();
        setupGUI();
        peerGroup.start();
    }

    /**
     * Configures the peer group for main net with DNS discovery and hooks the
     * connect/disconnect events into the UI refresh and reverse-DNS cache.
     */
    private void setupNetwork() {
        params = MainNetParams.get();
        peerGroup = new PeerGroup(params, null /* no chain */);
        peerGroup.setUserAgent("PeerMonitor", "1.0");
        peerGroup.setMaxConnections(15);
        peerGroup.addPeerDiscovery(new DnsDiscovery(params));
        peerGroup.addEventListener(new AbstractPeerEventListener() {
            @Override
            public void onPeerConnected(final Peer peer, int peerCount) {
                refreshUI();
                lookupReverseDNS(peer);
            }

            @Override
            public void onPeerDisconnected(final Peer peer, int peerCount) {
                refreshUI();
                // Drop the cached hostname so the map doesn't grow unbounded.
                synchronized (reverseDnsLookups) {
                    reverseDnsLookups.remove(peer);
                }
            }
        });
    }

    /**
     * Resolves the peer's hostname on a background thread and triggers a UI
     * refresh when done. Runs off the event thread because resolution may block.
     */
    private void lookupReverseDNS(final Peer peer) {
        new Thread() {
            @Override
            public void run() {
                // This can take a looooong time.
                String reverseDns = peer.getAddress().getAddr().getCanonicalHostName();
                synchronized (reverseDnsLookups) {
                    reverseDnsLookups.put(peer, reverseDns);
                }
                refreshUI();
            }
        }.start();
    }

    private void refreshUI() {
        // Tell the Swing UI thread to redraw the peers table.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                peerTableModel.updateFromPeerGroup();
            }
        });
    }

    /**
     * Builds the window: a spinner controlling the max connection count and a
     * sortable table of peers, refreshed periodically.
     */
    private void setupGUI() {
        JFrame window = new JFrame("Network monitor");
        window.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
        window.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosing(WindowEvent windowEvent) {
                // Shut the peer group down cleanly before exiting.
                System.out.println("Shutting down ...");
                peerGroup.stopAndWait();
                System.out.println("Shutdown complete.");
                System.exit(0);
            }
        });

        JPanel panel = new JPanel();
        JLabel instructions = new JLabel("Number of peers to connect to: ");
        final SpinnerNumberModel spinnerModel = new SpinnerNumberModel(4, 0, 100, 1);
        spinnerModel.addChangeListener(new ChangeListener() {
            public void stateChanged(ChangeEvent changeEvent) {
                peerGroup.setMaxConnections(spinnerModel.getNumber().intValue());
            }
        });
        JSpinner numPeersSpinner = new JSpinner(spinnerModel);
        panel.add(instructions);
        panel.add(numPeersSpinner);
        window.getContentPane().add(panel, BorderLayout.NORTH);

        peerTableModel = new PeerTableModel();
        JTable peerTable = new JTable(peerTableModel);
        peerTable.setAutoCreateRowSorter(true);
        peerTableRenderer = new PeerTableRenderer(peerTableModel);
        peerTable.setDefaultRenderer(String.class, peerTableRenderer);
        peerTable.setDefaultRenderer(Integer.class, peerTableRenderer);
        peerTable.setDefaultRenderer(Long.class, peerTableRenderer);
        peerTable.getColumnModel().getColumn(0).setPreferredWidth(300);

        JScrollPane scrollPane = new JScrollPane(peerTable);
        window.getContentPane().add(scrollPane, BorderLayout.CENTER);
        window.pack();
        window.setSize(720, 480);
        window.setVisible(true);

        // Refresh the UI every second (1000 ms) to get the latest ping times.
        // The event handler runs in the UI thread.
        new Timer(1000, new ActionListener() {
            public void actionPerformed(ActionEvent actionEvent) {
                peerTableModel.updateFromPeerGroup();
            }
        }).start();
    }

    /**
     * Table model showing connected peers first, then pending (still
     * handshaking) peers, each row rendered from a PeerGroup snapshot.
     */
    private class PeerTableModel extends AbstractTableModel {
        public static final int IP_ADDRESS = 0;
        public static final int PROTOCOL_VERSION = 1;
        public static final int USER_AGENT = 2;
        public static final int CHAIN_HEIGHT = 3;
        public static final int PING_TIME = 4;
        public static final int LAST_PING_TIME = 5;

        // Snapshots taken from the peer group; replaced wholesale on update.
        public List<Peer> connectedPeers = Lists.newArrayList();
        public List<Peer> pendingPeers = Lists.newArrayList();

        public void updateFromPeerGroup() {
            connectedPeers = peerGroup.getConnectedPeers();
            pendingPeers = peerGroup.getPendingPeers();
            fireTableDataChanged();
        }

        public int getRowCount() {
            return connectedPeers.size() + pendingPeers.size();
        }

        @Override
        public String getColumnName(int i) {
            switch (i) {
                case IP_ADDRESS: return "Address";
                case PROTOCOL_VERSION: return "Protocol version";
                case USER_AGENT: return "User Agent";
                case CHAIN_HEIGHT: return "Chain height";
                case PING_TIME: return "Average ping";
                case LAST_PING_TIME: return "Last ping";
                default: throw new RuntimeException();
            }
        }

        public int getColumnCount() {
            return 6;
        }

        public Class<?> getColumnClass(int column) {
            switch (column) {
                case PROTOCOL_VERSION:
                    return Integer.class;
                case CHAIN_HEIGHT:
                case PING_TIME:
                case LAST_PING_TIME:
                    return Long.class;
                default:
                    return String.class;
            }
        }

        public Object getValueAt(int row, int col) {
            if (row >= connectedPeers.size()) {
                // Peer that isn't connected yet.
                Peer peer = pendingPeers.get(row - connectedPeers.size());
                switch (col) {
                    case IP_ADDRESS:
                        return getAddressForPeer(peer);
                    case PROTOCOL_VERSION:
                        return 0;
                    case CHAIN_HEIGHT:
                    case PING_TIME:
                    case LAST_PING_TIME:
                        return 0L;
                    default:
                        return "(pending)";
                }
            }
            Peer peer = connectedPeers.get(row);
            switch (col) {
                case IP_ADDRESS:
                    return getAddressForPeer(peer);
                case PROTOCOL_VERSION:
                    return Integer.toString(peer.getPeerVersionMessage().clientVersion);
                case USER_AGENT:
                    return peer.getPeerVersionMessage().subVer;
                case CHAIN_HEIGHT:
                    return peer.getBestHeight();
                case PING_TIME:
                case LAST_PING_TIME:
                    return col == PING_TIME ? peer.getPingTime() : peer.getLastPingTime();
                default:
                    throw new RuntimeException();
            }
        }

        // Prefer the asynchronously resolved hostname; fall back to raw IP
        // while (or if) resolution hasn't completed.
        private Object getAddressForPeer(Peer peer) {
            String s;
            synchronized (reverseDnsLookups) {
                s = reverseDnsLookups.get(peer);
            }
            if (s != null)
                return s;
            else
                return peer.getAddress().getAddr().getHostAddress();
        }
    }

    /**
     * Renders pending peers grey, the current download peer bold, and flags
     * peers whose chain height differs from the network consensus height.
     */
    private class PeerTableRenderer extends JLabel implements TableCellRenderer {
        private final PeerTableModel model;
        private final Font normal, bold;

        public PeerTableRenderer(PeerTableModel model) {
            super();
            this.model = model;
            this.normal = new Font("Sans Serif", Font.PLAIN, 12);
            this.bold = new Font("Sans Serif", Font.BOLD, 12);
        }

        public Component getTableCellRendererComponent(JTable table, Object contents,
                                                       boolean selected, boolean hasFocus, int row, int column) {
            // The table may be sorted; map view coordinates back to the model.
            row = table.convertRowIndexToModel(row);
            column = table.convertColumnIndexToModel(column);
            String str = contents.toString();
            if (model.connectedPeers == null || model.pendingPeers == null) {
                setText(str);
                return this;
            }

            if (row >= model.connectedPeers.size()) {
                setFont(normal);
                setForeground(Color.LIGHT_GRAY);
            } else {
                if (model.connectedPeers.get(row) == peerGroup.getDownloadPeer())
                    setFont(bold);
                else
                    setFont(normal);
                setForeground(Color.BLACK);
                // Mark chain heights that aren't normal but not for pending peers, as we don't know their heights yet.
                if (column == PeerTableModel.CHAIN_HEIGHT) {
                    long height = (Long) contents;
                    if (height != peerGroup.getMostCommonChainHeight()) {
                        str = height + " \u2022 ";
                    }
                }
            }

            boolean isPingColumn = column == PeerTableModel.PING_TIME || column == PeerTableModel.LAST_PING_TIME;
            if (isPingColumn && contents.equals(Long.MAX_VALUE)) {
                // We don't know the answer yet
                str = "";
            }
            setText(str);
            return this;
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RecordFactory; 
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Matchers;
import org.mockito.Mockito;

/**
 * Tests for CapacityScheduler application limits: max applications per queue
 * and per user, and AM resource limits at queue and user level.
 */
public class TestApplicationLimits {

    private static final Log LOG = LogFactory.getLog(TestApplicationLimits.class);

    // All memory values in this test are expressed in MB; GB is the MB-per-GB factor.
    final static int GB = 1024;

    // Leaf queue under test; a Mockito spy so limit getters/ACLs can be stubbed.
    LeafQueue queue;

    private final ResourceCalculator resourceCalculator = new DefaultResourceCalculator();
    RMContext rmContext = null;

    /**
     * Builds a mocked scheduler context with an 80-node (16GB/32-core each)
     * cluster, parses the queue hierarchy, and spies on leaf queue "a".
     */
    @Before
    public void setUp() throws IOException {
        CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
        YarnConfiguration conf = new YarnConfiguration();
        setupQueueConfiguration(csConf);
        rmContext = TestUtils.getMockRMContext();

        CapacitySchedulerContext csContext = mock(CapacitySchedulerContext.class);
        when(csContext.getConfiguration()).thenReturn(csConf);
        when(csContext.getConf()).thenReturn(conf);
        when(csContext.getMinimumResourceCapability()).
            thenReturn(Resources.createResource(GB, 1));
        when(csContext.getMaximumResourceCapability()).
            thenReturn(Resources.createResource(16*GB, 32));
        when(csContext.getClusterResource()).
            thenReturn(Resources.createResource(10 * 16 * GB, 10 * 32));
        when(csContext.getApplicationComparator()).
            thenReturn(CapacityScheduler.applicationComparator);
        when(csContext.getQueueComparator()).
            thenReturn(CapacityScheduler.queueComparator);
        when(csContext.getResourceCalculator()).
            thenReturn(resourceCalculator);
        when(csContext.getRMContext()).thenReturn(rmContext);

        RMContainerTokenSecretManager containerTokenSecretManager =
            new RMContainerTokenSecretManager(conf);
        containerTokenSecretManager.rollMasterKey();
        when(csContext.getContainerTokenSecretManager()).thenReturn(
            containerTokenSecretManager);

        Map<String, CSQueue> queues = new HashMap<String, CSQueue>();
        CSQueue root =
            CapacityScheduler.parseQueue(csContext, csConf, null, "root",
                queues, queues, TestUtils.spyHook);

        queue = spy(new LeafQueue(csContext, A, root, null));

        // Stub out ACL checks
        doReturn(true).
            when(queue).hasAccess(any(QueueACL.class),
                any(UserGroupInformation.class));

        // Some default values
        doReturn(100).when(queue).getMaxApplications();
        doReturn(25).when(queue).getMaxApplicationsPerUser();
    }

    private static final String A = "a";
    private static final String B = "b";

    /**
     * Two leaf queues under root: a (10% capacity, user-limit 50%, factor 5)
     * and b (90% capacity).
     */
    private void setupQueueConfiguration(CapacitySchedulerConfiguration conf) {
        // Define top-level queues
        conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {A, B});

        final String Q_A = CapacitySchedulerConfiguration.ROOT + "." + A;
        conf.setCapacity(Q_A, 10);

        final String Q_B = CapacitySchedulerConfiguration.ROOT + "." + B;
        conf.setCapacity(Q_B, 90);

        conf.setUserLimit(CapacitySchedulerConfiguration.ROOT + "." + A, 50);
        conf.setUserLimitFactor(CapacitySchedulerConfiguration.ROOT + "." + A, 5.0f);

        LOG.info("Setup top-level queues a and b");
    }

    /**
     * Creates a mock app with a fixed attempt id, owner and AM resource demand.
     */
    private FiCaSchedulerApp getMockApplication(int appId, String user,
        Resource amResource) {
        FiCaSchedulerApp application = mock(FiCaSchedulerApp.class);
        ApplicationAttemptId applicationAttemptId =
            TestUtils.getMockApplicationAttemptId(appId, 0);
        doReturn(applicationAttemptId.getApplicationId()).
            when(application).getApplicationId();
        doReturn(applicationAttemptId).
            when(application).getApplicationAttemptId();
        doReturn(user).when(application).getUser();
        doReturn(amResource).when(application).getAMResource();
        return application;
    }

    @Test
    public void testAMResourceLimit() throws Exception {
        final String user_0 = "user_0";
        final String user_1 = "user_1";

        // This uses the default 10% of cluster value for the max am resources
        // which are allowed, at 80GB = 8GB for AM's at the queue level. The user
        // am limit is 4G initially (based on the queue absolute capacity)
        // when there is only 1 user, and drops to 2G (the userlimit) when there
        // is a second user
        Resource clusterResource = Resource.newInstance(80 * GB, 40);
        queue.updateClusterResource(clusterResource, new ResourceLimits(
            clusterResource));

        ActiveUsersManager activeUsersManager = mock(ActiveUsersManager.class);
        when(queue.getActiveUsersManager()).thenReturn(activeUsersManager);

        assertEquals(Resource.newInstance(8 * GB, 1), queue.getAMResourceLimit());
        assertEquals(Resource.newInstance(4 * GB, 1),
            queue.getUserAMResourceLimit());

        // Two apps for user_0, both start
        int APPLICATION_ID = 0;
        FiCaSchedulerApp app_0 = getMockApplication(APPLICATION_ID++, user_0,
            Resource.newInstance(2 * GB, 1));
        queue.submitApplicationAttempt(app_0, user_0);
        assertEquals(1, queue.getNumActiveApplications());
        assertEquals(0, queue.getNumPendingApplications());
        assertEquals(1, queue.getNumActiveApplications(user_0));
        assertEquals(0, queue.getNumPendingApplications(user_0));

        when(activeUsersManager.getNumActiveUsers()).thenReturn(1);

        FiCaSchedulerApp app_1 = getMockApplication(APPLICATION_ID++, user_0,
            Resource.newInstance(2 * GB, 1));
        queue.submitApplicationAttempt(app_1, user_0);
        assertEquals(2, queue.getNumActiveApplications());
        assertEquals(0, queue.getNumPendingApplications());
        assertEquals(2, queue.getNumActiveApplications(user_0));
        assertEquals(0, queue.getNumPendingApplications(user_0));

        // AMLimits unchanged
        assertEquals(Resource.newInstance(8 * GB, 1), queue.getAMResourceLimit());
        assertEquals(Resource.newInstance(4 * GB, 1),
            queue.getUserAMResourceLimit());

        // One app for user_1, starts
        FiCaSchedulerApp app_2 = getMockApplication(APPLICATION_ID++, user_1,
            Resource.newInstance(2 * GB, 1));
        queue.submitApplicationAttempt(app_2, user_1);
        assertEquals(3, queue.getNumActiveApplications());
        assertEquals(0, queue.getNumPendingApplications());
        assertEquals(1, queue.getNumActiveApplications(user_1));
        assertEquals(0, queue.getNumPendingApplications(user_1));

        when(activeUsersManager.getNumActiveUsers()).thenReturn(2);

        // Now userAMResourceLimit drops to the queue configured 50% as there is
        // another user active
        assertEquals(Resource.newInstance(8 * GB, 1), queue.getAMResourceLimit());
        assertEquals(Resource.newInstance(2 * GB, 1),
            queue.getUserAMResourceLimit());

        // Second user_1 app cannot start
        FiCaSchedulerApp app_3 = getMockApplication(APPLICATION_ID++, user_1,
            Resource.newInstance(2 * GB, 1));
        queue.submitApplicationAttempt(app_3, user_1);
        assertEquals(3, queue.getNumActiveApplications());
        assertEquals(1, queue.getNumPendingApplications());
        assertEquals(1, queue.getNumActiveApplications(user_1));
        assertEquals(1, queue.getNumPendingApplications(user_1));

        // Now finish app so another should be activated
        queue.finishApplicationAttempt(app_2, A);
        assertEquals(3, queue.getNumActiveApplications());
        assertEquals(0, queue.getNumPendingApplications());
        assertEquals(1, queue.getNumActiveApplications(user_1));
        assertEquals(0, queue.getNumPendingApplications(user_1));
    }

    @Test
    public void testLimitsComputation() throws Exception {
        CapacitySchedulerConfiguration csConf =
            new CapacitySchedulerConfiguration();
        setupQueueConfiguration(csConf);
        YarnConfiguration conf = new YarnConfiguration();

        CapacitySchedulerContext csContext = mock(CapacitySchedulerContext.class);
        when(csContext.getConfiguration()).thenReturn(csConf);
        when(csContext.getConf()).thenReturn(conf);
        when(csContext.getMinimumResourceCapability()).
            thenReturn(Resources.createResource(GB, 1));
        when(csContext.getMaximumResourceCapability()).
            thenReturn(Resources.createResource(16*GB, 16));
        when(csContext.getApplicationComparator()).
            thenReturn(CapacityScheduler.applicationComparator);
        when(csContext.getQueueComparator()).
            thenReturn(CapacityScheduler.queueComparator);
        when(csContext.getResourceCalculator()).thenReturn(resourceCalculator);
        when(csContext.getRMContext()).thenReturn(rmContext);

        // Say cluster has 100 nodes of 16G each
        Resource clusterResource =
            Resources.createResource(100 * 16 * GB, 100 * 16);
        when(csContext.getClusterResource()).thenReturn(clusterResource);

        Map<String, CSQueue> queues = new HashMap<String, CSQueue>();
        CSQueue root =
            CapacityScheduler.parseQueue(csContext, csConf, null, "root",
                queues, queues, TestUtils.spyHook);

        LeafQueue queue = (LeafQueue)queues.get(A);

        LOG.info("Queue 'A' -" +
            " AMResourceLimit=" + queue.getAMResourceLimit() +
            " UserAMResourceLimit=" +
            queue.getUserAMResourceLimit());

        assertEquals(queue.getAMResourceLimit(), Resource.newInstance(160*GB, 1));
        assertEquals(queue.getUserAMResourceLimit(),
            Resource.newInstance(80*GB, 1));

        assertEquals(
            (int)(clusterResource.getMemory() * queue.getAbsoluteCapacity()),
            queue.getMetrics().getAvailableMB()
        );

        // Add some nodes to the cluster & test new limits
        clusterResource = Resources.createResource(120 * 16 * GB);
        root.updateClusterResource(clusterResource, new ResourceLimits(
            clusterResource));

        assertEquals(queue.getAMResourceLimit(), Resource.newInstance(192*GB, 1));
        assertEquals(queue.getUserAMResourceLimit(),
            Resource.newInstance(96*GB, 1));

        assertEquals(
            (int)(clusterResource.getMemory() * queue.getAbsoluteCapacity()),
            queue.getMetrics().getAvailableMB()
        );

        // should return -1 if per queue setting not set
        assertEquals(
            (int)CapacitySchedulerConfiguration.UNDEFINED,
            csConf.getMaximumApplicationsPerQueue(queue.getQueuePath()));
        // NOTE: DEFAULT_MAXIMUM_SYSTEM_APPLICATIIONS is the constant's real
        // (misspelled) upstream name — keep the spelling.
        int expectedMaxApps =
            (int)
            (CapacitySchedulerConfiguration.DEFAULT_MAXIMUM_SYSTEM_APPLICATIIONS *
                queue.getAbsoluteCapacity());
        assertEquals(expectedMaxApps, queue.getMaxApplications());

        int expectedMaxAppsPerUser = Math.min(expectedMaxApps,
            (int)(expectedMaxApps * (queue.getUserLimit()/100.0f) *
                queue.getUserLimitFactor()));
        assertEquals(expectedMaxAppsPerUser, queue.getMaxApplicationsPerUser());

        // should default to global setting if per queue setting not set
        assertEquals(
            (long)CapacitySchedulerConfiguration.DEFAULT_MAXIMUM_APPLICATIONMASTERS_RESOURCE_PERCENT,
            (long)csConf.getMaximumApplicationMasterResourcePerQueuePercent(
                queue.getQueuePath())
        );

        // Change the per-queue max AM resources percentage.
        csConf.setFloat(
            "yarn.scheduler.capacity." + queue.getQueuePath()
            + ".maximum-am-resource-percent",
            0.5f);
        // Re-create queues to get new configs.
        queues = new HashMap<String, CSQueue>();
        root =
            CapacityScheduler.parseQueue(csContext, csConf, null, "root",
                queues, queues, TestUtils.spyHook);
        clusterResource = Resources.createResource(100 * 16 * GB);

        queue = (LeafQueue)queues.get(A);

        assertEquals((long) 0.5,
            (long) csConf.getMaximumApplicationMasterResourcePerQueuePercent(
                queue.getQueuePath())
        );

        assertEquals(queue.getAMResourceLimit(), Resource.newInstance(800*GB, 1));
        assertEquals(queue.getUserAMResourceLimit(),
            Resource.newInstance(400*GB, 1));

        // Change the per-queue max applications.
        csConf.setInt(
            "yarn.scheduler.capacity." + queue.getQueuePath()
            + ".maximum-applications", 9999);
        // Re-create queues to get new configs.
        queues = new HashMap<String, CSQueue>();
        root =
            CapacityScheduler.parseQueue(csContext, csConf, null, "root",
                queues, queues, TestUtils.spyHook);
        queue = (LeafQueue)queues.get(A);

        assertEquals(9999,
            (int)csConf.getMaximumApplicationsPerQueue(queue.getQueuePath()));
        assertEquals(9999, queue.getMaxApplications());

        expectedMaxAppsPerUser = Math.min(9999,
            (int)(9999 * (queue.getUserLimit()/100.0f) *
                queue.getUserLimitFactor()));
        assertEquals(expectedMaxAppsPerUser, queue.getMaxApplicationsPerUser());
    }

    @Test
    public void testActiveApplicationLimits() throws Exception {
        final String user_0 = "user_0";
        final String user_1 = "user_1";
        final String user_2 = "user_2";

        assertEquals(Resource.newInstance(16 * GB, 1),
            queue.getAMResourceLimit());
        assertEquals(Resource.newInstance(8 * GB, 1),
            queue.getUserAMResourceLimit());

        int APPLICATION_ID = 0;
        // Submit first application
        FiCaSchedulerApp app_0 = getMockApplication(APPLICATION_ID++, user_0,
            Resources.createResource(4 * GB, 0));
        queue.submitApplicationAttempt(app_0, user_0);
        assertEquals(1, queue.getNumActiveApplications());
        assertEquals(0, queue.getNumPendingApplications());
        assertEquals(1, queue.getNumActiveApplications(user_0));
        assertEquals(0, queue.getNumPendingApplications(user_0));

        // Submit second application
        FiCaSchedulerApp app_1 = getMockApplication(APPLICATION_ID++, user_0,
            Resources.createResource(4 * GB, 0));
        queue.submitApplicationAttempt(app_1, user_0);
        assertEquals(2, queue.getNumActiveApplications());
        assertEquals(0, queue.getNumPendingApplications());
        assertEquals(2, queue.getNumActiveApplications(user_0));
        assertEquals(0, queue.getNumPendingApplications(user_0));

        // Submit third application, should remain pending due to user amlimit
        FiCaSchedulerApp app_2 = getMockApplication(APPLICATION_ID++, user_0,
            Resources.createResource(4 * GB, 0));
        queue.submitApplicationAttempt(app_2, user_0);
        assertEquals(2, queue.getNumActiveApplications());
        assertEquals(1, queue.getNumPendingApplications());
        assertEquals(2, 
queue.getNumActiveApplications(user_0)); assertEquals(1, queue.getNumPendingApplications(user_0)); // Finish one application, app_2 should be activated queue.finishApplicationAttempt(app_0, A); assertEquals(2, queue.getNumActiveApplications()); assertEquals(0, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(0, queue.getNumPendingApplications(user_0)); // Submit another one for user_0 FiCaSchedulerApp app_3 = getMockApplication(APPLICATION_ID++, user_0, Resources.createResource(4 * GB, 0)); queue.submitApplicationAttempt(app_3, user_0); assertEquals(2, queue.getNumActiveApplications()); assertEquals(1, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(1, queue.getNumPendingApplications(user_0)); // Submit first app for user_1 FiCaSchedulerApp app_4 = getMockApplication(APPLICATION_ID++, user_1, Resources.createResource(8 * GB, 0)); queue.submitApplicationAttempt(app_4, user_1); assertEquals(3, queue.getNumActiveApplications()); assertEquals(1, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(1, queue.getNumPendingApplications(user_0)); assertEquals(1, queue.getNumActiveApplications(user_1)); assertEquals(0, queue.getNumPendingApplications(user_1)); // Submit first app for user_2, should block due to queue amlimit FiCaSchedulerApp app_5 = getMockApplication(APPLICATION_ID++, user_2, Resources.createResource(8 * GB, 0)); queue.submitApplicationAttempt(app_5, user_2); assertEquals(3, queue.getNumActiveApplications()); assertEquals(2, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(1, queue.getNumPendingApplications(user_0)); assertEquals(1, queue.getNumActiveApplications(user_1)); assertEquals(0, queue.getNumPendingApplications(user_1)); assertEquals(1, queue.getNumPendingApplications(user_2)); // Now finish one app of user_1 so app_5 should be 
activated queue.finishApplicationAttempt(app_4, A); assertEquals(3, queue.getNumActiveApplications()); assertEquals(1, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(1, queue.getNumPendingApplications(user_0)); assertEquals(0, queue.getNumActiveApplications(user_1)); assertEquals(0, queue.getNumPendingApplications(user_1)); assertEquals(1, queue.getNumActiveApplications(user_2)); assertEquals(0, queue.getNumPendingApplications(user_2)); } @Test public void testActiveLimitsWithKilledApps() throws Exception { final String user_0 = "user_0"; int APPLICATION_ID = 0; // Submit first application FiCaSchedulerApp app_0 = getMockApplication(APPLICATION_ID++, user_0, Resources.createResource(4 * GB, 0)); queue.submitApplicationAttempt(app_0, user_0); assertEquals(1, queue.getNumActiveApplications()); assertEquals(0, queue.getNumPendingApplications()); assertEquals(1, queue.getNumActiveApplications(user_0)); assertEquals(0, queue.getNumPendingApplications(user_0)); assertTrue(queue.activeApplications.containsKey(app_0.getApplicationId())); // Submit second application FiCaSchedulerApp app_1 = getMockApplication(APPLICATION_ID++, user_0, Resources.createResource(4 * GB, 0)); queue.submitApplicationAttempt(app_1, user_0); assertEquals(2, queue.getNumActiveApplications()); assertEquals(0, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(0, queue.getNumPendingApplications(user_0)); assertTrue(queue.activeApplications.containsKey(app_1.getApplicationId())); // Submit third application, should remain pending FiCaSchedulerApp app_2 = getMockApplication(APPLICATION_ID++, user_0, Resources.createResource(4 * GB, 0)); queue.submitApplicationAttempt(app_2, user_0); assertEquals(2, queue.getNumActiveApplications()); assertEquals(1, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(1, 
queue.getNumPendingApplications(user_0)); assertTrue(queue.pendingApplications.contains(app_2)); // Submit fourth application, should remain pending FiCaSchedulerApp app_3 = getMockApplication(APPLICATION_ID++, user_0, Resources.createResource(4 * GB, 0)); queue.submitApplicationAttempt(app_3, user_0); assertEquals(2, queue.getNumActiveApplications()); assertEquals(2, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(2, queue.getNumPendingApplications(user_0)); assertTrue(queue.pendingApplications.contains(app_3)); // Kill 3rd pending application queue.finishApplicationAttempt(app_2, A); assertEquals(2, queue.getNumActiveApplications()); assertEquals(1, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(1, queue.getNumPendingApplications(user_0)); assertFalse(queue.pendingApplications.contains(app_2)); assertFalse(queue.activeApplications.containsKey(app_2.getApplicationId())); // Finish 1st application, app_3 should become active queue.finishApplicationAttempt(app_0, A); assertEquals(2, queue.getNumActiveApplications()); assertEquals(0, queue.getNumPendingApplications()); assertEquals(2, queue.getNumActiveApplications(user_0)); assertEquals(0, queue.getNumPendingApplications(user_0)); assertTrue(queue.activeApplications.containsKey(app_3.getApplicationId())); assertFalse(queue.pendingApplications.contains(app_3)); assertFalse(queue.activeApplications.containsKey(app_0.getApplicationId())); // Finish 2nd application queue.finishApplicationAttempt(app_1, A); assertEquals(1, queue.getNumActiveApplications()); assertEquals(0, queue.getNumPendingApplications()); assertEquals(1, queue.getNumActiveApplications(user_0)); assertEquals(0, queue.getNumPendingApplications(user_0)); assertFalse(queue.activeApplications.containsKey(app_1.getApplicationId())); // Finish 4th application queue.finishApplicationAttempt(app_3, A); assertEquals(0, 
queue.getNumActiveApplications()); assertEquals(0, queue.getNumPendingApplications()); assertEquals(0, queue.getNumActiveApplications(user_0)); assertEquals(0, queue.getNumPendingApplications(user_0)); assertFalse(queue.activeApplications.containsKey(app_3.getApplicationId())); } @Test public void testHeadroom() throws Exception { CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(); csConf.setUserLimit(CapacitySchedulerConfiguration.ROOT + "." + A, 25); setupQueueConfiguration(csConf); YarnConfiguration conf = new YarnConfiguration(); CapacitySchedulerContext csContext = mock(CapacitySchedulerContext.class); when(csContext.getConfiguration()).thenReturn(csConf); when(csContext.getConf()).thenReturn(conf); when(csContext.getMinimumResourceCapability()). thenReturn(Resources.createResource(GB)); when(csContext.getMaximumResourceCapability()). thenReturn(Resources.createResource(16*GB)); when(csContext.getApplicationComparator()). thenReturn(CapacityScheduler.applicationComparator); when(csContext.getQueueComparator()). 
thenReturn(CapacityScheduler.queueComparator); when(csContext.getResourceCalculator()).thenReturn(resourceCalculator); when(csContext.getRMContext()).thenReturn(rmContext); // Say cluster has 100 nodes of 16G each Resource clusterResource = Resources.createResource(100 * 16 * GB); when(csContext.getClusterResource()).thenReturn(clusterResource); Map<String, CSQueue> queues = new HashMap<String, CSQueue>(); CapacityScheduler.parseQueue(csContext, csConf, null, "root", queues, queues, TestUtils.spyHook); // Manipulate queue 'a' LeafQueue queue = TestLeafQueue.stubLeafQueue((LeafQueue)queues.get(A)); String host_0 = "host_0"; String rack_0 = "rack_0"; FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, rack_0, 0, 16*GB); final String user_0 = "user_0"; final String user_1 = "user_1"; RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); RMContext rmContext = TestUtils.getMockRMContext(); RMContext spyRMContext = spy(rmContext); ConcurrentMap<ApplicationId, RMApp> spyApps = spy(new ConcurrentHashMap<ApplicationId, RMApp>()); RMApp rmApp = mock(RMApp.class); ResourceRequest amResourceRequest = mock(ResourceRequest.class); Resource amResource = Resources.createResource(0, 0); when(amResourceRequest.getCapability()).thenReturn(amResource); when(rmApp.getAMResourceRequest()).thenReturn(amResourceRequest); Mockito.doReturn(rmApp).when(spyApps).get((ApplicationId)Matchers.any()); when(spyRMContext.getRMApps()).thenReturn(spyApps); Priority priority_1 = TestUtils.createMockPriority(1); // Submit first application with some resource-requests from user_0, // and check headroom final ApplicationAttemptId appAttemptId_0_0 = TestUtils.getMockApplicationAttemptId(0, 0); FiCaSchedulerApp app_0_0 = new FiCaSchedulerApp( appAttemptId_0_0, user_0, queue, queue.getActiveUsersManager(), spyRMContext); queue.submitApplicationAttempt(app_0_0, user_0); List<ResourceRequest> app_0_0_requests = new ArrayList<ResourceRequest>(); app_0_0_requests.add( 
TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true, priority_1, recordFactory)); app_0_0.updateResourceRequests(app_0_0_requests); // Schedule to compute queue.assignContainers(clusterResource, node_0, new ResourceLimits( clusterResource)); Resource expectedHeadroom = Resources.createResource(10*16*GB, 1); assertEquals(expectedHeadroom, app_0_0.getHeadroom()); // Submit second application from user_0, check headroom final ApplicationAttemptId appAttemptId_0_1 = TestUtils.getMockApplicationAttemptId(1, 0); FiCaSchedulerApp app_0_1 = new FiCaSchedulerApp( appAttemptId_0_1, user_0, queue, queue.getActiveUsersManager(), spyRMContext); queue.submitApplicationAttempt(app_0_1, user_0); List<ResourceRequest> app_0_1_requests = new ArrayList<ResourceRequest>(); app_0_1_requests.add( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true, priority_1, recordFactory)); app_0_1.updateResourceRequests(app_0_1_requests); // Schedule to compute queue.assignContainers(clusterResource, node_0, new ResourceLimits( clusterResource)); // Schedule to compute assertEquals(expectedHeadroom, app_0_0.getHeadroom()); assertEquals(expectedHeadroom, app_0_1.getHeadroom());// no change // Submit first application from user_1, check for new headroom final ApplicationAttemptId appAttemptId_1_0 = TestUtils.getMockApplicationAttemptId(2, 0); FiCaSchedulerApp app_1_0 = new FiCaSchedulerApp( appAttemptId_1_0, user_1, queue, queue.getActiveUsersManager(), spyRMContext); queue.submitApplicationAttempt(app_1_0, user_1); List<ResourceRequest> app_1_0_requests = new ArrayList<ResourceRequest>(); app_1_0_requests.add( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true, priority_1, recordFactory)); app_1_0.updateResourceRequests(app_1_0_requests); // Schedule to compute queue.assignContainers(clusterResource, node_0, new ResourceLimits( clusterResource)); // Schedule to compute expectedHeadroom = Resources.createResource(10*16*GB / 2, 1); // changes 
assertEquals(expectedHeadroom, app_0_0.getHeadroom()); assertEquals(expectedHeadroom, app_0_1.getHeadroom()); assertEquals(expectedHeadroom, app_1_0.getHeadroom()); // Now reduce cluster size and check for the smaller headroom clusterResource = Resources.createResource(90*16*GB); queue.assignContainers(clusterResource, node_0, new ResourceLimits( clusterResource)); // Schedule to compute expectedHeadroom = Resources.createResource(9*16*GB / 2, 1); // changes assertEquals(expectedHeadroom, app_0_0.getHeadroom()); assertEquals(expectedHeadroom, app_0_1.getHeadroom()); assertEquals(expectedHeadroom, app_1_0.getHeadroom()); } @After public void tearDown() { } }
import java.io.*; public class Intcode { /* * Could be enum values but ints are just so much easier! */ /** * The Opcodes for the computer. */ public static final int ADD = 1; public static final int MULTIPLY = 2; public static final int INPUT_AND_STORE = 3; public static final int OUTPUT = 4; public static final int JUMP_IF_TRUE = 5; public static final int JUMP_IF_FALSE = 6; public static final int LESS_THAN = 7; public static final int EQUALS = 8; public static final int HALT = 99; /** * The Parameter modes. * * Parameters that an instruction writes to will never be in immediate mode. */ public static final int POSITION_MODE = 0; // parameter is interpreted as a position public static final int IMMEDIATE_MODE = 1; // parameter is interpreted as a value private static final int MAX_PARAMETERS = 3; public static final String DELIMITER = ","; /* * This implementation is stateless other than being placed * into debug mode where it will output whatever action it * takes. */ public Intcode (boolean debug) { _debug = debug; } public String parseAndExecute (String[] values, int initialInput1, int initialInput2) { String returnValue = ""; int inputParam = 1; if (_debug) System.out.println("Intcode inputs <"+initialInput1+", "+initialInput2+">"); for (int i = 0; i < values.length; i++) { String str = getOpcode(values[i]); int[] modes = getModes(values[i]); if (_debug) { System.out.println("\nWorking on element "+i+" which is command "+str+ " with parameter modes ..."); printModes(modes); } /* * Now factor in the parameter modes. */ switch (Integer.valueOf(str)) { case Intcode.ADD: { /* * Opcode 1 adds together numbers read from two positions * and stores the result in a third position. The three integers * immediately after the opcode tell you these three positions - the * first two indicate the positions from which you should read the * input values, and the third indicates the position at which * the output should be stored. 
*/ int param1 = Integer.valueOf(values[i+1]); int param2 = Integer.valueOf(values[i+2]); int param3 = Integer.valueOf(values[i+3]); if (modes[0] == POSITION_MODE) param1 = Integer.valueOf(values[param1]); if (modes[1] == POSITION_MODE) param2 = Integer.valueOf(values[param2]); if (_debug) System.out.println("Adding "+param1+" and "+param2); int sum = param1+param2; if (_debug) System.out.println("Storing "+sum+" at position "+param3); values[param3] = String.valueOf(sum); i = i+3; // move the pointer on. } break; case Intcode.MULTIPLY: { /* * Opcode 2 works exactly like opcode 1, except it multiplies the * two inputs instead of adding them. Again, the three integers after * the opcode indicate where the inputs and outputs are, not their values. */ int param1 = Integer.valueOf(values[i+1]); int param2 = Integer.valueOf(values[i+2]); int param3 = Integer.valueOf(values[i+3]); if (modes[0] == POSITION_MODE) param1 = Integer.valueOf(values[param1]); if (modes[1] == POSITION_MODE) param2 = Integer.valueOf(values[param2]); if (_debug) System.out.println("Multiplying "+param1+" and "+param2); int product = Integer.valueOf(param1)*Integer.valueOf(param2); if (_debug) System.out.println("Storing "+product+" at position "+param3); values[param3] = String.valueOf(product); i = i+3; // move the pointer on. } break; case Intcode.INPUT_AND_STORE: { /* * Opcode 3 takes a single integer as input and saves it to * the position given by its only parameter. */ int param1 = Integer.valueOf(values[i+1]); if (_debug) System.out.println("Storing "+((inputParam == 1) ? initialInput1 : initialInput2)+" at position "+param1); values[param1] = String.valueOf(((inputParam == 1) ? initialInput1 : initialInput2)); inputParam++; // assume only 2! if (inputParam > 2) inputParam = 2; i = i+1; // move the pointer on. } break; case Intcode.OUTPUT: { /* * Opcode 4 outputs the value of its only parameter. 
*/ int param1 = Integer.valueOf(values[i+1]); if (modes[0] == IMMEDIATE_MODE) returnValue = Integer.toString(param1); else returnValue = values[param1]; if (_debug) System.out.println("Outputting value "+returnValue+" from entry "+param1); i = i+1; // move the pointer on. } break; case Intcode.JUMP_IF_TRUE: { /* * If the first parameter is non-zero, it sets the instruction pointer to * the value from the second parameter. Otherwise, it does nothing. */ int param1 = Integer.valueOf(values[i+1]); int param2 = Integer.valueOf(values[i+2]); if (modes[0] == POSITION_MODE) param1 = Integer.valueOf(values[param1]); if (modes[1] == POSITION_MODE) param2 = Integer.valueOf(values[param2]); if (_debug) System.out.println("Checking "+param1+" != 0 and might jump to "+param2); if (param1 != 0) { i = param2 -1; // remember we're in a for-loop! if (_debug) System.out.println("Will jump to "+param2); } else i = i+2; } break; case Intcode.JUMP_IF_FALSE: { /* * If the first parameter is zero, it sets the instruction pointer to the value * from the second parameter. Otherwise, it does nothing. */ int param1 = Integer.valueOf(values[i+1]); int param2 = Integer.valueOf(values[i+2]); if (modes[0] == POSITION_MODE) param1 = Integer.valueOf(values[param1]); if (modes[1] == POSITION_MODE) param2 = Integer.valueOf(values[param2]); if (_debug) System.out.println("Checking "+param1+" == 0 and might jump to "+param2); if (param1 == 0) { i = param2 -1; // remember we're in a for-loop! if (_debug) System.out.println("Will jump to "+param2); } else i = i+2; } break; case Intcode.LESS_THAN: { /* * If the first parameter is less than the second parameter, it stores 1 * in the position given by the third parameter. Otherwise, it stores 0. 
*/ int param1 = Integer.valueOf(values[i+1]); int param2 = Integer.valueOf(values[i+2]); int param3 = Integer.valueOf(values[i+3]); if (modes[0] == POSITION_MODE) param1 = Integer.valueOf(values[param1]); if (modes[1] == POSITION_MODE) param2 = Integer.valueOf(values[param2]); if (_debug) { System.out.println("Checking whether "+param1+" < "+param2); System.out.print("Storing "); } if (param1 < param2) { if (_debug) System.out.print("1"); values[param3] = "1"; } else { if (_debug) System.out.print("0"); values[param3] = "0"; } if (_debug) System.out.println(" at location "+param3); i = i+3; // move the pointer on. } break; case Intcode.EQUALS: { /* * If the first parameter is equal to the second parameter, it stores 1 * in the position given by the third parameter. Otherwise, it stores 0. */ int param1 = Integer.valueOf(values[i+1]); int param2 = Integer.valueOf(values[i+2]); int param3 = Integer.valueOf(values[i+3]); if (modes[0] == POSITION_MODE) param1 = Integer.valueOf(values[param1]); if (modes[1] == POSITION_MODE) param2 = Integer.valueOf(values[param2]); if (_debug) { System.out.println("Checking whether "+param1+" is equal to "+param2); System.out.print("Storing "); } if (param1 == param2) { if (_debug) System.out.print("1"); values[param3] = "1"; } else { if (_debug) System.out.print("0"); values[param3] = "0"; } if (_debug) System.out.println(" at location "+param3); i = i+3; // move the pointer on. } break; case Intcode.HALT: { /* * Means that the program is finished and should immediately halt. 
*/ if (_debug) System.out.println("Halting execution with "+returnValue); return returnValue; } default: { System.out.println("Unknown opcode "+str+" encountered"); return "NaN"; } } } return returnValue; } private String getOpcode (String digits) { if (_debug) System.out.println("Command: "+digits); String opcode = null; if ((digits != null) && (digits.length() > 2)) opcode = digits.substring(digits.length()-2); else opcode = digits; if (_debug) System.out.println("Opcode: "+opcode); return opcode; } /* * Return the modes for the parameters, including default mode * (POSITION_MODE) if nothing is defined. */ private int[] getModes (String digits) { int[] theModes = new int[MAX_PARAMETERS]; for (int i = 0; i < MAX_PARAMETERS; i++) theModes[i] = POSITION_MODE; if ((digits != null) && (digits.length() > 2)) { String allModes = digits.substring(0, digits.length()-2); char[] modeArray = allModes.toCharArray(); for (int j = modeArray.length-1; j >= 0; j--) { if (modeArray[j] == '1') theModes[modeArray.length-j-1] = IMMEDIATE_MODE; } } if (_debug) printModes(theModes); return theModes; } private void printModes (int[] modes) { for (int i = 0; i < modes.length; i++) { System.out.println("Parameter "+i+" is "+((modes[i] == IMMEDIATE_MODE) ? "immediate mode": "position mode")); } } private boolean _debug; }
/* BeanUtils Version 1.0.0 Created by yangtu222 on 2017.08.05 Distributed under the permissive zlib License Get the latest version from here: https://github.com/yangtu222/BeanUtils This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. */ package com.tuyang.beanutils.internal.cache; import java.beans.PropertyDescriptor; import java.lang.ref.SoftReference; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import com.tuyang.beanutils.BeanCopier; import com.tuyang.beanutils.BeanCopyConvertor; import com.tuyang.beanutils.annotation.BeanCopySource; import com.tuyang.beanutils.annotation.CopyCollection; import com.tuyang.beanutils.annotation.CopyFeature; import com.tuyang.beanutils.annotation.CopyProperty; import com.tuyang.beanutils.config.BeanCopyConfig; import com.tuyang.beanutils.exception.BeanCopyException; import com.tuyang.beanutils.internal.convertors.ArrayConvertorFactory; import com.tuyang.beanutils.internal.convertors.EnumToEnumArrayConvertor; import 
com.tuyang.beanutils.internal.convertors.EnumToStringArrayConvertor; import com.tuyang.beanutils.internal.convertors.EnumToStringConvertor; import com.tuyang.beanutils.internal.convertors.ObjectToStringConvertor; import com.tuyang.beanutils.internal.convertors.ListToEnumArrayConvertor; import com.tuyang.beanutils.internal.convertors.StringToEnumArrayConvertor; import com.tuyang.beanutils.internal.convertors.ObjectToEnumConvertor; import com.tuyang.beanutils.internal.convertors.ObjectToStringArrayConvertor; import com.tuyang.beanutils.internal.dump.BeanCopyDump; import com.tuyang.beanutils.internal.factory.BeanCopierFactory; import com.tuyang.beanutils.internal.logger.Logger; import com.tuyang.beanutils.internal.utils.PropertyUtils; public class BeanCopyCache { private static Logger logger = Logger.getLogger(BeanCopyCache.class); private static Map<Long, SoftReference<BeanCopier>> beanCopyCacheMap = new ConcurrentHashMap<>(); private static BeanCopyConfig beanCopyConfig = BeanCopyConfig.instance(); private static BeanCopierFactory beanCopyFactory = null; public static void setBeanCopyConfig(BeanCopyConfig beanCopyConfig) { if( beanCopyConfig == null ) beanCopyConfig = new BeanCopyConfig(); BeanCopyCache.beanCopyConfig = beanCopyConfig; synchronized (BeanCopyCache.class) { if( beanCopyFactory != null && !beanCopyFactory.equals(beanCopyConfig.getBeanCopyFactory() ) ) beanCopyCacheMap.clear(); beanCopyFactory = null; } } public static BeanCopier getBeanCopy(Class<?> sourceClass, Class<?> targetClass, Class<?> optionClass) { long cacheKey = (((long)sourceClass.hashCode() ) << 16 )+ (long) targetClass.hashCode(); if( optionClass != null ) { cacheKey = (cacheKey <<16) + (long)optionClass.hashCode(); } else { cacheKey = (cacheKey <<16) + (long)targetClass.hashCode(); } BeanCopier beanCopy = null; SoftReference<BeanCopier> refBeanCopy = beanCopyCacheMap.get(cacheKey); if( refBeanCopy != null ) beanCopy = refBeanCopy.get(); if( BeanCopyConfig.instance().getDumpOption() == 
BeanCopyConfig.DumpOption.AutoDumpAlways ) { BeanCopyDump.dumpPropertyMapping(sourceClass, targetClass, optionClass); } if( beanCopy != null ) return beanCopy; synchronized (BeanCopyCache.class) { if( beanCopyFactory == null ) { try { beanCopyFactory = beanCopyConfig.getBeanCopyFactory().newInstance(); } catch (Exception e) { throw new BeanCopyException("BeanCopyConfig is not configured correctly!"); } } } CopyFeature[] features = parseBeanCopyFeatures(sourceClass, targetClass, optionClass); List<BeanCopyPropertyItem> itemList = buildBeanCopyPropertyItem(sourceClass, targetClass, optionClass); beanCopy = beanCopyFactory.createBeanCopier(sourceClass, targetClass, itemList, features); if( beanCopy != null ) { beanCopyCacheMap.put(cacheKey, new SoftReference<BeanCopier>(beanCopy)); } if( BeanCopyConfig.instance().getDumpOption() == BeanCopyConfig.DumpOption.AutoDumpAtFirstCopy ) { BeanCopyDump.dumpPropertyMapping(sourceClass, targetClass, optionClass, itemList); } return beanCopy; } private static CopyFeature[] parseBeanCopyFeatures(Class<?> sourceClass, Class<?> targetClass, Class<?> optionClass ) { if( optionClass != null ) { if( optionClass.isAnnotationPresent(BeanCopySource.class) ) { BeanCopySource source = optionClass.getAnnotation(BeanCopySource.class); Class<?> sourceClassFromAnnotation = source.source(); if( sourceClassFromAnnotation.isAssignableFrom(sourceClass) ) { return source.features(); } else { //fix sourceClass is proxy class. 
// NOTE(review): the lines below are the tail of the preceding feature-lookup
// method, cut off by the chunk boundary above — left byte-identical. It falls
// back to the target class' own BeanCopySource annotation and returns null
// when no copy features apply.
if( sourceClass.getName().startsWith(sourceClassFromAnnotation.getName()) ) {
    return source.features();
} } } }
if( targetClass.isAnnotationPresent(BeanCopySource.class) ) {
    BeanCopySource source = targetClass.getAnnotation(BeanCopySource.class);
    Class<?> sourceClassFromAnnotation = source.source();
    if( sourceClassFromAnnotation.isAssignableFrom(sourceClass) ) {
        return source.features();
    }
}
return null;
}

/**
 * Builds the list of per-property copy instructions used to copy a bean of
 * {@code sourceClass} into a bean of {@code targetClass}.
 *
 * <p>For every writable property of the target class this method inspects the
 * matching source property (optionally redirected via a {@code CopyProperty}
 * or {@code CopyCollection} annotation on the target field), validates that
 * the two sides are compatible, and records how the value must be transferred:
 * direct assignment, recursive bean copy, a user-supplied convertor, one of
 * the built-in enum/string/array convertors, or a collection copy.
 *
 * @param sourceClass the class values are read from
 * @param targetClass the class values are written to
 * @param optionClass optional class carrying copy annotations that override
 *                    those on {@code targetClass}; may be {@code null}
 * @return the list of copy items, one per copyable property
 * @throws BeanCopyException when an annotation is inconsistent with the actual
 *         property types (missing source property, convertor type mismatch,
 *         array/collection mismatch, etc.)
 */
@SuppressWarnings("rawtypes")
public static List<BeanCopyPropertyItem> buildBeanCopyPropertyItem(Class<?> sourceClass, Class<?> targetClass, Class<?> optionClass ) {
    List<BeanCopyPropertyItem> itemList = new ArrayList<>();
    Class<?> beanAnnotationSource = null;
    BeanCopySource beanCopySource = null;
    // Resolve the BeanCopySource annotation: optionClass takes precedence over
    // the target class itself.
    if( optionClass != null ) {
        if( optionClass.isAnnotationPresent(BeanCopySource.class) ) {
            beanCopySource = optionClass.getAnnotation(BeanCopySource.class);
            Class<?> sourceClassFromAnnotation = beanCopySource.source();
            if( sourceClassFromAnnotation.isAssignableFrom(sourceClass) ) {
                beanAnnotationSource = sourceClassFromAnnotation;
            } else {
                // fix sourceClass is proxy class: a runtime proxy's name starts
                // with the declared source class name.
                if( sourceClass.getName().startsWith(sourceClassFromAnnotation.getName()) ) {
                    beanAnnotationSource = sourceClassFromAnnotation;
                }
            }
        }
    }
    if( beanAnnotationSource == null && targetClass.isAnnotationPresent(BeanCopySource.class) ) {
        beanCopySource = targetClass.getAnnotation(BeanCopySource.class);
        Class<?> sourceClassFromAnnotation = beanCopySource.source();
        if( sourceClassFromAnnotation.isAssignableFrom(sourceClass) ) {
            beanAnnotationSource = sourceClassFromAnnotation;
        } else {
            // fix sourceClass is proxy class.
            if( sourceClass.getName().startsWith(sourceClassFromAnnotation.getName()) ) {
                beanAnnotationSource = sourceClassFromAnnotation;
            }
        }
    }
    // Extract copy features controlling enum conversion failures and
    // Object-to-String copying.
    boolean enumThrowExceptions = true;
    boolean useObjectToStringCopy = false;
    CopyFeature[] features = null;
    if( beanCopySource != null ) {
        features = beanCopySource.features();
        for( CopyFeature feature :features ) {
            if(feature == CopyFeature.IGNORE_ENUM_CONVERT_EXCEPTION ) {
                enumThrowExceptions = false;
            }else if( feature == CopyFeature.ENABLE_JAVA_BEAN_TO_STRING ) {
                useObjectToStringCopy = true;
            }
        }
    }
    // Iterate over every writable target property and decide how to copy it.
    PropertyDescriptor[] targetPds = PropertyUtils.getPropertyDescriptors(targetClass);
    for( PropertyDescriptor targetPd: targetPds ) {
        Method writeMethod = null;
        writeMethod = targetPd.getWriteMethod();
        if( writeMethod == null )
            continue;   // read-only property: nothing to copy into
        String propertyName = null;
        Field propertyField = null;
        Class<?> methodTargetType = null;
        Class<?> methodTargetArray = null;
        boolean targetIsArray = false;
        Method[] readMethods = null;
        Class<?> methodSourceType = null;
        Class<?> methodSourceArray = null;
        boolean sourceIsArray = false;
        Class<?> convertorClass = null;
        Object convertorObject = null;
        boolean isCollection = false;
        Class<?> propertyOptionClass = null;
        Class<?> collectionClass = null;
        propertyName = targetPd.getName();
        propertyField = null;
        methodTargetType = writeMethod.getParameterTypes()[0];
        methodTargetArray = methodTargetType;
        targetIsArray = methodTargetType.isArray();
        if( targetIsArray ) {
            // work with the component type from here on
            methodTargetType = methodTargetArray.getComponentType();
        }
        propertyField = PropertyUtils.getClassField(targetClass, optionClass, propertyName);
        if( propertyField!= null && propertyField.isAnnotationPresent(CopyProperty.class)) {
            // --- Case 1: property annotated with @CopyProperty --------------
            CopyProperty copyAnnotation = propertyField.getAnnotation(CopyProperty.class);
            String annotationPropertyName = copyAnnotation.property();
            if( copyAnnotation.ignored() ) {
                continue;   // explicitly excluded from copying
            }
            // An explicit property path requires a declared BeanCopySource.
            if( !( annotationPropertyName == null || "".equals(annotationPropertyName) ) ) {
                if( beanAnnotationSource == null ) {
                    logger.warn("BeanCopy: " +
                        targetClass.getName() + " has no BeanCopySource annotation, but " + propertyName + " has BeanProperty annotation with property defined");
                    throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " has no BeanCopySource annotation, but " + propertyName + " has BeanProperty annotation with property defined" );
                } else {
                    propertyName = annotationPropertyName;
                }
            }
            // Split a dotted path ("a.b.c") into a chain of read methods.
            String[] propertyNames = null;
            if( propertyName.contains(".") ) {
                propertyNames = propertyName.split("\\.");
            } else {
                propertyNames = new String[1];
                propertyNames[0] = propertyName;
            }
            readMethods = new Method[propertyNames.length];
            methodSourceType = sourceClass;
            for( int i = 0; i< propertyNames.length ; i++ ) {
                PropertyDescriptor sourcePd = PropertyUtils.getPropertyDescriptor(methodSourceType, propertyNames[i] );
                if( sourcePd == null ) {
                    logger.error("BeanCopy: " + methodSourceType.getName() + " has no property "+ propertyNames[i] +" defined.!" );
                    throw new BeanCopyException("BeanCopy: " + methodSourceType.getName() + " has no property "+ propertyNames[i] +" defined.!"
                    );
                } else {
                    readMethods[i] = sourcePd.getReadMethod();
                }
                methodSourceType = readMethods[i].getReturnType();
            }
            methodSourceArray = methodSourceType;
            sourceIsArray = methodSourceArray.isArray();
            if( sourceIsArray ) {
                methodSourceType = methodSourceArray.getComponentType();
            }
            // Validate the user-supplied convertor class, if any: it must be a
            // concrete BeanCopyConvertor whose generic arguments match the
            // source/target property types.
            convertorClass = copyAnnotation.convertor();
            convertorObject = null;
            if( convertorClass.equals(void.class) ) {
                convertorClass = null;   // void.class is the annotation default: no convertor
            } else if( !PropertyUtils.isInterfaceType(convertorClass, BeanCopyConvertor.class ) ) {
                convertorClass = null;
                logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor property is not a convertor class!!");
                throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor property is not a convertor class!!");
            } else if( convertorClass.isInterface() ) {
                logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor is a interface!!");
                throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor is a interface!!");
            } else {
                try {
                    Type[] genericInterfaces = convertorClass.getGenericInterfaces();
                    ParameterizedType convertorInterface = null;
                    for( Type type : genericInterfaces ) {
                        ParameterizedType parameterizedType = (ParameterizedType) type;
                        if( parameterizedType.getRawType().equals(BeanCopyConvertor.class) ) {
                            convertorInterface = parameterizedType;
                        }
                    }
                    Class<?> converterClassSource = null;
                    Class<?> converterClassTarget = null;
                    Type[] converterTypes = convertorInterface.getActualTypeArguments();
                    if( converterTypes[0] instanceof ParameterizedType ) {
                        converterClassSource = (Class<?>) ((ParameterizedType)converterTypes[0]).getRawType();
                    } else {
                        converterClassSource = (Class<?>) converterTypes[0];
                    }
                    if( converterTypes[1] instanceof ParameterizedType ) {
                        converterClassTarget = (Class<?>)
                            ((ParameterizedType)converterTypes[1]).getRawType();
                    } else {
                        converterClassTarget = (Class<?>) converterTypes[1];
                    }
                    if( !( PropertyUtils.isAssignable(methodSourceType, converterClassSource) && PropertyUtils.isAssignable(methodTargetType, converterClassTarget ) ) ) {
                        logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor does match the type!!");
                        throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor does match the type!!");
                    }
                } catch (Exception e) {
                    logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor: get generic type error!!", e);
                    throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor: get generic type error!!", e);
                }
            }
            propertyOptionClass = copyAnnotation.optionClass();
            if( propertyOptionClass.equals(void.class) || propertyOptionClass.equals(Void.class) ) {
                propertyOptionClass = null;   // annotation default: no option class
            }
            // convertor and optionClass are mutually exclusive.
            if( convertorClass != null && propertyOptionClass != null ) {
                logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor and optionClass cannot be set both!!");
                throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor and optionClass cannot be set both!!");
            }
            // Without a convertor, array-ness must be compatible on both sides.
            if( convertorClass == null ) {
                if( (!targetIsArray && sourceIsArray) || (targetIsArray && (!sourceIsArray && !PropertyUtils.isInterfaceType(methodSourceType, Collection.class) ) ) ) {
                    logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Array type mismatch!!");
                    throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Array type mismatch!!");
                }
            }
            if( ( PropertyUtils.isPrimitive(methodSourceType) || PropertyUtils.isPrimitive(methodTargetType) )
                && propertyOptionClass != null ) {
                logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty optionClass cannot be set on primitive tpye!!");
                throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Annotation BeanProperty convertor and optionClass on primitive tpye!!");
            }
            // Build the copy item for the annotated property.
            if( convertorClass != null ) {
                // user-supplied convertor wins over everything else
                BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                item.propertyName = propertyName;
                item.writeMethod = writeMethod;
                item.readMethods = readMethods;
                item.isCollection = isCollection;
                item.useBeanCopy = false;
                item.optionClass = null;
                item.convertorClass = convertorClass;
                item.convertorObject = (BeanCopyConvertor) convertorObject;
                itemList.add(item);
            } else if( !targetIsArray ) {
                // ---- scalar target -----------------------------------------
                if( PropertyUtils.isAssignable(methodTargetType, methodSourceType) ) {
                    if( PropertyUtils.isPrimitive(methodTargetType) || methodTargetType.isEnum() ) {
                        // direct value assignment
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.optionClass = null;
                        item.convertorClass = null;
                        item.useBeanCopy = false;
                        itemList.add(item);
                    } else{
                        //deep copy.
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.optionClass = null;
                        item.convertorClass = null;
                        item.useBeanCopy = true;
                        itemList.add(item);
                    }
                } else if( propertyOptionClass != null ) {
                    // nested bean copy driven by the option class
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.useBeanCopy = false;
                    item.optionClass = propertyOptionClass;
                    itemList.add(item);
                } else if( methodTargetType.isAnnotationPresent(BeanCopySource.class) ) {
                    // nested bean whose own annotation names the source type
                    beanCopySource = methodTargetType.getAnnotation(BeanCopySource.class);
                    Class<?> sourceClassFromAnnotation = beanCopySource.source();
                    if( sourceClassFromAnnotation.equals(methodSourceType ) ) {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.useBeanCopy = false;
                        item.optionClass = methodTargetType;
                        itemList.add(item);
                    } else {
                        logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                        throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    }
                } else if( methodTargetType.isEnum() ) {
                    // anything -> enum via built-in convertor
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.optionClass = null;
                    item.convertorClass = ObjectToEnumConvertor.class;
                    item.convertorObject = new
                        ObjectToEnumConvertor(methodTargetType, enumThrowExceptions);
                    item.useBeanCopy = true;
                    itemList.add(item);
                } else if( methodTargetType.equals(String.class) && methodSourceType.isEnum() ) {
                    // enum -> String via built-in convertor
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.optionClass = null;
                    item.convertorClass = EnumToStringConvertor.class;
                    item.convertorObject = new EnumToStringConvertor();
                    item.useBeanCopy = true;
                    itemList.add(item);
                } else if( !(PropertyUtils.isPrimitive(methodSourceType) || PropertyUtils.isPrimitive(methodTargetType) ) ) {
                    // two non-primitive beans: recursive bean copy
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.optionClass = null;
                    item.convertorClass = null;
                    item.useBeanCopy = true;
                    itemList.add(item);
                } else {
                    if( useObjectToStringCopy && methodTargetType.equals(String.class) ) {
                        // feature-gated Object -> String copy
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.optionClass = null;
                        item.convertorClass = ObjectToStringConvertor.class;
                        item.convertorObject = new ObjectToStringConvertor();
                        item.useBeanCopy = false;
                        itemList.add(item);
                    } else {
                        logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                        throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    }
                }
            } else if( targetIsArray ) {
                // ---- array target ------------------------------------------
                if( methodTargetArray.equals(methodSourceArray) ) {
                    //use deep copy.
                    if( methodSourceType.isEnum() ) {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.useBeanCopy = false;
                        item.optionClass = null;
                        item.convertorObject = new EnumToEnumArrayConvertor();
                        item.convertorClass = item.convertorObject.getClass();
                        itemList.add(item);
                    } else {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.useBeanCopy = false;
                        item.optionClass = null;
                        item.convertorObject = ArrayConvertorFactory.getArrayConvertor(methodSourceType, methodTargetType, null);
                        item.convertorClass = item.convertorObject.getClass();
                        itemList.add(item);
                    }
                } else if( PropertyUtils.isAssignable(methodTargetType, methodSourceType) ) {
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.useBeanCopy = false;
                    item.optionClass = null;
                    item.convertorObject = ArrayConvertorFactory.getArrayConvertor(methodSourceType, methodTargetType, null);
                    item.convertorClass = item.convertorObject.getClass();
                    itemList.add(item);
                } else if( PropertyUtils.isInterfaceType(methodSourceType, Collection.class) ) {
                    // collection source feeding an array target
                    if( methodTargetType.isEnum() ) {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.useBeanCopy = false;
                        item.optionClass = null;
                        item.convertorObject = new ListToEnumArrayConvertor(methodTargetType, enumThrowExceptions);
                        item.convertorClass = item.convertorObject.getClass();
                        itemList.add(item);
                    } else {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = true;
                        item.useBeanCopy = true;
                        item.convertorObject = null;
                        item.convertorClass = null;
                        item.optionClass = propertyOptionClass;
                        item.features = features;
                        itemList.add(item);
                    }
                } else if( propertyOptionClass != null ) {
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.useBeanCopy = false;
                    item.convertorObject = ArrayConvertorFactory.getArrayConvertor(methodSourceType, methodTargetType, propertyOptionClass);
                    item.convertorClass = item.convertorObject.getClass();
                    item.optionClass = null;
                    itemList.add(item);
                } else if( methodTargetType.isAnnotationPresent(BeanCopySource.class) ) {
                    BeanCopySource source = methodTargetType.getAnnotation(BeanCopySource.class);
                    Class<?> sourceClassFromAnnotation = source.source();
                    if( sourceClassFromAnnotation.equals(methodSourceType ) ) {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.useBeanCopy = false;
                        item.convertorObject = ArrayConvertorFactory.getArrayConvertor(methodSourceType, methodTargetType, methodTargetType);
                        item.convertorClass = item.convertorObject.getClass();
                        item.optionClass = null;
                        itemList.add(item);
                    } else {
                        logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                        throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+
                            propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    }
                } else if( methodSourceType.isEnum() && methodTargetType.equals(String.class)) {
                    BeanCopyPropertyItem item = new
                        BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.optionClass = null;
                    item.convertorClass = EnumToStringArrayConvertor.class;
                    item.convertorObject = new EnumToStringArrayConvertor();
                    item.useBeanCopy = true;
                    itemList.add(item);
                } else if( methodSourceType.equals(String.class) && methodTargetType.isEnum() ) {
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.optionClass = null;
                    item.convertorClass = StringToEnumArrayConvertor.class;
                    item.convertorObject = new StringToEnumArrayConvertor(methodTargetType, enumThrowExceptions);
                    item.useBeanCopy = true;
                    itemList.add(item);
                } else if( !(PropertyUtils.isPrimitive(methodSourceType) || PropertyUtils.isPrimitive(methodTargetType) ) ) {
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.optionClass = null;
                    item.convertorClass = null;
                    item.useBeanCopy = false;
                    item.convertorObject = ArrayConvertorFactory.getArrayConvertor(methodSourceType, methodTargetType, null);
                    item.convertorClass = item.convertorObject.getClass();
                    item.optionClass = null;
                    itemList.add(item);
                } else {
                    if( useObjectToStringCopy && methodTargetType.equals(String.class) ) {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.optionClass = null;
                        item.convertorClass = ObjectToStringArrayConvertor.class;
                        item.convertorObject = new ObjectToStringArrayConvertor();
                        item.useBeanCopy = false;
                        itemList.add(item);
                    } else {
                        logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " +
                            targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                        throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    }
                }
            }
        } else if( propertyField!= null && propertyField.isAnnotationPresent(CopyCollection.class)){
            // --- Case 2: property annotated with @CopyCollection ------------
            CopyCollection copyAnnotation = propertyField.getAnnotation(CopyCollection.class);
            String annotationPropertyName = copyAnnotation.property();
            if( copyAnnotation.ignored() ) {
                continue;
            }
            if( !( annotationPropertyName == null || "".equals(annotationPropertyName) ) ) {
                if( beanAnnotationSource == null ) {
                    logger.warn("BeanCopy: " + targetClass.getName() + " has no BeanCopySource annotation, but " + propertyName + " has BeanProperty annotation with property defined");
                    throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " has no BeanCopySource annotation, but " + propertyName + " has BeanProperty annotation with property defined" );
                } else {
                    propertyName = annotationPropertyName;
                }
            }
            // Same dotted-path resolution as the @CopyProperty branch above.
            String[] propertyNames = null;
            if( propertyName.contains(".") ) {
                propertyNames = propertyName.split("\\.");
            } else {
                propertyNames = new String[1];
                propertyNames[0] = propertyName;
            }
            readMethods = new Method[propertyNames.length];
            methodSourceType = sourceClass;
            for( int i = 0; i< propertyNames.length ; i++ ) {
                PropertyDescriptor sourcePd = PropertyUtils.getPropertyDescriptor(methodSourceType, propertyNames[i] );
                if( sourcePd == null ) {
                    logger.error("BeanCopy: " + methodSourceType.getName() + " has no property "+ propertyNames[i] +" defined.!" );
                    throw new BeanCopyException("BeanCopy: " + methodSourceType.getName() + " has no property "+ propertyNames[i] +" defined.!"
                    );
                } else {
                    readMethods[i] = sourcePd.getReadMethod();
                }
                methodSourceType = readMethods[i].getReturnType();
            }
            methodSourceArray = methodSourceType;
            sourceIsArray = methodSourceArray.isArray();
            if( sourceIsArray ) {
                methodSourceType = methodSourceArray.getComponentType();
            }
            propertyOptionClass = copyAnnotation.optionClass();
            if( propertyOptionClass.equals(void.class) || propertyOptionClass.equals(Void.class) ) {
                propertyOptionClass = null;
            }
            // Both sides must actually be collections (or a source array).
            if( !PropertyUtils.isInterfaceType(methodSourceArray, Collection.class) && !methodSourceArray.isArray() ) {
                logger.warn("BeanCopy: " + sourceClass.getName() + " property " + propertyName + " is not collection type!");
                throw new BeanCopyException("BeanCopy: " + sourceClass.getName() + " property " + propertyName + " is not collection type!");
            }
            if( !PropertyUtils.isInterfaceType(methodTargetType, Collection.class) ) {
                logger.warn("BeanCopy: " + targetClass.getName() + " property " + propertyName + " is not collection type!");
                throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " is not collection type!");
            }
            isCollection = true;
            collectionClass = copyAnnotation.targetClass();
            if( methodSourceArray.isArray() ) {
                BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                item.propertyName = propertyName;
                item.writeMethod = writeMethod;
                item.readMethods = readMethods;
                item.isCollection = true;
                item.useBeanCopy = true;
                item.collectionClass = collectionClass;
                item.optionClass = propertyOptionClass;
                itemList.add(item);
            } else if( collectionClass.isEnum() ) {
                BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                item.propertyName = propertyName;
                item.writeMethod = writeMethod;
                item.readMethods = readMethods;
                item.isCollection = true;
                item.useBeanCopy = true;
                item.collectionClass = collectionClass;
                item.optionClass = propertyOptionClass;
                itemList.add(item);
            } else if( collectionClass.equals(String.class) && useObjectToStringCopy ){
                BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                item.propertyName = propertyName;
                item.writeMethod = writeMethod;
                item.readMethods = readMethods;
                item.isCollection = true;
                item.useBeanCopy = true;
                item.collectionClass = collectionClass;
                item.optionClass = propertyOptionClass;
                itemList.add(item);
            } else {
                BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                item.propertyName = propertyName;
                item.writeMethod = writeMethod;
                item.readMethods = readMethods;
                item.isCollection = true;
                item.useBeanCopy = false;
                item.collectionClass = collectionClass;
                item.optionClass = propertyOptionClass;
                itemList.add(item);
            }
        } else {
            //normal property.
            // --- Case 3: no annotation: match by name on the source class ---
            PropertyDescriptor sourcePd = PropertyUtils.getPropertyDescriptor(sourceClass, propertyName );
            if( sourcePd == null ) {
                continue;   // source has no such property: skip silently
            }
            Method readMethod = sourcePd.getReadMethod();
            if (readMethod == null ) {
                continue;   // source property not readable: skip silently
            }
            methodSourceType = readMethod.getReturnType();
            methodSourceArray = methodSourceType;
            sourceIsArray = methodSourceArray.isArray();
            if( sourceIsArray ) {
                methodSourceType = methodSourceArray.getComponentType();
            }
            readMethods = new Method[1];
            readMethods[0] = readMethod;
            if( (targetIsArray && !sourceIsArray) || (!targetIsArray && sourceIsArray) ) {
                logger.error("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Array type mismatch!!");
                throw new BeanCopyException("BeanCopy: " + targetClass.getName() + " property " + propertyName + " Array type mismatch!!");
            }
            if( !targetIsArray ) {
                if( PropertyUtils.isAssignable(methodTargetType, methodSourceType) ) {
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.useBeanCopy = false;
                    itemList.add(item);
//                    logger.debug("BeanCopy: Add Copy Item From " + sourceClass.getSimpleName() + "[" + propertyName+ "]" +
//                            " To " + targetClass.getSimpleName() + "[" + writeMethod.getName() + "]");
                } else if(methodTargetType.isAnnotationPresent(BeanCopySource.class) ) {
                    BeanCopySource source =
                        methodTargetType.getAnnotation(BeanCopySource.class);
                    Class<?> sourceClassFromAnnotation = source.source();
                    if( sourceClassFromAnnotation.equals(methodSourceType ) ) {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.useBeanCopy = false;
                        item.optionClass = methodTargetType;
                        itemList.add(item);
//                        logger.debug("BeanCopy: Add Copy Item From " + sourceClass.getSimpleName() + "[" + propertyName+ "]" +
//                                " To " + targetClass.getSimpleName() + "[" + writeMethod.getName() + "]");
                    } else {
                        logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                        throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    }
                } else {
                    logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                }
            }
            if( targetIsArray ) {
                if( methodSourceArray.equals(methodTargetArray) ) {
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.useBeanCopy = false;
                    itemList.add(item);
//                    logger.debug("BeanCopy: Add Copy Item From " + sourceClass.getSimpleName() + "[" + propertyName+ "]" +
//                            " To " + targetClass.getSimpleName() + "[" + writeMethod.getName() + "]");
                } else if( PropertyUtils.isAssignable(methodTargetType, methodSourceType) ) {
                    BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                    item.propertyName = propertyName;
                    item.writeMethod = writeMethod;
                    item.readMethods = readMethods;
                    item.isCollection = false;
                    item.useBeanCopy = false;
                    item.convertorObject = ArrayConvertorFactory.getArrayConvertor(methodSourceType, methodTargetType, null);
                    item.convertorClass = item.convertorObject.getClass();
                    itemList.add(item);
//                    logger.debug("BeanCopy: Add Copy Item From " + sourceClass.getSimpleName() + "[" + propertyName+ "]" +
//                            " To " + targetClass.getSimpleName() + "[" + writeMethod.getName() + "]");
                } else if( methodTargetType.isAnnotationPresent(BeanCopySource.class) ) {
                    BeanCopySource source = methodTargetType.getAnnotation(BeanCopySource.class);
                    Class<?> sourceClassFromAnnotation = source.source();
                    if( sourceClassFromAnnotation.equals(methodSourceType ) ) {
                        BeanCopyPropertyItem item = new BeanCopyPropertyItem();
                        item.propertyName = propertyName;
                        item.writeMethod = writeMethod;
                        item.readMethods = readMethods;
                        item.isCollection = false;
                        item.useBeanCopy = false;
                        item.convertorObject = ArrayConvertorFactory.getArrayConvertor(methodSourceType, methodTargetType, methodTargetType);
                        item.convertorClass = item.convertorObject.getClass();
                        itemList.add(item);
//                        logger.debug("BeanCopy: Add Copy Item From " + sourceClass.getSimpleName() + "[" + propertyName+ "]" +
//                                " To " + targetClass.getSimpleName() + "[" + writeMethod.getName() + "]");
                    } else {
                        logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                        throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+
                            propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    }
                } else {
                    logger.warn( "Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                    throw new BeanCopyException("Property parameter does not match: " + sourceClass.getName() + "["+ propertyName+ "(" + methodSourceType.getSimpleName() + ")] : " + targetClass.getName() + "[" + targetPd.getName() + "(" + methodTargetType.getSimpleName() + ")]");
                }
            }
        }
    }
    return itemList;
}
}
/*
 *
 * Derby - Class UpdateXXXTest
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package org.apache.derbyTesting.functionTests.tests.jdbcapi;

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import junit.framework.Test;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.BaseTestSuite;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;

/**
 * Tests updateXXX() methods on updatable resultsets.
 * This is done by creating a table which has n columns with
 * different SQL types. Then there is one testcase for each
 * updateXXX method, which calls updateXXX on all columns.
 */
final public class UpdateXXXTest extends BaseJDBCTestCase {
    /**
     * Constructor
     * @param name name of testcase. Should be the name of test method.
     */
    public UpdateXXXTest(final String name) {
        super(name);
    }

    /**
     * Run in both embedded and client.
     */
    public static Test suite() {
        BaseTestSuite suite = baseSuite("UpdateXXXTest");
        // Same tests again, but routed through the network client driver.
        suite.addTest(
            TestConfiguration.clientServerDecorator(
                baseSuite("UpdateXXXTest:client")));
        return suite;
    }

    /**
     * Base suite of tests that will run in both embedded and client.
     * @param name Name for the suite.
     */
    private static BaseTestSuite baseSuite(String name) {
        BaseTestSuite suite = new BaseTestSuite(name);
        suite.addTestSuite(UpdateXXXTest.class);
        // requires java.math.BigDecimal
        // (the BigDecimal case is added explicitly because its method name
        // does not start with "test", so addTestSuite() would not pick it up)
        if (JDBC.vmSupportsJDBC3())
            suite.addTest(new UpdateXXXTest("jdbc2testUpdateBigDecimal"));
        return suite;
    }

    /**
     * The setup creates a Connection to the database, and also
     * creates a table with one row. Then it creates an updatable
     * ResultSet which is positioned on the row.
     * @exception Exception any exception will cause test to fail with error.
     */
    // NOTE(review): TABLE_NAME, COLUMNS and SELECT_STMT are constants declared
    // elsewhere in this file (outside the visible chunk). Also note that stmt
    // and ps are not closed on the exception path — presumably acceptable here
    // because the connection is rolled back and torn down by the framework.
    public void setUp() throws  Exception {
        Connection con = getConnection();
        try {
            con.setAutoCommit(false);
            Statement stmt = con.createStatement();
            String createTableString = "CREATE TABLE " + TABLE_NAME + " (" +
                "F01 SMALLINT," +
                "F02 INTEGER," +
                "F03 BIGINT," +
                "F04 REAL," +
                "F05 FLOAT," +
                "F06 DOUBLE," +
                "F07 DECIMAL," +
                "F08 NUMERIC," +
                "F09 CHAR(100)," +
                "F10 VARCHAR(256) )";
            println(createTableString);
            stmt.executeUpdate(createTableString);
            PreparedStatement ps = con.prepareStatement
                ("insert into " + TABLE_NAME + " values(?,?,?,?,?,?,?,?,?,?)");
            ps.setShort(1, (short) 1);
            ps.setInt(2, 1);
            ps.setLong(3, 1L);
            ps.setFloat(4, 1.0f);
            ps.setDouble(5, 1.0);
            ps.setDouble(6, 1.0);
            // Use setString instead of setBigDecimal to
            // allow most of the test cases to run under J2ME
            ps.setString(7, "1");
            ps.setString(8, "1");
            ps.setString(9, "1");
            ps.setString(10, "1");
            ps.executeUpdate();
            ps.close();
            stmt.close();
        } catch (SQLException e) {
            con.rollback();
            throw e;
        }
    }

    /**
     * Tests calling updateString on all columns of the row.
     * @exception SQLException database access error. Causes test to
     *                         fail with an error.
*/ public void testUpdateString() throws SQLException { Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); ResultSet rs = s.executeQuery(SELECT_STMT); rs.next(); for (int i = 1; i <= COLUMNS; i++) { rs.updateString(i, "2"); assertEquals("Expected rs.getDouble(" + i + ") to match updated value", 2, (int) rs.getDouble(i)); } rs.updateRow(); rs.close(); checkColumnsAreUpdated(); s.close(); } /** * Tests calling updateInt on all columns of the row. * @exception SQLException database access error. Causes test to * fail with an error. */ public void testUpdateInt() throws SQLException { Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); ResultSet rs = s.executeQuery(SELECT_STMT); rs.next(); for (int i = 1; i <= COLUMNS; i++) { rs.updateInt(i, 2); assertEquals("Expected rs.getInt(" + i + ") to match updated value", 2, rs.getInt(i)); } rs.updateRow(); rs.close(); checkColumnsAreUpdated(); s.close(); } /** * Tests calling updateLong on all columns of the row. * @exception SQLException database access error. Causes test to * fail with an error. */ public void testUpdateLong() throws SQLException { Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE); ResultSet rs = s.executeQuery(SELECT_STMT); rs.next(); for (int i = 1; i <= COLUMNS; i++) { rs.updateLong(i, 2L); assertEquals("Expected rs.getLong(" + i + ") to match updated value", 2L, rs.getLong(i)); } rs.updateRow(); rs.close(); checkColumnsAreUpdated(); s.close(); } /** * Tests calling updateShort on all columns of the row. * @exception SQLException database access error. Causes test to * fail with an error. 
 */
public void testUpdateShort() throws SQLException {
    Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                  ResultSet.CONCUR_UPDATABLE);
    ResultSet rs = s.executeQuery(SELECT_STMT);
    rs.next();  // position on the single fixture row
    for (int i = 1; i <= COLUMNS; i++) {
        rs.updateShort(i, (short) 2);
        // cast to int: JUnit has no assertEquals overload taking short
        assertEquals("Expected rs.getShort(" + i +
                ") to match updated value", 2, (int) rs.getShort(i));
    }
    rs.updateRow();
    rs.close();
    checkColumnsAreUpdated();
    s.close();
}

/**
 * Tests calling updateFloat on all columns of the row.
 * @exception SQLException database access error. Causes test to
 *            fail with an error.
 */
public void testUpdateFloat() throws SQLException {
    Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                  ResultSet.CONCUR_UPDATABLE);
    ResultSet rs = s.executeQuery(SELECT_STMT);
    rs.next();
    for (int i = 1; i <= COLUMNS; i++) {
        rs.updateFloat(i, 2.0f);
        // cast to int: avoids assertEquals(float, float, delta) noise
        assertEquals("Expected rs.getFloat(" + i +
                ") to match updated value", 2, (int) rs.getFloat(i));
    }
    rs.updateRow();
    rs.close();
    checkColumnsAreUpdated();
    s.close();
}

/**
 * Tests calling updateDouble on all columns of the row.
 * @exception SQLException database access error. Causes test to
 *            fail with an error.
 */
public void testUpdateDouble() throws SQLException {
    Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                  ResultSet.CONCUR_UPDATABLE);
    ResultSet rs = s.executeQuery(SELECT_STMT);
    rs.next();
    for (int i = 1; i <= COLUMNS; i++) {
        rs.updateDouble(i, 2.0);
        assertEquals("Expected rs.getDouble(" + i +
                ") to match updated value", 2, (int) rs.getDouble(i));
    }
    rs.updateRow();
    rs.close();
    checkColumnsAreUpdated();
    s.close();
}

/**
 * Tests calling updateBigDecimal on all columns of the row.
 * Only run on JDBC 3 VMs (see baseSuite), since J2ME lacks BigDecimal.
 * @exception SQLException database access error. Causes test to
 *            fail with an error.
 */
public void jdbc2testUpdateBigDecimal() throws SQLException {
    Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                  ResultSet.CONCUR_UPDATABLE);
    ResultSet rs = s.executeQuery(SELECT_STMT);
    rs.next();  // position on the single fixture row
    for (int i = 1; i <= COLUMNS; i++) {
        rs.updateBigDecimal(i, BigDecimal.valueOf(2L));
        assertEquals("Expected rs.getBigDecimal(" + i +
                ") to match updated value", 2,
                rs.getBigDecimal(i).intValue());
    }
    rs.updateRow();
    rs.close();
    checkColumnsAreUpdated();
    s.close();
}

/**
 * Tests calling updateObject with a null value on all columns.
 * @exception SQLException database access error. Causes test to
 *            fail with an error.
 */
public void testUpdateObjectWithNull() throws SQLException {
    Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                  ResultSet.CONCUR_UPDATABLE);
    ResultSet rs = s.executeQuery(SELECT_STMT);
    rs.next();
    // explicit Object reference so updateObject(int, Object) is chosen
    Object value = null;
    for (int i = 1; i <= COLUMNS; i++) {
        rs.updateObject(i, value);
        assertNull("Expected rs.getObject(" + i + ") to be null",
                rs.getObject(i));
        assertTrue("Expected rs.wasNull() to return true",
                rs.wasNull());
    }
    rs.updateRow();
    rs.close();
    checkColumnsAreNull();
    s.close();
}

/**
 * Tests calling updateNull on all columns.
 * @exception SQLException database access error. Causes test to
 *            fail with an error.
 */
public void testUpdateNull() throws SQLException {
    Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                  ResultSet.CONCUR_UPDATABLE);
    ResultSet rs = s.executeQuery(SELECT_STMT);
    rs.next();
    for (int i = 1; i <= COLUMNS; i++) {
        rs.updateNull(i);
        assertNull("Expected rs.getObject(" + i + ") to be null",
                rs.getObject(i));
        assertTrue("Expected rs.wasNull() to return true",
                rs.wasNull());
    }
    rs.updateRow();
    rs.close();
    checkColumnsAreNull();
    s.close();
}

/**
 * Checks that the columns in the row are all SQL null.
 * @exception SQLException database access error. Causes test to
 *            fail with an error.
*/ private void checkColumnsAreNull() throws SQLException { Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); ResultSet rs = s.executeQuery(SELECT_STMT); rs.next(); for (int i = 1; i <= COLUMNS; i++) { assertNull("Expected column " + i + " to be null", rs.getObject(i)); assertTrue("Expected wasNull() after reading column " + i + " to be true when data is SQL Null on column", rs.wasNull()); } s.close(); } /** * Checks that the columns in the row are updated in the database. * Using a new ResultSet to do this check. * @exception SQLException database access error. Causes test to * fail with an error. */ private void checkColumnsAreUpdated() throws SQLException { Statement s = createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); ResultSet rs = s.executeQuery(SELECT_STMT); rs.next(); for (int i = 1; i <= COLUMNS; i++) { int expectedVal = 2; // Since rs.getInt(i) on CHAR/VARCHAR columns with value 2.0 gives: // "ERROR 22018: Invalid character string format for type int" // we use getDouble(i). We cast it to int, because there is not // assertEquals(..) methods which takes double. int actualVal = (int) rs.getDouble(i); assertEquals("Unexpected value from rs.getDouble( + " + i + ")", expectedVal, actualVal); } s.close(); } /* Table name */ private static final String TABLE_NAME = "MultiTypeTable"; /* SQL String for the SELECT statement */ private static final String SELECT_STMT = "SELECT * FROM " + TABLE_NAME; /* Number of columns in table */ private static final int COLUMNS = 10; }
package liquibase.sqlgenerator.core;

import static org.junit.Assert.assertEquals;

import java.math.BigInteger;

import liquibase.change.ColumnConfig;
import liquibase.database.Database;
import liquibase.database.core.DB2Database;
import liquibase.database.core.DerbyDatabase;
import liquibase.database.core.H2Database;
import liquibase.database.core.HsqlDatabase;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.MySQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.database.core.PostgresDatabase;
import liquibase.database.core.SQLiteDatabase;
import liquibase.database.core.SybaseASADatabase;
import liquibase.database.core.SybaseDatabase;
import liquibase.datatype.DataTypeFactory;
import liquibase.datatype.core.IntType;
import liquibase.sql.Sql;
import liquibase.sqlgenerator.AbstractSqlGeneratorTest;
import liquibase.statement.AutoIncrementConstraint;
import liquibase.statement.ForeignKeyConstraint;
import liquibase.statement.core.CreateTableStatement;
import liquibase.test.TestContext;

import org.junit.Test;

/**
 * Tests the SQL produced by {@link CreateTableGenerator} across the
 * database dialects registered with {@link TestContext}.
 */
public class CreateTableGeneratorTest extends AbstractSqlGeneratorTest<CreateTableStatement> {

    // Fixture identifiers used to build statements and expected SQL.
    protected static final String TABLE_NAME = "TABLE_NAME";
    protected static final String CATALOG_NAME = "CATALOG_NAME";
    protected static final String SCHEMA_NAME = "SCHEMA_NAME";
    protected static final String COLUMN_NAME1 = "COLUMN1_NAME";

    public CreateTableGeneratorTest() throws Exception {
        super(new CreateTableGenerator());
    }

    /**
     * Minimal single-column statement used by the generic generator
     * tests in the superclass.
     */
    @Override
    protected CreateTableStatement createSampleSqlStatement() {
        CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME);
        statement.addColumn(COLUMN_NAME1, new IntType());
        return statement;
    }

    /**
     * Verifies that an explicit default value of "null" survives into the
     * generated DDL (Oracle only).
     */
    @Test
    public void testWithColumnWithDefaultValue() {
        for (Database database : TestContext.getInstance().getAllDatabases()) {
            if (database instanceof OracleDatabase) {
                CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME);
                statement.addColumn(
                        COLUMN_NAME1,
                        DataTypeFactory.getInstance().fromDescription("java.sql.Types.TIMESTAMP"),
                        new ColumnConfig().setDefaultValue("null").getDefaultValueObject());
                // shouldBeImplementation() is inherited — presumably filters
                // generators per database; confirm in AbstractSqlGeneratorTest.
                if (shouldBeImplementation(database)) {
                    assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME TIMESTAMP DEFAULT null)",
                            this.generatorUnderTest.generateSql(statement, database, null)[0].toSql());
                }
            }
        }
    }

    @Test
    public void testWithColumnSpecificIntType() {
        // NOTE(review): this test builds a statement but asserts nothing,
        // so it only proves addColumn() does not throw. It should probably
        // call generateSql() and assert the rendered type — confirm intent.
        for (Database database : TestContext.getInstance().getAllDatabases()) {
            CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME);
            statement.addColumn(COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("int(11) unsigned"));
        }
    }

//    @Test
//    public void createTable_standard() throws Exception {
//        new DatabaseTestTemplate().testOnAvailableDatabases(
//            new SqlStatementDatabaseTest(null, new CreateTableStatement(null, TABLE_NAME)
//                .addPrimaryKeyColumn("id", "int", null, null)
//                .addColumn("name", "varchar(255)")
//                .addColumn("username", "varchar(255)", "'NEWUSER'")) {
//
//                protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) {
//                    assertNull(snapshot.getTable(TABLE_NAME));
//                }
//
//                protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) {
//                    Table table = snapshot.getTable(TABLE_NAME);
//                    assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase());
//                    assertNotNull(table.getColumn("id"));
//                    assertNotNull(table.getColumn("name"));
//                    assertNotNull(table.getColumn("username"));
//
//                    assertTrue(table.getColumn("id").isPrimaryKey());
//
//                    assertNull(table.getColumn("name").getDefaultValue());
//                    assertTrue(table.getColumn("username").getDefaultValue().toString().indexOf("NEWUSER") >= 0);
//
//                    assertFalse(table.getColumn("id").isAutoIncrement());
//                }
//
//            });
//    }
//
//    @Test
//    public void createTable_autoincrementPK() throws Exception {
//        new DatabaseTestTemplate().testOnAvailableDatabases(
//            new SqlStatementDatabaseTest(null, new
CreateTableStatement(null, TABLE_NAME) // .addPrimaryKeyColumn("id", "int",null, null) // .addColumn("name", "varchar(255)") // .addColumn("username", "varchar(255)", "'NEWUSER'") // .addColumnConstraint(new AutoIncrementConstraint("id"))) { // // protected boolean supportsTest(Database database) { // return database.supportsAutoIncrement(); // } // // protected boolean expectedException(Database database, DatabaseException exception) { // return !database.supportsAutoIncrement(); // } // // protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) { // assertNull(snapshot.getTable(TABLE_NAME)); // } // // protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) { // Table table = snapshot.getTable(TABLE_NAME); // assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase()); // assertNotNull(table.getColumn("id")); // assertTrue(table.getColumn("id").isPrimaryKey()); // assertTrue(table.getColumn("id").isAutoIncrement()); // } // }); // } // // @Test // public void createTable_foreignKeyColumn() throws Exception { // final String foreignKeyName = "fk_test_parent"; // new DatabaseTestTemplate().testOnAvailableDatabases( // new SqlStatementDatabaseTest(null, new CreateTableStatement(null, TABLE_NAME) // .addPrimaryKeyColumn("id", "int", null, null) // .addColumn("name", "varchar(255)") // .addColumn("parent_id", "int", new ForeignKeyConstraint(foreignKeyName, TABLE_NAME + "(id)"))) { // // protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) { // assertNull(snapshot.getTable(TABLE_NAME)); // } // // protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) { // Table table = snapshot.getTable(TABLE_NAME); // assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase()); // assertNotNull(table.getColumn("id")); // // ForeignKey foundForeignKey = snapshot.getForeignKey(foreignKeyName); // assertNotNull(foundForeignKey); // assertEquals(TABLE_NAME, 
foundForeignKey.getPrimaryKeyTable().getName().toUpperCase()); // assertEquals("ID", foundForeignKey.getPrimaryKeyColumns().toUpperCase()); // assertEquals(TABLE_NAME, foundForeignKey.getForeignKeyTable().getName().toUpperCase()); // assertEquals("PARENT_ID", foundForeignKey.getForeignKeyColumns().toUpperCase()); // // } // // }); // } // // @Test // public void createTable_deferrableForeignKeyColumn() throws Exception { // final String foreignKeyName = "fk_test_parent"; // // new DatabaseTestTemplate().testOnAvailableDatabases( // new SqlStatementDatabaseTest(null, new CreateTableStatement(null, TABLE_NAME) // .addPrimaryKeyColumn("id", "int", null, null) // .addColumn("name", "varchar(255)") // .addColumn("parent_id", "int", // new ForeignKeyConstraint(foreignKeyName, TABLE_NAME + "(id)") // .setDeferrable(true) // .setInitiallyDeferred(true))) { // // protected boolean expectedException(Database database, DatabaseException exception) { // return !database.supportsInitiallyDeferrableColumns(); // } // // protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) { // assertNull(snapshot.getTable(TABLE_NAME)); // } // // protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) { // Table table = snapshot.getTable(TABLE_NAME); // assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase()); // assertNotNull(table.getColumn("id")); // // ForeignKey foundForeignKey = snapshot.getForeignKey(foreignKeyName); // assertNotNull(foundForeignKey); // assertEquals(TABLE_NAME, foundForeignKey.getPrimaryKeyTable().getName().toUpperCase()); // assertEquals("ID", foundForeignKey.getPrimaryKeyColumns().toUpperCase()); // assertEquals(TABLE_NAME, foundForeignKey.getForeignKeyTable().getName().toUpperCase()); // assertEquals("PARENT_ID", foundForeignKey.getForeignKeyColumns().toUpperCase()); // assertTrue(foundForeignKey.isDeferrable()); // assertTrue(foundForeignKey.isInitiallyDeferred()); // } // // }); // } // // @Test // public void 
createTable_deleteCascadeForeignKeyColumn() throws Exception { // final String foreignKeyName = "fk_test_parent"; // // new DatabaseTestTemplate().testOnAvailableDatabases( // new SqlStatementDatabaseTest(null, new CreateTableStatement(null, TABLE_NAME) // .addPrimaryKeyColumn("id", "int", null, null) // .addColumn("name", "varchar(255)") // .addColumn("parent_id", "int", new ForeignKeyConstraint(foreignKeyName, FK_TABLE_NAME + "(id)").setDeleteCascade(true))) { // // protected void setup(Database database) throws Exception { // new Executor(database).execute(new CreateTableStatement(null, FK_TABLE_NAME) // .addPrimaryKeyColumn("id", "int",null, null) // .addColumn("name", "varchar(255)")); // super.setup(database); // } // // protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) { // assertNull(snapshot.getTable(TABLE_NAME)); // } // // protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) { // Table table = snapshot.getTable(TABLE_NAME); // assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase()); // assertNotNull(table.getColumn("id")); // // ForeignKey foundForeignKey = snapshot.getForeignKey(foreignKeyName); // assertNotNull(foundForeignKey); // assertEquals(FK_TABLE_NAME, foundForeignKey.getPrimaryKeyTable().getName().toUpperCase()); // assertEquals("ID", foundForeignKey.getPrimaryKeyColumns().toUpperCase()); // assertEquals(TABLE_NAME, foundForeignKey.getForeignKeyTable().getName().toUpperCase()); // assertEquals("PARENT_ID", foundForeignKey.getForeignKeyColumns().toUpperCase()); // //TODO: test when tested by diff assertTrue(foundForeignKey.isDeleteCascade()); // } // // }); // } // // @Test // public void createTable_uniqueColumn() throws Exception { // new DatabaseTestTemplate().testOnAvailableDatabases( // new SqlStatementDatabaseTest(null, new CreateTableStatement(null, TABLE_NAME) // .addPrimaryKeyColumn("id", "int",null, null) // .addColumn("name", "varchar(255)") // .addColumn("username", "int", new 
UniqueConstraint("UQ_TESTCT_ID"), new NotNullConstraint())) { // // protected boolean expectedException(Database database) { // return !(database instanceof HsqlDatabase) || database instanceof H2Database; // } // // protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) { // assertNull(snapshot.getTable(TABLE_NAME)); // } // // protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) { // Table table = snapshot.getTable(TABLE_NAME); // assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase()); // assertNotNull(table.getColumn("id")); // // //todo: actually test for uniqueness when diff can check for it assertTrue(table.getColumn("username").isUnique()); // } // // }); // } // // @Test // public void addPrimaryKeyColumn_oneColumn() { // CreateTableStatement statement = new CreateTableStatement(null, "tableName"); // statement.addPrimaryKeyColumn("id", "int", null, null); // // assertEquals(1, statement.getPrimaryKeyConstraint().getColumns().size()); // } // // @Test // public void addPrimaryKeyColumn_multiColumn() { // CreateTableStatement statement = new CreateTableStatement(null, "tableName"); // statement.addPrimaryKeyColumn("id1", "int", null, null); // statement.addPrimaryKeyColumn("id2", "int", null, null); // // assertEquals(2, statement.getPrimaryKeyConstraint().getColumns().size()); // } // // @Test // public void addColumnConstraint_notNullConstraint() { // CreateTableStatement statement = new CreateTableStatement(null, "tableName"); // statement.addColumn("id", "int"); // // assertFalse(statement.getNotNullColumns().contains("id")); // // statement.addColumnConstraint(new NotNullConstraint("id")); // // assertTrue(statement.getNotNullColumns().contains("id")); // } // // @Test // public void addColumnConstraint_ForeignKeyConstraint() { // CreateTableStatement statement = new CreateTableStatement(null, "tableName"); // statement.addColumn("id", "int"); // // assertEquals(0, statement.getForeignKeyConstraints().size()); 
// // statement.addColumnConstraint(new ForeignKeyConstraint("fk_test", "fkTable(id)").setColumns("id")); // // assertEquals(1, statement.getForeignKeyConstraints().size()); // assertEquals("fk_test", statement.getForeignKeyConstraints().iterator().next().getForeignKeyName()); // } // // @Test // public void addColumnConstraint_UniqueConstraint() { // CreateTableStatement statement = new CreateTableStatement(null, "tableName"); // statement.addColumn("id", "int"); // // assertEquals(0, statement.getUniqueConstraints().size()); // // statement.addColumnConstraint(new UniqueConstraint("uq_test").addColumns("id")); // // assertEquals(1, statement.getUniqueConstraints().size()); // assertEquals("uq_test", statement.getUniqueConstraints().iterator().next().getConstraintName()); // } // // @Test // public void createTable_tablespace() throws Exception { // new DatabaseTestTemplate().testOnAvailableDatabases( // new SqlStatementDatabaseTest(null, new CreateTableStatement(null, TABLE_NAME) // .addPrimaryKeyColumn("id", "int", null, null) // .addColumn("name", "varchar(255)") // .addColumn("username", "varchar(255)", "'NEWUSER'") // .setTablespace("liquibase2")) { // // protected boolean expectedException(Database database, DatabaseException exception) { // return !database.supportsTablespaces(); // } // // protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) { // assertNull(snapshot.getTable(TABLE_NAME)); // } // // protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) { // Table table = snapshot.getTable(TABLE_NAME); // assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase()); // // //todo: test that tablespace is correct when diff returns it // } // }); // } // // @Test // public void createTable_altSchema() throws Exception { // new DatabaseTestTemplate().testOnAvailableDatabases( // new SqlStatementDatabaseTest(TestContext.ALT_SCHEMA, new CreateTableStatement(TestContext.ALT_SCHEMA, TABLE_NAME) // .addPrimaryKeyColumn("id", 
"int", null, null) // .addColumn("name", "varchar(255)") // .addColumn("username", "varchar(255)", "'NEWUSER'")) { // // protected void preExecuteAssert(DatabaseSnapshotGenerator snapshot) { // assertNull(snapshot.getTable(TABLE_NAME)); // } // // protected void postExecuteAssert(DatabaseSnapshotGenerator snapshot) { // Table table = snapshot.getTable(TABLE_NAME); // assertNotNull(table); // assertEquals(TABLE_NAME.toUpperCase(), table.getName().toUpperCase()); // } // }); // } @Test public void testAutoIncrementDB2Database() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof DB2Database) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithDB2Database() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof DB2Database) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 0))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementByDB2Database() throws Exception { for (Database database : 
TestContext.getInstance().getAllDatabases()) { if (database instanceof DB2Database) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 0, INCREMENT BY 10))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementDerbyDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof DerbyDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithDerbyDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof DerbyDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 0))", generatedSql[0].toSql()); } 
} } @Test public void testAutoIncrementStartWithIncrementByDerbyDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof DerbyDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 0, INCREMENT BY 10))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementH2Database() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof H2Database) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTO_INCREMENT)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithH2Database() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof H2Database) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME 
(COLUMN1_NAME BIGINT AUTO_INCREMENT (START WITH 0))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementByH2Database() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof H2Database) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTO_INCREMENT (START WITH 0, INCREMENT BY 10))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementHsqlDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof HsqlDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithHsqlDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof HsqlDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ONE, null) ); Sql[] generatedSql = 
this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementByHsqlDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof HsqlDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ONE, BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1, INCREMENT BY 10))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementMSSQLDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof MSSQLDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("Error on "+database, "CREATE TABLE [SCHEMA_NAME].[TABLE_NAME] ([COLUMN1_NAME] BIGINT IDENTITY (1, 1))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithMSSQLDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof MSSQLDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, 
DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE [SCHEMA_NAME].[TABLE_NAME] ([COLUMN1_NAME] BIGINT IDENTITY (0, 1))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementByMSSQLDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof MSSQLDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE [SCHEMA_NAME].[TABLE_NAME] ([COLUMN1_NAME] BIGINT IDENTITY (0, 10))", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementMySQLDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof MySQLDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTO_INCREMENT NULL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithMySQLDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof MySQLDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, 
TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTO_INCREMENT NULL) AUTO_INCREMENT=2", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementByMySQLDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof MySQLDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // increment by not supported by MySQL assertEquals("CREATE TABLE CATALOG_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTO_INCREMENT NULL) AUTO_INCREMENT=2", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementPostgresDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof PostgresDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGSERIAL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithPostgresDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database 
instanceof PostgresDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with supported over generated sequence assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGSERIAL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementByPostgresDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof PostgresDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.ZERO, BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with and increment by supported over generated sequence assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGSERIAL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementSQLiteDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SQLiteDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTOINCREMENT)", generatedSql[0].toSql()); } } } @Test public void 
testAutoIncrementStartWithSQLiteDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SQLiteDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with not supported by SQLlite assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTOINCREMENT)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementBySQLiteDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SQLiteDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with and increment by not supported by SQLite assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT AUTOINCREMENT)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementSybaseASADatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SybaseASADatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); 
assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT DEFAULT AUTOINCREMENT NULL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithSybaseASADatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SybaseASADatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with not supported by SybaseASA assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT DEFAULT AUTOINCREMENT NULL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementBySybaseASADatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SybaseASADatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with and increment by not supported by SybaseASA assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (COLUMN1_NAME BIGINT DEFAULT AUTOINCREMENT NULL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementSybaseDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SybaseDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, 
DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("Error with "+database, "CREATE TABLE [SCHEMA_NAME].[TABLE_NAME] ([COLUMN1_NAME] BIGINT IDENTITY NULL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithSybaseDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SybaseDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), null) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with not supported by Sybase assertEquals("CREATE TABLE [SCHEMA_NAME].[TABLE_NAME] ([COLUMN1_NAME] BIGINT IDENTITY NULL)", generatedSql[0].toSql()); } } } @Test public void testAutoIncrementStartWithIncrementBySybaseDatabase() throws Exception { for (Database database : TestContext.getInstance().getAllDatabases()) { if (database instanceof SybaseDatabase) { CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumn( COLUMN_NAME1, DataTypeFactory.getInstance().fromDescription("BIGINT{autoIncrement:true}"), new AutoIncrementConstraint(COLUMN_NAME1, BigInteger.valueOf(2), BigInteger.TEN) ); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); // start with and increment by not supported by Sybase assertEquals("CREATE TABLE [SCHEMA_NAME].[TABLE_NAME] ([COLUMN1_NAME] BIGINT IDENTITY NULL)", generatedSql[0].toSql()); } } } @Test public void createReferencesSchemaEscaped() throws Exception { Database database = new PostgresDatabase(); 
database.setOutputDefaultSchema(true); database.setDefaultSchemaName("my-schema"); CreateTableStatement statement = new CreateTableStatement(CATALOG_NAME, SCHEMA_NAME, TABLE_NAME); statement.addColumnConstraint(new ForeignKeyConstraint("fk_test_parent", TABLE_NAME + "(id)").setColumn("id")); Sql[] generatedSql = this.generatorUnderTest.generateSql(statement, database, null); assertEquals("CREATE TABLE SCHEMA_NAME.TABLE_NAME (, CONSTRAINT fk_test_parent FOREIGN KEY (id) REFERENCES my-schema.TABLE_NAME(id))", generatedSql[0].toSql()); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator.project; import com.facebook.presto.memory.context.AggregatedMemoryContext; import com.facebook.presto.memory.context.LocalMemoryContext; import com.facebook.presto.metadata.MetadataManager; import com.facebook.presto.operator.CompletedWork; import com.facebook.presto.operator.DriverYieldSignal; import com.facebook.presto.operator.Work; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.block.LazyBlock; import com.facebook.presto.spi.block.VariableWidthBlock; import com.facebook.presto.spi.relation.CallExpression; import com.facebook.presto.spi.type.Type; import com.facebook.presto.sql.gen.ExpressionProfiler; import com.facebook.presto.sql.gen.PageFunctionCompiler; import com.google.common.collect.ImmutableList; import io.airlift.slice.Slice; import io.airlift.slice.Slices; import io.airlift.testing.TestingTicker; import io.airlift.units.Duration; import org.openjdk.jol.info.ClassLayout; import org.testng.annotations.Test; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.OptionalInt; import java.util.concurrent.ScheduledExecutorService; import java.util.function.Supplier; import static com.facebook.presto.block.BlockAssertions.createLongSequenceBlock; import static 
com.facebook.presto.block.BlockAssertions.createSlicesBlock; import static com.facebook.presto.block.BlockAssertions.createStringsBlock; import static com.facebook.presto.execution.executor.PrioritizedSplitRunner.SPLIT_RUN_QUANTA; import static com.facebook.presto.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext; import static com.facebook.presto.metadata.MetadataManager.createTestMetadataManager; import static com.facebook.presto.operator.PageAssertions.assertPageEquals; import static com.facebook.presto.operator.project.PageProcessor.MAX_BATCH_SIZE; import static com.facebook.presto.operator.project.PageProcessor.MAX_PAGE_SIZE_IN_BYTES; import static com.facebook.presto.operator.project.PageProcessor.MIN_PAGE_SIZE_IN_BYTES; import static com.facebook.presto.operator.project.SelectedPositions.positionsRange; import static com.facebook.presto.spi.function.OperatorType.ADD; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.VarcharType.VARCHAR; import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypes; import static com.facebook.presto.sql.relational.Expressions.call; import static com.facebook.presto.sql.relational.Expressions.constant; import static com.facebook.presto.sql.relational.Expressions.field; import static com.facebook.presto.testing.TestingConnectorSession.SESSION; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static java.lang.String.join; import static java.util.Collections.nCopies; import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.NANOSECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertNull; import static 
org.testng.Assert.assertTrue; public class TestPageProcessor { private final ScheduledExecutorService executor = newSingleThreadScheduledExecutor(daemonThreadsNamed("test-%s")); @Test public void testProjectNoColumns() { PageProcessor pageProcessor = new PageProcessor(Optional.empty(), ImmutableList.of(), OptionalInt.of(MAX_BATCH_SIZE)); Page inputPage = new Page(createLongSequenceBlock(0, 100)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, inputPage); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 1); Page outputPage = outputPages.get(0).orElse(null); assertEquals(outputPage.getChannelCount(), 0); assertEquals(outputPage.getPositionCount(), inputPage.getPositionCount()); } @Test public void testFilterNoColumns() { PageProcessor pageProcessor = new PageProcessor(Optional.of(new TestingPageFilter(positionsRange(0, 50))), ImmutableList.of()); Page inputPage = new Page(createLongSequenceBlock(0, 100)); LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()); Iterator<Optional<Page>> output = pageProcessor.process(SESSION, new DriverYieldSignal(), memoryContext, inputPage); assertEquals(memoryContext.getBytes(), 0); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 1); Page outputPage = outputPages.get(0).orElse(null); assertEquals(outputPage.getChannelCount(), 0); assertEquals(outputPage.getPositionCount(), 50); } @Test public void testPartialFilter() { PageProcessor pageProcessor = new PageProcessor( Optional.of(new TestingPageFilter(positionsRange(25, 50))), ImmutableList.of(new InputPageProjection(0, BIGINT)), OptionalInt.of(MAX_BATCH_SIZE)); Page inputPage = new Page(createLongSequenceBlock(0, 100)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, inputPage); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); 
assertEquals(outputPages.size(), 1); assertPageEquals(ImmutableList.of(BIGINT), outputPages.get(0).orElse(null), new Page(createLongSequenceBlock(25, 75))); } @Test public void testSelectAllFilter() { PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectAllFilter()), ImmutableList.of(new InputPageProjection(0, BIGINT)), OptionalInt.of(MAX_BATCH_SIZE)); Page inputPage = new Page(createLongSequenceBlock(0, 100)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, inputPage); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 1); assertPageEquals(ImmutableList.of(BIGINT), outputPages.get(0).orElse(null), new Page(createLongSequenceBlock(0, 100))); } @Test public void testSelectNoneFilter() { PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectNoneFilter()), ImmutableList.of(new InputPageProjection(0, BIGINT))); Page inputPage = new Page(createLongSequenceBlock(0, 100)); LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()); Iterator<Optional<Page>> output = pageProcessor.process(SESSION, new DriverYieldSignal(), memoryContext, inputPage); assertEquals(memoryContext.getBytes(), 0); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 0); } @Test public void testProjectEmptyPage() { PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectAllFilter()), ImmutableList.of(new InputPageProjection(0, BIGINT))); Page inputPage = new Page(createLongSequenceBlock(0, 0)); LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()); Iterator<Optional<Page>> output = pageProcessor.process(SESSION, new DriverYieldSignal(), memoryContext, inputPage); assertEquals(memoryContext.getBytes(), 0); // output should be one page containing no columns (only a count) 
List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 0); } @Test public void testSelectNoneFilterLazyLoad() { PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectNoneFilter()), ImmutableList.of(new InputPageProjection(1, BIGINT))); // if channel 1 is loaded, test will fail Page inputPage = new Page(createLongSequenceBlock(0, 100), new LazyBlock(100, lazyBlock -> { throw new AssertionError("Lazy block should not be loaded"); })); LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()); Iterator<Optional<Page>> output = pageProcessor.process(SESSION, new DriverYieldSignal(), memoryContext, inputPage); assertEquals(memoryContext.getBytes(), 0); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 0); } @Test public void testProjectLazyLoad() { PageProcessor pageProcessor = new PageProcessor(Optional.of(new SelectAllFilter()), ImmutableList.of(new LazyPagePageProjection()), OptionalInt.of(MAX_BATCH_SIZE)); // if channel 1 is loaded, test will fail Page inputPage = new Page(createLongSequenceBlock(0, 100), new LazyBlock(100, lazyBlock -> { throw new AssertionError("Lazy block should not be loaded"); })); LocalMemoryContext memoryContext = newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()); Iterator<Optional<Page>> output = pageProcessor.process(SESSION, new DriverYieldSignal(), memoryContext, inputPage); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 1); assertPageEquals(ImmutableList.of(BIGINT), outputPages.get(0).orElse(null), new Page(createLongSequenceBlock(0, 100))); } @Test public void testBatchedOutput() { PageProcessor pageProcessor = new PageProcessor(Optional.empty(), ImmutableList.of(new InputPageProjection(0, BIGINT)), OptionalInt.of(MAX_BATCH_SIZE)); Page inputPage = new 
Page(createLongSequenceBlock(0, (int) (MAX_BATCH_SIZE * 2.5))); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, inputPage); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); assertEquals(outputPages.size(), 3); for (int i = 0; i < outputPages.size(); i++) { Page actualPage = outputPages.get(i).orElse(null); int offset = i * MAX_BATCH_SIZE; Page expectedPage = new Page(createLongSequenceBlock(offset, offset + Math.min(inputPage.getPositionCount() - offset, MAX_BATCH_SIZE))); assertPageEquals(ImmutableList.of(BIGINT), actualPage, expectedPage); } } @Test public void testAdaptiveBatchSize() { PageProcessor pageProcessor = new PageProcessor(Optional.empty(), ImmutableList.of(new InputPageProjection(0, VARCHAR)), OptionalInt.of(MAX_BATCH_SIZE)); // process large page which will reduce batch size Slice[] slices = new Slice[(int) (MAX_BATCH_SIZE * 2.5)]; Arrays.fill(slices, Slices.allocate(1024)); Page inputPage = new Page(createSlicesBlock(slices)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, new DriverYieldSignal(), inputPage); List<Optional<Page>> outputPages = ImmutableList.copyOf(output); int batchSize = MAX_BATCH_SIZE; for (Optional<Page> actualPage : outputPages) { Page expectedPage = new Page(createSlicesBlock(Arrays.copyOfRange(slices, 0, batchSize))); assertPageEquals(ImmutableList.of(VARCHAR), actualPage.orElse(null), expectedPage); if (actualPage.orElseThrow(() -> new AssertionError("page is not present")).getSizeInBytes() > MAX_PAGE_SIZE_IN_BYTES) { batchSize = batchSize / 2; } } // process small page which will increase batch size Arrays.fill(slices, Slices.allocate(128)); inputPage = new Page(createSlicesBlock(slices)); output = processAndAssertRetainedPageSize(pageProcessor, new DriverYieldSignal(), inputPage); outputPages = ImmutableList.copyOf(output); int offset = 0; for (Optional<Page> actualPage : outputPages) { Page expectedPage = new 
Page(createSlicesBlock(Arrays.copyOfRange(slices, 0, Math.min(inputPage.getPositionCount() - offset, batchSize)))); assertPageEquals(ImmutableList.of(VARCHAR), actualPage.orElse(null), expectedPage); offset += actualPage.orElseThrow(() -> new AssertionError("page is not present")).getPositionCount(); if (actualPage.orElseThrow(() -> new AssertionError("page is not present")).getSizeInBytes() < MIN_PAGE_SIZE_IN_BYTES) { batchSize = batchSize * 2; } } } @Test public void testOptimisticProcessing() { InvocationCountPageProjection firstProjection = new InvocationCountPageProjection(new InputPageProjection(0, VARCHAR)); InvocationCountPageProjection secondProjection = new InvocationCountPageProjection(new InputPageProjection(0, VARCHAR)); PageProcessor pageProcessor = new PageProcessor(Optional.empty(), ImmutableList.of(firstProjection, secondProjection), OptionalInt.of(MAX_BATCH_SIZE)); // process large page which will reduce batch size Slice[] slices = new Slice[(int) (MAX_BATCH_SIZE * 2.5)]; Arrays.fill(slices, Slices.allocate(1024)); Page inputPage = new Page(createSlicesBlock(slices)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, inputPage); // batch size will be reduced before the first page is produced until the first block is within the page size bounds int batchSize = MAX_BATCH_SIZE; while (inputPage.getBlock(0).getRegionSizeInBytes(0, batchSize) > MAX_PAGE_SIZE_IN_BYTES) { batchSize /= 2; } int pageCount = 0; while (output.hasNext()) { Page actualPage = output.next().orElse(null); Block sliceBlock = createSlicesBlock(Arrays.copyOfRange(slices, 0, batchSize)); Page expectedPage = new Page(sliceBlock, sliceBlock); assertPageEquals(ImmutableList.of(VARCHAR, VARCHAR), actualPage, expectedPage); pageCount++; // batch size will be further reduced to fit within the bounds if (actualPage.getSizeInBytes() > MAX_PAGE_SIZE_IN_BYTES) { batchSize = batchSize / 2; } } // second project is invoked once per output page 
assertEquals(secondProjection.getInvocationCount(), pageCount); // the page processor saves the results when the page size is exceeded, so the first projection // will be invoked less times assertTrue(firstProjection.getInvocationCount() < secondProjection.getInvocationCount()); } @Test public void testRetainedSize() { PageProcessor pageProcessor = new PageProcessor( Optional.of(new SelectAllFilter()), ImmutableList.of(new InputPageProjection(0, VARCHAR), new InputPageProjection(1, VARCHAR)), OptionalInt.of(MAX_BATCH_SIZE)); // create 2 columns X 800 rows of strings with each string's size = 10KB // this can force previouslyComputedResults to be saved given the page is 16MB in size String value = join("", nCopies(10_000, "a")); List<String> values = nCopies(800, value); Page inputPage = new Page(createStringsBlock(values), createStringsBlock(values)); AggregatedMemoryContext memoryContext = newSimpleAggregatedMemoryContext(); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, new DriverYieldSignal(), memoryContext, inputPage); // force a compute // one block of previouslyComputedResults will be saved given the first column is with 8MB output.hasNext(); // verify we do not count block sizes twice // comparing with the input page, the output page also contains an extra instance size for previouslyComputedResults assertEquals(memoryContext.getBytes() - ClassLayout.parseClass(VariableWidthBlock.class).instanceSize(), inputPage.getRetainedSizeInBytes()); } @Test public void testYieldProjection() { // each projection can finish without yield // while between two projections, there is a yield int rows = 128; int columns = 20; DriverYieldSignal yieldSignal = new DriverYieldSignal(); PageProcessor pageProcessor = new PageProcessor( Optional.empty(), Collections.nCopies(columns, new YieldPageProjection(new InputPageProjection(0, VARCHAR))), OptionalInt.of(MAX_BATCH_SIZE)); Slice[] slices = new Slice[rows]; Arrays.fill(slices, 
Slices.allocate(rows)); Page inputPage = new Page(createSlicesBlock(slices)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, yieldSignal, inputPage); // Test yield signal works for page processor. // The purpose of this test is NOT to test the yield signal in page projection; we have other tests to cover that. // In page processor, we check yield signal after a column has been completely processed. // So we would like to set yield signal when the column has just finished processing in order to let page processor capture the yield signal when the block is returned. // Also, we would like to reset the yield signal before starting to process the next column in order NOT to yield per position inside the column. for (int i = 0; i < columns - 1; i++) { assertTrue(output.hasNext()); assertNull(output.next().orElse(null)); assertTrue(yieldSignal.isSet()); yieldSignal.reset(); } assertTrue(output.hasNext()); Page actualPage = output.next().orElse(null); assertNotNull(actualPage); assertTrue(yieldSignal.isSet()); yieldSignal.reset(); Block[] blocks = new Block[columns]; Arrays.fill(blocks, createSlicesBlock(Arrays.copyOfRange(slices, 0, rows))); Page expectedPage = new Page(blocks); assertPageEquals(Collections.nCopies(columns, VARCHAR), actualPage, expectedPage); assertFalse(output.hasNext()); } @Test public void testExpressionProfiler() { MetadataManager metadata = createTestMetadataManager(); CallExpression add10Expression = call( ADD.name(), metadata.getFunctionManager().resolveOperator(ADD, fromTypes(BIGINT, BIGINT)), BIGINT, field(0, BIGINT), constant(10L, BIGINT)); TestingTicker testingTicker = new TestingTicker(); PageFunctionCompiler functionCompiler = new PageFunctionCompiler(metadata, 0); Supplier<PageProjection> projectionSupplier = functionCompiler.compileProjection(add10Expression, Optional.empty()); PageProjection projection = projectionSupplier.get(); Page page = new Page(createLongSequenceBlock(1, 11)); ExpressionProfiler 
profiler = new ExpressionProfiler(testingTicker, SPLIT_RUN_QUANTA); for (int i = 0; i < 100; i++) { profiler.start(); Work<Block> work = projection.project(SESSION, new DriverYieldSignal(), page, SelectedPositions.positionsRange(0, page.getPositionCount())); if (i < 10) { // increment the ticker with a large value to mark the expression as expensive testingTicker.increment(10, SECONDS); profiler.stop(page.getPositionCount()); assertTrue(profiler.isExpressionExpensive()); } else { testingTicker.increment(0, NANOSECONDS); profiler.stop(page.getPositionCount()); assertFalse(profiler.isExpressionExpensive()); } work.process(); } } @Test public void testIncreasingBatchSize() { int rows = 1024; // We deliberately do not set the ticker, so that the expression is always cheap and the batch size gets doubled until other limits are hit TestingTicker testingTicker = new TestingTicker(); ExpressionProfiler profiler = new ExpressionProfiler(testingTicker, SPLIT_RUN_QUANTA); PageProcessor pageProcessor = new PageProcessor( Optional.empty(), ImmutableList.of(new InputPageProjection(0, BIGINT)), OptionalInt.of(1), profiler); Slice[] slices = new Slice[rows]; Arrays.fill(slices, Slices.allocate(rows)); Page inputPage = new Page(createSlicesBlock(slices)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, inputPage); long previousPositionCount = 1; long totalPositionCount = 0; while (totalPositionCount < rows) { Optional<Page> page = output.next(); assertTrue(page.isPresent()); long positionCount = page.get().getPositionCount(); totalPositionCount += positionCount; // skip the first read && skip the last read, which can be a partial page if (positionCount > 1 && totalPositionCount != rows) { assertEquals(positionCount, previousPositionCount * 2); } previousPositionCount = positionCount; } } @Test public void testDecreasingBatchSize() { int rows = 1024; // We set the expensive expression threshold to 0, so the expression is always considered expensive 
and the batch size gets halved until it becomes 1 TestingTicker testingTicker = new TestingTicker(); ExpressionProfiler profiler = new ExpressionProfiler(testingTicker, new Duration(0, MILLISECONDS)); PageProcessor pageProcessor = new PageProcessor( Optional.empty(), ImmutableList.of(new InputPageProjection(0, BIGINT)), OptionalInt.of(512), profiler); Slice[] slices = new Slice[rows]; Arrays.fill(slices, Slices.allocate(rows)); Page inputPage = new Page(createSlicesBlock(slices)); Iterator<Optional<Page>> output = processAndAssertRetainedPageSize(pageProcessor, inputPage); long previousPositionCount = 1; long totalPositionCount = 0; while (totalPositionCount < rows) { Optional<Page> page = output.next(); assertTrue(page.isPresent()); long positionCount = page.get().getPositionCount(); totalPositionCount += positionCount; // the batch size doesn't get smaller than 1 if (positionCount > 1 && previousPositionCount != 1) { assertEquals(positionCount, previousPositionCount / 2); } previousPositionCount = positionCount; } } private Iterator<Optional<Page>> processAndAssertRetainedPageSize(PageProcessor pageProcessor, Page inputPage) { return processAndAssertRetainedPageSize(pageProcessor, new DriverYieldSignal(), inputPage); } private Iterator<Optional<Page>> processAndAssertRetainedPageSize(PageProcessor pageProcessor, DriverYieldSignal yieldSignal, Page inputPage) { return processAndAssertRetainedPageSize(pageProcessor, yieldSignal, newSimpleAggregatedMemoryContext(), inputPage); } private Iterator<Optional<Page>> processAndAssertRetainedPageSize(PageProcessor pageProcessor, DriverYieldSignal yieldSignal, AggregatedMemoryContext memoryContext, Page inputPage) { Iterator<Optional<Page>> output = pageProcessor.process( SESSION, yieldSignal, memoryContext.newLocalMemoryContext(PageProcessor.class.getSimpleName()), inputPage); assertEquals(memoryContext.getBytes(), 0); return output; } private static class InvocationCountPageProjection implements PageProjection { protected 
final PageProjection delegate; private int invocationCount; public InvocationCountPageProjection(PageProjection delegate) { this.delegate = delegate; } @Override public Type getType() { return delegate.getType(); } @Override public boolean isDeterministic() { return delegate.isDeterministic(); } @Override public InputChannels getInputChannels() { return delegate.getInputChannels(); } @Override public Work<Block> project(ConnectorSession session, DriverYieldSignal yieldSignal, Page page, SelectedPositions selectedPositions) { setInvocationCount(getInvocationCount() + 1); return delegate.project(session, yieldSignal, page, selectedPositions); } public int getInvocationCount() { return invocationCount; } public void setInvocationCount(int invocationCount) { this.invocationCount = invocationCount; } } private class YieldPageProjection extends InvocationCountPageProjection { public YieldPageProjection(PageProjection delegate) { super(delegate); } @Override public Work<Block> project(ConnectorSession session, DriverYieldSignal yieldSignal, Page page, SelectedPositions selectedPositions) { return new YieldPageProjectionWork(session, yieldSignal, page, selectedPositions); } private class YieldPageProjectionWork implements Work<Block> { private final DriverYieldSignal yieldSignal; private final Work<Block> work; public YieldPageProjectionWork(ConnectorSession session, DriverYieldSignal yieldSignal, Page page, SelectedPositions selectedPositions) { this.yieldSignal = yieldSignal; this.work = delegate.project(session, yieldSignal, page, selectedPositions); } @Override public boolean process() { assertTrue(work.process()); yieldSignal.setWithDelay(1, executor); yieldSignal.forceYieldForTesting(); return true; } @Override public Block getResult() { return work.getResult(); } } } public static class LazyPagePageProjection implements PageProjection { @Override public Type getType() { return BIGINT; } @Override public boolean isDeterministic() { return true; } @Override public 
InputChannels getInputChannels() { return new InputChannels(0, 1); } @Override public Work<Block> project(ConnectorSession session, DriverYieldSignal yieldSignal, Page page, SelectedPositions selectedPositions) { return new CompletedWork<>(page.getBlock(0).getLoadedBlock()); } } private static class TestingPageFilter implements PageFilter { private final SelectedPositions selectedPositions; public TestingPageFilter(SelectedPositions selectedPositions) { this.selectedPositions = selectedPositions; } @Override public boolean isDeterministic() { return true; } @Override public InputChannels getInputChannels() { return new InputChannels(0); } @Override public SelectedPositions filter(ConnectorSession session, Page page) { return selectedPositions; } } public static class SelectAllFilter implements PageFilter { @Override public boolean isDeterministic() { return true; } @Override public InputChannels getInputChannels() { return new InputChannels(0); } @Override public SelectedPositions filter(ConnectorSession session, Page page) { return positionsRange(0, page.getPositionCount()); } } private static class SelectNoneFilter implements PageFilter { @Override public boolean isDeterministic() { return true; } @Override public InputChannels getInputChannels() { return new InputChannels(0); } @Override public SelectedPositions filter(ConnectorSession session, Page page) { return positionsRange(0, 0); } } }
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/pubsub/v1/pubsub.proto

// NOTE(review): generated source — review touched only comments and formatting;
// every code token is unchanged. To change behavior, edit pubsub.proto and
// regenerate. Repeated generated javadoc for the "attributes" map was condensed;
// the full field description is kept once, on containsAttributes(String).

package com.google.pubsub.v1;

/**
 * <pre>
 * Configuration for a push delivery endpoint.
 * </pre>
 *
 * Protobuf type {@code google.pubsub.v1.PushConfig}
 */
public final class PushConfig extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.pubsub.v1.PushConfig)
    PushConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use PushConfig.newBuilder() to construct.
  private PushConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance state: empty endpoint, no attributes.
  private PushConfig() {
    pushEndpoint_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: consumes tag/value pairs until end of
  // stream (tag 0), preserving unrecognized fields in unknownFields.
  private PushConfig(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownFieldProto3(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            // Field 1 (push_endpoint), wire type 2: tag = (1 << 3) | 2 = 10.
            java.lang.String s = input.readStringRequireUtf8();

            pushEndpoint_ = s;
            break;
          }
          case 18: {
            // Field 2 (attributes map entry): tag = (2 << 3) | 2 = 18.
            // The MapField is allocated lazily on the first entry; bit 0x2 of
            // mutable_bitField0_ records that the allocation happened.
            if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
              attributes_ = com.google.protobuf.MapField.newMapField(
                  AttributesDefaultEntryHolder.defaultEntry);
              mutable_bitField0_ |= 0x00000002;
            }
            com.google.protobuf.MapEntry<java.lang.String, java.lang.String>
            attributes__ = input.readMessage(
                AttributesDefaultEntryHolder.defaultEntry.getParserForType(), extensionRegistry);
            attributes_.getMutableMap().put(
                attributes__.getKey(), attributes__.getValue());
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even on a parse error.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_PushConfig_descriptor;
  }

  @SuppressWarnings({"rawtypes"})
  protected com.google.protobuf.MapField internalGetMapField(
      int number) {
    switch (number) {
      case 2:
        return internalGetAttributes();
      default:
        throw new RuntimeException(
            "Invalid map field number: " + number);
    }
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_PushConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.pubsub.v1.PushConfig.class, com.google.pubsub.v1.PushConfig.Builder.class);
  }

  private int bitField0_;
  public static final int PUSH_ENDPOINT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded form is cached lazily by the
  // accessors below (standard generated-code pattern).
  private volatile java.lang.Object pushEndpoint_;
  /**
   * <pre>
   * A URL locating the endpoint to which messages should be pushed.
   * For example, a Webhook endpoint might use "https://example.com/push".
   * </pre>
   *
   * <code>string push_endpoint = 1;</code>
   */
  public java.lang.String getPushEndpoint() {
    java.lang.Object ref = pushEndpoint_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String for subsequent calls.
      pushEndpoint_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * A URL locating the endpoint to which messages should be pushed.
   * For example, a Webhook endpoint might use "https://example.com/push".
   * </pre>
   *
   * <code>string push_endpoint = 1;</code>
   */
  public com.google.protobuf.ByteString
      getPushEndpointBytes() {
    java.lang.Object ref = pushEndpoint_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      // Cache the encoded ByteString for subsequent calls.
      pushEndpoint_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ATTRIBUTES_FIELD_NUMBER = 2;
  // Lazy-holder idiom: defers building the map-entry prototype until first use.
  private static final class AttributesDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<
        java.lang.String, java.lang.String> defaultEntry =
            com.google.protobuf.MapEntry
            .<java.lang.String, java.lang.String>newDefaultInstance(
                com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_PushConfig_AttributesEntry_descriptor,
                com.google.protobuf.WireFormat.FieldType.STRING,
                "",
                com.google.protobuf.WireFormat.FieldType.STRING,
                "");
  }
  private com.google.protobuf.MapField<
      java.lang.String, java.lang.String> attributes_;
  private com.google.protobuf.MapField<java.lang.String, java.lang.String>
  internalGetAttributes() {
    // null means "never populated": expose the shared empty MapField.
    if (attributes_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          AttributesDefaultEntryHolder.defaultEntry);
    }
    return attributes_;
  }

  public int getAttributesCount() {
    return internalGetAttributes().getMap().size();
  }
  /**
   * <pre>
   * Endpoint configuration attributes.
   * Every endpoint has a set of API supported attributes that can be used to
   * control different aspects of the message delivery.
   * The currently supported attribute is `x-goog-version`, which you can
   * use to change the format of the pushed message. This attribute
   * indicates the version of the data expected by the endpoint. This
   * controls the shape of the pushed message (i.e., its fields and metadata).
   * The endpoint version is based on the version of the Pub/Sub API.
   * If not present during the `CreateSubscription` call, it will default to
   * the version of the API used to make such call. If not present during a
   * `ModifyPushConfig` call, its value will not be changed. `GetSubscription`
   * calls will always return a valid version, even if the subscription was
   * created without this attribute.
   * The possible values for this attribute are:
   * * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub API.
   * * `v1` or `v1beta2`: uses the push format defined in the v1 Pub/Sub API.
   * </pre>
   *
   * <code>map&lt;string, string&gt; attributes = 2;</code>
   */
  public boolean containsAttributes(
      java.lang.String key) {
    if (key == null) { throw new java.lang.NullPointerException(); }
    return internalGetAttributes().getMap().containsKey(key);
  }
  /**
   * Use {@link #getAttributesMap()} instead.
   */
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.String> getAttributes() {
    return getAttributesMap();
  }
  /**
   * Endpoint configuration attributes; see
   * {@link #containsAttributes(java.lang.String)} for the full field description.
   *
   * <code>map&lt;string, string&gt; attributes = 2;</code>
   */
  public java.util.Map<java.lang.String, java.lang.String> getAttributesMap() {
    return internalGetAttributes().getMap();
  }
  /**
   * Endpoint configuration attributes; see
   * {@link #containsAttributes(java.lang.String)} for the full field description.
   *
   * <code>map&lt;string, string&gt; attributes = 2;</code>
   */
  public java.lang.String getAttributesOrDefault(
      java.lang.String key,
      java.lang.String defaultValue) {
    if (key == null) { throw new java.lang.NullPointerException(); }
    java.util.Map<java.lang.String, java.lang.String> map =
        internalGetAttributes().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   * Endpoint configuration attributes; see
   * {@link #containsAttributes(java.lang.String)} for the full field description.
   *
   * <code>map&lt;string, string&gt; attributes = 2;</code>
   */
  public java.lang.String getAttributesOrThrow(
      java.lang.String key) {
    if (key == null) { throw new java.lang.NullPointerException(); }
    java.util.Map<java.lang.String, java.lang.String> map =
        internalGetAttributes().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }

  // -1 = not computed, 0 = not initialized, 1 = initialized. This message has
  // no required fields, so the check trivially succeeds.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes push_endpoint (field 1, only when non-empty, per proto3
  // semantics), each attributes map entry (field 2), then unknown fields.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    if (!getPushEndpointBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, pushEndpoint_);
    }
    com.google.protobuf.GeneratedMessageV3
        .serializeStringMapTo(
            output,
            internalGetAttributes(),
            AttributesDefaultEntryHolder.defaultEntry,
            2);
    unknownFields.writeTo(output);
  }

  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getPushEndpointBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, pushEndpoint_);
    }
    // Each map entry is sized as its own length-delimited sub-message.
    for (java.util.Map.Entry<java.lang.String, java.lang.String> entry
         : internalGetAttributes().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.String>
      attributes__ = AttributesDefaultEntryHolder.defaultEntry.newBuilderForType()
          .setKey(entry.getKey())
          .setValue(entry.getValue())
          .build();
      size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, attributes__);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.pubsub.v1.PushConfig)) {
      return super.equals(obj);
    }
    com.google.pubsub.v1.PushConfig other = (com.google.pubsub.v1.PushConfig) obj;

    boolean result = true;
    result = result && getPushEndpoint()
        .equals(other.getPushEndpoint());
    result = result && internalGetAttributes().equals(
        other.internalGetAttributes());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PUSH_ENDPOINT_FIELD_NUMBER;
    hash = (53 * hash) + getPushEndpoint().hashCode();
    if (!internalGetAttributes().getMap().isEmpty()) {
      hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
      hash = (53 * hash) + internalGetAttributes().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points, all delegating to PARSER.
  public static com.google.pubsub.v1.PushConfig parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.pubsub.v1.PushConfig parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.pubsub.v1.PushConfig parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.pubsub.v1.PushConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }

  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.pubsub.v1.PushConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Configuration for a push delivery endpoint.
   * </pre>
   *
   * Protobuf type {@code google.pubsub.v1.PushConfig}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.pubsub.v1.PushConfig)
      com.google.pubsub.v1.PushConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_PushConfig_descriptor;
    }

    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMapField(
        int number) {
      switch (number) {
        case 2:
          return internalGetAttributes();
        default:
          throw new RuntimeException(
              "Invalid map field number: " + number);
      }
    }
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapField internalGetMutableMapField(
        int number) {
      switch (number) {
        case 2:
          return internalGetMutableAttributes();
        default:
          throw new RuntimeException(
              "Invalid map field number: " + number);
      }
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_PushConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.pubsub.v1.PushConfig.class, com.google.pubsub.v1.PushConfig.Builder.class);
    }

    // Construct using com.google.pubsub.v1.PushConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // No repeated/message fields need eager builders here; intentionally empty.
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    public Builder clear() {
      super.clear();
      pushEndpoint_ = "";

      internalGetMutableAttributes().clear();
      return this;
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.pubsub.v1.PubsubProto.internal_static_google_pubsub_v1_PushConfig_descriptor;
    }

    public com.google.pubsub.v1.PushConfig getDefaultInstanceForType() {
      return com.google.pubsub.v1.PushConfig.getDefaultInstance();
    }

    public com.google.pubsub.v1.PushConfig build() {
      com.google.pubsub.v1.PushConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    public com.google.pubsub.v1.PushConfig buildPartial() {
      com.google.pubsub.v1.PushConfig result = new com.google.pubsub.v1.PushConfig(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      result.pushEndpoint_ = pushEndpoint_;
      // Snapshot the map and freeze it so the built message is immutable.
      result.attributes_ = internalGetAttributes();
      result.attributes_.makeImmutable();
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.pubsub.v1.PushConfig) {
        return mergeFrom((com.google.pubsub.v1.PushConfig)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.pubsub.v1.PushConfig other) {
      if (other == com.google.pubsub.v1.PushConfig.getDefaultInstance()) return this;
      // proto3 merge semantics: scalar copied only when non-default, map merged.
      if (!other.getPushEndpoint().isEmpty()) {
        pushEndpoint_ = other.pushEndpoint_;
        onChanged();
      }
      internalGetMutableAttributes().mergeFrom(
          other.internalGetAttributes());
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    public final boolean isInitialized() {
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.pubsub.v1.PushConfig parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.pubsub.v1.PushConfig) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was parsed, even if parsing failed part-way through.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    private int bitField0_;

    private java.lang.Object pushEndpoint_ = "";
    /**
     * <pre>
     * A URL locating the endpoint to which messages should be pushed.
     * For example, a Webhook endpoint might use "https://example.com/push".
     * </pre>
     *
     * <code>string push_endpoint = 1;</code>
     */
    public java.lang.String getPushEndpoint() {
      java.lang.Object ref = pushEndpoint_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pushEndpoint_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * A URL locating the endpoint to which messages should be pushed.
     * For example, a Webhook endpoint might use "https://example.com/push".
     * </pre>
     *
     * <code>string push_endpoint = 1;</code>
     */
    public com.google.protobuf.ByteString
        getPushEndpointBytes() {
      java.lang.Object ref = pushEndpoint_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        pushEndpoint_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * A URL locating the endpoint to which messages should be pushed.
     * For example, a Webhook endpoint might use "https://example.com/push".
     * </pre>
     *
     * <code>string push_endpoint = 1;</code>
     */
    public Builder setPushEndpoint(
        java.lang.String value) {
      if (value == null) {
    throw new NullPointerException();
  }

      pushEndpoint_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * A URL locating the endpoint to which messages should be pushed.
     * For example, a Webhook endpoint might use "https://example.com/push".
     * </pre>
     *
     * <code>string push_endpoint = 1;</code>
     */
    public Builder clearPushEndpoint() {
      pushEndpoint_ = getDefaultInstance().getPushEndpoint();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * A URL locating the endpoint to which messages should be pushed.
     * For example, a Webhook endpoint might use "https://example.com/push".
     * </pre>
     *
     * <code>string push_endpoint = 1;</code>
     */
    public Builder setPushEndpointBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);

      pushEndpoint_ = value;
      onChanged();
      return this;
    }

    private com.google.protobuf.MapField<
        java.lang.String, java.lang.String> attributes_;
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
    internalGetAttributes() {
      if (attributes_ == null) {
        return com.google.protobuf.MapField.emptyMapField(
            AttributesDefaultEntryHolder.defaultEntry);
      }
      return attributes_;
    }
    private com.google.protobuf.MapField<java.lang.String, java.lang.String>
    internalGetMutableAttributes() {
      onChanged();;
      // Lazily allocate, and copy-on-write if the current MapField is frozen
      // (e.g. it was taken from a built message via toBuilder()).
      if (attributes_ == null) {
        attributes_ = com.google.protobuf.MapField.newMapField(
            AttributesDefaultEntryHolder.defaultEntry);
      }
      if (!attributes_.isMutable()) {
        attributes_ = attributes_.copy();
      }
      return attributes_;
    }

    public int getAttributesCount() {
      return internalGetAttributes().getMap().size();
    }
    /**
     * Endpoint configuration attributes; see
     * {@link com.google.pubsub.v1.PushConfig#containsAttributes(java.lang.String)}
     * for the full field description.
     *
     * <code>map&lt;string, string&gt; attributes = 2;</code>
     */
    public boolean containsAttributes(
        java.lang.String key) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      return internalGetAttributes().getMap().containsKey(key);
    }
    /**
     * Use {@link #getAttributesMap()} instead.
     */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String> getAttributes() {
      return getAttributesMap();
    }
    /**
     * Endpoint configuration attributes; see
     * {@link com.google.pubsub.v1.PushConfig#containsAttributes(java.lang.String)}
     * for the full field description.
     *
     * <code>map&lt;string, string&gt; attributes = 2;</code>
     */
    public java.util.Map<java.lang.String, java.lang.String> getAttributesMap() {
      return internalGetAttributes().getMap();
    }
    /**
     * Endpoint configuration attributes; see
     * {@link com.google.pubsub.v1.PushConfig#containsAttributes(java.lang.String)}
     * for the full field description.
     *
     * <code>map&lt;string, string&gt; attributes = 2;</code>
     */
    public java.lang.String getAttributesOrDefault(
        java.lang.String key,
        java.lang.String defaultValue) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      java.util.Map<java.lang.String, java.lang.String> map =
          internalGetAttributes().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
    /**
     * Endpoint configuration attributes; see
     * {@link com.google.pubsub.v1.PushConfig#containsAttributes(java.lang.String)}
     * for the full field description.
     *
     * <code>map&lt;string, string&gt; attributes = 2;</code>
     */
    public java.lang.String getAttributesOrThrow(
        java.lang.String key) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      java.util.Map<java.lang.String, java.lang.String> map =
          internalGetAttributes().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }

    public Builder clearAttributes() {
      internalGetMutableAttributes().getMutableMap()
          .clear();
      return this;
    }
    /**
     * Endpoint configuration attributes; see
     * {@link com.google.pubsub.v1.PushConfig#containsAttributes(java.lang.String)}
     * for the full field description.
     *
     * <code>map&lt;string, string&gt; attributes = 2;</code>
     */
    public Builder removeAttributes(
        java.lang.String key) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      internalGetMutableAttributes().getMutableMap()
          .remove(key);
      return this;
    }
    /**
     * Use alternate mutation accessors instead.
     */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.String>
    getMutableAttributes() {
      return internalGetMutableAttributes().getMutableMap();
    }
    /**
     * Endpoint configuration attributes; see
     * {@link com.google.pubsub.v1.PushConfig#containsAttributes(java.lang.String)}
     * for the full field description.
     *
     * <code>map&lt;string, string&gt; attributes = 2;</code>
     */
    public Builder putAttributes(
        java.lang.String key,
        java.lang.String value) {
      if (key == null) { throw new java.lang.NullPointerException(); }
      if (value == null) { throw new java.lang.NullPointerException(); }
      internalGetMutableAttributes().getMutableMap()
          .put(key, value);
      return this;
    }
    /**
     * Endpoint configuration attributes; see
     * {@link com.google.pubsub.v1.PushConfig#containsAttributes(java.lang.String)}
     * for the full field description.
     *
     * <code>map&lt;string, string&gt; attributes = 2;</code>
     */
    public Builder putAllAttributes(
        java.util.Map<java.lang.String, java.lang.String> values) {
      internalGetMutableAttributes().getMutableMap()
          .putAll(values);
      return this;
    }
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }


    // @@protoc_insertion_point(builder_scope:google.pubsub.v1.PushConfig)
  }

  // @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig)
  private static final com.google.pubsub.v1.PushConfig DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.pubsub.v1.PushConfig();
  }

  public static com.google.pubsub.v1.PushConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<PushConfig>
      PARSER = new com.google.protobuf.AbstractParser<PushConfig>() {
    public PushConfig parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new PushConfig(input, extensionRegistry);
    }
  };

  public static com.google.protobuf.Parser<PushConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<PushConfig> getParserForType() {
    return PARSER;
  }

  public com.google.pubsub.v1.PushConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pluto.driver.services.container;

import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.Locale;

import javax.portlet.CacheControl;
import javax.portlet.PortletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.pluto.container.PortletContainer;
import org.apache.pluto.container.PortletMimeResponseContext;
import org.apache.pluto.container.PortletRequestContext;
import org.apache.pluto.container.PortletWindow;
import org.apache.pluto.container.util.PrintWriterServletOutputStream;

/**
 * Base response context for the MIME (output-producing) portlet phases.
 * Bridges the portlet-facing MIME response API onto the underlying
 * {@link HttpServletResponse}, or onto the inherited {@code headerData}
 * buffer while a header request is active.
 *
 * NOTE(review): relies on {@code isClosed()}, {@code isHeaderBufferActive()},
 * {@code getServletResponse()}, {@code getLifecycle()} and {@code headerData}
 * from the superclass, which is not visible in this file.
 *
 * @version $Id$
 *
 */
public abstract class PortletMimeResponseContextImpl extends PortletResponseContextImpl implements PortletMimeResponseContext
{
    /**
     * Plain mutable bean implementation of the portlet {@link CacheControl}
     * contract; all values default to zero/false/null until set.
     */
    private static class CacheControlImpl implements CacheControl
    {
        private String eTag;          // validation token for cached content
        private int expirationTime;   // seconds the content may be cached
        private boolean publicScope;  // true = cache entry shareable across users
        private boolean cachedContent; // true = client told to reuse cached content

        public CacheControlImpl()
        {
        }

        @Override
        public boolean useCachedContent()
        {
            return cachedContent;
        }

        @Override
        public String getETag()
        {
            return this.eTag;
        }

        @Override
        public int getExpirationTime()
        {
            return expirationTime;
        }

        @Override
        public boolean isPublicScope()
        {
            return publicScope;
        }

        @Override
        public void setETag(String eTag)
        {
            this.eTag = eTag;
        }

        @Override
        public void setExpirationTime(int expirationTime)
        {
            this.expirationTime = expirationTime;
        }

        @Override
        public void setPublicScope(boolean publicScope)
        {
            this.publicScope = publicScope;
        }

        @Override
        public void setUseCachedContent(boolean cachedContent)
        {
            this.cachedContent = cachedContent;
        }
    }

    // Lazily-created cache control; dropped on close().
    private CacheControl cacheControl;
    // Lazily-resolved servlet output stream (possibly a writer-backed wrapper).
    private OutputStream outputStream;

    public PortletMimeResponseContextImpl(PortletContainer container, HttpServletRequest containerRequest,
                                          HttpServletResponse containerResponse, PortletWindow window,
                                          PortletRequestContext requestContext)
    {
        super(container, containerRequest, containerResponse, window, requestContext);
    }

    /** Releases per-request state before delegating to the superclass close. */
    @Override
    public void close()
    {
        cacheControl = null;
        outputStream = null;
        super.close();
    }

    /**
     * Flushes the underlying servlet response. A no-op once closed or while a
     * header request buffers output (header data is not flushed to the client).
     */
    @Override
    public void flushBuffer() throws IOException
    {
        if (!isClosed() && !isHeaderBufferActive())
        {
            getServletResponse().flushBuffer();
        }
    }

    @Override
    public int getBufferSize()
    {
        if (isHeaderBufferActive())
        {
            // header request
            return headerData.getBufferSize();
        }
        else
        {
            // render or resource request
            return getServletResponse().getBufferSize();
        }
    }

    /** Returns the (lazily created) CacheControl, or null after close(). */
    @Override
    public CacheControl getCacheControl()
    {
        if (isClosed())
        {
            return null;
        }
        if (cacheControl == null)
        {
            cacheControl = new CacheControlImpl();
        }
        return cacheControl;
    }

    @Override
    public String getCharacterEncoding()
    {
        return isClosed() ? null : getServletResponse().getCharacterEncoding();
    }

    /**
     * Returns the response content type without any charset parameter;
     * defaults to "text/html" outside the resource phase when none is set.
     */
    @Override
    public String getContentType()
    {
        String type = null;
        if (!isClosed())
        {
            type = getServletResponse().getContentType();
            if (type == null)
            {
                if (!getLifecycle().equals(PortletRequest.RESOURCE_PHASE))
                {
                    // default MIME type for Pluto
                    type = "text/html";
                }
            }
            else
            {
                // ignore charset parameter
                type = type.replaceAll("([^;]*).*", "$1");
            }
        }
        return type;
    }

    @Override
    public Locale getLocale()
    {
        return isClosed() ? null : getServletResponse().getLocale();
    }

    /**
     * Returns the response output stream (header buffer during a header
     * request). Falls back to wrapping the writer if the servlet response's
     * getWriter() was already called.
     */
    @Override
    public OutputStream getOutputStream() throws IOException, IllegalStateException
    {
        if (isClosed())
        {
            return null;
        }
        if (isHeaderBufferActive())
        {
            // header request
            return headerData.getBaoStream();
        }
        else
        {
            // render or resource request
            if (outputStream == null)
            {
                try
                {
                    outputStream = getServletResponse().getOutputStream();
                }
                catch (IllegalStateException e)
                {
                    // handle situation where underlying ServletResponse its getWriter()
                    // has been called already anyway: return a wrapped PrintWriter in that case
                    outputStream = new PrintWriterServletOutputStream(getServletResponse().getWriter(),
                                                                      getServletResponse().getCharacterEncoding());
                }
            }
            return outputStream;
        }
    }

    @Override
    public PrintWriter getWriter() throws IOException, IllegalStateException
    {
        if (isClosed())
        {
            return null;
        }
        if (isHeaderBufferActive())
        {
            // header request
            return headerData.getWriter();
        }
        else
        {
            // render or resource request
            return getServletResponse().getWriter();
        }
    }

    /** Header output is buffered, so a header request is never committed. */
    @Override
    public boolean isCommitted()
    {
        if (isHeaderBufferActive())
        {
            // header request
            return false;
        }
        else
        {
            return getServletResponse().isCommitted();
        }
    }

    @Override
    public void reset()
    {
        if (!isClosed())
        {
            if (isHeaderBufferActive())
            {
                // header request
                headerData.reset();
            }
            else
            {
                getServletResponse().reset();
            }
        }
    }

    @Override
    public void resetBuffer()
    {
        if (!isClosed())
        {
            if (isHeaderBufferActive())
            {
                // header request
                headerData.resetBuffer();
            }
            else
            {
                getServletResponse().resetBuffer();
            }
        }
    }

    @Override
    public void setBufferSize(int size)
    {
        if (!isClosed())
        {
            if (isHeaderBufferActive())
            {
                // header request
                headerData.setBufferSize(size);
            }
            else
            {
                // render or resource request
                getServletResponse().setBufferSize(size);
            }
        }
    }

    /**
     * Sets the content type for the resource phase; for other phases only
     * validates the requested type against the one Pluto already set
     * (wildcards like "*" and "*&#47;*" are accepted).
     *
     * @throws IllegalArgumentException if a non-wildcard type differs from
     *         the established content type outside the resource phase
     */
    @Override
    public void setContentType(String contentType)
    {
        // The content type is set by Pluto for the render & header phases
        if (!isClosed())
        {
            if (getLifecycle().equals(PortletRequest.RESOURCE_PHASE))
            {
                getServletResponse().setContentType(contentType);
            }
            else
            {
                String type = getServletResponse().getContentType();
                if (type == null)
                {
                    // default MIME type for Pluto
                    type = "text/html";
                }
                else
                {
                    // ignore charset parameter
                    type = type.replaceAll("([^;]*).*", "$1");
                }
                if (!type.equals(contentType) && !contentType.matches("\\s*(?:\\*|\\*/\\s*\\*)\\s*"))
                {
                    throw new IllegalArgumentException("Invalid content type: " + contentType);
                }
            }
        }
    }
}
package org.semanticweb.cogExp.OWLAPIVerbaliser; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotation; import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAnnotationPropertyDomainAxiom; import org.semanticweb.owlapi.model.OWLAnnotationPropertyRangeAxiom; import org.semanticweb.owlapi.model.OWLAnonymousIndividual; import org.semanticweb.owlapi.model.OWLAsymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLDataAllValuesFrom; import org.semanticweb.owlapi.model.OWLDataComplementOf; import org.semanticweb.owlapi.model.OWLDataExactCardinality; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLDataHasValue; import org.semanticweb.owlapi.model.OWLDataIntersectionOf; import org.semanticweb.owlapi.model.OWLDataMaxCardinality; import org.semanticweb.owlapi.model.OWLDataMinCardinality; import org.semanticweb.owlapi.model.OWLDataOneOf; import org.semanticweb.owlapi.model.OWLDataProperty; import org.semanticweb.owlapi.model.OWLDataPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLDataPropertyDomainAxiom; import org.semanticweb.owlapi.model.OWLDataPropertyExpression; import org.semanticweb.owlapi.model.OWLDataPropertyRangeAxiom; import org.semanticweb.owlapi.model.OWLDataRange; import org.semanticweb.owlapi.model.OWLDataSomeValuesFrom; import org.semanticweb.owlapi.model.OWLDataUnionOf; import org.semanticweb.owlapi.model.OWLDatatype; import org.semanticweb.owlapi.model.OWLDatatypeDefinitionAxiom; import org.semanticweb.owlapi.model.OWLDatatypeRestriction; import 
org.semanticweb.owlapi.model.OWLDeclarationAxiom; import org.semanticweb.owlapi.model.OWLDifferentIndividualsAxiom; import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; import org.semanticweb.owlapi.model.OWLDisjointDataPropertiesAxiom; import org.semanticweb.owlapi.model.OWLDisjointObjectPropertiesAxiom; import org.semanticweb.owlapi.model.OWLDisjointUnionAxiom; import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; import org.semanticweb.owlapi.model.OWLEquivalentDataPropertiesAxiom; import org.semanticweb.owlapi.model.OWLEquivalentObjectPropertiesAxiom; import org.semanticweb.owlapi.model.OWLFacetRestriction; import org.semanticweb.owlapi.model.OWLFunctionalDataPropertyAxiom; import org.semanticweb.owlapi.model.OWLFunctionalObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLHasKeyAxiom; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLInverseFunctionalObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLInverseObjectPropertiesAxiom; import org.semanticweb.owlapi.model.OWLIrreflexiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLLiteral; import org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLNegativeDataPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLNegativeObjectPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.model.OWLObjectAllValuesFrom; import org.semanticweb.owlapi.model.OWLObjectComplementOf; import org.semanticweb.owlapi.model.OWLObjectExactCardinality; import org.semanticweb.owlapi.model.OWLObjectHasSelf; import org.semanticweb.owlapi.model.OWLObjectHasValue; import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; import org.semanticweb.owlapi.model.OWLObjectInverseOf; import org.semanticweb.owlapi.model.OWLObjectMaxCardinality; import org.semanticweb.owlapi.model.OWLObjectMinCardinality; import org.semanticweb.owlapi.model.OWLObjectOneOf; import 
org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLObjectPropertyDomainAxiom; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; import org.semanticweb.owlapi.model.OWLObjectPropertyRangeAxiom; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLObjectUnionOf; import org.semanticweb.owlapi.model.OWLObjectVisitorEx; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLPropertyExpression; import org.semanticweb.owlapi.model.OWLPropertyRange; import org.semanticweb.owlapi.model.OWLReflexiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLSameIndividualAxiom; import org.semanticweb.owlapi.model.OWLSubAnnotationPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; import org.semanticweb.owlapi.model.OWLSubDataPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; import org.semanticweb.owlapi.model.OWLSymmetricObjectPropertyAxiom; import org.semanticweb.owlapi.model.OWLTransitiveObjectPropertyAxiom; import org.semanticweb.owlapi.model.SWRLBuiltInAtom; import org.semanticweb.owlapi.model.SWRLClassAtom; import org.semanticweb.owlapi.model.SWRLDataPropertyAtom; import org.semanticweb.owlapi.model.SWRLDataRangeAtom; import org.semanticweb.owlapi.model.SWRLDifferentIndividualsAtom; import org.semanticweb.owlapi.model.SWRLIndividualArgument; import org.semanticweb.owlapi.model.SWRLLiteralArgument; import org.semanticweb.owlapi.model.SWRLObjectPropertyAtom; import org.semanticweb.owlapi.model.SWRLRule; import org.semanticweb.owlapi.model.SWRLSameIndividualAtom; import org.semanticweb.owlapi.model.SWRLVariable; import com.google.common.base.Optional; public class VerbaliseOWLObjectVisitor implements OWLObjectVisitorEx<String>{ private static final String _space 
= VerbalisationManager.INSTANCE._space;

// Optional name obfuscator applied by the verbalisation helpers; may be null.
private Obfuscator obfuscator;

public void setObfuscator(Obfuscator obfuscator){
    this.obfuscator = obfuscator;
}

public Obfuscator getObfuscator(){
    return obfuscator;
}

/**
 * Recursively flattens nested intersections into the list of their
 * conjuncts; any non-intersection expression is returned as a singleton.
 */
public static List<OWLClassExpression> collectAndExpressions(OWLObject ob){
    List<OWLClassExpression> result = new ArrayList<OWLClassExpression>();
    if (ob instanceof OWLObjectIntersectionOf){
        OWLObjectIntersectionOf ints = (OWLObjectIntersectionOf) ob;
        List<OWLClassExpression> subexprs = ints.getOperandsAsList();
        for(OWLClassExpression subexpr : subexprs){
            result.addAll(collectAndExpressions(subexpr));
        }
    }
    else{
        result.add((OWLClassExpression) ob);
    }
    return result;
}

/**
 * Detects the pattern (some r C1) and ... and (some r Cn) and (only r D),
 * where all existentials share one property and every existential filler is
 * covered by the universal's filler (or by a disjunct of it when the
 * universal filler is a union). Returns false on any mismatch.
 */
public static boolean checkMultipleExistsAndForallPattern(OWLObjectIntersectionOf ints){
    List<OWLClassExpression> exprs = ints.getOperandsAsList();
    List<OWLClassExpression> existsexprs = new ArrayList<OWLClassExpression>();
    List<OWLClassExpression> fillers = new ArrayList<OWLClassExpression>();
    if (exprs.size()<2){
        return false;
    }
    // the last conjunct must be the universal restriction
    if (!(exprs.get(exprs.size()-1) instanceof OWLObjectAllValuesFrom)){
        return false;
    }
    OWLObjectPropertyExpression commonpropexpr = null;
    // flatten every conjunct except the trailing universal
    for (OWLClassExpression expr: exprs){
        if (expr!=exprs.get(exprs.size()-1)){
            existsexprs.addAll(VerbaliseOWLObjectVisitor.collectAndExpressions(expr));
        }
    }
    for (OWLClassExpression expr: existsexprs){
        if (!(expr instanceof OWLObjectSomeValuesFrom)){
            return false;
        }
        OWLObjectSomeValuesFrom someexpr = (OWLObjectSomeValuesFrom) expr;
        OWLObjectPropertyExpression propexpr = someexpr.getProperty();
        fillers.add(someexpr.getFiller());
        // all existentials must use the same property as the first one
        if (commonpropexpr==null){
            commonpropexpr = propexpr;
        }
        if (!commonpropexpr.equals(propexpr)){
            return false;
        }
        OWLClassExpression forallexp = ((OWLObjectAllValuesFrom) exprs.get(exprs.size()-1)).getFiller();
        List<OWLClassExpression> fillers2 = new ArrayList<OWLClassExpression>();
        if (forallexp instanceof OWLObjectUnionOf){
            fillers2.addAll(((OWLObjectUnionOf) forallexp).getOperandsAsList());
        }
        else
            fillers2.add(forallexp);
        // each existential filler must appear in the universal's filler(s)
        if (!fillers2.contains(someexpr.getFiller())){
            return false;
        }
    }
    return true;
}

/**
 * True if the string contains an underscore immediately followed by an
 * uppercase letter (only the first underscore is inspected).
 */
public static boolean detectUnderCamel(String str){
    // look for one occurrence of "_\Uppercase"
    int i = str.indexOf("_");
    if (i>-1 && str.length()>i+1){
        if (Character.isUpperCase(str.charAt(i+1))){
            return true;
        }
    }
    return false;
}

/**
 * Splits on underscores, lower-cases each segment's leading capital, and
 * re-joins the segments with the verbalisation space separator.
 */
public static String removeUnderCamel(String str){
    List<String> results = new ArrayList<String>();
    String resultstring = "";
    while(str.length()>0){
        int i = str.indexOf("_");
        if (i>=0){
            results.add(str.substring(0,i));
            str = str.substring(i+1);
        }
        else{
            results.add(str);
            str="";
        }
    }
    // return results;
    for (String s : results){
        if (s.length()>0 && Character.isUpperCase(s.charAt(0)))
            resultstring = resultstring + s.substring(0,1).toLowerCase() + s.substring(1,s.length()) + _space;
        else
            resultstring = resultstring + s + _space;
    }
    // drop the trailing separator added by the loop above
    if (resultstring.length()>0 && resultstring.substring(resultstring.length()-1).equals(_space))
        resultstring = resultstring.substring(0, resultstring.length()-1);
    return resultstring;
}

/**
 * Replaces underscores with spaces; a leading double quote is stripped and
 * everything from an embedded double quote onwards is discarded.
 */
public static String removeUnderscores(String str){
    if (str.length()<1)
        return "";
    if (str.substring(0,1).equals("\"")){
        str = str.substring(1);
    }
    List<String> results = new ArrayList<String>();
    String resultstring = "";
    while(str.length()>0){
        int i = str.indexOf("_");
        if (i>=0){
            results.add(str.substring(0,i));
            str = str.substring(i+1);
        }
        else{
            i = str.indexOf("\"");
            if (i>=0){
                results.add(str.substring(0,i));
            }
            else
                results.add(str);
            str = "";
        }
    }
    // return results;
    for (int i = 0; i< results.size(); i++){
        resultstring = resultstring + results.get(i);
        if (i!=results.size()-1)
            resultstring = resultstring + _space;
    }
    return resultstring;
}

/**
 * Heuristic CamelCase detector: requires a first uppercase letter that is
 * not the start of an acronym and a second uppercase at least two
 * positions later.
 */
public static boolean detectCamelCase(String str){
    if (str.length()<2)
        return false;
    // find first uppercase
    int u = -1;
    for (int i=0; i<str.length(); i++){
        if (Character.isUpperCase(str.charAt(i))){
            u = i;
            break;
        }
    }
    if (u==-1){
        return false;
    }
    int j = -1;
    // find second uppercase
    if 
(str.length()>u && Character.isUpperCase(str.charAt(u+1))) return false; // <-- acronym case for (int i=u+2; i<str.length(); i++){ if (Character.isUpperCase(str.charAt(i))){ j = i; } } if (j==-1){ return false; } return true; } public static boolean detectLowCamelCase(String str){ if (str.length()<2) return false; // find first uppercase if (Character.isUpperCase(str.charAt(0))) return false; int u = -1; boolean second = false; for (int i=0; i<str.length(); i++){ if (Character.isUpperCase(str.charAt(i))){ u = i; break; } } for (int j=u+2; j<str.length(); j++){ if (Character.isUpperCase(str.charAt(j))){ second = true; break; } } if (!second){ return false; } return true; } public static String removeCamelCase(String str){ String resultstring = ""; for (int i=0; i<str.length(); i++){ if (Character.isUpperCase(str.charAt(i))){ if (i==0) resultstring = str.substring(i,i+1).toLowerCase() ; else resultstring = resultstring + " " + str.substring(i,i+1).toLowerCase() ; } else{ resultstring = resultstring + str.substring(i,i+1); } } return resultstring; } public static boolean detectAcronym(String str){ if (str.length()>1 && Character.isUpperCase(str.charAt(0)) && Character.isUpperCase(str.charAt(1)) ){ return true; } return false; } // VERBALIZE SUBCLASSOFAXIOM public String visit(OWLSubClassOfAxiom arg0) { // System.out.println("visit subclassof called "); // Left hand side String leftstring = ""; leftstring = arg0.getSubClass().accept(this); String somethingthat = "something that "; if (arg0.getSubClass() instanceof OWLObjectSomeValuesFrom){ OWLObjectSomeValuesFrom some1 = (OWLObjectSomeValuesFrom) arg0.getSubClass(); OWLClassExpression cl = VerbalisationManager.INSTANCE.getDomain(some1.getProperty().getNamedProperty()); if (cl!=null){ //somethingthat = cl.toString() + " that "; somethingthat = cl.accept(this) + " that "; } } if (arg0.getSubClass() instanceof OWLObjectIntersectionOf){ OWLObjectIntersectionOf intsect = (OWLObjectIntersectionOf) arg0.getSubClass(); 
OWLClassExpression clexpr = intsect.getOperandsAsList().get(0); List<OWLClassExpression> exprs = collectAndExpressions(clexpr); for (OWLClassExpression expr : exprs){ if (expr instanceof OWLObjectSomeValuesFrom){ OWLObjectSomeValuesFrom some1 = (OWLObjectSomeValuesFrom) expr; OWLClassExpression cl = VerbalisationManager.INSTANCE.getDomain(some1.getProperty().getNamedProperty()); if (cl!=null){ somethingthat = cl.accept(this) + " that "; } } } } if (arg0.getSubClass() instanceof OWLObjectIntersectionOf && VerbalisationManager.checkMultipleExistsPattern((OWLObjectIntersectionOf) arg0.getSubClass())){ leftstring = somethingthat + VerbalisationManager.pseudoNLStringMultipleExistsPattern((OWLObjectIntersectionOf) arg0.getSubClass(),obfuscator) ; } if (arg0.getSubClass() instanceof OWLObjectIntersectionOf && checkMultipleExistsAndForallPattern((OWLObjectIntersectionOf) arg0.getSubClass())){ leftstring = somethingthat + VerbalisationManager.pseudoNLStringMultipleExistsAndForallPattern((OWLObjectIntersectionOf) arg0.getSubClass()) + ","; } if (arg0.getSubClass() instanceof OWLObjectSomeValuesFrom){ leftstring = somethingthat + leftstring; } String middlestring = " "; if (!(arg0.getSuperClass() instanceof OWLObjectSomeValuesFrom) && !(arg0.getSuperClass() instanceof OWLObjectAllValuesFrom) && !(arg0.getSuperClass() instanceof OWLObjectIntersectionOf && checkMultipleExistsAndForallPattern((OWLObjectIntersectionOf) arg0.getSuperClass()) ) && !(arg0.getSuperClass() instanceof OWLObjectIntersectionOf && VerbalisationManager.checkMultipleExistsPattern((OWLObjectIntersectionOf) arg0.getSuperClass()) ) && !(arg0.getSuperClass() instanceof OWLObjectHasValue) && !(arg0.getSuperClass() instanceof OWLDataHasValue) ){ // System.out.println("DEBUG! " + arg0.getSuperClass()); leftstring = leftstring + " is"; } // this catches the simple case where the superclass is only a single existential // ... in this case, the "something that" is skipped. 
if (arg0.getSuperClass() instanceof OWLObjectSomeValuesFrom){ OWLObjectSomeValuesFrom some = (OWLObjectSomeValuesFrom) arg0.getSuperClass(); OWLClassExpression filler = some.getFiller(); OWLObjectPropertyExpression property = some.getProperty(); List<String> fillerstrs = new ArrayList<String>(); String middle = ""; if (filler instanceof OWLObjectSomeValuesFrom){ OWLObjectSomeValuesFrom some1 = (OWLObjectSomeValuesFrom) filler; OWLClassExpression cl = VerbalisationManager.INSTANCE.getDomain(some1.getProperty().getNamedProperty()); if (cl!=null){ // middle = cl.toString() + " that "; middle = cl.accept(this) + " that "; } else middle = "something that "; } if (arg0.getSuperClass() instanceof OWLObjectIntersectionOf && checkMultipleExistsAndForallPattern((OWLObjectIntersectionOf) arg0.getSuperClass())){ OWLObjectIntersectionOf intsect = (OWLObjectIntersectionOf) arg0.getSuperClass(); OWLClassExpression clexpr = intsect.getOperandsAsList().get(0); List<OWLClassExpression> exprs = collectAndExpressions(clexpr); middle = "something that "; for (OWLClassExpression expr : exprs){ if (expr instanceof OWLObjectSomeValuesFrom){ OWLObjectSomeValuesFrom some1 = (OWLObjectSomeValuesFrom) expr; OWLClassExpression cl = VerbalisationManager.INSTANCE.getDomain(some1.getProperty().getNamedProperty()); if (cl!=null){ middle = cl.toString() + " that "; } } } } fillerstrs.add(filler.accept(this)); return leftstring + " " + VerbalisationManager.verbaliseProperty(property, fillerstrs, middle,obfuscator); } if (arg0.getSuperClass() instanceof OWLObjectIntersectionOf && VerbalisationManager.checkMultipleExistsPattern((OWLObjectIntersectionOf) arg0.getSuperClass())){ return leftstring + " " + VerbalisationManager.pseudoNLStringMultipleExistsPattern((OWLObjectIntersectionOf) arg0.getSuperClass(),obfuscator); } if (arg0.getSuperClass() instanceof OWLObjectIntersectionOf && checkMultipleExistsAndForallPattern((OWLObjectIntersectionOf) arg0.getSuperClass())){ return leftstring + " " + 
VerbalisationManager.pseudoNLStringMultipleExistsAndForallPattern((OWLObjectIntersectionOf) arg0.getSuperClass()); } return leftstring + middlestring + arg0.getSuperClass().accept(this); } // VERBALIZE OWLCLASS public String visit(OWLClass ce) { // System.out.println("visiting OWL Class " + ce); return VerbalisationManager.INSTANCE.getClassNLString(ce); } public String verbaliseComplexIntersection(OWLObjectIntersectionOf arg0){ // System.out.println("complx int " + arg0); List<OWLClassExpression> operands = arg0.getOperandsAsList(); // distinguish operands by their types and order them appropriately List<OWLClassExpression> classExps = new ArrayList<OWLClassExpression>(); List<OWLObjectSomeValuesFrom> someExps = new ArrayList<OWLObjectSomeValuesFrom>(); List<OWLDataSomeValuesFrom> someDataExps = new ArrayList<OWLDataSomeValuesFrom>(); List<OWLDataHasValue> dataHasValueExps = new ArrayList<OWLDataHasValue>(); List<OWLObjectAllValuesFrom> allExps = new ArrayList<OWLObjectAllValuesFrom>(); List<OWLClassExpression> allelseExps = new ArrayList<OWLClassExpression>(); // loop and sort for (OWLClassExpression exp : operands){ if (exp instanceof OWLClass) classExps.add((OWLClassExpression) exp); else if (exp instanceof OWLObjectSomeValuesFrom) someExps.add((OWLObjectSomeValuesFrom) exp); else if (exp instanceof OWLDataSomeValuesFrom) someDataExps.add((OWLDataSomeValuesFrom) exp); else if (exp instanceof OWLDataHasValue) dataHasValueExps.add((OWLDataHasValue) exp); else if (exp instanceof OWLObjectAllValuesFrom) allExps.add((OWLObjectAllValuesFrom) exp); else allelseExps.add(exp); } // end of sorting loop // verbalise the "head" with the simple aggregator String head = VerbalisationManager.INSTANCE.getSimpleIntersectionNLString(classExps); // is there more? 
if (someExps.size()>0 || allExps.size()>0 || allelseExps.size()>0 || someDataExps.size()>0 || dataHasValueExps.size()>0){ head += " that "; } if (dataHasValueExps.size()==1){ head += dataHasValueExps.get(0).accept(this); return head; } if (someExps.size()>0 && allExps.size()==0){ head = VerbalisationManager.INSTANCE.verbaliseComplexIntersection(arg0,obfuscator); return head; /* // TODO: this is not aggregating in case there is a multiple exists pattern boolean isFirst = true; for(OWLObjectSomeValuesFrom some : someExps){ if (!isFirst) head += " and "; head += some.accept(this); isFirst = false; } */ } if (someExps.size()>0 && allExps.size()>0) head += " and "; if (allExps.size()>0){ // TODO: this is not aggregating in case there is a multiple exists pattern boolean isFirst = true; for(OWLObjectAllValuesFrom all : allExps){ if (!isFirst) head += " and "; head += all.accept(this); isFirst = false; } } if (allExps.size()>0 && allelseExps.size()>0) head += " and "; if (allelseExps.size()>0){ head += "is "; // TODO: this is not aggregating in case there is a multiple exists pattern boolean isFirst = true; for(OWLClassExpression allelse : allelseExps){ if (!isFirst) head += " and "; head += allelse.accept(this); isFirst = false; } } return head; } public String visit(OWLObjectIntersectionOf arg0) { String resultstring = ""; if (VerbalisationManager.checkMultipleExistsPattern(arg0)){ return " something that " + VerbalisationManager.pseudoNLStringMultipleExistsPattern(arg0,obfuscator); } else{ if (checkMultipleExistsAndForallPattern(arg0)){ return " something that " + VerbalisationManager.pseudoNLStringMultipleExistsAndForallPattern(arg0); } else{ boolean onlysimpleclasses = true; for (OWLClassExpression exp: ((OWLObjectIntersectionOf) arg0).getOperandsAsList()){ if (!(exp instanceof OWLClass)){onlysimpleclasses = false;} } if (onlysimpleclasses){ return VerbalisationManager.INSTANCE.getSimpleIntersectionNLString(arg0);} resultstring = verbaliseComplexIntersection(arg0); } 
    }
    return resultstring;
}

/**
 * Verbalises a union; a two-operand union goes through the repeated-text
 * aggregator, larger ones are joined with " or ".
 */
public String visit(OWLObjectUnionOf arg0) {
    String resultstring = "";
    if (((OWLObjectUnionOf) arg0).getOperandsAsList().size()==2){
        List<OWLClassExpression> exprs = ((OWLObjectUnionOf) arg0).getOperandsAsList();
        return VerbalisationManager.aggregateRepeated(exprs.get(0).accept(this), exprs.get(1).accept(this), " or ");
    }
    boolean firstp = true;
    for (OWLClassExpression exp: ((OWLObjectUnionOf) arg0).getOperandsAsList()){
        if (!firstp){ resultstring = resultstring + " or ";}
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring;
}

/** Verbalises a complement as "not <operand>". */
public String visit(OWLObjectComplementOf arg0) {
    String resultstring = "not " + arg0.getOperand().accept(this);
    return resultstring;
}

/** Verbalises a universal restriction as "<property> nothing but <filler>". */
public String visit(OWLObjectAllValuesFrom existsexpr) {
    OWLObjectPropertyExpression property = existsexpr.getProperty();
    OWLClassExpression filler = existsexpr.getFiller();
    List<String> fillerstrs = new ArrayList<String>();
    String middlestring = "nothing but ";
    // nested existential fillers read better with an explicit "something that"
    if (filler instanceof OWLObjectSomeValuesFrom)
        middlestring = middlestring + "something that ";
    fillerstrs.add(filler.accept(this));
    return VerbalisationManager.verbaliseProperty(property, fillerstrs, middlestring,obfuscator);
}

/** Verbalises an existential restriction via the shared property verbaliser. */
public String visit(OWLObjectSomeValuesFrom existsexpr) {
    OWLObjectPropertyExpression property = existsexpr.getProperty();
    // changed!
// --- tail of a visit(...) verbalisation method whose signature lies above this chunk ---
// BUG FIX(review): two clashing locals both named `propfragment` were declared here
// (String via the deprecated getFragment(), Optional<String> via getRemainder()).
// Duplicate local declarations do not compile, and neither value was ever read
// before the method returns, so both dead locals were removed.
OWLClassExpression filler = existsexpr.getFiller();
List<String> fillerstrs = new ArrayList<String>();
// `middle` becomes a connective phrase when the filler is itself an existential.
String middle = "";
if (filler instanceof OWLObjectSomeValuesFrom) {
    OWLObjectSomeValuesFrom some1 = (OWLObjectSomeValuesFrom) filler;
    OWLClassExpression cl = VerbalisationManager.INSTANCE.getDomain(some1.getProperty().getNamedProperty());
    if (cl != null) {
        middle = cl.toString();
    } else {
        middle = "something that ";
    }
}
fillerstrs.add(filler.accept(this));
String str = VerbalisationManager.verbaliseProperty(property, fillerstrs, middle, obfuscator);
return str;
}

/**
 * Verbalises an equivalence axiom, preferring a named class as the sentence
 * subject: "According to its definition, [Class] is ...". Falls back to the
 * first expression in the list when no named class participates.
 */
public String visit(OWLEquivalentClassesAxiom arg0) {
    String result = "";
    OWLClass classexp = null;
    List<OWLClassExpression> exprs = arg0.getClassExpressionsAsList();
    // Pick the first named class (if any) as the subject.
    for (OWLClassExpression ex : exprs) {
        if (ex instanceof OWLClass) {
            classexp = (OWLClass) ex;
            break;
        }
    }
    if (classexp != null) {
        result += "According to its definition, ";
        result += classexp.accept(this);
        result += " is ";
        boolean firstp = true;
        for (OWLClassExpression ex : exprs) {
            if (!ex.equals(classexp)) {
                // BUG FIX: `firstp = false` used to sit INSIDE the `if (!firstp)`
                // branch, so it could never execute and "and " was never emitted.
                if (!firstp) {
                    result += "and ";
                }
                firstp = false;
                if (ex instanceof OWLObjectSomeValuesFrom)
                    result += "something that ";
                result += ex.accept(this);
            }
        }
    } else {
        result += exprs.get(0).accept(this);
        result += " is the same as ";
        boolean firstp = true;
        for (OWLClassExpression ex : exprs) {
            if (!ex.equals(exprs.get(0))) {
                if (!firstp) {
                    result += "and ";
                }
                firstp = false; // same fix as above
                if (ex instanceof OWLObjectSomeValuesFrom)
                    result += "something that ";
                result += ex.accept(this);
            }
        }
    }
    return result;
}

/** Placeholder rendering of a negative object-property assertion. */
public String visit(OWLNegativeObjectPropertyAssertionAxiom arg0) {
    return "neg obj prop(" + arg0.getProperty().accept(this) + ","
            + arg0.getObject().accept(this) + "," + arg0.getSubject().accept(this) + ")";
}

/** Placeholder rendering of an asymmetric object-property axiom. */
public String visit(OWLAsymmetricObjectPropertyAxiom arg0) {
    return "asymmetr" + "(" + arg0.getProperty().accept(this) + ")";
}

/** Placeholder rendering of a reflexive object-property axiom. */
public String visit(OWLReflexiveObjectPropertyAxiom arg0) {
    return "reflexive" + "(" + arg0.getProperty().accept(this) + ")";
}

/**
 * Verbalises pairwise disjointness. With a named class available the sentence
 * reads "No [C] is A or B"; otherwise "Nothing that is A is B or C".
 */
public String visit(OWLDisjointClassesAxiom arg0) {
    String resultstring = "";
    List<OWLClassExpression> exprs = arg0.getClassExpressionsAsList();
    // check if there is an atomic class expression in there, and put it in a special variable
    OWLClass classexp = null;
    for (OWLClassExpression candidate : exprs) {
        if (candidate instanceof OWLClass) {
            classexp = (OWLClass) candidate;
            break;
        }
    }
    if (classexp != null) {
        resultstring += "No ";
        String str = classexp.accept(this);
        // Strip a leading indefinite article so "No a cat" reads "No cat".
        if (str.indexOf("a ") == 0)
            str = str.substring(2);
        if (str.indexOf("an ") == 0)
            str = str.substring(3);
        resultstring += str + " is ";
        boolean firstp = true;
        for (OWLClassExpression exp : exprs) {
            if (!exp.equals(classexp)) {
                if (!firstp) {
                    resultstring = resultstring + " or ";
                }
                firstp = false;
                resultstring = resultstring + exp.accept(this);
            }
        }
    } else {
        // BUG FIX: the old loop guarded the " or " separator with
        // `if (firstp=false)` — an assignment, not a comparison — so later
        // disjuncts ran together with no separator at all. Rewritten so the
        // first expression is the subject and the rest are " or "-joined.
        resultstring += "Nothing that is ";
        boolean subjectDone = false;
        boolean firstDisjunct = true;
        for (OWLClassExpression exp : exprs) {
            if (!subjectDone) {
                resultstring = resultstring + exp.accept(this) + " is ";
                subjectDone = true;
            } else {
                if (!firstDisjunct) {
                    resultstring = resultstring + " or ";
                }
                resultstring = resultstring + exp.accept(this);
                firstDisjunct = false;
            }
        }
    }
    return resultstring;
}

/** Placeholder rendering of a data-property domain axiom. */
public String visit(OWLDataPropertyDomainAxiom arg0) {
    // TODO: treat this as syntactic sugar, convert this to the form \exists r. \top \sqsubseteq X and output
    return "dom(" + arg0.getProperty().accept(this) + "," + arg0.getDomain() + ")";
}

/**
 * Verbalises an object-property domain axiom as
 * "anything that [property phrase] is [domain class]".
 */
public String visit(OWLObjectPropertyDomainAxiom arg0) {
    OWLObjectPropertyExpression prop = arg0.getProperty();
    OWLClassExpression classexp = arg0.getDomain();
    return "anything that " + VerbalisationManager.INSTANCE.getPropertyNLString(prop)
            + " is " + classexp.accept(this);
}

/** Placeholder rendering of equivalent object properties. */
public String visit(OWLEquivalentObjectPropertiesAxiom arg0) {
    String resultstring = "equivalent(";
    Set<OWLObjectPropertyExpression> exprs = arg0.getProperties();
    boolean firstp = true;
    for (OWLPropertyExpression exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Placeholder rendering of a negative data-property assertion. */
public String visit(OWLNegativeDataPropertyAssertionAxiom arg0) {
    return "neg dat prop(" + arg0.getProperty().accept(this) + ","
            + arg0.getObject().accept(this) + "," + arg0.getSubject().accept(this) + ")";
}

/** Placeholder rendering of a different-individuals axiom. */
public String visit(OWLDifferentIndividualsAxiom arg0) {
    String resultstring = "different(";
    List<OWLIndividual> exprs = arg0.getIndividualsAsList();
    boolean firstp = true;
    for (OWLIndividual exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Placeholder rendering of disjoint data properties. */
public String visit(OWLDisjointDataPropertiesAxiom arg0) {
    String resultstring = "disjoint(";
    Set<OWLDataPropertyExpression> exprs = arg0.getProperties();
    boolean firstp = true;
    for (OWLDataPropertyExpression exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Placeholder rendering of disjoint object properties. */
public String visit(OWLDisjointObjectPropertiesAxiom arg0) {
    String resultstring = "disjoint(";
    Set<OWLObjectPropertyExpression> exprs = arg0.getProperties();
    boolean firstp = true;
    for (OWLObjectPropertyExpression exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Verbalises a range axiom as "what [property phrase] is [range class]". */
public String visit(OWLObjectPropertyRangeAxiom arg0) {
    OWLObjectPropertyExpression prop1 = arg0.getProperty();
    OWLClassExpression prop2 = arg0.getRange();
    return "what " + VerbalisationManager.INSTANCE.getPropertyNLString(prop1)
            + " is " + prop2.accept(this);
}

/** Stub — marked for implementation by the original author. */
public String visit(OWLObjectPropertyAssertionAxiom arg0) {
    return "{IMPLEMENT ME obj prop ass : " + arg0.getProperty().accept(this)
            + arg0.getObject().accept(this) + "}";
}

/** Placeholder rendering of a functional object-property axiom. */
public String visit(OWLFunctionalObjectPropertyAxiom arg0) {
    return "functional" + "(" + arg0.getProperty().accept(this) + ")";
}

/** Verbalises an object sub-property axiom. */
public String visit(OWLSubObjectPropertyOfAxiom arg0) {
    return "to " + arg0.getSubProperty().accept(this)
            + " is a subproperty of " + arg0.getSuperProperty().accept(this);
}

/** Placeholder rendering of a disjoint-union axiom. */
public String visit(OWLDisjointUnionAxiom arg0) {
    String resultstring = "disjoint-union(";
    Set<OWLClassExpression> exprs = arg0.getClassExpressions();
    boolean firstp = true;
    for (OWLClassExpression exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Stub — marked for implementation by the original author. */
public String visit(OWLDeclarationAxiom arg0) {
    return "{IMPLEMENT ME decl ax : " + arg0.getEntity() + "}";
}

/** Stub — marked for implementation by the original author. */
public String visit(OWLAnnotationAssertionAxiom arg0) {
    return "{IMPLEMENT ME annot ax : " + arg0.getSubject().accept(this)
            + arg0.getProperty().accept(this) + arg0.getValue().accept(this) + "}";
}

/** Placeholder rendering of a symmetric object-property axiom. */
public String visit(OWLSymmetricObjectPropertyAxiom arg0) {
    return "symm" + "(" + arg0.getProperty().accept(this) + ")";
}

/** Verbalises a data-property range axiom. */
public String visit(OWLDataPropertyRangeAxiom arg0) {
    return arg0.getProperty().accept(this) + " has range " + arg0.getRange().accept(this);
}

/** Placeholder rendering of a functional data-property axiom. */
public String visit(OWLFunctionalDataPropertyAxiom arg0) {
    return "functional" + "(" + arg0.getProperty().accept(this) + ")";
}

/** Placeholder rendering of equivalent data properties. */
public String visit(OWLEquivalentDataPropertiesAxiom arg0) {
    String resultstring = "equivalent-dataprops(";
    Set<OWLDataPropertyExpression> exprs = arg0.getProperties();
    boolean firstp = true;
    for (OWLDataPropertyExpression exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Verbalises a class assertion: "[individual] is [class]". */
public String visit(OWLClassAssertionAxiom arg0) {
    return arg0.getIndividual().accept(this) + " is " + arg0.getClassExpression().accept(this);
}

/**
 * Verbalises a data-property assertion.
 * NOTE(review): the subject is concatenated via toString() rather than
 * accept(this), and the literal value is not rendered at all — confirm whether
 * this is intentional before relying on the output.
 */
public String visit(OWLDataPropertyAssertionAxiom arg0) {
    return arg0.getSubject() + arg0.getProperty().accept(this);
}

/** Placeholder rendering of a transitive object-property axiom. */
public String visit(OWLTransitiveObjectPropertyAxiom arg0) {
    return "trans" + "(" + arg0.getProperty().accept(this) + ")";
}

/** Placeholder rendering of an irreflexive object-property axiom. */
public String visit(OWLIrreflexiveObjectPropertyAxiom arg0) {
    return "irrefl" + "(" + arg0.getProperty().accept(this) + ")";
}

/** Verbalises a data sub-property axiom. */
public String visit(OWLSubDataPropertyOfAxiom arg0) {
    return "to " + arg0.getSubProperty().accept(this)
            + " is a subproperty of " + arg0.getSuperProperty().accept(this);
}

/** Placeholder rendering of an inverse-functional object-property axiom. */
public String visit(OWLInverseFunctionalObjectPropertyAxiom arg0) {
    return "inv-funct" + "(" + arg0.getProperty().accept(this) + ")";
}

/** Placeholder rendering of a same-individual axiom. */
public String visit(OWLSameIndividualAxiom arg0) {
    String resultstring = "same-individual(";
    List<OWLIndividual> exprs = arg0.getIndividualsAsList();
    boolean firstp = true;
    for (OWLIndividual exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Placeholder rendering of a property-chain axiom. */
public String visit(OWLSubPropertyChainOfAxiom arg0) {
    String resultstring = "property-chain(";
    List<OWLObjectPropertyExpression> exprs = arg0.getPropertyChain();
    boolean firstp = true;
    for (OWLObjectPropertyExpression exp : exprs) {
        if (!firstp) {
            resultstring = resultstring + ",";
        }
        firstp = false;
        resultstring = resultstring + exp.accept(this);
    }
    return resultstring + ")";
}

/** Placeholder rendering of an inverse-properties axiom. */
public String visit(OWLInverseObjectPropertiesAxiom arg0) {
    OWLObjectPropertyExpression expr1 = arg0.getFirstProperty();
    OWLObjectPropertyExpression expr2 = arg0.getSecondProperty();
    return "inverse(" + expr1.accept(this) + "," + expr2.accept(this) + ")";
}

/** Not supported — callers must handle the null result. */
public String visit(OWLHasKeyAxiom arg0) {
    return null;
}

/** Not supported — callers must handle the null result. */
public String visit(OWLDatatypeDefinitionAxiom arg0) {
    return null;
}

/** Not supported — callers must handle the null result. */
public String visit(SWRLRule arg0) {
    return null;
}

/** Verbalises an annotation sub-property axiom. */
public String visit(OWLSubAnnotationPropertyOfAxiom arg0) {
    // BUG FIX: output typo "sub-annotiation" corrected to "sub-annotation".
    return arg0.getSubProperty().accept(this)
            + " is sub-annotation property of " + arg0.getSuperProperty().accept(this);
}

/** Verbalises an annotation-property domain axiom. */
public String visit(OWLAnnotationPropertyDomainAxiom arg0) {
    return arg0.getProperty().accept(this) + " has domain " + arg0.getDomain().accept(this);
}

/** Placeholder rendering of an annotation-property range axiom. */
public String visit(OWLAnnotationPropertyRangeAxiom arg0) {
    OWLAnnotationProperty prop1 = arg0.getProperty();
    IRI prop2 = arg0.getRange();
    return "range(" + prop1.accept(this) + "," + prop2.accept(this) + ")";
}

/** Placeholder rendering of a hasValue restriction; flags its occurrence. */
public String visit(OWLObjectHasValue arg0) {
    VerbalisationManager.INSTANCE.includesHasValue = true;
    return "hasValue" + "(" + arg0.getProperty().accept(this) + "," + arg0.getValue() + ")";
}

/** Placeholder rendering of a minimum-cardinality restriction. */
public String visit(OWLObjectMinCardinality arg0) {
    return ">=" + arg0.getCardinality() + "(" + arg0.getProperty().accept(this)
            + "." + arg0.getFiller().accept(this) + ")";
}

/** Placeholder rendering of an exact-cardinality restriction. */
public String visit(OWLObjectExactCardinality arg0) {
    return "=" + arg0.getCardinality() + "(" + arg0.getProperty().accept(this)
            + "." + arg0.getFiller().accept(this) + ")";
}

/** Placeholder rendering of a maximum-cardinality restriction. */
public String visit(OWLObjectMaxCardinality arg0) {
    // BUG FIX: the original built the result string and then `return null;`,
    // discarding it. Now returns the rendered string like its siblings.
    return "<=" + arg0.getCardinality() + "(" + arg0.getProperty().accept(this)
            + "." + arg0.getFiller().accept(this) + ")";
}

/** Not supported — callers must handle the null result. */
public String visit(OWLObjectHasSelf arg0) {
    return null;
}

/** Not supported — callers must handle the null result. */
public String visit(OWLObjectOneOf arg0) {
    return null;
}

/** Verbalises a data existential restriction. */
public String visit(OWLDataSomeValuesFrom arg0) {
    String propertyname = arg0.getProperty().accept(this);
    OWLDataRange r = arg0.getFiller();
    return "something that " + propertyname + " " + r.accept(this);
}

/** Verbalises a data universal restriction. */
public String visit(OWLDataAllValuesFrom arg0) {
    return arg0.getProperty().accept(this) + " " + arg0.getFiller().accept(this);
}

/** Verbalises a data hasValue restriction; flags its occurrence. */
public String visit(OWLDataHasValue arg0) {
    VerbalisationManager.INSTANCE.includesHasValue = true;
    return arg0.getProperty().accept(this) + _space + arg0.getValue().getLiteral();
}

/** Placeholder rendering of a data minimum-cardinality restriction. */
public String visit(OWLDataMinCardinality arg0) {
    // BUG FIX: the original built the result string and then `return null;`,
    // discarding it. Now returns the rendered string like its siblings.
    return "min#" + "(" + arg0.getCardinality() + "," + arg0.getProperty().accept(this)
            + "," + arg0.getFiller().accept(this) + ")";
}

/** Placeholder rendering of a data exact-cardinality restriction. */
public String visit(OWLDataExactCardinality arg0) {
    return "=" + arg0.getCardinality() + "(" + arg0.getProperty().accept(this)
            + "." + arg0.getFiller().accept(this) + ")";
}

/** Placeholder rendering of a data maximum-cardinality restriction. */
public String visit(OWLDataMaxCardinality arg0) {
    return "<=" + arg0.getCardinality() + "(" + arg0.getProperty().accept(this)
            + "." + arg0.getFiller().accept(this) + ")";
}

/** Verbalises a datatype, special-casing xsd:boolean. */
public String visit(OWLDatatype arg0) {
    if (arg0.isBoolean()) {
        if (arg0.toString().equals("xsd:boolean")) {
            return "as indicated by a boolean";
        }
    }
    return "as indicated by a " + arg0.toString();
}

/** Not implemented — returns the sentinel string "NI-1002". */
public String visit(OWLDataComplementOf arg0) {
    return "NI-1002";
}

/** Verbalises an enumerated data range as a comma-separated list of literals. */
public String visit(OWLDataOneOf arg0) {
    String result = "of one of the following: ";
    Set<OWLLiteral> literals = arg0.getValues();
    boolean firstP = true;
    for (OWLLiteral lit : literals) {
        if (!firstP)
            result += ", ";
        result += lit.accept(this);
        firstP = false;
    }
    return result;
}

/** Not implemented — returns the sentinel string "NI-1003". */
public String visit(OWLDataIntersectionOf arg0) {
    return "NI-1003";
}

/** Not implemented — returns the sentinel string "NI-1004". */
public String visit(OWLDataUnionOf arg0) {
    return "NI-1004";
}

/** Not implemented — returns the sentinel string "NI-1005". */
public String visit(OWLDatatypeRestriction arg0) {
    return "NI-1005";
}

/** Renders a literal by its lexical form. */
public String visit(OWLLiteral arg0) {
    return arg0.getLiteral();
}

/** Not implemented — returns the sentinel string "NI-1006". */
public String visit(OWLFacetRestriction arg0) {
    return "NI-1006";
}

/** Renders an object property by its named property's toString(). */
public String visit(OWLObjectProperty arg0) {
    return arg0.getNamedProperty().toString();
}

/** Renders an inverse property as "not " + the forward property. */
public String visit(OWLObjectInverseOf arg0) {
    return "not " + arg0.getInverse().accept(this);
}

/** Renders a data property from its IRI fragment, splitting camel case. */
public String visit(OWLDataProperty arg0) {
    return VerbalisationManager.treatCamelCaseAndUnderscores(arg0.getIRI().getFragment().toString());
}

/** Renders a named individual by its IRI fragment. */
public String visit(OWLNamedIndividual arg0) {
    return arg0.getIRI().getFragment();
}

/** Renders an annotation property by its IRI fragment. */
public String visit(OWLAnnotationProperty arg0) {
    return arg0.getIRI().getFragment();
}

/** Renders an annotation as property text followed by value text. */
public String visit(OWLAnnotation arg0) {
    return arg0.getProperty().accept(this) + arg0.getValue().accept(this);
}

/** Renders an IRI by its fragment. */
public String visit(IRI arg0) {
    return arg0.getFragment();
}

/** Not implemented — returns the sentinel string "NI-1008". */
public String visit(OWLAnonymousIndividual arg0) {
    return "NI-1008";
}

/** SWRL constructs are not supported — callers must handle the null result. */
public String visit(SWRLClassAtom arg0) {
    return null;
}

public String visit(SWRLDataRangeAtom arg0) {
    return null;
}

public String visit(SWRLObjectPropertyAtom arg0) {
    return null;
}

public String visit(SWRLDataPropertyAtom arg0) {
    return null;
}

public String visit(SWRLBuiltInAtom arg0) {
    return null;
}

public String visit(SWRLVariable arg0) {
    return null;
}

public String visit(SWRLIndividualArgument arg0) {
    return null;
}

public String visit(SWRLLiteralArgument arg0) {
    return null;
}

public String visit(SWRLSameIndividualAtom arg0) {
    return null;
}

public String visit(SWRLDifferentIndividualsAtom arg0) {
    return null;
}

/** Renders an ontology by its ID. */
public String visit(OWLOntology arg0) {
    return "ontology " + arg0.getOntologyID();
}
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver15;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// Immutable OpenFlow 1.5 (wire version 6) port-stats request message.
// Fixed wire layout, 24 bytes total: version, type(18), length, xid,
// statsType(4=PORT), flags, 4-byte pad, portNo, 4-byte pad.
// Generated by LoxiGen — do not hand-edit the logic.
class OFPortStatsRequestVer15 implements OFPortStatsRequest {
    private static final Logger logger = LoggerFactory.getLogger(OFPortStatsRequestVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    final static int LENGTH = 24;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsRequestFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsRequestFlags>of();
    private final static OFPort DEFAULT_PORT_NO = OFPort.ANY;

    // OF message fields
    private final long xid;
    private final Set<OFStatsRequestFlags> flags;
    private final OFPort portNo;
//
    // Immutable default instance
    final static OFPortStatsRequestVer15 DEFAULT = new OFPortStatsRequestVer15(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_PORT_NO
    );

    // package private constructor - used by readers, builders, and factory
    OFPortStatsRequestVer15(long xid, Set<OFStatsRequestFlags> flags, OFPort portNo) {
        if(flags == null) {
            throw new NullPointerException("OFPortStatsRequestVer15: property flags cannot be null");
        }
        if(portNo == null) {
            throw new NullPointerException("OFPortStatsRequestVer15: property portNo cannot be null");
        }
        // xid is normalized to the unsigned 32-bit range.
        this.xid = U32.normalize(xid);
        this.flags = flags;
        this.portNo = portNo;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REQUEST;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.PORT;
    }

    @Override
    public Set<OFStatsRequestFlags> getFlags() {
        return flags;
    }

    @Override
    public OFPort getPortNo() {
        return portNo;
    }

    // Builder seeded with this message's field values.
    public OFPortStatsRequest.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to a parent message for unset fields.
    static class BuilderWithParent implements OFPortStatsRequest.Builder {
        final OFPortStatsRequestVer15 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsRequestFlags> flags;
        private boolean portNoSet;
        private OFPort portNo;

        BuilderWithParent(OFPortStatsRequestVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REQUEST;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFPortStatsRequest.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.PORT;
        }

        @Override
        public Set<OFStatsRequestFlags> getFlags() {
            return flags;
        }

        @Override
        public OFPortStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public OFPort getPortNo() {
            return portNo;
        }

        @Override
        public OFPortStatsRequest.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }

        @Override
        public OFPortStatsRequest build() {
            // Each field: explicitly set value wins, otherwise inherit from parent.
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            OFPort portNo = this.portNoSet ? this.portNo : parentMessage.portNo;
            if(portNo == null)
                throw new NullPointerException("Property portNo must not be null");

            //
            return new OFPortStatsRequestVer15(
                    xid,
                    flags,
                    portNo
                );
        }
    }

    // Stand-alone builder; unset fields fall back to the class defaults.
    static class Builder implements OFPortStatsRequest.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsRequestFlags> flags;
        private boolean portNoSet;
        private OFPort portNo;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REQUEST;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFPortStatsRequest.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.PORT;
        }

        @Override
        public Set<OFStatsRequestFlags> getFlags() {
            return flags;
        }

        @Override
        public OFPortStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public OFPort getPortNo() {
            return portNo;
        }

        @Override
        public OFPortStatsRequest.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }

        //
        @Override
        public OFPortStatsRequest build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            OFPort portNo = this.portNoSet ? this.portNo : DEFAULT_PORT_NO;
            if(portNo == null)
                throw new NullPointerException("Property portNo must not be null");

            return new OFPortStatsRequestVer15(
                    xid,
                    flags,
                    portNo
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes one message from the buffer; returns null (with the reader
    // index rewound) when the buffer does not yet hold all 24 bytes.
    static class Reader implements OFMessageReader<OFPortStatsRequest> {
        @Override
        public OFPortStatsRequest readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 6
            byte version = bb.readByte();
            if(version != (byte) 0x6)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_15(6), got="+version);
            // fixed value property type == 18
            byte type = bb.readByte();
            if(type != (byte) 0x12)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REQUEST(18), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 24)
                throw new OFParseError("Wrong length: Expected=24(24), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 4
            short statsType = bb.readShort();
            if(statsType != (short) 0x4)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.PORT(4), got="+statsType);
            Set<OFStatsRequestFlags> flags = OFStatsRequestFlagsSerializerVer15.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            OFPort portNo = OFPort.read4Bytes(bb);
            // pad: 4 bytes
            bb.skipBytes(4);

            OFPortStatsRequestVer15 portStatsRequestVer15 = new OFPortStatsRequestVer15(
                    xid,
                    flags,
                    portNo
                );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", portStatsRequestVer15);
            return portStatsRequestVer15;
        }
    }

    // Feeds the message's canonical byte representation into a Guava hasher.
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFPortStatsRequestVer15Funnel FUNNEL = new OFPortStatsRequestVer15Funnel();

    static class OFPortStatsRequestVer15Funnel implements Funnel<OFPortStatsRequestVer15> {
        private static final long serialVersionUID = 1L;

        @Override
        public void funnel(OFPortStatsRequestVer15 message, PrimitiveSink sink) {
            // fixed value property version = 6
            sink.putByte((byte) 0x6);
            // fixed value property type = 18
            sink.putByte((byte) 0x12);
            // fixed value property length = 24
            sink.putShort((short) 0x18);
            sink.putLong(message.xid);
            // fixed value property statsType = 4
            sink.putShort((short) 0x4);
            OFStatsRequestFlagsSerializerVer15.putTo(message.flags, sink);
            // skip pad (4 bytes)
            message.portNo.putTo(sink);
            // skip pad (4 bytes)
        }
    }

    // Serializes this message to the wire (exact mirror of Reader.readFrom).
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFPortStatsRequestVer15> {
        @Override
        public void write(ByteBuf bb, OFPortStatsRequestVer15 message) {
            // fixed value property version = 6
            bb.writeByte((byte) 0x6);
            // fixed value property type = 18
            bb.writeByte((byte) 0x12);
            // fixed value property length = 24
            bb.writeShort((short) 0x18);
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 4
            bb.writeShort((short) 0x4);
            OFStatsRequestFlagsSerializerVer15.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            message.portNo.write4Bytes(bb);
            // pad: 4 bytes
            bb.writeZero(4);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFPortStatsRequestVer15(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("portNo=").append(portNo);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFPortStatsRequestVer15 other = (OFPortStatsRequestVer15) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (portNo == null) {
            if (other.portNo != null)
                return false;
        } else if (!portNo.equals(other.portNo))
            return false;
        return true;
    }

    // Equality that disregards the transaction id (xid).
    @Override
    public boolean equalsIgnoreXid(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFPortStatsRequestVer15 other = (OFPortStatsRequestVer15) obj;

        // ignore XID
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (portNo == null) {
            if (other.portNo != null)
                return false;
        } else if (!portNo.equals(other.portNo))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        // NOTE(review): the xid term is folded as `prime * (int)(...)` without
        // the running `result` — this is how LoxiGen emits long fields; it is
        // still consistent with equals(), so leave as generated.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((portNo == null) ? 0 : portNo.hashCode());
        return result;
    }

    // Hash consistent with equalsIgnoreXid().
    @Override
    public int hashCodeIgnoreXid() {
        final int prime = 31;
        int result = 1;

        // ignore XID
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((portNo == null) ? 0 : portNo.hashCode());
        return result;
    }

}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.cpp;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.packages.RuleErrorConsumer;
import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration;
import com.google.devtools.build.lib.rules.cpp.CcToolchainVariables.StringSequenceBuilder;
import com.google.devtools.build.lib.rules.cpp.CcToolchainVariables.VariablesExtension;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.List;
import java.util.Map;

/** Enum covering all build variables we create for all various {@link CppCompileAction}. */
public enum CompileBuildVariables {
  /** Variable for the path to the source file being compiled. */
  SOURCE_FILE("source_file"),
  /**
   * Variable for all flags coming from copt rule attribute, and from --copt, --cxxopt, or
   * --conlyopt options.
   */
  USER_COMPILE_FLAGS("user_compile_flags"),
  /** Variable for flags coming from unfiltered_cxx_flag CROSSTOOL fields. */
  UNFILTERED_COMPILE_FLAGS("unfiltered_compile_flags"),
  /** Variable for the path to the compilation output file. */
  OUTPUT_FILE("output_file"),
  /** Variable for the dependency file path */
  DEPENDENCY_FILE("dependency_file"),
  /** Variable for the module file name. */
  MODULE_NAME("module_name"),
  /**
   * Variable for the collection of include paths.
   *
   * @see CcCompilationContext#getIncludeDirs().
   */
  INCLUDE_PATHS("include_paths"),
  /**
   * Variable for the collection of quote include paths.
   *
   * @see CcCompilationContext#getQuoteIncludeDirs().
   */
  QUOTE_INCLUDE_PATHS("quote_include_paths"),
  /**
   * Variable for the collection of system include paths.
   *
   * @see CcCompilationContext#getSystemIncludeDirs().
   */
  SYSTEM_INCLUDE_PATHS("system_include_paths"),
  /**
   * Variable for the collection of framework include paths.
   *
   * @see CcCompilationContext#getFrameworkIncludeDirs().
   */
  FRAMEWORK_PATHS("framework_include_paths"),
  /** Variable for the module map file name. */
  MODULE_MAP_FILE("module_map_file"),
  /** Variable for the dependent module map file name. */
  DEPENDENT_MODULE_MAP_FILES("dependent_module_map_files"),
  /** Variable for the collection of module files. */
  MODULE_FILES("module_files"),
  /** Variable for the collection of macros defined for preprocessor. */
  PREPROCESSOR_DEFINES("preprocessor_defines"),
  /** Variable for the gcov coverage file path. */
  GCOV_GCNO_FILE("gcov_gcno_file"),
  /** Variable for the LTO indexing bitcode file. */
  LTO_INDEXING_BITCODE_FILE("lto_indexing_bitcode_file"),
  /** Variable marking fission is used. */
  IS_USING_FISSION("is_using_fission"),
  /** Variable for the per object debug info file. */
  PER_OBJECT_DEBUG_INFO_FILE("per_object_debug_info_file"),
  /** Variable present when the output is compiled as position independent. */
  PIC("pic"),
  /** Variable marking that we are generating preprocessed sources (from --save_temps). */
  OUTPUT_PREPROCESS_FILE("output_preprocess_file"),
  /** Variable marking that we are generating assembly source (from --save_temps). */
  OUTPUT_ASSEMBLY_FILE("output_assembly_file"),
  /** Path to the fdo instrument artifact */
  FDO_INSTRUMENT_PATH("fdo_instrument_path"),
  /** Path to the fdo profile artifact */
  FDO_PROFILE_PATH("fdo_profile_path"),
  /** Path to the context sensitive fdo instrument artifact */
  CS_FDO_INSTRUMENT_PATH("cs_fdo_instrument_path"),
  /** Path to the cache prefetch profile artifact */
  FDO_PREFETCH_HINTS_PATH("fdo_prefetch_hints_path"),
  /** Variable for includes that compiler needs to include into sources. */
  INCLUDES("includes");

  // The crosstool/feature-configuration variable name this enum value maps to.
  private final String variableName;

  CompileBuildVariables(String variableName) {
    this.variableName = variableName;
  }

  /**
   * Builds the compile variables, reporting any PIC-configuration error to the
   * given {@link RuleErrorConsumer} and returning an empty variable set on failure.
   */
  public static CcToolchainVariables setupVariablesOrReportRuleError(
      RuleErrorConsumer ruleErrorConsumer,
      FeatureConfiguration featureConfiguration,
      CcToolchainProvider ccToolchainProvider,
      BuildOptions buildOptions,
      CppConfiguration cppConfiguration,
      String sourceFile,
      String outputFile,
      String gcnoFile,
      boolean isUsingFission,
      String dwoFile,
      String ltoIndexingFile,
      ImmutableList<String> includes,
      Iterable<String> userCompileFlags,
      CppModuleMap cppModuleMap,
      boolean usePic,
      PathFragment fakeOutputFile,
      String fdoStamp,
      String dotdFileExecPath,
      ImmutableList<VariablesExtension> variablesExtensions,
      ImmutableMap<String, String> additionalBuildVariables,
      Iterable<Artifact> directModuleMaps,
      Iterable<PathFragment> includeDirs,
      Iterable<PathFragment> quoteIncludeDirs,
      Iterable<PathFragment> systemIncludeDirs,
      Iterable<PathFragment> frameworkIncludeDirs,
      Iterable<String> defines,
      Iterable<String> localDefines) {
    try {
      // Requesting PIC without toolchain support is a configuration error.
      if (usePic
          && !featureConfiguration.isEnabled(CppRuleClasses.PIC)
          && !featureConfiguration.isEnabled(CppRuleClasses.SUPPORTS_PIC)) {
        throw new EvalException(Location.BUILTIN, CcCommon.PIC_CONFIGURATION_ERROR);
      }
      return setupVariables(
          featureConfiguration,
          ccToolchainProvider.getBuildVariables(buildOptions, cppConfiguration),
          sourceFile,
          outputFile,
          gcnoFile,
          isUsingFission,
          dwoFile,
          ltoIndexingFile,
          includes,
          userCompileFlags,
          cppModuleMap,
          usePic,
          toPathString(fakeOutputFile),
          fdoStamp,
          dotdFileExecPath,
          variablesExtensions,
          additionalBuildVariables,
          directModuleMaps,
          getSafePathStrings(includeDirs),
          getSafePathStrings(quoteIncludeDirs),
          getSafePathStrings(systemIncludeDirs),
          getSafePathStrings(frameworkIncludeDirs),
          defines,
          localDefines);
    } catch (EvalException e) {
      ruleErrorConsumer.ruleError(e.getMessage());
      return CcToolchainVariables.EMPTY;
    }
  }

  /**
   * Same as {@link #setupVariablesOrReportRuleError} but propagates the
   * PIC-configuration failure as an {@link EvalException}; paths arrive
   * pre-stringified here.
   */
  public static CcToolchainVariables setupVariablesOrThrowEvalException(
      FeatureConfiguration featureConfiguration,
      CcToolchainProvider ccToolchainProvider,
      BuildOptions buildOptions,
      CppConfiguration cppConfiguration,
      String sourceFile,
      String outputFile,
      String gcnoFile,
      boolean isUsingFission,
      String dwoFile,
      String ltoIndexingFile,
      ImmutableList<String> includes,
      Iterable<String> userCompileFlags,
      CppModuleMap cppModuleMap,
      boolean usePic,
      String fakeOutputFile,
      String fdoStamp,
      String dotdFileExecPath,
      ImmutableList<VariablesExtension> variablesExtensions,
      ImmutableMap<String, String> additionalBuildVariables,
      Iterable<Artifact> directModuleMaps,
      Iterable<String> includeDirs,
      Iterable<String> quoteIncludeDirs,
      Iterable<String> systemIncludeDirs,
      Iterable<String> frameworkIncludeDirs,
      Iterable<String> defines,
      Iterable<String> localDefines)
      throws EvalException {
    if (usePic
        && !featureConfiguration.isEnabled(CppRuleClasses.PIC)
        && !featureConfiguration.isEnabled(CppRuleClasses.SUPPORTS_PIC)) {
      throw new EvalException(Location.BUILTIN, CcCommon.PIC_CONFIGURATION_ERROR);
    }
    return setupVariables(
        featureConfiguration,
        ccToolchainProvider.getBuildVariables(buildOptions, cppConfiguration),
        sourceFile,
        outputFile,
        gcnoFile,
        isUsingFission,
        dwoFile,
        ltoIndexingFile,
        includes,
        userCompileFlags,
        cppModuleMap,
        usePic,
        fakeOutputFile,
        fdoStamp,
        dotdFileExecPath,
        variablesExtensions,
        additionalBuildVariables,
        directModuleMaps,
        includeDirs,
        quoteIncludeDirs,
        systemIncludeDirs,
        frameworkIncludeDirs,
        defines,
        localDefines);
  }

  // Shared core: layers the common (per-target) variables and then the
  // per-compile-action variables on top of the toolchain-provided parent set.
  private static CcToolchainVariables setupVariables(
      FeatureConfiguration featureConfiguration,
      CcToolchainVariables parent,
      String sourceFile,
      String outputFile,
      String gcnoFile,
      boolean isUsingFission,
      String dwoFile,
      String ltoIndexingFile,
      ImmutableList<String> includes,
      Iterable<String> userCompileFlags,
      CppModuleMap cppModuleMap,
      boolean usePic,
      String fakeOutputFile,
      String fdoStamp,
      String dotdFileExecPath,
      ImmutableList<VariablesExtension> variablesExtensions,
      ImmutableMap<String, String> additionalBuildVariables,
      Iterable<Artifact> directModuleMaps,
      Iterable<String> includeDirs,
      Iterable<String> quoteIncludeDirs,
      Iterable<String> systemIncludeDirs,
      Iterable<String> frameworkIncludeDirs,
      Iterable<String> defines,
      Iterable<String> localDefines) {
    CcToolchainVariables.Builder buildVariables = CcToolchainVariables.builder(parent);
    setupCommonVariablesInternal(
        buildVariables,
        featureConfiguration,
        includes,
        cppModuleMap,
        fdoStamp,
        variablesExtensions,
        additionalBuildVariables,
        directModuleMaps,
        includeDirs,
        quoteIncludeDirs,
        systemIncludeDirs,
        frameworkIncludeDirs,
        defines,
        localDefines);
    setupSpecificVariables(
        buildVariables,
        sourceFile,
        outputFile,
        gcnoFile,
        dwoFile,
        isUsingFission,
        ltoIndexingFile,
        userCompileFlags,
        fakeOutputFile,
        dotdFileExecPath,
        usePic,
        ImmutableMap.of());
    return buildVariables.build();
  }

  /**
   * Adds the variables that differ per individual compile action (source path,
   * output path, coverage/LTO/fission outputs, PIC flag, user copts).
   * Null-valued paths simply leave the corresponding variable unset.
   */
  public static void setupSpecificVariables(
      CcToolchainVariables.Builder buildVariables,
      String sourceFile,
      String outputFile,
      String gcnoFile,
      String dwoFile,
      boolean isUsingFission,
      String ltoIndexingFile,
      Iterable<String> userCompileFlags,
      String fakeOutputFile,
      String dotdFileExecPath,
      boolean usePic,
      Map<String, String> additionalBuildVariables) {
    buildVariables.addStringSequenceVariable(
        USER_COMPILE_FLAGS.getVariableName(), userCompileFlags);

    if (sourceFile != null) {
      buildVariables.addStringVariable(SOURCE_FILE.getVariableName(), sourceFile);
    }

    // A fake output path (--save_temps style) overrides the real one when present.
    String fakeOutputFileOrRealOutputFile = fakeOutputFile != null ? fakeOutputFile : outputFile;

    if (outputFile != null) {
      buildVariables.addStringVariable(
          OUTPUT_FILE.getVariableName(), fakeOutputFileOrRealOutputFile);
    }

    // Set dependency_file to enable <object>.d file generation.
    if (dotdFileExecPath != null) {
      buildVariables.addStringVariable(DEPENDENCY_FILE.getVariableName(), dotdFileExecPath);
    }

    if (gcnoFile != null) {
      buildVariables.addStringVariable(GCOV_GCNO_FILE.getVariableName(), gcnoFile);
    }

    if (dwoFile != null) {
      buildVariables.addStringVariable(PER_OBJECT_DEBUG_INFO_FILE.getVariableName(), dwoFile);
    }

    // Boolean-style variables: presence (with empty value) means "true".
    if (isUsingFission) {
      buildVariables.addStringVariable(IS_USING_FISSION.getVariableName(), "");
    }

    if (ltoIndexingFile != null) {
      buildVariables.addStringVariable(
          LTO_INDEXING_BITCODE_FILE.getVariableName(), ltoIndexingFile);
    }
    if (usePic) {
      buildVariables.addStringVariable(PIC.getVariableName(), "");
    }

    buildVariables.addAllStringVariables(additionalBuildVariables);
  }

  /**
   * Public entry point for the common (per-target) variables; converts the
   * {@link PathFragment} directory collections to safe strings and delegates.
   */
  public static void setupCommonVariables(
      CcToolchainVariables.Builder buildVariables,
      FeatureConfiguration featureConfiguration,
      List<String> includes,
      CppModuleMap cppModuleMap,
      String fdoStamp,
      List<VariablesExtension> variablesExtensions,
      Map<String, String> additionalBuildVariables,
      Iterable<Artifact> directModuleMaps,
      Iterable<PathFragment> includeDirs,
      Iterable<PathFragment> quoteIncludeDirs,
      Iterable<PathFragment> systemIncludeDirs,
      Iterable<PathFragment> frameworkIncludeDirs,
      Iterable<String> defines,
      Iterable<String> localDefines) {
    setupCommonVariablesInternal(
        buildVariables,
        featureConfiguration,
        includes,
        cppModuleMap,
        fdoStamp,
        variablesExtensions,
        additionalBuildVariables,
        directModuleMaps,
        getSafePathStrings(includeDirs),
        getSafePathStrings(quoteIncludeDirs),
        getSafePathStrings(systemIncludeDirs),
        getSafePathStrings(frameworkIncludeDirs),
        defines,
        localDefines);
  }

  // (continues past this chunk: setupCommonVariablesInternal and remaining helpers)
  private static void
setupCommonVariablesInternal( CcToolchainVariables.Builder buildVariables, FeatureConfiguration featureConfiguration, List<String> includes, CppModuleMap cppModuleMap, String fdoStamp, List<VariablesExtension> variablesExtensions, Map<String, String> additionalBuildVariables, Iterable<Artifact> directModuleMaps, Iterable<String> includeDirs, Iterable<String> quoteIncludeDirs, Iterable<String> systemIncludeDirs, Iterable<String> frameworkIncludeDirs, Iterable<String> defines, Iterable<String> localDefines) { Preconditions.checkNotNull(directModuleMaps); Preconditions.checkNotNull(includeDirs); Preconditions.checkNotNull(quoteIncludeDirs); Preconditions.checkNotNull(systemIncludeDirs); Preconditions.checkNotNull(frameworkIncludeDirs); Preconditions.checkNotNull(defines); Preconditions.checkNotNull(localDefines); if (featureConfiguration.isEnabled(CppRuleClasses.MODULE_MAPS) && cppModuleMap != null) { // If the feature is enabled and cppModuleMap is null, we are about to fail during analysis // in any case, but don't crash. buildVariables.addStringVariable(MODULE_NAME.getVariableName(), cppModuleMap.getName()); buildVariables.addStringVariable( MODULE_MAP_FILE.getVariableName(), cppModuleMap.getArtifact().getExecPathString()); StringSequenceBuilder sequence = new StringSequenceBuilder(); for (Artifact artifact : directModuleMaps) { sequence.addValue(artifact.getExecPathString()); } buildVariables.addCustomBuiltVariable(DEPENDENT_MODULE_MAP_FILES.getVariableName(), sequence); } if (featureConfiguration.isEnabled(CppRuleClasses.USE_HEADER_MODULES)) { // Module inputs will be set later when the action is executed. 
buildVariables.addStringSequenceVariable(MODULE_FILES.getVariableName(), ImmutableSet.of()); } buildVariables.addStringSequenceVariable(INCLUDE_PATHS.getVariableName(), includeDirs); buildVariables.addStringSequenceVariable( QUOTE_INCLUDE_PATHS.getVariableName(), quoteIncludeDirs); buildVariables.addStringSequenceVariable( SYSTEM_INCLUDE_PATHS.getVariableName(), systemIncludeDirs); if (!includes.isEmpty()) { buildVariables.addStringSequenceVariable(INCLUDES.getVariableName(), includes); } buildVariables.addStringSequenceVariable( FRAMEWORK_PATHS.getVariableName(), frameworkIncludeDirs); Iterable<String> allDefines; if (fdoStamp != null) { // Stamp FDO builds with FDO subtype string allDefines = ImmutableList.<String>builder() .addAll(defines) .addAll(localDefines) .add(CppConfiguration.FDO_STAMP_MACRO + "=\"" + fdoStamp + "\"") .build(); } else { allDefines = Iterables.concat(defines, localDefines); } buildVariables.addStringSequenceVariable(PREPROCESSOR_DEFINES.getVariableName(), allDefines); buildVariables.addAllStringVariables(additionalBuildVariables); for (VariablesExtension extension : variablesExtensions) { extension.addVariables(buildVariables); } } /** Get the safe path strings for a list of paths to use in the build variables. */ private static ImmutableList<String> getSafePathStrings(Iterable<PathFragment> paths) { // Using ImmutableSet first to remove duplicates, then ImmutableList for smaller memory // footprint. return ImmutableSet.copyOf(paths) .stream() .map(PathFragment::getSafePathString) .collect(ImmutableList.toImmutableList()); } private static String toPathString(PathFragment a) { return a == null ? null : a.getSafePathString(); } public String getVariableName() { return variableName; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.util;

import org.apache.calcite.util.mapping.IntPair;
import org.apache.calcite.util.mapping.Mapping;
import org.apache.calcite.util.mapping.MappingType;
import org.apache.calcite.util.mapping.Mappings;

import java.util.Arrays;
import java.util.Iterator;

/**
 * Represents a mapping which reorders elements in an array.
 *
 * <p>The permutation is stored redundantly as two arrays: {@code targets[i]}
 * is the position that source {@code i} maps to, and {@code sources[j]} is the
 * position that maps to target {@code j}. The two arrays are kept mutually
 * inverse at all times.
 */
public class Permutation implements Mapping, Mappings.TargetMapping {
  //~ Instance fields --------------------------------------------------------

  private int[] targets;
  private int[] sources;

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a permutation of a given size.
   *
   * <p>It is initialized to the identity permutation, such as "[0, 1, 2, 3]".
   *
   * @param size Number of elements in the permutation
   */
  public Permutation(int size) {
    targets = new int[size];
    sources = new int[size];

    // Initialize to identity.
    identity();
  }

  /**
   * Creates a permutation from an array.
   *
   * @param targets Array of targets
   * @throws IllegalArgumentException       if elements of array are not unique
   * @throws ArrayIndexOutOfBoundsException if elements of array are not
   *                                        between 0 through targets.length - 1
   *                                        inclusive
   */
  public Permutation(int[] targets) {
    this.targets = targets.clone();
    this.sources = new int[targets.length];
    // -1 marks a target not yet claimed by any source, so duplicates can be
    // detected below.
    Arrays.fill(sources, -1);
    for (int i = 0; i < targets.length; i++) {
      int target = targets[i];
      if (target < 0 || target >= sources.length) {
        throw new IllegalArgumentException("target out of range");
      }
      if (sources[target] != -1) {
        throw new IllegalArgumentException(
            "more than one permutation element maps to position " + target);
      }
      sources[target] = i;
    }
    assert isValid(true);
  }

  /**
   * Creates a permutation. Arrays are not copied, and are assumed to be valid
   * permutations.
   */
  private Permutation(int[] targets, int[] sources) {
    this.targets = targets;
    this.sources = sources;
    assert isValid(true);
  }

  //~ Methods ----------------------------------------------------------------

  public Object clone() {
    return new Permutation(
        targets.clone(),
        sources.clone());
  }

  /**
   * Initializes this permutation to the identity permutation.
   */
  public void identity() {
    for (int i = 0; i < targets.length; i++) {
      targets[i] = sources[i] = i;
    }
  }

  /**
   * Returns the number of elements in this permutation.
   */
  public final int size() {
    return targets.length;
  }

  public void clear() {
    throw new UnsupportedOperationException(
        "Cannot clear: permutation must always contain one mapping per element");
  }

  /**
   * Returns a string representation of this permutation.
   *
   * <p>For example, the mapping {0&rarr;2, 1&rarr;0, 2&rarr;1, 3&rarr;3}
   * is represented by the string "[2, 0, 1, 3]".
   */
  public String toString() {
    StringBuilder buf = new StringBuilder();
    buf.append("[");
    for (int i = 0; i < targets.length; i++) {
      if (i > 0) {
        buf.append(", ");
      }
      buf.append(targets[i]);
    }
    buf.append("]");
    return buf.toString();
  }

  /**
   * Maps source position to target position.
   *
   * <p>To preserve the 1:1 nature of the permutation, the previous target of
   * source becomes the new target of the previous source.
   *
   * <p>For example, given the permutation
   *
   * <blockquote><pre>[3, 2, 0, 1]</pre></blockquote>
   *
   * <p>suppose we map position 2 to target 1. Position 2 currently has target
   * 0, and the source of position 1 is position 3. We preserve the permutation
   * property by mapping the previous source 3 to the previous target 0. The new
   * permutation is
   *
   * <blockquote><pre>[3, 2, 1, 0].</pre></blockquote>
   *
   * <p>Another example. Again starting from
   *
   * <blockquote><pre>[3, 2, 0, 1]</pre></blockquote>
   *
   * <p>suppose we map position 2 to target 3. We map the previous source 0 to
   * the previous target 0, which gives
   *
   * <blockquote><pre>[0, 2, 3, 1].</pre></blockquote>
   *
   * @param source Source position
   * @param target Target position
   * @throws ArrayIndexOutOfBoundsException if source or target is negative or
   *                                        greater than or equal to the size of
   *                                        the permutation
   */
  public void set(int source, int target) {
    set(source, target, false);
  }

  /**
   * Maps source position to target position, automatically resizing if source
   * or target is out of bounds.
   *
   * <p>To preserve the 1:1 nature of the permutation, the previous target of
   * source becomes the new target of the previous source; see
   * {@link #set(int, int)} for worked examples.
   *
   * @param source      Source position
   * @param target      Target position
   * @param allowResize Whether to resize the permutation if the source or
   *                    target is greater than the current capacity
   * @throws ArrayIndexOutOfBoundsException if source or target is negative,
   *                                        or greater than or equal to the size
   *                                        of the permutation, and
   *                                        <code>allowResize</code> is false
   */
  public void set(int source, int target, boolean allowResize) {
    final int maxSourceTarget = Math.max(source, target);
    if (maxSourceTarget >= sources.length) {
      if (allowResize) {
        resize(maxSourceTarget + 1);
      } else {
        throw new ArrayIndexOutOfBoundsException(maxSourceTarget);
      }
    }
    int prevTarget = targets[source];
    assert sources[prevTarget] == source;
    int prevSource = sources[target];
    assert targets[prevSource] == target;
    setInternal(source, target);

    // To balance things up, make the previous source reference the
    // previous target. This ensures that each ordinal occurs precisely
    // once in the sources array and the targets array.
    setInternal(prevSource, prevTarget);

    // For example:
    // Before: [2, 1, 0, 3]
    // Now we set(source=1, target=0)
    //  previous target of source (1) was 1, is now 0
    //  previous source of target (0) was 2, is now 1
    //  something now has to have target 1 -- use previous source
    // After:  [2, 0, 1, 3]
  }

  /**
   * Inserts into the targets.
   *
   * <p>For example, consider the permutation</p>
   *
   * <table border="1">
   * <caption>Example permutation</caption>
   * <tr><td>source</td><td>0</td><td>1</td><td>2</td><td>3</td><td>4</td></tr>
   * <tr><td>target</td><td>3</td><td>0</td><td>4</td><td>2</td><td>1</td></tr>
   * </table>
   *
   * <p>After applying <code>insertTarget(2)</code> every target 2 or higher is
   * shifted up one.</p>
   *
   * <table border="1">
   * <caption>Mapping after applying insertTarget(2)</caption>
   * <tr><td>source</td><td>0</td><td>1</td><td>2</td><td>3</td><td>4</td>
   * <td>5</td></tr>
   * <tr><td>target</td><td>4</td><td>0</td><td>5</td><td>3</td><td>1</td>
   * <td>2</td></tr>
   * </table>
   *
   * <p>Note that the array has been extended to accommodate the new target, and
   * the previously unmapped source 5 is mapped to the unused target slot 2.</p>
   *
   * @param x Ordinal of position to add to target
   */
  public void insertTarget(int x) {
    assert isValid(true);
    resize(sources.length + 1);

    // Shuffle sources up.
    shuffleUp(sources, x);

    // Shuffle targets.
    increment(x, targets);

    assert isValid(true);
  }

  /**
   * Inserts into the sources.
   *
   * <p>Behavior is analogous to {@link #insertTarget(int)}.</p>
   *
   * @param x Ordinal of position to add to source
   */
  public void insertSource(int x) {
    assert isValid(true);
    resize(targets.length + 1);

    // Shuffle targets up.
    shuffleUp(targets, x);

    // Increment sources.
    increment(x, sources);

    assert isValid(true);
  }

  /**
   * Remaps the entries of the given array after a {@link #resize}: the slot
   * that held {@code size - 1} (the identity value added by resize) is given
   * the freed-up value {@code x}, and every entry {@code >= x} is shifted up
   * by one to make room.
   *
   * <p>BUG FIX: the loop previously read and wrote {@code targets} regardless
   * of which array was passed in, so {@code insertSource} (which passes
   * {@code sources}) corrupted {@code targets} and left {@code sources}
   * untouched. It now operates on the {@code zzz} parameter, as intended.
   */
  private void increment(int x, int[] zzz) {
    final int size = zzz.length;
    for (int i = 0; i < size; i++) {
      if (zzz[i] == (size - 1)) {
        zzz[i] = x;
      } else if (zzz[i] >= x) {
        ++zzz[i];
      }
    }
  }

  /**
   * Rotates the last element of {@code zz} into position {@code x}, shifting
   * the elements {@code zz[x .. size-2]} up by one.
   */
  private void shuffleUp(final int[] zz, int x) {
    final int size = zz.length;
    int t = zz[size - 1];
    System.arraycopy(zz, x, zz, x + 1, size - 1 - x);
    zz[x] = t;
  }

  /**
   * Grows both arrays to {@code newSize}, mapping each new position to itself
   * (identity) so the permutation remains valid.
   */
  private void resize(int newSize) {
    assert isValid(true);
    final int size = targets.length;
    int[] newTargets = new int[newSize];
    System.arraycopy(targets, 0, newTargets, 0, size);
    int[] newSources = new int[newSize];
    System.arraycopy(sources, 0, newSources, 0, size);

    // Initialize the new elements to the identity mapping.
    for (int i = size; i < newSize; i++) {
      newSources[i] = i;
      newTargets[i] = i;
    }
    targets = newTargets;
    sources = newSources;
    assert isValid(true);
  }

  /** Records the pair (source, target) in both directions; does not rebalance. */
  private void setInternal(int source, int target) {
    targets[source] = target;
    sources[target] = source;
  }

  /**
   * Returns the inverse permutation.
   */
  public Permutation inverse() {
    // Swapping the two arrays inverts the mapping.
    return new Permutation(
        sources.clone(),
        targets.clone());
  }

  /**
   * Returns whether this is the identity permutation.
   */
  public boolean isIdentity() {
    for (int i = 0; i < targets.length; i++) {
      if (targets[i] != i) {
        return false;
      }
    }
    return true;
  }

  /**
   * Returns the position that <code>source</code> is mapped to.
   */
  public int getTarget(int source) {
    try {
      return targets[source];
    } catch (ArrayIndexOutOfBoundsException e) {
      throw new Mappings.NoElementException("invalid source " + source);
    }
  }

  /**
   * Returns the position which maps to <code>target</code>.
   */
  public int getSource(int target) {
    try {
      return sources[target];
    } catch (ArrayIndexOutOfBoundsException e) {
      throw new Mappings.NoElementException("invalid target " + target);
    }
  }

  /**
   * Checks whether this permutation is valid.
   *
   * @param fail Whether to assert if invalid
   * @return Whether valid
   */
  private boolean isValid(boolean fail) {
    final int size = targets.length;
    if (sources.length != size) {
      assert !fail : "different lengths";
      return false;
    }

    // Every element in sources has corresponding element in targets.
    int[] occurCount = new int[size];
    for (int i = 0; i < size; i++) {
      int target = targets[i];
      if (sources[target] != i) {
        assert !fail
            : "source[" + target + "] = " + sources[target]
            + ", should be " + i;
        return false;
      }
      int source = sources[i];
      if (targets[source] != i) {
        assert !fail
            : "target[" + source + "] = " + targets[source]
            + ", should be " + i;
        return false;
      }

      // Every member should occur once.
      if (occurCount[target] != 0) {
        assert !fail : "target " + target + " occurs more than once";
        return false;
      }
      occurCount[target]++;
    }
    return true;
  }

  public int hashCode() {
    // The targets array fully determines the permutation (and its string
    // form), so hashing it directly is consistent with equals and avoids
    // building a string. (Previously: toString().hashCode().)
    return Arrays.hashCode(targets);
  }

  public boolean equals(Object obj) {
    // Equivalent to comparing toString() values, since toString() is a pure
    // function of the targets array, but without allocating strings.
    return (obj instanceof Permutation)
        && Arrays.equals(targets, ((Permutation) obj).targets);
  }

  // implement Mapping
  public Iterator<IntPair> iterator() {
    return new Iterator<IntPair>() {
      private int i = 0;

      public boolean hasNext() {
        return i < targets.length;
      }

      public IntPair next() {
        final IntPair pair = new IntPair(i, targets[i]);
        ++i;
        return pair;
      }

      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  public int getSourceCount() {
    return targets.length;
  }

  public int getTargetCount() {
    return targets.length;
  }

  public MappingType getMappingType() {
    return MappingType.BIJECTION;
  }

  public int getTargetOpt(int source) {
    return getTarget(source);
  }

  public int getSourceOpt(int target) {
    return getSource(target);
  }

  public void setAll(Mapping mapping) {
    for (IntPair pair : mapping) {
      set(pair.source, pair.target);
    }
  }

  /**
   * Returns the product of this Permutation with a given Permutation. Does
   * not modify this Permutation or <code>permutation</code>.
   *
   * <p>For example, perm.product(perm.inverse()) yields the identity.
   */
  public Permutation product(Permutation permutation) {
    Permutation product = new Permutation(sources.length);
    for (int i = 0; i < targets.length; ++i) {
      product.set(i, permutation.getTarget(targets[i]));
    }
    return product;
  }
}

// End Permutation.java
/**
 * Copyright 2011 multibit.org
 *
 * Licensed under the MIT license (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://opensource.org/licenses/mit-license.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.multibit.network;

import com.google.bitcoin.core.*;
import com.google.bitcoin.core.Wallet.SendRequest;
import com.google.bitcoin.crypto.KeyCrypterException;
import com.google.bitcoin.discovery.DnsDiscovery;
import com.google.bitcoin.discovery.IrcDiscovery;
import com.google.bitcoin.store.BlockStore;
import com.google.bitcoin.store.BlockStoreException;
import com.google.bitcoin.store.SPVBlockStore;
import org.bitcoinj.wallet.Protos.Wallet.EncryptionType;
import org.multibit.ApplicationDataDirectoryLocator;
import org.multibit.MultiBit;
import org.multibit.controller.Controller;
import org.multibit.controller.bitcoin.BitcoinController;
import org.multibit.file.BackupManager;
import org.multibit.file.FileHandlerException;
import org.multibit.file.WalletSaveException;
import org.multibit.message.Message;
import org.multibit.message.MessageManager;
import org.multibit.model.bitcoin.BitcoinModel;
import org.multibit.model.bitcoin.WalletData;
import org.multibit.model.bitcoin.WalletInfoData;
import org.multibit.model.core.StatusEnum;
import org.multibit.store.MultiBitWalletVersion;
import org.multibit.store.WalletVersionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.crypto.params.KeyParameter;

import javax.swing.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.security.SecureRandom;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * <p>
 * MultiBitService encapsulates the interaction with the bitcoin network
 * including: o Peers o Block chain download o sending / receiving bitcoins
 *
 * The testnet can be slow or flaky as it's a shared resource. You can use the
 * <a href="http://sourceforge.net/projects/bitcoin/files/Bitcoin/testnet-in-a-box/">testnet in a box</a>
 * to do everything purely locally.
 * </p>
 */
public class MultiBitService {

    // Genesis block hash used to recognize the testnet3 network.
    private static final String TESTNET3_GENESIS_HASH = "000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943";

    private static final Logger log = LoggerFactory.getLogger(MultiBitService.class);

    public static final int MAXIMUM_EXPECTED_LENGTH_OF_ALTERNATE_CHAIN = 6;

    // File-name prefixes for the different networks.
    public static final String MULTIBIT_PREFIX = "multibit";
    public static final String TESTNET_PREFIX = "testnet";
    public static final String TESTNET3_PREFIX = "testnet3";
    public static final String SEPARATOR = "-";

    // File-name suffixes for the persisted artifacts.
    public static final String BLOCKCHAIN_SUFFIX = ".blockchain";
    public static final String SPV_BLOCKCHAIN_SUFFIX = ".spvchain";
    public static final String CHECKPOINTS_SUFFIX = ".checkpoints";
    public static final String WALLET_SUFFIX = ".wallet";

    // IRC channels used for peer discovery on the test networks.
    public static final String IRC_CHANNEL_TEST = "#bitcoinTEST";
    public static final String IRC_CHANNEL_TESTNET3 = "#bitcoinTEST3";

    static boolean restartListenerHasBeenAddedToPeerGroup = false;

    // NOTE(review): instance logger duplicates the static 'log' above; kept for
    // interface compatibility as it is public.
    public Logger logger = LoggerFactory.getLogger(MultiBitService.class.getName());

    private MultiBitPeerGroup peerGroup;

    private String blockchainFilename;

    private MultiBitBlockChain blockChain;

    private BlockStore blockStore;

    private final Controller controller;
    private final BitcoinController bitcoinController;

    private final NetworkParameters networkParameters;

    private SecureRandom secureRandom = new SecureRandom();

    private MultiBitCheckpointManager checkpointManager;
    private String checkpointsFilename;

    // Creation time of the genesis block (2009-01-03 18:15:05 GMT), parsed once
    // in the static initializer below.
    public static Date genesisBlockCreationDate;

    static {
        try {
            java.text.SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            java.util.Calendar cal = Calendar.getInstance(new SimpleTimeZone(0, "GMT"));
            format.setCalendar(cal);
            genesisBlockCreationDate = format.parse("2009-01-03 18:15:05");
        } catch (ParseException e) {
            // Will never happen - the pattern and the literal date are fixed.
            e.printStackTrace();
        }
    }

    /**
     * Creates the service: opens (or creates) the SPV block store, builds the
     * block chain and peer group, starts the peer group, and finally loads the
     * checkpoint manager from the checkpoints file located by
     * {@link #createBlockStore}.
     *
     * @param bitcoinController BitcoinController
     * @throws IllegalStateException if the controller or any of its required
     *                               collaborators (model, data directory
     *                               locator, file handler) is null
     */
    public MultiBitService(BitcoinController bitcoinController) {
        this.bitcoinController = bitcoinController;
        this.controller = this.bitcoinController;

        if (controller == null) {
            throw new IllegalStateException("controller cannot be null");
        }

        if (controller.getModel() == null) {
            throw new IllegalStateException("controller.getModel() cannot be null");
        }

        if (controller.getApplicationDataDirectoryLocator() == null) {
            throw new IllegalStateException("controller.getApplicationDataDirectoryLocator() cannot be null");
        }

        if (this.bitcoinController.getFileHandler() == null) {
            throw new IllegalStateException("controller.getFileHandler() cannot be null");
        }

        networkParameters = this.bitcoinController.getModel().getNetworkParameters();
        log.debug("Network parameters = " + networkParameters);

        try {
            // Load or create the blockStore..
            log.debug("Loading/ creating blockstore ...");
            blockStore = createBlockStore(null, false);
            log.debug("Blockstore is '" + blockStore + "'");

            log.debug("Creating blockchain ...");
            blockChain = new MultiBitBlockChain(networkParameters, blockStore);
            log.debug("Created blockchain '" + blockChain + "' with height " + blockChain.getBestChainHeight());

            log.debug("Creating peergroup ...");
            createNewPeerGroup();
            log.debug("Created peergroup '" + peerGroup + "'");

            log.debug("Starting peergroup ...");
            peerGroup.start();
            log.debug("Started peergroup.");
        } catch (BlockStoreException e) {
            handleError(e);
        } catch (FileHandlerException e) {
            handleError(e);
        } catch (Exception e) {
            // Any startup failure is reported to the user via handleError; the
            // service object is still constructed.
            handleError(e);
        }

        // Load the checkpoint manager from the checkpoints file chosen by
        // createBlockStore above (checkpointsFilename is set there).
        FileInputStream stream = null;

        try {
            stream = new FileInputStream(checkpointsFilename);
            checkpointManager = new MultiBitCheckpointManager(networkParameters, stream);
        } catch (IOException e) {
            log.error("Error creating checkpointManager " + e.getClass().getName() + " " + e.getMessage());
        } finally {
            if (stream != null) {
                try {
                    stream.close();
                } catch (IOException e) {
                    log.error("Error tidying up checkpointManager creation" + e.getClass().getName() + " " + e.getMessage());
                }
            }
        }
    }

    /**
     * Reports a startup failure: sets the online status to ERROR and posts a
     * localized "could not load blockchain" message to the message manager.
     */
    private void handleError(Exception e) {
        controller.setOnlineStatus(StatusEnum.ERROR);
        MessageManager.INSTANCE.addMessage(new Message(controller.getLocaliser().getString(
                "multiBitService.couldNotLoadBlockchain",
                new Object[] { blockchainFilename, e.getClass().getName() + " " + e.getMessage() })));
        log.error("Error creating MultiBitService " + e.getClass().getName() + " " + e.getMessage());
    }

    /**
     * Opens (or creates) the SPV block store.
     *
     * <p>Side effects: sets {@link #blockchainFilename} and
     * {@link #checkpointsFilename}; may copy/choose a checkpoints file (the
     * larger of the installed and user-data copies is used, larger = more
     * recent); deletes and recreates the block store file when
     * {@code createNew} is set, when the file is empty, or when opening fails
     * the first time. When a brand-new store is created (or
     * {@code checkpointDate} is given for a replay) it is primed from the
     * checkpoints file.
     *
     * @param checkpointDate when non-null, checkpoint the new store from this
     *                       date (block replay); when null, a newly created
     *                       store is checkpointed from "now"
     * @param createNew      force deletion and recreation of the store file
     * @return the opened block store
     * @throws BlockStoreException if the store cannot be opened even after the
     *                             delete-and-retry fallback
     * @throws IOException         if reading the checkpoints stream fails
     */
    private BlockStore createBlockStore(Date checkpointDate, boolean createNew) throws BlockStoreException, IOException {
        // Local variable intentionally shadows the field; the caller assigns
        // the returned store to the field.
        BlockStore blockStore = null;

        String filePrefix = getFilePrefix();
        log.debug("filePrefix = " + filePrefix);

        if ("".equals(controller.getApplicationDataDirectoryLocator().getApplicationDataDirectory())) {
            blockchainFilename = filePrefix + SPV_BLOCKCHAIN_SUFFIX;
            checkpointsFilename = filePrefix + CHECKPOINTS_SUFFIX;
        } else {
            blockchainFilename = controller.getApplicationDataDirectoryLocator().getApplicationDataDirectory() + File.separator
                    + filePrefix + SPV_BLOCKCHAIN_SUFFIX;
            checkpointsFilename = controller.getApplicationDataDirectoryLocator().getApplicationDataDirectory() + File.separator
                    + filePrefix + CHECKPOINTS_SUFFIX;
        }

        File blockStoreFile = new File(blockchainFilename);
        boolean blockStoreCreatedNew = !blockStoreFile.exists();

        // Ensure there is a checkpoints file.
        File checkpointsFile = new File(checkpointsFilename);
        if (!checkpointsFile.exists()) {
            bitcoinController.getFileHandler().copyCheckpointsFromInstallationDirectory(checkpointsFilename);
        }

        // Use the larger of the installed checkpoints file and the user data checkpoint file (larger = more recent).
        ApplicationDataDirectoryLocator applicationDataDirectoryLocator = new ApplicationDataDirectoryLocator();
        String installedCheckpointsFilename = applicationDataDirectoryLocator.getInstallationDirectory() + File.separator
                + MultiBitService.getFilePrefix() + MultiBitService.CHECKPOINTS_SUFFIX;
        log.debug("Installed checkpoints file = '" + installedCheckpointsFilename + "'.");

        File installedCheckpointsFile = new File(installedCheckpointsFilename);
        long sizeOfUserDataCheckpointsFile = 0;
        if (checkpointsFile.exists()) {
            sizeOfUserDataCheckpointsFile = checkpointsFile.length();
        }
        if (installedCheckpointsFile.exists() && installedCheckpointsFile.length() > sizeOfUserDataCheckpointsFile) {
            // The installed checkpoints file is longer (more checkpoints) so use that.
            checkpointsFilename = installedCheckpointsFilename;
            checkpointsFile = installedCheckpointsFile;
            log.debug("Using installed checkpoints file as it is longer than user data checkpoints - "
                    + installedCheckpointsFile.length() + " bytes versus " + sizeOfUserDataCheckpointsFile + " bytes.");
        } else {
            log.debug("Using user data checkpoints file as it is longer/same size as installed checkpoints - "
                    + sizeOfUserDataCheckpointsFile + " bytes versus " + installedCheckpointsFile.length() + " bytes.");
        }

        // If the spvBlockStore is to be created new
        // or its size is 0 bytes delete the file so that it is recreated fresh (fix for issue 165).
        if (createNew || blockStoreFile.length() == 0) {
            // Garbage collect any closed references to the blockchainFile.
            System.gc();
            blockStoreFile.setWritable(true);
            boolean deletedOk = blockStoreFile.delete();
            log.debug("Deleting SPV block store '{}' from disk.1", blockchainFilename + ", deletedOk = " + deletedOk);
            blockStoreCreatedNew = true;
        }

        log.debug("Opening / Creating SPV block store '{}' from disk", blockchainFilename);
        try {
            blockStore = new SPVBlockStore(networkParameters, blockStoreFile);
        } catch (BlockStoreException bse) {
            try {
                log.error("Failed to open/ create SPV block store '{}' from disk", blockchainFilename);
                // If the block store creation failed, delete the block store file and try again.
                // Garbage collect any closed references to the blockchainFile.
                System.gc();
                blockStoreFile.setWritable(true);
                boolean deletedOk = blockStoreFile.delete();
                log.debug("Deleting SPV block store '{}' from disk.2", blockchainFilename + ", deletedOk = " + deletedOk);
                blockStoreCreatedNew = true;

                blockStore = new SPVBlockStore(networkParameters, blockStoreFile);
            } catch (BlockStoreException bse2) {
                bse2.printStackTrace();
                log.error("Unrecoverable failure in opening block store. This is bad.");
                // Throw the exception so that it is indicated on the UI.
                throw bse2;
            }
        }

        // Load the existing checkpoint file and checkpoint from today.
        if (blockStore != null && checkpointsFile.exists()) {
            FileInputStream stream = null;
            try {
                stream = new FileInputStream(checkpointsFile);
                if (checkpointDate == null) {
                    if (blockStoreCreatedNew) {
                        // Brand new block store - checkpoint from today. This
                        // will go back to the last checkpoint.
                        CheckpointManager.checkpoint(networkParameters, stream, blockStore, (new Date()).getTime() / 1000);
                    }
                } else {
                    // Use checkpoint date (block replay).
                    CheckpointManager.checkpoint(networkParameters, stream, blockStore, checkpointDate.getTime() / 1000);
                }
            } finally {
                if (stream != null) {
                    stream.close();
                    stream = null;
                }
            }
        }

        return blockStore;
    }

    /**
     * Creates and configures a new peer group for the current network.
     *
     * <p>Peer selection, in priority order: a single node from the
     * SINGLE_NODE_CONNECTION preference; a comma-separated list from the PEERS
     * preference; otherwise automatic discovery (IRC for the test networks,
     * DNS for production). All existing wallets in the model are attached to
     * the new group. The group is not started here.
     */
    public void createNewPeerGroup() {
        peerGroup = new MultiBitPeerGroup(bitcoinController, networkParameters, blockChain);
        peerGroup.setFastCatchupTimeSecs(0); // genesis block
        peerGroup.setUserAgent("MultiBit", controller.getLocaliser().getVersionNumber());

        boolean peersSpecified = false;
        String singleNodeConnection = controller.getModel().getUserPreference(BitcoinModel.SINGLE_NODE_CONNECTION);
        String peers = controller.getModel().getUserPreference(BitcoinModel.PEERS);
        if (singleNodeConnection != null && !singleNodeConnection.equals("")) {
            try {
                peerGroup.addAddress(new PeerAddress(InetAddress.getByName(singleNodeConnection.trim())));
                peerGroup.setMaxConnections(1);
                peersSpecified = true;
            } catch (UnknownHostException e) {
                log.error(e.getMessage(), e);
            }
        } else if (peers != null && !peers.equals("")) {
            // Split using commas.
            String[] peerList = peers.split(",");
            if (peerList != null) {
                int numberOfPeersAdded = 0;

                for (int i = 0; i < peerList.length; i++) {
                    try {
                        peerGroup.addAddress(new PeerAddress(InetAddress.getByName(peerList[i].trim())));
                        numberOfPeersAdded++;
                    } catch (UnknownHostException e) {
                        log.error(e.getMessage(), e);
                    }
                }
                peerGroup.setMaxConnections(numberOfPeersAdded);
                peersSpecified = true;
            }
        }

        if (!peersSpecified) {
            // Use DNS for production, IRC for test.
            if (TESTNET3_GENESIS_HASH.equals(bitcoinController.getModel().getNetworkParameters().getGenesisBlock().getHashAsString())) {
                peerGroup.addPeerDiscovery(new IrcDiscovery(IRC_CHANNEL_TESTNET3));
            } else if (NetworkParameters.testNet().equals(bitcoinController.getModel().getNetworkParameters())) {
                peerGroup.addPeerDiscovery(new IrcDiscovery(IRC_CHANNEL_TEST));
            } else {
                peerGroup.addPeerDiscovery(new DnsDiscovery(networkParameters));
            }
        }
        // Add the controller as a PeerEventListener.
        peerGroup.addEventListener(bitcoinController.getPeerEventListener());

        // Add all existing wallets to the PeerGroup.
        if (controller != null && controller.getModel() != null) {
            List<WalletData> perWalletDataModels = bitcoinController.getModel().getPerWalletModelDataList();
            if (perWalletDataModels != null) {
                Iterator<WalletData> iterator = perWalletDataModels.iterator();
                if (iterator != null) {
                    while(iterator.hasNext()) {
                        WalletData perWalletModelData = iterator.next();
                        if (perWalletModelData != null && perWalletModelData.getWallet() != null) {
                            peerGroup.addWallet(perWalletModelData.getWallet());
                        }
                    }
                }
            }
        }
    }

    /**
     * Returns the file-name prefix for the currently selected network:
     * "multibit-testnet3", "multibit-testnet" or "multibit" (production).
     */
    public static String getFilePrefix() {
        BitcoinController bitcoinController = MultiBit.getBitcoinController();
        // testnet3
        if (TESTNET3_GENESIS_HASH.equals(bitcoinController.getModel().getNetworkParameters().getGenesisBlock().getHashAsString())) {
            return MULTIBIT_PREFIX + SEPARATOR + TESTNET3_PREFIX;
        } else if (NetworkParameters.testNet().equals(bitcoinController.getModel().getNetworkParameters())) {
            return MULTIBIT_PREFIX + SEPARATOR + TESTNET_PREFIX;
        } else {
            return MULTIBIT_PREFIX;
        }
    }

    /**
     * Initialize wallet from the wallet filename.
     *
     * @param walletFilename full path of the wallet file to load; when null, empty
     *                       or a directory, the default wallet name is used (and a
     *                       brand new unencrypted wallet is created if no such file exists)
     * @return perWalletModelData the model data for the loaded or newly created wallet
     */
    public WalletData addWalletFromFilename(String walletFilename) throws IOException {
        WalletData perWalletModelDataToReturn = null;

        Wallet wallet = null;

        File walletFile = null;
        boolean walletFileIsADirectory = false;
        boolean newWalletCreated = false;

        if (walletFilename != null) {
            walletFile = new File(walletFilename);
            if (walletFile.isDirectory()) {
                walletFileIsADirectory = true;
            } else {
                perWalletModelDataToReturn = bitcoinController.getFileHandler().loadFromFile(walletFile);
                if (perWalletModelDataToReturn != null) {
                    wallet = perWalletModelDataToReturn.getWallet();
                }
            }
        }

        if (walletFilename == null || walletFilename.equals("") || walletFileIsADirectory) {
            // Use default wallet name - create if does not exist.
            if ("".equals(controller.getApplicationDataDirectoryLocator().getApplicationDataDirectory())) {
                walletFilename = getFilePrefix() + WALLET_SUFFIX;
            } else {
                walletFilename = controller.getApplicationDataDirectoryLocator().getApplicationDataDirectory()
                        + File.separator + getFilePrefix() + WALLET_SUFFIX;
            }

            walletFile = new File(walletFilename);
            if (walletFile.exists()) {
                // Wallet file exists with default name.
                perWalletModelDataToReturn = bitcoinController.getFileHandler().loadFromFile(walletFile);
                if (perWalletModelDataToReturn != null) {
                    wallet = perWalletModelDataToReturn.getWallet();
                    // NOTE(review): the flag is set here even though the wallet was
                    // loaded, not created. Its only later effect is to skip the
                    // re-lookup by filename below — presumably intentional; confirm.
                    newWalletCreated = true;
                }
            } else {
                // Create a brand new wallet - by default unencrypted.
                wallet = new Wallet(networkParameters);
                ECKey newKey = new ECKey();
                wallet.addKey(newKey);
                perWalletModelDataToReturn = bitcoinController.getModel().addWallet(bitcoinController, wallet,
                        walletFile.getAbsolutePath());

                // Create a wallet info.
                WalletInfoData walletInfo = new WalletInfoData(walletFile.getAbsolutePath(), wallet,
                        MultiBitWalletVersion.PROTOBUF);
                perWalletModelDataToReturn.setWalletInfo(walletInfo);

                // Set a default description.
                String defaultDescription = controller.getLocaliser().getString("createNewWalletSubmitAction.defaultDescription");
                perWalletModelDataToReturn.setWalletDescription(defaultDescription);

                try {
                    bitcoinController.getFileHandler().savePerWalletModelData(perWalletModelDataToReturn, true);
                    newWalletCreated = true;

                    // Backup the wallet and wallet info.
                    BackupManager.INSTANCE.backupPerWalletModelData(bitcoinController.getFileHandler(), perWalletModelDataToReturn);
                } catch (WalletSaveException wse) {
                    // Save failed: surface the error to the user via the message manager.
                    log.error(wse.getClass().getCanonicalName() + " " + wse.getMessage());
                    MessageManager.INSTANCE.addMessage(new Message(wse.getClass().getCanonicalName() + " " + wse.getMessage()));
                } catch (WalletVersionException wve) {
                    log.error(wve.getClass().getCanonicalName() + " " + wve.getMessage());
                    MessageManager.INSTANCE.addMessage(new Message(wve.getClass().getCanonicalName() + " " + wve.getMessage()));
                }
            }
        }

        if (wallet != null) {
            // Add the keys for this wallet to the address book as receiving
            // addresses.
            List<ECKey> keys = wallet.getKeychain();
            if (keys != null) {
                if (!newWalletCreated) {
                    perWalletModelDataToReturn = bitcoinController.getModel().getPerWalletModelDataByWalletFilename(walletFilename);
                }
                if (perWalletModelDataToReturn != null) {
                    WalletInfoData walletInfo = perWalletModelDataToReturn.getWalletInfo();
                    if (walletInfo != null) {
                        for (ECKey key : keys) {
                            if (key != null) {
                                Address address = key.toAddress(networkParameters);
                                walletInfo.addReceivingAddressOfKey(address);
                            }
                        }
                    }
                }
            }

            // Add wallet to blockchain.
            if (blockChain != null) {
                blockChain.addWallet(wallet);
            } else {
                log.error("Could not add wallet '" + walletFilename + "' to the blockChain as the blockChain is missing.\n"
                        + "This is bad. MultiBit is currently looking for a blockChain at '" + blockchainFilename + "'");
            }

            // Add wallet to peergroup.
            if (peerGroup != null) {
                peerGroup.addWallet(wallet);
                peerGroup.addEventListener(bitcoinController.getPeerEventListener());
            } else {
                log.error("Could not add wallet '" + walletFilename + "' to the peerGroup as the peerGroup is null. This is bad. ");
            }
        }

        return perWalletModelDataToReturn;
    }

    /**
     * Create a new block store.
     * @param dateToReplayFrom the date to replay the block chain from; clamped to
     *                         no earlier than genesis + one week (see below)
     * @return height of the new block chain after truncate.
     * @throws IOException
     * @throws BlockStoreException
     */
    public int createNewBlockStoreForReplay(Date dateToReplayFrom) throws IOException, BlockStoreException {
        log.debug("Loading/ creating blockstore ...");

        if (blockStore != null) {
            try {
                blockStore.close();
                blockStore = null;
            } catch (NullPointerException npe) {
                log.debug("NullPointerException on blockstore close");
            }
        }

        // The CheckpointManager removes a week to cater for block header drift.
        // Any date before genesis + 1 week gets adjusted accordingly.
        Date genesisPlusOnwWeekAndASecond = new Date(MultiBitService.genesisBlockCreationDate.getTime() + (86400 * 7 + 1) * 1000);

        if (dateToReplayFrom != null) {
            if (dateToReplayFrom.getTime() < genesisPlusOnwWeekAndASecond.getTime() ) {
                dateToReplayFrom = genesisPlusOnwWeekAndASecond;
            }
            blockStore = createBlockStore(dateToReplayFrom, true);
        } else {
            blockStore = createBlockStore(genesisPlusOnwWeekAndASecond, true);
        }
        log.debug("Blockstore is '" + blockStore + "'");

        log.debug("Creating blockchain ...");
        blockChain = new MultiBitBlockChain(bitcoinController.getModel().getNetworkParameters(), blockStore);
        log.debug("Created blockchain '" + blockChain + "'");

        // Hook up the wallets to the new blockchain.
        if (blockChain != null) {
            List<WalletData> perWalletModelDataList = bitcoinController.getModel().getPerWalletModelDataList();
            for (WalletData loopPerWalletModelData : perWalletModelDataList) {
                if (loopPerWalletModelData.getWallet() != null) {
                    blockChain.addWallet(loopPerWalletModelData.getWallet());
                }
            }
        }

        return blockChain.getBestChainHeight();
    }

    /**
     * Download the block chain.
*/ public void downloadBlockChain() { @SuppressWarnings("rawtypes") SwingWorker worker = new SwingWorker() { @Override protected Object doInBackground() throws Exception { logger.debug("Downloading blockchain"); peerGroup.downloadBlockChain(); return null; // return not used } }; worker.execute(); } /** * Send bitcoins from the active wallet. * * @return The sent transaction (may be null if there were insufficient * funds for send) * @throws KeyCrypterException * @throws IOException * @throws AddressFormatException */ public Transaction sendCoins(WalletData perWalletModelData, SendRequest sendRequest, CharSequence password) throws java.io.IOException, AddressFormatException, KeyCrypterException { // Send the coins log.debug("MultiBitService#sendCoins - Just about to send coins"); KeyParameter aesKey = null; if (perWalletModelData.getWallet().getEncryptionType() != EncryptionType.UNENCRYPTED) { aesKey = perWalletModelData.getWallet().getKeyCrypter().deriveKey(password); } sendRequest.aesKey = aesKey; sendRequest.fee = BigInteger.ZERO; sendRequest.feePerKb = BitcoinModel.SEND_FEE_PER_KB_DEFAULT; sendRequest.tx.getConfidence().addEventListener(perWalletModelData.getWallet().getTxConfidenceListener()); // log.debug("Added txConfidenceListener " + txConfidenceListener + " to tx " + request.tx.getHashAsString() + ", identityHashCode = " + System.identityHashCode(request.tx)); try { // The transaction is already added to the wallet (in SendBitcoinConfirmAction) so here we just need // to sign it, commit it and broadcast it. perWalletModelData.getWallet().sign(sendRequest); perWalletModelData.getWallet().commitTx(sendRequest.tx); // The tx has been committed to the pending pool by this point (via sendCoinsOffline -> commitTx), so it has // a txConfidenceListener registered. 
Once the tx is broadcast the peers will update the memory pool with the // count of seen peers, the memory pool will update the transaction confidence object, that will invoke the // txConfidenceListener which will in turn invoke the wallets event listener onTransactionConfidenceChanged // method. peerGroup.broadcastTransaction(sendRequest.tx); } catch (VerificationException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } Transaction sendTransaction = sendRequest.tx; log.debug("MultiBitService#sendCoins - Sent coins has completed"); assert sendTransaction != null; // We should never try to send more coins than we have! // throw an exception if sendTransaction is null - no money. if (sendTransaction != null) { log.debug("MultiBitService#sendCoins - Sent coins. Transaction hash is {}", sendTransaction.getHashAsString() + ", identityHashcode = " + System.identityHashCode(sendTransaction)); if (sendTransaction.getConfidence() != null) { log.debug("Added bitcoinController " + System.identityHashCode(bitcoinController) + " as listener to tx = " + sendTransaction.getHashAsString()); sendTransaction.getConfidence().addEventListener(bitcoinController); } else { log.debug("Cannot add bitcoinController as listener to tx = " + sendTransaction.getHashAsString() + " no transactionConfidence"); } try { bitcoinController.getFileHandler().savePerWalletModelData(perWalletModelData, false); } catch (WalletSaveException wse) { log.error(wse.getClass().getCanonicalName() + " " + wse.getMessage()); MessageManager.INSTANCE.addMessage(new Message(wse.getClass().getCanonicalName() + " " + wse.getMessage())); } catch (WalletVersionException wse) { log.error(wse.getClass().getCanonicalName() + " " + wse.getMessage()); MessageManager.INSTANCE.addMessage(new Message(wse.getClass().getCanonicalName() + " " + wse.getMessage())); } try { // Notify other wallets of the send (it might be a send to or from them). 
List<WalletData> perWalletModelDataList = bitcoinController.getModel().getPerWalletModelDataList(); if (perWalletModelDataList != null) { for (WalletData loopPerWalletModelData : perWalletModelDataList) { if (!perWalletModelData.getWalletFilename().equals(loopPerWalletModelData.getWalletFilename())) { Wallet loopWallet = loopPerWalletModelData.getWallet(); if (loopWallet.isPendingTransactionRelevant(sendTransaction)) { // The loopPerWalletModelData is marked as dirty. if (loopPerWalletModelData.getWalletInfo() != null) { synchronized (loopPerWalletModelData.getWalletInfo()) { loopPerWalletModelData.setDirty(true); } } else { loopPerWalletModelData.setDirty(true); } if (loopWallet.getTransaction(sendTransaction.getHash()) == null) { log.debug("MultiBit adding a new pending transaction for the wallet '" + loopPerWalletModelData.getWalletDescription() + "'\n" + sendTransaction.toString()); loopWallet.receivePending(sendTransaction, null); } } } } } } catch (ScriptException e) { e.printStackTrace(); } catch (VerificationException e) { e.printStackTrace(); } } return sendTransaction; } public PeerGroup getPeerGroup() { return peerGroup; } public MultiBitBlockChain getChain() { return blockChain; } public BlockStore getBlockStore() { return blockStore; } public SecureRandom getSecureRandom() { return secureRandom; }; public String getCheckpointsFilename() { return checkpointsFilename; } public MultiBitCheckpointManager getCheckpointManager() { return checkpointManager; }; }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1/video_intelligence.proto package com.google.cloud.videointelligence.v1; /** * * * <pre> * Alternative hypotheses (a.k.a. n-best list). * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.SpeechRecognitionAlternative} */ public final class SpeechRecognitionAlternative extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1.SpeechRecognitionAlternative) SpeechRecognitionAlternativeOrBuilder { private static final long serialVersionUID = 0L; // Use SpeechRecognitionAlternative.newBuilder() to construct. private SpeechRecognitionAlternative(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SpeechRecognitionAlternative() { transcript_ = ""; confidence_ = 0F; words_ = java.util.Collections.emptyList(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private SpeechRecognitionAlternative( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); transcript_ = s; break; } case 21: { confidence_ = input.readFloat(); break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { words_ = new java.util.ArrayList<com.google.cloud.videointelligence.v1.WordInfo>(); mutable_bitField0_ |= 0x00000004; } words_.add( input.readMessage( 
com.google.cloud.videointelligence.v1.WordInfo.parser(), extensionRegistry)); break; } default: { if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { words_ = java.util.Collections.unmodifiableList(words_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_SpeechRecognitionAlternative_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_SpeechRecognitionAlternative_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative.class, com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative.Builder.class); } private int bitField0_; public static final int TRANSCRIPT_FIELD_NUMBER = 1; private volatile java.lang.Object transcript_; /** * * * <pre> * Transcript text representing the words that the user spoke. 
* </pre> * * <code>string transcript = 1;</code> */ public java.lang.String getTranscript() { java.lang.Object ref = transcript_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); transcript_ = s; return s; } } /** * * * <pre> * Transcript text representing the words that the user spoke. * </pre> * * <code>string transcript = 1;</code> */ public com.google.protobuf.ByteString getTranscriptBytes() { java.lang.Object ref = transcript_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); transcript_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONFIDENCE_FIELD_NUMBER = 2; private float confidence_; /** * * * <pre> * The confidence estimate between 0.0 and 1.0. A higher number * indicates an estimated greater likelihood that the recognized words are * correct. This field is typically provided only for the top hypothesis, and * only for `is_final=true` results. Clients should not rely on the * `confidence` field as it is not guaranteed to be accurate or consistent. * The default of 0.0 is a sentinel value indicating `confidence` was not set. * </pre> * * <code>float confidence = 2;</code> */ public float getConfidence() { return confidence_; } public static final int WORDS_FIELD_NUMBER = 3; private java.util.List<com.google.cloud.videointelligence.v1.WordInfo> words_; /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.WordInfo> getWordsList() { return words_; } /** * * * <pre> * A list of word-specific information for each recognized word. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public java.util.List<? extends com.google.cloud.videointelligence.v1.WordInfoOrBuilder> getWordsOrBuilderList() { return words_; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public int getWordsCount() { return words_.size(); } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public com.google.cloud.videointelligence.v1.WordInfo getWords(int index) { return words_.get(index); } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public com.google.cloud.videointelligence.v1.WordInfoOrBuilder getWordsOrBuilder(int index) { return words_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getTranscriptBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, transcript_); } if (confidence_ != 0F) { output.writeFloat(2, confidence_); } for (int i = 0; i < words_.size(); i++) { output.writeMessage(3, words_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getTranscriptBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, transcript_); } if (confidence_ != 0F) { size += 
com.google.protobuf.CodedOutputStream.computeFloatSize(2, confidence_); } for (int i = 0; i < words_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, words_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative)) { return super.equals(obj); } com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative other = (com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative) obj; boolean result = true; result = result && getTranscript().equals(other.getTranscript()); result = result && (java.lang.Float.floatToIntBits(getConfidence()) == java.lang.Float.floatToIntBits(other.getConfidence())); result = result && getWordsList().equals(other.getWordsList()); result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + TRANSCRIPT_FIELD_NUMBER; hash = (53 * hash) + getTranscript().hashCode(); hash = (37 * hash) + CONFIDENCE_FIELD_NUMBER; hash = (53 * hash) + java.lang.Float.floatToIntBits(getConfidence()); if (getWordsCount() > 0) { hash = (37 * hash) + WORDS_FIELD_NUMBER; hash = (53 * hash) + getWordsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative 
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Alternative hypotheses (a.k.a. n-best list). 
* </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.SpeechRecognitionAlternative} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1.SpeechRecognitionAlternative) com.google.cloud.videointelligence.v1.SpeechRecognitionAlternativeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_SpeechRecognitionAlternative_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_SpeechRecognitionAlternative_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative.class, com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative.Builder.class); } // Construct using // com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getWordsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); transcript_ = ""; confidence_ = 0F; if (wordsBuilder_ == null) { words_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); } else { wordsBuilder_.clear(); } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_SpeechRecognitionAlternative_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative getDefaultInstanceForType() { return com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative .getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative build() { com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative buildPartial() { com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative result = new com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; result.transcript_ = transcript_; result.confidence_ = confidence_; if (wordsBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004)) { words_ = java.util.Collections.unmodifiableList(words_); bitField0_ = (bitField0_ & ~0x00000004); } result.words_ = words_; } else { result.words_ = wordsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return (Builder) super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative) { return mergeFrom( (com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative other) { if (other == com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative .getDefaultInstance()) return this; if (!other.getTranscript().isEmpty()) { transcript_ = other.transcript_; onChanged(); } if (other.getConfidence() != 0F) { setConfidence(other.getConfidence()); } if (wordsBuilder_ == null) { if (!other.words_.isEmpty()) { if (words_.isEmpty()) { words_ = other.words_; bitField0_ = (bitField0_ & ~0x00000004); } else { ensureWordsIsMutable(); words_.addAll(other.words_); } onChanged(); } } else { if (!other.words_.isEmpty()) { if (wordsBuilder_.isEmpty()) { wordsBuilder_.dispose(); wordsBuilder_ = null; words_ = other.words_; bitField0_ = (bitField0_ & ~0x00000004); wordsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getWordsFieldBuilder() : null; } else { wordsBuilder_.addAllMessages(other.words_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object transcript_ = ""; /** * * * <pre> * Transcript text representing the words that the user spoke. * </pre> * * <code>string transcript = 1;</code> */ public java.lang.String getTranscript() { java.lang.Object ref = transcript_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); transcript_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Transcript text representing the words that the user spoke. * </pre> * * <code>string transcript = 1;</code> */ public com.google.protobuf.ByteString getTranscriptBytes() { java.lang.Object ref = transcript_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); transcript_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Transcript text representing the words that the user spoke. 
* </pre> * * <code>string transcript = 1;</code> */ public Builder setTranscript(java.lang.String value) { if (value == null) { throw new NullPointerException(); } transcript_ = value; onChanged(); return this; } /** * * * <pre> * Transcript text representing the words that the user spoke. * </pre> * * <code>string transcript = 1;</code> */ public Builder clearTranscript() { transcript_ = getDefaultInstance().getTranscript(); onChanged(); return this; } /** * * * <pre> * Transcript text representing the words that the user spoke. * </pre> * * <code>string transcript = 1;</code> */ public Builder setTranscriptBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); transcript_ = value; onChanged(); return this; } private float confidence_; /** * * * <pre> * The confidence estimate between 0.0 and 1.0. A higher number * indicates an estimated greater likelihood that the recognized words are * correct. This field is typically provided only for the top hypothesis, and * only for `is_final=true` results. Clients should not rely on the * `confidence` field as it is not guaranteed to be accurate or consistent. * The default of 0.0 is a sentinel value indicating `confidence` was not set. * </pre> * * <code>float confidence = 2;</code> */ public float getConfidence() { return confidence_; } /** * * * <pre> * The confidence estimate between 0.0 and 1.0. A higher number * indicates an estimated greater likelihood that the recognized words are * correct. This field is typically provided only for the top hypothesis, and * only for `is_final=true` results. Clients should not rely on the * `confidence` field as it is not guaranteed to be accurate or consistent. * The default of 0.0 is a sentinel value indicating `confidence` was not set. 
* </pre> * * <code>float confidence = 2;</code> */ public Builder setConfidence(float value) { confidence_ = value; onChanged(); return this; } /** * * * <pre> * The confidence estimate between 0.0 and 1.0. A higher number * indicates an estimated greater likelihood that the recognized words are * correct. This field is typically provided only for the top hypothesis, and * only for `is_final=true` results. Clients should not rely on the * `confidence` field as it is not guaranteed to be accurate or consistent. * The default of 0.0 is a sentinel value indicating `confidence` was not set. * </pre> * * <code>float confidence = 2;</code> */ public Builder clearConfidence() { confidence_ = 0F; onChanged(); return this; } private java.util.List<com.google.cloud.videointelligence.v1.WordInfo> words_ = java.util.Collections.emptyList(); private void ensureWordsIsMutable() { if (!((bitField0_ & 0x00000004) == 0x00000004)) { words_ = new java.util.ArrayList<com.google.cloud.videointelligence.v1.WordInfo>(words_); bitField0_ |= 0x00000004; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.WordInfo, com.google.cloud.videointelligence.v1.WordInfo.Builder, com.google.cloud.videointelligence.v1.WordInfoOrBuilder> wordsBuilder_; /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.WordInfo> getWordsList() { if (wordsBuilder_ == null) { return java.util.Collections.unmodifiableList(words_); } else { return wordsBuilder_.getMessageList(); } } /** * * * <pre> * A list of word-specific information for each recognized word. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public int getWordsCount() { if (wordsBuilder_ == null) { return words_.size(); } else { return wordsBuilder_.getCount(); } } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public com.google.cloud.videointelligence.v1.WordInfo getWords(int index) { if (wordsBuilder_ == null) { return words_.get(index); } else { return wordsBuilder_.getMessage(index); } } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder setWords(int index, com.google.cloud.videointelligence.v1.WordInfo value) { if (wordsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWordsIsMutable(); words_.set(index, value); onChanged(); } else { wordsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder setWords( int index, com.google.cloud.videointelligence.v1.WordInfo.Builder builderForValue) { if (wordsBuilder_ == null) { ensureWordsIsMutable(); words_.set(index, builderForValue.build()); onChanged(); } else { wordsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder addWords(com.google.cloud.videointelligence.v1.WordInfo value) { if (wordsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWordsIsMutable(); words_.add(value); onChanged(); } else { wordsBuilder_.addMessage(value); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder addWords(int index, com.google.cloud.videointelligence.v1.WordInfo value) { if (wordsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureWordsIsMutable(); words_.add(index, value); onChanged(); } else { wordsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder addWords( com.google.cloud.videointelligence.v1.WordInfo.Builder builderForValue) { if (wordsBuilder_ == null) { ensureWordsIsMutable(); words_.add(builderForValue.build()); onChanged(); } else { wordsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder addWords( int index, com.google.cloud.videointelligence.v1.WordInfo.Builder builderForValue) { if (wordsBuilder_ == null) { ensureWordsIsMutable(); words_.add(index, builderForValue.build()); onChanged(); } else { wordsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder addAllWords( java.lang.Iterable<? extends com.google.cloud.videointelligence.v1.WordInfo> values) { if (wordsBuilder_ == null) { ensureWordsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, words_); onChanged(); } else { wordsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder clearWords() { if (wordsBuilder_ == null) { words_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); } else { wordsBuilder_.clear(); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public Builder removeWords(int index) { if (wordsBuilder_ == null) { ensureWordsIsMutable(); words_.remove(index); onChanged(); } else { wordsBuilder_.remove(index); } return this; } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public com.google.cloud.videointelligence.v1.WordInfo.Builder getWordsBuilder(int index) { return getWordsFieldBuilder().getBuilder(index); } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public com.google.cloud.videointelligence.v1.WordInfoOrBuilder getWordsOrBuilder(int index) { if (wordsBuilder_ == null) { return words_.get(index); } else { return wordsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * A list of word-specific information for each recognized word. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public java.util.List<? extends com.google.cloud.videointelligence.v1.WordInfoOrBuilder> getWordsOrBuilderList() { if (wordsBuilder_ != null) { return wordsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(words_); } } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public com.google.cloud.videointelligence.v1.WordInfo.Builder addWordsBuilder() { return getWordsFieldBuilder() .addBuilder(com.google.cloud.videointelligence.v1.WordInfo.getDefaultInstance()); } /** * * * <pre> * A list of word-specific information for each recognized word. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public com.google.cloud.videointelligence.v1.WordInfo.Builder addWordsBuilder(int index) { return getWordsFieldBuilder() .addBuilder(index, com.google.cloud.videointelligence.v1.WordInfo.getDefaultInstance()); } /** * * * <pre> * A list of word-specific information for each recognized word. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.WordInfo words = 3;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.WordInfo.Builder> getWordsBuilderList() { return getWordsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.WordInfo, com.google.cloud.videointelligence.v1.WordInfo.Builder, com.google.cloud.videointelligence.v1.WordInfoOrBuilder> getWordsFieldBuilder() { if (wordsBuilder_ == null) { wordsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.WordInfo, com.google.cloud.videointelligence.v1.WordInfo.Builder, com.google.cloud.videointelligence.v1.WordInfoOrBuilder>( words_, ((bitField0_ & 0x00000004) == 0x00000004), getParentForChildren(), isClean()); words_ = null; } return wordsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFieldsProto3(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1.SpeechRecognitionAlternative) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.SpeechRecognitionAlternative) private static final com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative(); } public static com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SpeechRecognitionAlternative> PARSER = new com.google.protobuf.AbstractParser<SpeechRecognitionAlternative>() { @java.lang.Override public SpeechRecognitionAlternative parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new SpeechRecognitionAlternative(input, extensionRegistry); } }; public static com.google.protobuf.Parser<SpeechRecognitionAlternative> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SpeechRecognitionAlternative> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1.SpeechRecognitionAlternative getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.collections4.map; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import junit.framework.Test; import org.apache.commons.collections4.BulkTest; import org.apache.commons.collections4.MapIterator; import org.apache.commons.collections4.list.AbstractListTest; /** * Extension of {@link AbstractOrderedMapTest} for exercising the {@link ListOrderedMap} * implementation. 
* * @since 3.0 * @version $Id$ */ public class ListOrderedMapTest<K, V> extends AbstractOrderedMapTest<K, V> { public ListOrderedMapTest(final String testName) { super(testName); } public static Test suite() { return BulkTest.makeSuite(ListOrderedMapTest.class); } @Override public ListOrderedMap<K, V> makeObject() { return ListOrderedMap.listOrderedMap(new HashMap<K, V>()); } /** * {@inheritDoc} */ @Override public ListOrderedMap<K, V> makeFullMap() { return (ListOrderedMap<K, V>) super.makeFullMap(); } //----------------------------------------------------------------------- public void testGetByIndex() { resetEmpty(); ListOrderedMap<K, V> lom = getMap(); try { lom.get(0); } catch (final IndexOutOfBoundsException ex) {} try { lom.get(-1); } catch (final IndexOutOfBoundsException ex) {} resetFull(); lom = getMap(); try { lom.get(-1); } catch (final IndexOutOfBoundsException ex) {} try { lom.get(lom.size()); } catch (final IndexOutOfBoundsException ex) {} int i = 0; for (final MapIterator<K, V> it = lom.mapIterator(); it.hasNext(); i++) { assertSame(it.next(), lom.get(i)); } } public void testGetValueByIndex() { resetEmpty(); ListOrderedMap<K, V> lom = getMap(); try { lom.getValue(0); } catch (final IndexOutOfBoundsException ex) {} try { lom.getValue(-1); } catch (final IndexOutOfBoundsException ex) {} resetFull(); lom = getMap(); try { lom.getValue(-1); } catch (final IndexOutOfBoundsException ex) {} try { lom.getValue(lom.size()); } catch (final IndexOutOfBoundsException ex) {} int i = 0; for (final MapIterator<K, V> it = lom.mapIterator(); it.hasNext(); i++) { it.next(); assertSame(it.getValue(), lom.getValue(i)); } } public void testIndexOf() { resetEmpty(); ListOrderedMap<K, V> lom = getMap(); assertEquals(-1, lom.indexOf(getOtherKeys())); resetFull(); lom = getMap(); final List<K> list = new ArrayList<K>(); for (final MapIterator<K, V> it = lom.mapIterator(); it.hasNext();) { list.add(it.next()); } for (int i = 0; i < list.size(); i++) { assertEquals(i, 
lom.indexOf(list.get(i))); } } @SuppressWarnings("unchecked") public void testSetValueByIndex() { resetEmpty(); ListOrderedMap<K, V> lom = getMap(); try { lom.setValue(0, (V) ""); } catch (final IndexOutOfBoundsException ex) {} try { lom.setValue(-1, (V) ""); } catch (final IndexOutOfBoundsException ex) {} resetFull(); lom = getMap(); try { lom.setValue(-1, (V) ""); } catch (final IndexOutOfBoundsException ex) {} try { lom.setValue(lom.size(), (V) ""); } catch (final IndexOutOfBoundsException ex) {} for (int i = 0; i < lom.size(); i++) { final V value = lom.getValue(i); final Object input = Integer.valueOf(i); assertEquals(value, lom.setValue(i, (V) input)); assertEquals(input, lom.getValue(i)); } } public void testRemoveByIndex() { resetEmpty(); ListOrderedMap<K, V> lom = getMap(); try { lom.remove(0); } catch (final IndexOutOfBoundsException ex) {} try { lom.remove(-1); } catch (final IndexOutOfBoundsException ex) {} resetFull(); lom = getMap(); try { lom.remove(-1); } catch (final IndexOutOfBoundsException ex) {} try { lom.remove(lom.size()); } catch (final IndexOutOfBoundsException ex) {} final List<K> list = new ArrayList<K>(); for (final MapIterator<K, V> it = lom.mapIterator(); it.hasNext();) { list.add(it.next()); } for (int i = 0; i < list.size(); i++) { final Object key = list.get(i); final Object value = lom.get(key); assertEquals(value, lom.remove(i)); list.remove(i); assertEquals(false, lom.containsKey(key)); } } @SuppressWarnings("unchecked") public void testPut_intObjectObject() { resetEmpty(); ListOrderedMap<K, V> lom = getMap(); try { lom.put(1, (K) "testInsert1", (V) "testInsert1v"); fail("should not be able to insert at pos 1 in empty Map"); } catch (final IndexOutOfBoundsException ex) {} try { lom.put(-1, (K) "testInsert-1", (V) "testInsert-1v"); fail("should not be able to insert at pos -1 in empty Map"); } catch (final IndexOutOfBoundsException ex) {} // put where key doesn't exist lom.put(0, (K) "testInsert1", (V) "testInsert1v"); 
assertEquals("testInsert1v", lom.getValue(0)); lom.put((K) "testInsertPut", (V) "testInsertPutv"); assertEquals("testInsert1v", lom.getValue(0)); assertEquals("testInsertPutv", lom.getValue(1)); lom.put(0, (K) "testInsert0", (V) "testInsert0v"); assertEquals("testInsert0v", lom.getValue(0)); assertEquals("testInsert1v", lom.getValue(1)); assertEquals("testInsertPutv", lom.getValue(2)); lom.put(3, (K) "testInsert3", (V) "testInsert3v"); assertEquals("testInsert0v", lom.getValue(0)); assertEquals("testInsert1v", lom.getValue(1)); assertEquals("testInsertPutv", lom.getValue(2)); assertEquals("testInsert3v", lom.getValue(3)); // put in a full map resetFull(); lom = getMap(); final ListOrderedMap<K, V> lom2 = new ListOrderedMap<K, V>(); lom2.putAll(lom); lom2.put(0, (K) "testInsert0", (V) "testInsert0v"); assertEquals("testInsert0v", lom2.getValue(0)); for (int i = 0; i < lom.size(); i++) { assertEquals(lom2.getValue(i + 1), lom.getValue(i)); } // put where key does exist final Integer i1 = Integer.valueOf(1); final Integer i1b = Integer.valueOf(1); final Integer i2 = Integer.valueOf(2); final Integer i3 = Integer.valueOf(3); resetEmpty(); lom = getMap(); lom.put((K) i1, (V) "1"); lom.put((K) i2, (V) "2"); lom.put((K) i3, (V) "3"); lom.put(0, (K) i1, (V) "One"); assertEquals(3, lom.size()); assertEquals(3, lom.map.size()); assertEquals(3, lom.keyList().size()); assertEquals("One", lom.getValue(0)); assertSame(i1, lom.get(0)); resetEmpty(); lom = getMap(); lom.put((K) i1, (V) "1"); lom.put((K) i2, (V) "2"); lom.put((K) i3, (V) "3"); lom.put(0, (K) i1b, (V) "One"); assertEquals(3, lom.size()); assertEquals(3, lom.map.size()); assertEquals(3, lom.keyList().size()); assertEquals("One", lom.getValue(0)); assertEquals("2", lom.getValue(1)); assertEquals("3", lom.getValue(2)); assertSame(i1b, lom.get(0)); resetEmpty(); lom = getMap(); lom.put((K) i1, (V) "1"); lom.put((K) i2, (V) "2"); lom.put((K) i3, (V) "3"); lom.put(1, (K) i1b, (V) "One"); assertEquals(3, lom.size()); 
assertEquals(3, lom.map.size()); assertEquals(3, lom.keyList().size()); assertEquals("One", lom.getValue(0)); assertEquals("2", lom.getValue(1)); assertEquals("3", lom.getValue(2)); resetEmpty(); lom = getMap(); lom.put((K) i1, (V) "1"); lom.put((K) i2, (V) "2"); lom.put((K) i3, (V) "3"); lom.put(2, (K) i1b, (V) "One"); assertEquals(3, lom.size()); assertEquals(3, lom.map.size()); assertEquals(3, lom.keyList().size()); assertEquals("2", lom.getValue(0)); assertEquals("One", lom.getValue(1)); assertEquals("3", lom.getValue(2)); resetEmpty(); lom = getMap(); lom.put((K) i1, (V) "1"); lom.put((K) i2, (V) "2"); lom.put((K) i3, (V) "3"); lom.put(3, (K) i1b, (V) "One"); assertEquals(3, lom.size()); assertEquals(3, lom.map.size()); assertEquals(3, lom.keyList().size()); assertEquals("2", lom.getValue(0)); assertEquals("3", lom.getValue(1)); assertEquals("One", lom.getValue(2)); } public void testPutAllWithIndex() { resetEmpty(); @SuppressWarnings("unchecked") final ListOrderedMap<String, String> lom = (ListOrderedMap<String, String>) map; // Create Initial Data lom.put("testInsert0", "testInsert0v"); lom.put("testInsert1", "testInsert1v"); lom.put("testInsert2", "testInsert2v"); assertEquals("testInsert0v", lom.getValue(0)); assertEquals("testInsert1v", lom.getValue(1)); assertEquals("testInsert2v", lom.getValue(2)); // Create New Test Map and Add using putAll(int, Object, Object) final Map<String, String> values = new ListOrderedMap<String, String>(); values.put("NewInsert0", "NewInsert0v"); values.put("NewInsert1", "NewInsert1v"); lom.putAll(1, values); // Perform Asserts assertEquals("testInsert0v", lom.getValue(0)); assertEquals("NewInsert0v", lom.getValue(1)); assertEquals("NewInsert1v", lom.getValue(2)); assertEquals("testInsert1v", lom.getValue(3)); assertEquals("testInsert2v", lom.getValue(4)); } @SuppressWarnings("unchecked") public void testPutAllWithIndexBug441() { // see COLLECTIONS-441 resetEmpty(); final ListOrderedMap<K, V> lom = getMap(); final int size = 
5; for (int i = 0; i < size; i++) { lom.put((K) Integer.valueOf(i), (V) Boolean.TRUE); } final Map<K, V> map = new TreeMap<K, V>(); for (int i = 0; i < size; i++) { map.put((K) Integer.valueOf(i), (V) Boolean.FALSE); } lom.putAll(3, map); final List<K> orderedList = lom.asList(); for (int i = 0; i < size; i++) { assertEquals(Integer.valueOf(i), orderedList.get(i)); } } //----------------------------------------------------------------------- public void testValueList_getByIndex() { resetFull(); final ListOrderedMap<K, V> lom = getMap(); for (int i = 0; i < lom.size(); i++) { final V expected = lom.getValue(i); assertEquals(expected, lom.valueList().get(i)); } } @SuppressWarnings("unchecked") public void testValueList_setByIndex() { resetFull(); final ListOrderedMap<K, V> lom = getMap(); for (int i = 0; i < lom.size(); i++) { final Object input = Integer.valueOf(i); final V expected = lom.getValue(i); assertEquals(expected, lom.valueList().set(i, (V) input)); assertEquals(input, lom.getValue(i)); assertEquals(input, lom.valueList().get(i)); } } public void testValueList_removeByIndex() { resetFull(); final ListOrderedMap<K, V> lom = getMap(); while (lom.size() > 1) { final V expected = lom.getValue(1); assertEquals(expected, lom.valueList().remove(1)); } } public void testCOLLECTIONS_474_nullValues () { Object key1 = new Object(); Object key2 = new Object(); HashMap<Object, Object> hmap = new HashMap<Object, Object>(); hmap.put(key1, null); hmap.put(key2, null); assertEquals("Should have two elements", 2, hmap.size()); ListOrderedMap<Object, Object> listMap = new ListOrderedMap<Object, Object>(); listMap.put(key1, null); listMap.put(key2, null); assertEquals("Should have two elements", 2, listMap.size()); listMap.putAll(2, hmap); } public void testCOLLECTIONS_474_nonNullValues () { Object key1 = new Object(); Object key2 = new Object(); HashMap<Object, Object> hmap = new HashMap<Object, Object>(); hmap.put(key1, "1"); hmap.put(key2, "2"); assertEquals("Should have 
two elements", 2, hmap.size()); ListOrderedMap<Object, Object> listMap = new ListOrderedMap<Object, Object>(); listMap.put(key1, "3"); listMap.put(key2, "4"); assertEquals("Should have two elements", 2, listMap.size()); listMap.putAll(2, hmap); } //----------------------------------------------------------------------- public BulkTest bulkTestKeyListView() { return new TestKeyListView(); } public BulkTest bulkTestValueListView() { return new TestValueListView(); } //----------------------------------------------------------------------- public class TestKeyListView extends AbstractListTest<K> { TestKeyListView() { super("TestKeyListView"); } @Override public List<K> makeObject() { return ListOrderedMapTest.this.makeObject().keyList(); } @Override public List<K> makeFullCollection() { return ListOrderedMapTest.this.makeFullMap().keyList(); } @Override public K[] getFullElements() { return ListOrderedMapTest.this.getSampleKeys(); } @Override public boolean isAddSupported() { return false; } @Override public boolean isRemoveSupported() { return false; } @Override public boolean isSetSupported() { return false; } @Override public boolean isNullSupported() { return ListOrderedMapTest.this.isAllowNullKey(); } @Override public boolean isTestSerialization() { return false; } } //----------------------------------------------------------------------- public class TestValueListView extends AbstractListTest<V> { TestValueListView() { super("TestValueListView"); } @Override public List<V> makeObject() { return ListOrderedMapTest.this.makeObject().valueList(); } @Override public List<V> makeFullCollection() { return ListOrderedMapTest.this.makeFullMap().valueList(); } @Override public V[] getFullElements() { return ListOrderedMapTest.this.getSampleValues(); } @Override public boolean isAddSupported() { return false; } @Override public boolean isRemoveSupported() { return true; } @Override public boolean isSetSupported() { return true; } @Override public boolean 
isNullSupported() { return ListOrderedMapTest.this.isAllowNullKey(); } @Override public boolean isTestSerialization() { return false; } } //----------------------------------------------------------------------- @Override public String getCompatibilityVersion() { return "4"; } // public void testCreate() throws Exception { // resetEmpty(); // writeExternalFormToDisk( // (java.io.Serializable) map, // "src/test/resources/data/test/ListOrderedMap.emptyCollection.version4.obj"); // resetFull(); // writeExternalFormToDisk( // (java.io.Serializable) map, // "src/test/resources/data/test/ListOrderedMap.fullCollection.version4.obj"); // } /** * {@inheritDoc} */ @Override public ListOrderedMap<K, V> getMap() { return (ListOrderedMap<K, V>) super.getMap(); } }
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.internal.serialization.impl;

import com.hazelcast.internal.serialization.InternalSerializationService;

import java.io.EOFException;
import java.nio.ByteOrder;

import static com.hazelcast.internal.memory.GlobalMemoryAccessor.MEM_COPY_THRESHOLD;
import static com.hazelcast.internal.memory.GlobalMemoryAccessorRegistry.MEM;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BOOLEAN_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BOOLEAN_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BYTE_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_BYTE_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_CHAR_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_CHAR_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_DOUBLE_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_DOUBLE_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_FLOAT_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_FLOAT_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_INT_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_INT_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_LONG_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_LONG_INDEX_SCALE;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_SHORT_BASE_OFFSET;
import static com.hazelcast.internal.memory.HeapMemoryAccessor.ARRAY_SHORT_INDEX_SCALE;
import static com.hazelcast.internal.nio.Bits.CHAR_SIZE_IN_BYTES;
import static com.hazelcast.internal.nio.Bits.DOUBLE_SIZE_IN_BYTES;
import static com.hazelcast.internal.nio.Bits.FLOAT_SIZE_IN_BYTES;
import static com.hazelcast.internal.nio.Bits.INT_SIZE_IN_BYTES;
import static com.hazelcast.internal.nio.Bits.LONG_SIZE_IN_BYTES;
import static com.hazelcast.internal.nio.Bits.NULL_ARRAY_LENGTH;
import static com.hazelcast.internal.nio.Bits.SHORT_SIZE_IN_BYTES;

/**
 * Data input that reads primitives and primitive arrays directly out of the
 * inherited backing {@code byte[]} ({@code data}) through the global memory
 * accessor {@code MEM}, always in the platform's native byte order (both
 * constructors pass {@code ByteOrder.nativeOrder()} to the superclass, and
 * {@link #getByteOrder()} reports the same).
 *
 * <p>Positional {@code readXxx(int position)} overloads do not advance
 * {@code pos}; the stream-style overloads ({@code readDouble()},
 * {@code readFloat()}, array reads) do. The explicit-byte-order overloads
 * read natively and then {@code reverseBytes} when the requested order
 * differs from the native one.
 */
class UnsafeObjectDataInput extends ByteArrayObjectDataInput {

    UnsafeObjectDataInput(byte[] buffer, InternalSerializationService service) {
        super(buffer, service, ByteOrder.nativeOrder());
    }

    UnsafeObjectDataInput(byte[] buffer, int offset, InternalSerializationService service) {
        super(buffer, offset, service, ByteOrder.nativeOrder());
    }

    // Stream-style read: returns the next byte masked to 0..255, or -1 at end
    // of buffer, and advances pos.
    @Override
    public int read() {
        return (pos < size) ? MEM.getByte(data, ARRAY_BYTE_BASE_OFFSET + pos++) & 0xFF : -1;
    }

    // Positional read: does NOT advance pos. NOTE(review): unlike read(), the
    // byte here is not masked with 0xFF, so negative bytes come back negative;
    // out-of-range positions return NULL_ARRAY_LENGTH. Presumably intentional
    // — confirm against ByteArrayObjectDataInput's contract.
    @Override
    public int read(int position) {
        return (position < size) ? MEM.getByte(data, ARRAY_BYTE_BASE_OFFSET + position) : NULL_ARRAY_LENGTH;
    }

    @Override
    public char readChar(int position) throws EOFException {
        checkAvailable(position, CHAR_SIZE_IN_BYTES);
        return MEM.getChar(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    // Stream-style double read: delegates to the positional overload, then
    // advances pos past the 8 bytes consumed.
    @Override
    public double readDouble() throws EOFException {
        final double d = readDouble(pos);
        pos += DOUBLE_SIZE_IN_BYTES;
        return d;
    }

    @Override
    public double readDouble(int position) throws EOFException {
        checkAvailable(position, DOUBLE_SIZE_IN_BYTES);
        return MEM.getDouble(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    // Stream-style float read: positional read, then advance pos by 4 bytes.
    @Override
    public float readFloat() throws EOFException {
        final float f = readFloat(pos);
        pos += FLOAT_SIZE_IN_BYTES;
        return f;
    }

    @Override
    public float readFloat(int position) throws EOFException {
        checkAvailable(position, FLOAT_SIZE_IN_BYTES);
        return MEM.getFloat(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    @Override
    public int readInt(int position) throws EOFException {
        checkAvailable(position, INT_SIZE_IN_BYTES);
        return MEM.getInt(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    // Reads natively, then byte-swaps if the caller asked for the other order.
    @Override
    public int readInt(int position, ByteOrder byteOrder) throws EOFException {
        int v = readInt(position);
        if (byteOrder != ByteOrder.nativeOrder()) {
            v = Integer.reverseBytes(v);
        }
        return v;
    }

    @Override
    public long readLong(int position) throws EOFException {
        checkAvailable(position, LONG_SIZE_IN_BYTES);
        return MEM.getLong(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    @Override
    public long readLong(int position, ByteOrder byteOrder) throws EOFException {
        long v = readLong(position);
        if (byteOrder != ByteOrder.nativeOrder()) {
            v = Long.reverseBytes(v);
        }
        return v;
    }

    @Override
    public short readShort(int position) throws EOFException {
        checkAvailable(position, SHORT_SIZE_IN_BYTES);
        return MEM.getShort(data, ARRAY_BYTE_BASE_OFFSET + position);
    }

    @Override
    public short readShort(int position, ByteOrder byteOrder) throws EOFException {
        short v = readShort(position);
        if (byteOrder != ByteOrder.nativeOrder()) {
            v = Short.reverseBytes(v);
        }
        return v;
    }

    // All array reads share the same shape: a length prefix read via readInt(),
    // where NULL_ARRAY_LENGTH encodes a null array, 0 an empty one, and a
    // positive length triggers a bulk memCopy into a freshly allocated array.

    @Override
    public char[] readCharArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            char[] values = new char[len];
            memCopy(values, ARRAY_CHAR_BASE_OFFSET, len, ARRAY_CHAR_INDEX_SCALE);
            return values;
        }
        return new char[0];
    }

    @Override
    public boolean[] readBooleanArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            boolean[] values = new boolean[len];
            memCopy(values, ARRAY_BOOLEAN_BASE_OFFSET, len, ARRAY_BOOLEAN_INDEX_SCALE);
            return values;
        }
        return new boolean[0];
    }

    @Override
    public byte[] readByteArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            byte[] values = new byte[len];
            memCopy(values, ARRAY_BYTE_BASE_OFFSET, len, ARRAY_BYTE_INDEX_SCALE);
            return values;
        }
        return new byte[0];
    }

    @Override
    public int[] readIntArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            int[] values = new int[len];
            memCopy(values, ARRAY_INT_BASE_OFFSET, len, ARRAY_INT_INDEX_SCALE);
            return values;
        }
        return new int[0];
    }

    @Override
    public long[] readLongArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            long[] values = new long[len];
            memCopy(values, ARRAY_LONG_BASE_OFFSET, len, ARRAY_LONG_INDEX_SCALE);
            return values;
        }
        return new long[0];
    }

    @Override
    public double[] readDoubleArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            double[] values = new double[len];
            memCopy(values, ARRAY_DOUBLE_BASE_OFFSET, len, ARRAY_DOUBLE_INDEX_SCALE);
            return values;
        }
        return new double[0];
    }

    @Override
    public float[] readFloatArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            float[] values = new float[len];
            memCopy(values, ARRAY_FLOAT_BASE_OFFSET, len, ARRAY_FLOAT_INDEX_SCALE);
            return values;
        }
        return new float[0];
    }

    @Override
    public short[] readShortArray() throws EOFException {
        int len = readInt();
        if (len == NULL_ARRAY_LENGTH) {
            return null;
        }
        if (len > 0) {
            short[] values = new short[len];
            memCopy(values, ARRAY_SHORT_BASE_OFFSET, len, ARRAY_SHORT_INDEX_SCALE);
            return values;
        }
        return new short[0];
    }

    /**
     * Bulk-copies {@code length} elements of {@code indexScale} bytes each
     * from the buffer (starting at {@code pos}) into {@code dest}, advancing
     * {@code pos} as it goes. The copy is split into chunks of at most
     * MEM_COPY_THRESHOLD bytes.
     *
     * NOTE(review): {@code length * indexScale} is a plain int product; for a
     * sufficiently large length it could overflow before the
     * checkAvailable/negative-size guards see it — worth confirming callers
     * bound the length first.
     */
    private void memCopy(final Object dest, final long destOffset, final int length, final int indexScale)
            throws EOFException {
        if (length < 0) {
            throw new NegativeArraySizeException("Destination length is negative: " + length);
        }

        int remaining = length * indexScale;
        checkAvailable(pos, remaining);
        long offset = destOffset;

        while (remaining > 0) {
            int chunk = (remaining > MEM_COPY_THRESHOLD) ? MEM_COPY_THRESHOLD : remaining;
            MEM.copyMemory(data, ARRAY_BYTE_BASE_OFFSET + pos, dest, offset, chunk);
            remaining -= chunk;
            offset += chunk;
            pos += chunk;
        }
    }

    // This implementation always reads in native order (see constructors).
    @Override
    public ByteOrder getByteOrder() {
        return ByteOrder.nativeOrder();
    }

    @Override
    public String toString() {
        return "UnsafeObjectDataInput{"
                + "size=" + size
                + ", pos=" + pos
                + ", mark=" + mark
                + ", byteOrder=" + getByteOrder()
                + '}';
    }
}
/** * Copyright (C) 2009-2014 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud.aws.platform; import java.util.ArrayList; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeSet; import javax.annotation.Nonnull; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; import org.dasein.cloud.CloudException; import org.dasein.cloud.InternalException; import org.dasein.cloud.ResourceStatus; import org.dasein.cloud.aws.AWSCloud; import org.dasein.cloud.aws.compute.EC2Exception; import org.dasein.cloud.aws.compute.EC2Method; import org.dasein.cloud.identity.ServiceAction; import org.dasein.cloud.platform.KeyValueDatabase; import org.dasein.cloud.platform.KeyValueDatabaseCapabilities; import org.dasein.cloud.platform.KeyValueDatabaseSupport; import org.dasein.cloud.platform.KeyValuePair; import org.dasein.cloud.util.APITrace; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; public class SimpleDB implements KeyValueDatabaseSupport { static private final Logger logger = AWSCloud.getLogger(SimpleDB.class); static public final String CREATE_DOMAIN = "CreateDomain"; static public final String DELETE_ATTRIBUTES = "DeleteAttributes"; static public final 
String DELETE_DOMAIN = "DeleteDomain"; static public final String DOMAIN_META_DATA = "DomainMetadata"; static public final String GET_ATTRIBUTES = "GetAttributes"; static public final String LIST_DOMAINS = "ListDomains"; static public final String PUT_ATTRIBUTES = "PutAttributes"; static public final String SELECT = "Select"; private volatile transient SimpleDBCapabilities capabilities; static public @Nonnull ServiceAction[] asSimpleDBServiceAction( @Nonnull String action ) { if( action.equals(CREATE_DOMAIN) ) { return new ServiceAction[]{KeyValueDatabaseSupport.CREATE_KVDB}; } else if( action.equals(DELETE_DOMAIN) ) { return new ServiceAction[]{KeyValueDatabaseSupport.REMOVE_KVDB}; } else if( action.equals(LIST_DOMAINS) ) { return new ServiceAction[]{KeyValueDatabaseSupport.LIST_KVDB, KeyValueDatabaseSupport.GET_KVDB}; } else if( action.equals(SELECT) ) { return new ServiceAction[]{KeyValueDatabaseSupport.SELECT}; } return new ServiceAction[0]; } private AWSCloud provider; SimpleDB( AWSCloud cloud ) { provider = cloud; } @Override public void addKeyValuePairs( String inDomainId, String itemId, KeyValuePair... pairs ) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.addKeyValuePairs"); try { if( pairs != null && pairs.length > 0 ) { Map<String, String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), PUT_ATTRIBUTES); EC2Method method; int i = 0; parameters.put("DomainName", inDomainId); parameters.put("ItemName", itemId); for( KeyValuePair pair : pairs ) { parameters.put("Attribute." + i + ".Name", pair.getKey()); parameters.put("Attribute." 
+ i + ".Value", pair.getValue()); i++; } method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } } } finally { APITrace.end(); } } @Override public String createDatabase(String name, String description) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.createDatabase"); try { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), CREATE_DOMAIN); EC2Method method; name = validateName(name); parameters.put("DomainName", name); method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } return name; } finally { APITrace.end(); } } @Override public @Nonnull KeyValueDatabaseCapabilities getCapabilities() throws InternalException, CloudException { if(capabilities == null) { capabilities = new SimpleDBCapabilities(provider); } return capabilities; } @Override public KeyValueDatabase getDatabase(String domainId) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.getDatabase"); try { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), DOMAIN_META_DATA); EC2Method method; Document doc; parameters.put("DomainName", domainId); method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { doc = method.invoke(); } catch( EC2Exception e ) { String code = e.getCode(); if( code != null && code.equals("NoSuchDomain") ) { return null; } throw new CloudException(e); } KeyValueDatabase database = new KeyValueDatabase(); database.setProviderOwnerId(provider.getContext().getAccountNumber()); database.setProviderRegionId(provider.getContext().getRegionId()); database.setProviderDatabaseId(domainId); database.setName(domainId); database.setDescription(domainId); NodeList blocks = doc.getElementsByTagName("DomainMetadataResult"); if( blocks.getLength() > 0 ) { for( int i=0; 
i<blocks.getLength(); i++ ) { NodeList items = blocks.item(i).getChildNodes(); for( int j=0; j<items.getLength(); j++ ) { Node item = items.item(j); String name = item.getNodeName(); if( name.equals("ItemCount") ) { if( item.hasChildNodes() ) { database.setItemCount(Integer.parseInt(item.getFirstChild().getNodeValue())); } } else if( name.equals("AttributeValueCount") ) { if( item.hasChildNodes() ) { database.setKeyValueCount(Integer.parseInt(item.getFirstChild().getNodeValue())); } } else if( name.equals("AttributeNameCount") ) { if( item.hasChildNodes() ) { database.setKeyCount(Integer.parseInt(item.getFirstChild().getNodeValue())); } } else if( name.equals("ItemNamesSizeBytes") ) { if( item.hasChildNodes() ) { database.setItemSize(Integer.parseInt(item.getFirstChild().getNodeValue())); } } else if( name.equals("AttributeValuesSizeBytes") ) { if( item.hasChildNodes() ) { database.setKeyValueSize(Integer.parseInt(item.getFirstChild().getNodeValue())); } } else if( name.equals("AttributeNamesSizeBytes") ) { if( item.hasChildNodes() ) { database.setKeySize(Integer.parseInt(item.getFirstChild().getNodeValue())); } } } } } return database; } finally { APITrace.end(); } } @Override public Iterable<KeyValuePair> getKeyValuePairs(String inDomainId, String itemId, boolean consistentRead) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.getKeyValuePairs"); try { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), GET_ATTRIBUTES); EC2Method method; Document doc; parameters.put("DomainName", inDomainId); parameters.put("ItemName", itemId); parameters.put("ConsistentRead", String.valueOf(consistentRead)); method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { doc = method.invoke(); } catch( EC2Exception e ) { String code = e.getCode(); if( code != null && code.equals("NoSuchDomain") ) { return null; } throw new CloudException(e); }; ArrayList<KeyValuePair> pairs = new ArrayList<KeyValuePair>(); 
NodeList blocks = doc.getElementsByTagName("Attribute"); for( int i=0; i<blocks.getLength(); i++ ) { Node node = blocks.item(i); if( node.hasChildNodes() ) { NodeList children = node.getChildNodes(); String key = null, value = null; for( int j=0; j<children.getLength(); j++ ) { Node item = children.item(j); if( item.hasChildNodes() ) { String nv = item.getFirstChild().getNodeValue(); if( item.getNodeName().equals("Name") ) { key = nv; } else if( item.getNodeName().equals("Value") ) { value = nv; } } } if( key != null ) { pairs.add(new KeyValuePair(key, value)); } } } return pairs; } finally { APITrace.end(); } } @Override @Deprecated public String getProviderTermForDatabase(Locale locale) { try { return getCapabilities().getProviderTermForDatabase(locale); } catch( InternalException e ) { } catch( CloudException e ) { } return "domain"; // legacy } private String getSimpleDBUrl() throws InternalException, CloudException { if( provider.getContext().getRegionId() == null || provider.getContext().getRegionId().equals("us-east-1") ) { return ("https://sdb.amazonaws.com"); } return ("https://sdb." 
+ provider.getContext().getRegionId() + ".amazonaws.com"); } @Override public boolean isSubscribed() throws CloudException, InternalException { APITrace.begin(provider, "KVDB.isSubscribed"); try { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), LIST_DOMAINS); EC2Method method; method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { method.invoke(); return true; } catch( EC2Exception e ) { if( e.getStatus() == HttpServletResponse.SC_UNAUTHORIZED || e.getStatus() == HttpServletResponse.SC_FORBIDDEN ) { return false; } String code = e.getCode(); if( code != null && (code.equals("SubscriptionCheckFailed") || code.equals("AuthFailure") || code.equals("SignatureDoesNotMatch") || code.equals("InvalidClientTokenId") || code.equals("OptInRequired")) ) { return false; } logger.warn(e.getSummary()); if( logger.isDebugEnabled() ) { e.printStackTrace(); } throw new CloudException(e); } } finally { APITrace.end(); } } @Override @Deprecated public boolean isSupportsKeyValueDatabases() throws CloudException, InternalException { return getCapabilities().isSupportsKeyValueDatabases(); } @Override public Iterable<String> list() throws CloudException, InternalException { APITrace.begin(provider, "KVDB.list"); try { ArrayList<String> list = new ArrayList<String>(); String marker = null; do { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), LIST_DOMAINS); EC2Method method; NodeList blocks; Document doc; if( marker != null ) { parameters.put("NextToken", marker); } method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { doc = method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } marker = null; blocks = doc.getElementsByTagName("NextToken"); if( blocks.getLength() > 0 ) { for( int i=0; i<blocks.getLength(); i++ ) { Node item = blocks.item(i); if( item.hasChildNodes() ) { marker = item.getFirstChild().getNodeValue().trim(); } } if( marker != null ) { 
break; } } blocks = doc.getElementsByTagName("DomainName"); for( int i=0; i<blocks.getLength(); i++ ) { Node name = blocks.item(i); if( name.hasChildNodes() ) { String domain = name.getFirstChild().getNodeValue(); if( domain != null ) { list.add(domain); } } } } while( marker != null ); return list; } finally { APITrace.end(); } } @Override public Iterable<ResourceStatus> listKeyValueDatabaseStatus() throws CloudException, InternalException { APITrace.begin(provider, "KVDB.listKeyValueDatabaseStatus"); try { ArrayList<ResourceStatus> list = new ArrayList<ResourceStatus>(); String marker = null; do { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), LIST_DOMAINS); EC2Method method; NodeList blocks; Document doc; if( marker != null ) { parameters.put("NextToken", marker); } method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { doc = method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } marker = null; blocks = doc.getElementsByTagName("NextToken"); if( blocks.getLength() > 0 ) { for( int i=0; i<blocks.getLength(); i++ ) { Node item = blocks.item(i); if( item.hasChildNodes() ) { marker = item.getFirstChild().getNodeValue().trim(); } } if( marker != null ) { break; } } blocks = doc.getElementsByTagName("DomainName"); for( int i=0; i<blocks.getLength(); i++ ) { Node name = blocks.item(i); if( name.hasChildNodes() ) { list.add(new ResourceStatus(name.getFirstChild().getNodeValue(), true)); } } } while( marker != null ); return list; } finally { APITrace.end(); } } @Override public @Nonnull String[] mapServiceAction(@Nonnull ServiceAction action) { if( action.equals(KeyValueDatabaseSupport.ANY) ) { return new String[] { EC2Method.SDB_PREFIX + "*" }; } else if( action.equals(KeyValueDatabaseSupport.CREATE_KVDB) ) { return new String[] { EC2Method.SDB_PREFIX + CREATE_DOMAIN }; } else if( action.equals(KeyValueDatabaseSupport.DELETE) ) { return new String[] { EC2Method.SDB_PREFIX + DELETE_DOMAIN 
}; } else if( action.equals(KeyValueDatabaseSupport.GET_KVDB) ) { return new String[] { EC2Method.SDB_PREFIX + LIST_DOMAINS }; } else if( action.equals(KeyValueDatabaseSupport.LIST_KVDB) ) { return new String[] { EC2Method.SDB_PREFIX + LIST_DOMAINS }; } else if( action.equals(KeyValueDatabaseSupport.PUT) ) { return new String[] { EC2Method.SDB_PREFIX + PUT_ATTRIBUTES }; } else if( action.equals(KeyValueDatabaseSupport.REMOVE_KVDB) ) { return new String[] { EC2Method.SDB_PREFIX + DELETE_DOMAIN }; } else if( action.equals(KeyValueDatabaseSupport.SELECT) ) { return new String[] { EC2Method.SDB_PREFIX + SELECT }; } return new String[0]; } @Override public Map<String,Set<KeyValuePair>> query(String queryString, boolean consistentRead) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.query"); try { Map<String,Set<KeyValuePair>> pairs = new HashMap<String,Set<KeyValuePair>>(); String marker = null; do { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), SELECT); NodeList blocks; EC2Method method; Document doc; if( marker != null ) { parameters.put("NextToken", marker); } parameters.put("SelectExpression", queryString); parameters.put("ConsistentRead", String.valueOf(consistentRead)); method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { doc = method.invoke(); } catch( EC2Exception e ) { String code = e.getCode(); if( code != null && code.equals("NoSuchDomain") ) { return null; } throw new CloudException(e); } marker = null; blocks = doc.getElementsByTagName("NextToken"); if( blocks.getLength() > 0 ) { for( int i=0; i<blocks.getLength(); i++ ) { Node item = blocks.item(i); if( item.hasChildNodes() ) { marker = item.getFirstChild().getNodeValue().trim(); } } if( marker != null ) { break; } } blocks = doc.getElementsByTagName("Item"); for( int i=0; i<blocks.getLength(); i++ ) { Node item = blocks.item(i); if( item.hasChildNodes() ) { TreeSet<KeyValuePair> itemPairs = new 
TreeSet<KeyValuePair>(); NodeList children = item.getChildNodes(); String itemId = null; for( int j=0; j<children.getLength(); j++ ) { Node child = children.item(j); if( child.hasChildNodes() ) { String nn = child.getNodeName(); if( nn.equals("Name") ) { itemId = child.getFirstChild().getNodeValue(); } else if( nn.equals("Attribute") ) { NodeList parts = child.getChildNodes(); String key = null, value = null; for( int k=0; k<parts.getLength(); k++ ) { Node part = parts.item(k); if( part.hasChildNodes() ) { String nv = part.getFirstChild().getNodeValue(); if( part.getNodeName().equals("Name") ) { key = nv; } else if( part.getNodeName().equals("Value") ) { value = nv; } } } if( key != null ) { itemPairs.add(new KeyValuePair(key, value)); } } } } if( itemId != null ) { pairs.put(itemId, itemPairs); } } } } while( marker != null ); return pairs; } finally { APITrace.end(); } } @Override public void removeDatabase(String domainId) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.removeDatabase"); try { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), DELETE_DOMAIN); EC2Method method; parameters.put("DomainName", domainId); method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } } finally { APITrace.end(); } } @Override public void removeKeyValuePairs(String inDomainId, String itemId, KeyValuePair ... pairs) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.removeKeyValuePairs"); try { if( pairs != null && pairs.length > 0 ) { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), DELETE_ATTRIBUTES); EC2Method method; int i = 0; parameters.put("DomainName", inDomainId); parameters.put("ItemName", itemId); for( KeyValuePair pair : pairs ) { parameters.put("Attribute." 
+ i + ".Name", pair.getKey()); if( pair.getValue() != null ) { parameters.put("Attribute." + i + ".Value", pair.getValue()); } i++; } method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } } } finally { APITrace.end(); } } @Override public void removeKeyValuePairs(String inDomainId, String itemId, String ... pairs) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.removeKeyValuePairStrings"); try { if( pairs != null && pairs.length > 0 ) { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), DELETE_ATTRIBUTES); EC2Method method; int i = 0; parameters.put("DomainName", inDomainId); parameters.put("ItemName", itemId); for( String pair : pairs ) { parameters.put("Attribute." + i + ".Name", pair); i++; } method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } } } finally { APITrace.end(); } } @Override public void replaceKeyValuePairs(String inDomainId, String itemId, KeyValuePair ... pairs) throws CloudException, InternalException { APITrace.begin(provider, "KVDB.replaceKeyValuePairs"); try { if( pairs != null && pairs.length > 0 ) { Map<String,String> parameters = provider.getStandardSimpleDBParameters(provider.getContext(), PUT_ATTRIBUTES); EC2Method method; int i = 0; parameters.put("DomainName", inDomainId); parameters.put("ItemName", itemId); for( KeyValuePair pair : pairs ) { parameters.put("Attribute." + i + ".Name", pair.getKey()); parameters.put("Attribute." + i + ".Value", pair.getValue()); parameters.put("Attribute." 
+ i + ".Replace", "true"); i++; } method = new EC2Method(provider, getSimpleDBUrl(), parameters); try { method.invoke(); } catch( EC2Exception e ) { throw new CloudException(e); } } } finally { APITrace.end(); } } private String validateName(String name) { StringBuilder str = new StringBuilder(); for( int i=0; i<name.length(); i++ ) { char c = name.charAt(i); if( Character.isLetterOrDigit(c) ) { str.append(c); } else if( c == '-' || c == '_' || c == '.' ) { str.append(c); } } if( str.length() < 3 ) { if( str.length() < 2 ) { if( str.length() < 1 ) { return "aaa"; } return str.toString() + "aa"; } return str.toString() + "a"; } else if( str.length() > 255 ) { return str.toString().substring(0,255); } return str.toString(); } }
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2000, 2010 Oracle and/or its affiliates. All rights reserved. * */ package com.sleepycat.compat; import java.util.Comparator; import com.sleepycat.compat.DbCompat; import com.sleepycat.je.Cursor; import com.sleepycat.je.CursorConfig; import com.sleepycat.je.Database; import com.sleepycat.je.DatabaseConfig; import com.sleepycat.je.DatabaseEntry; import com.sleepycat.je.DatabaseException; import com.sleepycat.je.DatabaseExistsException; import com.sleepycat.je.DatabaseNotFoundException; import com.sleepycat.je.Durability; import com.sleepycat.je.DbInternal; import com.sleepycat.je.Environment; import com.sleepycat.je.EnvironmentConfig; import com.sleepycat.je.EnvironmentFailureException; import com.sleepycat.je.LockMode; import com.sleepycat.je.OperationStatus; import com.sleepycat.je.SecondaryConfig; import com.sleepycat.je.SecondaryCursor; import com.sleepycat.je.SecondaryDatabase; import com.sleepycat.je.Transaction; import com.sleepycat.je.TransactionConfig; import com.sleepycat.je.dbi.EnvironmentImpl; /** * A minimal set of BDB DB-JE compatibility constants and static methods, for * internal use only. * * Two versions of this class, with the same public interface but different * implementations, are maintained in parallel in the DB and JE source trees. * By the use of the constants and methods in this class, along with a script * that moves the source code from JE to DB, the source code in certain * packages is kept "portable" and is shared by the two products. The script * translates the package names from com.sleepycat.je to com.sleepycat.db, and * perform other fix-ups as described further below. 
* * The JE directories that contain portable code are: * * src/com/sleepycat/bind * /collections * /persist * /util * test/com/sleepycat/bind * /collections * /persist * /util * * In DB, these sources are stored in the following locations: * * Sources: * src/java * Tests: * test/java/compat * * To keep this source code portable there are additional coding rules, above * and beyond the standard rules (such as coding style) for all JE code. * * + In general we should try to use the JE/DB public API, since it is usually * the same or similar in both products. If we use internal APIs, they will * always be different and will require special handling. * * + When there are differences between products, the first choice for * handling the difference is to use a DbCompat static method or constant. * This keeps the source code the same for both products (except in this * DbCompat class). * * + When JE-only code is needed -- for example, some APIs only exist in JE, * and special handling of JE exceptions is sometimes needed -- the * following special comment syntax can be used to bracket the JE-only code: * * <!-- begin JE only --> * JE-only code goes here * <!-- end JE only --> * * This syntax must be used inside of a comment: either inside a javadoc * section as shown above, or inside a single-line comment (space before * last slash is to prevent ending this javadoc comment): * * /* <!-- begin JE only --> * / * JE-only code goes here * /* <!-- end JE only --> * / * * All lines between the <!-- begin JE only --> and <!-- end JE only --> * lines, and including these lines, will be removed by the script that * transfers code from JE to DB. * * + When DB-only code is needed, the code will exist in the JE product but * will never be executed. For DB-only APIs, we hide the API from the user * with the @hidden javadoc tag. The @hidden tag is ignored on the DB side. 
 * We do not have a way to remove DB-only code completely from the JE
 * product, because we do not use a preprocessor for building JE.
 *
 * + Because DatabaseException (and all subclasses) are checked exceptions in
 * DB but runtime exceptions in JE, we cannot omit the 'throws' declaration.
 * Another difference is that DB normally throws DatabaseException for all
 * errors, while JE has many specific subclasses for specific errors.
 * Therefore, any method that calls a DB API method (for example,
 * Database.get or put) will have a "throws DatabaseException" clause.
 *
 * + Special consideration is needed for the @throws clauses in javadoc. We do
 * want to javadoc the JE-only exceptions that are thrown, so the @throws
 * for these exceptions should be inside the "begin/end JE only" brackets.
 * We also need to document the fact that DB may throw DatabaseException for
 * almost any method, so we do that with a final @throws clause that looks
 * like this:
 *
 * @throws DatabaseException the base class for all BDB exceptions.
 *
 * This is a compromise. JE doesn't throw this exception, but we've
 * described it in a way that still makes some sense for JE, sort of.
 *
 * + Other special handling can be implemented in the transfer script, which
 * uses SED. Entire files can be excluded from the transfer, for example,
 * the JE-only exception classes. Name changes can also be made using SED,
 * for example: s/LockConflictException/DeadlockException/. See the
 * db/dist/s_je2db script for details.
*/
public class DbCompat {

    /* Capabilities */

    // Capability flags: which Berkeley DB (core) features this JE-backed
    // layer supports.  Callers branch on these constants instead of probing
    // at runtime.
    public static final boolean CDB = false;
    public static final boolean JOIN = true;
    public static final boolean NESTED_TRANSACTIONS = false;
    public static final boolean INSERTION_ORDERED_DUPLICATES = false;
    public static final boolean SEPARATE_DATABASE_FILES = false;
    public static final boolean MEMORY_SUBSYSTEM = false;
    public static final boolean LOCK_SUBSYSTEM = false;
    public static final boolean HASH_METHOD = false;
    public static final boolean RECNO_METHOD = false;
    public static final boolean QUEUE_METHOD = false;
    public static final boolean BTREE_RECNUM_METHOD = false;
    public static final boolean OPTIONAL_READ_UNCOMMITTED = false;
    public static final boolean SECONDARIES = true;
    // Deliberately non-final so callers (presumably tests/TransactionRunner)
    // can toggle stack-trace printing at runtime.
    public static boolean TRANSACTION_RUNNER_PRINT_STACK_TRACES = true;
    public static final boolean DATABASE_COUNT = true;
    public static final boolean NEW_JE_EXCEPTIONS = true;
    public static final boolean POPULATE_ENFORCES_CONSTRAINTS = true;

    /**
     * For read-only cursor operations on a replicated node, we must use a
     * transaction to satisfy HA requirements.  However, we use a Durability
     * that avoids consistency checks on the Master, and we use ReadCommitted
     * isolation since that gives the same behavior as a non-transactional
     * cursor: locks are released when the cursor is moved or closed.
     */
    public static final TransactionConfig READ_ONLY_TXN_CONFIG;
    static {
        READ_ONLY_TXN_CONFIG = new TransactionConfig();
        READ_ONLY_TXN_CONFIG.setDurability(Durability.READ_ONLY_TXN);
        READ_ONLY_TXN_CONFIG.setReadCommitted(true);
    }

    // The cache is always reported as initialized; the config is ignored.
    public static boolean getInitializeCache(EnvironmentConfig config) {
        return true;
    }

    public static boolean getInitializeLocking(EnvironmentConfig config) {
        return config.getLocking();
    }

    // Concurrent Data Store is never enabled here (see CDB flag above).
    public static boolean getInitializeCDB(EnvironmentConfig config) {
        return false;
    }

    public static boolean isReplicated(Environment env) {
        return DbInternal.getEnvironmentImpl(env).isReplicated();
    }

    // Only the Btree access method is supported; the type predicates below
    // therefore return fixed values regardless of the config passed in.
    public static boolean isTypeBtree(DatabaseConfig dbConfig) {
        return true;
    }

    public static boolean isTypeHash(DatabaseConfig dbConfig) {
        return false;
    }

    public static boolean isTypeQueue(DatabaseConfig dbConfig) {
        return false;
    }

    public static boolean isTypeRecno(DatabaseConfig dbConfig) {
        return false;
    }

    public static boolean getBtreeRecordNumbers(DatabaseConfig dbConfig) {
        return false;
    }

    // Read-uncommitted is always reported as available (config ignored).
    public static boolean getReadUncommitted(DatabaseConfig dbConfig) {
        return true;
    }

    public static boolean getRenumbering(DatabaseConfig dbConfig) {
        return false;
    }

    public static boolean getSortedDuplicates(DatabaseConfig dbConfig) {
        return dbConfig.getSortedDuplicates();
    }

    public static boolean getUnsortedDuplicates(DatabaseConfig dbConfig) {
        return false;
    }

    public static boolean getDeferredWrite(DatabaseConfig dbConfig) {
        return dbConfig.getDeferredWrite();
    }

    // XXX Remove this when DB and JE support CursorConfig.cloneConfig
    public static CursorConfig cloneCursorConfig(CursorConfig config) {
        // Manual field-by-field copy of the two settings this layer uses.
        CursorConfig newConfig = new CursorConfig();
        newConfig.setReadCommitted(config.getReadCommitted());
        newConfig.setReadUncommitted(config.getReadUncommitted());
        return newConfig;
    }

    public static boolean getWriteCursor(CursorConfig config) {
        return false;
    }

    // Requesting a write cursor is unsupported; disabling one is a no-op.
    public static void setWriteCursor(CursorConfig config, boolean write) {
        if (write) {
            throw new UnsupportedOperationException();
        }
    }

    // Record-number operations (Recno/Queue-style access) are unsupported.
    public static void setRecordNumber(DatabaseEntry entry, int recNum) {
        throw new UnsupportedOperationException();
    }

    public static int getRecordNumber(DatabaseEntry entry) {
        throw new UnsupportedOperationException();
    }

    // There is no separate file per database (see SEPARATE_DATABASE_FILES).
    public static String getDatabaseFile(Database db) {
        return null;
    }

    public static long getDatabaseCount(Database db)
        throws DatabaseException {

        return db.count();
    }

    /**
     * @throws DatabaseException from DB core.
     */
    public static OperationStatus getCurrentRecordNumber(Cursor cursor,
                                                         DatabaseEntry key,
                                                         LockMode lockMode)
        throws DatabaseException {

        throw new UnsupportedOperationException();
    }

    /**
     * @throws DatabaseException from DB core.
     */
    public static OperationStatus getSearchRecordNumber(Cursor cursor,
                                                        DatabaseEntry key,
                                                        DatabaseEntry data,
                                                        LockMode lockMode)
        throws DatabaseException {

        throw new UnsupportedOperationException();
    }

    /**
     * @throws DatabaseException from DB core.
     */
    public static OperationStatus getSearchRecordNumber(
        SecondaryCursor cursor,
        DatabaseEntry key,
        DatabaseEntry pKey,
        DatabaseEntry data,
        LockMode lockMode)
        throws DatabaseException {

        throw new UnsupportedOperationException();
    }

    /**
     * @throws DatabaseException from DB core.
     */
    public static OperationStatus putAfter(Cursor cursor,
                                           DatabaseEntry key,
                                           DatabaseEntry data)
        throws DatabaseException {

        throw new UnsupportedOperationException();
    }

    /**
     * @throws DatabaseException from DB core.
     */
    public static OperationStatus putBefore(Cursor cursor,
                                            DatabaseEntry key,
                                            DatabaseEntry data)
        throws DatabaseException {

        throw new UnsupportedOperationException();
    }

    public static OperationStatus append(Database db,
                                         Transaction txn,
                                         DatabaseEntry key,
                                         DatabaseEntry data) {
        throw new UnsupportedOperationException();
    }

    public static Transaction getThreadTransaction(Environment env)
        throws DatabaseException {

        return env.getThreadTransaction();
    }

    public static ClassLoader getClassLoader(Environment env) {
        return DbInternal.getEnvironmentImpl(env).getClassLoader();
    }

    /* Methods used by the collections tests. */

    // The setters below accept only the value JE already implements and
    // throw UnsupportedOperationException for any other request.
    public static void setInitializeCache(EnvironmentConfig config,
                                          boolean val) {
        if (!val) {
            throw new UnsupportedOperationException();
        }
    }

    public static void setInitializeLocking(EnvironmentConfig config,
                                            boolean val) {
        if (!val) {
            throw new UnsupportedOperationException();
        }
    }

    public static void setInitializeCDB(EnvironmentConfig config,
                                        boolean val) {
        if (val) {
            throw new UnsupportedOperationException();
        }
    }

    public static void setLockDetectModeOldest(EnvironmentConfig config) {
        /* JE does this by default, since it uses timeouts. */
    }

    public static void setSerializableIsolation(TransactionConfig config,
                                                boolean val) {
        config.setSerializableIsolation(val);
    }

    // Sets the transaction's "importunate" flag and returns the previous
    // value so callers can restore it afterwards.
    public static boolean setImportunate(final Transaction txn,
                                         final boolean importunate) {
        final boolean oldVal = DbInternal.getTxn(txn).getImportunate();
        DbInternal.getTxn(txn).setImportunate(importunate);
        return oldVal;
    }

    public static void setBtreeComparator(DatabaseConfig dbConfig,
                                          Comparator<byte[]> comparator) {
        dbConfig.setBtreeComparator(comparator);
    }

    // Btree is the only access method, so selecting it is a no-op.
    public static void setTypeBtree(DatabaseConfig dbConfig) {
    }

    public static void setTypeHash(DatabaseConfig dbConfig) {
        throw new UnsupportedOperationException();
    }

    public static void setTypeRecno(DatabaseConfig dbConfig) {
        throw new UnsupportedOperationException();
    }

    public static void setTypeQueue(DatabaseConfig dbConfig) {
        throw new UnsupportedOperationException();
    }

    public static void setBtreeRecordNumbers(DatabaseConfig dbConfig,
                                             boolean val) {
        throw new UnsupportedOperationException();
    }

    // Always available (see getReadUncommitted above), so this is a no-op.
    public static void setReadUncommitted(DatabaseConfig dbConfig,
                                          boolean val) {
    }

    public static void setRenumbering(DatabaseConfig dbConfig,
                                      boolean val) {
        throw new UnsupportedOperationException();
    }

    public static void setSortedDuplicates(DatabaseConfig dbConfig,
                                           boolean val) {
        dbConfig.setSortedDuplicates(val);
    }

    public static void setUnsortedDuplicates(DatabaseConfig dbConfig,
                                             boolean val) {
        if (val) {
            throw new UnsupportedOperationException();
        }
    }

    public static void setDeferredWrite(DatabaseConfig dbConfig,
                                        boolean val) {
        dbConfig.setDeferredWrite(val);
    }

    // Fixed record length is a Queue/Recno setting; only zero is accepted.
    public static void setRecordLength(DatabaseConfig dbConfig, int val) {
        if (val != 0) {
            throw new UnsupportedOperationException();
        }
    }

    public static void setRecordPad(DatabaseConfig dbConfig, int val) {
        throw new UnsupportedOperationException();
    }

    public static boolean databaseExists(Environment env,
                                         String fileName,
                                         String dbName) {
        // No per-database files in this environment: callers must pass null.
        assert fileName == null;
        return env.getDatabaseNames().contains(dbName);
    }

    /**
     * Returns null if the database is not found (and AllowCreate is false) or
     * already exists (and ExclusiveCreate is true).
     */
    public static Database openDatabase(Environment env,
                                        Transaction txn,
                                        String fileName,
                                        String dbName,
                                        DatabaseConfig config) {
        assert fileName == null;
        try {
            return env.openDatabase(txn, dbName, config);
        } catch (DatabaseNotFoundException e) {
            return null;
        } catch (DatabaseExistsException e) {
            return null;
        }
    }

    /**
     * Returns null if the database is not found (and AllowCreate is false) or
     * already exists (and ExclusiveCreate is true).
     */
    public static SecondaryDatabase openSecondaryDatabase(
        Environment env,
        Transaction txn,
        String fileName,
        String dbName,
        Database primaryDatabase,
        SecondaryConfig config) {

        assert fileName == null;
        try {
            return env.openSecondaryDatabase(txn, dbName, primaryDatabase,
                                             config);
        } catch (DatabaseNotFoundException e) {
            return null;
        } catch (DatabaseExistsException e) {
            return null;
        }
    }

    /**
     * Returns false if the database is not found.
     */
    public static boolean truncateDatabase(Environment env,
                                           Transaction txn,
                                           String fileName,
                                           String dbName) {
        assert fileName == null;
        try {
            env.truncateDatabase(txn, dbName, false /*returnCount*/);
            return true;
        } catch (DatabaseNotFoundException e) {
            return false;
        }
    }

    /**
     * Returns false if the database is not found.
     */
    public static boolean removeDatabase(Environment env,
                                         Transaction txn,
                                         String fileName,
                                         String dbName) {
        assert fileName == null;
        try {
            env.removeDatabase(txn, dbName);
            return true;
        } catch (DatabaseNotFoundException e) {
            return false;
        }
    }

    /**
     * Returns false if the database is not found.
     */
    public static boolean renameDatabase(Environment env,
                                         Transaction txn,
                                         String oldFileName,
                                         String oldDbName,
                                         String newFileName,
                                         String newDbName) {
        assert oldFileName == null;
        assert newFileName == null;
        try {
            env.renameDatabase(txn, oldDbName, newDbName);
            return true;
        } catch (DatabaseNotFoundException e) {
            return false;
        }
    }

    /**
     * Fires an assertion if the database is not found (and AllowCreate is
     * false) or already exists (and ExclusiveCreate is true).
     */
    public static Database testOpenDatabase(Environment env,
                                            Transaction txn,
                                            String file,
                                            String name,
                                            DatabaseConfig config) {
        try {
            return env.openDatabase(txn, makeTestDbName(file, name), config);
        } catch (DatabaseNotFoundException e) {
            assert false;
            return null;
        } catch (DatabaseExistsException e) {
            assert false;
            return null;
        }
    }

    /**
     * Fires an assertion if the database is not found (and AllowCreate is
     * false) or already exists (and ExclusiveCreate is true).
     */
    public static SecondaryDatabase testOpenSecondaryDatabase(
        Environment env,
        Transaction txn,
        String file,
        String name,
        Database primary,
        SecondaryConfig config) {

        try {
            return env.openSecondaryDatabase(txn, makeTestDbName(file, name),
                                             primary, config);
        } catch (DatabaseNotFoundException e) {
            assert false;
            return null;
        } catch (DatabaseExistsException e) {
            assert false;
            return null;
        }
    }

    // Folds a DB-core (file, name) pair into the single "file.name" namespace
    // used by the test open methods above.
    private static String makeTestDbName(String file, String name) {
        if (file == null) {
            return name;
        } else {
            if (name != null) {
                return file + '.' + name;
            } else {
                return file;
            }
        }
    }

    public static boolean isDalvik() {
        return EnvironmentImpl.IS_DALVIK;
    }

    // Thin wrappers that surface EnvironmentFailureException factories as
    // plain RuntimeException results.
    public static RuntimeException unexpectedException(Exception cause) {
        return EnvironmentFailureException.unexpectedException(cause);
    }

    public static RuntimeException unexpectedException(String msg,
                                                       Exception cause) {
        return EnvironmentFailureException.unexpectedException(msg, cause);
    }

    public static RuntimeException unexpectedState(String msg) {
        return EnvironmentFailureException.unexpectedState(msg);
    }

    public static RuntimeException unexpectedState() {
        return EnvironmentFailureException.unexpectedState();
    }
}
package br.com.sarpsys.database.daos;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import br.com.sarpsys.database.CreateConnectionMySQL;
import br.com.sarpsys.entities.Clp;
import br.com.sarpsys.entities.ClpPort;
import br.com.sarpsys.entities.Dispositivo;
import br.com.sarpsys.entities.Room;

/**
 * JDBC data-access object for the {@code dispositivo} table.
 *
 * <p>Fixes over the previous revision: every statement/result set is managed
 * with try-with-resources so it is closed even when the query throws (the old
 * code leaked them on any failure), and exceptions that were silently
 * swallowed are now logged. The external best-effort contract is preserved:
 * write methods return {@code false} and read methods return an empty
 * list/{@code null} on failure instead of propagating the error.</p>
 */
public class DispositivoDao {

    private Connection connection;

    /** Opens a connection through the shared MySQL connection factory. */
    public DispositivoDao() throws SQLException {
        this.connection = CreateConnectionMySQL.getConnection();
    }

    /**
     * Maps the current row of {@code rs} to a {@link Dispositivo}.
     * Shared by every query method (previously duplicated five times).
     */
    private Dispositivo mapRow(ResultSet rs) throws SQLException {
        Dispositivo dispositivo = new Dispositivo();
        dispositivo.setId(rs.getInt("id"));
        dispositivo.setPort(rs.getInt("clpport_id"));
        dispositivo.setState(rs.getInt("state"));
        dispositivo.setIdTipo(rs.getInt("tipo_dispositivo_id"));
        dispositivo.setIdRoom(rs.getInt("room_id"));
        dispositivo.setIdClp(rs.getInt("clp_id"));
        return dispositivo;
    }

    /** Inserts a new device. Returns true on success, false on failure. */
    public boolean insere(Dispositivo dispositivo) throws SQLException {
        String sql = "INSERT INTO dispositivo (clpport_id,state,tipo_dispositivo_id,room_id,clp_id) values (?,?,?,?,?)";
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            stmt.setInt(1, dispositivo.getPort());
            stmt.setInt(2, dispositivo.getState());
            stmt.setInt(3, dispositivo.getIdTipo());
            stmt.setInt(4, dispositivo.getIdRoom());
            stmt.setInt(5, dispositivo.getIdClp());
            stmt.execute();
            return true;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return false;
    }

    /** Returns all devices, or an empty list if the query fails. */
    public List<Dispositivo> getDispositivos() throws SQLException {
        String sql = "SELECT * FROM dispositivo";
        List<Dispositivo> dispositivos = new ArrayList<Dispositivo>();
        try (PreparedStatement stmt = this.connection.prepareStatement(sql);
             ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                dispositivos.add(mapRow(rs));
            }
        } catch (Exception e) {
            e.printStackTrace(); // previously swallowed silently
        }
        return dispositivos;
    }

    /** Returns the devices in the given room, or an empty list on failure. */
    public List<Dispositivo> getDispositivosFromRoom(Room room) throws SQLException {
        String sql = "SELECT * FROM dispositivo WHERE room_id = ?";
        List<Dispositivo> dispositivos = new ArrayList<Dispositivo>();
        try (PreparedStatement stmt = this.connection.prepareStatement(sql)) {
            stmt.setInt(1, room.getId());
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    dispositivos.add(mapRow(rs));
                }
            }
        } catch (Exception e) {
            e.printStackTrace(); // previously swallowed silently
        }
        return dispositivos;
    }

    /** Returns the devices on the given CLP, or an empty list on failure. */
    public List<Dispositivo> getDispositivosFromClp(Clp clp) throws SQLException {
        String sql = "SELECT * FROM dispositivo WHERE clp_id = ?";
        List<Dispositivo> dispositivos = new ArrayList<Dispositivo>();
        try (PreparedStatement stmt = this.connection.prepareStatement(sql)) {
            stmt.setInt(1, clp.getId());
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    dispositivos.add(mapRow(rs));
                }
            }
        } catch (Exception e) {
            e.printStackTrace(); // previously swallowed silently
        }
        return dispositivos;
    }

    /** True if any device is bound to the given CLP port, false otherwise. */
    public boolean existDispositivoFromClpPort(Clp clp, ClpPort clpport) throws SQLException {
        String sql = "SELECT * FROM dispositivo WHERE clp_id = ? AND clpport_id=?";
        boolean exist = false;
        try (PreparedStatement stmt = this.connection.prepareStatement(sql)) {
            stmt.setInt(1, clp.getId());
            stmt.setInt(2, clpport.getId());
            try (ResultSet rs = stmt.executeQuery()) {
                if (rs.next()) {
                    exist = true;
                }
            }
        } catch (Exception e) {
            e.printStackTrace(); // previously swallowed silently
        }
        return exist;
    }

    /**
     * Loads the device whose id matches {@code dispositivo.getId()}.
     * Returns null when no such row exists or the query fails (the old code
     * could return the stale input object when the statement failed early).
     */
    public Dispositivo getDispositivo(Dispositivo dispositivo) throws SQLException {
        String sql = "SELECT * FROM dispositivo WHERE id=?";
        int id = dispositivo.getId();
        dispositivo = null;
        try (PreparedStatement stmt = this.connection.prepareStatement(sql)) {
            stmt.setInt(1, id);
            try (ResultSet rs = stmt.executeQuery()) {
                if (rs.next()) {
                    dispositivo = mapRow(rs);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        return dispositivo;
    }

    /** Updates every column of the device row. True on success. */
    public boolean altera(Dispositivo dispositivo) throws SQLException {
        String sql = "UPDATE dispositivo SET clpport_id=?, state=?,tipo_dispositivo_id=?,room_id=?,clp_id=? WHERE id=?";
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            stmt.setInt(1, dispositivo.getPort());
            stmt.setInt(2, dispositivo.getState());
            stmt.setInt(3, dispositivo.getIdTipo());
            stmt.setInt(4, dispositivo.getIdRoom());
            stmt.setInt(5, dispositivo.getIdClp());
            stmt.setInt(6, dispositivo.getId());
            stmt.execute();
            return true;
        } catch (Exception e) {
            e.printStackTrace(); // previously swallowed silently
        }
        return false;
    }

    /** Updates only the {@code state} column. True on success. */
    public boolean atualizaStatus(Dispositivo dispositivo) throws SQLException {
        String sql = "UPDATE dispositivo SET state=? WHERE id=?";
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            stmt.setInt(1, dispositivo.getState());
            stmt.setInt(2, dispositivo.getId());
            stmt.execute();
            return true;
        } catch (Exception e) {
            e.printStackTrace(); // previously swallowed silently
        }
        return false;
    }

    /*
     * Verificar Integridade Referencial
     */
    /** Deletes the device row. True on success. */
    public boolean remove(Dispositivo dispositivo) throws SQLException {
        String sql = "DELETE FROM dispositivo WHERE id=?";
        try (PreparedStatement stmt = connection.prepareStatement(sql)) {
            stmt.setInt(1, dispositivo.getId());
            stmt.execute();
            return true;
        } catch (Exception e) {
            e.printStackTrace();
        }
        return false;
    }

    /** Closes the underlying connection, logging (not propagating) failures. */
    public void closeConnection() {
        try {
            this.connection.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.raptor.storage; import com.facebook.presto.orc.OrcBatchRecordReader; import com.facebook.presto.orc.OrcDataSource; import com.facebook.presto.orc.StorageStripeMetadataSource; import com.facebook.presto.orc.cache.StorageOrcFileTailSource; import com.facebook.presto.raptor.RaptorColumnHandle; import com.facebook.presto.raptor.backup.BackupManager; import com.facebook.presto.raptor.backup.BackupStore; import com.facebook.presto.raptor.backup.FileBackupStore; import com.facebook.presto.raptor.filesystem.FileSystemContext; import com.facebook.presto.raptor.filesystem.LocalFileStorageService; import com.facebook.presto.raptor.filesystem.LocalOrcDataEnvironment; import com.facebook.presto.raptor.filesystem.RaptorLocalFileSystem; import com.facebook.presto.raptor.metadata.ColumnStats; import com.facebook.presto.raptor.metadata.ShardDeleteDelta; import com.facebook.presto.raptor.metadata.ShardDelta; import com.facebook.presto.raptor.metadata.ShardInfo; import com.facebook.presto.raptor.metadata.ShardManager; import com.facebook.presto.raptor.metadata.ShardRecorder; import com.facebook.presto.raptor.storage.InMemoryShardRecorder.RecordedShard; import com.facebook.presto.spi.ConnectorPageSource; import com.facebook.presto.spi.NodeManager; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.block.Block; import 
com.facebook.presto.spi.predicate.NullableValue; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.SqlDate; import com.facebook.presto.spi.type.SqlTime; import com.facebook.presto.spi.type.SqlTimestamp; import com.facebook.presto.spi.type.SqlVarbinary; import com.facebook.presto.spi.type.Type; import com.facebook.presto.testing.MaterializedResult; import com.facebook.presto.testing.TestingNodeManager; import com.facebook.presto.type.TypeRegistry; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import io.airlift.slice.Slice; import io.airlift.units.DataSize; import io.airlift.units.Duration; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.joda.time.DateTime; import org.joda.time.Days; import org.joda.time.chrono.ISOChronology; import org.skife.jdbi.v2.DBI; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.IDBI; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.io.File; import java.io.IOException; import java.net.URI; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.OptionalInt; import java.util.OptionalLong; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.IntStream; import static com.facebook.airlift.concurrent.MoreFutures.getFutureValue; import static com.facebook.airlift.json.JsonCodec.jsonCodec; import static com.facebook.presto.RowPagesBuilder.rowPagesBuilder; import static com.facebook.presto.hive.HiveFileContext.DEFAULT_HIVE_FILE_CONTEXT; import static com.facebook.presto.orc.metadata.CompressionKind.SNAPPY; import static com.facebook.presto.raptor.filesystem.FileSystemUtil.xxhash64; import 
static com.facebook.presto.raptor.metadata.SchemaDaoUtil.createTablesWithRetry; import static com.facebook.presto.raptor.metadata.TestDatabaseShardManager.createShardManager; import static com.facebook.presto.raptor.storage.OrcTestingUtil.createReader; import static com.facebook.presto.raptor.storage.OrcTestingUtil.octets; import static com.facebook.presto.raptor.storage.StorageManagerConfig.OrcOptimizedWriterStage.ENABLED_AND_VALIDATED; import static com.facebook.presto.spi.type.BigintType.BIGINT; import static com.facebook.presto.spi.type.BooleanType.BOOLEAN; import static com.facebook.presto.spi.type.DateType.DATE; import static com.facebook.presto.spi.type.DoubleType.DOUBLE; import static com.facebook.presto.spi.type.TimeType.TIME; import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY; import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP; import static com.facebook.presto.spi.type.VarbinaryType.VARBINARY; import static com.facebook.presto.spi.type.VarcharType.createVarcharType; import static com.facebook.presto.testing.DateTimeTestingUtils.sqlTimestampOf; import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream; import static com.facebook.presto.testing.MaterializedResult.resultBuilder; import static com.facebook.presto.testing.TestingConnectorSession.SESSION; import static com.facebook.presto.testing.assertions.Assert.assertEquals; import static com.google.common.hash.Hashing.md5; import static com.google.common.io.Files.createTempDir; import static com.google.common.io.Files.hash; import static com.google.common.io.MoreFiles.deleteRecursively; import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE; import static io.airlift.slice.Slices.utf8Slice; import static io.airlift.slice.Slices.wrappedBuffer; import static io.airlift.units.DataSize.Unit.BYTE; import static io.airlift.units.DataSize.Unit.MEGABYTE; import static java.lang.String.format; import static 
java.util.concurrent.TimeUnit.NANOSECONDS; import static org.joda.time.DateTimeZone.UTC; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNotEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import static org.testng.FileAssert.assertDirectory; import static org.testng.FileAssert.assertFile; @Test(singleThreaded = true) public class TestOrcStorageManager { private static final ISOChronology UTC_CHRONOLOGY = ISOChronology.getInstanceUTC(); private static final DateTime EPOCH = new DateTime(0, UTC_CHRONOLOGY); private static final String CURRENT_NODE = "node"; private static final String CONNECTOR_ID = "test"; private static final long TRANSACTION_ID = 123; private static final int DELETION_THREADS = 2; private static final Duration SHARD_RECOVERY_TIMEOUT = new Duration(30, TimeUnit.SECONDS); private static final int MAX_SHARD_ROWS = 100; private static final DataSize MAX_FILE_SIZE = new DataSize(1, MEGABYTE); private static final Duration MISSING_SHARD_DISCOVERY = new Duration(5, TimeUnit.MINUTES); private static final ReaderAttributes READER_ATTRIBUTES = new ReaderAttributes(new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), new DataSize(1, MEGABYTE), true, false); private final NodeManager nodeManager = new TestingNodeManager(); private Handle dummyHandle; private File temporary; private StorageService storageService; private ShardRecoveryManager recoveryManager; private FileBackupStore fileBackupStore; private Optional<BackupStore> backupStore; private InMemoryShardRecorder shardRecorder; @BeforeMethod public void setup() { temporary = createTempDir(); URI directory = new File(temporary, "data").toURI(); storageService = new LocalFileStorageService(new LocalOrcDataEnvironment(), directory); storageService.start(); File backupDirectory = new File(temporary, "backup"); fileBackupStore = new 
FileBackupStore(backupDirectory); fileBackupStore.start(); backupStore = Optional.of(fileBackupStore); IDBI dbi = new DBI("jdbc:h2:mem:test" + System.nanoTime()); dummyHandle = dbi.open(); createTablesWithRetry(dbi); ShardManager shardManager = createShardManager(dbi); Duration discoveryInterval = new Duration(5, TimeUnit.MINUTES); recoveryManager = new ShardRecoveryManager(storageService, backupStore, new LocalOrcDataEnvironment(), nodeManager, shardManager, discoveryInterval, 10); shardRecorder = new InMemoryShardRecorder(); } @AfterMethod(alwaysRun = true) public void tearDown() throws Exception { if (dummyHandle != null) { dummyHandle.close(); } deleteRecursively(temporary.toPath(), ALLOW_INSECURE); } @Test public void testWriter() throws Exception { OrcStorageManager manager = createOrcStorageManager(); List<Long> columnIds = ImmutableList.of(3L, 7L); List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(10)); StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); List<Page> pages = rowPagesBuilder(columnTypes) .row(123L, "hello") .row(456L, "bye") .build(); sink.appendPages(pages); // shard is not recorded until flush assertEquals(shardRecorder.getShards().size(), 0); sink.flush(); // shard is recorded after flush List<RecordedShard> recordedShards = shardRecorder.getShards(); assertEquals(recordedShards.size(), 1); List<ShardInfo> shards = getFutureValue(sink.commit()); assertEquals(shards.size(), 1); ShardInfo shardInfo = Iterables.getOnlyElement(shards); UUID shardUuid = shardInfo.getShardUuid(); File file = new File(storageService.getStorageFile(shardUuid).toString()); File backupFile = fileBackupStore.getBackupFile(shardUuid); assertEquals(recordedShards.get(0).getTransactionId(), TRANSACTION_ID); assertEquals(recordedShards.get(0).getShardUuid(), shardUuid); assertEquals(shardInfo.getRowCount(), 2); assertEquals(shardInfo.getCompressedSize(), file.length()); assertEquals(shardInfo.getXxhash64(), xxhash64(new 
RaptorLocalFileSystem(new Configuration()), new Path(file.toURI()))); // verify primary and backup shard exist assertFile(file, "primary shard"); assertFile(backupFile, "backup shard"); assertFileEquals(file, backupFile); // remove primary shard to force recovery from backup assertTrue(file.delete()); assertTrue(file.getParentFile().delete()); assertFalse(file.exists()); recoveryManager.restoreFromBackup(shardUuid, shardInfo.getCompressedSize(), OptionalLong.of(shardInfo.getXxhash64())); FileSystem fileSystem = new LocalOrcDataEnvironment().getFileSystem(FileSystemContext.DEFAULT_RAPTOR_CONTEXT); try (OrcDataSource dataSource = manager.openShard(fileSystem, shardUuid, READER_ATTRIBUTES)) { OrcBatchRecordReader reader = createReader(dataSource, columnIds, columnTypes); assertEquals(reader.nextBatch(), 2); Block column0 = reader.readBlock(0); assertEquals(column0.isNull(0), false); assertEquals(column0.isNull(1), false); assertEquals(BIGINT.getLong(column0, 0), 123L); assertEquals(BIGINT.getLong(column0, 1), 456L); Block column1 = reader.readBlock(1); assertEquals(createVarcharType(10).getSlice(column1, 0), utf8Slice("hello")); assertEquals(createVarcharType(10).getSlice(column1, 1), utf8Slice("bye")); assertEquals(reader.nextBatch(), -1); } } @Test public void testReader() throws Exception { OrcStorageManager manager = createOrcStorageManager(); List<Long> columnIds = ImmutableList.of(2L, 4L, 6L, 7L, 8L, 9L); List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(10), VARBINARY, DATE, BOOLEAN, DOUBLE); byte[] bytes1 = octets(0x00, 0xFE, 0xFF); byte[] bytes3 = octets(0x01, 0x02, 0x19, 0x80); StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); Object[][] doubles = { {881L, "-inf", null, null, null, Double.NEGATIVE_INFINITY}, {882L, "+inf", null, null, null, Double.POSITIVE_INFINITY}, {883L, "nan", null, null, null, Double.NaN}, {884L, "min", null, null, null, Double.MIN_VALUE}, {885L, "max", null, null, null, Double.MAX_VALUE}, 
{886L, "pzero", null, null, null, 0.0}, {887L, "nzero", null, null, null, -0.0}, }; List<Page> pages = rowPagesBuilder(columnTypes) .row(123L, "hello", wrappedBuffer(bytes1), sqlDate(2001, 8, 22).getDays(), true, 123.45) .row(null, null, null, null, null, null) .row(456L, "bye", wrappedBuffer(bytes3), sqlDate(2005, 4, 22).getDays(), false, 987.65) .rows(doubles) .build(); sink.appendPages(pages); List<ShardInfo> shards = getFutureValue(sink.commit()); assertEquals(shards.size(), 1); UUID uuid = Iterables.getOnlyElement(shards).getShardUuid(); MaterializedResult expected = resultBuilder(SESSION, columnTypes) .row(123L, "hello", sqlBinary(bytes1), sqlDate(2001, 8, 22), true, 123.45) .row(null, null, null, null, null, null) .row(456L, "bye", sqlBinary(bytes3), sqlDate(2005, 4, 22), false, 987.65) .rows(doubles) .build(); // no tuple domain (all) TupleDomain<RaptorColumnHandle> tupleDomain = TupleDomain.all(); try (ConnectorPageSource pageSource = getPageSource(manager, columnIds, columnTypes, uuid, tupleDomain)) { MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes); assertEquals(result.getRowCount(), expected.getRowCount()); assertEquals(result, expected); } // tuple domain within the column range tupleDomain = TupleDomain.fromFixedValues(ImmutableMap.<RaptorColumnHandle, NullableValue>builder() .put(new RaptorColumnHandle("test", "c1", 2, BIGINT), NullableValue.of(BIGINT, 124L)) .build()); try (ConnectorPageSource pageSource = getPageSource(manager, columnIds, columnTypes, uuid, tupleDomain)) { MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes); assertEquals(result.getRowCount(), expected.getRowCount()); } // tuple domain outside the column range tupleDomain = TupleDomain.fromFixedValues(ImmutableMap.<RaptorColumnHandle, NullableValue>builder() .put(new RaptorColumnHandle("test", "c1", 2, BIGINT), NullableValue.of(BIGINT, 122L)) .build()); try (ConnectorPageSource pageSource = 
getPageSource(manager, columnIds, columnTypes, uuid, tupleDomain)) { MaterializedResult result = materializeSourceDataStream(SESSION, pageSource, columnTypes); assertEquals(result.getRowCount(), 0); } } @Test public void testRewriter() throws Exception { OrcStorageManager manager = createOrcStorageManager(); FileSystem fileSystem = new LocalOrcDataEnvironment().getFileSystem(FileSystemContext.DEFAULT_RAPTOR_CONTEXT); long transactionId = TRANSACTION_ID; List<Long> columnIds = ImmutableList.of(3L, 7L); List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(10)); // create file with 2 rows StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); List<Page> pages = rowPagesBuilder(columnTypes) .row(123L, "hello") .row(456L, "bye") .build(); sink.appendPages(pages); List<ShardInfo> shards = getFutureValue(sink.commit()); assertEquals(shardRecorder.getShards().size(), 1); // delete one row BitSet rowsToDelete = new BitSet(); rowsToDelete.set(0); InplaceShardRewriter shardRewriter = (InplaceShardRewriter) manager.createShardRewriter( FileSystemContext.DEFAULT_RAPTOR_CONTEXT, fileSystem, transactionId, OptionalInt.empty(), shards.get(0).getShardUuid(), 2, Optional.empty(), false, IntStream.range(0, columnIds.size()).boxed().collect(Collectors.toMap(index -> String.valueOf(columnIds.get(index)), columnTypes::get))); Collection<Slice> fragments = shardRewriter.rewriteShard(rowsToDelete); Slice shardDelta = Iterables.getOnlyElement(fragments); ShardDelta shardDeltas = jsonCodec(ShardDelta.class).fromJson(shardDelta.getBytes()); ShardInfo shardInfo = Iterables.getOnlyElement(shardDeltas.getNewShards()); // check that output file has one row assertEquals(shardInfo.getRowCount(), 1); // check that storage file is same as backup file File storageFile = new File(storageService.getStorageFile(shardInfo.getShardUuid()).toString()); File backupFile = fileBackupStore.getBackupFile(shardInfo.getShardUuid()); assertFileEquals(storageFile, backupFile); 
// verify recorded shard List<RecordedShard> recordedShards = shardRecorder.getShards(); assertEquals(recordedShards.size(), 2); assertEquals(recordedShards.get(1).getTransactionId(), TRANSACTION_ID); assertEquals(recordedShards.get(1).getShardUuid(), shardInfo.getShardUuid()); } @Test public void testWriteDeltaDelete() throws Exception { FileSystem fileSystem = new LocalOrcDataEnvironment().getFileSystem(FileSystemContext.DEFAULT_RAPTOR_CONTEXT); // delete one row BitSet rowsToDelete = new BitSet(); rowsToDelete.set(0); Collection<Slice> fragments = deltaDelete(rowsToDelete, false); Slice shardDelta = Iterables.getOnlyElement(fragments); ShardDeleteDelta shardDeltas = jsonCodec(ShardDeleteDelta.class).fromJson(shardDelta.getBytes()); ShardInfo shardInfo = shardDeltas.getDeltaInfoPair().getNewDeltaDeleteShard().get(); // Check that output file (new delta file) has one row assertEquals(shardInfo.getRowCount(), 1); assertTrue(checkContent(fileSystem, shardInfo.getShardUuid(), rowsToDelete)); // Check that storage file is same as backup file File storageFile = new File(storageService.getStorageFile(shardInfo.getShardUuid()).toString()); File backupFile = fileBackupStore.getBackupFile(shardInfo.getShardUuid()); assertFileEquals(storageFile, backupFile); // Verify recorded shard List<RecordedShard> recordedShards = shardRecorder.getShards(); assertEquals(recordedShards.size(), 2); // original file + delta file assertEquals(recordedShards.get(1).getTransactionId(), TRANSACTION_ID); assertEquals(recordedShards.get(1).getShardUuid(), shardInfo.getShardUuid()); } @Test public void testWriteDeltaDeleteEmpty() { // delete zero row BitSet rowsToDelete = new BitSet(); Collection<Slice> fragments = deltaDelete(rowsToDelete, false); assertEquals(ImmutableList.of(), fragments); List<RecordedShard> recordedShards = shardRecorder.getShards(); assertEquals(recordedShards.size(), 1); // no delta file } @Test public void testWriteDeltaDeleteAll() { // delete every row BitSet 
rowsToDelete = new BitSet(); rowsToDelete.set(0); rowsToDelete.set(1); rowsToDelete.set(2); Collection<Slice> fragments = deltaDelete(rowsToDelete, false); Slice shardDelta = Iterables.getOnlyElement(fragments); ShardDeleteDelta shardDeltas = jsonCodec(ShardDeleteDelta.class).fromJson(shardDelta.getBytes()); assertEquals(shardDeltas.getDeltaInfoPair().getNewDeltaDeleteShard(), Optional.empty()); // verify recorded shard List<RecordedShard> recordedShards = shardRecorder.getShards(); assertEquals(recordedShards.size(), 1); } @Test // rowsToDelete and rowsDeleted must be mutually exclusive public void testWriteDeltaDeleteMerge() throws Exception { FileSystem fileSystem = new LocalOrcDataEnvironment().getFileSystem(FileSystemContext.DEFAULT_RAPTOR_CONTEXT); BitSet rowsToDelete = new BitSet(); rowsToDelete.set(0); Collection<Slice> fragments = deltaDelete(rowsToDelete, true); Slice shardDelta = Iterables.getOnlyElement(fragments); ShardDeleteDelta shardDeltas = jsonCodec(ShardDeleteDelta.class).fromJson(shardDelta.getBytes()); ShardInfo shardInfo = shardDeltas.getDeltaInfoPair().getNewDeltaDeleteShard().get(); // Check that output file (new delta file) has merged 2 rows assertEquals(shardInfo.getRowCount(), 2); assertTrue(checkContent(fileSystem, shardInfo.getShardUuid(), rowsToDelete)); // Check that storage file is same as backup file File storageFile = new File(storageService.getStorageFile(shardInfo.getShardUuid()).toString()); File backupFile = fileBackupStore.getBackupFile(shardInfo.getShardUuid()); assertFileEquals(storageFile, backupFile); // Verify recorded shard List<RecordedShard> recordedShards = shardRecorder.getShards(); assertEquals(recordedShards.size(), 3); // original file + old delta + new delta assertEquals(recordedShards.get(2).getTransactionId(), TRANSACTION_ID); assertEquals(recordedShards.get(2).getShardUuid(), shardInfo.getShardUuid()); } @Test public void testWriteDeltaDeleteMergeAll() { // delete every row BitSet rowsToDelete = new BitSet(); 
rowsToDelete.set(0); rowsToDelete.set(1); Collection<Slice> fragments = deltaDelete(rowsToDelete, true); Slice shardDelta = Iterables.getOnlyElement(fragments); ShardDeleteDelta shardDeltas = jsonCodec(ShardDeleteDelta.class).fromJson(shardDelta.getBytes()); assertEquals(shardDeltas.getDeltaInfoPair().getNewDeltaDeleteShard(), Optional.empty()); // verify recorded shard List<RecordedShard> recordedShards = shardRecorder.getShards(); assertEquals(recordedShards.size(), 2); // original file + old delta } @Test(expectedExceptions = PrestoException.class) public void testWriteDeltaDeleteMergeConflict() { // delete same row BitSet rowsToDelete = new BitSet(); rowsToDelete.set(2); Collection<Slice> fragments = deltaDelete(rowsToDelete, true); deltaDelete(rowsToDelete, true); } private Collection<Slice> deltaDelete(BitSet rowsToDelete, boolean oldDeltaDeleteExist) { OrcStorageManager manager = createOrcStorageManager(); FileSystem fileSystem = new LocalOrcDataEnvironment().getFileSystem(FileSystemContext.DEFAULT_RAPTOR_CONTEXT); List<Long> columnIds = ImmutableList.of(3L, 7L); List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(10)); // create file with 3 rows StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); List<Page> pages = rowPagesBuilder(columnTypes) .row(123L, "hello") .row(456L, "bye") .row(456L, "test") .build(); sink.appendPages(pages); List<ShardInfo> shards = getFutureValue(sink.commit()); assertEquals(shardRecorder.getShards().size(), 1); List<ShardInfo> oldDeltaDeleteShards = null; if (oldDeltaDeleteExist) { // create oldDeltaDeleteExist with 1 row List<Long> deltaColumnIds = ImmutableList.of(0L); List<Type> deltaColumnTypes = ImmutableList.of(BIGINT); StoragePageSink deltaSink = createStoragePageSink(manager, deltaColumnIds, deltaColumnTypes); List<Page> deltaPages = rowPagesBuilder(deltaColumnTypes) .row(2L) .build(); deltaSink.appendPages(deltaPages); oldDeltaDeleteShards = getFutureValue(deltaSink.commit()); } 
// delta delete DeltaShardRewriter shardRewriter = (DeltaShardRewriter) manager.createShardRewriter( FileSystemContext.DEFAULT_RAPTOR_CONTEXT, fileSystem, TRANSACTION_ID, OptionalInt.empty(), shards.get(0).getShardUuid(), 3, oldDeltaDeleteExist ? Optional.of(oldDeltaDeleteShards.get(0).getShardUuid()) : Optional.empty(), true, null); Collection<Slice> fragments = shardRewriter.writeDeltaDeleteFile(rowsToDelete); return fragments; } private boolean checkContent(FileSystem fileSystem, UUID shardUuid, BitSet rowsToDelete) { OrcStorageManager manager = createOrcStorageManager(); Optional<BitSet> rows = manager.getRowsFromUuid(fileSystem, Optional.of(shardUuid)); return rows.map(r -> r.equals(rowsToDelete)).orElse(false); } public void testWriterRollback() { // verify staging directory is empty File staging = new File(new File(temporary, "data"), "staging"); assertDirectory(staging); assertEquals(staging.list(), new String[] {}); // create a shard in staging OrcStorageManager manager = createOrcStorageManager(); List<Long> columnIds = ImmutableList.of(3L, 7L); List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(10)); StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); List<Page> pages = rowPagesBuilder(columnTypes) .row(123L, "hello") .row(456L, "bye") .build(); sink.appendPages(pages); sink.flush(); // verify shard exists in staging String[] files = staging.list(); assertNotNull(files); String stagingFile = Arrays.stream(files) .filter(file -> file.endsWith(".orc")) .findFirst() .orElseThrow(() -> new AssertionError("file not found in staging")); // rollback should cleanup staging files sink.rollback(); files = staging.list(); assertNotNull(files); assertTrue(Arrays.stream(files).noneMatch(stagingFile::equals)); } @Test public void testShardStatsBigint() { List<ColumnStats> stats = columnStats(types(BIGINT), row(2L), row(-3L), row(5L)); assertColumnStats(stats, 1, -3L, 5L); } @Test public void testShardStatsDouble() { 
List<ColumnStats> stats = columnStats(types(DOUBLE), row(2.5), row(-4.1), row(6.6)); assertColumnStats(stats, 1, -4.1, 6.6); } @Test public void testShardStatsBigintDouble() { List<ColumnStats> stats = columnStats(types(BIGINT, DOUBLE), row(-3L, 6.6), row(5L, -4.1)); assertColumnStats(stats, 1, -3L, 5L); assertColumnStats(stats, 2, -4.1, 6.6); } @Test public void testShardStatsDoubleMinMax() { List<ColumnStats> stats = columnStats(types(DOUBLE), row(3.2), row(Double.MIN_VALUE), row(4.5)); assertColumnStats(stats, 1, Double.MIN_VALUE, 4.5); stats = columnStats(types(DOUBLE), row(3.2), row(Double.MAX_VALUE), row(4.5)); assertColumnStats(stats, 1, 3.2, Double.MAX_VALUE); } @Test public void testShardStatsDoubleNotFinite() { List<ColumnStats> stats = columnStats(types(DOUBLE), row(3.2), row(Double.NEGATIVE_INFINITY), row(4.5)); assertColumnStats(stats, 1, null, 4.5); stats = columnStats(types(DOUBLE), row(3.2), row(Double.POSITIVE_INFINITY), row(4.5)); assertColumnStats(stats, 1, 3.2, null); stats = columnStats(types(DOUBLE), row(3.2), row(Double.NaN), row(4.5)); assertColumnStats(stats, 1, 3.2, 4.5); } @Test public void testShardStatsVarchar() { List<ColumnStats> stats = columnStats( types(createVarcharType(10)), row(utf8Slice("hello")), row(utf8Slice("bye")), row(utf8Slice("foo"))); assertColumnStats(stats, 1, "bye", "hello"); } @Test public void testShardStatsBigintVarbinary() { List<ColumnStats> stats = columnStats(types(BIGINT, VARBINARY), row(5L, wrappedBuffer(octets(0x00))), row(3L, wrappedBuffer(octets(0x01)))); assertColumnStats(stats, 1, 3L, 5L); assertNoColumnStats(stats, 2); } @Test public void testShardStatsDateTimestamp() { long minDate = sqlDate(2001, 8, 22).getDays(); long maxDate = sqlDate(2005, 4, 22).getDays(); long maxTimestamp = sqlTimestamp(2002, 4, 13, 6, 7, 8).getMillisUtc(); long minTimestamp = sqlTimestamp(2001, 3, 15, 9, 10, 11).getMillisUtc(); List<ColumnStats> stats = columnStats(types(DATE, TIMESTAMP), row(minDate, maxTimestamp), 
row(maxDate, minTimestamp)); assertColumnStats(stats, 1, minDate, maxDate); assertColumnStats(stats, 2, minTimestamp, maxTimestamp); } @Test public void testShardStatsTime() { long minTime = sqlTime(2004, 8, 22).getMillis(); long maxTime = sqlTime(2006, 4, 22).getMillis(); // Apache ORC writer does not support TIME List<ColumnStats> columnStats = columnStats(types(TIME), row(minTime), row(maxTime)); assertColumnStats(columnStats, 1, minTime, maxTime); } @Test public void testMaxShardRows() { OrcStorageManager manager = createOrcStorageManager(2, new DataSize(2, MEGABYTE)); List<Long> columnIds = ImmutableList.of(3L, 7L); List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(10)); StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); List<Page> pages = rowPagesBuilder(columnTypes) .row(123L, "hello") .row(456L, "bye") .build(); sink.appendPages(pages); assertTrue(sink.isFull()); } @Test public void testMaxFileSize() { List<Long> columnIds = ImmutableList.of(3L, 7L); List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(5)); List<Page> pages = rowPagesBuilder(columnTypes) .row(123L, "hello") .row(456L, "bye") .build(); // Set maxFileSize to 1 byte, so adding any page makes the StoragePageSink full OrcStorageManager manager = createOrcStorageManager(20, new DataSize(1, BYTE)); StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); sink.appendPages(pages); assertTrue(sink.isFull()); } private static ConnectorPageSource getPageSource( OrcStorageManager manager, List<Long> columnIds, List<Type> columnTypes, UUID uuid, TupleDomain<RaptorColumnHandle> tupleDomain) { return manager.getPageSource( FileSystemContext.DEFAULT_RAPTOR_CONTEXT, DEFAULT_HIVE_FILE_CONTEXT, uuid, Optional.empty(), false, OptionalInt.empty(), columnIds, columnTypes, tupleDomain, READER_ATTRIBUTES); } private static StoragePageSink createStoragePageSink(StorageManager manager, List<Long> columnIds, List<Type> columnTypes) { 
long transactionId = TRANSACTION_ID; return manager.createStoragePageSink(FileSystemContext.DEFAULT_RAPTOR_CONTEXT, transactionId, OptionalInt.empty(), columnIds, columnTypes, false); } private OrcStorageManager createOrcStorageManager() { return createOrcStorageManager(MAX_SHARD_ROWS, MAX_FILE_SIZE); } private OrcStorageManager createOrcStorageManager(int maxShardRows, DataSize maxFileSize) { return createOrcStorageManager(storageService, backupStore, recoveryManager, shardRecorder, maxShardRows, maxFileSize); } public static OrcStorageManager createOrcStorageManager(IDBI dbi, File temporary) { return createOrcStorageManager(dbi, temporary, MAX_SHARD_ROWS); } public static OrcStorageManager createOrcStorageManager(IDBI dbi, File temporary, int maxShardRows) { URI directory = new File(temporary, "data").toURI(); StorageService storageService = new LocalFileStorageService(new LocalOrcDataEnvironment(), directory); storageService.start(); File backupDirectory = new File(temporary, "backup"); FileBackupStore fileBackupStore = new FileBackupStore(backupDirectory); fileBackupStore.start(); Optional<BackupStore> backupStore = Optional.of(fileBackupStore); ShardManager shardManager = createShardManager(dbi); ShardRecoveryManager recoveryManager = new ShardRecoveryManager( storageService, backupStore, new LocalOrcDataEnvironment(), new TestingNodeManager(), shardManager, MISSING_SHARD_DISCOVERY, 10); return createOrcStorageManager( storageService, backupStore, recoveryManager, new InMemoryShardRecorder(), maxShardRows, MAX_FILE_SIZE); } public static OrcStorageManager createOrcStorageManager( StorageService storageService, Optional<BackupStore> backupStore, ShardRecoveryManager recoveryManager, ShardRecorder shardRecorder, int maxShardRows, DataSize maxFileSize) { return new OrcStorageManager( CURRENT_NODE, storageService, backupStore, READER_ATTRIBUTES, new BackupManager(backupStore, storageService, new LocalOrcDataEnvironment(), 1), recoveryManager, shardRecorder, new 
TypeRegistry(), new LocalOrcDataEnvironment(), CONNECTOR_ID, DELETION_THREADS, SHARD_RECOVERY_TIMEOUT, maxShardRows, maxFileSize, new DataSize(0, BYTE), SNAPPY, ENABLED_AND_VALIDATED, new StorageOrcFileTailSource(), new StorageStripeMetadataSource()); } private static void assertFileEquals(File actual, File expected) throws IOException { assertEquals(hash(actual, md5()), hash(expected, md5())); } private static void assertColumnStats(List<ColumnStats> list, long columnId, Object min, Object max) { for (ColumnStats stats : list) { if (stats.getColumnId() == columnId) { assertEquals(stats.getMin(), min); assertEquals(stats.getMax(), max); return; } } fail(format("no stats for column: %s: %s", columnId, list)); } private static void assertNoColumnStats(List<ColumnStats> list, long columnId) { for (ColumnStats stats : list) { assertNotEquals(stats.getColumnId(), columnId); } } private static List<Type> types(Type... types) { return ImmutableList.copyOf(types); } private static Object[] row(Object... values) { return values; } private List<ColumnStats> columnStats(List<Type> columnTypes, Object[]... rows) { ImmutableList.Builder<Long> list = ImmutableList.builder(); for (long i = 1; i <= columnTypes.size(); i++) { list.add(i); } List<Long> columnIds = list.build(); List<ColumnStats> apacheOrcWriterStats = columnStats(false, columnIds, columnTypes, rows); List<ColumnStats> optimizedOrcWriterStats = columnStats(true, columnIds, columnTypes, rows); assertEquals(apacheOrcWriterStats, optimizedOrcWriterStats); return optimizedOrcWriterStats; } private List<ColumnStats> columnStats(boolean useOptimizedWriter, List<Long> columnIds, List<Type> columnTypes, Object[]... 
rows) { OrcStorageManager manager = createOrcStorageManager(); StoragePageSink sink = createStoragePageSink(manager, columnIds, columnTypes); sink.appendPages(rowPagesBuilder(columnTypes).rows(rows).build()); List<ShardInfo> shards = getFutureValue(sink.commit()); assertEquals(shards.size(), 1); return Iterables.getOnlyElement(shards).getColumnStats(); } private static SqlVarbinary sqlBinary(byte[] bytes) { return new SqlVarbinary(bytes); } private static SqlDate sqlDate(int year, int month, int day) { DateTime date = new DateTime(year, month, day, 0, 0, 0, 0, UTC); return new SqlDate(Days.daysBetween(EPOCH, date).getDays()); } private static SqlTime sqlTime(int year, int month, int day) { DateTime date = new DateTime(year, month, day, 0, 0, 0, 0, UTC); return new SqlTime(NANOSECONDS.toMillis(date.toLocalTime().getMillisOfDay())); } private static SqlTimestamp sqlTimestamp(int year, int month, int day, int hour, int minute, int second) { return sqlTimestampOf(year, month, day, hour, minute, second, 0, UTC, UTC_KEY, SESSION); } }
/*
 * Copyright (C) 2014 Dell, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dell.doradus.testprocessor;

import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.List;

/**
 * Minimal blocking HTTP/1.1 client used by the test processor to talk to a
 * Doradus server over a raw socket. Not thread-safe; one request/response
 * exchange at a time.
 */
public class RestClient
{
    // Socket tuning knobs
    private static boolean USE_CUSTOM_BUFFER_SIZE = true;
    private static int     NET_BUFFER_SIZE        = 65536;
    private static boolean DISABLE_NAGLES         = true;

    private String       m_host      = null;
    private int          m_port      = -1;
    private Socket       m_socket    = null;
    private InputStream  m_inStream  = null;
    private OutputStream m_outStream = null;

    /**
     * Opens a TCP connection to the given host/port, replacing any previous
     * connection. On failure the socket is cleaned up and an exception with
     * the target address is thrown.
     */
    public void connect(String host, int port) throws Exception
    {
        disconnect();

        m_host = host;
        m_port = port;

        // new Socket() either succeeds or throws; no null check is needed.
        m_socket = new Socket();

        try {
            if (DISABLE_NAGLES) {
                m_socket.setTcpNoDelay(true);
            }
            if (USE_CUSTOM_BUFFER_SIZE) {
                // Only grow the OS buffers, never shrink them.
                if (m_socket.getSendBufferSize() < NET_BUFFER_SIZE) {
                    m_socket.setSendBufferSize(NET_BUFFER_SIZE);
                }
                if (m_socket.getReceiveBufferSize() < NET_BUFFER_SIZE) {
                    m_socket.setReceiveBufferSize(NET_BUFFER_SIZE);
                }
            }

            SocketAddress addr = new InetSocketAddress(m_host, m_port);
            m_socket.connect(addr);

            m_inStream  = m_socket.getInputStream();
            m_outStream = m_socket.getOutputStream();
        }
        catch(Exception ex) {
            disconnect();
            String msg = "Failed to connect to " + m_host + ":" + m_port;
            throw new Exception(msg, ex);
        }
    }

    /** Returns true when there is no usable connection. */
    public boolean notConnected()
    {
        return m_socket == null || m_socket.isClosed() || !m_socket.isConnected();
    }

    /** Closes the socket if open; errors during close are intentionally ignored. */
    public void disconnect()
    {
        if (m_socket == null) return;

        try { m_socket.close(); }
        catch (Exception ex) { /* ignore */ }

        m_socket    = null;
        m_inStream  = null;
        m_outStream = null;
    }

    /**
     * Serializes and sends one HTTP/1.1 request.
     *
     * FIX: Content-length is now the UTF-8 byte count of the body. The previous
     * code used String.length() (UTF-16 code units), which understates the length
     * for any non-ASCII body and corrupts the request framing.
     */
    public void sendRequest(RestRequest request) throws Exception
    {
        if (m_socket == null) {
            String msg = "No connection to Doradus server (possibly <connect> statement is missed)";
            throw new Exception(msg);
        }

        try {
            // Encode the body first so the header can carry the exact byte count.
            String body = request.getBody();
            byte[] bodyBytes = (body == null) ? new byte[0] : body.getBytes("UTF-8");
            int contentLength = bodyBytes.length;

            StringBuilder header = new StringBuilder();
            header.append(request.getRest() + " HTTP/1.1" + "\r\n");
            header.append("Host: " + m_host + "\r\n");

            String acceptType = request.getAcceptType();
            if (acceptType != null && !acceptType.isEmpty()) {
                header.append("Accept: " + acceptType + "\r\n");
            }
            String contentType = request.getContentType();
            if (contentType != null && !contentType.isEmpty()) {
                header.append("Content-type: " + contentType + "\r\n");
            }

            header.append("Content-length: " + contentLength + "\r\n");
            header.append("x-api-version: 2" + "\r\n");
            header.append("\r\n");

            byte[] headerBytes = header.toString().getBytes("UTF-8");

            byte[] requestBytes = headerBytes;
            if (contentLength > 0) {
                requestBytes = new byte[headerBytes.length + bodyBytes.length];
                System.arraycopy(headerBytes, 0, requestBytes, 0, headerBytes.length);
                System.arraycopy(bodyBytes, 0, requestBytes, headerBytes.length, bodyBytes.length);
            }

            m_outStream.write(requestBytes);
            m_outStream.flush();
        }
        catch(Exception ex) {
            String msg = "Failed to send request";
            throw new Exception(msg, ex);
        }
    }

    /** Reads and parses one HTTP response (header lines then body). */
    public RestResponse readResponse() throws Exception
    {
        try {
            RestResponse response = new RestResponse();

            List<String> headerLines = readHeaderLines();
            response.parseHeader(headerLines);
            readBody(response);

            return response;
        }
        catch(Exception ex) {
            String msg = "Failed to read response";
            throw new Exception(msg, ex);
        }
    }

    /**
     * Reads CRLF-terminated header lines until the blank line that ends the
     * header block; returns the lines trimmed, without the terminator line.
     */
    private List<String> readHeaderLines() throws Exception
    {
        try {
            List<String> lines = new ArrayList<String>();

            while (true) {
                String line = readHeaderLine();
                if (line.length() <= 2) {
                    if (!line.equals("\r\n")) {
                        String msg = "Response header is not properly terminated: \"" + line + "\"";
                        throw new Exception(msg);
                    }
                    break;
                }
                lines.add(line.trim());
            }

            return lines;
        }
        catch(Exception ex) {
            String msg = "Failed to read response header";
            throw new Exception(msg, ex);
        }
    }

    /** Reads a single header line, byte-by-byte, up to and including '\n'. */
    private String readHeaderLine() throws Exception
    {
        try {
            StringBuilder line = new StringBuilder();

            int buf = ' ';
            while (buf != '\n') {
                buf = m_inStream.read();
                if (buf == -1) {
                    String msg = "Unexpected EOF";
                    throw new Exception(msg);
                }
                line.append((char) buf);
            }

            return line.toString();
        }
        catch(Exception ex) {
            String msg = "Failed to read response header line";
            throw new Exception(msg, ex);
        }
    }

    /**
     * Reads exactly Content-length bytes into the response body (UTF-8, trimmed).
     *
     * FIX: read() returning -1 (EOF) is now detected and reported. Previously
     * "bytesLeft -= -1" grew the remaining count, looping forever on a truncated
     * response.
     */
    private void readBody(RestResponse response) throws Exception
    {
        int bytesLeft = response.getContentLength();
        if (bytesLeft <= 0) {
            response.setBody("");
            return;
        }

        byte[] buffer = new byte[bytesLeft];
        int offset = 0;

        while (bytesLeft > 0) {
            int bytesGot = m_inStream.read(buffer, offset, bytesLeft);
            if (bytesGot == -1) {
                throw new Exception("Unexpected EOF while reading response body");
            }
            bytesLeft -= bytesGot;
            offset    += bytesGot;
        }

        String body = (new String(buffer, "UTF-8")).trim();
        /*
        if ("xml".equals(response.contentType()) && body.indexOf("<?xml") > -1) {
            int pos = body.indexOf("?>");
            if (pos > -1) {
                body = body.substring(pos + 2, body.length() - (pos + 2)).trim();
            }
        }
        */
        response.setBody(body);
    }
}
/*******************************************************************************
 * Caleydo - Visualization for Molecular Biology - http://caleydo.org
 * Copyright (c) The Caleydo Team. All rights reserved.
 * Licensed under the new BSD license, available at http://caleydo.org/license
 ******************************************************************************/
package org.caleydo.core.data.collection.table;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Set;

import org.caleydo.core.data.collection.column.container.CategoricalClassDescription;
import org.caleydo.core.data.datadomain.ATableBasedDataDomain;
import org.caleydo.core.data.perspective.variable.Perspective;
import org.caleydo.core.data.virtualarray.VirtualArray;
import org.caleydo.core.id.IDCategory;
import org.caleydo.core.id.IDMappingManager;
import org.caleydo.core.id.IDMappingManagerRegistry;
import org.caleydo.core.id.IDType;
import org.caleydo.core.io.DataDescription;
import org.caleydo.core.io.DataSetDescription;
import org.caleydo.core.io.NumericalProperties;
import org.caleydo.core.io.parser.ascii.LinearDataParser;
import org.caleydo.core.io.parser.ascii.TabularDataParser;
import org.caleydo.core.util.logging.Logger;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;

/**
 * Utility class that features creating, loading and saving sets and dimensions.
 *
 * @author Werner Puff
 * @author Alexander Lex
 */
public class TableUtils {

	/**
	 * Creates the {@link Table} (numerical, categorical, or plain, depending on the
	 * {@link DataDescription}) for the data domain, parses the source file into it,
	 * normalizes it, and optionally creates default perspectives.
	 *
	 * @param dataDomain
	 *            the data domain the table is created for and registered with
	 * @param dataSetDescription
	 *            describes the source file and the kind of data it contains
	 * @param createDefaultDimensionPerspectives
	 *            whether default dimension perspectives are created after loading
	 * @param createDefaultRecordPerspective
	 *            whether default record perspectives are created after loading
	 */
	@SuppressWarnings({ "rawtypes", "unchecked" })
	public static void loadData(ATableBasedDataDomain dataDomain, DataSetDescription dataSetDescription,
			boolean createDefaultDimensionPerspectives, boolean createDefaultRecordPerspective) {
		// --------- data loading ---------------
		Table table;
		if (dataSetDescription.getDataDescription() == null) {
			// no description: inhomogeneous table
			table = new Table(dataDomain);
		}
		else {
			DataDescription dataDescription = dataSetDescription.getDataDescription();
			if (dataDescription.getNumericalProperties() != null) {
				NumericalTable nTable = new NumericalTable(dataDomain);
				table = nTable;
				NumericalProperties numericalProperties = dataDescription.getNumericalProperties();
				nTable.setDataCenter(numericalProperties.getDataCenter());
				if (numericalProperties.getMin() != null) {
					nTable.setMin(numericalProperties.getMin());
				}
				if (numericalProperties.getMax() != null) {
					nTable.setMax(numericalProperties.getMax());
				}
				nTable.setDefaultDataTransformation(sanitize(numericalProperties.getDataTransformation()));
			}
			else if (dataDescription.getCategoricalClassDescription() != null) {
				CategoricalClassDescription<?> catClassDescr = dataDescription.getCategoricalClassDescription();
				CategoricalTable cTable;
				// the raw data type determines the concrete table parameterization
				switch (catClassDescr.getRawDataType()) {
				case INTEGER:
					cTable = new CategoricalTable<Integer>(dataDomain);
					break;
				case STRING:
					cTable = new CategoricalTable<String>(dataDomain);
					break;
				case FLOAT:
				default:
					throw new UnsupportedOperationException("Float not supported for categorical data");
				}
				cTable.setCategoryDescritions(catClassDescr);
				table = cTable;
			}
			else {
				throw new IllegalStateException("DataDescription must contain categorical or numerical definitions"
						+ dataDescription);
			}
		}

		dataDomain.setTable(table);

		// choose the parser matching the source layout
		if (!dataSetDescription.isLinearSource()) {
			TabularDataParser parser = new TabularDataParser(dataDomain, dataSetDescription);
			parser.loadData();
		}
		else {
			LinearDataParser parser = new LinearDataParser(dataDomain, dataSetDescription);
			parser.loadData();
		}

		table.normalize();

		if (createDefaultDimensionPerspectives)
			table.createDefaultDimensionPerspectives();
		if (createDefaultRecordPerspective)
			table.createDefaultRecordPerspectives();
	}

	/**
	 * cleans the input data transformation to known one if possible
	 *
	 * @param dataTransformation
	 * @return the canonical transformation constant, or the input unchanged if unknown
	 */
	private static String sanitize(String dataTransformation) {
		if (Table.Transformation.LINEAR.equalsIgnoreCase(dataTransformation))
			return Table.Transformation.LINEAR;
		if (NumericalTable.Transformation.LOG10.equalsIgnoreCase(dataTransformation))
			return NumericalTable.Transformation.LOG10;
		if (NumericalTable.Transformation.LOG2.equalsIgnoreCase(dataTransformation))
			return NumericalTable.Transformation.LOG2;
		return dataTransformation;
	}

	/**
	 * Exports the dataset identified through the perspectives to the file specified.
	 *
	 * @param dataDomain
	 * @param fileName
	 * @param recordPerspective
	 * @param dimensionPerspective
	 * @param targetRecordIDType
	 *            the id type to be used in the file. If this is null the {@link IDCategory#getHumanReadableIDType()}
	 *            will be used.
	 * @param targetDimensionIDType
	 *            same as targetRecordIDType for dimensions
	 * @param includeRecordClusterInfo
	 *            true if you want to add information about the clustering of records to the file, else false
	 * @param includeDimensionClusterInfo
	 *            true if you want to add information about the clustering of dimensions to the file, else false
	 *
	 * @return true if export was successful, else false.
	 */
	public static boolean export(ATableBasedDataDomain dataDomain, String fileName, Perspective recordPerspective,
			Perspective dimensionPerspective, IDType targetRecordIDType, IDType targetDimensionIDType,
			boolean includeRecordClusterInfo, boolean includeDimensionClusterInfo) {

		if (targetRecordIDType == null)
			targetRecordIDType = dataDomain.getRecordIDCategory().getHumanReadableIDType();

		if (targetDimensionIDType == null)
			targetDimensionIDType = dataDomain.getDimensionIDCategory().getHumanReadableIDType();

		IDType rowTargetIDType;
		IDType rowSourceIDType;

		IDType colTargetIDType;
		IDType colSourceIDType;

		VirtualArray rowVA;
		VirtualArray colVA;

		// if (dataDomain.isColumnDimension()) {
		rowVA = recordPerspective.getVirtualArray();
		colVA = dimensionPerspective.getVirtualArray();
		rowTargetIDType = targetRecordIDType;
		colTargetIDType = targetDimensionIDType;

		rowSourceIDType = dataDomain.getRecordIDType();
		colSourceIDType = dataDomain.getDimensionIDType();
		// } else {
		// rowVA = dimensionPerspective.getVirtualArray();
		// colVA = recordPerspective.getVirtualArray();
		// rowTargetIDType = targetDimensionIDType;
		// colTargetIDType = targetRecordIDType;
		//
		// rowSourceIDType = dataDomain.getDimensionIDType();
		// colSourceIDType = dataDomain.getRecordIDType();
		// }

		if (rowVA == null || colVA == null)
			throw new IllegalArgumentException("VAs in perspectives were null");

		IDMappingManager rowIDMappingManager = IDMappingManagerRegistry.get().getIDMappingManager(rowSourceIDType);
		IDMappingManager colIDMappingManager = IDMappingManagerRegistry.get().getIDMappingManager(colSourceIDType);

		// FIX: try-with-resources guarantees the writer is closed even when an
		// exception is thrown mid-export; previously it leaked on any failure.
		try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(fileName)))) {

			// cluster bookkeeping: cnt walks through the current group, cluster is the
			// group index, example its representative element, offset the group start
			int cnt = -1;
			int cluster = 0;
			int example = 0;
			int offset = 0;

			// export cluster info for cols
			if (includeDimensionClusterInfo && colVA.getGroupList() != null) {
				StringBuilder clusterNr = new StringBuilder("Cluster\t" + (includeRecordClusterInfo ? "\t\t" : ""));
				StringBuilder clusterRep = new StringBuilder("Representative Element\t"
						+ (includeRecordClusterInfo ? "\t\t" : ""));

				cluster = 0;
				cnt = -1;

				for (@SuppressWarnings("unused")
				Integer colIndex : colVA) {
					if (cnt == colVA.getGroupList().get(cluster).getSize() - 1) {
						offset = offset + colVA.getGroupList().get(cluster).getSize();
						cluster++;
						cnt = 0;
					}
					else {
						cnt++;
					}

					example = colVA.getGroupList().get(cluster).getRepresentativeElementIndex();

					clusterNr.append(cluster + "\t");
					clusterRep.append(example + "\t");
				}

				clusterNr.append("\n");
				clusterRep.append("\n");

				out.print(clusterNr.toString());
				out.print(clusterRep.toString());
			}

			// Writing dimension labels
			if (includeRecordClusterInfo && rowVA.getGroupList() != null)
				out.print("Cluster_Number\tCluster_Repr\t");

			// first cell
			out.print("Identifier \t");
			for (Integer colID : colVA) {
				Set<Object> colTargetIDs = colIDMappingManager.getIDAsSet(colSourceIDType, colTargetIDType, colID);
				String id = "";
				for (Object colTargetID : colTargetIDs) {
					id = colTargetID.toString();
					// here we only use the first id
					break;
				}
				out.print(id + "\t");
			}

			out.println();

			cnt = -1;
			cluster = 0;
			example = 0;
			offset = 0;

			String id;
			for (Integer rowID : rowVA) {
				if (includeRecordClusterInfo) {
					// export cluster info for rows
					if (rowVA.getGroupList() != null) {
						if (cnt == rowVA.getGroupList().get(cluster).getSize() - 1) {
							offset = offset + rowVA.getGroupList().get(cluster).getSize();
							cluster++;
							cnt = 0;
						}
						else {
							cnt++;
						}

						example = rowVA.getGroupList().get(cluster).getRepresentativeElementIndex();

						out.print(cluster + "\t" + example + "\t");
					}
				}

				Set<Object> rowTargetIDs = rowIDMappingManager.getIDAsSet(rowSourceIDType, rowTargetIDType, rowID);
				id = "";
				for (Object rowTargetID : rowTargetIDs) {
					id = rowTargetID.toString();
					// here we only use the first id
					break;
				}
				out.print(id + "\t");

				for (Integer colID : colVA) {
					// if (dataDomain.isColumnDimension()) {
					out.print(dataDomain.getTable().getRawAsString(colID, rowID));
					// } else {
					// out.print(dataDomain.getTable().getRawAsString(rowID, colID));
					// }
					out.print("\t");
				}
				out.println();
			}
		}
		catch (IOException e) {
			Logger.log(new Status(IStatus.ERROR, "TableUtils", "Failed to export data.", e));
			return false;
		}
		return true;
	}
}
/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.car.app.navigation.model;

import static androidx.car.app.model.constraints.ActionsConstraints.ACTIONS_CONSTRAINTS_MAP;
import static androidx.car.app.model.constraints.ActionsConstraints.ACTIONS_CONSTRAINTS_NAVIGATION;
import static androidx.car.app.model.constraints.CarColorConstraints.UNCONSTRAINED;

import static java.util.Objects.requireNonNull;

import android.annotation.SuppressLint;

import androidx.annotation.Keep;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.car.app.Screen;
import androidx.car.app.SurfaceCallback;
import androidx.car.app.annotations.CarProtocol;
import androidx.car.app.annotations.RequiresCarApi;
import androidx.car.app.model.Action;
import androidx.car.app.model.ActionStrip;
import androidx.car.app.model.CarColor;
import androidx.car.app.model.Template;
import androidx.car.app.model.Toggle;

import java.util.Objects;

/**
 * A template for showing navigation information.
 *
 * <p>This template has two independent sections which can be updated:
 *
 * <ul>
 *   <li>Navigation information such as routing instructions or navigation-related messages.
 *   <li>Travel estimates to the destination.
 * </ul>
 *
 * <p>To update the template as the user navigates, call {@link Screen#invalidate} to provide the
 * host with a new template with the updated information.
 *
 * <p>The template itself does not expose a drawing surface. In order to draw on the canvas, use
 * {@link androidx.car.app.AppManager#setSurfaceCallback(SurfaceCallback)}.
 *
 * <p>See {@link androidx.car.app.notification.CarAppExtender} for how to show alerts with
 * notifications. Frequent alert notifications distract the driver and are discouraged.
 *
 * <h4>Pan and Zoom</h4>
 *
 * This template allows an app to provide pan and zoom functionality. To support pan and zoom,
 * respond to the user input in {@link SurfaceCallback} methods, such as:
 *
 * <ul>
 *   <li>{@link SurfaceCallback#onScroll(float, float)}</li>
 *   <li>{@link SurfaceCallback#onFling(float, float)}</li>
 *   <li>{@link SurfaceCallback#onScale(float, float, float)}</li>
 * </ul>
 *
 * In order to receive the callbacks, add an {@link Action#PAN} button in a map
 * {@link ActionStrip} via the {@link Builder#setMapActionStrip(ActionStrip)} method:
 *
 * <pre>{@code
 * ...
 * Action panAction = new Action.Builder(Action.PAN).setIcon(myPanIcon).build();
 * ActionStrip mapActionStrip = new ActionStrip.Builder().addAction(panAction).build();
 * NavigationTemplate.Builder builder = new NavigationTemplate.Builder();
 * builder.setMapActionStrip(mapActionStrip);
 * ...
 * }</pre>
 *
 * When the user presses the {@link Action#PAN} button, the host enters the pan mode. In this
 * mode, the host translates the user input from non-touch input devices, such as rotary
 * controllers and touchpads, and calls the appropriate {@link SurfaceCallback} methods. Respond
 * to the user action to enter or exit the pan mode via
 * {@link Builder#setPanModeListener(PanModeListener)}.
 *
 * <p>If the app does not include this button in the map {@link ActionStrip}, the app will not
 * receive the user input for panning gestures from {@link SurfaceCallback} methods, and the host
 * will exit any previously activated pan mode.
 *
 * <p>The host may hide the pan button in some head units in which the user does not need it.
 * Also, the host may hide other UI components in the template while the user is in the pan mode.
 *
 * <p>Note that not all head units support touch gestures, and not all touch screens support
 * multi-touch gestures. Therefore, some {@link SurfaceCallback} methods may not be called in
 * some cars. In order to support different head units, use the buttons in the map action strip
 * to provide necessary functionality, such as the zoom-in and zoom-out buttons.
 *
 * <h4>Template Restrictions</h4>
 *
 * In regard to template refreshes, as described in {@link Screen#onGetTemplate()}, this template
 * supports any content changes as refreshes. This allows apps to interactively update the
 * turn-by-turn instructions without the templates being counted against the template quota.
 *
 * <p>Further, this template is considered a view that the user will stay and consume contents
 * from, and the host will reset the template quota once an app reaches this template.
 *
 * <p>In order to use this template your car app <b>MUST</b> declare that it uses the {@code
 * androidx.car.app.NAVIGATION_TEMPLATES} permission in the manifest.
 */
@CarProtocol
public final class NavigationTemplate implements Template {

    /**
     * Represents navigation information such as routing instructions or navigation-related
     * messages.
     */
    public interface NavigationInfo {
    }

    @Keep
    @Nullable
    private final NavigationInfo mNavigationInfo;
    @Keep
    @Nullable
    private final CarColor mBackgroundColor;
    @Keep
    @Nullable
    private final TravelEstimate mDestinationTravelEstimate;
    @Keep
    @Nullable
    private final ActionStrip mActionStrip;
    @Keep
    @Nullable
    private final ActionStrip mMapActionStrip;
    @Keep
    @Nullable
    private final Toggle mPanModeToggle;
    @Keep
    @Nullable
    private final PanModeDelegate mPanModeDelegate;

    /**
     * Returns the {@link ActionStrip} for this template or {@code null} if not set.
     *
     * <p>Although {@link Builder#build()} requires an action strip, an instance created by the
     * serialization path (see the private no-arg constructor) has no action strip, so this
     * getter honors its {@code @Nullable} contract instead of throwing.
     *
     * @see Builder#setActionStrip(ActionStrip)
     */
    @Nullable
    public ActionStrip getActionStrip() {
        // Fix: previously wrapped in requireNonNull, which contradicted the @Nullable
        // annotation and the documented "or null if not set" behavior, and would NPE for a
        // deserialized default instance.
        return mActionStrip;
    }

    /**
     * Returns the map {@link ActionStrip} for this template or {@code null} if not set.
     *
     * @see Builder#setMapActionStrip(ActionStrip)
     */
    @RequiresCarApi(2)
    @Nullable
    public ActionStrip getMapActionStrip() {
        return mMapActionStrip;
    }

    /**
     * Returns whether this template is in the pan mode.
     *
     * @deprecated use {@link #getPanModeDelegate()}
     */
    // TODO(b/187989940): remove after hosts switch over to using getPanModeDelegate
    @Deprecated
    @RequiresCarApi(2)
    @Nullable
    public Toggle getPanModeToggle() {
        return mPanModeToggle;
    }

    /**
     * Returns the {@link PanModeDelegate} that should be called when the user interacts with
     * pan mode on this template, or {@code null} if a {@link PanModeListener} was not set.
     */
    @RequiresCarApi(2)
    @Nullable
    public PanModeDelegate getPanModeDelegate() {
        return mPanModeDelegate;
    }

    /**
     * Returns the navigation information displayed on the template or {@code null} if there is
     * no navigation information on top of the map.
     */
    @Nullable
    public NavigationInfo getNavigationInfo() {
        return mNavigationInfo;
    }

    /**
     * Returns the background color used for the navigation information or {@code null} if set
     * to the default value.
     */
    @Nullable
    public CarColor getBackgroundColor() {
        return mBackgroundColor;
    }

    /**
     * Returns the {@link TravelEstimate} to the final destination or {@code null} if there is
     * no travel estimate information.
     */
    @Nullable
    public TravelEstimate getDestinationTravelEstimate() {
        return mDestinationTravelEstimate;
    }

    @NonNull
    @Override
    public String toString() {
        return "NavigationTemplate";
    }

    @Override
    public int hashCode() {
        // Only the *presence* of the pan mode delegate participates in the hash, mirroring
        // equals(): delegate instances have no value-based equality.
        return Objects.hash(mNavigationInfo, mBackgroundColor, mDestinationTravelEstimate,
                mActionStrip, mMapActionStrip, mPanModeToggle, mPanModeDelegate == null);
    }

    @Override
    public boolean equals(@Nullable Object other) {
        if (this == other) {
            return true;
        }
        if (!(other instanceof NavigationTemplate)) {
            return false;
        }
        NavigationTemplate otherTemplate = (NavigationTemplate) other;

        return Objects.equals(mNavigationInfo, otherTemplate.mNavigationInfo)
                && Objects.equals(mBackgroundColor, otherTemplate.mBackgroundColor)
                && Objects.equals(mDestinationTravelEstimate,
                otherTemplate.mDestinationTravelEstimate)
                && Objects.equals(mActionStrip, otherTemplate.mActionStrip)
                && Objects.equals(mMapActionStrip, otherTemplate.mMapActionStrip)
                && Objects.equals(mPanModeToggle, otherTemplate.mPanModeToggle)
                // Delegates are compared by presence only (no value equality on delegates).
                && Objects.equals(mPanModeDelegate == null,
                otherTemplate.mPanModeDelegate == null);
    }

    NavigationTemplate(Builder builder) {
        mNavigationInfo = builder.mNavigationInfo;
        mBackgroundColor = builder.mBackgroundColor;
        mDestinationTravelEstimate = builder.mDestinationTravelEstimate;
        mActionStrip = builder.mActionStrip;
        mMapActionStrip = builder.mMapActionStrip;
        mPanModeToggle = builder.mPanModeToggle;
        mPanModeDelegate = builder.mPanModeDelegate;
    }

    /** Constructs an empty instance, used by serialization code. */
    private NavigationTemplate() {
        mNavigationInfo = null;
        mBackgroundColor = null;
        mDestinationTravelEstimate = null;
        mActionStrip = null;
        mMapActionStrip = null;
        mPanModeToggle = null;
        mPanModeDelegate = null;
    }

    /** A builder of {@link NavigationTemplate}. */
    public static final class Builder {
        @Nullable
        NavigationInfo mNavigationInfo;
        @Nullable
        CarColor mBackgroundColor;
        @Nullable
        TravelEstimate mDestinationTravelEstimate;
        @Nullable
        ActionStrip mActionStrip;
        @Nullable
        ActionStrip mMapActionStrip;
        @Nullable
        Toggle mPanModeToggle;
        @Nullable
        PanModeDelegate mPanModeDelegate;

        /**
         * Sets the navigation information to display on the template.
         *
         * <p>Unless set with this method, navigation info won't be displayed on the template.
         *
         * @throws NullPointerException if {@code navigationInfo} is {@code null}
         */
        @NonNull
        public Builder setNavigationInfo(@NonNull NavigationInfo navigationInfo) {
            mNavigationInfo = requireNonNull(navigationInfo);
            return this;
        }

        /**
         * Sets the background color to use for the navigation information.
         *
         * <p>Depending on contrast requirements, capabilities of the vehicle screens, or other
         * factors, the color may be ignored by the host or overridden by the vehicle system.
         *
         * @throws NullPointerException if {@code backgroundColor} is {@code null}
         */
        @NonNull
        public Builder setBackgroundColor(@NonNull CarColor backgroundColor) {
            UNCONSTRAINED.validateOrThrow(requireNonNull(backgroundColor));
            mBackgroundColor = backgroundColor;
            return this;
        }

        /**
         * Sets the {@link TravelEstimate} to the final destination.
         *
         * @throws IllegalArgumentException if the {@link TravelEstimate}'s remaining time is
         *                                  {@link TravelEstimate#REMAINING_TIME_UNKNOWN} or
         *                                  less than zero
         * @throws NullPointerException     if {@code destinationTravelEstimate} is {@code null}
         */
        @NonNull
        public Builder setDestinationTravelEstimate(
                @NonNull TravelEstimate destinationTravelEstimate) {
            if (requireNonNull(destinationTravelEstimate).getRemainingTimeSeconds() < 0) {
                throw new IllegalArgumentException(
                        "The destination travel estimate's remaining time must be greater or "
                                + "equal to zero");
            }
            mDestinationTravelEstimate = destinationTravelEstimate;
            return this;
        }

        /**
         * Sets an {@link ActionStrip} with a list of template-scoped actions for this template.
         *
         * <h4>Requirements</h4>
         *
         * Besides {@link Action#APP_ICON} and {@link Action#BACK}, this template requires at
         * least 1 and up to 4 {@link Action}s in its {@link ActionStrip}. Of the 4 allowed
         * {@link Action}s, only one can contain a title as set via
         * {@link Action.Builder#setTitle}. Otherwise, only {@link Action}s with icons are
         * allowed.
         *
         * @throws IllegalArgumentException if {@code actionStrip} does not meet the template's
         *                                  requirements
         * @throws NullPointerException     if {@code actionStrip} is {@code null}
         */
        @NonNull
        public Builder setActionStrip(@NonNull ActionStrip actionStrip) {
            ACTIONS_CONSTRAINTS_NAVIGATION.validateOrThrow(
                    requireNonNull(actionStrip).getActions());
            mActionStrip = actionStrip;
            return this;
        }

        /**
         * Sets an {@link ActionStrip} with a list of map-control related actions for this
         * template, such as pan or zoom.
         *
         * <p>The host will draw the buttons in an area that is associated with map controls.
         *
         * <p>If the app does not include the {@link Action#PAN} button in this
         * {@link ActionStrip}, the app will not receive the user input for panning gestures
         * from {@link SurfaceCallback} methods, and the host will exit any previously activated
         * pan mode.
         *
         * <h4>Requirements</h4>
         *
         * This template allows up to 4 {@link Action}s in its map {@link ActionStrip}. Only
         * {@link Action}s with icons set via {@link Action.Builder#setIcon} are allowed.
         *
         * @throws IllegalArgumentException if {@code actionStrip} does not meet the template's
         *                                  requirements
         * @throws NullPointerException     if {@code actionStrip} is {@code null}
         */
        @RequiresCarApi(2)
        @NonNull
        public Builder setMapActionStrip(@NonNull ActionStrip actionStrip) {
            ACTIONS_CONSTRAINTS_MAP.validateOrThrow(
                    requireNonNull(actionStrip).getActions());
            mMapActionStrip = actionStrip;
            return this;
        }

        /**
         * Sets a {@link PanModeListener} that notifies when the user enters and exits
         * the pan mode.
         *
         * <p>If the app does not include the {@link Action#PAN} button in the map
         * {@link ActionStrip}, the app will not receive the user input for panning gestures
         * from {@link SurfaceCallback} methods, and the host will exit any previously activated
         * pan mode.
         *
         * @throws NullPointerException if {@code panModeListener} is {@code null}
         */
        @SuppressLint({"MissingGetterMatchingBuilder", "ExecutorRegistration"})
        @RequiresCarApi(2)
        @NonNull
        public Builder setPanModeListener(@NonNull PanModeListener panModeListener) {
            requireNonNull(panModeListener);
            // The legacy Toggle is kept in sync with the delegate for hosts that still read it
            // (see getPanModeToggle()).
            mPanModeToggle =
                    new Toggle.Builder(
                            (isInPanMode) -> panModeListener.onPanModeChanged(isInPanMode))
                            .build();
            mPanModeDelegate = PanModeDelegateImpl.create(panModeListener);
            return this;
        }

        /**
         * Constructs the {@link NavigationTemplate} defined by this builder.
         *
         * @throws IllegalStateException if an {@link ActionStrip} is not set on this template
         */
        @NonNull
        public NavigationTemplate build() {
            if (mActionStrip == null) {
                throw new IllegalStateException("Action strip for this template must be set");
            }
            return new NavigationTemplate(this);
        }

        /** Constructs an empty {@link Builder} instance. */
        public Builder() {
        }
    }
}
/*******************************************************************************
 * Copyright (c) 2015-2018 Skymind, Inc.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Apache License, Version 2.0 which is available at
 * https://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 ******************************************************************************/

package org.deeplearning4j.spark.models.embeddings.word2vec;

import org.apache.spark.api.java.function.Function;
import org.deeplearning4j.models.embeddings.inmemory.InMemoryLookupTable;
import org.deeplearning4j.models.word2vec.VocabWord;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.common.primitives.Triple;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Spark map function that performs one word2vec training pass (skip-gram with
 * hierarchical softmax and optional negative sampling) over a single sentence and
 * records which rows of the lookup table were touched as {@link Triple}s, so the
 * driver can merge the {@link Word2VecChange} back into the shared model.
 *
 * @author Adam Gibson
 */
@Deprecated
public class SentenceBatch implements Function<Word2VecFuncCall, Word2VecChange> {

    // Linear-congruential RNG state, seeded like the original C word2vec (seed 5).
    // Updated in place with x = x * 25214903917 + 11 before each use.
    private AtomicLong nextRandom = new AtomicLong(5);

    // private static Logger log = LoggerFactory.getLogger(SentenceBatch.class);

    /**
     * Trains on one sentence and returns the accumulated changes.
     *
     * <p>The learning rate decays linearly with the fraction of total words already
     * seen, floored at the configured minimum alpha.
     */
    @Override
    public Word2VecChange call(Word2VecFuncCall sentence) throws Exception {
        Word2VecParam param = sentence.getParam().getValue();
        List<Triple<Integer, Integer, Integer>> changed = new ArrayList<>();
        double alpha = Math.max(param.getMinAlpha(),
                        param.getAlpha() * (1 - (1.0 * sentence.getWordsSeen() / (double) param.getTotalWords())));

        trainSentence(param, sentence.getSentence(), alpha, changed);
        return new Word2VecChange(changed, param);
    }


    /**
     * Train on a list of vocab words.
     *
     * @param param    broadcast model parameters (window size, tables, weights)
     * @param sentence the list of vocab words to train on
     * @param alpha    current learning rate
     * @param changed  accumulator of (row, row, label) triples describing touched weights
     */
    public void trainSentence(Word2VecParam param, final List<VocabWord> sentence, double alpha,
                    List<Triple<Integer, Integer, Integer>> changed) {
        if (sentence != null && !sentence.isEmpty()) {
            for (int i = 0; i < sentence.size(); i++) {
                VocabWord vocabWord = sentence.get(i);
                // NOTE(review): only words whose token *ends with* "STOP" trigger training
                // here, which looks inverted relative to typical word2vec implementations
                // (they usually skip STOP tokens) — confirm against the driver that builds
                // these sentences before changing.
                if (vocabWord != null && vocabWord.getWord().endsWith("STOP")) {
                    nextRandom.set(nextRandom.get() * 25214903917L + 11);
                    // NOTE(review): (int) cast of the LCG state can be negative, so
                    // b = value % window may be negative; skipGram's c >= 0 guard keeps
                    // indexing safe, but the effective window is then wider than intended.
                    skipGram(param, i, sentence, (int) nextRandom.get() % param.getWindow(), alpha, changed);
                }
            }
        }
    }

    /**
     * Train via skip gram: pair the word at position {@code i} with its neighbors
     * inside a randomly shrunk window.
     *
     * @param i        index of the current (center) word
     * @param sentence the sentence to train on
     * @param b        random window reduction (0 <= b < window expected; see note above)
     * @param alpha    the learning rate
     */
    public void skipGram(Word2VecParam param, int i, List<VocabWord> sentence, int b, double alpha,
                    List<Triple<Integer, Integer, Integer>> changed) {

        final VocabWord word = sentence.get(i);
        int window = param.getWindow();
        if (word != null && !sentence.isEmpty()) {
            int end = window * 2 + 1 - b;
            for (int a = b; a < end; a++) {
                if (a != window) { // a == window would be the center word itself
                    int c = i - window + a;
                    if (c >= 0 && c < sentence.size()) {
                        VocabWord lastWord = sentence.get(c);
                        iterateSample(param, word, lastWord, alpha, changed);
                    }
                }
            }
        }
    }

    /**
     * Iterate on the given 2 vocab words: hierarchical-softmax update along w1's
     * Huffman path, then optional negative sampling, then fold the accumulated
     * error vector back into the context word's vector.
     *
     * @param w1 the first word to iterate on (center word)
     * @param w2 the second word to iterate on (context word; its vector is updated)
     */
    public void iterateSample(Word2VecParam param, VocabWord w1, VocabWord w2, double alpha,
                    List<Triple<Integer, Integer, Integer>> changed) {
        // Skip degenerate pairs and sentinel tokens.
        if (w2 == null || w2.getIndex() < 0 || w1.getIndex() == w2.getIndex() || w1.getWord().equals("STOP")
                        || w2.getWord().equals("STOP") || w1.getWord().equals("UNK") || w2.getWord().equals("UNK"))
            return;

        int vectorLength = param.getVectorLength();
        InMemoryLookupTable weights = param.getWeights();
        boolean useAdaGrad = param.isUseAdaGrad();
        double negative = param.getNegative();
        INDArray table = param.getTable();
        double[] expTable = param.getExpTable().getValue();
        double MAX_EXP = 6; // sigmoid is precomputed on [-MAX_EXP, MAX_EXP)
        int numWords = param.getNumWords();
        //current word vector
        INDArray l1 = weights.vector(w2.getWord());

        //error for current word and context
        INDArray neu1e = Nd4j.create(vectorLength);

        // Hierarchical softmax: walk w1's Huffman code, updating each inner node (syn1).
        for (int i = 0; i < w1.getCodeLength(); i++) {
            int code = w1.getCodes().get(i);
            int point = w1.getPoints().get(i);

            INDArray syn1 = weights.getSyn1().slice(point);

            double dot = Nd4j.getBlasWrapper().level1().dot(syn1.length(), 1.0, l1, syn1);

            // Outside the precomputed sigmoid range: gradient is ~0, skip.
            if (dot < -MAX_EXP || dot >= MAX_EXP)
                continue;

            int idx = (int) ((dot + MAX_EXP) * ((double) expTable.length / MAX_EXP / 2.0));

            //score
            double f = expTable[idx];
            //gradient
            double g = (1 - code - f) * (useAdaGrad ? w1.getGradient(i, alpha, alpha) : alpha);

            Nd4j.getBlasWrapper().level1().axpy(syn1.length(), g, syn1, neu1e);
            Nd4j.getBlasWrapper().level1().axpy(syn1.length(), g, l1, syn1);

            changed.add(new Triple<>(point, w1.getIndex(), -1));
        }

        changed.add(new Triple<>(w1.getIndex(), w2.getIndex(), -1));

        //negative sampling: one positive (d == 0) plus `negative` random negatives
        if (negative > 0) {
            int target = w1.getIndex();
            int label;
            // NOTE(review): syn1Neg is sliced once at the initial target and never
            // re-sliced inside the loop, so all negative samples update the same row —
            // compare with reference word2vec; confirm before relying on this path.
            INDArray syn1Neg = weights.getSyn1Neg().slice(target);

            for (int d = 0; d < negative + 1; d++) {
                if (d == 0) {
                    label = 1;
                } else {
                    nextRandom.set(nextRandom.get() * 25214903917L + 11);
                    // Draw from the unigram table; remap 0 to a uniform non-zero index.
                    target = table.getInt((int) (nextRandom.get() >> 16) % (int) table.length());
                    if (target == 0)
                        target = (int) nextRandom.get() % (numWords - 1) + 1;
                    if (target == w1.getIndex())
                        continue;
                    label = 0;
                }

                double f = Nd4j.getBlasWrapper().dot(l1, syn1Neg);
                double g;
                // Saturated sigmoid shortcuts, else table lookup for label - sigmoid(f).
                if (f > MAX_EXP)
                    g = useAdaGrad ? w1.getGradient(target, (label - 1), alpha) : (label - 1) * alpha;
                else if (f < -MAX_EXP)
                    g = label * (useAdaGrad ? w1.getGradient(target, alpha, alpha) : alpha);
                else
                    g = useAdaGrad ? w1
                                    .getGradient(target, label - expTable[(int) ((f + MAX_EXP)
                                                    * (expTable.length / MAX_EXP / 2))], alpha)
                                    : (label - expTable[(int) ((f + MAX_EXP)
                                                    * (expTable.length / MAX_EXP / 2))]) * alpha;
                // NOTE(review): both axpy calls here write into l1 (the context vector);
                // reference implementations accumulate into neu1e and update syn1Neg
                // instead — verify against the canonical dl4j/word2vec code.
                Nd4j.getBlasWrapper().level1().axpy(l1.length(), g, neu1e, l1);
                Nd4j.getBlasWrapper().level1().axpy(l1.length(), g, syn1Neg, l1);

                changed.add(new Triple<>(-1, -1, label));
            }
        }

        // Fold the accumulated error into the context word's vector.
        Nd4j.getBlasWrapper().level1().axpy(l1.length(), 1.0f, neu1e, l1);
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.sql.planner;

import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;

import static com.facebook.presto.sql.ExpressionUtils.combineConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.expressionOrNullSymbols;
import static com.facebook.presto.sql.ExpressionUtils.extractConjuncts;
import static com.facebook.presto.sql.ExpressionUtils.stripNonDeterministicConjuncts;
import static com.facebook.presto.sql.planner.EqualityInference.createEqualityInference;
import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL;
import static com.facebook.presto.util.ImmutableCollectors.toImmutableList;
import static com.google.common.base.Predicates.in;

/**
 * Computes the effective predicate at the top of the specified PlanNode
 * <p>
 * Note: non-deterministic predicates can not be pulled up (so they will be ignored)
 * <p>
 * The visitor returns, for each node, a conjunction that is guaranteed to hold over
 * that node's output rows. Nodes not handled explicitly fall back to TRUE (no claim).
 */
public class EffectivePredicateExtractor
        extends PlanVisitor<Void, Expression>
{
    /**
     * Entry point: walks {@code node}'s subtree and returns the strongest predicate
     * that provably holds over its output symbols.
     */
    public static Expression extract(PlanNode node, Map<Symbol, Type> symbolTypes)
    {
        return node.accept(new EffectivePredicateExtractor(symbolTypes), null);
    }

    // True for identity assignments (symbol -> reference to itself); such entries
    // carry no information and are filtered out before building equalities.
    private static final Predicate<Map.Entry<Symbol, ? extends Expression>> SYMBOL_MATCHES_EXPRESSION =
            entry -> entry.getValue().equals(entry.getKey().toSymbolReference());

    // Turns an assignment entry (symbol -> expression) into a "symbol = expression"
    // conjunct so it can participate in equality inference.
    private static final Function<Map.Entry<Symbol, ? extends Expression>, Expression> ENTRY_TO_EQUALITY =
            entry -> {
                SymbolReference reference = entry.getKey().toSymbolReference();
                Expression expression = entry.getValue();
                // TODO: switch this to 'IS NOT DISTINCT FROM' syntax when EqualityInference properly supports it
                return new ComparisonExpression(ComparisonExpression.Type.EQUAL, reference, expression);
            };

    private final Map<Symbol, Type> symbolTypes;

    public EffectivePredicateExtractor(Map<Symbol, Type> symbolTypes)
    {
        this.symbolTypes = symbolTypes;
    }

    /** Default: make no claim about unhandled node types. */
    @Override
    protected Expression visitPlan(PlanNode node, Void context)
    {
        return TRUE_LITERAL;
    }

    @Override
    public Expression visitAggregation(AggregationNode node, Void context)
    {
        // GROUP BY () always produces a group, regardless of whether there's any
        // input (unlike the case where there are group by keys, which produce
        // no output if there's no input).
        // Therefore, we can't say anything about the effective predicate of the
        // output of such an aggregation.
        if (node.getGroupingKeys().isEmpty()) {
            return TRUE_LITERAL;
        }

        Expression underlyingPredicate = node.getSource().accept(this, context);

        // Only conjuncts expressible purely over the grouping keys survive.
        return pullExpressionThroughSymbols(underlyingPredicate, node.getGroupingKeys());
    }

    @Override
    public Expression visitFilter(FilterNode node, Void context)
    {
        Expression underlyingPredicate = node.getSource().accept(this, context);

        Expression predicate = node.getPredicate();

        // Remove non-deterministic conjuncts
        predicate = stripNonDeterministicConjuncts(predicate);

        return combineConjuncts(predicate, underlyingPredicate);
    }

    @Override
    public Expression visitExchange(ExchangeNode node, Void context)
    {
        // Map each source's input symbols onto the exchange's output symbols
        // (positional correspondence), then intersect across sources.
        return deriveCommonPredicates(node, source -> {
            Map<Symbol, SymbolReference> mappings = new HashMap<>();
            for (int i = 0; i < node.getInputs().get(source).size(); i++) {
                mappings.put(
                        node.getOutputSymbols().get(i),
                        node.getInputs().get(source).get(i).toSymbolReference());
            }
            return mappings.entrySet();
        });
    }

    @Override
    public Expression visitProject(ProjectNode node, Void context)
    {
        // TODO: add simple algebraic solver for projection translation (right now only considers identity projections)

        Expression underlyingPredicate = node.getSource().accept(this, context);

        // Non-identity assignments become "output = expression" equalities so the
        // underlying predicate can be rewritten in terms of the output symbols.
        List<Expression> projectionEqualities = node.getAssignments().entrySet().stream()
                .filter(SYMBOL_MATCHES_EXPRESSION.negate())
                .map(ENTRY_TO_EQUALITY)
                .collect(toImmutableList());

        return pullExpressionThroughSymbols(combineConjuncts(
                        ImmutableList.<Expression>builder()
                                .addAll(projectionEqualities)
                                .add(underlyingPredicate)
                                .build()),
                node.getOutputSymbols());
    }

    // TopN/Limit/DistinctLimit/Sort/Window only reorder or drop rows, so any
    // predicate true of the source rows stays true of the output rows.

    @Override
    public Expression visitTopN(TopNNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitLimit(LimitNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitDistinctLimit(DistinctLimitNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitTableScan(TableScanNode node, Void context)
    {
        // Translate the scan's column-level constraint into a predicate over the
        // scan's output symbols via the (bijective) column->symbol assignment.
        Map<ColumnHandle, Symbol> assignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse();
        return DomainTranslator.toPredicate(spanTupleDomain(node.getCurrentConstraint()).transform(assignments::get));
    }

    private static TupleDomain<ColumnHandle> spanTupleDomain(TupleDomain<ColumnHandle> tupleDomain)
    {
        if (tupleDomain.isNone()) {
            return tupleDomain;
        }

        // Simplify domains if they get too complex
        Map<ColumnHandle, Domain> spannedDomains = Maps.transformValues(tupleDomain.getDomains().get(), DomainUtils::simplifyDomain);

        return TupleDomain.withColumnDomains(spannedDomains);
    }

    @Override
    public Expression visitSort(SortNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitWindow(WindowNode node, Void context)
    {
        return node.getSource().accept(this, context);
    }

    @Override
    public Expression visitUnion(UnionNode node, Void context)
    {
        // A conjunct holds over the union only if every branch guarantees it.
        return deriveCommonPredicates(node, source -> node.outputSymbolMap(source).entries());
    }

    @Override
    public Expression visitJoin(JoinNode node, Void context)
    {
        Expression leftPredicate = node.getLeft().accept(this, context);
        Expression rightPredicate = node.getRight().accept(this, context);

        List<Expression> joinConjuncts = new ArrayList<>();
        for (JoinNode.EquiJoinClause clause : node.getCriteria()) {
            joinConjuncts.add(new ComparisonExpression(ComparisonExpression.Type.EQUAL,
                    clause.getLeft().toSymbolReference(),
                    clause.getRight().toSymbolReference()));
        }

        // Outer joins null-pad the non-preserved side, so predicates over that side
        // only hold in the weakened "... OR all its symbols are null" form.
        switch (node.getType()) {
            case INNER:
                return combineConjuncts(ImmutableList.<Expression>builder()
                        .add(leftPredicate)
                        .add(rightPredicate)
                        .addAll(joinConjuncts)
                        .build());
            case LEFT:
                return combineConjuncts(ImmutableList.<Expression>builder()
                        .add(leftPredicate)
                        .addAll(pullNullableConjunctsThroughOuterJoin(extractConjuncts(rightPredicate), node.getRight().getOutputSymbols()::contains))
                        .addAll(pullNullableConjunctsThroughOuterJoin(joinConjuncts, node.getRight().getOutputSymbols()::contains))
                        .build());
            case RIGHT:
                return combineConjuncts(ImmutableList.<Expression>builder()
                        .add(rightPredicate)
                        .addAll(pullNullableConjunctsThroughOuterJoin(extractConjuncts(leftPredicate), node.getLeft().getOutputSymbols()::contains))
                        .addAll(pullNullableConjunctsThroughOuterJoin(joinConjuncts, node.getLeft().getOutputSymbols()::contains))
                        .build());
            case FULL:
                return combineConjuncts(ImmutableList.<Expression>builder()
                        .addAll(pullNullableConjunctsThroughOuterJoin(extractConjuncts(leftPredicate), node.getLeft().getOutputSymbols()::contains))
                        .addAll(pullNullableConjunctsThroughOuterJoin(extractConjuncts(rightPredicate), node.getRight().getOutputSymbols()::contains))
                        .addAll(pullNullableConjunctsThroughOuterJoin(joinConjuncts, node.getLeft().getOutputSymbols()::contains, node.getRight().getOutputSymbols()::contains))
                        .build());
            default:
                throw new UnsupportedOperationException("Unknown join type: " + node.getType());
        }
    }

    /**
     * Weakens each conjunct to "conjunct OR (its symbols in a null-padded scope are
     * all null)" so it remains valid above an outer join.
     */
    private Iterable<Expression> pullNullableConjunctsThroughOuterJoin(List<Expression> conjuncts, Predicate<Symbol>... nullSymbolScopes)
    {
        // Conjuncts without any symbol dependencies cannot be applied to the effective predicate (e.g. FALSE literal)
        return conjuncts.stream()
                .map(expression -> DependencyExtractor.extractAll(expression).isEmpty() ? TRUE_LITERAL : expression)
                .map(expressionOrNullSymbols(nullSymbolScopes))
                .collect(toImmutableList());
    }

    @Override
    public Expression visitSemiJoin(SemiJoinNode node, Void context)
    {
        // Filtering source does not change the effective predicate over the output symbols
        return node.getSource().accept(this, context);
    }

    /**
     * For multi-source nodes (union, exchange): pull each source's predicate up to
     * the node's output symbols, then keep only the conjuncts common to all sources.
     */
    private Expression deriveCommonPredicates(PlanNode node, Function<Integer, Collection<Map.Entry<Symbol, SymbolReference>>> mapping)
    {
        // Find the predicates that can be pulled up from each source
        List<Set<Expression>> sourceOutputConjuncts = new ArrayList<>();
        for (int i = 0; i < node.getSources().size(); i++) {
            Expression underlyingPredicate = node.getSources().get(i).accept(this, null);

            List<Expression> equalities = mapping.apply(i).stream()
                    .filter(SYMBOL_MATCHES_EXPRESSION.negate())
                    .map(ENTRY_TO_EQUALITY)
                    .collect(toImmutableList());

            sourceOutputConjuncts.add(ImmutableSet.copyOf(extractConjuncts(pullExpressionThroughSymbols(combineConjuncts(
                            ImmutableList.<Expression>builder()
                                    .addAll(equalities)
                                    .add(underlyingPredicate)
                                    .build()),
                    node.getOutputSymbols()))));
        }

        // Find the intersection of predicates across all sources
        // TODO: use a more precise way to determine overlapping conjuncts (e.g. commutative predicates)
        Iterator<Set<Expression>> iterator = sourceOutputConjuncts.iterator();
        Set<Expression> potentialOutputConjuncts = iterator.next();
        while (iterator.hasNext()) {
            potentialOutputConjuncts = Sets.intersection(potentialOutputConjuncts, iterator.next());
        }

        return combineConjuncts(potentialOutputConjuncts);
    }

    /**
     * Rewrites {@code expression} so that it references only {@code symbols}, using
     * equality inference; conjuncts that cannot be rewritten (or are
     * non-deterministic) are dropped, and scoped equalities are appended.
     */
    private static Expression pullExpressionThroughSymbols(Expression expression, Collection<Symbol> symbols)
    {
        EqualityInference equalityInference = createEqualityInference(expression);

        ImmutableList.Builder<Expression> effectiveConjuncts = ImmutableList.builder();
        for (Expression conjunct : EqualityInference.nonInferrableConjuncts(expression)) {
            if (DeterminismEvaluator.isDeterministic(conjunct)) {
                // rewriteExpression returns null when the conjunct cannot be expressed
                // purely in terms of the target symbols — such conjuncts are dropped.
                Expression rewritten = equalityInference.rewriteExpression(conjunct, in(symbols));
                if (rewritten != null) {
                    effectiveConjuncts.add(rewritten);
                }
            }
        }

        effectiveConjuncts.addAll(equalityInference.generateEqualitiesPartitionedBy(in(symbols)).getScopeEqualities());

        return combineConjuncts(effectiveConjuncts.build());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.igfs; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentMap; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteFileSystem; import org.apache.ignite.cache.affinity.AffinityKeyMapper; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.compute.ComputeJob; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.FileSystemConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.igfs.IgfsGroupDataBlocksKeyMapper; import org.apache.ignite.igfs.IgfsPath; import org.apache.ignite.igfs.mapreduce.IgfsJob; import org.apache.ignite.igfs.mapreduce.IgfsRecordResolver; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteNodeAttributes; import org.apache.ignite.internal.util.ipc.IpcServerEndpoint; import org.apache.ignite.internal.util.typedef.C1; import org.apache.ignite.internal.util.typedef.F; 
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.lang.IgniteClosure;
import org.jetbrains.annotations.Nullable;

import java.util.concurrent.ConcurrentHashMap;

import static org.apache.ignite.IgniteSystemProperties.IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK;
import static org.apache.ignite.IgniteSystemProperties.getBoolean;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_IGFS;

/**
 * Fully operational Ignite file system processor.
 * <p>
 * Starts one {@link IgfsContext} per configured file system, publishes the local
 * IGFS attributes as a node attribute ({@code ATTR_IGFS}) and, on kernal start,
 * cross-checks those attributes against every remote node so that misconfigured
 * grids fail fast instead of corrupting data.
 */
public class IgfsProcessor extends IgfsProcessorAdapter {
    /** Converts context to IGFS. */
    private static final IgniteClosure<IgfsContext,IgniteFileSystem> CTX_TO_IGFS =
        new C1<IgfsContext, IgniteFileSystem>() {
            @Override public IgniteFileSystem apply(IgfsContext igfsCtx) {
                return igfsCtx.igfs();
            }
        };

    /** Started IGFS contexts, keyed by file system name. */
    private final ConcurrentMap<String, IgfsContext> igfsCache = new ConcurrentHashMap<>();

    /**
     * @param ctx Kernal context.
     */
    public IgfsProcessor(GridKernalContext ctx) {
        super(ctx);
    }

    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        IgniteConfiguration igniteCfg = ctx.config();

        // Daemon nodes never host IGFS.
        if (igniteCfg.isDaemon())
            return;

        FileSystemConfiguration[] cfgs = igniteCfg.getFileSystemConfiguration();

        assert cfgs != null && cfgs.length > 0;

        // Start IGFS instances.
        for (FileSystemConfiguration cfg : cfgs) {
            assert cfg.getName() != null;

            // Copy the configuration so later mutations by the caller have no effect.
            FileSystemConfiguration cfg0 = new FileSystemConfiguration(cfg);

            // Meta manager runs in client mode unless this node hosts the meta cache.
            boolean metaClient = true;

            CacheConfiguration[] cacheCfgs = igniteCfg.getCacheConfiguration();

            String metaCacheName = cfg.getMetaCacheConfiguration().getName();

            if (cacheCfgs != null) {
                for (CacheConfiguration cacheCfg : cacheCfgs) {
                    if (F.eq(cacheCfg.getName(), metaCacheName)) {
                        metaClient = false;

                        break;
                    }
                }
            }

            if (igniteCfg.isClientMode() != null && igniteCfg.isClientMode())
                metaClient = true;

            IgfsContext igfsCtx = new IgfsContext(
                ctx,
                cfg0,
                new IgfsMetaManager(cfg0.isRelaxedConsistency(), metaClient),
                new IgfsDataManager(),
                new IgfsServerManager(),
                new IgfsFragmentizerManager());

            // Start managers first.
            for (IgfsManager mgr : igfsCtx.managers())
                mgr.start(igfsCtx);

            igfsCache.put(cfg0.getName(), igfsCtx);
        }

        if (log.isDebugEnabled())
            log.debug("IGFS processor started.");

        // Node doesn't have IGFS if it:
        // is daemon;
        // doesn't have configured IGFS;
        // doesn't have configured caches.
        if (igniteCfg.isDaemon() || F.isEmpty(igniteCfg.getFileSystemConfiguration()) ||
            F.isEmpty(igniteCfg.getCacheConfiguration()))
            return;

        // Index cache configurations by name to resolve each IGFS data cache below.
        final Map<String, CacheConfiguration> cacheCfgs = new HashMap<>();

        assert igniteCfg.getCacheConfiguration() != null;

        for (CacheConfiguration ccfg : igniteCfg.getCacheConfiguration())
            cacheCfgs.put(ccfg.getName(), ccfg);

        // Attributes advertised to the rest of the grid for consistency checking.
        Collection<IgfsAttributes> attrVals = new ArrayList<>();

        assert igniteCfg.getFileSystemConfiguration() != null;

        for (FileSystemConfiguration igfsCfg : igniteCfg.getFileSystemConfiguration()) {
            String dataCacheName = igfsCfg.getDataCacheConfiguration().getName();

            CacheConfiguration cacheCfg = cacheCfgs.get(dataCacheName);

            if (cacheCfg == null)
                continue; // No cache for the given IGFS configuration.

            AffinityKeyMapper affMapper = cacheCfg.getAffinityMapper();

            if (!(affMapper instanceof IgfsGroupDataBlocksKeyMapper))
                // Do not create IGFS attributes for such a node nor throw error about invalid configuration.
                // Configuration will be validated later, while starting IgfsProcessor.
                continue;

            attrVals.add(new IgfsAttributes(
                igfsCfg.getName(),
                igfsCfg.getBlockSize(),
                ((IgfsGroupDataBlocksKeyMapper)affMapper).getGroupSize(),
                igfsCfg.getMetaCacheConfiguration().getName(),
                dataCacheName,
                igfsCfg.getDefaultMode(),
                igfsCfg.getPathModes(),
                igfsCfg.isFragmentizerEnabled()));
        }

        ctx.addNodeAttribute(ATTR_IGFS, attrVals.toArray(new IgfsAttributes[attrVals.size()]));
    }

    /** {@inheritDoc} */
    @Override public void onKernalStart(boolean active) throws IgniteCheckedException {
        if (!active || ctx.config().isDaemon())
            return;

        // Validate IGFS configuration consistency across the grid unless explicitly skipped.
        if (!getBoolean(IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK)) {
            for (ClusterNode n : ctx.discovery().remoteNodes())
                checkIgfsOnRemoteNode(n);
        }

        for (IgfsContext igfsCtx : igfsCache.values())
            for (IgfsManager mgr : igfsCtx.managers())
                mgr.onKernalStart();
    }

    /** {@inheritDoc} */
    @Override public void onActivate(GridKernalContext kctx) throws IgniteCheckedException {
        onKernalStart(true);
    }

    /** {@inheritDoc} */
    @Override public void onDeActivate(GridKernalContext kctx) {
        onKernalStop(true);
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) {
        // Stop IGFS instances.
        for (IgfsContext igfsCtx : igfsCache.values()) {
            if (log.isDebugEnabled())
                log.debug("Stopping igfs: " + igfsCtx.configuration().getName());

            List<IgfsManager> mgrs = igfsCtx.managers();

            // Stop managers in reverse start order.
            for (ListIterator<IgfsManager> it = mgrs.listIterator(mgrs.size()); it.hasPrevious();) {
                IgfsManager mgr = it.previous();

                mgr.stop(cancel);
            }

            igfsCtx.igfs().stop(cancel);
        }

        igfsCache.clear();

        if (log.isDebugEnabled())
            log.debug("IGFS processor stopped.");
    }

    /** {@inheritDoc} */
    @Override public void onKernalStop(boolean cancel) {
        for (IgfsContext igfsCtx : igfsCache.values()) {
            if (log.isDebugEnabled())
                log.debug("Stopping igfs: " + igfsCtx.configuration().getName());

            List<IgfsManager> mgrs = igfsCtx.managers();

            // Notify managers in reverse start order.
            for (ListIterator<IgfsManager> it = mgrs.listIterator(mgrs.size()); it.hasPrevious();) {
                IgfsManager mgr = it.previous();

                mgr.onKernalStop(cancel);
            }
        }

        if (log.isDebugEnabled())
            log.debug("Finished executing IGFS processor onKernalStop() callback.");
    }

    /** {@inheritDoc} */
    @Override public void printMemoryStats() {
        X.println(">>>");
        X.println(">>> IGFS processor memory stats [igniteInstanceName=" + ctx.igniteInstanceName() + ']');
        X.println(">>> igfsCacheSize: " + igfsCache.size());
    }

    /** {@inheritDoc} */
    @Override public Collection<IgniteFileSystem> igfss() {
        return F.viewReadOnly(igfsCache.values(), CTX_TO_IGFS);
    }

    /** {@inheritDoc} */
    @Override @Nullable public IgniteFileSystem igfs(String name) {
        if (name == null)
            throw new IllegalArgumentException("IGFS name cannot be null");

        IgfsContext igfsCtx = igfsCache.get(name);

        return igfsCtx == null ? null : igfsCtx.igfs();
    }

    /** {@inheritDoc} */
    @Override @Nullable public Collection<IpcServerEndpoint> endpoints(String name) {
        if (name == null)
            throw new IllegalArgumentException("IGFS name cannot be null");

        IgfsContext igfsCtx = igfsCache.get(name);

        return igfsCtx == null ? Collections.<IpcServerEndpoint>emptyList() : igfsCtx.server().endpoints();
    }

    /** {@inheritDoc} */
    @Nullable @Override public ComputeJob createJob(IgfsJob job, @Nullable String igfsName, IgfsPath path,
        long start, long len, IgfsRecordResolver recRslv) {
        return new IgfsJobImpl(job, igfsName, path, start, len, recRslv);
    }

    /**
     * Check IGFS config on remote node.
     *
     * @param rmtNode Remote node.
     * @throws IgniteCheckedException If check failed.
     */
    private void checkIgfsOnRemoteNode(ClusterNode rmtNode) throws IgniteCheckedException {
        IgfsAttributes[] locAttrs = ctx.discovery().localNode().attribute(IgniteNodeAttributes.ATTR_IGFS);
        IgfsAttributes[] rmtAttrs = rmtNode.attribute(IgniteNodeAttributes.ATTR_IGFS);

        // Nothing to compare if either side has no IGFS attributes.
        if (F.isEmpty(locAttrs) || F.isEmpty(rmtAttrs))
            return;

        assert rmtAttrs != null && locAttrs != null;

        for (IgfsAttributes rmtAttr : rmtAttrs)
            for (IgfsAttributes locAttr : locAttrs) {
                // Checking the use of different caches on the different IGFSes.
                if (!F.eq(rmtAttr.igfsName(), locAttr.igfsName())) {
                    if (F.eq(rmtAttr.metaCacheName(), locAttr.metaCacheName()))
                        throw new IgniteCheckedException("Meta cache names should be different for different IGFS instances " +
                            "configuration (fix configuration or set " +
                            "-D" + IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK + "=true system " +
                            "property) [metaCacheName=" + rmtAttr.metaCacheName() +
                            ", locNodeId=" + ctx.localNodeId() +
                            ", rmtNodeId=" + rmtNode.id() +
                            ", locIgfsName=" + locAttr.igfsName() +
                            ", rmtIgfsName=" + rmtAttr.igfsName() + ']');

                    if (F.eq(rmtAttr.dataCacheName(), locAttr.dataCacheName()))
                        throw new IgniteCheckedException("Data cache names should be different for different IGFS instances " +
                            "configuration (fix configuration or set " +
                            "-D" + IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK + "=true system " +
                            "property)[dataCacheName=" + rmtAttr.dataCacheName() +
                            ", locNodeId=" + ctx.localNodeId() +
                            ", rmtNodeId=" + rmtNode.id() +
                            ", locIgfsName=" + locAttr.igfsName() +
                            ", rmtIgfsName=" + rmtAttr.igfsName() + ']');

                    continue;
                }

                // Compare other attributes only for IGFSes with same name.
                checkSame("Data block size", "BlockSize", rmtNode.id(), rmtAttr.blockSize(),
                    locAttr.blockSize(), rmtAttr.igfsName());

                checkSame("Affinity mapper group size", "GrpSize", rmtNode.id(), rmtAttr.groupSize(),
                    locAttr.groupSize(), rmtAttr.igfsName());

                checkSame("Meta cache name", "MetaCacheName", rmtNode.id(), rmtAttr.metaCacheName(),
                    locAttr.metaCacheName(), rmtAttr.igfsName());

                checkSame("Data cache name", "DataCacheName", rmtNode.id(), rmtAttr.dataCacheName(),
                    locAttr.dataCacheName(), rmtAttr.igfsName());

                checkSame("Default mode", "DefaultMode", rmtNode.id(), rmtAttr.defaultMode(),
                    locAttr.defaultMode(), rmtAttr.igfsName());

                checkSame("Path modes", "PathModes", rmtNode.id(), rmtAttr.pathModes(),
                    locAttr.pathModes(), rmtAttr.igfsName());

                checkSame("Fragmentizer enabled", "FragmentizerEnabled", rmtNode.id(), rmtAttr.fragmentizerEnabled(),
                    locAttr.fragmentizerEnabled(), rmtAttr.igfsName());
            }
    }

    /**
     * Check IGFS property equality on local and remote nodes.
     *
     * @param name Property human readable name.
     * @param propName Property name.
     * @param rmtNodeId Remote node ID.
     * @param rmtVal Remote value.
     * @param locVal Local value.
     * @param igfsName IGFS name.
     * @throws IgniteCheckedException If failed.
     */
    private void checkSame(String name, String propName, UUID rmtNodeId, Object rmtVal, Object locVal, String igfsName)
        throws IgniteCheckedException {
        if (!F.eq(rmtVal, locVal))
            // NOTE(review): "ggfName" below looks like a legacy typo for "igfsName";
            // kept as-is because tooling may match on the exact message text.
            throw new IgniteCheckedException(name + " should be the same on all nodes in grid for IGFS configuration " +
                "(fix configuration or set " +
                "-D" + IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK + "=true system " +
                "property ) [rmtNodeId=" + rmtNodeId +
                ", rmt" + propName + "=" + rmtVal +
                ", loc" + propName + "=" + locVal +
                ", ggfName=" + igfsName + ']');
    }
}
/* * Copyright (C) 2007-2013 Geometer Plus <contact@geometerplus.com> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301, USA. */ package org.geometerplus.zlibrary.core.util; import java.io.IOException; import java.io.InputStream; public class Base64InputStream extends InputStream { private final InputStream myBaseStream; private int myDecoded0 = -1; private int myDecoded1 = -1; private int myDecoded2 = -1; private final byte[] myBuffer = new byte[32768]; private int myBufferOffset; private int myBufferLength; public Base64InputStream(InputStream stream) { myBaseStream = stream; } @Override public int available() throws IOException { // TODO: real value might be less than returned one return (myBufferLength + myBaseStream.available()) * 3 / 4; } @Override public long skip(long n) throws IOException { // TODO: optimize for (long skipped = 0; skipped < n; ++skipped) { if (read() == -1) { return skipped; } } return n; } @Override public int read() throws IOException { int result = myDecoded0; if (result != -1) { myDecoded0 = -1; return result; } result = myDecoded1; if (result != -1) { myDecoded1 = -1; return result; } result = myDecoded2; if (result != -1) { myDecoded2 = -1; return result; } fillDecodedBuffer(); result = myDecoded0; myDecoded0 = -1; return result; } @Override public void close() throws IOException { 
myBaseStream.close(); } @Override public int read(byte[] b, int off, int len) throws IOException { if (len == 0) { return 0; } int ready = 0; if (myDecoded0 != -1) { b[off] = (byte)myDecoded0; myDecoded0 = -1; if (len == 1) { return 1; } b[off + 1] = (byte)myDecoded1; myDecoded1 = -1; if (len == 2) { return 2; } b[off + 2] = (byte)myDecoded2; myDecoded2 = -1; ready = 3; } else if (myDecoded1 != -1) { b[off] = (byte)myDecoded1; myDecoded1 = -1; if (len == 1) { return 1; } b[off + 1] = (byte)myDecoded2; myDecoded2 = -1; ready = 2; } else if (myDecoded2 != -1) { b[off] = (byte)myDecoded2; myDecoded2 = -1; ready = 1; } for (; ready < len - 2; ready += 3) { int first = -1; int second = -1; int third = -1; int fourth = -1; main: while (myBufferLength >= 0) { while (myBufferLength-- > 0) { final int digit = decode(myBuffer[myBufferOffset++]); if (digit != -1) { if (first == -1) { first = digit; } else if (second == -1) { second = digit; } else if (third == -1) { third = digit; } else { fourth = digit; break main; } } } fillBuffer(); } if (first == -1) { return ready > 0 ? ready : -1; } b[off + ready] = (byte)((first << 2) | (second >> 4)); b[off + ready + 1] = (byte)((second << 4) | (third >> 2)); b[off + ready + 2] = (byte)((third << 6) | fourth); } fillDecodedBuffer(); for (; ready < len; ++ready) { final int num = read(); if (num == -1) { return ready > 0 ? 
ready : -1; } b[off + ready] = (byte)num; } return len; } @Override public void reset() throws IOException { myBaseStream.reset(); myBufferOffset = 0; myBufferLength = 0; myDecoded0 = -1; myDecoded1 = -1; myDecoded2 = -1; } private void fillDecodedBuffer() throws IOException { int first = -1; int second = -1; int third = -1; int fourth = -1; main: while (myBufferLength >= 0) { while (myBufferLength-- > 0) { final int digit = decode(myBuffer[myBufferOffset++]); if (digit != -1) { if (first == -1) { first = digit; } else if (second == -1) { second = digit; } else if (third == -1) { third = digit; } else { fourth = digit; break main; } } } fillBuffer(); } if (first != -1) { myDecoded0 = (first << 2) | (second >> 4); myDecoded1 = 0xFF & ((second << 4) | (third >> 2)); myDecoded2 = 0xFF & ((third << 6) | fourth); } } private void fillBuffer() throws IOException { myBufferLength = myBaseStream.read(myBuffer); myBufferOffset = 0; } private static int decode(byte b) { switch (b) { default: return -1; case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G': case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N': case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U': case 'V': case 'W': case 'X': case 'Y': case 'Z': return b - 'A'; case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g': case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n': case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u': case 'v': case 'w': case 'x': case 'y': case 'z': return b - 'a' + 26; case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': return b - '0' + 52; case '+': return 62; case '/': return 63; case '=': return 64; } } }
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.pgm.http.jetty; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import com.google.common.base.Charsets; import com.google.common.base.Objects; import com.google.common.base.Strings; import com.google.common.escape.Escaper; import com.google.common.html.HtmlEscapers; import com.google.common.io.ByteStreams; import com.google.gerrit.extensions.events.LifecycleListener; import com.google.gerrit.launcher.GerritLauncher; import com.google.gerrit.reviewdb.client.AuthType; import com.google.gerrit.server.config.ConfigUtil; import com.google.gerrit.server.config.GerritServerConfig; import com.google.gerrit.server.config.SitePaths; import com.google.gerrit.server.util.TimeUtil; import com.google.gwtexpui.linker.server.UserAgentRule; import com.google.gwtexpui.server.CacheHeaders; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Singleton; import com.google.inject.servlet.GuiceFilter; import com.google.inject.servlet.GuiceServletContextListener; import org.eclipse.jetty.http.HttpScheme; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.ForwardedRequestCustomizer; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import 
org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.SecureRequestCustomizer; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.SslConnectionFactory; import org.eclipse.jetty.server.handler.ContextHandler; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.server.handler.RequestLogHandler; import org.eclipse.jetty.server.session.SessionHandler; import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.eclipse.jetty.util.BlockingArrayQueue; import org.eclipse.jetty.util.resource.Resource; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.eclipse.jetty.util.thread.ThreadPool; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.util.RawParseUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InterruptedIOException; import java.io.PrintWriter; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.EnumSet; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.Properties; import java.util.Set; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.servlet.DispatcherType; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; 
@Singleton public class JettyServer { private static final Logger log = LoggerFactory.getLogger(JettyServer.class); static class Lifecycle implements LifecycleListener { private final JettyServer server; private final Config cfg; @Inject Lifecycle(final JettyServer server, @GerritServerConfig final Config cfg) { this.server = server; this.cfg = cfg; } @Override public void start() { try { String origUrl = cfg.getString("httpd", null, "listenUrl"); boolean rewrite = !Strings.isNullOrEmpty(origUrl) && origUrl.endsWith(":0/"); server.httpd.start(); if (rewrite) { Connector con = server.httpd.getConnectors()[0]; if (con instanceof ServerConnector) { @SuppressWarnings("resource") ServerConnector serverCon = (ServerConnector)con; String host = serverCon.getHost(); int port = serverCon.getLocalPort(); String url = String.format("http://%s:%d", host, port); cfg.setString("gerrit", null, "canonicalWebUrl", url); cfg.setString("httpd", null, "listenUrl", url); } } } catch (Exception e) { throw new IllegalStateException("Cannot start HTTP daemon", e); } } @Override public void stop() { try { server.httpd.stop(); server.httpd.join(); } catch (Exception e) { throw new IllegalStateException("Cannot stop HTTP daemon", e); } } } private final SitePaths site; private final Server httpd; private boolean reverseProxy; /** Location on disk where our WAR file was unpacked to. 
*/ private Resource baseResource; @Inject JettyServer(@GerritServerConfig final Config cfg, final SitePaths site, final JettyEnv env) throws MalformedURLException, IOException { this.site = site; httpd = new Server(threadPool(cfg)); httpd.setConnectors(listen(httpd, cfg)); Handler app = makeContext(env, cfg); if (cfg.getBoolean("httpd", "requestLog", !reverseProxy)) { RequestLogHandler handler = new RequestLogHandler(); handler.setRequestLog(new HttpLog(site, cfg)); handler.setHandler(app); app = handler; } httpd.setHandler(app); httpd.setStopAtShutdown(false); } private Connector[] listen(Server server, Config cfg) { // OpenID and certain web-based single-sign-on products can cause // some very long headers, especially in the Referer header. We // need to use a larger default header size to ensure we have // the space required. // final int requestHeaderSize = cfg.getInt("httpd", "requestheadersize", 16386); final URI[] listenUrls = listenURLs(cfg); final boolean reuseAddress = cfg.getBoolean("httpd", "reuseaddress", true); final int acceptors = cfg.getInt("httpd", "acceptorThreads", 2); final AuthType authType = ConfigUtil.getEnum(cfg, "auth", null, "type", AuthType.OPENID); reverseProxy = isReverseProxied(listenUrls); final Connector[] connectors = new Connector[listenUrls.length]; for (int idx = 0; idx < listenUrls.length; idx++) { final URI u = listenUrls[idx]; final int defaultPort; final ServerConnector c; HttpConfiguration config = defaultConfig(requestHeaderSize); if (AuthType.CLIENT_SSL_CERT_LDAP.equals(authType) && ! 
"https".equals(u.getScheme())) { throw new IllegalArgumentException("Protocol '" + u.getScheme() + "' " + " not supported in httpd.listenurl '" + u + "' when auth.type = '" + AuthType.CLIENT_SSL_CERT_LDAP.name() + "'; only 'https' is supported"); } if ("http".equals(u.getScheme())) { defaultPort = 80; c = newServerConnector(server, acceptors, config); } else if ("https".equals(u.getScheme())) { SslContextFactory ssl = new SslContextFactory(); final File keystore = getFile(cfg, "sslkeystore", "etc/keystore"); String password = cfg.getString("httpd", null, "sslkeypassword"); if (password == null) { password = "gerrit"; } ssl.setKeyStorePath(keystore.getAbsolutePath()); ssl.setTrustStorePath(keystore.getAbsolutePath()); ssl.setKeyStorePassword(password); ssl.setTrustStorePassword(password); if (AuthType.CLIENT_SSL_CERT_LDAP.equals(authType)) { ssl.setNeedClientAuth(true); File crl = getFile(cfg, "sslcrl", "etc/crl.pem"); if (crl.exists()) { ssl.setCrlPath(crl.getAbsolutePath()); ssl.setValidatePeerCerts(true); } } defaultPort = 443; config.addCustomizer(new SecureRequestCustomizer()); c = new ServerConnector(server, null, null, null, 0, acceptors, new SslConnectionFactory(ssl, "http/1.1"), new HttpConnectionFactory(config)); } else if ("proxy-http".equals(u.getScheme())) { defaultPort = 8080; config.addCustomizer(new ForwardedRequestCustomizer()); c = newServerConnector(server, acceptors, config); } else if ("proxy-https".equals(u.getScheme())) { defaultPort = 8080; config.addCustomizer(new ForwardedRequestCustomizer()); config.addCustomizer(new HttpConfiguration.Customizer() { @Override public void customize(Connector connector, HttpConfiguration channelConfig, Request request) { request.setScheme(HttpScheme.HTTPS.asString()); request.setSecure(true); } }); c = newServerConnector(server, acceptors, config); } else { throw new IllegalArgumentException("Protocol '" + u.getScheme() + "' " + " not supported in httpd.listenurl '" + u + "';" + " only 'http', 'https', 
'proxy-http, 'proxy-https'" + " are supported"); } try { if (u.getHost() == null && (u.getAuthority().equals("*") // || u.getAuthority().startsWith("*:"))) { // Bind to all local addresses. Port wasn't parsed right by URI // due to the illegal host of "*" so replace with a legal name // and parse the URI. // final URI r = new URI(u.toString().replace('*', 'A')).parseServerAuthority(); c.setHost(null); c.setPort(0 < r.getPort() ? r.getPort() : defaultPort); } else { final URI r = u.parseServerAuthority(); c.setHost(r.getHost()); c.setPort(0 <= r.getPort() ? r.getPort() : defaultPort); } } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid httpd.listenurl " + u, e); } c.setReuseAddress(reuseAddress); connectors[idx] = c; } return connectors; } private static ServerConnector newServerConnector(Server server, int acceptors, HttpConfiguration config) { return new ServerConnector(server, null, null, null, 0, acceptors, new HttpConnectionFactory(config)); } private HttpConfiguration defaultConfig(int requestHeaderSize) { HttpConfiguration config = new HttpConfiguration(); config.setRequestHeaderSize(requestHeaderSize); config.setSendServerVersion(false); config.setSendDateHeader(true); return config; } static boolean isReverseProxied(final URI[] listenUrls) { for (URI u : listenUrls) { if ("http".equals(u.getScheme()) || "https".equals(u.getScheme())) { return false; } } return true; } static URI[] listenURLs(final Config cfg) { String[] urls = cfg.getStringList("httpd", null, "listenurl"); if (urls.length == 0) { urls = new String[] {"http://*:8080/"}; } final URI[] r = new URI[urls.length]; for (int i = 0; i < r.length; i++) { final String s = urls[i]; try { r[i] = new URI(s); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid httpd.listenurl " + s, e); } } return r; } private File getFile(final Config cfg, final String name, final String def) { String path = cfg.getString("httpd", null, name); if (path == null || 
path.length() == 0) { path = def; } return site.resolve(path); }

  /**
   * Builds the Jetty request thread pool from the httpd section of the
   * config (defaults: 25 max threads, 5 min threads, 50 queued requests,
   * 60s idle timeout). A maxqueued of 0 means an effectively unbounded
   * queue.
   */
  private ThreadPool threadPool(Config cfg) {
    int maxThreads = cfg.getInt("httpd", null, "maxthreads", 25);
    int minThreads = cfg.getInt("httpd", null, "minthreads", 5);
    int maxQueued = cfg.getInt("httpd", null, "maxqueued", 50);
    int idleTimeout = (int)MILLISECONDS.convert(60, SECONDS);
    // Never let the queue capacity drop below the thread floor, and treat
    // 0 as "no limit".
    int maxCapacity = maxQueued == 0
        ? Integer.MAX_VALUE
        : Math.max(minThreads, maxQueued);
    QueuedThreadPool pool = new QueuedThreadPool(
        maxThreads,
        minThreads,
        idleTimeout,
        new BlockingArrayQueue<Runnable>(
            minThreads, // capacity,
            minThreads, // growBy,
            maxCapacity // maxCapacity
        ));
    pool.setName("HTTP");
    return pool;
  }

  /**
   * Creates one servlet context per distinct context path found in the
   * configured listen URLs (normalized: empty path becomes "/", trailing
   * slashes stripped), wrapped in a ContextHandlerCollection only when
   * there is more than one.
   */
  private Handler makeContext(final JettyEnv env, final Config cfg)
      throws MalformedURLException, IOException {
    final Set<String> paths = new HashSet<String>();
    for (URI u : listenURLs(cfg)) {
      String p = u.getPath();
      if (p == null || p.isEmpty()) {
        p = "/";
      }
      while (1 < p.length() && p.endsWith("/")) {
        p = p.substring(0, p.length() - 1);
      }
      paths.add(p);
    }

    final List<ContextHandler> all = new ArrayList<ContextHandler>();
    for (String path : paths) {
      all.add(makeContext(path, env, cfg));
    }

    if (all.size() == 1) {
      // If we only have one context path in our web space, return it
      // without any wrapping so Jetty has less work to do per-request.
      //
      return all.get(0);
    } else {
      // We have more than one path served out of this container so
      // combine them in a handler which supports dispatching to the
      // individual contexts.
      //
      final ContextHandlerCollection r = new ContextHandlerCollection();
      r.setHandlers(all.toArray(new Handler[0]));
      return r;
    }
  }

  /**
   * Assembles a single servlet context: sessions, error handling, static
   * resources from the WAR, an optional configured front-end filter, the
   * Guice dispatch filter, and Jetty's DefaultServlet as the required
   * fallback servlet.
   */
  private ContextHandler makeContext(final String contextPath,
      final JettyEnv env, final Config cfg)
      throws MalformedURLException, IOException {
    final ServletContextHandler app = new ServletContextHandler();

    // This enables the use of sessions in Jetty, feature available
    // for Gerrit plug-ins to enable user-level sessions.
    //
    app.setSessionHandler(new SessionHandler());
    app.setErrorHandler(new HiddenErrorHandler());

    // This is the path we are accessed by clients within our domain.
    //
    app.setContextPath(contextPath);

    // Serve static resources directly from our JAR. This way we don't
    // need to unpack them into yet another temporary directory prior to
    // serving to clients.
    //
    app.setBaseResource(getBaseResource(app));

    // HTTP front-end filter to be used as surrogate of Apache HTTP
    // reverse-proxy filtering.
    // It is meant to be used as simpler tiny deployment of custom-made
    // security enforcement (Security tokens, IP-based security filtering, others)
    String filterClassName = cfg.getString("httpd", null, "filterClass");
    if (filterClassName != null) {
      try {
        // The filter class is loaded reflectively from config and
        // instantiated through Guice so it can receive injections.
        @SuppressWarnings("unchecked")
        Class<? extends Filter> filterClass =
            (Class<? extends Filter>) Class.forName(filterClassName);
        Filter filter = env.webInjector.getInstance(filterClass);
        app.addFilter(new FilterHolder(filter), "/*",
            EnumSet.of(DispatcherType.REQUEST, DispatcherType.ASYNC));
      } catch (Throwable e) {
        // A misconfigured filter is fatal for this context: log and
        // rethrow with the offending class name.
        String errorMessage =
            "Unable to instantiate front-end HTTP Filter " + filterClassName;
        log.error(errorMessage, e);
        throw new IllegalArgumentException(errorMessage, e);
      }
    }

    // Perform the same binding as our web.xml would do, but instead
    // of using the listener to create the injector pass the one we
    // already have built.
    //
    GuiceFilter filter = env.webInjector.getInstance(GuiceFilter.class);
    app.addFilter(new FilterHolder(filter), "/*", EnumSet.of(
        DispatcherType.REQUEST,
        DispatcherType.ASYNC));
    app.addEventListener(new GuiceServletContextListener() {
      @Override
      protected Injector getInjector() {
        return env.webInjector;
      }
    });

    // Jetty requires at least one servlet be bound before it will
    // bother running the filter above. Since the filter has all
    // of our URLs except the static resources, the only servlet
    // we need to bind is the default static resource servlet from
    // the Jetty container.
    //
    final ServletHolder ds = app.addServlet(DefaultServlet.class, "/");
    // Lock down the static servlet: no directory listings, no welcome
    // redirects, no memory-mapped buffers; gzip compression on.
    ds.setInitParameter("dirAllowed", "false");
    ds.setInitParameter("redirectWelcome", "false");
    ds.setInitParameter("useFileMappedBuffer", "false");
    ds.setInitParameter("gzip", "true");
    app.setWelcomeFiles(new String[0]);
    return app;
  }

  /**
   * Lazily resolves the static resource base: unpack the distribution
   * WAR, or fall back to the developer build when we are not running
   * from an archive. The result is cached in {@code baseResource}.
   */
  private Resource getBaseResource(ServletContextHandler app)
      throws IOException {
    if (baseResource == null) {
      try {
        baseResource = unpackWar(GerritLauncher.getDistributionArchive());
      } catch (FileNotFoundException err) {
        // NOT_ARCHIVED is the launcher's sentinel message meaning "we are
        // running from a source tree, not a packaged WAR".
        if (GerritLauncher.NOT_ARCHIVED.equals(err.getMessage())) {
          baseResource = useDeveloperBuild(app);
        } else {
          throw err;
        }
      }
    }
    return baseResource;
  }

  /** Unpacks the WAR into a fresh temp directory and wraps it as a Resource. */
  private static Resource unpackWar(File srcwar) throws IOException {
    File dstwar = makeWarTempDir();
    unpack(srcwar, dstwar);
    return Resource.newResource(dstwar.toURI());
  }

  /**
   * Creates a temporary directory to hold the unpacked WAR contents,
   * resolved to a canonical (symlink-free) path where possible.
   */
  private static File makeWarTempDir() throws IOException {
    // Obtain our local temporary directory, but it comes back as a file
    // so we have to switch it to be a directory post creation.
    //
    File dstwar = GerritLauncher.createTempFile("gerrit_", "war");
    if (!dstwar.delete() || !dstwar.mkdir()) {
      throw new IOException("Cannot mkdir " + dstwar.getAbsolutePath());
    }

    // Jetty normally refuses to serve out of a symlinked directory, as
    // a security feature. Try to resolve out any symlinks in the path.
    //
    try {
      return dstwar.getCanonicalFile();
    } catch (IOException e) {
      return dstwar.getAbsoluteFile();
    }
  }

  /**
   * Extracts srcwar's entries into dstwar, skipping directories and
   * server-internal content (WEB-INF, META-INF, the launcher classes).
   * Extracted files are marked deleteOnExit so the temp tree is cleaned
   * up at JVM shutdown.
   */
  private static void unpack(File srcwar, File dstwar) throws IOException {
    final ZipFile zf = new ZipFile(srcwar);
    try {
      final Enumeration<? extends ZipEntry> e = zf.entries();
      while (e.hasMoreElements()) {
        final ZipEntry ze = e.nextElement();
        final String name = ze.getName();

        if (ze.isDirectory()) continue;
        if (name.startsWith("WEB-INF/")) continue;
        if (name.startsWith("META-INF/")) continue;
        if (name.startsWith("com/google/gerrit/launcher/")) continue;
        if (name.equals("Main.class")) continue;

        final File rawtmp = new File(dstwar, name);
        mkdir(rawtmp.getParentFile());
        rawtmp.deleteOnExit();

        final FileOutputStream rawout = new FileOutputStream(rawtmp);
        try {
          final InputStream in = zf.getInputStream(ze);
          try {
            // Plain 4 KiB buffered copy from the zip entry to disk.
            final byte[] buf = new byte[4096];
            int n;
            while ((n = in.read(buf, 0, buf.length)) > 0) {
              rawout.write(buf, 0, n);
            }
          } finally {
            in.close();
          }
        } finally {
          rawout.close();
        }
      }
    } finally {
      zf.close();
    }
  }

  /**
   * Recursively creates dir (and its parents), marking each created
   * level deleteOnExit; fails with IOException if a level cannot be made.
   */
  private static void mkdir(File dir) throws IOException {
    if (!dir.isDirectory()) {
      mkdir(dir.getParentFile());
      if (!dir.mkdir())
        throw new IOException("Cannot mkdir " + dir.getAbsolutePath());
      dir.deleteOnExit();
    }
  }

  /**
   * Serves the UI out of a developer source tree: registers a filter that
   * runs `buck build` for the user-agent-specific GWT permutation on each
   * request, unpacking the freshly built zip only when the target or its
   * timestamp changed.
   */
  private Resource useDeveloperBuild(ServletContextHandler app)
      throws IOException {
    final File dir = GerritLauncher.getDeveloperBuckOut();
    final File gen = new File(dir, "gen");
    final File root = dir.getParentFile();
    final File dstwar = makeWarTempDir();
    File ui = new File(dstwar, "gerrit_ui");
    File p = new File(ui, "permutations");
    mkdir(ui);
    p.createNewFile();
    p.deleteOnExit();

    app.addFilter(new FilterHolder(new Filter() {
      private final UserAgentRule rule = new UserAgentRule();
      private String lastTarget;
      private long lastTime;

      @Override
      public void doFilter(ServletRequest request, ServletResponse res,
          FilterChain chain) throws IOException, ServletException {
        String pkg = "gerrit-gwtui";
        String target = "ui_" + rule.select((HttpServletRequest) request);
        // NOTE(review): this local `rule` shadows the UserAgentRule field
        // of the same name above; consider renaming one of them.
        String rule = "//" + pkg + ":" + target;
        File zip = new File(new File(gen, pkg), target + ".zip");

        // Serialize builds/unpacks so concurrent requests don't race on
        // the shared dstwar tree.
        synchronized (this) {
          try {
            build(root, gen, rule);
          } catch (BuildFailureException e) {
            displayFailure(rule, e.why, (HttpServletResponse) res);
            return;
          }
          // Re-unpack only when the permutation or the zip changed.
          if (!target.equals(lastTarget) || lastTime != zip.lastModified()) {
            lastTarget = target;
            lastTime = zip.lastModified();
            unpack(zip, dstwar);
          }
        }
        chain.doFilter(request, res);
      }

      /** Renders the captured buck output as an HTML 500 error page. */
      private void displayFailure(String rule, byte[] why,
          HttpServletResponse res) throws IOException {
        res.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        res.setContentType("text/html");
        res.setCharacterEncoding(Charsets.UTF_8.name());
        CacheHeaders.setNotCacheable(res);

        Escaper html = HtmlEscapers.htmlEscaper();
        PrintWriter w = res.getWriter();
        w.write("<html><title>BUILD FAILED</title><body>");
        w.format("<h1>%s FAILED</h1>", html.escape(rule));
        w.write("<pre>");
        w.write(html.escape(RawParseUtils.decode(why)));
        w.write("</pre>");
        w.write("</body></html>");
        w.close();
      }

      @Override
      public void init(FilterConfig config) {
      }

      @Override
      public void destroy() {
      }
    }), "/", EnumSet.of(DispatcherType.REQUEST));
    return Resource.newResource(dstwar.toURI());
  }

  /**
   * Runs `buck build <target>` under root, with stderr merged into
   * stdout; throws BuildFailureException carrying the captured output on
   * a non-zero exit status.
   */
  private static void build(File root, File gen, String target)
      throws IOException, BuildFailureException {
    log.info("buck build " + target);
    Properties properties = loadBuckProperties(gen);
    String buck = Objects.firstNonNull(properties.getProperty("buck"), "buck");
    ProcessBuilder proc = new ProcessBuilder(buck, "build", target)
        .directory(root)
        .redirectErrorStream(true);
    if (properties.containsKey("PATH")) {
      proc.environment().put("PATH", properties.getProperty("PATH"));
    }
    long start = TimeUtil.nowMs();
    Process rebuild = proc.start();
    byte[] out;
    InputStream in = rebuild.getInputStream();
    try {
      out = ByteStreams.toByteArray(in);
    } finally {
      rebuild.getOutputStream().close();
      in.close();
    }
    int status;
    try {
      status = rebuild.waitFor();
    } catch (InterruptedException e) {
      // NOTE(review): the interrupt flag is not restored before this
      // rethrow; consider Thread.currentThread().interrupt() first.
      throw new InterruptedIOException("interrupted waiting for " + buck);
    }
    if (status != 0) {
      throw new BuildFailureException(out);
    }
    long time = TimeUtil.nowMs() - start;
    log.info(String.format("UPDATED %s in %.3fs", target, time / 1000.0));
  }

  /** Loads tools/buck.properties from the buck-out gen directory. */
  private static Properties loadBuckProperties(File gen)
      throws FileNotFoundException, IOException {
    Properties properties = new Properties();
    InputStream in = new FileInputStream(
        new File(new File(gen, "tools"), "buck.properties"));
    try {
      properties.load(in);
    } finally {
      in.close();
    }
    return properties;
  }

  /** Signals a failed buck invocation; `why` holds the process output. */
  @SuppressWarnings("serial")
  private static class BuildFailureException extends Exception {
    final byte[] why;

    BuildFailureException(byte[] why) {
      this.why = why;
    }
  }
}
/**
 * Copyright 2015 StreamSets Inc.
 *
 * Licensed under the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.datacollector.client.model;

import com.streamsets.datacollector.client.StringUtil;
import java.util.*;
import com.streamsets.datacollector.client.model.ModelDefinitionJson;

import io.swagger.annotations.*;
import com.fasterxml.jackson.annotation.JsonProperty;

// NOTE: this class was produced by swagger-codegen (see the @Generated
// annotation below). Prefer regenerating from the API spec over editing
// it by hand; manual changes will be lost on the next generation run.
// It is a plain JSON-mapped data holder: nullable fields, Jackson
// @JsonProperty accessors, and a diagnostic toString().
@ApiModel(description = "")
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaClientCodegen", date = "2015-09-11T14:51:29.367-07:00")
public class ConfigDefinitionJson {
  private String name = null;

  // Config value type discriminator; each constant serializes as its own name.
  public enum TypeEnum {
    BOOLEAN("BOOLEAN"), NUMBER("NUMBER"), STRING("STRING"), LIST("LIST"), MAP("MAP"), MODEL("MODEL"), CHARACTER("CHARACTER"), TEXT("TEXT");

    private String value;

    TypeEnum(String value) {
      this.value = value;
    }

    @Override
    public String toString() {
      return value;
    }
  }

  private TypeEnum type = null;
  private Object defaultValue = null;
  private String label = null;
  private String mode = null;
  private Boolean required = null;
  private List<String> elDefs = new ArrayList<String>();
  private List<String> elFunctionDefinitionsIdx = new ArrayList<String>();
  private List<String> elConstantDefinitionsIdx = new ArrayList<String>();
  private ModelDefinitionJson model = null;
  private Integer lines = null;
  private Integer displayPosition = null;
  private Map<String, List<Object>> dependsOnMap = new HashMap<String, List<Object>>();
  private String description = null;
  private String dependsOn = null;
  private List<Object> triggeredByValues = new ArrayList<Object>();
  private Long min = null;
  private String group = null;

  // When the config's EL expressions are evaluated; presumably at
  // pipeline init (IMPLICIT) vs. on demand (EXPLICIT) — TODO confirm
  // against the server-side definition.
  public enum EvaluationEnum {
    IMPLICIT("IMPLICIT"), EXPLICIT("EXPLICIT");

    private String value;

    EvaluationEnum(String value) {
      this.value = value;
    }

    @Override
    public String toString() {
      return value;
    }
  }

  private EvaluationEnum evaluation = null;
  private Long max = null;
  private String fieldName = null;

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("name")
  public String getName() { return name; }
  public void setName(String name) { this.name = name; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("type")
  public TypeEnum getType() { return type; }
  public void setType(TypeEnum type) { this.type = type; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("defaultValue")
  public Object getDefaultValue() { return defaultValue; }
  public void setDefaultValue(Object defaultValue) { this.defaultValue = defaultValue; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("label")
  public String getLabel() { return label; }
  public void setLabel(String label) { this.label = label; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("mode")
  public String getMode() { return mode; }
  public void setMode(String mode) { this.mode = mode; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("required")
  public Boolean getRequired() { return required; }
  public void setRequired(Boolean required) { this.required = required; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("elDefs")
  public List<String> getElDefs() { return elDefs; }
  public void setElDefs(List<String> elDefs) { this.elDefs = elDefs; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("elFunctionDefinitionsIdx")
  public List<String> getElFunctionDefinitionsIdx() { return elFunctionDefinitionsIdx; }
  public void setElFunctionDefinitionsIdx(List<String> elFunctionDefinitionsIdx) { this.elFunctionDefinitionsIdx = elFunctionDefinitionsIdx; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("elConstantDefinitionsIdx")
  public List<String> getElConstantDefinitionsIdx() { return elConstantDefinitionsIdx; }
  public void setElConstantDefinitionsIdx(List<String> elConstantDefinitionsIdx) { this.elConstantDefinitionsIdx = elConstantDefinitionsIdx; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("model")
  public ModelDefinitionJson getModel() { return model; }
  public void setModel(ModelDefinitionJson model) { this.model = model; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lines")
  public Integer getLines() { return lines; }
  public void setLines(Integer lines) { this.lines = lines; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("displayPosition")
  public Integer getDisplayPosition() { return displayPosition; }
  public void setDisplayPosition(Integer displayPosition) { this.displayPosition = displayPosition; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("dependsOnMap")
  public Map<String, List<Object>> getDependsOnMap() { return dependsOnMap; }
  public void setDependsOnMap(Map<String, List<Object>> dependsOnMap) { this.dependsOnMap = dependsOnMap; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("description")
  public String getDescription() { return description; }
  public void setDescription(String description) { this.description = description; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("dependsOn")
  public String getDependsOn() { return dependsOn; }
  public void setDependsOn(String dependsOn) { this.dependsOn = dependsOn; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("triggeredByValues")
  public List<Object> getTriggeredByValues() { return triggeredByValues; }
  public void setTriggeredByValues(List<Object> triggeredByValues) { this.triggeredByValues = triggeredByValues; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("min")
  public Long getMin() { return min; }
  public void setMin(Long min) { this.min = min; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("group")
  public String getGroup() { return group; }
  public void setGroup(String group) { this.group = group; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("evaluation")
  public EvaluationEnum getEvaluation() { return evaluation; }
  public void setEvaluation(EvaluationEnum evaluation) { this.evaluation = evaluation; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("max")
  public Long getMax() { return max; }
  public void setMax(Long max) { this.max = max; }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("fieldName")
  public String getFieldName() { return fieldName; }
  public void setFieldName(String fieldName) { this.fieldName = fieldName; }

  // Debug/diagnostic dump of every field; not a serialization format.
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class ConfigDefinitionJson {\n");
    sb.append(" name: ").append(StringUtil.toIndentedString(name)).append("\n");
    sb.append(" type: ").append(StringUtil.toIndentedString(type)).append("\n");
    sb.append(" defaultValue: ").append(StringUtil.toIndentedString(defaultValue)).append("\n");
    sb.append(" label: ").append(StringUtil.toIndentedString(label)).append("\n");
    sb.append(" mode: ").append(StringUtil.toIndentedString(mode)).append("\n");
    sb.append(" required: ").append(StringUtil.toIndentedString(required)).append("\n");
    sb.append(" elDefs: ").append(StringUtil.toIndentedString(elDefs)).append("\n");
    sb.append(" elFunctionDefinitionsIdx: ").append(StringUtil.toIndentedString(elFunctionDefinitionsIdx)).append("\n");
    sb.append(" elConstantDefinitionsIdx: ").append(StringUtil.toIndentedString(elConstantDefinitionsIdx)).append("\n");
    sb.append(" model: ").append(StringUtil.toIndentedString(model)).append("\n");
    sb.append(" lines: ").append(StringUtil.toIndentedString(lines)).append("\n");
    sb.append(" displayPosition: ").append(StringUtil.toIndentedString(displayPosition)).append("\n");
    sb.append(" dependsOnMap: ").append(StringUtil.toIndentedString(dependsOnMap)).append("\n");
    sb.append(" description: ").append(StringUtil.toIndentedString(description)).append("\n");
    sb.append(" dependsOn: ").append(StringUtil.toIndentedString(dependsOn)).append("\n");
    sb.append(" triggeredByValues: ").append(StringUtil.toIndentedString(triggeredByValues)).append("\n");
    sb.append(" min: ").append(StringUtil.toIndentedString(min)).append("\n");
    sb.append(" group: ").append(StringUtil.toIndentedString(group)).append("\n");
    sb.append(" evaluation: ").append(StringUtil.toIndentedString(evaluation)).append("\n");
    sb.append(" max: ").append(StringUtil.toIndentedString(max)).append("\n");
    sb.append(" fieldName: ").append(StringUtil.toIndentedString(fieldName)).append("\n");
    sb.append("}");
    return sb.toString();
  }
}
/*
 * Copyright 2002-2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.validation;

import java.io.Serializable;
import java.util.Collections;
import java.util.EmptyStackException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Stack;

import org.springframework.util.StringUtils;

/**
 * Abstract implementation of the {@link Errors} interface. Provides common
 * access to evaluated errors; however, does not define concrete management
 * of {@link ObjectError ObjectErrors} and {@link FieldError FieldErrors}.
 *
 * @author Juergen Hoeller
 * @since 2.5.3
 */
public abstract class AbstractErrors implements Errors, Serializable {

	/** Current nested path, always either empty or ending with the separator. */
	private String nestedPath = "";

	/** Saved nested paths for pushNestedPath/popNestedPath pairing. */
	private final Stack nestedPathStack = new Stack();


	/**
	 * Set the nested path and discard any previously pushed paths,
	 * since an absolute path invalidates the push/pop history.
	 */
	public void setNestedPath(String nestedPath) {
		doSetNestedPath(nestedPath);
		this.nestedPathStack.clear();
	}

	public String getNestedPath() {
		return this.nestedPath;
	}

	/**
	 * Save the current nested path on the stack, then append the given
	 * sub-path to it.
	 */
	public void pushNestedPath(String subPath) {
		this.nestedPathStack.push(getNestedPath());
		doSetNestedPath(getNestedPath() + subPath);
	}

	/**
	 * Restore the nested path saved by the matching {@link #pushNestedPath}.
	 * @throws IllegalStateException if there is no saved path on the stack
	 */
	// Fixed: the throws clause previously declared IllegalArgumentException,
	// but the method throws (and the Errors contract specifies)
	// IllegalStateException. Both are unchecked, so this is call-compatible.
	public void popNestedPath() throws IllegalStateException {
		try {
			String formerNestedPath = (String) this.nestedPathStack.pop();
			doSetNestedPath(formerNestedPath);
		}
		catch (EmptyStackException ex) {
			throw new IllegalStateException("Cannot pop nested path: no nested path on stack");
		}
	}

	/**
	 * Actually set the nested path.
	 * Delegated to by setNestedPath and pushNestedPath.
	 * <p>Canonicalizes the path and guarantees a trailing separator
	 * (unless the path is empty).
	 */
	protected void doSetNestedPath(String nestedPath) {
		if (nestedPath == null) {
			nestedPath = "";
		}
		nestedPath = canonicalFieldName(nestedPath);
		if (nestedPath.length() > 0 && !nestedPath.endsWith(Errors.NESTED_PATH_SEPARATOR)) {
			nestedPath += Errors.NESTED_PATH_SEPARATOR;
		}
		this.nestedPath = nestedPath;
	}

	/**
	 * Transform the given field into its full path,
	 * regarding the nested path of this instance.
	 * <p>An empty field refers to the nested object itself, so the
	 * trailing separator is stripped from the nested path in that case.
	 */
	protected String fixedField(String field) {
		if (StringUtils.hasLength(field)) {
			return getNestedPath() + canonicalFieldName(field);
		}
		else {
			String path = getNestedPath();
			return (path.endsWith(Errors.NESTED_PATH_SEPARATOR) ?
					path.substring(0, path.length() - Errors.NESTED_PATH_SEPARATOR.length()) : path);
		}
	}

	/**
	 * Determine the canonical field name for the given field.
	 * <p>The default implementation simply returns the field name as-is.
	 * @param field the original field name
	 * @return the canonical field name
	 */
	protected String canonicalFieldName(String field) {
		return field;
	}


	public void reject(String errorCode) {
		reject(errorCode, null, null);
	}

	public void reject(String errorCode, String defaultMessage) {
		reject(errorCode, null, defaultMessage);
	}

	public void rejectValue(String field, String errorCode) {
		rejectValue(field, errorCode, null, null);
	}

	public void rejectValue(String field, String errorCode, String defaultMessage) {
		rejectValue(field, errorCode, null, defaultMessage);
	}


	public boolean hasErrors() {
		return !getAllErrors().isEmpty();
	}

	public int getErrorCount() {
		return getAllErrors().size();
	}

	/** Global errors first, then field errors; returned list is unmodifiable. */
	public List getAllErrors() {
		List result = new LinkedList();
		result.addAll(getGlobalErrors());
		result.addAll(getFieldErrors());
		return Collections.unmodifiableList(result);
	}

	public boolean hasGlobalErrors() {
		return (getGlobalErrorCount() > 0);
	}

	public int getGlobalErrorCount() {
		return getGlobalErrors().size();
	}

	/** Returns the first global error, or {@code null} if there is none. */
	public ObjectError getGlobalError() {
		List globalErrors = getGlobalErrors();
		return (!globalErrors.isEmpty() ? (ObjectError) globalErrors.get(0) : null);
	}

	public boolean hasFieldErrors() {
		return (getFieldErrorCount() > 0);
	}

	public int getFieldErrorCount() {
		return getFieldErrors().size();
	}

	/** Returns the first field error, or {@code null} if there is none. */
	public FieldError getFieldError() {
		List fieldErrors = getFieldErrors();
		return (!fieldErrors.isEmpty() ? (FieldError) fieldErrors.get(0) : null);
	}

	public boolean hasFieldErrors(String field) {
		return (getFieldErrorCount(field) > 0);
	}

	public int getFieldErrorCount(String field) {
		return getFieldErrors(field).size();
	}

	/**
	 * Return the errors whose field matches the given field (after
	 * resolving it against the nested path); supports a trailing "*"
	 * wildcard via {@link #isMatchingFieldError}.
	 */
	public List getFieldErrors(String field) {
		List fieldErrors = getFieldErrors();
		List result = new LinkedList();
		String fixedField = fixedField(field);
		for (Iterator it = fieldErrors.iterator(); it.hasNext();) {
			Object error = it.next();
			if (isMatchingFieldError(fixedField, (FieldError) error)) {
				result.add(error);
			}
		}
		return Collections.unmodifiableList(result);
	}

	/** Returns the first error for the given field, or {@code null}. */
	public FieldError getFieldError(String field) {
		List fieldErrors = getFieldErrors(field);
		return (!fieldErrors.isEmpty() ? (FieldError) fieldErrors.get(0) : null);
	}

	/**
	 * Returns the runtime class of the field's current value, or
	 * {@code null} when the value itself is {@code null}.
	 */
	public Class getFieldType(String field) {
		Object value = getFieldValue(field);
		if (value != null) {
			return value.getClass();
		}
		return null;
	}

	/**
	 * Check whether the given FieldError matches the given field.
	 * @param field the field that we are looking up FieldErrors for
	 * @param fieldError the candidate FieldError
	 * @return whether the FieldError matches the given field
	 */
	protected boolean isMatchingFieldError(String field, FieldError fieldError) {
		return (field.equals(fieldError.getField()) ||
				(field.endsWith("*") && fieldError.getField().startsWith(field.substring(0, field.length() - 1))));
	}


	public String toString() {
		StringBuffer sb = new StringBuffer(getClass().getName());
		sb.append(": ").append(getErrorCount()).append(" errors");
		Iterator it = getAllErrors().iterator();
		while (it.hasNext()) {
			sb.append('\n').append(it.next());
		}
		return sb.toString();
	}

}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/datastore/v1/datastore.proto package com.google.datastore.v1; /** * <pre> * The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. * </pre> * * Protobuf type {@code google.datastore.v1.LookupRequest} */ public final class LookupRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.datastore.v1.LookupRequest) LookupRequestOrBuilder { // Use LookupRequest.newBuilder() to construct. private LookupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private LookupRequest() { projectId_ = ""; keys_ = java.util.Collections.emptyList(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } private LookupRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); int mutable_bitField0_ = 0; try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!input.skipField(tag)) { done = true; } break; } case 10: { com.google.datastore.v1.ReadOptions.Builder subBuilder = null; if (readOptions_ != null) { subBuilder = readOptions_.toBuilder(); } readOptions_ = input.readMessage(com.google.datastore.v1.ReadOptions.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(readOptions_); readOptions_ = subBuilder.buildPartial(); } break; } case 26: { if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { keys_ = new java.util.ArrayList<com.google.datastore.v1.Key>(); mutable_bitField0_ |= 0x00000004; } keys_.add( input.readMessage(com.google.datastore.v1.Key.parser(), extensionRegistry)); break; } case 66: { java.lang.String s = input.readStringRequireUtf8(); projectId_ = s; 
break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { keys_ = java.util.Collections.unmodifiableList(keys_); } makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.datastore.v1.DatastoreProto.internal_static_google_datastore_v1_LookupRequest_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.datastore.v1.DatastoreProto.internal_static_google_datastore_v1_LookupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.datastore.v1.LookupRequest.class, com.google.datastore.v1.LookupRequest.Builder.class); } private int bitField0_; public static final int PROJECT_ID_FIELD_NUMBER = 8; private volatile java.lang.Object projectId_; /** * <pre> * The ID of the project against which to make the request. * </pre> * * <code>optional string project_id = 8;</code> */ public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } } /** * <pre> * The ID of the project against which to make the request. 
* </pre> * * <code>optional string project_id = 8;</code> */ public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int READ_OPTIONS_FIELD_NUMBER = 1; private com.google.datastore.v1.ReadOptions readOptions_; /** * <pre> * The options for this lookup request. * </pre> * * <code>optional .google.datastore.v1.ReadOptions read_options = 1;</code> */ public boolean hasReadOptions() { return readOptions_ != null; } /** * <pre> * The options for this lookup request. * </pre> * * <code>optional .google.datastore.v1.ReadOptions read_options = 1;</code> */ public com.google.datastore.v1.ReadOptions getReadOptions() { return readOptions_ == null ? com.google.datastore.v1.ReadOptions.getDefaultInstance() : readOptions_; } /** * <pre> * The options for this lookup request. * </pre> * * <code>optional .google.datastore.v1.ReadOptions read_options = 1;</code> */ public com.google.datastore.v1.ReadOptionsOrBuilder getReadOptionsOrBuilder() { return getReadOptions(); } public static final int KEYS_FIELD_NUMBER = 3; private java.util.List<com.google.datastore.v1.Key> keys_; /** * <pre> * Keys of entities to look up. * </pre> * * <code>repeated .google.datastore.v1.Key keys = 3;</code> */ public java.util.List<com.google.datastore.v1.Key> getKeysList() { return keys_; } /** * <pre> * Keys of entities to look up. * </pre> * * <code>repeated .google.datastore.v1.Key keys = 3;</code> */ public java.util.List<? extends com.google.datastore.v1.KeyOrBuilder> getKeysOrBuilderList() { return keys_; } /** * <pre> * Keys of entities to look up. * </pre> * * <code>repeated .google.datastore.v1.Key keys = 3;</code> */ public int getKeysCount() { return keys_.size(); } /** * <pre> * Keys of entities to look up. 
* </pre> * * <code>repeated .google.datastore.v1.Key keys = 3;</code> */ public com.google.datastore.v1.Key getKeys(int index) { return keys_.get(index); } /** * <pre> * Keys of entities to look up. * </pre> * * <code>repeated .google.datastore.v1.Key keys = 3;</code> */ public com.google.datastore.v1.KeyOrBuilder getKeysOrBuilder( int index) { return keys_.get(index); } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (readOptions_ != null) { output.writeMessage(1, getReadOptions()); } for (int i = 0; i < keys_.size(); i++) { output.writeMessage(3, keys_.get(i)); } if (!getProjectIdBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 8, projectId_); } } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (readOptions_ != null) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getReadOptions()); } for (int i = 0; i < keys_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, keys_.get(i)); } if (!getProjectIdBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, projectId_); } memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.datastore.v1.LookupRequest)) { return super.equals(obj); } com.google.datastore.v1.LookupRequest other = (com.google.datastore.v1.LookupRequest) obj; boolean result = true; result = result && getProjectId() .equals(other.getProjectId()); result = result && (hasReadOptions() == other.hasReadOptions()); if (hasReadOptions()) { result 
= result && getReadOptions() .equals(other.getReadOptions()); } result = result && getKeysList() .equals(other.getKeysList()); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + getProjectId().hashCode(); if (hasReadOptions()) { hash = (37 * hash) + READ_OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getReadOptions().hashCode(); } if (getKeysCount() > 0) { hash = (37 * hash) + KEYS_FIELD_NUMBER; hash = (53 * hash) + getKeysList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.datastore.v1.LookupRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.datastore.v1.LookupRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.datastore.v1.LookupRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.datastore.v1.LookupRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.datastore.v1.LookupRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.datastore.v1.LookupRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException 
{ return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.datastore.v1.LookupRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.datastore.v1.LookupRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.datastore.v1.LookupRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.datastore.v1.LookupRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.datastore.v1.LookupRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. 
* </pre>
*
* Protobuf type {@code google.datastore.v1.LookupRequest}
*/
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.datastore.v1.LookupRequest)
    com.google.datastore.v1.LookupRequestOrBuilder {
  // NOTE(review): generated by protoc (see the @@protoc_insertion_point
  // markers) — do not hand-edit the logic; regeneration would discard changes.

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.datastore.v1.DatastoreProto.internal_static_google_datastore_v1_LookupRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.datastore.v1.DatastoreProto.internal_static_google_datastore_v1_LookupRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.datastore.v1.LookupRequest.class, com.google.datastore.v1.LookupRequest.Builder.class);
  }

  // Construct using com.google.datastore.v1.LookupRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates nested field builders when the runtime requests it
  // (alwaysUseFieldBuilders), so change notifications propagate to parents.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getKeysFieldBuilder();
    }
  }

  /** Resets every field to its default and drops any nested builders. */
  public Builder clear() {
    super.clear();
    projectId_ = "";

    if (readOptionsBuilder_ == null) {
      readOptions_ = null;
    } else {
      readOptions_ = null;
      readOptionsBuilder_ = null;
    }
    if (keysBuilder_ == null) {
      keys_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
    } else {
      keysBuilder_.clear();
    }
    return this;
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.datastore.v1.DatastoreProto.internal_static_google_datastore_v1_LookupRequest_descriptor;
  }

  public com.google.datastore.v1.LookupRequest getDefaultInstanceForType() {
    return com.google.datastore.v1.LookupRequest.getDefaultInstance();
  }

  /** Builds the message; throws if required invariants are not satisfied. */
  public com.google.datastore.v1.LookupRequest build() {
    com.google.datastore.v1.LookupRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  /** Builds without the initialization check. */
  public com.google.datastore.v1.LookupRequest buildPartial() {
    com.google.datastore.v1.LookupRequest result = new com.google.datastore.v1.LookupRequest(this);
    // from_/to_bitField0_ come from the generator's template; this message
    // tracks no singular-field presence bits here, so to_bitField0_ stays 0
    // and from_bitField0_ is never read.
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    result.projectId_ = projectId_;
    if (readOptionsBuilder_ == null) {
      result.readOptions_ = readOptions_;
    } else {
      result.readOptions_ = readOptionsBuilder_.build();
    }
    if (keysBuilder_ == null) {
      // Bit 0x04 means "keys_ is our private mutable copy"; freeze it before
      // handing it to the immutable message.
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        keys_ = java.util.Collections.unmodifiableList(keys_);
        bitField0_ = (bitField0_ & ~0x00000004);
      }
      result.keys_ = keys_;
    } else {
      result.keys_ = keysBuilder_.build();
    }
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  public Builder clone() {
    return (Builder) super.clone();
  }
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      Object value) {
    return (Builder) super.setField(field, value);
  }
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return (Builder) super.clearField(field);
  }
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return (Builder) super.clearOneof(oneof);
  }
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, Object value) {
    return (Builder) super.setRepeatedField(field, index, value);
  }
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      Object value) {
    return (Builder) super.addRepeatedField(field, value);
  }
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.datastore.v1.LookupRequest) {
      return mergeFrom((com.google.datastore.v1.LookupRequest)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Field-wise merge: non-default scalars overwrite, keys are appended. */
  public Builder mergeFrom(com.google.datastore.v1.LookupRequest other) {
    if (other ==
 com.google.datastore.v1.LookupRequest.getDefaultInstance()) return this;
    if (!other.getProjectId().isEmpty()) {
      projectId_ = other.projectId_;
      onChanged();
    }
    if (other.hasReadOptions()) {
      mergeReadOptions(other.getReadOptions());
    }
    if (keysBuilder_ == null) {
      if (!other.keys_.isEmpty()) {
        if (keys_.isEmpty()) {
          // Share the other message's (immutable) list instead of copying.
          keys_ = other.keys_;
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          ensureKeysIsMutable();
          keys_.addAll(other.keys_);
        }
        onChanged();
      }
    } else {
      if (!other.keys_.isEmpty()) {
        if (keysBuilder_.isEmpty()) {
          keysBuilder_.dispose();
          keysBuilder_ = null;
          keys_ = other.keys_;
          bitField0_ = (bitField0_ & ~0x00000004);
          keysBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
               getKeysFieldBuilder() : null;
        } else {
          keysBuilder_.addAllMessages(other.keys_);
        }
      }
    }
    onChanged();
    return this;
  }

  public final boolean isInitialized() {
    return true;
  }

  /**
   * Parses from the wire and merges into this builder. On a parse failure the
   * partially-parsed message is still merged (in the finally block) before
   * the unwrapped IOException is rethrown.
   */
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.datastore.v1.LookupRequest parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (com.google.datastore.v1.LookupRequest) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // Bit 0x00000004 flags whether keys_ is a builder-private mutable list.
  private int bitField0_;

  // Either a String or a lazily-decoded ByteString (see getProjectId()).
  private java.lang.Object projectId_ = "";
  /** The ID of the project against which to make the request. ({@code string project_id = 8}) */
  public java.lang.String getProjectId() {
    java.lang.Object ref = projectId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded form so subsequent reads skip the UTF-8 decode.
      projectId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /** UTF-8 bytes of {@code project_id}; caches the encoded form. */
  public com.google.protobuf.ByteString
      getProjectIdBytes() {
    java.lang.Object ref = projectId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      projectId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /** Sets {@code project_id}; rejects null. */
  public Builder setProjectId(
      java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }

    projectId_ = value;
    onChanged();
    return this;
  }
  /** Resets {@code project_id} to its default (empty string). */
  public Builder clearProjectId() {

    projectId_ = getDefaultInstance().getProjectId();
    onChanged();
    return this;
  }
  /** Sets {@code project_id} from raw bytes; validates that they are UTF-8. */
  public Builder setProjectIdBytes(
      com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);

    projectId_ = value;
    onChanged();
    return this;
  }

  // read_options is held either directly (readOptions_) or via the nested
  // single-field builder (readOptionsBuilder_) — never both.
  private com.google.datastore.v1.ReadOptions readOptions_ = null;
  private com.google.protobuf.SingleFieldBuilderV3<
      com.google.datastore.v1.ReadOptions, com.google.datastore.v1.ReadOptions.Builder, com.google.datastore.v1.ReadOptionsOrBuilder> readOptionsBuilder_;
  /** The options for this lookup request. ({@code .google.datastore.v1.ReadOptions read_options = 1}) */
  public boolean hasReadOptions() {
    return readOptionsBuilder_ != null || readOptions_ != null;
  }
  /** Returns {@code read_options}, or the default instance when unset. */
  public com.google.datastore.v1.ReadOptions getReadOptions() {
    if (readOptionsBuilder_ == null) {
      return readOptions_ == null ? com.google.datastore.v1.ReadOptions.getDefaultInstance() : readOptions_;
    } else {
      return readOptionsBuilder_.getMessage();
    }
  }
  /** Sets {@code read_options}; rejects null. */
  public Builder setReadOptions(com.google.datastore.v1.ReadOptions value) {
    if (readOptionsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      readOptions_ = value;
      onChanged();
    } else {
      readOptionsBuilder_.setMessage(value);
    }
    return this;
  }
  /** Sets {@code read_options} from a builder. */
  public Builder setReadOptions(
      com.google.datastore.v1.ReadOptions.Builder builderForValue) {
    if (readOptionsBuilder_ == null) {
      readOptions_ = builderForValue.build();
      onChanged();
    } else {
      readOptionsBuilder_.setMessage(builderForValue.build());
    }
    return this;
  }
  /** Merges {@code value} into the existing {@code read_options} (protobuf merge semantics). */
  public Builder mergeReadOptions(com.google.datastore.v1.ReadOptions value) {
    if (readOptionsBuilder_ == null) {
      if (readOptions_ != null) {
        readOptions_ =
          com.google.datastore.v1.ReadOptions.newBuilder(readOptions_).mergeFrom(value).buildPartial();
      } else {
        readOptions_ = value;
      }
      onChanged();
    } else {
      readOptionsBuilder_.mergeFrom(value);
    }
    return this;
  }
  /** Clears {@code read_options} and discards its nested builder. */
  public Builder clearReadOptions() {
    if (readOptionsBuilder_ == null) {
      readOptions_ = null;
      onChanged();
    } else {
      readOptions_ = null;
      readOptionsBuilder_ = null;
    }
    return this;
  }
  /** Returns a mutable builder for {@code read_options}, creating it on demand. */
  public com.google.datastore.v1.ReadOptions.Builder getReadOptionsBuilder() {

    onChanged();
    return getReadOptionsFieldBuilder().getBuilder();
  }
  /** Read-only view of {@code read_options} without forcing builder creation. */
  public com.google.datastore.v1.ReadOptionsOrBuilder getReadOptionsOrBuilder() {
    if (readOptionsBuilder_ != null) {
      return readOptionsBuilder_.getMessageOrBuilder();
    } else {
      return readOptions_ == null ?
          com.google.datastore.v1.ReadOptions.getDefaultInstance() : readOptions_;
    }
  }
  // Lazily switches read_options from plain-field storage to builder storage.
  private com.google.protobuf.SingleFieldBuilderV3<
      com.google.datastore.v1.ReadOptions, com.google.datastore.v1.ReadOptions.Builder, com.google.datastore.v1.ReadOptionsOrBuilder>
      getReadOptionsFieldBuilder() {
    if (readOptionsBuilder_ == null) {
      readOptionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
          com.google.datastore.v1.ReadOptions, com.google.datastore.v1.ReadOptions.Builder, com.google.datastore.v1.ReadOptionsOrBuilder>(
              getReadOptions(),
              getParentForChildren(),
              isClean());
      readOptions_ = null;
    }
    return readOptionsBuilder_;
  }

  // keys is held either as a plain list (keys_) or via the repeated-field
  // builder (keysBuilder_) — the same dual representation as read_options.
  private java.util.List<com.google.datastore.v1.Key> keys_ =
    java.util.Collections.emptyList();
  // Copy-on-write: promote keys_ to a private ArrayList before mutating.
  private void ensureKeysIsMutable() {
    if (!((bitField0_ & 0x00000004) == 0x00000004)) {
      keys_ = new java.util.ArrayList<com.google.datastore.v1.Key>(keys_);
      bitField0_ |= 0x00000004;
    }
  }

  private com.google.protobuf.RepeatedFieldBuilderV3<
      com.google.datastore.v1.Key, com.google.datastore.v1.Key.Builder, com.google.datastore.v1.KeyOrBuilder> keysBuilder_;

  /** Keys of entities to look up. ({@code repeated .google.datastore.v1.Key keys = 3}) */
  public java.util.List<com.google.datastore.v1.Key> getKeysList() {
    if (keysBuilder_ == null) {
      return java.util.Collections.unmodifiableList(keys_);
    } else {
      return keysBuilder_.getMessageList();
    }
  }
  /** Number of {@code keys} elements. */
  public int getKeysCount() {
    if (keysBuilder_ == null) {
      return keys_.size();
    } else {
      return keysBuilder_.getCount();
    }
  }
  /** Returns {@code keys[index]}. */
  public com.google.datastore.v1.Key getKeys(int index) {
    if (keysBuilder_ == null) {
      return keys_.get(index);
    } else {
      return keysBuilder_.getMessage(index);
    }
  }
  /** Replaces {@code keys[index]}; rejects null. */
  public Builder setKeys(
      int index, com.google.datastore.v1.Key value) {
    if (keysBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureKeysIsMutable();
      keys_.set(index, value);
      onChanged();
    } else {
      keysBuilder_.setMessage(index, value);
    }
    return this;
  }
  /** Replaces {@code keys[index]} from a builder. */
  public Builder setKeys(
      int index, com.google.datastore.v1.Key.Builder builderForValue) {
    if (keysBuilder_ == null) {
      ensureKeysIsMutable();
      keys_.set(index, builderForValue.build());
      onChanged();
    } else {
      keysBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /** Appends a key; rejects null. */
  public Builder addKeys(com.google.datastore.v1.Key value) {
    if (keysBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureKeysIsMutable();
      keys_.add(value);
      onChanged();
    } else {
      keysBuilder_.addMessage(value);
    }
    return this;
  }
  /** Inserts a key at {@code index}; rejects null. */
  public Builder addKeys(
      int index, com.google.datastore.v1.Key value) {
    if (keysBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureKeysIsMutable();
      keys_.add(index, value);
      onChanged();
    } else {
      keysBuilder_.addMessage(index, value);
    }
    return this;
  }
  /** Appends a key built from {@code builderForValue}. */
  public Builder addKeys(
      com.google.datastore.v1.Key.Builder builderForValue) {
    if (keysBuilder_ == null) {
      ensureKeysIsMutable();
      keys_.add(builderForValue.build());
      onChanged();
    } else {
      keysBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /** Inserts a key built from {@code builderForValue} at {@code index}. */
  public Builder addKeys(
      int index, com.google.datastore.v1.Key.Builder builderForValue) {
    if (keysBuilder_ == null) {
      ensureKeysIsMutable();
      keys_.add(index, builderForValue.build());
      onChanged();
    } else {
      keysBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /** Appends every key in {@code values}. */
  public Builder addAllKeys(
      java.lang.Iterable<? extends com.google.datastore.v1.Key> values) {
    if (keysBuilder_ == null) {
      ensureKeysIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, keys_);
      onChanged();
    } else {
      keysBuilder_.addAllMessages(values);
    }
    return this;
  }
  /** Removes all keys. */
  public Builder clearKeys() {
    if (keysBuilder_ == null) {
      keys_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
    } else {
      keysBuilder_.clear();
    }
    return this;
  }
  /** Removes {@code keys[index]}. */
  public Builder removeKeys(int index) {
    if (keysBuilder_ == null) {
      ensureKeysIsMutable();
      keys_.remove(index);
      onChanged();
    } else {
      keysBuilder_.remove(index);
    }
    return this;
  }
  /** Mutable builder for {@code keys[index]} (forces builder-backed storage). */
  public com.google.datastore.v1.Key.Builder getKeysBuilder(
      int index) {
    return getKeysFieldBuilder().getBuilder(index);
  }
  /** Read-only view of {@code keys[index]}. */
  public com.google.datastore.v1.KeyOrBuilder getKeysOrBuilder(
      int index) {
    if (keysBuilder_ == null) {
      return keys_.get(index);
    } else {
      return keysBuilder_.getMessageOrBuilder(index);
    }
  }
  /** Read-only view of the whole {@code keys} list. */
  public java.util.List<? extends com.google.datastore.v1.KeyOrBuilder>
       getKeysOrBuilderList() {
    if (keysBuilder_ != null) {
      return keysBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(keys_);
    }
  }
  /** Appends a default key and returns its builder. */
  public com.google.datastore.v1.Key.Builder addKeysBuilder() {
    return getKeysFieldBuilder().addBuilder(
        com.google.datastore.v1.Key.getDefaultInstance());
  }
  /** Inserts a default key at {@code index} and returns its builder. */
  public com.google.datastore.v1.Key.Builder addKeysBuilder(
      int index) {
    return getKeysFieldBuilder().addBuilder(
        index, com.google.datastore.v1.Key.getDefaultInstance());
  }
  /** Builders for every element of {@code keys}. */
  public java.util.List<com.google.datastore.v1.Key.Builder>
       getKeysBuilderList() {
    return getKeysFieldBuilder().getBuilderList();
  }
  // Lazily switches keys from list storage to repeated-field-builder storage,
  // transferring ownership of keys_ to the builder.
  private com.google.protobuf.RepeatedFieldBuilderV3<
      com.google.datastore.v1.Key, com.google.datastore.v1.Key.Builder, com.google.datastore.v1.KeyOrBuilder>
      getKeysFieldBuilder() {
    if (keysBuilder_ == null) {
      keysBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.datastore.v1.Key, com.google.datastore.v1.Key.Builder, com.google.datastore.v1.KeyOrBuilder>(
              keys_,
              ((bitField0_ & 0x00000004) == 0x00000004),
              getParentForChildren(),
              isClean());
      keys_ = null;
    }
    return keysBuilder_;
  }

  // proto3: unknown fields are dropped rather than stored on the builder.
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }

  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return this;
  }


  // @@protoc_insertion_point(builder_scope:google.datastore.v1.LookupRequest)
}

// @@protoc_insertion_point(class_scope:google.datastore.v1.LookupRequest)
// Shared immutable default instance; also serves as the builder factory seed.
private static final com.google.datastore.v1.LookupRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.datastore.v1.LookupRequest();
}

public static com.google.datastore.v1.LookupRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire parser used by all static parseFrom overloads above.
private static final com.google.protobuf.Parser<LookupRequest>
    PARSER = new com.google.protobuf.AbstractParser<LookupRequest>() {
  public LookupRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
      return new LookupRequest(input, extensionRegistry);
  }
};

public static com.google.protobuf.Parser<LookupRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<LookupRequest> getParserForType() {
  return PARSER;
}

public com.google.datastore.v1.LookupRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}

}
/******************************************************************************* * Copyright (c) 2009 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation * Zend Technologies *******************************************************************************/ package org.eclipse.php.internal.ui.preferences.includepath; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.eclipse.core.resources.IContainer; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.Path; import org.eclipse.dltk.core.IBuildpathEntry; import org.eclipse.dltk.core.IScriptProject; import org.eclipse.dltk.internal.corext.buildpath.BuildpathModifier; import org.eclipse.dltk.internal.ui.wizards.NewWizardMessages; import org.eclipse.dltk.internal.ui.wizards.buildpath.*; import org.eclipse.dltk.internal.ui.wizards.dialogfields.*; import org.eclipse.dltk.ui.DLTKUIPlugin; import org.eclipse.dltk.ui.actions.AbstractOpenWizardAction; import org.eclipse.dltk.ui.util.PixelConverter; import org.eclipse.jface.action.IAction; import org.eclipse.jface.util.IPropertyChangeListener; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.window.Window; import org.eclipse.swt.SWT; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Shell; import org.eclipse.ui.INewWizard; import org.eclipse.ui.internal.IChangeListener; public class PHPSourceContainerWorkbookPage extends 
BuildPathBasePage {

	// NOTE(review): this page edits the source-folder entries of a PHP
	// project's buildpath. fFoldersList is the tree the user sees;
	// fBuildpathList is the full buildpath model owned by the enclosing
	// preference page, kept in sync by updateBuildpathList().

	/**
	 * Action that opens a {@link BuildPathWizard}. The wizard's outcome is
	 * reported back through the {@link IAction#RESULT} property change:
	 * TRUE commits the wizard's changes into the folder list, anything else
	 * cancels the wizard.
	 */
	public class OpenBuildPathWizardAction extends AbstractOpenWizardAction
			implements IPropertyChangeListener {
		private final BuildPathWizard fWizard;
		// Selection captured at construction time, restored if the wizard
		// inserts nothing (see finishWizard()).
		private final List fSelectedElements;

		public OpenBuildPathWizardAction(BuildPathWizard wizard) {
			fWizard = wizard;
			addPropertyChangeListener(this);
			fSelectedElements = fFoldersList.getSelectedElements();
		}

		/**
		 * {@inheritDoc}
		 */
		protected INewWizard createWizard() throws CoreException {
			return fWizard;
		}

		/**
		 * {@inheritDoc}
		 */
		public void propertyChange(PropertyChangeEvent event) {
			if (event.getProperty().equals(IAction.RESULT)) {
				if (event.getNewValue().equals(Boolean.TRUE)) {
					finishWizard();
				} else {
					fWizard.cancel();
				}
			}
		}

		// Applies the wizard's inserted/removed/modified elements to the list
		// and restores the pre-wizard selection when nothing was inserted.
		protected void finishWizard() {
			List insertedElements = fWizard.getInsertedElements();
			refresh(insertedElements, fWizard.getRemovedElements(), fWizard
					.getModifiedElements());

			if (insertedElements.isEmpty()) {
				fFoldersList.postSetSelection(new StructuredSelection(
						fSelectedElements));
			}
		}
	}

	/**
	 * Creates (but does not open) a wizard for adding/editing a plain source
	 * folder. Changes are not flushed by the wizard itself
	 * ({@code setDoFlushChange(false)}) — the page applies them on finish.
	 */
	protected static AddSourceFolderWizard newSourceFolderWizard(
			BPListElement element, List/* <BPListElement> */existingElements,
			boolean newFolder) {
		BPListElement[] existing = (BPListElement[]) existingElements
				.toArray(new BPListElement[existingElements.size()]);
		AddSourceFolderWizard wizard = new AddSourceFolderWizard(existing,
				element, false, newFolder, newFolder, newFolder ? BPListElement
						.isProjectSourceFolder(existing, element
								.getScriptProject()) : false, newFolder);
		wizard.setDoFlushChange(false);
		return wizard;
	}

	/** Same as {@link #newSourceFolderWizard}, but for a linked source folder. */
	private static AddSourceFolderWizard newLinkedSourceFolderWizard(
			BPListElement element, List/* <BPListElement> */existingElements,
			boolean newFolder) {
		BPListElement[] existing = (BPListElement[]) existingElements
				.toArray(new BPListElement[existingElements.size()]);
		AddSourceFolderWizard wizard = new AddSourceFolderWizard(existing,
				element, true, newFolder, newFolder, newFolder ? BPListElement
						.isProjectSourceFolder(existing, element
								.getScriptProject()) : false, newFolder);
		wizard.setDoFlushChange(false);
		return wizard;
	}

	/** Creates a wizard for editing an entry's inclusion/exclusion filters. */
	private static EditFilterWizard newEditFilterWizard(BPListElement element,
			List/* <BPListElement> */existingElements) {
		BPListElement[] existing = (BPListElement[]) existingElements
				.toArray(new BPListElement[existingElements.size()]);
		EditFilterWizard result = new EditFilterWizard(existing, element);
		result.setDoFlushChange(false);
		return result;
	}

	// Full buildpath model (all entry kinds), owned by the enclosing page.
	protected ListDialogField fBuildpathList;
	// Project being configured; null until init() is called.
	protected IScriptProject fCurrJProject;
	// Root control created by getControl(); used to resolve the shell.
	private Control fSWTControl;
	// Tree of BPE_SOURCE entries shown on this page.
	protected TreeListDialogField fFoldersList;

	// Button indices in the buttonLabels array built by
	// initContainerElements(); index 2 is the separator (null label),
	// which is why IDX_EDIT jumps from 1 to 3.
	private final int IDX_ADD = 0;
	private final int IDX_ADD_LINK = 1;
	private final int IDX_EDIT = 3;
	private final int IDX_REMOVE = 4;

	// NOTE(review): not referenced in this part of the class — presumably
	// consumed by subclasses; confirm before removing.
	protected List<IChangeListener> addedElementListeners = new ArrayList<IChangeListener>(
			1);

	protected int getIDX_ADD() {
		return IDX_ADD;
	}

	protected int getIDX_ADD_LINK() {
		return IDX_ADD_LINK;
	}

	protected int getIDX_EDIT() {
		return IDX_EDIT;
	}

	protected int getIDX_REMOVE() {
		return IDX_REMOVE;
	}

	public PHPSourceContainerWorkbookPage(ListDialogField buildpathList) {
		fBuildpathList = buildpathList;
		fSWTControl = null;
	}

	/**
	 * Initializes the folder-list widget and its buttons. Deliberately kept
	 * out of the constructor so that subclasses can override it (called from
	 * {@link #getControl(Composite)}).
	 */
	protected void initContainerElements() {
		SourceContainerAdapter adapter = new SourceContainerAdapter();
		String[] buttonLabels;

		buttonLabels = new String[] {
				NewWizardMessages.SourceContainerWorkbookPage_folders_add_button,
				NewWizardMessages.SourceContainerWorkbookPage_folders_link_source_button,
				/* index 2: separator */null,
				NewWizardMessages.SourceContainerWorkbookPage_folders_edit_button,
				NewWizardMessages.SourceContainerWorkbookPage_folders_remove_button };

		fFoldersList = new TreeListDialogField(adapter, buttonLabels,
				new PHPIPListLabelProvider());
		fFoldersList.setDialogFieldListener(adapter);
		fFoldersList
				.setLabelText(NewWizardMessages.SourceContainerWorkbookPage_folders_label);

		fFoldersList.setViewerSorter(new BPListElementSorter());
		// Nothing selected yet, so editing starts disabled.
		fFoldersList.enableButton(getIDX_EDIT(), false);
	}

	/*
	 * (non-Javadoc)
	 * 
	 * @see
	 * org.eclipse.dltk.internal.ui.wizards.buildpath.BuildPathBasePage#setTitle
	 * (java.lang.String)
	 */
	public void setTitle(String title) {
		fFoldersList.setLabelText(title);
	}

	/** Binds the page to a project and populates the folder list from it. */
	public void init(IScriptProject jproject) {
		fCurrJProject = jproject;
		updateFoldersList();
	}

	// Rebuilds fFoldersList with the BPE_SOURCE entries of fBuildpathList,
	// expanding entries that carry inclusion/exclusion filters.
	protected void updateFoldersList() {
		ArrayList folders = new ArrayList();

		List cpelements = fBuildpathList.getElements();
		for (int i = 0; i < cpelements.size(); i++) {
			BPListElement cpe = (BPListElement) cpelements.get(i);
			if (cpe.getEntryKind() == IBuildpathEntry.BPE_SOURCE) {
				folders.add(cpe);
			}
		}
		fFoldersList.setElements(folders);

		for (int i = 0; i < folders.size(); i++) {
			BPListElement cpe = (BPListElement) folders.get(i);
			IPath[] ePatterns = (IPath[]) cpe
					.getAttribute(BPListElement.EXCLUSION);
			IPath[] iPatterns = (IPath[]) cpe
					.getAttribute(BPListElement.INCLUSION);
			if (ePatterns.length > 0 || iPatterns.length > 0) {
				fFoldersList.expandElement(cpe, 3);
			}
		}
	}

	/** Creates the page's SWT controls; must be called before the page is shown. */
	public Control getControl(Composite parent) {
		initContainerElements();
		PixelConverter converter = new PixelConverter(parent);
		Composite composite = new Composite(parent, SWT.NONE);

		LayoutUtil.doDefaultLayout(composite,
				new DialogField[] { fFoldersList }, true, SWT.DEFAULT,
				SWT.DEFAULT);
		LayoutUtil.setHorizontalGrabbing(fFoldersList.getTreeControl(null));

		int buttonBarWidth = converter.convertWidthInCharsToPixels(24);
		fFoldersList.setButtonsMinWidth(buttonBarWidth);

		fSWTControl = composite;

		// expand entries that have filters, as updateFoldersList() does
		List elements = fFoldersList.getElements();
		for (int i = 0; i < elements.size(); i++) {
			BPListElement elem = (BPListElement) elements.get(i);
			IPath[] exclusionPatterns = (IPath[]) elem
					.getAttribute(BPListElement.EXCLUSION);
			IPath[] inclusionPatterns = (IPath[]) elem
					.getAttribute(BPListElement.INCLUSION);
			if (exclusionPatterns.length > 0 || inclusionPatterns.length > 0) {
				fFoldersList.expandElement(elem, 3);
			}
		}
		return composite;
	}

	// Prefers the page's own shell; falls back to the active workbench shell
	// before the controls exist or after they are disposed.
	protected Shell getShell() {
		if (fSWTControl != null && !fSWTControl.isDisposed()) {
			return fSWTControl.getShell();
		}
		return DLTKUIPlugin.getActiveWorkbenchShell();
	}

	/**
	 * Adapter that routes all tree-list callbacks (buttons, selection,
	 * double-click, keys, content) to the page's sourcePage* methods.
	 */
	public class SourceContainerAdapter implements ITreeListAdapter,
			IDialogFieldListener {

		private final Object[] EMPTY_ARR = new Object[0];

		// -------- IListAdapter --------
		public void customButtonPressed(TreeListDialogField field, int index) {
			sourcePageCustomButtonPressed(field, index);
		}

		public void selectionChanged(TreeListDialogField field) {
			sourcePageSelectionChanged(field);
		}

		public void doubleClicked(TreeListDialogField field) {
			sourcePageDoubleClicked(field);
		}

		public void keyPressed(TreeListDialogField field, KeyEvent event) {
			sourcePageKeyPressed(field, event);
		}

		public Object[] getChildren(TreeListDialogField field, Object element) {
			if (element instanceof BPListElement) {
				return ((BPListElement) element).getChildren();
			}
			return EMPTY_ARR;
		}

		public Object getParent(TreeListDialogField field, Object element) {
			if (element instanceof BPListElementAttribute) {
				return ((BPListElementAttribute) element).getParent();
			}
			return null;
		}

		public boolean hasChildren(TreeListDialogField field, Object element) {
			// Only top-level entries have children (their attribute nodes).
			return (element instanceof BPListElement);
		}

		// ---------- IDialogFieldListener --------
		public void dialogFieldChanged(DialogField field) {
			sourcePageDialogFieldChanged(field);
		}
	}

	// Plain DEL (no modifiers) removes the selection, if removable.
	protected void sourcePageKeyPressed(TreeListDialogField field,
			KeyEvent event) {
		if (field == fFoldersList) {
			if (event.character == SWT.DEL && event.stateMask == 0) {
				List selection = field.getSelectedElements();
				if (canRemove(selection)) {
					removeEntry();
				}
			}
		}
	}

	// Double-click edits the selected entry, if editable.
	protected void sourcePageDoubleClicked(TreeListDialogField field) {
		if (field == fFoldersList) {
			List selection = field.getSelectedElements();
			if (canEdit(selection)) {
				editEntry();
			}
		}
	}

	// Dispatches the Add / Add-Link / Edit / Remove buttons. "Add" uses the
	// multi-folder dialog when the project already has sub-folders, otherwise
	// the single-folder wizard.
	protected void sourcePageCustomButtonPressed(DialogField field, int index) {
		if (field == fFoldersList) {
			if (index == getIDX_ADD()) {
				IProject project = fCurrJProject.getProject();
				if (project.exists() && hasFolders(project)) {
					List existingElements = fFoldersList.getElements();
					BPListElement[] existing = (BPListElement[]) existingElements
							.toArray(new BPListElement[existingElements.size()]);
					CreateMultipleSourceFoldersDialog dialog = new CreateMultipleSourceFoldersDialog(
							fCurrJProject, existing, getShell());
					if (dialog.open() == Window.OK) {
						refresh(dialog.getInsertedElements(), dialog
								.getRemovedElements(), dialog
								.getModifiedElements());
					}
				} else {
					BPListElement newElement = new BPListElement(fCurrJProject,
							IBuildpathEntry.BPE_SOURCE, false);
					AddSourceFolderWizard wizard = newSourceFolderWizard(
							newElement, fFoldersList.getElements(), true);
					OpenBuildPathWizardAction action = new OpenBuildPathWizardAction(
							wizard);
					action.run();
				}
			} else if (index == getIDX_ADD_LINK()) {
				BPListElement newElement = new BPListElement(fCurrJProject,
						IBuildpathEntry.BPE_SOURCE, false);
				AddSourceFolderWizard wizard = newLinkedSourceFolderWizard(
						newElement, fFoldersList.getElements(), true);
				OpenBuildPathWizardAction action = new OpenBuildPathWizardAction(
						wizard);
				action.run();
			} else if (index == getIDX_EDIT()) {
				editEntry();
			} else if (index == getIDX_REMOVE()) {
				removeEntry();
			}
		}
	}

	// True when the container has at least one sub-container. On a CoreException
	// (or when there are none) falls back to a heuristic over the current list:
	// more than one entry, or a single entry that is not the project root.
	protected boolean hasFolders(IContainer container) {
		try {
			IResource[] members = container.members();
			for (int i = 0; i < members.length; i++) {
				if (members[i] instanceof IContainer) {
					return true;
				}
			}
		} catch (CoreException e) {
			// ignore — fall through to the list-based heuristic below
		}

		List elements = fFoldersList.getElements();
		if (elements.size() > 1)
			return true;

		if (elements.size() == 0)
			return false;

		BPListElement single = (BPListElement) elements.get(0);
		if (single.getPath().equals(fCurrJProject.getPath()))
			return false;

		return true;
	}

	// Edits the single selected item: a top-level entry opens the folder
	// wizard, an attribute node opens the filter wizard.
	private void editEntry() {
		List selElements = fFoldersList.getSelectedElements();
		if (selElements.size() != 1) {
			return;
		}
		Object elem = selElements.get(0);
		if (fFoldersList.getIndexOfElement(elem) != -1) {
			editElementEntry((BPListElement) elem);
		} else if (elem instanceof BPListElementAttribute) {
			editAttributeEntry((BPListElementAttribute) elem);
		}
	}

	// Linked entries get the linked-folder wizard, plain entries the normal one.
	private void editElementEntry(BPListElement elem) {
		if (elem.getLinkTarget() != null) {
			AddSourceFolderWizard wizard = newLinkedSourceFolderWizard(elem,
					fFoldersList.getElements(), false);
			OpenBuildPathWizardAction action = new OpenBuildPathWizardAction(
					wizard);
			action.run();
		} else {
			AddSourceFolderWizard wizard = newSourceFolderWizard(elem,
					fFoldersList.getElements(), false);
			OpenBuildPathWizardAction action = new OpenBuildPathWizardAction(
					wizard);
			action.run();
		}
	}

	// Only inclusion/exclusion attributes are editable here; the NATIVE_LIB_PATH
	// branch below is intentionally disabled (kept from the DLTK original).
	private void editAttributeEntry(BPListElementAttribute elem) {
		String key = elem.getKey();
		if (key.equals(BPListElement.EXCLUSION)
				|| key.equals(BPListElement.INCLUSION)) {
			EditFilterWizard wizard = newEditFilterWizard(elem.getParent(),
					fFoldersList.getElements());
			OpenBuildPathWizardAction action = new OpenBuildPathWizardAction(
					wizard);
			action.run();
			fFoldersList.refresh();
			fBuildpathList.dialogFieldChanged();
		}
		// else if (key.equals(BPListElement.NATIVE_LIB_PATH)) {
		// BPListElement selElement= elem.getParent();
		// NativeLibrariesDialog dialog= new NativeLibrariesDialog(getShell(),
		// selElement);
		// if (dialog.open() == Window.OK) {
		// selElement.setAttribute(BPListElement.NATIVE_LIB_PATH,
		// dialog.getNativeLibraryPath());
		// fFoldersList.refresh();
		// fClassPathList.dialogFieldChanged(); // validate
		// }
		// }
	}

	// Re-evaluates button enablement for the current selection.
	// containsOnlyTopLevelEntries is inherited — TODO confirm its contract
	// against BuildPathBasePage.
	protected void sourcePageSelectionChanged(DialogField field) {
		List selected = fFoldersList.getSelectedElements();
		fFoldersList.enableButton(getIDX_EDIT(), canEdit(selected));
		fFoldersList.enableButton(getIDX_REMOVE(), canRemove(selected));
		boolean noAttributes = containsOnlyTopLevelEntries(selected);
		fFoldersList.enableButton(getIDX_ADD(), noAttributes);
	}

	// Removes the selection. Attribute nodes are "removed" by resetting their
	// value (filters become empty arrays); remaining top-level entries are
	// removed outright, with their filters propagated via BuildpathModifier.
	protected void removeEntry() {
		List selElements = fFoldersList.getSelectedElements();
		// iterate backwards: entries are deleted from the list while iterating
		for (int i = selElements.size() - 1; i >= 0; i--) {
			Object elem = selElements.get(i);
			if (elem instanceof BPListElementAttribute) {
				BPListElementAttribute attrib = (BPListElementAttribute) elem;
				String key = attrib.getKey();
				Object value = null;
				if (key.equals(BPListElement.EXCLUSION)
						|| key.equals(BPListElement.INCLUSION)) {
					value = new Path[0];
				}
				attrib.getParent().setAttribute(key, value);
				selElements.remove(i);
			}
		}
		if (selElements.isEmpty()) {
			fFoldersList.refresh();
			fBuildpathList.dialogFieldChanged(); // validate
		} else {
			for (Iterator iter = selElements.iterator(); iter.hasNext();) {
				BPListElement element = (BPListElement) iter.next();
				if (element.getEntryKind() == IBuildpathEntry.BPE_SOURCE) {
					List list = BuildpathModifier.removeFilters(element
							.getPath(), fCurrJProject, fFoldersList
							.getElements());
					for (Iterator iterator = list.iterator(); iterator
							.hasNext();) {
						BPListElement modified = (BPListElement) iterator
								.next();
						fFoldersList.refresh(modified);
						fFoldersList.expandElement(modified, 3);
					}
				}
			}
			fFoldersList.removeElements(selElements);
		}
	}

	// A selection is removable when every attribute node still has a value to
	// reset and no entry belongs to a container (container members cannot be
	// removed individually).
	protected boolean canRemove(List selElements) {
		if (selElements.size() == 0) {
			return false;
		}
		for (int i = 0; i < selElements.size(); i++) {
			Object elem = selElements.get(i);
			if (elem instanceof BPListElementAttribute) {
				BPListElementAttribute attrib = (BPListElementAttribute) elem;
				String key = attrib.getKey();
				if (BPListElement.INCLUSION.equals(key)) {
					if (((IPath[]) attrib.getValue()).length == 0) {
						return false;
					}
				} else if (BPListElement.EXCLUSION.equals(key)) {
					if (((IPath[]) attrib.getValue()).length == 0) {
						return false;
					}
				} else if (attrib.getValue() == null) {
					return false;
				}
			} else if (elem instanceof BPListElement) {
				BPListElement curr = (BPListElement) elem;
				if (curr.getParentContainer() != null) {
					return false;
				}
			}
		}
		return true;
	}

	// Exactly one item, and not the project-root source entry.
	private boolean canEdit(List selElements) {
		if (selElements.size() != 1) {
			return false;
		}
		Object elem = selElements.get(0);
		if (elem instanceof BPListElement) {
			BPListElement cp = ((BPListElement) elem);
			if (cp.getPath().equals(cp.getScriptProject().getPath()))
				return false;
			return true;
		}
		if (elem instanceof BPListElementAttribute) {
			return true;
		}
		return false;
	}

	private void sourcePageDialogFieldChanged(DialogField field) {
		if (fCurrJProject == null) {
			// not initialized
			return;
		}

		if (field == fFoldersList) {
			updateBuildpathList();
		}
	}

	// Writes the folder list back into the full buildpath model: removes
	// source entries no longer present and inserts new ones at the position
	// of the last removal (or after the last surviving source entry).
	// isEntryKind() is defined elsewhere in this class — TODO confirm it
	// matches BPE_SOURCE only.
	private void updateBuildpathList() {
		List srcelements = fFoldersList.getElements();

		List cpelements = fBuildpathList.getElements();
		int nEntries = cpelements.size();
		// backwards, as entries will be deleted
		int lastRemovePos = nEntries;
		int afterLastSourcePos = 0;
		for (int i = nEntries - 1; i >= 0; i--) {
			BPListElement cpe = (BPListElement) cpelements.get(i);
			int kind = cpe.getEntryKind();
			if (isEntryKind(kind)) {
				if (!srcelements.remove(cpe)) {
					cpelements.remove(i);
					lastRemovePos = i;
				} else if (lastRemovePos == nEntries) {
					afterLastSourcePos = i + 1;
				}
			}
		}

		if (!srcelements.isEmpty()) {
			int insertPos = Math.min(afterLastSourcePos, lastRemovePos);
			cpelements.addAll(insertPos, srcelements);
		}

		if (lastRemovePos != nEntries || !srcelements.isEmpty()) {
			fBuildpathList.setElements(cpelements);
		}
	}

	/*
	 * @see BuildPathBasePage#getSelection
	 */
	public List getSelection() {
		return fFoldersList.getSelectedElements();
	}

	/*
	 * @see BuildPathBasePage#setSelection
	 */
	public void
setSelection(List selElements, boolean expand) { fFoldersList.selectElements(new StructuredSelection(selElements)); if (expand) { for (int i = 0; i < selElements.size(); i++) { fFoldersList.expandElement(selElements.get(i), 1); } } } public boolean isEntryKind(int kind) { return kind == IBuildpathEntry.BPE_SOURCE; } protected void refresh(List insertedElements, List removedElements, List modifiedElements) { fFoldersList.addElements(insertedElements); for (Iterator iter = insertedElements.iterator(); iter.hasNext();) { BPListElement element = (BPListElement) iter.next(); fFoldersList.expandElement(element, 3); } fFoldersList.removeElements(removedElements); for (Iterator iter = modifiedElements.iterator(); iter.hasNext();) { BPListElement element = (BPListElement) iter.next(); fFoldersList.refresh(element); fFoldersList.expandElement(element, 3); } fFoldersList.refresh(); // does enforce the order of the entries. if (!insertedElements.isEmpty()) { fFoldersList.postSetSelection(new StructuredSelection( insertedElements)); } } public void registerAddedElementListener(IChangeListener listener) { if (listener != null) { addedElementListeners.add(listener); } } public void unregisterAddedElementListener(IChangeListener listener) { if (listener != null) { addedElementListeners.remove(listener); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.oodt.cas.filemgr.system;

import org.apache.avro.AvroRemoteException;
import org.apache.avro.ipc.NettyServer;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.oodt.cas.filemgr.catalog.Catalog;
import org.apache.oodt.cas.filemgr.datatransfer.DataTransfer;
import org.apache.oodt.cas.filemgr.structs.Element;
import org.apache.oodt.cas.filemgr.structs.FileTransferStatus;
import org.apache.oodt.cas.filemgr.structs.Product;
import org.apache.oodt.cas.filemgr.structs.ProductPage;
import org.apache.oodt.cas.filemgr.structs.ProductType;
import org.apache.oodt.cas.filemgr.structs.Reference;
import org.apache.oodt.cas.filemgr.structs.avrotypes.*;
import org.apache.oodt.cas.filemgr.structs.exceptions.CatalogException;
import org.apache.oodt.cas.filemgr.structs.exceptions.DataTransferException;
import org.apache.oodt.cas.filemgr.structs.exceptions.QueryFormulationException;
import org.apache.oodt.cas.filemgr.structs.exceptions.RepositoryManagerException;
import org.apache.oodt.cas.filemgr.structs.exceptions.ValidationLayerException;
import org.apache.oodt.cas.filemgr.structs.exceptions.VersioningException;
import org.apache.oodt.cas.filemgr.structs.query.QueryResult;
import org.apache.oodt.cas.filemgr.util.AvroTypeFactory;
import org.apache.oodt.cas.filemgr.util.GenericFileManagerObjectFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

/**
 * @author radu
 *
 * <p>Implementation of FileManagerServer that uses the apache avro-ipc API.</p>
 *
 * <p>Each RPC method delegates to the wrapped {@link FileManager}, converting
 * between Avro wire types and OODT domain types via {@link AvroTypeFactory}.
 * Checked file-manager exceptions are re-thrown as {@link AvroRemoteException}
 * so they cross the IPC boundary.</p>
 */
public class AvroFileManagerServer implements AvroFileManager, FileManagerServer {

    private static final Logger logger = LoggerFactory.getLogger(AvroFileManagerServer.class);

    /* port the avro-ipc NettyServer listens on */
    protected int port = 1999;

    private Server server;

    /* file manager tools */
    private FileManager fileManager;

    public AvroFileManagerServer(int port) {
        this.port = port;
    }

    /**
     * Starts the RPC listener and initializes the underlying file manager.
     *
     * @return true on success; false if the file manager could not be
     *         initialized (in which case the listener is shut down again)
     */
    @Override
    public boolean startUp() throws Exception {
        server = new NettyServer(new SpecificResponder(AvroFileManager.class, this),
                new InetSocketAddress(this.port));
        server.start();
        try {
            this.fileManager = new FileManager();
            this.loadConfiguration();
        } catch (Exception e) {
            // Log with full stack trace instead of printStackTrace(), and close the
            // already-started RPC listener so a failed startup does not leak the port.
            logger.error("Failed to initialize file manager on port {}", port, e);
            server.close();
            return false;
        }
        return true;
    }

    /**
     * Loads file manager configuration and wires in a {@link DataTransfer}
     * implementation chosen by the {@code filemgr.datatransfer.factory} system
     * property (defaults to local transfer).
     *
     * @throws IOException if configuration loading fails
     */
    public void loadConfiguration() throws IOException {
        fileManager.loadConfiguration();

        String transferFactory = System.getProperty("filemgr.datatransfer.factory",
                "org.apache.oodt.cas.filemgr.datatransfer.LocalDataTransferFactory");

        DataTransfer dataTransfer = GenericFileManagerObjectFactory
                .getDataTransferServiceFromFactory(transferFactory);
        dataTransfer.setFileManagerUrl(new URL("http://localhost:" + port));
        fileManager.setDataTransfer(dataTransfer);
    }

    /** Stops the RPC listener. Always reports success. */
    @Override
    public boolean shutdown() {
        this.server.close();
        return true;
    }

    // NOTE(review): always reports alive, even before startUp()/after shutdown();
    // kept as-is to preserve the existing contract.
    @Override
    public boolean isAlive() {
        return true;
    }

    @Override
    public void setCatalog(Catalog catalog) {
        this.fileManager.setCatalog(catalog);
    }

    @Override
    public boolean refreshConfigAndPolicy() throws AvroRemoteException {
        return this.fileManager.refreshConfigAndPolicy();
    }

    @Override
    public boolean transferringProduct(AvroProduct p) throws AvroRemoteException {
        return this.fileManager.transferringProduct(AvroTypeFactory.getProduct(p));
    }

    @Override
    public AvroFileTransferStatus getCurrentFileTransfer() throws AvroRemoteException {
        return AvroTypeFactory.getAvroFileTransferStatus(this.fileManager.getCurrentFileTransfer());
    }

    @Override
    public List<AvroFileTransferStatus> getCurrentFileTransfers() throws AvroRemoteException {
        List<AvroFileTransferStatus> avroFileTransferStatuses = new ArrayList<AvroFileTransferStatus>();
        for (FileTransferStatus fts : this.fileManager.getCurrentFileTransfers()) {
            avroFileTransferStatuses.add(AvroTypeFactory.getAvroFileTransferStatus(fts));
        }
        return avroFileTransferStatuses;
    }

    @Override
    public double getProductPctTransferred(AvroProduct product) throws AvroRemoteException {
        return this.fileManager.getProductPctTransferred(AvroTypeFactory.getProduct(product));
    }

    @Override
    public double getRefPctTransferred(AvroReference reference) throws AvroRemoteException {
        return this.fileManager.getRefPctTransferred(AvroTypeFactory.getReference(reference));
    }

    @Override
    public boolean removeProductTransferStatus(AvroProduct product) throws AvroRemoteException {
        return this.fileManager.removeProductTransferStatus(AvroTypeFactory.getProduct(product));
    }

    @Override
    public boolean isTransferComplete(AvroProduct product) throws AvroRemoteException {
        return this.fileManager.isTransferComplete(AvroTypeFactory.getProduct(product));
    }

    @Override
    public AvroProductPage pagedQuery(AvroQuery query, AvroProductType type, int pageNum)
            throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroProductPage(this.fileManager.pagedQuery(
                    AvroTypeFactory.getQuery(query),
                    AvroTypeFactory.getProductType(type),
                    pageNum));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroProductPage getFirstPage(AvroProductType type) throws AvroRemoteException {
        logger.debug("Getting first page for type: {}", type.getName());
        ProductPage firstPage = this.fileManager.getFirstPage(AvroTypeFactory.getProductType(type));
        logger.debug("Found first page for product type: {} -> {}", type.getName(), firstPage);
        if (firstPage == null) {
            logger.warn("No first page found for product type: {}", type.getName());
            return null;
        }
        return AvroTypeFactory.getAvroProductPage(firstPage);
    }

    @Override
    public AvroProductPage getLastPage(AvroProductType type) throws AvroRemoteException {
        return AvroTypeFactory.getAvroProductPage(
                this.fileManager.getLastPage(AvroTypeFactory.getProductType(type)));
    }

    @Override
    public AvroProductPage getNextPage(AvroProductType type, AvroProductPage currPage)
            throws AvroRemoteException {
        return AvroTypeFactory.getAvroProductPage(this.fileManager
                .getNextPage(AvroTypeFactory.getProductType(type),
                        AvroTypeFactory.getProductPage(currPage)));
    }

    @Override
    public AvroProductPage getPrevPage(AvroProductType type, AvroProductPage currPage)
            throws AvroRemoteException {
        return AvroTypeFactory.getAvroProductPage(
                this.fileManager.getPrevPage(
                        AvroTypeFactory.getProductType(type),
                        AvroTypeFactory.getProductPage(currPage)));
    }

    @Override
    public boolean setProductTransferStatus(AvroProduct product) throws AvroRemoteException {
        try {
            return this.fileManager.setProductTransferStatus(AvroTypeFactory.getProduct(product));
        } catch (CatalogException e) {
            // unlike the getMessage() wraps elsewhere, this historically keeps the cause
            throw new AvroRemoteException(e);
        }
    }

    @Override
    public int getNumProducts(AvroProductType type) throws AvroRemoteException {
        try {
            return this.fileManager.getNumProducts(AvroTypeFactory.getProductType(type));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e);
        }
    }

    @Override
    public List<AvroProduct> getTopNProductsByProductType(int n, AvroProductType type)
            throws AvroRemoteException {
        List<AvroProduct> avroProducts = new ArrayList<AvroProduct>();
        try {
            for (Product p : this.fileManager.getTopNProductsByProductType(n,
                    AvroTypeFactory.getProductType(type))) {
                avroProducts.add(AvroTypeFactory.getAvroProduct(p));
            }
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
        return avroProducts;
    }

    @Override
    public List<AvroProduct> getTopNProducts(int n) throws AvroRemoteException {
        List<AvroProduct> avroProducts = new ArrayList<AvroProduct>();
        try {
            for (Product p : this.fileManager.getTopNProducts(n)) {
                avroProducts.add(AvroTypeFactory.getAvroProduct(p));
            }
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
        return avroProducts;
    }

    @Override
    public boolean hasProduct(String productName) throws AvroRemoteException {
        try {
            return this.fileManager.hasProduct(productName);
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroMetadata getMetadata(AvroProduct product) throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroMetadata(
                    this.fileManager.getMetadata(AvroTypeFactory.getProduct(product)));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroMetadata getReducedMetadata(AvroProduct product, List<String> elements)
            throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroMetadata(
                    this.fileManager.getReducedMetadata(AvroTypeFactory.getProduct(product), elements));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public List<AvroProductType> getProductTypes() throws AvroRemoteException {
        List<AvroProductType> avroProductTypes = new ArrayList<AvroProductType>();
        try {
            for (ProductType pt : this.fileManager.getProductTypes()) {
                avroProductTypes.add(AvroTypeFactory.getAvroProductType(pt));
            }
        } catch (RepositoryManagerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
        return avroProductTypes;
    }

    @Override
    public List<AvroReference> getProductReferences(AvroProduct product) throws AvroRemoteException {
        // local renamed from the misleading "avroProductTypes": it holds references
        List<AvroReference> avroReferences = new ArrayList<AvroReference>();
        try {
            for (Reference r : this.fileManager.getProductReferences(AvroTypeFactory.getProduct(product))) {
                avroReferences.add(AvroTypeFactory.getAvroReference(r));
            }
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
        return avroReferences;
    }

    @Override
    public AvroProduct getProductById(String productId) throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroProduct(this.fileManager.getProductById(productId));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroProduct getProductByName(String productName) throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroProduct(this.fileManager.getProductByName(productName));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public List<AvroProduct> getProductsByProductType(AvroProductType type) throws AvroRemoteException {
        List<AvroProduct> avroProducts = new ArrayList<AvroProduct>();
        try {
            List<Product> products =
                    this.fileManager.getProductsByProductType(AvroTypeFactory.getProductType(type));
            if (products != null) {
                for (Product p : products) {
                    avroProducts.add(AvroTypeFactory.getAvroProduct(p));
                }
            }
            return avroProducts;
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public List<AvroElement> getElementsByProductType(AvroProductType type) throws AvroRemoteException {
        List<AvroElement> avroElements = new ArrayList<AvroElement>();
        try {
            for (Element e : this.fileManager.getElementsByProductType(AvroTypeFactory.getProductType(type))) {
                avroElements.add(AvroTypeFactory.getAvroElement(e));
            }
            return avroElements;
        } catch (ValidationLayerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroElement getElementById(String elementId) throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroElement(this.fileManager.getElementById(elementId));
        } catch (ValidationLayerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroElement getElementByName(String elementName) throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroElement(this.fileManager.getElementByName(elementName));
        } catch (ValidationLayerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public List<AvroQueryResult> complexQuery(AvroComplexQuery avroComplexQuery)
            throws AvroRemoteException {
        List<AvroQueryResult> avroQueryResults = new ArrayList<AvroQueryResult>();
        try {
            for (QueryResult qr : this.fileManager.complexQuery(
                    AvroTypeFactory.getComplexQuery(avroComplexQuery))) {
                avroQueryResults.add(AvroTypeFactory.getAvroQueryResult(qr));
            }
            return avroQueryResults;
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public List<AvroProduct> query(AvroQuery avroQuery, AvroProductType avroProductType)
            throws AvroRemoteException {
        List<AvroProduct> avroProducts = new ArrayList<AvroProduct>();
        try {
            for (Product p : this.fileManager.query(AvroTypeFactory.getQuery(avroQuery),
                    AvroTypeFactory.getProductType(avroProductType))) {
                avroProducts.add(AvroTypeFactory.getAvroProduct(p));
            }
            return avroProducts;
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroProductType getProductTypeByName(String productTypeName) throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroProductType(
                    this.fileManager.getProductTypeByName(productTypeName));
        } catch (RepositoryManagerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroProductType getProductTypeById(String productTypeId) throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroProductType(
                    this.fileManager.getProductTypeById(productTypeId));
        } catch (RepositoryManagerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public boolean updateMetadata(AvroProduct product, AvroMetadata met) throws AvroRemoteException {
        try {
            return this.fileManager.updateMetadata(AvroTypeFactory.getProduct(product),
                    AvroTypeFactory.getMetadata(met));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public String addProductType(AvroProductType type) throws AvroRemoteException {
        try {
            return this.fileManager.addProductType(AvroTypeFactory.getProductType(type));
        } catch (RepositoryManagerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public String catalogProduct(AvroProduct product) throws AvroRemoteException {
        try {
            return this.fileManager.catalogProduct(AvroTypeFactory.getProduct(product));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public boolean addMetadata(AvroProduct product, AvroMetadata met) throws AvroRemoteException {
        try {
            // success is signalled by a non-null result from the delegate
            return this.fileManager.addMetadata(AvroTypeFactory.getProduct(product),
                    AvroTypeFactory.getMetadata(met)) != null;
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public boolean addProductReferences(AvroProduct product) throws AvroRemoteException {
        try {
            return this.fileManager.addProductReferences(AvroTypeFactory.getProduct(product));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public String ingestProduct(AvroProduct p, AvroMetadata m, boolean clientTransfer)
            throws AvroRemoteException {
        try {
            return this.fileManager.ingestProduct(AvroTypeFactory.getProduct(p),
                    AvroTypeFactory.getMetadata(m), clientTransfer);
        } catch (VersioningException | RepositoryManagerException
                | DataTransferException | CatalogException e) {
            // identical handling for all four checked exceptions: multi-catch
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public ByteBuffer retrieveFile(String filePath, int offset, int numBytes)
            throws AvroRemoteException {
        try {
            return ByteBuffer.wrap(this.fileManager.retrieveFile(filePath, offset, numBytes));
        } catch (DataTransferException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    // NOTE(review): fileData.array() requires an array-backed, writable buffer;
    // a read-only buffer would throw ReadOnlyBufferException here — confirm callers.
    @Override
    public boolean transferFile(String filePath, ByteBuffer fileData, int offset, int numBytes)
            throws AvroRemoteException {
        return this.fileManager.transferFile(filePath, fileData.array(), offset, numBytes);
    }

    @Override
    public boolean moveProduct(AvroProduct p, String newPath) throws AvroRemoteException {
        try {
            return this.fileManager.moveProduct(AvroTypeFactory.getProduct(p), newPath);
        } catch (DataTransferException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public boolean removeFile(String filePath) throws AvroRemoteException {
        try {
            return this.fileManager.removeFile(filePath);
        } catch (DataTransferException | IOException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public boolean modifyProduct(AvroProduct p) throws AvroRemoteException {
        try {
            return this.fileManager.modifyProduct(AvroTypeFactory.getProduct(p));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public boolean removeProduct(AvroProduct p) throws AvroRemoteException {
        try {
            return this.fileManager.removeProduct(AvroTypeFactory.getProduct(p));
        } catch (CatalogException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroMetadata getCatalogValues(AvroMetadata m, AvroProductType productType)
            throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroMetadata(this.fileManager.getCatalogValues(
                    AvroTypeFactory.getMetadata(m),
                    AvroTypeFactory.getProductType(productType)));
        } catch (RepositoryManagerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroMetadata getOrigValues(AvroMetadata m, AvroProductType productType)
            throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroMetadata(this.fileManager.getOrigValues(
                    AvroTypeFactory.getMetadata(m),
                    AvroTypeFactory.getProductType(productType)));
        } catch (RepositoryManagerException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }

    @Override
    public AvroQuery getCatalogQuery(AvroQuery query, AvroProductType productType)
            throws AvroRemoteException {
        try {
            return AvroTypeFactory.getAvroQuery(this.fileManager.getCatalogQuery(
                    AvroTypeFactory.getQuery(query),
                    AvroTypeFactory.getProductType(productType)));
        } catch (RepositoryManagerException | QueryFormulationException e) {
            throw new AvroRemoteException(e.getMessage());
        }
    }
}
package fluentSolver;

import java.util.ArrayList;
import java.util.List;
import java.util.Vector;

import org.metacsp.framework.Constraint;
import org.metacsp.framework.Variable;
import org.metacsp.framework.multi.MultiBinaryConstraint;
import org.metacsp.multi.allenInterval.AllenInterval;
import org.metacsp.multi.allenInterval.AllenIntervalConstraint;
import org.metacsp.time.Bounds;

import htn.AdditionalConstraintTemplate;
import htn.PlanReportroryItem;
import resourceFluent.ResourceUsageTemplate;
import unify.CompoundSymbolicValueConstraint;
import unify.CompoundSymbolicVariable;

/**
 * Binary constraint between two {@code Fluent} variables.
 *
 * <p>Depending on its {@link Type} it expands into lower-level temporal
 * (Allen-interval) and symbolic (compound-value) constraints via
 * {@link #createInternalConstraints(Variable, Variable)}.</p>
 */
public class FluentConstraint extends MultiBinaryConstraint {

	private static final long serialVersionUID = 137380711080409334L;

	public enum Type {MATCHES, DC, PRE, OPENS, CLOSES, BEFORE, UNARYAPPLIED, MOVEDURATION, AXIOM, RESOURCEUSAGE}

	private Type type;
	// index pairs linking argument positions of the two fluents (SUBMATCHES)
	private int[] connections;
	private PlanReportroryItem plannedWith;  // The operator or method that has been used for planning the task.
	private boolean isNegativeEffect;
	private Bounds bounds;                   // duration bounds (MOVEDURATION only)
	private String axiom;                    // axiom name (AXIOM only)
	private ResourceUsageTemplate resourceIndicator;  // resource usage (RESOURCEUSAGE only)
	private Vector<AdditionalConstraintTemplate> additionalConstraints;
	private int depth;                       // depth of the task in the decomposition tree (only set for UNARYAPPLIED)
	private boolean depthIsSet = false;

	public FluentConstraint(Type type) {
		this.type = type;
	}

	public FluentConstraint(Type type, int[] connections) {
		this(type);
		this.connections = connections;
	}

	public FluentConstraint(Type type, PlanReportroryItem plannedWith) {
		this(type);
		this.plannedWith = plannedWith;
	}

	public FluentConstraint(Type type, Bounds bounds) {
		this(type);
		this.bounds = bounds;
	}

	public FluentConstraint(Type type, String axiom) {
		this(type);
		this.axiom = axiom;
	}

	public FluentConstraint(Type type, ResourceUsageTemplate resourceIndicator) {
		this(type);
		this.resourceIndicator = resourceIndicator;
	}

	/**
	 * Expands this high-level constraint into internal temporal/symbolic
	 * constraints between the two fluents.
	 *
	 * @param f the source fluent
	 * @param t the target fluent
	 * @return the internal constraints, or null if either variable is not a Fluent
	 */
	@Override
	protected Constraint[] createInternalConstraints(Variable f, Variable t) {
		if (!(f instanceof Fluent) || !(t instanceof Fluent))
			return null;

		List<Constraint> retList = new ArrayList<Constraint>();

		if (this.type.equals(Type.MATCHES)) {
			// temporally equal + symbolically matching
			AllenIntervalConstraint eq = new AllenIntervalConstraint(
					AllenIntervalConstraint.Type.Equals);
			eq.setFrom(((Fluent) f).getAllenInterval());
			eq.setTo(((Fluent) t).getAllenInterval());
			retList.add(eq);

			CompoundSymbolicValueConstraint ncon =
					new CompoundSymbolicValueConstraint(CompoundSymbolicValueConstraint.Type.MATCHES);
			CompoundSymbolicVariable nf = ((Fluent) f).getCompoundSymbolicVariable();
			ncon.setFrom(nf);
			CompoundSymbolicVariable nt = ((Fluent) t).getCompoundSymbolicVariable();
			ncon.setTo(nt);
			retList.add(ncon);
		} else if (this.type.equals(Type.DC)) {
			if (additionalConstraints != null && additionalConstraints.size() > 0) {
				for (AdditionalConstraintTemplate aConTemplate : additionalConstraints) {
					AllenIntervalConstraint con =
							(AllenIntervalConstraint) aConTemplate.getConstraint().clone();
					if (aConTemplate.startsFromHead()) {
						con.setFrom(((Fluent) f).getAllenInterval());
						con.setTo(((Fluent) t).getAllenInterval());
					} else {
						con.setFrom(((Fluent) t).getAllenInterval());
						con.setTo(((Fluent) f).getAllenInterval());
					}
					retList.add(con);
				}
			} else {
				// default: t during/equals/starts/finishes f
				AllenIntervalConstraint desf = new AllenIntervalConstraint(
						AllenIntervalConstraint.Type.DuringOrEqualsOrStartsOrFinishes,
						AllenIntervalConstraint.Type.DuringOrEqualsOrStartsOrFinishes.getDefaultBounds());
				desf.setFrom(((Fluent) t).getAllenInterval());
				desf.setTo(((Fluent) f).getAllenInterval());
				retList.add(desf);
			}
		} else if (this.type.equals(Type.PRE)) {
			if (additionalConstraints != null && additionalConstraints.size() > 0) {
				// NOTE: from/to are swapped here w.r.t. the DC branch
				for (AdditionalConstraintTemplate aConTemplate : additionalConstraints) {
					AllenIntervalConstraint con =
							(AllenIntervalConstraint) aConTemplate.getConstraint().clone();
					if (aConTemplate.startsFromHead()) {
						con.setFrom(((Fluent) t).getAllenInterval());
						con.setTo(((Fluent) f).getAllenInterval());
					} else {
						con.setFrom(((Fluent) f).getAllenInterval());
						con.setTo(((Fluent) t).getAllenInterval());
					}
					retList.add(con);
				}
			} else {
				// default precondition relation: f meets/overlaps/finished-by/contains t
				AllenIntervalConstraint preAIC = new AllenIntervalConstraint(
						AllenIntervalConstraint.Type.MeetsOrOverlapsOrFinishedByOrContains,
						AllenIntervalConstraint.Type.MeetsOrOverlapsOrFinishedByOrContains.getDefaultBounds());
				preAIC.setFrom(((Fluent) f).getAllenInterval());
				preAIC.setTo(((Fluent) t).getAllenInterval());
				retList.add(preAIC);
			}
		} else if (this.type.equals(Type.OPENS)) {
			if (additionalConstraints != null && additionalConstraints.size() > 0) {
				for (AdditionalConstraintTemplate aConTemplate : additionalConstraints) {
					AllenIntervalConstraint con =
							(AllenIntervalConstraint) aConTemplate.getConstraint().clone();
					if (aConTemplate.startsFromHead()) {
						con.setFrom(((Fluent) f).getAllenInterval());
						con.setTo(((Fluent) t).getAllenInterval());
					} else {
						con.setFrom(((Fluent) t).getAllenInterval());
						con.setTo(((Fluent) f).getAllenInterval());
					}
					retList.add(con);
				}
			} else {
				// TODO probably need other relations, too (e.g. Meets)
				AllenIntervalConstraint befCon = new AllenIntervalConstraint(
						AllenIntervalConstraint.Type.Overlaps,
						AllenIntervalConstraint.Type.Overlaps.getDefaultBounds());
				befCon.setFrom(((Fluent) f).getAllenInterval());
				befCon.setTo(((Fluent) t).getAllenInterval());
				retList.add(befCon);
			}
		} else if (this.type.equals(Type.BEFORE)) {
			// Only used for ordering. No temporal constraints to set
		} else if (this.type.equals(Type.CLOSES)) {
			if (additionalConstraints != null && additionalConstraints.size() > 0) {
				// Additional constraints are added in pre at the moment.
			} else {
				// TODO probably need other relations, too
				AllenIntervalConstraint oiCon = new AllenIntervalConstraint(
						AllenIntervalConstraint.Type.OverlappedBy,
						AllenIntervalConstraint.Type.OverlappedBy.getDefaultBounds());
				oiCon.setFrom(((Fluent) f).getAllenInterval());
				oiCon.setTo(((Fluent) t).getAllenInterval());
				retList.add(oiCon);
			}
		} else if (this.type.equals(Type.MOVEDURATION)) {
			// self-referential duration constraint on f using this.bounds
			AllenIntervalConstraint durationCon = new AllenIntervalConstraint(
					AllenIntervalConstraint.Type.Duration, this.bounds);
			AllenInterval ai = ((Fluent) f).getAllenInterval();
			durationCon.setFrom(ai);
			durationCon.setTo(ai);
			retList.add(durationCon);
		}

		if (connections != null && connections.length > 0) {
			retList.add(createSubmatches(f, t));
		}
		return retList.toArray(new Constraint[retList.size()]);
	}

	/** Builds the SUBMATCHES symbolic constraint linking argument positions of f and t. */
	private CompoundSymbolicValueConstraint createSubmatches(Variable f, Variable t) {
		CompoundSymbolicValueConstraint ncon = new CompoundSymbolicValueConstraint(
				CompoundSymbolicValueConstraint.Type.SUBMATCHES, connections);
		CompoundSymbolicVariable nf = ((Fluent) f).getCompoundSymbolicVariable();
		ncon.setFrom(nf);
		CompoundSymbolicVariable nt = ((Fluent) t).getCompoundSymbolicVariable();
		ncon.setTo(nt);
		return ncon;
	}

	/** Shallow copy: shared fields (connections, templates, …) are not cloned. */
	@Override
	public Object clone() {
		FluentConstraint ret = new FluentConstraint(type);
		ret.connections = this.connections;
		ret.bounds = this.bounds;
		ret.plannedWith = this.plannedWith;
		ret.setNegativeEffect(isNegativeEffect);
		ret.axiom = this.axiom;
		ret.resourceIndicator = this.resourceIndicator;
		ret.additionalConstraints = this.additionalConstraints;
		ret.depth = this.depth;
		ret.depthIsSet = this.depthIsSet;
		return ret;
	}

	@Override
	public String getEdgeLabel() {
		return this.type.toString();
	}

	@Override
	public boolean isEquivalent(Constraint c) {
		// TODO Auto-generated method stub
		return false;
	}

	public Type getType() {
		return type;
	}

	public PlanReportroryItem getPlannedWith() {
		return plannedWith;
	}

	public boolean isNegativeEffect() {
		return isNegativeEffect;
	}

	public void setNegativeEffect(boolean isNegativeEffect) {
		this.isNegativeEffect = isNegativeEffect;
	}

	public String getAxiom() {
		return axiom;
	}

	/**
	 * Checks whether this constraint uses the given resource with the symbol
	 * requirements of its {@link ResourceUsageTemplate}.
	 *
	 * @param resource the resource name to test
	 * @return true iff this is a resource-usage constraint on {@code resource}
	 *         and every required argument position holds exactly the required symbol
	 */
	public boolean isUsingResource(String resource) {
		// guard: only RESOURCEUSAGE constraints carry a template; others never match
		if (resourceIndicator == null) {
			return false;
		}
		if (resourceIndicator.getResourceName().equals(resource)) {
			CompoundSymbolicVariable cv = ((Fluent) this.getFrom()).getCompoundSymbolicVariable();
			int[] resourceRequirementPositions = resourceIndicator.getResourceRequirementPositions();
			for (int i = 0; i < resourceRequirementPositions.length; i++) {
				String[] symbols = cv.getSymbolsAt(resourceRequirementPositions[i]);
				if (symbols.length != 1
						|| !symbols[0].equals(resourceIndicator.getResourceRequirements()[i])) {
					return false;
				}
			}
			return true;
		} else {
			return false;
		}
	}

	public String getResourceName() {
		return resourceIndicator.getResourceName();
	}

	public int getResourceUsageLevel() {
		return resourceIndicator.getResourceUsageLevel();
	}

	public void setAdditionalConstraints(Vector<AdditionalConstraintTemplate> additionalConstraints) {
		this.additionalConstraints = additionalConstraints;
	}

	public Vector<AdditionalConstraintTemplate> getAdditionalConstraints() {
		return additionalConstraints;
	}

	/**
	 * @return true iff additional constraint templates have been set and are non-empty.
	 *         (Fix: the field defaults to null and was dereferenced unguarded,
	 *         throwing NPE whenever the setter had never been called.)
	 */
	public boolean hasAdditionalConstraints() {
		return additionalConstraints != null && !additionalConstraints.isEmpty();
	}

	/**
	 * @return the depth of the task in the decomposition tree
	 * @throws IllegalAccessException if {@link #setDepth(int)} was never called
	 */
	public int getDepth() throws IllegalAccessException {
		if (this.depthIsSet) {
			return depth;
		} else {
			throw new IllegalAccessException("Depth has not been set");
		}
	}

	public void setDepth(int depth) {
		this.depth = depth;
		this.depthIsSet = true;
	}
}
/* ************************************************************************ # # designCraft.io # # http://designcraft.io/ # # Copyright: # Copyright 2014 eTimeline, LLC. All rights reserved. # # License: # See the license.txt file in the project's top-level directory for details. # # Authors: # * Andy White # ************************************************************************ */ package dcraft.struct.scalar; import org.joda.time.DateTime; import org.joda.time.Period; import org.joda.time.format.ISOPeriodFormat; import dcraft.lang.op.OperationContext; import dcraft.schema.DataType; import dcraft.schema.RootType; import dcraft.script.StackEntry; import dcraft.struct.ScalarStruct; import dcraft.struct.Struct; import dcraft.util.TimeUtil; import dcraft.xml.XElement; public class DateTimeStruct extends ScalarStruct { protected DateTime value = null; @Override public DataType getType() { if (this.explicitType != null) return super.getType(); return OperationContext.get().getSchema().getType("DateTime"); } public DateTimeStruct() { } public DateTimeStruct(Object v) { this.adaptValue(v); } @Override public Object getGenericValue() { return this.value; } @Override public void adaptValue(Object v) { this.value = Struct.objectToDateTime(v); } public DateTime getValue() { return this.value; } public void setValue(DateTime v) { this.value = v; } @Override public boolean isEmpty() { return (this.value == null); } @Override public boolean isNull() { return (this.value == null); } @Override public void operation(StackEntry stack, XElement code) { String op = code.getName(); // we are loose on the idea of null/zero. 
operations always perform on now, except Validate if ((this.value == null) && !"Validate".equals(op)) this.value = new DateTime(); if ("Inc".equals(op)) { this.value = this.value.plusDays(1); stack.resume(); return; } else if ("Dec".equals(op)) { this.value = this.value.minusDays(1); stack.resume(); return; } else if ("Set".equals(op)) { Struct sref = code.hasAttribute("Value") ? stack.refFromElement(code, "Value") : stack.resolveValue(code.getText()); this.adaptValue(sref); stack.resume(); return; } else if ("Add".equals(op)) { try { if (code.hasAttribute("Years")) this.value = this.value.plusYears((int)stack.intFromElement(code, "Years")); else if (code.hasAttribute("Months")) this.value = this.value.plusMonths((int)stack.intFromElement(code, "Months")); else if (code.hasAttribute("Days")) this.value = this.value.plusDays((int)stack.intFromElement(code, "Days")); else if (code.hasAttribute("Hours")) this.value = this.value.plusHours((int)stack.intFromElement(code, "Hours")); else if (code.hasAttribute("Minutes")) this.value = this.value.plusMinutes((int)stack.intFromElement(code, "Minutes")); else if (code.hasAttribute("Seconds")) this.value = this.value.plusSeconds((int)stack.intFromElement(code, "Seconds")); else if (code.hasAttribute("Weeks")) this.value = this.value.plusWeeks((int)stack.intFromElement(code, "Weeks")); else if (code.hasAttribute("Period")) { Period p = ISOPeriodFormat.standard().parsePeriod(stack.stringFromElement(code, "Period")); this.value = this.value.plus(p); } } catch (Exception x) { OperationContext.get().error("Error doing " + op + ": " + x); } stack.resume(); return; } else if ("Subtract".equals(op)) { try { if (code.hasAttribute("Years")) this.value = this.value.minusYears((int)stack.intFromElement(code, "Years")); else if (code.hasAttribute("Months")) this.value = this.value.minusMonths((int)stack.intFromElement(code, "Months")); else if (code.hasAttribute("Days")) this.value = this.value.minusDays((int)stack.intFromElement(code, 
"Days")); else if (code.hasAttribute("Hours")) this.value = this.value.minusHours((int)stack.intFromElement(code, "Hours")); else if (code.hasAttribute("Minutes")) this.value = this.value.minusMinutes((int)stack.intFromElement(code, "Minutes")); else if (code.hasAttribute("Seconds")) this.value = this.value.minusSeconds((int)stack.intFromElement(code, "Seconds")); else if (code.hasAttribute("Weeks")) this.value = this.value.minusWeeks((int)stack.intFromElement(code, "Weeks")); else if (code.hasAttribute("Period")) { Period p = ISOPeriodFormat.standard().parsePeriod(stack.stringFromElement(code, "Period")); this.value = this.value.minus(p); } } catch (Exception x) { OperationContext.get().error("Error doing " + op + ": " + x); } stack.resume(); return; } super.operation(stack, code); } @Override protected void doCopy(Struct n) { super.doCopy(n); DateTimeStruct nn = (DateTimeStruct)n; nn.value = this.value; } @Override public Struct deepCopy() { DateTimeStruct cp = new DateTimeStruct(); this.doCopy(cp); return cp; } @Override public boolean equals(Object obj) { return (DateTimeStruct.comparison(this, obj) == 0); } @Override public int compare(Object y) { return DateTimeStruct.comparison(this, y); } @Override public int hashCode() { return (this.value == null) ? 0 : this.value.hashCode(); } @Override public String toString() { return (this.value == null) ? "null" : TimeUtil.stampFmt.print(this.value); } @Override public Object toInternalValue(RootType t) { if ((this.value != null) && (t == RootType.String)) return this.toString(); return this.value; } public static int comparison(Object x, Object y) { DateTime xv = Struct.objectToDateTime(x); DateTime yv = Struct.objectToDateTime(y); if ((yv == null) && (xv == null)) return 0; if (yv == null) return 1; if (xv == null) return -1; return xv.compareTo(yv); } @Override public boolean checkLogic(StackEntry stack, XElement source) { return Struct.objectToBooleanOrFalse(this.value); } }
/**
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   This file is part of the LDP4j Project:
 *     http://www.ldp4j.org/
 *
 *   Center for Open Middleware
 *     http://www.centeropenmiddleware.com/
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Copyright (C) 2014-2016 Center for Open Middleware.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *             http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing, software
 *   distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 *   Artifact    : org.ldp4j.framework:ldp4j-application-kernel-core:0.2.2
 *   Bundle      : ldp4j-application-kernel-core-0.2.2.jar
 * #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
 */
package org.ldp4j.application.kernel.endpoint;

import static com.google.common.base.Preconditions.checkNotNull;

import java.util.Date;

import org.ldp4j.application.engine.context.EntityTag;
import org.ldp4j.application.engine.util.ListenerManager;
import org.ldp4j.application.engine.util.Notification;
import org.ldp4j.application.kernel.endpoint.Endpoint;
import org.ldp4j.application.kernel.endpoint.EndpointRepository;
import org.ldp4j.application.kernel.resource.Attachment;
import org.ldp4j.application.kernel.resource.Container;
import org.ldp4j.application.kernel.resource.Member;
import org.ldp4j.application.kernel.resource.Resource;
import org.ldp4j.application.kernel.resource.ResourceId;
import org.ldp4j.application.kernel.resource.ResourceRepository;
import org.ldp4j.application.kernel.resource.Slug;
import org.ldp4j.application.kernel.service.Service;
import org.ldp4j.application.kernel.service.ServiceBuilder;
import org.ldp4j.application.kernel.spi.ModelFactory;
import org.ldp4j.application.kernel.spi.RuntimeDelegate;
import org.ldp4j.application.kernel.template.AttachedTemplate;
import org.ldp4j.application.kernel.template.ContainerTemplate;
import org.ldp4j.application.kernel.template.ResourceTemplate;
import org.ldp4j.application.kernel.template.TemplateManagementService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Kernel service that manages the lifecycle of {@link Endpoint} instances:
 * creating them for resources (deriving a unique path from the resource's
 * position in the template hierarchy), resolving and modifying them, and
 * notifying registered {@link EndpointLifecycleListener}s on creation and
 * deletion.
 */
public final class EndpointManagementService implements Service {

	// Notification propagated to listeners when an endpoint is created.
	private static final class EndpointCreationNotification implements Notification<EndpointLifecycleListener> {
		private final Endpoint endpoint;

		private EndpointCreationNotification(Endpoint endpoint) {
			this.endpoint = endpoint;
		}

		@Override
		public void propagate(EndpointLifecycleListener listener) {
			listener.endpointCreated(endpoint);
		}
	}

	// Notification propagated to listeners when an endpoint is deleted.
	private static final class EndpointDeletionNotification implements Notification<EndpointLifecycleListener> {
		private final Endpoint endpoint;

		private EndpointDeletionNotification(Endpoint endpoint) {
			this.endpoint = endpoint;
		}

		@Override
		public void propagate(EndpointLifecycleListener listener) {
			listener.endpointDeleted(endpoint);
		}
	}

	// Builder used by the service registry to instantiate this service with
	// its TemplateManagementService dependency resolved.
	private static final class EndpointManagementServiceBuilder extends ServiceBuilder<EndpointManagementService> {

		private EndpointManagementServiceBuilder() {
			super(EndpointManagementService.class);
		}

		@Override
		public EndpointManagementService build() {
			return
				new EndpointManagementService(
					super.service(TemplateManagementService.class));
		}
	}

	private static final String RESOURCE_IDENTIFIER_CANNOT_BE_NULL = "Resource identifier cannot be null";
	private static final String PATH_CANNOT_BE_NULL = "Path cannot be null";
	private static final String LAST_MODIFIED_CANNOT_BE_NULL = "Last modified cannot be null";
	private static final String ENTITY_TAG_CANNOT_BE_NULL = "Entity tag cannot be null";
	private static final String RESOURCE_CANNOT_BE_NULL = "Resource cannot be null";

	private static final Logger LOGGER=LoggerFactory.getLogger(EndpointManagementService.class);

	// TODO: Make this configurable, or subject to a policy
	private static final int MAX_ENDPOINT_CREATION_FAILURE = 3;

	private final TemplateManagementService templateManagementService;
	private final ListenerManager<EndpointLifecycleListener> listenerManager;

	private final ModelFactory modelFactory;
	private final EndpointRepository endpointRepository;
	private final ResourceRepository resourceRepository;

	// Repositories and the model factory are acquired from the runtime
	// delegate; only the template service is injected by the builder.
	private EndpointManagementService(TemplateManagementService templateManagementService) {
		this.templateManagementService = templateManagementService;
		this.modelFactory=RuntimeDelegate.getInstance().getModelFactory();
		this.endpointRepository=RuntimeDelegate.getInstance().getEndpointRepository();
		this.resourceRepository=RuntimeDelegate.getInstance().getResourceRepository();
		this.listenerManager=ListenerManager.<EndpointLifecycleListener>newInstance();
	}

	/**
	 * Derives the endpoint path for a (non-root) resource from its parent:
	 * first as a container member (honoring the desired slug path), and
	 * failing that as an attachment of the parent.
	 *
	 * @param resource    the resource to compute a path for; must not be the root
	 * @param desiredPath the client-suggested slug path, may be null
	 * @return the computed path, never null
	 * @throws EndpointNotFoundException if the parent resource has no endpoint
	 * @throws IllegalStateException if the resource is the root, the parent
	 *         cannot be loaded, or no path strategy applies
	 */
	private String calculateResourcePath(Resource resource, String desiredPath) throws EndpointNotFoundException {
		if(resource.isRoot()) {
			throw new IllegalStateException("Cannot get path for root resource");
		}
		Resource parent=this.resourceRepository.resourceById(resource.parentId(),Resource.class);
		if(parent==null) {
			throw new IllegalStateException("Could not load resource '"+resource.parentId()+"' from the repository");
		}

		String result=
			parent instanceof Container?
				generatePathForMember(resource,(Container)parent,desiredPath):
				null;
		if(result==null) {
			result = generatePathForAttachment(resource,parent);
			if(result==null) {
				throw new IllegalStateException("Could not determine path for resource '"+resource.id()+"' with parent '"+parent.id()+"'");
			}
		}
		return result;
	}

	/**
	 * Builds the path for a resource attached to its parent, or returns null
	 * when the child is not an attachment of the parent. The path is
	 * parent-endpoint-path / attached-template-path [ / version ], the
	 * version segment being added only when greater than zero.
	 */
	private String generatePathForAttachment(Resource child, Resource parent) throws EndpointNotFoundException {
		Attachment attachment=parent.findAttachment(child.id());
		if(attachment==null) {
			return null;
		}
		Endpoint endpoint=getResourceEndpoint(parent.id());
		ResourceTemplate parentTemplate=this.templateManagementService.templateOfId(parent.id().templateId());
		AttachedTemplate attachedTemplate = parentTemplate.attachedTemplate(attachment.id());
		return
			PathBuilder.
				create().
					addSegment(endpoint.path()).
					addSegment(attachedTemplate.path()).
					addSegment(attachment.version()>0?attachment.version():null).
					build();
	}

	/**
	 * Builds the path for a container member, or returns null when the child
	 * is not a member of the parent container. The path is
	 * parent-endpoint-path / member-path / member-number [ / slug-path ].
	 */
	private String generatePathForMember(Resource child, Container parent, String desiredPath) throws EndpointNotFoundException {
		Member member = parent.findMember(child.id());
		if(member!=null) {
			Endpoint endpoint=getResourceEndpoint(parent.id());
			ContainerTemplate parentTemplate=this.templateManagementService.templateOfId(parent.id().templateId(),ContainerTemplate.class);
			if(parentTemplate==null) {
				throw new IllegalStateException("Could not find template resource '"+parent+"'");
			}
			String slugPath=getSlugPath(parent, desiredPath);
			return
				PathBuilder.
					create().
						addSegment(endpoint.path()).
						addSegment(parentTemplate.memberPath().or("")).
						addSegment(member.number()).
						addSegment(slugPath).
						build();
		}
		return null;
	}

	// Resolves the desired path to a unique slug path, registering a new slug
	// on the container if none exists yet. NOTE(review): addSlug/nextPath
	// mutate the container's slug state — not a pure lookup.
	private String getSlugPath(Container parent, String desiredPath) {
		String slugPath=null;
		if(desiredPath!=null) {
			Slug slug=parent.findSlug(desiredPath);
			if(slug==null) {
				slug=parent.addSlug(desiredPath);
			}
			slugPath=slug.nextPath();
		}
		return slugPath;
	}

	/**
	 * Creates and persists an endpoint for the resource, retrying up to
	 * {@code MAX_ENDPOINT_CREATION_FAILURE} times on path clashes
	 * (signalled by IllegalArgumentException from the repository). After the
	 * first failure the desired path is discarded and a generated one is
	 * used instead.
	 *
	 * @throws EndpointCreationException if the path cannot be calculated or
	 *         every attempt fails
	 */
	private Endpoint createEndpoint(Resource resource, String relativePath, EntityTag entityTag, Date lastModified) throws EndpointCreationException {
		String candidatePath=relativePath;
		int repetitions=0;
		while(repetitions<MAX_ENDPOINT_CREATION_FAILURE) {
			LOGGER.debug("({}) Creating endpoint for {} [{},{},{}]",repetitions,resource.id(),entityTag,lastModified,relativePath);
			try {
				String resourcePath = calculateResourcePath(resource,candidatePath);
				LOGGER.debug("({}) Trying resource path {} ",repetitions,resourcePath);
				Endpoint newEndpoint = this.modelFactory.createEndpoint(resourcePath, resource, lastModified, entityTag);
				this.endpointRepository.add(newEndpoint);
				return newEndpoint;
			} catch (EndpointNotFoundException e) {
				throw new EndpointCreationException("Could not calculate path for resource '"+resource.id()+"'",e);
			} catch (IllegalArgumentException e) {
				LOGGER.debug("Could not create endpoint ({} --> {})",repetitions,candidatePath,e);
				repetitions++;
				candidatePath=null;
			}
		}
		throw new EndpointCreationException("Could not create endpoint for resource '"+resource.id()+"' after "+MAX_ENDPOINT_CREATION_FAILURE+" tries");
	}

	/**
	 * Enforce http://tools.ietf.org/html/rfc7232#section-2.2:
	 * if the clock in the request is ahead of the clock of the origin
	 * server (e.g., I request from Spain the update of a resource held in USA)
	 * the last-modified data should be changed to that of the request and not
	 * a generated date from the origin server
	 */
	private Date getModificationDate(Endpoint endpoint, Date modificationDate) {
		Date result=endpoint.lastModified();
		if(modificationDate.after(result)) {
			result=modificationDate;
		}
		return result;
	}

	/** Registers a listener for endpoint creation/deletion notifications. */
	public void registerEndpointLifecycleListener(EndpointLifecycleListener listener) {
		this.listenerManager.registerListener(listener);
	}

	/** Removes a previously registered endpoint lifecycle listener. */
	public void deregisterEndpointLifecycleListener(EndpointLifecycleListener listener) {
		this.listenerManager.deregisterListener(listener);
	}

	/**
	 * Looks up the endpoint published for a resource.
	 *
	 * @param resourceId identifier of the resource, must not be null
	 * @return the endpoint, never null
	 * @throws EndpointNotFoundException if no endpoint exists for the resource
	 */
	public Endpoint getResourceEndpoint(ResourceId resourceId) throws EndpointNotFoundException {
		checkNotNull(resourceId,RESOURCE_IDENTIFIER_CANNOT_BE_NULL);
		Endpoint endpoint = this.endpointRepository.endpointOfResource(resourceId);
		if(endpoint==null) {
			throw new EndpointNotFoundException(resourceId);
		}
		return endpoint;
	}

	/**
	 * Resolves an endpoint by its path.
	 *
	 * @param path endpoint path, must not be null
	 * @return the endpoint, or null if no endpoint is published at the path
	 */
	public Endpoint resolveEndpoint(String path) {
		checkNotNull(path,PATH_CANNOT_BE_NULL);
		return this.endpointRepository.endpointOfPath(path);
	}

	/**
	 * Creates an endpoint for the resource and notifies registered listeners
	 * of the creation.
	 *
	 * @param resource     the resource to publish, must not be null
	 * @param relativePath client-suggested relative path, may be null
	 * @param entityTag    entity tag of the initial representation, must not be null
	 * @param lastModified initial modification date, must not be null
	 * @return the newly created endpoint
	 * @throws EndpointCreationException if the endpoint cannot be created
	 */
	public Endpoint createEndpointForResource(Resource resource, String relativePath, EntityTag entityTag, Date lastModified) throws EndpointCreationException {
		checkNotNull(resource,RESOURCE_CANNOT_BE_NULL);
		checkNotNull(entityTag,ENTITY_TAG_CANNOT_BE_NULL);
		checkNotNull(lastModified,LAST_MODIFIED_CANNOT_BE_NULL);
		Endpoint newEndpoint = createEndpoint(resource, relativePath, entityTag, lastModified);
		this.listenerManager.notify(new EndpointCreationNotification(newEndpoint));
		return newEndpoint;
	}

	/**
	 * Updates the entity tag and modification date of the endpoint of a
	 * resource. The effective modification date honors RFC 7232 clock
	 * handling (see {@link #getModificationDate(Endpoint, Date)}).
	 *
	 * @throws EndpointNotFoundException if the resource has no endpoint
	 */
	public Endpoint modifyResourceEndpoint(Resource resource, EntityTag entityTag, Date lastModified) throws EndpointNotFoundException {
		checkNotNull(resource,RESOURCE_CANNOT_BE_NULL);
		checkNotNull(entityTag,ENTITY_TAG_CANNOT_BE_NULL);
		checkNotNull(lastModified,LAST_MODIFIED_CANNOT_BE_NULL);
		Endpoint endpoint = this.endpointRepository.endpointOfResource(resource.id());
		if(endpoint==null) {
			throw new EndpointNotFoundException(resource.id());
		}
		endpoint.
			modify(
				entityTag,
				getModificationDate(endpoint,lastModified));
		return endpoint;
	}

	/**
	 * Marks the endpoint of a resource as deleted and notifies registered
	 * listeners of the deletion.
	 *
	 * @throws EndpointNotFoundException if the resource has no endpoint
	 */
	public Endpoint deleteResourceEndpoint(Resource resource, Date deletionDate) throws EndpointNotFoundException {
		checkNotNull(resource,RESOURCE_CANNOT_BE_NULL);
		Endpoint endpoint = this.endpointRepository.endpointOfResource(resource.id());
		if(endpoint==null) {
			throw new EndpointNotFoundException(resource.id());
		}
		endpoint.delete(getModificationDate(endpoint,deletionDate));
		this.listenerManager.notify(new EndpointDeletionNotification(endpoint));
		return endpoint;
	}

	/** @return a builder that wires this service's dependencies */
	public static ServiceBuilder<EndpointManagementService> serviceBuilder() {
		return new EndpointManagementServiceBuilder();
	}

	/** @return a service instance built with default dependency resolution */
	public static EndpointManagementService defaultService() {
		return serviceBuilder().build();
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package javax.json;

import javax.json.spi.JsonProvider;
import javax.json.stream.JsonGenerator;
import javax.json.stream.JsonGeneratorFactory;
import javax.json.stream.JsonParser;
import javax.json.stream.JsonParserFactory;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Collection;
import java.util.Map;

/**
 * Static facade for the JSON Processing API (JSR 353/374).
 *
 * Every method delegates to the {@link JsonProvider} discovered via
 * {@code JsonProvider.provider()}; this class holds no state of its own.
 */
public final class Json {
    private Json() {
        // no-op: utility class, not instantiable
    }

    /** Creates a JSON parser reading from a character stream. */
    public static JsonParser createParser(Reader reader) {
        return JsonProvider.provider().createParser(reader);
    }

    /** Creates a JSON parser reading from a byte stream (encoding auto-detected). */
    public static JsonParser createParser(InputStream in) {
        return JsonProvider.provider().createParser(in);
    }

    /** Creates a JSON generator writing to a character stream. */
    public static JsonGenerator createGenerator(Writer writer) {
        return JsonProvider.provider().createGenerator(writer);
    }

    /** Creates a JSON generator writing to a byte stream. */
    public static JsonGenerator createGenerator(OutputStream out) {
        return JsonProvider.provider().createGenerator(out);
    }

    /** Creates a parser factory configured with the provider-specific map. */
    public static JsonParserFactory createParserFactory(Map<String, ?> config) {
        return JsonProvider.provider().createParserFactory(config);
    }

    /** Creates a generator factory configured with the provider-specific map. */
    public static JsonGeneratorFactory createGeneratorFactory(Map<String, ?> config) {
        return JsonProvider.provider().createGeneratorFactory(config);
    }

    /** Creates a JSON writer targeting a character stream. */
    public static JsonWriter createWriter(Writer writer) {
        return JsonProvider.provider().createWriter(writer);
    }

    /** Creates a JSON writer targeting a byte stream. */
    public static JsonWriter createWriter(OutputStream out) {
        return JsonProvider.provider().createWriter(out);
    }

    /** Creates a JSON reader consuming a character stream. */
    public static JsonReader createReader(Reader reader) {
        return JsonProvider.provider().createReader(reader);
    }

    /** Creates a JSON reader consuming a byte stream (encoding auto-detected). */
    public static JsonReader createReader(InputStream in) {
        return JsonProvider.provider().createReader(in);
    }

    /** Creates a reader factory configured with the provider-specific map. */
    public static JsonReaderFactory createReaderFactory(Map<String, ?> config) {
        return JsonProvider.provider().createReaderFactory(config);
    }

    /** Creates a writer factory configured with the provider-specific map. */
    public static JsonWriterFactory createWriterFactory(Map<String, ?> config) {
        return JsonProvider.provider().createWriterFactory(config);
    }

    /** Creates an empty JSON array builder. */
    public static JsonArrayBuilder createArrayBuilder() {
        return JsonProvider.provider().createArrayBuilder();
    }

    /**
     * Create an empty JsonObjectBuilder
     *
     * @return a new, empty JSON object builder
     * @since 1.0
     */
    public static JsonObjectBuilder createObjectBuilder() {
        return JsonProvider.provider().createObjectBuilder();
    }

    /**
     * Creates a JSON object builder, initialized with the specified JsonObject.
     *
     * @param object the object whose members seed the builder
     * @return a JSON object builder pre-populated from {@code object}
     * @since 1.1
     */
    public static JsonObjectBuilder createObjectBuilder(JsonObject object) {
        return JsonProvider.provider().createObjectBuilder(object);
    }

    /**
     * Creates a JSON object builder, initialized with the specified Map.
     *
     * @param map the map whose entries seed the builder
     * @return a JSON object builder pre-populated from {@code map}
     * @since 1.1
     */
    public static JsonObjectBuilder createObjectBuilder(Map<String, Object> map) {
        return JsonProvider.provider().createObjectBuilder(map);
    }

    /** Creates a builder factory configured with the provider-specific map. */
    public static JsonBuilderFactory createBuilderFactory(Map<String, ?> config) {
        return JsonProvider.provider().createBuilderFactory(config);
    }

    /**
     * Creates a JSON array builder, initialized with an initial content
     * taken from a JsonArray
     *
     * @param initialData the initial array in the builder
     * @return a JSON array builder
     * @since 1.1
     */
    public static JsonArrayBuilder createArrayBuilder(JsonArray initialData) {
        return JsonProvider.provider().createArrayBuilder(initialData);
    }

    /**
     * Creates a JSON array builder, initialized with an initial content
     *
     * @param initialData the initial array in the builder
     * @return a JSON array builder
     * @since 1.1
     */
    public static JsonArrayBuilder createArrayBuilder(Collection<?> initialData) {
        return JsonProvider.provider().createArrayBuilder(initialData);
    }

    /** Wraps a String as a {@link JsonString} value. */
    public static JsonString createValue(String value) {
        return JsonProvider.provider().createValue(value);
    }

    /** Wraps an int as a {@link JsonNumber} value. */
    public static JsonNumber createValue(int value) {
        return JsonProvider.provider().createValue(value);
    }

    /** Wraps a long as a {@link JsonNumber} value. */
    public static JsonNumber createValue(long value) {
        return JsonProvider.provider().createValue(value);
    }

    /** Wraps a double as a {@link JsonNumber} value. */
    public static JsonNumber createValue(double value) {
        return JsonProvider.provider().createValue(value);
    }

    /** Wraps a BigDecimal as a {@link JsonNumber} value. */
    public static JsonNumber createValue(BigDecimal value) {
        return JsonProvider.provider().createValue(value);
    }

    /** Wraps a BigInteger as a {@link JsonNumber} value. */
    public static JsonNumber createValue(BigInteger value) {
        return JsonProvider.provider().createValue(value);
    }

    /**
     * Create a {@link JsonPatch} as defined in
     * <a href="https://tools.ietf.org/html/rfc6902">RFC-6902</a>.
     *
     * @param array with the patch operations
     * @return the JsonPatch based on the given operations
     * @since 1.1
     */
    public static JsonPatch createPatch(JsonArray array) {
        return JsonProvider.provider().createPatch(array);
    }

    /**
     * Create a {@link JsonPatch} by comparing the source to the target as defined in
     * <a href="https://tools.ietf.org/html/rfc6902">RFC-6902</a>.
     * <p>
     * Applying this {@link JsonPatch} to the source you will give you the target.
     *
     * @param source the structure to diff from
     * @param target the structure to diff to
     * @return a patch that transforms {@code source} into {@code target}
     * @see #createPatch(JsonArray)
     * @since 1.1
     */
    public static JsonPatch createDiff(JsonStructure source, JsonStructure target) {
        return JsonProvider.provider().createDiff(source, target);
    }

    /**
     * Create a new JsonPatchBuilder
     *
     * @return a new, empty patch builder
     * @since 1.1
     */
    public static JsonPatchBuilder createPatchBuilder() {
        return JsonProvider.provider().createPatchBuilder();
    }

    /**
     * Create a new JsonPatchBuilder
     *
     * @param initialData the initial patch operations
     * @return a patch builder seeded with the given operations
     * @since 1.1
     */
    public static JsonPatchBuilder createPatchBuilder(JsonArray initialData) {
        return JsonProvider.provider().createPatchBuilder(initialData);
    }

    /**
     * Creates JSON Merge Patch (<a href="http://tools.ietf.org/html/rfc7396">RFC 7396</a>)
     * from a specified {@link JsonValue}.
     * Create a merged patch by comparing the source to the target.
     * <p>
     * Applying this JsonPatch to the source will give you the target.
     * <p>
     * If you have a JSON like
     * <pre>
     * {
     *     "a": "b",
     *     "c": {
     *         "d": "e",
     *         "f": "g"
     *     }
     * }
     * </pre>
     * <p>
     * Then you can change the value of "a" and removing "f" by sending:
     * <pre>
     * {
     *     "a":"z",
     *     "c": {
     *         "f": null
     *     }
     * }
     * </pre>
     *
     * @param patch the patch
     * @return a JSON Merge Patch
     * @since 1.1
     */
    public static JsonMergePatch createMergePatch(JsonValue patch) {
        return JsonProvider.provider().createMergePatch(patch);
    }

    /**
     * Create a JSON Merge Patch (<a href="http://tools.ietf.org/html/rfc7396">RFC 7396</a>)
     * from the source and target {@link JsonValue JsonValues}.
     * Create a merged patch by comparing the source to the target.
     * <p>
     * Applying this JsonPatch to the source will give you the target.
     * <p>
     * If you have a JSON like
     * <pre>
     * {
     *     "a": "b",
     *     "c": {
     *         "d": "e",
     *         "f": "g"
     *     }
     * }
     * </pre>
     * <p>
     * Then you can change the value of "a" and removing "f" by sending:
     * <pre>
     * {
     *     "a":"z",
     *     "c": {
     *         "f": null
     *     }
     * }
     * </pre>
     *
     * @param source the source
     * @param target the target
     * @return a JSON Merge Patch
     * @since 1.1
     */
    public static JsonMergePatch createMergeDiff(JsonValue source, JsonValue target) {
        return JsonProvider.provider().createMergeDiff(source, target);
    }

    /**
     * Create a {@link JsonPointer} for the given path
     *
     * @param path a JSON Pointer string per RFC 6901
     * @return a pointer addressing the given path
     * @since 1.1
     */
    public static JsonPointer createPointer(String path) {
        return JsonProvider.provider().createPointer(path);
    }

    /**
     * Escapes a string for use as a JSON Pointer reference token
     * (RFC 6901 section 3): "~" becomes "~0", "/" becomes "~1".
     * Note: "~" must be escaped first so the "~" introduced by "~1"
     * is not re-escaped.
     *
     * @param pointer to encode
     * @return the properly encoded JsonPointer string
     * @since 1.1
     */
    public static String encodePointer(String pointer) {
        if (pointer == null || pointer.length() == 0) {
            return pointer;
        }

        return pointer.replace("~", "~0").replace("/", "~1");
    }

    /**
     * Reverses {@link #encodePointer(String)}: "~1" becomes "/" and
     * "~0" becomes "~". Note: "~1" must be decoded first per RFC 6901,
     * otherwise "~01" would incorrectly decode to "/".
     *
     * @param escapedPointer the encoded reference token
     * @return the de-escaped JsonPointer
     *
     * @since 1.1
     */
    public static String decodePointer(String escapedPointer) {
        if (escapedPointer == null || escapedPointer.length() == 0) {
            return escapedPointer;
        }

        return escapedPointer.replace("~1", "/").replace("~0", "~");
    }
}
/*
 * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package boofcv.alg.sfm.d2;

import boofcv.abst.feature.tracker.PointTrack;
import boofcv.abst.feature.tracker.PointTracker;
import boofcv.struct.geo.AssociatedPair;
import boofcv.struct.image.ImageBase;
import georegression.struct.InvertibleTransform;
import org.ddogleg.fitting.modelset.ModelFitter;
import org.ddogleg.fitting.modelset.ModelMatcher;

import java.util.ArrayList;
import java.util.List;

/**
 * Computes the transform from the first image in a sequence to the current frame. Keyframe based algorithm.
 * Whenever a new keyframe is selected by the user all tracks are dropped and new ones spawned. No logic is
 * contained for selecting key frames and relies on the user for selecting them.
 *
 * @author Peter Abeles
 * @param <I> Input image type
 * @param <IT> Motion model data type
 */
@SuppressWarnings("unchecked")
public class ImageMotionPointTrackerKey<I extends ImageBase, IT extends InvertibleTransform> {

	// total number of frames processed
	protected int totalFramesProcessed = 0;
	// feature tracker
	protected PointTracker<I> tracker;
	// Fits a model to the tracked features
	protected ModelMatcher<IT,AssociatedPair> modelMatcher;
	// Refines the model using the complete inlier set
	protected ModelFitter<IT,AssociatedPair> modelRefiner;

	// transform from the world frame to the key frame
	protected IT worldToKey;
	// transform from key frame to current frame
	protected IT keyToCurr;
	// transform from world to current frame
	protected IT worldToCurr;

	// tracks which are not in the inlier set for this many frames in a row are pruned
	protected int outlierPrune;

	// if the current frame is a keyframe or not
	protected boolean keyFrame;

	/**
	 * Specify algorithms to use internally.  Each of these classes must work with
	 * compatible data structures.
	 *
	 * @param tracker feature tracker
	 * @param modelMatcher Fits model to track data
	 * @param modelRefiner (Optional) Refines the found model using the entire inlier set. Can be null.
	 * @param model Motion model data structure
	 * @param outlierPrune If a track is an outlier for this many frames in a row they are pruned
	 */
	public ImageMotionPointTrackerKey(PointTracker<I> tracker,
									  ModelMatcher<IT, AssociatedPair> modelMatcher,
									  ModelFitter<IT, AssociatedPair> modelRefiner,
									  IT model,
									  int outlierPrune)
	{
		this.tracker = tracker;
		this.modelMatcher = modelMatcher;
		this.modelRefiner = modelRefiner;
		this.outlierPrune = outlierPrune;

		// the model instance is used purely as a factory for the three transforms
		worldToKey = (IT)model.createInstance();
		keyToCurr = (IT)model.createInstance();
		worldToCurr = (IT)model.createInstance();
	}

	// No-arg constructor for subclasses; fields must be populated by the caller.
	protected ImageMotionPointTrackerKey() {
	}

	/**
	 * Makes the current frame the first frame and discards its past history
	 */
	public void reset() {
		totalFramesProcessed = 0;
		tracker.dropAllTracks();
		resetTransforms();
	}

	/**
	 * Processes the next frame in the sequence.
	 *
	 * @param frame Next frame in the video sequence
	 * @return true if motion was estimated and false if no motion was estimated
	 */
	public boolean process( I frame ) {
		keyFrame = false;

		// update the feature tracker
		tracker.process(frame);
		totalFramesProcessed++;

		List<PointTrack> tracks = tracker.getActiveTracks(null);

		// no active tracks means no motion can be estimated
		if( tracks.isEmpty() )
			return false;

		// presized: one pair per active track, avoids regrowth each frame
		List<AssociatedPair> pairs = new ArrayList<AssociatedPair>(tracks.size());
		for( PointTrack t : tracks ) {
			pairs.add((AssociatedPair)t.getCookie());
		}

		// fit the motion model to the feature tracks
		if( !modelMatcher.process((List)pairs) ) {
			return false;
		}

		// refine the estimate on the full inlier set when a refiner was supplied;
		// otherwise take the robust estimate as-is
		if( modelRefiner != null ) {
			if( !modelRefiner.fitModel(modelMatcher.getMatchSet(),modelMatcher.getModelParameters(),keyToCurr) )
				return false;
		} else {
			keyToCurr.set(modelMatcher.getModelParameters());
		}

		// mark that the track is in the inlier set
		for( AssociatedPair p : modelMatcher.getMatchSet() ) {
			((AssociatedPairTrack)p).lastUsed = totalFramesProcessed;
		}

		// prune tracks which aren't being used
		pruneUnusedTracks();

		// Update the motion: worldToCurr = worldToKey * keyToCurr
		worldToKey.concat(keyToCurr, worldToCurr);

		return true;
	}

	// Drops every track that has been outside the inlier set for at least
	// outlierPrune consecutive frames.
	private void pruneUnusedTracks() {
		List<PointTrack> all = tracker.getAllTracks(null);
		for( PointTrack t : all ) {
			AssociatedPairTrack p = t.getCookie();

			if( totalFramesProcessed - p.lastUsed >= outlierPrune) {
				if( !tracker.dropTrack(t) )
					throw new RuntimeException("Drop track failed. Must be a bug in the tracker");
			}
		}
	}

	/**
	 * Change the current frame into the keyframe.  p1 location of existing tracks is set to
	 * their current location and new tracks are spawned.  Reference frame transformations are also updated
	 */
	public void changeKeyFrame() {
		// drop all inactive tracks since their location is unknown in the current frame
		List<PointTrack> inactive = tracker.getInactiveTracks(null);
		for( PointTrack l : inactive ) {
			tracker.dropTrack(l);
		}

		// set the keyframe for active tracks as their current location
		List<PointTrack> active = tracker.getActiveTracks(null);
		for( PointTrack l : active ) {
			AssociatedPairTrack p = l.getCookie();
			p.p1.set(l);
			p.lastUsed = totalFramesProcessed;
		}

		// spawn fresh tracks and attach a cookie to any that do not yet have one
		tracker.spawnTracks();
		List<PointTrack> spawned = tracker.getNewTracks(null);
		for( PointTrack l : spawned ) {
			AssociatedPairTrack p = l.getCookie();
			if( p == null ) {
				l.cookie = p = new AssociatedPairTrack();
				// little bit of trickery here.  Save the reference so that the point
				// in the current frame is updated for free as PointTrack is
				p.p2 = l;
			}
			p.p1.set(l);
			p.lastUsed = totalFramesProcessed;
		}

		// the current frame becomes the key frame
		worldToKey.set(worldToCurr);
		keyToCurr.reset();
		keyFrame = true;
	}

	/** Sets all three reference-frame transforms back to identity. */
	public void resetTransforms() {
		worldToCurr.reset();
		worldToKey.reset();
		keyToCurr.reset();
	}

	public IT getWorldToCurr() {
		return worldToCurr;
	}

	public IT getWorldToKey() {
		return worldToKey;
	}

	public IT getKeyToCurr() {
		return keyToCurr;
	}

	public PointTracker<I> getTracker() {
		return tracker;
	}

	public ModelMatcher<IT, AssociatedPair> getModelMatcher() {
		return modelMatcher;
	}

	public int getTotalFramesProcessed() {
		return totalFramesProcessed;
	}

	public boolean isKeyFrame() {
		return keyFrame;
	}

	public Class<IT> getModelType() {
		// the concrete class of any of the transform instances
		return (Class<IT>)keyToCurr.getClass();
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gobblin.source.jdbc; import java.io.IOException; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import lombok.extern.slf4j.Slf4j; import static com.google.common.base.Strings.isNullOrEmpty; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.collect.ImmutableMap; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import gobblin.configuration.ConfigurationKeys; import gobblin.configuration.WorkUnitState; import gobblin.source.extractor.DataRecordException; import gobblin.source.extractor.exception.HighWatermarkException; import gobblin.source.extractor.exception.RecordCountException; import gobblin.source.extractor.exception.SchemaException; import gobblin.source.extractor.extract.Command; import gobblin.source.extractor.extract.CommandOutput; import gobblin.source.extractor.schema.Schema; import gobblin.source.extractor.utils.Utils; import gobblin.source.extractor.watermark.Predicate; import gobblin.source.extractor.watermark.WatermarkType; import 
gobblin.source.workunit.WorkUnit; /** * Teradata extractor using JDBC protocol * * @author ypopov */ @Slf4j public class TeradataExtractor extends JdbcExtractor { private static final String TERADATA_TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:ss"; private static final String TERADATA_DATE_FORMAT = "yyyy-MM-dd"; private static final String TERADATA_HOUR_FORMAT = "HH"; private static final long SAMPLE_RECORD_COUNT = -1; private static final String ELEMENT_DATA_TYPE = "string"; private static final String TERADATA_SAMPLE_CLAUSE = " sample "; private static final Gson gson = new Gson(); public TeradataExtractor(WorkUnitState workUnitState) { super(workUnitState); } @Override public List<Command> getSchemaMetadata(String schema, String entity) throws SchemaException { log.debug("Build query to get schema"); List<Command> commands = new ArrayList<Command>(); String inputQuery = this.workUnit.getProp(ConfigurationKeys.SOURCE_QUERYBASED_QUERY); String metadataSql, predicate = "1=0"; if(isNullOrEmpty(inputQuery)) { metadataSql = "select * from " + schema + "." 
+ entity; } else { metadataSql = this.removeSampleClauseFromQuery(inputQuery); } metadataSql = SqlQueryUtils.addPredicate(metadataSql, predicate); commands.add(JdbcExtractor.getCommand(metadataSql, JdbcCommand.JdbcCommandType.QUERY)); return commands; } @Override public List<Command> getHighWatermarkMetadata(String schema, String entity, String watermarkColumn, List<Predicate> predicateList) throws HighWatermarkException { log.debug("Build query to get high watermark"); List<Command> commands = new ArrayList<Command>(); String columnProjection = "max(" + Utils.getCoalesceColumnNames(watermarkColumn) + ")"; String watermarkFilter = this.concatPredicates(predicateList); String query = this.getExtractSql(); if (isNullOrEmpty(watermarkFilter)) { watermarkFilter = "1=1"; } query = query.replace(this.getOutputColumnProjection(), columnProjection) .replace(ConfigurationKeys.DEFAULT_SOURCE_QUERYBASED_WATERMARK_PREDICATE_SYMBOL, watermarkFilter); commands.add(JdbcExtractor.getCommand(query, JdbcCommand.JdbcCommandType.QUERY)); return commands; } @Override public List<Command> getCountMetadata(String schema, String entity, WorkUnit workUnit, List<Predicate> predicateList) throws RecordCountException { log.debug("Build query to get source record count"); List<Command> commands = new ArrayList<Command>(); String columnProjection = "CAST(COUNT(1) AS BIGINT)"; String watermarkFilter = this.concatPredicates(predicateList); String query = this.getExtractSql(); if (isNullOrEmpty(watermarkFilter)) { watermarkFilter = "1=1"; } query = query.replace(this.getOutputColumnProjection(), columnProjection) .replace(ConfigurationKeys.DEFAULT_SOURCE_QUERYBASED_WATERMARK_PREDICATE_SYMBOL, watermarkFilter); String sampleFilter = this.constructSampleClause(); query = query + sampleFilter; if (!isNullOrEmpty(sampleFilter)) { query = "SELECT " + columnProjection + " FROM (" + query.replace(columnProjection, "1 as t") + ") temp"; } commands.add(JdbcExtractor.getCommand(query, 
JdbcCommand.JdbcCommandType.QUERY)); return commands; } @Override public List<Command> getDataMetadata(String schema, String entity, WorkUnit workUnit, List<Predicate> predicateList) throws DataRecordException { log.debug("Build query to extract data"); List<Command> commands = new ArrayList<Command>(); int fetchSize = this.workUnitState.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_JDBC_RESULTSET_FETCH_SIZE, ConfigurationKeys.DEFAULT_SOURCE_QUERYBASED_JDBC_RESULTSET_FETCH_SIZE); String watermarkFilter = this.concatPredicates(predicateList); String query = this.getExtractSql(); if (isNullOrEmpty(watermarkFilter)) { watermarkFilter = "1=1"; } query = query.replace(ConfigurationKeys.DEFAULT_SOURCE_QUERYBASED_WATERMARK_PREDICATE_SYMBOL, watermarkFilter); String sampleFilter = this.constructSampleClause(); query = query + sampleFilter; commands.add(JdbcExtractor.getCommand(query, JdbcCommand.JdbcCommandType.QUERY)); commands.add(JdbcExtractor.getCommand(fetchSize, JdbcCommand.JdbcCommandType.FETCHSIZE)); return commands; } @Override public Map<String, String> getDataTypeMap() { Map<String, String> dataTypeMap = ImmutableMap.<String, String>builder() .put("byteint", "int") .put("smallint", "int") .put("integer", "int") .put("bigint", "long") .put("float", "float") .put("decimal", "double") .put("char", "string") .put("varchar", "string") .put("byte", "bytes") .put("varbyte", "bytes") .put("date", "date") .put("time", "time") .put("timestamp", "timestamp") .put("clob", "string") .put("blob", "string") .put("structured udt", "array") .put("double precision", "float") .put("numeric", "double") .put("real", "float") .put("character", "string") .put("char varying", "string") .put("character varying", "string") .put("long varchar", "string") .put("interval", "string") .build(); return dataTypeMap; } @Override public Iterator<JsonElement> getRecordSetFromSourceApi(String schema, String entity, WorkUnit workUnit, List<Predicate> predicateList) throws IOException { return 
null; } @Override public String getConnectionUrl() { String urlPrefix = "jdbc:teradata://"; String host = this.workUnit.getProp(ConfigurationKeys.SOURCE_CONN_HOST_NAME); checkArgument(!isNullOrEmpty(host), "Connectionn host cannot be null or empty at %s", ConfigurationKeys.SOURCE_CONN_HOST_NAME); String port = this.workUnit.getProp(ConfigurationKeys.SOURCE_CONN_PORT,"1025"); String database = this.workUnit.getProp(ConfigurationKeys.SOURCE_QUERYBASED_SCHEMA); String defaultUrl = urlPrefix + host.trim() + "/TYPE=FASTEXPORT,DATABASE=" + database.trim() + ",DBS_PORT=" + port.trim() ; // use custom url from source.conn.host if Teradata jdbc url available return host.contains(urlPrefix) ? host.trim() : defaultUrl; } @Override public long exractSampleRecordCountFromQuery(String query) { if (isNullOrEmpty(query)) { return SAMPLE_RECORD_COUNT; } long recordcount = SAMPLE_RECORD_COUNT; String limit = null; String inputQuery = query.toLowerCase(); int limitIndex = inputQuery.indexOf(TERADATA_SAMPLE_CLAUSE); if (limitIndex > 0) { limit = query.substring(limitIndex + TERADATA_SAMPLE_CLAUSE.length()).trim(); } if (!isNullOrEmpty(limit)) { try { recordcount = Long.parseLong(limit); } catch (Exception e) { log.error("Ignoring incorrect limit value in input query: {}", limit); } } return recordcount; } @Override public String removeSampleClauseFromQuery(String query) { if (isNullOrEmpty(query)) { return null; } String limitString = ""; String inputQuery = query.toLowerCase(); int limitIndex = inputQuery.indexOf(TERADATA_SAMPLE_CLAUSE); if (limitIndex > 0) { limitString = query.substring(limitIndex); } return query.replace(limitString, ""); } @Override public String constructSampleClause() { long sampleRowCount = this.getSampleRecordCount(); if (sampleRowCount >= 0) { return TERADATA_SAMPLE_CLAUSE + sampleRowCount; } return ""; } @Override public String getWatermarkSourceFormat(WatermarkType watermarkType) { String columnFormat = null; switch (watermarkType) { case TIMESTAMP: 
columnFormat = TERADATA_TIMESTAMP_FORMAT; break; case DATE: columnFormat = TERADATA_DATE_FORMAT; break; case HOUR: columnFormat = TERADATA_HOUR_FORMAT; break; case SIMPLE: break; default: log.error("Watermark type {} not recognized", watermarkType.toString()); } return columnFormat; } @Override public String getHourPredicateCondition(String column, long value, String valueFormat, String operator) { log.debug("Getting hour predicate for Teradata"); String formattedvalue = Utils.toDateTimeFormat(Long.toString(value), valueFormat, TERADATA_HOUR_FORMAT); return Utils.getCoalesceColumnNames(column) + " " + operator + " '" + formattedvalue + "'"; } @Override public String getDatePredicateCondition(String column, long value, String valueFormat, String operator) { log.debug("Getting date predicate for Teradata"); String formattedvalue = Utils.toDateTimeFormat(Long.toString(value), valueFormat, TERADATA_DATE_FORMAT); return Utils.getCoalesceColumnNames(column) + " " + operator + " '" + formattedvalue + "'"; } @Override public String getTimestampPredicateCondition(String column, long value, String valueFormat, String operator) { log.debug("Getting timestamp predicate for Teradata"); String formattedvalue = Utils.toDateTimeFormat(Long.toString(value), valueFormat, TERADATA_TIMESTAMP_FORMAT); return Utils.getCoalesceColumnNames(column) + " " + operator + " '" + formattedvalue + "'"; } @Override public JsonArray getSchema(CommandOutput<?, ?> response) throws SchemaException, IOException { log.debug("Extract schema from resultset"); ResultSet resultset = null; Iterator<ResultSet> itr = (Iterator<ResultSet>) response.getResults().values().iterator(); if (itr.hasNext()) { resultset = itr.next(); } else { throw new SchemaException("Failed to get schema from Teradata - empty schema resultset"); } JsonArray fieldJsonArray = new JsonArray(); try { Schema schema = new Schema(); ResultSetMetaData rsmd = resultset.getMetaData(); String columnName, columnTypeName; for (int i = 1; i <= 
rsmd.getColumnCount(); i++) { columnName = rsmd.getColumnName(i); columnTypeName = rsmd.getColumnTypeName(i); schema.setColumnName(columnName); List<String> mapSymbols = null; JsonObject newDataType = this.convertDataType(columnName, columnTypeName, ELEMENT_DATA_TYPE, mapSymbols); schema.setDataType(newDataType); schema.setLength(rsmd.getColumnDisplaySize(i)); schema.setPrecision(rsmd.getPrecision(i)); schema.setScale(rsmd.getScale(i)); schema.setNullable(rsmd.isNullable(i) == ResultSetMetaData.columnNullable); schema.setComment(rsmd.getColumnLabel(i)); String jsonStr = gson.toJson(schema); JsonObject obj = gson.fromJson(jsonStr, JsonObject.class).getAsJsonObject(); fieldJsonArray.add(obj); } } catch (Exception e) { throw new SchemaException("Failed to get schema from Teradaa; error - " + e.getMessage(), e); } return fieldJsonArray; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security.authorization.principalbased.impl; import com.google.common.collect.ImmutableSet; import org.apache.jackrabbit.api.security.principal.ItemBasedPrincipal; import org.apache.jackrabbit.api.security.user.User; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.namepath.NamePathMapper; import org.apache.jackrabbit.oak.spi.security.SecurityProvider; import org.apache.jackrabbit.oak.spi.security.authorization.principalbased.Filter; import org.apache.jackrabbit.oak.spi.security.authorization.principalbased.FilterProvider; import org.apache.jackrabbit.oak.spi.security.principal.AdminPrincipal; import org.apache.jackrabbit.oak.spi.security.principal.PrincipalConfiguration; import org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider; import org.apache.jackrabbit.oak.spi.security.principal.SystemUserPrincipal; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.Before; import org.junit.Test; import javax.jcr.RepositoryException; import java.security.Principal; import java.util.Collections; import static org.junit.Assert.assertEquals; import static 
org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Tests for the principal-based authorization {@code Filter}: which principal sets it can
 * handle (only item-based system-user principals located below the configured filter root)
 * and how principals/paths are resolved and cached.
 */
public class FilterImplTest extends AbstractPrincipalBasedTest {

    private Filter filter;
    private String supportedPath;

    @Before
    public void before() throws Exception {
        super.before();
        FilterProvider fp = getFilterProvider();
        filter = fp.getFilter(getSecurityProvider(), root, getNamePathMapper());
        supportedPath = fp.getFilterRoot();
    }

    @Test
    public void testCanHandleEmptySet() throws Exception {
        assertFalse(filter.canHandle(Collections.emptySet()));
    }

    @Test
    public void testCanHandleGroupPrincipal() throws Exception {
        assertFalse(filter.canHandle(Collections.singleton(getUserManager(root).createGroup("group").getPrincipal())));
    }

    @Test
    public void testCanHandleUserPrincipal() throws Exception {
        assertFalse(filter.canHandle(Collections.singleton(getTestUser().getPrincipal())));
    }

    @Test
    public void testCanHandleUnknownSystemUserPrincipal() {
        // SystemUserPrincipal lambda that is not backed by any repository user
        SystemUserPrincipal principal = () -> "systemUserPrincipal";
        assertFalse(filter.canHandle(Collections.singleton(principal)));
    }

    @Test
    public void testCanHandleRandomSystemUserPrincipal() throws Exception {
        // a real system user, but created outside the supported path
        Principal principal = getUserManager(root).createSystemUser("anySystemUser", null).getPrincipal();
        assertFalse(filter.canHandle(Collections.singleton(principal)));
    }

    @Test
    public void testCanHandleValidSystemUserPrincipal() throws Exception {
        assertTrue(filter.canHandle(Collections.singleton(getTestSystemUser().getPrincipal())));
    }

    @Test
    public void testCanHandleValidSystemUserPrincipal2() throws Exception {
        Principal principal = getTestSystemUser().getPrincipal();
        // same name as a valid principal, but presented as a plain SystemUserPrincipal lambda
        assertTrue(filter.canHandle(Collections.singleton((SystemUserPrincipal) () -> principal.getName())));
    }

    @Test
    public void testCanHandleWrongPrincipalClass() throws Exception {
        Principal principal = getTestSystemUser().getPrincipal();
        // matching name is not sufficient: the principal must be a SystemUserPrincipal
        assertFalse(filter.canHandle(Collections.singleton((AdminPrincipal) () -> principal.getName())));
        assertFalse(filter.canHandle(Collections.singleton((new ItemBasedPrincipal() {
            @Override
            public String getPath() throws RepositoryException {
                return ((ItemBasedPrincipal) principal).getPath();
            }
            @Override
            public String getName() {
                return principal.getName();
            }
        }))));
    }

    @Test
    public void testCanHandleItemBasedSystemUserPrincipalUnsupportedPath() throws Exception {
        // make sure supported path exists
        User tu = getTestSystemUser();
        assertTrue(root.getTree(supportedPath).exists());
        // principal located at the parent of the filter root is outside the supported scope
        Principal principal = new TestPrincipal("name", PathUtils.getParentPath(supportedPath));
        assertFalse(filter.canHandle(Collections.singleton(principal)));
    }

    // NOTE(review): disabled test kept for reference — presumably an item-based principal
    // below the supported path should be handled; confirm intent before re-enabling.
    // @Test
    // public void testCanHandleItemBasedSystemUserPrincipalSupportedPath() {
    // Principal principal = new TestPrincipal("name", PathUtils.concat(supportedPath, "oak:path/to/oak:principal"));
    // assertTrue(filter.canHandle(Collections.singleton(principal)));
    // }

    @Test
    public void testCanHandleGetPathThrows() {
        // TestPrincipal with null path throws RepositoryException from getPath()
        Principal principal = new TestPrincipal("name", null);
        assertFalse(filter.canHandle(Collections.singleton(principal)));
    }

    /**
     * Test that the filter can deal with principals that have been accessed with a different {@code NamePathMapper}.
     * This might actually occur with {@code AbstractAccessControlManager#hasPrivilege} and {@code AbstractAccessControlManager#getPrivileges},
     * when a {@link PermissionProvider} is built from the principal set passed to the Jackrabbit API methods (and not from
     * principals obtained on the system level when populating the {@code Subject}).
     */
    @Test
    public void testCanHandlePathMapperMismatch() throws Exception {
        Principal principal = getTestSystemUser().getPrincipal();
        // create filter with a different NamePathMapper than was used to build the principal
        Filter f = getFilterProvider().getFilter(getSecurityProvider(), root, NamePathMapper.DEFAULT);
        assertTrue(f.canHandle(Collections.singleton(principal)));
    }

    @Test
    public void testCanHandlePathMapperMismatchUnknownPrincipal() throws Exception {
        Principal principal = new TestPrincipal("name", PathUtils.concat(supportedPath, "oak:path/to/oak:principal"));
        // create filter with a different NamePathMapper than was used to build the principal
        // since the principal is not known to the PrincipalManager, the extra lookup doesn't reveal a valid principal.
        Filter f = getFilterProvider().getFilter(getSecurityProvider(), root, NamePathMapper.DEFAULT);
        assertFalse(f.canHandle(Collections.singleton(principal)));
    }

    @Test
    public void testCanHandleCombination() throws Exception {
        // mixed set: one unsupported principal makes the whole set unhandled
        assertFalse(filter.canHandle(ImmutableSet.of(getTestSystemUser().getPrincipal(), getTestUser().getPrincipal())));
    }

    @Test
    public void testCanHandlePopulatesCache() throws Exception {
        Principal principal = getTestSystemUser().getPrincipal();
        PrincipalProvider pp = when(mock(PrincipalProvider.class).getPrincipal(principal.getName())).thenReturn(principal).getMock();
        PrincipalConfiguration pc = when(mock(PrincipalConfiguration.class).getPrincipalProvider(root, getNamePathMapper())).thenReturn(pp).getMock();
        SecurityProvider sp = when(mock(SecurityProvider.class).getConfiguration(PrincipalConfiguration.class)).thenReturn(pc).getMock();
        Filter filter = getFilterProvider().getFilter(sp, root, getNamePathMapper());
        // call 'canHandle' twice
        assertTrue(filter.canHandle(Collections.singleton((SystemUserPrincipal) () -> principal.getName())));
        assertTrue(filter.canHandle(Collections.singleton((SystemUserPrincipal) () -> principal.getName())));
        // principalprovider must only be hit once
        verify(pp, times(1)).getPrincipal(principal.getName());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetPathUserPrincipal() throws Exception {
        filter.getOakPath(getTestUser().getPrincipal());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetPathInvalidSystemUserPrincipal() throws Exception {
        filter.getOakPath((SystemUserPrincipal) () -> "name");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testGetPathValidSystemUserPrincipalNotValidated() throws Exception {
        // getOakPath requires a prior canHandle/getValidPrincipal call to validate the principal
        filter.getOakPath(getTestSystemUser().getPrincipal());
    }

    @Test
    public void testGetPathValidatedSystemUserPrincipal() throws Exception {
        ItemBasedPrincipal principal = (ItemBasedPrincipal) getTestSystemUser().getPrincipal();
        filter.canHandle(Collections.singleton(principal));
        // getOakPath returns the oak path, not the jcr path
        assertNotEquals(principal.getPath(), filter.getOakPath(principal));
        assertEquals(getNamePathMapper().getOakPath(principal.getPath()), filter.getOakPath(principal));
    }

    @Test
    public void testGetPathAfterGetValidUserPrincipal() throws Exception {
        ItemBasedPrincipal principal = (ItemBasedPrincipal) getTestSystemUser().getPrincipal();
        // getValidPrincipal also validates, like canHandle does
        filter.getValidPrincipal(getNamePathMapper().getOakPath(principal.getPath()));
        assertNotEquals(principal.getPath(), filter.getOakPath(principal));
        assertEquals(getNamePathMapper().getOakPath(principal.getPath()), filter.getOakPath(principal));
    }

    @Test
    public void testGetPrincipalUserPath() throws Exception {
        assertNull(filter.getValidPrincipal(getNamePathMapper().getOakPath(getTestUser().getPath())));
    }

    @Test
    public void testGetPrincipalJcrPath() throws Exception {
        // jcr path (not oak path) must not resolve
        assertNull(filter.getValidPrincipal(getTestSystemUser().getPath()));
    }

    @Test
    public void testGetPrincipalSystemUserPath() throws Exception {
        User user = getTestSystemUser();
        Principal principal = user.getPrincipal();
        assertEquals(principal, filter.getValidPrincipal(getNamePathMapper().getOakPath(user.getPath())));
    }

    @Test
    public void testGetPrincipalSupportedRootPath() {
        // the filter root itself is not a principal location
        assertNull(filter.getValidPrincipal(supportedPath));
    }

    @Test
    public void testGetPrincipalMockedItemBasedProvider() throws Exception {
        ItemBasedPrincipal principal = (ItemBasedPrincipal) getTestSystemUser().getPrincipal();
        String oakPath = getNamePathMapper().getOakPath(principal.getPath());
        PrincipalProvider pp = when(mock(PrincipalProvider.class).getItemBasedPrincipal(oakPath)).thenReturn(principal).getMock();
        PrincipalConfiguration pc = when(mock(PrincipalConfiguration.class).getPrincipalProvider(root, getNamePathMapper())).thenReturn(pp).getMock();
        SecurityProvider sp = when(mock(SecurityProvider.class).getConfiguration(PrincipalConfiguration.class)).thenReturn(pc).getMock();
        Filter filter = getFilterProvider().getFilter(sp, root, getNamePathMapper());
        // call 'getValidPrincipal' twice
        Principal p = filter.getValidPrincipal(oakPath);
        assertEquals(principal, p);
        assertEquals(principal.getPath(), ((ItemBasedPrincipal) p).getPath());
        assertEquals(principal, filter.getValidPrincipal(oakPath));
        // path-based lookups are NOT cached (unlike name-based ones above)
        verify(pp, times(2)).getItemBasedPrincipal(oakPath);
        verify(pp, never()).getPrincipal(principal.getName());
    }

    @Test
    public void testGetPrincipalMockedPrincipalProvider() throws Exception {
        ItemBasedPrincipal principal = (ItemBasedPrincipal) getTestSystemUser().getPrincipal();
        String oakPath = getNamePathMapper().getOakPath(principal.getPath());
        // provider returns null for every lookup
        PrincipalProvider pp = mock(PrincipalProvider.class);
        PrincipalConfiguration pc = when(mock(PrincipalConfiguration.class).getPrincipalProvider(root, getNamePathMapper())).thenReturn(pp).getMock();
        SecurityProvider sp = when(mock(SecurityProvider.class).getConfiguration(PrincipalConfiguration.class)).thenReturn(pc).getMock();
        Filter filter = getFilterProvider().getFilter(sp, root, getNamePathMapper());
        assertNull(filter.getValidPrincipal(oakPath));
        verify(pp, never()).getPrincipal(principal.getName());
    }

    /**
     * Test principal that is both item-based and a system-user principal; a null oak path
     * makes {@link #getPath()} throw, exercising the filter's error handling.
     */
    private final class TestPrincipal implements SystemUserPrincipal, ItemBasedPrincipal {

        private final String jcrPath;
        private final String name;

        private TestPrincipal(@NotNull String name, @Nullable String oakPath) {
            if (oakPath == null) {
                jcrPath = null;
            } else {
                jcrPath = getNamePathMapper().getJcrPath(oakPath);
                assertNotNull(jcrPath);
            }
            this.name = name;
        }

        @Override
        public String getPath() throws RepositoryException {
            if (jcrPath != null) {
                return jcrPath;
            } else {
                throw new RepositoryException();
            }
        }

        @Override
        public String getName() {
            return name;
        }
    }
}
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.channel.socket.oio;

import io.netty.buffer.ByteBuf;
import io.netty.channel.Channel;
import io.netty.channel.ChannelException;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelPromise;
import io.netty.channel.ConnectTimeoutException;
import io.netty.channel.EventLoop;
import io.netty.channel.oio.OioByteStreamChannel;
import io.netty.channel.socket.ServerSocketChannel;
import io.netty.channel.socket.SocketChannel;
import io.netty.util.internal.SocketUtils;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketTimeoutException;

/**
 * A {@link SocketChannel} which is using Old-Blocking-IO
 */
public class OioSocketChannel extends OioByteStreamChannel
                              implements SocketChannel {

    private static final InternalLogger logger =
            InternalLoggerFactory.getInstance(OioSocketChannel.class);

    private final Socket socket;
    private final OioSocketChannelConfig config;

    /**
     * Create a new instance with an new {@link Socket}
     */
    public OioSocketChannel() {
        this(new Socket());
    }

    /**
     * Create a new instance from the given {@link Socket}
     *
     * @param socket    the {@link Socket} which is used by this instance
     */
    public OioSocketChannel(Socket socket) {
        this(null, socket);
    }

    /**
     * Create a new instance from the given {@link Socket}
     *
     * @param parent    the parent {@link Channel} which was used to create this instance. This can be null if the
     *                  {@link Channel} has no parent as it was created by yourself.
     * @param socket    the {@link Socket} which is used by this instance
     */
    public OioSocketChannel(Channel parent, Socket socket) {
        super(parent);
        this.socket = socket;
        config = new DefaultOioSocketChannelConfig(this, socket);
        boolean success = false;
        try {
            // a socket handed in may already be connected; activate the streams immediately then
            if (socket.isConnected()) {
                activate(socket.getInputStream(), socket.getOutputStream());
            }
            socket.setSoTimeout(SO_TIMEOUT);
            success = true;
        } catch (Exception e) {
            throw new ChannelException("failed to initialize a socket", e);
        } finally {
            // avoid leaking the socket if any of the setup calls above failed
            if (!success) {
                try {
                    socket.close();
                } catch (IOException e) {
                    logger.warn("Failed to close a socket.", e);
                }
            }
        }
    }

    @Override
    public ServerSocketChannel parent() {
        return (ServerSocketChannel) super.parent();
    }

    @Override
    public OioSocketChannelConfig config() {
        return config;
    }

    @Override
    public boolean isOpen() {
        return !socket.isClosed();
    }

    @Override
    public boolean isActive() {
        return !socket.isClosed() && socket.isConnected();
    }

    @Override
    public boolean isOutputShutdown() {
        return socket.isOutputShutdown() || !isActive();
    }

    @Override
    public boolean isInputShutdown() {
        return socket.isInputShutdown() || !isActive();
    }

    @Override
    public boolean isShutdown() {
        return socket.isInputShutdown() && socket.isOutputShutdown() || !isActive();
    }

    @Override
    public ChannelFuture shutdownOutput() {
        return shutdownOutput(newPromise());
    }

    @Override
    public ChannelFuture shutdownInput() {
        return shutdownInput(newPromise());
    }

    @Override
    public ChannelFuture shutdown() {
        return shutdown(newPromise());
    }

    @Override
    protected int doReadBytes(ByteBuf buf) throws Exception {
        if (socket.isClosed()) {
            // -1 signals end-of-stream to the caller
            return -1;
        }
        try {
            return super.doReadBytes(buf);
        } catch (SocketTimeoutException ignored) {
            // blocking read timed out without data; report "read nothing", not an error
            return 0;
        }
    }

    @Override
    public ChannelFuture shutdownOutput(final ChannelPromise promise) {
        // the actual shutdown must run on the channel's event loop
        EventLoop loop = eventLoop();
        if (loop.inEventLoop()) {
            shutdownOutput0(promise);
        } else {
            loop.execute(new Runnable() {
                @Override
                public void run() {
                    shutdownOutput0(promise);
                }
            });
        }
        return promise;
    }

    private void shutdownOutput0(ChannelPromise promise) {
        try {
            shutdownOutput0();
            promise.setSuccess();
        } catch (Throwable t) {
            promise.setFailure(t);
        }
    }

    private void shutdownOutput0() throws IOException {
        socket.shutdownOutput();
        ((AbstractUnsafe) unsafe()).shutdownOutput();
    }

    @Override
    public ChannelFuture shutdownInput(final ChannelPromise promise) {
        // the actual shutdown must run on the channel's event loop
        EventLoop loop = eventLoop();
        if (loop.inEventLoop()) {
            shutdownInput0(promise);
        } else {
            loop.execute(new Runnable() {
                @Override
                public void run() {
                    shutdownInput0(promise);
                }
            });
        }
        return promise;
    }

    private void shutdownInput0(ChannelPromise promise) {
        try {
            socket.shutdownInput();
            promise.setSuccess();
        } catch (Throwable t) {
            promise.setFailure(t);
        }
    }

    @Override
    public ChannelFuture shutdown(final ChannelPromise promise) {
        // the actual shutdown must run on the channel's event loop
        EventLoop loop = eventLoop();
        if (loop.inEventLoop()) {
            shutdown0(promise);
        } else {
            loop.execute(new Runnable() {
                @Override
                public void run() {
                    shutdown0(promise);
                }
            });
        }
        return promise;
    }

    private void shutdown0(ChannelPromise promise) {
        Throwable cause = null;
        // shut down both directions; input shutdown is attempted even if output failed,
        // and the first failure wins as the reported cause
        try {
            shutdownOutput0();
        } catch (Throwable t) {
            cause = t;
        }
        try {
            socket.shutdownInput();
        } catch (Throwable t) {
            if (cause == null) {
                promise.setFailure(t);
            } else {
                logger.debug("Exception suppressed because a previous exception occurred.", t);
                promise.setFailure(cause);
            }
            return;
        }
        if (cause == null) {
            promise.setSuccess();
        } else {
            promise.setFailure(cause);
        }
    }

    @Override
    public InetSocketAddress localAddress() {
        return (InetSocketAddress) super.localAddress();
    }

    @Override
    public InetSocketAddress remoteAddress() {
        return (InetSocketAddress) super.remoteAddress();
    }

    @Override
    protected SocketAddress localAddress0() {
        return socket.getLocalSocketAddress();
    }

    @Override
    protected SocketAddress remoteAddress0() {
        return socket.getRemoteSocketAddress();
    }

    @Override
    protected void doBind(SocketAddress localAddress) throws Exception {
        SocketUtils.bind(socket, localAddress);
    }

    @Override
    protected void doConnect(SocketAddress remoteAddress,
            SocketAddress localAddress) throws Exception {
        if (localAddress != null) {
            SocketUtils.bind(socket, localAddress);
        }

        boolean success = false;
        try {
            SocketUtils.connect(socket, remoteAddress, config().getConnectTimeoutMillis());
            activate(socket.getInputStream(), socket.getOutputStream());
            success = true;
        } catch (SocketTimeoutException e) {
            // translate the blocking-IO timeout into Netty's connect-timeout exception,
            // keeping the original stack trace
            ConnectTimeoutException cause = new ConnectTimeoutException("connection timed out: " + remoteAddress);
            cause.setStackTrace(e.getStackTrace());
            throw cause;
        } finally {
            if (!success) {
                doClose();
            }
        }
    }

    @Override
    protected void doDisconnect() throws Exception {
        doClose();
    }

    @Override
    protected void doClose() throws Exception {
        socket.close();
    }

    protected boolean checkInputShutdown() {
        if (isInputShutdown()) {
            try {
                // input is shut down: sleep for one SO_TIMEOUT period instead of
                // busy-spinning on a stream that will never produce data again
                Thread.sleep(config().getSoTimeout());
            } catch (Throwable e) {
                // ignore
            }
            return true;
        }
        return false;
    }

    @Deprecated
    @Override
    protected void setReadPending(boolean readPending) {
        super.setReadPending(readPending);
    }

    final void clearReadPending0() {
        clearReadPending();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.controller.repository.crypto;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.security.KeyManagementException;
import javax.crypto.CipherOutputStream;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.controller.repository.FileSystemRepository;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.apache.nifi.security.kms.EncryptionException;
import org.apache.nifi.security.kms.KeyProvider;
import org.apache.nifi.security.repository.RepositoryEncryptorUtils;
import org.apache.nifi.security.repository.RepositoryType;
import org.apache.nifi.security.repository.stream.RepositoryObjectStreamEncryptor;
import org.apache.nifi.security.repository.stream.aes.RepositoryObjectAESCTREncryptor;
import org.apache.nifi.stream.io.ByteCountingOutputStream;
import org.apache.nifi.stream.io.NonCloseableOutputStream;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.util.NiFiProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This class is an implementation of the {@link FileSystemRepository} content repository which provides transparent
 * streaming encryption/decryption of content claim data during file system interaction. As of Apache NiFi 1.10.0
 * (October 2019), this implementation is considered
 * <a href="https://nifi.apache.org/docs/nifi-docs/html/user-guide.html#experimental-warning">*experimental*</a>. For further details, review the
 * <a href="https://nifi.apache.org/docs/nifi-docs/html/user-guide.html#encrypted-content">Apache NiFi User Guide -
 * Encrypted Content Repository</a> and
 * <a href="https://nifi.apache.org/docs/nifi-docs/html/administration-guide.html#encrypted-file-system-content-repository-properties">Apache NiFi Admin Guide - Encrypted File System Content
 * Repository Properties</a>.
 */
public class EncryptedFileSystemRepository extends FileSystemRepository {
    private static final Logger logger = LoggerFactory.getLogger(EncryptedFileSystemRepository.class);

    // ID of the key the encryptor uses for newly-written claims; existing claims carry their key ID in the metadata
    private String activeKeyId;
    // Provides the encryption keys; null when constructed via the no-arg service-loading constructor
    private KeyProvider keyProvider;

    /**
     * Default no args constructor for service loading only.
     */
    public EncryptedFileSystemRepository() {
        super();
        keyProvider = null;
    }

    public EncryptedFileSystemRepository(final NiFiProperties niFiProperties) throws IOException {
        super(niFiProperties);

        // Initialize the encryption-specific fields
        this.keyProvider = RepositoryEncryptorUtils.validateAndBuildRepositoryKeyProvider(niFiProperties, RepositoryType.CONTENT);

        // Set active key ID
        setActiveKeyId(niFiProperties.getContentRepositoryEncryptionKeyId());
    }

    /**
     * Returns the number of bytes read after importing content from the provided
     * {@link InputStream} into the {@link ContentClaim}. This method has the same logic as
     * the parent method, but must be overridden to use the subclass's
     * {@link #write(ContentClaim)} method which performs the encryption. The
     * overloaded method {@link super#importFrom(Path, ContentClaim)} does not need to be
     * overridden because it delegates to this one.
     *
     * @param content the InputStream containing the desired content
     * @param claim   the ContentClaim to put the content into
     * @return the number of bytes read
     * @throws IOException if there is a problem reading from the stream
     */
    @Override
    public long importFrom(final InputStream content, final ContentClaim claim) throws IOException {
        try (final OutputStream out = write(claim)) {
            return StreamUtils.copy(content, out);
        }
    }

    /**
     * Exports the content of the given claim to the given destination. Returns the number of bytes written.
     * <strong>This method decrypts the encrypted content and writes it in plaintext.</strong>
     *
     * @param claim       to export from
     * @param destination where to export data
     * @return the size of the claim in bytes
     * @throws IOException if an IO error occurs
     */
    @Override
    public long exportTo(final ContentClaim claim, final OutputStream destination) throws IOException {
        logger.warn("Exporting content from {} to output stream {}. This content will be decrypted", claim.getResourceClaim().getId(), destination);
        return super.exportTo(claim, destination);
    }

    /**
     * Exports a subset of the content of the given claim, starting at offset
     * and copying length bytes, to the given destination. Returns the number of bytes written.
     * <strong>This method decrypts the encrypted content and writes it in plaintext.</strong>
     *
     * @param claim       to export from
     * @param destination where to export data
     * @param offset      the offset into the claim at which the copy should begin
     * @param length      the number of bytes to copy
     * @return the size of the claim in bytes
     * @throws IOException if an IO error occurs
     */
    @Override
    public long exportTo(final ContentClaim claim, final OutputStream destination, final long offset, final long length) throws IOException {
        logger.warn("Exporting content from {} (offset: {}, length: {}) to output stream {}. This content will be decrypted", claim.getResourceClaim().getId(), offset, length, destination);
        return super.exportTo(claim, destination, offset, length);
    }

    /**
     * Exports the content of the given claim to the given destination. Returns the number of bytes written.
     * <strong>This method decrypts the encrypted content and writes it in plaintext.</strong>
     *
     * @param claim       to export from
     * @param destination where to export data
     * @return the size of the claim in bytes
     * @throws IOException if an IO error occurs
     */
    @Override
    public long exportTo(final ContentClaim claim, final Path destination, final boolean append) throws IOException {
        logger.warn("Exporting content from {} to path {}. This content will be decrypted", claim.getResourceClaim().getId(), destination);
        return super.exportTo(claim, destination, append);
    }

    /**
     * Exports a subset of the content of the given claim, starting at offset
     * and copying length bytes, to the given destination.
     * <strong>This method decrypts the encrypted content and writes it in plaintext.</strong>
     *
     * @param claim       to export from
     * @param destination where to export data
     * @param offset      the offset into the claim at which the copy should begin
     * @param length      the number of bytes to copy
     * @return the number of bytes copied
     * @throws IOException if an IO error occurs
     */
    @Override
    public long exportTo(final ContentClaim claim, final Path destination, final boolean append, final long offset, final long length) throws IOException {
        logger.warn("Exporting content from {} (offset: {}, length: {}) to path {}. This content will be decrypted", claim.getResourceClaim().getId(), offset, length, destination);
        return super.exportTo(claim, destination, append, offset, length);
    }

    /**
     * Returns an InputStream (actually a {@link javax.crypto.CipherInputStream}) which wraps
     * the {@link java.io.FileInputStream} from the content repository claim on disk. This
     * allows a consuming caller to automatically decrypt the content as it is read.
     *
     * @param claim the content claim to read
     * @return the decrypting input stream
     * @throws IOException if there is a problem reading from disk or configuring the cipher
     */
    @Override
    public InputStream read(final ContentClaim claim) throws IOException {
        InputStream inputStream = super.read(claim);
        if (claim == null) {
            // The parent handles a null claim; there is nothing to decrypt
            return inputStream;
        }
        try {
            String recordId = getRecordId(claim);
            logger.debug("Creating decrypted input stream to read flowfile content with record ID: " + recordId);

            final InputStream decryptingInputStream = getDecryptingInputStream(inputStream, recordId);
            logger.debug("Reading from record ID {}", recordId);
            if (logger.isTraceEnabled()) {
                logger.trace("Stack trace: ", new RuntimeException("Stack Trace for reading from record ID " + recordId));
            }
            return decryptingInputStream;
        } catch (EncryptionException | KeyManagementException e) {
            // Include the cause so the failure is diagnosable from the log
            logger.error("Encountered an error instantiating the encrypted content repository input stream", e);
            throw new IOException("Error creating encrypted content repository input stream", e);
        }
    }

    private InputStream getDecryptingInputStream(InputStream inputStream, String recordId) throws KeyManagementException, EncryptionException {
        RepositoryObjectStreamEncryptor encryptor = new RepositoryObjectAESCTREncryptor();
        encryptor.initialize(keyProvider);

        // ECROS wrapping COS wrapping BCOS wrapping FOS
        return encryptor.decrypt(inputStream, recordId);
    }

    /**
     * Returns an OutputStream (actually a {@link javax.crypto.CipherOutputStream}) which wraps
     * the {@link ByteCountingOutputStream} to the content repository claim on disk. This
     * allows a consuming caller to automatically encrypt the content as it is written.
     *
     * @param claim the content claim to write to
     * @return the encrypting output stream
     * @throws IOException if there is a problem writing to disk or configuring the cipher
     */
    @Override
    public OutputStream write(final ContentClaim claim) throws IOException {
        StandardContentClaim scc = validateContentClaimForWriting(claim);

        // BCOS wrapping FOS
        ByteCountingOutputStream claimStream = getWritableClaimStreamByResourceClaim(scc.getResourceClaim());
        final long startingOffset = claimStream.getBytesWritten();

        try {
            String keyId = getActiveKeyId();
            String recordId = getRecordId(claim);
            logger.debug("Creating encrypted output stream (keyId: " + keyId + ") to write flowfile content with record ID: " + recordId);
            final OutputStream out = getEncryptedOutputStream(scc, claimStream, startingOffset, keyId, recordId);
            logger.debug("Writing to {}", out);
            if (logger.isTraceEnabled()) {
                logger.trace("Stack trace: ", new RuntimeException("Stack Trace for writing to " + out));
            }
            return out;
        } catch (EncryptionException | KeyManagementException e) {
            // Include the cause so the failure is diagnosable from the log
            logger.error("Encountered an error instantiating the encrypted content repository output stream", e);
            throw new IOException("Error creating encrypted content repository output stream", e);
        }
    }

    String getActiveKeyId() {
        return activeKeyId;
    }

    /**
     * Sets the key ID used to encrypt newly-written claims. The ID is only accepted when it is
     * non-blank and the configured key provider can supply it; otherwise the current ID is kept.
     *
     * @param activeKeyId the candidate key ID
     */
    public void setActiveKeyId(String activeKeyId) {
        // Key must not be blank, a key provider must be configured, and the provider must make the key available.
        // The keyProvider null check guards the service-loading (no-arg) construction path.
        if (StringUtils.isNotBlank(activeKeyId) && keyProvider != null && keyProvider.keyExists(activeKeyId)) {
            this.activeKeyId = activeKeyId;
            logger.debug("Set active key ID to '" + activeKeyId + "'");
        } else {
            logger.warn("Attempted to set active key ID to '" + activeKeyId + "' but that is not a valid or available key ID. Keeping active key ID as '" + this.activeKeyId + "'");
        }
    }

    /**
     * Returns an identifier for this {@link ContentClaim} to be used when serializing/retrieving the encrypted content.
     * For version 1, the identifier is {@code "nifi-ecr-rc-" + the resource claim ID + offset}. If any piece of the
     * CC -> RC -> ID chain is null or empty, the current system time in nanoseconds is used with a different
     * prefix ({@code "nifi-ecr-ts-"}).
     *
     * @param claim the content claim
     * @return the string identifier
     */
    public static String getRecordId(ContentClaim claim) {
        // For version 1, use the content claim's resource claim ID as the record ID rather than introducing a new field in the metadata
        if (claim != null && claim.getResourceClaim() != null && !StringUtils.isBlank(claim.getResourceClaim().getId())) {
            return "nifi-ecr-rc-" + claim.getResourceClaim().getId() + "+" + claim.getOffset();
        } else {
            String tempId = "nifi-ecr-ts-" + System.nanoTime();
            logger.error("Cannot determine record ID from null content claim or claim with missing/empty resource claim ID; using timestamp-generated ID: " + tempId + "+0");
            return tempId;
        }
    }

    private OutputStream getEncryptedOutputStream(StandardContentClaim scc, ByteCountingOutputStream claimStream, long startingOffset, String keyId, String recordId)
            throws KeyManagementException, EncryptionException {
        RepositoryObjectStreamEncryptor encryptor = new RepositoryObjectAESCTREncryptor();
        encryptor.initialize(keyProvider);

        // ECROS wrapping COS wrapping BCOS wrapping FOS
        return new EncryptedContentRepositoryOutputStream(scc, claimStream, encryptor, recordId, keyId, startingOffset);
    }

    /**
     * Private class which wraps the {@link org.apache.nifi.controller.repository.FileSystemRepository.ContentRepositoryOutputStream}'s
     * internal {@link ByteCountingOutputStream} with a {@link CipherOutputStream}
     * to handle streaming encryption operations.
     */
    private class EncryptedContentRepositoryOutputStream extends ContentRepositoryOutputStream {
        private final CipherOutputStream cipherOutputStream;
        // Byte count of the underlying claim stream when this stream was opened; used to compute this claim's length
        private final long startingOffset;

        EncryptedContentRepositoryOutputStream(StandardContentClaim scc,
                                               ByteCountingOutputStream byteCountingOutputStream,
                                               RepositoryObjectStreamEncryptor encryptor, String recordId, String keyId, long startingOffset) throws EncryptionException {
            super(scc, byteCountingOutputStream, 0);
            this.startingOffset = startingOffset;

            // Set up cipher stream; wrap the underlying stream so closing the cipher stream does not close the shared claim stream
            this.cipherOutputStream = (CipherOutputStream) encryptor.encrypt(new NonCloseableOutputStream(byteCountingOutputStream), recordId, keyId);
        }

        @Override
        public String toString() {
            return "EncryptedFileSystemRepository Stream [" + scc + "]";
        }

        @Override
        public synchronized void write(final int b) throws IOException {
            // OutputStream#write(int) must write exactly one byte: the low-order 8 bits of b.
            // The previous implementation wrote a 4-byte big-endian int (via ByteBuffer.allocate(4)),
            // which inflated the encrypted content and the claim length for single-byte writes.
            writeBytes(new byte[]{(byte) b}, 0, 1);
        }

        @Override
        public synchronized void write(final byte[] b) throws IOException {
            writeBytes(b, 0, b.length);
        }

        @Override
        public synchronized void write(final byte[] b, final int off, final int len) throws IOException {
            writeBytes(b, off, len);
        }

        /**
         * Internal method used to reduce duplication throughout code.
         *
         * @param b   the byte array to write
         * @param off the offset in bytes
         * @param len the length in bytes to write
         * @throws IOException if there is a problem writing the output
         */
        private void writeBytes(byte[] b, int off, int len) throws IOException {
            if (closed) {
                throw new IOException("Stream is closed");
            }

            try {
                cipherOutputStream.write(b, off, len);
                // Track the claim length as the delta of bytes written since this stream was opened
                scc.setLength(bcos.getBytesWritten() - startingOffset);
            } catch (final IOException ioe) {
                // Mark the claim as non-recyclable since its content is now in an unknown state
                recycle = false;
                throw new IOException("Failed to write to " + this, ioe);
            }
        }

        @Override
        public synchronized void flush() throws IOException {
            if (closed) {
                throw new IOException("Stream is closed");
            }

            cipherOutputStream.flush();
        }

        @Override
        public synchronized void close() throws IOException {
            closed = true;

            // Always flush and close (close triggers cipher.doFinal())
            cipherOutputStream.flush();
            cipherOutputStream.close();

            // Add the additional bytes written to the scc.length
            scc.setLength(bcos.getBytesWritten() - startingOffset);

            super.close();
        }
    }
}
package org.deeplearning4j.nn.conf.preprocessor; import org.deeplearning4j.nn.api.OptimizationAlgorithm; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.nn.conf.inputs.InputType; import org.deeplearning4j.nn.conf.layers.OutputLayer; import org.deeplearning4j.nn.conf.layers.SubsamplingLayer; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.nn.weights.WeightInit; import org.junit.Test; import org.nd4j.linalg.activations.Activation; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.lossfunctions.LossFunctions; import static org.junit.Assert.*; /** **/ public class CNNProcessorTest { private static int rows = 28; private static int cols = 28; private static INDArray in2D = Nd4j.create(1, 784); private static INDArray in3D = Nd4j.create(20, 784, 7); private static INDArray in4D = Nd4j.create(20, 1, 28, 28); @Test public void testFeedForwardToCnnPreProcessor() { FeedForwardToCnnPreProcessor convProcessor = new FeedForwardToCnnPreProcessor(rows, cols, 1); INDArray check2to4 = convProcessor.preProcess(in2D, -1); int val2to4 = check2to4.shape().length; assertTrue(val2to4 == 4); assertEquals(Nd4j.create(1, 1, 28, 28), check2to4); INDArray check4to4 = convProcessor.preProcess(in4D, -1); int val4to4 = check4to4.shape().length; assertTrue(val4to4 == 4); assertEquals(Nd4j.create(20, 1, 28, 28), check4to4); } @Test public void testFeedForwardToCnnPreProcessor2() { int[] nRows = {1, 5, 20}; int[] nCols = {1, 5, 20}; int[] nDepth = {1, 3}; int[] nMiniBatchSize = {1, 5}; for (int rows : nRows) { for (int cols : nCols) { for (int d : nDepth) { FeedForwardToCnnPreProcessor convProcessor = new FeedForwardToCnnPreProcessor(rows, cols, d); for (int miniBatch : nMiniBatchSize) { int[] ffShape = new int[] {miniBatch, rows * cols * d}; INDArray rand = Nd4j.rand(ffShape); INDArray ffInput_c = 
Nd4j.create(ffShape, 'c'); INDArray ffInput_f = Nd4j.create(ffShape, 'f'); ffInput_c.assign(rand); ffInput_f.assign(rand); assertEquals(ffInput_c, ffInput_f); //Test forward pass: INDArray convAct_c = convProcessor.preProcess(ffInput_c, -1); INDArray convAct_f = convProcessor.preProcess(ffInput_f, -1); int[] convShape = {miniBatch, d, rows, cols}; assertArrayEquals(convShape, convAct_c.shape()); assertArrayEquals(convShape, convAct_f.shape()); assertEquals(convAct_c, convAct_f); //Check values: //CNN reshaping (for each example) takes a 1d vector and converts it to 3d // (4d total, for minibatch data) //1d vector is assumed to be rows from depth 0 concatenated, followed by depth 1, etc for (int ex = 0; ex < miniBatch; ex++) { for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { for (int depth = 0; depth < d; depth++) { int origPosition = depth * (rows * cols) + r * cols + c; //pos in vector double vecValue = ffInput_c.getDouble(ex, origPosition); double convValue = convAct_c.getDouble(ex, depth, r, c); assertEquals(vecValue, convValue, 0.0); } } } } //Test backward pass: //Idea is that backward pass should do opposite to forward pass INDArray epsilon4_c = Nd4j.create(convShape, 'c'); INDArray epsilon4_f = Nd4j.create(convShape, 'f'); epsilon4_c.assign(convAct_c); epsilon4_f.assign(convAct_f); INDArray epsilon2_c = convProcessor.backprop(epsilon4_c, -1); INDArray epsilon2_f = convProcessor.backprop(epsilon4_f, -1); assertEquals(ffInput_c, epsilon2_c); assertEquals(ffInput_c, epsilon2_f); } } } } } @Test public void testFeedForwardToCnnPreProcessorBackprop() { FeedForwardToCnnPreProcessor convProcessor = new FeedForwardToCnnPreProcessor(rows, cols, 1); convProcessor.preProcess(in2D, -1); INDArray check2to2 = convProcessor.backprop(in2D, -1); int val2to2 = check2to2.shape().length; assertTrue(val2to2 == 2); assertEquals(Nd4j.create(1, 784), check2to2); } @Test public void testCnnToFeedForwardProcessor() { CnnToFeedForwardPreProcessor convProcessor = new 
CnnToFeedForwardPreProcessor(rows, cols, 1); INDArray check2to4 = convProcessor.backprop(in2D, -1); int val2to4 = check2to4.shape().length; assertTrue(val2to4 == 4); assertEquals(Nd4j.create(1, 1, 28, 28), check2to4); INDArray check4to4 = convProcessor.backprop(in4D, -1); int val4to4 = check4to4.shape().length; assertTrue(val4to4 == 4); assertEquals(Nd4j.create(20, 1, 28, 28), check4to4); } @Test public void testCnnToFeedForwardPreProcessorBackprop() { CnnToFeedForwardPreProcessor convProcessor = new CnnToFeedForwardPreProcessor(rows, cols, 1); convProcessor.preProcess(in4D, -1); INDArray check2to2 = convProcessor.preProcess(in2D, -1); int val2to2 = check2to2.shape().length; assertTrue(val2to2 == 2); assertEquals(Nd4j.create(1, 784), check2to2); INDArray check4to2 = convProcessor.preProcess(in4D, -1); int val4to2 = check4to2.shape().length; assertTrue(val4to2 == 2); assertEquals(Nd4j.create(20, 784), check4to2); } @Test public void testCnnToFeedForwardPreProcessor2() { int[] nRows = {1, 5, 20}; int[] nCols = {1, 5, 20}; int[] nDepth = {1, 3}; int[] nMiniBatchSize = {1, 5}; for (int rows : nRows) { for (int cols : nCols) { for (int d : nDepth) { CnnToFeedForwardPreProcessor convProcessor = new CnnToFeedForwardPreProcessor(rows, cols, d); for (int miniBatch : nMiniBatchSize) { int[] convActShape = new int[] {miniBatch, d, rows, cols}; INDArray rand = Nd4j.rand(convActShape); INDArray convInput_c = Nd4j.create(convActShape, 'c'); INDArray convInput_f = Nd4j.create(convActShape, 'f'); convInput_c.assign(rand); convInput_f.assign(rand); assertEquals(convInput_c, convInput_f); //Test forward pass: INDArray ffAct_c = convProcessor.preProcess(convInput_c, -1); INDArray ffAct_f = convProcessor.preProcess(convInput_f, -1); int[] ffActShape = {miniBatch, d * rows * cols}; assertArrayEquals(ffActShape, ffAct_c.shape()); assertArrayEquals(ffActShape, ffAct_f.shape()); assertEquals(ffAct_c, ffAct_f); //Check values: //CNN reshaping (for each example) takes a 1d vector and 
converts it to 3d // (4d total, for minibatch data) //1d vector is assumed to be rows from depth 0 concatenated, followed by depth 1, etc for (int ex = 0; ex < miniBatch; ex++) { for (int r = 0; r < rows; r++) { for (int c = 0; c < cols; c++) { for (int depth = 0; depth < d; depth++) { int vectorPosition = depth * (rows * cols) + r * cols + c; //pos in vector after reshape double vecValue = ffAct_c.getDouble(ex, vectorPosition); double convValue = convInput_c.getDouble(ex, depth, r, c); assertEquals(convValue, vecValue, 0.0); } } } } //Test backward pass: //Idea is that backward pass should do opposite to forward pass INDArray epsilon2_c = Nd4j.create(ffActShape, 'c'); INDArray epsilon2_f = Nd4j.create(ffActShape, 'f'); epsilon2_c.assign(ffAct_c); epsilon2_f.assign(ffAct_c); INDArray epsilon4_c = convProcessor.backprop(epsilon2_c, -1); INDArray epsilon4_f = convProcessor.backprop(epsilon2_f, -1); assertEquals(convInput_c, epsilon4_c); assertEquals(convInput_c, epsilon4_f); } } } } } public static MultiLayerNetwork getCNNMnistConfig() { Nd4j.ENFORCE_NUMERICAL_STABILITY = true; MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(123).iterations(5) .optimizationAlgo(OptimizationAlgorithm.LINE_GRADIENT_DESCENT).list() .layer(0, new org.deeplearning4j.nn.conf.layers.ConvolutionLayer.Builder(new int[] {9, 9}, new int[] {1, 1}).nOut(20).weightInit(WeightInit.XAVIER) .activation(Activation.RELU).build()) .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] {2, 2}) .build()) .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10) .weightInit(WeightInit.XAVIER).activation(Activation.SOFTMAX).build()) .setInputType(InputType.convolutionalFlat(28, 28, 1)).build(); return new MultiLayerNetwork(conf); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.model.language;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

import org.apache.camel.CamelContext;
import org.apache.camel.Expression;
import org.apache.camel.ExpressionIllegalSyntaxException;
import org.apache.camel.Predicate;
import org.apache.camel.component.bean.BeanHolder;
import org.apache.camel.component.bean.BeanInfo;
import org.apache.camel.component.bean.ConstantBeanHolder;
import org.apache.camel.component.bean.ConstantStaticTypeBeanHolder;
import org.apache.camel.component.bean.MethodNotFoundException;
import org.apache.camel.component.bean.RegistryBean;
import org.apache.camel.language.bean.BeanExpression;
import org.apache.camel.spi.Metadata;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.OgnlHelper;

/**
 * For expressions and predicates using a java bean (aka method call).
 * <p/>
 * The bean to invoke can be specified by registry reference, by class name/type,
 * or by a concrete instance; the target method may be named explicitly or left
 * for Camel's bean binding to choose.
 *
 * @version
 */
@Metadata(label = "language,core,java", title = "Bean method")
@XmlRootElement(name = "method")
@XmlAccessorType(XmlAccessType.FIELD)
public class MethodCallExpression extends ExpressionDefinition {
    // Legacy attribute: either a registry reference or a class name (superseded by ref/beanType)
    @XmlAttribute
    @Deprecated
    private String bean;
    // Registry lookup name of the bean
    @XmlAttribute
    private String ref;
    // Name of the method to invoke; null lets bean binding pick the method
    @XmlAttribute
    private String method;
    // Class name form of the bean type, resolved lazily in createExpression
    @XmlAttribute(name = "beanType")
    private String beanTypeName;
    // Resolved bean class; mutually exclusive with 'instance' (see setInstance/setBeanType)
    @XmlTransient
    private Class<?> beanType;
    // Concrete bean instance to invoke; mutually exclusive with 'beanType'
    @XmlTransient
    private Object instance;

    public MethodCallExpression() {
    }

    public MethodCallExpression(String beanName) {
        this(beanName, null);
    }

    public MethodCallExpression(String beanName, String method) {
        super(beanName);
        this.method = method;
    }

    public MethodCallExpression(Object instance) {
        this(instance, null);
    }

    public MethodCallExpression(Object instance, String method) {
        super(ObjectHelper.className(instance));
        // must use setter as they have special logic
        setInstance(instance);
        setMethod(method);
    }

    public MethodCallExpression(Class<?> type) {
        this(type, null);
    }

    public MethodCallExpression(Class<?> type, String method) {
        super(type.getName());
        this.beanType = type;
        this.method = method;
    }

    public String getLanguage() {
        return "bean";
    }

    public String getBean() {
        return bean;
    }

    /**
     * Either a reference or a class name of the bean to use
     */
    public void setBean(String bean) {
        this.bean = bean;
    }

    public String getRef() {
        return ref;
    }

    /**
     * Reference to bean to lookup in the registry
     */
    public void setRef(String ref) {
        this.ref = ref;
    }

    public String getMethod() {
        return method;
    }

    /**
     * Name of method to call
     */
    public void setMethod(String method) {
        this.method = method;
    }

    public Class<?> getBeanType() {
        return beanType;
    }

    public void setBeanType(Class<?> beanType) {
        this.beanType = beanType;
        // type and instance are mutually exclusive; clear the instance when a type is set
        this.instance = null;
    }

    public String getBeanTypeName() {
        return beanTypeName;
    }

    /**
     * Class name of the bean to use
     */
    public void setBeanTypeName(String beanTypeName) {
        this.beanTypeName = beanTypeName;
    }

    public Object getInstance() {
        return instance;
    }

    public void setInstance(Object instance) {
        // people may by mistake pass in a class type as the instance
        if (instance instanceof Class) {
            this.beanType = (Class<?>) instance;
            this.instance = null;
        } else {
            this.beanType = null;
            this.instance = instance;
        }
    }

    /**
     * Creates the {@link BeanExpression} for this definition. Resolution order:
     * a type name is resolved to a class first; a class is instantiated when it
     * has a public no-arg constructor (else invoked statically); otherwise a
     * provided instance is used; failing all of those, the bean name is looked
     * up in the registry. The chosen method (if any) is validated to exist.
     */
    @Override
    public Expression createExpression(CamelContext camelContext) {
        Expression answer;

        // Lazily resolve the class from its name, now that a class resolver is available
        if (beanType == null && beanTypeName != null) {
            try {
                beanType = camelContext.getClassResolver().resolveMandatoryClass(beanTypeName);
            } catch (ClassNotFoundException e) {
                throw ObjectHelper.wrapRuntimeCamelException(e);
            }
        }

        BeanHolder holder;
        if (beanType != null) {
            // create a bean if there is a default public no-arg constructor
            if (ObjectHelper.hasDefaultPublicNoArgConstructor(beanType)) {
                instance = camelContext.getInjector().newInstance(beanType);
                holder = new ConstantBeanHolder(instance, camelContext);
            } else {
                // no way to instantiate: the method must be invoked as a static method
                holder = new ConstantStaticTypeBeanHolder(beanType, camelContext);
            }
        } else if (instance != null) {
            holder = new ConstantBeanHolder(instance, camelContext);
        } else {
            String ref = beanName();
            // if its a ref then check that the ref exists
            BeanHolder regHolder = new RegistryBean(camelContext, ref);
            // get the bean which will check that it exists
            instance = regHolder.getBean();
            holder = new ConstantBeanHolder(instance, camelContext);
        }

        // create answer using the holder
        answer = new BeanExpression(holder, getMethod());

        // and do sanity check that if a method name was given, that it exists
        validateHasMethod(camelContext, instance, beanType, getMethod());
        return answer;
    }

    @Override
    public Predicate createPredicate(CamelContext camelContext) {
        // BeanExpression implements both Expression and Predicate, so reuse the expression
        return (BeanExpression) createExpression(camelContext);
    }

    /**
     * Validates the given bean has the method.
     * <p/>
     * This implementation will skip trying to validate OGNL method name expressions.
     *
     * @param context camel context
     * @param bean    the bean instance
     * @param type    the bean type
     * @param method  the method, can be <tt>null</tt> if no method name provided
     * @throws org.apache.camel.RuntimeCamelException is thrown if bean does not have the method
     */
    protected void validateHasMethod(CamelContext context, Object bean, Class<?> type, String method) {
        if (method == null) {
            return;
        }

        if (bean == null && type == null) {
            throw new IllegalArgumentException("Either bean or type should be provided on " + this);
        }

        // do not try to validate ognl methods
        if (OgnlHelper.isValidOgnlExpression(method)) {
            return;
        }

        // if invalid OGNL then fail
        if (OgnlHelper.isInvalidValidOgnlExpression(method)) {
            ExpressionIllegalSyntaxException cause = new ExpressionIllegalSyntaxException(method);
            throw ObjectHelper.wrapRuntimeCamelException(new MethodNotFoundException(bean != null ? bean : type, method, cause));
        }

        if (bean != null) {
            // an instance is available: any (static or non-static) method is acceptable
            BeanInfo info = new BeanInfo(context, bean.getClass());
            if (!info.hasMethod(method)) {
                throw ObjectHelper.wrapRuntimeCamelException(new MethodNotFoundException(null, bean, method));
            }
        } else {
            BeanInfo info = new BeanInfo(context, type);
            // must be a static method as we do not have a bean instance to invoke
            if (!info.hasStaticMethod(method)) {
                throw ObjectHelper.wrapRuntimeCamelException(new MethodNotFoundException(null, type, method, true));
            }
        }
    }

    /**
     * Determines the display/lookup name for the bean, preferring the explicit
     * attributes over the raw expression text.
     */
    protected String beanName() {
        if (bean != null) {
            return bean;
        } else if (ref != null) {
            return ref;
        } else if (instance != null) {
            return ObjectHelper.className(instance);
        }
        return getExpression();
    }

    @Override
    public String toString() {
        return "bean{" + beanName() + (method != null ? ", method=" + method : "") + "}";
    }
}
package com.pivotal.gemfirexd.internal.impl.jdbc; /* Derby - Class com.pivotal.gemfirexd.internal.impl.tools.ij.StatementFinder Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* * Adapted from Derby StatementFinder. * * Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ import java.io.IOException; import java.io.Reader; /** * StatementReader looks through an input stream for the next JSQL statement. A * statement is considered to be any tokens up to the next semicolon or EOF. * <p> * Semicolons inside comments, strings, and delimited identifiers are not * considered to be statement terminators but to be part of those tokens. 
* <p>
 * Comments currently recognized include the SQL comment, which begins with "--"
 * and ends at the next EOL, and nested bracketed comments.
 * <p>
 * Strings and delimited identifiers are permitted to contain newlines; the
 * actual IJ or JSQL parsers will report errors when those cases occur.
 * <p>
 * There are no escaped characters, i.e. "\n" is considered to be two
 * characters, '\' and 'n'.
 */
public class StatementReader {

  // Lookahead protocol: peekChar() reads ONE character from 'source' on every
  // call and caches it in the 'peekChar' field; readChar() consumes the cached
  // character by appending it to 'statement'. Calling peekChar() repeatedly
  // without readChar() therefore discards characters (used to skip whitespace).

  private Reader source;
  // accumulates the text of the statement currently being scanned
  private StringBuilder statement = new StringBuilder();
  // current scanner state; one of the IN_* / END_* constants below
  private int state;
  private boolean atEOF = false;    // EOF has been consumed by readChar()
  private boolean peekEOF = false;  // EOF has been seen by peekChar()
  private char peekChar;            // last character produced by peekChar()
  private boolean peeked = false;   // true while peekChar holds an unconsumed char
  private boolean continuedStatement;

  // state variables
  private static final int IN_STATEMENT = 0;
  private static final int IN_STRING = 1;
  private static final int IN_SQLCOMMENT = 2;
  private static final int END_OF_STATEMENT = 3;
  private static final int END_OF_INPUT = 4;

  // special state-changing characters
  private static final char MINUS = '-';
  private static final char SINGLEQUOTE = '\'';
  private static final char DOUBLEQUOTE = '\"';
  private static final char SEMICOLON = ';';
  private static final char NEWLINE = '\n';
  private static final char RETURN = '\r';
  private static final char SPACE = ' ';
  private static final char TAB = '\t';
  private static final char FORMFEED = '\f';
  private static final char SLASH = '/';
  private static final char ASTERISK = '*';

  /**
   * The constructor does not assume the stream is data input or buffered, so it
   * will wrap it appropriately.
   *
   * @param s
   *          the input stream for reading statements from.
   */
  public StatementReader(Reader s) {
    source = s;
  }

  /**
   * Reinit is used to redirect the finder to another stream. The previous
   * stream should not have been in a PEEK state.
   *
   * @param s
   *          the input stream for reading statements from.
   */
  public void ReInit(Reader s) {
    try {
      source.close();
    } catch (IOException ioe) {
      // just be quiet if it is already gone
    }
    source = s;
    state = IN_STATEMENT;
    atEOF = false;
    peekEOF = false;
    peeked = false;
  }

  public void close() throws IOException {
    source.close();
  }

  /**
   * get the next statement in the input stream. Returns it, dropping its
   * closing semicolon if it has one. If there is no next statement, return a
   * null.
   *
   * @return the next statement in the input stream.
   */
  public String nextStatement() {
    boolean haveSemi = false;
    char nextChar;

    // initialize fields for getting the next statement
    statement.setLength(0);
    if (state == END_OF_INPUT)
      return null;
    state = IN_STATEMENT;

    // skip leading whitespace
    nextChar = peekChar();
    if (peekEOF()) {
      state = END_OF_INPUT;
      return null;
    }
    if (whiteSpace(nextChar)) {
      // each peekChar() call consumes one character without appending it,
      // so this empty-bodied loop discards the run of leading whitespace
      while (whiteSpace(peekChar()) && !peekEOF())
        ;
      if (peekEOF()) {
        state = END_OF_INPUT;
        return null;
      }
    }

    while (state != END_OF_STATEMENT && state != END_OF_INPUT) {
      // get the next character from the input
      nextChar = readChar();
      if (atEOF()) {
        state = END_OF_INPUT;
        break;
      }
      // a minus might only start a "--" comment, so it alone does not mark
      // the statement as continued; every other character does
      if (!(nextChar == MINUS))
        continuedStatement = true;
      switch (nextChar) {
      case MINUS:
        readSingleLineComment(nextChar);
        break;
      case SLASH:
        readBracketedComment();
        break;
      case SINGLEQUOTE:
      case DOUBLEQUOTE:
        readString(nextChar);
        break;
      case SEMICOLON:
        haveSemi = true;
        state = END_OF_STATEMENT;
        continuedStatement = false;
        break;
      case NEWLINE:
      case RETURN:
        foundReturn(nextChar, true);
        break;
      default:
        // keep going, just a normal character
        break;
      }
    }

    if (haveSemi)
      // drop the trailing semicolon that readChar() already appended
      statement.setLength(statement.length() - 1);

    return statement.toString();
  }

  /**
   * Determine if the given character is considered whitespace
   *
   * @param c
   *          the character to consider
   * @return true if the character is whitespace
   */
  private boolean whiteSpace(char c) {
    return (c == SPACE || c == TAB || c == RETURN || c == NEWLINE || c == FORMFEED);
  }

  /**
   * Advance the source stream to the end of a comment if it is on one, assuming
   * the first character of a potential bracketed comment has been found. If it
   * is not a comment, do not advance the stream.
   */
  private void readBracketedComment() {
    char nextChar = peekChar();

    // if next char is EOF, we are done.
    if (peekEOF())
      return;

    // if nextChar is not an asterisk, then not a comment.
    if (nextChar != ASTERISK) {
      continuedStatement = true;
      return;
    }

    // we are really in a comment
    readChar(); // grab the asterisk for real.

    int nestingLevel = 1;
    while (true) {
      nextChar = readChar();
      if (atEOF()) {
        // let the caller process the EOF, don't read it
        state = IN_STATEMENT;
        return;
      }
      char peek = peekChar();
      if (nextChar == SLASH && peek == ASTERISK) {
        // "/*" opens a nested comment
        readChar();
        nestingLevel++;
      } else if (nextChar == ASTERISK && peek == SLASH) {
        // "*/" closes the innermost comment
        readChar();
        nestingLevel--;
        if (nestingLevel == 0) {
          state = IN_STATEMENT;
          return;
        }
      } else if (nextChar == NEWLINE || nextChar == RETURN) {
        foundReturn(nextChar, true);
      }
    }
  }

  /**
   * Advance the source stream to the end of a comment if it is on one, assuming
   * the first character of a potential single line comment has been found. If
   * it is not a comment, do not advance the stream.
   * <p>
   * The form of a single line comment is, in regexp, XX.*$, where XX is two
   * instances of commentChar.
   *
   * @param commentChar
   *          the character whose duplication signifies the start of the
   *          comment.
   */
  private void readSingleLineComment(char commentChar) {
    char nextChar;

    nextChar = peekChar();

    // if next char is EOF, we are done.
    if (peekEOF())
      return;

    // if nextChar is not a minus, it was just a normal minus,
    // nothing special to do
    if (nextChar != commentChar) {
      continuedStatement = true;
      return;
    }

    // we are really in a comment
    readChar(); // grab the minus for real.

    state = IN_SQLCOMMENT;
    do {
      nextChar = peekChar();

      if (peekEOF()) {
        // let the caller process the EOF, don't read it
        state = IN_STATEMENT;
        return;
      }

      switch (nextChar) {
      case NEWLINE:
      case RETURN:
        readChar(); // okay to process the character
        state = IN_STATEMENT;
        foundReturn(nextChar, continuedStatement);
        return;
      default:
        readChar(); // process the character, still in comment
        break;
      }
    } while (state == IN_SQLCOMMENT); // could be while true...
  }

  /**
   * Advance the stream to the end of the string. Assumes the opening delimiter
   * of the string has been read. This handles the SQL ability to put the
   * delimiter within the string by doubling it, by reading those as two strings
   * sitting next to one another. I.e, 'Mary''s lamb' is read by this class as
   * two strings, 'Mary' and 's lamb'.
   * <p>
   * The delimiter of the string is expected to be repeated at its other end. If
   * the other flavor of delimiter occurs within the string, it is just a normal
   * character within it.
   * <p>
   * All characters except the delimiter are permitted within the string. If EOF
   * is hit before the closing delimiter is found, the end of the string is
   * assumed. Parsers using this parser will detect the error in that case and
   * return appropriate messages.
   *
   * @param stringDelimiter
   *          the starting and ending character for the string being read.
   */
  private void readString(char stringDelimiter) {
    state = IN_STRING;
    do {
      char nextChar = readChar();

      if (atEOF()) {
        state = END_OF_INPUT;
        return;
      }

      if (nextChar == stringDelimiter) {
        // we've reached the end of the string
        state = IN_STATEMENT;
        return;
      }
      // still in string
    } while (state == IN_STRING); // could be while true...
  }

  private boolean atEOF() {
    return atEOF;
  }

  private boolean peekEOF() {
    return peekEOF;
  }

  /**
   * return the next character in the source stream and append it to the
   * statement buffer.
   *
   * @return the next character in the source stream.
   */
  protected char readChar() {
    // ensure there is a cached lookahead character, then consume it
    if (!peeked)
      peekChar();
    peeked = false;
    atEOF = peekEOF;
    if (!atEOF)
      statement.append(peekChar);
    return peekChar;
  }

  /**
   * return the next character in the source stream, without advancing.
   *
   * @return the next character in the source stream.
   */
  // NOTE(review): "without advancing" is relative to the statement buffer only —
  // each call DOES read one character from 'source' and caches it in 'peekChar';
  // readChar() later appends that cached character. Calling peekChar() twice in
  // a row discards a character, which nextStatement() relies on to skip
  // whitespace.
  protected char peekChar() {
    peeked = true;
    char c = '\00';
    try {
      int cInt;
      // REMIND: this is assuming a flat ascii source file.
      // will need to beef it up at some future point to
      // understand whether the stream is ascii or something else.
      cInt = source.read();
      peekEOF = (cInt == -1);
      if (!peekEOF)
        c = (char)cInt;
    } catch (IOException ie) {
      throw getIOException(ie);
    }
    peekChar = c;
    return c;
  }

  // Subclass hook: invoked whenever a line terminator is processed; the base
  // implementation does nothing.
  protected void foundReturn(char c, boolean continuedStatement) {
  }

  // Subclass hook: translate an I/O failure into the unchecked exception type
  // the subclass prefers; defaults to IllegalArgumentException.
  protected RuntimeException getIOException(IOException ex) {
    return new IllegalArgumentException(ex);
  }
}
/** * $URL: https://source.sakaiproject.org/svn/sitestats/tags/sakai-10.1/sitestats-tool/src/java/org/sakaiproject/sitestats/tool/wicket/widget/ActivityWidget.java $ * $Id: ActivityWidget.java 105078 2012-02-24 23:00:38Z ottenhoff@longsight.com $ * * Copyright (c) 2006-2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sakaiproject.sitestats.tool.wicket.widget; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.wicket.Page; import org.apache.wicket.PageParameters; import org.apache.wicket.extensions.markup.html.tabs.AbstractTab; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.ResourceModel; import org.sakaiproject.sitestats.api.EventStat; import org.sakaiproject.sitestats.api.PrefsData; import org.sakaiproject.sitestats.api.Stat; import org.sakaiproject.sitestats.api.StatsManager; import org.sakaiproject.sitestats.api.Util; import org.sakaiproject.sitestats.api.report.Report; import org.sakaiproject.sitestats.api.report.ReportDef; import org.sakaiproject.sitestats.api.report.ReportManager; import org.sakaiproject.sitestats.api.report.ReportParams; import org.sakaiproject.sitestats.tool.facade.Locator; import org.sakaiproject.sitestats.tool.wicket.pages.PreferencesPage; import org.sakaiproject.user.api.UserNotDefinedException; public class ActivityWidget extends Panel { 
private static final long serialVersionUID = 1L;
	private static Log LOG = LogFactory.getLog(ActivityWidget.class);

	/** The site id. */
	private String siteId = null;
	// SiteStats preferences for the site, loaded lazily by getPrefsdata()
	private PrefsData prefsdata = null;

	/**
	 * Default constructor.
	 * @param id The wicket:id
	 * @param siteId The related site id
	 */
	public ActivityWidget(String id, final String siteId) {
		super(id);
		this.siteId = siteId;
		setRenderBodyOnly(true);
		setOutputMarkupId(true);

		// Single values (MiniStat)
		List<WidgetMiniStat> widgetMiniStats = new ArrayList<WidgetMiniStat>();
		widgetMiniStats.add(getMiniStatActivityEvents());
		widgetMiniStats.add(getMiniStatMostActiveTool());
		widgetMiniStats.add(getMiniStatMostActiveUser());
		//widgetMiniStats.add(getMiniStatConfigureLink());

		// Tabs
		List<AbstractTab> tabs = new ArrayList<AbstractTab>();
		tabs.add(new AbstractTab(new ResourceModel("overview_tab_bydate")) {
			private static final long serialVersionUID = 1L;
			@Override
			public Panel getPanel(String panelId) {
				return getWidgetTabByDate(panelId);
			}
		});
		tabs.add(new AbstractTab(new ResourceModel("overview_tab_byuser")) {
			private static final long serialVersionUID = 1L;
			@Override
			public Panel getPanel(String panelId) {
				return getWidgetTabByUser(panelId);
			}
		});
		tabs.add(new AbstractTab(new ResourceModel("overview_tab_bytool")) {
			private static final long serialVersionUID = 1L;
			@Override
			public Panel getPanel(String panelId) {
				return getWidgetTabByTool(panelId);
			}
		});

		// Final Widget object
		String icon = StatsManager.SILK_ICONS_DIR + "chart_pie.png";
		String title = (String) new ResourceModel("overview_title_activity").getObject();
		Widget widget = new Widget("widget", icon, title, widgetMiniStats, tabs);
		add(widget);
	}

	// -------------------------------------------------------------------------------

	/** MiniStat:: Activity events */
	private WidgetMiniStat getMiniStatActivityEvents() {
		return new WidgetMiniStat() {
			private static final long serialVersionUID = 1L;
			@Override
			public String getValue() {
				long start = 0;
				if(LOG.isDebugEnabled()) start = System.currentTimeMillis();
				String val = Long.toString(Locator.getFacade().getStatsManager().getTotalSiteActivity(siteId, getPrefsdata().getToolEventsStringList()));
				if(LOG.isDebugEnabled()) LOG.debug("getMiniStatActivityEvents() in " + (System.currentTimeMillis() - start) + " ms");
				return val;
			}
			@Override
			public String getSecondValue() {
				return null;
			}
			@Override
			public String getTooltip() {
				return null;
			}
			@Override
			public boolean isWiderText() {
				return false;
			}
			@Override
			public String getLabel() {
				return (String) new ResourceModel("overview_title_events_sum").getObject();
			}
			@Override
			public ReportDef getReportDefinition() {
				ReportDef r = new ReportDef();
				r.setId(0);
				r.setSiteId(siteId);
				ReportParams rp = new ReportParams(siteId);
				// what
				rp.setWhat(ReportManager.WHAT_EVENTS);
				rp.setWhatEventSelType(ReportManager.WHAT_EVENTS_BYEVENTS);
				rp.setWhatEventIds(getPrefsdata().getToolEventsStringList());
				// when
				rp.setWhen(ReportManager.WHEN_ALL);
				// who
				rp.setWho(ReportManager.WHO_ALL);
				// grouping
				List<String> totalsBy = new ArrayList<String>();
				totalsBy.add(StatsManager.T_EVENT);
				rp.setHowTotalsBy(totalsBy);
				// sorting
				rp.setHowSort(true);
				rp.setHowSortBy(StatsManager.T_EVENT);
				rp.setHowSortAscending(true);
				// chart
				rp.setHowPresentationMode(ReportManager.HOW_PRESENTATION_TABLE);
				r.setReportParams(rp);
				return r;
			}
		};
	}

	/** MiniStat:: Most active tool */
	private WidgetMiniStat getMiniStatMostActiveTool() {
		return new WidgetMiniStat() {
			private static final long serialVersionUID = 1L;
			// cache fields filled once by processData(); a null mostActiveTool
			// means "not computed yet" (or that computation failed)
			private String mostActiveTool = null;
			private long totalToolActivity = 0;
			private long totalActivity = 0;
			@Override
			public String getValue() {
				long start = 0;
				if(LOG.isDebugEnabled()) start = System.currentTimeMillis();
				processData();
				String val = null;
				if(mostActiveTool != null) {
					val = Locator.getFacade().getEventRegistryService().getToolName(mostActiveTool);
				}else{
					val = "-";
				}
				if(LOG.isDebugEnabled()) LOG.debug("getMiniStatMostActiveTool() in " + (System.currentTimeMillis() - start) + " ms");
				return val;
			}
			@Override
			public String getSecondValue() {
				// share of total activity attributed to the top tool, as a whole percent
				double percentage = totalActivity==0 ? 0 : Util.round(100 * totalToolActivity / (double) totalActivity, 0);
				return String.valueOf((int) percentage) + '%';
			}
			@Override
			public String getTooltip() {
				if(mostActiveTool != null) {
					return Locator.getFacade().getEventRegistryService().getToolName(mostActiveTool);
				}else{
					return null;
				}
			}
			@Override
			public boolean isWiderText() {
				return true;
			}
			@Override
			public String getLabel() {
				return (String) new ResourceModel("overview_title_mostactivetool_sum").getObject();
			}
			// NOTE(review): method name carries a long-standing typo ("Defition");
			// it is private to this anonymous class so renaming would be safe, but
			// is left as-is here since this change only adds documentation.
			private ReportDef getCommonReportDefition() {
				ReportDef r = new ReportDef();
				r.setId(0);
				r.setSiteId(siteId);
				ReportParams rp = new ReportParams(siteId);
				// what
				rp.setWhat(ReportManager.WHAT_EVENTS);
				rp.setWhatEventSelType(ReportManager.WHAT_EVENTS_BYEVENTS);
				rp.setWhatEventIds(getPrefsdata().getToolEventsStringList());
				// when
				rp.setWhen(ReportManager.WHEN_ALL);
				// who
				rp.setWho(ReportManager.WHO_ALL);
				// grouping
				List<String> totalsBy = new ArrayList<String>();
				totalsBy.add(StatsManager.T_EVENT);
				rp.setHowTotalsBy(totalsBy);
				// sorting
				rp.setHowSort(true);
				rp.setHowSortBy(StatsManager.T_EVENT);
				rp.setHowSortAscending(true);
				// chart
				rp.setHowPresentationMode(ReportManager.HOW_PRESENTATION_TABLE);
				r.setReportParams(rp);
				return r;
			}
			// Runs the report once and caches the top tool plus activity totals;
			// the report is sorted by total descending, so the first row is the
			// most active tool. Any failure silently resets the cache to null.
			private void processData() {
				if(mostActiveTool == null) {
					ReportDef rd = getCommonReportDefition();
					ReportParams rp = rd.getReportParams();
					List<String> totalsBy = new ArrayList<String>();
					totalsBy.add(StatsManager.T_TOOL);
					rp.setHowTotalsBy(totalsBy);
					rp.setHowSort(true);
					rp.setHowSortBy(StatsManager.T_TOTAL);
					rp.setHowSortAscending(false);
					Report r = Locator.getFacade().getReportManager().getReport(rd, true, null, false);
					try{
						boolean first = true;
						for(Stat s : r.getReportData()) {
							EventStat es = (EventStat) s;
							if(first) {
								mostActiveTool = es.getToolId();
								totalToolActivity = es.getCount();
								first = false;
							}
							totalActivity += es.getCount();
						}
					}catch(Exception e) {
						// swallow and fall back to the "-" placeholder in getValue()
						mostActiveTool = null;
					}
				}
			}
			@Override
			public ReportDef getReportDefinition() {
				ReportDef rd = getCommonReportDefition();
				ReportParams rp = rd.getReportParams();
				List<String> totalsBy = new ArrayList<String>();
				totalsBy.add(StatsManager.T_TOOL);
				rp.setHowTotalsBy(totalsBy);
				rp.setHowSortBy(StatsManager.T_TOOL);
				rp.setHowPresentationMode(ReportManager.HOW_PRESENTATION_BOTH);
				rp.setHowChartType(StatsManager.CHARTTYPE_PIE);
				rp.setHowChartSource(StatsManager.T_TOOL);
				return rd;
			}
		};
	}

	/** MiniStat:: Most active user */
	private WidgetMiniStat getMiniStatMostActiveUser() {
		return new WidgetMiniStat() {
			private static final long serialVersionUID = 1L;
			// cache fields filled once by processData(); "-" and "?" are special
			// user markers handled explicitly below
			private String mostActiveUser = null;
			private long totalUserActivity = 0;
			private long totalActivity = 0;
			@Override
			public String getValue() {
				long start = 0;
				if(LOG.isDebugEnabled()) start = System.currentTimeMillis();
				processData();
				String val = null;
				if(mostActiveUser != null) {
					String id = null;
					if(("-").equals(mostActiveUser) || ("?").equals(mostActiveUser)){
						id = "-";
					}else{
						try{
							id = Locator.getFacade().getUserDirectoryService().getUser(mostActiveUser).getDisplayId();
						}catch(UserNotDefinedException e1){
							// user no longer resolvable: show the raw id instead
							id = mostActiveUser;
						}
					}
					val = id;
				}else{
					val = "-";
				}
				if(LOG.isDebugEnabled()) LOG.debug("getMiniStatMostActiveUser() in " + (System.currentTimeMillis() - start) + " ms");
				return val;
			}
			@Override
			public String getSecondValue() {
				// share of total activity attributed to the top user, as a whole percent
				double percentage = totalActivity==0 ? 0 : Util.round(100 * totalUserActivity / (double) totalActivity, 0);
				return String.valueOf((int) percentage) + '%';
			}
			@Override
			public String getTooltip() {
				if(mostActiveUser != null) {
					String name = null;
					if(("-").equals(mostActiveUser)) {
						name = (String) new ResourceModel("user_anonymous").getObject();
					}else if(("?").equals(mostActiveUser)) {
						name = (String) new ResourceModel("user_anonymous_access").getObject();
					}else{
						name = Locator.getFacade().getStatsManager().getUserNameForDisplay(mostActiveUser);
					}
					return name;
				}else{
					return null;
				}
			}
			@Override
			public boolean isWiderText() {
				return true;
			}
			@Override
			public String getLabel() {
				return (String) new ResourceModel("overview_title_mostactiveuser_sum").getObject();
			}
			// Base report: all events per user, sorted by total descending (note
			// the "Defition" typo is repeated here from the sibling MiniStat).
			private ReportDef getCommonReportDefition() {
				ReportDef r = new ReportDef();
				r.setId(0);
				r.setSiteId(siteId);
				ReportParams rp = new ReportParams(siteId);
				// what
				rp.setWhat(ReportManager.WHAT_EVENTS);
				rp.setWhatEventSelType(ReportManager.WHAT_EVENTS_BYEVENTS);
				rp.setWhatEventIds(getPrefsdata().getToolEventsStringList());
				// when
				rp.setWhen(ReportManager.WHEN_ALL);
				// who
				rp.setWho(ReportManager.WHO_ALL);
				// grouping
				List<String> totalsBy = new ArrayList<String>();
				totalsBy.add(StatsManager.T_USER);
				rp.setHowTotalsBy(totalsBy);
				// sorting
				rp.setHowSort(true);
				rp.setHowSortBy(StatsManager.T_TOTAL);
				rp.setHowSortAscending(false);
				// chart
				rp.setHowPresentationMode(ReportManager.HOW_PRESENTATION_TABLE);
				r.setReportParams(rp);
				return r;
			}
			// Runs the report once and caches the top user plus activity totals;
			// rows arrive sorted by total descending, so the first row wins.
			private void processData() {
				if(mostActiveUser == null) {
					Report r = Locator.getFacade().getReportManager().getReport(getCommonReportDefition(), true, null, false);
					try{
						boolean first = true;
						for(Stat s : r.getReportData()) {
							EventStat es = (EventStat) s;
							if(first) {
								mostActiveUser = es.getUserId();
								totalUserActivity = es.getCount();
								first = false;
							}
							totalActivity += es.getCount();
						}
					}catch(Exception e) {
						// swallow and fall back to the "-" placeholder in getValue()
						mostActiveUser = null;
					}
				}
			}
			@Override
			public ReportDef getReportDefinition() {
				return getCommonReportDefition();
			}
		};
	}

	/** MiniStat:: Link for Preferences */
	// NOTE(review): currently unused — the call adding it is commented out in
	// the constructor above.
	private WidgetMiniStat getMiniStatConfigureLink() {
		return new WidgetMiniStatLink() {
			private static final long serialVersionUID = 1L;
			@Override
			public Page getPageLink() {
				return new PreferencesPage(new PageParameters("siteId="+siteId));
			}
			@Override
			public String getLabel() {
				return (String) new ResourceModel("overview_title_configure_activity").getObject();
			}
			@Override
			public String getPageLinkTooltip() {
				return (String) new ResourceModel("overview_title_configure_activity_tip").getObject();
			}; // (stray semicolon kept from the original source)
		};
	}

	// -------------------------------------------------------------------------------

	/** WidgetTab: By date */
	protected WidgetTabTemplate getWidgetTabByDate(String panelId) {
		WidgetTabTemplate wTab = new WidgetTabTemplate(panelId, ActivityWidget.this.siteId) {
			private static final long serialVersionUID = 1L;
			@Override
			public List<Integer> getFilters() {
				return Arrays.asList(FILTER_DATE, FILTER_ROLE, FILTER_TOOL);
			}
			@Override
			public boolean useChartReportDefinitionForTable() {
				return true;
			}
			@Override
			public ReportDef getTableReportDefinition() {
				return getChartReportDefinition();
			}
			@Override
			public ReportDef getChartReportDefinition() {
				String dateFilter = getDateFilter();
				String roleFilter = getRoleFilter();
				ReportDef r = new ReportDef();
				r.setSiteId(siteId);
				ReportParams rp = new ReportParams(siteId);
				// what
				rp.setWhat(ReportManager.WHAT_EVENTS);
				rp.setWhatEventSelType(ReportManager.WHAT_EVENTS_BYEVENTS);
				rp.setWhatEventIds(getToolEventsFilter());
				// when
				rp.setWhen(dateFilter);
				// who
				if(!ReportManager.WHO_ALL.equals(roleFilter)) {
					rp.setWho(ReportManager.WHO_ROLE);
					rp.setWhoRoleId(roleFilter);
				}
				// grouping: month buckets for long ranges, day buckets otherwise
				List<String> totalsBy = new ArrayList<String>();
				if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					totalsBy.add(StatsManager.T_DATEMONTH);
				}else{
					totalsBy.add(StatsManager.T_DATE);
				}
				rp.setHowTotalsBy(totalsBy);
				// sorting
				rp.setHowSort(true);
				if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					rp.setHowSortBy(StatsManager.T_DATEMONTH);
				}else{
					rp.setHowSortBy(StatsManager.T_DATE);
				}
				rp.setHowSortAscending(false);
				// chart: time-series bar, with the series period matched to the range
				rp.setHowPresentationMode(ReportManager.HOW_PRESENTATION_BOTH);
				rp.setHowChartType(StatsManager.CHARTTYPE_TIMESERIESBAR);
				rp.setHowChartSource(StatsManager.T_DATE);
				rp.setHowChartSeriesSource(StatsManager.T_NONE);
				if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					rp.setHowChartSeriesPeriod(StatsManager.CHARTTIMESERIES_MONTH);
				}else if(dateFilter.equals(ReportManager.WHEN_LAST30DAYS)) {
					rp.setHowChartSeriesPeriod(StatsManager.CHARTTIMESERIES_DAY);
				}else{
					rp.setHowChartSeriesPeriod(StatsManager.CHARTTIMESERIES_WEEKDAY);
				}
				r.setReportParams(rp);
				return r;
			}
		};
		return wTab;
	}

	/** WidgetTab: By user */
	protected WidgetTabTemplate getWidgetTabByUser(String panelId) {
		WidgetTabTemplate wTab = new WidgetTabTemplate(panelId, ActivityWidget.this.siteId) {
			private static final long serialVersionUID = 1L;
			@Override
			public List<Integer> getFilters() {
				return Arrays.asList(FILTER_DATE, FILTER_ROLE, FILTER_TOOL);
			}
			@Override
			public boolean useChartReportDefinitionForTable() {
				return false;
			}
			@Override
			public ReportDef getTableReportDefinition() {
				// table variant: regroup the chart definition by user totals
				ReportDef r = getChartReportDefinition();
				ReportParams rp = r.getReportParams();
				List<String> totalsBy = new ArrayList<String>();
				totalsBy.add(StatsManager.T_USER);
				rp.setHowTotalsBy(totalsBy);
				rp.setHowSortBy(StatsManager.T_TOTAL);
				rp.setHowChartType(StatsManager.CHARTTYPE_PIE);
				rp.setHowChartSource(StatsManager.T_USER);
				r.setReportParams(rp);
				return r;
			}
			@Override
			public ReportDef getChartReportDefinition() {
				String dateFilter = getDateFilter();
				String roleFilter = getRoleFilter();
				ReportDef r = new ReportDef();
				r.setSiteId(siteId);
				ReportParams rp = new ReportParams(siteId);
				// what
				rp.setWhat(ReportManager.WHAT_EVENTS);
				rp.setWhatEventSelType(ReportManager.WHAT_EVENTS_BYEVENTS);
				rp.setWhatEventIds(getToolEventsFilter());
				// when
				rp.setWhen(dateFilter);
				// who
				if(!ReportManager.WHO_ALL.equals(roleFilter)) {
					rp.setWho(ReportManager.WHO_ROLE);
					rp.setWhoRoleId(roleFilter);
				}
				// grouping: date bucket (month for long ranges) plus user
				List<String> totalsBy = new ArrayList<String>();
				if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					totalsBy.add(StatsManager.T_DATEMONTH);
				}else{
					totalsBy.add(StatsManager.T_DATE);
				}
				totalsBy.add(StatsManager.T_USER);
				rp.setHowTotalsBy(totalsBy);
				// sorting
				rp.setHowSort(true);
				if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					rp.setHowSortBy(StatsManager.T_DATEMONTH);
				}else{
					rp.setHowSortBy(StatsManager.T_DATE);
				}
				rp.setHowSortAscending(false);
				// chart
				rp.setHowPresentationMode(ReportManager.HOW_PRESENTATION_BOTH);
				/*rp.setHowChartType(StatsManager.CHARTTYPE_TIMESERIES);
				rp.setHowChartSource(StatsManager.T_DATE);
				rp.setHowChartSeriesSource(StatsManager.T_USER);
				if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					rp.setHowChartSeriesPeriod(StatsManager.CHARTTIMESERIES_MONTH);
				}else if(dateFilter.equals(ReportManager.WHEN_LAST30DAYS)) {
					rp.setHowChartSeriesPeriod(StatsManager.CHARTTIMESERIES_DAY);
				}else{
					rp.setHowChartSeriesPeriod(StatsManager.CHARTTIMESERIES_WEEKDAY);
				}*/
				rp.setHowChartType(StatsManager.CHARTTYPE_PIE);
				rp.setHowChartSource(StatsManager.T_USER);
				r.setReportParams(rp);
				return r;
			}
		};
		return wTab;
	}

	/** WidgetTab: By tool */
	protected WidgetTabTemplate getWidgetTabByTool(String panelId) {
		WidgetTabTemplate wTab = new WidgetTabTemplate(panelId, ActivityWidget.this.siteId) {
			private static final long serialVersionUID = 1L;
			@Override
			public List<Integer> getFilters() {
				return Arrays.asList(FILTER_DATE, FILTER_ROLE, FILTER_TOOL);
			}
			@Override
			public boolean useChartReportDefinitionForTable() {
				return false;
			}
			@Override
			public ReportDef getTableReportDefinition() {
				// table variant: regroup the chart definition by tool totals
				ReportDef r = getChartReportDefinition();
				ReportParams rp = r.getReportParams();
				List<String> totalsBy = new ArrayList<String>();
				totalsBy.add(StatsManager.T_TOOL);
				rp.setHowTotalsBy(totalsBy);
				rp.setHowSortBy(StatsManager.T_TOTAL);
				rp.setHowChartType(StatsManager.CHARTTYPE_PIE);
				rp.setHowChartSource(StatsManager.T_TOOL);
				r.setReportParams(rp);
				return r;
			}
			@Override
			public ReportDef getChartReportDefinition() {
				String dateFilter = getDateFilter();
				String roleFilter = getRoleFilter();
				ReportDef r = new ReportDef();
				r.setSiteId(siteId);
				ReportParams rp = new ReportParams(siteId);
				// what
				rp.setWhat(ReportManager.WHAT_EVENTS);
				rp.setWhatEventSelType(ReportManager.WHAT_EVENTS_BYEVENTS);
				rp.setWhatEventIds(getToolEventsFilter());
				// when
				rp.setWhen(dateFilter);
				// who
				if(!ReportManager.WHO_ALL.equals(roleFilter)) {
					rp.setWho(ReportManager.WHO_ROLE);
					rp.setWhoRoleId(roleFilter);
				}
				// grouping
				List<String> totalsBy = new ArrayList<String>();
				/*if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					totalsBy.add(StatsManager.T_DATEMONTH);
				}else{
					totalsBy.add(StatsManager.T_DATE);
				}*/
				totalsBy.add(StatsManager.T_TOOL);
				rp.setHowTotalsBy(totalsBy);
				// sorting
				// NOTE(review): sorts by a date column even though the grouping above
				// is by tool only — looks inherited from the by-date tab; confirm
				// intended behavior before changing.
				rp.setHowSort(true);
				if(dateFilter.equals(ReportManager.WHEN_LAST365DAYS) || dateFilter.equals(ReportManager.WHEN_ALL)) {
					rp.setHowSortBy(StatsManager.T_DATEMONTH);
				}else{
					rp.setHowSortBy(StatsManager.T_DATE);
				}
				rp.setHowSortAscending(false);
				// chart
				rp.setHowPresentationMode(ReportManager.HOW_PRESENTATION_BOTH);
				rp.setHowChartType(StatsManager.CHARTTYPE_PIE);
				rp.setHowChartSource(StatsManager.T_TOOL);
				r.setReportParams(rp);
				return r;
			}
		};
		return wTab;
	}

	// -------------------------------------------------------------------------------

	// -------------------------------------------------------------------------------

	// Lazily loads and caches the SiteStats preferences for this site.
	private PrefsData getPrefsdata() {
		if(prefsdata == null) {
			prefsdata = Locator.getFacade().getStatsManager().getPreferences(siteId, true);
		}
		return prefsdata;
	}
}
package net.ashald.envfile.platform.ui;

import com.intellij.execution.configurations.RunConfigurationBase;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.*;
import com.intellij.ui.table.TableView;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.ListTableModel;
import net.ashald.envfile.platform.EnvFileEntry;
import net.ashald.envfile.platform.EnvVarsProviderExtension;
import net.ashald.envfile.platform.EnvFileSettings;
import net.ashald.envfile.platform.ui.table.EnvFileIsActiveColumnInfo;
import net.ashald.envfile.platform.ui.table.EnvFilePathColumnInfo;
import net.ashald.envfile.platform.ui.table.EnvFileTypeColumnInfo;

import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.table.JTableHeader;
import javax.swing.table.TableColumn;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.*;

/**
 * Run-configuration settings panel for the EnvFile plugin: a master "Enable EnvFile"
 * checkbox, a set of option checkboxes, and a table of env-file entries with
 * add/remove toolbar actions. State is transferred to/from {@link EnvFileSettings}
 * via {@link #getState()} and {@link #setState(EnvFileSettings)}.
 */
class EnvFileConfigurationPanel<T extends RunConfigurationBase> extends JPanel {
    /** Maximum number of entries kept in the shared "Recent" list. */
    private static final int MAX_RECENT = 5;
    /** Recently added entries, shared across all panels; guarded by synchronizing on itself. */
    private static final LinkedList<EnvFileEntry> recent = new LinkedList<EnvFileEntry>();

    private final RunConfigurationBase runConfig;

    private final JCheckBox useEnvFileCheckBox;
    private final JCheckBox substituteEnvVarsCheckBox;
    private final JCheckBox supportPathMacroCheckBox;
    private final JCheckBox ignoreMissingCheckBox;
    private final JCheckBox experimentalIntegrationsCheckBox;
    private final ListTableModel<EnvFileEntry> envFilesModel;
    private final TableView<EnvFileEntry> envFilesTable;

    EnvFileConfigurationPanel(T configuration) {
        runConfig = configuration;

        // Define Model
        ColumnInfo<EnvFileEntry, Boolean> IS_ACTIVE = new EnvFileIsActiveColumnInfo();
        ColumnInfo<EnvFileEntry, String> FILE = new EnvFilePathColumnInfo();
        ColumnInfo<EnvFileEntry, EnvFileEntry> TYPE = new EnvFileTypeColumnInfo();

        envFilesModel = new ListTableModel<>(IS_ACTIVE, FILE, TYPE);

        // Create Table
        envFilesTable = new TableView<>(envFilesModel);
        envFilesTable.getEmptyText().setText("No environment variables files selected");

        setUpColumnWidth(envFilesTable, 0, IS_ACTIVE, 20);
        setUpColumnWidth(envFilesTable, 2, TYPE, 50);

        envFilesTable.setColumnSelectionAllowed(false);
        envFilesTable.setShowGrid(false);
        envFilesTable.setDragEnabled(true);
        envFilesTable.setShowHorizontalLines(false);
        envFilesTable.setShowVerticalLines(false);
        envFilesTable.setIntercellSpacing(new Dimension(0, 0));

        // Create global activation flag; toggling it enables/disables the table and every sub-option.
        useEnvFileCheckBox = new JCheckBox("Enable EnvFile");
        useEnvFileCheckBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                boolean enabled = useEnvFileCheckBox.isSelected();
                envFilesTable.setEnabled(enabled);
                substituteEnvVarsCheckBox.setEnabled(enabled);
                supportPathMacroCheckBox.setEnabled(enabled);
                ignoreMissingCheckBox.setEnabled(enabled);
                // FIX: previously omitted here even though setState() toggles this
                // checkbox together with all the others; without it the experimental
                // integrations option stayed clickable while EnvFile was disabled.
                experimentalIntegrationsCheckBox.setEnabled(enabled);
            }
        });

        substituteEnvVarsCheckBox =
                new JCheckBox("Substitute Environment Variables (${FOO} / ${BAR:-default} / $${ESCAPED})");
        // Propagate the substitution flag to every existing entry when it changes.
        substituteEnvVarsCheckBox.addActionListener(e -> envFilesModel.getItems()
                .forEach(envFileEntry -> envFileEntry.setSubstitutionEnabled(substituteEnvVarsCheckBox.isSelected())));

        supportPathMacroCheckBox = new JCheckBox("Process JetBrains path macro references ($PROJECT_DIR$)");
        ignoreMissingCheckBox = new JCheckBox("Ignore missing files");
        experimentalIntegrationsCheckBox =
                new JCheckBox("Enable experimental integrations (e.g. Gradle) - may break any time!");

        // TODO: come up with a generic approach for this
        envFilesModel.addRow(
                new EnvFileEntry(runConfig, "runconfig", null, true, substituteEnvVarsCheckBox.isSelected()));

        // Create Toolbar - Add/Remove/Move actions
        final ToolbarDecorator envFilesTableDecorator = ToolbarDecorator.createDecorator(envFilesTable);

        // Both toolbar buttons are only active while EnvFile itself is enabled.
        final AnActionButtonUpdater updater = new AnActionButtonUpdater() {
            @Override
            public boolean isEnabled(AnActionEvent e) {
                return useEnvFileCheckBox.isSelected();
            }
        };

        envFilesTableDecorator
                .setAddAction(new AnActionButtonRunnable() {
                    @Override
                    public void run(AnActionButton button) {
                        doAddAction(button, envFilesTable, envFilesModel);
                    }
                })
                .setAddActionUpdater(updater)
                .setRemoveActionUpdater(new AnActionButtonUpdater() {
                    @Override
                    public boolean isEnabled(AnActionEvent e) {
                        // Removal additionally requires a selection consisting solely of editable entries
                        // (the built-in "runconfig" entry is not editable).
                        boolean allEditable = true;
                        for (EnvFileEntry entry : envFilesTable.getSelectedObjects()) {
                            if (!entry.isEditable()) {
                                allEditable = false;
                                break;
                            }
                        }
                        return updater.isEnabled(e) && envFilesTable.getSelectedRowCount() >= 1 && allEditable;
                    }
                });

        JPanel optionsPanel = new JPanel();
        optionsPanel.setBorder(JBUI.Borders.empty(5, 22, 5, 5));
        optionsPanel.setLayout(new BoxLayout(optionsPanel, BoxLayout.Y_AXIS));
        optionsPanel.add(substituteEnvVarsCheckBox);
        optionsPanel.add(supportPathMacroCheckBox);
        optionsPanel.add(ignoreMissingCheckBox);
        optionsPanel.add(experimentalIntegrationsCheckBox);

        // Compose UI
        JPanel checkboxPanel = new JPanel();
        checkboxPanel.setLayout(new BoxLayout(checkboxPanel, BoxLayout.Y_AXIS));
        checkboxPanel.add(useEnvFileCheckBox);
        checkboxPanel.add(optionsPanel);
        // NOTE: removed a no-op `optionsPanel.setLocation(100, 100)` — the position is
        // controlled by BoxLayout, so the explicit location was dead code.

        JPanel envFilesTableDecoratorPanel = envFilesTableDecorator.createPanel();
        Dimension size = new Dimension(-1, 150);
        envFilesTableDecoratorPanel.setMinimumSize(size);
        envFilesTableDecoratorPanel.setPreferredSize(size);

        setLayout(new BorderLayout());
        add(checkboxPanel, BorderLayout.NORTH);
        add(envFilesTableDecoratorPanel, BorderLayout.CENTER);
    }

    /**
     * Pins a table column to a fixed width derived from its header text plus {@code extend} pixels.
     * NOTE(review): this also installs a {@link BooleanTableCellRenderer} on whatever column it is
     * applied to — including the non-boolean TYPE column (index 2). Possibly the column's own
     * ColumnInfo renderer takes precedence; behavior preserved as-is — TODO confirm.
     */
    private void setUpColumnWidth(TableView<EnvFileEntry> table, int columnIdx, ColumnInfo columnInfo, int extend) {
        JTableHeader tableHeader = table.getTableHeader();
        FontMetrics fontMetrics = tableHeader.getFontMetrics(tableHeader.getFont());

        int preferredWidth = fontMetrics.stringWidth(columnInfo.getName()) + extend;
        table.getColumnModel().getColumn(columnIdx).setCellRenderer(new BooleanTableCellRenderer());
        TableColumn tableColumn = tableHeader.getColumnModel().getColumn(columnIdx);
        tableColumn.setWidth(preferredWidth);
        tableColumn.setPreferredWidth(preferredWidth);
        tableColumn.setMinWidth(preferredWidth);
        tableColumn.setMaxWidth(preferredWidth);
    }

    /**
     * Shows the "Add..." popup: one action per editable provider extension plus a "Recent" section.
     * Chosen files are stored project-relative when located under the project base dir.
     */
    private void doAddAction(AnActionButton button, final TableView<EnvFileEntry> table,
                             final ListTableModel<EnvFileEntry> model) {
        final JBPopupFactory popupFactory = JBPopupFactory.getInstance();

        DefaultActionGroup actionGroup = new DefaultActionGroup(null, false);

        for (final EnvVarsProviderExtension extension : EnvVarsProviderExtension.getParserExtensions()) {
            // Skip providers that cannot be configured by the user (e.g. the built-in runconfig one).
            if (!extension.getFactory().createProvider(substituteEnvVarsCheckBox.isSelected()).isEditable()) {
                continue;
            }

            final String title = String.format("%s file", extension.getFactory().getTitle());
            AnAction anAction = new AnAction(title) {
                @Override
                public void actionPerformed(AnActionEvent e) {
                    final FileChooserDescriptor chooserDescriptor = FileChooserDescriptorFactory
                            .createSingleFileNoJarsDescriptor()
                            .withTitle(String.format("Select %s", title));

                    Project project = runConfig.getProject();
                    VirtualFile selectedFile = FileChooser.chooseFile(chooserDescriptor, project, null);

                    if (selectedFile != null) {
                        String selectedPath = selectedFile.getPath();
                        String baseDir = runConfig.getProject().getBaseDir().getPath();
                        // Relativize against the project root when possible (+1 strips the separator).
                        if (selectedPath.startsWith(baseDir)) {
                            selectedPath = selectedPath.substring(baseDir.length() + 1);
                        }

                        ArrayList<EnvFileEntry> newList = new ArrayList<EnvFileEntry>(model.getItems());
                        final EnvFileEntry newOptions = new EnvFileEntry(runConfig, extension.getId(), selectedPath,
                                true, substituteEnvVarsCheckBox.isSelected());
                        newList.add(newOptions);
                        model.setItems(newList);
                        int index = model.getRowCount() - 1;
                        model.fireTableRowsInserted(index, index);
                        table.setRowSelectionInterval(index, index);

                        // Promote to the front of the bounded MRU list.
                        synchronized (recent) {
                            recent.remove(newOptions);
                            recent.addFirst(newOptions);
                            if (recent.size() > MAX_RECENT) recent.removeLast();
                        }
                    }
                }
            };
            actionGroup.add(anAction);
        }

        synchronized (recent) {
            if (!recent.isEmpty()) {
                actionGroup.addSeparator("Recent");
                for (final EnvFileEntry entry : recent) {
                    String title = String.format("%s -> %s", entry.getTypeTitle(), entry.getPath());
                    // Elide overly long labels: keep 39 chars from each end.
                    String shortTitle = title.length() < 81
                            ? title
                            : title.replaceFirst("(.{39}).+(.{39})", "$1...$2");
                    AnAction anAction = new AnAction(shortTitle, title, null) {
                        @Override
                        public void actionPerformed(AnActionEvent e) {
                            ArrayList<EnvFileEntry> newList = new ArrayList<EnvFileEntry>(model.getItems());
                            newList.add(entry);
                            model.setItems(newList);
                            int index = model.getRowCount() - 1;
                            model.fireTableRowsInserted(index, index);
                            table.setRowSelectionInterval(index, index);
                            synchronized (recent) {
                                recent.remove(entry);
                                recent.addFirst(entry);
                            }
                        }
                    };
                    actionGroup.add(anAction);
                }
            }
        }

        final ListPopup popup = popupFactory.createActionGroupPopup("Add...", actionGroup,
                SimpleDataContext.getProjectContext(runConfig.getProject()), false, false, false, null, -1,
                Conditions.<AnAction>alwaysTrue());
        popup.show(button.getPreferredPopupPoint());
    }

    /** Snapshots the current UI state into an immutable settings object. */
    EnvFileSettings getState() {
        return new EnvFileSettings(
                useEnvFileCheckBox.isSelected(),
                substituteEnvVarsCheckBox.isSelected(),
                supportPathMacroCheckBox.isSelected(),
                envFilesModel.getItems(),
                ignoreMissingCheckBox.isSelected(),
                experimentalIntegrationsCheckBox.isSelected()
        );
    }

    /** Applies persisted settings to the UI, including enabling/disabling sub-controls. */
    void setState(EnvFileSettings state) {
        useEnvFileCheckBox.setSelected(state.isEnabled());
        substituteEnvVarsCheckBox.setSelected(state.isSubstituteEnvVarsEnabled());
        supportPathMacroCheckBox.setSelected(state.isPathMacroSupported());
        ignoreMissingCheckBox.setSelected(state.isIgnoreMissing());
        experimentalIntegrationsCheckBox.setSelected(state.isEnableExperimentalIntegrations());

        envFilesTable.setEnabled(state.isEnabled());
        substituteEnvVarsCheckBox.setEnabled(state.isEnabled());
        supportPathMacroCheckBox.setEnabled(state.isEnabled());
        ignoreMissingCheckBox.setEnabled(state.isEnabled());
        experimentalIntegrationsCheckBox.setEnabled(state.isEnabled());

        envFilesModel.setItems(new ArrayList<>(state.getEntries()));
    }
}
package main;

/*
    Serialize the GBM model into txt file, unserialize the txt file into GBM model.

    the content format in the txt file:

    first_round_prediction
    tree[tree_index]:
    internal_node_index:[feature_name,feature_type,split value || split values],missing_go_to=0|1|2
    leaf_node_index:leaf=leaf_score

    for example:

    0.5000
    tree[0]:
    1:[7,num,30.6000],missing_go_to=0
    2:[9,cat,1,3,5],missing_go_to=1
    4:leaf=0.3333
    tree[1]:
    1:[4,num,10.9000],missing_go_to=2
    2:leaf=0.9900
*/

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Queue;

/**
 * Saves a trained {@code GBM} model to a plain-text file and restores it again.
 *
 * Node indices follow the convention used by the tree builder: for an internal
 * node {@code i}, the left child is {@code 3*i-1}, the (optional) NaN child is
 * {@code 3*i} and the right child is {@code 3*i+1}.
 */
public class ModelSerializer {

    /** Renders a leaf node as {@code index:leaf=score} (score with 6 decimals). */
    private static String serializeLeafNode(TreeNode node) {
        StringBuilder sb = new StringBuilder();
        sb.append(node.index);
        sb.append(":leaf=");
        sb.append(String.format("%.6f", node.leaf_score));
        return sb.toString();
    }

    /**
     * Renders an internal node as
     * {@code index:[feature,num,threshold],missing_go_to=K} or
     * {@code index:[feature,cat,v1,v2,...],missing_go_to=K}.
     * When {@code nan_go_to} is none of 0/1/2, missing values are routed to the
     * child with the larger sample count (1 = left, 2 = right).
     */
    private static String serializeInternalNode(TreeNode node) {
        StringBuilder sb = new StringBuilder();
        sb.append(node.index);
        sb.append(":[");
        sb.append(node.split_feature + ",");
        if (node.split_left_child_catvalue == null) {
            // numeric split
            sb.append("num,");
            sb.append(String.format("%.6f", node.split_threshold));
            sb.append("],");
        } else {
            // categorical split: list the category values routed to the left child
            sb.append("cat");
            for (double catvalue : node.split_left_child_catvalue) {
                sb.append("," + catvalue);
            }
            sb.append("],");
        }
        if (node.nan_go_to == 0) {
            sb.append("missing_go_to=0");
        } else if (node.nan_go_to == 1) {
            sb.append("missing_go_to=1");
        } else if (node.nan_go_to == 2) {
            sb.append("missing_go_to=2");
        } else {
            // fall back to the more populated child
            if (node.left_child.num_sample > node.right_child.num_sample) {
                sb.append("missing_go_to=1");
            } else {
                sb.append("missing_go_to=2");
            }
        }
        return sb.toString();
    }

    /**
     * Serializes the GBM model into a txt file at {@code path}.
     * Trees are written in breadth-first order; children are enqueued left,
     * NaN (when present), right, so indices can be reconstructed on load.
     * I/O errors are logged and swallowed (no exception propagates).
     */
    public static void save_model(GBM gbm, String path) {
        double first_round_predict = gbm.getFirst_round_pred();
        double eta = gbm.getEta();
        Loss loss = gbm.getLoss();
        ArrayList<Tree> trees = gbm.getTrees();

        StringBuilder sb = new StringBuilder();
        sb.append("first_round_predict=" + first_round_predict + "\n");
        sb.append("eta=" + eta + "\n");
        if (loss instanceof LogisticLoss) {
            sb.append("logloss" + "\n");
        } else {
            sb.append("squareloss" + "\n");
        }

        for (int i = 1; i <= trees.size(); i++) {
            sb.append("tree[" + i + "]:\n");
            Tree tree = trees.get(i - 1);
            Queue<TreeNode> queue = new LinkedList<>();
            queue.offer(tree.getRoot());
            while (!queue.isEmpty()) {
                TreeNode node = queue.poll();
                if (node.is_leaf) {
                    sb.append(serializeLeafNode(node) + "\n");
                } else {
                    sb.append(serializeInternalNode(node) + "\n");
                    queue.offer(node.left_child);
                    if (node.nan_child != null) {
                        queue.offer(node.nan_child);
                    }
                    queue.offer(node.right_child);
                }
            }
        }
        sb.append("tree[end]");

        try {
            // NOTE: uses the platform default charset via String.getBytes(), matching
            // the FileReader used in load_model.
            Files.write(Paths.get(path), sb.toString().getBytes());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Parses one serialized node line (leaf or internal) into a TreeNode. */
    private static TreeNode parseNode(String line) {
        String[] parts = line.split(":");
        int index = Integer.parseInt(parts[0]);
        if (parts[1].startsWith("leaf")) {
            double leaf_score = Double.parseDouble(parts[1].split("=")[1]);
            return new TreeNode(index, leaf_score);
        }
        double nan_go_to = Double.parseDouble(line.split("=")[1]);
        String split_info = parts[1].split("]")[0];
        split_info = split_info.substring(1); // drop the leading '['
        String[] strs = split_info.split(",");
        int split_feature = Integer.parseInt(strs[0]);
        if (strs[1].equals("num")) {
            double split_threshold = Double.parseDouble(strs[2]);
            return new TreeNode(index, split_feature, split_threshold, nan_go_to);
        }
        ArrayList<Double> split_left_child_catvalue = new ArrayList<>();
        for (int i = 2; i < strs.length; i++) {
            split_left_child_catvalue.add(Double.parseDouble(strs[i]));
        }
        return new TreeNode(index, split_feature, split_left_child_catvalue, nan_go_to);
    }

    /**
     * Links the parsed nodes of one tree into parent/child structure using the
     * index convention (left=3i-1, nan=3i, right=3i+1) and returns the root (index 1).
     */
    private static TreeNode linkTree(HashMap<Integer, TreeNode> map) {
        TreeNode root = map.get(1);
        Queue<TreeNode> queue = new LinkedList<>();
        queue.offer(root);
        while (!queue.isEmpty()) {
            TreeNode node = queue.poll();
            if (!node.is_leaf) {
                node.left_child = map.get(3 * node.index - 1);
                node.right_child = map.get(3 * node.index + 1);
                queue.offer(node.left_child);
                queue.offer(node.right_child);
                if (map.containsKey(3 * node.index)) {
                    node.nan_child = map.get(3 * node.index);
                    queue.offer(node.nan_child);
                }
            }
        }
        return root;
    }

    /**
     * Unserializes the txt file at {@code path} into a GBM model.
     * Returns {@code null} on any parse/IO failure (logged), preserving the
     * original contract. FIX: the reader is now closed via try-with-resources
     * (it previously leaked).
     */
    public static GBM load_model(String path) {
        try (BufferedReader br = new BufferedReader(new FileReader(path))) {
            double first_round_predict = Double.parseDouble(br.readLine().split("=")[1]);
            double eta = Double.parseDouble(br.readLine().split("=")[1]);
            Loss loss;
            if ("logloss".equals(br.readLine())) {
                loss = new LogisticLoss();
            } else {
                loss = new SquareLoss();
            }

            ArrayList<Tree> trees = new ArrayList<>();
            HashMap<Integer, TreeNode> map = new HashMap<>();
            String line;
            while ((line = br.readLine()) != null) {
                if (line.startsWith("tree")) {
                    // A new "tree[...]" header (or the trailing "tree[end]") closes
                    // the tree collected so far.
                    if (!map.isEmpty()) {
                        trees.add(new Tree(linkTree(map)));
                        map.clear();
                    }
                } else {
                    TreeNode node = parseNode(line);
                    map.put(node.index, node);
                }
            }
            return new GBM(trees, loss, first_round_predict, eta);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }
}
package org.drools.verifier.subsumption;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.Set;

import org.drools.StatelessSession;
import org.drools.StatelessSessionResult;
import org.drools.base.RuleNameMatchesAgendaFilter;
import org.drools.verifier.TestBase;
import org.drools.verifier.components.VerifierRule;
import org.drools.verifier.components.LiteralRestriction;
import org.drools.verifier.components.Pattern;
import org.drools.verifier.components.PatternPossibility;
import org.drools.verifier.components.RulePossibility;
import org.drools.verifier.dao.VerifierResult;
import org.drools.verifier.dao.VerifierResultFactory;
import org.drools.verifier.report.components.Cause;
import org.drools.verifier.report.components.Redundancy;
import org.drools.verifier.report.components.RedundancyType;
import org.drools.verifier.report.components.Subsumption;

/**
 * Tests the verifier rules in Possibilities.drl that detect subsumptant
 * pattern possibilities and rule possibilities.
 *
 * NOTE(review): each test asserts a specific subsumption pair and then fails
 * unless the map is empty — this only makes sense if
 * TestBase.causeMapContains(...) consumes matched entries from the map.
 * TODO confirm against TestBase.
 */
public class SubsumptantPossibilitiesTest extends SubsumptionTestBase {

    /**
     * Two patterns where pp2's restrictions (lr2, lr3) cover pp1's single
     * restriction (lr1, redundant with lr2): pp1 should subsume pp2.
     */
    public void testPatternPossibilityRedundancy1() throws Exception {
        StatelessSession session = getStatelessSession(this.getClass()
                .getResourceAsStream("Possibilities.drl"));

        // Only fire the verifier rule under test.
        session.setAgendaFilter(new RuleNameMatchesAgendaFilter(
                "Find subsumptant pattern possibilities"));

        Collection<Object> data = new ArrayList<Object>();

        VerifierResult result = VerifierResultFactory.createVerifierResult();
        session.setGlobal("result", result);

        /*
         * Redundant patterns
         */
        String ruleName1 = "Rule 1";
        String ruleName2 = "Rule 2";

        Pattern p1 = new Pattern();
        p1.setRuleName(ruleName1);
        Pattern p2 = new Pattern();
        p2.setRuleName(ruleName2);

        // lr1 (rule 1) is marked redundant with lr2 (rule 2) below.
        LiteralRestriction lr1 = new LiteralRestriction();
        lr1.setRuleName(ruleName1);
        lr1.setOrderNumber(0);
        LiteralRestriction lr2 = new LiteralRestriction();
        lr2.setRuleName(ruleName2);
        lr2.setOrderNumber(0);
        LiteralRestriction lr3 = new LiteralRestriction();
        lr3.setRuleName(ruleName2);
        lr3.setOrderNumber(1);

        PatternPossibility pp1 = new PatternPossibility();
        pp1.setPatternId(p1.getId());
        pp1.setRuleName(ruleName1);
        pp1.add(lr1);
        PatternPossibility pp2 = new PatternPossibility();
        pp2.setPatternId(p2.getId());
        pp2.setRuleName(ruleName2);
        pp2.add(lr2);
        pp2.add(lr3);

        Redundancy r1 = new Redundancy(lr1, lr2);
        Redundancy r2 = new Redundancy(p1, p2);

        data.add(p1);
        data.add(p2);
        data.add(lr1);
        data.add(lr2);
        data.add(lr3);
        data.add(pp1);
        data.add(pp2);
        data.add(r1);
        data.add(r2);

        StatelessSessionResult sessionResult = session.executeWithResults(data);

        Map<Cause, Set<Cause>> map = createSubsumptionMap(sessionResult
                .iterateObjects());

        // Expect exactly one subsumption: pp1 -> pp2, and nothing else.
        assertTrue(TestBase.causeMapContains(map, pp1, pp2));

        if (!map.isEmpty()) {
            fail("More redundancies than was expected.");
        }
    }

    /**
     * Counter-case: the redundancy links restrictions with different order
     * numbers (lr1/lr3), e.g. Pattern(a==1,b==1,c==1) vs Pattern(a==1,c==1),
     * so NO possibility subsumption may be reported.
     */
    public void testPatternPossibilityRedundancy2() throws Exception {
        StatelessSession session = getStatelessSession(this.getClass()
                .getResourceAsStream("Possibilities.drl"));

        session.setAgendaFilter(new RuleNameMatchesAgendaFilter(
                "Find subsumptant pattern possibilities"));

        Collection<Object> data = new ArrayList<Object>();

        VerifierResult result = VerifierResultFactory.createVerifierResult();
        session.setGlobal("result", result);

        /*
         * Not redundant patterns
         *
         * For example: Pattern ( a==1, b==1, c==1) and Pattern ( a==1, c==1)
         */
        String ruleName1 = "Rule 1";
        String ruleName2 = "Rule 2";

        Pattern p1 = new Pattern();
        p1.setRuleName(ruleName1);
        Pattern p2 = new Pattern();
        p2.setRuleName(ruleName2);

        LiteralRestriction lr1 = new LiteralRestriction();
        lr1.setRuleName(ruleName1);
        lr1.setOrderNumber(0);
        LiteralRestriction lr2 = new LiteralRestriction();
        lr2.setRuleName(ruleName2);
        lr2.setOrderNumber(0);
        LiteralRestriction lr3 = new LiteralRestriction();
        lr3.setRuleName(ruleName2);
        lr3.setOrderNumber(1);

        PatternPossibility pp1 = new PatternPossibility();
        pp1.setPatternId(p1.getId());
        pp1.setRuleName(ruleName1);
        pp1.add(lr1);
        PatternPossibility pp2 = new PatternPossibility();
        pp2.setPatternId(p2.getId());
        pp2.setRuleName(ruleName2);
        pp2.add(lr2);
        pp2.add(lr3);

        // Unlike test 1, the redundancy pairs lr1 with lr3 (order 0 vs order 1).
        Redundancy r1 = new Redundancy(lr1, lr3);
        Redundancy r2 = new Redundancy(p1, p2);

        data.add(p1);
        data.add(p2);
        data.add(lr1);
        data.add(lr2);
        data.add(lr3);
        data.add(pp1);
        data.add(pp2);
        data.add(r1);
        data.add(r2);

        StatelessSessionResult sessionResult = session.executeWithResults(data);

        Map<Cause, Set<Cause>> map = createSubsumptionMap(sessionResult
                .iterateObjects());

        // No subsumption expected at all.
        assertFalse(TestBase.causeMapContains(map, pp1, pp2));

        if (!map.isEmpty()) {
            fail("More redundancies than was expected.");
        }
    }

    /**
     * Rule level: rp1 has one possibility (pp1) that is STRONG-redundant with
     * pp2 of rp2, so rp1 should subsume rp2.
     */
    public void testRulePossibilityRedundancy1() throws Exception {
        StatelessSession session = getStatelessSession(this.getClass()
                .getResourceAsStream("Possibilities.drl"));

        session.setAgendaFilter(new RuleNameMatchesAgendaFilter(
                "Find subsumptant rule possibilities"));

        Collection<Object> data = new ArrayList<Object>();

        VerifierResult result = VerifierResultFactory.createVerifierResult();
        session.setGlobal("result", result);

        /*
         * First rules. These are subsumptant,
         */
        String ruleName1 = "Rule 1";
        String ruleName2 = "Rule 2";

        VerifierRule r1 = new VerifierRule();
        r1.setRuleName(ruleName1);
        VerifierRule r2 = new VerifierRule();
        r2.setRuleName(ruleName2);

        PatternPossibility pp1 = new PatternPossibility();
        pp1.setRuleName(ruleName1);
        PatternPossibility pp2 = new PatternPossibility();
        pp2.setRuleName(ruleName2);
        PatternPossibility pp3 = new PatternPossibility();
        pp3.setRuleName(ruleName2);

        RulePossibility rp1 = new RulePossibility();
        rp1.setRuleId(r1.getId());
        rp1.setRuleName(ruleName1);
        rp1.add(pp1);
        RulePossibility rp2 = new RulePossibility();
        rp2.setRuleId(r2.getId());
        rp2.setRuleName(ruleName2);
        rp2.add(pp2);
        rp2.add(pp3);

        Redundancy possibilityredundancy = new Redundancy(
                RedundancyType.STRONG, pp1, pp2);
        Redundancy ruleRedundancy = new Redundancy(r1, r2);

        data.add(r1);
        data.add(r2);
        data.add(pp1);
        data.add(pp2);
        data.add(pp3);
        data.add(possibilityredundancy);
        data.add(ruleRedundancy);
        data.add(rp1);
        data.add(rp2);

        StatelessSessionResult sessionResult = session.executeWithResults(data);

        Map<Cause, Set<Cause>> map = createSubsumptionMap(sessionResult
                .iterateObjects());

        // Expect exactly one subsumption: rp1 -> rp2.
        assertTrue(TestBase.causeMapContains(map, rp1, rp2));

        if (!map.isEmpty()) {
            fail("More redundancies than was expected.");
        }
    }

    /**
     * Rule level with two possibilities per rule: pp1/pp3 are STRONG-redundant
     * and pp2/pp4 are linked by an explicit Subsumption, so both the rule
     * possibilities (rp1 -> rp2) and the pp2 -> pp4 pair must be reported.
     */
    public void testRulePossibilityRedundancy2() throws Exception {
        StatelessSession session = getStatelessSession(this.getClass()
                .getResourceAsStream("Possibilities.drl"));

        session.setAgendaFilter(new RuleNameMatchesAgendaFilter(
                "Find subsumptant rule possibilities"));
        // session.setAgendaFilter(new RuleNameMatchesAgendaFilter(
        // "XXX: test rule"));

        Collection<Object> data = new ArrayList<Object>();

        VerifierResult result = VerifierResultFactory.createVerifierResult();
        session.setGlobal("result", result);

        /*
         * First rules. These are subsumptant,
         */
        String ruleName1 = "Rule 1";
        String ruleName2 = "Rule 2";

        VerifierRule r1 = new VerifierRule();
        r1.setRuleName(ruleName1);
        VerifierRule r2 = new VerifierRule();
        r2.setRuleName(ruleName2);

        PatternPossibility pp1 = new PatternPossibility();
        pp1.setRuleName(ruleName1);
        PatternPossibility pp2 = new PatternPossibility();
        pp2.setRuleName(ruleName1);
        PatternPossibility pp3 = new PatternPossibility();
        pp3.setRuleName(ruleName2);
        PatternPossibility pp4 = new PatternPossibility();
        pp4.setRuleName(ruleName2);

        RulePossibility rp1 = new RulePossibility();
        rp1.setRuleId(r1.getId());
        rp1.setRuleName(ruleName1);
        rp1.add(pp1);
        rp1.add(pp2);
        RulePossibility rp2 = new RulePossibility();
        rp2.setRuleId(r2.getId());
        rp2.setRuleName(ruleName2);
        rp2.add(pp3);
        rp2.add(pp4);

        Redundancy possibilityredundancy = new Redundancy(
                RedundancyType.STRONG, pp1, pp3);
        Subsumption possibilitysubsupmtion = new Subsumption(pp2, pp4);
        Redundancy ruleRedundancy = new Redundancy(r1, r2);

        data.add(r1);
        data.add(r2);
        data.add(pp1);
        data.add(pp2);
        data.add(pp3);
        data.add(possibilityredundancy);
        data.add(possibilitysubsupmtion);
        data.add(ruleRedundancy);
        data.add(rp1);
        data.add(rp2);

        StatelessSessionResult sessionResult = session.executeWithResults(data);

        Map<Cause, Set<Cause>> map = createSubsumptionMap(sessionResult
                .iterateObjects());

        assertTrue(TestBase.causeMapContains(map, rp1, rp2));
        assertTrue(TestBase.causeMapContains(map, pp2, pp4));

        if (!map.isEmpty()) {
            fail("More redundancies than was expected.");
        }
    }
}
/*
   Copyright 2013, 2016, 2017 Nationale-Nederlanden

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package nl.nn.adapterframework.extensions.tibco;

import java.util.Enumeration;

import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.Queue;
import javax.jms.QueueBrowser;
import javax.jms.Session;

import nl.nn.adapterframework.util.CredentialFactory;
import nl.nn.adapterframework.util.LogUtil;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.logging.log4j.Logger;

import com.tibco.tibjms.admin.ServerInfo;
import com.tibco.tibjms.admin.TibjmsAdmin;
import com.tibco.tibjms.admin.TibjmsAdminException;

/**
 * Some utilities for working with TIBCO.
 *
 * @author Peter Leeuwenburgh
 * @author Jaco de Groot
 */
public class TibcoUtils {
	static Logger log = LogUtil.getLogger(TibcoUtils.class);

	public static long getQueueFirstMessageAge(String provUrl,
			String authAlias, String userName, String password,
			String queueName) throws JMSException {
		return getQueueFirstMessageAge(provUrl, authAlias, userName, password,
				queueName, null);
	}

	/**
	 * Returns the age (ms) of the first message on the queue.
	 * return -1: no message found
	 * return -2: message found, but not of type Message.
	 */
	public static long getQueueFirstMessageAge(String provUrl,
			String authAlias, String userName, String password,
			String queueName, String messageSelector) throws JMSException {
		Connection connection = null;
		Session jSession = null;
		try {
			connection = getConnection(provUrl, authAlias, userName, password);
			jSession = connection.createSession(false,
					javax.jms.Session.AUTO_ACKNOWLEDGE);
			return getQueueFirstMessageAge(jSession, queueName, messageSelector);
		} finally {
			if (connection != null) {
				try {
					connection.close();
				} catch (JMSException e) {
					log.warn("Exception on closing connection", e);
				}
			}
		}
	}

	/**
	 * Opens a JMS connection to the given provider URL; "tibjmsnaming:" URLs
	 * are rewritten to "tcp:". Credentials are resolved via CredentialFactory.
	 */
	public static Connection getConnection(String provUrl, String authAlias,
			String userName, String password) throws JMSException {
		String url = StringUtils.replace(provUrl, "tibjmsnaming:", "tcp:");
		CredentialFactory cf = new CredentialFactory(authAlias, userName,
				password);
		ConnectionFactory factory = new com.tibco.tibjms.TibjmsConnectionFactory(
				url);
		return factory.createConnection(cf.getUsername(), cf.getPassword());
	}

	protected static long getQueueFirstMessageAge(Session jSession,
			String queueName) throws JMSException {
		return getQueueFirstMessageAge(jSession, queueName, null);
	}

	protected static long getQueueFirstMessageAge(Session jSession,
			String queueName, String messageSelector) throws JMSException {
		return getQueueFirstMessageAge(jSession, queueName, messageSelector,
				System.currentTimeMillis());
	}

	protected static long getQueueFirstMessageAge(Session jSession,
			String queueName, long currentTime) throws JMSException {
		return getQueueFirstMessageAge(jSession, queueName, null, currentTime);
	}

	protected static long getQueueFirstMessageAge(Session jSession,
			String queueName, String messageSelector, long currentTime)
			throws JMSException {
		return getQueueFirstMessageAge(jSession, queueName, messageSelector,
				currentTime, true);
	}

	/**
	 * Browses the queue (without consuming) and returns currentTime minus the
	 * JMS timestamp of the first message; -1 when the queue is empty, -2 when
	 * the first element is not a {@link Message} (logged only when warn=true).
	 */
	protected static long getQueueFirstMessageAge(Session jSession,
			String queueName, String messageSelector, long currentTime,
			boolean warn) throws JMSException {
		QueueBrowser queueBrowser = null;
		try {
			Queue queue = jSession.createQueue(queueName);
			if (messageSelector == null) {
				queueBrowser = jSession.createBrowser(queue);
			} else {
				queueBrowser = jSession.createBrowser(queue, messageSelector);
			}
			Enumeration<?> enm = queueBrowser.getEnumeration();
			if (enm.hasMoreElements()) {
				Object o = enm.nextElement();
				if (o instanceof Message) {
					Message msg = (Message) o;
					long jmsTimestamp = msg.getJMSTimestamp();
					return currentTime - jmsTimestamp;
				} else {
					if (warn) {
						log.warn("message was not of type Message, but ["
								+ o.getClass().getName() + "]");
					}
					return -2;
				}
			} else {
				return -1;
			}
		} finally {
			if (queueBrowser != null) {
				try {
					queueBrowser.close();
				} catch (JMSException e) {
					log.warn("Exception on closing queueBrowser", e);
				}
			}
		}
	}

	/**
	 * Human-readable variant: "??" for a non-Message head, null for an empty
	 * queue, "?" on JMS failure, otherwise a "ddd-HH:mm:ss" duration.
	 */
	protected static String getQueueFirstMessageAgeAsString(Session jSession,
			String queueName, long currentTime) {
		try {
			long age = getQueueFirstMessageAge(jSession, queueName, null,
					currentTime, false);
			if (age == -2) {
				return "??";
			} else if (age == -1) {
				return null;
			} else {
				return DurationFormatUtils.formatDuration(age, "ddd-HH:mm:ss");
			}
		} catch (JMSException e) {
			return "?";
		}
	}

	public static long getQueueMessageCount(String provUrl, String authAlias,
			String userName, String password, String queueName,
			String messageSelector) throws JMSException {
		Connection connection = null;
		Session jSession = null;
		try {
			connection = getConnection(provUrl, authAlias, userName, password);
			jSession = connection.createSession(false,
					javax.jms.Session.AUTO_ACKNOWLEDGE);
			return getQueueMessageCount(jSession, queueName, messageSelector);
		} finally {
			if (connection != null) {
				try {
					connection.close();
				} catch (JMSException e) {
					log.warn("Exception on closing connection", e);
				}
			}
		}
	}

	/** Counts the messages on the queue by browsing (does not consume). */
	protected static long getQueueMessageCount(Session jSession,
			String queueName, String messageSelector) throws JMSException {
		QueueBrowser queueBrowser = null;
		try {
			Queue queue = jSession.createQueue(queueName);
			if (messageSelector == null) {
				queueBrowser = jSession.createBrowser(queue);
			} else {
				queueBrowser = jSession.createBrowser(queue, messageSelector);
			}
			int count = 0;
			for (Enumeration<?> enm = queueBrowser.getEnumeration(); enm
					.hasMoreElements(); enm.nextElement()) {
				count++;
			}
			return count;
		} finally {
			if (queueBrowser != null) {
				try {
					queueBrowser.close();
				} catch (JMSException e) {
					log.warn("Exception on closing queueBrowser", e);
				}
			}
		}
	}

	/**
	 * Tries each server in the comma-separated url list and returns an admin
	 * connection to the first ACTIVE one, or null when none is active (the
	 * last connect exception is logged as an indication of the cause).
	 */
	protected static TibjmsAdmin getActiveServerAdmin(String url,
			CredentialFactory cf) throws TibjmsAdminException {
		TibjmsAdminException lastException = null;
		TibjmsAdmin admin = null;
		String[] uws = url.split(",");
		String uw = null;
		boolean uws_ok = false;
		for (int i = 0; !uws_ok && i < uws.length; i++) {
			uw = uws[i].trim();
			// FIX: reset admin for every attempt; previously a stale, already
			// closed admin from an earlier inactive server could be re-processed
			// (and re-closed) when a later connect attempt threw.
			admin = null;
			int state = ServerInfo.SERVER_ACTIVE * -1;
			try {
				// The next line of code has been reported to throw the
				// following exception:
				// com.tibco.tibjms.admin.TibjmsAdminException: Unable to connect to server. Root cause:
				// javax.jms.ResourceAllocationException: too many open connections
				admin = new TibjmsAdmin(uw, cf.getUsername(), cf.getPassword());
				// The next line of code has been reported to throw the
				// following exception:
				// com.tibco.tibjms.admin.TibjmsAdminSecurityException: Command unavailable on a server not in active state and using a JSON configuration file
				state = admin.getInfo().getState();
			} catch (TibjmsAdminException e) {
				// In case a passive or broken server is tried before an active
				// server this will result in an exception. Hence, ignore all
				// exceptions unless all servers fail in which case the latest
				// exception should be logged to give an indication of what is
				// going wrong.
				lastException = e;
			}
			if (admin != null) {
				if (state == ServerInfo.SERVER_ACTIVE) {
					uws_ok = true;
				} else {
					log.debug("Server [" + uw + "] is not active");
					try {
						admin.close();
					} catch (TibjmsAdminException e) {
						log.warn("Exception on closing Tibjms Admin on server ["
								+ uw + "]", e);
					}
					// Don't hand back (or retry with) a closed admin.
					admin = null;
				}
			}
		}
		if (!uws_ok) {
			log.warn("Could not find an active server", lastException);
			return null;
		} else {
			log.debug("Found active server [" + uw + "]");
			return admin;
		}
	}
}
package org.zaproxy.zap.authentication; import java.awt.GridBagLayout; import java.awt.Insets; import java.util.HashMap; import java.util.Map; import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JPasswordField; import net.sf.json.JSONObject; import org.apache.commons.codec.binary.Base64; import org.parosproxy.paros.Constant; import org.parosproxy.paros.control.Control; import org.zaproxy.zap.extension.api.ApiDynamicActionImplementor; import org.zaproxy.zap.extension.api.ApiException; import org.zaproxy.zap.extension.api.ApiResponse; import org.zaproxy.zap.extension.api.ApiResponseSet; import org.zaproxy.zap.extension.users.ExtensionUserManagement; import org.zaproxy.zap.extension.users.UsersAPI; import org.zaproxy.zap.model.Context; import org.zaproxy.zap.users.User; import org.zaproxy.zap.utils.ApiUtils; import org.zaproxy.zap.utils.ZapTextField; import org.zaproxy.zap.view.LayoutHelper; /** * The credentials implementation for use in systems that require a username and password * combination for authentication. */ class UsernamePasswordAuthenticationCredentials implements AuthenticationCredentials { private static final String API_NAME = "UsernamePasswordAuthenticationCredentials"; /** * String used to represent encoded null credentials, that is, when {@code username} is {@code null}. * <p> * It's a null character Base64 encoded, which will never be equal to encoding of defined {@code username}/{@code password}. * * @see #encode(String) * @see #decode(String) */ private static final String NULL_CREDENTIALS = "AA=="; private static String FIELD_SEPARATOR = "~"; private String username; private String password; public UsernamePasswordAuthenticationCredentials() { super(); } public UsernamePasswordAuthenticationCredentials(String username, String password) { super(); this.username = username; this.password = password; } /** * Gets the username. 
* * @return the username */ public String getUsername() { return username; } /** * Gets the password. * * @return the password */ public String getPassword() { return password; } @Override public boolean isConfigured() { return username != null && password != null; } @Override public String encode(String parentStringSeparator) { assert (!FIELD_SEPARATOR.equals(parentStringSeparator)); if (username == null) { return NULL_CREDENTIALS; } StringBuilder out = new StringBuilder(); out.append(Base64.encodeBase64String(username.getBytes())).append(FIELD_SEPARATOR); out.append(Base64.encodeBase64String(password.getBytes())).append(FIELD_SEPARATOR); return out.toString(); } @Override public void decode(String encodedCredentials) { if (NULL_CREDENTIALS.equals(encodedCredentials)) { username = null; password = null; return; } String[] pieces = encodedCredentials.split(FIELD_SEPARATOR); this.username = new String(Base64.decodeBase64(pieces[0])); if (pieces.length > 1) this.password = new String(Base64.decodeBase64(pieces[1])); else this.password = ""; } /** * The Options Panel used for configuring a {@link UsernamePasswordAuthenticationCredentials}. 
*/ protected static class UsernamePasswordAuthenticationCredentialsOptionsPanel extends AbstractCredentialsOptionsPanel<UsernamePasswordAuthenticationCredentials> { private static final long serialVersionUID = 8881019014296985804L; private static final String USERNAME_LABEL = Constant.messages .getString("authentication.method.fb.credentials.field.label.user"); private static final String PASSWORD_LABEL = Constant.messages .getString("authentication.method.fb.credentials.field.label.pass"); private ZapTextField usernameTextField; private JPasswordField passwordTextField; public UsernamePasswordAuthenticationCredentialsOptionsPanel( UsernamePasswordAuthenticationCredentials credentials) { super(credentials); initialize(); } private void initialize() { this.setLayout(new GridBagLayout()); this.add(new JLabel(USERNAME_LABEL), LayoutHelper.getGBC(0, 0, 1, 0.0d)); this.usernameTextField = new ZapTextField(); if (this.getCredentials().username != null) this.usernameTextField.setText(this.getCredentials().username); this.add(this.usernameTextField, LayoutHelper.getGBC(1, 0, 1, 0.0d, new Insets(0, 4, 0, 0))); this.add(new JLabel(PASSWORD_LABEL), LayoutHelper.getGBC(0, 1, 1, 0.0d)); this.passwordTextField = new JPasswordField(); if (this.getCredentials().password != null) this.passwordTextField.setText(this.getCredentials().password); this.add(this.passwordTextField, LayoutHelper.getGBC(1, 1, 1, 1.0d, new Insets(0, 4, 0, 0))); } @Override public boolean validateFields() { if (usernameTextField.getText().isEmpty()) { JOptionPane.showMessageDialog(this, Constant.messages .getString("authentication.method.fb.credentials.dialog.error.user.text"), Constant.messages.getString("authentication.method.fb.dialog.error.title"), JOptionPane.WARNING_MESSAGE); usernameTextField.requestFocusInWindow(); return false; } return true; } @Override public void saveCredentials() { getCredentials().username = usernameTextField.getText(); getCredentials().password = new 
String(passwordTextField.getPassword()); } } /* API related constants and methods. */ @Override public ApiResponse getApiResponseRepresentation() { Map<String, String> values = new HashMap<>(); values.put("type", API_NAME); values.put("username", username); values.put("password", password); return new ApiResponseSet("credentials", values); } private static final String ACTION_SET_CREDENTIALS = "formBasedAuthenticationCredentials"; private static final String PARAM_USERNAME = "username"; private static final String PARAM_PASSWORD = "password"; /** * Gets the api action for setting a {@link UsernamePasswordAuthenticationCredentials} for an * User. * * @param methodType the method type for which this is called * @return the sets the credentials for user api action */ public static ApiDynamicActionImplementor getSetCredentialsForUserApiAction( final AuthenticationMethodType methodType) { return new ApiDynamicActionImplementor(ACTION_SET_CREDENTIALS, new String[] { PARAM_USERNAME, PARAM_PASSWORD }, null) { @Override public void handleAction(JSONObject params) throws ApiException { Context context = ApiUtils.getContextByParamId(params, UsersAPI.PARAM_CONTEXT_ID); int userId = ApiUtils.getIntParam(params, UsersAPI.PARAM_USER_ID); // Make sure the type of authentication method is compatible if (!methodType.isTypeForMethod(context.getAuthenticationMethod())) throw new ApiException(ApiException.Type.ILLEGAL_PARAMETER, "User's credentials should match authentication method type of the context: " + context.getAuthenticationMethod().getType().getName()); // NOTE: no need to check if extension is loaded as this method is called only if // the Users // extension is loaded ExtensionUserManagement extensionUserManagement = (ExtensionUserManagement) Control .getSingleton().getExtensionLoader().getExtension(ExtensionUserManagement.NAME); User user = extensionUserManagement.getContextUserAuthManager(context.getIndex()) .getUserById(userId); if (user == null) throw new 
ApiException(ApiException.Type.USER_NOT_FOUND, UsersAPI.PARAM_USER_ID); // Build and set the credentials UsernamePasswordAuthenticationCredentials credentials = new UsernamePasswordAuthenticationCredentials(); credentials.username = ApiUtils.getNonEmptyStringParam(params, PARAM_USERNAME); credentials.password = ApiUtils.getNonEmptyStringParam(params, PARAM_PASSWORD); user.setAuthenticationCredentials(credentials); } }; } }
package org.holoeverywhere;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.content.res.XmlResourceParser;
import android.graphics.Canvas;
import android.os.Build;
import android.os.Build.VERSION;
import android.os.Build.VERSION_CODES;
import android.os.Handler;
import android.os.Message;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app._HoloFragmentInflater;
import android.support.v7.internal.view.menu.ExpandedMenuView;
import android.support.v7.internal.widget.ActionBarContainer;
import android.support.v7.internal.widget.ActionBarView;
import android.util.AttributeSet;
import android.util.Xml;
import android.view.InflateException;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewStub;

import org.holoeverywhere.SystemServiceManager.SystemServiceCreator;
import org.holoeverywhere.SystemServiceManager.SystemServiceCreator.SystemService;
import org.holoeverywhere.app.ContextThemeWrapperPlus;
import org.holoeverywhere.app.Dialog;
import org.holoeverywhere.app.Fragment;
import org.holoeverywhere.util.SparseIntArray;
import org.holoeverywhere.widget.FrameLayout;
import org.holoeverywhere.widget.NumberPicker;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;

/**
 * Drop-in replacement for {@link android.view.LayoutInflater} that adds:
 * view-name remapping ({@link #register(String, String)}), layout-resource
 * remapping ({@link #register(int, int)}), package-prefix resolution for
 * short class names, a {@code <fragment>} tag handled through
 * {@code _HoloFragmentInflater}, multiple chained {@link Factory} callbacks,
 * and a per-{@link Context} instance cache ({@link #from(Context)}).
 * <p>
 * NOTE(review): static caches are not synchronized — presumably all
 * registration happens on the main thread before inflation; confirm before
 * touching from background threads.
 */
public class LayoutInflater extends android.view.LayoutInflater implements Cloneable {
    // Cache of resolved (Context, AttributeSet) constructors, keyed by the
    // name used at lookup time (short or fully qualified).
    private static final HashMap<String, Constructor<? extends View>> sConstructorMap =
            new HashMap<String, Constructor<? extends View>>();
    private static final Class<?>[] sConstructorSignature = {
            Context.class, AttributeSet.class
    };
    // Cache of reflectively located protected View.onFinishInflate() methods.
    private static final Map<Class<?>, Method> sFinishInflateMethods =
            new HashMap<Class<?>, Method>(100);
    // Weak per-Context instance cache backing from(Context).
    private static final Map<Context, LayoutInflater> sInstances =
            new WeakHashMap<Context, LayoutInflater>();
    // Package prefixes tried (in reverse registration order) for short names.
    private static final List<String> sPackages = new ArrayList<String>();
    // Short-name -> fully-qualified-name remapping table.
    private static final Map<String, String> sRemaps = new HashMap<String, String>();
    // Layout-resource-id -> replacement-layout-resource-id remapping table.
    private static final SparseIntArray sLayoutRemap = new SparseIntArray();
    private static final String TAG_1995 = "blink";
    private static final String TAG_INCLUDE = "include";
    private static final String TAG_MERGE = "merge";
    private static final String TAG_REQUEST_FOCUS = "requestFocus";

    static {
        registerPackage("android.webkit");
        registerPackage("android.view");
        registerPackage("android.widget");
        registerPackage("android.support.v4.view");
        registerPackage(HoloEverywhere.PACKAGE + ".widget");
        asInternal(ActionBarView.class);
        asInternal(ExpandedMenuView.class);
        asInternal(ActionBarContainer.class);
        asInternal(Dialog.DialogTitle.class);
        asInternal(NumberPicker.NumberPickerEditText.class);
    }

    private static OnInitInflaterListener sListener;
    private final Fragment mChildFragment;
    // args[0] = Context, args[1] = current AttributeSet; reused across calls
    // under the synchronized(mConstructorArgs) lock in inflate().
    private final Object[] mConstructorArgs = new Object[2];
    private final Context mContext;
    private List<Factory> mFactories;
    private Filter mFilter;
    // Per-name verdict cache for mFilter, used on the cached-constructor path.
    private HashMap<String, Boolean> mFilterMap;
    private FragmentActivity mFragmentActivity;
    private LayoutInflater mParentInflater;

    protected LayoutInflater(android.view.LayoutInflater original, Context newContext) {
        this(original, newContext, null);
    }

    protected LayoutInflater(android.view.LayoutInflater original, Context newContext,
            Fragment childFragment) {
        this(newContext, childFragment);
        setParent(original);
    }

    protected LayoutInflater(Context context) {
        this(context, null);
    }

    protected LayoutInflater(Context context, Fragment childFragment) {
        super(context);
        if (context == null) {
            throw new IllegalArgumentException("Context cannot be null");
        }
        mChildFragment = childFragment;
        mContext = context;
        if (LayoutInflater.sListener != null) {
            LayoutInflater.sListener.onInitInflater(this);
        }
    }

    // Registers an internal widget under the "Internal.<SimpleName>" alias.
    private static void asInternal(Class<?> clazz) {
        register("Internal." + clazz.getSimpleName(), clazz.getName());
    }

    public static LayoutInflater from(android.view.LayoutInflater inflater) {
        if (inflater instanceof LayoutInflater) {
            return (LayoutInflater) inflater;
        }
        // Wrap a stock inflater, inheriting its factories and filter.
        return LayoutInflater.from(inflater.getContext()).setParent(inflater);
    }

    public static LayoutInflater from(Context context) {
        LayoutInflater inflater = sInstances.get(context);
        if (inflater == null) {
            sInstances.put(context, inflater = new LayoutInflater(context));
        }
        return inflater;
    }

    public static LayoutInflater from(Context context, int theme) {
        return from(new ContextThemeWrapperPlus(context,
                ThemeManager.getThemeResource(theme, false)));
    }

    public static View inflate(Context context, int resource) {
        return from(context).inflate(resource, null);
    }

    public static View inflate(Context context, int resource, ViewGroup root) {
        return from(context).inflate(resource, root);
    }

    public static View inflate(Context context, int resource, ViewGroup root,
            boolean attachToRoot) {
        return from(context).inflate(resource, root, attachToRoot);
    }

    /**
     * Iterate over classes and call {@link #register(Class)} for each
     */
    public static void register(Class<? extends View>... classes) {
        for (Class<? extends View> classe : classes) {
            register(classe);
        }
    }

    /**
     * Fast mapping views by name<br />
     * <br />
     * MyView -> com.myapppackage.widget.MyView<br />
     */
    public static void register(Class<? extends View> clazz) {
        if (clazz != null) {
            register(clazz.getSimpleName(), clazz.getName());
        }
    }

    /**
     * Manually register shortcuts for inflating<br />
     * Not recommend to use. You are warned. <br />
     * <br />
     * MyView -> com.myapppackage.widget.SuperPuperViewVeryCustom
     */
    public static void register(String from, String to) {
        LayoutInflater.sRemaps.put(from, to);
    }

    /**
     * Hack for overriding android default layouts for custom.<br />
     * For example, if some parts of android framework try to inflate layout
     * {@link android.R.layout#simple_list_item_1} you can override this behavior and replace
     * system layout by custom<br />
     * Just call register(android.R.layout.simple_list_item_1, R.layout.my_simple_list_item)<br />
     * <br />
     * Be sure that you need for it before using. You are warned.
     */
    public static void register(int fromId, int toId) {
        if (toId == 0) {
            // toId == 0 removes a previously registered remapping.
            sLayoutRemap.delete(fromId);
        } else {
            sLayoutRemap.put(fromId, toId);
        }
    }

    /**
     * Resolve ids for given name in each package (android and application) and then call
     * {@link #register(int, int)}
     */
    public static void register(Context context, String name) {
        final Resources res = context.getResources();
        int androidId = res.getIdentifier(name, "layout", "android");
        int appId = res.getIdentifier(name, "layout", context.getPackageName());
        if (androidId != 0 && appId != 0) {
            register(androidId, appId);
        } else {
            HoloEverywhere.warn("Failed to register layout remapping:\n"
                    + "  Android ID: 0x%8x\n"
                    + "  Application ID: 0x%8x", androidId, appId);
        }
    }

    /**
     * Resolve ids for given id in each package (android and application) and then call
     * {@link #register(int, int)}
     */
    public static void register(Context context, int id) {
        register(context, context.getResources().getResourceName(id));
    }

    public static void registerPackage(String packageName) {
        packageName = resolveFullPackageName(packageName);
        if (packageName != null && !sPackages.contains(packageName)) {
            sPackages.add(packageName);
        }
    }

    // Returns the canonical package name, or null when the package has not
    // been loaded by the class loader yet.
    private static String resolveFullPackageName(String packageName) {
        Package resolvedPackage = Package.getPackage(packageName);
        if (resolvedPackage == null) {
            return null;
        }
        return resolvedPackage.getName();
    }

    public static void removeInstance(Context context) {
        sInstances.remove(context);
    }

    public static void setOnInitInflaterListener(OnInitInflaterListener listener) {
        sListener = listener;
    }

    /**
     * Resolves {@code prefix + name} to a View class, applies the optional
     * {@link Filter}, and instantiates it via the cached two-argument
     * (Context, AttributeSet) constructor.
     * <p>
     * Note the constructor cache is keyed by {@code name} only, not by
     * {@code prefix + name} — mirrors the platform inflater's behavior.
     */
    public View _createView(String name, String prefix, AttributeSet attrs)
            throws ClassNotFoundException, InflateException {
        Constructor<? extends View> constructor = sConstructorMap.get(name);
        Class<? extends View> clazz = null;
        try {
            if (constructor == null) {
                // First time: load the class, consult the filter, cache ctor.
                clazz = mContext.getClassLoader().loadClass(
                        prefix != null ? prefix + name : name).asSubclass(View.class);
                if (mFilter != null && clazz != null) {
                    boolean allowed = mFilter.onLoadClass(clazz);
                    if (!allowed) {
                        failNotAllowed(name, prefix, attrs);
                    }
                }
                constructor = clazz.getConstructor(sConstructorSignature);
                sConstructorMap.put(name, constructor);
            } else {
                // Cached path: the filter verdict is memoized per name.
                if (mFilter != null) {
                    Boolean allowedState = mFilterMap.get(name);
                    if (allowedState == null) {
                        clazz = mContext.getClassLoader().loadClass(
                                prefix != null ? prefix + name : name).asSubclass(View.class);
                        boolean allowed = clazz != null && mFilter.onLoadClass(clazz);
                        mFilterMap.put(name, allowed);
                        if (!allowed) {
                            failNotAllowed(name, prefix, attrs);
                        }
                    } else if (allowedState.equals(Boolean.FALSE)) {
                        failNotAllowed(name, prefix, attrs);
                    }
                }
            }
            Object[] args = mConstructorArgs;
            args[1] = attrs;
            constructor.setAccessible(true);
            final View view = constructor.newInstance(args);
            if (view instanceof ViewStub) {
                final ViewStub viewStub = (ViewStub) view;
                if (VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN) {
                    // Make the stub inflate through this inflater too.
                    viewStub.setLayoutInflater(this);
                }
            }
            return view;
        } catch (NoSuchMethodException e) {
            InflateException ie = new InflateException(attrs.getPositionDescription()
                    + ": Error inflating class "
                    + (prefix != null ? prefix + name : name));
            ie.initCause(e);
            throw ie;
        } catch (ClassCastException e) {
            InflateException ie = new InflateException(attrs.getPositionDescription()
                    + ": Class is not a View " + (prefix != null ? prefix + name : name));
            ie.initCause(e);
            throw ie;
        } catch (ClassNotFoundException e) {
            // Propagated as-is so onCreateView can try the next package prefix.
            throw e;
        } catch (Exception e) {
            InflateException ie = new InflateException(attrs.getPositionDescription()
                    + ": Error inflating class "
                    + (clazz == null ? "<unknown>" : clazz.getName()));
            ie.initCause(e);
            throw ie;
        }
    }

    public void addFactory(Factory factory) {
        checkFactoryOnNull(factory);
        if (mFactories == null) {
            mFactories = new ArrayList<Factory>();
        }
        mFactories.add(factory);
    }

    public void addFactory(Factory factory, int index) {
        checkFactoryOnNull(factory);
        if (mFactories == null) {
            mFactories = new ArrayList<Factory>();
        }
        mFactories.add(index, factory);
    }

    private void checkFactoryOnNull(Factory factory) {
        if (factory == null) {
            throw new NullPointerException("Given factory can not be null");
        }
    }

    @Override
    public LayoutInflater cloneInContext(Context newContext) {
        return new LayoutInflater(this, newContext);
    }

    /**
     * Central tag dispatcher: handles {@code <fragment>} and {@code <view>}
     * specially, then gives each registered {@link Factory} a chance before
     * falling back to {@link #onCreateView(View, String, AttributeSet)}.
     */
    View createViewFromTag(View parent, String name, AttributeSet attrs) {
        if ("fragment".equals(name)) {
            return _HoloFragmentInflater.inflate(LayoutInflater.this, attrs, parent,
                    mChildFragment);
        }
        if (name.equals("view")) {
            // <view class="..."> indirection: the real name is in "class".
            name = attrs.getAttributeValue(null, "class");
        }
        try {
            View view = null;
            if (mFactories != null) {
                for (int i = 0; i < mFactories.size(); i++) {
                    view = mFactories.get(i).onCreateView(parent, name, mContext, attrs);
                    if (view != null) {
                        break;
                    }
                }
            }
            if (view == null) {
                view = onCreateView(parent, name, attrs);
            }
            return prepareView(view);
        } catch (InflateException e) {
            throw e;
        } catch (ClassNotFoundException e) {
            InflateException ie = new InflateException(attrs.getPositionDescription()
                    + ": Error inflating class " + name);
            ie.initCause(e);
            throw ie;
        } catch (Exception e) {
            InflateException ie = new InflateException(attrs.getPositionDescription()
                    + ": Error inflating class " + name);
            ie.initCause(e);
            throw ie;
        }
    }

    // Throws the standard "filter rejected this class" InflateException.
    private void failNotAllowed(String name, String prefix, AttributeSet attrs) {
        throw new InflateException(attrs.getPositionDescription()
                + ": Class not allowed to be inflated "
                + (prefix != null ? prefix + name : name));
    }

    @Override
    public Filter getFilter() {
        return mFilter;
    }

    @Override
    public void setFilter(Filter filter) {
        mFilter = filter;
        if (filter != null) {
            mFilterMap = new HashMap<String, Boolean>();
        }
    }

    public FragmentActivity getFragmentActivity() {
        return mFragmentActivity;
    }

    public void setFragmentActivity(FragmentActivity fragmentActivity) {
        mFragmentActivity = fragmentActivity;
    }

    public View inflate(int resource) {
        return inflate(resource, null, false);
    }

    @Override
    public View inflate(int resource, ViewGroup root) {
        return inflate(resource, root, root != null);
    }

    @Override
    public View inflate(int resource, ViewGroup root, boolean attachToRoot) {
        // Apply the layout-id remapping table (falls back to the original id).
        return inflate(getContext().getResources().getLayout(
                sLayoutRemap.get(resource, resource)), root, attachToRoot);
    }

    public View inflate(XmlPullParser parser) {
        return inflate(parser, null, false);
    }

    @Override
    public View inflate(XmlPullParser parser, ViewGroup root) {
        return inflate(parser, root, root != null);
    }

    /**
     * Main inflate entry point; mirrors the platform implementation while
     * routing view creation through {@link #createViewFromTag}. Serialized on
     * mConstructorArgs because that array is shared scratch state.
     */
    @Override
    public View inflate(XmlPullParser parser, ViewGroup root, boolean attachToRoot) {
        synchronized (mConstructorArgs) {
            final AttributeSet attrs = Xml.asAttributeSet(parser);
            mConstructorArgs[0] = mContext;
            View result = root;
            try {
                int type;
                // Advance to the first START_TAG (or END_DOCUMENT).
                while ((type = parser.next()) != XmlPullParser.START_TAG
                        && type != XmlPullParser.END_DOCUMENT) {
                    ;
                }
                if (type != XmlPullParser.START_TAG) {
                    throw new InflateException(parser.getPositionDescription()
                            + ": No start tag found!");
                }
                final String name = parser.getName();
                if (TAG_MERGE.equals(name)) {
                    if (root == null || !attachToRoot) {
                        throw new InflateException("<merge /> can be used only with a valid "
                                + "ViewGroup root and attachToRoot=true");
                    }
                    rInflate(parser, root, attrs, false);
                } else {
                    View temp;
                    if (TAG_1995.equals(name)) {
                        temp = new BlinkLayout(mContext, attrs);
                    } else {
                        temp = createViewFromTag(root, name, attrs);
                    }
                    ViewGroup.LayoutParams params = null;
                    if (root != null) {
                        params = root.generateLayoutParams(attrs);
                        if (!attachToRoot) {
                            temp.setLayoutParams(params);
                        }
                    }
                    rInflate(parser, temp, attrs, true);
                    if (root != null && attachToRoot) {
                        root.addView(temp, params);
                    }
                    if (root == null || !attachToRoot) {
                        result = temp;
                    }
                }
            } catch (XmlPullParserException e) {
                InflateException ex = new InflateException(e.getMessage());
                ex.initCause(e);
                throw ex;
            } catch (IOException e) {
                InflateException ex = new InflateException(
                        parser.getPositionDescription() + ": " + e.getMessage());
                ex.initCause(e);
                throw ex;
            } finally {
                mConstructorArgs[1] = null;
            }
            return result;
        }
    }

    public LayoutInflater obtainFragmentChildInflater(Fragment fragment) {
        if (mParentInflater != null) {
            return mParentInflater.obtainFragmentChildInflater(fragment);
        }
        return new LayoutInflater(this, mContext, fragment);
    }

    /**
     * Name resolution order: explicit remap table, fully-qualified names
     * (contain a '.'), then registered package prefixes in reverse order.
     */
    @Override
    protected View onCreateView(View parent, String name, AttributeSet attrs)
            throws ClassNotFoundException {
        View view;
        String newName = LayoutInflater.sRemaps.get(name);
        if (newName != null) {
            view = _createView(newName, null, attrs);
            if (view != null) {
                return view;
            }
        }
        if (name.indexOf('.') > 0) {
            return _createView(name, null, attrs);
        }
        for (int i = sPackages.size() - 1; i >= 0; i--) {
            try {
                view = _createView(name, sPackages.get(i) + ".", attrs);
                if (view != null) {
                    return view;
                }
            } catch (ClassNotFoundException e) {
                // Expected: try the next registered package prefix.
            }
        }
        throw new ClassNotFoundException("Could not find class: " + name);
    }

    /**
     * Handles an {@code <include layout="@layout/..."/>} tag: inflates the
     * referenced layout into {@code parent}, applying any id/visibility
     * attributes from the include tag itself, then skips past the tag.
     */
    private void parseInclude(XmlPullParser parser, View parent, AttributeSet attrs)
            throws XmlPullParserException, IOException {
        int type;
        if (parent instanceof ViewGroup) {
            final int layout = attrs.getAttributeResourceValue(null, "layout", 0);
            if (layout == 0) {
                final String value = attrs.getAttributeValue(null, "layout");
                if (value == null) {
                    throw new InflateException("You must specifiy a layout in the"
                            + " include tag: <include layout=\"@layout/layoutID\" />");
                } else {
                    throw new InflateException("You must specifiy a valid layout "
                            + "reference. The layout ID " + value + " is not valid.");
                }
            } else {
                final XmlResourceParser childParser =
                        getContext().getResources().getLayout(layout);
                try {
                    final AttributeSet childAttrs = Xml.asAttributeSet(childParser);
                    while ((type = childParser.next()) != XmlPullParser.START_TAG
                            && type != XmlPullParser.END_DOCUMENT) {
                        ;
                    }
                    if (type != XmlPullParser.START_TAG) {
                        throw new InflateException(childParser.getPositionDescription()
                                + ": No start tag found!");
                    }
                    final String childName = childParser.getName();
                    if (TAG_MERGE.equals(childName)) {
                        rInflate(childParser, parent, childAttrs, false);
                    } else {
                        final View view = createViewFromTag(parent, childName, childAttrs);
                        final ViewGroup group = (ViewGroup) parent;
                        ViewGroup.LayoutParams params = null;
                        try {
                            // Prefer layout params from the <include> tag; fall
                            // back to the included layout's own attributes.
                            params = group.generateLayoutParams(attrs);
                        } catch (RuntimeException e) {
                            params = group.generateLayoutParams(childAttrs);
                        } finally {
                            if (params != null) {
                                view.setLayoutParams(params);
                            }
                        }
                        rInflate(childParser, view, childAttrs, true);
                        // android:id / android:visibility on <include> override
                        // the included layout root's values.
                        TypedArray a = mContext.obtainStyledAttributes(attrs, new int[]{
                                android.R.attr.id, android.R.attr.visibility
                        }, 0, 0);
                        int id = a.getResourceId(0, View.NO_ID);
                        int visibility = a.getInt(1, -1);
                        a.recycle();
                        if (id != View.NO_ID) {
                            view.setId(id);
                        }
                        switch (visibility) {
                            case 0:
                                view.setVisibility(View.VISIBLE);
                                break;
                            case 1:
                                view.setVisibility(View.INVISIBLE);
                                break;
                            case 2:
                                view.setVisibility(View.GONE);
                                break;
                        }
                        group.addView(view);
                    }
                } finally {
                    childParser.close();
                }
            }
        } else {
            throw new InflateException("<include /> can only be used inside of a ViewGroup");
        }
        // Skip the remainder of the <include> element in the outer parser.
        final int currentDepth = parser.getDepth();
        while (((type = parser.next()) != XmlPullParser.END_TAG
                || parser.getDepth() > currentDepth) && type != XmlPullParser.END_DOCUMENT) {
            ;
        }
    }

    // Handles <requestFocus/>: focuses the parent and skips the element.
    private void parseRequestFocus(XmlPullParser parser, View parent)
            throws XmlPullParserException, IOException {
        int type;
        parent.requestFocus();
        final int currentDepth = parser.getDepth();
        while (((type = parser.next()) != XmlPullParser.END_TAG
                || parser.getDepth() > currentDepth) && type != XmlPullParser.END_DOCUMENT) {
            ;
        }
    }

    // Hook for post-creation processing; currently a pass-through.
    @SuppressLint("NewApi")
    private View prepareView(View view) {
        return view;
    }

    /**
     * Recursive descent over children of the current tag. When
     * {@code finishInflate} is true, reflectively invokes the parent's
     * protected {@code onFinishInflate()} afterwards (cached per class).
     */
    void rInflate(XmlPullParser parser, View parent, final AttributeSet attrs,
            boolean finishInflate) throws XmlPullParserException, IOException {
        final int depth = parser.getDepth();
        int type;
        while (((type = parser.next()) != XmlPullParser.END_TAG
                || parser.getDepth() > depth) && type != XmlPullParser.END_DOCUMENT) {
            if (type != XmlPullParser.START_TAG) {
                continue;
            }
            final String name = parser.getName();
            if (TAG_REQUEST_FOCUS.equals(name)) {
                parseRequestFocus(parser, parent);
            } else if (TAG_INCLUDE.equals(name)) {
                if (parser.getDepth() == 0) {
                    throw new InflateException("<include /> cannot be the root element");
                }
                parseInclude(parser, parent, attrs);
            } else if (TAG_MERGE.equals(name)) {
                throw new InflateException("<merge /> must be the root element");
            } else if (TAG_1995.equals(name)) {
                final View view = new BlinkLayout(mContext, attrs);
                final ViewGroup viewGroup = (ViewGroup) parent;
                final ViewGroup.LayoutParams params = viewGroup.generateLayoutParams(attrs);
                rInflate(parser, view, attrs, true);
                viewGroup.addView(view, params);
            } else {
                final View view = createViewFromTag(parent, name, attrs);
                final ViewGroup viewGroup = (ViewGroup) parent;
                final ViewGroup.LayoutParams params = viewGroup.generateLayoutParams(attrs);
                rInflate(parser, view, attrs, true);
                viewGroup.addView(view, params);
            }
        }
        if (finishInflate) {
            // onFinishInflate() is protected, so it is located up the class
            // hierarchy via reflection and memoized in sFinishInflateMethods.
            Class<?> clazz = parent.getClass();
            Method method = sFinishInflateMethods.get(clazz);
            if (method == null) {
                while (clazz != Object.class && method == null) {
                    try {
                        method = clazz.getDeclaredMethod("onFinishInflate",
                                (Class<?>[]) null);
                    } catch (Exception e) {
                        clazz = clazz.getSuperclass();
                    } catch (NoClassDefFoundError e) {
                        // HE#596 - Let's end the search here for those less than API 14
                        clazz = Object.class;
                    }
                }
                if (method != null) {
                    method.setAccessible(true);
                    sFinishInflateMethods.put(parent.getClass(), method);
                }
            }
            if (method != null) {
                try {
                    method.invoke(parent, (Object[]) null);
                } catch (Exception e) {
                    // Deliberately best-effort: inflation succeeds even if the
                    // callback fails.
                }
            }
        }
    }

    @Override
    public void setFactory(android.view.LayoutInflater.Factory factory) {
        setFactory(new FactoryWrapper(factory));
    }

    public void setFactory(Factory factory) {
        // "set" actually prepends: the new factory gets first refusal.
        addFactory(factory, 0);
    }

    @Override
    public void setFactory2(Factory2 factory) {
        setFactory(new Factory2Wrapper(factory));
    }

    // Adopts another inflater as parent, inheriting its factories/filter
    // (or wrapping a stock inflater's factory/filter).
    protected LayoutInflater setParent(android.view.LayoutInflater original) {
        if (original == this) {
            return this;
        }
        if (original instanceof LayoutInflater) {
            mParentInflater = (LayoutInflater) original;
            mFilter = mParentInflater.mFilter;
            mFilterMap = mParentInflater.mFilterMap;
            mFactories = mParentInflater.mFactories;
        } else {
            mParentInflater = null;
            if (VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB) {
                final Factory2 factory = original.getFactory2();
                if (factory != null) {
                    setFactory2(factory);
                }
            }
            final android.view.LayoutInflater.Factory factory = original.getFactory();
            if (factory != null) {
                setFactory(factory);
            }
            final Filter filter = original.getFilter();
            if (filter != null) {
                setFilter(filter);
            }
        }
        return this;
    }

    /** Factory callback with access to the parent view and Context. */
    public interface Factory {
        public View onCreateView(View parent, String name, Context context, AttributeSet attrs);
    }

    /** Notified whenever a new LayoutInflater instance is constructed. */
    public static interface OnInitInflaterListener {
        public void onInitInflater(LayoutInflater inflater);
    }

    /**
     * Support for the legacy {@code <blink>} tag: toggles the visibility of
     * its children every {@link #BLINK_DELAY} ms while attached to a window.
     */
    private static class BlinkLayout extends FrameLayout {
        private static final int BLINK_DELAY = 500;
        private static final int MESSAGE_BLINK = 0x42;
        private final Handler mHandler;
        private boolean mBlink;
        private boolean mBlinkState;

        public BlinkLayout(Context context, AttributeSet attrs) {
            super(context, attrs);
            mHandler = new Handler(new Handler.Callback() {
                @Override
                public boolean handleMessage(Message msg) {
                    if (msg.what == MESSAGE_BLINK) {
                        if (mBlink) {
                            mBlinkState = !mBlinkState;
                            makeBlink();
                        }
                        invalidate();
                        return true;
                    }
                    return false;
                }
            });
        }

        @Override
        protected void dispatchDraw(Canvas canvas) {
            // Children are drawn only on the "visible" half of the cycle.
            if (mBlinkState) {
                super.dispatchDraw(canvas);
            }
        }

        private void makeBlink() {
            Message message = mHandler.obtainMessage(MESSAGE_BLINK);
            mHandler.sendMessageDelayed(message, BLINK_DELAY);
        }

        @Override
        protected void onAttachedToWindow() {
            super.onAttachedToWindow();
            mBlink = true;
            mBlinkState = true;
            makeBlink();
        }

        @Override
        protected void onDetachedFromWindow() {
            super.onDetachedFromWindow();
            mBlink = false;
            mBlinkState = true;
            mHandler.removeMessages(MESSAGE_BLINK);
        }
    }

    /** Adapts a platform Factory2 to this class's Factory interface. */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    private static final class Factory2Wrapper implements Factory {
        private Factory2 mFactory;

        public Factory2Wrapper(Factory2 factory) {
            mFactory = factory;
        }

        @Override
        public View onCreateView(View parent, String name, Context context, AttributeSet attrs) {
            return mFactory.onCreateView(parent, name, context, attrs);
        }
    }

    /** Adapts a platform (parent-less) Factory to this class's Factory interface. */
    private static final class FactoryWrapper implements Factory {
        private android.view.LayoutInflater.Factory mFactory;

        public FactoryWrapper(android.view.LayoutInflater.Factory factory) {
            mFactory = factory;
        }

        @Override
        public View onCreateView(View parent, String name, Context context, AttributeSet attrs) {
            // The wrapped factory has no notion of a parent view.
            return mFactory.onCreateView(name, context, attrs);
        }
    }

    /** Exposes this inflater through the SystemServiceManager registry. */
    @SystemService(Context.LAYOUT_INFLATER_SERVICE)
    public static class LayoutInflaterCreator implements SystemServiceCreator<LayoutInflater> {
        @Override
        public LayoutInflater createService(Context context) {
            return LayoutInflater.from(context);
        }
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

// NOTE(review): this file is AutoRest-generated. Do not hand-edit the API
// surface; regeneration from the swagger specification will overwrite changes.

package com.microsoft.azure.management.network.v2018_06_01;

import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.arm.resources.models.Resource;
import com.microsoft.azure.arm.resources.models.GroupableResourceCore;
import com.microsoft.azure.arm.resources.models.HasResourceGroup;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2018_06_01.implementation.NetworkManager;
import java.util.List;
import com.microsoft.azure.management.network.v2018_06_01.implementation.SecurityRuleInner;
import com.microsoft.azure.management.network.v2018_06_01.implementation.NetworkSecurityGroupInner;

/**
 * Type representing NetworkSecurityGroup.
 * <p>
 * Immutable client-side view of an Azure network security group, plus the
 * fluent definition/update stage interfaces used to create or modify one.
 */
public interface NetworkSecurityGroup extends HasInner<NetworkSecurityGroupInner>, Resource, GroupableResourceCore<NetworkManager, NetworkSecurityGroupInner>, HasResourceGroup, Refreshable<NetworkSecurityGroup>, Updatable<NetworkSecurityGroup.Update>, HasManager<NetworkManager> {
    /**
     * @return the defaultSecurityRules value.
     */
    List<NetworkSecurityGroupSecurityRule> defaultSecurityRules();

    /**
     * @return the etag value.
     */
    String etag();

    /**
     * @return the networkInterfaces value.
     */
    List<NetworkInterface> networkInterfaces();

    /**
     * @return the provisioningState value.
     */
    String provisioningState();

    /**
     * @return the resourceGuid value.
     */
    String resourceGuid();

    /**
     * @return the securityRules value.
     */
    List<NetworkSecurityGroupSecurityRule> securityRules();

    /**
     * @return the subnets value.
     */
    List<Subnet> subnets();

    /**
     * The entirety of the NetworkSecurityGroup definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithGroup, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of NetworkSecurityGroup definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a NetworkSecurityGroup definition.
         */
        interface Blank extends GroupableResourceCore.DefinitionWithRegion<WithGroup> {
        }

        /**
         * The stage of the NetworkSecurityGroup definition allowing to specify the resource group.
         */
        interface WithGroup extends GroupableResourceCore.DefinitionStages.WithGroup<WithCreate> {
        }

        /**
         * The stage of the networksecuritygroup definition allowing to specify DefaultSecurityRules.
         */
        interface WithDefaultSecurityRules {
            /**
             * Specifies defaultSecurityRules.
             * @param defaultSecurityRules The default security rules of network security group
             * @return the next definition stage
             */
            WithCreate withDefaultSecurityRules(List<SecurityRuleInner> defaultSecurityRules);
        }

        /**
         * The stage of the networksecuritygroup definition allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next definition stage
             */
            WithCreate withEtag(String etag);
        }

        /**
         * The stage of the networksecuritygroup definition allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'
             * @return the next definition stage
             */
            // NOTE(review): the generated description above says "public IP resource";
            // this looks like a copy-paste in the source swagger — the value is the
            // NSG's own provisioning state. Fix belongs in the API spec, not here.
            WithCreate withProvisioningState(String provisioningState);
        }

        /**
         * The stage of the networksecuritygroup definition allowing to specify ResourceGuid.
         */
        interface WithResourceGuid {
            /**
             * Specifies resourceGuid.
             * @param resourceGuid The resource GUID property of the network security group resource
             * @return the next definition stage
             */
            WithCreate withResourceGuid(String resourceGuid);
        }

        /**
         * The stage of the networksecuritygroup definition allowing to specify SecurityRules.
         */
        interface WithSecurityRules {
            /**
             * Specifies securityRules.
             * @param securityRules A collection of security rules of the network security group
             * @return the next definition stage
             */
            WithCreate withSecurityRules(List<SecurityRuleInner> securityRules);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<NetworkSecurityGroup>, Resource.DefinitionWithTags<WithCreate>, DefinitionStages.WithDefaultSecurityRules, DefinitionStages.WithEtag, DefinitionStages.WithProvisioningState, DefinitionStages.WithResourceGuid, DefinitionStages.WithSecurityRules {
        }
    }

    /**
     * The template for a NetworkSecurityGroup update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<NetworkSecurityGroup>, Resource.UpdateWithTags<Update>, UpdateStages.WithDefaultSecurityRules, UpdateStages.WithEtag, UpdateStages.WithProvisioningState, UpdateStages.WithResourceGuid, UpdateStages.WithSecurityRules {
    }

    /**
     * Grouping of NetworkSecurityGroup update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the networksecuritygroup update allowing to specify DefaultSecurityRules.
         */
        interface WithDefaultSecurityRules {
            /**
             * Specifies defaultSecurityRules.
             * @param defaultSecurityRules The default security rules of network security group
             * @return the next update stage
             */
            Update withDefaultSecurityRules(List<SecurityRuleInner> defaultSecurityRules);
        }

        /**
         * The stage of the networksecuritygroup update allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next update stage
             */
            Update withEtag(String etag);
        }

        /**
         * The stage of the networksecuritygroup update allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'
             * @return the next update stage
             */
            // NOTE(review): same "public IP resource" copy-paste as in the definition stage.
            Update withProvisioningState(String provisioningState);
        }

        /**
         * The stage of the networksecuritygroup update allowing to specify ResourceGuid.
         */
        interface WithResourceGuid {
            /**
             * Specifies resourceGuid.
             * @param resourceGuid The resource GUID property of the network security group resource
             * @return the next update stage
             */
            Update withResourceGuid(String resourceGuid);
        }

        /**
         * The stage of the networksecuritygroup update allowing to specify SecurityRules.
         */
        interface WithSecurityRules {
            /**
             * Specifies securityRules.
             * @param securityRules A collection of security rules of the network security group
             * @return the next update stage
             */
            Update withSecurityRules(List<SecurityRuleInner> securityRules);
        }
    }
}
import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.fileEditor.FileEditorManagerEvent; import com.intellij.openapi.fileEditor.FileEditorManagerListener; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.ComboBox; import com.intellij.openapi.util.IconLoader; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.wm.ToolWindow; import com.intellij.openapi.wm.ToolWindowFactory; import com.intellij.ui.JBColor; import com.intellij.ui.components.JBScrollPane; import org.jetbrains.annotations.NotNull; import javax.swing.*; import javax.swing.border.BevelBorder; import javax.swing.event.DocumentEvent; import javax.swing.event.DocumentListener; import javax.swing.text.BadLocationException; import java.awt.*; import java.awt.event.*; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; public class Main implements ToolWindowFactory, FileEditorManagerListener { //Cards protected final String CARD_LOADING_TESTS = "loading"; protected final String CARD_NO_TESTS = "no_tests"; protected final String CARD_TESTS = "tests"; //Stuff that needs to be accessed from other places protected HintTextField search_field = new HintTextField(Constants.SEARCH_HINT); protected JPanel main_panel = new JPanel(); protected JPanel tests_panel = new JPanel(); protected Loader loader = new Loader(); protected CardLayout layout = new CardLayout(); protected Project project; protected TestSelector testSelector; protected ComboBox test_type; /** * Initialize the tool window * @param project the current project * @param toolWindow the tool window which should be initialized */ @Override public void createToolWindowContent(@NotNull Project project, @NotNull ToolWindow toolWindow) { this.project = project; main_panel.setLayout(layout); project.getMessageBus().connect().subscribe(FileEditorManagerListener.FILE_EDITOR_MANAGER, this); Component component = toolWindow.getComponent(); 
component.getParent().setLayout(new BorderLayout()); component.getParent().add(main_panel); initCommonViews(); component.getParent().add(test_type, BorderLayout.NORTH); loadingTests(); initNoTests(); initTests(); testSelector = new TestSelectorEspresso(tests_panel, search_field, project); loader.start_animation(); populate(); } private void initCommonViews() { test_type = new ComboBox(new String[]{"Espresso", "Junit"}); test_type.setBorder(null); test_type.addItemListener(e -> { if (test_type.getSelectedIndex() == 0) { testSelector = new TestSelectorEspresso(tests_panel, search_field, project); populate(); } else { testSelector = new TestSelectorJUnit(tests_panel, search_field, project); populate(); } }); } /** * Function used to get all the tests asynchronously */ public void populate() { AsyncTask task = new AsyncTask() { @Override protected void onPreExecute() { //show loading animation while list is being populated loader.start_animation(); SwingUtilities.invokeLater(() -> layout.show(main_panel, CARD_LOADING_TESTS)); } @Override protected void doInBackground() { //fill the panel with the tests SwingUtilities.invokeLater(testSelector::fillTests); } @Override protected void onPostExecute() { //show population results SwingUtilities.invokeLater(() -> { loader.stop_animation(); if (testSelector.getTests() > 0) layout.show(main_panel, CARD_TESTS); else layout.show(main_panel, CARD_NO_TESTS); }); } }; new Thread(task).start(); } /** Initialize the tests gui */ private void initTests(){ JPanel panel_search = new JPanel(); JPanel panel_btns = new JPanel(); JPanel panel = new JPanel(); //INIT the main panel panel.setLayout(new BorderLayout()); tests_panel.setLayout(new BoxLayout(tests_panel, BoxLayout.PAGE_AXIS)); JBScrollPane scrollPane = new JBScrollPane(tests_panel); scrollPane.getVerticalScrollBar().setBlockIncrement(16); panel.add(scrollPane); //###################################################################### //INIT the buttons panel JButton all_btn = new 
JButton(Constants.BTN_SELECT_ALL); JButton none_btn = new JButton(Constants.BTN_SELECT_NONE); JButton refresh_btn = new JButton(Constants.BTN_REFRESH); panel_btns.setLayout(new GridLayout(3,1)); all_btn.addActionListener((ActionEvent e)->{ for (int i = 0; i < tests_panel.getComponentCount(); i++) { ((JCheckBox) tests_panel.getComponents()[i]).setSelected(true); } } ); none_btn.addActionListener((ActionEvent e) -> { for (int i = 0; i < tests_panel.getComponentCount(); i++) { ((JCheckBox) tests_panel.getComponents()[i]).setSelected(false); } } ); refresh_btn.addActionListener((ActionEvent e) -> populate() ); panel_btns.add(all_btn); panel_btns.add(none_btn); panel_btns.add(refresh_btn); //###################################################################### //INIT the search panel panel_search.setLayout(new BorderLayout()); search_field.setOpaque(false); search_field.setBackground(new JBColor(new Color(0, 0, 0, 0), new Color(255, 255, 255, 0))); search_field.setBorder(null); search_field.getDocument().addDocumentListener(searchListener(tests_panel)); //http://www.programcreek.com/java-api-examples/index.php?api=com.intellij.openapi.util.IconLoader final Icon icon= IconLoader.getIcon("/actions/close.png"); final Icon hoveredIcon=IconLoader.getIcon("/actions/closeHovered.png"); JButton cleartxt_btn = new JButton(icon); cleartxt_btn.addMouseListener(new MouseAdapter() { @Override public void mouseEntered(MouseEvent e) { cleartxt_btn.setIcon(hoveredIcon); } @Override public void mouseExited(MouseEvent e) { cleartxt_btn.setIcon(icon); } }); cleartxt_btn.setBackground(new JBColor(new Color(0, 0, 0, 0), new Color(255, 255, 255, 0))); cleartxt_btn.addActionListener((ActionEvent e) -> search_field.setText("")); cleartxt_btn.setBorder(BorderFactory.createEmptyBorder(4, 4, 4, 4)); cleartxt_btn.setOpaque(false); cleartxt_btn.setBorderPainted(false); cleartxt_btn.setBackground(new JBColor(JBColor.WHITE, search_field.getBackground())); Box.Filler b = (Box.Filler) 
Box.createHorizontalStrut(5); b.setOpaque(false); panel_search.add(b, BorderLayout.WEST); panel_search.add(search_field); panel_search.add(cleartxt_btn, BorderLayout.EAST); panel_search.setBorder(BorderFactory.createBevelBorder(BevelBorder.LOWERED)); panel_search.setOpaque(false); //###################################################################### main_panel.add(panel, CARD_TESTS); panel.add(panel_search, BorderLayout.NORTH); panel.add(panel_btns, BorderLayout.SOUTH); } /** Initialize the no tests gui */ private void initNoTests(){ JPanel panel_btns = new JPanel(); panel_btns.setLayout(new GridLayout(1, 1)); JButton refresh_btn = new JButton(Constants.BTN_REFRESH); refresh_btn.addActionListener((ActionEvent e) -> populate()); JPanel panel = new JPanel(); panel.setLayout(new BorderLayout()); JLabel label = new JLabel(Constants.NO_RESULTS); label.setFont(label.getFont().deriveFont(15f)); label.setHorizontalAlignment(JLabel.CENTER); label.setVerticalAlignment(JLabel.CENTER); panel.add(label); panel_btns.add(refresh_btn); panel.add(panel_btns, BorderLayout.SOUTH); main_panel.add(panel, CARD_NO_TESTS); } /** Initialize the loading animation gui */ private void loadingTests(){ loader.setSize(new Dimension(10, 10)); JPanel panel = new JPanel(); panel.setLayout(new BoxLayout(panel, BoxLayout.LINE_AXIS)); JPanel vertical = new JPanel(); vertical.setLayout(new BoxLayout(vertical, BoxLayout.PAGE_AXIS)); vertical.add(Box.createVerticalGlue()); vertical.add(loader); vertical.add(Box.createVerticalGlue()); panel.add(Box.createHorizontalGlue()); panel.add(vertical); panel.add(Box.createHorizontalGlue()); main_panel.add(panel, CARD_LOADING_TESTS); } /** * Filters the tests by query string * @param e the document event generated by the documentlistener of the textfield document */ public void updateTests(DocumentEvent e, JPanel tests_panel){ try { String query = e.getDocument().getText(0, e.getDocument().getLength()); for (int i = 0; i < tests_panel.getComponentCount(); i++) { 
Pattern test_pattern = Pattern.compile(".*" + query + ".*", Pattern.CASE_INSENSITIVE); if(test_pattern.matcher(((JCheckBox) tests_panel.getComponents()[i]).getText()).find()){ tests_panel.getComponents()[i].setVisible(true); }else{ tests_panel.getComponents()[i].setVisible(false); } } } catch (BadLocationException e1) { e1.printStackTrace(); } catch (PatternSyntaxException e2){ //do nothing, just wait for the user to put a correct pattern } } /**The listenr for the search field*/ public DocumentListener searchListener(JPanel tests_panel){ return new DocumentListener() { @Override public void insertUpdate(DocumentEvent e) { updateTests(e, tests_panel); } @Override public void removeUpdate(DocumentEvent e) { updateTests(e, tests_panel); } @Override public void changedUpdate(DocumentEvent e) {} }; } @Override public void fileOpened(@NotNull FileEditorManager fileEditorManager, @NotNull VirtualFile virtualFile) {} @Override public void fileClosed(@NotNull FileEditorManager fileEditorManager, @NotNull VirtualFile virtualFile) { layout.show(main_panel, CARD_NO_TESTS); } @Override public void selectionChanged(@NotNull FileEditorManagerEvent fileEditorManagerEvent) { populate(); } }
package components.abstracts;

import io.GdxPainter;

import java.util.BitSet;
import java.util.Collection;
import java.util.Iterator;

import components.aggregated.GdxContent;
import components.interfaces.GdxComponent;

/**
 * A container that hosts a single child component and lets the user scroll it
 * by dragging, with fling (velocity + deceleration) support.
 * <p>
 * Scroll offsets are clamped so the child never scrolls past its own bounds
 * ({@link #trimScrollX(float)} / {@link #trimScrollY(float)}); drag distance
 * that exceeds the clamp is remembered in {@code overshootX/Y} so the content
 * does not "jump" when the pointer moves back.
 */
public class GdxAbstractScrollView extends GdxAbstractContainer {

    // Default deceleration: the view can slow down by its own width/height 2.5x per second.
    private static final float DEFAULT_ACCELERATION = 2.5f;

    // The single scrollable child, managed through the GdxContent aggregate.
    private final GdxContent content = new GdxContent(this);
    // One bit per pointer id currently captured from a descendant drag.
    private final BitSet capturedDraggingPointers = new BitSet(10);
    // Count of set bits in capturedDraggingPointers, kept alongside for O(1) queries.
    private int numberOfCapturedDrags = 0;
    private float accelerationX = DEFAULT_ACCELERATION;
    private float accelerationY = DEFAULT_ACCELERATION;
    // Current (clamped) scroll offsets; content is positioned at (-scrollX, -scrollY).
    private float scrollX, scrollY;
    // Drag distance lost to clamping, carried so a reversed drag feels continuous.
    private float overshootX = 0, overshootY = 0;
    // Fling velocity in units/second; decays in step() at the acceleration rate.
    private float velocityX = 0, velocityY = 0;

    public GdxAbstractScrollView(float x, float y, float width, float height, GdxComponent content) {
        super(x, y, width, height);
        makeActive();
        this.content.set(content);
    }

    protected float getScrollX() {
        return scrollX;
    }

    protected float getScrollY() {
        return scrollY;
    }

    // Setting a scroll position explicitly cancels any fling in progress.
    protected void setScrollX(float scrollX) {
        this.scrollX = scrollX;
        velocityX = 0;
        velocityY = 0;
        updatePosition();
    }

    protected void setScrollY(float scrollY) {
        this.scrollY = scrollY;
        velocityX = 0;
        velocityY = 0;
        updatePosition();
    }

    protected void setScroll(float scrollX, float scrollY) {
        this.scrollX = scrollX;
        this.scrollY = scrollY;
        velocityX = 0;
        velocityY = 0;
        updatePosition();
    }

    /** Clamps a horizontal scroll offset to [0, contentWidth - viewWidth] (0 when content fits). */
    protected float trimScrollX(float scrollX) {
        if (scrollX < 0)
            return 0;
        float maxScrollX = content.get().getWidth() - getWidth();
        if (maxScrollX > 0) {
            if (scrollX > maxScrollX)
                return maxScrollX;
            return scrollX;
        } else {
            return 0;
        }
    }

    /** Clamps a vertical scroll offset to [0, contentHeight - viewHeight] (0 when content fits). */
    protected float trimScrollY(float scrollY) {
        if (scrollY < 0)
            return 0;
        float maxScrollY = content.get().getHeight() - getHeight();
        if (maxScrollY > 0) {
            if (scrollY > maxScrollY)
                return maxScrollY;
            return scrollY;
        } else {
            return 0;
        }
    }

    /** Re-clamps both offsets and moves the child to (-scrollX, -scrollY). */
    protected void updatePosition() {
        scrollX = trimScrollX(scrollX);
        scrollY = trimScrollY(scrollY);
        content.get().setLocation(- scrollX, - scrollY);
    }

    protected GdxComponent getContent() {
        return content.get();
    }

    protected void setContent(GdxComponent component) {
        content.set(component);
        updatePosition();
    }

    @Override
    public Collection<GdxComponent> getComponents() {
        return content.collection();
    }

    @Override
    public boolean hasComponent(GdxComponent component) {
        return content.is(component);
    }

    @Override
    public boolean removeComponent(GdxComponent component) {
        return content.remove(component);
    }

    @Override
    public Iterator<GdxComponent> interactionCandidatesIterator(float x, float y) {
        return content.collection().iterator();
    }

    @Override
    protected void resized() {
        super.resized();
        // View size changed: the clamp bounds changed too.
        updatePosition();
    }

    @Override
    public void reportResize(GdxComponent component) {
        // Child size changed: re-clamp the current offsets.
        updatePosition();
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates how many times per second it can
     * move by a distance equal to its width.
     * @return Current acceleration of the ScrollView on
     * the horizontal axis
     */
    protected float getAccelerationX() {
        return accelerationX;
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates how many times per second it can
     * move by a distance equal to its height.
     * @return Current acceleration of the ScrollView on
     * the vertical axis.
     */
    protected float getAccelerationY() {
        return accelerationY;
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates how many times per second it can
     * move by a distance equal to its width.
     * @param accelerationX Acceleration of the ScrollView on
     * the horizontal axis.
     */
    protected void setAccelerationX(float accelerationX) {
        if (accelerationX <= 0)
            throw new IllegalArgumentException("Acceleration must be a positive value.");
        this.accelerationX = accelerationX;
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates how many times per second it can
     * move by a distance equal to its height.
     * @param accelerationY Acceleration of the ScrollView on
     * the vertical axis.
     */
    protected void setAccelerationY(float accelerationY) {
        if (accelerationY <= 0)
            throw new IllegalArgumentException("Acceleration must be a positive value.");
        this.accelerationY = accelerationY;
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates by how many internal units ScrollView
     * can scroll per second on the horizontal axis.
     * @return Current acceleration of the ScrollView on
     * the horizontal axis
     */
    protected float getAccelerationAbsoluteX() {
        return getWidth() * accelerationX;
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates by how many internal units ScrollView
     * can scroll per second on the vertical axis.
     * @return Current acceleration of the ScrollView on
     * the vertical axis
     */
    protected float getAccelerationAbsoluteY() {
        return getHeight() * accelerationY;
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates by how many internal units ScrollView
     * can scroll per second on horizontal axis.
     * @param accelerationAbsoluteX Acceleration of the ScrollView on
     * the horizontal axis.
     */
    protected void setAccelerationAbsoluteX(float accelerationAbsoluteX) {
        if (accelerationAbsoluteX <= 0)
            throw new IllegalArgumentException("Acceleration must be a positive value.");
        accelerationX = accelerationAbsoluteX / getWidth();
    }

    /**
     * Acceleration determines how quickly ScrollView slows
     * down after it was set in motion.
     * <p>
     * The value indicates by how many internal units ScrollView
     * can scroll per second on vertical axis.
     * @param accelerationAbsoluteY Acceleration of the ScrollView on
     * the vertical axis.
     */
    protected void setAccelerationAbsoluteY(float accelerationAbsoluteY) {
        if (accelerationAbsoluteY <= 0)
            throw new IllegalArgumentException("Acceleration must be a positive value.");
        accelerationY = accelerationAbsoluteY / getHeight();
    }

    protected float getVelocityX() {
        return velocityX;
    }

    protected float getVelocityY() {
        return velocityY;
    }

    protected void setVelocityX(float velocityX) {
        this.velocityX = velocityX;
    }

    protected void setVelocityY(float velocityY) {
        this.velocityY = velocityY;
    }

    @Override
    public void paint(float x, float y, GdxPainter painter) {
        // Clip painting to the view bounds; skip entirely if the clip is rejected.
        if (painter.pushClippingArea(x, y, getWidth(), getHeight())) {
            GdxComponent component = content.get();
            painter.paintComponent(x + component.getX(), y + component.getY(), component);
            painter.popClippingArea();
        }
    }

    @Override
    public boolean isDragged() {
        // Dragged either directly or via a pointer captured from a descendant.
        return super.isDragged() || numberOfCapturedDrags > 0;
    }

    @Override
    public int getNumberOfDragging() {
        return super.getNumberOfDragging() + numberOfCapturedDrags;
    }

    @Override
    public void step(float delay) {
        // Fling physics: while not being dragged, decay each velocity toward 0
        // at the (absolute) acceleration rate, scroll by the remaining velocity,
        // and kill a component's velocity once scrolling hits a clamp boundary.
        if (!isDragged()) {
            boolean doScroll = false;
            if (velocityX != 0) {
                if (velocityX > 0) {
                    velocityX -= getAccelerationAbsoluteX() * delay;
                    if (velocityX < 0) {
                        velocityX = 0;
                    } else {
                        doScroll = true;
                    }
                } else {
                    velocityX += getAccelerationAbsoluteX() * delay;
                    if (velocityX > 0) {
                        velocityX = 0;
                    } else {
                        doScroll = true;
                    }
                }
            }
            if (velocityY != 0) {
                if (velocityY > 0) {
                    velocityY -= getAccelerationAbsoluteY() * delay;
                    if (velocityY < 0) {
                        velocityY = 0;
                    } else {
                        doScroll = true;
                    }
                } else {
                    velocityY += getAccelerationAbsoluteY() * delay;
                    if (velocityY > 0) {
                        velocityY = 0;
                    } else {
                        doScroll = true;
                    }
                }
            }
            if (doScroll) {
                float previousScrollX = scrollX;
                float previousScrollY = scrollY;
                scrollX -= velocityX * delay;
                scrollY -= velocityY * delay;
                updatePosition();
                // If the clamp swallowed the whole movement, we hit an edge: stop.
                if (previousScrollX == scrollX)
                    velocityX = 0;
                if (previousScrollY == scrollY)
                    velocityY = 0;
                makeDirty();
            }
        }
    }

    @Override
    public boolean onDrag(float x, float y, float differenceX, float differenceY, int pointer) {
        super.onDrag(x, y, differenceX, differenceY, pointer);
        // Apply the drag delta plus any previously clamped-away distance, then
        // record how much of the desired movement the clamp rejected this time.
        float desiredScrollX = scrollX - differenceX + overshootX;
        float desiredScrollY = scrollY - differenceY + overshootY;
        scrollX = desiredScrollX;
        scrollY = desiredScrollY;
        updatePosition();
        overshootX += desiredScrollX - scrollX;
        overshootY += desiredScrollY - scrollY;
        return true;
    }

    @Override
    protected void onStopDrag(float x, float y, int pointer) {
        // Drag ended: discard accumulated overshoot.
        overshootX = 0;
        overshootY = 0;
        super.onStopDrag(x, y, pointer);
    }

    @Override
    public boolean onDragReceived(float x, float y, float differenceX, float differenceY, int pointer) {
        super.onDragReceived(x, y, differenceX, differenceY, pointer);
        // First drag event captured for this pointer: start tracking it.
        if (!capturedDraggingPointers.get(pointer)) {
            capturedDraggingPointers.set(pointer, true);
            numberOfCapturedDrags++;
            if (getNumberOfDragging() == 1)
                onStartDrag(x, y, pointer);
        }
        return true;
    }

    @Override
    public void onDragCapturingStopped(float x, float y, int pointer) {
        super.onDragCapturingStopped(x, y, pointer);
        // NOTE(review): this decrements unconditionally. If the framework can
        // call it for a pointer that was never captured here, the counter would
        // go negative — presumably it only fires after onDragReceived; confirm.
        capturedDraggingPointers.set(pointer, false);
        numberOfCapturedDrags--;
        if (getNumberOfDragging() == 0)
            onStopDrag(x, y, pointer);
    }

    @Override
    public boolean onFling(float x, float y, float velocityX, float velocityY, int pointer) {
        // Start a fling; step() consumes and decays these velocities.
        this.velocityX = velocityX;
        this.velocityY = velocityY;
        makeDirty();
        return true;
    }

    @Override
    public void dispose() {
        content.dispose();
        super.dispose();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.impl.plan.optimizer; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; import org.apache.pig.PigException; import org.apache.pig.impl.plan.Operator; import org.apache.pig.impl.plan.OperatorPlan; import org.apache.pig.impl.plan.VisitorException; import org.apache.pig.impl.plan.optimizer.RuleOperator.NodeType; import org.apache.pig.impl.util.Pair; /** * RuleMatcher contains the logic to determine whether a given rule matches. * This alone does not mean the rule will be applied. Transformer.check() * still has to pass before Transfomer.transform() is called. * */ public class RuleMatcher<O extends Operator, P extends OperatorPlan<O>> { private Rule<O, P> mRule; private List<Pair<O, RuleOperator.NodeType>> mMatch; private List<List<Pair<O, RuleOperator.NodeType>>> mPrelimMatches = new ArrayList<List<Pair<O, RuleOperator.NodeType>>>(); private List<List<O>> mMatches = new ArrayList<List<O>>(); private P mPlan; // for convenience. 
private int mNumCommonNodes = 0; /** * Test a rule to see if it matches the current plan. Save all matched nodes using BFS * @param rule Rule to test for a match. * @return true if the plan matches. */ public boolean match(Rule<O, P> rule) throws OptimizerException { mRule = rule; CommonNodeFinder commonNodeFinder = new CommonNodeFinder(mRule.getPlan()); try { commonNodeFinder.visit(); mNumCommonNodes = commonNodeFinder.getCount(); } catch (VisitorException ve) { int errCode = 2125; String msg = "Internal error. Problem in computing common nodes in the Rule Plan."; throw new OptimizerException(msg, errCode, PigException.BUG, ve); } mPlan = mRule.getTransformer().getPlan(); mMatches.clear(); mPrelimMatches.clear(); if (mRule.getWalkerAlgo() == Rule.WalkerAlgo.DependencyOrderWalker) DependencyOrderWalker(); else if (mRule.getWalkerAlgo() == Rule.WalkerAlgo.DepthFirstWalker) DepthFirstWalker(); else if (mRule.getWalkerAlgo() == Rule.WalkerAlgo.ReverseDependencyOrderWalker) ReverseDependencyOrderWalker(); return (mMatches.size()!=0); } private void ReverseDependencyOrderWalker() { List<O> fifo = new ArrayList<O>(); Set<O> seen = new HashSet<O>(); List<O> roots = mPlan.getRoots(); if (roots == null) return; for (O op : roots) { RDODoAllSuccessors(op, seen, fifo); } for (O op: fifo) { if (beginMatch(op)) mPrelimMatches.add(mMatch); } if(mPrelimMatches.size() > 0) { processPreliminaryMatches(); } } private void RDODoAllSuccessors(O node, Set<O> seen, Collection<O> fifo) { if (!seen.contains(node)) { // We haven't seen this one before. 
Collection<O> succs = mPlan.getSuccessors(node); if (succs != null && succs.size() > 0) { // Do all our successors before ourself for (O op : succs) { RDODoAllSuccessors(op, seen, fifo); } } // Now do ourself seen.add(node); fifo.add(node); } } private void DependencyOrderWalker() { List<O> fifo = new ArrayList<O>(); Set<O> seen = new HashSet<O>(); List<O> leaves = mPlan.getLeaves(); if (leaves == null) return; for (O op : leaves) { BFSDoAllPredecessors(op, seen, fifo); } for (O op: fifo) { if (beginMatch(op)) { mPrelimMatches.add(mMatch); } } if(mPrelimMatches.size() > 0) { processPreliminaryMatches(); } } /** * A method to compute the final matches */ private void processPreliminaryMatches() { //The preliminary matches contain paths that match //the specification in the RulePlan. However, if there //are twigs and DAGs, then a further computation is required //to extract the nodes in the mPlan that correspond to the //roots of the RulePlan //compute the number of common nodes in each preliminary match List<List<O>> commonNodesPerMatch = new ArrayList<List<O>>(); for(int i = 0; i < mPrelimMatches.size(); ++i) { commonNodesPerMatch.add(getCommonNodesFromMatch(mPrelimMatches.get(i))); } if(mNumCommonNodes == 0) { //the rule plan had simple paths //verification step //if any of the preliminary matches had common nodes //then its an anomaly for(int i = 0; i < commonNodesPerMatch.size(); ++i) { if(commonNodesPerMatch.get(i) != null) { //we have found common nodes when there should be none //just return as mMatches will be empty return; } } //pick the first node of each match and put them into individual lists //put the lists inside the list of lists mMatches for(int i = 0; i < mPrelimMatches.size(); ++i) { List<O> match = new ArrayList<O>(); match.add(mPrelimMatches.get(i).get(0).first); mMatches.add(match); } //all the matches have been computed for the simple path return; } else { for(int i = 0; i < commonNodesPerMatch.size(); ++i) { int commonNodes = 
(commonNodesPerMatch.get(i) == null? 0 : commonNodesPerMatch.get(i).size()); if(commonNodes != mNumCommonNodes) { //if there are is a mismatch in the common nodes then we have a problem //the rule plan states that we have mNumCommonNodes but we have commonNodes //in the match. Just return return; } } } //keep track of the matches that have been processed List<Boolean> processedMatches = new ArrayList<Boolean>(); for(int i = 0; i < mPrelimMatches.size(); ++i) { processedMatches.add(false); } //a do while loop to handle single matches int outerIndex = 0; do { if(processedMatches.get(outerIndex)) { ++outerIndex; continue; } List<Pair<O, RuleOperator.NodeType>> outerMatch = mPrelimMatches.get(outerIndex); List<O> outerCommonNodes = commonNodesPerMatch.get(outerIndex); Set<O> outerSetCommonNodes = new HashSet<O>(outerCommonNodes); Set<O> finalIntersection = new HashSet<O>(outerCommonNodes); Set<O> cumulativeIntersection = new HashSet<O>(outerCommonNodes); List<O> patternMatchingRoots = new ArrayList<O>(); Set<O> unionOfRoots = new HashSet<O>(); boolean innerMatchProcessed = false; unionOfRoots.add(outerMatch.get(0).first); for(int innerIndex = outerIndex + 1; (innerIndex < mPrelimMatches.size()) && (!processedMatches.get(innerIndex)); ++innerIndex) { List<Pair<O, RuleOperator.NodeType>> innerMatch = mPrelimMatches.get(innerIndex); List<O> innerCommonNodes = commonNodesPerMatch.get(innerIndex); Set<O> innerSetCommonNodes = new HashSet<O>(innerCommonNodes); //we need to compute the intersection of the common nodes //the size of the intersection should be equal to the number //of common nodes and the type of each rule node class //if there is no match then it could be that we hit a match //for a different path, i.e., another pattern that matched //with a different set of nodes. 
            // In this case, we mark this match as not processed
            // and move on to the next match.
            outerSetCommonNodes.retainAll(innerSetCommonNodes);
            if(outerSetCommonNodes.size() != mNumCommonNodes) {
                // There was no match; continue to the next match.
                continue;
            } else {
                // Intersect the running ("cumulative") common-node set with the
                // common nodes shared by the outer and inner matches.
                Set<O> tempCumulativeIntersection = new HashSet<O>(cumulativeIntersection);
                tempCumulativeIntersection.retainAll(outerSetCommonNodes);
                if(tempCumulativeIntersection.size() != mNumCommonNodes) {
                    // Problem - there was a set intersection with a size mismatch
                    // between the cumulative intersection and the intersection of
                    // the inner and outer common nodes.
                    // Set mMatches to empty and return.
                    mMatches = new ArrayList<List<O>>();
                    return;
                } else {
                    // The inner match is consistent with everything seen so far;
                    // fold it into the running state.
                    processedMatches.set(innerIndex, true);
                    innerMatchProcessed = true;
                    cumulativeIntersection = tempCumulativeIntersection;
                    unionOfRoots.add(innerMatch.get(0).first);
                }
            }
        }
        cumulativeIntersection.retainAll(finalIntersection);
        if(cumulativeIntersection.size() != mNumCommonNodes) {
            // The cumulative and final intersections did not intersect.
            // This could happen when each of the matches is disjoint.
            // Check if the inner matches were processed at all.
            if(innerMatchProcessed) {
                // Problem - the inner matches were processed and we did
                // not find common intersections.
                mMatches = new ArrayList<List<O>>();
                return;
            }
        }
        processedMatches.set(outerIndex, true);
        // Record the union of the matched roots as one combined match.
        for(O node: unionOfRoots) {
            patternMatchingRoots.add(node);
        }
        mMatches.add(patternMatchingRoots);
        ++outerIndex;
    } while (outerIndex < mPrelimMatches.size() - 1);
}

/**
 * Extracts the distinct operators tagged as COMMON_NODE from a match,
 * preserving first-seen order.
 *
 * @param match list of (operator, node-type) pairs produced by the matcher
 * @return the de-duplicated common nodes, or {@code null} if the match
 *         contains no COMMON_NODE entries
 */
private List<O> getCommonNodesFromMatch(List<Pair<O, NodeType>> match) {
    List<O> commonNodes = null;
    // A lookup table to weed out duplicates.
    Map<O, Boolean> lookup = new HashMap<O, Boolean>();
    for(int index = 0; index < match.size(); ++index) {
        if(match.get(index).second.equals(RuleOperator.NodeType.COMMON_NODE)) {
            if(commonNodes == null) {
                commonNodes = new ArrayList<O>();
            }
            O node = match.get(index).first;
            // Look up the node under question. If the node is not found in the
            // table then we are examining it for the first time: add it to the
            // output list and mark it as seen. Otherwise continue to the next
            // iteration.
            if(lookup.get(node) == null) {
                commonNodes.add(node);
                lookup.put(node, true);
            }
        }
    }
    return commonNodes;
}

/**
 * Recursively visits every predecessor of {@code node} before the node
 * itself, appending nodes to {@code fifo} in dependency order.
 * {@code seen} guards against visiting a node twice.
 */
private void BFSDoAllPredecessors(O node, Set<O> seen, Collection<O> fifo) {
    if (!seen.contains(node)) {
        // We haven't seen this one before.
        Collection<O> preds = mPlan.getPredecessors(node);
        if (preds != null && preds.size() > 0) {
            // Do all our predecessors before ourself
            for (O op : preds) {
                BFSDoAllPredecessors(op, seen, fifo);
            }
        }
        // Now do ourself
        seen.add(node);
        fifo.add(node);
    }
}

/** Depth-first traversal of the plan starting from its roots. */
private void DepthFirstWalker() {
    Set<O> seen = new HashSet<O>();
    DFSVisit(null, mPlan.getRoots(), seen);
}

/**
 * Visits {@code successors} depth-first, attempting a pattern match at each
 * newly seen node; successful matches are collected in mPrelimMatches.
 * NOTE(review): the {@code node} parameter is unused in this body.
 */
private void DFSVisit(O node, Collection<O> successors,Set<O> seen) {
    if (successors == null)
        return;
    for (O suc : successors) {
        if (seen.add(suc)) {
            if (beginMatch(suc))
                mPrelimMatches.add(mMatch);
            Collection<O> newSuccessors = mPlan.getSuccessors(suc);
            DFSVisit(suc, newSuccessors, seen);
        }
    }
}

/**
 * @return the first list of plan nodes that matched the pattern defined by
 *         the rule, in the order the nodes are specified in the rule, or
 *         {@code null} if there were no matches.
 */
List<O> getMatches() {
    if (mMatches.size()>=1)
        return mMatches.get(0);
    return null;
}

/**
 * @return all lists of plan nodes that matched the pattern defined by the
 *         rule. Each list holds the nodes in the order they are specified
 *         in the rule.
 */
public List<List<O>> getAllMatches() {
    return mMatches;
}

/*
 * This pattern matching is fairly simple and makes some important
 * assumptions.
 * 1) The pattern to be matched must be expressible as a graph.
 * 2) The pattern must always begin with one of the root nodes in the rule plan.
 *    After that it can go where it wants.
 */

/**
 * Tries to start a match at {@code node} against each root of the rule
 * plan. On success the partial match is accumulated in {@code mMatch}.
 *
 * @param node plan node to test against the rule roots; may be null
 * @return true if the rule pattern matched starting at this node
 */
private boolean beginMatch(O node) {
    if (node == null)
        return false;
    mMatch = new ArrayList<Pair<O, RuleOperator.NodeType>>();
    List<O> nodeSuccessors;
    List<RuleOperator> ruleRoots = mRule.getPlan().getRoots();
    for(RuleOperator ruleRoot: ruleRoots) {
        // A rule root matches either by exact class name or as a wildcard.
        if (node.getClass().getName().equals(ruleRoot.getNodeClass().getName())
                || ruleRoot.getNodeType().equals(RuleOperator.NodeType.ANY_NODE)) {
            mMatch.add(new Pair<O, RuleOperator.NodeType>(node, ruleRoot.getNodeType()));
            // Follow the edge to see the next node we should be looking for.
            List<RuleOperator> ruleRootSuccessors = mRule.getPlan().getSuccessors(ruleRoot);
            if (ruleRootSuccessors == null) {
                // This was looking for a single node
                return true;
            }
            nodeSuccessors = mPlan.getSuccessors(node);
            if ((nodeSuccessors == null) || (nodeSuccessors.size() != ruleRootSuccessors.size())) {
                // The ruleRoot has successors but the node does not
                // OR
                // the number of successors for the ruleRoot does not match
                // the number of successors for the node.
                return false;
            }
            // Every node successor must continue the match against the rule's
            // successor operators.
            boolean foundMatch = false;
            for (O nodeSuccessor : nodeSuccessors) {
                foundMatch |= continueMatch(nodeSuccessor, ruleRootSuccessors);
            }
            return foundMatch;
        }
    }
    // If we get here we haven't found it.
    return false;
}

/**
 * Continues a match in progress: checks {@code node} against each candidate
 * rule operator and recurses over successors.
 *
 * @param node          plan node to test
 * @param ruleOperators candidate rule operators at this depth
 * @return true if the remainder of the pattern matched from this node
 */
private boolean continueMatch(O node, List<RuleOperator> ruleOperators) {
    for(RuleOperator ruleOperator: ruleOperators) {
        if (node.getClass().getName().equals(ruleOperator.getNodeClass().getName())
                || ruleOperator.getNodeType().equals(RuleOperator.NodeType.ANY_NODE)) {
            mMatch.add(new Pair<O, RuleOperator.NodeType>(node,ruleOperator.getNodeType()));
            // Follow the edge to see the next node we should be looking for.
            List<RuleOperator> ruleOperatorSuccessors = mRule.getPlan().getSuccessors(ruleOperator);
            if (ruleOperatorSuccessors == null) {
                // We've completed the match
                return true;
            }
            List<O> nodeSuccessors;
            nodeSuccessors = mPlan.getSuccessors(node);
            if ((nodeSuccessors == null) || (nodeSuccessors.size() != ruleOperatorSuccessors.size())) {
                // The ruleOperator has successors but the node does not
                // OR
                // the number of successors for the ruleOperator does not match
                // the number of successors for the node.
                return false;
            }
            boolean foundMatch = false;
            for (O nodeSuccessor : nodeSuccessors) {
                foundMatch |= continueMatch(nodeSuccessor, ruleOperatorSuccessors);
            }
            return foundMatch;
        }
        // We can arrive here either because we didn't match at this node or
        // further down the line. One way or another we need to remove ourselves
        // from the match vector and return false.
        // SMS - I don't think we need this as mMatch will be discarded anyway
        //mMatch.set(nodeNumber, null);
        return false;
    }
    return false;
}
}
package com.aspose.cells.model;

import java.util.ArrayList;
import java.util.List;

/**
 * Data-transfer object describing a workbook resource returned by the
 * Aspose Cells REST API. All properties are optional; unset values remain
 * {@code null} (the {@code Links} list defaults to an empty list).
 */
public class Workbook {

    // Field names keep the API's capitalized naming so (de)serialization
    // by property name continues to work unchanged.
    private String FileName = null;
    private List<Link> Links = new ArrayList<Link>();
    private LinkElement Worksheets = null;
    private LinkElement DefaultStyle = null;
    private LinkElement DocumentProperties = null;
    private LinkElement Names = null;
    private String IsWriteProtected = null;
    private String IsProtected = null;
    private String IsEncryption = null;
    private String Password = null;

    /** @return the workbook file name, or {@code null} if unset */
    public String getFileName() {
        return this.FileName;
    }

    /** @param fileName the workbook file name */
    public void setFileName(String fileName) {
        this.FileName = fileName;
    }

    /** @return the hypermedia links attached to this resource */
    public List<Link> getLinks() {
        return this.Links;
    }

    /** @param links the hypermedia links attached to this resource */
    public void setLinks(List<Link> links) {
        this.Links = links;
    }

    /** @return link to the worksheets collection, or {@code null} */
    public LinkElement getWorksheets() {
        return this.Worksheets;
    }

    /** @param worksheets link to the worksheets collection */
    public void setWorksheets(LinkElement worksheets) {
        this.Worksheets = worksheets;
    }

    /** @return link to the default style, or {@code null} */
    public LinkElement getDefaultStyle() {
        return this.DefaultStyle;
    }

    /** @param defaultStyle link to the default style */
    public void setDefaultStyle(LinkElement defaultStyle) {
        this.DefaultStyle = defaultStyle;
    }

    /** @return link to the document properties, or {@code null} */
    public LinkElement getDocumentProperties() {
        return this.DocumentProperties;
    }

    /** @param documentProperties link to the document properties */
    public void setDocumentProperties(LinkElement documentProperties) {
        this.DocumentProperties = documentProperties;
    }

    /** @return link to the defined names, or {@code null} */
    public LinkElement getNames() {
        return this.Names;
    }

    /** @param names link to the defined names */
    public void setNames(LinkElement names) {
        this.Names = names;
    }

    /** @return "true"/"false" style flag for write protection, or {@code null} */
    public String getIsWriteProtected() {
        return this.IsWriteProtected;
    }

    /** @param isWriteProtected write-protection flag */
    public void setIsWriteProtected(String isWriteProtected) {
        this.IsWriteProtected = isWriteProtected;
    }

    /** @return protection flag, or {@code null} */
    public String getIsProtected() {
        return this.IsProtected;
    }

    /** @param isProtected protection flag */
    public void setIsProtected(String isProtected) {
        this.IsProtected = isProtected;
    }

    /** @return encryption flag, or {@code null} */
    public String getIsEncryption() {
        return this.IsEncryption;
    }

    /** @param isEncryption encryption flag */
    public void setIsEncryption(String isEncryption) {
        this.IsEncryption = isEncryption;
    }

    /** @return the workbook password, or {@code null} */
    public String getPassword() {
        return this.Password;
    }

    /** @param password the workbook password */
    public void setPassword(String password) {
        this.Password = password;
    }

    /**
     * Renders every property on its own line for debugging.
     * The exact output format is preserved from the generated original.
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("class Workbook {\n");
        out.append(" FileName: ").append(FileName).append("\n")
           .append(" Links: ").append(Links).append("\n")
           .append(" Worksheets: ").append(Worksheets).append("\n")
           .append(" DefaultStyle: ").append(DefaultStyle).append("\n")
           .append(" DocumentProperties: ").append(DocumentProperties).append("\n")
           .append(" Names: ").append(Names).append("\n")
           .append(" IsWriteProtected: ").append(IsWriteProtected).append("\n")
           .append(" IsProtected: ").append(IsProtected).append("\n")
           .append(" IsEncryption: ").append(IsEncryption).append("\n")
           .append(" Password: ").append(Password).append("\n")
           .append("}\n");
        return out.toString();
    }
}
/*
 * Copyright 2005-2007 WSO2, Inc. (http://wso2.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.core.transports;

import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.xpath.AXIOMXPath;
import org.apache.axiom.util.blob.OverflowBlob;
import org.apache.axis2.AxisFault;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.transport.http.AxisServlet;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.util.XMLUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.protocol.HTTP;
import org.jaxen.SimpleNamespaceContext;
import org.jaxen.XPath;
import org.osgi.util.tracker.ServiceTracker;
import org.wso2.carbon.base.ServerConfiguration;
import org.wso2.carbon.core.CarbonThreadFactory;
import org.wso2.carbon.core.transports.metering.MeteredServletRequest;
import org.wso2.carbon.core.transports.metering.MeteredServletResponse;
import org.wso2.carbon.core.transports.metering.RequestDataPersister;
import org.wso2.carbon.micro.integrator.core.internal.CarbonCoreDataHolder;
import org.wso2.carbon.utils.ServerConstants;

import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.namespace.QName;
import java.io.IOException;
import java.util.Enumeration;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * This is the main HTTP and HTTPS transport. It dispatches special GET
 * requests to configured {@link HttpGetRequestProcessor}s, falls back to
 * REST processing otherwise, and (optionally) meters POST traffic by
 * wrapping requests/responses and periodically persisting the captured data.
 */
public class CarbonServlet extends AxisServlet {

    private static final long serialVersionUID = 3460108128756524161L;

    /** Query-string prefix -> processor, in registration order. */
    private Map<String, HttpGetRequestProcessor> getRequestProcessors =
            new LinkedHashMap<String, HttpGetRequestProcessor>();

    private static final QName ITEM_QN =
            new QName(ServerConstants.CARBON_SERVER_XML_NAMESPACE, "Item");
    private static final QName CLASS_QN =
            new QName(ServerConstants.CARBON_SERVER_XML_NAMESPACE, "Class");
    private static final Log log = LogFactory.getLog(CarbonServlet.class);

    /** Runs the metering persister periodically; only used when metering is on. */
    private ScheduledExecutorService requestDataPersisterScheduler = Executors
            .newScheduledThreadPool(25, new CarbonThreadFactory(new ThreadGroup("RequestDataPersisterThread")));
    private RequestDataPersisterTask requestDataPersister;
    private boolean isMeteringEnabled = false;

    public CarbonServlet(ConfigurationContext configurationContext) {
        this.configContext = configurationContext;
    }

    public void init(ServletConfig config) throws ServletException {
        this.axisConfiguration = this.configContext.getAxisConfiguration();
        this.servletConfig = config;
        populateGetRequestProcessors();
        configContext.setProperty("GETRequestProcessorMap", getRequestProcessors);
        initParams();
        String isMeteringEnabledStr =
                ServerConfiguration.getInstance().getFirstProperty("EnableMetering");
        if (isMeteringEnabledStr != null) {
            isMeteringEnabled = Boolean.parseBoolean(isMeteringEnabledStr);
        }
        if (isMeteringEnabled) {
            requestDataPersister = new RequestDataPersisterTask();
            // Run the persister on the scheduler only. The previous code also
            // started the same Runnable once on a raw Thread, which raced with
            // the scheduled executions of the identical task instance.
            requestDataPersisterScheduler.scheduleWithFixedDelay(requestDataPersister, 5, 5, TimeUnit.SECONDS);
        }
    }

    /**
     * Shuts down the persister thread pool so that servlet redeploys do not
     * leak scheduler threads. (The original never stopped the executor.)
     */
    @Override
    public void destroy() {
        requestDataPersisterScheduler.shutdown();
        super.destroy();
    }

    /**
     * Reads the HttpGetRequestProcessors section of carbon.xml and
     * instantiates one processor per configured entry.
     *
     * @throws ServletException if the configuration is incomplete or a
     *                          processor class cannot be instantiated
     */
    private void populateGetRequestProcessors() throws ServletException {
        try {
            OMElement docEle = XMLUtils.toOM(
                    CarbonCoreDataHolder.getInstance().getServerConfigurationService().getDocumentElement());
            if (docEle != null) {
                SimpleNamespaceContext nsCtx = new SimpleNamespaceContext();
                nsCtx.addNamespace("wsas", ServerConstants.CARBON_SERVER_XML_NAMESPACE);
                XPath xp = new AXIOMXPath("//wsas:HttpGetRequestProcessors/wsas:Processor");
                xp.setNamespaceContext(nsCtx);
                List nodeList = xp.selectNodes(docEle);
                for (Object aNodeList : nodeList) {
                    OMElement processorEle = (OMElement) aNodeList;
                    OMElement itemEle = processorEle.getFirstChildWithName(ITEM_QN);
                    if (itemEle == null) {
                        throw new ServletException("Required element, 'Item' not found!");
                    }
                    OMElement classEle = processorEle.getFirstChildWithName(CLASS_QN);
                    if (classEle == null) {
                        throw new ServletException("Required element, 'Class' not found!");
                    }
                    // getDeclaredConstructor().newInstance() replaces the
                    // deprecated Class.newInstance(); any reflective failure is
                    // still wrapped as ServletException by the catch below.
                    HttpGetRequestProcessor processor = (HttpGetRequestProcessor)
                            Class.forName(classEle.getText().trim()).getDeclaredConstructor().newInstance();
                    getRequestProcessors.put(itemEle.getText().trim(), processor);
                }
            }
        } catch (Exception e) {
            log.error("Cannot populate HTTPGetRequestProcessors", e);
            throw new ServletException(e);
        }
    }

    /**
     * WSAS specific GET implementation. A request is handed to a registered
     * processor when the query string starts with its key followed by
     * end-of-string, '&amp;' or '='; otherwise it is treated as REST.
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        initContextRoot(request);
        boolean isRequestHandled = false;
        try {
            String queryString = request.getQueryString();
            if (queryString != null) {
                for (String item : getRequestProcessors.keySet()) {
                    // startsWith is equivalent to indexOf(item) == 0 but does
                    // not scan the whole query string on a miss.
                    if (queryString.startsWith(item)
                            && (queryString.equals(item)
                                || queryString.indexOf('&') == item.length()
                                || queryString.indexOf('=') == item.length())) {
                        processWithGetProcessor(request, response, item);
                        isRequestHandled = true;
                        break;
                    }
                }
            }
            if (!isRequestHandled) {
                handleRestRequest(request, response); // Assume that this is a REST request
            }
        } catch (Exception e) {
            throw AxisFault.makeFault(e);
        }
    }

    /**
     * Runs the configured GET processor, buffering its output in an
     * {@link OverflowBlob} before copying headers/status/body onto the real
     * response. The blob is always released.
     */
    private void processWithGetProcessor(HttpServletRequest request,
                                         HttpServletResponse response,
                                         String item) throws Exception {
        OverflowBlob temporaryData = new OverflowBlob(256, 4048, "_servlet", ".dat");
        try {
            CarbonHttpRequest carbonHttpRequest = new CarbonHttpRequest(
                    "GET", request.getRequestURI(), request.getRequestURL().toString());
            Enumeration names = request.getParameterNames();
            while (names.hasMoreElements()) {
                Object name = names.nextElement();
                if (name instanceof String) { // instanceof already implies non-null
                    carbonHttpRequest.setParameter((String) name,
                                                   request.getParameter((String) name));
                }
            }
            carbonHttpRequest.setContextPath(request.getContextPath());
            carbonHttpRequest.setQueryString(request.getQueryString());

            CarbonHttpResponse carbonHttpResponse =
                    new CarbonHttpResponse(temporaryData.getOutputStream());
            (getRequestProcessors.get(item)).process(carbonHttpRequest, carbonHttpResponse, configContext);

            // Copy headers produced by the processor onto the servlet response.
            Map responseHeaderMap = carbonHttpResponse.getHeaders();
            for (Object obj : responseHeaderMap.entrySet()) {
                Map.Entry entry = (Map.Entry) obj;
                response.setHeader(entry.getKey().toString(), entry.getValue().toString());
            }
            // Status, error and redirect handling.
            response.setStatus(carbonHttpResponse.getStatusCode());
            if (carbonHttpResponse.isError()) {
                if (carbonHttpResponse.getStatusMessage() != null) {
                    response.sendError(carbonHttpResponse.getStatusCode(),
                                       carbonHttpResponse.getStatusMessage());
                } else {
                    response.sendError(carbonHttpResponse.getStatusCode());
                }
            }
            if (carbonHttpResponse.isRedirect()) {
                response.sendRedirect(carbonHttpResponse.getRedirect());
            }
            if (carbonHttpResponse.getHeaders().get(HTTP.CONTENT_TYPE) != null) {
                response.setContentType(carbonHttpResponse.getHeaders().get(HTTP.CONTENT_TYPE));
            }
            temporaryData.writeTo(response.getOutputStream());
        } finally {
            temporaryData.release();
        }
    }

    /**
     * POST handling. When metering is enabled, the request and response are
     * wrapped so their sizes can be recorded and later persisted.
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        if (isMeteringEnabled) {
            final MeteredServletRequest wrappedRequest = new MeteredServletRequest(request);
            final MeteredServletResponse wrappedResponse = new MeteredServletResponse(response);
            super.doPost(wrappedRequest, wrappedResponse);
            // Queue the wrapped pair; the scheduled task persists it later.
            requestDataPersister.addRequestResponse(wrappedRequest, wrappedResponse);
        } else {
            super.doPost(request, response);
        }
    }

    /**
     * Periodic task that drains queued request/response pairs and persists
     * them. scheduleWithFixedDelay guarantees the task never runs
     * concurrently with itself, so this class is effectively a
     * single-consumer drain over a multi-producer queue.
     */
    private class RequestDataPersisterTask implements Runnable {

        // Producers (doPost) append; only this task removes.
        private final List<RequestResponse> list = new CopyOnWriteArrayList<RequestResponse>();

        public void addRequestResponse(MeteredServletRequest wrappedRequest,
                                       MeteredServletResponse wrappedResponse) {
            list.add(new RequestResponse(wrappedRequest, wrappedResponse));
        }

        @Override
        public void run() {
            try {
                // Drain at most 200 entries per execution so one run cannot
                // monopolize the scheduler thread; leftovers are picked up on
                // the next scheduled run. Draining from the head avoids the
                // original's remove-while-iterating over the COW list.
                int itemsProcessed = 0;
                while (itemsProcessed < 200 && !list.isEmpty()) {
                    RequestResponse requestResponse = list.get(0);
                    persistRequestData(requestResponse.getWrappedRequest(),
                                       requestResponse.getWrappedResponse());
                    // Remove only after a successful persist, matching the
                    // original's retry-on-failure behavior.
                    list.remove(0);
                    itemsProcessed++;
                }
            } catch (Throwable e) {
                log.error("Cannot persist request data", e);
            }
        }
    }

    /** Immutable pairing of a metered request with its metered response. */
    private static class RequestResponse {
        private final MeteredServletRequest wrappedRequest;
        private final MeteredServletResponse wrappedResponse;

        private RequestResponse(MeteredServletRequest wrappedRequest,
                                MeteredServletResponse wrappedResponse) {
            this.wrappedRequest = wrappedRequest;
            this.wrappedResponse = wrappedResponse;
        }

        public MeteredServletRequest getWrappedRequest() {
            return wrappedRequest;
        }

        public MeteredServletResponse getWrappedResponse() {
            return wrappedResponse;
        }
    }

    /**
     * Looks up the (optional) RequestDataPersister OSGi service and hands the
     * wrapped request/response to it. A no-op when no service is registered.
     * NOTE(review): opening/closing a ServiceTracker per call is relatively
     * expensive; kept as-is because the tracker lifecycle is tied to this
     * call in the original design.
     */
    protected void persistRequestData(MeteredServletRequest wrappedRequest,
                                      MeteredServletResponse wrappedResponse) {
        RequestDataPersister requestDataPersister = null;
        ServiceTracker meteringDataPersistTracker =
                new ServiceTracker(CarbonCoreDataHolder.getInstance().getBundleContext(),
                                   RequestDataPersister.class.getName(), null);
        meteringDataPersistTracker.open();
        try {
            requestDataPersister = (RequestDataPersister) meteringDataPersistTracker.getService();
        } finally {
            meteringDataPersistTracker.close();
        }
        if (requestDataPersister != null) {
            requestDataPersister.persist(wrappedRequest, wrappedResponse);
        }
    }

    /** Routes a GET to the REST processor unless REST is disabled. */
    protected void handleRestRequest(HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException {
        if (!disableREST) {
            new RestRequestProcessor(HTTPConstants.HTTP_METHOD_GET, request, response).processURLRequest();
        } else {
            showRestDisabledErrorMessage(response);
        }
    }

    /** Registers a GET processor under the given query-string key. */
    public void addGetRequestProcessor(String key, HttpGetRequestProcessor processor) {
        getRequestProcessors.put(key, processor);
    }

    /** Removes the GET processor registered under the given key. */
    public void removeGetRequestProcessor(String key) {
        getRequestProcessors.remove(key);
    }
}
/*
 * Copyright 2000-2003 by Model N, Inc. All Rights Reserved.
 *
 * This software is the confidential and proprietary information
 * of Model N, Inc ("Confidential Information"). You shall not
 * disclose such Confidential Information and shall use it only
 * in accordance with the terms of the license agreement you
 * entered into with Model N, Inc.
 */
package com.modeln.build.ctrl.forms;

import com.modeln.build.sourcecontrol.CMnCheckIn;
import com.modeln.build.sourcecontrol.CMnGitCheckIn;
import com.modeln.build.sourcecontrol.CMnPerforceCheckIn;

import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Hashtable;
import javax.servlet.http.HttpServletRequest;

import com.modeln.build.web.tags.TextTag;
import com.modeln.build.web.tags.SelectTag;

/**
 * This provides base functionality for all HTML forms.
 *
 * @author Shawn Stafford
 */
public class CMnBaseForm {

    /** Source control systems whose check-ins this form can link to. */
    public enum SourceControl {
        PERFORCE, GIT
    }

    /** Placeholder for a build ID number in the build URL */
    public static final String BUILD_ID_TOKEN = "@@BUILD@@";

    /** Placeholder for a bug number in the bug URL */
    public static final String BUG_TOKEN = "@@BUG@@";

    /** Placeholder for a changelist number in the changelist URL */
    public static final String CHANGELIST_TOKEN = "@@CL@@";

    /** Placeholder for a git repository in the web URL */
    public static final String GIT_PROJECT_TOKEN = "@@PROJECT@@";

    /** Placeholder for a git SHA HASH in the URL */
    public static final String GIT_SHA_TOKEN = "@@HASH@@";

    /** Color used when highlighting errors */
    public static final String ERROR_BGLIGHT = "#FFCCCC";

    /** Color used when highlighting emphasized errors */
    public static final String ERROR_BGDARK = "#FF3333";

    /** Color used when highlighting warnings */
    public static final String WARNING_BGLIGHT = "#FFFFCC";

    /** Color used when highlighting emphasized warnings */
    public static final String WARNING_BGDARK = "#FFFF33";

    /** Color used when highlighting errors */
    public static final String ERRORLONG_BGLIGHT = "#CC99FF";

    /** Color used when highlighting emphasized errors */
    public static final String ERRORLONG_BGDARK = "#CC66FF";

    /** Color used when highlighting skipped tests */
    public static final String ERRORSKIP_BGLIGHT = "#CC9999";

    /** Color used when highlighting emphasized skipped tests */
    public static final String ERRORSKIP_BGDARK = "#CC6666";

    /** Default background color */
    public static final String DEFAULT_BGLIGHT = "#FFFFFF";

    /** Default header color */
    public static final String DEFAULT_BGDARK = "#CCCCCC";

    /** Border color */
    public static final String DEFAULT_BGBORDER = "#000000";

    /** Default text displayed on the submit button when input is enabled */
    public static final String DEFAULT_BUTTON_TEXT = "Update";

    /** List of input fields which contain errors */
    public static final String INPUT_ERROR_DATA = "errors";

    /** Form field that indicates the status of the page */
    public static final String FORM_STATUS_LABEL = "mode";

    /** Form status value indicating that the form data should be updated */
    public static final String UPDATE_DATA = "update";

    /** Form status value indicating that the form data should be deleted */
    public static final String DELETE_DATA = "delete";

    /** Form status value indicating that the form data should be in view-only mode */
    public static final String VIEW_DATA = "view";

    // NOTE(review): SimpleDateFormat is not thread-safe; these shared static
    // instances are part of the public interface, so they are documented
    // rather than changed. Callers should synchronize or clone before use.

    /** Short form for displaying dates */
    public static final SimpleDateFormat shortDateFormat = new SimpleDateFormat("yyyy-MM-dd 'at' HH:mm");

    /** Long form for displaying dates */
    public static final SimpleDateFormat fullDateFormat = new SimpleDateFormat("EEE, MMM dd, yyyy 'at' HH:mm:ss");

    /** Date format used when constructing SQL queries */
    protected static final SimpleDateFormat sqlDateFormat = new SimpleDateFormat("yyyy-MM-dd");

    /** Determines whether the form uses the get or post method to submit data */
    protected boolean postMethodEnabled = true;

    /** Determines whether the form will be rendered with input fields. */
    protected boolean inputEnabled = false;

    /** Determines whether the form will be rendered with admin functionality. */
    protected boolean adminEnabled = false;

    /** List of external URLs */
    protected Hashtable<String,String> externalUrls = null;

    /** The URL used when submitting form input. */
    protected URL formUrl = null;

    /** The URL used when accessing images */
    protected URL imageUrl = null;

    /** The URL used when performing search operations. */
    protected URL searchUrl = null;

    /** The URL used to delete a unittest suite */
    protected URL deleteUrl = null;

    /** The URL used for admin actions */
    protected URL adminUrl = null;

    /**
     * Mapping of input field names to error messages. If an input
     * field has a validation error, the field name and error message
     * should be placed in this data structure so the error can be
     * displayed to the user and the problem can be corrected.
     */
    protected Hashtable<String,String> formErrors = null;

    /** Text displayed on the submit button */
    private String buttonText = DEFAULT_BUTTON_TEXT;

    /**
     * Construct an HTML form.
     *
     * @param form   URL to use when submitting form input
     * @param images URL to use when constructing image links
     */
    public CMnBaseForm(URL form, URL images) {
        formUrl = form;
        imageUrl = images;
    }

    /**
     * Set the input fields by examining the HTTP request to see if
     * a value was submitted.
     *
     * @param req HTTP request
     */
    public void setValues(HttpServletRequest req) {
        formErrors = (Hashtable) req.getAttribute(INPUT_ERROR_DATA);
    }

    /**
     * Set the list of form errors associated with the input names.
     *
     * @param errors List of input elements and the associated errors
     */
    public void setFormErrors(Hashtable<String,String> errors) {
        formErrors = errors;
    }

    /**
     * Return true if there are errors associated with the form input.
     *
     * @return TRUE if errors are found
     */
    public boolean hasFormErrors() {
        return ((formErrors != null) && (formErrors.size() > 0));
    }

    /**
     * Return the list of errors associated with the form input.
     *
     * @return form errors
     */
    public Hashtable<String,String> getFormErrors() {
        return formErrors;
    }

    /**
     * Return the form error associated with the form element.
     * If the specified element has an error message, the message
     * will be returned. A null return means there was no error
     * recorded for that input element.
     *
     * @param name Form element name
     * @return Error message if the form element contains an error
     */
    public String getFormError(String name) {
        String msg = null;
        if ((formErrors != null) && (name != null)) {
            msg = (String) formErrors.get(name);
        }
        return msg;
    }

    /**
     * Set the text on the submit button to something other than the default value.
     *
     * @param text Button text
     */
    public void setButtonText(String text) {
        buttonText = text;
    }

    /**
     * Sets the URL to be used to delete individual test suites.
     *
     * @param url Link to the suite delete command
     */
    public void setDeleteUrl(URL url) {
        deleteUrl = url;
    }

    /**
     * Sets the URL to be used to perform admin operations.
     *
     * @param url Link to the admin command
     */
    public void setAdminUrl(URL url) {
        adminUrl = url;
    }

    /**
     * Return the URL for admin operations.
     *
     * @return Admin URL, or null if none was set
     */
    public String getAdminUrl() {
        if (adminUrl != null) {
            return adminUrl.toString();
        } else {
            return null;
        }
    }

    /**
     * Return the URL for form submissions.
     *
     * @return Form submission URL, or null if none was set
     */
    public String getFormUrl() {
        if (formUrl != null) {
            return formUrl.toString();
        } else {
            return null;
        }
    }

    /**
     * Return the base URL for retrieving images.
     *
     * @return Base image URL, or null if none was set
     */
    public String getImageUrl() {
        if (imageUrl != null) {
            return imageUrl.toString();
        } else {
            return null;
        }
    }

    /**
     * Enables or disables administrative functionality.
     *
     * @param enabled TRUE to enable administrative functionality
     */
    public void setAdminMode(boolean enabled) {
        adminEnabled = enabled;
    }

    /**
     * Determines if the administrative functionality is enabled.
     *
     * @return TRUE if administrative functionality is enabled.
     */
    public boolean getAdminMode() {
        return adminEnabled;
    }

    /**
     * Enables or disables form input.
     *
     * @param enabled TRUE to enable form input
     */
    public void setInputMode(boolean enabled) {
        inputEnabled = enabled;
    }

    /**
     * Determines if form input is allowed.
     *
     * @return TRUE if form input is enabled.
     */
    public boolean getInputMode() {
        return inputEnabled;
    }

    /**
     * Enables or disables the POST method for submitting form data.
     *
     * @param enabled TRUE to use the POST method, FALSE to use the GET method
     */
    public void setPostEnabled(boolean enabled) {
        postMethodEnabled = enabled;
    }

    /**
     * Determines if form data is submitted using the POST method.
     *
     * @return TRUE if form data is submitted using the POST method
     */
    public boolean getPostEnabled() {
        return postMethodEnabled;
    }

    /**
     * Sets the URL to be used to perform search operations.
     *
     * @param url Link to the search command
     */
    public void setSearchUrl(URL url) {
        searchUrl = url;
    }

    /**
     * Return the URL for search queries.
     *
     * @return Search submission URL, or null if none was set
     */
    public String getSearchUrl() {
        if (searchUrl != null) {
            return searchUrl.toString();
        } else {
            return null;
        }
    }

    /**
     * Set the list of external URLs used for linking patch data.
     *
     * @param urls List of external URLs
     */
    public void setExternalUrls(Hashtable<String,String> urls) {
        externalUrls = urls;
    }

    /**
     * Return the external URL corresponding to the specified
     * URL property name. These properties can be found in the
     * application config file in the following format:
     * <pre>
     * urls=name1,name2
     * url.name1=http://hostname1/@@PARAM@@
     * url.name2=http://hostname2/@@PARAM@@
     * </pre>
     *
     * @param name URL property name
     * @return URL property value, or null if not configured
     */
    public String getExternalUrl(String name) {
        String value = null;
        if (externalUrls != null) {
            value = externalUrls.get(name);
        }
        return value;
    }

    /**
     * Return the URL for downloading builds.
     *
     * @param path Path to the build
     * @return Download URL, or null if the "download" URL is not configured
     */
    public String getDownloadUrl(String path) {
        String url = getExternalUrl("download");
        // Append the path to the URL if possible, else return null
        if (url != null) {
            url = url + path;
        }
        return url;
    }

    /**
     * Return the URL for viewing bug information.
     *
     * @param bugId Bug number to use in URL
     * @return Bug URL, or null if the "sdtracker" URL is not configured
     */
    public String getBugUrl(String bugId) {
        String url = getExternalUrl("sdtracker");
        // Replace the URL tokens
        if (url != null) {
            url = url.replaceAll(BUG_TOKEN, bugId);
        }
        return url;
    }

    /**
     * Return the URL for viewing the patch build information.
     *
     * @param buildId Build ID to use in the URL
     * @return Service patch build URL, or null if not configured
     */
    public String getPatchBuildUrl(String buildId) {
        String url = getExternalUrl("patchbuild");
        // Replace the URL tokens
        if (url != null) {
            url = url.replaceAll(BUILD_ID_TOKEN, buildId);
        }
        return url;
    }

    /**
     * Return the URL for viewing the patch build diff information.
     * Each build ID replaces one occurrence of the build token, in order.
     *
     * @param buildId Build IDs to use in the URL
     * @return Service patch build diff URL, or null if not configured
     */
    public String getPatchBuildDiffUrl(String[] buildId) {
        String url = getExternalUrl("patchdiff");
        // Replace the URL tokens one at a time, in argument order
        if (url != null) {
            for (int idx = 0; idx < buildId.length; idx++) {
                url = url.replaceFirst(BUILD_ID_TOKEN, buildId[idx]);
            }
        }
        return url;
    }

    /**
     * Return the URL for tech owners to provide feedback on the patch build.
     *
     * @param buildId Build ID to use in the URL
     * @return Service patch build review URL, or null if not configured
     */
    public String getPatchBuildReviewUrl(String buildId) {
        String url = getExternalUrl("patchreview");
        // Replace the URL tokens
        if (url != null) {
            url = url.replaceAll(BUILD_ID_TOKEN, buildId);
        }
        return url;
    }

    /**
     * Return the URL for viewing changelist information.
     *
     * @param cl Check-in information
     * @return Changelist URL
     */
    public String getChangelistUrl(CMnCheckIn cl) {
        return getChangelistUrl("", cl);
    }

    /**
     * Return the URL for viewing changelist information.
     *
     * @param cl Check-in information
     * @return Changelist URL
     */
    public String getPatchChangelistUrl(CMnCheckIn cl) {
        return getChangelistUrl("patch", cl);
    }

    /**
     * Return the URL for viewing changelist information. The external URL
     * property is selected by check-in type ("git" or "perforce") with the
     * given prefix prepended; tokens in the property value are replaced with
     * values from the check-in.
     *
     * @param prefix Prefix appended to external URL property name
     * @param cl     Check-in information
     * @return Changelist URL, or null when the check-in type or URL is unknown
     */
    private String getChangelistUrl(String prefix, CMnCheckIn cl) {
        String sysurl = null;
        String url = null;
        if ((cl != null) && (externalUrls != null)) {
            if (cl instanceof CMnGitCheckIn) {
                sysurl = externalUrls.get(prefix + "git");
                if (sysurl != null) {
                    url = sysurl.replaceAll(GIT_SHA_TOKEN, cl.getId());
                    url = url.replaceAll(GIT_PROJECT_TOKEN, ((CMnGitCheckIn)cl).getRepository());
                }
            } else if (cl instanceof CMnPerforceCheckIn) {
                sysurl = externalUrls.get(prefix + "perforce");
                if (sysurl != null) {
                    url = sysurl.replaceAll(CHANGELIST_TOKEN, cl.getId());
                }
            }
        }
        return url;
    }

    /**
     * Return the URL for deleting individual test suites.
     *
     * @return Delete URL, or null if none was set
     */
    public String getDeleteUrl() {
        if (deleteUrl != null) {
            return deleteUrl.toString();
        } else {
            return null;
        }
    }

    /**
     * Formatting method for displaying time (milliseconds) in "hr m s" format.
     * Note: when hours is zero the result begins with a leading space
     * (e.g. " 05s"); callers rely on this layout.
     *
     * @param time Time value to be formatted, measured in milliseconds
     * @return Formatted string, returned in "#hr #m #s" format
     */
    public static String formatTime(long time) {
        String timestamp="";
        long seconds = (time / 1000) % 60;
        long minutes = (time / (60 * 1000)) % 60;
        long hours = time / (3600 * 1000);

        // Hours
        if (hours > 0) {
            timestamp = hours + "hr";
        }

        // Minutes (zero-padded only when an hours field precedes them)
        if ((hours > 0) || (minutes > 0)) {
            if ((hours > 0) && (minutes < 10)) {
                timestamp = timestamp + " 0" + minutes + "m";
            } else {
                timestamp = timestamp + " " + minutes + "m";
            }
        }

        // Seconds (always present, always zero-padded)
        if (seconds < 10) {
            timestamp = timestamp + " 0" + seconds + "s";
        } else {
            timestamp = timestamp + " " + seconds + "s";
        }
        return timestamp;
    }

    /**
     * Create a table which defines a title and border for the contents.
* * @param title Text to be placed in the title bar * @param content Content of the table */ public String getTitledBorder(String title, String content) { return getTitledBorder(title, content, false); } /** * Create a table which defines a title and border for the contents. * * @param title Text to be placed in the title bar * @param content Content of the table * @param center True if the content should be centered in the border */ public String getTitledBorder(String title, String content, boolean center) { StringBuffer html = new StringBuffer(); if (inputEnabled) { String method = null; if (postMethodEnabled) { method = "post"; } else { method = "get"; } html.append("<form method=\"" + method + "\" action=\"" + formUrl + "\">\n"); } html.append("<!-- ==================================================================== -->\n"); html.append("<table border=\"0\" cellspacing=\"0\" cellpadding=\"1\" width=\"100%\">\n"); html.append(" <tr>\n"); html.append(" <td bgcolor=\"" + DEFAULT_BGBORDER + "\">\n"); html.append(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"2\" width=\"100%\" bgcolor=\"" + DEFAULT_BGDARK + "\">\n"); html.append(" <tr>\n"); html.append(" <td><b>" + title + "</b></td>\n"); if (inputEnabled) { html.append(" <td align=\"right\">"); html.append("<input type=\"hidden\" name=\"" + FORM_STATUS_LABEL + "\" value=\"" + UPDATE_DATA + "\">"); html.append("<input type=\"submit\" value=\"" + buttonText + "\" tabindex=\"999\">"); html.append("</td>\n"); } html.append(" </tr>\n"); html.append(" </table>\n"); html.append(" </td>\n"); html.append(" </tr>\n"); html.append(" <tr>\n"); html.append(" <td bgcolor=\"" + DEFAULT_BGBORDER + "\">\n"); html.append(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"2\" width=\"100%\" bgcolor=\"" + DEFAULT_BGLIGHT + "\">\n"); html.append(" <tr>\n"); String align = "left"; if (center) { align = "center"; } html.append(" <td align=\"" + align + "\">\n"); html.append(content); html.append(" </td>\n"); html.append(" 
</tr>\n"); html.append(" </table>\n"); html.append(" </td>\n"); html.append(" </tr>\n"); html.append("</table>\n"); if (inputEnabled) { html.append("</form>\n"); } return html.toString(); } /** * Create a table which defines a title and border for the contents. * * @param title Text to be placed in the title bar * @param content Content of the table * @param linkUrl Link to be placed in the right side of the title bar * @param linkName Link to be placed in the right side of the title bar */ public String getTitledBorderLink(String title, String content, URL linkUrl, String linkName) { StringBuffer html = new StringBuffer(); if (inputEnabled) { html.append("<form method=\"post\" action=\"" + formUrl + "\">\n"); } html.append("<!-- ==================================================================== -->\n"); html.append("<table border=\"0\" cellspacing=\"0\" cellpadding=\"1\" width=\"100%\">\n"); html.append(" <tr>\n"); html.append(" <td bgcolor=\"" + DEFAULT_BGBORDER + "\">\n"); html.append(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"2\" width=\"100%\" bgcolor=\"" + DEFAULT_BGDARK + "\">\n"); html.append(" <tr>\n"); html.append(" <td><b>" + title + "</b></td>\n"); html.append(" <td align=\"right\">"); if (linkUrl != null) { html.append("<a href=" + linkUrl.toString() + ">" + linkName + "</a>"); } else { html.append(linkName); } html.append("</td>\n"); if (inputEnabled) { html.append(" <td align=\"right\">"); html.append("<input type=\"hidden\" name=\"" + FORM_STATUS_LABEL + "\" value=\"" + UPDATE_DATA + "\">"); html.append("<input type=\"submit\" value=\"" + buttonText + "\" tabindex=\"999\">"); html.append("</td>\n"); } html.append(" </tr>\n"); html.append(" </table>\n"); html.append(" </td>\n"); html.append(" </tr>\n"); html.append(" <tr>\n"); html.append(" <td bgcolor=\"" + DEFAULT_BGBORDER + "\">\n"); html.append(" <table border=\"0\" cellspacing=\"0\" cellpadding=\"2\" width=\"100%\" bgcolor=\"" + DEFAULT_BGLIGHT + "\">\n"); html.append(" <tr>\n"); 
html.append(" <td>\n"); html.append(content); html.append(" </td>\n"); html.append(" </tr>\n"); html.append(" </table>\n"); html.append(" </td>\n"); html.append(" </tr>\n"); html.append("</table>\n"); if (inputEnabled) { html.append("</form>\n"); } return html.toString(); } }