gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.grpc;
import java.net.URI;
import io.grpc.internal.GrpcUtil;
import io.grpc.netty.NegotiationType;
import io.grpc.netty.NettyChannelBuilder;
import org.apache.camel.component.grpc.auth.jwt.JwtAlgorithm;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
@UriParams
public class GrpcConfiguration {

    @UriPath
    @Metadata(required = true)
    private String host;
    @UriPath
    @Metadata(required = true)
    private int port;
    @UriPath
    @Metadata(required = true)
    private String service;
    @UriParam(label = "producer")
    private String method;
    @UriParam(label = "security", defaultValue = "PLAINTEXT")
    private NegotiationType negotiationType = NegotiationType.PLAINTEXT;
    @UriParam(label = "security", defaultValue = "NONE")
    private GrpcAuthType authenticationType = GrpcAuthType.NONE;
    @UriParam(label = "security", defaultValue = "HMAC256")
    private JwtAlgorithm jwtAlgorithm = JwtAlgorithm.HMAC256;
    @UriParam(label = "security", secret = true)
    private String jwtSecret;
    @UriParam(label = "security")
    private String jwtIssuer;
    @UriParam(label = "security")
    private String jwtSubject;
    @UriParam(label = "security")
    private String serviceAccountResource;
    @UriParam(label = "security")
    private String keyCertChainResource;
    @UriParam(label = "security")
    private String keyResource;
    @UriParam(label = "security", secret = true)
    private String keyPassword;
    @UriParam(label = "security")
    private String trustCertCollectionResource;
    @UriParam(label = "producer", defaultValue = "SIMPLE")
    private GrpcProducerStrategy producerStrategy = GrpcProducerStrategy.SIMPLE;
    @UriParam(label = "producer")
    private String streamRepliesTo;
    @UriParam(label = "producer")
    private String userAgent;
    @UriParam(label = "consumer", defaultValue = "PROPAGATION")
    private GrpcConsumerStrategy consumerStrategy = GrpcConsumerStrategy.PROPAGATION;
    @UriParam(label = "consumer", defaultValue = "false")
    private boolean forwardOnCompleted;
    @UriParam(label = "consumer", defaultValue = "false")
    private boolean forwardOnError;
    @UriParam(defaultValue = "" + NettyChannelBuilder.DEFAULT_FLOW_CONTROL_WINDOW)
    private int flowControlWindow = NettyChannelBuilder.DEFAULT_FLOW_CONTROL_WINDOW;
    @UriParam(defaultValue = "" + GrpcUtil.DEFAULT_MAX_MESSAGE_SIZE)
    private int maxMessageSize = GrpcUtil.DEFAULT_MAX_MESSAGE_SIZE;
    @UriParam(label = "consumer", defaultValue = "" + Integer.MAX_VALUE)
    private int maxConcurrentCallsPerConnection = Integer.MAX_VALUE;

    /**
     * Fully qualified service name from the protocol buffer descriptor file (package dot service definition name)
     */
    public String getService() {
        return service;
    }

    public void setService(String service) {
        this.service = service;
    }

    /**
     * gRPC method name
     */
    public String getMethod() {
        return method;
    }

    public void setMethod(String method) {
        this.method = method;
    }

    /**
     * The gRPC server host name. This is localhost or 0.0.0.0 when being a consumer or remote server host name when
     * using producer.
     */
    public String getHost() {
        return host;
    }

    public void setHost(String host) {
        this.host = host;
    }

    /**
     * The gRPC local or remote server port
     */
    public int getPort() {
        return port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    /**
     * Identifies the security negotiation type used for HTTP/2 communication
     */
    public void setNegotiationType(NegotiationType negotiationType) {
        this.negotiationType = negotiationType;
    }

    public NegotiationType getNegotiationType() {
        return negotiationType;
    }

    /**
     * Authentication method type in advance to the SSL/TLS negotiation
     */
    public GrpcAuthType getAuthenticationType() {
        return authenticationType;
    }

    public void setAuthenticationType(GrpcAuthType authenticationType) {
        this.authenticationType = authenticationType;
    }

    /**
     * JSON Web Token sign algorithm
     */
    public JwtAlgorithm getJwtAlgorithm() {
        return jwtAlgorithm;
    }

    public void setJwtAlgorithm(JwtAlgorithm jwtAlgorithm) {
        this.jwtAlgorithm = jwtAlgorithm;
    }

    /**
     * JSON Web Token secret
     */
    public String getJwtSecret() {
        return jwtSecret;
    }

    public void setJwtSecret(String jwtSecret) {
        this.jwtSecret = jwtSecret;
    }

    /**
     * JSON Web Token issuer
     */
    public String getJwtIssuer() {
        return jwtIssuer;
    }

    public void setJwtIssuer(String jwtIssuer) {
        this.jwtIssuer = jwtIssuer;
    }

    /**
     * JSON Web Token subject
     */
    public String getJwtSubject() {
        return jwtSubject;
    }

    public void setJwtSubject(String jwtSubject) {
        this.jwtSubject = jwtSubject;
    }

    /**
     * Service Account key file in JSON format resource link supported by the Google Cloud SDK
     */
    public String getServiceAccountResource() {
        return serviceAccountResource;
    }

    public void setServiceAccountResource(String serviceAccountResource) {
        this.serviceAccountResource = serviceAccountResource;
    }

    /**
     * The X.509 certificate chain file resource in PEM format link
     */
    public void setKeyCertChainResource(String keyCertChainResource) {
        this.keyCertChainResource = keyCertChainResource;
    }

    public String getKeyCertChainResource() {
        return keyCertChainResource;
    }

    /**
     * The PKCS#8 private key file resource in PEM format link
     */
    public void setKeyResource(String keyResource) {
        this.keyResource = keyResource;
    }

    public String getKeyResource() {
        return keyResource;
    }

    /**
     * The PKCS#8 private key file password
     */
    public String getKeyPassword() {
        return keyPassword;
    }

    public void setKeyPassword(String keyPassword) {
        this.keyPassword = keyPassword;
    }

    /**
     * The trusted certificates collection file resource in PEM format for verifying the remote endpoint's certificate
     */
    public void setTrustCertCollectionResource(String trustCertCollectionResource) {
        this.trustCertCollectionResource = trustCertCollectionResource;
    }

    public String getTrustCertCollectionResource() {
        return trustCertCollectionResource;
    }

    /**
     * This option specifies the top-level strategy for processing service requests and responses in streaming mode. If
     * an aggregation strategy is selected, all requests will be accumulated in the list, then transferred to the flow,
     * and the accumulated responses will be sent to the sender. If a propagation strategy is selected, request is sent
     * to the stream, and the response will be immediately sent back to the sender.
     */
    public GrpcConsumerStrategy getConsumerStrategy() {
        return consumerStrategy;
    }

    public void setConsumerStrategy(GrpcConsumerStrategy consumerStrategy) {
        this.consumerStrategy = consumerStrategy;
    }

    /**
     * Determines if onCompleted events should be pushed to the Camel route.
     */
    public void setForwardOnCompleted(boolean forwardOnCompleted) {
        this.forwardOnCompleted = forwardOnCompleted;
    }

    public boolean isForwardOnCompleted() {
        return forwardOnCompleted;
    }

    /**
     * Determines if onError events should be pushed to the Camel route. Exceptions will be set as message body.
     */
    public void setForwardOnError(boolean forwardOnError) {
        this.forwardOnError = forwardOnError;
    }

    public boolean isForwardOnError() {
        return forwardOnError;
    }

    public GrpcProducerStrategy getProducerStrategy() {
        return producerStrategy;
    }

    /**
     * The mode used to communicate with a remote gRPC server. In SIMPLE mode a single exchange is translated into a
     * remote procedure call. In STREAMING mode all exchanges will be sent within the same request (input and output of
     * the recipient gRPC service must be of type 'stream').
     */
    public void setProducerStrategy(GrpcProducerStrategy producerStrategy) {
        this.producerStrategy = producerStrategy;
    }

    public String getStreamRepliesTo() {
        return streamRepliesTo;
    }

    /**
     * When using STREAMING client mode, it indicates the endpoint where responses should be forwarded.
     */
    public void setStreamRepliesTo(String streamRepliesTo) {
        this.streamRepliesTo = streamRepliesTo;
    }

    /**
     * The user agent header passed to the server
     */
    public String getUserAgent() {
        return userAgent;
    }

    public void setUserAgent(String userAgent) {
        this.userAgent = userAgent;
    }

    /**
     * The HTTP/2 flow control window size (MiB)
     */
    public int getFlowControlWindow() {
        return flowControlWindow;
    }

    public void setFlowControlWindow(int flowControlWindow) {
        this.flowControlWindow = flowControlWindow;
    }

    /**
     * The maximum message size allowed to be received/sent (MiB)
     */
    public void setMaxMessageSize(int maxMessageSize) {
        this.maxMessageSize = maxMessageSize;
    }

    public int getMaxMessageSize() {
        return maxMessageSize;
    }

    /**
     * The maximum number of concurrent calls permitted for each incoming server connection
     */
    public void setMaxConcurrentCallsPerConnection(int maxConcurrentCallsPerConnection) {
        this.maxConcurrentCallsPerConnection = maxConcurrentCallsPerConnection;
    }

    public int getMaxConcurrentCallsPerConnection() {
        return maxConcurrentCallsPerConnection;
    }

    /**
     * Populates host, port and service from the endpoint URI
     * (e.g. {@code grpc://host:port/package.ServiceName}).
     * <p>
     * The port is only set when the URI actually carries one, so a previously
     * configured port is preserved. Likewise the service is only set when the
     * URI has a non-empty path: the original code called
     * {@code uri.getPath().substring(1)} unconditionally, which threw a
     * {@link NullPointerException} for opaque/path-less URIs and a
     * {@link StringIndexOutOfBoundsException} for an empty path.
     *
     * @param uri the parsed endpoint URI; must not be null
     */
    public void parseURI(URI uri) {
        setHost(uri.getHost());
        if (uri.getPort() != -1) {
            setPort(uri.getPort());
        }
        String path = uri.getPath();
        if (path != null && path.length() > 1) {
            // strip the leading '/' to obtain "package.ServiceName"
            setService(path.substring(1));
        }
    }
}
| |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.dcm.qrscp;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.management.ObjectName;
import javax.security.auth.Subject;
import javax.xml.transform.Templates;
import org.dcm4che.data.Command;
import org.dcm4che.data.Dataset;
import org.dcm4che.data.DcmElement;
import org.dcm4che.data.DcmObjectFactory;
import org.dcm4che.dict.Status;
import org.dcm4che.dict.Tags;
import org.dcm4che.net.AAssociateAC;
import org.dcm4che.net.AAssociateRQ;
import org.dcm4che.net.ActiveAssociation;
import org.dcm4che.net.Association;
import org.dcm4che.net.AssociationListener;
import org.dcm4che.net.DcmServiceBase;
import org.dcm4che.net.DcmServiceException;
import org.dcm4che.net.Dimse;
import org.dcm4che.net.DimseListener;
import org.dcm4che.net.ExtNegotiation;
import org.dcm4che.net.PDU;
import org.dcm4chex.archive.common.PIDWithIssuer;
import org.dcm4chex.archive.ejb.jdbc.QueryCmd;
import org.dcm4chex.archive.perf.PerfCounterEnum;
import org.dcm4chex.archive.perf.PerfMonDelegate;
import org.dcm4chex.archive.perf.PerfPropertyEnum;
import org.jboss.logging.Logger;
/**
 * DICOM C-FIND SCP: answers query requests (Patient/Study/Series/Image level)
 * against the archive database, with optional PIX (patient identifier
 * cross-referencing) expansion, attribute coercion via XSL templates, and
 * per-association performance monitoring.
 *
 * @author Gunter.Zeilinger@tiani.com
 * @version $Revision: 15660 $ $Date: 2008-02-27 22:38:35 +0100 (Wed, 27 Feb
 *          2008) $
 * @since 31.08.2003
 */
public class FindScp extends DcmServiceBase implements AssociationListener {

    // Indexes into the String[] pairs returned by queryCorrespondingPIDs
    protected static final int PID = 0;
    protected static final int ISSUER = 1;
    // Byte offset of the fuzzy-matching flag within the extended negotiation info
    private static final int FUZZY_MATCHING = 2;
    // XSL templates / XML log file suffixes for request and response coercion/logging
    private static final String QUERY_XSL = "cfindrq.xsl";
    private static final String RESULT_XSL = "cfindrsp.xsl";
    private static final String QUERY_XML = "-cfindrq.xml";
    private static final String RESULT_XML = "-cfindrsp.xml";
    // Association property that forces a PIX query regardless of calling AET configuration
    protected static final String FORCE_PIX_QUERY_FLAG = "ForcePIXQueryFlag";

    // Shared response object that immediately reports Success with no matches
    // (used when the caller lacks user identification).
    private static final MultiDimseRsp NO_MATCH_RSP = new MultiDimseRsp() {

        public DimseListener getCancelListener() {
            return null;
        }

        public Dataset next(ActiveAssociation assoc, Dimse rq, Command rspCmd)
                throws DcmServiceException {
            rspCmd.putUS(Tags.Status, Status.Success);
            return null;
        }

        public void release() {
        }
    };

    protected final QueryRetrieveScpService service;
    // When true, responses are restricted to the attributes requested in the identifier
    protected final boolean filterResult;
    protected final Logger log;
    private PerfMonDelegate perfMon;

    /**
     * @param service      owning QR SCP service (provides config, coercion, logging)
     * @param filterResult whether to filter response datasets down to the requested keys
     */
    public FindScp(QueryRetrieveScpService service, boolean filterResult) {
        this.service = service;
        this.log = service.getLog();
        this.filterResult = filterResult;
        perfMon = new PerfMonDelegate(this.service);
    }

    public final ObjectName getPerfMonServiceName() {
        return perfMon.getPerfMonServiceName();
    }

    public final void setPerfMonServiceName(ObjectName perfMonServiceName) {
        perfMon.setPerfMonServiceName(perfMonServiceName);
    }

    /**
     * Handles a C-FIND request: logs and coerces the request identifier,
     * optionally expands patient IDs via PIX query, checks the calling AET's
     * query permissions, and returns a {@link MultiDimseRsp} that streams the
     * matching records. Any non-DICOM failure is wrapped as UnableToProcess.
     */
    protected MultiDimseRsp doCFind(ActiveAssociation assoc, Dimse rq,
            Command rspCmd) throws IOException, DcmServiceException {
        Association a = assoc.getAssociation();
        String callingAET = a.getCallingAET();
        try {
            perfMon.start(assoc, rq, PerfCounterEnum.C_FIND_SCP_QUERY_DB);
            Command rqCmd = rq.getCommand();
            Dataset rqData = rq.getDataset();
            perfMon.setProperty(assoc, rq, PerfPropertyEnum.REQ_DIMSE, rq);
            if (log.isDebugEnabled()) {
                log.debug("Identifier:\n");
                log.debug(rqData);
            }
            service.logDIMSE(a, QUERY_XML, rqData);
            service.logDicomQuery(a, rq.getCommand().getAffectedSOPClassUID(),
                    rqData);
            // Apply calling-AET-specific request coercion (cfindrq.xsl), if configured
            Dataset coerce = service.getCoercionAttributesFor(callingAET,
                    QUERY_XSL, rqData, a);
            if (coerce != null) {
                service.coerceAttributes(rqData, coerce);
            }
            service.postCoercionProcessing(rqData, Command.C_FIND_RQ, assoc.getAssociation());
            // Remember which helper attributes were absent in the request so they
            // can be stripped from responses again (see excludeFromRSP()).
            int[] excludeFromRSP = excludeFromRSP(rqData);
            service.supplementIssuerOfPatientID(rqData, a, callingAET, true);
            Dataset issuerOfAccessionNumberInRQ = rqData.getItem(Tags.IssuerOfAccessionNumberSeq);
            if (!"PATIENT".equals(rqData.getString(Tags.QueryRetrieveLevel)))
                service.supplementIssuerOfAccessionNumber(rqData, a, callingAET, true);
            boolean pixQuery = forcePixQuery(assoc)
                    || service.isPixQueryCallingAET(callingAET);
            Set<PIDWithIssuer> pidWithIssuer = pixQuery ? pixQuery(rqData) : null;
            // If PIX is enabled but yielded nothing and the request pins an issuer,
            // patient IDs in matches must be adjusted to that issuer's domain.
            boolean adjustPatientID = pixQuery && pidWithIssuer == null
                    && rqData.containsValue(Tags.IssuerOfPatientID);
            // return OtherPatientIDs needed to adjust Patient IDs
            rqData.putSQ(Tags.OtherPatientIDSeq);
            boolean hideWithoutIssuerOfPID =
                    service.isHideWithoutIssuerOfPIDFromAET(callingAET);
            boolean fuzzyMatchingOfPN = fuzzyMatchingOfPN(
                    a.getAcceptedExtNegotiation(
                            rqCmd.getAffectedSOPClassUID()));
            MultiDimseRsp rsp;
            if (service.hasUnrestrictedQueryPermissions(callingAET)) {
                rsp = newMultiCFindRsp(rqData, pidWithIssuer, adjustPatientID,
                        excludeFromRSP, issuerOfAccessionNumberInRQ,
                        fuzzyMatchingOfPN, hideWithoutIssuerOfPID, null);
            } else {
                // Restricted AETs must carry an authenticated Subject on the association
                Subject subject = (Subject) a.getProperty("user");
                if (subject != null) {
                    rsp = newMultiCFindRsp(rqData, pidWithIssuer, adjustPatientID,
                            excludeFromRSP, issuerOfAccessionNumberInRQ,
                            fuzzyMatchingOfPN, hideWithoutIssuerOfPID, subject);
                } else {
                    log
                            .info("Missing user identification -> no records returned");
                    rsp = NO_MATCH_RSP;
                }
            }
            perfMon.stop(assoc, rq, PerfCounterEnum.C_FIND_SCP_QUERY_DB);
            return rsp;
        } catch (DcmServiceException e) {
            throw e;
        } catch (Exception e) {
            log.error("Query DB failed:", e);
            throw new DcmServiceException(Status.UnableToProcess, e);
        }
    }

    // Attributes that this SCP adds to the request internally; they must be
    // removed from responses when the SCU did not ask for them.
    private static final int[] EXCLUDE_FROM_RSP_TAGS = {
            Tags.OtherPatientIDSeq,
            Tags.IssuerOfPatientID,
            Tags.IssuerOfAccessionNumberSeq
    };

    /**
     * Returns the subset of EXCLUDE_FROM_RSP_TAGS that is NOT present in the
     * request identifier (those were added internally and must be stripped
     * from responses).
     */
    private static int[] excludeFromRSP(Dataset rqData) {
        int count = 0;
        boolean[] exclude = new boolean[EXCLUDE_FROM_RSP_TAGS.length];
        for (int i = 0; i < EXCLUDE_FROM_RSP_TAGS.length; i++) {
            // NOTE: intentional assignment inside the condition — records the
            // "absent from request" flag and tests it in one step.
            if (exclude[i] = !rqData.contains(EXCLUDE_FROM_RSP_TAGS[i]))
                count++;
        }
        int[] tags = new int[count];
        for (int i = 0, j = 0; i < exclude.length; i++) {
            if (exclude[i])
                tags[j++] = EXCLUDE_FROM_RSP_TAGS[i];
        }
        return tags;
    }

    /** True if the association carries the ForcePIXQueryFlag property set to TRUE. */
    private boolean forcePixQuery(ActiveAssociation assoc) {
        Object flag = assoc.getAssociation().getProperty(FORCE_PIX_QUERY_FLAG);
        return (flag instanceof Boolean) && ((Boolean) flag).booleanValue();
    }

    /** Reads the fuzzy person-name matching flag from the accepted extended negotiation. */
    private boolean fuzzyMatchingOfPN(ExtNegotiation extNeg) {
        byte[] info;
        return extNeg != null
                && (info = extNeg.info()).length > FUZZY_MATCHING
                && info[FUZZY_MATCHING] != 0;
    }

    /** A null key or a key consisting only of '*' matches everything. */
    private boolean isUniversalMatching(String key) {
        if (key == null) {
            return true;
        }
        char[] a = key.toCharArray();
        for (int i = 0; i < a.length; i++) {
            if (a[i] != '*') {
                return false;
            }
        }
        return true;
    }

    private boolean isWildCardMatching(String key) {
        return key.indexOf('*') != -1 || key.indexOf('?') != -1;
    }

    /**
     * Queries the PIX manager for all patient IDs corresponding to the
     * requested patient (including entries of OtherPatientIDSeq).
     *
     * @return the set of corresponding IDs, or null if nothing was found
     */
    protected Set<PIDWithIssuer> pixQuery(Dataset rqData) throws DcmServiceException {
        HashSet<PIDWithIssuer> result = new HashSet<PIDWithIssuer>();
        pixQuery(rqData, result);
        DcmElement opidsq = rqData.get(Tags.OtherPatientIDSeq);
        if (opidsq != null) {
            for (int i = 0, n = opidsq.countItems(); i < n; i++)
                pixQuery(opidsq.getItem(i), result);
        }
        return result.isEmpty() ? null : result;
    }

    /**
     * Skip the PIX query for universal-match PIDs, issuers not configured for
     * PIX, or wildcard PIDs when the PIX manager is remote.
     */
    protected boolean skipPixQuery(String pid,String issuer) throws DcmServiceException {
        return isUniversalMatching(pid)
                || !service.isPixQueryIssuer(issuer)
                || isWildCardMatching(pid) && !service.isPixQueryLocal();
    }

    /** Adds all PIX-corresponding (pid, issuer) pairs for one dataset to result. */
    protected void pixQuery(Dataset rqData, Set<PIDWithIssuer> result)
            throws DcmServiceException {
        String pid = rqData.getString(Tags.PatientID);
        String issuer = rqData.getString(Tags.IssuerOfPatientID);
        if (skipPixQuery(pid, issuer) )
            return;
        List<String[]> pidAndIssuers =
                service.queryCorrespondingPIDs(pid, issuer);
        if (pidAndIssuers == null)
            // pid was not known by pix manager.
            return;
        for (String[] pidAndIssuer : pidAndIssuers)
            result.add(new PIDWithIssuer(pidAndIssuer[0], pidAndIssuer[1]));
    }

    /**
     * Builds and executes the DB query command and wraps it in a
     * {@link MultiCFindRsp} that streams one pending response per match.
     */
    protected MultiDimseRsp newMultiCFindRsp(Dataset rqData,
            Set<PIDWithIssuer> pidWithIssuers, boolean adjustPatientIDs,
            int[] excludeFromRSP, Dataset issuerOfAccessionNumberInRQ,
            boolean fuzzyMatchingOfPN, boolean hideWithoutIssuerOfPID,
            Subject subject)
            throws SQLException, DcmServiceException {
        QueryCmd queryCmd = QueryCmd.create(rqData, pidWithIssuers,
                filterResult, fuzzyMatchingOfPN, service.isNoMatchForNoValue(),
                hideWithoutIssuerOfPID, subject);
        if (filterResult && issuerOfAccessionNumberInRQ != null)
            // restore Issuer of Accession Number Sequence to value in request
            // to return only requested item attributes
            rqData.putSQ(Tags.IssuerOfAccessionNumberSeq)
                    .addItem(issuerOfAccessionNumberInRQ);
        queryCmd.setFetchSize(service.getFetchSize()).execute();
        return new MultiCFindRsp(queryCmd, adjustPatientIDs,
                rqData.getString(Tags.IssuerOfPatientID), excludeFromRSP);
    }

    protected Dataset getDataset(QueryCmd queryCmd) throws SQLException,
            DcmServiceException {
        return queryCmd.getDataset();
    }

    /** Optionally debug-logs the response identifier just before it is written. */
    protected void doBeforeRsp(ActiveAssociation assoc, Dimse rsp) {
        if (log.isDebugEnabled())
        {
            if (service.isCFindRspDebugLogDeferToDoBeforeRsp()) {
                try {
                    Dataset ds = rsp.getDataset();
                    if (ds != null) {
                        log.debug("Identifier:\n");
                        log.debug(ds);
                    }
                } catch (IOException iOException) {
                    log.error("Failed to debug log C-Find response", iOException);
                }
            }
        }
    }

    /**
     * Streams pending responses from the MultiDimseRsp until a final
     * (non-pending) status is produced, honoring C-CANCEL and always
     * releasing the response object.
     */
    protected void doMultiRsp(ActiveAssociation assoc, Dimse rq,
            Command rspCmd, MultiDimseRsp mdr) throws IOException,
            DcmServiceException {
        try {
            DimseListener cl = mdr.getCancelListener();
            if (cl != null) {
                assoc.addCancelListener(
                        rspCmd.getMessageIDToBeingRespondedTo(), cl);
            }
            do {
                perfMon.start(assoc, rq, PerfCounterEnum.C_FIND_SCP_RESP_OUT);
                Dataset rspData = mdr.next(assoc, rq, rspCmd);
                Dimse rsp = fact.newDimse(rq.pcid(), rspCmd, rspData);
                doBeforeRsp(assoc, rsp);
                assoc.getAssociation().write(rsp);
                perfMon.setProperty(assoc, rq, PerfPropertyEnum.RSP_DATASET,
                        rspData);
                perfMon.stop(assoc, rq, PerfCounterEnum.C_FIND_SCP_RESP_OUT);
                doAfterRsp(assoc, rsp);
            } while (rspCmd.isPending());
        } finally {
            mdr.release();
        }
    }

    /**
     * Streams DB matches as pending C-FIND responses, applying patient-ID
     * adjustment, response filtering, coercion (cfindrsp.xsl) and logging to
     * each returned dataset.
     */
    protected class MultiCFindRsp implements MultiDimseRsp {

        private final QueryCmd queryCmd;
        private final boolean adjustPatientIDs;
        private final String requestedIssuer;
        private final int[] excludeFromRSP;
        // Cache of PIX lookups performed while adjusting patient IDs (null if not adjusting)
        private final Map<PIDWithIssuer, Set<PIDWithIssuer>> pixQueryResults;
        private boolean canceled = false;
        // 0xff01 signals "pending with warning" when optional keys are unsupported
        private final int pendingStatus;
        private int count = 0;
        // Coercion templates are resolved lazily on the first response
        private Templates coerceTpl;

        public MultiCFindRsp(QueryCmd queryCmd, boolean adjustPatientIDs,
                String requestedIssuer, int[] excludeFromRSP) {
            this.queryCmd = queryCmd;
            this.adjustPatientIDs = adjustPatientIDs;
            this.requestedIssuer = requestedIssuer;
            this.excludeFromRSP = excludeFromRSP;
            this.pixQueryResults = adjustPatientIDs
                    ? new HashMap<PIDWithIssuer, Set<PIDWithIssuer>>()
                    : null;
            this.pendingStatus = queryCmd.isKeyNotSupported() ? 0xff01 : 0xff00;
        }

        public DimseListener getCancelListener() {
            return new DimseListener() {

                public void dimseReceived(Association assoc, Dimse dimse) {
                    canceled = true;
                }
            };
        }

        /**
         * Produces the next response dataset, or null with a final status
         * (Cancel/Success) when the query is done or was canceled.
         */
        public Dataset next(ActiveAssociation assoc, Dimse rq, Command rspCmd)
                throws DcmServiceException {
            if (canceled) {
                rspCmd.putUS(Tags.Status, Status.Cancel);
                return null;
            }
            try {
                Association a = assoc.getAssociation();
                String callingAET = a.getCallingAET();
                if (!queryCmd.next()) {
                    rspCmd.putUS(Tags.Status, Status.Success);
                    return null;
                }
                rspCmd.putUS(Tags.Status, pendingStatus);
                Dataset data = getDataset(queryCmd);
                if (adjustPatientIDs
                        && data.containsValue(Tags.PatientID)
                        && data.containsValue(Tags.IssuerOfPatientID)) {
                    if (filterResult) {
                        // Work on a copy so the filtered original is not mutated
                        Dataset tmp = DcmObjectFactory.getInstance().newDataset();
                        tmp.putAll(data);
                        data = tmp;
                    }
                    service.adjustPatientID(data, requestedIssuer, pixQueryResults);
                    if (!data.contains(Tags.IssuerOfPatientID))
                        data.putLO(Tags.IssuerOfPatientID, requestedIssuer);
                }
                if (filterResult) {
                    data = data.exclude(excludeFromRSP);
                    for (int tag : excludeFromRSP)
                        data.remove(tag);
                }
                if (!service.isCFindRspDebugLogDeferToDoBeforeRsp()) {
                    log.debug("Identifier:\n");
                    log.debug(data);
                }
                service.logDIMSE(a, RESULT_XML, data);
                if (count++ == 0) {
                    coerceTpl = service.getCoercionTemplates(callingAET,
                            RESULT_XSL);
                }
                Dataset coerce = service.getCoercionAttributesFor(
                        a, RESULT_XSL, data, coerceTpl);
                if (coerce != null) {
                    service.coerceAttributes(data, coerce);
                }
                service.postCoercionProcessing(data, Command.C_FIND_RSP, assoc.getAssociation());
                return data;
            } catch (DcmServiceException e) {
                throw e;
            } catch (SQLException e) {
                log.error("Retrieve DB record failed:", e);
                throw new DcmServiceException(Status.UnableToProcess, e);
            } catch (Exception e) {
                log.error("Corrupted DB record:", e);
                throw new DcmServiceException(Status.UnableToProcess, e);
            }
        }

        public void release() {
            queryCmd.close();
        }
    }

    // --- AssociationListener callbacks: drive perfMon association counters ---

    public void write(Association src, PDU pdu) {
        if (pdu instanceof AAssociateAC)
            perfMon.assocEstEnd(src, Command.C_FIND_RQ);
    }

    public void received(Association src, PDU pdu) {
        if (pdu instanceof AAssociateRQ)
            perfMon.assocEstStart(src, Command.C_FIND_RQ);
    }

    public void write(Association src, Dimse dimse) {
    }

    public void received(Association src, Dimse dimse) {
    }

    public void error(Association src, IOException ioe) {
    }

    public void closing(Association assoc) {
        if (assoc.getAAssociateAC() != null)
            perfMon.assocRelStart(assoc, Command.C_FIND_RQ);
    }

    public void closed(Association assoc) {
        if (assoc.getAAssociateAC() != null)
            perfMon.assocRelEnd(assoc, Command.C_FIND_RQ);
    }
}
| |
package eu.fbk.dkm.premon.premonitor;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.*;
import com.google.common.io.Files;
import eu.fbk.dkm.premon.vocab.FB;
import eu.fbk.dkm.premon.vocab.ONTOLEX;
import eu.fbk.dkm.premon.vocab.PM;
import eu.fbk.rdfpro.*;
import eu.fbk.rdfpro.util.Statements;
import org.openrdf.model.*;
import org.openrdf.model.vocabulary.*;
import org.openrdf.rio.RDFHandler;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.Rio;
import java.io.File;
import java.io.IOException;
import java.util.*;
public class FramebaseCleanerConverter extends Converter {
// FrameBase part-of-speech suffixes; NOTE(review): not referenced in the code
// visible here — presumably used further down or kept for documentation; verify.
private static final Set<String> POS_TAGS = ImmutableSet.of("a", "adv", "art", "c", "intj",
        "n", "num", "prep", "scon", "v");
// Shared value factory / comparator used when emitting sorted statements
private static final ValueFactory VF = Statements.VALUE_FACTORY;
private static final Ordering<Value> ORDERING = Ordering.from(Statements.valueComparator());
// Orders candidate URIs so that the "best" canonical name for a merged
// frame/microframe sorts first (see URIInfo.merge, which keeps the minimum):
//   1. non-WordNet URIs ("-wn_" absent) before WordNet-derived ones;
//   2. among non-WordNet URIs with different POS suffixes, prefer the POS
//      earlier in the list v, n, a, adv, c, scon, art, intj;
//   3. shorter URIs before longer ones, then lexicographic as tiebreak.
private static final Ordering<URI> MICROFRAME_ORDERING = new Ordering<URI>() {

    @Override
    public int compare(final URI left, final URI right) {
        final String s1 = left.stringValue();
        final String s2 = right.stringValue();
        final boolean wn1 = s1.contains("-wn_");
        final boolean wn2 = s2.contains("-wn_");
        if (wn1 && wn2) {
            return s1.compareTo(s2);
        } else if (wn1 || wn2) {
            // exactly one is WordNet-derived: it sorts last
            return wn1 ? 1 : -1;
        }
        // compare the POS suffix after the final '.'
        final String t1 = s1.substring(s1.lastIndexOf('.') + 1);
        final String t2 = s2.substring(s2.lastIndexOf('.') + 1);
        if (!t1.equals(t2)) {
            for (final String t : new String[] { "v", "n", "a", "adv", "c", "scon", "art",
                    "intj" }) {
                if (t1.endsWith(t)) {
                    return -1;
                } else if (t2.endsWith(t)) {
                    return 1;
                }
            }
        }
        int result = s1.length() - s2.length();
        if (result == 0) {
            result = s1.compareTo(s2);
        }
        return result;
    }
};
/**
 * Creates the converter, delegating configuration ("source", "language"
 * properties) and the WordNet mapping to the {@code Converter} base class.
 *
 * @param path       root folder containing the FrameBase input files
 * @param sink       handler receiving the cleaned RDF output
 * @param properties converter configuration (expects "source" and "language")
 * @param wnInfo     WordNet lemma/synset URI mapping
 */
public FramebaseCleanerConverter(final File path, final RDFHandler sink, final Properties properties,
        Map<String, URI> wnInfo) {
    super(path, properties.getProperty("source"), sink, properties, properties.getProperty("language"), wnInfo);
}
/**
 * Reads all RDF schema files under {@code path}, applies RDFS inference,
 * pipes the closure through {@link Handler} (which merges equivalent URIs
 * and re-emits cleaned statements), and writes to the default sink.
 * <p>
 * NOTE(review): {@code synsetFiles} is collected but not used in this
 * method — presumably consumed elsewhere or dead; confirm.
 */
@Override
public void convert() throws IOException, RDFHandlerException {
    // Identify schema files (RDF) and synset mapping files (.txt) in the source folder
    final List<String> synsetFiles = Lists.newArrayList();
    final List<String> schemaFiles = Lists.newArrayList();
    for (final File file : Files.fileTreeTraverser().preOrderTraversal(this.path)) {
        if (Rio.getParserFormatForFileName(file.getName()) != null) {
            schemaFiles.add(file.getAbsolutePath());
        } else if (file.getName().endsWith(".txt")) {
            synsetFiles.add(file.getAbsolutePath());
        }
    }
    // Read the input once
    final RDFSource source = RDFSources.read(true, false, null, null,
            schemaFiles.toArray(new String[schemaFiles.size()]));
    // p1: RDFS closure over the source; p2: the cleaning/merging handler
    final RDFProcessor p1 = RDFProcessors.rdfs(source, SESAME.NIL, true, false);
    final RDFProcessor p2 = new RDFProcessor() {

        @Override
        public RDFHandler wrap(final RDFHandler handler) {
            return new Handler(handler);
        }
    };
    final RDFProcessor p = RDFProcessors.sequence(p1, p2);
    p.apply(RDFSources.NIL, this.defaultSink, 1);
}
/**
 * RDF handler that accumulates per-URI information, merges URIs connected by
 * {@code owl:equivalentClass} into a single {@link URIInfo}, and on endRDF
 * emits a cleaned, deterministically sorted set of statements downstream.
 */
private static class Handler extends AbstractRDFHandlerWrapper {

    // Maps every seen URI (including merged aliases) to its canonical URIInfo
    private Map<URI, URIInfo> uriMap;

    Handler(final RDFHandler handler) {
        super(handler);
    }

    @Override
    public void startRDF() throws RDFHandlerException {
        super.startRDF();
        this.uriMap = Maps.newHashMap();
    }

    @Override
    public void handleComment(final String comment) throws RDFHandlerException {
        // ignore
    }

    @Override
    public void handleNamespace(final String prefix, final String uri)
            throws RDFHandlerException {
        // ignore
    }

    @Override
    public synchronized void handleStatement(final Statement stmt) throws RDFHandlerException {
        final Resource s = stmt.getSubject();
        final URI p = stmt.getPredicate();
        final Value o = stmt.getObject();
        if (p.equals(OWL.EQUIVALENTCLASS) && s instanceof URI && o instanceof URI) {
            // Merge the two URIInfo records and repoint all aliases at the survivor
            final URIInfo si = getURIInfo((URI) s);
            final URIInfo so = getURIInfo((URI) o);
            if (si != so) {
                si.merge(so);
                for (final URI alias : si.getAliases()) {
                    this.uriMap.put(alias, si);
                }
            }
        } else if (s instanceof URI) {
            // Record labels/comments/types/links on the subject's info record
            final URIInfo si = getURIInfo((URI) s);
            si.update((URI) s, p, o);
        }
    }

    @Override
    public void endRDF() throws RDFHandlerException {
        // Declare the annotation/object properties used by the emitted data
        for (final URI uri : new URI[] { DCTERMS.TYPE, FB.INHERITS_FROM, FB.IS_PERSPECTIVE_OF,
                RDFS.LABEL, RDFS.COMMENT }) {
            this.handler.handleStatement(VF.createStatement(uri, RDF.TYPE,
                    OWL.ANNOTATIONPROPERTY));
        }
        this.handler.handleStatement(VF.createStatement(ONTOLEX.IS_DENOTED_BY, RDF.TYPE,
                OWL.OBJECTPROPERTY));
        // Emit each distinct (deduplicated) URIInfo in its natural order
        for (final URIInfo info : Ordering.natural().sortedCopy(
                ImmutableSet.copyOf(this.uriMap.values()))) {
            info.emit(this.handler, this.uriMap);
        }
        super.endRDF();
    }

    /** Returns the existing URIInfo for uri, creating and registering one if absent. */
    private URIInfo getURIInfo(final URI uri) {
        URIInfo info = this.uriMap.get(uri);
        if (info == null) {
            info = new URIInfo(uri);
            this.uriMap.put(uri, info);
        }
        return info;
    }
}
private static final class URIInfo implements Comparable<URIInfo> {
// Canonical URI for this (possibly merged) frame/FE; minimized via MICROFRAME_ORDERING on merge
private URI uri;
// All URIs merged into this record (owl:equivalentClass closure)
private Set<URI> aliases;
// Type flags accumulated from rdf:type statements (fb:Frame / fb:Microframe / FE property)
private boolean isFrame;
private boolean isMicroframe;
private boolean isFE;
// Accumulated annotation values; all sets start as immutable empties and are
// replaced (not mutated) by setAdd
private Set<String> labels;
private Set<String> comments;
private Set<URI> synsets;
private Set<URI> inheritsFrom;
private Set<URI> perspectiveOf;
private Set<URI> parents;
private Set<URI> domains;
private Set<URI> ranges;

/** Initializes an empty record whose only alias is the URI itself. */
public URIInfo(final URI uri) {
    this.uri = uri;
    this.aliases = ImmutableSet.of(uri);
    this.isFrame = false;
    this.isMicroframe = false;
    this.isFE = false;
    this.labels = ImmutableSet.of();
    this.comments = ImmutableSet.of();
    this.synsets = ImmutableSet.of();
    this.inheritsFrom = ImmutableSet.of();
    this.perspectiveOf = ImmutableSet.of();
    this.parents = ImmutableSet.of();
    this.domains = ImmutableSet.of();
    this.ranges = ImmutableSet.of();
}
/** All URIs merged into this record, including its own. */
public Set<URI> getAliases() {
    return this.aliases;
}
/**
 * Absorbs another record (owl:equivalentClass): keeps the preferred canonical
 * URI per MICROFRAME_ORDERING and unions all flags and attribute sets.
 */
public void merge(final URIInfo info) {
    this.uri = MICROFRAME_ORDERING.min(this.uri, info.uri);
    this.aliases = setAdd(this.aliases, info.aliases);
    this.isFrame |= info.isFrame;
    this.isMicroframe |= info.isMicroframe;
    this.isFE |= info.isFE;
    this.labels = setAdd(this.labels, info.labels);
    this.comments = setAdd(this.comments, info.comments);
    this.synsets = setAdd(this.synsets, info.synsets);
    this.inheritsFrom = setAdd(this.inheritsFrom, info.inheritsFrom);
    this.perspectiveOf = setAdd(this.perspectiveOf, info.perspectiveOf);
    this.parents = setAdd(this.parents, info.parents);
    this.domains = setAdd(this.domains, info.domains);
    this.ranges = setAdd(this.ranges, info.ranges);
}
/**
 * Accumulates one statement about this URI: labels/comments (skipped for
 * synset-derived "-wn_" microframes), domain/range, subclass/subproperty
 * parents, FrameBase inheritance/perspective links, and rdf:type flags.
 */
public void update(final URI s, final URI p, final Value o) {
    final boolean isSynsetMicroframe = s.stringValue().contains("-wn_");
    if (p.equals(RDFS.LABEL)) {
        if (!isSynsetMicroframe) {
            this.labels = setAdd(this.labels, ((Literal) o).getLabel());
        }
    } else if (p.equals(RDFS.COMMENT)) {
        if (!isSynsetMicroframe) {
            this.comments = setAdd(this.comments, ((Literal) o).getLabel());
        }
    } else if (p.equals(FB.HAS_SYNSET_NUMBER)) {
        // Intentionally disabled: synset-number data is broken in the
        // FrameBase release, so no synset URI is derived here.
        // Broken in FrameBase release
        // final String l = ((Literal) o).getLabel();
        // final String str = s.stringValue();
        // final int index = str.indexOf(l) + l.length() - 8;
        // final char pos = str.charAt(index - 2);
        // this.synsets = setAdd(this.synsets,
        // VF.createURI("http://wordnet-rdf.princeton.edu/wn30/" + l + "-" + pos));
    } else if (p.equals(RDFS.DOMAIN)) {
        this.domains = setAdd(this.domains, toURI(o));
    } else if (p.equals(RDFS.RANGE)) {
        this.ranges = setAdd(this.ranges, toURI(o));
    } else if (p.equals(RDFS.SUBCLASSOF) || p.equals(RDFS.SUBPROPERTYOF)) {
        this.parents = setAdd(this.parents, toURI(o));
    } else if (p.equals(FB.INHERITS_FROM)) {
        this.inheritsFrom = setAdd(this.inheritsFrom, toURI(o));
    } else if (p.equals(FB.IS_PERSPECTIVE_OF)) {
        this.perspectiveOf = setAdd(this.perspectiveOf, toURI(o));
    } else if (p.equals(RDF.TYPE)) {
        if (o.equals(FB.FRAME)) {
            this.isFrame = true;
        } else if (o.equals(FB.MICROFRAME)) {
            this.isMicroframe = true;
        } else if (o.equals(FB.FRAME_ELEMENT_PROPERTY)) {
            this.isFE = true;
        }
    }
}
/**
 * Emits the RDF description of this entity to the handler. Entities that are
 * neither frames, microframes, nor frame-element properties produce nothing.
 */
public void emit(final RDFHandler handler, final Map<URI, URIInfo> uriMap)
        throws RDFHandlerException {
    // Type declaration: microframes are also tagged as frames; FEs become
    // object properties; untyped entities are skipped entirely.
    if (this.isMicroframe) {
        emit(handler, this.uri, RDF.TYPE, OWL.CLASS);
        emit(handler, this.uri, DCTERMS.TYPE, FB.MICROFRAME);
        emit(handler, this.uri, DCTERMS.TYPE, FB.FRAME);
    } else if (this.isFrame) {
        emit(handler, this.uri, RDF.TYPE, OWL.CLASS);
        emit(handler, this.uri, DCTERMS.TYPE, FB.FRAME);
    } else if (this.isFE) {
        emit(handler, this.uri, RDF.TYPE, OWL.OBJECTPROPERTY);
        emit(handler, this.uri, DCTERMS.TYPE, FB.FRAME_ELEMENT_PROPERTY);
    } else {
        return;
    }
    // Labels are joined with " / ", comments with newlines; both sorted and
    // tagged as English literals.
    if (!this.labels.isEmpty()) {
        final Literal l = VF.createLiteral(
                Joiner.on(" / ").join(Ordering.natural().sortedCopy(this.labels)), "en");
        emit(handler, this.uri, RDFS.LABEL, l);
    }
    if (!this.comments.isEmpty()) {
        final Literal l = VF.createLiteral(
                Joiner.on("\n").join(Ordering.natural().sortedCopy(this.comments)), "en");
        emit(handler, this.uri, RDFS.COMMENT, l);
    }
    for (final URI uri : ORDERING.sortedCopy(this.synsets)) {
        emit(handler, this.uri, ONTOLEX.CONCEPT, uri);
    }
    // Structural links are rewritten to canonical URIs; parents, domains and
    // ranges additionally drop redundant super-entries (see filter).
    for (final URI uri : filter(this.inheritsFrom, uriMap, false)) {
        emit(handler, this.uri, FB.INHERITS_FROM, uri);
    }
    for (final URI uri : filter(this.perspectiveOf, uriMap, false)) {
        emit(handler, this.uri, FB.IS_PERSPECTIVE_OF, uri);
    }
    for (final URI uri : filter(Sets.difference(this.parents, this.aliases), uriMap, true)) {
        emit(handler, this.uri, this.isFE ? RDFS.SUBPROPERTYOF : RDFS.SUBCLASSOF, uri);
    }
    for (final URI uri : filter(this.domains, uriMap, true)) {
        emit(handler, this.uri, RDFS.DOMAIN, uri);
    }
    for (final URI uri : filter(this.ranges, uriMap, true)) {
        emit(handler, this.uri, RDFS.RANGE, uri);
    }
    // Microframes additionally link to a lexical entry (pos-form) derived
    // from each alias URI and its inherits-from parent.
    if (this.isMicroframe) {
        for (final URI uri : this.aliases) {
            URI entry = null;
            final String uriStr = uri.stringValue();
            final int index = uriStr.lastIndexOf('.');
            final String pos = uriStr.substring(index + 1);
            if (POS_TAGS.contains(pos)) {
                for (final URI parent : uriMap.get(uri).inheritsFrom) {
                    final String parentStr = parent.stringValue();
                    if (uriStr.startsWith(parentStr)) {
                        // Form = alias URI stripped of the parent prefix and
                        // the trailing ".pos" suffix.
                        final String form = uriStr
                                .substring(parentStr.length() + 1, index);
                        entry = VF.createURI(PM.NAMESPACE, pos + "-" + form);
                    }
                }
                Preconditions.checkArgument(entry != null, uriStr);
                emit(handler, this.uri, ONTOLEX.IS_DENOTED_BY, entry);
            }
        }
    }
}
/**
 * Orders frames before frame-element properties; entities of the same kind
 * fall back to the global URI ordering.
 */
@Override
public int compareTo(final URIInfo other) {
    if (this.isFE && other.isFrame) {
        return 1;
    }
    if (this.isFrame && other.isFE) {
        return -1;
    }
    return ORDERING.compare(this.uri, other.uri);
}
// Convenience wrapper: forwards a single (s, p, o) statement to the handler.
private void emit(final RDFHandler handler, final Resource s, final URI p, final Value o)
        throws RDFHandlerException {
    handler.handleStatement(Statements.VALUE_FACTORY.createStatement(s, p, o));
}
/**
 * Rewrites each URI to the canonical URI of its uriMap entry (URIs without an
 * entry are dropped). When removeParents is true, URIs that appear as
 * (non-alias) parents of other kept URIs are removed, so only the most
 * specific entries survive.
 *
 * @return the surviving URIs sorted by ORDERING
 */
private static List<URI> filter(final Iterable<URI> uris, final Map<URI, URIInfo> uriMap,
        final boolean removeParents) {
    final Set<URI> rewrittenURIs = Sets.newHashSet();
    for (final URI uri : uris) {
        final URIInfo info = uriMap.get(uri);
        if (info != null) {
            rewrittenURIs.add(info.uri);
        }
    }
    if (removeParents) {
        // Collect the parents of every kept URI, then subtract them.
        final Set<URI> parents = Sets.newHashSet();
        for (final URI uri : rewrittenURIs) {
            final URIInfo i = uriMap.get(uri);
            for (final URI u : i.parents) {
                if (!i.aliases.contains(u)) {
                    parents.add(u);
                }
            }
        }
        rewrittenURIs.removeAll(parents);
    }
    return ORDERING.sortedCopy(rewrittenURIs);
}
/**
 * Adds one element to the set, converting it to a mutable HashSet first if
 * necessary (the attribute sets start out as immutable ImmutableSets).
 *
 * <p>Bug fix: the copy now seeds from the existing contents. The previous
 * {@code Sets.newHashSet()} created an EMPTY set, silently discarding any
 * elements already present — inconsistent with the Iterable overload, which
 * correctly copies via {@code Sets.newHashSet(set)}.
 */
private static <T> Set<T> setAdd(Set<T> set, final T element) {
    if (!(set instanceof HashSet)) {
        set = Sets.newHashSet(set);
    }
    set.add(element);
    return set;
}
/**
 * Adds all elements to the set, first replacing an immutable set with a
 * mutable HashSet copy. Returns the original set untouched when there is
 * nothing to add.
 */
private static <T> Set<T> setAdd(Set<T> set, final Iterable<T> elements) {
    if (Iterables.isEmpty(elements)) {
        return set;
    }
    final Set<T> target = set instanceof HashSet ? set : Sets.newHashSet(set);
    Iterables.addAll(target, elements);
    return target;
}
/**
 * Coerces an RDF value to a URI. URIs pass through; literals whose (trimmed)
 * label is an http URI string are parsed; anything else is rejected.
 *
 * <p>Fix: the label is trimmed BEFORE the prefix check — previously the
 * untrimmed string was tested with startsWith, so a literal with leading
 * whitespace around a valid URI was rejected even though the code clearly
 * intended to tolerate stray whitespace (it trimmed afterwards).
 *
 * @throws IllegalArgumentException when the value cannot be read as a URI
 */
private static URI toURI(final Value value) {
    if (value instanceof URI) {
        return (URI) value;
    }
    if (value instanceof Literal) {
        final String s = ((Literal) value).getLabel().trim();
        if (s.startsWith("http://")) {
            return VF.createURI(s);
        }
    }
    throw new IllegalArgumentException("Not a valid URI: " + value);
}
}
// Stub: this converter does not map textual POS tags to URIs.
// NOTE(review): returns null unconditionally — confirm callers tolerate a
// null POS URI.
@Override protected URI getPosURI(String textualPOS) {
    return null;
}
}
| |
package com.hcse.app.d6;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.OptionBuilder;
import org.apache.log4j.Logger;
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.util.DefaultPrettyPrinter;
import com.hcse.app.ExitException;
import com.hcse.protocol.BasePacket;
import com.hcse.protocol.d6.message.D6ResponseMessage;
import com.hcse.protocol.util.packet.BaseDoc;
// Output layout for a document's field content: "array" emits the values
// only; "object" pairs each value with its field name.
enum DocContentFormat {
    array, object
};
/**
 * Command-line client that fetches documents by md5Lite from D6 servers and
 * dumps them as JSON, either to stdout or to per-document files.
 */
public class Client extends ClientBase {
    protected final Logger logger = Logger.getLogger(Client.class);
    protected ObjectMapper objectMapper = new ObjectMapper();
    // How document fields are serialized: a bare JSON array of values, or an
    // object mapping field names to values.
    protected DocContentFormat fieldFormat = DocContentFormat.array;
    // Output directory, used only when --directory was given (save == true).
    protected String dir = ".";
    protected boolean save = false;
    protected boolean pretty = true;
    // Number of leading md5 characters forming the second-level directory name.
    protected int firstLevelLength = 3;
    protected String charset = "utf8";

    /** Pretty printer that puts each array element on its own line. */
    static class MyPrettyPrinter extends DefaultPrettyPrinter {
        public MyPrettyPrinter() {
            this._arrayIndenter = new Lf2SpacesIndenter();
        }
    }

    /**
     * Writes one document to the stream as a one-element JSON array holding an
     * object with md5Lite, weight and the document content. The stream is
     * flushed but deliberately not closed here — it may be System.out.
     */
    public void dumpJsonDoc(OutputStream os, BaseDoc doc) {
        try {
            OutputStreamWriter writer;
            writer = new OutputStreamWriter(os, charset);
            JsonGenerator generator = objectMapper.getJsonFactory().createJsonGenerator(writer);
            if (pretty) {
                MyPrettyPrinter pp = new MyPrettyPrinter();
                generator.setPrettyPrinter(pp);
            }
            generator.writeStartArray();
            {
                generator.writeStartObject();
                {
                    generator.writeStringField("md5Lite", Long.toHexString(doc.getMd5Lite()).toUpperCase());
                    generator.writeNumberField("weight", doc.getWeight());
                    switch (fieldFormat) {
                    case object: {
                        generator.writeObjectFieldStart("content");
                        {
                            // Pair names with values positionally; stop when
                            // either list runs out, and skip empty values.
                            List<String> values = doc.getValues();
                            List<String> names = doc.getNames();
                            Iterator<String> it = names.iterator();
                            for (String value : values) {
                                if (!it.hasNext()) {
                                    break;
                                }
                                String name = it.next();
                                if (!value.isEmpty()) {
                                    generator.writeStringField(name, value);
                                }
                            }
                        }
                        generator.writeEndObject();
                        break;
                    }
                    case array: {
                        generator.writeArrayFieldStart("content");
                        {
                            List<String> values = doc.getValues();
                            for (String value : values) {
                                generator.writeString(value);
                            }
                        }
                        generator.writeEndArray();
                        break;
                    }
                    }
                }
                generator.writeEndObject();
            }
            generator.writeEndArray();
            generator.flush();
            writer.flush();
        } catch (UnsupportedEncodingException e) {
            logger.error("dump response failed.", e);
        } catch (IOException e) {
            logger.error("dump response failed.", e);
        }
    }

    /**
     * Returns System.out when not saving; otherwise opens (creating parent
     * directories as needed) {@code dir/NNN/<md5-prefix>/<md5>.txt}.
     */
    private OutputStream createOutputStream(int mid, String md5) throws FileNotFoundException {
        if (!save) {
            return System.out;
        }
        String preDir = md5.substring(0, firstLevelLength);
        String pathName = String.format("%s%c%03d%c%s", dir, File.separatorChar, mid, File.separatorChar, preDir);
        File file = new File(pathName);
        if (!file.exists()) {
            file.mkdirs();
        }
        return new FileOutputStream(pathName + File.separatorChar + md5 + ".txt");
    }

    /** Closes the stream only when it is a file we opened (never System.out). */
    private void closeOutputStream(OutputStream os) {
        if (save) {
            try {
                os.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Fetches one document given either "mid:md5Lite" or a bare md5Lite
     * (which uses the default machine id).
     */
    private void getOneDoc(String midAndDoc) {
        String[] fields = midAndDoc.split(":");
        try {
            if (fields.length == 2) {
                getOneDoc(Integer.parseInt(fields[0]), fields[1]);
            } else {
                getOneDoc(defalutMid, fields[0]);
            }
        } catch (NumberFormatException e) {
            logger.error("parse machine id failed.", e);
        }
    }

    /** Requests one document from machine {@code mid} and dumps the first hit. */
    private void getOneDoc(int mid, String md5Lite) {
        try {
            D6ResponseMessage response = request(mid, md5Lite);
            if (response == null) {
                return;
            }
            List<BasePacket> docs = response.getDocs();
            if (docs.isEmpty()) {
                return;
            }
            OutputStream os = createOutputStream(mid, md5Lite);
            // finally-block ensures a file stream is closed even when the
            // dump throws (the original leaked it in that case).
            try {
                dumpJsonDoc(os, docs.get(0).getDocument());
            } finally {
                closeOutputStream(os);
            }
        } catch (Exception e) {
            logger.error("get doc from server failed.", e);
        }
    }

    /**
     * Reads md5 entries from a file (one per line, '#'-prefixed lines skipped)
     * and fetches each. The reader is now closed when done — the original
     * leaked the file handle.
     */
    private void getDoc(String fileName) {
        FileInputStream in;
        try {
            in = new FileInputStream(fileName);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            return;
        }
        BufferedReader reader = new BufferedReader(new InputStreamReader(in));
        String line = null;
        try {
            while ((line = reader.readLine()) != null) {
                if (line.startsWith("#")) {
                    continue;
                }
                getOneDoc(line);
            }
        } catch (IOException e) {
            logger.error("get doc from server failed.", e);
        } finally {
            try {
                // Closing the reader also closes the underlying stream.
                reader.close();
            } catch (IOException e) {
                logger.error("close md5 list file failed.", e);
            }
        }
    }

    /** Fetches each entry of an md5Lite list. */
    private void getDoc(String[] md5LiteList) {
        for (String v : md5LiteList) {
            getOneDoc(v);
        }
    }

    /** Registers the command-line options this client understands. */
    @SuppressWarnings("static-access")
    protected void init() throws ExitException {
        super.init();
        options.addOption(OptionBuilder.withLongOpt("file").withDescription("md5 file name.").hasArg()
                .withArgName("file").create('f'));
        options.addOption(OptionBuilder.withLongOpt("md5Lite").withDescription("md5Lite list split by ','").hasArg()
                .withArgName("md5Lite").create('m'));
        options.addOption(OptionBuilder.withLongOpt("directory").withDescription("directory to save result").hasArg()
                .withArgName("directory").create('d'));
        options.addOption(OptionBuilder.withLongOpt("pretty").withDescription("print pretty format. true/false")
                .hasArg().withArgName("pretty").create());
        options.addOption(OptionBuilder.withLongOpt("array").withDescription("print document field by json array.")
                .withArgName("array").create());
        // Fixed copy-pasted description: this option selects the object form.
        options.addOption(OptionBuilder.withLongOpt("object").withDescription("print document field by json object.")
                .withArgName("object").create());
        // Fixed: --charset takes a value; without hasArg() getOptionValue()
        // returned null and the JSON writer crashed on a null charset.
        options.addOption(OptionBuilder.withLongOpt("charset").withDescription("charset to encoding JSON.")
                .hasArg().withArgName("charset").create());
    }

    /** Applies parsed command-line options to the client configuration. */
    protected void parseArgs(CommandLine cmd) throws ExitException {
        super.parseArgs(cmd);
        if (cmd.hasOption("directory")) {
            save = true;
            dir = cmd.getOptionValue("directory");
        }
        if (cmd.hasOption("pretty")) {
            String value = cmd.getOptionValue("pretty").toLowerCase();
            // Fixed: any value other than yes/true/1 now disables pretty
            // printing. Previously the flag (default true) could only ever be
            // set to true, so "--pretty false" had no effect.
            pretty = value.equals("yes") || value.equals("true") || value.equals("1");
        }
        if (cmd.hasOption("charset")) {
            charset = cmd.getOptionValue("charset");
        }
        if (cmd.hasOption("array")) {
            fieldFormat = DocContentFormat.array;
        }
        if (cmd.hasOption("object")) {
            fieldFormat = DocContentFormat.object;
        }
    }

    /** Dispatches to file-driven or list-driven fetching. */
    protected void run(CommandLine cmd) {
        if (cmd.hasOption("file")) {
            getDoc(cmd.getOptionValue("file"));
            return;
        }
        if (cmd.hasOption("md5Lite")) {
            String array[] = cmd.getOptionValue("md5Lite").split(",");
            getDoc(array);
            return;
        }
    }

    public static void main(String[] args) {
        ClientBase client = new Client();
        client.entry(args);
    }
}
| |
/*
* Copyright (C) 2015 Imran Mammadli
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package algorithms.structures;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.Set;
/**
 * This class represents a node of a directed graph
 *
 * <ul>
 * <li><b>data</b> - the stored value</li>
 * <li><b>inEdges</b> - set of the incoming nodes</li>
 * <li><b>outEdges</b> - set of the outgoing nodes</li>
 * </ul>
 *
 * @param <E> Comparable data type
 *
 * @author Imran Mammadli
 **/
public class DGraphNode<E extends Comparable<E>> implements Comparable<DGraphNode<E>>, Serializable
{
    private E data;
    private Set<DGraphNode<E>> inEdges;
    private Set<DGraphNode<E>> outEdges;
    private static final long serialVersionUID = 1L;

    /** Creates a node with no data and empty edge sets. */
    public DGraphNode()
    {
        inEdges = new HashSet<DGraphNode<E>>();
        outEdges = new HashSet<DGraphNode<E>>();
    }

    /** Creates a node holding {@code object} with empty edge sets. */
    public DGraphNode(E object)
    {
        data = object;
        inEdges = new HashSet<DGraphNode<E>>();
        outEdges = new HashSet<DGraphNode<E>>();
    }

    public E getData()
    {
        return data;
    }

    public void setData(E data)
    {
        this.data = data;
    }

    /** Returns the live set of nodes with edges pointing into this one. */
    public Set<DGraphNode<E>> getIncomingNodes()
    {
        return inEdges;
    }

    /** Returns the live set of nodes this one points to. */
    public Set<DGraphNode<E>> getOutcomingNodes()
    {
        return outEdges;
    }

    public boolean addIn(DGraphNode<E> node)
    {
        return inEdges.add(node);
    }

    public boolean addOut(DGraphNode<E> node)
    {
        return outEdges.add(node);
    }

    public boolean addAllIn(Set<DGraphNode<E>> nodes)
    {
        return inEdges.addAll(nodes);
    }

    public boolean addAllOut(Set<DGraphNode<E>> nodes)
    {
        return outEdges.addAll(nodes);
    }

    public boolean removeIn(DGraphNode<E> node)
    {
        return inEdges.remove(node);
    }

    public boolean removeOut(DGraphNode<E> node)
    {
        return outEdges.remove(node);
    }

    public boolean removeAllIn(Set<DGraphNode<E>> nodes)
    {
        return inEdges.removeAll(nodes);
    }

    public boolean removeAllOut(Set<DGraphNode<E>> nodes)
    {
        return outEdges.removeAll(nodes);
    }

    /**
     * Renders every node reachable from this one via incoming edges as
     * "node : [ in1, in2 ]" lines.
     *
     * <p>Fix: a node with no incoming edges now prints "node : [ ]" — the old
     * unconditional replace() of the last two characters ate the opening
     * bracket ("node :  ]") when there was no trailing ", " to strip.
     */
    public String deepToString()
    {
        // Depth-first walk over incoming edges, collecting reachable nodes.
        Set<DGraphNode<E>> visited = new HashSet<DGraphNode<E>>();
        Deque<DGraphNode<E>> queue = new ArrayDeque<DGraphNode<E>>();
        queue.addFirst(this);
        while(!queue.isEmpty())
        {
            DGraphNode<E> node = queue.removeFirst();
            visited.add(node);
            for (DGraphNode<E> adjacentNode : node.inEdges)
            {
                if (!visited.contains(adjacentNode))
                {
                    queue.addFirst(adjacentNode);
                }
            }
        }
        StringBuilder result = new StringBuilder();
        for (DGraphNode<E> v : visited)
        {
            result.append(v + " : [ ");
            if (v.inEdges.isEmpty())
            {
                // Nothing to strip; just close the bracket.
                result.append("]\n");
                continue;
            }
            for (DGraphNode<E> edge : v.inEdges)
            {
                result.append(edge + ", ");
            }
            result.replace(result.length()-2, result.length(), " ]\n");
        }
        return result.toString();
    }

    @Override
    public String toString()
    {
        return this.data != null ? this.data.toString() : "null";
    }

    /**
     * Creates and returns a deep copy of the node
     *
     * <p>A deep copying of node is a process of duplicating
     * not only the node itself, but all relevant structures
     * to which the original node is connected. The mechanism
     * is implemented by means of the serialization technique.
     *
     * <p>NOTE(review): the bound {@code T extends Node<?>} refers to a type
     * declared elsewhere in this package — confirm it is the intended
     * supertype of {@code DGraphNode}.
     *
     * @return deep copy of the node, or {@code null} if serialization failed
     *
     * @see <a href="http://en.wikipedia.org/wiki/Object_copying#Deep_copy">Deep Copy</a>
     **/
    @SuppressWarnings("unchecked")
    public <T extends Node<?>> T copy()
    {
        T clone = null;
        try(ByteArrayOutputStream binOut = new ByteArrayOutputStream();
            ObjectOutputStream objOut = new ObjectOutputStream(binOut))
        {
            objOut.writeObject(this);
            // Fix: the input stream is now closed too (try-with-resources).
            try(ObjectInputStream objIn =
                    new ObjectInputStream(new ByteArrayInputStream(binOut.toByteArray())))
            {
                clone = (T) objIn.readObject();
            }
        }
        catch(IOException e)
        {
            e.printStackTrace();
        }
        catch(ClassNotFoundException e)
        {
            e.printStackTrace();
        }
        return clone;
    }

    /**
     * Compares by stored data; a null node sorts before any node, and null
     * data sorts before non-null data.
     */
    @Override
    public int compareTo(DGraphNode<E> other)
    {
        // other node is NULL
        if (other == null)
        {
            return 1;
        }
        // only one of the nodes contains NULL data
        if (this.data == null ^ other.data == null)
        {
            return this.data == null ? -1 : 1;
        }
        // both nodes contain NULL data
        if (this.data == null && other.data == null)
        {
            return 0;
        }
        // none of the nodes contains NULL data
        return this.data.compareTo(other.data);
    }
}
| |
/**
* This class was created by <Professorvennie>. It's distributed as
* part of the Machinery Craft Mod. Get the Source Code in github:
* https://github.com/Professorvennie/MachineryCraft
*
* Machinery Craft is Open Source and distributed under a
* Creative Commons Attribution-NonCommercial-ShareAlike 3.0 License
* (http://creativecommons.org/licenses/by-nc-sa/3.0/deed.en_GB)
* */
package com.professorvennie.machinerycraft.machines.copper.furnace;
import com.professorvennie.machinerycraft.block.ModBlocks;
import com.professorvennie.machinerycraft.lib.Names;
import com.professorvennie.machinerycraft.machines.TileEntityBasicMachine;
import cpw.mods.fml.common.registry.GameRegistry;
import net.minecraft.block.Block;
import net.minecraft.block.material.Material;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.*;
import net.minecraft.item.crafting.FurnaceRecipes;
import net.minecraft.nbt.NBTTagCompound;
/**
 * Copper furnace tile entity: smelts the item in slot 0 using fuel from
 * slot 1 into slot 2, with upgrade slots 3-5. Fuel burns twice as long as in
 * a vanilla furnace (see the *2 in updateEntity).
 */
public class TileEntityCopperFurnace extends TileEntityBasicMachine {
    // Ticks of fuel remaining, total burn time of the current fuel item
    // (for the GUI flame gauge), and smelting progress in ticks.
    public int burnTime, currentItemBurnTime, cookTime;

    public TileEntityCopperFurnace() {
        super(Names.Containers.CONTAINER_COPPER_FURNACE);
        // Slot 0: input (top), slot 1: fuel (sides), slot 2: output (bottom).
        this.slots_top = new int[]{0};
        this.slots_bottom = new int[]{2, 1};
        this.slots_sides = new int[]{1};
        setMachineSpeed(95);
        upgradeSlots = new int[]{3, 4, 5};
    }

    /**
     * Returns the burn duration in ticks of the given stack, or 0 when it is
     * not fuel. Mirrors the vanilla furnace fuel table, then falls back to
     * GameRegistry for mod-registered fuels.
     */
    public static int getItemBurnTime(ItemStack itemstack) {
        if (itemstack == null) {
            return 0;
        } else {
            Item item = itemstack.getItem();
            if (item instanceof ItemBlock && Block.getBlockFromItem(item) != Blocks.air) {
                Block block = Block.getBlockFromItem(item);
                // Wooden slabs burn for half the time of full wood blocks.
                if (block == Blocks.wooden_slab) {
                    return 150;
                }
                if (block.getMaterial() == Material.wood) {
                    return 300;
                }
                if (block == Blocks.coal_block) {
                    return 16000;
                }
            }
            if (item instanceof ItemTool && ((ItemTool) item).getToolMaterialName().equals("WOOD")) return 200;
            if (item instanceof ItemSword && ((ItemSword) item).getToolMaterialName().equals("WOOD")) return 200;
            if (item instanceof ItemHoe && ((ItemHoe) item).getToolMaterialName().equals("WOOD")) return 200;
            if (item == Items.stick) return 100;
            if (item == Items.coal) return 1600;
            if (item == Items.lava_bucket) return 20000;
            if (item == Item.getItemFromBlock(Blocks.sapling)) return 100;
            if (item == Items.blaze_rod) return 2400;
            return GameRegistry.getFuelValue(itemstack);
        }
    }

    /** True when the stack can serve as fuel. */
    public static boolean isItemFuel(ItemStack itemstack) {
        return getItemBurnTime(itemstack) > 0;
    }

    @Override
    public int getSizeInventory() {
        return 6;
    }

    // Restores burn/cook progress from saved NBT data.
    public void readFromNBT(NBTTagCompound nbt) {
        super.readFromNBT(nbt);
        this.cookTime = (int) nbt.getShort("cookTime");
        this.burnTime = (int) nbt.getShort("burnTime");
        this.currentItemBurnTime = (int) nbt.getShort("currentItemBurnTime");
    }

    // Persists burn/cook progress to NBT.
    public void writeToNBT(NBTTagCompound nbt) {
        super.writeToNBT(nbt);
        nbt.setShort("cookTime", (short) cookTime);
        nbt.setShort("burnTime", (short) this.burnTime);
        nbt.setShort("currentItemBurnTime", (short) this.currentItemBurnTime);
    }

    public boolean isBurning() {
        return this.burnTime > 0;
    }

    /**
     * Per-tick update: consumes fuel, advances smelting, syncs the visible
     * block state when the burning state changes, and ejects output.
     * All state changes happen server-side only.
     */
    public void updateEntity() {
        super.updateEntity();
        // flag: was burning at the start of this tick; flag1: inventory dirtied.
        boolean flag = this.burnTime > 0;
        boolean flag1 = false;
        if (this.burnTime > 0) this.burnTime--;
        if (!this.worldObj.isRemote) {
            if (this.burnTime == 0 && this.canSmelt() && canWork) {
                // Start a new fuel item; *2 makes fuel last twice as long as
                // in a vanilla furnace.
                this.currentItemBurnTime = this.burnTime = getItemBurnTime(this.inventory[1]) * 2;
                if (this.burnTime > 0) {
                    flag1 = true;
                    if (this.inventory[1] != null) {
                        this.inventory[1].stackSize--;
                        if (this.inventory[1].stackSize == 0) {
                            // e.g. lava bucket -> empty bucket.
                            this.inventory[1] = this.inventory[1].getItem().getContainerItem(this.inventory[1]);
                        }
                    }
                }
            }
            if (this.isBurning() && this.canSmelt() && canWork) {
                this.cookTime++;
                if (this.cookTime == getMachineSpeed()) {
                    this.cookTime = 0;
                    this.smeltItem();
                    flag1 = true;
                }
            } else this.cookTime = 0;
            // Burning state flipped this tick: swap the active/idle block.
            if (flag != this.isBurning()) {
                flag1 = true;
                BlockCopperFurnace.updateBlockState(this.burnTime > 0, this.worldObj, this.xCoord, this.yCoord, this.zCoord, ModBlocks.copperFurnaceActive, ModBlocks.copperFurnaceIdle);
            }
        }
        if (flag1) this.markDirty();
        if (!worldObj.isRemote) {
            eject(getEjectorMode(), 2);
        }
    }

    /**
     * True when slot 0 has a smeltable item and the result fits into the
     * output slot (empty, same item, and within stack limits).
     */
    private boolean canSmelt() {
        if (this.inventory[0] == null) {
            return false;
        } else {
            ItemStack itemstack = FurnaceRecipes.smelting().getSmeltingResult(this.inventory[0]);
            if (itemstack == null) return false;
            if (this.inventory[2] == null) return true;
            if (!this.inventory[2].isItemEqual(itemstack)) return false;
            int result = this.inventory[2].stackSize + itemstack.stackSize;
            return (result <= getInventoryStackLimit() && result <= itemstack.getMaxStackSize());
        }
    }

    /** Moves one smelting result into the output slot and consumes the input. */
    public void smeltItem() {
        if (this.canSmelt()) {
            ItemStack itemstack = FurnaceRecipes.smelting().getSmeltingResult(this.inventory[0]);
            if (this.inventory[2] == null)
                this.inventory[2] = itemstack.copy();
            else if (this.inventory[2].isItemEqual(itemstack))
                this.inventory[2].stackSize += itemstack.stackSize;
            this.inventory[0].stackSize--;
            if (this.inventory[0].stackSize <= 0)
                this.inventory[0] = null;
        }
    }

    // Output slot accepts nothing; fuel slot accepts only fuel.
    @Override
    public boolean isItemValidForSlot(int slot, ItemStack itemStack) {
        return slot == 2 ? false : (slot == 1 ? isItemFuel(itemStack) : true);
    }

    // From the fuel slot via the bottom face, only empty buckets may be pulled.
    @Override
    public boolean canExtractItem(int var1, ItemStack var2, int var3) {
        return var3 != 0 || var1 != 1 || var2.getItem() == Items.bucket;
    }

    // GUI helper: remaining burn time scaled to a gauge of height i.
    // (Name typo "Reaming" kept — renaming would break external callers.)
    public int getBurnTimeReamingScaled(int i) {
        if (this.currentItemBurnTime == 0) {
            this.currentItemBurnTime = getMachineSpeed();
        }
        return this.burnTime * i / this.currentItemBurnTime;
    }

    // GUI helper: cooking progress scaled to a gauge of width `scale`.
    public int getCookProgressScaled(int scale) {
        if (getMachineSpeed() != 0)
            return this.cookTime * scale / this.getMachineSpeed();
        return 0;
    }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.codeStyle;
import com.intellij.lang.Language;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.ExtensionException;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.*;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ClassMap;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.util.*;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
public class CodeStyleSettings extends CommonCodeStyleSettings implements Cloneable, JDOMExternalizable {
private static final Logger LOG = Logger.getInstance("#" + CodeStyleSettings.class.getName());
private final ClassMap<CustomCodeStyleSettings> myCustomSettings = new ClassMap<CustomCodeStyleSettings>();
@NonNls private static final String ADDITIONAL_INDENT_OPTIONS = "ADDITIONAL_INDENT_OPTIONS";
@NonNls private static final String FILETYPE = "fileType";
private CommonCodeStyleSettingsManager myCommonSettingsManager = new CommonCodeStyleSettingsManager(this);
/** Creates settings with all registered CodeStyleSettingsProvider extensions loaded. */
public CodeStyleSettings() {
    this(true);
}
/**
 * @param loadExtensions when true, a custom settings object is created and
 *                       registered for every CodeStyleSettingsProvider extension.
 */
public CodeStyleSettings(boolean loadExtensions) {
    super(null);
    initTypeToName();
    initImportsByDefault();
    if (loadExtensions) {
        final CodeStyleSettingsProvider[] codeStyleSettingsProviders = Extensions.getExtensions(CodeStyleSettingsProvider.EXTENSION_POINT_NAME);
        for (final CodeStyleSettingsProvider provider : codeStyleSettingsProviders) {
            addCustomSettings(provider.createCustomSettings(this));
        }
    }
}
// Seeds the default Java import layout: java.awt / javax.swing use on-demand
// imports; the layout is other imports, blank line, javax.*, java.*, blank
// line, static imports. Entry order is significant.
private void initImportsByDefault() {
    PACKAGES_TO_USE_IMPORT_ON_DEMAND.addEntry(new PackageEntry(false, "java.awt", false));
    PACKAGES_TO_USE_IMPORT_ON_DEMAND.addEntry(new PackageEntry(false, "javax.swing", false));
    IMPORT_LAYOUT_TABLE.addEntry(PackageEntry.ALL_OTHER_IMPORTS_ENTRY);
    IMPORT_LAYOUT_TABLE.addEntry(PackageEntry.BLANK_LINE_ENTRY);
    IMPORT_LAYOUT_TABLE.addEntry(new PackageEntry(false, "javax", true));
    IMPORT_LAYOUT_TABLE.addEntry(new PackageEntry(false, "java", true));
    IMPORT_LAYOUT_TABLE.addEntry(PackageEntry.BLANK_LINE_ENTRY);
    IMPORT_LAYOUT_TABLE.addEntry(PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY);
}
// Installs the default type-to-name suggestions for parameters and locals;
// parameters additionally map any *Exception type to "e".
private void initTypeToName() {
    initGeneralLocalVariable(PARAMETER_TYPE_TO_NAME);
    initGeneralLocalVariable(LOCAL_VARIABLE_TYPE_TO_NAME);
    PARAMETER_TYPE_TO_NAME.addPair("*Exception", "e");
}
// Fills the map with the default name suggestion for each common type.
private static void initGeneralLocalVariable(@NonNls TypeToNameMap map) {
    final String[][] defaults = {
        {"int", "i"},
        {"byte", "b"},
        {"char", "c"},
        {"long", "l"},
        {"short", "i"},
        {"boolean", "b"},
        {"double", "v"},
        {"float", "v"},
        {"java.lang.Object", "o"},
        {"java.lang.String", "s"},
    };
    for (final String[] pair : defaults) {
        map.addPair(pair[0], pair[1]);
    }
}
public void setParentSettings(CodeStyleSettings parent) {
    myParentSettings = parent;
}

public CodeStyleSettings getParentSettings() {
    return myParentSettings;
}

// Registers a provider-created settings object, keyed by its concrete class.
// Null is tolerated so a provider may decline to contribute settings.
private void addCustomSettings(CustomCodeStyleSettings settings) {
    if (settings != null) {
        synchronized (myCustomSettings) {
            myCustomSettings.put(settings.getClass(), settings);
        }
    }
}
/**
 * Returns the custom settings object registered for {@code aClass}.
 * The unchecked cast is safe because addCustomSettings keys every object by
 * its own concrete class.
 */
public <T extends CustomCodeStyleSettings> T getCustomSettings(@NotNull Class<T> aClass) {
    synchronized (myCustomSettings) {
        return (T)myCustomSettings.get(aClass);
    }
}
@Override
public CodeStyleSettings clone() {
    // Deliberately builds a fresh instance (with extensions loaded) and copies
    // state into it, instead of calling super.clone().
    CodeStyleSettings clone = new CodeStyleSettings();
    clone.copyFrom(this);
    return clone;
}
// Replaces all custom, naming, import, and indent settings with deep copies
// of the source's; performed under the custom-settings lock.
private void copyCustomSettingsFrom(@NotNull CodeStyleSettings from) {
    synchronized (myCustomSettings) {
        myCustomSettings.clear();
        for (final CustomCodeStyleSettings settings : from.getCustomSettingsValues()) {
            addCustomSettings((CustomCodeStyleSettings)settings.clone());
        }
        FIELD_TYPE_TO_NAME.copyFrom(from.FIELD_TYPE_TO_NAME);
        STATIC_FIELD_TYPE_TO_NAME.copyFrom(from.STATIC_FIELD_TYPE_TO_NAME);
        PARAMETER_TYPE_TO_NAME.copyFrom(from.PARAMETER_TYPE_TO_NAME);
        LOCAL_VARIABLE_TYPE_TO_NAME.copyFrom(from.LOCAL_VARIABLE_TYPE_TO_NAME);
        PACKAGES_TO_USE_IMPORT_ON_DEMAND.copyFrom(from.PACKAGES_TO_USE_IMPORT_ON_DEMAND);
        IMPORT_LAYOUT_TABLE.copyFrom(from.IMPORT_LAYOUT_TABLE);
        OTHER_INDENT_OPTIONS.copyFrom(from.OTHER_INDENT_OPTIONS);
        // Per-file-type indent options are cloned entry by entry.
        myAdditionalIndentOptions.clear();
        for (Map.Entry<FileType, IndentOptions> optionEntry : from.myAdditionalIndentOptions.entrySet()) {
            IndentOptions options = optionEntry.getValue();
            myAdditionalIndentOptions.put(optionEntry.getKey(), (IndentOptions)options.clone());
        }
        myCommonSettingsManager = from.myCommonSettingsManager.clone(this);
    }
}
/** Copies all public fields plus custom/per-type/indent settings from {@code from}. */
public void copyFrom(CodeStyleSettings from) {
    copyPublicFields(from, this);
    copyCustomSettingsFrom(from);
}
public boolean USE_SAME_INDENTS = false;
public boolean IGNORE_SAME_INDENTS_FOR_LANGUAGES = false;
public boolean AUTODETECT_INDENTS = true;
public final IndentOptions OTHER_INDENT_OPTIONS = new IndentOptions();
private final Map<FileType, IndentOptions> myAdditionalIndentOptions = new LinkedHashMap<FileType, IndentOptions>();
private static final String ourSystemLineSeparator = SystemProperties.getLineSeparator();
/**
 * Line separator. It can be null if the chosen line separator is "System-dependent"!
 */
public String LINE_SEPARATOR;
/**
 * @return the line separator. If the chosen line separator is "System-dependent", the default separator for this OS is returned.
 */
public String getLineSeparator() {
    // Null means the "System-dependent" option: fall back to the OS default.
    if (LINE_SEPARATOR == null) {
        return ourSystemLineSeparator;
    }
    return LINE_SEPARATOR;
}
//----------------- NAMING CONVENTIONS --------------------
public String FIELD_NAME_PREFIX = "";
public String STATIC_FIELD_NAME_PREFIX = "";
public String PARAMETER_NAME_PREFIX = "";
public String LOCAL_VARIABLE_NAME_PREFIX = "";
public String FIELD_NAME_SUFFIX = "";
public String STATIC_FIELD_NAME_SUFFIX = "";
public String PARAMETER_NAME_SUFFIX = "";
public String LOCAL_VARIABLE_NAME_SUFFIX = "";
public boolean PREFER_LONGER_NAMES = true;
public final TypeToNameMap FIELD_TYPE_TO_NAME = new TypeToNameMap();
public final TypeToNameMap STATIC_FIELD_TYPE_TO_NAME = new TypeToNameMap();
@NonNls public final TypeToNameMap PARAMETER_TYPE_TO_NAME = new TypeToNameMap();
public final TypeToNameMap LOCAL_VARIABLE_TYPE_TO_NAME = new TypeToNameMap();
//----------------- 'final' modifier settings -------
public boolean GENERATE_FINAL_LOCALS = false;
public boolean GENERATE_FINAL_PARAMETERS = false;
//----------------- visibility -----------------------------
public String VISIBILITY = "public";
//----------------- generate parentheses around method arguments ----------
public boolean PARENTHESES_AROUND_METHOD_ARGUMENTS = true;
//----------------- annotations ----------------
public boolean USE_EXTERNAL_ANNOTATIONS = false;
public boolean INSERT_OVERRIDE_ANNOTATION = true;
//----------------- override -------------------
public boolean REPEAT_SYNCHRONIZED = true;
//----------------- IMPORTS --------------------
public boolean LAYOUT_STATIC_IMPORTS_SEPARATELY = true;
public boolean USE_FQ_CLASS_NAMES = false;
public boolean USE_FQ_CLASS_NAMES_IN_JAVADOC = true;
public boolean USE_SINGLE_CLASS_IMPORTS = true;
public boolean INSERT_INNER_CLASS_IMPORTS = false;
public int CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND = 5;
public int NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND = 3;
public final PackageEntryTable PACKAGES_TO_USE_IMPORT_ON_DEMAND = new PackageEntryTable();
public final PackageEntryTable IMPORT_LAYOUT_TABLE = new PackageEntryTable();
//----------------- ORDER OF MEMBERS ------------------
public int STATIC_FIELDS_ORDER_WEIGHT = 1;
public int FIELDS_ORDER_WEIGHT = 2;
public int CONSTRUCTORS_ORDER_WEIGHT = 3;
public int STATIC_METHODS_ORDER_WEIGHT = 4;
public int METHODS_ORDER_WEIGHT = 5;
public int STATIC_INNER_CLASSES_ORDER_WEIGHT = 6;
public int INNER_CLASSES_ORDER_WEIGHT = 7;
//----------------- WRAPPING ---------------------------
/**
* @deprecated Use get/setRightMargin() methods instead.
*/
@Deprecated
/**
* <b>Do not use this field directly since it doesn't reflect a setting for a specific language which may
* overwrite this one. Call {@link #isWrapOnTyping(Language)} method instead.</b>
*
* @see #WRAP_ON_TYPING
*/
public boolean WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN = false;
// ---------------------------------- Javadoc formatting options -------------------------
public boolean ENABLE_JAVADOC_FORMATTING = true;
/**
* Align parameter comments to longest parameter name
*/
public boolean JD_ALIGN_PARAM_COMMENTS = true;
/**
* Align exception comments to longest exception name
*/
public boolean JD_ALIGN_EXCEPTION_COMMENTS = true;
public boolean JD_ADD_BLANK_AFTER_PARM_COMMENTS = false;
public boolean JD_ADD_BLANK_AFTER_RETURN = false;
public boolean JD_ADD_BLANK_AFTER_DESCRIPTION = true;
public boolean JD_P_AT_EMPTY_LINES = true;
public boolean JD_KEEP_INVALID_TAGS = true;
public boolean JD_KEEP_EMPTY_LINES = true;
public boolean JD_DO_NOT_WRAP_ONE_LINE_COMMENTS = false;
public boolean JD_USE_THROWS_NOT_EXCEPTION = true;
public boolean JD_KEEP_EMPTY_PARAMETER = true;
public boolean JD_KEEP_EMPTY_EXCEPTION = true;
public boolean JD_KEEP_EMPTY_RETURN = true;
public boolean JD_LEADING_ASTERISKS_ARE_ENABLED = true;
public boolean JD_PRESERVE_LINE_FEEDS = false;
public boolean JD_PARAM_DESCRIPTION_ON_NEW_LINE = false;
// ---------------------------------------------------------------------------------------


// ---------------------------------- HTML formatting options -------------------------
public boolean HTML_KEEP_WHITESPACES = false;
// Wrap modes use the WRAP_* constants (e.g. WRAP_AS_NEEDED) defined on this class.
public int HTML_ATTRIBUTE_WRAP = WRAP_AS_NEEDED;
public int HTML_TEXT_WRAP = WRAP_AS_NEEDED;

public boolean HTML_KEEP_LINE_BREAKS = true;
public boolean HTML_KEEP_LINE_BREAKS_IN_TEXT = true;
// Maximum number of consecutive blank lines to preserve.
public int HTML_KEEP_BLANK_LINES = 2;

public boolean HTML_ALIGN_ATTRIBUTES = true;
public boolean HTML_ALIGN_TEXT = false;

// NOTE(review): "ATTRINUTE" is a typo, but field names are the keys persisted by
// DefaultJDOMExternalizer (see readExternal/writeExternal), so renaming it would
// break previously saved settings. Keep as is.
public boolean HTML_SPACE_AROUND_EQUALITY_IN_ATTRINUTE = false;
public boolean HTML_SPACE_AFTER_TAG_NAME = false;
public boolean HTML_SPACE_INSIDE_EMPTY_TAG = false;

// Comma-separated tag lists controlling line-break and indentation behavior.
@NonNls public String HTML_ELEMENTS_TO_INSERT_NEW_LINE_BEFORE = "body,div,p,form,h1,h2,h3";
@NonNls public String HTML_ELEMENTS_TO_REMOVE_NEW_LINE_BEFORE = "br";
@NonNls public String HTML_DO_NOT_INDENT_CHILDREN_OF = "html,body,thead,tbody,tfoot";
public int HTML_DO_NOT_ALIGN_CHILDREN_OF_MIN_LINES = 200;

@NonNls public String HTML_KEEP_WHITESPACES_INSIDE = "span,pre";
@NonNls public String HTML_INLINE_ELEMENTS =
    "a,abbr,acronym,b,basefont,bdo,big,br,cite,cite,code,dfn,em,font,i,img,input,kbd,label,q,s,samp,select,span,strike,strong,sub,sup,textarea,tt,u,var";
@NonNls public String HTML_DONT_ADD_BREAKS_IF_INLINE_CONTENT = "title,h1,h2,h3,h4,h5,h6,p";
// ---------------------------------------------------------------------------------------


// true if <%page import="x.y.z, x.y.t"%>
// false if <%page import="x.y.z"%>
//          <%page import="x.y.t"%>
public boolean JSP_PREFER_COMMA_SEPARATED_IMPORT_LIST = false;
//----------------------------------------------------------------------------------------

// region Formatter control

// Master switch for honoring @formatter:on/off marker comments.
public boolean FORMATTER_TAGS_ENABLED = false;
public String FORMATTER_ON_TAG = "@formatter:on";
public String FORMATTER_OFF_TAG = "@formatter:off";

// When true the two tags above are interpreted as regular expressions
// (compiled lazily by getFormatterOn/OffPattern()).
public volatile boolean FORMATTER_TAGS_ACCEPT_REGEXP = false;
// Lazily compiled tag patterns; volatile since they are initialized on first use.
private volatile Pattern myFormatterOffPattern = null;
private volatile Pattern myFormatterOnPattern = null;
/**
 * Lazily compiles and returns the "formatter off" tag pattern. Returns null while
 * formatter tags or regexp tags are disabled, or if the tag text failed to compile.
 */
@Nullable
public Pattern getFormatterOffPattern() {
    boolean regexpTagsActive = FORMATTER_TAGS_ENABLED && FORMATTER_TAGS_ACCEPT_REGEXP;
    if (regexpTagsActive && myFormatterOffPattern == null) {
        // First use: compile the OFF tag; on bad syntax the helper disables regexp mode.
        myFormatterOffPattern = getPatternOrDisableRegexp(FORMATTER_OFF_TAG);
    }
    return myFormatterOffPattern;
}
// Overwrites (or resets to null) the cached compiled "formatter off" pattern.
public void setFormatterOffPattern(@Nullable Pattern formatterOffPattern) {
    myFormatterOffPattern = formatterOffPattern;
}
/**
 * Lazily compiles and returns the "formatter on" tag pattern. Returns null while
 * formatter tags or regexp tags are disabled, or if the tag text failed to compile.
 */
@Nullable
public Pattern getFormatterOnPattern() {
    // Bug fix: the guard previously tested myFormatterOffPattern (copy-paste from
    // getFormatterOffPattern()), so the ON pattern was either recompiled on every
    // call or — once the OFF pattern was cached — never compiled at all.
    if (myFormatterOnPattern == null && FORMATTER_TAGS_ENABLED && FORMATTER_TAGS_ACCEPT_REGEXP) {
        myFormatterOnPattern = getPatternOrDisableRegexp(FORMATTER_ON_TAG);
    }
    return myFormatterOnPattern;
}
// Overwrites (or resets to null) the cached compiled "formatter on" pattern.
public void setFormatterOnPattern(@Nullable Pattern formatterOnPattern) {
    myFormatterOnPattern = formatterOnPattern;
}
/**
 * Compiles the given marker text as a regular expression. On invalid syntax the
 * error is logged, regexp-style tags are turned off, and null is returned.
 */
@Nullable
private Pattern getPatternOrDisableRegexp(@NotNull String markerText) {
    try {
        return Pattern.compile(markerText);
    }
    catch (PatternSyntaxException pse) {
        // Invalid pattern: report it and fall back to plain-text tag matching.
        String details = "Loaded regexp pattern is invalid: '" + markerText + "', error message: " + pse.getMessage();
        LOG.error(details);
        FORMATTER_TAGS_ACCEPT_REGEXP = false;
        return null;
    }
}
// endregion

//----------------------------------------------------------------------------------------

// Settings this instance was derived from; presumably used for fallback/diffing —
// confirm at the assignment sites outside this chunk.
private CodeStyleSettings myParentSettings;
// One-shot guard so extension-provided indent options are loaded only once
// (see loadAdditionalIndentOptions()).
private boolean myLoadedAdditionalIndentOptions;
// Returns an unmodifiable view of all registered custom (per-plugin) settings.
// NOTE(review): this is a live view, not a copy — callers iterate it after the
// lock below is released; confirm concurrent mutation of myCustomSettings is
// impossible during those iterations.
@NotNull
private Collection<CustomCodeStyleSettings> getCustomSettingsValues() {
    synchronized (myCustomSettings) {
        return Collections.unmodifiableCollection(myCustomSettings.values());
    }
}
/**
 * Restores settings from XML: plain public fields via DefaultJDOMExternalizer,
 * then custom per-plugin settings, per-file-type indent options, and finally
 * per-language common settings.
 */
@Override
public void readExternal(Element element) throws InvalidDataException {
    DefaultJDOMExternalizer.readExternal(this, element);
    if (LAYOUT_STATIC_IMPORTS_SEPARATELY) {
        // add <all other static imports> entry if there is none
        boolean found = false;
        for (PackageEntry entry : IMPORT_LAYOUT_TABLE.getEntries()) {
            if (entry == PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY) {
                found = true;
                break;
            }
        }
        if (!found) {
            // Separate the appended entry from the previous group with a blank line,
            // unless one is already there.
            PackageEntry last = IMPORT_LAYOUT_TABLE.getEntryCount() == 0 ? null : IMPORT_LAYOUT_TABLE.getEntryAt(IMPORT_LAYOUT_TABLE.getEntryCount() - 1);
            if (last != PackageEntry.BLANK_LINE_ENTRY) {
                IMPORT_LAYOUT_TABLE.addEntry(PackageEntry.BLANK_LINE_ENTRY);
            }
            IMPORT_LAYOUT_TABLE.addEntry(PackageEntry.ALL_OTHER_STATIC_IMPORTS_ENTRY);
        }
    }
    for (final CustomCodeStyleSettings settings : getCustomSettingsValues()) {
        settings.readExternal(element);
        settings.importLegacySettings();
    }

    final List<Element> list = element.getChildren(ADDITIONAL_INDENT_OPTIONS);
    for (Element o : list) {
        final String fileTypeId = o.getAttributeValue(FILETYPE);
        if (fileTypeId != null && !fileTypeId.isEmpty()) {
            FileType target = FileTypeManager.getInstance().getFileTypeByExtension(fileTypeId);
            // Unknown or unusable file type: keep the stored options alive under a
            // placeholder type keyed by the extension (see TempFileType).
            if (target == UnknownFileType.INSTANCE || target == PlainTextFileType.INSTANCE || target.getDefaultExtension().isEmpty()) {
                target = new TempFileType(fileTypeId);
            }

            final IndentOptions options = getDefaultIndentOptions(target);
            options.readExternal(o);
            registerAdditionalIndentOptions(target, options);
        }
    }

    myCommonSettingsManager.readExternal(element);
    if (USE_SAME_INDENTS) IGNORE_SAME_INDENTS_FOR_LANGUAGES = true;
}
/**
 * Serializes to XML only the values that differ from a freshly constructed
 * CodeStyleSettings, keeping the stored document minimal. Custom settings and
 * additional indent options are written in sorted order for stable output.
 */
@Override
public void writeExternal(Element element) throws WriteExternalException {
    // Baseline defaults: only differences from this instance are persisted.
    final CodeStyleSettings parentSettings = new CodeStyleSettings();
    DefaultJDOMExternalizer.writeExternal(this, element, new DifferenceFilter<CodeStyleSettings>(this, parentSettings));
    List<CustomCodeStyleSettings> customSettings = new ArrayList<CustomCodeStyleSettings>(getCustomSettingsValues());

    // Sort by tag name so the generated XML is deterministic across runs.
    Collections.sort(customSettings, new Comparator<CustomCodeStyleSettings>() {
        @Override
        public int compare(final CustomCodeStyleSettings o1, final CustomCodeStyleSettings o2) {
            return o1.getTagName().compareTo(o2.getTagName());
        }
    });

    for (final CustomCodeStyleSettings settings : customSettings) {
        final CustomCodeStyleSettings parentCustomSettings = parentSettings.getCustomSettings(settings.getClass());
        if (parentCustomSettings == null) {
            throw new WriteExternalException("Custom settings are null for " + settings.getClass());
        }
        settings.writeExternal(element, parentCustomSettings);
    }

    final FileType[] fileTypes = myAdditionalIndentOptions.keySet().toArray(new FileType[myAdditionalIndentOptions.keySet().size()]);

    // Same determinism for the additional (per-file-type) indent options.
    Arrays.sort(fileTypes, new Comparator<FileType>() {
        @Override
        public int compare(final FileType o1, final FileType o2) {
            return o1.getDefaultExtension().compareTo(o2.getDefaultExtension());
        }
    });

    for (FileType fileType : fileTypes) {
        final IndentOptions indentOptions = myAdditionalIndentOptions.get(fileType);
        Element additionalIndentOptions = new Element(ADDITIONAL_INDENT_OPTIONS);
        indentOptions.serialize(additionalIndentOptions, getDefaultIndentOptions(fileType));
        additionalIndentOptions.setAttribute(FILETYPE, fileType.getDefaultExtension());
        // Skip elements where nothing differs from the defaults.
        if (!additionalIndentOptions.getChildren().isEmpty()) {
            element.addContent(additionalIndentOptions);
        }
    }

    myCommonSettingsManager.writeExternal(element);
}
// Returns the default indent options supplied by the FileTypeIndentOptionsProvider
// extension matching this file type, or plain defaults if no extension claims it.
private static IndentOptions getDefaultIndentOptions(FileType fileType) {
    final FileTypeIndentOptionsProvider[] providers = Extensions.getExtensions(FileTypeIndentOptionsProvider.EP_NAME);
    for (final FileTypeIndentOptionsProvider provider : providers) {
        if (provider.getFileType().equals(fileType)) {
            return getFileTypeIndentOptions(provider);
        }
    }
    return new IndentOptions();
}
/**
 * Returns the shared fallback indent options not tied to any language or file type.
 */
@Override
@Nullable
public IndentOptions getIndentOptions() {
    return OTHER_INDENT_OPTIONS;
}
/**
 * If the file type has an associated language and language indent options are defined, returns these options. Otherwise attempts to find
 * indent options from <code>FileTypeIndentOptionsProvider</code>. If none are found, other indent options are returned.
 *
 * @param fileType The file type to search indent options for.
 * @return File type indent options or <code>OTHER_INDENT_OPTIONS</code>.
 * @see FileTypeIndentOptionsProvider
 * @see LanguageCodeStyleSettingsProvider
 */
public IndentOptions getIndentOptions(@Nullable FileType fileType) {
    // 1. Language-specific options win.
    IndentOptions indentOptions = getLanguageIndentOptions(fileType);
    if (indentOptions != null) return indentOptions;

    // 2. Uniform indents requested, or nothing to look up: shared fallback.
    if (USE_SAME_INDENTS || fileType == null) return OTHER_INDENT_OPTIONS;

    // 3. Options contributed by FileTypeIndentOptionsProvider extensions (lazy-loaded).
    if (!myLoadedAdditionalIndentOptions) {
        loadAdditionalIndentOptions();
    }
    indentOptions = myAdditionalIndentOptions.get(fileType);
    if (indentOptions != null) return indentOptions;

    // 4. Nothing matched.
    return OTHER_INDENT_OPTIONS;
}
/** Convenience overload of {@link #getIndentOptionsByFile(PsiFile, TextRange, boolean)} with no format range. */
@NotNull
public IndentOptions getIndentOptionsByFile(@Nullable PsiFile file) {
    return getIndentOptionsByFile(file, null);
}

/** Convenience overload of {@link #getIndentOptionsByFile(PsiFile, TextRange, boolean)} honoring document-stored options. */
@NotNull
public IndentOptions getIndentOptionsByFile(@Nullable PsiFile file, @Nullable TextRange formatRange) {
    return getIndentOptionsByFile(file, formatRange, false);
}
/**
 * Retrieves indent options for PSI file from an associated document or (if not defined in the document) from file indent options
 * providers.
 *
 * @param file  The PSI file to retrieve options for.
 * @param formatRange The text range within the file for formatting purposes or null if there is either no specific range or multiple
 *                    ranges. If the range covers the entire file (full reformat), options stored in the document are ignored and
 *                    indent options are taken from file indent options providers.
 * @param ignoreDocOptions Ignore options stored in the document and use file indent options providers even if there is no text range
 *                         or the text range doesn't cover the entire file.
 * @return Indent options from the associated document or file indent options providers.
 * @see com.intellij.psi.codeStyle.FileIndentOptionsProvider
 */
@NotNull
public IndentOptions getIndentOptionsByFile(@Nullable PsiFile file, @Nullable TextRange formatRange, boolean ignoreDocOptions) {
    if (file != null && file.isValid()) {
        boolean isFullReformat = isFileFullyCoveredByRange(file, formatRange);
        // Document-attached options apply only for partial formats (see javadoc).
        if (!ignoreDocOptions && !isFullReformat) {
            IndentOptions docOptions = IndentOptions.retrieveFromAssociatedDocument(file);
            if (docOptions != null) return docOptions;
        }

        // First provider that returns options wins.
        FileIndentOptionsProvider[] providers = FileIndentOptionsProvider.EP_NAME.getExtensions();
        for (FileIndentOptionsProvider provider : providers) {
            if (!isFullReformat || provider.useOnFullReformat()) {
                IndentOptions indentOptions = provider.getIndentOptions(this, file);
                if (indentOptions != null) {
                    logIndentOptions(file, provider, indentOptions);
                    return indentOptions;
                }
            }
        }
        return getIndentOptions(file.getFileType());
    }
    else {
        // No valid file to inspect: fall back to the shared options.
        return OTHER_INDENT_OPTIONS;
    }
}
// True when the format range is present and spans the file's entire text range,
// i.e. the operation is a full reformat.
private static boolean isFileFullyCoveredByRange(@NotNull PsiFile file, @Nullable TextRange formatRange) {
    if (formatRange == null) {
        return false;
    }
    return file.getTextRange().equals(formatRange);
}
// Debug trace of which provider produced the indent options for a file.
private static void logIndentOptions(@NotNull PsiFile file, @NotNull FileIndentOptionsProvider provider, @NotNull IndentOptions options) {
    String message = "Indent options returned by " + provider.getClass().getName()
                     + " for " + file.getName()
                     + ": indent size=" + options.INDENT_SIZE
                     + ", use tabs=" + options.USE_TAB_CHARACTER
                     + ", tab size=" + options.TAB_SIZE;
    LOG.debug(message);
}
/**
 * Returns indent options from the language's common settings when {@code fileType}
 * is a language file type with its own settings instance; null otherwise.
 */
@Nullable
private IndentOptions getLanguageIndentOptions(@Nullable FileType fileType) {
    // instanceof already rejects null, so the former explicit null check was redundant.
    if (!(fileType instanceof LanguageFileType)) return null;
    Language lang = ((LanguageFileType)fileType).getLanguage();
    CommonCodeStyleSettings langSettings = getCommonSettings(lang);
    // getCommonSettings apparently falls back to this root instance when the language
    // has no own settings; in that case there is nothing language-specific to return.
    return langSettings == this ? null : langSettings.getIndentOptions();
}
// Thin per-file-type accessors, all delegating to getIndentOptions(FileType).

public boolean isSmartTabs(FileType fileType) {
    return getIndentOptions(fileType).SMART_TABS;
}

public int getIndentSize(FileType fileType) {
    return getIndentOptions(fileType).INDENT_SIZE;
}

public int getContinuationIndentSize(FileType fileType) {
    return getIndentOptions(fileType).CONTINUATION_INDENT_SIZE;
}

public int getLabelIndentSize(FileType fileType) {
    return getIndentOptions(fileType).LABEL_INDENT_SIZE;
}

public boolean getLabelIndentAbsolute(FileType fileType) {
    return getIndentOptions(fileType).LABEL_INDENT_ABSOLUTE;
}

public int getTabSize(FileType fileType) {
    return getIndentOptions(fileType).TAB_SIZE;
}

public boolean useTabCharacter(FileType fileType) {
    return getIndentOptions(fileType).USE_TAB_CHARACTER;
}
/**
 * Ordered map from type-name patterns to suggested names. A pattern starting with
 * '*' matches any type name ending with the remaining suffix; otherwise the match
 * is exact. The first matching pattern wins. Persisted as {@code <pair type=".."
 * name=".."/>} elements — the element/attribute names are part of the stored
 * settings format and must not change.
 */
public static class TypeToNameMap implements JDOMExternalizable {
    // Parallel lists: myNames.get(i) is the name for myPatterns.get(i).
    private final List<String> myPatterns = new ArrayList<String>();
    private final List<String> myNames = new ArrayList<String>();

    // Appends a pattern/name pair; insertion order determines lookup priority.
    public void addPair(String pattern, String name) {
        myPatterns.add(pattern);
        myNames.add(name);
    }

    // Returns the name for the first matching pattern, or null if nothing matches.
    public String nameByType(String type) {
        for (int i = 0; i < myPatterns.size(); i++) {
            String pattern = myPatterns.get(i);
            if (StringUtil.startsWithChar(pattern, '*')) {
                // '*Suffix' wildcard: match on the suffix only.
                if (type.endsWith(pattern.substring(1))) {
                    return myNames.get(i);
                }
            }
            else {
                if (type.equals(pattern)) {
                    return myNames.get(i);
                }
            }
        }
        return null;
    }

    @Override
    public void readExternal(@NonNls Element element) throws InvalidDataException {
        myPatterns.clear();
        myNames.clear();
        for (final Object o : element.getChildren("pair")) {
            @NonNls Element e = (Element)o;

            String pattern = e.getAttributeValue("type");
            String name = e.getAttributeValue("name");
            // Both attributes are mandatory; refuse to load a partial pair.
            if (pattern == null || name == null) {
                throw new InvalidDataException();
            }

            myPatterns.add(pattern);
            myNames.add(name);
        }
    }

    @Override
    public void writeExternal(Element parentNode) throws WriteExternalException {
        for (int i = 0; i < myPatterns.size(); i++) {
            String pattern = myPatterns.get(i);
            String name = myNames.get(i);
            @NonNls Element element = new Element("pair");
            parentNode.addContent(element);
            element.setAttribute("type", pattern);
            element.setAttribute("name", name);
        }
    }

    // Replaces this map's contents with a copy of another map's.
    public void copyFrom(TypeToNameMap from) {
        assert from != this;
        myPatterns.clear();
        myPatterns.addAll(from.myPatterns);
        myNames.clear();
        myNames.addAll(from.myNames);
    }

    @Override
    public boolean equals(Object other) {
        if (other instanceof TypeToNameMap) {
            TypeToNameMap otherMap = (TypeToNameMap)other;
            return myPatterns.equals(otherMap.myPatterns) && myNames.equals(otherMap.myNames);
        }
        return false;
    }

    @Override
    public int hashCode() {
        // Order-insensitive sum of element hashes; still consistent with equals
        // (equal lists always produce equal sums).
        int code = 0;
        for (String myPattern : myPatterns) {
            code += myPattern.hashCode();
        }
        for (String myName : myNames) {
            code += myName.hashCode();
        }
        return code;
    }
}
// Registers extension-provided indent options for a file type, unless a type with
// the same default extension is already present (first registration wins).
private void registerAdditionalIndentOptions(FileType fileType, IndentOptions options) {
    String extension = fileType.getDefaultExtension();
    for (final FileType existing : myAdditionalIndentOptions.keySet()) {
        if (Comparing.strEqual(existing.getDefaultExtension(), extension)) {
            // Duplicate extension: keep the earlier registration.
            return;
        }
    }
    myAdditionalIndentOptions.put(fileType, options);
}
// Removes any extension-contributed indent options for the given file type.
public void unregisterAdditionalIndentOptions(FileType fileType) {
    myAdditionalIndentOptions.remove(fileType);
}

// Returns extension-contributed indent options, loading them on first access.
public IndentOptions getAdditionalIndentOptions(FileType fileType) {
    // NOTE(review): myLoadedAdditionalIndentOptions is read without the lock taken
    // in loadAdditionalIndentOptions(); confirm this is effectively single-threaded.
    if (!myLoadedAdditionalIndentOptions) {
        loadAdditionalIndentOptions();
    }
    return myAdditionalIndentOptions.get(fileType);
}

// Pulls indent options from every FileTypeIndentOptionsProvider extension once,
// skipping file types that already have options (e.g. loaded from XML).
private void loadAdditionalIndentOptions() {
    synchronized (myAdditionalIndentOptions) {
        myLoadedAdditionalIndentOptions = true;
        final FileTypeIndentOptionsProvider[] providers = Extensions.getExtensions(FileTypeIndentOptionsProvider.EP_NAME);
        for (final FileTypeIndentOptionsProvider provider : providers) {
            if (!myAdditionalIndentOptions.containsKey(provider.getFileType())) {
                registerAdditionalIndentOptions(provider.getFileType(), getFileTypeIndentOptions(provider));
            }
        }
    }
}

// Asks the provider for its indent options, tolerating plugins compiled against an
// older version of FileTypeIndentOptionsProvider that lacks createIndentOptions().
private static IndentOptions getFileTypeIndentOptions(FileTypeIndentOptionsProvider provider) {
    try {
        return provider.createIndentOptions();
    }
    catch (AbstractMethodError error) {
        LOG.error("Plugin uses obsolete API.", new ExtensionException(provider.getClass()));
        return new IndentOptions();
    }
}

// Test-only: resets this instance to pristine default settings.
@TestOnly
public void clearCodeStyleSettings() {
    CodeStyleSettings cleanSettings = new CodeStyleSettings();
    copyFrom(cleanSettings);
    myAdditionalIndentOptions.clear(); //hack
    myLoadedAdditionalIndentOptions = false;
}
/**
 * Placeholder file type keyed only by extension. Used while reading settings XML to
 * keep per-extension indent options alive when the real file type (e.g. from a
 * plugin) is not available in this IDE instance (see readExternal()).
 */
private static class TempFileType implements FileType {
    private final String myExtension;

    private TempFileType(@NotNull final String extension) {
        myExtension = extension;
    }

    @Override
    @NotNull
    public String getName() {
        return "TempFileType";
    }

    @Override
    @NotNull
    public String getDescription() {
        return "TempFileType";
    }

    // The extension is the only piece of identity that matters for this placeholder.
    @Override
    @NotNull
    public String getDefaultExtension() {
        return myExtension;
    }

    @Override
    public Icon getIcon() {
        return null;
    }

    @Override
    public boolean isBinary() {
        return false;
    }

    @Override
    public boolean isReadOnly() {
        return false;
    }

    @Override
    public String getCharset(@NotNull VirtualFile file, @NotNull byte[] content) {
        return null;
    }
}
// Returns the per-language common settings, delegated to the settings manager.
public CommonCodeStyleSettings getCommonSettings(Language lang) {
    return myCommonSettingsManager.getCommonSettings(lang);
}

/**
 * @param langName The language name.
 * @return Language-specific code style settings or shared settings if not found.
 * @see CommonCodeStyleSettingsManager#getCommonSettings
 */
public CommonCodeStyleSettings getCommonSettings(String langName) {
    return myCommonSettingsManager.getCommonSettings(langName);
}
/**
 * Retrieves right margin for the given language. The language may overwrite default RIGHT_MARGIN value with its own RIGHT_MARGIN
 * in language's CommonCodeStyleSettings instance.
 *
 * @param language The language to get right margin for or null if root (default) right margin is requested.
 * @return The right margin for the language if it is defined (not null) and its settings contain non-negative margin. Root (default)
 *         margin otherwise (CodeStyleSettings.RIGHT_MARGIN).
 */
public int getRightMargin(@Nullable Language language) {
    // No language given: root margin applies.
    if (language == null) {
        return getDefaultRightMargin();
    }
    CommonCodeStyleSettings langSettings = getCommonSettings(language);
    // A negative language margin means "not overridden" — fall back to the root margin.
    if (langSettings == null || langSettings.RIGHT_MARGIN < 0) {
        return getDefaultRightMargin();
    }
    return langSettings.RIGHT_MARGIN;
}
/**
 * Assigns another right margin for the language or (if it is null) to root (default) margin.
 *
 * @param language    The language to assign the right margin to or null if root (default) right margin is to be changed.
 * @param rightMargin New right margin.
 */
public void setRightMargin(@Nullable Language language, int rightMargin) {
    CommonCodeStyleSettings langSettings = language == null ? null : getCommonSettings(language);
    if (langSettings != null) {
        // Language has its own settings instance: store the margin there.
        langSettings.RIGHT_MARGIN = rightMargin;
    }
    else {
        // No language (or no per-language settings): update the root margin.
        setDefaultRightMargin(rightMargin);
    }
}
// Root (default) right-margin accessors. Deprecation is suppressed because the
// deprecated RIGHT_MARGIN field is still the backing storage for the value.
@SuppressWarnings("deprecation")
public int getDefaultRightMargin() {
    return RIGHT_MARGIN;
}

@SuppressWarnings("deprecation")
public void setDefaultRightMargin(int rightMargin) {
    RIGHT_MARGIN = rightMargin;
}
/**
 * Defines whether or not wrapping should occur when typing reaches right margin.
 *
 * @param language The language to check the option for or null for a global option.
 * @return True if wrapping on right margin is enabled.
 */
public boolean isWrapOnTyping(@Nullable Language language) {
    CommonCodeStyleSettings langSettings = language == null ? null : getCommonSettings(language);
    // A non-DEFAULT per-language value overrides the global flag.
    if (langSettings != null && langSettings.WRAP_ON_TYPING != WrapOnTyping.DEFAULT.intValue) {
        return langSettings.WRAP_ON_TYPING == WrapOnTyping.WRAP.intValue;
    }
    //noinspection deprecation
    return WRAP_WHEN_TYPING_REACHES_RIGHT_MARGIN;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.http.client;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
/**
*
*/
/**
 * Downloads a URL to a local file on a daemon thread so an overall timeout can be
 * enforced, with optional progress reporting, HTTP redirect following (up to 5 hops)
 * and If-Modified-Since timestamp handling.
 */
public class HttpDownloadHelper {

    // When true, send If-Modified-Since with the local file's mtime and copy the
    // remote Last-Modified onto the downloaded file (see updateTimeStamp()).
    private boolean useTimestamp = false;
    // When true, an already-existing destination file is left untouched.
    private boolean skipExisting = false;

    /**
     * Downloads {@code source} into {@code dest}, enforcing {@code timeout} by running
     * the GET on a daemon thread and joining it with a deadline.
     *
     * @param source   the URL to fetch
     * @param dest     the destination file
     * @param progress optional progress reporter; a no-op one is substituted when null
     * @param timeout  maximum time to wait for the transfer
     * @return true on success (or immediately when the file exists and skipExisting is
     *         set); false when the join was interrupted
     * @throws ElasticsearchTimeoutException when the GET outlives the timeout
     * @throws IOException re-thrown from the download thread via wasSuccessful()
     */
    public boolean download(URL source, File dest, @Nullable DownloadProgress progress, TimeValue timeout) throws Exception {
        if (dest.exists() && skipExisting) {
            return true;
        }

        //don't do any progress, unless asked
        if (progress == null) {
            progress = new NullProgress();
        }

        //set the timestamp to the file date.
        long timestamp = 0;

        boolean hasTimestamp = false;
        if (useTimestamp && dest.exists()) {
            timestamp = dest.lastModified();
            hasTimestamp = true;
        }

        GetThread getThread = new GetThread(source, dest, hasTimestamp, timestamp, progress);

        try {
            getThread.setDaemon(true);
            getThread.start();
            getThread.join(timeout.millis());

            if (getThread.isAlive()) {
                throw new ElasticsearchTimeoutException("The GET operation took longer than " + timeout + ", stopping it.");
            }
        }
        catch (InterruptedException ie) {
            // NOTE(review): swallows the interrupt without re-asserting the thread's
            // interrupted status — confirm callers do not rely on that flag.
            return false;
        } finally {
            // Always stop the worker and release its streams (deletes dest on failure).
            getThread.closeStreams();
        }

        return getThread.wasSuccessful();
    }

    /**
     * Interface implemented for reporting
     * progress of downloading.
     */
    public interface DownloadProgress {
        /**
         * begin a download
         */
        void beginDownload();

        /**
         * tick handler
         */
        void onTick();

        /**
         * end a download
         */
        void endDownload();
    }

    /**
     * do nothing with progress info
     */
    public static class NullProgress implements DownloadProgress {
        /**
         * begin a download
         */
        public void beginDownload() {
        }

        /**
         * tick handler
         */
        public void onTick() {
        }

        /**
         * end a download
         */
        public void endDownload() {
        }
    }

    /**
     * verbose progress system prints to some output stream
     */
    public static class VerboseProgress implements DownloadProgress {
        private int dots = 0;
        // CheckStyle:VisibilityModifier OFF - bc
        PrintStream out;
        // CheckStyle:VisibilityModifier ON

        /**
         * Construct a verbose progress reporter.
         *
         * @param out the output stream.
         */
        public VerboseProgress(PrintStream out) {
            this.out = out;
        }

        /**
         * begin a download
         */
        public void beginDownload() {
            out.print("Downloading ");
            dots = 0;
        }

        /**
         * tick handler
         */
        public void onTick() {
            out.print(".");
            // Flush (and reset the counter) roughly every 50 dots.
            if (dots++ > 50) {
                out.flush();
                dots = 0;
            }
        }

        /**
         * end a download
         */
        public void endDownload() {
            out.println("DONE");
            out.flush();
        }
    }

    /**
     * Worker thread performing the actual GET. Non-static on purpose: it reads the
     * enclosing helper's {@code useTimestamp} flag. Errors are captured in
     * {@code ioexception} and re-thrown from {@link #wasSuccessful()}.
     */
    private class GetThread extends Thread {

        private final URL source;
        private final File dest;
        private final boolean hasTimestamp;
        private final long timestamp;
        private final DownloadProgress progress;

        private boolean success = false;
        private IOException ioexception = null;
        private InputStream is = null;
        private OutputStream os = null;
        private URLConnection connection;
        // Number of HTTP redirects followed so far (capped at 5).
        private int redirections = 0;

        GetThread(URL source, File dest, boolean h, long t, DownloadProgress p) {
            this.source = source;
            this.dest = dest;
            hasTimestamp = h;
            timestamp = t;
            progress = p;
        }

        public void run() {
            try {
                success = get();
            } catch (IOException ioex) {
                // Stashed for the caller; see wasSuccessful().
                ioexception = ioex;
            }
        }

        private boolean get() throws IOException {
            connection = openConnection(source);

            // Null means "nothing to do": not modified, or a disallowed redirect.
            if (connection == null) {
                return false;
            }

            boolean downloadSucceeded = downloadFile();

            //if (and only if) the use file time option is set, then
            //the saved file now has its timestamp set to that of the
            //downloaded file
            if (downloadSucceeded && useTimestamp) {
                updateTimeStamp();
            }

            return downloadSucceeded;
        }

        private boolean redirectionAllowed(URL aSource, URL aDest) throws IOException {
            // The protocol-switch safety check below is intentionally disabled:
            // GitHub redirects http -> https, which it would reject.
            // Argh, github does this...
//            if (!(aSource.getProtocol().equals(aDest.getProtocol()) || ("http"
//                    .equals(aSource.getProtocol()) && "https".equals(aDest
//                    .getProtocol())))) {
//                String message = "Redirection detected from "
//                        + aSource.getProtocol() + " to " + aDest.getProtocol()
//                        + ". Protocol switch unsafe, not allowed.";
//                throw new IOException(message);
//            }

            redirections++;
            if (redirections > 5) {
                String message = "More than " + 5 + " times redirected, giving up";
                throw new IOException(message);
            }

            return true;
        }

        private URLConnection openConnection(URL aSource) throws IOException {

            // set up the URL connection
            URLConnection connection = aSource.openConnection();
            // modify the headers
            // NB: things like user authentication could go in here too.
            if (hasTimestamp) {
                connection.setIfModifiedSince(timestamp);
            }

            if (connection instanceof HttpURLConnection) {
                // Redirects are handled manually (see below) so they can be counted.
                ((HttpURLConnection) connection).setInstanceFollowRedirects(false);
                ((HttpURLConnection) connection).setUseCaches(true);
                ((HttpURLConnection) connection).setConnectTimeout(5000);
            }
            // connect to the remote site (may take some time)
            connection.connect();

            // First check on a 301 / 302 (moved) response (HTTP only)
            if (connection instanceof HttpURLConnection) {
                HttpURLConnection httpConnection = (HttpURLConnection) connection;
                int responseCode = httpConnection.getResponseCode();
                if (responseCode == HttpURLConnection.HTTP_MOVED_PERM ||
                        responseCode == HttpURLConnection.HTTP_MOVED_TEMP ||
                        responseCode == HttpURLConnection.HTTP_SEE_OTHER) {
                    String newLocation = httpConnection.getHeaderField("Location");
                    // NOTE(review): 'message' is built but never used or logged —
                    // looks like leftover from the original Ant code; confirm.
                    String message = aSource
                            + (responseCode == HttpURLConnection.HTTP_MOVED_PERM ? " permanently"
                            : "") + " moved to " + newLocation;
                    URL newURL = new URL(newLocation);
                    if (!redirectionAllowed(aSource, newURL)) {
                        return null;
                    }
                    // Follow the redirect recursively; the hop counter caps recursion.
                    return openConnection(newURL);
                }
                // next test for a 304 result (HTTP only)
                long lastModified = httpConnection.getLastModified();
                if (responseCode == HttpURLConnection.HTTP_NOT_MODIFIED
                        || (lastModified != 0 && hasTimestamp && timestamp >= lastModified)) {
                    // not modified so no file download. just return
                    // instead and trace out something so the user
                    // doesn't think that the download happened when it
                    // didn't
                    return null;
                }
                // test for 401 result (HTTP only)
                if (responseCode == HttpURLConnection.HTTP_UNAUTHORIZED) {
                    String message = "HTTP Authorization failure";
                    throw new IOException(message);
                }
            }

            //REVISIT: at this point even non HTTP connections may
            //support the if-modified-since behaviour -we just check
            //the date of the content and skip the write if it is not
            //newer. Some protocols (FTP) don't include dates, of
            //course.
            return connection;
        }

        private boolean downloadFile() throws FileNotFoundException, IOException {
            IOException lastEx = null;
            for (int i = 0; i < 3; i++) {
                // this three attempt trick is to get round quirks in different
                // Java implementations. Some of them take a few goes to bind
                // property; we ignore the first couple of such failures.
                try {
                    is = connection.getInputStream();
                    break;
                } catch (IOException ex) {
                    lastEx = ex;
                }
            }
            if (is == null) {
                throw new IOException("Can't get " + source + " to " + dest, lastEx);
            }

            os = new FileOutputStream(dest);
            progress.beginDownload();
            boolean finished = false;
            try {
                // 100 KB copy buffer; loop stops early when the thread is interrupted.
                byte[] buffer = new byte[1024 * 100];
                int length;
                while (!isInterrupted() && (length = is.read(buffer)) >= 0) {
                    os.write(buffer, 0, length);
                    progress.onTick();
                }
                finished = !isInterrupted();
            } finally {
                if (!finished) {
                    // we have started to (over)write dest, but failed.
                    // Try to delete the garbage we'd otherwise leave
                    // behind.
                    IOUtils.closeWhileHandlingException(os, is);
                    dest.delete();
                } else {
                    IOUtils.close(os, is);
                }
            }
            progress.endDownload();
            // NOTE(review): returns true even when 'finished' is false (interrupted
            // mid-copy, dest deleted above) — confirm whether this should be
            // 'return finished;' instead.
            return true;
        }

        private void updateTimeStamp() {
            // Mirror the server's Last-Modified date onto the local file when known.
            long remoteTimestamp = connection.getLastModified();
            if (remoteTimestamp != 0) {
                dest.setLastModified(remoteTimestamp);
            }
        }

        /**
         * Has the download completed successfully?
         * <p/>
         * <p>Re-throws any exception caught during execution.</p>
         */
        boolean wasSuccessful() throws IOException {
            if (ioexception != null) {
                throw ioexception;
            }
            return success;
        }

        /**
         * Closes streams, interrupts the download, may delete the
         * output file.
         */
        void closeStreams() throws IOException {
            interrupt();
            if (success) {
                IOUtils.close(is, os);
            } else {
                // Failure path: close quietly and remove the partial file.
                IOUtils.closeWhileHandlingException(is, os);
                if (dest != null && dest.exists()) {
                    dest.delete();
                }
            }
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.basic;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH;
import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH;
import static org.elasticsearch.client.Requests.createIndexRequest;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
public class TransportTwoNodesSearchIT extends ESIntegTestCase {
// This suite runs the test index with zero replicas.
@Override
protected int numberOfReplicas() {
    return 0;
}
// Convenience overload: create the test index with the default shard count.
private Set<String> prepareData() throws Exception {
    return prepareData(-1);
}
/**
 * Creates the "test" index (with an explicit shard count when {@code numShards > 0}),
 * indexes 100 documents with ids "0".."99", refreshes, and returns the full id set
 * for later assertions.
 */
private Set<String> prepareData(int numShards) throws Exception {
    Set<String> fullExpectedIds = new TreeSet<>();

    Settings.Builder settingsBuilder = Settings.builder()
            .put(indexSettings());

    if (numShards > 0) {
        settingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numShards);
    }

    client().admin().indices().create(createIndexRequest("test")
            .settings(settingsBuilder)
            .mapping("type", "foo", "type=geo_point"))
            .actionGet();
    ensureGreen();
    for (int i = 0; i < 100; i++) {
        // Document id doubles as the "id" field; age == i drives scoring in the tests.
        index(Integer.toString(i), "test", i);
        fullExpectedIds.add(Integer.toString(i));
    }
    refresh();
    return fullExpectedIds;
}
// Indexes a single document built by source() into test/type under the given id.
private void index(String id, String nameValue, int age) throws IOException {
    client().index(Requests.indexRequest("test").type("type").id(id).source(source(id, nameValue, age))).actionGet();
}
/**
 * Builds a test document. The "multi" field holds {@code nameValue} repeated
 * {@code age + 1} times separated by single spaces, so its term frequency grows
 * with {@code age} (used by the scoring tests in this suite).
 */
private XContentBuilder source(String id, String nameValue, int age) throws IOException {
    // nameValue itself plus `age` extra copies, space-joined — identical to the
    // original StringBuilder loop.
    String multi = String.join(" ", Collections.nCopies(age + 1, nameValue));
    return jsonBuilder().startObject()
            .field("id", id)
            .field("nid", Integer.parseInt(id))
            .field("name", nameValue + id)
            .field("age", age)
            .field("multi", multi)
            .endObject();
}
/**
 * Verifies DFS_QUERY_THEN_FETCH: with distributed term statistics, scores depend only
 * on term frequency, so hits come back ordered by descending age and the explanation
 * carries the global (n=100, N=100) statistics on every shard.
 */
public void testDfsQueryThenFetch() throws Exception {
    Settings.Builder settingsBuilder = Settings.builder()
            .put(indexSettings());
    client().admin().indices().create(createIndexRequest("test")
            .settings(settingsBuilder))
            .actionGet();
    ensureGreen();

    // we need to have age (ie number of repeats of "test" term) high enough
    // to produce the same 8-bit norm for all docs here, so that
    // the tf is basically the entire score (assuming idf is fixed, which
    // it should be if dfs is working correctly)
    // With the current way of encoding norms, every length between 1048 and 1176
    // are encoded into the same byte
    for (int i = 1048; i < 1148; i++) {
        index(Integer.toString(i - 1048), "test", i);
    }
    refresh();

    int total = 0;
    SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH).setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).setScroll(TimeValue.timeValueSeconds(30)).get();
    // Page through the scroll until exhausted, checking order and explanations.
    while (true) {
        assertNoFailures(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
        SearchHit[] hits = searchResponse.getHits().getHits();
        if (hits.length == 0) {
            break; // finished
        }
        for (int i = 0; i < hits.length; ++i) {
            SearchHit hit = hits[i];
            assertThat(hit.getExplanation(), notNullValue());
            assertThat(hit.getExplanation().getDetails().length, equalTo(1));
            assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(3));
            assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails().length, equalTo(2));
            // n (docs containing the term) and N (total docs) must both reflect the
            // whole index, proving the DFS phase distributed the term statistics.
            assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDescription(),
                    startsWith("n,"));
            assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getValue(),
                    equalTo(100L));
            assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getDescription(),
                    startsWith("N,"));
            assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getValue(),
                    equalTo(100L));
            // Higher age => higher tf => higher score, so ids arrive in descending order.
            assertThat("id[" + hit.getId() + "] -> " + hit.getExplanation().toString(), hit.getId(), equalTo(Integer.toString(100 - total - i - 1)));
        }
        total += hits.length;
        searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
    }
    clearScroll(searchResponse.getScrollId());
    assertEquals(100, total);
}
public void testDfsQueryThenFetchWithSort() throws Exception {
prepareData();
int total = 0;
SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH).setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).addSort("age", SortOrder.ASC).setScroll(TimeValue.timeValueSeconds(30)).get();
while (true) {
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
SearchHit[] hits = searchResponse.getHits().getHits();
if (hits.length == 0) {
break; // finished
}
for (int i = 0; i < hits.length; ++i) {
SearchHit hit = hits[i];
assertThat(hit.getExplanation(), notNullValue());
assertThat(hit.getExplanation().getDetails().length, equalTo(1));
assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(3));
assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails().length, equalTo(2));
assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDescription(),
startsWith("n,"));
assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getValue(),
equalTo(100L));
assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getDescription(),
startsWith("N,"));
assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getValue(),
equalTo(100L));
assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i)));
}
total += hits.length;
searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
}
clearScroll(searchResponse.getScrollId());
assertEquals(100, total);
}
public void testQueryThenFetch() throws Exception {
prepareData();
int total = 0;
SearchResponse searchResponse = client().prepareSearch("test").setSearchType(QUERY_THEN_FETCH).setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).addSort("nid", SortOrder.DESC).setScroll(TimeValue.timeValueSeconds(30)).get();
while (true) {
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
SearchHit[] hits = searchResponse.getHits().getHits();
if (hits.length == 0) {
break; // finished
}
for (int i = 0; i < hits.length; ++i) {
SearchHit hit = hits[i];
assertThat(hit.getExplanation(), notNullValue());
assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(100 - total - i - 1)));
}
total += hits.length;
searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
}
clearScroll(searchResponse.getScrollId());
assertEquals(100, total);
}
public void testQueryThenFetchWithFrom() throws Exception {
Set<String> fullExpectedIds = prepareData();
SearchSourceBuilder source = searchSource()
.query(matchAllQuery())
.explain(true);
Set<String> collectedIds = new TreeSet<>();
SearchResponse searchResponse = client().search(searchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)).actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().getHits().length, equalTo(60));
for (int i = 0; i < 60; i++) {
SearchHit hit = searchResponse.getHits().getHits()[i];
collectedIds.add(hit.getId());
}
searchResponse = client().search(searchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)).actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
assertThat(searchResponse.getHits().getHits().length, equalTo(40));
for (int i = 0; i < 40; i++) {
SearchHit hit = searchResponse.getHits().getHits()[i];
collectedIds.add(hit.getId());
}
assertThat(collectedIds, equalTo(fullExpectedIds));
}
public void testQueryThenFetchWithSort() throws Exception {
prepareData();
int total = 0;
SearchResponse searchResponse = client().prepareSearch("test").setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).addSort("age", SortOrder.ASC).setScroll(TimeValue.timeValueSeconds(30)).get();
while (true) {
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
SearchHit[] hits = searchResponse.getHits().getHits();
if (hits.length == 0) {
break; // finished
}
for (int i = 0; i < hits.length; ++i) {
SearchHit hit = hits[i];
assertThat(hit.getExplanation(), notNullValue());
assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i)));
}
total += hits.length;
searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
}
clearScroll(searchResponse.getScrollId());
assertEquals(100, total);
}
public void testSimpleFacets() throws Exception {
prepareData();
SearchSourceBuilder sourceBuilder = searchSource()
.query(termQuery("multi", "test"))
.from(0).size(20).explain(true)
.aggregation(AggregationBuilders.global("global").subAggregation(
AggregationBuilders.filter("all", termQuery("multi", "test"))))
.aggregation(AggregationBuilders.filter("test1", termQuery("name", "test1")));
SearchResponse searchResponse = client().search(searchRequest("test").source(sourceBuilder)).actionGet();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
Global global = searchResponse.getAggregations().get("global");
Filter all = global.getAggregations().get("all");
Filter test1 = searchResponse.getAggregations().get("test1");
assertThat(test1.getDocCount(), equalTo(1L));
assertThat(all.getDocCount(), equalTo(100L));
}
public void testFailedSearchWithWrongQuery() throws Exception {
prepareData();
NumShards test = getNumShards("test");
logger.info("Start Testing failed search with wrong query");
try {
SearchResponse searchResponse = client().search(
searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))).actionGet();
assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries));
assertThat(searchResponse.getSuccessfulShards(), equalTo(0));
assertThat(searchResponse.getFailedShards(), equalTo(test.numPrimaries));
fail("search should fail");
} catch (ElasticsearchException e) {
assertThat(e.unwrapCause(), instanceOf(SearchPhaseExecutionException.class));
// all is well
}
logger.info("Done Testing failed search");
}
public void testFailedSearchWithWrongFrom() throws Exception {
prepareData();
NumShards test = getNumShards("test");
logger.info("Start Testing failed search with wrong from");
SearchSourceBuilder source = searchSource()
.query(termQuery("multi", "test"))
.from(1000).size(20).explain(true);
SearchResponse response = client().search(searchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet();
assertThat(response.getHits().getHits().length, equalTo(0));
assertThat(response.getTotalShards(), equalTo(test.numPrimaries));
assertThat(response.getSuccessfulShards(), equalTo(test.numPrimaries));
assertThat(response.getFailedShards(), equalTo(0));
response = client().search(searchRequest("test").searchType(QUERY_THEN_FETCH).source(source)).actionGet();
assertNoFailures(response);
assertThat(response.getHits().getHits().length, equalTo(0));
response = client().search(searchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet();
assertNoFailures(response);
assertThat(response.getHits().getHits().length, equalTo(0));
response = client().search(searchRequest("test").searchType(DFS_QUERY_THEN_FETCH).source(source)).actionGet();
assertNoFailures(response);
assertThat(response.getHits().getHits().length, equalTo(0));
logger.info("Done Testing failed search");
}
public void testFailedMultiSearchWithWrongQuery() throws Exception {
prepareData();
logger.info("Start Testing failed multi search with a wrong query");
MultiSearchResponse response = client().prepareMultiSearch()
.add(client().prepareSearch("test").setQuery(new MatchQueryBuilder("foo", "biz")))
.add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2)))
.add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()))
.execute().actionGet();
assertThat(response.getResponses().length, equalTo(3));
assertThat(response.getResponses()[0].getFailureMessage(), notNullValue());
assertThat(response.getResponses()[1].getFailureMessage(), nullValue());
assertThat(response.getResponses()[1].getResponse().getHits().getHits().length, equalTo(1));
assertThat(response.getResponses()[2].getFailureMessage(), nullValue());
assertThat(response.getResponses()[2].getResponse().getHits().getHits().length, equalTo(10));
logger.info("Done Testing failed search");
}
public void testFailedMultiSearchWithWrongQueryWithFunctionScore() throws Exception {
prepareData();
logger.info("Start Testing failed multi search with a wrong query");
MultiSearchResponse response = client().prepareMultiSearch()
// Add custom score query with bogus script
.add(client().prepareSearch("test").setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("nid", 1), new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, "bar", "foo", Collections.emptyMap())))))
.add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2)))
.add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()))
.execute().actionGet();
assertThat(response.getResponses().length, equalTo(3));
assertThat(response.getResponses()[0].getFailureMessage(), notNullValue());
assertThat(response.getResponses()[1].getFailureMessage(), nullValue());
assertThat(response.getResponses()[1].getResponse().getHits().getHits().length, equalTo(1));
assertThat(response.getResponses()[2].getFailureMessage(), nullValue());
assertThat(response.getResponses()[2].getResponse().getHits().getHits().length, equalTo(10));
logger.info("Done Testing failed search");
}
}
| |
package wordcloud;
import ch.lambdaj.Lambda;
import org.apache.log4j.Logger;
import wordcloud.bg.Background;
import wordcloud.bg.RectangleBackground;
import wordcloud.collide.RectanglePixelCollidable;
import wordcloud.collide.checkers.CollisionChecker;
import wordcloud.collide.checkers.RectangleCollisionChecker;
import wordcloud.collide.checkers.RectanglePixelCollisionChecker;
import wordcloud.font.CloudFont;
import wordcloud.font.FontWeight;
import wordcloud.font.scale.FontScalar;
import wordcloud.font.scale.LinearFontScalar;
import wordcloud.image.AngleGenerator;
import wordcloud.image.CollisionRaster;
import wordcloud.image.ImageRotation;
import wordcloud.padding.Padder;
import wordcloud.padding.RectanglePadder;
import wordcloud.padding.WordPixelPadder;
import wordcloud.palette.ColorPalette;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import static ch.lambdaj.Lambda.on;
/**
* Created by kenny on 6/29/14.
*/
/**
 * Renders a word cloud image from a list of {@link WordFrequency} entries.
 * Words are placed on a spiral starting from a random point; collisions are
 * resolved either per rectangle or per pixel depending on {@link CollisionMode}.
 *
 * Created by kenny on 6/29/14.
 */
public class WordCloud {

    private static final Logger LOGGER = Logger.getLogger(WordCloud.class);

    protected static final Random RANDOM = new Random();

    protected final int width;

    protected final int height;

    protected final CollisionMode collisionMode;

    protected final CollisionChecker collisionChecker;

    protected final Padder padder;

    // extra pixels added around each word before placement; 0 disables padding
    protected int padding = 0;

    protected Background background;

    protected final RectanglePixelCollidable backgroundCollidable;

    // fill color painted behind the words; null leaves the image transparent
    protected Color backgroundColor = Color.BLACK;

    protected FontScalar fontScalar = new LinearFontScalar(10, 40);

    protected CloudFont cloudFont = new CloudFont("Comic Sans MS", FontWeight.BOLD);

    protected AngleGenerator angleGenerator = new AngleGenerator();

    // tracks occupied pixels for pixel-perfect collision checks
    protected final CollisionRaster collisionRaster;

    protected final BufferedImage bufferedImage;

    protected final Set<Word> placedWords = new HashSet<>();

    protected final Set<Word> skipped = new HashSet<>();

    protected ColorPalette colorPalette = new ColorPalette(Color.ORANGE, Color.WHITE, Color.YELLOW, Color.GRAY, Color.GREEN);

    /**
     * @param width image width in pixels
     * @param height image height in pixels
     * @param collisionMode strategy for detecting overlap between placed words
     */
    public WordCloud(int width, int height, CollisionMode collisionMode) {
        this.width = width;
        this.height = height;
        this.collisionMode = collisionMode;
        // choose padder and collision checker matching the requested mode
        switch(collisionMode) {
            case PIXEL_PERFECT:
                this.padder = new WordPixelPadder();
                this.collisionChecker = new RectanglePixelCollisionChecker();
                break;
            case RECTANGLE:
            default:
                this.padder = new RectanglePadder();
                this.collisionChecker = new RectangleCollisionChecker();
                break;
        }
        this.collisionRaster = new CollisionRaster(width, height);
        this.bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        this.backgroundCollidable = new RectanglePixelCollidable(collisionRaster, 0, 0);
        this.background = new RectangleBackground(width, height);
    }

    /**
     * Builds the cloud: sorts the frequencies, places each word starting from a
     * random point, then paints the background behind the placed words.
     */
    public void build(List<WordFrequency> wordFrequencies) {
        Collections.sort(wordFrequencies);
        for(final Word word : buildwords(wordFrequencies, this.colorPalette)) {
            // Math.max guards against a non-positive bound when the word is
            // larger than the canvas; the placement spiral handles the rest.
            final int startX = RANDOM.nextInt(Math.max(width - word.getWidth(), width));
            final int startY = RANDOM.nextInt(Math.max(height - word.getHeight(), height));
            place(word, startX, startY);
        }
        drawForgroundToBackground();
    }

    /**
     * Renders the word cloud to a file; the image format is derived from the
     * file name extension (e.g. "cloud.png" writes a PNG).
     *
     * @param outputFileName path of the file to write
     */
    public void writeToFile(final String outputFileName) {
        String extension = "";
        int i = outputFileName.lastIndexOf('.');
        if (i > 0) {
            extension = outputFileName.substring(i + 1);
        }
        try {
            LOGGER.info("Saving WordCloud to " + outputFileName);
            // ImageIO.write returns false when no writer exists for the extension
            // (e.g. missing or unknown extension); previously that failed silently.
            if (!ImageIO.write(bufferedImage, extension, new File(outputFileName))) {
                LOGGER.error("No image writer found for extension '" + extension + "', WordCloud was not saved");
            }
        } catch (IOException e) {
            LOGGER.error(e.getMessage(), e);
        }
    }

    /**
     * Write to output stream as PNG
     *
     * @param outputStream the output stream to write the image data to
     */
    public void writeToStreamAsPNG(final OutputStream outputStream) {
        writeToStream("png", outputStream);
    }

    /**
     * Write wordcloud image data to stream in the given format
     *
     * @param format the image format
     * @param outputStream the output stream to write image data to
     */
    public void writeToStream(final String format, final OutputStream outputStream) {
        try {
            LOGGER.debug("Writing WordCloud image data to output stream");
            ImageIO.write(bufferedImage, format, outputStream);
            LOGGER.debug("Done writing WordCloud image data to output stream");
        } catch (IOException e) {
            LOGGER.error(e.getMessage(), e);
            throw new RuntimeException("Could not write wordcloud to outputstream due to an IOException", e);
        }
    }

    /**
     * create background, then draw current word cloud on top of it.
     * Doing it this way preserves the transparency of the this.bufferedImage's pixels
     * for a more flexible pixel perfect collision
     * (name typo "Forground" kept: the method is protected and may be overridden)
     */
    protected void drawForgroundToBackground() {
        if(backgroundColor == null) { return; }
        final BufferedImage backgroundBufferedImage = new BufferedImage(width, height, this.bufferedImage.getType());
        final Graphics graphics = backgroundBufferedImage.getGraphics();
        // draw current color
        graphics.setColor(backgroundColor);
        graphics.fillRect(0, 0, width, height);
        graphics.drawImage(bufferedImage, 0, 0, null);
        // draw back to original
        final Graphics graphics2 = bufferedImage.getGraphics();
        graphics2.drawImage(backgroundBufferedImage, 0, 0, null);
    }

    /**
     * try to place in center, build out in a spiral trying to place words for N steps
     *
     * @param word the word to place
     * @param startX x coordinate of the spiral center
     * @param startY y coordinate of the spiral center
     */
    protected void place(final Word word, final int startX, final int startY) {
        final Graphics graphics = this.bufferedImage.getGraphics();
        final int maxRadius = width;
        // walk circles of growing radius around (startX, startY)
        for(int r = 0; r < maxRadius; r += 2) {
            for(int x = -r; x <= r; x++) {
                if(startX + x < 0) { continue; }
                if(startX + x >= width) { continue; }
                boolean placed = false;
                word.setX(startX + x);
                // try positive root
                int y1 = (int) Math.sqrt(r * r - x * x);
                if(startY + y1 >= 0 && startY + y1 < height) {
                    word.setY(startY + y1);
                    placed = tryToPlace(word);
                }
                // try negative root
                int y2 = -y1;
                if(!placed && startY + y2 >= 0 && startY + y2 < height) {
                    word.setY(startY + y2);
                    placed = tryToPlace(word);
                }
                if(placed) {
                    // mark occupied pixels and draw the word at its final position
                    collisionRaster.mask(word.getCollisionRaster(), word.getX(), word.getY());
                    graphics.drawImage(word.getBufferedImage(), word.getX(), word.getY(), null);
                    return;
                }
            }
        }
        LOGGER.info("skipped: " + word.getWord());
        skipped.add(word);
    }

    /**
     * Checks whether the word fits at its current position without colliding
     * with the background bounds or previously placed words; records it when it does.
     */
    private boolean tryToPlace(final Word word) {
        if(!background.isInBounds(word)) { return false; }
        switch(this.collisionMode) {
            case RECTANGLE:
                for(Word placeWord : this.placedWords) {
                    if(placeWord.collide(word)) {
                        return false;
                    }
                }
                LOGGER.info("place: " + word.getWord());
                placedWords.add(word);
                return true;
            case PIXEL_PERFECT:
                if(backgroundCollidable.collide(word)) { return false; }
                LOGGER.info("place: " + word.getWord());
                placedWords.add(word);
                return true;
        }
        return false;
    }

    /** Builds renderable {@link Word}s, scaling fonts against the maximum frequency. */
    protected List<Word> buildwords(final List<WordFrequency> wordFrequencies, final ColorPalette colorPalette) {
        final int maxFrequency = maxFrequency(wordFrequencies);
        final List<Word> words = new ArrayList<>();
        for(final WordFrequency wordFrequency : wordFrequencies) {
            words.add(buildWord(wordFrequency, maxFrequency, colorPalette));
        }
        return words;
    }

    /** Builds a single word image: scaled font, palette color, optional rotation and padding. */
    private Word buildWord(final WordFrequency wordFrequency, int maxFrequency, final ColorPalette colorPalette) {
        final Graphics graphics = this.bufferedImage.getGraphics();
        final int frequency = wordFrequency.getFrequency();
        final float fontHeight = this.fontScalar.scale(frequency, 0, maxFrequency);
        final Font font = cloudFont.getFont().deriveFont(fontHeight);
        final FontMetrics fontMetrics = graphics.getFontMetrics(font);
        final Word word = new Word(wordFrequency.getWord(), colorPalette.next(), fontMetrics, this.collisionChecker);
        final double theta = angleGenerator.randomNext();
        if(theta != 0) {
            word.setBufferedImage(ImageRotation.rotate(word.getBufferedImage(), theta));
        }
        if(padding > 0) {
            padder.pad(word, padding);
        }
        return word;
    }

    /** Returns the highest frequency in the collection, or 1 for an empty collection. */
    private int maxFrequency(final Collection<WordFrequency> wordFrequencies) {
        if(wordFrequencies.isEmpty()) { return 1; }
        return Lambda.max(wordFrequencies, on(WordFrequency.class).getFrequency());
    }

    public void setBackgroundColor(Color backgroundColor) {
        this.backgroundColor = backgroundColor;
    }

    public void setPadding(int padding) {
        this.padding = padding;
    }

    public void setColorPalette(ColorPalette colorPalette) {
        this.colorPalette = colorPalette;
    }

    public void setBackground(Background background) {
        this.background = background;
    }

    public void setFontScalar(FontScalar fontScalar) {
        this.fontScalar = fontScalar;
    }

    public void setCloudFont(CloudFont cloudFont) {
        this.cloudFont = cloudFont;
    }

    public void setAngleGenerator(AngleGenerator angleGenerator) {
        this.angleGenerator = angleGenerator;
    }

    public BufferedImage getBufferedImage() {
        return bufferedImage;
    }

    public Set<Word> getSkipped() {
        return skipped;
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.query2.query.output;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.packages.DependencyFilter;
import com.google.devtools.build.lib.packages.EnvironmentGroup;
import com.google.devtools.build.lib.packages.FilesetEntry;
import com.google.devtools.build.lib.packages.InputFile;
import com.google.devtools.build.lib.packages.License;
import com.google.devtools.build.lib.packages.OutputFile;
import com.google.devtools.build.lib.packages.PackageGroup;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.Type;
import com.google.devtools.build.lib.query2.common.CommonQueryOptions;
import com.google.devtools.build.lib.query2.compat.FakeLoadTarget;
import com.google.devtools.build.lib.query2.engine.OutputFormatterCallback;
import com.google.devtools.build.lib.query2.engine.QueryEnvironment;
import com.google.devtools.build.lib.query2.engine.SynchronizedDelegatingOutputFormatterCallback;
import com.google.devtools.build.lib.query2.engine.ThreadSafeOutputFormatterCallback;
import com.google.devtools.build.lib.query2.query.aspectresolvers.AspectResolver;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* An output formatter that prints the result as XML.
*/
class XmlOutputFormatter extends AbstractUnorderedFormatter {
private AspectResolver aspectResolver;
private DependencyFilter dependencyFilter;
private boolean relativeLocations;
private QueryOptions queryOptions;
@Override
public String getName() {
  // Identifier under which this formatter is selected (--output xml).
  final String formatterName = "xml";
  return formatterName;
}
@Override
public ThreadSafeOutputFormatterCallback<Target> createStreamCallback(
    OutputStream out, QueryOptions options, QueryEnvironment<?> env) {
  // XML cannot be emitted incrementally: buffer everything in the post-facto
  // callback and wrap it so concurrent callers are serialized.
  OutputFormatterCallback<Target> buffered = createPostFactoStreamCallback(out, options);
  return new SynchronizedDelegatingOutputFormatterCallback<>(buffered);
}
@Override
public void setOptions(CommonQueryOptions options, AspectResolver aspectResolver) {
  // Validate the concrete options type up front (with a message) so a failure
  // does not leave this formatter partially configured; the original checked
  // only after mutating state and produced a bare IllegalArgumentException.
  Preconditions.checkArgument(
      options instanceof QueryOptions,
      "XML output formatter requires QueryOptions, got %s",
      options.getClass().getName());
  super.setOptions(options, aspectResolver);
  this.aspectResolver = aspectResolver;
  this.dependencyFilter = FormatUtils.getDependencyFilter(options);
  this.relativeLocations = options.relativeLocations;
  this.queryOptions = (QueryOptions) options;
}
@Override
public OutputFormatterCallback<Target> createPostFactoStreamCallback(
    final OutputStream out, final QueryOptions options) {
  // Buffers the whole result as a DOM and serializes it once at close();
  // XML has a single root element, so true streaming is not possible.
  return new OutputFormatterCallback<Target>() {
    private Document doc;       // accumulated DOM, built in start()
    private Element queryElem;  // root <query> element; targets append under it
    @Override
    public void start() {
      try {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        doc = factory.newDocumentBuilder().newDocument();
      } catch (ParserConfigurationException e) {
        // This shouldn't be possible: all the configuration is hard-coded.
        throw new IllegalStateException("XML output failed", e);
      }
      // XML 1.1 permits a wider range of characters (e.g. control chars in labels).
      doc.setXmlVersion("1.1");
      queryElem = doc.createElement("query");
      queryElem.setAttribute("version", "2");
      doc.appendChild(queryElem);
    }
    @Override
    public void processOutput(Iterable<Target> partialResult)
        throws IOException, InterruptedException {
      // Each target becomes one child element of <query>.
      for (Target target : partialResult) {
        queryElem.appendChild(createTargetElement(doc, target));
      }
    }
    @Override
    public void close(boolean failFast) throws IOException {
      // On failure nothing is written: a partial XML document would be invalid.
      if (!failFast) {
        try {
          Transformer transformer = TransformerFactory.newInstance().newTransformer();
          transformer.setOutputProperty(OutputKeys.INDENT, "yes");
          transformer.transform(new DOMSource(doc), new StreamResult(out));
        } catch (TransformerFactoryConfigurationError | TransformerException e) {
          // This shouldn't be possible: all the configuration is hard-coded.
          throw new IllegalStateException("XML output failed", e);
        }
      }
    }
  };
}
/**
 * Creates and returns a new DOM tree for the specified build target.
 *
 * XML structure:
 * - element tag is <source-file>, <generated-file> or <rule
 *   class="cc_library">, following the terminology of
 *   {@link Target#getTargetKind()}.
 * - 'name' attribute is target's label.
 * - 'location' attribute is consistent with output of --output location.
 * - rule attributes are represented in the DOM structure.
 *
 * @param doc document used as the element factory; the element is NOT appended here
 * @param target the build target to render
 * @throws InterruptedException if aspect dependency resolution is interrupted
 */
private Element createTargetElement(Document doc, Target target)
    throws InterruptedException {
  Element elem;
  if (target instanceof Rule) {
    Rule rule = (Rule) target;
    elem = doc.createElement("rule");
    elem.setAttribute("class", rule.getRuleClass());
    // Emit one child element per attribute, skipping unset defaults unless
    // --xml:default_values was requested.
    for (Attribute attr : rule.getAttributes()) {
      PossibleAttributeValues values = PossibleAttributeValues.forRuleAndAttribute(rule, attr);
      if (values.getSource() == AttributeValueSource.RULE || queryOptions.xmlShowDefaultValues) {
        Element attrElem = createValueElement(doc, attr.getType(), values);
        attrElem.setAttribute("name", attr.getName());
        elem.appendChild(attrElem);
      }
    }
    // Include explicit elements for all direct inputs and outputs of a rule;
    // this goes beyond what is available from the attributes above, since it
    // may also (depending on options) include implicit outputs,
    // host-configuration outputs, and default values.
    for (Label label : rule.getLabels(dependencyFilter)) {
      Element inputElem = doc.createElement("rule-input");
      inputElem.setAttribute("name", label.toString());
      elem.appendChild(inputElem);
    }
    // Dependencies contributed by aspects also count as rule inputs.
    for (Label label :
        aspectResolver.computeAspectDependencies(target, dependencyFilter).values()) {
      Element inputElem = doc.createElement("rule-input");
      inputElem.setAttribute("name", label.toString());
      elem.appendChild(inputElem);
    }
    for (OutputFile outputFile: rule.getOutputFiles()) {
      Element outputElem = doc.createElement("rule-output");
      outputElem.setAttribute("name", outputFile.getLabel().toString());
      elem.appendChild(outputElem);
    }
    // Package-level features apply to the rule as default settings.
    for (String feature : rule.getPackage().getFeatures()) {
      Element outputElem = doc.createElement("rule-default-setting");
      outputElem.setAttribute("name", feature);
      elem.appendChild(outputElem);
    }
  } else if (target instanceof PackageGroup) {
    PackageGroup packageGroup = (PackageGroup) target;
    elem = doc.createElement("package-group");
    elem.setAttribute("name", packageGroup.getName());
    Element includes = createValueElement(doc,
        BuildType.LABEL_LIST,
        packageGroup.getIncludes());
    includes.setAttribute("name", "includes");
    elem.appendChild(includes);
    Element packages =
        createValueElement(doc, Type.STRING_LIST, packageGroup.getContainedPackages());
    packages.setAttribute("name", "packages");
    elem.appendChild(packages);
  } else if (target instanceof OutputFile) {
    OutputFile outputFile = (OutputFile) target;
    elem = doc.createElement("generated-file");
    elem.setAttribute("generating-rule",
        outputFile.getGeneratingRule().getLabel().toString());
  } else if (target instanceof InputFile) {
    elem = doc.createElement("source-file");
    InputFile inputFile = (InputFile) target;
    // The BUILD file itself additionally carries package metadata:
    // load()ed files, features, and whether the package had errors.
    if (inputFile.getName().equals("BUILD")) {
      addSkylarkFilesToElement(doc, elem, inputFile);
      addFeaturesToElement(doc, elem, inputFile);
      elem.setAttribute("package_contains_errors",
          String.valueOf(inputFile.getPackage().containsErrors()));
    }
    addPackageGroupsToElement(doc, elem, inputFile);
  } else if (target instanceof EnvironmentGroup) {
    EnvironmentGroup envGroup = (EnvironmentGroup) target;
    elem = doc.createElement("environment-group");
    elem.setAttribute("name", envGroup.getName());
    Element environments = createValueElement(doc,
        BuildType.LABEL_LIST,
        envGroup.getEnvironments());
    environments.setAttribute("name", "environments");
    elem.appendChild(environments);
    Element defaults = createValueElement(doc,
        BuildType.LABEL_LIST,
        envGroup.getDefaults());
    defaults.setAttribute("name", "defaults");
    elem.appendChild(defaults);
  } else if (target instanceof FakeLoadTarget) {
    elem = doc.createElement("source-file");
  } else {
    throw new IllegalArgumentException(target.toString());
  }
  elem.setAttribute("name", target.getLabel().toString());
  String location = FormatUtils.getLocation(target, relativeLocations);
  // Unless --xml:line_numbers was given, strip ":line:column" from the location.
  if (!queryOptions.xmlLineNumbers) {
    int firstColon = location.indexOf(':');
    if (firstColon != -1) {
      location = location.substring(0, firstColon);
    }
  }
  elem.setAttribute("location", location);
  return elem;
}
/**
 * Appends the target's visibility information to {@code parent}: a
 * {@code <package-group>} child per visibility dependency label and a
 * {@code <visibility-label>} child per declared visibility label.
 */
private void addPackageGroupsToElement(Document doc, Element parent, Target target) {
  for (Label dependencyLabel : target.getVisibility().getDependencyLabels()) {
    Element groupElem = doc.createElement("package-group");
    groupElem.setAttribute("name", dependencyLabel.toString());
    parent.appendChild(groupElem);
  }
  for (Label declaredLabel : target.getVisibility().getDeclaredLabels()) {
    Element labelElem = doc.createElement("visibility-label");
    labelElem.setAttribute("name", declaredLabel.toString());
    parent.appendChild(labelElem);
  }
}
/** Appends one {@code <feature>} child to {@code parent} per feature of the file's package. */
private void addFeaturesToElement(Document doc, Element parent, InputFile inputFile) {
  for (String packageFeature : inputFile.getPackage().getFeatures()) {
    Element featureElem = doc.createElement("feature");
    featureElem.setAttribute("name", packageFeature);
    parent.appendChild(featureElem);
  }
}
/**
 * Appends one {@code <load>} child to {@code parent} for each .bzl file the
 * package's BUILD file depends on, as resolved by the aspect resolver.
 */
private void addSkylarkFilesToElement(Document doc, Element parent, InputFile inputFile)
    throws InterruptedException {
  for (Label skylarkFileDep :
      aspectResolver.computeBuildFileDependencies(inputFile.getPackage())) {
    Element loadElem = doc.createElement("load");
    loadElem.setAttribute("name", skylarkFileDep.toString());
    parent.appendChild(loadElem);
  }
}
  /**
   * Creates and returns a new DOM tree for the specified attribute values.
   * For non-configurable attributes, this is a single value. For configurable
   * attributes, this contains one value for each configuration.
   * (Only toplevel values are named attributes; list elements are unnamed.)
   *
   * <p>In the case of configurable attributes, multi-value attributes (e.g. lists)
   * merge all configured lists into an aggregate flattened list. Single-value attributes
   * simply refrain to set a value and annotate the DOM element as configurable.
   *
   * <P>(The ungainly qualified class name is required to avoid ambiguity with
   * OutputFormatter.OutputType.)
   */
  private static Element createValueElement(Document doc, Type<?> type, Iterable<Object> values) {
    // "Import static" with method scope:
    Type<?> FILESET_ENTRY = BuildType.FILESET_ENTRY;
    Type<?> LABEL_LIST = BuildType.LABEL_LIST;
    Type<?> LICENSE = BuildType.LICENSE;
    Type<?> STRING_LIST = Type.STRING_LIST;

    final Element elem;
    // More than one value means the attribute is configurable
    // (one value per configuration).
    final boolean hasMultipleValues = Iterables.size(values) > 1;
    Type<?> elemType = type.getListElementType();
    if (elemType != null) { // it's a list (includes "distribs")
      // Flatten every configured list into a single aggregate <list> element.
      elem = doc.createElement("list");
      for (Object value : values) {
        for (Object elemValue : (Collection<?>) value) {
          elem.appendChild(createValueElement(doc, elemType, elemValue));
        }
      }
    } else if (type instanceof Type.DictType) {
      // Merge all configured dicts; the first value seen for a key wins.
      Set<Object> visitedValues = new HashSet<>();
      elem = doc.createElement("dict");
      Type.DictType<?, ?> dictType = (Type.DictType<?, ?>) type;
      for (Object value : values) {
        for (Map.Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
          if (visitedValues.add(entry.getKey())) {
            Element pairElem = doc.createElement("pair");
            elem.appendChild(pairElem);
            pairElem.appendChild(createValueElement(doc,
                dictType.getKeyType(), entry.getKey()));
            pairElem.appendChild(createValueElement(doc,
                dictType.getValueType(), entry.getValue()));
          }
        }
      }
    } else if (type == LICENSE) {
      // A configurable license renders no value; the element is merely tagged
      // configurable="yes" by createSingleValueElement.
      elem = createSingleValueElement(doc, "license", hasMultipleValues);
      if (!hasMultipleValues) {
        License license = (License) Iterables.getOnlyElement(values);

        Element exceptions = createValueElement(doc, LABEL_LIST, license.getExceptions());
        exceptions.setAttribute("name", "exceptions");
        elem.appendChild(exceptions);

        Element licenseTypes = createValueElement(doc, STRING_LIST, license.getLicenseTypes());
        licenseTypes.setAttribute("name", "license-types");
        elem.appendChild(licenseTypes);
      }
    } else if (type == FILESET_ENTRY) {
      // Fileset entries: not configurable.
      FilesetEntry filesetEntry = (FilesetEntry) Iterables.getOnlyElement(values);
      elem = doc.createElement("fileset-entry");
      elem.setAttribute("srcdir", filesetEntry.getSrcLabel().toString());
      elem.setAttribute("destdir", filesetEntry.getDestDir().toString());
      elem.setAttribute("symlinks", filesetEntry.getSymlinkBehavior().toString());
      elem.setAttribute("strip_prefix", filesetEntry.getStripPrefix());

      if (filesetEntry.getExcludes() != null) {
        Element excludes =
            createValueElement(doc, LABEL_LIST, filesetEntry.getExcludes());
        excludes.setAttribute("name", "excludes");
        elem.appendChild(excludes);
      }
      if (filesetEntry.getFiles() != null) {
        Element files = createValueElement(doc, LABEL_LIST, filesetEntry.getFiles());
        files.setAttribute("name", "files");
        elem.appendChild(files);
      }
    } else { // INTEGER STRING LABEL DISTRIBUTION OUTPUT
      elem = createSingleValueElement(doc, type.toString(), hasMultipleValues);
      if (!hasMultipleValues && !Iterables.isEmpty(values)) {
        Object value = Iterables.getOnlyElement(values);
        // Values such as those of attribute "linkstamp" may be null.
        if (value != null) {
          try {
            elem.setAttribute("value", value.toString());
          } catch (DOMException e) {
            // The value contains characters illegal in XML; emit a marker instead.
            elem.setAttribute("value", "[[[ERROR: could not be encoded as XML]]]");
          }
        }
      }
    }
    return elem;
  }
private static Element createValueElement(Document doc, Type<?> type, Object value) {
return createValueElement(doc, type, ImmutableList.of(value));
}
/**
* Creates the given DOM element, adding <code>configurable="yes"</code> if it represents
* a configurable single-value attribute (configurable list attributes simply have their
* lists merged into an aggregate flat list).
*/
private static Element createSingleValueElement(Document doc, String name,
boolean configurable) {
Element elem = doc.createElement(name);
if (configurable) {
elem.setAttribute("configurable", "yes");
}
return elem;
}
}
| |
/*
* Copyright 2017 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.common.metric;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import com.google.common.collect.ImmutableList;
import com.linecorp.armeria.common.annotation.Nullable;
import io.micrometer.core.instrument.Meter;
import io.micrometer.core.instrument.Tag;
/**
 * A common prefix of {@link Meter.Id} which consists of {@link Meter} name and {@link Tag}s.
 *
 * <p>Instances are immutable; the tag list is always kept sorted by key so that
 * {@link #equals(Object)} and {@link #hashCode()} are independent of the order
 * in which tags were supplied.
 */
public final class MeterIdPrefix {

    private final String name;
    // Always sorted; immutability lets a prefix be shared across meters safely.
    private final ImmutableList<Tag> tags;
    // Lazily computed cache; 0 means "not computed yet". (If the real hash is 0
    // it is simply recomputed on every call, which is harmless.)
    private int hashCode;

    /**
     * Creates a new instance with no {@link Tag}s.
     *
     * @param name the {@link Meter} name
     */
    public MeterIdPrefix(String name) {
        this(name, ImmutableList.of());
    }

    /**
     * Creates a new instance.
     *
     * @param name the {@link Meter} name
     * @param tags the keys and values of the {@link Tag}s, interleaved
     *             ({@code key1, value1, key2, value2, ...}); the length must be even
     */
    public MeterIdPrefix(String name, String... tags) {
        this(name, zipAndSort(requireNonNull(tags, "tags")));
    }

    /**
     * Creates a new instance.
     *
     * @param name the {@link Meter} name
     * @param tags the {@link Tag}s of the {@link Meter}
     */
    public MeterIdPrefix(String name, Iterable<Tag> tags) {
        this(name, copyAndSort(requireNonNull(tags, "tags")));
    }

    // Canonical constructor; callers must pass an already-sorted list.
    private MeterIdPrefix(String name, ImmutableList<Tag> tags) {
        this.name = requireNonNull(name, "name");
        this.tags = tags;
    }

    // Pairs up the interleaved key/value strings into Tags and sorts them.
    private static ImmutableList<Tag> zipAndSort(String... tags) {
        if (tags.length == 0) {
            return ImmutableList.of();
        }
        final List<Tag> result = new ArrayList<>(tags.length / 2);
        zip(result, tags);
        return sort(result);
    }

    // Appends Tag(tags[i], tags[i+1]) pairs to list; rejects odd-length input.
    private static void zip(List<Tag> list, String... tags) {
        checkArgument(tags.length % 2 == 0, "tags.length: %s (expected: even)", tags.length);
        for (int i = 0; i < tags.length;) {
            // The two post-increments consume one key/value pair per iteration.
            list.add(Tag.of(tags[i++], tags[i++]));
        }
    }

    // Sorts in place, then snapshots into an immutable list.
    private static ImmutableList<Tag> sort(List<Tag> tags) {
        if (tags.isEmpty()) {
            return ImmutableList.of();
        }
        Collections.sort(tags);
        return ImmutableList.copyOf(tags);
    }

    private static ImmutableList<Tag> copyAndSort(Iterable<Tag> tags) {
        return ImmutableList.sortedCopyOf(tags);
    }

    /**
     * Returns the name.
     */
    public String name() {
        return name;
    }

    /**
     * Returns the name concatenated by the specified {@code suffix}, delimited by {@code '.'}.
     */
    public String name(String suffix) {
        requireNonNull(suffix, "suffix");
        return name + '.' + suffix;
    }

    /**
     * Returns the {@link Tag}s.
     */
    public List<Tag> tags() {
        return tags;
    }

    /**
     * Returns the {@link Tag}s of this prefix concatenated with the specified
     * {@code tags} (interleaved key/value strings), sorted by key.
     */
    public List<Tag> tags(String... tags) {
        return sortedImmutableTags(tags);
    }

    /**
     * Returns the {@link Tag}s of this prefix concatenated with the specified
     * {@code tags}, sorted by key.
     */
    public List<Tag> tags(Iterable<Tag> tags) {
        return sortedImmutableTags(tags);
    }

    private ImmutableList<Tag> sortedImmutableTags(String[] tags) {
        requireNonNull(tags, "tags");
        if (tags.length == 0) {
            // Nothing to add; reuse the already-sorted shared list.
            return this.tags;
        }
        final List<Tag> list = new ArrayList<>(this.tags);
        zip(list, tags);
        return sort(list);
    }

    private ImmutableList<Tag> sortedImmutableTags(Iterable<Tag> tags) {
        requireNonNull(tags, "tags");
        if (tags instanceof Collection && ((Collection<?>) tags).isEmpty()) {
            // Nothing to add; reuse the already-sorted shared list.
            return this.tags;
        }
        final List<Tag> list = new ArrayList<>(this.tags);
        tags.forEach(list::add);
        return sort(list);
    }

    /**
     * Returns a newly-created instance whose name is concatenated by the specified {@code suffix}.
     */
    public MeterIdPrefix append(String suffix) {
        return new MeterIdPrefix(name(suffix), tags);
    }

    /**
     * Returns a newly-created instance whose name is concatenated by the specified {@code suffix} and
     * whose {@link Tag}s are concatenated with the specified {@code tags}.
     */
    public MeterIdPrefix appendWithTags(String suffix, String... tags) {
        return new MeterIdPrefix(name(suffix), sortedImmutableTags(tags));
    }

    /**
     * Returns a newly-created instance whose name is concatenated by the specified {@code suffix} and
     * whose {@link Tag}s are concatenated with the specified {@code tags}.
     */
    public MeterIdPrefix appendWithTags(String suffix, Iterable<Tag> tags) {
        return new MeterIdPrefix(name(suffix), sortedImmutableTags(tags));
    }

    /**
     * Returns a newly-created instance with the same name whose {@link Tag}s are
     * concatenated with the specified {@code tags}.
     */
    public MeterIdPrefix withTags(String... tags) {
        return new MeterIdPrefix(name, sortedImmutableTags(tags));
    }

    /**
     * Returns a newly-created instance with the same name whose {@link Tag}s are
     * concatenated with the specified {@code tags}.
     */
    public MeterIdPrefix withTags(Iterable<Tag> tags) {
        return new MeterIdPrefix(name, sortedImmutableTags(tags));
    }

    @Override
    public int hashCode() {
        if (hashCode == 0) {
            hashCode = name.hashCode() * 31 + tags.hashCode();
        }
        return hashCode;
    }

    @Override
    public boolean equals(@Nullable Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof MeterIdPrefix)) {
            return false;
        }
        final MeterIdPrefix that = (MeterIdPrefix) obj;
        return name.equals(that.name) && tags.equals(that.tags);
    }

    @Override
    public String toString() {
        if (tags.isEmpty()) {
            return name;
        }
        final StringBuilder buf = new StringBuilder();
        buf.append(name).append('{');
        tags.forEach(tag -> buf.append(tag.getKey()).append('=')
                               .append(tag.getValue()).append(','));
        // Overwrite the trailing ',' with the closing brace.
        buf.setCharAt(buf.length() - 1, '}');
        return buf.toString();
    }
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Table;
import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
import com.google.api.services.bigquery.model.TableDataList;
import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.cloud.dataflow.sdk.io.BigQueryIO;
import com.google.cloud.dataflow.sdk.io.BigQueryIO.Write.CreateDisposition;
import com.google.cloud.dataflow.sdk.io.BigQueryIO.Write.WriteDisposition;
import com.google.cloud.hadoop.util.ApiErrorExtractor;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.util.concurrent.MoreExecutors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
/**
* Inserts rows into BigQuery.
*/
public class BigQueryTableInserter {
private static final Logger LOG = LoggerFactory.getLogger(BigQueryTableInserter.class);
// Approximate amount of table data to upload per InsertAll request.
private static final long UPLOAD_BATCH_SIZE_BYTES = 64 * 1024;
// The maximum number of rows to upload per InsertAll request.
private static final long MAX_ROWS_PER_BATCH = 500;
// The maximum number of times to retry inserting rows into BigQuery.
private static final int MAX_INSERT_ATTEMPTS = 5;
// The initial backoff after a failure inserting rows into BigQuery.
private static final long INITIAL_INSERT_BACKOFF_INTERVAL_MS = 200L;
private final Bigquery client;
private final TableReference defaultRef;
private final long maxRowsPerBatch;
private static final ExecutorService executor = MoreExecutors.getExitingExecutorService(
(ThreadPoolExecutor) Executors.newFixedThreadPool(100), 10, TimeUnit.SECONDS);
/**
* Constructs a new row inserter.
*
* @param client a BigQuery client
*/
public BigQueryTableInserter(Bigquery client) {
this.client = client;
this.defaultRef = null;
this.maxRowsPerBatch = MAX_ROWS_PER_BATCH;
}
/**
* Constructs a new row inserter.
*
* @param client a BigQuery client
* @param defaultRef identifies the table to insert into
* @deprecated replaced by {@link #BigQueryTableInserter(Bigquery)}
*/
@Deprecated
public BigQueryTableInserter(Bigquery client, TableReference defaultRef) {
this.client = client;
this.defaultRef = defaultRef;
this.maxRowsPerBatch = MAX_ROWS_PER_BATCH;
}
/**
* Constructs a new row inserter.
*
* @param client a BigQuery client
*/
public BigQueryTableInserter(Bigquery client, int maxRowsPerBatch) {
this.client = client;
this.defaultRef = null;
this.maxRowsPerBatch = maxRowsPerBatch;
}
/**
* Constructs a new row inserter.
*
* @param client a BigQuery client
* @param defaultRef identifies the default table to insert into
* @deprecated replaced by {@link #BigQueryTableInserter(Bigquery, int)}
*/
@Deprecated
public BigQueryTableInserter(Bigquery client, TableReference defaultRef, int maxRowsPerBatch) {
this.client = client;
this.defaultRef = defaultRef;
this.maxRowsPerBatch = maxRowsPerBatch;
}
/**
* Insert all rows from the given list.
*
* @deprecated replaced by {@link #insertAll(TableReference, List)}
*/
@Deprecated
public void insertAll(List<TableRow> rowList) throws IOException {
insertAll(defaultRef, rowList, null);
}
/**
* Insert all rows from the given list using specified insertIds if not null.
*
* @deprecated replaced by {@link #insertAll(TableReference, List, List)}
*/
@Deprecated
public void insertAll(List<TableRow> rowList,
@Nullable List<String> insertIdList) throws IOException {
insertAll(defaultRef, rowList, insertIdList);
}
/**
* Insert all rows from the given list.
*/
public void insertAll(TableReference ref, List<TableRow> rowList) throws IOException {
insertAll(ref, rowList, null);
}
/**
* Insert all rows from the given list using specified insertIds if not null.
*/
public void insertAll(TableReference ref, List<TableRow> rowList,
@Nullable List<String> insertIdList) throws IOException {
Preconditions.checkNotNull(ref, "ref");
if (insertIdList != null && rowList.size() != insertIdList.size()) {
throw new AssertionError("If insertIdList is not null it needs to have at least "
+ "as many elements as rowList");
}
AttemptBoundedExponentialBackOff backoff = new AttemptBoundedExponentialBackOff(
MAX_INSERT_ATTEMPTS,
INITIAL_INSERT_BACKOFF_INTERVAL_MS);
List<TableDataInsertAllResponse.InsertErrors> allErrors = new ArrayList<>();
// These lists contain the rows to publish. Initially the contain the entire list. If there are
// failures, they will contain only the failed rows to be retried.
List<TableRow> rowsToPublish = rowList;
List<String> idsToPublish = insertIdList;
while (true) {
List<TableRow> retryRows = new ArrayList<>();
List<String> retryIds = (idsToPublish != null) ? new ArrayList<String>() : null;
int strideIndex = 0;
// Upload in batches.
List<TableDataInsertAllRequest.Rows> rows = new LinkedList<>();
int dataSize = 0;
List<Future<List<TableDataInsertAllResponse.InsertErrors>>> futures = new ArrayList<>();
List<Integer> strideIndices = new ArrayList<>();
for (int i = 0; i < rowsToPublish.size(); ++i) {
TableRow row = rowsToPublish.get(i);
TableDataInsertAllRequest.Rows out = new TableDataInsertAllRequest.Rows();
if (idsToPublish != null) {
out.setInsertId(idsToPublish.get(i));
}
out.setJson(row.getUnknownKeys());
rows.add(out);
dataSize += row.toString().length();
if (dataSize >= UPLOAD_BATCH_SIZE_BYTES || rows.size() >= maxRowsPerBatch ||
i == rowsToPublish.size() - 1) {
TableDataInsertAllRequest content = new TableDataInsertAllRequest();
content.setRows(rows);
final Bigquery.Tabledata.InsertAll insert = client.tabledata()
.insertAll(ref.getProjectId(), ref.getDatasetId(), ref.getTableId(),
content);
futures.add(
executor.submit(new Callable<List<TableDataInsertAllResponse.InsertErrors>>() {
@Override
public List<TableDataInsertAllResponse.InsertErrors> call() throws IOException {
return insert.execute().getInsertErrors();
}
}));
strideIndices.add(strideIndex);
dataSize = 0;
strideIndex = i + 1;
rows = new LinkedList<>();
}
}
try {
for (int i = 0; i < futures.size(); i++) {
List<TableDataInsertAllResponse.InsertErrors> errors = futures.get(i).get();
if (errors != null) {
for (TableDataInsertAllResponse.InsertErrors error : errors) {
allErrors.add(error);
if (error.getIndex() == null) {
throw new IOException("Insert failed: " + allErrors);
}
int errorIndex = error.getIndex().intValue() + strideIndices.get(i);
retryRows.add(rowsToPublish.get(errorIndex));
if (retryIds != null) {
retryIds.add(idsToPublish.get(errorIndex));
}
}
}
}
} catch (InterruptedException e) {
throw new IOException("Interrupted while inserting " + rowsToPublish);
} catch (ExecutionException e) {
Throwables.propagate(e.getCause());
}
if (!allErrors.isEmpty() && !backoff.atMaxAttempts()) {
try {
Thread.sleep(backoff.nextBackOffMillis());
} catch (InterruptedException e) {
throw new IOException("Interrupted while waiting before retrying insert of " + retryRows);
}
LOG.info("Retrying failed inserts to BigQuery");
rowsToPublish = retryRows;
idsToPublish = retryIds;
allErrors.clear();
} else {
break;
}
}
if (!allErrors.isEmpty()) {
throw new IOException("Insert failed: " + allErrors);
}
}
/**
* Retrieves or creates the table.
*
* <p>The table is checked to conform to insertion requirements as specified
* by WriteDisposition and CreateDisposition.
*
* <p>If table truncation is requested (WriteDisposition.WRITE_TRUNCATE), then
* this will re-create the table if necessary to ensure it is empty.
*
* <p>If an empty table is required (WriteDisposition.WRITE_EMPTY), then this
* will fail if the table exists and is not empty.
*
* <p>When constructing a table, a {@code TableSchema} must be available. If a
* schema is provided, then it will be used. If no schema is provided, but
* an existing table is being cleared (WRITE_TRUNCATE option above), then
* the existing schema will be re-used. If no schema is available, then an
* {@code IOException} is thrown.
*/
public Table getOrCreateTable(
TableReference ref,
WriteDisposition writeDisposition,
CreateDisposition createDisposition,
@Nullable TableSchema schema) throws IOException {
// Check if table already exists.
Bigquery.Tables.Get get = client.tables()
.get(ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
Table table = null;
try {
table = get.execute();
} catch (IOException e) {
ApiErrorExtractor errorExtractor = new ApiErrorExtractor();
if (!errorExtractor.itemNotFound(e) ||
createDisposition != CreateDisposition.CREATE_IF_NEEDED) {
// Rethrow.
throw e;
}
}
// If we want an empty table, and it isn't, then delete it first.
if (table != null) {
if (writeDisposition == WriteDisposition.WRITE_APPEND) {
return table;
}
boolean empty = isEmpty(ref);
if (empty) {
if (writeDisposition == WriteDisposition.WRITE_TRUNCATE) {
LOG.info("Empty table found, not removing {}", BigQueryIO.toTableSpec(ref));
}
return table;
} else if (writeDisposition == WriteDisposition.WRITE_EMPTY) {
throw new IOException("WriteDisposition is WRITE_EMPTY, "
+ "but table is not empty");
}
// Reuse the existing schema if none was provided.
if (schema == null) {
schema = table.getSchema();
}
// Delete table and fall through to re-creating it below.
LOG.info("Deleting table {}", BigQueryIO.toTableSpec(ref));
Bigquery.Tables.Delete delete = client.tables()
.delete(ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
delete.execute();
}
if (schema == null) {
throw new IllegalArgumentException(
"Table schema required for new table.");
}
// Create the table.
return tryCreateTable(ref, schema);
}
/**
* Checks if a table is empty.
*/
public boolean isEmpty(TableReference ref) throws IOException {
Bigquery.Tabledata.List list = client.tabledata()
.list(ref.getProjectId(), ref.getDatasetId(), ref.getTableId());
list.setMaxResults(1L);
TableDataList dataList = list.execute();
return dataList.getRows() == null || dataList.getRows().isEmpty();
}
/**
* Tries to create the BigQuery table.
* If a table with the same name already exists in the dataset, the table
* creation fails, and the function returns null. In such a case,
* the existing table doesn't necessarily have the same schema as specified
* by the parameter.
*
* @param schema Schema of the new BigQuery table.
* @return The newly created BigQuery table information, or null if the table
* with the same name already exists.
* @throws IOException if other error than already existing table occurs.
*/
@Nullable
public Table tryCreateTable(TableReference ref, TableSchema schema) throws IOException {
LOG.info("Trying to create BigQuery table: {}", BigQueryIO.toTableSpec(ref));
Table content = new Table();
content.setTableReference(ref);
content.setSchema(schema);
try {
return client.tables()
.insert(ref.getProjectId(), ref.getDatasetId(), content)
.execute();
} catch (IOException e) {
if (new ApiErrorExtractor().itemAlreadyExists(e)) {
LOG.info("The BigQuery table already exists.");
return null;
}
throw e;
}
}
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.w9jds.marketbot.utils;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.Color;
import android.graphics.Outline;
import android.graphics.Rect;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.RippleDrawable;
import android.os.Build;
import android.support.annotation.ColorInt;
import android.support.annotation.FloatRange;
import android.support.annotation.NonNull;
import android.support.v7.graphics.Palette;
import android.text.TextPaint;
import android.util.DisplayMetrics;
import android.util.Property;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewOutlineProvider;
import android.widget.ImageView;
import com.w9jds.marketbot.utils.AnimUtils;
import com.w9jds.marketbot.utils.ColorUtils;
/**
 * Utility methods for working with Views.
 */
public final class ViewUtils {

    private ViewUtils() { }

    // Cached for the life of the process; -1 means "not resolved yet".
    private static int actionBarSize = -1;

    /** Resolves (and caches) the theme's {@code actionBarSize} attribute in pixels. */
    public static int getActionBarSize(Context context) {
        if (actionBarSize < 0) {
            TypedValue value = new TypedValue();
            context.getTheme().resolveAttribute(android.R.attr.actionBarSize, value, true);
            actionBarSize = TypedValue.complexToDimensionPixelSize(value.data, context
                    .getResources().getDisplayMetrics());
        }
        return actionBarSize;
    }

    /**
     * Determine if the navigation bar will be on the bottom of the screen, based on logic in
     * PhoneWindowManager.
     */
    public static boolean isNavBarOnBottom(@NonNull Context context) {
        final Resources res = context.getResources();
        final Configuration cfg = res.getConfiguration();
        final DisplayMetrics dm = res.getDisplayMetrics();
        // The bar can move to the side only on non-square screens narrower than 600dp.
        boolean canMove = (dm.widthPixels != dm.heightPixels
                && cfg.smallestScreenWidthDp < 600);
        return !canMove || dm.widthPixels < dm.heightPixels;
    }

    /**
     * Creates a ripple drawable of {@code color} at the given {@code alpha}; when
     * {@code bounded} a white mask confines the ripple to the view bounds.
     */
    public static RippleDrawable createRipple(@ColorInt int color,
                                              @FloatRange(from = 0f, to = 1f) float alpha,
                                              boolean bounded) {
        color = ColorUtils.modifyAlpha(color, alpha);
        return new RippleDrawable(ColorStateList.valueOf(color), null,
                bounded ? new ColorDrawable(Color.WHITE) : null);
    }

    /**
     * Creates a ripple drawable colored from the most prominent swatch of {@code palette},
     * falling back to {@code fallbackColor} when the palette is null or has no named swatch.
     * Vibrant swatches use {@code darkAlpha}; "light" swatches use {@code lightAlpha}.
     */
    public static RippleDrawable createRipple(Palette palette,
                                              @FloatRange(from = 0f, to = 1f) float darkAlpha,
                                              @FloatRange(from = 0f, to = 1f) float lightAlpha,
                                              @ColorInt int fallbackColor,
                                              boolean bounded) {
        int rippleColor = fallbackColor;
        // Note: the palette is intentionally nullable — the fallback color covers that case.
        if (palette != null) {
            // try the named swatches in preference order
            if (palette.getVibrantSwatch() != null) {
                rippleColor =
                        ColorUtils.modifyAlpha(palette.getVibrantSwatch().getRgb(), darkAlpha);
            } else if (palette.getLightVibrantSwatch() != null) {
                rippleColor = ColorUtils.modifyAlpha(palette.getLightVibrantSwatch().getRgb(),
                        lightAlpha);
            } else if (palette.getDarkVibrantSwatch() != null) {
                rippleColor = ColorUtils.modifyAlpha(palette.getDarkVibrantSwatch().getRgb(),
                        darkAlpha);
            } else if (palette.getMutedSwatch() != null) {
                rippleColor = ColorUtils.modifyAlpha(palette.getMutedSwatch().getRgb(), darkAlpha);
            } else if (palette.getLightMutedSwatch() != null) {
                rippleColor = ColorUtils.modifyAlpha(palette.getLightMutedSwatch().getRgb(),
                        lightAlpha);
            } else if (palette.getDarkMutedSwatch() != null) {
                rippleColor =
                        ColorUtils.modifyAlpha(palette.getDarkMutedSwatch().getRgb(), darkAlpha);
            }
        }
        return new RippleDrawable(ColorStateList.valueOf(rippleColor), null,
                bounded ? new ColorDrawable(Color.WHITE) : null);
    }

    /** Requests dark status-bar icons (API 23+); no-op on older platforms. */
    public static void setLightStatusBar(@NonNull View view) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            int flags = view.getSystemUiVisibility();
            flags |= View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR;
            view.setSystemUiVisibility(flags);
        }
    }

    /** Reverts to light status-bar icons (API 23+); no-op on older platforms. */
    public static void clearLightStatusBar(@NonNull View view) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            int flags = view.getSystemUiVisibility();
            flags &= ~View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR;
            view.setSystemUiVisibility(flags);
        }
    }

    /**
     * Recursive binary search to find the best size for the text.
     *
     * <p>Converges once the search window is narrower than {@code precision},
     * returning the conservative (lower) bound so the text never overflows.
     *
     * Adapted from https://github.com/grantland/android-autofittextview
     */
    public static float getSingleLineTextSize(String text,
                                              TextPaint paint,
                                              float targetWidth,
                                              float low,
                                              float high,
                                              float precision,
                                              DisplayMetrics metrics) {
        final float mid = (low + high) / 2.0f;

        paint.setTextSize(TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_PX, mid, metrics));
        final float maxLineWidth = paint.measureText(text);

        if ((high - low) < precision) {
            return low;
        } else if (maxLineWidth > targetWidth) {
            return getSingleLineTextSize(text, paint, targetWidth, low, mid, precision, metrics);
        } else if (maxLineWidth < targetWidth) {
            return getSingleLineTextSize(text, paint, targetWidth, mid, high, precision, metrics);
        } else {
            return mid;
        }
    }

    /** Animatable property wrapping {@link View#setBackgroundColor(int)}. */
    public static final Property<View, Integer> BACKGROUND_COLOR
            = new AnimUtils.IntProperty<View>("backgroundColor") {

        @Override
        public void setValue(View view, int value) {
            view.setBackgroundColor(value);
        }

        @Override
        public Integer get(View view) {
            Drawable d = view.getBackground();
            if (d instanceof ColorDrawable) {
                return ((ColorDrawable) d).getColor();
            }
            // Non-color backgrounds have no single readable color.
            return Color.TRANSPARENT;
        }
    };

    /** Animatable property wrapping {@link ImageView#setImageAlpha(int)}. */
    public static final Property<ImageView, Integer> IMAGE_ALPHA
            = new AnimUtils.IntProperty<ImageView>("imageAlpha") {

        @Override
        public void setValue(ImageView imageView, int value) {
            imageView.setImageAlpha(value);
        }

        @Override
        public Integer get(ImageView imageView) {
            return imageView.getImageAlpha();
        }
    };

    /** Clips a view to an oval inset by its padding. */
    public static final ViewOutlineProvider CIRCULAR_OUTLINE = new ViewOutlineProvider() {
        @Override
        public void getOutline(View view, Outline outline) {
            outline.setOval(view.getPaddingLeft(),
                    view.getPaddingTop(),
                    view.getWidth() - view.getPaddingRight(),
                    view.getHeight() - view.getPaddingBottom());
        }
    };

    /**
     * Determines if two views intersect in the window.
     */
    public static boolean viewsIntersect(View view1, View view2) {
        if (view1 == null || view2 == null) return false;

        final int[] view1Loc = new int[2];
        view1.getLocationOnScreen(view1Loc);
        final Rect view1Rect = new Rect(view1Loc[0],
                view1Loc[1],
                view1Loc[0] + view1.getWidth(),
                view1Loc[1] + view1.getHeight());
        int[] view2Loc = new int[2];
        view2.getLocationOnScreen(view2Loc);
        final Rect view2Rect = new Rect(view2Loc[0],
                view2Loc[1],
                view2Loc[0] + view2.getWidth(),
                view2Loc[1] + view2.getHeight());
        // Rect.intersect mutates view1Rect, but it is local so that is harmless.
        return view1Rect.intersect(view2Rect);
    }

    /** Sets only the relative start padding, preserving the other three paddings. */
    public static void setPaddingStart(View view, int paddingStart) {
        view.setPaddingRelative(paddingStart,
                view.getPaddingTop(),
                view.getPaddingEnd(),
                view.getPaddingBottom());
    }

    /** Sets only the top padding, preserving the other three paddings. */
    public static void setPaddingTop(View view, int paddingTop) {
        view.setPaddingRelative(view.getPaddingStart(),
                paddingTop,
                view.getPaddingEnd(),
                view.getPaddingBottom());
    }

    /** Sets only the relative end padding, preserving the other three paddings. */
    public static void setPaddingEnd(View view, int paddingEnd) {
        view.setPaddingRelative(view.getPaddingStart(),
                view.getPaddingTop(),
                paddingEnd,
                view.getPaddingBottom());
    }

    /** Sets only the bottom padding, preserving the other three paddings. */
    public static void setPaddingBottom(View view, int paddingBottom) {
        view.setPaddingRelative(view.getPaddingStart(),
                view.getPaddingTop(),
                view.getPaddingEnd(),
                paddingBottom);
    }
}
| |
package android.support.v7.app;
import android.app.Activity;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v7.app.ActionBarDrawerToggle.Delegate;
import android.support.v7.appcompat.C0235R;
import android.support.v7.view.ActionMode;
import android.support.v7.view.SupportMenuInflater;
import android.support.v7.view.WindowCallbackWrapper;
import android.support.v7.view.menu.MenuBuilder;
import android.support.v7.widget.TintTypedArray;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.View;
import android.view.Window;
import android.view.Window.Callback;
abstract class AppCompatDelegateImplBase extends AppCompatDelegate {
    // Lazily created support ActionBar for the hosted window.
    ActionBar mActionBar;
    // Receives delegate events (e.g. support action mode start/finish).
    final AppCompatCallback mAppCompatCallback;
    // Wrapping callback installed on the window; delegates back to mOriginalWindowCallback.
    final Callback mAppCompatWindowCallback;
    final Context mContext;
    boolean mHasActionBar;
    private boolean mIsDestroyed;
    boolean mIsFloating;
    // Lazily created; see getMenuInflater() in the concrete delegate.
    MenuInflater mMenuInflater;
    // The Window.Callback that was set before AppCompat installed itself.
    final Callback mOriginalWindowCallback;
    boolean mOverlayActionBar;
    boolean mOverlayActionMode;
    private CharSequence mTitle;
    final Window mWindow;
    boolean mWindowNoTitle;
private class ActionBarDrawableToggleImpl implements Delegate {
private ActionBarDrawableToggleImpl() {
}
public Drawable getThemeUpIndicator() {
TintTypedArray a = TintTypedArray.obtainStyledAttributes(getActionBarThemedContext(), null, new int[]{C0235R.attr.homeAsUpIndicator});
Drawable result = a.getDrawable(0);
a.recycle();
return result;
}
public Context getActionBarThemedContext() {
return AppCompatDelegateImplBase.this.getActionBarThemedContext();
}
public boolean isNavigationVisible() {
ActionBar ab = AppCompatDelegateImplBase.this.getSupportActionBar();
return (ab == null || (ab.getDisplayOptions() & 4) == 0) ? false : true;
}
public void setActionBarUpIndicator(Drawable upDrawable, int contentDescRes) {
ActionBar ab = AppCompatDelegateImplBase.this.getSupportActionBar();
if (ab != null) {
ab.setHomeAsUpIndicator(upDrawable);
ab.setHomeActionContentDescription(contentDescRes);
}
}
public void setActionBarDescription(int contentDescRes) {
ActionBar ab = AppCompatDelegateImplBase.this.getSupportActionBar();
if (ab != null) {
ab.setHomeActionContentDescription(contentDescRes);
}
}
}
    /**
     * {@link Window.Callback} wrapper installed by AppCompat so this delegate
     * can intercept key events and menu callbacks before or after the original
     * callback sees them.
     */
    class AppCompatWindowCallbackBase extends WindowCallbackWrapper {
        AppCompatWindowCallbackBase(Callback callback) {
            super(callback);
        }

        // The delegate gets first chance at key events; falls back to the wrapped callback.
        public boolean dispatchKeyEvent(KeyEvent event) {
            return AppCompatDelegateImplBase.this.dispatchKeyEvent(event) || super.dispatchKeyEvent(event);
        }

        // The wrapped callback gets first chance at shortcuts; the delegate handles the rest.
        public boolean dispatchKeyShortcutEvent(KeyEvent event) {
            return super.dispatchKeyShortcutEvent(event) || AppCompatDelegateImplBase.this.onKeyShortcut(event.getKeyCode(), event);
        }

        // Only AppCompat MenuBuilder menus may populate the options panel (featureId 0).
        public boolean onCreatePanelMenu(int featureId, Menu menu) {
            if (featureId != 0 || (menu instanceof MenuBuilder)) {
                return super.onCreatePanelMenu(featureId, menu);
            }
            return false;
        }

        public void onContentChanged() {
            // Deliberately empty: the original callback is notified from
            // AppCompatDelegate's own content-changed path instead.
        }

        // Temporarily forces all MenuBuilder items visible while the panel is prepared.
        public boolean onPreparePanel(int featureId, View view, Menu menu) {
            MenuBuilder mb = menu instanceof MenuBuilder ? (MenuBuilder) menu : null;
            if (featureId == 0 && mb == null) {
                // Options panel without an AppCompat menu: nothing to prepare.
                return false;
            }
            if (mb != null) {
                mb.setOverrideVisibleItems(true);
            }
            boolean handled = super.onPreparePanel(featureId, view, menu);
            if (mb == null) {
                return handled;
            }
            mb.setOverrideVisibleItems(false);
            return handled;
        }

        // Notifies both the wrapped callback and the delegate; always reports handled.
        public boolean onMenuOpened(int featureId, Menu menu) {
            super.onMenuOpened(featureId, menu);
            AppCompatDelegateImplBase.this.onMenuOpened(featureId, menu);
            return true;
        }

        public void onPanelClosed(int featureId, Menu menu) {
            super.onPanelClosed(featureId, menu);
            AppCompatDelegateImplBase.this.onPanelClosed(featureId, menu);
        }
    }
abstract boolean dispatchKeyEvent(KeyEvent keyEvent);
abstract void initWindowDecorActionBar();
abstract boolean onKeyShortcut(int i, KeyEvent keyEvent);
abstract boolean onMenuOpened(int i, Menu menu);
abstract void onPanelClosed(int i, Menu menu);
abstract void onTitleChanged(CharSequence charSequence);
abstract ActionMode startSupportActionModeFromWindow(ActionMode.Callback callback);
AppCompatDelegateImplBase(Context context, Window window, AppCompatCallback callback) {
this.mContext = context;
this.mWindow = window;
this.mAppCompatCallback = callback;
this.mOriginalWindowCallback = this.mWindow.getCallback();
if (this.mOriginalWindowCallback instanceof AppCompatWindowCallbackBase) {
throw new IllegalStateException("AppCompat has already installed itself into the Window");
}
this.mAppCompatWindowCallback = wrapWindowCallback(this.mOriginalWindowCallback);
this.mWindow.setCallback(this.mAppCompatWindowCallback);
}
Callback wrapWindowCallback(Callback callback) {
return new AppCompatWindowCallbackBase(callback);
}
public ActionBar getSupportActionBar() {
initWindowDecorActionBar();
return this.mActionBar;
}
final ActionBar peekSupportActionBar() {
return this.mActionBar;
}
public MenuInflater getMenuInflater() {
if (this.mMenuInflater == null) {
initWindowDecorActionBar();
this.mMenuInflater = new SupportMenuInflater(this.mActionBar != null ? this.mActionBar.getThemedContext() : this.mContext);
}
return this.mMenuInflater;
}
public void setLocalNightMode(int mode) {
}
public final Delegate getDrawerToggleDelegate() {
return new ActionBarDrawableToggleImpl();
}
final Context getActionBarThemedContext() {
Context context = null;
ActionBar ab = getSupportActionBar();
if (ab != null) {
context = ab.getThemedContext();
}
if (context == null) {
return this.mContext;
}
return context;
}
public void onDestroy() {
this.mIsDestroyed = true;
}
public void setHandleNativeActionModesEnabled(boolean enabled) {
}
public boolean isHandleNativeActionModesEnabled() {
return false;
}
public boolean applyDayNight() {
return false;
}
final boolean isDestroyed() {
return this.mIsDestroyed;
}
final Callback getWindowCallback() {
return this.mWindow.getCallback();
}
public final void setTitle(CharSequence title) {
this.mTitle = title;
onTitleChanged(title);
}
public void onSaveInstanceState(Bundle outState) {
}
final CharSequence getTitle() {
if (this.mOriginalWindowCallback instanceof Activity) {
return ((Activity) this.mOriginalWindowCallback).getTitle();
}
return this.mTitle;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.coordination;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransport;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.RemoteTransportException;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import org.junit.Before;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import static java.util.Collections.emptySet;
import static org.elasticsearch.cluster.coordination.PreVoteCollector.REQUEST_PRE_VOTE_ACTION_NAME;
import static org.elasticsearch.node.Node.NODE_NAME_SETTING;
import static org.elasticsearch.threadpool.ThreadPool.Names.SAME;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
/**
 * Tests for {@code PreVoteCollector}: an election should only be started when a
 * quorum of reachable nodes grant pre-votes with compatible term/version, and
 * pre-vote requests must be answered correctly over the transport layer. All
 * "network" interaction is simulated with a DeterministicTaskQueue plus a
 * MockTransport, so execution is deterministic and single-threaded.
 */
public class PreVoteCollectorTests extends ESTestCase {
    private DeterministicTaskQueue deterministicTaskQueue;
    private PreVoteCollector preVoteCollector;
    private boolean electionOccurred = false;
    private DiscoveryNode localNode;
    // Simulated cluster: node -> canned pre-vote response; a null value means
    // the node never responds (connection failure is simulated instead).
    private Map<DiscoveryNode, PreVoteResponse> responsesByNode = new HashMap<>();
    private long currentTerm, lastAcceptedTerm, lastAcceptedVersion;
    private TransportService transportService;

    /** Builds the mock transport, the local node and the collector under test. */
    @Before
    public void createObjects() {
        Settings settings = Settings.builder().put(NODE_NAME_SETTING.getKey(), "node").build();
        deterministicTaskQueue = new DeterministicTaskQueue(settings, random());
        final MockTransport mockTransport = new MockTransport() {
            @Override
            protected void onSendRequest(final long requestId, final String action, final TransportRequest request,
                                         final DiscoveryNode node) {
                super.onSendRequest(requestId, action, request, node);
                // The collector should only ever send pre-vote requests, and never to itself.
                assertThat(action, is(REQUEST_PRE_VOTE_ACTION_NAME));
                assertThat(request, instanceOf(PreVoteRequest.class));
                assertThat(node, not(equalTo(localNode)));
                PreVoteRequest preVoteRequest = (PreVoteRequest) request;
                assertThat(preVoteRequest.getSourceNode(), equalTo(localNode));
                // Deliver the canned response (or a connection error) asynchronously
                // via the deterministic task queue.
                deterministicTaskQueue.scheduleNow(new Runnable() {
                    @Override
                    public void run() {
                        final PreVoteResponse response = responsesByNode.get(node);
                        if (response == null) {
                            handleRemoteError(requestId, new ConnectTransportException(node, "no response"));
                        } else {
                            handleResponse(requestId, response);
                        }
                    }

                    @Override
                    public String toString() {
                        return "response to " + request + " from " + node;
                    }
                });
            }
        };
        // Invariant: lastAcceptedTerm <= currentTerm.
        lastAcceptedTerm = randomNonNegativeLong();
        currentTerm = randomLongBetween(lastAcceptedTerm, Long.MAX_VALUE);
        lastAcceptedVersion = randomNonNegativeLong();
        localNode = new DiscoveryNode("local-node", buildNewFakeTransportAddress(), Version.CURRENT);
        responsesByNode.put(localNode, new PreVoteResponse(currentTerm, lastAcceptedTerm, lastAcceptedVersion));
        transportService = mockTransport.createTransportService(settings,
            deterministicTaskQueue.getThreadPool(), TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            boundTransportAddress -> localNode, null, emptySet());
        transportService.start();
        transportService.acceptIncomingRequests();
        // The election callback flips electionOccurred; it must fire at most once per collection.
        preVoteCollector = new PreVoteCollector(transportService, () -> {
            assert electionOccurred == false;
            electionOccurred = true;
        }, l -> {
        }, ElectionStrategy.DEFAULT_INSTANCE);
        preVoteCollector.update(getLocalPreVoteResponse(), null);
    }

    private PreVoteResponse getLocalPreVoteResponse() {
        return Objects.requireNonNull(responsesByNode.get(localNode));
    }

    /** Starts a pre-vote round with the given voting configuration and drains the task queue. */
    private void startAndRunCollector(DiscoveryNode... votingNodes) {
        try (Releasable ignored = startCollector(votingNodes)) {
            runCollector();
        }
    }

    /** Runs all pending tasks and asserts the queue is fully drained afterwards. */
    private void runCollector() {
        deterministicTaskQueue.runAllRunnableTasks();
        assertFalse(deterministicTaskQueue.hasDeferredTasks());
        assertFalse(deterministicTaskQueue.hasRunnableTasks());
    }

    private ClusterState makeClusterState(DiscoveryNode[] votingNodes) {
        final VotingConfiguration votingConfiguration = VotingConfiguration.of(votingNodes);
        return CoordinationStateTests.clusterState(lastAcceptedTerm, lastAcceptedVersion, localNode,
            votingConfiguration, votingConfiguration, 0);
    }

    private Releasable startCollector(DiscoveryNode... votingNodes) {
        return preVoteCollector.start(makeClusterState(votingNodes), responsesByNode.keySet());
    }

    public void testStartsElectionIfLocalNodeIsOnlyNode() {
        startAndRunCollector(localNode);
        assertTrue(electionOccurred);
    }

    public void testStartsElectionIfLocalNodeIsQuorum() {
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        responsesByNode.put(otherNode, getLocalPreVoteResponse());
        startAndRunCollector(otherNode);
        assertTrue(electionOccurred);
    }

    // NOTE(review): this test body is identical to testStartsElectionIfLocalNodeIsQuorum;
    // presumably one of the two was meant to use a different voting configuration — verify.
    public void testStartsElectionIfOtherNodeIsQuorum() {
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        responsesByNode.put(otherNode, getLocalPreVoteResponse());
        startAndRunCollector(otherNode);
        assertTrue(electionOccurred);
    }

    public void testDoesNotStartsElectionIfOtherNodeIsQuorumAndDoesNotRespond() {
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        // null response => simulated connection failure from this node.
        responsesByNode.put(otherNode, null);
        startAndRunCollector(otherNode);
        assertFalse(electionOccurred);
    }

    public void testDoesNotStartElectionIfStopped() {
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        responsesByNode.put(otherNode, getLocalPreVoteResponse());
        // Close the collection before any responses are processed.
        startCollector(otherNode).close();
        runCollector();
        assertFalse(electionOccurred);
    }

    public void testIgnoresPreVotesFromLaterTerms() {
        assumeTrue("unluckily chose lastAcceptedTerm too close to currentTerm, no later terms", lastAcceptedTerm < currentTerm - 1);
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        responsesByNode.put(otherNode,
            new PreVoteResponse(currentTerm, randomLongBetween(lastAcceptedTerm + 1, currentTerm - 1), randomNonNegativeLong()));
        startAndRunCollector(otherNode);
        assertFalse(electionOccurred);
    }

    public void testIgnoresPreVotesFromLaterVersionInSameTerm() {
        assumeTrue("unluckily hit Long.MAX_VALUE for lastAcceptedVersion, cannot increment", lastAcceptedVersion < Long.MAX_VALUE);
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        responsesByNode.put(otherNode,
            new PreVoteResponse(currentTerm, lastAcceptedTerm, randomLongBetween(lastAcceptedVersion + 1, Long.MAX_VALUE)));
        startAndRunCollector(otherNode);
        assertFalse(electionOccurred);
    }

    public void testAcceptsPreVotesFromAnyVersionInEarlierTerms() {
        assumeTrue("unluckily hit 0 for lastAcceptedTerm, cannot decrement", 0 < lastAcceptedTerm);
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        responsesByNode.put(otherNode,
            new PreVoteResponse(currentTerm, randomLongBetween(0, lastAcceptedTerm - 1), randomNonNegativeLong()));
        startAndRunCollector(otherNode);
        assertTrue(electionOccurred);
    }

    /** A random but internally-consistent response (lastAcceptedTerm <= currentTerm). */
    private PreVoteResponse randomPreVoteResponse() {
        final long currentTerm = randomNonNegativeLong();
        return new PreVoteResponse(currentTerm, randomLongBetween(0, currentTerm), randomNonNegativeLong());
    }

    /**
     * Cross-checks the collector against CoordinationState: whenever pre-voting
     * says an election would succeed, replaying the same joins through
     * CoordinationState must also win the election, and vice versa.
     */
    public void testPrevotingIndicatesElectionSuccess() {
        assumeTrue("unluckily hit currentTerm = Long.MAX_VALUE, cannot increment", currentTerm < Long.MAX_VALUE);
        final Set<DiscoveryNode> votingNodesSet = new HashSet<>();
        final int nodeCount = randomIntBetween(0, 5);
        for (int i = 0; i < nodeCount; i++) {
            final DiscoveryNode otherNode = new DiscoveryNode("other-node-" + i, buildNewFakeTransportAddress(), Version.CURRENT);
            responsesByNode.put(otherNode, randomBoolean() ? null : randomPreVoteResponse());
            PreVoteResponse newPreVoteResponse = new PreVoteResponse(currentTerm, lastAcceptedTerm, lastAcceptedVersion);
            preVoteCollector.update(newPreVoteResponse, null);
            if (randomBoolean()) {
                votingNodesSet.add(otherNode);
            }
        }
        DiscoveryNode[] votingNodes = votingNodesSet.toArray(new DiscoveryNode[0]);
        startAndRunCollector(votingNodes);
        final CoordinationState coordinationState = new CoordinationState(localNode,
            new InMemoryPersistedState(currentTerm, makeClusterState(votingNodes)), ElectionStrategy.DEFAULT_INSTANCE);
        final long newTerm = randomLongBetween(currentTerm + 1, Long.MAX_VALUE);
        coordinationState.handleStartJoin(new StartJoinRequest(localNode, newTerm));
        responsesByNode.forEach((otherNode, preVoteResponse) -> {
            if (preVoteResponse != null) {
                try {
                    coordinationState.handleJoin(new Join(otherNode, localNode, newTerm,
                        preVoteResponse.getLastAcceptedTerm(), preVoteResponse.getLastAcceptedVersion()));
                } catch (CoordinationStateRejectedException ignored) {
                    // ok to reject some joins.
                }
            }
        });
        assertThat(coordinationState.electionWon(), equalTo(electionOccurred));
    }

    /**
     * Sends a pre-vote request to the local node over the (looped-back) transport
     * and returns the response, or rethrows the transport exception if one occurred.
     */
    private PreVoteResponse handlePreVoteRequestViaTransportService(PreVoteRequest preVoteRequest) {
        final AtomicReference<PreVoteResponse> responseRef = new AtomicReference<>();
        final AtomicReference<TransportException> exceptionRef = new AtomicReference<>();
        transportService.sendRequest(localNode, REQUEST_PRE_VOTE_ACTION_NAME, preVoteRequest,
            new TransportResponseHandler<PreVoteResponse>() {
                @Override
                public PreVoteResponse read(StreamInput in) throws IOException {
                    return new PreVoteResponse(in);
                }

                @Override
                public void handleResponse(PreVoteResponse response) {
                    responseRef.set(response);
                }

                @Override
                public void handleException(TransportException exp) {
                    exceptionRef.set(exp);
                }

                @Override
                public String executor() {
                    return SAME;
                }
            });
        deterministicTaskQueue.runAllRunnableTasks();
        assertFalse(deterministicTaskQueue.hasDeferredTasks());
        final PreVoteResponse response = responseRef.get();
        final TransportException transportException = exceptionRef.get();
        // Exactly one of response / exception must be set.
        if (transportException != null) {
            assertThat(response, nullValue());
            throw transportException;
        }
        assertThat(response, not(nullValue()));
        return response;
    }

    public void testResponseIfCandidate() {
        final long term = randomNonNegativeLong();
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        PreVoteResponse newPreVoteResponse = new PreVoteResponse(currentTerm, lastAcceptedTerm, lastAcceptedVersion);
        // No known leader => candidate: requests are granted.
        preVoteCollector.update(newPreVoteResponse, null);
        assertThat(handlePreVoteRequestViaTransportService(new PreVoteRequest(otherNode, term)), equalTo(newPreVoteResponse));
    }

    public void testResponseToNonLeaderIfNotCandidate() {
        final long term = randomNonNegativeLong();
        final DiscoveryNode leaderNode = new DiscoveryNode("leader-node", buildNewFakeTransportAddress(), Version.CURRENT);
        final DiscoveryNode otherNode = new DiscoveryNode("other-node", buildNewFakeTransportAddress(), Version.CURRENT);
        PreVoteResponse newPreVoteResponse = new PreVoteResponse(currentTerm, lastAcceptedTerm, lastAcceptedVersion);
        // A known leader => requests from other nodes are rejected.
        preVoteCollector.update(newPreVoteResponse, leaderNode);
        RemoteTransportException remoteTransportException = expectThrows(RemoteTransportException.class, () ->
            handlePreVoteRequestViaTransportService(new PreVoteRequest(otherNode, term)));
        assertThat(remoteTransportException.getCause(), instanceOf(CoordinationStateRejectedException.class));
    }

    public void testResponseToRequestFromLeader() {
        // This is a _rare_ case where our leader has detected a failure and stepped down, but we are still a follower. It's possible that
        // the leader lost its quorum, but while we're still a follower we will not offer joins to any other node so there is no major
        // drawback in offering a join to our old leader. The advantage of this is that it makes it slightly more likely that the leader
        // won't change, and also that its re-election will happen more quickly than if it had to wait for a quorum of followers to also
        // detect its failure.
        final long term = randomNonNegativeLong();
        final DiscoveryNode leaderNode = new DiscoveryNode("leader-node", buildNewFakeTransportAddress(), Version.CURRENT);
        PreVoteResponse newPreVoteResponse = new PreVoteResponse(currentTerm, lastAcceptedTerm, lastAcceptedVersion);
        preVoteCollector.update(newPreVoteResponse, leaderNode);
        assertThat(handlePreVoteRequestViaTransportService(new PreVoteRequest(leaderNode, term)), equalTo(newPreVoteResponse));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.kafka.pubsub;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.header.Header;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.serialization.MalformedRecordException;
import org.apache.nifi.serialization.RecordReader;
import org.apache.nifi.serialization.RecordReaderFactory;
import org.apache.nifi.serialization.RecordSetWriter;
import org.apache.nifi.serialization.RecordSetWriterFactory;
import org.apache.nifi.serialization.SchemaValidationException;
import org.apache.nifi.serialization.WriteResult;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordSchema;
import javax.xml.bind.DatatypeConverter;
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_2_0.REL_PARSE_FAILURE;
import static org.apache.nifi.processors.kafka.pubsub.ConsumeKafkaRecord_2_0.REL_SUCCESS;
import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.HEX_ENCODING;
import static org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils.UTF8_ENCODING;
/**
* This class represents a lease to access a Kafka Consumer object. The lease is
* intended to be obtained from a ConsumerPool. The lease is closeable to allow
* for the clean model of a try w/resources whereby non-exceptional cases mean
* the lease will be returned to the pool for future use by others. A given
* lease may only belong to a single thread a time.
*/
public abstract class ConsumerLease implements Closeable, ConsumerRebalanceListener {
private final long maxWaitMillis;
private final Consumer<byte[], byte[]> kafkaConsumer;
private final ComponentLog logger;
private final byte[] demarcatorBytes;
private final String keyEncoding;
private final String securityProtocol;
private final String bootstrapServers;
private final RecordSetWriterFactory writerFactory;
private final RecordReaderFactory readerFactory;
private final Charset headerCharacterSet;
private final Pattern headerNamePattern;
private boolean poisoned = false;
//used for tracking demarcated flowfiles to their TopicPartition so we can append
//to them on subsequent poll calls
private final Map<BundleInformation, BundleTracker> bundleMap = new HashMap<>();
private final Map<TopicPartition, OffsetAndMetadata> uncommittedOffsetsMap = new HashMap<>();
private long leaseStartNanos = -1;
private boolean lastPollEmpty = false;
private int totalMessages = 0;
/**
 * Creates a lease around the given Kafka consumer.
 *
 * @param maxWaitMillis      maximum time to keep polling before a commit is forced
 * @param kafkaConsumer      the wrapped consumer; lifecycle is managed by the pool
 * @param demarcatorBytes    bytes inserted between messages when bundling; null disables demarcation
 * @param keyEncoding        encoding name used for the kafka.key attribute (hex or utf-8)
 * @param securityProtocol   security protocol name, used for provenance/attributes
 * @param bootstrapServers   broker list, used for provenance/attributes
 * @param readerFactory      record reader for record-oriented consumption (may be null)
 * @param writerFactory      record writer for record-oriented consumption (may be null)
 * @param logger             component logger
 * @param headerCharacterSet charset used when copying Kafka headers to attributes
 * @param headerNamePattern  pattern selecting which Kafka headers become attributes (may be null)
 */
ConsumerLease(
        final long maxWaitMillis,
        final Consumer<byte[], byte[]> kafkaConsumer,
        final byte[] demarcatorBytes,
        final String keyEncoding,
        final String securityProtocol,
        final String bootstrapServers,
        final RecordReaderFactory readerFactory,
        final RecordSetWriterFactory writerFactory,
        final ComponentLog logger,
        final Charset headerCharacterSet,
        final Pattern headerNamePattern) {
    this.maxWaitMillis = maxWaitMillis;
    this.kafkaConsumer = kafkaConsumer;
    this.demarcatorBytes = demarcatorBytes;
    this.keyEncoding = keyEncoding;
    this.securityProtocol = securityProtocol;
    this.bootstrapServers = bootstrapServers;
    this.readerFactory = readerFactory;
    this.writerFactory = writerFactory;
    this.logger = logger;
    this.headerCharacterSet = headerCharacterSet;
    this.headerNamePattern = headerNamePattern;
}
/**
 * clears out internal state elements excluding session and consumer as
 * those are managed by the pool itself
 */
private void resetInternalState() {
    // Forget all per-lease tracking; the next poll cycle starts fresh.
    bundleMap.clear();
    uncommittedOffsetsMap.clear();
    leaseStartNanos = -1;
    lastPollEmpty = false;
    totalMessages = 0;
}
/**
 * Kafka will call this method whenever it is about to rebalance the
 * consumers for the given partitions. We'll simply take this to mean that
 * we need to quickly commit what we've got and will return the consumer to
 * the pool. This method will be called during the poll() method call of
 * this class and will be called by the same thread calling poll according
 * to the Kafka API docs. After this method executes the session and kafka
 * offsets are committed and this lease is closed.
 *
 * @param partitions partitions being reassigned
 */
@Override
public void onPartitionsRevoked(final Collection<TopicPartition> partitions) {
    // Fixed typo in log message ("Paritions" -> "Partitions").
    logger.debug("Rebalance Alert: Partitions '{}' revoked for lease '{}' with consumer '{}'", new Object[]{partitions, this, kafkaConsumer});
    // Force a commit here. Can reuse the session and consumer after this but must
    // commit now to avoid duplicates if kafka reassigns the partitions.
    commit();
}
/**
 * This will be called by Kafka when the rebalance has completed. We don't
 * need to do anything with this information other than optionally log it as
 * by this point we've committed what we've got and moved on.
 *
 * @param partitions topic partition set being reassigned
 */
@Override
public void onPartitionsAssigned(final Collection<TopicPartition> partitions) {
    // Fixed typo in log message ("Paritions" -> "Partitions").
    logger.debug("Rebalance Alert: Partitions '{}' assigned for lease '{}' with consumer '{}'", new Object[]{partitions, this, kafkaConsumer});
}
/**
 * Executes a poll on the underlying Kafka Consumer and creates any new
 * flowfiles necessary or appends to existing ones if in demarcation mode.
 */
void poll() {
    /**
     * Implementation note:
     * Even if ConsumeKafka is not scheduled to poll due to downstream connection back-pressure is engaged,
     * for longer than session.timeout.ms (defaults to 10 sec), Kafka consumer sends heartbeat from background thread.
     * If this situation lasts longer than max.poll.interval.ms (defaults to 5 min), Kafka consumer sends
     * Leave Group request to Group Coordinator. When ConsumeKafka processor is scheduled again, Kafka client checks
     * if this client instance is still a part of consumer group. If not, it rejoins before polling messages.
     * This behavior has been fixed via Kafka KIP-62 and available from Kafka client 0.10.1.0.
     */
    try {
        // poll(long) may block up to the given 10 ms waiting for records.
        final ConsumerRecords<byte[], byte[]> records = kafkaConsumer.poll(10);
        // An empty poll signals continuePolling() to stop this lease's loop.
        lastPollEmpty = records.count() == 0;
        processRecords(records);
    } catch (final ProcessException pe) {
        // ProcessException is a NiFi-level failure; do not poison the consumer for it.
        throw pe;
    } catch (final Throwable t) {
        // Any other failure means the consumer may be in an unknown state; destroy it.
        this.poison();
        throw t;
    }
}
/**
 * Notifies Kafka to commit the offsets for the specified topic/partition
 * pairs to the specified offsets w/the given metadata. This can offer
 * higher performance than the other commitOffsets call as it allows the
 * kafka client to collect more data from Kafka before committing the
 * offsets.
 *
 * @return false if there was nothing to commit (caller should probably yield);
 *         true if new data was committed
 */
boolean commit() {
    if (uncommittedOffsetsMap.isEmpty()) {
        resetInternalState();
        return false;
    }
    try {
        /**
         * Committing the nifi session then the offsets means we have an at
         * least once guarantee here. If we reversed the order we'd have at
         * most once.
         */
        final Collection<FlowFile> bundledFlowFiles = getBundles();
        if (!bundledFlowFiles.isEmpty()) {
            getProcessSession().transfer(bundledFlowFiles, REL_SUCCESS);
        }
        // Session commit MUST precede the Kafka offset commit (at-least-once).
        getProcessSession().commit();
        final Map<TopicPartition, OffsetAndMetadata> offsetsMap = uncommittedOffsetsMap;
        kafkaConsumer.commitSync(offsetsMap);
        resetInternalState();
        return true;
    } catch (final IOException ioe) {
        poison();
        logger.error("Failed to finish writing out FlowFile bundle", ioe);
        throw new ProcessException(ioe);
    } catch (final KafkaException kex) {
        // Session already committed but offsets were not: re-delivery (duplicates) is possible.
        poison();
        logger.warn("Duplicates are likely as we were able to commit the process"
            + " session but received an exception from Kafka while committing"
            + " offsets.");
        throw kex;
    } catch (final Throwable t) {
        poison();
        throw t;
    }
}
/**
 * Decides whether this lease should keep polling for data. Polling stops when
 * the previous poll came back empty, when we have held uncommitted data longer
 * than the configured maximum wait, or when enough FlowFiles/messages have been
 * accumulated that memory pressure becomes a concern. When not writing with a
 * demarcator each kafka message becomes its own FlowFile, so the object counts
 * (not content size) are what must be bounded here.
 *
 * @return true if polling should continue; false otherwise
 */
boolean continuePolling() {
    // An empty poll means there is nothing more to read right now.
    if (lastPollEmpty) {
        return false;
    }
    // Start the uncommitted-wait clock the first time we are consulted.
    if (leaseStartNanos < 0) {
        leaseStartNanos = System.nanoTime();
    }
    final long elapsedMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - leaseStartNanos);
    if (elapsedMillis > maxWaitMillis) {
        return false;
    }
    // Bound in-memory tracking: at most 200 simultaneous bundles and fewer than
    // 1000 messages per lease (magic numbers; good candidates for processor properties).
    return bundleMap.size() <= 200 && totalMessages < 1000;
}
/**
 * Indicates that the underlying session and consumer should be immediately
 * considered invalid. Once closed the session will be rolled back and the
 * pool should destroy the underlying consumer. This is useful if due to
 * external reasons, such as the processor no longer being scheduled, this
 * lease should be terminated immediately.
 */
private void poison() {
    poisoned = true;
}
/**
 * @return true if this lease has been poisoned; false otherwise
 */
boolean isPoisoned() {
    return poisoned;
}
/**
 * Trigger the consumer's {@link KafkaConsumer#wakeup() wakeup()} method.
 * Safe to call from another thread to interrupt a blocking poll.
 */
public void wakeup() {
    kafkaConsumer.wakeup();
}
/**
 * Abstract method that is intended to be extended by the pool that created
 * this ConsumerLease object. It should ensure that the session given to
 * create this session is rolled back and that the underlying kafka consumer
 * is either returned to the pool for continued use or destroyed if this
 * lease has been poisoned. It can only be called once. Calling it more than
 * once can result in undefined and non threadsafe behavior.
 */
@Override
public void close() {
    resetInternalState();
}
/** @return the NiFi session this lease writes FlowFiles to; supplied by the pool. */
public abstract ProcessSession getProcessSession();

/** Requests that the owning processor yield (e.g. after an empty commit). */
public abstract void yield();
/**
 * Dispatches a batch of polled records, partition by partition: writes the
 * message content via the configured strategy (demarcated bundles, record
 * readers/writers, or one FlowFile per message) and records the next offset
 * to commit for each partition.
 */
private void processRecords(final ConsumerRecords<byte[], byte[]> records) {
    for (final TopicPartition partition : records.partitions()) {
        final List<ConsumerRecord<byte[], byte[]>> messages = records.records(partition);
        if (messages.isEmpty()) {
            continue;
        }
        // Track the highest offset seen so we can commit one past it below.
        long maxOffset = Long.MIN_VALUE;
        for (final ConsumerRecord<byte[], byte[]> message : messages) {
            if (message.offset() > maxOffset) {
                maxOffset = message.offset();
            }
        }
        // Write records to the content repository and session.
        if (demarcatorBytes != null) {
            writeDemarcatedData(getProcessSession(), messages, partition);
        } else if (readerFactory != null && writerFactory != null) {
            writeRecordData(getProcessSession(), messages, partition);
        } else {
            for (final ConsumerRecord<byte[], byte[]> message : messages) {
                writeData(getProcessSession(), message, partition);
            }
        }
        totalMessages += messages.size();
        // Kafka expects the offset of the NEXT message to consume, hence +1.
        uncommittedOffsetsMap.put(partition, new OffsetAndMetadata(maxOffset + 1L));
    }
}
/**
 * Renders a Kafka message key as a String attribute value using the configured
 * encoding: hex, UTF-8, or null when the key is absent.
 *
 * @param key      raw message key bytes; may be null
 * @param encoding one of the allowable encoding values (hex / utf-8)
 * @return encoded key, or null when the key is null (an unrecognized encoding
 *         also yields null, but that cannot happen: the value is constrained
 *         by the processor's Allowable Values)
 */
private static String encodeKafkaKey(final byte[] key, final String encoding) {
    if (key == null) {
        return null;
    }
    if (HEX_ENCODING.getValue().equals(encoding)) {
        return DatatypeConverter.printHexBinary(key);
    }
    if (UTF8_ENCODING.getValue().equals(encoding)) {
        return new String(key, StandardCharsets.UTF_8);
    }
    return null;
}
/**
 * Finalizes every tracked bundle and collects the FlowFiles that should be
 * transferred; bundles whose writers produced zero records are dropped by
 * processBundle and therefore excluded.
 *
 * @throws IOException if finishing a bundle's record set fails
 */
private Collection<FlowFile> getBundles() throws IOException {
    final List<FlowFile> transferable = new ArrayList<>();
    for (final BundleTracker bundle : bundleMap.values()) {
        if (processBundle(bundle)) {
            transferable.add(bundle.flowFile);
        }
    }
    return transferable;
}
/**
 * Finalizes a single bundle: closes its record writer (if any), drops empty
 * bundles, merges writer attributes onto the FlowFile, and populates the
 * standard kafka.* attributes.
 *
 * @param bundle the tracker to finalize
 * @return true if the bundle's FlowFile should be transferred; false if it was
 *         empty and has been removed from the session
 * @throws IOException if finishing the record set fails
 */
private boolean processBundle(final BundleTracker bundle) throws IOException {
    final RecordSetWriter writer = bundle.recordWriter;
    if (writer != null) {
        final WriteResult writeResult;
        try {
            writeResult = writer.finishRecordSet();
        } finally {
            // Always close the writer, even if finishing the set failed.
            writer.close();
        }
        if (writeResult.getRecordCount() == 0) {
            // Nothing was written; discard the empty FlowFile.
            getProcessSession().remove(bundle.flowFile);
            return false;
        }
        final Map<String, String> attributes = new HashMap<>();
        attributes.putAll(writeResult.getAttributes());
        attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
        bundle.flowFile = getProcessSession().putAllAttributes(bundle.flowFile, attributes);
    }
    populateAttributes(bundle);
    return true;
}
/**
 * Writes a single Kafka record as its own FlowFile (non-demarcated,
 * non-record-oriented mode) and transfers it to the success relationship.
 *
 * @param session        session to create/write the FlowFile in
 * @param record         the Kafka record to write
 * @param topicPartition source partition, used for attribute population
 */
private void writeData(final ProcessSession session, ConsumerRecord<byte[], byte[]> record, final TopicPartition topicPartition) {
    FlowFile flowFile = session.create();
    final BundleTracker tracker = new BundleTracker(record, topicPartition, keyEncoding);
    tracker.incrementRecordCount(1);
    final byte[] value = record.value();
    // A Kafka record may have a null value (e.g. a tombstone); produce an empty FlowFile then.
    if (value != null) {
        flowFile = session.write(flowFile, out -> {
            out.write(value);
        });
    }
    flowFile = session.putAllAttributes(flowFile, getAttributes(record));
    tracker.updateFlowFile(flowFile);
    populateAttributes(tracker);
    session.transfer(tracker.flowFile, REL_SUCCESS);
}
/**
 * Appends the given Kafka records to FlowFiles, grouping records by their
 * bundle information (topic/partition plus header-derived attributes) so that
 * records sharing attributes land in the same FlowFile, separated by the
 * configured demarcator bytes.
 *
 * @param session the ProcessSession used to create/append FlowFile content
 * @param records the Kafka records to write
 * @param topicPartition the topic/partition the records were consumed from
 */
private void writeDemarcatedData(final ProcessSession session, final List<ConsumerRecord<byte[], byte[]>> records, final TopicPartition topicPartition) {
    // Group the Records by their BundleInformation
    final Map<BundleInformation, List<ConsumerRecord<byte[], byte[]>>> map = records.stream()
        .collect(Collectors.groupingBy(rec -> new BundleInformation(topicPartition, null, getAttributes(rec))));

    for (final Map.Entry<BundleInformation, List<ConsumerRecord<byte[], byte[]>>> entry : map.entrySet()) {
        final BundleInformation bundleInfo = entry.getKey();
        final List<ConsumerRecord<byte[], byte[]>> recordList = entry.getValue();

        final boolean demarcateFirstRecord;

        BundleTracker tracker = bundleMap.get(bundleInfo);

        FlowFile flowFile;
        if (tracker == null) {
            tracker = new BundleTracker(recordList.get(0), topicPartition, keyEncoding);
            flowFile = session.create();
            flowFile = session.putAllAttributes(flowFile, bundleInfo.attributes);
            tracker.updateFlowFile(flowFile);
            demarcateFirstRecord = false; // have not yet written records for this topic/partition in this lease
        } else {
            demarcateFirstRecord = true; // have already been writing records for this topic/partition in this lease
        }

        flowFile = tracker.flowFile;
        tracker.incrementRecordCount(recordList.size());
        flowFile = session.append(flowFile, out -> {
            boolean useDemarcator = demarcateFirstRecord;
            for (final ConsumerRecord<byte[], byte[]> record : recordList) {
                if (useDemarcator) {
                    out.write(demarcatorBytes);
                }

                final byte[] value = record.value();
                if (value != null) {
                    // Reuse the cached value rather than calling record.value() a
                    // second time (the original fetched it twice).
                    out.write(value);
                }
                useDemarcator = true;
            }
        });
        tracker.updateFlowFile(flowFile);
        bundleMap.put(bundleInfo, tracker);
    }
}
/**
 * Routes a Kafka message that could not be parsed by the configured Record
 * Reader to the 'parse.failure' relationship, using the default error message.
 */
private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRecord, final ProcessSession session, final Exception cause) {
    handleParseFailure(consumerRecord, session, cause, "Failed to parse message from Kafka using the configured Record Reader. "
        + "Will route message as its own FlowFile to the 'parse.failure' relationship");
}
/**
 * Writes the raw message into its own FlowFile, tags it with the standard
 * Kafka attributes (offset/partition/topic) plus any header-derived
 * attributes, records a RECEIVE provenance event, and transfers it to the
 * 'parse.failure' relationship. Also logs the error and increments the
 * "Parse Failures" counter.
 *
 * @param cause the parse error, or null to log the message without a stack trace
 * @param message the error message to log
 */
private void handleParseFailure(final ConsumerRecord<byte[], byte[]> consumerRecord, final ProcessSession session, final Exception cause, final String message) {
    // If we are unable to parse the data, we need to transfer it to 'parse failure' relationship
    final Map<String, String> attributes = getAttributes(consumerRecord);
    attributes.put(KafkaProcessorUtils.KAFKA_OFFSET, String.valueOf(consumerRecord.offset()));
    attributes.put(KafkaProcessorUtils.KAFKA_PARTITION, String.valueOf(consumerRecord.partition()));
    attributes.put(KafkaProcessorUtils.KAFKA_TOPIC, consumerRecord.topic());

    FlowFile failureFlowFile = session.create();

    // Null (tombstone) payloads produce an empty FlowFile.
    final byte[] value = consumerRecord.value();
    if (value != null) {
        failureFlowFile = session.write(failureFlowFile, out -> out.write(value));
    }
    failureFlowFile = session.putAllAttributes(failureFlowFile, attributes);

    final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, consumerRecord.topic());
    session.getProvenanceReporter().receive(failureFlowFile, transitUri);

    session.transfer(failureFlowFile, REL_PARSE_FAILURE);

    if (cause == null) {
        logger.error(message);
    } else {
        logger.error(message, cause);
    }

    session.adjustCounter("Parse Failures", 1, false);
}
/**
 * Builds FlowFile attributes from the record's Kafka headers, keeping only the
 * headers whose names match the configured header-name pattern.
 *
 * @param consumerRecord the record whose headers are examined
 * @return the matching header name/value pairs; empty when no pattern is set
 */
private Map<String, String> getAttributes(final ConsumerRecord<?, ?> consumerRecord) {
    final Map<String, String> attributes = new HashMap<>();

    if (headerNamePattern != null) {
        for (final Header header : consumerRecord.headers()) {
            final String name = header.key();
            if (headerNamePattern.matcher(name).matches()) {
                attributes.put(name, new String(header.value(), headerCharacterSet));
            }
        }
    }

    return attributes;
}
/**
 * Parses each Kafka message with the configured Record Reader and writes the
 * resulting records through a Record Writer, bundling output FlowFiles by
 * (topic/partition, record schema, header attributes).
 *
 * Per-message parse failures are routed to 'parse.failure' and processing
 * continues; IO-level failures roll the consumer back to the last committed
 * offset so the messages are re-delivered.
 */
private void writeRecordData(final ProcessSession session, final List<ConsumerRecord<byte[], byte[]>> records, final TopicPartition topicPartition) {
    // In order to obtain a RecordReader from the RecordReaderFactory, we need to give it a FlowFile.
    // We don't want to create a new FlowFile for each record that we receive, so we will just create
    // a "temporary flowfile" that will be removed in the finally block below and use that to pass to
    // the createRecordReader method.
    RecordSetWriter writer = null;
    try {
        for (final ConsumerRecord<byte[], byte[]> consumerRecord : records) {
            final Map<String, String> attributes = getAttributes(consumerRecord);

            // Null (tombstone) payloads are read as empty content.
            final byte[] recordBytes = consumerRecord.value() == null ? new byte[0] : consumerRecord.value();
            try (final InputStream in = new ByteArrayInputStream(recordBytes)) {
                final RecordReader reader;

                try {
                    reader = readerFactory.createRecordReader(attributes, in, logger);
                } catch (final IOException e) {
                    // Treated as a transient comms problem: yield, re-seek the
                    // consumer, and abandon this batch so it is re-delivered.
                    yield();
                    rollback(topicPartition);
                    handleParseFailure(consumerRecord, session, e, "Failed to parse message from Kafka due to comms failure. Will roll back session and try again momentarily.");
                    closeWriter(writer);
                    return;
                } catch (final Exception e) {
                    // Malformed message: route to parse.failure, keep going.
                    handleParseFailure(consumerRecord, session, e);
                    continue;
                }

                try {
                    Record record;
                    while ((record = reader.nextRecord()) != null) {
                        // Determine the bundle for this record.
                        final RecordSchema recordSchema = record.getSchema();
                        final BundleInformation bundleInfo = new BundleInformation(topicPartition, recordSchema, attributes);

                        BundleTracker tracker = bundleMap.get(bundleInfo);
                        if (tracker == null) {
                            // First record for this bundle: create the FlowFile
                            // and open a writer for it.
                            FlowFile flowFile = session.create();
                            flowFile = session.putAllAttributes(flowFile, attributes);

                            final OutputStream rawOut = session.write(flowFile);

                            final RecordSchema writeSchema;
                            try {
                                writeSchema = writerFactory.getSchema(flowFile.getAttributes(), recordSchema);
                            } catch (final Exception e) {
                                logger.error("Failed to obtain Schema for FlowFile. Will roll back the Kafka message offsets.", e);

                                rollback(topicPartition);
                                yield();

                                throw new ProcessException(e);
                            }

                            writer = writerFactory.createWriter(logger, writeSchema, rawOut);
                            writer.beginRecordSet();

                            tracker = new BundleTracker(consumerRecord, topicPartition, keyEncoding, writer);
                            tracker.updateFlowFile(flowFile);
                            bundleMap.put(bundleInfo, tracker);
                        } else {
                            writer = tracker.recordWriter;
                        }

                        try {
                            writer.write(record);
                        } catch (final RuntimeException re) {
                            handleParseFailure(consumerRecord, session, re, "Failed to write message from Kafka using the configured Record Writer. "
                                + "Will route message as its own FlowFile to the 'parse.failure' relationship");
                            continue;
                        }

                        tracker.incrementRecordCount(1L);
                        session.adjustCounter("Records Received", 1L, false);
                    }
                } catch (final IOException | MalformedRecordException | SchemaValidationException e) {
                    handleParseFailure(consumerRecord, session, e);
                    continue;
                }
            }
        }
    } catch (final Exception e) {
        logger.error("Failed to properly receive messages from Kafka. Will roll back session and any un-committed offsets from Kafka.", e);

        closeWriter(writer);
        rollback(topicPartition);

        throw new ProcessException(e);
    }
}
/**
 * Best-effort close of a record writer; a failure is logged, never propagated.
 *
 * @param writer the writer to close; may be null, in which case nothing happens
 */
private void closeWriter(final RecordSetWriter writer) {
    if (writer == null) {
        return;
    }

    try {
        writer.close();
    } catch (final Exception e) {
        logger.warn("Failed to close Record Writer", e);
    }
}
/**
 * Seeks the consumer for the given topic/partition back to the offset staged
 * in this lease, or to the broker's committed offset when nothing was staged,
 * so messages from a failed session are re-delivered.
 *
 * Best-effort: any failure is logged but not propagated.
 */
private void rollback(final TopicPartition topicPartition) {
    try {
        OffsetAndMetadata offsetAndMetadata = uncommittedOffsetsMap.get(topicPartition);
        if (offsetAndMetadata == null) {
            // Nothing staged in this lease; fall back to the committed offset.
            offsetAndMetadata = kafkaConsumer.committed(topicPartition);
        }

        // NOTE(review): when there is no committed offset at all we seek to 0;
        // this assumes the partition's beginning is the desired restart point -
        // confirm against the processor's auto.offset.reset configuration.
        final long offset = offsetAndMetadata == null ? 0L : offsetAndMetadata.offset();
        kafkaConsumer.seek(topicPartition, offset);
    } catch (final Exception rollbackException) {
        logger.warn("Attempted to rollback Kafka message offset but was unable to do so", rollbackException);
    }
}
/**
 * Adds the standard kafka.* attributes (offset, key, partition, topic, count)
 * to the tracker's FlowFile and emits a RECEIVE provenance event for it.
 */
private void populateAttributes(final BundleTracker tracker) {
    final Map<String, String> kafkaAttrs = new HashMap<>();
    kafkaAttrs.put(KafkaProcessorUtils.KAFKA_OFFSET, String.valueOf(tracker.initialOffset));
    // Only expose the key when the FlowFile holds exactly one message;
    // otherwise a single key would be ambiguous.
    if (tracker.key != null && tracker.totalRecords == 1) {
        kafkaAttrs.put(KafkaProcessorUtils.KAFKA_KEY, tracker.key);
    }
    kafkaAttrs.put(KafkaProcessorUtils.KAFKA_PARTITION, String.valueOf(tracker.partition));
    kafkaAttrs.put(KafkaProcessorUtils.KAFKA_TOPIC, tracker.topic);
    if (tracker.totalRecords > 1) {
        // Add a record.count attribute to remain consistent with other record-oriented processors. If not
        // reading/writing records, then use "kafka.count" attribute.
        if (tracker.recordWriter == null) {
            kafkaAttrs.put(KafkaProcessorUtils.KAFKA_COUNT, String.valueOf(tracker.totalRecords));
        } else {
            kafkaAttrs.put("record.count", String.valueOf(tracker.totalRecords));
        }
    }

    final FlowFile newFlowFile = getProcessSession().putAllAttributes(tracker.flowFile, kafkaAttrs);
    // Provenance duration is measured from the start of this lease.
    final long executionDurationMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - leaseStartNanos);
    final String transitUri = KafkaProcessorUtils.buildTransitURI(securityProtocol, bootstrapServers, tracker.topic);
    getProcessSession().getProvenanceReporter().receive(newFlowFile, transitUri, executionDurationMillis);

    tracker.updateFlowFile(newFlowFile);
}
/**
 * Tracks the state of one output FlowFile: the first message's offset and
 * encoded key, the topic/partition it came from, the (optional) record
 * writer, and a running count of messages written into it.
 */
private static class BundleTracker {
    final long initialOffset; // offset of the first record in this bundle
    final int partition;
    final String topic;
    final String key; // encoded key of the first record; may be null
    final RecordSetWriter recordWriter; // null when not writing records
    FlowFile flowFile;
    long totalRecords = 0;

    private BundleTracker(final ConsumerRecord<byte[], byte[]> initialRecord, final TopicPartition topicPartition, final String keyEncoding) {
        this(initialRecord, topicPartition, keyEncoding, null);
    }

    private BundleTracker(final ConsumerRecord<byte[], byte[]> initialRecord, final TopicPartition topicPartition, final String keyEncoding, final RecordSetWriter recordWriter) {
        this.initialOffset = initialRecord.offset();
        this.partition = topicPartition.partition();
        this.topic = topicPartition.topic();
        this.recordWriter = recordWriter;
        this.key = encodeKafkaKey(initialRecord.key(), keyEncoding);
    }

    private void incrementRecordCount(final long count) {
        totalRecords += count;
    }

    private void updateFlowFile(final FlowFile flowFile) {
        this.flowFile = flowFile;
    }
}
/**
 * Key used to group Kafka records into output FlowFiles: records sharing the
 * same topic/partition, record schema, and header-derived attributes are
 * bundled together. Used as a HashMap key, so equals/hashCode agree on the
 * same three fields.
 */
private static class BundleInformation {
    private final TopicPartition topicPartition;
    private final RecordSchema schema;
    private final Map<String, String> attributes;

    public BundleInformation(final TopicPartition topicPartition, final RecordSchema schema, final Map<String, String> attributes) {
        this.topicPartition = topicPartition;
        this.schema = schema;
        this.attributes = attributes;
    }

    @Override
    public int hashCode() {
        // Derive the hash null-safely from exactly the fields compared in
        // equals() (the original hand-rolled sum NPE'd on a null topicPartition
        // even though equals() handled null via Objects.equals).
        return Objects.hash(topicPartition, schema, attributes);
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof BundleInformation)) {
            return false;
        }

        final BundleInformation other = (BundleInformation) obj;
        return Objects.equals(topicPartition, other.topicPartition)
            && Objects.equals(schema, other.schema)
            && Objects.equals(attributes, other.attributes);
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.xml.parsers.ParserConfigurationException;
import com.google.common.collect.ImmutableList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.xml.sax.SAXException;
import com.google.common.annotations.VisibleForTesting;
/**
* Maintains a list of queues as well as scheduling parameters for each queue,
* such as guaranteed share allocations, from the fair scheduler config file.
*
*/
@Private
@Unstable
public class QueueManager {
    public static final Log LOG = LogFactory.getLog(
        QueueManager.class.getName());

    public static final String ROOT_QUEUE = "root";

    private final FairScheduler scheduler;

    // Flat collection of every leaf queue. Copy-on-write so callers can
    // iterate concurrently with queue creation/removal without locking.
    private final Collection<FSLeafQueue> leafQueues =
        new CopyOnWriteArrayList<FSLeafQueue>();
    // Fully-qualified queue name ("root.a.b") -> queue instance. Compound
    // read-and-mutate operations synchronize on this map.
    private final Map<String, FSQueue> queues = new HashMap<String, FSQueue>();
    private FSParentQueue rootQueue;

    public QueueManager(FairScheduler scheduler) {
        this.scheduler = scheduler;
    }

    public FSParentQueue getRootQueue() {
        return rootQueue;
    }

    /**
     * Creates the root queue and the default leaf queue. Must run before any
     * other queue lookups.
     */
    public void initialize(Configuration conf) throws IOException,
        SAXException, AllocationConfigurationException, ParserConfigurationException {
        rootQueue = new FSParentQueue("root", scheduler, null);
        queues.put(rootQueue.getName(), rootQueue);

        // Create the default queue
        getLeafQueue(YarnConfiguration.DEFAULT_QUEUE_NAME, true);
    }

    /**
     * Get a leaf queue by name, creating it if the create param is true and is necessary.
     * If the queue is not or can not be a leaf queue, i.e. it already exists as a
     * parent queue, or one of the parents in its name is already a leaf queue,
     * null is returned.
     *
     * The root part of the name is optional, so a queue underneath the root
     * named "queue1" could be referred to as just "queue1", and a queue named
     * "queue2" underneath a parent named "parent1" that is underneath the root
     * could be referred to as just "parent1.queue2".
     */
    public FSLeafQueue getLeafQueue(String name, boolean create) {
        FSQueue queue = getQueue(name, create, FSQueueType.LEAF);
        if (queue instanceof FSParentQueue) {
            // The name resolved to an existing parent queue; it cannot be a leaf.
            return null;
        }
        return (FSLeafQueue) queue;
    }

    /**
     * Remove a leaf queue if empty
     * @param name name of the queue
     * @return true if queue was removed or false otherwise
     */
    public boolean removeLeafQueue(String name) {
        name = ensureRootPrefix(name);
        // Removal is expressed as "make way for a PARENT queue with this name":
        // removeEmptyIncompatibleQueues drops the existing leaf when it is empty.
        // NOTE(review): intentional use of PARENT here - confirm.
        return removeEmptyIncompatibleQueues(name, FSQueueType.PARENT);
    }

    /**
     * Get a parent queue by name, creating it if the create param is true and is necessary.
     * If the queue is not or can not be a parent queue, i.e. it already exists as a
     * leaf queue, or one of the parents in its name is already a leaf queue,
     * null is returned.
     *
     * The root part of the name is optional, so a queue underneath the root
     * named "queue1" could be referred to as just "queue1", and a queue named
     * "queue2" underneath a parent named "parent1" that is underneath the root
     * could be referred to as just "parent1.queue2".
     */
    public FSParentQueue getParentQueue(String name, boolean create) {
        FSQueue queue = getQueue(name, create, FSQueueType.PARENT);
        if (queue instanceof FSLeafQueue) {
            return null;
        }
        return (FSParentQueue) queue;
    }

    // Shared lookup/creation path used by getLeafQueue and getParentQueue.
    private FSQueue getQueue(String name, boolean create, FSQueueType queueType) {
        name = ensureRootPrefix(name);
        synchronized (queues) {
            FSQueue queue = queues.get(name);
            if (queue == null && create) {
                // if the queue doesn't exist,create it and return
                queue = createQueue(name, queueType);

                // Update steady fair share for all queues
                if (queue != null) {
                    rootQueue.recomputeSteadyShares();
                }
            }
            return queue;
        }
    }

    /**
     * Creates a leaf or parent queue based on what is specified in 'queueType'
     * and places it in the tree. Creates any parents that don't already exist.
     *
     * @return
     *    the created queue, if successful. null if not allowed (one of the parent
     *    queues in the queue name is already a leaf queue)
     */
    private FSQueue createQueue(String name, FSQueueType queueType) {
        List<String> newQueueNames = new ArrayList<String>();
        newQueueNames.add(name);
        int sepIndex = name.length();
        FSParentQueue parent = null;

        // Move up the queue tree until we reach one that exists.
        while (sepIndex != -1) {
            int prevSepIndex = sepIndex;
            sepIndex = name.lastIndexOf('.', sepIndex-1);
            // Validate the path segment between this '.' and the previous one.
            String node = name.substring(sepIndex+1, prevSepIndex);
            if (!isQueueNameValid(node)) {
                throw new InvalidQueueNameException("Illegal node name at offset " +
                    (sepIndex+1) + " for queue name " + name);
            }

            FSQueue queue;
            String curName = null;
            curName = name.substring(0, sepIndex);
            queue = queues.get(curName);

            if (queue == null) {
                // Ancestor missing; remember it so it gets created below.
                newQueueNames.add(curName);
            } else {
                if (queue instanceof FSParentQueue) {
                    parent = (FSParentQueue)queue;
                    break;
                } else {
                    // An ancestor is already a leaf queue; creation not allowed.
                    return null;
                }
            }
        }

        // At this point, parent refers to the deepest existing parent of the
        // queue to create.
        // Now that we know everything worked out, make all the queues
        // and add them to the map.
        AllocationConfiguration queueConf = scheduler.getAllocationConfiguration();
        FSLeafQueue leafQueue = null;
        // Walk newQueueNames from shallowest (last added) to deepest (index 0).
        for (int i = newQueueNames.size()-1; i >= 0; i--) {
            String queueName = newQueueNames.get(i);
            if (i == 0 && queueType != FSQueueType.PARENT) {
                // Deepest name: create the leaf itself (note: at i == 0,
                // queueName equals the full requested name).
                leafQueue = new FSLeafQueue(name, scheduler, parent);
                try {
                    leafQueue.setPolicy(queueConf.getDefaultSchedulingPolicy());
                } catch (AllocationConfigurationException ex) {
                    LOG.warn("Failed to set default scheduling policy "
                        + queueConf.getDefaultSchedulingPolicy() + " on new leaf queue.", ex);
                }
                parent.addChildQueue(leafQueue);
                queues.put(leafQueue.getName(), leafQueue);
                leafQueues.add(leafQueue);
                leafQueue.updatePreemptionVariables();
                return leafQueue;
            } else {
                FSParentQueue newParent = new FSParentQueue(queueName, scheduler, parent);
                try {
                    newParent.setPolicy(queueConf.getDefaultSchedulingPolicy());
                } catch (AllocationConfigurationException ex) {
                    LOG.warn("Failed to set default scheduling policy "
                        + queueConf.getDefaultSchedulingPolicy() + " on new parent queue.", ex);
                }
                parent.addChildQueue(newParent);
                queues.put(newParent.getName(), newParent);
                newParent.updatePreemptionVariables();
                parent = newParent;
            }
        }

        return parent;
    }

    /**
     * Make way for the given queue if possible, by removing incompatible
     * queues with no apps in them. Incompatibility could be due to
     * (1) queueToCreate being currently a parent but needs to change to leaf
     * (2) queueToCreate being currently a leaf but needs to change to parent
     * (3) an existing leaf queue in the ancestry of queueToCreate.
     *
     * We will never remove the root queue or the default queue in this way.
     *
     * @return true if we can create queueToCreate or it already exists.
     */
    private boolean removeEmptyIncompatibleQueues(String queueToCreate,
        FSQueueType queueType) {
        queueToCreate = ensureRootPrefix(queueToCreate);

        // Ensure queueToCreate is not root and doesn't have the default queue in its
        // ancestry.
        if (queueToCreate.equals(ROOT_QUEUE) ||
            queueToCreate.startsWith(
                ROOT_QUEUE + "." + YarnConfiguration.DEFAULT_QUEUE_NAME + ".")) {
            return false;
        }

        // NOTE(review): this reads 'queues' without synchronizing on it, unlike
        // the other accessors in this class - confirm callers hold the lock or
        // that a stale read is acceptable here.
        FSQueue queue = queues.get(queueToCreate);
        // Queue exists already.
        if (queue != null) {
            if (queue instanceof FSLeafQueue) {
                if (queueType == FSQueueType.LEAF) {
                    // if queue is already a leaf then return true
                    return true;
                }
                // remove incompatibility since queue is a leaf currently
                // needs to change to a parent.
                return removeQueueIfEmpty(queue);
            } else {
                if (queueType == FSQueueType.PARENT) {
                    return true;
                }
                // If it's an existing parent queue and needs to change to leaf,
                // remove it if it's empty.
                return removeQueueIfEmpty(queue);
            }
        }

        // Queue doesn't exist already. Check if the new queue would be created
        // under an existing leaf queue. If so, try removing that leaf queue.
        int sepIndex = queueToCreate.length();
        sepIndex = queueToCreate.lastIndexOf('.', sepIndex-1);
        while (sepIndex != -1) {
            String prefixString = queueToCreate.substring(0, sepIndex);
            FSQueue prefixQueue = queues.get(prefixString);
            if (prefixQueue != null && prefixQueue instanceof FSLeafQueue) {
                return removeQueueIfEmpty(prefixQueue);
            }
            sepIndex = queueToCreate.lastIndexOf('.', sepIndex-1);
        }
        return true;
    }

    /**
     * Remove the queue if it and its descendents are all empty.
     * @param queue
     * @return true if removed, false otherwise
     */
    private boolean removeQueueIfEmpty(FSQueue queue) {
        if (isEmpty(queue)) {
            removeQueue(queue);
            return true;
        }
        return false;
    }

    /**
     * Remove a queue and all its descendents.
     */
    private void removeQueue(FSQueue queue) {
        synchronized (queues) {
            if (queue instanceof FSLeafQueue) {
                leafQueues.remove(queue);
            } else {
                // Depth-first: detach all children before this queue itself.
                for (FSQueue childQueue:queue.getChildQueues()) {
                    removeQueue(childQueue);
                }
            }
            queues.remove(queue.getName());
            FSParentQueue parent = queue.getParent();
            parent.removeChildQueue(queue);
        }
    }

    /**
     * Returns true if there are no applications, running or not, in the given
     * queue or any of its descendents.
     */
    protected boolean isEmpty(FSQueue queue) {
        if (queue instanceof FSLeafQueue) {
            FSLeafQueue leafQueue = (FSLeafQueue)queue;
            return queue.getNumRunnableApps() == 0 &&
                leafQueue.getNumNonRunnableApps() == 0;
        } else {
            for (FSQueue child : queue.getChildQueues()) {
                if (!isEmpty(child)) {
                    return false;
                }
            }
            return true;
        }
    }

    /**
     * Gets a queue by name.
     */
    public FSQueue getQueue(String name) {
        name = ensureRootPrefix(name);
        synchronized (queues) {
            return queues.get(name);
        }
    }

    /**
     * Return whether a queue exists already.
     */
    public boolean exists(String name) {
        name = ensureRootPrefix(name);
        synchronized (queues) {
            return queues.containsKey(name);
        }
    }

    /**
     * Get a collection of all leaf queues
     */
    public Collection<FSLeafQueue> getLeafQueues() {
        synchronized (queues) {
            // NOTE(review): returns the internal copy-on-write list itself
            // (not a snapshot, unlike getQueues() below) - confirm callers
            // treat it as read-only.
            return leafQueues;
        }
    }

    /**
     * Get a collection of all queues
     */
    public Collection<FSQueue> getQueues() {
        synchronized (queues) {
            return ImmutableList.copyOf(queues.values());
        }
    }

    // Normalizes a queue name to its fully-qualified "root.<name>" form.
    private String ensureRootPrefix(String name) {
        if (!name.startsWith(ROOT_QUEUE + ".") && !name.equals(ROOT_QUEUE)) {
            name = ROOT_QUEUE + "." + name;
        }
        return name;
    }

    /**
     * Re-applies the allocation file configuration: creates configured queues,
     * refreshes per-queue metrics and scheduling policies, and recomputes
     * steady fair shares and preemption variables.
     */
    public void updateAllocationConfiguration(AllocationConfiguration queueConf) {
        // Create leaf queues and the parent queues in a leaf's ancestry if they do not exist
        for (String name : queueConf.getConfiguredQueues().get(FSQueueType.LEAF)) {
            if (removeEmptyIncompatibleQueues(name, FSQueueType.LEAF)) {
                getLeafQueue(name, true);
            }
        }

        // At this point all leaves and 'parents with at least one child' would have been created.
        // Now create parents with no configured leaf.
        for (String name : queueConf.getConfiguredQueues().get(
            FSQueueType.PARENT)) {
            if (removeEmptyIncompatibleQueues(name, FSQueueType.PARENT)) {
                getParentQueue(name, true);
            }
        }

        for (FSQueue queue : queues.values()) {
            // Update queue metrics
            FSQueueMetrics queueMetrics = queue.getMetrics();
            queueMetrics.setMinShare(queue.getMinShare());
            queueMetrics.setMaxShare(queue.getMaxShare());
            // Set scheduling policies
            try {
                SchedulingPolicy policy = queueConf.getSchedulingPolicy(queue.getName());
                policy.initialize(scheduler.getClusterResource());
                queue.setPolicy(policy);
            } catch (AllocationConfigurationException ex) {
                LOG.warn("Cannot apply configured scheduling policy to queue "
                    + queue.getName(), ex);
            }
        }

        // Update steady fair shares for all queues
        rootQueue.recomputeSteadyShares();
        // Update the fair share preemption timeouts and preemption for all queues
        // recursively
        rootQueue.updatePreemptionVariables();
    }

    /**
     * Check whether queue name is valid,
     * return true if it is valid, otherwise return false.
     */
    @VisibleForTesting
    boolean isQueueNameValid(String node) {
        // Reject empty segments and segments with leading/trailing whitespace.
        return !node.isEmpty() && node.equals(node.trim());
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.gen;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.operator.DriverYieldSignal;
import com.facebook.presto.operator.project.PageProcessor;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.PageBuilder;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.function.OperatorType;
import com.facebook.presto.spi.type.StandardTypes;
import com.facebook.presto.sql.relational.RowExpression;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import io.airlift.tpch.LineItem;
import io.airlift.tpch.LineItemGenerator;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static com.facebook.presto.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static com.facebook.presto.metadata.FunctionKind.SCALAR;
import static com.facebook.presto.metadata.Signature.internalOperator;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.DateType.DATE;
import static com.facebook.presto.spi.type.DoubleType.DOUBLE;
import static com.facebook.presto.spi.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.sql.relational.Expressions.call;
import static com.facebook.presto.sql.relational.Expressions.constant;
import static com.facebook.presto.sql.relational.Expressions.field;
import static com.google.common.base.Preconditions.checkState;
import static io.airlift.slice.Slices.utf8Slice;
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.SECONDS)
@Fork(5)
@Warmup(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 10, time = 500, timeUnit = TimeUnit.MILLISECONDS)
public class BenchmarkPageProcessor
{
private static final int EXTENDED_PRICE = 0;
private static final int DISCOUNT = 1;
private static final int SHIP_DATE = 2;
private static final int QUANTITY = 3;
private static final Slice MIN_SHIP_DATE = utf8Slice("1994-01-01");
private static final Slice MAX_SHIP_DATE = utf8Slice("1995-01-01");
private Page inputPage;
private PageProcessor compiledProcessor;
@Setup
public void setup()
{
inputPage = createInputPage();
MetadataManager metadata = MetadataManager.createTestMetadataManager();
compiledProcessor = new ExpressionCompiler(metadata, new PageFunctionCompiler(metadata, 0)).compilePageProcessor(Optional.of(FILTER), ImmutableList.of(PROJECT)).get();
}
@Benchmark
public Page handCoded()
{
PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(DOUBLE));
int count = Tpch1FilterAndProject.process(inputPage, 0, inputPage.getPositionCount(), pageBuilder);
checkState(count == inputPage.getPositionCount());
return pageBuilder.build();
}
@Benchmark
public List<Optional<Page>> compiled()
{
return ImmutableList.copyOf(
compiledProcessor.process(
null,
new DriverYieldSignal(),
newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()),
inputPage));
}
public static void main(String[] args)
throws RunnerException
{
new BenchmarkPageProcessor().setup();
Options options = new OptionsBuilder()
.verbosity(VerboseMode.NORMAL)
.include(".*" + BenchmarkPageProcessor.class.getSimpleName() + ".*")
.build();
new Runner(options).run();
}
private static Page createInputPage()
{
PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(DOUBLE, DOUBLE, VARCHAR, DOUBLE));
LineItemGenerator lineItemGenerator = new LineItemGenerator(1, 1, 1);
Iterator<LineItem> iterator = lineItemGenerator.iterator();
for (int i = 0; i < 10_000; i++) {
pageBuilder.declarePosition();
LineItem lineItem = iterator.next();
DOUBLE.writeDouble(pageBuilder.getBlockBuilder(EXTENDED_PRICE), lineItem.getExtendedPrice());
DOUBLE.writeDouble(pageBuilder.getBlockBuilder(DISCOUNT), lineItem.getDiscount());
DATE.writeLong(pageBuilder.getBlockBuilder(SHIP_DATE), lineItem.getShipDate());
DOUBLE.writeDouble(pageBuilder.getBlockBuilder(QUANTITY), lineItem.getQuantity());
}
return pageBuilder.build();
}
private static final class Tpch1FilterAndProject
{
public static int process(Page page, int start, int end, PageBuilder pageBuilder)
{
Block discountBlock = page.getBlock(DISCOUNT);
int position = start;
for (; position < end; position++) {
// where shipdate >= '1994-01-01'
// and shipdate < '1995-01-01'
// and discount >= 0.05
// and discount <= 0.07
// and quantity < 24;
if (filter(position, discountBlock, page.getBlock(SHIP_DATE), page.getBlock(QUANTITY))) {
project(position, pageBuilder, page.getBlock(EXTENDED_PRICE), discountBlock);
}
}
return position;
}
private static void project(int position, PageBuilder pageBuilder, Block extendedPriceBlock, Block discountBlock)
{
pageBuilder.declarePosition();
if (discountBlock.isNull(position) || extendedPriceBlock.isNull(position)) {
pageBuilder.getBlockBuilder(0).appendNull();
}
else {
DOUBLE.writeDouble(pageBuilder.getBlockBuilder(0), DOUBLE.getDouble(extendedPriceBlock, position) * DOUBLE.getDouble(discountBlock, position));
}
}
private static boolean filter(int position, Block discountBlock, Block shipDateBlock, Block quantityBlock)
{
return !shipDateBlock.isNull(position) && VARCHAR.getSlice(shipDateBlock, position).compareTo(MIN_SHIP_DATE) >= 0 &&
!shipDateBlock.isNull(position) && VARCHAR.getSlice(shipDateBlock, position).compareTo(MAX_SHIP_DATE) < 0 &&
!discountBlock.isNull(position) && DOUBLE.getDouble(discountBlock, position) >= 0.05 &&
!discountBlock.isNull(position) && DOUBLE.getDouble(discountBlock, position) <= 0.07 &&
!quantityBlock.isNull(position) && DOUBLE.getDouble(quantityBlock, position) < 24;
}
}
// where shipdate >= '1994-01-01'
// and shipdate < '1995-01-01'
// and discount >= 0.05
// and discount <= 0.07
// and quantity < 24;
private static final RowExpression FILTER = call(new Signature("AND", SCALAR, parseTypeSignature(StandardTypes.BOOLEAN)),
BOOLEAN,
call(internalOperator(OperatorType.GREATER_THAN_OR_EQUAL, BOOLEAN.getTypeSignature(), VARCHAR.getTypeSignature(), VARCHAR.getTypeSignature()),
BOOLEAN,
field(SHIP_DATE, VARCHAR),
constant(MIN_SHIP_DATE, VARCHAR)),
call(new Signature("AND", SCALAR, parseTypeSignature(StandardTypes.BOOLEAN)),
BOOLEAN,
call(internalOperator(OperatorType.LESS_THAN, BOOLEAN.getTypeSignature(), VARCHAR.getTypeSignature(), VARCHAR.getTypeSignature()),
BOOLEAN,
field(SHIP_DATE, VARCHAR),
constant(MAX_SHIP_DATE, VARCHAR)),
call(new Signature("AND", SCALAR, parseTypeSignature(StandardTypes.BOOLEAN)),
BOOLEAN,
call(internalOperator(OperatorType.GREATER_THAN_OR_EQUAL, BOOLEAN.getTypeSignature(), DOUBLE.getTypeSignature(), DOUBLE.getTypeSignature()),
BOOLEAN,
field(DISCOUNT, DOUBLE),
constant(0.05, DOUBLE)),
call(new Signature("AND", SCALAR, parseTypeSignature(StandardTypes.BOOLEAN)),
BOOLEAN,
call(internalOperator(OperatorType.LESS_THAN_OR_EQUAL, BOOLEAN.getTypeSignature(), DOUBLE.getTypeSignature(), DOUBLE.getTypeSignature()),
BOOLEAN,
field(DISCOUNT, DOUBLE),
constant(0.07, DOUBLE)),
call(internalOperator(OperatorType.LESS_THAN, BOOLEAN.getTypeSignature(), DOUBLE.getTypeSignature(), DOUBLE.getTypeSignature()),
BOOLEAN,
field(QUANTITY, DOUBLE),
constant(24.0, DOUBLE))))));
// Projection: extendedprice * discount (the revenue term of TPC-H style query 6).
private static final RowExpression PROJECT = call(
        internalOperator(OperatorType.MULTIPLY, DOUBLE.getTypeSignature(), DOUBLE.getTypeSignature(), DOUBLE.getTypeSignature()),
        DOUBLE,
        field(EXTENDED_PRICE, DOUBLE),
        field(DISCOUNT, DOUBLE));
}
| |
/**
* Copyright 2012, June Inc
* All Rights Reserved.
**/
package io.academia.me.models;
import io.academia.models.core.Student;
import java.io.Serializable;
import java.util.Date;
import java.util.Objects;
import javax.persistence.*;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * Stores attendance information for every student that is verified for an examination:
 * who wrote which {@link Paper}, when they entered and exited, and whether the
 * fingerprint verification was an OK match or an impersonation.
 *
 * @author <a href="mailto: iroiso@academia.io">Iroiso</a>
 */
@Entity
@XmlRootElement
@XmlAccessorType(XmlAccessType.PROPERTY)
public class Attendance implements Serializable {

    /** Outcome of a verification: the candidate is either genuine (OK) or an impersonator. */
    @XmlEnum
    public enum Type { OK, IMPERSONATION }

    @Id
    @GeneratedValue String id;                    // auto-generated GUID for this record
    String studentGUID;                           // GUID of the student who wrote the paper
    String comment;                               // Optional comment about this case..
    String courseCode;                            // course code of the paper being tested
    @Lob String template;                         // Optional=> Base64 encoded String of a DPFPTemplate, useful for deferred scanning.
    @Enumerated(EnumType.STRING) Type type;       // OK or IMPERSONATION
    @Temporal(TemporalType.DATE) Date paperDate;  // date the paper was written
    @Temporal(TemporalType.TIME) Date entryTime;  // time the student entered for the paper
    @Temporal(TemporalType.TIME) Date exitTime;   // time the student exited the paper

    /**
     * A GUID for this {@link Attendance} record, this is usually automatically generated.
     * @return the record GUID
     */
    public String getId() {
        return id;
    }

    /**
     * Set the GUID for this {@link Attendance} record, id's are usually auto-generated.
     * @param id the record GUID
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * Returns the GUID for the {@link Student} who wrote this {@link Paper}.
     * @return the student's GUID
     */
    public String getStudentGUID() {
        return studentGUID;
    }

    /**
     * Set the GUID of the {@link Student} who wrote this {@link Paper}.
     * @param studentGUID the student's GUID
     */
    public void setStudentGUID(String studentGUID) {
        this.studentGUID = studentGUID;
    }

    /**
     * Extra comments that the administrator can add for this examination.
     * @return the administrator's comment, may be null
     */
    public String getComment() {
        return comment;
    }

    /**
     * Sets extra comments that the administrator can add for this examination.
     * @param comment the administrator's comment
     */
    public void setComment(String comment) {
        this.comment = comment;
    }

    /**
     * Returns the course code of the {@link Paper} that is currently being tested.
     * @return the course code
     */
    public String getCourseCode() {
        return courseCode;
    }

    /**
     * Sets the course code of the {@link Paper} that is currently being tested.
     * @param courseCode the course code
     */
    public void setCourseCode(String courseCode) {
        this.courseCode = courseCode;
    }

    /**
     * Logs the time which the current {@link Student} entered for this Paper.
     * @return the entry time
     */
    public Date getEntryTime() {
        return entryTime;
    }

    /**
     * Sets the time in which the current {@link Student} entered for this {@link Paper}.
     * @param entryTime the entry time
     */
    public void setEntryTime(Date entryTime) {
        this.entryTime = entryTime;
    }

    /**
     * Returns the time in which the current {@link Student} exited this {@link Paper}.
     * @return the exit time
     */
    public Date getExitTime() {
        return exitTime;
    }

    /**
     * Sets the time in which the current {@link Student} exited this {@link Paper}.
     * @param exitTime the exit time
     */
    public void setExitTime(Date exitTime) {
        this.exitTime = exitTime;
    }

    /**
     * Sets the date in which this {@link Paper} was written.
     * @param paperDate the paper date
     */
    public void setPaperDate(Date paperDate) {
        this.paperDate = paperDate;
    }

    /**
     * Gets the date in which this {@link Paper} was written.
     * @return the paper date
     */
    public Date getPaperDate() {
        return paperDate;
    }

    /**
     * Returns a Base64 Encoded DPFPTemplate, of the {@link Student} whose data was
     * captured, this is useful for deferred fingerprint scanning.
     * @return the Base64 encoded template, may be null
     */
    public String getTemplate() {
        return template;
    }

    /**
     * Sets a Base64 Encoded DPFPTemplate, of the {@link Student} whose data was
     * captured, this is useful for deferred fingerprint scanning.
     * @param template the Base64 encoded template
     */
    public void setTemplate(String template) {
        this.template = template;
    }

    /**
     * Returns the type of attendance that just occurred, this is either an OK or
     * an IMPERSONATION.
     * @return the attendance type
     */
    public Type getType() {
        return type;
    }

    /**
     * Sets the type of attendance that just occurred, this is either an OK or
     * an IMPERSONATION.
     * @param type the attendance type
     */
    public void setType(Type type) {
        this.type = type;
    }

    /**
     * Two records are equal when their id, type, studentGUID, courseCode and
     * paperDate all match; entry/exit times, comment and template are ignored.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            // reflexive shortcut — avoids the field-by-field comparison
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final Attendance other = (Attendance) obj;
        if (!Objects.equals(this.id, other.id)) {
            return false;
        }
        if (this.type != other.type) {
            return false;
        }
        if (!Objects.equals(this.studentGUID, other.studentGUID)) {
            return false;
        }
        if (!Objects.equals(this.courseCode, other.courseCode)) {
            return false;
        }
        if (!Objects.equals(this.paperDate, other.paperDate)) {
            return false;
        }
        return true;
    }

    /** Hash code over the same fields used by {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        int hash = 7;
        hash = 97 * hash + Objects.hashCode(this.id);
        // Objects.hashCode handles null exactly like the old ternary did
        hash = 97 * hash + Objects.hashCode(this.type);
        hash = 97 * hash + Objects.hashCode(this.studentGUID);
        hash = 97 * hash + Objects.hashCode(this.courseCode);
        hash = 97 * hash + Objects.hashCode(this.paperDate);
        return hash;
    }
}
| |
/*
* Copyright 2010 Srikanth Reddy Lingala
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lingala.zip4j.io;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.zip.CRC32;
import net.lingala.zip4j.core.HeaderWriter;
import net.lingala.zip4j.crypto.AESEncrpyter;
import net.lingala.zip4j.crypto.IEncrypter;
import net.lingala.zip4j.crypto.StandardEncrypter;
import net.lingala.zip4j.exception.ZipException;
import net.lingala.zip4j.model.AESExtraDataRecord;
import net.lingala.zip4j.model.CentralDirectory;
import net.lingala.zip4j.model.EndCentralDirRecord;
import net.lingala.zip4j.model.FileHeader;
import net.lingala.zip4j.model.LocalFileHeader;
import net.lingala.zip4j.model.ZipModel;
import net.lingala.zip4j.model.ZipParameters;
import net.lingala.zip4j.util.InternalZipConstants;
import net.lingala.zip4j.util.Raw;
import net.lingala.zip4j.util.Zip4jConstants;
import net.lingala.zip4j.util.Zip4jUtil;
public class CipherOutputStream extends BaseOutputStream {
protected OutputStream outputStream;
private File sourceFile;
protected FileHeader fileHeader;
protected LocalFileHeader localFileHeader;
private IEncrypter encrypter;
protected ZipParameters zipParameters;
protected ZipModel zipModel;
private long totalBytesWritten;
protected CRC32 crc;
private long bytesWrittenForThisFile;
private byte[] pendingBuffer;
private int pendingBufferLength;
private long totalBytesRead;
public CipherOutputStream(OutputStream outputStream, ZipModel zipModel) {
this.outputStream = outputStream;
initZipModel(zipModel);
crc = new CRC32();
this.totalBytesWritten = 0;
this.bytesWrittenForThisFile = 0;
this.pendingBuffer = new byte[InternalZipConstants.AES_BLOCK_SIZE];
this.pendingBufferLength = 0;
this.totalBytesRead = 0;
}
public void putNextEntry(File file, ZipParameters zipParameters) throws ZipException {
if (!zipParameters.isSourceExternalStream() && file == null) {
throw new ZipException("input file is null");
}
if (!zipParameters.isSourceExternalStream() && !Zip4jUtil.checkFileExists(file)) {
throw new ZipException("input file does not exist");
}
try {
sourceFile = file;
this.zipParameters = (ZipParameters)zipParameters.clone();
if (!zipParameters.isSourceExternalStream()) {
if (sourceFile.isDirectory()) {
this.zipParameters.setEncryptFiles(false);
this.zipParameters.setEncryptionMethod(-1);
this.zipParameters.setCompressionMethod(Zip4jConstants.COMP_STORE);
}
} else {
if (!Zip4jUtil.isStringNotNullAndNotEmpty(this.zipParameters.getFileNameInZip())) {
throw new ZipException("file name is empty for external stream");
}
if (this.zipParameters.getFileNameInZip().endsWith("/") ||
this.zipParameters.getFileNameInZip().endsWith("\\")) {
this.zipParameters.setEncryptFiles(false);
this.zipParameters.setEncryptionMethod(-1);
this.zipParameters.setCompressionMethod(Zip4jConstants.COMP_STORE);
}
}
createFileHeader();
createLocalFileHeader();
if (zipModel.isSplitArchive()) {
if (zipModel.getCentralDirectory() == null ||
zipModel.getCentralDirectory().getFileHeaders() == null ||
zipModel.getCentralDirectory().getFileHeaders().size() == 0) {
byte[] intByte = new byte[4];
Raw.writeIntLittleEndian(intByte, 0, (int)InternalZipConstants.SPLITSIG);
outputStream.write(intByte);
totalBytesWritten += 4;
}
}
if (this.outputStream instanceof SplitOutputStream) {
if (totalBytesWritten == 4) {
fileHeader.setOffsetLocalHeader(4);
} else {
fileHeader.setOffsetLocalHeader(((SplitOutputStream)outputStream).getFilePointer());
}
} else {
if (totalBytesWritten == 4) {
fileHeader.setOffsetLocalHeader(4);
} else {
fileHeader.setOffsetLocalHeader(totalBytesWritten);
}
}
HeaderWriter headerWriter = new HeaderWriter();
totalBytesWritten += headerWriter.writeLocalFileHeader(zipModel, localFileHeader, outputStream);
if (this.zipParameters.isEncryptFiles()) {
initEncrypter();
if (encrypter != null) {
if (zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD) {
byte[] headerBytes = ((StandardEncrypter)encrypter).getHeaderBytes();
outputStream.write(headerBytes);
totalBytesWritten += headerBytes.length;
bytesWrittenForThisFile += headerBytes.length;
} else if (zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) {
byte[] saltBytes = ((AESEncrpyter)encrypter).getSaltBytes();
byte[] passwordVerifier = ((AESEncrpyter)encrypter).getDerivedPasswordVerifier();
outputStream.write(saltBytes);
outputStream.write(passwordVerifier);
totalBytesWritten += saltBytes.length + passwordVerifier.length;
bytesWrittenForThisFile += saltBytes.length + passwordVerifier.length;
}
}
}
crc.reset();
} catch (CloneNotSupportedException e) {
throw new ZipException(e);
} catch (ZipException e) {
throw e;
} catch (Exception e) {
throw new ZipException(e);
}
}
private void initEncrypter() throws ZipException {
if (!zipParameters.isEncryptFiles()) {
encrypter = null;
return;
}
switch (zipParameters.getEncryptionMethod()) {
case Zip4jConstants.ENC_METHOD_STANDARD:
// Since we do not know the crc here, we use the modification time for encrypting.
encrypter = new StandardEncrypter(zipParameters.getPassword(), (localFileHeader.getLastModFileTime() & 0x0000ffff) << 16);
break;
case Zip4jConstants.ENC_METHOD_AES:
encrypter = new AESEncrpyter(zipParameters.getPassword(), zipParameters.getAesKeyStrength());
break;
default:
throw new ZipException("invalid encprytion method");
}
}
private void initZipModel(ZipModel zipModel) {
if (zipModel == null) {
this.zipModel = new ZipModel();
} else {
this.zipModel = zipModel;
}
if (this.zipModel.getEndCentralDirRecord() == null)
this.zipModel.setEndCentralDirRecord(new EndCentralDirRecord());
if (this.zipModel.getCentralDirectory() == null)
this.zipModel.setCentralDirectory(new CentralDirectory());
if (this.zipModel.getCentralDirectory().getFileHeaders() == null)
this.zipModel.getCentralDirectory().setFileHeaders(new ArrayList());
if (this.zipModel.getLocalFileHeaderList() == null)
this.zipModel.setLocalFileHeaderList(new ArrayList());
if (this.outputStream instanceof SplitOutputStream) {
if (((SplitOutputStream)outputStream).isSplitZipFile()) {
this.zipModel.setSplitArchive(true);
this.zipModel.setSplitLength(((SplitOutputStream)outputStream).getSplitLength());
}
}
this.zipModel.getEndCentralDirRecord().setSignature(InternalZipConstants.ENDSIG);
}
public void write(int bval) throws IOException {
byte[] b = new byte[1];
b[0] = (byte) bval;
write(b, 0, 1);
}
public void write(byte[] b) throws IOException {
if (b == null)
throw new NullPointerException();
if (b.length == 0) return;
write(b, 0, b.length);
}
public void write(byte[] b, int off, int len) throws IOException {
if (len == 0) return;
if (zipParameters.isEncryptFiles() &&
zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) {
if (pendingBufferLength != 0) {
if (len >= (InternalZipConstants.AES_BLOCK_SIZE - pendingBufferLength)) {
System.arraycopy(b, off, pendingBuffer, pendingBufferLength,
(InternalZipConstants.AES_BLOCK_SIZE - pendingBufferLength));
encryptAndWrite(pendingBuffer, 0, pendingBuffer.length);
off = (InternalZipConstants.AES_BLOCK_SIZE - pendingBufferLength);
len = len - off;
pendingBufferLength = 0;
} else {
System.arraycopy(b, off, pendingBuffer, pendingBufferLength,
len);
pendingBufferLength += len;
return;
}
}
if (len != 0 && len % 16 != 0) {
System.arraycopy(b, (len + off) - (len % 16), pendingBuffer, 0, len % 16);
pendingBufferLength = len % 16;
len = len - pendingBufferLength;
}
}
if (len != 0)
encryptAndWrite(b, off, len);
}
private void encryptAndWrite(byte[] b, int off, int len) throws IOException {
if (encrypter != null) {
try {
encrypter.encryptData(b, off, len);
} catch (ZipException e) {
throw new IOException(e.getMessage());
}
}
outputStream.write(b, off, len);
totalBytesWritten += len;
bytesWrittenForThisFile += len;
}
public void closeEntry() throws IOException, ZipException {
if (this.pendingBufferLength != 0) {
encryptAndWrite(pendingBuffer, 0, pendingBufferLength);
pendingBufferLength = 0;
}
if (this.zipParameters.isEncryptFiles() &&
this.zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) {
if (encrypter instanceof AESEncrpyter) {
outputStream.write(((AESEncrpyter)encrypter).getFinalMac());
bytesWrittenForThisFile += 10;
totalBytesWritten += 10;
} else {
throw new ZipException("invalid encrypter for AES encrypted file");
}
}
fileHeader.setCompressedSize(bytesWrittenForThisFile);
localFileHeader.setCompressedSize(bytesWrittenForThisFile);
if (zipParameters.isSourceExternalStream()) {
fileHeader.setUncompressedSize(totalBytesRead);
if (localFileHeader.getUncompressedSize() != totalBytesRead) {
localFileHeader.setUncompressedSize(totalBytesRead);
}
}
long crc32 = crc.getValue();
if (fileHeader.isEncrypted()) {
if (fileHeader.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) {
crc32 = 0;
}
}
if (zipParameters.isEncryptFiles() &&
zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) {
fileHeader.setCrc32(0);
localFileHeader.setCrc32(0);
} else {
fileHeader.setCrc32(crc32);
localFileHeader.setCrc32(crc32);
}
zipModel.getLocalFileHeaderList().add(localFileHeader);
zipModel.getCentralDirectory().getFileHeaders().add(fileHeader);
HeaderWriter headerWriter = new HeaderWriter();
totalBytesWritten += headerWriter.writeExtendedLocalHeader(localFileHeader, outputStream);
crc.reset();
bytesWrittenForThisFile = 0;
encrypter = null;
totalBytesRead = 0;
}
public void finish() throws IOException, ZipException {
zipModel.getEndCentralDirRecord().setOffsetOfStartOfCentralDir(totalBytesWritten);
HeaderWriter headerWriter = new HeaderWriter();
headerWriter.finalizeZipFile(zipModel, outputStream);
}
public void close() throws IOException {
if (outputStream != null)
outputStream.close();
}
private void createFileHeader() throws ZipException {
this.fileHeader = new FileHeader();
fileHeader.setSignature((int)InternalZipConstants.CENSIG);
fileHeader.setVersionMadeBy(20);
fileHeader.setVersionNeededToExtract(20);
if (zipParameters.isEncryptFiles() &&
zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) {
fileHeader.setCompressionMethod(Zip4jConstants.ENC_METHOD_AES);
fileHeader.setAesExtraDataRecord(generateAESExtraDataRecord(zipParameters));
} else {
fileHeader.setCompressionMethod(zipParameters.getCompressionMethod());
}
if (zipParameters.isEncryptFiles()) {
fileHeader.setEncrypted(true);
fileHeader.setEncryptionMethod(zipParameters.getEncryptionMethod());
}
String fileName = null;
if (zipParameters.isSourceExternalStream()) {
fileHeader.setLastModFileTime((int) Zip4jUtil.javaToDosTime(System.currentTimeMillis()));
if (!Zip4jUtil.isStringNotNullAndNotEmpty(zipParameters.getFileNameInZip())) {
throw new ZipException("fileNameInZip is null or empty");
}
fileName = zipParameters.getFileNameInZip();
} else {
fileHeader.setLastModFileTime((int) Zip4jUtil.javaToDosTime((Zip4jUtil.getLastModifiedFileTime(
sourceFile, zipParameters.getTimeZone()))));
fileHeader.setUncompressedSize(sourceFile.length());
fileName = Zip4jUtil.getRelativeFileName(
sourceFile.getAbsolutePath(), zipParameters.getRootFolderInZip(), zipParameters.getDefaultFolderPath());
}
if (!Zip4jUtil.isStringNotNullAndNotEmpty(fileName)) {
throw new ZipException("fileName is null or empty. unable to create file header");
}
fileHeader.setFileName(fileName);
if (Zip4jUtil.isStringNotNullAndNotEmpty(zipModel.getFileNameCharset())) {
fileHeader.setFileNameLength(Zip4jUtil.getEncodedStringLength(fileName,
zipModel.getFileNameCharset()));
} else {
fileHeader.setFileNameLength(Zip4jUtil.getEncodedStringLength(fileName));
}
if (outputStream instanceof SplitOutputStream) {
fileHeader.setDiskNumberStart(((SplitOutputStream)outputStream).getCurrSplitFileCounter());
} else {
fileHeader.setDiskNumberStart(0);
}
int fileAttrs = 0;
if (!zipParameters.isSourceExternalStream())
fileAttrs = getFileAttributes(sourceFile);
byte[] externalFileAttrs = {(byte)fileAttrs, 0, 0, 0};
fileHeader.setExternalFileAttr(externalFileAttrs);
if (zipParameters.isSourceExternalStream()) {
fileHeader.setDirectory(fileName.endsWith("/") || fileName.endsWith("\\"));
} else {
fileHeader.setDirectory(this.sourceFile.isDirectory());
}
if (fileHeader.isDirectory()) {
fileHeader.setCompressedSize(0);
fileHeader.setUncompressedSize(0);
} else {
if (!zipParameters.isSourceExternalStream()) {
long fileSize = Zip4jUtil.getFileLengh(sourceFile);
if (zipParameters.getCompressionMethod() == Zip4jConstants.COMP_STORE) {
if (zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD) {
fileHeader.setCompressedSize(fileSize
+ InternalZipConstants.STD_DEC_HDR_SIZE);
} else if (zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_AES) {
int saltLength = 0;
switch (zipParameters.getAesKeyStrength()) {
case Zip4jConstants.AES_STRENGTH_128:
saltLength = 8;
break;
case Zip4jConstants.AES_STRENGTH_256:
saltLength = 16;
break;
default:
throw new ZipException("invalid aes key strength, cannot determine key sizes");
}
fileHeader.setCompressedSize(fileSize + saltLength
+ InternalZipConstants.AES_AUTH_LENGTH + 2); //2 is password verifier
} else {
fileHeader.setCompressedSize(fileSize);
}
} else {
fileHeader.setCompressedSize(0);
}
fileHeader.setUncompressedSize(fileSize);
}
}
if (zipParameters.isEncryptFiles() &&
zipParameters.getEncryptionMethod() == Zip4jConstants.ENC_METHOD_STANDARD) {
fileHeader.setCrc32(zipParameters.getSourceFileCRC());
}
byte[] shortByte = new byte[2];
shortByte[0] = Raw.bitArrayToByte(generateGeneralPurposeBitArray(
fileHeader.isEncrypted(), zipParameters.getCompressionMethod()));
boolean isFileNameCharsetSet = Zip4jUtil.isStringNotNullAndNotEmpty(zipModel.getFileNameCharset());
if ((isFileNameCharsetSet &&
zipModel.getFileNameCharset().equalsIgnoreCase(InternalZipConstants.CHARSET_UTF8)) ||
(!isFileNameCharsetSet &&
Zip4jUtil.detectCharSet(fileHeader.getFileName()).equals(InternalZipConstants.CHARSET_UTF8))) {
shortByte[1] = 8;
} else {
shortByte[1] = 0;
}
fileHeader.setGeneralPurposeFlag(shortByte);
}
private void createLocalFileHeader() throws ZipException {
if (fileHeader == null) {
throw new ZipException("file header is null, cannot create local file header");
}
this.localFileHeader = new LocalFileHeader();
localFileHeader.setSignature((int)InternalZipConstants.LOCSIG);
localFileHeader.setVersionNeededToExtract(fileHeader.getVersionNeededToExtract());
localFileHeader.setCompressionMethod(fileHeader.getCompressionMethod());
localFileHeader.setLastModFileTime(fileHeader.getLastModFileTime());
localFileHeader.setUncompressedSize(fileHeader.getUncompressedSize());
localFileHeader.setFileNameLength(fileHeader.getFileNameLength());
localFileHeader.setFileName(fileHeader.getFileName());
localFileHeader.setEncrypted(fileHeader.isEncrypted());
localFileHeader.setEncryptionMethod(fileHeader.getEncryptionMethod());
localFileHeader.setAesExtraDataRecord(fileHeader.getAesExtraDataRecord());
localFileHeader.setCrc32(fileHeader.getCrc32());
localFileHeader.setCompressedSize(fileHeader.getCompressedSize());
localFileHeader.setGeneralPurposeFlag((byte[])fileHeader.getGeneralPurposeFlag().clone());
}
/**
* Checks the file attributes and returns an integer
* @param file
* @return
* @throws ZipException
*/
private int getFileAttributes(File file) throws ZipException {
if (file == null) {
throw new ZipException("input file is null, cannot get file attributes");
}
if (!file.exists()) {
return 0;
}
if (file.isDirectory()) {
if (file.isHidden()) {
return InternalZipConstants.FOLDER_MODE_HIDDEN;
} else {
return InternalZipConstants.FOLDER_MODE_NONE;
}
} else {
if (!file.canWrite() && file.isHidden()) {
return InternalZipConstants.FILE_MODE_READ_ONLY_HIDDEN;
} else if (!file.canWrite()) {
return InternalZipConstants.FILE_MODE_READ_ONLY;
} else if (file.isHidden()) {
return InternalZipConstants.FILE_MODE_HIDDEN;
} else {
return InternalZipConstants.FILE_MODE_NONE;
}
}
}
private int[] generateGeneralPurposeBitArray(boolean isEncrpyted, int compressionMethod) {
int[] generalPurposeBits = new int[8];
if (isEncrpyted) {
generalPurposeBits[0] = 1;
} else {
generalPurposeBits[0] = 0;
}
if (compressionMethod == Zip4jConstants.COMP_DEFLATE) {
// Have to set flags for deflate
} else {
generalPurposeBits[1] = 0;
generalPurposeBits[2] = 0;
}
generalPurposeBits[3] = 1;
return generalPurposeBits;
}
private AESExtraDataRecord generateAESExtraDataRecord(ZipParameters parameters) throws ZipException {
if (parameters == null) {
throw new ZipException("zip parameters are null, cannot generate AES Extra Data record");
}
AESExtraDataRecord aesDataRecord = new AESExtraDataRecord();
aesDataRecord.setSignature(InternalZipConstants.AESSIG);
aesDataRecord.setDataSize(7);
aesDataRecord.setVendorID("AE");
// Always set the version number to 2 as we do not store CRC for any AES encrypted files
// only MAC is stored and as per the specification, if version number is 2, then MAC is read
// and CRC is ignored
aesDataRecord.setVersionNumber(2);
if (parameters.getAesKeyStrength() == Zip4jConstants.AES_STRENGTH_128) {
aesDataRecord.setAesStrength(Zip4jConstants.AES_STRENGTH_128);
} else if (parameters.getAesKeyStrength() == Zip4jConstants.AES_STRENGTH_256) {
aesDataRecord.setAesStrength(Zip4jConstants.AES_STRENGTH_256);
} else {
throw new ZipException("invalid AES key strength, cannot generate AES Extra data record");
}
aesDataRecord.setCompressionMethod(parameters.getCompressionMethod());
return aesDataRecord;
}
public void decrementCompressedFileSize(int value) {
if (value <= 0) return;
if (value <= this.bytesWrittenForThisFile) {
this.bytesWrittenForThisFile -= value;
}
}
protected void updateTotalBytesRead(int toUpdate) {
if (toUpdate > 0) {
totalBytesRead += toUpdate;
}
}
public void setSourceFile(File sourceFile) {
this.sourceFile = sourceFile;
}
public File getSourceFile() {
return sourceFile;
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.client;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.tasks.TaskSubmissionResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.reindex.BulkByScrollResponse;
import org.elasticsearch.index.reindex.DeleteByQueryAction;
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
import org.elasticsearch.index.reindex.ReindexRequest;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.RawTaskStatus;
import org.elasticsearch.tasks.TaskId;
import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
public class ReindexIT extends ESRestHighLevelClientTestCase {
public void testReindex() throws IOException {
    final String sourceIndex = "source1";
    final String destinationIndex = "dest";
    {
        // Prepare: single-shard, zero-replica indices plus two seed documents.
        final Settings indexSettings = Settings.builder()
                .put("number_of_shards", 1)
                .put("number_of_replicas", 0)
                .build();
        createIndex(sourceIndex, indexSettings);
        createIndex(destinationIndex, indexSettings);
        final BulkRequest seedRequest = new BulkRequest()
                .add(new IndexRequest(sourceIndex).id("1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
                .add(new IndexRequest(sourceIndex).id("2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON))
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        final RestStatus seedStatus = highLevelClient().bulk(seedRequest, RequestOptions.DEFAULT).status();
        assertEquals(RestStatus.OK, seedStatus);
    }
    {
        // reindex one document with id 1 from source to destination
        final ReindexRequest request = new ReindexRequest();
        request.setSourceIndices(sourceIndex);
        request.setDestIndex(destinationIndex);
        request.setSourceQuery(new IdsQueryBuilder().addIds("1"));
        request.setRefresh(true);
        final BulkByScrollResponse response = execute(request, highLevelClient()::reindex, highLevelClient()::reindexAsync);
        assertEquals(1, response.getCreated());
        assertEquals(1, response.getTotal());
        assertEquals(0, response.getDeleted());
        assertEquals(0, response.getNoops());
        assertEquals(0, response.getVersionConflicts());
        assertEquals(1, response.getBatches());
        assertTrue(response.getTook().getMillis() > 0);
        assertEquals(1, response.getBatches());
        assertEquals(0, response.getBulkFailures().size());
        assertEquals(0, response.getSearchFailures().size());
    }
    {
        // set require_alias to true but the destination index is not an alias
        final ReindexRequest request = new ReindexRequest();
        request.setSourceIndices(sourceIndex);
        request.setDestIndex(destinationIndex);
        request.setSourceQuery(new IdsQueryBuilder().addIds("1"));
        request.setRefresh(true);
        request.setRequireAlias(true);
        final ElasticsearchStatusException thrown = expectThrows(ElasticsearchStatusException.class,
                () -> execute(request, highLevelClient()::reindex, highLevelClient()::reindexAsync));
        assertEquals(RestStatus.NOT_FOUND, thrown.status());
        assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index [" +
                destinationIndex + "] and [require_alias] request flag is [true] and [" +
                destinationIndex + "] is not an alias]", thrown.getMessage());
    }
}
public void testReindexTask() throws Exception {
    final String sourceIndex = "source123";
    final String destinationIndex = "dest2";
    {
        // Prepare: single-shard, zero-replica indices plus two seed documents,
        // refreshed immediately so the reindex task sees them.
        Settings settings = Settings.builder()
            .put("number_of_shards", 1)
            .put("number_of_replicas", 0)
            .build();
        createIndex(sourceIndex, settings);
        createIndex(destinationIndex, settings);
        BulkRequest bulkRequest = new BulkRequest()
            .add(new IndexRequest(sourceIndex).id("1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
            .add(new IndexRequest(sourceIndex).id("2").source(Collections.singletonMap("foo2", "bar2"), XContentType.JSON))
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        assertEquals(
            RestStatus.OK,
            highLevelClient().bulk(
                bulkRequest,
                RequestOptions.DEFAULT
            ).status()
        );
    }
    {
        // NOTE: the tag::/end:: markers delimit a snippet included in the published
        // docs — keep the code between them unchanged unless the docs change too.
        // tag::submit-reindex-task
        ReindexRequest reindexRequest = new ReindexRequest(); // <1>
        reindexRequest.setSourceIndices(sourceIndex);
        reindexRequest.setDestIndex(destinationIndex);
        reindexRequest.setRefresh(true);
        TaskSubmissionResponse reindexSubmission = highLevelClient()
            .submitReindexTask(reindexRequest, RequestOptions.DEFAULT); // <2>
        String taskId = reindexSubmission.getTask(); // <3>
        // end::submit-reindex-task
        // Poll until the background reindex task reports completion.
        assertBusy(checkTaskCompletionStatus(client(), taskId));
    }
}
public void testReindexConflict() throws IOException {
    final String sourceIndex = "testreindexconflict_source";
    final String destIndex = "testreindexconflict_dest";

    // Prepare both indices and seed two documents into the source.
    final Settings indexSettings = Settings.builder()
            .put("number_of_shards", 1)
            .put("number_of_replicas", 0)
            .build();
    createIndex(sourceIndex, indexSettings);
    createIndex(destIndex, indexSettings);
    final BulkRequest seedRequest = new BulkRequest()
            .add(new IndexRequest(sourceIndex).id("1").source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
            .add(new IndexRequest(sourceIndex).id("2").source(Collections.singletonMap("foo", "bar"), XContentType.JSON))
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
    assertThat(highLevelClient().bulk(seedRequest, RequestOptions.DEFAULT).status(), equalTo(RestStatus.OK));

    // Install the pipeline that forces version conflicts on the destination.
    putConflictPipeline();

    final ReindexRequest request = new ReindexRequest();
    request.setSourceIndices(sourceIndex);
    request.setDestIndex(destIndex);
    request.setRefresh(true);
    request.setDestPipeline(CONFLICT_PIPELINE_ID);
    final BulkByScrollResponse response = highLevelClient().reindex(request, RequestOptions.DEFAULT);

    // Both documents must have failed with version conflicts and nothing else.
    assertThat(response.getVersionConflicts(), equalTo(2L));
    assertThat(response.getSearchFailures(), empty());
    assertThat(response.getBulkFailures(), hasSize(2));
    assertThat(
            response.getBulkFailures().stream().map(BulkItemResponse.Failure::getMessage).collect(Collectors.toSet()),
            everyItem(containsString("version conflict"))
    );
    assertThat(response.getTotal(), equalTo(2L));
    assertThat(response.getCreated(), equalTo(0L));
    assertThat(response.getUpdated(), equalTo(0L));
    assertThat(response.getDeleted(), equalTo(0L));
    assertThat(response.getNoops(), equalTo(0L));
    assertThat(response.getBatches(), equalTo(1));
    assertTrue(response.getTook().getMillis() > 0);
}
/**
 * Exercises the delete-by-query API end to end: deletes a single document by id and verifies
 * every counter on the resulting BulkByScrollResponse, then rethrottles a deliberately
 * throttled delete-by-query task and verifies that rethrottling the same task again after it
 * has finished fails with a resource_not_found_exception.
 */
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/60811#issuecomment-830040692")
public void testDeleteByQuery() throws Exception {
    final String sourceIndex = "source1";
    {
        // Prepare: single shard, no replicas, three documents, so the expected counts below
        // are deterministic.
        Settings settings = Settings.builder()
            .put("number_of_shards", 1)
            .put("number_of_replicas", 0)
            .build();
        createIndex(sourceIndex, settings);
        assertEquals(
            RestStatus.OK,
            highLevelClient().bulk(
                new BulkRequest()
                    .add(new IndexRequest(sourceIndex).id("1")
                        .source(Collections.singletonMap("foo", 1), XContentType.JSON))
                    .add(new IndexRequest(sourceIndex).id("2")
                        .source(Collections.singletonMap("foo", 2), XContentType.JSON))
                    .add(new IndexRequest(sourceIndex).id("3")
                        .source(Collections.singletonMap("foo", 3), XContentType.JSON))
                    .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE),
                RequestOptions.DEFAULT
            ).status()
        );
    }
    {
        // test1: delete one doc
        DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
        deleteByQueryRequest.indices(sourceIndex);
        deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
        deleteByQueryRequest.setRefresh(true);
        BulkByScrollResponse bulkResponse =
            execute(deleteByQueryRequest, highLevelClient()::deleteByQuery, highLevelClient()::deleteByQueryAsync);
        assertEquals(1, bulkResponse.getTotal());
        assertEquals(1, bulkResponse.getDeleted());
        assertEquals(0, bulkResponse.getNoops());
        assertEquals(0, bulkResponse.getVersionConflicts());
        // A single batch check is enough (this was previously asserted twice in a row).
        assertEquals(1, bulkResponse.getBatches());
        assertTrue(bulkResponse.getTook().getMillis() > 0);
        assertEquals(0, bulkResponse.getBulkFailures().size());
        assertEquals(0, bulkResponse.getSearchFailures().size());
        // The two untouched documents must still be searchable.
        assertEquals(
            2,
            highLevelClient().search(new SearchRequest(sourceIndex), RequestOptions.DEFAULT).getHits().getTotalHits().value
        );
    }
    {
        // test delete-by-query rethrottling
        DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
        deleteByQueryRequest.indices(sourceIndex);
        deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("2", "3"));
        deleteByQueryRequest.setRefresh(true);
        // The following settings are supposed to halt the delete-by-query after the first
        // document, keeping the task alive long enough to be rethrottled.
        deleteByQueryRequest.setBatchSize(1);
        deleteByQueryRequest.setRequestsPerSecond(0.00001f);
        final CountDownLatch taskFinished = new CountDownLatch(1);
        highLevelClient().deleteByQueryAsync(deleteByQueryRequest, RequestOptions.DEFAULT, new ActionListener<BulkByScrollResponse>() {
            @Override
            public void onResponse(BulkByScrollResponse response) {
                taskFinished.countDown();
            }
            @Override
            public void onFailure(Exception e) {
                fail(e.toString());
            }
        });
        TaskId taskIdToRethrottle = findTaskToRethrottle(DeleteByQueryAction.NAME, deleteByQueryRequest.getDescription());
        float requestsPerSecond = 1000f;
        ListTasksResponse response = execute(new RethrottleRequest(taskIdToRethrottle, requestsPerSecond),
            highLevelClient()::deleteByQueryRethrottle, highLevelClient()::deleteByQueryRethrottleAsync);
        assertThat(response.getTasks(), hasSize(1));
        assertEquals(taskIdToRethrottle, response.getTasks().get(0).getTaskId());
        assertThat(response.getTasks().get(0).getStatus(), instanceOf(RawTaskStatus.class));
        assertEquals(Float.toString(requestsPerSecond),
            ((RawTaskStatus) response.getTasks().get(0).getStatus()).toMap().get("requests_per_second").toString());
        assertTrue(taskFinished.await(10, TimeUnit.SECONDS));
        // any rethrottling after the delete-by-query is done performed with the same taskId should result in a failure
        response = execute(new RethrottleRequest(taskIdToRethrottle, requestsPerSecond),
            highLevelClient()::deleteByQueryRethrottle, highLevelClient()::deleteByQueryRethrottleAsync);
        assertTrue(response.getTasks().isEmpty());
        assertFalse(response.getNodeFailures().isEmpty());
        assertEquals(1, response.getNodeFailures().size());
        assertEquals("Elasticsearch exception [type=resource_not_found_exception, reason=task [" + taskIdToRethrottle + "] is missing]",
            response.getNodeFailures().get(0).getCause().getMessage());
    }
}
/**
 * Verifies that a delete-by-query can be submitted as a background task via
 * {@code submitDeleteByQueryTask} and that the returned task id eventually reports completion.
 * The {@code tag::}/{@code end::} comments delimit a snippet extracted into the documentation;
 * do not alter them.
 */
public void testDeleteByQueryTask() throws Exception {
final String sourceIndex = "source456";
{
// Prepare
// Single shard, no replicas, three documents: a deterministic fixture.
Settings settings = Settings.builder()
.put("number_of_shards", 1)
.put("number_of_replicas", 0)
.build();
createIndex(sourceIndex, settings);
assertEquals(
RestStatus.OK,
highLevelClient().bulk(
new BulkRequest()
.add(new IndexRequest(sourceIndex).id("1")
.source(Collections.singletonMap("foo", 1), XContentType.JSON))
.add(new IndexRequest(sourceIndex).id("2")
.source(Collections.singletonMap("foo", 2), XContentType.JSON))
.add(new IndexRequest(sourceIndex).id("3")
.source(Collections.singletonMap("foo", 3), XContentType.JSON))
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE),
RequestOptions.DEFAULT
).status()
);
}
{
// tag::submit-delete_by_query-task
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
deleteByQueryRequest.indices(sourceIndex);
deleteByQueryRequest.setQuery(new IdsQueryBuilder().addIds("1"));
deleteByQueryRequest.setRefresh(true);
TaskSubmissionResponse deleteByQuerySubmission = highLevelClient()
.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT);
String taskId = deleteByQuerySubmission.getTask();
// end::submit-delete_by_query-task
// Poll until the submitted task reports completion.
assertBusy(checkTaskCompletionStatus(client(), taskId));
}
}
}
| |
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.worker;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.devtools.build.lib.worker.TestUtils.createWorkerKey;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.clock.BlazeClock;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem;
import java.io.IOException;
import java.lang.Thread.State;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;
/**
 * Tests WorkerPool: worker creation, reuse, per-key pooling, separate multiplex pools, and
 * high-priority workers blocking low-priority borrows.
 */
@RunWith(JUnit4.class)
public class WorkerPoolTest {
@Rule public final MockitoRule mockito = MockitoJUnit.rule();
@Mock WorkerFactory factoryMock;
private FileSystem fileSystem;
// Monotonically increasing id handed to each TestWorker the mocked factory creates.
private int workerIds = 1;
/** Minimal concrete Worker so the mocked factory can hand out real Worker instances. */
private static class TestWorker extends Worker {
TestWorker(WorkerKey workerKey, int workerId, Path workDir, Path logFile) {
super(workerKey, workerId, workDir, logFile);
}
}
@Before
public void setUp() throws Exception {
fileSystem = new InMemoryFileSystem(BlazeClock.instance());
// Every makeObject() call produces a fresh TestWorker with the next sequential id.
doAnswer(
arg -> {
return new DefaultPooledObject<>(
new TestWorker(
arg.getArgument(0),
workerIds++,
fileSystem.getPath("/workDir"),
fileSystem.getPath("/logDir")));
})
.when(factoryMock)
.makeObject(any());
// All pooled workers pass validation, so returned workers are eligible for reuse.
when(factoryMock.validateObject(any(), any())).thenReturn(true);
}
/** Borrowing twice without returning creates a second worker. */
@Test
public void testBorrow_createsWhenNeeded() throws Exception {
WorkerPool workerPool =
new WorkerPool(
factoryMock,
ImmutableMap.of("mnem", 2, "", 1),
ImmutableMap.of(),
Lists.newArrayList());
WorkerKey workerKey = createWorkerKey(fileSystem, "mnem", false);
Worker worker1 = workerPool.borrowObject(workerKey);
Worker worker2 = workerPool.borrowObject(workerKey);
assertThat(worker1.getWorkerId()).isEqualTo(1);
assertThat(worker2.getWorkerId()).isEqualTo(2);
verify(factoryMock, times(2)).makeObject(workerKey);
}
/** A returned worker is handed out again instead of creating a new one. */
@Test
public void testBorrow_reusesWhenPossible() throws Exception {
WorkerPool workerPool =
new WorkerPool(
factoryMock,
ImmutableMap.of("mnem", 2, "", 1),
ImmutableMap.of(),
Lists.newArrayList());
WorkerKey workerKey = createWorkerKey(fileSystem, "mnem", false);
Worker worker1 = workerPool.borrowObject(workerKey);
workerPool.returnObject(workerKey, worker1);
Worker worker2 = workerPool.borrowObject(workerKey);
assertThat(worker1).isSameInstanceAs(worker2);
verify(factoryMock, times(1)).makeObject(workerKey);
}
/** A mnemonic not configured explicitly ("other") still gets workers via the default ("") entry. */
@Test
public void testBorrow_usesDefault() throws Exception {
WorkerPool workerPool =
new WorkerPool(
factoryMock,
ImmutableMap.of("mnem", 2, "", 1),
ImmutableMap.of(),
Lists.newArrayList());
WorkerKey workerKey1 = createWorkerKey(fileSystem, "mnem", false);
Worker worker1 = workerPool.borrowObject(workerKey1);
Worker worker1a = workerPool.borrowObject(workerKey1);
assertThat(worker1.getWorkerId()).isEqualTo(1);
assertThat(worker1a.getWorkerId()).isEqualTo(2);
WorkerKey workerKey2 = createWorkerKey(fileSystem, "other", false);
Worker worker2 = workerPool.borrowObject(workerKey2);
assertThat(worker2.getWorkerId()).isEqualTo(3);
verify(factoryMock, times(2)).makeObject(workerKey1);
verify(factoryMock, times(1)).makeObject(workerKey2);
}
/** Keys with the same mnemonic but different args ("arg1") get their own workers. */
@Test
public void testBorrow_pooledByKey() throws Exception {
WorkerPool workerPool =
new WorkerPool(
factoryMock,
ImmutableMap.of("mnem", 2, "", 1),
ImmutableMap.of(),
Lists.newArrayList());
WorkerKey workerKey1 = createWorkerKey(fileSystem, "mnem", false);
Worker worker1 = workerPool.borrowObject(workerKey1);
Worker worker1a = workerPool.borrowObject(workerKey1);
assertThat(worker1.getWorkerId()).isEqualTo(1);
assertThat(worker1a.getWorkerId()).isEqualTo(2);
WorkerKey workerKey2 = createWorkerKey(fileSystem, "mnem", false, "arg1");
Worker worker2 = workerPool.borrowObject(workerKey2);
assertThat(worker2.getWorkerId()).isEqualTo(3);
verify(factoryMock, times(2)).makeObject(workerKey1);
verify(factoryMock, times(1)).makeObject(workerKey2);
}
/** Singleplex and multiplex workers for the same mnemonic come from separate pools. */
@Test
public void testBorrow_separateMultiplexWorkers() throws Exception {
WorkerPool workerPool =
new WorkerPool(
factoryMock,
ImmutableMap.of("mnem", 1, "", 1),
ImmutableMap.of("mnem", 2, "", 1),
Lists.newArrayList());
WorkerKey workerKey = createWorkerKey(fileSystem, "mnem", false);
Worker worker1 = workerPool.borrowObject(workerKey);
assertThat(worker1.getWorkerId()).isEqualTo(1);
workerPool.returnObject(workerKey, worker1);
WorkerKey multiplexKey = createWorkerKey(fileSystem, "mnem", true);
Worker multiplexWorker1 = workerPool.borrowObject(multiplexKey);
Worker multiplexWorker2 = workerPool.borrowObject(multiplexKey);
Worker worker1a = workerPool.borrowObject(workerKey);
assertThat(multiplexWorker1.getWorkerId()).isEqualTo(2);
assertThat(multiplexWorker2.getWorkerId()).isEqualTo(3);
// The singleplex worker is reused even though two multiplex workers were created in between.
assertThat(worker1a.getWorkerId()).isEqualTo(1);
verify(factoryMock, times(1)).makeObject(workerKey);
verify(factoryMock, times(2)).makeObject(multiplexKey);
}
/** A single outstanding high-priority worker does not block low-priority borrows. */
@Test
public void testBorrow_allowsOneHiPrio() throws Exception {
WorkerPool workerPool =
new WorkerPool(
factoryMock,
ImmutableMap.of("loprio", 2, "hiprio", 2, "", 1),
ImmutableMap.of(),
ImmutableList.of("hiprio"));
WorkerKey workerKey1 = createWorkerKey(fileSystem, "hiprio", false);
Worker worker1 = workerPool.borrowObject(workerKey1);
assertThat(worker1.getWorkerId()).isEqualTo(1);
// A single hiprio worker should not block.
WorkerKey workerKey2 = createWorkerKey(fileSystem, "loprio", false);
Worker worker2 = workerPool.borrowObject(workerKey2);
assertThat(worker2.getWorkerId()).isEqualTo(2);
verify(factoryMock, times(1)).makeObject(workerKey1);
verify(factoryMock, times(1)).makeObject(workerKey2);
}
/**
 * Two outstanding high-priority workers block a low-priority borrow until one of the
 * high-priority workers is returned. The borrower runs on a separate thread and this test
 * polls its thread state (up to ~1s each way) to observe blocking and release.
 */
@Test
public void testBorrow_twoHiPrioBlocks() throws Exception {
WorkerPool workerPool =
new WorkerPool(
factoryMock,
ImmutableMap.of("loprio", 2, "hiprio", 2, "", 1),
ImmutableMap.of(),
ImmutableList.of("hiprio"));
WorkerKey workerKey1 = createWorkerKey(fileSystem, "hiprio", false);
Worker worker1 = workerPool.borrowObject(workerKey1);
Worker worker1a = workerPool.borrowObject(workerKey1);
assertThat(worker1.getWorkerId()).isEqualTo(1);
assertThat(worker1a.getWorkerId()).isEqualTo(2);
WorkerKey workerKey2 = createWorkerKey(fileSystem, "loprio", false);
Thread t =
new Thread(
() -> {
try {
workerPool.borrowObject(workerKey2);
} catch (IOException | InterruptedException e) {
// Ignorable
}
});
t.start();
// Poll for the borrower thread to enter WAITING, i.e. to block on the borrow.
boolean waited = false;
for (int tries = 0; tries < 1000; tries++) {
if (t.getState() == State.WAITING) {
waited = true;
break;
}
Thread.sleep(1);
}
assertWithMessage("Expected low-priority worker to wait").that(waited).isTrue();
// Returning one high-priority worker should release the waiting borrower.
workerPool.returnObject(workerKey1, worker1);
boolean continued = false;
for (int tries = 0; tries < 1000; tries++) {
if (t.getState() != State.WAITING) {
continued = true;
break;
}
Thread.sleep(1);
}
assertWithMessage("Expected low-priority worker to eventually continue")
.that(continued)
.isTrue();
verify(factoryMock, times(2)).makeObject(workerKey1);
verify(factoryMock, times(1)).makeObject(workerKey2);
}
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.persistence.entity;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.db.DbEntity;
import org.camunda.bpm.engine.impl.db.HasDbReferences;
import org.camunda.bpm.engine.impl.db.HasDbRevision;
import org.camunda.bpm.engine.impl.history.HistoryLevel;
import org.camunda.bpm.engine.impl.history.event.HistoryEvent;
import org.camunda.bpm.engine.impl.history.event.HistoryEventProcessor;
import org.camunda.bpm.engine.impl.history.event.HistoryEventType;
import org.camunda.bpm.engine.impl.history.event.HistoryEventTypes;
import org.camunda.bpm.engine.impl.history.producer.HistoryEventProducer;
import org.camunda.bpm.engine.impl.incident.IncidentContext;
import org.camunda.bpm.engine.impl.incident.IncidentLogger;
import org.camunda.bpm.engine.impl.util.ClockUtil;
import org.camunda.bpm.engine.runtime.Incident;
/**
 * Runtime incident: a persisted marker for a problem attached to an execution / process
 * instance. Creating, resolving and deleting an incident also fires the corresponding
 * historic incident event when the configured history level produces it.
 *
 * @author roman.smirnov
 */
public class IncidentEntity implements Incident, DbEntity, HasDbRevision, HasDbReferences {
protected static final IncidentLogger LOG = ProcessEngineLogger.INCIDENT_LOGGER;
// Optimistic-locking revision (see HasDbRevision).
protected int revision;
protected String id;
protected Date incidentTimestamp;
protected String incidentType;
protected String executionId;
protected String activityId;
protected String processInstanceId;
protected String processDefinitionId;
// Id of the incident that caused this one; equals this incident's own id for a root incident.
protected String causeIncidentId;
// Id of the incident at the root of the cause chain; also self-referencing for a root incident.
protected String rootCauseIncidentId;
// Opaque configuration value copied from the IncidentContext.
protected String configuration;
protected String incidentMessage;
protected String tenantId;
protected String jobDefinitionId;
protected String historyConfiguration;
/**
 * Creates incidents on all super executions (call hierarchy) affected by this incident.
 *
 * @return the newly created incidents, ordered bottom-up along the call hierarchy
 */
public List<IncidentEntity> createRecursiveIncidents() {
List<IncidentEntity> createdIncidents = new ArrayList<IncidentEntity>();
createRecursiveIncidents(id, createdIncidents);
return createdIncidents;
}
/** Recursively instantiates a new incident on each super execution
 * (i.e. super process instance) which is affected by this
 * incident.
 * For example: a super process instance called, via CallActivity,
 * a new process instance on which an incident happened, so that
 * the super process instance gets an incident too. */
protected void createRecursiveIncidents(String rootCauseIncidentId, List<IncidentEntity> createdIncidents) {
final ExecutionEntity execution = getExecution();
if(execution != null) {
ExecutionEntity superExecution = execution.getProcessInstance().getSuperExecution();
if (superExecution != null) {
// create a new incident
IncidentEntity newIncident = create(incidentType);
newIncident.setExecution(superExecution);
newIncident.setActivityId(superExecution.getCurrentActivityId());
newIncident.setProcessDefinitionId(superExecution.getProcessDefinitionId());
newIncident.setTenantId(superExecution.getTenantId());
// set cause and root cause
newIncident.setCauseIncidentId(id);
newIncident.setRootCauseIncidentId(rootCauseIncidentId);
// insert new incident (and create a new historic incident)
insert(newIncident);
// add new incident to result set
createdIncidents.add(newIncident);
// continue up the call hierarchy
newIncident.createRecursiveIncidents(rootCauseIncidentId, createdIncidents);
}
}
}
/**
 * Creates, populates and persists a new incident of the given type.
 *
 * @param incidentType the type of the incident to create
 * @param context supplies configuration, activity, process definition, tenant, job definition,
 *        history configuration and (optionally) the execution the incident is linked to
 * @param message optional human-readable incident message
 * @return the persisted incident
 */
public static IncidentEntity createAndInsertIncident(String incidentType, IncidentContext context, String message) {
// create new incident
IncidentEntity newIncident = create(incidentType);
newIncident.setIncidentMessage(message);
// set properties from incident context
newIncident.setConfiguration(context.getConfiguration());
newIncident.setActivityId(context.getActivityId());
newIncident.setProcessDefinitionId(context.getProcessDefinitionId());
newIncident.setTenantId(context.getTenantId());
newIncident.setJobDefinitionId(context.getJobDefinitionId());
newIncident.setHistoryConfiguration(context.getHistoryConfiguration());
if (context.getExecutionId() != null) {
// fetch execution
ExecutionEntity execution = Context
.getCommandContext()
.getExecutionManager()
.findExecutionById(context.getExecutionId());
if (execution != null) {
// link incident with execution
newIncident.setExecution(execution);
}
else {
// execution no longer exists; the incident is still created, just unlinked
LOG.executionNotFound(context.getExecutionId());
}
}
// insert new incident (and create a new historic incident)
insert(newIncident);
return newIncident;
}
/**
 * Instantiates a new (not yet persisted) incident with a generated id and the current time.
 * Cause and root cause initially reference the incident itself, i.e. a root incident.
 */
protected static IncidentEntity create(String incidentType) {
String incidentId = Context.getProcessEngineConfiguration()
.getDbSqlSessionFactory()
.getIdGenerator()
.getNextId();
// decorate new incident
IncidentEntity newIncident = new IncidentEntity();
newIncident.setId(incidentId);
newIncident.setIncidentTimestamp(ClockUtil.getCurrentTime());
newIncident.setIncidentType(incidentType);
newIncident.setCauseIncidentId(incidentId);
newIncident.setRootCauseIncidentId(incidentId);
return newIncident;
}
/** Persists the given incident and fires the INCIDENT_CREATE history event. */
protected static void insert(IncidentEntity incident) {
// persist new incident
Context
.getCommandContext()
.getDbEntityManager()
.insert(incident);
incident.fireHistoricIncidentEvent(HistoryEventTypes.INCIDENT_CREATE);
}
/** Removes this incident (and any parent incidents) without marking it as resolved. */
public void delete() {
remove(false);
}
/** Removes this incident (and any parent incidents), marking it as resolved. */
public void resolve() {
remove(true);
}
/**
 * Removes this incident: first removes the parent incident on the super execution (if any),
 * unlinks this incident from its execution, deletes it from the database and fires either the
 * INCIDENT_RESOLVE or the INCIDENT_DELETE history event.
 *
 * @param resolved whether the incident is removed because it was resolved
 */
protected void remove(boolean resolved) {
ExecutionEntity execution = getExecution();
if(execution != null) {
// Extract possible super execution of the assigned execution
ExecutionEntity superExecution = null;
if (execution.getId().equals(execution.getProcessInstanceId())) {
superExecution = execution.getSuperExecution();
} else {
superExecution = execution.getProcessInstance().getSuperExecution();
}
if (superExecution != null) {
// get the incident, where this incident is the cause
IncidentEntity parentIncident = superExecution.getIncidentByCauseIncidentId(getId());
if (parentIncident != null) {
// remove the incident
parentIncident.remove(resolved);
}
}
// remove link to execution
execution.removeIncident(this);
}
// always delete the incident
Context
.getCommandContext()
.getDbEntityManager()
.delete(this);
// update historic incident
HistoryEventType eventType = resolved ? HistoryEventTypes.INCIDENT_RESOLVE : HistoryEventTypes.INCIDENT_DELETE;
fireHistoricIncidentEvent(eventType);
}
/**
 * Fires the given incident history event (create/resolve/delete) if the configured history
 * level produces events of that type for this incident.
 */
protected void fireHistoricIncidentEvent(final HistoryEventType eventType) {
ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration();
HistoryLevel historyLevel = processEngineConfiguration.getHistoryLevel();
if(historyLevel.isHistoryEventProduced(eventType, this)) {
HistoryEventProcessor.processHistoryEvents(new HistoryEventProcessor.HistoryEventCreator() {
@Override
public HistoryEvent createHistoryEvent(HistoryEventProducer producer) {
HistoryEvent event = null;
// dispatch by event name to the matching producer method
if (HistoryEvent.INCIDENT_CREATE.equals(eventType.getEventName())) {
event = producer.createHistoricIncidentCreateEvt(IncidentEntity.this);
} else if (HistoryEvent.INCIDENT_RESOLVE.equals(eventType.getEventName())) {
event = producer.createHistoricIncidentResolveEvt(IncidentEntity.this);
} else if (HistoryEvent.INCIDENT_DELETE.equals(eventType.getEventName())) {
event = producer.createHistoricIncidentDeleteEvt(IncidentEntity.this);
}
return event;
}
});
}
}
/** Only the cause incident id is reported as a referenced entity id. */
@Override
public Set<String> getReferencedEntityIds() {
Set<String> referenceIds = new HashSet<String>();
if (causeIncidentId != null) {
referenceIds.add(causeIncidentId);
}
return referenceIds;
}
/** Maps every non-null foreign-key field of this incident to the entity class it references. */
@Override
public Map<String, Class> getReferencedEntitiesIdAndClass() {
Map<String, Class> referenceIdAndClass = new HashMap<String, Class>();
if (causeIncidentId != null) {
referenceIdAndClass.put(causeIncidentId, IncidentEntity.class);
}
if (processDefinitionId != null) {
referenceIdAndClass.put(processDefinitionId, ProcessDefinitionEntity.class);
}
if (processInstanceId != null) {
referenceIdAndClass.put(processInstanceId, ExecutionEntity.class);
}
if (jobDefinitionId != null) {
referenceIdAndClass.put(jobDefinitionId, JobDefinitionEntity.class);
}
if (executionId != null) {
referenceIdAndClass.put(executionId, ExecutionEntity.class);
}
if (rootCauseIncidentId != null) {
referenceIdAndClass.put(rootCauseIncidentId, IncidentEntity.class);
}
return referenceIdAndClass;
}
@Override
public String getId() {
return id;
}
@Override
public void setId(String id) {
this.id = id;
}
@Override
public Date getIncidentTimestamp() {
return incidentTimestamp;
}
public void setIncidentTimestamp(Date incidentTimestamp) {
this.incidentTimestamp = incidentTimestamp;
}
@Override
public String getIncidentType() {
return incidentType;
}
public void setIncidentType(String incidentType) {
this.incidentType = incidentType;
}
@Override
public String getIncidentMessage() {
return incidentMessage;
}
public void setIncidentMessage(String incidentMessage) {
this.incidentMessage = incidentMessage;
}
@Override
public String getExecutionId() {
return executionId;
}
public void setExecutionId(String executionId) {
this.executionId = executionId;
}
@Override
public String getActivityId() {
return activityId;
}
public void setActivityId(String activityId) {
this.activityId = activityId;
}
@Override
public String getProcessInstanceId() {
return processInstanceId;
}
public void setProcessInstanceId(String processInstanceId) {
this.processInstanceId = processInstanceId;
}
/** Resolves the process definition from the deployment cache, or {@code null} if no id is set. */
public ProcessDefinitionEntity getProcessDefinition() {
if (processDefinitionId != null) {
return Context
.getProcessEngineConfiguration()
.getDeploymentCache()
.findDeployedProcessDefinitionById(processDefinitionId);
}
return null;
}
@Override
public String getProcessDefinitionId() {
return processDefinitionId;
}
public void setProcessDefinitionId(String processDefinitionId) {
this.processDefinitionId = processDefinitionId;
}
@Override
public String getCauseIncidentId() {
return causeIncidentId;
}
public void setCauseIncidentId(String causeIncidentId) {
this.causeIncidentId = causeIncidentId;
}
@Override
public String getRootCauseIncidentId() {
return rootCauseIncidentId;
}
public void setRootCauseIncidentId(String rootCauseIncidentId) {
this.rootCauseIncidentId = rootCauseIncidentId;
}
@Override
public String getConfiguration() {
return configuration;
}
public void setConfiguration(String configuration) {
this.configuration = configuration;
}
@Override
public String getTenantId() {
return tenantId;
}
public void setTenantId(String tenantId) {
this.tenantId = tenantId;
}
public void setJobDefinitionId(String jobDefinitionId) {
this.jobDefinitionId = jobDefinitionId;
}
public String getJobDefinitionId() {
return jobDefinitionId;
}
/**
 * Links this incident to the given execution (registering it on the execution), or unlinks it
 * from its current execution when {@code execution} is {@code null}.
 */
public void setExecution(ExecutionEntity execution) {
if (execution != null) {
executionId = execution.getId();
processInstanceId = execution.getProcessInstanceId();
execution.addIncident(this);
}
else {
ExecutionEntity oldExecution = getExecution();
if (oldExecution != null) {
oldExecution.removeIncident(this);
}
executionId = null;
processInstanceId = null;
}
}
/**
 * Loads the linked execution, or returns {@code null} when no execution id is set. A missing
 * execution (id set but not found) is logged and {@code null} is returned.
 */
public ExecutionEntity getExecution() {
if(executionId != null) {
ExecutionEntity execution = Context.getCommandContext()
.getExecutionManager()
.findExecutionById(executionId);
if (execution == null) {
LOG.executionNotFound(executionId);
}
return execution;
}
else {
return null;
}
}
/** Persistent-state snapshot: executionId, processDefinitionId, activityId and jobDefinitionId. */
@Override
public Object getPersistentState() {
Map<String, Object> persistentState = new HashMap<String, Object>();
persistentState.put("executionId", executionId);
persistentState.put("processDefinitionId", processDefinitionId);
persistentState.put("activityId", activityId);
persistentState.put("jobDefinitionId", jobDefinitionId);
return persistentState;
}
@Override
public void setRevision(int revision) {
this.revision = revision;
}
@Override
public int getRevision() {
return revision;
}
@Override
public int getRevisionNext() {
return revision + 1;
}
public String getHistoryConfiguration() {
return historyConfiguration;
}
public void setHistoryConfiguration(String historyConfiguration) {
this.historyConfiguration = historyConfiguration;
}
@Override
public String toString() {
return this.getClass().getSimpleName()
+ "[id=" + id
+ ", incidentTimestamp=" + incidentTimestamp
+ ", incidentType=" + incidentType
+ ", executionId=" + executionId
+ ", activityId=" + activityId
+ ", processInstanceId=" + processInstanceId
+ ", processDefinitionId=" + processDefinitionId
+ ", causeIncidentId=" + causeIncidentId
+ ", rootCauseIncidentId=" + rootCauseIncidentId
+ ", configuration=" + configuration
+ ", tenantId=" + tenantId
+ ", incidentMessage=" + incidentMessage
+ ", jobDefinitionId=" + jobDefinitionId
+ "]";
}
// Identity is based solely on the id.
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((id == null) ? 0 : id.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
IncidentEntity other = (IncidentEntity) obj;
if (id == null) {
if (other.id != null)
return false;
} else if (!id.equals(other.id))
return false;
return true;
}
}
| |
/*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.grpc.async;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.same;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import com.google.bigtable.v1.CheckAndMutateRowRequest;
import com.google.bigtable.v1.MutateRowRequest;
import com.google.bigtable.v1.ReadModifyWriteRowRequest;
import com.google.bigtable.v1.ReadRowsRequest;
import com.google.cloud.bigtable.grpc.BigtableDataClient;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.protobuf.ByteString;
/**
* Tests for {@link AsyncExecutor}
*/
@SuppressWarnings("unchecked")
@RunWith(JUnit4.class)
public class TestAsyncExecutor {
@Mock
private BigtableDataClient client;
@SuppressWarnings("rawtypes")
@Mock
private ListenableFuture future;
private List<Runnable> futureRunnables = new ArrayList<>();
private AsyncExecutor underTest;
private ExecutorService heapSizeExecutorService = MoreExecutors.newDirectExecutorService();
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
futureRunnables.clear();
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
futureRunnables.add((Runnable)invocation.getArguments()[0]);
return null;
}
}).when(future).addListener(any(Runnable.class), same(heapSizeExecutorService));
underTest = new AsyncExecutor(client, 10, 1000, heapSizeExecutorService);
}
@Test
public void testNoMutation() throws IOException {
Assert.assertFalse(underTest.hasInflightRequests());
}
@Test
public void testMutation() throws IOException, InterruptedException {
when(client.mutateRowAsync(any(MutateRowRequest.class))).thenReturn(future);
underTest.mutateRowAsync(MutateRowRequest.getDefaultInstance());
Assert.assertTrue(underTest.hasInflightRequests());
completeCall();
Assert.assertFalse(underTest.hasInflightRequests());
}
@Test
public void testCheckAndMutate() throws IOException, InterruptedException {
when(client.checkAndMutateRowAsync(any(CheckAndMutateRowRequest.class))).thenReturn(future);
underTest.checkAndMutateRowAsync(CheckAndMutateRowRequest.getDefaultInstance());
Assert.assertTrue(underTest.hasInflightRequests());
completeCall();
Assert.assertFalse(underTest.hasInflightRequests());
}
@Test
public void testReadWriteModify() throws IOException, InterruptedException {
when(client.readModifyWriteRowAsync(any(ReadModifyWriteRowRequest.class))).thenReturn(future);
underTest.readModifyWriteRowAsync(ReadModifyWriteRowRequest.getDefaultInstance());
Assert.assertTrue(underTest.hasInflightRequests());
completeCall();
Assert.assertFalse(underTest.hasInflightRequests());
}
@Test
public void testReadRowsAsync() throws IOException, InterruptedException {
when(client.readRowsAsync(any(ReadRowsRequest.class))).thenReturn(future);
underTest.readRowsAsync(ReadRowsRequest.getDefaultInstance());
Assert.assertTrue(underTest.hasInflightRequests());
completeCall();
Assert.assertFalse(underTest.hasInflightRequests());
}
@Test
public void testInvalidMutation() throws Exception {
try {
when(client.mutateRowAsync(any(MutateRowRequest.class))).thenThrow(new RuntimeException());
underTest.mutateRowAsync(MutateRowRequest.getDefaultInstance());
} catch(Exception ignored) {
}
completeCall();
Assert.assertFalse(underTest.hasInflightRequests());
}
@Test
/**
* Tests to make sure that mutateRowAsync will perform a wait() if there is a bigger count of RPCs
* than the maximum of the HeapSizeManager.
*/
public void testRegisterWaitsAfterCountLimit() throws Exception {
ExecutorService testExecutor = Executors.newCachedThreadPool();
try {
when(client.mutateRowAsync(any(MutateRowRequest.class))).thenReturn(future);
// Fill up the Queue
for (int i = 0; i < 10; i++) {
underTest.mutateRowAsync(MutateRowRequest.getDefaultInstance());
}
final AtomicBoolean eleventRpcInvoked = new AtomicBoolean(false);
testExecutor.submit(new Callable<Void>() {
@Override
public Void call() throws Exception {
underTest.mutateRowAsync(MutateRowRequest.getDefaultInstance());
eleventRpcInvoked.set(true);
return null;
}
});
Thread.sleep(10);
Assert.assertFalse(eleventRpcInvoked.get());
completeCall();
Thread.sleep(10);
Assert.assertTrue(eleventRpcInvoked.get());
} finally {
testExecutor.shutdownNow();
}
}
/**
 * Tests to make sure that mutateRowAsync will perform a wait() if there is a bigger accumulated
 * serialized size of RPCs than the maximum of the HeapSizeManager.
 */
@Test
public void testRegisterWaitsAfterSizeLimit() throws Exception {
  ExecutorService testExecutor = Executors.newCachedThreadPool();
  try {
    when(client.mutateRowAsync(any(MutateRowRequest.class))).thenReturn(future);
    // Send a huge request to block further RPCs.
    underTest.mutateRowAsync(MutateRowRequest.newBuilder()
        .setRowKey(ByteString.copyFrom(new byte[1000])).build());
    final AtomicBoolean newRpcInvoked = new AtomicBoolean(false);
    // Named pendingRpc to avoid shadowing the mock ListenableFuture field "future",
    // which is still referenced by the thenReturn(...) stub above.
    Future<Void> pendingRpc = testExecutor.submit(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        underTest.mutateRowAsync(MutateRowRequest.getDefaultInstance());
        newRpcInvoked.set(true);
        return null;
      }
    });
    try {
      pendingRpc.get(100, TimeUnit.MILLISECONDS);
      Assert.fail("The future.get() call should timeout.");
    } catch(TimeoutException expected) {
      // Expected: the new RPC is blocked on the accumulated-size limit.
    }
    // Completing the huge RPC releases heap budget and unblocks the task.
    completeCall();
    pendingRpc.get(100, TimeUnit.MILLISECONDS);
    Assert.assertTrue(newRpcInvoked.get());
  } finally {
    testExecutor.shutdownNow();
  }
}
// Runs (and drains) the completion Runnables captured from the mock futures,
// which marks every outstanding RPC as finished.
private void completeCall() {
  // Snapshot first: futureRunnables can be updated asynchronously while the
  // current batch of Runnables completes requests and releases locks.
  List<Runnable> snapshot = Lists.newArrayList(futureRunnables);
  futureRunnables.clear();
  for (Runnable pending : snapshot) {
    pending.run();
  }
}
}
| |
/*
* Copyright 2017, Google Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opencensus.stats;
import static com.google.common.truth.Truth.assertThat;
import static io.opencensus.stats.StatsTestUtil.createContext;
import io.opencensus.common.Duration;
import io.opencensus.common.Timestamp;
import io.opencensus.internal.SimpleEventQueue;
import io.opencensus.stats.Measure.MeasureDouble;
import io.opencensus.stats.ViewData.DistributionViewData;
import io.opencensus.stats.View.DistributionView;
import io.opencensus.stats.View.IntervalView;
import io.opencensus.testing.common.TestClock;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link ViewManagerImpl}. */
@RunWith(JUnit4.class)
public class ViewManagerImplTest {
@Rule
public final ExpectedException thrown = ExpectedException.none();
// Tag key and values shared by the tests below.
private static final TagKey KEY = TagKey.create("KEY");
private static final TagValue VALUE = TagValue.create("VALUE");
private static final TagValue VALUE_2 = TagValue.create("VALUE_2");
private static final String MEASURE_NAME = "my measurement";
private static final String MEASURE_NAME_2 = "my measurement 2";
private static final String MEASURE_UNIT = "us";
private static final String MEASURE_DESCRIPTION = "measure description";
private static final MeasureDouble MEASURE =
    Measure.MeasureDouble.create(MEASURE_NAME, MEASURE_DESCRIPTION, MEASURE_UNIT);
private static final View.Name VIEW_NAME = View.Name.create("my view");
private static final View.Name VIEW_NAME_2 = View.Name.create("my view 2");
private static final String VIEW_DESCRIPTION = "view description";
// Histogram bucket boundaries used by every distribution view in this test.
private static final BucketBoundaries BUCKET_BOUNDARIES =
    BucketBoundaries.create(
        Arrays.asList(
            0.0, 0.2, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0, 7.0, 10.0, 15.0, 20.0, 30.0, 40.0, 50.0));
private static final DistributionAggregation DISTRIBUTION_AGGREGATION_DESCRIPTOR =
    DistributionAggregation.create(BUCKET_BOUNDARIES.getBoundaries());
// TestClock lets tests control the start/end timestamps reported in ViewData.
private final TestClock clock = TestClock.create();
private final StatsComponentImplBase statsComponent =
    new StatsComponentImplBase(new SimpleEventQueue(), clock);
private final StatsContextFactoryImpl factory = statsComponent.getStatsContextFactory();
private final ViewManagerImpl viewManager = statsComponent.getViewManager();
private final StatsRecorder statsRecorder = statsComponent.getStatsRecorder();
// Convenience overload: the canonical test view over MEASURE, keyed by KEY.
private static DistributionView createDistributionView() {
  return createDistributionView(
      VIEW_NAME, MEASURE, DISTRIBUTION_AGGREGATION_DESCRIPTOR, Arrays.asList(KEY));
}
// Builds a DistributionView with the shared test description.
private static DistributionView createDistributionView(
    View.Name name,
    Measure measure,
    DistributionAggregation distributionAggregation,
    List<TagKey> keys) {
  return DistributionView.create(name, VIEW_DESCRIPTION, measure, distributionAggregation, keys);
}
@Test
public void testRegisterAndGetView() {
  DistributionView view = createDistributionView();
  viewManager.registerView(view);
  // Registration must make the view retrievable by its name.
  assertThat(viewManager.getView(VIEW_NAME).getView()).isEqualTo(view);
}
@Test
public void preventRegisteringIntervalView() {
  View intervalView =
      IntervalView.create(
          VIEW_NAME,
          VIEW_DESCRIPTION,
          MEASURE,
          IntervalAggregation.create(Arrays.asList(Duration.fromMillis(1000))),
          Arrays.asList(KEY));
  // Only distribution views are supported; interval views must be rejected.
  thrown.expect(UnsupportedOperationException.class);
  viewManager.registerView(intervalView);
}
@Test
public void allowRegisteringSameViewTwice() {
  DistributionView view = createDistributionView();
  // Re-registering the identical view must be a no-op, not an error.
  viewManager.registerView(view);
  viewManager.registerView(view);
  assertThat(viewManager.getView(VIEW_NAME).getView()).isEqualTo(view);
}
@Test
public void preventRegisteringDifferentViewWithSameName() {
  View view1 =
      DistributionView.create(
          VIEW_NAME,
          "View description.",
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY));
  viewManager.registerView(view1);
  // Same name, different description: must be rejected as a conflicting view.
  View view2 =
      DistributionView.create(
          VIEW_NAME,
          "This is a different description.",
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY));
  try {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("A different view with the same name is already registered");
    viewManager.registerView(view2);
  } finally {
    // The original registration must survive the failed attempt.
    assertThat(viewManager.getView(VIEW_NAME).getView()).isEqualTo(view1);
  }
}
@Test
public void disallowGettingNonexistentViewData() {
  // Fetching a view that was never registered must throw.
  thrown.expect(IllegalArgumentException.class);
  viewManager.getView(VIEW_NAME);
}
@Test
public void testRecord() {
  DistributionView view =
      createDistributionView(
          VIEW_NAME,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY));
  // Fix the registration time so the ViewData start timestamp is predictable.
  clock.setTime(Timestamp.create(1, 2));
  viewManager.registerView(view);
  StatsContextImpl tags = createContext(factory, KEY, VALUE);
  for (double val : Arrays.asList(10.0, 20.0, 30.0, 40.0)) {
    statsRecorder.record(tags, MeasureMap.builder().set(MEASURE, val).build());
  }
  // Advance the clock so the end timestamp differs from the start.
  clock.setTime(Timestamp.create(3, 4));
  DistributionViewData viewData = (DistributionViewData) viewManager.getView(VIEW_NAME);
  assertThat(viewData.getView()).isEqualTo(view);
  assertThat(viewData.getStart()).isEqualTo(Timestamp.create(1, 2));
  assertThat(viewData.getEnd()).isEqualTo(Timestamp.create(3, 4));
  // All four values aggregate under the single tag (KEY, VALUE).
  assertDistributionAggregatesEquivalent(
      viewData.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)),
              BUCKET_BOUNDARIES,
              Arrays.asList(10.0, 20.0, 30.0, 40.0))));
}
@Test
public void getViewDoesNotClearStats() {
  DistributionView view =
      createDistributionView(
          VIEW_NAME,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY));
  clock.setTime(Timestamp.create(10, 0));
  viewManager.registerView(view);
  StatsContextImpl tags = createContext(factory, KEY, VALUE);
  statsRecorder.record(tags, MeasureMap.builder().set(MEASURE, 0.1).build());
  clock.setTime(Timestamp.create(11, 0));
  // First read: one recorded value.
  DistributionViewData viewData1 = (DistributionViewData) viewManager.getView(VIEW_NAME);
  assertThat(viewData1.getStart()).isEqualTo(Timestamp.create(10, 0));
  assertThat(viewData1.getEnd()).isEqualTo(Timestamp.create(11, 0));
  assertDistributionAggregatesEquivalent(
      viewData1.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)), BUCKET_BOUNDARIES, Arrays.asList(0.1))));
  statsRecorder.record(tags, MeasureMap.builder().set(MEASURE, 0.2).build());
  clock.setTime(Timestamp.create(12, 0));
  DistributionViewData viewData2 = (DistributionViewData) viewManager.getView(VIEW_NAME);
  // The second view should have the same start time as the first view, and it should include both
  // recorded values:
  assertThat(viewData2.getStart()).isEqualTo(Timestamp.create(10, 0));
  assertThat(viewData2.getEnd()).isEqualTo(Timestamp.create(12, 0));
  assertDistributionAggregatesEquivalent(
      viewData2.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)),
              BUCKET_BOUNDARIES,
              Arrays.asList(0.1, 0.2))));
}
@Test
public void testRecordMultipleTagValues() {
  viewManager.registerView(
      createDistributionView(
          VIEW_NAME,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY)));
  // Record under two distinct tag values; each must aggregate separately.
  statsRecorder.record(
      createContext(factory, KEY, VALUE),
      MeasureMap.builder().set(MEASURE, 10.0).build());
  statsRecorder.record(
      createContext(factory, KEY, VALUE_2),
      MeasureMap.builder().set(MEASURE, 30.0).build());
  statsRecorder.record(
      createContext(factory, KEY, VALUE_2),
      MeasureMap.builder().set(MEASURE, 50.0).build());
  DistributionViewData viewData = (DistributionViewData) viewManager.getView(VIEW_NAME);
  assertDistributionAggregatesEquivalent(
      viewData.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)), BUCKET_BOUNDARIES, Arrays.asList(10.0)),
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE_2)),
              BUCKET_BOUNDARIES,
              Arrays.asList(30.0, 50.0))));
}
// This test checks that StatsRecorder.record(...) does not throw an exception when no views are
// registered.
@Test
public void allowRecordingWithoutRegisteringMatchingViewData() {
  statsRecorder.record(
      createContext(factory, KEY, VALUE),
      // Double literal for the MeasureDouble, consistent with every other test here.
      MeasureMap.builder().set(MEASURE, 10.0).build());
}
@Test
public void testRecordWithEmptyStatsContext() {
  viewManager.registerView(
      createDistributionView(
          VIEW_NAME,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY)));
  // DEFAULT doesn't have tags, but the view has tag key "KEY".
  statsRecorder.record(factory.getDefault(),
      MeasureMap.builder().set(MEASURE, 10.0).build());
  DistributionViewData viewData = (DistributionViewData) viewManager.getView(VIEW_NAME);
  assertDistributionAggregatesEquivalent(
      viewData.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              // Tag is missing for associated measureValues, should use default tag value
              // "unknown/not set"
              Arrays.asList(Tag.create(KEY, MutableViewData.UNKNOWN_TAG_VALUE)),
              BUCKET_BOUNDARIES,
              // Should record stats with default tag value: "KEY" : "unknown/not set".
              Arrays.asList(10.0))));
}
@Test
public void testRecordWithNonExistentMeasurement() {
  viewManager.registerView(
      createDistributionView(
          VIEW_NAME,
          Measure.MeasureDouble.create(MEASURE_NAME, "measure", MEASURE_UNIT),
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY)));
  // Record against a measure the view does not reference.
  MeasureDouble measure2 =
      Measure.MeasureDouble.create(MEASURE_NAME_2, "measure", MEASURE_UNIT);
  statsRecorder.record(createContext(factory, KEY, VALUE),
      MeasureMap.builder().set(measure2, 10.0).build());
  // The view must remain empty: the recorded measure doesn't match it.
  DistributionViewData view = (DistributionViewData) viewManager.getView(VIEW_NAME);
  assertThat(view.getDistributionAggregates()).isEmpty();
}
@Test
public void testRecordWithTagsThatDoNotMatchViewData() {
  viewManager.registerView(
      createDistributionView(
          VIEW_NAME,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY)));
  // Record with tag keys the view does not declare.
  statsRecorder.record(
      createContext(factory, TagKey.create("wrong key"), VALUE),
      MeasureMap.builder().set(MEASURE, 10.0).build());
  statsRecorder.record(
      createContext(factory, TagKey.create("another wrong key"), VALUE),
      MeasureMap.builder().set(MEASURE, 50.0).build());
  DistributionViewData view = (DistributionViewData) viewManager.getView(VIEW_NAME);
  assertDistributionAggregatesEquivalent(
      view.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              // Won't record the unregistered tag key, will use default tag instead:
              // "KEY" : "unknown/not set".
              Arrays.asList(Tag.create(KEY, MutableViewData.UNKNOWN_TAG_VALUE)),
              BUCKET_BOUNDARIES,
              // Should record stats with default tag value: "KEY" : "unknown/not set".
              Arrays.asList(10.0, 50.0))));
}
@Test
public void testViewDataWithMultipleTagKeys() {
  TagKey key1 = TagKey.create("Key-1");
  TagKey key2 = TagKey.create("Key-2");
  viewManager.registerView(
      createDistributionView(
          VIEW_NAME,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(key1, key2)));
  // Three distinct (key1, key2) combinations; the first one recorded twice.
  statsRecorder.record(
      createContext(factory, key1, TagValue.create("v1"), key2, TagValue.create("v10")),
      MeasureMap.builder().set(MEASURE, 1.1).build());
  statsRecorder.record(
      createContext(factory, key1, TagValue.create("v1"), key2, TagValue.create("v20")),
      MeasureMap.builder().set(MEASURE, 2.2).build());
  statsRecorder.record(
      createContext(factory, key1, TagValue.create("v2"), key2, TagValue.create("v10")),
      MeasureMap.builder().set(MEASURE, 3.3).build());
  statsRecorder.record(
      createContext(factory, key1, TagValue.create("v1"), key2, TagValue.create("v10")),
      MeasureMap.builder().set(MEASURE, 4.4).build());
  DistributionViewData view = (DistributionViewData) viewManager.getView(VIEW_NAME);
  // Expect one aggregate per distinct tag combination.
  assertDistributionAggregatesEquivalent(
      view.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(
                  Tag.create(key1, TagValue.create("v1")),
                  Tag.create(key2, TagValue.create("v10"))),
              BUCKET_BOUNDARIES,
              Arrays.asList(1.1, 4.4)),
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(
                  Tag.create(key1, TagValue.create("v1")),
                  Tag.create(key2, TagValue.create("v20"))),
              BUCKET_BOUNDARIES,
              Arrays.asList(2.2)),
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(
                  Tag.create(key1, TagValue.create("v2")),
                  Tag.create(key2, TagValue.create("v10"))),
              BUCKET_BOUNDARIES,
              Arrays.asList(3.3))));
}
@Test
public void testMultipleViewDatasSameMeasure() {
  View view1 =
      createDistributionView(
          VIEW_NAME,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY));
  View view2 =
      createDistributionView(
          VIEW_NAME_2,
          MEASURE,
          DISTRIBUTION_AGGREGATION_DESCRIPTOR,
          Arrays.asList(KEY));
  // Register the two views at distinct times so start timestamps differ.
  clock.setTime(Timestamp.create(1, 1));
  viewManager.registerView(view1);
  clock.setTime(Timestamp.create(2, 2));
  viewManager.registerView(view2);
  // A single record on the shared measure must appear in both views.
  statsRecorder.record(
      createContext(factory, KEY, VALUE),
      MeasureMap.builder().set(MEASURE, 5.0).build());
  List<DistributionAggregate> expectedAggs =
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)), BUCKET_BOUNDARIES, Arrays.asList(5.0)));
  clock.setTime(Timestamp.create(3, 3));
  DistributionViewData viewData1 = (DistributionViewData) viewManager.getView(VIEW_NAME);
  clock.setTime(Timestamp.create(4, 4));
  DistributionViewData viewData2 = (DistributionViewData) viewManager.getView(VIEW_NAME_2);
  assertThat(viewData1.getStart()).isEqualTo(Timestamp.create(1, 1));
  assertThat(viewData1.getEnd()).isEqualTo(Timestamp.create(3, 3));
  assertDistributionAggregatesEquivalent(viewData1.getDistributionAggregates(), expectedAggs);
  assertThat(viewData2.getStart()).isEqualTo(Timestamp.create(2, 2));
  assertThat(viewData2.getEnd()).isEqualTo(Timestamp.create(4, 4));
  assertDistributionAggregatesEquivalent(viewData2.getDistributionAggregates(), expectedAggs);
}
@Test
public void testMultipleViewsDifferentMeasures() {
  MeasureDouble measure1 =
      Measure.MeasureDouble.create(MEASURE_NAME, MEASURE_DESCRIPTION, MEASURE_UNIT);
  MeasureDouble measure2 =
      Measure.MeasureDouble.create(MEASURE_NAME_2, MEASURE_DESCRIPTION, MEASURE_UNIT);
  View view1 =
      createDistributionView(
          VIEW_NAME, measure1, DISTRIBUTION_AGGREGATION_DESCRIPTOR, Arrays.asList(KEY));
  View view2 =
      createDistributionView(
          VIEW_NAME_2, measure2, DISTRIBUTION_AGGREGATION_DESCRIPTOR, Arrays.asList(KEY));
  clock.setTime(Timestamp.create(1, 0));
  viewManager.registerView(view1);
  clock.setTime(Timestamp.create(2, 0));
  viewManager.registerView(view2);
  // A single MeasureMap carrying both measures routes each value to its own view.
  statsRecorder.record(
      createContext(factory, KEY, VALUE),
      MeasureMap.builder().set(measure1, 1.1).set(measure2, 2.2).build());
  clock.setTime(Timestamp.create(3, 0));
  DistributionViewData viewData1 = (DistributionViewData) viewManager.getView(VIEW_NAME);
  clock.setTime(Timestamp.create(4, 0));
  DistributionViewData viewData2 = (DistributionViewData) viewManager.getView(VIEW_NAME_2);
  assertThat(viewData1.getStart()).isEqualTo(Timestamp.create(1, 0));
  assertThat(viewData1.getEnd()).isEqualTo(Timestamp.create(3, 0));
  assertDistributionAggregatesEquivalent(
      viewData1.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)), BUCKET_BOUNDARIES, Arrays.asList(1.1))));
  assertThat(viewData2.getStart()).isEqualTo(Timestamp.create(2, 0));
  assertThat(viewData2.getEnd()).isEqualTo(Timestamp.create(4, 0));
  assertDistributionAggregatesEquivalent(
      viewData2.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)), BUCKET_BOUNDARIES, Arrays.asList(2.2))));
}
@Test
public void testGetDistributionViewDataWithoutBucketBoundaries() {
  // A distribution aggregation with no explicit buckets is still valid.
  View view =
      createDistributionView(
          VIEW_NAME, MEASURE, DistributionAggregation.create(),
          Arrays.asList(KEY));
  clock.setTime(Timestamp.create(1, 0));
  viewManager.registerView(view);
  statsRecorder.record(
      createContext(factory, KEY, VALUE),
      MeasureMap.builder().set(MEASURE, 1.1).build());
  clock.setTime(Timestamp.create(3, 0));
  DistributionViewData viewData = (DistributionViewData) viewManager.getView(VIEW_NAME);
  assertThat(viewData.getStart()).isEqualTo(Timestamp.create(1, 0));
  assertThat(viewData.getEnd()).isEqualTo(Timestamp.create(3, 0));
  // The bucket-less overload of createDistributionAggregate is used here.
  assertDistributionAggregatesEquivalent(
      viewData.getDistributionAggregates(),
      Arrays.asList(
          StatsTestUtil.createDistributionAggregate(
              Arrays.asList(Tag.create(KEY, VALUE)), Arrays.asList(1.1))));
}
// TODO(sebright) Consider making this helper method work with larger ranges of double values and
// moving it to StatsTestUtil.
private static void assertDistributionAggregatesEquivalent(
    Collection<DistributionAggregate> actual, Collection<DistributionAggregate> expected) {
  // Delegates to the shared helper with a fixed tolerance for double comparisons.
  StatsTestUtil.assertDistributionAggregatesEquivalent(1e-6, actual, expected);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexGraveyard;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.index.Index;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TestCustomMetaData;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.equalTo;
/**
* Tests for the {@link ClusterChangedEvent} class.
*/
public class ClusterChangedEventTests extends ESTestCase {
private static final ClusterName TEST_CLUSTER_NAME = new ClusterName("test");
private static final String NODE_ID_PREFIX = "node_";
private static final String INITIAL_CLUSTER_ID = UUIDs.randomBase64UUID();
// the initial indices which every cluster state test starts out with
private static final List<Index> initialIndices = Arrays.asList(new Index("idx1", UUIDs.randomBase64UUID()),
                                                                new Index("idx2", UUIDs.randomBase64UUID()),
                                                                new Index("idx3", UUIDs.randomBase64UUID()));
/**
 * Test basic properties of the ClusterChangedEvent class:
 * (1) make sure there are no null values for any of its properties
 * (2) make sure you can't create a ClusterChangedEvent with any null values
 */
public void testBasicProperties() {
    ClusterState newState = createSimpleClusterState();
    ClusterState previousState = createSimpleClusterState();
    ClusterChangedEvent event = new ClusterChangedEvent("_na_", newState, previousState);
    assertThat(event.source(), equalTo("_na_"));
    assertThat(event.state(), equalTo(newState));
    assertThat(event.previousState(), equalTo(previousState));
    assertNotNull("nodesDelta should not be null", event.nodesDelta());
    // should not be able to create a ClusterChangedEvent with null values for any of the constructor args
    try {
        event = new ClusterChangedEvent(null, newState, previousState);
        fail("should not have created a ClusterChangedEvent from a null source: " + event.source());
    } catch (NullPointerException e) {
        // expected: a null source is rejected
    }
    try {
        event = new ClusterChangedEvent("_na_", null, previousState);
        fail("should not have created a ClusterChangedEvent from a null state: " + event.state());
    } catch (NullPointerException e) {
        // expected: a null new state is rejected
    }
    try {
        event = new ClusterChangedEvent("_na_", newState, null);
        fail("should not have created a ClusterChangedEvent from a null previousState: " + event.previousState());
    } catch (NullPointerException e) {
        // expected: a null previous state is rejected
    }
}
/**
 * Test whether the ClusterChangedEvent returns the correct value for whether the local node is master,
 * based on what was set on the cluster state.
 */
public void testLocalNodeIsMaster() {
    final int numNodesInCluster = 3;
    ClusterState previousState = createSimpleClusterState();
    // createState's second argument controls whether the local node is master.
    ClusterState newState = createState(numNodesInCluster, true, initialIndices);
    ClusterChangedEvent event = new ClusterChangedEvent("_na_", newState, previousState);
    assertTrue("local node should be master", event.localNodeMaster());
    newState = createState(numNodesInCluster, false, initialIndices);
    event = new ClusterChangedEvent("_na_", newState, previousState);
    assertFalse("local node should not be master", event.localNodeMaster());
}
/**
 * Test that the indices created and indices deleted lists between two cluster states
 * are correct when there is a change in indices added and deleted. Also tests metadata
 * equality between cluster states.
 */
public void testIndicesMetaDataChanges() {
    final int numNodesInCluster = 3;
    ClusterState previousState = createState(numNodesInCluster, randomBoolean(), initialIndices);
    // Exercise every tombstone-deletion quantity, chaining states together.
    for (TombstoneDeletionQuantity quantity : TombstoneDeletionQuantity.valuesInRandomizedOrder()) {
        final ClusterState newState = executeIndicesChangesTest(previousState, quantity);
        previousState = newState; // serves as the base cluster state for the next iteration
    }
}
/**
 * Test that the indices deleted list is correct when the previous cluster state is
 * not initialized/recovered. This should trigger the use of the index tombstones to
 * determine the deleted indices.
 */
public void testIndicesDeletionWithNotRecoveredState() {
    // test with all the various tombstone deletion quantities
    for (TombstoneDeletionQuantity quantity : TombstoneDeletionQuantity.valuesInRandomizedOrder()) {
        // A non-initialized previous state forces tombstone-based deletion detection.
        final ClusterState previousState = createNonInitializedState(randomIntBetween(3, 5), randomBoolean());
        executeIndicesChangesTest(previousState, quantity);
    }
}
/**
 * Test the index metadata change check.
 */
public void testIndexMetaDataChange() {
    final int numNodesInCluster = 3;
    final ClusterState state = createState(numNodesInCluster, randomBoolean(), initialIndices);
    // test when its not the same IndexMetaData
    final Index index = initialIndices.get(0);
    final IndexMetaData originalIndexMeta = state.metaData().index(index);
    // make sure the metadata is actually on the cluster state
    assertNotNull("IndexMetaData for " + index + " should exist on the cluster state", originalIndexMeta);
    // Bumping the version must register as a change.
    IndexMetaData newIndexMeta = createIndexMetadata(index, originalIndexMeta.getVersion() + 1);
    assertTrue("IndexMetaData with different version numbers must be considered changed",
        ClusterChangedEvent.indexMetaDataChanged(originalIndexMeta, newIndexMeta));
    // test when it doesn't exist
    newIndexMeta = createIndexMetadata(new Index("doesntexist", UUIDs.randomBase64UUID()));
    assertTrue("IndexMetaData that didn't previously exist should be considered changed",
        ClusterChangedEvent.indexMetaDataChanged(originalIndexMeta, newIndexMeta));
    // test when its the same IndexMetaData
    assertFalse("IndexMetaData should be the same", ClusterChangedEvent.indexMetaDataChanged(originalIndexMeta, originalIndexMeta));
}
/**
 * Test nodes added/removed/changed checks.
 */
public void testNodesAddedAndRemovedAndChanged() {
    final int numNodesInCluster = 4;
    final ClusterState originalState = createState(numNodesInCluster, randomBoolean(), initialIndices);
    // test when nodes have not been added or removed between cluster states
    ClusterState newState = createState(numNodesInCluster, randomBoolean(), initialIndices);
    ClusterChangedEvent event = new ClusterChangedEvent("_na_", newState, originalState);
    assertFalse("Nodes should not have been added between cluster states", event.nodesAdded());
    assertFalse("Nodes should not have been removed between cluster states", event.nodesRemoved());
    assertFalse("Nodes should not have been changed between cluster states", event.nodesChanged());
    // test when nodes have been removed between cluster states
    newState = createState(numNodesInCluster - 1, randomBoolean(), initialIndices);
    event = new ClusterChangedEvent("_na_", newState, originalState);
    assertTrue("Nodes should have been removed between cluster states", event.nodesRemoved());
    assertFalse("Nodes should not have been added between cluster states", event.nodesAdded());
    assertTrue("Nodes should have been changed between cluster states", event.nodesChanged());
    // test when nodes have been added between cluster states
    newState = createState(numNodesInCluster + 1, randomBoolean(), initialIndices);
    event = new ClusterChangedEvent("_na_", newState, originalState);
    assertFalse("Nodes should not have been removed between cluster states", event.nodesRemoved());
    assertTrue("Nodes should have been added between cluster states", event.nodesAdded());
    assertTrue("Nodes should have been changed between cluster states", event.nodesChanged());
    // test when nodes both added and removed between cluster states
    // here we reuse the newState from the previous run which already added extra nodes
    newState = nextState(newState, randomBoolean(), Collections.emptyList(), Collections.emptyList(), 1);
    event = new ClusterChangedEvent("_na_", newState, originalState);
    assertTrue("Nodes should have been removed between cluster states", event.nodesRemoved());
    assertTrue("Nodes should have been added between cluster states", event.nodesAdded());
    assertTrue("Nodes should have been changed between cluster states", event.nodesChanged());
}
/**
 * Test the routing table changes checks.
 */
public void testRoutingTableChanges() {
    final int numNodesInCluster = 3;
    final ClusterState originalState = createState(numNodesInCluster, randomBoolean(), initialIndices);
    // routing tables and index routing tables are same object
    ClusterState newState = ClusterState.builder(originalState).build();
    ClusterChangedEvent event = new ClusterChangedEvent("_na_", originalState, newState);
    assertFalse("routing tables should be the same object", event.routingTableChanged());
    assertFalse("index routing table should be the same object", event.indexRoutingTableChanged(initialIndices.get(0).getName()));
    // routing tables and index routing tables aren't same object
    newState = createState(numNodesInCluster, randomBoolean(), initialIndices);
    event = new ClusterChangedEvent("_na_", originalState, newState);
    assertTrue("routing tables should not be the same object", event.routingTableChanged());
    assertTrue("index routing table should not be the same object", event.indexRoutingTableChanged(initialIndices.get(0).getName()));
    // index routing tables are different because they don't exist
    newState = createState(numNodesInCluster, randomBoolean(), initialIndices.subList(1, initialIndices.size()));
    event = new ClusterChangedEvent("_na_", originalState, newState);
    assertTrue("routing tables should not be the same object", event.routingTableChanged());
    assertTrue("index routing table should not be the same object", event.indexRoutingTableChanged(initialIndices.get(0).getName()));
}
/**
 * Test custom metadata change checks
 */
public void testChangedCustomMetaDataSet() {
    final int numNodesInCluster = 3;
    final ClusterState originalState = createState(numNodesInCluster, randomBoolean(), initialIndices);
    CustomMetaData1 customMetaData1 = new CustomMetaData1("data");
    final ClusterState stateWithCustomMetaData = nextState(originalState, Collections.singletonList(customMetaData1));
    // no custom metadata present in any state
    ClusterState nextState = ClusterState.builder(originalState).build();
    ClusterChangedEvent event = new ClusterChangedEvent("_na_", originalState, nextState);
    assertTrue(event.changedCustomMetaDataSet().isEmpty());
    // next state has new custom metadata
    nextState = nextState(originalState, Collections.singletonList(customMetaData1));
    event = new ClusterChangedEvent("_na_", originalState, nextState);
    Set<String> changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.size() == 1);
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData1.getWriteableName()));
    // next state has same custom metadata
    nextState = nextState(originalState, Collections.singletonList(customMetaData1));
    event = new ClusterChangedEvent("_na_", stateWithCustomMetaData, nextState);
    changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.isEmpty());
    // next state has equivalent custom metadata
    nextState = nextState(originalState, Collections.singletonList(new CustomMetaData1("data")));
    event = new ClusterChangedEvent("_na_", stateWithCustomMetaData, nextState);
    changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.isEmpty());
    // next state removes custom metadata
    nextState = originalState;
    event = new ClusterChangedEvent("_na_", stateWithCustomMetaData, nextState);
    changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.size() == 1);
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData1.getWriteableName()));
    // next state updates custom metadata
    nextState = nextState(stateWithCustomMetaData, Collections.singletonList(new CustomMetaData1("data1")));
    event = new ClusterChangedEvent("_na_", stateWithCustomMetaData, nextState);
    changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.size() == 1);
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData1.getWriteableName()));
    // next state adds new custom metadata type
    CustomMetaData2 customMetaData2 = new CustomMetaData2("data2");
    nextState = nextState(stateWithCustomMetaData, Arrays.asList(customMetaData1, customMetaData2));
    event = new ClusterChangedEvent("_na_", stateWithCustomMetaData, nextState);
    changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.size() == 1);
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData2.getWriteableName()));
    // next state adds two custom metadata type
    nextState = nextState(originalState, Arrays.asList(customMetaData1, customMetaData2));
    event = new ClusterChangedEvent("_na_", originalState, nextState);
    changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.size() == 2);
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData2.getWriteableName()));
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData1.getWriteableName()));
    // next state removes two custom metadata type
    nextState = originalState;
    event = new ClusterChangedEvent("_na_",
        nextState(originalState, Arrays.asList(customMetaData1, customMetaData2)), nextState);
    changedCustomMetaDataTypeSet = event.changedCustomMetaDataSet();
    assertTrue(changedCustomMetaDataTypeSet.size() == 2);
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData2.getWriteableName()));
    assertTrue(changedCustomMetaDataTypeSet.contains(customMetaData1.getWriteableName()));
}
/**
 * Test-only custom metadata implementation, registered under the writeable name "2".
 */
private static class CustomMetaData2 extends TestCustomMetaData {
    protected CustomMetaData2(String data) {
        super(data);
    }

    @Override
    public EnumSet<MetaData.XContentContext> context() {
        // persisted as part of the gateway state only
        return EnumSet.of(MetaData.XContentContext.GATEWAY);
    }

    @Override
    public Version getMinimalSupportedVersion() {
        return Version.CURRENT;
    }

    @Override
    public String getWriteableName() {
        return "2";
    }
}
/**
 * Test-only custom metadata implementation, registered under the writeable name "1".
 */
private static class CustomMetaData1 extends TestCustomMetaData {
    protected CustomMetaData1(String data) {
        super(data);
    }

    @Override
    public EnumSet<MetaData.XContentContext> context() {
        // persisted as part of the gateway state only
        return EnumSet.of(MetaData.XContentContext.GATEWAY);
    }

    @Override
    public Version getMinimalSupportedVersion() {
        return Version.CURRENT;
    }

    @Override
    public String getWriteableName() {
        return "1";
    }
}
// Create a minimal cluster state carrying only the test cluster name (no nodes, metadata, or routing).
private static ClusterState createSimpleClusterState() {
    return ClusterState.builder(TEST_CLUSTER_NAME).build();
}
// Build a basic cluster state containing the given indices, with discovery nodes and a routing
// table derived from that metadata.
private static ClusterState createState(final int numNodes, final boolean isLocalMaster, final List<Index> indices) {
    final MetaData stateMetaData = createMetaData(indices);
    final ClusterState.Builder stateBuilder = ClusterState.builder(TEST_CLUSTER_NAME);
    stateBuilder.nodes(createDiscoveryNodes(numNodes, isLocalMaster));
    stateBuilder.metaData(stateMetaData);
    stateBuilder.routingTable(createRoutingTable(1, stateMetaData));
    return stateBuilder.build();
}
// Build a cluster state that still carries the global STATE_NOT_RECOVERED block, i.e. a cluster
// that has not finished its initial recovery.
private static ClusterState createNonInitializedState(final int numNodes, final boolean isLocalMaster) {
    final ClusterState base = createState(numNodes, isLocalMaster, Collections.emptyList());
    final ClusterBlocks notRecovered =
        ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK).build();
    return ClusterState.builder(base).blocks(notRecovered).build();
}
// Derive a new cluster state from previousState in which all TestCustomMetaData entries are
// replaced by exactly the entries in customMetaDataList; a fresh state UUID marks it as changed.
private static ClusterState nextState(final ClusterState previousState, List<TestCustomMetaData> customMetaDataList) {
    final MetaData.Builder newMetaData = new MetaData.Builder(previousState.metaData());
    // drop every test custom metadata entry carried over from the previous state
    for (ObjectObjectCursor<String, MetaData.Custom> cursor : previousState.metaData().customs()) {
        if (cursor.value instanceof TestCustomMetaData) {
            newMetaData.removeCustom(cursor.key);
        }
    }
    // install the requested entries
    for (TestCustomMetaData custom : customMetaDataList) {
        newMetaData.putCustom(custom.getWriteableName(), custom);
    }
    return ClusterState.builder(previousState)
        .stateUUID(UUIDs.randomBase64UUID())
        .metaData(newMetaData)
        .build();
}
// Derive a modified cluster state from another one: optionally a new cluster UUID, the given
// indices added and deleted (each deletion recorded as a graveyard tombstone), and up to
// numNodesToRemove nodes dropped. All cluster blocks are cleared on the result.
private static ClusterState nextState(final ClusterState previousState, final boolean changeClusterUUID,
                                      final List<Index> addedIndices, final List<Index> deletedIndices, final int numNodesToRemove) {
    final ClusterState.Builder stateBuilder = ClusterState.builder(previousState);
    stateBuilder.stateUUID(UUIDs.randomBase64UUID());
    final boolean metaDataChanges = changeClusterUUID || !addedIndices.isEmpty() || !deletedIndices.isEmpty();
    if (metaDataChanges) {
        final MetaData.Builder metaDataBuilder = MetaData.builder(previousState.metaData());
        if (changeClusterUUID) {
            metaDataBuilder.clusterUUID(UUIDs.randomBase64UUID());
        }
        for (final Index added : addedIndices) {
            metaDataBuilder.put(createIndexMetadata(added), true);
        }
        for (final Index deleted : deletedIndices) {
            metaDataBuilder.remove(deleted.getName());
            // record a tombstone so the deletion is visible through the index graveyard diff
            final IndexGraveyard.Builder graveyard = IndexGraveyard.builder(metaDataBuilder.indexGraveyard());
            graveyard.addTombstone(deleted);
            metaDataBuilder.indexGraveyard(graveyard.build());
        }
        stateBuilder.metaData(metaDataBuilder);
    }
    if (numNodesToRemove > 0) {
        final int nodeCount = previousState.getNodes().getSize();
        final DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(previousState.getNodes());
        for (int i = 0; i < numNodesToRemove && i < nodeCount; i++) {
            nodesBuilder.remove(NODE_ID_PREFIX + i);
        }
        stateBuilder.nodes(nodesBuilder);
    }
    stateBuilder.blocks(ClusterBlocks.builder().build());
    return stateBuilder.build();
}
// Build the discovery nodes for a cluster state. Node 0 is the elected master, node 1 is
// master-eligible, node 2 is a data node; remaining nodes randomly take master and/or data roles.
// The local node is node 0 when isLocalMaster, otherwise a random non-master node.
private static DiscoveryNodes createDiscoveryNodes(final int numNodes, final boolean isLocalMaster) {
    assert (numNodes >= 3) : "the initial cluster state for event change tests should have a minimum of 3 nodes " +
        "so there are a minimum of 2 master nodes for testing master change events.";
    final DiscoveryNodes.Builder builder = DiscoveryNodes.builder();
    final int localNodeIndex = isLocalMaster ? 0 : randomIntBetween(1, numNodes - 1);
    for (int i = 0; i < numNodes; i++) {
        final String nodeId = NODE_ID_PREFIX + i;
        final Set<DiscoveryNode.Role> roles = new HashSet<>();
        switch (i) {
            case 0:
                // the elected master node
                builder.masterNodeId(nodeId);
                roles.add(DiscoveryNode.Role.MASTER);
                break;
            case 1:
                // the alternate master-eligible node
                roles.add(DiscoveryNode.Role.MASTER);
                break;
            case 2:
                // at least one data node is required
                roles.add(DiscoveryNode.Role.DATA);
                break;
            default:
                // remaining nodes may be master-eligible, data, both, or neither
                if (randomBoolean()) {
                    roles.add(DiscoveryNode.Role.MASTER);
                }
                if (randomBoolean()) {
                    roles.add(DiscoveryNode.Role.DATA);
                }
                break;
        }
        builder.add(newNode(nodeId, roles));
        if (i == localNodeIndex) {
            builder.localNodeId(nodeId);
        }
    }
    return builder.build();
}
// Create a new DiscoveryNode with the given id and roles, fixed fake host/address values, an
// empty attribute map, and the current version. nodeId is passed for the first three
// parameters — presumably id, name and ephemeral id; confirm against the DiscoveryNode ctor.
private static DiscoveryNode newNode(final String nodeId, Set<DiscoveryNode.Role> roles) {
    return new DiscoveryNode(nodeId, nodeId, nodeId, "host", "host_address", buildNewFakeTransportAddress(),
        Collections.emptyMap(), roles, Version.CURRENT);
}
// Build cluster metadata holding the given indices, under the fixed initial cluster id.
private static MetaData createMetaData(final List<Index> indices) {
    final MetaData.Builder metaDataBuilder = MetaData.builder();
    metaDataBuilder.clusterUUID(INITIAL_CLUSTER_ID);
    for (final Index index : indices) {
        metaDataBuilder.put(createIndexMetadata(index), true);
    }
    return metaDataBuilder.build();
}
// Create the index metadata for a given index, at metadata version 1.
private static IndexMetaData createIndexMetadata(final Index index) {
    return createIndexMetadata(index, 1);
}
// Create the index metadata for a given index at the specified metadata version: one primary
// shard, no replicas, created "now", carrying the index's UUID and the current created-version.
private static IndexMetaData createIndexMetadata(final Index index, final long version) {
    final Settings indexSettings = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
        .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
        .build();
    final IndexMetaData.Builder indexMetaDataBuilder = IndexMetaData.builder(index.getName());
    indexMetaDataBuilder.settings(indexSettings);
    indexMetaDataBuilder.numberOfShards(1);
    indexMetaDataBuilder.numberOfReplicas(0);
    indexMetaDataBuilder.creationDate(System.currentTimeMillis());
    indexMetaDataBuilder.version(version);
    return indexMetaDataBuilder.build();
}
// Build a routing table at the given version, adding every index present in metaData as new.
private static RoutingTable createRoutingTable(final long version, final MetaData metaData) {
    final RoutingTable.Builder routingTableBuilder = RoutingTable.builder().version(version);
    for (final ObjectCursor<IndexMetaData> indexCursor : metaData.indices().values()) {
        routingTableBuilder.addAsNew(indexCursor.value);
    }
    return routingTableBuilder.build();
}
// Create numIndices new Index instances named "newIdx_<id>_<i>", each with a random UUID.
private static List<Index> addIndices(final int numIndices, final String id) {
    final List<Index> created = new ArrayList<>(numIndices);
    for (int i = 0; i < numIndices; i++) {
        created.add(new Index("newIdx_" + id + "_" + i, UUIDs.randomBase64UUID()));
    }
    return created;
}
// Select the first numIndices entries of currIndices (indices that already belong to a cluster
// state) as the indices to delete.
private static List<Index> delIndices(final int numIndices, final List<Index> currIndices) {
    return new ArrayList<>(currIndices.subList(0, numIndices));
}
// Execute the indices-changes test: generate random index additions and deletions, build the
// next cluster state, and assert that the resulting ClusterChangedEvent reports exactly those
// changes. Returns the new state so callers can chain successive mutations.
private static ClusterState executeIndicesChangesTest(final ClusterState previousState,
                                                      final TombstoneDeletionQuantity deletionQuantity) {
    final int numAdd = randomIntBetween(0, 5); // add random # of indices to the next cluster state
    // snapshot the indices currently present in the previous state
    final List<Index> stateIndices = new ArrayList<>();
    for (Iterator<IndexMetaData> iter = previousState.metaData().indices().valuesIt(); iter.hasNext();) {
        stateIndices.add(iter.next().getIndex());
    }
    // number of existing indices to delete, controlled by the requested quantity
    final int numDel;
    switch (deletionQuantity) {
        case DELETE_ALL: {
            numDel = stateIndices.size();
            break;
        }
        case DELETE_NONE: {
            numDel = 0;
            break;
        }
        case DELETE_RANDOM: {
            // never all of them: leaves at least one index in place
            numDel = randomIntBetween(0, Math.max(stateIndices.size() - 1, 0));
            break;
        }
        default: throw new AssertionError("Unhandled mode [" + deletionQuantity + "]");
    }
    final boolean changeClusterUUID = randomBoolean();
    final List<Index> addedIndices = addIndices(numAdd, randomAlphaOfLengthBetween(5, 10));
    List<Index> delIndices;
    if (changeClusterUUID) {
        // when the cluster UUID changes this test deletes nothing — presumably because the
        // event treats a new cluster UUID specially; confirm against ClusterChangedEvent
        delIndices = new ArrayList<>();
    } else {
        delIndices = delIndices(numDel, stateIndices);
    }
    final ClusterState newState = nextState(previousState, changeClusterUUID, addedIndices, delIndices, 0);
    ClusterChangedEvent event = new ClusterChangedEvent("_na_", newState, previousState);
    // the event must report exactly the indices that were added and removed
    final List<String> addsFromEvent = event.indicesCreated();
    List<Index> delsFromEvent = event.indicesDeleted();
    assertThat(new HashSet<>(addsFromEvent), equalTo(addedIndices.stream().map(Index::getName).collect(Collectors.toSet())));
    assertThat(new HashSet<>(delsFromEvent), equalTo(new HashSet<>(delIndices)));
    assertThat(event.metaDataChanged(), equalTo(changeClusterUUID || addedIndices.size() > 0 || delIndices.size() > 0));
    // the graveyard diff must contain one added tombstone per deleted index
    final IndexGraveyard newGraveyard = event.state().metaData().indexGraveyard();
    final IndexGraveyard oldGraveyard = event.previousState().metaData().indexGraveyard();
    assertThat(((IndexGraveyard.IndexGraveyardDiff)newGraveyard.diff(oldGraveyard)).getAdded().size(), equalTo(delIndices.size()));
    return newState;
}
// How many index tombstones to delete from a cluster state in one test iteration.
private enum TombstoneDeletionQuantity {
    DELETE_RANDOM, // delete a random number of tombstones from cluster state (not zero and not all)
    DELETE_NONE, // delete none of the tombstones from cluster state
    DELETE_ALL; // delete all tombstones from cluster state

    // All quantities in a randomized order, so tests do not depend on declaration order.
    static List<TombstoneDeletionQuantity> valuesInRandomizedOrder() {
        final List<TombstoneDeletionQuantity> shuffled = new ArrayList<>(Arrays.asList(values()));
        Collections.shuffle(shuffled, random());
        return shuffled;
    }
}
}
| |
package org.processmining.framework.packages.impl;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Writer;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.processmining.framework.boot.Boot;
import org.processmining.framework.packages.PackageDescriptor;
import org.processmining.framework.packages.PackageDescriptor.OS;
import org.processmining.framework.packages.PackageManager;
import org.processmining.framework.packages.PackageManager.Canceller;
import org.processmining.framework.packages.PackageSet;
import org.processmining.framework.packages.Repository;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
 * Reads and writes the package configuration XML: the set of known package repositories, the
 * packages available from those repositories, and the packages that are currently installed.
 */
public class PackageConfigPerister {

    private static final String nl = System.getProperty("line.separator");

    // Utility class: all members are static, so no instances are needed.
    private PackageConfigPerister() {
    }

    /**
     * Reads the package configuration from the given file into the supplied collections.
     * The file stream is always closed, even when parsing fails.
     *
     * @param f the configuration file to read
     * @param repositories receives the repositories listed in the file
     * @param available receives the packages listed outside the installed-packages section
     * @param installed receives the packages listed inside the installed-packages section
     * @param canceller polled during parsing; may be null. A cancel aborts via a SAXException.
     */
    public static void read(File f, Set<Repository> repositories, PackageSet available, PackageSet installed,
            Canceller canceller) throws ParserConfigurationException, SAXException, IOException {
        InputStream is = new BufferedInputStream(new FileInputStream(f));
        try {
            read(is, repositories, available, installed, canceller);
        } finally {
            is.close();
        }
    }

    /**
     * Reads the package configuration from the given stream into the supplied collections.
     * Parsing is namespace-unaware and non-validating. The stream is closed when parsing
     * finishes — now also when the parser throws (previously it leaked on failure).
     */
    public static void read(InputStream is, Set<Repository> repositories, PackageSet available, PackageSet installed,
            Canceller canceller) throws ParserConfigurationException, SAXException, IOException {
        BufferedInputStream bis = new BufferedInputStream(is);
        try {
            ConfigHandler handler = new ConfigHandler(repositories, available, installed, canceller);
            SAXParserFactory parserFactory = SAXParserFactory.newInstance();
            parserFactory.setNamespaceAware(false);
            parserFactory.setValidating(false);
            try {
                // Some old JAXP versions may throw the UnsupportedOperationException in the next call.
                parserFactory.setSchema(null);
            } catch (UnsupportedOperationException ex) {
                // Ignore: schema validation is not used anyway.
            }
            SAXParser parser = parserFactory.newSAXParser();
            parser.parse(bis, handler);
        } finally {
            // Close in finally so the stream is not leaked when the parser throws.
            bis.close();
        }
    }

    /**
     * Parses a repository listing from the given stream and returns the set of packages it
     * declares as available.
     */
    public static Set<PackageDescriptor> listRepository(InputStream is, Canceller canceller)
            throws ParserConfigurationException, SAXException, IOException {
        Set<Repository> repos = new HashSet<Repository>();
        PackageSet available = new PackageSet();
        PackageSet installed = new PackageSet();
        read(is, repos, available, installed, canceller);
        return available;
    }

    /**
     * SAX handler that fills the repository/available/installed collections while parsing a
     * package configuration document.
     */
    static private class ConfigHandler extends DefaultHandler {

        private static final String PACKAGES = "packages";
        private static final String URL_ATTR = "url";
        private static final String VERSION_ATTR = "version";
        private static final String NAME_ATTR = "name";
        private static final String DEPENDENCY = "dependency";
        private static final String PACKAGE = "package";
        private static final String REPOSITORY = "repository";
        private static final String CONFLICT = "conflict";
        private static final String DESCRIPTION_ATTR = "desc";
        private static final String ORGANISATION_ATTR = "org";
        private static final String AUTHOR_ATTR = "author";
        private static final String AUTO_ATTR = "auto";
        private static final String LICENSE_ATTR = "license";
        // Typed String (was declared Object): the value is only ever used as a string
        // in equals() comparisons and string concatenation.
        private static final String INSTALLED = "installed-packages";
        private static final String HAS_PLUGINS_ATTR = "hasPlugins";
        private static final String OS_ATTR = "os";
        private static final String MAINTAINER_ATTR = "maintainer";
        private static final String LOGO_URL_ATTR = "logo";
        private static final String KEYWORDS_ATTR = "keywords";

        // Parsing state: the repository or package element currently being read, if any.
        private Repository curRepo = null;
        private String curPackageName = null;
        private String curPackageVersion = null;
        private final List<String> dependencies = new ArrayList<String>();
        private final List<String> conflicts = new ArrayList<String>();
        private String curPackageURL;
        private String curLogoURL;
        private String curPackageDesc;
        private String curPackageOrg;
        private String curPackageAuthor;
        private String curPackageLicense;
        private String curPackageAutoInstalled;
        private String curPackageHasPlugins;
        // true while inside the installed-packages element
        private boolean insideInstalled = false;
        private final Set<Repository> repositories;
        private final Set<PackageDescriptor> available;
        private final Set<PackageDescriptor> installed;
        private final Canceller canceller;
        private String curPackageOS;
        private String curPackageMaintainer;
        private String curKeywords;

        public ConfigHandler(Set<Repository> repositories, Set<PackageDescriptor> available,
                Set<PackageDescriptor> installed, Canceller canceller) {
            this.available = available;
            this.installed = installed;
            this.repositories = repositories;
            this.canceller = canceller;
        }

        @Override
        public void startElement(String uri, String local, String qName, Attributes attributes) throws SAXException {
            if ((canceller != null) && canceller.isCancelled()) {
                throw new CancelledException();
            }
            // element names are matched case-insensitively
            qName = qName.toLowerCase();
            if ((curRepo == null) && (curPackageName == null)) {
                if (qName.equals(INSTALLED)) {
                    insideInstalled = true;
                } else if (!insideInstalled && qName.equals(REPOSITORY)) {
                    String url = attributes.getValue(URL_ATTR);
                    if ((url != null) && (url.trim().length() > 0)) {
                        try {
                            curRepo = new Repository(new URL(url.trim()));
                        } catch (MalformedURLException e) {
                            System.err.println("Invalid URL for repository, skipping: " + url);
                        }
                    }
                } else if (qName.equals(PACKAGE)) {
                    String name = attributes.getValue(NAME_ATTR);
                    String version = attributes.getValue(VERSION_ATTR);
                    String url = attributes.getValue(URL_ATTR);
                    String logo = attributes.getValue(LOGO_URL_ATTR);
                    String desc = attributes.getValue(DESCRIPTION_ATTR);
                    String org = attributes.getValue(ORGANISATION_ATTR);
                    String license = attributes.getValue(LICENSE_ATTR);
                    String author = attributes.getValue(AUTHOR_ATTR);
                    String auto = attributes.getValue(AUTO_ATTR);
                    String hasPlugins = attributes.getValue(HAS_PLUGINS_ATTR);
                    String os = attributes.getValue(OS_ATTR);
                    String maintainer = attributes.getValue(MAINTAINER_ATTR);
                    String keywords = attributes.getValue(KEYWORDS_ATTR);
                    // name, version, os and url are mandatory; everything else gets a default
                    if ((name != null) && (name.trim().length() > 0) && //
                            (version != null) && (version.trim().length() > 0) && //
                            (os != null) && (os.trim().length() > 0) && //
                            (url != null) && (url.trim().length() > 0)) {
                        curPackageName = name;
                        curPackageVersion = version;
                        curPackageURL = url;
                        curPackageOS = os;
                        curLogoURL = logo == null ? "" : logo;
                        curPackageDesc = desc == null ? "" : desc;
                        curPackageOrg = org == null ? "" : org;
                        curPackageLicense = license == null ? "" : license;
                        curPackageAuthor = author == null ? "" : author;
                        // the author is the default maintainer
                        curPackageMaintainer = maintainer == null ? author : maintainer;
                        curPackageAutoInstalled = auto == null ? "" : auto;
                        curPackageHasPlugins = hasPlugins == null ? "" : hasPlugins;
                        curKeywords = keywords == null ? "" : keywords;
                        dependencies.clear();
                        conflicts.clear();
                    }
                }
            } else if ((curPackageName != null) && qName.equals(DEPENDENCY)) {
                String name = attributes.getValue(NAME_ATTR);
                if ((name != null) && (name.trim().length() > 0)) {
                    dependencies.add(name);
                }
            } else if ((curPackageName != null) && qName.equals(CONFLICT)) {
                String name = attributes.getValue(NAME_ATTR);
                if ((name != null) && (name.trim().length() > 0)) {
                    conflicts.add(name);
                }
            }
        }

        @Override
        public void endElement(String uri, String local, String qName) throws SAXException {
            if ((canceller != null) && canceller.isCancelled()) {
                throw new CancelledException();
            }
            qName = qName.toLowerCase();
            if (qName.equals(INSTALLED)) {
                insideInstalled = false;
            } else if ((curRepo != null) && qName.equals(REPOSITORY)) {
                repositories.add(curRepo);
                curRepo = null;
            } else if ((curPackageName != null) && qName.equals(PACKAGE)) {
                OS os = OS.fromString(curPackageOS);
                // packages for other operating systems are silently skipped
                if (os.isUsable()) {
                    PackageDescriptor pack = new PackageDescriptor(curPackageName, curPackageVersion, os,
                            curPackageDesc, curPackageOrg, curPackageAuthor, curPackageMaintainer, curPackageLicense,
                            curPackageURL, curLogoURL, curKeywords, "true".equals(curPackageAutoInstalled), !"false"
                                    .equals(curPackageHasPlugins), dependencies, conflicts);
                    if (insideInstalled) {
                        installed.add(pack);
                    } else {
                        if (Boot.HIDE_OLD_PACKAGES) {
                            // Only keep the newest version of each package.
                            // Suggested by Massimiliano de Leoni
                            PackageDescriptor foundPack = null;
                            for (PackageDescriptor availablePack : available) {
                                if (availablePack.getName().equals(pack.getName())) {
                                    foundPack = availablePack;
                                    break;
                                }
                            }
                            if (foundPack != null) {
                                if (foundPack.getVersion().lessThan(pack.getVersion())) {
                                    available.remove(foundPack);
                                    available.add(pack);
                                } else {
                                    // Skip, pack is dominated by foundPack.
                                }
                            } else {
                                available.add(pack);
                            }
                        } else {
                            available.add(pack);
                        }
                    }
                }
                curPackageName = null;
            }
        }
    }

    /**
     * Writes the package configuration (repositories, available packages, installed packages)
     * to the given file. The writer is now closed even when writing fails (previously it leaked
     * the file handle on an IOException).
     */
    public static void write(File config, Set<Repository> repositories, Set<PackageDescriptor> available,
            Set<PackageDescriptor> installed) throws IOException {
        Writer writer = new FileWriter(config);
        try {
            // TODO properly escape all raw strings
            writer.write("<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>" + nl);
            writer.write("<" + ConfigHandler.PACKAGES + ">" + nl);
            for (Repository repo : repositories) {
                writer.write(" <" + ConfigHandler.REPOSITORY + " " + ConfigHandler.URL_ATTR + "=\"" + repo.getURL() + "\""
                        + " />" + nl);
            }
            for (PackageDescriptor pack : available) {
                /*
                 * Do not write to local repo if known to be unavailable.
                 */
                if (PackageManager.getInstance().isAvailable(pack)) {
                    writePackage(pack, writer);
                }
            }
            writer.write(" <" + ConfigHandler.INSTALLED + ">" + nl);
            for (PackageDescriptor pack : installed) {
                writePackage(pack, writer);
            }
            writer.write(" </" + ConfigHandler.INSTALLED + ">" + nl);
            writer.write("</" + ConfigHandler.PACKAGES + ">" + nl);
        } finally {
            writer.close();
        }
    }

    // Write a single package element, including its dependency and conflict children.
    private static void writePackage(PackageDescriptor pack, Writer writer) throws IOException {
        writer.write(" <" + ConfigHandler.PACKAGE + //
                " " + ConfigHandler.NAME_ATTR + "=\"" + pack.getName() + "\"" + //
                " " + ConfigHandler.VERSION_ATTR + "=\"" + pack.getVersion() + "\"" + //
                " " + ConfigHandler.OS_ATTR + "=\"" + pack.getOS().getName() + "\"" + //
                " " + ConfigHandler.URL_ATTR + "=\"" + pack.getURL() + "\"" + //
                " " + ConfigHandler.DESCRIPTION_ATTR + "=\"" + pack.getDescription() + "\"" + //
                " " + ConfigHandler.ORGANISATION_ATTR + "=\"" + pack.getOrganisation() + "\"" + //
                " " + ConfigHandler.AUTO_ATTR + "=\"" + (pack.getAutoInstalled() ? "true" : "false") + "\"" + //
                " " + ConfigHandler.HAS_PLUGINS_ATTR + "=\"" + (pack.hasPlugins() ? "true" : "false") + "\"" + //
                " " + ConfigHandler.LICENSE_ATTR + "=\"" + pack.getLicense() + "\"" + //
                " " + ConfigHandler.AUTHOR_ATTR + "=\"" + pack.getAuthor() + "\"" + //
                " " + ConfigHandler.MAINTAINER_ATTR + "=\"" + pack.getMaintainer() + "\"" + //
                " " + ConfigHandler.LOGO_URL_ATTR + "=\"" + pack.getLogoURL() + "\"" + //
                " " + ConfigHandler.KEYWORDS_ATTR + "=\"" + pack.getKeywords() + "\"" + //
                ">" + nl);
        for (String dep : pack.getDependencies()) {
            writer.write(" <" + ConfigHandler.DEPENDENCY + " " + ConfigHandler.NAME_ATTR + "=\"" + dep + "\""
                    + " />" + nl);
        }
        for (String confl : pack.getConflicts()) {
            writer.write(" <" + ConfigHandler.CONFLICT + " " + ConfigHandler.NAME_ATTR + "=\"" + confl + "\""
                    + " />" + nl);
        }
        writer.write(" </" + ConfigHandler.PACKAGE + ">" + nl);
    }
}
| |
package org.cqframework.cql.cql2elm.model;
import org.hl7.cql.model.*;
import org.hl7.elm_modelinfo.r1.*;
import java.util.*;
public class ModelImporter {
private ModelInfo modelInfo;
private Map<String, TypeInfo> typeInfoIndex;
private Map<String, DataType> resolvedTypes;
private List<DataType> dataTypes;
private List<Conversion> conversions;
public ModelImporter(ModelInfo modelInfo, Iterable<DataType> systemTypes) {
if (modelInfo == null) {
throw new IllegalArgumentException("modelInfo is null");
}
this.modelInfo = modelInfo;
this.typeInfoIndex = new HashMap<>();
this.resolvedTypes = new HashMap<>();
this.dataTypes = new ArrayList<>();
this.conversions = new ArrayList<>();
// Import system types
if (systemTypes != null) {
for (DataType systemType : systemTypes) {
if (systemType instanceof NamedType) {
NamedType namedSystemType = (NamedType)systemType;
this.resolvedTypes.put(namedSystemType.getName(), systemType);
}
}
}
// Import model types
for (TypeInfo t : this.modelInfo.getTypeInfo()) {
if (t instanceof SimpleTypeInfo) {
typeInfoIndex.put(ensureUnqualified(((SimpleTypeInfo)t).getName()), t);
}
else if (t instanceof ClassInfo) {
ClassInfo classInfo = (ClassInfo)t;
if (classInfo.getName() != null) {
typeInfoIndex.put(ensureUnqualified(classInfo.getName()), classInfo);
}
}
}
// Import model conversions
for (ConversionInfo c : this.modelInfo.getConversionInfo()) {
DataType fromType = resolveTypeNameOrSpecifier(c.getFromType(), c.getFromTypeSpecifier());
DataType toType = resolveTypeNameOrSpecifier(c.getToType(), c.getToTypeSpecifier());
int qualifierIndex = c.getFunctionName().indexOf('.');
String libraryName = qualifierIndex >= 0 ? c.getFunctionName().substring(0, qualifierIndex) : null;
String functionName = qualifierIndex >= 0 ? c.getFunctionName().substring(qualifierIndex + 1) : null;
Operator operator = new Operator(functionName, new Signature(fromType), toType);
if (libraryName != null) {
operator.setLibraryName(libraryName);
}
// All conversions loaded as part of a model are implicit
Conversion conversion = new Conversion(operator, true);
conversions.add(conversion);
}
for (TypeInfo t: this.modelInfo.getTypeInfo()) {
dataTypes.add(resolveTypeInfo(t));
}
if (systemTypes != null) {
for (DataType systemType : systemTypes) {
if (systemType instanceof NamedType) {
NamedType namedSystemType = (NamedType)systemType;
this.resolvedTypes.remove(namedSystemType.getName());
}
}
}
}
public Map<String, DataType> getTypes() { return resolvedTypes; }
public Iterable<Conversion> getConversions() { return conversions; }
private String casify(String typeName) {
return casify(typeName, this.modelInfo.isCaseSensitive() != null ? this.modelInfo.isCaseSensitive() : false);
}
private String casify(String typeName, boolean caseSensitive) {
return caseSensitive ? typeName.toLowerCase() : typeName;
}
private DataType resolveTypeInfo(TypeInfo t) {
if (t instanceof SimpleTypeInfo) {
return resolveSimpleType((SimpleTypeInfo)t);
}
else if (t instanceof ClassInfo) {
return resolveClassType((ClassInfo)t);
}
else if (t instanceof TupleTypeInfo) {
return resolveTupleType((TupleTypeInfo)t);
}
else if (t instanceof IntervalTypeInfo) {
return resolveIntervalType((IntervalTypeInfo)t);
}
else if (t instanceof ListTypeInfo) {
return resolveListType((ListTypeInfo)t);
}
else if (t instanceof ChoiceTypeInfo) {
return resolveChoiceType((ChoiceTypeInfo)t);
}
return null;
}
private DataType resolveTypeSpecifier(TypeSpecifier typeSpecifier) {
if (typeSpecifier == null) {
return null;
}
if (typeSpecifier instanceof NamedTypeSpecifier) {
NamedTypeSpecifier namedTypeSpecifier = (NamedTypeSpecifier)typeSpecifier;
String qualifier = namedTypeSpecifier.getModelName();
if (qualifier == null || qualifier.isEmpty()) {
qualifier = this.modelInfo.getName();
}
String qualifiedTypeName = String.format("%s.%s", qualifier, namedTypeSpecifier.getName());
return resolveTypeName(qualifiedTypeName);
}
if (typeSpecifier instanceof IntervalTypeSpecifier) {
IntervalTypeSpecifier intervalTypeSpecifier = (IntervalTypeSpecifier)typeSpecifier;
DataType pointType = resolveTypeNameOrSpecifier(intervalTypeSpecifier.getPointType(), intervalTypeSpecifier.getPointTypeSpecifier());
return new IntervalType(pointType);
}
if (typeSpecifier instanceof ListTypeSpecifier) {
ListTypeSpecifier listTypeSpecifier = (ListTypeSpecifier)typeSpecifier;
DataType elementType = resolveTypeNameOrSpecifier(listTypeSpecifier.getElementType(), listTypeSpecifier.getElementTypeSpecifier());
return new ListType(elementType);
}
if (typeSpecifier instanceof ChoiceTypeSpecifier) {
ChoiceTypeSpecifier choiceTypeSpecifier = (ChoiceTypeSpecifier)typeSpecifier;
List<DataType> choices = new ArrayList<>();
for (TypeSpecifier choice : choiceTypeSpecifier.getChoice()) {
DataType choiceType = resolveTypeSpecifier(choice);
choices.add(choiceType);
}
return new ChoiceType(choices);
}
return null;
}
private DataType resolveTypeName(String typeName) {
if (typeName == null) {
throw new IllegalArgumentException("typeName is null");
}
// NOTE: Preserving the ability to parse string type specifiers for backwards loading compatibility
// typeSpecifier: simpleTypeSpecifier | intervalTypeSpecifier | listTypeSpecifier
// simpleTypeSpecifier: (identifier '.')? identifier
// intervalTypeSpecifier: 'interval' '<' typeSpecifier '>'
// listTypeSpecifier: 'list' '<' typeSpecifier '>'
if (typeName.toLowerCase().startsWith("interval<")) {
DataType pointType = resolveTypeName(typeName.substring(typeName.indexOf('<') + 1, typeName.lastIndexOf('>')));
return new IntervalType(pointType);
}
else if (typeName.toLowerCase().startsWith("list<")) {
DataType elementType = resolveTypeName(typeName.substring(typeName.indexOf('<') + 1, typeName.lastIndexOf('>')));
return new ListType(elementType);
}
DataType result = lookupType(typeName);
if (result == null) {
TypeInfo typeInfo = lookupTypeInfo(ensureUnqualified(typeName));
if (typeInfo == null) {
throw new IllegalArgumentException(String.format("Could not resolve type info for type name %s.", typeName));
}
result = resolveTypeInfo(typeInfo);
}
return result;
}
private DataType resolveTypeNameOrSpecifier(String typeName, TypeSpecifier typeSpecifier) {
if ((typeName == null || typeName.isEmpty()) && typeSpecifier == null) {
return null;
}
if (typeSpecifier != null) {
return resolveTypeSpecifier(typeSpecifier);
}
return resolveTypeName(typeName);
}
private DataType lookupType(String typeName) {
if (typeName == null) {
throw new IllegalArgumentException("typeName is null");
}
return resolvedTypes.get(casify(typeName));
}
private TypeInfo lookupTypeInfo(String typeName) {
if (typeName == null) {
throw new IllegalArgumentException("typeName is null");
}
return typeInfoIndex.get(typeName);
}
// This method is used to ensure backwards compatible loading, type names in model info may be qualified with the model name
private String ensureQualified(String name) {
String qualifier = String.format("%s.", this.modelInfo.getName());
if (!name.startsWith(qualifier)) {
return String.format("%s%s", qualifier, name);
}
return name;
}
// This method is used to ensure backwards compatible loading, type names in model info may be qualified with the model name
private String ensureUnqualified(String name) {
if (name.startsWith(String.format("%s.", this.modelInfo.getName()))) {
return name.substring(name.indexOf('.') + 1);
}
return name;
}
private SimpleType resolveSimpleType(SimpleTypeInfo t) {
String qualifiedTypeName = ensureQualified(t.getName());
DataType lookupType = lookupType(qualifiedTypeName);
if (lookupType instanceof ClassType) {
throw new IllegalArgumentException("Expected instance of SimpleType but found instance of ClassType instead.");
}
SimpleType result = (SimpleType)lookupType(qualifiedTypeName);
if (result == null) {
if (qualifiedTypeName.equals(DataType.ANY.getName())) {
result = DataType.ANY;
}
else {
result = new SimpleType(qualifiedTypeName, resolveTypeNameOrSpecifier(t.getBaseType(), t.getBaseTypeSpecifier()));
}
resolvedTypes.put(casify(result.getName()), result);
}
return result;
}
private Collection<TupleTypeElement> resolveTupleTypeElements(Collection<TupleTypeInfoElement> infoElements) {
List<TupleTypeElement> elements = new ArrayList();
for (TupleTypeInfoElement e : infoElements) {
elements.add(new TupleTypeElement(e.getName(), resolveTypeNameOrSpecifier(e.getType(), e.getTypeSpecifier())));
}
return elements;
}
private TupleType resolveTupleType(TupleTypeInfo t) {
TupleType result = new TupleType(resolveTupleTypeElements(t.getElement()));
return result;
}
private Collection<ClassTypeElement> resolveClassTypeElements(Collection<ClassInfoElement> infoElements) {
List<ClassTypeElement> elements = new ArrayList();
for (ClassInfoElement e : infoElements) {
elements.add(new ClassTypeElement(e.getName(), resolveTypeNameOrSpecifier(e.getType(), e.getTypeSpecifier()), e.isProhibited(), e.isOneBased()));
}
return elements;
}
private ClassType resolveClassType(ClassInfo t) {
if (t.getName() == null) {
throw new IllegalArgumentException("Class definition must have a name.");
}
String qualifiedName = ensureQualified(t.getName());
ClassType result = (ClassType)lookupType(qualifiedName);
if (result == null) {
if (t instanceof ProfileInfo) {
result = new ProfileType(qualifiedName, resolveTypeNameOrSpecifier(t.getBaseType(), t.getBaseTypeSpecifier()));
}
else {
result = new ClassType(qualifiedName, resolveTypeNameOrSpecifier(t.getBaseType(), t.getBaseTypeSpecifier()));
}
resolvedTypes.put(casify(result.getName()), result);
result.addElements(resolveClassTypeElements(t.getElement()));
result.setIdentifier(t.getIdentifier());
result.setLabel(t.getLabel());
result.setRetrievable(t.isRetrievable());
result.setPrimaryCodePath(t.getPrimaryCodePath());
}
return result;
}
/** Resolves an IntervalTypeInfo by resolving its point type name or specifier. */
private IntervalType resolveIntervalType(IntervalTypeInfo t) {
    return new IntervalType(resolveTypeNameOrSpecifier(t.getPointType(), t.getPointTypeSpecifier()));
}
/** Resolves a ListTypeInfo by resolving its element type name or specifier. */
private ListType resolveListType(ListTypeInfo t) {
    return new ListType(resolveTypeNameOrSpecifier(t.getElementType(), t.getElementTypeSpecifier()));
}
/** Resolves a ChoiceTypeInfo by resolving each alternative type specifier. */
private ChoiceType resolveChoiceType(ChoiceTypeInfo t) {
    final ArrayList<DataType> resolvedAlternatives = new ArrayList<>(t.getType().size());
    for (TypeSpecifier alternative : t.getType()) {
        resolvedAlternatives.add(resolveTypeSpecifier(alternative));
    }
    return new ChoiceType(resolvedAlternatives);
}
}
| |
/*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.abcdroid.devfest12.ui;
import com.google.analytics.tracking.android.EasyTracker;
import net.abcdroid.devfest12.R;
import com.google.api.android.plus.GooglePlus;
import com.google.api.android.plus.PlusOneButton;
import com.actionbarsherlock.app.SherlockFragment;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import com.actionbarsherlock.view.MenuItem;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.RectF;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
import net.abcdroid.devfest12.Config;
import net.abcdroid.devfest12.calendar.SessionAlarmService;
import net.abcdroid.devfest12.calendar.SessionCalendarService;
import net.abcdroid.devfest12.provider.ScheduleContract;
import net.abcdroid.devfest12.util.FractionalTouchDelegate;
import net.abcdroid.devfest12.util.HelpUtils;
import net.abcdroid.devfest12.util.ImageFetcher;
import net.abcdroid.devfest12.util.SessionsHelper;
import net.abcdroid.devfest12.util.UIUtils;
import static net.abcdroid.devfest12.util.LogUtils.LOGD;
import static net.abcdroid.devfest12.util.LogUtils.makeLogTag;
/**
* A fragment that shows detail information for a session, including session title, abstract,
* time information, speaker photos and bios, etc.
*
* <p>This fragment is used in a number of activities, including
* {@link net.abcdroid.devfest12.ui.phone.SessionDetailActivity},
* {@link net.abcdroid.devfest12.ui.tablet.SessionsVendorsMultiPaneActivity},
* {@link net.abcdroid.devfest12.ui.tablet.MapMultiPaneActivity}, etc.
*/
public class SessionDetailFragment extends SherlockFragment implements
LoaderManager.LoaderCallbacks<Cursor> {
private static final String TAG = makeLogTag(SessionDetailFragment.class);
// Set this boolean extra to true to show a variable height header
public static final String EXTRA_VARIABLE_HEIGHT_HEADER =
        "com.google.android.iosched.extra.VARIABLE_HEIGHT_HEADER";
// Identity of the session being displayed (derived from the fragment
// arguments in onCreate()).
private String mSessionId;
private Uri mSessionUri;
// Session metadata populated by onSessionQueryComplete().
private long mSessionBlockStart;
private long mSessionBlockEnd;
private String mTitleString;
private String mHashtags;
private String mUrl;
private String mRoomId;
private String mRoomName;
// mInitStarred records the starred state at load time so onStop() can tell
// whether the user toggled it during this visit.
private boolean mStarred;
private boolean mInitStarred;
// Options-menu items; null until onCreateOptionsMenu() runs.
private MenuItem mShareMenuItem;
private MenuItem mStarMenuItem;
private MenuItem mSocialStreamMenuItem;
// Views resolved in onCreateView().
private ViewGroup mRootView;
private TextView mTitle;
private TextView mSubtitle;
private PlusOneButton mPlusOneButton;
private TextView mAbstract;
private TextView mRequirements;
// Loader bookkeeping: which cursors have completed and whether any summary
// content was rendered (controls the android.R.id.empty view).
private boolean mSessionCursor = false;
private boolean mSpeakersCursor = false;
private boolean mHasSummaryContent = false;
private boolean mVariableHeightHeader = false;
private ImageFetcher mImageFetcher;
// UI operations queued until the menu items exist; drained by
// tryExecuteDeferredUiOperations().
private List<Runnable> mDeferredUiOperations = new ArrayList<Runnable>();
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Google+ client is needed for the +1 button in the header.
    GooglePlus.initialize(getActivity(), Config.API_KEY, Config.CLIENT_ID);
    // The target session arrives as an intent-style data URI packed into
    // the fragment arguments.
    final Intent intent = BaseActivity.fragmentArgumentsToIntent(getArguments());
    mSessionUri = intent.getData();
    if (mSessionUri == null) {
        // No session to show; skip loader and image-fetcher setup entirely.
        return;
    }
    mSessionId = ScheduleContract.Sessions.getSessionId(mSessionUri);
    mVariableHeightHeader = intent.getBooleanExtra(EXTRA_VARIABLE_HEIGHT_HEADER, false);
    // Kick off (or restart) both queries; results arrive in onLoadFinished().
    LoaderManager manager = getLoaderManager();
    manager.restartLoader(SessionsQuery._TOKEN, null, this);
    manager.restartLoader(SpeakersQuery._TOKEN, null, this);
    mImageFetcher = UIUtils.getImageFetcher(getActivity());
    mImageFetcher.setImageFadeIn(false);
    setHasOptionsMenu(true);
    HelpUtils.maybeShowAddToScheduleTutorial(getActivity());
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
        Bundle savedInstanceState) {
    mRootView = (ViewGroup) inflater.inflate(R.layout.fragment_session_detail, null);
    mTitle = (TextView) mRootView.findViewById(R.id.session_title);
    mSubtitle = (TextView) mRootView.findViewById(R.id.session_subtitle);
    // Larger target triggers plus one button
    mPlusOneButton = (PlusOneButton) mRootView.findViewById(R.id.plus_one_button);
    final View plusOneParent = mRootView.findViewById(R.id.header_session);
    // Extend the +1 button's touch target to the right 40% of the header.
    FractionalTouchDelegate.setupDelegate(plusOneParent, mPlusOneButton,
            new RectF(0.6f, 0f, 1f, 1.0f));
    mAbstract = (TextView) mRootView.findViewById(R.id.session_abstract);
    mRequirements = (TextView) mRootView.findViewById(R.id.session_requirements);
    if (mVariableHeightHeader) {
        // Let the header grow with its content instead of a fixed height.
        View headerView = mRootView.findViewById(R.id.header_session);
        ViewGroup.LayoutParams layoutParams = headerView.getLayoutParams();
        layoutParams.height = ViewGroup.LayoutParams.WRAP_CONTENT;
        headerView.setLayoutParams(layoutParams);
    }
    return mRootView;
}
@Override
public void onStop() {
    super.onStop();
    // Only act if the starred state actually changed during this visit.
    if (mInitStarred != mStarred) {
        // Update Calendar event through the Calendar API on Android 4.0 or new versions.
        if (UIUtils.hasICS()) {
            Intent intent;
            if (mStarred) {
                // Set up intent to add session to Calendar, if it doesn't exist already.
                intent = new Intent(SessionCalendarService.ACTION_ADD_SESSION_CALENDAR,
                        mSessionUri);
                intent.putExtra(SessionCalendarService.EXTRA_SESSION_BLOCK_START,
                        mSessionBlockStart);
                intent.putExtra(SessionCalendarService.EXTRA_SESSION_BLOCK_END,
                        mSessionBlockEnd);
                intent.putExtra(SessionCalendarService.EXTRA_SESSION_ROOM, mRoomName);
                intent.putExtra(SessionCalendarService.EXTRA_SESSION_TITLE, mTitleString);
            } else {
                // Set up intent to remove session from Calendar, if exists.
                intent = new Intent(SessionCalendarService.ACTION_REMOVE_SESSION_CALENDAR,
                        mSessionUri);
                intent.putExtra(SessionCalendarService.EXTRA_SESSION_BLOCK_START,
                        mSessionBlockStart);
                intent.putExtra(SessionCalendarService.EXTRA_SESSION_BLOCK_END,
                        mSessionBlockEnd);
                intent.putExtra(SessionCalendarService.EXTRA_SESSION_TITLE, mTitleString);
            }
            intent.setClass(getActivity(), SessionCalendarService.class);
            getActivity().startService(intent);
        }
        // Schedule a reminder notification only for newly-starred sessions
        // that have not started yet.
        if (mStarred && System.currentTimeMillis() < mSessionBlockStart) {
            setupNotification();
        }
    }
}
@Override
public void onPause() {
    super.onPause();
    // Flush the speaker-image cache while the fragment is not in the foreground.
    mImageFetcher.flushCache();
}
@Override
public void onDestroy() {
    super.onDestroy();
    // Release the image cache for good; the fragment is going away.
    mImageFetcher.closeCache();
}
/**
 * Asks {@link SessionAlarmService} to schedule an alarm that will fire a
 * system notification for this session's time block.
 */
private void setupNotification() {
    final Context context = getActivity();
    final Intent alarmIntent = new Intent(
            SessionAlarmService.ACTION_SCHEDULE_STARRED_BLOCK,
            null, context, SessionAlarmService.class);
    alarmIntent.putExtra(SessionAlarmService.EXTRA_SESSION_START, mSessionBlockStart);
    alarmIntent.putExtra(SessionAlarmService.EXTRA_SESSION_END, mSessionBlockEnd);
    context.startService(alarmIntent);
}
/**
 * Handles the {@link SessionsQuery} {@link Cursor}: binds the session title,
 * subtitle, abstract and requirements, configures the +1 button, queues
 * menu-item updates until the menu exists, and builds the links section.
 */
private void onSessionQueryComplete(Cursor cursor) {
    mSessionCursor = true;
    if (!cursor.moveToFirst()) {
        // No session row; leave the UI untouched.
        return;
    }
    mTitleString = cursor.getString(SessionsQuery.TITLE);
    // Format time block this session occupies
    mSessionBlockStart = cursor.getLong(SessionsQuery.BLOCK_START);
    mSessionBlockEnd = cursor.getLong(SessionsQuery.BLOCK_END);
    mRoomName = cursor.getString(SessionsQuery.ROOM_NAME);
    final String subtitle = UIUtils.formatSessionSubtitle(
            mTitleString, mSessionBlockStart, mSessionBlockEnd, mRoomName, getActivity());
    mTitle.setText(mTitleString);
    mUrl = cursor.getString(SessionsQuery.URL);
    if (TextUtils.isEmpty(mUrl)) {
        mUrl = "";
    }
    mHashtags = cursor.getString(SessionsQuery.HASHTAGS);
    if (!TextUtils.isEmpty(mHashtags)) {
        // Only sessions with hashtags get the social-stream menu item.
        enableSocialStreamMenuItemDeferred();
    }
    mRoomId = cursor.getString(SessionsQuery.ROOM_ID);
    setupShareMenuItemDeferred();
    // Remember the initial starred state so onStop() can detect a toggle.
    showStarredDeferred(mInitStarred = (cursor.getInt(SessionsQuery.STARRED) != 0));
    final String sessionAbstract = cursor.getString(SessionsQuery.ABSTRACT);
    if (!TextUtils.isEmpty(sessionAbstract)) {
        UIUtils.setTextMaybeHtml(mAbstract, sessionAbstract);
        mAbstract.setVisibility(View.VISIBLE);
        mHasSummaryContent = true;
    } else {
        mAbstract.setVisibility(View.GONE);
    }
    mPlusOneButton.setSize(PlusOneButton.Size.TALL);
    String url = cursor.getString(SessionsQuery.URL);
    if (TextUtils.isEmpty(url)) {
        mPlusOneButton.setVisibility(View.GONE);
    } else {
        mPlusOneButton.setUrl(url);
    }
    final View requirementsBlock = mRootView.findViewById(R.id.session_requirements_block);
    final String sessionRequirements = cursor.getString(SessionsQuery.REQUIREMENTS);
    if (!TextUtils.isEmpty(sessionRequirements)) {
        UIUtils.setTextMaybeHtml(mRequirements, sessionRequirements);
        requirementsBlock.setVisibility(View.VISIBLE);
        mHasSummaryContent = true;
    } else {
        requirementsBlock.setVisibility(View.GONE);
    }
    // Show empty message when all data is loaded, and nothing to show
    if (mSpeakersCursor && !mHasSummaryContent) {
        mRootView.findViewById(android.R.id.empty).setVisibility(View.VISIBLE);
    }
    // BUG FIX: a stray "linksContainer.setEnabled(false)" was jammed onto
    // this line; the container only hosts the clickable link rows built
    // below and must not be disabled. Removed.
    ViewGroup linksContainer = (ViewGroup) mRootView.findViewById(R.id.links_container);
    linksContainer.removeAllViews();
    LayoutInflater inflater = getLayoutInflater(null);
    boolean hasLinks = false;
    final Context context = mRootView.getContext();
    // Render I/O live link only while the session is in progress.
    final boolean hasLivestream = !TextUtils.isEmpty(
            cursor.getString(SessionsQuery.LIVESTREAM_URL));
    long currentTimeMillis = UIUtils.getCurrentTime(context);
    if (UIUtils.hasHoneycomb() // Needs Honeycomb+ for the live stream
            && hasLivestream
            && currentTimeMillis > mSessionBlockStart
            && currentTimeMillis <= mSessionBlockEnd) {
        hasLinks = true;
        // Create the link item
        ViewGroup linkContainer = (ViewGroup)
                inflater.inflate(R.layout.list_item_session_link, linksContainer, false);
        ((TextView) linkContainer.findViewById(R.id.link_text)).setText(
                R.string.session_link_livestream);
        linkContainer.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                fireLinkEvent(R.string.session_link_livestream);
                Intent livestreamIntent = new Intent(Intent.ACTION_VIEW, mSessionUri);
                livestreamIntent.setClass(context, SessionLivestreamActivity.class);
                startActivity(livestreamIntent);
            }
        });
        linksContainer.addView(linkContainer);
    }
    // Render normal links
    for (int i = 0; i < SessionsQuery.LINKS_INDICES.length; i++) {
        final String linkUrl = cursor.getString(SessionsQuery.LINKS_INDICES[i]);
        if (!TextUtils.isEmpty(linkUrl)) {
            hasLinks = true;
            // Create the link item
            ViewGroup linkContainer = (ViewGroup)
                    inflater.inflate(R.layout.list_item_session_link, linksContainer, false);
            ((TextView) linkContainer.findViewById(R.id.link_text)).setText(
                    SessionsQuery.LINKS_TITLES[i]);
            final int linkTitleIndex = i;
            linkContainer.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    fireLinkEvent(SessionsQuery.LINKS_TITLES[linkTitleIndex]);
                    Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(linkUrl));
                    intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
                    UIUtils.safeOpenLink(context, intent);
                }
            });
            linksContainer.addView(linkContainer);
        }
    }
    // Show past/present/future and livestream status for this block.
    UIUtils.updateTimeAndLivestreamBlockUI(context,
            mSessionBlockStart, mSessionBlockEnd, hasLivestream,
            null, null, mSubtitle, subtitle);
    // mRootView.findViewById(R.id.session_links_block)
    // .setVisibility(hasLinks ? View.VISIBLE : View.GONE);
    EasyTracker.getTracker().trackView("Session: " + mTitleString);
    LOGD("Tracker", "Session: " + mTitleString);
}
/**
 * Shows the social-stream menu item once the options menu exists. Only called
 * when the session has hashtags (see onSessionQueryComplete).
 */
private void enableSocialStreamMenuItemDeferred() {
    mDeferredUiOperations.add(new Runnable() {
        @Override
        public void run() {
            // BUG FIX: this method "enables" the stream item for sessions
            // that have hashtags, so it must make the item visible —
            // setVisible(false) left it permanently hidden.
            mSocialStreamMenuItem.setVisible(true);
        }
    });
    tryExecuteDeferredUiOperations();
}
/**
 * Updates the star menu item to {@code starred}, deferring the update until
 * the options menu has been created.
 */
private void showStarredDeferred(final boolean starred) {
    final Runnable operation = new Runnable() {
        @Override
        public void run() {
            showStarred(starred);
        }
    };
    mDeferredUiOperations.add(operation);
    tryExecuteDeferredUiOperations();
}
/** Applies the given starred state to the star menu item and remembers it. */
private void showStarred(boolean starred) {
    final int titleRes;
    final int iconRes;
    if (starred) {
        titleRes = R.string.description_remove_schedule;
        iconRes = R.drawable.ic_action_remove_schedule;
    } else {
        titleRes = R.string.description_add_schedule;
        iconRes = R.drawable.ic_action_add_schedule;
    }
    mStarMenuItem.setTitle(titleRes);
    mStarMenuItem.setIcon(iconRes);
    mStarred = starred;
}
/**
 * Configures the share menu item with the session's title, hashtags and URL,
 * deferring the work until the options menu has been created.
 */
private void setupShareMenuItemDeferred() {
    final Runnable operation = new Runnable() {
        @Override
        public void run() {
            final SessionsHelper helper = new SessionsHelper(getActivity());
            helper.tryConfigureShareMenuItem(mShareMenuItem, R.string.share_template,
                    mTitleString, mHashtags, mUrl);
        }
    };
    mDeferredUiOperations.add(operation);
    tryExecuteDeferredUiOperations();
}
/**
 * Drains the queued menu-item operations, but only once the menu items have
 * been resolved in onCreateOptionsMenu(); otherwise they stay queued.
 */
private void tryExecuteDeferredUiOperations() {
    if (mStarMenuItem == null || mSocialStreamMenuItem == null) {
        return;
    }
    for (Runnable operation : mDeferredUiOperations) {
        operation.run();
    }
    mDeferredUiOperations.clear();
}
/**
 * Handles the {@link SpeakersQuery} {@link Cursor}: inflates one speaker
 * detail row per speaker (photo, header, abstract) and links the photo to the
 * speaker's profile URL when available.
 */
private void onSpeakersQueryComplete(Cursor cursor) {
    mSpeakersCursor = true;
    // TODO: remove existing speakers from layout, since this cursor might be from a data change
    final ViewGroup speakersGroup = (ViewGroup)
            mRootView.findViewById(R.id.session_speakers_block);
    final LayoutInflater inflater = getActivity().getLayoutInflater();
    boolean hasSpeakers = false;
    while (cursor.moveToNext()) {
        final String speakerName = cursor.getString(SpeakersQuery.SPEAKER_NAME);
        if (TextUtils.isEmpty(speakerName)) {
            // Rows without a name are skipped entirely.
            continue;
        }
        final String speakerImageUrl = cursor.getString(SpeakersQuery.SPEAKER_IMAGE_URL);
        final String speakerCompany = cursor.getString(SpeakersQuery.SPEAKER_COMPANY);
        final String speakerUrl = cursor.getString(SpeakersQuery.SPEAKER_URL);
        final String speakerAbstract = cursor.getString(SpeakersQuery.SPEAKER_ABSTRACT);
        // Header is "Name" or "Name, Company".
        String speakerHeader = speakerName;
        if (!TextUtils.isEmpty(speakerCompany)) {
            speakerHeader += ", " + speakerCompany;
        }
        final View speakerView = inflater
                .inflate(R.layout.speaker_detail, speakersGroup, false);
        final TextView speakerHeaderView = (TextView) speakerView
                .findViewById(R.id.speaker_header);
        final ImageView speakerImageView = (ImageView) speakerView
                .findViewById(R.id.speaker_image);
        final TextView speakerAbstractView = (TextView) speakerView
                .findViewById(R.id.speaker_abstract);
        if (!TextUtils.isEmpty(speakerImageUrl)) {
            mImageFetcher.loadThumbnailImage(speakerImageUrl, speakerImageView,
                    R.drawable.person_image_empty);
        }
        speakerHeaderView.setText(speakerHeader);
        speakerImageView.setContentDescription(
                getString(R.string.speaker_googleplus_profile, speakerHeader));
        UIUtils.setTextMaybeHtml(speakerAbstractView, speakerAbstract);
        if (!TextUtils.isEmpty(speakerUrl)) {
            // Photo opens the speaker's profile, preferring the Google+ app.
            speakerImageView.setEnabled(true);
            speakerImageView.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View view) {
                    Intent speakerProfileIntent = new Intent(Intent.ACTION_VIEW,
                            Uri.parse(speakerUrl));
                    speakerProfileIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
                    UIUtils.preferPackageForIntent(getActivity(), speakerProfileIntent,
                            UIUtils.GOOGLE_PLUS_PACKAGE_NAME);
                    UIUtils.safeOpenLink(getActivity(), speakerProfileIntent);
                }
            });
        } else {
            speakerImageView.setEnabled(false);
            speakerImageView.setOnClickListener(null);
        }
        speakersGroup.addView(speakerView);
        hasSpeakers = true;
        mHasSummaryContent = true;
    }
    speakersGroup.setVisibility(hasSpeakers ? View.VISIBLE : View.GONE);
    // Show empty message when all data is loaded, and nothing to show
    if (mSessionCursor && !mHasSummaryContent) {
        mRootView.findViewById(android.R.id.empty).setVisibility(View.VISIBLE);
    }
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
    inflater.inflate(R.menu.session_detail, menu);
    // Resolve the menu items, then drain any UI operations that were queued
    // before the menu existed (star state, share config, stream visibility).
    mStarMenuItem = menu.findItem(R.id.menu_star);
    mSocialStreamMenuItem = menu.findItem(R.id.menu_social_stream);
    mShareMenuItem = menu.findItem(R.id.menu_share);
    tryExecuteDeferredUiOperations();
    super.onCreateOptionsMenu(menu, inflater);
}
/**
 * Dispatches the map, star/unstar, share and social-stream menu actions,
 * logging an analytics event for each.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    SessionsHelper helper = new SessionsHelper(getActivity());
    switch (item.getItemId()) {
        case R.id.menu_map:
            EasyTracker.getTracker().trackEvent(
                    "Session", "Map", mTitleString, 0L);
            LOGD("Tracker", "Map: " + mTitleString);
            helper.startMapActivity(mRoomId);
            return true;
        case R.id.menu_star:
            // Toggle, update the menu UI immediately, then persist.
            boolean star = !mStarred;
            showStarred(star);
            helper.setSessionStarred(mSessionUri, star, mTitleString);
            Toast.makeText(
                    getActivity(),
                    getResources().getQuantityString(star
                            ? R.plurals.toast_added_to_schedule
                            : R.plurals.toast_removed_from_schedule, 1, 1),
                    Toast.LENGTH_SHORT).show();
            EasyTracker.getTracker().trackEvent(
                    "Session", star ? "Starred" : "Unstarred", mTitleString, 0L);
            LOGD("Tracker", (star ? "Starred: " : "Unstarred: ") + mTitleString);
            return true;
        case R.id.menu_share:
            // On ICS+ devices, we normally won't reach this as ShareActionProvider will handle
            // sharing.
            helper.shareSession(getActivity(), R.string.share_template, mTitleString,
                    mHashtags, mUrl);
            return true;
        case R.id.menu_social_stream:
            EasyTracker.getTracker().trackEvent(
                    "Session", "Stream", mTitleString, 0L);
            LOGD("Tracker", "Stream: " + mTitleString);
            helper.startSocialStream(UIUtils.getSessionHashtagsString(mHashtags));
            return true;
    }
    return super.onOptionsItemSelected(item);
}
/*
 * Analytics event structure:
 *   Category -> "Session"
 *   Action   -> link text
 *   Label    -> session title
 *   Value    -> 0
 */
public void fireLinkEvent(int actionId) {
    final String action = getActivity().getString(actionId);
    EasyTracker.getTracker().trackEvent("Session", action, mTitleString, 0L);
    LOGD("Tracker", action + ": " + mTitleString);
}
/**
 * Creates the cursor loader for the requested query token: the session row
 * itself, or the speakers directory for the current session.
 */
@Override
public Loader<Cursor> onCreateLoader(int id, Bundle data) {
    if (id == SessionsQuery._TOKEN) {
        return new CursorLoader(getActivity(), mSessionUri, SessionsQuery.PROJECTION,
                null, null, null);
    }
    if (id == SpeakersQuery._TOKEN && mSessionUri != null) {
        final Uri speakersUri = ScheduleContract.Sessions.buildSpeakersDirUri(mSessionId);
        return new CursorLoader(getActivity(), speakersUri, SpeakersQuery.PROJECTION,
                null, null, null);
    }
    return null;
}
/**
 * Routes a finished load to the matching handler; unknown cursors are closed
 * since no handler will own them.
 */
@Override
public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
    if (getActivity() == null) {
        // Fragment is detached; nothing to bind.
        return;
    }
    switch (loader.getId()) {
        case SessionsQuery._TOKEN:
            onSessionQueryComplete(cursor);
            break;
        case SpeakersQuery._TOKEN:
            onSpeakersQueryComplete(cursor);
            break;
        default:
            cursor.close();
            break;
    }
}
// Intentionally a no-op: views keep their last-bound data on loader reset.
@Override
public void onLoaderReset(Loader<Cursor> loader) {}
/**
 * {@link net.abcdroid.devfest12.provider.ScheduleContract.Sessions} query parameters.
 *
 * NOTE: the index constants below must stay in sync with the column order of
 * {@link #PROJECTION}.
 */
private interface SessionsQuery {
    int _TOKEN = 0x1;
    String[] PROJECTION = {
            ScheduleContract.Blocks.BLOCK_START,
            ScheduleContract.Blocks.BLOCK_END,
            ScheduleContract.Sessions.SESSION_LEVEL,
            ScheduleContract.Sessions.SESSION_TITLE,
            ScheduleContract.Sessions.SESSION_ABSTRACT,
            ScheduleContract.Sessions.SESSION_REQUIREMENTS,
            ScheduleContract.Sessions.SESSION_STARRED,
            ScheduleContract.Sessions.SESSION_HASHTAGS,
            ScheduleContract.Sessions.SESSION_URL,
            ScheduleContract.Sessions.SESSION_YOUTUBE_URL,
            ScheduleContract.Sessions.SESSION_PDF_URL,
            ScheduleContract.Sessions.SESSION_NOTES_URL,
            ScheduleContract.Sessions.SESSION_LIVESTREAM_URL,
            ScheduleContract.Sessions.ROOM_ID,
            ScheduleContract.Rooms.ROOM_NAME,
    };
    int BLOCK_START = 0;
    int BLOCK_END = 1;
    int LEVEL = 2;
    int TITLE = 3;
    int ABSTRACT = 4;
    int REQUIREMENTS = 5;
    int STARRED = 6;
    int HASHTAGS = 7;
    int URL = 8;
    int YOUTUBE_URL = 9;
    int PDF_URL = 10;
    int NOTES_URL = 11;
    int LIVESTREAM_URL = 12;
    int ROOM_ID = 13;
    int ROOM_NAME = 14;
    // Cursor columns rendered as link rows, paired index-for-index with
    // LINKS_TITLES below.
    int[] LINKS_INDICES = {
            URL,
            YOUTUBE_URL,
            PDF_URL,
            NOTES_URL,
    };
    int[] LINKS_TITLES = {
            R.string.session_link_main,
            R.string.session_link_youtube,
            R.string.session_link_pdf,
            R.string.session_link_notes,
    };
}
/**
 * Speakers query parameters. The index constants must stay in sync with the
 * column order of {@link #PROJECTION}.
 */
private interface SpeakersQuery {
    int _TOKEN = 0x3;
    String[] PROJECTION = {
            ScheduleContract.Speakers.SPEAKER_NAME,
            ScheduleContract.Speakers.SPEAKER_IMAGE_URL,
            ScheduleContract.Speakers.SPEAKER_COMPANY,
            ScheduleContract.Speakers.SPEAKER_ABSTRACT,
            ScheduleContract.Speakers.SPEAKER_URL,
    };
    int SPEAKER_NAME = 0;
    int SPEAKER_IMAGE_URL = 1;
    int SPEAKER_COMPANY = 2;
    int SPEAKER_ABSTRACT = 3;
    int SPEAKER_URL = 4;
}
}
| |
/**
* DayBodyPane.java
*
* Copyright (c) 2011-2015, JFXtras
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the organization nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package jfxtras.internal.scene.control.skin.agenda.base24hour;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.scene.Cursor;
import javafx.scene.input.MouseButton;
import javafx.scene.layout.Pane;
import javafx.scene.shape.Rectangle;
import jfxtras.internal.scene.control.skin.DateTimeToCalendarHelper;
import jfxtras.internal.scene.control.skin.agenda.AllAppointments;
import jfxtras.scene.control.agenda.Agenda;
import jfxtras.scene.control.agenda.Agenda.Appointment;
import jfxtras.util.NodeUtil;
/**
* Responsible for rendering the appointments within a day
*/
class DayBodyPane extends Pane
{
/**
 * Creates the body pane for one day of the agenda.
 *
 * @param localDate the day this pane renders
 * @param allAppointments the agenda's shared appointment collection
 * @param layoutHints shared layout metrics and skin references
 */
public DayBodyPane(LocalDate localDate, AllAppointments allAppointments, LayoutHelp layoutHints) {
    this.localDateObjectProperty.set(localDate);
    this.allAppointments = allAppointments;
    this.layoutHelp = layoutHints;
    construct();
}
// The date this pane displays; changes re-set the pane id (see construct()).
final ObjectProperty<LocalDate> localDateObjectProperty = new SimpleObjectProperty<LocalDate>(this, "localDate");
final AllAppointments allAppointments;
final LayoutHelp layoutHelp;
/**
 * Wires the pane: rebuilds appointments when the shared collection changes,
 * relayouts on resize, and installs the mouse-drag appointment creation.
 */
private void construct() {
    // for debugging setStyle("-fx-border-color:PINK;-fx-border-width:4px;");
    getStyleClass().add("Day");
    setId("DayBodyPane" + localDateObjectProperty.get()); // for testing
    // react to changes in the appointments
    allAppointments.addOnChangeListener( () -> {
        setupAppointments();
    });
    setupAppointments();
    // change the layout related to the size
    widthProperty().addListener( (observable) -> {
        relayout();
    });
    heightProperty().addListener( (observable) -> {
        relayout();
    });
    setupMouseDrag();
    // for testing
    // NOTE(review): the id set below ("DayBody...") overwrites and differs
    // from the "DayBodyPane..." id set above — confirm which form the tests
    // actually rely on.
    localDateObjectProperty.addListener( (observable) -> {
        setId("DayBody" + localDateObjectProperty.get());
    });
    setId("DayBody" + localDateObjectProperty.get());
}
/**
 * Installs the press/drag/release handlers that let the user draw a new
 * appointment on the day: press shows a ghost rectangle, drag grows it, and
 * release converts the rectangle into an appointment via the skin's
 * new/createAppointment callbacks.
 */
private void setupMouseDrag() {
    // start new appointment
    setOnMousePressed((mouseEvent) -> {
        // only on primary
        if (mouseEvent.getButton().equals(MouseButton.PRIMARY) == false) {
            return;
        }
        // if there is no one to handle the result, don't even bother
        if (layoutHelp.skinnable.createAppointmentCallbackProperty().get() == null && layoutHelp.skinnable.newAppointmentCallbackProperty().get() == null) {
            return;
        }
        // show the rectangle at the pressed Y, spanning the day's width
        setCursor(Cursor.V_RESIZE);
        double lY = NodeUtil.snapXY(mouseEvent.getScreenY() - NodeUtil.screenY(DayBodyPane.this));
        resizeRectangle = new Rectangle(0, lY, layoutHelp.dayWidthProperty.get(), 10);
        resizeRectangle.getStyleClass().add("GhostRectangle");
        getChildren().add(resizeRectangle);
        // this event should not be processed by the appointment area
        mouseEvent.consume();
        dragged = false;
        layoutHelp.skinnable.selectedAppointments().clear();
    });
    // visualize resize
    setOnMouseDragged((mouseEvent) -> {
        if (resizeRectangle == null) {
            return;
        }
        // - calculate the number of pixels from onscreen nodeY (layoutY) to onscreen mouseY
        double lHeight = mouseEvent.getScreenY() - NodeUtil.screenY(resizeRectangle);
        if (lHeight < 5) {
            // enforce a minimum ghost height of 5 pixels
            lHeight = 5;
        }
        resizeRectangle.setHeight(lHeight);
        // no one else
        mouseEvent.consume();
        dragged = true;
    });
    // end resize
    setOnMouseReleased((mouseEvent) -> {
        if (resizeRectangle == null) {
            return;
        }
        // no one else
        mouseEvent.consume();
        // reset ui
        setCursor(Cursor.HAND);
        getChildren().remove(resizeRectangle);
        // must have dragged (otherwise it is considered an "unselect all" action)
        if (dragged == false) {
            return;
        }
        // calculate the starttime: the ghost's Y projected back to a time of
        // day, rounded to 5-minute granularity
        LocalDateTime lStartDateTime = localDateObjectProperty.get().atStartOfDay();
        lStartDateTime = lStartDateTime.plusSeconds( (int)(resizeRectangle.getY() * layoutHelp.durationInMSPerPixelProperty.get() / 1000) );
        lStartDateTime = layoutHelp.roundTimeToNearestMinutes(lStartDateTime, 5);
        // calculate the new end date for the appointment (recalculating the duration)
        LocalDateTime lEndDateTime = lStartDateTime.plusSeconds( (int)(resizeRectangle.getHeight() * layoutHelp.durationInMSPerPixelProperty.get() / 1000) );
        lEndDateTime = layoutHelp.roundTimeToNearestMinutes(lEndDateTime, 5);
        // clean up
        resizeRectangle = null;
        // ask the control to create a new appointment (null may be returned)
        // NOTE: if both callbacks are set, the create callback's result wins.
        Agenda.Appointment lAppointment = null;
        if (layoutHelp.skinnable.newAppointmentCallbackProperty().get() != null) {
            lAppointment = layoutHelp.skinnable.newAppointmentCallbackProperty().get().call(new Agenda.LocalDateTimeRange(lStartDateTime, lEndDateTime));
        }
        if (layoutHelp.skinnable.createAppointmentCallbackProperty().get() != null) {
            lAppointment = layoutHelp.skinnable.createAppointmentCallbackProperty().get().call(new Agenda.CalendarRange(DateTimeToCalendarHelper.createCalendarFromLocalDateTime(lStartDateTime, TimeZone.getDefault(), Locale.getDefault()), DateTimeToCalendarHelper.createCalendarFromLocalDateTime(lEndDateTime, TimeZone.getDefault(), Locale.getDefault())));
        }
        if (lAppointment != null) {
            layoutHelp.skinnable.appointments().add(lAppointment); // the appointments collection is listened to, so they will automatically be refreshed
        }
    });
}
// Ghost rectangle shown while drawing a new appointment; null when inactive.
private Rectangle resizeRectangle = null;
// True once a drag occurred, distinguishing a draw from an "unselect all" click.
private boolean dragged = false;
/**
 * Positions and sizes all tracked (regular and task) appointment panes.
 * The tracked panes are too complex to do via binding (unlike the wholeday flagpoles).
 */
private void relayout()
{
    // prepare: wholeday flagpoles occupy the left edge; the rest of the day
    // width is divided over the overlap tracks
    int lWholedayCnt = wholedayAppointmentBodyPanes.size();
    double lAllFlagpolesWidth = layoutHelp.wholedayAppointmentFlagpoleWidthProperty.get() * lWholedayCnt;
    double lDayWidth = layoutHelp.dayContentWidthProperty.get();
    double lRemainingWidthForAppointments = lDayWidth - lAllFlagpolesWidth;
    double lNumberOfPixelsPerMinute = layoutHelp.dayHeightProperty.get() / (24 * 60);
    // then add all tracked appointments (regular & task) to the day
    for (AppointmentAbstractTrackedPane lAppointmentAbstractTrackedPane : trackedAppointmentBodyPanes) {
        // for this pane specifically
        double lNumberOfTracks = (double)lAppointmentAbstractTrackedPane.clusterOwner.clusterTracks.size();
        double lTrackWidth = lRemainingWidthForAppointments / lNumberOfTracks;
        double lTrackIdx = (double)lAppointmentAbstractTrackedPane.clusterTrackIdx;
        // the X is determined by offsetting the wholeday appointments and then calculate the X of the track the appointment is placed in (available width / number of tracks)
        double lX = lAllFlagpolesWidth + (lTrackWidth * lTrackIdx);
        lAppointmentAbstractTrackedPane.setLayoutX( NodeUtil.snapXY(lX));
        // the Y is determined by the start time in minutes projected onto the total day height (being 24 hours)
        int lStartOffsetInMinutes = (lAppointmentAbstractTrackedPane.startDateTime.getHour() * 60) + lAppointmentAbstractTrackedPane.startDateTime.getMinute();
        double lY = lNumberOfPixelsPerMinute * lStartOffsetInMinutes;
        lAppointmentAbstractTrackedPane.setLayoutY( NodeUtil.snapXY(lY) );
        // the width is the remaining width (subtracting the wholeday appointments) divided by the number of tracks in the cluster
        double lW = lTrackWidth;
        // all but the right-most appointment get 75% extra width, so they underlap the next track
        if (lTrackIdx < lNumberOfTracks - 1) {
            lW *= 1.75;
        }
        lAppointmentAbstractTrackedPane.setPrefWidth( NodeUtil.snapWH(lAppointmentAbstractTrackedPane.getLayoutX(), lW) );
        // the height is determined by the duration projected against the total dayHeight (being 24 hours)
        double lH;
        if (lAppointmentAbstractTrackedPane instanceof AppointmentTaskBodyPane) {
            // tasks are rendered as a fixed 5-pixel marker
            lH = 5;
        }
        else {
            long lHeightInMinutes = lAppointmentAbstractTrackedPane.durationInMS / 1000 / 60;
            lH = lNumberOfPixelsPerMinute * lHeightInMinutes;
            // the height has a minimum size, in order to be able to render sensibly
            if (lH < 2 * layoutHelp.paddingProperty.get()) {
                lH = 2 * layoutHelp.paddingProperty.get();
            }
        }
        lAppointmentAbstractTrackedPane.setPrefHeight( NodeUtil.snapWH(lAppointmentAbstractTrackedPane.getLayoutY(), lH) );
    }
}
/**
 * Rebuilds all appointment panes (wholeday, task, regular) for this day,
 * assigns the regular/task panes to overlap tracks, adds them as children and
 * triggers a relayout.
 */
void setupAppointments() {
    setupWholedayAppointments();
    setupTaskAppointments();
    setupRegularAppointments();
    // place appointments in tracks
    trackedAppointmentBodyPanes.clear();
    trackedAppointmentBodyPanes.addAll(regularAppointmentBodyPanes);
    trackedAppointmentBodyPanes.addAll(taskAppointmentBodyPanes);
    List<? extends AppointmentAbstractTrackedPane> determineTracks = AppointmentRegularBodyPane.determineTracks(trackedAppointmentBodyPanes);
    // add the appointments to the pane in the correct order, so they overlap nicely
    //getChildren().removeAll(determineTracks);
    getChildren().addAll(determineTracks);
    relayout();
}
// Regular + task panes, assigned to overlap tracks by determineTracks().
final List<AppointmentAbstractTrackedPane> trackedAppointmentBodyPanes = new ArrayList<>();
/**
 * Creates and positions a "flagpole" body pane for every wholeday appointment
 * on the displayed date. Panes are laid out side by side from the left edge,
 * each bound to the flagpole width and the full day height, and are added to
 * this pane directly (they do not take part in track layout).
 */
private void setupWholedayAppointments() {
    wholedayAppointments.clear();
    wholedayAppointments.addAll( allAppointments.collectWholedayFor(localDateObjectProperty.get()) );

    // discard the previously rendered panes before rebuilding
    getChildren().removeAll(wholedayAppointmentBodyPanes);
    wholedayAppointmentBodyPanes.clear();

    for (int idx = 0; idx < wholedayAppointments.size(); idx++) {
        Appointment appointment = wholedayAppointments.get(idx);
        AppointmentWholedayBodyPane pane = new AppointmentWholedayBodyPane(localDateObjectProperty.get(), appointment, layoutHelp);
        wholedayAppointmentBodyPanes.add(pane);
        pane.setId(pane.getClass().getSimpleName() + localDateObjectProperty.get() + "/" + idx); // for testing

        // position by binding: the idx-th flagpole sits idx flagpole-widths from the left
        pane.layoutXProperty().bind(NodeUtil.snapXY( layoutHelp.wholedayAppointmentFlagpoleWidthProperty.multiply(idx) ));
        pane.setLayoutY(0);
        pane.prefWidthProperty().bind(layoutHelp.wholedayAppointmentFlagpoleWidthProperty);
        pane.prefHeightProperty().bind(layoutHelp.dayHeightProperty);
    }
    getChildren().addAll(wholedayAppointmentBodyPanes);
}
// Wholeday appointments on the displayed date, and their corresponding flagpole body panes.
final private List<Appointment> wholedayAppointments = new ArrayList<>();
final private List<AppointmentWholedayBodyPane> wholedayAppointmentBodyPanes = new ArrayList<>();
/**
 * Creates a body pane for every task appointment on the displayed date.
 * The new panes are not added to the scene graph here; setupAppointments()
 * adds them later in track order, together with the regular appointment panes.
 */
private void setupTaskAppointments() {
    taskAppointments.clear();
    taskAppointments.addAll( allAppointments.collectTaskFor(localDateObjectProperty.get()) );

    // discard the previously rendered panes before rebuilding
    getChildren().removeAll(taskAppointmentBodyPanes);
    taskAppointmentBodyPanes.clear();

    for (int idx = 0; idx < taskAppointments.size(); idx++) {
        AppointmentTaskBodyPane pane = new AppointmentTaskBodyPane(taskAppointments.get(idx), layoutHelp);
        pane.setId(pane.getClass().getSimpleName() + localDateObjectProperty.get() + "/" + idx); // for testing
        taskAppointmentBodyPanes.add(pane);
    }
    // children are added by setupAppointments() after track assignment
}
// Task appointments on the displayed date, and their corresponding body panes.
final private List<Appointment> taskAppointments = new ArrayList<>();
final private List<AppointmentTaskBodyPane> taskAppointmentBodyPanes = new ArrayList<>();
/**
 * Creates a body pane for every regular appointment on the displayed date.
 * The new panes are not added to the scene graph here; setupAppointments()
 * adds them later in track order, together with the task appointment panes.
 */
private void setupRegularAppointments() {
    regularAppointments.clear();
    regularAppointments.addAll( allAppointments.collectRegularFor(localDateObjectProperty.get()) );

    // discard the previously rendered panes before rebuilding
    getChildren().removeAll(regularAppointmentBodyPanes);
    regularAppointmentBodyPanes.clear();

    for (int idx = 0; idx < regularAppointments.size(); idx++) {
        AppointmentRegularBodyPane pane = new AppointmentRegularBodyPane(localDateObjectProperty.get(), regularAppointments.get(idx), layoutHelp);
        pane.setId(pane.getClass().getSimpleName() + localDateObjectProperty.get() + "/" + idx); // for testing
        regularAppointmentBodyPanes.add(pane);
    }
    // children are added by setupAppointments() after track assignment
}
// Regular (timed) appointments on the displayed date, and their corresponding body panes.
final private List<Appointment> regularAppointments = new ArrayList<>();
final private List<AppointmentRegularBodyPane> regularAppointmentBodyPanes = new ArrayList<>();
/**
 * Converts a click expressed in scene coordinates to the date-time it falls on
 * within this day pane, or null when the click lies outside the pane's bounds.
 * The vertical offset is scaled by the duration-per-pixel factor and truncated
 * to whole seconds. The nano-of-second field is (ab)used as a marker to tell
 * body panes apart from header panes during drag handling.
 *
 * @param x scene coordinate
 * @param y scene coordinate
 * @return the clicked LocalDateTime with the nano field set to the body-pane marker, or null when outside this pane
 */
LocalDateTime convertClickInSceneToDateTime(double x, double y) {
    Rectangle bounds = new Rectangle(NodeUtil.sceneX(this), NodeUtil.sceneY(this), this.getWidth(), this.getHeight());
    if (!bounds.contains(x, y)) {
        return null;
    }
    double offsetY = y - bounds.getY();
    int millis = (int)(offsetY * layoutHelp.durationInMSPerPixelProperty.get());
    LocalDateTime clickedDateTime = localDateObjectProperty.get().atStartOfDay().plusSeconds(millis / 1000);
    // we abuse the nano second to deviate body panes from header panes
    return clickedDateTime.withNano(AppointmentAbstractPane.DRAG_DAY);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class InnerHitsIT extends ESIntegTestCase {
// Registers the plugins the test cluster nodes need: internal index settings
// and the mock script engine used by the script-field assertions below.
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
    return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class);
}
/**
 * Mock script plugin exposing a single inline script named "5" that always
 * returns the string "5"; used by the script-field inner hit assertions.
 */
public static class CustomScriptPlugin extends MockScriptPlugin {
    @Override
    protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
        return Collections.singletonMap("5", script -> "5");
    }
}
/**
 * Basic nested inner hits: indexes two articles with nested comments and
 * verifies that the inner hits section returns the matching nested documents
 * with the correct nested identity (field + offset), and that highlighting,
 * explain, fetch fields, script fields, doc value fields and size all work on
 * the inner hit builder.
 */
public void testSimpleNested() throws Exception {
    // mapping: "comments" is nested, with an analyzed "message" sub-field (fielddata enabled)
    assertAcked(prepareCreate("articles").setMapping(jsonBuilder().startObject().startObject("_doc")
        .startObject("properties")
        .startObject("comments")
        .field("type", "nested")
        .startObject("properties")
        .startObject("message")
        .field("type", "text")
        .field("fielddata", true)
        .endObject()
        .endObject()
        .endObject()
        .startObject("title")
        .field("type", "text")
        .endObject()
        .endObject().endObject().endObject()));
    List<IndexRequestBuilder> requests = new ArrayList<>();
    requests.add(client().prepareIndex("articles").setId("1").setSource(jsonBuilder().startObject()
        .field("title", "quick brown fox")
        .startArray("comments")
        .startObject().field("message", "fox eat quick").endObject()
        .startObject().field("message", "fox ate rabbit x y z").endObject()
        .startObject().field("message", "rabbit got away").endObject()
        .endArray()
        .endObject()));
    requests.add(client().prepareIndex("articles").setId("2").setSource(jsonBuilder().startObject()
        .field("title", "big gray elephant")
        .startArray("comments")
        .startObject().field("message", "elephant captured").endObject()
        .startObject().field("message", "mice squashed by elephant x").endObject()
        .startObject().field("message", "elephant scared by mice x y").endObject()
        .endArray()
        .endObject()));
    indexRandom(true, requests);
    // "fox" matches two of article 1's three comments; inner hits named "comment"
    SearchResponse response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder("comment"))
        ).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertSearchHit(response, 1, hasId("1"));
    assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
    SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
    assertThat(innerHits.getTotalHits().value, equalTo(2L));
    assertThat(innerHits.getHits().length, equalTo(2));
    assertThat(innerHits.getAt(0).getId(), equalTo("1"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(innerHits.getAt(1).getId(), equalTo("1"));
    assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));
    // "elephant" matches all three of article 2's comments
    response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder("comment"))
        ).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertSearchHit(response, 1, hasId("2"));
    assertThat(response.getHits().getAt(0).getShard(), notNullValue());
    assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
    innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
    assertThat(innerHits.getTotalHits().value, equalTo(3L));
    assertThat(innerHits.getHits().length, equalTo(3));
    assertThat(innerHits.getAt(0).getId(), equalTo("2"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(innerHits.getAt(1).getId(), equalTo("2"));
    assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));
    assertThat(innerHits.getAt(2).getId(), equalTo("2"));
    assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2));
    // unnamed inner hits (default name "comments") with highlight, explain, fetch field,
    // script field and size=1 on the inner hit builder
    response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(
            new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message"))
                .setExplain(true)
                .addFetchField("comments.mes*")
                .addScriptField("script",
                    new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap()))
                .setSize(1))).get();
    assertNoFailures(response);
    innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
    assertThat(innerHits.getTotalHits().value, equalTo(2L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(),
        equalTo("<em>fox</em> eat quick"));
    assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(comments.message:fox in"));
    assertThat(
        innerHits.getAt(0).getFields().get("comments").getValue(),
        equalTo(Collections.singletonMap("message", Collections.singletonList("fox eat quick")))
    );
    assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
    // doc value fields on the inner hit builder
    response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(
            new InnerHitBuilder()
                .addDocValueField("comments.mes*")
                .setSize(1))).get();
    assertNoFailures(response);
    innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
    assertThat(innerHits.getHits().length, equalTo(1));
    // NOTE(review): the doc value of the analyzed message field surfaces a single term
    // ("eat") here — depends on fielddata on the text field; confirm if the mapping changes
    assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat"));
}
/**
 * Randomized nested inner hits: every document gets a random number of nested
 * objects in "field1" and "field2"; the expected counts are recorded per
 * document and then checked against the total hits of the two named inner hit
 * sections ("a" and "b"), each truncated to a random page size.
 */
public void testRandomNested() throws Exception {
    assertAcked(prepareCreate("idx").setMapping("field1", "type=nested", "field2", "type=nested"));
    int numDocs = scaledRandomIntBetween(25, 100);
    List<IndexRequestBuilder> requestBuilders = new ArrayList<>();
    // remember how many nested objects each doc was given, per field
    int[] field1InnerObjects = new int[numDocs];
    int[] field2InnerObjects = new int[numDocs];
    for (int i = 0; i < numDocs; i++) {
        int numInnerObjects = field1InnerObjects[i] = scaledRandomIntBetween(1, numDocs);
        XContentBuilder source = jsonBuilder().startObject()
            .field("foo", i)
            .startArray("field1");
        for (int j = 0; j < numInnerObjects; j++) {
            source.startObject().field("x", "y").endObject();
        }
        numInnerObjects = field2InnerObjects[i] = scaledRandomIntBetween(1, numDocs);
        source.endArray().startArray("field2");
        for (int j = 0; j < numInnerObjects; j++) {
            source.startObject().field("x", "y").endObject();
        }
        source.endArray().endObject();
        requestBuilders.add(client().prepareIndex("idx").setId(Integer.toString(i)).setSource(source));
    }
    indexRandom(true, requestBuilders);
    // page size for both inner hit sections; may legitimately be 0
    int size = randomIntBetween(0, numDocs);
    BoolQueryBuilder boolQuery = new BoolQueryBuilder();
    boolQuery.should(nestedQuery("field1", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("a").setSize(size)
        .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC))));
    boolQuery.should(nestedQuery("field2", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("b")
        .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC)).setSize(size)));
    SearchResponse searchResponse = client().prepareSearch("idx")
        .setQuery(boolQuery)
        .setSize(numDocs)
        .addSort("foo", SortOrder.ASC)
        .get();
    assertNoFailures(searchResponse);
    assertHitCount(searchResponse, numDocs);
    assertThat(searchResponse.getHits().getHits().length, equalTo(numDocs));
    // hits are sorted by "foo" (== doc index), so hit i corresponds to doc i
    for (int i = 0; i < numDocs; i++) {
        SearchHit searchHit = searchResponse.getHits().getAt(i);
        assertThat(searchHit.getShard(), notNullValue());
        SearchHits inner = searchHit.getInnerHits().get("a");
        assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i]));
        for (int j = 0; j < field1InnerObjects[i] && j < size; j++) {
            SearchHit innerHit = inner.getAt(j);
            assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1"));
            assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j));
            assertThat(innerHit.getNestedIdentity().getChild(), nullValue());
        }
        inner = searchHit.getInnerHits().get("b");
        assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i]));
        for (int j = 0; j < field2InnerObjects[i] && j < size; j++) {
            SearchHit innerHit = inner.getAt(j);
            assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2"));
            assertThat(innerHit.getNestedIdentity().getOffset(), equalTo(j));
            assertThat(innerHit.getNestedIdentity().getChild(), nullValue());
        }
    }
}
/**
 * Doubly-nested inner hits (comments -> remarks): verifies that nested inner
 * hits can be requested at both levels of a two-level nested mapping, that the
 * nested identity chain (field/offset and child field/offset) is reported
 * correctly, that the second level can be queried directly, and that inner hit
 * _source is returned independently of the parent request's _source setting.
 */
public void testNestedMultipleLayers() throws Exception {
    // mapping: nested "comments", each with a nested "remarks" sub-field
    assertAcked(prepareCreate("articles").setMapping(jsonBuilder().startObject()
        .startObject("_doc").startObject("properties")
        .startObject("comments")
        .field("type", "nested")
        .startObject("properties")
        .startObject("message")
        .field("type", "text")
        .endObject()
        .startObject("remarks")
        .field("type", "nested")
        .startObject("properties")
        .startObject("message").field("type", "text").endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .startObject("title")
        .field("type", "text")
        .endObject()
        .endObject().endObject().endObject()));
    List<IndexRequestBuilder> requests = new ArrayList<>();
    requests.add(client().prepareIndex("articles").setId("1").setSource(jsonBuilder().startObject()
        .field("title", "quick brown fox")
        .startArray("comments")
        .startObject()
        .field("message", "fox eat quick")
        .startArray("remarks").startObject().field("message", "good").endObject().endArray()
        .endObject()
        .startObject()
        .field("message", "hippo is hungry")
        .startArray("remarks").startObject().field("message", "neutral").endObject().endArray()
        .endObject()
        .endArray()
        .endObject()));
    requests.add(client().prepareIndex("articles").setId("2").setSource(jsonBuilder().startObject()
        .field("title", "big gray elephant")
        .startArray("comments")
        .startObject()
        .field("message", "elephant captured")
        .startArray("remarks").startObject().field("message", "bad").endObject().endArray()
        .endObject()
        .endArray()
        .endObject()));
    indexRandom(true, requests);
    // Check we can load the first doubly-nested document.
    SearchResponse response = client().prepareSearch("articles")
        .setQuery(
            nestedQuery("comments",
                nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg)
                    .innerHit(new InnerHitBuilder("remark")),
                ScoreMode.Avg).innerHit(new InnerHitBuilder())
        ).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertSearchHit(response, 1, hasId("1"));
    assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
    SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
    assertThat(innerHits.getTotalHits().value, equalTo(1L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getId(), equalTo("1"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    // the nested "remark" inner hits hang off the first-level inner hit
    innerHits = innerHits.getAt(0).getInnerHits().get("remark");
    assertThat(innerHits.getTotalHits().value, equalTo(1L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getId(), equalTo("1"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
    // Check we can load the second doubly-nested document.
    response = client().prepareSearch("articles")
        .setQuery(
            nestedQuery("comments",
                nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "neutral"), ScoreMode.Avg)
                    .innerHit(new InnerHitBuilder("remark")),
                ScoreMode.Avg).innerHit(new InnerHitBuilder())
        ).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertSearchHit(response, 1, hasId("1"));
    assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
    innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
    assertThat(innerHits.getTotalHits().value, equalTo(1L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getId(), equalTo("1"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
    innerHits = innerHits.getAt(0).getInnerHits().get("remark");
    assertThat(innerHits.getTotalHits().value, equalTo(1L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getId(), equalTo("1"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
    // Directly refer to the second level:
    response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder())).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertSearchHit(response, 1, hasId("2"));
    assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
    innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks");
    assertThat(innerHits.getTotalHits().value, equalTo(1L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getId(), equalTo("2"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
    // Same document reached through inner hits at both levels:
    response = client().prepareSearch("articles")
        .setQuery(
            nestedQuery("comments",
                nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg)
                    .innerHit(new InnerHitBuilder("remark")),
                ScoreMode.Avg).innerHit(new InnerHitBuilder())
        ).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertSearchHit(response, 1, hasId("2"));
    assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
    innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
    assertThat(innerHits.getTotalHits().value, equalTo(1L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getId(), equalTo("2"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    innerHits = innerHits.getAt(0).getInnerHits().get("remark");
    assertThat(innerHits.getTotalHits().value, equalTo(1L));
    assertThat(innerHits.getHits().length, equalTo(1));
    assertThat(innerHits.getAt(0).getId(), equalTo("2"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
    assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
    // Check that inner hits contain _source even when it's disabled on the parent request.
    response = client().prepareSearch("articles")
        .setFetchSource(false)
        .setQuery(
            nestedQuery("comments",
                nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg)
                    .innerHit(new InnerHitBuilder("remark")), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder())
        ).get();
    assertNoFailures(response);
    innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
    innerHits = innerHits.getAt(0).getInnerHits().get("remark");
    assertNotNull(innerHits.getAt(0).getSourceAsMap());
    assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty());
    // ... and also when _source is disabled on the outer inner hit builder
    response = client().prepareSearch("articles")
        .setQuery(
            nestedQuery("comments",
                nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg)
                    .innerHit(new InnerHitBuilder("remark")), ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(false)))
        ).get();
    assertNoFailures(response);
    innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
    innerHits = innerHits.getAt(0).getInnerHits().get("remark");
    assertNotNull(innerHits.getAt(0).getSourceAsMap());
    assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty());
}
// Regression test for issue #9723: a nested field whose value is indexed as a
// single object (not an array) must still be reachable through inner hits.
public void testNestedDefinedAsObject() throws Exception {
    assertAcked(prepareCreate("articles").setMapping("comments", "type=nested", "title", "type=text"));

    List<IndexRequestBuilder> indexRequests = new ArrayList<>();
    indexRequests.add(client().prepareIndex("articles").setId("1").setSource(jsonBuilder().startObject()
        .field("title", "quick brown fox")
        .startObject("comments").field("message", "fox eat quick").endObject()
        .endObject()));
    indexRandom(true, indexRequests);

    SearchResponse response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder()))
        .get();
    assertNoFailures(response);
    assertHitCount(response, 1);

    // the single object becomes one nested doc at offset 0 with no child identity
    SearchHit hit = response.getHits().getAt(0);
    assertThat(hit.getId(), equalTo("1"));
    SearchHits commentHits = hit.getInnerHits().get("comments");
    assertThat(commentHits.getTotalHits().value, equalTo(1L));
    assertThat(commentHits.getAt(0).getId(), equalTo("1"));
    assertThat(commentHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
    assertThat(commentHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(commentHits.getAt(0).getNestedIdentity().getChild(), nullValue());
}
/**
 * Inner hits on a nested field whose parent is a plain object field: fetching
 * _source for such inner hits is not supported and must fail with a clear
 * error, while requests that disable _source on the inner hit builder succeed
 * and report the correct nested identities. Also covers the case where the
 * nested value is indexed as a single object instead of an array.
 */
public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception {
    assertAcked(prepareCreate("articles")
        // number_of_shards = 1, because then we catch the expected exception in the same way.
        // (See expectThrows(...) below)
        .setSettings(Settings.builder().put("index.number_of_shards", 1))
        .setMapping(jsonBuilder().startObject()
            .startObject("properties")
            .startObject("comments")
            .field("type", "object")
            .startObject("properties")
            .startObject("messages").field("type", "nested").endObject()
            .endObject()
            .endObject()
            .endObject()
            .endObject()
        )
    );
    List<IndexRequestBuilder> requests = new ArrayList<>();
    requests.add(client().prepareIndex("articles").setId("1").setSource(jsonBuilder().startObject()
        .field("title", "quick brown fox")
        .startArray("comments")
        .startObject()
        .startArray("messages")
        .startObject().field("message", "fox eat quick").endObject()
        .startObject().field("message", "bear eat quick").endObject()
        .endArray()
        .endObject()
        .startObject()
        .startArray("messages")
        .startObject().field("message", "no fox").endObject()
        .endArray()
        .endObject()
        .endArray()
        .endObject()));
    indexRandom(true, requests);
    // default inner hits (which fetch _source) must fail: the parent object field is not nested
    Exception e = expectThrows(Exception.class, () -> client().prepareSearch("articles").setQuery(nestedQuery("comments.messages",
        matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder())).get());
    assertEquals("Cannot execute inner hits. One or more parent object fields of nested field [comments.messages] are " +
        "not nested. All parent fields need to be nested fields too", e.getCause().getCause().getMessage());
    // explicitly requesting _source fails the same way
    e = expectThrows(Exception.class, () -> client().prepareSearch("articles").setQuery(nestedQuery("comments.messages",
        matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder()
        .setFetchSourceContext(new FetchSourceContext(true)))).get());
    assertEquals("Cannot execute inner hits. One or more parent object fields of nested field [comments.messages] are " +
        "not nested. All parent fields need to be nested fields too", e.getCause().getCause().getMessage());
    // with _source disabled the request succeeds
    SearchResponse response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(false)))).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    SearchHit hit = response.getHits().getAt(0);
    assertThat(hit.getId(), equalTo("1"));
    SearchHits messages = hit.getInnerHits().get("comments.messages");
    assertThat(messages.getTotalHits().value, equalTo(2L));
    assertThat(messages.getAt(0).getId(), equalTo("1"));
    assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
    assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2));
    assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
    assertThat(messages.getAt(1).getId(), equalTo("1"));
    assertThat(messages.getAt(1).getNestedIdentity().getField().string(), equalTo("comments.messages"));
    assertThat(messages.getAt(1).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(messages.getAt(1).getNestedIdentity().getChild(), nullValue());
    response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(false)))).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    hit = response.getHits().getAt(0);
    assertThat(hit.getId(), equalTo("1"));
    messages = hit.getInnerHits().get("comments.messages");
    assertThat(messages.getTotalHits().value, equalTo(1L));
    assertThat(messages.getAt(0).getId(), equalTo("1"));
    assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
    assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
    assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
    // index the message in an object form instead of an array
    requests = new ArrayList<>();
    requests.add(client().prepareIndex("articles").setId("1").setSource(jsonBuilder().startObject()
        .field("title", "quick brown fox")
        .startObject("comments").startObject("messages").field("message", "fox eat quick").endObject().endObject()
        .endObject()));
    indexRandom(true, requests);
    response = client().prepareSearch("articles")
        .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(false)))).get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    hit = response.getHits().getAt(0);
    assertThat(hit.getId(), equalTo("1"));
    messages = hit.getInnerHits().get("comments.messages");
    assertThat(messages.getTotalHits().value, equalTo(1L));
    assertThat(messages.getAt(0).getId(), equalTo("1"));
    assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
    assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
    assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
}
/**
 * Named queries inside nested inner hits: each "should" clause carries a query
 * name (test1/test2/test3), and every inner hit must report exactly the names
 * of the clauses it matched. Inner hits are sorted by nested1.n_field1 so the
 * matched-query assertions are order-stable.
 */
public void testMatchesQueriesNestedInnerHits() throws Exception {
    XContentBuilder builder = jsonBuilder().startObject()
        .startObject("_doc")
        .startObject("properties")
        .startObject("nested1")
        .field("type", "nested")
        .startObject("properties")
        .startObject("n_field1")
        .field("type", "keyword")
        .endObject()
        .endObject()
        .endObject()
        .startObject("field1")
        .field("type", "long")
        .endObject()
        .endObject()
        .endObject()
        .endObject();
    assertAcked(prepareCreate("test").setMapping(builder));
    ensureGreen();
    List<IndexRequestBuilder> requests = new ArrayList<>();
    int numDocs = randomIntBetween(2, 35);
    // doc 0: matches test1 (n_value1_1) and test3 (n_value2_2)
    requests.add(client().prepareIndex("test").setId("0").setSource(jsonBuilder().startObject()
        .field("field1", 0)
        .startArray("nested1")
        .startObject()
        .field("n_field1", "n_value1_1")
        .field("n_field2", "n_value2_1")
        .endObject()
        .startObject()
        .field("n_field1", "n_value1_2")
        .field("n_field2", "n_value2_2")
        .endObject()
        .endArray()
        .endObject()));
    // doc 1: matches only test2 (n_value1_3)
    requests.add(client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject()
        .field("field1", 1)
        .startArray("nested1")
        .startObject()
        .field("n_field1", "n_value1_8")
        .field("n_field2", "n_value2_5")
        .endObject()
        .startObject()
        .field("n_field1", "n_value1_3")
        .field("n_field2", "n_value2_1")
        .endObject()
        .endArray()
        .endObject()));
    // docs 2..numDocs-1: each matches only test3 (n_value2_2)
    for (int i = 2; i < numDocs; i++) {
        requests.add(client().prepareIndex("test").setId(String.valueOf(i)).setSource(jsonBuilder().startObject()
            .field("field1", i)
            .startArray("nested1")
            .startObject()
            .field("n_field1", "n_value1_8")
            .field("n_field2", "n_value2_5")
            .endObject()
            .startObject()
            .field("n_field1", "n_value1_2")
            .field("n_field2", "n_value2_2")
            .endObject()
            .endArray()
            .endObject()));
    }
    indexRandom(true, requests);
    waitForRelocation(ClusterHealthStatus.GREEN);
    QueryBuilder query = boolQuery()
        .should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1"))
        .should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2"))
        .should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3"));
    query = nestedQuery("nested1", query, ScoreMode.Avg).innerHit(
        new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC)));
    SearchResponse searchResponse = client().prepareSearch("test")
        .setQuery(query)
        .setSize(numDocs)
        .addSort("field1", SortOrder.ASC)
        .get();
    assertNoFailures(searchResponse);
    assertAllSuccessful(searchResponse);
    assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs));
    assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("0"));
    assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L));
    assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1"));
    assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3"));
    assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("1"));
    assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
    assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
    assertThat(searchResponse.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2"));
    for (int i = 2; i < numDocs; i++) {
        assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(String.valueOf(i)));
        assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L));
        assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1));
        assertThat(searchResponse.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3"));
    }
}
/**
 * Verifies _source filtering on nested inner hits: filtered fetches return only the
 * requested field, unfiltered fetches return the whole nested doc, a filter on a
 * missing field yields an empty _source, and inner hits keep _source even when it
 * is disabled on the root request.
 */
public void testNestedSource() throws Exception {
    assertAcked(prepareCreate("index1").setMapping("comments", "type=nested"));
    client().prepareIndex("index1").setId("1").setSource(jsonBuilder().startObject()
        .field("message", "quick brown fox")
        .startArray("comments")
        .startObject().field("message", "fox eat quick").field("x", "y").endObject()
        .startObject().field("message", "fox ate rabbit x y z").field("x", "y").endObject()
        .startObject().field("message", "rabbit got away").field("x", "y").endObject()
        .endArray()
        .endObject()).get();
    refresh();

    // the field name (comments.message) used for source filtering should be the same as when using that field for
    // other features (like in the query dsl or aggs) in order for consistency:
    SearchResponse response = client().prepareSearch()
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None)
            .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(true,
                new String[]{"comments.message"}, null))))
        .get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
    // Source filter keeps only "message" — exactly one entry per nested doc.
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
        equalTo("fox eat quick"));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(1));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
        equalTo("fox ate rabbit x y z"));

    // Without a source filter the whole nested doc (message + x => 2 entries) is returned.
    response = client().prepareSearch()
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None)
            .innerHit(new InnerHitBuilder()))
        .get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"),
        equalTo("fox eat quick"));
    // BUG FIX: this assertion previously re-checked inner hit 0 (copy-paste error);
    // it must verify the _source size of inner hit 1.
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().size(), equalTo(2));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(1).getSourceAsMap().get("message"),
        equalTo("fox ate rabbit x y z"));

    // Source filter on a field that does not exist inside the nested document and just check that we do not fail and
    // return an empty _source:
    response = client().prepareSearch()
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "away"), ScoreMode.None)
            .innerHit(new InnerHitBuilder().setFetchSourceContext(new FetchSourceContext(true,
                new String[]{"comments.missing_field"}, null))))
        .get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L));
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0));

    // Check that inner hits contain _source even when it's disabled on the root request.
    response = client().prepareSearch()
        .setFetchSource(false)
        .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None)
            .innerHit(new InnerHitBuilder()))
        .get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L));
    assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty());
}
/**
 * A nested query with inner hits against an index where the field is unmapped
 * must be skipped (not fail the search) when ignore_unmapped is set, while
 * still matching documents from the other index.
 */
public void testInnerHitsWithIgnoreUnmapped() throws Exception {
    assertAcked(prepareCreate("index1")
        .setMapping("nested_type", "type=nested")
    );
    createIndex("index2");
    client().prepareIndex("index1").setId("1").setSource("nested_type", Collections.singletonMap("key", "value")).get();
    client().prepareIndex("index2").setId("3").setSource("key", "value").get();
    refresh();

    // index2 has no mapping for nested_type; ignoreUnmapped on both the nested
    // query and the inner-hit definition keeps the search from failing there.
    QueryBuilder nestedClause = nestedQuery("nested_type", matchAllQuery(), ScoreMode.None)
        .ignoreUnmapped(true)
        .innerHit(new InnerHitBuilder().setIgnoreUnmapped(true));
    SearchResponse searchResponse = client().prepareSearch("index1", "index2")
        .setQuery(boolQuery()
            .should(nestedClause)
            .should(termQuery("key", "value"))
        )
        .get();
    assertNoFailures(searchResponse);
    assertHitCount(searchResponse, 2);
    assertSearchHits(searchResponse, "1", "3");
}
/**
 * With index.max_inner_result_window raised to the maximum array length, an
 * inner-hit size just below that limit must be accepted.
 */
public void testUseMaxDocInsteadOfSize() throws Exception {
    assertAcked(prepareCreate("index2").setMapping("nested", "type=nested"));
    // Lift the inner-result window so the huge size below passes validation.
    client().admin().indices().prepareUpdateSettings("index2")
        .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH))
        .get();
    client().prepareIndex("index2").setId("1")
        .setSource(jsonBuilder().startObject()
            .startArray("nested")
            .startObject().field("field", "value1").endObject()
            .endArray()
            .endObject())
        .setRefreshPolicy(IMMEDIATE)
        .get();

    QueryBuilder nestedWithHugeInnerHit = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
        .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1));
    SearchResponse searchResponse = client().prepareSearch("index2")
        .setQuery(nestedWithHugeInnerHit)
        .get();
    assertNoFailures(searchResponse);
    assertHitCount(searchResponse, 1);
}
/**
 * Inner hit from + size must not exceed index.max_inner_result_window: requests at
 * the limit succeed, requests above it fail with a descriptive message, and raising
 * the setting makes the previously failing requests pass.
 */
public void testTooHighResultWindow() throws Exception {
    assertAcked(prepareCreate("index2").setMapping("nested", "type=nested"));
    client().prepareIndex("index2").setId("1").setSource(jsonBuilder().startObject()
        .startArray("nested")
        .startObject()
        .field("field", "value1")
        .endObject()
        .endArray()
        .endObject())
        .setRefreshPolicy(IMMEDIATE)
        .get();
    // from(50) + size(10) = 60 is within the default window of 100.
    SearchResponse response = client().prepareSearch("index2")
        .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFrom(50).setSize(10).setName("_name")))
        .get();
    assertNoFailures(response);
    assertHitCount(response, 1);
    // from(100) + size(10) = 110 exceeds the default window.
    Exception e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("index2")
        .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")))
        .get());
    assertThat(e.getCause().getMessage(),
        containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]"));
    // Same violation with the excess on size instead of from.
    e = expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("index2")
        .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")))
        .get());
    assertThat(e.getCause().getMessage(),
        containsString("the inner hit definition's [_name]'s from + size must be less than or equal to: [100] but was [110]"));
    // After raising the window to 110, both previously rejected requests succeed.
    client().admin().indices().prepareUpdateSettings("index2")
        .setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110))
        .get();
    response = client().prepareSearch("index2")
        .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")))
        .get();
    assertNoFailures(response);
    response = client().prepareSearch("index2")
        .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")))
        .get();
    assertNoFailures(response);
}
}
| |
/**
* Copyright 2016 Steffen Mueller
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.kit.aifb.mbeantimeseriesreporter;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanException;
import javax.management.MBeanServerConnection;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import edu.kit.aifb.mbeantimeseriesreporter.xml.Attribute;
import edu.kit.aifb.mbeantimeseriesreporter.xml.MBean;
/**
*
* @author Steffen Mueller
* @version 1.0.0
*/
/**
 * Registers one Codahale {@link com.codahale.metrics.Gauge} per configured MBean
 * attribute and refreshes the gauge values on every {@link #query()} call by
 * reading the attributes through an {@link MBeanServerConnection}.
 *
 * @author Steffen Mueller
 * @version 1.0.0
 */
public final class DynamicMBeanMetrics extends MonitorMetricsBase {

    /** Connection to the MBean server the attribute values are read from. */
    private final MBeanServerConnection connection;
    /** One entry per successfully configured attribute; failing entries are removed in query(). */
    private final List<MBeanMonitoring<?>> mbeanMonitorings = new ArrayList<MBeanMonitoring<?>>();
    /** XML configuration describing the MBean and the attributes to monitor. */
    private final MBean mbean;
    /** Host name used as part of the generated metric names. */
    private final String hostname;

    public DynamicMBeanMetrics(MetricRegistry metricsRegistry, MBeanServerConnection connection, String hostname,
            MBean mbean) {
        super(metricsRegistry);
        this.connection = connection;
        this.mbean = mbean;
        this.hostname = hostname;
        configure();
    }

    /**
     * Creates a {@link MBeanMonitoring} plus gauge for every configured attribute.
     * Attributes with an unknown gauge type or malformed object name are skipped.
     */
    private void configure() {
        for (Attribute attribute : mbean.getAttributes()) {
            try {
                if (attribute.getMetricName() == null || attribute.getMetricName().getValue() == null
                        || attribute.getMetricName().getValue().isEmpty())
                    throw new IllegalArgumentException("MetricName must not be null or empty!");
                final String metricName = createMetricName(hostname, attribute.getName());
                MBeanMonitoring<?> monitoring = null;
                switch (attribute.getGaugeType().getValue()) {
                case Double:
                    monitoring = new MBeanMonitoring<Double>(new ObjectName(mbean.getObjectName()),
                            attribute.getMetricName().getValue(), metricName);
                    break;
                case Long:
                    monitoring = new MBeanMonitoring<Long>(new ObjectName(mbean.getObjectName()),
                            attribute.getMetricName().getValue(), metricName);
                    break;
                case Integer:
                    monitoring = new MBeanMonitoring<Integer>(new ObjectName(mbean.getObjectName()),
                            attribute.getMetricName().getValue(), metricName);
                    break;
                case SizeOfList:
                    monitoring = new MBeanMonitoring<Integer>(new ObjectName(mbean.getObjectName()),
                            attribute.getMetricName().getValue(), metricName);
                    // The attribute value is a collection; report its size() instead of the value itself.
                    monitoring.invokeMethod = "size";
                    break;
                case Boolean:
                    monitoring = new MBeanMonitoring<Boolean>(new ObjectName(mbean.getObjectName()),
                            attribute.getMetricName().getValue(), metricName);
                    break;
                default:
                    System.out.println("Invalid GaugeType!");
                }
                // BUG FIX: previously a null monitoring (unknown gauge type fell through the
                // default case) was added to the list and dereferenced, throwing a
                // NullPointerException. Skip such attributes instead.
                if (monitoring == null) {
                    continue;
                }
                mbeanMonitorings.add(monitoring);
                this.metricsRegistry.register(monitoring.metricName, monitoring.gauge);
                this.metricSet.put(monitoring.metricName, monitoring.gauge);
            } catch (MalformedObjectNameException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Reads the current value of every monitored attribute and stores it in the
     * corresponding gauge. Monitorings whose MBean/attribute/method can no longer
     * be resolved are removed; transient I/O errors keep the entry for the next run.
     */
    @Override
    public void query() throws MBeanTimeseriesReporterException {
        ArrayList<MBeanMonitoring<?>> removes = new ArrayList<MBeanMonitoring<?>>();
        for (MBeanMonitoring<?> mbeanMonitoring : mbeanMonitorings) {
            try {
                Object value = connection.getAttribute(mbeanMonitoring.mbeanName, mbeanMonitoring.attributeName);
                if (value != null && mbeanMonitoring.invokeMethod != null) {
                    // Derive the reported value via reflection (e.g. List.size() for SizeOfList).
                    Class<?> c = value.getClass();
                    Method m = c.getDeclaredMethod(mbeanMonitoring.invokeMethod);
                    value = m.invoke(value);
                }
                mbeanMonitoring.setValue(value);
            } catch (InstanceNotFoundException | NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
                // Permanent resolution failure: drop the monitoring after the loop
                // (cannot remove while iterating).
                System.err.println("Instance '" + mbeanMonitoring.mbeanName + "' attribute '"
                        + mbeanMonitoring.attributeName + "' not found! Entry will be removed.");
                e.printStackTrace();
                removes.add(mbeanMonitoring);
            } catch (IOException | ReflectionException | AttributeNotFoundException | MBeanException e) {
                // Possibly transient failure: log and retry on the next query() call.
                System.err.println("Error while querying '" + mbeanMonitoring.mbeanName + "' attribute '"
                        + mbeanMonitoring.attributeName + "'.");
                e.printStackTrace();
            }
        }
        if (!removes.isEmpty()) {
            for (MBeanMonitoring<?> mbeanMonitoring : removes) {
                System.out.println(
                        "Removing monitoring for " + mbeanMonitoring.mbeanName + ", " + mbeanMonitoring.attributeName);
                mbeanMonitorings.remove(mbeanMonitoring);
            }
        }
    }

    @Override
    public String toString() {
        return mbean.getObjectName() + "@" + hostname;
    }

    /**
     * Internal class for managing {@link com.codahale.metrics.Gauge} instances.
     * Declared static: it never touches the enclosing instance, so keeping a
     * hidden outer reference would only risk leaks.
     *
     * @author Steffen Mueller
     * @version 1.0.0
     *
     * @param <T>
     *            The type of the {@link com.codahale.metrics.Gauge}. E.g.,
     *            Double, String, Long, ...
     */
    private static class MBeanMonitoring<T> {
        /**
         * The MBean ObjectName. See e.g.:
         * http://www.oracle.com/technetwork/java/javase/tech/best-practices-jsp
         * -136021.html#mozTocId654884.
         */
        public final ObjectName mbeanName;
        /**
         * The MBean Attribute Name.
         */
        public final String attributeName;
        /**
         * The name of metric. This name is also used for the .csv-filename.
         */
        public final String metricName;
        /** Last value read from the MBean server; reported by the gauge. */
        public T value;
        /** Gauge handed to the MetricRegistry; reads {@link #value}. */
        public Gauge<T> gauge;
        /** Optional no-arg method invoked on the raw value (e.g. "size"); null = report value as-is. */
        public String invokeMethod = null;

        /**
         * Constructor.
         *
         * @param mbeanName
         *            The MBean ObjectName. See e.g.:
         *            http://www.oracle.com/technetwork/java/javase/tech/best-
         *            practices-jsp-136021.html#mozTocId654884.
         * @param attributeName
         *            The MBean Attribute Name.
         * @param metricName
         *            The name of metric. This name is also used for the
         *            .csv-filename.
         */
        public MBeanMonitoring(ObjectName mbeanName, String attributeName, String metricName) {
            this.mbeanName = mbeanName;
            this.attributeName = attributeName;
            this.metricName = metricName;
            this.gauge = new Gauge<T>() {
                @Override
                public T getValue() {
                    return value;
                }
            };
        }

        @SuppressWarnings("unchecked")
        public void setValue(Object value) {
            this.value = (T) value;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.coord;
import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.service.ELService;
import org.apache.oozie.service.Services;
import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.DateUtils;
import org.apache.oozie.util.ELEvaluator;
public class TestCoordELFunctions extends XTestCase {
ELEvaluator eval = null;
SyncCoordAction appInst = null;
SyncCoordDataset ds = null;
private Services services;
/** Starts the Oozie services (incl. the EL service) required by every test. */
@Override
protected void setUp() throws Exception {
    super.setUp();
    services = new Services();
    services.init();
}
/** Shuts the Oozie services down again so tests do not leak state. */
@Override
protected void tearDown() throws Exception {
    services.destroy();
    super.tearDown();
}
/*
* public void testSetup() throws Exception { services = new Services();
* services.init(); }
*/
/**
 * URI template variables (YEAR/MONTH/DAY/HOUR/MINUTE) must be rejected in the
 * "coord-job-submit-freq" EL context but pass through unresolved in the
 * "coord-job-submit-nofuncs" context.
 */
public void testURIVars() throws Exception {
    init("coord-job-submit-freq");
    String expr = "${YEAR}";
    try {
        assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
        // NOTE(review): message typo ("beacuse") kept — it is a runtime string.
        fail("should throw exception beacuse coord-job-submit-freq doesn't resolve YEAR/MONTH/DAY");
    }
    catch (Exception ex) {
        // expected: YEAR is undefined in the freq context
    }
    // In the nofuncs context each variable is echoed back unresolved.
    init("coord-job-submit-nofuncs");
    assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${MONTH}";
    assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${DAY}";
    assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${HOUR}";
    assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${MINUTE}";
    assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
}
/**
 * coord:days(n) evaluates to n and sets the evaluator's time unit to DAY.
 */
public void testDay() throws Exception {
    init("coord-job-submit-freq");
    for (String days : new String[] { "1", "256" }) {
        String expression = "${coord:days(" + days + ")}";
        assertEquals(days, CoordELFunctions.evalAndWrap(eval, expression));
        assertEquals(TimeUnit.DAY, (TimeUnit) eval.getVariable("timeunit"));
    }
}
/**
 * coord:months(n) evaluates to n with a MONTH time unit; it also composes with
 * arithmetic and with nested coord:months() calls.
 */
public void testMonth() throws Exception {
    init("coord-job-submit-freq");
    String expr = "${coord:months(1)}";
    assertEquals("1", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.MONTH, (TimeUnit) eval.getVariable("timeunit"));
    // The function result participates in ordinary EL arithmetic: 1 + 7 = 8.
    expr = "${coord:months(1) + 7}";
    assertEquals("8", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.MONTH, (TimeUnit) eval.getVariable("timeunit"));
    expr = "${coord:months(256)}";
    assertEquals("256", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.MONTH, (TimeUnit) eval.getVariable("timeunit"));
    // Nested call: inner months(7) yields 7, outer months(7) yields 7 again.
    expr = "${coord:months(coord:months(7))}";
    assertEquals("7", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.MONTH, (TimeUnit) eval.getVariable("timeunit"));
}
/**
 * coord:hours(n) is normalized to minutes: the result is n*60 with a MINUTE
 * time unit, and the calls compose.
 */
public void testHours() throws Exception {
    init("coord-job-submit-freq");
    // 1 hour -> 60 minutes.
    String expression = "${coord:hours(1)}";
    assertEquals("60", CoordELFunctions.evalAndWrap(eval, expression));
    assertEquals(TimeUnit.MINUTE, (TimeUnit) eval.getVariable("timeunit"));
    // Nested: hours(1) -> 60, then hours(60) -> 3600 minutes.
    expression = "${coord:hours(coord:hours(1))}";
    assertEquals("3600", CoordELFunctions.evalAndWrap(eval, expression));
    assertEquals(TimeUnit.MINUTE, (TimeUnit) eval.getVariable("timeunit"));
}
/**
 * coord:endOfDays(n) evaluates to n, sets the time unit to DAY and the
 * end-of-duration marker to END_OF_DAY.
 */
public void testEndOfDays() throws Exception {
    init("coord-job-submit-freq");
    // FIX: removed a dead store — the original assigned "${coord:endOfDays(1)}"
    // and immediately overwrote it without ever evaluating it.
    String expr = "${coord:endOfDays(3)}";
    assertEquals("3", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.DAY, (TimeUnit) eval.getVariable("timeunit"));
    assertEquals(TimeUnit.END_OF_DAY, (TimeUnit) eval.getVariable("endOfDuration"));
}
/**
 * coord:endOfWeeks(n) evaluates to n, sets the time unit to WEEK and the
 * end-of-duration marker to END_OF_WEEK.
 */
public void testEndOfWeeks() throws Exception {
    init("coord-job-submit-freq");
    String expr = "${coord:endOfWeeks(3)}";
    assertEquals("3", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.WEEK, (TimeUnit) eval.getVariable("timeunit"));
    assertEquals(TimeUnit.END_OF_WEEK, (TimeUnit) eval.getVariable("endOfDuration"));
}
/**
 * coord:endOfMonths(n) evaluates to n, sets the time unit to MONTH and the
 * end-of-duration marker to END_OF_MONTH.
 */
public void testEndOfMonths() throws Exception {
    init("coord-job-submit-freq");
    // FIX: removed a dead store — the original assigned "${coord:endOfMonths(1)}"
    // and immediately overwrote it without ever evaluating it.
    String expr = "${coord:endOfMonths(3)}";
    assertEquals("3", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.MONTH, (TimeUnit) eval.getVariable("timeunit"));
    assertEquals(TimeUnit.END_OF_MONTH, (TimeUnit) eval.getVariable("endOfDuration"));
}
/**
 * coord:minutes(n) evaluates to n with a MINUTE time unit; nested calls compose.
 */
public void testMinutes() throws Exception {
    init("coord-job-submit-freq");
    // FIX: removed a duplicated assignment — the original assigned the identical
    // expression twice in a row (dead store).
    String expr = "${coord:minutes(1)}";
    assertEquals("1", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.MINUTE, (TimeUnit) eval.getVariable("timeunit"));
    expr = "${coord:minutes(coord:minutes(1))}";
    assertEquals("1", CoordELFunctions.evalAndWrap(eval, expr));
    assertEquals(TimeUnit.MINUTE, (TimeUnit) eval.getVariable("timeunit"));
}
/**
 * Phase-1 handling of coord:tzOffset(): coord:current() stays unresolved while
 * the tzOffset() argument is already substituted (here evaluating to -3;
 * presumably a phase-1 placeholder value — verify against CoordELFunctions).
 */
public void testTzOffsetPh1() throws Exception {
    init("coord-job-submit-instances");
    String expr = "${coord:current(-coord:tzOffset())}";
    assertEquals("${coord:current(-3)}", CoordELFunctions.evalAndWrap(eval, expr));
}
/**
 * Phase-1 handling of coord:dataIn/dataOut: the expression is echoed back
 * unresolved when the dataset name is registered via an oozie.dataname.&lt;NAME&gt;
 * variable, and evaluation fails for unregistered names.
 */
public void testDataNamesPh1() throws Exception {
    init("coord-job-submit-data");
    String expr = "${coord:dataIn('ABC')}";
    eval.setVariable("oozie.dataname.ABC", "data-in");
    assertEquals("${coord:dataIn('ABC')}", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:dataIn('ABCD')}";
    try {
        assertEquals("${coord:dataIn('ABCD')}", CoordELFunctions.evalAndWrap(eval, expr));
        // FIX: corrected the failure message (typos "beacuse"/"defiend").
        fail("should throw exception because data-in is not defined");
    }
    catch (Exception ex) {
        // expected: ABCD was never registered as a dataname
    }
    expr = "${coord:dataOut('EFG')}";
    eval.setVariable("oozie.dataname.EFG", "data-out");
    assertEquals("${coord:dataOut('EFG')}", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:dataOut('EFGH')}";
    try {
        assertEquals("${coord:dataOut('EFGH')}", CoordELFunctions.evalAndWrap(eval, expr));
        // FIX: this branch tests dataOut, but the original message said "Data in".
        fail("should throw exception because data-out is not defined");
    }
    catch (Exception ex) {
        // expected: EFGH was never registered as a dataname
    }
}
/**
 * coord:hoursInDay(offset) must reflect DST transitions of the dataset time
 * zone: 24 on normal days, 23 on the US spring-forward day (2009-03-08) and 25
 * on the US fall-back day (2009-11-01) in America/Los_Angeles.
 */
public void testHoursInDay() throws Exception {
    init("coord-action-create");
    String expr = "${coord:hoursInDay(1)}";
    String res = CoordELFunctions.evalAndWrap(eval, expr);
    assertEquals("24", res);
    expr = "${coord:hoursInDay(coord:hoursInDay(1))}";
    res = CoordELFunctions.evalAndWrap(eval, expr);
    // NOTE(review): these locals shadow the appInst/ds fields populated by init().
    SyncCoordAction appInst = new SyncCoordAction();
    SyncCoordDataset ds = new SyncCoordDataset();
    ds.setFrequency(1);
    ds.setTimeUnit(TimeUnit.DAY);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-02T00:00Z"));
    ds.setTimeZone(DateUtils.getTimeZone("UTC"));
    ds.setName("test1");
    ds.setType("SYNC");
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2009-09-10T23:59Z"));
    appInst.setTimeZone(DateUtils.getTimeZone("UTC"));
    // Overrides of the init instance / times above (last assignment wins).
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:hoursInDay(0)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
    // Non-DST date: still 24 hours even in a DST-observing zone.
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:hoursInDay(0)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:hoursInDay(-2)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
    // 2009-03-08 is the US spring-forward DST date; UTC is unaffected.
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-08T08:00Z"));
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    ds.setTimeZone(DateUtils.getTimeZone("UTC"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:hoursInDay(0)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
    // Europe/London transitions on a different date, so this day is still 24h.
    ds.setTimeZone(DateUtils.getTimeZone("Europe/London"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:hoursInDay(0)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
    // Spring-forward day in America/Los_Angeles has only 23 hours.
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-08T08:00Z"));
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:hoursInDay(0)}";
    assertEquals("23", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:hoursInDay(1)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
    // Fall-back day (2009-11-01) has 25 hours.
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-11-01T08:00Z"));
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:hoursInDay(0)}";
    assertEquals("25", CoordELFunctions.evalAndWrap(eval, expr));
    // Same DST checks with an END_OF_DAY dataset duration.
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-08T08:00Z"));
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    ds.setEndOfDuration(TimeUnit.END_OF_DAY);
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:hoursInDay(0)}";
    assertEquals("23", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:hoursInDay(1)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:hoursInDay(-1)}";
    assertEquals("24", CoordELFunctions.evalAndWrap(eval, expr));
}
/**
 * coord:daysInMonth(offset) returns the day count of the month at the given
 * instance offset (Feb 2009 = 28, Jan/Mar = 31, Apr = 30), both for a plain
 * MONTH frequency and with an END_OF_MONTH duration, in UTC and in a
 * DST-observing time zone.
 */
public void testDaysInMonth() throws Exception {
    init("coord-action-create");
    String expr = "${coord:daysInMonth(1)}";
    assertEquals("30", CoordELFunctions.evalAndWrap(eval, expr));
    // Nested: daysInMonth(1) -> 30, daysInMonth(30) -> 31.
    expr = "${coord:daysInMonth(coord:daysInMonth(1))}";
    assertEquals("31", CoordELFunctions.evalAndWrap(eval, expr));
    // NOTE(review): these locals shadow the appInst/ds fields populated by init().
    SyncCoordAction appInst = new SyncCoordAction();
    SyncCoordDataset ds = new SyncCoordDataset();
    ds.setFrequency(1);
    ds.setTimeUnit(TimeUnit.MONTH);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-02T00:00Z"));
    ds.setTimeZone(DateUtils.getTimeZone("UTC"));
    ds.setName("test1");
    ds.setType("SYNC");
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2009-09-10T23:59Z"));
    appInst.setTimeZone(DateUtils.getTimeZone("UTC"));
    // Nominal time in February 2009 (non-leap year): 28 days at offset 0.
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T00:00Z"));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T11:00Z"));
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:daysInMonth(0)}";
    assertEquals("28", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(-1)}";
    assertEquals("31", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(2)}";
    assertEquals("30", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(-3)}";
    assertEquals("30", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(3)}";
    assertEquals("31", CoordELFunctions.evalAndWrap(eval, expr));
    // Same check with a DST-observing dataset time zone.
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T11:00Z")); // Feb
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:daysInMonth(0)}";
    assertEquals("28", CoordELFunctions.evalAndWrap(eval, expr)); // Jan
    // 31
    // End of Month
    ds.setFrequency(1);
    ds.setTimeUnit(TimeUnit.MONTH);
    ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-02T00:00Z"));
    ds.setTimeZone(DateUtils.getTimeZone("UTC"));
    ds.setName("test1");
    ds.setType("SYNC");
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2009-09-10T23:59Z"));
    appInst.setTimeZone(DateUtils.getTimeZone("UTC"));
    // Case 1: END_OF_MONTH duration must yield the same day counts.
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T00:00Z"));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T11:00Z"));
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:daysInMonth(0)}";
    assertEquals("28", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(-1)}";
    assertEquals("31", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(2)}";
    assertEquals("30", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(-3)}";
    assertEquals("30", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:daysInMonth(3)}";
    assertEquals("31", CoordELFunctions.evalAndWrap(eval, expr));
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T11:00Z")); // Feb
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2010-10-01T00:00Z"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    expr = "${coord:daysInMonth(0)}";
    assertEquals("28", CoordELFunctions.evalAndWrap(eval, expr)); // Jan
    // 31
}
/**
 * coord:tzOffset() returns the dataset-vs-action time zone offset in minutes:
 * 0 when both are UTC, -420 during PDT (UTC-7) and -480 during PST (UTC-8).
 */
public void testTZOffset() throws Exception {
    init("coord-action-create");
    String expr = "${coord:tzOffset()}";
    // eval.setVariable("resolve_tzOffset", "true");
    assertEquals("0", CoordELFunctions.evalAndWrap(eval, expr));
    // NOTE(review): appInst/ds here are the class fields, presumably set up by init().
    appInst.setTimeZone(DateUtils.getTimeZone("UTC"));
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2012-06-13T00:00Z")); //Summer
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    // PDT is UTC - 7
    assertEquals("-420", CoordELFunctions.evalAndWrap(eval, expr));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2012-12-13T00:00Z")); //Winter
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    // PST is UTC - 8
    assertEquals("-480", CoordELFunctions.evalAndWrap(eval, expr));
}
/**
 * coord:dateOffset(base, amount, unit) shifts an ISO timestamp forward or
 * backward by the given number of DAY/YEAR units.
 */
public void testDateOffset() throws Exception {
    init("coord-action-start");
    // Forward two days.
    String expression = "${coord:dateOffset(\"2009-09-08T23:59Z\", 2, \"DAY\")}";
    assertEquals("2009-09-10T23:59Z", CoordELFunctions.evalAndWrap(eval, expression));
    // Backward one day.
    expression = "${coord:dateOffset(\"2009-09-08T23:59Z\", -1, \"DAY\")}";
    assertEquals("2009-09-07T23:59Z", CoordELFunctions.evalAndWrap(eval, expression));
    // Forward one year.
    expression = "${coord:dateOffset(\"2009-09-08T23:59Z\", 1, \"YEAR\")}";
    assertEquals("2010-09-08T23:59Z", CoordELFunctions.evalAndWrap(eval, expression));
}
/**
 * coord:dateTzOffset(ts, tz) shifts a UTC timestamp into the target time zone,
 * honoring DST: -7h during PDT (summer), -8h during PST (winter).
 */
public void testDateTzOffset() throws Exception {
    init("coord-action-start");
    // PDT is UTC - 7
    String expr = "${coord:dateTzOffset(\"2012-06-13T00:00Z\", \"America/Los_Angeles\")}"; //Summer
    assertEquals("2012-06-12T17:00Z", CoordELFunctions.evalAndWrap(eval, expr));
    // NOTE(review): the fixed-offset "PST" id apparently yields the same summer
    // result here — verify whether DateUtils maps "PST" to America/Los_Angeles.
    expr = "${coord:dateTzOffset(\"2012-06-13T00:00Z\", \"PST\")}";
    assertEquals("2012-06-12T17:00Z", CoordELFunctions.evalAndWrap(eval, expr));
    // PST is UTC - 8
    expr = "${coord:dateTzOffset(\"2012-12-13T00:00Z\", \"America/Los_Angeles\")}"; //Winter
    assertEquals("2012-12-12T16:00Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:dateTzOffset(\"2012-12-13T00:00Z\", \"PST\")}";
    assertEquals("2012-12-12T16:00Z", CoordELFunctions.evalAndWrap(eval, expr));
}
/**
 * coord:currentRange(start, end) returns the '#'-separated instances in the
 * offset range; when part of the range falls before the dataset's initial
 * instance, only the in-range instances are returned.
 */
public void testCurrentRange() throws Exception {
    init("coord-action-create");
    String expr = "${coord:currentRange(-1, 0)}";
    assertEquals("2009-09-09T23:59Z#2009-09-08T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    //test out of range instances, EL should return partial instances
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-09-01T23:59Z"));
    assertEquals("2009-09-01T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
}
public void testCurrent() throws Exception {
init("coord-action-create");
String expr = "${coord:current(-1)}";
assertEquals("2009-09-08T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-05-30T00:45Z"));
ds.setFrequency(1);
ds.setTimeUnit(TimeUnit.DAY);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-02T00:00Z"));
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
expr = "${coord:current(0)} ${coord:current(1)} ${coord:current(-1)} ${coord:current(-3)}";
assertEquals("2009-05-29T23:00Z 2009-05-30T23:00Z 2009-05-28T23:00Z 2009-05-26T23:00Z",
CoordELFunctions.evalAndWrap(eval, expr));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-05-30T00:45Z"));
ds.setFrequency(30);
ds.setTimeUnit(TimeUnit.MINUTE);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-08T00:00Z"));
expr = "${coord:current(0)} ${coord:current(1)} ${coord:current(-1)} ${coord:current(-3)}";
assertEquals("2009-05-30T00:30Z 2009-05-30T01:00Z 2009-05-30T00:00Z 2009-05-29T23:00Z",
eval.evaluate(expr, String.class));
SyncCoordAction appInst = new SyncCoordAction();
SyncCoordDataset ds = new SyncCoordDataset();
ds.setFrequency(1);
ds.setTimeUnit(TimeUnit.DAY);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-02T00:00Z"));
ds.setTimeZone(DateUtils.getTimeZone("UTC"));
ds.setName("test1");
ds.setType("SYNC");
appInst.setActualTime(DateUtils.parseDateOozieTZ("2009-09-10T23:59Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-05-30T00:00Z "));
appInst.setTimeZone(DateUtils.getTimeZone("UTC"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-05-30T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-05-31T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(-1)}";
assertEquals("2009-05-29T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(-3)}";
assertEquals("2009-05-27T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
ds.setFrequency(7);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-08T00:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-05-28T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-06-04T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(-1)}";
assertEquals("2009-05-21T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(-3)}";
assertEquals("2009-05-07T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Spring DST transition
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-08T10:45Z"));
ds.setFrequency(1);
ds.setTimeUnit(TimeUnit.DAY);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-03-06T10:00Z"));
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
expr = "${coord:current(-2)} ${coord:current(-1)} ${coord:current(0)} ${coord:current(1)} ${coord:current(2)}";
assertEquals("2009-03-06T10:00Z 2009-03-07T10:00Z 2009-03-08T09:00Z 2009-03-09T09:00Z 2009-03-10T09:00Z",
CoordELFunctions.evalAndWrap(eval, expr));
// Winter DST Transition
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-11-01T08:00Z"));
ds.setFrequency(1);
ds.setTimeUnit(TimeUnit.DAY);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-10-30T08:00Z"));
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
expr = "${coord:current(-2)} ${coord:current(-1)} ${coord:current(0)} ${coord:current(1)} ${coord:current(2)}";
// System.out.println("AAAAA " + CoordELFunctions.evalAndWrap(eval,
// expr));
assertEquals("2009-10-30T08:00Z 2009-10-31T08:00Z 2009-11-01T08:00Z 2009-11-02T09:00Z 2009-11-03T09:00Z",
CoordELFunctions.evalAndWrap(eval, expr));
// EndofDay testing
ds.setFrequency(1);
ds.setTimeUnit(TimeUnit.DAY);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-02T09:00Z"));
ds.setTimeZone(DateUtils.getTimeZone("UTC"));
ds.setName("test1");
ds.setType("SYNC");
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-05-30T12:00Z "));
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-05-30T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-05-31T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// From Specification
// Case 1
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("", CoordELFunctions.evalAndWrap(eval, expr));
// Case 2
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
// assertEquals("2009-01-02T08:00Z", CoordELFunctions.evalAndWrap(eval,
// expr));
assertEquals("", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
// assertEquals("2009-01-03T08:00Z", CoordELFunctions.evalAndWrap(eval,
// expr));
assertEquals("", CoordELFunctions.evalAndWrap(eval, expr));
// Case 3
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T07:01Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-01-01T08:01Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-01-01T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-01-02T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 4
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T7:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-01-01T18:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-01-01T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-01-02T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 5
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-03-07T07:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-07T09:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-03-07T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-08T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 6
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-03-08T07:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-08T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-03-08T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-09T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 7
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-03-09T07:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-10T08:01Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-03-10T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-11T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 8
ds.setEndOfDuration(TimeUnit.END_OF_DAY);
ds.setFrequency(2); // Changed
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-03-09T07:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-10T07:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-03-10T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-12T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Test with EOM
ds.setTimeUnit(TimeUnit.MONTH);
// Case 1
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setFrequency(1);
ds.setTimeZone(DateUtils.getTimeZone("UTC"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T00:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T00:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-02-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 2
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setTimeZone(DateUtils.getTimeZone("UTC"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-02-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 3
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setTimeZone(DateUtils.getTimeZone("UTC"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-31T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-02-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 4
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-01-01T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-02-01T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-02-01T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-03-01T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 5
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-02-02T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-02T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-03-01T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-04-01T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 6
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-02-01T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-01T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-03-01T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-04-01T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 7
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setFrequency(3);
ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-02-01T08:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-03-01T08:00Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2009-03-01T08:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2009-06-01T07:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Case 8
ds.setEndOfDuration(TimeUnit.END_OF_MONTH);
ds.setFrequency(1);
ds.setTimeZone(DateUtils.getTimeZone("UTC"));
ds.setInitInstance(DateUtils.parseDateOozieTZ("2010-01-01T00:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2016-10-31T00:55Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2016-10-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2016-11-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(-1)}";
assertEquals("2016-09-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
// Test with YEAR
ds.setTimeUnit(TimeUnit.YEAR);
ds.setEndOfDuration(TimeUnit.YEAR);
ds.setFrequency(1);
ds.setTimeZone(DateUtils.getTimeZone("UTC"));
// Initial instance is far behind to accumulate effect of leap years
ds.setInitInstance(DateUtils.parseDateOozieTZ("1963-01-01T00:00Z"));
appInst.setNominalTime(DateUtils.parseDateOozieTZ("2016-10-31T00:55Z"));
CoordELFunctions.configureEvaluator(eval, ds, appInst);
expr = "${coord:current(0)}";
assertEquals("2016-01-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(1)}";
assertEquals("2017-01-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
expr = "${coord:current(-1)}";
assertEquals("2015-01-01T00:00Z", CoordELFunctions.evalAndWrap(eval, expr));
}
/**
 * Tests {@code coord:offset(n, timeUnit)}: shifts the action's nominal time by
 * n units and, when the shifted time is not aligned with the dataset frequency,
 * rewinds to the closest earlier dataset instance. An offset landing before the
 * dataset's initial instance resolves to the empty string.
 */
public void testOffset() throws Exception {
    init("coord-action-create");
    // Nominal time from init() is 2009-09-09T23:59Z on a 1-DAY dataset, so
    // -1440 minutes, -24 hours and -1 day all resolve to the previous day.
    String expr = "${coord:offset(-1440, \"MINUTE\")}";
    assertEquals("2009-09-08T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:offset(-24, \"HOUR\")}";
    assertEquals("2009-09-08T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:offset(-1, \"DAY\")}";
    assertEquals("2009-09-08T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:offset(1, \"MONTH\")}";
    assertEquals("2009-10-09T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:offset(1, \"YEAR\")}";
    assertEquals("2010-09-09T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    // Ten days back is before the dataset's initial instance
    // (2009-09-01T23:59Z, set in init()), so the result is empty.
    expr = "${coord:offset(-10, \"DAY\")}";
    assertEquals("", CoordELFunctions.evalAndWrap(eval, expr));
    // Switch to a yearly dataset in a DST time zone. NOTE(review): mutating
    // ds/appInst after configureEvaluator() appears to feed through to the
    // evaluator (the assertions below depend on it) — presumably the evaluator
    // holds references to these objects; confirm against configureEvaluator.
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2015-01-02T00:45Z"));
    ds.setFrequency(1);
    ds.setTimeUnit(TimeUnit.YEAR);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2010-01-02T00:01Z"));
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    // Year offsets: all aligned with the yearly frequency.
    expr = "${coord:offset(0, \"YEAR\")} ${coord:offset(1, \"YEAR\")} ${coord:offset(-1, \"YEAR\")}"
            + " ${coord:offset(-3, \"YEAR\")}";
    assertEquals("2015-01-02T00:01Z 2016-01-02T00:01Z 2014-01-02T00:01Z 2012-01-02T00:01Z",
            eval.evaluate(expr, String.class));
    // Month offsets expressed as multiples of 12 — same instants as above.
    expr = "${coord:offset(0, \"MONTH\")} ${coord:offset(12, \"MONTH\")} ${coord:offset(-12, \"MONTH\")}"
            + " ${coord:offset(-36, \"MONTH\")}";
    assertEquals("2015-01-02T00:01Z 2016-01-02T00:01Z 2014-01-02T00:01Z 2012-01-02T00:01Z",
            eval.evaluate(expr, String.class));
    // Day offsets.
    // its -1096 instead of -1095 because of DST (extra 1 day)
    expr = "${coord:offset(0, \"DAY\")} ${coord:offset(365, \"DAY\")} ${coord:offset(-365, \"DAY\")}"
            + " ${coord:offset(-1096, \"DAY\")}";
    assertEquals("2015-01-02T00:01Z 2016-01-02T00:01Z 2014-01-02T00:01Z 2012-01-02T00:01Z",
            eval.evaluate(expr, String.class));
    // Hour offsets.
    // its -26304 instead of -26280 because of DST (extra 24 hours)
    expr = "${coord:offset(0, \"HOUR\")} ${coord:offset(8760, \"HOUR\")} ${coord:offset(-8760, \"HOUR\")}"
            + " ${coord:offset(-26304, \"HOUR\")}";
    assertEquals("2015-01-02T00:01Z 2016-01-02T00:01Z 2014-01-02T00:01Z 2012-01-02T00:01Z",
            eval.evaluate(expr, String.class));
    // Minute offsets.
    // its -1578240 instead of -1576800 because of DST (extra 1440 minutes)
    expr = "${coord:offset(0, \"MINUTE\")} ${coord:offset(525600, \"MINUTE\")} ${coord:offset(-525600, \"MINUTE\")}"
            + " ${coord:offset(-1578240, \"MINUTE\")}";
    assertEquals("2015-01-02T00:01Z 2016-01-02T00:01Z 2014-01-02T00:01Z 2012-01-02T00:01Z",
            eval.evaluate(expr, String.class));
    // Same nominal time against a 1-MINUTE dataset: every offset is already
    // aligned, so no rewinding happens.
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2015-01-02T00:45Z"));
    ds.setFrequency(1);
    ds.setTimeUnit(TimeUnit.MINUTE);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2010-01-02T00:01Z"));
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    // Minute
    expr = "${coord:offset(0, \"MINUTE\")} ${coord:offset(1, \"MINUTE\")} ${coord:offset(-1, \"MINUTE\")}"
            + " ${coord:offset(-3, \"MINUTE\")}";
    assertEquals("2015-01-02T00:45Z 2015-01-02T00:46Z 2015-01-02T00:44Z 2015-01-02T00:42Z",
            eval.evaluate(expr, String.class));
    // Hour
    expr = "${coord:offset(0, \"HOUR\")} ${coord:offset(1, \"HOUR\")} ${coord:offset(-1, \"HOUR\")}"
            + " ${coord:offset(-3, \"HOUR\")}";
    assertEquals("2015-01-02T00:45Z 2015-01-02T01:45Z 2015-01-01T23:45Z 2015-01-01T21:45Z",
            eval.evaluate(expr, String.class));
    // Day
    expr = "${coord:offset(0, \"DAY\")} ${coord:offset(1, \"DAY\")} ${coord:offset(-1, \"DAY\")}"
            + " ${coord:offset(-3, \"DAY\")}";
    assertEquals("2015-01-02T00:45Z 2015-01-03T00:45Z 2015-01-01T00:45Z 2014-12-30T00:45Z",
            eval.evaluate(expr, String.class));
    // Month
    expr = "${coord:offset(0, \"MONTH\")} ${coord:offset(1, \"MONTH\")} ${coord:offset(-1, \"MONTH\")}"
            + " ${coord:offset(-3, \"MONTH\")}";
    assertEquals("2015-01-02T00:45Z 2015-02-02T00:45Z 2014-12-02T00:45Z 2014-10-01T23:45Z",
            eval.evaluate(expr, String.class));
    // Year
    expr = "${coord:offset(0, \"YEAR\")} ${coord:offset(1, \"YEAR\")} ${coord:offset(-1, \"YEAR\")}"
            + " ${coord:offset(-3, \"YEAR\")}";
    assertEquals("2015-01-02T00:45Z 2016-01-02T00:45Z 2014-01-02T00:45Z 2012-01-02T00:45Z",
            eval.evaluate(expr, String.class));
    // Test rewinding when the given offset isn't a multiple of the
    // frequency (4-hourly dataset anchored at 00:01).
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2015-01-02T00:45Z"));
    ds.setFrequency(4);
    ds.setTimeUnit(TimeUnit.HOUR);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2010-01-02T00:01Z"));
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    expr = "${coord:offset(5, \"MINUTE\")}";
    assertEquals("2015-01-02T00:01Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:offset(1, \"HOUR\")}";
    assertEquals("2015-01-02T00:01Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:offset(7, \"HOUR\")}";
    assertEquals("2015-01-02T04:01Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:offset(-2, \"HOUR\")}";
    assertEquals("2015-01-01T20:01Z", CoordELFunctions.evalAndWrap(eval, expr));
    // Rewinding past the initial instance resolves to the empty string.
    expr = "${coord:offset(-43825, \"HOUR\")}";
    assertEquals("", CoordELFunctions.evalAndWrap(eval, expr));
    // "blah" is not a valid TimeUnit
    expr = "${coord:offset(1, \"blah\")}";
    try {
        CoordELFunctions.evalAndWrap(eval, expr);
        fail("eval of " + expr + " should have thrown an exception");
    }
    catch (Exception e) {
        assertTrue(e.getMessage().contains("Unable to evaluate"));
    }
    // 4.5 is not a valid integer
    expr = "${coord:offset(4.5, \"blah\")}";
    try {
        CoordELFunctions.evalAndWrap(eval, expr);
        fail("eval of " + expr + " should have thrown an exception");
    }
    catch (Exception e) {
        assertTrue(e.getMessage().contains("Unable to evaluate"));
    }
}
/**
 * Tests {@code coord:latest(n)}: resolves the nth-latest dataset instance
 * (0 = most recent, negative = older) for which data has actually been
 * materialized on disk; when no such instance exists the expression is
 * returned unresolved.
 */
public void testLatest() throws Exception {
    init("coord-action-start");
    String expr = "${coord:latest(0)}";
    Configuration conf = new Configuration();
    // TODO:Set hadoop properties
    eval.setVariable(CoordELFunctions.CONFIGURATION, conf);
    getTestCaseDir();
    // Point the dataset at the local test-case directory tree.
    ds.setUriTemplate(getTestCaseFileUri("${YEAR}/${MONTH}/${DAY}"));
    // Materialize data for 2009-09-10: latest(0) resolves to that instance.
    createTestCaseSubDir("2009/09/10/_SUCCESS".split("/"));
    // TODO: Create the directories
    assertEquals("2009-09-10T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    // Each older instance becomes visible once its directory exists.
    createTestCaseSubDir("2009/09/09/_SUCCESS".split("/"));
    expr = "${coord:latest(-1)}";
    assertEquals("2009-09-09T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    createTestCaseSubDir("2009/09/08/_SUCCESS".split("/"));
    expr = "${coord:latest(-2)}";
    assertEquals("2009-09-08T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    // No data 100 instances back: the expression is echoed back unresolved.
    expr = "${coord:latest(-100)}";
    assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:latest(1)}";
    try {
        assertEquals(expr, CoordELFunctions.evalAndWrap(eval, expr));
        fail("Should throw exception, because latest for +ve instance is not valid");
    }
    catch (Exception ex) {
        // Expected: positive instance indexes are invalid for coord:latest.
    }
    // Add test cases with EOM and EOD option
}
/**
 * In the job-submit-instances phase {@code coord:future} must stay
 * unresolved: evaluation echoes the expression back unchanged.
 */
public void testPh1Future() throws Exception {
    init("coord-job-submit-instances");
    final String futureExpr = "${coord:future(1, 10)}";
    final String resolved = CoordELFunctions.evalAndWrap(eval, futureExpr);
    assertEquals(futureExpr, resolved);
}
/**
 * {@code coord:formatTime} reformats an Oozie UTC timestamp with a date
 * pattern. Action-side phases resolve the value; the job-submit and
 * sla-submit phases leave the call unresolved, echoing it back with the
 * variable substituted.
 */
public void testFormatTime() throws Exception {
    final String yearExpr = "${coord:formatTime(\"2009-09-08T23:59Z\", \"yyyy\")}";
    final String stampExpr = "${coord:formatTime(\"2009-09-08T23:59Z\", \"yyyyMMdd_HHmmss\")}";
    // Phases in which the function resolves.
    for (String phase : new String[] {"coord-action-create", "coord-action-create-inst", "coord-action-start"}) {
        init(phase);
        assertEquals("2009", CoordELFunctions.evalAndWrap(eval, yearExpr));
        assertEquals("20090908_235900", CoordELFunctions.evalAndWrap(eval, stampExpr));
    }
    final String utcDate = "2009-09-08T23:59Z";
    final String varExpr = "${coord:formatTime(date, \"yyyy\")}";
    // Unresolved echo: the evaluator re-prints the call with the 'date'
    // variable substituted and the pattern unquoted.
    final String unresolvedEcho = "${coord:formatTime('" + utcDate + "' , " + "yyyy)}";
    // Phases in which the function is deferred.
    for (String phase : new String[] {"coord-job-submit-instances", "coord-job-submit-data", "coord-sla-submit"}) {
        init(phase);
        eval.setVariable("date", utcDate);
        assertEquals(unresolvedEcho, CoordELFunctions.evalAndWrap(eval, varExpr));
    }
}
/**
 * {@code coord:epochTime} converts an Oozie UTC timestamp to seconds since
 * the epoch ("false") or milliseconds ("true"). Action-side phases resolve
 * the value; the job-submit and sla-submit phases leave the call unresolved.
 */
public void testEpochTime() throws Exception {
    final String secondsExpr = "${coord:epochTime(\"2009-09-08T23:59Z\", \"false\")}";
    final String millisExpr = "${coord:epochTime(\"2009-09-08T23:59Z\", \"true\")}";
    // Phases in which the function resolves.
    for (String phase : new String[] {"coord-action-create", "coord-action-create-inst", "coord-action-start"}) {
        init(phase);
        assertEquals("1252454340", CoordELFunctions.evalAndWrap(eval, secondsExpr));
        assertEquals("1252454340000", CoordELFunctions.evalAndWrap(eval, millisExpr));
    }
    final String utcDate = "2009-09-08T23:59Z";
    final String varExpr = "${coord:epochTime(date, \"true\")}";
    // Unresolved echo with the 'date' variable substituted and the flag unquoted.
    final String unresolvedEcho = "${coord:epochTime('" + utcDate + "' , " + "true)}";
    // Phases in which the function is deferred.
    for (String phase : new String[] {"coord-job-submit-instances", "coord-job-submit-data", "coord-sla-submit"}) {
        init(phase);
        eval.setVariable("date", utcDate);
        assertEquals(unresolvedEcho, CoordELFunctions.evalAndWrap(eval, varExpr));
    }
}
/**
 * Tests {@code coord:future(n, limit)}: resolves the nth (0-based) future
 * dataset instance whose data exists, looking at most 'limit' instances
 * ahead; stays unresolved if not enough materialized data is found.
 */
public void testFuture() throws Exception {
    init("coord-job-submit-instances");
    String expr = "${coord:future(1, 20)}";
    // Re-init for the action-start phase (the evaluator is replaced).
    init("coord-action-start");
    Configuration conf = new Configuration();
    // TODO:Set hadoop properties
    eval.setVariable(CoordELFunctions.CONFIGURATION, conf);
    getTestCaseDir();
    ds.setUriTemplate(getTestCaseFileUri("/${YEAR}/${MONTH}/${DAY}"));
    // Materialize data for the two days after the nominal time (2009-09-09),
    // so future(1, ...) — the second future instance — resolves to 09-11.
    createTestCaseSubDir("2009/09/10/_SUCCESS".split("/"));
    createTestCaseSubDir("2009/09/11/_SUCCESS".split("/"));
    assertEquals("2009-09-11T23:59Z", CoordELFunctions.evalAndWrap(eval, expr));
    try {
        expr = "${coord:future(-1, 3)}";
        CoordELFunctions.evalAndWrap(eval, expr);
        fail("Should fail for negative instance value");
    }
    catch (Exception ex) {
        // Expected: coord:future requires a non-negative instance index.
    }
    // Only two future instances have data, so future(4, 20) stays unresolved
    // and the expression is echoed back.
    expr = "${coord:future(4, 20)}";
    String res = "${coord:future(4, 20)}";
    assertEquals(res, CoordELFunctions.evalAndWrap(eval, expr));
}
/**
 * {@code coord:nominalTime()} returns the action's nominal time (set to
 * 2009-09-09T23:59Z by init()) in both action-start and sla-create phases.
 */
public void testNominalTime() throws Exception {
    // NOTE(review): this phase is set up but never asserted against in the
    // original test; the call sequence is preserved as-is.
    init("coord-job-submit-data");
    String nominalExpr = "${coord:nominalTime()}";
    init("coord-action-start");
    nominalExpr = "${coord:nominalTime()}";
    assertEquals("2009-09-09T23:59Z", CoordELFunctions.evalAndWrap(eval, nominalExpr));
    init("coord-sla-create");
    assertEquals("2009-09-09T23:59Z", CoordELFunctions.evalAndWrap(eval, nominalExpr));
}
/**
 * {@code coord:actualTime()} resolves to the action's actual time
 * (2009-09-10T23:59Z from init()) at action start and sla-create, but is
 * left unresolved in the sla-submit phase.
 */
public void testActualTime() throws Exception {
    // NOTE(review): phase set up but not asserted in the original test;
    // call sequence preserved.
    init("coord-job-submit-data");
    String actualExpr = "${coord:actualTime()}";
    init("coord-action-start");
    actualExpr = "${coord:actualTime()}";
    assertEquals("2009-09-10T23:59Z", CoordELFunctions.evalAndWrap(eval, actualExpr));
    // Deferred phase: the expression is echoed back untouched.
    init("coord-sla-submit");
    assertEquals(actualExpr, CoordELFunctions.evalAndWrap(eval, actualExpr));
    init("coord-sla-create");
    assertEquals("2009-09-10T23:59Z", CoordELFunctions.evalAndWrap(eval, actualExpr));
}
/**
 * {@code coord:dataIn('name')} returns the comma-separated resolved URI
 * list when the data-in is marked resolved, and echoes the expression
 * back when it is still marked unresolved.
 */
public void testDataIn() throws Exception {
    init("coord-action-start");
    final String uris = "file:///tmp/coord/US/2009/1/30,file:///tmp/coord/US/2009/1/31";
    final String dataInExpr = "${coord:dataIn('ABC')}";
    // Resolved data-in: the URI list comes back verbatim.
    eval.setVariable(".datain.ABC", uris);
    eval.setVariable(".datain.ABC.unresolved", Boolean.FALSE);
    assertEquals(uris, CoordELFunctions.evalAndWrap(eval, dataInExpr));
    // Unresolved data-in: the expression itself is returned.
    eval.setVariable(".datain.ABC", uris);
    eval.setVariable(".datain.ABC.unresolved", Boolean.TRUE);
    assertEquals(dataInExpr, CoordELFunctions.evalAndWrap(eval, dataInExpr));
}
/**
 * {@code coord:dataOut('name')} returns the resolved output URI list in
 * both the action-start and sla-create phases.
 */
public void testDataOut() throws Exception {
    final String uris = "file:///tmp/coord/US/2009/1/30,file:///tmp/coord/US/2009/1/31";
    final String dataOutExpr = "${coord:dataOut('ABC')}";
    init("coord-action-start");
    eval.setVariable(".dataout.ABC", uris);
    assertEquals(uris, CoordELFunctions.evalAndWrap(eval, dataOutExpr));
    init("coord-sla-create");
    eval.setVariable(".dataout.ABC", uris);
    assertEquals(uris, CoordELFunctions.evalAndWrap(eval, dataOutExpr));
}
/**
 * {@code coord:actionId()} returns the action id set by init()
 * ("00000-oozie-C@1") in both phases that expose it.
 */
public void testActionId() throws Exception {
    final String actionIdExpr = "${coord:actionId()}";
    for (String phase : new String[] {"coord-action-start", "coord-sla-create"}) {
        init(phase);
        assertEquals("00000-oozie-C@1", CoordELFunctions.evalAndWrap(eval, actionIdExpr));
    }
}
/**
 * {@code coord:name()} returns the application name set by init().
 */
public void testName() throws Exception {
    init("coord-action-start");
    final String nameExpr = "${coord:name()}";
    assertEquals("mycoordinator-app", CoordELFunctions.evalAndWrap(eval, nameExpr));
}
/**
 * {@code coord:conf('key')} reads a variable previously set on the
 * evaluator.
 */
public void testConf() throws Exception {
    init("coord-job-submit-freq");
    eval.setVariable("my.test.variable", "test");
    final String confExpr = "${coord:conf('my.test.variable')}";
    assertEquals("test", CoordELFunctions.evalAndWrap(eval, confExpr));
}
/**
 * {@code coord:user()} resolves to the submitting user ("test_user" from
 * init()) in every evaluation phase.
 */
public void testUser() throws Exception {
    final String[] phases = {
            "coord-job-submit-freq",
            "coord-job-submit-instances",
            "coord-job-submit-data",
            "coord-sla-submit",
            "coord-action-create",
            "coord-action-create-inst",
            "coord-sla-create",
            "coord-action-start",
            // The original test exercised this phase twice; kept for parity.
            "coord-action-create-inst",
    };
    for (String phase : phases) {
        init(phase);
        assertEquals("test_user", CoordELFunctions.evalAndWrap(eval, "${coord:user()}"));
    }
}
/**
 * Tests {@code coord:absoluteRange(start, endInstance)}: materializes the
 * dataset instances from the given start time up to the end-instance index,
 * '#'-separated with the newest instance first; a negative end-instance
 * (start after end) must be rejected.
 */
public void testAbsoluteRange() throws Exception {
    init("coord-action-create");
    // Weekly dataset anchored exactly at the action's nominal time.
    ds = new SyncCoordDataset();
    ds.setFrequency(7);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-08-20T01:00Z"));
    ds.setTimeUnit(TimeUnit.DAY);
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    ds.setName("test");
    ds.setUriTemplate("hdfs:///tmp/workflows/${YEAR}/${MONTH}/${DAY};region=us");
    ds.setType("SYNC");
    ds.setDoneFlag("");
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-08-20T01:00Z"));
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
    String expr = "${coord:absoluteRange(\"2009-08-20T01:00Z\",\"0\")}";
    // Fixed: assertEquals takes (expected, actual); the original passed them
    // reversed, which produces misleading failure diagnostics.
    assertEquals("2009-08-20T01:00Z", CoordELFunctions.evalAndWrap(eval, expr));
    expr = "${coord:absoluteRange(\"2009-08-20T01:00Z\",\"1\")}";
    // End-instance 1 yields two instances, newest first.
    assertEquals("2009-08-27T01:00Z#2009-08-20T01:00Z", CoordELFunctions.evalAndWrap(eval, expr));
    try {
        expr = "${coord:absoluteRange(\"2009-08-20T01:00Z\",\"-1\")}";
        CoordELFunctions.evalAndWrap(eval, expr);
        fail("start-instance is greater than the end-instance and there was no exception");
    }
    catch (Exception e) {
        assertTrue(e.getCause().getMessage().contains("start-instance should be equal or earlier than the end-instance"));
    }
}
/*
* public void testDetach() throws Exception { Services.get().destroy(); }
*/
/**
 * Convenience overload: initializes the fixture with the default HDFS URI
 * template rooted under the test user's US/${YEAR}/${MONTH}/${DAY} layout.
 */
void init(String tag) throws Exception {
    final String defaultTemplate =
            "hdfs://localhost:9000/user/" + getTestUser() + "/US/${YEAR}/${MONTH}/${DAY}";
    init(tag, defaultTemplate);
}
/**
 * Creates a fresh evaluator for the given EL phase tag and configures it
 * with a daily SYNC dataset and a single coordinator action instance.
 *
 * Fixture values: dataset starts 2009-09-01T23:59Z (frequency 1 DAY,
 * America/Los_Angeles); action nominal time 2009-09-09T23:59Z, actual time
 * 2009-09-10T23:59Z, action id "00000-oozie-C@1", application name
 * "mycoordinator-app"; user/group variables "test_user"/"test_group".
 *
 * @param tag evaluator group/phase name passed to the ELService
 * @param uriTemplate dataset URI template to configure
 */
private void init(String tag, String uriTemplate) throws Exception {
    eval = Services.get().get(ELService.class).createEvaluator(tag);
    eval.setVariable(OozieClient.USER_NAME, "test_user");
    eval.setVariable(OozieClient.GROUP_NAME, "test_group");
    appInst = new SyncCoordAction();
    ds = new SyncCoordDataset();
    ds.setFrequency(1);
    ds.setInitInstance(DateUtils.parseDateOozieTZ("2009-09-01T23:59Z"));
    ds.setTimeUnit(TimeUnit.DAY);
    ds.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    ds.setName("test");
    ds.setUriTemplate(uriTemplate);
    ds.setType("SYNC");
    // Empty done flag — presumably data availability is keyed on the
    // instance directory itself rather than a marker file; TODO confirm.
    ds.setDoneFlag("");
    appInst.setActualTime(DateUtils.parseDateOozieTZ("2009-09-10T23:59Z"));
    appInst.setNominalTime(DateUtils.parseDateOozieTZ("2009-09-09T23:59Z"));
    appInst.setTimeZone(DateUtils.getTimeZone("America/Los_Angeles"));
    appInst.setActionId("00000-oozie-C@1");
    appInst.setName("mycoordinator-app");
    CoordELFunctions.configureEvaluator(eval, ds, appInst);
}
}
| |
/*
* Copyright (c) 2009-2012 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.material.plugins;
import com.jme3.asset.*;
import com.jme3.material.*;
import com.jme3.material.RenderState.BlendMode;
import com.jme3.material.RenderState.FaceCullMode;
import com.jme3.material.TechniqueDef.LightMode;
import com.jme3.material.TechniqueDef.ShadowMode;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.shader.Shader;
import com.jme3.shader.VarType;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.texture.image.ColorSpace;
import com.jme3.util.PlaceholderAssets;
import com.jme3.util.blockparser.BlockLanguageParser;
import com.jme3.util.blockparser.Statement;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class J3MLoader implements AssetLoader {
private static final Logger logger = Logger.getLogger(J3MLoader.class.getName());
// private ErrorLogger errors;
// Delegate used when the material definition is built from shader nodes.
private ShaderNodeLoaderDelegate nodesLoaderDelegate;
// NOTE(review): presumably flags that the current technique uses shader
// nodes rather than plain shaders — confirm against the rest of the class.
boolean isUseNodes = false;
private AssetManager assetManager;
// Key of the asset currently being loaded; referenced in warning messages.
private AssetKey key;
private MaterialDef materialDef;
private Material material;
// Technique and render state currently being parsed.
private TechniqueDef technique;
private RenderState renderState;
// Per-shader-type language and source name, filled by readShaderDefinition().
private EnumMap<Shader.ShaderType, String> shaderLanguage;
private EnumMap<Shader.ShaderType, String> shaderName;
// Matches any run of whitespace; used to tokenize statements.
private static final String whitespacePattern = "\\p{javaWhitespace}+";
/**
 * Creates a loader with empty per-shader-type name/language tables; they
 * are populated as shader statements are parsed.
 */
public J3MLoader() {
    shaderLanguage = new EnumMap<Shader.ShaderType, String>(Shader.ShaderType.class);
    shaderName = new EnumMap<Shader.ShaderType, String>(Shader.ShaderType.class);
}
// <TYPE> <LANG> : <SOURCE>
/**
 * Parses a shader declaration of the form
 * {@code <Type>Shader <Language> : <source path>} and records the shader
 * source name and language for the matching shader type.
 *
 * @param statement the raw statement text
 * @throws IOException if the statement does not contain exactly one ':'
 *         separator, or the left-hand side is not exactly two
 *         whitespace-separated tokens
 */
private void readShaderStatement(String statement) throws IOException {
    String[] split = statement.split(":");
    if (split.length != 2) {
        // Fixed: the separator between the message and the offending
        // statement was missing (inconsistent with the check below).
        throw new IOException("Shader statement syntax incorrect: " + statement);
    }
    String[] typeAndLang = split[0].split(whitespacePattern);
    if (typeAndLang.length != 2) {
        throw new IOException("Shader statement syntax incorrect: " + statement);
    }
    // Match the "<Type>Shader" token against every known shader type.
    for (Shader.ShaderType shaderType : Shader.ShaderType.values()) {
        if (typeAndLang[0].equals(shaderType.toString() + "Shader")) {
            readShaderDefinition(shaderType, split[1].trim(), typeAndLang[1]);
        }
    }
}
/**
 * Records the source name and language for one shader type; consumed later
 * when the technique's shaders are assembled.
 */
private void readShaderDefinition(Shader.ShaderType shaderType, String name, String language) {
    shaderName.put(shaderType, name);
    shaderLanguage.put(shaderType, language);
}
// LightMode <MODE>
/**
 * Parses a "LightMode <MODE>" statement and applies the mode to the
 * technique currently being built.
 *
 * @throws IOException if the statement is not exactly two tokens
 */
private void readLightMode(String statement) throws IOException {
    final String[] tokens = statement.split(whitespacePattern);
    if (tokens.length != 2) {
        throw new IOException("LightMode statement syntax incorrect");
    }
    // valueOf rejects unknown mode names with an IllegalArgumentException.
    technique.setLightMode(LightMode.valueOf(tokens[1]));
}
// ShadowMode <MODE>
/**
 * Parses a {@code ShadowMode <MODE>} statement and applies it to the current technique.
 *
 * @throws IOException if the statement does not have exactly two tokens
 */
private void readShadowMode(String statement) throws IOException {
    String[] tokens = statement.split(whitespacePattern);
    if (tokens.length != 2) {
        throw new IOException("ShadowMode statement syntax incorrect");
    }
    technique.setShadowMode(ShadowMode.valueOf(tokens[1]));
}
/**
 * Splits a texture value into tokens, honoring double- and single-quoted segments
 * so that paths containing spaces stay intact (surrounding quotes are stripped).
 */
private List<String> tokenizeTextureValue(final String value) {
    final Pattern tokenizer = Pattern.compile("[^\\s\"']+|\"([^\"]*)\"|'([^']*)'");
    final Matcher matcher = tokenizer.matcher(value.trim());
    final List<String> tokens = new ArrayList<String>();
    while (matcher.find()) {
        final String doubleQuoted = matcher.group(1);
        final String singleQuoted = matcher.group(2);
        if (doubleQuoted != null) {
            tokens.add(doubleQuoted);
        } else if (singleQuoted != null) {
            tokens.add(singleQuoted);
        } else {
            tokens.add(matcher.group());
        }
    }
    return tokens;
}
/**
 * Extracts recognized {@link TextureOption}s from all tokens except the last one
 * (the last token is the texture path). Unrecognized tokens that do not look like a
 * path segment or a legacy "Flip"/"Repeat" prefix are logged as warnings.
 */
private List<TextureOptionValue> parseTextureOptions(final List<String> values) {
    final List<TextureOptionValue> matchList = new ArrayList<TextureOptionValue>();
    // With zero or one token there can only be a path, never an option
    // (simplified from the redundant "isEmpty() || size() == 1").
    if (values.size() <= 1) {
        return matchList;
    }
    // Loop through all but the last value, the last one is going to be the path.
    for (int i = 0; i < values.size() - 1; i++) {
        final String value = values.get(i);
        final TextureOption textureOption = TextureOption.getTextureOption(value);
        if (textureOption == null && !value.contains("\\") && !value.contains("/")
                && !values.get(0).equals("Flip") && !values.get(0).equals("Repeat")) {
            // NOTE(review): material.getKey() assumes a material *instance* is being
            // loaded; presumably this branch is unreachable while loading a material
            // definition (material == null) — confirm before relying on it.
            logger.log(Level.WARNING, "Unknown texture option \"{0}\" encountered for \"{1}\" in material \"{2}\"", new Object[]{value, key, material.getKey().getName()});
        } else if (textureOption != null) {
            matchList.add(new TextureOptionValue(textureOption, textureOption.getOptionValue(value)));
        }
    }
    return matchList;
}
/**
 * Returns true when the value should be parsed the legacy way: either no new-style
 * texture options were recognized, or the path uses the old "Flip"/"Repeat" prefixes.
 */
private boolean isTexturePathDeclaredTheTraditionalWay(final int numberOfValues, final int numberOfTextureOptions, final String texturePath) {
    if (numberOfTextureOptions == 0) {
        return true;
    }
    final boolean hasLegacyPrefix = texturePath.startsWith("Flip Repeat ")
            || texturePath.startsWith("Flip ")
            || texturePath.startsWith("Repeat ")
            || texturePath.startsWith("Repeat Flip ");
    return numberOfValues > 1 && hasLegacyPrefix;
}
/**
 * Parses a texture parameter value into a loaded {@link Texture}: tokenizes the
 * value, resolves legacy ("Flip"/"Repeat" prefixes) or new-style texture options,
 * builds a TextureKey, loads the texture (placeholder image on failure), and
 * applies any texture options.
 */
private Texture parseTextureType(final VarType type, final String value) {
    final List<String> textureValues = tokenizeTextureValue(value);
    final List<TextureOptionValue> textureOptionValues = parseTextureOptions(textureValues);
    TextureKey textureKey = null;
    // If there is only one token on the value, it must be the path to the texture.
    if (textureValues.size() == 1) {
        textureKey = new TextureKey(textureValues.get(0), false);
    } else {
        String texturePath = value.trim();
        // If there are no valid "new" texture options specified but the path is split into several parts, lets parse the old way.
        if (isTexturePathDeclaredTheTraditionalWay(textureValues.size(), textureOptionValues.size(), texturePath)) {
            boolean flipY = false;
            // Both "Flip Repeat " and "Repeat Flip " are 12 characters long.
            if (texturePath.startsWith("Flip Repeat ") || texturePath.startsWith("Repeat Flip ")) {
                texturePath = texturePath.substring(12).trim();
                flipY = true;
            } else if (texturePath.startsWith("Flip ")) {
                texturePath = texturePath.substring(5).trim();
                flipY = true;
            } else if (texturePath.startsWith("Repeat ")) {
                texturePath = texturePath.substring(7).trim();
            }
            // Support path starting with quotes (double and single)
            if (texturePath.startsWith("\"") || texturePath.startsWith("'")) {
                texturePath = texturePath.substring(1);
            }
            // Support path ending with quotes (double and single)
            if (texturePath.endsWith("\"") || texturePath.endsWith("'")) {
                texturePath = texturePath.substring(0, texturePath.length() - 1);
            }
            textureKey = new TextureKey(texturePath, flipY);
        }
        // New-style declaration: the last token is the path, flipping is an option.
        if (textureKey == null) {
            textureKey = new TextureKey(textureValues.get(textureValues.size() - 1), false);
        }
        // Apply texture options to the texture key
        if (!textureOptionValues.isEmpty()) {
            for (final TextureOptionValue textureOptionValue : textureOptionValues) {
                textureOptionValue.applyToTextureKey(textureKey);
            }
        }
    }
    // Only these types need an explicit hint; 2D is the default.
    switch (type) {
        case Texture3D:
            textureKey.setTextureTypeHint(Texture.Type.ThreeDimensional);
            break;
        case TextureArray:
            textureKey.setTextureTypeHint(Texture.Type.TwoDimensionalArray);
            break;
        case TextureCubeMap:
            textureKey.setTextureTypeHint(Texture.Type.CubeMap);
            break;
    }
    textureKey.setGenerateMips(true);
    Texture texture;
    try {
        texture = assetManager.loadTexture(textureKey);
    } catch (AssetNotFoundException ex){
        logger.log(Level.WARNING, "Cannot locate {0} for material {1}", new Object[]{textureKey, key});
        texture = null;
    }
    // Fall back to a placeholder so a missing texture does not abort loading.
    if (texture == null){
        texture = new Texture2D(PlaceholderAssets.getPlaceholderImage(assetManager));
        texture.setKey(textureKey);
        texture.setName(textureKey.getName());
    }
    // Apply texture options to the texture
    if (!textureOptionValues.isEmpty()) {
        for (final TextureOptionValue textureOptionValue : textureOptionValues) {
            textureOptionValue.applyToTexture(texture);
        }
    }
    return texture;
}
/**
 * Parses a parameter value string into a typed object: textures are delegated to
 * parseTextureType(); scalar/vector types are split on whitespace and parsed.
 * Note that Vector4 values are returned as ColorRGBA.
 *
 * @throws IOException if the number of components does not match the type
 */
private Object readValue(final VarType type, final String value) throws IOException{
    if (type.isTextureType()) {
        return parseTextureType(type, value);
    } else {
        String[] split = value.trim().split(whitespacePattern);
        switch (type){
            case Float:
                if (split.length != 1){
                    throw new IOException("Float value parameter must have 1 entry: " + value);
                }
                return Float.parseFloat(split[0]);
            case Vector2:
                if (split.length != 2){
                    throw new IOException("Vector2 value parameter must have 2 entries: " + value);
                }
                return new Vector2f(Float.parseFloat(split[0]),
                        Float.parseFloat(split[1]));
            case Vector3:
                if (split.length != 3){
                    throw new IOException("Vector3 value parameter must have 3 entries: " + value);
                }
                return new Vector3f(Float.parseFloat(split[0]),
                        Float.parseFloat(split[1]),
                        Float.parseFloat(split[2]));
            case Vector4:
                if (split.length != 4){
                    throw new IOException("Vector4 value parameter must have 4 entries: " + value);
                }
                // Vector4 is represented as a color here.
                return new ColorRGBA(Float.parseFloat(split[0]),
                        Float.parseFloat(split[1]),
                        Float.parseFloat(split[2]),
                        Float.parseFloat(split[3]));
            case Int:
                if (split.length != 1){
                    throw new IOException("Int value parameter must have 1 entry: " + value);
                }
                return Integer.parseInt(split[0]);
            case Boolean:
                if (split.length != 1){
                    throw new IOException("Boolean value parameter must have 1 entry: " + value);
                }
                return Boolean.parseBoolean(split[0]);
            default:
                throw new UnsupportedOperationException("Unknown type: "+type);
        }
    }
}
// <TYPE> <NAME> [ "(" <FFBINDING> ")" ] [-LINEAR] [ ":" <DEFAULTVAL> ]
/**
 * Parses a material-definition parameter declaration and registers it on the
 * MaterialDef: type, name, optional (ignored) fixed-function binding, optional
 * -LINEAR color-space marker, and an optional default value.
 *
 * @throws IOException if the statement or the default value is malformed
 */
private void readParam(String statement) throws IOException{
    String name;
    String defaultVal = null;
    ColorSpace colorSpace = null;
    String[] split = statement.split(":");
    // Parse default val
    if (split.length == 1){
        // Doesn't contain default value
    }else{
        if (split.length != 2){
            throw new IOException("Parameter statement syntax incorrect");
        }
        statement = split[0].trim();
        defaultVal = split[1].trim();
    }
    if (statement.endsWith("-LINEAR")) {
        colorSpace = ColorSpace.Linear;
        statement = statement.substring(0, statement.length() - "-LINEAR".length());
    }
    // Parse ffbinding
    int startParen = statement.indexOf("(");
    if (startParen != -1){
        // The binding content itself is ignored; we only strip it from the statement.
        int endParen = statement.indexOf(")", startParen);
        if (endParen == -1){
            // Fix: a missing ')' previously surfaced as an unchecked
            // StringIndexOutOfBoundsException; report it as a syntax error instead.
            throw new IOException("Parameter statement syntax incorrect");
        }
        statement = statement.substring(0, startParen);
    }
    // Parse type + name
    split = statement.split(whitespacePattern);
    if (split.length != 2){
        throw new IOException("Parameter statement syntax incorrect");
    }
    VarType type;
    if (split[0].equals("Color")){
        // "Color" is an alias for Vector4.
        type = VarType.Vector4;
    }else{
        type = VarType.valueOf(split[0]);
    }
    name = split[1];
    Object defaultValObj = null;
    if (defaultVal != null){
        defaultValObj = readValue(type, defaultVal);
    }
    if(type.isTextureType()){
        materialDef.addMaterialParamTexture(type, name, colorSpace);
    }else{
        materialDef.addMaterialParam(type, name, defaultValObj);
    }
}
/**
 * Parses a {@code Name : value} assignment in a material instance and sets the
 * value on the material, validating the name against the material definition.
 *
 * @throws IOException if the syntax is wrong or the parameter is not declared
 */
private void readValueParam(String statement) throws IOException{
    // Split with limit 2 in case the value (e.g. a file path) contains colons.
    String[] split = statement.split(":", 2);
    if (split.length != 2){
        throw new IOException("Value parameter statement syntax incorrect");
    }
    String name = split[0].trim();
    // parse value
    MatParam p = material.getMaterialDef().getMaterialParam(name);
    if (p == null){
        throw new IOException("The material parameter: "+name+" is undefined.");
    }
    Object valueObj = readValue(p.getVarType(), split[1]);
    if (p.getVarType().isTextureType()){
        material.setTextureParam(name, p.getVarType(), (Texture) valueObj);
    }else{
        material.setParam(name, p.getVarType(), valueObj);
    }
}
// Reads all parameter declarations of a MaterialParameters block (definition file).
private void readMaterialParams(List<Statement> paramsList) throws IOException{
    for (Statement statement : paramsList){
        readParam(statement.getLine());
    }
}
// Reads all parameter assignments of a MaterialParameters block (instance file).
private void readExtendingMaterialParams(List<Statement> paramsList) throws IOException{
    for (Statement statement : paramsList){
        readValueParam(statement.getLine());
    }
}
// Registers each line of a WorldParameters block on the current technique.
private void readWorldParams(List<Statement> worldParams) throws IOException{
    for (Statement statement : worldParams){
        technique.addWorldParam(statement.getLine());
    }
}
// Returns true only for the literal "On"; null and anything else (e.g. "Off") yield false.
private boolean parseBoolean(String word){
    return "On".equals(word);
}
/**
 * Dispatches a single render-state statement (e.g. {@code Blend Alpha},
 * {@code DepthTest On}) onto the current {@code renderState} object.
 *
 * @throws IOException (as MatParseException) for unknown statement keywords
 */
private void readRenderStateStatement(Statement statement) throws IOException{
    String[] split = statement.getLine().split(whitespacePattern);
    if (split[0].equals("Wireframe")){
        renderState.setWireframe(parseBoolean(split[1]));
    }else if (split[0].equals("FaceCull")){
        renderState.setFaceCullMode(FaceCullMode.valueOf(split[1]));
    }else if (split[0].equals("DepthWrite")){
        renderState.setDepthWrite(parseBoolean(split[1]));
    }else if (split[0].equals("DepthTest")){
        renderState.setDepthTest(parseBoolean(split[1]));
    }else if (split[0].equals("Blend")){
        renderState.setBlendMode(BlendMode.valueOf(split[1]));
    }else if (split[0].equals("AlphaTestFalloff")){
        // Specifying a falloff implicitly enables alpha testing.
        renderState.setAlphaTest(true);
        renderState.setAlphaFallOff(Float.parseFloat(split[1]));
    }else if (split[0].equals("PolyOffset")){
        float factor = Float.parseFloat(split[1]);
        float units = Float.parseFloat(split[2]);
        renderState.setPolyOffset(factor, units);
    }else if (split[0].equals("ColorWrite")){
        renderState.setColorWrite(parseBoolean(split[1]));
    }else if (split[0].equals("PointSprite")){
        renderState.setPointSprite(parseBoolean(split[1]));
    }else if (split[0].equals("DepthFunc")){
        renderState.setDepthFunc(RenderState.TestFunction.valueOf(split[1]));
    }else if (split[0].equals("AlphaFunc")){
        renderState.setAlphaFunc(RenderState.TestFunction.valueOf(split[1]));
    } else {
        throw new MatParseException(null, split[0], statement);
    }
}
// Applies an AdditionalRenderState block directly to the material instance's
// additional render state; "renderState" is only borrowed, hence reset to null.
private void readAdditionalRenderState(List<Statement> renderStates) throws IOException{
    renderState = material.getAdditionalRenderState();
    for (Statement statement : renderStates){
        readRenderStateStatement(statement);
    }
    renderState = null;
}
// Builds a fresh RenderState from a RenderState block and installs it on the technique.
private void readRenderState(List<Statement> renderStates) throws IOException{
    renderState = new RenderState();
    for (Statement statement : renderStates){
        readRenderStateStatement(statement);
    }
    technique.setRenderState(renderState);
    renderState = null;
}
// Same as readRenderState, but installs the result as the technique's forced render state.
private void readForcedRenderState(List<Statement> renderStates) throws IOException{
    renderState = new RenderState();
    for (Statement statement : renderStates){
        readRenderStateStatement(statement);
    }
    technique.setForcedRenderState(renderState);
    renderState = null;
}
// <DEFINENAME> [ ":" <PARAMNAME> ]
/**
 * Parses a define: a bare name becomes a preset boolean define, while
 * {@code DEFINE : param} binds the define to a material parameter.
 *
 * @throws IOException if more than one ':' is present
 */
private void readDefine(String statement) throws IOException{
    String[] parts = statement.split(":");
    switch (parts.length) {
        case 1:
            // add preset define
            technique.addShaderPresetDefine(parts[0].trim(), VarType.Boolean, true);
            break;
        case 2:
            technique.addShaderParamDefine(parts[1].trim(), parts[0].trim());
            break;
        default:
            throw new IOException("Define syntax incorrect");
    }
}
// Processes every line of a Defines block.
private void readDefines(List<Statement> defineList) throws IOException{
    for (Statement statement : defineList){
        readDefine(statement.getLine());
    }
}
/**
 * Dispatches one statement inside a Technique block to the matching reader.
 * Shader-node statements lazily switch the loader into node mode via
 * initNodesLoader().
 *
 * @throws IOException (as MatParseException) for unknown statement keywords
 */
private void readTechniqueStatement(Statement statement) throws IOException{
    // Split on whitespace or '{' so both "Defines {" and "Defines{" yield the keyword.
    String[] split = statement.getLine().split("[ \\{]");
    if (split[0].equals("VertexShader") ||
            split[0].equals("FragmentShader") ||
            split[0].equals("GeometryShader") ||
            split[0].equals("TessellationControlShader") ||
            split[0].equals("TessellationEvaluationShader")) {
        readShaderStatement(statement.getLine());
    }else if (split[0].equals("LightMode")){
        readLightMode(statement.getLine());
    }else if (split[0].equals("ShadowMode")){
        readShadowMode(statement.getLine());
    }else if (split[0].equals("WorldParameters")){
        readWorldParams(statement.getContents());
    }else if (split[0].equals("RenderState")){
        readRenderState(statement.getContents());
    }else if (split[0].equals("ForcedRenderState")){
        readForcedRenderState(statement.getContents());
    }else if (split[0].equals("Defines")){
        readDefines(statement.getContents());
    } else if (split[0].equals("ShaderNodesDefinitions")) {
        initNodesLoader();
        if (isUseNodes) {
            nodesLoaderDelegate.readNodesDefinitions(statement.getContents());
        }
    } else if (split[0].equals("VertexShaderNodes")) {
        initNodesLoader();
        if (isUseNodes) {
            nodesLoaderDelegate.readVertexShaderNodes(statement.getContents());
        }
    } else if (split[0].equals("FragmentShaderNodes")) {
        initNodesLoader();
        if (isUseNodes) {
            nodesLoaderDelegate.readFragmentShaderNodes(statement.getContents());
        }
    } else if (split[0].equals("NoRender")) {
        technique.setNoRender(true);
    } else {
        throw new MatParseException(null, split[0], statement);
    }
}
/**
 * Parses {@code Transparent On|Off} and sets the flag on the material instance.
 *
 * @throws IOException if the statement does not have exactly two tokens
 */
private void readTransparentStatement(String statement) throws IOException{
    String[] tokens = statement.split(whitespacePattern);
    if (tokens.length != 2){
        throw new IOException("Transparent statement syntax incorrect");
    }
    material.setTransparent(parseBoolean(tokens[1]));
}
/**
 * Reads a whole Technique block (optionally named), processes each contained
 * statement, finalizes shader assignment (file-based or node-based), registers the
 * TechniqueDef on the material definition and resets per-technique state.
 *
 * @throws IOException if the Technique header has more than two tokens
 */
private void readTechnique(Statement techStat) throws IOException{
    isUseNodes = false;
    String[] split = techStat.getLine().split(whitespacePattern);
    if (split.length == 1) {
        // Unnamed (default) technique.
        technique = new TechniqueDef(null);
    } else if (split.length == 2) {
        String techName = split[1];
        technique = new TechniqueDef(techName);
    } else {
        throw new IOException("Technique statement syntax incorrect");
    }
    for (Statement statement : techStat.getContents()){
        readTechniqueStatement(statement);
    }
    if(isUseNodes){
        nodesLoaderDelegate.computeConditions();
        //used for caching later, the shader here is not a file.
        technique.setShaderFile(technique.hashCode() + "", technique.hashCode() + "", "GLSL100", "GLSL100");
    }
    // File-based shaders require at least a vertex and a fragment shader.
    if (shaderName.containsKey(Shader.ShaderType.Vertex) && shaderName.containsKey(Shader.ShaderType.Fragment)) {
        technique.setShaderFile(shaderName, shaderLanguage);
    }
    materialDef.addTechniqueDef(technique);
    technique = null;
    shaderLanguage.clear();
    shaderName.clear();
}
/**
 * Entry point for parsed J3M/J3MD statements: validates the root structure,
 * distinguishes material definitions ("MaterialDef") from material instances
 * ("Material", which extend a definition), and processes the contained blocks.
 *
 * @throws IOException on any structural or syntax error
 */
private void loadFromRoot(List<Statement> roots) throws IOException{
    // A two-root file is a generated error report: first root must be "Exception ...".
    if (roots.size() == 2){
        Statement exception = roots.get(0);
        String line = exception.getLine();
        if (line.startsWith("Exception")){
            throw new AssetLoadException(line.substring("Exception ".length()));
        }else{
            throw new IOException("In multiroot material, expected first statement to be 'Exception'");
        }
    }else if (roots.size() != 1){
        throw new IOException("Too many roots in J3M/J3MD file");
    }
    boolean extending = false;
    Statement materialStat = roots.get(0);
    String materialName = materialStat.getLine();
    // Order matters: "MaterialDef" must be tested before "Material" (shared prefix).
    if (materialName.startsWith("MaterialDef")){
        materialName = materialName.substring("MaterialDef ".length()).trim();
        extending = false;
    }else if (materialName.startsWith("Material")){
        materialName = materialName.substring("Material ".length()).trim();
        extending = true;
    }else{
        throw new IOException("Specified file is not a Material file");
    }
    String[] split = materialName.split(":", 2);
    if (materialName.equals("")){
        throw new MatParseException("Material name cannot be empty", materialStat);
    }
    if (split.length == 2){
        // "Material <name> : <defPath>" — a material instance extending a definition.
        if (!extending){
            throw new MatParseException("Must use 'Material' when extending.", materialStat);
        }
        String extendedMat = split[1].trim();
        MaterialDef def = (MaterialDef) assetManager.loadAsset(new AssetKey(extendedMat));
        if (def == null) {
            throw new MatParseException("Extended material " + extendedMat + " cannot be found.", materialStat);
        }
        material = new Material(def);
        material.setKey(key);
        // material.setAssetName(fileName);
    }else if (split.length == 1){
        // "MaterialDef <name>" — a standalone material definition.
        if (extending){
            throw new MatParseException("Expected ':', got '{'", materialStat);
        }
        materialDef = new MaterialDef(assetManager, materialName);
        // NOTE: pass file name for defs so they can be loaded later
        materialDef.setAssetName(key.getName());
    }else{
        throw new MatParseException("Cannot use colon in material name/path", materialStat);
    }
    for (Statement statement : materialStat.getContents()){
        split = statement.getLine().split("[ \\{]");
        String statType = split[0];
        if (extending){
            if (statType.equals("MaterialParameters")){
                readExtendingMaterialParams(statement.getContents());
            }else if (statType.equals("AdditionalRenderState")){
                readAdditionalRenderState(statement.getContents());
            }else if (statType.equals("Transparent")){
                readTransparentStatement(statement.getLine());
            }
        }else{
            if (statType.equals("Technique")){
                readTechnique(statement);
            }else if (statType.equals("MaterialParameters")){
                readMaterialParams(statement.getContents());
            }else{
                throw new MatParseException("Expected material statement, got '"+statType+"'", statement);
            }
        }
    }
}
/**
 * AssetLoader entry point: validates that the asset key type matches the file
 * extension (j3m requires a MaterialKey, j3md a plain AssetKey), parses the
 * stream, and returns either a Material (instance) or a MaterialDef (definition).
 */
public Object load(AssetInfo info) throws IOException {
    this.assetManager = info.getManager();
    InputStream in = info.openStream();
    try {
        key = info.getKey();
        if (key.getExtension().equals("j3m") && !(key instanceof MaterialKey)) {
            throw new IOException("Material instances must be loaded via MaterialKey");
        } else if (key.getExtension().equals("j3md") && key instanceof MaterialKey) {
            throw new IOException("Material definitions must be loaded via AssetKey");
        }
        loadFromRoot(BlockLanguageParser.parse(in));
    } finally {
        if (in != null){
            in.close();
        }
    }
    if (material != null){
        // material implementation
        return material;
    }else{
        // material definition
        return materialDef;
    }
}
/**
 * Loads a material definition from already-parsed statements (used when the
 * caller parsed the block language itself, e.g. for embedded definitions).
 */
public MaterialDef loadMaterialDef(List<Statement> roots, AssetManager manager, AssetKey key) throws IOException {
    this.key = key;
    this.assetManager = manager;
    loadFromRoot(roots);
    return materialDef;
}
/**
 * Lazily switches this technique into shader-node mode: allowed only while no
 * file-based vertex/fragment shader has been declared. Reuses (and clears) the
 * delegate across techniques and wires it to the current def/technique state.
 */
protected void initNodesLoader() {
    if (!isUseNodes) {
        // Node mode is mutually exclusive with declared shader files.
        isUseNodes = shaderName.get(Shader.ShaderType.Vertex) == null && shaderName.get(Shader.ShaderType.Fragment) == null;
        if (isUseNodes) {
            if (nodesLoaderDelegate == null) {
                nodesLoaderDelegate = new ShaderNodeLoaderDelegate();
            }else{
                nodesLoaderDelegate.clear();
            }
            nodesLoaderDelegate.setTechniqueDef(technique);
            nodesLoaderDelegate.setMaterialDef(materialDef);
            nodesLoaderDelegate.setAssetManager(assetManager);
        }
    }
}
/**
 * Texture options allow you to specify how a texture should be initialized by including an option before
 * the path to the texture in the .j3m file.
 * <p>
 * <b>Example:</b>
 * <pre>
 * DiffuseMap: MinTrilinear MagBilinear WrapRepeat_S "some/path/to a/texture.png"
 * </pre>
 * This would apply a minification filter of "Trilinear", a magnification filter of "Bilinear" and set the wrap mode to "Repeat".
 * </p>
 * <p>
 * <b>Note:</b> If several filters of the same type are added, eg. MinTrilinear MinNearestLinearMipMap, the last one will win.
 * </p>
 */
private enum TextureOption {
    /**
     * Applies a {@link com.jme3.texture.Texture.MinFilter} to the texture.
     */
    Min {
        @Override
        public void applyToTexture(final String option, final Texture texture) {
            texture.setMinFilter(Texture.MinFilter.valueOf(option));
        }
    },
    /**
     * Applies a {@link com.jme3.texture.Texture.MagFilter} to the texture.
     */
    Mag {
        @Override
        public void applyToTexture(final String option, final Texture texture) {
            texture.setMagFilter(Texture.MagFilter.valueOf(option));
        }
    },
    /**
     * Applies a {@link com.jme3.texture.Texture.WrapMode} to the texture. This also supports {@link com.jme3.texture.Texture.WrapAxis}
     * by adding "_AXIS" to the texture option. For instance if you wanted to repeat on the S (horizontal) axis, you
     * would use <pre>WrapRepeat_S</pre> as a texture option.
     */
    Wrap {
        @Override
        public void applyToTexture(final String option, final Texture texture) {
            final int separatorPosition = option.indexOf("_");
            // NOTE(review): this condition looks like it is meant to test "underscore
            // present"; it only works because WrapAxis names are one character, putting
            // the underscore exactly at length-2 — confirm before changing.
            if (separatorPosition >= option.length() - 2) {
                final String axis = option.substring(separatorPosition + 1);
                final String mode = option.substring(0, separatorPosition);
                final Texture.WrapAxis wrapAxis = Texture.WrapAxis.valueOf(axis);
                texture.setWrap(wrapAxis, Texture.WrapMode.valueOf(mode));
            } else {
                texture.setWrap(Texture.WrapMode.valueOf(option));
            }
        }
    },
    /**
     * Applies a {@link com.jme3.texture.Texture.WrapMode#Repeat} to the texture. This is simply an alias for
     * WrapRepeat, please use WrapRepeat instead if possible as this may become deprecated later on.
     */
    Repeat {
        @Override
        public void applyToTexture(final String option, final Texture texture) {
            Wrap.applyToTexture("Repeat", texture);
        }
    },
    /**
     * Applies flipping on the Y axis to the {@link TextureKey#setFlipY(boolean)}.
     */
    Flip {
        @Override
        public void applyToTextureKey(final String option, final TextureKey textureKey) {
            textureKey.setFlipY(true);
        }
    };
    // Strips the enum-constant prefix from the token, e.g. "MinTrilinear" -> "Trilinear".
    public String getOptionValue(final String option) {
        return option.substring(name().length());
    }
    // Default no-ops: each constant overrides only the hook it needs.
    public void applyToTexture(final String option, final Texture texture) {
    }
    public void applyToTextureKey(final String option, final TextureKey textureKey) {
    }
    // Returns the option whose name prefixes the token, or null if none matches.
    public static TextureOption getTextureOption(final String option) {
        for(final TextureOption textureOption : TextureOption.values()) {
            if (option.startsWith(textureOption.name())) {
                return textureOption;
            }
        }
        return null;
    }
}
/**
 * Internal object used for holding a {@link com.jme3.material.plugins.J3MLoader.TextureOption} and it's value. Also
 * contains a couple of convenience methods for applying the TextureOption to either a TextureKey or a Texture.
 */
private static class TextureOptionValue {
    private final TextureOption textureOption;
    // The option token with the enum-name prefix already stripped.
    private final String value;
    public TextureOptionValue(TextureOption textureOption, String value) {
        this.textureOption = textureOption;
        this.value = value;
    }
    public void applyToTextureKey(final TextureKey textureKey) {
        textureOption.applyToTextureKey(value, textureKey);
    }
    public void applyToTexture(final Texture texture) {
        textureOption.applyToTexture(value, texture);
    }
}
}
| |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.generator;
import com.gs.fw.common.mithra.generator.metamodel.*;
import com.gs.fw.common.mithra.generator.queryparser.ASTLiteral;
import com.gs.fw.common.mithra.generator.type.*;
import com.gs.fw.common.mithra.generator.util.StringBuilderBuilder;
import com.gs.fw.common.mithra.generator.util.StringUtility;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public abstract class AbstractAttribute implements CommonAttribute, Comparable
{
private MithraObjectTypeWrapper owner;
public static final String QUOTE = "\"";
private AttributePureType wrapped;
private List<DependentRelationship> dependentRelationshipsToSet;
protected JavaType type;
private boolean asOfAttributeTo;
private boolean asOfAttributeFrom;
private boolean setAsString = false;
private boolean isInherited = false;
private MithraObjectTypeWrapper originalOwner;
protected int offHeapFieldOffset = -1;
protected int offHeapNullBitsOffset = -1;
protected int offHeapNullBitsPosition = -1;
protected int onHeapNullableIndex = -1;
protected int onHeapMutablePkNullableIndex = -1;
/**
 * Creates an attribute from its XML model type, resolving the declared java
 * type eagerly so an invalid type fails fast with JavaTypeException.
 */
public AbstractAttribute(AttributePureType wrapped, MithraObjectTypeWrapper owner) throws JavaTypeException
{
    this(owner, wrapped);
    this.type = JavaType.create(this.getAttributeType().getJavaType());
}
// Subclass constructor: owner only; wrapped type and java type set elsewhere.
protected AbstractAttribute(MithraObjectTypeWrapper owner)
{
    this.owner = owner;
}
// Subclass constructor: owner and wrapped model type; java type set elsewhere.
protected AbstractAttribute(MithraObjectTypeWrapper owner, AttributePureType wrapped)
{
    this.owner = owner;
    this.wrapped = wrapped;
}
// Returns the assigned index, or -1 if it has not been set yet.
public int getOnHeapMutablePkNullableIndex()
{
    return onHeapMutablePkNullableIndex;
}
/**
 * Assigns the on-heap mutable-PK nullable index; may only be called once.
 *
 * @throws IllegalStateException if the index was already assigned
 */
public void setOnHeapMutablePkNullableIndex(int onHeapMutablePkNullableIndex)
{
    if (this.onHeapMutablePkNullableIndex != -1)
    {
        // More specific than the previous bare RuntimeException("should not get here").
        throw new IllegalStateException("onHeapMutablePkNullableIndex already set to "
                + this.onHeapMutablePkNullableIndex + "; it must not be assigned twice");
    }
    this.onHeapMutablePkNullableIndex = onHeapMutablePkNullableIndex;
}
// True when the generated code may compare this attribute via raw off-heap bytes:
// non-nullable primitives, plus boolean/String/Timestamp/Date attributes.
public boolean isComparableUsingOffHeapBytes()
{
    return (this.isPrimitive() && !this.isNullable()) || this.isBooleanAttribute() || this.isStringAttribute() || this.isTimestampAttribute() || this.isDateAttribute();
}
/**
 * Maps this attribute's off-heap storage size in bytes to the raw accessor name
 * used for byte-wise comparisons in generated code.
 *
 * @throws IllegalStateException if the size is not 1, 2, 4 or 8 bytes
 */
public String getOffHeapComparisonMethod()
{
    int size = this.getType().getOffHeapSize();
    switch(size)
    {
        case 1:
            return "zGetByte";
        case 2:
            return "zGetShort";
        case 4:
            return "zGetInteger";
        case 8:
            return "zGetLong";
        default:
            // Report the offending size instead of a bare "should not get here".
            throw new IllegalStateException("unexpected off-heap size: " + size);
    }
}
// Returns the assigned index, or -1 if it has not been set yet.
public int getOnHeapNullableIndex()
{
    return onHeapNullableIndex;
}
/**
 * Assigns the on-heap nullable index; may only be called once.
 *
 * @throws IllegalStateException if the index was already assigned
 */
public void setOnHeapNullableIndex(int onHeapNullableIndex)
{
    if (this.onHeapNullableIndex != -1)
    {
        // More specific than the previous bare RuntimeException("should not get here").
        throw new IllegalStateException("onHeapNullableIndex already set to "
                + this.onHeapNullableIndex + "; it must not be assigned twice");
    }
    this.onHeapNullableIndex = onHeapNullableIndex;
}
// Byte offset of this attribute's value within the off-heap storage record (-1 until assigned).
public int getOffHeapFieldOffset()
{
    return offHeapFieldOffset;
}
public void setOffHeapFieldOffset(int offHeapFieldOffset)
{
    this.offHeapFieldOffset = offHeapFieldOffset;
}
// Byte offset of the null-bits word holding this attribute's null flag (-1 until assigned).
public int getOffHeapNullBitsOffset()
{
    return offHeapNullBitsOffset;
}
public boolean hasOffHeapNullBitsOffset()
{
    return this.offHeapNullBitsOffset >= 0;
}
public void setOffHeapNullBitsOffset(int offHeapNullBitsOffset)
{
    this.offHeapNullBitsOffset = offHeapNullBitsOffset;
}
// Bit position of this attribute's null flag within the null-bits word (-1 until assigned).
public int getOffHeapNullBitsPosition()
{
    return offHeapNullBitsPosition;
}
public void setOffHeapNullBitsPosition(int offHeapNullBitsPosition)
{
    this.offHeapNullBitsPosition = offHeapNullBitsPosition;
}
/**
 * Builds the generated-code expression that reads this attribute's value from
 * off-heap storage, e.g. {@code zGetInteger(_storage, 12)}.
 */
public String getOffHeapGetterExpression()
{
    StringBuilder expression = new StringBuilder("zGet");
    expression.append(this.getType().getJavaTypeStringPrimary());
    expression.append("(_storage, ");
    expression.append(this.getOffHeapFieldOffset());
    expression.append(")");
    return expression.toString();
}
/**
 * Builds the generated-code expression that writes this attribute's value into
 * off-heap storage; nullable non-boolean primitives also pass null-bit offset
 * and position so the setter can clear the null flag.
 */
public String getOffHeapSetterExpression()
{
    // Note: the two consecutive literals below intentionally reproduce the
    // historical "(_storage,  value, " spacing of the generated code.
    StringBuilder expression = new StringBuilder("zSet");
    expression.append(this.getType().getJavaTypeStringPrimary());
    expression.append("(_storage, ");
    expression.append(" value, ");
    expression.append(this.getOffHeapFieldOffset());
    if (this.isPrimitive() && !this.isBooleanAttribute())
    {
        expression.append(", ").append(this.getOffHeapNullBitsOffset());
        expression.append(", ").append(this.getOffHeapNullBitsPosition());
    }
    expression.append(")");
    return expression.toString();
}
// The following six methods delegate to the owner wrapper, which knows how the
// per-object null flags are laid out; they return code snippets for the generator.
public String getNullGetterExpression()
{
    return this.getOwner().getNullGetterExpressionForIndex(this.onHeapNullableIndex);
}
public String getNullGetterExpressionForMutable()
{
    return this.getOwner().getNullGetterExpressionForMutableIndex(this.onHeapMutablePkNullableIndex);
}
public String getNullSetterExpression()
{
    return this.getOwner().getNullSetterExpressionForIndex(this.onHeapNullableIndex);
}
public String getNullSetterExpressionForMutable()
{
    return this.getOwner().getNullSetterExpressionForMutableForIndex(this.onHeapMutablePkNullableIndex);
}
public String getNotNullSetterExpression()
{
    return this.getOwner().getNotNullSetterExpressionForIndex(this.onHeapNullableIndex);
}
public String getNotNullSetterExpressionForMutablePk()
{
    return this.getOwner().getNotNullSetterExpressionForMutablePk(this.onHeapMutablePkNullableIndex);
}
/**
 * Re-resolves this attribute's java type from the given type name.
 *
 * @throws JavaTypeException if the name is not a supported java type
 */
public void setJavaType(String type) throws JavaTypeException
{
    this.type = JavaType.create(type);
}
// True when this attribute was inherited from a super-object definition.
public boolean isInherited()
{
    return this.isInherited;
}
public void setInherited(boolean inherited)
{
    this.isInherited = inherited;
}
@Override
public boolean isFinalGetter()
{
    // Attribute-level setting wins; otherwise fall back to the owner's default.
    return this.wrapped.isFinalGetterSet() ? this.wrapped.isFinalGetter() : this.owner.isDefaultFinalGetters();
}
public boolean hasUniqueAlias()
{
    return this.getUniqueAlias() != null;
}
// Prefers the original owner's alias when the attribute was copied from another object.
public String getUniqueAlias()
{
    if (this.originalOwner != null) return this.originalOwner.getUniqueAlias();
    return this.owner.getUniqueAlias();
}
public boolean isUsedForOptimisticLocking()
{
    return this.getAttributeType() != null && this.getAttributeType().isUseForOptimisticLocking();
}
// True when set either programmatically or via the XML model.
public boolean isSetAsString()
{
    AttributePureType type = this.getAttributeType();
    return this.setAsString || (type != null && type.isSetAsString());
}
public void setSetAsString(boolean setAsString)
{
    this.setAsString = setAsString;
}
// The remaining methods are straight delegations to the underlying XML model type.
public String getDefaultIfNull()
{
    return this.getAttributeType().getDefaultIfNull();
}
public String getName()
{
    return this.getAttributeType().getName();
}
protected void setName(String name)
{
    this.getAttributeType().setName(name);
}
public boolean hasModifyTimePrecisionOnSet()
{
    return this.getAttributeType().isModifyTimePrecisionOnSetSet();
}
public TimePrecisionType getModifyTimePrecisionOnSet()
{
    return this.getAttributeType().getModifyTimePrecisionOnSet();
}
public boolean hasMaxLength()
{
    return this.getAttributeType().isMaxLengthSet();
}
public int getMaxLength()
{
    return this.getAttributeType().getMaxLength();
}
public int getScale()
{
    return this.getAttributeType().getScale();
}
public int getPrecision()
{
    return this.getAttributeType().getPrecision();
}
/**
 * Returns the generated-code expression for the timezone used when converting
 * this timestamp column: UTC, the database timezone variable, or the default.
 */
public String getConvertTimeZoneString(String databaseTimeZone)
{
    if (isUTCTimezone())
    {
        return "MithraTimestamp.UtcTimeZone";
    }
    if (isDatabaseTimezone())
    {
        return databaseTimeZone;
    }
    return "MithraTimestamp.DefaultTimeZone";
}
/**
 * Convenience overload using the conventional {@code databaseTimeZone} variable name.
 */
public String getConvertTimeZoneString()
{
    return getConvertTimeZoneString("databaseTimeZone");
}
/**
 * Generates the JDBC statement-setter code for this attribute. Timestamps are
 * converted with an explicit timezone (falling back to the default timezone for
 * the "infinity" date of as-of-to attributes), Time values go through
 * convertToSql(), and everything else uses the plain parameter setter.
 */
public String getSqlSetParameters(String attributeGetter)
{
    // Use a StringBuilder instead of repeated String += accumulation.
    StringBuilder result = new StringBuilder();
    if (this.isTimestampAttribute())
    {
        result.append("conversionTimeZone = ").append(this.getConvertTimeZoneString()).append(";\n");
        if (this.isAsOfAttributeTo())
        {
            result.append("if (data.").append(attributeGetter).append(".getTime() == ")
                    .append(this.getOwner().getFinderClassName()).append(".")
                    .append(this.getAsOfAttributeNameForAsOfAttributeTo())
                    .append("().getInfinityDate().getTime())\n");
            result.append("{ \n conversionTimeZone = MithraTimestamp.DefaultTimeZone; \n }\n");
        }
        result.append("dt.setTimestamp(stm, pos, data.").append(attributeGetter).append(", false, conversionTimeZone);\n");
        result.append("pos++;\n");
    }
    else if(this.isTimeAttribute())
    {
        result.append("stm.").append(this.getSqlParameterSetter()).append("(pos++, ")
                .append(this.convertSqlParameter("data." + attributeGetter)).append(".convertToSql());\n");
    }
    else
    {
        result.append("stm.").append(this.getSqlParameterSetter()).append("(pos++, ")
                .append(this.convertSqlParameter("data." + attributeGetter)).append(");\n");
    }
    return result.toString();
}
/**
 * Max length used when generating comparisons; falls back to 255 when the
 * attribute does not declare an explicit maxLength.
 */
public int getMaxLengthForComparison()
{
    return this.hasMaxLength() ? this.getAttributeType().getMaxLength() : 255;
}
// True when the XML model explicitly sets the poolable flag.
public boolean isSetPoolable()
{
    return this.getAttributeType().isPoolableSet();
}
// Pooling requires both a poolable java type and the model flag.
public boolean isPoolable()
{
    return getType().canBePooled() && this.getAttributeType().isPoolable();
}
// Primary keys are non-nullable unless nullability was set explicitly.
public boolean isNullable()
{
    return this.getAttributeType().isNullable() && (this.getAttributeType().isNullableSet() || !this.getAttributeType().isPrimaryKey());
}
public boolean isReadonly()
{
    return this.getAttributeType().isReadonly();
}
// Trimming only applies to string attributes with the trim flag set.
public boolean trimString()
{
    return this.isStringAttribute() && this.getAttributeType().isTrim();
}
public boolean mustTrim()
{
    return this.trimString();
}
/**
 * Returns the column name wrapped in double quotes.
 */
public String getQuotedColumnName()
{
    // Consistency: use the class's QUOTE constant instead of a hard-coded "\"".
    return QUOTE + this.getColumnName() + QUOTE;
}
// A String column without an explicit maxLength deserves a warning at creation time.
public boolean mustWarnDuringCreation()
{
    return this.getType() instanceof StringJavaType && !this.hasMaxLength();
}
/**
 * Builds the DDL fragment for this attribute's column: name, SQL type
 * (dialect-dependent types are emitted as runtime code via appendCode()),
 * identity or null constraint, and an optional trailing comma.
 *
 * @param addComma whether to append a trailing "," (for all but the last column)
 */
public String getColumnCreationStatement(boolean addComma)
{
    StringBuilderBuilder sbb = new StringBuilderBuilder();
    sbb.appendConstant(this.getColumnName()).appendConstant(" ");
    int sqlType = this.getType().getSqlType();
    // Idiom: switch over the JDBC type instead of a long if/else-if chain.
    // Types whose rendering differs per database dialect are resolved at runtime
    // through the generated "dt" (database type) object.
    switch (sqlType)
    {
        case java.sql.Types.BIT:
            sbb.appendCode("dt.getSqlDataTypeForBoolean()");
            break;
        case java.sql.Types.TIMESTAMP:
            sbb.appendCode("dt.getSqlDataTypeForTimestamp()");
            break;
        case java.sql.Types.TIME:
            sbb.appendCode("dt.getSqlDataTypeForTime()");
            break;
        case java.sql.Types.TINYINT:
            sbb.appendCode("dt.getSqlDataTypeForTinyInt()");
            break;
        case java.sql.Types.VARBINARY:
            sbb.appendCode("dt.getSqlDataTypeForVarBinary()");
            break;
        case java.sql.Types.DOUBLE:
            sbb.appendCode("dt.getSqlDataTypeForDouble()");
            break;
        case java.sql.Types.BIGINT:
            sbb.appendCode("dt.getSqlDataTypeForLong()");
            break;
        default:
            sbb.appendConstant(this.getType().getSqlDataType());
            if (sqlType == java.sql.Types.VARCHAR)
            {
                // VARCHAR needs an explicit length; default to 255 when none declared.
                if (this.hasMaxLength())
                {
                    sbb.appendConstant("("+this.getMaxLength()+")");
                }
                else
                {
                    sbb.appendConstant("(255)");
                }
            }
            break;
    }
    if (this.isIdentity())
    {
        sbb.appendCode("dt.getIdentityTableCreationStatement()");
    }
    else
    {
        if (!this.isNullable())
        {
            sbb.appendConstant(" not null");
        }
        else
        {
            sbb.appendCode("dt.getNullableColumnConstraintString()");
        }
    }
    if (addComma)
    {
        sbb.appendConstant(",");
    }
    return sbb.getResult();
}
// the attribute's java type wrapper
public JavaType getType()
{
return this.type;
}
public String getColumnName()
{
return this.getAttributeType().getColumnName();
}
public void setColumnName(String columnName)
{
this.getAttributeType().setColumnName(columnName);
}
public boolean isPrimaryKey()
{
return this.getAttributeType().isPrimaryKey();
}
public boolean isIdentity()
{
return this.getAttributeType().isIdentity();
}
public boolean isMutablePrimaryKey()
{
return this.getAttributeType() != null && this.getAttributeType().isMutablePrimaryKey();
}
public boolean isInPlaceUpdate()
{
return this.getAttributeType() != null && this.getAttributeType().isInPlaceUpdate();
}
// shadow attributes keep a pre-update copy (mutable pk or optimistic-lock timestamp)
public boolean isShadowAttribute()
{
return this.isMutablePrimaryKey() || (this.isUsedForOptimisticLocking() && this.isTimestampAttribute());
}
// generated expression reading this attribute from a ResultSet local named "_rs"
public String getResultSetGetter(String paramOne, String paramTwo)
{
return "_rs.get" + this.getResultSetType() + "(" + paramOne + "," + paramTwo + ")";
}
public boolean isPrimaryKeyUsingSimulatedSequence()
{
return this.getAttributeType().isSimulatedSequenceSet();
}
public SimulatedSequenceType getSimulatedSequence()
{
return this.getAttributeType().getSimulatedSequence();
}
// a source attribute applies either explicitly or inherited from the owner's default
public boolean hasSimulatedSequenceSourceAttribute()
{
return this.getSimulatedSequence().isHasSourceAttribute() || (!this.getSimulatedSequence().isHasSourceAttributeSet() && this.getOwner().hasSourceAttribute());
}
public boolean isSetPrimaryKeyGeneratorStrategy()
{
return this.getAttributeType() != null && this.getAttributeType().isPrimaryKeyGeneratorStrategySet();
}
public String getPrimaryKeyGeneratorStrategy()
{
return this.getAttributeType().getPrimaryKeyGeneratorStrategy().value();
}
/**
 * Generated ResultSet read expression for this attribute. BigDecimal values
 * are validated against precision/scale; Time values are read via the
 * DatabaseType helper instead.
 * NOTE(review): in the Time branch the params argument is discarded and
 * "_pos++" is used directly — confirm this matches the generated calling
 * convention at every call site.
 */
public String getResultSetGetter(String params)
{
String result = "_rs.get" + this.getResultSetType() + "(" + params + ")";
if (this.getType().isBigDecimal())
{
result = "com.gs.fw.common.mithra.util.BigDecimalUtil.validateBigDecimalValue("+result+", "+this.getPrecision()+", "+this.getScale()+")";
}
else if(this.getType().isTime())
{
result = "_dt.getTime(_rs, _pos++)";
}
return result;
}
// like getResultSetGetter, but optionally wraps the read in trimString(...);
// when trimming is off the read is merely parenthesized
public String getResultSetGetterForString(String params)
{
String postProcess = "";
if (this.trimString())
{
postProcess = "trimString";
}
return postProcess+"(_rs.get" + this.getResultSetType() + "(" + params + "))";
}
/**
 * Returns the PreparedStatement setter method name for this attribute.
 * Attributes flagged setAsString are bound as strings regardless of their type.
 */
public String getSqlParameterSetter()
{
    return this.isSetAsString() ? "setString" : "set" + this.getResultSetType();
}
/**
 * Wraps the given value expression in whatever conversion is needed before it
 * can be bound as a SQL parameter. setAsString timestamp/time/date values are
 * converted to their string form; everything else delegates to the java type.
 */
public String convertSqlParameter(String param)
{
    if (!this.isSetAsString())
    {
        return this.getType().convertSqlParameter(param);
    }
    if (this.getType().isTimestamp())
    {
        return "convertTimestampToString(" + param + ", dt)";
    }
    if (this.getType().isTime())
    {
        return "convertTimeToString(" + param + ", dt)";
    }
    return "convertDateOnlyToString(" + param + ", dt)";
}
/**
 * Checks this attribute against its superclass counterpart and, when they are
 * compatible, inherits every setting the subclass left unspecified.
 *
 * @param attribute the superclass attribute; must be an Attribute instance
 * @return mismatch errors; empty when the attributes are compatible
 */
public List<String> validateAndUseMissingValuesFromSuperClass(CommonAttribute attribute)
{
Attribute superClassAttribute = (Attribute) attribute;
List<String> errors = this.checkAttributeMismatch(superClassAttribute);
if(errors.isEmpty())
{
// only inherit when the types/pk flags line up
this.validateIsSetPoolable(superClassAttribute);
this.validateIsSetTrim(superClassAttribute);
this.validateIsSetTruncate(superClassAttribute);
this.validateIsSetNullable(superClassAttribute);
this.validateIsSetTimezoneConversion(superClassAttribute);
this.validateColumnName(superClassAttribute);
this.validateDefaultIfNull(superClassAttribute);
}
return errors;
}
// Each helper below copies one setting from the superclass attribute when it
// was not explicitly set on this attribute ("validate" is historical naming;
// these only inherit values, they report nothing).
protected void validateIsSetPoolable(Attribute superClassAttribute)
{
if (!this.getAttributeType().isPoolableSet() && superClassAttribute.getAttributeType().isPoolableSet())
{
this.getAttributeType().setPoolable(superClassAttribute.isPoolable());
}
}
protected void validateIsSetTrim(Attribute superClassAttribute)
{
if (!this.getAttributeType().isTrimSet() && superClassAttribute.getAttributeType().isTrimSet())
{
this.getAttributeType().setTrim(superClassAttribute.getAttributeType().isTrim());
}
}
protected void validateIsSetTruncate(Attribute superClassAttribute)
{
if (!this.getAttributeType().isTruncateSet() && superClassAttribute.getAttributeType().isTruncateSet())
{
this.getAttributeType().setTruncate(superClassAttribute.getAttributeType().isTruncate());
}
}
protected void validateIsSetNullable(Attribute superClassAttribute)
{
if (!this.getAttributeType().isNullableSet() && superClassAttribute.getAttributeType().isNullableSet())
{
this.getAttributeType().setNullable(superClassAttribute.getAttributeType().isNullable());
}
}
protected void validateIsSetTimezoneConversion(Attribute superClassAttribute)
{
if (!this.getAttributeType().isTimezoneConversionSet() && superClassAttribute.getAttributeType().isTimezoneConversionSet())
{
this.getAttributeType().setTimezoneConversion(superClassAttribute.getAttributeType().getTimezoneConversion());
}
}
// column name and defaultIfNull inherit whenever they are absent locally
protected void validateColumnName(Attribute superClassAttribute)
{
if (this.getAttributeType().getColumnName() == null)
{
this.getAttributeType().setColumnName(superClassAttribute.getColumnName());
}
}
protected void validateDefaultIfNull(Attribute superClassAttribute)
{
if (this.getAttributeType().getDefaultIfNull() == null)
{
this.getAttributeType().setDefaultIfNull(superClassAttribute.getDefaultIfNull());
}
}
/**
 * Compares this attribute with the same-named attribute of the superclass and
 * reports incompatibilities: java type mismatch and primary-key disagreement.
 *
 * Fix: the java-type message had an unbalanced quote — it opened a quote
 * around the superclass attribute name but never closed it.
 *
 * @param superClassAttribute the corresponding superclass attribute
 * @return human-readable error messages; empty when compatible
 */
protected List<String> checkAttributeMismatch(Attribute superClassAttribute)
{
    List<String> errors = new ArrayList<String>();
    if (!superClassAttribute.getType().equals(this.getType()))
    {
        errors.add("java type for attribute '" + this.getName() + "' does not match java type for same attribute in superclass '" + superClassAttribute.getName() + "'");
    }
    if (superClassAttribute.isPrimaryKey() != this.isPrimaryKey())
    {
        if (superClassAttribute.isPrimaryKey())
        {
            errors.add("attribute '" + this.getName() + "' is a primaryKey in superclass");
        }
        else
        {
            errors.add("attribute '" + this.getName() + "' is not a primaryKey in superclass");
        }
    }
    return errors;
}
// marks this attribute as the "to" column backing an AsOfAttribute
public void setIsAsOfAttributeTo(boolean asOfAttributeTo)
{
this.asOfAttributeTo = asOfAttributeTo;
}
// marks this attribute as the "from" column backing an AsOfAttribute
public void setIsAsOfAttributeFrom(boolean asOfAttributeFrom)
{
this.asOfAttributeFrom = asOfAttributeFrom;
}
public boolean isAsOfAttributeTo()
{
return this.asOfAttributeTo;
}
// true when the owning AsOfAttribute represents infinity as NULL in the database
public boolean isAsOfAttributeInfinityNull()
{
return this.isAsOfAttributeTo() && ((AsOfAttribute)this.getOwner().getAttributeByName(this.getAsOfAttributeNameForAsOfAttributeTo())).isInfinityNull();
}
public boolean isAsOfAttributeFrom()
{
return this.asOfAttributeFrom;
}
// "until" setters are generated for dated transactional objects, except on the as-of columns themselves
public boolean needsUntilImplementation()
{
return this.getOwner().hasBusinessDateAsOfAttribute() && this.getOwner().isTransactional() && !(this.isAsOfAttributeFrom() || this.isAsOfAttributeTo());
}
/**
 * Copies this attribute for a new owner, preserving the as-of flags and the
 * setAsString flag, and remembering the true original owner across repeated
 * cloning.
 */
public Attribute cloneForNewOwner(MithraObjectTypeWrapper newOwner)
{
AbstractAttribute result = new Attribute(this.getAttributeType(), newOwner);
result.asOfAttributeTo = this.asOfAttributeTo;
result.asOfAttributeFrom = this.asOfAttributeFrom;
result.setAsString = this.setAsString;
if (this.originalOwner != null)
{
// this is itself a clone: keep pointing at the first owner
result.originalOwner = this.originalOwner;
}
else
{
result.originalOwner = this.owner;
}
return (Attribute) result;
}
public boolean truncate()
{
return this.getAttributeType().isTruncate();
}
//
// public boolean modifyTimePrecisionOnSet()
// {
// return this.getAttributeType().isModifyTimePrecisionOnSet();
// }
// the owner this attribute was first declared on, before any cloning
public MithraObjectTypeWrapper getOriginalOwner()
{
return this.originalOwner;
}
// true when setting this attribute must also cascade to a dependent relationship's attribute
public boolean mustSetRelatedObjectAttribute()
{
if (this.dependentRelationshipsToSet != null)
{
DependentRelationship[] relationshipsToSet = this.getDependentRelationships();
for(int r=0;r<relationshipsToSet.length;r++)
{
RelationshipAttribute relationshipAttribute = relationshipsToSet[ r ].getRelationshipAttribute();
if (relationshipAttribute.isRelatedDependent())
{
return true;
}
}
}
return false;
}
// registers a relationship whose related attribute must be kept in sync with this one;
// the backing list is created lazily
public void addDependentRelationship(RelationshipAttribute relationshipAttribute, AbstractAttribute attributeToSet)
{
if (this.dependentRelationshipsToSet == null)
{
this.dependentRelationshipsToSet = new ArrayList<DependentRelationship>();
}
this.dependentRelationshipsToSet.add(new DependentRelationship(relationshipAttribute, attributeToSet));
}
/**
 * Returns the relationships registered via addDependentRelationship.
 *
 * Robustness fix: previously this threw a NullPointerException when no
 * relationship had ever been registered (callers such as
 * mustSetRelatedObjectAttribute had to null-check first); it now returns an
 * empty array, which is backward compatible for all existing guarded callers.
 */
public DependentRelationship[] getDependentRelationships()
{
    if (this.dependentRelationshipsToSet == null)
    {
        return new DependentRelationship[0];
    }
    DependentRelationship[] result = new DependentRelationship[this.dependentRelationshipsToSet.size()];
    this.dependentRelationshipsToSet.toArray(result);
    return result;
}
// plain attributes never take parameters; subclasses may override
public boolean hasParameters()
{
return false;
}
// bean-style getter name, e.g. "getFoo" (prefix depends on the java type)
public String getGetter()
{
return this.getType().getGetterPrefix()+StringUtility.firstLetterToUpper(this.getName());
}
public String getPrivateGetter()
{
return this.getGetter();
}
// mutable primary keys are read through their "_old" shadow getter
public String getGetterOrMutableGetter()
{
String result = this.getGetter();
if (this.isMutablePrimaryKey())
{
result = "_old" + result;
}
return result;
}
public String getNullGetter()
{
return "is" + StringUtility.firstLetterToUpper(this.getName()) + "Null()";
}
public String getNullSetter()
{
return "set" + StringUtility.firstLetterToUpper(this.getName()) + "Null()";
}
// null-check getter, routed through the "_old" shadow for mutable primary keys
public String getNullGetterUseMutableIfApplicable()
{
String result = this.getNullGetter();
if (this.isMutablePrimaryKey())
{
result = "_old"+result;
}
return result;
}
public String getSetter()
{
return "set"+StringUtility.firstLetterToUpper(this.getName());
}
public String getPrivateSetter()
{
return getSetter();
}
// the parsed XML definition backing this attribute
public AttributePureType getAttributeType()
{
return this.wrapped;
}
// literal comparability: java literals compare to anything, otherwise defer to the type
public boolean isComparableTo(ASTLiteral node)
{
return node.isJavaLiteral() || this.getType().isComparableTo(node);
}
public boolean isComparableTo(AbstractAttribute attribute)
{
return this.getType().isComparableTo(attribute.getType());
}
public String getTypeAsString()
{
return this.getType().getJavaTypeString();
}
// setters for Date attributes accept any java.util.Date
public String getSetterTypeAsString()
{
if (this.isDateAttribute())
{
return "java.util.Date";
}
return this.getTypeAsString();
}
public String getStorageType()
{
return this.getType().getJavaTypeString();
}
// capitalized ResultSet accessor suffix, e.g. "Int" for _rs.getInt
public String getResultSetType()
{
return StringUtility.firstLetterToUpper(this.getType().getResultSetName());
}
public String getFinderAttributeSuperClassType()
{
return getType().getFinderAttributeSuperClassType();
}
public String getFinderAttributeType()
{
return getType().getFinderAttributeType();
}
public String getExtractionMethodName()
{
return getType().getExtractionMethodName();
}
public String getValueSetterMethodName()
{
return getType().getValueSetterMethodName();
}
public boolean isPrimitive()
{
return getType().isPrimitive();
}
public boolean isArray()
{
return getType().isArray();
}
public String getIsNullMethodName()
{
return "is"+StringUtility.firstLetterToUpper(this.getName())+"Null";
}
// overridden by the source/as-of attribute subclasses
public boolean isSourceAttribute()
{
return false;
}
public boolean isAsOfAttribute()
{
return false;
}
// a source attribute is needed on operations unless this IS the source attribute
public boolean needsSourceAttribute()
{
return this.getOwner().hasSourceAttribute() && !this.isSourceAttribute();
}
public boolean needsAsOfAttributes()
{
return this.getOwner().hasAsOfAttributes();
}
public boolean hasSourceOrAsOfAttribute()
{
return this.needsSourceAttribute() || this.getOwner().hasAsOfAttributes();
}
/**
 * Picks the generated equality helper according to whether the owner requires
 * a source-attribute check, an as-of check, or both. Calling this when
 * neither applies is a programming error.
 */
public String getEqualityMethodName()
{
    boolean withSource = this.needsSourceAttribute();
    boolean withAsOf = this.getOwner().hasAsOfAttributes();
    if (withSource)
    {
        return withAsOf ? "eqWithSourceAndAsOfCheck" : "eqWithSourceCheck";
    }
    if (withAsOf)
    {
        return "eqWithAsOfCheck";
    }
    throw new IllegalArgumentException("method called but there is no source or asOfAttribute");
}
// nullable primitives need the null-bit machinery in generated data objects
public boolean isNullablePrimitive()
{
return this.isNullable() && this.isPrimitive();
}
public MithraObjectTypeWrapper getOwner()
{
return this.owner;
}
public List getProperty()
{
return this.getAttributeType().getProperties();
}
public boolean hasProperties()
{
return !(this.getAttributeType() == null || this.getAttributeType().getProperties().isEmpty());
}
// key/value properties; a property without a value defaults to "Boolean.TRUE"
public Map<String, String> getProperties()
{
Map<String, String> properties = new HashMap<String, String>();
if (this.hasProperties())
{
List<PropertyType> propertyTypes = this.getAttributeType().getProperties();
for (PropertyType property : propertyTypes)
{
properties.put(property.getKey(), (property.getValue() == null) ? "Boolean.TRUE" : property.getValue());
}
}
return properties;
}
// NOTE(review): orders by name only and returns 0 for non-AbstractAttribute
// arguments, so the ordering is not consistent with equals (which also checks
// the owner) — confirm callers only sort homogeneous attribute lists.
public int compareTo(Object o)
{
if (o instanceof AbstractAttribute)
{
AbstractAttribute other = (AbstractAttribute) o;
return this.getName().compareTo(other.getName());
}
return 0;
}
public String getSqlTypeAsString()
{
return getType().getSqlTypeAsString();
}
public String getSqlTypeAsStringForNull()
{
return getType().getSqlTypeAsStringForNull();
}
// ---- java-type discriminators, one per supported wrapper type ----
public boolean isDoubleAttribute()
{
return getType() instanceof DoubleJavaType;
}
public boolean isCharAttribute()
{
return getType() instanceof CharJavaType;
}
// only strings and byte arrays carry a length
public boolean hasLength()
{
return getType() instanceof StringJavaType || getType() instanceof ByteArrayJavaType;
}
public boolean hasTrim()
{
return getType() instanceof StringJavaType;
}
public boolean isStringAttribute()
{
return getType() instanceof StringJavaType;
}
public boolean isTimestampAttribute()
{
return getType() instanceof TimestampJavaType;
}
public boolean isTimeAttribute()
{
return getType() instanceof TimeJavaType;
}
public boolean isDateAttribute()
{
return getType() instanceof DateJavaType;
}
public boolean isBigDecimalAttribute()
{
return getType() instanceof BigDecimalJavaType;
}
public boolean isBooleanAttribute()
{
return getType() instanceof BooleanJavaType;
}
// generated increment method name, e.g. "incrementFoo"
public String getIncrementer()
{
return "increment"+StringUtility.firstLetterToUpper(this.getName());
}
/**
 * Generates the statement that reads this attribute from an ObjectInput "in"
 * and stores it into off-heap storage "_storage" via the zSetXxx helpers.
 *
 * NOTE(review): the timestamp and date branches below compute a readStatement
 * that is then unconditionally overwritten by the generic in.readXxx() read,
 * so for off-heap storage every value goes through the plain reader. That is
 * consistent with the off-heap serializer writing timestamps as longs (see
 * getOffHeapSerializationStatement), but if the MithraTimestamp readers were
 * meant to be used here the branches need else-chaining — confirm against the
 * generated code before changing.
 */
public String getOffHeapDeserializationStatement()
{
String readStatement;
if (this.getType() instanceof ByteArrayJavaType)
{
throw new RuntimeException("not implemented");
}
if (this.isTimestampAttribute())
{
String methodStart = "";
methodStart += "MithraTimestamp.read";
if (this.isTimezoneNone())
{
methodStart += "TimezoneInsensitiveTimestamp";
}
else
{
methodStart += "Timestamp";
}
if (this.isAsOfAttributeTo())
{
methodStart += "WithInfinity(in, "+
this.getOwner().getFinderClassName()+"."+this.getAsOfAttributeNameForAsOfAttributeTo()+"().getInfinityDate())";
}
else
{
methodStart += "(in)";
}
readStatement = methodStart;
}
if (this.isDateAttribute())
{
readStatement = this.getType().getIoCast()+"MithraTimestamp.readTimezoneInsensitiveDate(in)";
}
readStatement = this.getType().getIoCast()+ "in.read"+ this.getType().getIoType()+"()";
String setStatement = "zSet"+this.getType().getJavaTypeStringPrimary()+"(_storage, "+readStatement+", "+this.getOffHeapFieldOffset();
if (this.isPrimitive() && !this.isBooleanAttribute())
{
// non-boolean primitives carry explicit null-bit bookkeeping
setStatement += ", "+this.getOffHeapNullBitsOffset()+", "+
this.getOffHeapNullBitsPosition();
if (this.isNullable())
{
setStatement += ", "+getNullGetterExpression();
}
else
{
setStatement += ", false";
}
}
setStatement += ")";
return setStatement;
}
// deserializes this attribute into the field named after the attribute
public String getDeserializationStatement()
{
String name = this.getName();
return getDeserializationStatementForName(name);
}
/**
 * Generates the statement that reads this attribute from an ObjectInput "in"
 * into "this.name". Byte arrays are length-prefixed (-1 meaning null);
 * timestamps honor timezone sensitivity, as-of infinity, and pooling;
 * poolable values are interned through the type's Pool.
 */
public String getDeserializationStatementForName(String name)
{
if (this.getType() instanceof ByteArrayJavaType)
{
String lengthVar = "_"+ name +"Length";
String result = "int "+lengthVar+" = in.readInt(); ";
result += "if ("+lengthVar+" == -1) "+ name +" = null; ";
result += "else { ";
result += name +" = new byte["+lengthVar+"];";
result += "in.readFully("+ name +");";
result += "}";
return result;
}
if (this.isTimestampAttribute())
{
String methodStart = "";
if (this.isPoolable())
{
// open the pooling call; the matching ")" arguments are appended below
methodStart = this.getType().getJavaTypeString()+"Pool.getInstance().getOrAddToCache(";
}
methodStart += "MithraTimestamp.read";
if (this.isTimezoneNone())
{
methodStart += "TimezoneInsensitiveTimestamp";
}
else
{
methodStart += "Timestamp";
}
if (this.isAsOfAttributeTo())
{
methodStart += "WithInfinity(in, "+
this.getOwner().getFinderClassName()+"."+this.getAsOfAttributeNameForAsOfAttributeTo()+"().getInfinityDate())";
}
else
{
methodStart += "(in)";
}
if (this.isPoolable())
{
methodStart += ", "+this.getOwner().getFinderClassName()+".isFullCache(), "+this.getOwner().getFinderClassName()+".isOffHeap())";
}
return "this."+ name + " = "+methodStart;
}
if (this.isDateAttribute())
{
return "this."+ name +" = "+this.getType().getIoCast()+"MithraTimestamp.readTimezoneInsensitiveDate(in)";
}
if (this.isPoolable())
{
return "this."+ name +" = "+this.getType().getJavaTypeString()+"Pool.getInstance().getOrAddToCache("+this.getType().getIoCast()+ "in.read"+ this.getType().getIoType()+"(), "+this.getOwner().getFinderClassName()+".isFullCache())";
}
return "this."+ name +" = "+this.getType().getIoCast()+ "in.read"+ this.getType().getIoType()+"()";
}
// serializes the field named after the attribute
public String getSerializationStatement()
{
return getSerializationStatementForName(this.getName());
}
// off-heap serialization reads via getters; timestamps are written as longs
public String getOffHeapSerializationStatement()
{
String getter = this.getGetter()+"()";
if (this.isTimestampAttribute())
{
getter = getTimestampLongGetter();
}
return getSerializationStatementForName(getter);
}
/**
 * Generates the statement that writes "this.name" to an ObjectOutput "out",
 * mirroring getDeserializationStatementForName: byte arrays are
 * length-prefixed (-1 for null), timestamps honor timezone sensitivity and
 * as-of infinity, dates are written timezone-insensitively.
 */
public String getSerializationStatementForName(String name)
{
if (this.getType() instanceof ByteArrayJavaType)
{
String result = "if ("+ name +" == null) out.writeInt(-1); ";
result += "else { ";
result += "out.writeInt("+ name +".length); ";
result += "out.write("+ name +"); ";
result += "}";
return result;
}
if (this.isTimestampAttribute())
{
String methodStart = "MithraTimestamp.write";
if (this.isTimezoneNone())
{
methodStart += "TimezoneInsensitiveTimestamp";
}
else
{
methodStart += "Timestamp";
}
if (this.isAsOfAttributeTo())
{
return methodStart+"WithInfinity(out, "+"this."+ name +", "+
this.getOwner().getFinderClassName()+"."+this.getAsOfAttributeNameForAsOfAttributeTo()+"().getInfinityDate())";
}
else
{
return methodStart+"(out, "+"this."+ name +")";
}
}
if (this.isDateAttribute())
{
return "MithraTimestamp.writeTimezoneInsensitiveDate(out, "+"this."+ name +")";
}
return "out.write"+this.getType().getIoType()+"(this."+ name +")";
}
// generated getter returning the timestamp as epoch millis
public String getTimestampLongGetter()
{
return "zGet"+StringUtility.firstLetterToUpper(this.getName())+"AsLong()";
}
public String getTimeLongGetter()
{
return "zGet"+StringUtility.firstLetterToUpper(this.getName())+"AsLong()";
}
public String getOffHeapTimeLongGetter()
{
return "zGetOffHeap"+StringUtility.firstLetterToUpper(this.getName())+"AsLong()";
}
public String getStringOffHeapIntGetter()
{
return "zGet"+StringUtility.firstLetterToUpper(this.getName())+"AsInt()";
}
// generated "name: value" expression for toString-style output; as-of columns
// print through the timestamp formatter with an explicit null check
public String getPrintableForm()
{
if (this.isAsOfAttributeFrom() || this.isAsOfAttributeTo())
{
return QUOTE+this.getName()+": "+QUOTE+"+"+"(is"+StringUtility.firstLetterToUpper(getName())+"Null() ? \"null\" : "+"PrintablePreparedStatement.timestampFormat.print("+getTimestampLongGetter()+"))";
}
return QUOTE+this.getName()+": "+QUOTE+"+"+this.getType().getPrintableForm(this.getGetter()+"()", this.getNullGetter(), this.isNullable());
}
public String getVisibility()
{
return "public";
}
/**
 * Equality is by attribute name plus owning class name.
 * NOTE(review): hashCode uses the name only; equal attributes still hash
 * equally (same name), so the contract holds, but same-named attributes of
 * different classes share a bucket.
 */
public boolean equals(Object obj)
{
if (obj instanceof Attribute)
{
Attribute other = (Attribute) obj;
if (this.getName().equals(other.getName()) && this.getOwner().getClassName().equals(other.getOwner().getClassName()))
{
return true;
}
}
return false;
}
public int hashCode()
{
return this.getName().hashCode();
}
// derives the AsOfAttribute name from this column's name; assumes the name ends in "To"
public String getAsOfAttributeNameForAsOfAttributeTo()
{
return this.getName().substring(0, this.getName().length() - 2); // chop off "To"
}
// same, for "from" columns; assumes the name ends in "From"
public String getAsOfAttributeNameForAsOfAttributeFrom()
{
return this.getName().substring(0, this.getName().length() - 4); // chop off "From"
}
// subclasses may override (e.g. as-of attributes default differently)
public TimezoneConversionType getDefaultTimezoneConversion()
{
return TimezoneConversionType.NONE;
}
public boolean isDatabaseTimezone()
{
return this.getTimezoneConversion().isConvertToDatabaseTimezone();
}
// explicit XML setting wins; otherwise the subclass default applies
public TimezoneConversionType getTimezoneConversion()
{
TimezoneConversionType timezoneConversion = this.getAttributeType().getTimezoneConversion();
if (timezoneConversion == null)
{
timezoneConversion = this.getDefaultTimezoneConversion();
}
return timezoneConversion;
}
public boolean isUTCTimezone()
{
return this.getTimezoneConversion().isConvertToUtc();
}
public boolean isTimezoneNone()
{
return this.getTimezoneConversion().isNone();
}
// only timestamps with a non-NONE conversion need timezone handling
public boolean isTimezoneConversionNeeded()
{
return ( (this.getType() instanceof TimestampJavaType) && (!isTimezoneNone()));
}
// cast prefix needed when assigning from another attribute's type; empty when types match
public String getPrimitiveCastType(AbstractAttribute other)
{
if (other.getType().equals(this.getType()))
{
return "";
}
return "("+this.getType().getJavaTypeString()+")";
}
// immutable unless it is a mutable primary key; readonly attributes are always immutable
public boolean isImmutable()
{
return (this.isPrimaryKey() && !this.isMutablePrimaryKey()) || this.isReadonly();
}
// sentinel "dirty" value for optimistic-locking version columns
public String getDirtyVersion()
{
if (this.isTimestampAttribute())
{
return "new Timestamp(0)";
}
return "-1";
}
// next version value: processing time for timestamps, +1 for numerics
public String getIncrementedVersion()
{
if (this.isTimestampAttribute())
{
return "new Timestamp(MithraManagerProvider.getMithraManager().getCurrentProcessingTime())";
}
return "data."+this.getGetter()+"() + 1";
}
/**
 * Validates this attribute's XML configuration, appending any problems to the
 * supplied list.
 *
 * Fix: two BigDecimal error messages contradicted their checks — the check
 * rejects precision < 1 (so the requirement is precision >= 1, not "> 1")
 * and rejects scale > precision (so the requirement is scale <= precision,
 * not "< precision"). The checks are unchanged; only the messages were wrong.
 *
 * @param errors accumulator for validation error messages (mutated in place)
 */
public void validate(List<String> errors)
{
    if (!this.isSetPrimaryKeyGeneratorStrategy() && this.isPrimaryKeyUsingSimulatedSequence())
    {
        errors.add(this.owner.getClassName()+" attribute "+this.getName()+" has a simulated sequence defined, but primaryKeyGeneratorStrategy=\"SimulatedSequence\" is not specified for the attribute");
    }
    if (!this.getType().canBePooled() && this.isSetPoolable())
    {
        errors.add(this.owner.getClassName()+" cannot pool attribute "+this.getName()+" only String and Timestamp attributes can be pooled");
    }
    if (this.getAttributeType().isTrimSet() && !(getType() instanceof StringJavaType))
    {
        errors.add(this.owner.getClassName()+" cannot trim attribute "+this.getName()+" only String attributes can be trimmed");
    }
    if (this.getAttributeType().isSetAsString())
    {
        if (!this.isTimestampAttribute() && !this.isDateAttribute() && !this.isAsOfAttribute())
        {
            errors.add(this.owner.getClassName()+" setAsString for attribute "+this.getName()+" can only be used with Date or Timestamp attributes");
        }
    }
    if (this.isBigDecimalAttribute())
    {
        if (!(this.getAttributeType().isScaleSet() && this.getAttributeType().isPrecisionSet()))
        {
            errors.add("BigDecimal attribute '"+this.getName()+"' in "+this.owner.getClassName()+" must specify precision and scale.");
        }
        int scale = this.getAttributeType().getScale();
        int precision = this.getAttributeType().getPrecision();
        if (scale < 0)
        {
            errors.add("Invalid scale value "+scale+". BigDecimal attribute '"+this.getName()+"' in "+this.owner.getClassName()+" must specify a non-negative scale value.");
        }
        if (precision < 1)
        {
            errors.add("Invalid precision value "+precision+". BigDecimal attribute '"+this.getName()+"' in "+this.owner.getClassName()+" must specify a precision >= 1.");
        }
        if (scale > precision)
        {
            errors.add("Invalid scale value "+scale+". BigDecimal attribute '"+this.getName()+"' in "+this.owner.getClassName()+" must specify a scale <= precision.");
        }
    }
}
/**
 * Renders a string as a Java source literal: null becomes the bare text
 * "null", anything else is wrapped in double quotes.
 */
private String quoteString(String s)
{
    return s == null ? "null" : '"' + s + '"';
}
/*
String columnName, String uniqueAlias, String attributeName,
String dataClassName, String busClassName, boolean isNullable,
boolean hasBusDate, RelatedFinder relatedFinder, Map<String, Object> properties,
boolean isTransactional, boolean isOptimistic
String and ByteArray have a length
Timestamp has: byte conversionType, boolean setAsString, boolean isAsOfAttributeTo, Timestamp infinity
BigDecimal has: int precision, int scale
*/
/**
 * Parameter list for the generated (compile-time) attribute constant,
 * including off-heap offsets; int/long attributes additionally carry the
 * simulated-sequence flag, and all carry the shadow-attribute flag.
 */
public String getGeneratorParameters()
{
String result = getCommonParameters("this", true);
if (this.getType().isIntOrLong())
{
result += ",";
result += this.isPrimaryKeyUsingSimulatedSequence();
}
result += ", "+(this.isMutablePrimaryKey() || this.isUsedForOptimisticLocking());
return result;
}
public TimestampPrecisionType getTimestampPrecision()
{
return wrapped.getTimestampPrecision();
}
/**
 * Builds the comma-separated argument list shared by the generated attribute
 * constructors. The argument ORDER is part of the generated-code contract —
 * do not reorder the appends. See the comment block above for the layout.
 *
 * @param finder the expression for the RelatedFinder argument
 * @param forGeneration when true, off-heap offsets are included
 */
protected String getCommonParameters(String finder, boolean forGeneration)
{
String result = getQuotedColumnName();
result += ",";
if (this.getUniqueAlias() == null)
{
result += "\"\"";
}
else
{
result += quoteString(this.getUniqueAlias());
}
result += ",";
result += getCommonConstructorParameters(finder);
if (forGeneration)
{
result += ",";
result += this.offHeapFieldOffset;
result += ",";
result += this.offHeapNullBitsOffset;
result += ",";
result += this.offHeapNullBitsPosition;
}
if (this.hasLength())
{
result += ",";
if (this.hasMaxLength())
{
result += this.getMaxLength();
}
else
{
result += "Integer.MAX_VALUE";
}
}
if(this.hasTrim())
{
result += ",";
result += this.mustTrim();
}
if (this.isTimestampAttribute())
{
result += ",";
// NOTE(review): if conversion is needed but is neither UTC nor database,
// nothing is appended after the comma — the TimezoneConversionType enum
// presumably has only NONE/UTC/DATABASE; confirm.
if (this.isTimezoneConversionNeeded())
{
if (this.isUTCTimezone())
{
result += "TimestampAttribute.CONVERT_TO_UTC";
}
else if (this.isDatabaseTimezone())
{
result += "TimestampAttribute.CONVERT_TO_DATABASE";
}
}
else
{
result += "TimestampAttribute.CONVERT_NONE";
}
result += ",";
result += this.isSetAsString();
result += ",";
result += this.isAsOfAttributeTo();
result += ",";
// the infinity constant lives on the finder, named after the as-of attribute
if (this.isAsOfAttributeTo())
{
result += this.getOwner().getFinderClassName()+"."+this.getAsOfAttributeNameForAsOfAttributeTo()+"Infinity";
}
else if (this.isAsOfAttributeFrom())
{
result += this.getOwner().getFinderClassName()+"."+this.getAsOfAttributeNameForAsOfAttributeFrom()+"Infinity";
}
else
{
result += "null";
}
result += ", "+getTimestampPrecision().asByte();
}
if (this.isDateAttribute())
{
result += ",";
result += this.isSetAsString();
}
if(this.isBigDecimalAttribute())
{
result += ","+this.getPrecision()+","+this.getScale();
}
return result;
}
// runtime-constructor variant: no off-heap offsets, finder passed as "finder"
public String getConstructorParameters()
{
return this.getCommonParameters("finder", false);
}
protected String getCommonConstructorParameters()
{
return getCommonConstructorParameters("this");
}
/**
 * Arguments common to all generated attribute constructors; the placeholders
 * BUSINESS_CLASS_NAME_WITH_DOTS / IMPL_CLASS_NAME_WITH_SLASHES are substituted
 * later in the generation pipeline. Argument order is part of the contract.
 */
protected String getCommonConstructorParameters(String finder)
{
String result = quoteString(this.getName());
result += ",";
result += "BUSINESS_CLASS_NAME_WITH_DOTS";
result += ",";
result += "IMPL_CLASS_NAME_WITH_SLASHES";
result += ",";
result += this.isNullable();
result += ",";
result += this.getOwner().hasBusinessDateAsOfAttribute();
result += ",";
result += finder;
result += ",";
if (this.hasProperties())
{
result += this.getName()+"Properties";
}
else
{
result += "null";
}
result += ",";
result += this.getOwner().isTransactional();
result += ",";
result += this.getOwner().getOptimisticLockAttribute() == this;
if (this.mayBeIdentity())
{
result += ","+this.isIdentity();
}
return result;
}
// only some java types can back an identity column
protected boolean mayBeIdentity()
{
return this.getType().mayBeIdentity();
}
// non-public attributes need a dedicated generated accessor attribute
public boolean needsGeneratedAttribute()
{
return !this.getVisibility().equals("public");
}
// extractor base class name, e.g. "JustIntExtractor"
public String getExtractorSuperClass()
{
return "Just"+this.getType().getJavaTypeStringPrimary()+"Extractor";
}
public String parseLiteralAndCast(String value)
{
return this.getType().parseLiteralAndCast(value);
}
// "TABLE.COLUMN" form using the owner's default table
public String getTableQualifiedMappedColumnName()
{
return this.getOwner().getDefaultTable()+"."+this.getColumnName();
}
public boolean isMapped()
{
return true;
}
// ---- bean-style accessor generation, delegated to the java type ----
public String getBeanGetter(int intCount, int longCount, int objectCount)
{
return this.getType().getBeanGetter(intCount, longCount, objectCount);
}
public String getBeanSetter(BeanState beanState)
{
return this.getType().getBeanSetter(beanState);
}
public boolean isBeanIntType()
{
return this.getType().isBeanIntType();
}
public boolean isBeanLongType()
{
return this.getType().isBeanLongType();
}
public boolean isBeanObjectType()
{
return this.getType().isBeanObjectType();
}
}
| |
/**
* Copyright (C) 2009 - 2013 SC 4ViewSoft SRL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.panthole.androidchart.chart;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.panthole.androidchart.model.XYMultipleSeriesDataset;
import com.panthole.androidchart.model.XYSeries;
import com.panthole.androidchart.renderer.XYMultipleSeriesRenderer;
import android.graphics.Canvas;
import android.graphics.Paint;
/**
* The time chart rendering class.
*/
public class TimeChart extends LineChart {
/** The constant to identify this chart type. */
public static final String TYPE = "Time";
/** The number of milliseconds in a day (fits in an int; widened to long on assignment). */
public static final long DAY = 24 * 60 * 60 * 1000;
/** The date format pattern to be used in formatting the X axis labels; null means auto. */
private String mDateFormat;
/** The starting point for labels (not used in the methods visible here — see getXLabels). */
private Double mStartPoint;
// package-private no-arg constructor for serialization/cloning
TimeChart() {
}
/**
 * Builds a new time chart instance.
 *
 * @param dataset the multiple series dataset
 * @param renderer the multiple series renderer
 */
public TimeChart(XYMultipleSeriesDataset dataset, XYMultipleSeriesRenderer renderer) {
super(dataset, renderer);
}
/**
 * Returns the date format pattern to be used for formatting the X axis
 * labels.
 *
 * @return the date format pattern for the X axis labels, or null when a
 *         range-based default is in effect
 */
public String getDateFormat() {
return mDateFormat;
}
/**
 * Sets the date format pattern to be used for formatting the X axis labels.
 *
 * @param format the date format pattern for the X axis labels. If null, an
 *          appropriate default format will be used.
 */
public void setDateFormat(String format) {
mDateFormat = format;
}
/**
 * The graphical representation of the labels on the X axis: each label value
 * is treated as epoch milliseconds and rendered through a range-appropriate
 * date format, with an optional tick mark and vertical grid line.
 *
 * @param xLabels the X labels values
 * @param xTextLabelLocations the X text label locations
 * @param canvas the canvas to paint to
 * @param paint the paint to be used for drawing
 * @param left the left value of the labels area
 * @param top the top value of the labels area
 * @param bottom the bottom value of the labels area
 * @param xPixelsPerUnit the amount of pixels per one unit in the chart labels
 * @param minX the minimum value on the X axis in the chart
 * @param maxX the maximum value on the X axis in the chart
 */
@Override
protected void drawXLabels(List<Double> xLabels, Double[] xTextLabelLocations, Canvas canvas,
Paint paint, int left, int top, int bottom, double xPixelsPerUnit, double minX, double maxX) {
int length = xLabels.size();
if (length > 0) {
boolean showLabels = mRenderer.isShowLabels();
boolean showGridY = mRenderer.isShowGridY();
// pick a format granularity from the full visible label span
DateFormat format = getDateFormat(xLabels.get(0), xLabels.get(length - 1));
for (int i = 0; i < length; i++) {
long label = Math.round(xLabels.get(i));
float xLabel = (float) (left + xPixelsPerUnit * (label - minX));
if (showLabels) {
paint.setColor(mRenderer.getXLabelsColor());
// short tick mark just below the axis line
canvas
.drawLine(xLabel, bottom, xLabel, bottom + mRenderer.getLabelsTextSize() / 3, paint);
drawText(canvas, format.format(new Date(label)), xLabel,
bottom + mRenderer.getLabelsTextSize() * 4 / 3 + mRenderer.getXLabelsPadding(), paint, mRenderer.getXLabelsAngle());
}
if (showGridY) {
paint.setColor(mRenderer.getGridColor(0));
canvas.drawLine(xLabel, bottom, xLabel, top, paint);
}
}
}
drawXTextLabels(xTextLabelLocations, canvas, paint, true, left, top, bottom, xPixelsPerUnit,
minX, maxX);
}
/**
 * Returns the date format to be used, based on the date range.
 * <p>
 * If a custom pattern was set via {@link #setDateFormat(String)} and it is a
 * valid {@link SimpleDateFormat} pattern, it takes precedence. Otherwise a
 * default is chosen from the span of the range: time-only below one day,
 * date+time between one and five days, date-only otherwise.
 *
 * @param start the start date in milliseconds
 * @param end the end date in milliseconds
 * @return the date format
 */
private DateFormat getDateFormat(double start, double end) {
  if (mDateFormat != null) {
    try {
      // The user-supplied pattern wins whenever it is a valid pattern.
      return new SimpleDateFormat(mDateFormat);
    } catch (IllegalArgumentException e) {
      // Invalid pattern: fall through to the range-based defaults below.
      // (Previously caught the overly-broad Exception; the constructor only
      // throws IllegalArgumentException for a bad pattern.)
    }
  }
  double diff = end - start;
  if (diff < DAY) {
    // Sub-day range: dates would all render identically, so show times only.
    return SimpleDateFormat.getTimeInstance(SimpleDateFormat.MEDIUM);
  }
  if (diff > DAY && diff < 5 * DAY) {
    return SimpleDateFormat.getDateTimeInstance(SimpleDateFormat.SHORT, SimpleDateFormat.SHORT);
  }
  // diff is exactly one day, or five days and above: plain dates suffice.
  return SimpleDateFormat.getDateInstance(SimpleDateFormat.MEDIUM);
}
/**
 * Returns the chart type identifier.
 *
 * @return the chart type
 */
public String getChartType() {
  // TYPE is a constant declared on this class (outside this excerpt).
  return TYPE;
}
/**
 * Computes the X label values to render for the visible range.
 * <p>
 * With rounded labels disabled, labels come from the actual X values of the
 * first series; otherwise labels are generated on a day-derived grid whose
 * period is halved or doubled to approximate the requested label count.
 *
 * @param min the minimum visible X value
 * @param max the maximum visible X value
 * @param count the desired number of labels (capped at 25)
 * @return the list of X label values
 */
@Override
protected List<Double> getXLabels(double min, double max, int count) {
  final List<Double> result = new ArrayList<Double>();
  if (!mRenderer.isXRoundedLabels()) {
    // Non-rounded mode: place labels at real data-point X values.
    if (mDataset.getSeriesCount() > 0) {
      XYSeries series = mDataset.getSeriesAt(0);
      int length = series.getItemCount();
      int intervalLength = 0;
      int startIndex = -1;
      // Count the points inside [min, max] and remember the first one.
      for (int i = 0; i < length; i++) {
        double value = series.getX(i);
        if (min <= value && value <= max) {
          intervalLength++;
          if (startIndex < 0) {
            startIndex = i;
          }
        }
      }
      if (intervalLength < count) {
        // Fewer in-range points than requested labels: use them all.
        for (int i = startIndex; i < startIndex + intervalLength; i++) {
          result.add(series.getX(i));
        }
      } else {
        // Sample roughly evenly-spaced points, keeping only in-range ones.
        float step = (float) intervalLength / count;
        int intervalCount = 0;
        for (int i = 0; i < length && intervalCount < count; i++) {
          double value = series.getX(Math.round(i * step));
          if (min <= value && value <= max) {
            result.add(value);
            intervalCount++;
          }
        }
      }
      return result;
    } else {
      // No series to sample from: fall back to the generic label computation.
      return super.getXLabels(min, max, count);
    }
  }
  if (mStartPoint == null) {
    // NOTE(review): Date.getTimezoneOffset() is deprecated; this appears to
    // align the first label to a local-time day boundary -- confirm before
    // replacing with java.util.Calendar / TimeZone arithmetic.
    mStartPoint = min - (min % DAY) + DAY + new Date(Math.round(min)).getTimezoneOffset() * 60
        * 1000;
  }
  if (count > 25) {
    count = 25; // hard cap on the number of generated labels
  }
  final double cycleMath = (max - min) / count;
  if (cycleMath <= 0) {
    // Degenerate or inverted range: no labels.
    return result;
  }
  // Choose the label period: start from one day and halve/double it until it
  // brackets the ideal spacing cycleMath.
  double cycle = DAY;
  if (cycleMath <= DAY) {
    while (cycleMath < cycle / 2) {
      cycle = cycle / 2;
    }
  } else {
    while (cycleMath > cycle) {
      cycle = cycle * 2;
    }
  }
  // First label at or after min, on the grid anchored at mStartPoint.
  double val = mStartPoint - Math.floor((mStartPoint - min) / cycle) * cycle;
  int i = 0;
  while (val < max && i++ <= count) {
    result.add(val);
    val += cycle;
  }
  return result;
}
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.websocket.jsr356;
import java.io.IOException;
import java.net.URI;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import javax.websocket.CloseReason;
import javax.websocket.EndpointConfig;
import javax.websocket.Extension;
import javax.websocket.MessageHandler;
import javax.websocket.RemoteEndpoint.Async;
import javax.websocket.RemoteEndpoint.Basic;
import javax.websocket.Session;
import javax.websocket.WebSocketContainer;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.websocket.api.BatchMode;
import org.eclipse.jetty.websocket.api.extensions.ExtensionConfig;
import org.eclipse.jetty.websocket.common.LogicalConnection;
import org.eclipse.jetty.websocket.common.SessionListener;
import org.eclipse.jetty.websocket.common.WebSocketSession;
import org.eclipse.jetty.websocket.common.events.EventDriver;
import org.eclipse.jetty.websocket.jsr356.endpoints.AbstractJsrEventDriver;
import org.eclipse.jetty.websocket.jsr356.metadata.DecoderMetadata;
import org.eclipse.jetty.websocket.jsr356.metadata.EndpointMetadata;
import org.eclipse.jetty.websocket.jsr356.metadata.MessageHandlerMetadata;
/**
* Session for the JSR.
*/
public class JsrSession extends WebSocketSession implements javax.websocket.Session, Configurable
{
    private static final Logger LOG = Log.getLogger(JsrSession.class);
    /** Container that created this session; also tracks all open sessions. */
    private final ClientContainer container;
    /** Unique session id, supplied by the container at construction time. */
    private final String id;
    /** Endpoint configuration taken from the JSR event driver. */
    private final EndpointConfig config;
    /** Endpoint metadata (declared decoders/encoders) from the event driver. */
    private final EndpointMetadata metadata;
    private final DecoderFactory decoderFactory;
    private final EncoderFactory encoderFactory;
    /** Factory for MessageHandlers */
    private final MessageHandlerFactory messageHandlerFactory;
    /** Array of MessageHandlerWrappers, indexed by {@link MessageType#ordinal()} */
    private final MessageHandlerWrapper wrappers[];
    /** Flat view of registered handlers; rebuilt from {@link #wrappers} on change. */
    private Set<MessageHandler> messageHandlerSet;
    /** Lazily built from the upgrade response; see {@link #getNegotiatedExtensions()}. */
    private List<Extension> negotiatedExtensions;
    private Map<String, String> pathParameters = new HashMap<>();
    /** Lazily created remote endpoints; see getAsyncRemote()/getBasicRemote(). */
    private JsrAsyncRemote asyncRemote;
    private JsrBasicRemote basicRemote;

    /**
     * Creates a JSR-356 session wrapping a Jetty WebSocketSession.
     *
     * @param requestURI the URI used to connect
     * @param websocket the event driver; must be an {@link AbstractJsrEventDriver}
     * @param connection the logical connection
     * @param container the client container that owns this session
     * @param id the unique session id
     * @param sessionListeners listeners notified of session lifecycle events
     * @throws IllegalArgumentException if the event driver is not a JSR driver
     */
    public JsrSession(URI requestURI, EventDriver websocket, LogicalConnection connection, ClientContainer container, String id, SessionListener... sessionListeners)
    {
        super(requestURI, websocket, connection, sessionListeners);
        if (!(websocket instanceof AbstractJsrEventDriver))
        {
            throw new IllegalArgumentException("Cannot use, not a JSR WebSocket: " + websocket);
        }
        AbstractJsrEventDriver jsr = (AbstractJsrEventDriver)websocket;
        this.config = jsr.getConfig();
        this.metadata = jsr.getMetadata();
        this.container = container;
        this.id = id;
        // Endpoint-level factories fall back to the container-level ones.
        this.decoderFactory = new DecoderFactory(metadata.getDecoders(),container.getDecoderFactory());
        this.encoderFactory = new EncoderFactory(metadata.getEncoders(),container.getEncoderFactory());
        this.messageHandlerFactory = new MessageHandlerFactory();
        this.wrappers = new MessageHandlerWrapper[MessageType.values().length];
        this.messageHandlerSet = new HashSet<>();
    }

    /**
     * Registers a {@link MessageHandler} for the message type(s) it declares.
     *
     * @param handler the handler to register (never null)
     * @throws IllegalStateException if no decoder exists for the handler's
     *             message class, or if another handler is already registered
     *             for the same message type
     */
    @Override
    public void addMessageHandler(MessageHandler handler) throws IllegalStateException
    {
        Objects.requireNonNull(handler, "MessageHandler cannot be null");
        // wrappers doubles as the lock guarding handler registration state.
        synchronized (wrappers)
        {
            for (MessageHandlerMetadata metadata : messageHandlerFactory.getMetadata(handler.getClass()))
            {
                DecoderFactory.Wrapper wrapper = decoderFactory.getWrapperFor(metadata.getMessageClass());
                if (wrapper == null)
                {
                    StringBuilder err = new StringBuilder();
                    err.append("Unable to find decoder for type <");
                    err.append(metadata.getMessageClass().getName());
                    err.append("> used in <");
                    err.append(metadata.getHandlerClass().getName());
                    err.append(">");
                    throw new IllegalStateException(err.toString());
                }
                MessageType key = wrapper.getMetadata().getMessageType();
                MessageHandlerWrapper other = wrappers[key.ordinal()];
                if (other != null)
                {
                    // Only one handler per message type is allowed by JSR 356.
                    StringBuilder err = new StringBuilder();
                    err.append("Encountered duplicate MessageHandler handling message type <");
                    err.append(wrapper.getMetadata().getObjectType().getName());
                    err.append(">, ").append(metadata.getHandlerClass().getName());
                    err.append("<");
                    err.append(metadata.getMessageClass().getName());
                    err.append("> and ");
                    err.append(other.getMetadata().getHandlerClass().getName());
                    err.append("<");
                    err.append(other.getMetadata().getMessageClass().getName());
                    err.append("> both implement this message type");
                    throw new IllegalStateException(err.toString());
                }
                else
                {
                    MessageHandlerWrapper handlerWrapper = new MessageHandlerWrapper(handler,metadata,wrapper);
                    wrappers[key.ordinal()] = handlerWrapper;
                }
            }
            // Update handlerSet
            updateMessageHandlerSet();
        }
    }

    /** Closes the session, translating the JSR CloseReason into code + phrase. */
    @Override
    public void close(CloseReason closeReason) throws IOException
    {
        close(closeReason.getCloseCode().getCode(),closeReason.getReasonPhrase());
    }

    /** Lazily creates and returns the asynchronous remote endpoint. */
    @Override
    public Async getAsyncRemote()
    {
        if (asyncRemote == null)
        {
            asyncRemote = new JsrAsyncRemote(this);
        }
        return asyncRemote;
    }

    /** Lazily creates and returns the blocking (basic) remote endpoint. */
    @Override
    public Basic getBasicRemote()
    {
        if (basicRemote == null)
        {
            basicRemote = new JsrBasicRemote(this);
        }
        return basicRemote;
    }

    @Override
    public WebSocketContainer getContainer()
    {
        return this.container;
    }

    public DecoderFactory getDecoderFactory()
    {
        return decoderFactory;
    }

    public EncoderFactory getEncoderFactory()
    {
        return encoderFactory;
    }

    public EndpointConfig getEndpointConfig()
    {
        return config;
    }

    public EndpointMetadata getEndpointMetadata()
    {
        return metadata;
    }

    @Override
    public String getId()
    {
        return this.id;
    }

    /** @return max binary message size, delegated to the WebSocket policy */
    @Override
    public int getMaxBinaryMessageBufferSize()
    {
        return getPolicy().getMaxBinaryMessageSize();
    }

    /** @return idle timeout in milliseconds, delegated to the WebSocket policy */
    @Override
    public long getMaxIdleTimeout()
    {
        return getPolicy().getIdleTimeout();
    }

    /** @return max text message size, delegated to the WebSocket policy */
    @Override
    public int getMaxTextMessageBufferSize()
    {
        return getPolicy().getMaxTextMessageSize();
    }

    public MessageHandlerFactory getMessageHandlerFactory()
    {
        return messageHandlerFactory;
    }

    @Override
    public Set<MessageHandler> getMessageHandlers()
    {
        // Always return copy of set, as it is common to iterate and remove from the real set.
        return new HashSet<MessageHandler>(messageHandlerSet);
    }

    /** @return the registered wrapper for the given message type, or null */
    public MessageHandlerWrapper getMessageHandlerWrapper(MessageType type)
    {
        synchronized (wrappers)
        {
            return wrappers[type.ordinal()];
        }
    }

    /** Lazily builds the JSR extension list from the upgrade response. */
    @Override
    public List<Extension> getNegotiatedExtensions()
    {
        if (negotiatedExtensions == null)
        {
            negotiatedExtensions = new ArrayList<Extension>();
            for (ExtensionConfig cfg : getUpgradeResponse().getExtensions())
            {
                negotiatedExtensions.add(new JsrExtension(cfg));
            }
        }
        return negotiatedExtensions;
    }

    /** @return the accepted sub-protocol, or "" (never null, per the JSR API) */
    @Override
    public String getNegotiatedSubprotocol()
    {
        String acceptedSubProtocol = getUpgradeResponse().getAcceptedSubProtocol();
        if (acceptedSubProtocol == null)
        {
            return "";
        }
        return acceptedSubProtocol;
    }

    @Override
    public Set<Session> getOpenSessions()
    {
        return container.getOpenSessions();
    }

    /** @return read-only view of the URI-template path parameters */
    @Override
    public Map<String, String> getPathParameters()
    {
        return Collections.unmodifiableMap(pathParameters);
    }

    @Override
    public String getQueryString()
    {
        return getUpgradeRequest().getRequestURI().getQuery();
    }

    @Override
    public Map<String, List<String>> getRequestParameterMap()
    {
        return getUpgradeRequest().getParameterMap();
    }

    @Override
    public Principal getUserPrincipal()
    {
        return getUpgradeRequest().getUserPrincipal();
    }

    @Override
    public Map<String, Object> getUserProperties()
    {
        return config.getUserProperties();
    }

    /** Initializes the encoder/decoder factories from the endpoint config. */
    @Override
    public void init(EndpointConfig config)
    {
        // Initialize encoders
        encoderFactory.init(config);
        // Initialize decoders
        decoderFactory.init(config);
    }

    /**
     * Unregisters a handler from every message type it was registered for.
     * Unidentifiable handlers are logged and ignored rather than failing.
     */
    @Override
    public void removeMessageHandler(MessageHandler handler)
    {
        synchronized (wrappers)
        {
            try
            {
                for (MessageHandlerMetadata metadata : messageHandlerFactory.getMetadata(handler.getClass()))
                {
                    DecoderMetadata decoder = decoderFactory.getMetadataFor(metadata.getMessageClass());
                    MessageType key = decoder.getMessageType();
                    wrappers[key.ordinal()] = null;
                }
                updateMessageHandlerSet();
            }
            catch (IllegalStateException e)
            {
                // Deliberate best-effort: removal of an unknown handler is non-fatal.
                LOG.warn("Unable to identify MessageHandler: " + handler.getClass().getName(),e);
            }
        }
    }

    @Override
    public void setMaxBinaryMessageBufferSize(int length)
    {
        // The JSR exposes a single knob; Jetty's policy tracks size and buffer separately.
        getPolicy().setMaxBinaryMessageSize(length);
        getPolicy().setMaxBinaryMessageBufferSize(length);
    }

    @Override
    public void setMaxIdleTimeout(long milliseconds)
    {
        getPolicy().setIdleTimeout(milliseconds);
        super.setIdleTimeout(milliseconds);
    }

    @Override
    public void setMaxTextMessageBufferSize(int length)
    {
        // See setMaxBinaryMessageBufferSize for why both policy values are set.
        getPolicy().setMaxTextMessageSize(length);
        getPolicy().setMaxTextMessageBufferSize(length);
    }

    /** Replaces the path parameters; a null map simply clears them. */
    public void setPathParameters(Map<String, String> pathParams)
    {
        this.pathParameters.clear();
        if (pathParams != null)
        {
            this.pathParameters.putAll(pathParams);
        }
    }

    /** Rebuilds {@link #messageHandlerSet} from the non-null wrapper slots. */
    private void updateMessageHandlerSet()
    {
        messageHandlerSet.clear();
        for (MessageHandlerWrapper wrapper : wrappers)
        {
            if (wrapper == null)
            {
                // skip empty
                continue;
            }
            messageHandlerSet.add(wrapper.getHandler());
        }
    }

    @Override
    public BatchMode getBatchMode()
    {
        // JSR 356 specification mandates default batch mode to be off.
        return BatchMode.OFF;
    }
}
| |
package dk.hotmovinglobster.battleships;
import java.util.Arrays;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.content.Intent;
import android.content.res.Resources;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.Spinner;
import android.widget.Toast;
import dk.hotmovinglobster.battleships.comm.CommunicationProtocolActivity;
/**
* Lets the user configure the game and then launches PlaceShipsActivity.
*/
public class SetupGameActivity extends CommunicationProtocolActivity {

    /** Spinner selecting the game length (short/medium/long). */
    private Spinner gridSizeSpinner;
    /** Dialog to keep the waiting user happy **/
    private ProgressDialog dialog_waiting;
    /** Warn the waiting user that BACK cancels game */
    private AlertDialog dialog_abort_warn;
    private Resources res;

    /** Currently selected game type; defaults to GAME_TYPE_MEDIUM (1). */
    private int rules_game_type = 1;

    public static final int GAME_TYPE_SHORT = 0;
    public static final int GAME_TYPE_MEDIUM = 1;
    public static final int GAME_TYPE_LONG = 2;
    //public static final int GAME_TYPE_VERY_SHORT = 4;

    // private String[] game_types_array = { "Short", "Medium", "Long", "Very short (DEBUG)" };
    private String[] game_types_array = { "Short", "Medium", "Long" };

    @Override
    public void onCreate(Bundle savedInstanceState) {
        Log.v(BattleshipsApplication.LOG_TAG, "SetupGameActivity: onCreate()");
        // assert is a no-op unless the VM runs with assertions enabled.
        assert( BattleshipsApplication.context().Comm != null );
        BattleshipsApplication.context().Comm.setListeningActivity( this );
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.setup_game);
        res = getResources();
        setupComponents();
        // Only the server (host) picks the rules; clients wait for them.
        if ( !BattleshipsApplication.context().Comm.isServer() ) {
            showWaitingDialog();
        }
    }

    @Override
    public void onStart() {
        // Fixed: previously logged "onShow()", which is not this callback's name.
        Log.v(BattleshipsApplication.LOG_TAG, "SetupGameActivity: onStart()");
        super.onStart();
    }

    @Override
    public void onResume() {
        Log.v(BattleshipsApplication.LOG_TAG, "SetupGameActivity: onResume()");
        super.onResume();
    }

    /** Wires up the OK button and the game-type spinner. */
    private void setupComponents() {
        ///////////////////////////////////
        ////////// OK BUTTON //////////////
        ///////////////////////////////////
        ((Button)findViewById(R.id.setup_game_btn_ok)).setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // Send the chosen rules to the peer, then start locally with them.
                BattleshipsApplication.context().Comm.sendRules(rules_game_type);
                acceptRulesAndStart(rules_game_type);
            }
        });

        ///////////////////////////////////
        /////// GAME TYPE SPINNER /////////
        ///////////////////////////////////
        gridSizeSpinner = (Spinner)findViewById(R.id.setup_game_game_type);
        ArrayAdapter<String> adapter = new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, game_types_array);
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        gridSizeSpinner.setAdapter( adapter );
        gridSizeSpinner.setOnItemSelectedListener(new OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parent, View view,
                    int position, long id) {
                if (position == 0) {
                    rules_game_type = GAME_TYPE_SHORT;
                } else if (position == 1) {
                    rules_game_type = GAME_TYPE_MEDIUM;
                } else if (position == 2) {
                    rules_game_type = GAME_TYPE_LONG;
                }
            }
            @Override
            public void onNothingSelected(AdapterView<?> parent) {}
        });
        // Choose medium game as default
        gridSizeSpinner.setSelection( 1 );
    }

    /**
     * Shows the indeterminate "waiting for host" dialog. Cancelling it (BACK)
     * opens a confirmation dialog; "no" re-opens the waiting dialog.
     */
    private void showWaitingDialog() {
        final DialogInterface.OnClickListener dialog_click_listener = new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                switch (which){
                case DialogInterface.BUTTON_POSITIVE:
                    // User confirmed abort: disconnect and leave this screen.
                    BattleshipsApplication.context().Comm.disconnect();
                    dialog.dismiss();
                    SetupGameActivity.this.finish();
                    break;
                case DialogInterface.BUTTON_NEGATIVE:
                    // Keep waiting for the host.
                    showWaitingDialog();
                    break;
                }
            }
        };

        dialog_waiting = new ProgressDialog(this);
        dialog_waiting.setIndeterminate(true);
        dialog_waiting.setMessage(res.getString(R.string.setup_game_waiting_for_host));
        dialog_waiting.setCancelable(true);
        dialog_waiting.show();
        dialog_waiting.setOnCancelListener(new OnCancelListener() {
            @Override
            public void onCancel(DialogInterface dialog) {
                dialog_waiting.dismiss();
                dialog_abort_warn = new AlertDialog.Builder(SetupGameActivity.this).setMessage(R.string.place_ships_warn_abort_wait).
                    setPositiveButton(android.R.string.yes, dialog_click_listener).setNegativeButton(android.R.string.no, dialog_click_listener).show();
            }
        });
    }

    @Override
    public void onBackPressed() {
        // Leaving this screen aborts the pending game entirely.
        BattleshipsApplication.context().Comm.disconnect();
        finish();
    }

    @Override
    public void communicationDisconnected() {
        Toast.makeText(this, getString(R.string.bt_disconnnected), Toast.LENGTH_LONG).show();
        finish();
    }

    @Override
    public void communicationRulesReceived(int game_type) {
        // Host decided the rules; apply them on this (client) side.
        acceptRulesAndStart(game_type);
    }

    /**
     * Applies the agreed rules (grid size and ship counts) to the application
     * context, dismisses any open dialogs and moves on to ship placement.
     *
     * @param game_type one of the GAME_TYPE_* constants; unknown values fall
     *            back to the medium game settings
     */
    private void acceptRulesAndStart(int game_type) {
        if (dialog_abort_warn != null && dialog_abort_warn.isShowing()) {
            dialog_abort_warn.dismiss();
        }
        if (dialog_waiting != null && dialog_waiting.isShowing()) {
            dialog_waiting.dismiss();
        }
        Log.v(BattleshipsApplication.LOG_TAG, "SetupGameActivity: acceptRulesAndStart("+game_type+")");
        switch (game_type) {
        case GAME_TYPE_SHORT:
            BattleshipsApplication.context().GRID_COLUMNS = 6;
            BattleshipsApplication.context().GRID_ROWS = 6;
            BattleshipsApplication.context().MAX_SHIPS = new int[] { 0, 0, 2, 1, 1, 1 };
            break;
        case GAME_TYPE_MEDIUM:
        default:
            BattleshipsApplication.context().GRID_COLUMNS = 8;
            BattleshipsApplication.context().GRID_ROWS = 8;
            BattleshipsApplication.context().MAX_SHIPS = new int[] { 0, 0, 3, 2, 2, 1 };
            break;
        case GAME_TYPE_LONG:
            BattleshipsApplication.context().GRID_COLUMNS = 10;
            BattleshipsApplication.context().GRID_ROWS = 10;
            BattleshipsApplication.context().MAX_SHIPS = new int[] { 0, 0, 3, 2, 2, 1 };
            break;
        }
        Log.v(BattleshipsApplication.LOG_TAG, "SetupGameActivity: acceptRulesAndStart("+BattleshipsApplication.context().GRID_COLUMNS+", " +BattleshipsApplication.context().GRID_ROWS+", " +Arrays.toString(BattleshipsApplication.context().MAX_SHIPS)+")");
        Intent i = new Intent(SetupGameActivity.this, PlaceShipsActivity.class);
        startActivity(i);
        finish();
    }
}
| |
/*******************************************************************************
* Copyright 2014 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.ai.tests.pfa.tests;
import com.badlogic.gdx.InputAdapter;
import com.badlogic.gdx.InputProcessor;
import com.badlogic.gdx.ai.msg.MessageManager;
import com.badlogic.gdx.ai.msg.Telegram;
import com.badlogic.gdx.ai.msg.Telegraph;
import com.badlogic.gdx.ai.pfa.HierarchicalPathFinder;
import com.badlogic.gdx.ai.pfa.PathFinderQueue;
import com.badlogic.gdx.ai.pfa.PathFinderRequest;
import com.badlogic.gdx.ai.pfa.PathFinderRequestControl;
import com.badlogic.gdx.ai.pfa.PathSmoother;
import com.badlogic.gdx.ai.pfa.PathSmootherRequest;
import com.badlogic.gdx.ai.pfa.SmoothableGraphPath;
import com.badlogic.gdx.ai.pfa.indexed.IndexedAStarPathFinder;
import com.badlogic.gdx.ai.sched.LoadBalancingScheduler;
import com.badlogic.gdx.ai.tests.PathFinderTests;
import com.badlogic.gdx.ai.tests.pfa.PathFinderTestBase;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledManhattanDistance;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledNode;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledRaycastCollisionDetector;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.TiledSmoothableGraphPath;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.hrchy.HierarchicalTiledGraph;
import com.badlogic.gdx.ai.tests.pfa.tests.tiled.hrchy.HierarchicalTiledNode;
import com.badlogic.gdx.graphics.Camera;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.ui.CheckBox;
import com.badlogic.gdx.scenes.scene2d.ui.Label;
import com.badlogic.gdx.scenes.scene2d.ui.Slider;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;
import com.badlogic.gdx.utils.Pool;
import com.badlogic.gdx.utils.Pool.Poolable;
/** This test shows interruptible hierarchical pathfinding through a {@link PathFinderQueue}.
*
* @author davebaol */
public class InterruptibleHierarchicalTiledAStarTest extends PathFinderTestBase implements Telegraph {
    // Rendered size of one tile; old alternatives kept for quick tweaking.
    final static float width = 8; // 5; // 10;
    /** Maximum number of chained path segments that can be stored. */
    final static int NUM_PATHS = 10;

    ShapeRenderer renderer;
    /** Scratch vector for screen->world unprojection. */
    Vector3 tmpUnprojection = new Vector3();

    // Last known mouse position and the tiles of the current search.
    int lastScreenX;
    int lastScreenY;
    int lastEndTileX;
    int lastEndTileY;
    int startTileX;
    int startTileY;
    HierarchicalTiledGraph worldMap;
    /** Result paths, one per chained segment; only the first numPaths are valid. */
    TiledSmoothableGraphPath<HierarchicalTiledNode>[] paths;
    int numPaths;
    TiledManhattanDistance<HierarchicalTiledNode> heuristic;
    HierarchicalPathFinder<HierarchicalTiledNode> pathFinder;
    PathSmoother<HierarchicalTiledNode, Vector2> pathSmoother;
    PathSmootherRequest<HierarchicalTiledNode, Vector2> pathSmootherRequest;
    /** Pool of reusable path finder requests dispatched via the message system. */
    Pool<MyPathFinderRequest> requestPool;
    LoadBalancingScheduler scheduler;

    boolean smooth = false;
    boolean metrics = false;

    CheckBox checkDiagonal;
    CheckBox checkSmooth;
    CheckBox checkMetrics;
    Slider sliderMillisAvailablePerFrame;

    public InterruptibleHierarchicalTiledAStarTest (PathFinderTests container) {
        super(container, "Interruptible Hierarchical Tiled A*");
    }
    /**
     * Builds the world map, the pathfinding machinery (hierarchical A* behind a
     * scheduled {@link PathFinderQueue}) and the detail-window UI controls.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void create (Table table) {
        lastEndTileX = -1;
        lastEndTileY = -1;
        startTileX = 1;
        startTileY = 1;

        // Create the map
        worldMap = new HierarchicalTiledGraph();
        int roomCount = 100;
        int roomMinSize = 2;
        int roomMaxSize = 8;
        int squashIterations = 100;
        worldMap.init(roomCount, roomMinSize, roomMaxSize, squashIterations);

        // Pre-allocate one result path per possible chained segment.
        paths = (TiledSmoothableGraphPath<HierarchicalTiledNode>[])new TiledSmoothableGraphPath[NUM_PATHS];
        for (int i = 0; i < NUM_PATHS; i++) {
            paths[i] = new TiledSmoothableGraphPath<HierarchicalTiledNode>();
        }
        numPaths = 0;

        heuristic = new TiledManhattanDistance<HierarchicalTiledNode>();
        // Hierarchical path finder delegating per-level searches to indexed A*.
        IndexedAStarPathFinder<HierarchicalTiledNode> levelPathFinder = new IndexedAStarPathFinder<HierarchicalTiledNode>(worldMap,
            true);
        pathFinder = new HierarchicalPathFinder<HierarchicalTiledNode>(worldMap, levelPathFinder);
        pathSmoother = new PathSmoother<HierarchicalTiledNode, Vector2>(new TiledRaycastCollisionDetector<HierarchicalTiledNode>(
            worldMap));
        pathSmootherRequest = new PathSmootherRequest<HierarchicalTiledNode, Vector2>();

        requestPool = new Pool<MyPathFinderRequest>() {
            @Override
            protected MyPathFinderRequest newObject () {
                return new MyPathFinderRequest();
            }
        };
        // Requests are dispatched as telegrams (message code 1) to this queue,
        // which the load-balancing scheduler runs with a per-frame time budget.
        PathFinderQueue<HierarchicalTiledNode> pathFinderQueue = new PathFinderQueue<HierarchicalTiledNode>(pathFinder);
        MessageManager.getInstance().addListener(pathFinderQueue, 1);

        scheduler = new LoadBalancingScheduler(100);
        scheduler.add(pathFinderQueue, 1, 0);

        renderer = new ShapeRenderer();
        inputProcessor = new TiledHierarchicalAStarInputProcessor(this);

        // --- Detail window UI ---
        Table detailTable = new Table(container.skin);

        detailTable.row();
        checkSmooth = new CheckBox("[RED]S[]mooth Path", container.skin);
        checkSmooth.setChecked(smooth);
        checkSmooth.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                CheckBox checkBox = (CheckBox)event.getListenerActor();
                smooth = checkBox.isChecked();
                updatePath(true);
            }
        });
        detailTable.add(checkSmooth);

        detailTable.row();
        checkDiagonal = new CheckBox("Prefer [RED]D[]iagonal", container.skin);
        checkDiagonal.setChecked(worldMap.diagonal);
        checkDiagonal.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                CheckBox checkBox = (CheckBox)event.getListenerActor();
                worldMap.diagonal = checkBox.isChecked();
                updatePath(true);
            }
        });
        detailTable.add(checkDiagonal);

        detailTable.row();
        addSeparator(detailTable);

        detailTable.row();
        checkMetrics = new CheckBox("Calculate [RED]M[]etrics", container.skin);
        checkMetrics.setChecked(metrics);
        checkMetrics.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                CheckBox checkBox = (CheckBox)event.getListenerActor();
                metrics = checkBox.isChecked();
                updatePath(true);
            }
        });
        detailTable.add(checkMetrics);

        detailTable.row();
        addSeparator(detailTable);

        detailTable.row();
        // Slider controlling the scheduler's time budget (label mirrors its value).
        sliderMillisAvailablePerFrame = new Slider(0.1f, 40f, 0.1f, false, container.skin);
        sliderMillisAvailablePerFrame.setValue(16);
        final Label labelMillisAvailablePerFrame = new Label("Millis Available per Frame [["
            + sliderMillisAvailablePerFrame.getValue() + "]", container.skin);
        detailTable.add(labelMillisAvailablePerFrame);
        detailTable.row();
        sliderMillisAvailablePerFrame.addListener(new ChangeListener() {
            @Override
            public void changed (ChangeEvent event, Actor actor) {
                labelMillisAvailablePerFrame.setText("Millis Available per Frame [[" + sliderMillisAvailablePerFrame.getValue() + "]");
            }
        });
        Table sliderMapfTable = new Table();
        sliderMapfTable.add(new Label("[RED]-[] ", container.skin));
        sliderMapfTable.add(sliderMillisAvailablePerFrame);
        sliderMapfTable.add(new Label(" [RED]+[]", container.skin));
        detailTable.add(sliderMapfTable);

        detailWindow = createDetailWindow(detailTable);
    }
    /**
     * Runs the pathfinding scheduler for the configured time budget, then draws
     * the tile map, the path segments found so far and, on the upper level, the
     * representative lower-level node of each building.
     */
    @Override
    public void render () {
        // Slider value is in milliseconds; scheduler expects nanoseconds.
        long timeToRun = (long)(sliderMillisAvailablePerFrame.getValue() * 1000000f);
        scheduler.run(timeToRun);

        renderer.begin(ShapeType.Filled);

        // Level 0: the concrete tile map.
        int level = 0;
        worldMap.setLevel(level);
        int xMax = HierarchicalTiledGraph.sizeX[level];
        int yMax = HierarchicalTiledGraph.sizeY[level];
        for (int x = 0; x < xMax; x++) {
            for (int y = 0; y < yMax; y++) {
                switch (worldMap.getNode(x, y).type) {
                case TiledNode.TILE_FLOOR:
                    renderer.setColor(Color.WHITE);
                    break;
                case TiledNode.TILE_WALL:
                    renderer.setColor(Color.GRAY);
                    break;
                default:
                    renderer.setColor(Color.BLACK);
                    break;
                }
                renderer.rect(x * width, y * width, width, width);
            }
        }

        // Draw path nodes
        for (int p = 0; p < numPaths; p++) {
            TiledSmoothableGraphPath<HierarchicalTiledNode> path = paths[p];
            int nodeCount = path.getCount();
            if (nodeCount == 0) break;
            // Alternate colors so consecutive segments are distinguishable.
            renderer.setColor(p % 2 == 0 ? Color.RED : Color.ORANGE);
            for (int i = 0; i < nodeCount; i++) {
                HierarchicalTiledNode node = path.nodes.get(i);
                renderer.rect(node.x * width, node.y * width, width, width);
            }
        }

        if (smooth) {
            renderer.end();
            renderer.begin(ShapeType.Line);
            // Draw lines between path nodes
            for (int p = 0; p < numPaths; p++) {
                TiledSmoothableGraphPath<HierarchicalTiledNode> path = paths[p];
                int nodeCount = path.getCount();
                if (nodeCount > 0) {
                    float hw = width / 2f;
                    HierarchicalTiledNode prevNode = path.nodes.get(0);
                    renderer.setColor(p % 2 == 0 ? Color.RED : Color.ORANGE);
                    for (int i = 1; i < nodeCount; i++) {
                        HierarchicalTiledNode node = path.nodes.get(i);
                        renderer.line(node.x * width + hw, node.y * width + hw, prevNode.x * width + hw, prevNode.y * width + hw);
                        prevNode = node;
                    }
                }
            }
        }

        // Draw the lower level node of the buildings (usually a tile close to the center of mass)
        level = 1;
        worldMap.setLevel(level);
        xMax = HierarchicalTiledGraph.sizeX[level];
        yMax = HierarchicalTiledGraph.sizeY[level];
        renderer.end();
        renderer.begin(ShapeType.Line);
        renderer.setColor(Color.MAROON);
        float hw = width * .5f;
        for (int x = 0; x < xMax; x++) {
            for (int y = 0; y < yMax; y++) {
                HierarchicalTiledNode lln = worldMap.getNode(x, y).getLowerLevelNode();
                renderer.circle(lln.x * width + hw, lln.y * width + hw, hw);
            }
        }
        renderer.end();
    }
    /** Releases the shape renderer, drops pathfinding state and clears pending messages. */
    @Override
    public void dispose () {
        renderer.dispose();

        worldMap = null;
        paths = null;
        heuristic = null;
        pathFinder = null;
        pathSmoother = null;
        scheduler = null;

        // Drop any telegrams still queued for the (now discarded) path finder queue.
        MessageManager.getInstance().clear();
    }
    /** @return the stage viewport's camera, used to unproject screen coordinates */
    public Camera getCamera () {
        return container.stage.getViewport().getCamera();
    }
@Override
public boolean handleMessage (Telegram telegram) {
if (telegram.extraInfo instanceof MyPathFinderRequest) {
if (PathFinderRequestControl.DEBUG) {
@SuppressWarnings("unchecked")
PathFinderQueue<HierarchicalTiledNode> pfQueue = (PathFinderQueue<HierarchicalTiledNode>)telegram.sender;
if (PathFinderRequestControl.DEBUG) System.out.println("pfQueue.size = " + pfQueue.size());
}
MyPathFinderRequest pfr = (MyPathFinderRequest)telegram.extraInfo;
TiledSmoothableGraphPath<HierarchicalTiledNode> path = paths[pfr.pathIndex];
int n = path.getCount();
if (n > 0 && pfr.pathFound && pfr.endNode != path.get(n - 1)) {
pfr.startNode = path.get(n - 1);
pfr.pathIndex++;
pfr.resultPath = paths[pfr.pathIndex];
pfr.changeStatus(PathFinderRequest.SEARCH_NEW);
numPaths = pfr.pathIndex;
} else {
requestPool.free(pfr);
numPaths = pfr.pathIndex + 1;
}
}
return true;
}
    /**
     * Requests a new path from the start tile to the tile under the last known
     * mouse position.
     *
     * @param forceUpdate when true, recompute even if the end tile is unchanged
     *            (falling back to the previous valid end tile if the cursor is
     *            not over a floor tile)
     */
    private void updatePath (boolean forceUpdate) {
        // Convert the stored screen position into tile coordinates.
        getCamera().unproject(tmpUnprojection.set(lastScreenX, lastScreenY, 0));
        int tileX = (int)(tmpUnprojection.x / width);
        int tileY = (int)(tmpUnprojection.y / width);
        if (forceUpdate || tileX != lastEndTileX || tileY != lastEndTileY) {
            worldMap.setLevel(0);
            HierarchicalTiledNode startNode = worldMap.getNode(startTileX, startTileY);
            HierarchicalTiledNode endNode = worldMap.getNode(tileX, tileY);
            if (forceUpdate || endNode.type == TiledNode.TILE_FLOOR) {
                if (endNode.type == TiledNode.TILE_FLOOR) {
                    lastEndTileX = tileX;
                    lastEndTileY = tileY;
                } else {
                    // Forced update while hovering a non-floor tile: reuse the
                    // previous valid end tile instead.
                    endNode = worldMap.getNode(lastEndTileX, lastEndTileY);
                }
                if (metrics)
                    if (PathFinderRequestControl.DEBUG)
                        System.out.println("------------ Hierarchical Indexed A* Path Finder Metrics ------------");
                requestNewPathFinding(startNode, endNode, 0);
            }
        }
    }
    /**
     * Obtains a pooled request, fills it in and dispatches it (message code 1)
     * to the path finder queue; the result arrives via {@link #handleMessage}.
     *
     * @param startNode the node the search starts from
     * @param endNode the goal node
     * @param pathIndex index of the path slot the result should be written into
     */
    private void requestNewPathFinding (HierarchicalTiledNode startNode, HierarchicalTiledNode endNode, int pathIndex) {
        TiledSmoothableGraphPath<HierarchicalTiledNode> path = paths[pathIndex];

        MyPathFinderRequest pfRequest = requestPool.obtain();
        pfRequest.startNode = startNode;
        pfRequest.endNode = endNode;
        pfRequest.heuristic = heuristic;
        pfRequest.resultPath = path;
        pfRequest.pathIndex = pathIndex;

        MessageManager.getInstance().dispatchMessage(this, 1, pfRequest);
    }
/** An {@link InputProcessor} that allows you to define a path to find.
 *
 * @author davebaol */
    static class TiledHierarchicalAStarInputProcessor extends InputAdapter {
        /** The owning test, whose state and UI controls this processor drives. */
        InterruptibleHierarchicalTiledAStarTest test;

        public TiledHierarchicalAStarInputProcessor (InterruptibleHierarchicalTiledAStarTest test) {
            this.test = test;
        }

        /** Keyboard shortcuts mirroring the detail-window controls. */
        @Override
        public boolean keyTyped (char character) {
            switch (character) {
            case 'm':
            case 'M':
                test.checkMetrics.toggle();
                break;
            case 'd':
            case 'D':
                test.checkDiagonal.toggle();
                break;
            case 's':
            case 'S':
                test.checkSmooth.toggle();
                break;
            case '-':
                test.sliderMillisAvailablePerFrame.setValue(test.sliderMillisAvailablePerFrame.getValue()
                    - test.sliderMillisAvailablePerFrame.getStepSize());
                break;
            case '+':
                test.sliderMillisAvailablePerFrame.setValue(test.sliderMillisAvailablePerFrame.getValue()
                    + test.sliderMillisAvailablePerFrame.getStepSize());
                break;
            }
            return true;
        }

        /** Click on a floor tile sets it as the new start tile and recomputes. */
        @Override
        public boolean touchUp (int screenX, int screenY, int pointer, int button) {
            test.getCamera().unproject(test.tmpUnprojection.set(screenX, screenY, 0));
            int tileX = (int)(test.tmpUnprojection.x / width);
            int tileY = (int)(test.tmpUnprojection.y / width);
            test.worldMap.setLevel(0);
            HierarchicalTiledNode startNode = test.worldMap.getNode(tileX, tileY);
            if (startNode.type == TiledNode.TILE_FLOOR) {
                test.startTileX = tileX;
                test.startTileY = tileY;
                test.updatePath(true);
            }
            return true;
        }

        /** Mouse movement tracks the end tile; recompute only when it changes. */
        @Override
        public boolean mouseMoved (int screenX, int screenY) {
            test.lastScreenX = screenX;
            test.lastScreenY = screenY;
            test.updatePath(false);
            return true;
        }
    }
/** Poolable path finder request that, once a raw path is found, truncates it to the first level-1 crossing and
 * optionally smooths it within the frame's time budget. */
class MyPathFinderRequest extends PathFinderRequest<HierarchicalTiledNode> implements Poolable {
	// True once the path smoother has completed smoothing the current result path.
	boolean smoothFinished;
	// Index into the paths array this request writes its result to.
	int pathIndex;

	public MyPathFinderRequest () {
	}

	@Override
	public boolean initializeSearch (long timeToRun) {
		// Clear the previous result and anchor the hierarchical graph at the start node.
		resultPath.clear();
		worldMap.startNode = startNode;
		return true;
	}

	@Override
	public boolean finalizeSearch (long timeToRun) {
		if (statusChanged) {
			if (PathFinderRequestControl.DEBUG)
				System.out.println("MyPathFinderRequest.finalizeSearch[" + pathIndex
					+ "]: statusChanged **********************************");
			// A new search has just completed: restart smoothing from scratch.
			pathSmootherRequest.refresh(paths[pathIndex]);
			smoothFinished = false;
			if (pathFound) {
				// Unless the path already ends at the target, truncate the level-0 path right
				// after it leaves the level-1 node containing the start node.
				HierarchicalTiledNode l1Start = worldMap.convertNodeBetweenLevels(0, startNode, 1);
				SmoothableGraphPath<HierarchicalTiledNode, Vector2> path = paths[pathIndex];
				int nodeCount = path.getCount();
				if (nodeCount > 0 && endNode != path.get(nodeCount - 1)) {
					for (int i = 1; i < nodeCount; i++) {
						if (worldMap.convertNodeBetweenLevels(0, path.get(i), 1) != l1Start) {
							path.truncatePath(i + 1);
							break;
						}
					}
				}
			}
		}
		if (pathFound) {
			if (PathFinderRequestControl.DEBUG)
				System.out.println("MyPathFinderRequest.finalizeSearch[" + pathIndex
					+ "]: pathFound **********************************");
			// Interruptible smoothing: returning false resumes this request on a later frame
			// when the time budget runs out before smoothing completes.
			if (smooth && !smoothFinished) {
				worldMap.setLevel(0);
				smoothFinished = pathSmoother.smoothPath(pathSmootherRequest, timeToRun);
				if (!smoothFinished) return false;
			}
			numPaths = pathIndex + 1;
		}
		return true;
	}

	@Override
	public void reset () {
		// Pooling support: drop all references so recycled instances don't leak.
		this.startNode = null;
		this.endNode = null;
		this.heuristic = null;
		this.resultPath = null;
		this.client = null;
	}
}
}
| |
package com.afollestad.materialdialogs.color;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Dialog;
import android.content.DialogInterface;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.ArrayRes;
import android.support.annotation.ColorInt;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringDef;
import android.support.annotation.StringRes;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.content.res.ResourcesCompat;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.InputFilter;
import android.text.TextWatcher;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.GridView;
import android.widget.SeekBar;
import android.widget.TextView;
import com.afollestad.materialdialogs.DialogAction;
import com.afollestad.materialdialogs.MaterialDialog;
import com.afollestad.materialdialogs.Theme;
import com.afollestad.materialdialogs.commons.R;
import com.afollestad.materialdialogs.internal.MDTintHelper;
import com.afollestad.materialdialogs.util.DialogUtils;
import java.io.Serializable;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* @author Aidan Follestad (afollestad)
*/
@SuppressWarnings({"FieldCanBeLocal", "ConstantConditions"})
public class ColorChooserDialog extends DialogFragment implements View.OnClickListener, View.OnLongClickListener {
/** Restricts fragment tags to the well-known chooser tags below. */
@Retention(RetentionPolicy.SOURCE)
@StringDef({
        TAG_PRIMARY,
        TAG_ACCENT,
        TAG_CUSTOM
})
public @interface ColorChooserTag {
}

// NOTE(review): all three tags share the same string value, so at most one chooser can be
// attached at a time regardless of its mode — confirm this is intentional.
public final static String TAG_PRIMARY = "[MD_COLOR_CHOOSER]";
public final static String TAG_ACCENT = "[MD_COLOR_CHOOSER]";
public final static String TAG_CUSTOM = "[MD_COLOR_CHOOSER]";

// Top-level palette shown in the grid.
@NonNull
private int[] mColorsTop;
// Optional per-top-color shade palettes; null when there are no sub-palettes.
@Nullable
private int[][] mColorsSub;
private void generateColors() {
    // Builder-supplied palettes win; otherwise pick the stock palette matching the mode.
    final Builder builder = getBuilder();
    if (builder.mColorsTop != null) {
        mColorsTop = builder.mColorsTop;
        mColorsSub = builder.mColorsSub;
    } else if (builder.mAccentMode) {
        mColorsTop = ColorPalette.ACCENT_COLORS;
        mColorsSub = ColorPalette.ACCENT_COLORS_SUB;
    } else {
        mColorsTop = ColorPalette.PRIMARY_COLORS;
        mColorsSub = ColorPalette.PRIMARY_COLORS_SUB;
    }
}
// Default constructor required for fragment re-instantiation by the framework.
public ColorChooserDialog() {
}

// Pixel size of each color circle in the grid.
private int mCircleSize;
// Host activity callback that receives the final selection.
private ColorCallback mCallback;
private GridView mGrid;
// Custom ARGB editor views; only looked up when user custom input is allowed.
private View mColorChooserCustomFrame;
private EditText mCustomColorHex;
private View mCustomColorIndicator;
private TextWatcher mCustomColorTextWatcher;
private SeekBar mCustomSeekA;
private TextView mCustomSeekAValue;
private SeekBar mCustomSeekR;
private TextView mCustomSeekRValue;
private SeekBar mCustomSeekG;
private TextView mCustomSeekGValue;
private SeekBar mCustomSeekB;
private TextView mCustomSeekBValue;
private SeekBar.OnSeekBarChangeListener mCustomColorRgbListener;
// Color currently composed in the custom editor (hex field / seek bars).
private int mSelectedCustomColor;
@Override
public void onSaveInstanceState(Bundle outState) {
    // Persist the grid selection and whether the custom editor is currently showing.
    super.onSaveInstanceState(outState);
    outState.putInt("top_index", topIndex());
    outState.putBoolean("in_sub", isInSub());
    outState.putInt("sub_index", subIndex());
    outState.putBoolean("in_custom", mColorChooserCustomFrame != null &&
            mColorChooserCustomFrame.getVisibility() == View.VISIBLE);
}
@Override
public void onAttach(Activity activity) {
    // The hosting Activity must implement ColorCallback to receive the selected color.
    super.onAttach(activity);
    if (!(activity instanceof ColorCallback))
        throw new IllegalStateException("ColorChooserDialog needs to be shown from an Activity implementing ColorCallback.");
    mCallback = (ColorCallback) activity;
}
// Selection state is kept in the fragment arguments so it survives re-creation.

// Whether the grid currently shows a sub-palette (shades of a top-level color).
private boolean isInSub() {
    return getArguments().getBoolean("in_sub", false);
}

private void isInSub(boolean value) {
    getArguments().putBoolean("in_sub", value);
}

// Index of the selected top-level color, or -1 when none.
private int topIndex() {
    return getArguments().getInt("top_index", -1);
}

private void topIndex(int value) {
    // Selecting a top color also resolves which of its shades is currently active.
    if (value > -1)
        findSubIndexForColor(value, mColorsTop[value]);
    getArguments().putInt("top_index", value);
}

// Index of the selected shade inside the current sub-palette, or -1 when none.
private int subIndex() {
    if (mColorsSub == null) return -1;
    return getArguments().getInt("sub_index", -1);
}

private void subIndex(int value) {
    if (mColorsSub == null) return;
    getArguments().putInt("sub_index", value);
}
/** Resolves the dialog title: the sub-page title while browsing shades, falling back to the
 * main title when the sub title is unset (0). */
@StringRes
public int getTitle() {
    final Builder builder = getBuilder();
    int title = isInSub() ? builder.mTitleSub : builder.mTitle;
    return title == 0 ? builder.mTitle : title;
}
/** Returns the Builder-supplied tag when present, otherwise the fragment's own tag. */
public String tag() {
    final Builder builder = getBuilder();
    return builder.mTag != null ? builder.mTag : super.getTag();
}
/** Whether this chooser presents the accent palette rather than the primary one. */
public boolean isAccentMode() {
    return getBuilder().mAccentMode;
}
@Override
public void onClick(View v) {
    // Handles taps on grid circles; each circle's tag encodes "position:color".
    if (v.getTag() != null) {
        final String[] tag = ((String) v.getTag()).split(":");
        final int index = Integer.parseInt(tag[0]);
        final MaterialDialog dialog = (MaterialDialog) getDialog();
        final Builder builder = getBuilder();
        if (isInSub()) {
            subIndex(index);
        } else {
            topIndex(index);
            if (mColorsSub != null && index < mColorsSub.length) {
                // Drill down into the shades of the tapped color; negative button becomes "Back".
                dialog.setActionButton(DialogAction.NEGATIVE, builder.mBackBtn);
                isInSub(true);
            }
        }
        // Keep the custom editor's starting color in sync with the grid selection.
        if (builder.mAllowUserCustom)
            mSelectedCustomColor = getSelectedColor();
        invalidateDynamicButtonColors();
        invalidate();
    }
}
@Override
public boolean onLongClick(View v) {
    // Long-pressing a circle shows a hint with its color; the value is encoded in the tag ("index:color").
    final Object tag = v.getTag();
    if (tag == null) return false;
    final int color = Integer.parseInt(((String) tag).split(":")[1]);
    ((CircleView) v).showHint(color);
    return true;
}
/** Tints the dialog's action buttons (and the custom-editor seek bars, when inflated) with the
 * currently selected color, when dynamic button coloring is enabled. No-op before the dialog exists. */
private void invalidateDynamicButtonColors() {
    final MaterialDialog dialog = (MaterialDialog) getDialog();
    if (dialog == null) return;
    final Builder builder = getBuilder();
    if (builder.mDynamicButtonColor) {
        int selectedColor = getSelectedColor();
        if (Color.alpha(selectedColor) < 64 ||
                (Color.red(selectedColor) > 247 &&
                        Color.green(selectedColor) > 247 &&
                        Color.blue(selectedColor) > 247)) {
            // Once we get close to white or transparent, the action buttons and seekbars will be a very light gray
            selectedColor = Color.parseColor("#DEDEDE");
        }
        // Fix: the original re-checked getBuilder().mDynamicButtonColor here, which is the same
        // condition already guarding this branch — the redundant nested check has been removed.
        dialog.getActionButton(DialogAction.POSITIVE).setTextColor(selectedColor);
        dialog.getActionButton(DialogAction.NEGATIVE).setTextColor(selectedColor);
        dialog.getActionButton(DialogAction.NEUTRAL).setTextColor(selectedColor);
        if (mCustomSeekR != null) {
            if (mCustomSeekA.getVisibility() == View.VISIBLE)
                MDTintHelper.setTint(mCustomSeekA, selectedColor);
            MDTintHelper.setTint(mCustomSeekR, selectedColor);
            MDTintHelper.setTint(mCustomSeekG, selectedColor);
            MDTintHelper.setTint(mCustomSeekB, selectedColor);
        }
    }
}
/** The effective selection: the custom editor's color while it is visible; otherwise the grid
 * selection; falling back to the theme accent color when nothing is selected (color == 0). */
@ColorInt
private int getSelectedColor() {
    if (mColorChooserCustomFrame != null && mColorChooserCustomFrame.getVisibility() == View.VISIBLE)
        return mSelectedCustomColor;
    int color = 0;
    if (subIndex() > -1)
        color = mColorsSub[topIndex()][subIndex()];
    else if (topIndex() > -1)
        color = mColorsTop[topIndex()];
    if (color == 0) {
        int fallback = 0;
        // On Lollipop+ resolve the framework accent first, then the appcompat attribute.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
            fallback = DialogUtils.resolveColor(getActivity(), android.R.attr.colorAccent);
        color = DialogUtils.resolveColor(getActivity(), R.attr.colorAccent, fallback);
    }
    return color;
}
/** Implemented by the hosting Activity; receives the color confirmed with the positive button. */
public interface ColorCallback {
    void onColorSelection(@NonNull ColorChooserDialog dialog, @ColorInt int selectedColor);
}
/** Locates {@code color} in the sub-palette of {@code topIndex} and records its position via
 * {@link #subIndex(int)}; does nothing when there is no such sub-palette or no match. */
private void findSubIndexForColor(int topIndex, int color) {
    if (mColorsSub == null || mColorsSub.length - 1 < topIndex)
        return;
    final int[] candidates = mColorsSub[topIndex];
    int i = 0;
    while (i < candidates.length) {
        if (candidates[i] == color) {
            subIndex(i);
            return;
        }
        i++;
    }
}
/** Builds the MaterialDialog hosting the preset color grid and, when allowed, the custom ARGB editor. */
@NonNull
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
    if (getArguments() == null || !getArguments().containsKey("builder"))
        throw new IllegalStateException("ColorChooserDialog should be created using its Builder interface.");
    generateColors();
    int preselectColor;
    boolean foundPreselectColor = false;
    if (savedInstanceState != null) {
        // Restoring: reuse the saved selection; stay in the custom editor if we were there.
        foundPreselectColor = !savedInstanceState.getBoolean("in_custom", false);
        preselectColor = getSelectedColor();
    } else {
        if (getBuilder().mSetPreselectionColor) {
            preselectColor = getBuilder().mPreselect;
            if (preselectColor != 0) {
                // Search the top-level palette, then each sub-palette, for the preselected color.
                for (int topIndex = 0; topIndex < mColorsTop.length; topIndex++) {
                    if (mColorsTop[topIndex] == preselectColor) {
                        foundPreselectColor = true;
                        topIndex(topIndex);
                        if (getBuilder().mAccentMode) {
                            subIndex(2);
                        } else if (mColorsSub != null) {
                            findSubIndexForColor(topIndex, preselectColor);
                        } else {
                            subIndex(5);
                        }
                        break;
                    }
                    if (mColorsSub != null) {
                        for (int subIndex = 0; subIndex < mColorsSub[topIndex].length; subIndex++) {
                            if (mColorsSub[topIndex][subIndex] == preselectColor) {
                                foundPreselectColor = true;
                                topIndex(topIndex);
                                subIndex(subIndex);
                                break;
                            }
                        }
                        if (foundPreselectColor) break;
                    }
                }
            }
        } else {
            preselectColor = Color.BLACK;
            foundPreselectColor = true;
        }
    }
    mCircleSize = getResources().getDimensionPixelSize(R.dimen.md_colorchooser_circlesize);
    final Builder builder = getBuilder();
    MaterialDialog.Builder bd = new MaterialDialog.Builder(getActivity())
            .title(getTitle())
            .autoDismiss(false)
            .customView(R.layout.md_dialog_colorchooser, false)
            .negativeText(builder.mCancelBtn)
            .positiveText(builder.mDoneBtn)
            .neutralText(builder.mAllowUserCustom ? builder.mCustomBtn : 0)
            .onPositive(new MaterialDialog.SingleButtonCallback() {
                @Override
                public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                    // Deliver the final selection to the host and close.
                    mCallback.onColorSelection(ColorChooserDialog.this, getSelectedColor());
                    dismiss();
                }
            })
            .onNegative(new MaterialDialog.SingleButtonCallback() {
                @Override
                public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                    // Acts as "Back" while inside a sub-palette; otherwise cancels the dialog.
                    if (isInSub()) {
                        dialog.setActionButton(DialogAction.NEGATIVE, getBuilder().mCancelBtn);
                        isInSub(false);
                        subIndex(-1); // Do this to avoid ArrayIndexOutOfBoundsException
                        invalidate();
                    } else {
                        dialog.cancel();
                    }
                }
            })
            .onNeutral(new MaterialDialog.SingleButtonCallback() {
                @Override
                public void onClick(@NonNull MaterialDialog dialog, @NonNull DialogAction which) {
                    // Switch between the preset grid and the custom editor.
                    toggleCustom(dialog);
                }
            })
            .showListener(new DialogInterface.OnShowListener() {
                @Override
                public void onShow(DialogInterface dialog) {
                    invalidateDynamicButtonColors();
                }
            });
    if (builder.mTheme != null)
        bd.theme(builder.mTheme);
    final MaterialDialog dialog = bd.build();
    final View v = dialog.getCustomView();
    mGrid = (GridView) v.findViewById(R.id.md_grid);
    if (builder.mAllowUserCustom) {
        // Wire up the custom color editor views.
        mSelectedCustomColor = preselectColor;
        mColorChooserCustomFrame = v.findViewById(R.id.md_colorChooserCustomFrame);
        mCustomColorHex = (EditText) v.findViewById(R.id.md_hexInput);
        mCustomColorIndicator = v.findViewById(R.id.md_colorIndicator);
        mCustomSeekA = (SeekBar) v.findViewById(R.id.md_colorA);
        mCustomSeekAValue = (TextView) v.findViewById(R.id.md_colorAValue);
        mCustomSeekR = (SeekBar) v.findViewById(R.id.md_colorR);
        mCustomSeekRValue = (TextView) v.findViewById(R.id.md_colorRValue);
        mCustomSeekG = (SeekBar) v.findViewById(R.id.md_colorG);
        mCustomSeekGValue = (TextView) v.findViewById(R.id.md_colorGValue);
        mCustomSeekB = (SeekBar) v.findViewById(R.id.md_colorB);
        mCustomSeekBValue = (TextView) v.findViewById(R.id.md_colorBValue);
        if (!builder.mAllowUserCustomAlpha) {
            // No alpha input: hide the A channel and limit the hex field to RRGGBB.
            v.findViewById(R.id.md_colorALabel).setVisibility(View.GONE);
            mCustomSeekA.setVisibility(View.GONE);
            mCustomSeekAValue.setVisibility(View.GONE);
            mCustomColorHex.setHint("2196F3");
            mCustomColorHex.setFilters(new InputFilter[]{new InputFilter.LengthFilter(6)});
        } else {
            mCustomColorHex.setHint("FF2196F3");
            mCustomColorHex.setFilters(new InputFilter[]{new InputFilter.LengthFilter(8)});
        }
        if (!foundPreselectColor) {
            // If color wasn't found in the preset colors, it must be custom
            toggleCustom(dialog);
        }
    }
    invalidate();
    return dialog;
}
/** Switches the dialog between the preset color grid and the custom ARGB editor, swapping the
 * title/action buttons and (de)registering the hex-field and seek-bar listeners accordingly. */
private void toggleCustom(MaterialDialog dialog) {
    if (dialog == null)
        dialog = (MaterialDialog) getDialog();
    if (mGrid.getVisibility() == View.VISIBLE) {
        // Grid -> custom editor.
        dialog.setTitle(getBuilder().mCustomBtn);
        dialog.setActionButton(DialogAction.NEUTRAL, getBuilder().mPresetsBtn);
        dialog.setActionButton(DialogAction.NEGATIVE, getBuilder().mCancelBtn);
        mGrid.setVisibility(View.INVISIBLE);
        mColorChooserCustomFrame.setVisibility(View.VISIBLE);
        mCustomColorTextWatcher = new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                // Parse the hex field; invalid input falls back to black.
                try {
                    mSelectedCustomColor = Color.parseColor("#" + s.toString());
                } catch (IllegalArgumentException e) {
                    mSelectedCustomColor = Color.BLACK;
                }
                mCustomColorIndicator.setBackgroundColor(mSelectedCustomColor);
                // Sync the seek bars to the parsed color. Fix: the original contained a second,
                // copy-pasted alpha block that re-set mCustomSeekA's progress with no effect;
                // the duplicate has been removed.
                if (mCustomSeekA.getVisibility() == View.VISIBLE) {
                    int alpha = Color.alpha(mSelectedCustomColor);
                    mCustomSeekA.setProgress(alpha);
                    mCustomSeekAValue.setText(String.format("%d", alpha));
                }
                int red = Color.red(mSelectedCustomColor);
                mCustomSeekR.setProgress(red);
                int green = Color.green(mSelectedCustomColor);
                mCustomSeekG.setProgress(green);
                int blue = Color.blue(mSelectedCustomColor);
                mCustomSeekB.setProgress(blue);
                // A custom color invalidates any grid selection.
                isInSub(false);
                topIndex(-1);
                subIndex(-1);
                invalidateDynamicButtonColors();
            }

            @Override
            public void afterTextChanged(Editable s) {
            }
        };
        mCustomColorHex.addTextChangedListener(mCustomColorTextWatcher);
        mCustomColorRgbListener = new SeekBar.OnSeekBarChangeListener() {
            @SuppressLint("DefaultLocale")
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                // User drags update the hex field, which updates the indicator via the TextWatcher.
                if (fromUser) {
                    if (getBuilder().mAllowUserCustomAlpha) {
                        int color = Color.argb(mCustomSeekA.getProgress(),
                                mCustomSeekR.getProgress(),
                                mCustomSeekG.getProgress(),
                                mCustomSeekB.getProgress());
                        mCustomColorHex.setText(String.format("%08X", color));
                    } else {
                        int color = Color.rgb(mCustomSeekR.getProgress(),
                                mCustomSeekG.getProgress(),
                                mCustomSeekB.getProgress());
                        mCustomColorHex.setText(String.format("%06X", 0xFFFFFF & color));
                    }
                }
                mCustomSeekAValue.setText(String.format("%d", mCustomSeekA.getProgress()));
                mCustomSeekRValue.setText(String.format("%d", mCustomSeekR.getProgress()));
                mCustomSeekGValue.setText(String.format("%d", mCustomSeekG.getProgress()));
                mCustomSeekBValue.setText(String.format("%d", mCustomSeekB.getProgress()));
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        };
        mCustomSeekR.setOnSeekBarChangeListener(mCustomColorRgbListener);
        mCustomSeekG.setOnSeekBarChangeListener(mCustomColorRgbListener);
        mCustomSeekB.setOnSeekBarChangeListener(mCustomColorRgbListener);
        if (mCustomSeekA.getVisibility() == View.VISIBLE) {
            mCustomSeekA.setOnSeekBarChangeListener(mCustomColorRgbListener);
            mCustomColorHex.setText(String.format("%08X", mSelectedCustomColor));
        } else {
            mCustomColorHex.setText(String.format("%06X", 0xFFFFFF & mSelectedCustomColor));
        }
    } else {
        // Custom editor -> grid; detach listeners so stale callbacks can't fire.
        dialog.setTitle(getBuilder().mTitle);
        dialog.setActionButton(DialogAction.NEUTRAL, getBuilder().mCustomBtn);
        if (isInSub())
            dialog.setActionButton(DialogAction.NEGATIVE, getBuilder().mBackBtn);
        else dialog.setActionButton(DialogAction.NEGATIVE, getBuilder().mCancelBtn);
        mGrid.setVisibility(View.VISIBLE);
        mColorChooserCustomFrame.setVisibility(View.GONE);
        mCustomColorHex.removeTextChangedListener(mCustomColorTextWatcher);
        mCustomColorTextWatcher = null;
        mCustomSeekR.setOnSeekBarChangeListener(null);
        mCustomSeekG.setOnSeekBarChangeListener(null);
        mCustomSeekB.setOnSeekBarChangeListener(null);
        mCustomColorRgbListener = null;
    }
}
/** Refreshes the grid (creating the adapter lazily on first call) and the dialog title. */
private void invalidate() {
    final BaseAdapter adapter = (BaseAdapter) mGrid.getAdapter();
    if (adapter != null) {
        adapter.notifyDataSetChanged();
    } else {
        mGrid.setAdapter(new ColorGridAdapter());
        mGrid.setSelector(ResourcesCompat.getDrawable(getResources(), R.drawable.md_transparent, null));
    }
    if (getDialog() != null)
        getDialog().setTitle(getTitle());
}
/** Backs the color grid: shows the top-level palette, or the shades of the selected top color
 * while in a sub-palette. */
private class ColorGridAdapter extends BaseAdapter {
    public ColorGridAdapter() {
    }

    @Override
    public int getCount() {
        if (isInSub()) return mColorsSub[topIndex()].length;
        else return mColorsTop.length;
    }

    @Override
    public Object getItem(int position) {
        if (isInSub()) return mColorsSub[topIndex()][position];
        else return mColorsTop[position];
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @SuppressLint("DefaultLocale")
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // Standard view recycling: only create a CircleView when none can be reused.
        if (convertView == null) {
            convertView = new CircleView(getContext());
            convertView.setLayoutParams(new GridView.LayoutParams(mCircleSize, mCircleSize));
        }
        CircleView child = (CircleView) convertView;
        @ColorInt
        final int color = isInSub() ? mColorsSub[topIndex()][position] : mColorsTop[position];
        child.setBackgroundColor(color);
        if (isInSub())
            child.setSelected(subIndex() == position);
        else child.setSelected(topIndex() == position);
        // The tag encodes "position:color" for the click/long-click handlers.
        child.setTag(String.format("%d:%d", position, color));
        child.setOnClickListener(ColorChooserDialog.this);
        child.setOnLongClickListener(ColorChooserDialog.this);
        return convertView;
    }
}
/** Fluent, serializable configuration for {@link ColorChooserDialog}; stored in the fragment
 * arguments so it survives fragment re-creation (the Activity reference is transient). */
public static class Builder implements Serializable {
    @NonNull
    protected final transient AppCompatActivity mContext;
    @StringRes
    protected final int mTitle;
    @StringRes
    protected int mTitleSub;
    @ColorInt
    protected int mPreselect;
    @StringRes
    protected int mDoneBtn = R.string.md_done_label;
    @StringRes
    protected int mBackBtn = R.string.md_back_label;
    @StringRes
    protected int mCancelBtn = R.string.md_cancel_label;
    @StringRes
    protected int mCustomBtn = R.string.md_custom_label;
    @StringRes
    protected int mPresetsBtn = R.string.md_presets_label;
    @Nullable
    protected int[] mColorsTop;
    @Nullable
    protected int[][] mColorsSub;
    @Nullable
    protected String mTag;
    @Nullable
    protected Theme mTheme;
    protected boolean mAccentMode = false;
    protected boolean mDynamicButtonColor = true;
    protected boolean mAllowUserCustom = true;
    protected boolean mAllowUserCustomAlpha = true;
    protected boolean mSetPreselectionColor = false;

    /** The context must be an AppCompatActivity that also implements {@link ColorCallback}. */
    public <ActivityType extends AppCompatActivity & ColorCallback> Builder(@NonNull ActivityType context, @StringRes int title) {
        mContext = context;
        mTitle = title;
    }

    /** Title shown while browsing a sub-palette. */
    @NonNull
    public Builder titleSub(@StringRes int titleSub) {
        mTitleSub = titleSub;
        return this;
    }

    /** Custom fragment tag; defaults to a mode-specific tag when unset. */
    @NonNull
    public Builder tag(@Nullable String tag) {
        mTag = tag;
        return this;
    }

    @NonNull
    public Builder theme(@NonNull Theme theme) {
        mTheme = theme;
        return this;
    }

    /** Color to preselect when the dialog first opens. */
    @NonNull
    public Builder preselect(@ColorInt int preselect) {
        mPreselect = preselect;
        mSetPreselectionColor = true;
        return this;
    }

    /** Show the accent palette instead of the primary one. */
    @NonNull
    public Builder accentMode(boolean accentMode) {
        mAccentMode = accentMode;
        return this;
    }

    @NonNull
    public Builder doneButton(@StringRes int text) {
        mDoneBtn = text;
        return this;
    }

    @NonNull
    public Builder backButton(@StringRes int text) {
        mBackBtn = text;
        return this;
    }

    @NonNull
    public Builder cancelButton(@StringRes int text) {
        mCancelBtn = text;
        return this;
    }

    @NonNull
    public Builder customButton(@StringRes int text) {
        mCustomBtn = text;
        return this;
    }

    @NonNull
    public Builder presetsButton(@StringRes int text) {
        mPresetsBtn = text;
        return this;
    }

    /** Tint the dialog's action buttons with the selected color. */
    @NonNull
    public Builder dynamicButtonColor(boolean enabled) {
        mDynamicButtonColor = enabled;
        return this;
    }

    /** Replace the stock palettes with caller-supplied colors. */
    @NonNull
    public Builder customColors(@NonNull int[] topLevel, @Nullable int[][] subLevel) {
        mColorsTop = topLevel;
        mColorsSub = subLevel;
        return this;
    }

    /** Replace the stock palettes, loading the top level from a color array resource. */
    @NonNull
    public Builder customColors(@ArrayRes int topLevel, @Nullable int[][] subLevel) {
        mColorsTop = DialogUtils.getColorArray(mContext, topLevel);
        mColorsSub = subLevel;
        return this;
    }

    /** Allow the user to enter an arbitrary color via the custom ARGB editor. */
    @NonNull
    public Builder allowUserColorInput(boolean allow) {
        mAllowUserCustom = allow;
        return this;
    }

    /** Expose an alpha channel in the custom ARGB editor. */
    @NonNull
    public Builder allowUserColorInputAlpha(boolean allow) {
        mAllowUserCustomAlpha = allow;
        return this;
    }

    /** Creates the dialog fragment with this builder stored in its arguments. */
    @NonNull
    public ColorChooserDialog build() {
        ColorChooserDialog dialog = new ColorChooserDialog();
        Bundle args = new Bundle();
        args.putSerializable("builder", this);
        dialog.setArguments(args);
        return dialog;
    }

    /** Builds and immediately shows the dialog on the builder's Activity. */
    @NonNull
    public ColorChooserDialog show() {
        ColorChooserDialog dialog = build();
        dialog.show(mContext);
        return dialog;
    }
}
private Builder getBuilder() {
    // NOTE(review): returns null when no builder was attached, yet most callers dereference the
    // result without checking (covered by the class-level @SuppressWarnings("ConstantConditions"));
    // a missing builder is treated as a programming error caught in onCreateDialog.
    if (getArguments() == null || !getArguments().containsKey("builder")) return null;
    return (Builder) getArguments().getSerializable("builder");
}
/** Dismisses and detaches any fragment already shown under the given tag, so a fresh chooser
 * can replace it. */
private void dismissIfNecessary(AppCompatActivity context, String tag) {
    final Fragment existing = context.getSupportFragmentManager().findFragmentByTag(tag);
    if (existing == null) return;
    ((DialogFragment) existing).dismiss();
    context.getSupportFragmentManager().beginTransaction().remove(existing).commit();
}
/** Returns the chooser currently attached under the given tag, or null when none exists (or the
 * fragment under that tag is not a ColorChooserDialog). */
@Nullable
public static ColorChooserDialog findVisible(@NonNull AppCompatActivity context, @ColorChooserTag String tag) {
    Fragment frag = context.getSupportFragmentManager().findFragmentByTag(tag);
    // instanceof already evaluates to false for null, so the separate null check was redundant.
    if (frag instanceof ColorChooserDialog)
        return (ColorChooserDialog) frag;
    return null;
}
/** Picks a tag matching the chooser mode, replaces any chooser already attached under that tag,
 * then shows this fragment. Note all three tag constants currently share one value. */
@NonNull
public ColorChooserDialog show(AppCompatActivity context) {
    String tag;
    Builder builder = getBuilder();
    if (builder.mColorsTop != null)
        tag = TAG_CUSTOM;
    else if (builder.mAccentMode)
        tag = TAG_ACCENT;
    else tag = TAG_PRIMARY;
    dismissIfNecessary(context, tag);
    show(context.getSupportFragmentManager(), tag);
    return this;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.engine.planner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.tajo.LocalTajoTestingUtility;
import org.apache.tajo.TajoConstants;
import org.apache.tajo.TajoTestingCluster;
import org.apache.tajo.algebra.Expr;
import org.apache.tajo.algebra.JoinType;
import org.apache.tajo.benchmark.TPCH;
import org.apache.tajo.catalog.*;
import org.apache.tajo.catalog.proto.CatalogProtos.FunctionType;
import org.apache.tajo.catalog.proto.CatalogProtos.StoreType;
import org.apache.tajo.common.TajoDataTypes.Type;
import org.apache.tajo.datum.TextDatum;
import org.apache.tajo.engine.eval.*;
import org.apache.tajo.engine.function.builtin.SumInt;
import org.apache.tajo.engine.json.CoreGsonHelper;
import org.apache.tajo.engine.parser.SQLAnalyzer;
import org.apache.tajo.engine.planner.logical.*;
import org.apache.tajo.master.TajoMaster;
import org.apache.tajo.master.session.Session;
import org.apache.tajo.util.CommonTestingUtil;
import org.apache.tajo.util.FileUtil;
import org.apache.tajo.util.KeyValueSet;
import org.apache.tajo.util.TUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static org.apache.tajo.TajoConstants.DEFAULT_DATABASE_NAME;
import static org.apache.tajo.TajoConstants.DEFAULT_TABLESPACE_NAME;
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
public class TestLogicalPlanner {
private static TajoTestingCluster util;
private static CatalogService catalog;
private static SQLAnalyzer sqlAnalyzer;
private static LogicalPlanner planner;
private static TPCH tpch;
private static Session session = LocalTajoTestingUtility.createDummySession();
@BeforeClass
public static void setUp() throws Exception {
    // Start an in-process catalog and register the tables/functions the test queries rely on.
    util = new TajoTestingCluster();
    util.startCatalogCluster();
    catalog = util.getMiniCatalogCluster().getCatalog();
    catalog.createTablespace(DEFAULT_TABLESPACE_NAME, "hdfs://localhost:1234");
    catalog.createDatabase(DEFAULT_DATABASE_NAME, DEFAULT_TABLESPACE_NAME);
    for (FunctionDesc funcDesc : TajoMaster.initBuiltinFunctions()) {
        catalog.createFunction(funcDesc);
    }

    // employee(name, empid, deptname)
    Schema schema = new Schema();
    schema.addColumn("name", Type.TEXT);
    schema.addColumn("empid", Type.INT4);
    schema.addColumn("deptname", Type.TEXT);

    // dept(deptname, manager)
    Schema schema2 = new Schema();
    schema2.addColumn("deptname", Type.TEXT);
    schema2.addColumn("manager", Type.TEXT);

    // score(deptname, score)
    Schema schema3 = new Schema();
    schema3.addColumn("deptname", Type.TEXT);
    schema3.addColumn("score", Type.INT4);

    TableMeta meta = CatalogUtil.newTableMeta(StoreType.CSV);
    TableDesc people = new TableDesc(
        CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, "employee"), schema, meta,
        CommonTestingUtil.getTestDir());
    catalog.createTable(people);

    TableDesc student = new TableDesc(
        CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "dept"), schema2, StoreType.CSV, new KeyValueSet(),
        CommonTestingUtil.getTestDir());
    catalog.createTable(student);

    TableDesc score = new TableDesc(
        CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "score"), schema3, StoreType.CSV, new KeyValueSet(),
        CommonTestingUtil.getTestDir());
    catalog.createTable(score);

    // User-defined aggregation referenced by the GROUP BY queries below; registered after the
    // TPC-H tables are created.
    FunctionDesc funcDesc = new FunctionDesc("sumtest", SumInt.class, FunctionType.AGGREGATION,
        CatalogUtil.newSimpleDataType(Type.INT4),
        CatalogUtil.newSimpleDataTypeArray(Type.INT4));

    // TPC-H Schema for Complex Queries
    String [] tpchTables = {
        "part", "supplier", "partsupp", "nation", "region", "lineitem"
    };
    tpch = new TPCH();
    tpch.loadSchemas();
    tpch.loadOutSchema();
    for (String table : tpchTables) {
        TableMeta m = CatalogUtil.newTableMeta(StoreType.CSV);
        TableDesc d = CatalogUtil.newTableDesc(
            CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, table), tpch.getSchema(table), m,
            CommonTestingUtil.getTestDir());
        catalog.createTable(d);
    }
    catalog.createFunction(funcDesc);
    sqlAnalyzer = new SQLAnalyzer();
    planner = new LogicalPlanner(catalog);
}
@AfterClass
public static void tearDown() throws Exception {
    // Stop the catalog cluster started in setUp().
    util.shutdownCatalogCluster();
}
// Test queries referenced by index throughout the tests below.
static String[] QUERIES = {
    "select name, empid, deptname from employee where empId > 500", // 0
    "select name, empid, e.deptname, manager from employee as e, dept as dp", // 1
    "select name, empid, e.deptname, manager, score from employee as e, dept, score", // 2
    "select p.deptname, sumtest(score) from dept as p, score group by p.deptName having sumtest(score) > 30", // 3
    "select p.deptname, score from dept as p, score order by score asc", // 4
    "select name from employee where empId = 100", // 5
    "select name, score from employee, score", // 6
    "select p.deptName, sumtest(score) from dept as p, score group by p.deptName", // 7
    "create table store1 as select p.deptName, sumtest(score) from dept as p, score group by p.deptName", // 8
    "select deptName, sumtest(score) from score group by deptName having sumtest(score) > 30", // 9
    "select 7 + 8 as res1, 8 * 9 as res2, 10 * 10 as res3", // 10
    "create index idx_employee on employee using bitmap (name null first, empId desc) with ('fillfactor' = 70)", // 11
    "select name, score from employee, score order by score limit 3", // 12
    "select length(name), length(deptname), *, empid+10 from employee where empId > 500", // 13
};
@Test
public final void testSingleRelation() throws CloneNotSupportedException, PlanningException {
    // Expects the plan shape ROOT -> PROJECTION -> SELECTION -> SCAN(employee) for QUERIES[0].
    Expr expr = sqlAnalyzer.parse(QUERIES[0]);
    LogicalPlan planNode = planner.createPlan(session, expr);
    LogicalNode plan = planNode.getRootBlock().getRoot();
    assertEquals(NodeType.ROOT, plan.getType());
    TestLogicalNode.testCloneLogicalNode(plan);
    LogicalRootNode root = (LogicalRootNode) plan;
    // Verify the plan survives a JSON round trip as well as cloning.
    testJsonSerDerObject(root);
    assertEquals(NodeType.PROJECTION, root.getChild().getType());
    ProjectionNode projNode = root.getChild();
    assertEquals(NodeType.SELECTION, projNode.getChild().getType());
    SelectionNode selNode = projNode.getChild();
    assertEquals(NodeType.SCAN, selNode.getChild().getType());
    ScanNode scanNode = selNode.getChild();
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "employee"), scanNode.getTableName());
}
/** Asserts that every column of {@code expected} exists in {@code schema} (looked up by simple
 * name) with a matching data type. */
public static void assertSchema(Schema expected, Schema schema) {
    Column expectedColumn;
    Column column;
    for (int i = 0; i < expected.size(); i++) {
        expectedColumn = expected.getColumn(i);
        column = schema.getColumn(expectedColumn.getSimpleName());
        // Fail with a clear message instead of an NPE when the column is missing entirely.
        assertNotNull("column '" + expectedColumn.getSimpleName() + "' not found in actual schema", column);
        assertEquals(expectedColumn.getSimpleName(), column.getSimpleName());
        assertEquals(expectedColumn.getDataType(), column.getDataType());
    }
}
@Test
public final void testImplicityJoinPlan() throws CloneNotSupportedException, PlanningException {
    // two relations
    Expr expr = sqlAnalyzer.parse(QUERIES[1]);
    LogicalPlan planNode = planner.createPlan(session, expr);
    LogicalNode plan = planNode.getRootBlock().getRoot();
    assertEquals(NodeType.ROOT, plan.getType());
    LogicalRootNode root = (LogicalRootNode) plan;
    testJsonSerDerObject(root);
    TestLogicalNode.testCloneLogicalNode(root);

    // The output schema must expose the projected columns of both relations.
    Schema expectedSchema = new Schema();
    expectedSchema.addColumn("name", Type.TEXT);
    expectedSchema.addColumn("empid", Type.INT4);
    expectedSchema.addColumn("deptname", Type.TEXT);
    expectedSchema.addColumn("manager", Type.TEXT);
    for (int i = 0; i < expectedSchema.size(); i++) {
        Column found = root.getOutSchema().getColumn(expectedSchema.getColumn(i).getSimpleName());
        assertEquals(expectedSchema.getColumn(i).getDataType(), found.getDataType());
    }

    // Expected shape: ROOT -> PROJECTION -> JOIN(SCAN(employee), SCAN(dept)).
    assertEquals(NodeType.PROJECTION, root.getChild().getType());
    ProjectionNode projNode = root.getChild();
    assertEquals(NodeType.JOIN, projNode.getChild().getType());
    JoinNode joinNode = projNode.getChild();
    assertEquals(NodeType.SCAN, joinNode.getLeftChild().getType());
    ScanNode leftNode = joinNode.getLeftChild();
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "employee"), leftNode.getTableName());
    assertEquals(NodeType.SCAN, joinNode.getRightChild().getType());
    ScanNode rightNode = joinNode.getRightChild();
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "dept"), rightNode.getTableName());

    // three relations
    expr = sqlAnalyzer.parse(QUERIES[2]);
    plan = planner.createPlan(session, expr).getRootBlock().getRoot();
    testJsonSerDerObject(plan);
    TestLogicalNode.testCloneLogicalNode(plan);
    expectedSchema.addColumn("score", Type.INT4);
    assertSchema(expectedSchema, plan.getOutSchema());
    assertEquals(NodeType.ROOT, plan.getType());
    root = (LogicalRootNode) plan;

    // Expected shape: a left-deep join tree — ROOT -> PROJECTION ->
    // JOIN(JOIN(SCAN(employee), SCAN(dept)), SCAN(score)).
    assertEquals(NodeType.PROJECTION, root.getChild().getType());
    projNode = root.getChild();
    assertEquals(NodeType.JOIN, projNode.getChild().getType());
    joinNode = projNode.getChild();
    assertEquals(NodeType.JOIN, joinNode.getLeftChild().getType());
    assertEquals(NodeType.SCAN, joinNode.getRightChild().getType());
    ScanNode scan1 = joinNode.getRightChild();
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "score"), scan1.getTableName());
    JoinNode leftNode2 = joinNode.getLeftChild();
    assertEquals(NodeType.JOIN, leftNode2.getType());
    assertEquals(NodeType.SCAN, leftNode2.getLeftChild().getType());
    ScanNode leftScan = leftNode2.getLeftChild();
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "employee"), leftScan.getTableName());
    assertEquals(NodeType.SCAN, leftNode2.getRightChild().getType());
    ScanNode rightScan = leftNode2.getRightChild();
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "dept"), rightScan.getTableName());
}
// Explicit-join test queries over employee, dept and score:
// 0 = natural join, 1 = inner join with ON quals, 2 = mixed left/right outer join.
String [] JOINS = {
"select name, dept.deptName, score from employee natural join dept natural join score", // 0
"select name, dept.deptName, score from employee inner join dept on employee.deptName = dept.deptName inner join score on dept.deptName = score.deptName", // 1
"select name, dept.deptName, score from employee left outer join dept on employee.deptName = dept.deptName right outer join score on dept.deptName = score.deptName" // 2
};
// Output schema shared by every query in JOINS above.
static Schema expectedJoinSchema;
static {
expectedJoinSchema = new Schema();
expectedJoinSchema.addColumn("name", Type.TEXT);
expectedJoinSchema.addColumn("deptname", Type.TEXT);
expectedJoinSchema.addColumn("score", Type.INT4);
}
// Verifies JOINS[0]: a natural join chain is planned as a left-deep tree of
// INNER joins with join qualifications derived from the common column names.
@Test
public final void testNaturalJoinPlan() throws PlanningException {
// two relations
Expr context = sqlAnalyzer.parse(JOINS[0]);
LogicalNode plan = planner.createPlan(session, context).getRootBlock().getRoot();
testJsonSerDerObject(plan);
assertSchema(expectedJoinSchema, plan.getOutSchema());
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
assertEquals(NodeType.PROJECTION, root.getChild().getType());
ProjectionNode proj = root.getChild();
assertEquals(NodeType.JOIN, proj.getChild().getType());
JoinNode join = proj.getChild();
assertEquals(JoinType.INNER, join.getJoinType());
assertEquals(NodeType.SCAN, join.getRightChild().getType());
// Natural join must still carry an implicit equi-join qualification.
assertTrue(join.hasJoinQual());
ScanNode scan = join.getRightChild();
assertEquals("default.score", scan.getTableName());
assertEquals(NodeType.JOIN, join.getLeftChild().getType());
// Descend into the inner (employee JOIN dept) subtree.
join = join.getLeftChild();
assertEquals(JoinType.INNER, join.getJoinType());
assertEquals(NodeType.SCAN, join.getLeftChild().getType());
ScanNode outer = join.getLeftChild();
assertEquals("default.employee", outer.getTableName());
assertEquals(NodeType.SCAN, join.getRightChild().getType());
ScanNode inner = join.getRightChild();
assertEquals("default.dept", inner.getTableName());
}
// Verifies JOINS[1]: explicit INNER joins with ON clauses produce a left-deep
// join tree whose inner-most join carries an EQUAL join qualification.
@Test
public final void testInnerJoinPlan() throws PlanningException {
// two relations
Expr expr = sqlAnalyzer.parse(JOINS[1]);
LogicalPlan plan = planner.createPlan(session, expr);
LogicalNode root = plan.getRootBlock().getRoot();
testJsonSerDerObject(root);
assertSchema(expectedJoinSchema, root.getOutSchema());
assertEquals(NodeType.ROOT, root.getType());
assertEquals(NodeType.PROJECTION, ((LogicalRootNode)root).getChild().getType());
ProjectionNode proj = ((LogicalRootNode)root).getChild();
assertEquals(NodeType.JOIN, proj.getChild().getType());
JoinNode join = proj.getChild();
assertEquals(JoinType.INNER, join.getJoinType());
assertEquals(NodeType.SCAN, join.getRightChild().getType());
ScanNode scan = join.getRightChild();
assertEquals("default.score", scan.getTableName());
assertEquals(NodeType.JOIN, join.getLeftChild().getType());
// Descend into the inner (employee JOIN dept) subtree.
join = join.getLeftChild();
assertEquals(JoinType.INNER, join.getJoinType());
assertEquals(NodeType.SCAN, join.getLeftChild().getType());
ScanNode outer = join.getLeftChild();
assertEquals("default.employee", outer.getTableName());
assertEquals(NodeType.SCAN, join.getRightChild().getType());
ScanNode inner = join.getRightChild();
assertEquals("default.dept", inner.getTableName());
// The ON clause becomes an EQUAL join qualification.
assertTrue(join.hasJoinQual());
assertEquals(EvalType.EQUAL, join.getJoinQual().getType());
}
// Verifies JOINS[2]: a LEFT OUTER join followed by a RIGHT OUTER join keeps
// each join's type and its EQUAL qualification in the planned tree.
@Test
public final void testOuterJoinPlan() throws PlanningException {
// two relations
Expr expr = sqlAnalyzer.parse(JOINS[2]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
testJsonSerDerObject(plan);
assertSchema(expectedJoinSchema, plan.getOutSchema());
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
assertEquals(NodeType.PROJECTION, root.getChild().getType());
ProjectionNode proj = root.getChild();
assertEquals(NodeType.JOIN, proj.getChild().getType());
JoinNode join = proj.getChild();
// The top join is the RIGHT OUTER join with score.
assertEquals(JoinType.RIGHT_OUTER, join.getJoinType());
assertEquals(NodeType.SCAN, join.getRightChild().getType());
ScanNode scan = join.getRightChild();
assertEquals("default.score", scan.getTableName());
assertEquals(NodeType.JOIN, join.getLeftChild().getType());
// Its left child is the LEFT OUTER join of employee and dept.
join = join.getLeftChild();
assertEquals(JoinType.LEFT_OUTER, join.getJoinType());
assertEquals(NodeType.SCAN, join.getLeftChild().getType());
ScanNode outer = join.getLeftChild();
assertEquals("default.employee", outer.getTableName());
assertEquals(NodeType.SCAN, join.getRightChild().getType());
ScanNode inner = join.getRightChild();
assertEquals("default.dept", inner.getTableName());
assertTrue(join.hasJoinQual());
assertEquals(EvalType.EQUAL, join.getJoinQual().getType());
}
// Verifies GROUP BY planning: QUERIES[7] (no HAVING) via testQuery7, and
// QUERIES[3] (with HAVING), which inserts a HAVING node above the GROUP_BY.
@Test
public final void testGroupby() throws CloneNotSupportedException, PlanningException {
// without 'having clause'
Expr context = sqlAnalyzer.parse(QUERIES[7]);
LogicalNode plan = planner.createPlan(session, context).getRootBlock().getRoot();
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
testJsonSerDerObject(root);
testQuery7(root.getChild());
// with having clause
context = sqlAnalyzer.parse(QUERIES[3]);
plan = planner.createPlan(session, context).getRootBlock().getRoot();
TestLogicalNode.testCloneLogicalNode(plan);
assertEquals(NodeType.ROOT, plan.getType());
root = (LogicalRootNode) plan;
// Expected shape: PROJECTION -> HAVING -> GROUP_BY -> JOIN(dept, score).
assertEquals(NodeType.PROJECTION, root.getChild().getType());
ProjectionNode projNode = root.getChild();
assertEquals(NodeType.HAVING, projNode.getChild().getType());
HavingNode havingNode = projNode.getChild();
assertEquals(NodeType.GROUP_BY, havingNode.getChild().getType());
GroupbyNode groupByNode = havingNode.getChild();
assertEquals(NodeType.JOIN, groupByNode.getChild().getType());
JoinNode joinNode = groupByNode.getChild();
assertEquals(NodeType.SCAN, joinNode.getLeftChild().getType());
ScanNode leftNode = joinNode.getLeftChild();
assertEquals("default.dept", leftNode.getTableName());
assertEquals(NodeType.SCAN, joinNode.getRightChild().getType());
ScanNode rightNode = joinNode.getRightChild();
assertEquals("default.score", rightNode.getTableName());
//LogicalOptimizer.optimize(context, plan);
}
/**
 * Plans the TPC-H Q2 multi-way join from a query file and checks that the
 * plan's output schema matches the expected Q2 schema, after a JSON
 * serialization round trip.
 */
@Test
public final void testMultipleJoin() throws IOException, PlanningException {
  File queryFile = new File("src/test/resources/queries/TestJoinQuery/testTPCHQ2Join.sql");
  Expr parsedQuery = sqlAnalyzer.parse(FileUtil.readTextFile(queryFile));
  LogicalNode rootNode =
      planner.createPlan(LocalTajoTestingUtility.createDummySession(), parsedQuery).getRootBlock().getRoot();
  testJsonSerDerObject(rootNode);
  assertSchema(tpch.getOutSchema("q2"), rootNode.getOutSchema());
}
/**
 * Marks entries of {@code qualMap} as found (value flipped to {@code TRUE}) when
 * {@code evalNode} is a {@link BinaryEval} whose operand types match
 * {@code leftType}/{@code rightType} and whose operands equal those of the map key.
 * The left operand is always expected to be a FIELD; the right operand may be a
 * FIELD, CONST, or ROW_CONSTANT depending on {@code rightType}.
 */
private final void findJoinQual(EvalNode evalNode, Map<BinaryEval, Boolean> qualMap,
                                EvalType leftType, EvalType rightType)
    throws IOException, PlanningException {
  Preconditions.checkArgument(evalNode instanceof BinaryEval);
  BinaryEval qual = (BinaryEval) evalNode;

  if (qual.getLeftExpr().getType() == leftType && qual.getRightExpr().getType() == rightType) {
    // All callers pass leftType == FIELD, so the left operand must be a field reference.
    assertEquals(qual.getLeftExpr().getType(), EvalType.FIELD);
    FieldEval leftField = (FieldEval) qual.getLeftExpr();

    for (Map.Entry<BinaryEval, Boolean> entry : qualMap.entrySet()) {
      FieldEval leftJoinField = (FieldEval) entry.getKey().getLeftExpr();

      if (qual.getRightExpr().getType() == entry.getKey().getRightExpr().getType()) {
        if (rightType == EvalType.FIELD) {
          FieldEval rightField = (FieldEval) qual.getRightExpr();
          FieldEval rightJoinField = (FieldEval) entry.getKey().getRightExpr();

          if (leftJoinField.getColumnRef().getQualifiedName().equals(leftField.getColumnRef().getQualifiedName())
              && rightField.getColumnRef().getQualifiedName().equals(rightJoinField.getColumnRef().getQualifiedName())) {
            // setValue() is the supported way to update a map while iterating its entry set.
            entry.setValue(Boolean.TRUE);
          }
        } else if (rightType == EvalType.CONST) {
          ConstEval rightField = (ConstEval) qual.getRightExpr();
          ConstEval rightJoinField = (ConstEval) entry.getKey().getRightExpr();

          if (leftJoinField.getColumnRef().getQualifiedName().equals(leftField.getColumnRef().getQualifiedName())
              && rightField.getValue().equals(rightJoinField.getValue())) {
            entry.setValue(Boolean.TRUE);
          }
        } else if (rightType == EvalType.ROW_CONSTANT) {
          RowConstantEval rightField = (RowConstantEval) qual.getRightExpr();
          RowConstantEval rightJoinField = (RowConstantEval) entry.getKey().getRightExpr();

          if (leftJoinField.getColumnRef().getQualifiedName().equals(leftField.getColumnRef().getQualifiedName())) {
            // IN-list style quals must contain exactly the same values in the same order.
            assertEquals(rightField.getValues().length, rightJoinField.getValues().length);
            for (int i = 0; i < rightField.getValues().length; i++) {
              assertEquals(rightField.getValues()[i], rightJoinField.getValues()[i]);
            }
            entry.setValue(Boolean.TRUE);
          }
        }
      }
    }
  }
}
/**
 * Checks that after optimization the expected field-to-field join qual
 * (n_regionkey = ps_suppkey) appears on some JOIN node of the plan.
 */
@Test
public final void testJoinWithMultipleJoinQual1() throws IOException, PlanningException {
  Expr expr = sqlAnalyzer.parse(
      FileUtil.readTextFile(new File
          ("src/test/resources/queries/TestJoinQuery/testJoinWithMultipleJoinQual1.sql")));
  LogicalPlan plan = planner.createPlan(LocalTajoTestingUtility.createDummySession(),expr);
  LogicalNode node = plan.getRootBlock().getRoot();
  testJsonSerDerObject(node);
  Schema expected = tpch.getOutSchema("q2");
  assertSchema(expected, node.getOutSchema());

  LogicalOptimizer optimizer = new LogicalOptimizer(util.getConfiguration());
  optimizer.optimize(plan);

  LogicalNode[] nodes = PlannerUtil.findAllNodes(node, NodeType.JOIN);
  Map<BinaryEval, Boolean> qualMap = TUtil.newHashMap();
  BinaryEval joinQual = new BinaryEval(EvalType.EQUAL
      , new FieldEval(new Column("default.n.n_regionkey", Type.INT4))
      , new FieldEval(new Column("default.ps.ps_suppkey", Type.INT4))
  );
  qualMap.put(joinQual, Boolean.FALSE);

  for (LogicalNode eachNode : nodes) {
    JoinNode joinNode = (JoinNode) eachNode;
    EvalNode[] evalNodes = AlgebraicUtil.toConjunctiveNormalFormArray(joinNode.getJoinQual());
    for (EvalNode evalNode : evalNodes) {
      findJoinQual(evalNode, qualMap, EvalType.FIELD, EvalType.FIELD);
    }
  }

  // Report a proper assertion failure (not an IllegalArgumentException from
  // Preconditions.checkArgument) when an expected join qual is missing.
  for (Map.Entry<BinaryEval, Boolean> entry : qualMap.entrySet()) {
    assertTrue("JoinQual not found. -> required JoinQual:" + entry.getKey().toJson(),
        entry.getValue().booleanValue());
  }
}
/**
 * Checks that a constant selection predicate (n_name = 'MOROCCO') is pushed
 * down into a table scan after optimization.
 */
@Test
public final void testJoinWithMultipleJoinQual2() throws IOException, PlanningException {
  Expr expr = sqlAnalyzer.parse(
      FileUtil.readTextFile(new File
          ("src/test/resources/queries/TestJoinQuery/testJoinWithMultipleJoinQual2.sql")));
  LogicalPlan plan = planner.createPlan(LocalTajoTestingUtility.createDummySession(),expr);
  LogicalNode node = plan.getRootBlock().getRoot();
  testJsonSerDerObject(node);
  LogicalOptimizer optimizer = new LogicalOptimizer(util.getConfiguration());
  optimizer.optimize(plan);

  LogicalNode[] nodes = PlannerUtil.findAllNodes(node, NodeType.SCAN);
  Map<BinaryEval, Boolean> qualMap = TUtil.newHashMap();
  BinaryEval joinQual = new BinaryEval(EvalType.EQUAL
      , new FieldEval(new Column("default.n.n_name", Type.TEXT))
      , new ConstEval(new TextDatum("MOROCCO"))
  );
  qualMap.put(joinQual, Boolean.FALSE);

  for (LogicalNode eachNode : nodes) {
    ScanNode scanNode = (ScanNode) eachNode;
    if (scanNode.hasQual()) {
      EvalNode[] evalNodes = AlgebraicUtil.toConjunctiveNormalFormArray(scanNode.getQual());
      for (EvalNode evalNode : evalNodes) {
        findJoinQual(evalNode, qualMap, EvalType.FIELD, EvalType.CONST);
      }
    }
  }

  // Report a proper assertion failure (not an IllegalArgumentException from
  // Preconditions.checkArgument) when the expected selection qual is missing.
  for (Map.Entry<BinaryEval, Boolean> entry : qualMap.entrySet()) {
    assertTrue("SelectionQual not found. -> required JoinQual:" + entry.getKey().toJson(),
        entry.getValue().booleanValue());
  }
}
/**
 * Checks that an IN-list predicate (n_name IN ('ARGENTINA','ETHIOPIA','MOROCCO'))
 * is pushed down into a table scan as a ROW_CONSTANT qual after optimization.
 */
@Test
public final void testJoinWithMultipleJoinQual3() throws IOException, PlanningException {
  Expr expr = sqlAnalyzer.parse(
      FileUtil.readTextFile(new File
          ("src/test/resources/queries/TestJoinQuery/testJoinWithMultipleJoinQual3.sql")));
  LogicalPlan plan = planner.createPlan(LocalTajoTestingUtility.createDummySession(),expr);
  LogicalNode node = plan.getRootBlock().getRoot();
  testJsonSerDerObject(node);
  LogicalOptimizer optimizer = new LogicalOptimizer(util.getConfiguration());
  optimizer.optimize(plan);

  LogicalNode[] nodes = PlannerUtil.findAllNodes(node, NodeType.SCAN);
  Map<BinaryEval, Boolean> qualMap = TUtil.newHashMap();
  TextDatum[] datums = new TextDatum[3];
  datums[0] = new TextDatum("ARGENTINA");
  datums[1] = new TextDatum("ETHIOPIA");
  datums[2] = new TextDatum("MOROCCO");
  BinaryEval joinQual = new BinaryEval(EvalType.EQUAL
      , new FieldEval(new Column("default.n.n_name", Type.TEXT))
      , new RowConstantEval(datums)
  );
  qualMap.put(joinQual, Boolean.FALSE);

  for (LogicalNode eachNode : nodes) {
    ScanNode scanNode = (ScanNode) eachNode;
    if (scanNode.hasQual()) {
      EvalNode[] evalNodes = AlgebraicUtil.toConjunctiveNormalFormArray(scanNode.getQual());
      for (EvalNode evalNode : evalNodes) {
        findJoinQual(evalNode, qualMap, EvalType.FIELD, EvalType.ROW_CONSTANT);
      }
    }
  }

  // Report a proper assertion failure (not an IllegalArgumentException from
  // Preconditions.checkArgument) when the expected scan qual is missing.
  for (Map.Entry<BinaryEval, Boolean> entry : qualMap.entrySet()) {
    assertTrue("ScanQual not found. -> required JoinQual:" + entry.getKey().toJson(),
        entry.getValue().booleanValue());
  }
}
/**
 * Checks both pushdowns at once: a theta join qual (t.n_nationkey > s.s_suppkey)
 * must remain on a JOIN node, while the IN-list predicate on n_name must be
 * pushed down into a table scan.
 */
@Test
public final void testJoinWithMultipleJoinQual4() throws IOException, PlanningException {
  Expr expr = sqlAnalyzer.parse(
      FileUtil.readTextFile(new File
          ("src/test/resources/queries/TestJoinQuery/testJoinWithMultipleJoinQual4.sql")));
  LogicalPlan plan = planner.createPlan(LocalTajoTestingUtility.createDummySession(),expr);
  LogicalNode node = plan.getRootBlock().getRoot();
  testJsonSerDerObject(node);
  LogicalOptimizer optimizer = new LogicalOptimizer(util.getConfiguration());
  optimizer.optimize(plan);

  // Expected scan qual: n_name IN ('ARGENTINA','ETHIOPIA','MOROCCO').
  Map<BinaryEval, Boolean> scanMap = TUtil.newHashMap();
  TextDatum[] datums = new TextDatum[3];
  datums[0] = new TextDatum("ARGENTINA");
  datums[1] = new TextDatum("ETHIOPIA");
  datums[2] = new TextDatum("MOROCCO");
  BinaryEval scanQual = new BinaryEval(EvalType.EQUAL
      , new FieldEval(new Column("default.n.n_name", Type.TEXT))
      , new RowConstantEval(datums)
  );
  scanMap.put(scanQual, Boolean.FALSE);

  // Expected join qual: t.n_nationkey > s.s_suppkey.
  Map<BinaryEval, Boolean> joinQualMap = TUtil.newHashMap();
  BinaryEval joinQual = new BinaryEval(EvalType.GTH
      , new FieldEval(new Column("default.t.n_nationkey", Type.INT4))
      , new FieldEval(new Column("default.s.s_suppkey", Type.INT4))
  );
  joinQualMap.put(joinQual, Boolean.FALSE);

  LogicalNode[] nodes = PlannerUtil.findAllNodes(node, NodeType.JOIN);
  for (LogicalNode eachNode : nodes) {
    JoinNode joinNode = (JoinNode) eachNode;
    if (joinNode.hasJoinQual()) {
      EvalNode[] evalNodes = AlgebraicUtil.toConjunctiveNormalFormArray(joinNode.getJoinQual());
      for (EvalNode evalNode : evalNodes) {
        findJoinQual(evalNode, joinQualMap, EvalType.FIELD, EvalType.FIELD);
      }
    }
  }

  nodes = PlannerUtil.findAllNodes(node, NodeType.SCAN);
  for (LogicalNode eachNode : nodes) {
    ScanNode scanNode = (ScanNode) eachNode;
    if (scanNode.hasQual()) {
      EvalNode[] evalNodes = AlgebraicUtil.toConjunctiveNormalFormArray(scanNode.getQual());
      for (EvalNode evalNode : evalNodes) {
        findJoinQual(evalNode, scanMap, EvalType.FIELD, EvalType.ROW_CONSTANT);
      }
    }
  }

  // Report proper assertion failures (not IllegalArgumentExceptions from
  // Preconditions.checkArgument) when expected quals are missing.
  for (Map.Entry<BinaryEval, Boolean> entry : joinQualMap.entrySet()) {
    assertTrue("JoinQual not found. -> required JoinQual:" + entry.getKey().toJson(),
        entry.getValue().booleanValue());
  }
  for (Map.Entry<BinaryEval, Boolean> entry : scanMap.entrySet()) {
    assertTrue("ScanQual not found. -> required JoinQual:" + entry.getKey().toJson(),
        entry.getValue().booleanValue());
  }
}
/**
 * Shared assertions for the QUERIES[7] plan shape:
 * PROJECTION -> GROUP_BY -> JOIN(scan dept, scan score).
 */
static void testQuery7(LogicalNode plan) {
  assertEquals(NodeType.PROJECTION, plan.getType());
  ProjectionNode projection = (ProjectionNode) plan;

  assertEquals(NodeType.GROUP_BY, projection.getChild().getType());
  GroupbyNode groupby = projection.getChild();

  assertEquals(NodeType.JOIN, groupby.getChild().getType());
  JoinNode join = groupby.getChild();

  assertEquals(NodeType.SCAN, join.getLeftChild().getType());
  ScanNode leftScan = join.getLeftChild();
  assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "dept"), leftScan.getTableName());

  assertEquals(NodeType.SCAN, join.getRightChild().getType());
  ScanNode rightScan = join.getRightChild();
  assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "score"), rightScan.getTableName());
}
// Verifies CTAS planning for QUERIES[8]: a CREATE_TABLE node sits directly
// below the root, with the QUERIES[7] plan shape underneath it.
@Test
public final void testStoreTable() throws CloneNotSupportedException, PlanningException {
Expr context = sqlAnalyzer.parse(QUERIES[8]);
LogicalNode plan = planner.createPlan(session, context).getRootBlock().getRoot();
TestLogicalNode.testCloneLogicalNode(plan);
testJsonSerDerObject(plan);
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
assertEquals(NodeType.CREATE_TABLE, root.getChild().getType());
StoreTableNode storeNode = root.getChild();
// The stored sub-query is the same shape as QUERIES[7].
testQuery7(storeNode.getChild());
}
// Verifies ORDER BY planning for QUERIES[4]: a SORT node appears between the
// projection and the join of dept and score.
@Test
public final void testOrderBy() throws CloneNotSupportedException, PlanningException {
Expr expr = sqlAnalyzer.parse(QUERIES[4]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
testJsonSerDerObject(plan);
TestLogicalNode.testCloneLogicalNode(plan);
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
assertEquals(NodeType.PROJECTION, root.getChild().getType());
ProjectionNode projNode = root.getChild();
assertEquals(NodeType.SORT, projNode.getChild().getType());
SortNode sortNode = projNode.getChild();
assertEquals(NodeType.JOIN, sortNode.getChild().getType());
JoinNode joinNode = sortNode.getChild();
assertEquals(NodeType.SCAN, joinNode.getLeftChild().getType());
ScanNode leftNode = joinNode.getLeftChild();
assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "dept"), leftNode.getTableName());
assertEquals(NodeType.SCAN, joinNode.getRightChild().getType());
ScanNode rightNode = joinNode.getRightChild();
assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "score"), rightNode.getTableName());
}
/**
 * Verifies LIMIT planning for QUERIES[12]: the plan shape is
 * ROOT -> PROJECTION -> LIMIT -> SORT.
 */
@Test
public final void testLimit() throws CloneNotSupportedException, PlanningException {
  Expr parsed = sqlAnalyzer.parse(QUERIES[12]);
  LogicalNode planRoot = planner.createPlan(session, parsed).getRootBlock().getRoot();
  testJsonSerDerObject(planRoot);
  TestLogicalNode.testCloneLogicalNode(planRoot);

  assertEquals(NodeType.ROOT, planRoot.getType());
  LogicalRootNode rootNode = (LogicalRootNode) planRoot;

  assertEquals(NodeType.PROJECTION, rootNode.getChild().getType());
  ProjectionNode projection = rootNode.getChild();

  assertEquals(NodeType.LIMIT, projection.getChild().getType());
  LimitNode limit = projection.getChild();

  assertEquals(NodeType.SORT, limit.getChild().getType());
}
// Verifies select-project-join planning for QUERIES[5]: before optimization the
// selection remains an explicit SELECTION node above the employee scan.
@Test
public final void testSPJPush() throws CloneNotSupportedException, PlanningException {
Expr expr = sqlAnalyzer.parse(QUERIES[5]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
testJsonSerDerObject(plan);
TestLogicalNode.testCloneLogicalNode(plan);
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
assertEquals(NodeType.PROJECTION, root.getChild().getType());
ProjectionNode projNode = root.getChild();
assertEquals(NodeType.SELECTION, projNode.getChild().getType());
SelectionNode selNode = projNode.getChild();
assertEquals(NodeType.SCAN, selNode.getChild().getType());
ScanNode scanNode = selNode.getChild();
assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "employee"), scanNode.getTableName());
}
/**
 * Smoke test for QUERIES[6]: only verifies the plan survives a JSON
 * serialization round trip and a deep clone.
 */
@Test
public final void testSPJ() throws CloneNotSupportedException, PlanningException {
  Expr parsed = sqlAnalyzer.parse(QUERIES[6]);
  LogicalNode planRoot = planner.createPlan(session, parsed).getRootBlock().getRoot();
  testJsonSerDerObject(planRoot);
  TestLogicalNode.testCloneLogicalNode(planRoot);
}
// Verifies that a plan deserialized from JSON preserves the node structure:
// ROOT -> PROJECTION -> HAVING -> GROUP_BY -> SCAN for QUERIES[9].
@Test
public final void testJson() throws PlanningException {
Expr expr = sqlAnalyzer.parse(QUERIES[9]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
testJsonSerDerObject(plan);
// Walk the deserialized copy, not the original, to verify the round trip.
String json = plan.toJson();
LogicalNode fromJson = CoreGsonHelper.fromJson(json, LogicalNode.class);
assertEquals(NodeType.ROOT, fromJson.getType());
LogicalNode project = ((LogicalRootNode)fromJson).getChild();
assertEquals(NodeType.PROJECTION, project.getType());
assertEquals(NodeType.HAVING, ((ProjectionNode) project).getChild().getType());
HavingNode havingNode = ((ProjectionNode) project).getChild();
assertEquals(NodeType.GROUP_BY, havingNode.getChild().getType());
GroupbyNode groupbyNode = havingNode.getChild();
assertEquals(NodeType.SCAN, groupbyNode.getChild().getType());
LogicalNode scan = groupbyNode.getChild();
assertEquals(NodeType.SCAN, scan.getType());
}
// Verifies post-order traversal: nodes are pushed onto a stack while visiting,
// so popping yields them top-down (ROOT first, SCANs last).
@Test
public final void testVisitor() throws PlanningException {
// two relations
Expr expr = sqlAnalyzer.parse(QUERIES[1]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
TestVisitor vis = new TestVisitor();
plan.postOrder(vis);
assertEquals(NodeType.ROOT, vis.stack.pop().getType());
assertEquals(NodeType.PROJECTION, vis.stack.pop().getType());
assertEquals(NodeType.JOIN, vis.stack.pop().getType());
assertEquals(NodeType.SCAN, vis.stack.pop().getType());
assertEquals(NodeType.SCAN, vis.stack.pop().getType());
}
// Minimal visitor used by testVisitor: records each visited node on a stack so
// the traversal order can be asserted afterwards.
private static class TestVisitor implements LogicalNodeVisitor {
Stack<LogicalNode> stack = new Stack<LogicalNode>();
@Override
public void visit(LogicalNode node) {
stack.push(node);
}
}
// Verifies planning of a table-less expression query (QUERIES[10]): the root's
// child is an EXPRS node and the output columns carry the given aliases.
@Test
public final void testExprNode() throws PlanningException {
Expr expr = sqlAnalyzer.parse(QUERIES[10]);
LogicalPlan rootNode = planner.createPlan(session, expr);
LogicalNode plan = rootNode.getRootBlock().getRoot();
testJsonSerDerObject(plan);
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
assertEquals(NodeType.EXPRS, root.getChild().getType());
Schema out = root.getOutSchema();
Iterator<Column> it = out.getColumns().iterator();
Column col = it.next();
assertEquals("res1", col.getSimpleName());
col = it.next();
assertEquals("res2", col.getSimpleName());
col = it.next();
assertEquals("res3", col.getSimpleName());
}
// Verifies SELECT * expansion (QUERIES[13]): the projection exposes all six
// columns and the selection/scan chain targets the employee table.
@Test
public final void testAsterisk() throws CloneNotSupportedException, PlanningException {
Expr expr = sqlAnalyzer.parse(QUERIES[13]);
LogicalPlan planNode = planner.createPlan(session, expr);
LogicalNode plan = planNode.getRootBlock().getRoot();
assertEquals(NodeType.ROOT, plan.getType());
TestLogicalNode.testCloneLogicalNode(plan);
LogicalRootNode root = (LogicalRootNode) plan;
testJsonSerDerObject(root);
assertEquals(NodeType.PROJECTION, root.getChild().getType());
ProjectionNode projNode = root.getChild();
// The asterisk expands to all six columns of the underlying relation.
assertEquals(6, projNode.getOutSchema().size());
assertEquals(NodeType.SELECTION, projNode.getChild().getType());
SelectionNode selNode = projNode.getChild();
assertEquals(NodeType.SCAN, selNode.getChild().getType());
ScanNode scanNode = selNode.getChild();
assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "employee"), scanNode.getTableName());
}
// Queries exercising column aliases in the output schema (plain alias on an
// aggregate, and an alias used as a GROUP BY key).
static final String ALIAS [] = {
"select deptName, sum(score) as total from score group by deptName",
"select em.empId as id, sum(score) as total from employee as em inner join score using (em.deptName) group by id"
};
// Verifies that column aliases appear in the final output schema for both
// ALIAS queries ("deptname"/"total", then "id"/"total").
@Test
public final void testAlias1() throws PlanningException {
Expr expr = sqlAnalyzer.parse(ALIAS[0]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
LogicalRootNode root = (LogicalRootNode) plan;
testJsonSerDerObject(root);
Schema finalSchema = root.getOutSchema();
Iterator<Column> it = finalSchema.getColumns().iterator();
Column col = it.next();
assertEquals("deptname", col.getSimpleName());
col = it.next();
assertEquals("total", col.getSimpleName());
// Second query: alias on a join column used as a GROUP BY key.
expr = sqlAnalyzer.parse(ALIAS[1]);
plan = planner.createPlan(session, expr).getRootBlock().getRoot();
root = (LogicalRootNode) plan;
finalSchema = root.getOutSchema();
it = finalSchema.getColumns().iterator();
col = it.next();
assertEquals("id", col.getSimpleName());
col = it.next();
assertEquals("total", col.getSimpleName());
}
/**
 * Verifies column aliasing for ALIAS[1]: the output schema exposes the
 * aliased names "id" and "total", in that order.
 */
@Test
public final void testAlias2() throws PlanningException {
  Expr parsed = sqlAnalyzer.parse(ALIAS[1]);
  LogicalNode planRoot = planner.createPlan(session, parsed).getRootBlock().getRoot();
  LogicalRootNode rootNode = (LogicalRootNode) planRoot;
  testJsonSerDerObject(rootNode);

  Iterator<Column> columns = rootNode.getOutSchema().getColumns().iterator();
  assertEquals("id", columns.next().getSimpleName());
  assertEquals("total", columns.next().getSimpleName());
}
// DDL query used by testCreateTableDef: external CSV table with options.
static final String CREATE_TABLE [] = {
"create external table table1 (name text, age int, earn bigint, score real) using csv with ('csv.delimiter'='|') location '/tmp/data'"
};
// Verifies CREATE EXTERNAL TABLE planning: column names and SQL-to-Tajo type
// mapping (int->INT4, bigint->INT8, real->FLOAT4), storage type, location and options.
@Test
public final void testCreateTableDef() throws PlanningException {
Expr expr = sqlAnalyzer.parse(CREATE_TABLE[0]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
LogicalRootNode root = (LogicalRootNode) plan;
testJsonSerDerObject(root);
assertEquals(NodeType.CREATE_TABLE, root.getChild().getType());
CreateTableNode createTable = root.getChild();
Schema def = createTable.getTableSchema();
assertEquals("name", def.getColumn(0).getSimpleName());
assertEquals(Type.TEXT, def.getColumn(0).getDataType().getType());
assertEquals("age", def.getColumn(1).getSimpleName());
assertEquals(Type.INT4, def.getColumn(1).getDataType().getType());
assertEquals("earn", def.getColumn(2).getSimpleName());
assertEquals(Type.INT8, def.getColumn(2).getDataType().getType());
assertEquals("score", def.getColumn(3).getSimpleName());
assertEquals(Type.FLOAT4, def.getColumn(3).getDataType().getType());
assertEquals(StoreType.CSV, createTable.getStorageType());
assertEquals("/tmp/data", createTable.getPath().toString());
assertTrue(createTable.hasOptions());
assertEquals("|", createTable.getOptions().get("csv.delimiter"));
}
// Fixtures for the cuboid-generation tests: testGenerateCuboidsResult holds all
// 2^3 column subsets of the three test columns; testCubeByResult holds all 2^2
// subsets of two employee columns.
private static final List<Set<Column>> testGenerateCuboidsResult
= Lists.newArrayList();
private static final int numCubeColumns = 3;
private static final Column [] testGenerateCuboids = new Column[numCubeColumns];
private static final List<Set<Column>> testCubeByResult
= Lists.newArrayList();
private static final Column [] testCubeByCuboids = new Column[2];
static {
testGenerateCuboids[0] = new Column("col1", Type.INT4);
testGenerateCuboids[1] = new Column("col2", Type.INT8);
testGenerateCuboids[2] = new Column("col3", Type.FLOAT4);
// All 8 subsets of {col1, col2, col3}, including the empty set.
testGenerateCuboidsResult.add(new HashSet<Column>());
testGenerateCuboidsResult.add(Sets.newHashSet(testGenerateCuboids[0]));
testGenerateCuboidsResult.add(Sets.newHashSet(testGenerateCuboids[1]));
testGenerateCuboidsResult.add(Sets.newHashSet(testGenerateCuboids[2]));
testGenerateCuboidsResult.add(Sets.newHashSet(testGenerateCuboids[0],
testGenerateCuboids[1]));
testGenerateCuboidsResult.add(Sets.newHashSet(testGenerateCuboids[0],
testGenerateCuboids[2]));
testGenerateCuboidsResult.add(Sets.newHashSet(testGenerateCuboids[1],
testGenerateCuboids[2]));
testGenerateCuboidsResult.add(Sets.newHashSet(testGenerateCuboids[0],
testGenerateCuboids[1], testGenerateCuboids[2]));
testCubeByCuboids[0] = new Column("employee.name", Type.TEXT);
testCubeByCuboids[1] = new Column("employee.empid", Type.INT4);
// All 4 subsets of {employee.name, employee.empid}.
testCubeByResult.add(new HashSet<Column>());
testCubeByResult.add(Sets.newHashSet(testCubeByCuboids[0]));
testCubeByResult.add(Sets.newHashSet(testCubeByCuboids[1]));
testCubeByResult.add(Sets.newHashSet(testCubeByCuboids[0],
testCubeByCuboids[1]));
}
// Verifies LogicalPlanner.generateCuboids: for 3 columns it must produce all
// 2^3 column subsets, matching the precomputed testGenerateCuboidsResult.
@Test
public final void testGenerateCuboids() {
Column [] columns = new Column[3];
columns[0] = new Column("col1", Type.INT4);
columns[1] = new Column("col2", Type.INT8);
columns[2] = new Column("col3", Type.FLOAT4);
List<Column[]> cube = LogicalPlanner.generateCuboids(columns);
assertEquals(((int)Math.pow(2, numCubeColumns)), cube.size());
// Convert to sets so cuboid membership can be checked order-independently.
Set<Set<Column>> cuboids = Sets.newHashSet();
for (Column [] cols : cube) {
cuboids.add(Sets.newHashSet(cols));
}
for (Set<Column> result : testGenerateCuboidsResult) {
assertTrue(cuboids.contains(result));
}
}
// Set-operation query used by testSetPlan (UNION of two selections).
static final String setStatements [] = {
"select deptName from employee where deptName like 'data%' union select deptName from score where deptName like 'data%'",
};
// Verifies UNION planning: the root's child is a UNION node whose two children
// are the projections of the operand queries.
@Test
public final void testSetPlan() throws PlanningException {
Expr expr = sqlAnalyzer.parse(setStatements[0]);
LogicalNode plan = planner.createPlan(session, expr).getRootBlock().getRoot();
testJsonSerDerObject(plan);
assertEquals(NodeType.ROOT, plan.getType());
LogicalRootNode root = (LogicalRootNode) plan;
assertEquals(NodeType.UNION, root.getChild().getType());
UnionNode union = root.getChild();
assertEquals(NodeType.PROJECTION, union.getLeftChild().getType());
assertEquals(NodeType.PROJECTION, union.getRightChild().getType());
}
// Queries exercising SELECT set qualifiers: none (0), DISTINCT (1), ALL (2).
static final String [] setQualifiers = {
"select name, empid from employee",
"select distinct name, empid from employee",
"select all name, empid from employee",
};
/**
 * Verifies SELECT set qualifiers: no qualifier and ALL plan a plain
 * PROJECTION over SCAN, while DISTINCT inserts a GROUP_BY for duplicate
 * elimination.
 */
@Test
public void testSetQualifier() throws PlanningException {
  // No qualifier: PROJECTION -> SCAN.
  Expr context = sqlAnalyzer.parse(setQualifiers[0]);
  LogicalNode plan = planner.createPlan(session, context).getRootBlock().getRoot();
  testJsonSerDerObject(plan);
  assertEquals(NodeType.ROOT, plan.getType());
  LogicalRootNode root = (LogicalRootNode) plan;
  assertEquals(NodeType.PROJECTION, root.getChild().getType());
  ProjectionNode projectionNode = root.getChild();
  assertEquals(NodeType.SCAN, projectionNode.getChild().getType());

  // DISTINCT: PROJECTION -> GROUP_BY.
  context = sqlAnalyzer.parse(setQualifiers[1]);
  plan = planner.createPlan(session, context).getRootBlock().getRoot();
  testJsonSerDerObject(plan);
  assertEquals(NodeType.ROOT, plan.getType());
  root = (LogicalRootNode) plan;
  assertEquals(NodeType.PROJECTION, root.getChild().getType());
  projectionNode = root.getChild();
  assertEquals(NodeType.GROUP_BY, projectionNode.getChild().getType());

  // ALL: same shape as the unqualified query.
  context = sqlAnalyzer.parse(setQualifiers[2]);
  plan = planner.createPlan(session, context).getRootBlock().getRoot();
  testJsonSerDerObject(plan);
  // Consistency fix: the first two cases assert the root type before the cast;
  // this case previously cast without checking.
  assertEquals(NodeType.ROOT, plan.getType());
  root = (LogicalRootNode) plan;
  assertEquals(NodeType.PROJECTION, root.getChild().getType());
  projectionNode = root.getChild();
  assertEquals(NodeType.SCAN, projectionNode.getChild().getType());
}
// Asserts that a logical plan survives a JSON round trip: serialize, parse
// back, and deep-compare against the original.
public void testJsonSerDerObject(LogicalNode rootNode) {
String json = rootNode.toJson();
LogicalNode fromJson = CoreGsonHelper.fromJson(json, LogicalNode.class);
assertTrue("JSON (de) serialization equivalence check", rootNode.deepEquals(fromJson));
}
// Table descriptions
//
// employee (name text, empid int4, deptname text)
// dept (deptname text, manager text)
// score (deptname text, score int4)
// INSERT statements exercising target tables, explicit target columns,
// LOCATION targets, and the OVERWRITE modifier.
static final String [] insertStatements = {
"insert into score select name from employee", // 0
"insert into score select name, empid from employee", // 1
"insert into employee (name, deptname) select * from dept", // 2
"insert into location '/tmp/data' select name, empid from employee", // 3
"insert overwrite into employee (name, deptname) select * from dept", // 4
"insert overwrite into LOCATION '/tmp/data' select * from dept" // 5
};
/**
 * Verifies insertStatements[0]: a plain INSERT INTO produces a non-overwrite
 * InsertNode targeting the fully-qualified score table.
 */
@Test
public final void testInsertInto0() throws PlanningException {
  Expr parsed = sqlAnalyzer.parse(insertStatements[0]);
  LogicalPlan logicalPlan = planner.createPlan(session, parsed);
  assertEquals(1, logicalPlan.getQueryBlocks().size());

  InsertNode insert = getInsertNode(logicalPlan);
  assertFalse(insert.isOverwrite());
  assertTrue(insert.hasTargetTable());
  assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "score"), insert.getTableName());
}
@Test
public final void testInsertInto1() throws PlanningException {
    // INSERT INTO with a projection of several columns.
    final Expr parsed = sqlAnalyzer.parse(insertStatements[1]);
    final LogicalPlan logicalPlan = planner.createPlan(session, parsed);
    assertEquals(1, logicalPlan.getQueryBlocks().size());
    final InsertNode node = getInsertNode(logicalPlan);
    assertFalse(node.isOverwrite());
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "score"), node.getTableName());
}
@Test
public final void testInsertInto2() throws PlanningException {
    // INSERT INTO with an explicit target column list.
    Expr expr = sqlAnalyzer.parse(insertStatements[2]);
    LogicalPlan plan = planner.createPlan(session, expr);
    assertEquals(1, plan.getQueryBlocks().size());
    InsertNode insertNode = getInsertNode(plan);
    assertFalse(insertNode.isOverwrite());
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "employee"), insertNode.getTableName());
    assertTrue(insertNode.hasTargetSchema());
    // Fix: JUnit's assertEquals takes the expected value first; the
    // arguments were reversed, which garbles failure messages.
    assertEquals("name", insertNode.getTargetSchema().getColumn(0).getSimpleName());
    assertEquals("deptname", insertNode.getTargetSchema().getColumn(1).getSimpleName());
}
@Test
public final void testInsertInto3() throws PlanningException {
    // INSERT INTO LOCATION: the target is a path, not a catalog table.
    final Expr parsed = sqlAnalyzer.parse(insertStatements[3]);
    final LogicalPlan logicalPlan = planner.createPlan(session, parsed);
    assertEquals(1, logicalPlan.getQueryBlocks().size());
    final InsertNode node = getInsertNode(logicalPlan);
    assertFalse(node.isOverwrite());
    assertTrue(node.hasPath());
}
@Test
public final void testInsertInto4() throws PlanningException {
    // INSERT OVERWRITE INTO a table with an explicit column list.
    Expr expr = sqlAnalyzer.parse(insertStatements[4]);
    LogicalPlan plan = planner.createPlan(session, expr);
    assertEquals(1, plan.getQueryBlocks().size());
    InsertNode insertNode = getInsertNode(plan);
    assertTrue(insertNode.isOverwrite());
    assertTrue(insertNode.hasTargetTable());
    assertEquals(CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "employee"), insertNode.getTableName());
    assertTrue(insertNode.hasTargetSchema());
    // Fix: JUnit's assertEquals takes the expected value first; the
    // arguments were reversed, which garbles failure messages.
    assertEquals("name", insertNode.getTargetSchema().getColumn(0).getSimpleName());
    assertEquals("deptname", insertNode.getTargetSchema().getColumn(1).getSimpleName());
}
@Test
public final void testInsertInto5() throws PlanningException {
    // INSERT OVERWRITE INTO LOCATION: overwrite semantics on a path target.
    final Expr parsed = sqlAnalyzer.parse(insertStatements[5]);
    final LogicalPlan logicalPlan = planner.createPlan(session, parsed);
    assertEquals(1, logicalPlan.getQueryBlocks().size());
    final InsertNode node = getInsertNode(logicalPlan);
    assertTrue(node.isOverwrite());
    assertTrue(node.hasPath());
}
/** Asserts that the plan root's child is an INSERT node and returns it. */
private static InsertNode getInsertNode(LogicalPlan plan) {
    final LogicalRootNode rootNode = plan.getRootBlock().getRoot();
    assertEquals(NodeType.INSERT, rootNode.getChild().getType());
    return rootNode.getChild();
}
}
| |
package org.drools.core.rule.builder.dialect.asm;
import org.drools.core.base.TypeResolver;
import org.mvel2.asm.ClassWriter;
import org.mvel2.asm.MethodVisitor;
import org.mvel2.asm.Type;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static java.lang.reflect.Modifier.isAbstract;
import static org.drools.core.util.ClassUtils.convertFromPrimitiveType;
import static org.drools.core.util.ClassUtils.convertPrimitiveNameToType;
import static org.drools.core.util.ClassUtils.convertToPrimitiveType;
import static org.mvel2.asm.Opcodes.AASTORE;
import static org.mvel2.asm.Opcodes.ACC_PUBLIC;
import static org.mvel2.asm.Opcodes.ACC_STATIC;
import static org.mvel2.asm.Opcodes.ACC_SUPER;
import static org.mvel2.asm.Opcodes.ACONST_NULL;
import static org.mvel2.asm.Opcodes.ALOAD;
import static org.mvel2.asm.Opcodes.ANEWARRAY;
import static org.mvel2.asm.Opcodes.ARETURN;
import static org.mvel2.asm.Opcodes.BIPUSH;
import static org.mvel2.asm.Opcodes.CHECKCAST;
import static org.mvel2.asm.Opcodes.D2F;
import static org.mvel2.asm.Opcodes.D2I;
import static org.mvel2.asm.Opcodes.D2L;
import static org.mvel2.asm.Opcodes.DUP;
import static org.mvel2.asm.Opcodes.F2D;
import static org.mvel2.asm.Opcodes.F2I;
import static org.mvel2.asm.Opcodes.F2L;
import static org.mvel2.asm.Opcodes.GETFIELD;
import static org.mvel2.asm.Opcodes.GETSTATIC;
import static org.mvel2.asm.Opcodes.I2B;
import static org.mvel2.asm.Opcodes.I2C;
import static org.mvel2.asm.Opcodes.I2D;
import static org.mvel2.asm.Opcodes.I2F;
import static org.mvel2.asm.Opcodes.I2L;
import static org.mvel2.asm.Opcodes.I2S;
import static org.mvel2.asm.Opcodes.ILOAD;
import static org.mvel2.asm.Opcodes.INSTANCEOF;
import static org.mvel2.asm.Opcodes.INVOKEINTERFACE;
import static org.mvel2.asm.Opcodes.INVOKESPECIAL;
import static org.mvel2.asm.Opcodes.INVOKESTATIC;
import static org.mvel2.asm.Opcodes.INVOKEVIRTUAL;
import static org.mvel2.asm.Opcodes.ISTORE;
import static org.mvel2.asm.Opcodes.L2D;
import static org.mvel2.asm.Opcodes.L2F;
import static org.mvel2.asm.Opcodes.L2I;
import static org.mvel2.asm.Opcodes.NEW;
import static org.mvel2.asm.Opcodes.NEWARRAY;
import static org.mvel2.asm.Opcodes.PUTFIELD;
import static org.mvel2.asm.Opcodes.PUTSTATIC;
import static org.mvel2.asm.Opcodes.RETURN;
import static org.mvel2.asm.Opcodes.T_BOOLEAN;
import static org.mvel2.asm.Opcodes.T_BYTE;
import static org.mvel2.asm.Opcodes.T_CHAR;
import static org.mvel2.asm.Opcodes.T_DOUBLE;
import static org.mvel2.asm.Opcodes.T_FLOAT;
import static org.mvel2.asm.Opcodes.T_INT;
import static org.mvel2.asm.Opcodes.T_LONG;
import static org.mvel2.asm.Opcodes.T_SHORT;
public class ClassGenerator {
    // When true, every generated class is also written to disk as a .class
    // file for offline inspection (see dumpGeneratedClass(byte[])).
    private static final boolean DUMP_GENERATED_CLASSES = false;

    // Fully qualified name of the class being generated.
    private final String className;
    private final TypeResolver typeResolver;
    private final ClassLoader classLoader;

    // Class-level access flags; ACC_SUPER is required for modern
    // invokespecial semantics.
    private int access = ACC_PUBLIC + ACC_SUPER;
    private String signature;
    private Class superClass = Object.class;
    private Class<?>[] interfaces;

    // JVM internal form of className, e.g. "org/example/Foo".
    private final String classDescriptor;
    private String superDescriptor;

    // Fields and methods to emit, in insertion order.
    private List<ClassPartDescr> classParts = new ArrayList<ClassPartDescr>();
    private StaticInitializerDescr staticInitializer = null;

    // Lazily computed, cached results of generateBytecode()/generateClass().
    private byte[] bytecode;
    private Class<?> clazz;

    // Reflective handle to the protected ClassLoader.defineClass, used to
    // inject generated classes directly into the target loader.
    // NOTE(review): setAccessible on a JDK method may be rejected on
    // Java 9+ module systems -- TODO confirm behavior there.
    private static final Method defineClassMethod;

    static {
        try {
            defineClassMethod = ClassLoader.class.getDeclaredMethod("defineClass", String.class, byte[].class, int.class, int.class);
            defineClassMethod.setAccessible(true);
        } catch (NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
    }

    public ClassGenerator(String className, ClassLoader classLoader) {
        this(className, classLoader, null);
    }

    public ClassGenerator(String className, ClassLoader classLoader, TypeResolver typeResolver) {
        this.className = className;
        this.classDescriptor = className.replace('.', '/');
        this.classLoader = classLoader;
        // Fall back to a minimal resolver that only knows primitives plus
        // whatever the given ClassLoader can load.
        this.typeResolver = typeResolver == null ? new InternalTypeResolver(this.classLoader) : typeResolver;
    }

    // One emittable piece of the class (a field, a method, or <clinit>).
    private interface ClassPartDescr {
        void write(ClassGenerator cg, ClassWriter cw);
    }

    /**
     * Generates (once) and returns the bytecode of the configured class.
     * The result is cached, so parts added after the first call have no effect.
     */
    public byte[] generateBytecode() {
        if (bytecode == null) {
            ClassWriter cw = createClassWriter(classLoader, access, getClassDescriptor(), signature, getSuperClassDescriptor(), toInteralNames(interfaces));
            for (int i = 0; i < classParts.size(); i++) { // don't use iterator to allow method visits to add more class fields and methods
                classParts.get(i).write(this, cw);
            }
            if (staticInitializer != null) {
                staticInitializer.write(this, cw);
            }
            cw.visitEnd();
            bytecode = cw.toByteArray();
            if (DUMP_GENERATED_CLASSES) {
                dumpGeneratedClass(bytecode);
            }
        }
        return bytecode;
    }

    /**
     * Defines (once) the generated class in the configured ClassLoader.
     * Tries the reflective ClassLoader.defineClass first; on any failure
     * (e.g. access denied or class already defined) it falls back to a
     * throwaway child loader, which can always define the class.
     */
    private Class<?> generateClass() {
        if (clazz == null) {
            byte[] bytecode = generateBytecode();
            try {
                clazz = (Class<?>) defineClassMethod.invoke(classLoader, className, bytecode, 0, bytecode.length);
            } catch (Exception e) {
                clazz = new InternalClassLoader(classLoader).defineClass(className, bytecode);
            }
        }
        return clazz;
    }

    // Child loader whose only purpose is exposing the protected defineClass.
    private static class InternalClassLoader extends ClassLoader {
        InternalClassLoader(ClassLoader classLoader) {
            super(classLoader);
        }
        Class<?> defineClass(String name, byte[] b) {
            return defineClass(name, b, 0, b.length);
        }
    }

    /**
     * Forces a dump of the generated bytecode to disk. The negated check is
     * intentional: when DUMP_GENERATED_CLASSES is true, generateBytecode()
     * has already written the file, so dumping again would be redundant.
     */
    public void dumpGeneratedClass() {
        if (!DUMP_GENERATED_CLASSES) {
            dumpGeneratedClass(generateBytecode());
        }
    }
private void dumpGeneratedClass(byte[] bytecode) {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(className + ".class");
fos.write(bytecode);
fos.flush();
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
if (fos != null) {
try {
fos.close();
} catch (IOException e) { }
}
}
}
public <T> T newInstance() {
try {
return (T)generateClass().newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public <T> T newInstance(Class paramType, Object param) {
try {
return (T)generateClass().getConstructor(paramType).newInstance(param);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
    // Accessors

    /** Returns the JVM internal name of the generated class (dots as slashes). */
    public String getClassDescriptor() {
        return classDescriptor;
    }

    /** Lazily computes and caches the internal name of the superclass. */
    public String getSuperClassDescriptor() {
        if (superDescriptor == null) superDescriptor = toInteralName(superClass);
        return superDescriptor;
    }

    // The setters below return this to allow fluent configuration chains.
    public ClassGenerator setAccess(int access) {
        this.access = access;
        return this;
    }

    public ClassGenerator setSignature(String signature) {
        this.signature = signature;
        return this;
    }

    public ClassGenerator setSuperClass(Class superClass) {
        this.superClass = superClass;
        return this;
    }

    public ClassGenerator setInterfaces(Class<?>... interfaces) {
        this.interfaces = interfaces;
        return this;
    }

    // Utility

    // Cache of Class -> JVM type descriptor (e.g. "Ljava/lang/String;").
    private Map<Class<?>, String> descriptorsCache = new HashMap<Class<?>, String>();

    private String descriptorOf(Class<?> type) {
        String descriptor = descriptorsCache.get(type);
        if (descriptor == null) {
            descriptor = Type.getDescriptor(type);
            descriptorsCache.put(type, descriptor);
        }
        return descriptor;
    }

    /**
     * Builds a JVM method descriptor such as {@code (ILjava/lang/String;)V};
     * a null return type means void.
     */
    public String methodDescr(Class<?> type, Class<?>... args) {
        StringBuilder desc = new StringBuilder("(");
        if (args != null) for (Class<?> arg : args) desc.append(descriptorOf(arg));
        desc.append(")").append(type == null ? "V" : descriptorOf(type));
        return desc.toString();
    }

    private Type toType(Class<?> clazz) {
        return toType(clazz.getName());
    }

    private Type toType(String typeName) {
        return Type.getType(toTypeDescriptor(typeName));
    }

    public String toTypeDescriptor(Class<?> clazz) {
        return descriptorOf(clazz);
    }

    /**
     * Converts a (possibly array) source-level type name into a JVM type
     * descriptor; unresolvable names are assumed to be object types.
     */
    public String toTypeDescriptor(String className) {
        String arrayPrefix = "";
        while (className.endsWith("[]")) {
            arrayPrefix += "[";
            className = className.substring(0, className.length()-2);
        }
        String typeDescriptor;
        try {
            typeDescriptor = toTypeDescriptor(typeResolver.resolveType(className));
        } catch (ClassNotFoundException e) {
            // Not resolvable here: trust the caller and build an object descriptor.
            typeDescriptor = "L" + className.replace('.', '/') + ";";
        }
        return arrayPrefix + typeDescriptor;
    }

    // NOTE: "Interal" is a long-standing typo kept for API compatibility.
    public String toInteralName(Class<?> clazz) {
        return clazz.isPrimitive() ? descriptorOf(clazz) : Type.getType(clazz).getInternalName();
    }

    /**
     * Converts a (possibly array) source-level type name into a JVM internal
     * name; primitives map to their one-letter descriptors.
     */
    public String toInteralName(String className) {
        String arrayPrefix = "";
        while (className.endsWith("[]")) {
            arrayPrefix += "[";
            className = className.substring(0, className.length()-2);
        }
        String typeDescriptor;
        boolean isPrimitive = false;
        try {
            Class<?> clazz = typeResolver.resolveType(className);
            isPrimitive = clazz.isPrimitive();
            typeDescriptor = toInteralName(clazz);
        } catch (ClassNotFoundException e) {
            typeDescriptor = className.replace('.', '/');
        }
        // Array element types need full descriptor syntax, so object internal
        // names are wrapped in "L...;" before the '[' markers are prepended.
        if (!isPrimitive && arrayPrefix.length() > 0) typeDescriptor = "L" + typeDescriptor + ";";
        return arrayPrefix + typeDescriptor;
    }

    private String[] toInteralNames(Class<?>[] classes) {
        if (classes == null) return null;
        String[] internals = new String[classes.length];
        for (int i = 0; i < classes.length; i++) internals[i] = toInteralName(classes[i]);
        return internals;
    }
    // FieldDescr

    public ClassGenerator addField(int access, String name, Class<?> type) {
        return addField(access, name, type, null, null);
    }

    public ClassGenerator addField(int access, String name, Class<?> type, String signature) {
        return addField(access, name, type, signature, null);
    }

    public ClassGenerator addStaticField(int access, String name, Class<?> type, Object value) {
        return addField(access + ACC_STATIC, name, type, null, value);
    }

    public ClassGenerator addStaticField(int access, String name, Class<?> type, String signature, Object value) {
        return addField(access + ACC_STATIC, name, type, signature, value);
    }

    // All addField/addStaticField overloads funnel into this method; the
    // field is only actually written out when generateBytecode() runs.
    private ClassGenerator addField(int access, String name, Class<?> type, String signature, Object value) {
        classParts.add(new FieldDescr(access, name, descriptorOf(type), signature, value));
        return this;
    }

    // Immutable description of a field to emit later.
    private static class FieldDescr implements ClassPartDescr {
        private final int access;
        private final String name;
        private final String desc;
        private final String signature;
        private final Object value;

        FieldDescr(int access, String name, String desc, String signature, Object value) {
            this.access = access;
            this.name = name;
            this.desc = desc;
            this.signature = signature;
            this.value = value;
        }

        public void write(ClassGenerator cg, ClassWriter cw) {
            cw.visitField(access, name, desc, signature, value).visitEnd();
        }
    }

    public ClassGenerator addDefaultConstructor() {
        return addDefaultConstructor(EMPTY_METHOD_BODY);
    }

    // Adds a public constructor that first invokes super() and then runs the
    // supplied body.
    public ClassGenerator addDefaultConstructor(final MethodBody body, Class<?>... args) {
        MethodBody constructorBody = new MethodBody() {
            public void body(MethodVisitor mv) {
                mv.visitVarInsn(ALOAD, 0);
                mv.visitMethodInsn(INVOKESPECIAL, getClassGenerator().getSuperClassDescriptor(), "<init>", "()V"); // super()
                body.writeBody(getClassGenerator(), mv);
            }
        };
        return addMethod(ACC_PUBLIC, "<init>", methodDescr(null, args), constructorBody);
    }

    public ClassGenerator addMethod(int access, String name, String desc) {
        return addMethod(access, name, desc, null, null, EMPTY_METHOD_BODY);
    }

    public ClassGenerator addMethod(int access, String name, String desc, MethodBody body) {
        return addMethod(access, name, desc, null, null, body);
    }

    public ClassGenerator addMethod(int access, String name, String desc, String signature, MethodBody body) {
        return addMethod(access, name, desc, signature, null, body);
    }

    public ClassGenerator addMethod(int access, String name, String desc, String[] exceptions, MethodBody body) {
        return addMethod(access, name, desc, null, exceptions, body);
    }

    public ClassGenerator addMethod(int access, String name, String desc, String signature, String[] exceptions, MethodBody body) {
        classParts.add(new MethodDescr(access, name, desc, signature, exceptions, body));
        return this;
    }

    // Static initializer bodies accumulate; they are emitted as one <clinit>.
    public ClassGenerator addStaticInitBlock(MethodBody body) {
        if (staticInitializer == null) {
            staticInitializer = new StaticInitializerDescr();
        }
        staticInitializer.addInitializer(body);
        return this;
    }

    // Shared no-op method body: emits a bare return.
    private static final MethodBody EMPTY_METHOD_BODY = new MethodBody() {
        protected final void body(MethodVisitor mv) {
            mv.visitInsn(RETURN); // return
        }
    };
// MethodBody
public abstract static class MethodBody {
        // Owning generator; bound only for the duration of writeBody().
        private ClassGenerator classGenerator;
        protected MethodVisitor mv;
        // Type stored per local-variable slot, used by load()/print helpers.
        private Map<Integer, Type> storedTypes;

        // Subclasses emit the method's instructions here.
        protected abstract void body(MethodVisitor mv);

        /**
         * Binds this body to a generator/visitor pair, emits the
         * instructions, then clears the bindings so the body can be reused.
         */
        public final void writeBody(ClassGenerator classGenerator, MethodVisitor mv) {
            this.classGenerator = classGenerator;
            this.mv = mv;
            try {
                body(mv);
            } finally {
                this.classGenerator = null;
                this.mv = null;
            }
        }

        protected ClassGenerator getClassGenerator() {
            return classGenerator;
        }

        // Maps a generic opcode (e.g. ILOAD) to the variant for the given type.
        protected final int getCodeForType(Class<?> typeClass, int opcode) {
            return Type.getType(typeClass).getOpcode(opcode);
        }

        protected final int store(int registry, Class<?> typeClass) {
            return store(registry, Type.getType(typeClass));
        }

        protected final int store(int registry, String typeName) {
            return store(registry, classGenerator.toType(typeName));
        }

        /**
         * Emits a type-appropriate store into the given slot, remembers the
         * slot's type for later load() calls, and returns the slot width
         * (2 for long/double, otherwise 1).
         */
        protected final int store(int registry, Type t) {
            if (storedTypes == null) storedTypes = new HashMap<Integer, Type>();
            mv.visitVarInsn(t.getOpcode(ISTORE), registry);
            storedTypes.put(registry, t);
            return t.getSize();
        }

        // Loads a slot previously written via store(); NPEs on unknown slots.
        protected final void load(int registry) {
            mv.visitVarInsn(storedTypes.get(registry).getOpcode(ILOAD), registry);
        }

        // Loads a slot and boxes the value if it holds a primitive.
        protected final void loadAsObject(int registry) {
            Type type = storedTypes.get(registry);
            mv.visitVarInsn(type.getOpcode(ILOAD), registry);
            String typeName = type.getClassName();
            convertPrimitiveToObject(typeName);
        }

        protected void convertPrimitiveToObject(Class<?> primitiveClass) {
            convertPrimitiveToObject(primitiveClass.getName());
        }

        // Emits the matching wrapper valueOf call; non-primitive type names
        // fall through with no instruction emitted.
        private void convertPrimitiveToObject(String typeName) {
            if (typeName.equals("int"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Integer", "valueOf", "(I)Ljava/lang/Integer;");
            else if (typeName.equals("boolean"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;");
            else if (typeName.equals("char"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Character", "valueOf", "(C)Ljava/lang/Character;");
            else if (typeName.equals("byte"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Byte", "valueOf", "(B)Ljava/lang/Byte;");
            else if (typeName.equals("short"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Short", "valueOf", "(S)Ljava/lang/Short;");
            else if (typeName.equals("float"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Float", "valueOf", "(F)Ljava/lang/Float;");
            else if (typeName.equals("long"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Long", "valueOf", "(J)Ljava/lang/Long;");
            else if (typeName.equals("double"))
                mv.visitMethodInsn(INVOKESTATIC, "java/lang/Double", "valueOf", "(D)Ljava/lang/Double;");
        }

        // Debug helper: emits System.out.print(msg).
        protected final void print(String msg) {
            mv.visitFieldInsn(GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
            mv.visitLdcInsn(msg);
            mv.visitMethodInsn(INVOKEVIRTUAL, "java/io/PrintStream", "print", "(Ljava/lang/String;)V");
        }

        // Debug helper: emits System.out.println(msg).
        protected final void println(String msg) {
            mv.visitFieldInsn(GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
            mv.visitLdcInsn(msg);
            mv.visitMethodInsn(INVOKEVIRTUAL, "java/io/PrintStream", "println", "(Ljava/lang/String;)V");
        }

        // Debug helper: prints the value currently held in the given slot.
        protected final void printRegistryValue(int reg) {
            Type type = storedTypes.get(reg);
            if (type == null) {
                printRegistryValue(reg, Object.class);
                return;
            }
            printRegistryValue(reg, convertPrimitiveNameToType(type.getClassName()));
        }

        protected final void printRegistryValue(int reg, Class<?> clazz) {
            mv.visitFieldInsn(GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
            mv.visitVarInsn(Type.getType(clazz).getOpcode(ILOAD), reg);
            invokeVirtual(PrintStream.class, "print", null, clazz);
        }

        // Debug helper: stashes the stack top into slot 100 and prints it.
        // NOTE(review): assumes slot 100 is otherwise unused -- debug only.
        protected final void printLastRegistry(Class<?> clazz) {
            Type t = Type.getType(clazz);
            mv.visitVarInsn(t.getOpcode(ISTORE), 100);
            mv.visitFieldInsn(GETSTATIC, "java/lang/System", "out", "Ljava/io/PrintStream;");
            mv.visitVarInsn(t.getOpcode(ILOAD), 100);
            invokeVirtual(PrintStream.class, "print", null, clazz);
        }

        // Debug helper: emits code that prints a stack trace and returns.
        protected final void printStack() {
            mv.visitTypeInsn(NEW, "java/lang/RuntimeException");
            mv.visitInsn(DUP);
            mv.visitMethodInsn(INVOKESPECIAL, "java/lang/RuntimeException", "<init>", "()V");
            mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/RuntimeException", "printStackTrace", "()V");
            mv.visitInsn(RETURN);
        }

        // Emits code building and returning an array of the given constants.
        protected final <T> void returnAsArray(T[] array) {
            createArray(array.getClass().getComponentType(), array.length);
            for (int i = 0; i < array.length; i++) {
                mv.visitInsn(DUP);
                push(i);
                push(array[i]);
                mv.visitInsn(AASTORE);
            }
            mv.visitInsn(ARETURN);
        }

        protected final <T> void returnAsArray(Collection<T> collection, Class<T> clazz) {
            createArray(clazz, collection.size());
            int i = 0;
            for (T item : collection) {
                mv.visitInsn(DUP);
                push(i++);
                push(item);
                mv.visitInsn(AASTORE);
            }
            mv.visitInsn(ARETURN);
        }

        // Emits a NEWARRAY/ANEWARRAY for the given component type and size.
        protected final void createArray(Class<?> componentType, int size) {
            mv.visitLdcInsn(size);
            if (componentType.isPrimitive()) {
                int newPrimitiveArrayType = T_BOOLEAN;
                if (componentType == int.class) {
                    newPrimitiveArrayType = T_INT;
                } else if (componentType == long.class) {
                    newPrimitiveArrayType = T_LONG;
                } else if (componentType == double.class) {
                    newPrimitiveArrayType = T_DOUBLE;
                } else if (componentType == float.class) {
                    newPrimitiveArrayType = T_FLOAT;
                } else if (componentType == char.class) {
                    newPrimitiveArrayType = T_CHAR;
                } else if (componentType == short.class) {
                    newPrimitiveArrayType = T_SHORT;
                } else if (componentType == byte.class) {
                    newPrimitiveArrayType = T_BYTE;
                }
                mv.visitIntInsn(NEWARRAY, newPrimitiveArrayType);
            } else {
                mv.visitTypeInsn(ANEWARRAY, internalName(componentType));
            }
        }

        // Pushes a constant; Booleans load the Boolean.TRUE/FALSE singletons,
        // everything else goes through the constant pool (LDC).
        protected final void push(Object obj) {
            if (obj instanceof Boolean) {
                mv.visitFieldInsn(GETSTATIC, "java/lang/Boolean", (Boolean)obj ? "TRUE" : "FALSE", "Ljava/lang/Boolean;");
            } else {
                mv.visitLdcInsn(obj);
            }
        }
protected final void push(Object obj, Class<?> type) {
if (obj == null) {
mv.visitInsn(ACONST_NULL);
return;
}
if (type == String.class || type == Object.class) {
mv.visitLdcInsn(obj);
} else if (type == char.class) {
mv.visitIntInsn(BIPUSH, (int)((Character)obj).charValue());
} else if (type.isPrimitive()) {
if (obj instanceof String) {
obj = coerceStringToPrimitive(type, (String)obj);
} else {
obj = coercePrimitiveToPrimitive(type, obj);
}
mv.visitLdcInsn(obj);
} else if (type == Class.class) {
mv.visitLdcInsn(classGenerator.toType((Class<?>) obj));
} else if (type == Character.class) {
invokeConstructor(Character.class, new Object[]{ obj.toString().charAt(0) }, char.class);
} else if (type.isInterface() || isAbstract(type.getModifiers())) {
push(obj, obj.getClass());
} else {
invokeConstructor(type, new Object[]{ obj.toString() }, String.class);
}
}
        // Re-boxes a number as the wrapper matching the requested primitive
        // so LDC loads a constant of the right category.
        private Object coercePrimitiveToPrimitive(Class<?> primitiveType, Object value) {
            if (primitiveType == long.class) {
                return ((Number)value).longValue();
            }
            if (primitiveType == double.class) {
                return ((Number)value).doubleValue();
            }
            if (primitiveType == float.class) {
                return ((Number)value).floatValue();
            }
            return value;
        }

        // Parses a String literal into the boxed form of the given primitive.
        private Object coerceStringToPrimitive(Class<?> primitiveType, String value) {
            if (primitiveType == boolean.class) {
                return Boolean.valueOf(value);
            }
            if (primitiveType == int.class) {
                return Integer.valueOf(value);
            }
            if (primitiveType == long.class) {
                return Long.valueOf(value);
            }
            if (primitiveType == float.class) {
                return Float.valueOf(value);
            }
            if (primitiveType == double.class) {
                return Double.valueOf(value);
            }
            if (primitiveType == char.class) {
                return Character.valueOf(value.charAt(0));
            }
            if (primitiveType == short.class) {
                return Short.valueOf(value);
            }
            if (primitiveType == byte.class) {
                return Byte.valueOf(value);
            }
            throw new RuntimeException("Unexpected type: " + primitiveType);
        }

        /**
         * Emits the conversions needed to turn a stack value of type
         * {@code from} into type {@code to}: primitive widening/narrowing,
         * boxing, unboxing, or a checked cast, as appropriate.
         */
        protected final void cast(Class<?> from, Class<?> to) {
            if (to.isAssignableFrom(from)) {
                // Already compatible; no instruction needed.
                return;
            }
            if (from.isPrimitive()) {
                if (to.isPrimitive()) {
                    castPrimitiveToPrimitive(from, to);
                } else {
                    // primitive -> object: convert then box.
                    Class toPrimitive = convertToPrimitiveType(to);
                    castPrimitiveToPrimitive(convertToPrimitiveType(from), toPrimitive);
                    castFromPrimitive(toPrimitive);
                }
            } else {
                if (to.isPrimitive()) {
                    // object -> primitive: unbox then convert.
                    Class<?> primitiveFrom = convertToPrimitiveType(from);
                    castToPrimitive(primitiveFrom);
                    castPrimitiveToPrimitive(primitiveFrom, to);
                } else {
                    cast(to);
                }
            }
        }

        protected final void cast(Class<?> clazz) {
            mv.visitTypeInsn(CHECKCAST, internalName(clazz));
        }

        protected final void instanceOf(Class<?> clazz) {
            mv.visitTypeInsn(INSTANCEOF, internalName(clazz));
        }

        // Emits the x2y opcode for a primitive-to-primitive conversion;
        // conversions with no matching opcode emit nothing.
        protected final void castPrimitiveToPrimitive(Class<?> from, Class<?> to) {
            if (from == to) return;
            if (from == int.class) {
                if (to == long.class) mv.visitInsn(I2L);
                else if (to == float.class) mv.visitInsn(I2F);
                else if (to == double.class) mv.visitInsn(I2D);
                else if (to == byte.class) mv.visitInsn(I2B);
                else if (to == char.class) mv.visitInsn(I2C);
                else if (to == short.class) mv.visitInsn(I2S);
            } else if (from == long.class) {
                if (to == int.class) mv.visitInsn(L2I);
                else if (to == float.class) mv.visitInsn(L2F);
                else if (to == double.class) mv.visitInsn(L2D);
            } else if (from == float.class) {
                if (to == int.class) mv.visitInsn(F2I);
                else if (to == long.class) mv.visitInsn(F2L);
                else if (to == double.class) mv.visitInsn(F2D);
            } else if (from == double.class) {
                if (to == int.class) mv.visitInsn(D2I);
                else if (to == long.class) mv.visitInsn(D2L);
                else if (to == float.class) mv.visitInsn(D2F);
            }
        }

        // Boxes the primitive on the stack via the wrapper's valueOf.
        protected final void castFromPrimitive(Class<?> clazz) {
            Class<?> boxedType = convertFromPrimitiveType(clazz);
            invokeStatic(boxedType, "valueOf", boxedType, clazz);
        }

        // Unboxes the object on the stack to the given primitive; numbers go
        // through Number.xxxValue(), booleans and chars via their wrappers.
        protected final void castToPrimitive(Class<?> clazz) {
            if (clazz == boolean.class) {
                cast(Boolean.class);
                invokeVirtual(Boolean.class, "booleanValue", boolean.class);
            } else if (clazz == char.class) {
                cast(Character.class);
                invokeVirtual(Character.class, "charValue", char.class);
            } else {
                cast(Number.class);
                invokeVirtual(Number.class, clazz.getName() + "Value", clazz);
            }
        }

        // Emits the invoke instruction matching the reflected method's kind
        // (static / interface / virtual).
        protected final void invoke(Method method) {
            if ((method.getModifiers() & Modifier.STATIC) > 0) {
                invokeStatic(method.getDeclaringClass(), method.getName(), method.getReturnType(), method.getParameterTypes());
            } else if (method.getDeclaringClass().isInterface()) {
                invokeInterface(method.getDeclaringClass(), method.getName(), method.getReturnType(), method.getParameterTypes());
            } else {
                invokeVirtual(method.getDeclaringClass(), method.getName(), method.getReturnType(), method.getParameterTypes());
            }
        }

        // Invokes a method on the class currently being generated.
        protected final void invokeThis(String methodName, Class<?> returnedType, Class<?>... paramsType) {
            mv.visitMethodInsn(INVOKEVIRTUAL, classDescriptor(), methodName, methodDescr(returnedType, paramsType));
        }

        protected final void invokeStatic(Class<?> clazz, String methodName, Class<?> returnedType, Class<?>... paramsType) {
            invoke(INVOKESTATIC, clazz, methodName, returnedType, paramsType);
        }

        protected final void invokeVirtual(Class<?> clazz, String methodName, Class<?> returnedType, Class<?>... paramsType) {
            invoke(INVOKEVIRTUAL, clazz, methodName, returnedType, paramsType);
        }

        protected final void invokeInterface(Class<?> clazz, String methodName, Class<?> returnedType, Class<?>... paramsType) {
            invoke(INVOKEINTERFACE, clazz, methodName, returnedType, paramsType);
        }

        protected final void invokeConstructor(Class<?> clazz) {
            invokeConstructor(clazz, null);
        }

        // Emits new/dup, loads the constructor arguments as constants, then
        // invokes <init>; the constructed instance is left on the stack.
        protected final void invokeConstructor(Class<?> clazz, Object[] params, Class<?>... paramsType) {
            mv.visitTypeInsn(NEW, internalName(clazz));
            mv.visitInsn(DUP);
            if (params != null) {
                for (Object param : params) mv.visitLdcInsn(param);
            }
            invokeSpecial(clazz, "<init>", null, paramsType);
        }

        protected final void invokeSpecial(Class<?> clazz, String methodName, Class<?> returnedType, Class<?>... paramsType) {
            invoke(INVOKESPECIAL, clazz, methodName, returnedType, paramsType);
        }

        protected final void invoke(int opCode, Class<?> clazz, String methodName, Class<?> returnedType, Class<?>... paramsType) {
            mv.visitMethodInsn(opCode, internalName(clazz), methodName, methodDescr(returnedType, paramsType));
        }

        // Static field access on the class being generated.
        protected final void putStaticField(String name, Class<?> type) {
            mv.visitFieldInsn(PUTSTATIC, classDescriptor(), name, classGenerator.descriptorOf(type));
        }

        protected final void getStaticField(String name, Class<?> type) {
            mv.visitFieldInsn(GETSTATIC, classDescriptor(), name, classGenerator.descriptorOf(type));
        }

        // Copies an (object) local-variable slot into a field of "this".
        protected final void putFieldInThisFromRegistry(String name, Class<?> type, int regNr) {
            mv.visitVarInsn(ALOAD, 0);
            mv.visitVarInsn(ALOAD, regNr);
            putFieldInThis(name, type);
        }

        protected final void putFieldInThis(String name, Class<?> type) {
            mv.visitFieldInsn(PUTFIELD, classDescriptor(), name, classGenerator.descriptorOf(type));
        }

        protected final void getFieldFromThis(String name, Class<?> type) {
            mv.visitVarInsn(ALOAD, 0);
            mv.visitFieldInsn(GETFIELD, classDescriptor(), name, classGenerator.descriptorOf(type));
        }

        // Reads the given reflected field, choosing GETSTATIC or GETFIELD by
        // its modifiers; for instance fields the owner must already be on the
        // stack.
        protected final void readField(Field field) {
            boolean isStatic = (field.getModifiers() & Modifier.STATIC) != 0;
            mv.visitFieldInsn(isStatic ? GETSTATIC : GETFIELD, field.getDeclaringClass().getName().replace('.', '/'), field.getName(), classGenerator.descriptorOf(field.getType()));
        }

        // ClassGenerator delegates

        public String classDescriptor() {
            return classGenerator.getClassDescriptor();
        }

        public String superClassDescriptor() {
            return classGenerator.getSuperClassDescriptor();
        }

        public String methodDescr(Class<?> type, Class<?>... args) {
            return classGenerator.methodDescr(type, args);
        }

        private Type type(String typeName) {
            return classGenerator.toType(typeName);
        }

        public String typeDescr(Class<?> clazz) {
            return classGenerator.toTypeDescriptor(clazz);
        }

        public String typeDescr(String className) {
            return classGenerator.toTypeDescriptor(className);
        }

        public String internalName(Class<?> clazz) {
            return classGenerator.toInteralName(clazz);
        }

        public String internalName(String className) {
            return classGenerator.toInteralName(className);
        }
}
    // MethodDescr

    // Immutable description of a method to emit; the body runs only when
    // write() is invoked during bytecode generation.
    private static class MethodDescr implements ClassPartDescr {
        private final int access;
        private final String name;
        private final String desc;
        private final String signature;
        private final String[] exceptions;
        private final MethodBody body;

        private MethodDescr(int access, String name, String desc, String signature, String[] exceptions, MethodBody body) {
            this.access = access;
            this.name = name;
            this.desc = desc;
            this.signature = signature;
            this.exceptions = exceptions;
            this.body = body;
        }

        public void write(ClassGenerator cg, ClassWriter cw) {
            MethodVisitor mv = cw.visitMethod(access, name, desc, signature, exceptions);
            mv.visitCode();
            try {
                body.writeBody(cg, mv);
                // Arguments are ignored: the ClassWriter is created with
                // COMPUTE_MAXS | COMPUTE_FRAMES (see createClassWriter).
                mv.visitMaxs(0, 0);
            } catch (Exception e) {
                throw new RuntimeException("Error writing method " + name, e);
            }
            mv.visitEnd();
        }
    }

    // Emits all registered static-init bodies as a single <clinit> method.
    private static class StaticInitializerDescr implements ClassPartDescr {
        private final List<MethodBody> initializerBodies = new ArrayList<MethodBody>();

        public void write(ClassGenerator cg, ClassWriter cw) {
            MethodVisitor mv = cw.visitMethod(ACC_STATIC, "<clinit>", "()V", null, null);
            mv.visitCode();
            try {
                for (MethodBody initializerBody : initializerBodies) {
                    initializerBody.writeBody(cg, mv);
                }
            } catch (Exception e) {
                throw new RuntimeException("Error writing method static class initializer", e);
            }
            mv.visitInsn(RETURN);
            mv.visitMaxs(0, 0);
            mv.visitEnd();
        }

        private void addInitializer(MethodBody initBlock) {
            initializerBodies.add(initBlock);
        }
    }
// InternalTypeResolver
private static class InternalTypeResolver implements TypeResolver {
public static final Map<String, Class<?>> primitiveClassMap = new HashMap<String, Class<?>>() {{
put("int", int.class);
put("boolean", boolean.class);
put("float", float.class);
put("long", long.class);
put("short", short.class);
put("byte", byte.class);
put("double", double.class);
put("char", char.class);
}};
private final ClassLoader classLoader;
private InternalTypeResolver(ClassLoader classLoader) {
this.classLoader = classLoader;
}
public Set<String> getImports() {
throw new RuntimeException("Not Implemented");
}
public void addImport(String importEntry) {
throw new RuntimeException("Not Implemented");
}
public Class resolveType(String className) throws ClassNotFoundException {
Class primitiveClassName = primitiveClassMap.get(className);
return primitiveClassName != null ? primitiveClassName : Class.forName(className, true, classLoader);
}
public String getFullTypeName(String shortName) throws ClassNotFoundException {
throw new RuntimeException("Not Implemented");
}
}
public static class InternalClassWriter extends ClassWriter {
private ClassLoader classLoader;
public InternalClassWriter(ClassLoader classLoader, int flags) {
super(flags);
this.classLoader = classLoader;
}
protected String getCommonSuperClass(final String type1, final String type2) {
Class c, d;
try {
c = Class.forName(type1.replace('/', '.'), false, classLoader);
d = Class.forName(type2.replace('/', '.'), false, classLoader);
} catch (Exception e) {
throw new RuntimeException(e.toString());
}
if (c.isAssignableFrom(d)) {
return type1;
}
if (d.isAssignableFrom(c)) {
return type2;
}
if (c.isInterface() || d.isInterface()) {
return "java/lang/Object";
} else {
do {
c = c.getSuperclass();
} while (!c.isAssignableFrom(d));
return c.getName().replace('.', '/');
}
}
}
/**
 * Creates a ClassWriter (with automatic max/frame computation) that resolves
 * common supertypes through the supplied class loader, and opens the class
 * header with the given attributes.
 */
public static ClassWriter createClassWriter(ClassLoader classLoader, int access, String name, String signature, String superName, String[] interfaces) {
    ClassWriter writer = new InternalClassWriter(classLoader, ClassWriter.COMPUTE_MAXS | ClassWriter.COMPUTE_FRAMES);
    writer.visit(ClassLevel.getJavaVersion(classLoader), access, name, signature, superName, interfaces);
    return writer;
}
}
| |
/*
* Copyright 2014, Synthuse.org
* Released under the Apache Version 2.0 License.
*
* last modified by ejakubowski
*/
package org.synthuse;
import java.io.*;
import java.util.Properties;
import java.lang.reflect.Field;
/*
// example class for PropertiesSerializer
public class Configuration extends PropertiesSerializer{
public static final String DEFAULT_PROP_FILENAME = "./ctf.properties";
// General Settings
public String tests_dir = "./tests";
public String logs_dir = "./logs";
public int statusTimer = 2000;
public Configuration() //needed for cloning
{
}
public Configuration(String propertyFilename)
{
super(propertyFilename);
load(propertyFilename);
}
}
*/
public class PropertiesSerializer {
protected Properties prop = new Properties();
protected String propertyFilename = null;
public PropertiesSerializer()
{
}
public PropertiesSerializer(String propertyFilename)
{
this.propertyFilename = propertyFilename;
}
public void load(String propertyFilename)
{
try
{
prop.load(new FileInputStream(propertyFilename));
}
catch (Exception e)
{
System.out.println("Unable to load properties from file: "+propertyFilename+". Default values will be used.");
return;
}
Field[] fields = this.getClass().getFields();
for (int i = 0 ; i < fields.length; i++)
{
String pName = fields[i].getName();
String pType = "String";
try
{
pType = fields[i].get(this).getClass().getSimpleName();
}
catch (Exception e)
{
// e.printStackTrace();
}
final Object myProperty = prop.get(pName);
try
{
if(myProperty==null) {
// System.out.println("Property "+pName+"["+pType+"] not set; input was null");
} else {
if (pType.equalsIgnoreCase("integer"))
fields[i].set(this, Integer.parseInt(myProperty + ""));
if (pType.equalsIgnoreCase("boolean"))
fields[i].set(this, Boolean.parseBoolean(myProperty + ""));
else
fields[i].set(this, myProperty);
// System.out.println("Property "+pName+"["+pType+"] set to: "+myProperty);
}
}
catch (Exception e)
{
// e.printStackTrace();
}
}
}
public void save()
{
Field[] fields = this.getClass().getFields();
for (int i = 0 ; i < fields.length; i++)
{
//fields[i].get(this);
try {
String pName = fields[i].getName();
//String pType = fields[i].get(this).getClass().getSimpleName();
if (fields[i].get(this) == null)
prop.setProperty(pName, "");
else
prop.setProperty(pName, fields[i].get(this) + "");
} catch (Exception e) {
//e.printStackTrace();
}
}
try
{
FileOutputStream fos = new FileOutputStream(propertyFilename);
prop.store(fos, "");
fos.flush();
fos.close();
}
catch (Exception e) {
e.printStackTrace();
}
}
public Object clone()
{
Object newObject = null;
try {
newObject = (Object)this.getClass().newInstance();
}
catch (Exception e)
{
e.printStackTrace();
}
Field[] fields = this.getClass().getFields();
for (int i = 0 ; i < fields.length; i++)
{
try {
//fields[i].get(this);
//String pName = fields[i].getName();
fields[i].set(newObject, fields[i].get(this));
}
catch (Exception e)
{
//e.printStackTrace();
}
}
return newObject;
}
public boolean hasChanged()
{
boolean changes = false;
Field[] fields = this.getClass().getFields();
for (int i = 0 ; i < fields.length; i++)
{
//fields[i].get(this);
try {
String pName = fields[i].getName();
//String pType = fields[i].get(this).getClass().getSimpleName();
if (prop.getProperty(pName).compareTo(fields[i].get(this)+"") != 0)
changes = true;
} catch (Exception e) {
//e.printStackTrace();
}
}
return changes;
}
public String getPropertyFilename()
{
return this.propertyFilename;
}
public void setPropertyFilename(String filename)
{
this.propertyFilename = filename;
}
public String readValue(String propertyName)
{
String val = "";
val = prop.getProperty(propertyName);
return val;
}
public void writeValue(String propertyName, String propertValue)
{
prop.setProperty(propertyName, propertValue);
try {
prop.store(new FileOutputStream(propertyFilename), null);
} catch (Exception e) {
e.printStackTrace();
}
}
}
| |
package view;
import view.assets.AssetLoader;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.HashMap;
/**
 * Created by TheNotoriousOOP on 4/12/2017.
 * Class Description: Swing panel presenting the research table screen.
 * Responsibilities: display one button per research option, show a hover
 * description for each option, and notify panel observers when the user
 * navigates back to the game view.
 */
public class ResearchTablePanel extends JPanel {

    // Observers notified when the user requests a panel switch.
    private java.util.List<PanelObserver> observers = new ArrayList<PanelObserver>();

    // Icons for each research option, loaded from the asset pack.
    private Icon brightIdea;
    private Icon drilling;
    private Icon enlargement;
    private Icon newShafts;
    private Icon rowing;
    private Icon shipping;
    private Icon specialization;
    private Icon trucking;

    // One button per research option, plus navigation back to the game.
    private JButton truckingButton;
    private JButton brightIdeaButton;
    private JButton drillingButton;
    private JButton enlargementButton;
    private JButton rowingButton;
    private JButton newShaftsButton;
    private JButton shippingButton;
    private JButton specializationButton;
    private JButton backToGame;

    private JPanel researchInfoPanel;
    private JTextArea researchInfo;  // shows the hovered option's description
    private Image background;
    private AssetLoader assets;

    // Maps each research button to the description shown while hovering it.
    private HashMap<JButton, String> researchInfoTexts = new HashMap<>();

    public ResearchTablePanel(AssetLoader assets) {
        this.assets = assets;
        this.background = assets.getImage("RESEARCHBG");
        this.brightIdea = new ImageIcon(assets.getImage("BRIGHT_IDEA"));
        this.drilling = new ImageIcon(assets.getImage("DRILLING"));
        this.enlargement = new ImageIcon(assets.getImage("ENLARGEMENT"));
        this.newShafts = new ImageIcon(assets.getImage("NEW_SHAFTS"));
        this.rowing = new ImageIcon(assets.getImage("ROWING"));
        this.shipping = new ImageIcon(assets.getImage("SHIPPING"));
        this.specialization = new ImageIcon(assets.getImage("SPECIALIZATION"));
        this.trucking = new ImageIcon(assets.getImage("TRUCKING"));

        this.truckingButton = new JButton(trucking);
        this.brightIdeaButton = new JButton(brightIdea);
        this.drillingButton = new JButton(drilling);
        this.enlargementButton = new JButton(enlargement);
        this.rowingButton = new JButton(rowing);
        this.newShaftsButton = new JButton(newShafts);
        this.shippingButton = new JButton(shipping);
        this.specializationButton = new JButton(specialization);

        truckingButton.setBackground(Color.black);
        brightIdeaButton.setBackground(Color.black);
        drillingButton.setBackground(Color.black);
        enlargementButton.setBackground(Color.black);
        rowingButton.setBackground(Color.black);
        newShaftsButton.setBackground(Color.black);
        shippingButton.setBackground(Color.black);
        specializationButton.setBackground(Color.black);

        researchInfoTexts.put(rowingButton, "Rowing: Upon researching, ability to build rowboat factories is achieved.");
        researchInfoTexts.put(brightIdeaButton, "Bright Idea: No effect. Reserved for expansion rules.");
        researchInfoTexts.put(truckingButton, "Trucking: Upon researching, ability to build truck factories is achieved.");
        researchInfoTexts.put(shippingButton, "Shipping: Upon researching, ability to build steam-ship factories is achieved.");
        researchInfoTexts.put(drillingButton, "Drilling: Upon researching, ability to build oil-rigs is achieved.");
        researchInfoTexts.put(specializationButton, "Specialization: Upon researching, ability to fill mine bags with either gold or iron is achieved.");
        researchInfoTexts.put(enlargementButton, "Enlargement: Upon researching, ability to fill mine bags with 5 gold and 5 iron is achieved.");
        researchInfoTexts.put(newShaftsButton, "New Shafts: Upon researching, ability to replenish bags of existing mines is achieved.");

        this.backToGame = new JButton("Back to Game");
        backToGame.setPreferredSize(new Dimension(30, 30));
        backToGame.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                notifyAllObservers("GameViewPanel");
            }
        });

        JPanel buttonPanels = new JPanel();
        buttonPanels.add(truckingButton);
        buttonPanels.add(brightIdeaButton);
        buttonPanels.add(drillingButton);
        buttonPanels.add(enlargementButton);
        buttonPanels.add(rowingButton);
        buttonPanels.add(newShaftsButton);
        buttonPanels.add(shippingButton);
        buttonPanels.add(specializationButton);
        buttonPanels.setBackground(Color.black);
        // (removed a no-op shippingButton.getText() call from the original)

        this.setLayout(new GridBagLayout());
        GridBagConstraints c = new GridBagConstraints();
        c.gridx = 1;
        c.gridy = 1;
        c.weightx = 1;
        c.weighty = 1;
        this.add(buttonPanels, c);

        c.gridx = 1;
        c.gridy = 2;
        c.weightx = 1;
        c.weighty = 1;
        researchInfoPanel = new JPanel(new GridLayout(2, 1));
        researchInfo = new JTextArea(" ");
        Font font = new Font("Times New Roman", Font.BOLD, 20);
        researchInfo.setFont(font);
        researchInfo.setForeground(Color.black);
        researchInfo.setBackground(Color.lightGray);
        researchInfo.setPreferredSize(new Dimension(500, 60));
        researchInfo.setEditable(false);
        researchInfo.setLineWrap(true);
        researchInfo.setWrapStyleWord(true);
        researchInfoPanel.add(researchInfo);
        researchInfoPanel.add(backToGame);
        researchInfoPanel.setBackground(Color.black);

        // One hover listener per research button; replaces the eight
        // duplicated anonymous MouseAdapters of the original.
        for (JButton button : researchInfoTexts.keySet()) {
            addHoverDescription(button);
        }

        this.add(researchInfoPanel, c);
    }

    /** Shows the button's description while hovered; clears it on exit. */
    private void addHoverDescription(final JButton button) {
        button.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseEntered(java.awt.event.MouseEvent evt) {
                researchInfo.setText(researchInfoTexts.get(button));
            }

            public void mouseExited(java.awt.event.MouseEvent evt) {
                researchInfo.setText(" ");
            }
        });
    }

    /** Paints the research background image scaled to fill the panel. */
    @Override
    public void paintComponent(Graphics g) {
        Graphics2D g2 = (Graphics2D) g;
        super.paintComponent(g);
        g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
        g2.setRenderingHint(RenderingHints.KEY_RENDERING,
                RenderingHints.VALUE_RENDER_QUALITY);
        g2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL,
                RenderingHints.VALUE_STROKE_PURE);
        g2.drawImage(background, 0, 0, getWidth(), getHeight(), this);
    }

    /** Registers an observer for panel-switch notifications. */
    public void attach(PanelObserver observer) {
        observers.add(observer);
    }

    /** Notifies every attached observer that panelName was requested. */
    public void notifyAllObservers(String panelName) {
        for (PanelObserver observer : observers) {
            observer.update(panelName);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.hops;
import java.util.ArrayList;
import org.apache.sysml.lops.FunctionCallCP;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.lops.LopsException;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.parser.Expression.ValueType;
import org.apache.sysml.runtime.controlprogram.Program;
import org.apache.sysml.runtime.controlprogram.parfor.opt.CostEstimatorHops;
/**
* This FunctionOp represents the call to a DML-bodied or external function.
*
* Note: Currently, we support expressions in function arguments but no function calls
* in expressions.
*/
public class FunctionOp extends Hop
{
    // Operator string reported by getOpString() for explain/plan output.
    public static String OPSTRING = "extfunct";

    // Kind of function call this op represents.
    public enum FunctionType{
        DML,
        EXTERNAL_MEM,
        EXTERNAL_FILE,
        MULTIRETURN_BUILTIN,
        UNKNOWN
    }

    private FunctionType _type = null;
    private String _fnamespace = null;  // namespace of the called function
    private String _fname = null;       // simple name of the called function
    private String[] _outputs = null;           // names of the output variables
    private ArrayList<Hop> _outputHops = null;  // output hops; only set via the six-arg constructor

    private FunctionOp() {
        //default constructor for clone
    }

    /** Constructs a function call with explicit output hops. */
    public FunctionOp(FunctionType type, String fnamespace, String fname, ArrayList<Hop> finputs, String[] outputs, ArrayList<Hop> outputHops) {
        this(type, fnamespace, fname, finputs, outputs);
        _outputHops = outputHops;
    }

    /**
     * Constructs a function call and wires it into the hop DAG: each input
     * becomes a child of this op, and this op registers itself as a parent
     * of every input.
     */
    public FunctionOp(FunctionType type, String fnamespace, String fname, ArrayList<Hop> finputs, String[] outputs)
    {
        super(fnamespace + Program.KEY_DELIM + fname, DataType.UNKNOWN, ValueType.UNKNOWN );
        _type = type;
        _fnamespace = fnamespace;
        _fname = fname;
        _outputs = outputs;
        for( Hop in : finputs )
        {
            getInput().add(in);
            in.getParent().add(this);
        }
    }

    public String getFunctionNamespace()
    {
        return _fnamespace;
    }

    public String getFunctionName()
    {
        return _fname;
    }

    public void setFunctionName( String fname )
    {
        _fname = fname;
    }

    /** Returns the output hops; may be null if constructed without them. */
    public ArrayList<Hop> getOutputs() {
        return _outputHops;
    }

    public String[] getOutputVariableNames()
    {
        return _outputs;
    }

    public FunctionType getFunctionType()
    {
        return _type;
    }

    @Override
    public boolean allowsAllExecTypes() {
        return false;
    }

    /**
     * Overrides the default hop memory estimation with per-function-type
     * estimates: DML-bodied calls are essentially free, external in-memory
     * calls cost twice the input size (in/out), file-based/unknown externals
     * use the default MR constant, and multi-return builtins are estimated
     * from their inputs and outputs.
     */
    @Override
    public void computeMemEstimate( MemoTable memo )
    {
        //overwrites default hops behavior
        if( _type == FunctionType.DML )
            _memEstimate = 1; //minimal mem estimate
        else if( _type == FunctionType.EXTERNAL_MEM )
            _memEstimate = 2* getInputSize(); //in/out
        else if(   _type == FunctionType.EXTERNAL_FILE || _type == FunctionType.UNKNOWN )
            _memEstimate = CostEstimatorHops.DEFAULT_MEM_MR;
        else if ( _type == FunctionType.MULTIRETURN_BUILTIN ) {
            // only refine the estimates when every output's dims are known
            boolean outputDimsKnown = true;
            for(Hop out : getOutputs()){
                outputDimsKnown &= out.dimsKnown();
            }
            if( outputDimsKnown ) {
                // NOTE(review): uses this op's own _dim1/_dim2/_nnz rather than
                // the outputs' dimensions — confirm this is intended
                long lnnz = ((_nnz>=0)?_nnz:_dim1*_dim2);
                _outputMemEstimate = computeOutputMemEstimate( _dim1, _dim2, lnnz );
                _processingMemEstimate = computeIntermediateMemEstimate(_dim1, _dim2, lnnz);
            }
            _memEstimate = getInputOutputSize();
        }
    }

    /**
     * Output memory estimate for multi-return builtins (qr, lu, eigen),
     * derived from the registered output hops' dimensions with per-function
     * sparsity assumptions (e.g. 0.5 for triangular factors).
     *
     * @throws RuntimeException for any non-MULTIRETURN_BUILTIN call
     */
    @Override
    protected double computeOutputMemEstimate( long dim1, long dim2, long nnz )
    {
        if ( getFunctionType() != FunctionType.MULTIRETURN_BUILTIN )
            throw new RuntimeException("Invalid call of computeOutputMemEstimate in FunctionOp.");
        else {
            if ( getFunctionName().equalsIgnoreCase("qr") ) {
                // upper-triangular and lower-triangular matrices
                long outputH = OptimizerUtils.estimateSizeExactSparsity(getOutputs().get(0).getDim1(), getOutputs().get(0).getDim2(), 0.5);
                long outputR = OptimizerUtils.estimateSizeExactSparsity(getOutputs().get(1).getDim1(), getOutputs().get(1).getDim2(), 0.5);
                return outputH+outputR;
            }
            else if ( getFunctionName().equalsIgnoreCase("lu") ) {
                // upper-triangular and lower-triangular matrices
                // P is a permutation matrix: one nonzero per row -> sparsity 1/ncols
                long outputP = OptimizerUtils.estimateSizeExactSparsity(getOutputs().get(1).getDim1(), getOutputs().get(1).getDim2(), 1.0/getOutputs().get(1).getDim2());
                long outputL = OptimizerUtils.estimateSizeExactSparsity(getOutputs().get(0).getDim1(), getOutputs().get(0).getDim2(), 0.5);
                long outputU = OptimizerUtils.estimateSizeExactSparsity(getOutputs().get(1).getDim1(), getOutputs().get(1).getDim2(), 0.5);
                return outputL+outputU+outputP;
            }
            else if ( getFunctionName().equalsIgnoreCase("eigen") ) {
                // dense eigenvector matrix plus a dense eigenvalue column vector
                long outputVectors = OptimizerUtils.estimateSizeExactSparsity(getOutputs().get(0).getDim1(), getOutputs().get(0).getDim2(), 1.0);
                long outputValues = OptimizerUtils.estimateSizeExactSparsity(getOutputs().get(1).getDim1(), 1, 1.0);
                return outputVectors+outputValues;
            }
            else
                throw new RuntimeException("Invalid call of computeOutputMemEstimate in FunctionOp.");
        }
    }

    /**
     * Intermediate (working-set) memory estimate for multi-return builtins,
     * derived from the first input's dimensions.
     *
     * @throws RuntimeException for any non-MULTIRETURN_BUILTIN call
     */
    @Override
    protected double computeIntermediateMemEstimate( long dim1, long dim2, long nnz )
    {
        if ( getFunctionType() != FunctionType.MULTIRETURN_BUILTIN )
            throw new RuntimeException("Invalid call of computeIntermediateMemEstimate in FunctionOp.");
        else {
            if ( getFunctionName().equalsIgnoreCase("qr") ) {
                // matrix of size same as the input
                double interOutput = OptimizerUtils.estimateSizeExactSparsity(getInput().get(0).getDim1(), getInput().get(0).getDim2(), 1.0);
                //System.out.println("QRInter " + interOutput/1024/1024);
                return interOutput;
            }
            else if ( getFunctionName().equalsIgnoreCase("lu")) {
                // 1D vector
                double interOutput = OptimizerUtils.estimateSizeExactSparsity(getInput().get(0).getDim1(), 1, 1.0);
                //System.out.println("LUInter " + interOutput/1024/1024);
                return interOutput;
            }
            else if ( getFunctionName().equalsIgnoreCase("eigen")) {
                // One matrix of size original input and three 1D vectors (used to represent tridiagonal matrix)
                double interOutput = OptimizerUtils.estimateSizeExactSparsity(getInput().get(0).getDim1(), getInput().get(0).getDim2(), 1.0)
                        + 3*OptimizerUtils.estimateSizeExactSparsity(getInput().get(0).getDim1(), 1, 1.0);
                //System.out.println("EigenInter " + interOutput/1024/1024);
                return interOutput;
            }
            else
                throw new RuntimeException("Invalid call of computeIntermediateMemEstimate in FunctionOp.");
        }
    }

    /** Function calls do not support output-characteristic inference. */
    @Override
    protected long[] inferOutputCharacteristics( MemoTable memo )
    {
        throw new RuntimeException("Invalid call of inferOutputCharacteristics in FunctionOp.");
    }

    /**
     * Constructs (or returns the cached) low-level operator for this call:
     * recursively constructs the input lops, then wraps them in a
     * FunctionCallCP. Function calls are CP-only.
     *
     * @throws HopsException if the chosen execution type is not CP
     */
    @Override
    public Lop constructLops()
        throws HopsException, LopsException
    {
        //return already created lops
        if( getLops() != null )
            return getLops();
        ExecType et = optFindExecType();
        if ( et != ExecType.CP ) {
            throw new HopsException("Invalid execution type for function: " + _fname);
        }
        //construct input lops (recursive)
        ArrayList<Lop> tmp = new ArrayList<Lop>();
        for( Hop in : getInput() )
            tmp.add( in.constructLops() );
        //construct function call
        FunctionCallCP fcall = new FunctionCallCP( tmp, _fnamespace, _fname, _outputs, _outputHops );
        setLineNumbers( fcall );
        setLops( fcall );
        //note: no reblock lop because outputs directly bound
        return getLops();
    }

    @Override
    public String getOpString()
    {
        return OPSTRING;
    }

    /** Function calls always execute in CP, regardless of memory estimates. */
    @Override
    protected ExecType optFindExecType()
        throws HopsException
    {
        if ( getFunctionType() == FunctionType.MULTIRETURN_BUILTIN ) {
            // Since the memory estimate is only conservative, do not throw
            // exception if the estimated memory is larger than the budget
            // Nevertheless, memory estimates these functions are useful for
            // other purposes, such as compiling parfor
            return ExecType.CP;
            // check if there is sufficient memory to execute this function
            /*if ( getMemEstimate() < OptimizerUtils.getMemBudget(true) ) {
                return ExecType.CP;
            }
            else {
                throw new HopsException("Insufficient memory to execute function: " + getFunctionName());
            }*/
        }
        // the actual function call is always CP
        return ExecType.CP;
    }

    @Override
    public void refreshSizeInformation()
    {
        //do nothing
    }

    /** Deep-ish clone: copies attributes; output name array and hop list are cloned shallowly. */
    @Override
    @SuppressWarnings("unchecked")
    public Object clone() throws CloneNotSupportedException
    {
        FunctionOp ret = new FunctionOp();
        //copy generic attributes
        ret.clone(this, false);
        //copy specific attributes
        ret._type = _type;
        ret._fnamespace = _fnamespace;
        ret._fname = _fname;
        ret._outputs = _outputs.clone();
        if( _outputHops != null )
            ret._outputHops = (ArrayList<Hop>) _outputHops.clone();
        return ret;
    }

    /** Function ops never compare equal — presumably to disable CSE for calls; TODO confirm. */
    @Override
    public boolean compare( Hop that )
    {
        return false;
    }
}
| |
package com.brettonw.bag;
import com.brettonw.AppTest;
import com.brettonw.bag.test.*;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.Test;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertArrayEquals;
public class SerializerTest {
private static final Logger log = LogManager.getLogger (SerializerTest.class);
@Test
public void testBareType() {
    // Round-trip a bare primitive value through the serializer.
    int original = 24;
    BagObject serialized = Serializer.toBagObject (original);
    log.info (serialized.toString ());
    int roundTripped = Serializer.fromBagObject (serialized);
    AppTest.report (roundTripped, original, "Serializer - test bare type");
}
@Test
public void testNewSerializer() {
    // Exercises the public no-arg constructor (coverage only; no assertions).
    new Serializer ();
}
@Test
public void testPojo() {
    // serialize a POJO, then verify that deserializing and re-serializing
    // reproduces the same serialized form (round-trip stability)
    TestClassA testClass = new TestClassA (5, true, 123.0, "pdq", TestEnumXYZ.ABC);
    BagObject bagObject = Serializer.toBagObject (testClass);
    log.info (bagObject.toString ());
    TestClassA reconClass = Serializer.fromBagObject (bagObject);
    BagObject reconBagObject = Serializer.toBagObject (reconClass);
    AppTest.report (reconBagObject.toString (),bagObject.toString (), "Serializer test round trip");
}
@Test
public void testUnwrappedPojo() {
    // serialize a POJO, unwrap the serialized form, and verify the unwrapped
    // object can still be reconstituted via fromBagAsType into an equal result
    TestClassA testClass = new TestClassA (5, true, 123.0, "pdq", TestEnumXYZ.ABC);
    BagObject serialized = Serializer.toBagObject (testClass);
    BagObject bagObject = Serializer.Unwrap (serialized);
    log.info (bagObject.toString ());
    TestClassA reconClass = Serializer.fromBagAsType (bagObject, TestClassA.class);
    BagObject reconBagObject = Serializer.toBagObject (reconClass);
    AppTest.report (reconBagObject.toString (), serialized.toString (), "Serializer test round trip");
}
@Test
public void testArray() {
    // boxed Integer array round trip
    Integer testArray[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
    BagObject bagObject = Serializer.toBagObject (testArray);
    log.info (bagObject.toString ());
    Integer reconArray[] = Serializer.fromBagObject (bagObject);
    assertArrayEquals("Check array reconstitution", testArray, reconArray);
    // primitive int array round trip
    int testArray2[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
    bagObject = Serializer.toBagObject (testArray2);
    log.info (bagObject.toString ());
    int reconArray2[] = Serializer.fromBagObject (bagObject);
    assertArrayEquals("Check array reconstitution", testArray2, reconArray2);
    // two-dimensional primitive array round trip
    int testArray3[][] = { {0,0}, {1,1}, {2,2} };
    bagObject = Serializer.toBagObject (testArray3);
    log.info (bagObject.toString ());
    int reconArray3[][] = Serializer.fromBagObject (bagObject);
    assertArrayEquals("Check array reconstitution", testArray3, reconArray3);
}
@Test
public void testArrayList() {
    // Round-trip an ArrayList through the serializer and compare contents.
    ArrayList<Integer> original = new ArrayList<> (3);
    for (int value : new int[] {1, 3, 5}) {
        original.add (value);
    }
    BagObject serialized = Serializer.toBagObject (original);
    log.info (serialized.toString ());
    ArrayList<Integer> reconstituted = Serializer.fromBagObject (serialized);
    assertArrayEquals ("Check array list reconstitution", original.toArray (), reconstituted.toArray ());
}
@Test
public void testMap() {
    // round-trip a HashMap and verify both key and value sets survive
    HashMap<String, Integer> hashMap = new HashMap<> (3);
    hashMap.put ("A", 1);
    hashMap.put ("B", 3);
    hashMap.put ("C", 5);
    BagObject bagObject = Serializer.toBagObject (hashMap);
    log.info (bagObject.toString ());
    HashMap<String, Integer> reconHashMap = Serializer.fromBagObject (bagObject);
    assertArrayEquals ("Check hash map reconstitution - keys", hashMap.keySet ().toArray (), reconHashMap.keySet ().toArray ());
    assertArrayEquals ("Check hash map reconstitution - values", hashMap.values ().toArray (), reconHashMap.values ().toArray ());
    // add a few other simple serializations...
    BagObject anotherBagObject = Serializer.toBagObject (bagObject);
    AppTest.report (Serializer.fromBagObject (anotherBagObject), bagObject, "Serializer test reconstituting a bag object");
}
@Test
public void testBagArray() {
    // round-trip a BagArray container through the serializer
    BagArray bagArray = new BagArray (2).add (1).add (7.0);
    BagObject anotherBagObject = Serializer.toBagObject (bagArray);
    AppTest.report (Serializer.fromBagObject (anotherBagObject), bagArray, "Serializer test reconstituting a bag array");
    log.info ("got here");
}
@Test
public void testVersionHandler() {
    // deserializing a payload stamped with an unsupported version ("0.9")
    // is expected to raise BadVersionException; the catch branch records success
    try {
        BagObject mockup = new BagObject ()
                .put (Serializer.VERSION_KEY, "0.9")
                .put (Serializer.VALUE_KEY, new BagObject ()
                        .put (Serializer.TYPE_KEY, "java.lang.String")
                        .put (Serializer.VALUE_KEY, "PDQ")
                );
        String serializedString = mockup.toString ();
        BagObject serializedStringBagObject = BagObjectFrom.string (serializedString);
        String deserializedString = Serializer.fromBagObject (serializedStringBagObject);
        AppTest.report (serializedString, deserializedString, "Serializer test reconstituting a string with a bad version should throw exception");
    } catch (BadVersionException exception) {
        AppTest.report (false, false, "Serializer test reconstituting a string with a bad version should fail");
    }
}
@Test
public void testError() {
    // the type name "java.lang.Sring" is deliberately misspelled: an
    // unresolvable type should make deserialization return null
    BagObject mockup = new BagObject ()
            .put (Serializer.VERSION_KEY, Serializer.SERIALIZER_VERSION)
            .put (Serializer.VALUE_KEY, new BagObject ()
                    .put (Serializer.TYPE_KEY, "java.lang.Sring")
                    .put (Serializer.VALUE_KEY, "PDQ")
            );
    String serializedString = mockup.toString ();
    BagObject serializedStringBagObject = BagObjectFrom.string (serializedString);
    String deserializedString = Serializer.fromBagObject (serializedStringBagObject);
    AppTest.report (deserializedString, null, "Serializer test reconstituting a modified source");
}
@Test
public void testNonPojo() {
    // round-trip a class that does not follow POJO conventions and verify
    // its state via the class's own test() check
    TestClassC testClassC = new TestClassC (1, 2L, 3.0f, 10, 20L, 30.0f);
    BagObject bagObjectC = Serializer.toBagObject (testClassC);
    log.info (bagObjectC.toString ());
    TestClassC reconClassC = Serializer.fromBagObject (bagObjectC);
    AppTest.report (reconClassC.test (1, 2L, 3.0f, 10, 20L, 30.0f), true, "Serializer - Confirm reconstituted object matches original");
}
@Test
public void testArrayTypes() {
    // round-trip every primitive array type (float/double compared with tolerance)
    long testArrayLong[] = {0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L};
    BagObject bagObject = Serializer.toBagObject (testArrayLong);
    assertArrayEquals (testArrayLong, Serializer.fromBagObject (bagObject));
    short testArrayShort[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
    bagObject = Serializer.toBagObject (testArrayShort);
    assertArrayEquals (testArrayShort, Serializer.fromBagObject (bagObject));
    byte testArrayByte[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
    bagObject = Serializer.toBagObject (testArrayByte);
    assertArrayEquals (testArrayByte, Serializer.fromBagObject (bagObject));
    double testArrayDouble[] = {0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0};
    bagObject = Serializer.toBagObject (testArrayDouble);
    assertArrayEquals (testArrayDouble, Serializer.fromBagObject (bagObject), 1.0e-9);
    float testArrayFloat[] = {0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
    bagObject = Serializer.toBagObject (testArrayFloat);
    assertArrayEquals (testArrayFloat, Serializer.fromBagObject (bagObject), 1.0e-6f);
    boolean testArrayBoolean[] = {true, false, true, false, true, false, true, false};
    bagObject = Serializer.toBagObject (testArrayBoolean);
    assertArrayEquals (testArrayBoolean, Serializer.fromBagObject (bagObject));
    char testArrayCharacter[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
    bagObject = Serializer.toBagObject (testArrayCharacter);
    assertArrayEquals (testArrayCharacter, Serializer.fromBagObject (bagObject));
}
@Test
public void testBoxedArrayTypes() {
    // round-trip every boxed-primitive array type
    Long testArrayLong[] = {0L, 1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L};
    BagObject bagObject = Serializer.toBagObject (testArrayLong);
    assertArrayEquals (testArrayLong, Serializer.fromBagObject (bagObject));
    Short testArrayShort[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
    bagObject = Serializer.toBagObject (testArrayShort);
    assertArrayEquals (testArrayShort, Serializer.fromBagObject (bagObject));
    Byte testArrayByte[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
    bagObject = Serializer.toBagObject (testArrayByte);
    assertArrayEquals (testArrayByte, Serializer.fromBagObject (bagObject));
    Double testArrayDouble[] = {0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0};
    bagObject = Serializer.toBagObject (testArrayDouble);
    assertArrayEquals (testArrayDouble, Serializer.fromBagObject (bagObject));
    Float testArrayFloat[] = {0.0f, 1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f};
    bagObject = Serializer.toBagObject (testArrayFloat);
    assertArrayEquals (testArrayFloat, Serializer.fromBagObject (bagObject));
    Boolean testArrayBoolean[] = {true, false, true, false, true, false, true, false};
    bagObject = Serializer.toBagObject (testArrayBoolean);
    assertArrayEquals (testArrayBoolean, Serializer.fromBagObject (bagObject));
    Character testArrayCharacter[] = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
    bagObject = Serializer.toBagObject (testArrayCharacter);
    assertArrayEquals (testArrayCharacter, Serializer.fromBagObject (bagObject));
}
@Test
public void testPojoArray() {
    // round-trip an array of complex POJOs
    TestClassA testArrayA[] = {
            new TestClassA (1, true, 3.5, "Joe", TestEnumXYZ.ABC),
            new TestClassA (2, true, 3.6, "Dave", TestEnumXYZ.DEF),
            new TestClassA (3, false, 19.2, "Bret", TestEnumXYZ.GHI),
            new TestClassA (4, true, 4.5, "Roxy", TestEnumXYZ.GHI)
    };
    BagObject bagObject = Serializer.toBagObject (testArrayA);
    log.info (bagObject.toString ());
    TestClassA reconTestArrayA[] = Serializer.fromBagObject (bagObject);
    boolean pass = true;
    for (int i = 0, end = testArrayA.length; i < end; ++i) {
        TestClassA left = testArrayA[i];
        TestClassA right = reconTestArrayA[i];
        // not a *COMPLETE* test, but spot checking
        pass = pass && (left.abc.equals (right.abc)) && (left.sub.b == right.sub.b);
    }
    AppTest.report (pass, true, "Serializer - test array of complex POJOs");
}
@Test
public void testBogusArrayString() {
    // a malformed array type name ("[java.lang.Integer;" — missing the 'L'
    // and trailing ';' form of a real JVM array descriptor) should make
    // deserialization return null rather than throw
    BagObject mockup = new BagObject ()
            .put (Serializer.VERSION_KEY, Serializer.SERIALIZER_VERSION)
            .put (Serializer.VALUE_KEY, new BagObject ()
                    .put (Serializer.TYPE_KEY, "[java.lang.Integer;")
                    .put (Serializer.VALUE_KEY, new BagArray ()
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 0))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 1))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 2))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 3))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 4))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 5))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 6))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 7))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 8))
                            .add (new BagObject ().put (Serializer.TYPE_KEY, "java.lang.Integer").put (Serializer.VALUE_KEY, 9))
                    )
            );
    //String bogusArrayString = "{\"type\":\"[java.lang.Integer;\",\"v\":\"1.0\",\"value\":[{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"0\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"1\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"2\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"3\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"4\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"5\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"6\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"7\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"8\"},{\"type\":\"java.lang.Integer\",\"v\":\"1.0\",\"value\":\"9\"}]}";
    String bogusArrayString = mockup.toString ();
    BagObject bogusArray = BagObjectFrom.string (bogusArrayString);
    Object result = Serializer.fromBagObject (bogusArray);
    AppTest.report (result, null, "Serializer - test bogus array string");
}
    @Test
    public void testOffsetDateTime () {
        // deal with a type that has a no default constructor?
        // OffsetDateTime is immutable with only static factories, so this
        // exercises the serializer's handling of constructor-less types.
        OffsetDateTime odt = OffsetDateTime.now ();
        BagObject bagObject = Serializer.toBagObject (odt);
        log.info (bagObject.toString ());
        OffsetDateTime reconOdt = Serializer.fromBagObject (bagObject);
        AppTest.report (odt, reconOdt, "Reconstructed OffsetDateTime should match the original");
    }
    @Test
    public void testBogusType () {
        // deal with a type that has a no default constructor?
        // Serialize an OffsetDateTime, then deserialize it into an unrelated
        // target type (LocalTime). The expected outcome is a ClassCastException.
        OffsetDateTime odt = OffsetDateTime.now ();
        BagObject bagObject = Serializer.toBagObject (odt);
        log.info (bagObject.toString ());
        try {
            // the inferred target type here is LocalTime, which the stored value can't satisfy
            LocalTime localTime = Serializer.fromBagObject (bagObject);
            // reaching this line means no exception was thrown; the mismatched
            // comparison below is reported as a failure
            AppTest.report (odt, localTime, "This should fail");
        } catch (ClassCastException exception) {
            // (false, false) compares equal, i.e. this reports a PASS — the
            // exception is the desired behavior
            AppTest.report (false, false, "Properly throw an exception if we can't cast the value");
        }
    }
    @Test
    public void testBadConstructorHandling () {
        // deal with a type that has a no default constructor and no registered extension
        // round-trip should still succeed via the serializer's fallback construction path
        TestClassD d = new TestClassD ("Hello");
        BagObject bagObject = Serializer.toBagObject (d);
        log.info (bagObject.toString ());
        TestClassD xxx = Serializer.fromBagObject (bagObject);
        AppTest.report (d.equals (xxx), true, "Properly construct on a type without a default constructor");
    }
@Test
public void testClassE () {
TestClassE d = new TestClassE ("Hello");
BagObject bagObject = Serializer.toBagObject (d);
log.info (bagObject.toString ());
TestClassE xxx = Serializer.fromBagObject (bagObject);
AppTest.report (d, xxx, "Properly handle a serialized typen");
}
    @Test
    public void testNull () {
        // null must pass through both directions of the serializer unchanged
        BagObject bagObject = Serializer.toBagObject (null);
        AppTest.report (bagObject, null, "Serialize null results in null");
        Object object = Serializer.fromBagObject (null);
        AppTest.report (object, null, "Deserialize null results in null");
    }
    @Test
    public void testSimpleSerializer () {
        // deserialize a plain BagObject (no type/version envelope) directly
        // into TestClassC via fromBagAsType, spot-checking one float field
        BagObject testClassC = new BagObject ()
                .put ("a", 1).put ("b", 2).put ("c", 3.0).put ("d", 10).put ("e", 20).put ("f", 30.0).put ("g", 30);
        TestClassC reconC = Serializer.fromBagAsType (testClassC, TestClassC.class);
        AppTest.report (reconC.getF (), testClassC.getFloat ("f"), "Simple deserialization - f");
    }
@Test
public void testMapTypeSerialization () {
Map<String, String> map = new HashMap<> (5);
map.put ("he", "she");
map.put ("whe", "when");
map.put ("the", "they");
map.put ("che", "chen");
BagObject bagObject = Serializer.toBagObject (map);
Map<String, String> recon = Serializer.fromBagObject (bagObject);
}
    @Test
    public void testIncompleteSimpleSerializer () {
        // deserialize from a BagObject that omits field "d"; the missing field
        // should retain the default from TestClassC's no-arg constructor
        BagObject testClassC = new BagObject ()
                // int a, long b, float c, int d, long e, float f
                .put ("a", 5).put ("b", 7).put ("c", 9).put ("e", 600).put ("f", 3.5f);
        TestClassC c = Serializer.fromBagAsType (testClassC, TestClassC.class);
        TestClassC xxx = new TestClassC ();
        AppTest.report (c.getD (), xxx.getD (), "Simple deserialization with missing items");
    }
}
| |
package com.yammer.metrics.reporting;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.*;
import com.yammer.metrics.stats.Snapshot;
import com.yammer.metrics.core.MetricPredicate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Locale;
import java.util.Map;
import java.util.SortedMap;
import java.util.concurrent.TimeUnit;
/**
* A simple reporter which sends out application metrics to a <a href="http://ganglia.sourceforge.net/">Ganglia</a>
* server periodically.
* <p/>
* NOTE: this reporter only works with Ganglia 3.1 and greater. The message protocol for earlier
* versions of Ganglia is different.
* <p/>
* This code heavily borrows from GangliaWriter in <a href="http://code.google.com/p/jmxtrans/source/browse/trunk/src/com/googlecode/jmxtrans/model/output/GangliaWriter.java">JMXTrans</a>
* which is based on <a href="http://search-hadoop.com/c/Hadoop:/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/ganglia/GangliaContext31.java">GangliaContext31</a>
* from Hadoop.
*/
public class GangliaReporter extends AbstractPollingReporter implements MetricProcessor<String> {
private static final Logger LOG = LoggerFactory.getLogger(GangliaReporter.class);
private static final int GANGLIA_TMAX = 60;
private static final int GANGLIA_DMAX = 0;
private static final String GANGLIA_INT_TYPE = "int32";
private static final String GANGLIA_DOUBLE_TYPE = "double";
private static final String GANGLIA_STRING_TYPE = "string";
private final MetricPredicate predicate;
private final VirtualMachineMetrics vm;
private final Locale locale = Locale.US;
private String hostLabel;
private String groupPrefix = "";
private boolean compressPackageNames;
private final GangliaMessageBuilder gangliaMessageBuilder;
public boolean printVMMetrics = true;
/**
* Enables the ganglia reporter to send data for the default metrics registry to ganglia server
* with the specified period.
*
* @param period the period between successive outputs
* @param unit the time unit of {@code period}
* @param gangliaHost the gangliaHost name of ganglia server (carbon-cache agent)
* @param port the port number on which the ganglia server is listening
*/
public static void enable(long period, TimeUnit unit, String gangliaHost, int port) {
enable(Metrics.defaultRegistry(), period, unit, gangliaHost, port, "");
}
/**
* Enables the ganglia reporter to send data for the default metrics registry to ganglia server
* with the specified period.
*
* @param period the period between successive outputs
* @param unit the time unit of {@code period}
* @param gangliaHost the gangliaHost name of ganglia server (carbon-cache agent)
* @param port the port number on which the ganglia server is listening
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
*/
public static void enable(long period, TimeUnit unit, String gangliaHost, int port, String groupPrefix) {
enable(Metrics.defaultRegistry(), period, unit, gangliaHost, port, groupPrefix);
}
/**
* Enables the ganglia reporter to send data for the default metrics registry to ganglia server
* with the specified period.
*
* @param period the period between successive outputs
* @param unit the time unit of {@code period}
* @param gangliaHost the gangliaHost name of ganglia server (carbon-cache agent)
* @param port the port number on which the ganglia server is listening
* @param compressPackageNames if true reporter will compress package names e.g.
* com.foo.MetricName becomes c.f.MetricName
*/
public static void enable(long period, TimeUnit unit, String gangliaHost, int port, boolean compressPackageNames) {
enable(Metrics.defaultRegistry(),
period,
unit,
gangliaHost,
port,
"",
MetricPredicate.ALL,
compressPackageNames);
}
/**
* Enables the ganglia reporter to send data for the given metrics registry to ganglia server
* with the specified period.
*
* @param metricsRegistry the metrics registry
* @param period the period between successive outputs
* @param unit the time unit of {@code period}
* @param gangliaHost the gangliaHost name of ganglia server (carbon-cache agent)
* @param port the port number on which the ganglia server is listening
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
*/
public static void enable(MetricsRegistry metricsRegistry, long period, TimeUnit unit, String gangliaHost, int port, String groupPrefix) {
enable(metricsRegistry, period, unit, gangliaHost, port, groupPrefix, MetricPredicate.ALL);
}
/**
* Enables the ganglia reporter to send data to ganglia server with the specified period.
*
* @param metricsRegistry the metrics registry
* @param period the period between successive outputs
* @param unit the time unit of {@code period}
* @param gangliaHost the gangliaHost name of ganglia server (carbon-cache agent)
* @param port the port number on which the ganglia server is listening
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
* @param predicate filters metrics to be reported
*/
public static void enable(MetricsRegistry metricsRegistry, long period, TimeUnit unit, String gangliaHost, int port, String groupPrefix, MetricPredicate predicate) {
enable(metricsRegistry, period, unit, gangliaHost, port, groupPrefix, predicate, false);
}
/**
* Enables the ganglia reporter to send data to ganglia server with the specified period.
*
* @param metricsRegistry the metrics registry
* @param period the period between successive outputs
* @param unit the time unit of {@code period}
* @param gangliaHost the gangliaHost name of ganglia server (carbon-cache agent)
* @param port the port number on which the ganglia server is listening
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
* @param predicate filters metrics to be reported
* @param compressPackageNames if true reporter will compress package names e.g.
* com.foo.MetricName becomes c.f.MetricName
*/
public static void enable(MetricsRegistry metricsRegistry, long period, TimeUnit unit, String gangliaHost,
int port, String groupPrefix, MetricPredicate predicate, boolean compressPackageNames) {
try {
final GangliaReporter reporter = new GangliaReporter(metricsRegistry,
gangliaHost,
port,
groupPrefix,
predicate,
compressPackageNames);
reporter.start(period, unit);
} catch (Exception e) {
LOG.error("Error creating/starting ganglia reporter:", e);
}
}
/**
* Creates a new {@link GangliaReporter}.
*
* @param gangliaHost is ganglia server
* @param port is port on which ganglia server is running
* @throws java.io.IOException if there is an error connecting to the ganglia server
*/
public GangliaReporter(String gangliaHost, int port) throws IOException {
this(Metrics.defaultRegistry(), gangliaHost, port, "");
}
/**
* Creates a new {@link GangliaReporter}.
*
* @param gangliaHost is ganglia server
* @param port is port on which ganglia server is running
* @param compressPackageNames whether or not Metrics' package names will be shortened
* @throws java.io.IOException if there is an error connecting to the ganglia server
*/
public GangliaReporter(String gangliaHost, int port, boolean compressPackageNames) throws IOException {
this(Metrics.defaultRegistry(),
gangliaHost,
port,
"",
MetricPredicate.ALL,
compressPackageNames);
}
/**
* Creates a new {@link GangliaReporter}.
*
* @param metricsRegistry the metrics registry
* @param gangliaHost is ganglia server
* @param port is port on which ganglia server is running
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
* @throws java.io.IOException if there is an error connecting to the ganglia server
*/
public GangliaReporter(MetricsRegistry metricsRegistry, String gangliaHost, int port, String groupPrefix) throws IOException {
this(metricsRegistry, gangliaHost, port, groupPrefix, MetricPredicate.ALL);
}
/**
* Creates a new {@link GangliaReporter}.
*
* @param metricsRegistry the metrics registry
* @param gangliaHost is ganglia server
* @param port is port on which ganglia server is running
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
* @param predicate filters metrics to be reported
* @throws java.io.IOException if there is an error connecting to the ganglia server
*/
public GangliaReporter(MetricsRegistry metricsRegistry, String gangliaHost, int port, String groupPrefix, MetricPredicate predicate) throws IOException {
this(metricsRegistry, gangliaHost, port, groupPrefix, predicate, false);
}
/**
* Creates a new {@link GangliaReporter}.
*
* @param metricsRegistry the metrics registry
* @param gangliaHost is ganglia server
* @param port is port on which ganglia server is running
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
* @param predicate filters metrics to be reported
* @param compressPackageNames if true reporter will compress package names e.g.
* com.foo.MetricName becomes c.f.MetricName
* @throws java.io.IOException if there is an error connecting to the ganglia server
*/
public GangliaReporter(MetricsRegistry metricsRegistry, String gangliaHost, int port, String groupPrefix,
MetricPredicate predicate, boolean compressPackageNames) throws IOException {
this(metricsRegistry,
groupPrefix,
predicate,
compressPackageNames,
new GangliaMessageBuilder(gangliaHost, port), VirtualMachineMetrics.getInstance());
}
/**
* Creates a new {@link GangliaReporter}.
*
* @param metricsRegistry the metrics registry
* @param groupPrefix prefix to the ganglia group name (such as myapp_counter)
* @param predicate filters metrics to be reported
* @param compressPackageNames if true reporter will compress package names e.g.
* com.foo.MetricName becomes c.f.MetricName
* @param gangliaMessageBuilder a {@link GangliaMessageBuilder} instance
* @param vm a {@link VirtualMachineMetrics} isntance
* @throws java.io.IOException if there is an error connecting to the ganglia server
*/
public GangliaReporter(MetricsRegistry metricsRegistry, String groupPrefix,
MetricPredicate predicate, boolean compressPackageNames,
GangliaMessageBuilder gangliaMessageBuilder, VirtualMachineMetrics vm) throws IOException {
super(metricsRegistry, "ganglia-reporter");
this.gangliaMessageBuilder = gangliaMessageBuilder;
this.groupPrefix = groupPrefix + "_";
this.hostLabel = getHostLabel();
this.predicate = predicate;
this.compressPackageNames = compressPackageNames;
this.vm = vm;
}
@Override
public void run() {
if (this.printVMMetrics) {
printVmMetrics();
}
printRegularMetrics();
}
private void printRegularMetrics() {
for (Map.Entry<String, SortedMap<MetricName, Metric>> entry : getMetricsRegistry().getGroupedMetrics(
predicate).entrySet()) {
for (Map.Entry<MetricName, Metric> subEntry : entry.getValue().entrySet()) {
final Metric metric = subEntry.getValue();
if (metric != null) {
try {
metric.processWith(this, subEntry.getKey(), null);
} catch (Exception ignored) {
LOG.error("Error printing regular metrics:", ignored);
}
}
}
}
}
private void sendToGanglia(String metricName, String metricType, String metricValue, String groupName, String units) {
try {
sendMetricData(metricType, metricName, metricValue, groupPrefix + groupName, units);
if (LOG.isTraceEnabled()) {
LOG.trace("Emitting metric " + metricName + ", type " + metricType + ", value " + metricValue + " for gangliaHost: " + this
.gangliaMessageBuilder
.getHostName() + ":" + this.gangliaMessageBuilder.getPort());
}
} catch (IOException e) {
LOG.error("Error sending to ganglia:", e);
}
}
private void sendToGanglia(String metricName, String metricType, String metricValue, String groupName) {
sendToGanglia(metricName, metricType, metricValue, groupName, "");
}
private void sendMetricData(String metricType, String metricName, String metricValue, String groupName, String units) throws IOException {
this.gangliaMessageBuilder.newMessage()
.addInt(128)// metric_id = metadata_msg
.addString(this.hostLabel)// hostname
.addString(metricName)// metric name
.addInt(0)// spoof = True
.addString(metricType)// metric type
.addString(metricName)// metric name
.addString(units)// units
.addInt(3)// slope see gmetric.c
.addInt(GANGLIA_TMAX)// tmax, the maximum time between metrics
.addInt(GANGLIA_DMAX)// dmax, the maximum data value
.addInt(1)
.addString("GROUP")// Group attribute
.addString(groupName)// Group value
.send();
this.gangliaMessageBuilder.newMessage()
.addInt(133)// we are sending a string value
.addString(this.hostLabel)// hostLabel
.addString(metricName)// metric name
.addInt(0)// spoof = True
.addString("%s")// format field
.addString(metricValue) // metric value
.send();
}
@Override
public void processGauge(MetricName name, Gauge<?> gauge, String x) throws IOException {
final Object value = gauge.getValue();
final Class<?> klass = value.getClass();
final String type;
if (klass == Integer.class || klass == Long.class) {
type = GANGLIA_INT_TYPE;
} else if (klass == Float.class || klass == Double.class) {
type = GANGLIA_DOUBLE_TYPE;
} else {
type = GANGLIA_STRING_TYPE;
}
sendToGanglia(sanitizeName(name),
type,
String.format(locale, "%s", gauge.getValue()),
"gauge");
}
@Override
public void processCounter(MetricName name, Counter counter, String x) throws IOException {
sendToGanglia(sanitizeName(name),
GANGLIA_INT_TYPE,
String.format(locale, "%d", counter.getCount()),
"counter");
}
@Override
public void processMeter(MetricName name, Metered meter, String x) throws IOException {
final String sanitizedName = sanitizeName(name);
final String rateUnits = meter.getRateUnit().name();
final String rateUnit = rateUnits.substring(0, rateUnits.length() - 1).toLowerCase(Locale.US);
final String unit = meter.getEventType() + '/' + rateUnit;
printLongField(sanitizedName + ".count", meter.getCount(), "metered", meter.getEventType());
printDoubleField(sanitizedName + ".meanRate", meter.getMeanRate(), "metered", unit);
printDoubleField(sanitizedName + ".1MinuteRate", meter.getOneMinuteRate(), "metered", unit);
printDoubleField(sanitizedName + ".5MinuteRate", meter.getFiveMinuteRate(), "metered", unit);
printDoubleField(sanitizedName + ".15MinuteRate", meter.getFifteenMinuteRate(), "metered", unit);
}
@Override
public void processHistogram(MetricName name, Histogram histogram, String x) throws IOException {
final String sanitizedName = sanitizeName(name);
final Snapshot snapshot = histogram.getSnapshot();
// TODO: what units make sense for histograms? should we add event type to the Histogram metric?
printDoubleField(sanitizedName + ".min", histogram.getMin(), "histo");
printDoubleField(sanitizedName + ".max", histogram.getMax(), "histo");
printDoubleField(sanitizedName + ".mean", histogram.getMean(), "histo");
printDoubleField(sanitizedName + ".stddev", histogram.getStdDev(), "histo");
printDoubleField(sanitizedName + ".median", snapshot.getMedian(), "histo");
printDoubleField(sanitizedName + ".75percentile", snapshot.get75thPercentile(), "histo");
printDoubleField(sanitizedName + ".95percentile", snapshot.get95thPercentile(), "histo");
printDoubleField(sanitizedName + ".98percentile", snapshot.get98thPercentile(), "histo");
printDoubleField(sanitizedName + ".99percentile", snapshot.get99thPercentile(), "histo");
printDoubleField(sanitizedName + ".999percentile", snapshot.get999thPercentile(), "histo");
}
@Override
public void processTimer(MetricName name, Timer timer, String x) throws IOException {
processMeter(name, timer, x);
final String sanitizedName = sanitizeName(name);
final Snapshot snapshot = timer.getSnapshot();
final String durationUnit = timer.getDurationUnit().name();
printDoubleField(sanitizedName + ".min", timer.getMin(), "timer", durationUnit);
printDoubleField(sanitizedName + ".max", timer.getMax(), "timer", durationUnit);
printDoubleField(sanitizedName + ".mean", timer.getMean(), "timer", durationUnit);
printDoubleField(sanitizedName + ".stddev", timer.getStdDev(), "timer", durationUnit);
printDoubleField(sanitizedName + ".median", snapshot.getMedian(), "timer", durationUnit);
printDoubleField(sanitizedName + ".75percentile", snapshot.get75thPercentile(), "timer", durationUnit);
printDoubleField(sanitizedName + ".95percentile", snapshot.get95thPercentile(), "timer", durationUnit);
printDoubleField(sanitizedName + ".98percentile", snapshot.get98thPercentile(), "timer", durationUnit);
printDoubleField(sanitizedName + ".99percentile", snapshot.get99thPercentile(), "timer", durationUnit);
printDoubleField(sanitizedName + ".999percentile", snapshot.get999thPercentile(), "timer", durationUnit);
}
private void printDoubleField(String name, double value, String groupName, String units) {
sendToGanglia(name,
GANGLIA_DOUBLE_TYPE,
String.format(locale, "%2.2f", value),
groupName,
units);
}
private void printDoubleField(String name, double value, String groupName) {
printDoubleField(name, value, groupName, "");
}
private void printLongField(String name, long value, String groupName) {
printLongField(name, value, groupName, "");
}
private void printLongField(String name, long value, String groupName, String units) {
// TODO: ganglia does not support int64, what should we do here?
sendToGanglia(name, GANGLIA_INT_TYPE, String.format(locale, "%d", value), groupName, units);
}
private void printVmMetrics() {
printDoubleField("jvm.memory.heap_usage", vm.getHeapUsage(), "jvm");
printDoubleField("jvm.memory.non_heap_usage", vm.getNonHeapUsage(), "jvm");
for (Map.Entry<String, Double> pool : vm.getMemoryPoolUsage().entrySet()) {
printDoubleField("jvm.memory.memory_pool_usages." + pool.getKey(),
pool.getValue(),
"jvm");
}
printDoubleField("jvm.daemon_thread_count", vm.getDaemonThreadCount(), "jvm");
printDoubleField("jvm.thread_count", vm.getThreadCount(), "jvm");
printDoubleField("jvm.uptime", vm.getUptime(), "jvm");
printDoubleField("jvm.fd_usage", vm.getFileDescriptorUsage(), "jvm");
for (Map.Entry<Thread.State, Double> entry : vm.getThreadStatePercentages().entrySet()) {
printDoubleField("jvm.thread-states." + entry.getKey().toString().toLowerCase(),
entry.getValue(),
"jvm");
}
for (Map.Entry<String, VirtualMachineMetrics.GarbageCollectorStats> entry : vm.getGarbageCollectors().entrySet()) {
printLongField("jvm.gc." + entry.getKey() + ".time",
entry.getValue().getTime(TimeUnit.MILLISECONDS),
"jvm");
printLongField("jvm.gc." + entry.getKey() + ".runs", entry.getValue().getRuns(), "jvm");
}
}
String getHostLabel() {
try {
final InetAddress addr = InetAddress.getLocalHost();
return addr.getHostAddress() + ":" + addr.getHostName();
} catch (UnknownHostException e) {
LOG.error("Unable to get local gangliaHost name: ", e);
return "unknown";
}
}
protected String sanitizeName(MetricName name) {
if (name == null) {
return "";
}
final String qualifiedTypeName = name.getGroup() + "." + name.getType() + "." + name.getName();
final String metricName = name.hasScope() ? qualifiedTypeName + '.' + name.getScope() : qualifiedTypeName;
final StringBuilder sb = new StringBuilder();
for (int i = 0; i < metricName.length(); i++) {
final char p = metricName.charAt(i);
if (!(p >= 'A' && p <= 'Z')
&& !(p >= 'a' && p <= 'z')
&& !(p >= '0' && p <= '9')
&& (p != '_')
&& (p != '-')
&& (p != '.')
&& (p != '\0')) {
sb.append('_');
} else {
sb.append(p);
}
}
return compressPackageName(sb.toString());
}
private String compressPackageName(String name) {
if (compressPackageNames && name.indexOf(".") > 0) {
final String[] nameParts = name.split("\\.");
final StringBuilder sb = new StringBuilder();
final int numParts = nameParts.length;
int count = 0;
for (String namePart : nameParts) {
if (++count < numParts - 1) {
sb.append(namePart.charAt(0));
sb.append(".");
} else {
sb.append(namePart);
if (count == numParts - 1) {
sb.append(".");
}
}
}
name = sb.toString();
}
return name;
}
}
| |
/*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.socket.oio;
import io.netty.buffer.ByteBufAllocator;
import io.netty.channel.ChannelException;
import io.netty.channel.ChannelOption;
import io.netty.channel.MessageSizeEstimator;
import io.netty.channel.PreferHeapByteBufAllocator;
import io.netty.channel.RecvByteBufAllocator;
import io.netty.channel.WriteBufferWaterMark;
import io.netty.channel.socket.DefaultSocketChannelConfig;
import io.netty.channel.socket.SocketChannel;
import java.io.IOException;
import java.net.Socket;
import java.util.Map;
import static io.netty.channel.ChannelOption.*;
/**
* Default {@link OioSocketChannelConfig} implementation
*/
public class DefaultOioSocketChannelConfig extends DefaultSocketChannelConfig implements OioSocketChannelConfig {
    // Both constructors wrap the inherited allocator in PreferHeapByteBufAllocator;
    // the blocking (OIO) transport works with heap buffers.
    @Deprecated
    public DefaultOioSocketChannelConfig(SocketChannel channel, Socket javaSocket) {
        super(channel, javaSocket);
        setAllocator(new PreferHeapByteBufAllocator(getAllocator()));
    }
    DefaultOioSocketChannelConfig(OioSocketChannel channel, Socket javaSocket) {
        super(channel, javaSocket);
        setAllocator(new PreferHeapByteBufAllocator(getAllocator()));
    }
    @Override
    public Map<ChannelOption<?>, Object> getOptions() {
        // add the OIO-specific SO_TIMEOUT option to the inherited option map
        return getOptions(
                super.getOptions(), SO_TIMEOUT);
    }
    @SuppressWarnings("unchecked")
    @Override
    public <T> T getOption(ChannelOption<T> option) {
        // intercept SO_TIMEOUT; everything else is resolved by the superclass
        if (option == SO_TIMEOUT) {
            return (T) Integer.valueOf(getSoTimeout());
        }
        return super.getOption(option);
    }
    @Override
    public <T> boolean setOption(ChannelOption<T> option, T value) {
        validate(option, value);
        // intercept SO_TIMEOUT; everything else is handled by the superclass
        if (option == SO_TIMEOUT) {
            setSoTimeout((Integer) value);
        } else {
            return super.setOption(option, value);
        }
        return true;
    }
    @Override
    public OioSocketChannelConfig setSoTimeout(int timeout) {
        // applied directly to the underlying java.net.Socket; the checked
        // IOException is translated into Netty's unchecked ChannelException
        try {
            javaSocket.setSoTimeout(timeout);
        } catch (IOException e) {
            throw new ChannelException(e);
        }
        return this;
    }
    @Override
    public int getSoTimeout() {
        try {
            return javaSocket.getSoTimeout();
        } catch (IOException e) {
            throw new ChannelException(e);
        }
    }
    // The overrides below only narrow the return type to OioSocketChannelConfig
    // so configuration calls can be fluently chained; all behavior is delegated
    // to DefaultSocketChannelConfig.
    @Override
    public OioSocketChannelConfig setTcpNoDelay(boolean tcpNoDelay) {
        super.setTcpNoDelay(tcpNoDelay);
        return this;
    }
    @Override
    public OioSocketChannelConfig setSoLinger(int soLinger) {
        super.setSoLinger(soLinger);
        return this;
    }
    @Override
    public OioSocketChannelConfig setSendBufferSize(int sendBufferSize) {
        super.setSendBufferSize(sendBufferSize);
        return this;
    }
    @Override
    public OioSocketChannelConfig setReceiveBufferSize(int receiveBufferSize) {
        super.setReceiveBufferSize(receiveBufferSize);
        return this;
    }
    @Override
    public OioSocketChannelConfig setKeepAlive(boolean keepAlive) {
        super.setKeepAlive(keepAlive);
        return this;
    }
    @Override
    public OioSocketChannelConfig setTrafficClass(int trafficClass) {
        super.setTrafficClass(trafficClass);
        return this;
    }
    @Override
    public OioSocketChannelConfig setReuseAddress(boolean reuseAddress) {
        super.setReuseAddress(reuseAddress);
        return this;
    }
    @Override
    public OioSocketChannelConfig setPerformancePreferences(int connectionTime, int latency, int bandwidth) {
        super.setPerformancePreferences(connectionTime, latency, bandwidth);
        return this;
    }
    @Override
    public OioSocketChannelConfig setAllowHalfClosure(boolean allowHalfClosure) {
        super.setAllowHalfClosure(allowHalfClosure);
        return this;
    }
    @Override
    public OioSocketChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis) {
        super.setConnectTimeoutMillis(connectTimeoutMillis);
        return this;
    }
    @Override
    @Deprecated
    public OioSocketChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead) {
        super.setMaxMessagesPerRead(maxMessagesPerRead);
        return this;
    }
    @Override
    public OioSocketChannelConfig setWriteSpinCount(int writeSpinCount) {
        super.setWriteSpinCount(writeSpinCount);
        return this;
    }
    @Override
    public OioSocketChannelConfig setAllocator(ByteBufAllocator allocator) {
        super.setAllocator(allocator);
        return this;
    }
    @Override
    public OioSocketChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator) {
        super.setRecvByteBufAllocator(allocator);
        return this;
    }
    @Override
    public OioSocketChannelConfig setAutoRead(boolean autoRead) {
        super.setAutoRead(autoRead);
        return this;
    }
    @Override
    protected void autoReadCleared() {
        // when auto-read is switched off, cancel any read already scheduled on
        // the OIO channel (only applicable to the non-deprecated channel type)
        if (channel instanceof OioSocketChannel) {
            ((OioSocketChannel) channel).clearReadPending0();
        }
    }
    @Override
    public OioSocketChannelConfig setAutoClose(boolean autoClose) {
        super.setAutoClose(autoClose);
        return this;
    }
    @Override
    public OioSocketChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark) {
        super.setWriteBufferHighWaterMark(writeBufferHighWaterMark);
        return this;
    }
    @Override
    public OioSocketChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark) {
        super.setWriteBufferLowWaterMark(writeBufferLowWaterMark);
        return this;
    }
    @Override
    public OioSocketChannelConfig setWriteBufferWaterMark(WriteBufferWaterMark writeBufferWaterMark) {
        super.setWriteBufferWaterMark(writeBufferWaterMark);
        return this;
    }
    @Override
    public OioSocketChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator) {
        super.setMessageSizeEstimator(estimator);
        return this;
    }
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.components.impl;
import com.intellij.application.options.PathMacrosImpl;
import com.intellij.application.options.ReplacePathToMacroMap;
import com.intellij.openapi.application.PathMacros;
import com.intellij.openapi.components.ExpandMacroToPathMap;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.components.PathMacroUtil;
import com.intellij.openapi.components.TrackingPathMacroSubstitutor;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.IVirtualFileSystem;
import com.intellij.openapi.vfs.StandardFileSystems;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.containers.FactoryMap;
import org.jdom.Element;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public class BasePathMacroManager extends PathMacroManager {
private PathMacrosImpl myPathMacros;
  public BasePathMacroManager(@Nullable PathMacros pathMacros) {
    // stored as the impl type; null is allowed and resolved lazily in getPathMacros()
    myPathMacros = (PathMacrosImpl)pathMacros;
  }
  // Registers expansions for $macroName$ and each "/.."-suffixed ancestor of the
  // file at {@code path}; silently does nothing for a null path or unresolvable file.
  protected static void addFileHierarchyReplacements(ExpandMacroToPathMap result, String macroName, @Nullable String path) {
    if (path == null) return;
    addFileHierarchyReplacements(result, getLocalFileSystem().findFileByPath(path), "$" + macroName + "$");
  }
  private static void addFileHierarchyReplacements(ExpandMacroToPathMap result, @Nullable VirtualFile f, String macro) {
    if (f == null) return;
    // recurse toward the root first, so "macro/.." maps to the parent, "macro/../.." to the grandparent, etc.
    addFileHierarchyReplacements(result, f.getParent(), macro + "/..");
    result.put(macro, StringUtil.trimEnd(f.getPath(), "/"));
  }
  // Registers replacements mapping the path and each of its ancestors back to
  // $macroName$, $macroName$/.., etc. Only the exact path may overwrite an
  // existing replacement; ancestor mappings never do. Stops after {@code stopAt}.
  protected static void addFileHierarchyReplacements(ReplacePathToMacroMap result, String macroName, @Nullable String path, @Nullable String stopAt) {
    if (path == null) return;
    String macro = "$" + macroName + "$";
    path = StringUtil.trimEnd(FileUtil.toSystemIndependentName(path), "/");
    boolean overwrite = true;
    while (StringUtil.isNotEmpty(path) && path.contains("/")) {
      result.addReplacement(path, macro, overwrite);
      if (path.equals(stopAt)) {
        break;
      }
      macro += "/..";
      overwrite = false;
      path = StringUtil.getPackageName(path, '/');
    }
  }
private static IVirtualFileSystem getLocalFileSystem() {
// Use VFM directly because of mocks in tests.
return VirtualFileManager.getInstance().getFileSystem(StandardFileSystems.FILE_PROTOCOL);
}
public ExpandMacroToPathMap getExpandMacroMap() {
ExpandMacroToPathMap result = new ExpandMacroToPathMap();
for (Map.Entry<String, String> entry : PathMacroUtil.getGlobalSystemMacros().entrySet()) {
result.addMacroExpand(entry.getKey(), entry.getValue());
}
getPathMacros().addMacroExpands(result);
return result;
}
protected ReplacePathToMacroMap getReplacePathMap() {
ReplacePathToMacroMap result = new ReplacePathToMacroMap();
for (Map.Entry<String, String> entry : PathMacroUtil.getGlobalSystemMacros().entrySet()) {
result.addMacroReplacement(entry.getValue(), entry.getKey());
}
getPathMacros().addMacroReplacements(result);
return result;
}
@Override
public TrackingPathMacroSubstitutor createTrackingSubstitutor() {
return new MyTrackingPathMacroSubstitutor();
}
@Override
public String expandPath(final String path) {
return getExpandMacroMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
}
@Override
public String collapsePath(final String path) {
return getReplacePathMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
}
@Override
public void collapsePathsRecursively(final Element element) {
getReplacePathMap().substitute(element, SystemInfo.isFileSystemCaseSensitive, true);
}
@Override
public String collapsePathsRecursively(final String text) {
return getReplacePathMap().substituteRecursively(text, SystemInfo.isFileSystemCaseSensitive);
}
@Override
public void expandPaths(final Element element) {
getExpandMacroMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
}
@Override
public void collapsePaths(final Element element) {
getReplacePathMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
}
public PathMacrosImpl getPathMacros() {
if (myPathMacros == null) {
myPathMacros = PathMacrosImpl.getInstanceEx();
}
return myPathMacros;
}
private class MyTrackingPathMacroSubstitutor implements TrackingPathMacroSubstitutor {
private final Map<String, Set<String>> myMacroToComponentNames = new FactoryMap<String, Set<String>>() {
@Override
protected Set<String> create(String key) {
return new HashSet<String>();
}
};
private final Map<String, Set<String>> myComponentNameToMacros = new FactoryMap<String, Set<String>>() {
@Override
protected Set<String> create(String key) {
return new HashSet<String>();
}
};
public MyTrackingPathMacroSubstitutor() {
}
@Override
public void reset() {
myMacroToComponentNames.clear();
myComponentNameToMacros.clear();
}
@Override
public String expandPath(final String path) {
return getExpandMacroMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
}
@Override
public String collapsePath(final String path) {
return getReplacePathMap().substitute(path, SystemInfo.isFileSystemCaseSensitive);
}
@Override
public void expandPaths(final Element element) {
getExpandMacroMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
}
@Override
public void collapsePaths(final Element element) {
getReplacePathMap().substitute(element, SystemInfo.isFileSystemCaseSensitive);
}
public int hashCode() {
return getExpandMacroMap().hashCode();
}
@Override
public void invalidateUnknownMacros(final Set<String> macros) {
for (final String macro : macros) {
final Set<String> components = myMacroToComponentNames.get(macro);
for (final String component : components) {
myComponentNameToMacros.remove(component);
}
myMacroToComponentNames.remove(macro);
}
}
@Override
public Collection<String> getComponents(final Collection<String> macros) {
final Set<String> result = new HashSet<String>();
for (String macro : myMacroToComponentNames.keySet()) {
if (macros.contains(macro)) {
result.addAll(myMacroToComponentNames.get(macro));
}
}
return result;
}
@Override
public Collection<String> getUnknownMacros(final String componentName) {
final Set<String> result = new HashSet<String>();
result.addAll(componentName == null ? myMacroToComponentNames.keySet() : myComponentNameToMacros.get(componentName));
return Collections.unmodifiableCollection(result);
}
@Override
public void addUnknownMacros(final String componentName, final Collection<String> unknownMacros) {
if (unknownMacros.isEmpty()) return;
for (String unknownMacro : unknownMacros) {
final Set<String> stringList = myMacroToComponentNames.get(unknownMacro);
stringList.add(componentName);
}
myComponentNameToMacros.get(componentName).addAll(unknownMacros);
}
}
protected static boolean pathsEqual(@Nullable String path1, @Nullable String path2) {
return path1 != null && path2 != null &&
FileUtil.pathsEqual(FileUtil.toSystemIndependentName(path1), FileUtil.toSystemIndependentName(path2));
}
}
| |
package org.testng.xml;
import static org.testng.internal.Utils.isStringBlank;
import org.testng.ITestObjectFactory;
import org.testng.TestNGException;
import org.testng.collections.Lists;
import org.testng.collections.Maps;
import org.testng.internal.Utils;
import org.testng.log4testng.Logger;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.DefaultHandler;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Stack;
/**
* Suite definition parser utility.
*
* @author Cedric Beust
* @author <a href='mailto:the_mindstorm@evolva.ro'>Alexandru Popescu</a>
*/
public class TestNGContentHandler extends DefaultHandler {
private XmlSuite m_currentSuite = null;
private XmlTest m_currentTest = null;
private List<String> m_currentDefines = null;
private List<String> m_currentRuns = null;
private List<XmlClass> m_currentClasses = null;
private int m_currentTestIndex = 0;
private int m_currentClassIndex = 0;
private int m_currentIncludeIndex = 0;
private List<XmlPackage> m_currentPackages = null;
private XmlPackage m_currentPackage = null;
private List<XmlSuite> m_suites = Lists.newArrayList();
private XmlGroups m_currentGroups = null;
private List<String> m_currentIncludedGroups = null;
private List<String> m_currentExcludedGroups = null;
private Map<String, String> m_currentTestParameters = null;
private Map<String, String> m_currentSuiteParameters = null;
private Map<String, String> m_currentClassParameters = null;
private Include m_currentInclude;
private List<String> m_currentMetaGroup = null;
private String m_currentMetaGroupName;
enum Location {
SUITE,
TEST,
CLASS,
INCLUDE,
EXCLUDE
}
private Stack<Location> m_locations = new Stack<>();
private XmlClass m_currentClass = null;
private ArrayList<XmlInclude> m_currentIncludedMethods = null;
private List<String> m_currentExcludedMethods = null;
private ArrayList<XmlMethodSelector> m_currentSelectors = null;
private XmlMethodSelector m_currentSelector = null;
private String m_currentLanguage = null;
private String m_currentExpression = null;
private List<String> m_suiteFiles = Lists.newArrayList();
private boolean m_enabledTest;
private List<String> m_listeners;
private String m_fileName;
private boolean m_loadClasses;
private boolean m_validate = false;
private boolean m_hasWarn = false;
  /**
   * @param fileName path of the suite XML file being parsed; recorded on the suite
   * @param loadClasses whether XmlClass entries should eagerly load their classes
   */
  public TestNGContentHandler(String fileName, boolean loadClasses) {
    m_fileName = fileName;
    m_loadClasses = loadClasses;
  }
  /*
   * (non-Javadoc)
   *
   * @see org.xml.sax.EntityResolver#resolveEntity(java.lang.String,
   * java.lang.String)
   */
  // NOTE(review): the SAX contract is resolveEntity(publicId, systemId), so
  // the parameter names here are swapped -- the variable named "publicId"
  // actually receives the SYSTEM id (the DTD URL from the DOCTYPE). The
  // comparisons below depend on that; any rename must keep argument order.
  @Override
  public InputSource resolveEntity(String systemId, String publicId)
      throws IOException, SAXException {
    InputSource result;
    // Recognize both the current and deprecated TestNG DTD ids and serve the
    // bundled DTD from the classpath so parsing works offline.
    if (Parser.DEPRECATED_TESTNG_DTD_URL.equals(publicId)
        || Parser.TESTNG_DTD_URL.equals(publicId)) {
      m_validate = true;
      InputStream is = getClass().getClassLoader().getResourceAsStream(Parser.TESTNG_DTD);
      if (null == is) {
        // Fall back to the context class loader, then to fetching remotely.
        is = Thread.currentThread().getContextClassLoader().getResourceAsStream(Parser.TESTNG_DTD);
        if (null == is) {
          System.out.println(
              "WARNING: couldn't find in classpath "
                  + publicId
                  + "\n"
                  + "Fetching it from the Web site.");
          result = super.resolveEntity(systemId, publicId);
        } else {
          result = new InputSource(is);
        }
      } else {
        result = new InputSource(is);
      }
    } else {
      result = super.resolveEntity(systemId, publicId);
    }
    return result;
  }
/** Parse <suite-file> */
private void xmlSuiteFile(boolean start, Attributes attributes) {
if (start) {
String path = attributes.getValue("path");
pushLocation(Location.SUITE);
m_suiteFiles.add(path);
} else {
m_currentSuite.setSuiteFiles(m_suiteFiles);
popLocation();
}
}
  /**
   * Parse &lt;suite&gt;: on open, creates the XmlSuite and copies every
   * recognized attribute onto it; on close, attaches the collected
   * parameters and registers the suite.
   */
  private void xmlSuite(boolean start, Attributes attributes) {
    if (start) {
      pushLocation(Location.SUITE);
      String name = attributes.getValue("name");
      if (isStringBlank(name)) {
        throw new TestNGException("The <suite> tag must define the name attribute");
      }
      m_currentSuite = new XmlSuite();
      m_currentSuite.setFileName(m_fileName);
      m_currentSuite.setName(name);
      m_currentSuiteParameters = Maps.newHashMap();
      String verbose = attributes.getValue("verbose");
      if (null != verbose) {
        m_currentSuite.setVerbose(Integer.parseInt(verbose));
      }
      String jUnit = attributes.getValue("junit");
      if (null != jUnit) {
        m_currentSuite.setJUnit(Boolean.valueOf(jUnit));
      }
      String parallel = attributes.getValue("parallel");
      if (parallel != null) {
        // Invalid values are warned about and ignored rather than failing.
        XmlSuite.ParallelMode mode = XmlSuite.ParallelMode.getValidParallel(parallel);
        if (mode != null) {
          m_currentSuite.setParallel(mode);
        } else {
          Utils.log(
              "Parser",
              1,
              "[WARN] Unknown value of attribute 'parallel' at suite level: '" + parallel + "'.");
        }
      }
      String parentModule = attributes.getValue("parent-module");
      if (parentModule != null) {
        m_currentSuite.setParentModule(parentModule);
      }
      String guiceStage = attributes.getValue("guice-stage");
      if (guiceStage != null) {
        m_currentSuite.setGuiceStage(guiceStage);
      }
      XmlSuite.FailurePolicy configFailurePolicy =
          XmlSuite.FailurePolicy.getValidPolicy(attributes.getValue("configfailurepolicy"));
      if (null != configFailurePolicy) {
        m_currentSuite.setConfigFailurePolicy(configFailurePolicy);
      }
      String groupByInstances = attributes.getValue("group-by-instances");
      if (groupByInstances != null) {
        m_currentSuite.setGroupByInstances(Boolean.valueOf(groupByInstances));
      }
      String skip = attributes.getValue("skipfailedinvocationcounts");
      if (skip != null) {
        m_currentSuite.setSkipFailedInvocationCounts(Boolean.valueOf(skip));
      }
      String threadCount = attributes.getValue("thread-count");
      if (null != threadCount) {
        m_currentSuite.setThreadCount(Integer.parseInt(threadCount));
      }
      String dataProviderThreadCount = attributes.getValue("data-provider-thread-count");
      if (null != dataProviderThreadCount) {
        m_currentSuite.setDataProviderThreadCount(Integer.parseInt(dataProviderThreadCount));
      }
      String timeOut = attributes.getValue("time-out");
      if (null != timeOut) {
        m_currentSuite.setTimeOut(timeOut);
      }
      String objectFactory = attributes.getValue("object-factory");
      // The factory is only instantiated when class loading is enabled.
      if (null != objectFactory && m_loadClasses) {
        try {
          m_currentSuite.setObjectFactory(
              (ITestObjectFactory) Class.forName(objectFactory).newInstance());
        } catch (Exception e) {
          Utils.log(
              "Parser",
              1,
              "[ERROR] Unable to create custom object factory '" + objectFactory + "' :" + e);
        }
      }
      String preserveOrder = attributes.getValue("preserve-order");
      if (preserveOrder != null) {
        m_currentSuite.setPreserveOrder(Boolean.valueOf(preserveOrder));
      }
      String allowReturnValues = attributes.getValue("allow-return-values");
      if (allowReturnValues != null) {
        m_currentSuite.setAllowReturnValues(Boolean.valueOf(allowReturnValues));
      }
    } else {
      // Closing </suite>: attach the accumulated parameters and register it.
      m_currentSuite.setParameters(m_currentSuiteParameters);
      m_suites.add(m_currentSuite);
      m_currentSuiteParameters = null;
      popLocation();
    }
  }
  /**
   * Parse &lt;define&gt; (a named meta-group). m_currentDefines doubles as a
   * "we are inside a define" marker consulted by xmlInclude(); the collected
   * names live in m_currentMetaGroup.
   */
  private void xmlDefine(boolean start, Attributes attributes) {
    if (start) {
      String name = attributes.getValue("name");
      m_currentDefines = Lists.newArrayList();
      m_currentMetaGroup = Lists.newArrayList();
      m_currentMetaGroupName = name;
    } else {
      // Inside a <test> the meta-group attaches to the test; otherwise it
      // becomes a suite-level XmlDefine on the current groups element.
      if (m_currentTest != null) {
        m_currentTest.addMetaGroup(m_currentMetaGroupName, m_currentMetaGroup);
      } else {
        XmlDefine define = new XmlDefine();
        define.setName(m_currentMetaGroupName);
        define.getIncludes().addAll(m_currentMetaGroup);
        m_currentGroups.addDefine(define);
      }
      m_currentDefines = null;
    }
  }
  /**
   * Parse &lt;script&gt;: the expression body is accumulated by characters()
   * while m_currentLanguage is non-null, then attached to the current method
   * selector (and, inside a &lt;test&gt;, to the test itself) on close.
   */
  private void xmlScript(boolean start, Attributes attributes) {
    if (start) {
      m_currentLanguage = attributes.getValue("language");
      m_currentExpression = "";
    } else {
      XmlScript script = new XmlScript();
      script.setExpression(m_currentExpression);
      script.setLanguage(m_currentLanguage);
      m_currentSelector.setScript(script);
      if (m_locations.peek() == Location.TEST) {
        m_currentTest.setScript(script);
      }
      // Null language also tells characters() to stop accumulating.
      m_currentLanguage = null;
      m_currentExpression = null;
    }
  }
  /**
   * Parse &lt;test&gt;: on open, creates the XmlTest (which registers itself
   * on the suite) and copies its attributes; on close, attaches parameters
   * and classes, and removes the test from the suite if enabled="false".
   */
  private void xmlTest(boolean start, Attributes attributes) {
    if (start) {
      // XmlTest's constructor adds the new test to m_currentSuite.
      m_currentTest = new XmlTest(m_currentSuite, m_currentTestIndex++);
      pushLocation(Location.TEST);
      m_currentTestParameters = Maps.newHashMap();
      final String testName = attributes.getValue("name");
      if (isStringBlank(testName)) {
        throw new TestNGException("The <test> tag must define the name attribute");
      }
      m_currentTest.setName(attributes.getValue("name"));
      String verbose = attributes.getValue("verbose");
      if (null != verbose) {
        m_currentTest.setVerbose(Integer.parseInt(verbose));
      }
      String jUnit = attributes.getValue("junit");
      if (null != jUnit) {
        m_currentTest.setJUnit(Boolean.valueOf(jUnit));
      }
      String skip = attributes.getValue("skipfailedinvocationcounts");
      if (skip != null) {
        m_currentTest.setSkipFailedInvocationCounts(Boolean.valueOf(skip));
      }
      String groupByInstances = attributes.getValue("group-by-instances");
      if (groupByInstances != null) {
        m_currentTest.setGroupByInstances(Boolean.valueOf(groupByInstances));
      }
      String preserveOrder = attributes.getValue("preserve-order");
      if (preserveOrder != null) {
        m_currentTest.setPreserveOrder(Boolean.valueOf(preserveOrder));
      }
      String parallel = attributes.getValue("parallel");
      if (parallel != null) {
        // Invalid values are warned about and ignored rather than failing.
        XmlSuite.ParallelMode mode = XmlSuite.ParallelMode.getValidParallel(parallel);
        if (mode != null) {
          m_currentTest.setParallel(mode);
        } else {
          Utils.log(
              "Parser",
              1,
              "[WARN] Unknown value of attribute 'parallel' for test '"
                  + m_currentTest.getName()
                  + "': '"
                  + parallel
                  + "'");
        }
      }
      String threadCount = attributes.getValue("thread-count");
      if (null != threadCount) {
        m_currentTest.setThreadCount(Integer.parseInt(threadCount));
      }
      String timeOut = attributes.getValue("time-out");
      if (null != timeOut) {
        m_currentTest.setTimeOut(Long.parseLong(timeOut));
      }
      m_enabledTest = true;
      String enabledTestString = attributes.getValue("enabled");
      if (null != enabledTestString) {
        m_enabledTest = Boolean.valueOf(enabledTestString);
      }
    } else {
      if (null != m_currentTestParameters && m_currentTestParameters.size() > 0) {
        m_currentTest.setParameters(m_currentTestParameters);
      }
      if (null != m_currentClasses) {
        m_currentTest.setXmlClasses(m_currentClasses);
      }
      m_currentClasses = null;
      m_currentTest = null;
      m_currentTestParameters = null;
      popLocation();
      // A disabled test was still appended by XmlTest's constructor; drop it.
      if (!m_enabledTest) {
        List<XmlTest> tests = m_currentSuite.getTests();
        tests.remove(tests.size() - 1);
      }
    }
  }
/** Parse <classes> */
public void xmlClasses(boolean start, Attributes attributes) {
if (start) {
m_currentClasses = Lists.newArrayList();
m_currentClassIndex = 0;
} else {
m_currentTest.setXmlClasses(m_currentClasses);
m_currentClasses = null;
}
}
  /** Parse &lt;listeners&gt;: collects listener class names and hands them to the suite on close. */
  public void xmlListeners(boolean start, Attributes attributes) {
    if (start) {
      m_listeners = Lists.newArrayList();
    } else {
      if (null != m_listeners) {
        m_currentSuite.setListeners(m_listeners);
        m_listeners = null;
      }
    }
  }
/** Parse <listener> */
public void xmlListener(boolean start, Attributes attributes) {
if (start) {
String listener = attributes.getValue("class-name");
m_listeners.add(listener);
}
}
  /**
   * Parse &lt;packages&gt;: collects XmlPackage entries, then attaches them
   * to either the current test or the suite depending on where the element
   * appeared (per the location stack).
   */
  public void xmlPackages(boolean start, Attributes attributes) {
    if (start) {
      m_currentPackages = Lists.newArrayList();
    } else {
      if (null != m_currentPackages) {
        Location location = m_locations.peek();
        switch (location) {
          case TEST:
            m_currentTest.setXmlPackages(m_currentPackages);
            break;
          case SUITE:
            m_currentSuite.setXmlPackages(m_currentPackages);
            break;
          case CLASS:
            // <packages> nested inside <class> is not a valid construct.
            throw new UnsupportedOperationException("CLASS");
          default:
            throw new AssertionError("Unexpected value: " + location);
        }
      }
      m_currentPackages = null;
      m_currentPackage = null;
    }
  }
  /**
   * Parse &lt;method-selectors&gt;: inside a &lt;test&gt; the selectors attach
   * to the test; anywhere else they attach to the suite.
   */
  public void xmlMethodSelectors(boolean start, Attributes attributes) {
    if (start) {
      m_currentSelectors = new ArrayList<>();
    } else {
      switch (m_locations.peek()) {
        case TEST:
          m_currentTest.setMethodSelectors(m_currentSelectors);
          break;
        default:
          m_currentSuite.setMethodSelectors(m_currentSelectors);
          break;
      }
      m_currentSelectors = null;
    }
  }
/** Parse <selector-class> */
public void xmlSelectorClass(boolean start, Attributes attributes) {
if (start) {
m_currentSelector.setName(attributes.getValue("name"));
String priority = attributes.getValue("priority");
if (priority == null) {
priority = "0";
}
m_currentSelector.setPriority(Integer.parseInt(priority));
}
}
  /** Parse &lt;method-selector&gt;: opens a fresh selector, appends it to the list on close. */
  public void xmlMethodSelector(boolean start, Attributes attributes) {
    if (start) {
      m_currentSelector = new XmlMethodSelector();
    } else {
      m_currentSelectors.add(m_currentSelector);
      m_currentSelector = null;
    }
  }
  /**
   * Parse &lt;methods&gt;: opens the per-class include/exclude collectors,
   * then transfers them onto the current class on close. The non-null state
   * of these collectors also routes xmlInclude()/xmlExclude().
   */
  private void xmlMethod(boolean start) {
    if (start) {
      m_currentIncludedMethods = new ArrayList<>();
      m_currentExcludedMethods = Lists.newArrayList();
      m_currentIncludeIndex = 0;
    } else {
      m_currentClass.setIncludedMethods(m_currentIncludedMethods);
      m_currentClass.setExcludedMethods(m_currentExcludedMethods);
      m_currentIncludedMethods = null;
      m_currentExcludedMethods = null;
    }
  }
  /**
   * Parse &lt;run&gt;: m_currentRuns is the marker that routes nested
   * &lt;include&gt;/&lt;exclude&gt; names into the group lists; on close the
   * collected group names attach to the test (or the suite outside a test).
   */
  public void xmlRun(boolean start, Attributes attributes) throws SAXException {
    if (start) {
      m_currentRuns = Lists.newArrayList();
    } else {
      if (m_currentTest != null) {
        m_currentTest.setIncludedGroups(m_currentIncludedGroups);
        m_currentTest.setExcludedGroups(m_currentExcludedGroups);
      } else {
        m_currentSuite.setIncludedGroups(m_currentIncludedGroups);
        m_currentSuite.setExcludedGroups(m_currentExcludedGroups);
      }
      m_currentRuns = null;
    }
  }
  /** Parse &lt;group&gt;: registers a group dependency (name depends-on ...) on the current test. */
  public void xmlGroup(boolean start, Attributes attributes) throws SAXException {
    if (start) {
      m_currentTest.addXmlDependencyGroup(
          attributes.getValue("name"), attributes.getValue("depends-on"));
    }
  }
  /**
   * Parse &lt;groups&gt;: opens the group collectors; on close, suite-level
   * groups attach to the suite (test-level groups were already attached via
   * xmlRun()/xmlDefine()).
   */
  public void xmlGroups(boolean start, Attributes attributes) throws SAXException {
    if (start) {
      m_currentGroups = new XmlGroups();
      m_currentIncludedGroups = Lists.newArrayList();
      m_currentExcludedGroups = Lists.newArrayList();
    } else {
      if (m_currentTest == null) {
        m_currentSuite.setGroups(m_currentGroups);
      }
      m_currentGroups = null;
    }
  }
/**
* NOTE: I only invoke xml*methods (e.g. xmlSuite()) if I am acting on both the start and the end
* of the tag. This way I can keep the treatment of this tag in one place. If I am only doing
* something when the tag opens, the code is inlined below in the startElement() method.
*/
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
if (!m_validate && !m_hasWarn) {
Logger.getLogger(TestNGContentHandler.class)
.warn(
"It is strongly recommended to add "
+ "\"<!DOCTYPE suite SYSTEM \"http://testng.org/testng-1.0.dtd\" >\" at the top of your file, "
+ "otherwise TestNG may fail or not work as expected.");
m_hasWarn = true;
}
String name = attributes.getValue("name");
// ppp("START ELEMENT uri:" + uri + " sName:" + localName + " qName:" + qName +
// " " + attributes);
if ("suite".equals(qName)) {
xmlSuite(true, attributes);
} else if ("suite-file".equals(qName)) {
xmlSuiteFile(true, attributes);
} else if ("test".equals(qName)) {
xmlTest(true, attributes);
} else if ("script".equals(qName)) {
xmlScript(true, attributes);
} else if ("method-selector".equals(qName)) {
xmlMethodSelector(true, attributes);
} else if ("method-selectors".equals(qName)) {
xmlMethodSelectors(true, attributes);
} else if ("selector-class".equals(qName)) {
xmlSelectorClass(true, attributes);
} else if ("classes".equals(qName)) {
xmlClasses(true, attributes);
} else if ("packages".equals(qName)) {
xmlPackages(true, attributes);
} else if ("listeners".equals(qName)) {
xmlListeners(true, attributes);
} else if ("listener".equals(qName)) {
xmlListener(true, attributes);
} else if ("class".equals(qName)) {
// If m_currentClasses is null, the XML is invalid and SAX
// will complain, but in the meantime, dodge the NPE so SAX
// can finish parsing the file.
if (null != m_currentClasses) {
m_currentClass = new XmlClass(name, m_currentClassIndex++, m_loadClasses);
m_currentClass.setXmlTest(m_currentTest);
m_currentClassParameters = Maps.newHashMap();
m_currentClasses.add(m_currentClass);
pushLocation(Location.CLASS);
}
} else if ("package".equals(qName)) {
if (null != m_currentPackages) {
m_currentPackage = new XmlPackage();
m_currentPackage.setName(name);
m_currentPackages.add(m_currentPackage);
}
} else if ("define".equals(qName)) {
xmlDefine(true, attributes);
} else if ("run".equals(qName)) {
xmlRun(true, attributes);
} else if ("group".equals(qName)) {
xmlGroup(true, attributes);
} else if ("groups".equals(qName)) {
xmlGroups(true, attributes);
} else if ("methods".equals(qName)) {
xmlMethod(true);
} else if ("include".equals(qName)) {
xmlInclude(true, attributes);
} else if ("exclude".equals(qName)) {
xmlExclude(true, attributes);
} else if ("parameter".equals(qName)) {
String value = expandValue(attributes.getValue("value"));
Location location = m_locations.peek();
switch (location) {
case TEST:
m_currentTestParameters.put(name, value);
break;
case SUITE:
m_currentSuiteParameters.put(name, value);
break;
case CLASS:
m_currentClassParameters.put(name, value);
break;
case INCLUDE:
m_currentInclude.parameters.put(name, value);
break;
default:
throw new AssertionError("Unexpected value: " + location);
}
}
}
  /**
   * Mutable holder for an in-progress &lt;include&gt; element; converted into
   * an XmlInclude when the element closes (see xmlInclude()).
   */
  private static class Include {
    String name;
    // Raw space-separated "invocation-numbers" attribute, parsed lazily.
    String invocationNumbers;
    String description;
    Map<String, String> parameters = Maps.newHashMap();

    Include(String name, String numbers) {
      this.name = name;
      this.invocationNumbers = numbers;
    }
  }
  /**
   * Parse &lt;include&gt;. The meaning of the name depends on context, decided
   * on close by which collector is currently active: an included method
   * (inside &lt;methods&gt;), a meta-group member (inside &lt;define&gt;), an
   * included group (inside &lt;run&gt;), or a package include.
   */
  private void xmlInclude(boolean start, Attributes attributes) {
    if (start) {
      m_locations.push(Location.INCLUDE);
      m_currentInclude =
          new Include(attributes.getValue("name"), attributes.getValue("invocation-numbers"));
      m_currentInclude.description = attributes.getValue("description");
    } else {
      String name = m_currentInclude.name;
      if (null != m_currentIncludedMethods) {
        String in = m_currentInclude.invocationNumbers;
        XmlInclude include;
        if (!Utils.isStringEmpty(in)) {
          include = new XmlInclude(name, stringToList(in), m_currentIncludeIndex++);
        } else {
          include = new XmlInclude(name, m_currentIncludeIndex++);
        }
        // Parameters collected while inside this <include> (see startElement).
        for (Map.Entry<String, String> entry : m_currentInclude.parameters.entrySet()) {
          include.addParameter(entry.getKey(), entry.getValue());
        }
        include.setDescription(m_currentInclude.description);
        m_currentIncludedMethods.add(include);
      } else if (null != m_currentDefines) {
        m_currentMetaGroup.add(name);
      } else if (null != m_currentRuns) {
        m_currentIncludedGroups.add(name);
      } else if (null != m_currentPackage) {
        m_currentPackage.getInclude().add(name);
      }
      popLocation();
      m_currentInclude = null;
    }
  }
  /**
   * Parse &lt;exclude&gt;. Like xmlInclude(), the target is decided by which
   * collector is active: excluded method, excluded group, or package exclude.
   */
  private void xmlExclude(boolean start, Attributes attributes) {
    if (start) {
      m_locations.push(Location.EXCLUDE);
      String name = attributes.getValue("name");
      if (null != m_currentExcludedMethods) {
        m_currentExcludedMethods.add(name);
      } else if (null != m_currentRuns) {
        m_currentExcludedGroups.add(name);
      } else if (null != m_currentPackage) {
        m_currentPackage.getExclude().add(name);
      }
    } else {
      popLocation();
    }
  }
  /** Pushes the element scope we just entered onto the location stack. */
  private void pushLocation(Location l) {
    m_locations.push(l);
  }
  /** Pops and returns the element scope we just left. */
  private Location popLocation() {
    return m_locations.pop();
  }
private List<Integer> stringToList(String in) {
String[] numbers = in.split(" ");
List<Integer> result = Lists.newArrayList();
for (String n : numbers) {
result.add(Integer.parseInt(n));
}
return result;
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if ("suite".equals(qName)) {
xmlSuite(false, null);
} else if ("suite-file".equals(qName)) {
xmlSuiteFile(false, null);
} else if ("test".equals(qName)) {
xmlTest(false, null);
} else if ("define".equals(qName)) {
xmlDefine(false, null);
} else if ("run".equals(qName)) {
xmlRun(false, null);
} else if ("groups".equals(qName)) {
xmlGroups(false, null);
} else if ("methods".equals(qName)) {
xmlMethod(false);
} else if ("classes".equals(qName)) {
xmlClasses(false, null);
} else if ("packages".equals(qName)) {
xmlPackages(false, null);
} else if ("class".equals(qName)) {
m_currentClass.setParameters(m_currentClassParameters);
m_currentClassParameters = null;
popLocation();
} else if ("listeners".equals(qName)) {
xmlListeners(false, null);
} else if ("method-selector".equals(qName)) {
xmlMethodSelector(false, null);
} else if ("method-selectors".equals(qName)) {
xmlMethodSelectors(false, null);
} else if ("selector-class".equals(qName)) {
xmlSelectorClass(false, null);
} else if ("script".equals(qName)) {
xmlScript(false, null);
} else if ("include".equals(qName)) {
xmlInclude(false, null);
} else if ("exclude".equals(qName)) {
xmlExclude(false, null);
}
}
  /**
   * SAX error callback: parse errors are only fatal when a TestNG DTD was
   * resolved (m_validate set by resolveEntity); otherwise they are ignored.
   */
  @Override
  public void error(SAXParseException e) throws SAXException {
    if (m_validate) {
      throw e;
    }
  }
private boolean areWhiteSpaces(char[] ch, int start, int length) {
for (int i = start; i < start + length; i++) {
char c = ch[i];
if (c != '\n' && c != '\t' && c != ' ') {
return false;
}
}
return true;
}
  /**
   * Accumulates character data, but only while inside a &lt;script&gt;
   * element (m_currentLanguage non-null) and only for non-whitespace runs.
   */
  @Override
  public void characters(char ch[], int start, int length) {
    if (null != m_currentLanguage && !areWhiteSpaces(ch, start, length)) {
      m_currentExpression += new String(ch, start, length);
    }
  }
  /** @return the suite most recently parsed (null before any &lt;suite&gt; was seen). */
  public XmlSuite getSuite() {
    return m_currentSuite;
  }
private static String expandValue(String value) {
StringBuilder result = null;
int startIndex;
int endIndex;
int startPosition = 0;
String property;
while ((startIndex = value.indexOf("${", startPosition)) > -1
&& (endIndex = value.indexOf("}", startIndex + 3)) > -1) {
property = value.substring(startIndex + 2, endIndex);
if (result == null) {
result = new StringBuilder(value.substring(startPosition, startIndex));
} else {
result.append(value.substring(startPosition, startIndex));
}
String propertyValue = System.getProperty(property);
if (propertyValue == null) {
propertyValue = System.getenv(property);
}
if (propertyValue != null) {
result.append(propertyValue);
} else {
result.append("${");
result.append(property);
result.append("}");
}
startPosition = startIndex + 3 + property.length();
}
if (result != null) {
result.append(value.substring(startPosition));
return result.toString();
} else {
return value;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jasper.compiler;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FilePermission;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.CodeSource;
import java.security.PermissionCollection;
import java.security.Policy;
import java.security.cert.Certificate;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.ServletContext;
import javax.servlet.jsp.JspFactory;
import org.apache.jasper.Constants;
import org.apache.jasper.JspCompilationContext;
import org.apache.jasper.Options;
import org.apache.jasper.runtime.ExceptionUtils;
import org.apache.jasper.runtime.JspFactoryImpl;
import org.apache.jasper.security.SecurityClassLoad;
import org.apache.jasper.servlet.JspServletWrapper;
import org.apache.jasper.util.FastRemovalDequeue;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
/**
* Class for tracking JSP compile time file dependencies when the
 * &lt;%@include file="..."%&gt; directive is used.
*
* A background thread periodically checks the files a JSP page
* is dependent upon. If a dependent file changes the JSP page
* which included it is recompiled.
*
* Only used if a web application context is a directory.
*
* @author Glenn L. Nielsen
*/
public final class JspRuntimeContext {
// Logger
private final Log log = LogFactory.getLog(JspRuntimeContext.class);
/*
* Counts how many times the webapp's JSPs have been reloaded.
*/
private AtomicInteger jspReloadCount = new AtomicInteger(0);
/*
* Counts how many times JSPs have been unloaded in this webapp.
*/
private AtomicInteger jspUnloadCount = new AtomicInteger(0);
/**
* Preload classes required at runtime by a JSP servlet so that
* we don't get a defineClassInPackage security exception.
*/
static {
JspFactoryImpl factory = new JspFactoryImpl();
SecurityClassLoad.securityClassLoad(factory.getClass().getClassLoader());
if( System.getSecurityManager() != null ) {
String basePackage = "org.apache.jasper.";
try {
factory.getClass().getClassLoader().loadClass( basePackage +
"runtime.JspFactoryImpl$PrivilegedGetPageContext");
factory.getClass().getClassLoader().loadClass( basePackage +
"runtime.JspFactoryImpl$PrivilegedReleasePageContext");
factory.getClass().getClassLoader().loadClass( basePackage +
"runtime.JspRuntimeLibrary");
factory.getClass().getClassLoader().loadClass( basePackage +
"runtime.ServletResponseWrapperInclude");
factory.getClass().getClassLoader().loadClass( basePackage +
"servlet.JspServletWrapper");
} catch (ClassNotFoundException ex) {
throw new IllegalStateException(ex);
}
}
JspFactory.setDefaultFactory(factory);
}
// ----------------------------------------------------------- Constructors
    /**
     * Create a JspRuntimeContext for a web application context.
     *
     * Loads in any previously generated dependencies from file.
     *
     * @param context ServletContext for web application
     * @param options Jasper options for this web application
     */
    public JspRuntimeContext(ServletContext context, Options options) {
        this.context = context;
        this.options = options;
        // Get the parent class loader
        ClassLoader loader = Thread.currentThread().getContextClassLoader();
        if (loader == null) {
            loader = this.getClass().getClassLoader();
        }
        if (log.isDebugEnabled()) {
            if (loader != null) {
                log.debug(Localizer.getMessage("jsp.message.parent_class_loader_is",
                                               loader.toString()));
            } else {
                log.debug(Localizer.getMessage("jsp.message.parent_class_loader_is",
                                               "<none>"));
            }
        }
        parentClassLoader = loader;
        classpath = initClassPath();
        // JspC (precompilation) runs without a security setup; skip it.
        if (context instanceof org.apache.jasper.servlet.JspCServletContext) {
            codeSource = null;
            permissionCollection = null;
            return;
        }
        if (Constants.IS_SECURITY_ENABLED) {
            SecurityHolder holder = initSecurity();
            codeSource = holder.cs;
            permissionCollection = holder.pc;
        } else {
            codeSource = null;
            permissionCollection = null;
        }
        // If this web application context is running from a
        // directory, start the background compilation thread
        String appBase = context.getRealPath("/");
        if (!options.getDevelopment()
                && appBase != null
                && options.getCheckInterval() > 0) {
            // Non-negative lastCompileCheck enables periodic dependency checks.
            lastCompileCheck = System.currentTimeMillis();
        }
        // Bounded-unload mode: keep at most maxLoadedJsps wrappers alive.
        if (options.getMaxLoadedJsps() > 0) {
            jspQueue = new FastRemovalDequeue<JspServletWrapper>(options.getMaxLoadedJsps());
            if (log.isDebugEnabled()) {
                log.debug(Localizer.getMessage("jsp.message.jsp_queue_created",
                                               "" + options.getMaxLoadedJsps(), context.getContextPath()));
            }
        }
        /* Init parameter is in seconds, locally we use milliseconds */
        jspIdleTimeout = options.getJspIdleTimeout() * 1000;
    }
// ----------------------------------------------------- Instance Variables

/**
 * This web applications ServletContext
 */
private final ServletContext context;
private final Options options;
private final ClassLoader parentClassLoader;
// Security data; both are null when no SecurityManager is in force
private final PermissionCollection permissionCollection;
private final CodeSource codeSource;
// Compiler classpath computed once in the constructor
private final String classpath;
// Timestamp of the last background compile pass; -1 means checking disabled
private volatile long lastCompileCheck = -1L;
// Timestamp of the last background unload pass
private volatile long lastJspQueueUpdate = System.currentTimeMillis();
/* JSP idle timeout in milliseconds */
private long jspIdleTimeout;

/**
 * Maps JSP pages to their JspServletWrapper's
 */
private final Map<String, JspServletWrapper> jsps =
        new ConcurrentHashMap<String, JspServletWrapper>();

/**
 * Keeps JSP pages ordered by last access.
 */
private FastRemovalDequeue<JspServletWrapper> jspQueue = null;
// ------------------------------------------------------ Public Methods

/**
 * Add a new JspServletWrapper.
 *
 * @param jspUri JSP URI
 * @param jsw Servlet wrapper for JSP
 */
public void addWrapper(String jspUri, JspServletWrapper jsw) {
    jsps.put(jspUri, jsw);
}
/**
 * Get an already existing JspServletWrapper.
 *
 * @param jspUri JSP URI
 * @return JspServletWrapper for JSP, or {@code null} if none is registered
 */
public JspServletWrapper getWrapper(String jspUri) {
    return jsps.get(jspUri);
}
/**
 * Remove a JspServletWrapper. No-op if the URI is not registered.
 *
 * @param jspUri JSP URI of JspServletWrapper to remove
 */
public void removeWrapper(String jspUri) {
    jsps.remove(jspUri);
}
/**
 * Push a newly compiled JspServletWrapper into the queue at first
 * execution of jsp. Destroy any JSP that has been replaced in the queue.
 *
 * @param jsw Servlet wrapper for jsp.
 * @return an unloadHandle that can be pushed to front of queue at later execution times.
 * */
public FastRemovalDequeue<JspServletWrapper>.Entry push(JspServletWrapper jsw) {
    if (log.isTraceEnabled()) {
        log.trace(Localizer.getMessage("jsp.message.jsp_added",
                jsw.getJspUri(), context.getContextPath()));
    }
    // The bounded queue may evict its eldest entry on push; the evicted
    // wrapper is reported via getReplaced() and must be unloaded here
    FastRemovalDequeue<JspServletWrapper>.Entry entry = jspQueue.push(jsw);
    JspServletWrapper replaced = entry.getReplaced();
    if (replaced != null) {
        if (log.isDebugEnabled()) {
            log.debug(Localizer.getMessage("jsp.message.jsp_removed_excess",
                    replaced.getJspUri(), context.getContextPath()));
        }
        unloadJspServletWrapper(replaced);
        // Clear the reference so the evicted wrapper can be collected
        entry.clearReplaced();
    }
    return entry;
}
/**
 * Push unloadHandle for JspServletWrapper to front of the queue,
 * marking the wrapped JSP as most recently used.
 *
 * @param unloadHandle the unloadHandle for the jsp.
 * */
public void makeYoungest(FastRemovalDequeue<JspServletWrapper>.Entry unloadHandle) {
    if (log.isTraceEnabled()) {
        JspServletWrapper jsw = unloadHandle.getContent();
        log.trace(Localizer.getMessage("jsp.message.jsp_queue_update",
                jsw.getJspUri(), context.getContextPath()));
    }
    jspQueue.moveFirst(unloadHandle);
}
/**
 * Returns the number of JSPs for which JspServletWrappers exist, i.e.,
 * the number of JSPs that have been loaded into the webapp.
 *
 * @return The number of JSPs that have been loaded into the webapp
 */
public int getJspCount() {
    return jsps.size();
}
/**
 * Get the SecurityManager Policy CodeSource for this web
 * application context.
 *
 * @return CodeSource for JSP; {@code null} when no SecurityManager is used
 */
public CodeSource getCodeSource() {
    return codeSource;
}
/**
 * Get the parent ClassLoader.
 *
 * @return ClassLoader parent (resolved once in the constructor)
 */
public ClassLoader getParentClassLoader() {
    return parentClassLoader;
}
/**
 * Get the SecurityManager PermissionCollection for this
 * web application context.
 *
 * @return PermissionCollection permissions; {@code null} when no
 *         SecurityManager is used
 */
public PermissionCollection getPermissionCollection() {
    return permissionCollection;
}
/**
 * Process a "destroy" event for this web application context.
 * Destroys every registered JspServletWrapper.
 */
public void destroy() {
    // Enhanced for-loop over the live ConcurrentHashMap view replaces the
    // manual Iterator; iteration is weakly consistent and never throws CME
    for (JspServletWrapper jsw : jsps.values()) {
        jsw.destroy();
    }
}
/**
 * Increments the JSP reload counter.
 */
public void incrementJspReloadCount() {
    jspReloadCount.incrementAndGet();
}
/**
 * Resets the JSP reload counter.
 *
 * @param count Value to which to reset the JSP reload counter
 */
public void setJspReloadCount(int count) {
    jspReloadCount.set(count);
}
/**
 * Gets the current value of the JSP reload counter.
 *
 * @return The current value of the JSP reload counter
 */
public int getJspReloadCount() {
    return jspReloadCount.intValue();
}
/**
 * Gets the number of JSPs that are in the JSP limiter queue.
 *
 * @return The number of JSPs (in the webapp with which this JspServlet is
 *         associated) that are in the JSP limiter queue, or -1 when the
 *         queue is not in use
 */
public int getJspQueueLength() {
    // -1 signals that JSP limiting is disabled (no queue was created)
    return (jspQueue == null) ? -1 : jspQueue.getSize();
}
/**
 * Increments the JSP unload counter.
 */
public void incrementJspUnloadCount() {
    jspUnloadCount.incrementAndGet();
}
/**
 * Gets the number of JSPs that have been unloaded.
 *
 * @return The number of JSPs (in the webapp with which this JspServlet is
 *         associated) that have been unloaded
 */
public int getJspUnloadCount() {
    return jspUnloadCount.intValue();
}
/**
 * Method used by background thread to check the JSP dependencies
 * registered with this class for JSP's. Recompiles any JSP whose
 * dependencies have changed; rate-limited by the configured check
 * interval.
 */
public void checkCompile() {

    if (lastCompileCheck < 0) {
        // Checking was disabled
        return;
    }
    long now = System.currentTimeMillis();
    if (now > (lastCompileCheck + (options.getCheckInterval() * 1000L))) {
        lastCompileCheck = now;
    } else {
        // Within the check interval; nothing to do yet
        return;
    }

    // Snapshot the wrappers so compilation runs against a stable set
    Object [] wrappers = jsps.values().toArray();
    for (int i = 0; i < wrappers.length; i++ ) {
        JspServletWrapper jsw = (JspServletWrapper)wrappers[i];
        JspCompilationContext ctxt = jsw.getJspEngineContext();
        // JspServletWrapper also synchronizes on this when
        // it detects it has to do a reload
        synchronized(jsw) {
            try {
                ctxt.compile();
            } catch (FileNotFoundException ex) {
                // The JSP source has been removed; record the removal
                ctxt.incrementRemoved();
            } catch (Throwable t) {
                ExceptionUtils.handleThrowable(t);
                jsw.getServletContext().log("Background compile failed",
                        t);
            }
        }
    }
}
/**
 * The classpath that is passed off to the Java compiler.
 *
 * @return the compiler classpath computed in the constructor
 */
public String getClassPath() {
    return classpath;
}
/**
 * Last time the update background task has run.
 *
 * @return timestamp (milliseconds) of the last checkUnload() pass
 */
public long getLastJspQueueUpdate() {
    return lastJspQueueUpdate;
}
// -------------------------------------------------------- Private Methods

/**
 * Method used to initialize classpath for compiles.
 *
 * @return the compiler classpath: the parent class loader's file: URLs,
 *         the scratch directory, then the container-provided servlet
 *         classpath (or the configured classpath if the attribute is
 *         missing or empty)
 */
private String initClassPath() {

    StringBuilder cpath = new StringBuilder();

    if (parentClassLoader instanceof URLClassLoader) {
        URL [] urls = ((URLClassLoader)parentClassLoader).getURLs();
        for(int i = 0; i < urls.length; i++) {
            // Tomcat 4 can use URL's other than file URL's,
            // a protocol other than file: will generate a
            // bad file system path, so only add file:
            // protocol URL's to the classpath.
            if( urls[i].getProtocol().equals("file") ) {
                // Append the two pieces directly rather than building a
                // temporary String with '+' inside append()
                cpath.append(urls[i].getFile());
                cpath.append(File.pathSeparator);
            }
        }
    }

    cpath.append(options.getScratchDir());
    cpath.append(File.pathSeparator);

    String cp = (String) context.getAttribute(Constants.SERVLET_CLASSPATH);
    if (cp == null || cp.isEmpty()) {
        cp = options.getClassPath();
    }

    String path = cpath.toString() + cp;

    if(log.isDebugEnabled()) {
        log.debug("Compilation classpath initialized: " + path);
    }
    return path;
}
// Helper class to allow initSecurity() to return two items
private static class SecurityHolder{
    // CodeSource computed for the web application's document base
    private final CodeSource cs;
    // Permissions granted to generated JSP classes
    private final PermissionCollection pc;
    private SecurityHolder(CodeSource cs, PermissionCollection pc){
        this.cs = cs;
        this.pc = pc;
    }
}
/**
 * Method used to initialize SecurityManager data.
 *
 * @return holder with the CodeSource and PermissionCollection for
 *         generated JSP classes; both fields are null when no Policy is
 *         installed or initialization failed
 */
private SecurityHolder initSecurity() {

    // Setup the PermissionCollection for this web app context
    // based on the permissions configured for the root of the
    // web app context directory, then add a file read permission
    // for that directory.
    Policy policy = Policy.getPolicy();
    CodeSource source = null;
    PermissionCollection permissions = null;
    if( policy != null ) {
        try {
            // Get the permissions for the web app context
            String docBase = context.getRealPath("/");
            if( docBase == null ) {
                // Not running from a directory (e.g. packed WAR); fall
                // back to the scratch directory as the code base
                docBase = options.getScratchDir().toString();
            }
            String codeBase = docBase;
            if (!codeBase.endsWith(File.separator)){
                codeBase = codeBase + File.separator;
            }
            File contextDir = new File(codeBase);
            URL url = contextDir.getCanonicalFile().toURI().toURL();
            source = new CodeSource(url,(Certificate[])null);
            permissions = policy.getPermissions(source);

            // Create a file read permission for web app context directory
            // (for the directory itself and, below, everything under it)
            if (!docBase.endsWith(File.separator)){
                permissions.add
                    (new FilePermission(docBase,"read"));
                docBase = docBase + File.separator;
            } else {
                permissions.add
                    (new FilePermission
                        (docBase.substring(0,docBase.length() - 1),"read"));
            }
            // "-" suffix grants recursive access to all contained files
            docBase = docBase + "-";
            permissions.add(new FilePermission(docBase,"read"));

            // Spec says apps should have read/write for their temp
            // directory. This is fine, as no security sensitive files, at
            // least any that the app doesn't have full control of anyway,
            // will be written here.
            String workDir = options.getScratchDir().toString();
            if (!workDir.endsWith(File.separator)){
                permissions.add
                    (new FilePermission(workDir,"read,write"));
                workDir = workDir + File.separator;
            }
            workDir = workDir + "-";
            permissions.add(new FilePermission(
                    workDir,"read,write,delete"));

            // Allow the JSP to access org.apache.jasper.runtime.HttpJspBase
            permissions.add( new RuntimePermission(
                    "accessClassInPackage.org.apache.jasper.runtime") );

            if (parentClassLoader instanceof URLClassLoader) {
                URL [] urls = ((URLClassLoader)parentClassLoader).getURLs();
                String jarUrl = null;
                String jndiUrl = null;
                for (int i=0; i<urls.length; i++) {
                    // Remember the first jndi: and jar:jndi: URLs seen so
                    // read access can be granted to container resources
                    if (jndiUrl == null
                            && urls[i].toString().startsWith("jndi:") ) {
                        jndiUrl = urls[i].toString() + "-";
                    }
                    if (jarUrl == null
                            && urls[i].toString().startsWith("jar:jndi:")
                            ) {
                        jarUrl = urls[i].toString();
                        jarUrl = jarUrl.substring(0,jarUrl.length() - 2);
                        jarUrl = jarUrl.substring(0,
                                jarUrl.lastIndexOf('/')) + "/-";
                    }
                }
                if (jarUrl != null) {
                    permissions.add(
                            new FilePermission(jarUrl,"read"));
                    // Also grant read on the same path without the "jar:"
                    // prefix (first 4 characters)
                    permissions.add(
                            new FilePermission(jarUrl.substring(4),"read"));
                }
                if (jndiUrl != null)
                    permissions.add(
                            new FilePermission(jndiUrl,"read") );
            }
        } catch(Exception e) {
            context.log("Security Init for context failed",e);
        }
    }
    return new SecurityHolder(source, permissions);
}
/**
 * Unregister and destroy a wrapper, then bump the unload counter.
 * Synchronizes on the wrapper to serialize with request processing.
 */
private void unloadJspServletWrapper(JspServletWrapper jsw) {
    removeWrapper(jsw.getJspUri());
    synchronized(jsw) {
        jsw.destroy();
    }
    jspUnloadCount.incrementAndGet();
}
/**
 * Method used by background thread to check if any JSP's should be
 * unloaded. A JSP is unloaded when it has been idle longer than the
 * configured jspIdleTimeout.
 */
public void checkUnload() {

    if (log.isTraceEnabled()) {
        int queueLength = -1;
        if (jspQueue != null) {
            queueLength = jspQueue.getSize();
        }
        log.trace(Localizer.getMessage("jsp.message.jsp_unload_check",
                context.getContextPath(), "" + jsps.size(), "" + queueLength));
    }
    long now = System.currentTimeMillis();
    if (jspIdleTimeout > 0) {
        long unloadBefore = now - jspIdleTimeout;
        // Snapshot the wrappers; the map may change concurrently
        Object [] wrappers = jsps.values().toArray();
        for (int i = 0; i < wrappers.length; i++ ) {
            JspServletWrapper jsw = (JspServletWrapper)wrappers[i];
            synchronized(jsw) {
                if (jsw.getLastUsageTime() < unloadBefore) {
                    if (log.isDebugEnabled()) {
                        log.debug(Localizer.getMessage("jsp.message.jsp_removed_idle",
                                jsw.getJspUri(), context.getContextPath(),
                                "" + (now-jsw.getLastUsageTime())));
                    }
                    // Detach from the limiter queue (if any) before unload
                    if (jspQueue != null) {
                        jspQueue.remove(jsw.getUnloadHandle());
                    }
                    unloadJspServletWrapper(jsw);
                }
            }
        }
    }
    lastJspQueueUpdate = now;
}
}
| |
package com.quollwriter.text.rules;
import java.util.*;
import javax.swing.*;
import javax.swing.event.*;
import javax.swing.text.*;
import java.text.*;
import com.gentlyweb.utils.*;
import com.gentlyweb.xml.*;
import com.quollwriter.*;
import com.quollwriter.text.*;
import com.quollwriter.ui.forms.*;
import org.jdom.Element;
import org.jdom.JDOMException;
public class SentenceComplexityRule extends AbstractSentenceRule
{
public class XMLConstants
{
public static final String ratio = "ratio";
public static final String wordCount = "wordCount";
}
private float ratio = 0;
private int wordCount = 0;
private JSpinner ratioF = null;
private JSpinner wordCountF = null;
public SentenceComplexityRule ()
{
}
public SentenceComplexityRule (float syllableWordRatio,
int wordCount,
boolean user)
{
this.ratio = syllableWordRatio;
this.wordCount = wordCount;
this.setUserRule (user);
}
@Override
public String getDescription ()
{
String d = super.getDescription ();
d = StringUtils.replaceString (d,
"[RATIO]",
Environment.formatNumber (this.ratio) + "");
d = StringUtils.replaceString (d,
"[COUNT]",
this.wordCount + "");
return d;
}
@Override
public String getSummary ()
{
String t = StringUtils.replaceString (super.getSummary (),
"[RATIO]",
Environment.formatNumber (this.ratio) + "");
t = StringUtils.replaceString (t,
"[COUNT]",
this.wordCount + "");
return t;
}
@Override
public void init (Element root)
throws JDOMException
{
super.init (root);
this.ratio = JDOMUtils.getAttributeValueAsFloat (root,
XMLConstants.ratio);
this.wordCount = JDOMUtils.getAttributeValueAsInt (root,
XMLConstants.wordCount);
}
@Override
public Element getAsElement ()
{
Element root = super.getAsElement ();
root.setAttribute (XMLConstants.ratio,
this.ratio + "");
root.setAttribute (XMLConstants.wordCount,
this.wordCount + "");
return root;
}
@Override
public List<Issue> getIssues (Sentence sentence)
{
List<Issue> issues = new ArrayList ();
float wordC = (float) sentence.getWordCount ();
if (wordC <= this.wordCount)
{
return issues;
}
float syllC = (float) sentence.getSyllableCount ();
float r = syllC / wordC;
r = (float) Math.round (r * 10) / 10;
if (r > this.ratio)
{
DecimalFormat df = new DecimalFormat ("##.#");
String n = df.format (r);
Issue iss = new Issue (String.format (Environment.getUIString (LanguageStrings.problemfinder,
LanguageStrings.issues,
LanguageStrings.sentencecomplexity,
LanguageStrings.text),
//"Sentence syllable/word ratio is: <b>%s</b>. (Max is: %s)",
n,
Environment.formatNumber (this.ratio)),
sentence,
sentence.getAllTextStartOffset () + "-sentencetoocomplex-" + r,
this);
issues.add (iss);
}
return issues;
}
@Override
public String getCategory ()
{
return Rule.SENTENCE_CATEGORY;
}
@Override
public Set<FormItem> getFormItems ()
{
List<String> pref = new ArrayList ();
pref.add (LanguageStrings.problemfinder);
pref.add (LanguageStrings.config);
pref.add (LanguageStrings.rules);
pref.add (LanguageStrings.sentencecomplexity);
pref.add (LanguageStrings.labels);
Set<FormItem> items = new LinkedHashSet ();
this.ratioF = new JSpinner (new SpinnerNumberModel (this.ratio,
0.1f,
3.0f,
0.1));
Box b = new Box (BoxLayout.X_AXIS);
b.add (this.ratioF);
b.add (Box.createHorizontalGlue ());
this.ratioF.setMaximumSize (this.ratioF.getPreferredSize ());
items.add (new AnyFormItem (Environment.getUIString (pref,
LanguageStrings.ratio),
b));
this.wordCountF = new JSpinner (new SpinnerNumberModel (this.wordCount,
1,
500,
1));
b = new Box (BoxLayout.X_AXIS);
b.add (this.wordCountF);
b.add (Box.createHorizontalGlue ());
this.wordCountF.setMaximumSize (this.wordCountF.getPreferredSize ());
items.add (new AnyFormItem (Environment.getUIString (pref,
LanguageStrings.sentencelength),
//"Sentence length (words)",
b));
return items;
}
public String getFormError ()
{
return null;
}
public void updateFromForm ()
{
this.ratio = ((SpinnerNumberModel) this.ratioF.getModel ()).getNumber ().floatValue ();
this.wordCount = ((SpinnerNumberModel) this.wordCountF.getModel ()).getNumber ().intValue ();
}
}
| |
/*
* Copyright (C) 2017 Clivern <http://clivern.com>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.racter.example;
import static spark.Spark.*;
import com.clivern.racter.BotPlatform;
import com.clivern.racter.receivers.webhook.*;
import com.clivern.racter.senders.*;
import com.clivern.racter.senders.templates.*;
import org.pmw.tinylog.Logger;
import java.util.HashMap;
import java.util.Map;
import java.io.IOException;
/**
 * Example Spark web server wiring the racter BotPlatform to Facebook
 * Messenger: GET / answers the webhook verification challenge, POST /
 * parses incoming messages and replies according to the message text.
 */
public class Main {

    public static void main(String[] args) throws IOException
    {
        // Verify Token Route
        get("/", (request, response) -> {
            BotPlatform platform = new BotPlatform("src/main/java/resources/config.properties");
            // Facebook sends hub.mode / hub.verify_token / hub.challenge as
            // query params; default each to "" when absent
            platform.getVerifyWebhook().setHubMode(( request.queryParams("hub.mode") != null ) ? request.queryParams("hub.mode") : "");
            platform.getVerifyWebhook().setHubVerifyToken(( request.queryParams("hub.verify_token") != null ) ? request.queryParams("hub.verify_token") : "");
            platform.getVerifyWebhook().setHubChallenge(( request.queryParams("hub.challenge") != null ) ? request.queryParams("hub.challenge") : "");
            if( platform.getVerifyWebhook().challenge() ){
                // Token matched: echo the challenge back to Facebook
                response.status(200);
                return ( request.queryParams("hub.challenge") != null ) ? request.queryParams("hub.challenge") : "";
            }
            response.status(403);
            return "Verification token mismatch";
        });

        // ---------------------------------
        // Test Case
        // curl -X POST -H "Content-Type: application/json" -d '{"object":"page","entry":[{"id":"pageid829292","time":1458692752478,"messaging":[{"sender":{"id":"userid83992"},"recipient":{"id":"pageid032"},"timestamp":1458692752478,"message":{"mid":"mid.1457764197618:41d102a3e1ae206a38","text":"hello, world!","attachments":[{"type":"image","payload":{"url":"http://clivern.com"}}]}}]}]}' "http://localhost:4567"
        // ---------------------------------
        post("/", (request, response) -> {
            String body = request.body();
            BotPlatform platform = new BotPlatform("src/main/java/resources/config.properties");
            platform.getBaseReceiver().set(body).parse();
            HashMap<String, MessageReceivedWebhook> messages = (HashMap<String, MessageReceivedWebhook>) platform.getBaseReceiver().getMessages();
            for (MessageReceivedWebhook message : messages.values()) {
                // Pull each optional field out of the webhook, defaulting
                // to empty/zero when the payload omits it
                String user_id = (message.hasUserId()) ? message.getUserId() : "";
                String page_id = (message.hasPageId()) ? message.getPageId() : "";
                String message_id = (message.hasMessageId()) ? message.getMessageId() : "";
                String message_text = (message.hasMessageText()) ? message.getMessageText() : "";
                String quick_reply_payload = (message.hasQuickReplyPayload()) ? message.getQuickReplyPayload() : "";
                Long timestamp = (message.hasTimestamp()) ? message.getTimestamp() : 0;
                HashMap<String, String> attachments = (message.hasAttachment()) ? (HashMap<String, String>) message.getAttachment() : new HashMap<String, String>();
                Logger.info("User ID#:" + user_id);
                Logger.info("Page ID#:" + page_id);
                Logger.info("Message ID#:" + message_id);
                Logger.info("Message Text#:" + message_text);
                Logger.info("Quick Reply Payload#:" + quick_reply_payload);
                for (String attachment : attachments.values()) {
                    Logger.info("Attachment#:" + attachment);
                }
                String text = message.getMessageText();
                // One template object per template type; the branches below
                // configure and send whichever one matches the command text
                MessageTemplate message_tpl = platform.getBaseSender().getMessageTemplate();
                ButtonTemplate button_message_tpl = platform.getBaseSender().getButtonTemplate();
                ListTemplate list_message_tpl = platform.getBaseSender().getListTemplate();
                GenericTemplate generic_message_tpl = platform.getBaseSender().getGenericTemplate();
                ReceiptTemplate receipt_message_tpl = platform.getBaseSender().getReceiptTemplate();
                // Dispatch on the literal command text sent by the user
                if( text.equals("text") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setMessageText("Hello World");
                    message_tpl.setNotificationType("REGULAR");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("image") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setAttachment("image", "http://techslides.com/demos/samples/sample.jpg", false);
                    message_tpl.setNotificationType("SILENT_PUSH");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("file") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setAttachment("file", "http://techslides.com/demos/samples/sample.pdf", false);
                    message_tpl.setNotificationType("NO_PUSH");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("video") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setAttachment("video", "http://techslides.com/demos/samples/sample.mp4", false);
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("audio") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setAttachment("audio", "http://techslides.com/demos/samples/sample.mp3", false);
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("mark_seen") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setSenderAction("mark_seen");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("typing_on") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setSenderAction("typing_on");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("typing_off") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setSenderAction("typing_off");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("quick_text_reply") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setMessageText("Select a Color!");
                    message_tpl.setQuickReply("text", "Red", "text_reply_red_click", "");
                    message_tpl.setQuickReply("text", "Green", "text_reply_green_click", "");
                    message_tpl.setQuickReply("text", "Black", "text_reply_black_click", "");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("quick_text_image_reply") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setMessageText("Select a Color!");
                    message_tpl.setQuickReply("text", "Red", "text_reply_red_click", "http://static.wixstatic.com/media/f0a6df_9ae4c70963244e16ba0d89d021407335.png");
                    message_tpl.setQuickReply("text", "Green", "text_reply_green_click", "http://static.wixstatic.com/media/f0a6df_9ae4c70963244e16ba0d89d021407335.png");
                    message_tpl.setQuickReply("text", "Black", "text_reply_black_click", "http://static.wixstatic.com/media/f0a6df_9ae4c70963244e16ba0d89d021407335.png");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("quick_location_reply") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setMessageText("Please share your location!");
                    message_tpl.setQuickReply("location", "", "", "");
                    platform.getBaseSender().send(message_tpl);
                }else if( text.equals("web_url_button") ){
                    button_message_tpl.setRecipientId(message.getUserId());
                    button_message_tpl.setMessageText("Click Below!");
                    button_message_tpl.setButton("web_url", "Take the Hat Quiz", "https://m.me/petershats?ref=take_quiz", "", "");
                    platform.getBaseSender().send(button_message_tpl);
                }else if( text.equals("postback_button") ){
                    button_message_tpl.setRecipientId(message.getUserId());
                    button_message_tpl.setMessageText("Click Below!");
                    button_message_tpl.setButton("postback", "Bookmark Item", "", "DEVELOPER_DEFINED_PAYLOAD", "");
                    platform.getBaseSender().send(button_message_tpl);
                }else if( text.equals("phone_number_button") ){
                    button_message_tpl.setRecipientId(message.getUserId());
                    button_message_tpl.setMessageText("Click Below!");
                    button_message_tpl.setButton("phone_number", "Call Representative", "", "+15105551234", "");
                    platform.getBaseSender().send(button_message_tpl);
                }else if( text.equals("account_link_button") ){
                    button_message_tpl.setRecipientId(message.getUserId());
                    button_message_tpl.setMessageText("Click Below!");
                    button_message_tpl.setButton("account_link", "", "https://www.example.com/authorize", "", "");
                    platform.getBaseSender().send(button_message_tpl);
                }else if( text.equals("account_unlink_button") ){
                    button_message_tpl.setRecipientId(message.getUserId());
                    button_message_tpl.setMessageText("Click Below!");
                    button_message_tpl.setButton("account_unlink", "", "", "", "");
                    platform.getBaseSender().send(button_message_tpl);
                }else if( text.equals("list_template") ){
                    list_message_tpl.setRecipientId(message.getUserId());
                    list_message_tpl.setElementStyle("compact");
                    // Element
                    Integer element_index = list_message_tpl.setElement("Classic T-Shirt Collection", "https://peterssendreceiveapp.ngrok.io/img/collection.png", "See all our colors");
                    list_message_tpl.setElementDefaultAction(element_index, "web_url", "https://peterssendreceiveapp.ngrok.io/view?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    list_message_tpl.setElementButton(element_index, "Shop Now", "web_url", "https://peterssendreceiveapp.ngrok.io/shop?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    // Element
                    element_index = list_message_tpl.setElement("Classic T-Shirt Collection", "https://peterssendreceiveapp.ngrok.io/img/collection.png", "See all our colors");
                    list_message_tpl.setElementDefaultAction(element_index, "web_url", "https://peterssendreceiveapp.ngrok.io/view?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    list_message_tpl.setElementButton(element_index, "Shop Now", "web_url", "https://peterssendreceiveapp.ngrok.io/shop?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    // Set Button
                    list_message_tpl.setButton("postback", "View More", "", "payload", "");
                    platform.getBaseSender().send(list_message_tpl);
                }else if( text.equals("generic_template") ){
                    generic_message_tpl.setRecipientId(message.getUserId());
                    // Element
                    Integer element_index = generic_message_tpl.setElement("Classic T-Shirt Collection", "https://peterssendreceiveapp.ngrok.io/img/collection.png", "See all our colors");
                    generic_message_tpl.setElementDefaultAction(element_index, "web_url", "https://peterssendreceiveapp.ngrok.io/view?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    generic_message_tpl.setElementButton(element_index, "Shop Now", "web_url", "https://peterssendreceiveapp.ngrok.io/shop?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    // Element
                    element_index = generic_message_tpl.setElement("Classic T-Shirt Collection", "https://peterssendreceiveapp.ngrok.io/img/collection.png", "See all our colors");
                    generic_message_tpl.setElementDefaultAction(element_index, "web_url", "https://peterssendreceiveapp.ngrok.io/view?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    generic_message_tpl.setElementButton(element_index, "Shop Now", "web_url", "https://peterssendreceiveapp.ngrok.io/shop?item=102", true, "tall", "https://peterssendreceiveapp.ngrok.io/");
                    platform.getBaseSender().send(generic_message_tpl);
                }else if( text.equals("receipt_template") ){
                    receipt_message_tpl.setRecipientId(message.getUserId());
                    receipt_message_tpl.setRecipientName("Stephane Crozatier");
                    receipt_message_tpl.setOrderNumber("12345678902");
                    receipt_message_tpl.setCurrency("USD");
                    receipt_message_tpl.setPaymentMethod("Visa 2345");
                    receipt_message_tpl.setOrderUrl("http://petersapparel.parseapp.com/order?order_id=123456");
                    receipt_message_tpl.setTimestamp("1428444852");
                    receipt_message_tpl.setElement("Classic White T-Shirt", "100% Soft and Luxurious Cotton", "2", "50", "USD", "https://image.spreadshirtmedia.com/image-server/v1/products/1001491830/views/1,width=800,height=800,appearanceId=2,version=1473664654/black-rap-nation-t-shirt-men-s-premium-t-shirt.png");
                    receipt_message_tpl.setElement("Classic Gray T-Shirt", "100% Soft and Luxurious Cotton", "2", "50", "USD", "https://static1.squarespace.com/static/57a088e05016e13b82b0beac/t/584fe89720099e4b5211c624/1481631899763/darts-is-my-religion-ally-pally-is-my-church-t-shirt-maenner-maenner-t-shirt.png");
                    receipt_message_tpl.setAddress("1 Hacker Way", "", "Menlo Park", "94025", "CA", "US");
                    receipt_message_tpl.setSummary("75.00", "4.95", "6.19", "56.14");
                    receipt_message_tpl.setAdjustment("New Customer Discount", "20");
                    receipt_message_tpl.setAdjustment("$10 Off Coupon", "10");
                    platform.getBaseSender().send(receipt_message_tpl);
                }
                // Quick-reply postbacks arrive as payloads, handled separately
                // from the command text above
                if( quick_reply_payload.equals("text_reply_red_click") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setMessageText("Red Clicked");
                    platform.getBaseSender().send(message_tpl);
                }else if( quick_reply_payload.equals("text_reply_green_click") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setMessageText("Green Clicked");
                    platform.getBaseSender().send(message_tpl);
                }else if( quick_reply_payload.equals("text_reply_black_click") ){
                    message_tpl.setRecipientId(message.getUserId());
                    message_tpl.setMessageText("Black Clicked");
                    platform.getBaseSender().send(message_tpl);
                }
                // NOTE(review): only the first message in the map is fully
                // processed before returning — confirm this is intended
                return "ok";
            }
            return "bla";
        });
    }
}
| |
package tonius.simplyjetpacks.item;
import cofh.api.energy.IEnergyContainerItem;
import net.minecraft.client.model.ModelBiped;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.EntityEquipmentSlot;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraft.util.DamageSource;
import net.minecraft.world.World;
import net.minecraftforge.common.ISpecialArmor;
import net.minecraftforge.fluids.FluidRegistry;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.IFluidContainerItem;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import tonius.simplyjetpacks.SimplyJetpacks;
import tonius.simplyjetpacks.client.util.RenderUtils;
import tonius.simplyjetpacks.config.Config;
import tonius.simplyjetpacks.integration.ModType;
import tonius.simplyjetpacks.item.meta.FluxPack;
import tonius.simplyjetpacks.item.meta.Jetpack;
import tonius.simplyjetpacks.item.meta.PackBase;
import tonius.simplyjetpacks.setup.FuelType;
import tonius.simplyjetpacks.setup.ModCreativeTab;
import tonius.simplyjetpacks.setup.ModEnchantments;
import tonius.simplyjetpacks.setup.ModKey;
import tonius.simplyjetpacks.util.math.MathHelper;
import tonius.simplyjetpacks.util.NBTHelper;
import tonius.simplyjetpacks.util.SJStringHelper;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
public class ItemPack<T extends PackBase> extends ItemArmor implements IControllableArmor, ISpecialArmor, IEnergyContainerItem, IFluidContainerItem, IHUDInfoProvider
{
// NBT tag keys for stored energy and fluid
private static final String TAG_ENERGY = "Energy";
private static final String TAG_FLUID = "Fluid";

// Which mod's power system this item integrates with
public final ModType modType;
// Registered pack variants, keyed by item metadata (insertion order kept)
private final Map<Integer, T> packs = new LinkedHashMap<Integer, T>();
// Registry name, as passed to the constructor
protected String name;
/**
 * Create the chest-slot pack item for the given mod integration.
 *
 * @param modType the power-system integration this item belongs to
 * @param registryName the registry name for this item
 */
public ItemPack(ModType modType, String registryName)
{
    super(ArmorMaterial.IRON, 2, EntityEquipmentSlot.CHEST);
    this.name = registryName;
    this.modType = modType;
    this.setUnlocalizedName(SimplyJetpacks.PREFIX + "pack" + modType.suffix);
    // Subtypes: each metadata value is a different pack variant
    this.setHasSubtypes(true);
    // Durability is repurposed as a fuel indicator, not real damage
    this.setMaxDamage(0);
    this.setCreativeTab(ModCreativeTab.instance);
    this.setRegistryName(registryName);
}
/**
 * Register a pack variant under the given metadata value.
 *
 * @param meta item metadata for this variant
 * @param pack the pack definition
 * @param returnFull when true, the returned stack is filled to max fuel
 * @return an ItemStack of this item with the given metadata
 */
public ItemStack putPack(int meta, T pack, boolean returnFull)
{
    this.packs.put(meta, pack);
    ItemStack stack = new ItemStack(this, 1, meta);
    if(returnFull)
    {
        this.addFuel(stack, this.getMaxFuelStored(stack), false);
    }
    return stack;
}
/**
 * Register a pack variant; convenience overload returning an empty stack.
 */
public ItemStack putPack(int meta, T pack)
{
    return this.putPack(meta, pack, false);
}
/**
 * Look up the pack variant for a stack's metadata; null if unregistered.
 */
public T getPack(ItemStack stack)
{
    return this.packs.get(stack.getItemDamage());
}
/**
 * All registered pack variants, in registration order.
 */
public Collection<T> getPacks()
{
    return this.packs.values();
}
/**
 * Inventory tick: delegates to the pack while it sits in a player's
 * inventory (as opposed to being worn).
 */
@Override
public void onUpdate(ItemStack stack, World world, Entity entity, int par4, boolean par5)
{
    T pack = this.getPack(stack);
    if(pack != null && entity instanceof EntityPlayer)
    {
        pack.tickInventory(world, (EntityPlayer) entity, stack, this);
    }
}
/**
 * Armor tick: delegates to the pack while it is being worn.
 */
@Override
public void onArmorTick(World world, EntityPlayer player, ItemStack stack)
{
    T pack = this.getPack(stack);
    if(pack != null)
    {
        pack.tickArmor(world, player, stack, this);
    }
}
/**
 * Per-variant unlocalized name; falls back to the base item name when the
 * stack's metadata has no registered pack.
 */
@Override
public String getUnlocalizedName(ItemStack stack)
{
    T pack = this.getPack(stack);
    if(pack != null)
    {
        return "item." + SimplyJetpacks.PREFIX + pack.getBaseName(true) + this.modType.suffix;
    }
    // Pass the stack through, consistent with the other stack-aware
    // overrides in this class (the original called the no-arg overload)
    return super.getUnlocalizedName(stack);
}
/**
 * Per-variant rarity; falls back to the base item rarity when the stack's
 * metadata has no registered pack.
 */
@Override
public EnumRarity getRarity(ItemStack stack)
{
    T pack = this.getPack(stack);
    if(pack != null)
    {
        return pack.rarity;
    }
    return super.getRarity(stack);
}
/** The durability bar doubles as a fuel gauge; packs opt in via hasFuelIndicator. */
@Override
public boolean showDurabilityBar(ItemStack stack)
{
    T pack = this.getPack(stack);
    return pack != null ? pack.hasFuelIndicator : super.showDurabilityBar(stack);
}
/**
 * Maps remaining fuel onto the vanilla durability bar: near 0.0 when full,
 * approaching 1.0 when empty. The +1 offsets guard against division by zero
 * for packs with zero fuel capacity.
 */
@Override
public double getDurabilityForDisplay(ItemStack stack)
{
    double stored = this.getMaxFuelStored(stack) - this.getFuelStored(stack) + 1;
    double max = this.getMaxFuelStored(stack) + 1;
    return stored / max;
}
/** Per-pack anvil enchantability; vanilla default for unknown metadata. */
@Override
public int getItemEnchantability(ItemStack stack)
{
    T pack = this.getPack(stack);
    return pack != null ? pack.enchantability : super.getItemEnchantability(stack);
}
/**
 * A pack counts as an enchantable "tool" when its variant defines a positive
 * enchantability. The previous code called the stack-less
 * Item.getItemEnchantability(), ignoring the per-pack value resolved from
 * the given stack; use the stack-aware overload defined above instead.
 */
@Override
public boolean isItemTool(ItemStack stack)
{
    return this.getItemEnchantability(stack) > 0;
}
/**
 * Book-enchantable iff the pack variant defines a positive enchantability.
 * Previously queried the stack-less Item.getItemEnchantability(), ignoring
 * the per-pack value; use the stack-aware overload for consistency with
 * isItemTool/getItemEnchantability(ItemStack).
 */
@Override
public boolean isBookEnchantable(ItemStack stack, ItemStack book)
{
    return this.getItemEnchantability(stack) > 0;
}
/**
 * Tooltip: always shows the pack's basic info; detailed info only while the
 * "details" key (shift) is held, otherwise a "press shift" hint is shown.
 */
@Override
@SideOnly(Side.CLIENT)
@SuppressWarnings("unchecked")
public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean par4)
{
    T pack = this.getPack(stack);
    if(pack != null)
    {
        pack.addInformation(stack, this, player, list);
        if(SJStringHelper.canShowDetails())
        {
            pack.addShiftInformation(stack, this, player, list);
        }
        else
        {
            list.add(SJStringHelper.getShiftText());
        }
    }
}
/** Creative-tab listing: every registered variant contributes its sub-items. */
@Override
@SideOnly(Side.CLIENT)
@SuppressWarnings("unchecked")
public void getSubItems(Item item, CreativeTabs tab, List list)
{
    for(Map.Entry<Integer, T> entry : this.packs.entrySet())
    {
        entry.getValue().addSubItems(this, entry.getKey(), list);
    }
}
/** Per-pack worn-armor texture; vanilla lookup for unknown metadata. */
@Override
@SideOnly(Side.CLIENT)
public String getArmorTexture(ItemStack stack, Entity entity, EntityEquipmentSlot slot, String type)
{
    T pack = this.getPack(stack);
    if(pack == null)
    {
        return super.getArmorTexture(stack, entity, slot, type);
    }
    return pack.getArmorTexture(stack, entity, slot, this.modType);
}
/**
 * Uses the pack's custom 3D armor model when one exists and 3D models are
 * enabled in the config; otherwise defers to the vanilla biped model.
 */
@Override
@SideOnly(Side.CLIENT)
public ModelBiped getArmorModel(EntityLivingBase entityLiving, ItemStack stack, EntityEquipmentSlot armorSlot, ModelBiped _default)
{
    T pack = this.getPack(stack);
    if(pack != null && pack.armorModel != null && Config.enableArmor3DModels)
    {
        ModelBiped custom = RenderUtils.getArmorModel(pack, entityLiving);
        if(custom != null)
        {
            return custom;
        }
    }
    return super.getArmorModel(entityLiving, stack, armorSlot, _default);
}
// control
/**
 * Dispatches mod keybind presses to the pack's toggle / mode-switch
 * handlers; showInChat controls whether the pack reports the change.
 */
@Override
public void onKeyPressed(ItemStack stack, EntityPlayer player, ModKey key, boolean showInChat)
{
    T pack = this.getPack(stack);
    if(pack != null)
    {
        switch(key)
        {
            case TOGGLE_PRIMARY:
                pack.togglePrimary(stack, player, showInChat);
                break;
            case TOGGLE_SECONDARY:
                pack.toggleSecondary(stack, player, showInChat);
                break;
            case MODE_PRIMARY:
                pack.switchModePrimary(stack, player, showInChat);
                break;
            case MODE_SECONDARY:
                pack.switchModeSecondary(stack, player, showInChat);
                break;
            /*case OPEN_PACK_GUI: TODO: Readd GUIs
            player.openGui(SimplyJetpacks.instance, GuiHandler.PACK, player.worldObj, 0, 0, 0);
            break;*/
            default:
        }
    }
}
// armor
/**
 * Fuel cost of absorbing one point of armor damage, discounted by the
 * fuel-efficiency enchantment (20% less per level, clamped to levels 0-4).
 */
protected int getFuelPerDamage(ItemStack stack, T pack)
{
    if(ModEnchantments.fuelEffeciency == null)
    {
        return pack.armorFuelPerHit;
    }
    int level = MathHelper.clampI(EnchantmentHelper.getEnchantmentLevel(ModEnchantments.fuelEffeciency, stack), 0, 4);
    return (int) Math.round(pack.armorFuelPerHit * (5 - level) / 5.0D);
}
/**
 * ISpecialArmor damage split. Armored packs absorb up to
 * 0.85 * (armorReduction / 20) of blockable damage, capped by how many hits
 * the remaining fuel can pay for (25 absorbed units per paid hit).
 * Flux-based packs fully tank "flux" damage at a fixed 0.5 ratio.
 */
@Override
public ArmorProperties getProperties(EntityLivingBase player, ItemStack armor, DamageSource source, double damage, int slot)
{
    T pack = this.getPack(armor);
    if(pack != null && pack.isArmored && !source.isUnblockable())
    {
        if(pack.isFluxBased && source.damageType.equals("flux"))
        {
            return new ArmorProperties(0, 0.5D, Integer.MAX_VALUE);
        }
        int energyPerDamage = this.getFuelPerDamage(armor, pack);
        // Zero fuel cost means the cap collapses to zero absorption.
        int maxAbsorbed = energyPerDamage > 0 ? 25 * (this.getFuelStored(armor) / energyPerDamage) : 0;
        return new ArmorProperties(0, 0.85D * (pack.armorReduction / 20.0D), maxAbsorbed);
    }
    // Non-armored packs absorb nothing.
    return new ArmorProperties(0, 1, 0);
}
/**
 * Armor points shown in the HUD: the pack's reduction while it still has
 * enough fuel for at least one hit, otherwise zero.
 */
@Override
public int getArmorDisplay(EntityPlayer player, ItemStack armor, int slot)
{
    T pack = this.getPack(armor);
    boolean canAbsorb = pack != null && pack.isArmored && this.getFuelStored(armor) >= pack.armorFuelPerHit;
    return canAbsorb ? pack.armorReduction : 0;
}
/**
 * Pays fuel for absorbed damage. Flux-based energy packs hit by "flux"
 * damage are CHARGED instead of drained (half rate when the damage has no
 * source entity); every other case consumes fuel per damage point.
 */
@Override
public void damageArmor(EntityLivingBase entity, ItemStack armor, DamageSource source, int damage, int slot)
{
    T pack = this.getPack(armor);
    if(pack != null && pack.isArmored && pack.usesFuel)
    {
        if(pack.fuelType == FuelType.ENERGY && pack.isFluxBased && source.damageType.equals("flux"))
        {
            this.addFuel(armor, damage * (source.getEntity() == null ? pack.armorFuelPerHit / 2 : this.getFuelPerDamage(armor, pack)), false);
        }
        else
        {
            this.useFuel(armor, damage * this.getFuelPerDamage(armor, pack), false);
        }
    }
}
// fuel
/**
 * Current fuel level (energy units or millibuckets, per the pack's fuel
 * type). Returns 0 when the stack's metadata maps to no known pack —
 * previously this dereferenced a null pack, unlike addFuel which guards.
 */
public int getFuelStored(ItemStack stack)
{
    T pack = this.getPack(stack);
    if(pack == null)
    {
        return 0;
    }
    switch(pack.fuelType)
    {
        case ENERGY:
        default:
            return this.getEnergyStored(stack);
        case FLUID:
            FluidStack stored = this.getFluid(stack);
            return stored != null ? stored.amount : 0;
    }
}
/**
 * Maximum fuel capacity for this stack's pack variant. Returns 0 for
 * unknown metadata — previously this dereferenced a null pack, unlike
 * addFuel which guards.
 */
public int getMaxFuelStored(ItemStack stack)
{
    T pack = this.getPack(stack);
    if(pack == null)
    {
        return 0;
    }
    switch(pack.fuelType)
    {
        case ENERGY:
        default:
            return this.getMaxEnergyStored(stack);
        case FLUID:
            return this.getCapacity(stack);
    }
}
/**
 * Inserts up to maxAdd fuel (energy units or millibuckets, per the pack's
 * fuel type) and returns the amount actually accepted; with simulate set,
 * nothing is written to NBT. Unknown metadata accepts nothing.
 */
public int addFuel(ItemStack stack, int maxAdd, boolean simulate)
{
    T pack = this.getPack(stack);
    if(pack == null)
    {
        return 0;
    }
    switch(pack.fuelType)
    {
        case ENERGY:
        default:
            int energy = this.getEnergyStored(stack);
            int energyReceived = Math.min(this.getMaxEnergyStored(stack) - energy, maxAdd);
            if(!simulate)
            {
                energy += energyReceived;
                NBTHelper.setInt(stack, TAG_ENERGY, energy);
            }
            return energyReceived;
        case FLUID:
            // A fluid pack without a configured fluid cannot store anything.
            if(pack.fuelFluid == null)
            {
                return 0;
            }
            FluidStack fluid = this.getFluid(stack);
            int amount = fluid != null ? fluid.amount : 0;
            int fluidReceived = Math.min(this.getCapacity(stack) - amount, maxAdd);
            if(!simulate)
            {
                amount += fluidReceived;
                NBTHelper.setInt(stack, TAG_FLUID, amount);
            }
            return fluidReceived;
    }
}
/**
 * Drains up to maxUse fuel and returns the amount actually removed; with
 * simulate set, NBT is untouched. Returns 0 for unknown metadata —
 * previously this dereferenced a null pack, unlike addFuel which guards.
 */
public int useFuel(ItemStack stack, int maxUse, boolean simulate)
{
    T pack = this.getPack(stack);
    if(pack == null)
    {
        return 0;
    }
    switch(pack.fuelType)
    {
        case ENERGY:
        default:
            int energy = this.getEnergyStored(stack);
            int energyExtracted = Math.min(energy, maxUse);
            if(!simulate)
            {
                energy -= energyExtracted;
                NBTHelper.setInt(stack, TAG_ENERGY, energy);
            }
            return energyExtracted;
        case FLUID:
            if(pack.fuelFluid == null)
            {
                return 0;
            }
            FluidStack fluid = this.getFluid(stack);
            int amount = fluid != null ? fluid.amount : 0;
            int fluidExtracted = Math.min(amount, maxUse);
            if(!simulate)
            {
                amount -= fluidExtracted;
                NBTHelper.setInt(stack, TAG_FLUID, amount);
            }
            return fluidExtracted;
    }
}
/**
 * IEnergyContainerItem insert: throttled by the pack's per-tick input rate
 * and remaining room; only energy-fuelled packs accept anything.
 */
@Override
public int receiveEnergy(ItemStack container, int maxReceive, boolean simulate)
{
    T pack = this.getPack(container);
    if(pack == null || pack.fuelType != FuelType.ENERGY)
    {
        return 0;
    }
    int stored = this.getEnergyStored(container);
    int room = this.getMaxEnergyStored(container) - stored;
    int accepted = Math.min(room, Math.min(maxReceive, pack.fuelPerTickIn));
    if(!simulate)
    {
        NBTHelper.setInt(container, TAG_ENERGY, stored + accepted);
    }
    return accepted;
}
/**
 * IEnergyContainerItem extract: throttled by the pack's per-tick output
 * rate and the stored amount; only energy-fuelled packs provide anything.
 */
@Override
public int extractEnergy(ItemStack container, int maxExtract, boolean simulate)
{
    T pack = this.getPack(container);
    if(pack == null || pack.fuelType != FuelType.ENERGY)
    {
        return 0;
    }
    int stored = this.getEnergyStored(container);
    int removed = Math.min(stored, Math.min(maxExtract, pack.fuelPerTickOut));
    if(!simulate)
    {
        NBTHelper.setInt(container, TAG_ENERGY, stored - removed);
    }
    return removed;
}
/** Stored energy from NBT; 0 unless this metadata maps to an energy pack. */
@Override
public int getEnergyStored(ItemStack container)
{
    T pack = this.getPack(container);
    boolean energyPack = pack != null && pack.fuelType == FuelType.ENERGY;
    return energyPack ? NBTHelper.getInt(container, TAG_ENERGY) : 0;
}
/** Energy capacity; 0 unless this metadata maps to an energy pack. */
@Override
public int getMaxEnergyStored(ItemStack container)
{
    T pack = this.getPack(container);
    boolean energyPack = pack != null && pack.fuelType == FuelType.ENERGY;
    return energyPack ? pack.fuelCapacity : 0;
}
/**
 * Stored fluid as a FluidStack built from NBT, or null when this is not a
 * fluid-fuelled pack or the tank is empty.
 */
@Override
public FluidStack getFluid(ItemStack container)
{
    T pack = this.getPack(container);
    if(pack == null || pack.fuelType != FuelType.FLUID || pack.fuelFluid == null)
    {
        return null;
    }
    int amount = NBTHelper.getInt(container, TAG_FLUID);
    if(amount <= 0)
    {
        return null;
    }
    return new FluidStack(FluidRegistry.getFluid(pack.fuelFluid), amount);
}
/** Fluid tank capacity; 0 unless this metadata maps to a fluid-fuelled pack. */
@Override
public int getCapacity(ItemStack container)
{
    T pack = this.getPack(container);
    boolean fluidPack = pack != null && pack.fuelType == FuelType.FLUID && pack.fuelFluid != null;
    return fluidPack ? pack.fuelCapacity : 0;
}
/**
 * IFluidContainerItem fill: accepts only the pack's configured fuel fluid,
 * throttled by fuelPerTickIn and remaining capacity; returns the amount
 * accepted. doFill=false simulates without writing NBT.
 */
@Override
public int fill(ItemStack container, FluidStack resource, boolean doFill)
{
    if(resource == null)
    {
        return 0;
    }
    T pack = this.getPack(container);
    if(pack == null || pack.fuelType != FuelType.FLUID || pack.fuelFluid == null || resource.getFluid() != FluidRegistry.getFluid(pack.fuelFluid))
    {
        return 0;
    }
    FluidStack fluid = this.getFluid(container);
    int amount = fluid != null ? fluid.amount : 0;
    int fluidReceived = Math.min(this.getCapacity(container) - amount, Math.min(resource.amount, pack.fuelPerTickIn));
    if(doFill)
    {
        amount += fluidReceived;
        NBTHelper.setInt(container, TAG_FLUID, amount);
    }
    return fluidReceived;
}
/**
 * IFluidContainerItem drain: removes up to maxDrain of the pack's fuel
 * fluid, throttled by fuelPerTickOut; returns the drained FluidStack or
 * null when nothing could be drained. doDrain=false simulates.
 */
@Override
public FluidStack drain(ItemStack container, int maxDrain, boolean doDrain)
{
    T pack = this.getPack(container);
    if(pack == null || pack.fuelType != FuelType.FLUID || pack.fuelFluid == null)
    {
        return null;
    }
    FluidStack fluid = this.getFluid(container);
    int amount = fluid != null ? fluid.amount : 0;
    int fluidExtracted = Math.min(amount, Math.min(maxDrain, pack.fuelPerTickOut));
    if(doDrain)
    {
        amount -= fluidExtracted;
        NBTHelper.setInt(container, TAG_FLUID, amount);
    }
    return fluidExtracted > 0 ? new FluidStack(FluidRegistry.getFluid(pack.fuelFluid), fluidExtracted) : null;
}
// HUD info
/**
 * Contributes the pack's fuel and state lines to the in-game HUD, gated by
 * the caller's flags and the pack's own indicator settings.
 */
@Override
@SideOnly(Side.CLIENT)
public void addHUDInfo(List<String> list, ItemStack stack, boolean showFuel, boolean showState)
{
    T pack = this.getPack(stack);
    if(pack == null)
    {
        return;
    }
    if(showFuel && pack.hasFuelIndicator)
    {
        list.add(pack.getHUDFuelInfo(stack, this));
    }
    if(showState && pack.hasStateIndicators)
    {
        list.add(pack.getHUDStatesInfo(stack, this));
    }
}
/** Concrete ItemPack specialisation for jetpacks. */
public static class ItemJetpack extends ItemPack<Jetpack>
{
    public ItemJetpack(ModType modType, String registryName)
    {
        super(modType, registryName);
    }
}
/** Concrete ItemPack specialisation for flux packs. */
public static class ItemFluxPack extends ItemPack<FluxPack>
{
    public ItemFluxPack(ModType modType, String registryName)
    {
        super(modType, registryName);
    }
}
/** Registers this item's render model under its registry name (client proxy). */
public void registerItemModel()
{
    SimplyJetpacks.proxy.registerItemRenderer(this, 0, name);
}
}
| |
package aQute.remote.main;
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.osgi.framework.Constants;
import aQute.lib.io.IO;
import aQute.libg.shacache.ShaCache;
import aQute.libg.shacache.ShaSource;
import aQute.remote.util.Link;
import aQute.service.reporter.Reporter;
/**
* Creates a framework and through that framework's class loader it will create
* an AgentServer.
*/
public class EnvoyDispatcher implements Closeable {
    // SHA-indexed cache of runpath jars; misses are fetched via 'source'
    // from the connected supervisor (see setRemote).
    private ShaCache cache;
    private ShaSource source;
    private Reporter main;
    // Root directory holding one storage area per named framework.
    private File storage;
    // Interface to bind ("*" = all interfaces) and TCP port to listen on.
    private String network;
    private int port;
    // Live frameworks by name, and envoy links currently open.
    private Map<String,DispatcherInfo> frameworks = new HashMap<>();
    private Set<EnvoyImpl> envoys = new HashSet<>();
/**
 * Bookkeeping for one framework launched for a supervisor: its isolated
 * class loader, the reflectively loaded AgentDispatcher class, and the
 * Closeable framework handle.
 */
class DispatcherInfo {
    String name;
    int port;
    URLClassLoader cl;
    Class< ? > dispatcher;
    Map<String,Object> properties;
    Collection<String> runpath;
    Closeable framework;
    File storage;

    // Closes the framework and deregisters this entry from 'frameworks'.
    @SuppressWarnings("deprecation")
    void close() {
        try {
            main.trace("closing framework for %s", this);
            framework.close();
            frameworks.remove(name);
        } catch (Exception e) {
            main.exception(e, "Closing framework for %s", this);
        }
    }

    public String toString() {
        return name + "(" + framework + ") [" + port + "]";
    }
}
/**
 * One supervisor connection. Implements the Envoy protocol over a Link; a
 * successful createFramework hands the socket streams over to the agent
 * running inside the target framework and then closes this envoy.
 */
public class EnvoyImpl implements Envoy {
    private Link<Envoy,EnvoySupervisor> link;

    EnvoyImpl(Socket socket) throws IOException {
        envoys.add(this);
        // Short read timeout so an unresponsive peer cannot hang the link.
        socket.setSoTimeout(500);
        this.link = new Link<>(EnvoySupervisor.class, this, socket.getInputStream(),
            socket.getOutputStream());
        // Route SHA-cache misses to this supervisor's getFile().
        setRemote(this.link.getRemote());
    }

    void open() {
        link.open();
    }

    /*
     * If the supervisor gets a true on isEnvoy() then it should first
     * create a framework and then an agent. A supervisor should first try
     * to create a framework. If this framework already exists or has a
     * different runpath/properties, we return true. Otherwise, we close a
     * previous framework under this name and create a new one with the
     * given props.
     */
    @SuppressWarnings("deprecation")
    @Override
    public boolean createFramework(String name, Collection<String> runpath, Map<String,Object> properties)
        throws Exception {
        main.trace("create framework %s - %s --- %s", name, runpath, properties);
        if (!name.matches("[a-zA-Z0-9_.$-]+"))
            throw new IllegalArgumentException("Name must match symbolic name");
        try {
            DispatcherInfo existing = frameworks.get(name);
            if (existing != null) {
                if (existing.runpath.equals(runpath) && existing.properties.equals(properties)) {
                    // Same configuration: reuse the already-running framework.
                    createAgent(existing, false);
                    return false;
                } else {
                    // existing.close() already removes the map entry; this
                    // second remove is redundant but harmless.
                    existing.close();
                    frameworks.remove(name);
                }
            }
            DispatcherInfo info = create(name, runpath, properties);
            frameworks.put(name, info);
            createAgent(info, true);
            return true;
        } catch (Exception e) {
            main.trace("creating framework %s: %s", name, e);
            main.exception(e, "creating framework");
            throw e;
        }
    }

    // Replies with 'state', then transfers the link's streams to the agent
    // inside the target framework (reflectively) and closes this envoy.
    @SuppressWarnings("deprecation")
    private void createAgent(DispatcherInfo info, boolean state) throws Exception {
        main.trace("Adding an agent for %s", info.name);
        link.transfer(state);
        Method toAgent = info.dispatcher.getMethod("toAgent", info.framework.getClass(), DataInputStream.class,
            DataOutputStream.class);
        toAgent.invoke(null, info.framework, link.getInput(), link.getOutput());
        close();
    }

    // Builds a class loader over the cached runpath jars, loads
    // AgentDispatcher inside it, and creates the framework through it.
    @SuppressWarnings("deprecation")
    private DispatcherInfo create(String name, Collection<String> runpath, Map<String,Object> properties)
        throws Exception {
        List<URL> files = new ArrayList<>();
        for (String sha : runpath) {
            // Resolve each runpath entry by SHA, fetching remotely on miss.
            files.add(cache.getFile(sha, source).toURI().toURL());
        }
        main.trace("runpath %s", files);
        DispatcherInfo info = new DispatcherInfo();
        info.name = name;
        info.cl = new URLClassLoader(files.toArray(new URL[0]));
        info.properties = new HashMap<>(properties);
        info.runpath = runpath;
        info.storage = new File(storage, name);
        info.dispatcher = info.cl.loadClass("aQute.remote.agent.AgentDispatcher");
        // Local variable intentionally shadows the outer 'storage' field.
        File storage = new File(EnvoyDispatcher.this.storage, name);
        IO.mkdirs(storage);
        if (!storage.isDirectory())
            throw new IllegalArgumentException("Cannot create framework storage " + storage);
        properties.put(Constants.FRAMEWORK_STORAGE, info.storage.getAbsolutePath());
        Method newFw = info.dispatcher.getMethod("createFramework", String.class, Map.class, File.class,
            File.class);
        info.framework = (Closeable) newFw.invoke(null, name, properties, storage, cache.getRoot());
        return info;
    }

    @Override
    public boolean isEnvoy() {
        return true;
    }

    // Deregisters this envoy and closes the link exactly once.
    public void close() throws IOException {
        if (envoys.remove(this) && link != null)
            link.close();
        link = null;
    }

    @Override
    public boolean ping() {
        return true;
    }
}
/**
 * @param main reporter used for tracing and error reporting
 * @param cache directory backing the SHA file cache
 * @param storage root directory for per-framework storage areas
 * @param network interface name to bind, or "*" for all interfaces
 * @param port TCP port to listen on
 */
public EnvoyDispatcher(Reporter main, File cache, File storage, String network, int port) {
    this.main = main;
    this.cache = new ShaCache(cache);
    this.storage = storage;
    this.network = network;
    this.port = port;
}
/**
 * Points the SHA cache at the given supervisor: cache misses are fetched
 * via remote.getFile(sha); a null result propagates as "not available".
 */
public void setRemote(final EnvoySupervisor remote) {
    this.source = new ShaSource() {
        @Override
        public boolean isFast() {
            // Remote fetches go over the wire; let the cache prefer local copies.
            return false;
        }

        @Override
        public InputStream get(String sha) throws Exception {
            byte[] data = remote.getFile(sha);
            if (data == null)
                return null;
            return new ByteArrayInputStream(data);
        }
    };
}
/**
 * Closes all open envoy links and running frameworks.
 * <p>
 * EnvoyImpl.close() removes itself from {@code envoys} and
 * DispatcherInfo.close() removes itself from {@code frameworks}; iterating
 * those live collections directly throws ConcurrentModificationException,
 * so iterate over snapshot copies instead.
 */
@Override
public void close() throws IOException {
    for (EnvoyImpl envoy : new ArrayList<>(envoys))
        envoy.close();
    for (DispatcherInfo di : new ArrayList<>(frameworks.values())) {
        di.close();
    }
}
/**
 * Accept loop: binds a server socket on the configured network/port and
 * spawns an EnvoyImpl per incoming connection. On bind or accept failure
 * the loop backs off for two seconds and retries until interrupted.
 */
@SuppressWarnings("deprecation")
public void run() {
    while (!Thread.currentThread().isInterrupted())
        try {
            // "*" means bind on all interfaces (null address).
            InetAddress address = network.equals("*") ? null : InetAddress.getByName(network);
            // try-with-resources guarantees the socket is released on every
            // exit path (the previous code leaked it when trace/accept threw
            // outside the inner catch).
            try (ServerSocket server = address == null ? new ServerSocket(port)
                : new ServerSocket(port, 3, address)) {
                main.trace("Will wait for %s:%s to finish", address, port);
                while (!Thread.currentThread().isInterrupted())
                    try {
                        Socket socket = server.accept();
                        main.trace("Got a request on %s", socket);
                        EnvoyImpl envoyImpl = new EnvoyImpl(socket);
                        envoyImpl.open();
                    } catch (Exception e) {
                        main.exception(e, "while listening for incoming requests on %s:%s", network, port);
                        break;
                    }
            }
        } catch (Exception e) {
            try {
                Thread.sleep(2000);
            } catch (InterruptedException e1) {
                // Preserve the interrupt so the outer loop terminates
                // (previously the flag was silently swallowed).
                Thread.currentThread().interrupt();
            }
        }
    try {
        close();
    } catch (IOException e) {
        // best effort on shutdown
    }
}
}
| |
/* Copyright (C) 2003 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
package cc.mallet.grmm.inference.gbp;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.util.*;
import cc.mallet.grmm.inference.AbstractInferencer;
import cc.mallet.grmm.types.*;
import cc.mallet.util.MalletLogger;
import cc.mallet.util.Timing;
/**
* Created: May 27, 2005
*
 * @author <A HREF="mailto:casutton@cs.umass.edu">casutton@cs.umass.edu</A>
* @version $Id: ParentChildGBP.java,v 1.1 2007/10/22 21:37:58 mccallum Exp $
*/
public class ParentChildGBP extends AbstractInferencer {

  private static final Logger logger = MalletLogger.getLogger (ParentChildGBP.class.getName());
  private static final boolean debug = false;

  // Pluggable strategies: how the region graph is constructed and how
  // messages are propagated along its edges.
  private RegionGraphGenerator regioner;
  private MessageStrategy sender;

  // Message damping ("inertia"): new messages are averaged with the old
  // ones using inertiaWeight after every iteration (see computeMarginals).
  private boolean useInertia = true;
  private double inertiaWeight = 0.5;

  // convergence criteria
  private static final double THRESHOLD = 1e-3;
  private static final int MAX_ITER = 500;

  // current inferencing state
  private MessageArray oldMessages;
  private MessageArray newMessages;
  private RegionGraph rg;
  private FactorGraph mdl;

  // Private no-arg constructor: used by the static factory methods below.
  private ParentChildGBP ()
  {
  }

  public ParentChildGBP (RegionGraphGenerator regioner)
  {
    this (regioner, new FullMessageStrategy ());
  }

  public ParentChildGBP (RegionGraphGenerator regioner, MessageStrategy sender)
  {
    this.regioner = regioner;
    this.sender = sender;
  }
/** Factory: a GBP instance whose regions reproduce ordinary loopy BP. */
public static ParentChildGBP makeBPInferencer ()
{
    ParentChildGBP bp = new ParentChildGBP ();
    bp.regioner = new BPRegionGenerator ();
    bp.sender = new FullMessageStrategy ();
    return bp;
}
/** Factory: a GBP instance over Kikuchi four-square regions. */
public static ParentChildGBP makeKikuchiInferencer ()
{
    ParentChildGBP kikuchi = new ParentChildGBP ();
    kikuchi.regioner = new Kikuchi4SquareRegionGenerator ();
    kikuchi.sender = new FullMessageStrategy ();
    return kikuchi;
}
// accessors
/** Whether message damping (old/new averaging) is applied each iteration. */
public boolean getUseInertia ()
{
    return useInertia;
}
/** Enables or disables message damping for subsequent computeMarginals calls. */
public void setUseInertia (boolean useInertia)
{
    this.useInertia = useInertia;
}
/** Weight used when averaging old and new messages (default 0.5). */
public double getInertiaWeight ()
{
    return inertiaWeight;
}
/** Sets the damping weight used when averaging old and new messages. */
public void setInertiaWeight (double inertiaWeight)
{
    this.inertiaWeight = inertiaWeight;
}
// inferencer interface
/**
 * Single-variable marginal: locate a region covering the variable, compute
 * its belief, and marginalize the belief down to that variable.
 */
public Factor lookupMarginal (Variable variable)
{
    Region region = rg.findContainingRegion (variable);
    if (region == null)
        throw new IllegalArgumentException ("Could not find region containing variable "+variable+" in region graph "+rg);
    return computeBelief (region).marginalize (variable);
}
/**
 * Clique marginal: locate a region covering the variable set, compute its
 * belief, and marginalize the belief down to that set.
 */
public Factor lookupMarginal (VarSet varSet)
{
    Region region = rg.findContainingRegion (varSet);
    if (region == null)
        throw new IllegalArgumentException ("Could not find region containing clique "+varSet +" in region graph "+rg);
    return computeBelief (region).marginalize (varSet);
}
/** Belief at a region under the most recent message array. */
private Factor computeBelief (Region region)
{
    return computeBelief (region, newMessages);
}
/**
 * Computes the normalized GBP belief at a region: the product of the
 * region's own factors, the messages from its parents, and the messages
 * sent into its descendants from regions outside this region's subtree.
 */
static Factor computeBelief (Region region, MessageArray messages)
{
    DiscreteFactor result = new LogTableFactor(region.vars);
    for (Iterator it = region.factors.iterator(); it.hasNext();) {
        Factor factor = (Factor) it.next();
        result.multiplyBy(factor);
    }
    for (Iterator it = region.parents.iterator(); it.hasNext();) {
        Region parent = (Region) it.next();
        Factor msg = messages.getMessage(parent, region);
        result.multiplyBy(msg);
    }
    for (Iterator it = region.descendants.iterator(); it.hasNext();) {
        Region child = (Region) it.next();
        for (Iterator it2 = child.parents.iterator(); it2.hasNext();) {
            Region uncle = (Region) it2.next();
            // Only messages crossing into the subtree count; senders that
            // are this region or its own descendants are internal.
            if (uncle != region && !region.descendants.contains(uncle)) {
                result.multiplyBy(messages.getMessage(uncle, child));
            }
        }
    }
    result.normalize();
    return result;
}
/**
 * Log joint score of a full assignment: the model's log factor product
 * plus the region-graph free energy computed from the current beliefs.
 */
public double lookupLogJoint (Assignment assn)
{
    double factorProduct = mdl.logValue (assn);
    // value += computeFreeEnergy (rg);
    double F = computeFreeEnergy (rg);
    double value = factorProduct + F;
    if (debug)
        System.err.println ("GBP factor product:"+factorProduct+" + free energy: "+F+" = value:"+value);
    return value;
}
/**
 * Region-graph free energy: the counting-number-weighted average energy
 * minus the counting-number-weighted entropy, accumulated over all regions
 * using the current beliefs.
 */
private double computeFreeEnergy (RegionGraph rg)
{
    double avgEnergy = 0;
    double entropy = 0;
    for (Iterator it = rg.iterator (); it.hasNext();) {
        Region region = (Region) it.next();
        Factor belief = computeBelief(region);
        double thisEntropy = belief.entropy();
        if (debug)
            System.err.println("Region " + region + " c:" + region.countingNumber + " entropy:" + thisEntropy);
        entropy += region.countingNumber * thisEntropy;
        // Energy term: product of the region's own factors (no messages).
        DiscreteFactor product = new LogTableFactor(belief.varSet());
        for (Iterator ptlIt = region.factors.iterator(); ptlIt.hasNext();) {
            Factor ptl = (Factor) ptlIt.next();
            product.multiplyBy(ptl);
        }
        // Average energy = sum over assignments of belief * (-log product).
        double thisAvgEnergy = 0;
        for (AssignmentIterator assnIt = belief.assignmentIterator(); assnIt.hasNext();) {
            Assignment assn = assnIt.assignment();
            // Note: Do not use assnIt here before fixing variable ordering issues.
            double thisEnergy = -product.logValue(assn);
            // double thisEnergy = product.phi (assnIt);
            double thisBel = belief.value(assn);
            thisAvgEnergy += thisBel * thisEnergy;
            assnIt.advance();
        }
        if (debug) {
            System.err.println("Region " + region + " c:" + region.countingNumber + " avgEnergy: " + thisAvgEnergy);
            /* DiscretePotential b2 = belief.duplicate ();
            b2.delogify ();
            System.err.println ("BELIEF:"+b2);
            System.err.println ("ENERGY:"+product);
            */
        }
        avgEnergy += region.countingNumber * thisAvgEnergy;
    }
    if (debug)
        System.err.println ("GBP computeFreeEnergy: avgEnergy:"+avgEnergy+" entropy:"+entropy+" free energy:"+(avgEnergy-entropy));
    // return avgEnergy + entropy;
    return avgEnergy - entropy;
}
/**
 * Runs parent-child GBP on the model: constructs the region graph, then
 * iterates message passing (optionally damped by inertia averaging) until
 * messages stop changing (see hasConverged) or MAX_ITER is reached.
 */
public void computeMarginals (FactorGraph mdl)
{
    Timing timing = new Timing ();
    this.mdl = mdl;
    rg = regioner.constructRegionGraph (mdl);
    RegionEdge[] pairs = chooseMessageSendingOrder ();
    newMessages = new MessageArray (rg);
    timing.tick ("GBP Region Graph construction");
    int iter = 0;
    do {
        // Snapshot the previous messages so convergence can be measured.
        oldMessages = newMessages;
        newMessages = oldMessages.duplicate ();
        sender.setMessageArray (oldMessages, newMessages);
        for (int i = 0; i < pairs.length; i++) {
            RegionEdge edge = pairs[i];
            sender.sendMessage (edge);
        }
        if (logger.isLoggable (Level.FINER)) {
            timing.tick ("GBP iteration "+iter);
        }
        iter++;
        // Damping: blend new messages with the previous iteration's.
        if (useInertia)
            newMessages = sender.averageMessages (rg, oldMessages, newMessages, inertiaWeight);
    } while (!hasConverged () && (iter < MAX_ITER));
    logger.info ("GBP: Used "+iter+" iterations.");
    if (iter >= MAX_ITER) {
        logger.warning ("***WARNING: GBP not converged!");
    }
}
/**
 * Orders region-graph edges so that messages into smaller target regions
 * are sent first (ascending by the receiving region's variable count).
 */
private RegionEdge[] chooseMessageSendingOrder ()
{
    List edges = new ArrayList ();
    for (Iterator it = rg.edgeIterator (); it.hasNext();) {
        edges.add (it.next ());
    }
    Collections.sort (edges, new Comparator () {
        public int compare (Object o1, Object o2)
        {
            int size1 = ((RegionEdge) o1).to.vars.size ();
            int size2 = ((RegionEdge) o2).to.vars.size ();
            return size1 < size2 ? -1 : (size1 == size2 ? 0 : 1);
        }
    });
    return (RegionEdge[]) edges.toArray (new RegionEdge [edges.size ()]);
}
/**
 * True when every edge's message differs from the previous iteration's by
 * less than THRESHOLD (a null old message must still be null).
 */
private boolean hasConverged ()
{
    for (Iterator it = rg.edgeIterator (); it.hasNext();) {
        RegionEdge edge = (RegionEdge) it.next ();
        Factor oldMsg = oldMessages.getMessage (edge.from, edge.to);
        Factor newMsg = newMessages.getMessage (edge.from, edge.to);
        if (oldMsg == null) {
            assert newMsg == null;
        } else {
            if (!oldMsg.almostEquals (newMsg, THRESHOLD)) {
                /*
                //xxx debug
                if (sender instanceof SparseMessageSender)
                System.out.println ("NOT CONVERGED:\n"+newMsg+"\n.......");
                */
                return false;
            }
        }
    }
    return true;
}
/** Debug helper: prints every edge's most recent message to stdout. */
public void dump ()
{
    for (Iterator it = rg.edgeIterator (); it.hasNext();) {
        RegionEdge edge = (RegionEdge) it.next ();
        Factor latest = newMessages.getMessage (edge.from, edge.to);
        System.out.println ("Message: "+edge.from+" --> "+edge.to+" "+latest);
    }
}
}
| |
package io.hawt.web.proxy;
import java.util.*;
import java.util.regex.Pattern;
import javax.servlet.http.HttpServletRequest;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ProxyDetailsTest {
// Path of form /user:pass@host/port/path parses credentials, host, explicit
// port, and proxy path.
@Test
public void testPathInfoWithUserPasswordPort() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/admin:admin@localhost/8181/jolokia/");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", "admin", details.getUserName());
    assertEquals("getPassword()", "admin", details.getPassword());
    assertEquals("getHost()", "localhost", details.getHost());
    assertEquals("getHostAndPort()", "localhost:8181", details.getHostAndPort());
    assertEquals("getPort()", 8181, details.getPort());
    assertEquals("getProxyPath()", "/jolokia/", details.getProxyPath());
    assertEquals("getScheme()", "http", details.getScheme());
    assertEquals("getFullProxyUrl()", "http://localhost:8181/jolokia/", details.getFullProxyUrl());
}
// An empty port segment (//) with credentials falls back to port 80 and the
// port is omitted from host-and-port and the full URL.
@Test
public void testPathInfoWithUserPasswordDefaultPort() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/admin:admin@localhost//jolokia/");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", "admin", details.getUserName());
    assertEquals("getPassword()", "admin", details.getPassword());
    assertEquals("getHost()", "localhost", details.getHost());
    assertEquals("getHostAndPort()", "localhost", details.getHostAndPort());
    assertEquals("getPort()", 80, details.getPort());
    assertEquals("getProxyPath()", "/jolokia/", details.getProxyPath());
    assertEquals("getScheme()", "http", details.getScheme());
    assertEquals("getFullProxyUrl()", "http://localhost/jolokia/", details.getFullProxyUrl());
}
// No credentials and an empty port segment: null user/password, port 80.
@Test
public void testPathInfoWithDefaultPort() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/localhost//jolokia/");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", null, details.getUserName());
    assertEquals("getPassword()", null, details.getPassword());
    assertEquals("getHost()", "localhost", details.getHost());
    assertEquals("getHostAndPort()", "localhost", details.getHostAndPort());
    assertEquals("getPort()", 80, details.getPort());
    assertEquals("getProxyPath()", "/jolokia/", details.getProxyPath());
    assertEquals("getScheme()", "http", details.getScheme());
    assertEquals("getFullProxyUrl()", "http://localhost/jolokia/", details.getFullProxyUrl());
}
// An explicit numeric port segment is parsed and included in the full URL.
@Test
public void testPathInfoWithPort() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/localhost/90/jolokia/");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", null, details.getUserName());
    assertEquals("getPassword()", null, details.getPassword());
    assertEquals("getHost()", "localhost", details.getHost());
    assertEquals("getHostAndPort()", "localhost:90", details.getHostAndPort());
    assertEquals("getPort()", 90, details.getPort());
    assertEquals("getProxyPath()", "/jolokia/", details.getProxyPath());
    assertEquals("getScheme()", "http", details.getScheme());
    assertEquals("getFullProxyUrl()", "http://localhost:90/jolokia/", details.getFullProxyUrl());
}
// Spaces in the proxied path are percent-encoded (%20) in the full URL.
@Test
public void testPathInfoWithWhitespace() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/http/localhost/10001/jolokia/read/java.lang:type=MemoryManager,name=Metaspace Manager/Name");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getFullProxyUrl()", "http://localhost:10001/jolokia/read/java.lang:type=MemoryManager,name=Metaspace%20Manager/Name", details.getFullProxyUrl());
}
// With no scheme and no port segment at all, the remainder after the host
// becomes the proxy path and the port defaults to 80.
@Test
public void testDefaultPort() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/somerest-davsclaus2.rhcloud.com/cxf/crm/customerservice/customers/123");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", null, details.getUserName());
    assertEquals("getPassword()", null, details.getPassword());
    assertEquals("getHost()", "somerest-davsclaus2.rhcloud.com", details.getHost());
    assertEquals("getHostAndPort()", "somerest-davsclaus2.rhcloud.com", details.getHostAndPort());
    assertEquals("getPort()", 80, details.getPort());
    assertEquals("getProxyPath()", "/cxf/crm/customerservice/customers/123", details.getProxyPath());
    assertEquals("getScheme()", "http", details.getScheme());
    assertEquals("getFullProxyUrl()", "http://somerest-davsclaus2.rhcloud.com/cxf/crm/customerservice/customers/123", details.getFullProxyUrl());
}
// A full https:// prefix with explicit port is honoured in scheme, port and URL.
@Test
public void testHttpsUrl() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/https://www.myhost.com/443/myApp/jolokia/");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", null, details.getUserName());
    assertEquals("getPassword()", null, details.getPassword());
    assertEquals("getHost()", "www.myhost.com", details.getHost());
    assertEquals("getHostAndPort()", "www.myhost.com:443", details.getHostAndPort());
    assertEquals("getPort()", 443, details.getPort());
    assertEquals("getProxyPath()", "/myApp/jolokia/", details.getProxyPath());
    assertEquals("getScheme()", "https", details.getScheme());
    assertEquals("getFullProxyUrl()", "https://www.myhost.com:443/myApp/jolokia/", details.getFullProxyUrl());
}
// Credentials inside a scheme-prefixed URL; ignored because auth-info in
// the URL is currently unsupported (see @Ignore).
@Test
@Ignore("auth-info not supported")
public void testHttpsWithCredentialsUrl() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/https://test:user@www.myhost.com/443/myApp/jolokia/");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", "test", details.getUserName());
    assertEquals("getPassword()", "user", details.getPassword());
    assertEquals("getHost()", "www.myhost.com", details.getHost());
    assertEquals("getHostAndPort()", "www.myhost.com", details.getHostAndPort());
    assertEquals("getPort()", 443, details.getPort());
    assertEquals("getProxyPath()", "/myApp/jolokia/", details.getProxyPath());
    assertEquals("getScheme()", "https", details.getScheme());
    assertEquals("getFullProxyUrl()", "https://www.myhost.com/myApp/jolokia/", details.getFullProxyUrl());
}
// An https URL without a port segment defaults to 443 and omits the port
// from host-and-port and the full URL.
@Test
public void testHttpsUrlWithNoPort() throws Exception {
    HttpServletRequest mockReq = mock(HttpServletRequest.class);
    when(mockReq.getPathInfo()).thenReturn("/https://www.myhost.com/myApp/jolokia/");
    ProxyDetails details = new ProxyDetails(mockReq);
    assertEquals("getUserName()", null, details.getUserName());
    assertEquals("getPassword()", null, details.getPassword());
    assertEquals("getHost()", "www.myhost.com", details.getHost());
    assertEquals("getHostAndPort()", "www.myhost.com", details.getHostAndPort());
    assertEquals("getPort()", 443, details.getPort());
    assertEquals("getProxyPath()", "/myApp/jolokia/", details.getProxyPath());
    assertEquals("getScheme()", "https", details.getScheme());
    assertEquals("getFullProxyUrl()", "https://www.myhost.com/myApp/jolokia/", details.getFullProxyUrl());
}
@Test
@Ignore("Mock code must support getParameterNames/getParameterValues")
public void testWithQueryString() throws Exception {
    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getPathInfo()).thenReturn("/https://www.myhost.com/myApp/jolokia/");
    when(request.getQueryString()).thenReturn("foo=bar");

    ProxyDetails parsed = new ProxyDetails(request);

    assertEquals("getUserName()", null, parsed.getUserName());
    assertEquals("getPassword()", null, parsed.getPassword());
    assertEquals("getHost()", "www.myhost.com", parsed.getHost());
    assertEquals("getHostAndPort()", "www.myhost.com", parsed.getHostAndPort());
    assertEquals("getPort()", 443, parsed.getPort());
    assertEquals("getProxyPath()", "/myApp/jolokia/", parsed.getProxyPath());
    assertEquals("getScheme()", "https", parsed.getScheme());
    // An ordinary query parameter is forwarded on the proxied URL.
    assertEquals("getFullProxyUrl()", "https://www.myhost.com/myApp/jolokia/?foo=bar", parsed.getFullProxyUrl());
}
@Test
public void testQueryStringWithIgnoredParameter() throws Exception {
    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getPathInfo()).thenReturn("/https://www.myhost.com/myApp/jolokia/");
    // "url" is a parameter the proxy is expected to drop from the forwarded query string.
    when(request.getQueryString()).thenReturn("url=bar");

    ProxyDetails parsed = new ProxyDetails(request);

    assertEquals("getUserName()", null, parsed.getUserName());
    assertEquals("getPassword()", null, parsed.getPassword());
    assertEquals("getHost()", "www.myhost.com", parsed.getHost());
    assertEquals("getHostAndPort()", "www.myhost.com", parsed.getHostAndPort());
    assertEquals("getPort()", 443, parsed.getPort());
    assertEquals("getProxyPath()", "/myApp/jolokia/", parsed.getProxyPath());
    assertEquals("getScheme()", "https", parsed.getScheme());
    // The ignored parameter does not appear on the rebuilt URL.
    assertEquals("getFullProxyUrl()", "https://www.myhost.com/myApp/jolokia/", parsed.getFullProxyUrl());
}
@Test
public void testQueryStringWithMultipleIgnoredParameters() throws Exception {
    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getPathInfo()).thenReturn("/https://www.myhost.com/myApp/jolokia/");
    // Both "url" and "_user" are expected to be stripped from the forwarded query string.
    when(request.getQueryString()).thenReturn("url=bar&_user=test");

    ProxyDetails parsed = new ProxyDetails(request);

    assertEquals("getUserName()", null, parsed.getUserName());
    assertEquals("getPassword()", null, parsed.getPassword());
    assertEquals("getHost()", "www.myhost.com", parsed.getHost());
    assertEquals("getHostAndPort()", "www.myhost.com", parsed.getHostAndPort());
    assertEquals("getPort()", 443, parsed.getPort());
    assertEquals("getProxyPath()", "/myApp/jolokia/", parsed.getProxyPath());
    assertEquals("getScheme()", "https", parsed.getScheme());
    // With every parameter ignored, the rebuilt URL has no query string at all.
    assertEquals("getFullProxyUrl()", "https://www.myhost.com/myApp/jolokia/", parsed.getFullProxyUrl());
}
@Test
@Ignore("Mock code must support getParameterNames/getParameterValues")
public void testQueryStringWithMultipleIgnoredAndValidParameters() throws Exception {
    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getPathInfo()).thenReturn("/https://www.myhost.com/myApp/jolokia/");
    // Mixes ignored parameters ("url", "_user") with ones that must survive.
    when(request.getQueryString()).thenReturn("url=bar&search=1234&_user=test&page=4");

    ProxyDetails parsed = new ProxyDetails(request);

    assertEquals("getUserName()", null, parsed.getUserName());
    assertEquals("getPassword()", null, parsed.getPassword());
    assertEquals("getHost()", "www.myhost.com", parsed.getHost());
    assertEquals("getHostAndPort()", "www.myhost.com", parsed.getHostAndPort());
    assertEquals("getPort()", 443, parsed.getPort());
    assertEquals("getProxyPath()", "/myApp/jolokia/", parsed.getProxyPath());
    assertEquals("getScheme()", "https", parsed.getScheme());
    // Only the non-ignored parameters are forwarded, preserving their order.
    assertEquals("getFullProxyUrl()", "https://www.myhost.com/myApp/jolokia/?search=1234&page=4", parsed.getFullProxyUrl());
}
@Test
public void testIsAllowed() throws Exception {
    HttpServletRequest request = mock(HttpServletRequest.class);
    // Consecutive Mockito stubs: each ProxyDetails construction consumes the next path.
    when(request.getPathInfo())
        .thenReturn("/localhost/9000/jolokia/")
        .thenReturn("/localhost:8181/jolokia/")
        .thenReturn("/www.myhost.com/jolokia/")
        .thenReturn("/myhost1.com/jolokia/")
        .thenReturn("/myhost22.com/jolokia/")
        .thenReturn("/www.banned.com/jolokia/");

    Set<String> allowedHosts = new HashSet<>(Arrays.asList("localhost", "www.myhost.com"));
    List<Pattern> allowedHostPatterns = Collections.singletonList(Pattern.compile("myhost[0-9]+\\.com"));

    ProxyDetails localhostSlashPort = new ProxyDetails(request);
    ProxyDetails localhostColonPort = new ProxyDetails(request);
    ProxyDetails exactHost = new ProxyDetails(request);
    ProxyDetails regexHostSingleDigit = new ProxyDetails(request);
    ProxyDetails regexHostTwoDigits = new ProxyDetails(request);
    ProxyDetails bannedHost = new ProxyDetails(request);

    // Literal whitelist matches ignore the port, however it was written.
    assertTrue("localhost/9000", localhostSlashPort.isAllowed(allowedHosts));
    assertTrue("localhost:8181", localhostColonPort.isAllowed(allowedHosts));
    assertTrue("www.myhost.com", exactHost.isAllowed(allowedHosts));
    // Regex whitelist entries match by pattern rather than literal host name.
    assertTrue("myhost1.com", regexHostSingleDigit.isAllowed(allowedHostPatterns));
    assertTrue("myhost22.com", regexHostTwoDigits.isAllowed(allowedHostPatterns));
    // A host in neither whitelist is rejected.
    assertFalse("www.banned.com", bannedHost.isAllowed(allowedHosts));
}
@Test
public void testIsAllowedWithAllowAll() throws Exception {
    HttpServletRequest request = mock(HttpServletRequest.class);
    // Three stubbed paths, consumed one per ProxyDetails construction.
    when(request.getPathInfo())
        .thenReturn("/localhost/9000/jolokia/")
        .thenReturn("/www.myhost.com/jolokia/")
        .thenReturn("/www.banned.com/jolokia/");

    // The "*" wildcard entry allows every host, including otherwise-banned ones.
    Set<String> allowAll = new HashSet<>(Collections.singletonList("*"));

    ProxyDetails first = new ProxyDetails(request);
    ProxyDetails second = new ProxyDetails(request);
    ProxyDetails third = new ProxyDetails(request);

    assertTrue("localhost", first.isAllowed(allowAll));
    assertTrue("www.myhost.com", second.isAllowed(allowAll));
    assertTrue("www.banned.com", third.isAllowed(allowAll));
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.filestructurefinder;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure;
import org.supercsv.prefs.CsvPreference;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.xpack.ml.filestructurefinder.DelimitedFileStructureFinder.levenshteinFieldwiseCompareRows;
import static org.elasticsearch.xpack.ml.filestructurefinder.DelimitedFileStructureFinder.levenshteinDistance;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.not;
public class DelimitedFileStructureFinderTests extends FileStructureTestCase {
// Factory for comma-delimited samples, quote character '"'.
// NOTE(review): the remaining ctor args (2, false) presumably set a minimum column
// count and a trim flag — confirm against DelimitedFileStructureFinderFactory.
private FileStructureFinderFactory csvFactory = new DelimitedFileStructureFinderFactory(',', '"', 2, false);
// Factory for tab-delimited samples, quote character '"'.
private FileStructureFinderFactory tsvFactory = new DelimitedFileStructureFinderFactory('\t', '"', 3, false);
// Happy path: a header row plus two well-formed records is detected as delimited,
// with ',' delimiter, a header row, and "time" as an ISO8601 timestamp column.
public void testCreateConfigsGivenCompleteCsv() throws Exception {
    String csv = "time,message\n"
        + "2018-05-17T13:41:23,hello\n"
        + "2018-05-17T13:41:32,hello again\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, csv, 0.0));

    String chosenCharset = randomFrom(POSSIBLE_CHARSETS);
    Boolean bom = randomHasByteOrderMarker(chosenCharset);
    FileStructureFinder finder = csvFactory.createFromSample(explanation, csv, chosenCharset, bom,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
    FileStructure detected = finder.getStructure();

    assertEquals(FileStructure.Format.DELIMITED, detected.getFormat());
    assertEquals(chosenCharset, detected.getCharset());
    // Covers both cases: when bom is null the actual value must also be null.
    assertEquals(bom, detected.getHasByteOrderMarker());
    assertEquals("^\"?time\"?,\"?message\"?", detected.getExcludeLinesPattern());
    assertNull(detected.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), detected.getDelimiter());
    assertEquals(Character.valueOf('"'), detected.getQuote());
    assertTrue(detected.getHasHeaderRow());
    assertNull(detected.getShouldTrimFields());
    assertEquals(Arrays.asList("time", "message"), detected.getColumnNames());
    assertNull(detected.getGrokPattern());
    assertEquals("time", detected.getTimestampField());
    assertEquals(Collections.singletonList("ISO8601"), detected.getJodaTimestampFormats());
}
// A mostly-valid CSV containing two malformed rows. Detection must fail with the
// stricter tolerance argument (0.05) but succeed with the looser one (0.10)
// (NOTE(review): presumably a permitted bad-row proportion — confirm against the
// factory), and the malformed rows must be excluded from the analysed messages.
public void testCreateConfigsGivenIncompleteCsv() throws Exception {
    String sample = "time,message\n" +
        "2018-05-17T13:41:23,hello\n" +
        "badrow\n" + // REALLY bad row
        "2018-05-17T13:41:25,hello\n" +
        "2018-05-17T13:41:26,hello\n" +
        "2018-05-17T13:41:27,hello\n" +
        "2018-05-17T13:41:28,hello\n" +
        "2018-05-17T13:41:29,hello\n" +
        "2018-05-17T13:41:30,hello\n" +
        "2018-05-17T13:41:31,hello\n" +
        "2018-05-17T13:41:32,hello\n" +
        "2018-05-17T13:41:35\n" + // Just missing the column
        "2018-05-17T13:41:33,hello again\n";
    assertFalse(csvFactory.canCreateFromSample(explanation, sample, 0.05));
    assertTrue("assertion failed. Explanation " + explanation,
        csvFactory.canCreateFromSample(explanation, sample, 0.10));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    assertEquals("^\"?time\"?,\"?message\"?", structure.getExcludeLinesPattern());
    assertEquals("time", structure.getTimestampField());
    assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
    assertEquals(Arrays.asList("time", "message"), structure.getColumnNames());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    // 12 data rows minus the two bad ones: only 10 messages are analysed.
    // Fixed argument order: JUnit expects (expected, actual), matching the rest of this file.
    assertEquals(10, structure.getNumMessagesAnalyzed());
    assertTrue(structure.getHasHeaderRow());
    assertNull(structure.getMultilineStartPattern());
    assertNull(structure.getShouldTrimFields());
    assertNull(structure.getGrokPattern());
}
// Like testCreateConfigsGivenIncompleteCsv, but every good record spans multiple
// physical lines inside a quoted field. A quote override is supplied so rows are
// merged correctly, and a multi-line start pattern anchored on the leading
// timestamp is expected in the detected structure.
public void testCreateConfigsGivenIncompleteCsvWithMultiLinedRows() throws Exception {
    String sample = "time,message\n" +
        "2018-05-17T13:41:23,\"hello\nnew line\"\n" +
        "\"badrow\n\n\n\n\"\n" + // REALLY bad row
        "2018-05-17T13:41:25,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:26,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:27,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:28,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:29,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:30,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:31,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:32,\"hello\nnew line\"\n" +
        "2018-05-17T13:41:35\n" + // Just missing the column
        "2018-05-17T13:41:33,\"hello again\nnew line\"\n";
    assertFalse(csvFactory.canCreateFromSample(explanation, sample, 0.05));
    assertTrue("assertion failed. Explanation " + explanation,
        csvFactory.canCreateFromSample(explanation, sample, 0.10));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT,
        FileStructureOverrides.builder().setQuote('"').build(),
        NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    assertEquals("^\"?time\"?,\"?message\"?", structure.getExcludeLinesPattern());
    assertEquals("time", structure.getTimestampField());
    assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
    assertEquals(Arrays.asList("time", "message"), structure.getColumnNames());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    // The two bad rows are excluded from the analysed messages.
    // Fixed argument order: JUnit expects (expected, actual), matching the rest of this file.
    assertEquals(10, structure.getNumMessagesAnalyzed());
    assertTrue(structure.getHasHeaderRow());
    // Records start with an ISO-style timestamp, so continuation lines can be merged.
    assertEquals("^\"?\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}", structure.getMultilineStartPattern());
    assertNull(structure.getShouldTrimFields());
    assertNull(structure.getGrokPattern());
}
// Verifies that a column-names override replaces the names read from the header row
// ("time,message") while the rest of the detected structure is unchanged.
public void testCreateConfigsGivenCompleteCsvAndColumnNamesOverride() throws Exception {
    FileStructureOverrides overrides = FileStructureOverrides.builder().setColumnNames(Arrays.asList("my_time", "my_message")).build();
    String sample = "time,message\n" +
        "2018-05-17T13:41:23,hello\n" +
        "2018-05-17T13:41:32,hello again\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, sample, 0.0));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    // The exclude-lines pattern is still built from the real header values, not the overrides.
    assertEquals("^\"?time\"?,\"?message\"?", structure.getExcludeLinesPattern());
    assertNull(structure.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    assertTrue(structure.getHasHeaderRow());
    assertNull(structure.getShouldTrimFields());
    // The overridden names win, and the timestamp field uses the overridden name too.
    assertEquals(Arrays.asList("my_time", "my_message"), structure.getColumnNames());
    assertNull(structure.getGrokPattern());
    assertEquals("my_time", structure.getTimestampField());
    assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
}
// Verifies that forcing hasHeaderRow=false makes the header line count as data:
// synthetic column names are generated and no timestamp field is reported.
public void testCreateConfigsGivenCompleteCsvAndHasHeaderRowOverride() throws Exception {
    // It's obvious the first row really should be a header row, so by overriding
    // detection with the wrong choice the results will be completely changed
    FileStructureOverrides overrides = FileStructureOverrides.builder().setHasHeaderRow(false).build();
    String sample = "time,message\n" +
        "2018-05-17T13:41:23,hello\n" +
        "2018-05-17T13:41:32,hello again\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, sample, 0.0));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    // No header row means there is no header line to exclude.
    assertNull(structure.getExcludeLinesPattern());
    assertNull(structure.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    assertFalse(structure.getHasHeaderRow());
    assertNull(structure.getShouldTrimFields());
    assertEquals(Arrays.asList("column1", "column2"), structure.getColumnNames());
    assertNull(structure.getGrokPattern());
    // NOTE(review): presumably no timestamp field is chosen because the header line,
    // now treated as data, breaks timestamp-format consistency — confirm.
    assertNull(structure.getTimestampField());
    assertNull(structure.getJodaTimestampFormats());
}
// A sample whose final record is cut off inside a quoted field must still analyse
// cleanly; records are anchored by the timestamp-based multi-line start pattern.
public void testCreateConfigsGivenCsvWithIncompleteLastRecord() throws Exception {
    String csv = "time,message,count\n"
        + "2018-05-17T13:41:23,\"hello\n"
        + "world\",1\n"
        + "2019-01-18T14:46:57,\"hello again\n"; // note that this last record is truncated
    assertTrue(csvFactory.canCreateFromSample(explanation, csv, 0.0));

    String chosenCharset = randomFrom(POSSIBLE_CHARSETS);
    Boolean bom = randomHasByteOrderMarker(chosenCharset);
    FileStructureFinder finder = csvFactory.createFromSample(explanation, csv, chosenCharset, bom,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
    FileStructure detected = finder.getStructure();

    assertEquals(FileStructure.Format.DELIMITED, detected.getFormat());
    assertEquals(chosenCharset, detected.getCharset());
    // Covers both cases: when bom is null the actual value must also be null.
    assertEquals(bom, detected.getHasByteOrderMarker());
    assertEquals("^\"?time\"?,\"?message\"?,\"?count\"?", detected.getExcludeLinesPattern());
    assertEquals("^\"?\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}", detected.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), detected.getDelimiter());
    assertEquals(Character.valueOf('"'), detected.getQuote());
    assertTrue(detected.getHasHeaderRow());
    assertNull(detected.getShouldTrimFields());
    assertEquals(Arrays.asList("time", "message", "count"), detected.getColumnNames());
    assertNull(detected.getGrokPattern());
    assertEquals("time", detected.getTimestampField());
    assertEquals(Collections.singletonList("ISO8601"), detected.getJodaTimestampFormats());
}
// Header and every data row end with two empty fields (the ",," suffix); the two
// nameless header columns become synthetic names "column18"/"column19".
public void testCreateConfigsGivenCsvWithTrailingNulls() throws Exception {
    String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
        "store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
        "improvement_surcharge,total_amount,,\n" +
        "2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, sample, 0.0));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    // The exclude pattern includes the two empty header fields as optional-quoted blanks.
    assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
        "\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
        "\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?,\"?\"?,\"?\"?",
        structure.getExcludeLinesPattern());
    assertNull(structure.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    assertTrue(structure.getHasHeaderRow());
    assertNull(structure.getShouldTrimFields());
    assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance",
        "RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax",
        "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames());
    assertNull(structure.getGrokPattern());
    // With no override, the first column with a consistent timestamp format is picked.
    assertEquals("tpep_pickup_datetime", structure.getTimestampField());
    assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
}
// Same trailing-nulls sample as above, but with the timestamp field forced to the
// second timestamp column via an override.
public void testCreateConfigsGivenCsvWithTrailingNullsAndOverriddenTimeField() throws Exception {
    // Default timestamp field is the first field from the start of each row that contains a
    // consistent timestamp format, so if we want the second we need an override
    FileStructureOverrides overrides = FileStructureOverrides.builder().setTimestampField("tpep_dropoff_datetime").build();
    String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
        "store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
        "improvement_surcharge,total_amount,,\n" +
        "2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, sample, 0.0));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
        "\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
        "\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?,\"?\"?,\"?\"?",
        structure.getExcludeLinesPattern());
    assertNull(structure.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    assertTrue(structure.getHasHeaderRow());
    assertNull(structure.getShouldTrimFields());
    assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance",
        "RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax",
        "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames());
    assertNull(structure.getGrokPattern());
    // The overridden timestamp field wins over the default (first timestamp column).
    assertEquals("tpep_dropoff_datetime", structure.getTimestampField());
    assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
}
// Variant where the header has only the 17 named columns but each data row still
// carries two trailing empty fields: only the 17 header names are reported.
public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeader() throws Exception {
    String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
        "store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
        "improvement_surcharge,total_amount\n" +
        "2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, sample, 0.0));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    // No blank trailing entries in the exclude pattern, unlike the trailing-nulls header case.
    assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
        "\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
        "\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?",
        structure.getExcludeLinesPattern());
    assertNull(structure.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    assertTrue(structure.getHasHeaderRow());
    assertNull(structure.getShouldTrimFields());
    assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance",
        "RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax",
        "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount"), structure.getColumnNames());
    assertNull(structure.getGrokPattern());
    assertEquals("tpep_pickup_datetime", structure.getTimestampField());
    assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
}
// Combines the trailing-nulls-except-header sample with a full 17-name column-names
// override: the overridden names (my_*) replace the header names everywhere,
// including the chosen timestamp field.
public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeaderAndColumnNamesOverride() throws Exception {
    FileStructureOverrides overrides = FileStructureOverrides.builder()
        .setColumnNames(Arrays.asList("my_VendorID", "my_tpep_pickup_datetime", "my_tpep_dropoff_datetime", "my_passenger_count",
            "my_trip_distance", "my_RatecodeID", "my_store_and_fwd_flag", "my_PULocationID", "my_DOLocationID", "my_payment_type",
            "my_fare_amount", "my_extra", "my_mta_tax", "my_tip_amount", "my_tolls_amount", "my_improvement_surcharge",
            "my_total_amount")).build();
    String sample = "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID," +
        "store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount," +
        "improvement_surcharge,total_amount\n" +
        "2,2016-12-31 15:15:01,2016-12-31 15:15:09,1,.00,1,N,264,264,2,1,0,0.5,0,0,0.3,1.8,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:10:22,1,1.60,1,N,163,143,2,9,0.5,0.5,0,0,0.3,10.3,,\n" +
        "1,2016-12-01 00:00:01,2016-12-01 00:11:01,1,1.40,1,N,164,229,1,9,0.5,0.5,2.05,0,0.3,12.35,,\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, sample, 0.0));
    String charset = randomFrom(POSSIBLE_CHARSETS);
    Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
    FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, overrides, NOOP_TIMEOUT_CHECKER);
    FileStructure structure = structureFinder.getStructure();
    assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
    assertEquals(charset, structure.getCharset());
    if (hasByteOrderMarker == null) {
        assertNull(structure.getHasByteOrderMarker());
    } else {
        assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
    }
    // The exclude-lines pattern is still derived from the real header, not the overrides.
    assertEquals("^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?," +
        "\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?," +
        "\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?",
        structure.getExcludeLinesPattern());
    assertNull(structure.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), structure.getDelimiter());
    assertEquals(Character.valueOf('"'), structure.getQuote());
    assertTrue(structure.getHasHeaderRow());
    assertNull(structure.getShouldTrimFields());
    assertEquals(Arrays.asList("my_VendorID", "my_tpep_pickup_datetime", "my_tpep_dropoff_datetime", "my_passenger_count",
        "my_trip_distance", "my_RatecodeID", "my_store_and_fwd_flag", "my_PULocationID", "my_DOLocationID", "my_payment_type",
        "my_fare_amount", "my_extra", "my_mta_tax", "my_tip_amount", "my_tolls_amount", "my_improvement_surcharge", "my_total_amount"),
        structure.getColumnNames());
    assertNull(structure.getGrokPattern());
    assertEquals("my_tpep_pickup_datetime", structure.getTimestampField());
    assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats());
}
// Fully-quoted CSV whose timestamp is the LAST column, with fractional seconds:
// detection must still find it and report a microsecond-precision format.
public void testCreateConfigsGivenCsvWithTimeLastColumn() throws Exception {
    String csv = "\"pos_id\",\"trip_id\",\"latitude\",\"longitude\",\"altitude\",\"timestamp\"\n"
        + "\"1\",\"3\",\"4703.7815\",\"1527.4713\",\"359.9\",\"2017-01-19 16:19:04.742113\"\n"
        + "\"2\",\"3\",\"4703.7815\",\"1527.4714\",\"359.9\",\"2017-01-19 16:19:05.741890\"\n";
    assertTrue(csvFactory.canCreateFromSample(explanation, csv, 0.0));

    String chosenCharset = randomFrom(POSSIBLE_CHARSETS);
    Boolean bom = randomHasByteOrderMarker(chosenCharset);
    FileStructureFinder finder = csvFactory.createFromSample(explanation, csv, chosenCharset, bom,
        FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
    FileStructure detected = finder.getStructure();

    assertEquals(FileStructure.Format.DELIMITED, detected.getFormat());
    assertEquals(chosenCharset, detected.getCharset());
    // Covers both cases: when bom is null the actual value must also be null.
    assertEquals(bom, detected.getHasByteOrderMarker());
    assertEquals("^\"?pos_id\"?,\"?trip_id\"?,\"?latitude\"?,\"?longitude\"?,\"?altitude\"?,\"?timestamp\"?",
        detected.getExcludeLinesPattern());
    assertNull(detected.getMultilineStartPattern());
    assertEquals(Character.valueOf(','), detected.getDelimiter());
    assertEquals(Character.valueOf('"'), detected.getQuote());
    assertTrue(detected.getHasHeaderRow());
    assertNull(detected.getShouldTrimFields());
    assertEquals(Arrays.asList("pos_id", "trip_id", "latitude", "longitude", "altitude", "timestamp"), detected.getColumnNames());
    assertNull(detected.getGrokPattern());
    assertEquals("timestamp", detected.getTimestampField());
    assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), detected.getJodaTimestampFormats());
}
    public void testCreateConfigsGivenTsvWithSyslogLikeTimestamp() throws Exception {
        // Tab-separated sample: the "loc" column is quoted (it contains a comma) and the
        // Timestamp column mixes 2-digit ("Jun 30") and 1-digit ("Jul 1") days, syslog-style.
        String sample = "Latitude\tLongitude\tloc\tTimestamp\n" +
            "25.78042\t18.441196\t\"25.7804200000,18.4411960000\"\tJun 30 2019 13:21:24\n" +
            "25.743484\t18.443047\t\"25.7434840000,18.4430470000\"\tJun 30 2019 06:02:35\n" +
            "25.744583\t18.442783\t\"25.7445830000,18.4427830000\"\tJun 30 2019 06:02:35\n" +
            "25.754593\t18.431637\t\"25.7545930000,18.4316370000\"\tJul 1 2019 06:02:43\n" +
            "25.768574\t18.433483\t\"25.7685740000,18.4334830000\"\tJul 1 2019 06:21:28\n" +
            "25.757736\t18.438683\t\"25.7577360000,18.4386830000\"\tJul 1 2019 12:06:08\n" +
            "25.76615\t18.436565\t\"25.7661500000,18.4365650000\"\tJul 1 2019 12:06:08\n" +
            "25.76896\t18.43586\t\"25.7689600000,18.4358600000\"\tJul 1 2019 12:13:50\n" +
            "25.76423\t18.43705\t\"25.7642300000,18.4370500000\"\tJul 1 2019 12:39:10\n";
        assertTrue(tsvFactory.canCreateFromSample(explanation, sample, 0.0));
        String charset = randomFrom(POSSIBLE_CHARSETS);
        Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
        FileStructureFinder structureFinder = tsvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
            FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
        FileStructure structure = structureFinder.getStructure();
        assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
        assertEquals(charset, structure.getCharset());
        if (hasByteOrderMarker == null) {
            assertNull(structure.getHasByteOrderMarker());
        } else {
            assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
        }
        // Tabs in the header are escaped as \t in the exclude-lines regex.
        assertEquals("^\"?Latitude\"?\\t\"?Longitude\"?\\t\"?loc\"?\\t\"?Timestamp\"?",
            structure.getExcludeLinesPattern());
        assertNull(structure.getMultilineStartPattern());
        assertEquals(Character.valueOf('\t'), structure.getDelimiter());
        assertEquals(Character.valueOf('"'), structure.getQuote());
        assertTrue(structure.getHasHeaderRow());
        assertNull(structure.getShouldTrimFields());
        assertEquals(Arrays.asList("Latitude", "Longitude", "loc", "Timestamp"), structure.getColumnNames());
        assertNull(structure.getGrokPattern());
        assertEquals("Timestamp", structure.getTimestampField());
        // NOTE(review): the expected list repeats "MMM d YYYY HH:mm:ss" twice — confirm whether a
        // distinct third format (e.g. double-space day padding) was intended here.
        assertEquals(Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"),
            structure.getJodaTimestampFormats());
    }
    public void testCreateConfigsGivenDotInFieldName() throws Exception {
        // A dot in a header field ("time.iso8601") must be sanitised in the derived column
        // names, while the raw-text patterns keep the original spelling.
        String sample = "time.iso8601,message\n" +
            "2018-05-17T13:41:23,hello\n" +
            "2018-05-17T13:41:32,hello again\n";
        assertTrue(csvFactory.canCreateFromSample(explanation, sample, 0.0));
        String charset = randomFrom(POSSIBLE_CHARSETS);
        Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
        FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker,
            FileStructureFinderManager.DEFAULT_LINE_MERGE_SIZE_LIMIT, FileStructureOverrides.EMPTY_OVERRIDES, NOOP_TIMEOUT_CHECKER);
        FileStructure structure = structureFinder.getStructure();
        assertEquals(FileStructure.Format.DELIMITED, structure.getFormat());
        assertEquals(charset, structure.getCharset());
        if (hasByteOrderMarker == null) {
            assertNull(structure.getHasByteOrderMarker());
        } else {
            assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker());
        }
        // The exclude pattern needs to work on the raw text, so reflects the unmodified field names
        assertEquals("^\"?time\\.iso8601\"?,\"?message\"?", structure.getExcludeLinesPattern());
        assertNull(structure.getMultilineStartPattern());
        assertEquals(Character.valueOf(','), structure.getDelimiter());
        assertEquals(Character.valueOf('"'), structure.getQuote());
        assertTrue(structure.getHasHeaderRow());
        assertNull(structure.getShouldTrimFields());
        // Column names replace the dot with an underscore ("time_iso8601").
        assertEquals(Arrays.asList("time_iso8601", "message"), structure.getColumnNames());
        assertNull(structure.getGrokPattern());
        assertEquals("time_iso8601", structure.getTimestampField());
        assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats());
    }
    public void testFindHeaderFromSampleGivenHeaderInSample() throws IOException {
        // When the first row is clearly a header, findHeaderFromSample reports
        // (true, <header field names>).
        String withHeader = "time,airline,responsetime,sourcetype\n" +
            "2014-06-23 00:00:00Z,AAL,132.2046,farequote\n" +
            "2014-06-23 00:00:00Z,JZA,990.4628,farequote\n" +
            "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n" +
            "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n";
        Tuple<Boolean, String[]> header = DelimitedFileStructureFinder.findHeaderFromSample(explanation,
            DelimitedFileStructureFinder.readRows(withHeader, CsvPreference.EXCEL_PREFERENCE, NOOP_TIMEOUT_CHECKER).v1(),
            FileStructureOverrides.EMPTY_OVERRIDES);
        assertTrue(header.v1());
        assertThat(header.v2(), arrayContaining("time", "airline", "responsetime", "sourcetype"));
    }
    public void testFindHeaderFromSampleGivenHeaderNotInSample() throws IOException {
        // Without a header row the result is (false, <one empty name per column>).
        String noHeader = "2014-06-23 00:00:00Z,AAL,132.2046,farequote\n" +
            "2014-06-23 00:00:00Z,JZA,990.4628,farequote\n" +
            "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n" +
            "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n";
        Tuple<Boolean, String[]> header = DelimitedFileStructureFinder.findHeaderFromSample(explanation,
            DelimitedFileStructureFinder.readRows(noHeader, CsvPreference.EXCEL_PREFERENCE, NOOP_TIMEOUT_CHECKER).v1(),
            FileStructureOverrides.EMPTY_OVERRIDES);
        assertFalse(header.v1());
        assertThat(header.v2(), arrayContaining("", "", "", ""));
    }
public void testLevenshteinDistance() {
assertEquals(0, levenshteinDistance("cat", "cat"));
assertEquals(3, levenshteinDistance("cat", "dog"));
assertEquals(5, levenshteinDistance("cat", "mouse"));
assertEquals(3, levenshteinDistance("cat", ""));
assertEquals(3, levenshteinDistance("dog", "cat"));
assertEquals(0, levenshteinDistance("dog", "dog"));
assertEquals(4, levenshteinDistance("dog", "mouse"));
assertEquals(3, levenshteinDistance("dog", ""));
assertEquals(5, levenshteinDistance("mouse", "cat"));
assertEquals(4, levenshteinDistance("mouse", "dog"));
assertEquals(0, levenshteinDistance("mouse", "mouse"));
assertEquals(5, levenshteinDistance("mouse", ""));
assertEquals(3, levenshteinDistance("", "cat"));
assertEquals(3, levenshteinDistance("", "dog"));
assertEquals(5, levenshteinDistance("", "mouse"));
assertEquals(0, levenshteinDistance("", ""));
}
    public void testMakeShortFieldMask() {
        // Per-column maximum lengths in these rows: column 0 -> 50, column 1 -> 20, column 2 -> 80.
        List<List<String>> rows = new ArrayList<>();
        rows.add(Arrays.asList(randomAlphaOfLength(5), randomAlphaOfLength(20), randomAlphaOfLength(5)));
        rows.add(Arrays.asList(randomAlphaOfLength(50), randomAlphaOfLength(5), randomAlphaOfLength(5)));
        rows.add(Arrays.asList(randomAlphaOfLength(5), randomAlphaOfLength(5), randomAlphaOfLength(5)));
        rows.add(Arrays.asList(randomAlphaOfLength(5), randomAlphaOfLength(5), randomAlphaOfLength(80)));
        // The mask string has a '1' for each column considered "short" at the given limit
        // (stringToNumberPosBitSet turns it into a BitSet). Limit 110 keeps all three columns...
        BitSet shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 110);
        assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet("111")));
        // ...limit 80 drops the 80-char column...
        shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 80);
        assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet("11 ")));
        // ...limit 50 also drops the 50-char column...
        shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 50);
        assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet(" 1 ")));
        // ...and limit 20 excludes every column.
        shortFieldMask = DelimitedFileStructureFinder.makeShortFieldMask(rows, 20);
        assertThat(shortFieldMask, equalTo(TimestampFormatFinder.stringToNumberPosBitSet("   ")));
    }
    public void testLevenshteinCompareRows() {
        // Fieldwise comparison sums the Levenshtein distance of each aligned field pair,
        // e.g. ("cat","dog") vs ("dog","cat") = 3 + 3 = 6.
        assertEquals(0, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("cat", "dog")));
        assertEquals(3, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("cat", "cat")));
        assertEquals(6, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("dog", "cat")));
        assertEquals(8, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("mouse", "cat")));
        assertEquals(10, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "dog", "cat")));
        assertEquals(9, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "mouse", "mouse")));
        assertEquals(12, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "cat", "dog")));
    }
    public void testLevenshteinCompareRowsWithMask() {
        // Same fieldwise comparison, but only columns whose mask bit is set ('1') contribute;
        // masked-out columns (' ') are ignored entirely.
        assertEquals(0, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("cat", "dog"),
            TimestampFormatFinder.stringToNumberPosBitSet(randomFrom("  ", "1 ", " 1", "11"))));
        assertEquals(0, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("cat", "cat"),
            TimestampFormatFinder.stringToNumberPosBitSet(randomFrom("  ", "1 "))));
        assertEquals(3, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("dog", "cat"),
            TimestampFormatFinder.stringToNumberPosBitSet(randomFrom(" 1", "1 "))));
        assertEquals(3, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog"), Arrays.asList("mouse", "cat"),
            TimestampFormatFinder.stringToNumberPosBitSet(" 1")));
        assertEquals(5, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "dog", "cat"),
            TimestampFormatFinder.stringToNumberPosBitSet(" 11")));
        assertEquals(4, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "mouse", "mouse"),
            TimestampFormatFinder.stringToNumberPosBitSet(" 11")));
        assertEquals(7, levenshteinFieldwiseCompareRows(Arrays.asList("cat", "dog", "mouse"), Arrays.asList("mouse", "cat", "dog"),
            TimestampFormatFinder.stringToNumberPosBitSet(" 11")));
    }
    public void testLineHasUnescapedQuote() {
        // CSV cases: quotes are legal at field boundaries or doubled ("") inside a quoted
        // field; a bare quote inside an unquoted field is "unescaped".
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,c", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\",b,c", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,b\",c", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,b,c\"", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,\"b\",c", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,\"c\"", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,\"b\"\"\",c", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,\"c\"\"\"", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"\"\"a\",b,c", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\"\",b,c", CsvPreference.EXCEL_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,\"\"b\",c", CsvPreference.EXCEL_PREFERENCE));
        // Quotes embedded mid-field without quoting the whole field are flagged.
        assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("between\"words,b,c", CsvPreference.EXCEL_PREFERENCE));
        assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("x and \"y\",b,c", CsvPreference.EXCEL_PREFERENCE));
        // Same expectations with tab as the delimiter (TSV).
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\tc", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\tb\"\tc", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\tb\tc\"", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\t\"b\"\tc", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\t\"b\"\"\"\tc", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"\"\"", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"\"\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\"\"\tb\tc", CsvPreference.TAB_PREFERENCE));
        assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\t\"\"b\"\tc", CsvPreference.TAB_PREFERENCE));
        assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("between\"words\tb\tc", CsvPreference.TAB_PREFERENCE));
        assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("x and \"y\"\tb\tc", CsvPreference.TAB_PREFERENCE));
    }
public void testRowContainsDuplicateNonEmptyValues() {
assertNull(DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Collections.singletonList("a")));
assertNull(DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Collections.singletonList("")));
assertNull(DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Arrays.asList("a", "b", "c")));
assertEquals("a", DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Arrays.asList("a", "b", "a")));
assertEquals("b", DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Arrays.asList("a", "b", "b")));
assertNull(DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Arrays.asList("a", "", "")));
assertNull(DelimitedFileStructureFinder.findDuplicateNonEmptyValues(Arrays.asList("", "a", "")));
}
    public void testMakeCsvProcessorSettings() {
        // Randomise every argument and check which keys appear in the generated ingest
        // "csv" processor settings: defaults (',' separator, '"' quote, trim=false) are omitted.
        String field = randomAlphaOfLength(10);
        List<String> targetFields = Arrays.asList(generateRandomStringArray(10, field.length() - 1, false, false));
        char separator = randomFrom(',', ';', '\t', '|');
        char quote = randomFrom('"', '\'');
        boolean trim = randomBoolean();
        Map<String, Object> settings = DelimitedFileStructureFinder.makeCsvProcessorSettings(field, targetFields, separator, quote, trim);
        assertThat(settings.get("field"), equalTo(field));
        assertThat(settings.get("target_fields"), equalTo(targetFields));
        assertThat(settings.get("ignore_missing"), equalTo(false));
        if (separator == ',') {
            // Comma is the processor default, so it must not be emitted explicitly.
            assertThat(settings, not(hasKey("separator")));
        } else {
            assertThat(settings.get("separator"), equalTo(String.valueOf(separator)));
        }
        if (quote == '"') {
            // Double quote is the processor default.
            assertThat(settings, not(hasKey("quote")));
        } else {
            assertThat(settings.get("quote"), equalTo(String.valueOf(quote)));
        }
        if (trim) {
            assertThat(settings.get("trim"), equalTo(true));
        } else {
            // trim=false is the default and therefore omitted.
            assertThat(settings, not(hasKey("trim")));
        }
    }
    public void testMultilineStartPatternGivenNoMultiline() {
        // When no sampled message spans multiple lines (maxLinesPerMessage == 1 below),
        // no multi-line start pattern should be created, timestamp field or not.
        List<String> columnNames = Stream.generate(() -> randomAlphaOfLengthBetween(5, 10)).limit(10).collect(Collectors.toList());
        String timeFieldName;
        TimestampFormatFinder timeFieldFormat;
        if (randomBoolean()) {
            // Randomly include a date column to show it makes no difference here.
            timeFieldName = columnNames.get(randomIntBetween(0, columnNames.size() - 1));
            timeFieldFormat = new TimestampFormatFinder(explanation, true, true, true, NOOP_TIMEOUT_CHECKER);
            timeFieldFormat.addSample("2020-01-30T15:05:09");
        } else {
            timeFieldName = null;
            timeFieldFormat = null;
        }
        Map<String, Object> mappings = new TreeMap<>();
        for (String columnName : columnNames) {
            if (columnName.equals(timeFieldName)) {
                mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date"));
            } else {
                mappings.put(columnName,
                    Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING,
                        randomFrom("boolean", "long", "double", "text", "keyword")));
            }
        }
        assertNull(DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 1, ",", "\"", mappings, timeFieldName,
            timeFieldFormat));
        assertThat(explanation, contains("Not creating a multi-line start pattern as no sampled message spanned multiple lines"));
    }
    public void testMultilineStartPatternFromTimeField() {
        // With multi-line messages and a timestamp column, the start pattern should skip the
        // preceding columns with ".*?," and then match the (optionally quoted) timestamp prefix.
        List<String> columnNames = Stream.generate(() -> randomAlphaOfLengthBetween(5, 10)).limit(10).collect(Collectors.toList());
        int timeFieldColumnIndex = randomIntBetween(0, columnNames.size() - 2);
        String timeFieldName = columnNames.get(timeFieldColumnIndex);
        TimestampFormatFinder timeFieldFormat = new TimestampFormatFinder(explanation, true, true, true, NOOP_TIMEOUT_CHECKER);
        timeFieldFormat.addSample("2020-01-30T15:05:09");
        Map<String, Object> mappings = new TreeMap<>();
        for (String columnName : columnNames) {
            if (columnName.equals(timeFieldName)) {
                mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date"));
            } else {
                mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, randomFrom("text", "keyword")));
            }
        }
        // One ".*?," per column before the timestamp, then an ISO-like date/time prefix.
        String expected = "^" + Stream.generate(() -> ".*?,").limit(timeFieldColumnIndex).collect(Collectors.joining()) +
            "\"?\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}";
        assertEquals(expected, DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 2, ",", "\"", mappings,
            timeFieldName, timeFieldFormat));
        assertThat(explanation, contains("Created a multi-line start pattern based on timestamp column [" + timeFieldName + "]"));
    }
    public void testMultilineStartPatternFromMappings() {
        // Without a timestamp column, the start pattern is anchored on the first column whose
        // mapped type (boolean/long/double) has a recognisable value pattern.
        int randomIndex = randomIntBetween(0, 2);
        String type = new String[]{ "boolean", "long", "double" }[randomIndex];
        String expectedTypePattern =
            new String[]{ "(?:true|false)", "[+-]?\\d+", "[+-]?(?:\\d+(?:\\.\\d+)?|\\.\\d+)(?:[eE][+-]?\\d+)?" }[randomIndex];
        List<String> columnNames = Stream.generate(() -> randomAlphaOfLengthBetween(5, 10)).limit(10).collect(Collectors.toList());
        int chosenFieldColumnIndex = randomIntBetween(0, columnNames.size() - 2);
        String chosenField = columnNames.get(chosenFieldColumnIndex);
        Map<String, Object> mappings = new TreeMap<>();
        for (String columnName : columnNames) {
            if (columnName.equals(chosenField)) {
                mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, type));
            } else {
                mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, randomFrom("text", "keyword")));
            }
        }
        // The typed value may appear bare or wrapped in quotes, hence the alternation.
        String expected = "^" + Stream.generate(() -> ".*?,").limit(chosenFieldColumnIndex).collect(Collectors.joining()) +
            "(?:" + expectedTypePattern + "|\"" + expectedTypePattern + "\"),";
        assertEquals(expected, DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 2, ",", "\"", mappings,
            null, null));
        assertThat(explanation, contains("Created a multi-line start pattern based on [" + type + "] column [" + chosenField + "]"));
    }
    public void testMultilineStartPatternDeterminationTooHard() {
        // All columns are free-form text/keyword, so there is no column with a reliable
        // value pattern to anchor on — no start pattern can be created.
        List<String> columnNames = Stream.generate(() -> randomAlphaOfLengthBetween(5, 10)).limit(10).collect(Collectors.toList());
        Map<String, Object> mappings = new TreeMap<>();
        for (String columnName : columnNames) {
            mappings.put(columnName, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, randomFrom("text", "keyword")));
        }
        assertNull(DelimitedFileStructureFinder.makeMultilineStartPattern(explanation, columnNames, 2, ",", "\"", mappings, null, null));
        assertThat(explanation, contains("Failed to create a suitable multi-line start pattern"));
    }
    /**
     * Builds randomised ingest "csv" processor settings via
     * {@code DelimitedFileStructureFinder.makeCsvProcessorSettings}. Package-private so other
     * tests can reuse it as a fixture generator.
     */
    static Map<String, Object> randomCsvProcessorSettings() {
        String field = randomAlphaOfLength(10);
        return DelimitedFileStructureFinder.makeCsvProcessorSettings(field,
            Arrays.asList(generateRandomStringArray(10, field.length() - 1, false, false)), randomFrom(',', ';', '\t', '|'),
            randomFrom('"', '\''), randomBoolean());
    }
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2019 Groupon, Inc
* Copyright 2014-2019 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.entitlement.api;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import javax.annotation.Nullable;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.catalog.api.CatalogApiException;
import org.killbill.billing.catalog.api.Plan;
import org.killbill.billing.catalog.api.PlanPhase;
import org.killbill.billing.catalog.api.PriceList;
import org.killbill.billing.catalog.api.Product;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.entitlement.DefaultEntitlementService;
import org.killbill.billing.entitlement.EventsStream;
import org.killbill.billing.entitlement.api.EntitlementPluginExecution.WithEntitlementPlugin;
import org.killbill.billing.entitlement.block.BlockingChecker;
import org.killbill.billing.entitlement.dao.BlockingStateDao;
import org.killbill.billing.entitlement.engine.core.EntitlementNotificationKey;
import org.killbill.billing.entitlement.engine.core.EntitlementNotificationKeyAction;
import org.killbill.billing.entitlement.engine.core.EntitlementUtils;
import org.killbill.billing.entitlement.engine.core.EventsStreamBuilder;
import org.killbill.billing.entitlement.plugin.api.EntitlementContext;
import org.killbill.billing.entitlement.plugin.api.OperationType;
import org.killbill.billing.entity.EntityBase;
import org.killbill.billing.junction.DefaultBlockingState;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.billing.platform.api.KillbillService.KILLBILL_SERVICES;
import org.killbill.billing.security.Logical;
import org.killbill.billing.security.Permission;
import org.killbill.billing.security.SecurityApiException;
import org.killbill.billing.security.api.SecurityApi;
import org.killbill.billing.subscription.api.SubscriptionBase;
import org.killbill.billing.subscription.api.SubscriptionBaseInternalApi;
import org.killbill.billing.subscription.api.user.SubscriptionBaseApiException;
import org.killbill.billing.subscription.api.user.SubscriptionBaseBundle;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.clock.Clock;
import org.killbill.notificationq.api.NotificationEvent;
import org.killbill.notificationq.api.NotificationQueue;
import org.killbill.notificationq.api.NotificationQueueService;
import org.killbill.notificationq.api.NotificationQueueService.NoSuchNotificationQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableList;
import static org.killbill.billing.entitlement.logging.EntitlementLoggingHelper.logCancelEntitlement;
import static org.killbill.billing.entitlement.logging.EntitlementLoggingHelper.logChangePlan;
import static org.killbill.billing.entitlement.logging.EntitlementLoggingHelper.logUncancelEntitlement;
import static org.killbill.billing.entitlement.logging.EntitlementLoggingHelper.logUndoChangePlan;
import static org.killbill.billing.entitlement.logging.EntitlementLoggingHelper.logUpdateBCD;
public class DefaultEntitlement extends EntityBase implements Entitlement {
private Logger log = LoggerFactory.getLogger(DefaultEntitlement.class);
private final SecurityApi securityApi;
protected final EventsStreamBuilder eventsStreamBuilder;
protected final EntitlementDateHelper dateHelper;
protected final InternalTenantContext internalTenantContext;
protected final InternalCallContextFactory internalCallContextFactory;
protected final Clock clock;
protected final BlockingChecker checker;
protected final EntitlementApi entitlementApi;
protected final EntitlementPluginExecution pluginExecution;
protected final SubscriptionBaseInternalApi subscriptionInternalApi;
protected final BlockingStateDao blockingStateDao;
protected final NotificationQueueService notificationQueueService;
protected final EntitlementUtils entitlementUtils;
// Refresh-able
protected EventsStream eventsStream;
    /**
     * Builds the entitlement by first constructing its {@code EventsStream} from the bundle,
     * the subscription and its sibling subscriptions, then delegating to the main constructor.
     *
     * @throws EntitlementApiException if the events stream cannot be built
     */
    public DefaultEntitlement(final SubscriptionBaseBundle bundle, final SubscriptionBase subscription, final Collection<SubscriptionBase> allSubscriptionsForBundle, final EventsStreamBuilder eventsStreamBuilder,
                              final EntitlementApi entitlementApi, final EntitlementPluginExecution pluginExecution, final BlockingStateDao blockingStateDao,
                              final SubscriptionBaseInternalApi subscriptionInternalApi, final BlockingChecker checker,
                              final NotificationQueueService notificationQueueService, final EntitlementUtils entitlementUtils,
                              final EntitlementDateHelper dateHelper, final Clock clock, final SecurityApi securityApi,
                              final InternalCallContextFactory internalCallContextFactory, final InternalTenantContext internalTenantContext) throws EntitlementApiException {
        this(eventsStreamBuilder.buildForEntitlement(bundle, subscription, allSubscriptionsForBundle, internalTenantContext), eventsStreamBuilder,
             entitlementApi, pluginExecution, blockingStateDao, subscriptionInternalApi, checker, notificationQueueService,
             entitlementUtils, dateHelper, clock, securityApi, internalTenantContext, internalCallContextFactory);
    }
    /**
     * Main constructor: the entity id and created/updated dates come from the underlying
     * subscription carried by the given {@code EventsStream}; all collaborators are stored as-is.
     */
    public DefaultEntitlement(final EventsStream eventsStream, final EventsStreamBuilder eventsStreamBuilder,
                              final EntitlementApi entitlementApi, final EntitlementPluginExecution pluginExecution, final BlockingStateDao blockingStateDao,
                              final SubscriptionBaseInternalApi subscriptionInternalApi, final BlockingChecker checker,
                              final NotificationQueueService notificationQueueService, final EntitlementUtils entitlementUtils,
                              final EntitlementDateHelper dateHelper, final Clock clock, final SecurityApi securityApi, final InternalTenantContext internalTenantContext, final InternalCallContextFactory internalCallContextFactory) {
        super(eventsStream.getEntitlementId(), eventsStream.getSubscriptionBase().getCreatedDate(), eventsStream.getSubscriptionBase().getUpdatedDate());
        this.eventsStreamBuilder = eventsStreamBuilder;
        this.eventsStream = eventsStream;
        this.dateHelper = dateHelper;
        this.entitlementApi = entitlementApi;
        this.pluginExecution = pluginExecution;
        this.subscriptionInternalApi = subscriptionInternalApi;
        this.internalTenantContext = internalTenantContext;
        this.internalCallContextFactory = internalCallContextFactory;
        this.clock = clock;
        this.securityApi = securityApi;
        this.checker = checker;
        this.blockingStateDao = blockingStateDao;
        this.notificationQueueService = notificationQueueService;
        this.entitlementUtils = entitlementUtils;
    }
    /**
     * Copy constructor: shallow copy — the new instance shares the source's events stream
     * and collaborators (no deep copy is performed).
     */
    public DefaultEntitlement(final DefaultEntitlement in) {
        this(in.getEventsStream(),
             in.getEventsStreamBuilder(),
             in.getEntitlementApi(),
             in.getPluginExecution(),
             in.getBlockingStateDao(),
             in.getSubscriptionInternalApi(),
             in.getChecker(),
             in.getNotificationQueueService(),
             in.getEntitlementUtils(),
             in.getDateHelper(),
             in.getClock(),
             in.getSecurityApi(),
             in.getInternalTenantContext(),
             in.getInternalCallContextFactory());
    }
    //
    // Plain accessors for the injected collaborators and the current events stream.
    // These exist mainly to support the copy constructor above.
    //
    public EventsStream getEventsStream() {
        return eventsStream;
    }
    // Subscription associated with this entitlement (equals to baseSubscription for base subscriptions)
    public SubscriptionBase getSubscriptionBase() {
        return eventsStream.getSubscriptionBase();
    }
    // Base subscription for the bundle if it exists, null otherwise
    public SubscriptionBase getBasePlanSubscriptionBase() {
        return eventsStream.getBasePlanSubscriptionBase();
    }
    public EventsStreamBuilder getEventsStreamBuilder() {
        return eventsStreamBuilder;
    }
    public EntitlementDateHelper getDateHelper() {
        return dateHelper;
    }
    public InternalTenantContext getInternalTenantContext() {
        return internalTenantContext;
    }
    public InternalCallContextFactory getInternalCallContextFactory() {
        return internalCallContextFactory;
    }
    public EntitlementApi getEntitlementApi() {
        return entitlementApi;
    }
    public EntitlementPluginExecution getPluginExecution() {
        return pluginExecution;
    }
    public SubscriptionBaseInternalApi getSubscriptionInternalApi() {
        return subscriptionInternalApi;
    }
    public Clock getClock() {
        return clock;
    }
    public BlockingChecker getChecker() {
        return checker;
    }
    public BlockingStateDao getBlockingStateDao() {
        return blockingStateDao;
    }
    public NotificationQueueService getNotificationQueueService() {
        return notificationQueueService;
    }
    public EntitlementUtils getEntitlementUtils() {
        return entitlementUtils;
    }
    public SecurityApi getSecurityApi() {
        return securityApi;
    }
    //
    // Entitlement interface: read-only views, all delegating to the current events stream
    // or to the underlying subscription.
    //
    @Override
    public UUID getBaseEntitlementId() {
        return eventsStream.getEntitlementId();
    }
    @Override
    public String getExternalKey() {
        return eventsStream.getExternalKey();
    }
    @Override
    public UUID getBundleId() {
        return eventsStream.getBundleId();
    }
    @Override
    public UUID getAccountId() {
        return eventsStream.getAccountId();
    }
    @Override
    public String getBundleExternalKey() {
        return eventsStream.getBundleExternalKey();
    }
    @Override
    public EntitlementState getState() {
        return eventsStream.getEntitlementState();
    }
    @Override
    public EntitlementSourceType getSourceType() {
        return getSubscriptionBase().getSourceType();
    }
    @Override
    public LocalDate getEffectiveStartDate() {
        return eventsStream.getEntitlementEffectiveStartDate();
    }
    @Override
    public LocalDate getEffectiveEndDate() {
        return eventsStream.getEntitlementEffectiveEndDate();
    }
    @Override
    public Product getLastActiveProduct() {
        return getSubscriptionBase().getLastActiveProduct();
    }
    @Override
    public Plan getLastActivePlan() {
        return getSubscriptionBase().getLastActivePlan();
    }
    @Override
    public PlanPhase getLastActivePhase() {
        return getSubscriptionBase().getLastActivePhase();
    }
    @Override
    public PriceList getLastActivePriceList() {
        return getSubscriptionBase().getLastActivePriceList();
    }
    @Override
    public ProductCategory getLastActiveProductCategory() {
        return getSubscriptionBase().getLastActiveCategory();
    }
    @Override
    public Integer getBillCycleDayLocal() {
        // Prefer the BCD set on the subscription itself; fall back to the account/bundle
        // default carried by the events stream when none is set per-subscription.
        final Integer perSubscriptionBillCycleDayLocal = getSubscriptionBase().getBillCycleDayLocal();
        return perSubscriptionBillCycleDayLocal != null ? perSubscriptionBillCycleDayLocal : eventsStream.getDefaultBillCycleDayLocal();
    }
    /**
     * Cancels the entitlement according to the given action policy: the policy is translated
     * into a concrete cancellation date and the date-based cancellation path is reused
     * (without overriding the billing effective date).
     */
    @Override
    public Entitlement cancelEntitlementWithPolicy(final EntitlementActionPolicy entitlementPolicy, final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
        logCancelEntitlement(log, this, null, null, entitlementPolicy, null);
        // Get the latest state from disk - required to have the latest CTD
        refresh(callContext);
        final LocalDate cancellationDate = getLocalDateFromEntitlementPolicy(entitlementPolicy, callContext);
        return cancelEntitlementWithDate(cancellationDate, false, properties, callContext);
    }
/**
 * Cancels this entitlement on the given local date (or now when the date is null).
 *
 * @param entitlementEffectiveDate      requested entitlement cancellation date; may be null
 * @param overrideBillingEffectiveDate  when true, billing is cancelled on the same date;
 *                                      otherwise the subscription base default is used
 * @param properties                    plugin properties passed through to entitlement plugins
 * @param callContext                   caller context
 * @return the refreshed entitlement after cancellation
 * @throws EntitlementApiException if permissions are missing, the date precedes the
 *         entitlement start, the entitlement is already cancelled, or the base call fails
 */
@Override
public Entitlement cancelEntitlementWithDate(@Nullable final LocalDate entitlementEffectiveDate, final boolean overrideBillingEffectiveDate, final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logCancelEntitlement(log, this, entitlementEffectiveDate, overrideBillingEffectiveDate, null, null);
    checkForPermissions(Permission.ENTITLEMENT_CAN_CANCEL, callContext);
    // Get the latest state from disk
    refresh(callContext);
    // Cancelling before the entitlement even started is invalid.
    if (entitlementEffectiveDate != null && entitlementEffectiveDate.compareTo(getEffectiveStartDate()) < 0) {
        throw new EntitlementApiException(ErrorCode.SUB_INVALID_REQUESTED_DATE, entitlementEffectiveDate, getEffectiveStartDate());
    }
    // Billing date is only pinned to the entitlement date when explicitly requested.
    final LocalDate billingEffectiveDate = overrideBillingEffectiveDate ? entitlementEffectiveDate : null;
    // Build the context handed to entitlement plugins describing this cancellation.
    final BaseEntitlementWithAddOnsSpecifier baseEntitlementWithAddOnsSpecifier = new DefaultBaseEntitlementWithAddOnsSpecifier(
        getBundleId(),
        getBundleExternalKey(),
        null,
        entitlementEffectiveDate,
        billingEffectiveDate,
        false);
    final List<BaseEntitlementWithAddOnsSpecifier> baseEntitlementWithAddOnsSpecifierList = new ArrayList<BaseEntitlementWithAddOnsSpecifier>();
    baseEntitlementWithAddOnsSpecifierList.add(baseEntitlementWithAddOnsSpecifier);
    final EntitlementContext pluginContext = new DefaultEntitlementContext(OperationType.CANCEL_SUBSCRIPTION,
                                                                          getAccountId(),
                                                                          null,
                                                                          baseEntitlementWithAddOnsSpecifierList,
                                                                          null,
                                                                          properties,
                                                                          callContext);
    // The actual cancellation work, executed inside the plugin pipeline so plugins
    // can observe/alter the operation.
    final WithEntitlementPlugin<Entitlement> cancelEntitlementWithPlugin = new WithEntitlementPlugin<Entitlement>() {
        @Override
        public Entitlement doCall(final EntitlementApi entitlementApi, final DefaultEntitlementContext updatedPluginContext) throws EntitlementApiException {
            // Idempotency guard: refuse to cancel twice.
            if (eventsStream.isEntitlementCancelled()) {
                throw new EntitlementApiException(ErrorCode.SUB_CANCEL_BAD_STATE, getId(), EntitlementState.CANCELLED);
            }
            final InternalCallContext contextWithValidAccountRecordId = internalCallContextFactory.createInternalCallContext(getAccountId(), callContext);
            // Resolve the billing cancellation instant, never earlier than the subscription start.
            final DateTime billingEffectiveCancelDate = dateHelper.fromLocalDateAndReferenceTimeWithMinimum(billingEffectiveDate, getEventsStream().getSubscriptionBase().getStartDate(), updatedPluginContext.getCreatedDate(), contextWithValidAccountRecordId);
            try {
                if (overrideBillingEffectiveDate) {
                    getSubscriptionBase().cancelWithDate(billingEffectiveCancelDate, callContext);
                } else {
                    getSubscriptionBase().cancel(callContext);
                }
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e);
            }
            // Resolve the entitlement cancellation instant, never earlier than the entitlement start.
            final DateTime entitlementEffectiveCancelDate = dateHelper.fromLocalDateAndReferenceTimeWithMinimum(entitlementEffectiveDate, getEventsStream().getEntitlementEffectiveStartDateTime(), updatedPluginContext.getCreatedDate(), contextWithValidAccountRecordId);
            // Persist an ENT_STATE_CANCELLED blocking state for this subscription.
            final BlockingState newBlockingState = new DefaultBlockingState(getId(), BlockingStateType.SUBSCRIPTION, DefaultEntitlementApi.ENT_STATE_CANCELLED, KILLBILL_SERVICES.ENTITLEMENT_SERVICE.getServiceName(), true, true, false, entitlementEffectiveCancelDate);
            final Collection<NotificationEvent> notificationEvents = new ArrayList<NotificationEvent>();
            final Collection<BlockingState> addOnsBlockingStates = computeAddOnBlockingStates(entitlementEffectiveCancelDate, notificationEvents, callContext, contextWithValidAccountRecordId);
            // Record the new state first, then insert the notifications to avoid race conditions
            setBlockingStates(newBlockingState, addOnsBlockingStates, contextWithValidAccountRecordId);
            for (final NotificationEvent notificationEvent : notificationEvents) {
                recordFutureNotification(entitlementEffectiveCancelDate, notificationEvent, contextWithValidAccountRecordId);
            }
            return entitlementApi.getEntitlementForId(getId(), callContext);
        }
    };
    return pluginExecution.executeWithPlugin(cancelEntitlementWithPlugin, pluginContext);
}
/**
 * Undoes a previous (possibly future-dated) entitlement cancellation and, if billing
 * was cancelled too, reactivates billing on the subscription base.
 *
 * @param properties  plugin properties passed through to entitlement plugins
 * @param callContext caller context
 * @throws EntitlementApiException if permissions are missing, the subscription is
 *         already cancelled on disk, or there is no cancellation to undo
 */
@Override
public void uncancelEntitlement(final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logUncancelEntitlement(log, this);
    checkForPermissions(Permission.ENTITLEMENT_CAN_CANCEL, callContext);
    // Get the latest state from disk
    refresh(callContext);
    final BaseEntitlementWithAddOnsSpecifier baseEntitlementWithAddOnsSpecifier = new DefaultBaseEntitlementWithAddOnsSpecifier(
        getBundleId(),
        getBundleExternalKey(),
        null,
        null,
        null,
        false);
    final List<BaseEntitlementWithAddOnsSpecifier> baseEntitlementWithAddOnsSpecifierList = new ArrayList<BaseEntitlementWithAddOnsSpecifier>();
    baseEntitlementWithAddOnsSpecifierList.add(baseEntitlementWithAddOnsSpecifier);
    final EntitlementContext pluginContext = new DefaultEntitlementContext(OperationType.UNDO_PENDING_SUBSCRIPTION_OPERATION,
                                                                          getAccountId(),
                                                                          null,
                                                                          baseEntitlementWithAddOnsSpecifierList,
                                                                          null,
                                                                          properties,
                                                                          callContext);
    final WithEntitlementPlugin<Void> uncancelEntitlementWithPlugin = new WithEntitlementPlugin<Void>() {
        @Override
        public Void doCall(final EntitlementApi entitlementApi, final DefaultEntitlementContext updatedPluginContext) throws EntitlementApiException {
            // A subscription cancelled on disk cannot be uncancelled.
            if (eventsStream.isSubscriptionCancelled()) {
                throw new EntitlementApiException(ErrorCode.SUB_UNCANCEL_BAD_STATE, getId());
            }
            final InternalCallContext contextWithValidAccountRecordId = internalCallContextFactory.createInternalCallContext(getAccountId(), callContext);
            final Collection<BlockingState> pendingEntitlementCancellationEvents = eventsStream.getPendingEntitlementCancellationEvents();
            if (eventsStream.isEntitlementCancelled()) {
                // Effective (past) entitlement cancellation: deactivate the recorded event.
                final BlockingState cancellationEvent = eventsStream.getEntitlementCancellationEvent();
                blockingStateDao.unactiveBlockingState(cancellationEvent.getId(), contextWithValidAccountRecordId);
            } else if (!pendingEntitlementCancellationEvents.isEmpty()) {
                // Reactivate entitlements
                // See also https://github.com/killbill/killbill/issues/111
                //
                // Today we only support cancellation at SUBSCRIPTION level (Not ACCOUNT or BUNDLE), so we should really have only
                // one future event in the list
                //
                for (final BlockingState futureCancellation : pendingEntitlementCancellationEvents) {
                    blockingStateDao.unactiveBlockingState(futureCancellation.getId(), contextWithValidAccountRecordId);
                }
            } else {
                // Entitlement is NOT cancelled (or future cancelled), there is nothing to do
                throw new EntitlementApiException(ErrorCode.ENT_UNCANCEL_BAD_STATE, getId());
            }
            // If billing was previously cancelled, reactivate
            if (getSubscriptionBase().getFutureEndDate() != null) {
                try {
                    getSubscriptionBase().uncancel(callContext);
                } catch (final SubscriptionBaseApiException e) {
                    throw new EntitlementApiException(e);
                }
            }
            return null;
        }
    };
    pluginExecution.executeWithPlugin(uncancelEntitlementWithPlugin, pluginContext);
}
/**
 * Cancels this entitlement using an entitlement action policy while also overriding
 * the billing cancellation policy.
 */
@Override
public Entitlement cancelEntitlementWithPolicyOverrideBillingPolicy(final EntitlementActionPolicy entitlementPolicy, final BillingActionPolicy billingPolicy, final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logCancelEntitlement(log, this, null, null, entitlementPolicy, billingPolicy);
    // Refresh from disk so the policy translation sees the latest charged-through date.
    refresh(callContext);
    // Resolve the policy to a concrete date, then hand off to the date-based variant
    // which applies the billing policy and runs the plugin pipeline.
    final LocalDate resolvedCancellationDate = getLocalDateFromEntitlementPolicy(entitlementPolicy, callContext);
    return cancelEntitlementWithDateOverrideBillingPolicy(resolvedCancellationDate, billingPolicy, properties, callContext);
}
// See also EntitlementInternalApi#cancel for the bulk API
/**
 * Cancels this entitlement on the given local date while cancelling billing according
 * to the supplied {@link BillingActionPolicy}.
 *
 * @param entitlementEffectiveDate requested entitlement cancellation date; may be null
 * @param billingPolicy            billing cancellation policy applied to the subscription base
 * @param properties               plugin properties passed through to entitlement plugins
 * @param callContext              caller context
 * @return the refreshed entitlement after cancellation
 * @throws EntitlementApiException if permissions are missing, the entitlement is already
 *         cancelled, or the subscription base cancellation fails
 */
@Override
public Entitlement cancelEntitlementWithDateOverrideBillingPolicy(@Nullable final LocalDate entitlementEffectiveDate, final BillingActionPolicy billingPolicy, final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logCancelEntitlement(log, this, entitlementEffectiveDate, null, null, billingPolicy);
    checkForPermissions(Permission.ENTITLEMENT_CAN_CANCEL, callContext);
    // Get the latest state from disk
    refresh(callContext);
    final BaseEntitlementWithAddOnsSpecifier baseEntitlementWithAddOnsSpecifier = new DefaultBaseEntitlementWithAddOnsSpecifier(
        getBundleId(),
        getBundleExternalKey(),
        null,
        entitlementEffectiveDate,
        entitlementEffectiveDate,
        false);
    final List<BaseEntitlementWithAddOnsSpecifier> baseEntitlementWithAddOnsSpecifierList = new ArrayList<BaseEntitlementWithAddOnsSpecifier>();
    baseEntitlementWithAddOnsSpecifierList.add(baseEntitlementWithAddOnsSpecifier);
    final EntitlementContext pluginContext = new DefaultEntitlementContext(OperationType.CANCEL_SUBSCRIPTION,
                                                                          getAccountId(),
                                                                          null,
                                                                          baseEntitlementWithAddOnsSpecifierList,
                                                                          billingPolicy,
                                                                          properties,
                                                                          callContext);
    final WithEntitlementPlugin<Entitlement> cancelEntitlementWithPlugin = new WithEntitlementPlugin<Entitlement>() {
        @Override
        public Entitlement doCall(final EntitlementApi entitlementApi, final DefaultEntitlementContext updatedPluginContext) throws EntitlementApiException {
            // Idempotency guard: refuse to cancel twice.
            if (eventsStream.isEntitlementCancelled()) {
                throw new EntitlementApiException(ErrorCode.SUB_CANCEL_BAD_STATE, getId(), EntitlementState.CANCELLED);
            }
            final InternalCallContext contextWithValidAccountRecordId = internalCallContextFactory.createInternalCallContext(getAccountId(), callContext);
            try {
                // Cancel subscription base first, to correctly compute the add-ons entitlements we need to cancel (see below)
                getSubscriptionBase().cancelWithPolicy(billingPolicy, callContext);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e);
            }
            // Resolve the entitlement cancellation instant, never earlier than the entitlement start.
            final DateTime effectiveCancelDate = dateHelper.fromLocalDateAndReferenceTimeWithMinimum(entitlementEffectiveDate, getEventsStream().getEntitlementEffectiveStartDateTime(), updatedPluginContext.getCreatedDate(), contextWithValidAccountRecordId);
            final BlockingState newBlockingState = new DefaultBlockingState(getId(), BlockingStateType.SUBSCRIPTION, DefaultEntitlementApi.ENT_STATE_CANCELLED, KILLBILL_SERVICES.ENTITLEMENT_SERVICE.getServiceName(), true, true, false, effectiveCancelDate);
            final Collection<NotificationEvent> notificationEvents = new ArrayList<NotificationEvent>();
            final Collection<BlockingState> addOnsBlockingStates = computeAddOnBlockingStates(effectiveCancelDate, notificationEvents, callContext, contextWithValidAccountRecordId);
            // Record the new state first, then insert the notifications to avoid race conditions
            setBlockingStates(newBlockingState, addOnsBlockingStates, contextWithValidAccountRecordId);
            for (final NotificationEvent notificationEvent : notificationEvents) {
                recordFutureNotification(effectiveCancelDate, notificationEvent, contextWithValidAccountRecordId);
            }
            return entitlementApi.getEntitlementForId(getId(), callContext);
        }
    };
    return pluginExecution.executeWithPlugin(cancelEntitlementWithPlugin, pluginContext);
}
/**
 * Translates an {@link EntitlementActionPolicy} into a concrete cancellation date
 * in the account's time zone.
 *
 * <p>IMMEDIATE maps to the request's creation date; END_OF_TERM maps to the
 * charged-through date when one exists, otherwise to the request's creation date.
 * The result is never earlier than the entitlement's effective start date.
 */
private LocalDate getLocalDateFromEntitlementPolicy(final EntitlementActionPolicy entitlementPolicy, final CallContext callContext) {
    final InternalTenantContext tenantContext = internalCallContextFactory.createInternalTenantContext(getAccountId(), callContext);
    final LocalDate requestedDate;
    switch (entitlementPolicy) {
        case IMMEDIATE:
            requestedDate = tenantContext.toLocalDate(callContext.getCreatedDate());
            break;
        case END_OF_TERM:
            requestedDate = getSubscriptionBase().getChargedThroughDate() != null
                            ? tenantContext.toLocalDate(getSubscriptionBase().getChargedThroughDate())
                            : tenantContext.toLocalDate(callContext.getCreatedDate());
            break;
        default:
            throw new RuntimeException("Unsupported policy " + entitlementPolicy);
    }
    // Clamp: never cancel before the entitlement actually started.
    if (requestedDate.compareTo(getEffectiveStartDate()) < 0) {
        return getEffectiveStartDate();
    }
    return requestedDate;
}
/**
 * Changes this entitlement's plan using the subscription base's default change-plan
 * timing (no explicit date or policy).
 *
 * @param spec        the target plan specifier
 * @param properties  plugin properties passed through to entitlement plugins
 * @param callContext caller context
 * @return the refreshed entitlement after the change
 * @throws EntitlementApiException if permissions are missing, the entitlement is not
 *         active, the change is blocked, or the base change fails
 */
@Override
public Entitlement changePlan(final EntitlementSpecifier spec, final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logChangePlan(log, this, spec, null, null);
    checkForPermissions(Permission.ENTITLEMENT_CAN_CHANGE_PLAN, callContext);
    // Get the latest state from disk
    refresh(callContext);
    final BaseEntitlementWithAddOnsSpecifier baseEntitlementWithAddOnsSpecifier = new DefaultBaseEntitlementWithAddOnsSpecifier(
        getBundleId(),
        getBundleExternalKey(),
        null,
        null,
        null,
        false);
    final List<BaseEntitlementWithAddOnsSpecifier> baseEntitlementWithAddOnsSpecifierList = new ArrayList<BaseEntitlementWithAddOnsSpecifier>();
    baseEntitlementWithAddOnsSpecifierList.add(baseEntitlementWithAddOnsSpecifier);
    final EntitlementContext pluginContext = new DefaultEntitlementContext(OperationType.CHANGE_PLAN,
                                                                          getAccountId(),
                                                                          null,
                                                                          baseEntitlementWithAddOnsSpecifierList,
                                                                          null,
                                                                          properties,
                                                                          callContext);
    final WithEntitlementPlugin<Entitlement> changePlanWithPlugin = new WithEntitlementPlugin<Entitlement>() {
        @Override
        public Entitlement doCall(final EntitlementApi entitlementApi, final DefaultEntitlementContext updatedPluginContext) throws EntitlementApiException {
            // Plan changes are only allowed on active entitlements.
            if (!eventsStream.isEntitlementActive()) {
                throw new EntitlementApiException(ErrorCode.SUB_CHANGE_NON_ACTIVE, getId(), getState());
            }
            final InternalCallContext context = internalCallContextFactory.createInternalCallContext(getAccountId(), callContext);
            // Dry-run first to learn the effective change date without mutating state.
            final DateTime effectiveChangeDate;
            try {
                effectiveChangeDate = subscriptionInternalApi.getDryRunChangePlanEffectiveDate(getSubscriptionBase(), spec, null, null, context);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            } catch (final CatalogApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            }
            // Verify no blocking state forbids a change on that date.
            try {
                checker.checkBlockedChange(getSubscriptionBase(), effectiveChangeDate, context);
            } catch (final BlockingApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            }
            try {
                getSubscriptionBase().changePlan(spec, callContext);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e);
            }
            final Collection<NotificationEvent> notificationEvents = new ArrayList<NotificationEvent>();
            final Iterable<BlockingState> addOnsBlockingStates = computeAddOnBlockingStates(effectiveChangeDate, notificationEvents, callContext, context);
            // Record the new state first, then insert the notifications to avoid race conditions
            setBlockingStates(addOnsBlockingStates, context);
            for (final NotificationEvent notificationEvent : notificationEvents) {
                recordFutureNotification(effectiveChangeDate, notificationEvent, context);
            }
            return entitlementApi.getEntitlementForId(getId(), callContext);
        }
    };
    return pluginExecution.executeWithPlugin(changePlanWithPlugin, pluginContext);
}
/**
 * Undoes a pending (future-dated) plan change on the underlying subscription base.
 */
@Override
public void undoChangePlan(final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logUndoChangePlan(log, this);
    checkForPermissions(Permission.ENTITLEMENT_CAN_CHANGE_PLAN, callContext);
    // Reload the latest persisted state before operating.
    refresh(callContext);
    // Describe the operation for the plugin pipeline (no dates are involved here).
    final BaseEntitlementWithAddOnsSpecifier specifier = new DefaultBaseEntitlementWithAddOnsSpecifier(
        getBundleId(),
        getBundleExternalKey(),
        null,
        null,
        null,
        false);
    final List<BaseEntitlementWithAddOnsSpecifier> specifiers = new ArrayList<BaseEntitlementWithAddOnsSpecifier>();
    specifiers.add(specifier);
    final EntitlementContext pluginContext = new DefaultEntitlementContext(OperationType.UNDO_PENDING_SUBSCRIPTION_OPERATION,
                                                                          getAccountId(),
                                                                          null,
                                                                          specifiers,
                                                                          null,
                                                                          properties,
                                                                          callContext);
    // Delegate the actual undo to the subscription base, wrapping any failure.
    final WithEntitlementPlugin<Void> undoOperation = new WithEntitlementPlugin<Void>() {
        @Override
        public Void doCall(final EntitlementApi entitlementApi, final DefaultEntitlementContext updatedPluginContext) throws EntitlementApiException {
            try {
                getSubscriptionBase().undoChangePlan(callContext);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e);
            }
            return null;
        }
    };
    pluginExecution.executeWithPlugin(undoOperation, pluginContext);
}
/**
 * Changes this entitlement's plan effective on the given local date (or the default
 * timing when the date is null).
 *
 * @param spec          the target plan specifier
 * @param effectiveDate requested change date; may be null
 * @param properties    plugin properties passed through to entitlement plugins
 * @param callContext   caller context
 * @return the refreshed entitlement after the change
 * @throws EntitlementApiException if permissions are missing, the date precedes the
 *         entitlement start, the change is blocked, or the base change fails
 */
@Override
public Entitlement changePlanWithDate(final EntitlementSpecifier spec, @Nullable final LocalDate effectiveDate, final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logChangePlan(log, this, spec, effectiveDate, null);
    checkForPermissions(Permission.ENTITLEMENT_CAN_CHANGE_PLAN, callContext);
    // Get the latest state from disk
    refresh(callContext);
    final BaseEntitlementWithAddOnsSpecifier baseEntitlementWithAddOnsSpecifier = new DefaultBaseEntitlementWithAddOnsSpecifier(
        getBundleId(),
        getBundleExternalKey(),
        null,
        effectiveDate,
        effectiveDate,
        false);
    final List<BaseEntitlementWithAddOnsSpecifier> baseEntitlementWithAddOnsSpecifierList = new ArrayList<BaseEntitlementWithAddOnsSpecifier>();
    baseEntitlementWithAddOnsSpecifierList.add(baseEntitlementWithAddOnsSpecifier);
    final EntitlementContext pluginContext = new DefaultEntitlementContext(OperationType.CHANGE_PLAN,
                                                                          getAccountId(),
                                                                          null,
                                                                          baseEntitlementWithAddOnsSpecifierList,
                                                                          null,
                                                                          properties,
                                                                          callContext);
    final WithEntitlementPlugin<Entitlement> changePlanWithPlugin = new WithEntitlementPlugin<Entitlement>() {
        @Override
        public Entitlement doCall(final EntitlementApi entitlementApi, final DefaultEntitlementContext updatedPluginContext) throws EntitlementApiException {
            // Changing before the entitlement's effective start is invalid.
            if (effectiveDate != null && effectiveDate.compareTo(eventsStream.getEntitlementEffectiveStartDate()) < 0) {
                throw new EntitlementApiException(ErrorCode.SUB_CHANGE_NON_ACTIVE, getId(), getState());
            }
            final InternalCallContext context = internalCallContextFactory.createInternalCallContext(getAccountId(), callContext);
            // Convert the local date to an instant (null keeps the base default).
            final DateTime effectiveChangeDate = effectiveDate != null ? dateHelper.fromLocalDateAndReferenceTime(effectiveDate, context.getCreatedDate(), context) : null;
            // Dry-run to learn the resulting effective date without mutating state.
            final DateTime resultingEffectiveDate;
            try {
                resultingEffectiveDate = subscriptionInternalApi.getDryRunChangePlanEffectiveDate(getSubscriptionBase(), spec, effectiveChangeDate, null, context);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            } catch (final CatalogApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            }
            // Verify no blocking state forbids a change on that date.
            try {
                checker.checkBlockedChange(getSubscriptionBase(), resultingEffectiveDate, context);
            } catch (final BlockingApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            }
            try {
                getSubscriptionBase().changePlanWithDate(spec, resultingEffectiveDate, callContext);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e);
            }
            final Collection<NotificationEvent> notificationEvents = new ArrayList<NotificationEvent>();
            final Iterable<BlockingState> addOnsBlockingStates = computeAddOnBlockingStates(resultingEffectiveDate, notificationEvents, callContext, context);
            // Record the new state first, then insert the notifications to avoid race conditions
            setBlockingStates(addOnsBlockingStates, context);
            for (final NotificationEvent notificationEvent : notificationEvents) {
                recordFutureNotification(resultingEffectiveDate, notificationEvent, context);
            }
            return entitlementApi.getEntitlementForId(getId(), callContext);
        }
    };
    return pluginExecution.executeWithPlugin(changePlanWithPlugin, pluginContext);
}
/**
 * Changes this entitlement's plan using an explicit {@link BillingActionPolicy} to
 * drive the change timing.
 *
 * @param spec         the target plan specifier
 * @param unused       unused date parameter (kept for interface compatibility)
 * @param actionPolicy billing policy deciding when the change takes effect
 * @param properties   plugin properties passed through to entitlement plugins
 * @param callContext  caller context
 * @return the refreshed entitlement after the change
 * @throws EntitlementApiException if permissions are missing, the change is blocked,
 *         or the base change fails
 */
@Override
public Entitlement changePlanOverrideBillingPolicy(final EntitlementSpecifier spec, final LocalDate unused, final BillingActionPolicy actionPolicy, final Iterable<PluginProperty> properties, final CallContext callContext) throws EntitlementApiException {
    logChangePlan(log, this, spec, null, actionPolicy);
    checkForPermissions(Permission.ENTITLEMENT_CAN_CHANGE_PLAN, callContext);
    // Get the latest state from disk
    refresh(callContext);
    final BaseEntitlementWithAddOnsSpecifier baseEntitlementWithAddOnsSpecifier = new DefaultBaseEntitlementWithAddOnsSpecifier(
        getBundleId(),
        getBundleExternalKey(),
        null,
        null,
        null,
        false);
    final List<BaseEntitlementWithAddOnsSpecifier> baseEntitlementWithAddOnsSpecifierList = new ArrayList<BaseEntitlementWithAddOnsSpecifier>();
    baseEntitlementWithAddOnsSpecifierList.add(baseEntitlementWithAddOnsSpecifier);
    final EntitlementContext pluginContext = new DefaultEntitlementContext(OperationType.CHANGE_PLAN,
                                                                          getAccountId(),
                                                                          null,
                                                                          baseEntitlementWithAddOnsSpecifierList,
                                                                          actionPolicy,
                                                                          properties,
                                                                          callContext);
    final WithEntitlementPlugin<Entitlement> changePlanWithPlugin = new WithEntitlementPlugin<Entitlement>() {
        @Override
        public Entitlement doCall(final EntitlementApi entitlementApi, final DefaultEntitlementContext updatedPluginContext) throws EntitlementApiException {
            final InternalCallContext context = internalCallContextFactory.createInternalCallContext(getAccountId(), callContext);
            // Dry-run with the billing policy to learn the resulting effective date.
            final DateTime resultingEffectiveDate;
            try {
                resultingEffectiveDate = subscriptionInternalApi.getDryRunChangePlanEffectiveDate(getSubscriptionBase(), spec, null, actionPolicy, context);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            } catch (final CatalogApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            }
            // Verify no blocking state forbids a change on that date.
            try {
                checker.checkBlockedChange(getSubscriptionBase(), resultingEffectiveDate, context);
            } catch (final BlockingApiException e) {
                throw new EntitlementApiException(e, e.getCode(), e.getMessage());
            }
            try {
                getSubscriptionBase().changePlanWithPolicy(spec, actionPolicy, callContext);
            } catch (final SubscriptionBaseApiException e) {
                throw new EntitlementApiException(e);
            }
            final Collection<NotificationEvent> notificationEvents = new ArrayList<NotificationEvent>();
            final Iterable<BlockingState> addOnsBlockingStates = computeAddOnBlockingStates(resultingEffectiveDate, notificationEvents, callContext, context);
            // Record the new state first, then insert the notifications to avoid race conditions
            setBlockingStates(addOnsBlockingStates, context);
            for (final NotificationEvent notificationEvent : notificationEvents) {
                recordFutureNotification(resultingEffectiveDate, notificationEvent, context);
            }
            return entitlementApi.getEntitlementForId(getId(), callContext);
        }
    };
    return pluginExecution.executeWithPlugin(changePlanWithPlugin, pluginContext);
}
/**
 * Updates this subscription's bill-cycle day (BCD), optionally effective from a
 * given date, by delegating to the subscription-base internal API.
 */
@Override
public void updateBCD(final int newBCD, @Nullable final LocalDate effectiveFromDate, final CallContext callContext) throws EntitlementApiException {
    logUpdateBCD(log, this, newBCD, effectiveFromDate);
    final InternalCallContext internalContext = internalCallContextFactory.createInternalCallContext(getAccountId(), callContext);
    try {
        subscriptionInternalApi.updateBCD(getId(), newBCD, effectiveFromDate, internalContext);
    } catch (final SubscriptionBaseApiException e) {
        // Surface base-layer failures through the entitlement API exception type.
        throw new EntitlementApiException(e);
    }
}
/** Re-reads this entitlement's events stream from disk so subsequent checks see the latest persisted state. */
private void refresh(final TenantContext context) throws EntitlementApiException {
    eventsStream = eventsStreamBuilder.refresh(eventsStream, context);
}
/**
 * Computes the blocking states to apply to add-ons as a consequence of a cancellation
 * or plan change on this (base) subscription.
 *
 * <p>For future-dated operations nothing is persisted now: a notification entry is
 * added to {@code notificationEvents} instead and an empty collection is returned.
 *
 * @param effectiveDate      instant at which the cancellation/change takes effect
 * @param notificationEvents out-parameter collecting future notifications to record
 * @param context            tenant context used to refresh state
 * @param internalCallContext internal context (provides the reference "now")
 * @return add-on blocking states to persist now (empty for non-base subscriptions or
 *         future-dated operations)
 */
public Collection<BlockingState> computeAddOnBlockingStates(final DateTime effectiveDate, final Collection<NotificationEvent> notificationEvents, final TenantContext context, final InternalCallContext internalCallContext) throws EntitlementApiException {
    // Optimization - bail early
    if (!ProductCategory.BASE.equals(getSubscriptionBase().getCategory())) {
        // Only base subscriptions have add-ons
        return ImmutableList.<BlockingState>of();
    }
    // Get the latest state from disk (we just got cancelled or changed plan)
    refresh(context);
    // If cancellation/change occurs in the future, do nothing for now but add a notification entry.
    // This is to distinguish whether a future cancellation was requested by the user, or was a side effect
    // (e.g. base plan cancellation): future entitlement cancellations for add-ons on disk always reflect
    // an explicit cancellation. This trick lets us determine what to do when un-cancelling.
    // This mirror the behavior in subscription base (see DefaultSubscriptionBaseApiService).
    if (effectiveDate.compareTo(internalCallContext.getCreatedDate()) > 0) {
        // Note that usually we record the notification from the DAO. We cannot do it here because not all calls
        // go through the DAO (e.g. change)
        final boolean isBaseEntitlementCancelled = eventsStream.isEntitlementCancelled();
        final NotificationEvent notificationEvent = new EntitlementNotificationKey(getId(), getBundleId(), isBaseEntitlementCancelled ? EntitlementNotificationKeyAction.CANCEL : EntitlementNotificationKeyAction.CHANGE, effectiveDate);
        notificationEvents.add(notificationEvent);
        return ImmutableList.<BlockingState>of();
    }
    return eventsStream.computeAddonsBlockingStatesForNextSubscriptionBaseEvent(effectiveDate);
}
/**
 * Records a future notification on the entitlement service queue.
 *
 * @param effectiveDate     when the notification should fire
 * @param notificationEvent payload to enqueue
 * @param context           internal context providing user token and record ids
 * @throws RuntimeException wrapping queue-lookup or I/O failures (both are
 *         unexpected here and non-recoverable for the caller)
 */
private void recordFutureNotification(final DateTime effectiveDate,
                                      final NotificationEvent notificationEvent,
                                      final InternalCallContext context) {
    try {
        final NotificationQueue subscriptionEventQueue = notificationQueueService.getNotificationQueue(KILLBILL_SERVICES.ENTITLEMENT_SERVICE.getServiceName(),
                                                                                                      DefaultEntitlementService.NOTIFICATION_QUEUE_NAME);
        subscriptionEventQueue.recordFutureNotification(effectiveDate, notificationEvent, context.getUserToken(), context.getAccountRecordId(), context.getTenantRecordId());
    } catch (final NoSuchNotificationQueue | IOException e) {
        // Both failures were wrapped identically before; multi-catch keeps the
        // behavior while removing the duplicated handler.
        throw new RuntimeException(e);
    }
}
/**
 * Persists the subscription-level blocking state together with any add-on blocking
 * states in a single call.
 */
private void setBlockingStates(final BlockingState entitlementBlockingState, final Collection<BlockingState> addOnsBlockingStates, final InternalCallContext internalCallContext) {
    // Merge the entitlement state and the add-on states into one batch before persisting.
    final Collection<BlockingState> allStates = new ArrayList<BlockingState>(addOnsBlockingStates.size() + 1);
    allStates.add(entitlementBlockingState);
    allStates.addAll(addOnsBlockingStates);
    setBlockingStates(allStates, internalCallContext);
}
/** Persists the given blocking states and posts the corresponding blocking-transition events. */
private void setBlockingStates(final Iterable<BlockingState> blockingStates, final InternalCallContext internalCallContext) {
    entitlementUtils.setBlockingStatesAndPostBlockingTransitionEvent(blockingStates, getBundleId(), internalCallContext);
}
//
// Unfortunately the permission checks for the entitlement api cannot *simply* rely on the KillBillShiroAopModule because some of the operations (CANCEL, CHANGE) are
// done through objects that are not injected by Guice, and so the check needs to happen explicitly.
//
/**
 * Verifies the current user holds the given permission; a no-op when the subject
 * is not authenticated (i.e. no authentication filter ran for this request).
 *
 * @throws EntitlementApiException with SECURITY_NOT_ENOUGH_PERMISSIONS on failure
 */
private void checkForPermissions(final Permission permission, final TenantContext callContext) throws EntitlementApiException {
    //
    // If authentication had been done (CorsBasicHttpAuthenticationFilter) we verify the correct permissions exist.
    //
    if (securityApi.isSubjectAuthenticated()) {
        try {
            securityApi.checkCurrentUserPermissions(ImmutableList.of(permission), Logical.AND, callContext);
        } catch (final SecurityApiException e) {
            // NOTE(review): the SecurityApiException cause is dropped here — confirm whether
            // an EntitlementApiException overload exists that can carry it.
            throw new EntitlementApiException(ErrorCode.SECURITY_NOT_ENOUGH_PERMISSIONS);
        }
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.gpu;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ResourceMappings;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.gpu.GpuResourceAllocator;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.docker.DockerRunCommand;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.docker.DockerVolumeCommand;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.resourceplugin.DockerCommandPlugin;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerExecutionException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;
import java.io.StringWriter;
import java.net.URL;
import java.net.URLConnection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.docker.DockerVolumeCommand.VOLUME_NAME_PATTERN;
/**
* Implementation to use nvidia-docker v1 as GPU docker command plugin.
*/
public class NvidiaDockerV1CommandPlugin implements DockerCommandPlugin {
final static Logger LOG = LoggerFactory.
    getLogger(NvidiaDockerV1CommandPlugin.class);

// NM configuration, used to locate the nvidia-docker-plugin endpoint.
private Configuration conf;
// Docker CLI options reported by nvidia-docker-plugin, keyed by option name
// (e.g. "--device" -> {"/dev/nvidia0", ...}); null until init() has run,
// which is also how lazy initialization is detected.
private Map<String, Set<String>> additionalCommands = null;
// Volume driver for the nvidia driver volume; overridden by the plugin's
// "--volume-driver" option when present.
private String volumeDriver = "local";

// Known nvidia-docker-plugin option names. These are constants, so declare
// them static final (they were mutable instance fields before, despite the
// constant-style naming).
private static final String DEVICE_OPTION = "--device";
private static final String VOLUME_DRIVER_OPTION = "--volume-driver";
private static final String MOUNT_RO_OPTION = "--volume";

/**
 * @param conf NodeManager configuration.
 */
public NvidiaDockerV1CommandPlugin(Configuration conf) {
  this.conf = conf;
}
/**
 * Extracts the value part of a {@code key=value} token.
 *
 * @param input a token expected to contain '='
 * @return everything after the first '='
 * @throws IllegalArgumentException when no '=' is present
 */
private String getValue(String input) throws IllegalArgumentException {
  final int eq = input.indexOf('=');
  if (eq == -1) {
    throw new IllegalArgumentException(
        "Failed to locate '=' from input=" + input);
  }
  return input.substring(eq + 1);
}
/**
 * Records an option value under the given option name in {@link #additionalCommands},
 * lazily allocating the map and the per-key set.
 *
 * @param key   option name, e.g. "--device"
 * @param value option value, e.g. "/dev/nvidia0"
 */
private void addToCommand(String key, String value) {
  if (additionalCommands == null) {
    additionalCommands = new HashMap<>();
  }
  // computeIfAbsent replaces the containsKey/put pair: one lookup, same behavior.
  additionalCommands.computeIfAbsent(key, k -> new HashSet<>()).add(value);
}
/**
 * Talks to the nvidia-docker-plugin v1 HTTP endpoint, reads the docker CLI options
 * it advertises, and caches them in {@link #additionalCommands} / {@link #volumeDriver}.
 *
 * <p>A no-op when the endpoint config is empty. Any parse or I/O failure is wrapped
 * in {@link ContainerExecutionException}.
 */
private void init() throws ContainerExecutionException {
  String endpoint = conf.get(
      YarnConfiguration.NVIDIA_DOCKER_PLUGIN_V1_ENDPOINT,
      YarnConfiguration.DEFAULT_NVIDIA_DOCKER_PLUGIN_V1_ENDPOINT);
  if (null == endpoint || endpoint.isEmpty()) {
    LOG.info(YarnConfiguration.NVIDIA_DOCKER_PLUGIN_V1_ENDPOINT
        + " set to empty, skip init ..");
    return;
  }
  String cliOptions;
  try {
    // Talk to plugin server and get options
    URL url = new URL(endpoint);
    URLConnection uc = url.openConnection();
    uc.setRequestProperty("X-Requested-With", "Curl");
    StringWriter writer = new StringWriter();
    // Close the connection's stream when done: IOUtils.copy does not close its
    // arguments, so the previous code leaked the input stream.
    try (InputStream in = uc.getInputStream()) {
      IOUtils.copy(in, writer, "utf-8");
    }
    cliOptions = writer.toString();
    LOG.info("Additional docker CLI options from plugin to run GPU "
        + "containers:" + cliOptions);
    // Parse cli options
    // Examples like:
    // --device=/dev/nvidiactl --device=/dev/nvidia-uvm --device=/dev/nvidia0
    // --volume-driver=nvidia-docker
    // --volume=nvidia_driver_352.68:/usr/local/nvidia:ro
    for (String str : cliOptions.split(" ")) {
      str = str.trim();
      if (str.startsWith(DEVICE_OPTION)) {
        addToCommand(DEVICE_OPTION, getValue(str));
      } else if (str.startsWith(VOLUME_DRIVER_OPTION)) {
        volumeDriver = getValue(str);
        LOG.debug("Found volume-driver:{}", volumeDriver);
      } else if (str.startsWith(MOUNT_RO_OPTION)) {
        String mount = getValue(str);
        // Only read-only mounts are expected from the plugin.
        if (!mount.endsWith(":ro")) {
          throw new IllegalArgumentException(
              "Should not have mount other than ro, command=" + str);
        }
        addToCommand(MOUNT_RO_OPTION,
            mount.substring(0, mount.lastIndexOf(':')));
      } else {
        throw new IllegalArgumentException("Unsupported option:" + str);
      }
    }
  } catch (RuntimeException e) {
    LOG.warn(
        "RuntimeException of " + this.getClass().getSimpleName() + " init:",
        e);
    throw new ContainerExecutionException(e);
  } catch (IOException e) {
    LOG.warn("IOException of " + this.getClass().getSimpleName() + " init:",
        e);
    throw new ContainerExecutionException(e);
  }
}
/**
 * Extracts the GPU index {@code n} from a device name like {@code /dev/nvidia[n]}.
 *
 * @param device device path reported by the plugin
 * @return the numeric index, or -1 when the name does not end in "nvidia" followed
 *         by digits only (e.g. controller devices such as /dev/nvidiactl)
 */
private int getGpuIndexFromDeviceName(String device) {
  final String NVIDIA = "nvidia";
  int idx = device.lastIndexOf(NVIDIA);
  if (idx < 0) {
    return -1;
  }
  // Get last part
  String str = device.substring(idx + NVIDIA.length());
  // Bug fix: a device name ending exactly in "nvidia" produced an empty suffix,
  // and Integer.parseInt("") threw NumberFormatException. Treat it as "no index".
  if (str.isEmpty()) {
    return -1;
  }
  for (int i = 0; i < str.length(); i++) {
    if (!Character.isDigit(str.charAt(i))) {
      return -1;
    }
  }
  return Integer.parseInt(str);
}
/**
 * Returns the set of GPU devices assigned to the container, or an empty set when
 * no resource mappings exist or no GPUs were assigned.
 */
private Set<GpuDevice> getAssignedGpus(Container container) {
  ResourceMappings mappings = container.getResourceMappings();
  if (mappings == null) {
    // No mappings at all -> nothing assigned.
    return Collections.emptySet();
  }
  Set<GpuDevice> gpus = new HashSet<>();
  for (Serializable resource : mappings.getAssignedResources(
      ResourceInformation.GPU_URI)) {
    gpus.add((GpuDevice) resource);
  }
  // When no GPU resource assigned, don't need to update docker command.
  return gpus.isEmpty() ? Collections.<GpuDevice>emptySet() : gpus;
}
/** Returns true when the container's resource request includes at least one GPU. */
@VisibleForTesting
protected boolean requestsGpu(Container container) {
  return 0 < GpuResourceAllocator.getRequestedGpus(container.getResource());
}
/**
 * Lazily initializes the plugin the first time a GPU-requesting container is seen.
 *
 * @param container nmContainer
 * @return true when the container requests at least one GPU
 * @throws ContainerExecutionException when plugin initialization fails
 */
private boolean initializeWhenGpuRequested(Container container)
    throws ContainerExecutionException {
  final boolean gpuRequested = requestsGpu(container);
  // additionalCommands == null means init() has not run yet (lazy init).
  if (gpuRequested && additionalCommands == null) {
    init();
  }
  return gpuRequested;
}
/**
 * Adds the GPU devices assigned to the container (plus controller devices and the
 * read-only driver mounts reported by nvidia-docker-plugin) to the docker run command.
 *
 * @throws ContainerExecutionException when init fails, an assigned GPU is missing from
 *         the plugin output, or an unknown option key is cached
 */
@Override
public synchronized void updateDockerRunCommand(
    DockerRunCommand dockerRunCommand, Container container)
    throws ContainerExecutionException {
  if (!initializeWhenGpuRequested(container)) {
    return;
  }
  Set<GpuDevice> assignedResources = getAssignedGpus(container);
  if (assignedResources == null || assignedResources.isEmpty()) {
    return;
  }
  // Write to dockerRunCommand
  for (Map.Entry<String, Set<String>> option : additionalCommands
      .entrySet()) {
    String key = option.getKey();
    Set<String> values = option.getValue();
    if (key.equals(DEVICE_OPTION)) {
      int foundGpuDevices = 0;
      for (String deviceName : values) {
        // When specified is a GPU card (device name like /dev/nvidia[n]
        // Get index of the GPU (which is [n]).
        // Use a primitive int: the previous Integer boxed needlessly and was
        // unboxed on every comparison.
        int gpuIdx = getGpuIndexFromDeviceName(deviceName);
        if (gpuIdx >= 0) {
          // Use assignedResources to filter --device given by
          // nvidia-docker-plugin.
          for (GpuDevice gpuDevice : assignedResources) {
            if (gpuDevice.getIndex() == gpuIdx) {
              foundGpuDevices++;
              dockerRunCommand.addDevice(deviceName, deviceName);
            }
          }
        } else {
          // When gpuIdx < 0, it is a controller device (such as
          // /dev/nvidiactl). In this case, add device directly.
          dockerRunCommand.addDevice(deviceName, deviceName);
        }
      }
      // Cannot get all assigned Gpu devices from docker plugin output
      if (foundGpuDevices < assignedResources.size()) {
        throw new ContainerExecutionException(
            "Cannot get all assigned Gpu devices from docker plugin output");
      }
    } else if (key.equals(MOUNT_RO_OPTION)) {
      for (String value : values) {
        int idx = value.indexOf(':');
        String source = value.substring(0, idx);
        String target = value.substring(idx + 1);
        dockerRunCommand.addReadOnlyMountLocation(source, target, true);
      }
    } else {
      throw new ContainerExecutionException("Unsupported option:" + key);
    }
  }
}
/**
 * Builds a docker volume-create command for the named volume (if any)
 * referenced by the plugin's read-only mounts (e.g. the nvidia driver
 * volume).
 *
 * @param container the container being launched
 * @return the volume-create command, or null when no GPU was requested
 *         or no named volume is referenced by the plugin output
 * @throws ContainerExecutionException when plugin initialization fails
 */
@Override
public DockerVolumeCommand getCreateDockerVolumeCommand(Container container)
    throws ContainerExecutionException {
  if (!initializeWhenGpuRequested(container)) {
    return null;
  }
  // Get volume name from the plugin-supplied read-only mounts.
  Set<String> mounts = additionalCommands.get(MOUNT_RO_OPTION);
  // Fix: the plugin output may contain no ro-mount options at all, in
  // which case the map lookup returns null and the original code would
  // throw NullPointerException when iterating below.
  if (mounts == null) {
    return null;
  }
  String newVolumeName = null;
  for (String mount : mounts) {
    int idx = mount.indexOf(':');
    if (idx >= 0) {
      String mountSource = mount.substring(0, idx);
      if (VOLUME_NAME_PATTERN.matcher(mountSource).matches()) {
        // This is a valid named volume
        newVolumeName = mountSource;
        LOG.debug("Found volume name for GPU:{}", newVolumeName);
        break;
      } else{
        LOG.debug("Failed to match {} to named-volume regex pattern",
            mountSource);
      }
    }
  }
  if (newVolumeName != null) {
    DockerVolumeCommand command = new DockerVolumeCommand(
        DockerVolumeCommand.VOLUME_CREATE_SUB_COMMAND);
    command.setDriverName(volumeDriver);
    command.setVolumeName(newVolumeName);
    return command;
  }
  return null;
}
/**
 * No-op: this plugin keeps the driver volume across containers, so no
 * docker volume cleanup command is emitted.
 *
 * @param container the container being cleaned up
 * @return always null
 */
@Override
public DockerVolumeCommand getCleanupDockerVolumesCommand(Container container)
    throws ContainerExecutionException {
  // No cleanup needed.
  return null;
}
}
| |
/*
* Copyright (c) 2001, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
package sun.jvm.hotspot.debugger.cdbg.basic;
import java.util.*;
import sun.jvm.hotspot.debugger.*;
import sun.jvm.hotspot.debugger.cdbg.*;
import sun.jvm.hotspot.utilities.AddressOps;
import sun.jvm.hotspot.utilities.Assert;
public class BasicCDebugInfoDataBase implements CDebugInfoDataBase {
private static final int INITIALIZED_STATE = 0;
private static final int CONSTRUCTION_STATE = 1;
private static final int RESOLVED_STATE = 2;
private static final int COMPLETE_STATE = 3;
private int state = INITIALIZED_STATE;
///////////
// Types //
///////////
// Used only during construction
private Map lazyTypeMap;
// Used during construction and at run time for iteration
private List types;
// Used only during runtime
private Map nameToTypeMap;
/////////////
// Symbols //
/////////////
// Used only during construction
private Map lazySymMap;
// List of blocks in increasing order by starting address. These can
// then be binary searched.
private List blocks;
// Name-to-global symbol table
private Map nameToSymMap;
//////////////////
// Line numbers //
//////////////////
private BasicLineNumberMapping lineNumbers;
/** Supports lazy instantiation and references between types and
symbols via insertion using arbitrary Object keys that are
wrapped by LazyTypes. Once the database has been fully
constructed and all types are present, one should call
resolveTypes(), which will resolve all LazyTypes down to
concrete types (and signal an error if some lazy types were
unresolved). */
public void beginConstruction() {
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == INITIALIZED_STATE, "wrong state");
}
state = CONSTRUCTION_STATE;
// Types
lazyTypeMap = new HashMap();
types = new ArrayList();
// Symbols
lazySymMap = new HashMap();
blocks = new ArrayList();
nameToSymMap = new HashMap();
// Line numbers
lineNumbers = new BasicLineNumberMapping();
}
/** Add a type which may later in construction be referred to via a
LazyType with this key. lazyKey may be null. */
public void addType(Object lazyKey, Type type) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == CONSTRUCTION_STATE, "wrong state");
}
if (lazyKey != null) {
if (lazyTypeMap.put(lazyKey, type) != null) {
throw new RuntimeException("Type redefined for lazy key " + lazyKey);
}
} else {
types.add(type);
}
}
public void resolve(ResolveListener listener) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == CONSTRUCTION_STATE, "wrong state");
}
// Go through all types in lazyTypeMap and types.
// Resolve all LazyTypes.
resolveLazyMap(listener);
for (ListIterator iter = types.listIterator(); iter.hasNext(); ) {
BasicType t = (BasicType) iter.next();
BasicType t2 = (BasicType) t.resolveTypes(this, listener);
if (t != t2) {
iter.set(t2);
}
}
// Go through all symbols and resolve references to types and
// references to other symbols
for (Iterator iter = blocks.iterator(); iter.hasNext(); ) {
((BasicSym) iter.next()).resolve(this, listener);
}
for (Iterator iter = nameToSymMap.values().iterator(); iter.hasNext(); ) {
((BasicSym) iter.next()).resolve(this, listener);
}
// Sort blocks in ascending order of starting address (but do not
// change ordering among blocks with the same starting address)
Collections.sort(blocks, new Comparator() {
public int compare(Object o1, Object o2) {
BlockSym b1 = (BlockSym) o1;
BlockSym b2 = (BlockSym) o2;
Address a1 = b1.getAddress();
Address a2 = b2.getAddress();
if (AddressOps.lt(a1, a2)) { return -1; }
if (AddressOps.gt(a1, a2)) { return 1; }
return 0;
}
});
state = RESOLVED_STATE;
}
public void endConstruction() {
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == RESOLVED_STATE, "wrong state");
}
// Move all types to type list
for (Iterator iter = lazyTypeMap.values().iterator(); iter.hasNext(); ) {
types.add(iter.next());
}
// Build name-to-type map
nameToTypeMap = new HashMap();
for (Iterator iter = types.iterator(); iter.hasNext(); ) {
Type t = (Type) iter.next();
if (!t.isConst() && !t.isVolatile()) {
nameToTypeMap.put(t.getName(), t);
}
}
// Lose lazy maps
lazyTypeMap = null;
lazySymMap = null;
// Sort and finish line number information
lineNumbers.sort();
// FIXME: on some platforms it might not be necessary to call
// recomputeEndPCs(). Will have to see what stabs information
// looks like. Should make configurable whether we make this call
// or not.
lineNumbers.recomputeEndPCs();
state = COMPLETE_STATE;
}
public Type lookupType(String name) {
return lookupType(name, 0);
}
public Type lookupType(String name, int cvAttributes) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == COMPLETE_STATE, "wrong state");
}
BasicType t = (BasicType) nameToTypeMap.get(name);
if (t != null) {
if (cvAttributes != 0) {
t = (BasicType) t.getCVVariant(cvAttributes);
}
}
return t;
}
public void iterate(TypeVisitor v) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == COMPLETE_STATE, "wrong state");
}
for (Iterator iter = types.iterator(); iter.hasNext(); ) {
BasicType t = (BasicType) iter.next();
t.visit(v);
}
}
/** Add a BlockSym to the debug information database. The given
BlockSym may be referred to by a LazyBlockSym wrapping the given
Object key, which must be non-null. Any references to other
blocks (for example, the parent scope) should be made with
LazyBlockSyms. These references will be resolved after the
database is built. */
public void addBlock(Object key, BlockSym block) {
if (Assert.ASSERTS_ENABLED) {
Assert.that(key != null, "key must be non-null");
}
lazySymMap.put(key, block);
blocks.add(block);
}
public void addGlobalSym(GlobalSym sym) {
nameToSymMap.put(sym.getName(), sym);
}
public BlockSym debugInfoForPC(Address pc) {
return searchBlocks(pc, 0, blocks.size() - 1);
}
public GlobalSym lookupSym(String name) {
return (GlobalSym) nameToSymMap.get(name);
}
public void addLineNumberInfo(BasicLineNumberInfo info) {
lineNumbers.addLineNumberInfo(info);
}
public LineNumberInfo lineNumberForPC(Address pc) throws DebuggerException {
return lineNumbers.lineNumberForPC(pc);
}
public void iterate(LineNumberVisitor v) {
lineNumbers.iterate(v);
}
//----------------------------------------------------------------------
// Internals only below this point
//
/** Intended only to be used by the BasicType implementation. */
public Type resolveType(Type containingType, Type targetType, ResolveListener listener, String detail) {
BasicType basicTargetType = (BasicType) targetType;
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == CONSTRUCTION_STATE, "wrong state");
}
if (basicTargetType.isLazy()) {
BasicType resolved = (BasicType) lazyTypeMap.get(((LazyType) targetType).getKey());
// FIXME: would like to have an assert here that the target is
// non-null, but apparently have bugs here with forward
// references of pointer types
if (resolved == null) {
listener.resolveFailed(containingType, (LazyType) targetType, detail + " because target type was not found");
return targetType;
}
if (resolved.isLazy()) {
// Might happen for const/var variants for forward references
if (resolved.isConst() || resolved.isVolatile()) {
resolved = (BasicType) resolved.resolveTypes(this, listener);
}
if (resolved.isLazy()) {
listener.resolveFailed(containingType, (LazyType) targetType,
detail + " because target type (with key " +
((Integer) ((LazyType) resolved).getKey()).intValue() +
(resolved.isConst() ? ", const" : ", not const") +
(resolved.isVolatile() ? ", volatile" : ", not volatile") +
") was lazy");
}
}
return resolved;
}
return targetType;
}
/** Intended only to be usd by the BasicSym implementation. */
public Type resolveType(Sym containingSymbol, Type targetType, ResolveListener listener, String detail) {
BasicType basicTargetType = (BasicType) targetType;
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == CONSTRUCTION_STATE, "wrong state");
}
if (basicTargetType.isLazy()) {
BasicType resolved = (BasicType) lazyTypeMap.get(((LazyType) targetType).getKey());
// FIXME: would like to have an assert here that the target is
// non-null, but apparently have bugs here
if (resolved == null) {
listener.resolveFailed(containingSymbol, (LazyType) targetType, detail);
return targetType;
}
if (resolved.isLazy()) {
// Might happen for const/var variants for forward references
if (resolved.isConst() || resolved.isVolatile()) {
resolved = (BasicType) resolved.resolveTypes(this, listener);
}
if (resolved.isLazy()) {
listener.resolveFailed(containingSymbol, (LazyType) targetType, detail);
}
}
return resolved;
}
return targetType;
}
/** Intended only to be usd by the BasicSym implementation. */
public Sym resolveSym(Sym containingSymbol, Sym targetSym, ResolveListener listener, String detail) {
if (targetSym == null) return null;
BasicSym basicTargetSym = (BasicSym) targetSym;
if (Assert.ASSERTS_ENABLED) {
Assert.that(state == CONSTRUCTION_STATE, "wrong state");
}
if (basicTargetSym.isLazy()) {
BasicSym resolved = (BasicSym) lazySymMap.get(((LazyBlockSym) targetSym).getKey());
// FIXME: would like to have an assert here that the target is
// non-null, but apparently have bugs here
if (resolved == null) {
listener.resolveFailed(containingSymbol, (LazyBlockSym) targetSym, detail);
return targetSym;
}
if (resolved.isLazy()) {
listener.resolveFailed(containingSymbol, (LazyBlockSym) targetSym, detail);
}
return resolved;
}
return targetSym;
}
private void resolveLazyMap(ResolveListener listener) {
for (Iterator iter = lazyTypeMap.entrySet().iterator(); iter.hasNext(); ) {
Map.Entry entry = (Map.Entry) iter.next();
BasicType t = (BasicType) entry.getValue();
BasicType t2 = (BasicType) t.resolveTypes(this, listener);
if (t2 != t) {
entry.setValue(t2);
}
}
}
/** Find the block whose starting address is closest to but less
than the given address. */
private BlockSym searchBlocks(Address addr, int lowIdx, int highIdx) {
if (highIdx < lowIdx) return null;
if ((lowIdx == highIdx) || (lowIdx == highIdx - 1)) {
// Base case: start with highIdx and walk backward. See whether
// addr is greater than any of the blocks' starting addresses,
// and if so, return that block.
Address lastAddr = null;
BlockSym ret = null;
for (int i = highIdx; i >= 0; --i) {
BlockSym block = (BlockSym) blocks.get(i);
if (AddressOps.lte(block.getAddress(), addr)) {
if ((lastAddr == null) || (AddressOps.equal(block.getAddress(), lastAddr))) {
lastAddr = block.getAddress();
ret = block;
} else {
break;
}
}
}
return ret;
}
int midIdx = (lowIdx + highIdx) >> 1;
BlockSym block = (BlockSym) blocks.get(midIdx);
// See address relationship
if (AddressOps.lte(block.getAddress(), addr)) {
// Always move search up
return searchBlocks(addr, midIdx, highIdx);
} else {
// Always move search down
return searchBlocks(addr, lowIdx, midIdx);
}
}
}
| |
package org.basex.qt3ts.fn;
import org.basex.tests.bxapi.*;
import org.basex.tests.qt3ts.*;
/**
* Tests for the adjust-date-to-timezone() function.
*
* @author BaseX Team 2005-15, BSD License
* @author Leo Woerteler
*/
@SuppressWarnings("all")
public class FnAdjustDateToTimezone extends QT3TestSet {
/**
* A test whose essence is: `adjust-date-to-timezone()`. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc1() {
final XQuery query = new XQuery(
"adjust-date-to-timezone()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPST0017")
);
}
/**
* Example from F&O. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc10() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\"), xs:dayTimeDuration(\"-PT5H0M\")) eq xs:date(\"2002-03-07-05:00\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Example from F&O. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc11() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2002-03-07\"), xs:dayTimeDuration(\"-PT10H\")) eq xs:date(\"2002-03-07-10:00\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Example from F&O. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc12() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\"), xs:dayTimeDuration(\"-PT10H\")) eq xs:date(\"2002-03-06-10:00\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Example from F&O. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc13() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2002-03-07\"), ()) eq xs:date(\"2002-03-07\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Example from F&O. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc14() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\"), ()) eq xs:date(\"2002-03-07\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* A test whose essence is: `adjust-date-to-timezone((), (), "WRONG PARAM")`. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc2() {
final XQuery query = new XQuery(
"adjust-date-to-timezone((), (), \"WRONG PARAM\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("XPST0017")
);
}
/**
* A test whose essence is: `empty(adjust-date-to-timezone(()))`. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc3() {
final XQuery query = new XQuery(
"empty(adjust-date-to-timezone(()))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* A test whose essence is: `empty(adjust-date-to-timezone((), ()))`. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc4() {
final XQuery query = new XQuery(
"empty(adjust-date-to-timezone((), ()))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* A test whose essence is: `adjust-date-to-timezone(()) instance of xs:date?`. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc5() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(()) instance of xs:date?",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Passing a too large xs:dayTimeDuration as timezone to adjust-date-to-timezone(). .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc6() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2001-02-03\"), xs:dayTimeDuration(\"PT14H1M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("FODT0003")
);
}
/**
* Passing a too small xs:dayTimeDuration as timezone to adjust-date-to-timezone(). .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc7() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2001-02-03\"), xs:dayTimeDuration(\"-PT14H1M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("FODT0003")
);
}
/**
* Passing a xs:dayTimeDuration as timezone to adjust-date-to-timezone() which isn't an integral number of minutes. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc8() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2001-02-03\"), xs:dayTimeDuration(\"PT14H0M0.001S\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("FODT0003")
);
}
/**
* Example from F&O. .
*/
@org.junit.Test
public void kAdjDateToTimezoneFunc9() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"2002-03-07\"), xs:dayTimeDuration(\"-PT5H0M\")) eq xs:date(\"2002-03-07-05:00\")",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Check optimization codepath in adjust-date-to-timezone .
*/
@org.junit.Test
public void cbclAdjustDateToTimezone001() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(current-date(), implicit-timezone()) eq current-date()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test that adjust-date-to-timezone underflows nicely. .
*/
@org.junit.Test
public void cbclAdjustDateToTimezone002() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"-25252734927766555-06-07+02:00\"), xs:dayTimeDuration(\"PT0S\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("FODT0001")
);
}
/**
* A query testing that adjust-date-to-timezone overflows nicely. .
*/
@org.junit.Test
public void cbclAdjustDateToTimezone003() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(xs:date(\"25252734927766555-07-28-12:00\"), xs:dayTimeDuration(\"PT12H\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
error("FODT0001")
);
}
/**
* Check optimization codepath in adjust-date-to-timezone .
*/
@org.junit.Test
public void cbclAdjustDateToTimezone004() {
final XQuery query = new XQuery(
"adjust-date-to-timezone(current-date(), xs:dayTimeDuration(\"PT12H\")) eq adjust-date-to-timezone(current-date(), xs:dayTimeDuration(\"-PT12H\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as per example 1 (for this function) of the F&O specs. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone1() {
final XQuery query = new XQuery(
"fn:adjust-date-to-timezone(xs:date(\"2002-03-07-05:00\"),xs:dayTimeDuration(\"-PT5H0M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "2002-03-07-05:00")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as part of an subtraction expression. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone10() {
final XQuery query = new XQuery(
"fn:adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\")) - fn:adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "PT0S")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as an argument to a string function. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone11() {
final XQuery query = new XQuery(
"fn:string(fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\"),()))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "2002-03-07")
);
}
/**
* Test Description: Evaluates the string value The "adjust-date-to-timezone" function as an argument to a boolean function. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone12() {
final XQuery query = new XQuery(
"fn:boolean(fn:string(fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\"),())))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test Description: Evaluates The string value of "adjust-date-to-timezone" function as an argument to the "fn:not" function. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone13() {
final XQuery query = new XQuery(
"fn:not(fn:string(fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\"),())))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
* Test Description: Evaluates The string value of "adjust-date-to-timezone" function as part of a boolean (or) expression and the fn:true function. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone14() {
final XQuery query = new XQuery(
"fn:string(fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\"),())) or fn:true()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test Description: Evaluates The string value of "adjust-date-to-timezone" function as part of a boolean (or) expression and the fn:false function. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone15() {
final XQuery query = new XQuery(
"fn:string(fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\"),())) or fn:false()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test Description: Evaluates The string value of "adjust-date-to-timezone" function as part of a boolean (and) expression and the fn:true function. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone16() {
final XQuery query = new XQuery(
"fn:string(fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\"),())) and fn:true()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(true)
);
}
/**
* Test Description: Evaluates The string value of "adjust-date-to-timezone" function as part of a boolean (and) expression and the fn:false function. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone17() {
final XQuery query = new XQuery(
"fn:string(fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\"),())) and fn:false()",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as part of a subtraction expression, which results on a negative number. Uses one adjust-date-to-timezone function and one xs:date constructor. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone18() {
final XQuery query = new XQuery(
"let $tz := xs:dayTimeDuration(\"PT10H\") \n" +
" return fn:adjust-date-to-timezone(xs:date(\"2002-03-07Z\"),$tz) - xs:date(\"2006-03-07Z\")\n" +
" ",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "-P1461DT10H")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function where an xs:date value is subtracted. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone19() {
final XQuery query = new XQuery(
"let $tz := xs:dayTimeDuration(\"PT10H\") \n" +
" return fn:adjust-date-to-timezone(xs:date(\"2004-03-07Z\"),$tz) - xs:date(\"2001-03-07Z\")\n" +
" ",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "P1095DT14H")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as per example 2 (for this function) of the F&O specs. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone2() {
final XQuery query = new XQuery(
"fn:adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\"),xs:dayTimeDuration(\"-PT5H0M\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "2002-03-07-05:00")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as part of a comparisson expression (ge operator). .
*/
@org.junit.Test
public void fnAdjustDateToTimezone20() {
final XQuery query = new XQuery(
"fn:adjust-date-to-timezone(xs:date(\"2002-03-07-04:00\")) ge fn:adjust-date-to-timezone(xs:date(\"2005-03-07-04:00\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertBoolean(false)
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as per example 3 (for this function) of the F&O specs. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone3() {
final XQuery query = new XQuery(
"let $tz := xs:dayTimeDuration(\"-PT10H\") \n" +
" return fn:adjust-date-to-timezone(xs:date(\"2002-03-07\"), $tz)",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "2002-03-07-10:00")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as per example 4 (for this function) of the F&O specs. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone4() {
final XQuery query = new XQuery(
"let $tz := xs:dayTimeDuration(\"-PT10H\") \n" +
" return fn:adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\"), $tz)",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "2002-03-06-10:00")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as per example 5 (for this function) of the F&O specs. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone5() {
final XQuery query = new XQuery(
"fn:adjust-date-to-timezone(xs:date(\"2002-03-07\"), ())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "2002-03-07")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as per example 6 (for this function) of the F&O specs. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone6() {
final XQuery query = new XQuery(
"fn:adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\"), ())",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "2002-03-07")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function using the empty sequence as a value to the first argument. Uses "fn:count" to avoid empty file. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone7() {
final XQuery query = new XQuery(
"fn:count(fn:adjust-date-to-timezone(()))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "0")
);
}
/**
* Test Description: Evaluates The "adjust-date-to-timezone" function as part of a subtraction expression, whicg results on a negative number. Uses two adjust-date-to-timezone functions. .
*/
@org.junit.Test
public void fnAdjustDateToTimezone9() {
final XQuery query = new XQuery(
"fn:adjust-date-to-timezone(xs:date(\"2002-03-07-07:00\")) - fn:adjust-date-to-timezone(xs:date(\"2006-03-07-07:00\"))",
ctx);
try {
result = new QT3Result(query.value());
} catch(final Throwable trw) {
result = new QT3Result(trw);
} finally {
query.close();
}
test(
assertStringValue(false, "-P1461D")
);
}
/**
   * Two-argument form at the lower bound: adjusting 1970-01-01Z by -PT10H
   * shifts the date back a day and attaches the -10:00 offset.
   */
  @org.junit.Test
  public void fnAdjustDateToTimezone1args1() {
    final String expr =
        "fn:adjust-date-to-timezone(xs:date(\"1970-01-01Z\"),xs:dayTimeDuration(\"-PT10H\"))";
    final XQuery query = new XQuery(expr, ctx);
    try {
      result = new QT3Result(query.value());
    } catch (final Throwable err) {
      result = new QT3Result(err);
    } finally {
      query.close();
    }
    test(assertStringValue(false, "1969-12-31-10:00"));
  }
/**
   * Two-argument form at mid range: adjusting 1983-11-17Z by -PT10H shifts
   * the date back a day and attaches the -10:00 offset.
   */
  @org.junit.Test
  public void fnAdjustDateToTimezone1args2() {
    final String expr =
        "fn:adjust-date-to-timezone(xs:date(\"1983-11-17Z\"),xs:dayTimeDuration(\"-PT10H\"))";
    final XQuery query = new XQuery(expr, ctx);
    try {
      result = new QT3Result(query.value());
    } catch (final Throwable err) {
      result = new QT3Result(err);
    } finally {
      query.close();
    }
    test(assertStringValue(false, "1983-11-16-10:00"));
  }
/**
   * Two-argument form at the upper bound: adjusting 2030-12-31Z by -PT10H
   * shifts the date back a day and attaches the -10:00 offset.
   */
  @org.junit.Test
  public void fnAdjustDateToTimezone1args3() {
    final String expr =
        "fn:adjust-date-to-timezone(xs:date(\"2030-12-31Z\"),xs:dayTimeDuration(\"-PT10H\"))";
    final XQuery query = new XQuery(expr, ctx);
    try {
      result = new QT3Result(query.value());
    } catch (final Throwable err) {
      result = new QT3Result(err);
    } finally {
      query.close();
    }
    test(assertStringValue(false, "2030-12-30-10:00"));
  }
}
| |
package com.x5.util;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// This bare-bones class provides a bare minimum XML parsing functionality
// and is perfect for thin clients that get a lot of very short messages
// formatted in XML. The XML must be very well-formed, any extraneous spaces
// or angle brackets will cause the parser to choke. The need to keep
// the client small outweighs the convenience that better exception handling
// would add here.
public class LiteXml
{
    /** The raw XML text this node wraps. */
    private String xml;
    /** Cached attribute map for this node's tag; parsed lazily by getAttributes(). */
    private Map<String,String> attrs = null;

    // Initial child capacity of the endpoints array used while parsing for
    // children; the array grows by this amount whenever it fills, so there is
    // no hard limit on child count. Nested nodes don't add to the final count.
    private static final int MAX_PARSE = 15;

    public LiteXml(String xmlNode)
    {
        this.xml = xmlNode;
    }

    /**
     * Returns the tag name of this node, or null if no well-formed open tag
     * is found. A leading XML declaration ("&lt;?xml ... ?&gt;") is skipped.
     */
    public String getNodeType()
    {
        if (xml == null) return null;
        int startAt = 0;
        int headerPos = xml.indexOf("?>");
        if (headerPos > -1) {
            startAt = headerPos+2;
        }
        int begPos = xml.indexOf('<',startAt);
        if (begPos < 0) return null;
        // assume space or > follows nodetype (ie no cr/lf)
        // assume no space between < and nodetype
        int spacePos = xml.indexOf(' ',begPos);
        // assume attribute names and values do not contain >
        int endPos = xml.indexOf('>',begPos);
        if (spacePos > -1 && spacePos < endPos) endPos = spacePos;
        if (endPos < begPos+1) return null;
        return xml.substring(begPos+1,endPos);
    }

    /**
     * Returns the attributes of this node's top-level tag as a name/value
     * map, or null when the tag is malformed or carries no attributes.
     * The parsed map is cached on first call.
     */
    public Map<String,String> getAttributes()
    {
        if (xml == null) return null;
        if (this.attrs != null) return this.attrs;
        // pick a candidate for close-tag position
        int tagEndPos = xml.indexOf('>');
        // not found? bail.
        if (tagEndPos < 0) return null;
        int spacePos = xml.indexOf(' ');
        // no attributes? bail.
        if (spacePos < 0 || spacePos > tagEndPos) return null;
        // narrow the parsing space to just this tag's attributes
        String attrDefs = xml.substring(spacePos+1,tagEndPos);
        this.attrs = parseAttributes(attrDefs);
        return this.attrs;
    }

    /**
     * Parses 'name="value" name2="value2"' pairs. Values may contain
     * backslash-escaped quotes (\") and backslashes (\\); XML entities in
     * values are decoded before storing.
     */
    private Map<String,String> parseAttributes(String attrDef)
    {
        Map<String,String> parsed = new HashMap<String,String>();
        int cursor = 0;
        while (cursor < attrDef.length()) {
            // establish value location: first seek =, then the opening double-quote
            int openQuotePos = attrDef.indexOf('=',cursor);
            if (openQuotePos < 0) break;
            // store the param name
            String param = attrDef.substring(cursor,openQuotePos);
            openQuotePos = attrDef.indexOf('"',openQuotePos + 1);
            if (openQuotePos < 0) break;
            cursor = openQuotePos + 1;
            // look for unescaped closing double-quote
            int closeQuotePos = nextUnescapedDelim("\"",attrDef,cursor);
            if (closeQuotePos < 0) break;
            // properly delimited value found
            String val = attrDef.substring(cursor,closeQuotePos);
            // unescape backslash-escaped quotes and backslashes in the value
            val = val.replaceAll("\\\\\"", "\"");
            val = val.replaceAll("\\\\\\\\", "\\\\");
            parsed.put(param.trim(), unescapeXML(val));
            // seek to next attribute
            cursor = attrDef.indexOf(' ',closeQuotePos+1);
            if (cursor < 0) break;
            cursor++;
        }
        return parsed;
    }

    /**
     * Finds the next occurrence of delim at or after searchFrom that is not
     * preceded by an odd number of backslashes. Returns -1 when none exists.
     */
    public static int nextUnescapedDelim(String delim, String toScan, int searchFrom)
    {
        int delimPos = toScan.indexOf(delim, searchFrom);
        if (delimPos < 0) return -1;
        boolean provenDelimiter = false;
        while (!provenDelimiter) {
            // count backslashes immediately preceding this candidate
            int bsCount = 0;
            while (delimPos-(1+bsCount) >= searchFrom
                    && toScan.charAt(delimPos - (1+bsCount)) == '\\') {
                bsCount++;
            }
            // an even count means the delimiter itself is not escaped
            // (each backslash pair is an escaped backslash)
            if (bsCount % 2 == 0) {
                provenDelimiter = true;
            } else {
                // escaped; keep scanning for the true delimiter
                delimPos = toScan.indexOf(delim, delimPos+1);
                if (delimPos < 0) return -1;
            }
        }
        return delimPos;
    }

    /** Returns the named attribute's value, or null if absent. */
    public String getAttribute(String attr)
    {
        Map<String,String> myAttrs = getAttributes();
        if (myAttrs == null || myAttrs.size() < 1) {
            return null;
        }
        return myAttrs.get(attr);
    }

    /**
     * Returns everything between this node's open and close tags, undecoded.
     * Returns the whole string when begin/end tags are not matched (probably
     * orphaned sub-elements), or null when no content can be located.
     */
    private String getRawNodeValue()
    {
        if (xml == null) return null;
        // assume single node
        String nodeType = getNodeType();
        if (nodeType == null) return null;
        int topTagEnd = xml.indexOf(nodeType) + nodeType.length();
        topTagEnd = xml.indexOf('>',topTagEnd);
        int endTagBeg = xml.lastIndexOf('<');
        if (topTagEnd < 0 || endTagBeg < topTagEnd) {
            return null;
        } else {
            if (xml.indexOf(nodeType,endTagBeg) < 0) {
                // begin and end tags are NOT matched;
                // this string is probably orphaned sub-elements
                return xml;
            } else {
                return xml.substring(topTagEnd+1,endTagBeg);
            }
        }
    }

    /** True when the trimmed text is a single CDATA section. */
    private boolean isCDATA(String x)
    {
        if (x == null) return false;
        String contents = x.trim();
        return contents.startsWith("<![CDATA[") && contents.endsWith("]]>");
    }

    /**
     * Returns this node's text content: the literal interior of a CDATA
     * section, or the entity-decoded body otherwise. Null when absent.
     */
    public String getNodeValue()
    {
        String contents = getRawNodeValue();
        if (contents == null) return null;
        if (isCDATA(contents)) {
            // BUGFIX: compute both substring bounds on the *trimmed* string.
            // The old code mixed the trimmed start offset with the untrimmed
            // length, corrupting (or throwing on) CDATA wrapped in whitespace.
            String trimmed = contents.trim();
            return trimmed.substring(9, trimmed.length()-3); // strip <![CDATA[ and ]]>
        } else {
            return LiteXml.unescapeXML(contents);
        }
    }

    /**
     * Returns only the direct children with the given tag name, or null when
     * there are none. Shares the limitations of getChildNodes().
     *
     * @param nodeType tag name to match
     * @return matching child nodes as an array of LiteXml objects
     */
    public LiteXml[] getChildNodes(String nodeType)
    {
        if (nodeType == null) return null;
        LiteXml[] children = getChildNodes();
        if (children == null) return null;
        // first pass: count matching nodes
        boolean[] isMatch = new boolean[children.length];
        int matches = 0;
        for (int i=0; i<children.length; i++) {
            // BUGFIX: compare via the known-non-null name so a child whose
            // type cannot be parsed (null) is skipped instead of throwing NPE
            if (nodeType.equals(children[i].getNodeType())) {
                matches++;
                isMatch[i] = true;
            }
        }
        if (matches == 0) return null;
        if (matches == children.length) return children;
        // second pass: build the smaller matching-only array
        LiteXml[] matchingNodes = new LiteXml[matches];
        matches = 0;
        for (int i=0; i<isMatch.length; i++) {
            if (isMatch[i]) {
                matchingNodes[matches++] = children[i];
            }
        }
        return matchingNodes;
    }

    /**
     * Returns the direct children of this node, or null when there are none
     * (or the content is CDATA / malformed).
     *
     * Lightweight by design: extremely limited xpath support, no namespace
     * support, and child positions are tracked in a plain int array that is
     * grown on demand instead of using a collection class.
     *
     * Assumes all angle brackets are tag boundaries (none appear in data).
     *
     * @return child nodes as an array of LiteXml objects
     */
    public LiteXml[] getChildNodes()
    {
        if (xml == null) return null;
        String insides = getRawNodeValue();
        if (insides == null || isCDATA(insides)) return null;
        int[] endpoints = new int[MAX_PARSE*2];
        int marker = 0;
        int len = insides.length();
        int count = 0;
        while (marker < len) {
            if (count*2 >= endpoints.length) {
                // grow to allow unlimited children
                endpoints = extendArray(endpoints);
            }
            // locate beginning of child
            int opening = insides.indexOf('<',marker);
            if (opening < 0) break;
            // reject a stray trailing '<' (guarded to avoid index overrun)
            // and closing tags eg </TAG>
            if (opening+1 >= len || insides.charAt(opening+1) == '/') return null;
            int closing = insides.indexOf('>',opening+1);
            if (closing < 0) return null;
            // a self-closing tag eg <TAG attr="data" /> is a complete child
            if (insides.charAt(closing-1) == '/') {
                endpoints[count*2] = opening;
                endpoints[count*2+1] = closing+1;
                count++;
                marker = closing+1;
                continue;
            }
            // scan ahead for the end tag, then verify that it matches our tag
            // and not some nested tag of the same nodetype
            int spacePos = insides.indexOf(' ',opening+1);
            int bracketPos = insides.indexOf('>',opening+1);
            if (spacePos < 0 && bracketPos < 0) return null;
            int typeEnd = spacePos;
            if (typeEnd < 0 || typeEnd > bracketPos) typeEnd = bracketPos;
            String type = insides.substring(opening+1,typeEnd);
            String childEnd = "</" + type;
            int childEndPos = insides.indexOf(childEnd,closing+1);
            String nestedTag = "<" + type;
            int nestedPos = insides.indexOf(nestedTag,closing+1);
            // handle nesting: each nested open tag found before the candidate
            // end tag pushes the true end tag one occurrence further out.
            // It doesn't matter that open/close pairs aren't correctly matched
            // to each other, only that they occur in pairs.
            while (nestedPos > -1 && nestedPos < childEndPos) {
                childEndPos = insides.indexOf(childEnd,childEndPos + 3);
                if (childEndPos < 0) return null;
                nestedPos = insides.indexOf(nestedTag,nestedPos + 3);
            }
            int finalBoundary = insides.indexOf('>',childEndPos+2);
            if (finalBoundary < 0) return null; // fatal
            endpoints[count*2] = opening;
            endpoints[count*2+1] = finalBoundary+1;
            count++;
            marker = finalBoundary+1;
        }
        if (count < 1) return null;
        LiteXml[] children = new LiteXml[count];
        for (int i=0; i<count; i++) {
            children[i] = new LiteXml(insides.substring(endpoints[i*2],endpoints[i*2+1]));
        }
        return children;
    }

    // Grows the endpoints array as needed. A plain array (not java.util.Vector)
    // keeps this usable in minimal JVMs, per the original design note.
    private int[] extendArray(int[] endpoints)
    {
        int[] largerArray = new int[endpoints.length + MAX_PARSE*2];
        System.arraycopy(endpoints,0,largerArray,0,endpoints.length);
        return largerArray;
    }

    /** Returns the first direct child, or null when there are none. */
    public LiteXml getFirstChild()
    {
        LiteXml[] children = this.getChildNodes();
        if (children == null) return null;
        return children[0];
    }

    /** Returns the decoded value of the node at the given absolute path, or null. */
    public String getPathValue(String xpathLite)
    {
        LiteXml x = findNode(xpathLite);
        if (x == null) return null;
        return x.getNodeValue();
    }

    /** Returns the decoded value of the node at the given child-relative path, or null. */
    public String getNodeValue(String branchPath)
    {
        if (branchPath == null) return null;
        return getPathValue(normalizeBranchPath(branchPath));
    }

    /**
     * Converts a relative child path to an absolute path by prepending a
     * "*" (match-any) root segment.
     */
    private String normalizeBranchPath(String branchPath)
    {
        if (branchPath == null) return null;
        if (branchPath.startsWith("/")) {
            return "*" + branchPath;
        } else {
            return "*/" + branchPath;
        }
    }

    /** Finds a descendant by child-relative path, or null. */
    public LiteXml findChildNode(String branchPath)
    {
        return findNode( normalizeBranchPath(branchPath) );
    }

    /**
     * Finds a descendant by slash-separated absolute path whose first segment
     * names (or wildcards) this node. A leading "//" is not supported.
     */
    public LiteXml findNode(String xpathLite)
    {
        if (xpathLite == null || xpathLite.length() == 0) return null;
        if (xpathLite.charAt(0) == '/') {
            // BUGFIX: length check prevents out-of-range charAt on a bare "/"
            if (xpathLite.length() > 1 && xpathLite.charAt(1) == '/') {
                // leading double slash // not supported
                return null;
            }
            xpathLite = xpathLite.substring(1);
        }
        java.util.StringTokenizer splitter = new java.util.StringTokenizer(xpathLite, "/");
        int depth = splitter.countTokens();
        String[] nodeNames = new String[depth];
        for (int i=0; i<depth; i++) {
            nodeNames[i] = splitter.nextToken();
        }
        return findNode(nodeNames);
    }

    /** Array form of findNode(String); segments match literally or via "*". */
    public LiteXml[] findNodeArrayStub = null; // (no-op placeholder removed)

    public LiteXml findNode(String[] xpathLite)
    {
        // BUGFIX: guard empty/null paths, which used to overrun the array
        if (xpathLite == null || xpathLite.length == 0) return null;
        return findNodeX(xpathLite, 0);
    }

    /** True when the node name equals the pattern or the pattern is "*". */
    private static boolean isMatch(String nodeName, String pattern)
    {
        if (nodeName == null || pattern == null) return false;
        if (nodeName.equals(pattern)) return true;
        return pattern.equals("*");
    }

    /**
     * Recursive descent: segment x must match this node; segment x+1 selects
     * a child, either as the final result or as the node to recurse into.
     */
    private LiteXml findNodeX(String[] xpathLite, int x)
    {
        if ( !isMatch(this.getNodeType(), xpathLite[x]) ) return null;
        // BUGFIX: a single-segment (or exhausted) path used to overrun the
        // array below; with no deeper segment there is nothing to descend to.
        if (x+1 >= xpathLite.length) return null;
        LiteXml[] childNodes = this.getChildNodes();
        if (childNodes == null) return null;
        for (int i=0; i<childNodes.length; i++) {
            LiteXml child = childNodes[i];
            if ( isMatch(child.getNodeType(), xpathLite[x+1]) ) {
                if (xpathLite.length == x+2) return child;
                LiteXml node = child.findNodeX(xpathLite, x+1);
                if (node != null) return node;
            }
        }
        return null;
    }

    private static final Pattern XML_ENTITY_REGEX = Pattern.compile( "&(#?)([^;]+);" );
    private static final Map<String,String> STD_ENTITIES = getStandardEntities();

    /**
     * Decodes numeric character references (&amp;#NN; / &amp;#xHH;) and the
     * five standard named entities. Unknown named entities pass through
     * untouched. Returns null for null input.
     */
    public static String unescapeXML( final String xml )
    {
        if (xml == null) return null;
        // Matcher.appendReplacement requires a StringBuffer, not StringBuilder
        StringBuffer unescapedOutput = new StringBuffer( xml.length() );
        Matcher m = XML_ENTITY_REGEX.matcher( xml );
        String entity;
        String hashmark;
        String ent;
        int code;
        while ( m.find() ) {
            ent = m.group(2);
            hashmark = m.group(1);
            if ( (hashmark != null) && (hashmark.length() > 0) ) {
                // numeric reference; leading x/X means hexadecimal
                if ( ent.substring(0,1).toLowerCase().equals("x") ) {
                    code = Integer.parseInt( ent.substring(1), 16 );
                } else {
                    code = Integer.parseInt( ent );
                }
                entity = Character.toString( (char) code );
            } else {
                entity = STD_ENTITIES.get( ent );
                if ( entity == null ) {
                    // not a known entity - pass it through untouched
                    entity = "&" + ent + ';';
                }
            }
            // BUGFIX: quote the replacement so a literal '$' or '\' in the
            // decoded text (eg "&#36;") is not misread as a group reference.
            m.appendReplacement( unescapedOutput, Matcher.quoteReplacement(entity) );
        }
        m.appendTail( unescapedOutput );
        return unescapedOutput.toString();
    }

    /** The five predefined XML entities. */
    private static Map<String,String> getStandardEntities()
    {
        Map<String,String> entities = new HashMap<String,String>(10);
        entities.put( "lt", "<" );
        entities.put( "gt", ">" );
        entities.put( "amp", "&" );
        entities.put( "apos", "'" );
        entities.put( "quot", "\"" );
        return entities;
    }

    @Override
    public String toString()
    {
        return xml;
    }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.folding.impl;
import com.intellij.lang.folding.FoldingDescriptor;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.FoldRegion;
import com.intellij.openapi.editor.FoldingGroup;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.FoldingModelEx;
import com.intellij.openapi.editor.impl.FoldingModelImpl;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SmartPointerManager;
import com.intellij.util.ObjectUtils;
import com.intellij.util.SlowOperations;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import java.util.*;
/**
 * Applies a freshly computed set of folding descriptors to an editor's folding model:
 * removes fold regions that no longer match the new descriptors, creates regions for
 * the descriptors that have no region yet, and decides each new region's
 * expanded/collapsed state (preserving user state where possible).
 *
 * NOTE(review): appears intended to run inside a folding-model batch operation
 * (it mutates the model directly in run()) — confirm with callers.
 */
final class UpdateFoldRegionsOperation implements Runnable {
  /** How the "collapsed by default" setting is applied to newly created regions. */
  enum ApplyDefaultStateMode { YES, EXCEPT_CARET_REGION, NO }
  private static final Logger LOG = Logger.getInstance(UpdateFoldRegionsOperation.class);
  // Marks regions that may be dropped even while collapsed (see shouldRemoveRegion).
  private static final Key<Boolean> CAN_BE_REMOVED_WHEN_COLLAPSED = Key.create("canBeRemovedWhenCollapsed");
  // Remembers the default-collapsed flag the region was created with.
  static final Key<Boolean> COLLAPSED_BY_DEFAULT = Key.create("collapsedByDefault");
  // A region with no SIGNATURE user data is treated as a 'light' region below.
  static final Key<String> SIGNATURE = Key.create("signature");
  static final String NO_SIGNATURE = "no signature";
  // Orders elements by start offset descending, then end offset descending.
  private static final Comparator<PsiElement> COMPARE_BY_OFFSET_REVERSED = (element, element1) -> {
    int startOffsetDiff = element1.getTextRange().getStartOffset() - element.getTextRange().getStartOffset();
    return startOffsetDiff == 0 ? element1.getTextRange().getEndOffset() - element.getTextRange().getEndOffset() : startOffsetDiff;
  };
  private final Project myProject;
  private final Editor myEditor;
  private final PsiFile myFile;
  @NotNull
  private final ApplyDefaultStateMode myApplyDefaultState;
  // element -> region infos, ordered by COMPARE_BY_OFFSET_REVERSED (see FoldingMap).
  private final FoldingMap myElementsToFoldMap = new FoldingMap();
  // All region infos, in the order they were supplied.
  private final Set<FoldingUpdate.RegionInfo> myRegionInfos = new LinkedHashSet<>();
  // group -> its region infos; used to validate grouped regions as a whole.
  private final MultiMap<FoldingGroup, FoldingUpdate.RegionInfo> myGroupedRegionInfos = new MultiMap<>();
  private final boolean myKeepCollapsedRegions;
  private final boolean myForInjected;
  /**
   * @param elementsToFold      the freshly computed folding descriptors to apply
   * @param applyDefaultState   how default collapsed state should be applied
   * @param keepCollapsedRegions when true, collapsed regions are kept even if stale
   * @param forInjected         true when operating on an injected-fragment editor
   */
  UpdateFoldRegionsOperation(@NotNull Project project,
                             @NotNull Editor editor,
                             @NotNull PsiFile file,
                             @NotNull List<? extends FoldingUpdate.RegionInfo> elementsToFold,
                             @NotNull ApplyDefaultStateMode applyDefaultState,
                             boolean keepCollapsedRegions,
                             boolean forInjected) {
    myProject = project;
    myEditor = editor;
    myFile = file;
    myApplyDefaultState = applyDefaultState;
    myKeepCollapsedRegions = keepCollapsedRegions;
    myForInjected = forInjected;
    // Index the incoming infos three ways: by element, as a set, and by group.
    for (FoldingUpdate.RegionInfo regionInfo : elementsToFold) {
      myElementsToFoldMap.putValue(regionInfo.element, regionInfo);
      myRegionInfos.add(regionInfo);
      FoldingGroup group = regionInfo.descriptor.getGroup();
      if (group != null) myGroupedRegionInfos.putValue(group, regionInfo);
    }
  }
  @Override
  public void run() {
    EditorFoldingInfo info = EditorFoldingInfo.get(myEditor);
    FoldingModelEx foldingModel = (FoldingModelEx)myEditor.getFoldingModel();
    // Removed regions record their expand state here so replacements can inherit it.
    Map<TextRange,Boolean> rangeToExpandStatusMap = new HashMap<>();
    removeInvalidRegions(info, foldingModel, rangeToExpandStatusMap);
    Map<FoldRegion, Boolean> shouldExpand = new HashMap<>();
    Map<FoldingGroup, Boolean> groupExpand = new HashMap<>();
    List<FoldRegion> newRegions = addNewRegions(info, foldingModel, rangeToExpandStatusMap, shouldExpand, groupExpand);
    applyExpandStatus(newRegions, shouldExpand, groupExpand);
    foldingModel.clearDocumentRangesModificationStatus();
  }
  /** Applies the computed expand state; grouped regions take their group's state. */
  private static void applyExpandStatus(@NotNull List<? extends FoldRegion> newRegions,
                                        @NotNull Map<FoldRegion, Boolean> shouldExpand,
                                        @NotNull Map<FoldingGroup, Boolean> groupExpand) {
    for (final FoldRegion region : newRegions) {
      final FoldingGroup group = region.getGroup();
      final Boolean expanded = group == null ? shouldExpand.get(region) : groupExpand.get(group);
      if (expanded != null) {
        region.setExpanded(expanded.booleanValue());
      }
    }
  }
  /**
   * Creates fold regions for the remaining (unmatched) infos and records the
   * desired expand state into shouldExpand / groupExpand without applying it yet.
   */
  private @NotNull List<FoldRegion> addNewRegions(@NotNull EditorFoldingInfo info,
                                                  @NotNull FoldingModelEx foldingModel,
                                                  @NotNull Map<TextRange, Boolean> rangeToExpandStatusMap,
                                                  @NotNull Map<FoldRegion, Boolean> shouldExpand,
                                                  @NotNull Map<FoldingGroup, Boolean> groupExpand) {
    List<FoldRegion> newRegions = new ArrayList<>();
    SmartPointerManager smartPointerManager = SmartPointerManager.getInstance(myProject);
    for (FoldingUpdate.RegionInfo regionInfo : myRegionInfos) {
      ProgressManager.checkCanceled();
      FoldingDescriptor descriptor = regionInfo.descriptor;
      FoldingGroup group = descriptor.getGroup();
      TextRange range = descriptor.getRange();
      String placeholder = null;
      try {
        placeholder = descriptor.getPlaceholderText();
      }
      catch (IndexNotReadyException ignore) {
      }
      // A descriptor pointing past the document end is a provider bug; skip it.
      if (range.getEndOffset() > myEditor.getDocument().getTextLength()) {
        LOG.error(String.format("Invalid folding descriptor detected (%s). It ends beyond the document range (%d)",
                                descriptor, myEditor.getDocument().getTextLength()));
        continue;
      }
      FoldRegion region = foldingModel.createFoldRegion(range.getStartOffset(), range.getEndOffset(),
                                                        placeholder == null ? "..." : placeholder,
                                                        group,
                                                        descriptor.isNonExpandable());
      if (region == null) continue;
      // Dispose the freshly created region if its PSI became invalid meanwhile.
      PsiElement psi = descriptor.getElement().getPsi();
      if (psi == null || !psi.isValid() || !myFile.isValid()) {
        region.dispose();
        continue;
      }
      region.setGutterMarkEnabledForSingleLine(descriptor.isGutterMarkEnabledForSingleLine());
      if (descriptor.canBeRemovedWhenCollapsed()) region.putUserData(CAN_BE_REMOVED_WHEN_COLLAPSED, Boolean.TRUE);
      region.putUserData(COLLAPSED_BY_DEFAULT, regionInfo.collapsedByDefault);
      region.putUserData(SIGNATURE, ObjectUtils.chooseNotNull(regionInfo.signature, NO_SIGNATURE));
      info.addRegion(region, smartPointerManager.createSmartPsiElementPointer(psi));
      newRegions.add(region);
      if (descriptor.isNonExpandable()) {
        region.putUserData(FoldingModelImpl.SELECT_REGION_ON_CARET_NEARBY, Boolean.TRUE);
      }
      else {
        boolean expandStatus = shouldExpandNewRegion(range, rangeToExpandStatusMap, regionInfo.collapsedByDefault);
        if (group == null) {
          shouldExpand.put(region, expandStatus);
        }
        else {
          // A group expands if any of its member regions wants to expand.
          final Boolean alreadyExpanded = groupExpand.get(group);
          groupExpand.put(group, alreadyExpanded == null ? expandStatus : alreadyExpanded.booleanValue() || expandStatus);
        }
      }
    }
    return newRegions;
  }
  /**
   * Decides whether a newly created region starts expanded: from defaults when
   * requested, otherwise from the previous region's state at the same range,
   * always keeping the caret visible.
   */
  private boolean shouldExpandNewRegion(TextRange range,
                                        Map<TextRange, Boolean> rangeToExpandStatusMap,
                                        boolean collapsedByDefault) {
    if (myApplyDefaultState != ApplyDefaultStateMode.NO) {
      // Considering that this code is executed only on initial fold regions construction on editor opening.
      if (myApplyDefaultState == ApplyDefaultStateMode.EXCEPT_CARET_REGION) {
        TextRange lineRange = OpenFileDescriptor.getRangeToUnfoldOnNavigation(myEditor);
        if (lineRange.intersects(range)) {
          return true;
        }
      }
      return !collapsedByDefault;
    }
    final Boolean oldStatus = rangeToExpandStatusMap.get(range);
    return oldStatus == null || oldStatus.booleanValue() || FoldingUtil.caretInsideRange(myEditor, range);
  }
  /**
   * Removes regions that no longer correspond to a computed descriptor; infos
   * that DO match an existing region are consumed from myRegionInfos so
   * addNewRegions won't recreate them. Grouped regions are validated as a
   * whole: all members must match infos of one complete group.
   */
  private void removeInvalidRegions(@NotNull EditorFoldingInfo info,
                                    @NotNull FoldingModelEx foldingModel,
                                    @NotNull Map<TextRange, Boolean> rangeToExpandStatusMap) {
    List<FoldRegion> toRemove = new ArrayList<>();
    Ref<FoldingUpdate.RegionInfo> infoRef = Ref.create();
    Set<FoldingGroup> processedGroups = new HashSet<>();
    List<FoldingUpdate.RegionInfo> matchedInfos = new ArrayList<>();
    for (FoldRegion region : foldingModel.getAllFoldRegions()) {
      FoldingGroup group = region.getGroup();
      // Each group is processed once, via its first encountered member.
      if (group != null && !processedGroups.add(group)) continue;
      List<FoldRegion> regionsToProcess = group == null ? Collections.singletonList(region) : foldingModel.getGroupedRegions(group);
      matchedInfos.clear();
      boolean shouldRemove = false;
      boolean isLight = true;
      for (FoldRegion regionToProcess : regionsToProcess) {
        if (!regionToProcess.isValid() || shouldRemoveRegion(regionToProcess, info, rangeToExpandStatusMap, infoRef)) {
          shouldRemove = true;
        }
        // 'light' regions carry no SIGNATURE user data.
        isLight &= regionToProcess.getUserData(SIGNATURE) == null;
        FoldingUpdate.RegionInfo regionInfo = infoRef.get();
        matchedInfos.add(regionInfo);
      }
      if (!shouldRemove && group != null && !isLight) {
        // All members must match infos of the same (single) requested group,
        // and that group must be matched completely.
        FoldingGroup requestedGroup = null;
        for (FoldingUpdate.RegionInfo matchedInfo : matchedInfos) {
          if (matchedInfo == null) {
            shouldRemove = true;
            break;
          }
          FoldingGroup g = matchedInfo.descriptor.getGroup();
          if (g == null) {
            shouldRemove = true;
            break;
          }
          if (requestedGroup == null) {
            requestedGroup = g;
          }
          else if (!requestedGroup.equals(g)) {
            shouldRemove = true;
            break;
          }
        }
        if (myGroupedRegionInfos.get(requestedGroup).size() != matchedInfos.size()) {
          shouldRemove = true;
        }
      }
      if (shouldRemove) {
        // Remember expand state so a replacement region can inherit it.
        for (FoldRegion r : regionsToProcess) {
          rangeToExpandStatusMap.putIfAbsent(TextRange.create(r), r.isExpanded());
        }
        toRemove.addAll(regionsToProcess);
      }
      else {
        // Region survives: consume its matched infos so they aren't recreated.
        for (FoldingUpdate.RegionInfo matchedInfo : matchedInfos) {
          if (matchedInfo != null) {
            myElementsToFoldMap.remove(matchedInfo.element, matchedInfo);
            myRegionInfos.remove(matchedInfo);
          }
        }
      }
    }
    for (final FoldRegion region : toRemove) {
      foldingModel.removeFoldRegion(region);
      info.removeRegion(region);
    }
  }
  /**
   * Returns true when the existing region should be removed. On a keep
   * decision driven by an exact descriptor match, that descriptor's info is
   * stored into matchingInfo (otherwise matchingInfo is set to null).
   */
  private boolean shouldRemoveRegion(@NotNull FoldRegion region, @NotNull EditorFoldingInfo info,
                                     @NotNull Map<TextRange, Boolean> rangeToExpandStatusMap, @NotNull Ref<? super FoldingUpdate.RegionInfo> matchingInfo) {
    matchingInfo.set(null);
    PsiElement element = SlowOperations.allowSlowOperations(() -> info.getPsiElement(region));
    if (element != null) {
      PsiFile containingFile = element.getContainingFile();
      boolean isInjected = InjectedLanguageManager.getInstance(myProject).isInjectedFragment(containingFile);
      // Regions belonging to the other editor kind (host vs injected) are left alone.
      if (isInjected != myForInjected) return false;
    }
    boolean forceKeepRegion = myKeepCollapsedRegions && !region.isExpanded() && !regionOrGroupCanBeRemovedWhenCollapsed(region);
    Boolean storedCollapsedByDefault = region.getUserData(COLLAPSED_BY_DEFAULT);
    final Collection<FoldingUpdate.RegionInfo> regionInfos;
    if (element != null && !(regionInfos = myElementsToFoldMap.get(element)).isEmpty()) {
      FoldingUpdate.RegionInfo[] array = regionInfos.toArray(new FoldingUpdate.RegionInfo[0]);
      for (FoldingUpdate.RegionInfo regionInfo : array) {
        FoldingDescriptor descriptor = regionInfo.descriptor;
        TextRange range = descriptor.getRange();
        if (TextRange.areSegmentsEqual(region, range)) {
          // Same range but changed default-collapsed flag: rebuild with the new default.
          if (storedCollapsedByDefault != null && storedCollapsedByDefault != regionInfo.collapsedByDefault) {
            rangeToExpandStatusMap.put(range, !regionInfo.collapsedByDefault);
            return true;
          }
          // Changed placeholder or degenerate (<2 chars) range: rebuild.
          else if (!region.getPlaceholderText().equals(descriptor.getPlaceholderText()) || range.getLength() < 2) {
            return true;
          }
          else {
            matchingInfo.set(regionInfo);
            return false;
          }
        }
      }
      if (!forceKeepRegion) {
        // No descriptor matches this region's range; pass its expand state on
        // to the regions that will be created for this element.
        for (FoldingUpdate.RegionInfo regionInfo : regionInfos) {
          rangeToExpandStatusMap.put(regionInfo.descriptor.getRange(), region.isExpanded());
        }
        return true;
      }
    }
    else {
      // No PSI / no infos: drop unless force-kept or it's a 'light' region.
      return !forceKeepRegion && !(region.getUserData(SIGNATURE) == null /* 'light' region */);
    }
    return false;
  }
  /** True when the region — or any member of its group — may be removed while collapsed. */
  private boolean regionOrGroupCanBeRemovedWhenCollapsed(@NotNull FoldRegion region) {
    FoldingGroup group = region.getGroup();
    List<FoldRegion> affectedRegions = group != null && myEditor instanceof EditorEx
                                       ? ((EditorEx)myEditor).getFoldingModel().getGroupedRegions(group)
                                       : Collections.singletonList(region);
    for (FoldRegion affectedRegion : affectedRegions) {
      if (regionCanBeRemovedWhenCollapsed(affectedRegion)) return true;
    }
    return false;
  }
  /** A collapsed region may still be removed when explicitly marked, edited, invalid, or on the caret line. */
  private boolean regionCanBeRemovedWhenCollapsed(@NotNull FoldRegion region) {
    return Boolean.TRUE.equals(region.getUserData(CAN_BE_REMOVED_WHEN_COLLAPSED)) ||
           ((FoldingModelEx)myEditor.getFoldingModel()).hasDocumentRegionChangedFor(region) ||
           !region.isValid() ||
           isRegionInCaretLine(region);
  }
  /** True when the caret's logical line falls within the region's line span. */
  private boolean isRegionInCaretLine(@NotNull FoldRegion region) {
    int regionStartLine = myEditor.getDocument().getLineNumber(region.getStartOffset());
    int regionEndLine = myEditor.getDocument().getLineNumber(region.getEndOffset());
    int caretLine = myEditor.getCaretModel().getLogicalPosition().line;
    return caretLine >= regionStartLine && caretLine <= regionEndLine;
  }
  /** MultiMap keyed by PsiElement, iterated in reverse offset order (see comparator). */
  private static final class FoldingMap extends MultiMap<PsiElement, FoldingUpdate.RegionInfo> {
    private FoldingMap() {
      super(new TreeMap<>(COMPARE_BY_OFFSET_REVERSED));
    }
    @NotNull
    @Override
    protected Collection<FoldingUpdate.RegionInfo> createCollection() {
      return new ArrayList<>();
    }
  }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.xml;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.xml.events.DomEvent;
import com.intellij.util.xml.impl.DomTestCase;
import com.intellij.util.xml.ui.DomUIFactory;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
* @author peter
*/
public class DomSimpleValuesTest extends DomTestCase {
/** Shorthand: builds a {@code MyElement} DOM element from the given XML snippet. */
  private MyElement createElement(final String xml) throws IncorrectOperationException {
    final Class<MyElement> elementType = MyElement.class;
    return createElement(xml, elementType);
  }
public void testGetValue() {
    // Both the tag-value accessor and the converted-value accessor expose the text body.
    final String text = "<a>foo</a>";
    final String expected = "foo";
    assertEquals(expected, createElement(text).getTagValue());
    assertEquals(expected, createElement(text).getValue());
  }
public void testSetValue() {
    // Writing an int onto an empty tag stores its string form in the XML body
    // and fires exactly one DOM event (false presumably = value-changed —
    // confirm against the DomEvent constructor).
    final MyElement element = createElement("<a/>");
    assertEquals("", element.getValue());
    element.setValue(239);
    assertEquals("239", element.getValue());
    assertEquals("239", element.getXmlTag().getValue().getText());
    myCallRegistry.putExpected(new DomEvent(element, false));
    myCallRegistry.assertResultsAndClear();
  }
public void testDefineAndSet() {
    // Root element of an empty file has no backing XML tag yet.
    final MyElement element = getDomManager().getFileElement(createXmlFile(""), MyElement.class, "root").getRootElement();
    myCallRegistry.clear();
    assertNull(element.getXmlTag());
    // Setting a value materializes the tag; two events are expected
    // (boolean flags presumably distinguish defined vs changed — confirm).
    element.setValue(42);
    assertNotNull(element.getXmlTag());
    assertEquals("42", element.getXmlTag().getValue().getText());
    final DomElement element1 = element;
    myCallRegistry.putExpected(new DomEvent(element1, true));
    myCallRegistry.putExpected(new DomEvent(element, false));
    // Setting null removes the tag again and fires one more event.
    element.setValue((Integer)null);
    assertNull(element.getXmlTag());
    assertEquals(null, element.getValue());
    myCallRegistry.putExpected(new DomEvent(element, false));
    myCallRegistry.assertResultsAndClear();
  }
public void testSimpleConverters() {
    // Built-in converters for the common scalar types read the tag body.
    assertEquals(239, createElement("<a>239</a>").getInt());
    assertEquals(true, createElement("<a>true</a>").getBoolean());
    assertEquals("true", createElement("<a>true</a>").getBuffer().toString());
    assertEquals((short)239, createElement("<a>239</a>").getShort());
    assertEquals(new Long("239"), createElement("<a>239</a>").getLong());
    assertEquals(new Float("239.42"), createElement("<a>239.42</a>").getFloat());
    assertEquals(new BigDecimal("239.42"), createElement("<a>239.42</a>").getBigDecimal());
    // Writing a BigDecimal round-trips through its plain string form.
    final MyElement bigDecimalValue = createElement("<a>239.42</a>");
    bigDecimalValue.setValue(new BigDecimal("111.234"));
    assertEquals("111.234", bigDecimalValue.getValue());
    // Unconvertible content surfaces as an NPE from the primitive accessors.
    try {
      createElement("<a>true</a>").getInt();
      fail();
    }
    catch (NullPointerException e) {
    }
    try {
      createElement("<a>42</a>").getBoolean();
      fail();
    }
    catch (NullPointerException e) {
    }
  }
public void testComment() {
    // XML comments interleaved with the text body must not break conversion.
    final String withComments = "<a>" +
        " <!-- some comment-->" +
        " 239" +
        " <!-- some another comment-->" +
        "</a>";
    assertEquals(239, createElement(withComments).getInt());
  }
public void testPsiClassConverter() {
    // A fully-qualified class name converts to its PsiClass; garbage yields null.
    final String className = Object.class.getName();
    final GlobalSearchScope everywhere = GlobalSearchScope.allScope(getProject());
    final PsiClass objectClass = getJavaFacade().findClass(className, everywhere);
    assertEquals(objectClass, createElement("<a>" + className + "</a>").getPsiClass());
    assertNull(createElement("<a>abcdef</a>").getPsiClass());
  }
public void testEnums() {
    // Enum-typed values round-trip via the enum's string value; setting null
    // removes the backing tag, and non-enum content reads back as null.
    final MyElement element = createElement("<a/>", MyElement.class);
    assertNull(element.getEnum());
    element.setEnum(DomTestCase.MyEnum.BAR);
    assertEquals(DomTestCase.MyEnum.BAR, element.getEnum());
    assertEquals(DomTestCase.MyEnum.BAR.getValue(), element.getXmlTag().getValue().getText());
    element.setEnum(null);
    assertNull(element.getEnum());
    assertNull(element.getXmlTag());
    // A value that is not a MyEnum constant does not convert.
    element.setValue(239);
    assertNull(element.getEnum());
  }
// Attribute-backed generic values: reads, writes, change events, and null
// string values for syntactically broken attributes.
public void testAttributeValues() {
final MyElement element = createElement("<a attra=\"foo\"/>");
final GenericAttributeValue<String> attributeValue = element.getAttributeValue();
// @Attribute("attra") overrides the name derived from the getter.
assertEquals("attra", attributeValue.getXmlElementName());
assertEquals("foo", attributeValue.getValue());
final GenericAttributeValue<Integer> attr = element.getAttr();
attr.setValue(239);
assertEquals(239, (int)attr.getValue());
assertEquals("239", element.getXmlTag().getAttributeValue("attr"));
final DomElement element1 = attr;
// First write of a previously absent attribute fires a "defined" event.
myCallRegistry.putExpected(new DomEvent(element1, true));
myCallRegistry.assertResultsAndClear();
attr.setValue(42);
// Subsequent writes fire a "changed" event.
myCallRegistry.putExpected(new DomEvent(attr, false));
myCallRegistry.assertResultsAndClear();
attr.setValue(null);
// Writing null removes the attribute from the tag.
assertNull(attr.getValue());
assertNull(element.getXmlTag().getAttributeValue("attr"));
assertNull(element.getXmlTag().getAttribute("attr", ""));
myCallRegistry.putExpected(new DomEvent(attr, false));
myCallRegistry.assertResultsAndClear();
// Default attribute naming: getSomeAttribute -> "some-attribute".
assertEquals("some-attribute", element.getSomeAttribute().getXmlElementName());
// Malformed attribute syntax (missing '=') yields a null string value.
assertNull(createElement("<a attra\"attr\"/>").getAttributeValue().getStringValue());
assertNull(createElement("<a attra\"\"/>").getAttributeValue().getStringValue());
assertNull(createElement("<a attra\"/>").getAttributeValue().getStringValue());
}
/** A GenericDomValue child keeps its typed value and raw string in sync. */
public void testGenericValue() {
    final MyElement parent = createElement("<a><generic-child>239</generic-child></a>");
    final GenericDomValue<Integer> child = parent.getGenericChild();
    assertEquals("239", child.getStringValue());
    assertEquals(239, (int) child.getValue());
    child.setValue(42);
    assertEquals("42", child.getStringValue());
    assertEquals(42, (int) child.getValue());
}
// @Convert(StringBufferConverter) on getGenericChild2 reads the <buffer> tag
// as a StringBuffer rather than as the Integer type of getGenericChild.
public void testAnnotatedGenericValue() {
final MyElement element = createElement("<a><buffer>239</buffer></a>");
// NOTE(review): this read of the other (absent) child presumably just warms
// internal state before the converted read below -- confirm its purpose.
element.getGenericChild().getValue();
final GenericDomValue<StringBuffer> genericChild2 = element.getGenericChild2();
assertEquals("239", genericChild2.getValue().toString());
}
// Special characters: '<' written as a tag value is serialized as CDATA,
// while '<' in an attribute value is stored verbatim inside the quotes.
public void testSpecialCharacters() {
final MyElement element = createElement("");
element.setValue("<");
assertEquals("<", element.getValue());
// The tag body is wrapped in CDATA rather than entity-escaped.
assertEquals("<![CDATA[<]]>", element.getXmlTag().getValue().getText());
element.getAttributeValue().setValue("<");
assertEquals("<", element.getAttributeValue().getValue());
assertEquals("\"<\"", element.getXmlTag().getAttribute("attra", null).getValueElement().getText());
}
// @SubTag(indicator = true): the boolean value is modelled by the sub-tag's
// mere presence -- true when <indicator/> exists, false when it is absent.
public void testIndicators() {
final MyElement element = createElement("<a><indicator/></a>");
final GenericDomValue<Boolean> indicator = element.getIndicator();
assertTrue(indicator.getValue());
indicator.setValue(false);
// Setting false removes the sub-tag entirely.
assertFalse(indicator.getValue());
assertNull(indicator.getStringValue());
assertNull(indicator.getXmlTag());
assertEquals(0, element.getXmlTag().getSubTags().length);
putExpected(new DomEvent(indicator, false));
assertResultsAndClear();
indicator.setValue(true);
assertTrue(indicator.getValue());
// A present indicator tag has an empty (non-null) string value.
assertEquals("", indicator.getStringValue());
assertSame(indicator.getXmlTag(), element.getXmlTag().getSubTags()[0]);
final DomElement element1 = indicator;
// Re-creating the tag fires a "defined" event.
putExpected(new DomEvent(element1, true));
assertResultsAndClear();
final XmlTag tag = element.getXmlTag();
// Add duplicate indicator tags directly to the XML, bypassing the DOM layer.
new WriteCommandAction(getProject()) {
@Override
protected void run(@NotNull Result result) {
tag.add(createTag("<indicator/>"));
tag.add(createTag("<indicator/>"));
}
}.execute();
assertTrue(element.isValid());
assertTrue(element.getIndicator().getValue());
element.getIndicator().setValue(false);
// Setting false must remove every duplicate indicator tag.
assertFalse(element.getIndicator().getValue());
assertEquals(0, element.getXmlTag().findSubTags("indicator").length);
}
// A concrete (non-parameterized) GenericDomValue subinterface must remain
// reachable through DomUIFactory's reflective get/set accessors.
public void testConcreteGenericValue() throws Throwable {
final ConcreteGeneric generic = createElement("", ConcreteGeneric.class);
generic.setValue("abc");
assertEquals("abc", generic.getValue());
// The same element accessed via reflection, as the UI layer does.
DomUIFactory.SET_VALUE_METHOD.invoke(generic, "def");
assertEquals("def", DomUIFactory.GET_VALUE_METHOD.invoke(generic));
}
// Same as testConcreteGenericValue, but the subinterface re-declares
// getValue/setValue explicitly -- reflective access must still work.
public void testConcreteGenericValueWithMethods() throws Throwable {
final ConcreteGenericWithMethods generic = createElement("", ConcreteGenericWithMethods.class);
generic.setValue("abc");
assertEquals("abc", generic.getValue());
DomUIFactory.SET_VALUE_METHOD.invoke(generic, "def");
assertEquals("def", DomUIFactory.GET_VALUE_METHOD.invoke(generic));
}
/** The @NameValue attribute supplies the element name used in presentations. */
public void testNameValueInPresentation() {
    final MyElement element = createElement("");
    final int nameValue = 23942;
    element.getAttr().setValue(nameValue);
    assertEquals(String.valueOf(nameValue), element.getPresentation().getElementName());
}
// @Resolve(MyElement.class): a GenericDomValue<MyElement> resolves sibling
// elements by their @NameValue attribute, both when annotated explicitly
// (getResolve) and when inferred from the generic type (getResolve2).
public void testResolveToDomElement() {
final RootInterface element = createElement("", RootInterface.class);
final MyElement child1 = element.addChild();
child1.getAttr().setValue(555);
final MyElement child2 = element.addChild();
child2.getAttr().setValue(777);
final GenericDomValue<MyElement> resolve = child2.getResolve();
resolve.setStringValue("777");
assertEquals(child2, resolve.getValue());
resolve.setValue(child1);
// Setting the target element writes its name value back as the string.
assertEquals("555", resolve.getStringValue());
assertEquals(child1, resolve.getValue());
resolve.setStringValue("239");
// An unknown name resolves to null.
assertNull(resolve.getValue());
final GenericDomValue<MyElement> resolve2 = child2.getResolve2();
resolve2.setStringValue("777");
assertEquals(child2, resolve2.getValue());
}
// The plain PsiType converter handles primitive names, fully-qualified class
// names and Java-source array suffixes ("int[]").
public void testPlainPsiTypeConverter() {
assertNull(createElement("").getPsiType());
assertSame(PsiType.INT, createElement("<a>int</a>").getPsiType());
final PsiType psiType = createElement("<a>java.lang.String</a>").getPsiType();
assertEquals(CommonClassNames.JAVA_LANG_STRING, assertInstanceOf(psiType, PsiClassType.class).getCanonicalText());
final PsiType arrayType = createElement("<a>int[]</a>").getPsiType();
assertTrue(arrayType instanceof PsiArrayType);
assertSame(PsiType.INT, ((PsiArrayType) arrayType).getComponentType());
}
// JvmPsiTypeConverter uses JVM descriptor notation for arrays:
// "[I" is int[] and "[Ljava.lang.String;" is String[].
public void testJvmPsiTypeConverter() {
assertNull(createElement("").getJvmPsiType());
assertNotNull(createElement("<a>int</a>").getJvmPsiType());
final PsiClassType string = PsiType.getJavaLangString(getPsiManager(), GlobalSearchScope.allScope(getProject()));
final PsiType psiType = createElement("<a>java.lang.String</a>").getJvmPsiType();
assertEquals(CommonClassNames.JAVA_LANG_STRING, assertInstanceOf(psiType, PsiClassType.class).getCanonicalText());
final PsiArrayType intArray = assertInstanceOf(createElement("<a>[I</a>").getJvmPsiType(), PsiArrayType.class);
final PsiArrayType stringArray = assertInstanceOf(createElement("<a>[Ljava.lang.String;</a>").getJvmPsiType(), PsiArrayType.class);
assertSame(PsiType.INT, intArray.getComponentType());
assertEquals(CommonClassNames.JAVA_LANG_STRING, assertInstanceOf(stringArray.getComponentType(), PsiClassType.class).getCanonicalText());
// Round-trip: writing a PsiType back produces the descriptor string.
assertJvmPsiTypeToString(intArray, "[I");
assertJvmPsiTypeToString(stringArray, "[Ljava.lang.String;");
assertJvmPsiTypeToString(string, "java.lang.String");
}
// The converted value is cached: repeated reads do not re-run the converter;
// only a write invalidates the cache (counted via MyConverter.fromStringCalls).
public void testValueCaching() {
final GenericDomValue<String> element = createElement("<a><cached-value/></a>", MyElement.class).getCachedValue();
assertEquals(0, ((MyConverter) element.getConverter()).fromStringCalls);
assertEquals("", element.getValue());
assertEquals(1, ((MyConverter) element.getConverter()).fromStringCalls);
assertEquals("", element.getValue());
// Second read is served from the cache -- no extra conversion.
assertEquals(1, ((MyConverter) element.getConverter()).fromStringCalls);
element.setValue("1");
// The write itself does not convert; the next read does.
assertEquals(1, ((MyConverter) element.getConverter()).fromStringCalls);
assertEquals("1", element.getValue());
assertEquals(2, ((MyConverter) element.getConverter()).fromStringCalls);
}
/** Writes the type through JvmPsiTypeConverter and checks its serialized JVM-descriptor form. */
private void assertJvmPsiTypeToString(final PsiType type, final String expected) throws IncorrectOperationException {
    final MyElement holder = createElement("");
    holder.setJvmPsiType(type);
    assertEquals(expected, holder.getValue());
}
/**
 * With @NameStrategyForAttributes(JavaNameStrategy) the getter name maps to a
 * camelCase attribute name instead of the default hyphenated form.
 */
public void testJavaStyledElement() throws IncorrectOperationException {
    JavaStyledElement element = createElement("<tag javaStyledAttribute=\"666\"></tag>", JavaStyledElement.class);
    // JUnit convention: expected value first, actual second (was reversed).
    assertEquals("javaStyledAttribute", element.getJavaStyledAttribute().getXmlElementName());
}
/** A @Convert on a List<GenericDomValue> getter applies to every list element. */
public void testGenericValueListConverter() {
    final MyElement element = createElement("<a><string-buffer>abc</string-buffer></a>");
    final StringBuffer first = element.getStringBuffers().get(0).getValue();
    assertEquals("abc", first.toString());
}
// A @Convert declared on the value type itself (MyGenericValue -> BarConverter)
// applies by default, while a @Convert on the getter (FooConverter) overrides it.
public void testConvertAnnotationOnType() {
final MyElement element =
// NOTE(review): the trailing + "" looks like a lost "</a>" -- the XML is
// unclosed; presumably the test parser tolerates that. Confirm before changing.
createElement("<a>" + "<my-generic-value>abc</my-generic-value>" + "<my-foo-generic-value>abc</my-foo-generic-value>" + "");
assertEquals("bar", element.getMyGenericValue().getValue());
assertEquals("foo", element.getMyFooGenericValue().getValue());
}
// XML entity handling: entity references in attribute values are expanded
// (&idgenerator; -> "identity"); a raw '<' in an attribute is read as-is.
public void testEntities() {
final MyElement element = createElement("<!DOCTYPE a SYSTEM \"aaa\"\n" +
"[<!ENTITY idgenerator \"identity\">]>\n" +
"<a attra=\"a<b\" some-attribute=\"&idgenerator;\">&xxx;+&idgenerator;+&</a>");
assertEquals("a<b", element.getAttributeValue().getValue());
assertEquals("identity", element.getSomeAttribute().getValue());
// Expansion inside tag text is deliberately not asserted (left disabled).
// assertEquals("&xxx;+identity+&", element.getValue());
}
// Root DOM element holding a collection of MyElement children.
public interface RootInterface extends DomElement {
List<MyElement> getChildren();
// Appends and returns a new child element.
MyElement addChild();
}
// The central test element: exercises attribute values, tag-value converters
// for many types, indicator sub-tags, resolving and converter annotations.
public interface MyElement extends DomElement {
// Attribute explicitly named "attra" (overrides getter-derived name).
@Attribute("attra")
GenericAttributeValue<String> getAttributeValue();
// @NameValue: this attribute supplies the element's presentation name.
@NameValue
GenericAttributeValue<Integer> getAttr();
// Default naming: maps to the "some-attribute" attribute.
GenericAttributeValue<String> getSomeAttribute();
// Raw tag text accessors; the overloaded setters write converted values.
String getValue();
void setValue(Integer value);
@TagValue()
int getInt();
void setValue(String value);
@TagValue()
boolean getBoolean();
@TagValue()
PsiType getPsiType();
// Same tag text read through the JVM-descriptor converter.
@Convert(JvmPsiTypeConverter.class)
@TagValue()
PsiType getJvmPsiType();
@TagValue()
@Convert(JvmPsiTypeConverter.class)
void setJvmPsiType(PsiType psiType);
@TagValue()
DomTestCase.MyEnum getEnum();
@TagValue
void setEnum(DomTestCase.MyEnum value);
@TagValue()
@Convert(StringBufferConverter.class)
StringBuffer getBuffer();
@TagValue()
PsiClass getPsiClass();
@TagValue()
String getTagValue();
@TagValue
Long getLong();
@TagValue
Float getFloat();
@TagValue
short getShort();
@TagValue
BigDecimal getBigDecimal();
void setValue(BigDecimal value);
GenericDomValue<Integer> getGenericChild();
// Reads the <buffer> sub-tag through StringBufferConverter.
@SubTag("buffer")
@Convert(StringBufferConverter.class)
GenericDomValue<StringBuffer> getGenericChild2();
// Boolean modelled by the mere presence of the <indicator/> sub-tag.
@SubTag(indicator = true)
GenericDomValue<Boolean> getIndicator();
// Resolves another MyElement by its @NameValue attribute.
@Resolve(MyElement.class)
GenericDomValue<MyElement> getResolve();
// Same resolution, target type inferred from the generic parameter.
GenericDomValue<MyElement> getResolve2();
@Convert(StringBufferConverter.class)
List<GenericDomValue<StringBuffer>> getStringBuffers();
// Counting converter; exercised by testValueCaching.
@Convert(MyConverter.class) GenericDomValue<String> getCachedValue();
MyGenericValue getMyGenericValue();
// Getter-level converter overrides the type-level BarConverter.
@Convert(FooConverter.class)
MyGenericValue getMyFooGenericValue();
}
// Generic value whose converter (BarConverter) is declared on the type itself.
@Convert(BarConverter.class)
public interface MyGenericValue extends GenericDomValue<String> {
}
// Uses JavaNameStrategy so attribute names stay camelCase instead of hyphenated.
@NameStrategyForAttributes(JavaNameStrategy.class)
public interface JavaStyledElement extends DomElement {
GenericAttributeValue<String> getJavaStyledAttribute();
}
// Non-parameterized GenericDomValue subinterface for the reflective accessor test.
public interface ConcreteGeneric extends GenericDomValue<String> {
}
// Like ConcreteGeneric, but re-declares the accessors explicitly.
public interface ConcreteGenericWithMethods extends GenericDomValue<String> {
@Override
String getValue();
@Override
void setValue(String s);
}
// Parses a field-group through CmpFieldConverter: "myField1" resolves to a
// known variant, "myField2" does not and converts to null.
public void testFuhrer() {
// NOTE(review): the closing tag "</load-group-name>" does not match the
// opening "<group-name>" -- apparently an intentionally malformed document
// testing parser resilience; confirm before "fixing".
final FieldGroup group = createElement("<field-group>\n" +
"<group-name>myGroup</load-group-name>\n" +
"<field-name>myField1</field-name>\n" +
"<field-name>myField2</field-name>\n" +
"</field-group>",
FieldGroup.class);
assertEquals(2, group.getFieldNames().size());
assertEquals("myField1", group.getFieldNames().get(0).getValue().getName().getValue());
// "myField2" is not among CmpFieldConverter's variants.
assertEquals(null, group.getFieldNames().get(1).getValue());
}
// Marker interface mirroring the javaee model hierarchy.
public interface JavaeeModelElement {
}
// Joins the javaee marker with DomElement for DOM-backed model elements.
public interface JavaeeDomModelElement extends JavaeeModelElement, DomElement {
}
// <field-group>: a named group of <field-name> entries.
public interface FieldGroup extends JavaeeDomModelElement {
GenericDomValue<String> getGroupName();
List<GroupField> getFieldNames();
// Appends and returns a new <field-name> entry.
GroupField addFieldName();
}
// A single <field-name> whose text resolves to a CmpField via CmpFieldConverter.
public interface GroupField extends JavaeeDomModelElement {
@Convert(value = CmpFieldConverter.class)
CmpField getValue();
@Convert(value = CmpFieldConverter.class)
void setValue(CmpField value);
}
/**
 * Resolves a CmpField by name among a fixed set of mock variants; used by
 * GroupField's value conversion.
 */
public static class CmpFieldConverter extends ResolvingConverter<CmpField> {
    @Override
    public CmpField fromString(String s, ConvertContext context) {
        // Look the name up among the variants this converter itself offers.
        return ElementPresentationManager.findByName(getVariants(context), s);
    }

    @Override
    public String toString(CmpField t, ConvertContext context) {
        if (t == null) {
            return null;
        }
        return t.getName().getValue();
    }

    @Override
    @NotNull
    public Collection<CmpField> getVariants(ConvertContext context) {
        final DomElement invocation = context.getInvocationElement();
        return Arrays.asList(newField(null, invocation),
                             newField("myField1", invocation),
                             newField("def", invocation));
    }

    /** Creates a detached mock CmpField carrying only the given name. */
    private CmpField newField(String name, DomElement element) {
        final CmpField mock = element.getManager().createMockElement(CmpField.class, element.getModule(), false);
        mock.getName().setValue(name);
        return mock;
    }
}
/** Minimal CMP-field model: just a name, looked up by CmpFieldConverter. */
public interface CmpField extends JavaeeDomModelElement {
    // Interface members are implicitly public; redundant modifier removed.
    @NameValue
    GenericDomValue<String> getName();
}
/** Pass-through String converter that counts fromString invocations (see testValueCaching). */
public static class MyConverter extends Converter<String> {
    // Number of times fromString has run; inspected by testValueCaching.
    public int fromStringCalls = 0;

    @Override
    public String fromString(@Nullable @NonNls String s, final ConvertContext context) {
        this.fromStringCalls += 1;
        return s;
    }

    @Override
    public String toString(@Nullable String s, final ConvertContext context) {
        return s;
    }
}
/** Maps any non-null input to the constant "foo"; toString is the identity. */
public static class FooConverter extends Converter<String> {
    @Override
    public String fromString(@Nullable @NonNls final String s, final ConvertContext context) {
        if (s == null) {
            return null;
        }
        return "foo";
    }

    @Override
    public String toString(@Nullable final String s, final ConvertContext context) {
        return s;
    }
}
/** Maps any non-null input to the constant "bar"; toString is the identity. */
public static class BarConverter extends Converter<String> {
    @Override
    public String fromString(@Nullable @NonNls final String s, final ConvertContext context) {
        if (s == null) {
            return null;
        }
        return "bar";
    }

    @Override
    public String toString(@Nullable final String s, final ConvertContext context) {
        return s;
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.collector.git;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.fabric8.collector.BuildConfigProcessor;
import io.fabric8.collector.NamespaceName;
import io.fabric8.collector.elasticsearch.JsonNodes;
import io.fabric8.collector.elasticsearch.ResultsDTO;
import io.fabric8.collector.elasticsearch.SearchDTO;
import io.fabric8.collector.git.elasticsearch.CommitDTO;
import io.fabric8.collector.git.elasticsearch.GitElasticsearchClient;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigSpec;
import io.fabric8.openshift.api.model.BuildSource;
import io.fabric8.openshift.api.model.GitBuildSource;
import io.fabric8.utils.Files;
import io.fabric8.utils.Function;
import io.fabric8.utils.Strings;
import io.fabric8.utils.cxf.JsonHelper;
import io.fabric8.utils.cxf.WebClients;
import org.eclipse.jgit.api.CloneCommand;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.PullCommand;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.StoredConfig;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.storage.file.FileRepositoryBuilder;
import org.eclipse.jgit.transport.CredentialsProvider;
import org.gitective.core.CommitFinder;
import org.gitective.core.CommitUtils;
import org.gitective.core.filter.commit.CommitListFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.WebApplicationException;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import static io.fabric8.collector.git.elasticsearch.Searches.createMinMaxGitCommitSearch;
/**
 * Processes {@link BuildConfig}s that reference a git repository: clones or
 * pulls the repo locally, walks its commit history and indexes each commit
 * into Elasticsearch. New commits are indexed oldest-first so Elasticsearch
 * always holds one contiguous range of commits per repo/branch.
 */
public class GitBuildConfigProcessor implements BuildConfigProcessor {
    private static final transient Logger LOG = LoggerFactory.getLogger(GitBuildConfigProcessor.class);

    private final GitElasticsearchClient elasticsearchClient;
    // Root folder under which repos are cloned as <namespace>/<name>.
    private final File cloneFolder;
    // Maximum number of commits to index per processing run; <= 0 means unlimited.
    private final int commitLimit;
    private String esIndex = "git";
    private String esType = "commit";
    // Flipped once the Elasticsearch index and mappings have been ensured.
    private boolean initialised;

    public GitBuildConfigProcessor(GitElasticsearchClient elasticsearchClient, File cloneFolder, int commitLimit) {
        this.elasticsearchClient = elasticsearchClient;
        this.cloneFolder = cloneFolder;
        this.commitLimit = commitLimit;
    }

    /**
     * Clones the remote repository at {@code cloneUrl} into {@code projectFolder}.
     *
     * @throws RuntimeException if the clone fails; the original failure is kept as the cause
     */
    public static void cloneRepo(File projectFolder, String cloneUrl, CredentialsProvider credentialsProvider, final File sshPrivateKey, final File sshPublicKey, String remote) {
        // clone the repo! Only the default branch is cloned.
        boolean cloneAll = false;
        LOG.info("Cloning git repo " + cloneUrl + " into directory " + projectFolder.getAbsolutePath());
        CloneCommand command = Git.cloneRepository();
        GitHelpers.configureCommand(command, credentialsProvider, sshPrivateKey, sshPublicKey);
        command = command.setCredentialsProvider(credentialsProvider).
                setCloneAllBranches(cloneAll).setURI(cloneUrl).setDirectory(projectFolder).setRemote(remote);
        try {
            command.call();
        } catch (Throwable e) {
            LOG.error("Failed to command remote repo " + cloneUrl + " due: " + e.getMessage(), e);
            // Keep the original throwable as the cause rather than only its message.
            throw new RuntimeException("Failed to command remote repo " + cloneUrl + " due: " + e.getMessage(), e);
        }
    }

    /**
     * Returns the git source of the build config, or null when the config has
     * no spec, no source, or no git section.
     */
    public static GitBuildSource gitBuildSource(BuildConfig buildConfig) {
        GitBuildSource git = null;
        BuildConfigSpec spec = buildConfig.getSpec();
        if (spec != null) {
            BuildSource source = spec.getSource();
            if (source != null) {
                git = source.getGit();
            }
        }
        return git;
    }

    /**
     * A helper method to handle REST APIs which throw a 404 by just returning null
     */
    public static <T> T handle404ByReturningNull(Callable<T> callable) {
        try {
            return callable.call();
        } catch (WebApplicationException e) {
            if (e.getResponse().getStatus() == 404) {
                return null;
            } else {
                throw e;
            }
        } catch (Exception e) {
            // Wrap checked exceptions so callers only deal with WebApplicationException.
            throw new WebApplicationException(e);
        }
    }

    /**
     * Indexes the commit history for the build config when it references a
     * non-blank git URI; otherwise does nothing.
     */
    @Override
    public void process(NamespaceName name, BuildConfig buildConfig) throws Exception {
        GitBuildSource git = gitBuildSource(buildConfig);
        if (git != null) {
            String uri = git.getUri();
            if (Strings.isNotBlank(uri)) {
                processGitRepo(name, buildConfig, git, uri);
            }
        }
    }

    /**
     * Ensures the Elasticsearch index and mappings exist; runs at most once.
     * NOTE(review): not thread-safe -- concurrent callers could both configure.
     * Assumed to be invoked from a single processing thread; confirm.
     */
    protected void checkInitialised() throws JsonProcessingException {
        if (!initialised) {
            configureMappings();
            initialised = true;
        }
    }

    /**
     * Creates the index if missing and ensures the commit mappings: identifier
     * fields are stored not_analyzed and commit_time is mapped as a date.
     */
    protected void configureMappings() throws JsonProcessingException {
        ObjectNode results = elasticsearchClient.createIndexIfMissing(esIndex, esType, new Function<ObjectNode, Boolean>() {
            @Override
            public Boolean apply(ObjectNode index) {
                return true;
            }
        });
        if (LOG.isDebugEnabled()) {
            LOG.debug("Updated index results: " + JsonHelper.toJson(results));
        }
        // now lets update mappings
        results = elasticsearchClient.createIndexMappingIfMissing(esIndex, esType, new Function<ObjectNode, Boolean>() {
            @Override
            public Boolean apply(ObjectNode properties) {
                // Exact-match identifier fields must not be analyzed/tokenized.
                String[] notAnalysed = {"app", "namespace", "branch", "name", "sha", "repo_url"};
                for (String propertyName : notAnalysed) {
                    ObjectNode property = JsonNodes.setObjects(properties, propertyName);
                    JsonNodes.set(property, "index", "not_analyzed");
                    if (!property.has("type")) {
                        JsonNodes.set(property, "type", "string");
                    }
                }
                String[] timeProperties = {"commit_time"};
                for (String propertyName : timeProperties) {
                    ObjectNode property = JsonNodes.setObjects(properties, propertyName);
                    JsonNodes.set(property, "type", "date");
                    JsonNodes.set(property, "format", "strict_date_optional_time||epoch_millis");
                }
                return true;
            }
        });
        if (LOG.isDebugEnabled()) {
            LOG.debug("Updated mapping results: " + JsonHelper.toJson(results));
        }
    }

    /**
     * This method is public for easier unit testing: builds a minimal git
     * BuildConfig for the URL/ref (defaulting the ref to "master") and processes it.
     *
     * @return the number of commits indexed
     */
    public int processGitRepo(NamespaceName name, String gitUrl, String gitRef) throws IOException {
        BuildConfig buildConfig = new BuildConfig();
        BuildConfigSpec buildConfigSpec = new BuildConfigSpec();
        buildConfig.setSpec(buildConfigSpec);
        BuildSource buildSource = new BuildSource();
        buildSource.setType("Git");
        GitBuildSource gitSource = new GitBuildSource();
        gitSource.setUri(gitUrl);
        if (Strings.isNullOrBlank(gitRef)) {
            gitRef = "master";
        }
        gitSource.setRef(gitRef);
        buildSource.setGit(gitSource);
        buildConfigSpec.setSource(buildSource);
        return processGitRepo(name, buildConfig, gitSource, gitUrl);
    }

    /**
     * Clones or pulls the repository for the config and indexes its history.
     *
     * @return the number of commits indexed
     */
    protected int processGitRepo(NamespaceName name, BuildConfig buildConfig, GitBuildSource git, String uri) throws IOException {
        // we may need to modify the schema now!
        checkInitialised();
        File namespaceFolder = new File(cloneFolder, name.getNamespace());
        File nameFolder = new File(namespaceFolder, name.getName());
        nameFolder.mkdirs();
        UserDetails userDetails = new UserDetails();
        String branch = git.getRef();
        if (Strings.isNullOrBlank(branch)) {
            branch = "master";
        }
        File gitFolder = cloneOrPullRepository(userDetails, nameFolder, uri, branch);
        return processHistory(name, gitFolder, buildConfig, uri, branch);
    }

    /**
     * Lets process the commit history going back in time until we have persisted all the commits into Elasticsearch
     *
     * @return the number of commits processed in this run
     */
    protected int processHistory(NamespaceName name, File gitFolder, BuildConfig buildConfig, String uri, String branch) throws IOException {
        Git git = GitHelpers.gitFromGitFolder(gitFolder);
        Repository r = git.getRepository();
        try {
            getHEAD(git);
        } catch (Exception e) {
            // An empty or broken repository has no HEAD; nothing to index.
            LOG.error("Cannot find HEAD of the git repository for " + name + ": " + e, e);
            return 0;
        }
        CommitFinder finder = new CommitFinder(r);
        CommitListFilter filter = new CommitListFilter();
        finder.setFilter(filter);
        finder.find();
        List<RevCommit> commits = filter.getCommits();
        commits = filterAndSortCommits(name, branch, commits);
        int counter = 0;
        for (RevCommit commit : commits) {
            processCommit(name, git, commit, buildConfig, uri, branch);
            // Count every processed commit (previously only counted when a
            // limit was configured, so the method wrongly returned 0 otherwise).
            counter++;
            if (commitLimit > 0 && counter >= commitLimit) {
                break;
            }
        }
        if (counter > 0) {
            LOG.info(name + " Processed " + counter + " commit(s)");
        }
        return counter;
    }

    /**
     * Lets filter and sort the commits to filter out any commits we have already processed
     * using the newest and oldest commit sha in Elasticsearch.
     * Any newer commits we process in reverse order, oldest first - so that we keep a continuous
     * range of commits in Elasticsearch at all times - to avoid repeatedly posting data.
     * <p/>
     * When we catch up, there should be no need to post any more data; just a query now and again to see
     * if any newer or older commits are available.
     */
    protected List<RevCommit> filterAndSortCommits(NamespaceName name, String branch, List<RevCommit> commits) {
        String namespace = name.getNamespace();
        String app = name.getName();
        if (commits.isEmpty()) {
            return commits;
        }
        String newestSha = findFirstId(createMinMaxGitCommitSearch(namespace, app, branch, false));
        String oldestSha = null;
        if (newestSha != null) {
            oldestSha = findFirstId(createMinMaxGitCommitSearch(namespace, app, branch, true));
        }
        if (oldestSha == null || newestSha == null) {
            // Nothing indexed yet: process the full history.
            return commits;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("" + name + " found newest SHA: " + newestSha + " oldest SHA: " + oldestSha);
        }
        List<RevCommit> newCommits = new ArrayList<>();
        List<RevCommit> oldCommits = new ArrayList<>();
        boolean foundNewest = false;
        boolean foundOldest = false;
        // The walk yields commits newest-first: everything before newestSha is
        // new, everything after oldestSha predates the indexed range, and the
        // commits in between are already indexed.
        for (RevCommit commit : commits) {
            String sha = commit.getName();
            if (Objects.equals(sha, newestSha)) {
                foundNewest = true;
            } else if (Objects.equals(sha, oldestSha)) {
                foundOldest = true;
            } else {
                if (foundNewest) {
                    if (foundOldest) {
                        oldCommits.add(commit);
                    } else {
                        // lets ignore this already-indexed commit which is >= newest and <= oldest
                    }
                } else {
                    newCommits.add(commit);
                }
            }
        }
        // lets reverse the order of any new commits so we process the oldest first
        // so we keep a continuous block of commits between oldest <-> newest
        Collections.reverse(newCommits);
        newCommits.addAll(oldCommits);
        if (LOG.isDebugEnabled()) {
            LOG.debug("" + name + " found " + newCommits.size() + " commit(s)");
        }
        return newCommits;
    }

    /**
     * Runs the given search and returns the _id of the first hit, or null when
     * there is no hit (a 404 from Elasticsearch is treated as "no hit").
     */
    protected String findFirstId(final SearchDTO search) {
        return WebClients.handle404ByReturningNull(new Callable<String>() {
            @Override
            public String call() throws Exception {
                ObjectNode results = elasticsearchClient.search(esIndex, esType, search);
                JsonNode hitsArray = results.path("hits").path("hits");
                JsonNode idNode = hitsArray.path(0).path("_id");
                String latestSha = null;
                if (idNode.isTextual()) {
                    latestSha = idNode.textValue();
                }
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Searching for " + JsonHelper.toJson(search) + " => " + latestSha);
                    LOG.debug("Found hits " + hitsArray.size());
                    /*
                    LOG.debug("JSON: " + JsonHelper.toJson(results));
                    */
                }
                return latestSha;
            }
        });
    }

    /**
     * Converts a commit to a CommitDTO and stores it in Elasticsearch keyed by its SHA.
     */
    protected void processCommit(NamespaceName projectName, Git git, RevCommit commit, BuildConfig buildConfig, String uri, String branch) throws JsonProcessingException {
        CommitDTO dto = new CommitDTO(git, projectName, commit, uri, branch);
        String sha = dto.getSha();
        if (LOG.isDebugEnabled()) {
            LOG.debug(projectName + " processing commit: " + sha + " time: " + dto.getCommitTime() + " message: " + dto.getShortMessage());
        }
        ResultsDTO results = elasticsearchClient.storeCommit(esIndex, esType, sha, dto);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Results: " + JsonHelper.toJson(results));
        }
    }

    /**
     * Returns the ObjectId of the local "refs/heads/&lt;branch&gt;" ref, or null
     * when the branch does not exist or listing branches fails.
     */
    protected ObjectId getBranchObjectId(Git git, String branch) {
        Ref branchRef = null;
        try {
            String branchRevName = "refs/heads/" + branch;
            List<Ref> branches = git.branchList().call();
            for (Ref ref : branches) {
                String revName = ref.getName();
                if (Objects.equals(branchRevName, revName)) {
                    branchRef = ref;
                    break;
                }
            }
        } catch (GitAPIException e) {
            LOG.warn("Failed to find branches " + e, e);
        }
        ObjectId branchObjectId = null;
        if (branchRef != null) {
            branchObjectId = branchRef.getObjectId();
        }
        return branchObjectId;
    }

    /**
     * Returns the SHA of the repository's HEAD commit; throws when there is none.
     */
    protected String getHEAD(Git git) {
        RevCommit commit = CommitUtils.getHead(git.getRepository());
        return commit.getName();
    }

    /**
     * Clones the repository when no local clone exists yet, otherwise pulls;
     * returns the local ".git" folder either way.
     */
    protected File cloneOrPullRepository(UserDetails userDetails, File projectFolder, String cloneUrl, String branch) {
        File gitFolder = new File(projectFolder, ".git");
        CredentialsProvider credentialsProvider = userDetails.createCredentialsProvider();
        if (!Files.isDirectory(gitFolder) || !Files.isDirectory(projectFolder)) {
            // lets clone the git repository!
            cloneRepo(projectFolder, cloneUrl, credentialsProvider, userDetails.getSshPrivateKey(), userDetails.getSshPublicKey(), userDetails.getRemote());
        } else {
            doPull(gitFolder, credentialsProvider, branch, userDetails.createPersonIdent(), userDetails);
        }
        return gitFolder;
    }

    /**
     * Performs a rebasing pull on the existing local clone; failures are logged
     * and swallowed so a broken remote never breaks the processing loop.
     */
    protected void doPull(File gitFolder, CredentialsProvider cp, String branch, PersonIdent personIdent, UserDetails userDetails) {
        try {
            FileRepositoryBuilder builder = new FileRepositoryBuilder();
            Repository repository = builder.setGitDir(gitFolder)
                    .readEnvironment() // scan environment GIT_* variables
                    .findGitDir() // scan up the file system tree
                    .build();
            Git git = new Git(repository);
            // NOTE(review): getDirectory() returns the ".git" metadata folder,
            // not the working tree, so the log messages below show that path.
            File projectFolder = repository.getDirectory();
            StoredConfig config = repository.getConfig();
            String url = config.getString("remote", userDetails.getRemote(), "url");
            if (Strings.isNullOrBlank(url)) {
                LOG.warn("No remote repository url for " + branch + " defined for the git repository at " + projectFolder.getCanonicalPath() + " so cannot pull");
                //return;
            }
            String mergeUrl = config.getString("branch", branch, "merge");
            if (Strings.isNullOrBlank(mergeUrl)) {
                LOG.warn("No merge spec for branch." + branch + ".merge in the git repository at " + projectFolder.getCanonicalPath() + " so not doing a pull");
                //return;
            }
            LOG.debug("Performing a pull in git repository " + projectFolder.getCanonicalPath() + " on remote URL: " + url);
            PullCommand pull = git.pull();
            GitHelpers.configureCommand(pull, userDetails);
            pull.setRebase(true).call();
        } catch (Throwable e) {
            // Deliberate best-effort: a failed pull must not abort processing.
            LOG.error("Failed to pull from the remote git repo with credentials " + cp + " due: " + e.getMessage() + ". This exception is ignored.", e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.job.yarn;
import java.util.Map;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.samza.coordinator.JobModelManager;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
public class SamzaAppState {
/**
* Represents an invalid or unknown Samza container ID.
*/
private static final int UNUSED_CONTAINER_ID = -1;
/**
* Job Coordinator is started in the AM and follows the {@link org.apache.samza.job.yarn.SamzaAppMasterService}
* lifecycle. It helps querying JobModel related info in {@link org.apache.samza.webapp.ApplicationMasterRestServlet}
* and locality information when host-affinity is enabled in {@link org.apache.samza.job.yarn.SamzaTaskManager}
*/
public final JobModelManager jobCoordinator;
/* The following state variables are primarily used for reference in the AM web services */
/**
* Task Id of the AM
* Used for displaying in the AM UI. Usage found in {@link org.apache.samza.webapp.ApplicationMasterRestServlet}
* and scalate/WEB-INF/views/index.scaml
*/
public final int taskId;
/**
* Id of the AM container (as allocated by the RM)
* Used for displaying in the AM UI. Usage in {@link org.apache.samza.webapp.ApplicationMasterRestServlet}
* and scalate/WEB-INF/views/index.scaml
*/
public final ContainerId amContainerId;
/**
* Host name of the NM on which the AM is running
* Used for displaying in the AM UI. See scalate/WEB-INF/views/index.scaml
*/
public final String nodeHost;
/**
* NM port on which the AM is running
* Used for displaying in the AM UI. See scalate/WEB-INF/views/index.scaml
*/
public final int nodePort;
/**
* Http port of the NM on which the AM is running
* Used for displaying in the AM UI. See scalate/WEB-INF/views/index.scaml
*/
public final int nodeHttpPort;
/**
* Application Attempt Id as provided by Yarn
* Used for displaying in the AM UI. See scalate/WEB-INF/views/index.scaml
* and {@link org.apache.samza.webapp.ApplicationMasterRestServlet}
*/
public final ApplicationAttemptId appAttemptId;
/**
* JMX Server URL, if enabled
* Used for displaying in the AM UI. See scalate/WEB-INF/views/index.scaml
*/
public String jmxUrl = "";
/**
* JMX Server Tunneling URL, if enabled
* Used for displaying in the AM UI. See scalate/WEB-INF/views/index.scaml
*/
public String jmxTunnelingUrl = "";
/**
* Job Coordinator URL
* Usage in {@link org.apache.samza.job.yarn.SamzaAppMasterService} & ContainerUtil
*/
public URL coordinatorUrl = null;
/**
* URL of the {@link org.apache.samza.webapp.ApplicationMasterRestServlet}
*/
public URL rpcUrl = null;
/**
* URL of the {@link org.apache.samza.webapp.ApplicationMasterWebServlet}
*/
public URL trackingUrl = null;
/**
* The following state variables are required for the correct functioning of the TaskManager
* Some of them are shared between the AMRMCallbackThread and the ContainerAllocator thread, as mentioned below.
*/
/**
* Number of containers that have completed their execution and exited successfully
*/
public AtomicInteger completedContainers = new AtomicInteger(0);
/**
* Number of failed containers
* */
public AtomicInteger failedContainers = new AtomicInteger(0);
/**
* Number of containers released due to extra allocation returned by the RM
*/
public AtomicInteger releasedContainers = new AtomicInteger(0);
/**
* ContainerStatus of failed containers.
*/
public ConcurrentMap<String, ContainerStatus> failedContainersStatus = new ConcurrentHashMap<String, ContainerStatus>();
/**
* Number of containers configured for the job
*/
public int containerCount = 0;
/**
* Set of finished containers - TODO: Can be changed to a counter
*/
public Set<Integer> finishedContainers = new HashSet<Integer>();
/**
* Number of containers needed for the job to be declared healthy
* Modified by both the AMRMCallbackThread and the ContainerAllocator thread
*/
public AtomicInteger neededContainers = new AtomicInteger(0);
/**
* Map of the samzaContainerId to the {@link org.apache.samza.job.yarn.YarnContainer} on which it is running
* Modified by both the AMRMCallbackThread and the ContainerAllocator thread
*/
public ConcurrentMap<Integer, YarnContainer> runningContainers = new ConcurrentHashMap<Integer, YarnContainer>(0);
/**
* Final status of the application
* Modified by both the AMRMCallbackThread and the ContainerAllocator thread
*/
public FinalApplicationStatus status = FinalApplicationStatus.UNDEFINED;
/**
* State indicating whether the job is healthy or not
* Modified by both the AMRMCallbackThread and the ContainerAllocator thread
*/
public AtomicBoolean jobHealthy = new AtomicBoolean(true);
public AtomicInteger containerRequests = new AtomicInteger(0);
public AtomicInteger matchedContainerRequests = new AtomicInteger(0);
public SamzaAppState(JobModelManager jobModelManager,
int taskId,
ContainerId amContainerId,
String nodeHost,
int nodePort,
int nodeHttpPort) {
this.jobCoordinator = jobModelManager;
this.taskId = taskId;
this.amContainerId = amContainerId;
this.nodeHost = nodeHost;
this.nodePort = nodePort;
this.nodeHttpPort = nodeHttpPort;
this.appAttemptId = amContainerId.getApplicationAttemptId();
}
/**
* Returns the Samza container ID if the specified YARN container ID corresponds to a running container.
*
* @param yarnContainerId the YARN container ID.
* @return the Samza container ID if it is running,
* otherwise {@link SamzaAppState#UNUSED_CONTAINER_ID}.
*/
public int getRunningSamzaContainerId(ContainerId yarnContainerId) {
  // Linear scan over the running-container map looking for a YARN container
  // whose id matches the requested one.
  for (Map.Entry<Integer, YarnContainer> mapping : runningContainers.entrySet()) {
    YarnContainer candidate = mapping.getValue();
    if (candidate.id().equals(yarnContainerId)) {
      return mapping.getKey();
    }
  }
  // No running Samza container is backed by the given YARN container.
  return UNUSED_CONTAINER_ID;
}
/**
* @param samzaContainerId the Samza container ID to validate.
* @return {@code true} if the ID is valid, {@code false} otherwise
*/
public static boolean isValidContainerId(int samzaContainerId) {
  // Only the UNUSED sentinel is invalid; every other id names a real container.
  if (samzaContainerId == UNUSED_CONTAINER_ID) {
    return false;
  }
  return true;
}
}
| |
/*
* @(#)PoleFigureOutput.java created 27/10/1999 Berkeley
*
* Copyright (c) 1999 Luca Lutterotti All Rights Reserved.
*
* This software is the research result of Luca Lutterotti and it is
* provided as it is as confidential and proprietary information.
* You shall not disclose such Confidential Information and shall use
* it only in accordance with the terms of the license agreement you
* entered into with Luca Lutterotti.
*
* THE AUTHOR MAKES NO REPRESENTATIONS OR WARRANTIES ABOUT THE SUITABILITY OF THE
* SOFTWARE, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* PURPOSE, OR NON-INFRINGEMENT. THE AUTHOR SHALL NOT BE LIABLE FOR ANY DAMAGES
* SUFFERED BY LICENSEE AS A RESULT OF USING, MODIFYING OR DISTRIBUTING
* THIS SOFTWARE OR ITS DERIVATIVES.
*
*/
package it.unitn.ing.rista.diffr.rta;
import java.lang.*;
import java.io.*;
import java.util.Vector;
import it.unitn.ing.rista.util.*;
import it.unitn.ing.rista.diffr.*;
import it.unitn.ing.rista.awt.*;
/**
 * PoleFigureOutput writes experimental or recalculated pole figures for a
 * phase to a text file in a fixed-column (BEARTEX-style) format.
 *
*
* @version $Revision: 1.6 $, $Date: 2006/01/19 14:45:57 $
* @author Luca Lutterotti
* @since JDK1.1
*/
public class PoleFigureOutput {

  // Output format identifier; only set by the three-argument constructor.
  String outputFormat = null;
  // Phase whose pole figures are exported.
  Phase phase = null;
  // Destination file name.
  String filename = null;
  // Writer for the pole figure file; opened by openOutput().
  BufferedWriter PFwriter = null;
  // Comment-line prefixes for experimental / recalculated pole figures.
  String title1 = null;
  String title2 = null;
  // Angular grid resolution in degrees.
  double resolution = 5.0;
  // Beta samples per alpha ring at 5-degree resolution (360/5 + 1 = 73).
  int alphamax = 73;
  // Total number of (alpha, beta) grid points: 19 alpha rings * 73 beta values.
  int old1387max = 1387;

  /**
   * @param outputFormat output format identifier (stored, not interpreted here)
   * @param filename destination file name
   * @param aphase phase whose pole figures will be written
   */
  public PoleFigureOutput(String outputFormat, String filename, Phase aphase) {
    this.outputFormat = outputFormat;
    phase = aphase;
    this.filename = filename;
  }

  /**
   * @param filename destination file name
   * @param aphase phase whose pole figures will be written
   */
  public PoleFigureOutput(String filename, Phase aphase) {
    phase = aphase;
    this.filename = filename;
  }

  /** Opens the destination file and prepares the two comment-line titles. */
  public void openOutput() {
    PFwriter = Misc.getWriter(filename);
    // Concatenation already yields a String; the previous new String(...) wrappers were redundant.
    title1 = phase.toXRDcatString() + ": experimental pole figure, ";
    title2 = phase.toXRDcatString() + ": recalculated pole figure, ";
  }

  /**
   * Writes one pole figure for reflection (h, k, l).
   *
   * @param h Miller index h
   * @param k Miller index k
   * @param l Miller index l
   * @param polf pole figure intensities over the (alpha, beta) grid; negative
   *             values are clipped to zero (this array is modified in place)
   * @param experimental true selects the "experimental" comment title,
   *                     false the "recalculated" one
   */
  public void write(int h, int k, int l, double[] polf, boolean experimental) {
    // Build the 80-column comment line: title + "h,k,l", space-padded to 79
    // characters and terminated by '#'.
    StringBuilder tmp;
    if (experimental)
      tmp = new StringBuilder(title1);
    else
      tmp = new StringBuilder(title2);
    tmp = tmp.append(" ").append(Integer.toString(h)).
        append(",").append(Integer.toString(k)).
        append(",").append(Integer.toString(l));
    int bufflength = tmp.length();
    for (int i = 0; i < 79 - bufflength; i++)
      tmp = tmp.append(" ");
    String commentLine = tmp.toString().substring(0, 79) + "#";
    try {
      PFwriter.write(commentLine);
      // One separator terminates the comment line, four more give blank header lines.
      for (int i = 0; i < 5; i++)
        PFwriter.write(Constants.lineSeparator);
      // new String(" 1.0000 1.0000 1.0000 90.0000 90.0000 90.0000 7 1");
      PFwriter.write(Misc.getFirstPFline(phase));
      PFwriter.write(Constants.lineSeparator);
      // Header line: hkl indices plus the alpha/beta ranges and resolutions.
      String firstline = " " + Misc.getIntStringFormatted(h, 3) +
          Misc.getIntStringFormatted(k, 3) +
          Misc.getIntStringFormatted(l, 3) +
          " .0 90.0" + Misc.getDoubleStringFormatted(resolution, 3, 1) +
          " .0360.0" + Misc.getDoubleStringFormatted(resolution, 3, 1) +
          " 1 1";
      PFwriter.write(firstline);
      PFwriter.write(Constants.lineSeparator);
    } catch (IOException io) {
      // Previously swallowed silently; report so truncated files are diagnosable.
      System.err.println("PoleFigureOutput: error writing pole figure header: " + io);
    }
    int until18 = 0;
    int skip73 = 0;
    for (int ny = 0; ny < old1387max; ++ny) {
      // Every 73rd beta value of a ring is skipped — presumably the closing
      // 360-degree value duplicating the 0-degree one; TODO confirm.
      if (++skip73 != alphamax) {
        if (polf[ny] < 0.0)
          polf[ny] = 0.0;
        // Intensities are stored as integers in units of 1/100; the small
        // epsilon guards against truncation of exact values.
        int imh = (int) (polf[ny] * 100.000001);
        try {
          if (until18 == 0)
            PFwriter.write(" ");
          PFwriter.write(Misc.getIntStringFormatted(imh, 4));
          // 18 values per output line.
          if (++until18 >= 18) {
            until18 = 0;
            PFwriter.write(Constants.lineSeparator);
          }
        } catch (IOException io) {
          System.err.println("PoleFigureOutput: error writing pole figure data: " + io);
        }
      } else
        skip73 = 0;
    }
    // Terminate a partially filled last line, then add a blank line.
    if (until18 != 0)
      try {
        PFwriter.write(Constants.lineSeparator);
      } catch (IOException io) {
        System.err.println("PoleFigureOutput: error terminating data line: " + io);
      }
    try {
      PFwriter.write(Constants.lineSeparator);
    } catch (IOException io) {
      System.err.println("PoleFigureOutput: error writing separator line: " + io);
    }
  }

  /** Flushes and closes the output file. */
  public void closeOutput() {
    try {
      PFwriter.write(Constants.lineSeparator);
      PFwriter.flush();
      PFwriter.close();
    } catch (IOException io) {
      System.err.println("PoleFigureOutput: error closing " + filename + ": " + io);
    }
  }

  /**
   * Computes the texture factors of one reflection on the full grid and
   * writes the resulting (recalculated) pole figure.
   *
   * @param reflex reflection whose pole figure is computed
   * @param textureModel texture model used for the computation
   */
  public void computeAndWrite(Reflection reflex, Texture textureModel) {
    double[] polf;
    // Build the full 5-degree (alpha, beta) grid: 19 alpha rings x 73 beta
    // values, angles converted to radians.
    double[][] alphabeta = new double[2][old1387max];
    int old19 = 19;
    int ij = 0;
    for (int i = 0; i < old19; i++) {
      for (int j = 0; j < alphamax; j++) {
        alphabeta[0][ij] = i * resolution * Constants.DEGTOPI;
        alphabeta[1][ij++] = j * resolution * Constants.DEGTOPI;
      }
    }
    polf = textureModel.computeTextureFactor(phase, alphabeta, reflex);
    write(reflex.getH(), reflex.getK(), reflex.getL(), polf, false);
  }

  /**
   * Writes pole figures for every reflection of the phase (capped by the
   * "textureOutput.maxPFsBeartexFormat" preference), with optional progress UI.
   */
  public void computeAndWrite() {
    openOutput();
    Texture textureModel = phase.getActiveTexture();
    int hklnumber = phase.gethklNumber();
    int maxPFs = MaudPreferences.getInteger("textureOutput.maxPFsBeartexFormat", 100);
    if (hklnumber > maxPFs) hklnumber = maxPFs;
    ProgressFrame prF = null;
    if (!Constants.textonly && Constants.showProgressFrame)
      try {
        prF = new ProgressFrame(hklnumber);
      } catch (NullPointerException npe) {
        System.out.println("Not able to create frame, MacOSX display sleep bug?");
      }
    printf("Saving pole figures... ", prF);
    for (int i = 0; i < hklnumber; i++) {
      Reflection reflex = phase.getReflex(i);
      computeAndWrite(reflex, textureModel);
      if (prF != null)
        prF.increaseProgressBarValue();
      printf("Saving pole figure: " + Integer.toString(reflex.getH()) + " " + Integer.toString(reflex.getK()) +
          " " + Integer.toString(reflex.getL()), prF);
    }
    if (prF != null) {
      prF.setVisible(false);
      prF.dispose();
    }
    closeOutput();
  }

  /**
   * Writes pole figures for an explicit list of reflections.
   *
   * @param reflList reflections to export
   */
  public void computeAndWrite(Vector<Reflection> reflList) {
    openOutput();
    Texture textureModel = phase.getActiveTexture();
    int hklnumber = reflList.size();
    for (int i = 0; i < hklnumber; i++)
      computeAndWrite(reflList.elementAt(i), textureModel);
    closeOutput();
  }

  /**
   * Reports progress either to the progress frame (if present) or to stdout.
   *
   * @param message progress message
   * @param prF progress frame, may be null
   */
  public void printf(String message, ProgressFrame prF) {
    if (prF != null)
      prF.setProgressText(message);
    else
      System.out.println(message);
  }
}
| |
/*
* @(#)NetworkMap.java
*
* Created on 2008. 02. 19
*
* This software is the confidential and proprietary information of
* POSTECH DP&NM. ("Confidential Information"). You shall not
* disclose such Confidential Information and shall use it only in
* accordance with the terms of the license agreement you entered into
* with Eric Kang.
*
* Contact: Eric Kang at eliot@postech.edu
*/
package dpnm.netma.ngom.data;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.io.IOException;
import java.util.Hashtable;
import java.util.Map;
import java.util.Vector;
/**
* This class is for NetworkMap
*
* @author Eric Kang
* @since 2008/02/19
* @version $Revision: 1.1 $
*/
public class NetworkMap {
    /*
     * XML element names used when (de)serializing a map
     */
    /** map element and its child element names */
    public static final String MAP = "map";
    public static final String NAME = "name";
    public static final String DESCR = "description";
    public static final String WIDTH = "width";
    public static final String HEIGHT = "height";
    public static final String BACKGROUND = "background";
    public static final String DEVICE = "device";
    public static final String LINK = "link";

    protected String name = "";
    protected String descr = "";
    protected int width = 0;
    protected int height = 0;
    protected String background = "";
    protected Vector<NetworkMapDevice> devices = null;
    protected Vector<NetworkMapLink> links = null;

    /** Creates an empty map with no devices or links. */
    public NetworkMap() {
        devices = new Vector<NetworkMapDevice>();
        links = new Vector<NetworkMapLink>();
    }

    /**
     * Constructor
     *
     * @param name map name
     * @param descr map description
     * @param width map width
     * @param height map height
     */
    public NetworkMap(String name, String descr, int width, int height) {
        this();
        setName(name);
        setDescr(descr);
        setWidth(width);
        setHeight(height);
    }

    /**
     * Builds a map from its XML representation.
     *
     * @param mapNode DOM node of a "map" element
     */
    public NetworkMap(Node mapNode) {
        this();
        NodeList nodes = mapNode.getChildNodes();
        for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            String nodeName = node.getNodeName();
            Node firstChild = node.getFirstChild();
            String value = (firstChild != null) ? firstChild.getNodeValue().trim() : null;
            // Compare element names with equals(); the previous identity
            // comparison of interned strings was fragile and non-idiomatic.
            if (NAME.equals(nodeName)) {
                setName(value);
            } else if (DESCR.equals(nodeName)) {
                setDescr(value);
            } else if (WIDTH.equals(nodeName)) {
                setWidth(Integer.parseInt(value));
            } else if (HEIGHT.equals(nodeName)) {
                setHeight(Integer.parseInt(value));
            } else if (BACKGROUND.equals(nodeName)) {
                setBackground(value == null ? "" : value);
            } else if (DEVICE.equals(nodeName)) {
                devices.add(new NetworkMapDevice(node));
            } else if (LINK.equals(nodeName)) {
                links.add(new NetworkMapLink(node));
            }
        }
    }

    /**
     * add device to the map
     *
     * @param device device
     */
    public void addDevice(NetworkMapDevice device) {
        devices.add(device);
    }

    /**
     * remove device from the map
     *
     * @param device device
     */
    public void removeDevice(NetworkMapDevice device) {
        // BUG FIX: previously called devices.remove(devices), which could never
        // remove anything because the vector does not contain itself.
        devices.remove(device);
    }

    /**
     * get the number of devices of this map
     *
     * @return number of devices
     */
    public int getDeviceCount() {
        return devices.size();
    }

    /**
     * add link to the map
     *
     * @param link link
     */
    public void addLink(NetworkMapLink link) {
        links.add(link);
    }

    /**
     * remove the link from the map
     *
     * @param link link to remove
     */
    public void removeLink(NetworkMapLink link) {
        links.remove(link);
    }

    /**
     * get the number of links of this map
     *
     * @return number of links
     */
    public int getLinkCount() {
        return links.size();
    }

    /**
     * get the map information
     *
     * @return map information keyed by the XML element-name constants; device
     *         and link entries hold vectors of their info tables
     */
    public Hashtable getMapInfo() {
        Hashtable table = new Hashtable();
        table.put(NAME, name);
        table.put(DESCR, descr);
        // Integer.valueOf instead of the deprecated new Integer(...) constructor.
        table.put(WIDTH, Integer.valueOf(width));
        table.put(HEIGHT, Integer.valueOf(height));
        table.put(BACKGROUND, background);
        // add device info
        Vector<Hashtable> deviceData = new Vector<Hashtable>();
        for (int i = 0; i < devices.size(); i++) {
            NetworkMapDevice device = devices.get(i);
            deviceData.add(device.getDeviceInfo());
        }
        table.put(DEVICE, deviceData);
        // add link info
        Vector<Hashtable> linkData = new Vector<Hashtable>();
        for (int i = 0; i < links.size(); i++) {
            NetworkMapLink link = links.get(i);
            linkData.add(link.getLinkInfo());
        }
        table.put(LINK, linkData);
        return table;
    }

    // Appends a child element with a single text node to the given parent.
    private void appendTextElement(Element parent, String tag, String text) {
        Document doc = parent.getOwnerDocument();
        Element elem = doc.createElement(tag);
        elem.appendChild(doc.createTextNode(text));
        parent.appendChild(elem);
    }

    /**
     * Appends this map (including its devices and links) as a "map" element
     * under the given root element.
     *
     * @param root element to append the map element to
     */
    public void appendXml(Element root) {
        Document doc = root.getOwnerDocument();
        Element mapElem = doc.createElement(MAP);
        root.appendChild(mapElem);
        appendTextElement(mapElem, NAME, name);
        appendTextElement(mapElem, DESCR, descr);
        appendTextElement(mapElem, WIDTH, String.valueOf(width));
        appendTextElement(mapElem, HEIGHT, String.valueOf(height));
        appendTextElement(mapElem, BACKGROUND, background);
        for (int i = 0; i < devices.size(); i++) {
            NetworkMapDevice device = devices.elementAt(i);
            device.appendXml(mapElem);
        }
        for (int i = 0; i < links.size(); i++) {
            // links is already Vector<NetworkMapLink>; the old cast was redundant.
            NetworkMapLink link = links.elementAt(i);
            link.appendXml(mapElem);
        }
    }

    /**
     * get the description of map
     *
     * @return map description
     */
    public String getDescr() {
        return descr;
    }

    /**
     * set the description of map
     *
     * @param descr description of map
     */
    public void setDescr(String descr) {
        this.descr = descr;
    }

    /**
     * get the network map link
     *
     * @return network map link list
     */
    public Vector<NetworkMapLink> getLinks() {
        return links;
    }

    /**
     * set the network map link
     *
     * @param links network map link
     */
    public void setLinks(Vector<NetworkMapLink> links) {
        this.links = links;
    }

    /**
     * get the name of map
     *
     * @return map name
     */
    public String getName() {
        return name;
    }

    /**
     * set the name of map
     *
     * @param name map name
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * get the devices of map
     *
     * @return devices
     */
    public Vector<NetworkMapDevice> getDevices() {
        return devices;
    }

    /**
     * set the devices of map
     *
     * @param devices devices
     */
    public void setDevices(Vector<NetworkMapDevice> devices) {
        this.devices = devices;
    }

    /**
     * get the height of map
     *
     * @return height of map
     */
    public int getHeight() {
        return height;
    }

    /**
     * set the height of map
     *
     * @param height height of map
     */
    public void setHeight(int height) {
        this.height = height;
    }

    /**
     * get the width of map
     *
     * @return width of map
     */
    public int getWidth() {
        return width;
    }

    /**
     * set the width of map
     *
     * @param width width of map
     */
    public void setWidth(int width) {
        this.width = width;
    }

    /**
     * @return the background
     */
    public String getBackground() {
        return background;
    }

    /**
     * @param background the background to set
     */
    public void setBackground(String background) {
        this.background = background;
    }

    /**
     * get the device by host name (case-insensitive)
     *
     * @param host host name to look up
     * @return device, or null if no device matches
     */
    public NetworkMapDevice getDeviceByHost(String host) {
        for (int i = 0; i < devices.size(); i++) {
            NetworkMapDevice device = devices.get(i);
            if (device.getHost().equalsIgnoreCase(host)) {
                return device;
            }
        }
        return null;
    }

    /**
     * get the link by interface description (case-insensitive)
     *
     * @param ifDescr interface description to look up
     * @return link, or null if no link matches
     */
    public NetworkMapLink getLinkByIfDescr(String ifDescr) {
        for (int i = 0; i < links.size(); i++) {
            NetworkMapLink link = links.get(i);
            if (link.getIfDescr().equalsIgnoreCase(ifDescr)) {
                return link;
            }
        }
        return null;
    }

    /**
     * get the link by interface description, source device and destination
     * device (all compared case-insensitively).
     *
     * @param ifDescr interface description
     * @param src source device
     * @param dst destination device
     * @return link, or null if no link matches
     */
    public NetworkMapLink getLink(String ifDescr, String src, String dst) {
        for (int i = 0; i < links.size(); i++) {
            NetworkMapLink link = links.get(i);
            if (link.getIfDescr().equalsIgnoreCase(ifDescr) &&
                    link.getSrc().equalsIgnoreCase(src) &&
                    link.getDst().equalsIgnoreCase(dst)) {
                return link;
            }
        }
        return null;
    }

    /** Debug dump of the map, its devices and its links. */
    public String toString() {
        // StringBuilder instead of repeated String concatenation in a loop.
        StringBuilder buff = new StringBuilder();
        buff.append("Map: ").append(name).append(", ");
        buff.append("descr=").append(descr).append(", ");
        buff.append("(").append(width).append(", ").append(height).append(")").append("\n");
        buff.append(" - ").append(background).append("\n");
        if (devices != null) {
            // dump routers
            for (int i = 0; i < devices.size(); i++) {
                NetworkMapDevice router = devices.get(i);
                buff.append("  Router: ").append(router).append("\n");
            }
        }
        if (links != null) {
            // dump links
            for (int i = 0; i < links.size(); i++) {
                NetworkMapLink link = links.get(i);
                buff.append("  Link: ").append(link).append("\n");
            }
        }
        return buff.toString();
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver10;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * OpenFlow 1.0 wire representation of the SET_VLAN_PCP action.
 * Wire layout (8 bytes): type(2) length(2) pcp(1) pad(3).
 * NOTE: generated by LoxiGen (see file header) -- logic intentionally left untouched.
 */
class OFActionSetVlanPcpVer10 implements OFActionSetVlanPcp {
    private static final Logger logger = LoggerFactory.getLogger(OFActionSetVlanPcpVer10.class);
    // version: 1.0
    final static byte WIRE_VERSION = 1;
    final static int LENGTH = 8;

    private final static VlanPcp DEFAULT_VLAN_PCP = VlanPcp.NONE;

    // OF message fields
    private final VlanPcp vlanPcp;
    //
    // Immutable default instance
    final static OFActionSetVlanPcpVer10 DEFAULT = new OFActionSetVlanPcpVer10(
        DEFAULT_VLAN_PCP
    );

    // package private constructor - used by readers, builders, and factory
    OFActionSetVlanPcpVer10(VlanPcp vlanPcp) {
        if(vlanPcp == null) {
            throw new NullPointerException("OFActionSetVlanPcpVer10: property vlanPcp cannot be null");
        }
        this.vlanPcp = vlanPcp;
    }

    // Accessors for OF message fields
    @Override
    public OFActionType getType() {
        return OFActionType.SET_VLAN_PCP;
    }

    @Override
    public VlanPcp getVlanPcp() {
        return vlanPcp;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_10;
    }

    public OFActionSetVlanPcp.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder seeded from an existing message: unset fields fall back to the parent's values.
    static class BuilderWithParent implements OFActionSetVlanPcp.Builder {
        final OFActionSetVlanPcpVer10 parentMessage;

        // OF message fields
        private boolean vlanPcpSet;
        private VlanPcp vlanPcp;

        BuilderWithParent(OFActionSetVlanPcpVer10 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFActionType getType() {
            return OFActionType.SET_VLAN_PCP;
        }

        @Override
        public VlanPcp getVlanPcp() {
            return vlanPcp;
        }

        @Override
        public OFActionSetVlanPcp.Builder setVlanPcp(VlanPcp vlanPcp) {
            this.vlanPcp = vlanPcp;
            this.vlanPcpSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_10;
        }

        @Override
        public OFActionSetVlanPcp build() {
            VlanPcp vlanPcp = this.vlanPcpSet ? this.vlanPcp : parentMessage.vlanPcp;
            if(vlanPcp == null)
                throw new NullPointerException("Property vlanPcp must not be null");
            //
            return new OFActionSetVlanPcpVer10(
                vlanPcp
            );
        }
    }

    // Stand-alone builder: unset fields fall back to the declared defaults.
    static class Builder implements OFActionSetVlanPcp.Builder {
        // OF message fields
        private boolean vlanPcpSet;
        private VlanPcp vlanPcp;

        @Override
        public OFActionType getType() {
            return OFActionType.SET_VLAN_PCP;
        }

        @Override
        public VlanPcp getVlanPcp() {
            return vlanPcp;
        }

        @Override
        public OFActionSetVlanPcp.Builder setVlanPcp(VlanPcp vlanPcp) {
            this.vlanPcp = vlanPcp;
            this.vlanPcpSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_10;
        }

        //
        @Override
        public OFActionSetVlanPcp build() {
            VlanPcp vlanPcp = this.vlanPcpSet ? this.vlanPcp : DEFAULT_VLAN_PCP;
            if(vlanPcp == null)
                throw new NullPointerException("Property vlanPcp must not be null");
            return new OFActionSetVlanPcpVer10(
                vlanPcp
            );
        }
    }

    final static Reader READER = new Reader();
    // Deserializes the 8-byte action; returns null when the buffer is incomplete
    // (after resetting the reader index so the caller can retry).
    static class Reader implements OFMessageReader<OFActionSetVlanPcp> {
        @Override
        public OFActionSetVlanPcp readFrom(ChannelBuffer bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 2
            short type = bb.readShort();
            if(type != (short) 0x2)
                throw new OFParseError("Wrong type: Expected=OFActionType.SET_VLAN_PCP(2), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 8)
                throw new OFParseError("Wrong length: Expected=8(8), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            VlanPcp vlanPcp = VlanPcp.readByte(bb);
            // pad: 3 bytes
            bb.skipBytes(3);
            OFActionSetVlanPcpVer10 actionSetVlanPcpVer10 = new OFActionSetVlanPcpVer10(
                vlanPcp
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", actionSetVlanPcpVer10);
            return actionSetVlanPcpVer10;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFActionSetVlanPcpVer10Funnel FUNNEL = new OFActionSetVlanPcpVer10Funnel();
    // Feeds the wire-significant fields into a Guava PrimitiveSink (for hashing).
    static class OFActionSetVlanPcpVer10Funnel implements Funnel<OFActionSetVlanPcpVer10> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFActionSetVlanPcpVer10 message, PrimitiveSink sink) {
            // fixed value property type = 2
            sink.putShort((short) 0x2);
            // fixed value property length = 8
            sink.putShort((short) 0x8);
            message.vlanPcp.putTo(sink);
            // skip pad (3 bytes)
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    // Serializes the action into its fixed 8-byte wire form.
    static class Writer implements OFMessageWriter<OFActionSetVlanPcpVer10> {
        @Override
        public void write(ChannelBuffer bb, OFActionSetVlanPcpVer10 message) {
            // fixed value property type = 2
            bb.writeShort((short) 0x2);
            // fixed value property length = 8
            bb.writeShort((short) 0x8);
            message.vlanPcp.writeByte(bb);
            // pad: 3 bytes
            bb.writeZero(3);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFActionSetVlanPcpVer10(");
        b.append("vlanPcp=").append(vlanPcp);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFActionSetVlanPcpVer10 other = (OFActionSetVlanPcpVer10) obj;
        if (vlanPcp == null) {
            if (other.vlanPcp != null)
                return false;
        } else if (!vlanPcp.equals(other.vlanPcp))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((vlanPcp == null) ? 0 : vlanPcp.hashCode());
        return result;
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android.packageable;
import com.facebook.buck.android.apkmodule.APKModule;
import com.facebook.buck.android.apkmodule.APKModuleGraph;
import com.facebook.buck.android.packageable.AndroidPackageableCollection.ResourceDetails;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.targetgraph.TargetGraph;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkable;
import com.facebook.buck.jvm.core.HasJavaClassHashes;
import com.facebook.buck.rules.coercer.BuildConfigFields;
import com.facebook.buck.util.MoreSuppliers;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.hash.HashCode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
public class AndroidPackageableCollector {
// Accumulates everything contributed by visited packageables.
private final AndroidPackageableCollection.Builder collectionBuilder =
    AndroidPackageableCollection.builder();
private final ResourceDetails.Builder resourceDetailsBuilder = ResourceDetails.builder();
// Owners and directories are appended in lock-step by doAddResourceDirectory.
private final ImmutableList.Builder<BuildTarget> resourcesWithNonEmptyResDir =
    ImmutableList.builder();
private final ImmutableList.Builder<BuildTarget> resourcesWithAssets = ImmutableList.builder();
private final ImmutableList.Builder<SourcePath> resourceDirectories = ImmutableList.builder();
// Map is used instead of ImmutableMap.Builder for its containsKey() method.
private final Map<String, BuildConfigFields> buildConfigs = new HashMap<>();
private final ImmutableSet.Builder<HasJavaClassHashes> javaClassHashesProviders =
    ImmutableSet.builder();
// Target this collection is rooted at (used in error messages and the test ctor).
private final BuildTarget collectionRoot;
private final ImmutableSet<BuildTarget> buildTargetsToExcludeFromDex;
private final ImmutableSet<BuildTarget> resourcesToExclude;
private final APKModuleGraph apkModuleGraph;
@VisibleForTesting
public AndroidPackageableCollector(BuildTarget collectionRoot) {
  // Test-only convenience: no dex/resource exclusions and a trivial APK module
  // graph rooted at collectionRoot.
  this(
      collectionRoot,
      ImmutableSet.of(),
      ImmutableSet.of(),
      new APKModuleGraph(Optional.empty(), Optional.empty(), TargetGraph.EMPTY, collectionRoot));
}
/**
* @param resourcesToExclude Only relevant to {@link AndroidInstrumentationApk} which needs to
* remove resources that are already included in the {@link
* AndroidInstrumentationApkDescription.AndroidInstrumentationApkDescriptionArg#apk}
*/
// Primary constructor; resourcesToExclude semantics are described in the javadoc above.
public AndroidPackageableCollector(
    BuildTarget collectionRoot,
    ImmutableSet<BuildTarget> buildTargetsToExcludeFromDex,
    ImmutableSet<BuildTarget> resourcesToExclude,
    APKModuleGraph apkModuleGraph) {
  this.collectionRoot = collectionRoot;
  this.buildTargetsToExcludeFromDex = buildTargetsToExcludeFromDex;
  this.resourcesToExclude = resourcesToExclude;
  this.apkModuleGraph = apkModuleGraph;
}
/** Adds the given packageables and their transitive packageable deps to this collector. */
public void addPackageables(
    Iterable<AndroidPackageable> packageables, BuildRuleResolver ruleResolver) {
  Set<AndroidPackageable> visited = new HashSet<>();
  for (AndroidPackageable root : packageables) {
    postOrderTraverse(root, visited, ruleResolver);
  }
}
/**
 * Depth-first post-order walk over the packageable graph: a node's dependencies
 * are added to the collector before the node itself.
 */
private void postOrderTraverse(
    AndroidPackageable packageable,
    Set<AndroidPackageable> explored,
    BuildRuleResolver ruleResolver) {
  // Set.add returns false when the element was already present, so this single
  // call both marks the node visited and guards against revisiting it
  // (replaces the separate contains()+add() pair).
  if (!explored.add(packageable)) {
    return;
  }
  for (AndroidPackageable dep : packageable.getRequiredPackageables(ruleResolver)) {
    postOrderTraverse(dep, explored, ruleResolver);
  }
  packageable.addToCollector(this);
}
/**
* Returns all {@link BuildRule}s of the given rules that are {@link AndroidPackageable}. Helper
* for implementations of AndroidPackageable that just want to return all of their packagable
* dependencies.
*/
public static Iterable<AndroidPackageable> getPackageableRules(Iterable<BuildRule> rules) {
  // FluentIterable.filter(Class) both filters and casts; toList() yields an ImmutableList.
  return FluentIterable.from(rules).filter(AndroidPackageable.class).toList();
}
// Records the directory both as string-whitelisted and as a regular resource
// directory, unless the owner's resources are excluded from this collection.
public AndroidPackageableCollector addStringWhitelistedResourceDirectory(
    BuildTarget owner, SourcePath resourceDir) {
  boolean excluded = resourcesToExclude.contains(owner);
  if (!excluded) {
    resourceDetailsBuilder.addWhitelistedStringDirectories(resourceDir);
    doAddResourceDirectory(owner, resourceDir);
  }
  return this;
}
// Records a resource directory for the owner unless the owner is excluded
// (e.g. its resources already ship in another APK).
public AndroidPackageableCollector addResourceDirectory(
    BuildTarget owner, SourcePath resourceDir) {
  if (!resourcesToExclude.contains(owner)) {
    doAddResourceDirectory(owner, resourceDir);
  }
  return this;
}
// Records both the owning target and its resource directory; the two builders
// are appended in lock-step so their indices correspond.
private void doAddResourceDirectory(BuildTarget owner, SourcePath resourceDir) {
  resourcesWithNonEmptyResDir.add(owner);
  resourceDirectories.add(resourceDir);
}
// Native libs owned by the root module ship as real libraries; libs owned by
// any other module are packaged as assets instead.
public AndroidPackageableCollector addNativeLibsDirectory(
    BuildTarget owner, SourcePath nativeLibDir) {
  APKModule module = apkModuleGraph.findModuleForTarget(owner);
  if (!module.isRootModule()) {
    collectionBuilder.putNativeLibAssetsDirectories(module, nativeLibDir);
  } else {
    collectionBuilder.putNativeLibsDirectories(module, nativeLibDir);
  }
  return this;
}
// Root-module linkables are linked normally; linkables owned by other modules
// are recorded as assets.
public AndroidPackageableCollector addNativeLinkable(NativeLinkable nativeLinkable) {
  APKModule module = apkModuleGraph.findModuleForTarget(nativeLinkable.getBuildTarget());
  boolean inRootModule = module.isRootModule();
  if (inRootModule) {
    collectionBuilder.putNativeLinkables(module, nativeLinkable);
  } else {
    collectionBuilder.putNativeLinkablesAssets(module, nativeLinkable);
  }
  return this;
}
// Unconditionally records the linkable as an asset, regardless of which module owns it.
public AndroidPackageableCollector addNativeLinkableAsset(NativeLinkable nativeLinkable) {
  APKModule module = apkModuleGraph.findModuleForTarget(nativeLinkable.getBuildTarget());
  collectionBuilder.putNativeLinkablesAssets(module, nativeLinkable);
  return this;
}
// Records a native target's assets directory under the APK module that owns the target.
public AndroidPackageableCollector addNativeLibAssetsDirectory(
    BuildTarget owner, SourcePath assetsDir) {
  // We need to build the native target in order to have the assets available still.
  APKModule module = apkModuleGraph.findModuleForTarget(owner);
  collectionBuilder.putNativeLibAssetsDirectories(module, assetsDir);
  return this;
}
// Records an assets directory and its owner; excluded owners contribute nothing.
public AndroidPackageableCollector addAssetsDirectory(
    BuildTarget owner, SourcePath assetsDirectory) {
  if (!resourcesToExclude.contains(owner)) {
    resourcesWithAssets.add(owner);
    collectionBuilder.addAssetsDirectories(assetsDirectory);
  }
  return this;
}
// Registers a proguard config unless its owner is excluded from dexing.
public AndroidPackageableCollector addProguardConfig(
    BuildTarget owner, SourcePath proguardConfig) {
  if (buildTargetsToExcludeFromDex.contains(owner)) {
    return this;
  }
  collectionBuilder.addProguardConfigs(proguardConfig);
  return this;
}
/**
 * Registers a classpath entry for dexing. Entries whose owning target is on the
 * no-dx list are recorded separately and contribute no class hashes; all other
 * entries are mapped to their APK module and their hash provider is retained
 * for the eventual class-name-to-hash index built in {@code build()}.
 */
public AndroidPackageableCollector addClasspathEntry(
    HasJavaClassHashes hasJavaClassHashes, SourcePath classpathEntry) {
  BuildTarget owningTarget = hasJavaClassHashes.getBuildTarget();
  if (buildTargetsToExcludeFromDex.contains(owningTarget)) {
    // Still packaged, just never dexed.
    collectionBuilder.addNoDxClasspathEntries(classpathEntry);
    return this;
  }
  collectionBuilder.addClasspathEntriesToDex(classpathEntry);
  collectionBuilder.putModuleMappedClasspathEntriesToDex(
      apkModuleGraph.findModuleForTarget(owningTarget), classpathEntry);
  javaClassHashesProviders.add(hasJavaClassHashes);
  return this;
}
/** Registers an AndroidManifest fragment to be merged into the final manifest. */
public AndroidPackageableCollector addManifestPiece(SourcePath manifest) {
  collectionBuilder.addAndroidManifestPieces(manifest);
  return this;
}
/**
 * Registers a prebuilt third-party jar. Jars owned by a no-dx target are routed
 * to the no-dx classpath; all others are collected as ordinary third-party jars.
 */
public AndroidPackageableCollector addPathToThirdPartyJar(
    BuildTarget owner, SourcePath pathToThirdPartyJar) {
  if (!buildTargetsToExcludeFromDex.contains(owner)) {
    collectionBuilder.addPathsToThirdPartyJars(pathToThirdPartyJar);
  } else {
    collectionBuilder.addNoDxClasspathEntries(pathToThirdPartyJar);
  }
  return this;
}
/**
 * Records the BuildConfig field values for a Java package.
 *
 * @throws HumanReadableException if a build config for {@code javaPackage} was
 *     already registered — duplicate android_build_config() rules in the
 *     transitive deps are a user error.
 */
public void addBuildConfig(String javaPackage, BuildConfigFields constants) {
  boolean alreadyRegistered = buildConfigs.containsKey(javaPackage);
  if (alreadyRegistered) {
    throw new HumanReadableException(
        "Multiple android_build_config() rules with the same package %s in the "
            + "transitive deps of %s.",
        javaPackage, collectionRoot);
  }
  buildConfigs.put(javaPackage, constants);
}
/**
 * Finalizes all accumulated state into an immutable AndroidPackageableCollection.
 * Statement order matters here: every mutable builder is snapshotted exactly once,
 * and the resource collections are reversed to restore topological order (see the
 * inline comment below).
 */
public AndroidPackageableCollection build() {
  collectionBuilder.setBuildConfigs(ImmutableMap.copyOf(buildConfigs));
  // Snapshot the hash providers once; the set is reused both for the dex target
  // list and captured by the memoized supplier below.
  ImmutableSet<HasJavaClassHashes> javaClassProviders = javaClassHashesProviders.build();
  collectionBuilder.addAllJavaLibrariesToDex(
      javaClassProviders
          .stream()
          .map(HasJavaClassHashes::getBuildTarget)
          .collect(ImmutableSet.toImmutableSet()));
  // Defer (and memoize) the potentially expensive class-hash aggregation until
  // the supplier is first queried.
  collectionBuilder.setClassNamesToHashesSupplier(
      MoreSuppliers.memoize(
          () -> {
            Builder<String, HashCode> builder = ImmutableMap.builder();
            for (HasJavaClassHashes hasJavaClassHashes : javaClassProviders) {
              builder.putAll(hasJavaClassHashes.getClassNamesToHashes());
            }
            return builder.build();
          })
          ::get);
  // Targets that contributed assets but no res directory get flagged separately.
  ImmutableSet<BuildTarget> resources = ImmutableSet.copyOf(resourcesWithNonEmptyResDir.build());
  for (BuildTarget buildTarget : resourcesWithAssets.build()) {
    if (!resources.contains(buildTarget)) {
      resourceDetailsBuilder.addResourcesWithEmptyResButNonEmptyAssetsDir(buildTarget);
    }
  }
  // Reverse the resource directories/targets collections because we perform a post-order
  // traversal of the action graph, and we need to return these collections topologically
  // sorted.
  resourceDetailsBuilder.setResourceDirectories(
      resourceDirectories.build().reverse().stream().distinct().collect(Collectors.toList()));
  resourceDetailsBuilder.setResourcesWithNonEmptyResDir(
      resourcesWithNonEmptyResDir.build().reverse());
  collectionBuilder.setResourceDetails(resourceDetailsBuilder.build());
  return collectionBuilder.build();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.core.conf;
import java.io.File;
import java.lang.annotation.Annotation;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.file.rfile.RFile;
import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
import org.apache.accumulo.core.util.format.DefaultFormatter;
import org.apache.accumulo.core.util.interpret.DefaultScanInterpreter;
import org.apache.accumulo.start.classloader.AccumuloClassLoader;
import org.apache.accumulo.start.classloader.vfs.AccumuloVFSClassLoader;
import org.apache.commons.configuration.MapConfiguration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.log4j.Logger;
public enum Property {
// Crypto-related properties
CRYPTO_PREFIX("crypto.", null, PropertyType.PREFIX, "Properties in this category related to the configuration of both default and custom crypto modules.",
true, false),
CRYPTO_MODULE_CLASS("crypto.module.class", "NullCryptoModule", PropertyType.STRING,
"Fully qualified class name of the class that implements the CryptoModule interface, to be used in setting up encryption at rest for the WAL and "
+ "(future) other parts of the code.", true, false),
CRYPTO_CIPHER_SUITE("crypto.cipher.suite", "NullCipher", PropertyType.STRING, "Describes the cipher suite to use for the write-ahead log", true, false),
CRYPTO_CIPHER_ALGORITHM_NAME("crypto.cipher.algorithm.name", "NullCipher", PropertyType.STRING,
"States the name of the algorithm used in the corresponding cipher suite. Do not make these different, unless you enjoy mysterious exceptions and bugs.",
true, false),
CRYPTO_CIPHER_KEY_LENGTH("crypto.cipher.key.length", "128", PropertyType.STRING,
"Specifies the key length *in bits* to use for the symmetric key, should probably be 128 or 256 unless you really know what you're doing", true, false),
CRYPTO_SECURE_RNG("crypto.secure.rng", "SHA1PRNG", PropertyType.STRING,
"States the secure random number generator to use, and defaults to the built-in Sun SHA1PRNG", true, false),
CRYPTO_SECURE_RNG_PROVIDER("crypto.secure.rng.provider", "SUN", PropertyType.STRING,
"States the secure random number generator provider to use, and defaults to the built-in SUN provider", true, false),
CRYPTO_SECRET_KEY_ENCRYPTION_STRATEGY_CLASS("crypto.secret.key.encryption.strategy.class", "NullSecretKeyEncryptionStrategy", PropertyType.STRING,
"The class Accumulo should use for its key encryption strategy.", true, false),
CRYPTO_DEFAULT_KEY_STRATEGY_HDFS_URI("crypto.default.key.strategy.hdfs.uri", "", PropertyType.STRING,
"The URL Accumulo should use to connect to DFS. If this is blank, Accumulo will obtain this information from the Hadoop configuration", true, false),
CRYPTO_DEFAULT_KEY_STRATEGY_KEY_LOCATION("crypto.default.key.strategy.key.location", "/accumulo/crypto/secret/keyEncryptionKey", PropertyType.ABSOLUTEPATH,
"The absolute path of where to store the key encryption key within HDFS.", true, false),
// instance properties (must be the same for every node in an instance)
INSTANCE_PREFIX("instance.", null, PropertyType.PREFIX,
"Properties in this category must be consistent throughout a cloud. This is enforced and servers won't be able to communicate if these differ."),
INSTANCE_ZK_HOST("instance.zookeeper.host", "localhost:2181", PropertyType.HOSTLIST, "Comma separated list of zookeeper servers"),
INSTANCE_ZK_TIMEOUT("instance.zookeeper.timeout", "30s", PropertyType.TIMEDURATION,
"Zookeeper session timeout; max value when represented as milliseconds should be no larger than " + Integer.MAX_VALUE),
INSTANCE_DFS_URI("instance.dfs.uri", "", PropertyType.URI,
"The url accumulo should use to connect to DFS. If this is empty, accumulo will obtain this information from the hadoop configuration."),
INSTANCE_DFS_DIR("instance.dfs.dir", "/accumulo", PropertyType.ABSOLUTEPATH,
"HDFS directory in which accumulo instance will run. Do not change after accumulo is initialized."),
INSTANCE_SECRET("instance.secret", "DEFAULT", PropertyType.STRING,
"A secret unique to a given instance that all servers must know in order to communicate with one another."
+ " Change it before initialization. To change it later use ./bin/accumulo accumulo.server.util.ChangeSecret [oldpasswd] [newpasswd], "
+ " and then update conf/accumulo-site.xml everywhere."),
INSTANCE_SECURITY_AUTHENTICATOR("instance.security.authenticator", "org.apache.accumulo.server.security.handler.ZKAuthenticator", PropertyType.CLASSNAME,
"The authenticator class that accumulo will use to determine if a user has privilege to perform an action"),
INSTANCE_SECURITY_AUTHORIZOR("instance.security.authorizor", "org.apache.accumulo.server.security.handler.ZKAuthorizor", PropertyType.CLASSNAME,
"The authorizor class that accumulo will use to determine what labels a user has privilege to see"),
INSTANCE_SECURITY_PERMISSION_HANDLER("instance.security.permissionHandler", "org.apache.accumulo.server.security.handler.ZKPermHandler",
PropertyType.CLASSNAME, "The permission handler class that accumulo will use to determine if a user has privilege to perform an action"),
// general properties
GENERAL_PREFIX("general.", null, PropertyType.PREFIX,
"Properties in this category affect the behavior of accumulo overall, but do not have to be consistent throughout a cloud."),
GENERAL_CLASSPATHS(AccumuloClassLoader.CLASSPATH_PROPERTY_NAME, AccumuloClassLoader.ACCUMULO_CLASSPATH_VALUE, PropertyType.STRING,
"A list of all of the places to look for a class. Order does matter, as it will look for the jar "
+ "starting in the first location to the last. Please note, hadoop conf and hadoop lib directories NEED to be here, "
+ "along with accumulo lib and zookeeper directory. Supports full regex on filename alone."), // needs special treatment in accumulo start jar
GENERAL_DYNAMIC_CLASSPATHS(AccumuloVFSClassLoader.DYNAMIC_CLASSPATH_PROPERTY_NAME, AccumuloVFSClassLoader.DEFAULT_DYNAMIC_CLASSPATH_VALUE,
PropertyType.STRING, "A list of all of the places where changes in jars or classes will force a reload of the classloader."),
GENERAL_RPC_TIMEOUT("general.rpc.timeout", "120s", PropertyType.TIMEDURATION, "Time to wait on I/O for simple, short RPC calls"),
GENERAL_KERBEROS_KEYTAB("general.kerberos.keytab", "", PropertyType.PATH, "Path to the kerberos keytab to use. Leave blank if not using kerberoized hdfs"),
GENERAL_KERBEROS_PRINCIPAL("general.kerberos.principal", "", PropertyType.STRING, "Name of the kerberos principal to use. _HOST will automatically be "
+ "replaced by the machines hostname in the hostname portion of the principal. Leave blank if not using kerberoized hdfs"),
GENERAL_MAX_MESSAGE_SIZE("tserver.server.message.size.max", "1G", PropertyType.MEMORY, "The maximum size of a message that can be sent to a tablet server."),
// properties that are specific to master server behavior
MASTER_PREFIX("master.", null, PropertyType.PREFIX, "Properties in this category affect the behavior of the master server"),
MASTER_CLIENTPORT("master.port.client", "9999", PropertyType.PORT, "The port used for handling client connections on the master"),
MASTER_TABLET_BALANCER("master.tablet.balancer", "org.apache.accumulo.server.master.balancer.TableLoadBalancer", PropertyType.CLASSNAME,
"The balancer class that accumulo will use to make tablet assignment and migration decisions."),
MASTER_RECOVERY_MAXAGE("master.recovery.max.age", "60m", PropertyType.TIMEDURATION, "Recovery files older than this age will be removed."),
MASTER_RECOVERY_MAXTIME("master.recovery.time.max", "30m", PropertyType.TIMEDURATION, "The maximum time to attempt recovery before giving up"),
MASTER_BULK_RETRIES("master.bulk.retries", "3", PropertyType.COUNT, "The number of attempts to bulk-load a file before giving up."),
MASTER_BULK_THREADPOOL_SIZE("master.bulk.threadpool.size", "5", PropertyType.COUNT, "The number of threads to use when coordinating a bulk-import."),
MASTER_BULK_TIMEOUT("master.bulk.timeout", "5m", PropertyType.TIMEDURATION, "The time to wait for a tablet server to process a bulk import request"),
MASTER_MINTHREADS("master.server.threads.minimum", "20", PropertyType.COUNT, "The minimum number of threads to use to handle incoming requests."),
MASTER_THREADCHECK("master.server.threadcheck.time", "1s", PropertyType.TIMEDURATION, "The time between adjustments of the server thread pool."),
MASTER_RECOVERY_DELAY("master.recovery.delay", "10s", PropertyType.TIMEDURATION,
"When a tablet server's lock is deleted, it takes time for it to completely quit. This delay gives it time before log recoveries begin."),
MASTER_LEASE_RECOVERY_WAITING_PERIOD("master.lease.recovery.interval", "5s", PropertyType.TIMEDURATION,
"The amount of time to wait after requesting a WAL file to be recovered"),
MASTER_WALOG_CLOSER_IMPLEMETATION("master.walog.closer.implementation", "org.apache.accumulo.server.master.recovery.HadoopLogCloser", PropertyType.CLASSNAME,
"A class that implements a mechansim to steal write access to a file"),
MASTER_FATE_THREADPOOL_SIZE("master.fate.threadpool.size", "4", PropertyType.COUNT,
"The number of threads used to run FAult-Tolerant Executions. These are primarily table operations like merge."),
// properties that are specific to tablet server behavior
TSERV_PREFIX("tserver.", null, PropertyType.PREFIX, "Properties in this category affect the behavior of the tablet servers"),
TSERV_CLIENT_TIMEOUT("tserver.client.timeout", "3s", PropertyType.TIMEDURATION, "Time to wait for clients to continue scans before closing a session."),
TSERV_DEFAULT_BLOCKSIZE("tserver.default.blocksize", "1M", PropertyType.MEMORY, "Specifies a default blocksize for the tserver caches"),
TSERV_DATACACHE_SIZE("tserver.cache.data.size", "128M", PropertyType.MEMORY, "Specifies the size of the cache for file data blocks."),
TSERV_INDEXCACHE_SIZE("tserver.cache.index.size", "512M", PropertyType.MEMORY, "Specifies the size of the cache for file indices."),
TSERV_PORTSEARCH("tserver.port.search", "false", PropertyType.BOOLEAN, "if the ports above are in use, search higher ports until one is available"),
TSERV_CLIENTPORT("tserver.port.client", "9997", PropertyType.PORT, "The port used for handling client connections on the tablet servers"),
TSERV_MUTATION_QUEUE_MAX("tserver.mutation.queue.max", "1M", PropertyType.MEMORY,
"The amount of memory to use to store write-ahead-log mutations-per-session before flushing them. Since the buffer is per write session, consider the"
+ " max number of concurrent writer when configuring. When using Hadoop 2, Accumulo will call hsync() on the WAL . For a small number of "
+ "concurrent writers, increasing this buffer size decreases the frequncy of hsync calls. For a large number of concurrent writers a small buffers "
+ "size is ok because of group commit."),
TSERV_TABLET_SPLIT_FINDMIDPOINT_MAXOPEN("tserver.tablet.split.midpoint.files.max", "30", PropertyType.COUNT,
"To find a tablets split points, all index files are opened. This setting determines how many index "
+ "files can be opened at once. When there are more index files than this setting multiple passes "
+ "must be made, which is slower. However opening too many files at once can cause problems."),
TSERV_WALOG_MAX_SIZE("tserver.walog.max.size", "1G", PropertyType.MEMORY,
"The maximum size for each write-ahead log. See comment for property tserver.memory.maps.max"),
TSERV_MAJC_DELAY("tserver.compaction.major.delay", "30s", PropertyType.TIMEDURATION,
"Time a tablet server will sleep between checking which tablets need compaction."),
TSERV_MAJC_THREAD_MAXOPEN("tserver.compaction.major.thread.files.open.max", "10", PropertyType.COUNT,
"Max number of files a major compaction thread can open at once. "),
TSERV_SCAN_MAX_OPENFILES("tserver.scan.files.open.max", "100", PropertyType.COUNT,
"Maximum total files that all tablets in a tablet server can open for scans. "),
TSERV_MAX_IDLE("tserver.files.open.idle", "1m", PropertyType.TIMEDURATION, "Tablet servers leave previously used files open for future queries. "
+ "This setting determines how much time an unused file should be kept open until it is closed."),
TSERV_NATIVEMAP_ENABLED("tserver.memory.maps.native.enabled", "true", PropertyType.BOOLEAN,
"An in-memory data store for accumulo implemented in c++ that increases the amount of data accumulo can hold in memory and avoids Java GC pauses."),
TSERV_MAXMEM("tserver.memory.maps.max", "1G", PropertyType.MEMORY,
"Maximum amount of memory that can be used to buffer data written to a tablet server. There are two other properties that can effectively limit memory"
+ " usage table.compaction.minor.logs.threshold and tserver.walog.max.size. Ensure that table.compaction.minor.logs.threshold *"
+ " tserver.walog.max.size >= this property."),
TSERV_MEM_MGMT("tserver.memory.manager", "org.apache.accumulo.server.tabletserver.LargestFirstMemoryManager", PropertyType.CLASSNAME,
"An implementation of MemoryManger that accumulo will use."),
TSERV_SESSION_MAXIDLE("tserver.session.idle.max", "1m", PropertyType.TIMEDURATION, "maximum idle time for a session"),
TSERV_READ_AHEAD_MAXCONCURRENT("tserver.readahead.concurrent.max", "16", PropertyType.COUNT,
"The maximum number of concurrent read ahead that will execute. This effectively"
+ " limits the number of long running scans that can run concurrently per tserver."),
TSERV_METADATA_READ_AHEAD_MAXCONCURRENT("tserver.metadata.readahead.concurrent.max", "8", PropertyType.COUNT,
"The maximum number of concurrent metadata read ahead that will execute."),
TSERV_MIGRATE_MAXCONCURRENT("tserver.migrations.concurrent.max", "1", PropertyType.COUNT,
"The maximum number of concurrent tablet migrations for a tablet server"),
TSERV_MAJC_MAXCONCURRENT("tserver.compaction.major.concurrent.max", "3", PropertyType.COUNT,
"The maximum number of concurrent major compactions for a tablet server"),
TSERV_MINC_MAXCONCURRENT("tserver.compaction.minor.concurrent.max", "4", PropertyType.COUNT,
"The maximum number of concurrent minor compactions for a tablet server"),
TSERV_BLOOM_LOAD_MAXCONCURRENT("tserver.bloom.load.concurrent.max", "4", PropertyType.COUNT,
"The number of concurrent threads that will load bloom filters in the background. "
+ "Setting this to zero will make bloom filters load in the foreground."),
TSERV_MONITOR_FS("tserver.monitor.fs", "true", PropertyType.BOOLEAN,
"When enabled the tserver will monitor file systems and kill itself when one switches from rw to ro. This is usually and indication that Linux has"
+ " detected a bad disk."),
TSERV_MEMDUMP_DIR("tserver.dir.memdump", "/tmp", PropertyType.PATH,
"A long running scan could possibly hold memory that has been minor compacted. To prevent this, the in memory map is dumped to a local file and the "
+ "scan is switched to that local file. We can not switch to the minor compacted file because it may have been modified by iterators. The file "
+ "dumped to the local dir is an exact copy of what was in memory."),
TSERV_LOCK_MEMORY("tserver.memory.lock", "false", PropertyType.BOOLEAN,
"The tablet server must communicate with zookeeper frequently to maintain its locks. If the tablet server's memory is swapped out"
+ " the java garbage collector can stop all processing for long periods. Change this property to true and the tablet server will "
+ " attempt to lock all of its memory to RAM, which may reduce delays during java garbage collection. You will have to modify the "
+ " system limit for \"max locked memory\". This feature is only available when running on Linux. Alternatively you may also "
+ " want to set /proc/sys/vm/swappiness to zero (again, this is Linux-specific)."),
TSERV_BULK_PROCESS_THREADS("tserver.bulk.process.threads", "1", PropertyType.COUNT,
"The master will task a tablet server with pre-processing a bulk file prior to assigning it to the appropriate tablet servers. This configuration"
+ " value controls the number of threads used to process the files."),
TSERV_BULK_ASSIGNMENT_THREADS("tserver.bulk.assign.threads", "1", PropertyType.COUNT,
"The master delegates bulk file processing and assignment to tablet servers. After the bulk file has been processed, the tablet server will assign"
+ " the file to the appropriate tablets on all servers. This property controls the number of threads used to communicate to the other servers."),
TSERV_BULK_RETRY("tserver.bulk.retry.max", "5", PropertyType.COUNT,
"The number of times the tablet server will attempt to assign a file to a tablet as it migrates and splits."),
TSERV_BULK_TIMEOUT("tserver.bulk.timeout", "5m", PropertyType.TIMEDURATION, "The time to wait for a tablet server to process a bulk import request."),
TSERV_MINTHREADS("tserver.server.threads.minimum", "20", PropertyType.COUNT, "The minimum number of threads to use to handle incoming requests."),
TSERV_THREADCHECK("tserver.server.threadcheck.time", "1s", PropertyType.TIMEDURATION, "The time between adjustments of the server thread pool."),
TSERV_HOLD_TIME_SUICIDE("tserver.hold.time.max", "5m", PropertyType.TIMEDURATION,
"The maximum time for a tablet server to be in the \"memory full\" state. If the tablet server cannot write out memory"
+ " in this much time, it will assume there is some failure local to its node, and quit. A value of zero is equivalent to forever."),
TSERV_WAL_BLOCKSIZE("tserver.wal.blocksize", "0", PropertyType.MEMORY,
"The size of the HDFS blocks used to write to the Write-Ahead log. If zero, it will be 110% of tserver.walog.max.size (that is, try to use just one"
+ " block)"),
TSERV_WAL_REPLICATION("tserver.wal.replication", "0", PropertyType.COUNT,
"The replication to use when writing the Write-Ahead log to HDFS. If zero, it will use the HDFS default replication setting."),
TSERV_RECOVERY_MAX_CONCURRENT("tserver.recovery.concurrent.max", "2", PropertyType.COUNT, "The maximum number of threads to use to sort logs during"
+ " recovery"),
TSERV_SORT_BUFFER_SIZE("tserver.sort.buffer.size", "200M", PropertyType.MEMORY, "The amount of memory to use when sorting logs during recovery."),
TSERV_ARCHIVE_WALOGS("tserver.archive.walogs", "false", PropertyType.BOOLEAN, "Keep copies of the WALOGs for debugging purposes"),
TSERV_WORKQ_THREADS("tserver.workq.threads", "2", PropertyType.COUNT,
"The number of threads for the distributed workq. These threads are used for copying failed bulk files."),
TSERV_WAL_SYNC("tserver.wal.sync", "true", PropertyType.BOOLEAN,
"Use the SYNC_BLOCK create flag to sync WAL writes to disk. Prevents problems recovering from sudden system resets."),
TSERV_WAL_SYNC_METHOD("tserver.wal.sync.method", "hsync", PropertyType.STRING, "The method to invoke when sync'ing WALs. HSync will provide "
+ "resiliency in the face of unexpected power outages, at the cost of speed. If method is not available, the legacy 'sync' method "
+ "will be used to ensure backwards compatibility with older Hadoop versions. A value of 'hflush' is the alternative to the default value "
+ "of 'hsync' which will result in faster writes, but with less durability"),
// properties that are specific to logger server behavior
LOGGER_PREFIX("logger.", null, PropertyType.PREFIX, "Properties in this category affect the behavior of the write-ahead logger servers"),
LOGGER_DIR("logger.dir.walog", "walogs", PropertyType.PATH,
"The property only needs to be set if upgrading from 1.4 which used to store write-ahead logs on the local filesystem. In 1.5 write-ahead logs are "
+ "stored in DFS. When 1.5 is started for the first time it will copy any 1.4 write ahead logs into DFS. It is possible to specify a "
+ "comma-separated list of directories."),
// accumulo garbage collector properties
GC_PREFIX("gc.", null, PropertyType.PREFIX, "Properties in this category affect the behavior of the accumulo garbage collector."),
GC_CYCLE_START("gc.cycle.start", "30s", PropertyType.TIMEDURATION, "Time to wait before attempting to garbage collect any old files."),
GC_CYCLE_DELAY("gc.cycle.delay", "5m", PropertyType.TIMEDURATION, "Time between garbage collection cycles. In each cycle, old files "
+ "no longer in use are removed from the filesystem."),
GC_PORT("gc.port.client", "50091", PropertyType.PORT, "The listening port for the garbage collector's monitor service"),
GC_DELETE_THREADS("gc.threads.delete", "16", PropertyType.COUNT, "The number of threads used to delete files"),
GC_TRASH_IGNORE("gc.trash.ignore", "false", PropertyType.BOOLEAN, "Do not use the Trash, even if it is configured"),
// properties that are specific to the monitor server behavior
MONITOR_PREFIX("monitor.", null, PropertyType.PREFIX, "Properties in this category affect the behavior of the monitor web server."),
MONITOR_PORT("monitor.port.client", "50095", PropertyType.PORT, "The listening port for the monitor's http service"),
MONITOR_LOG4J_PORT("monitor.port.log4j", "4560", PropertyType.PORT, "The listening port for the monitor's log4j logging collection."),
MONITOR_BANNER_TEXT("monitor.banner.text", "", PropertyType.STRING, "The banner text displayed on the monitor page."),
MONITOR_BANNER_COLOR("monitor.banner.color", "#c4c4c4", PropertyType.STRING, "The color of the banner text displayed on the monitor page."),
MONITOR_BANNER_BACKGROUND("monitor.banner.background", "#304065", PropertyType.STRING,
"The background color of the banner text displayed on the monitor page."),
MONITOR_SSL_KEYSTORE("monitor.ssl.keyStore", "", PropertyType.PATH, "The keystore for enabling monitor SSL.", true, false),
MONITOR_SSL_KEYSTOREPASS("monitor.ssl.keyStorePassword", "", PropertyType.STRING, "The keystore password for enabling monitor SSL.", true, false),
MONITOR_SSL_TRUSTSTORE("monitor.ssl.trustStore", "", PropertyType.PATH, "The truststore for enabling monitor SSL.", true, false),
MONITOR_SSL_TRUSTSTOREPASS("monitor.ssl.trustStorePassword", "", PropertyType.STRING, "The truststore password for enabling monitor SSL.", true, false),
MONITOR_SSL_INCLUDE_PROTOCOLS("monitor.ssl.include.protocols", "TLSv1,TLSv1.1,TLSv1.2", PropertyType.STRING, "A comma-separate list of allowed SSL protocols"),
MONITOR_LOCK_CHECK_INTERVAL("monitor.lock.check.interval", "5s", PropertyType.TIMEDURATION,
"The amount of time to sleep between checking for the Montior ZooKeeper lock"),
TRACE_PREFIX("trace.", null, PropertyType.PREFIX, "Properties in this category affect the behavior of distributed tracing."),
TRACE_PORT("trace.port.client", "12234", PropertyType.PORT, "The listening port for the trace server"),
TRACE_TABLE("trace.table", "trace", PropertyType.STRING, "The name of the table to store distributed traces"),
TRACE_USER("trace.user", "root", PropertyType.STRING, "The name of the user to store distributed traces"),
TRACE_PASSWORD("trace.password", "secret", PropertyType.STRING, "The password for the user used to store distributed traces"),
TRACE_TOKEN_PROPERTY_PREFIX("trace.token.property", null, PropertyType.PREFIX,
"The prefix used to create a token for storing distributed traces. For each propetry required by trace.token.type, place this prefix in front of it."),
TRACE_TOKEN_TYPE("trace.token.type", PasswordToken.class.getName(), PropertyType.CLASSNAME, "An AuthenticationToken type supported by the authorizer"),
// per table properties
TABLE_PREFIX("table.", null, PropertyType.PREFIX, "Properties in this category affect tablet server treatment of tablets, but can be configured "
+ "on a per-table basis. Setting these properties in the site file will override the default globally "
+ "for all tables and not any specific table. However, both the default and the global setting can be "
+ "overridden per table using the table operations API or in the shell, which sets the overridden value "
+ "in zookeeper. Restarting accumulo tablet servers after setting these properties in the site file "
+ "will cause the global setting to take effect. However, you must use the API or the shell to change "
+ "properties in zookeeper that are set on a table."),
TABLE_MAJC_RATIO("table.compaction.major.ratio", "3", PropertyType.FRACTION,
"minimum ratio of total input size to maximum input file size for running a major compaction. When adjusting this property you may want to also "
+ "adjust table.file.max. Want to avoid the situation where only merging minor compactions occur."),
TABLE_MAJC_COMPACTALL_IDLETIME("table.compaction.major.everything.idle", "1h", PropertyType.TIMEDURATION,
"After a tablet has been idle (no mutations) for this time period it may have all "
+ "of its files compacted into one. There is no guarantee an idle tablet will be compacted. "
+ "Compactions of idle tablets are only started when regular compactions are not running. Idle "
+ "compactions only take place for tablets that have one or more files."),
TABLE_SPLIT_THRESHOLD("table.split.threshold", "1G", PropertyType.MEMORY, "When combined size of files exceeds this amount a tablet is split."),
TABLE_MINC_LOGS_MAX("table.compaction.minor.logs.threshold", "3", PropertyType.COUNT,
"When there are more than this many write-ahead logs against a tablet, it will be minor compacted. See comment for property tserver.memory.maps.max"),
TABLE_MINC_COMPACT_IDLETIME("table.compaction.minor.idle", "5m", PropertyType.TIMEDURATION,
"After a tablet has been idle (no mutations) for this time period it may have its "
+ "in-memory map flushed to disk in a minor compaction. There is no guarantee an idle " + "tablet will be compacted."),
TABLE_SCAN_MAXMEM("table.scan.max.memory", "512K", PropertyType.MEMORY,
"The maximum amount of memory that will be used to cache results of a client query/scan. "
+ "Once this limit is reached, the buffered data is sent to the client."),
TABLE_FILE_TYPE("table.file.type", RFile.EXTENSION, PropertyType.STRING, "Change the type of file a table writes"),
TABLE_LOAD_BALANCER("table.balancer", "org.apache.accumulo.server.master.balancer.DefaultLoadBalancer", PropertyType.STRING,
"This property can be set to allow the LoadBalanceByTable load balancer to change the called Load Balancer for this table"),
TABLE_FILE_COMPRESSION_TYPE("table.file.compress.type", "gz", PropertyType.STRING, "One of gz,lzo,none"),
TABLE_FILE_COMPRESSED_BLOCK_SIZE("table.file.compress.blocksize", "100K", PropertyType.MEMORY,
"Similar to the hadoop io.seqfile.compress.blocksize setting, so that files have better query performance. The maximum value for this is "
+ Integer.MAX_VALUE + ". (This setting is the size threshold prior to compression, and applies even compression is disabled.)"),
TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX("table.file.compress.blocksize.index", "128K", PropertyType.MEMORY,
"Determines how large index blocks can be in files that support multilevel indexes. The maximum value for this is " + Integer.MAX_VALUE + "."
+ " (This setting is the size threshold prior to compression, and applies even compression is disabled.)"),
TABLE_FILE_BLOCK_SIZE("table.file.blocksize", "0B", PropertyType.MEMORY,
"Overrides the hadoop dfs.block.size setting so that files have better query performance. The maximum value for this is " + Integer.MAX_VALUE),
TABLE_FILE_REPLICATION("table.file.replication", "0", PropertyType.COUNT, "Determines how many replicas to keep of a tables' files in HDFS. "
+ "When this value is LTE 0, HDFS defaults are used."),
TABLE_FILE_MAX("table.file.max", "15", PropertyType.COUNT,
"Determines the max # of files each tablet in a table can have. When adjusting this property you may want to consider adjusting"
+ " table.compaction.major.ratio also. Setting this property to 0 will make it default to tserver.scan.files.open.max-1, this will prevent a"
+ " tablet from having more files than can be opened. Setting this property low may throttle ingest and increase query performance."),
TABLE_WALOG_ENABLED("table.walog.enabled", "true", PropertyType.BOOLEAN, "Use the write-ahead log to prevent the loss of data."),
TABLE_BLOOM_ENABLED("table.bloom.enabled", "false", PropertyType.BOOLEAN, "Use bloom filters on this table."),
TABLE_BLOOM_LOAD_THRESHOLD("table.bloom.load.threshold", "1", PropertyType.COUNT,
"This number of seeks that would actually use a bloom filter must occur before a file's bloom filter is loaded."
+ " Set this to zero to initiate loading of bloom filters when a file is opened."),
TABLE_BLOOM_SIZE("table.bloom.size", "1048576", PropertyType.COUNT, "Bloom filter size, as number of keys."),
TABLE_BLOOM_ERRORRATE("table.bloom.error.rate", "0.5%", PropertyType.FRACTION, "Bloom filter error rate."),
TABLE_BLOOM_KEY_FUNCTOR("table.bloom.key.functor", "org.apache.accumulo.core.file.keyfunctor.RowFunctor", PropertyType.CLASSNAME,
"A function that can transform the key prior to insertion and check of bloom filter. org.apache.accumulo.core.file.keyfunctor.RowFunctor,"
+ ",org.apache.accumulo.core.file.keyfunctor.ColumnFamilyFunctor, and org.apache.accumulo.core.file.keyfunctor.ColumnQualifierFunctor are"
+ " allowable values. One can extend any of the above mentioned classes to perform specialized parsing of the key. "),
TABLE_BLOOM_HASHTYPE("table.bloom.hash.type", "murmur", PropertyType.STRING, "The bloom filter hash type"),
TABLE_FAILURES_IGNORE("table.failures.ignore", "false", PropertyType.BOOLEAN,
"If you want queries for your table to hang or fail when data is missing from the system, "
+ "then set this to false. When this set to true missing data will be reported but queries "
+ "will still run possibly returning a subset of the data."),
TABLE_DEFAULT_SCANTIME_VISIBILITY("table.security.scan.visibility.default", "", PropertyType.STRING,
"The security label that will be assumed at scan time if an entry does not have a visibility set.<br />"
+ "Note: An empty security label is displayed as []. The scan results will show an empty visibility even if "
+ "the visibility from this setting is applied to the entry.<br />"
+ "CAUTION: If a particular key has an empty security label AND its table's default visibility is also empty, "
+ "access will ALWAYS be granted for users with permission to that table. Additionally, if this field is changed, "
+ "all existing data with an empty visibility label will be interpreted with the new label on the next scan."),
TABLE_LOCALITY_GROUPS("table.groups.enabled", "", PropertyType.STRING, "A comma separated list of locality group names to enable for this table."),
TABLE_CONSTRAINT_PREFIX("table.constraint.", null, PropertyType.PREFIX,
"Properties in this category are per-table properties that add constraints to a table. "
+ "These properties start with the category prefix, followed by a number, and their values "
+ "correspond to a fully qualified Java class that implements the Constraint interface.<br />"
+ "For example, table.constraint.1 = org.apache.accumulo.core.constraints.MyCustomConstraint "
+ "and table.constraint.2 = my.package.constraints.MySecondConstraint"),
TABLE_INDEXCACHE_ENABLED("table.cache.index.enable", "true", PropertyType.BOOLEAN, "Determines whether index cache is enabled."),
TABLE_BLOCKCACHE_ENABLED("table.cache.block.enable", "false", PropertyType.BOOLEAN, "Determines whether file block cache is enabled."),
TABLE_ITERATOR_PREFIX("table.iterator.", null, PropertyType.PREFIX,
"Properties in this category specify iterators that are applied at various stages (scopes) of interaction "
+ "with a table. These properties start with the category prefix, followed by a scope (minc, majc, scan, etc.), "
+ "followed by a period, followed by a name, as in table.iterator.scan.vers, or table.iterator.scan.custom. "
+ "The values for these properties are a number indicating the ordering in which it is applied, and a class name "
+ "such as table.iterator.scan.vers = 10,org.apache.accumulo.core.iterators.VersioningIterator<br /> "
+ "These iterators can take options if additional properties are set that look like this property, "
+ "but are suffixed with a period, followed by 'opt' followed by another period, and a property name.<br />"
+ "For example, table.iterator.minc.vers.opt.maxVersions = 3"),
TABLE_ITERATOR_SCAN_PREFIX(TABLE_ITERATOR_PREFIX.getKey() + IteratorScope.scan.name() + ".", null, PropertyType.PREFIX,
"Convenience prefix to find options for the scan iterator scope"),
TABLE_ITERATOR_MINC_PREFIX(TABLE_ITERATOR_PREFIX.getKey() + IteratorScope.minc.name() + ".", null, PropertyType.PREFIX,
"Convenience prefix to find options for the minc iterator scope"),
TABLE_ITERATOR_MAJC_PREFIX(TABLE_ITERATOR_PREFIX.getKey() + IteratorScope.majc.name() + ".", null, PropertyType.PREFIX,
"Convenience prefix to find options for the majc iterator scope"),
TABLE_LOCALITY_GROUP_PREFIX("table.group.", null, PropertyType.PREFIX,
"Properties in this category are per-table properties that define locality groups in a table. These properties start "
+ "with the category prefix, followed by a name, followed by a period, and followed by a property for that group.<br />"
+ "For example table.group.group1=x,y,z sets the column families for a group called group1. Once configured, "
+ "group1 can be enabled by adding it to the list of groups in the " + TABLE_LOCALITY_GROUPS.getKey() + " property.<br />"
+ "Additional group options may be specified for a named group by setting table.group.<name>.opt.<key>=<value>."),
TABLE_FORMATTER_CLASS("table.formatter", DefaultFormatter.class.getName(), PropertyType.STRING, "The Formatter class to apply on results in the shell"),
TABLE_INTERPRETER_CLASS("table.interepreter", DefaultScanInterpreter.class.getName(), PropertyType.STRING,
"The ScanInterpreter class to apply on scan arguments in the shell"),
TABLE_CLASSPATH("table.classpath.context", "", PropertyType.STRING, "Per table classpath context"),
// VFS ClassLoader properties
VFS_CLASSLOADER_SYSTEM_CLASSPATH_PROPERTY(AccumuloVFSClassLoader.VFS_CLASSLOADER_SYSTEM_CLASSPATH_PROPERTY, "", PropertyType.STRING,
"Configuration for a system level vfs classloader. Accumulo jar can be configured here and loaded out of HDFS."),
VFS_CONTEXT_CLASSPATH_PROPERTY(AccumuloVFSClassLoader.VFS_CONTEXT_CLASSPATH_PROPERTY, null, PropertyType.PREFIX,
"Properties in this category are define a classpath. These properties start with the category prefix, followed by a context name. "
+ "The value is a comma seperated list of URIs. Supports full regex on filename alone. For example, "
+ "general.vfs.context.classpath.cx1=hdfs://nn1:9902/mylibdir/*.jar. "
+ "You can enable post delegation for a context, which will load classes from the context first instead of the parent first. "
+ "Do this by setting general.vfs.context.classpath.<name>.delegation=post, where <name> is your context name. "
+ "If delegation is not specified, it defaults to loading from parent classloader first."),
VFS_CLASSLOADER_CACHE_DIR(AccumuloVFSClassLoader.VFS_CACHE_DIR, "${java.io.tmpdir}" + File.separator + "accumulo-vfs-cache-${user.name}",
PropertyType.ABSOLUTEPATH, "Directory to use for the vfs cache. The cache will keep a soft reference to all of the classes loaded in the VM."
+ " This should be on local disk on each node with sufficient space. It defaults to ${java.io.tmpdir}/accumulo-vfs-cache-${user.name}", false, true);
// Backing data for each enum constant; 'key' is the property name as it appears in configuration.
private String key, defaultValue, description;
// Expected value format (STRING, COUNT, TIMEDURATION, PREFIX, ...) used for validation/lookup.
private PropertyType type;
// True for properties whose behavior may change between releases.
private boolean experimental;
// True when the default value contains ${...} placeholders resolved against JVM system properties.
private boolean interpolated;
// Shared logger for this enum (also used by createInstanceFromPropertyName).
static Logger log = Logger.getLogger(Property.class);
/**
 * Fully-specified constructor for a property definition.
 *
 * @param name the property key, e.g. "table.split.threshold"
 * @param defaultValue the raw (un-interpolated) default value; may be null for PREFIX properties
 * @param type the expected format of the property's value
 * @param description human-readable documentation for the property
 * @param experimental whether the property is subject to change between releases
 * @param interpolated whether the default value is interpolated against system properties
 */
private Property(String name, String defaultValue, PropertyType type, String description, boolean experimental, boolean interpolated) {
this.key = name;
this.defaultValue = defaultValue;
this.description = description;
this.type = type;
this.experimental = experimental;
// Interpolated items need to be careful, as JVM properties could be updated and we may want that propagated when those changes occur.
// Currently only VFS_CLASSLOADER_CACHE_DIR, which isn't ZK mutable, is interpolated, so this shouldn't be an issue as java.io.tmpdir
// also shouldn't be changing.
this.interpolated = interpolated;
}
/** Convenience constructor for the common case: not experimental and not interpolated. */
private Property(String name, String defaultValue, PropertyType type, String description) {
this(name, defaultValue, type, description, false, false);
}
/** Renders the constant as its configuration key. */
@Override
public String toString() {
  return key;
}
/** @return the configuration key for this property, e.g. "table.split.threshold" */
public String getKey() {
  return key;
}
/** @return the default value exactly as declared, with no system-property interpolation applied */
public String getRawDefaultValue() {
  return defaultValue;
}
/**
 * Returns the default value, interpolating ${...} placeholders against the current JVM system
 * properties when this property was declared as interpolated (currently only
 * VFS_CLASSLOADER_CACHE_DIR). Non-interpolated properties return the raw default unchanged.
 */
public String getDefaultValue() {
if (this.interpolated) {
// Build a throwaway commons-configuration instance seeded with the system properties so its
// ${...} variable interpolation can be applied to the default value.
PropertiesConfiguration pconf = new PropertiesConfiguration();
Properties systemProperties = System.getProperties();
synchronized (systemProperties) {
// MapConfiguration iterates the map; hold the Properties monitor while copying to avoid
// a concurrent modification from another thread mutating system properties.
pconf.append(new MapConfiguration(systemProperties));
}
pconf.addProperty("hack_default_value", this.defaultValue);
// Reading the value back through commons-configuration performs the interpolation.
String v = pconf.getString("hack_default_value");
if (this.type == PropertyType.ABSOLUTEPATH)
return new File(v).getAbsolutePath();
else
return v;
} else {
return getRawDefaultValue();
}
}
/** @return the expected format of this property's value */
public PropertyType getType() {
  return type;
}
/** @return the human-readable documentation for this property */
public String getDescription() {
  return description;
}
/** @return true when this property's behavior may change between releases */
public boolean isExperimental() {
  return this.experimental;
}
// Lazily-built caches of recognized property keys. Creation is guarded by the class lock via
// the synchronized isValidPropertyKey / isValidTablePropertyKey methods below.
private static HashSet<String> validTableProperties = null;
private static HashSet<String> validProperties = null;
private static HashSet<String> validPrefixes = null;
/**
 * Reports whether {@code key} begins with any known PREFIX-type property key. Only called from
 * isValidPropertyKey, which populates validPrefixes under the class lock first.
 */
private static boolean isKeyValidlyPrefixed(String key) {
  for (String candidate : validPrefixes) {
    if (key.startsWith(candidate)) {
      return true;
    }
  }
  return false;
}
/**
 * Reports whether {@code key} is a recognized property: either an exact match for a concrete
 * property or prefixed by a PREFIX-type property. Synchronized to guard lazy cache creation.
 */
public synchronized static boolean isValidPropertyKey(String key) {
  if (null == validProperties) {
    // First call: partition all properties into exact keys and prefix keys.
    HashSet<String> exactKeys = new HashSet<String>();
    HashSet<String> prefixKeys = new HashSet<String>();
    for (Property candidate : Property.values()) {
      if (PropertyType.PREFIX.equals(candidate.getType())) {
        prefixKeys.add(candidate.getKey());
      } else {
        exactKeys.add(candidate.getKey());
      }
    }
    validProperties = exactKeys;
    validPrefixes = prefixKeys;
  }
  return validProperties.contains(key) || isKeyValidlyPrefixed(key);
}
/**
 * Reports whether {@code key} names a known per-table property, or falls under one of the
 * per-table prefixes (constraints, iterators, locality groups). Synchronized to guard lazy
 * cache creation.
 */
public synchronized static boolean isValidTablePropertyKey(String key) {
  if (validTableProperties == null) {
    // First call: cache every concrete (non-PREFIX) property under the "table." namespace.
    HashSet<String> tableProps = new HashSet<String>();
    String tablePrefix = Property.TABLE_PREFIX.getKey();
    for (Property candidate : Property.values()) {
      boolean concrete = !candidate.getType().equals(PropertyType.PREFIX);
      if (concrete && candidate.getKey().startsWith(tablePrefix)) {
        tableProps.add(candidate.getKey());
      }
    }
    validTableProperties = tableProps;
  }
  if (validTableProperties.contains(key)) {
    return true;
  }
  return key.startsWith(Property.TABLE_CONSTRAINT_PREFIX.getKey()) || key.startsWith(Property.TABLE_ITERATOR_PREFIX.getKey())
      || key.startsWith(Property.TABLE_LOCALITY_GROUP_PREFIX.getKey());
}
// Properties that must stay consistent across the cluster and therefore may not be changed
// through ZooKeeper at runtime; see isFixedZooPropertyKey.
private static final EnumSet<Property> fixedProperties = EnumSet.of(Property.TSERV_CLIENTPORT, Property.TSERV_NATIVEMAP_ENABLED,
Property.TSERV_SCAN_MAX_OPENFILES, Property.MASTER_CLIENTPORT, Property.GC_PORT);
/** @return true when the property may not be changed via ZooKeeper at runtime */
public static boolean isFixedZooPropertyKey(Property key) {
return fixedProperties.contains(key);
}
/**
 * Returns the properties that may not be changed via ZooKeeper at runtime.
 *
 * <p>A defensive copy is returned so callers cannot mutate the internal set of fixed
 * properties (the previous implementation handed out the live EnumSet).
 *
 * @return a copy of the fixed-property set
 */
public static Set<Property> getFixedProperties() {
  return EnumSet.copyOf(fixedProperties);
}
/**
 * Reports whether {@code key} may be stored in ZooKeeper, i.e. whether it falls under one of
 * the whitelisted mutable prefixes.
 */
public static boolean isValidZooPropertyKey(String key) {
  // white list prefixes
  String[] mutablePrefixes = new String[] {Property.TABLE_PREFIX.getKey(), Property.TSERV_PREFIX.getKey(), Property.LOGGER_PREFIX.getKey(),
      Property.MASTER_PREFIX.getKey(), Property.GC_PREFIX.getKey(), Property.MONITOR_PREFIX.getKey() + "banner.",
      VFS_CONTEXT_CLASSPATH_PROPERTY.getKey()};
  for (String prefix : mutablePrefixes) {
    if (key.startsWith(prefix)) {
      return true;
    }
  }
  return false;
}
/**
 * Linear search for the property whose key exactly matches {@code key}.
 *
 * @return the matching Property, or null when no constant has that key
 */
public static Property getPropertyByKey(String key) {
  for (Property candidate : Property.values()) {
    if (candidate.getKey().equals(key)) {
      return candidate;
    }
  }
  return null;
}
/**
 * @return true if this is a property whose value is expected to be a java class
 */
public static boolean isClassProperty(String key) {
  // A constraint entry (table.constraint.<num>) has exactly one path segment after the prefix;
  // an iterator entry (table.iterator.<scope>.<name>) has exactly two.
  String constraintPrefix = Property.TABLE_CONSTRAINT_PREFIX.getKey();
  if (key.startsWith(constraintPrefix) && key.substring(constraintPrefix.length()).split("\\.").length == 1) {
    return true;
  }
  String iteratorPrefix = Property.TABLE_ITERATOR_PREFIX.getKey();
  if (key.startsWith(iteratorPrefix) && key.substring(iteratorPrefix.length()).split("\\.").length == 2) {
    return true;
  }
  return key.equals(Property.TABLE_LOAD_BALANCER.getKey());
}
/**
 * Reports whether this property's enum field carries the {@link Deprecated} annotation.
 *
 * <p>Fix: the previous implementation created a brand-new Logger on every call, shadowing the
 * class's static {@code log} field; the shared logger is used instead.
 *
 * @return true if the constant is annotated {@code @Deprecated}; false otherwise or when
 *         reflection fails (the failure is logged)
 */
public boolean isDeprecated() {
  try {
    // Look up this constant's own field on the enum class and scan its annotations.
    for (Annotation a : getClass().getField(name()).getAnnotations())
      if (a instanceof Deprecated)
        return true;
  } catch (SecurityException e) {
    log.error(e, e);
  } catch (NoSuchFieldException e) {
    // Should not happen: name() always matches a declared enum field.
    log.error(e, e);
  }
  return false;
}
/**
 * Instantiates the class named by the given property's value, falling back to
 * {@code defaultInstance} when the class cannot be loaded or constructed.
 *
 * @param conf configuration to read the class name from
 * @param property property whose value is a fully-qualified class name
 * @param base expected supertype of the configured class
 * @param defaultInstance instance returned when loading/instantiation fails; must be non-null
 * @return a new instance of the configured class, or {@code defaultInstance} on any failure
 */
public static <T> T createInstanceFromPropertyName(AccumuloConfiguration conf, Property property, Class<T> base, T defaultInstance) {
String clazzName = conf.get(property);
T instance = null;
try {
// Loaded through the VFS classloader so implementations can come from HDFS / context classpaths.
Class<? extends T> clazz = AccumuloVFSClassLoader.loadClass(clazzName, base);
instance = clazz.newInstance();
log.info("Loaded class : " + clazzName);
} catch (Exception e) {
// Deliberate best-effort: any failure (missing class, wrong supertype, no-arg ctor problem)
// is logged and handled by falling back to the default instance below.
log.warn("Failed to load class ", e);
}
if (instance == null) {
log.info("Using " + defaultInstance.getClass().getName());
instance = defaultInstance;
}
return instance;
}
}
| |
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bugreport;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.bugreport.BugReport.BlazeRuntimeInterface;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.events.EventKind;
import com.google.devtools.build.lib.server.FailureDetails;
import com.google.devtools.build.lib.server.FailureDetails.Crash.Code;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.util.CustomExitCodePublisher;
import com.google.devtools.build.lib.util.CustomFailureDetailPublisher;
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.ExitCode;
import com.google.protobuf.ExtensionRegistry;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.ArgumentCaptor;
/** Tests for {@link BugReport}, parameterized over the kinds of crash it can handle. */
@RunWith(Parameterized.class)
public final class BugReportTest {
// Pairs each way of crashing with the exit code and FailureDetail code BugReport should report.
private enum CrashType {
CRASH(ExitCode.BLAZE_INTERNAL_ERROR, Code.CRASH_UNKNOWN) {
@Override
Throwable createThrowable() {
return new IllegalStateException("Crashed");
}
},
OOM(ExitCode.OOM_ERROR, Code.CRASH_OOM) {
@Override
Throwable createThrowable() {
return new OutOfMemoryError("Java heap space");
}
};
private final ExitCode expectedExitCode;
private final Code expectedFailureDetailCode;
CrashType(ExitCode expectedExitCode, Code expectedFailureDetailCode) {
this.expectedExitCode = expectedExitCode;
this.expectedFailureDetailCode = expectedFailureDetailCode;
}
// Builds the throwable this crash type simulates.
abstract Throwable createThrowable();
}
// Runs every test once per crash type.
@Parameters
public static CrashType[] params() {
return CrashType.values();
}
@Rule public final TemporaryFolder tmp = new TemporaryFolder();
private final CrashType crashType;
private final BlazeRuntimeInterface mockRuntime = mock(BlazeRuntimeInterface.class);
// Files the crash handler is expected to write the numeric exit code / FailureDetail proto into.
private Path exitCodeFile;
private Path failureDetailFile;
public BugReportTest(CrashType crashType) {
this.crashType = crashType;
}
@Before
public void setup() throws Exception {
when(mockRuntime.getProductName()).thenReturn("myProductName");
BugReport.setRuntime(mockRuntime);
// Point the static publishers at temp files so each test can inspect what was written.
exitCodeFile = tmp.newFolder().toPath().resolve("exit_code_to_use_on_abrupt_exit");
failureDetailFile = tmp.newFolder().toPath().resolve("failure_detail");
CustomExitCodePublisher.setAbruptExitStatusFileDir(exitCodeFile.getParent().toString());
CustomFailureDetailPublisher.setFailureDetailFilePath(failureDetailFile.toString());
}
@After
public void resetPublishers() {
// Undo the static publisher configuration so state does not leak between tests.
CustomExitCodePublisher.resetAbruptExitStatusFile();
CustomFailureDetailPublisher.resetFailureDetailFilePath();
}
// The single-argument convenience overload should behave like an explicit halting crash:
// clean up the runtime, write the exit code and failure detail, then attempt to halt.
// NOTE(review): the attempted halt presumably surfaces as a SecurityException under test
// (actual JVM halting appears to be blocked in the test environment) — confirm.
@Test
public void convenienceMethod() throws Exception {
Throwable t = crashType.createThrowable();
FailureDetail expectedFailureDetail =
createExpectedFailureDetail(t, crashType.expectedFailureDetailCode);
assertThrows(SecurityException.class, () -> BugReport.handleCrash(t));
// The crash throwable is stashed for tests; draining it also re-throws it.
assertThrows(t.getClass(), BugReport::maybePropagateUnprocessedThrowableIfInTest);
verify(mockRuntime)
.cleanUpForCrash(DetailedExitCode.of(crashType.expectedExitCode, expectedFailureDetail));
verifyExitCodeWritten(crashType.expectedExitCode.getNumericExitCode());
verifyFailureDetailWritten(expectedFailureDetail);
}
// Explicit CrashContext.halt(): same observable effects as the convenience overload.
@Test
public void halt() throws Exception {
Throwable t = crashType.createThrowable();
FailureDetail expectedFailureDetail =
createExpectedFailureDetail(t, crashType.expectedFailureDetailCode);
assertThrows(
SecurityException.class, () -> BugReport.handleCrash(Crash.from(t), CrashContext.halt()));
assertThrows(t.getClass(), BugReport::maybePropagateUnprocessedThrowableIfInTest);
verify(mockRuntime)
.cleanUpForCrash(DetailedExitCode.of(crashType.expectedExitCode, expectedFailureDetail));
verifyExitCodeWritten(crashType.expectedExitCode.getNumericExitCode());
verifyFailureDetailWritten(expectedFailureDetail);
}
// CrashContext.keepAlive(): the runtime is cleaned up and the failure detail written,
// but handleCrash returns normally and no abrupt-exit code file is produced.
@Test
public void keepAlive() throws Exception {
Throwable t = crashType.createThrowable();
FailureDetail expectedFailureDetail =
createExpectedFailureDetail(t, crashType.expectedFailureDetailCode);
BugReport.handleCrash(Crash.from(t), CrashContext.keepAlive());
assertThrows(t.getClass(), BugReport::maybePropagateUnprocessedThrowableIfInTest);
verify(mockRuntime)
.cleanUpForCrash(DetailedExitCode.of(crashType.expectedExitCode, expectedFailureDetail));
verifyNoExitCodeWritten();
verifyFailureDetailWritten(expectedFailureDetail);
}
// A handler registered via CrashContext.reportingTo receives a FATAL event whose message
// includes the crash's stack trace.
@Test
public void customEventHandler() {
Throwable t = crashType.createThrowable();
EventHandler handler = mock(EventHandler.class);
ArgumentCaptor<Event> event = ArgumentCaptor.forClass(Event.class);
BugReport.handleCrash(Crash.from(t), CrashContext.keepAlive().reportingTo(handler));
assertThrows(t.getClass(), BugReport::maybePropagateUnprocessedThrowableIfInTest);
verify(handler).handle(event.capture());
assertThat(event.getValue().getKind()).isEqualTo(EventKind.FATAL);
assertThat(event.getValue().getMessage()).contains(Throwables.getStackTraceAsString(t));
}
// Asserts that the exit-code file contains exactly the given numeric code.
private void verifyExitCodeWritten(int exitCode) throws Exception {
assertThat(Files.readAllLines(exitCodeFile)).containsExactly(String.valueOf(exitCode));
}
// Asserts that no abrupt-exit code file was produced at all.
private void verifyNoExitCodeWritten() {
assertThat(exitCodeFile.toFile().exists()).isFalse();
}
// Parses the failure-detail file as a FailureDetail proto and compares it to the expectation.
private void verifyFailureDetailWritten(FailureDetail expected) throws Exception {
assertThat(
FailureDetail.parseFrom(
Files.readAllBytes(failureDetailFile), ExtensionRegistry.getEmptyRegistry()))
.isEqualTo(expected);
}
// Builds the FailureDetail proto BugReport is expected to produce for the given throwable:
// a "Crashed: ..." message plus a Crash payload mirroring the throwable's class, message,
// and stack trace.
private static FailureDetail createExpectedFailureDetail(
Throwable t, Code expectedFailureDetailCode) {
return FailureDetail.newBuilder()
.setMessage(String.format("Crashed: (%s) %s", t.getClass().getName(), t.getMessage()))
.setCrash(
FailureDetails.Crash.newBuilder()
.setCode(expectedFailureDetailCode)
.addCauses(
FailureDetails.Throwable.newBuilder()
.setThrowableClass(t.getClass().getName())
.setMessage(t.getMessage())
.addAllStackTrace(
Lists.transform(
Arrays.asList(t.getStackTrace()), StackTraceElement::toString))))
.build();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.datastore;
import static com.google.datastore.v1.PropertyFilter.Operator.EQUAL;
import static com.google.datastore.v1.PropertyOrder.Direction.DESCENDING;
import static com.google.datastore.v1.client.DatastoreHelper.makeAndFilter;
import static com.google.datastore.v1.client.DatastoreHelper.makeDelete;
import static com.google.datastore.v1.client.DatastoreHelper.makeFilter;
import static com.google.datastore.v1.client.DatastoreHelper.makeKey;
import static com.google.datastore.v1.client.DatastoreHelper.makeOrder;
import static com.google.datastore.v1.client.DatastoreHelper.makeUpsert;
import static com.google.datastore.v1.client.DatastoreHelper.makeValue;
import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DATASTORE_BATCH_UPDATE_LIMIT;
import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.DEFAULT_BUNDLE_SIZE_BYTES;
import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.QUERY_BATCH_LIMIT;
import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.getEstimatedSizeBytes;
import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.makeRequest;
import static org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.isValidKey;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import com.google.datastore.v1.CommitRequest;
import com.google.datastore.v1.Entity;
import com.google.datastore.v1.EntityResult;
import com.google.datastore.v1.Key;
import com.google.datastore.v1.Mutation;
import com.google.datastore.v1.PartitionId;
import com.google.datastore.v1.Query;
import com.google.datastore.v1.QueryResultBatch;
import com.google.datastore.v1.RunQueryRequest;
import com.google.datastore.v1.RunQueryResponse;
import com.google.datastore.v1.client.Datastore;
import com.google.datastore.v1.client.QuerySplitter;
import com.google.protobuf.Int32Value;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DatastoreWriterFn;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteEntity;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteEntityFn;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteKey;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteKeyFn;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.ReadFn;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.SplitQueryFn;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.V1Options;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.UpsertFn;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.V1DatastoreFactory;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Write;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.testing.RunnableOnService;
import org.apache.beam.sdk.transforms.DoFnTester;
import org.apache.beam.sdk.transforms.DoFnTester.CloningBehavior;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.apache.beam.sdk.transforms.display.DisplayDataEvaluator;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PBegin;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.POutput;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
* Tests for {@link DatastoreV1}.
*/
@RunWith(JUnit4.class)
public class DatastoreV1Test {
// Shared fixture constants for building reads against a fake Cloud Datastore.
private static final String PROJECT_ID = "testProject";
private static final String NAMESPACE = "testNamespace";
private static final String KIND = "testKind";
private static final Query QUERY;
private static final String LOCALHOST = "localhost:9955";
private static final V1Options V_1_OPTIONS;
static {
// Minimal query selecting a single kind; reused (with matching V1Options) by most tests.
Query.Builder q = Query.newBuilder();
q.addKindBuilder().setName(KIND);
QUERY = q.build();
V_1_OPTIONS = V1Options.from(PROJECT_ID, QUERY, NAMESPACE, null);
}
// A fully-populated Read transform built fresh in setUp().
private DatastoreV1.Read initialRead;
// Mockito mocks standing in for the Datastore RPC client, its query splitter, and the factory.
@Mock
Datastore mockDatastore;
@Mock
QuerySplitter mockQuerySplitter;
@Mock
V1DatastoreFactory mockDatastoreFactory;
@Rule
public final ExpectedException thrown = ExpectedException.none();
@Before
public void setUp() {
// Populates the @Mock fields above before any stubbing.
MockitoAnnotations.initMocks(this);
initialRead = DatastoreIO.v1().read()
.withProjectId(PROJECT_ID).withQuery(QUERY).withNamespace(NAMESPACE);
// Route all datastore/splitter creation through the mocks, regardless of the options passed.
when(mockDatastoreFactory.getDatastore(any(PipelineOptions.class), any(String.class),
any(String.class)))
.thenReturn(mockDatastore);
when(mockDatastoreFactory.getQuerySplitter())
.thenReturn(mockQuerySplitter);
}
/** The read builder should record the project, query, and namespace it was given. */
@Test
public void testBuildRead() throws Exception {
  DatastoreV1.Read built = DatastoreIO.v1().read();
  built = built.withProjectId(PROJECT_ID);
  built = built.withQuery(QUERY);
  built = built.withNamespace(NAMESPACE);
  assertEquals(QUERY, built.getQuery());
  assertEquals(PROJECT_ID, built.getProjectId());
  assertEquals(NAMESPACE, built.getNamespace());
}
/**
 * {@link #testBuildRead} but constructed in a different order.
 */
@Test
public void testBuildReadAlt() throws Exception {
  DatastoreV1.Read built = DatastoreIO.v1().read();
  built = built.withProjectId(PROJECT_ID);
  built = built.withNamespace(NAMESPACE);
  built = built.withQuery(QUERY);
  built = built.withLocalhost(LOCALHOST);
  assertEquals(QUERY, built.getQuery());
  assertEquals(PROJECT_ID, built.getProjectId());
  assertEquals(NAMESPACE, built.getNamespace());
  assertEquals(LOCALHOST, built.getLocalhost());
}
// A read without a project id must fail validation with an NPE naming "project".
@Test
public void testReadValidationFailsProject() throws Exception {
DatastoreV1.Read read = DatastoreIO.v1().read().withQuery(QUERY);
thrown.expect(NullPointerException.class);
thrown.expectMessage("project");
read.validate(null);
}
// A read without a query must fail validation with an NPE naming "query".
@Test
public void testReadValidationFailsQuery() throws Exception {
DatastoreV1.Read read = DatastoreIO.v1().read().withProjectId(PROJECT_ID);
thrown.expect(NullPointerException.class);
thrown.expectMessage("query");
read.validate(null);
}
@Test
public void testReadValidationFailsQueryLimitZero() throws Exception {
Query invalidLimit = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(0)).build();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Invalid query limit 0: must be positive");
DatastoreIO.v1().read().withQuery(invalidLimit);
}
@Test
public void testReadValidationFailsQueryLimitNegative() throws Exception {
Query invalidLimit = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(-5)).build();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Invalid query limit -5: must be positive");
DatastoreIO.v1().read().withQuery(invalidLimit);
}
@Test
public void testReadValidationSucceedsNamespace() throws Exception {
DatastoreV1.Read read = DatastoreIO.v1().read().withProjectId(PROJECT_ID).withQuery(QUERY);
/* Should succeed, as a null namespace is fine. */
read.validate(null);
}
// Read's display data must surface projectId, query and namespace.
@Test
public void testReadDisplayData() {
DatastoreV1.Read read = DatastoreIO.v1().read()
.withProjectId(PROJECT_ID)
.withQuery(QUERY)
.withNamespace(NAMESPACE);
DisplayData displayData = DisplayData.from(read);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
assertThat(displayData, hasDisplayItem("query", QUERY.toString()));
assertThat(displayData, hasDisplayItem("namespace", NAMESPACE));
}
// Display data must also be visible on the primitive source transforms the
// Read expands into (requires a runner, hence RunnableOnService).
@Test
@Category(RunnableOnService.class)
public void testSourcePrimitiveDisplayData() {
DisplayDataEvaluator evaluator = DisplayDataEvaluator.create();
PTransform<PBegin, ? extends POutput> read = DatastoreIO.v1().read().withProjectId(
"myProject").withQuery(Query.newBuilder().build());
Set<DisplayData> displayData = evaluator.displayDataForPrimitiveSourceTransforms(read);
assertThat("DatastoreIO read should include the project in its primitive display data",
displayData, hasItem(hasDisplayItem("projectId")));
}
// withProjectId(null) must fail eagerly at construction time.
@Test
public void testWriteDoesNotAllowNullProject() throws Exception {
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
DatastoreIO.v1().write().withProjectId(null);
}
// validate() must reject a Write with no project id.
@Test
public void testWriteValidationFailsWithNoProject() throws Exception {
Write write = DatastoreIO.v1().write();
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
write.validate(null);
}
@Test
public void testWriteValidationSucceedsWithProject() throws Exception {
Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID);
write.validate(null);
}
// Write's display data must surface the project id.
@Test
public void testWriteDisplayData() {
Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID);
DisplayData displayData = DisplayData.from(write);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
}
// DeleteEntity mirrors the Write validation contract: null project rejected eagerly.
@Test
public void testDeleteEntityDoesNotAllowNullProject() throws Exception {
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
DatastoreIO.v1().deleteEntity().withProjectId(null);
}
@Test
public void testDeleteEntityValidationFailsWithNoProject() throws Exception {
DeleteEntity deleteEntity = DatastoreIO.v1().deleteEntity();
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
deleteEntity.validate(null);
}
@Test
public void testDeleteEntityValidationSucceedsWithProject() throws Exception {
DeleteEntity deleteEntity = DatastoreIO.v1().deleteEntity().withProjectId(PROJECT_ID);
deleteEntity.validate(null);
}
@Test
public void testDeleteEntityDisplayData() {
DeleteEntity deleteEntity = DatastoreIO.v1().deleteEntity().withProjectId(PROJECT_ID);
DisplayData displayData = DisplayData.from(deleteEntity);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
}
// DeleteKey mirrors the same contract as DeleteEntity.
@Test
public void testDeleteKeyDoesNotAllowNullProject() throws Exception {
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
DatastoreIO.v1().deleteKey().withProjectId(null);
}
@Test
public void testDeleteKeyValidationFailsWithNoProject() throws Exception {
DeleteKey deleteKey = DatastoreIO.v1().deleteKey();
thrown.expect(NullPointerException.class);
thrown.expectMessage("projectId");
deleteKey.validate(null);
}
@Test
public void testDeleteKeyValidationSucceedsWithProject() throws Exception {
DeleteKey deleteKey = DatastoreIO.v1().deleteKey().withProjectId(PROJECT_ID);
deleteKey.validate(null);
}
@Test
public void testDeleteKeyDisplayData() {
DeleteKey deleteKey = DatastoreIO.v1().deleteKey().withProjectId(PROJECT_ID);
DisplayData displayData = DisplayData.from(deleteKey);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
}
// Each sink transform must expose projectId plus its mutation function in the
// display data of the primitive transforms it expands into.
@Test
@Category(RunnableOnService.class)
public void testWritePrimitiveDisplayData() {
DisplayDataEvaluator evaluator = DisplayDataEvaluator.create();
PTransform<PCollection<Entity>, ?> write =
DatastoreIO.v1().write().withProjectId("myProject");
Set<DisplayData> displayData = evaluator.displayDataForPrimitiveTransforms(write);
assertThat("DatastoreIO write should include the project in its primitive display data",
displayData, hasItem(hasDisplayItem("projectId")));
assertThat("DatastoreIO write should include the upsertFn in its primitive display data",
displayData, hasItem(hasDisplayItem("upsertFn")));
}
@Test
@Category(RunnableOnService.class)
public void testDeleteEntityPrimitiveDisplayData() {
DisplayDataEvaluator evaluator = DisplayDataEvaluator.create();
PTransform<PCollection<Entity>, ?> write =
DatastoreIO.v1().deleteEntity().withProjectId("myProject");
Set<DisplayData> displayData = evaluator.displayDataForPrimitiveTransforms(write);
assertThat("DatastoreIO write should include the project in its primitive display data",
displayData, hasItem(hasDisplayItem("projectId")));
assertThat("DatastoreIO write should include the deleteEntityFn in its primitive display data",
displayData, hasItem(hasDisplayItem("deleteEntityFn")));
}
@Test
@Category(RunnableOnService.class)
public void testDeleteKeyPrimitiveDisplayData() {
DisplayDataEvaluator evaluator = DisplayDataEvaluator.create();
PTransform<PCollection<Key>, ?> write =
DatastoreIO.v1().deleteKey().withProjectId("myProject");
Set<DisplayData> displayData = evaluator.displayDataForPrimitiveTransforms(write);
assertThat("DatastoreIO write should include the project in its primitive display data",
displayData, hasItem(hasDisplayItem("projectId")));
assertThat("DatastoreIO write should include the deleteKeyFn in its primitive display data",
displayData, hasItem(hasDisplayItem("deleteKeyFn")));
}
/**
 * Test building a Write using builder methods.
 */
@Test
public void testBuildWrite() throws Exception {
DatastoreV1.Write write = DatastoreIO.v1().write().withProjectId(PROJECT_ID);
assertEquals(PROJECT_ID, write.getProjectId());
}
/**
 * Test the detection of complete and incomplete keys. A key is complete when
 * every path element (including ancestors) has either a name or a numeric id.
 */
@Test
public void testHasNameOrId() {
Key key;
// Complete with name, no ancestor
key = makeKey("bird", "finch").build();
assertTrue(isValidKey(key));
// Complete with id, no ancestor
key = makeKey("bird", 123).build();
assertTrue(isValidKey(key));
// Incomplete, no ancestor
key = makeKey("bird").build();
assertFalse(isValidKey(key));
// Complete with name and ancestor
key = makeKey("bird", "owl").build();
key = makeKey(key, "bird", "horned").build();
assertTrue(isValidKey(key));
// Complete with id and ancestor
key = makeKey("bird", "owl").build();
key = makeKey(key, "bird", 123).build();
assertTrue(isValidKey(key));
// Incomplete with ancestor
key = makeKey("bird", "owl").build();
key = makeKey(key, "bird").build();
assertFalse(isValidKey(key));
// Empty key (no path elements at all) is also invalid.
key = makeKey().build();
assertFalse(isValidKey(key));
}
/**
 * Test that entities with incomplete keys cannot be updated.
 */
@Test
public void testAddEntitiesWithIncompleteKeys() throws Exception {
// makeKey("bird") has a kind but neither name nor id, so the key is incomplete.
Key key = makeKey("bird").build();
Entity entity = Entity.newBuilder().setKey(key).build();
UpsertFn upsertFn = new UpsertFn();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Entities to be written to the Cloud Datastore must have complete keys");
upsertFn.apply(entity);
}
/**
 * Test that entities with valid keys are transformed to upsert mutations.
 */
@Test
public void testAddEntities() throws Exception {
  Key key = makeKey("bird", "finch").build();
  Entity entity = Entity.newBuilder().setKey(key).build();
  UpsertFn upsertFn = new UpsertFn();
  // Fixed "exceptedMutation" typo and JUnit's expected-first argument order.
  Mutation expectedMutation = makeUpsert(entity).build();
  assertEquals(expectedMutation, upsertFn.apply(entity));
}
/**
 * Test that entities with incomplete keys cannot be deleted.
 */
@Test
public void testDeleteEntitiesWithIncompleteKeys() throws Exception {
// makeKey("bird") has a kind but neither name nor id, so the key is incomplete.
Key key = makeKey("bird").build();
Entity entity = Entity.newBuilder().setKey(key).build();
DeleteEntityFn deleteEntityFn = new DeleteEntityFn();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Entities to be deleted from the Cloud Datastore must have complete keys");
deleteEntityFn.apply(entity);
}
/**
 * Test that entities with valid keys are transformed to delete mutations.
 */
@Test
public void testDeleteEntities() throws Exception {
  Key key = makeKey("bird", "finch").build();
  Entity entity = Entity.newBuilder().setKey(key).build();
  DeleteEntityFn deleteEntityFn = new DeleteEntityFn();
  // Fixed "exceptedMutation" typo and JUnit's expected-first argument order.
  Mutation expectedMutation = makeDelete(entity.getKey()).build();
  assertEquals(expectedMutation, deleteEntityFn.apply(entity));
}
/**
 * Test that incomplete keys cannot be deleted.
 */
@Test
public void testDeleteIncompleteKeys() throws Exception {
// makeKey("bird") has a kind but neither name nor id, so the key is incomplete.
Key key = makeKey("bird").build();
DeleteKeyFn deleteKeyFn = new DeleteKeyFn();
thrown.expect(IllegalArgumentException.class);
thrown.expectMessage("Keys to be deleted from the Cloud Datastore must be complete");
deleteKeyFn.apply(key);
}
/**
 * Test that valid keys are transformed to delete mutations.
 */
@Test
public void testDeleteKeys() throws Exception {
  Key key = makeKey("bird", "finch").build();
  DeleteKeyFn deleteKeyFn = new DeleteKeyFn();
  // Fixed "exceptedMutation" typo and JUnit's expected-first argument order.
  Mutation expectedMutation = makeDelete(key).build();
  assertEquals(expectedMutation, deleteKeyFn.apply(key));
}
// DatastoreWriterFn must expose the project id in its display data.
@Test
public void testDatastoreWriteFnDisplayData() {
DatastoreWriterFn datastoreWriter = new DatastoreWriterFn(PROJECT_ID, null);
DisplayData displayData = DisplayData.from(datastoreWriter);
assertThat(displayData, hasDisplayItem("projectId", PROJECT_ID));
}
/** Tests {@link DatastoreWriterFn} with entities less than one batch. */
@Test
public void testDatatoreWriterFnWithOneBatch() throws Exception {
datastoreWriterFnTest(100);
}
/** Tests {@link DatastoreWriterFn} with entities of more than one batches, but not a multiple. */
@Test
public void testDatatoreWriterFnWithMultipleBatches() throws Exception {
datastoreWriterFnTest(DATASTORE_BATCH_UPDATE_LIMIT * 3 + 100);
}
/**
 * Tests {@link DatastoreWriterFn} with entities of several batches, using an exact multiple of
 * write batch size.
 */
@Test
public void testDatatoreWriterFnWithBatchesExactMultiple() throws Exception {
datastoreWriterFnTest(DATASTORE_BATCH_UPDATE_LIMIT * 2);
}
// A helper method to test DatastoreWriterFn for various batch sizes.
// Feeds numMutations upserts through the DoFn and verifies that the mock
// Datastore received one non-transactional commit per DATASTORE_BATCH_UPDATE_LIMIT
// sized slice, each carrying exactly the mutations of that slice.
private void datastoreWriterFnTest(int numMutations) throws Exception {
// Create the requested number of mutations.
List<Mutation> mutations = new ArrayList<>(numMutations);
for (int i = 0; i < numMutations; ++i) {
mutations.add(
makeUpsert(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)).build()).build());
}
DatastoreWriterFn datastoreWriter = new DatastoreWriterFn(PROJECT_ID, null,
mockDatastoreFactory);
DoFnTester<Mutation, Void> doFnTester = DoFnTester.of(datastoreWriter);
// The injected mock client is not serializable; disable cloning so the DoFn
// is never serialized (see the note in testSplitQueryFnWithNumSplits).
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
doFnTester.processBundle(mutations);
int start = 0;
while (start < numMutations) {
int end = Math.min(numMutations, start + DATASTORE_BATCH_UPDATE_LIMIT);
CommitRequest.Builder commitRequest = CommitRequest.newBuilder();
commitRequest.setMode(CommitRequest.Mode.NON_TRANSACTIONAL);
commitRequest.addAllMutations(mutations.subList(start, end));
// Verify all the batch requests were made with the expected mutations.
verify(mockDatastore, times(1)).commit(commitRequest.build());
start = end;
}
}
/**
 * Tests {@link DatastoreV1.Read#getEstimatedSizeBytes} to fetch and return estimated size for a
 * query. Size estimation issues two statistics queries: one for the latest
 * statistics timestamp, then a per-kind statistics query at that timestamp.
 */
@Test
public void testEstimatedSizeBytes() throws Exception {
long entityBytes = 100L;
// In seconds
long timestamp = 1234L;
RunQueryRequest latestTimestampRequest = makeRequest(makeLatestTimestampQuery(NAMESPACE),
NAMESPACE);
RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp);
// Per Kind statistics request and response
RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE);
RunQueryResponse statResponse = makeStatKindResponse(entityBytes);
when(mockDatastore.runQuery(latestTimestampRequest))
.thenReturn(latestTimestampResponse);
when(mockDatastore.runQuery(statRequest))
.thenReturn(statResponse);
assertEquals(entityBytes, getEstimatedSizeBytes(mockDatastore, QUERY, NAMESPACE));
verify(mockDatastore, times(1)).runQuery(latestTimestampRequest);
verify(mockDatastore, times(1)).runQuery(statRequest);
}
/**
 * Tests {@link SplitQueryFn} when the number of query splits is specified: the
 * splitter must be asked for exactly that many splits and no size estimation
 * (Datastore call) should occur.
 */
@Test
public void testSplitQueryFnWithNumSplits() throws Exception {
  int numSplits = 100;
  when(mockQuerySplitter.getSplits(
      eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class)))
      .thenReturn(splitQuery(QUERY, numSplits));
  SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, numSplits, mockDatastoreFactory);
  DoFnTester<Query, KV<Integer, Query>> doFnTester = DoFnTester.of(splitQueryFn);
  // Although the Datastore client is marked transient in SplitQueryFn, when injected through
  // the mock factory for unit testing it is not serializable (no no-arg constructor), so
  // disable cloning to keep the DoFn from being serialized. (Plain block comment: Javadoc
  // inside a method body is never rendered.)
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  List<KV<Integer, Query>> queries = doFnTester.processBundle(QUERY);
  // JUnit's assertEquals takes the expected value first; reversed arguments yield
  // misleading failure messages.
  assertEquals(numSplits, queries.size());
  verifyUniqueKeys(queries);
  verify(mockQuerySplitter, times(1)).getSplits(
      eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class));
  verifyZeroInteractions(mockDatastore);
}
/**
 * Tests {@link SplitQueryFn} when no number of query splits is specified: the fn
 * must derive the split count from the estimated size (statistics queries) and
 * then ask the splitter for that many splits.
 */
@Test
public void testSplitQueryFnWithoutNumSplits() throws Exception {
  // Force SplitQueryFn to compute the number of query splits.
  int numSplits = 0;
  int expectedNumSplits = 20;
  long entityBytes = expectedNumSplits * DEFAULT_BUNDLE_SIZE_BYTES;
  // In seconds.
  long timestamp = 1234L;
  RunQueryRequest latestTimestampRequest = makeRequest(makeLatestTimestampQuery(NAMESPACE),
      NAMESPACE);
  RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp);
  // Per-kind statistics request and response.
  RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE);
  RunQueryResponse statResponse = makeStatKindResponse(entityBytes);
  when(mockDatastore.runQuery(latestTimestampRequest))
      .thenReturn(latestTimestampResponse);
  when(mockDatastore.runQuery(statRequest))
      .thenReturn(statResponse);
  when(mockQuerySplitter.getSplits(
      eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class)))
      .thenReturn(splitQuery(QUERY, expectedNumSplits));
  SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, numSplits, mockDatastoreFactory);
  DoFnTester<Query, KV<Integer, Query>> doFnTester = DoFnTester.of(splitQueryFn);
  // Mock client is not serializable; never serialize the DoFn.
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  List<KV<Integer, Query>> queries = doFnTester.processBundle(QUERY);
  // Expected value first per the JUnit assertEquals contract.
  assertEquals(expectedNumSplits, queries.size());
  verifyUniqueKeys(queries);
  verify(mockQuerySplitter, times(1)).getSplits(
      eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class));
  verify(mockDatastore, times(1)).runQuery(latestTimestampRequest);
  verify(mockDatastore, times(1)).runQuery(statRequest);
}
/**
 * Tests {@link DatastoreV1.Read.SplitQueryFn} when the query has a user specified limit:
 * the query must pass through unsplit and neither the client nor the splitter
 * may be touched.
 */
@Test
public void testSplitQueryFnWithQueryLimit() throws Exception {
  // toBuilder() already returns an independent builder; the extra clone() was redundant.
  Query queryWithLimit = QUERY.toBuilder()
      .setLimit(Int32Value.newBuilder().setValue(1))
      .build();
  SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, 10, mockDatastoreFactory);
  DoFnTester<Query, KV<Integer, Query>> doFnTester = DoFnTester.of(splitQueryFn);
  doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
  List<KV<Integer, Query>> queries = doFnTester.processBundle(queryWithLimit);
  // Expected value first per the JUnit assertEquals contract.
  assertEquals(1, queries.size());
  verifyUniqueKeys(queries);
  // No verify() calls precede these, so assert zero interactions outright.
  verifyZeroInteractions(mockDatastore);
  verifyZeroInteractions(mockQuerySplitter);
}
/** Tests {@link ReadFn} with a query limit less than one batch. */
@Test
public void testReadFnWithOneBatch() throws Exception {
readFnTest(5);
}
/** Tests {@link ReadFn} with a query limit more than one batch, and not a multiple. */
@Test
public void testReadFnWithMultipleBatches() throws Exception {
readFnTest(QUERY_BATCH_LIMIT + 5);
}
/** Tests {@link ReadFn} for several batches, using an exact multiple of batch size results. */
@Test
public void testReadFnWithBatchesExactMultiple() throws Exception {
readFnTest(5 * QUERY_BATCH_LIMIT);
}
/** Helper Methods */
/** Asserts that every KV in {@code queries} carries a distinct integer key. */
private void verifyUniqueKeys(List<KV<Integer, Query>> queries) {
  Set<Integer> keys = new HashSet<>();
  for (KV<Integer, Query> kv : queries) {
    keys.add(kv.getKey());
  }
  // Expected value first (JUnit contract): every query must contribute a unique key,
  // so the de-duplicated key set must be as large as the query list.
  assertEquals(queries.size(), keys.size());
}
/**
 * A helper function that creates mock {@link Entity} results in response to a query. Always
 * indicates that more results are available, unless the batch is limited to fewer than
 * {@link DatastoreV1.Read#QUERY_BATCH_LIMIT} results.
 */
private static RunQueryResponse mockResponseForQuery(Query q) {
// Every query DatastoreV1 sends should have a limit.
assertTrue(q.hasLimit());
// The limit should be in the range [1, QUERY_BATCH_LIMIT]
int limit = q.getLimit().getValue();
assertThat(limit, greaterThanOrEqualTo(1));
assertThat(limit, lessThanOrEqualTo(QUERY_BATCH_LIMIT));
// Create the requested number of entities.
List<EntityResult> entities = new ArrayList<>(limit);
for (int i = 0; i < limit; ++i) {
entities.add(
EntityResult.newBuilder()
.setEntity(Entity.newBuilder().setKey(makeKey("key" + i, i + 1)))
.build());
}
// Fill out the other parameters on the returned result batch. A full batch
// (limit == QUERY_BATCH_LIMIT) signals NOT_FINISHED so the reader issues a
// follow-up request; a partial batch ends the pagination.
RunQueryResponse.Builder ret = RunQueryResponse.newBuilder();
ret.getBatchBuilder()
.addAllEntityResults(entities)
.setEntityResultType(EntityResult.ResultType.FULL)
.setMoreResults(
limit == QUERY_BATCH_LIMIT
? QueryResultBatch.MoreResultsType.NOT_FINISHED
: QueryResultBatch.MoreResultsType.NO_MORE_RESULTS);
return ret.build();
}
/**
 * Helper function to run a test reading from a {@link ReadFn}: reads
 * {@code numEntities} entities and verifies both the result count and the
 * number of paginated runQuery calls made against the mock client.
 */
private void readFnTest(int numEntities) throws Exception {
// An empty query to read entities.
Query query = Query.newBuilder().setLimit(
Int32Value.newBuilder().setValue(numEntities)).build();
// Use mockResponseForQuery to generate results.
when(mockDatastore.runQuery(any(RunQueryRequest.class)))
.thenAnswer(new Answer<RunQueryResponse>() {
@Override
public RunQueryResponse answer(InvocationOnMock invocationOnMock) throws Throwable {
Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
return mockResponseForQuery(q);
}
});
ReadFn readFn = new ReadFn(V_1_OPTIONS, mockDatastoreFactory);
DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
/**
 * Although Datastore client is marked transient in {@link ReadFn}, when injected through
 * mock factory using a when clause for unit testing purposes, it is not serializable
 * because it doesn't have a no-arg constructor. Thus disabling the cloning to prevent the
 * test object from being serialized.
 */
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
List<Entity> entities = doFnTester.processBundle(query);
// One runQuery call per full-or-partial batch of QUERY_BATCH_LIMIT results.
int expectedNumCallsToRunQuery = (int) Math.ceil((double) numEntities / QUERY_BATCH_LIMIT);
verify(mockDatastore, times(expectedNumCallsToRunQuery)).runQuery(any(RunQueryRequest.class));
// Validate the number of results.
assertEquals(numEntities, entities.size());
}
/**
 * Builds a per-kind statistics response whose single entity carries the given
 * size in its {@code entity_bytes} property.
 */
private static RunQueryResponse makeStatKindResponse(long entitySizeInBytes) {
  // Fake __Stat_Kind__ entity: a dummy key plus the "entity_bytes" property.
  Entity.Builder statEntity = Entity.newBuilder();
  statEntity.setKey(makeKey("dummyKind", "dummyId"));
  statEntity.getMutableProperties().put("entity_bytes", makeValue(entitySizeInBytes).build());
  return RunQueryResponse.newBuilder()
      .setBatch(QueryResultBatch.newBuilder()
          .addEntityResults(EntityResult.newBuilder().setEntity(statEntity)))
      .build();
}
/**
 * Builds a statistics response whose single entity carries the given timestamp
 * (seconds) as a {@code timestamp} date property.
 */
private static RunQueryResponse makeLatestTimestampResponse(long timestamp) {
  // Fake statistics entity: a dummy key plus the "timestamp" property in millis.
  Entity.Builder statEntity = Entity.newBuilder();
  statEntity.setKey(makeKey("dummyKind", "dummyId"));
  statEntity.getMutableProperties().put("timestamp", makeValue(new Date(timestamp * 1000)).build());
  return RunQueryResponse.newBuilder()
      .setBatch(QueryResultBatch.newBuilder()
          .addEntityResults(EntityResult.newBuilder().setEntity(statEntity)))
      .build();
}
/**
 * Builds a per-kind statistics query for the given timestamp and namespace.
 * With a namespace the namespaced stat kind ({@code __Stat_Ns_Kind__}) is used.
 */
private static Query makeStatKindQuery(String namespace, long timestamp) {
Query.Builder statQuery = Query.newBuilder();
if (namespace == null) {
statQuery.addKindBuilder().setName("__Stat_Kind__");
} else {
statQuery.addKindBuilder().setName("__Stat_Ns_Kind__");
}
// Statistics timestamps are stored in microseconds; the caller passes seconds.
statQuery.setFilter(makeAndFilter(
makeFilter("kind_name", EQUAL, makeValue(KIND).build()).build(),
makeFilter("timestamp", EQUAL, makeValue(timestamp * 1000000L).build()).build()));
return statQuery.build();
}
/**
 * Builds a latest timestamp statistics query: newest total-statistics entity
 * first, limited to a single result.
 */
private static Query makeLatestTimestampQuery(String namespace) {
Query.Builder timestampQuery = Query.newBuilder();
if (namespace == null) {
timestampQuery.addKindBuilder().setName("__Stat_Total__");
} else {
timestampQuery.addKindBuilder().setName("__Stat_Ns_Total__");
}
timestampQuery.addOrder(makeOrder("timestamp", DESCENDING));
timestampQuery.setLimit(Int32Value.newBuilder().setValue(1));
return timestampQuery.build();
}
/** Generates {@code numSplits} dummy sub-queries, each an identical copy of {@code query}. */
private List<Query> splitQuery(Query query, int numSplits) {
  // Presized ArrayList instead of LinkedList: the size is known and access is sequential.
  List<Query> queries = new ArrayList<>(numSplits);
  for (int i = 0; i < numSplits; i++) {
    // toBuilder() already yields an independent builder; the extra clone() was redundant.
    queries.add(query.toBuilder().build());
  }
  return queries;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package net.firejack.platform.core.config.meta.element.resource;
import net.firejack.platform.api.content.model.ResourceType;
import net.firejack.platform.core.config.meta.element.PackageDescriptorElement;
import net.firejack.platform.core.config.meta.utils.DiffUtils;
import net.firejack.platform.core.model.registry.resource.ResourceModel;
import net.firejack.platform.core.utils.ArrayUtils;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlTransient;
import java.util.ArrayList;
import java.util.List;
/**
 * JAXB-mappable package descriptor element for a content resource. Holds the
 * resource's type, its last version number, and per-media-type lists of
 * version elements (text, html, image, audio, video, document, file).
 */
public class ResourceElement extends PackageDescriptorElement<ResourceModel> {
// Highest stored version number of this resource.
private Integer lastVersion;
// Which of the per-type version lists below applies to this resource.
private ResourceType resourceType;
// Version lists are lazily created by the add*ResourceVersion methods; each
// may be null until the first element of that media type is added.
private List<TextResourceVersionElement> textResourceVersionElements;
private List<HtmlResourceVersionElement> htmlResourceVersionElements;
private List<ImageResourceVersionElement> imageResourceVersionElements;
private List<AudioResourceVersionElement> audioResourceVersionElements;
private List<VideoResourceVersionElement> videoResourceVersionElements;
private List<DocumentResourceVersionElement> documentResourceVersionElements;
private List<FileResourceVersionElement> fileResourceVersionElements;
/**
 * @return the highest stored version number of this resource, mapped from the
 *         {@code last-version} XML attribute
 */
@XmlAttribute(name = "last-version")
public Integer getLastVersion() {
return lastVersion;
}
/**
 * @param lastVersion the highest stored version number of this resource
 */
public void setLastVersion(Integer lastVersion) {
this.lastVersion = lastVersion;
}
/**
 * @return the media type of this resource, mapped from the {@code type} XML attribute
 */
@XmlAttribute(name = "type")
public ResourceType getResourceType() {
return resourceType;
}
/**
 * @param resourceType the media type of this resource
 */
public void setResourceType(ResourceType resourceType) {
this.resourceType = resourceType;
}
/**
 * @return the text resource versions as an array (not bound to XML; used by the diff logic)
 */
@XmlTransient
public TextResourceVersionElement[] getTextResourceVersions() {
return DiffUtils.getArray(textResourceVersionElements, TextResourceVersionElement.class);
}
/**
 * @param textResourceVersionElements the text resource versions to set
 */
public void setTextResourceVersions(List<TextResourceVersionElement> textResourceVersionElements) {
this.textResourceVersionElements = textResourceVersionElements;
}
// XML-bound accessor for <text-resource-version> child elements.
@XmlElement(name = "text-resource-version")
public List<TextResourceVersionElement> getTextResourceVersionElements() {
return textResourceVersionElements;
}
public void setTextResourceVersionElements(List<TextResourceVersionElement> textResourceVersionElements) {
this.textResourceVersionElements = textResourceVersionElements;
}
/**
 * Appends a single text resource version, lazily creating the backing list.
 *
 * @param textResourceVersionElement the version element to add
 */
public void addTextResourceVersion(TextResourceVersionElement textResourceVersionElement) {
if (this.textResourceVersionElements == null) {
this.textResourceVersionElements = new ArrayList<TextResourceVersionElement>();
}
this.textResourceVersionElements.add(textResourceVersionElement);
}
/**
 * @return the html resource versions as an array (not bound to XML; used by the diff logic)
 */
@XmlTransient
public HtmlResourceVersionElement[] getHtmlResourceVersions() {
return DiffUtils.getArray(htmlResourceVersionElements, HtmlResourceVersionElement.class);
}
/**
 * @param htmlResourceVersionElements the html resource versions to set
 */
public void setHtmlResourceVersions(List<HtmlResourceVersionElement> htmlResourceVersionElements) {
this.htmlResourceVersionElements = htmlResourceVersionElements;
}
// XML-bound accessor for <html-resource-version> child elements.
@XmlElement(name = "html-resource-version")
public List<HtmlResourceVersionElement> getHtmlResourceVersionElements() {
return htmlResourceVersionElements;
}
public void setHtmlResourceVersionElements(List<HtmlResourceVersionElement> htmlResourceVersionElements) {
this.htmlResourceVersionElements = htmlResourceVersionElements;
}
/**
 * Appends a single html resource version, lazily creating the backing list.
 *
 * @param htmlResourceVersionElement the version element to add
 */
public void addHtmlResourceVersion(HtmlResourceVersionElement htmlResourceVersionElement) {
if (this.htmlResourceVersionElements == null) {
this.htmlResourceVersionElements = new ArrayList<HtmlResourceVersionElement>();
}
this.htmlResourceVersionElements.add(htmlResourceVersionElement);
}
/**
 * @return the image resource versions as an array (not bound to XML; used by the diff logic)
 */
@XmlTransient
public ImageResourceVersionElement[] getImageResourceVersions() {
return DiffUtils.getArray(imageResourceVersionElements, ImageResourceVersionElement.class);
}
/**
 * @param imageResourceVersionElements the image resource versions to set
 */
public void setImageResourceVersions(List<ImageResourceVersionElement> imageResourceVersionElements) {
this.imageResourceVersionElements = imageResourceVersionElements;
}
// XML-bound accessor for <image-resource-version> child elements.
@XmlElement(name = "image-resource-version")
public List<ImageResourceVersionElement> getImageResourceVersionElements() {
return imageResourceVersionElements;
}
public void setImageResourceVersionElements(List<ImageResourceVersionElement> imageResourceVersionElements) {
this.imageResourceVersionElements = imageResourceVersionElements;
}
/**
 * Appends a single image resource version, lazily creating the backing list.
 *
 * @param imageResourceVersionElement the version element to add
 */
public void addImageResourceVersion(ImageResourceVersionElement imageResourceVersionElement) {
if (this.imageResourceVersionElements == null) {
this.imageResourceVersionElements = new ArrayList<ImageResourceVersionElement>();
}
this.imageResourceVersionElements.add(imageResourceVersionElement);
}
/**
 * @return the audio resource versions as an array (not bound to XML; used by the diff logic)
 */
@XmlTransient
public AudioResourceVersionElement[] getAudioResourceVersions() {
return DiffUtils.getArray(audioResourceVersionElements, AudioResourceVersionElement.class);
}
/**
 * @param audioResourceVersionElements the audio resource versions to set
 */
public void setAudioResourceVersions(List<AudioResourceVersionElement> audioResourceVersionElements) {
this.audioResourceVersionElements = audioResourceVersionElements;
}
// XML-bound accessor for <audio-resource-version> child elements.
@XmlElement(name = "audio-resource-version")
public List<AudioResourceVersionElement> getAudioResourceVersionElements() {
return audioResourceVersionElements;
}
public void setAudioResourceVersionElements(List<AudioResourceVersionElement> audioResourceVersionElements) {
this.audioResourceVersionElements = audioResourceVersionElements;
}
/**
 * Appends a single audio resource version, lazily creating the backing list.
 *
 * @param audioResourceVersionElement the version element to add
 */
public void addAudioResourceVersion(AudioResourceVersionElement audioResourceVersionElement) {
if (this.audioResourceVersionElements == null) {
this.audioResourceVersionElements = new ArrayList<AudioResourceVersionElement>();
}
this.audioResourceVersionElements.add(audioResourceVersionElement);
}
/**
 * @return the video resource versions as an array (not bound to XML; used by the diff logic)
 */
@XmlTransient
public VideoResourceVersionElement[] getVideoResourceVersions() {
return DiffUtils.getArray(videoResourceVersionElements, VideoResourceVersionElement.class);
}
/**
 * @param videoResourceVersionElements the video resource versions to set
 */
public void setVideoResourceVersions(List<VideoResourceVersionElement> videoResourceVersionElements) {
this.videoResourceVersionElements = videoResourceVersionElements;
}
// XML-bound accessor for <video-resource-version> child elements.
@XmlElement(name = "video-resource-version")
public List<VideoResourceVersionElement> getVideoResourceVersionElements() {
return videoResourceVersionElements;
}
public void setVideoResourceVersionElements(List<VideoResourceVersionElement> videoResourceVersionElements) {
this.videoResourceVersionElements = videoResourceVersionElements;
}
/**
 * Appends a single video resource version, lazily creating the backing list.
 *
 * @param videoResourceVersionElement the version element to add
 */
public void addVideoResourceVersion(VideoResourceVersionElement videoResourceVersionElement) {
if (this.videoResourceVersionElements == null) {
this.videoResourceVersionElements = new ArrayList<VideoResourceVersionElement>();
}
this.videoResourceVersionElements.add(videoResourceVersionElement);
}
/**
* @return
*/
@XmlTransient
public DocumentResourceVersionElement[] getDocumentResourceVersions() {
return DiffUtils.getArray(documentResourceVersionElements, DocumentResourceVersionElement.class);
}
/**
* @param documentResourceVersionElements
*/
public void setDocumentResourceVersions(List<DocumentResourceVersionElement> documentResourceVersionElements) {
this.documentResourceVersionElements = documentResourceVersionElements;
}
@XmlElement(name = "document-resource-version")
public List<DocumentResourceVersionElement> getDocumentResourceVersionElements() {
return documentResourceVersionElements;
}
public void setDocumentResourceVersionElements(List<DocumentResourceVersionElement> documentResourceVersionElements) {
this.documentResourceVersionElements = documentResourceVersionElements;
}
/**
* @param documentResourceVersionElement
*/
public void addDocumentResourceVersion(DocumentResourceVersionElement documentResourceVersionElement) {
if (this.documentResourceVersionElements == null) {
this.documentResourceVersionElements = new ArrayList<DocumentResourceVersionElement>();
}
this.documentResourceVersionElements.add(documentResourceVersionElement);
}
/**
 * @return the file resource versions converted to an array via
 *         {@code DiffUtils.getArray} (presumably an empty array for a null
 *         list — TODO confirm against DiffUtils); hidden from JAXB
 */
@XmlTransient
public FileResourceVersionElement[] getFileResourceVersions() {
    return DiffUtils.getArray(fileResourceVersionElements, FileResourceVersionElement.class);
}
/**
 * Replaces the backing list of file resource versions.
 *
 * @param fileResourceVersionElements new list (may be null)
 */
public void setFileResourceVersions(List<FileResourceVersionElement> fileResourceVersionElements) {
    this.fileResourceVersionElements = fileResourceVersionElements;
}
/**
 * @return the file resource versions, marshalled by JAXB as
 *         {@code <file-resource-version>} elements (may be null)
 */
@XmlElement(name = "file-resource-version")
public List<FileResourceVersionElement> getFileResourceVersionElements() {
    return fileResourceVersionElements;
}
/**
 * JAXB counterpart setter for {@link #getFileResourceVersionElements()}.
 *
 * @param fileResourceVersionElements new list (may be null)
 */
public void setFileResourceVersionElements(List<FileResourceVersionElement> fileResourceVersionElements) {
    this.fileResourceVersionElements = fileResourceVersionElements;
}
/**
 * Appends one file resource version, lazily creating the backing list on
 * first use.
 *
 * @param fileResourceVersionElement element to append
 */
public void addFileResourceVersion(FileResourceVersionElement fileResourceVersionElement) {
    List<FileResourceVersionElement> target = this.fileResourceVersionElements;
    if (target == null) {
        target = new ArrayList<FileResourceVersionElement>();
        this.fileResourceVersionElements = target;
    }
    target.add(fileResourceVersionElement);
}
/** @return the entity model type represented by this element */
@Override
public Class<ResourceModel> getEntityClass() {
    return ResourceModel.class;
}
/**
 * Two resource elements are equal when the superclass considers them equal,
 * their resource types match, and the type-specific version arrays contain
 * the same elements (order-insensitive, via mutual containsAll checks).
 * Throws {@link UnsupportedOperationException} for resource types this diff
 * logic does not cover.
 */
@Override
public boolean equals(Object o) {
    // The superclass check also guarantees o is a compatible ResourceElement.
    if (!super.equals(o)) {
        return false;
    }
    ResourceElement that = (ResourceElement) o;
    if (this.getResourceType() != that.getResourceType()) {
        return false;
    }
    ResourceVersionElement[] thisElements;
    ResourceVersionElement[] thatElements;
    switch (this.getResourceType()) {
        case TEXT:
            thisElements = this.getTextResourceVersions();
            thatElements = that.getTextResourceVersions();
            break;
        case HTML:
            thisElements = this.getHtmlResourceVersions();
            thatElements = that.getHtmlResourceVersions();
            break;
        case IMAGE:
            thisElements = this.getImageResourceVersions();
            thatElements = that.getImageResourceVersions();
            break;
        case AUDIO:
            thisElements = this.getAudioResourceVersions();
            thatElements = that.getAudioResourceVersions();
            break;
        case VIDEO:
            thisElements = this.getVideoResourceVersions();
            thatElements = that.getVideoResourceVersions();
            break;
        case DOCUMENT:
            thisElements = this.getDocumentResourceVersions();
            thatElements = that.getDocumentResourceVersions();
            break;
        case FILE:
            thisElements = this.getFileResourceVersions();
            thatElements = that.getFileResourceVersions();
            break;
        default:
            throw new UnsupportedOperationException(
                    "Diff calculation does not support resources of type = " +
                    this.getResourceType().name());
    }
    if (thisElements == null && thatElements == null) {
        return true;
    }
    return thisElements != null && thatElements != null
            && ArrayUtils.containsAll(thisElements, thatElements)
            && ArrayUtils.containsAll(thatElements, thisElements);
}
}
| |
package com.ctrip.platform.dal.dao.shard;
import com.ctrip.platform.dal.dao.*;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Exercises the DAL per-pojo and per-shard execution callbacks against a
 * deliberately asymmetric layout: the test table only exists on part of the
 * shards (see {@link #createTables()}), so operations fanned out across all
 * shards report a deterministic mix of successes and errors.
 *
 * @author c7ch23en
 */
public class ExecutionCallbackTest {
    private static final String DB_NAME = "dao_test_mysql_exception_shard";
    private static final String TABLE_NAME = "no_shard_tbl";
    // Logical layout: 4 DB shards, a single (non-sharded) table per shard.
    private static final int DB_SHARDS = 4;
    private static final int TABLE_SHARDS = 1;
    private static final String DROP_TABLE_SQL_TPL = "DROP TABLE IF EXISTS %s";
    private static final String CREATE_TABLE_SQL_TPL = "CREATE TABLE %s (" +
            "id int NOT NULL PRIMARY KEY AUTO_INCREMENT, " +
            "dbIndex int NOT NULL, " +
            "tableIndex int NOT NULL, " +
            "intCol int, " +
            "charCol varchar(64), " +
            "lastUpdateTime timestamp DEFAULT CURRENT_TIMESTAMP)";
    private static DalClient dalClient;
    // Per-instance DAOs; constructed from the DAL config of DB_NAME.
    private DalTableDao<ExecutionCallbackTestTable> tableDao;
    private DalQueryDao queryDao;
    public ExecutionCallbackTest() throws SQLException {
        tableDao = new DalTableDao<>(ExecutionCallbackTestTable.class);
        queryDao = new DalQueryDao(DB_NAME);
    }
    @BeforeClass
    public static void beforeClass() throws SQLException {
        dalClient = DalClientFactory.getClient(DB_NAME);
        createTables();
    }
    @AfterClass
    public static void afterClass() throws SQLException {
        dropTables();
    }
    // Per-pojo callback: pojos routed to shards with the table (indexes 0 and 2)
    // succeed, the others (1 and 3) fail; continueOnError keeps the batch going.
    @Test
    public void testInsertList() throws SQLException {
        Set<Integer> successIndexes = new HashSet<>();
        Set<Integer> errorIndexes = new HashSet<>();
        AtomicInteger successCount = new AtomicInteger(0);
        AtomicInteger errorCount = new AtomicInteger(0);
        tableDao.insert(new DalHints().continueOnError(), createPojos(),
                new TestPojoCallback(successIndexes, errorIndexes, successCount, errorCount));
        Assert.assertEquals(2, successCount.get());
        Assert.assertEquals(2, successIndexes.size());
        Assert.assertTrue(successIndexes.contains(0));
        Assert.assertTrue(successIndexes.contains(2));
        Assert.assertEquals(2, errorCount.get());
        Assert.assertEquals(2, errorIndexes.size());
        Assert.assertTrue(errorIndexes.contains(1));
        Assert.assertTrue(errorIndexes.contains(3));
    }
    // Per-shard callback for batch insert: shards "0"/"2" succeed, "1"/"3" fail.
    @Test
    public void testBatchInsert() throws SQLException {
        Set<String> successShards = new HashSet<>();
        Set<String> errorShards = new HashSet<>();
        AtomicInteger successCount = new AtomicInteger(0);
        AtomicInteger errorCount = new AtomicInteger(0);
        tableDao.batchInsert(new DalHints().continueOnError(), createPojos(),
                new TestShardCallback<>(successShards, errorShards, successCount, errorCount));
        Assert.assertEquals(2, successCount.get());
        Assert.assertEquals(2, successShards.size());
        Assert.assertTrue(successShards.contains("0"));
        Assert.assertTrue(successShards.contains("2"));
        Assert.assertEquals(2, errorCount.get());
        Assert.assertEquals(2, errorShards.size());
        Assert.assertTrue(errorShards.contains("1"));
        Assert.assertTrue(errorShards.contains("3"));
    }
    // Per-shard callback for a pojo-based query fanned out to all shards.
    @Test
    public void testQueryByPojo() throws SQLException {
        Set<String> successShards = new HashSet<>();
        Set<String> errorShards = new HashSet<>();
        AtomicInteger successCount = new AtomicInteger(0);
        AtomicInteger errorCount = new AtomicInteger(0);
        tableDao.queryBy(createQueryPojoWithTableIndex(0), new DalHints().inAllShards().continueOnError(),
                new TestShardCallback<>(successShards, errorShards, successCount, errorCount));
        Assert.assertEquals(2, successCount.get());
        Assert.assertEquals(2, successShards.size());
        Assert.assertTrue(successShards.contains("0"));
        Assert.assertTrue(successShards.contains("2"));
        Assert.assertEquals(2, errorCount.get());
        Assert.assertEquals(2, errorShards.size());
        Assert.assertTrue(errorShards.contains("1"));
        Assert.assertTrue(errorShards.contains("3"));
    }
    // Per-shard callback for a free-form SQL query fanned out to all shards.
    @Test
    public void testQueryBySql() throws SQLException {
        Set<String> successShards = new HashSet<>();
        Set<String> errorShards = new HashSet<>();
        AtomicInteger successCount = new AtomicInteger(0);
        AtomicInteger errorCount = new AtomicInteger(0);
        String sql = String.format("select * from %s where id = 1", TABLE_NAME);
        queryDao.query(sql, new StatementParameters(), new DalHints().inAllShards().continueOnError(),
                ExecutionCallbackTestTable.class,
                new TestShardCallback<>(successShards, errorShards, successCount, errorCount));
        Assert.assertEquals(2, successCount.get());
        Assert.assertEquals(2, successShards.size());
        Assert.assertTrue(successShards.contains("0"));
        Assert.assertTrue(successShards.contains("2"));
        Assert.assertEquals(2, errorCount.get());
        Assert.assertEquals(2, errorShards.size());
        Assert.assertTrue(errorShards.contains("1"));
        Assert.assertTrue(errorShards.contains("3"));
    }
    // One pojo per (db shard, table shard) combination.
    private List<ExecutionCallbackTestTable> createPojos() {
        List<ExecutionCallbackTestTable> pojos = new ArrayList<>();
        for (int i = 0; i < DB_SHARDS; i++)
            for (int j = 0; j < TABLE_SHARDS; j++)
                pojos.add(createPojo(i, j));
        return pojos;
    }
    private ExecutionCallbackTestTable createPojo(int dbIndex, int tableIndex) {
        ExecutionCallbackTestTable pojo = new ExecutionCallbackTestTable();
        pojo.setDbIndex(dbIndex);
        pojo.setTableIndex(tableIndex);
        pojo.setIntCol(dbIndex + tableIndex);
        pojo.setCharCol(String.valueOf(dbIndex + tableIndex));
        return pojo;
    }
    private ExecutionCallbackTestTable createQueryPojoWithTableIndex(int tableIndex) {
        ExecutionCallbackTestTable pojo = new ExecutionCallbackTestTable();
        pojo.setTableIndex(tableIndex);
        return pojo;
    }
    // Builds the asymmetric layout: the table exists where inShard(0) routes,
    // and is guaranteed absent where inShard(1) routes.
    // NOTE(review): the tests observe success on logical shards "0"/"2" and
    // errors on "1"/"3", so the two physical databases presumably each back two
    // logical shards — confirm against the dao_test_mysql_exception_shard config.
    private static void createTables() throws SQLException {
        String[] sqls = new String[] {
                String.format(DROP_TABLE_SQL_TPL, TABLE_NAME),
                String.format(CREATE_TABLE_SQL_TPL, TABLE_NAME)
        };
        dalClient.batchUpdate(sqls, new DalHints().inShard(0));
        sqls = new String[] {
                String.format(DROP_TABLE_SQL_TPL, TABLE_NAME)
        };
        dalClient.batchUpdate(sqls, new DalHints().inShard(1));
    }
    // Cleanup mirrors createTables: only physical shards 0 and 1 are touched.
    private static void dropTables() throws SQLException {
        String[] sqls = new String[] {
                String.format(DROP_TABLE_SQL_TPL, TABLE_NAME)
        };
        dalClient.batchUpdate(sqls, new DalHints().inShard(0));
        dalClient.batchUpdate(sqls, new DalHints().inShard(1));
    }
    // Records per-pojo outcomes delivered by the DAL layer.
    static class TestPojoCallback implements PojoExecutionCallback {
        private Set<Integer> successIndexes;
        private Set<Integer> errorIndexes;
        private AtomicInteger successCount;
        private AtomicInteger errorCount;
        public TestPojoCallback(Set<Integer> successIndexes, Set<Integer> errorIndexes,
                                AtomicInteger successCount, AtomicInteger errorCount) {
            this.successIndexes = successIndexes;
            this.errorIndexes = errorIndexes;
            this.successCount = successCount;
            this.errorCount = errorCount;
        }
        @Override
        public void handle(PojoExecutionResult pojoResult) {
            if (pojoResult.isSuccess()) {
                successCount.incrementAndGet();
                successIndexes.add(pojoResult.getPojoIndex());
            } else {
                errorCount.incrementAndGet();
                errorIndexes.add(pojoResult.getPojoIndex());
                pojoResult.getErrorCause().printStackTrace();
            }
        }
    }
    // Records per-shard outcomes delivered by the DAL layer.
    static class TestShardCallback<V> implements ShardExecutionCallback<V> {
        private Set<String> successShards;
        private Set<String> errorShards;
        private AtomicInteger successCount;
        private AtomicInteger errorCount;
        public TestShardCallback(Set<String> successShards, Set<String> errorShards,
                                 AtomicInteger successCount, AtomicInteger errorCount) {
            this.successShards = successShards;
            this.errorShards = errorShards;
            this.successCount = successCount;
            this.errorCount = errorCount;
        }
        @Override
        public void handle(ShardExecutionResult<V> shardResult) {
            if (shardResult.isSuccess()) {
                successCount.incrementAndGet();
                successShards.add(shardResult.getDbShard());
            } else {
                errorCount.incrementAndGet();
                errorShards.add(shardResult.getDbShard());
                shardResult.getErrorCause().printStackTrace();
            }
        }
    }
}
| |
package de.tudarmstadt.lt.structuredtopics.evaluate;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Arrays;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Maps;
import de.tudarmstadt.lt.structuredtopics.Utils;
import de.tudarmstadt.lt.structuredtopics.evaluate.DDTStatistics.DDTStats;
/**
 * Aggregates the outputs of the experiment-2 pipeline stages (1_ddts,
 * 2_similarities, 3_clusters, 4_mappings) into a single CSV file with one row
 * per (ddt, filtered, similarity metric, CW option) combination.
 */
public class Experiment2ResultAggregator {
    private static final String OPTION_RESULT_DIR = "resultDir";
    private static final String OPTION_RESULT_FILE = "out";
    private static final Logger LOG = LoggerFactory.getLogger(Experiment2ResultAggregator.class);

    /**
     * CLI entry point. Required options: -resultDir (experiment output root)
     * and -out (target CSV file).
     */
    public static void main(String[] args) {
        Options options = createOptions();
        try {
            CommandLine cl = new DefaultParser().parse(options, args, true);
            File resultDir = new File(cl.getOptionValue(OPTION_RESULT_DIR));
            File out = new File(cl.getOptionValue(OPTION_RESULT_FILE));
            aggregateResults(resultDir, out);
        } catch (ParseException e) {
            LOG.error("Invalid arguments: {}", e.getMessage());
            StringWriter sw = new StringWriter();
            try (PrintWriter w = new PrintWriter(sw)) {
                new HelpFormatter().printHelp(w, Integer.MAX_VALUE, "application", "", options, 0, 0, "", true);
            }
            LOG.error(sw.toString());
        } catch (Exception e) {
            LOG.error("Error", e);
        }
    }

    /**
     * Collects the statistics of all four stages and writes the joined CSV.
     * The rows are driven by the mapping stats; a missing entry in any of the
     * other maps is logged and the row skipped.
     */
    private static void aggregateResults(File resultDir, File resultFile) {
        // Plain (non-scientific) decimal rendering, up to 340 fraction digits.
        DecimalFormat df = new DecimalFormat("0", DecimalFormatSymbols.getInstance(Locale.ENGLISH));
        df.setMaximumFractionDigits(340);
        Map<String, Map<Boolean, DDTStats>> ddtStats = ddtStats(new File(resultDir, "1_ddts"));
        Map<String, Map<Boolean, Map<String, Integer>>> simStats = similaritiesStats(
                new File(resultDir, "2_similarities"));
        Map<String, Map<Boolean, Map<String, Map<String, Integer>>>> clusterStats = clusterStats(
                new File(resultDir, "3_clusters"));
        Map<String, Map<Boolean, Map<String, Map<String, MappingStats>>>> mappingsStats = mappingsStats(
                new File(resultDir, "4_mappings"));
        LOG.info("Writing results to {}", resultFile.getAbsolutePath());
        try (BufferedWriter out = Utils.openWriter(resultFile, false)) {
            out.write(
                    "ddtName,filtered,totalSenses,uniqueSenseWords,totalClusterWords,uniqueClusterWords,averageClusterSize,similarityMetric,numberOfEdges,cwOption,numberOfClusters,maxOverlap,avgOverlap,totalOverlap,maxCosineScore,totalCosineScore\n");
            for (Entry<String, Map<Boolean, Map<String, Map<String, MappingStats>>>> e : mappingsStats.entrySet()) {
                String ddtName = e.getKey();
                for (Entry<Boolean, Map<String, Map<String, MappingStats>>> e2 : e.getValue().entrySet()) {
                    boolean filtered = e2.getKey();
                    for (Entry<String, Map<String, MappingStats>> e3 : e2.getValue().entrySet()) {
                        String similarityMetric = e3.getKey();
                        for (Entry<String, MappingStats> e4 : e3.getValue().entrySet()) {
                            String cwOption = e4.getKey();
                            try {
                                MappingStats stats = e4.getValue();
                                DDTStats ddtStat = ddtStats.get(ddtName).get(filtered);
                                Integer numberOfEdges = simStats.get(ddtName).get(filtered).get(similarityMetric);
                                Integer numberOfClusters = clusterStats.get(ddtName).get(filtered).get(similarityMetric)
                                        .get(cwOption);
                                String line = ddtName + "," + filtered + "," + ddtStat.totalSenses + ","
                                        + ddtStat.uniqueSenseWords + "," + ddtStat.totalClusterWords + ","
                                        + ddtStat.uniqueClusterWords + "," + ddtStat.averageClusterSize + ","
                                        + similarityMetric + "," + numberOfEdges + "," + cwOption + ","
                                        + numberOfClusters + "," + df.format(stats.maxOverlap) + ","
                                        + df.format(stats.totalOverlap / numberOfClusters) + ","
                                        + df.format(stats.totalOverlap) + "," + df.format(stats.maxCosineScore) + ","
                                        + df.format(stats.totalCosineScore) + "\n";
                                out.write(line);
                            } catch (Exception ex) {
                                // BUGFIX: previously this logged the loop
                                // variable "e" (a map entry) instead of the
                                // caught exception, losing the stack trace.
                                LOG.error(
                                        "Error while writing data for ddt {}, filtered {}, similarity {}, cwOption {}",
                                        ddtName, filtered, similarityMetric, cwOption, ex);
                            }
                        }
                    }
                }
            }
        } catch (IOException e) {
            LOG.error("Error while writing results to {}", resultFile.getAbsolutePath(), e);
        }
    }

    /**
     * Collects per-DDT statistics. Structure: ddt -> filtered? -> stats.
     * Concurrent maps + computeIfAbsent are required: the files are processed
     * in parallel and the filtered/unfiltered variants of one DDT share the
     * outer key (the original plain HashMaps raced here).
     */
    private static Map<String, Map<Boolean, DDTStats>> ddtStats(File ddtDir) {
        Map<String, Map<Boolean, DDTStats>> ddtStats = Maps.newConcurrentMap();
        File[] ddts = ddtDir.listFiles();
        Arrays.stream(ddts).parallel().forEach(ddt -> {
            LOG.debug("Collecting stats for ddt {}", ddt.getAbsolutePath());
            boolean filtered = ddt.getName().contains("filtered");
            String ddtName = ddtNameFrom(ddt.getName());
            DDTStats stats = DDTStatistics.collectStats(ddt);
            ddtStats.computeIfAbsent(ddtName, k -> Maps.newConcurrentMap())
                    .put(filtered, stats);
        });
        LOG.debug("All Stats collected");
        return ddtStats;
    }

    /**
     * Counts similarity edges per file.
     * Structure: ddt -> filtered? -> similarityMetric -> line count.
     */
    private static Map<String, Map<Boolean, Map<String, Integer>>> similaritiesStats(File simDir) {
        Map<String, Map<Boolean, Map<String, Integer>>> simStats = Maps.newConcurrentMap();
        File[] similarities = simDir.listFiles();
        Arrays.stream(similarities).parallel().forEach(similaritiy -> {
            LOG.debug("Collecting stats for similarities {}", similaritiy.getAbsolutePath());
            String simName = similaritiy.getName();
            boolean filtered = simName.contains("filtered");
            String metric = simName.split("[-]")[0];
            String ddtName = ddtNameFrom(simName);
            int lines = Utils.countLines(similaritiy);
            // Concurrent maps: parallel processing, shared keys (see ddtStats).
            simStats.computeIfAbsent(ddtName, k -> Maps.newConcurrentMap())
                    .computeIfAbsent(filtered, k -> Maps.newConcurrentMap())
                    .put(metric, lines);
        });
        return simStats;
    }

    /**
     * Counts clusters per file.
     * Structure: ddt -> filtered? -> similarityMetric -> cwOption -> line count.
     */
    private static Map<String, Map<Boolean, Map<String, Map<String, Integer>>>> clusterStats(File clusterDir) {
        Map<String, Map<Boolean, Map<String, Map<String, Integer>>>> clusterStats = Maps.newConcurrentMap();
        File[] clusters = clusterDir.listFiles();
        Arrays.stream(clusters).parallel().forEach(cluster -> {
            LOG.debug("Collecting stats for clusters {}", cluster.getAbsolutePath());
            String clusterName = cluster.getName();
            boolean filtered = clusterName.contains("filtered");
            // File names look like "clusters-<cwOption>-<metric>-...".
            String cwOption = clusterName.replace("clusters-", "").split("[-]")[0];
            String metric = clusterName.replace("clusters-", "").split("[-]")[1];
            String ddtName = ddtNameFrom(clusterName);
            int lines = Utils.countLines(cluster);
            // Concurrent maps: parallel processing, shared keys (see ddtStats).
            clusterStats.computeIfAbsent(ddtName, k -> Maps.newConcurrentMap())
                    .computeIfAbsent(filtered, k -> Maps.newConcurrentMap())
                    .computeIfAbsent(metric, k -> Maps.newConcurrentMap())
                    .put(cwOption, lines);
        });
        return clusterStats;
    }

    /**
     * Collects overlap/cosine aggregates per mapping file.
     * Structure: ddt -> filtered? -> similarityMetric -> cwOption -> stats.
     */
    private static Map<String, Map<Boolean, Map<String, Map<String, MappingStats>>>> mappingsStats(File clusterDir) {
        Map<String, Map<Boolean, Map<String, Map<String, MappingStats>>>> mappingsStats = Maps.newConcurrentMap();
        File[] clusters = clusterDir.listFiles();
        Arrays.stream(clusters).parallel().forEach(mappings -> {
            LOG.debug("Collecting stats for mappings {}", mappings.getAbsolutePath());
            String mappingName = mappings.getName();
            boolean filtered = mappingName.contains("filtered");
            // File names look like "domains-clusters-<cwOption>-<metric>-...".
            String cwOption = mappingName.replace("domains-clusters-", "").split("[-]")[0];
            String metric = mappingName.replace("domains-clusters-", "").split("[-]")[1];
            String ddtName = ddtNameFrom(mappingName);
            MappingStats stats = calculateStatsForMapping(mappings);
            // Concurrent maps: parallel processing, shared keys (see ddtStats).
            mappingsStats.computeIfAbsent(ddtName, k -> Maps.newConcurrentMap())
                    .computeIfAbsent(filtered, k -> Maps.newConcurrentMap())
                    .computeIfAbsent(metric, k -> Maps.newConcurrentMap())
                    .put(cwOption, stats);
        });
        return mappingsStats;
    }

    /**
     * Scans one tab-separated mapping file: overlap is read from column 3,
     * cosine score from column 9. Malformed lines are logged and skipped.
     */
    private static MappingStats calculateStatsForMapping(File mappings) {
        double totalOverlap = 0;
        double maxOverlap = 0;
        double totalCosineScore = 0;
        double maxCosineScore = 0;
        try (BufferedReader in = Utils.openReader(mappings)) {
            String line = null;
            int lineCount = 0;
            while ((line = in.readLine()) != null) {
                try {
                    lineCount++;
                    String[] columns = line.split("\\t");
                    double overlap = Double.parseDouble(columns[3]);
                    maxOverlap = Math.max(maxOverlap, overlap);
                    totalOverlap += overlap;
                    double cosineScore = Double.parseDouble(columns[9]);
                    maxCosineScore = Math.max(maxCosineScore, cosineScore);
                    totalCosineScore += cosineScore;
                } catch (Exception e) {
                    LOG.error("Error while processing line {}, {}", lineCount, line, e);
                }
            }
        } catch (IOException e) {
            LOG.error("Error while reading mapping file {}", mappings.getAbsolutePath(), e);
        }
        MappingStats stats = new MappingStats();
        stats.totalOverlap = totalOverlap;
        stats.maxOverlap = maxOverlap;
        stats.totalCosineScore = totalCosineScore;
        stats.maxCosineScore = maxCosineScore;
        return stats;
    }

    /**
     * Strips all known stage prefixes/suffixes from a result file name so the
     * outputs of the same DDT share one key across all four stage maps.
     */
    private static String ddtNameFrom(String resultFileName) {
        return resultFileName.replace(".csv", "").replace(".gz", "").replace("filtered-", "").replace("all-", "")
                .replace("lucene-", "").replace("clusters-", "").replace("TOP-", "").replace("DIST_LOG-", "")
                .replace("DIST_NOLOG-", "").replace("similarities-", "").replace("domains-", "");
    }

    /** Builds the CLI option set (-resultDir, -out; both required). */
    private static Options createOptions() {
        Options options = new Options();
        Option resultDir = Option.builder(OPTION_RESULT_DIR).argName("resultDir").desc("Directory with result folders")
                .required().hasArg().build();
        options.addOption(resultDir);
        Option out = Option.builder(OPTION_RESULT_FILE).argName("out")
                .desc("File where the results will be aggregated in csv format").required().hasArg().build();
        options.addOption(out);
        return options;
    }

    /** Value object for overlap/cosine aggregates of one mapping file. */
    public static class MappingStats {
        public double maxCosineScore;
        public double totalCosineScore;
        public double totalOverlap;
        public double maxOverlap;
        @Override
        public String toString() {
            return ReflectionToStringBuilder.toString(this);
        }
        @Override
        public int hashCode() {
            return HashCodeBuilder.reflectionHashCode(this);
        }
        @Override
        public boolean equals(Object obj) {
            return EqualsBuilder.reflectionEquals(this, obj);
        }
    }
}
| |
/*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.quorum.multimap;
import com.hazelcast.config.Config;
import com.hazelcast.core.EntryAdapter;
import com.hazelcast.core.MultiMap;
import com.hazelcast.quorum.AbstractQuorumTest;
import com.hazelcast.quorum.QuorumException;
import com.hazelcast.quorum.QuorumType;
import com.hazelcast.test.HazelcastSerialParametersRunnerFactory;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.util.UuidUtil;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.quorum.QuorumType.READ_WRITE;
import static com.hazelcast.quorum.QuorumType.WRITE;
import static java.util.Arrays.asList;
/**
 * Verifies that MultiMap write operations succeed when the split-brain
 * protection (quorum) is satisfied and throw {@link QuorumException} when it
 * is not. Listener registrations are local and must succeed either way.
 * NOTE(review): map(0) presumably resolves to a member on the cluster side
 * where quorum is met and map(3) to the minority side — per AbstractQuorumTest;
 * confirm there.
 */
@RunWith(Parameterized.class)
@UseParametersRunnerFactory(HazelcastSerialParametersRunnerFactory.class)
@Category({QuickTest.class, ParallelTest.class})
public class MultiMapQuorumWriteTest extends AbstractQuorumTest {
    @Parameters(name = "quorumType:{0}")
    public static Iterable<Object[]> parameters() {
        return asList(new Object[][]{{WRITE}, {READ_WRITE}});
    }
    // NOTE(review): a static @Parameter field only works because the custom
    // Hazelcast parameters runner factory supports it; stock JUnit's
    // Parameterized runner requires a public instance field.
    @Parameter
    public static QuorumType quorumType;
    @BeforeClass
    public static void setUp() {
        initTestEnvironment(new Config(), new TestHazelcastInstanceFactory());
    }
    @AfterClass
    public static void tearDown() {
        shutdownTestEnvironment();
    }
    @Test
    public void put_successful_whenQuorumSize_met() {
        map(0).put("foo", "bar");
    }
    @Test(expected = QuorumException.class)
    public void put_failing_whenQuorumSize_notMet() {
        map(3).put("foo", "bar");
    }
    @Test
    public void lock_successful_whenQuorumSize_met() {
        map(0).lock(UuidUtil.newUnsecureUuidString());
    }
    @Test(expected = QuorumException.class)
    public void lock_failing_whenQuorumSize_notMet() {
        map(3).lock(UuidUtil.newUnsecureUuidString());
    }
    @Test
    public void lockWithTime_successful_whenQuorumSize_met() {
        map(0).lock(UuidUtil.newUnsecureUuidString(), 5, TimeUnit.SECONDS);
    }
    @Test(expected = QuorumException.class)
    public void lockWithTime_failing_whenQuorumSize_notMet() {
        map(3).lock(UuidUtil.newUnsecureUuidString(), 5, TimeUnit.SECONDS);
    }
    @Test
    public void tryLock_successful_whenQuorumSize_met() {
        map(0).tryLock(UuidUtil.newUnsecureUuidString());
    }
    @Test(expected = QuorumException.class)
    public void tryLock_failing_whenQuorumSize_notMet() {
        map(3).tryLock(UuidUtil.newUnsecureUuidString());
    }
    @Test
    public void tryLockWithTime_successful_whenQuorumSize_met() throws InterruptedException {
        map(0).tryLock(UuidUtil.newUnsecureUuidString(), 5, TimeUnit.SECONDS);
    }
    @Test(expected = QuorumException.class)
    public void tryLockWithTime_failing_whenQuorumSize_notMet() throws InterruptedException {
        map(3).tryLock(UuidUtil.newUnsecureUuidString(), 5, TimeUnit.SECONDS);
    }
    @Test
    public void tryLockWithLease_successful_whenQuorumSize_met() throws InterruptedException {
        map(0).tryLock(UuidUtil.newUnsecureUuidString(), 5, TimeUnit.SECONDS, 5, TimeUnit.SECONDS);
    }
    @Test(expected = QuorumException.class)
    public void tryLockWithLease_failing_whenQuorumSize_notMet() throws InterruptedException {
        map(3).tryLock(UuidUtil.newUnsecureUuidString(), 5, TimeUnit.SECONDS, 5, TimeUnit.SECONDS);
    }
    @Test
    public void unlock_successful_whenQuorumSize_met() {
        try {
            map(0).unlock("foo");
        } catch (IllegalMonitorStateException ex) {
            // expected & meaningless: the key is not locked by this thread;
            // only the absence of a QuorumException matters here.
        }
    }
    @Test(expected = QuorumException.class)
    public void unlock_failing_whenQuorumSize_notMet() {
        try {
            map(3).unlock("foo");
        } catch (IllegalMonitorStateException ex) {
            // expected & meaningless
        }
    }
    @Test
    public void forceUnlock_successful_whenQuorumSize_met() {
        map(0).forceUnlock("foo");
    }
    @Test(expected = QuorumException.class)
    public void forceUnlock_failing_whenQuorumSize_notMet() {
        map(3).forceUnlock("foo");
    }
    @Test
    public void remove_successful_whenQuorumSize_met() {
        map(0).remove("foo");
    }
    @Test(expected = QuorumException.class)
    public void remove_failing_whenQuorumSize_notMet() {
        map(3).remove("foo");
    }
    @Test
    public void removeWhenExists_successful_whenQuorumSize_met() {
        map(0).remove("foo", "bar");
    }
    @Test(expected = QuorumException.class)
    public void removeWhenExists_failing_whenQuorumSize_notMet() {
        map(3).remove("foo", "bar");
    }
    @Test
    public void clear_successful_whenQuorumSize_met() {
        map(0).clear();
    }
    @Test(expected = QuorumException.class)
    public void clear_failing_whenQuorumSize_notMet() {
        map(3).clear();
    }
    // Listener registration is a local operation: it must not be quorum-checked.
    @Test
    public void addLocalEntryListener_successful_whenQuorumSize_met() {
        try {
            map(0).addLocalEntryListener(new EntryAdapter());
        } catch (UnsupportedOperationException ignored) {
        }
    }
    @Test
    public void addLocalEntryListener_successful_whenQuorumSize_notMet() {
        try {
            map(3).addLocalEntryListener(new EntryAdapter());
        } catch (UnsupportedOperationException ignored) {
        }
    }
    @Test
    public void addEntryListener_successful_whenQuorumSize_met() {
        map(0).addEntryListener(new EntryAdapter(), true);
    }
    @Test
    public void addEntryListener_successful_whenQuorumSize_notMet() {
        map(3).addEntryListener(new EntryAdapter(), true);
    }
    @Test
    public void addEntryListenerWithKey_successful_whenQuorumSize_met() {
        map(0).addEntryListener(new EntryAdapter(), "foo", true);
    }
    @Test
    public void addEntryListenerWithKey_successful_whenQuorumSize_notMet() {
        map(3).addEntryListener(new EntryAdapter(), "foo", true);
    }
    @Test
    public void removeEntryListenerWithKey_successful_whenQuorumSize_met() {
        map(0).removeEntryListener("id123");
    }
    @Test
    public void removeEntryListenerWithKey_successful_whenQuorumSize_notMet() {
        map(3).removeEntryListener("id123");
    }
    // NOTE(review): raw MultiMap type; consider MultiMap<Object, Object>.
    protected MultiMap map(int index) {
        return multimap(index, quorumType);
    }
}
| |
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onlab.util;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.AbstractExecutorService;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* (Somewhat) predictable ExecutorService.
* <p>
* ExecutorService which behaves similar to the one created by
* {@link Executors#newFixedThreadPool(int, ThreadFactory)},
* but assigns command to specific thread based on
 * its {@link PickyTask#hint()}, {@link Object#hashCode()}, or hint value explicitly
* specified when the command was passed to this {@link PredictableExecutor}.
*/
public class PredictableExecutor
extends AbstractExecutorService
implements ExecutorService {
private final List<ExecutorService> backends;
/**
 * Creates a {@link PredictableExecutor} instance (static-factory convenience
 * for the two-argument constructor).
 *
 * @param buckets number of buckets or 0 to match available processors
 * @param threadFactory {@link ThreadFactory} to use to create threads
 * @return {@link PredictableExecutor}
 */
public static PredictableExecutor newPredictableExecutor(int buckets, ThreadFactory threadFactory) {
    return new PredictableExecutor(buckets, threadFactory);
}
/**
 * Creates {@link PredictableExecutor} instance.
 *
 * @param buckets number of buckets or 0 to match available processors
 * @param threadFactory {@link ThreadFactory} to use to create threads
 * @throws IllegalArgumentException if {@code buckets} is negative
 */
public PredictableExecutor(int buckets, ThreadFactory threadFactory) {
    // 0 is explicitly allowed (means "use availableProcessors"), so only
    // negative values are rejected; the old message wrongly said "non zero".
    checkArgument(buckets >= 0, "number of buckets must not be negative");
    checkNotNull(threadFactory);
    if (buckets == 0) {
        buckets = Runtime.getRuntime().availableProcessors();
    }
    // One single-threaded backend per bucket; a task's hint selects a bucket
    // and therefore a fixed thread.
    this.backends = new ArrayList<>(buckets);
    for (int i = 0; i < buckets; ++i) {
        this.backends.add(backendExecutorService(threadFactory));
    }
}
/**
 * Creates {@link PredictableExecutor} instance with
 * bucket size set to number of available processors.
 *
 * @param threadFactory {@link ThreadFactory} to use to create threads
 */
public PredictableExecutor(ThreadFactory threadFactory) {
    // 0 is the sentinel for "use Runtime.availableProcessors()".
    this(0, threadFactory);
}
/**
 * Creates a single thread {@link ExecutorService} to use in the backend.
 * Overridable so subclasses can substitute a different per-bucket executor.
 *
 * @param threadFactory {@link ThreadFactory} to use to create threads
 * @return single thread {@link ExecutorService}
 */
protected ExecutorService backendExecutorService(ThreadFactory threadFactory) {
    return Executors.newSingleThreadExecutor(threadFactory);
}
/**
 * Executes given command at some time in the future.
 *
 * @param command the {@link Runnable} task
 * @param hint value to pick thread to run on.
 */
public void execute(Runnable command, int hint) {
    // floorMod always yields a value in [0, size): unlike Math.abs, it is
    // safe for Integer.MIN_VALUE, whose absolute value overflows and stays
    // negative, which previously produced a negative index and an
    // IndexOutOfBoundsException.
    int index = Math.floorMod(hint, backends.size());
    backends.get(index).execute(command);
}
/**
 * Executes given command at some time in the future.
 *
 * @param command the {@link Runnable} task
 * @param hintFunction Function to compute hint value from the command
 */
public void execute(Runnable command, Function<Runnable, Integer> hintFunction) {
    execute(command, hintFunction.apply(command));
}
/**
 * Derives the default bucket hint for a task: a {@link PickyTask} supplies
 * its own hint, anything else falls back to its (null-safe) hash code.
 */
private static int hint(Runnable command) {
    return command instanceof PickyTask
            ? ((PickyTask) command).hint()
            : Objects.hashCode(command);
}
@Override
public void execute(Runnable command) {
    // Route through the hint-function overload with the default hint.
    execute(command, PredictableExecutor::hint);
}
@Override
public void shutdown() {
    // Politely shut down every bucket; already-submitted tasks still run.
    for (ExecutorService backend : backends) {
        backend.shutdown();
    }
}
@Override
public List<Runnable> shutdownNow() {
    // Collect the never-started tasks from all buckets into one list.
    List<Runnable> pending = new ArrayList<>();
    for (ExecutorService backend : backends) {
        pending.addAll(backend.shutdownNow());
    }
    return pending;
}
@Override
public boolean isShutdown() {
    // Shut down only when every bucket is.
    for (ExecutorService backend : backends) {
        if (!backend.isShutdown()) {
            return false;
        }
    }
    return true;
}
@Override
public boolean isTerminated() {
    // Terminated only when every bucket is.
    for (ExecutorService backend : backends) {
        if (!backend.isTerminated()) {
            return false;
        }
    }
    return true;
}
/**
 * {@inheritDoc}
 * <p>
 * Note: It'll try, but is not assured that the method will return by specified timeout.
 *
 * @return true if every backend terminated within the remaining budget
 * @throws InterruptedException never thrown directly; an interrupt inside the
 *         parallel wait re-interrupts the thread and counts as "not terminated"
 */
@Override
public boolean awaitTermination(long timeout, TimeUnit unit)
        throws InterruptedException {
    final Duration timeoutD = Duration.of(unit.toMillis(timeout), ChronoUnit.MILLIS);
    final Instant start = Instant.now();
    return backends.parallelStream()
            .filter(es -> !es.isTerminated())
            .map(es -> {
                // Remaining budget = total timeout minus time elapsed since start.
                // Duration.between(start, now) is the positive elapsed time; the
                // previous argument order (now, start) yielded a NEGATIVE duration,
                // so minus() silently GREW the budget for every backend instead of
                // shrinking it, letting the method overshoot the caller's timeout.
                long timeoutMs = timeoutD.minus(Duration.between(start, Instant.now())).toMillis();
                try {
                    return es.awaitTermination(timeoutMs, TimeUnit.MILLISECONDS);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return false;
                }
            })
            .allMatch(result -> result);
}
/**
 * Wraps {@code callable} in a {@link PickyFutureTask} so submitted work
 * keeps carrying a bucket hint (see {@link PickyFutureTask#hint()}).
 */
@Override
protected <T> PickyFutureTask<T> newTaskFor(Callable<T> callable) {
    return new PickyFutureTask<>(callable);
}
/**
 * Wraps {@code runnable} in a {@link PickyFutureTask}; {@code value} is the
 * result reported on successful completion.
 */
@Override
protected <T> PickyFutureTask<T> newTaskFor(Runnable runnable, T value) {
    return new PickyFutureTask<>(runnable, value);
}
/**
 * A {@link Runnable} that also carries a bucket preference via
 * {@link PickyTask#hint()}.
 */
public interface PickyRunnable extends PickyTask, Runnable { }
/**
 * A {@link Callable} that also carries a bucket preference via
 * {@link PickyTask#hint()}.
 *
 * @param <T> result type
 */
public interface PickyCallable<T> extends PickyTask, Callable<T> { }
/**
 * Wraps the given {@link Runnable} into a {@link PickyRunnable} that always
 * reports the supplied fixed hint.
 *
 * @param task {@link Runnable} to wrap
 * @param hint fixed hint value
 * @return {@link PickyRunnable}
 */
public static PickyRunnable picky(Runnable task, int hint) {
    return picky(task, (ignored) -> hint);
}
/**
 * Wraps the given {@link Runnable} into a {@link PickyRunnable} whose hint is
 * computed by the supplied function.
 *
 * @param task {@link Runnable} to wrap
 * @param hintFn hint function
 * @return {@link PickyRunnable}
 */
public static PickyRunnable picky(Runnable task, Function<Runnable, Integer> hintFn) {
    checkNotNull(task);
    checkNotNull(hintFn);
    return new PickyRunnable() {
        @Override
        public int hint() {
            return hintFn.apply(task);
        }
        @Override
        public void run() {
            task.run();
        }
    };
}
/**
 * Wraps the given {@link Callable} into a {@link PickyCallable} that always
 * reports the supplied fixed hint.
 *
 * @param task {@link Callable} to wrap
 * @param hint fixed hint value
 * @param <T> entity type
 * @return {@link PickyCallable}
 */
public static <T> PickyCallable<T> picky(Callable<T> task, int hint) {
    return picky(task, (ignored) -> hint);
}
/**
 * Wraps the given {@link Callable} into a {@link PickyCallable} whose hint is
 * computed by the supplied function.
 *
 * @param task {@link Callable} to wrap
 * @param hintFn hint function
 * @param <T> entity type
 * @return {@link PickyCallable}
 */
public static <T> PickyCallable<T> picky(Callable<T> task, Function<Callable<T>, Integer> hintFn) {
    checkNotNull(task);
    checkNotNull(hintFn);
    return new PickyCallable<T>() {
        @Override
        public int hint() {
            return hintFn.apply(task);
        }
        @Override
        public T call() throws Exception {
            return task.call();
        }
    };
}
/**
 * Lets a task express a preference about which backend thread it runs on.
 */
public interface PickyTask {
    /**
     * Returns hint for choosing which Thread to run this task on.
     *
     * @return hint value
     */
    int hint();
}
/**
 * A {@link FutureTask} implementing {@link PickyTask}.
 * <p>
 * If the wrapped {@link Callable} or {@link Runnable} is itself a
 * {@link PickyTask}, its own {@link PickyTask#hint()} is used; otherwise the
 * wrapped object's {@link Object#hashCode()} serves as the hint.
 *
 * @param <T> result type.
 */
public static class PickyFutureTask<T>
        extends FutureTask<T>
        implements PickyTask {
    // Wrapped Runnable or Callable, retained only so hint() can consult it.
    private final Object delegate;
    /**
     * Same as {@link FutureTask#FutureTask(Runnable, Object)}.
     *
     * @param runnable work to do
     * @param value result
     */
    public PickyFutureTask(Runnable runnable, T value) {
        super(runnable, value);
        delegate = checkNotNull(runnable);
    }
    /**
     * Same as {@link FutureTask#FutureTask(Callable)}.
     *
     * @param callable work to be done
     */
    public PickyFutureTask(Callable<T> callable) {
        super(callable);
        delegate = checkNotNull(callable);
    }
    @Override
    public int hint() {
        return delegate instanceof PickyTask
                ? ((PickyTask) delegate).hint()
                : delegate.hashCode();
    }
}
}
| |
/*
* Copyright 2011-2013 Tyler Blair. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those of the
* authors and contributors and should not be interpreted as representing official policies,
* either expressed or implied, of anybody else.
*/
package org.blazr.extrastorage.util;
import org.bukkit.Bukkit;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.scheduler.BukkitTask;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.Proxy;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.UUID;
import java.util.logging.Level;
import java.util.zip.GZIPOutputStream;
public class Metrics {
    /**
     * The current revision number of the metrics protocol
     */
    private final static int REVISION = 7;
    /**
     * The base url of the metrics domain
     */
    private static final String BASE_URL = "http://report.mcstats.org";
    /**
     * The url used to report a server's status
     */
    private static final String REPORT_URL = "/plugin/%s";
    /**
     * Interval of time to ping (in minutes)
     */
    private static final int PING_INTERVAL = 15;
    /**
     * The plugin this metrics submits for
     */
    private final Plugin plugin;
    /**
     * All of the custom graphs to submit to metrics
     */
    private final Set<Graph> graphs = Collections.synchronizedSet(new HashSet<Graph>());
    /**
     * The plugin configuration file
     */
    private final YamlConfiguration configuration;
    /**
     * The file on disk backing {@link #configuration}
     */
    private final File configurationFile;
    /**
     * Unique server id
     */
    private final String guid;
    /**
     * Debug mode
     */
    private final boolean debug;
    /**
     * Lock guarding the opt-out state and the scheduled task
     */
    private final Object optOutLock = new Object();
    /**
     * The scheduled task
     */
    private volatile BukkitTask task = null;
    /**
     * Creates a metrics reporter for {@code plugin}, loading (and creating on
     * first run) the shared PluginMetrics/config.yml opt-out configuration.
     *
     * @param plugin the plugin to report for; must not be null
     * @throws IOException if the configuration file cannot be saved
     */
    public Metrics(final Plugin plugin) throws IOException {
        if (plugin == null) {
            throw new IllegalArgumentException("Plugin cannot be null");
        }
        this.plugin = plugin;
        // load the config
        configurationFile = getConfigFile();
        configuration = YamlConfiguration.loadConfiguration(configurationFile);
        // add some defaults
        configuration.addDefault("opt-out", false);
        configuration.addDefault("guid", UUID.randomUUID().toString());
        configuration.addDefault("debug", false);
        // Do we need to create the file? A missing guid means this is the first run.
        if (configuration.get("guid", null) == null) {
            configuration.options().header("http://mcstats.org").copyDefaults(true);
            configuration.save(configurationFile);
        }
        // Load the guid then
        guid = configuration.getString("guid");
        debug = configuration.getBoolean("debug", false);
    }
/**
 * Construct and create a Graph that can be used to separate specific plotters to their own graphs on the metrics
 * website. Plotters can be added to the graph object returned.
 *
 * @param name The name of the graph
 * @return Graph object created. Will never return NULL under normal circumstances unless bad parameters are given
 */
public Graph createGraph(final String name) {
    if (name == null) {
        throw new IllegalArgumentException("Graph name cannot be null");
    }
    // Build the graph, register it for submission, and hand it back.
    final Graph created = new Graph(name);
    graphs.add(created);
    return created;
}
/**
 * Add a Graph object to BukkitMetrics that represents data for the plugin that should be sent to the backend
 *
 * @param graph The name of the graph
 */
public void addGraph(final Graph graph) {
    if (graph == null) {
        throw new IllegalArgumentException("Graph cannot be null");
    }
    graphs.add(graph);
}
    /**
     * Start measuring statistics. This will immediately create an async repeating task as the plugin and send the
     * initial data to the metrics backend, and then after that it will post in increments of PING_INTERVAL * 1200
     * ticks.
     *
     * @return True if statistics measuring is running, otherwise false.
     */
    public boolean start() {
        synchronized (optOutLock) {
            // Did we opt out?
            if (isOptOut()) {
                return false;
            }
            // Is metrics already running?
            if (task != null) {
                return true;
            }
            // Begin hitting the server with glorious data
            task = plugin.getServer().getScheduler().runTaskTimerAsynchronously(plugin, new Runnable() {
                private boolean firstPost = true;
                public void run() {
                    try {
                        // This has to be synchronized or it can collide with the disable method.
                        synchronized (optOutLock) {
                            // Disable Task, if it is running and the server owner decided to opt-out
                            if (isOptOut() && task != null) {
                                task.cancel();
                                task = null;
                                // Tell all plotters to stop gathering information.
                                for (Graph graph : graphs) {
                                    graph.onOptOut();
                                }
                            }
                        }
                        // We use the inverse of firstPost because if it is the first time we are posting,
                        // it is not a interval ping, so it evaluates to FALSE
                        // Each time thereafter it will evaluate to TRUE, i.e PING!
                        postPlugin(!firstPost);
                        // After the first post we set firstPost to false
                        // Each post thereafter will be a ping
                        firstPost = false;
                    } catch (IOException e) {
                        // Reporting failures are non-fatal; surface them only in debug mode.
                        if (debug) {
                            Bukkit.getLogger().log(Level.INFO, "[Metrics] " + e.getMessage());
                        }
                    }
                }
            // PING_INTERVAL minutes converted to server ticks (1200 ticks/minute at the nominal 20 ticks/s)
            }, 0, PING_INTERVAL * 1200);
            return true;
        }
    }
    /**
     * Has the server owner denied plugin metrics?
     * <p>
     * Reloads the config from disk on every call so external edits take
     * effect; any read failure is treated as an opt-out.
     *
     * @return true if metrics should be opted out of it
     */
    public boolean isOptOut() {
        synchronized (optOutLock) {
            try {
                // Reload the metrics file
                configuration.load(getConfigFile());
            } catch (IOException ex) {
                if (debug) {
                    Bukkit.getLogger().log(Level.INFO, "[Metrics] " + ex.getMessage());
                }
                return true;
            } catch (InvalidConfigurationException ex) {
                if (debug) {
                    Bukkit.getLogger().log(Level.INFO, "[Metrics] " + ex.getMessage());
                }
                return true;
            }
            return configuration.getBoolean("opt-out", false);
        }
    }
    /**
     * Enables metrics for the server by setting "opt-out" to false in the config file and starting the metrics task.
     *
     * @throws java.io.IOException if the config file cannot be saved
     */
    public void enable() throws IOException {
        // This has to be synchronized or it can collide with the check in the task.
        synchronized (optOutLock) {
            // Check if the server owner has already set opt-out, if not, set it.
            if (isOptOut()) {
                configuration.set("opt-out", false);
                configuration.save(configurationFile);
            }
            // Enable Task, if it is not running
            if (task == null) {
                start();
            }
        }
    }
    /**
     * Disables metrics for the server by setting "opt-out" to true in the config file and canceling the metrics task.
     *
     * @throws java.io.IOException if the config file cannot be saved
     */
    public void disable() throws IOException {
        // This has to be synchronized or it can collide with the check in the task.
        synchronized (optOutLock) {
            // Check if the server owner has already set opt-out, if not, set it.
            if (!isOptOut()) {
                configuration.set("opt-out", true);
                configuration.save(configurationFile);
            }
            // Disable Task, if it is running
            if (task != null) {
                task.cancel();
                task = null;
            }
        }
    }
/**
 * Gets the File object of the config file that should be used to store data such as the GUID and opt-out status
 *
 * @return the File object for the config file
 */
public File getConfigFile() {
    // Derive the shared plugins directory from our own data folder:
    // plugin.getDataFolder() => base/plugins/PluginA/, so its parent is base/plugins/.
    // The base is not necessarily relative to the startup directory.
    File pluginsFolder = plugin.getDataFolder().getParentFile();
    File metricsFolder = new File(pluginsFolder, "PluginMetrics");
    // => base/plugins/PluginMetrics/config.yml (shared across all mcstats plugins)
    return new File(metricsFolder, "config.yml");
}
/**
 * Generic method that posts a plugin to the metrics website.
 *
 * @param isPing true for an interval ping, false for the first post of a session
 * @throws IOException if the request fails or the backend reports an error
 */
private void postPlugin(final boolean isPing) throws IOException {
    // Server software specific section
    PluginDescriptionFile description = plugin.getDescription();
    String pluginName = description.getName();
    boolean onlineMode = Bukkit.getServer().getOnlineMode(); // TRUE if online mode is enabled
    String pluginVersion = description.getVersion();
    String serverVersion = Bukkit.getVersion();
    int playersOnline = Bukkit.getServer().getOnlinePlayers().size();
    // END server software specific section -- all code below does not use any code outside of this class / Java
    // Construct the post data
    StringBuilder json = new StringBuilder(1024);
    json.append('{');
    // The plugin's description file containg all of the plugin data such as name, version, author, etc
    appendJSONPair(json, "guid", guid);
    appendJSONPair(json, "plugin_version", pluginVersion);
    appendJSONPair(json, "server_version", serverVersion);
    appendJSONPair(json, "players_online", Integer.toString(playersOnline));
    // New data as of R6
    String osname = System.getProperty("os.name");
    String osarch = System.getProperty("os.arch");
    String osversion = System.getProperty("os.version");
    String java_version = System.getProperty("java.version");
    int coreCount = Runtime.getRuntime().availableProcessors();
    // normalize os arch .. amd64 -> x86_64
    if (osarch.equals("amd64")) {
        osarch = "x86_64";
    }
    appendJSONPair(json, "osname", osname);
    appendJSONPair(json, "osarch", osarch);
    appendJSONPair(json, "osversion", osversion);
    appendJSONPair(json, "cores", Integer.toString(coreCount));
    appendJSONPair(json, "auth_mode", onlineMode ? "1" : "0");
    appendJSONPair(json, "java_version", java_version);
    // If we're pinging, append it
    if (isPing) {
        appendJSONPair(json, "ping", "1");
    }
    if (graphs.size() > 0) {
        synchronized (graphs) {
            json.append(',');
            json.append('"');
            json.append("graphs");
            json.append('"');
            json.append(':');
            json.append('{');
            boolean firstGraph = true;
            final Iterator<Graph> iter = graphs.iterator();
            while (iter.hasNext()) {
                Graph graph = iter.next();
                StringBuilder graphJson = new StringBuilder();
                graphJson.append('{');
                for (Plotter plotter : graph.getPlotters()) {
                    appendJSONPair(graphJson, plotter.getColumnName(), Integer.toString(plotter.getValue()));
                }
                graphJson.append('}');
                if (!firstGraph) {
                    json.append(',');
                }
                json.append(escapeJSON(graph.getName()));
                json.append(':');
                json.append(graphJson);
                firstGraph = false;
            }
            json.append('}');
        }
    }
    // close json
    json.append('}');
    // Create the url
    URL url = new URL(BASE_URL + String.format(REPORT_URL, urlEncode(pluginName)));
    // Connect to the website
    URLConnection connection;
    // Mineshafter creates a socks proxy, so we can safely bypass it
    // It does not reroute POST requests so we need to go around it
    if (isMineshafterPresent()) {
        connection = url.openConnection(Proxy.NO_PROXY);
    } else {
        connection = url.openConnection();
    }
    // Encode explicitly as UTF-8: gzip() compresses the UTF-8 bytes, so the
    // debug length should be computed the same way rather than with the
    // platform-default charset.
    byte[] uncompressed = json.toString().getBytes("UTF-8");
    byte[] compressed = gzip(json.toString());
    // Headers
    connection.addRequestProperty("User-Agent", "MCStats/" + REVISION);
    connection.addRequestProperty("Content-Type", "application/json");
    connection.addRequestProperty("Content-Encoding", "gzip");
    connection.addRequestProperty("Content-Length", Integer.toString(compressed.length));
    connection.addRequestProperty("Accept", "application/json");
    connection.addRequestProperty("Connection", "close");
    connection.setDoOutput(true);
    if (debug) {
        System.out.println("[Metrics] Prepared request for " + pluginName + " uncompressed=" + uncompressed.length + " compressed=" + compressed.length);
    }
    // Write the request and read the response, closing both streams even when
    // either operation throws (previously they leaked on failure).
    String response;
    OutputStream os = null;
    BufferedReader reader = null;
    try {
        os = connection.getOutputStream();
        os.write(compressed);
        os.flush();
        reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        response = reader.readLine();
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException ignored) {
                // best-effort close; the primary exception (if any) propagates
            }
        }
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }
    if (response == null || response.startsWith("ERR") || response.startsWith("7")) {
        if (response == null) {
            response = "null";
        } else if (response.startsWith("7")) {
            // Strip the leading "7"/"7," error code before reporting the message.
            response = response.substring(response.startsWith("7,") ? 2 : 1);
        }
        throw new IOException(response);
    } else {
        // Is this the first update this hour?
        if (response.equals("1") || response.contains("This is your first update this hour")) {
            synchronized (graphs) {
                final Iterator<Graph> iter = graphs.iterator();
                while (iter.hasNext()) {
                    final Graph graph = iter.next();
                    for (Plotter plotter : graph.getPlotters()) {
                        plotter.reset();
                    }
                }
            }
        }
    }
}
/**
 * GZip compress a string of bytes
 *
 * @param input text to compress; encoded as UTF-8 before compression
 * @return the gzip-compressed bytes (best effort; errors are printed, not thrown)
 */
public static byte[] gzip(String input) {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    GZIPOutputStream stream = null;
    try {
        stream = new GZIPOutputStream(buffer);
        stream.write(input.getBytes("UTF-8"));
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (stream != null) {
            try {
                stream.close();
            } catch (IOException ignore) {
                // nothing sensible to do if close fails
            }
        }
    }
    return buffer.toByteArray();
}
/**
 * Check if mineshafter is present. If it is, we need to bypass it to send POST requests
 *
 * @return true if mineshafter is installed on the server
 */
private boolean isMineshafterPresent() {
    try {
        // The presence of this class on the classpath is the Mineshafter marker.
        Class.forName("mineshafter.MineServer");
    } catch (Exception e) {
        return false;
    }
    return true;
}
    /**
     * Appends a json encoded key/value pair to the given string builder.
     * Numeric-looking values are emitted bare; everything else is quoted/escaped.
     *
     * @param json builder holding the partially-built JSON object
     * @param key pair key (always escaped and quoted)
     * @param value pair value
     * @throws UnsupportedEncodingException never in practice; declared for the escaping path
     */
    private static void appendJSONPair(StringBuilder json, String key, String value) throws UnsupportedEncodingException {
        boolean isValueNumeric = false;
        try {
            // NOTE(review): values ending in '0' (other than "0" itself) are deliberately
            // treated as strings even when they parse as numbers — presumably to keep
            // trailing zeros (e.g. "1.10") from being mangled by numeric handling;
            // confirm against the mcstats backend before changing.
            if (value.equals("0") || !value.endsWith("0")) {
                Double.parseDouble(value);
                isValueNumeric = true;
            }
        } catch (NumberFormatException e) {
            isValueNumeric = false;
        }
        // Separate from the previous pair unless we're right after the opening brace.
        if (json.charAt(json.length() - 1) != '{') {
            json.append(',');
        }
        json.append(escapeJSON(key));
        json.append(':');
        if (isValueNumeric) {
            json.append(value);
        } else {
            json.append(escapeJSON(value));
        }
    }
/**
* Escape a string to create a valid JSON string
*
* @param text
* @return
*/
private static String escapeJSON(String text) {
StringBuilder builder = new StringBuilder();
builder.append('"');
for (int index = 0; index < text.length(); index++) {
char chr = text.charAt(index);
switch (chr) {
case '"':
case '\\':
builder.append('\\');
builder.append(chr);
break;
case '\b':
builder.append("\\b");
break;
case '\t':
builder.append("\\t");
break;
case '\n':
builder.append("\\n");
break;
case '\r':
builder.append("\\r");
break;
default:
if (chr < ' ') {
String t = "000" + Integer.toHexString(chr);
builder.append("\\u" + t.substring(t.length() - 4));
} else {
builder.append(chr);
}
break;
}
}
builder.append('"');
return builder.toString();
}
/**
 * Encode text as UTF-8 for safe use in a URL path/query.
 *
 * @param text the text to encode
 * @return the encoded text, as UTF-8
 * @throws UnsupportedEncodingException never in practice ("UTF-8" is a mandatory charset)
 */
private static String urlEncode(final String text) throws UnsupportedEncodingException {
    return URLEncoder.encode(text, "UTF-8");
}
/**
 * Represents a custom graph on the website
 */
public static class Graph {
    /**
     * The graph's name, alphanumeric and spaces only :) If it does not comply to the above when submitted, it is
     * rejected
     */
    private final String name;
    /**
     * The set of plotters that are contained within this graph
     */
    private final Set<Plotter> plotters = new LinkedHashSet<Plotter>();
    private Graph(final String name) {
        this.name = name;
    }
    /**
     * Gets the graph's name
     *
     * @return the Graph's name
     */
    public String getName() {
        return name;
    }
    /**
     * Add a plotter to the graph, which will be used to plot entries
     *
     * @param plotter the plotter to add to the graph
     */
    public void addPlotter(final Plotter plotter) {
        plotters.add(plotter);
    }
    /**
     * Remove a plotter from the graph
     *
     * @param plotter the plotter to remove from the graph
     */
    public void removePlotter(final Plotter plotter) {
        plotters.remove(plotter);
    }
    /**
     * Gets an <b>unmodifiable</b> set of the plotter objects in the graph
     *
     * @return an unmodifiable {@link java.util.Set} of the plotter objects
     */
    public Set<Plotter> getPlotters() {
        return Collections.unmodifiableSet(plotters);
    }
    @Override
    public int hashCode() {
        return name.hashCode();
    }
    @Override
    public boolean equals(final Object object) {
        // Two graphs are the same iff they carry the same name.
        if (object instanceof Graph) {
            return ((Graph) object).name.equals(name);
        }
        return false;
    }
    /**
     * Called when the server owner decides to opt-out of BukkitMetrics while the server is running.
     */
    protected void onOptOut() {
    }
}
    /**
     * Interface used to collect custom data for a plugin
     */
    public static abstract class Plotter {
        /**
         * The plot's name
         */
        private final String name;
        /**
         * Construct a plotter with the default plot name
         */
        public Plotter() {
            this("Default");
        }
        /**
         * Construct a plotter with a specific plot name
         *
         * @param name the name of the plotter to use, which will show up on the website
         */
        public Plotter(final String name) {
            this.name = name;
        }
        /**
         * Get the current value for the plotted point. Since this function defers to an external function it may or may
         * not return immediately thus cannot be guaranteed to be thread friendly or safe. This function can be called
         * from any thread so care should be taken when accessing resources that need to be synchronized.
         *
         * @return the current value for the point to be plotted.
         */
        public abstract int getValue();
        /**
         * Get the column name for the plotted point
         *
         * @return the plotted point's column name
         */
        public String getColumnName() {
            return name;
        }
        /**
         * Called after the website graphs have been updated
         */
        public void reset() {
        }
        @Override
        public int hashCode() {
            return getColumnName().hashCode();
        }
        @Override
        public boolean equals(final Object object) {
            if (!(object instanceof Plotter)) {
                return false;
            }
            // NOTE(review): equals invokes getValue(), so equality can change over time
            // and may trigger whatever work getValue() performs. hashCode() uses only
            // the name, which keeps the equals/hashCode contract intact (equal objects
            // share a name, hence a hash), but comparisons are not side-effect free.
            final Plotter plotter = (Plotter) object;
            return plotter.name.equals(name) && plotter.getValue() == getValue();
        }
    }
}
| |
package com.krishagni.catissueplus.rest.controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import com.krishagni.catissueplus.core.common.events.RequestEvent;
import com.krishagni.catissueplus.core.common.events.ResponseEvent;
import com.krishagni.rbac.events.GroupDetail;
import com.krishagni.rbac.events.GroupRoleDetail;
import com.krishagni.rbac.events.OperationDetail;
import com.krishagni.rbac.events.PermissionDetail;
import com.krishagni.rbac.events.ResourceDetail;
import com.krishagni.rbac.events.RoleDetail;
import com.krishagni.rbac.events.SubjectRoleDetail;
import com.krishagni.rbac.events.SubjectRoleOp;
import com.krishagni.rbac.events.SubjectRoleOp.OP;
import com.krishagni.rbac.repository.OperationListCriteria;
import com.krishagni.rbac.repository.PermissionListCriteria;
import com.krishagni.rbac.repository.ResourceListCriteria;
import com.krishagni.rbac.repository.RoleListCriteria;
import com.krishagni.rbac.service.RbacService;
@Controller
@RequestMapping("/rbac")
public class RbacController {
    @Autowired
    private RbacService rbacSvc;
    // Wraps a payload in the RequestEvent envelope expected by all RbacService calls.
    private <T> RequestEvent<T> getRequest(T payload) {
        return new RequestEvent<T>(payload);
    }
//
// - Resource APIs
//
/** Lists resources, optionally filtered by name, with paging. */
@RequestMapping(method = RequestMethod.GET, value="/resources")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public List<ResourceDetail> getAllResources(
        @RequestParam(value = "name", required = false)
        String name,
        @RequestParam(value = "startAt", required = false, defaultValue = "0")
        int startAt,
        @RequestParam(value = "maxResults", required = false, defaultValue = "100")
        int maxResults) {
    ResourceListCriteria crit = new ResourceListCriteria()
            .query(name)
            .startAt(startAt)
            .maxResults(maxResults);
    ResponseEvent<List<ResourceDetail>> response = rbacSvc.getResources(getRequest(crit));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Creates (or saves) a resource. */
@RequestMapping(method = RequestMethod.POST, value="/resources")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public ResourceDetail createResource(@RequestBody ResourceDetail resourceDetail) {
    ResponseEvent<ResourceDetail> response = rbacSvc.saveResource(getRequest(resourceDetail));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Deletes the resource identified by name and returns its last state. */
@RequestMapping(method = RequestMethod.DELETE, value="/resources/{name}")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public ResourceDetail deleteResource(@PathVariable("name") String resourceName) {
    ResponseEvent<ResourceDetail> response = rbacSvc.deleteResource(getRequest(resourceName));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
//
// - Operation APIs
//
/** Lists operations, optionally filtered by name, with paging. */
@RequestMapping(method = RequestMethod.GET, value="/operations")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public List<OperationDetail> getAllOperations(
        @RequestParam(value = "name", required = false)
        String name,
        @RequestParam(value = "startAt", required = false, defaultValue = "0")
        int startAt,
        @RequestParam(value = "maxResults", required = false, defaultValue = "100")
        int maxResults) {
    OperationListCriteria crit = new OperationListCriteria()
            .query(name)
            .startAt(startAt)
            .maxResults(maxResults);
    ResponseEvent<List<OperationDetail>> response = rbacSvc.getOperations(getRequest(crit));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Creates (or saves) an operation. */
@RequestMapping(method = RequestMethod.POST, value="/operations")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public OperationDetail addOperation(@RequestBody OperationDetail operation) {
    ResponseEvent<OperationDetail> response = rbacSvc.saveOperation(getRequest(operation));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Deletes the operation identified by name and returns its last state. */
@RequestMapping(method = RequestMethod.DELETE, value="/operations/{name}")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public OperationDetail deleteOperation(@PathVariable("name") String operationName) {
    ResponseEvent<OperationDetail> response = rbacSvc.deleteOperation(getRequest(operationName));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
//
// - Permission APIs
//
/** Lists permissions with paging. */
@RequestMapping(method = RequestMethod.GET, value="/permissions")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public List<PermissionDetail> getAllPermissions(
        @RequestParam(value = "startAt", required = false, defaultValue = "0")
        int startAt,
        @RequestParam(value = "maxResults", required = false, defaultValue = "100")
        int maxResults) {
    PermissionListCriteria crit = new PermissionListCriteria()
            .startAt(startAt)
            .maxResults(maxResults);
    ResponseEvent<List<PermissionDetail>> response = rbacSvc.getPermissions(getRequest(crit));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Creates a permission pairing a resource with an operation. */
@RequestMapping(method = RequestMethod.POST, value="/permissions")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public PermissionDetail addPermission(@RequestBody PermissionDetail permissionDetail) {
    ResponseEvent<PermissionDetail> response = rbacSvc.addPermission(getRequest(permissionDetail));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Deletes the permission identified by its resource/operation pair. */
@RequestMapping(method = RequestMethod.DELETE, value="/permissions")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public PermissionDetail deletePermission(
        @RequestParam(value = "resource", required = true)
        String resource,
        @RequestParam(value = "operation", required = true)
        String operation) {
    // The service identifies the permission by the (resource, operation) pair.
    PermissionDetail detail = new PermissionDetail();
    detail.setResourceName(resource);
    detail.setOperationName(operation);
    ResponseEvent<PermissionDetail> response = rbacSvc.deletePermission(getRequest(detail));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
//
// - Role APIs
//
/** Lists roles, optionally filtered by name, with paging. */
@RequestMapping(method = RequestMethod.GET, value="/roles")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public List<RoleDetail> getRoles(
        @RequestParam(value = "name", required = false)
        String name,
        @RequestParam(value = "startAt", required = false, defaultValue = "0")
        int startAt,
        @RequestParam(value = "maxResults", required = false, defaultValue = "100")
        int maxResults) {
    RoleListCriteria crit = new RoleListCriteria()
            .query(name)
            .startAt(startAt)
            .maxResults(maxResults);
    ResponseEvent<List<RoleDetail>> response = rbacSvc.getRoles(getRequest(crit));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Fetches a single role by its database id. */
@RequestMapping(method = RequestMethod.GET, value="/roles/{id}")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public RoleDetail getRole(@PathVariable Long id) {
    ResponseEvent<RoleDetail> response = rbacSvc.getRole(getRequest(id));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Creates (or saves) a role. */
@RequestMapping(method = RequestMethod.POST, value="/roles")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public RoleDetail createRole(@RequestBody RoleDetail roleDetail) {
    ResponseEvent<RoleDetail> response = rbacSvc.saveRole(getRequest(roleDetail));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Updates an existing role; the path id overrides any id in the body. */
@RequestMapping(method = RequestMethod.PUT, value="/roles/{roleId}")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public RoleDetail updateRole(@PathVariable Long roleId, @RequestBody RoleDetail roleDetail) {
    roleDetail.setId(roleId);
    ResponseEvent<RoleDetail> response = rbacSvc.updateRole(getRequest(roleDetail));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
/** Deletes the role identified by id and returns its last state. */
@RequestMapping(method = RequestMethod.DELETE, value="/roles/{id}")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public RoleDetail deleteRole(@PathVariable("id") Long id) {
    ResponseEvent<RoleDetail> response = rbacSvc.deleteRole(getRequest(id));
    response.throwErrorIfUnsuccessful();
    return response.getPayload();
}
//
// - Subject APIs
//
@RequestMapping(method = RequestMethod.GET, value="/subjects/{subjectId}/roles")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public List<SubjectRoleDetail> getSubjectRoles(@PathVariable Long subjectId) {
ResponseEvent<List<SubjectRoleDetail>> resp = rbacSvc.getSubjectRoles(getRequest(subjectId));
resp.throwErrorIfUnsuccessful();
return resp.getPayload();
}
@RequestMapping(method = RequestMethod.POST, value="/subjects/{id}/roles")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public SubjectRoleDetail addSubjectRole(
        @PathVariable("id") Long subjectId,
        @RequestBody SubjectRoleDetail subjectRole) {
    // Delegate to the shared add/update/remove handler with the ADD op.
    return performSubjectRoleOp(OP.ADD, subjectId, subjectRole);
}
@RequestMapping(method = RequestMethod.PUT, value="/subjects/{id}/roles/{roleId}")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public SubjectRoleDetail updateSubjectRole(
        @PathVariable("id") Long subjectId,
        @PathVariable("roleId") Long roleId,
        @RequestBody SubjectRoleDetail subjectRole) {
    // The role id from the path wins over whatever the body carries.
    subjectRole.setId(roleId);
    return performSubjectRoleOp(OP.UPDATE, subjectId, subjectRole);
}
@RequestMapping(method = RequestMethod.DELETE, value="/subjects/{id}/roles/{roleId}")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public SubjectRoleDetail removeSubjectRole(
        @PathVariable("id") Long subjectId,
        @PathVariable("roleId") Long roleId) {
    // DELETE has no body, so build a minimal detail carrying only the role id.
    SubjectRoleDetail detail = new SubjectRoleDetail();
    detail.setId(roleId);
    return performSubjectRoleOp(OP.REMOVE, subjectId, detail);
}
/**
 * Shared handler for ADD/UPDATE/REMOVE of a subject's role: packages the
 * operation into a single {@code SubjectRoleOp} and dispatches it to the
 * RBAC service, failing fast on an unsuccessful response.
 */
private SubjectRoleDetail performSubjectRoleOp(OP op, Long subjectId, SubjectRoleDetail subjectRole) {
    SubjectRoleOp roleOp = new SubjectRoleOp();
    roleOp.setSubjectId(subjectId);
    roleOp.setOp(op);
    roleOp.setSubjectRole(subjectRole);
    ResponseEvent<SubjectRoleDetail> opResp = rbacSvc.updateSubjectRole(getRequest(roleOp));
    opResp.throwErrorIfUnsuccessful();
    return opResp.getPayload();
}
//
// - Group APIs
//
@RequestMapping(method = RequestMethod.PUT, value="/groups/{groupId}/roles")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public GroupDetail updateGroupRole(
        @PathVariable("groupId") Long groupId,
        @RequestBody GroupDetail group) {
    // The path variable, not the request body, is authoritative for the id.
    group.setId(groupId);
    ResponseEvent<GroupDetail> updateResp = rbacSvc.updateGroupRoles(getRequest(group));
    updateResp.throwErrorIfUnsuccessful();
    return updateResp.getPayload();
}
@RequestMapping(method = RequestMethod.GET, value="/groups/{groupId}/roles")
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public List<GroupRoleDetail> getGroupRoles(@PathVariable("groupId") Long groupId) {
    // Explicit path-variable name so binding does not depend on the
    // -parameters compiler flag (consistent with updateGroupRole above).
    ResponseEvent<List<GroupRoleDetail>> resp = rbacSvc.getGroupRoles(getRequest(groupId));
    resp.throwErrorIfUnsuccessful();
    return resp.getPayload();
}
}
| |
/**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson;
import static org.redisson.client.protocol.RedisCommands.EVAL_OBJECT;
import static org.redisson.client.protocol.RedisCommands.LINDEX;
import static org.redisson.client.protocol.RedisCommands.LLEN_INT;
import static org.redisson.client.protocol.RedisCommands.LPOP;
import static org.redisson.client.protocol.RedisCommands.LPUSH_BOOLEAN;
import static org.redisson.client.protocol.RedisCommands.LRANGE;
import static org.redisson.client.protocol.RedisCommands.LREM_SINGLE;
import static org.redisson.client.protocol.RedisCommands.RPUSH_BOOLEAN;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import org.redisson.api.RFuture;
import org.redisson.api.RList;
import org.redisson.api.RedissonClient;
import org.redisson.api.SortOrder;
import org.redisson.api.mapreduce.RCollectionMapReduce;
import org.redisson.client.codec.Codec;
import org.redisson.client.protocol.RedisCommand;
import org.redisson.client.protocol.RedisCommand.ValueType;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.client.protocol.convertor.BooleanNumberReplayConvertor;
import org.redisson.client.protocol.convertor.BooleanReplayConvertor;
import org.redisson.client.protocol.convertor.Convertor;
import org.redisson.client.protocol.convertor.IntegerReplayConvertor;
import org.redisson.command.CommandAsyncExecutor;
import org.redisson.mapreduce.RedissonCollectionMapReduce;
/**
* Distributed and concurrent implementation of {@link java.util.List}
*
* @author Nikita Koksharov
*
* @param <V> the type of elements held in this collection
*/
public class RedissonList<V> extends RedissonExpirable implements RList<V> {

    // EVAL command that converts the script's numeric reply into a Boolean and
    // treats trailing arguments as encoded values. Used by addAllAsync(index, coll).
    public static final RedisCommand<Boolean> EVAL_BOOLEAN_ARGS2 = new RedisCommand<Boolean>("EVAL", new BooleanReplayConvertor(), 5, ValueType.OBJECTS);

    // Kept only to hand over to map/reduce tasks (see mapReduce()).
    private RedissonClient redisson;

    public RedissonList(CommandAsyncExecutor commandExecutor, String name, RedissonClient redisson) {
        super(commandExecutor, name);
        this.redisson = redisson;
    }

    public RedissonList(Codec codec, CommandAsyncExecutor commandExecutor, String name, RedissonClient redisson) {
        super(codec, commandExecutor, name);
        this.redisson = redisson;
    }

    @Override
    public <KOut, VOut> RCollectionMapReduce<V, KOut, VOut> mapReduce() {
        return new RedissonCollectionMapReduce<V, KOut, VOut>(this, redisson, commandExecutor.getConnectionManager());
    }

    // Blocking size: LLEN on the backing list key.
    @Override
    public int size() {
        return get(sizeAsync());
    }

    public RFuture<Integer> sizeAsync() {
        return commandExecutor.readAsync(getName(), codec, LLEN_INT, getName());
    }

    @Override
    public boolean isEmpty() {
        return size() == 0;
    }

    @Override
    public boolean contains(Object o) {
        return get(containsAsync(o));
    }

    @Override
    public Iterator<V> iterator() {
        return listIterator();
    }

    // toArray materializes the whole list (LRANGE 0 -1) locally first.
    @Override
    public Object[] toArray() {
        List<V> list = readAll();
        return list.toArray();
    }

    @Override
    public List<V> readAll() {
        return get(readAllAsync());
    }

    @Override
    public RFuture<List<V>> readAllAsync() {
        return commandExecutor.readAsync(getName(), codec, LRANGE, getName(), 0, -1);
    }

    @Override
    public <T> T[] toArray(T[] a) {
        List<V> list = readAll();
        return list.toArray(a);
    }

    // add() appends to the tail (RPUSH); the Boolean reply mirrors List.add's contract.
    @Override
    public boolean add(V e) {
        return get(addAsync(e));
    }

    @Override
    public RFuture<Boolean> addAsync(V e) {
        return addAsync(e, RPUSH_BOOLEAN);
    }

    // Hook for subclasses (e.g. deques) to push with a different command.
    protected <T> RFuture<T> addAsync(V e, RedisCommand<T> command) {
        return commandExecutor.writeAsync(getName(), codec, command, getName(), e);
    }

    @Override
    public boolean remove(Object o) {
        return get(removeAsync(o));
    }

    // Removes the first occurrence only (LREM with count = 1).
    @Override
    public RFuture<Boolean> removeAsync(Object o) {
        return removeAsync(o, 1);
    }

    protected RFuture<Boolean> removeAsync(Object o, int count) {
        return commandExecutor.writeAsync(getName(), codec, LREM_SINGLE, getName(), count, o);
    }

    protected boolean remove(Object o, int count) {
        return get(removeAsync(o, count));
    }

    // Server-side containsAll: the Lua script crosses off each ARGV entry found
    // in the list and succeeds only if every argument was matched.
    @Override
    public RFuture<Boolean> containsAllAsync(Collection<?> c) {
        if (c.isEmpty()) {
            return newSucceededFuture(true);
        }
        return commandExecutor.evalReadAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN_WITH_VALUES,
                "local items = redis.call('lrange', KEYS[1], 0, -1) " +
                "for i=1, #items do " +
                "for j = 1, #ARGV, 1 do " +
                "if items[i] == ARGV[j] then " +
                "table.remove(ARGV, j) " +
                "end " +
                "end " +
                "end " +
                "return #ARGV == 0 and 1 or 0",
                Collections.<Object>singletonList(getName()), c.toArray());
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        return get(containsAllAsync(c));
    }

    @Override
    public boolean addAll(Collection<? extends V> c) {
        return get(addAllAsync(c));
    }

    // Bulk append via a single RPUSH; empty input short-circuits to `false`
    // (list unchanged), matching Collection.addAll semantics.
    @Override
    public RFuture<Boolean> addAllAsync(final Collection<? extends V> c) {
        if (c.isEmpty()) {
            return newSucceededFuture(false);
        }
        List<Object> args = new ArrayList<Object>(c.size() + 1);
        args.add(getName());
        args.addAll(c);
        return commandExecutor.writeAsync(getName(), codec, RPUSH_BOOLEAN, args.toArray());
    }

    // Positional insert. index == 0 is a plain LPUSH of the reversed elements;
    // otherwise a Lua script splits the list at `index`, re-appends the new
    // elements and then the old tail. rpush is batched in chunks of 5000
    // because Lua's unpack() is limited by the interpreter's stack size.
    public RFuture<Boolean> addAllAsync(int index, Collection<? extends V> coll) {
        if (index < 0) {
            throw new IndexOutOfBoundsException("index: " + index);
        }
        if (coll.isEmpty()) {
            return newSucceededFuture(false);
        }
        if (index == 0) { // prepend elements to list
            List<Object> elements = new ArrayList<Object>(coll);
            Collections.reverse(elements);
            elements.add(0, getName());
            return commandExecutor.writeAsync(getName(), codec, LPUSH_BOOLEAN, elements.toArray());
        }
        List<Object> args = new ArrayList<Object>(coll.size() + 1);
        args.add(index);
        args.addAll(coll);
        return commandExecutor.evalWriteAsync(getName(), codec, EVAL_BOOLEAN_ARGS2,
                "local ind = table.remove(ARGV, 1); " + // index is the first parameter
                "local size = redis.call('llen', KEYS[1]); " +
                "assert(tonumber(ind) <= size, 'index: ' .. ind .. ' but current size: ' .. size); " +
                "local tail = redis.call('lrange', KEYS[1], ind, -1); " +
                "redis.call('ltrim', KEYS[1], 0, ind - 1); " +
                "for i=1, #ARGV, 5000 do "
                    + "redis.call('rpush', KEYS[1], unpack(ARGV, i, math.min(i+4999, #ARGV))); "
                + "end " +
                "if #tail > 0 then " +
                    "for i=1, #tail, 5000 do "
                        + "redis.call('rpush', KEYS[1], unpack(tail, i, math.min(i+4999, #tail))); "
                    + "end "
                + "end;" +
                "return 1;",
                Collections.<Object>singletonList(getName()), args.toArray());
    }

    @Override
    public boolean addAll(int index, Collection<? extends V> coll) {
        return get(addAllAsync(index, coll));
    }

    // LREM each argument with count 0 (all occurrences); returns true when at
    // least one LREM reported exactly one removal.
    // NOTE(review): the script checks `== 1`, so an argument occurring more
    // than once removes all copies but does not by itself set the flag —
    // confirm this is intended before relying on the return value.
    @Override
    public RFuture<Boolean> removeAllAsync(Collection<?> c) {
        if (c.isEmpty()) {
            return newSucceededFuture(false);
        }
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN_WITH_VALUES,
                "local v = 0 " +
                "for i = 1, #ARGV, 1 do "
                    + "if redis.call('lrem', KEYS[1], 0, ARGV[i]) == 1 "
                    + "then v = 1 end "
                +"end "
                + "return v ",
                Collections.<Object>singletonList(getName()), c.toArray());
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        return get(removeAllAsync(c));
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        return get(retainAllAsync(c));
    }

    // retainAll with an empty argument collection clears the list (deletes the
    // key); otherwise a Lua script LREMs every element not present in ARGV.
    @Override
    public RFuture<Boolean> retainAllAsync(Collection<?> c) {
        if (c.isEmpty()) {
            return deleteAsync();
        }
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN_WITH_VALUES,
                "local changed = 0 " +
                "local items = redis.call('lrange', KEYS[1], 0, -1) "
                + "local i = 1 "
                + "while i <= #items do "
                    + "local element = items[i] "
                    + "local isInAgrs = false "
                    + "for j = 1, #ARGV, 1 do "
                        + "if ARGV[j] == element then "
                            + "isInAgrs = true "
                            + "break "
                        + "end "
                    + "end "
                    + "if isInAgrs == false then "
                        + "redis.call('LREM', KEYS[1], 0, element) "
                        + "changed = 1 "
                    + "end "
                    + "i = i + 1 "
                + "end "
                + "return changed ",
                Collections.<Object>singletonList(getName()), c.toArray());
    }

    // Clearing a Redis list is just deleting its key.
    @Override
    public void clear() {
        delete();
    }

    @Override
    public RFuture<V> getAsync(int index) {
        return commandExecutor.readAsync(getName(), codec, LINDEX, getName(), index);
    }

    // List.get(int): bounds-checked locally (extra size() round trip), then LINDEX.
    @Override
    public V get(int index) {
        checkIndex(index);
        return getValue(index);
    }

    V getValue(int index) {
        return get(getAsync(index));
    }

    private void checkIndex(int index) {
        int size = size();
        if (!isInRange(index, size))
            throw new IndexOutOfBoundsException("index: " + index + " but current size: "+ size);
    }

    private boolean isInRange(int index, int size) {
        return index >= 0 && index < size;
    }

    // set() returns the previous value, so it reads LINDEX and writes LSET
    // atomically in one script.
    @Override
    public V set(int index, V element) {
        checkIndex(index);
        return get(setAsync(index, element));
    }

    @Override
    public RFuture<V> setAsync(int index, V element) {
        return commandExecutor.evalWriteAsync(getName(), codec, new RedisCommand<Object>("EVAL", 5),
                "local v = redis.call('lindex', KEYS[1], ARGV[1]); " +
                "redis.call('lset', KEYS[1], ARGV[1], ARGV[2]); " +
                "return v",
                Collections.<Object>singletonList(getName()), index, element);
    }

    // fastSet skips returning the previous value — a single LSET.
    @Override
    public void fastSet(int index, V element) {
        get(fastSetAsync(index, element));
    }

    @Override
    public RFuture<Void> fastSetAsync(int index, V element) {
        return commandExecutor.writeAsync(getName(), codec, RedisCommands.LSET, getName(), index, element);
    }

    @Override
    public void add(int index, V element) {
        addAll(index, Collections.singleton(element));
    }

    @Override
    public V remove(int index) {
        return remove((long) index);
    }

    public V remove(long index) {
        return get(removeAsync(index));
    }

    // Positional remove. index 0 is a plain LPOP; otherwise the element is
    // overwritten with a sentinel value and LREM'd, since Redis has no
    // "remove by index" command.
    public RFuture<V> removeAsync(long index) {
        if (index == 0) {
            return commandExecutor.writeAsync(getName(), codec, LPOP, getName());
        }
        return commandExecutor.evalWriteAsync(getName(), codec, EVAL_OBJECT,
                "local v = redis.call('lindex', KEYS[1], ARGV[1]); " +
                "redis.call('lset', KEYS[1], ARGV[1], 'DELETED_BY_REDISSON');" +
                "redis.call('lrem', KEYS[1], 1, 'DELETED_BY_REDISSON');" +
                "return v",
                Collections.<Object>singletonList(getName()), index);
    }

    @Override
    public void fastRemove(int index) {
        get(fastRemoveAsync(index));
    }

    // Like removeAsync(long) but discards the removed value.
    @Override
    public RFuture<Void> fastRemoveAsync(long index) {
        return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_VOID,
                "redis.call('lset', KEYS[1], ARGV[1], 'DELETED_BY_REDISSON');" +
                "redis.call('lrem', KEYS[1], 1, 'DELETED_BY_REDISSON');",
                Collections.<Object>singletonList(getName()), index);
    }

    @Override
    public int indexOf(Object o) {
        return get(indexOfAsync(o));
    }

    // contains() is indexOf with a converter mapping "-1" to false, else true.
    @Override
    public RFuture<Boolean> containsAsync(Object o) {
        return indexOfAsync(o, new BooleanNumberReplayConvertor(-1L));
    }

    // Linear scan on the server; returns the 0-based index or -1 (then shaped
    // by the supplied converter).
    public <R> RFuture<R> indexOfAsync(Object o, Convertor<R> convertor) {
        return commandExecutor.evalReadAsync(getName(), codec, new RedisCommand<R>("EVAL", convertor, 4),
                "local key = KEYS[1] " +
                "local obj = ARGV[1] " +
                "local items = redis.call('lrange', key, 0, -1) " +
                "for i=1,#items do " +
                    "if items[i] == obj then " +
                        "return i - 1 " +
                    "end " +
                "end " +
                "return -1",
                Collections.<Object>singletonList(getName()), o);
    }

    @Override
    public RFuture<Integer> indexOfAsync(Object o) {
        return indexOfAsync(o, new IntegerReplayConvertor());
    }

    @Override
    public int lastIndexOf(Object o) {
        return get(lastIndexOfAsync(o));
    }

    // Same scan as indexOfAsync but iterating from the tail.
    @Override
    public RFuture<Integer> lastIndexOfAsync(Object o) {
        return commandExecutor.evalReadAsync(getName(), codec, new RedisCommand<Integer>("EVAL", new IntegerReplayConvertor(), 4),
                "local key = KEYS[1] " +
                "local obj = ARGV[1] " +
                "local items = redis.call('lrange', key, 0, -1) " +
                "for i = #items, 1, -1 do " +
                    "if items[i] == obj then " +
                        "return i - 1 " +
                    "end " +
                "end " +
                "return -1",
                Collections.<Object>singletonList(getName()), o);
    }

    public <R> RFuture<R> lastIndexOfAsync(Object o, Convertor<R> convertor) {
        return commandExecutor.evalReadAsync(getName(), codec, new RedisCommand<R>("EVAL", convertor, 4),
                "local key = KEYS[1] " +
                "local obj = ARGV[1] " +
                "local items = redis.call('lrange', key, 0, -1) " +
                "for i = #items, 1, -1 do " +
                    "if items[i] == obj then " +
                        "return i - 1 " +
                    "end " +
                "end " +
                "return -1",
                Collections.<Object>singletonList(getName()), o);
    }

    // Keeps only the elements in [fromIndex, toIndex] (LTRIM, inclusive bounds).
    @Override
    public void trim(int fromIndex, int toIndex) {
        get(trimAsync(fromIndex, toIndex));
    }

    @Override
    public RFuture<Void> trimAsync(int fromIndex, int toIndex) {
        return commandExecutor.writeAsync(getName(), codec, RedisCommands.LTRIM, getName(), fromIndex, toIndex);
    }

    @Override
    public ListIterator<V> listIterator() {
        return listIterator(0);
    }

    // Index-driven iterator: every hasNext/next pair issues a fresh LINDEX, so
    // iteration is weakly consistent with concurrent modifications. A null
    // LINDEX reply is treated as "past the end".
    @Override
    public ListIterator<V> listIterator(final int ind) {
        return new ListIterator<V>() {

            private V prevCurrentValue;            // cached by hasPrevious() for previous()
            private V nextCurrentValue;            // cached by hasNext() for next()
            private V currentValueHasRead;         // last value returned; null until next()/previous()
            private int currentIndex = ind - 1;    // index of the element last returned by next()
            private boolean hasBeenModified = true; // guards remove()/set() before a read

            @Override
            public boolean hasNext() {
                V val = RedissonList.this.getValue(currentIndex+1);
                if (val != null) {
                    nextCurrentValue = val;
                }
                return val != null;
            }

            @Override
            public V next() {
                if (nextCurrentValue == null && !hasNext()) {
                    throw new NoSuchElementException("No such element at index " + currentIndex);
                }
                currentIndex++;
                currentValueHasRead = nextCurrentValue;
                nextCurrentValue = null;
                hasBeenModified = false;
                return currentValueHasRead;
            }

            @Override
            public void remove() {
                if (currentValueHasRead == null) {
                    throw new IllegalStateException("Neither next nor previous have been called");
                }
                if (hasBeenModified) {
                    throw new IllegalStateException("Element been already deleted");
                }
                RedissonList.this.remove(currentIndex);
                currentIndex--;
                hasBeenModified = true;
                currentValueHasRead = null;
            }

            @Override
            public boolean hasPrevious() {
                if (currentIndex < 0) {
                    return false;
                }
                V val = RedissonList.this.getValue(currentIndex);
                if (val != null) {
                    prevCurrentValue = val;
                }
                return val != null;
            }

            @Override
            public V previous() {
                if (prevCurrentValue == null && !hasPrevious()) {
                    throw new NoSuchElementException("No such element at index " + currentIndex);
                }
                currentIndex--;
                hasBeenModified = false;
                currentValueHasRead = prevCurrentValue;
                prevCurrentValue = null;
                return currentValueHasRead;
            }

            @Override
            public int nextIndex() {
                return currentIndex + 1;
            }

            @Override
            public int previousIndex() {
                return currentIndex;
            }

            @Override
            public void set(V e) {
                if (hasBeenModified) {
                    throw new IllegalStateException();
                }
                RedissonList.this.fastSet(currentIndex, e);
            }

            @Override
            public void add(V e) {
                RedissonList.this.add(currentIndex+1, e);
                currentIndex++;
                hasBeenModified = true;
            }
        };
    }

    // subList is backed by a RedissonSubList view over the same key; bounds
    // are validated against the current (racy) size.
    @Override
    public RList<V> subList(int fromIndex, int toIndex) {
        int size = size();
        if (fromIndex < 0 || toIndex > size) {
            throw new IndexOutOfBoundsException("fromIndex: " + fromIndex + " toIndex: " + toIndex + " size: " + size);
        }
        if (fromIndex > toIndex) {
            throw new IllegalArgumentException("fromIndex: " + fromIndex + " toIndex: " + toIndex);
        }
        return new RedissonSubList<V>(codec, commandExecutor, getName(), fromIndex, toIndex);
    }

    // toString/equals/hashCode follow the java.util.AbstractList conventions,
    // but each element access goes over the network via iterator().
    public String toString() {
        Iterator<V> it = iterator();
        if (! it.hasNext())
            return "[]";

        StringBuilder sb = new StringBuilder();
        sb.append('[');
        for (;;) {
            V e = it.next();
            sb.append(e == this ? "(this Collection)" : e);
            if (! it.hasNext())
                return sb.append(']').toString();
            sb.append(',').append(' ');
        }
    }

    @Override
    public boolean equals(Object o) {
        if (o == this)
            return true;
        if (!(o instanceof List))
            return false;

        Iterator<V> e1 = iterator();
        Iterator<?> e2 = ((List<?>) o).iterator();
        while (e1.hasNext() && e2.hasNext()) {
            V o1 = e1.next();
            Object o2 = e2.next();
            if (!(o1==null ? o2==null : o1.equals(o2)))
                return false;
        }
        return !(e1.hasNext() || e2.hasNext());
    }

    @Override
    public int hashCode() {
        int hashCode = 1;
        for (V e : this) {
            hashCode = 31*hashCode + (e==null ? 0 : e.hashCode());
        }
        return hashCode;
    }

    // LINSERT-based positional inserts relative to an existing element.
    @Override
    public RFuture<Integer> addAfterAsync(V elementToFind, V element) {
        return commandExecutor.writeAsync(getName(), codec, RedisCommands.LINSERT_INT, getName(), "AFTER", elementToFind, element);
    }

    @Override
    public RFuture<Integer> addBeforeAsync(V elementToFind, V element) {
        return commandExecutor.writeAsync(getName(), codec, RedisCommands.LINSERT_INT, getName(), "BEFORE", elementToFind, element);
    }

    @Override
    public int addAfter(V elementToFind, V element) {
        return get(addAfterAsync(elementToFind, element));
    }

    @Override
    public int addBefore(V elementToFind, V element) {
        return get(addBeforeAsync(elementToFind, element));
    }

    // readSort*/sortTo* map onto the Redis SORT command with optional BY
    // pattern, LIMIT offset/count, GET patterns, and STORE destination.
    @Override
    public List<V> readSort(SortOrder order) {
        return get(readSortAsync(order));
    }

    @Override
    public RFuture<List<V>> readSortAsync(SortOrder order) {
        return commandExecutor.readAsync(getName(), codec, RedisCommands.SORT_LIST, getName(), order);
    }

    @Override
    public List<V> readSort(SortOrder order, int offset, int count) {
        return get(readSortAsync(order, offset, count));
    }

    @Override
    public RFuture<List<V>> readSortAsync(SortOrder order, int offset, int count) {
        return commandExecutor.readAsync(getName(), codec, RedisCommands.SORT_LIST, getName(), "LIMIT", offset, count, order);
    }

    @Override
    public List<V> readSort(String byPattern, SortOrder order) {
        return get(readSortAsync(byPattern, order));
    }

    @Override
    public RFuture<List<V>> readSortAsync(String byPattern, SortOrder order) {
        return commandExecutor.readAsync(getName(), codec, RedisCommands.SORT_LIST, getName(), "BY", byPattern, order);
    }

    @Override
    public List<V> readSort(String byPattern, SortOrder order, int offset, int count) {
        return get(readSortAsync(byPattern, order, offset, count));
    }

    @Override
    public RFuture<List<V>> readSortAsync(String byPattern, SortOrder order, int offset, int count) {
        return commandExecutor.readAsync(getName(), codec, RedisCommands.SORT_LIST, getName(), "BY", byPattern, "LIMIT", offset, count, order);
    }

    @Override
    public <T> Collection<T> readSort(String byPattern, List<String> getPatterns, SortOrder order) {
        return (Collection<T>)get(readSortAsync(byPattern, getPatterns, order));
    }

    @Override
    public <T> RFuture<Collection<T>> readSortAsync(String byPattern, List<String> getPatterns, SortOrder order) {
        // -1/-1 means "no LIMIT clause".
        return readSortAsync(byPattern, getPatterns, order, -1, -1);
    }

    @Override
    public <T> Collection<T> readSort(String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
        return (Collection<T>)get(readSortAsync(byPattern, getPatterns, order, offset, count));
    }

    // Assembles the full SORT argument list; offset/count of -1 suppress LIMIT.
    @Override
    public <T> RFuture<Collection<T>> readSortAsync(String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
        List<Object> params = new ArrayList<Object>();
        params.add(getName());
        if (byPattern != null) {
            params.add("BY");
            params.add(byPattern);
        }
        if (offset != -1 && count != -1) {
            params.add("LIMIT");
        }
        if (offset != -1) {
            params.add(offset);
        }
        if (count != -1) {
            params.add(count);
        }
        for (String pattern : getPatterns) {
            params.add("GET");
            params.add(pattern);
        }
        params.add(order);
        return commandExecutor.readAsync(getName(), codec, RedisCommands.SORT_LIST, params.toArray());
    }

    @Override
    public int sortTo(String destName, SortOrder order) {
        return get(sortToAsync(destName, order));
    }

    @Override
    public RFuture<Integer> sortToAsync(String destName, SortOrder order) {
        return sortToAsync(destName, null, Collections.<String>emptyList(), order, -1, -1);
    }

    @Override
    public int sortTo(String destName, SortOrder order, int offset, int count) {
        return get(sortToAsync(destName, order, offset, count));
    }

    @Override
    public RFuture<Integer> sortToAsync(String destName, SortOrder order, int offset, int count) {
        return sortToAsync(destName, null, Collections.<String>emptyList(), order, offset, count);
    }

    @Override
    public int sortTo(String destName, String byPattern, SortOrder order, int offset, int count) {
        return get(sortToAsync(destName, byPattern, order, offset, count));
    }

    @Override
    public int sortTo(String destName, String byPattern, SortOrder order) {
        return get(sortToAsync(destName, byPattern, order));
    }

    @Override
    public RFuture<Integer> sortToAsync(String destName, String byPattern, SortOrder order) {
        return sortToAsync(destName, byPattern, Collections.<String>emptyList(), order, -1, -1);
    }

    @Override
    public RFuture<Integer> sortToAsync(String destName, String byPattern, SortOrder order, int offset, int count) {
        return sortToAsync(destName, byPattern, Collections.<String>emptyList(), order, offset, count);
    }

    @Override
    public int sortTo(String destName, String byPattern, List<String> getPatterns, SortOrder order) {
        return get(sortToAsync(destName, byPattern, getPatterns, order));
    }

    @Override
    public RFuture<Integer> sortToAsync(String destName, String byPattern, List<String> getPatterns, SortOrder order) {
        return sortToAsync(destName, byPattern, getPatterns, order, -1, -1);
    }

    @Override
    public int sortTo(String destName, String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
        return get(sortToAsync(destName, byPattern, getPatterns, order, offset, count));
    }

    // Same argument assembly as readSortAsync, plus the STORE destination;
    // returns the number of elements stored.
    @Override
    public RFuture<Integer> sortToAsync(String destName, String byPattern, List<String> getPatterns, SortOrder order, int offset, int count) {
        List<Object> params = new ArrayList<Object>();
        params.add(getName());
        if (byPattern != null) {
            params.add("BY");
            params.add(byPattern);
        }
        if (offset != -1 && count != -1) {
            params.add("LIMIT");
        }
        if (offset != -1) {
            params.add(offset);
        }
        if (count != -1) {
            params.add(count);
        }
        for (String pattern : getPatterns) {
            params.add("GET");
            params.add(pattern);
        }
        params.add(order);
        params.add("STORE");
        params.add(destName);
        return commandExecutor.writeAsync(getName(), codec, RedisCommands.SORT_TO, params.toArray());
    }

}
| |
/*
Copyright 2017 karol-202
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package pl.karol202.evolution.entity;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import pl.karol202.evolution.entity.behaviour.BehaviourState;
import pl.karol202.evolution.entity.behaviour.ReproductionsLoader;
import pl.karol202.evolution.entity.behaviour.SavableBehaviour;
import pl.karol202.evolution.genes.GenesLoader;
import pl.karol202.evolution.genes.Genotype;
import pl.karol202.evolution.utils.Vector2;
import java.util.HashMap;
import java.util.Map;
import static pl.karol202.evolution.simulation.SimulationManager.*;
/**
 * Loads entities from and saves entities to the simulation's XML document.
 * Element/attribute helpers (getElement, get*Attribute, set*Attribute) come
 * from the static import of SimulationManager.
 */
public class EntitiesLoader
{
	private Entities entities;
	private GenesLoader genesLoader;
	private ReproductionsLoader reproductionsLoader;

	// Target document for serialization; set in getEntitiesElement().
	private Document document;

	public EntitiesLoader(Entities entities)
	{
		this.entities = entities;
		this.genesLoader = new GenesLoader();
		this.reproductionsLoader = new ReproductionsLoader(entities);
	}

	/**
	 * Replaces the current entity population with the one described by the
	 * given &lt;entities&gt; child of elementWorld. Entities are created first
	 * (so cross-references resolve), then reproductions are parsed, and only
	 * afterwards each entity's components and behaviours are restored.
	 */
	public void parseEntities(Element elementWorld)
	{
		Element elementEntities = getElement(elementWorld, "entities");
		Element elementReproductions = getElement(elementEntities, "reproductions");
		entities.removeAllEntities();
		Map<Element, Entity> entitiesMap = new HashMap<>();
		NodeList entitiesNodes = elementEntities.getChildNodes();
		for(int i = 0; i < entitiesNodes.getLength(); i++)
		{
			// NOTE(review): raw cast assumes every child node is an Element —
			// whitespace/text nodes would throw ClassCastException; presumably
			// saved documents contain none. Confirm against the writer side.
			Element element = (Element) entitiesNodes.item(i);
			if(element == elementReproductions) continue;
			Entity entity = parseEntity(element);
			boolean selected = getBooleanAttribute(element, "selected");
			entities.addEntityInstantly(entity, selected);
			entitiesMap.put(element, entity);
		}
		reproductionsLoader.parseReproductions(elementReproductions);
		// Second pass: components/behaviours may reference other entities,
		// so they are restored only after all entities exist.
		for(Map.Entry<Element, Entity> entry : entitiesMap.entrySet())
		{
			Element element = entry.getKey();
			Entity entity = entry.getValue();
			parseEntityComponents(entity, element);
			parseEntityBehaviours(entity, element);
		}
		reproductionsLoader.onEntitiesLoadingEnded();
	}

	// Builds one Entity from its <entity> element: position, genotype,
	// born position, and the scalar state attributes.
	private Entity parseEntity(Element elementEntity)
	{
		float x = getFloatAttribute(elementEntity, "x");
		float y = getFloatAttribute(elementEntity, "y");
		Genotype genotype = genesLoader.parseGenotype(elementEntity);
		Vector2 bornPosition = parseEntityBornPosition(elementEntity);
		Entity entity = new Entity(entities, x, y, genotype, bornPosition);
		entity.setEnergy(getFloatAttribute(elementEntity, "energy"));
		entity.setTimeOfLife(getFloatAttribute(elementEntity, "timeOfLife"));
		entity.setReproduceCooldown(getFloatAttribute(elementEntity, "reproduceCooldown"));
		return entity;
	}

	private Vector2 parseEntityBornPosition(Element elementEntity)
	{
		Element elementVector = getElement(elementEntity, "bornPosition");
		float x = getFloatAttribute(elementVector, "x");
		float y = getFloatAttribute(elementVector, "y");
		return new Vector2(x, y);
	}

	private void parseEntityComponents(Entity entity, Element elementEntity)
	{
		Element elementComponents = getElement(elementEntity, "components");
		NodeList componentsNodes = elementComponents.getChildNodes();
		for(int i = 0; i < componentsNodes.getLength(); i++)
		{
			// NOTE(review): same raw Element cast assumption as in parseEntities.
			Element elementComponent = (Element) componentsNodes.item(i);
			parseEntityComponent(entity, elementComponent);
		}
	}

	// Components are matched to their saved state by fully-qualified class
	// name, which the writer uses as the element's tag name.
	private void parseEntityComponent(Entity entity, Element elementComponent)
	{
		ComponentState state = new ComponentState(document, elementComponent);
		String componentName = elementComponent.getTagName();
		entity.getSavableComponentsStream().filter(sc -> sc.getClass().getName().equals(componentName))
									       .forEach(sc -> sc.loadState(state));
	}

	private void parseEntityBehaviours(Entity entity, Element elementEntity)
	{
		Element elementBehaviours = getElement(elementEntity, "behaviours");
		entity.setCurrentBehaviourId(getIntAttribute(elementBehaviours, "currentBehaviour"));
		NodeList behavioursList = elementBehaviours.getChildNodes();
		for(int i = 0; i < behavioursList.getLength(); i++)
		{
			Element elementBehaviour = (Element) behavioursList.item(i);
			parseEntityBehaviour(entity, elementBehaviour);
		}
	}

	// Behaviours are matched to their saved state by numeric id.
	private void parseEntityBehaviour(Entity entity, Element elementBehaviour)
	{
		BehaviourState state = new BehaviourState(elementBehaviour);
		int behaviourId = getIntAttribute(elementBehaviour, "id");
		entity.getSavableBehavioursStream().filter(sb -> sb.getId() == behaviourId).forEach(sb -> sb.loadState(state));
	}

	/**
	 * Serializes the current entity population into a new &lt;entities&gt;
	 * element of the given document. The document is remembered so that the
	 * per-entity element builders can create child elements.
	 */
	public Element getEntitiesElement(Document document)
	{
		this.document = document;
		return createEntitiesElement();
	}

	private Element createEntitiesElement()
	{
		Element elementEntities = document.createElement("entities");
		entities.getEntitiesStream().map(this::createEntityElement).forEach(elementEntities::appendChild);
		elementEntities.appendChild(reproductionsLoader.getReproductionsElement(document));
		return elementEntities;
	}

	// Mirror of parseEntity: writes attributes and child elements in the
	// shape the parse* methods expect on reload.
	private Element createEntityElement(Entity entity)
	{
		Element elementEntity = document.createElement("entity");
		setBooleanAttribute(elementEntity, "selected", entities.isEntitySelected(entity));
		setNumberAttribute(elementEntity, "x", entity.getX());
		setNumberAttribute(elementEntity, "y", entity.getY());
		setNumberAttribute(elementEntity, "energy", entity.getEnergy());
		setNumberAttribute(elementEntity, "timeOfLife", entity.getTimeOfLife());
		setNumberAttribute(elementEntity, "reproduceCooldown", entity.getReproduceCooldown());
		elementEntity.appendChild(genesLoader.getGenotypeElement(document, entity.getGenotype()));
		elementEntity.appendChild(createEntityBornPosition(entity.getBornPosition()));
		elementEntity.appendChild(createEntityComponentsElement(entity));
		elementEntity.appendChild(createEntityBehavioursElement(entity));
		return elementEntity;
	}

	private Element createEntityBornPosition(Vector2 bornPosition)
	{
		Element elementBornPosition = document.createElement("bornPosition");
		setNumberAttribute(elementBornPosition, "x", bornPosition.getX());
		setNumberAttribute(elementBornPosition, "y", bornPosition.getY());
		return elementBornPosition;
	}

	private Element createEntityComponentsElement(Entity entity)
	{
		Element elementComponents = document.createElement("components");
		entity.getSavableComponentsStream().map(this::createComponentElement).forEach(elementComponents::appendChild);
		return elementComponents;
	}

	private Element createComponentElement(SavableComponent component)
	{
		ComponentState state = ComponentState.createForComponent(component, document);
		return state.getElement();
	}

	private Element createEntityBehavioursElement(Entity entity)
	{
		Element elementBehaviours = document.createElement("behaviours");
		setNumberAttribute(elementBehaviours, "currentBehaviour", entity.getCurrentBehaviourId());
		entity.getSavableBehavioursStream().map(this::createBehaviourElement).forEach(elementBehaviours::appendChild);
		return elementBehaviours;
	}

	private Element createBehaviourElement(SavableBehaviour behaviour)
	{
		BehaviourState state = BehaviourState.createForBehaviour(behaviour, document);
		return state.getElement();
	}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* DeleteRoute.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
*/
package com.amazon.ec2;
/**
* DeleteRoute bean class
*/
/**
 * DeleteRoute bean class.
 *
 * ADB databinding wrapper for the {@code DeleteRoute} element of the EC2
 * 2012-08-15 WSDL. Originally auto-generated by Apache Axis2; the only hand
 * change is the null guard in {@link #getPullParser} so an unset property
 * fails with an ADBException (matching {@link #serialize}) instead of a
 * NullPointerException.
 */
public class DeleteRoute
        implements org.apache.axis2.databinding.ADBBean{

    /** Qualified name of the XML element this bean maps to. */
    public static final javax.xml.namespace.QName MY_QNAME = new javax.xml.namespace.QName(
            "http://ec2.amazonaws.com/doc/2012-08-15/",
            "DeleteRoute",
            "ns1");

    /**
     * Returns the canonical prefix for the EC2 namespace, or a freshly
     * generated unique prefix for any other namespace.
     */
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if(namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")){
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for DeleteRoute
     */
    protected com.amazon.ec2.DeleteRouteType localDeleteRoute ;

    /**
     * Auto generated getter method
     * @return com.amazon.ec2.DeleteRouteType
     */
    public com.amazon.ec2.DeleteRouteType getDeleteRoute(){
        return localDeleteRoute;
    }

    /**
     * Auto generated setter method
     * @param param DeleteRoute
     */
    public void setDeleteRoute(com.amazon.ec2.DeleteRouteType param){
        this.localDeleteRoute=param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try{
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        }catch(java.lang.IllegalArgumentException e){
            // The reader does not recognise the property; treat it as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement named {@link #MY_QNAME}.
     *
     * @param parentQName parent element name (unused; the element is always MY_QNAME)
     * @param factory factory used to build the OM tree
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement (
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{

        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this,MY_QNAME){
            public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
                DeleteRoute.this.serialize(MY_QNAME,factory,xmlWriter);
            }
        };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                MY_QNAME,factory,dataSource);
    }

    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
        serialize(parentQName,factory,xmlWriter,false);
    }

    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

        //We can safely assume an element has only one type associated with it
        if (localDeleteRoute==null){
            throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
        }
        localDeleteRoute.serialize(MY_QNAME,factory,xmlWriter);
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                                java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
        if (namespace.equals(""))
        {
            xmlWriter.writeAttribute(attName,attValue);
        }
        else
        {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace,attName,attValue);
        }
    }

    /**
     * Util method to write a QName-valued attribute, registering a prefix
     * for the QName's namespace when required.
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix,namespaceURI);
            }

            if (prefix.trim().length() > 0){
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix,namespaceURI);
                    }

                    if (prefix.trim().length() > 0){
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            // Keep generating until the prefix is not already bound in this context.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     * @throws org.apache.axis2.databinding.ADBException if the wrapped
     *         DeleteRoute property has not been set
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException{
        //We can safely assume an element has only one type associated with it
        // Mirror serialize(): report an unset property as an ADBException
        // rather than letting a NullPointerException escape.
        if (localDeleteRoute==null){
            throw new org.apache.axis2.databinding.ADBException("Property cannot be null!");
        }
        return localDeleteRoute.getPullParser(MY_QNAME);
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory{

        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DeleteRoute parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
            DeleteRoute object =
                    new DeleteRoute();

            try {
                // Advance to the first start (or end) element.
                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                while(!reader.isEndElement()) {
                    if (reader.isStartElement() ){
                        if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/","DeleteRoute").equals(reader.getName())){
                            object.setDeleteRoute(com.amazon.ec2.DeleteRouteType.Factory.parse(reader));
                        } // End of if for expected property start element
                        else{
                            // A start element we are not expecting indicates an invalid parameter was passed
                            throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                        }
                    } else {
                        reader.next();
                    }
                } // end of while loop
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }
    }//end of factory class
}
| |
/*
* Copyright (c) 2011-2014, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.sfm.d2;
import boofcv.abst.sfm.d2.ImageMotion2D;
import boofcv.alg.distort.DistortImageOps;
import boofcv.alg.distort.ImageDistort;
import boofcv.alg.misc.GImageMiscOps;
import boofcv.struct.distort.PixelTransform_F32;
import boofcv.struct.image.ImageBase;
import georegression.metric.Area2D_F64;
import georegression.struct.InvertibleTransform;
import georegression.struct.homography.Homography2D_F64;
import georegression.struct.point.Point2D_F64;
import georegression.struct.shapes.RectangleLength2D_I32;
/**
* Stitches together sequences of images using {@link ImageMotion2D}, typically used for image stabilization
* and creating mosaics. Internally any motion model in the Homogeneous family can be used. For convenience,
* those models are converted into a {@link Homography2D_F64} on output.
*
 * The size of the stitch region is specified using {@link #configure(int, int, georegression.struct.InvertibleTransform)},
 * which must be called before any images are processed. One of its parameters is an initial transform, which
 * can be used to scale, translate, or otherwise transform the input image.
*
* A sudden change or jump in the shape of the view area can be an indication of a bad motion estimate. If a large
* jump larger than the user specified threshold is detected then {@link #process(boofcv.struct.image.ImageBase)}
* will return false.
*
* @author Peter Abeles
*/
public class StitchingFromMotion2D<I extends ImageBase, IT extends InvertibleTransform>
{
	// REFERENCE FRAME NOTES:
	//
	// World references to the stitched image
	// Initial is the first video frame in video coordinates
	// Current is the current video frame in video coordinates

	// estimates image motion
	private ImageMotion2D<I,IT> motion;
	// renders the distorted image according to results from motion
	private ImageDistort<I,I> distorter;
	// converts different types of motion models into other formats
	private StitchingTransform<IT> converter;

	// Transform from first video frame to the initial location in the stitched image
	private IT worldToInit;
	// size of the stitch image
	private int widthStitch, heightStitch;

	// Largest allowed fractional change in view area before a fault is declared
	private double maxJumpFraction;
	// image corners are used to detect large motions
	private Corners corners = new Corners();
	// size of view area in previous update
	private double previousArea;

	// storage for the transform from current frame to the initial frame
	private IT worldToCurr;

	private PixelTransform_F32 tranWorldToCurr;
	private PixelTransform_F32 tranCurrToWorld;

	// storage for the stitched image; lazily allocated on the first process() call
	private I stitchedImage;
	private I workImage;

	// true until the first frame's corners have been recorded by checkLargeMotion()
	private boolean first = true;

	/**
	 * Provides internal algorithms and tuning parameters.
	 *
	 * @param motion Estimates image motion
	 * @param distorter Applies found transformation to stitch images
	 * @param converter Converts internal model into a homogenous transformation
	 * @param maxJumpFraction If the view area changes by more than this fraction a fault is declared
	 */
	public StitchingFromMotion2D(ImageMotion2D<I, IT> motion,
								 ImageDistort<I,I> distorter,
								 StitchingTransform<IT> converter ,
								 double maxJumpFraction )
	{
		this.motion = motion;
		this.distorter = distorter;
		this.converter = converter;
		this.maxJumpFraction = maxJumpFraction;

		worldToCurr = (IT)motion.getFirstToCurrent().createInstance();
	}

	/**
	 * Specifies size of stitch image and the location of the initial coordinate system.
	 *
	 * @param widthStitch Width of the image being stitched into
	 * @param heightStitch Height of the image being stitched into
	 * @param worldToInit (Option) Used to change the location of the initial frame in stitched image.
	 * null means no transform.
	 */
	public void configure( int widthStitch, int heightStitch , IT worldToInit ) {
		this.worldToInit = (IT)worldToCurr.createInstance();
		if( worldToInit != null )
			this.worldToInit.set(worldToInit);
		this.widthStitch = widthStitch;
		this.heightStitch = heightStitch;
	}

	/**
	 * Estimates the image motion and updates stitched image. If it is unable to estimate the motion then false
	 * is returned and the stitched image is left unmodified. If false is returned then in most situations it is
	 * best to call {@link #reset()} and start over.
	 *
	 * @param image Next image in the sequence
	 * @return True if the stitched image is updated and false if it failed and was not
	 */
	public boolean process( I image ) {
		// lazily allocate the stitch buffers on the first frame
		if( stitchedImage == null ) {
			stitchedImage = (I)image._createNew(widthStitch, heightStitch);
			workImage = (I)image._createNew(widthStitch, heightStitch);
		}

		if( motion.process(image) ) {
			update(image);

			// check to see if an unstable and improbably solution was generated
			return !checkLargeMotion(image.width, image.height);
		} else {
			return false;
		}
	}

	/**
	 * Throws away current results and starts over again
	 */
	public void reset() {
		// stitchedImage is lazily allocated in process(); guard against a
		// reset() issued before the first frame has ever been processed.
		if( stitchedImage != null )
			GImageMiscOps.fill(stitchedImage, 0);
		motion.reset();
		worldToCurr.reset();
		first = true;
	}

	/**
	 * Looks for sudden large changes in corner location to detect motion estimation faults.
	 * @param width image width
	 * @param height image height
	 * @return true for fault
	 */
	private boolean checkLargeMotion( int width , int height ) {
		if( first ) {
			// nothing to compare against yet; just record the initial view area
			getImageCorners(width,height,corners);
			previousArea = computeArea(corners);
			first = false;
		} else {
			getImageCorners(width,height,corners);

			double area = computeArea(corners);

			// symmetric relative change: grows and shrinks are treated alike
			double change = Math.max(area/previousArea,previousArea/area)-1;
			if( change > maxJumpFraction ) {
				return true;
			}
			previousArea = area;
		}

		return false;
	}

	// Area of the quadrilateral formed by the four warped image corners,
	// computed as two triangles.
	private double computeArea( Corners c ) {
		return Area2D_F64.triangle(c.p0,c.p1,c.p2) +
				Area2D_F64.triangle(c.p0,c.p2,c.p3);
	}

	/**
	 * Adds the latest image into the stitched image
	 *
	 * @param image Frame to render into the stitched image
	 */
	private void update(I image) {
		computeCurrToInit_PixelTran();

		// only process a cropped portion to speed up processing
		RectangleLength2D_I32 box = DistortImageOps.boundBox(image.width, image.height,
				stitchedImage.width, stitchedImage.height, tranCurrToWorld);

		int x0 = box.x0;
		int y0 = box.y0;
		int x1 = box.x0 + box.width;
		int y1 = box.y0 + box.height;

		distorter.setModel(tranWorldToCurr);
		distorter.apply(image, stitchedImage,x0,y0,x1,y1);
	}

	// Refreshes the pixel transforms between the stitched (world) frame and
	// the current frame, in both directions.
	private void computeCurrToInit_PixelTran() {
		IT initToCurr = motion.getFirstToCurrent();
		worldToInit.concat(initToCurr, worldToCurr);

		tranWorldToCurr = converter.convertPixel(worldToCurr,tranWorldToCurr);

		IT currToWorld = (IT) this.worldToCurr.invert(null);

		tranCurrToWorld = converter.convertPixel(currToWorld, tranCurrToWorld);
	}

	/**
	 * Sets the current image to be the origin of the stitched coordinate system. The background is filled
	 * with a value of 0.
	 * Must be called after {@link #process(boofcv.struct.image.ImageBase)}.
	 */
	public void setOriginToCurrent() {
		IT currToWorld = (IT)worldToCurr.invert(null);
		IT oldWorldToNewWorld = (IT) worldToInit.concat(currToWorld,null);

		PixelTransform_F32 newToOld = converter.convertPixel(oldWorldToNewWorld,null);

		// fill in the background color
		GImageMiscOps.fill(workImage, 0);
		// render the transform
		distorter.setModel(newToOld);
		distorter.apply(stitchedImage, workImage);

		// swap the two images
		I s = workImage;
		workImage = stitchedImage;
		stitchedImage = s;

		// have motion estimates be relative to this frame
		motion.setToFirst();
		first = true;

		computeCurrToInit_PixelTran();
	}

	/**
	 * Resizes the stitch image. If no transform is provided then the old stitch region is simply
	 * places on top of the new one and copied. Pixels which do not exist in the old image are filled with zero.
	 *
	 * @param widthStitch The new width of the stitch image.
	 * @param heightStitch The new height of the stitch image.
	 * @param newToOldStitch (Optional) Transform from new stitch image pixels to old stitch pixels. Can be null.
	 */
	public void resizeStitchImage( int widthStitch, int heightStitch , IT newToOldStitch ) {

		// copy the old image into the new one
		workImage.reshape(widthStitch,heightStitch);
		GImageMiscOps.fill(workImage, 0);
		if( newToOldStitch != null ) {
			PixelTransform_F32 newToOld = converter.convertPixel(newToOldStitch,null);
			distorter.setModel(newToOld);
			distorter.apply(stitchedImage, workImage);

			// update the transforms
			IT tmp = (IT)worldToCurr.createInstance();
			newToOldStitch.concat(worldToInit, tmp);
			worldToInit.set(tmp);

			computeCurrToInit_PixelTran();
		} else {
			// no transform: overlay the old stitch region at the origin
			int overlapWidth = Math.min(widthStitch,stitchedImage.width);
			int overlapHeight = Math.min(heightStitch,stitchedImage.height);
			GImageMiscOps.copy(0,0,0,0,overlapWidth,overlapHeight,stitchedImage,workImage);
		}
		stitchedImage.reshape(widthStitch,heightStitch);
		I tmp = stitchedImage;
		stitchedImage = workImage;
		workImage = tmp;

		this.widthStitch = widthStitch;
		this.heightStitch = heightStitch;
	}

	/**
	 * Returns the location of the input image's corners inside the stitch image.
	 *
	 * @param width width of the input image
	 * @param height height of the input image
	 * @param corners (Optional) storage for the result. Can be null.
	 * @return image corners
	 */
	public Corners getImageCorners( int width , int height , Corners corners ) {

		if( corners == null )
			corners = new Corners();

		int w = width;
		int h = height;

		tranCurrToWorld.compute(0,0); corners.p0.set(tranCurrToWorld.distX, tranCurrToWorld.distY);
		tranCurrToWorld.compute(w,0); corners.p1.set(tranCurrToWorld.distX, tranCurrToWorld.distY);
		tranCurrToWorld.compute(w,h); corners.p2.set(tranCurrToWorld.distX, tranCurrToWorld.distY);
		tranCurrToWorld.compute(0,h); corners.p3.set(tranCurrToWorld.distX, tranCurrToWorld.distY);

		return corners;
	}

	/**
	 * Transform from world coordinate system into the current image frame.
	 *
	 * @return Transformation
	 */
	public Homography2D_F64 getWorldToCurr( Homography2D_F64 storage ) {
		return converter.convertH(worldToCurr,storage);
	}

	public IT getWorldToCurr() {
		return worldToCurr;
	}

	public I getStitchedImage() {
		return stitchedImage;
	}

	public ImageMotion2D<I, IT> getMotion() {
		return motion;
	}

	/** The four warped corners of the input image inside the stitched image. */
	public static class Corners {
		public Point2D_F64 p0 = new Point2D_F64();
		public Point2D_F64 p1 = new Point2D_F64();
		public Point2D_F64 p2 = new Point2D_F64();
		public Point2D_F64 p3 = new Point2D_F64();
	}
}
| |
/*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.heuristic.selector.value.chained;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import org.optaplanner.core.config.heuristic.selector.common.SelectionCacheType;
import org.optaplanner.core.impl.domain.variable.descriptor.GenuineVariableDescriptor;
import org.optaplanner.core.impl.domain.variable.inverserelation.SingletonInverseVariableDemand;
import org.optaplanner.core.impl.domain.variable.inverserelation.SingletonInverseVariableSupply;
import org.optaplanner.core.impl.domain.variable.supply.SupplyManager;
import org.optaplanner.core.impl.heuristic.selector.AbstractSelector;
import org.optaplanner.core.impl.heuristic.selector.common.SelectionCacheLifecycleBridge;
import org.optaplanner.core.impl.heuristic.selector.common.SelectionCacheLifecycleListener;
import org.optaplanner.core.impl.heuristic.selector.common.iterator.UpcomingSelectionIterator;
import org.optaplanner.core.impl.heuristic.selector.entity.pillar.DefaultPillarSelector;
import org.optaplanner.core.impl.heuristic.selector.value.EntityIndependentValueSelector;
import org.optaplanner.core.impl.score.director.InnerScoreDirector;
import org.optaplanner.core.impl.solver.random.RandomUtils;
import org.optaplanner.core.impl.solver.scope.DefaultSolverScope;
/**
* This is the common {@link SubChainSelector} implementation.
*/
public class DefaultSubChainSelector extends AbstractSelector
implements SubChainSelector, SelectionCacheLifecycleListener {
protected static final SelectionCacheType CACHE_TYPE = SelectionCacheType.STEP;
protected final EntityIndependentValueSelector valueSelector;
protected final boolean randomSelection;
protected SingletonInverseVariableSupply inverseVariableSupply;
/**
* Unlike {@link DefaultPillarSelector#minimumSubPillarSize} and {@link DefaultPillarSelector#maximumSubPillarSize},
* the sub selection here is a sequence. For example from ABCDE, it can select BCD, but not ACD.
*/
protected final int minimumSubChainSize;
protected final int maximumSubChainSize;
protected List<SubChain> anchorTrailingChainList = null;
public DefaultSubChainSelector(EntityIndependentValueSelector valueSelector, boolean randomSelection,
int minimumSubChainSize, int maximumSubChainSize) {
this.valueSelector = valueSelector;
this.randomSelection = randomSelection;
if (!valueSelector.getVariableDescriptor().isChained()) {
throw new IllegalArgumentException("The selector (" + this
+ ")'s valueSelector (" + valueSelector
+ ") must have a chained variableDescriptor chained ("
+ valueSelector.getVariableDescriptor().isChained() + ").");
}
if (valueSelector.isNeverEnding()) {
throw new IllegalStateException("The selector (" + this
+ ") has a valueSelector (" + valueSelector
+ ") with neverEnding (" + valueSelector.isNeverEnding() + ").");
}
phaseLifecycleSupport.addEventListener(valueSelector);
phaseLifecycleSupport.addEventListener(new SelectionCacheLifecycleBridge(CACHE_TYPE, this));
this.minimumSubChainSize = minimumSubChainSize;
this.maximumSubChainSize = maximumSubChainSize;
if (minimumSubChainSize < 1) {
throw new IllegalStateException("The selector (" + this
+ ")'s minimumSubChainSize (" + minimumSubChainSize
+ ") must be at least 1.");
}
if (minimumSubChainSize > maximumSubChainSize) {
throw new IllegalStateException("The minimumSubChainSize (" + minimumSubChainSize
+ ") must be at least maximumSubChainSize (" + maximumSubChainSize + ").");
}
}
@Override
public GenuineVariableDescriptor getVariableDescriptor() {
return valueSelector.getVariableDescriptor();
}
@Override
public SelectionCacheType getCacheType() {
return CACHE_TYPE;
}
@Override
public void solvingStarted(DefaultSolverScope solverScope) {
super.solvingStarted(solverScope);
SupplyManager supplyManager = solverScope.getScoreDirector().getSupplyManager();
GenuineVariableDescriptor variableDescriptor = valueSelector.getVariableDescriptor();
inverseVariableSupply = supplyManager.demand(new SingletonInverseVariableDemand(variableDescriptor));
}
@Override
public void solvingEnded(DefaultSolverScope solverScope) {
super.solvingEnded(solverScope);
inverseVariableSupply = null;
}
// ************************************************************************
// Cache lifecycle methods
// ************************************************************************
@Override
public void constructCache(DefaultSolverScope solverScope) {
InnerScoreDirector scoreDirector = solverScope.getScoreDirector();
GenuineVariableDescriptor variableDescriptor = valueSelector.getVariableDescriptor();
long valueSize = valueSelector.getSize();
// Fail-fast when anchorTrailingChainList.size() could ever be too big
if (valueSize > (long) Integer.MAX_VALUE) {
throw new IllegalStateException("The selector (" + this
+ ") has a valueSelector (" + valueSelector
+ ") with valueSize (" + valueSize
+ ") which is higher than Integer.MAX_VALUE.");
}
List<Object> anchorList = new ArrayList<>();
for (Object value : valueSelector) {
if (variableDescriptor.isValuePotentialAnchor(value)) {
anchorList.add(value);
}
}
int anchorListSize = Math.max(anchorList.size(), 1);
anchorTrailingChainList = new ArrayList<>(anchorListSize);
int anchorChainInitialCapacity = ((int) valueSize / anchorListSize) + 1;
for (Object anchor : anchorList) {
List<Object> anchorChain = new ArrayList<>(anchorChainInitialCapacity);
Object trailingEntity = inverseVariableSupply.getInverseSingleton(anchor);
while (trailingEntity != null) {
anchorChain.add(trailingEntity);
trailingEntity = inverseVariableSupply.getInverseSingleton(trailingEntity);
}
if (anchorChain.size() >= minimumSubChainSize) {
anchorTrailingChainList.add(new SubChain(anchorChain));
}
}
}
@Override
public void disposeCache(DefaultSolverScope solverScope) {
anchorTrailingChainList = null;
}
// ************************************************************************
// Worker methods
// ************************************************************************
@Override
public boolean isCountable() {
return true;
}
@Override
public boolean isNeverEnding() {
return randomSelection;
}
@Override
public long getSize() {
long selectionSize = 0L;
for (SubChain anchorTrailingChain : anchorTrailingChainList) {
selectionSize += calculateSubChainSelectionSize(anchorTrailingChain);
}
return selectionSize;
}
protected long calculateSubChainSelectionSize(SubChain anchorTrailingChain) {
long anchorTrailingChainSize = (long) anchorTrailingChain.getSize();
long n = anchorTrailingChainSize - (long) minimumSubChainSize + 1L;
long m = (maximumSubChainSize >= anchorTrailingChainSize)
? 0L : anchorTrailingChainSize - (long) maximumSubChainSize;
return (n * (n + 1L) / 2L) - (m * (m + 1L) / 2L);
}
@Override
public Iterator<SubChain> iterator() {
if (!randomSelection) {
return new OriginalSubChainIterator(anchorTrailingChainList.listIterator());
} else {
return new RandomSubChainIterator();
}
}
@Override
public ListIterator<SubChain> listIterator() {
if (!randomSelection) {
return new OriginalSubChainIterator(anchorTrailingChainList.listIterator());
} else {
throw new IllegalStateException("The selector (" + this
+ ") does not support a ListIterator with randomSelection (" + randomSelection + ").");
}
}
@Override
public ListIterator<SubChain> listIterator(int index) {
if (!randomSelection) {
// TODO Implement more efficient ListIterator https://issues.jboss.org/browse/PLANNER-37
OriginalSubChainIterator it = new OriginalSubChainIterator(anchorTrailingChainList.listIterator());
for (int i = 0; i < index; i++) {
it.next();
}
return it;
} else {
throw new IllegalStateException("The selector (" + this
+ ") does not support a ListIterator with randomSelection (" + randomSelection + ").");
}
}
private class OriginalSubChainIterator extends UpcomingSelectionIterator<SubChain>
implements ListIterator<SubChain> {
private final ListIterator<SubChain> anchorTrailingChainIterator;
private List<Object> anchorTrailingChain;
private int fromIndex; // Inclusive
private int toIndex; // Exclusive
private int nextListIteratorIndex;
public OriginalSubChainIterator(ListIterator<SubChain> anchorTrailingChainIterator) {
this.anchorTrailingChainIterator = anchorTrailingChainIterator;
fromIndex = 0;
toIndex = 1;
anchorTrailingChain = Collections.emptyList();
nextListIteratorIndex = 0;
}
@Override
protected SubChain createUpcomingSelection() {
toIndex++;
if (toIndex - fromIndex > maximumSubChainSize || toIndex > anchorTrailingChain.size()) {
fromIndex++;
toIndex = fromIndex + minimumSubChainSize;
// minimumSubChainSize <= maximumSubChainSize so (toIndex - fromIndex > maximumSubChainSize) is true
while (toIndex > anchorTrailingChain.size()) {
if (!anchorTrailingChainIterator.hasNext()) {
return noUpcomingSelection();
}
anchorTrailingChain = anchorTrailingChainIterator.next().getEntityList();
fromIndex = 0;
toIndex = fromIndex + minimumSubChainSize;
}
}
return new SubChain(anchorTrailingChain.subList(fromIndex, toIndex));
}
@Override
public SubChain next() {
nextListIteratorIndex++;
return super.next();
}
@Override
public int nextIndex() {
return nextListIteratorIndex;
}
@Override
public boolean hasPrevious() {
throw new UnsupportedOperationException("The operation hasPrevious() is not supported."
+ " See https://issues.jboss.org/browse/PLANNER-37");
}
@Override
public SubChain previous() {
throw new UnsupportedOperationException("The operation previous() is not supported."
+ " See https://issues.jboss.org/browse/PLANNER-37");
}
@Override
public int previousIndex() {
throw new UnsupportedOperationException("The operation previousIndex() is not supported."
+ " See https://issues.jboss.org/browse/PLANNER-37");
}
@Override
public void set(SubChain subChain) {
throw new UnsupportedOperationException("The optional operation set() is not supported.");
}
@Override
public void add(SubChain subChain) {
throw new UnsupportedOperationException("The optional operation add() is not supported.");
}
}
/**
 * Iterator that draws a random {@link SubChain} from the enclosing selector's
 * {@code anchorTrailingChainList}. A single random number is translated into a
 * (fromIndex, subChainSize) pair so that — within the chosen trailing chain —
 * every sub-chain of size >= {@code minimumSubChainSize} is equally likely.
 */
private class RandomSubChainIterator extends UpcomingSelectionIterator<SubChain> {
    private RandomSubChainIterator() {
        // No anchor chains means nothing can ever be selected; mark the
        // iterator as exhausted up front.
        if (anchorTrailingChainList.isEmpty()) {
            upcomingSelection = noUpcomingSelection();
            upcomingCreated = true;
        }
    }
    /**
     * Picks a random sub-chain of the next anchor trailing chain.
     * NOTE(review): fairness holds only within the selected trailing chain;
     * see selectAnchorTrailingChain() for the cross-chain compromise.
     */
    @Override
    protected SubChain createUpcomingSelection() {
        SubChain anchorTrailingChain = selectAnchorTrailingChain();
        // Every SubChain has the same probability (from this point on at least).
        // A random fromIndex and random toIndex would not be fair.
        long selectionSize = calculateSubChainSelectionSize(anchorTrailingChain);
        long selectionIndex = RandomUtils.nextLong(workingRandom, selectionSize);
        // Black magic to translate selectionIndex into fromIndex and toIndex:
        // enumerate sub-chains by increasing size. There are
        // (chainSize - subChainSize + 1) sub-chains of a given size, so keep
        // subtracting that count until selectionIndex falls inside one size bucket;
        // the remainder is the fromIndex within that bucket.
        long fromIndex = selectionIndex;
        long subChainSize = minimumSubChainSize;
        long countInThatSize = anchorTrailingChain.getSize() - subChainSize + 1;
        while (fromIndex >= countInThatSize) {
            fromIndex -= countInThatSize;
            subChainSize++;
            countInThatSize--;
            // Defensive check: selectionIndex < selectionSize guarantees we run
            // out of index before we run out of size buckets.
            if (countInThatSize <= 0) {
                throw new IllegalStateException("Impossible if calculateSubChainSelectionSize() works correctly.");
            }
        }
        return anchorTrailingChain.subChain((int) fromIndex, (int) (fromIndex + subChainSize));
    }
    /**
     * Picks one anchor trailing chain uniformly at random.
     */
    private SubChain selectAnchorTrailingChain() {
        // Known issue/compromise: Every SubChain should have same probability, but doesn't.
        // Instead, every anchorTrailingChain has the same probability.
        int anchorTrailingChainListIndex = workingRandom.nextInt(anchorTrailingChainList.size());
        return anchorTrailingChainList.get(anchorTrailingChainListIndex);
    }
}
/**
 * Returns a short debug rendering: {@code SimpleClassName(valueSelector)}.
 */
@Override
public String toString() {
    // Built stepwise; StringBuilder.append(Object) uses String.valueOf,
    // matching the original string concatenation exactly.
    StringBuilder text = new StringBuilder(getClass().getSimpleName());
    text.append('(').append(valueSelector).append(')');
    return text.toString();
}
}
| |
/*
* Copyright 2013, Emanuel Rabina (http://www.ultraq.net.nz/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package redhorizon.engine.input;
import redhorizon.engine.SubsystemCallback;
import redhorizon.engine.display.GameWindow;
import redhorizon.engine.display.InputEventDelegate;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.KeyListener;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.events.MouseTrackListener;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import java.util.Iterator;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
* Input subsystem, deals with the handling and dispatch of events from the
* display window.
*
* @author Emanuel Rabina
*/
/**
 * Input subsystem, deals with the handling and dispatch of events from the
 * display window.
 * <p>
 * SWT listener threads enqueue key/mouse events via the
 * {@link InputEventDelegate} methods; {@link #run()} drains the queues and
 * dispatches them.
 * <p>
 * NOTE(review): this class references members that are not declared anywhere
 * in it ({@code controller}, {@code width}, {@code height}, {@code offscreen},
 * {@code display}, {@code shell}) — as shown, it cannot compile. It also
 * imports SWT's {@code KeyEvent}/{@code MouseEvent} while constructing events
 * with {@code KeyAction}/{@code MouseAction} constructors and lower-case field
 * names ({@code keycode}, {@code action}), which suggests project-local event
 * classes are intended instead of the SWT imports — TODO confirm.
 *
 * @author Emanuel Rabina
 */
public class InputSubsystem implements InputEventDelegate, Runnable {

    // Source window and lifecycle callback, fixed at construction.
    private final GameWindow window;
    private final SubsystemCallback callback;

    // Thread-safe queues: listener threads produce, run() consumes.
    private ConcurrentLinkedQueue<KeyEvent> keyeventqueue = new ConcurrentLinkedQueue<>();
    private ConcurrentLinkedQueue<MouseEvent> mouseeventqueue = new ConcurrentLinkedQueue<>();

    /**
     * Constructor, initializes the input subsystem for the game.
     *
     * @param window   Window whose input events are handled.
     * @param callback Notified of subsystem startup/shutdown.
     */
    public InputSubsystem(GameWindow window, SubsystemCallback callback) {
        this.window = window;
        this.callback = callback;
    }

    /**
     * {@inheritDoc}
     */
    public void keyPressed(char character, int keycode, int modifiers) {
        keyeventqueue.add(new KeyEvent(KeyAction.KEY_PRESSED, character, keycode, modifiers));
    }

    /**
     * {@inheritDoc}
     */
    public void keyReleased(int modifiers) {
        // Only the modifier state is recorded; character/keycode are zeroed.
        keyeventqueue.add(new KeyEvent(KeyAction.KEY_RELEASE, (char)0, 0, modifiers));
    }

    /**
     * {@inheritDoc}
     */
    public void mouseDoubleClick(int button, int xcoord, int ycoord) {
        mouseeventqueue.add(new MouseEvent(MouseAction.MOUSE_DBLCLICK, button, xcoord, ycoord));
    }

    /**
     * {@inheritDoc}
     */
    public void mouseDown(int button, int xcoord, int ycoord) {
        mouseeventqueue.add(new MouseEvent(MouseAction.MOUSE_DOWN, button, xcoord, ycoord));
    }

    /**
     * {@inheritDoc}
     */
    public void mouseMove(int xcoord, int ycoord) {
        // -1 marks "no button" for pure movement events.
        mouseeventqueue.add(new MouseEvent(MouseAction.MOUSE_MOVE, -1, xcoord, ycoord));
    }

    /**
     * {@inheritDoc}
     */
    public void mouseUp(int button, int xcoord, int ycoord) {
        mouseeventqueue.add(new MouseEvent(MouseAction.MOUSE_UP, button, xcoord, ycoord));
    }

    /**
     * Input handling loop, collects and dispatches input events to the
     * appropriate handlers.
     * <p>
     * NOTE(review): despite being documented as a loop, the body drains each
     * queue exactly once and then falls through to shutdown — there is no
     * enclosing while-loop. Presumably one is intended; verify against the
     * thread that runs this subsystem.
     */
    @Override
    public void run() {
        Thread.currentThread().setName("Red Horizon - Input subsystem");
        try {
            // Startup
            callback.subsystemInit();
            // Input event handling
            // Keyboard events
            for (Iterator<KeyEvent> keyevents = keyeventqueue.iterator(); keyevents.hasNext(); ) {
                KeyEvent keyevent = keyevents.next();
                keyevents.remove();
                switch (keyevent.keycode) {
                // Skip current video
                // NOTE: Need a generic 'abort last action' event here
                case SWT.ESC:
                    controller.skipVideo();
                    break;
                // Player movement controls
                case SWT.ARROW_UP:
                    controller.moveUp();
                    break;
                case SWT.ARROW_DOWN:
                    controller.moveDown();
                    break;
                case SWT.ARROW_LEFT:
                    controller.moveLeft();
                    break;
                case SWT.ARROW_RIGHT:
                    controller.moveRight();
                    break;
                }
            }
            // Mouse events
            for (Iterator<MouseEvent> mouseevents = mouseeventqueue.iterator(); mouseevents.hasNext(); ) {
                MouseEvent mouseevent = mouseevents.next();
                mouseevents.remove();
                switch (mouseevent.action) {
                case MOUSE_DOWN:
                    controller.mouseClickDown(mouseevent.x, mouseevent.y);
                    break;
                case MOUSE_UP:
                    controller.mouseClickUp();
                    break;
                case MOUSE_MOVE:
                    // Sentinel coordinates (MIN/MAX_VALUE) come from the
                    // off-screen scroll logic in SWTMouseListener.mouseExit().
                    // Check for left/right scrolling
                    if (mouseevent.x == Integer.MIN_VALUE) {
                        controller.moveLeft();
                    }
                    else if (mouseevent.x == Integer.MAX_VALUE) {
                        controller.moveRight();
                    }
                    // Check for up/down scrolling
                    if (mouseevent.y == Integer.MIN_VALUE) {
                        controller.moveDown();
                    }
                    else if (mouseevent.y == Integer.MAX_VALUE) {
                        controller.moveUp();
                    }
                    // Normal on-screen movement
                    // NOTE(review): also invoked for sentinel coordinates
                    // above — confirm whether that is intended.
                    controller.mouseMove(mouseevent.x, mouseevent.y);
                    break;
                }
            }
        }
        finally {
            // Shutdown
            callback.subsystemStop();
        }
    }

    /**
     * Inner-class for handling keyboard events on the game window.
     */
    private class SWTKeyListener implements KeyListener {

        private final InputEventDelegate handler;

        /**
         * Constructor, assigns the keyboard handler.
         *
         * @param handler Keyboard event handler.
         */
        private SWTKeyListener(InputEventDelegate handler) {
            this.handler = handler;
        }

        /**
         * Method for taking user keyboard presses and translating them into events
         * for the registered {@link InputEventDelegate} to deal with.
         *
         * @param keyevent Details on the keyboard press event that occurred.
         */
        public void keyPressed(KeyEvent keyevent) {
            handler.keyPressed(keyevent.character, keyevent.keyCode, keyevent.stateMask);
        }

        /**
         * Method for interpreting keyboard key release events. Used only for
         * keyboard modifier keys.
         *
         * @param keyevent Details on the keyboard key release event.
         */
        public void keyReleased(KeyEvent keyevent) {
            handler.keyReleased(keyevent.stateMask);
        }
    }

    /**
     * Inner-class for handling mouse events on the game window.
     * <p>
     * Coordinates are re-centred so that (0,0) is the middle of the window
     * with Y growing upwards: {@code x - (width >> 1)}, {@code (height >> 1) - y}.
     * NOTE(review): {@code width}/{@code height} are not declared in this file —
     * presumably window dimensions; confirm where they come from.
     */
    private class SWTMouseListener implements MouseListener, MouseMoveListener, MouseTrackListener {

        private final InputEventDelegate handler;

        /**
         * Constructor, assigns the mouse handler.
         *
         * @param handler Mouse event handler.
         */
        private SWTMouseListener(InputEventDelegate handler) {
            this.handler = handler;
        }

        /**
         * Method for notifying the input handler of mouse double-click events.
         *
         * @param mouseevent Details of the mouse event that occurred.
         */
        public void mouseDoubleClick(MouseEvent mouseevent) {
            handler.mouseDoubleClick(mouseevent.button,
                    mouseevent.x - (width >> 1), (height >> 1) - mouseevent.y);
        }

        /**
         * Method for notifying the input handler of mouse down events.
         *
         * @param mouseevent Details on the mouse event that occurred.
         */
        public void mouseDown(MouseEvent mouseevent) {
            handler.mouseDown(mouseevent.button,
                    mouseevent.x - (width >> 1), (height >> 1) - mouseevent.y);
        }

        /**
         * Checks for when the mouse enters the display area.
         *
         * @param mouseevent Details on the mouse event that occurred.
         */
        public void mouseEnter(MouseEvent mouseevent) {
            offscreen = false;
        }

        /**
         * Checks for when the mouse exits the display area.
         * <p>
         * While the cursor stays outside the window, a self-rescheduling task
         * (every 20ms) converts the cursor's position relative to the window
         * bounds into sentinel scroll events (Integer.MIN/MAX_VALUE), which
         * run() interprets as edge-scrolling.
         *
         * @param mouseevent Details on the mouse event that occurred.
         */
        public void mouseExit(MouseEvent mouseevent) {
            offscreen = true;
            // Replace off-screen mouse movements with scrolling events
            display.asyncExec(new Runnable() {
                public void run() {
                    // Quit
                    if (!offscreen || shell.isDisposed()) {
                        return;
                    }
                    Point mouse = display.getCursorLocation();
                    Rectangle displayarea = shell.getBounds();
                    // Left/Right check
                    int x = (mouse.x < displayarea.x) ? Integer.MIN_VALUE :
                            (mouse.x > displayarea.x + displayarea.width) ? Integer.MAX_VALUE :
                            0;
                    // Up/Down check (screen Y grows downward, hence the swap:
                    // above the window maps to MAX_VALUE, i.e. scroll up)
                    int y = (mouse.y < displayarea.y) ? Integer.MAX_VALUE :
                            (mouse.y > displayarea.y + displayarea.height) ? Integer.MIN_VALUE :
                            0;
                    // Notify and repeat
                    if (x != 0 || y != 0) {
                        handler.mouseMove(x, y);
                    }
                    display.timerExec(20, this);
                }
            });
        }

        /**
         * Mouse hovering event, does nothing.
         *
         * @param mouseevent Details on the mouse event that occurred.
         */
        public void mouseHover(MouseEvent mouseevent) {
        }

        /**
         * Method for notifying the input handler of mouse move events.
         *
         * @param mouseevent Details on the mouse event that occurred.
         */
        public void mouseMove(MouseEvent mouseevent) {
            handler.mouseMove(mouseevent.x - (width >> 1), (height >> 1) - mouseevent.y);
        }

        /**
         * Method for notifying the input handler of mouse up events.
         *
         * @param mouseevent Details on the mouse event that occurred.
         */
        public void mouseUp(MouseEvent mouseevent) {
            handler.mouseUp(mouseevent.button, mouseevent.x - (width >> 1), (height >> 1) - mouseevent.y);
        }
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.util;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.util.GroupedList.GroupedListHelper;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * Tests for {@code GroupedList}: round-tripping through {@code compress()},
 * group boundary handling via {@code GroupedListHelper}, and element/group
 * removal. Helper methods at the bottom compare a compressed representation
 * against expected flat or grouped element lists.
 */
@RunWith(JUnit4.class)
public class GroupedListTest {
    @Test
    public void empty() {
        createSizeN(0);
    }
    @Test
    public void sizeOne() {
        createSizeN(1);
    }
    @Test
    public void sizeTwo() {
        createSizeN(2);
    }
    @Test
    public void sizeN() {
        createSizeN(10);
    }
    // Round-trips a flat list of `size` elements through compression and
    // checks both iteration order and element equality.
    private void createSizeN(int size) {
        List<String> list = new ArrayList<>();
        for (int i = 0; i < size; i++) {
            list.add("test" + i);
        }
        Object compressedList = createAndCompress(list);
        assertTrue(Iterables.elementsEqual(iterable(compressedList), list));
        assertElementsEqual(compressedList, list);
    }
    @Test
    public void elementsNotEqualDifferentOrder() {
        List<String> list = Lists.newArrayList("a", "b", "c");
        Object compressedList = createAndCompress(list);
        ArrayList<String> reversed = new ArrayList<>(list);
        Collections.reverse(reversed);
        // Same elements in a different order must NOT compare equal.
        assertFalse(elementsEqual(compressedList, reversed));
    }
    @Test
    public void elementsNotEqualDifferentSizes() {
        // Cross-product of sizes 0..9: lists compare equal iff sizes match
        // (prefixes share elements, so this isolates the size check).
        for (int size1 = 0; size1 < 10; size1++) {
            List<String> firstList = new ArrayList<>();
            for (int i = 0; i < size1; i++) {
                firstList.add("test" + i);
            }
            Object array = createAndCompress(firstList);
            for (int size2 = 0; size2 < 10; size2++) {
                List<String> secondList = new ArrayList<>();
                for (int i = 0; i < size2; i++) {
                    secondList.add("test" + i);
                }
                assertEquals(GroupedList.create(array) + ", " + secondList + ", " + size1 + ", " + size2,
                        size1 == size2, elementsEqual(array, secondList));
            }
        }
    }
    @Test
    public void group() {
        GroupedList<String> groupedList = new GroupedList<>();
        assertTrue(groupedList.isEmpty());
        GroupedListHelper<String> helper = new GroupedListHelper<>();
        List<ImmutableList<String>> elements = ImmutableList.of(
            ImmutableList.of("1"),
            ImmutableList.of("2a", "2b"),
            ImmutableList.of("3"),
            ImmutableList.of("4"),
            ImmutableList.of("5a", "5b", "5c"),
            ImmutableList.of("6a", "6b", "6c")
        );
        List<String> allElts = new ArrayList<>();
        // Only wrap multi-element groups in startGroup()/endGroup();
        // singletons are added bare.
        for (List<String> group : elements) {
            if (group.size() > 1) {
                helper.startGroup();
            }
            for (String elt : group) {
                helper.add(elt);
            }
            if (group.size() > 1) {
                helper.endGroup();
            }
            allElts.addAll(group);
        }
        groupedList.append(helper);
        assertEquals(allElts.size(), groupedList.size());
        assertFalse(groupedList.isEmpty());
        Object compressed = groupedList.compress();
        assertElementsEqual(compressed, allElts);
        assertElementsEqualInGroups(GroupedList.<String>create(compressed), elements);
        assertElementsEqualInGroups(groupedList, elements);
    }
    @Test
    public void singletonAndEmptyGroups() {
        GroupedList<String> groupedList = new GroupedList<>();
        assertTrue(groupedList.isEmpty());
        GroupedListHelper<String> helper = new GroupedListHelper<>();
        @SuppressWarnings("unchecked") // varargs
        List<ImmutableList<String>> elements = Lists.newArrayList(
            ImmutableList.of("1"),
            ImmutableList.<String>of(),
            ImmutableList.of("2a", "2b"),
            ImmutableList.of("3")
        );
        List<String> allElts = new ArrayList<>();
        for (List<String> group : elements) {
            helper.startGroup(); // Start a group even if the group has only one element or is empty.
            for (String elt : group) {
                helper.add(elt);
            }
            helper.endGroup();
            allElts.addAll(group);
        }
        groupedList.append(helper);
        assertEquals(allElts.size(), groupedList.size());
        assertFalse(groupedList.isEmpty());
        Object compressed = groupedList.compress();
        assertElementsEqual(compressed, allElts);
        // Get rid of empty list -- it was not stored in groupedList.
        elements.remove(1);
        assertElementsEqualInGroups(GroupedList.<String>create(compressed), elements);
        assertElementsEqualInGroups(groupedList, elements);
    }
    @Test
    public void removeMakesEmpty() {
        GroupedList<String> groupedList = new GroupedList<>();
        assertTrue(groupedList.isEmpty());
        GroupedListHelper<String> helper = new GroupedListHelper<>();
        @SuppressWarnings("unchecked") // varargs
        List<List<String>> elements = Lists.newArrayList(
            (List<String>) ImmutableList.of("1"),
            ImmutableList.<String>of(),
            Lists.newArrayList("2a", "2b"),
            ImmutableList.of("3"),
            ImmutableList.of("removedGroup1", "removedGroup2"),
            ImmutableList.of("4")
        );
        List<String> allElts = new ArrayList<>();
        for (List<String> group : elements) {
            helper.startGroup(); // Start a group even if the group has only one element or is empty.
            for (String elt : group) {
                helper.add(elt);
            }
            helper.endGroup();
            allElts.addAll(group);
        }
        groupedList.append(helper);
        // Removal covers: part of a group ("2a"), a whole singleton group
        // ("3"), and a whole multi-element group (both "removedGroup" elts).
        Set<String> removed = ImmutableSet.of("2a", "3", "removedGroup1", "removedGroup2");
        groupedList.remove(removed);
        Object compressed = groupedList.compress();
        allElts.removeAll(removed);
        assertElementsEqual(compressed, allElts);
        // Mirror the removals in the expected structure before comparing.
        elements.get(2).remove("2a");
        elements.remove(ImmutableList.of("3"));
        elements.remove(ImmutableList.of());
        elements.remove(ImmutableList.of("removedGroup1", "removedGroup2"));
        assertElementsEqualInGroups(GroupedList.<String>create(compressed), elements);
        assertElementsEqualInGroups(groupedList, elements);
    }
    @Test
    public void removeGroupFromSmallList() {
        GroupedList<String> groupedList = new GroupedList<>();
        assertTrue(groupedList.isEmpty());
        GroupedListHelper<String> helper = new GroupedListHelper<>();
        List<List<String>> elements = new ArrayList<>();
        List<String> group = Lists.newArrayList("1a", "1b", "1c", "1d");
        elements.add(group);
        List<String> allElts = new ArrayList<>();
        helper.startGroup();
        for (String item : elements.get(0)) {
            helper.add(item);
        }
        allElts.addAll(group);
        helper.endGroup();
        groupedList.append(helper);
        Set<String> removed = ImmutableSet.of("1b", "1c");
        groupedList.remove(removed);
        Object compressed = groupedList.compress();
        allElts.removeAll(removed);
        assertElementsEqual(compressed, allElts);
        elements.get(0).removeAll(removed);
        assertElementsEqualInGroups(GroupedList.<String>create(compressed), elements);
        assertElementsEqualInGroups(groupedList, elements);
    }
    // Builds a GroupedList from a flat collection and returns its compressed form.
    private static Object createAndCompress(Collection<String> list) {
        GroupedList<String> result = new GroupedList<>();
        result.append(GroupedListHelper.create(list));
        return result.compress();
    }
    // Flattened view of a compressed representation.
    private static Iterable<String> iterable(Object compressed) {
        return GroupedList.<String>create(compressed).toSet();
    }
    // Order-sensitive element comparison of a compressed representation.
    private static boolean elementsEqual(Object compressed, Iterable<String> expected) {
        return Iterables.elementsEqual(GroupedList.<String>create(compressed).toSet(), expected);
    }
    // Asserts group-by-group equality, and that no expected groups are left over.
    private static void assertElementsEqualInGroups(
            GroupedList<String> groupedList, List<? extends List<String>> elements) {
        int i = 0;
        for (Iterable<String> group : groupedList) {
            assertThat(group).containsExactlyElementsIn(elements.get(i));
            i++;
        }
        assertThat(elements).hasSize(i);
    }
    // Order-insensitive flat element comparison of a compressed representation.
    private static void assertElementsEqual(Object compressed, Iterable<String> expected) {
        assertThat(GroupedList.<String>create(compressed).toSet()).containsExactlyElementsIn(expected);
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Conditions;
import consulo.util.dataholder.Key;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.refactoring.copy.CopyHandler;
import com.intellij.refactoring.move.MoveCallback;
import com.intellij.refactoring.move.MoveHandler;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.JBIterable;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import java.io.File;
import java.util.List;
/**
 * Bridges IDE copy/cut/paste actions to PSI-element operations (copy, move)
 * for a given UI component. The single {@link MyEditable} instance serves as
 * the {@code CopyProvider}, {@code CutProvider} and {@code PasteProvider}.
 * Non-default paste handling is delegated to {@code PasteProvider} extensions
 * registered under {@code com.intellij.filePasteProvider}.
 */
public class CopyPasteDelegator implements CopyPasteSupport {
    private static final ExtensionPointName<PasteProvider> EP_NAME = ExtensionPointName.create("com.intellij.filePasteProvider");
    // Copyable user-data flag set on a target directory when several candidate
    // directories exist, so downstream UI can show a chooser.
    public static final Key<Boolean> SHOW_CHOOSER_KEY = Key.create("show.dirs.chooser");
    private final Project myProject;
    // Component whose data context supplies the selection; also repainted
    // after clipboard state changes (see updateView()).
    private final JComponent myKeyReceiver;
    private final MyEditable myEditable;
    public CopyPasteDelegator(@Nonnull Project project, @Nonnull JComponent keyReceiver) {
        myProject = project;
        myKeyReceiver = keyReceiver;
        myEditable = new MyEditable();
    }
    /**
     * Selected PSI elements from the receiver component's data context;
     * never null (empty array when nothing is selected).
     */
    @Nonnull
    protected PsiElement[] getSelectedElements() {
        DataContext dataContext = DataManager.getInstance().getDataContext(myKeyReceiver);
        return ObjectUtils.notNull(dataContext.getData(LangDataKeys.PSI_ELEMENT_ARRAY), PsiElement.EMPTY_ARRAY);
    }
    /**
     * Like getSelectedElements(), but returns the empty array if ANY selected
     * element is null or invalidated (all-or-nothing validity).
     */
    @Nonnull
    private PsiElement[] getValidSelectedElements() {
        PsiElement[] selectedElements = getSelectedElements();
        for (PsiElement element : selectedElements) {
            if (element == null || !element.isValid()) {
                return PsiElement.EMPTY_ARRAY;
            }
        }
        return selectedElements;
    }
    private void updateView() {
        myKeyReceiver.repaint();
    }
    @Override
    public CopyProvider getCopyProvider() {
        return myEditable;
    }
    @Override
    public CutProvider getCutProvider() {
        return myEditable;
    }
    @Override
    public PasteProvider getPasteProvider() {
        return myEditable;
    }
    /**
     * Combined copy/cut/paste provider backed by PsiCopyPasteManager.
     * The boolean passed to setElements() distinguishes copy (true) from
     * cut (false).
     */
    private class MyEditable implements CutProvider, CopyProvider, PasteProvider {
        @Override
        public void performCopy(@Nonnull DataContext dataContext) {
            PsiElement[] elements = getValidSelectedElements();
            PsiCopyPasteManager.getInstance().setElements(elements, true);
            updateView();
        }
        @Override
        public boolean isCopyEnabled(@Nonnull DataContext dataContext) {
            // Copyable either via CopyHandler, or if at least one selected
            // element is named (so its name can be put on the clipboard).
            PsiElement[] elements = getValidSelectedElements();
            return CopyHandler.canCopy(elements) || JBIterable.of(elements).filter(Conditions.instanceOf(PsiNamedElement.class)).isNotEmpty();
        }
        @Override
        public boolean isCopyVisible(@Nonnull DataContext dataContext) {
            return true;
        }
        @Override
        public void performCut(@Nonnull DataContext dataContext) {
            PsiElement[] elements = getValidSelectedElements();
            if (MoveHandler.adjustForMove(myProject, elements, null) == null) {
                return;
            }
            // 'elements' passed instead of result of 'adjustForMove' because otherwise ProjectView would
            // not recognize adjusted elements when graying them
            PsiCopyPasteManager.getInstance().setElements(elements, false);
            updateView();
        }
        @Override
        public boolean isCutEnabled(@Nonnull DataContext dataContext) {
            final PsiElement[] elements = getValidSelectedElements();
            return elements.length != 0 && MoveHandler.canMove(elements, null);
        }
        @Override
        public boolean isCutVisible(@Nonnull DataContext dataContext) {
            return true;
        }
        @Override
        public void performPaste(@Nonnull DataContext dataContext) {
            // Default PSI paste first; otherwise the first willing extension wins.
            if (!performDefaultPaste(dataContext)) {
                for (PasteProvider provider : EP_NAME.getExtensionList()) {
                    if (provider.isPasteEnabled(dataContext)) {
                        provider.performPaste(dataContext);
                        break;
                    }
                }
            }
        }
        /**
         * Attempts the default PSI-based paste. Returns false when there is
         * nothing on the PSI clipboard or the elements can be neither copied
         * nor moved to the target, so extension providers get a chance.
         */
        private boolean performDefaultPaste(final DataContext dataContext) {
            final boolean[] isCopied = new boolean[1];
            final PsiElement[] elements = PsiCopyPasteManager.getInstance().getElements(isCopied);
            if (elements == null) return false;
            // Alternative resolve lets paste work during indexing (dumb mode).
            DumbService.getInstance(myProject).setAlternativeResolveEnabled(true);
            try {
                final Module module = dataContext.getData(LangDataKeys.MODULE);
                PsiElement target = getPasteTarget(dataContext, module);
                if (isCopied[0]) {
                    TransactionGuard.getInstance().submitTransactionAndWait(() -> pasteAfterCopy(elements, module, target, true));
                }
                else if (MoveHandler.canMove(elements, target)) {
                    TransactionGuard.getInstance().submitTransactionAndWait(() -> pasteAfterCut(dataContext, elements, target));
                }
                else {
                    return false;
                }
            }
            finally {
                DumbService.getInstance(myProject).setAlternativeResolveEnabled(false);
                updateView();
            }
            return true;
        }
        /**
         * Resolves the paste target; a PsiDirectoryContainer is narrowed to
         * its single module-scope directory when unambiguous.
         */
        private PsiElement getPasteTarget(@Nonnull DataContext dataContext, @Nullable Module module) {
            PsiElement target = dataContext.getData(LangDataKeys.PASTE_TARGET_PSI_ELEMENT);
            if (module != null && target instanceof PsiDirectoryContainer) {
                final PsiDirectory[] directories = ((PsiDirectoryContainer)target).getDirectories(GlobalSearchScope.moduleScope(module));
                if (directories.length == 1) {
                    return directories[0];
                }
            }
            return target;
        }
        /**
         * Derives a target directory from the paste target: the target itself,
         * a directory of its container (flagging SHOW_CHOOSER_KEY if several),
         * or the directory of its containing file. May return null.
         */
        @Nullable
        private PsiDirectory getTargetDirectory(@Nullable Module module, @Nullable PsiElement target) {
            PsiDirectory targetDirectory = target instanceof PsiDirectory ? (PsiDirectory)target : null;
            if (targetDirectory == null && target instanceof PsiDirectoryContainer) {
                final PsiDirectory[] directories = module == null ? ((PsiDirectoryContainer)target).getDirectories() : ((PsiDirectoryContainer)target).getDirectories(GlobalSearchScope.moduleScope(module));
                if (directories.length > 0) {
                    targetDirectory = directories[0];
                    targetDirectory.putCopyableUserData(SHOW_CHOOSER_KEY, directories.length > 1);
                }
            }
            if (targetDirectory == null && target != null) {
                final PsiFile containingFile = target.getContainingFile();
                if (containingFile != null) {
                    targetDirectory = containingFile.getContainingDirectory();
                }
            }
            return targetDirectory;
        }
        /**
         * Copy-paste: tries CopyHandler first; failing that, once
         * (tryFromFiles guards the single recursion) re-resolves the elements
         * through the local file system and retries with the resulting
         * PsiFileSystemItems. SHOW_CHOOSER_KEY is always cleared afterwards.
         */
        private void pasteAfterCopy(PsiElement[] elements, Module module, PsiElement target, boolean tryFromFiles) {
            // Pasting a single element onto itself means "copy in place":
            // no explicit target directory.
            PsiDirectory targetDirectory = elements.length == 1 && elements[0] == target ? null : getTargetDirectory(module, target);
            try {
                if (CopyHandler.canCopy(elements)) {
                    CopyHandler.doCopy(elements, targetDirectory);
                }
                else if (tryFromFiles) {
                    List<File> files = PsiCopyPasteManager.asFileList(elements);
                    if (files != null) {
                        PsiManager manager = elements[0].getManager();
                        PsiFileSystemItem[] items = files.stream().map(file -> LocalFileSystem.getInstance().findFileByIoFile(file)).map(file -> {
                            if (file != null) {
                                return file.isDirectory() ? manager.findDirectory(file) : manager.findFile(file);
                            }
                            return null;
                        }).filter(file -> file != null).toArray(PsiFileSystemItem[]::new);
                        pasteAfterCopy(items, module, target, false);
                    }
                }
            }
            finally {
                if (targetDirectory != null) {
                    targetDirectory.putCopyableUserData(SHOW_CHOOSER_KEY, null);
                }
            }
        }
        /**
         * Cut-paste: moves the elements and clears the PSI clipboard once the
         * move refactoring completes.
         */
        private void pasteAfterCut(DataContext dataContext, PsiElement[] elements, PsiElement target) {
            MoveHandler.doMove(myProject, elements, target, dataContext, new MoveCallback() {
                @Override
                public void refactoringCompleted() {
                    PsiCopyPasteManager.getInstance().clear();
                }
            });
        }
        @Override
        public boolean isPastePossible(@Nonnull DataContext dataContext) {
            return true;
        }
        @Override
        public boolean isPasteEnabled(@Nonnull DataContext dataContext) {
            if (isDefaultPasteEnabled(dataContext)) {
                return true;
            }
            for (PasteProvider provider : EP_NAME.getExtensionList()) {
                if (provider.isPasteEnabled(dataContext)) {
                    return true;
                }
            }
            return false;
        }
        /**
         * Default paste is enabled only with a project (not in dumb mode),
         * a paste target, clipboard elements, and no cross-project content.
         */
        private boolean isDefaultPasteEnabled(final DataContext dataContext) {
            Project project = dataContext.getData(CommonDataKeys.PROJECT);
            if (project == null) {
                return false;
            }
            if (DumbService.isDumb(project)) return false;
            Object target = dataContext.getData(LangDataKeys.PASTE_TARGET_PSI_ELEMENT);
            if (target == null) {
                return false;
            }
            PsiElement[] elements = PsiCopyPasteManager.getInstance().getElements(new boolean[]{false});
            if (elements == null) {
                return false;
            }
            // disable cross-project paste
            for (PsiElement element : elements) {
                PsiManager manager = element.getManager();
                if (manager == null || manager.getProject() != project) {
                    return false;
                }
            }
            return true;
        }
    }
}
| |
/*
* Copyright (c) 2009-2016 Dmytro Pishchukhin (http://knowhowlab.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.knowhowlab.osgi.monitoradmin;
import org.knowhowlab.osgi.monitoradmin.util.Utils;
import org.osgi.framework.*;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventAdmin;
import org.osgi.service.log.LogService;
import org.osgi.service.monitor.MonitorAdmin;
import org.osgi.service.monitor.MonitorListener;
import org.osgi.service.monitor.Monitorable;
import org.osgi.util.tracker.ServiceTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Monitor Admin activator
*
* @author dmytro.pishchukhin
* @see org.osgi.framework.BundleActivator
*/
/**
 * Monitor Admin activator. Registers the {@code MonitorAdmin} service factory
 * and the shared {@code MonitorListener}, tracks {@code LogService} and
 * {@code EventAdmin}, and routes logging/eventing through them (falling back
 * to SLF4J when no {@code LogService} is available).
 *
 * @author dmytro.pishchukhin
 * @see org.osgi.framework.BundleActivator
 */
public class Activator implements BundleActivator, OsgiVisitor, LogVisitor {
    /**
     * Default logger (SLF4J fallback when LogService is unavailable)
     */
    private static final Logger LOG = LoggerFactory.getLogger(Activator.class);
    /**
     * <code>MonitorAdmin</code> <code>ServiceFactory</code> instance
     */
    private MonitorAdminFactory monitorAdminFactory;
    /**
     * MonitorAdmin commons actions
     */
    private MonitorAdminCommon common;
    /**
     * BundleContext
     */
    private BundleContext bc;
    /**
     * MonitorAdmin ServiceFactory registration
     */
    private ServiceRegistration monitorAdminRegistration;
    /**
     * MonitorListener registration
     */
    private ServiceRegistration monitorListenerRegistration;
    /**
     * EventAdmin service tracker
     */
    private ServiceTracker eventAdminTracker;
    /**
     * LogService service tracker
     */
    private ServiceTracker logServiceTracker;

    public void start(BundleContext bundleContext) throws Exception {
        bc = bundleContext;
        // init LogService tracker first so subsequent startup steps can log
        logServiceTracker = new ServiceTracker(bc, LogService.class.getName(), null);
        logServiceTracker.open();
        // init EventAdmin tracker
        eventAdminTracker = new ServiceTracker(bc, EventAdmin.class.getName(), null);
        eventAdminTracker.open();
        // init commons (this Activator acts as both OsgiVisitor and LogVisitor)
        common = new MonitorAdminCommon(this, this);
        // init factory
        monitorAdminFactory = new MonitorAdminFactory(this, common);
        // register MonitorAdmin ServiceFactory
        monitorAdminRegistration = bundleContext.registerService(MonitorAdmin.class.getName(), monitorAdminFactory, null);
        // register MonitorListener
        monitorListenerRegistration = bundleContext.registerService(MonitorListener.class.getName(), common, null);
        info("MonitorAdmin started", null);
    }

    public void stop(BundleContext bundleContext) throws Exception {
        // unregister MonitorAdmin service
        if (monitorAdminRegistration != null) {
            monitorAdminRegistration.unregister();
            monitorAdminRegistration = null;
        }
        // unregister MonitorListener service
        if (monitorListenerRegistration != null) {
            monitorListenerRegistration.unregister();
            monitorListenerRegistration = null;
        }
        if (common != null) {
            // cancel started jobs
            common.cancelAllJobs();
            monitorAdminFactory = null;
            // release commons so a restarted bundle gets a fresh instance
            common = null;
        }
        if (eventAdminTracker != null) {
            eventAdminTracker.close();
            eventAdminTracker = null;
        }
        // log while the LogService tracker is still open, then close it last
        info("MonitorAdmin stopped", null);
        if (logServiceTracker != null) {
            logServiceTracker.close();
            logServiceTracker = null;
        }
        bc = null;
    }

    /**
     * Publish DEBUG message. If <code>LogService</code> is unavailable the message is published to the default SLF4J logger
     *
     * @param message   message
     * @param throwable exception
     */
    public void debug(String message, Throwable throwable) {
        // Guard against calls after stop() has released the tracker.
        LogService logService = logServiceTracker != null ? (LogService) logServiceTracker.getService() : null;
        if (logService != null) {
            logService.log(LogService.LOG_DEBUG, message, throwable);
        } else {
            LOG.debug(message, throwable);
        }
    }

    /**
     * Publish INFO message. If <code>LogService</code> is unavailable the message is published to the default SLF4J logger
     *
     * @param message   message
     * @param throwable exception
     */
    public void info(String message, Throwable throwable) {
        // Guard against calls after stop() has released the tracker.
        LogService logService = logServiceTracker != null ? (LogService) logServiceTracker.getService() : null;
        if (logService != null) {
            logService.log(LogService.LOG_INFO, message, throwable);
        } else {
            LOG.info(message, throwable);
        }
    }

    /**
     * Publish WARNING message. If <code>LogService</code> is unavailable the message is published to the default SLF4J logger
     *
     * @param message   message
     * @param throwable exception
     */
    public void warning(String message, Throwable throwable) {
        // Guard against calls after stop() has released the tracker.
        LogService logService = logServiceTracker != null ? (LogService) logServiceTracker.getService() : null;
        if (logService != null) {
            logService.log(LogService.LOG_WARNING, message, throwable);
        } else {
            LOG.warn(message, throwable);
        }
    }

    /**
     * Publish ERROR message. If <code>LogService</code> is unavailable the message is published to the default SLF4J logger
     *
     * @param message   message
     * @param throwable exception
     */
    public void error(String message, Throwable throwable) {
        // Guard against calls after stop() has released the tracker.
        LogService logService = logServiceTracker != null ? (LogService) logServiceTracker.getService() : null;
        if (logService != null) {
            logService.log(LogService.LOG_ERROR, message, throwable);
        } else {
            LOG.error(message, throwable);
        }
    }

    /**
     * Resolves a Monitorable service from its reference.
     *
     * @param reference service reference
     * @return the Monitorable service object
     */
    public Monitorable getService(ServiceReference reference) {
        return (Monitorable) bc.getService(reference);
    }

    /**
     * Finds Monitorable service references, optionally filtered by service PID.
     *
     * @param monitorableId monitorable id to filter by, or null for all
     * @return matching references, or null when none match or the filter is invalid
     */
    public ServiceReference[] findMonitorableReferences(String monitorableId) {
        String filter = null;
        if (monitorableId != null) {
            filter = Utils.createServicePidFilter(monitorableId);
        }
        try {
            return bc.getServiceReferences(Monitorable.class.getName(), filter);
        } catch (InvalidSyntaxException e) {
            warning("Unable to find Monitorable References", e);
            return null;
        }
    }

    /**
     * Posts an event asynchronously via EventAdmin; logs a warning when
     * EventAdmin is not available.
     *
     * @param event event to post
     */
    public void postEvent(Event event) {
        EventAdmin eventAdmin = (EventAdmin) eventAdminTracker.getService();
        if (eventAdmin != null) {
            eventAdmin.postEvent(event);
        } else {
            warning("EventAdmin is unavailable", null);
        }
    }
}
| |
package de.marcermarc.veinminer.listener;
import de.marcermarc.veinminer.Util;
import de.marcermarc.veinminer.controller.PluginController;
import de.marcermarc.veinminer.objects.Tool;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.command.TabCompleter;
import org.bukkit.entity.Player;
import org.bukkit.event.Listener;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Executor and tab-completer for the plugin's veinminer command.
 *
 * <p>Sub-commands: {@code enable}/{@code e} and {@code disable}/{@code d}
 * (player only), {@code saveblocks}, {@code loadblocks} and
 * {@code lists <tool> <add|remove> <block>} (op only).
 *
 * <p>Bug fix vs. previous revision: {@code lists} with a wrong argument count
 * used to fall through into {@link #lists(CommandSender, String[])} after
 * printing the usage message, which threw ArrayIndexOutOfBoundsException for
 * fewer than 4 arguments. It now returns {@code false} immediately.
 */
public class Command implements CommandExecutor, TabCompleter, Listener {
    // Sub-command keywords.
    private static final String ENABLE = "enable";
    private static final String DISABLE = "disable";
    private static final String SAVE_BLOCKS = "saveblocks";
    private static final String LOAD_BLOCKS = "loadblocks";
    private static final String LISTS = "lists";
    private static final String ADD = "add";
    private static final String REMOVE = "remove";

    // User-facing chat messages (text intentionally unchanged).
    private static final String MESSAGE_PREFIX = ChatColor.DARK_GREEN + "[marcerVeinminer]" + ChatColor.RESET + " ";
    private static final String MESSAGE_ENABLE = MESSAGE_PREFIX + ChatColor.GREEN + "Veinminer enabled, mine a valid block sneaking to use it!";
    private static final String MESSAGE_DISABLE = MESSAGE_PREFIX + ChatColor.GREEN + "Veinminer disabled";
    private static final String MESSAGE_PLAYER_ONLY = MESSAGE_PREFIX + ChatColor.RED + "Its a Player only command";
    private static final String MESSAGE_NO_PERMISSION = MESSAGE_PREFIX + ChatColor.RED + "You have no permission for this command";
    private static final String MESSAGE_LISTS_ARGS = MESSAGE_PREFIX + ChatColor.RED + "Command needs 4 arguments.";
    private static final String MESSAGE_LISTS_ARG2 = MESSAGE_PREFIX + ChatColor.RED + "Please use a valid tool.";
    private static final String MESSAGE_LISTS_ARG3 = MESSAGE_PREFIX + ChatColor.RED + "Please use 'add' or 'remove'.";
    private static final String MESSAGE_LISTS_ARG4 = MESSAGE_PREFIX + ChatColor.RED + "This block does not exists.";
    private static final String MESSAGE_LISTS_ADD_ALREDY_IN = MESSAGE_PREFIX + ChatColor.GREEN + "Block is already in this list.";
    private static final String MESSAGE_LISTS_ADD_SUCCESS = MESSAGE_PREFIX + ChatColor.GREEN + "Block added, don't forget to save.";
    private static final String MESSAGE_LISTS_REMOVE_NOT_IN = MESSAGE_PREFIX + ChatColor.GREEN + "Block is not in this list.";
    private static final String MESSAGE_LISTS_REMOVE_SUCCESS = MESSAGE_PREFIX + ChatColor.GREEN + "Block removed, don't forget to save.";
    private static final String MESSAGE_SAVE = MESSAGE_PREFIX + ChatColor.GREEN + "Saved config.";
    private static final String MESSAGE_SAVE_FAILED = ChatColor.RED + "Failed to save the config!";
    private static final String MESSAGE_LOAD = MESSAGE_PREFIX + ChatColor.GREEN + "Loaded config.";
    private static final String MESSAGE_LOAD_FAILED = ChatColor.RED + "Failed to load the config!";

    // Tab-completion candidate lists per argument position / sender kind.
    private static final List<String> ARG1_NO_OP = Arrays.asList(ENABLE, DISABLE);
    private static final List<String> ARG1_OP = Arrays.asList(ENABLE, DISABLE, SAVE_BLOCKS, LOAD_BLOCKS, LISTS);
    private static final List<String> ARG1_CONSOLE = Arrays.asList(SAVE_BLOCKS, LOAD_BLOCKS, LISTS);
    private static final List<String> ARG2_LISTS = Stream.of(Tool.values()).map(Tool::toString).collect(Collectors.toList());
    private static final List<String> ARG3_LISTS = Arrays.asList(ADD, REMOVE);

    private final PluginController controller;

    /**
     * @param controller plugin controller giving access to the configuration.
     */
    public Command(PluginController controller) {
        this.controller = controller;
    }

    //region CommandExecutor
    /**
     * Dispatches the veinminer sub-commands.
     *
     * @return {@code true} when the sub-command was handled successfully.
     */
    @Override
    public boolean onCommand(CommandSender commandSender, org.bukkit.command.Command command, String s, String[] args) {
        if (args.length >= 1) {
            switch (args[0].toLowerCase()) {
                case ENABLE:
                case "e":
                    // Player-only toggle: remember the player as enabled.
                    if (commandSender instanceof Player) {
                        controller.getConfig().getEnabledPlayers().add((Player) commandSender);
                        commandSender.sendMessage(MESSAGE_ENABLE);
                    } else {
                        commandSender.sendMessage(MESSAGE_PLAYER_ONLY);
                    }
                    return true;
                case DISABLE:
                case "d":
                    if (commandSender instanceof Player) {
                        controller.getConfig().getEnabledPlayers().remove(commandSender);
                        commandSender.sendMessage(MESSAGE_DISABLE);
                    } else {
                        commandSender.sendMessage(MESSAGE_PLAYER_ONLY);
                    }
                    return true;
                case SAVE_BLOCKS:
                    if (!commandSender.isOp()) {
                        commandSender.sendMessage(MESSAGE_NO_PERMISSION);
                        return false;
                    } else if (controller.getConfig().saveConfig()) {
                        commandSender.sendMessage(MESSAGE_SAVE);
                        return true;
                    } else {
                        commandSender.sendMessage(MESSAGE_SAVE_FAILED);
                        return false;
                    }
                case LOAD_BLOCKS:
                    if (!commandSender.isOp()) {
                        commandSender.sendMessage(MESSAGE_NO_PERMISSION);
                        return false;
                    } else if (controller.getConfig().loadConfig()) {
                        commandSender.sendMessage(MESSAGE_LOAD);
                        return true;
                    } else {
                        commandSender.sendMessage(MESSAGE_LOAD_FAILED);
                        return false;
                    }
                case LISTS:
                    if (!commandSender.isOp()) {
                        commandSender.sendMessage(MESSAGE_NO_PERMISSION);
                        return false;
                    } else if (args.length != 4) {
                        // FIX: previously execution fell through to lists() here,
                        // which indexes args[1..3] and crashed on short input.
                        commandSender.sendMessage(MESSAGE_LISTS_ARGS);
                        return false;
                    }
                    return lists(commandSender, args);
            }
        }
        return false;
    }

    /**
     * Handles {@code lists <tool> <add|remove> <block>}; caller guarantees
     * {@code args.length == 4}.
     *
     * @return {@code true} when the block list was actually modified or the
     *         add/remove was a valid no-op, {@code false} on invalid input.
     */
    private boolean lists(CommandSender commandSender, String[] args) {
        if (!ARG2_LISTS.contains(args[1])) {
            commandSender.sendMessage(MESSAGE_LISTS_ARG2);
        } else if (!ARG3_LISTS.contains(args[2])) {
            commandSender.sendMessage(MESSAGE_LISTS_ARG3);
        } else {
            final Material material = Util.stringToMaterial(args[3]);
            // Only placeable blocks may enter a veinminer list.
            if (material == null || !material.isBlock()) {
                commandSender.sendMessage(MESSAGE_LISTS_ARG4);
                return false;
            }
            final Tool tool = Tool.getByName(args[1]);
            if (args[2].equalsIgnoreCase(ADD)) {
                if (tool.getVeinminerMaterials().contains(material)) {
                    commandSender.sendMessage(MESSAGE_LISTS_ADD_ALREDY_IN);
                } else {
                    tool.getVeinminerMaterials().add(material);
                    commandSender.sendMessage(MESSAGE_LISTS_ADD_SUCCESS);
                }
            } else if (args[2].equalsIgnoreCase(REMOVE)) {
                if (tool.getVeinminerMaterials().contains(material)) {
                    tool.getVeinminerMaterials().remove(material);
                    commandSender.sendMessage(MESSAGE_LISTS_REMOVE_SUCCESS);
                } else {
                    commandSender.sendMessage(MESSAGE_LISTS_REMOVE_NOT_IN);
                }
            }
            return true;
        }
        return false;
    }
    //endregion

    //region TabComplete
    /** Delegates tab completion to a per-argument-position helper. */
    @Override
    public List<String> onTabComplete(CommandSender sender, org.bukkit.command.Command command, String alias, String[] args) {
        switch (args.length) {
            case 1:
                return onTabCompleteArg1(sender, args);
            case 2:
                return onTabCompleteArg2(sender, args);
            case 3:
                return onTabCompleteArg3(sender, args);
            case 4:
                return onTabCompleteArg4(sender, args);
            default:
                return null;
        }
    }

    /** First argument: candidates depend on the sender (console / op / player). */
    private List<String> onTabCompleteArg1(CommandSender sender, String[] args) {
        if (!(sender instanceof Player)) {
            return Util.tabCompleteFilter(ARG1_CONSOLE, args[0]);
        } else if (sender.isOp()) {
            return Util.tabCompleteFilter(ARG1_OP, args[0]);
        } else {
            return Util.tabCompleteFilter(ARG1_NO_OP, args[0]);
        }
    }

    /** Second argument: tool names, only for ops inside the lists sub-command. */
    private List<String> onTabCompleteArg2(CommandSender sender, String[] args) {
        if (args[0].equals(LISTS) && sender.isOp()) {
            return Util.tabCompleteFilter(ARG2_LISTS, args[1]);
        }
        return null;
    }

    /** Third argument: "add"/"remove", only when arg1/arg2 are valid. */
    private List<String> onTabCompleteArg3(CommandSender sender, String[] args) {
        if (args[0].equals(LISTS) && ARG2_LISTS.contains(args[1]) && sender.isOp()) {
            return Util.tabCompleteFilter(ARG3_LISTS, args[2]);
        }
        return null;
    }

    /**
     * Fourth argument: block names — for "add" the blocks not yet in the
     * tool's list, for "remove" the blocks currently in it.
     */
    private List<String> onTabCompleteArg4(CommandSender sender, String[] args) {
        if (args[0].equals(LISTS) && ARG2_LISTS.contains(args[1]) && ARG3_LISTS.contains(args[2]) && sender.isOp()) {
            if (args[2].equals(ADD)) {
                return Util.tabCompleteFilter(
                        Stream.of(Material.values())
                                .filter(Material::isBlock)
                                .filter(material -> !Tool.getByName(args[1]).getVeinminerMaterials().contains(material))
                                .map(Util::materialToString)
                                .collect(Collectors.toList()),
                        args[3]);
            } else if (args[2].equals(REMOVE)) {
                return Util.tabCompleteFilter(
                        Stream.of(Material.values())
                                .filter(Material::isBlock)
                                .filter(material -> Tool.getByName(args[1]).getVeinminerMaterials().contains(material))
                                .map(Util::materialToString)
                                .collect(Collectors.toList()),
                        args[3]);
            }
        }
        return null;
    }
    //endregion
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.ex;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeInsight.daemon.impl.SeverityRegistrar;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.packageDependencies.DependencyValidationManager;
import com.intellij.profile.ProfileEx;
import com.intellij.profile.codeInspection.InspectionProfileManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.scope.packageSet.*;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.SmartList;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Registration entry for a single inspection tool: couples the tool with a
 * default (all-scope) state plus an optional ordered list of scope-specific
 * overrides. Scope states in {@link #myTools} take precedence, in list order,
 * over {@link #myDefaultState}.
 *
 * <p>Fix vs. previous revision: {@code equals} was overridden without
 * {@code hashCode}, violating the {@link Object} contract; a consistent
 * {@code hashCode} is now provided.
 */
public class ToolsImpl implements Tools {
    // Attribute names shared by writeExternal/readExternal.
    @NonNls static final String ENABLED_BY_DEFAULT_ATTRIBUTE = "enabled_by_default";
    @NonNls static final String ENABLED_ATTRIBUTE = "enabled";
    @NonNls static final String LEVEL_ATTRIBUTE = "level";

    private final String myShortName;
    // State used when no scope-specific entry matches the element.
    private final ScopeToolState myDefaultState;
    // Scope-specific overrides in priority order; null means "none defined".
    private List<ScopeToolState> myTools;
    // Whole-tool switch; when false the tool is off regardless of scope states.
    private boolean myEnabled;

    ToolsImpl(@NotNull InspectionToolWrapper toolWrapper, @NotNull HighlightDisplayLevel level, boolean enabled, boolean enabledByDefault) {
        myShortName = toolWrapper.getShortName();
        myDefaultState = new ScopeToolState(CustomScopesProviderEx.getAllScope(), toolWrapper, enabledByDefault, level);
        myTools = null;
        myEnabled = enabled;
    }

    /** Appends a scope-specific state (lowest priority among overrides). */
    @NotNull
    public ScopeToolState addTool(@NotNull NamedScope scope, @NotNull InspectionToolWrapper toolWrapper, boolean enabled, @NotNull HighlightDisplayLevel level) {
        return insertTool(scope, toolWrapper, enabled, level, myTools != null ? myTools.size() : 0);
    }

    /** Inserts a scope-specific state at the front (highest priority). */
    @NotNull
    public ScopeToolState prependTool(@NotNull NamedScope scope, @NotNull InspectionToolWrapper toolWrapper, boolean enabled, @NotNull HighlightDisplayLevel level) {
        return insertTool(scope, toolWrapper, enabled, level, 0);
    }

    /** Appends a state referenced by scope name (scope may not be resolved yet). */
    public ScopeToolState addTool(@NotNull String scopeName, @NotNull InspectionToolWrapper toolWrapper, boolean enabled, @NotNull HighlightDisplayLevel level) {
        return insertTool(new ScopeToolState(scopeName, toolWrapper, enabled, level), myTools != null ? myTools.size() : 0);
    }

    @NotNull
    private ScopeToolState insertTool(@NotNull NamedScope scope, @NotNull InspectionToolWrapper toolWrapper, boolean enabled, @NotNull HighlightDisplayLevel level, int idx) {
        return insertTool(new ScopeToolState(scope, toolWrapper, enabled, level), idx);
    }

    /** Lazily creates the override list; first enabled override turns the tool on. */
    @NotNull
    private ScopeToolState insertTool(@NotNull final ScopeToolState scopeToolState, final int idx) {
        if (myTools == null) {
            myTools = new ArrayList<>();
            if (scopeToolState.isEnabled()) {
                setEnabled(true);
            }
        }
        myTools.add(idx, scopeToolState);
        return scopeToolState;
    }

    /**
     * Returns the tool wrapper for the first scope containing {@code element},
     * falling back to the default state (also for a null element once
     * overrides exist — the first override wins in that case).
     */
    @NotNull
    @Override
    public InspectionToolWrapper getInspectionTool(@Nullable PsiElement element) {
        if (myTools != null) {
            final Project project = element == null ? null : element.getProject();
            final PsiFile containingFile = element == null ? null : InjectedLanguageManager.getInstance(project).getTopLevelFile(element);
            for (ScopeToolState state : myTools) {
                if (element == null) {
                    return state.getTool();
                }
                NamedScope scope = state.getScope(project);
                if (scope != null) {
                    final PackageSet packageSet = scope.getValue();
                    if (packageSet != null) {
                        if (containingFile != null && packageSet.contains(containingFile, DependencyValidationManager.getInstance(project))) {
                            return state.getTool();
                        }
                    }
                }
            }
        }
        return myDefaultState.getTool();
    }

    @NotNull
    @Override
    public String getShortName() {
        return myShortName;
    }

    /** Lets every tool instance (per-scope and default) release project resources. */
    public void cleanupTools(@NotNull Project project) {
        for (ScopeToolState state : getTools()) {
            state.getTool().cleanup(project);
        }
    }

    /** Propagates a scope-definition change to all states. */
    public void scopesChanged() {
        if (myTools != null) {
            for (ScopeToolState tool : myTools) {
                tool.scopesChanged();
            }
        }
        myDefaultState.scopesChanged();
    }

    /**
     * Serializes this entry: one {@code scope} child per override, then the
     * default state's attributes and (if initialized) tool settings.
     */
    public void writeExternal(@NotNull Element inspectionElement) {
        if (myTools != null) {
            for (ScopeToolState state : myTools) {
                final Element scopeElement = new Element("scope");
                scopeElement.setAttribute("name", state.getScopeName());
                scopeElement.setAttribute(LEVEL_ATTRIBUTE, state.getLevel().getName());
                scopeElement.setAttribute(ENABLED_ATTRIBUTE, Boolean.toString(state.isEnabled()));
                InspectionToolWrapper toolWrapper = state.getTool();
                // Skip settings of tools that were never loaded to avoid forcing init.
                if (toolWrapper.isInitialized()) {
                    toolWrapper.getTool().writeSettings(scopeElement);
                }
                inspectionElement.addContent(scopeElement);
            }
        }
        inspectionElement.setAttribute(ENABLED_ATTRIBUTE, Boolean.toString(isEnabled()));
        inspectionElement.setAttribute(LEVEL_ATTRIBUTE, getLevel().getName());
        inspectionElement.setAttribute(ENABLED_BY_DEFAULT_ATTRIBUTE, Boolean.toString(myDefaultState.isEnabled()));
        InspectionToolWrapper toolWrapper = myDefaultState.getTool();
        if (toolWrapper.isInitialized()) {
            ScopeToolState.tryWriteSettings(toolWrapper.getTool(), inspectionElement);
        }
    }

    /**
     * Restores this entry from XML produced by {@link #writeExternal}.
     * Also records scope ordering into {@code dependencies}
     * (each scope is followed by the scopes after it).
     */
    void readExternal(@NotNull Element toolElement, @NotNull InspectionProfileManager profileManager, Map<String, List<String>> dependencies) {
        final String levelName = toolElement.getAttributeValue(LEVEL_ATTRIBUTE);
        final SeverityRegistrar registrar = profileManager.getSeverityRegistrar();
        HighlightDisplayLevel level = levelName != null ? HighlightDisplayLevel.find(registrar.getSeverity(levelName)) : null;
        if (level == null) {
            // Unknown/missing severity degrades gracefully to WARNING.
            level = HighlightDisplayLevel.WARNING;
        }
        myDefaultState.setLevel(level);
        final String enabled = toolElement.getAttributeValue(ENABLED_ATTRIBUTE);
        final boolean isEnabled = Boolean.parseBoolean(enabled);
        final String enabledTool = toolElement.getAttributeValue(ENABLED_BY_DEFAULT_ATTRIBUTE);
        // Older profiles lack enabled_by_default; mirror the global flag then.
        myDefaultState.setEnabled(enabledTool != null ? Boolean.parseBoolean(enabledTool) : isEnabled);
        final InspectionToolWrapper toolWrapper = myDefaultState.getTool();
        final List<Element> scopeElements = toolElement.getChildren(ProfileEx.SCOPE);
        if (!scopeElements.isEmpty()) {
            final List<String> scopeNames = new SmartList<>();
            for (Element scopeElement : scopeElements) {
                final String scopeName = scopeElement.getAttributeValue(ProfileEx.NAME);
                if (scopeName == null) {
                    continue;
                }
                final NamedScopesHolder scopesHolder = profileManager.getScopesManager();
                NamedScope namedScope = null;
                if (scopesHolder != null) {
                    namedScope = scopesHolder.getScope(scopeName);
                }
                final String errorLevel = scopeElement.getAttributeValue(LEVEL_ATTRIBUTE);
                final String enabledInScope = scopeElement.getAttributeValue(ENABLED_ATTRIBUTE);
                final InspectionToolWrapper copyToolWrapper = toolWrapper.createCopy();
                // check if unknown children exists
                if (scopeElement.getAttributes().size() > 3 || !scopeElement.getChildren().isEmpty()) {
                    copyToolWrapper.getTool().readSettings(scopeElement);
                }
                HighlightDisplayLevel scopeLevel = errorLevel != null ?
                                                   HighlightDisplayLevel.find(registrar.getSeverity(errorLevel)) : null;
                if (scopeLevel == null) {
                    scopeLevel = level;
                }
                if (namedScope != null) {
                    addTool(namedScope, copyToolWrapper, Boolean.parseBoolean(enabledInScope), scopeLevel);
                }
                else {
                    // Scope not (yet) registered: keep it by name for later resolution.
                    addTool(scopeName, copyToolWrapper, Boolean.parseBoolean(enabledInScope), scopeLevel);
                }
                scopeNames.add(scopeName);
            }
            for (int i = 0; i < scopeNames.size(); i++) {
                String scopeName = scopeNames.get(i);
                List<String> order = dependencies.computeIfAbsent(scopeName, __ -> new ArrayList<>());
                for (int j = i + 1; j < scopeNames.size(); j++) {
                    order.add(scopeNames.get(j));
                }
            }
        }
        // check if unknown children exists
        if (toolElement.getAttributes().size() > 4 || toolElement.getChildren().size() > scopeElements.size()) {
            ScopeToolState.tryReadSettings(toolWrapper.getTool(), toolElement);
        }
        myEnabled = isEnabled;
    }

    /**
     * Warning: Usage of this method is discouraged as if separate tool options are defined for different scopes, it just returns
     * the options for the first scope which may lead to unexpected results. Consider using {@link #getInspectionTool(PsiElement)} instead.
     *
     * @return an InspectionToolWrapper associated with this tool.
     */
    @NotNull
    @Override
    public InspectionToolWrapper getTool() {
        if (myTools == null) return myDefaultState.getTool();
        return myTools.iterator().next().getTool();
    }

    /** All states, overrides first, default state last. */
    @Override
    @NotNull
    public List<ScopeToolState> getTools() {
        if (myTools == null) {
            return Collections.singletonList(myDefaultState);
        }
        List<ScopeToolState> result = new ArrayList<>(myTools);
        result.add(myDefaultState);
        return result;
    }

    /** Appends all states (overrides then default) into {@code result}. */
    @Override
    public void collectTools(@NotNull List<ScopeToolState> result) {
        if (myTools != null) {
            result.addAll(myTools);
        }
        result.add(myDefaultState);
    }

    @Override
    @NotNull
    public ScopeToolState getDefaultState() {
        return myDefaultState;
    }

    /** Toggles the default state; disabling may switch the whole tool off. */
    public void setDefaultEnabled(boolean isEnabled) {
        getDefaultState().setEnabled(isEnabled);
        if (isEnabled) {
            setEnabled(true);
        }
        else {
            disableWholeToolIfCan();
        }
    }

    /** Removes the first override with the given scope name, if any. */
    public void removeScope(@NotNull final String scopeName) {
        if (myTools != null) {
            for (ScopeToolState tool : myTools) {
                if (scopeName.equals(tool.getScopeName())) {
                    // break immediately after remove, so no ConcurrentModificationException.
                    myTools.remove(tool);
                    break;
                }
            }
            checkToolsIsEmpty();
        }
    }

    private void checkToolsIsEmpty() {
        if (myTools.isEmpty()) {
            myTools = null;
            setEnabled(myDefaultState.isEnabled());
        }
    }

    public void removeAllScopes() {
        // NOTE(review): unlike checkToolsIsEmpty(), this does not re-sync
        // myEnabled with the default state — confirm whether that is intended.
        myTools = null;
    }

    /** Replaces the scope of the override at {@code idx}, keeping its other settings. */
    public void setScope(int idx, NamedScope namedScope) {
        if (myTools != null && myTools.size() > idx && idx >= 0) {
            final ScopeToolState scopeToolState = myTools.get(idx);
            InspectionToolWrapper toolWrapper = scopeToolState.getTool();
            myTools.remove(idx);
            myTools.add(idx, new ScopeToolState(namedScope, toolWrapper, scopeToolState.isEnabled(), scopeToolState.getLevel()));
        }
    }

    /** Enabled flag for a particular scope, falling back to the default state. */
    public boolean isEnabled(NamedScope namedScope, Project project) {
        if (!myEnabled) return false;
        if (namedScope != null && myTools != null) {
            for (ScopeToolState state : myTools) {
                if (Comparing.equal(namedScope, state.getScope(project))) return state.isEnabled();
            }
        }
        return myDefaultState.isEnabled();
    }

    /** Severity level for the first scope containing {@code element}. */
    public HighlightDisplayLevel getLevel(PsiElement element) {
        if (myTools == null || element == null) return myDefaultState.getLevel();
        final Project project = element.getProject();
        final DependencyValidationManager manager = DependencyValidationManager.getInstance(project);
        for (ScopeToolState state : myTools) {
            final NamedScope scope = state.getScope(project);
            final PackageSet set = scope != null ? scope.getValue() : null;
            if (set != null && set.contains(element.getContainingFile(), manager)) {
                return state.getLevel();
            }
        }
        return myDefaultState.getLevel();
    }

    public HighlightDisplayLevel getLevel() {
        return myDefaultState.getLevel();
    }

    @Override
    public boolean isEnabled() {
        return myEnabled;
    }

    /** Enabled flag for the first scope containing {@code element}. */
    @Override
    public boolean isEnabled(PsiElement element) {
        if (!myEnabled) return false;
        if (myTools == null || element == null) return myDefaultState.isEnabled();
        final Project project = element.getProject();
        final DependencyValidationManager manager = DependencyValidationManager.getInstance(project);
        for (ScopeToolState state : myTools) {
            final NamedScope scope = state.getScope(project);
            if (scope != null) {
                final PackageSet set = scope.getValue();
                if (set != null && set.contains(element.getContainingFile(), manager)) {
                    return state.isEnabled();
                }
            }
        }
        return myDefaultState.isEnabled();
    }

    /**
     * Tool wrapper for {@code element} when it is enabled there (optionally
     * excluding DO_NOT_SHOW severities), otherwise null.
     */
    @Nullable
    @Override
    public InspectionToolWrapper getEnabledTool(@Nullable PsiElement element, boolean includeDoNotShow) {
        if (!myEnabled) return null;
        if (myTools != null && element != null) {
            final Project project = element.getProject();
            final DependencyValidationManager manager = DependencyValidationManager.getInstance(project);
            for (ScopeToolState state : myTools) {
                final NamedScope scope = state.getScope(project);
                if (scope != null) {
                    final PackageSet set = scope.getValue();
                    if (set != null && set.contains(element.getContainingFile(), manager)) {
                        return state.isEnabled() && (includeDoNotShow || !HighlightDisplayLevel.DO_NOT_SHOW.equals(state.getLevel())) ? state.getTool() : null;
                    }
                }
            }
        }
        return myDefaultState.isEnabled() && (includeDoNotShow || !HighlightDisplayLevel.DO_NOT_SHOW.equals(myDefaultState.getLevel())) ? myDefaultState.getTool() : null;
    }

    @Nullable
    @Override
    public InspectionToolWrapper getEnabledTool(@Nullable PsiElement element) {
        return getEnabledTool(element, true);
    }

    public void setEnabled(boolean enabled) {
        myEnabled = enabled;
    }

    /** Enables the states matching {@code namedScope} and the tool as a whole. */
    public void enableTool(@NotNull NamedScope namedScope, Project project) {
        if (myTools != null) {
            for (ScopeToolState state : myTools) {
                if (namedScope.equals(state.getScope(project))) {
                    state.setEnabled(true);
                }
            }
        }
        setEnabled(true);
    }

    /** Disables the states matching {@code namedScope}; may switch the tool off. */
    public void disableTool(NamedScope namedScope, Project project) {
        if (myTools != null) {
            for (ScopeToolState state : myTools) {
                if (Comparing.equal(state.getScope(project), namedScope)) {
                    state.setEnabled(false);
                }
            }
            disableWholeToolIfCan();
        }
    }

    /**
     * Disables the tool for the first scope containing {@code element};
     * falls back to disabling the default state (and, without overrides,
     * the whole tool).
     */
    public void disableTool(@NotNull PsiElement element) {
        final Project project = element.getProject();
        final DependencyValidationManager validationManager = DependencyValidationManager.getInstance(project);
        if (myTools != null) {
            for (ScopeToolState state : myTools) {
                final NamedScope scope = state.getScope(project);
                if (scope != null) {
                    final PackageSet packageSet = scope.getValue();
                    if (packageSet != null) {
                        final PsiFile file = element.getContainingFile();
                        if (file != null) {
                            if (packageSet.contains(file, validationManager)) {
                                state.setEnabled(false);
                                return;
                            }
                        }
                        else {
                            // No containing file (e.g. directories): match by virtual file.
                            if (packageSet instanceof PackageSetBase &&
                                ((PackageSetBase)packageSet).contains(PsiUtilCore.getVirtualFile(element), project, validationManager)) {
                                state.setEnabled(false);
                                return;
                            }
                        }
                    }
                }
            }
            myDefaultState.setEnabled(false);
        }
        else {
            myDefaultState.setEnabled(false);
            setEnabled(false);
        }
    }

    /** Severity level for a particular scope, falling back to the default state. */
    @NotNull
    public HighlightDisplayLevel getLevel(final NamedScope scope, Project project) {
        if (myTools != null && scope != null){
            for (ScopeToolState state : myTools) {
                if (Comparing.equal(state.getScope(project), scope)) {
                    return state.getLevel();
                }
            }
        }
        return myDefaultState.getLevel();
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof ToolsImpl)) return false;
        ToolsImpl tools = (ToolsImpl)o;
        if (myEnabled != tools.myEnabled) return false;
        if (getTools().size() != tools.getTools().size()) return false;
        for (int i = 0; i < getTools().size(); i++) {
            final ScopeToolState state = getTools().get(i);
            final ScopeToolState toolState = tools.getTools().get(i);
            if (!state.equalTo(toolState)) {
                return false;
            }
        }
        return true;
    }

    /**
     * FIX: equals() was overridden without hashCode(). Equal instances share
     * {@code myEnabled} and the tool-state count, so this hash is consistent
     * with {@link #equals(Object)}.
     */
    @Override
    public int hashCode() {
        return 31 * Boolean.hashCode(myEnabled) + getTools().size();
    }

    /** Changes the level of the override named {@code scopeName} (null → default state). */
    public void setLevel(@NotNull HighlightDisplayLevel level, @Nullable String scopeName, Project project) {
        if (scopeName == null) {
            myDefaultState.setLevel(level);
        } else {
            if (myTools == null) {
                return;
            }
            ScopeToolState scopeToolState = null;
            int index = -1;
            for (int i = 0; i < myTools.size(); i++) {
                ScopeToolState tool = myTools.get(i);
                if (scopeName.equals(tool.getScopeName())) {
                    scopeToolState = tool;
                    myTools.remove(tool);
                    index = i;
                    break;
                }
            }
            if (index < 0) {
                throw new IllegalStateException("Scope " + scopeName + " not found");
            }
            // ScopeToolState is immutable w.r.t. level here: rebuild at the same index.
            final InspectionToolWrapper toolWrapper = scopeToolState.getTool();
            final NamedScope scope = scopeToolState.getScope(project);
            if (scope != null) {
                myTools.add(index, new ScopeToolState(scope, toolWrapper, scopeToolState.isEnabled(), level));
            }
            else {
                myTools.add(index, new ScopeToolState(scopeToolState.getScopeName(), toolWrapper, scopeToolState.isEnabled(), level));
            }
        }
    }

    /** Replaces tool, level and enabled flag of the default state at once. */
    public void setDefaultState(@NotNull InspectionToolWrapper toolWrapper, boolean enabled, @NotNull HighlightDisplayLevel level) {
        myDefaultState.setTool(toolWrapper);
        myDefaultState.setLevel(level);
        myDefaultState.setEnabled(enabled);
    }

    public void setLevel(@NotNull HighlightDisplayLevel level) {
        myDefaultState.setLevel(level);
    }

    /** Scope-specific overrides only (without the default state); may be null. */
    @Nullable
    public List<ScopeToolState> getNonDefaultTools() {
        return myTools;
    }

    /** Switches the whole tool off when neither the default nor any scope state is enabled. */
    private void disableWholeToolIfCan() {
        if (myDefaultState.isEnabled()) {
            return;
        }
        if (myTools != null) {
            for (ScopeToolState tool : myTools) {
                if (tool.isEnabled()) {
                    return;
                }
            }
        }
        setEnabled(false);
    }
}
| |
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2015 Groupon, Inc
* Copyright 2014-2015 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.subscription.api.user;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.joda.time.DateTime;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.ObjectType;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.catalog.api.BillingPeriod;
import org.killbill.billing.catalog.api.Catalog;
import org.killbill.billing.catalog.api.CatalogApiException;
import org.killbill.billing.catalog.api.CatalogService;
import org.killbill.billing.catalog.api.PhaseType;
import org.killbill.billing.catalog.api.Plan;
import org.killbill.billing.catalog.api.PlanChangeResult;
import org.killbill.billing.catalog.api.PlanPhase;
import org.killbill.billing.catalog.api.PlanPhasePriceOverride;
import org.killbill.billing.catalog.api.PlanPhasePriceOverridesWithCallContext;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.PlanSpecifier;
import org.killbill.billing.catalog.api.PriceList;
import org.killbill.billing.catalog.api.PriceListSet;
import org.killbill.billing.catalog.api.Product;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.entitlement.api.Entitlement.EntitlementState;
import org.killbill.billing.subscription.alignment.PlanAligner;
import org.killbill.billing.subscription.alignment.TimedPhase;
import org.killbill.billing.subscription.api.SubscriptionBase;
import org.killbill.billing.subscription.api.SubscriptionBaseApiService;
import org.killbill.billing.subscription.api.svcs.DefaultPlanPhasePriceOverridesWithCallContext;
import org.killbill.billing.subscription.engine.addon.AddonUtils;
import org.killbill.billing.subscription.engine.dao.SubscriptionDao;
import org.killbill.billing.subscription.events.SubscriptionBaseEvent;
import org.killbill.billing.subscription.events.phase.PhaseEvent;
import org.killbill.billing.subscription.events.phase.PhaseEventData;
import org.killbill.billing.subscription.events.user.ApiEvent;
import org.killbill.billing.subscription.events.user.ApiEventBuilder;
import org.killbill.billing.subscription.events.user.ApiEventCancel;
import org.killbill.billing.subscription.events.user.ApiEventChange;
import org.killbill.billing.subscription.events.user.ApiEventCreate;
import org.killbill.billing.subscription.events.user.ApiEventReCreate;
import org.killbill.billing.subscription.events.user.ApiEventUncancel;
import org.killbill.billing.subscription.exceptions.SubscriptionBaseError;
import org.killbill.billing.util.callcontext.CallContext;
import org.killbill.billing.util.callcontext.InternalCallContextFactory;
import org.killbill.billing.util.callcontext.TenantContext;
import org.killbill.clock.Clock;
import org.killbill.clock.DefaultClock;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.inject.Inject;
public class DefaultSubscriptionBaseApiService implements SubscriptionBaseApiService {
private final Clock clock;
private final SubscriptionDao dao;
private final CatalogService catalogService;
private final PlanAligner planAligner;
private final AddonUtils addonUtils;
private final InternalCallContextFactory internalCallContextFactory;
/**
 * Guice-injected constructor; stores all collaborators as-is.
 * No work is performed beyond field assignment.
 */
@Inject
public DefaultSubscriptionBaseApiService(final Clock clock, final SubscriptionDao dao, final CatalogService catalogService,
                                         final PlanAligner planAligner, final AddonUtils addonUtils,
                                         final InternalCallContextFactory internalCallContextFactory) {
    this.clock = clock;
    this.catalogService = catalogService;
    this.planAligner = planAligner;
    this.dao = dao;
    this.addonUtils = addonUtils;
    this.internalCallContextFactory = internalCallContextFactory;
}
/**
 * Creates a single subscription from the builder, persists its creation
 * events (via createFromSubscription) and returns the populated subscription.
 */
@Override
public DefaultSubscriptionBase createPlan(final SubscriptionBuilder builder, final Plan plan, final PhaseType initialPhase,
                                          final String realPriceList, final DateTime requestedDate, final DateTime effectiveDate, final DateTime processedDate,
                                          final CallContext context) throws SubscriptionBaseApiException {
    final DefaultSubscriptionBase subscription = new DefaultSubscriptionBase(builder, this, clock);
    // reCreate=false: this is a fresh creation, not a re-creation of a cancelled subscription.
    createFromSubscription(subscription, plan, initialPhase, realPriceList, requestedDate, effectiveDate, processedDate, false, context);
    return subscription;
}
/**
 * Creates a batch of subscriptions (base plus add-ons) in one DAO call,
 * then rebuilds and returns the BASE subscription's transitions.
 *
 * NOTE(review): subscriptionBaseList.get(0) throws IndexOutOfBoundsException
 * for an empty iterable, and findBaseSubscription() may return null (NPE on
 * rebuildTransitions below) — presumably callers guarantee a non-empty batch
 * containing a BASE subscription; confirm against call sites.
 */
@Override
public DefaultSubscriptionBase createPlans(final Iterable<SubscriptionSpecifier> subscriptions, final CallContext context) throws SubscriptionBaseApiException {
    Map<UUID, List<SubscriptionBaseEvent>> eventsMap = new HashMap<UUID, List<SubscriptionBaseEvent>>();
    List<DefaultSubscriptionBase> subscriptionBaseList = new ArrayList<DefaultSubscriptionBase>();
    for (SubscriptionSpecifier subscription : subscriptions) {
        try {
            final DefaultSubscriptionBase subscriptionBase = new DefaultSubscriptionBase(subscription.getBuilder(), this, clock);
            final InternalCallContext internalCallContext = createCallContextFromBundleId(subscriptionBase.getBundleId(), context);
            // Build the creation events per subscription; persistence happens in one batch below.
            final List<SubscriptionBaseEvent> events = getEventsOnCreation(subscriptionBase.getBundleId(), subscriptionBase.getId(), subscriptionBase.getAlignStartDate(),
                                                                           subscriptionBase.getBundleStartDate(), subscriptionBase.getActiveVersion(), subscription.getPlan(),
                                                                           subscription.getInitialPhase(), subscription.getRealPriceList(), subscription.getRequestedDate(),
                                                                           subscription.getEffectiveDate(), subscription.getProcessedDate(), false, internalCallContext);
            eventsMap.put(subscriptionBase.getId(), events);
            subscriptionBaseList.add(subscriptionBase);
        } catch (final CatalogApiException e) {
            throw new SubscriptionBaseApiException(e);
        }
    }
    // All subscriptions belong to the same bundle — the first one's bundle id is used for the batch context.
    final InternalCallContext internalCallContext = createCallContextFromBundleId(subscriptionBaseList.get(0).getBundleId(), context);
    dao.createSubscriptionWithAddOns(subscriptionBaseList, eventsMap, internalCallContext);
    final DefaultSubscriptionBase baseSubscription = findBaseSubscription(subscriptionBaseList);
    try {
        baseSubscription.rebuildTransitions(dao.getEventsForSubscription(baseSubscription.getId(), internalCallContext),
                                            catalogService.getFullCatalog(internalCallContext));
    } catch (CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
    return baseSubscription;
}
/**
 * Returns the first subscription in the list whose product category is BASE,
 * or {@code null} when the list contains none.
 */
private DefaultSubscriptionBase findBaseSubscription(final List<DefaultSubscriptionBase> subscriptionBaseList) {
    for (final DefaultSubscriptionBase subscription : subscriptionBaseList) {
        if (ProductCategory.BASE.equals(subscription.getCategory())) {
            return subscription;
        }
    }
    return null;
}
/**
 * Re-creates a previously cancelled subscription with the given plan spec.
 *
 * @throws SubscriptionBaseApiException when the subscription is not cancelled
 *         (SUB_RECREATE_BAD_STATE) or the catalog lookup fails.
 * @deprecated kept for backward compatibility — see interface declaration.
 */
@Deprecated
@Override
public boolean recreatePlan(final DefaultSubscriptionBase subscription, final PlanPhaseSpecifier spec, final List<PlanPhasePriceOverride> overrides, final DateTime requestedDateWithMs, final CallContext context)
        throws SubscriptionBaseApiException {
    final EntitlementState currentState = subscription.getState();
    // Only a cancelled (or state-less) subscription may be re-created.
    if (currentState != null && currentState != EntitlementState.CANCELLED) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_RECREATE_BAD_STATE, subscription.getId(), currentState);
    }
    final DateTime now = clock.getUTCNow();
    // Millisecond precision is dropped; a missing requested date means "now".
    final DateTime effectiveDate = (requestedDateWithMs != null) ? DefaultClock.truncateMs(requestedDateWithMs) : now;
    validateEffectiveDate(subscription, effectiveDate);
    try {
        final String realPriceList = (spec.getPriceListName() == null) ? PriceListSet.DEFAULT_PRICELIST_NAME : spec.getPriceListName();
        final InternalTenantContext internalCallContext = createTenantContextFromBundleId(subscription.getBundleId(), context);
        final PlanPhasePriceOverridesWithCallContext overridesWithContext = new DefaultPlanPhasePriceOverridesWithCallContext(overrides, context);
        final Plan plan = catalogService.getFullCatalog(internalCallContext).createOrFindPlan(spec.getProductName(), spec.getBillingPeriod(), realPriceList, overridesWithContext, effectiveDate);
        final PlanPhase phase = plan.getAllPhases()[0];
        if (phase == null) {
            throw new SubscriptionBaseError(String.format("No initial PlanPhase for Product %s, term %s and set %s does not exist in the catalog",
                                                          spec.getProductName(), spec.getBillingPeriod().toString(), realPriceList));
        }
        final DateTime processedDate = now;
        // reCreate=true routes persistence through dao.recreateSubscription.
        createFromSubscription(subscription, plan, spec.getPhaseType(), realPriceList, now, effectiveDate, processedDate, true, context);
        return true;
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Builds and persists the creation (or re-creation) events for the given subscription,
 * then rebuilds its in-memory transitions from the events actually stored.
 */
private void createFromSubscription(final DefaultSubscriptionBase subscription, final Plan plan, final PhaseType initialPhase,
                                    final String realPriceList, final DateTime requestedDate, final DateTime effectiveDate, final DateTime processedDate,
                                    final boolean reCreate, final CallContext context) throws SubscriptionBaseApiException {
    final InternalCallContext callCtx = createCallContextFromBundleId(subscription.getBundleId(), context);
    try {
        final List<SubscriptionBaseEvent> creationEvents = getEventsOnCreation(subscription.getBundleId(), subscription.getId(),
                                                                               subscription.getAlignStartDate(), subscription.getBundleStartDate(),
                                                                               subscription.getActiveVersion(), plan, initialPhase, realPriceList,
                                                                               requestedDate, effectiveDate, processedDate, reCreate, callCtx);
        // Re-creations use a dedicated DAO path so the event stream records an SUB re-create.
        if (reCreate) {
            dao.recreateSubscription(subscription, creationEvents, callCtx);
        } else {
            dao.createSubscription(subscription, creationEvents, callCtx);
        }
        subscription.rebuildTransitions(dao.getEventsForSubscription(subscription.getId(), callCtx), catalogService.getFullCatalog(callCtx));
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Cancels an active subscription, using the cancellation policy that the catalog
 * defines for the subscription's current plan phase.
 *
 * @return {@code true} when the cancellation is effective immediately
 * @throws SubscriptionBaseApiException if the subscription is not active or the catalog lookup fails
 */
@Override
public boolean cancel(final DefaultSubscriptionBase subscription, final CallContext context) throws SubscriptionBaseApiException {
    final EntitlementState state = subscription.getState();
    if (state != EntitlementState.ACTIVE) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_CANCEL_BAD_STATE, subscription.getId(), state);
    }
    final DateTime now = clock.getUTCNow();
    final Plan plan = subscription.getCurrentPlan();
    final PlanPhaseSpecifier phaseSpecifier = new PlanPhaseSpecifier(plan.getProduct().getName(),
                                                                     plan.getProduct().getCategory(),
                                                                     plan.getRecurringBillingPeriod(),
                                                                     subscription.getCurrentPriceList().getName(),
                                                                     subscription.getCurrentPhase().getPhaseType());
    try {
        final InternalTenantContext tenantCtx = createTenantContextFromBundleId(subscription.getBundleId(), context);
        // Let the catalog decide when the cancellation becomes effective.
        final BillingActionPolicy cancelPolicy = catalogService.getFullCatalog(tenantCtx).planCancelPolicy(phaseSpecifier, now);
        final DateTime effective = subscription.getPlanChangeEffectiveDate(cancelPolicy);
        return doCancelPlan(subscription, now, effective, context);
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Cancels an active subscription at an explicitly requested date
 * (milliseconds truncated; defaults to now when the date is null).
 */
@Override
public boolean cancelWithRequestedDate(final DefaultSubscriptionBase subscription, final DateTime requestedDateWithMs, final CallContext context) throws SubscriptionBaseApiException {
    final EntitlementState state = subscription.getState();
    if (state != EntitlementState.ACTIVE) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_CANCEL_BAD_STATE, subscription.getId(), state);
    }
    final DateTime now = clock.getUTCNow();
    final DateTime effective = (requestedDateWithMs == null) ? now : DefaultClock.truncateMs(requestedDateWithMs);
    return doCancelPlan(subscription, now, effective, context);
}
/**
 * Cancels an active subscription, computing the effective date from the
 * caller-supplied billing action policy.
 */
@Override
public boolean cancelWithPolicy(final DefaultSubscriptionBase subscription, final BillingActionPolicy policy, final CallContext context) throws SubscriptionBaseApiException {
    final EntitlementState state = subscription.getState();
    if (state != EntitlementState.ACTIVE) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_CANCEL_BAD_STATE, subscription.getId(), state);
    }
    final DateTime now = clock.getUTCNow();
    final DateTime effective = subscription.getPlanChangeEffectiveDate(policy);
    return doCancelPlan(subscription, now, effective, context);
}
/**
 * Shared cancellation path: validates the date, persists the cancel event, rebuilds
 * the subscription's transitions, and — for a base subscription — cascades the
 * cancellation to dependent add-ons.
 *
 * @param subscription  the subscription being cancelled
 * @param now           current wall-clock time (used as requested and processed date)
 * @param effectiveDate when the cancellation takes effect
 * @param context       the call context
 * @return {@code true} when the cancellation is effective immediately (state already CANCELLED)
 * @throws SubscriptionBaseApiException on an invalid date or a catalog error
 */
private boolean doCancelPlan(final DefaultSubscriptionBase subscription, final DateTime now, final DateTime effectiveDate, final CallContext context) throws SubscriptionBaseApiException {
    try {
        validateEffectiveDate(subscription, effectiveDate);
        final InternalCallContext internalCallContext = createCallContextFromBundleId(subscription.getBundleId(), context);
        // addCancellationAddOnForEventsIfRequired=false: add-ons are handled separately below.
        final List<SubscriptionBaseEvent> cancelEvents = getEventsOnCancelPlan(subscription, now, effectiveDate, now, false, internalCallContext);
        // cancelEvents will contain only one item
        dao.cancelSubscription(subscription, cancelEvents.get(0), internalCallContext, 0);
        final Catalog fullCatalog = catalogService.getFullCatalog(internalCallContext);
        subscription.rebuildTransitions(dao.getEventsForSubscription(subscription.getId(), internalCallContext), fullCatalog);
        if (subscription.getCategory() == ProductCategory.BASE) {
            // A null base product marks an immediate cancellation, forcing add-ons to be cancelled too.
            final Product baseProduct = (subscription.getState() == EntitlementState.CANCELLED) ? null : subscription.getCurrentPlan().getProduct();
            cancelAddOnsIfRequired(baseProduct, subscription.getBundleId(), effectiveDate, context);
        }
        final boolean isImmediate = subscription.getState() == EntitlementState.CANCELLED;
        return isImmediate;
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Reverts a pending (future-dated) cancellation. Persists an uncancel event and,
 * when applicable, re-adds the next phase-change event that the cancellation had removed.
 *
 * @param subscription the subscription whose future cancellation should be undone
 * @param context      the call context
 * @return always {@code true} on success
 * @throws SubscriptionBaseApiException if there is no pending cancellation, or on a catalog error
 */
@Override
public boolean uncancel(final DefaultSubscriptionBase subscription, final CallContext context) throws SubscriptionBaseApiException {
    // Uncancel only makes sense while the cancellation has not yet taken effect.
    if (!subscription.isSubscriptionFutureCancelled()) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_UNCANCEL_BAD_STATE, subscription.getId().toString());
    }
    final DateTime now = clock.getUTCNow();
    final SubscriptionBaseEvent uncancelEvent = new ApiEventUncancel(new ApiEventBuilder()
                                                                            .setSubscriptionId(subscription.getId())
                                                                            .setActiveVersion(subscription.getActiveVersion())
                                                                            .setRequestedDate(now)
                                                                            .setEffectiveDate(now)
                                                                            .setFromDisk(true));
    final List<SubscriptionBaseEvent> uncancelEvents = new ArrayList<SubscriptionBaseEvent>();
    uncancelEvents.add(uncancelEvent);
    final InternalCallContext internalCallContext = createCallContextFromBundleId(subscription.getBundleId(), context);
    // Re-compute the next phase transition, which was dropped when the cancellation was recorded.
    final TimedPhase nextTimedPhase = planAligner.getNextTimedPhase(subscription, now, now, internalCallContext);
    final PhaseEvent nextPhaseEvent = (nextTimedPhase != null) ?
                                      PhaseEventData.createNextPhaseEvent(subscription.getId(), subscription.getActiveVersion(), nextTimedPhase.getPhase().getName(), now, nextTimedPhase.getStartPhase()) :
                                      null;
    if (nextPhaseEvent != null) {
        uncancelEvents.add(nextPhaseEvent);
    }
    dao.uncancelSubscription(subscription, uncancelEvents, internalCallContext);
    try {
        final Catalog fullCatalog = catalogService.getFullCatalog(internalCallContext);
        subscription.rebuildTransitions(dao.getEventsForSubscription(subscription.getId(), internalCallContext), fullCatalog);
        return true;
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Changes the subscription to a new product/term/price-list, letting the catalog's
 * plan-change rules determine the policy and the resulting price list.
 *
 * @return the effective date of the change
 */
@Override
public DateTime changePlan(final DefaultSubscriptionBase subscription, final String productName, final BillingPeriod term,
                           final String priceList, final List<PlanPhasePriceOverride> overrides, final CallContext context) throws SubscriptionBaseApiException {
    final DateTime now = clock.getUTCNow();
    validateEntitlementState(subscription);
    final PlanChangeResult change = getPlanChangeResult(subscription, productName, term, priceList, now, context);
    final DateTime effective = subscription.getPlanChangeEffectiveDate(change.getPolicy());
    validateEffectiveDate(subscription, effective);
    try {
        // The catalog may have redirected the change to a different price list.
        return doChangePlan(subscription, productName, term, change.getNewPriceList().getName(), overrides, now, effective, context);
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Changes the subscription's plan at an explicitly requested date
 * (milliseconds truncated; defaults to now when the date is null).
 *
 * @return the effective date of the change
 */
@Override
public DateTime changePlanWithRequestedDate(final DefaultSubscriptionBase subscription, final String productName, final BillingPeriod term,
                                            final String priceList, final List<PlanPhasePriceOverride> overrides,
                                            final DateTime requestedDateWithMs, final CallContext context) throws SubscriptionBaseApiException {
    final DateTime now = clock.getUTCNow();
    final DateTime effective = (requestedDateWithMs == null) ? now : DefaultClock.truncateMs(requestedDateWithMs);
    // Keep validation order: date first, then entitlement state.
    validateEffectiveDate(subscription, effective);
    validateEntitlementState(subscription);
    try {
        return doChangePlan(subscription, productName, term, priceList, overrides, now, effective, context);
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Changes the subscription's plan, computing the effective date from the
 * caller-supplied billing action policy.
 *
 * @return the effective date of the change
 */
@Override
public DateTime changePlanWithPolicy(final DefaultSubscriptionBase subscription, final String productName, final BillingPeriod term,
                                     final String priceList, final List<PlanPhasePriceOverride> overrides, final BillingActionPolicy policy, final CallContext context)
        throws SubscriptionBaseApiException {
    final DateTime now = clock.getUTCNow();
    validateEntitlementState(subscription);
    final DateTime effective = subscription.getPlanChangeEffectiveDate(policy);
    try {
        return doChangePlan(subscription, productName, term, priceList, overrides, now, effective, context);
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Asks the catalog how a change from the subscription's current plan phase to the
 * given product/term/price-list should be performed (policy, alignment, price list).
 */
@Override
public PlanChangeResult getPlanChangeResult(final DefaultSubscriptionBase subscription, final String productName,
                                            final BillingPeriod term, final String priceList, final DateTime effectiveDate, final TenantContext context) throws SubscriptionBaseApiException {
    try {
        final InternalTenantContext tenantCtx = createTenantContextFromBundleId(subscription.getBundleId(), context);
        final Product targetProduct = catalogService.getFullCatalog(tenantCtx).findProduct(productName, effectiveDate);
        final Plan currentPlan = subscription.getCurrentPlan();
        final PlanPhaseSpecifier fromSpecifier = new PlanPhaseSpecifier(currentPlan.getProduct().getName(),
                                                                        currentPlan.getProduct().getCategory(),
                                                                        currentPlan.getRecurringBillingPeriod(),
                                                                        subscription.getCurrentPriceList().getName(),
                                                                        subscription.getCurrentPhase().getPhaseType());
        final PlanSpecifier toSpecifier = new PlanSpecifier(productName,
                                                            targetProduct.getCategory(),
                                                            term,
                                                            priceList);
        return catalogService.getFullCatalog(tenantCtx).planChange(fromSpecifier, toSpecifier, effectiveDate);
    } catch (final CatalogApiException e) {
        throw new SubscriptionBaseApiException(e);
    }
}
/**
 * Shared plan-change path: resolves the target plan from the catalog, persists the
 * change events, rebuilds the subscription's transitions and, for a base subscription,
 * cascades cancellation to add-ons that are no longer available or are now included.
 *
 * @return the effective date of the change
 * @throws SubscriptionBaseApiException if the target plan's category differs from the subscription's
 * @throws CatalogApiException on a catalog lookup failure
 */
private DateTime doChangePlan(final DefaultSubscriptionBase subscription,
                              final String newProductName,
                              final BillingPeriod newBillingPeriod,
                              final String newPriceList,
                              final List<PlanPhasePriceOverride> overrides,
                              final DateTime now,
                              final DateTime effectiveDate,
                              final CallContext context) throws SubscriptionBaseApiException, CatalogApiException {
    final InternalCallContext internalCallContext = createCallContextFromBundleId(subscription.getBundleId(), context);
    final PlanPhasePriceOverridesWithCallContext overridesWithContext = new DefaultPlanPhasePriceOverridesWithCallContext(overrides, context);
    final Plan newPlan = catalogService.getFullCatalog(internalCallContext).createOrFindPlan(newProductName, newBillingPeriod, newPriceList, overridesWithContext, effectiveDate, subscription.getStartDate());
    // A plan change may not move a subscription across product categories (e.g. BASE -> ADD_ON).
    if (newPlan.getProduct().getCategory() != subscription.getCategory()) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_CHANGE_INVALID, subscription.getId());
    }
    final List<SubscriptionBaseEvent> changeEvents = getEventsOnChangePlan(subscription, newPlan, newPriceList, now, effectiveDate, now, false, internalCallContext);
    dao.changePlan(subscription, changeEvents, internalCallContext);
    subscription.rebuildTransitions(dao.getEventsForSubscription(subscription.getId(), internalCallContext), catalogService.getFullCatalog(internalCallContext));
    if (subscription.getCategory() == ProductCategory.BASE) {
        // A null base product marks an already-effective cancellation, forcing add-ons to be cancelled too.
        final Product baseProduct = (subscription.getState() == EntitlementState.CANCELLED) ? null : subscription.getCurrentPlan().getProduct();
        cancelAddOnsIfRequired(baseProduct, subscription.getBundleId(), effectiveDate, context);
    }
    return effectiveDate;
}
/**
 * Builds the event list for a subscription creation (or re-creation): the creation
 * event itself plus, when a phase transition is due later, the next PHASE event.
 *
 * @param reCreate when {@code true} an {@code ApiEventReCreate} is produced instead of an {@code ApiEventCreate}
 * @return the ordered list of events to persist (creation first, optional phase event second)
 */
@Override
public List<SubscriptionBaseEvent> getEventsOnCreation(final UUID bundleId, final UUID subscriptionId, final DateTime alignStartDate, final DateTime bundleStartDate, final long activeVersion,
                                                       final Plan plan, final PhaseType initialPhase,
                                                       final String realPriceList, final DateTime requestedDate, final DateTime effectiveDate, final DateTime processedDate,
                                                       final boolean reCreate, final InternalTenantContext internalTenantContext) throws CatalogApiException, SubscriptionBaseApiException {
    // [0] = phase active at creation time, [1] = following phase (may be null).
    final TimedPhase[] curAndNextPhases = planAligner.getCurrentAndNextTimedPhaseOnCreate(alignStartDate, bundleStartDate, plan, initialPhase,
                                                                                          realPriceList, requestedDate, effectiveDate, internalTenantContext);
    final ApiEventBuilder createBuilder = new ApiEventBuilder()
            .setSubscriptionId(subscriptionId)
            .setEventPlan(plan.getName())
            .setEventPlanPhase(curAndNextPhases[0].getPhase().getName())
            .setEventPriceList(realPriceList)
            .setActiveVersion(activeVersion)
            .setEffectiveDate(effectiveDate)
            .setRequestedDate(requestedDate)
            .setFromDisk(true);
    final ApiEvent creationEvent = (reCreate) ? new ApiEventReCreate(createBuilder) : new ApiEventCreate(createBuilder);
    final TimedPhase nextTimedPhase = curAndNextPhases[1];
    // Only schedule a PHASE event when the plan actually has a later phase.
    final PhaseEvent nextPhaseEvent = (nextTimedPhase != null) ?
                                      PhaseEventData.createNextPhaseEvent(subscriptionId, activeVersion, nextTimedPhase.getPhase().getName(), processedDate, nextTimedPhase.getStartPhase()) :
                                      null;
    final List<SubscriptionBaseEvent> events = new ArrayList<SubscriptionBaseEvent>();
    events.add(creationEvent);
    if (nextPhaseEvent != null) {
        events.add(nextPhaseEvent);
    }
    return events;
}
/**
 * Builds the event list for a plan change: the CHANGE event plus, when a later phase
 * transition does not coincide with the change itself, the next PHASE event. For a base
 * subscription it can also append cancellation events for add-ons invalidated by the change.
 *
 * @param addCancellationAddOnForEventsIfRequired when {@code true}, add-on cancel events are appended
 * @return the ordered list of events to persist
 */
@Override
public List<SubscriptionBaseEvent> getEventsOnChangePlan(final DefaultSubscriptionBase subscription, final Plan newPlan,
                                                         final String newPriceList, final DateTime requestedDate, final DateTime effectiveDate, final DateTime processedDate,
                                                         final boolean addCancellationAddOnForEventsIfRequired, final InternalTenantContext internalTenantContext) throws CatalogApiException, SubscriptionBaseApiException {
    final TimedPhase currentTimedPhase = planAligner.getCurrentTimedPhaseOnChange(subscription, newPlan, newPriceList, requestedDate, effectiveDate, internalTenantContext);
    final SubscriptionBaseEvent changeEvent = new ApiEventChange(new ApiEventBuilder()
                                                                         .setSubscriptionId(subscription.getId())
                                                                         .setEventPlan(newPlan.getName())
                                                                         .setEventPlanPhase(currentTimedPhase.getPhase().getName())
                                                                         .setEventPriceList(newPriceList)
                                                                         .setActiveVersion(subscription.getActiveVersion())
                                                                         .setEffectiveDate(effectiveDate)
                                                                         .setRequestedDate(requestedDate)
                                                                         .setFromDisk(true));
    final TimedPhase nextTimedPhase = planAligner.getNextTimedPhaseOnChange(subscription, newPlan, newPriceList, processedDate, effectiveDate, internalTenantContext);
    final PhaseEvent nextPhaseEvent = (nextTimedPhase != null) ?
                                      PhaseEventData.createNextPhaseEvent(subscription.getId(), subscription.getActiveVersion(),
                                                                          nextTimedPhase.getPhase().getName(), processedDate, nextTimedPhase.getStartPhase()) :
                                      null;
    final List<SubscriptionBaseEvent> changeEvents = new ArrayList<SubscriptionBaseEvent>();
    // Only add the PHASE if it does not coincide with the CHANGE, if not this is 'just' a CHANGE.
    changeEvents.add(changeEvent);
    if (nextPhaseEvent != null && !nextPhaseEvent.getEffectiveDate().equals(changeEvent.getEffectiveDate())) {
        changeEvents.add(nextPhaseEvent);
    }
    if (subscription.getCategory() == ProductCategory.BASE && addCancellationAddOnForEventsIfRequired) {
        // For a change already in the past/now, validate add-ons against the NEW plan; otherwise against the current one.
        final Product currentBaseProduct = changeEvent.getEffectiveDate().compareTo(clock.getUTCNow()) <= 0 ? newPlan.getProduct() : subscription.getCurrentPlan().getProduct();
        addCancellationAddOnForEventsIfRequired(changeEvents, currentBaseProduct, subscription.getBundleId(), requestedDate, effectiveDate, processedDate, internalTenantContext);
    }
    return changeEvents;
}
/**
 * Builds the event list for a cancellation: the CANCEL event plus, for a base
 * subscription with {@code addCancellationAddOnForEventsIfRequired} set, cancellation
 * events for add-ons that must go away together with the base plan.
 *
 * @return the ordered list of events to persist (cancel event first)
 */
@Override
public List<SubscriptionBaseEvent> getEventsOnCancelPlan(final DefaultSubscriptionBase subscription,
                                                         final DateTime requestedDate, final DateTime effectiveDate, final DateTime processedDate,
                                                         final boolean addCancellationAddOnForEventsIfRequired, final InternalTenantContext internalTenantContext) throws CatalogApiException {
    final List<SubscriptionBaseEvent> cancelEvents = new ArrayList<SubscriptionBaseEvent>();
    final SubscriptionBaseEvent cancelEvent = new ApiEventCancel(new ApiEventBuilder()
                                                                         .setSubscriptionId(subscription.getId())
                                                                         .setActiveVersion(subscription.getActiveVersion())
                                                                         .setEffectiveDate(effectiveDate)
                                                                         .setRequestedDate(requestedDate)
                                                                         .setFromDisk(true));
    cancelEvents.add(cancelEvent);
    if (subscription.getCategory() == ProductCategory.BASE && addCancellationAddOnForEventsIfRequired) {
        // A cancellation effective now or in the past means there is no base product any more (null),
        // which forces every add-on to be cancelled as well.
        final Product currentBaseProduct = cancelEvent.getEffectiveDate().compareTo(clock.getUTCNow()) <= 0 ? null : subscription.getCurrentPlan().getProduct();
        addCancellationAddOnForEventsIfRequired(cancelEvents, currentBaseProduct, subscription.getBundleId(), requestedDate, effectiveDate, processedDate, internalTenantContext);
    }
    return cancelEvents;
}
/**
 * Cancels the bundle's add-ons that are no longer valid for the given base product,
 * persisting the cancel events in one batch.
 *
 * @param baseProduct the bundle's base product, or {@code null} when the base plan is gone
 * @return the number of add-on subscriptions that were cancelled
 */
public int cancelAddOnsIfRequired(final Product baseProduct, final UUID bundleId, final DateTime effectiveDate, final CallContext context) throws CatalogApiException {
    // If cancellation/change occur in the future, there is nothing to do
    final DateTime now = clock.getUTCNow();
    if (effectiveDate.isAfter(now)) {
        return 0;
    }
    final List<SubscriptionBaseEvent> cancelEvents = new LinkedList<SubscriptionBaseEvent>();
    final InternalCallContext callCtx = createCallContextFromBundleId(bundleId, context);
    final List<DefaultSubscriptionBase> cancelledAddOns = addCancellationAddOnForEventsIfRequired(cancelEvents, baseProduct, bundleId, now, effectiveDate, now, callCtx);
    if (!cancelledAddOns.isEmpty()) {
        dao.cancelSubscriptions(cancelledAddOns, cancelEvents, callCtx);
    }
    return cancelledAddOns.size();
}
/**
 * Scans the bundle's subscriptions and, for every active add-on that is either included
 * in or unavailable for the given base product, appends a cancel event to {@code events}.
 *
 * @param events      the event list that cancel events are appended to (mutated in place)
 * @param baseProduct the bundle's base product; {@code null} cancels every active add-on
 * @return the add-on subscriptions for which a cancel event was created
 */
private List<DefaultSubscriptionBase> addCancellationAddOnForEventsIfRequired(final List<SubscriptionBaseEvent> events, final Product baseProduct, final UUID bundleId,
                                                                              final DateTime requestedDate, final DateTime effectiveDate, final DateTime processedDate, final InternalTenantContext internalTenantContext) throws CatalogApiException {
    final List<DefaultSubscriptionBase> subscriptionsToBeCancelled = new ArrayList<DefaultSubscriptionBase>();
    final List<SubscriptionBase> subscriptions = dao.getSubscriptions(bundleId, ImmutableList.<SubscriptionBaseEvent>of(), internalTenantContext);
    for (final SubscriptionBase subscription : subscriptions) {
        final DefaultSubscriptionBase cur = (DefaultSubscriptionBase) subscription;
        // Only active add-ons are candidates for cascade cancellation.
        if (cur.getState() == EntitlementState.CANCELLED ||
            cur.getCategory() != ProductCategory.ADD_ON) {
            continue;
        }
        final Plan addonCurrentPlan = cur.getCurrentPlan();
        // Cancel when: the base plan is gone (null), the add-on is now bundled into the base
        // product, or the base product no longer offers this add-on.
        if (baseProduct == null ||
            addonUtils.isAddonIncludedFromProdName(baseProduct.getName(), addonCurrentPlan, requestedDate, internalTenantContext) ||
            !addonUtils.isAddonAvailableFromProdName(baseProduct.getName(), addonCurrentPlan, requestedDate, internalTenantContext)) {
            //
            // Perform AO cancellation using the effectiveDate of the BP
            //
            final SubscriptionBaseEvent cancelEvent = new ApiEventCancel(new ApiEventBuilder()
                                                                                 .setSubscriptionId(cur.getId())
                                                                                 .setActiveVersion(cur.getActiveVersion())
                                                                                 .setEffectiveDate(effectiveDate)
                                                                                 .setRequestedDate(requestedDate)
                                                                                 .setFromDisk(true));
            subscriptionsToBeCancelled.add(cur);
            events.add(cancelEvent);
        }
    }
    return subscriptionsToBeCancelled;
}
/**
 * Rejects an effective date that lies before the subscription's previous transition.
 *
 * @throws SubscriptionBaseApiException when the date is earlier than the last transition
 */
private void validateEffectiveDate(final DefaultSubscriptionBase subscription, final DateTime effectiveDate) throws SubscriptionBaseApiException {
    final SubscriptionBaseTransition previous = subscription.getPreviousTransition();
    if (previous == null) {
        return;
    }
    final DateTime previousTransitionTime = previous.getEffectiveTransitionTime();
    if (previousTransitionTime.isAfter(effectiveDate)) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_INVALID_REQUESTED_DATE,
                                               effectiveDate.toString(), previousTransitionTime);
    }
}
/**
 * Ensures a plan change is permitted: the subscription must be active and must not
 * carry a pending (future) cancellation.
 */
private void validateEntitlementState(final DefaultSubscriptionBase subscription) throws SubscriptionBaseApiException {
    final EntitlementState state = subscription.getState();
    if (EntitlementState.ACTIVE != state) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_CHANGE_NON_ACTIVE, subscription.getId(), state);
    }
    if (subscription.isSubscriptionFutureCancelled()) {
        throw new SubscriptionBaseApiException(ErrorCode.SUB_CHANGE_FUTURE_CANCELLED, subscription.getId());
    }
}
/** Builds an internal call context scoped to the given bundle. */
private InternalCallContext createCallContextFromBundleId(final UUID bundleId, final CallContext context) {
    final InternalCallContext internalContext = internalCallContextFactory.createInternalCallContext(bundleId, ObjectType.BUNDLE, context);
    return internalContext;
}
/** Builds an internal tenant context scoped to the given bundle. */
private InternalTenantContext createTenantContextFromBundleId(final UUID bundleId, final TenantContext context) {
    final InternalTenantContext tenantContext = internalCallContextFactory.createInternalTenantContext(bundleId, ObjectType.BUNDLE, context);
    return tenantContext;
}
}
| |
package com.github.jochenw.afw.core.jdbc;
import static org.junit.Assert.*;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.Date;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Calendar;
import java.util.Properties;
import java.util.TimeZone;
import org.junit.Test;
import com.github.jochenw.afw.core.components.Application;
import com.github.jochenw.afw.core.inject.ComponentFactoryBuilder;
import com.github.jochenw.afw.core.inject.ComponentFactoryBuilder.Module;
import com.github.jochenw.afw.core.inject.Scopes;
import com.github.jochenw.afw.core.inject.guice.GuiceComponentFactoryBuilder;
import com.github.jochenw.afw.core.log.simple.SimpleLogFactory;
import com.github.jochenw.afw.core.props.DefaultPropertyFactory;
import com.github.jochenw.afw.core.util.Exceptions;
import com.github.jochenw.afw.core.util.MutableBoolean;
import com.github.jochenw.afw.core.util.Streams;
/** Test suite for the {@link JdbcHelper}.
*/
/** Test suite for the {@link JdbcHelper}.
 */
public class JdbcHelperTest {
    /** Creates an {@link Application}, that is configured with the component
     * bindings, which the tests require (the {@link JdbcHelper}, a {@link Worker},
     * time zone ids, the H2 dialect, and a connection provider), plus the
     * optional bindings from the given module.
     * @param pModule An optional module with additional bindings; may be null.
     * @return The configured application object.
     */
    private Application getApplication(Module pModule) {
        return new Application((b) -> {
            b.bind(JdbcHelper.class).in(Scopes.SINGLETON);
            b.bind(Worker.class).in(Scopes.SINGLETON);
            b.bind(ZoneId.class).toInstance(ZoneId.of("Europe/Berlin"));
            b.bind(ZoneId.class, "db").toInstance(ZoneId.of("GMT"));
            b.bind(Dialect.class, "h2").to(H2Dialect.class);
            b.bind(ConnectionProvider.class).to(DefaultConnectionProvider.class).in(Scopes.SINGLETON);
            if (pModule != null) {
                pModule.configure(b);
            }
        }, () -> new SimpleLogFactory(System.out), () -> {
            // The database settings are read from a classpath resource.
            final String uri = "com/github/jochenw/afw/core/jdbc/db-test.properties";
            final URL url = Thread.currentThread().getContextClassLoader().getResource(uri);
            if (url == null) {
                throw new IllegalStateException("Unable to locate resource: " + uri);
            }
            final Properties props = Streams.load(url);
            return new DefaultPropertyFactory(props);
        }) {
            @Override
            protected ComponentFactoryBuilder<?> newComponentFactoryBuilder() {
                return new GuiceComponentFactoryBuilder();
            }
        };
    }
    /** Test for {@link Worker.Context#getConnection()}: asserts, that a usable
     * connection (with metadata) can be obtained.
     */
    @Test
    public void testOpenConnection() {
        final Application application = getApplication(null);
        final Worker worker = application.getComponentFactory().requireInstance(Worker.class);
        final MutableBoolean success = new MutableBoolean();
        worker.run((c) -> {
            assertNotNull(c);
            final Connection conn = c.getConnection();
            assertNotNull(conn);
            assertNotNull(conn.getMetaData());
            // Flag, that the worker body actually ran.
            success.set();
        });
        assertTrue(success.isSet());
    }
    /** Test for conversion of local time objects to database objects, and back:
     * {@link LocalDateTime} &lt;-&gt; {@link Timestamp}, {@link LocalDate} &lt;-&gt;
     * {@link Date}, and {@link LocalTime} &lt;-&gt; {@link Time} must round-trip
     * without loss.
     */
    @Test
    public void testTimeConversions() {
        final Application application = getApplication(null);
        final Worker worker = application.getComponentFactory().requireInstance(Worker.class);
        final JdbcHelper helper = worker.getJdbcHelper();
        final ZoneId zoneId = ZoneId.of("Europe/Berlin");
        final ZonedDateTime expectedZonedDateTimeValue = ZonedDateTime.of(2021, 11, 21, 12, 56, 0, 0, zoneId);
        final LocalDateTime expectedLocalDateTimeValue = expectedZonedDateTimeValue.toLocalDateTime();
        final Timestamp timeStamp = helper.asTimestamp(expectedLocalDateTimeValue);
        final LocalDateTime actualLocalDateTimeValue = helper.asLocalDateTime(timeStamp);
        assertEquals(expectedLocalDateTimeValue, actualLocalDateTimeValue);
        final LocalDate expectedLocalDate = expectedLocalDateTimeValue.toLocalDate();
        final Date date = helper.asDate(expectedLocalDate);
        final LocalDate actualLocalDate = helper.asLocalDate(date);
        assertEquals(expectedLocalDate, actualLocalDate);
        final LocalTime expectedLocalTime = expectedLocalDateTimeValue.toLocalTime();
        final Time time = helper.asTime(expectedLocalTime);
        final LocalTime actualLocalTime = helper.asLocalTime(time);
        assertEquals(expectedLocalTime, actualLocalTime);
    }
    /** Test for {@link Worker.Context#executeUpdate()}: creates a table
     * (dropping any leftover from a previous run first), and asserts, that
     * the DDL statement reports zero affected rows.
     */
    @Test
    public void testCreateTable() {
        final String sqlCreate = "CREATE TABLE table_one ("
                + " id BIGINT NOT NULL PRIMARY KEY,"
                + " valid TINYINT NOT NULL"
                + ");";
        final String sqlDrop = "DROP TABLE table_one";
        final Application application = getApplication(null);
        final Worker worker = application.getComponentFactory().requireInstance(Worker.class);
        final MutableBoolean success = new MutableBoolean();
        worker.run((c) -> {
            // Drop a possibly existing table; ignore the "table does not exist" error.
            try {
                c.executeUpdate(sqlDrop);
            } catch (Throwable t) {
                final SQLException cause = Exceptions.getCause(t, SQLException.class);
                if (cause != null) {
                    final Dialect dialect = c.getDialect();
                    if (!dialect.isDroppedTableDoesnExistError(cause)) {
                        throw Exceptions.show(t);
                    }
                } else {
                    throw Exceptions.show(t);
                }
            }
            final int affectedRows = c.executeUpdate(sqlCreate);
            assertEquals(0, affectedRows);
            success.set();
        });
        assertTrue(success.isSet());
    }
    /** Test for INSERT, and SELECT: inserts one fully populated row, and one
     * row with only the primary key (all other columns NULL), then reads both
     * rows back, verifying every column value via the positional, and the
     * consumer-style accessors of the row object.
     */
    @Test
    public void testInsertAndSelect() {
        final String sqlCreate = "CREATE TABLE table_two ("
                + " id BIGINT NOT NULL PRIMARY KEY,"
                + " tinyIntColumn TINYINT,"
                + " smallIntColumn SMALLINT,"
                + " intColumn INTEGER,"
                + " bigIntColumn BIGINT,"
                + " varCharColumn VARCHAR(64),"
                + " varBinaryColumn VARBINARY(64),"
                + " timeStampColumn TIMESTAMP,"
                + " dateColumn DATE,"
                + " timeColumn TIME,"
                + " zonedDateTimeColumn TIMESTAMP,"
                + " localDateTimeColumn TIMESTAMP,"
                + " localDateColumn DATE,"
                + " localTimeColumn TIME"
                + ");";
        final String sqlDrop = "DROP TABLE table_two";
        final Application application = getApplication(null);
        final Worker worker = application.getComponentFactory().requireInstance(Worker.class);
        final MutableBoolean success = new MutableBoolean();
        worker.run((c) -> {
            // Drop a possibly existing table; ignore the "table does not exist" error.
            try {
                c.executeUpdate(sqlDrop);
            } catch (Throwable t) {
                final SQLException cause = Exceptions.getCause(t, SQLException.class);
                if (cause != null) {
                    final Dialect dialect = c.getDialect();
                    if (!dialect.isDroppedTableDoesnExistError(cause)) {
                        throw Exceptions.show(t);
                    }
                } else {
                    throw Exceptions.show(t);
                }
            }
            final int affectedRowsForCreate = c.executeUpdate(sqlCreate);
            assertEquals(0, affectedRowsForCreate);
            final byte byteColumnValue = (byte) 31;
            final short shortColumnValue = (short) 42;
            final int intColumnValue = 53;
            // Use 1L so the addition is done in long arithmetic: Integer.MAX_VALUE + 1
            // would overflow to Integer.MIN_VALUE, defeating the purpose of a value
            // outside the int range.
            final long bigIntColumnValue = Integer.MAX_VALUE + 1L;
            final String varCharColumnValue = "FooBar\u00DC\u00D6";
            final byte[] varBinaryColumnValue = varCharColumnValue.getBytes(StandardCharsets.UTF_8);
            final ZoneId zoneId = ZoneId.of("Europe/Berlin");
            final ZonedDateTime zonedDateTimeValue = ZonedDateTime.of(2021, 11, 21, 12, 56, 0, 0, zoneId);
            final Timestamp timeStampColumnValue = Timestamp.valueOf(zonedDateTimeValue.toLocalDateTime());
            final Date dateColumnValue = Date.valueOf(zonedDateTimeValue.toLocalDate());
            final Time timeColumnValue = Time.valueOf(zonedDateTimeValue.toLocalTime());
            final ZonedDateTime zonedDateTimeColumnValue = zonedDateTimeValue;
            final LocalDateTime localDateTimeColumnValue = zonedDateTimeValue.toLocalDateTime();
            final LocalDate localDateColumnValue = zonedDateTimeValue.toLocalDate();
            final LocalTime localTimeColumnValue = zonedDateTimeValue.toLocalTime();
            c.executeUpdate("INSERT INTO table_two (id, tinyIntColumn, smallIntColumn,"
                            + " intColumn, bigIntColumn, varCharColumn, varBinaryColumn,"
                            + " timeStampColumn, dateColumn, timeColumn, zonedDateTimeColumn,"
                            + " localDateTimeColumn, localDateColumn, localTimeColumn) VALUES"
                            + " (?,?,?,?,?,?,?,?,?,?,?,?,?,?)",
                            Long.valueOf(1), byteColumnValue, shortColumnValue, intColumnValue,
                            bigIntColumnValue, varCharColumnValue, varBinaryColumnValue,
                            timeStampColumnValue, dateColumnValue, timeColumnValue,
                            zonedDateTimeColumnValue, localDateTimeColumnValue, localDateColumnValue,
                            localTimeColumnValue);
            // Second row: only the primary key, all nullable columns remain NULL.
            c.executeUpdate("INSERT INTO table_two (id) VALUES"
                            + " (?)",
                            Long.valueOf(2));
            c.executeQuery("SELECT * FROM table_two WHERE id=?", (row) -> {
                assertEquals(1, row.nextLong());
                assertEquals(byteColumnValue, row.nextByte());
                assertEquals(shortColumnValue, row.nextShort());
                assertEquals(intColumnValue, row.nextInt());
                assertEquals(bigIntColumnValue, row.nextLong());
                assertEquals(varCharColumnValue, row.nextStr());
                assertArrayEquals(varBinaryColumnValue, row.nextBytes());
                assertEquals(timeStampColumnValue, row.nextTimestamp());
                assertEquals(dateColumnValue, row.nextDate());
                assertEquals(timeColumnValue, row.nextTime());
                assertEquals(zonedDateTimeColumnValue, row.nextZonedDateTime(zoneId));
                assertEquals(localDateTimeColumnValue, row.nextLocalDateTime());
                assertEquals(localDateColumnValue, row.nextLocalDate(zoneId));
                assertEquals(localTimeColumnValue, row.nextLocalTime(zoneId));
                // Re-read the same row via the consumer-style accessors.
                row.reset();
                row
                    .nextLongObj((l) -> assertEquals(1, l.longValue()))
                    .nextByte((b) -> assertEquals(byteColumnValue, b))
                    .nextShort((s) -> assertEquals(shortColumnValue, s))
                    .nextInt((i) -> assertEquals(intColumnValue, i))
                    .nextLong((l) -> assertEquals(bigIntColumnValue, l))
                    .nextStr((s) -> assertEquals(varCharColumnValue, s))
                    .nextBytes((b) -> assertArrayEquals(varBinaryColumnValue, b))
                    .nextTimestamp((t) -> assertEquals(timeStampColumnValue, t))
                    .nextDate((d) -> assertEquals(dateColumnValue, d))
                    .nextTime((t) -> assertEquals(timeColumnValue, t))
                    .nextZonedDateTime((z) -> assertEquals(zonedDateTimeColumnValue, z), zoneId)
                    .nextLocalDateTime((l) -> assertEquals(localDateTimeColumnValue, l))
                    .nextLocalDate((l) -> assertEquals(localDateColumnValue, l), zoneId)
                    .nextLocalTime((l) -> assertEquals(localTimeColumnValue, l), zoneId);
            }, Long.valueOf(1));
            c.executeQuery("SELECT * FROM table_two WHERE id=?", (row) -> {
                assertEquals(2, row.nextLong());
                assertNull(row.nextByteObj());
                assertNull(row.nextShortObj());
                assertNull(row.nextIntObj());
                assertNull(row.nextLongObj());
                assertNull(row.nextStr());
                assertNull(row.nextBytes());
                assertNull(row.nextTimestamp());
                assertNull(row.nextDate());
                assertNull(row.nextTime());
                assertNull(row.nextZonedDateTime(zoneId));
                assertNull(row.nextLocalDateTime());
                assertNull(row.nextLocalDate(zoneId));
                assertNull(row.nextLocalTime(zoneId));
                // Re-read the same row via the consumer-style accessors.
                row.reset();
                row
                    .nextLongObj((l) -> assertEquals(2, l.longValue()))
                    .nextByteObj((b) -> assertNull(b))
                    .nextShortObj((s) -> assertNull(s))
                    .nextIntObj((i) -> assertNull(i))
                    .nextLongObj((l) -> assertNull(l))
                    .nextStr((s) -> assertNull(s))
                    .nextBytes((b) -> assertNull(b))
                    .nextTimestamp((t) -> assertNull(t))
                    .nextDate((d) -> assertNull(d))
                    .nextTime((t) -> assertNull(t))
                    .nextZonedDateTime((z) -> assertNull(z), zoneId)
                    .nextLocalDateTime((l) -> assertNull(l))
                    .nextLocalDate((l) -> assertNull(l), zoneId)
                    .nextLocalTime((l) -> assertNull(l), zoneId);
            }, Long.valueOf(2));
            success.set();
        });
        assertTrue(success.isSet());
    }
}
| |
package org.apache.cassandra;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.io.Closeable;
import java.io.EOFException;
import java.io.IOError;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.schema.ColumnMetadata;
import org.apache.cassandra.schema.TableId;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.Directories.DataDirectory;
import org.apache.cassandra.db.compaction.AbstractCompactionTask;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.AsciiType;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.partitions.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.RandomPartitioner.BigIntegerToken;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.gms.ApplicationState;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.gms.VersionedValue;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.service.pager.PagingState;
import org.apache.cassandra.transport.ProtocolVersion;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.CounterId;
import org.apache.cassandra.utils.FBUtilities;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
 * Static helpers shared by Cassandra unit tests: key/token/clustering factories,
 * shortcuts for executing read commands locally, content-equality checks for
 * iterators and mutations, and small harness utilities (ring bootstrap,
 * partitioner switching, spin asserts, flaky-test reruns).
 */
public class Util
{
    private static final Logger logger = LoggerFactory.getLogger(Util.class);

    // Pool of host IDs reused across createInitialRing() calls so repeated ring
    // setups within one JVM hand out stable IDs. Only grown, never shrunk.
    private static final List<UUID> hostIdPool = new ArrayList<>();

    /** Returns the partitioner configured for this test JVM. */
    public static IPartitioner testPartitioner()
    {
        return DatabaseDescriptor.getPartitioner();
    }

    /** Decorates a UTF-8 string key with the test partitioner. */
    public static DecoratedKey dk(String key)
    {
        return testPartitioner().decorateKey(ByteBufferUtil.bytes(key));
    }

    /** Decorates a key parsed from {@code key} by the given type. */
    public static DecoratedKey dk(String key, AbstractType<?> type)
    {
        return testPartitioner().decorateKey(type.fromString(key));
    }

    /** Decorates a raw key buffer with the test partitioner. */
    public static DecoratedKey dk(ByteBuffer key)
    {
        return testPartitioner().decorateKey(key);
    }

    /** Partition position for a UTF-8 string key under the test partitioner. */
    public static PartitionPosition rp(String key)
    {
        return rp(key, testPartitioner());
    }

    public static PartitionPosition rp(String key, IPartitioner partitioner)
    {
        return PartitionPosition.ForKey.get(ByteBufferUtil.bytes(key), partitioner);
    }

    /** Builds a clustering from raw values using the given comparator. */
    public static Clustering clustering(ClusteringComparator comparator, Object... o)
    {
        return comparator.make(o);
    }

    /** Token of a UTF-8 string key under the test partitioner. */
    public static Token token(String key)
    {
        return testPartitioner().getToken(ByteBufferUtil.bytes(key));
    }

    public static Range<PartitionPosition> range(String left, String right)
    {
        return new Range<>(rp(left), rp(right));
    }

    public static Range<PartitionPosition> range(IPartitioner p, String left, String right)
    {
        return new Range<>(rp(left, p), rp(right, p));
    }

    // Test helper to make an iterator iterable exactly once; a second iterator()
    // call fails fast instead of silently yielding an exhausted iterator.
    public static <T> Iterable<T> once(final Iterator<T> source)
    {
        return new Iterable<T>()
        {
            private AtomicBoolean exhausted = new AtomicBoolean();
            public Iterator<T> iterator()
            {
                Preconditions.checkState(!exhausted.getAndSet(true));
                return source;
            }
        };
    }

    /** Big-endian 8-byte buffer holding {@code v}, positioned at 0. */
    public static ByteBuffer getBytes(long v)
    {
        byte[] bytes = new byte[8];
        ByteBuffer bb = ByteBuffer.wrap(bytes);
        bb.putLong(v);
        bb.rewind();
        return bb;
    }

    /** Big-endian 4-byte buffer holding {@code v}, positioned at 0. */
    public static ByteBuffer getBytes(int v)
    {
        byte[] bytes = new byte[4];
        ByteBuffer bb = ByteBuffer.wrap(bytes);
        bb.putInt(v);
        bb.rewind();
        return bb;
    }

    /**
     * Writes out a bunch of mutations for a single column family.
     *
     * @param mutations A group of Mutations for the same keyspace and column family.
     * @return The ColumnFamilyStore that was used.
     */
    public static ColumnFamilyStore writeColumnFamily(List<Mutation> mutations)
    {
        IMutation first = mutations.get(0);
        String keyspaceName = first.getKeyspaceName();
        TableId tableId = first.getTableIds().iterator().next();

        for (Mutation rm : mutations)
            rm.applyUnsafe();

        ColumnFamilyStore store = Keyspace.open(keyspaceName).getColumnFamilyStore(tableId);
        store.forceBlockingFlush();
        return store;
    }

    /** True if the counter id serialized in {@code context} at {@code offset} equals {@code n}. */
    public static boolean equalsCounterId(CounterId n, ByteBuffer context, int offset)
    {
        return CounterId.wrap(context, context.position() + offset).equals(n);
    }

    /**
     * Creates initial set of nodes and tokens. Nodes are added to StorageService as 'normal'
     */
    public static void createInitialRing(StorageService ss, IPartitioner partitioner, List<Token> endpointTokens,
                                         List<Token> keyTokens, List<InetAddress> hosts, List<UUID> hostIds, int howMany)
        throws UnknownHostException
    {
        // Expand pool of host IDs as necessary
        for (int i = hostIdPool.size(); i < howMany; i++)
            hostIdPool.add(UUID.randomUUID());

        boolean endpointTokenPrefilled = endpointTokens != null && !endpointTokens.isEmpty();
        for (int i = 0; i < howMany; i++)
        {
            if (!endpointTokenPrefilled)
                endpointTokens.add(new BigIntegerToken(String.valueOf(10 * i)));
            keyTokens.add(new BigIntegerToken(String.valueOf(10 * i + 5)));
            hostIds.add(hostIdPool.get(i));
        }

        for (int i = 0; i < endpointTokens.size(); i++)
        {
            InetAddress ep = InetAddress.getByName("127.0.0." + (i + 1));
            Gossiper.instance.initializeNodeUnsafe(ep, hostIds.get(i), 1);
            Gossiper.instance.injectApplicationState(ep, ApplicationState.TOKENS, new VersionedValue.VersionedValueFactory(partitioner).tokens(Collections.singleton(endpointTokens.get(i))));
            ss.onChange(ep,
                        ApplicationState.STATUS,
                        new VersionedValue.VersionedValueFactory(partitioner).normal(Collections.singleton(endpointTokens.get(i))));
            hosts.add(ep);
        }

        // check that all nodes are in token metadata
        for (int i = 0; i < endpointTokens.size(); ++i)
            assertTrue(ss.getTokenMetadata().isMember(hosts.get(i)));
    }

    /** Submits a user-defined compaction covering every live sstable of the store. */
    public static Future<?> compactAll(ColumnFamilyStore cfs, int gcBefore)
    {
        List<Descriptor> descriptors = new ArrayList<>();
        for (SSTableReader sstable : cfs.getLiveSSTables())
            descriptors.add(sstable.descriptor);
        return CompactionManager.instance.submitUserDefined(cfs, descriptors, gcBefore);
    }

    /** Synchronously compacts the given sstables of the store. */
    public static void compact(ColumnFamilyStore cfs, Collection<SSTableReader> sstables)
    {
        int gcBefore = cfs.gcBefore(FBUtilities.nowInSeconds());
        List<AbstractCompactionTask> tasks = cfs.getCompactionStrategyManager().getUserDefinedTasks(sstables, gcBefore);
        for (AbstractCompactionTask task : tasks)
            task.execute(null);
    }

    public static void expectEOF(Callable<?> callable)
    {
        expectException(callable, EOFException.class);
    }

    /** Asserts that {@code callable} throws exactly the given exception class. */
    public static void expectException(Callable<?> callable, Class<?> exception)
    {
        boolean thrown = false;

        try
        {
            callable.call();
        }
        catch (Throwable e)
        {
            assert e.getClass().equals(exception) : e.getClass().getName() + " is not " + exception.getName();
            thrown = true;
        }

        assert thrown : exception.getName() + " not received";
    }

    /** Builder for a single-partition read of the given key. */
    public static AbstractReadCommandBuilder.SinglePartitionBuilder cmd(ColumnFamilyStore cfs, Object... partitionKey)
    {
        return new AbstractReadCommandBuilder.SinglePartitionBuilder(cfs, makeKey(cfs.metadata(), partitionKey));
    }

    /** Builder for a partition-range read over the whole store. */
    public static AbstractReadCommandBuilder.PartitionRangeBuilder cmd(ColumnFamilyStore cfs)
    {
        return new AbstractReadCommandBuilder.PartitionRangeBuilder(cfs);
    }

    static DecoratedKey makeKey(TableMetadata metadata, Object... partitionKey)
    {
        // Allow callers to pass an already-decorated key straight through.
        if (partitionKey.length == 1 && partitionKey[0] instanceof DecoratedKey)
            return (DecoratedKey)partitionKey[0];

        ByteBuffer key = metadata.partitionKeyAsClusteringComparator().make(partitionKey).serializeAsPartitionKey();
        return metadata.partitioner.decorateKey(key);
    }

    /** Asserts the command returns no partitions when executed locally (unfiltered path). */
    public static void assertEmptyUnfiltered(ReadCommand command)
    {
        try (ReadExecutionController executionController = command.executionController();
             UnfilteredPartitionIterator iterator = command.executeLocally(executionController))
        {
            if (iterator.hasNext())
            {
                try (UnfilteredRowIterator partition = iterator.next())
                {
                    throw new AssertionError("Expected no results for query " + command.toCQLString() + " but got key " + command.metadata().partitionKeyType.getString(partition.partitionKey().getKey()));
                }
            }
        }
    }

    /** Asserts the command returns no partitions when executed internally (filtered path). */
    public static void assertEmpty(ReadCommand command)
    {
        try (ReadExecutionController executionController = command.executionController();
             PartitionIterator iterator = command.executeInternal(executionController))
        {
            if (iterator.hasNext())
            {
                try (RowIterator partition = iterator.next())
                {
                    throw new AssertionError("Expected no results for query " + command.toCQLString() + " but got key " + command.metadata().partitionKeyType.getString(partition.partitionKey().getKey()));
                }
            }
        }
    }

    /** Materializes every (unfiltered) partition returned by the command. */
    public static List<ImmutableBTreePartition> getAllUnfiltered(ReadCommand command)
    {
        List<ImmutableBTreePartition> results = new ArrayList<>();
        try (ReadExecutionController executionController = command.executionController();
             UnfilteredPartitionIterator iterator = command.executeLocally(executionController))
        {
            while (iterator.hasNext())
            {
                try (UnfilteredRowIterator partition = iterator.next())
                {
                    results.add(ImmutableBTreePartition.create(partition));
                }
            }
        }
        return results;
    }

    /** Materializes every (filtered) partition returned by the command. */
    public static List<FilteredPartition> getAll(ReadCommand command)
    {
        List<FilteredPartition> results = new ArrayList<>();
        try (ReadExecutionController executionController = command.executionController();
             PartitionIterator iterator = command.executeInternal(executionController))
        {
            while (iterator.hasNext())
            {
                try (RowIterator partition = iterator.next())
                {
                    results.add(FilteredPartition.create(partition));
                }
            }
        }
        return results;
    }

    /** Executes the command and asserts it yields exactly one partition with exactly one row (unfiltered). */
    public static Row getOnlyRowUnfiltered(ReadCommand cmd)
    {
        try (ReadExecutionController executionController = cmd.executionController();
             UnfilteredPartitionIterator iterator = cmd.executeLocally(executionController))
        {
            assert iterator.hasNext() : "Expecting one row in one partition but got nothing";
            try (UnfilteredRowIterator partition = iterator.next())
            {
                assert !iterator.hasNext() : "Expecting a single partition but got more";
                assert partition.hasNext() : "Expecting one row in one partition but got an empty partition";
                Row row = ((Row)partition.next());
                assert !partition.hasNext() : "Expecting a single row but got more";
                return row;
            }
        }
    }

    /** Executes the command and asserts it yields exactly one partition with exactly one row (filtered). */
    public static Row getOnlyRow(ReadCommand cmd)
    {
        try (ReadExecutionController executionController = cmd.executionController();
             PartitionIterator iterator = cmd.executeInternal(executionController))
        {
            assert iterator.hasNext() : "Expecting one row in one partition but got nothing";
            try (RowIterator partition = iterator.next())
            {
                assert !iterator.hasNext() : "Expecting a single partition but got more";
                assert partition.hasNext() : "Expecting one row in one partition but got an empty partition";
                Row row = partition.next();
                assert !partition.hasNext() : "Expecting a single row but got more";
                return row;
            }
        }
    }

    /** Executes the command and asserts it yields exactly one partition (unfiltered). */
    public static ImmutableBTreePartition getOnlyPartitionUnfiltered(ReadCommand cmd)
    {
        try (ReadExecutionController executionController = cmd.executionController();
             UnfilteredPartitionIterator iterator = cmd.executeLocally(executionController))
        {
            assert iterator.hasNext() : "Expecting a single partition but got nothing";
            try (UnfilteredRowIterator partition = iterator.next())
            {
                assert !iterator.hasNext() : "Expecting a single partition but got more";
                return ImmutableBTreePartition.create(partition);
            }
        }
    }

    /** Executes the command and asserts it yields exactly one partition (filtered). */
    public static FilteredPartition getOnlyPartition(ReadCommand cmd)
    {
        try (ReadExecutionController executionController = cmd.executionController();
             PartitionIterator iterator = cmd.executeInternal(executionController))
        {
            assert iterator.hasNext() : "Expecting a single partition but got nothing";
            try (RowIterator partition = iterator.next())
            {
                assert !iterator.hasNext() : "Expecting a single partition but got more";
                return FilteredPartition.create(partition);
            }
        }
    }

    /** Applies a single-update mutation and returns an iterator over the applied update. */
    public static UnfilteredRowIterator apply(Mutation mutation)
    {
        mutation.apply();
        assert mutation.getPartitionUpdates().size() == 1;
        return mutation.getPartitionUpdates().iterator().next().unfilteredIterator();
    }

    /** Cell of {@code columnName} in {@code row}; asserts the column exists in the schema. */
    public static Cell cell(ColumnFamilyStore cfs, Row row, String columnName)
    {
        ColumnMetadata def = cfs.metadata().getColumn(ByteBufferUtil.bytes(columnName));
        assert def != null;
        return row.getCell(def);
    }

    public static Row row(Partition partition, Object... clustering)
    {
        return partition.getRow(partition.metadata().comparator.make(clustering));
    }

    public static void assertCellValue(Object value, ColumnFamilyStore cfs, Row row, String columnName)
    {
        Cell cell = cell(cfs, row, columnName);
        assert cell != null : "Row " + row.toString(cfs.metadata()) + " has no cell for " + columnName;
        assertEquals(value, cell.column().type.compose(cell.value()));
    }

    /** Drains and closes the iterator. */
    public static void consume(UnfilteredRowIterator iter)
    {
        try (UnfilteredRowIterator iterator = iter)
        {
            while (iterator.hasNext())
                iterator.next();
        }
    }

    /** Counts the partitions in the iterator, closing each one. */
    public static int size(PartitionIterator iter)
    {
        int size = 0;
        while (iter.hasNext())
        {
            ++size;
            iter.next().close();
        }
        return size;
    }

    /** Strict equality: sameContent() plus matching columns() and stats(). */
    public static boolean equal(UnfilteredRowIterator a, UnfilteredRowIterator b)
    {
        return Objects.equals(a.columns(), b.columns())
            && Objects.equals(a.stats(), b.stats())
            && sameContent(a, b);
    }

    // Test equality of the iterators, but without caring too much about the "metadata" of said iterator. This is often
    // what we want in tests. In particular, the columns() reported by the iterators will sometimes differ because they
    // are a superset of what the iterator actually contains, and depending on the method used to get each iterator
    // tested, one may include a defined column the other don't while there is not actual content for that column.
    public static boolean sameContent(UnfilteredRowIterator a, UnfilteredRowIterator b)
    {
        return Objects.equals(a.metadata(), b.metadata())
            && Objects.equals(a.isReverseOrder(), b.isReverseOrder())
            && Objects.equals(a.partitionKey(), b.partitionKey())
            && Objects.equals(a.partitionLevelDeletion(), b.partitionLevelDeletion())
            && Objects.equals(a.staticRow(), b.staticRow())
            && Iterators.elementsEqual(a, b);
    }

    /** Content equality of two mutations: same key, tables, and per-table update content. */
    public static boolean sameContent(Mutation a, Mutation b)
    {
        if (!a.key().equals(b.key()) || !a.getTableIds().equals(b.getTableIds()))
            return false;
        for (PartitionUpdate update : a.getPartitionUpdates())
        {
            if (!sameContent(update.unfilteredIterator(), b.getPartitionUpdate(update.metadata()).unfilteredIterator()))
                return false;
        }
        return true;
    }

    // moved & refactored from KeyspaceTest in < 3.0
    public static void assertColumns(Row row, String... expectedColumnNames)
    {
        Iterator<Cell> cells = row == null ? Collections.emptyIterator() : row.cells().iterator();
        String[] actual = Iterators.toArray(Iterators.transform(cells, new Function<Cell, String>()
        {
            public String apply(Cell cell)
            {
                return cell.column().name.toString();
            }
        }), String.class);

        // Fixed the message template: used to read "Columns [%s])]" with a stray ")".
        assert Arrays.equals(actual, expectedColumnNames)
            : String.format("Columns [%s] is not expected [%s]",
                            ((row == null) ? "" : row.columns().toString()),
                            StringUtils.join(expectedColumnNames, ","));
    }

    public static void assertColumn(TableMetadata cfm, Row row, String name, String value, long timestamp)
    {
        Cell cell = row.getCell(cfm.getColumn(new ColumnIdentifier(name, true)));
        assertColumn(cell, value, timestamp);
    }

    public static void assertColumn(Cell cell, String value, long timestamp)
    {
        assertNotNull(cell);
        assertEquals(0, ByteBufferUtil.compareUnsigned(cell.value(), ByteBufferUtil.bytes(value)));
        assertEquals(timestamp, cell.timestamp());
    }

    public static void assertClustering(TableMetadata cfm, Row row, Object... clusteringValue)
    {
        assertEquals(row.clustering().size(), clusteringValue.length);
        assertEquals(0, cfm.comparator.compare(row.clustering(), cfm.comparator.make(clusteringValue)));
    }

    public static PartitionerSwitcher switchPartitioner(IPartitioner p)
    {
        return new PartitionerSwitcher(p);
    }

    /** Swaps the global partitioner on construction and restores the old one on close(). */
    public static class PartitionerSwitcher implements AutoCloseable
    {
        final IPartitioner oldP;
        final IPartitioner newP;

        public PartitionerSwitcher(IPartitioner partitioner)
        {
            newP = partitioner;
            oldP = StorageService.instance.setPartitionerUnsafe(partitioner);
        }

        public void close()
        {
            IPartitioner p = StorageService.instance.setPartitionerUnsafe(oldP);
            assert p == newP;
        }
    }

    /** Spins (yielding) until the supplier matches {@code expected} or the timeout elapses, then asserts. */
    public static void spinAssertEquals(Object expected, Supplier<Object> s, int timeoutInSeconds)
    {
        long start = System.currentTimeMillis();
        while (System.currentTimeMillis() < start + (1000 * timeoutInSeconds))
        {
            // Objects.equals tolerates the supplier transiently yielding null, where
            // the previous s.get().equals(expected) could NPE mid-spin.
            if (Objects.equals(s.get(), expected))
                break;
            Thread.yield();
        }
        assertEquals(expected, s.get());
    }

    public static void joinThread(Thread thread) throws InterruptedException
    {
        thread.join(10000);
    }

    /** Runs the test, returning the AssertionError it threw, or null on success. */
    public static AssertionError runCatchingAssertionError(Runnable test)
    {
        try
        {
            test.run();
            return null;
        }
        catch (AssertionError e)
        {
            return e;
        }
    }

    /**
     * Wrapper function used to run a test that can sometimes flake for uncontrollable reasons.
     *
     * If the given test fails on the first run, it is executed the given number of times again, expecting all secondary
     * runs to succeed. If they do, the failure is understood as a flake and the test is treated as passing.
     *
     * Do not use this if the test is deterministic and its success is not influenced by external factors (such as time,
     * selection of random seed, network failures, etc.). If the test can be made independent of such factors, it is
     * probably preferable to do so rather than use this method.
     *
     * @param test The test to run.
     * @param rerunsOnFailure How many times to re-run it if it fails. All reruns must pass.
     * @param message Message to send to System.err on initial failure.
     */
    public static void flakyTest(Runnable test, int rerunsOnFailure, String message)
    {
        AssertionError e = runCatchingAssertionError(test);
        if (e == null)
            return; // success

        logger.info("Test failed. {}", message, e);
        logger.info("Re-running {} times to verify it isn't failing more often than it should.", rerunsOnFailure);

        int rerunsFailed = 0;
        for (int i = 0; i < rerunsOnFailure; ++i)
        {
            AssertionError t = runCatchingAssertionError(test);
            if (t != null)
            {
                ++rerunsFailed;
                e.addSuppressed(t);
                logger.debug("Test failed again, total num failures: {}", rerunsFailed, t);
            }
        }
        if (rerunsFailed > 0)
        {
            logger.error("Test failed in {} of the {} reruns.", rerunsFailed, rerunsOnFailure);
            throw e;
        }

        logger.info("All reruns succeeded. Failure treated as flake.");
    }

    // for use with Optional in tests, can be used as an argument to orElseThrow
    public static Supplier<AssertionError> throwAssert(final String message)
    {
        return () -> new AssertionError(message);
    }

    /** UnfilteredRowIterator backed by a plain Iterator of Unfiltered, with no deletions and no stats. */
    public static class UnfilteredSource extends AbstractUnfilteredRowIterator implements UnfilteredRowIterator
    {
        Iterator<Unfiltered> content;

        public UnfilteredSource(TableMetadata metadata, DecoratedKey partitionKey, Row staticRow, Iterator<Unfiltered> content)
        {
            super(metadata,
                  partitionKey,
                  DeletionTime.LIVE,
                  metadata.regularAndStaticColumns(),
                  staticRow != null ? staticRow : Rows.EMPTY_STATIC_ROW,
                  false,
                  EncodingStats.NO_STATS);
            this.content = content;
        }

        @Override
        protected Unfiltered computeNext()
        {
            return content.hasNext() ? content.next() : endOfData();
        }
    }

    /** Executes the range command straight against storage, bypassing higher-level machinery. */
    public static UnfilteredPartitionIterator executeLocally(PartitionRangeReadCommand command,
                                                             ColumnFamilyStore cfs,
                                                             ReadExecutionController controller)
    {
        return command.queryStorage(cfs, controller);
    }

    /** Marks every data directory of the store unwritable; the returned Closeable undoes it. */
    public static Closeable markDirectoriesUnwriteable(ColumnFamilyStore cfs)
    {
        try
        {
            for ( ; ; )
            {
                DataDirectory dir = cfs.getDirectories().getWriteableLocation(1);
                BlacklistedDirectories.maybeMarkUnwritable(cfs.getDirectories().getLocationForDisk(dir));
            }
        }
        catch (IOError e)
        {
            // Expected -- marked all directories as unwritable
        }

        return () -> BlacklistedDirectories.clearUnwritableUnsafe();
    }

    /** Builds a non-trivial PagingState against a throwaway table for serialization tests. */
    public static PagingState makeSomePagingState(ProtocolVersion protocolVersion)
    {
        TableMetadata metadata =
            TableMetadata.builder("ks", "tbl")
                         .addPartitionKeyColumn("k", AsciiType.instance)
                         .addClusteringColumn("c1", AsciiType.instance)
                         .addClusteringColumn("c2", Int32Type.instance)
                         .addRegularColumn("myCol", AsciiType.instance)
                         .build();

        ByteBuffer pk = ByteBufferUtil.bytes("someKey");

        ColumnMetadata def = metadata.getColumn(new ColumnIdentifier("myCol", false));
        Clustering c = Clustering.make(ByteBufferUtil.bytes("c1"), ByteBufferUtil.bytes(42));
        Row row = BTreeRow.singleCellRow(c, BufferCell.live(def, 0, ByteBufferUtil.EMPTY_BYTE_BUFFER));
        PagingState.RowMark mark = PagingState.RowMark.create(metadata, row, protocolVersion);
        return new PagingState(pk, mark, 10, 0);
    }
}
| |
package com.flipkart.perf.server.resource;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.flipkart.perf.common.util.ClassHelper;
import com.flipkart.perf.common.util.FileHelper;
import com.flipkart.perf.function.FunctionParameter;
import com.flipkart.perf.inmemorydata.SharedDataInfo;
import org.codehaus.jackson.map.ObjectMapper;
import org.reflections.Reflections;
import org.reflections.Store;
import com.flipkart.perf.server.cache.LibCache;
import com.flipkart.perf.server.config.ResourceStorageFSConfig;
import com.flipkart.perf.server.domain.FunctionInfo;
import com.flipkart.perf.server.util.ObjectMapperUtil;
import com.google.common.collect.Multimap;
import com.sun.jersey.core.header.FormDataContentDisposition;
import com.sun.jersey.multipart.FormDataParam;
import com.yammer.metrics.annotation.Timed;
import com.flipkart.perf.server.util.ResponseBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Resource that deploys libs, file resources on the server
*/
@Path("/resourceTypes")
public class DeployResourcesResource {
// Filesystem layout (paths) for all deployable resource types: UDF libs, platform libs, input files.
private ResourceStorageFSConfig resourceStorageFSConfig;
// Shared cache of deployed libraries; refreshed after every deployment.
private LibCache libCache;
private static ObjectMapper objectMapper = ObjectMapperUtil.instance();
private static Logger logger = LoggerFactory.getLogger(DeployResourcesResource.class);
/**
 * Creates the resource and immediately deploys any UDF libs that were staged
 * on disk (copied during deployment but not yet deployed).
 * @param resourceStorageFSConfig filesystem paths used to store deployed resources
 * @throws MalformedURLException if a lib path cannot be converted to a URL during deployment
 */
public DeployResourcesResource(ResourceStorageFSConfig resourceStorageFSConfig) throws MalformedURLException {
    this.resourceStorageFSConfig = resourceStorageFSConfig;
    this.libCache = LibCache.instance();
    deployUnDeployedUDFLibs();
}
// UDF Libs which are copied as part of deployment will always be un deployed before 1st loader-server start.
// Deploy each staged lib, then remove the staged copy; a lib that fails to deploy is kept for retry.
private void deployUnDeployedUDFLibs() {
    File[] unDeployedUDFLibs = new File(resourceStorageFSConfig.getUdfUnDeployedLibsPath()).listFiles();
    if (unDeployedUDFLibs == null) {
        // listFiles() returns null when the directory is missing/unreadable; nothing staged to deploy.
        return;
    }
    for (File unDeployedUDFLib : unDeployedUDFLibs) {
        // try-with-resources: the original leaked the FileInputStream on every iteration.
        try (FileInputStream libStream = new FileInputStream(unDeployedUDFLib)) {
            deployUDF(libStream, unDeployedUDFLib.getName());
            unDeployedUDFLib.delete();
        } catch (Exception e) {
            logger.error("Exception in deploying undeployed UDFs", e);
        }
    }
}
// URLClassLoader whose protected addURL is widened to public so jars discovered
// at runtime (deployed platform libs, uploaded UDF jars) can be appended to the
// classpath before reflective inspection.
static class CustomClassLoader extends URLClassLoader {
    public CustomClassLoader(URL[] urls) {
        super(urls);
    }
    @Override
    public void addURL(URL url) {
        super.addURL(url);
    }
}
/**
 Following call simulates html form post call, where somebody uploads a file to server
 curl
    -X POST
    -H "Content-Type: multipart/form-data"
    -F "lib=@Path-To-Jar-File"
    http://localhost:8888/loader-server/resourceTypes/udfLibs
 *
 * Delegates to the private overload using the uploaded file's original name.
 *
 * @param libInputStream jar input stream
 * @param libFileDetails Lib file meta details (supplies the original file name)
 * @return map of discovered performance-function class names to their metadata
 * @throws java.io.IOException
 */
@Path("/udfLibs")
@POST
@Timed
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
synchronized public Map<String, FunctionInfo> deployUDF(
        @FormDataParam("lib") InputStream libInputStream,
        @FormDataParam("lib") FormDataContentDisposition libFileDetails) throws IOException, ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException {
    return deployUDF(libInputStream, libFileDetails.getFileName());
}
/**
 * Persists the uploaded jar under the UDF libs directory, discovers the
 * performance functions it contains, records their metadata on disk and
 * refreshes the lib cache.
 */
private Map<String, FunctionInfo> deployUDF(InputStream libInputStream, String udfFileName) throws IOException, ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException {
    String jarPath = resourceStorageFSConfig.getUdfLibsPath() + File.separator + udfFileName;
    FileHelper.persistStream(libInputStream, jarPath);
    Map<String, FunctionInfo> functions = discoverUserFunctions(jarPath);
    persistDiscoveredUserFunctions(udfFileName, functions);
    this.libCache.refreshClassLibMap();
    return functions;
}
/**
 * Persist user class and jar mapping.
 * Persist Class information which could be later presented via http get end point or UI.
 * @param libFileName name of the uploaded UDF jar
 * @param discoveredUserFunctions functions discovered in that jar, keyed by class name
 * @throws IOException if the mapping or info files cannot be written
 */
private void persistDiscoveredUserFunctions(String libFileName, Map<String, FunctionInfo> discoveredUserFunctions) throws IOException {
    // Loop-invariant: the lib path only depends on the jar name, not the function.
    String libPath = resourceStorageFSConfig.getUdfLibsPath() + File.separator + libFileName;
    // Iterate entries instead of keySet()+get() — one lookup per function instead of two.
    for (Map.Entry<String, FunctionInfo> entry : discoveredUserFunctions.entrySet()) {
        String userFunction = entry.getKey();
        mergeMappingFile(libPath, userFunction);
        String functionInfoFile = resourceStorageFSConfig.getUserClassInfoPath() + File.separator + userFunction + ".info.json";
        FileHelper.createFile(functionInfoFile);
        objectMapper.writeValue(new File(functionInfoFile), entry.getValue());
    }
}
// Selector for the shape of the map returned by getLibs():
// LIB -> Map(lib -> list of classes), CLASS -> Map(class -> lib).
enum MapKey {
    LIB,CLASS;
}
/**
 *
 * @param mapKey takes LIB or CLASS as value. Default value is LIB
 * @return returns either Map(lib -> list of class) or Map(class -> Lib) depending upon mapKey
 * @throws java.io.IOException
 */
@Path("/udfLibs")
@GET
@Produces(MediaType.APPLICATION_JSON)
public Map getLibs(@QueryParam("mapKey") @DefaultValue("LIB") String mapKey) throws IOException {
    MapKey key;
    try {
        key = MapKey.valueOf(mapKey);
    } catch (IllegalArgumentException e) {
        // An unknown mapKey used to escape as IllegalArgumentException (HTTP 500);
        // the unreachable default branch below shows 400 was the intent.
        throw new WebApplicationException(400);
    }
    switch (key) {
        case LIB:
            return libCache.getLibsMapWithLibAsKey();
        case CLASS:
            return libCache.getLibsMapWithClassAsKey();
        default:
            throw new WebApplicationException(400);
    }
}
/**
 Following call simulates html form post call, where somebody uploads a file to server
 curl
    -X POST
    -H "Content-Type: multipart/form-data"
    -F "lib=@Path-To-Zip-File-Containing-Platform-Lib-File"
    http://localhost:8888/loader-server/resourceTypes/platformLibs
 * Replaces platform.zip atomically-ish: the old zip is moved aside, the new one is
 * persisted and unzipped; on IO failure the old zip is restored.
 * @param libInputStream zip containing platform jars
 */
@Path("/platformLibs")
@POST
@Timed
@Consumes(MediaType.MULTIPART_FORM_DATA)
synchronized public void deployPlatformLib(
        @FormDataParam("lib") InputStream libInputStream){
    String platformZipPath = resourceStorageFSConfig.getPlatformLibPath()+ File.separator + "platform.zip";
    String tmpPlatformZipPath = resourceStorageFSConfig.getPlatformLibPath()+ File.separator + "platform.zip.tmp";
    try {
        // Keep the previous zip around so a failed upload can be rolled back.
        FileHelper.move(platformZipPath, tmpPlatformZipPath);
        FileHelper.persistStream(libInputStream, platformZipPath);
        // try-with-resources: the original leaked this FileInputStream.
        try (FileInputStream zipStream = new FileInputStream(platformZipPath)) {
            FileHelper.unzip(zipStream, resourceStorageFSConfig.getPlatformLibPath());
        }
        FileHelper.remove(tmpPlatformZipPath);
    } catch (IOException e) {
        // Log (instead of printStackTrace) and restore the previous platform.zip.
        logger.error("Platform lib deployment failed, restoring previous platform.zip", e);
        FileHelper.move(tmpPlatformZipPath, platformZipPath);
    }
    finally {
        this.libCache.refreshPlatformLibPath();
    }
}
/** Lists the files currently deployed under the platform lib directory. */
@Path("/platformLibs")
@GET
@Timed
@Produces(MediaType.APPLICATION_JSON)
synchronized public List getPlatformLib(){
    String[] libs = new File(resourceStorageFSConfig.getPlatformLibPath()).list();
    // list() returns null when the directory does not exist; report an empty
    // deployment instead of letting Arrays.asList(null) throw an NPE.
    return libs == null ? new ArrayList<String>() : Arrays.asList(libs);
}
/**
 Following call simulates html form post call, where somebody uploads a file to server
 curl
    -X POST
    -H "Content-Type: multipart/form-data"
    -F "lib=@Path-To-Zip-File-Containing-Platform-Lib-File"
    http://localhost:8888/loader-server/resourceTypes/inputFiles
 * @param inputStream content of the input file resource
 * @param resourceName name under which the resource is stored; must be non-empty
 */
@Path("/inputFiles")
@POST
@Timed
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_JSON)
synchronized public Response deployInputFiles(
        @FormDataParam("file") InputStream inputStream,
        @FormDataParam("resourceName") String resourceName) {
    // Validate the name BEFORE using it to build any filesystem path; the original
    // called getInputFilePath(resourceName) with a possibly null/empty name first.
    if(resourceName == null || resourceName.trim().equals(""))
        throw new WebApplicationException(ResponseBuilder.badRequest("resourceName can not be empty"));
    File resourceFile = new File(resourceStorageFSConfig.getInputFilePath(resourceName));
    if(resourceFile.exists()) {
        throw new WebApplicationException(ResponseBuilder.resourceAlreadyExists("inputFile", resourceName));
    }
    FileHelper.createFilePath(resourceFile.getAbsolutePath());
    try {
        FileHelper.persistStream(inputStream, resourceFile.getAbsolutePath());
        // Seed an empty agent-deployment record alongside the file.
        objectMapper.writerWithDefaultPrettyPrinter().writeValue(new File(resourceStorageFSConfig.getInputFileAgentDeploymentPath(resourceName)), new HashMap());
        return ResponseBuilder.resourceCreated("inputFile", resourceName);
    } catch (IOException e) {
        // Log (instead of printStackTrace) and clean up the partially-created folder.
        logger.error("Failed to deploy input file " + resourceName, e);
        FileHelper.remove(resourceStorageFSConfig.getInputFileFolderPath(resourceName));
        throw new WebApplicationException(ResponseBuilder.internalServerError(e));
    }
}
/** Lists the names of all deployed input file resources (empty if none deployed yet). */
@Path("/inputFiles")
@GET
@Timed
@Produces(MediaType.APPLICATION_JSON)
synchronized public List<String> getInputFiles() throws IOException {
    File inputFilesDir = new File(resourceStorageFSConfig.getInputFilesPath());
    if (!inputFilesDir.exists()) {
        return new ArrayList<String>();
    }
    return Arrays.asList(inputFilesDir.list());
}
/** Streams back the content of a deployed input file, or 404 if it does not exist. */
@Path("/inputFiles/{resourceName}")
@GET
@Timed
@Produces(MediaType.TEXT_PLAIN)
public InputStream getInputFile(@PathParam("resourceName") String resourceName) throws IOException {
    File resourceFile = new File(resourceStorageFSConfig.getInputFilePath(resourceName));
    if (resourceFile.exists()) {
        return new FileInputStream(resourceFile);
    }
    throw new WebApplicationException(ResponseBuilder.resourceNotFound("inputFile", resourceName));
}
/** Replaces the content of an existing input file with the uploaded stream; 404 if absent. */
@Path("/inputFiles/{resourceName}")
@PUT
@Timed
@Consumes(MediaType.MULTIPART_FORM_DATA)
synchronized public void updateInputFile(
        @FormDataParam("file") InputStream inputStream, @PathParam("resourceName") String resourceName) throws IOException {
    File target = new File(resourceStorageFSConfig.getInputFilePath(resourceName));
    if (!target.exists()) {
        throw new WebApplicationException(ResponseBuilder.resourceNotFound("inputFile", resourceName));
    }
    target.delete();
    FileHelper.persistStream(inputStream, target.getAbsolutePath());
}
/** Removes the whole folder backing the given input file resource; 404 if absent. */
@Path("/inputFiles/{resourceName}")
@DELETE
@Timed
@Produces(MediaType.TEXT_PLAIN)
public void deleteInputFile(@PathParam("resourceName") String resourceName) throws IOException {
    File resourceFolder = new File(resourceStorageFSConfig.getInputFileFolderPath(resourceName));
    if (!resourceFolder.exists()) {
        throw new WebApplicationException(ResponseBuilder.resourceNotFound("inputFile", resourceName));
    }
    FileHelper.remove(resourceFolder.getAbsolutePath());
}
/**
 * Discover Performance Functions from the uploaded userLibJar.
 * Builds a class loader over the system classpath, all deployed platform libs and the
 * uploaded jar, scans the jar with Reflections for subtypes of
 * com.flipkart.perf.function.PerformanceFunction, and interrogates each one
 * reflectively for its description, parameters, timers, counters, histograms
 * and shared data.
 * @param userLibJar absolute path of the uploaded UDF jar on disk
 * @return map of performance-function class name to its discovered metadata
 * @throws IOException
 * @throws ClassNotFoundException
 * @throws NoSuchMethodException
 * @throws InvocationTargetException
 * @throws InstantiationException
 * @throws IllegalAccessException
 */
public Map<String,FunctionInfo> discoverUserFunctions(String userLibJar) throws IOException, ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException {
    Map<String, FunctionInfo> discoveredUserFunctions = new HashMap<String, FunctionInfo>();
    // NOTE(review): this cast assumes the system class loader is a URLClassLoader,
    // which holds on Java 8 but fails on Java 9+ — confirm the target JVM version.
    URLClassLoader loader = (URLClassLoader)ClassLoader.getSystemClassLoader();
    CustomClassLoader customClassLoader = new CustomClassLoader(loader.getURLs());
    File platformLibPath = new File(this.resourceStorageFSConfig.getPlatformLibPath());
    // Make every deployed platform lib visible to the UDF classes being inspected.
    if(platformLibPath.exists()) {
        File[] platformLibs = platformLibPath.listFiles();
        for(File platformLib : platformLibs) {
            customClassLoader.addURL(new URL("file://" + platformLib.getAbsolutePath()));
        }
    }
    customClassLoader.addURL(new URL("file://" + userLibJar));
    System.out.println("User Lib Path = " + userLibJar);
    // Scan only the uploaded jar; the store maps scanner name -> (supertype -> subtypes).
    Reflections reflections = new Reflections("");
    reflections.scan(new URL("file://"+userLibJar));
    Store reflectionStore = reflections.getStore();
    Map<String, Multimap<String, String>> storeMap = reflectionStore.getStoreMap();
    Multimap<String,String> subTypesScanner = storeMap.get("SubTypesScanner");
    if(subTypesScanner != null) {
        Collection<String> performanceFunctions = subTypesScanner.get("com.flipkart.perf.function.PerformanceFunction");
        for(String performanceFunction : performanceFunctions) {
            if(!discoveredUserFunctions.containsKey(performanceFunction)) {
                FunctionInfo functionInfo = new FunctionInfo().
                        setFunction(performanceFunction);
                // Instantiate the UDF via its no-arg constructor, then query each
                // metadata method reflectively through the custom class loader.
                // Discover Usage description for the UDF
                Object object = ClassHelper.getClassInstance(performanceFunction, new Class[]{}, new Object[]{}, customClassLoader);
                Method method = ClassHelper.getMethod(performanceFunction , "description", new Class[]{}, customClassLoader);
                functionInfo.setDescription((List<String>) method.invoke(object, new Object[]{}));
                // Discover Input parameters for the UDF
                method = ClassHelper.getMethod(performanceFunction , "inputParameters", new Class[]{}, customClassLoader);
                functionInfo.setInputParameters((LinkedHashMap<String, FunctionParameter>) method.invoke(object, new Object[]{}));
                // Discover Output parameters for the UDF
                method = ClassHelper.getMethod(performanceFunction , "outputParameters", new Class[]{}, customClassLoader);
                functionInfo.setOutputParameters((LinkedHashMap<String, FunctionParameter>) method.invoke(object, new Object[]{}));
                // Discover Custom timers for the UDF
                method = ClassHelper.getMethod(performanceFunction , "customTimers", new Class[]{}, customClassLoader);
                functionInfo.setCustomTimers((List<String>) method.invoke(object, new Object[]{}));
                // Discover Custom Counters for the UDF
                method = ClassHelper.getMethod(performanceFunction , "customCounters", new Class[]{}, customClassLoader);
                functionInfo.setCustomCounters((List<String>) method.invoke(object, new Object[]{}));
                // Discover Custom Histograms for the UDF
                method = ClassHelper.getMethod(performanceFunction , "customHistograms", new Class[]{}, customClassLoader);
                functionInfo.setCustomHistograms((List<String>) method.invoke(object, new Object[]{}));
                // Discover shared data declared by the UDF
                method = ClassHelper.getMethod(performanceFunction , "sharedData", new Class[]{}, customClassLoader);
                functionInfo.setSharedData((LinkedHashMap<String, SharedDataInfo>) method.invoke(object, new Object[]{}));
                discoveredUserFunctions.put(performanceFunction, functionInfo);
            }
        }
    }
    return discoveredUserFunctions;
}
/**
 * Updates the mapping file that maps a user-function class name to the jar
 * (library path) containing it.
 * <p>
 * Fixes over the previous version: the {@code FileOutputStream} used by
 * {@code Properties.store} is now closed (it previously leaked), the duplicate
 * {@code FileHelper.createFile} call is gone, and I/O errors now propagate via
 * the declared {@code throws IOException} instead of being swallowed with
 * {@code printStackTrace()}.
 *
 * @param libPath           path of the jar containing the class
 * @param userFunctionClass fully-qualified user function class name
 * @throws IOException if the mapping file cannot be read or written
 */
synchronized private void mergeMappingFile(String libPath, String userFunctionClass) throws IOException {
    String mappingFile = resourceStorageFSConfig.getUserClassLibMappingFile();
    Properties prop = new Properties();
    FileHelper.createFile(mappingFile); // ensure the file exists before reading
    InputStream mappingFileIS = new FileInputStream(mappingFile);
    try {
        prop.load(mappingFileIS);
    }
    finally {
        mappingFileIS.close();
    }
    prop.put(userFunctionClass, libPath);
    FileOutputStream mappingFileOS = new FileOutputStream(mappingFile);
    try {
        prop.store(mappingFileOS, "Class and Library Mapping");
    }
    finally {
        mappingFileOS.close(); // previously leaked: the stream was never closed
    }
}
}
| |
/*
* Copyright 2012 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.deserializer;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.apache.synapse.mediators.base.SequenceMediator;
import org.apache.synapse.rest.API;
import org.apache.synapse.rest.Handler;
import org.apache.synapse.rest.Resource;
import org.apache.synapse.rest.dispatch.DispatcherHelper;
import org.apache.synapse.rest.dispatch.URITemplateHelper;
import org.apache.synapse.rest.dispatch.URLMappingHelper;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.emf.common.util.BasicEList;
import org.eclipse.emf.common.util.EList;
import org.eclipse.gmf.runtime.diagram.ui.commands.ICommandProxy;
import org.eclipse.gmf.runtime.diagram.ui.commands.SetBoundsCommand;
import org.eclipse.gmf.runtime.diagram.ui.editparts.GraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.emf.core.util.EObjectAdapter;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.APIHandler;
import org.wso2.developerstudio.eclipse.gmf.esb.APIHandlerProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.APIResource;
import org.wso2.developerstudio.eclipse.gmf.esb.ApiResourceUrlStyle;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.LogProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.MediatorFlow;
import org.wso2.developerstudio.eclipse.gmf.esb.RegistryKeyProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.SequenceType;
import org.wso2.developerstudio.eclipse.gmf.esb.SynapseAPI;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.custom.DummyHandler;
import static org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage.Literals.*;
/**
 * Deserializes a Synapse runtime {@link API} object into its graphical
 * {@link SynapseAPI} EMF model element so the API can be rendered and edited
 * on the ESB diagram canvas.
 */
public class APIDeserializer extends AbstractEsbNodeDeserializer<API, SynapseAPI> {
    /**
     * Creates and populates the SynapseAPI diagram node for the given API:
     * basic attributes (name, context, optional host/port), one APIResource
     * child per REST resource (allowed HTTP methods, URL style, in/out/fault
     * sequences), the API handler chain, and finally a simple vertical layout
     * of the resource figures.
     *
     * @param part parent graphical edit part under which the node is created
     * @param api  the Synapse API configuration being deserialized
     * @return the populated SynapseAPI model element
     */
    @Override
    public SynapseAPI createNode(IGraphicalEditPart part,API api) {
        SynapseAPI synapseAPI = (SynapseAPI) DeserializerUtils.createNode(part, EsbElementTypes.SynapseAPI_3668);
        setElementToEdit(synapseAPI);
        refreshEditPartMap();
        executeSetValueCommand(SYNAPSE_API__API_NAME, api.getAPIName());
        executeSetValueCommand(SYNAPSE_API__CONTEXT, api.getContext());
        // Host and port are optional in the API definition; copy only when set.
        if (api.getHost() != null) {
            executeSetValueCommand(SYNAPSE_API__HOST_NAME, api.getHost());
        }
        if (api.getPort() > 0) {
            executeSetValueCommand(SYNAPSE_API__PORT, api.getPort());
        }
        // First child of the API edit part is the compartment that holds resources.
        GraphicalEditPart apiCompartment = (GraphicalEditPart) getEditpart(synapseAPI).getChildren().get(0);
        Resource[] resources = api.getResources();
        int locationY = 0; // running vertical offset used to stack resource figures
        for (int i = 0; i < resources.length; i++) {
            APIResource resource = (APIResource) DeserializerUtils.createNode(apiCompartment, EsbElementTypes.APIResource_3669);
            refreshEditPartMap();
            setElementToEdit(resource);
            // Translate the resource's HTTP method list into the individual allow flags.
            List<String> methodList = Arrays.asList(resources[i].getMethods());
            executeSetValueCommand(API_RESOURCE__ALLOW_GET, methodList.contains("GET"));
            executeSetValueCommand(API_RESOURCE__ALLOW_POST, methodList.contains("POST"));
            executeSetValueCommand(API_RESOURCE__ALLOW_OPTIONS, methodList.contains("OPTIONS"));
            executeSetValueCommand(API_RESOURCE__ALLOW_DELETE, methodList.contains("DELETE"));
            executeSetValueCommand(API_RESOURCE__ALLOW_PUT, methodList.contains("PUT"));
            // The concrete dispatcher helper type decides the resource's URL style.
            DispatcherHelper dispatcherHelper = resources[i].getDispatcherHelper();
            if(dispatcherHelper instanceof URITemplateHelper){
                URITemplateHelper helper = (URITemplateHelper) dispatcherHelper;
                executeSetValueCommand(API_RESOURCE__URL_STYLE, ApiResourceUrlStyle.URI_TEMPLATE);
                executeSetValueCommand(API_RESOURCE__URI_TEMPLATE, helper.getString());
            } else if(dispatcherHelper instanceof URLMappingHelper){
                URLMappingHelper helper = (URLMappingHelper) dispatcherHelper;
                executeSetValueCommand(API_RESOURCE__URL_STYLE,ApiResourceUrlStyle.URL_MAPPING);
                executeSetValueCommand(API_RESOURCE__URL_MAPPING, helper.getString());
            } else{
                executeSetValueCommand(API_RESOURCE__URL_STYLE,ApiResourceUrlStyle.NONE);
            }
            addRootInputConnector(resource.getInputConnector());
            MediatorFlow mediatorFlow = resource.getContainer().getSequenceAndEndpointContainer().getMediatorFlow();
            GraphicalEditPart compartment = (GraphicalEditPart)((getEditpart(mediatorFlow)).getChildren().get(0));
            // In-sequence: either deserialize the inline sequence into the
            // compartment, or record a registry/named reference when only a key
            // is given.
            SequenceMediator inSequence = resources[i].getInSequence();
            if(inSequence!=null){
                setRootCompartment(compartment);
                deserializeSequence(compartment, inSequence, resource.getOutputConnector());
                setRootCompartment(null);
            } else{
                String inSequenceName = resources[i].getInSequenceKey();
                if(inSequenceName!=null){
                    // Keys beginning with "/", "conf:" or "gov:" point into the registry.
                    if(inSequenceName.startsWith("/") || inSequenceName.startsWith("conf:") || inSequenceName.startsWith("gov:")){
                        resource.setInSequenceType(SequenceType.REGISTRY_REFERENCE);
                        RegistryKeyProperty keyProperty = EsbFactory.eINSTANCE.createRegistryKeyProperty();
                        keyProperty.setKeyValue(inSequenceName);
                        executeSetValueCommand(API_RESOURCE__IN_SEQUENCE_KEY, keyProperty);
                    } else{
                        executeSetValueCommand(API_RESOURCE__IN_SEQUENCE_TYPE, SequenceType.NAMED_REFERENCE);
                        executeSetValueCommand(API_RESOURCE__IN_SEQUENCE_NAME, inSequenceName);
                    }
                }
            }
            // Out-sequence: same inline-vs-reference handling as the in-sequence,
            // but wired to the resource's input connector.
            SequenceMediator outSequence = resources[i].getOutSequence();
            if(outSequence!=null){
                setRootCompartment(compartment);
                deserializeSequence(compartment, outSequence, resource.getInputConnector());
                setRootCompartment(null);
            } else{
                String outSequenceName = resources[i].getOutSequenceKey();
                if(outSequenceName!=null){
                    if(outSequenceName.startsWith("/") || outSequenceName.startsWith("conf:") || outSequenceName.startsWith("gov:")){
                        resource.setOutSequenceType(SequenceType.REGISTRY_REFERENCE);
                        RegistryKeyProperty keyProperty = EsbFactory.eINSTANCE.createRegistryKeyProperty();
                        keyProperty.setKeyValue(outSequenceName);
                        executeSetValueCommand(API_RESOURCE__OUT_SEQUENCE_KEY, keyProperty);
                    } else{
                        executeSetValueCommand(API_RESOURCE__OUT_SEQUENCE_TYPE, SequenceType.NAMED_REFERENCE);
                        executeSetValueCommand(API_RESOURCE__OUT_SEQUENCE_NAME, outSequenceName);
                    }
                }
            }
            // Fault-sequence: inline sequences go into the dedicated fault
            // compartment; otherwise record the reference as above.
            SequenceMediator faultSequence = resources[i].getFaultSequence();
            if(faultSequence!=null){
                MediatorFlow faultMediatorFlow = resource.getContainer().getFaultContainer().getMediatorFlow();
                GraphicalEditPart faultCompartment = (GraphicalEditPart)((getEditpart(faultMediatorFlow)).getChildren().get(0));
                setRootCompartment(faultCompartment);
                deserializeSequence(faultCompartment, faultSequence, resource.getFaultInputConnector());
                setRootCompartment(null);
            } else{
                String faultSequenceName = resources[i].getFaultSequenceKey();
                if(faultSequenceName!=null){
                    if(faultSequenceName.startsWith("/") || faultSequenceName.startsWith("conf:") || faultSequenceName.startsWith("gov:")){
                        resource.setFaultSequenceType(SequenceType.REGISTRY_REFERENCE);
                        RegistryKeyProperty keyProperty = EsbFactory.eINSTANCE.createRegistryKeyProperty();
                        keyProperty.setKeyValue(faultSequenceName);
                        executeSetValueCommand(API_RESOURCE__FAULT_SEQUENCE_KEY, keyProperty);
                    } else{
                        executeSetValueCommand(API_RESOURCE__FAULT_SEQUENCE_TYPE, SequenceType.NAMED_REFERENCE);
                        executeSetValueCommand(API_RESOURCE__FAULT_SEQUENCE_NAME, faultSequenceName);
                    }
                }
            }
            // Deserialize the API-level handler chain into the model.
            // NOTE(review): this loop runs once per resource, so an API with more
            // than one resource appears to add each handler multiple times —
            // confirm whether it should live outside the resource loop.
            for(Handler handler : api.getHandlers()) {
                APIHandler apiHandler = EsbFactory.eINSTANCE.createAPIHandler();
                // DummyHandler stands in for handler classes that are not on the
                // tooling classpath; use its recorded class name instead.
                if(handler instanceof DummyHandler) {
                    DummyHandler dummyHandler = (DummyHandler) handler;
                    apiHandler.setClassName(dummyHandler.getClassName());
                } else {
                    apiHandler.setClassName(handler.getClass().getName());
                }
                // Raw Iterator: handler properties come from an untyped map in the
                // Synapse API.
                Iterator itr = handler.getProperties().keySet().iterator();
                while (itr.hasNext()) {
                    APIHandlerProperty handlerProperty = EsbFactory.eINSTANCE.createAPIHandlerProperty();
                    String propertyName = (String) itr.next();
                    handlerProperty.setName(propertyName);
                    handlerProperty.setValue((String)handler.getProperties().get(propertyName));
                    apiHandler.getProperties().add(handlerProperty);
                }
                executeAddValueCommand(synapseAPI.getHandlers(),apiHandler);
            }
            addPairMediatorFlow(resource.getOutputConnector(),resource.getInputConnector());
            // Stack the resource figure below the previous one with a 25px gap.
            IGraphicalEditPart graphicalNode = (IGraphicalEditPart) AbstractEsbNodeDeserializer.getEditpart(resource);
            if(graphicalNode!=null){
                Rectangle rect = new Rectangle(new Point(), graphicalNode.getFigure().getPreferredSize()).getCopy();
                rect.x = 0;
                rect.y = locationY;
                SetBoundsCommand sbc = new SetBoundsCommand(graphicalNode.getEditingDomain(),
                        "change location", new EObjectAdapter((View) graphicalNode.getModel()), rect);
                graphicalNode.getDiagramEditDomain().getDiagramCommandStack()
                        .execute(new ICommandProxy(sbc));
                locationY += rect.height;
                locationY += 25;
            }
        }
        return synapseAPI;
    }
}
| |
/**
* Copyright 2013, Big Switch Networks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
package net.floodlightcontroller.flowcache;
import static org.easymock.EasyMock.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.ListIterator;
import net.floodlightcontroller.core.IListener.Command;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.test.MockThreadPoolService;
import net.floodlightcontroller.counter.ICounterStoreService;
import net.floodlightcontroller.counter.SimpleCounter;
import net.floodlightcontroller.counter.CounterValue.CounterType;
import net.floodlightcontroller.flowcache.IFlowReconcileListener;
import net.floodlightcontroller.flowcache.OFMatchReconcile;
import net.floodlightcontroller.flowcache.PriorityPendingQueue.EventPriority;
import net.floodlightcontroller.test.FloodlightTestCase;
import net.floodlightcontroller.threadpool.IThreadPoolService;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.Before;
import org.junit.Test;
import org.openflow.protocol.OFStatisticsRequest;
import org.openflow.protocol.OFType;
/**
 * Unit tests for FlowReconcileManager: listener pipeline ordering and
 * exception handling, packet-in rate computation, reconcile capacity, flow
 * queue ordering, and concurrent queueing from many threads.
 */
public class FlowReconcileMgrTest extends FloodlightTestCase {
    protected FlowReconcileManager flowReconcileMgr;
    protected MockThreadPoolService threadPool;
    protected ICounterStoreService counterStore;
    protected FloodlightModuleContext fmc;
    OFStatisticsRequest ofStatsRequest;
    // Sizing for the multi-threaded queueing test (flowQueueTest below).
    protected int NUM_FLOWS_PER_THREAD = 100;
    protected int NUM_THREADS = 20;
    /** Wires the manager to a mock thread pool and a mocked counter store. */
    @Before
    public void setUp() throws Exception {
        super.setUp();
        fmc = new FloodlightModuleContext();
        flowReconcileMgr = new FlowReconcileManager();
        threadPool = new MockThreadPoolService();
        counterStore = createMock(ICounterStoreService.class);
        fmc.addService(ICounterStoreService.class, counterStore);
        fmc.addService(IThreadPoolService.class, threadPool);
        threadPool.init(fmc);
        flowReconcileMgr.init(fmc);
        threadPool.startUp(fmc);
        flowReconcileMgr.startUp(fmc);
    }
    /** Verify pipeline listener registration and ordering
     *
     * Exercises four scenarios: a listener that throws (exception must be
     * caught), STOP short-circuiting the chain, CONTINUE passing the flow on,
     * and removal of a listener from the chain.
     * @throws Exception
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testFlowReconcilePipeLine() throws Exception {
        flowReconcileMgr.flowReconcileEnabled = true;
        IFlowReconcileListener r1 =
                EasyMock.createNiceMock(IFlowReconcileListener.class);
        IFlowReconcileListener r2 =
                EasyMock.createNiceMock(IFlowReconcileListener.class);
        IFlowReconcileListener r3 =
                EasyMock.createNiceMock(IFlowReconcileListener.class);
        expect(r1.getName()).andReturn("r1").anyTimes();
        expect(r2.getName()).andReturn("r2").anyTimes();
        expect(r3.getName()).andReturn("r3").anyTimes();
        // Set the listeners' order: r1 -> r2 -> r3
        expect(r1.isCallbackOrderingPrereq((OFType)anyObject(),
                (String)anyObject())).andReturn(false).anyTimes();
        expect(r1.isCallbackOrderingPostreq((OFType)anyObject(),
                (String)anyObject())).andReturn(false).anyTimes();
        expect(r2.isCallbackOrderingPrereq((OFType)anyObject(),
                eq("r1"))).andReturn(true).anyTimes();
        expect(r2.isCallbackOrderingPrereq((OFType)anyObject(),
                eq("r3"))).andReturn(false).anyTimes();
        expect(r2.isCallbackOrderingPostreq((OFType)anyObject(),
                eq("r1"))).andReturn(false).anyTimes();
        expect(r2.isCallbackOrderingPostreq((OFType)anyObject(),
                eq("r3"))).andReturn(true).anyTimes();
        expect(r3.isCallbackOrderingPrereq((OFType)anyObject(),
                eq("r1"))).andReturn(false).anyTimes();
        expect(r3.isCallbackOrderingPrereq((OFType)anyObject(),
                eq("r2"))).andReturn(true).anyTimes();
        expect(r3.isCallbackOrderingPostreq((OFType)anyObject(),
                (String)anyObject())).andReturn(false).anyTimes();
        // Scenario 1: the first listener throws; the manager must catch it.
        expect(r1.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject())).
                andThrow(new RuntimeException("This is NOT an error! " +
                        "We are testing exception catching."));
        SimpleCounter cnt = (SimpleCounter)SimpleCounter.createCounter(
                new Date(),
                CounterType.LONG);
        cnt.increment();
        expect(counterStore.getCounter(
                flowReconcileMgr.controllerPktInCounterName))
                .andReturn(cnt)
                .anyTimes();
        replay(r1, r2, r3, counterStore);
        flowReconcileMgr.clearFlowReconcileListeners();
        flowReconcileMgr.addFlowReconcileListener(r1);
        flowReconcileMgr.addFlowReconcileListener(r2);
        flowReconcileMgr.addFlowReconcileListener(r3);
        int pre_flowReconcileThreadRunCount =
                flowReconcileMgr.flowReconcileThreadRunCount.get();
        Date startTime = new Date();
        OFMatchReconcile ofmRcIn = new OFMatchReconcile();
        try {
            flowReconcileMgr.reconcileFlow(ofmRcIn,EventPriority.HIGH);
            flowReconcileMgr.doReconcile();
        } catch (RuntimeException e) {
            assertEquals(e.getMessage()
                    .startsWith("This is NOT an error!"), true);
        }
        verify(r1, r2, r3);
        // verify STOP works
        reset(r1, r2, r3);
        // restart reconcileThread since it exited due to previous runtime
        // exception.
        flowReconcileMgr.startUp(fmc);
        expect(r1.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.STOP).times(1);
        // r2 must never run after r1 returns STOP; fail from an IAnswer if it does.
        expect(r2.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()));
        expectLastCall().andAnswer(new IAnswer<Object>() {
            public Object answer() {
                fail("Unexpected call");
                return Command.STOP;
            }
        }).anyTimes();
        pre_flowReconcileThreadRunCount =
                flowReconcileMgr.flowReconcileThreadRunCount.get();
        startTime = new Date();
        replay(r1, r2, r3);
        flowReconcileMgr.reconcileFlow(ofmRcIn,EventPriority.HIGH);
        // Busy-wait for the reconcile thread to make one pass, max ~1 second.
        while (flowReconcileMgr.flowReconcileThreadRunCount.get() <=
                pre_flowReconcileThreadRunCount) {
            Thread.sleep(10);
            Date currTime = new Date();
            assertTrue((currTime.getTime() - startTime.getTime()) < 1000);
        }
        verify(r1, r2, r3);
        // verify CONTINUE works
        reset(r1, r2, r3);
        expect(r1.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.CONTINUE).times(1);
        expect(r2.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.STOP).times(1);
        // r3 must not be reached because r2 returned STOP.
        expect(r3.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()));
        expectLastCall().andAnswer(new IAnswer<Object>() {
            public Object answer() {
                fail("Unexpected call");
                return Command.STOP;
            }
        }).anyTimes();
        pre_flowReconcileThreadRunCount =
                flowReconcileMgr.flowReconcileThreadRunCount.get();
        startTime = new Date();
        replay(r1, r2, r3);
        flowReconcileMgr.reconcileFlow(ofmRcIn,EventPriority.HIGH);
        while (flowReconcileMgr.flowReconcileThreadRunCount.get() <=
                pre_flowReconcileThreadRunCount) {
            Thread.sleep(10);
            Date currTime = new Date();
            assertTrue((currTime.getTime() - startTime.getTime()) < 1000);
        }
        verify(r1, r2, r3);
        // verify CONTINUE works
        reset(r1, r2, r3);
        expect(r1.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.CONTINUE).times(1);
        expect(r2.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.CONTINUE).times(1);
        expect(r3.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.STOP).times(1);
        pre_flowReconcileThreadRunCount =
                flowReconcileMgr.flowReconcileThreadRunCount.get();
        startTime = new Date();
        replay(r1, r2, r3);
        flowReconcileMgr.reconcileFlow(ofmRcIn,EventPriority.HIGH);
        while (flowReconcileMgr.flowReconcileThreadRunCount.get() <=
                pre_flowReconcileThreadRunCount) {
            Thread.sleep(10);
            Date currTime = new Date();
            assertTrue((currTime.getTime() - startTime.getTime()) < 1000);
        }
        verify(r1, r2, r3);
        // Verify removeFlowReconcileListener
        flowReconcileMgr.removeFlowReconcileListener(r1);
        reset(r1, r2, r3);
        // r1 was removed from the chain; any call to it is a failure.
        expect(r1.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()));
        expectLastCall().andAnswer(new IAnswer<Object>() {
            public Object answer() {
                fail("Unexpected call to a listener that is " +
                        "removed from the chain.");
                return Command.STOP;
            }
        }).anyTimes();
        expect(r2.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.CONTINUE).times(1);
        expect(r3.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.STOP).times(1);
        pre_flowReconcileThreadRunCount =
                flowReconcileMgr.flowReconcileThreadRunCount.get();
        startTime = new Date();
        replay(r1, r2, r3);
        flowReconcileMgr.reconcileFlow(ofmRcIn,EventPriority.HIGH);
        while (flowReconcileMgr.flowReconcileThreadRunCount.get() <=
                pre_flowReconcileThreadRunCount) {
            Thread.sleep(10);
            Date currTime = new Date();
            assertTrue((currTime.getTime() - startTime.getTime()) < 1000);
        }
        verify(r1, r2, r3);
    }
    /** Runs the packet-in rate checks for both counter representations. */
    @Test
    public void testGetPktInRate() {
        internalTestGetPktInRate(CounterType.LONG);
        internalTestGetPktInRate(CounterType.DOUBLE);
    }
    /**
     * Checks getPktInRate: clock skew (last counter in the future) caps the
     * rate at MAX_SYSTEM_LOAD_PER_SECOND, zero time difference yields 0, and
     * two normal deltas compute the expected per-second rate.
     */
    protected void internalTestGetPktInRate(CounterType type) {
        Date currentTime = new Date();
        SimpleCounter newCnt = (SimpleCounter)SimpleCounter.createCounter(
                currentTime, type);
        newCnt.increment(currentTime, 1);
        // Set the lastCounter time in the future of the current time
        Date lastCounterTime = new Date(currentTime.getTime() + 1000);
        flowReconcileMgr.lastPacketInCounter =
                (SimpleCounter)SimpleCounter.createCounter(
                        lastCounterTime, type);
        flowReconcileMgr.lastPacketInCounter.increment(lastCounterTime, 1);
        assertEquals(FlowReconcileManager.MAX_SYSTEM_LOAD_PER_SECOND,
                flowReconcileMgr.getPktInRate(newCnt, new Date()));
        // Verify the rate == 0 when the time difference is zero.
        lastCounterTime = new Date(currentTime.getTime() - 1000);
        flowReconcileMgr.lastPacketInCounter.increment(lastCounterTime, 1);
        assertEquals(0, flowReconcileMgr.getPktInRate(newCnt, lastCounterTime));
        /** verify the computation is correct.
         * new = 2000, old = 1000, Tdiff = 1 second.
         * rate should be 1000/second
         */
        newCnt = (SimpleCounter)SimpleCounter.createCounter(
                currentTime, type);
        newCnt.increment(currentTime, 2000);
        lastCounterTime = new Date(currentTime.getTime() - 1000);
        flowReconcileMgr.lastPacketInCounter =
                (SimpleCounter)SimpleCounter.createCounter(
                        lastCounterTime, type);
        flowReconcileMgr.lastPacketInCounter.increment(lastCounterTime, 1000);
        assertEquals(1000, flowReconcileMgr.getPktInRate(newCnt, currentTime));
        /** verify the computation is correct.
         * new = 2,000,000, old = 1,000,000, Tdiff = 2 second.
         * rate should be 1000/second
         */
        newCnt = (SimpleCounter)SimpleCounter.createCounter(
                currentTime, type);
        newCnt.increment(currentTime, 2000000);
        lastCounterTime = new Date(currentTime.getTime() - 2000);
        flowReconcileMgr.lastPacketInCounter =
                (SimpleCounter)SimpleCounter.createCounter(
                        lastCounterTime, type);
        flowReconcileMgr.lastPacketInCounter.increment(lastCounterTime,
                1000000);
        assertEquals(500000, flowReconcileMgr.getPktInRate(newCnt,
                currentTime));
    }
    /**
     * Checks getCurrentCapacity: the minimum-capacity fallback when the
     * packet-in counter is missing or the last counter is unset, then the
     * computed capacity (max load minus observed packet-in rate).
     */
    @Test
    public void testGetCurrentCapacity() throws Exception {
        // Disable the reconcile thread.
        flowReconcileMgr.flowReconcileEnabled = false;
        int minFlows = FlowReconcileManager.MIN_FLOW_RECONCILE_PER_SECOND *
                FlowReconcileManager.FLOW_RECONCILE_DELAY_MILLISEC / 1000;
        /** Verify the initial state, when packetIn counter has not
         * been created.
         */
        expect(counterStore.getCounter(
                flowReconcileMgr.controllerPktInCounterName))
                .andReturn(null)
                .times(1);
        replay(counterStore);
        assertEquals(minFlows, flowReconcileMgr.getCurrentCapacity());
        verify(counterStore);
        /** Verify the initial state, when lastPacketInCounter is null */
        reset(counterStore);
        Date currentTime = new Date();
        SimpleCounter newCnt = (SimpleCounter)SimpleCounter.createCounter(
                currentTime, CounterType.LONG);
        expect(counterStore.getCounter(
                flowReconcileMgr.controllerPktInCounterName))
                .andReturn(newCnt)
                .times(1);
        long initPktInCount = 1000;
        newCnt.increment(currentTime, initPktInCount);
        replay(counterStore);
        assertEquals(minFlows, flowReconcileMgr.getCurrentCapacity());
        verify(counterStore);
        /** Now the lastPacketInCounter has been set.
         * lastCounter = 1,000 and newCounter = 3,000, t = 1 second
         * packetInRate = 2,000/sec.
         * capacity should be 10k - 2k = 8k
         */
        reset(counterStore);
        newCnt = (SimpleCounter)SimpleCounter.createCounter(
                currentTime, CounterType.LONG);
        currentTime = new Date(currentTime.getTime() + 200);
        long nextPktInCount = 3000;
        newCnt.increment(currentTime, nextPktInCount);
        expect(counterStore.getCounter(
                flowReconcileMgr.controllerPktInCounterName))
                .andReturn(newCnt)
                .times(1);
        replay(counterStore);
        // Wait for 1 second so that enough elapsed time to compute capacity.
        Thread.sleep(1000);
        int capacity = flowReconcileMgr.getCurrentCapacity();
        verify(counterStore);
        long expectedCap = (FlowReconcileManager.MAX_SYSTEM_LOAD_PER_SECOND -
                (nextPktInCount - initPktInCount)) *
                FlowReconcileManager.FLOW_RECONCILE_DELAY_MILLISEC / 1000;
        assertEquals(expectedCap, capacity);
    }
    /** Verify the flows are sent to the reconcile pipeline in order.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testQueueFlowsOrder() {
        // Keep the reconcile thread off so flows accumulate, then reconcile once.
        flowReconcileMgr.flowReconcileEnabled = false;
        IFlowReconcileListener r1 =
                EasyMock.createNiceMock(IFlowReconcileListener.class);
        expect(r1.getName()).andReturn("r1").anyTimes();
        // Set the listeners' order: r1 -> r2 -> r3
        expect(r1.isCallbackOrderingPrereq((OFType)anyObject(),
                (String)anyObject())).andReturn(false).anyTimes();
        expect(r1.isCallbackOrderingPostreq((OFType)anyObject(),
                (String)anyObject())).andReturn(false).anyTimes();
        // The listener asserts that flow cookies arrive in insertion order 0..9.
        expect(r1.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andAnswer(new IAnswer<Command>() {
                    @Override
                    public Command answer() throws Throwable {
                        ArrayList<OFMatchReconcile> ofmList =
                                (ArrayList<OFMatchReconcile>)EasyMock.
                                getCurrentArguments()[0];
                        ListIterator<OFMatchReconcile> lit = ofmList.listIterator();
                        int index = 0;
                        while (lit.hasNext()) {
                            OFMatchReconcile ofm = lit.next();
                            assertEquals(index++, ofm.cookie);
                        }
                        return Command.STOP;
                    }
                }).times(1);
        SimpleCounter cnt = (SimpleCounter)SimpleCounter.createCounter(
                new Date(),
                CounterType.LONG);
        cnt.increment();
        expect(counterStore.getCounter(
                flowReconcileMgr.controllerPktInCounterName))
                .andReturn(cnt)
                .anyTimes();
        replay(r1, counterStore);
        flowReconcileMgr.clearFlowReconcileListeners();
        flowReconcileMgr.addFlowReconcileListener(r1);
        OFMatchReconcile ofmRcIn = new OFMatchReconcile();
        int index = 0;
        for (index = 0; index < 10; index++) {
            ofmRcIn.cookie = index;
            flowReconcileMgr.reconcileFlow(ofmRcIn,EventPriority.HIGH);
        }
        flowReconcileMgr.flowReconcileEnabled = true;
        flowReconcileMgr.doReconcile();
        verify(r1);
    }
    /**
     * Pushes flows from many threads, first with the reconcile thread off
     * (queue fills up) and then with it on (queue drains).
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testQueueFlowsByManyThreads() {
        // Disable the reconcile thread so that the queue won't be emptied.
        flowQueueTest(false);
        // Enable the reconcile thread. The queue should be empty.
        Date currentTime = new Date();
        SimpleCounter newCnt = (SimpleCounter)SimpleCounter.createCounter(
                currentTime, CounterType.LONG);
        expect(counterStore.getCounter(
                flowReconcileMgr.controllerPktInCounterName))
                .andReturn(newCnt)
                .anyTimes();
        long initPktInCount = 10000;
        newCnt.increment(currentTime, initPktInCount);
        IFlowReconcileListener r1 =
                EasyMock.createNiceMock(IFlowReconcileListener.class);
        expect(r1.getName()).andReturn("r1").anyTimes();
        // Set the listeners' order: r1 -> r2 -> r3
        expect(r1.isCallbackOrderingPrereq((OFType)anyObject(),
                (String)anyObject())).andReturn(false).anyTimes();
        expect(r1.isCallbackOrderingPostreq((OFType)anyObject(),
                (String)anyObject())).andReturn(false).anyTimes();
        expect(r1.reconcileFlows((ArrayList<OFMatchReconcile>)anyObject()))
                .andReturn(Command.CONTINUE).anyTimes();
        flowReconcileMgr.clearFlowReconcileListeners();
        replay(r1, counterStore);
        flowQueueTest(true);
        verify(r1, counterStore);
    }
    /**
     * Spawns NUM_THREADS workers each queueing NUM_FLOWS_PER_THREAD flows and
     * waits (max ~1 second) for the queue to reach the expected size: all
     * flows when the reconcile thread is disabled, zero when it is enabled.
     */
    protected void flowQueueTest(boolean enableReconcileThread) {
        flowReconcileMgr.flowReconcileEnabled = enableReconcileThread;
        // Simulate flow
        for (int i = 0; i < NUM_THREADS; i++) {
            Runnable worker = this.new FlowReconcileWorker();
            Thread t = new Thread(worker);
            t.start();
        }
        Date startTime = new Date();
        int totalFlows = NUM_THREADS * NUM_FLOWS_PER_THREAD;
        if (enableReconcileThread) {
            totalFlows = 0;
        }
        while (flowReconcileMgr.flowQueue.size() != totalFlows) {
            Date currTime = new Date();
            assertTrue((currTime.getTime() - startTime.getTime()) < 1000);
        }
        // Make sure all flows are in the queue.
        assertEquals(totalFlows, flowReconcileMgr.flowQueue.size());
    }
    /** Worker that queues NUM_FLOWS_PER_THREAD low-priority flows. */
    private class FlowReconcileWorker implements Runnable {
        @Override
        public void run() {
            OFMatchReconcile ofmRc = new OFMatchReconcile();
            // push large number of flows to be reconciled.
            for (int i = 0; i < NUM_FLOWS_PER_THREAD; i++) {
                flowReconcileMgr.reconcileFlow(ofmRc,EventPriority.LOW);
            }
        }
    }
}
| |
package com.azmotors.store.controller;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javafx.collections.ObservableList;
import javafx.collections.transformation.FilteredList;
import javafx.collections.transformation.SortedList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextField;
import javafx.scene.control.TextInputControl;
import javafx.util.Callback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.azmotors.store.controller.Command.handlers.AttemptSellCommandHandler;
import com.azmotors.store.controller.predicate.IndustryPartElementPredicate;
import com.azmotors.store.controller.repo.RepoOnDiskPointer;
import com.azmotors.store.foundation.FileUtil;
import com.azmotors.store.model.Constants;
import com.azmotors.store.model.IndustryPartElement;
import com.azmotors.store.view.TableCellButton;
abstract class IndustryPartElementTableController extends AZController
{
private static final Logger LOGGER = LoggerFactory.getLogger(IndustryPartElementTableController.class);
protected ObservableList<IndustryPartElement> m_repo;
private final FilteredList<IndustryPartElement> m_filteredList;
private final IndustryPartElementPredicate m_industryPartElementPredicate;
private Set<TextInputControl> m_clearableControlSet;
@FXML
private TableView<IndustryPartElement> m_industryPartsElementTableView;
@FXML
private Label m_totalIndustryPartsElementAmount;
@FXML
private Label m_soldIndustryPartsElementAmount;
@FXML
private Label m_inStoreIndustryPartsElementAmount;
@FXML
private Button m_add;
@FXML
protected TextField m_addName;
@FXML
protected TextField m_addKW;
@FXML
protected TextField m_addHP;
@FXML
protected TextField m_addRPM;
@FXML
protected TextField m_addVolt;
@FXML
protected TextField m_addHz;
@FXML
protected TextField m_addHo;
@FXML
protected TextField m_addWholesalePrice;
@FXML
protected TextField m_addRetailPrice;
@FXML
private Button m_go;
@FXML
private TextField m_horsePowerKey;
@FXML
private TextField m_RPMkey;
@FXML
private TextField m_SearchKeyField;
@FXML
private TableColumn<IndustryPartElement, String> m_kindColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_typeColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_kwColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_hpColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_rpmColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_voltColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_HzColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_HoColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_priceWholesaleColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_priceRetailColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_dateStoredColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_dateSoldColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_daysInStoreColumn;
@FXML
private TableColumn<IndustryPartElement, String> m_ButtonAttemptSell;
/**
 * Creates a controller backed by the given element repository.
 * <p>
 * The repository is wrapped in a {@code FilteredList} whose initial predicate
 * accepts every element, so the complete data set is shown until the user
 * enters a search criterion; the predicate object created here drives the
 * filtering once criteria change.
 *
 * @param repo the observable repository of industry part elements; never null
 */
protected IndustryPartElementTableController(final ObservableList<IndustryPartElement> repo)
{
    assert null != repo : "Parameter 'repo' of method 'IndustryPartElementTableController' must not be null";
    m_repo = repo;
    // Show everything initially: the predicate accepts each element unconditionally.
    m_filteredList = new FilteredList<>(repo, element -> true);
    m_industryPartElementPredicate = new IndustryPartElementPredicate(m_filteredList);
}
/**
 * Wires the table: binds each column to its element property, installs the
 * per-row "sell" button, connects the live search field to the filter
 * predicate, wraps the filtered list in a sorted list bound to the table's
 * comparator, and registers the "go" and "add" button handlers.
 * Must be called exactly once after FXML injection; subclass-specific wiring
 * happens in {@link #initialiseSpecific()}.
 */
protected final void initialiseSuper()
{
    assert null != m_repo : "Class field 'm_repo' in method 'initialiseSuper' must not be null";
    updateAmounts();
    // Bind each column to the corresponding property of the row element.
    // (m_kindColumn is not bound here; presumably handled in initialiseSpecific() — confirm.)
    m_typeColumn.setCellValueFactory(cellData -> cellData.getValue().nameProperty());
    m_kwColumn.setCellValueFactory(cellData -> cellData.getValue().kwProperty());
    m_hpColumn.setCellValueFactory(cellData -> cellData.getValue().hpProperty());
    m_rpmColumn.setCellValueFactory(cellData -> cellData.getValue().rpmProperty());
    m_voltColumn.setCellValueFactory(cellData -> cellData.getValue().voltProperty());
    m_HzColumn.setCellValueFactory(cellData -> cellData.getValue().HzProperty());
    m_HoColumn.setCellValueFactory(cellData -> cellData.getValue().HoProperty());
    m_priceWholesaleColumn.setCellValueFactory(cellData -> cellData.getValue().priceWholesale());
    m_priceRetailColumn.setCellValueFactory(cellData -> cellData.getValue().priceRetail());
    m_dateStoredColumn.setCellValueFactory(cellData -> cellData.getValue().dateStoredProperty());
    m_dateSoldColumn.setCellValueFactory(cellData -> cellData.getValue().dateSoldProperty());
    m_daysInStoreColumn.setCellValueFactory(cellData -> cellData.getValue().daysInStoreProperty());
    initialiseSpecific();
    // Every cell of the sell column hosts its own button.
    m_ButtonAttemptSell.setCellFactory(column -> composeButtonAttemptSell());
    // 2. Re-evaluate the filter predicate whenever the search text changes.
    m_SearchKeyField.textProperty().addListener(m_industryPartElementPredicate.predicateOnAllProperties());
    // 3. Wrap the filtered list in a sorted list bound to the table's comparator,
    // so clicking column headers sorts the (filtered) view.
    final SortedList<IndustryPartElement> sortedList = new SortedList<>(m_filteredList);
    sortedList.comparatorProperty().bind(m_industryPartsElementTableView.comparatorProperty());
    m_industryPartsElementTableView.setItems(sortedList);
    gatherClearableInputControls();
    m_go.setOnAction(actionEvent -> handleWhenGoClicked());
    m_add.setOnAction(event ->
    {
        m_repo.add(instantiateTargetIndustryPart());
        clearClearables();
        persist(m_repo);
        // BUGFIX: previously this called initialiseSuper() recursively, which
        // re-registered the search-field listener on every add (listener
        // accumulation) and rebuilt the filtered/sorted chain. The observable
        // chain already reflects the new element; only the counters need refreshing.
        updateAmounts();
    });
}
/** Hook for subclasses to perform their table wiring; called from initialiseSuper() after the common column bindings. */
protected abstract void initialiseSpecific();
/**
 * Blanks every registered clearable input control. Tolerates a missing
 * (null) set and null entries, logging both instead of failing.
 */
protected final void clearClearables()
{
    if (null == m_clearableControlSet)
    {
        // gatherClearableInputControls() has not run yet — nothing to clear.
        if (LOGGER.isDebugEnabled())
        {
            LOGGER.debug("m_clearableControlSet is null");
        }
        return;
    }
    for (final TextInputControl control : m_clearableControlSet)
    {
        if (null == control)
        {
            LOGGER.debug("next is null");
        }
        else
        {
            control.clear();
        }
    }
}
/** Builds a new element from the current content of the entry fields; invoked by the "add" button handler. */
protected abstract IndustryPartElement instantiateTargetIndustryPart();
/**
 * Collects every text input that must be blanked after a successful "add"
 * into an unmodifiable set stored in {@code m_clearableControlSet}.
 */
protected final void gatherClearableInputControls()
{
    final Set<TextInputControl> clearables = new HashSet<>(10);
    // Common entry fields shared by all subclasses.
    clearables.add(m_addName);
    clearables.add(m_addKW);
    clearables.add(m_addHP);
    clearables.add(m_addRPM);
    clearables.add(m_addVolt);
    clearables.add(m_addHz);
    clearables.add(m_addHo);
    clearables.add(m_addWholesalePrice);
    clearables.add(m_addRetailPrice);
    // Let the concrete subclass contribute its own clearable controls.
    add2GatherClearableInputControls(clearables);
    m_clearableControlSet = Collections.unmodifiableSet(clearables);
}
/** Hook for subclasses to add their own clearable input controls to the given (mutable) set. */
protected abstract void add2GatherClearableInputControls(final Set<TextInputControl> tmp);
/**
 * Writes the whole repository to its on-disk location, one presentation
 * name per line.
 *
 * @param repo the repository to persist; must not be null
 */
protected final void persist(final ObservableList<IndustryPartElement> repo)
{
    assert null != repo : "Parameter 'repo' of method 'persist' must not be null";
    final List<String> presentationNames = new ArrayList<>(repo.size());
    for (final IndustryPartElement element : repo)
    {
        presentationNames.add(element.getPresentationName());
    }
    FileUtil.write(presentationNames, deriveSpecificRepoOnDiskPointer());
}
/** Supplies the subclass-specific on-disk location used by persist(). */
protected abstract RepoOnDiskPointer deriveSpecificRepoOnDiskPointer();
/**
 * Creates one table cell hosting a "sell" button whose action attempts to
 * sell the row's element via an AttemptSellCommandHandler.
 */
protected final TableCell<IndustryPartElement, String> composeButtonAttemptSell()
{
    final AttemptSellCommandHandler sellHandler = new AttemptSellCommandHandler(m_industryPartsElementTableView, this);
    return new TableCellButton(this, Constants.SELL, sellHandler);
}
/**
 * "Go" button handler: reads the HP and RPM search fields (lower-cased)
 * and narrows the table filter to rows matching both criteria.
 */
protected final void handleWhenGoClicked()
{
    final String hpCriterion = m_horsePowerKey.getText().toLowerCase();
    final String rpmCriterion = m_RPMkey.getText().toLowerCase();
    m_industryPartElementPredicate.predicateOn(rpmCriterion, hpCriterion);
}
/**
 * Callback fired when a value of some element was edited in place.
 * The element already lives in m_repo, so re-persisting the whole
 * repository is sufficient; the index is not needed here.
 */
@Override
public final void valueModified(int index)
{
    persist(m_repo);
}
/** Refreshes the three summary labels (in-store / sold / total) from the repository. */
protected final void updateAmounts()
{
    final int inStore = deriveInStoreIndustryPartsElementAmount(m_repo);
    final int sold = deriveAmountOfSoldIndustryPartsElement(m_repo);
    m_inStoreIndustryPartsElementAmount.setText(String.valueOf(inStore));
    m_soldIndustryPartsElementAmount.setText(String.valueOf(sold));
    m_totalIndustryPartsElementAmount.setText(String.valueOf(m_repo.size()));
}
/**
 * Counts the elements of the repository that are currently in store.
 *
 * @param repo the repository to scan; must not be null
 * @return the number of elements for which isInStore() is true
 */
private int deriveInStoreIndustryPartsElementAmount(final ObservableList<IndustryPartElement> repo)
{
    // BUGFIX: the assert message referenced the stale name 'deriveInStoreMotorsAmount'.
    assert null != repo : "Parameter 'repo' of method 'deriveInStoreIndustryPartsElementAmount' must not be null";
    int amount = 0;
    // Enhanced for instead of an explicit Iterator — same traversal, clearer intent.
    for (final IndustryPartElement next : repo)
    {
        if (next.isInStore())
        {
            amount++;
        }
    }
    return amount;
}
/**
 * Counts the elements of the repository that have been sold.
 *
 * @param repo the repository to scan; must not be null
 * @return the number of elements for which isSold() is true
 */
private int deriveAmountOfSoldIndustryPartsElement(final ObservableList<IndustryPartElement> repo)
{
    // BUGFIX: the assert message referenced the stale name 'getAmountOfSoldMotors'.
    assert null != repo : "Parameter 'repo' of method 'deriveAmountOfSoldIndustryPartsElement' must not be null";
    int amount = 0;
    // Enhanced for instead of an explicit Iterator — same traversal, clearer intent.
    for (final IndustryPartElement next : repo)
    {
        if (next.isSold())
        {
            amount++;
        }
    }
    return amount;
}
/**
 * Callback fired after an element was successfully sold; refreshes the
 * summary labels. The element itself is not used — the counts are
 * re-derived from the repository.
 */
@Override
public final void sold(IndustryPartElement industryPartElement)
{
    assert null != industryPartElement : "Parameter 'industryPartElement' of method 'sold' must not be null";
    updateAmounts();
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.